From 688448db7547be90203440cfd105703d8a853f39 Mon Sep 17 00:00:00 2001
From: xingjl
Date: Fri, 14 Mar 2025 15:41:56 +0800
Subject: [PATCH] Update code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
CHANGELOG.md | 254 +- CODEOWNERS | 98 +- Dockerfile.ci.dev | 158 +- Dockerfile.ci.lts | 156 +- Dockerfile.linting | 65 +- GPT_pretraining.sh | 157 - LICENSE | 545 +- Llama_pretraining.sh | 211 - README.md | 13 +- docs/llama_mistral.md | 924 +- docs/source/api-guide/custom_fsdp.md | 183 + docs/source/api-guide/index.rst | 41 +- .../api-guide/multi_latent_attention.rst | 14 + .../api-guide/optimizer_cpu_offload.rst | 4 + .../images/custom_fsdp/FSDP_Allreduce.png | Bin 0 -> 86481 bytes .../images/custom_fsdp/FSDP_workflow.png | Bin 0 -> 568617 bytes .../MCore_Custom_FSDP_Class_Diagram.png | Bin 0 -> 510903 bytes .../finetune_gpt_distributed-1.3b.sh | 126 +- .../detxoify_lm/generate-1.3b.sh | 0 .../selfgenerate-1.3b-unconditional.sh | 0 .../msdp/data_processing.sh | 0 .../msdp/eval_knwl_generation.sh | 0 .../msdp/eval_resp_generation.sh | 0 .../msdp/prep_resp_gen.sh | 0 .../msdp/prompt_knwl_gen.sh | 0 .../msdp/prompt_resp_gen.sh | 0 .../academic_paper_scripts/sc21/CONFIG.sh | 114 +- .../academic_paper_scripts/sc21/SBATCH.sh | 26 +- examples/academic_paper_scripts/sc21/SRUN.sh | 36 +- .../sc21/run_figure_11.sh | 92 +- .../sc21/run_figure_12.sh | 108 +- .../sc21/run_figure_13.sh | 92 +- .../sc21/run_figure_14.sh | 94 +- .../sc21/run_figure_15.sh | 94 +- .../sc21/run_figure_16.sh | 86 +- .../sc21/run_figure_17.sh | 108 +- .../sc21/run_figure_18.sh | 108 +- .../sc21/run_table_1.sh | 290 +- examples/bert/train_bert_340m_distributed.sh | 0 .../export/ptq_and_trtllm_export/README.md | 4 +- .../ptq_trtllm_llama2_7b.sh | 0 .../ptq_trtllm_llama3_1_8b.sh | 0 .../ptq_trtllm_llama3_8b.sh | 0 .../ptq_trtllm_minitron_8b.sh | 0 .../ptq_trtllm_mistral_12b.sh | 0 .../ptq_trtllm_mixtral_8x7b.sh | 0 examples/gpt3/gpt_config.yaml | 601 +- examples/gpt3/hostfile_gpt_567B | 0 .../gpt3/run_gpt_567B_1nodes.sh | 6 +- .../gpt3/run_gpt_567B_multinodes.sh | 6 +- examples/gpt3/train_gpt3_175b_distributed.sh | 164 +- .../gpt3/train_gpt_567B_1nodes.sh | 33 +- .../gpt3/train_gpt_567B_multinodes.sh | 38 +- examples/inference/gpt/gpt_batch_inference.py | 315 +- .../run_text_generation_llama3.1.sh | 112 +- .../run_text_generation_llama3.sh | 110 +- .../run_text_generation_mistral.sh | 106 +- .../run_text_generation_server_345M.sh | 62 +- ...eneration_server_345M_8_tensor_parallel.sh | 58 +- examples/mamba/run_text_gen_server_8b.sh | 100 +- examples/mamba/run_text_gen_server_8b_gpt3.sh | 0 examples/mamba/train.sh | 210 +- examples/mixtral/hostfile_mixtral_8x7B | 0 .../mixtral/run_mixtral_8x7B_1nodes.sh | 0 .../mixtral/run_mixtral_8x7B_multinodes.sh | 6 +- .../mixtral/train_mixtral_8x7B_1nodes.sh | 36 +- .../mixtral/train_mixtral_8x7B_multinodes.sh | 39 +- .../mixtral/train_mixtral_8x7b_distributed.sh | 116 - .../combine_lm_vision_checkpoints.sh | 114 +- examples/multimodal/config.py | 480 +- examples/multimodal/dataset_helpers.py | 1720 +- examples/multimodal/energon_util.py | 48 + .../multimodal/evaluation/evaluate_infovqa.py | 48 + .../evaluation/evaluate_spdocvqa.py | 48 + .../multimodal/evaluation/evaluate_vqav2.py | 270 +- .../evaluation/evaluation_datasets.py | 1868 +- examples/multimodal/image_processing.py | 261 +- examples/multimodal/layer_specs.py | 274 +- examples/multimodal/model.py | 470 +- .../model_converter/internvit_converter.py | 324
+- .../model_converter/radio_converter.py | 152 + examples/multimodal/multimodal_args.py | 168 +- examples/multimodal/nvlm/internvit.py | 552 +- .../nvlm/pretrain_qwen20_72b_internvit_6b.sh | 0 .../nvlm/pretrain_yi_34b_internvit_6b.sh | 309 +- ...text_generation_qwen20_72b_internvit_6b.sh | 282 +- ...xt_generation_qwen25_7b_internvit_video.sh | 129 + .../run_text_generation_qwen25_7b_siglip.sh | 222 +- ...run_text_generation_yi_34b_internvit_6b.sh | 0 examples/multimodal/nvlm/sft_34b_internvit.sh | 321 +- .../nvlm/sft_qwen20_72b_internvit_6b.sh | 0 .../nvlm/sft_qwen2p5_7b_internvit_6b_video.sh | 184 + examples/multimodal/pretrain_mistral_clip.sh | 254 +- examples/multimodal/run_text_generation.py | 1110 +- examples/multimodal/sft_mistral_clip.sh | 260 +- .../text_generation_mistral_clip.sh | 218 +- examples/multimodal/train.py | 716 +- examples/retro/preprocess_data.sh | 0 examples/retro/train_retro_2b_distributed.sh | 0 examples/t5/train_t5_220m_distributed.sh | 156 +- gptnodes | 32 - megatron/core/datasets/blended_dataset.py | 402 +- .../blended_megatron_dataset_builder.py | 1107 +- .../blended_megatron_dataset_config.py | 349 +- megatron/core/dist_checkpointing/__init__.py | 24 +- .../core/dist_checkpointing/exchange_utils.py | 1063 +- megatron/core/dist_checkpointing/mapping.py | 1448 +- .../core/dist_checkpointing/serialization.py | 848 +- .../state_dict_transformation.py | 270 - .../dist_checkpointing/state_dict_utils.py | 112 + .../strategies/async_utils.py | 767 +- .../dist_checkpointing/strategies/base.py | 455 +- .../cached_metadata_filesystem_reader.py | 38 + .../dist_checkpointing/strategies/common.py | 314 +- .../strategies/filesystem_async.py | 935 +- .../strategies/fully_parallel.py | 954 +- .../strategies/resharding.py | 633 +- .../strategies/state_dict_saver.py | 409 +- .../dist_checkpointing/strategies/torch.py | 1949 +- .../strategies/two_stage.py | 522 +- .../tensor_aware_state_dict.py | 347 + megatron/core/dist_checkpointing/utils.py | 538 +- .../core/distributed/custom_fsdp/__init__.py | 3 + .../fully_sharded_data_parallel.py | 687 + .../custom_fsdp/param_and_grad_buffer.py | 1971 + .../core/distributed/data_parallel_base.py | 192 +- .../distributed_data_parallel_config.py | 127 +- .../core/distributed/finalize_model_grads.py | 609 +- .../core/distributed/param_and_grad_buffer.py | 1718 +- .../torch_fully_sharded_data_parallel.py | 238 +- .../default_conversion_dict.py | 4 + .../core/extensions/transformer_engine.py | 2632 +- .../core/inference/ammo_support/__init__.py | 8 - .../inference/ammo_support/gpt/model_specs.py | 2 - .../ammo_support/gpt/state_dict_hooks.py | 5 - megatron/core/inference/async_stream.py | 67 + .../core/inference/communication_utils.py | 104 +- .../core/inference/engines/mcore_engine.py | 348 +- megatron/core/inference/inference_request.py | 91 +- .../abstract_model_inference_wrapper.py | 553 +- .../gpt/gpt_inference_wrapper.py | 192 +- .../inference_wrapper_config.py | 94 +- .../multimodal/vlm_inference_wrapper.py | 208 + .../t5/t5_inference_wrapper.py | 440 +- .../inference/modelopt_support/__init__.py | 18 +- .../modelopt_support/gpt/model_specs.py | 131 +- .../modelopt_support/mamba/__init__.py | 1 + .../modelopt_support/mamba/model_specs.py | 89 + megatron/core/inference/sampling_params.py | 71 +- megatron/core/inference/scheduler.py | 302 +- ...oder_decoder_text_generation_controller.py | 73 +- .../simple_text_generation_controller.py | 10 +- .../text_generation_controller.py | 1074 +- .../vlm_text_generation_controller.py | 40 + 
megatron/core/inference_params.py | 131 +- megatron/core/jit.py | 16 +- megatron/core/model_parallel_config.py | 779 +- megatron/core/models/T5/t5_model.py | 77 +- .../embeddings/relative_pos_embedding.py | 173 + .../common/embeddings/rotary_pos_embedding.py | 428 +- megatron/core/models/gpt/gpt_layer_specs.py | 733 +- megatron/core/models/gpt/gpt_model.py | 640 +- megatron/core/models/gpt/moe_module_specs.py | 162 +- megatron/core/models/huggingface/__init__.py | 2 + .../core/models/huggingface/clip_model.py | 22 + megatron/core/models/huggingface/module.py | 50 + .../core/models/huggingface/qwen_model.py | 36 + .../core/models/mamba/mamba_layer_specs.py | 134 +- .../models/multimodal/context_parallel.py | 99 + .../core/models/multimodal/llava_model.py | 1882 +- megatron/core/models/multimodal/llava_spec.py | 176 +- megatron/core/models/vision/clip_vit_model.py | 443 +- megatron/core/models/vision/radio.py | 325 + .../core/models/vision/vit_layer_specs.py | 190 +- megatron/core/optimizer/__init__.py | 1065 +- .../core/optimizer/cpu_offloading/README.md | 13 + .../core/optimizer/cpu_offloading/__init__.py | 2 + .../cpu_offloading/hybrid_optimizer.py | 465 + megatron/core/optimizer/distrib_optimizer.py | 4060 +- megatron/core/optimizer/optimizer.py | 2234 +- megatron/core/optimizer/optimizer_config.py | 393 +- megatron/core/package_info.py | 58 +- megatron/core/parallel_state.py | 3996 +- .../pipeline_parallel/p2p_communication.py | 1270 +- megatron/core/rerun_state_machine.py | 2442 +- megatron/core/ssm/mamba_layer.py | 261 +- megatron/core/tensor_parallel/layers.py | 2433 +- megatron/core/tensor_parallel/mappings.py | 6 +- megatron/core/tensor_parallel/random.py | 749 +- megatron/core/timers.py | 870 +- megatron/core/transformer/attention.py | 1485 +- megatron/core/transformer/cuda_graphs.py | 1661 +- megatron/core/transformer/mlp.py | 528 +- megatron/core/transformer/moe/README.md | 820 +- megatron/core/transformer/moe/experts.py | 1742 +- megatron/core/transformer/moe/fused_a2a.py | 202 + .../moe/legacy_a2a_token_dispatcher.py | 631 +- megatron/core/transformer/moe/moe_layer.py | 298 +- megatron/core/transformer/moe/moe_utils.py | 1173 +- megatron/core/transformer/moe/router.py | 698 +- .../core/transformer/moe/token_dispatcher.py | 1495 +- .../transformer/multi_latent_attention.py | 801 +- .../core/transformer/transformer_block.py | 1282 +- .../core/transformer/transformer_config.py | 1604 +- .../core/transformer/transformer_layer.py | 899 +- megatron/core/transformer/utils.py | 383 +- megatron/core/utils.py | 2966 +- .../inference/text_generation/generation.py | 938 +- .../inference/text_generation/tokenization.py | 271 +- megatron/inference/text_generation_server.py | 465 +- megatron/legacy/model/language_model.py | 4 - megatron/legacy/model/rms_norm.py | 4 - megatron/legacy/model/transformer.py | 3700 +- megatron/legacy/model/utils.py | 4 +- megatron/training/arguments.py | 4642 +- megatron/training/async_utils.py | 108 +- megatron/training/checkpointing.py | 2806 +- megatron/training/ft_integration.py | 477 +- megatron/training/initialize.py | 982 +- megatron/training/one_logger_utils.py | 929 +- .../tokenizer/multimodal_tokenizer.py | 563 +- megatron/training/tokenizer/tokenizer.py | 1756 +- megatron/training/training.py | 4006 +- megatron/training/utils.py | 1006 +- megatron/training/wandb_utils.py | 63 + megatron/training/yaml_arguments.py | 916 +- mixtralnodes | 2 - pretrain_bert.py | 385 +- pretrain_gpt.py | 634 +- pretrain_mamba.py | 534 +- pretrain_retro.py | 489 +- 
pretrain_t5.py | 615 +- pretrain_vlm.py | 917 +- pytest.ini | 10 +- requirements.txt | 19 - requirements/pytorch_24.01/requirements.txt | 31 +- requirements/pytorch_24.07/requirements.txt | 30 +- requirements/pytorch_24.10/requirements.txt | 17 + run.sh | 16 - setup.py | 251 +- tasks/orqa/evaluate_utils.py | 351 +- tasks/orqa/unsupervised/qa_utils.py | 0 tasks/orqa/unsupervised/tokenizers.py | 0 tasks/vision/segmentation/metrics.py | 0 .../python_test_utils/common.py | 379 +- .../python_test_utils/conftest.py | 61 + .../get_test_results_from_tensorboard_logs.py | 101 +- .../python_test_utils/test_ci_pipeline.py | 96 - .../python_test_utils/test_fp8_ci_pipeline.py | 113 - .../test_regular_pipeline.py | 57 + .../test_resume_checkpoint_pipeline.py | 140 +- .../shell_test_utils/_run_training.sh | 212 +- .../shell_test_utils/run_ci_test.sh | 270 +- .../golden_values_dev.json | 53 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 4 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 46 + .../golden_values_dev.json | 71 +- .../golden_values_lts.json | 71 +- .../model_config.yaml | 4 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 4 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 4 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 4 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 4 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 4 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 4 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 6 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 6 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 6 +- .../golden_values_dev.json | 51 +- .../golden_values_lts.json | 51 +- .../model_config.yaml | 6 +- .../golden_values_dev.json | 51 +- .../golden_values_lts.json | 51 +- .../model_config.yaml | 6 +- .../bert_release/golden_values_0.10.0.json | 1 + .../bert_release/golden_values_0.9.0.json | 8064 +--- .../bert/bert_release/model_config.yaml | 94 +- .../common/ckpt_converter/__main__.py | 1260 +- .../model_config.yaml | 43 + .../golden_values_0.10.0.json | 1 + .../golden_values_0.9.0.json | 32050 +----------- .../gpt/gpt3_15b_8t_release/model_config.yaml | 174 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 56 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 57 + .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../model_config.yaml | 7 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../model_config.yaml | 56 + .../golden_values_dev.json | 1 + .../model_config.yaml | 57 + 
.../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 56 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 57 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 51 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 51 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../model_config.yaml | 59 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 58 + .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 6 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- 
.../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 54 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 54 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 114 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 104 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 115 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 55 + .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 58 + .../model_config.yaml | 7 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 116 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 53 + .../golden_values_lts.json | 37 + .../model_config.yaml | 62 + 
.../golden_values_dev.json | 54 +- .../golden_values_lts.json | 38 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 38 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 38 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 54 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../model_config.yaml | 52 + .../golden_values_dev.json | 1 + .../model_config.yaml | 54 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 54 + .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 142 + .../golden_values_lts.json | 142 + .../model_config.yaml | 54 + .../golden_values_dev.json | 142 + .../golden_values_lts.json | 142 + .../model_config.yaml | 63 + .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 51 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 613 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + 
.../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 7 +- .../golden_values_0.10.0.json | 1 + .../golden_values_0.9.0.json | 204 +- .../golden_values_0.10.0.json | 1 + .../golden_values_0.8.0.json | 15510 +----- .../golden_values_0.9.0.json | 21879 +-------- .../model_config.yaml | 192 +- .../model_config.yaml | 192 +- .../golden_values_0.10.0.json | 1 + .../golden_values_0.9.0.json | 276 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 54 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 56 + .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 84 +- .../golden_values_lts.json | 764 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 2 +- .../golden_values_lts.json | 2 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 764 +- .../golden_values_lts.json | 764 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 84 +- .../golden_values_lts.json | 84 +- .../model_config.yaml | 5 +- .../golden_values_dev.json | 1 + .../golden_values_lts.json | 1 + .../model_config.yaml | 5 +- .../golden_values_dev.json | 84 +- .../golden_values_lts.json | 84 +- .../model_config.yaml | 5 +- .../t5/t5_release/golden_values_0.10.0.json | 1 + .../t5/t5_release/golden_values_0.9.0.json | 40224 +--------------- .../t5/t5_release/model_config.yaml | 120 +- tests/test_utils/python_scripts/common.py | 33 +- .../download_coverage_results.py | 98 + .../python_scripts/download_golden_values.py | 94 + .../generate_jet_trigger_job.py | 333 +- .../python_scripts/generate_local_jobs.py | 22 +- .../python_scripts/launch_jet_workload.py | 654 +- tests/test_utils/recipes/bert.yaml | 163 +- tests/test_utils/recipes/gpt-modelopt.yaml | 18 +- tests/test_utils/recipes/gpt-nemo.yaml | 28 +- tests/test_utils/recipes/gpt.yaml | 893 +- .../test_utils/recipes/multimodal-llava.yaml | 66 +- tests/test_utils/recipes/t5.yaml | 169 +- tests/test_utils/recipes/unit-tests.yaml | 223 +- tests/unit_tests/data/test_builder.py | 685 +- .../unit_tests/dist_checkpointing/conftest.py | 49 +- .../models/test_moe_experts.py | 787 +- 
.../dist_checkpointing/test_async_save.py | 206 +- .../dist_checkpointing/test_fully_parallel.py | 783 +- .../test_global_metadata_reuse.py | 170 + .../dist_checkpointing/test_local.py | 548 +- .../dist_checkpointing/test_nonpersistent.py | 280 +- .../dist_checkpointing/test_optimizer.py | 1201 +- .../dist_checkpointing/test_replication.py | 163 + .../dist_checkpointing/test_serialization.py | 1703 +- ..._cached_metadata.py => test_torch_dist.py} | 214 +- tests/unit_tests/dist_checkpointing/utils.py | 516 +- .../distributed/test_finalize_model_grads.py | 62 + .../test_grad_sync_with_expert_parallel.py | 209 + .../distributed/test_param_and_grad_buffer.py | 23 +- .../inference/engines/test_mcore_engine.py | 339 +- .../gpt/test_gpt_inference_wrapper.py | 270 +- .../t5/test_t5_inference_wrapper.py | 258 +- .../inference/test_modelopt_gpt_model.py | 43 - .../inference/test_modelopt_module_spec.py | 207 + tests/unit_tests/inference/test_scheduler.py | 16 +- ...oder_decoder_text_generation_controller.py | 2 +- .../test_simple_text_generation_controller.py | 466 +- .../test_vlm_text_generation_controller.py | 160 + tests/unit_tests/models/test_gpt_model.py | 199 +- tests/unit_tests/models/test_llava_model.py | 1916 +- .../models/test_multimodal_projector.py | 150 +- tests/unit_tests/models/test_radio_model.py | 61 + tests/unit_tests/models/test_t5_model.py | 727 +- .../pipeline_parallel/test_schedules.py | 658 +- tests/unit_tests/test_model_configs.py | 37 + tests/unit_tests/test_optimizer.py | 275 +- .../test_optimizer_cpu_offloading.py | 141 + tests/unit_tests/test_parallel_state.py | 1041 +- tests/unit_tests/test_utils.py | 485 +- .../moe/test_a2a_token_dispatcher.py | 199 +- .../transformer/moe/test_aux_loss.py | 286 +- .../transformer/moe/test_moe_layer.py | 378 +- .../transformer/moe/test_routers.py | 380 +- .../transformer/moe/test_token_dispatcher.py | 633 +- .../unit_tests/transformer/test_attention.py | 250 +- .../test_multi_latent_attention.py | 383 +- .../transformer/test_relative_attention.py | 38 + .../transformer/test_retro_attention.py | 405 +- .../transformer/test_spec_customization.py | 482 +- .../transformer/test_transformer_block.py | 367 +- .../transformer/test_transformer_layer.py | 237 +- tools/autoformat.sh | 78 +- tools/checkpoint/convert.py | 327 +- .../{loader_mcore.py => loader_core.py} | 808 +- .../{loader_megatron.py => loader_legacy.py} | 752 +- tools/checkpoint/loader_llama_mistral.py | 1331 +- .../{saver_mcore.py => saver_core.py} | 1098 +- .../{saver_megatron.py => saver_legacy.py} | 830 +- .../{schema_mcore.py => schema_core.py} | 286 +- tools/copyright.sh | 0 tools/preprocess_mmdata.py | 338 +- tools/retro/preprocess_data.py | 591 +- tools/retro/sft/sft_retro_lm.sh | 0 tools/retro/text_generation/evaluate.py | 400 +- tools/retro/text_generation/metrics.py | 160 +- tools/retro/text_generation/retro_generate.sh | 250 +- .../text_generation/retro_text_generation.py | 526 +- tools/run_text_generation_server.py | 333 +- unit-test-job-lts.yaml | 107 - 826 files changed, 84637 insertions(+), 193346 deletions(-) delete mode 100644 GPT_pretraining.sh delete mode 100644 Llama_pretraining.sh create mode 100644 docs/source/api-guide/custom_fsdp.md create mode 100644 docs/source/api-guide/multi_latent_attention.rst create mode 100644 docs/source/api-guide/optimizer_cpu_offload.rst create mode 100644 docs/source/images/custom_fsdp/FSDP_Allreduce.png create mode 100644 docs/source/images/custom_fsdp/FSDP_workflow.png create mode 100644 
docs/source/images/custom_fsdp/MCore_Custom_FSDP_Class_Diagram.png mode change 100644 => 100755 examples/academic_paper_scripts/detxoify_lm/finetune_gpt_distributed-1.3b.sh mode change 100644 => 100755 examples/academic_paper_scripts/detxoify_lm/generate-1.3b.sh mode change 100644 => 100755 examples/academic_paper_scripts/detxoify_lm/self_generation/selfgenerate-1.3b-unconditional.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/data_processing.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/eval_knwl_generation.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/eval_resp_generation.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/prep_resp_gen.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/prompt_knwl_gen.sh mode change 100644 => 100755 examples/academic_paper_scripts/msdp/prompt_resp_gen.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/CONFIG.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/SBATCH.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/SRUN.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_11.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_12.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_13.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_14.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_15.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_16.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_17.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_figure_18.sh mode change 100644 => 100755 examples/academic_paper_scripts/sc21/run_table_1.sh mode change 100644 => 100755 examples/bert/train_bert_340m_distributed.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_llama2_7b.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_1_8b.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_8b.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_minitron_8b.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_mistral_12b.sh mode change 100644 => 100755 examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh create mode 100644 examples/gpt3/hostfile_gpt_567B rename run_GPT-MOE_1nodes.sh => examples/gpt3/run_gpt_567B_1nodes.sh (52%) mode change 100644 => 100755 rename run_mixtral8x7B_2nodes.sh => examples/gpt3/run_gpt_567B_multinodes.sh (67%) mode change 100644 => 100755 mode change 100644 => 100755 examples/gpt3/train_gpt3_175b_distributed.sh rename train_GPT-MOE_567B_1nodes.sh => examples/gpt3/train_gpt_567B_1nodes.sh (88%) mode change 100644 => 100755 rename train_GPT-MOE_567B.sh => examples/gpt3/train_gpt_567B_multinodes.sh (87%) mode change 100644 => 100755 mode change 100644 => 100755 examples/inference/llama_mistral/run_text_generation_llama3.1.sh mode change 100644 => 100755 examples/inference/llama_mistral/run_text_generation_llama3.sh mode change 100644 => 100755 examples/inference/llama_mistral/run_text_generation_mistral.sh mode change 100644 => 100755 examples/inference/run_text_generation_server_345M.sh mode change 100644 => 100755 examples/inference/run_text_generation_server_345M_8_tensor_parallel.sh mode change 100644 => 
100755 examples/mamba/run_text_gen_server_8b.sh mode change 100644 => 100755 examples/mamba/run_text_gen_server_8b_gpt3.sh mode change 100644 => 100755 examples/mamba/train.sh create mode 100644 examples/mixtral/hostfile_mixtral_8x7B rename run_mixtral8x7B_1nodes.sh => examples/mixtral/run_mixtral_8x7B_1nodes.sh (100%) mode change 100644 => 100755 rename run_GPT-MOE.sh => examples/mixtral/run_mixtral_8x7B_multinodes.sh (61%) mode change 100644 => 100755 rename train_mixtral_8x7B_1nodes.sh => examples/mixtral/train_mixtral_8x7B_1nodes.sh (87%) rename train_mixtral_8x7B_2nodes.sh => examples/mixtral/train_mixtral_8x7B_multinodes.sh (85%) mode change 100644 => 100755 delete mode 100644 examples/mixtral/train_mixtral_8x7b_distributed.sh create mode 100644 examples/multimodal/energon_util.py create mode 100644 examples/multimodal/evaluation/evaluate_infovqa.py create mode 100644 examples/multimodal/evaluation/evaluate_spdocvqa.py create mode 100644 examples/multimodal/model_converter/radio_converter.py mode change 100644 => 100755 examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh mode change 100644 => 100755 examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh mode change 100644 => 100755 examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh create mode 100755 examples/multimodal/nvlm/run_text_generation_qwen25_7b_internvit_video.sh mode change 100644 => 100755 examples/multimodal/nvlm/run_text_generation_qwen25_7b_siglip.sh mode change 100644 => 100755 examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh mode change 100644 => 100755 examples/multimodal/nvlm/sft_34b_internvit.sh mode change 100644 => 100755 examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh create mode 100755 examples/multimodal/nvlm/sft_qwen2p5_7b_internvit_6b_video.sh mode change 100644 => 100755 examples/multimodal/pretrain_mistral_clip.sh mode change 100644 => 100755 examples/multimodal/sft_mistral_clip.sh mode change 100644 => 100755 examples/multimodal/text_generation_mistral_clip.sh mode change 100644 => 100755 examples/retro/preprocess_data.sh mode change 100644 => 100755 examples/retro/train_retro_2b_distributed.sh mode change 100644 => 100755 examples/t5/train_t5_220m_distributed.sh delete mode 100644 gptnodes delete mode 100644 megatron/core/dist_checkpointing/state_dict_transformation.py create mode 100644 megatron/core/dist_checkpointing/state_dict_utils.py create mode 100644 megatron/core/dist_checkpointing/strategies/cached_metadata_filesystem_reader.py create mode 100644 megatron/core/dist_checkpointing/tensor_aware_state_dict.py create mode 100644 megatron/core/distributed/custom_fsdp/__init__.py create mode 100644 megatron/core/distributed/custom_fsdp/fully_sharded_data_parallel.py create mode 100644 megatron/core/distributed/custom_fsdp/param_and_grad_buffer.py delete mode 100644 megatron/core/inference/ammo_support/__init__.py delete mode 100644 megatron/core/inference/ammo_support/gpt/model_specs.py delete mode 100644 megatron/core/inference/ammo_support/gpt/state_dict_hooks.py create mode 100644 megatron/core/inference/async_stream.py create mode 100644 megatron/core/inference/model_inference_wrappers/multimodal/vlm_inference_wrapper.py create mode 100644 megatron/core/inference/modelopt_support/mamba/__init__.py create mode 100644 megatron/core/inference/modelopt_support/mamba/model_specs.py create mode 100644 megatron/core/inference/text_generation_controllers/vlm_text_generation_controller.py create mode 100644 
megatron/core/models/common/embeddings/relative_pos_embedding.py create mode 100644 megatron/core/models/huggingface/__init__.py create mode 100644 megatron/core/models/huggingface/clip_model.py create mode 100644 megatron/core/models/huggingface/module.py create mode 100644 megatron/core/models/huggingface/qwen_model.py create mode 100644 megatron/core/models/multimodal/context_parallel.py create mode 100644 megatron/core/models/vision/radio.py create mode 100644 megatron/core/optimizer/cpu_offloading/README.md create mode 100644 megatron/core/optimizer/cpu_offloading/__init__.py create mode 100644 megatron/core/optimizer/cpu_offloading/hybrid_optimizer.py create mode 100644 megatron/core/transformer/moe/fused_a2a.py create mode 100644 megatron/training/wandb_utils.py delete mode 100644 mixtralnodes delete mode 100644 requirements.txt create mode 100644 requirements/pytorch_24.10/requirements.txt delete mode 100644 run.sh mode change 100644 => 100755 tasks/orqa/unsupervised/qa_utils.py mode change 100644 => 100755 tasks/orqa/unsupervised/tokenizers.py mode change 100644 => 100755 tasks/vision/segmentation/metrics.py create mode 100644 tests/functional_tests/python_test_utils/conftest.py delete mode 100644 tests/functional_tests/python_test_utils/test_ci_pipeline.py delete mode 100644 tests/functional_tests/python_test_utils/test_fp8_ci_pipeline.py create mode 100644 tests/functional_tests/python_test_utils/test_regular_pipeline.py mode change 100644 => 100755 tests/functional_tests/shell_test_utils/_run_training.sh mode change 100644 => 100755 tests/functional_tests/shell_test_utils/run_ci_test.sh create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/bert/bert_release/golden_values_0.10.0.json create mode 100644 tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_8experts_tp2_ep2_pp1_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.10.0.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.10.0.json create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.10.0.json create mode 100644 tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.10.0.json create mode 100644 tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/model_config.yaml create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json create mode 100644 
tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_dev.json create mode 100644 tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_lts.json create mode 100644 tests/functional_tests/test_cases/t5/t5_release/golden_values_0.10.0.json create mode 100644 tests/test_utils/python_scripts/download_coverage_results.py create mode 100644 tests/test_utils/python_scripts/download_golden_values.py create mode 100644 tests/unit_tests/dist_checkpointing/test_global_metadata_reuse.py create mode 100644 tests/unit_tests/dist_checkpointing/test_replication.py rename tests/unit_tests/dist_checkpointing/{test_cached_metadata.py => test_torch_dist.py} (72%) create mode 100644 tests/unit_tests/distributed/test_finalize_model_grads.py create mode 100644 tests/unit_tests/distributed/test_grad_sync_with_expert_parallel.py delete mode 100644 tests/unit_tests/inference/test_modelopt_gpt_model.py create mode 100644 tests/unit_tests/inference/test_modelopt_module_spec.py create mode 100644 tests/unit_tests/inference/text_generation_controllers/test_vlm_text_generation_controller.py create mode 100644 tests/unit_tests/models/test_radio_model.py create mode 100644 tests/unit_tests/test_model_configs.py create mode 100644 tests/unit_tests/test_optimizer_cpu_offloading.py create mode 100644 tests/unit_tests/transformer/test_relative_attention.py mode change 100644 => 100755 tools/autoformat.sh rename tools/checkpoint/{loader_mcore.py => loader_core.py} (97%) rename tools/checkpoint/{loader_megatron.py => loader_legacy.py} (97%) rename tools/checkpoint/{saver_mcore.py => saver_core.py} (97%) rename tools/checkpoint/{saver_megatron.py => saver_legacy.py} (97%) rename tools/checkpoint/{schema_mcore.py => schema_core.py} (89%) mode change 100644 => 100755 tools/copyright.sh mode change 100644 => 100755 tools/retro/sft/sft_retro_lm.sh mode change 100644 => 100755 tools/retro/text_generation/retro_generate.sh delete mode 100644 unit-test-job-lts.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 7960574..1b59cd8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,122 +1,132 @@ -# Changelog - -## NVIDIA Megatron Core 0.9.0 - -- Uneven pipeline parallelism - - Enable pipeline parallelism where first and last ranks have fewer transformer layers than the intermediate ranks -- Per layer CUDAGraph support for GPT training with Transformer Engine modules -- Enable different TP sizes for the vision encoder -- Enable pipeline parallelism for T5 & Llava models -- Support multi-tile multi-image input in Llava models -- MoE - - FP8 support - - Runtime upcycling support - - Dispatcher implementation optimizations - - Shared expert support with overlapping optimizations - - Qwen Model support -- Known Issues - - When using sequence parallel, during the transformer block forward pass, dropout is not using the appropriate rng context. 
-
-
-## NVIDIA Megatron Core 0.8.0
-
-- Multimodal
-  - Added initial support for training vision language models using the LLaVA architecture
-  - Added initial support for inference with multimodal inputs
-  - End-to-end multimodal example from data collection to training to evaluation is provided in examples/multimodal
-- MoE
-  - Context Parallel support.
-  - Distributed checkpoint support for grouped GEMM.
-- Mamba
-
-## NVIDIA Megatron Core 0.7.0
-
-- MoE
-  - Token drop support
-  - Several efficiency optimizations
-  - Improved model parallelism
-  - Memory optimizations
-- Distributed checkpointing
-  - Enabled for Retro
-  - Asynchronous checkpoint saving
-- Several minor bug fixes, speed improvements, and memory optimizations
-
-## NVIDIA Megatron Core 0.6.0
-
-- MoE (Mixture of Experts)
-  - Performance optimization
-  - Communication optimization for multi GPU and Single GPU
-  - 23% improvement (323 TFLOPS/GPU) over MCore 0.5.0 on Mixtral with Hopper BF16
-  - GroupedMLP enhancement for Hopper
-  - DP Overlapping. Support overlapping computation with gradient reduction and parameter gathering.
-  - All-to-All based Token Dispatcher
-  - Layer-wise logging for load balancing loss.
-  - Improved expert parallel support including distributed optimizer.
-- Distributed optimizer
-- RETRO
-  - Data processing
-- BERT
-  - Distributed checkpointing
-- Dist checkpointing
-  - PyTorch native distributed backend
-  - Improved saving/loading speed
-- TensorRT-LLM Export
-  - Integration with TensorRT Model Optimizer Post-training quantization (PTQ)
-  - Text generation driver to perform PTQ in Megatron-LM
-  - Llama2 and Nemotron3-8b examples to use TensorRT-LLM unified build API to build engine after training.
-- Several minor enhancements, bug fixes, and documentation updates
-
-## NVIDIA Megatron Core 0.5.0
-
-### Key Features and Enhancements
-
-Megatron core documentation is now [live!](https://docs.nvidia.com/megatron-core/developer-guide/latest/user-guide/index.html#quick-start)
-
-### Model Features
-
-- MoE (Mixture of Experts)
-  - Support for Z-loss, Load balancing and Sinkhorn
-  - Layer and communications refactor
-  - Richer parallelism mappings and EP can be combined with other model parallel techniques for larger MoE variants, e.g. EP + TP + DP + SP + PP
-  - Token dropless architecture with Top-K routing
-  - Performance optimization with with GroupedGEMM when number of local experts is > 1
-  - Distributed checkpointing
-- Interleaved rotary embedding
-
-### Datasets
-
-- Masked WordPiece datasets for BERT and T5
-- Raw and mock datasets
-
-### Parallelism
-
-### Performance
-
-- Activation offloading to CPU
-- Rope and Swiglu fusion
-- Sliding window attention (via Transformer Engine)
-
-### General Improvements
-
-- Timers
-
-## NVIDIA Megatron Core 0.4.0
-
-### Key Features and Enhancements
-
-#### Models
-
-- BERT
-- RETRO
-- T5
-
-#### Parallelism
-
-- Mixture of Experts support for GPT
-- Model parallel efficient Distributed Data Parallel (DDP)
-- Context Parallel (2D Tensor Parallel) support
-
-#### Datasets
-
-- GPT Dataset
-- Blended Dataset
+# Changelog
+
+## NVIDIA Megatron Core 0.10.0
+
+- Adding MLA to MCore
+- Enable FP8 for GroupedMLP
+- MoE Parallel Folding
+- Enhance MoE Architecture: Support MoE Layer Frequency Patterns and Configurable MoE FFN Hidden Size
+- Multimodal: NVLM training and evaluation support in MCore
+- Mamba Hybrid
+  - Increase performance and reduce memory footprint of Triton language/compiler distributed caching
+  - Add more unit testing and fix bugs
+
+## NVIDIA Megatron Core 0.9.0
+
+- Uneven pipeline parallelism
+  - Enable pipeline parallelism where first and last ranks have fewer transformer layers than the intermediate ranks
+- Per layer CUDAGraph support for GPT training with Transformer Engine modules
+- Enable different TP sizes for the vision encoder
+- Enable pipeline parallelism for T5 & Llava models
+- Support multi-tile multi-image input in Llava models
+- MoE
+  - FP8 support
+  - Runtime upcycling support
+  - Dispatcher implementation optimizations
+  - Shared expert support with overlapping optimizations
+  - Qwen Model support
+- Known Issues
+  - When using sequence parallel, during the transformer block forward pass, dropout is not using the appropriate rng context.
+
+## NVIDIA Megatron Core 0.8.0
+
+- Multimodal
+  - Added initial support for training vision language models using the LLaVA architecture
+  - Added initial support for inference with multimodal inputs
+  - End-to-end multimodal example from data collection to training to evaluation is provided in examples/multimodal
+- MoE
+  - Context Parallel support.
+  - Distributed checkpoint support for grouped GEMM.
+- Mamba
+
+## NVIDIA Megatron Core 0.7.0
+
+- MoE
+  - Token drop support
+  - Several efficiency optimizations
+  - Improved model parallelism
+  - Memory optimizations
+- Distributed checkpointing
+  - Enabled for Retro
+  - Asynchronous checkpoint saving
+- Several minor bug fixes, speed improvements, and memory optimizations
+
+## NVIDIA Megatron Core 0.6.0
+
+- MoE (Mixture of Experts)
+  - Performance optimization
+  - Communication optimization for multi GPU and Single GPU
+  - 23% improvement (323 TFLOPS/GPU) over MCore 0.5.0 on Mixtral with Hopper BF16
+  - GroupedMLP enhancement for Hopper
+  - DP Overlapping. Support overlapping computation with gradient reduction and parameter gathering.
+  - All-to-All based Token Dispatcher
+  - Layer-wise logging for load balancing loss.
+  - Improved expert parallel support including distributed optimizer.
+- Distributed optimizer
+- RETRO
+  - Data processing
+- BERT
+  - Distributed checkpointing
+- Dist checkpointing
+  - PyTorch native distributed backend
+  - Improved saving/loading speed
+- TensorRT-LLM Export
+  - Integration with TensorRT Model Optimizer Post-training quantization (PTQ)
+  - Text generation driver to perform PTQ in Megatron-LM
+  - Llama2 and Nemotron3-8b examples to use TensorRT-LLM unified build API to build engine after training.
+- Several minor enhancements, bug fixes, and documentation updates
+
+## NVIDIA Megatron Core 0.5.0
+
+### Key Features and Enhancements
+
+Megatron core documentation is now [live!](https://docs.nvidia.com/megatron-core/developer-guide/latest/user-guide/index.html#quick-start)
+
+### Model Features
+
+- MoE (Mixture of Experts)
+  - Support for Z-loss, Load balancing and Sinkhorn
+  - Layer and communications refactor
+  - Richer parallelism mappings and EP can be combined with other model parallel techniques for larger MoE variants, e.g. EP + TP + DP + SP + PP
+  - Token dropless architecture with Top-K routing
+  - Performance optimization with with GroupedGEMM when number of local experts is > 1
+  - Distributed checkpointing
+- Interleaved rotary embedding
+
+### Datasets
+
+- Masked WordPiece datasets for BERT and T5
+- Raw and mock datasets
+
+### Parallelism
+
+### Performance
+
+- Activation offloading to CPU
+- Rope and Swiglu fusion
+- Sliding window attention (via Transformer Engine)
+
+### General Improvements
+
+- Timers
+
+## NVIDIA Megatron Core 0.4.0
+
+### Key Features and Enhancements
+
+#### Models
+
+- BERT
+- RETRO
+- T5
+
+#### Parallelism
+
+- Mixture of Experts support for GPT
+- Model parallel efficient Distributed Data Parallel (DDP)
+- Context Parallel (2D Tensor Parallel) support
+
+#### Datasets
+
+- GPT Dataset
+- Blended Dataset
diff --git a/CODEOWNERS b/CODEOWNERS
index e89c62b..5a01a3c 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -1,49 +1,49 @@
-[Core-ADLR] @mcore-reviewers/core-adlr
-megatron/core/
-
-[Core-NeMo] @mcore-reviewers/core-nemo
-megatron/core/
-
-^[Core-MLPerf] @mcore-reviewers/mlperf
-megatron/core/
-
-[MoE-ADLR] @mcore-reviewers/moe-adlr
-megatron/core/transformer/moe/
-
-[MoE-Moe] @mcore-reviewers/moe-moe
-megatron/core/transformer/moe/
-
-[Datasets] @mcore-reviewers/datasets
-megatron/core/datasets/
-
-[BERT] @mcore-reviewers/bert
-megatron/core/models/bert/
-
-[GPT] @mcore-reviewers/gpt
-megatron/core/models/gpt/
-
-[Retro] @mcore-reviewers/retro
-megatron/core/models/retro/
-
-[Distributed Checkpointing] @mcore-reviewers/dist-checkpointing
-megatron/core/dist_checkpointing/
-
-[Distributed Optimizer] @mcore-reviewers/dist-optimizer
-megatron/core/optimizer/distrib_optimizer/
-
-[Inference] @mcore-reviewers/inference
-megatron/core/inference/
-
-^[Quantization and Inference (QAT)] @mcore-reviewers/quantization-and-inference
-megatron/core/inference/
-
-; [Context Parallelism] @mcore-reviewers/context-parallelism
-;
-
-[CI] @mcore-reviewers/ci
-.gitlab/
-.github/
-.gitlab-ci.yml
-Dockerfile.ci.lts
-Dockerfile.ci.dev
-tests/
+[Core-ADLR] @mcore-reviewers/core-adlr
+megatron/core/
+
+[Core-NeMo] @mcore-reviewers/core-nemo
+megatron/core/
+
+^[Core-MLPerf] @mcore-reviewers/mlperf
+megatron/core/
+
+[MoE-ADLR] @mcore-reviewers/moe-adlr
+megatron/core/transformer/moe/
+
+[MoE-Moe] @mcore-reviewers/moe-moe
+megatron/core/transformer/moe/
+
+[Datasets] @mcore-reviewers/datasets
+megatron/core/datasets/
+
+[BERT] @mcore-reviewers/bert
+megatron/core/models/bert/
+
+[GPT] @mcore-reviewers/gpt
+megatron/core/models/gpt/ + +[Retro] @mcore-reviewers/retro +megatron/core/models/retro/ + +[Distributed Checkpointing] @mcore-reviewers/dist-checkpointing +megatron/core/dist_checkpointing/ + +[Distributed Optimizer] @mcore-reviewers/dist-optimizer +megatron/core/optimizer/distrib_optimizer/ + +[Inference] @mcore-reviewers/inference +megatron/core/inference/ + +^[Quantization and Inference (QAT)] @mcore-reviewers/quantization-and-inference +megatron/core/inference/ + +; [Context Parallelism] @mcore-reviewers/context-parallelism +; + +[CI][2] @mcore-reviewers/ci +.gitlab/ +.github/ +.gitlab-ci.yml +Dockerfile.ci.lts +Dockerfile.ci.dev +tests/ diff --git a/Dockerfile.ci.dev b/Dockerfile.ci.dev index c631282..074d203 100644 --- a/Dockerfile.ci.dev +++ b/Dockerfile.ci.dev @@ -1,76 +1,84 @@ -# syntax=docker/dockerfile:1.3-labs - -ARG FROM_IMAGE_NAME -FROM $FROM_IMAGE_NAME as build_causal_conv1d -WORKDIR /opt -RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 - -FROM $FROM_IMAGE_NAME as build_grouped_gemm -WORKDIR /opt -RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 - -FROM $FROM_IMAGE_NAME as build_mamba_ssm -WORKDIR /opt -RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.2.0 - -FROM $FROM_IMAGE_NAME as main -ENV DEBIAN_FRONTEND=noninteractive - -RUN apt-get update && \ - apt-get install -y --no-install-recommends gettext python3-venv && \ - apt-get clean && \ - python -m venv /opt/jet && \ - wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ - chmod a+x /usr/local/bin/yq - -COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ -COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ -COPY --from=build_mamba_ssm /opt/mamba_ssm-*.whl ./ - -RUN \ - --mount=type=bind,source=requirements,target=requirements \ - --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ - --mount=type=bind,source=setup.py,target=setup.py \ - --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ - --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ - --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex - -pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl -PY_ENV=pytorch:24.07 pip install . 
-EOF - -# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker -ARG MCORE_REPO -ARG MCORE_REF -ARG MCORE_BACKWARDS_REF -RUN <<"EOF" bash -exu -# Checkout latest -cd /opt -rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm -git init -git remote add origin ${MCORE_REPO} -git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' -git fetch origin $MCORE_REF -git checkout $MCORE_REF - -# Checkout backwards-ref -cd /opt -rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy -git init -git remote add origin ${MCORE_REPO} -git fetch origin $MCORE_BACKWARDS_REF -git checkout $MCORE_BACKWARDS_REF -rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ -EOF - -RUN PY_ENV=pytorch:24.07 pip install -e /opt/megatron-lm -ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" - -##### For NVIDIANS only ##### -FROM main as jet -ARG CACHEBUST=0 -RUN --mount=type=secret,id=JET_INDEX_URLS \ - JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ - pip install jet-client jet-api --upgrade $JET_INDEX_URLS -ENV PATH="$PATH:/opt/jet/bin" +# syntax=docker/dockerfile:1.3-labs + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as build_causal_conv1d +WORKDIR /opt +RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 + +FROM $FROM_IMAGE_NAME as build_grouped_gemm +WORKDIR /opt +RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 + +FROM $FROM_IMAGE_NAME as build_mamba_ssm +WORKDIR /opt +RUN git clone https://github.com/state-spaces/mamba.git && \ + cd mamba && \ + git checkout v2.2.0 && \ + sed -i "/triton/d" setup.py && \ + MAMBA_FORCE_BUILD=TRUE pip3 wheel -v . + +FROM $FROM_IMAGE_NAME as main +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && \ + apt-get install -y --no-install-recommends gettext python3-venv && \ + apt-get clean && \ + python -m venv /opt/jet && \ + wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ + chmod a+x /usr/local/bin/yq + +COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ +COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ +COPY --from=build_mamba_ssm /opt/mamba/mamba_ssm-*.whl ./ + +RUN \ + --mount=type=bind,source=requirements,target=requirements \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=setup.py,target=setup.py \ + --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ + --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ + --mount=type=bind,source=megatron/core/requirements.txt,target=megatron/core/requirements.txt \ + --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex + +pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl +PY_ENV=pytorch_24.10 pip install . 
+EOF + +# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker +ARG MCORE_REPO +ARG MCORE_REF +ARG MCORE_BACKWARDS_REF +RUN <<"EOF" bash -exu +# Checkout latest +cd /opt +rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm +git init +git remote add origin ${MCORE_REPO} +git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' +git fetch origin $MCORE_REF +git checkout $MCORE_REF + +# Checkout backwards-ref +cd /opt +rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy +git init +git remote add origin ${MCORE_REPO} +git fetch origin $MCORE_BACKWARDS_REF +git checkout $MCORE_BACKWARDS_REF +rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ +EOF + +RUN PY_ENV=pytorch_24.10 pip install -e /opt/megatron-lm +ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" + +##### For NVIDIANS only ##### +FROM main as jet +ARG CACHEBUST=0 +RUN --mount=type=secret,id=JET_INDEX_URLS \ + --mount=type=secret,id=LOGGER_INDEX_URL \ + LOGGER_INDEX_URL=$(cat /run/secrets/LOGGER_INDEX_URL) && \ + JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ + pip install "jet-client~=2.0" jet-api --upgrade $JET_INDEX_URLS && \ + pip install "one-logger" --upgrade $LOGGER_INDEX_URL +ENV PATH="$PATH:/opt/jet/bin" ### \ No newline at end of file diff --git a/Dockerfile.ci.lts b/Dockerfile.ci.lts index ea0cf31..a3d15e8 100644 --- a/Dockerfile.ci.lts +++ b/Dockerfile.ci.lts @@ -1,77 +1,81 @@ -# syntax=docker/dockerfile:1.3-labs - -ARG FROM_IMAGE_NAME -FROM $FROM_IMAGE_NAME as build_causal_conv1d -WORKDIR /opt -RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 - -FROM $FROM_IMAGE_NAME as build_grouped_gemm -WORKDIR /opt -RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 - -FROM $FROM_IMAGE_NAME as build_mamba_ssm -WORKDIR /opt -RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.0.3 - -ARG FROM_IMAGE_NAME -FROM $FROM_IMAGE_NAME as main -ENV DEBIAN_FRONTEND=noninteractive - -RUN apt-get update && \ - apt-get install -y --no-install-recommends gettext python3-venv && \ - apt-get clean && \ - python -m venv /opt/jet && \ - wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ - chmod a+x /usr/local/bin/yq - -COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ -COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ -COPY --from=build_mamba_ssm /opt/mamba_ssm-*.whl ./ - -RUN \ - --mount=type=bind,source=requirements,target=requirements \ - --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ - --mount=type=bind,source=setup.py,target=setup.py \ - --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ - --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ - --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex - -pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl -PY_ENV=pytorch:24.07 pip install . 
-EOF - -# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker -ARG MCORE_REPO -ARG MCORE_REF -ARG MCORE_BACKWARDS_REF -RUN <<"EOF" bash -exu -# Checkout latest -cd /opt -rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm -git init -git remote add origin ${MCORE_REPO} -git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' -git fetch origin $MCORE_REF -git checkout $MCORE_REF - -# Checkout backwards-ref -cd /opt -rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy -git init -git remote add origin ${MCORE_REPO} -git fetch origin $MCORE_BACKWARDS_REF -git checkout $MCORE_BACKWARDS_REF -rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ -EOF - -RUN PY_ENV=pytorch:24.01 pip install -e /opt/megatron-lm -ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" - -##### For NVIDIANS only ##### -FROM main as jet -ARG CACHEBUST=0 -RUN --mount=type=secret,id=JET_INDEX_URLS \ - JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ - pip install jet-api jet-client --upgrade $JET_INDEX_URLS -ENV PATH="$PATH:/opt/jet/bin" +# syntax=docker/dockerfile:1.3-labs + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as build_causal_conv1d +WORKDIR /opt +RUN CAUSAL_CONV1D_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/Dao-AILab/causal-conv1d.git@v1.2.2.post1 + +FROM $FROM_IMAGE_NAME as build_grouped_gemm +WORKDIR /opt +RUN pip3 wheel -v git+https://github.com/fanshiqing/grouped_gemm@v1.1.2 + +FROM $FROM_IMAGE_NAME as build_mamba_ssm +WORKDIR /opt +RUN MAMBA_FORCE_BUILD=TRUE pip3 wheel -v git+https://github.com/state-spaces/mamba.git@v2.0.3 + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as main +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && \ + apt-get install -y --no-install-recommends gettext python3-venv && \ + apt-get clean && \ + python -m venv /opt/jet && \ + wget https://github.com/mikefarah/yq/releases/download/v4.44.1/yq_linux_amd64 -O /usr/local/bin/yq && \ + chmod a+x /usr/local/bin/yq + +COPY --from=build_causal_conv1d /opt/causal_conv1d-*.whl ./ +COPY --from=build_grouped_gemm /opt/grouped_gemm-*.whl ./ +COPY --from=build_mamba_ssm /opt/mamba_ssm-*.whl ./ + +RUN \ + --mount=type=bind,source=requirements,target=requirements \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=setup.py,target=setup.py \ + --mount=type=bind,source=megatron/core/package_info.py,target=megatron/core/package_info.py \ + --mount=type=bind,source=megatron/core/README.md,target=megatron/core/README.md \ + --mount=type=bind,source=megatron/core/requirements.txt,target=megatron/core/requirements.txt \ + --mount=type=bind,source=megatron/core/__init__.py,target=megatron/core/__init__.py <<"EOF" bash -ex + +pip install causal_conv1d-*.whl mamba_ssm-*.whl grouped_gemm-*.whl +PY_ENV=pytorch_24.01 pip install . 
+EOF + +# Since megatron does not have any dependencies (and isn't a dependency to any other package), we can install it separately to make everything a bit quicker +ARG MCORE_REPO +ARG MCORE_REF +ARG MCORE_BACKWARDS_REF +RUN <<"EOF" bash -exu +# Checkout latest +cd /opt +rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm +git init +git remote add origin ${MCORE_REPO} +git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' +git fetch origin $MCORE_REF +git checkout $MCORE_REF + +# Checkout backwards-ref +cd /opt +rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy +git init +git remote add origin ${MCORE_REPO} +git fetch origin $MCORE_BACKWARDS_REF +git checkout $MCORE_BACKWARDS_REF +rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ +EOF + +RUN PY_ENV=pytorch_24.01 pip install -e /opt/megatron-lm +ENV PYTHONPATH="/opt/megatron-lm:$PYTHONPATH" + +##### For NVIDIANS only ##### +FROM main as jet +ARG CACHEBUST=0 +RUN --mount=type=secret,id=JET_INDEX_URLS \ + --mount=type=secret,id=LOGGER_INDEX_URL \ + LOGGER_INDEX_URL=$(cat /run/secrets/LOGGER_INDEX_URL) && \ + JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ + pip install "jet-client~=2.0" jet-api --upgrade $JET_INDEX_URLS && \ + pip install "one-logger" --upgrade $LOGGER_INDEX_URL +ENV PATH="$PATH:/opt/jet/bin" ### \ No newline at end of file diff --git a/Dockerfile.linting b/Dockerfile.linting index ff1a28c..608e258 100644 --- a/Dockerfile.linting +++ b/Dockerfile.linting @@ -1,33 +1,34 @@ -# syntax=docker/dockerfile:experimental - -ARG FROM_IMAGE_NAME -FROM $FROM_IMAGE_NAME as main -ENV DEBIAN_FRONTEND=noninteractive - -RUN sed -i -e 's/^APT/# APT/' -e 's/^DPkg/# DPkg/' \ - /etc/apt/apt.conf.d/docker-clean - -RUN apt-get update && \ - apt-get install -y python3-venv && \ - apt-get clean && \ - python -m venv /opt/jet - -RUN pip3 install --no-cache-dir \ - black==24.4.2 \ - isort==5.13.2 \ - flake8==7.1.0 \ - pylint==3.2.6 \ - mypy - -COPY . /opt/megatron-lm - -WORKDIR /opt/megatron-lm - -##### For NVIDIANS only ##### -FROM main as jet -ARG CACHEBUST=0 -RUN --mount=type=secret,id=JET_INDEX_URLS \ - JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ - pip install jet-client jet-api --upgrade $JET_INDEX_URLS -ENV PATH="$PATH:/opt/jet/bin" +# syntax=docker/dockerfile:experimental + +ARG FROM_IMAGE_NAME +FROM $FROM_IMAGE_NAME as main +ENV DEBIAN_FRONTEND=noninteractive + +RUN sed -i -e 's/^APT/# APT/' -e 's/^DPkg/# DPkg/' \ + /etc/apt/apt.conf.d/docker-clean + +RUN apt-get update && \ + apt-get install -y python3-venv && \ + apt-get clean && \ + python -m venv /opt/jet + +RUN pip3 install --no-cache-dir \ + black==24.4.2 \ + isort==5.13.2 \ + flake8==7.1.0 \ + pylint==3.2.6 \ + coverage \ + mypy + +COPY . 
/opt/megatron-lm + +WORKDIR /opt/megatron-lm + +##### For NVIDIANS only ##### +FROM main as jet +ARG CACHEBUST=0 +RUN --mount=type=secret,id=JET_INDEX_URLS \ + JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) && \ + pip install "jet-client~=2.0" jet-api --upgrade $JET_INDEX_URLS +ENV PATH="$PATH:/opt/jet/bin" ### \ No newline at end of file diff --git a/GPT_pretraining.sh b/GPT_pretraining.sh deleted file mode 100644 index f0f3fc9..0000000 --- a/GPT_pretraining.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash - -# Runs the "7B" parameter model -export HSA_FORCE_FINE_GRAIN_PCIE=1 -export OMP_NUM_THREADS=1 -export NCCL_P2P_LEVEL=SYS - -export NCCL_ALGO=Ring -export NCCL_NCHANNELS_PER_PEER=16 -export NCCL_MIN_NCHANNELS=20 -export NCCL_IB_TIMEOUT=22 -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -export NCCL_NET_GDR_LEVEL=SYS -export NCCL_NET_GDR_READ=0 - - -CHECKPOINT_PATH=./tmp #$1 # -TENSORBOARD_LOGS_PATH=./tmp #$2 # -DATA_PATH="/datasets/oscar-1GB-gpt_text_document" #_text_document -VOCAB_PATH=./gpt2-vocab.json -MERGE_PATH=./gpt2-merges.txt - -GPT_MODEL_ARGS=( - --num-layers 12 - --hidden-size 768 - --num-attention-heads 12 - --ffn-hidden-size 3072 - --seq-length 1024 - --max-position-embeddings 1024 -) - -# export NVTE_FLASH_ATTN=1 # 走autlass -# export NVTE_FLASH_ATTN_TRITON=1 # 走triton_fa -# --transformer-impl transformer_engine - # --use-mcore-models -TRAINING_ARGS=( - --transformer-impl local - --use-legacy-models - --micro-batch-size 1 - --global-batch-size 60 #240 #512 #64 - --train-iters 100 - --weight-decay 0.1 - --adam-beta1 0.9 - --adam-beta2 0.95 - --init-method-std 0.006 - --clip-grad 1.0 - --bf16 - --use-distributed-optimizer - --ckpt-format torch - --disable-bias-linear - --overlap-grad-reduce - --attention-dropout 0 - --hidden-dropout 0 - --ddp-average-in-collective - --recompute-granularity full - --recompute-num-layers 5 - --recompute-method block - --no-gradient-accumulation-fusion - --swiglu - --lr 3.0e-5 - --lr-decay-style cosine - --min-lr 3.0e-6 - --lr-warmup-iters 1 -) -MODEL_PARALLEL_ARGS=( - --sequence-parallel - --tensor-model-parallel-size 2 - --pipeline-model-parallel-size 2 -) - -DATA_ARGS=( - --data-path $DATA_PATH - --split 949,50,1 - --untie-embeddings-and-output-weights - --use-rotary-position-embeddings - --normalization RMSNorm - --no-position-embedding - --vocab-file $VOCAB_PATH - --merge-file $MERGE_PATH - --tokenizer-type GPT2BPETokenizer -) - -EVAL_AND_LOGGING_ARGS=( - --log-interval 1 - --save-interval 10000 - --eval-interval 1000 - --save $CHECKPOINT_PATH - --load $CHECKPOINT_PATH - --eval-iters 10 - --tensorboard-dir $TENSORBOARD_LOGS_PATH -) - -RANK=$OMPI_COMM_WORLD_RANK -LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK -WORLD_SIZE=$OMPI_COMM_WORLD_SIZE -DIST_URL=${1} -DIST_PORT=34566 - -DISTRIBUTED_ARGS=( - --rank ${RANK} - --world-size ${WORLD_SIZE} - --local-rank ${LOCAL_RANK} - --dist-url tcp://${DIST_URL}:${DIST_PORT} -) - -APP="python -u pretrain_gpt.py \ - ${GPT_MODEL_ARGS[@]} \ - ${TRAINING_ARGS[@]} \ - ${MODEL_PARALLEL_ARGS[@]} \ - ${DATA_ARGS[@]} \ - ${EVAL_AND_LOGGING_ARGS[@]} \ - ${DISTRIBUTED_ARGS[@]} \ -" - -case ${LOCAL_RANK} in -[0]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[1]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[2]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[3]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 
--membind=0 ${APP} - ;; -[4]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[5]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[6]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[7]) - export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 -# ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -esac diff --git a/LICENSE b/LICENSE index b4193af..57e1320 100644 --- a/LICENSE +++ b/LICENSE @@ -1,272 +1,273 @@ -The following applies to all files unless otherwise noted: - -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of NVIDIA CORPORATION nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY -# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - --- - -This repository also contains code from Hugging Face Inc., Google Research, -Facebook (from their Fairseq, Dino, and ParlAI projects), Microsoft (from their -Swin-Transformer project), Philip Popien, the Mamba project (Tri Dao and -Albert Gu), and the Triton language and compiler project (Philippe Tillet and -OpenAI). Files from these organizations have notices at the top of each file. -Below are licenses used in those files, as indicated. - - --------------------------------------------------------------------------------- --- LICENSE FOR Facebook, huggingface, Google Research, LLaVA, and Mamba code -- - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- --------------------------------------------------------------------------------- -LICENSE FOR -Facebook, Inc. and its affiliates, -Meta Platforms, Inc. and its affiliates, -Microsoft Corporation, -OpenGVLab/InternVL, and -Triton language and compiler. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - +The following applies to all files unless otherwise noted: + +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of NVIDIA CORPORATION nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-- + +This repository also contains code from Hugging Face Inc., Google Research, +Facebook (from their Fairseq, Dino, and ParlAI projects), Microsoft (from their +Swin-Transformer project), Philip Popien, the Mamba project (Tri Dao and +Albert Gu), and the Triton language and compiler project (Philippe Tillet and +OpenAI). Files from these organizations have notices at the top of each file. +Below are licenses used in those files, as indicated. 
+ + +-------------------------------------------------------------------------------------- +-- LICENSE FOR Facebook, huggingface, Google Research, LLaVA, Mamba, and vLLM code -- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------------------------------------------------------------------------------- +LICENSE FOR +Facebook, Inc. and its affiliates, +Meta Platforms, Inc. and its affiliates, +Microsoft Corporation, +OpenGVLab/InternVL, +Triton language and compiler, +and DeepSeek. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/Llama_pretraining.sh b/Llama_pretraining.sh deleted file mode 100644 index 5f9d820..0000000 --- a/Llama_pretraining.sh +++ /dev/null @@ -1,211 +0,0 @@ -#!/bin/bash -set -eux - -#export FLASH_ATTENTION_PRINT_PARAM=1 -# Runs the "7B" parameter model -export HSA_FORCE_FINE_GRAIN_PCIE=1 -export OMP_NUM_THREADS=1 -export NCCL_P2P_LEVEL=PXB # SYS - -#export HIP_ALLOC_INITIALIZE=0 -#export GPU_MAX_HW_QUEUES=20 - -export NCCL_ALGO=Ring -export NCCL_NCHANNELS_PER_PEER=16 -export NCCL_MIN_NCHANNELS=20 -export NCCL_IB_TIMEOUT=22 -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -export NCCL_IB_HCA=mlx5_1,mlx5_2 -export NCCL_NET_GDR_LEVEL=7 -export NCCL_NET_GDR_READ=1 -export GLOG_minloglevel=3 # 打印error级别的nccl日志 -source /opt/dtk/env.sh -# 导入hipblaslt库 -# export LD_LIBRARY_PATH=/data/hipblaslt-install-0904/lib:$LD_LIBRARY_PATH -# 更新rocblas -# export LD_LIBRARY_PATH=/data/rocblas-install_qwen1211/lib:$LD_LIBRARY_PATH -# export LD_LIBRARY_PATH=/data/rocblas-install_qwen1228/lib:$LD_LIBRARY_PATH -# export LD_LIBRARY_PATH=/data/rocblas-install-0118-bf16/lib:$LD_LIBRARY_PATH - -# torch控制多流转单流 -export ALLREDUCE_STREAM_WITH_COMPUTE=1 -export SENDRECV_STREAM_WITH_COMPUTE=1 - -# prof采集添加同步, 避免卡顿 -# export GPU_FLUSH_ON_EXECUTION=1 -# export HIP_DIRECT_DISPATCH=0 - -# 采集rocblas size -# export ROCBLAS_LAYER=3 -# 采集 fa size -# export FLASH_ATTENTION_PRINT_PARAM=1 - -#增加编译缓存 -export cache_size_limit=64 - -CHECKPOINT_PATH=./tmp_7b #$1 # -TENSORBOARD_LOGS_PATH=./tmp_7b #$2 # -DATA_PATH="/data/datasets/nemo_pretrain/oscar-1GB/oscar-1GB-llama_text_document" #_text_document - -GPT_MODEL_ARGS=( - --num-layers 32 - --hidden-size 4096 - --ffn-hidden-size 11008 - --num-attention-heads 32 - --max-position-embeddings 4096 - - --normalization RMSNorm - --position-embedding-type rope - --untie-embeddings-and-output-weights # 
分开处理embed和输出权重, 增加灵活性 -) - -# export NVTE_FLASH_ATTN=1 # 走cutlass -export NVTE_FLASH_ATTN_TRITON=1 # 走triton_fa -# --transformer-impl transformer_engine # 走core用这两组参数 - # --use-mcore-models - # --transformer-impl local # 走legacy用这两组参数 - # --use-legacy-models -TRAINING_ARGS=( - --transformer-impl local # 走legacy用这两组参数 - --use-legacy-models - --micro-batch-size 1 - --global-batch-size 60 #240 #60 #512 #64 - --train-iters 10 - --weight-decay 0.1 - --adam-beta1 0.9 - --adam-beta2 0.95 - --init-method-std 0.006 - --clip-grad 1.0 - --bf16 - # --fp16 # 开启fp16需要指定loss-scale - # --loss-scale 1024 - --use-distributed-optimizer - --disable-bias-linear - --attention-dropout 0 - --hidden-dropout 0 - --no-gradient-accumulation-fusion - --swiglu - --lr 3.0e-5 - --lr-decay-style cosine - --min-lr 3.0e-6 - --lr-warmup-iters 1 - --ckpt-format torch - --ddp-average-in-collective # 在dp阶段通信中, 梯度或参数将被直接平均, 而不是先求和(到一个设备)再平均 - # --recompute-granularity full # 开启重计算降低显存增加耗时 - # --recompute-num-layers 5 #0 # - # --recompute-method block - --overlap-grad-reduce # 重叠ddp grad reduce - # --tp-comm-overlap # tensor parallel comm和gemm重叠 - # --tp-comm-overlap-rs-dgrad # reduce-scatter和dgrad gemm重叠 - --use-flash-attn-triton -) -# --use-flash-attn-cutlass # cutlass fa -# --use-flash-attn-triton # triton fa - -MODEL_PARALLEL_ARGS=( - --sequence-parallel - --tensor-model-parallel-size 1 - --pipeline-model-parallel-size 2 -) - -DATA_ARGS=( - --data-path $DATA_PATH - --seq-length 4096 #4096 - --split 949,50,1 - --tokenizer-type Llama2Tokenizer - --tokenizer-model /data/model_weights/llama2_7b_hf/tokenizer.model -) - -EVAL_AND_LOGGING_ARGS=( - --log-interval 1 - --log-throughput - --save-interval 1000 - --eval-interval 1000 - --save $CHECKPOINT_PATH - --load $CHECKPOINT_PATH - --eval-iters 10 - --tensorboard-dir $TENSORBOARD_LOGS_PATH -) - -PROFILE_ARGS=( - --profile - --profile-step-start 4 - --profile-step-end 5 - --use-pytorch-profiler - --profile-ranks 0 1 2 3 4 5 6 7 - --profile-dir prof_data -) - -RANK=$OMPI_COMM_WORLD_RANK -LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK -WORLD_SIZE=$OMPI_COMM_WORLD_SIZE -DIST_URL=${1} -DIST_PORT=34567 - -DISTRIBUTED_ARGS=( - --rank ${RANK} - --world-size ${WORLD_SIZE} - --local-rank ${LOCAL_RANK} - --dist-url tcp://${DIST_URL}:${DIST_PORT} -) - -APP="python -u pretrain_gpt.py \ - ${GPT_MODEL_ARGS[@]} \ - ${TRAINING_ARGS[@]} \ - ${MODEL_PARALLEL_ARGS[@]} \ - ${DATA_ARGS[@]} \ - ${EVAL_AND_LOGGING_ARGS[@]} \ - ${DISTRIBUTED_ARGS[@]} \ - -" -# 开启profile -# ${PROFILE_ARGS[@]} \ - -# export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 # # 4,5,6,7 #, -# export CUDA_VISIBLE_DEVICES=4,5,6,7 # 0,1,2,3, -# ${APP} - -# 使用numactl绑定 -case ${LOCAL_RANK} in -[0]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[1]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - numactl --cpunodebind=1 --membind=1 ${APP} - ;; -[2]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - numactl --cpunodebind=2 --membind=2 ${APP} - ;; -[3]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - numactl --cpunodebind=3 --membind=3 ${APP} - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[4]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - numactl --cpunodebind=4 --membind=4 ${APP} - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 
--membind=0 ${APP} - ;; -[5]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - numactl --cpunodebind=5 --membind=5 ${APP} - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[6]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - numactl --cpunodebind=6 --membind=6 ${APP} - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - ;; -[7]) - export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 - numactl --cpunodebind=7 --membind=7 ${APP} - # hipprof --hip-trace --trace-off numactl --cpunodebind=0 --membind=0 ${APP} - ;; -esac diff --git a/README.md b/README.md index f5e8385..d69e737 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,8 @@ # 更新日志 +2025.3.14适配最新代码,shell启动脚本在examples对应模型目录下 + 2024.12.16适配了torch prof 使用方法: 启动脚本中添加下列参数, 即可采集对应的prof信息 @@ -23,9 +25,6 @@ ```python # 采集torchprof mpirun -np 8 --allow-run-as-root train_mixtral_8x7B_1nodes.sh localhost --profiling=torch - -# 采集hipprof -mpirun -np 8 --allow-run-as-root train_mixtral_8x7B_1nodes.sh localhost --profiling=hip ``` ```bash @@ -38,14 +37,6 @@ TORCH_PROFIE_ARGS=( --profile-ranks 0 3 # 采集全局rank 第0和3 --profile-dir ./prof_data # prof文件的保存目录 ) - -HIP_PROFIE_ARGS=( - --profile - --profile-ranks 0 1 2 3 4 5 6 7 - --profile-step-start 4 - --profile-step-end 5 - --use-hip-profiler -) ``` diff --git a/docs/llama_mistral.md b/docs/llama_mistral.md index 11601fd..81f1584 100644 --- a/docs/llama_mistral.md +++ b/docs/llama_mistral.md @@ -1,480 +1,444 @@ -# Llama, Mistral and other Llama-like model support in Megatron-LM - -NOTE: In order to simplify code we now only support converting llama-3.x and mistral checkpoints downloaded from Huggingface. - -The [Llama-2](https://ai.meta.com/llama/) and [Llama-3](https://llama.meta.com/) family of models are an open-source set of pretrained & finetuned (for chat) models that have achieved strong results across a wide set of benchmarks. At their times of release, both Llama-2 and Llama-3 models achieved among the best results for open-source models, and were competitive with leading closed-source models (see https://arxiv.org/pdf/2307.09288.pdf and https://ai.meta.com/blog/meta-llama-3/). - -Similarly, [Mistral-7b](https://mistral.ai/news/announcing-mistral-7b/) is an open-source model with pretrained and finetuned (for chat) variants that achieve strong benchmark results. - -Architecturally Llama-2, Llama-3 and Mistral-7b are very similar. As such Megatron can support loading checkpoints from all three for inference and finetuning. Converting the checkpoints and loading them is slightly different for each model and is detailed for each below. - -# Llama-2 - -Llama-2 checkpoints can be loaded into Megatron for inference and for finetuning. Loading these checkpoints consists of three steps: - -1. Get access to download the checkpoints. -2. Convert the checkpoints from Meta/Huggingface format to Megatron format. -3. Setup arguments for launching the model. - -The following sections detail these steps. The final section lists benchmark result comparisons between: 1) Llama-2 inference code running the Meta-format checkpoints, and 2) Megatron inference code running the converted checkpoints. 
- -## Contents - * [Download Meta or Huggingface checkpoints](#download-meta-or-huggingface-checkpoints) - * [Convert checkpoint format](#convert-checkpoint-format) - * [Meta format](#meta-format) - * [Huggingface format](#huggingface-format) - * [Launch model](#launch-model) - * [Megatron](#launch-megatron) - * [Meta](#launch-meta) - * [Huggingface](#launch-hf) - * [Benchmark results](#benchmark-results) - -## Download Meta or Huggingface checkpoints - -Users must first apply for access to download the Llama-2 checkpoints either directly from [Meta](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) or through [Huggingface](https://huggingface.co/docs/transformers/main/model_doc/llama2) (HF). The checkpoints are available in two formats, Meta's native format (available from both the Meta and HF links), and HF's format (available only from HF). Either format can be converted to Megatron, as detailed next. - -## Convert checkpoint format - -We recommend passing `--dtype bf16` for training or finetuning. Inference can be done in bfloat16 or float16. - -### Meta format - -The Meta format checkpoints are converted to HF format as an intermediate step before converting to Megatron format. The `transformers` package is required, and must have version >=4.31.0 (e.g., `pip install transformers>=4.31.0`). (**Note**: we have specifically tested with versions `4.31.0` and `4.32.0`; your experience may vary with newer versions.) Assuming the downloaded checkpoints are in `$CHECKPOINT_DIR` (with separate sub-directories for 7B, 13B, 70B, etc.), the following example command can be used to convert from Llama-2 format to HF format in bfloat16: - -``` -python tools/checkpoint/convert.py --model-type GPT \ -> --loader llama_mistral \ -> --saver megatron \ -> --checkpoint-type meta \ -> --model-size llama2-7B \ -> --load-dir $LLAMA_META_FORMAT_DIR \ -> --save-dir ${MEGATRON_FORMAT_DIR} \ -> --tokenizer-model ${TOKENIZER_MODEL} \ -> --target-tensor-parallel-size ${TP} \ -> --target-pipeline-parallel-size ${PP} \ -> --bf16 -``` - -Valid values for `--model-size` are `llama2-7B`, `llama2-13B`, and `llama2-70B` (for pretrained-only models), and `llama2-7Bf`, `llama2-13Bf`, and `llama2-70Bf` (for chat-finetuned models). - -### Huggingface format - -The HF checkpoints can be converted to Megatron format by using Megatron's own Llama-2 checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). One important argument that must be set correctly is the tensor parallel size (`TP`) for each model. The following table shows these values: - -| Model size | Tensor parallel size (`TP`) | -| ---------- | --------------------------- | -| 7B | 1 | -| 13B | 2 | -| 70B | 8 | - -Using these values for `TP`, along with the path to the Llama-2 tokenizer model (automatically downloaded with original checkpoint download; see `${TOKENIZER_MODEL}` below), run the following command from the root of your Megatron source code to convert from HF format to Megatron format: - -``` -$>: python tools/checkpoint/convert.py \ - > --model-type GPT \ - > --loader llama_mistral \ - > --saver megatron \ - > --target-tensor-parallel-size ${TP} \ - > --checkpoint-type hf - > --load-dir ${HF_FORMAT_DIR} \ - > --save-dir ${MEGATRON_FORMAT_DIR} \ - > --tokenizer-model ${TOKENIZER_MODEL} -``` - -After this conversion, we are ready to load the checkpoints into a Megatron GPT model. 
- -## Launch model - -### Launch Megatron - -If loading for either inference or finetuning, use the following arguments: - -``` ---tensor-model-parallel-size ${TP} \ ---pipeline-model-parallel-size 1 \ ---seq-length 4096 \ ---max-position-embeddings 4096 \ ---tokenizer-type Llama2Tokenizer \ ---tokenizer-model ${TOKENIZER_MODEL} \ ---load ${CHECKPOINT_DIR} \ ---exit-on-missing-checkpoint \ ---use-checkpoint-args \ ---no-load-optim \ ---no-load-rng \ ---untie-embeddings-and-output-weights \ ---use-rotary-position-embeddings \ ---normalization RMSNorm \ ---no-position-embedding \ ---no-masked-softmax-fusion \ ---attention-softmax-in-fp32 -``` - -### Launch Meta - -Meta checkpoints can be launched with: https://github.com/facebookresearch/llama - -### Launch Huggingface - -Huggingface checkpoints can be launched with: https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/modeling_llama.py - -## Benchmark results - -The tables below list the benchmark comparisons between native Llama-2 (using Meta's checkpoint and Meta's inference code) and Megatron (using a converted HF checkpoint and Megatron's inference code). - -The values are the percent error between Megatron and Llama-2, calculated using the formula: `| - | / `, where the type of score is detailed before each table. Across all tests (80 total per model size), the mean error is 0.15%. The small difference in benchmark scores between the two models is due to minor arithmetic differences in implementation that alter the numerics slightly. Some of the factors that influence this difference include: - -- Megatron performs batch matrix multiplications in a couple places, such as within self attention and in SwiGLU, that Llama performs separately. -- Megatron uses `torch.baddbmm` within self attention, versus Llama using `torch.matmul`. -- Megatron uses a `sin`/`cos` implementation for rotary position embeddings, versus Llama using a `polar`/`complex` implementation. -- Llama calls `torch.set_default_dtype(torch.float16)` during initialization, which Megatron does not. - -### Big Bench - -Score type: multiple choice grade. - -| bigbench / standard | 7b | 13b | 70b | -| -- | -- | -- | -- | -| date_understanding | 0.29% | 0.13% | 0.12% | -| general_knowledge | 0.00% | 0.00% | 0.00% | -| human_organs_senses | 0.00% | 0.00% | 0.00% | -| intent_recognition | 0.00% | 0.11% | 0.00% | -| riddle_sense | 0.00% | 0.00% | 0.00% | -| similarities_abstraction | 0.00% | 0.58% | 0.00% | -| simple_arithmetic_json_multiple_choice | 0.00% | 0.00% | 0.00% | -| undo_permutation | 0.19% | 0.19% | 0.18% | - -### Multilingual - -Score type: multiple choice grade. - -| multilingual / xcopa | 7b | 13b | 70b | -| -- | -- | -- | -- | -| en-template-mGPT-remove-punctuation | 0.08% | 0.00% | 0.00% | -| et-template-mGPT-remove-punctuation | 0.00% | 0.13% | 0.25% | -| ht-template-mGPT-remove-punctuation | 0.26% | 0.13% | 0.26% | -| id-template-mGPT-remove-punctuation | 0.11% | 0.00% | 0.19% | -| it-template-mGPT-remove-punctuation | 0.00% | 0.10% | 0.09% | -| qu-template-mGPT-remove-punctuation | 0.00% | 0.00% | 0.27% | -| sw-template-mGPT-remove-punctuation | 0.14% | 0.13% | 0.13% | -| th-template-mGPT-remove-punctuation | 0.25% | 0.13% | 0.13% | -| tr-template-mGPT-remove-punctuation | 0.26% | 0.00% | 0.34% | -| vi-template-mGPT-remove-punctuation | 0.00% | 0.11% | 0.00% | -| zh-template-mGPT-remove-punctuation | 0.00% | 0.10% | 0.09% | - -### LM Evaluation Harness - -Score type: multiple choice grade. 
- -| lm-eval | 7b | 13b | 70b | -| -- | -- | -- | -- | -| boolq | 0.04% | 0.04% | 0.07% | -| hellaswag | 0.02% | 0.03% | 0.03% | -| piqa | 0.00% | 0.00% | 0.07% | -| winogrande | 0.00% | 0.11% | 0.20% | - -### MMLU - -Score type: multiple choice grade. - -Note: the number in brackets is the number of sub-tasks for each supercategory. - -| mmlu | 7b | 13b | 70b | -| -- | -- | -- | -- | -| stem [18] | 0.79% | 0.05% | 0.01% | -| humanities [13] | 0.19% | 0.01% | 0.02% | -| other (business, health, misc.) [14] | 0.08% | 0.06% | 0.12% | -| social sciences [12] | 0.37% | 0.21% | 0.01% | - -# Llama-3 - -Llama-3 checkpoints can be loaded into Megatron for inference and for finetuning. Loading these checkpoints consists of several steps: - -1. Get access to download the checkpoints (weights and tokenizer). -2. Convert the checkpoints from Huggingface format to Megatron format. -3. (Optional) Validate converted checkpoints -4. Setup arguments for launching the model. - -The following sections detail these steps. - -## Contents - * [Download Huggingface checkpoints](#download-huggingface-checkpoints) - * [Convert checkpoint format](#convert-checkpoint-format) - * [Huggingface format](#huggingface-format) - * [Validate checkpoint](#optional-validate-checkpoint) - * [Launch model](#launch-model) - -## Download Huggingface checkpoints - -Users must first apply for access to download the Llama-3 checkpoints from [Huggingface](https://huggingface.co/meta-llama). - -## Convert checkpoint format - -We recommend passing `--dtype bf16` for training or finetuning. Inference can be done in bfloat16 or float16. - -### Huggingface format - -The HF checkpoints can be converted to Megatron format by using Megatron's own Llama-3 checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). One important argument that must be set correctly is the tensor parallel size (`TP`) for each model. The following table shows these values: - -| Model size | Tensor parallel size (`TP`) | -| ---------- | --------------------------- | -| 8B | 1 | -| 70B | 8 | - -Using these values for `TP`, along with the path to the Llama-3 tokenizer model (automatically downloaded with original checkpoint download; see `${TOKENIZER_MODEL}` below), run the following command from the root of your Megatron source code to convert from HF format to Megatron format: - -``` -$>: python tools/checkpoint/convert.py \ - > --bf16 \ - > --model-type GPT \ - > --loader llama_mistral \ - > --saver mcore \ - > --target-tensor-parallel-size ${TP} \ - > --checkpoint-type hf - > --load-dir ${HF_FORMAT_DIR} \ - > --save-dir ${MEGATRON_FORMAT_DIR} \ - > --tokenizer-model ${TOKENIZER_MODEL} - > --model-size llama3-8B \ -``` - -Valid values for `--model-size` are `llama3-8B` and `llama3-70B` (for pretrained-only models), and `llama3-8Bf` and `llama3-70Bf` (for chat-finetuned models). - -After this conversion, we are ready to load the checkpoints into a Megatron GPT model. - -## (Optional) Validate checkpoints - -A Megatron-LM text generation server for Llama3 can be launched using the script `examples/llama_mistral/run_text_generation_llama3.sh `. - -Once running, query the server with `curl 'http://:5000/api' -X 'PUT' -H 'Content-Type: application/json; charset=UTF-8' -d '{"prompts":[""], "tokens_to_generate":100, "top_k":1}'`. - -A reference generation for comparison can be obtained from the Huggingface transformers library by running `python examples/llama_mistral/huggingface_reference.py --model_path --prompt `. 
- -## Launch model - -If loading for either inference or finetuning, use the following arguments: - -``` ---tensor-model-parallel-size ${TP} \ ---pipeline-model-parallel-size 1 \ ---seq-length 8192 \ ---max-position-embeddings 8192 \ ---tokenizer-type HuggingFaceTokenizer \ ---tokenizer-model ${TOKENIZER_MODEL} \ ---load ${CHECKPOINT_DIR} \ ---exit-on-missing-checkpoint \ ---use-checkpoint-args \ ---no-load-optim \ ---no-load-rng \ ---untie-embeddings-and-output-weights \ ---normalization RMSNorm \ ---position-embedding-type rope \ ---no-masked-softmax-fusion \ ---attention-softmax-in-fp32 \ ---disable-bias-linear \ ---transformer-impl transformer_engine \ ---group-query-attention 8 \ ---attention-dropout 0.0 \ ---hidden-dropout 0.0 \ ---rotary-base 500000 \ ---rotary-percent 1.0 \ ---ffn-hidden-size 14336 \ ---num-attention-heads 32 \ ---swiglu \ ---bf16 \ -``` - -# Llama-3.1 - -Llama-3 checkpoints can be loaded into Megatron for inference and for finetuning. Loading these checkpoints consists of several steps: - -1. Get access to download the checkpoints (weights and tokenizer). -2. Convert the checkpoints from Huggingface format to Megatron format. -3. (Optional) Validate converted checkpoints -4. Setup arguments for launching the model. - -The following sections detail these steps. - -## Contents - * [Download Huggingface checkpoints](#download-huggingface-checkpoints) - * [Convert checkpoint format](#convert-checkpoint-format) - * [Huggingface format](#huggingface-format) - * [Validate checkpoint](#optional-validate-checkpoint) - * [Launch model](#launch-model) - -## Download Huggingface checkpoints - -Users must first apply for access to download the Llama-3 checkpoints from [Huggingface](https://huggingface.co/meta-llama). - -## Convert checkpoint format - -We recommend passing `--dtype bf16` for training or finetuning. Inference can be done in bfloat16 or float16. - -### Huggingface format - -The HF checkpoints can be converted to Megatron format by using Megatron's own Llama-3 checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). One important argument that must be set correctly is the tensor parallel size (`TP`) for each model. The following table shows these values: - -| Model size | Tensor parallel size (`TP`) | -| ---------- | --------------------------- | -| 8B | 1 | -| 70B | 8 | - -Using these values for `TP`, along with the path to the Llama-3 tokenizer model (automatically downloaded with original checkpoint download; see `${TOKENIZER_MODEL}` below), run the following command from the root of your Megatron source code to convert from HF format to Megatron format: - -``` -$>: python tools/checkpoint/convert.py \ - > --bf16 \ - > --model-type GPT \ - > --loader llama_mistral \ - > --saver mcore \ - > --target-tensor-parallel-size ${TP} \ - > --checkpoint-type hf - > --load-dir ${HF_FORMAT_DIR} \ - > --save-dir ${MEGATRON_FORMAT_DIR} \ - > --tokenizer-model ${TOKENIZER_MODEL} - > --model-size llama3-8B \ -``` - -Valid values for `--model-size` are `llama3.1-8B` and `llama3.1-70B` (for pretrained-only models), and `llama3.1-8Bf` and `llama3.1-70Bf` (for chat-finetuned models). - -After this conversion, we are ready to load the checkpoints into a Megatron GPT model. - -## (Optional) Validate checkpoints - -A Megatron-LM text generation server for Llama3.1 can be launched using the script `examples/llama_mistral/run_text_generation_llama3.1.sh `. 
- -Once running, query the server with `curl 'http://:5000/api' -X 'PUT' -H 'Content-Type: application/json; charset=UTF-8' -d '{"prompts":[""], "tokens_to_generate":100, "top_k":1}'`. - -A reference generation for comparison can be obtained from the Huggingface transformers library by running `python examples/llama_mistral/huggingface_reference.py --model_path --prompt `. - -## Launch model - -If loading for either inference or finetuning, use the following arguments: - -``` ---tensor-model-parallel-size ${TP} \ ---pipeline-model-parallel-size 1 \ ---seq-length 8192 \ ---max-position-embeddings 131072 \ ---tokenizer-type HuggingFaceTokenizer \ ---tokenizer-model ${TOKENIZER_MODEL} \ ---load ${CHECKPOINT_DIR} \ ---exit-on-missing-checkpoint \ ---use-checkpoint-args \ ---no-load-optim \ ---no-load-rng \ ---untie-embeddings-and-output-weights \ ---normalization RMSNorm \ ---position-embedding-type rope \ ---no-masked-softmax-fusion \ ---attention-softmax-in-fp32 \ ---disable-bias-linear \ ---transformer-impl transformer_engine \ ---group-query-attention 8 \ ---attention-dropout 0.0 \ ---hidden-dropout 0.0 \ ---rotary-base 500000 \ ---rotary-percent 1.0 \ ---use-rope-scaling \ ---ffn-hidden-size 14336 \ ---num-attention-heads 32 \ ---swiglu \ ---bf16 \ -``` - -# Mistral-7b - -Megatron currently supports loading the v0.3 release of Mistral-7b (which does not use sliding window attention and offers a larger 32768 vocabulary) for inference and finetuning. Loading these checkpoints consists of several steps: - -1. Get access to download the checkpoints (weights and tokenizer). -2. Convert the checkpoints from HuggingFace format to Megatron format. -3. (Optional) Validate converted checkpoints -4. Setup arguments for launching the model. - -The following sections detail these steps. - -## Contents - * [Download Huggingface checkpoints](#download-huggingface-checkpoints) - * [Convert checkpoint format](#convert-checkpoint-format) - * [(Optional) Validate checkpoint](#optional-validate-checkpoint) - * [Launch model](#launch-model) - -## Download Huggingface checkpoints - -Users must first apply for access to download the Mistral-7b checkpoints through [Huggingface](https://huggingface.co/mistralai/Mistral-7B-v0.3) (HF). - -## Convert checkpoint format - -The HF checkpoints can be converted to Megatron format by using Megatron's own Mistral checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). - -Using the path to the Mistral tokenizer model (downloaded alongside the HF checkpoint), run the following command from the root of your Megatron source code to convert from HF format to mcore format: - -``` -$>: python tools/checkpoint/convert.py \ - > --bf16 \ - > --model-type GPT \ - > --loader llama_mistral \ - > --saver mcore \ - > --target-tensor-parallel-size ${TP} \ - > --checkpoint-type hf \ - > --load-dir ${HF_FORMAT_DIR} \ - > --save-dir ${MEGATRON_FORMAT_DIR} \ - > --tokenizer-model ${TOKENIZER_MODEL} \ - > --model-size mistral-7B \ -``` - -Valid values for `--model-size` are mistral-7B for the pretrained model or mistral-7Bf for the chat fine-tuned model. - -After this conversion, we are ready to load the checkpoints into an mcore GPT model. - -## (Optional) Validate checkpoints - -A Megatron-LM text generation server for Mistral-7B can be launched using the script `examples/llama_mistral/run_text_generation_mistral.sh `. 
- -Once running, query the server with `curl 'http://:5000/api' -X 'PUT' -H 'Content-Type: application/json; charset=UTF-8' -d '{"prompts":[""], "tokens_to_generate":100, "top_k":1}'`. - -A reference generation for comparison can be obtained from the Huggingface transformers library by running `python examples/llama_mistral/huggingface_reference.py --model_path --prompt `. - -## Launch model - -If loading for either inference or finetuning, use the following arguments: - -``` ---tensor-model-parallel-size ${TP} \ ---pipeline-model-parallel-size 1 \ ---seq-length 4096 \ ---max-position-embeddings 4096 \ ---tokenizer-type HuggingFaceTokenizer \ ---tokenizer-model ${TOKENIZER_MODEL} \ ---load ${CHECKPOINT_DIR} \ ---exit-on-missing-checkpoint \ ---use-checkpoint-args \ ---no-load-optim \ ---no-load-rng \ ---untie-embeddings-and-output-weights \ ---normalization RMSNorm \ ---position-embedding-type rope \ ---no-masked-softmax-fusion \ ---attention-softmax-in-fp32 ---apply-layernorm-1p \ ---transformer-impl transformer_engine \ ---group-query-attention 8 \ ---disable-bia-linear \ ---rotary-base 1000000 \ ---rotary-percent 1.0 \ ---swiglu \ ---ffn-hidden-size 14336 \ ---num-attention-heads 32 -``` - -# Other Llama-like model support - -*Note: Experimental* - -Many models such as Yi-34B use the Llama architecture and may be converted from HuggingFace to Megatron using the commands in [Llama3](#llama-3). - -# Known numerical differences - -It is not expected that the megatron and Huggingface implementations of llama3.x and mistral models will produce numerically identical results. There are multiple points where small numerical differences are expected. This is a non-exhaustive list: - -1. TransformerEngine (TE) uses the model params_dtype inside RMSNorm whereas the Huggingface implementation uses fp32. See for details: https://github.com/NVIDIA/TransformerEngine/issues/1132 -2. Huggingface `transformers` implements the q, k and v projections in self-attention as separate GEMMs whereas mcore combines them into a single GEMM for efficiency. This leads to small numerical differences. - +# Llama, Mistral and other Llama-like model support in Megatron-LM + +NOTE: In order to simplify code we now only support converting llama-3.x and mistral checkpoints downloaded from Huggingface. + +The [Llama-2](https://ai.meta.com/llama/) and [Llama-3.x](https://llama.meta.com/) family of models are an open-source set of pretrained & finetuned (for chat) models that have achieved strong results across a wide set of benchmarks. At their times of release, both Llama-2 and Llama-3 models achieved among the best results for open-source models, and were competitive with leading closed-source models (see https://arxiv.org/pdf/2307.09288.pdf and https://ai.meta.com/blog/meta-llama-3/). + +Similarly, [Mistral-7b](https://mistral.ai/news/announcing-mistral-7b/) is an open-source model with pretrained and finetuned (for chat) variants that achieve strong benchmark results. + +Architecturally Llama-2, Llama-3 and Mistral-7b are very similar. As such Megatron can support loading checkpoints from all three for inference and finetuning. Converting the checkpoints and loading them is slightly different for each model and is detailed for each below. 
+ +# Contents + +- [Llama, Mistral and other Llama-like model support in Megatron-LM](#llama-mistral-and-other-llama-like-model-support-in-megatron-lm) +- [Contents](#contents) +- [Llama-2](#llama-2) + - [Download Meta or Huggingface checkpoints](#download-meta-or-huggingface-checkpoints) + - [Convert checkpoint format](#convert-checkpoint-format) + - [Meta format](#meta-format) + - [Huggingface format](#huggingface-format) + - [Launch model](#launch-model) + - [Launch Megatron](#launch-megatron) + - [Launch Meta](#launch-meta) + - [Launch Huggingface](#launch-huggingface) + - [Benchmark results](#benchmark-results) + - [Big Bench](#big-bench) + - [Multilingual](#multilingual) + - [LM Evaluation Harness](#lm-evaluation-harness) + - [MMLU](#mmlu) +- [Llama-3.x](#llama-3x) + - [Download Huggingface checkpoints](#download-huggingface-checkpoints) + - [Convert checkpoint format](#convert-checkpoint-format-1) + - [Huggingface format](#huggingface-format-1) + - [(Optional) Validate checkpoints](#optional-validate-checkpoints) + - [Launch model](#launch-model-1) +- [Mistral-7b](#mistral-7b) + - [Download Huggingface checkpoints](#download-huggingface-checkpoints-2) + - [Convert checkpoint format](#convert-checkpoint-format-3) + - [(Optional) Validate checkpoints](#optional-validate-checkpoints-2) + - [Launch model](#launch-model-3) +- [Other Llama-like model support](#other-llama-like-model-support) +- [Known numerical differences](#known-numerical-differences) +- [Using legacy model format](#using-legacy-model-format) + + +# Llama-2 + +Llama-2 checkpoints can be loaded into Megatron for inference and for finetuning. Loading these checkpoints consists of three steps: + +1. Get access to download the checkpoints. +2. Convert the checkpoints from Meta/Huggingface format to Megatron format. +3. Setup arguments for launching the model. + +The following sections detail these steps. The final section lists benchmark result comparisons between: 1) Llama-2 inference code running the Meta-format checkpoints, and 2) Megatron inference code running the converted checkpoints. + +## Download Meta or Huggingface checkpoints + +Users must first apply for access to download the Llama-2 checkpoints either directly from [Meta](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) or through [Huggingface](https://huggingface.co/docs/transformers/main/model_doc/llama2) (HF). The checkpoints are available in two formats, Meta's native format (available from both the Meta and HF links), and HF's format (available only from HF). Either format can be converted to Megatron, as detailed next. + +## Convert checkpoint format + +We recommend passing `--dtype bf16` for training or finetuning. Inference can be done in bfloat16 or float16. + +### Meta format + +The Meta format checkpoints are converted to HF format as an intermediate step before converting to Megatron format. The `transformers` package is required, and must have version >=4.31.0 (e.g., `pip install transformers>=4.31.0`). (**Note**: we have specifically tested with versions `4.31.0` and `4.32.0`; your experience may vary with newer versions.) 
Assuming the downloaded checkpoints are in `$CHECKPOINT_DIR` (with separate sub-directories for 7B, 13B, 70B, etc.), the following example command can be used to convert from Llama-2 format to HF format in bfloat16: + +``` +python tools/checkpoint/convert.py \ +> --model-type GPT \ +> --loader llama_mistral \ +> --load-dir ${META_FORMAT_DIR} \ +> --model-size ${MODEL_SIZE} \ +> --checkpoint-type meta \ +> --tokenizer-model ${TOKENIZER_MODEL} \ +> --saver core \ +> --save-dir ${MEGATRON_FORMAT_DIR} \ +> --target-tensor-parallel-size ${TP} \ +> --target-pipeline-parallel-size ${PP} \ +> --bf16 +``` + +Valid values for `--model-size` are `llama2-7B`, `llama2-13B`, and `llama2-70B` (for pretrained-only models), and `llama2-7Bf`, `llama2-13Bf`, and `llama2-70Bf` (for chat-finetuned models). + +### Huggingface format + +The HF checkpoints can be converted to Megatron format by using Megatron's own Llama-2 checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). One important argument that must be set correctly is the tensor parallel size (`TP`) for each model. The following table shows these values: + +| Model size | Tensor parallel size (`TP`) | +| ---------- | --------------------------- | +| 7B | 1 | +| 13B | 2 | +| 70B | 8 | + +Using these values for `TP`, along with the path to the Llama-2 tokenizer model (automatically downloaded with original checkpoint download; see `${TOKENIZER_MODEL}` below), run the following command from the root of your Megatron source code to convert from HF format to Megatron format: + +``` +python tools/checkpoint/convert.py \ +> --model-type GPT \ +> --loader llama_mistral \ +> --load-dir ${HF_FORMAT_DIR} \ +> --model-size ${MODEL_SIZE} \ +> --checkpoint-type hf \ +> --tokenizer-model ${TOKENIZER_MODEL} \ +> --saver core \ +> --save-dir ${MEGATRON_FORMAT_DIR} \ +> --target-tensor-parallel-size ${TP} \ +> --target-pipeline-parallel-size ${PP} \ +> --bf16 +``` + +After this conversion, we are ready to load the checkpoints into a Megatron GPT model. + +## Launch model + +### Launch Megatron + +If loading for either inference or finetuning, use the following arguments: + +``` +--tensor-model-parallel-size ${TP} \ +--pipeline-model-parallel-size 1 \ +--seq-length 4096 \ +--max-position-embeddings 4096 \ +--tokenizer-type Llama2Tokenizer \ +--tokenizer-model ${TOKENIZER_MODEL} \ +--load ${CHECKPOINT_DIR} \ +--exit-on-missing-checkpoint \ +--use-checkpoint-args \ +--no-load-optim \ +--no-load-rng \ +--untie-embeddings-and-output-weights \ +--use-rotary-position-embeddings \ +--normalization RMSNorm \ +--no-position-embedding \ +--no-masked-softmax-fusion \ +--attention-softmax-in-fp32 +``` + +**Note:** If you converted to the legacy model format (i.e., `--saver legacy`), please see [here](#using-legacy-model-format). + +### Launch Meta + +Meta checkpoints can be launched with: https://github.com/facebookresearch/llama + +### Launch Huggingface + +Huggingface checkpoints can be launched with: https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/modeling_llama.py + +## Benchmark results + +The tables below list the benchmark comparisons between native Llama-2 (using Meta's checkpoint and Meta's inference code) and Megatron (using a converted HF checkpoint and Megatron's inference code). + +The values are the percent error between Megatron and Llama-2, calculated using the formula: `| - | / `, where the type of score is detailed before each table. 
Across all tests (80 total per model size), the mean error is 0.15%. The small difference in benchmark scores between the two models is due to minor arithmetic differences in implementation that alter the numerics slightly. Some of the factors that influence this difference include: + +- Megatron performs batch matrix multiplications in a couple places, such as within self attention and in SwiGLU, that Llama performs separately. +- Megatron uses `torch.baddbmm` within self attention, versus Llama using `torch.matmul`. +- Megatron uses a `sin`/`cos` implementation for rotary position embeddings, versus Llama using a `polar`/`complex` implementation. +- Llama calls `torch.set_default_dtype(torch.float16)` during initialization, which Megatron does not. + +### Big Bench + +Score type: multiple choice grade. + +| bigbench / standard | 7b | 13b | 70b | +| -- | -- | -- | -- | +| date_understanding | 0.29% | 0.13% | 0.12% | +| general_knowledge | 0.00% | 0.00% | 0.00% | +| human_organs_senses | 0.00% | 0.00% | 0.00% | +| intent_recognition | 0.00% | 0.11% | 0.00% | +| riddle_sense | 0.00% | 0.00% | 0.00% | +| similarities_abstraction | 0.00% | 0.58% | 0.00% | +| simple_arithmetic_json_multiple_choice | 0.00% | 0.00% | 0.00% | +| undo_permutation | 0.19% | 0.19% | 0.18% | + +### Multilingual + +Score type: multiple choice grade. + +| multilingual / xcopa | 7b | 13b | 70b | +| -- | -- | -- | -- | +| en-template-mGPT-remove-punctuation | 0.08% | 0.00% | 0.00% | +| et-template-mGPT-remove-punctuation | 0.00% | 0.13% | 0.25% | +| ht-template-mGPT-remove-punctuation | 0.26% | 0.13% | 0.26% | +| id-template-mGPT-remove-punctuation | 0.11% | 0.00% | 0.19% | +| it-template-mGPT-remove-punctuation | 0.00% | 0.10% | 0.09% | +| qu-template-mGPT-remove-punctuation | 0.00% | 0.00% | 0.27% | +| sw-template-mGPT-remove-punctuation | 0.14% | 0.13% | 0.13% | +| th-template-mGPT-remove-punctuation | 0.25% | 0.13% | 0.13% | +| tr-template-mGPT-remove-punctuation | 0.26% | 0.00% | 0.34% | +| vi-template-mGPT-remove-punctuation | 0.00% | 0.11% | 0.00% | +| zh-template-mGPT-remove-punctuation | 0.00% | 0.10% | 0.09% | + +### LM Evaluation Harness + +Score type: multiple choice grade. + +| lm-eval | 7b | 13b | 70b | +| -- | -- | -- | -- | +| boolq | 0.04% | 0.04% | 0.07% | +| hellaswag | 0.02% | 0.03% | 0.03% | +| piqa | 0.00% | 0.00% | 0.07% | +| winogrande | 0.00% | 0.11% | 0.20% | + +### MMLU + +Score type: multiple choice grade. + +Note: the number in brackets is the number of sub-tasks for each supercategory. + +| mmlu | 7b | 13b | 70b | +| -- | -- | -- | -- | +| stem [18] | 0.79% | 0.05% | 0.01% | +| humanities [13] | 0.19% | 0.01% | 0.02% | +| other (business, health, misc.) [14] | 0.08% | 0.06% | 0.12% | +| social sciences [12] | 0.37% | 0.21% | 0.01% | + +# Llama-3.x + +Llama-3.x checkpoints can be loaded into Megatron for inference and for finetuning. Loading these checkpoints consists of several steps: + +1. Get access to download the checkpoints (weights and tokenizer). +2. Convert the checkpoints from Huggingface format to Megatron format. +3. (Optional) Validate converted checkpoints +4. Setup arguments for launching the model. + +The following sections detail these steps. + +## Download Huggingface checkpoints + +Users must first apply for access to download the Llama-3.x checkpoints from [Huggingface](https://huggingface.co/meta-llama). + +## Convert checkpoint format + +We recommend passing `--dtype bf16` for training or finetuning. Inference can be done in bfloat16 or float16. 
+ +### Huggingface format + +The HF checkpoints can be converted to Megatron format by using Megatron's own Llama-3.x checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). One important argument that must be set correctly is the tensor parallel size (`TP`) for each model. The following table shows these values: + +| Model size | Tensor parallel size (`TP`) | +| ---------- | --------------------------- | +| 1B | 1 | +| 3B | 1 | +| 8B | 1 | +| 70B | 8 | + +Using these values for `TP`, along with the path to the Llama-3.x tokenizer model (automatically downloaded with original checkpoint download; see `${TOKENIZER_MODEL}` below), run the following command from the root of your Megatron source code to convert from HF format to Megatron format: + +``` +$>: python tools/checkpoint/convert.py \ + > --bf16 \ + > --model-type GPT \ + > --loader llama_mistral \ + > --saver core \ + > --target-tensor-parallel-size ${TP} \ + > --checkpoint-type hf \ + > --load-dir ${HF_FORMAT_DIR} \ + > --save-dir ${MEGATRON_FORMAT_DIR} \ + > --tokenizer-model ${TOKENIZER_MODEL} \ + > --model-size llama3 \ +``` + +After this conversion, we are ready to load the checkpoints into a Megatron GPT model. + +## (Optional) Validate checkpoints + +A Megatron-LM text generation server for Llama3 can be launched using the script `examples/inference/llama_mistral/run_text_generation_llama3.sh `. For Llama3.1, please use `examples/inference/llama_mistral/run_text_generation_llama3.1.sh`. + +Once running, query the server with `curl 'http://:5000/api' -X 'PUT' -H 'Content-Type: application/json; charset=UTF-8' -d '{"prompts":[""], "tokens_to_generate":100, "top_k":1}'`. + +A reference generation for comparison can be obtained from the Huggingface transformers library by running `python examples/llama_mistral/huggingface_reference.py --model_path --prompt `. 
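+
+If you prefer Python over `curl`, the same request can be issued with the `requests` package. This is a minimal sketch; the server address and the prompt are placeholders to replace with your own values:
+
+```python
+import requests
+
+# Same payload as the curl command above; the generation server listens on port 5000.
+response = requests.put(
+    "http://<server-ip>:5000/api",  # substitute the host running the generation server
+    json={
+        "prompts": ["<your prompt>"],
+        "tokens_to_generate": 100,
+        "top_k": 1,
+    },
+    headers={"Content-Type": "application/json; charset=UTF-8"},
+)
+response.raise_for_status()
+print(response.json())
+```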
+ +## Launch model + +If loading for either inference or finetuning, use the following arguments for Llama 3.0: + +``` +--tensor-model-parallel-size ${TP} \ +--pipeline-model-parallel-size 1 \ +--seq-length 8192 \ +--max-position-embeddings 8192 \ +--tokenizer-type HuggingFaceTokenizer \ +--tokenizer-model ${TOKENIZER_MODEL} \ +--load ${CHECKPOINT_DIR} \ +--exit-on-missing-checkpoint \ +--use-checkpoint-args \ +--no-load-optim \ +--no-load-rng \ +--untie-embeddings-and-output-weights \ +--normalization RMSNorm \ +--position-embedding-type rope \ +--no-masked-softmax-fusion \ +--attention-softmax-in-fp32 \ +--disable-bias-linear \ +--transformer-impl transformer_engine \ +--group-query-attention 8 \ +--attention-dropout 0.0 \ +--hidden-dropout 0.0 \ +--rotary-base 500000 \ +--rotary-percent 1.0 \ +--ffn-hidden-size 14336 \ +--num-attention-heads 32 \ +--swiglu \ +--bf16 \ +``` + +For Llama3.1 please use the following arguments: + +``` +--tensor-model-parallel-size ${TP} \ +--pipeline-model-parallel-size 1 \ +--seq-length 8192 \ +--max-position-embeddings 131072 \ +--tokenizer-type HuggingFaceTokenizer \ +--tokenizer-model ${TOKENIZER_MODEL} \ +--load ${CHECKPOINT_DIR} \ +--exit-on-missing-checkpoint \ +--use-checkpoint-args \ +--no-load-optim \ +--no-load-rng \ +--untie-embeddings-and-output-weights \ +--normalization RMSNorm \ +--position-embedding-type rope \ +--no-masked-softmax-fusion \ +--attention-softmax-in-fp32 \ +--disable-bias-linear \ +--transformer-impl transformer_engine \ +--group-query-attention 8 \ +--attention-dropout 0.0 \ +--hidden-dropout 0.0 \ +--rotary-base 500000 \ +--rotary-percent 1.0 \ +--use-rope-scaling \ +--ffn-hidden-size 14336 \ +--num-attention-heads 32 \ +--swiglu \ +--bf16 \ +``` + +**Note:** If you converted to the legacy model format (i.e., `--saver legacy`), please see [here](#using-legacy-model-format). + +# Mistral-7b + +Megatron currently supports loading the v0.3 release of Mistral-7b (which does not use sliding window attention and offers a larger 32768 vocabulary) for inference and finetuning. Loading these checkpoints consists of several steps: + +1. Get access to download the checkpoints (weights and tokenizer). +2. Convert the checkpoints from HuggingFace format to Megatron format. +3. (Optional) Validate converted checkpoints +4. Setup arguments for launching the model. + +The following sections detail these steps. + +## Download Huggingface checkpoints + +Users must first apply for access to download the Mistral-7b checkpoints through [Huggingface](https://huggingface.co/mistralai/Mistral-7B-v0.3) (HF). + +## Convert checkpoint format + +The HF checkpoints can be converted to Megatron format by using Megatron's own Mistral checkpoint converter for HF format (see script `tools/checkpoint/loader_llama_mistral.py`). + +Using the path to the Mistral tokenizer model (downloaded alongside the HF checkpoint), run the following command from the root of your Megatron source code to convert from HF format to the Megatron core format: + +``` +$>: python tools/checkpoint/convert.py \ + > --bf16 \ + > --model-type GPT \ + > --loader llama_mistral \ + > --saver core \ + > --target-tensor-parallel-size ${TP} \ + > --checkpoint-type hf \ + > --load-dir ${HF_FORMAT_DIR} \ + > --save-dir ${MEGATRON_FORMAT_DIR} \ + > --tokenizer-model ${TOKENIZER_MODEL} \ + > --model-size mistral \ +``` + +After this conversion, we are ready to load the checkpoints into a Megatron core GPT model. 
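+
+As a quick structural check (separate from the server-based validation described next), a short script like the one below can confirm that the conversion produced the expected shards. It is a sketch that assumes Megatron's `torch` checkpoint layout (a `latest_checkpointed_iteration.txt` tracker file plus `mp_rank_*` sub-directories containing `model_optim_rng.pt`); adjust the paths if your output differs:
+
+```python
+import glob
+import os
+import sys
+
+import torch
+
+save_dir = sys.argv[1]  # the ${MEGATRON_FORMAT_DIR} used during conversion
+
+# The tracker file holds either an iteration number or the string "release".
+with open(os.path.join(save_dir, "latest_checkpointed_iteration.txt")) as f:
+    tag = f.read().strip()
+subdir = tag if tag == "release" else f"iter_{int(tag):07d}"
+
+# One mp_rank_* directory is expected per tensor-parallel (and pipeline) shard.
+shards = sorted(glob.glob(os.path.join(save_dir, subdir, "mp_rank_*")))
+print(f"Found {len(shards)} shard(s) under {subdir}")
+for shard in shards:
+    ckpt = torch.load(
+        os.path.join(shard, "model_optim_rng.pt"),
+        map_location="cpu",
+        weights_only=False,  # Megatron checkpoints store args/rng state, not just tensors
+    )
+    print(os.path.basename(shard), "->", sorted(ckpt.keys()))
+```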
+ +## (Optional) Validate checkpoints + +A Megatron-LM text generation server for Mistral-7B can be launched using the script `examples/inference/llama_mistral/run_text_generation_mistral.sh `. + +Once running, query the server with `curl 'http://:5000/api' -X 'PUT' -H 'Content-Type: application/json; charset=UTF-8' -d '{"prompts":[""], "tokens_to_generate":100, "top_k":1}'`. + +A reference generation for comparison can be obtained from the Huggingface transformers library by running `python examples/inference/llama_mistral/huggingface_reference.py --model_path --prompt `. + +## Launch model + +If loading for either inference or finetuning, use the following arguments: + +``` +--tensor-model-parallel-size ${TP} \ +--pipeline-model-parallel-size 1 \ +--seq-length 4096 \ +--max-position-embeddings 4096 \ +--tokenizer-type HuggingFaceTokenizer \ +--tokenizer-model ${TOKENIZER_MODEL} \ +--load ${CHECKPOINT_DIR} \ +--exit-on-missing-checkpoint \ +--use-checkpoint-args \ +--no-load-optim \ +--no-load-rng \ +--untie-embeddings-and-output-weights \ +--normalization RMSNorm \ +--position-embedding-type rope \ +--no-masked-softmax-fusion \ +--attention-softmax-in-fp32 +--apply-layernorm-1p \ +--transformer-impl transformer_engine \ +--group-query-attention 8 \ +--disable-bia-linear \ +--rotary-base 1000000 \ +--rotary-percent 1.0 \ +--swiglu \ +--ffn-hidden-size 14336 \ +--num-attention-heads 32 +``` + +**Note:** If you converted to the legacy model format (i.e., `--saver legacy`), please see [here](#using-legacy-model-format). + +# Other Llama-like model support + +*Note: Experimental* + +Many models such as Yi-34B and Qwen2.x use the Llama architecture and may be converted from HuggingFace to Megatron using the commands in [Llama-3.x](#llama-3x). + +# Known numerical differences + +It is not expected that the megatron and Huggingface implementations of llama3.x and mistral models will produce numerically identical results. There are multiple points where small numerical differences are expected. This is a non-exhaustive list: + +1. TransformerEngine (TE) uses the model params_dtype inside RMSNorm whereas the Huggingface implementation uses fp32. See for details: https://github.com/NVIDIA/TransformerEngine/issues/1132 +2. Huggingface `transformers` implements the q, k and v projections in self-attention as separate GEMMs whereas Megatron core combines them into a single GEMM for efficiency. This leads to small numerical differences. + +# Using legacy model format + +In all the checkpoint conversion examples used in this document, the saver format `--saver core` is used, signifying that the newer (and recommended) Megatron GPT model class will be used. I.e.: + +- old class: `megatron.legacy.model.gpt_model.GPTModel` +- new class: `megatron.core.models.gpt.gpt_model.GPTModel` + +Using this new format is the recommended approach. However, if your use case requires using the older class (i.e., convert using `--saver legacy`), then when launching training or finetuning, the following args must be added: + +- `--use-legacy-models`: use the older model class +- `--ckpt-format torch`: use the `torch` checkpoint format, which is the only checkpoint format that is compatible with the legacy model format diff --git a/docs/source/api-guide/custom_fsdp.md b/docs/source/api-guide/custom_fsdp.md new file mode 100644 index 0000000..5784ae0 --- /dev/null +++ b/docs/source/api-guide/custom_fsdp.md @@ -0,0 +1,183 @@ +# MCore Custom Fully Sharded Data Parallel (FSDP) + +## How to use ? 
+
+Add these flags to enable MCore custom FSDP.
+
+```bash
+--use-custom-fsdp
+--data-parallel-sharding-strategy optim_grads_params
+--no-gradient-accumulation-fusion
+--use-distributed-optimizer
+```
+
+## Key Features
+
+- **Sharding Strategy**: Efficiently shards optimizer states, gradients, and parameters to reduce memory consumption.
+- **Communication and Computation Overlap**: Optimized to enable concurrent execution of communication and computation, enhancing overall efficiency.
+- **Supports automatic mixed precision training**: Compatible with BF16 O1/O2/O3 recipes, as well as FP8 compute with FP32 parameters and FP8 parameter training, allowing for flexible precision configurations.
+- **Tensor Parallelism (TP), Expert Parallelism (EP) and Context Parallelism (CP)**: Compatible with TP, EP and CP configurations, enabling efficient scaling of large language models.
+- **Distributed Model Initialization with Meta Device**: Allows model initialization using the meta device, followed by layer-by-layer initialization of distributed model weight buffers via the `Module.reset_parameters` API, facilitating the initialization of extremely large models.
+
+## Configuration Recommendations
+
+### 1. Disable `CUDA_MAX_CONNECTIONS`
+
+To ensure full parallelization of FSDP communication and computation, disable the `CUDA_MAX_CONNECTIONS` environment variable. This avoids potential bubbles in the CUDA stream. (But it may slow down TP and CP to some extent.)
+
+```bash
+unset CUDA_MAX_CONNECTIONS
+```
+
+### 2. Add `--calculate-per-token-loss`
+
+To get the best performance from the gradient-sharding modes, include the `--calculate-per-token-loss` flag in your training script. This improves performance by reducing the frequency of gradient scaling, which is also a sizable drain on SM resources.
+
+## Design of Custom FSDP
+
+### 1. Overview
+
+The custom Fully Sharded Data Parallelism (FSDP) implementation in Megatron-Core is specifically designed to optimize memory consumption and performance for large language models. The core design principles include:
+
+ - **Optimized for Large Language Models**: This custom FSDP implementation is tailored to efficiently scale with models containing billions of parameters, ensuring seamless execution and training of massive models.
+ - **Efficient Memory Consumption**: By strategically sharding optimizer states, gradients, and model parameters, the custom FSDP significantly reduces memory usage. This approach enables the training of models that would otherwise be too large to fit in memory.
+ - **Efficient Workflow & Overlapping Communication and Computation**: The implementation is engineered to minimize the number of communication steps required during training. It maximizes the overlap between communication and computation, thereby enhancing overall training efficiency and reducing latency.
+ - **Support for MCore's Efficient Training Methods**: The custom FSDP seamlessly integrates with Megatron-Core's advanced parallelism techniques, including tensor parallelism, expert parallelism and context parallelism. Additionally, it supports automatic mixed precision training, further optimizing training performance and efficiency.
+
+The design of Custom FSDP draws inspiration from PyTorch FSDP [Zhao, Yanli, et al.](https://arxiv.org/pdf/2304.11277) and MCore's distributed optimizer. The introduction to PyTorch FSDP is referenced here to clarify the underlying concepts of the custom FSDP design.
+
+> In DistributedDataParallel, (DDP) training, each process/ worker owns a replica of the model and processes a batch of data, finally it uses all-reduce to sum up gradients over different workers. In DDP the model weights and optimizer states are replicated across all workers. FSDP is a type of data parallelism that shards model parameters, optimizer states and gradients across DDP ranks.
+
+> When training with FSDP, the GPU memory footprint is smaller than when training with DDP across all workers. This makes the training of some very large models feasible by allowing larger models or batch sizes to fit on device. This comes with the cost of increased communication volume. The communication overhead is reduced by internal optimizations like overlapping communication and computation.
+
+![FSDP workflow](../images/custom_fsdp/FSDP_workflow.png)
+
+*Notice that the unit processed in the workflow here is the “FSDP instance 1: N layers”, where an FSDP instance is the smallest FSDP processing unit (also a PyTorch module). This means that we can safely release this module's weights after using it (executing the forward or backward of this module), and there will be no other computations relying on these weights. This capability is the foundation of FSDP's layer-by-layer execution and memory-saving strategy. An FSDP instance is also referred to as an **FSDP Unit**.*
+
+*It is worth noting that an FSDP instance can correspond to multiple FSDP parameter groups. These groups are separated by Data Parallel (DP) communication groups and the data type of the parameter or gradient. Consequently, an FSDP instance may require several parameter-gather tasks before execution (forward or backward). Each **FSDP parameter group** corresponds to one **Data Parallel Buffer** in custom FSDP.*
+
+At a high level, FSDP works as follows:
+
+In constructor
+ - Shard model parameters and each rank only keeps its own shard
+
+In forward path
+ - Run all_gather to collect all shards from all ranks to recover the full parameter in this FSDP unit
+ - Run forward computation
+ - Discard parameter shards it has just collected
+
+In backward path
+ - Run all_gather to collect all shards from all ranks to recover the full parameter in this FSDP unit
+ - Run backward computation
+ - Run reduce_scatter to sync gradients
+ - Discard parameters.
+
+One way to view FSDP’s sharding is to decompose the DDP gradient all-reduce into reduce-scatter and all-gather. Specifically, during the backward pass, FSDP reduces and scatters gradients, ensuring that each rank possesses a shard of the gradients. Then it updates the corresponding shard of the parameters in the optimizer step. Finally, in the subsequent forward pass, it performs an all-gather operation to collect and combine the updated parameter shards.
+
+![FSDP Allreduce](../images/custom_fsdp/FSDP_Allreduce.png)
+
+### 2. Custom FSDP underlying data structure
+
+To implement the FSDP functionality described above, the custom FSDP is designed with the following Python classes and data structures:
+
+![MCore Custom FSDP Class Diagram](../images/custom_fsdp/MCore_Custom_FSDP_Class_Diagram.png)
+
+### 3. The custom FSDP interface: FullyShardedDataParallel
+
+The custom FSDP exposes the same programming interface as PyTorch's DistributedDataParallel (DDP), under the name FullyShardedDataParallel (FSDP).
For example, you can apply FSDP to models as follows: + +```python +# Initialize model and optimizer +ddp_config.use_custom_fsdp = True +ddp_config.data_parallel_sharding_strategy = "optim_grads_params" +model = GPTModel(transformer_config) +model = FullyShardedDataParallel( + transformer_config, + model, + ddp_config, + fsdp_unit_modules = [TransformerLayer, LanguageModelEmbedding], +) +optimizer = torch.optim.AdamW(model.parameters(), lr=lr) +optimizer = DistributedOptimizer(optimizer, [model], [model.param_and_grad_buffer]) + +# Training loop +def train_step(inputs, labels): + optimizer.zero_grad() + for mbs_input, mbs_label in zip(inputs, labels): + outputs = model(mbs_input) + loss = loss_fn(outputs, mbs_label) + loss.backward() + optimizer.step() + +# Save and load model and optimizer state dict +def model_and_optimizer_state_dict(): + state_dict = { + "model": model.sharded_state_dict(), + "optimizer": optimizer.sharded_state_dict(), + } + return state_dict + +def load_model_and_optimizer_state_dict(state_dict): + model.load_state_dict(state_dict["model"]) + optimizer.load_state_dict(state_dict["optimizer"]) +``` + +**Key Notes:** + - You can configure which modules should be treated as FSDP units via the `fsdp_unit_modules` argument. This configuration is mandatory. + - The custom FSDP must be used with a distributed optimizer since it provides distributed checkpointing. + - The data-parallel communication group for parameters is not explicitly shown. Custom FSDP configures these groups as either DP (data-parallel) or EDP (expert data-parallel) based on parameter markings. + +#### 3.1 Initializing Models on the Meta Device + +For training particularly large models with FSDP, you can initialize the model on the meta device. Using PyTorch's `reset_parameters` API, you can initialize model weights layer by layer during the construction of the `ParamAndGradBuffer`. Most PyTorch native modules and TransformerEngine modules support this API (e.g., [PyTorch Linear](https://github.com/pytorch/pytorch/blob/v2.6.0/torch/nn/modules/linear.py#L114), [TE LayerNormLinear](https://github.com/NVIDIA/TransformerEngine/blob/release_v2.0/transformer_engine/pytorch/module/layernorm_linear.py#L1107)). + +```python +# Initialize model on meta device +with torch.device("meta"): + model = GPTModel(config) + +model = FullyShardedDataParallel( + transformer_config, + model, + ddp_config, + fsdp_unit_modules=[TransformerLayer, LanguageModelEmbedding], +) +``` + +**Important Considerations:** +1. *Custom Modules*: If your model contains custom modules, ensure they implement the `reset_parameters` API. Otherwise, you may need to force parameter initialization on a CUDA or CPU device. +2. *Tensor Initialization*: Be cautious of tensors created during model initialization without a specified device—they will default to the meta device. To avoid issues, explicitly specify the device for these tensors to ensure compatibility with this function. + +### 4. Interaction between Custom FSDP and Model Forward/Backward Propagation + +Custom FSDP implements Fully Sharded Data Parallelism (FSDP) through a series of module hooks, gradient hooks, or by adding functions between modules. This involves inserting communications and manipulating parameters and gradients during PyTorch's module forward or backward propagation. + +Module hooks summary: +- Module pre-forward hook(`module.register_forward_pre_hook`): This hook unshards model weights before the forward pass. 
In the case of an FSDP Unit Module, it also adds a RegisterFSDPBackwardFunction function that will release the module's weights on backward propagation.
+- Module post-forward hook(`module.register_forward_hook`): This hook is used to reshard model weights after the forward pass.
+- Root module pre-backward hook(`root_module.register_full_backward_pre_hook`): This hook checks that all model parameters are resharded, in order to avoid unnecessary memory spikes. It also marks all modules as being in the `TrainingState.PRE_BACKWARD` state.
+- Module pre-backward hook(`module.register_full_backward_pre_hook`): This hook is used to unshard the model weights before the backward pass.
+- Gradient accumulation hook(`grad_acc.register_hook`): This hook is used to accumulate gradients and trigger the gradient reduction pipeline.
+
+
+The gradient reduction pipeline maintains a map of gradients to FSDP parameter groups. If all gradients in an FSDP parameter group are ready, it launches a gradient reduction. Note that this assumes that the model's gradients are always generated in a certain order (reverse of `module.parameters()`); otherwise, FSDP would maintain too many parameter group grad buffers, leading to excessive memory usage.
+
+#### 4.1 Optimized for Activation Recompute
+
+Using activation recompute causes the same module to run its forward function and then its backward function during the backward pass, which unshards the model weights twice and reshards them twice. If we can tell the program that this is a combined forward + backward operation, we only need to unshard once and reshard once.
+
+To make this determination, we track the model's state with `training_state`: `FORWARD`, `PRE_BACKWARD`, `POST_BACKWARD`, `IDLE`. Note that the pre-backward hook acts before the pre-forward hook, so we let the pre-backward hook perform the model weight unshard and then mark the model as `PRE_BACKWARD`; when the pre-forward hook sees this mark, it does not perform the unshard operation. Similarly, to avoid the duplicate model weight reshard, the post-forward hook acts before the post-backward function, and checking for the `PRE_BACKWARD` flag in the post-forward hook cancels the reshard.
+
+### 5. Memory Mechanisms and Features of Custom FSDP
+
+FSDP can fully distribute the model parameters, gradients, and optimizer states, and for mixed-precision training, it can also fully distribute the high-precision main weights. This distributes nearly all of the memory apart from the activation memory, but FSDP still faces some memory issues.
+
+FSDP frequently unshards and reshards model weights, which can lead to busy memory allocation and deallocation. This results in untimely tensor releases, causing memory spikes (or even out-of-memory errors), crashes of the PyTorch memory allocator cache, and a large number of `cudaMalloc` and `cudaFree` calls. These issues can significantly slow down the system.
+
+The problem of untimely tensor release can generally be addressed using the `tensor._typed_storage()._resize_(0)` API, which immediately deallocates the storage's memory. Custom FSDP provides interfaces in `AllGatherPipeline` and `GradReducePipeline` to replace the temporary buffer memory allocator used for parameter gathering and gradient reduction with `StorageResizeBasedBucketAllocator`. This replaces the tensor release operation with the `tensor._typed_storage()._resize_(0)` API.
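+
+For illustration, the snippet below is a minimal, standalone sketch of what this storage-resize trick does for a single buffer tensor. It is not the custom FSDP code itself; the buffer name and sizes are made up for the example, and the `_typed_storage()._resize_()` call named above is the only point of interest.
+
+```python
+import torch
+
+# Minimal standalone sketch: release and later re-grow a persistent buffer's
+# backing memory in place, keeping the Python tensor object alive between uses.
+buf = torch.empty(4 * 1024 * 1024)          # e.g. a temporary all-gather buffer
+print(buf.untyped_storage().nbytes())       # 16 MiB of fp32 backing memory
+
+buf._typed_storage()._resize_(0)            # deallocate the backing memory immediately
+print(buf.untyped_storage().nbytes())       # 0 -- no waiting on Python garbage collection
+
+buf._typed_storage()._resize_(buf.numel())  # grow it back before the buffer's next use
+print(buf.untyped_storage().nbytes())       # 16 MiB again
+```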
+ +The PyTorch memory allocator cache crash is a complex issue that occurs frequently when the actual memory usage approaches the GPU memory limit, leading to poor performance. This problem is challenging and can only be mitigated by avoiding frequent hits on the GPU memory limit. Using a self-managed memory allocator like ` RotaryBucketAllocator` is another potential solution. However, note that `RotaryBucketAllocator` is not yet mature. + +## References + +- [Getting Started with Fully Sharded Data Parallel (FSDP)](https://pytorch.org/tutorials/intermediate/FSDP_tutorial.html) diff --git a/docs/source/api-guide/index.rst b/docs/source/api-guide/index.rst index dac785a..5212fdc 100644 --- a/docs/source/api-guide/index.rst +++ b/docs/source/api-guide/index.rst @@ -1,20 +1,23 @@ -API Guide -========= - -.. toctree:: - :maxdepth: 4 - - models - tensor_parallel - context_parallel - pipeline_parallel - fusions - transformer - moe - dist_checkpointing - dist_optimizer - distributed - datasets - num_microbatches_calculator - optimizer_param_scheduler +API Guide +========= + +.. toctree:: + :maxdepth: 4 + + models + tensor_parallel + context_parallel + pipeline_parallel + custom_fsdp + fusions + transformer + moe + dist_checkpointing + dist_optimizer + distributed + datasets + multi_latent_attention + num_microbatches_calculator + optimizer_param_scheduler + optimizer_cpu_offload encoder_decoder_parallelism \ No newline at end of file diff --git a/docs/source/api-guide/multi_latent_attention.rst b/docs/source/api-guide/multi_latent_attention.rst new file mode 100644 index 0000000..c4d043a --- /dev/null +++ b/docs/source/api-guide/multi_latent_attention.rst @@ -0,0 +1,14 @@ +Multi-Latent Attention +====================== + +Multi-Latent Attention overview +------------------------------- + +Multi-Latent Attention ("MLA") is an innovative attention mechanism introduced by Deepseek team that enhances the efficiency of attention computation by leveraging multiple latent spaces. This approach is particularly beneficial for large language models (LLMs), as it reduces the computational burden associated with traditional attention mechanisms. According to Deepseek-V2 technical report, MLA achieves better performance compared to Multi-Head Attention (MHA) and requires smaller KV cache. + +Enabling Multi-Latent Attention +------------------------------- + +To enable MLA in Megatron-LM, set the following flags in command line: +- `--multi-latent-attention` to enable MLA in MLP. +- Set `MLATransformerConfig` to configure MLA. diff --git a/docs/source/api-guide/optimizer_cpu_offload.rst b/docs/source/api-guide/optimizer_cpu_offload.rst new file mode 100644 index 0000000..7d80994 --- /dev/null +++ b/docs/source/api-guide/optimizer_cpu_offload.rst @@ -0,0 +1,4 @@ +Optimizer CPU offload package +============================== + +.. 
mdinclude :: ../../../megatron/core/optimizer/cpu_offloading/README.md diff --git a/docs/source/images/custom_fsdp/FSDP_Allreduce.png b/docs/source/images/custom_fsdp/FSDP_Allreduce.png new file mode 100644 index 0000000000000000000000000000000000000000..66e2391ed0415b5919f53b3510d25d53d2a6b728 GIT binary patch literal 86481 zcmeFZ1y>x~y08m`5Foe(cbDLe1$TE(f;ZZ@LvVL@hu|9A-8~T8-CaU}3JUfD3JRtH9tQYC(e{=Ec!73M zkraWdoFF^|{tz?Kk_OAmL(v28;h~^|&7ojlb^#vvzyk{ERSq=NE8rFS<+B`^e|-yk zk@M!R&#|u$$aCZ*8e=%?Dgi2%gq{=7#g-mZ)HvYdu1a< zImY4jzCPUeVrh=Xhbu2!}%Uhb69puTviV6LFdy*hswLz5CLq~)D`(_J2l=EZ%zm5>=N`^I_ z`gK(!ZT|No(4g_^ej$bU!2fOg8RvtfqbC5OV}c)cTm>^pdIHXWjUOXiHu+x{ z5#qy)FqCtg(%jr!UPk7x zyXpBN3em(#YS{l4LthAd+jDnOI>^R+VSu?uUR->46Z3uv@|2QsQV!pJh4HfI`iq7v zOVafJTSIvHeRtO@&gPhGlsX0QQt?>zJ+B_xu*>Go+P3f5%#4&&VgTJBc@g4m)%c13 z5k53hr3HTP#!QL*-Mv0iHkY+H++4)J2Knv1Ek$Chc zKN+H$)k#Uni2a9FJXv^?Un%p~cCvujlS?Cb-mmXDAkvMxh`rjCN)#OV9H%utdlEXD^8} zoWJwsR)1B;FZ!>04CZTXZVPqKkM|_}?lN2lRdw)6s4#u^iHP-jgi~&5TysC}cs$FedP?qm5{H943ZU$+fnZfebDqyy z&kxwkDk{ESv<1^-dtT8MWqdCs?RdUhe)rimcbL(6$-GGL`B1Sa%bi|_laBE(Z88&p zE-$$~o*z^Q_G)~~+;(x_`R93~+uul*o`VcV@xvvo{^;mvdyPixeCv6~W3b@UX7-;a z8h(_usRlbEO`}ion&)fnZ;p7|Nu6CY@ zSoDP?>2n$x-Mia4c5tBK3u1(2lfQp*SqKztfXG4+Za`9zC92* zs{l;i{`I8bBrW-N@MqTMcnT#wy~zxVjxTaI=U>h}9#3jf-|B)OOB@b+W7@Ug=~1PR zAD{9`o)3V@{`I*2C+p*T=|OsW`X!U(Pa(g(|J-{wu6udZIUb-4oUOX;XYP-sV#-VW zCZ95ZS7Jz;&#kVG;jzBio635+&i=DvfOxTT1SVGg((xA9T`iehQ9B zOwwe0au!!`)`%BBo^Lg+%6g3_pv^V0M+Y80;GF;1YsfM7K_i~kNxT-4?m2~l<{KqU z8i=$sNbC^-{l-dmF6x`mTmL!7=I3+oKbDenH+RZ{SC8LD-y2oVS2V8C`!^7g1b+iU ziE3Ef=Kd|idR$o=EmrY(>+|;ehFp6}NgrP#%-E^|Tg#*z_6csIl_on!>nz(I1PCaY zw)q=vQWZkm?=#8W#Kg-Rm1!;dgS86j?b_-bkLjc>7qHhHWl~t!$n)JoYIfU4k`o%^ zVAE71@81vm>}7#qN_M&w-!VvhU?O0Kh2D_H(0hPXNLbMjl?K4kyhp`EE-TXukvD-{ z&0*!p#JyjWKg(cCOIV>{Q1F49%Re^9)vVyMkqMuCAKN)Xo78AA8HJsFtBs9A!HlI_ zZ-I?@VK_u91uK?AgfKNfpu(Cjbg~p$8pPnGUNY$Vyq#=QNck!&g8fkzx_eif^_OsMgLcV@P?ne z%igC;^@@Ve9NEy$md8KJ=Cw^Tf3$nJSnf8=oBB6c<}qWHs$8mIHpETTDPvkF)6Byc z5F05=FklN0hY4}Olo(F2xkP;xK$^6lLwSzne&6xaFk$bu_3^AV-+UeXySR^s*H}^< zb8SN(f}PH86t=&+TeD5ui4Ha(s7N)#|MoL1Q>4V<5il^driugVm2FoeX5;clCK?xK zoqv9pFNLLS5ZJT{A3(n$?4mrih0b3qR?a_Y6XVy>U$*IdM&?@42fac{HcdCnf>|!U zJD4^aiXp=a_RT3#Eeetyk%UcC)ZgB#I0ji;Z)Y#HwCq`Ck<+ELuar=YeHJFuQRb-* z@o&s34GQNC9v;RuA~U>8(g|+~$B%fIJ)dpf7*8$lez#~^Ar!KGJFmx;IQfRe6Ai=I z+T4&F4Uz$Si@R^E=uz8`{v1?}Y*b>=S8##Ej1oDI+aH>wfaHC|FJi7Cv23{1hrH;} zovxp!{@(SW!>FABV#nR}1Q?RvE6on==34_@K|3{r#FkIlZhLsvr~LsAPu`c|dbrtN zWs~h60r#Z^O~|$8njdQEecg24`Ns?H;lX*sPSv@v$-$8jT$)&ea?NSn(6i;^6R*G< zT7}^3v3I;YGBo`S7O{2zykX|xi!;&kj^cR_ab1StzJSHgjdth7n&ASpq4D9UOf{K6 zw$_w)a}5L~ien;BzkaR(-VCIX7-&D6de9XvcO0cn+?H5Jpxx=*<{_Tp^z(9%iYv*$sMKUdOg9lm}0Rz5zEOvOy>kzIYw zy<9cOeq5+t&pAgu2tQ4J0~X_Vy6Ar6p{wR-p+ZzWG*Z5Gfk?o`gndM@>@+C1r??W1 zGn{VIf%iTdRh^RaUA;=5(h*jg&+J}0mkkPfj(H$Y*z?J)sULZ(Bb($Upv#DO20w3R z6l0m0nCh>LM=G;C89rPMQK7k&TQA1M?sjI1tXSL=Gr`nH?(SG#i#urX z_uJ?cKY<2{@|+w}G5#a<8FsU0xB67+$E#6hRihLIUV;Gs(w}+Z@2?sfi*>bBNA7a} zJWP7G#mzw&vM;g(C`4ELAGnZq7is4%0cl9?VD|`r#kyqcn9a)dW^|4qAYC3uY4TXw zj@bLjF05B%>tf55YTHGXC!ggC@Es4YP(O4vsyx}n28hq zT1uO9e7+lSA9pWa2kvrc3>|79SefoUfrS8^qlIhhtCZ;Nc9X)o#KG?E0p8u~ja$m` zbnBMT71Oa4^t{ps{JiAU98ARztV{Q?T%66~IrRt^DoXJ&Z{IF0b|MGk;pZAn)G-;! 
z)tlL^YTXO_PfhseI|VC`i#von(3X@8v9=ZF$%dn&oWB)+jNy&sMe{xR9%j3Y6{lR)hwB(!Y_XF*R_aAntklikV2@=kB>SuOD9+L4QF7ghTZ4=w z{9tEFc_Di3qjyj3p)*u zw!vWB>doQIVNL$7u>_@5w&C|@MFMn^hBpP;LH-g*k~pzLie6WkM*>XWWw(TBWyE0e zB~eDl#%>y>DZ=W5OxM@=C6VwFw#$@5LI}-j+JOKIR{1sS+g=9IuQHAJ0vQG4$78e^ ztl|r_f%M5?;c%FuervQ^8Utxx*<*PqJWlK1J-f_8c|R_%YdIU>hkt(9Yf2?Ii+~J7 z;-!(=jr25;T)S%H34aks#V=Eb;4pXmB`u2Nm06n2Q@+E}*unYs5)^s{PESZ+2IZy@b-lxRC9r0^i{sls*(Ez-r{k&A zUiB&*#(DjViw@rj)Z`THs~|`YMiuI9X$=^Ud|>>Dr|{OGiVfATkueWNt(l{e4Wipel#3$o*#}Tv7wxuXXa^H+5GF_I$hu$hUOZf$Y*HTThq{Gur z%?~Q_iy~x#ERt&SfZ%qdO#OSLko$4*JuY-1{EkTHXV)K_<5Je>W^OQ^E6dMp4%k0$ zRBv4qH~D^GrGO*n*P9+hIFY$^Kx;}Jx1r(|DUi>9Laf5^a<-3kzA5CEROZ91p;+FflAj^R(f$tz|`P2}qd)oXY6Rc9tt|25BrHsQN;_|dsz8xJ{G=dqX@ zxZzcN*vi(^j>l|QyY4NUNx;Y*W423Txst+WaYppG6ZiW|5en7Oz; z+r=llb=NG5`i(KcINQ92ukIUTkgy?oqYe{{xW2p~n};5vZdFR*HFJw|1>crB!UR(x z-*wn&z8?NAn))5{D?puY-4n@%7L++m4$d#(HmU0#Zeueg2M^yxXt^^}2VW#krubw! zi#xA1U?DSGAaWEr@QZO-r$T|LY1l)Tg3?YULVCE1ZtGBF`mR7C&1p7MY*h=zlH=kd zpm)bPk!Gvh^cVH`|VsNxvi-Gb6tWFZtc{!_@`MB#b)rwZuO^$&Ti{H^4 z&P2HR-aH;rh$Zjrg(>345W@hK)ztY^bSQ#;=gH|z5{y9IJ8l+gDs#|!PQ9!JUMu|M z+X@kSPgO!k6lRoZc{}*}egs8!Ws;;~)`1=oL(zrD^l9QCNv(18IBI1df69t6*V2!e znKofDj$^?6Ig?f4&=nQY9m(M&LPn}|@0_*z3~qA)^4hsK!4Ey+3<}NHzeZPOfbu2? zA!+`NYSJWSZ!TF+8(rc2*w}A)gk5Ba*Q@X8g+x9E=p}$+K{6vs$>5k^+Wh72--e1~ zvHK_MQ1vNM(t7zPDRf;3a63p))6KYaN4mFffM15lu?gM$Xo5IHXxu&RI=V{1ZU>c1h`UzoMSyLzfKGN*WD-!%bi$Jm$hZg=Rr03np)+9VcDvdSFpAxbdv&B+@ zVoeshpj-PK*B*JGKSwy@d@y57Pb4dmnl*l(kSaHXB;k&U8(@%I;(`!VVlN$8GeceW zF`dq+bMDls`a^(RD)i7tu{-MvIL`I)9idu91Q5(}WHS=dOoRRH~iu|$#%SSxdGxiTcrmL_}_wGVZ* zJmSP+fsEVYX-H0G)eZ-}9RhOVoJfS~vA9h(>I3)dubtVKqMl|};^k>6%M!Hp0gL!h zToP$KW@I`)#7>hxhL4n`*u}>#c0{$cC0aBUT-NmGI)`Zy8kv$?S<*0i29@Vi4ZTCY zj?WKte?IK*t=sO2?_aZ8lN=FvzAGRq0DGa$j?czos^8RQIcRC*aTZI#6aEQh5nSt^gasqyUsKa+q;SA!IOqOhh1y2dE9& zTtgYJM%TKTs`;e2d_j>?=VOrfp)2`YjXM0)i@@>$LFY!STLdtBA{4(}$`mzXYXU*= z;?+2i6B2d2!KO=bIxjw5)vDx)6tX0vDb3mu&-n^pS1UZ)Sf9Gn0Ve)DEWUZ9&F+t;}RD*@2@9bHQCl01Zn3@r^8f-xk zequSF;H#}hxi*R5ebc?keQPnk=43ahHi=o z&I0wUo$FnHD?`Vn5e@x{@9PXQTg;jO6&U7sTkm*q-IZ@F(Dk91&{7y5y=$yfOYsgj z8C}#XzeSYpK1&~*lX1?3gdjC)^?*&4_5RpjYGGe!BY7|YzzBg_yxSJcR z+8km*%Qz$q@RK3}nE_qplO1V|2g~JSatc}2+vLfbYLSGXgB#2!MkzP{B(LX~QUndh zKgtx-gD#J!jkPXcQ?J2zOD@^DJqwZ=DWBv*4l2^DcoBHEsxr&n?NIo%9)5=Ro_)E4 z3cXO_a41e)dr5>+dwHZ5J#HF*uusq@2T{E0kLbH1>OLDx8Tnx5Aw(pr^V17SvqRjo zycCH)wZCpU&IHlDjug0!d>AZ{9&YR|HKS9Zx-UUSG(EcJrr!c&bw;PGYbH`mKQO(I z$NH*&PhGpn{fcI6>8t<}uF($}0Yx}g3#=&xh9%L)-a1gP%qHHf-4GxCYRPewrTK3B^sl02Tz-YIfBaCPFLt83MYN?X1 z&{?S%4chvUDcm&-L7hIl|(GhO zBcnUk*4h2a=^ontIy14{~0mV%umAwvQr9C~fd4&1nnum+dz z&qht-d+){Eu7+tS!tB8MQtu_kPU1wEja-k>EJ0feOd9+Sl@fg?U(9bezTaY+`=!F3Z+< z#1?MP6bp8}%Wg`EeJJ)COJqtys`LcM(8aTrmgGMFelG!088PY41-hs;rDKp$^|bK< zeS|ekK*=dZLoI~YzK+@VYNT|xY_5hke>nTQy=nT&!cygWliT44r#y9_kURu(Qn19I zU}6UT!ApTWD}t^Y?aIbzKs{B%BUR=#n`#V=Neun0x@mi{Z+mF#t%`6Vk-nYZ4s!$d z?IB}+@Hpz(&x2~)aoIv6vWq)Zn(+sGU6HWtWa=V5<=xhs&$U2_2Eld~9$Yor5Fj;~ zk=inl5ij`rGJ37t{3a#RucQ-~3d|Dk(BYNi0!>e*j&CUuWoa&Q<2L2+ z$=yYzef=F42=dl5I=f7?C#C+VPUq8m^7aND-T;0_VS21pvR}>8#?NG zySjdEB4hx$?3!M&5T`C_A;$~9XIsc zgQl9sxc0^PN%LEyKW*&C4vasR3N;KTvuZcudEOe*n1~#j9^&*%?dFFlE>GL zh6p^f@N)3U0jiNgtv@u{DhGJKlJyTm*EBCQcN8LqGp%gp)_pqIr_fF+t3mLUCmE-M zfxKNJah$LV^RIceXn&pQvSxtkaED?Ln&8rx*vybIBI->B;l!7Va83CV9h<9~-Akjz z{I0_W8n0%=PlIAgi*FDXyH=Xc;rjyVZ=g~Z!p%2PJV3fBB5c+d!)G`#$gzf@@g)QTc7(;TXKM)#*gQNPAV;rlH4+o|F6M%ak0 z(D}scL|#TT%NBw&l6YfG@YU?bM(brq%dNRtPFd3J+JG7dHS?Ix`z61H7R}FYRJ&J6 zLpZ_c;&I5#D0<{%{g>c(o#`^m8L@9wI5yRYe^Id#r4squNj*;rueb_a_F)!CpBoQ- z4oX-mXAmZ_Nt9)z*A>yOvnpHRzh9Z5l_2*t(O58amyT@Xp-ftwFbfYH 
zV_~#@Z^Igigi6$#*ub{O$|*>3rxCO$6tkc7rkfhQP!#qxEk!12gr!{ub@bZE==2>d z^%=iG^rr!Z1@mBPZ(vb&9zwf?iKRRI;~3<8oW|uf<}6R`dg0X?O;Y>C+4*R~nDw0R zHahhTy<&qSK&G!J5TC;ob-tcp z1Bxd@I(wgMH`9M|Q5*zOFN*&8c+%BZeTAb7z236i`x3HD?x5e+RT?Wni%gIbs`Hs_0 zMJx!Ng3iNS)DXjZm7d^d-SR?QH|6#eRrMY{ZBNmh!NYFFs}kcezt0v(-&j0QYD|2%!t9nQY=`|=MF)s08pYu*ww>aJet>o={d#%~-hmqgEU8zm+m=l@#r;bNHBz*fK9e>CY zI9_MLLRW$v`5|hLi?p|Ppdmub>Eh_i!*254CnJr_$Pj$6MFfK3N{>dfR;ip(AIYvc z#}e&;@V(y8w-dBA2vpi~W|+zIA1L29I_v03wF@hWL<)IeQQ^3{zP~~TYGYFZ6m^bI zGq1*{NX`k$OnW=i$6HUIA<@WIvDU6OBbQB!cf*bafy|mlM!dBiStYC`)Mluxqk+HK zpAL=QG83tmr+OQrGUO{p;UFCvjf+s^2~bXamj75dE*Yp`6v9K}Sh?=~=b8FA1br}? zBpQE;U0#m;V+6u@4zdx2V{dR@9$z+fG7oX+N4W~g-UQ)S?q_(#2(j}OfGOd_$Z=;q zF8{fdW4@J%DpGGd|Haoc zS?dpl&76xaJ5p{$FTuWw0_~WiJ%EJ`<_tF7l=uDJcU;xECYSRyJf0dqS8M6!6~??J zWxCdMn7_MLZO!%79>KD_B(bV;wXC^|#7ad10^iu2L5>r!1h^?ZeUD!=VxpgFpasap zDE6>)JawvA@%NIo%>lX8@b@jWA54yW6A0&us_O$zKkRDPHq0SzZ1{KMHScG-WI;%_ z&F{Z~;ylsi(m|1jD}@LXh@_4i>3;UBc7sF_r(6zvW>o_A;X<p6C#jAk=x+mN0F6YAIFOf!vv6KyV__l@P+Mxe;pB9btR zm^sqNI`;UemLZuS?qZ{tD*NMW)KH9ng=?kgQu%lZ7Q7E?B;Xa+!|+sd$8-aUpye8} z-8>a=18xO5K4Cnf{VFVks=bidxNy%(x3@zp1}3Tw!0h(V7-a5rxxIEO8$ca{cD7fl}uG{Kx7^U-lR3byjk{R8hdNT|&!$JnIxgS|BDxum{u8rO^rRv;n)DPo zyuDmbscZ@&htLim`Hqd@MA^u$ohCXB4|D)i`Mi@@1@Gt#aA$mvG(x-Y0A2tA_(Rvv z@VKx$4q3vG)eNs^ccp4>T&^TsXrE$AKsAbw#57UsKam?F4E-zkGVG?F@IjfMx;E|D z-V^R7FtSZPFGagFY0}!%OYBeoaexr8)DOLfTYI!dI5)1t)L_vV-m}1(#WS!_hcrpv zd#)fW{ra8n97PWL(mz3-e=|0`+|XlcAZX6ut(WDwldnAz2UaLcMT#U|wZnwFg157( zi`|v}{z{i(IhpWg0S%d547vaXVS2n;`XHOMzXxt)jTqOJwKqD;`VQn-d@qCqKnY^% zzbzh)r*XLJ`}6x=aKLC0Cu%eyBy$J1cK6B&`fD3}0UJQzl{`5Xq{BNJ!-&G&t*g~J z4PU84^rRG2aoMW%Gz#gv8ceGQUnr^^U_zo@=0@yeZu7*$xyeJ;PY5T>n2MZJ{oQ!M zHR|-}QqV9KlV7_;V-U%w=nziYj~9Huc+AhvHOL-5-k%aZhJ-s$%lwmu`k4kb6Fu0J zTqR4kE8eB|!b~2!N$a8?aKm1;IuY@?iw)96%aH#EYc#b7?9{+xZ$kS&A2Lbiw0*O9 zD0N!YuEBRYTHl_^PhnO9OaUo9uS9o(z`0mq=xrI2C1xML?JX^v@M$XhwFv9o*J zGY(13U*gp8(5bg#;A1V1?q~`x&tN%GA|37Akjen_F{vCv# zYJkPZ$CnENxKK!Xtv#l1F`%Xr;`vbj)Y1?Mp^6qW;~i#T1?>`EX`4x>&TJq8Z#0=% zuiipkShzceO!N+53xOqdmLQYIIbEB?&3>+%}vO3kz)2|4UM^4 z)5%awnjpzWf$(?CpR4tu8zqU|jIFIJZNNVpODFsL=cv9bsl}Y;5d!JU33DqoWfB`KYRTeFWfez#^=g-V0f# zDP)Z<72*R5lmRm3e;7JmI%plIU_yKNapn=lvMArc+DU=O;`}IlfB-%HFjMLQ)R&B= za@FUJmPh3p*(+N3?-u;0gK(#@DPOENL~a8h3HJd$BaD+zzYD5-4&V#V02-#R+wAn4 z$!IZZDoBntkt}Q4F@JBR<+T5U@zIzdz*htY2HxJ@a<2hcirMroGmQKTl}bsD@DHRO zaQPNLobD8TMwnqTqr2wyyubWV*Hk`J&X*X^;JGfZofsLj%2<+?B<6Q7k2C4Q$xo1p zvzzB8+uhlTAm;y-W?7la>qO80MXdAbYIHk-ffXH|hSVRF5&GiuU$&fBA04Mi#vOqF zJ>Y=#dKlHyP*-P--N_9m#%DDY>$9(2z)rs-0cbl(k}nC_*(A!;LHodub$O|`331AP zW~GdWH`HLyAGDk+FDWbQ9ZwV^58HbCg6ChR6cqd=RYI-kN@bO+Qw)F`@^vQfIx%Ea zXEO~w0iZz=(eN6hK^1BDH#SD_!Ea6h#Lm^wjSYe40|yUDA%~GszSex@CjiOpvmSm~ z)q2&_-`_t>Rjfs)AOr^q6C^G92kkqR1v^`6srYL|7r3TO_zysc0hYQ3Kp7hv?6@%; z%exJK)E6LgJj#7xxZVBM@I8m6<6?2DXply^R;+PLSW*w5gM}Bpu@?V;fKTNCJ-G9N z8PNkyNCfYB`$LmjGon{x+-o9(r(hE5Jc{pZfgBG&P`%UmJwT=~&j9nN0}#1|^64P! zm7h=7lMa=rZqERwXdU}8K}rVQ?BjB#iiLFQ9Cg+D7iFrn^>$7-n(vGoC4iBh-u=(>z4!0y>~xJ)085(&<0i%edv?N4d5Lb8 zmzS@#djy*)5L^L8cZAEGhMb&@K1@ZV`{+$km6SC2leX)LlpI#dZPYY@FT6bf*}L@T zdHqkS7Si`TT)yEy`+|R(e;t)=E6T)qkpWE%%!;M`H^zFmz!H$j*-G;!-M6Y=Ks5w& zy8Np%H$X!76(XzB|n+iVS@y%;1622%1s`pP(!jsb%3dF--WF=!{dEUFW=0@9jR+- z>e&xdp? 
z%lZb}@ZeKeo}^MP4qsiqu2lA93m@cJonJH}rDra^nwG3_aFK`Db3_1c5@dsRQ zdSsm-GMGAC^GCO;g*{qo)^TwURxWi`PZxIO{-%k6h5z}iE5)<2RzRTifY#yQ#a-&U#FgUgM zoILa~LFNU%{L=EC>tIby)eb&+*Tu%msO!v=1=v*4`V2uVJ4svuO_XemsRW~Jfu_D$ z@SE!w+P@EvMIXx2j8%_fV8594?z(`SrO;d~6*r4()e-X+8|DPY>AN&de~jGw5Ok!L zIjxD9T>ip@UFT}?>^cJW6&+o)*V?lZNhnk`=8z{l!X=&$iJ`Zbi(XgY2E|w z54XVe7<*OcZ{Avaxw5FBRY03OfC@)$^f<7%0mSkb82UQvFO3UJpBR%N;2(n@l3(DM zbl3PZbP5Sl(Zqf_KC5^5@?HSJ4~tA9@WEJxBMS*X-OdQqo6c%9@O;0nNZ>SAuALAr z!+B?wX0|viXo6rC05Tzwm!%(AIGL-^m5vqtn(Bg1aj2@mL&am6#GxH)S?IYSFKlFt z<&Cz-sxy5bL>GRwV}I7X59;h>@p?^32ZS`K2vfgU=fHjPN*LruI!+uXC!BU$V$QSS z;PJR7Ko%*g7Akn1xwM*W$=+yQeCj%4+XJO*e2U<-VZC~3Y7sIe?ed}pf*49Flfw6- zGfPcF98YplFAQ3q$@sy020D0tprJz4xJ@;DaVy0=df5M^D388vy97iF!9&M$=Pzl| zGs3`EL5I-T{Kqi)vF)%JHW4}eMTbfL#qXKhfz?Ucb`tV^q}KrJ>H%==Un9~~fCd(z zYXq=Iy#hMeazIK(4LvvVZG$8>oVmjlz!^U?Ob_(N^;YXgoNsZhzGk2<8BW9F_sA)V7)E--8ihouPrt{^+=4xO15(pXls87I_yFW1#{t6-} zg}dsGxPNy8Pr8pUtsmB{rvpP!nU-qR5!h43TUHWj?tq!v7OG!b($R4`u@gB{^WoS& z5G_zW2ZF!CB4(YkD6g)?T2s})Zz}Ux8N0?7Kd3J8l(5hbGGHfwZk^xHkE*9isnsVv z#5ij zGB$mE8^@{(CsN#kHW)Ic~>u!$J5wKKtZP&fTyk6z=6F zl&L|IX(tgE?dM;j7g;k%h>0_WvG*EmySDeUy(iqPtQ@V3&3 z2mL@FmP2H@o7ik;tNIMfKS>lm`TZxEMh)ZKpPqQ+`VUEoU#mtP6Q>_d_}ft&I6wk> z7nU|L^N`1j@-_q$DdTRx*JHY12j`?&HV$FB-#36l0$7@9Fcfu{D`qSZgA2hf`9Seo z&Z?0msuS$U;>eK|!rf`~*zJu@!D?poU|{~$u!ROq0+ct8^Me!HO6BHW7A8U`eYt=X z4RVFxbl78vfT9o~_!=%avYLmQRY!~f-`a+~EZ~b$lU7+^Ch(7~Bxj@;9QD!=mho zT$Hhs*J1Y|Az-$a4ZpY5dD1)e^Pjg#ttPVd>6i!Tm+3lRi0&F2;@p$2v(+F(5|E#% zCO=Bc#uTZC*88Ja7=mvULc%{Q{-ar(p1w?@Vzt_0{1@jjsPa>%`D#$!EkQU42eMe_ zW(hPXG4)gF%F2Yxj>yQ2j*c*lFwn!n5EqXm*{eB z#6PDt-o5S|)C>r$p~*AUAe*%lqC_GAPwr9GMd_4CMJup#ve87Yycu$Zv@fGnOumbi z-yS8I$7m|1YLNmb0gAs zCV8mvb!RqKjn|s#U3tPvSaA(Zo(iJW;t~Ra=0Zs=VX&@CY8rh?i0&VJ<|3W9l%__n z17BfSlPl1F+@woE`iN&GaK-bl{{=v?G1AUglyxJ?EH6jwPr5x`;ik8Fve4MD*UOaHlgysHS zDb&^5L;+(}>L$W%+9Mu|_%?$49jj?oQGCp@m%UDAWv{t69HD|L)4^ebtSBrzGRM9j zG8>ljvWu`N|CEXoM)$gWZ_{eRr^hs6A0e-X;e$1b^MW8#p?o3osNk~1as1|-Wolxn zxV~6Wpe=@#-1sGO@zD?*1+RCeE`}pHZ?gPKGlF}IHJ5qg&_F9|(z}`!@(l`@b;dtY z=<|>x-l9T>GR`8e-a|XS#N3DTfLU#qca3q5M+&N+T5gLe44*Q^R_a#tX1{jh&!Mw; zNpPc`NvsTH6G$Rgw&cu2;)bB5N#!(H%dNoAzZuz0pzF_$EosYe55}i6d$%kTrqnoq zII9=6e5xeGs};{vvvTmX*q~6GsFQQ&Twqm><|4ro3jwfgzpMztXBc{(_D12x_@?dC zr|5)OOss_5<>ckAT||!&GYq5zqSPtaRk_us$aVCnlwpqkY}e&78vlmV)FQDcIS@Es zUx6A(>bDr*$SpoSJvB^^?TpezA)z?9pAN-RZ1e1=&Aemn4>id{YlPIPzbD_2gsF=^ z5sR>W-nMvj4Ons3G}0ZBUNL`TZHd7JI{a`dJ}AA+vLNH|1F>MQ0meU)m*M+@K2ihW zDpm%E9J%W7p(GLB7;T?i}Kc$Qs$VD`P13#LN5o4S^d}h zYVNtAt{9GeFg3-5pIlt2LcOK@qM{vMhwV`wEc~&?P#~X^hpZHb4X($NZk};Lhc-=T z=uP}c36W;;fo08-4+v`s%+h*GDXPAsW@pWGFsoTOf~;ntCaX|kNpDm%XHOYHbT+_N zpF)A=h_|Uvo(tDTv8(pQ7xokD_L>tA1}`Izk68T?bpRHKx{73^Um_77c-W4o#^)J1 z#$4pEDUZ154Q#%dV)Q(?g_}GXG307x{xn{B$s(TJCj?NLZ8}cwrL`3s?`eb3tBFvN zRwgt)+?j__ZyIQ>Zl6xfQR&s6e9}$5JXv)sgb%wC4%UQRTu=2DuiG2V8CA6yHc$0f zdaOadVy{|Y;~dlw>iDF)hgov6k0+Tm1BD3plhnvdPxllMpI|v}?F(ymy%-65x#qCh zKQM;ket*q>70qp%{px;=Q^Oh&@bnj`(5ld>Iw6Ta?&j=x>yt}HK5BMFW;?XXW4~)d zcyiiW9fg(XZa&%Nz&JVKLEBgMbfFQ^*crOc;tF= zk#lJQKTURa`OE`oKDTATaWxx)Wsh`%bX#LR0>ZNpd@>6kX$lM5Sty$|VzLb1x#Z=8 zyv1E};g(3j`E}52Q!RvZkR5m94kw^~lWiR>&UTZinhjvXQ=-A zZi*O4O&i`g?E;DNzxMv;n<*<8-r$Bs_?NcdzimPuXi}@3b%fLnUZTqSg2JGAwm*Kg zOPQU%)f*%(GfdmZuCJ|Ed4#Anq{z=u^rNy99m)Cr$dOy)`}RRBs28LsF-OOHF}J8U zN4V{ydcogu8DhQ-xAPOXUjg;^e}s(xdiKVY`sDB2U+VNWiO~V12af@EU~1i?+Bgl; z_HM9%AHuH^D&F4LNr)OheAw5iw1%~`gQI$dQ|w@ArOZW&yI3!i8AAPeL$S*Y*%%GU z(X+xwnpCU@ffIpe&Sv9%Od^7vHy?RqhjpAp6Kj;m`CdaswZl8P`EAfJ&#;c&V84l) zGa>bqE6AO|8}w*BOB>S>X^v_Bp>p&9ZHzOf@Ue*1r12v)dTnykwllju1`;Bl_bOjm 
z3jqe2ZJ$%<|DHHU7?_gQAm<-T$hu*&9|=JEgI9w(jZqA)roUT+_mU%xPjrZlgjSxn z16q>pY`3k<%>t}^j-BYg5h zA19qsZsc9_mt!5zB08I|U2%oEW{2>EPdH8*&n_b1ao*ur3OD|Tyk)wcH5LOw2uTQ98+$70 zp-yvUq42xKHyPcGga(5Q8#YGGVffnHjtV#S0e*joOiABrX8V*&Gp3|r+kJgS4n64; zk)CD8cbJ0oVaqxuJ=TuXB8)*xf+k72x5@92W5D660Rfh9ZV*B4I4&_UF*cVIexgV^ z`9FnD55AyMc=;P_B#~)3y9&w*Z`44mP_fa`YbrnQ$hJ#o(NEzsQgE0K`EO3UArv5L z+-eH`X@6;(M=o2_->@pqHG$@=dSAf-Z|MFVeN46kPxIw+ z3!!ORdeQOfMpH#dI^%DJ7_PLks7n+nK8#8&(y-w!%yIc1?3(5`XDhhW_N18B)wB=0 zzu&ImGT%2^=E6M_b^A0O+Z+Br&fYpKs&D%rMBa0Z~vI zX^`&jjzJ`(hlUx3A*4&X;XC-g_rBlvexBbyzvtP{d4O}+d+mMJUh&#%t%H%1iWU*7 z@SMeI{854Vjwy-v&%glRJ&^|WN||+?|5^yF^N&(nYQm0sfe#*bKEdNmbGKIIe2*>w z^?x8bJR26(ZuR^FsQp#|(v5!&^b_wjhK#t&0?eX*Jl|t70|7{;;>uL>dDN-;RQu%l zyfLTnZt-j^;+@4+IhcvoGy4aah$G0ZsVv@4IllezoI<5jb@0iuW68%B=@IT*(WHKO zq?K41uaI6Ri-kpSH7XInVqIc!U1SUH?&elAF~Ki3I5GB@T=`3uEHa~}_hg++@E&}r z_%4C!DFk5_DXdAYNT+_GF*dPMGyM{Sz4+i}ITDhO(*ru$O*t4Wta*N; zkSmWnXWD9dz~1k6_bc3cTM!T>sWhN;c*5iNJMci%F*7E_ZH-&6+72M2p{e6_wIqvy zw_yNjHyE%>76}_R`aN8xzFrU|F+7%sObW+dk&mVp5W0QUVSK(DnmO{tL?VIl>X>8R zX*3=4c$WtJb*JzM+DelL<=)5uUyX=r9`%`Ar5AaUw~a2omKzzN*q336Xt0hAkYRf0h6P_9C|Jzo129nUY&da z*-5$B&e`t|Q9?KoXFdDlmPX#2j-tCUG13M)#s`Ia!?&V=f@DvNT?A0D+595<^P%)F5V*ltdLtr5a;>}Cm2RMeZPi=Y|5 z5l=z&bvO0*R@C#B=}~C1IhuA%s7GgE?Y*P8GkpZ|WDofAoDWHb?J40pJ zH<*|3f*VIT)O(OzcV?1lfLoLuhj(^gV7HN72lT?1t3j`j^EP7cOLUzepYu3o>$jc8 ztMgaQ`SI6KJh@tlyAq}Y0?>7Sj5L)byW*eYp(~R2Xds(Rv;v2EaIY!l-1Iv$LlFfWJ6FbwdRgt_nx=CC250SX{kpPuuB>#^)aHD-71{Qd04u=D%uLb; zS_$c~#)!^|^`L^!N*!+)*m-NMqiH+d@TYz?PzuXM=ikgsPoHaS1bYJVWE=X;XWpRk~sp`*M!oSH2TsT9l4%CJq(Q<)vin5C+Ry`#Z-q`P?i!YuagOu6W214<-BwcM#3!ige0(*WSbB7Yr(^5Z$nUuD) zK)>JkWVla$&y{ET%|gz7wHL%}qBuAdC7C-J?@&~^jbOi<{c)=PGt1}}4rO%;sB)hQ zx#Xg5Us_==SN_o{fv!N;)`+W5Cn;CV^|zTs^Fhjhs!ocXg@sz&?ya#dhz<|z+B5Je zoGelIEz>2>&;G%EK?_kZtS;X`Ku4P_AwGdw|N3~Rtt>-!f|umUv>IQyKuje&Vmx2D zi@8KwM>k1lO55VJL}_~(L#e@-Ob_p|B|pES-?@Q0V?M{<1Jgr**0fkX##P~R4C^jY9No~2t&+BnuT&l6 z9NmedM^{vfY-<@dYWbb}4>aZ;NlQV7D%h=KX6^hJNc6|R{mOtm&KGtJ`=^xui?s;D zQAGRl>JLwOkj22nQ9k|0^r_=}0CI|$8Rv`7_@mRMYLS)y0Avz!|F<%p4L1^IfQ7gH zSJ2sChM)}@GSqIktMKRlW%ymWEdQuo+sbh^y7bb;>gdpz*^Ov}lu+ZtgJEBjjZy|U%zTG&@w zVYrXSZbrr$GwL(TF=SM5AeQ?J}V6lkB00Awg#K?^2O8 zri0Vvo|5i&!RiuWa@Nmz)&rcoSd)yr4q~K(&HGe8LCz%8Ja*FsL+wAf0gBaA3O{GV0j4(5nQs zd8q-609hPHfEIzFEnaizKl+G?xY}z-Zq)- zW@di;M(k{=b)Wm@<*T@%XlI4lR2IDq70E8Ub%0!-~Q- z)lB3ouRoNpGEVXHAL(!{t#3aL2LX5Ho}tgZF?64jZ7gHc50y0k`q1Z84_uFBemB#;hO5~4=<8=Xemb9=o@sA94p-OZY`gBH!{w$XP03+<3Z3C;D0`y=-1Sm@G?Qob#Q2jqgSYT7db$5VtkxyocFS%;j!%-HQr0hm*v0aRhJV(!U+{IJ-{gF_yQNHOpVzjf~q*<+;-$O?%;(*A}?i z%RlP(du!WdDEYAexI);QoI0RHyQ89Gk0AQ?p*`6)xxrah`sr3;oUW0<&79zoqyJLJ z*RUFdr0cf7Bse>(ltBNkz2&sY9T5i@t9qRxb`2QYB1i&h>lhtOCOL6(>bor&BM3GR zv&a`S>BJ)W2V-^!n5moBV`z?QTuZj?*2@hp!E;()ykJ-AcdNeSYwPaytbqlt6TRfd{=$?fA*SU1wI3nAcL9BRg&9%&;i7`fXTG7-5*N1$hptCews$UP z;9NO%{yS;^D}qhQpSKYd)i>F`G{5%|@TNxlShe&6ozGq2x*Yzdmv6;JCHCqN-|l$J z^yYCVBH@n5?K2B4@R%hPownUr-*;ariFu|RP44_cs;xX}JARmqwD?_D(d-aIvi&h` zx4P-l+xE{O7sV8nDmTY8rPq_{2%~CK{zg!;dp}Pr-A5#OqP)Hc9uh~9{ek&12M1{1 zf`9)&Va>yEcdyY0r`A^s!PRP@szFmhYX7mSu3MYig8^?X@{vcZs6VRg!vuNie8(i1 zX@y*M%kI_kNh0@*;SBU(@D`jv#NzU`JP)fMMa_4d;R*?WZdXtWWe#k z)%Ld5P2?#npcq+hLIHUQvR$n`aL?W``e~y}TtnMQFn35bEEJ8~gE6%(Rg;Nvn z{EwwSR0Twf9DgQEIS`E!T`Dp^=-wzV!Y!-7jXmh$1k5p<3KMDz0$6LpKSr*ptNCUe zyi0`M{+fg|H_e`|XF7|RMNEv`inxEVQhMCh(y+ELlldzDf%uj2QE(>jJY~g#dbG&R ztT_I~0a0Akw-;j}9WS===FLN+piV=M(-T4sbT4+0EZRFfzfzLA74wD;4jdl1=y?!# zxhkrKTQHPaU0@NXol%~*VE8f(w(CRC`=S1}H;5uaF(M7UWy*v9S$4|_PLOqme*`PD zY$(3u`#V{+On3bH?*j>g2>&}(@CLUyQexsFb>ijodvr$1`q}RtcJ7)VTB9GjR=}RP 
zmc3=GXY&^(pzD+gXM$#(vda;UbV~<;=RfB&Jhlj&t1^Jog*%1IE8+FuZ4ialPP}LU z<*v6D^9nn@Z_XSqrYA(A=Jyw`E8tC*;^ikQ(tXU~>+w2?xwAbA>2taEA3qRwmp}(Q zKzn%)qfCD=YkjOw6HLypjTn~am>jDBsT%Fhui`K~I`64fa1?aVue1DcfH}DKu3j(^ zz@uzUkyf8!9C{GinG?z0e(QSmNy^A**IfOUfMGhhMm7XDv+3>`?zqJDhJOI-hhrot z^a5$PZpHmEHPSz!t8e|G=O)Kx1clhkfbvo% z!tQpsK)dO}&uPXj=HysJ{*>AV*lS!f7CX5rin>4Q<+}@=azOVSP_)i!t9EelJ3hc< z*Wo%ca>=X0a<{s-5RN8v=k9eNQP4#=dB8=zxsS_o6sJHh=br9@UwK~Lqz`4^Wh7NZ zF37}s*XBGSCDybd*zB33m(8Qg`1qv9NeR%*6FDkLlNV=acbDwzmsJv{eTCmvr@SY0 ztj!H7IzF{WFkE|PqB0{hY;f{EjBe5QaqLgES6K(FguGK_(|Ox6^%JRKUMP&E9xM!7 z_7I`_{rWN`QZpk(x}d4iRq<2)s64^4S0iRU;zy_n?#_Dac35~RDzbdUp`spYM7PA% zhl%f=zoiw9aXVIehbf`!CVnE8{7GiW{knukUzU$e{x$k9PzQl6cG6W#8uV<=#r#`6 zXia3biR9K5Hq7zCf3v>W>XFNfyur2dkueEsVS~jvirElKg}aU$0RR^pl(_0fMkcMr z(z#BxQi}`x)C>uf@GTXX7_Y!lD0tk)38EIh_d0sbONi411CQ<>HCSHMEU}H~AN5F4cP&umA zYE^!cnz!j`qk*Efx9#tb3!lvnRa8-tu5SCy2vx*DhTn%;_bx*{y_7y0-|^gN$=_Yd z>UW5I-Q*su@12R4u;~+Htiv*n2&f@aF=qmD8Rs7L zy)atmWAL4851$x^%Ro`0gPZ0Vb^X*|mT#ApcnAOnhDX#`sDM9Y7F3EO$;OwUC%ncj!IiAahjgZjS{jZ)Qr}6pSuRo|UO;~5ze#h_|HaSXOyW_S`IBk|N|0Hau zHqo02Zb_4x9he3KCArEeFq94%un?vk=Ig${6ln&h5)g@t^gY?J_1B!^*&Sh)2{9OC z8vq+Lt`*N1dzoIA&MbKI4w}tv^^;~-`M&?KC!b`{?FyJ*eu_r{jbxdN%CMq7N#kcH z9{>K0YY@NRf#oqE23RY~DO~JkqgA0Xm{P};S(o7r$7A75Z)&J$Xe$UCj`^{v5n|@4 z?-~F!M%E9~OhIXXJlekKKwY&?dlhiBD-hnv%Ug?aOxFa2r>+}&9R}V!te0j>l56=Q zwMCNE;IOM?f&th9;pS!EGc#XOC&?%##{1E>jSn`qJj@pa;Oj^{%iq&YGfwkQ#66#!TmNyb-5+-aa+c#$3mGes6ZR}ao)?ffCKXgJhqK>}kP?Jp7D*Vze7&EJ{ z24&KIQl2TdU<7z;%jtR!TZ=s+1>($lyelWd)5oxCOiYT2Pl#Zin~cxSuXi}2bl=W^ z@eA_4h=+?aznE!hs^QC##tHgrXMJVEo|2j$r?Z6Cks)Zkl;eF4zFlL#{)n@9G<|H# z$k+qVJzkZIpjSt7<7c$w0W+)PSGWKobyZo2FAzfwaQm*k>50Oq8|Ix)YlAwKWo0EV<(|wI zf)E)|ijpAM18M^)D;I8;Mj5!rklHjaI*qS`ByD*z+0`%<4Z~JAU^?FNAa#`pN6yYl zza}AqWerFD^nyHciu5pUmrI2O4q|`E+ zOdDiHtBbY_`(9F1}vKT;91Bf}!i5SKsaP^@0x z1*yn1D5;4YdPRIY3_DaX4(Rov#JrM`6zkCo7BquE+WRm=YGf@me>La^d_V9Ng)R8C zyB;7+nk1L;z0=THO63SE#cm7sscQLK3X`FQ zuH1R8Y){b7Pt7^e^>c)yR$Qm938>lU#Ip^mKpUX)3J+VP3oli8yZ!6o-u8$(&TjD13{(#4LDpd zpwDgIyA#M9Df%LhFcbh*$aGYrgV4~a-1?p5pag|r7oD9TfK!pP!-@%Sxrm<41jjG{0=!4$F!!rXloEJyUe2Y=fxwHNGmnY?kC8E~KD zA@0ta@)8&PlO?EUC^qQ)d%hk;WI5_QNPJyaSHkP4;Op>=_{skK&Nm`|Oi-h(QE_WM z_wbo1wDln0&U8XIv#c%h{l3 zW10ahnkLBuI3F@sTkt-fb^dhZ>gsg2YeJWKIiFjpvY@HiW-})k^(-w>z_FEvE7g6k z8-9u%{WVoWyk!mHwfES`|6(a#y?|S<*3V6kM^b+3Svw|mYu|U~9XGTBqtAzBc{ekk zbR};W;%C6g8hS?c@248E-dx{!&fk2KRihjmofBZ4s>HJ1EU?TgbQ4prtPt-dn035c zTJinm9+Pov)j|1p5jNMtEF&?(F(>giOHc zB@uJZ;k}@T_Gn3LbpBwHJnO#VU8pEx89gd{^_fiWo>|ARgr=lwLq+v2o6C&=l zectn+t2=LNF&{k27tT{j;q*%QJCch9_Q;I0$6$jF*Pbm8d*M9mUsa|=1czUqw8O;- zV7|+R>K(Cll+F<5zx{bx{8XsP;a&a3q zdaSICFVlL%FUgL#;O*A9375wk-g9T8H=7!Tm8{UJ>ZYP9gXp04OF6Nh%<}Z6(&g|5 z+o9h>M$wCf2NEvi1^aWYH2k*fKLp|drbP`3YBL=d;taIE;=G_?W7BBBd6z1WU}>?`Ra0FM4GkT^>%vKFY882-U{df(eW zTh_#VxZx&k^P_*F+JAgInqVX#7}Kd}T)d)?cfo@~@8n9if0UC4kS7w|22i z^#1RMRi3L$G5ml37ZDWrv9b*5`295%{op{Lup?fHlGgnnkV%4oM^lP)ZDIfC%QgYX zw7Thi^*5LQ9uSOWlVlDO=eGS5Qr%Aip@jaLV=ef5a`*4#9w-ncM&A10L8l=N7~1)N ztoihKDJy2q#=nM?cn*O;EH_(D_&q!%CfhYuF~VXe<=Ung8Bb0W<<;3n6QkcIIUH!@ zmt{L5Pv-`=Cqq~df3s0;)x-@{b8x#ERkq7*u$4hE>598PZ3M1d@YusLRT`22t zobdMiNjQ(M}L3+g1)vM!jdYO>=A7KA(LJvR!hSNU4Xwo(8|jHgB3tlR$pG; zM^!&55|6(;>n)5?50S47&`-K9`wqjbUv$BUH7(XgFFWZOUugCSi;N7hO&b%XQrnU614@I z&=)n9o#|N4)MC5*RmhA}W+kwT7*0xv>&y#%M6F@oqLR&>B3g zt;qIt(Pm4kP=woxCm~J8B99r^@+j`B>roBn@qYWnMr;R{L_6_O5{Ggf z)H5tIPQ1#g(na*V?cAVY#H3AZY?=AE+Y-X`QMv10kU>V!LQXgN?I0))X~5}R?>SJ* zIqVnlPc?N!zRBIM<$~8`15kZj=&$(xjjsZ1;?=C-{g#tH)pjj>4n@ii$z5!5Mg}uZ zT3^`@9O!p6lsIX zOOZrr?S658;%@-~CMy(w_?KY(mv|f?j!{gjiS9*&^=F-&9mSpgr3wCZUBpD~-*Mdu 
zbye&GGCd7$NT&;|K~_ikoAw|zsl~fFQG#=(sMTl#9k>1V_vTtEP6cNR#8`w_d15kM z%G|%zjb(heV`z<`%CEE0Q9mMUme5RuNBTBe>LqI5F@$41>z8NV-VxpOY$lk4rNPrL z_AcDrY8%6`%BkF4&6BZ08+eLxRS8iZ{N0?nQ9nakbU}j2_0Bd_xcro;|G0_rpph{U zs~iRtZw>j8ZJESvhxuQ32|GpVnlRqcql6|hRuz$?j*xf9zZNR9haj{e_N{hLCdxQ|d>Q`i6WJImPV7!zd#*YFj`u7X`S z+icH?v*7=C_s+VgJ3joL09w>Yg+Mq~dC>u`?BIV0h?JMgo%)qTuKQLyc1 z(}086$*6%zjIYd*1fNFoa;NN0ONV{)%X)f}Nt%$AA^gx#v~~SF+cO~9ygZ@t73E_x z3MwLgtIp3A08xFMCvK0F!>xzB*$ZDp)1F~?Y+tRB@Y=`y^6Jnjg!$fX49FT({L0%t zZlrzrO2)J@eq_Z5*Zy&mgVvp!AUCLGnK?mU)jIRZZD$OAJ0Q$Oz-?qtuDQkx=OmBb zrAl3@N&UT@Uui?EVur^EtmbB`+JhYbiM|n`eQtghg}@fF|*s+2tntS$tT5C|JMa0*x#0EXU~bEiL6w z*6&W%mDMFpLRm%6_ERK|n%OKL_hX@<=9jeKn>uD_+E1=NMurm&h>IPD;iwylGzRQ9 zEYP(2pA1~lXRk(^t4S|t(RF-H$Ta7uc*9n1ZN--q2c4PEUTV zeueq0!tm@0WSkraEv~%K%ahIuKqJg9r<07w_{05GOfhjut+rQW1DP*;cEV0d$vLi$ zK5eRh=`pic9MA&;j(96a-j+_8heR(_0Z2ZbJA@t3$7L)9+%3Wu17QcD10^LpALdsL zW@ts+{O*bw-0r>!9xX8s1Da#~k5ULshPhc)qST+8oM~QaAJvK}4JIV;uK6cxX-bJi zZ@qjkGiq9$tr0)H>0v$2%{g1ow!4-vs&l);m1o__+3sgd zFVrm*^bby# z`E9L|_oubrEnMARZIYv^FWfnIe^ls|>rFmQ{L&xXa&obEqTX7+`wC5959T-$G(WSs zKl0ouvHSMybbT$Os`&}`xTNFd#Ky4NmiX2u-&@OLD@C_Up9b>@olLPq&Fyn5cWuAA zovZDO6#4Lmbcx?%hjc&FXy!X+N=Y)Rur_~p*|{;^#8&7nX)C&*{}=xJ?}BjChY;LhyS<4{epJ%dwJl}hZpzWSHf zhYRuKt^E7Hx6YZr(yt&3X|bFQP93qh^X9mLllFb*KUqhH;oyQgW^RbHKi?7th_^(^ zOv_97PN?s91cX&YDrhO3s+XKx+1V%meDENH*d~2EMWTRl)yc}p{5u-3osx`HK4FBu zbFURwsBUD7dGg`COm2HtWd`4>IdpZk+@}#3-X;2dmEODYAKN!@kGk{?S^6 zu(vP-?Cb{9WCvane&FEu?wB{1mUaU0JN!(oy!`X9&w2732Zy6;=(9Xwe{El`yO6Pu z5?^RoK3;jv`1&x`t>kJzDA1^%zHcL8>K1g%+JT47b{x%@la92aJOCmr-;MY#p?1DU zy_b1`QZ#5R81*(i#Kg%$R$_Pp1+NN$H2RGy!l2FNu2tn0kCxS;WV__c&ftPN9A=wm z;qX5EuR-F_!ql|62EBp#83}>WCEHX95aY19481KG%av*qvq#oVmtT?tRe}ZI((AR0 zQ;q(3rn^k5u;o@G2p!rEv-7IHzM}23eos5lmA%_lI70QSQRSLr`HCL{@%u9)4Enmr z*?Jqwd#y_u)Y-$Hq+Xbw8wktu?W0H&%olWk8EUXPp`}wcC^qMO9pc-qqaE!Jw>G(3 zTWM#;m;3lpTw(#8#w#_csVOJ^gN6m>tGNLnouv zIF0I;Pbj!yV%sGt9SahiBx&s@lXs639`B2K|LU^@H6_b#udlK{RxX(;DQ%{xfPOh? 
zEJYr8YBJ{f&|iPpHleaZc@RL9LjSTV4HP0b2@iABG9cIVGciNtC69ShlmNYo- z_18K|giyPk41^lrn{SH_sC92gD{g#?rn5S;K36$!!)N+l-0hY`ZhwQnl-$ZKe&Yk& zP--c#M!eP0J(9I;t>5`NpJ?GkYGGSP*^1MgwAj;Sun;iQq~oAJ;+XeAby%a(fY2sm zz85k2BB67NY8~FSh{5yKBdJ2VtVn@j2UY501FcbT&i&|}?5Js;fC-y=`fTSrX!&<( z>FE}y*$&IgA?w;BSGD}~-PwZl`oO_;8@|SG;^k`FX1(|}u$#^vl3&BO9V`2rvVhSm zxq}P5KF>2JSOR?um({J&a6bq4J9k%{FX*rRVG2~!JKr%-_Y?#79`?={i;;}zIRUoKbh;h8Ny}JtdZP!5g91dV$ z242OsTX;BF@Zre-d5(M=x6HmRdtC&nQbq08G(>ge-ntGuxy_7bp6-@KbUD~RARl2Z z6cp!ip z4x!d_G(IznZ^oW-V?#vh62e3i3&$bIq6+B9m(qSJ_>z_Hd5v4AP z0`W|%Zf~aIYt`&vmRwQWNtf8Nx~#U@N#Ip?{8bCVX2SSf{K_GA z@oVDcUAi@@Nq#Oyel9tVZrRwuGhyTbcINTcc(1|Z>^G!j#1yo&@MhWlu^&vn-px>J z@<`~JqXbv5$C1mvACM~ zw%Gtxsb$WjMFWG(8^pgFCSw0S^{qm*Yh5mA(Q^-Svsb)jlerG2N0p8I5xqU_2T!{z zH3+P=M$jC-yYW!QkG%2jDm|&hY-Ej&BRJs(!5{g?{GOd@#5>SltpMcW(KuJg2 zQ*Ctz6-m!GV;RnWjq}%N_wqqC5RQ`X;X6O@Y7yIa-~;~0l{Wlx-;XZ+Y7D2d z4NX^w%>U^G^8xT-Jdg!aL|@i=goFs{L1RLH3?k%v{i_8kle~xLuZV~PA9QA~`rOG% z{=I5{mV1Giu*vfy83J;6I`C=ve@8$;EhCotn5f-WME&=Q-jmsxj$I|Xn0RENCrF3S zxnB8z1?%5y`PUpf4N%Kk-3^(4aG?m*8SaBNF#qSi3F_Vr9_ILe@dUTyV|1$?Atob3wo=WKciIE!G*GQRf~2HG;`;OMygVz;cRe#3 z3FPFzEDRNEvqPi^pHS11QIHT)vk^8aQyZ6Rm`K)wpP$L=NjK=^9uh=8dK7IxT?}qC zkrJoPbbzeB)>*12(XStup$IB0OfvcG`Tk?KAcV2Jp=t(>zsWd!=pUL|SB3TOS^)2= zq32nETe6?W8-R>{B^Xd!iK|Vmax^}HyV-fV0u_i=Tz_1b^%x6{xcXCC-puB)yHeqd zw~tSloEh`WrxkUfmrA>DAS$n(W@)1Poam^2(Q|D7I%W5RB;>o4X|M$DhM1U4GB`Hd zv4jlgNTz+;(#@Q_K<#a~_`_FtkJ_puE1a$#CxkH+j!YOM!YEt)UcJN8dfe9bd}?c} z>?;Q}C2=*i?bY){C62v1&#~$nO+6hh?$Q_&wb8b48*w$!gHz*BGPCM;#>GXlotLD- zoyP*q;=Jq2^|;%$*8>M9rv)b3r|!Ud13PcM&11~Ix9oW87n}z5Mjf`2zB^5C+@J?H zl72$D_^Lm%X@qSrtFR~h{BE`hSFTvob=LN>}EW7Vic^tFdCB; zdIc0o*1JaK@HGmsw_VGoxznb^h(MX27YI=NItmZev39QTVo(n=t8>l5@oIMCZ|R9dV-w^{*>zctX)W-BF|j|@q%MyWXUAa8QswdK zI9=oMJ8fltyI$_n)bpk(_=JY`=@Z0W`l!p}Gt49GvRbK>=tac1rkB#|jKa`P*bm`- zmj0AccCP1IF%0654#^&uHImNm*UfCM7{L z`QrJRp#8YagS~{KR8yDaIyo>E9Ho!GD=y7+?xmbiVn;xrdu!A{9!leCtZ8fUuBwsz zho=;9az2LsaDXjA{zqL;Rm{_t8;tXDKr}*qM=4(^cjD703|{&egLj5{+MI`kzCKtK zkAJ0BjRY@Gv@CyZ32=1SEGraM_p))ed1G|OVu0S9py22&XPNNuf!)>c;#p>Befl8T zo%{la_|c|&i!%8Ljc6Try#k#*&&=S#K`wbmW2TUY2q523{vnBFevdLJwH-7#KyceQ zGT1kBg3BCU^7s%8PLZzG3-&;LfNm+9LoD_Q38#O%jtJkfTm1r19>l3-?zm2W)4enG z)Wlii?P)^^-}CAB@x1n|7phaGvQK#f zy6fJs*}lG9hcN2173xk+MNrH6P-fx44d@SQBML++VnJ`qFn94;L;)_`#2*Q?pnW z$Y`UWsrn^F}Hm${aRT$pJ;`B7vhy1A7H}) z6+i*qkSJSYB2+Dd{LwJ2PI`H&vTz+`q}ArN3D(%dXca`ezn0qc@F(!?B0fIn%{cgQ zj*b)N$qN>nWSfc{v`)IYo@+Mut#vdsesKl|fiF{<1MTxRix)@L3$0CrRo9+3Y)YJG z>*l032&6D^v(MkmG!_E6z(5~FN%I}W*>X|{rpyeqD;2mSev<|h7(;pNt!SxXwN}ij zSJ-GfHDx{Rr>zmQaJ$e#f6*Au&;vj{-lioYO(yQ!#}uU$_G4ua?T9c#OPhZmHulNu zst(&jEh|D}TaC2hjdmv2P@xd#gvh3bkknJ9(_d%F2KAv_22P*VM5+|0pzrnS-wZA{ z@nhDR>A#|2Kq6DOkCMsUt-ZrEHOs!vpgjs7q@G3$4|^u3*^|b9mh%Ijvkr{_O`76m zTm6rqZSxK?0v9F|3^_UAl$(W-a;h3$Ub7XF*|neFFMEg;!yAl@Gj6uoqj9sj0qQjK z-=!J1E61}vz?AKC)8Yz|{y+hAtKscqoJVvw`)8XlXsxfViCB%tT}kF;6sXs*6MX{d zmkJb!*V*zK&~f-}rFalBvjd&My{&$c?7{y{dAfqA@_|6yWR5 zWI+8Ig42w`EN4Sw180?szx0VjB$m|C=mRX&aSLODi^GEaq_*wRntb9avH$s`5g$;fwoYBF; zgr6N-EfG*5aEFF&#BcZGMMwupMy%Jxo36DrR?;YcpxSzN9IL5T=O!c^>1|U5S$7=d4YoMJ!TW5vtu~1Y^q8fKo0Ne*$`v6x>aEv-kY=PLR z_%Ji}pUoEt&&DD7CwRgzRbB-zqxv|9o?RHNC)wRpr^1O zR?KjIsr`G@PK*byZ054UL?Y4llBGkzZyErG43!sB?n5^OsbyKspgxu4>mM_a`%n<3|F!Zdz*lD`eX9VV&h2o+tY96mySy>zM>BxIG`h|$n_RpScF8?MkF@Vl(o>uukCQW&xXw73`jW0`piN(Czz#MwE`kz1 zqx*Y!slaG7!)G#S2`K@`5Q!W~t1i+oXo{z-VO;l!k#B~OkMl4BByo0_CNq;xVlzKU zo|`sL6g*`!X(@+Jp*u$tkiYPSUyQ^Uc1|eq4*yzL;g@f;*1^&JgMoCmjte)uKId9R!cb=rZha|KI_b& z@yEok7uu!_C1u-%m`e?w!W~lG_;8fK9O61Sj6-46;07}*}I(*d;pEk 
zM6p%5X#Odci%lPtK~&IY;ca;La?7f>y8s7Q3N>_j-u*HuF=sV)4X?O_TJg|n4H1iEcTw>xsOFji;MW_8zMe2~qj!N4aQ#RdgK;N2&E>}8I zal~ort*(0bu06L`oEaj*qF*8I=Sx>Jh|hfdcxKXa$N}y)c`|Wj6R=))v+Wy|Vq9qC zPQE%sKI*}?H6O<(KHIYkrr35E_x|X{t^oK42?RluPGnr=;zfq)AtwRr?XsKn`?mo{Q4s+%k%jJ2bE0` zi@|-mlK8Nj(f~Wyy73($j9khi!Ed$kZl=zta?+FlTdo2b0A9#GT6V!E2p~|zc*PjX zTnN=dd-*p#i-xHD8t9(5oKJXKDpwo17=Zrz3!|=W9xIfnQ`yRm&^A30QRfYom5*)m zO9t=ZE1+M)R>oj2hkdJ;NoKXiNQq|AIN1mK9enpJ{HNv3c5NXbKt*|PZ+uSOShU^~ z_F?Yo_I4p*s1in&%C&u+c$ZJda~%ZH6fY)dizKkwW(xUR25=A|9Wur$vEoFd=nMoA z6lYz{8m>|pMa}2#P6MZ)C7SBc+S(*h@C3k$bc(x)6=}Ns#N@O@t&PFIHd*;}jZ3HE z$yQAiMXTM0m+dScHy@J}f*rUPDK|%CnN37%&kn0;yf?|$+9k4KKa0zKBOhUXA;l82 zJzb%)KBY4h<(6Ebr=Wt@sqVX|H-}8E=|Hw>E|LgRT#xn3YjTn$QinxqjaYfhrk;Pj zbnm#syZihq4wv}v#YDP?kT*zg8*&KChPQlwv5?##@i2svr2a-3^%rD--6HgKAJ|iz zeS9R5rk%DM&z<;s|7r$D95cFDHUwPAG;|Jxb#dW^^Qjg zQnMZ7n{oO2dE#x{ipX8ks*&VzUupd;16G*rg7Eo~h!7OCcgRz8z^+## zKwmki?`Ki+8f&?I(#vnpLQk(MC9=C4RP7CiviYM58U!+@ZSb(8ALY9AazZ2h-ut|> zD65aK|K;`Q(~p9VjB)?j>fvxM8o0chi7h`R0YT)_gdKsjS=t_HkLsqt5PwAbk4M4& ziGo;y(x#4Y+R3 z@vuDY8WsHP;!Ro=jzV8oZ?0dvVzbM1om2Ee(z(vGv5(+OmHY7ylMI zH8pUUL#^i$rIUL~@CE5so*d#8IZaV>azD8o&!pDIvpWb1bh&UGuWLv$Ta$+n5Vf-n z^bfLgaoR5M&Go@x8JcVSO;Rm+Z5)+NPuckBpU0f3i&2&{f!msLN?uM%eg>+4H)5L1 zD$ReRqiL*1L`S>Z^CO!}AT#fYxZd{htFGHqr$;j(|Wnh;eQ9_^A-nYnkjS zIoDc^;LuUOLp}A;Rv{UWnfdqD>s+_1`!rN#X*j=6qVdCDhbeo#@Ak(YbD2hs*UJBY ze7yx!Ra^Hy3F(}^Zy&t(eeeCfXYN&d&bj9E%(;~47AoCOf-ppKNk#pVjteH(@E(tJ*DP!72Qm-fWBMr*aJ>&>U{oK!8Xl>s{x<`RJ8H;UFdanECyy; zC{$If^pllHkT2okuHdC3EGOBD%<<^FLWF8h#nJ8Fhc658ES^h?rfN| zigta+zy&<2Bse=Np_DX_jp@#^{`I=USA2_*LRsQE>KeHvza7nPPM9?_zFj(fQ2Q+5 zMzRaUm%t#w-hyDkGx5V!L;$4lRJk2B#?+A+pPes@%i&(HI(=B9Zbx;n-yz4Zqo^Xe z7&h`T>30r%r#Zb&gXhX=?pY$;z{5IX=+u--0{4wFi*vS+2?)j5WYd^{)5T#Xq2?+5 z`1s8gzX8!lDr%p{*y$nNHmiZCh%ZG2aa-t{pqY3VAgp9->LtyIDZ=5QR$AZ+kO$7b zsYYPCOXPU#@a3L5M5r{8AM~`Ib?c-Ze|TbB7`SLlbzB=1(d|Wd=p)&pp928m zu7`O0r-V>9)M6!oN>D__^n*u0fU^@}Mmeg)pM6}&3kqKbFW$hi)bD(3-u9A~8Iv3e zU{a8VhL)T{tvv#;B8kwI2Ex$&sQ@YXXYhTF58;&A)?z>Y3Ie92rT zWm`ER3rUW4Fn zs;H4ap4MAmtv6j%oubO8d4q^Vez^GEe-!mp+jQ_rOFv*1`Ulk%XaD+0*ps0>pwNOD}`$FsS;>}EAZ`FgvkuJ)ow>%ULRSl0M z$DND0l1{8h%=n9y9(M&sK7~;#Wpi!^zbcuz3-o+HZ3(Ao*?)fNh#>Co8w+PgB2+wD zkWiW|vq&?@c{&onuE~0&Unxfu@JN{Eu+fRD>w;gM*wK4LRd^I)Q45?k+z$jIfnwszH%Vm(W8a037%KC`3IWw>AE17ZksB6pOtv}CGrfmwt5C8micB`n>?S{`~1#G$DA;>{2G@J)8k9hp)6Xjo)*f#HqZS2+A~a$hH*Y@1dX2CR@Oa| zI`)}Wtd^iH*`z4=4xYczMX21Eqs3H6GqlFL_D(KPqv8C#w!ZxsQ@`#B6*FU2a?vVB z_t@u_U%a$rR5YvIQM_tG$ZaRml6>sc)^5qp3=RsHs686)ptZvX3)A#T3E(W|+bK(?I$_mwB07{Q?^Y;{l3^h(-ijNv5 zK@IR%=LF76nbE_141@D2tO(~885ZK(7s12n4D>$-&pS&j=VS>87u<6{C9|1*u8CoB zxNf1O|71D3GI=8RwnaVW8=I?V@UOVUZW(sBs=UwIF>{JsWz`I21LeP2%j)Xezihe7 ze1#KXIT$Y!oBjwfWi(V@`(TE9YN&n02>jrq^?M{{R&?bA4CRCrrf4A8UEmMF$x_WG zj0c6}xclpy^@O~Y(%B;~`Q(&=mUMqN{(X{>#Tx-WM<`%ORz{eP(Bl4zkSEo-^6l`I zyR?y2*so9VIXoG0Y29B{N{Zyx$WRVy&}7tQN(IYK<$HSBz?7KvQS4PU_Y1WG-Ge&$hn}DVd zWmY#^J=6|POmtlF>si^I*57r$w-0PK7~G5qSK@G4nX(&K8PsH=>Jij;*o?~Hm*KDF z<(~MZeIOs*+MFA2OR+e@L)43W(uF?iv3;e>5D>kSt*%?9Sw^&5I8xL6$`EP&e5#5n z65STZshA+jM5kOVH^umd)d0%D{UAE{6`(z_;LsC|a3;S*h@RK@aYLiczI++E`3y0L zz~8rcM@Z&=-^qM=fv3k3P3DF@G0ouWx>K}qi6eu2jN&sue6zUV4h{2iR=3=q)I1CO zmgKkRFkR1_*M9prL93#gM(S+s+6ex`!RS}-1fT0Yl$clNDBEL2ExaPw3PEHwbfpxe zFv%k193l~YyG0wYugXMb*=0_yE}bOPB6K9a*7;;6>Z9c~hsHEfeatOgbl9K>sR=h* zy^|}yEzJeI+7G^m_=nA`jWuCX7@d#|sb?7iEOfDdum5U^P(CTLDm{dz!C-Hew;N@w8>p?|#nnE%v`RCLEJbDGwLU{rB=O*sy_nHt z!{^rXI{F%5Ty3ZY?(K91Po~W!mVpY91 z8=!=5kzOGqy1@-J+j$OfTlae+3!u4~$mb?U`sIX65JK$Wn)4 
zZ(x?{@~u4KH@U8b9vGN3>|I(td|IrFV=Ova9`!;Z&~flAo+=2(9{X8!p%m}_G2VRZ3{R<>w~RNyD2DCLW}CoU#*PYybt)Ii>|K`nEO4=c>c$iPPW@rd^<)J-{6CioiWol{e? zOBJ8w>*6Cex_MVmcIDIaFR)^t*us@uzD1<7+Qz-jV@JF!WL=kqHY3X5W#4wR{IQPg ze@EirRo&2kj{FJ|(i9Eu9EDjWKFuTdW7ChJqN#Lu4i2VH{F3d0}{gArNS{ zhoY*vJwmc9;E|E!FuS?xN$Q$yyRtitN!!YFZfDo%e@~d+OgUu?-32wz_-T9i+~4nYg+yCR zI#7)d&jqV&6si=LZklkEB&p=zSqzqS)%ZN}ej59I$O9+C%%qYRmpJi?E%LYq?^EuE z=Oah$$?~A%`rzV{cYCHuUB17`1Zn`#ZtQ4?L70ubH)@3x!+kCD?G^w{h|2C2rwIl< z)id5Ii~769$JSi^UEymqd>hBaFQlKGH-c)|CUXmO(p`7lvf1y3`pbA(FYHqc?g!6L zE_X)0v+nq;G#=!q@=wt`TqluPT3>afw>@vW|PP@z(E{<5-&q4MNVm<0|^UoxN z^Yul7Ivbw>>>XabISvuaQ-`bn zm8Ap9GIr&uu1(i#)jZu}5A)8eGK7u1KJR3YpdLBx*?qKbimhEw1an(vC&+->HWk_q zxj%&j3QN7WhHa6xcm#+w5iptY76s%U&h3qeNWzKOO?1xAQrMXb9Gun8Fh z4bsq$lg7Wknp~D17%8A21G7^vpr@BXsaP1LQ>J--70k9)t=t!yvDwyab+EN`e_yTP zX!)9w&TPmOuP4oPIy+dnf2;pRBN*ZJ#e?nxCu6-6J`=6|ZNH8nQhT>Tm)urlBI9qDPaVV-56#?3`*?$+5j<9X(NZ2x5tol&L_DR*eQUfYWizv;ev-nd#hR$YwM zYn*#w-fN9B{&XosgAVd~>ZYGQWGiXlGQa*!PR2u19G8NX&(h+!`Pj!RB)8IkCF66F z_#Xxjl~_J9109hR(M;7&Vr?2{`O{B(CR6FFAMh&`qB#8+z)>9cF@UjOWj;)s8GkU>f+DN(mB zUQG{HdS$216%D{p$k;W12wJ)Q4cPi?p-jteG?q)(n^}6QbyA%o-M-Mo_@9oLvcDPo*LvGNvNs zu!I_Ak>QOO9v8VG*=$GiYQgJuZlaH-khvXk^T0%`t6b5&7Mqfstk$fFK@ns znybVC+PL`pR>RJFZR!rF&n=xg>|qW)!|X-8*2e*`x8ZEx%2(UT)_4@xfBUprq*E^O zjMli_q2Q@x>IL|*3bqdN%h>aP9`R1p_3D^^wx?EEGgDDvLEN#)`8Dj6MTa=wAAoO^Yuq4Bhw zjiZ`ppQYfL-+75fs2lkC>w5Osr`EiBOKTC>%Y!D{uvle)Q^LJG9c*VFsZrpv^^)}dH z%ge2&Ay{6!(raS`m`a*j3M%qC(iuHL*RUecLD<0rnr3`n+a$BJOgFCALtndbKi)k$ zzc6ztp^*eO^~axmOCdqX7{l7^vE-Ctv zZ3#2koO`iCKeB4_aR{G@+y8n>J13N#8Rs;qJltQAO&a*OL6#NVM*Wo{xPQ@VeBZ#8 zKdu2L2fihM>Q9^h``Hd0LZ!`>0&<{;1;1FF6O-~Ve~GdnUtT28pB#cQ1^>D48o+$I z90WHPx9u?t|9Ms*;9OnH?-(F{{%I7=Ydv;>-tVhJMaf`cX_ImVuzv~r&l73j>#w*a zD$~J#Kugc@cO${kl)tXy&k(p8I-lOeJu~tDJ7f@$i)eZ-&5SF)!u?H_Pze8rHFO*V zu}6nDEm4jLflNDnM{xTbq4|f{M2M!0uMTO5drKps^|BEE`o5kKGD1|7Nc9-oS4jP! 
z{L#jyRi|tmp&EI7O-L{;cz{h{L~i>TR|&cY;d zX@(;VI&+!2(d`}2F5Fub>8H1qB1hN6DM32f<9jerUHTfQCRGlISNObxU`mm&j=HjP z6c*Pd3qtS|zT{U(uu~w5I@a0d9>#kmA)`8+PQ8(&IRU7*#E`58QGtUI*M}9I)%}0$9GYSdUa|P z2Jb0hK}h9ckVJ)(UT<% zTjZ4aWlC6=b_DiIfONkksey>HQ;nMVG}m~g^t$?%fKS2tT%_pO@$b>7NLs& zB@#FXvqwM#Qc)#~oxV4?kL{d$8vlB?Yr*QVyXM`8uQ~Cob%#g<{NuMJbZA~(A2YV-m3HVqIRW&Dn^!CDlblnIM?jk{mYuBZvJhxLyv<0x8-=X@;F*O{hvX|h;2 zuVYZrHV52sn(0nurtmmrmnIl2)Jf1od0_e*XRlFz^E-J-t}J)9D$d&2V_=eql&yj-kn|XOChQg0XHoGMMemjC z-Q8`qMM8Tf1gyw~yMr0tNLUw{d|ah)u?ceAhHNyV>KLA#S;TED^HQ0n*x~P0U$S>I zZtEjl@?RJ4E&524UlYLgIplw7|9CX{{p(H{KJkM+zi|NKvt}Dff~FA1S>GF{V{B!N zFL0UDNQ=bP=q`$Ob+L$~luTy|^^} zXlK08pH~UJuLxDeg-}rK-arK<&b<2m12;zqv%H1zE-2XATK=CpzutDlwAzl4^@0$*_p3cmWEM)+z1d}xPa{!FBQsLDUx@=XK2 znuSL&_}@m%gR@pOA0YpS5gX>)UNf+g!1~WVw!CW4x z48@B7Kjs1GM!5)J88nR)=>Mld5XC@){$D>5_@&ZWfc4xz-wx<=_Z9_i!L*Faq+b@y zxreJY@j~8fuSd*SZ#FtHl;g7=1*<&}j-8wAxuHRSn!q6oOySzT{A;iXSIOm`-nDTZ z&(Jt5c#*k;G9(nFD)XL3P>RmOGHpt}^tZXhvarxEEO-vg6@MqFru^RoRUo_Jt?95*s%q;Q5_M86;q|#CT7f7Xb)3OycsLj5_)6GPCe7#b5CG_A=d&cR^ zV|w*nOT8jcQ8Ak2S2zCvd4Z*;WJ(W-oMxF*Li(cjwn?&x;8}dqg?VWot^j$Zb+%^t&oE~la11?AP{1Gt_js~V!mh8CyJHKRqUwHgu z#ix`0;by%m$Qli86&)L|zIDVCQXU}Y(@$+Q74+5Gturv6E={w|Me`_fq1AoE>-1R0 zfGcgtgfbR_vl@LD7#ALe|6)n*|4%GQTIH*GB~`=D2$$fdrl|viSXOC5aS;uAcB+BM z4F(uD8VU}_Etl&(>+)*jL75?eL){_>nnYnSZ1n5|*FRx0~#1EYZx#@u|m z_CrwZ+mQ=OeuBBO22RU`16`Y;BDl^!u1%^qAve9t_$95<%EQ%so##RkO#h?j%95Pa zF9QI@za*%3i4Yv~9QWA9qSd`!BRK0x@d7GO%%cn^$B_s6JSU+Fe3Ug-R(2LVJOx>z zq;a-BO++{N45i#0$L3XK`Td}7dtCTrCYjpiypwjsPwZ1Bw{_^>bj7^k$SIf|W;%37 zk)PumhnvQw=NYw9krh_Q-29cH((A1|pQIu^fdY^14 zb1&l0I=*zPZ7%ODAi>qZcfwSH0XshAsMlV-LI{0MJ0m*}s!LjRPKNsSqq`A%WRdSo zwQDlU=npV&)*+r)o>U!>b$w0%fX?IRDor<(5bJ{GJ;<4Y4>^ z*6c23fkOI1pHdqba)-5Bu6vtpf#A7F zbDR7^7Os{h-&AO>3ir>{rh2LNC3YSficz696$$^c#re9s-^1wT3-;G-YcG912uITz0jPl+ zA;%o6{Ij}El@lmbqSL5|+fC!z`H|V)&PXi!F;;7TPGAL2}R460l*u?ThpeHc9KyK<(j@*^=M{&2n%KedVB0QB)$h1*h zh(%!iE#L)TqSlto7lwrVs?jBhs9Ds8_QB&)Xg2yy_o2o`bLW}v(>3qwJH07Qx%<6} zroJtNkS{LF>B7SFUkL+d?h5kJG35i3%T_hbZi2fFSE?%@6a z`z{)-)&Pqv$S%dgxYud3PG;kk7^IkV>;cw$zaQ(C>+1L(e4+W=57NwI2}*@yT=IJ) z0QiEia3R~v!n!X52q@1)nNk*^6T73_Zr@Csu{c=AX!VotUFvcOYK)3iwd(Li7S^4P zBoMIC=2Z?yxEo2={fJclxuIzNJtllNy59ms?pDQ5pzg@B_rBA_^zvIAF197DKQ7pW z+aW>OM<4W}rpd;pJI2vt{>a*pk`t5ze^o&5hX~NTfWjv=(%8(4N{J_XGZ%Ag0MR{9 zI_y_SRsPb$zM75F<`?^?ucC;mN@M$2b(cTK3{I8)mCuS=R=ZfO zao|YMmM4#G(zoEW-+q?S%I9;S3qvY06@xpF^)5Y(T2i9IWE7dMOMp1RXV%IUMQHBx zLNTBEBk>?&?pQ$tXX%M;IT)8An2AwUFt8p?pcacX)&1_7EEy__N6*u+@#zrM`yCV* z{k&6dCPT${*n`n^6T#PgwbE^c>6r!p92ocQhfXNf5ZqM|uXXB2k4;|_`;+?GSHzma z&cmV~1H^2I68lBoD=4{=1l~EE0mqZfi2t-?D|>pF%kXS&86<5;&6^DB8ca3$D6A2X zU;-R?*v1Q4+3?m&yHH7aJg0k6XVFar23C$B+p9NEnZM=2%~L77fdqD(6r&DP#{$#1 zfhHntmZ~UTNKU`N2NjYgCjL;CTZkKUes;KywY{dvO}$e8Hlo7@`?VJp0Nun#L>2T# zksj z$1bCOw<&iHPlr*aQapt>*Pypm6gxfK*09|_^HG;^lUs#3UlQK(6$_}ss&uXe{8#>Uypz04XKd9Prmr1V1Bas(1ee7=Nrh7k1Txu zIs8^@uxTgB;CnLGyRJ&~n6wc`*K*tT)Oh}k{(1*aa$eG@E|@hn;Rve@WW0V!XqgLxOLdXv zo8d;eHJWGl*VhvD+Hu1no9JVDDjR?eZQkuS=wi#g>n52qq(@RhMqqK3yhA1^7fY{V z`fnof#m+>;qUcloK8lBu%$Ja((N;t^L9wB3-{RaCX~ zo}j}{g}dCpb0W5>v@()B4$yW0FH0*{z}#&HNHXrJUCzw>a{QcRoH$fETR-G|m-4GC z`na&V>Jn?EB&S_?q)J#-k)HQ1k2h9(lswTsdX&WHFoxXEng$BPx z0DQipV=%Rpn)>rhDI{MY>8<6p`g-Tf-~_A%Xi{)ABvS_(h(YI#TI?83ds)_|&w97J zKV7a37x?YZr+(ZTO49g#1W&KiV#QT0o@0vb=?n0K!aURQE&gOETQaR^s5DYqr)QeR z4tV;Lzrod06&P-|l4%qqu(_=}kc}v+NnUcfx6bVay-L2e_yh205oR;Y-H*_KhG}VY zIs|laUVIw-bsarGL#_77BJ2k){nF)VFIld#hLgeuh_XidTi1uI=7FT+M6UYRF(QHB zh_jtF6q5UR8EjwwI}f^r-y#^l#9Yo3UQT4^y`)75t`)S;e%@39KyWn`O3hrhlxpb6 ztdgEU8Q&u783qoigeXX5*wb>VJj76?gk74{xB@g^uA)MeJ#eV4NGp4C@-_AMf!Jlt 
zW<@lxL}$!?>ZYj5Jr2iaJaSFDf6p9f-tRDtj8)5zb4H$-%Vcci12w*#-dFe58xLA* zD#0Lo(WY^>H-B+PaOa#b8bW$>b*6llesi*rW{RTTf5hdHq@`P#WgUw-WqK3;k&ynu zeMwh&*Fw?5B2gi4*toxF-O-u;(P;W}iG%;1O~pZ@&0C8@N>n`=4MZkz_sc~DMl#vG zG_iljCPsT0EQRPwC{mpq%&ee!gxY<087pL;L!9{%`~sDW_+ln4INI?~@)>-Mp!AFY zjVZ|i?#fKHAvI^U?Z7pP@!0XiYUS(Dz1Hh4YW10!tBA&Fj;^HCjND??X6`M+81pF> z^MP`OE^OBmfNM_F;#j%l;0A-ltW)oO^R0ht$Ioy+bCgV2(#UvyJ4TZ_R`q^wF@*MY zO7sh1Q#mb;yB|-jB}FP@F(N1t>3T6+R-g0oy>D7~=pRoZ2NmuYf<%}^Hnt}q&(UZo zrM-7%&ZXvtaDV&!Sk8#r;o~6UNqBAK+VK7JF#If$9$dbH?$CDW{u-!zdTn{JRP}U+abQ1MLG!fu8EDlYehk$4U17SO8S}k~p=KDh4 ztsrEDe3f-+b)`F@0+vR(K7*@ybn}H{Z2qWLx6^USnG!M2JWG`9 z^O#yg_?Z!|McH?z8!Ah%JTX@*$sN+0YwpJf-aO$L+ ziY;gaIjSQ>@DHB1CNI#Et}d=FtI^+JqPH*Bn*ti>ucV2;8LT6>AQ9Rdf|DGzt3(W zx#ST>-G|T@MjaoD`jq*lU4HG@FvThIh%wwb87kCMFHKfzY4z6qWS_Q+TNuB}Jel{K z&imfDNMW*C0yPSHTIIf-atZmI9d1nz3tup;3J%q;t6^s;B)v7<%k%!8`%G&c4jB~o ziDcjzgHnMf`PDf-WbsC;l!t{Pz?sg{tjz}9IQOKhdQO*wizC(fZbMr-hUfRTFT9b0qT)|NBn`M^!GKN27&dynrOOXcpxA8CkhMm+ATT(c2z;KzYB{U-9g z(%-ie0fBW#y9gAk<*K~`Bi1PVKZXJ91^#`x5JLn{Ex6-eg6H}l7t347a1IkJR$L9| zoA1tl{dxI&-(VbSlAh-uPm}tl@D+O@27?pj=WF1XsX6|1Y9FnA!JoIb;2n63IK;q> zZG!&0vF$Y7bn^`iNYKB@d=oFM-iSUtH9}nJlb-qaoN4POMSSd0I>26`Y-U>@O=ZlO>65eh(@ktuLPeayQQYk z0z}n{DHSPjsfVH!B|)OuQ@FJ#8bj~y8BrQ`c^oP2CaS!l6HY7LiTs;^o(*i`^qg{AQxOFaO=L3o6 zaXfBS1{FQsVKWC(BBsM{i0@u^I~6--baX>v4}%yechkikA5_iZ3QgZmFIE3mw^ zM~fC#6(&_FC22D$)`oV@ZGJzUv{8bahk)Y^+FuY?p?Mt`{1Tq`U2_#?*sG@v6_wH^ z@6N@9daA7*$}g7)Mmc!AGvXwxiZuppI$Z^Xuz@h?pWzAvq76P}xq3)Y=k(1pEc#I8 zqPcfn-;FLY5kuKfn3BB89Zp#MyB0uj0va70I07w@ zjRuE=yg!{?C7R91QPDN1epu|p`&6yB^+f61XC@^bx;L9uGje@9(;D-`?@^n(rnFRL zX{95j&hq?V(308jyL9%tw&MWIK(T{GviI!HN{~Xlpr5xkcg#%@_mckcn75w*AsVyC zedhb@mzRC?bfe5I6K@r42=xUWgmAON+WgK}jhN9V_h)kP$dxVOa_8qg5zO+P%_dPZ zIcG#&&5l#q)3}NmgSuxb${xi#oPHy6F=wtxStcFY7 zI9E%@m`1d$!aS9zii!{kJn=jv<;KG+=L1?G6{$w{kO=Gv?sz`BZh0pK!~AT$gWuZ0@hum^uko=yD}6|;UXsEt{C>W9 zpPXTf^|QwTAx>P1N`Dv17=YV8cQ*|GjK5nQIRrIWA|d_wLG@Cj^<5zNy~%_mKSQRD@h+7M@0hxLRRBv)zf%T8a)o zQc-cSxn2@hNBp^&vN3&yyh@ym$HT1c`j|!y6-j%_)SXEVEQn`ki*!mEW49RLxfnpc zEbWOYk2xeQ@y;44yn@opy_JV>Pkp zs}x%+B}t%Q1Tpvf4q^BFT9DFwa$k&sdMW()?(b>=Ok$AP;*j^kY(vlu^1x3l%-2b? 
zo~>NxZQwR)rf<_g=MgEO?R|M;V+w2HMxG)U@NhKtKrfCRo$(q@e9S3aw)JFA*=Wm!FknvnID(CGZp zqX5pfsW%apdaZz1PDp5y;vU&qngQZ(%%LHJ3nEzjT6>+fpPpLwmly^ztn6!cw7j5c zi?#Kh^8I>qzMLk!>9YEpg*N!2umFztukMX_s?bTnw?zhvB{rKvYw~M^?~XL>$~6z1 z*grT#bu_OfH=k*^g^&ZVM8FLTsU8esvYBL!yRFT3wOZm-rS3msIkX$KMpXC5?%1 zISIzV$tH-{2VeX@}*)`$BU2noVVzgM^$8 zBPAnS2S48ovbXXG-Mu@v@H5Y@+Z}_THHxn{viIBFU|!zZ6swOkvJ%prTWuX%`ja{b zg`I#(b}#TD&4|})YHoXzhd?pzgC-uqLeX{fJvKVZ z7QCpUV#eQObb*o0a`_H7dWsY$I1hrP1vY;)0txwZvHSW)%MN=KN$jES^^Xage^!>i zy3-SWRc7aj8J#qGSIRt7zC+Pf)!ip`+Yq28gjXX3k8y@KThwcSclmlaf?CfN}=sEwcfum{QX(rbd4LSlv|A1s*;r0fKmN6wL6K>8~Gml2c1H1>MyK8NEZo zWkudR2qPzkYEeQxosR=5Ql`ilyF|R1b9LUQ&Rbi&^iG!7e~&?fLpQt4*Xn)s*6oa? z)_ktm!<6|a($U(k@k;A0doeR>?2HqD^Pl-(W@|{)3*RV4BNyJoRcnOprwG(+DYTYl z05@RAS!qHXSp0>Wq_R+OS$9`&d6mW(|L*jP%?%~RQuv`oNw%fQz?3eOyZ zZ~+R+8#f;8lXlZ@tv=ReG%lZ+k~&1wjPEqBL%-Z;zb{j4#ftTw_i0E+5%JydX!GX? zEdz=}$0AbqLmbkU)Ae49^3Dnp`=5TUfAkP{4Hx&34%MeP(090@+-4)GTWPAA>6HY6 z?O1$GN_>ffq{BxESJ`(GuEx~=U|XFkXTYxc2#dz} zM+bE?bKdWF>?6V%dXO*mEmoShRXR>ijzi)7XvS6LSAGG#SOQT`?Z8=CRaZ0V5^o%T zd)0jzlcu9pzU%;Hpl*F}>D4ixk%m5rYW!7KrHq9^HRtl%&!kAcukI1zkUz?|47hGB zombDf&SlCU8K`_sAxk}-Z`k>mjieau2uG}FxkI;raOgAb>u9~@-c_=WP4*vYQKlh# zUv=us$(qBs;nqezd8I53aBl9H7F)42^odxE?UK zy)E;mu*h(jz6a=oYy?#GG4-&v$Rz~Xw}h^K>a1Ysoh=*a$9`b2Y zq9l$m{;d?EOI36=Ie6L*kRG-LyTfltqysOCrcaCj z*PUYf(pBfdMLz?eiy?htAraVW~u}qqGdDFkvbj|vJW(q0U{kZ~W?=CxgTV$#E zn5UR7Iz6PdF{oI-EoxA{ZMa^{BwTP|_f?{V1Ia^G^?9Ry_sWMkX`czn$Bi*LvRUOK zJi>)+mWN_C+*SmNcwGBtcOsHSZPfSX_5e?;kI(lTk`EW8g#yC*zY9Lv?8~q=&6|lZ zX(?9=$sZCIXW!VAP}k^)%VxS?PL@0D^s2a4;dtjwLd-~2FzxzGo96WTuIUfn+dWPL z(Gpla3YyE^T9o+r0~|GxqH;x?kaA_=wZ3ITZYNi%7^HO zCFYmT;gnA2GK2{Wdj%QvjlcrM;B;?FcWLm9z*4vFM<#tk%=MMTdserTn|g~v?WB2i zJ%8ugfWZ~~I+p276IQI2A7p-vl5JYgAxs~ytm>Ts(=c*sl^Kuw>(iK>ygn|}@lj;ctx*GIQW3OPn>Vam z(ePhxO$Fx+hw+Bw6_ZW?UFvap?kDeq4>IhTyiKXW8u#T!8^^Ay3uu`JkKC!1UaYP~<9U6Z%%zfsIPdZmkfnzR z&TM4Lf~Hxud)QohbaYsBm|$6r{%ra-Al2cjY01mU_T{`xd|D%HS;qLcCJ&5{P^h}b z_fyG^;muvrg8_{Dz5PY_-YsN&G2J9&n6}D2Gd8+Y6E!QZ6Z&EpU=4%94*LXIQ$EBt zP=Dks*n|G>sK@IjfKvL^zhP>m?V)C4nj+z45E3eOVp7GR{Q#e|_z{oS^5*KR`4*pM zNN;cPzN`5mS*engoh~XiIoRdWuaAE}STp)j_v^Yx-j&L3x<-Q-DGA+GyX7BzjJzcBveUp zz^QY0RXG%Vi0ZiC%BRgSWjVT2%T3zR(X-{h_@v)VZym+Ew`!4c5LdasZIR?wpLwsk zjnw4ie2>4i{w5yE;J~x~ca+$b2X!BJ7z@x&26@lDR5|U@^8rcRqIrFF%4NUqH!|K z%E4S*0}oxmf#^B9m$|N+NOVzo_G^AlTUud&iI}}n{8?peHThMU+!MuSad;nS(=}S2 z-M<%utURm6qu+}5ert;=tBf>yletVVFm?{wFAZ;~6dr469&e}$lo8w*rj71WV7~kr)hayJ=NEr3tF89wXW2ggw3(5z zm#ZUiC%A)9nyye*+r&Z5`gCbtD0Q9g;L2=B{zPCu*P$~$-Xuz{iXy)Il)N%|Fnjh9 zLsQQUp#{UOqmJfze<(I-!`|k@ymSV`wJym`-8IG>>R+sAD+*iBcaRnz2+YtOr5N9? 
z^nCWFeMmvllZ?0^Zw7u3|C9Jx1R1bETe}yybl)yJ$Ejs!FX+x}o0KfBPp!q?_?LlC z;^OSz$=i28{kGb;CnN!Ybpul9R<{70;oYP}zgF1$iA{RUxnZZ8qhsv!{mHKrLFehj zfCJj<#Z*pFeD~*p#Zs$lfM&amdRx4OUFTmx^552B;0P6-uz7%W`s)fos86>58U4Ge z7I|b`@$Z^j0;PwNl@A^5Pk(m_8h=wvgf+v=5NC+l?*moTNg_QV%1*5SjEkDX{eaBs zb0w1C>kd&fEEAI$;=x6ZcaWm-`4kAmxXA7#ML(x%FWg%@_NN}hs=XHx|5-??(|6mf zWU=c)a2Po@4Y*%)elmSXyPF(KPmR-P3 z7*-lPLK?%}3I&Wwl%Zm*j2d0}nLE&x839+1AKQA}yk`c7^?<}*ljk=+2m7lL-9Cf+ zidZBjzJL0>;QDryvAPMjlYqJeqFr*y7RMZ02mcrVsaRU_K!4AfjI9wPIKC`B-4(@{6X^&7EuOa;-N9Rh*bEnqP} zMxJ}00xSzANS#34`L!4Zw_fGLB6|6l__6md1IoU_i<-)wCq?p~oekWD5j&@4N`~R# zLRrYeb{glGpdP5HK>x(o_+r#o@ftJ&-GEtek#D*`vkogL{CbnXVWtd6}}_mflP z&xtpyG;FD>Fnm*)#s77*R-j-2o|pc0wZZi$!gLM!Q_BbT33bD(Z;iV)$;r>?wpGGE zckaChs+Ak|TMveTa>)R-@KUlFQs2Y4Qi=;eq!o^YgcnAm)QxiznP3wnUM3YADT=!X zt+u&w6o(pSntSmhGTZz-8p^xprVGZVqj_6+$PjB%Xm=JomqGIAzu+Sla7$L__!D(Q zvP4hneH(}?_t|j};A-{+WJz|FY22xovfkjj`Do1deyO)Jj0vk0_qXEBFk%+$-jrmm zogy>@R2ivcQLhv_0iYS;Kg&{=p!5Lc045)P5S~ncM0QRrwL0iLnl6LbX@IT`AD^n; zIQ3HUA#6c=A)S%>y;rSf{7zw60^CWn0LW3=(G5BYzh9Wo@dLA7o8HR99g8CZ&2dAhH2^Bc7ZVO+d$)d|Q2IytViFIGXvZ?;u zj(|;Q4IrU+DP((|hI&OcazMJ5y&y?Im2Kt2t6kKG-DL|*Hzjq(YlF{@q9O_ly*B+O z*AGy%&Wo9kR6x1UKu!O5;C@^Gp}XS0*~!%O@;OW*5F?oaP)k>1ip42L3v~f^=GqS@ zh|sAi8HRCSzWuDkUw%B$WSaO~n)DPt5i+71Ie@wai%#y^Qrhy@<5K_zyHrg6LoVK` zA^>E9#A_K7E@11+AWi~o+1Tk=dg_xr1{Fu8RwvkWq;arlF;i4|Ll-I*e)MEC!ND~3 zf|a;{@x=Dli93Zm{X=NasU@=FzvOSW13v(m*9p|(3hG&UErk>K&(GlU7o|bNF;MB` zEah&VV(SXP6i@G75F`>FqY4U^5aGoR7)RMpFJ8=`4+?yIZk%s2u+Z2-AhpNPVCLc3 zx1k@w`lzg-^*}FaDS(l(#Ji_%p4$9BfQHc^WbnJEcTPc;MI0Kt@TEy8L?CxMpPo?& zPcrtFV4sai8^MJuVUZ(mz@!U^@}0Y--05%%*#LQgIYB@NTJ^{%MR(wW^5_6+W$T7; z&KZSL7uQph{)3ty4*}~koF)0)DMZpx_*tcCY?o(E391?Pf7YTj+>6-*KofrvH=%yK zK^G4|Cd-luh5RcG{{AA8ls7Q6A02`I zwtck=CGH>aNZUTZI46~WvyH{@@?}vPDg6hhh6zmt&(!reu{lgPn>|+ngbhTj3u{B& zra$CDgvn}<-o67 zU-E`R9N;Q(8S#MBBAu3SOImd(@@Wb88$F?ZfV{ySmkT|Gt$51zqfBd|5WK1x`fgS{ zNovNja(GwBp;Vm3+8Jd5*djd!H3cAV0%$;<&!+>=Q4aQR$Z${^T9^e+b{bdeSYW7n zo|^CXn1J4aO2>ACQP+s_TT1!y!rafFtJ=rHW^hqXW$2R=^Cn6FmQnd}xG+WGT{LkA zjVm8qJ*emGoO(AM?`M2Gr&}S8$W9hNMK%mBfSgEu&VHcv=@&VDp!T>d8qa@6JZnCH z)2KDdLX`fjq(`hlOAR{oKy894=@L{N-Eo>draz2vubrA8AGk{94|96cnUBFqo}w;s z>q}|+^&T*U)@zA*!Qq2_>p>-Ko#!!;OmOR(I7C%rmAXKayzLS1iLnravOIyYzq=)= z(~(%6)Qr#&$2FUeI+LDjEB~GI&*T6?@aXlM&N!i-;VBJgf6Go=FzOF{6|9(i7jtgD zY{9-vE$QPq{qPbL4VBsuVmik$SKktN_H4zrRsKhE3j#D2F7+>s87X}BJb|S@z@xhd z==93?9)A}Id?kwu#ex)ky(fOeD_+$QG%q2t>buXavDN=X_xxFpD)}7dMFCo=d&fe|W@JQJtK5OpDkDt%aW0RBbVA?yht3zFZ-eLO{fG)(RHXTAwFEFaoF zj=R&vahOtOl^Mf4_3nopZ2gP1fVh}*u0_;b{$AC3rld+?q}n+|Lp{)`-+pGc z38MxY`*=~kJ+l;dP=CW)1j*zWBINaOLr!rD+Ds7A06h|(I5J++L)tTzSWeLi1~YjQ zpwoTFs3(%o>qLa2dR0Sg^W7$q02fbG@m0K!iX@NPKGDNO!80(c6kK3C z^-40N;=Mo}zVKQcH5!RsiddJG4PmF|wU{M4>I;PrA!^PH3SNWToX&*nIcEqo3#81@ zsbfc2C8cZmm|D*t>{GATcFR8p4n%eQX&P)KI_qe3xWf*5yHhlsrfR)~h$D1Qxgr0- z1T+Z^5T2Yu5~pm~ifogZ0p@v zi7eH939(bh=PCT0Q~_ZAfH7zon--ULl3){`L0f&(_v8HV1Qn9(YI1Dlk833|Ql39L zr%StlF_GVt_C=_w0w+hh_q7Z7=j6b@MV=JGhkB)FMRg zY$pg{NmwLob;!IogtzsnCGafAVurj@XcN|%k5Bx>=PJo{stJV)-$mbyTyx!dAsF|UEktu zCgOTq&AU>4&Vl8pF5oBbF=>(>Wne<-94vPb-&rIxy%$33eeTl}$o&IcXe~Y+$D8%s zvQ{Kd2O12`UP%C%5G zp@qhL#Lh&WBh3-^kE5)l6#}($ErePNN`a5dn#tEqi*tkT#9}-?$8kPzA7h z+(sO&`*&XAZ%?clzUo&@FPIK5PDnC6>HLw61J7j1F9-*e+|RJZM>B`r@+>D7-2X7R z)+qLu=>FBlLvKSl&>Yr#e)I$VkfXiN>WUZP1>s}{IR2|yC9m=E9ww1#v#Te+Y*W1M zW}LFq!Yk#3q7$$WEdcM&N87jw@4$nCB|BG&4~ZO1FBCl$l4!A|c1BMEE7LFVh@C>q zACbqA2jIuM^l(0RvxecQZi>@%=r>G^)Dq@TuEHHS!`26d*Yw53)*uq`bG;7S8hOr? 
z-SlVGcR=^_1{nep&>>U%|AHHG2)2@ljl_h$s@9y8??&$%NngR;r}-jx;_F}}Nhk%REW=KfA$V1QVO{d%{}M6X$&xi1X(20kMbK;$j1qMo+q|uf#0x_6|8-;4#Xk^mB{lHmfnAm}_hb zl)jie&nyS54Y*P|`vx;cR5hE&p=yFPkATc{qx3=tPiv*fzoH0w7u70tc^3#e2n8yN z5_T5v=M8Nql{j7m4mv+KfL7Z`;ViVwGH0!qe|hTwwlZq5omVMV~WK8x>O%Nvb2^f3m3}i#Yp~uL9Y^-s|Q5@KtCUNlh7N_itTCVHoH2rBLH+!d1SPPMyK6mZz4#$(r}l`U#Ln^ulSGh zF)&gutl58Zvm#%TVQyaQx{zFfRiGIIN@?9jQNW6wCQP>+xz?NtZ}MbhX^~(tw(3tPO}Y5DTpM+L0UrCLStE_bET_niLYmBy6HSP64{~S z%h-({@We$r)%Cr*&$$LGk$6RT1n%NJS;lGuc@BND76toX9gGe?f3CHwfu`VuuhIqQ zrAW|HN575LG8oifv6CVsCNAMWEyTK*Wl`S?CFTU@gr=i$r;f^B=rh^7QSVx}++c>d z&iYL3_zWEXP}RTZHDM-jUc-mcTN7+i$~{HTA22vYx)1I))8j^R_ln|cPjdNu&!TECyLNJVQoUga1Ai# z$R^VmG3UUVvzmb+%;((tV3;7X=X)zfjivzV=9d5Sne-GmlR%zof?2Em2fraf)m3sr zO#>{+0imL05(@E=@cde8Ob9-g^V-;jhmtit%dIyvxeQX@Oa2cE6xczVTXTU({+9&S z0B)v@6Ay^Q-e=+ADM)|1|7G~E_zr&nlSPI2_4)!Pic{$v?fOM0!H*1ZCs)C}cP0AX z)Iit(9^b|GS5hFxYR3j0im#RWa2)1ldP|A)@pHBCpAfHqreg=ZX!XtZs)P4HH5^jR z0C77CIvh626LBB?3}S$vkmRA_KmtohIj4A4#9B{S?A+$qUUc1mTHNQpyn8u8W|}y1 zmqi=9#7-Q%zi2iY574NMxb#JjOB%;L&jsOtSy|qK?0hN0D`}MEQ_z32(b|SD3JOYZ zZZg#QuUtq(feMQq8)@)m!51Cu9rgu-E+n2ZWP6l-o2fh&aU{Hk^KrdDoOAL4mWez- zRJ^M)`V&;c!m|^d!e#21Sh@ds_Yd%p2o+!pUj^zCDC}%BP&#vV`ppj$qFb9n&TCAv zGav`PH}6~5OeDKl@*RDipGMbtas2xiLUr=6hU@&BKb{GNq^%@}sU=vSf)==}qON?8 z`NKhr6BgrL#V-PPV~^MT*GYMeJ6xd_yPf~yuZ77ks_p25NHFtplpZ|c=$}EPm{!KH zY^+-Dmt?3hclll4P@VZY0(2a}rOZlRDC-BL+(tNAmTeiy2=im$%s*Vjc0EN`PfIqYA>2PTL# zq^&?U_~0I53*?%xNw?1jUo8IY2k`(4L*jBee*D=+izQ#8e6=>v@uiDd@ zLijaT<)0ie$&iZ1d&iXh!=svcd;3Q#BFAMq-#>WivfnusKSZoS@6k`PU}nuDKK5*S zqZ8WRzcRa>7dXapTJw6Iouq_C0=js^opPIF+v9^iZhg{1{>U8TxjvosKRd>x@x$}N z5czB__m)%D^0%W6N{G=6pGUR3ENZ`xA}v{DR3hAcicAm-qM(}xsZ-9CySSI zPFMw(L4o)?RRUPkOvNu7zO3YF`_x~gOSCJ2)&bXBDrS7{ABg<7hgS%v878*;N!I}M z5svL^GqZxJ$~udP188p~b^!H--UGyB$p8P7?F<4HoAxLuOo*K8uOj*X+Y8|Lw?Kj>#Ma!>)GNfQ4efZ;Q}IHj z&Oj1j8Cw<`a-QGI`Cp((h%n(zfu@t3468&kwVTZx!ap7tuY2diC4_4UJ)fdN_#7q& z_Kzw(#2mQS@>jb$!C>dM2n%rWe;H(TDkcT~r!eEZYQ$j;10;86NN~zr#gzbn-v@6|i3| zJs^4(rfg<(GC)z0in@V$-OWsB>-lm~8}D-Bg`&=z(Y(E6dFVQSp3`HcZEtuPF~82^ z9YcIEoyTSn@m;S`Akh)c6Y1o>|6=HjO=8Ue$So2xVyTnM0H+}>+wB#$^RX!96{TE89Csjkp0DAKtx2}X8F2i5&ZK? 
z^%lukL&%%!$T#f9Ye-(0No(~tyqVbiz=-jfL4&2ghVgt}%?0RjsbE08#ma>-t8g-F z8ybdRaJZUasc2Vw}@heRHOM8H90*Qr!%6?VBxO} ze#I=C=oIwoRN@ZQ9_JPV{`V*u;|7z?xeOdxYJ$h-PDq#!@&Nk#Z%2{&ZmoH9#T%o+ z)MJsjzDe51r%-!NK>R@iEV5!_SpK=G$sb+r*Q&H?k(US8g!_htWZ#sko0yDoejt6$ zB|~{muz?RTh#~ElU$nH9nGO){1#4f&l9+A{%iEKe{4>XYd#Knz$VGwlS0{CN@Gz?5 zaRXza$$jhH2cJ_)8M!sj?c4>J`JiVm;awoseRCDUy|SP>X^B$$lf(SyU2EAR*ib_= z_oT$q>OpqiSnL_%Dp_?GxhPi<70}rnK4bU9tOvLIm-bl9FB@R#li27mraUJ&2bc>d zXQ!4OEcVb#>`*wZ40U7huMFi4Rox3=Y`GwHfxMjuL`$4Qt+tV!k!JW-o5lau^XhX6 z1s75mcmjy9cG?a~dYufK%I1Q@C^RpQ83(gB$Qew}dIw z>{?Yt9&hn>8;Wm7u1tT$ig?8te8QOiFXW^Zj1=!o(80htbUN`_%MnT@l+T&p{u+f8 z8(lmgg86LX0idm~B1!YxiYpzmCjeL9!JRi*DX!980GW$~YI`VX`N*(&DgKmRmH6Z* zEwgbOqs{mOiHqqez2dzYFK4P+p4Af)TX^HmXDTCAf2G2?!|=qbW`vTPyWIA9RP5u& zN_)!jA0qpaP{Gc(4yhx~G9R@m+A}Ux`19(|#FC;vyHS`iQ`#I&3ieb-W6U|>0){p3 zLNYJptCK+Z%?6u*Cs-^`;3c-ds@W*<%b$J@n%7C?G$VlCQ_W=+*{U}09W+gAxlJKf zkhj-cT|ZXCDD|UWfBH$eFJJPnIrYQ0qKj_}`3Xbs7ZCy=kF!j#3 z#>hyAh@E#9d(f=K!TxBk@BUB155|P7!V9a9kAP$44{aH^Lvesi3rT76qkWIdkH%wu zQU@FEseSo z-vOGJl~;cM->B&i*bygL@HILMBm9SxjZ!v0;0hgVAWV7|wt{*-86n_4_}ZsqpC+!G zwvQNa{aRUQcttS5oA(Z+_HPmc7*%aP=x6ORDwC&&no_bn0~v~aw;Xm57HiB{waM5E zk*#lHD0lxsgQt{drJMg>y`t~4Xq?4vv4%#M?X(r-O|0|`MmO~Hrx09~g_m0+4IFEj;6yX|n)I8*r_EM39 zmCIgxVY-`6a8<-saK9uRwYtJ(JNN|;4+a~KFDI+4RwS{F+bz9Ch<5pR3`tJy4TXoW zDda~-N7x3*Q$p7-{YC_o24|-)ECVy2e?w`fBU|;}7JY4;ldlWqO>dq5Npmv5g9f9~ zSv0trxjAT(@(YdB8b);C^I{obEe)H9i1LLjYJ1MnfYRt$3T#-eEjL_^)q4i-Ibtcu zzqOaQsW|C=1^2@hFM#ghyc4sYQ@TA0R&GOi0S^?_G8IyBAr;D?SEJox<@c;aPQr{R%Z!Y zxTfQsYw?DrZ`*j*+Rf`-?JkUkTtv?X#L##wIvf;RXG#njwviup;K#F#N7EUzq#22h zAjZ4xJyG6|1hh1Ji$iQMO93u>DeFHhv#D^62?yoNkoo%I3qY1#C2Orn(0``q)MVo8 zHfu@=h7}?|y1qerzo4z6EUbTd)j^NH3~hjeeWZu&KJ;aP5KpWhIvv~n?jCh|QX7Cy zM%!(qE?+U)zKX;TXKKFYW%=qP*f^;<*aSOnRZ8(7Q|XFW@7{7U_Wwu>$PZWD|2gz zPpPwNtnx!!$+vrIj+KUYc5&jFuw(R`r{ekIHT(&I>OnE6|J(f$yFK9M1a_ab>J`I* z0M#sQTLhPY23qFS6L*x_V)O@km|YdP;TJZjappDQ8l4W=9}kb;!A)K63uJgh5W z%{Y&W5@QbT=l4jvgM9^5dkOm5GvkUw)^2IdXZU6i^*DG<#&}1M=RV(vTITdpkY^(i z1vyJM>NPVoYy+A&4CIsw~AD#GL%kCpRuMspWL+(z0+it;G^Otp=j>tMBg(kkLwdB{L# z8V3v|A;jPu#T5Z@65r?gV-RKEXU|IL9&? zE*}Fzh>Y+DX_F08zYQwOw^Bj@UDW3Eih3w;&&7TU>7)R$Qgv6AQTAbS|IBr`(F|$N z)V`icUpJdP6ncO@kVV^y{w);V_2j-t%v-ih-IwP6w;0T~pOxNx>s1Z^N>;RyA~Fg` z&P+aNxDlG1ICHAdTJ7L_+$ zx;=^e{jxIABWYi=VbY_y&yOF|NbMZ9mGAYL*H>>ylt!uLx4mdQ$Kwun1GFsG(9EPv90+1K9u>4%RWZ$Z_1Qu!Ftc~ zH6NRU>q}4mCzpxVMKjYnSpVo%6)>zH!1;n1Mjd!bHE$We`r8qgH~tNBpl-&$E?lX` zyxm`1q4%Q~=K*J!-Vj~xY?tImiX>6K?!UeYEp(7`o8;xTF9^P4IRvrWbGKW?TubJt zDC=lo6ZJeT=bdbRCQWg=d<|$ak5>P)e3s7(Wh|eDy3l|v-NO&X&cTk*aVpCE z))KmxVhdUH<`bX?0LC?}#X*1M*Pb%CZ4pQoje!j^WM(l$D9+6Fu3Tu;0cLa`E566#@q$PMCY8>t-n#U>0uD zuvoCTL2q^CTXX3`A0p1qjWlozJJYv(Z5Cre9tRfio8F2q>PutoCZHc}JCD9q{Xfdy z0xHV4TN@{a2I-LQPHE|s4q<2zY3c4RX#@f3?vU;-LAnKLl$7q~|BU+k{_lIvd(OAM zwVpK&Bg{Pa&TC(L-}i*#@ZB^8y@&bra%*I1%Lz4=-wGJeHO%t&dD!>C+Tgy`wOwQm zTi>^Vv7!|tK3nI3Lcx`TWpHl-GZ?JuJ4`s3PI9Hnm`G-!*fB5;!*$j%eUa+r^l?` zt%^lD@c4Ylic3#Kydh=v$kJHM@#T-$XK$X?C5-V?PHteg$I+hFx#`dz`Y-Q~j#hA8 zuRf7k@cxKS>&N{fHfOUOa@>=_mRPruv@eS_oHT+4L1poRNChxImg#Y?+#)r{$WB@H zb&w(3tud~umzVV^4q-Q-j@rnTPD*kB<3t9&QcI^+kilDrcNf_GheyoGJ2D;^o(!1X zgdEICa_u=9iUca57VC%E+64zQcl(7K^Ga)))EXa>BP~jHur^OPkShE|1xfov3sE#i zM$x!%W06k0ZM*}Ejm2TDXI@%uxNmpul{cqK_=}Pt_We*2yuy7#y)S!!4_Aikbetxs zuhfdJWo0kwWMN*Od~TpMkB@hFn0#?*0Tm#P*~Ck<)AksuFQJSb8}tNZ;BWMbc1^A? 
z;klP+rww5gsSMwUoTa_CU--k`0#i4c(-GTxw#`1Z*&(%^{B{4K?iZa#=28@myWm~I z+z4r_{%A;PEX6qfboxsp1ik%arY1mukxe_*Yo`Zpeeccfjr}HXCw9lh;Ew`0+7IzU zI81&1lpfRkbR=)^p{1l7j_{RUvbL>*hkO0*T6KHX*bWI&X5)|E!|F$o{1g^!k`#?Q z5y3k#D_QSOF_s?)2m@S%2$`0hXB_B#S5UFPs2YrU@Ah6u`6|iTcjrsdzW6z=)rvtp z-P<}*gQ^xS#%G1G13Zu&rl%vD~$W5yni|Q^^y-f|3FO zc*W82DG*{5UR5R8fvTNT;PBic-XtkNrt6Srj^Fl$n)m)w{7H19n75$K{U+B37AVT2x?j<@L*w;6Iz09}YH6hI{2r`^F2Y6JM*dVH%=AnTOG)s$>-dLx zR%OFb=}^CLOh!C&i>#)X%ail7R1r4sy>z$GSihU2-iBqbFB-FsXATo@+fFP8s9IlL z2(4J-2Eph9061@Iw2czOPIB1mHp^$3NofU>yh@pTrI;)WkJN63taUt^<`S*6YPI6r z<7=xd7^+Cl@)Q8c+yOa1Mu%>4^f1cSy8-}K!+uye2W(iXafhGN3DVIO%`TZ+d92^_ zSvjh5ydbJFP?91H*urH$wsUPba2LZeF576X8H4>U@hv_EqZ2)uP>np)n}2iAG)E;Y z)}0;GLNUNSlZp6P4yd3Gb5h)G1mLHaAJpv@vRkSrjyI4)1(aOMFfY zuAA&|>Z;=7|8>~v{}fZSj4Jx-!Qpc+dpEz?wdCy+;+N06eSEc`5S|Ab01Vz{O+MOI>1KZDCxPXxGz?}3ZeSM z%*xPVy?%fE)oBHjbu2Jqp8-2N-z0!mIgB^qOC*(7<{`=Kv^yp>Jc3RGmx%pHF`SM? z;S`!$V0hgGf|w?fg1DjYr9S-%LH)yH295jx%EWt11*G3(3b0u3N9kz>Si z0>O@js1NYYu)CSgw)#FKAbMo6)6&e+Q3nGq$ ziQcntxR&x*y!F0L&kixZ82Rtq4}dIyO#mEB(L((w@|%=118Lv%B};7=Kl9nkULTLQ z=*fSSOIECy*pn?mvZ5WN=Jt?s_F1i--dqCR^@{eIz zWIpo%)^T+ewAwb`9AUOFaVzF8svWR?ChXvUGXIWmVrJg?tb#>lv@7n~BVu5pzpQ4v z&wQ01vX_YfkKmC7YupFi!pbK&*gET*hAN;408JmK`vbR~)+A)w7phvDK#AraaIMcp zYPa!*cuCJTqI>B_GsW~z5OEOgwiyEq$db`y^eX;6WEvXXi>P&szK7zti31DyuMPWGX)aXv&5egP*XZrPmXmR<%`O9z51C z$=jCyj_~oCtP@w*qG*JX@_Cko1S48z~ zRRQ#`S$;+-|1M!@*eA;~SpvINL7X-klw%Ws&=V~~Mtxcdzw34E^vIPLOl7s4uzaMwn?7Mr9(Tjk@``l}GSr)@WOz+FAs68I;|N7eD%E!S z7sh`mbD!f|WSwx&*@t+*y5Lo&YWCK1xq&OSeP#F#vkPc4?(Z|Mg6)LQEo43Gk^DRt zuX-ot(Ge>}jBbjhRy_Ar*DH^1-nt(z;7b>1rt<&lGk#@yg@zjHmH8bja}oHCy|wx> z>CFtEsT1u75yC{CY0fK=GwbV-#v?@h_RT2Ovctn2#G~==cXX@mR_dN*V|{5}Zh|#l z>Q_IyE&<3&ti*q~rn+14vUW2>pW;~uu>jf|8NxJM$Q)xLXv5ddUb%X|gLh@#^8~I4 ztkXtwx)CjZqQ&KkrC?GX208X90Y8j);fv2!?do@p*W~c9?M8}ce&gdr%o|#-PvbrS zBL0ZdXWSEr>m;$-5CD*Z_Ud*&nqz=EcjzWdm9c_XZPc*a{n-f6R|57lqoUpaIwbV0 z`SlN?VHOq~Tx_nwO2w#@{fV~cq@Oz@4_HD43;TUqQG08}&ldTF@J_F1+ArDC{Vs>* zLQ89|V#L7W4ATBxQOH1qLw6ITtJ~eW8`2>4bL(5%A45vqjFa&f#YXA#YT4+hoVqC| zQ2!ahZD(f$r%yG}2Gh9Ga3p;Ehf{W{(lzK1;dsR3na%2|+4ml+p}G4)+3zF(fyx}C z^P#tk8an^p5yQr|IQFL;Pdw)lMq7|7z)D&QJ~PnugdLU`Z^sb!`zF*pte#eV^2QE5 z%j=0Pl%FI|#0+S`@07wuln(~R6ML=2tCqE0?Xu+9*|Q5a*m$}Bys21E#=LpYVnCoI zwEw~W>f?Bnj#;63$7ApN9-Hs(m6*bAF#5;1FKd<57x1%s}x&aVrb z`X9r#zie%A zzl?G6GLAg7o*PUA6dF_*vHSqWMfVUx`EhaSE~GQjv$e40In@Sp@>1)5#cB8;P2#@M z%91(YTC~M$nCoVZc~GWp`)d&A(3%Xun=*hvU6$pe^iUecHf$At2$Bm*_Lox~4~Wr*GGt=Jw1%3&YDF)AoDhbe^+T;^KsbZ@(93R+ z^=0GfN9{BCfIHjWRme_fT2>5Ltpndu-gBE@WcA0lM2Coe9yhJ83b>~gnw@Y?T3e5f zM(YwULIB@EMqP%|rOmcx0;Q%FgP9U}wW<;u8al9mIurJZ){HF6QTH%a@?MB4d9aT; zkybSO0<_Uw#TSjn_`>@DK+r>3NCVkJ!j4E7#puv?x8JSbyJr+H{d9ZvJ8RaJilXYw zH|Gnr*>B6!fB;CLJkf)Niv-$xWk?@3Xp`0pG_~q{2@7brvBS1KHX5S+IGP6G%qwvu z8_Zf5fl`?V&`K)W#rFzM(odkS(u|@%Q#Ybzh&Y7xc_+hG_b^4pKj9AkAH}`B@y*M5Q)b3n2+D#anXApMCbLDPz811NWwd=np2H7m?KA>8-}lsr z=S5sCfeuEf-D`q0b4c8qMMj0gdKg3tOzmjNVk zm%rq3UA?LER`^i~TOl8}p-vcRPh`NHxYuTn)PHhsJ6WWAv%*2YD*X%^<^ss{BK0;{ z<4Bv!zZk;SXRKKi{YB4^)RF6O#?AZhwiO}Nl;}P0bH6)3%DWD#WSIx+PrKH#_Xb`7 z$KLav^}%v_1%W%mcMsqsTmepE!F}m=cDUnZ9UGHdw7sjY#eo`T*g)~M54A5__&|xS z9TUkkf8s66-wgm<4Whm9IL$zFhUWBy+LD;ss#Iz1r-Xcv=hEFwt+szmT?=w4{l&Sw zKyMzKH50?l7flk4cpjd0#aCpQcq0o}Tv_XF1o=Ng^u2&$4ycWK^hJC;pS-=Nli(vgk7Aa}%yeMKoU?g!<~Tdg`u?s0fle-*G^; z(iiKHuk3TVM$moG;m%fD`C0?VBnMj=<-+-UhIbQgaOs8@GAd$+-5uh0p1`&sjd!RN8KU*oUhH=5kN4+3)>z&3&h*{HXqY{mVTFVQ1SKwK%R8uP;>6$qo3HIG|p_)g#I)X)o?AcEa7VF z?>S8=nLnW@uBHqhrx z9N@(vVs{pTIp~o11>g(*VdC2o5gH_CMT-0feoaIJxYr5&V7e+03D8>8-!^BCZP=< zBW$;8Pd$EOB!dHT#1_&H>Ah@Sta~q)pO#6#w=RYrz9R0R<0wqqQ#Q0eG9*}1BevA? 
zJ{bLM7u)_tsA2Ym`0kd-H3VhM;Q5Nnmr3JV$Tl5dHpd8xxx3QLhjT;+HWbz{BYimw zHx~REJo3G;>dy%g?7v{ph*a{I$frYiFbPtfXZ#C`ST#s!HyejBVA*eDLSXrqu9Ai=j8u0Q;BB#g4%glB_8jAZrx+JOCE>xK zIsa`Zt@1_QTL?UxQ82uw4U5C&`2mSkak}%6^OJs})X8O+cn8dlacEW5H+HUI7b5^k zV#qE;*LGDeo}$YGDI7AA%ex8FGLTk3{_+Ps?_SIYXH>#~brxtCG#Vf_RgZ!J zP~2zv8ghP71p`J9-lYu@qJ#h4F-~RkfT5yhc}b=Jfq?5iqO!=vj6LU7C#zN!IdO~k z&7+4MB3FxO+qYwEq7Ln9q7z~(ZxvfqXZ^#Kln9jJbU=j%+#CEYXA)N`{=nFg3DAaV z?v~ozRCThkgH>l zX}Qei&5hGbd1UL=+D2-dQl!m2y%%n_{g$g9Jsde7sPl&IhkMf`p4K|TDycYcBm0lX zMLf9fH-JeM+V@Txq$0ndr5X}X^ysq(iw{i9>O}7+4Q!t@Y}cN4Xhwk{5awqZ^B1DYfl}~6BLa41KxvZgF$LlYz|4`mFq3eU?cIaGE8LiE=e>hlIaJ!y`p$<% z)q^O`fed?b_baazwRqM5fbr||b4vvJ0cX%BtJbL0MwX-jO>BpUEBNGQ_R0{chjGBEmZA zwY>g5R04mDjP(Umh5$2=c3w6-oALq< z!~#l2{qY&sAHn6)vdl(teJcXpF!AJw{uYu)*JG_yfzz&TW^S(EAIH!3Yn@j3FWkls z_s7!D{Q5^_Kp0Ta80=|KWot~ZtD;c_T`2B}6cv)AQYr8bkpF^$_5ro|R;>S`4K6t! zh0BM5yhNK_uPXYBm*2u866y_G_1k4#f`Y@ z$f00>D*)-5_ntwm@B10P=~ID&UQj-lJt_?Onl#My=Wo20L~i;M-l08d*1#8bfq?5A z8_~H_`j=b%c1qEs=&zHELhXRtrRI6C!$DiO%KbhSb1RNHeIZc{p zt^TOV%xMNb%zzSOFSTEsaz8krQa5Eoq(wQsM;^+kj!iIz+49S`k{(^BAmKAp=<>?@ z<8}g?5ZfRqM1SBC-N&-ueCEQE1N|r#nVlsn?oE7wUY4ke@cz{WelF)GUv$}PS`^i2 zzQhZv-oPm%#@C1eT+RkXYNHjHbu9W&lYa@C{i7dmb71_&Bn#4+GL&tiu9nt zgumMxw{!AZjRtWz&*W3&=hh=}hJMH{18!H9I{QL2h*!$$VJ^am)0(A4`&{$+>(ezf z*{`7AT0s2d|E-c+C$f%Q0r${aOc(|`d-xmB*bXirdzg#9dLK=}3?4rCMr)$vOr_H4by zs#NkGRVp%rkplIP#;`%xzDaXOrWLER*eUja5)B!lAKT!tu-8|4+8jRdno#vUgC=!b zi6XOzo+w<#-wA1q8F+?qjsvKR0~DxV+Z2b<qA_6`AbgLwJY?Jem!+0KG{7Pq4)ArE3s=?{a4PiOzemycY#AZdw z9bp0v!{3D;NExKH%5`xM>i-Ct3xq-h(fS|Vc3Iz>yJHu;$p5r1@;)Bvw_9Yt_n#9Y z>S)B4zp6dA7bnCB!wUe+I8U2v)oN?BMdvD=o3m?&0p>o7+}xnh&Z$FVU}f1QfF}YU z24b&TQvQBK;!wkk-w%u5`{jv34U-s$XaFAwv1NZ;&`boD#th+G5*;ED?AHg_(P32r z4JKsIiSl9|p@-x@m93i zT}v&JYfm6EAxW;Rbm*OYh7hmu3y`wu4z>}c__!&hV2vT`CgYd48z%lipPpr9$8~<8~sF&-AzuQ5l8!&pjY9p}!MR*P+ z2`qq_4nq!V_!K@z{1wJAiXlxQjmnkBg@F=Y7vqq8Q9TnA$)o`n%x|lJ7=|ZEkAWUb zh`1W;Xr-Jbr;^Xk^pVwV$j(esu7FFoGkY-gd169(a(3zI9r|arg>%Bg41r-DvAA63 z3+b@T9188}qZFZ_!Y!rv1#u~!EMWw(IF%3fq`%)^6d#&Jm1lKpC#9u^3iG$_p%8IBLEFvvd$sqglShyg(%Duait9QNN3jfpYwoqhBw z{`$EBGYaDdAx?L`xCS7t0n9EppEZhgw8J4NXa*Y0K8P205kMEMjx#bc=C&So=+2hN z^7Atip^~rR!4UQP%;dRPzAwVSb{Iy~`KFYKtvx*rCaS+N!05{=CqkG3!Bi_i!Dv+r z-rn9mgJ$+g|A72gQf&(4E5jKQtAW@}bJJyVDr(jp+x0cX^|!2%DHb8EAx)tRIfeNj z7*F?`DTInFYDxvrXi*x98U4HKL3M3ubor%lu6m1NT*SZ)#(}hKG@hp})>lpsIorls zc}Pv^oNr$G<~~5;n;~z2R>$1@DCq?x_WNTafpAhCjf^J?!;lPX4f_yPfmcNn5G|Hr zPwh@)UMI$|X+(zqAc}^ExECP6s&66{M{{P?aUd}z(a4X2a7K`KT~h$k8M+{=&iDp} z5kWHk*l$lHh0*Cu%WxKn43V(JXI5!1O#Jj~mvqYCp9)FvY0hEPb#unV^84E;8<;j# zU3p2n=|hzKi#xuCcR5$Uo0s_uj zpZGa~s(m?G&GFsgF?ZxkTx(gGI!`UC+eo@nNqD#!Zc$l!V3c#JzIwKtS4)ASDW~NQ z2`Nu1+^#-wmbx))XVNzVd}laCAhkk4YhrX=eP#+pivIo6@AUz~0Fo`7D+c%ZJh^;J zUj4*tt7kGbYZ(9d{_l(@C_ygeey0{nwsSwht6ZPFYJtCOV!14M!Ua+zVTMf*vRI6}Lv{1oyt%UJ;PmkL%$4Ikqi7E_2D z17;v7DJrkPmIR0EfOb0ZQ(6;vi579K)fc9}FW~a*V{rHxAH?#2D4!*2lzMwt*WAu_ z4-z-;^)0`EOoqZ|W!*!v2112mmA36bK6;&5L~jqr1mCxIx@3kLWd%B-x>L zdeUE?kd|4<_@-l^Fr^=b}@N+%l<;T#;({;Tnf0xl_*v4_XL zrIgasRT00q+mPZguBs+EE4!>i{65M2a|lZQ7xDbt^EFABdFGuyRk1Wc2Kk_>6J
s4I@QAs|4K$y^w%9%k`#&rj06k@;>WJ_7Ci7+`R;E1#dCE? z;sgbhGhUDq`a?rjTZ;rh3+vo!Fe!@RJN^tH2>mV_e!rC+mVb5_?*Q1XvLT&c`9OR^ zoDgf{$8MQhMeb6addb3s!)`var^O>NIP-IW*zMXqP2xl50;hVz!S>!eXKOvvZ^rK1 z=i1#cv*4|xpO!Qd;Upzleu#S|4~gI>w5Sju0f48E!(ezC)X zy$gAJQCYyTpPj2P{h&J);;|xPcFb& zy3j!6{+pqyj;=%${+s(JV!!@MyvrJ^;>6EN?-D8f6^LF`e zsgq9Vbrat$VAALNMTa?oBhKJ>H>HdMbT&PWg`?6;rF2cg&(5xsyGa@)Und+^kJ)|f z*W($-OLsRD664EjURe&xDCN>=GUsObZhfrF^^PcQ%9&KX-F<(j?n>|<)&Bqb;y#Ui z(_v~Uv`!C3sU*9u_WT*$2Q86sX()!T~k9>P%t&^wj_=|H%5yG zS^%$#Rx` zqY+F+#pm2lvFWIBX_8sn6Q&f_xv0!O@$m^VyRx#v4)H_xda0=bvNREtqgTBZ0wpJ& zjWfZ<|IO6@gslI!M+A%sEN_{y-XtOf)aYNSA3oqIGc&%J;Sij>Ou97K;OFr(TMtG< zfLJ8PGiXG^5D7GoIz&+@`AB5zzh=6o2-DQMUgO|nv=A7kVQ?i%L!Jl<~QBzCZYfETy!5}5NF)({i|2+>`k~_Uq)yCQ%pr7t$jb(=$4rNR}T}E@`oc! z5VOjwc!9R@cg==0fGo6S7Z+W`poSNJM6UeJe*OsnOv~poT1%D^g$em3PXDv4(k`D3 zdGZ5b3Gi~#+9eDfnq8oH>`F2L#i!eKVZ!G`AGPd=S2(@CL=O(U8VJFVadL|tB8^#J zR@>hBiW_O7J?vQXZZZMu<-9%Ej8|YvL{Q;Z<6%C@HfyRr*U;ZQ^}j1UAerh|AkvC~ z=IcDwVcaCg#FJA^+ zernLI^RQ*=l)ju4;o7%)6W&A7|9+coGR4ZK^t>C@!-!d!;fv(+C+_*V zWce145i*r4N%>`^ktB^XmpE$U3pY2f)68~!^rGHYu8k>4dBVRh_Ok!L8sRZ)g%A}C z$W_Im;d`7+_KPM1y!Q2R^H0UKd3OGv!#q5tM%3~YU}vEP5VaJ$kh1z`IT?#zi11qK z+wJWYHY#V%GOr@7!s-|wAC}Z&>fhA{%Y36ku`=>ndqqh10a4Y22r|e-m=V)W<8w_F)Ai{ z8b}nm5}K(yswbk}(qaZ$4;xa_olAby#mUm1j#$gD`4z#BTPq=3|rQpIEYk z-ET&d@3zJwQnL}+#DBse-S1(bdGgNK38L|DyGOu@-hEsH?6hPKjrbOo{1k(M>!gCmUWIc-i4=gn(d| z!S}<9mQeLAnK|pwP(IsITSCGZ$!)Mo?|XdK>8`=B=d%h4aa6%9hj=kW^Bxi<;@-xT zaeRZrGFR<@&)<%8wwegHGu%FGbpH@%7k_e6U^!&l7a~r|9>k)cJ{@(jqUWa&`>wuv zB1>^SHQ|sk$C&;)A2|RgFOZUnsc-^_y(N2w&?tBsyJT}Bw0ck5nyg7ePG(|JEaqcG zSSw+3g3YG4TrJu^np;cHUZO2EdY-iT)$Xj?#bzS!2ukWK9dH(5+kJ@Ts=GX&ymY$g zXc=#Gk<=kR4e~Dlr5|gzH5)N5(@IL^;>Lv4O9p_X(vO9*x#5?#WcvDE^dCLjy!h#D z)%sQplL=x6K^CJz2`Dg16jfS$9BFSftUaByw2pY+$6{A5C&07hQ19Lt4wxYgEHJz_ zF<Ar||eAcFt|nFTQtmw&?31&Ey9NXTiK+ZCBjFpf2BT0ezyy`lD^uQGK*e51dI zS5qmG<-=?0D=CD&+%gg)hjIAz=wN)W5aj6)%>?xj1ddDbx*z-fD%vhtcGw1Dmv1`c zhKGkuJnkd6?!PEi+fP^+R;t4Rt;p3e(pPW&x+ zaVsg3fW1%>06hr9xH6j{%`N5!yO**soIYrjv9J_+uX88#OWqT)U0<03(ailKqCZo= zL+Yn_Kjo->&|EHIyQx&oS_H4F`)rwI9S`1jZjGs%HIQAz)a4q%zxmldxxbg8tIDek zKqQm2ALtInDoPoz+O^o%2ab*?@E*gc%PT&oHWksZyeDB^5-yA9NiY7GO7+q}RX8NG z;|Jc5f-H~TGm$hOlWskuZ4|bB0R=V^;kQjuGB22MjVMIE;sA5xUiBl(Be9+&PH#~x;Qi5Irjz(@dt zf(7I1i+3RTv;PR1_f6|TI!ZASC|$-MZYF_J!mR#dR?t!B5HNE#2;;xx6wPx68%E2Mo|8k(T!lY!MFwyVI&^AFJM^4<4PZ?+{K< zev@(MSoXuY%IyzgFM+5sPi{CMj>l5N5RP;X#Q5`_9>O7+XKg0|ynG4Uo7V=Crsvs% zy*(~KMq~-zXWcuEVyJjyr5b(SHfh|B^4YyN$PqgjzpYf-$v8*fm?i^O>9~;I20ccM zTnAK+m*yB~h*4WNT$Z7K5z6V8&d$50$d(=_K?*@f?4O_x15$>u3xL)0#dc-X7Gk4B zc|VMfa`CWU!GCV7cc7&3djpTmyz(|QTc!=e<2nd8()(%#W)?W#`D$5Olo@MnHFEM~+;REXE%fX-P>nq6+WZa?ZKsTxSPAUrV$|M+deL$tAG!qxQ_(uQOBrvavio(>D#z)M{(C=$6|B}nq6o>`+4A>I98ju546ON{S=937}noRRuUwZP6L zxz7I;?1u>hxj`z6!xAv_e=atxiAK5JNTF9*e~TD$1sDysTh!W=L3jwe5JPpG(toyd z^@(L>sm-vin#PE;@Am!N#GX~I&t?;F7(Y(>aM$kiOAM3

_whnroH} zsS!5a%YdYW+buE+lLxP_`Fz~hUp!m|uVoKwh8jS_qE&g)&YZs-=2|qJIo?m z?j9CAd?I?FRa*@T5BDZN{N>{|?Mg`Uaaf($+9Dn)!ahigTJo20nuc-nm|+e*)5pRs zlW9|m7EDHJB-PZt9&hazD<6DvOQg(DaI1|nWfqTrTwP5Bz(3^D-D{_C^JisZh>z*? za8HM@kDZ<})afF2?V!E!+*K@XEfiwA@uP_B$VU?USi=7?*l@%|hchp5=*&^QAK%n` zYIVhpPKZTP>XsWMr~VvZW)Lv#d(2PL>}+z{KYgATr%3|z?t0MRL^}WCMuh=UOQ8$3 zo0KI^vcguhJYo=aWrT+~hq;0S$%Hd6!*g3Io2Lt)RsT=||HGpKR3K$C!r$L9{NO(u zDU8O0{`EJZ&koImS96~-pKM<_&xc8W{_NQ#+`7lPx7GWjjKw~bJ9%2uNkv>W1_5rV zL~bxM-&0wu=Jc5t`efxL893vcag|YbB7S@6_v-42i0Ua$^EzR+Xi#8dvF=ScGm z1UL^XIlt*jz$&XUf%yA36eTPB^S-Hu>Q4Nn-+>=#ci$Z$1xW?krm{##=Ur`FPQ=Y{N}BVo|oz{IaxORESJ@Rjod9vw0=m#y^P zgq*-j9wj!?mxpQZ{hqpXX4}lgu<`brSq~Yg{6(2Ds(t}>pv&g{#}4N6Y=2i_YpdfL z$OBT9bo9>wguM&W(b2IOE>tWv8!P`DnjIE_{I_-gAA*1(3p8Sc)-X~1AI&$V`!C}8 zv=H;A0YS%kY;T`vJKDt_g)6B8D|E7#l5$mD(_5J^?2Z z8x+(eC|F<)ctPE6|BjyoUKp0AwGVH_%RzZ$PP<)ND5Nde= zbZ`daQIRO(sG>)mD~gn9>3hNtadTUx;iQrhKTYDG1uT;t%o}Sd^sP>iMYatO7hOw& z?|FRv09;S;vr5qt`Hn9+d5~y8d0Hfr2ZZv_OXXbJ3>*?>M7sM`Rz2mD#P;`>fu;pC z;`EZwuUD1;5a&F6{tmlnD-lmM0>kq;jxiqdOoa1L*V%wf*rg!H6khwgH@H0{S92rr zG|E^T{5Q{cXTC4e)uxf~ugr*xJQ||=>^3CU;j0~^|o_b%0WAIBOS zCoIBV9=%R3O(;W5l?bUr@R)jRrtB z17UB<{N%9Cjv&p?x9D^$1u#_D()|Iaqzkj3Z7uf>b70XNF+Z44#BjWkV3yR;1EzaM zS<{)hk*bpoT9y`QYT+WaIFtit$qt+0CkNhgd`f)lXcjt!lgNjn;8)zVI*7Xr*2hh} zRiQ`ZOY~m`FnM#+g;!TJ1d~5^0bsM1vVVVRLN^<6Q61I)EfbTkp#ink$rrv{{6F4` zhhhIKR~cm@8yN!ICa9~j>ypis#xyhTHan=!GckT3^4{NlhRxrEbH0c|P6Zc#W?S++ z|2f^dHUub5r*O5bbf(aSc%-b9tNriu5>0TUE%S_khRz6O{Y^Uq=PdqX%m{&PHCt&^U9ueW?viTj5v3DT!=I4NJnTP^IBg!YA zHTnnZZQtchGD{Vdh!GZ6)Kn2+))+iRmbikG)mKY?+WIR~C6*miBek^nDweNvXQ+c^ zXEq11=qV>N=_2+NBEKLdji^2Mgr`cQ8afT^U&YF$9Qwf0=W)}B41?H*3{B^acVKeR z02Ivztp~(!X3&6Q0zE(rudL=hpCzQd`oCRwNTMozj~t>XfBa5QysIKoSU(Vd6fo9f z@_+%n*8 zEKu)gWh(%qb^aNFVrExxoBw27Y{WIvWNc|@8{me*cmmQGi%Lt2?T*wJV4lYVjkC*? 
z(>oRB)9N4h{o%|<85Kp7)z#sAWaZ__;vT#2Ug%~IRwPbCxG~pMDbbZq`@ClhvaoTP z-4vdupDHJeMV`2QfHJnvi0lva9>+LSh}@poX_lI+zgWLG zRxPy*Sk)zFm`gR|-C&;C|E>&-s#t$-xGWk*F57PM+Pk3BzX7&O35ZH&bUwe$1vemmpZT_(<_tXB?y?azgg4j~NUdGsxQ@JX zzBIQe5?7E{nFUZ|FEO`a@hr4>%|l1<=wBE+X70^|7g~wuo9LOM57_${vQt81zRMsB z4=nFhx<ILJRAJ6mJT}6{qOkx7 zoDIu<2oQJ!MBvkaz#aYw919|FuiZS?|C_+y1I>y3^%$K$-3>kI-{v3sd=&7TV~l`Q ztop`@u}9zUzMog>3_3K63=}dy#VHMiJQjkBRX@bRY1&?u?p;QQNPm$tE>`4d9!<+- z$dINWH-v_!Lxk6!P*jKhTbLwabP2J`v*%#}ca(;hPe(IZWNAMAQkFW)Z;j3fDT#Zc=WEbt< z^sVv^yD3swwE7}Eu*x6p$EsD%c@MQEz;KpgX+rpw!D=Rw9;^*+rKk8NLn57}AfQc% zfXH_M;Ih}eKIl!G06urm1#lU(7m{fCkilayzIC}xTCI{HaVY-7za8pQa?t-kC%Yn_XVsW0m* zIv6{nDkGnDypt~BqWy>b&9gyN0!jB#c4yH9W18H8J^q?4lBOQma?r8NjB$uWXJ z>Er}BLGzQQf6ONq7~Wxzea74Z2SZBNwSi##!#K5FK}jY%p-*yXTqZBKT){|8+|**H zO=)$xPU77&0=AKnY$-K4lWnh%!M^twpGsC|#2g`1U|hX-jd1D~ z_ifSP$}y18qtJRjDZGu3EsK?&_}1Avsv)C7GZn2LorkMr>RSTNlt#2;E!>Zkk7uU4 zi7=Gj0j-jdp%)-N8Mb?vl?c>==WhQ=$q9m;6rljc)e7XPPH{68B* zt`ty*Pf|uY{_!_Ki7vEAVIf!|5}-kUj2ZyYOr;&^wv$Jj;6#4^0A5poPIFh2u9Ij& z`8;1%s;&~e_|*WcJ7$Rzv2#*-E4!|(NvIVpX@xDB59FYhYWAh-_!rz~^NK4tqx-x= zFaZ{r_7R3Eb+_;TexPWV938H1A1f>6km*7 zc6Kz!J@8`1pr9KufP53z-!qDMPjY*|(rboeT8s)fDc{ZagG&!Xiz#vrfITr3)5fgk zQ|DVZIpcmgYyHUS%oYi5mLmTeeny$jAMu-i{2zD|z#~`4p~NulqNqH9P-aL4j8Cwt zIC+r?IaDr9w?!+8&(dO7c}*65T^4hIBrS3(jEWHE=6Ae!H+M|st#exhFgJ5jh>K#Nt|KCny!&mrpta1rc*<7LN6GYzXhq0HiH5#s10NVKaIP;K$Wpk-q{Ey% z&e|s>I4Ar5_;}H(sYWk`j7Q?({z&Y3Gx)V?(Q8Kzds(>Fk1#~+q+AF$SIWbg-*-h zsd*)bLg#5kX~Ut4%GnYzf|`apY}|&jx(S#oYzpwl7mgg^Z>u!_^OcWYP>^Z`U38l<~Ry1To(rA0tMy1PNTJES`#MY za>&@w;fQHq{0`16&D2|0!-G(_#W)@W{;UyRKv@Ils9+fOSte_P+URsdySBXXRBzu- zKOP2~->pLGFOZj=eeSAIv1S%fB&KEAy8|_MgsCYz9fzn=4p3jk%+!6b3}K-j8H5$Or_fZ`KvJW>6!$o$ zJ;^~s`$k0-m(MxzOC;BL9RP^Im|OijzpxIAZL*$zd3e98aC*IGIdw>qo15a;)myTi zVk(3s*s%$dcBWa2O49cFATR1W(1&tl2^W@G2A0=WYbQUga(t>uZ{Im6<(_g5;ARkt z4?BOZ>i_pol{C=Ns<GRW%7RU;qZ7Jd>(WIm7 z4Es9t7SH2lG4VO`gi5`^s}f9|rty+uWi|{gUbP=vvW?rdBDO24tU4)um~_zND5>yM zdWE1@zc;5x%3WVZh%+`%+mmQ$+rIsCmv-npcQbnVIDpIscn*vMECfPGbP`zJLNX*S zBTgiCLWvcp=L+ZXWGg=|Jw<5rkTWm2$EL+>Bg&_>#z4&MZVo{Sk7&MusnDCfv=}n?v97lw`*ZG zO3Npe2Wc67*)*1h(J4kvSrV#(_?vnC$8h-IP9m*U9qreSmsf)m43snpWMxBx*`;Vd zv{0bo;v~Mx;A(NolT)+q8XV`-IOJ6&0~&8}t`R?K3aQW{902hu@aXn?IDHI0w4c`j;@|;j_x#v{x=BBbaFi>kA0-7yI z{xr?*G5cZk?+cOg3y7ky8K!K1e})8KL!<3l5>kRJO?U~k%XG4VlD6yA#jX;RDaNhFEf&Es+ zw`(zpM=KjvPPN-VND#J^%E1`SD0*D(zFackW+o+y310zP)RV6o=rCW6T*z~e|MPJ* zp!W%gD%Te_Ryno%hTfSd8SpDyi3lI*+ImYZcmhDJMxovs^9l?Y98>m%-S*<16@}MP zYeAmpQ)OsEq9B~vSGLT#t$5`| z!pe0Zf!Uq^}xd>q}uDhYpmK)6w$>0pNEUp(q10dTKEp)81xE0BY zlKmlP5cg*bMXH1`WM!$T1{xvM{A1;?tW!dk#PJx|V||#;q55V{WY(+JgZ){k7mH3z zL;Jq-s(Xpg->sKGxES{&~R{ssf-eHmo`BDQt4E^Lvt*0AK7zr5bH6e&FV>jrtR(qn2@HeqC11SWk2^%{JAQ~nY%Z(P`E z+26}=hPWBwSXFF}K#Kh?oG_#;19Jh5Mzq$~0k80X&nC~m(=iT%!PnR(^l?j{#9w$O zxVI}4N1aD&?4*BX=P~OU){Kx7c7+TCNQ_&4+U(+Mh-!Dj;Awfxah9uO<@3p?5z-;8 zE-V&=voxkoMLAhldv9^L%i2Yn2?~I|R*NBp-uLJ|q{O zh$N7;NLB|)GYbP^VO2Zp);+5l>ha!J+!#7d{Uyb*fDnX9&1XxN^iragN=%9U%-4Gs zBqGe%NB_J^*stUCdHjnH{KWw(8K@XHgH=0Wday0LPix}4U4Vg|d}?uQDwVVkC07Qp z&UAy*zyWTP_QyC@?x{GmSEcg%UP7{Vh8n)OT8E~{5okowfSNAHkt;6(R_k#uLe^nR zE0vZmjMv?3FbJ~v`bf#z+!!hhRd#j*gieV*pK>0Kbn=F$%E8^~o(oH~u3)CYHkC|p4kLR_M2icUMY2B*PK9Q$ z|M8Ns09+-)Fo+7M+7`)`LsUmdv$Q1@1)c274=T>z)Pgb3x>(gD25B?3jkXj`$SC(^ zQ7AXPKl{vZGF1&jh7xg@ILZwq=bfw3F0>^|CrS`b>W+jlGhWj~p0>c6mCeVz?^cH? 
zvMXnLe4oGKYci^T$zU(?us*bgoWWGgaizWxtX&T|K2Jvo}dA+M4#5YqYZD7^o~3><21NNQny3YtV>6ba1NkPp`iZwWPCs%o&8iZe;ljETgN!7#wP&t?L#)3Ohxhd zg2s+bs^;?o&jH{hj=u z6gw5mOHxnk?RxpVm^kVSczH447FhXTe5Bw`PUPBK`HgSDO;A0YF;>WCA0 zwk!pcLH{&1c2E-r`@xdMt$sRvBGFonOrE4t1zA(Rd_popMzjD#jD=?lxZkJ_*164O zui)%X9_rk-M{+8b>Ovo{6HLAjHjm9pQBplbl_AsK{?TXPQ+&3-(A6_juqQ{eGPZe(eLVd78dreq%phOEEHR?M99Y4Oj19e@nj}1Oht`_q0WOF% zc@W)y_c#4lhX{5A>ZN*R_2RzkVY_Ms`T=-ewwO>viziK|Os;aEFdj40!D*V zeh^Q6W#D_IuO@6>AMSp;4#TQGp1UraVPcF8417!J_p;yNu;7&#lPP5+C+kwSmPDpw zU_cEk3Z>%b9i|{%yF%k;A0(xYLIs*k){N54rjvQj#jK+qZ--x37A7FA?cg~lc@UsBZhGu#d?hm7E z4SWyCVhPMh$g*#POeXoh-eIpEpGgmhxD?DSYfQA=9l4JjS3I((Z);LeD97%s8gbbp zL^s-OdD}xfjzwck2TIqHR)En{6}A@+{RKSzH-Fmvm&-TUZ`vCtCo9@==fmntUx?re zzgU9cGNYM;0GUrXpqXK`dX0P4QMws1Cm?|2v*8b;T8$RaC~oA=vq;p%9=B?<^jpe} zK~FKoujfW@=8sD&klkbPAIanI=2x$FMJ8EO)QK^7Jq+|%AIy6{X=>%h02G?JRHUmP zq~88QEDSy=16KRdaPkfk6}2GUAM$x5Gl@I)E&Pu~Ku-Q@sDkH54*oQXT?inE(MRzN zq(c;cmul`n3sE-0Km!`73##T#lar`k`3fDwaX7eQ2c4KK+gJCTRHYe`c6>mPW!B&> z;N|qBTf7kkCpNUVGmSCbWRBxpP7yqs$#U`?2HzM4d_oLW2q+&vSVP>hv~{@S4a>pe zp`nBeS60%^cZ%d5yY7TW5#oeWlE%9eX;TX^pRQWNU z_&;}R$y=Nt^zu^7kP^-3EsUaP)AcqrIz^zr ziQsTkRz^C@zxFyxuQT$`>>GoEg@=_t{>Y@ybk8@zu=diJWeWK-o)jlS7|1Y>bq#L8 zK1k1VFIi%qSe-7omSqD;%A3I9Tnw`;nxrM=$zWfuOgZpl6#O{KW{<2F)G_F|O$!O- ze!Ff-xofRFXls;+o}ENf`%S=kmAK<2c{fAOdsl@GPAVS$R?mZ48=_yq8jJ<(3-m3v z!|aANRBi>!^cbyGjs1Yat#$ftVo`@F{u62r@i@GAISVWrn#*6+c8uyn=h9CwS`4zC zK1>er(K}{#hFb?{3jmCDQf46%8u0s+2{9gDB{IaZ=84hHrBU-_=wWo)nEE$&7ku74 zwz|anzTWZ}0dk!;y!5337R9^dAfOGap>N1KP$>b6mPHNoOAXliz)<7qPlE>)v{;X-3yP^ z1e0PTU{6jq4ObOOzFI3J&+%(aBnqMT&7%SvRjO1qv#RgUJ9^gJSQrnpssv!Kd78#; zLA>a&YHOrdrPE>J<=1w0kVb)u7re6KeWLd?6T<9JkrX!3kr~TS(H8Q!(LEawi}Wyh zpKs!CKa{Gc=`4o5gn8w87f=L(28bx{NxrZzpWkk~tqj$%sm7Ygj8I^JaZn;d!vp=V zIkecyWa1CwG?36Kfr`f~($7A?3tg7ao>&L0iQcR*glZ;;X55xl+9HFIe7SP}5yAlu z_Z4HNm@A=tW8@gf1RCIFM>8U3&%g{C2>L_i7{t8@LsKm0!VC+mERxV)H=wkO`CJvX zrhOhz*%yX+ocU_DDj%(`}f_zgdD zSo7KYiHHxW5`!5HHy6O7YG>ZYCc<2CUL^a~Tln{ryQ{GF9t@W|7dG5i%9$Eyfh%vJ zj|p02#P>?C9MIpU19l?a--OgT2(ct6O&;Tgi{L7az9Zz|!`sw(*Kd^i+8uQD{Tj=6 zfQhwu82MoH9GOVPEHGLx1zdZVCRFL~=Ar>=5Z!m{JAhS}yZp0=GL4GY%p}YO`0rxO zO>b{?5QipXA5Hk-K@e~;w(S1qBnCv)0)!X<@1XG^7q{#wBc=|%$vFN+2ERZ18Lek3 zIdbet;zSO(u~9sDj`}4xD;A#duJHP5`lOkvFY#o6#KA!hG#6QySe{{+ZQ4n*^j56O zM+PFkj7JTgpAU05shyuCY}Fmn%cgfG=P5=5RWoW^wR$V?c{kkxTLBB;0SkA{HNcqf zFnHe8(3+S~LE;n%@2VsQ?3Nc`+ zn825b6;ioSQHN<1P&g(hF#PiRzybumX5;GDpuYs6cBR7vc za*p&1A=UBCl0t_d%~W zpwc8v*e34MLUp2r!JyN8aVE%h4DMk5avYwoCQqoAFWUXUMiFe@lu7LRAF06=GH9ck zhnZ6*Exb~B3o@!W)p)r!#K#{fYrr1&1xO_Mg%?tOe!fBqnEfm)%pr5Xa2(2-0?x=} zXb%>tq9O_>mo-SwqnIyk3(f)+h623O%@{TyMQk}BD-=h{$wZX%{yv$%#u@(^nHz?M zTo0~Z0UKPcr!g)gACzWjH-_8(btDB~rfg2}c11+N!g@!9v1j{pP-50u@fg7dkNqpG zg*72KLNs&W(>3O6nm@LB5wVrun3rG%XCd2HK6H+lDu_*1GY-YTI_^e=Y{JmMChs6! z4!d?wr67d#NZ&S}_UG|8W-7raecH=nYD7~P0KH9UW%3il1Nx)@)1#;8&zutn5dSig zKmv>{I~0!>Bbz5}Qil=703%@SG77lN8Y#zJ{Cd+qOi0`JyXp#|sulo*bz! 
zUb1xqGvoXM?F&vCCi)6$GJxvXR7giSp3pn{d!_f|K|OkLP41fK=sd-~%9olq%U;LA z6Su?qbRsH`2Hi%w({G|8$4^Vn5En8_)9D81om<0F(0m!IVM2qI+&8wQZq5&u06J!o z7}G@&Z8dC;SH5-*EwW61XprF3{I}!_MItoa}K40DX>;JD8SP;r8H!S~E{%x^J?_ zb{xc7u7hF#F8GgEJ{uGU>gw>N&z|InYz`0zL}jK&-X#Jp==*G5VT0zgLnfmA+Rl>t z*>Xe97A=CBAgZtofk5vmud|${*sCG6d@Y(11sVuMA?cSZF;fwVTh6>0RVxE8En$9Z z;k+3=@BC)0p1RArDjSN4_K+#dz0g~sLWe#F1(j8C-+1ULW&SK<@9lOy?dPvbnI+Q4z**q$+GT?2ZdQD6@n@dRzhrC-G(T;k(sKs!X5&q@ z^Z=*M!_*=R8HKi^oW%;?bJ)P+RvULPdotC})ZS4Az#~8LQ7i2j`EmVHH)WHT_UN20 zU8~qd&Vt8Tlzyy)rveS6n4#VyoX=0Xhl4|)ZmQg!&M2=E0YBOC)O6EU{{F;ck)cBG z+bs{}WB|_Se|H@V4OyFDy~E4T^lX&35aS7jv6T%W*1;2xPs+KpPX13^A&+F^r z@nqMD|6Q53)a}EgjuEVz$@M{XfWxI0_hFNRs0+3Ey);9sq$6>r+6tMwg-|6a9cN6z zmQ2oX(O8QJusYT33B6KWx}=xeaykT3VuQ+0$&Au=GlMZ_RkdO?pW-^ zBR|cl=iR*;;%23?ZBPl&JD46zoPd(ozJ=quaLuhw8^lTk+yY7BS@{nniZRC?H(#g+ zMj0y%;yxq^jE;dH*6Vd4LlmD9H#djhjq)`Dxk6i}-{+?3xG|}fh&Kg+et_;zVo&h`S>d!N3qtV@*)tbT| z1>-6GSJPu1^W054Qq!Ngi$QU@fzRhP>jdrXLDzb5N(&!)Hr#L3t5%Z(nDkIPNqSSu zf$eQ21AieM$`u(9_A-eoy^XLMBU%0VAqG&hXb?}Gw1J_QC~;MQj&3$U-0Wr7O9v@r zXv;>SxNI1I@>B@cg+Ty7?ki~Em}JR zLZ-Di2{V6KzTzW&M8cr144&Dx(E?O=>>0z?=R3&bXNBF|AV#12dv`6D^I>YN8(7%q z$fN&#iVa@*+#ppfQx8zhQV%ka=QzG)d%lEh-$P+w^p{HIZnTJ4&KK(06U{xw6!qoq z?;|~IeJgXlBVHcPXUCin4G;kIc=Bi;GBr+2SQe=9%_+!53chwDfF5@e@>5ig!KuV9 z*;-~#-dis1F}yFWITXcH?ExhCiv%In=*?jKTI zqFt9Q0(0e8Yudu#!=9Y2xj_!iGg3fhc^EYOH|3-y>M2&nClU$ z>5=N^Qb_}JNaW?qNN^0zS{Yues;p=~cGAok+`y@!Z?#LL9eoX=r#&JAZhxg_d_LV@ z53{r=Z0ylDKc0#pRQ@P&*Eu|>va5MgaQ z@`w4SV`X$)E1ZrSmn`g6{x5i0TSploIe_>s_}Gw2m~k6npw?%t1-bduHIY*PC~H-3 z^*H@)4>3FqOMTOHX39Qz7m!{GF_r=0->GVO;EXlku1(mWRzI^P zTB{wKE70U=y^`l5B}l8k$`N|_fsvjTlJf>2{-A)U43s*DD{A>~YXif@#V}lm{Fs@t z%zREs?Ja30A^Gb#0S#NXBBEli#o~3T2ncewffbbJaJG78v3kZ-=wLiXtx|_6mCwBY z3vmu#;`=PpiIdG~8r86Yp6;!SW9u95rIKcYBIhOCE1<#-&KJW%T5F|WmLz)t=C9j3 zS#IESol(E%d&7bvC*f8D_KyiPN>H?2Y#%?~9*)O$Auxg$1PQ6iGpqZqi3OTn6Tw$8 zSM22kImBNgd{14|TXxRjssxz7c7%QW&1_g{Ad2^6ub1;VFcX6(S(fGg%oDKYqyRcv z=P$7~Xi21O@RguNzs?de1m6?iM732!xp%*C7D54E7+^R7c;J|f6ppsJX<411=^x?1 zv^<^>q=2^)0^LkUqs2UWfz*g#6$g>{ z4)6a;jo=*kn;J3k-K@*#yVu1QxvS~oVTy0_qi?m6a}!MDG~jZZP@rG4$F5nN)1PNDfc2<@5!E;m z9o68r&gk~)_;Az#J_P;_dXb%Ki-ychool`YKQ})=}gs( zax5xenCl#N8Oy3C60kGKt!S-EZmYj!%j{6gJG4UIJ?}`@qONVZ{+U7Ak$fdm1?+|isDW1t^QBb_&5rrs(Dy57(LhT(K7lyVhguGKz zlgsx%Yz~HHf&}Uijr~CI&R^A%XOJBkiU!2143u;Q0Kckg;n{Q?Y{-OcLG(o50;{^U zJ@NHhz7liQivi$1A4^VPOI|$aJ1xMtJ$>!w9+r}4QWz2-mx-~|=>u6x#vZ{E3fqK z67T8_;`sl`Sn2a>_*`NdyqtZ1t4zezi?4Fy^xms*<4?rOMiwIf;)K+)i&Jwx4#7R* zd*dJRlfZ(`#W*|DjpNuf9DUX*k-gjYHk@Y;Sm?U$((t=SwOwt_6H(_x^f{{40M4HE zzs??tw`?uD*aY!BwU#GKl@9$g{n6_7a6$=MW? 
zdJNndPQ-%IV`qQWD`%fk|P<4Gty>m(+AS|=ruY);j4~0KpKVvP$;#8~ivW=YDlQkb&oXV#x+h9&4ws%WW(!`EeaWAl z1W(I9j3M08TP0tgmL_fhk&PxO@AV@te#>W7Wd;8L!A`g!e*r)_l@z-9IH)d+u|4I2 zQoYKBdLOINaKe!3>8*saIVg4g`w5Sk#v)9xkd&(=zh{3DQV!1AQV(iW4+jG68Vi20 zzy0EYak$Tve8y{Aee=%al-*`ReRfF-VQ^zjLhSa7icaD$L#5_M&p^}`+rzho%YEzg z&;w6s&OJxvr_6J68ki;T2L^X{(}5K&2A~IN2AZuVkHXvkrhpT003zU-tR8ifiW;x* z(&rSY|NKYq-#}S^_Z+^)@{lgkS#rosOU-kH{|h|;RTO%Eu~gx>57WE7=;JC(?m+cX zFY^a`aXpv4eg(X^ihEG}yKPyD%EJTEVhvAW1yUNa01N~Mem90){gYZtSI%x}worCB zq(jHqIYhWnp86uO>4xuC){wymag{H*fnIZMsh-3b?*Ld`IDg1Yol8&IX}xHh6PXYy zD$7oT@c@`-UU=J6$4z`Q=$rG-!>0NW0ta5@8NEz@3p-v!=NREKRT3MLcV*fCg zJu34QuINoTR_eCYk?xuvfjuI73t^fTEyk!#2?D|g(aJ{nu+kHcE`e1AzxuJ>Hbz*{ z+rYqG6?>X#%0>cn&_l}-)wzGKIQ9^Dt#NKzm`5#Bzez)r$D{a5q>Mryb9lt%Y=8!6 zbZ{n0-+c(@SW~5EaCNT^f39)g*SOi4Be*hQww%fo$rtFgrtX)*cDrNOBda`e_xLOO z_xf#ZAzK*o1>9zGg{sU0jt`M5Za2s2O#(pP%d=5VhzQPQY3~oNCVfL{iwMOdZ8xkC zFg>P$RsxR@C;gsNRAi8>aZKiaK!L2t%P+1Mn~4^`&o))Kz4*xBgQox3)m}g!5fOHY zEKOR{b&1p#bpZOk6?AA?>aWVNlGb+X0AH*D>27HNz5(*!N0U4L4-o4t_^k**@U=ks_B+h$t5HT1&wy-D+m@UaM zy!!84v{jpIUQ*iRgnx0C4i0_RRvYTZYrHY<`T1ntK(Of+B*3T`v%Xz*(>oILWgti1 zG)pH52az6Vvk_l%WvoEB{IgO(VqkB|0BtcWLIy1~UQWt*mEG-Xy>6EtKY^}>Ff;s_ z(8d^_hvLUugVkod?VK(B*f7Mn)NW1vxzJmRi<5s@|8V?qgv{9B z37934=W~D2BR-caKhR6f0&$N7)lAv0 zs!r{0z4VvBYW5l%od;Defabt|8l!M1)@!aR^%_0mBvrUKL_fRJiT#k!lgOXEW7L1~ zUgwweY}{C`32gvSJ^VKY45x#UD;vi?s6xT<`a`Bbo&`Ecu^ifNI4vzkP(5I>ptJN% zX2-ZVvG~IjWqGO}$ytE2DP!H*E6r8zPg z1%+Gl`VKIO17Zm8j{Be){=#X*r4N~D2tJf_Y|qhv&OtB2LMxq>2Jb9`KthG9*yEGi zz84t2#6Na4Y$Mqd($V!q@>8j7ij$;5){sK+<@fIu+#*l%h!ANh&yu_Al$6KTQ&3zy z<%W=0G!D8-PxKaH=wjfGEUJja}dx3M3xi@m+1Xk5L~geVh27S1BH+w;cTGlR$S{!gfL3; z*gEi><+IxVxgPA9Ds|w&qG7;L&E0i8iAXVu;`R%Ug2V7fyjpWQVDqrRL5MG{-Kuym zTg1^)97v3&y=|v^GUGf?hP6Q#IxSbWT!}Jy`DR98Kw1j@N8bH08Y5Y!L?tXxbg^!bqAXJ;7|K*iV^I@Ki z?4sW16(2LIGtTq*+`?dewE*BE`y3|lb^4t6*L{tW$Y5xl8E}NcDMtQr!U3}x@ET&h zr|jgt^a)zqJ-81SYl1KsG=&f_846TZYjOUneg2@e#x^6KNWQ|4*a#tc@dcE_!OxO# zZwk0;QPH_eN2)vZwv*M1Q9#%J%`A@M-VPLcE&1!ZP4{C#9DL{pfJP=Ja;UnRh>tG& zURErW5ZGFW<8h*^twc)WMmL_ zR5Q6vy~=re`KPX*ADX<~6wreXs>&qqGjQKU4B@1qV(3`NG88(iI!^QmQH00Gwo+LR zll+1puk3rXNK_z*-e6BCGufAt&7jIwQ56PrTU^W^2+sbMp+jU#%Er@h2G}}NGroe> z{tpQO=7d^$p~tLyXij+cRq+pT2+&c_3a!!zj$0rO$iUxw7Wgi_W@yZ~xE9#<6;xrc zIj7$%t$!U_fB&k_(01?Mf?vOWWxoBIVC=Eg&0;l2wAOi7NlH3MqCE}Q==py$VDPDO zcBPlgg}afiVJ&6+qxy4#yY`QhQYw1d%PNf(LSB<3YJ)61&vS|C*BsD-$aK^g^Yxj{ z5>KN8-PKy^s`2|MXqwEqLJ2k{U<#&prkLnO)oGtBRDTn*0$w3trJI~)u^32)p{*F` zD|7!t`Mn^|cNh(sJo}HKrew;Y)!(u;^WWqrV~G8^a(qtUt}%H$EH8#m5Cr9v;ebU3 zpyEwDHnIHiH_FBVe4>x}3}2}*L`DBe36lQ@Cqzd1Qin|23r~ATAp(1t;5Yx~AiaQA z151SN7t&hR=TIx}O9bN-w4J)|;f*TCyWQWj#v}flYXJ&+AtKtD9r(vfvLXjv^+?;D zo*St4zYPrL$iY#QbCHjV1KHU`b2V=4cpSFgTMQZk(g}3ALqjrUM*jrGRYrGoiqBSH z6XZ@0P}^Ic;FbMNsQd`-E0$7+3k}XljOrd|UVD%{Re}YD7Da2=7*y;RrfPrFB0jKF z?pBdPD-I$tHip7c0e0KP@EZtF4e705MvpAiCJT#VWdP2p)Jq^Ha+Q~a_KhmcmPvj1 zn%!EVuR4)8g$9I%t)c?r>(1&mfkMZ5z;5|$f^sMn7di%m$jOzsYod?z6a|;P45OU5 z!ZV)v79blNV2t$J-6np}iA+AX>yCMt$V{tzvrMNE8+}^;i%WOm;}$_c<q2TS``va~&|5bi4dx`kZ#dFSqY=y=|5I+vK4_l{euh*vk`iFIVUQ#E zlrc$7pcNY_#uU`+x!%?cT9b$D{k2r(02?Bu99gW*|NZ1O1PSSbvx3_nV^$5hDs2ex z_?)A+hBM`*6B!B*4n96vwEX=u8ZrVOt31W1UdID!5R3fVt5@0!AGAxF0is`aa4@D0 zn5fO312>0ENx#N_m5HJ)jWMw}J}jICzNY*8KITvS;Fkpc;(ZTy_l1T`PZ18)4IsIpDZkOYi5H9Z9S33h2^xuhb$)onmSzG)wi_>MBcF$OO z6X&>r*nOr5@BZXn2Yp#b=Lp6mtEWvduDM@4F;;-Y3Funs!Kof&Q`53GnLJissxl}U zi0Cq^3ksF5T$-0iYC2%rWpq!g%m_2y9%xW-Oqc$7@1SpW*}ARu+@RgAZ#WW zDU}5{eEB81yh^d<>qAo?RJk6jS-H*QlM^#OePM+f+#lOfu7?tg0es7^Xy^Mv0ELvR z=dmG)X7tQU7@?4mN_I#@bGYsSuQ#mjGv~?SQ`(5be}=a-l{GA#JoAYjFijJu;d_aTL!$98S+0_t$_AmWAd#MVw+)D|PLOG@}~Ni>&@z 
zApadlloEm|(!BN&bB+9%!a{L*-IKOvrpBt#X(%b!T~kdj>Jk2=0Ta%Ef_qg<;yp%^ z6SG-}9+`Z!3*>WCX$t$DahoR4VzMMmrQmtn%ri=d&V>$0qO< z5JDku5qAe{NRdmJsf0ZPbp$0{mWCP=vLuy+p!3Pe-`)B~)XIilu*G0tGT*$WN}cN| zMa>F9`-pNOOZzxMLXKkL(f23E1qkpKE&XOU0tv;G)3+}YR8}EPtB!vC*QwJCQwg4F zHV(`9?;yZifCFaX@}tn4cz$E~(?VR%M@#d|5f6T^_LG0!q6hS@oiz)ZK;<6rkr72j zJ+3lcP3(ujtUrC%mI$hW^W2~ikJD~$^w%kEcK|9__FtV>EQse}N*O7fjpd`((^}XA zIeLiEzckQi=$5Dvb1WR3Tx0R5iBsd=BG(aIt+vl(o7>wsbgy>*R!S#f1$_b5hh&a$}5D}lYOspEjb1}F2U!J&OKeaKAFNG%%O``AZ#$#YrZM-HZVxG z#L3z*UPih@-}X7e6OiRK%E18g975oHeztu7?c6zv#{yTi50RjokTOTK`<1qp4MDp3 zcQID70Mxo=QhNMf+B11_RKtlNyOd$TS7Dh`a=?R60i=R$8 zU+QP4r1w4KYe3fPCv@dZrji+)&2W&3L>ZU?i?Gp8AgjAV?h|j9NE#29ISxRL(;`a* zqk1irUjIOrRv1@*K3gZ0P(X{`>B!Okdo=&~6-*Ar&X)?Bnx0DzaJUMEzkNh-gC_I& z*YXUFvSPOUbA3+E`1;*TrWo%XTj1LlggtPL)5)*-jgdAQ{WA02kF;&ZydEzeRN|22 zvzYk0$36aD*;vG2`6qri8)$-Qm&FHyS_C>mqFNwjIT)`hbr>t0iS1VvDuV~=yliTN z_k)QYi;h)V= zOdR52{>aL3em*tktC6|>W77U!Rrwoqkg-Q@`mEc9BuA7W z^n>tMI3r>vXJaN(a4-q<8SZ{_Hac$D%xYM!@_vL;X#Oz4xGTe$&7Tg70S!hEN#G9f z{3k2$4_F7TcMUQ^oYwY>YbC9khq!6tzSqwWix6xCJxiJT-R9TTImoWz&9s}yJZD{9 z`;@ILO58ElWSGQXJ#nxlbeo9i2LpND^T_SvH^HGI7uI6i|McSyd>f9R$0^+VyOs_i_e%N^2O6bl}Mt< zE*Nc`BqZhh6MgqPsM)5b;Mvvgs;ep1UrLazloD%+v^Duqjd* z>y;i~HQ28y)o9iQvd~;$!zYvC*7SAcnJv0J~B648)o@EC$A?_2FS^Vp_IIxK8mywZ>3h z{A7CVnxSUk9=(&py9uFqLTEbI{SQpYGb&Xd^j)p-4&Ng?RjFVluSI%)>`WZqH+>aE z%}gx;RHrQtR0!j&%;u+kHGv1o#&HDTRQ6?qpC6ZhqyUPe@@+ItwpEANaQ~DEHBZ2f zD;ps(%|BS|`H?<9)e<-Z-OlNT*FXR|IM^QAnjKz8zGIdPq0Lyw>uyZ&`Q?C8yu~#J z!u--p^L17*fl#%caQJ`PE8#6jK!f!52;23|bCJ-Kk1a0BmX(z`_+2$huIzYvBH}K~ zoadE9Zhq&=%w8Dg65H5Ryh&nunX7*M8%N^J+R6uOx_&DM7tUc5!@ zPs&b$P}QpM3Wk?i9O4Z85Vm;*Wu-mEC5AeucQB%7H$K|SuB!cOeQs67qf!B@XlPW7 ziq@9LyNcg^oxrwn0{qGRdaDYXl%^_w;v5q|&Ti!V4!t6`izy@tdjjNtkC7Q1sS;8? zqTQvKks*(6VaR_9 z105Pgp?}`#2Rnhj)MU@5_P19q|AnWF;$QfS8eNPS=>Ts^4+J^E&)XVIT*how2+{t4 zgd_Iz8a6nsCkd9-J2-yK&_9$k<+uEE+y$|iF%EDBlGu?nf-S0Su^3LW;SG$XQ3%}x$7w3#;~<=-n8$W4Pnqp3lS`S7%+T)OJwXVgF{ zvmr4w3FDuCw$BO(vYa&!iL(C9ffQy9J?nb6BQuCTx?h%C2brF3aP#Mz%EiyfkUMui ze%OjNo9eHR60IyazjYkl|16Gq$UQGHddw|Bkj56m_udVlw=J$fQFt-JNNV!g`bTqS z&2&`J$tgkzJ>e5^50gG5EG4D>QbA@&dCOXga{No*Q|BrSpYC3u;X0E=e~tav*fTmZ z4+)u*uX&^%`6*{0{HN9X5rAt3LM?iC1AW?P4FgE(4y`%ltp$0ofB=%Q>wDxqYKzZ? ze23ro3ndXQ^F%HLl=o1WVr_qms0B@E`~A$Jy1li-=kN)bHi4Yp6~5LJ#F8%mttj5^ zdYHA00aOMi^84o(C-4gtY&;n-h-YYpg!mVMHKxAE*>Adg%yN1@SpW3{riUJ{Wy@wf ztrPQcgbP4f`rD<`fG?S06^7*S0b+X%@-cqW70u^rla5CoD{l5%9kbB52?FE zWU2^}{{iu9i9kWDT`d`J5Rr^kpp-PO3abb);1aor8VIGA^NkJ-@GKjF;JhEUW14T={z4*HdKt+W@f6-5nAMl{QK`c0AmSl-X4VPX5WpBH3XdZ zp@OTTp04J&Zo9>g3d7^jWP?N;NHheIWdqpdZ#DY2Zaji^Y}Z9^Z+V>*lyWYaotA6i zNQnv6zgHxqUnSQ&EMQ4V%1mwi_SnL8tO*|Y$z1RzuaJ~%BD5IZ)sWxU97*7Zk2>_3 zj*N_SUfU6$W5)RO6|ZjFaW}X{|<{5E|?}>%}fNBs9RCkrd@aBrAa6s8lb6`UXID?}*5T$G*&PzGE ziNEW;@$6$H{o7go`4uQImNHq&pXc>|iU$0jiiU#F7k5~!-ANsZJ8`cl0-lh3ZLit? 
zUrx|l8ZHSG)>vD(g8^zx>4*^y+KHIeLy#me8%Yrbm=T2HBN|#tBXakcp;R6}bhO=r zgS^1o+pOE|vPzH@GH$p<*R z?|PiB@=gI%GS(Tw08LDtgNjBZb;cvSANLuIrYO<4@gU{I-s)5zW14`D1ldVAQXYa{f`bq&MeE-N>IXn^~0GyQPLyOYu4uL0k!&UtwRn!gAszZ_Mk( zKAAEO5rQbQ$^J61W+sO2Ki80sr|N4ZEk2&E< zS#P44F#EXpa{&Z%LyNj-#wEte!8U0e+KU&L>@RfCZpNrC3L*tzX?loBhRp7rOg4Tl=1D^ba^gI2&KQ zE#9icz2Kri%yA^~o+Hu9pG%a(_S|;Mt&2DmJx7PkCVcPQzTMv857ftHM3fa}!G>Xs z_uadb!TsZg_d&)gV<1o6GCUPG;6HBiN)qLnN=W2v9Mwxd+Bn@0ikkemwBK$pL@8T0 zSVH{~0l;Q`*#j*)vb?WH^s_SB__%;wIRyjjq{j3G%F?pFu9C!^^8q8+*4O(c5}U$O z3=azRmxh}bFrEFNPG^=1Gv7|Gaj-IJDp*P{993cu3xzS>uBaccLP#uS27Q4!KNQ8$OUFt7*yMtoN2csVIHLou zBb(ax3njqIgE^tYOykC5qedchUi+HgR57#)Es2O$Qt^N)DgTZ#GA{tw?{3fZKi^-l zF!W7};6tLmCSDLpZ}fA&Bjf_fl=sepk?hX5&so5uS}hl7M1!7h1}4cDRuoJw z+Dw%H9QWWifS!%h@oXdW>41Vi`}g_QIAnK{h!!Q8+ea-oT!0c5Nr>ukY*aAFk99We z+s-8!)%tXp8s+YnrS+RO3g=bJpv;CASEhJ{de-kP3nZjU8Y&xQ+!FuwEefEYxXDLl zt5_O2Zy@U#DL2Wl%CGCS-0ip_FPTo-a&z8N8Ng2Y)JSd}_%rXYnLL7-rS4|K zV$Vns84Khe`U1CDNEGAZ8Ao}0K!Z`MM&FP?E#o&t+ok z1(Di%uP7v=c498UU<*;Ht-&AxM?e-L%%FuV;ng~gC!u@$J~DhUY$piyikFNv9v&WE z0>;GO?;Zz>qb+$$Qay6|X39l2^1Ml}F2L8`LNi%hA}_C2WfAS4+s~I!;h|%)VR6hN zK7WJswIWHV7_(2#0j==WsBWvJ1cGR1GvfBHk{iXQ$=hiaeO;{&nhK=JUd^<4>|)A# zp8TKIc#lF`?QE1WHHF zS6qxWn~hGUDU!Y9P0`mA+n1IbKY<7uM!r>3opW?m0D6x7i`V-atvjp9$~;Jz`Heiz zZI$j!9=BH&O48~LUq9}*I%{Ao@>}O*4^1Cg)oyNJ3xA)4I}_jFKf$X0|JZs9s3_ZS zeHai0M34cb5g1aWyL0GHX^`&j4rytS?nXjNkVd*2q(i!fM*4fk_x+v!Io~;J&0@Jg z1?Ik=y|3%q*WUY2j7)2*!{#RelpWGFngBcgw&{smMf|^PN5}wi?H4v4XCiFwQh+<9 zKQzj{^&AhF(fSV?`uh>W32%K7eS8YC_8oqbZcLzI-+M=?xXc+xLuFv-Xx z6n46i(VR7Dv0920tn9%8_3o8Lq1p;FQkyEC;EV1F z9&1ZIUF>V|_|=bi24~drxp6%lEH|h~=PAu^r=l&EG&jtt`?#{=nwm^frkf3y<4G0EutRF&N%j;{|LhpA=~f%l-EP$#xJBG^YQWNVU~65UYp59@N#}k z{XBYq=fdUH9`vb`GuvZ3^wyboN5S{1cF4W<{Q5AlI?b&=ig3*DoOku6X68P35b0x_ zVw_lOwTKWRfV6c)yI*fEsj7-dwWli3)8rycNorm?*xK5Tr?9ZH$^|n@OTXY0e6b+; zQ3=FGe7u z>!*Lni#UuAz)p&d$;H|s z@2?yMd>I2ogBK^f>ZwD>L<*71)%yG804{R>dMH=(IvxWoXry3tEQF(AQ~Z1LqTuYX zhTe*>y~9SkqNc!pu5G99_cUJ64KaEOd46YYdFsG=UXu(>XvtnYa%|Ep=Q1 zR+>aDpZz1p!;Z)xMhFFJ^K`crDSI7G!n0~zRYU5?>E?YgFrJw87%B}8FLpiUKl-^b0GXY z2skm8{-0_Rnf!AWb`RW5g)PH%`W3p%bx`a4y?O)8?B$k7syEREK<1ER@0y(29WX)N zYb(W)L=^r$$By^NjALTv`Bpa1osGw>v8ct-@zIh{iqQ24_$#Gtmu0zl3fL-^j+kh2 z@ob8C(HWqDmi(S`skQQ5{LIUF4D?l6Gi`lxW1UpfD+I_3GpwQ=UH zQRPO$-njqvDncxZLiO|uv}pa0nrVliaTg7}i<$VF3xLx`5kUH{f3Z#h_c6|XQv6>T zU6d>KJ27Eag6KIeT~1jz78!*h64BYevWveQ!T{ZEQaW?u%NiERtJ0yYW5fjL5k2*o z#!9+PEo#xcw!2{aN{w0`$*tA?`Iedyn$a&sZ3#flnwjV8o4dQScnDW<9!o??v-(OE z6GI>zcVjfn4q|&&V?VjPDDzZ%z>mru*Fy^%0Z;iRz#G`!>hT79Uezn@_8E4{345X| zPPY3-@Z4VZv5treFh*YUy+kyrhmYkcU(942a(X|BGB+Jpd}rX>wnL36%G(8PLa9fx z#d?$9eGBx4X;1A9&_b6!g;c%sWcOxKAH{{6%dCel?R9TcS#{sxjNc8&j5wE^8VZ>x z^%DR@MDzD2m^cLN4MecFeu-pK_t$$+Eef|vSnb5f4D+S@k5||WggqiUGRo5jliSLJ zxl(F>V7?4NG1q>W%I9(w{jCzNxbL!h*=?ium6r=KV7$Gy(AtOT0t)Hh$%Y@AqDJKh z*;|7Afdg{e6+k*$YU&&tM@MJDe%DpYs=PVDfDFNfFVBR`J&4%-n-odEAcuYHhZTex z8&e|wz(@(~M+#8H_(n?;pP4Dw@GZ8#dm!N*u&Su*zmzWo>XRt1592coP~R7&hce8c zA3*m$3T|gSi!t91U$v0KCnow_c*@AjUX*v;Ei$&}l_~2d&JulPFL;MyM_j><95GBs zpO<$i3#2^V0i1K_)#j?<+fednL>B}O1w5)76amD=guiY|le;@5>yeUP<^d+y#sb8FP(a$L0w+s_NIOF@W!}53W5AeH{WBj=%cS4zHsfZQ% zcjzJPod9(ZX_WNm>A~I@dun))%by9+YkRg#Sp}rmS?xj!4}0`-W=~_or4-RDmUmmD zK4d5yxwfKy85t=;D%@~AOB0lA^^LLOujH=7!f6hom*>3pM6`L^d3KkQzSe!;rpR0) zMar$Q=y~pJCHA9FiYhxF6g51Iry*f?yj$RO$mNuEr?l!h^V{2KdmPJpRe;F_VT%c= zKGVV$jVmWuS)l}avz*X7(T6*%o|bP{u}tKgjYQieJPhhsXkcwnMNFZ>`Fz1A`c)=^ zhS8~{51!?8yC_I6nu}_zbbFKm9pV(=TH`mK+e@{7(7rB@c%QD}nFv z)mq_!+p=W=XZ8v`qnT-(IJ}Y56f)ZJV+x7oJ7;tTlWMc|5*|Sk?^3KSr7&p7!l=FW z%8Gy5^@t#I8!B}w8YK2#moA9f|K`UU{z6438raJ9?(;qHY^;-!B7kcwn!SfpPT0cK 
z|G8ruU;G6fVm}Ydrg8cS$ffcmVS3au{pXy7NZk?k1_mgq432gduP3*nD-FI@tF_I^ zP1-@P#0t;u1>UWxF&LLcPV}F} zdN;16$UuSE^b5smF15DK{LozP0e6x2SKL}2)yN)}gR;A6m+Jx`T)ZvGl1=jX0nbi` z+BJ4GPr^(VrPaBMDKlazMDRtK1_Dz?-vC8mgMG=LR#BovnSj*11Rr=(;_H*X5K3}Eb!3- zjQQ11{PfGSiOY?nXrq>Y!!(8_LdrALtaj_&E}~zg62k##H60*i2v$IQ3a62fEabMe zvKJ4s@wkWe?!6k}^!kV971Z<`NlRrgjA58XaRv3PwrUbYW_{j9>pKI}=H4|9$D!g% zA;?L_B2gpAy;4xfe}hH4C-3{__0ZYuo53v?N)NergV&okC*>9w`|AXrkISN2>{M0H zZFDd|DGJ|cLct|@*h#w*060!)FA8NHGfa{-n)He5qI`#+7C)c!xJ^P_^7*Px-nph0=*J{T* zthhLyD;13OYC&SI{eI}P;@c3o@sxVyw-L-7+_U(I<2X&bkh8FtcTM$ zl^%A`+TGrHly<*O(%atfruk=E663Mp&)n=6^zm(iBwOv7)2^AsSJ{iL`kBUeTsr4Y zac7GW1yyMc>FYRg>~;^LJOG*YLKc$j=D$YZhwUy7a3{;jKKRhfpSY8~J)ABvRrdAW z00mcL{OkWW>~Ni$R9K;{_9G$C7KeM)JK{}kj0Txa@Se}adXJVoN5pF(IYnL0ttI1E z_>urT2Q4WhMG?m;E}?eUtqMQ?_x!Lm;zB0YvsxJ((|sb7pqpfM`G@qxVqD3hp(k z=6KU^q*1bM+vcCO8>)q>h?S4UB%Z#mgIXi+vyoV>wwzlNax?`B0)*JGFplc+3&pZX zSk7;L42Fyqt z{n$SdMBq3`ynmmp0CGUIyMJ_8xoTV?p+H^P^Rm)HRuQI1in%o`5k^qB@k3} z@YV0z5d5IACS5Ig^Vpw}wjv?r-Gcza4)4C#DY*X*NKr-Jrqj?k98Hz$=Y110InC`) zBZ-f`HrQ;TAqnGixEC?;ggayHiyt)1cS*bs7TvPuVh;9a>V5F>Lz*t<5xAi;veqaC zIy8eqk`&mgO4NUq2>&+-qL`R$^OKX4=2uHURQa8#0JpPM6yx~^_J;cdDZgppfclW&XxVXqI4)^1P}vr}^wW-~~vttT^v z!jU12m3VRoT)QplM>16v`MPGKO+O+X7CDh4KAp}AKK``&uD$4d#WLjJs# z^-Ijt6(#R{94=yBv=ELU=cDh9VVasck^O29_c_Q_T$kU3&&Th#{DR}YY==uJs(3AH zi6;@4>lZknRs@6R_>l$G33Tl&5$kO1KdgdxxcwL4pp;@4+r$jK6INeNsy)z zt7JFYw28-V6fa&W3fe6=MLx_Azq|OAVJS!Q>_}V56^GyFN&5DRSKESBLchW2EM0s1 zHAxA&@7QPL$o?0m)dmo;Zcgdh*p^h}Z_j6hJoCgpJC*U1UE|zPfUI>Fg_|;amL(;O5k~u|tl)-ham7Nz&nt5Tw z`$2;Ixr7WS$LA+v`UxE{-LYhJ5&D{99dPiS+=2)3uj1R3MR zL?_$r9`AQ-a?5xjT-Iu+WFYOvSYYiGOw>d~G!~Gf3l3 zaF%#sS11}A-Gs)94VR$(+r6N6UA~y(EWQ;h&Th>eRVUQUmgx--TQq3sD+tS+^>N7;x|QXcQ-6_9wxEoDLYuw|a5OGFa|thvrk1lmjkMu->^oo#FY+ z%n;J;)n1R2#nTV*{k3+ij{qL(1jYI(taL-V2b8jOlW$Tf8ROyyhw%12DkW2wa`!@d z&sjn}Az}iTcOUw*Sxx!)dGAuQypc4kA?QiT!y=5Hwf}lzSc9ASee_30D=t>cGX#!O zqM+rqhtyQ^&5zdR((!$`5P?HX%es-+yi#qH@)q?U}c{}rDryUUQ=6;Z4R`^~FFHT0|5Y!!5)jpdPZCuaCxL!@B( zn(abuNkF`@2f-tUH}0C|yoaGYj2%Q}4L;+^#CL37$2!t7GLcRMcz` zfwSI+*S*7tQuip9yNh>^MMrcP8`_Ex{a*SVU@a_oqGoI*xgtez@4!)LjtX~6#pBq% z%*qz6gzse!Cv5cm^+Vq-j5(58BO3pn*-55na#@Izzo;>o0O&(}@P(;sL3{~r4XFSh ze-y3w=H${uAw{%`Czi{vF-O&Qbaj_n!Yz5-YoGel--Dq z0{y?hfvYV&{_u)Ig#A7p>Pt|l-whj9AOb$V=KQdd9U>WCcCxfAVy)3FG2l{a#M zVIUmEd9M5P;^gx{mS4iYG#~*@WMcJBybp~oO+f2Ay*Nzlce#E#^h0|!jQQo9|K`3X zI8BC;gil;>sYNAW8%nWsbhAH}tfwsjmd3HgGCdF^k?i9tsWR@hRNIu zx@OpZ)1o{JPJx@17eQ?d4ldl+$f)AuG|M3aEvG8R?wrOENM1NFr9Ln5=Dr6OD;?(7 zolZ_0vI>I^o<_;pp&yCDJt7u&c80x(y~_x_XTa}jaQK9$VOLQaVQ+7b{r!gu0(}8f zS(i2B5u;45p(8>^u+Vt?iL#9!mh{f4jPgs+2Kyr*M zOB1xnkoWMz0XJ<<@;1BY%hH=whUj!>m958{|Ct4F4aX1c?EtyQa)u9u2=}<2ZO^Ag zCMQjWwS71VS&3oHuvrL|t1sqvIslZ!qPe7Qp!NzNLI*9cLkj)@!iJGg1RD_C2EHcf_9^Fwo+ z`Uut)R5TvQoX_U+u-5@vA=MQrq=iq?*3@|F({{Tf$}fsX&g5$_6ZEmUJPVkmq>V3N zKNtUi2k68Gh{u`Ag&w0a^cA|g7jBHLTOgJ*wF4LQrc;23(xo|vv^{mu67+U*OkZ_e zNL?(f6U6_pVeP7&`uuVdEGHyT1+KYNsH!Y9`Q1CN(!j{#yw_%dMM`6s6ZlDk>cmzw zUtAH=OA975wW2pAGEKh0c?EL#%JOKV1*HZTP175ytMN#%X^D0cYEFBl27 zcgjDL$MVHpsYL>lm`R5fUjX6g0#Sp)mGDX6K3U=MXw`Hoim)BkC(`_%wkkl`0r{td zvCH=ZJ78e9GZS%mcI%w$7lesQvCG|95W@DaK2LZ0#oh;l#ski}M33j<^|;Sj@|9m9ch_ z>nyrQD-p!?s4U57-swMSkv17D(zM)0R6bK(Z5fy8{%9TZ85fdLOOtbZG>2G^$o z>GT&+6vh^zm?d4yr9m!1?$(0vL#7ULN2te2j=y=KlM^ewp14>#*A!9^A|*gJK&K-( z#=I?|^5P@rGHOT({_KW$HVA zoR1*T`5%o%S8PgG3du|jD%Uhp%nMe{6=YI$xzc?G$`tnO}@+jVp=Kh#VXpC&Cn zQE~(O;-~NR(rUnHDf-G*{dbjQ?HAp5!8b@n@;Of{r5@K)aDu>TL8 z7N#ERBiJm@y$VWpIWs8db8F1~lf+=e8zRdbYab(J4}Z$L>Q_18>@0Qo^v(KSqpRL; zONh2vKAXMZP8zRAgkjaszMd-ba6*^I_cQfLe=5BEzzw6U!3)!_H7XL?sd9tL^f;FCu#1yrj@pKi((sQP+>HQ 
[GIT binary patch data (base85-encoded image file contents) — not human-readable]
zC5b%b#o<7rJxyfIA2inHi1kQX_MQNHEP6^ZMSPR(pg6Gn6ug4f>IU`q4cSlNQUVnEgs5BO=j6YFKjYsyTCdU63`SA&h5W7wx{r@WQZq;7k$Ujp$@(NmhWTWwfO}TzUi;4LjhY8HDz5TxMzX8!OuhAd z@wJVTRrz=5V8~)ln$ZNT8;>uTpPybSotXZ4+S-`@ltg&>YBL`)0k`GF@KMX9!Qu)K zA;_^i!}EE;lRv*y zGZBqCrF&WQpRUF?6DQRT0{%QuSy-n@b19Zi`YhzDH;E}RL>!GL-pivlx^9PjE{e5` zBiP=7%&aV3v#X9JHNIhU>9?PXQWG-f)ZRsL?=L#=^EC)Wp&=N(boad*ikKkwP_%=O+2Q*=gMBpdv4B0R%w|}|2 z`~lW9hSh;Qr&{Q@pZ-IUz1@L3G(IYGo$6=n__jF#(7dcg$~)F}2YY&z{&PA}7ZO-rqENM3&jol$qzPX!#U z_Bx0W%;x_NI)i5%C5135m{(YnL!gp4m-YE|tx+OK2AAdRZu&jUs5)Np|3&~nMH>oW zbybuZ{f>9&{e-&XIc1Wowwsh+j!X$(;kZG0R}9^q`lnQYgaMQypJIH0^i|h>9*JK7 z2bK)_Zlun(lq&{b`TT&me3ZD-?sakN{eYideKZa$^m5vp-u-r3=OChUe7$D-ieyP?UH;|ug%)gPS5hVNv)?%5Yp9M#tTXTPBBZc$ldo~jXiBP3N z(#bP)mQ&w_6eF7}$aZwYfy4v}U;aRMf<=iatVOTBPv5)9$@eX*`0q}Rtc`Pnh)G6R z&DRqzH;rNboH@8i{+)?A)j$2gaou`MjpoPyyYjfI42u{*IxvVU^pJkrC4|W>7flV#K%H8o^oV zg%4m<#-;Qmifpwg{R0J2(J)8G#{R>2o%+b%2sDQ0F5#f>Jj!_HFQLZ`9dxh~&Gdq0 z(B|_F5A;$pha5ngLHU>$FBae4a>X@%m8am{z~uA?>+LP`bgD>-H96#CpfOqX6399N z3-?3{Ru0;H%C1%q7Vr$@t`M_y8fmRmj*hbwgX1hpi|vl?3<%HEUX8guGVueKv>{*bJWY^Jntp zXp7$;o-^%{WNnO7ym6ch$K7Y|BpB4qzPirzGno3O=>j8&>kQy%g@`6%(fX*n`g-EM zk%hdW)YT_IlGOF?GWvdMdIsG^CUeG)_al1(l@DVWsnpya8K%Sk3g)AsJ1L;I^Q3(mv)J z5px;SGOBS3S=#EZ1eS;usGQ^%iLSqv6_IG0@+Ftv-jEl4@ea#k zqp)e_1F7SjnUFTBZKtuaAaCvImT2|*#ew^t9ZwQS4)+pgl(LPb;SN&UV;S>)2B@%0 zJDSdTX@>wIa(44rL8vX+g=Cc++C9LR3a=)vqz}$Sp&=hYA^;O~?l6a0RO7%ZO)_kf z=(l0%t7o7V<}t>u~4)Vv!lNGXXwIX^hz5~JgHM`CERe0ir2s2UZ!+mJhOl~WipC)?v zx)!O3E5&sh$<>H~{)zA)p6T4712%l~kM~A4gZe41C+g{W@bf?BqFM;`|EqM4^-aBD z#!s;4-`pVsvB%O4Z}ww{R$qptV73(Qo?9JPJ$yKY(d1c2?6?<0Boq2zRKL%dsKjpc z1ya2Q$qonOE827NIK-rw$EtQyn zz{J(%dd3}PQD6D-DLLgU2h4iX%FpWFEx|eoBsjOnuWh#qWi4i9JUiT4t}q$ooN~r# zPcTkah`|qK(UI6z0uc8wVeP1A`(UivGi6g=lStS%VsE60yTYmFkRO}jq5sAV-deL) zGF6=VeGJ|9k5~r9KOqh{?Tzd|d4SpY*D%Ra~bQEUbn|qRAT24UzUt zGD(=nn>;vwxoEANA#V^@R8*9?ns>%-2)R8$9ND;Z-||BSjUSckAE;c^^PnY%X2rnr zju-9~EGCt(Hr8#<`P#^R`fP(9W(t*ork#zsdOy>>5ih5Ujf&ZcsS3q4Y5DV+-pR%9 zGQj~tSZ6AUh-VuB>2$M+56i9m2bTwJ=Tep%+9h4J24^O){qs@WIPpM{op{NVa+2=D-It^9Cp3;ir5MyjB5(0mI@G%v9#8ev$VzY z$Eo;{*sJxqj!q{T)3$(YLW^NN1A7Pkmj-TsDpL_(y$3RU2U}y;m{Uzb8^8k#Yrl*x z4Qbz_j`79sJH|r@VRiC5f#U?L%^gWBqcR8wPMeE=@V%v77~Ap#w;&X=Lt6YPo!>?Y zUy1q{s?svgai{Ddwe`l%fftz$JRAp#QDY&PPGC24m z$%GYO+*52wlEL4RH!{5{%NO_5gqjKRxC;|os25)`NXsr(!vtvI8&UJ&JD<9O1 zvp>Sq+zsYY=(UWZz?_FP)Y zDt*aFeCT+;?FP?=o=h@3w0nr{<^|rnQMa`HGbUAvRD+(Q_(nmqjo(dRy}1{2r&h4l z?zP~MlPaatZc5C{b5<9xRX-J%zY7j!5Y%g2kAbR0;&ZgH-~Dsm{apv!U7z|PBe69L&Y)nQTf)2r_5^b!FiBRF#9;Ozoml8Go0vdsl8R}Bi-TL zymf_F^3uIlzTbc=>K7?a-13p;ezF>_mEEOXn@WzyiDJ^yzILW22Y2@l5EsLcej<}! zIFgzE8-!G0t8t_S$Sx#|G>YVcst3*(N4P`0rf^>266n-rXK8;@V%x5hO+Zlf#-cCx zvc+Jl+NFhc^KQ1)m(RGTE$7MlbQ7*zBIHz_04;B*IMk~FoN{&vIjLMs_n&) zBAc}jrK;99pG3K=i@FsM@eJ2d#ff&jUG*$zHej-Ka!g!tP;Vi&v$z|tYJXre7m;MA z*~pYH5OMWdnd#xsKsDb`z6gfR(om}9B#@f6DKZegaH(it3y!f=gia{|^K|8nweYbc%O+4~%yTxKTR)g^;SU|*P zTLTaJIw9<=PlTEm21o>XZc-tOdw)o5<`0}ZJb-$;TEsg#Ir9)>ol0RuV(;7Vqgl14 zCRdMG!6_eQ+8!T~rC6lvlfYQ)7R;iv!`AAfPdYH@Hc?NYPWn!LAxIMyV`V^rE$O4@ zP08TaAR}<92xp^5NAKASXRaJW#k>z<@qXQxx>T9g8g+&I*AY8X^QRD9>_kI!;C?*s7Cmz7h@j%{L*! 
z@gX`o`g}*GZ3iuIa$m+Y@tKQtrAmXMvfZm2yqon*x6fZjOt{E4LZ;YQEMwOiY*y0Yp#1C>add7w&_(8C+kuWR<9^+T!oYRo&;1 zLG%@#W*{uC!^wJ}to(RnI@2|d-NH{}q4T{sDeKYk^b4xGcgrd6U-7EuW~*kqNd;UB z>ovFP&TR96y(gDH>IO{yZ1ka>NSTy~&2oPd4L5mUJ@5HSW<%JPtM?vJmtu|#VSh%r z!1;hOfz6kl`S!(1HQk-U;xov%^gu6!wER3S{t@Uql`6~?+m*0GG%D=VGq;@kop-=t zHW1EN?$HG z&wBt3zIsRYx6VcW=5hQ9bdb~ESm+GK_`>r0LSJBjwulCIfT2lBdbL8#l<~;cK$FF; z7^QY+bgW$SDIFfS0BdfiueacGd~>m)-cSnJq=5Rm1;9jaR9UbWDivvJhGgnK_Z($# zFSOI+S|Ku_{q$lJHmhJ=r_0%;U@Jp^S+5#$f~Zml z1}{8o(?oAd?$n|i*v+TRmYao#vQp;Phv>dHN0esPbI+WVP|HP=OjTn{;WKHE6P0R} zgqtVq49SlMZ8xK{q|oZ6zp1btqFzuHxLBd&G9abC75({SS8|z22c`VZ<#3Zaj6cVq zM5~v*jB%;?1x&SSNSkNKK7?Yaxdn_8Np{|ST!^9VV!Tacmb!_ha@(xksmC|BG2SZm zFeB41Ri!io_9VlCq|!5V{ZUiY5^P)FbNfJ>S}q`7*AuQLTd17|c4-6i@J?5lM`(z=z0Y&pQgN<|(A&ef8rKTCXvE{aFL<9q;iv+t;9 z;%RUW8_SXRa&Pjwx=N#6mdZZQ45Lnc7wDpv&8r;d#RzvtwK5(NdN01Nr71|K+{!SZ z9n*D7uIKJ`$M)a^)WKq-axV8IAK z)|-*0uq39Mc7#wX$4$37UbvYi2rNnxD2)2#@pp9`GR2!)&0A7&oZ zlHq@|2zqzpU`|HQ{LP8?*?}7TERzURt?j(fbE|f>_~VgMG@*MEQZx@1+R^7PSt)7reE+?o@1~C^CggcGu=||i%x{M6(X5^=IeR#;Q5=Iya(`? zD@V(2J#G+ed_lr(6QYec_0mvfpOD&h?V@LYp9bK+-cUXmlNs zZT3t-Bz*2;LCGOnuz5t+smr?8rEb@aMuGhrx?f)IUxADRx?Z%bZItrm+R@a+~vm^<>*wjxhP(_0lF_VPU1KcB&pQ^Iv`* zX`Y1)OggM7Qr}gVN0cA&9}5<

-WrpltxB>%lC3o?B9lCcllD} zY$x+>e3xKL)e~)NT{y1V?}FOm&C3xzW~69wh>i18rN_Y$sNi9W8gP5@E=qQNjdR$T zww8fzw7NvguI%wRD~PMVdxg)qM*KMztcTzS@5UuE#xbtdnV?v#Vb%gI*GKK8IB`K) z|CiWD;R2@sf58tn=_cLohca8QC#2Lp+q+YX?y(|vH%s1wbv0>mk%jaoKqXj#hLZwA z&J>pUO^FatUP?z7yrv?oqL?m8`JO>MZCsGR*Er`49{edNBW$4`VReTRIZ|!+ z-5`RRxazH_No438k+5r zwv8TrWF=J4p+x=IW)1+1J<(euw;rf!$MPY}*}Q>--RXK}QeKKH(PsbdZl|*lGdKy$FV4_&Y3uq^>=7?LJxVYyudlyY<^G>KNDs z$mQqb&WB&~O1m~ZpT-&@yQ@S*bfyJ;@6f3$Xfk3{dQ_c0w znzn-U2?=<9zz~66wN8XiVd(7P%l$|6gvg90%UY7 zCMXGk`?Wh_L?xr!{aDWq)Q>{l%*?-m;ShyMb?M7*e zKI5Ud%bSUM14?9(^E!6>7A=MqwE)yZ)e^v)fW~^w@tca07&g69qGs%f_fx(6vG4(0 zLO6GX4|UFLmZk^e@`aoJ;H*|IN4q?+5E#1=5U?5d<@znkfAd0!*iZ;^z|uTQntquM zRmjqr+~o8m>f)x4?BV)r-hQ$pj^invQA*Fl48N45ArI$?ntTcdfe;lk=|Zc=FX-E9 zjJXY-+9}D}>4fUzRT&6;K2*0aDKr|sIck3tLoZ9o+l$XZ$9|7F9Id#!#L#skR0S;3 z=jCPA9Sc--_C0Y>V zSkyf~pnRH+^pdq3n9A-E!9LQ^an$ZugvUmFbXX&a2&4#X5ZoG?lJxS4=|t`~K6A=y zpuiN3BoLZG_kXzF(iqZMaU=acctFO>$8!s7Sj)m*FJREx(gE|Qr6E&w={tAP1{dSq z2+?c@{ig(pu=yW60Z=QN0suYxYO?zz!+K1H%j??SMc$KwKJ^wI|SW zx9Q2IU1B<@4W7aWqRK1x_$iBfXyu*6xy-)D=)31A^-V?5qMCBq*!5mJbbK=k7TIkU`x0TGEHB)Z-{Bv?0WV@g+bIIC_}ZZoPw{Dt zpen+K zy~JD*Gnz_i`h-FRt2D!_u*zpeKU<3(T*HXvi-bfQBh}5I{VtDIP9L>}R!9y#yUQ6!3uh>k!rS zS+tW#k9xEgM+&W#JW4oAkQm^!29u_waNgD9+<-fkIo^Bqh3e}#?$JBH#;5isNYE3Y zD~x*GYcV;zUg2<`-!zuLWZ>*+o|wJTcgf=qs4HLbPAYlOc%m+uK_+m(b5Xs(!-j8m zTwa-BL)K@A#jJ9U@YO?8jCuIxj-yzt)lQ#zIfj`kBvpFMw!;mcDTKhX8Loqwo2E9e z!;{AS*${_21oz%=t~6907NsU;4j6S6FRnc@UYJ!rHXxyIdN=wpnn6-_QFZUkLT1Ki z$ZtTw4qzdk$apx!HC@0MBQ!X@9aIMKopePxJzLpR%xle`JxHC(3cS$cD%cLL zVsKe1$ZgG%?cLO-HkO`g_bo*ZnBVzGL)uuXy~9b7j&NAA1k>zv)uaqQXBU7S2eOc&q9JB!LMhya9x*n!3tmDnPyKwGz3eGD2QL=U0Q zuLB0S1~bc~cn}gr&+91Qxf#qp>m70Z#~~hKcntI=x1g)&eOn8?Co{bmSRJ>{vKKQq zZrADx?MRNx60pDn@Hij{*-%#@`T&y9w&fTIWqxVo?X@pgNhx@oK6TXDYuKf7 zP}viN?Ja$2k#=G!EwvxBoIhI4Q&!R)@(%gf`yS4>rPZ}~#L0;fMsC@-aE-4Y-{s@n zBMCi@q<`!Ac_y`t9fK|hfyQlnAY=|kPnEihdJt8bHK@zhVovl-e4Ma2IZNOVxO~bS5`T;B? zz>@m#a_L2AqL?ONwtFzM$?^_F1@k#hUucz>1unL%IZDNVF9r%JMO?x^pmJxZAO1+* z^-U@zrI4ezAiAU?dd#{)OxI3VN%T0++%7Br#Y7W}!C4X~fsW_)vbHnqn53?!HpHUX zDF#-~2R{Ric`ol%jNFKcWhF#v2YUwrS)h*1dov{V7HyHA)u&8-PB0pE)MM`x1UYutAu_tig z1iVrF>1vzli66YZkG*}qT7Lf6`!PY{5ybpWMw>u0093d-xRLNMZ^A@#!}8y~B}(0N zS{6&Y30?!;Ax*H=uH768RInY~Ql6kz^~v2kAYt&F%PM)JEBW;8nTL8sIy1Rf%U8KO-f}8GO@L1h zGr=kIoZZcGj+ep*BdRKuApF|CNYQgMp0uDl6NCsj|z(~g`Sqpo{jHA^*Bb?RtW?&}%a#Wr7q6VfdjMeSSuocc)u z43uoaba0F$Y^Uz2jMd22!R=D!>N-m^KOB)0bQH6$35k1i`f#fsl& zKuI1L@(N8mdG=pu!rzcSDn*_xz|bv7Kt%w~rioF&y9X7K?G_}Ao5Qtgj&gRDl{jZ? 
zH>_GOu=5%SC>))l7zmMdHq(sJm8(5``Vh2?8!&+Kkq1UAm_E?jT(|3#KmV1IUO!tt zQCMtM;QH_=eW&`#BQh{}KnPN2+utl~M+d3P>*kh5J2y+sJecY1GzBrkPQZ=}Y{`_f z%mC&+Y~m}|;n5QA7S+ngBLvM- z161v2Y)UPYsfUvkC@D_|$TjvEiOeHs`c3(rn&fW2dDS5$_PR@mHp2J)LPKebIu7>o z{|@vwxZkpMY*R(2`A5_Oh8aU z@_P44&#aXyr#xqQv+VhJ5)zmuso_kkDW|5Y2FK9b4M7EfEe4}a$S9w8S%Qw}nD)Mo z_3@;~C7@wAs>rpNALb+{{ zceHXXGnIv$E~)n>aC*jjOpTf|_fUtO^Z`)XZ7*FnAHkYN1U3tR+BN8Nl4z+e&u)KP%Y%|@rb)M?h0n9T6r@UvT= z`+Do>#{o?ZbNG($X^~Nq&#ViS-sb1iRWmC!3@q=?q@Yhkn6KrU_)&7ahZBe44O9uM z4-S$>3mBjZO~>j9x_g7)h5n(t80~3iqARnkcOu}Oma9C(?*O3B0GH>QJVIJHJscb= zMmqheXBfOBZk-P__2}XVaHccy57yjVM6HME-%VvrYU}&<4^00+H7Bf1m$L$4oEm-X zh;Y92AE-uzWEFoflBvBhOZ&bHgL&8QjXX>VlmK}hF%tD^WaHaQaeM$L?~^4=l`!(` zB^kRMRmsCsqGN3}hn0z%A5^jx*?l=F-LvU6=Vk!H;(*Hs6=rl%MB$aH;%k*R9&PBq zavbtGVrmv<8K8UCEBr;FMQ^uEVh&*?`E{Myc(EKG(aeF&7>eYLAL_q|r395s$J?wA z)8^W;!o=-7t^3KBi3lCw@8T2MkA|`ab(Pb;WFgCB?l7#eP~)}reaPtzk=3fCL*Qo6 zMw{p0vMG=ntEw~~mJsDwfGz}@mrA>ciq<&e-Z?8@0aw@7Epkmv?W{0wAG8BHVtl?G zd5slNIg14G067)oR;hFZ?Vz)BjV7ZhP@?E z^b7LOBVLEjBk{lvc7%5;bnsSPc+Xu$>!Vb`yU__2BKFN6SJ*6h88o_yF8KKmNK4;M z$w}zw11%~u1WC2)7$0blFQ1)0zKUPVb7l+- z*ngOy;=R;9rS-18Gbjw({$!QGx)&7|HXI`cZ{9(Jgdt{q`>p_-*~5SfW^I~psC-1G4~8MrSfFT<{Fb1^byk$3 zSXoa5Qp(vB#m^o!lqR@J61~jFckd7&3{15EbB(xc#3n#fWDryzCTDrj9a!6iCDm}- zzE))-z~p`gox`3ovOwGF`@5M2rVgq-3XjR`$)~cdP6AItAgg&+asa> zNF>Nnz_<%a`BnxO*e2>4%6av@w!lC~aR3`&1c~3ZqyP_UEfgW5`FI>4?*a2h0D0k} z;q;P?^vUhmJI1gzn@Okls_yhRsts|VL7S0yYM#{@&jX`Ta@?(rlz>;>Cp%3Z@O0?A zF$qr_G-CV8}9O7CQ-^2aKxu5^mENRw8eit3Vm z%FU@z_{u(%$~79bOmF&b9od$q>x#_)2FePz5vtlkCC0WgcWLs%CHn?=m)a<;(>pk8#PsYf1rZv*lO zkkv=2^gRyRz7bt7T9iJ#a;S`F@Zj~``3M=yQPam+%`5DdU5D4ly3G(O+a;#8KO(KVeXsa1qR}(ezX-An&Jnohw~n)ikZw4TkSrHY$7eR zF$(L72+|9E$y4SLK>HGFB2tTuwOBeQvgOqy-d+k*w|sN!LWyRADJl8+i5*&CW`ol2b3pX#!c2Tqdff4|zh>lsYbG@Zw_3o!OAwYV6Gj0`xBx2Nb2lj9WI;5QU zj!UzTmnR6w*0%7uW()_|fmZO%^7-Ihf~TLlZB$R~L+VEG$y6a19S$Zrx}yvsSTJAd^p*O6G2HyN+DlZ7L~Lbo2O9+ z8bte}zC+nadTpVNO{;1JM51dT0z&R$*P0U~h%z-P47uvjrzh`4eMm~U%wB+q&Vuz) z;vj`4o@bhwz)2N^SQm`!JQF|CY?!Bob$vMHVptj?i#dI#eUffhz=7~P@Ilj9Kwg;e zo!poSthufa8(_NzEk`6CC&Xg_2@#XPbafDS_3_r-GI8}*a_py|qpH@dx&U%ntffvxr%PLzfmzZDb-Qro*pOms z2FU!l7&=3l`U54xC_CCliaX-#c0~t7vgqhTddCY}!+X{T7gK!P zJB&J^>~b*7kLU;7NKvTXRWSRMlZllx1Q}*mKzmVOHM-l~4#`94)Yw4w zUP7;+c{`jIcSYfV^jt{55LjjCfy)hB_{JYF%#7qTA$Mmm;N`1p=>CEA>rIPVGp4vo z1=L?e;DyLRtU2KWhq(9JMQ2|S0F^5}^ws{DA=4zwqx_Nnx>~1ca_kS`=!l6i^*Mh+%nI$Ol$ZH-9l*BMqbAT zdyvs!t}AISJh6=5;BOXgT!x*iI|3x5^yzCJz;>IFB(4632L* z)mrbXx}5#vYZuPRL*|tr9>lLT3Z|^!A zd}2|Jp}qcMJ4=6~V%4PFa#=9)G(35Yg^O*y@R<2zyAMM(AAazz!Lz}Kk-Xu)u%Y;K zwiA=iiR*s7wQ~%QXm%+7kmDakR0w*tJg*qQd<$bZE|B1=%m%No=?g~d%gA2-FD0Tr zB4_>M<3QSg)bh{;whW5c^9*HY|30(HvAA=bG+)4#M3Gka(Hk<=xa;Rf~=ZJ-=;0*-9qi~bal?kazG8sn(y%2i2OQscceni_Yl>7TZd?j z*s!G~H$krE#P?=UQ2FYhv#Lum-rxnXN_3lb=X<5fA^>4}N(5rLr#)1RrMMZW=4nl<|86>FW0UG#?mWD?JDZ^NzS#kdaxBUMM)psHKx3>V>ujSE04 za#oFb$%K(NL!9S9)W;8T(T_~#@4XpkB+{;~x9{nafA}oa&ESyKd3$6d44B)>C9dOf z64vJT7%)_As*%F=Z$)!VW7G-}T4eMECL!1F5k}|`c=qBfzJ}jP`B2?$DA&qVk*(x$ zJbZ6H?`$*=#`jc(-n~l@UgvU3A4zci2Qd8qQv@LBD8)3zcllxY&ZAWe7I-Xa#Q(&qHS$h zH0B$9!6$B0BwGQek$E-uyNsn_UZ64dh=z9lrK?4xpDF_m4tR9TLy}>Cf3)%8h>D-^ z!*!if@vp2yqdn?zMXD~sJO1;-jOt5V3lui9$kY^zzUs`0fwicDu?}Xj!CT^ zP75rY&xB=SWDVsyrKD0aiyx9p53rPIe;tyuMoRK-bN!aeKHt3%XNA>vxz{Tm@PBzZ ze+{~$Rg*Mhl4qS`Th71(CTf6RG8jma zP_zV#QAS2qH2U4U6@mhNG z~Ae6{gu zzkMlc$+fD>sLA^}0baKenD?39!3u4pq`Pt|{TeX6Y-lhCRW`crhgQ-9__)cl>}Wb* zKupTHgH=|m#@*&Ca2jIDrpMvD9AL(2_psR6J12peM|-@k$UQRX)tff|W%tKju5&Tb zxnZY5O^Jo9+-(1kudj}Z^6l1EL@A|1M7mqLQ@Xnclx`%4A*DN|yPKgqr5h=cmhSHU z9`yIF^PY3w^Q|?1j5=!xbMJfaYhU}i_LJ>=|I78{z)w#7%GCeW1a_P8Y}a;7ulz92 
zp7R1}aa=!U)r|l8nafRo8kkHMX1iYQKy#AB9&e{4tNWbGdbWJ}%e1U06d74^^mj(e zhw0K0{J`FYHp|SR0!JGXbm}+5>Mw21ZXw-kGYcF<`NM23vpU(w%waKbPVk0Ff(&jr zH6vh#6h>fzGof~GukNxo0}ihrAN9Nas*Y#L?@U(M+SUW_;O-`yT-pk1xzA(dGOg;~ zl>{%PmhKYiBKrmi{(zr1-N1t;v5~HK|6KYm@T)~ntaq=?xB0-rgGGMou-S4^zA~{+ zZ$zzLr=Fd8VKeuJYfK;aAu))z+^S{=Z~;bGz-)6;_B&-u<(H5 zSC9ztIhNZOZY;m{e^*Qpz4N*<28l`s*?UoBFy^MYK#KRc-R1Z{>jpBW{ESp^cZePX zgZF}{^AuIpXzJ>Q9M_NUmJJxRv-~#~`Dh|aLG<0CPJYvH@r-f+O)Don8P9SvZAB3c zkmO!>6p`UU>50y`NkP0PP3whE)FuOrsMxczpMlfc`o6Tba$550i4y;1`}-4Mf|(ef zUYGnRd$xlDw3A1DJ4Q;%2R5rd61fLEyEqH;pC-J(9}h)H{X%*dRg~!;Z(eYO*xLv2 zSA94-U!^qqWJs2_=H^$*vVx5_)bpkh(kT=0Oxzg;sI4N8YOH#5LVFxGTI6jX93LO% zZo^D{+rWvs^Bk=WJj6m#$)S;b>IZGo1HlX5wwIW^8S@(%2VHvoF|ownlj{maIi8vf z=G$nXg_Agxz+lr9jSK9x$+A9rRvd`U-5P^3t~;7~SI`$FrXB5Wy&F!RM}QBY1GlWE z3$w$w51X?0fUfI%L2`hGaG&Wks2Nt&Zkhl0)GMt;!>+EP;X`B-PrE&RbzyIk#J#2}NBo@KMp6*` z8shW&mepp;oFYr{p%$awewNxbAcAV2*}9-#r^XN8O0u7ZV_0p~eW@)AlevTxy2*uh z@nAqi2JnD^ph5yHH!rp^j{590AtnW`c`DWOvfWLceC=mMV`o86bP-T4C9nVj;nF58 z@|V`y{f=@h+xEZGLgZ%{_+Z1g_33*Ii6H0bRMbsM!mpSH%Um?0Cy78Y3eg>BX1`0~ zIF=WiyOqY3)*><*G3@2N)g=i3hm`q~2qWoXLcNSac&~wx*EY$IPdyn2rl~kL%HMr$ zT5CZnSERUg9+JDjjMACAv7uujM0+jqML_4UB;gXbO)anO=)Mw27aOc~V|}q&93Nuc zT7F!*aMHN|w<1S61`nuLq8PhN*N}B_~g6PWcBxK`Z)2cm?9%*j_6hTXfRt>mLZUcM@E|_$tRTXuy?J# ze&pBHYeZ_eYL=!3A}3^-&VSgx+^|{vMeN?$?=HZ{(gE-F47G0q%#`eWa?X!B*~*hV z4FmZIXeng|1pSa|Z2F(g0`)4$g0vKi=uZdpoCH;ib(gAadPPedwfS$I@CwB^I;ff# zKEP#si~A~0P%`)SW_xP~$~|sRXvs{`w9T@Mfd*Q2qrO0|u2q$AaUk zTYs5iURZDDW0o{-G;Dytvw=xmWTHkNK8ehb{gkVhE%z&kabN?UpS~+)jvyOYRFG(kjgjL9r68hw-Fm4I& zPsnlKF(i>$7>^!tV^x;2>60GA2bX-v>XXYi3hy;K z;SS2zspuGi7RL&>{3d|h1&jucAoS(j_MM2@oz5qobf;TM?#z7nW<BqeP?~tk85e6T0aD1~ekhS}q(_1hFwi%jk zpL(!MH)4nY$g%%sePnb&1u{51^7vp@Kt7Yt07AH5S8AX_0z$^i5)FQ*Yt-_XL=Pxr zj|f)_Sergb8v|`E?xn0UP$~F<%Dn9?&!=kSdA27#zUWAu>7azxe~2t+lProVCxqst zDl3X~CEsbIeJwxc+2^((J0|d4_?4Gn-phPDA=a%Ir<;8NfXKOVp1}Ul^CFcLx>gli zR!qeIy5S3ms+T3+)h*^HkAx$n!R^K$tM*$jgOeyGCdBZZxsb`WW1)uwOxaw{%A0z$ zDzlD}M)?Oiqm#G4)yP^41oEa~bbBOP=36{Mu#u*c3K_Zags;Yd|IF}7S!j8WUdw}8 zNk-b{bix%Nq~{wgtZ>xwp|eV%fE(2decLmN&_Y>U-YIKuEcUwfQYPU2?jAdy&{<#i zB7Y$beJpMbTi)yQN~Z(5F(&kUWyPr0<>i>5G|mZiPgbU=r{<6F@x%z101ruY|6rHH zz+-{AZMhNNu)=H*kb3iWi#4A-CFj_@gQ$5~u^bYDI=j61jJ_G?=npWIi4Afk!l~?s z7xK5xX+<=&QQK4BjFNE6R7Z57&`PC;qlc!oDuFA%`ZhCNCg|KN{{@l{F1T@cUT+2D z1}~=I4r52}*EGj0Uch-}p!LNbX4#$I@Kbc0Dx};es<-;t5%zVr&K2`9+v1Z&!`Tv{ zlED;SSwr|HoSFJ!TgkB;f8KP@m6@hkZj@g4t|}?2wM~ohu9EDCZCtC$-Iog#82Q+d?tEtC+ z*_%ECMZQWC{y0qhAik{asn!2&h zp?sRFD1aiFu>fFi0vMB-3nT)kIg{0|Sw~Hwg?U;VlT!ndq>X5UvF{X<$itMazB?%X z_Ojo!s?|u70a2=PwO-FU%|Hg)wt_s=vE|cgBHj@L!$C_gw4Mf|Mavmu*iUzjOOMNp zYez2+EtJxcRL^q9;gg06ll5T@@&-X-SN2ang6q6l^XpYO0Xq4&icuaYUg+wuuTlGm zVH%D=D)04X&b*ti7q>&dQJ>ukLeCdw8j+bjmZOManPq-Iy6z7LlKkAJl43ZcpJ`4v zWI1%6W0{y1YhEYi{cmCLZxI2!#UyxcxP@3YT*&n2Lu5kjhh3v&l|(d8*(4P02f|VR zL-#D!=2EgKLVClxf1o}IpcAPC##MI?heD8@b&j<*{>vga*SWcqK_Bt8f%Xi_5JZ;c zI~4YL>Nd!P)aa>4Xrs@>1xCKXgV_wX3pAnF76Ts1bz`x6kd(L|quUa=+NhuZ$D*&>tw z%2W>Q8HE8N2n54zoC^b+&!~qGH14sSpS%ty#@V$k6%w+Zi6xz~k?;W62d)JNb+qd} zXK%w(?BG@13LINae~Bz)ZNE9$8ZHKn1&E2Q4;n{Z5!!fg6(t0U@s7Dmwl=O~UF-2& z)Q>ANAzCV2irTcEg>0@{jJ9pyo-eCP^X&({RTa*& zan|5=z5OW6L=$-WFYm$%+t1p+&V^u+N)lvP7*V2$rEbfP{>xiIh1&y5U5vFifKekr zkocT_n^>agUtiC^K+djtLM*2@-SaE4&KB@U9i=hi!=vHN<}>Eks&Uux3c zJ(+3HswUPiV4bE1C!dmnd5zjidZKx;DLLKQ0%mr_vW^G3!t+$aR5_LNRTb8)Gowow zMw5A(D*Fn=Q<-EPa+;B~bvm#7(TCqSD~y-Bg*jOxNwdUN@U#T+rEaU#*;wpYM(3^| z4amt;%LcF|#6T8ts{cj{#DFf$jrN&M*e4+$w3}v=V;F5?>38`vcOJZ8M)N4(me1WQO(+~Z1!;ykRfF#31BwM5f*Zav+wR?M6L z^l6oTI>x>%aia+ga3i4d8I1FiR37D4hb0gLf6}5cDaqX?V+-1GqHMRj>MxGF2KhO0 
zKY4vVdDFR~E0OwJpqX0ND``@s2ncE{_|+fbUfnye(Q}j|+{+`MW;>IG%WRt}GAOWU z&#hJr(>uZhswqscYaX5TI90bzJjDA>*z)S}Eb|;{q5D!+rF_#4_PKf6H7YWU5OHMb zp+5Wd+Mqj8fjgLeE)AF~c@3dbb+wYS81@6kqkrQe{>NL?*FZ-Co1IEU{wECpj8ul~ z-OzJGOn`8q`UF!t@oo)l(>C+1y;xj6Fjz!qhwzBk1~))Vd>w%7>(?K9|4?r{TAP~r z6#Rs3yX@D~k5?1bApk^tX^}tTI^-QS>UrgZVN{OnpnqG7kxyr{r3_Ye2jFHh{R~Jo z*}}qddmCCriQlv|3)6fkxd|<5AcMx5YD1V#4JQ0B8;i}aMv9h}YUKHieZM#aES;d= z8r0Pk?|>KQ*C`<#f<&vZ%*32nx`jHZZ*GETrf!E}y9zsYqxx)XPh@jS5?dM2sYz}B zl1+}VZ(m5`ODh2o(k`MMnG}8*6El%*z425161okbNW^yNgAdx{njpr3UI0pL)*mm# z95~Q~qH0d}^f%P}J&$)T0Cz-5{SvFKO(F*mS&9D4*(uPY;4uZncgiB*ivtg2zkUD+ zC7FjYoi?<3-e0zg9lfK#%qBNCQ!d=eNj!vx12+>jf1;uRfS>*QNE>n~T1-{_q0^Xy z8D>?c*0~6q4legyt3W7pDH8~M_SE;~N%1Dh72`E2Lc41IHMdC_ zgjy!j*_G8Q{6@Ym4>w`>fZ-EP{Fq(s5- z7f${!7XL5Ev_>ztF^New%2D|zMWhg+nyqYx91?(IDDq@If3Sk0>CRGkhaKDFN$31LzzKblEe9^W$&^cSka_9S$@`r)s zKfmGl-b{UJ@jJ2hmj%_S2x;Kt+qfUnO5uMVG_f0C^Bz~L(uA1o=Uu@Gc@%aA{KRi)5wm{?gkAV3A!+9=Hnlm+6x@%P{|L34E zX&&CeMcO_pekV7O*uz14hyvtQJLX2iCAjR#grj^R*!AA2jSiZ+&X);;2h$&T*(n4T zTz_D>E!=S_scS9+Bz_=}{IeI}^n-&mAT91UC61iOxC|edKRdkV?Cl%T-Hx*7Q#abA zZi1IcMC;X9C&ork(*Yz+nzNjI^M+meF781$T2ai7sT$Y0JEt*d3 z-DNNnWjsCa*s@*YtMkYO(ES9V=zSMJr>W!ZMhLI9lsfE0@np0;g+*+z>F2ew1H65I zqWV7)^lMK81iNBBsyBb{HI-L}+})633MLZJ_u0b~18u(OM1G_B^+Mf6Ux-*Jsp4gw zHaK{iA>)aq3&C5sCQph;cW(6gHlfMm=s#BqQth+?%JDLL*{W$%k$*rP(EZgx@bm4} zr*yEVPv+EghoWmVo2a=bDx6duXO98+c#PH;XR6?qOUC;52PM(~t{;+`tD+Y0YdnV$ zI|4KTc$tCDDay{0tF7mY+d*V$m#BAFrgPCjryj@-cc%Nzn$QfqrFl}C>V(kSZZELW zCufZtfB%mLR-SF{@ovW^+5qtQ)X^^nT)Zd0-?Yu%QR)R$*;*)So#kD24`ULmq;g}q+~Ee7b?tH#j(j==RxbH61*y!6TrYO}a(|cON*^=%qNIQBi9=(tPmmtr zNVWbo2d*Moelgoh^9|2SI4@!1NF<=}RwA#s6PZ5c+4!eCyN613)=JJZ*^l-A%d?Sq zu&9{*VNMyCf38*JDy%p6F-vcS5-;#w$RoI&__M1IBkOI&93K2yt?ae9IxMqfK_U>- z89UJZwNjZofjasJWRJuJraJLjVe!;leT0Q@bYEFWzXl}#g6yI`%zE1Yiiw=`X9l0P zO=OS((%cDQ{wH&<>emaXG=nzT$3KpSK7Qm}7{W3F9wxN)ZqlbAA^Y;Q4?x{Xe^kvK z#vVTX9@n6xreleeMM6}D-i~7?26iE!|hRooGK&>D_dnW_c63jyjOhsXX;o z*W>QshJj+yn{YH*#qZJ1^MK1acWIJso<5z4Z+@3scbrq!d|=SZNm}8FTu*6r=yp2T zXkfAO$Tu;n(r!20>}MJIgm$0UC`W=#6ofIG*1f@v0tSqrb~kqN_0*-kHh9NzPg);H zNum`_nC;8Go@J5Z!&t!)o z(F+({U2k%f+F_XhR4u4h;uRKIVPQFv`c~HF$l^l~)q?wdzPV{L-Aybgt3Z%ALt;`V$~@SNUKx72PbkPrq;+&vTxo*&ZSxL+x6?BvpCI0&5p8$PoBy z{XHh)KY}e_+l;}dzuRU2xp0w`^!l_qL+s;a)U8NwWXHnDHHMZ&=K&1Nu8M9TLXl z@O75?)o`^}a$c*zAScJuxr^HhR(C|^y*8X28u}pAs@hE~d;N_&LF*4{KA(^5+N|<< z&~K(jftM+WqMp~SO*xK9+C#>-K@-Om@eWrQpm-fKnc`YcPtfxZPb(GHW`X=K;p0RI zFAU`W3$myD?+Ux6UD=zK^6!M}zlHVRx4;rBGIc~@u73(dgA`F(P7W(l{%|larHBB^5D}Aa3P37A zH1LuEu%3XOM%7y|4OngEQ*q2RsuhQYBMp%Zl1bH}mSvmD()X1E?~EH=f7{e`Od`F( z12!nUsY!AdT#UghRxxvivx!!4Ni1T22o)Nd$MwO;mhLhn)HGx@%x=SnIrFma4|6YLv@<3xt!^gK3(KF6Gz&+Wu3~Pt;MPm|8a)=dKY_dt193 zUn`Z1wq(nVZ*#XeG8D1zcz(_kLZaN!Di8hkmIzf4Ch>~FqpP`Cm&HPC+`*%H6Tjit zT)YGKF&S$0tsukYf>0~+N`o;xl9vpI24At{nQmI3YcKroZc@uFjtGa#zrDXr+g<6G z24^b`qt(Bnx&Mq15WhY1Ch?m&n0)sqL%N~EcXY|8s}jF``tCzfH2_Uk>}clc0~q2c z8vf~rC@w${f?}eq^LaSkA3E!LTa@z;irz6$Wvx#Fhfa8mjd@&Eyi=B#>e!hY90{mS z`_2HU;7y{{Y8hlfKC8AYqYQapy@08#2_E+bQsG*#sY2t9IIz3o&uPn`1I+H9r4qpG zx&9uH)ke$YvqSvQQR* z$qDx{O`qrWRtqyf|AC`<#|C*hi|x4W(UR^W%}23N@(F0ul6yW>@hIugDHTRKg3(&gF5FL5NHDq}P)gxDV_1HXiMyv)WA-PMl zA5zW?@j(5Vy;$6>y0rN_p4hnmqm}`NBSD3nquG>qz=x>+s2hP{!Z&fk@MEEX9=rPcl6q#UYBIP#Hb4%SYVm1>T+@5$}~oE+f&>1mH9mv95pk}2$n-hON_ z@U886EQv0a8)nk(#eEb9u@9DC|5l982n^N(6d8k>RbM|OS-@kAmv@}MeytUtd4D(V z&qE$Z9eGyAD^7S=FF}4c9QqL_F8aZw#nFrfIJ<^JIaO(+Rc;B@8A&*p z2C<9+stOyG)-(E`DSo!ngyH70k9vyAR5G^xN9b)(=j z%k|x+ub-dWA2$NL31H-YIw}~TGs^wz-1Q=PQbT98^*;iLp;d%YZ8U{gRt%nO{;B&x zcG7vxF%ur~Rx#uPYMr>GAn;6C+9 z!bDM6?dCl68(9mXEetbHL7QyaW7j78V;Qtree91vR+mSObXO@;HWl?Ox^tj!%G9am 
z*pV4rqJRIaK4ln)&H;Kl39Q2A2BLQk+Jh+)-64Ze9x{RZ4eKS_%Gk0)82^_;XO3ub zZ2z3X^QXb+GiFTy0GSbIc;g$O`4UJ4hvZaNivS44NXp31x=G4`vK8=keQrANQBHz;!>*T;L@``E9{l0V~MK5Zs) zjJ;s`o(hnrgZTS9>{ROHc)SIqUhf$B}r>Orjb9#S_r`_QJaiV=Yx96C{cQ=ZEmysPRbs? z{YbF>ZHmqxiAc@NWYu5ahto{wo7<68luQ3`)!$Epil;qAQ2V&dIe0z5`JboajFhcm z%Y0wB`t@lFu>7So96)jOK#y)iD`syMcK49FyC$FRI%Ryh+VG`D9`HtyRPCBmdNZbv zaoU!^S!|9}U#os0NqQETc6+q)62n#{Qe>{~(mg5|izS(2L=%oj9)c93u~|B5sY9u@ zxSYH6t^C@)e7RIP4HASH8Ov(5z*YY^$< zW<>fMU?#p)Q!Y9nwYPk}Hci;-Ng~q>heD_K9;DD^5M|}AdI10ARP2xUJoav$u>G91 z<9R&5uE}rEC<6{GDd-Cf58=BVi3%$BKvBuy&`~gyT&<3L+w<#|_Jd6l%V6B(*oZZ- zZ|75$pQ}?EuK@<8;^#1|VcE+4u0n2~%r<~#sl_!esW-rgGR3$yAD#^KN_9vRk;dOx7pD|HGy@vEL}dYTs9`#Xdn`^_ zJv#9RSi7(7?R^EWliFmsZsKI3IfKdB*z1UDZ4O%sa zJmb-j8wQ&x9){{yr@VX9UMDh=aRJg4$jA0TjA0fMQLwom#m|No&n3b@`A9RuxzP?+D((0$T@lqENAHi4jVOpTgt~(cdtsi$usVP=CLGRy((6 zIP?2gP)W}oMInz)-uQ5e(yA1+YI=dA38uxSujh%wdFOfw0`wYU+K^aMP3_7iYKmlm zW8z2~SS9**lj{{4jJn3|outQNQI_j?V4a1g@^_pCp0W<1`mX=Qz6Y0+20T+k?nb7;Ip?5qa6jIJeSuQh?eI*o|di zi$8>P;>cORuY&-c8?aMSHf6#G|1h=oI+Xw`7MG)nxnrX6nHGd3mB&Iv>1m=euzAoZ`;!N1h2=ALp{$68zd4*YahNj!Adhad4iv9;boc?F z<^8(afO7U4xiMHvQ){QP+u%qm1z+|TRs8LN^NuVaz9*^&nqcoar{fr2oSSCn0qMVj z{Qm)v0pF1Y|3%mmyTz5~9NiWQ{hl}{*&My_17PSR{&492IeXMlNe5O44a)bFS*R%h zh!E^$Ox=(nb2o%ePzkt0PQE_jvj*pyQF5wr4P|;tW!@QAV=Fsv5%hk5FK8N{+FG96oj8rW zRnyfT)tmU&S3B#qr|ab8CyynMk#`R$9Q{8E9zpY$DF=~Eq|dTosN%HEeJNCi*8DYx zm2^}L6F;W&$cf=RQyg10NL?5=7m=i^b1_!l`tmbIYnrX%bWM|1V{+6IM5%~`wVrd} zak|N96Kf_Wxb~{ok!|A3)rmW}ALH?C5cQU-Kj%CqKC-=OlQEA?9z8%X4*_<$DZz|l z`MTSlP@#f0UwOIEPzMl^(7Kd~^Q&d(95j_OKaKj<1OS5R&Sq6->+~d@ne9Jt-$aD) z1(AWcjJI*&i6GCp6co1XC;BrDEddor^SqDap|wDxwPhDB-IW^Ov3j&zQXDxa5e)0l zp$0PYn^ddUOWOn)6{5msp_-Z(1UsvViUM2h2#DLpov_Nf7R=F`4Uo%-2b zDM7HjY)!HewlgqcNX!|~ zJE*G`yA@u^sB7nDSuUG7%X%2APkj-7zlr!<`iuD&LzR{L5EI1&RXwaXE)09}_oL1B z8c)^oF5g5i+Vi^2gJe!VrP5p84GNfVJ&%Lna>wXAI&#wmxywS96|?UTzUcC_b9;-N z@92x>pAPimz5+Hh0G2mW0I+1%aGnoyz~HAIFeSJZ8#L~)00RzjURWj-g^q3Z1d|Ad z23KtQNW_RGXi_>qTw$bGy+wi!`RUGXhb#i2r}MFQbA~X7iR$Qpa32W(uJeM zW2Wm`H0*09Zu5gQP z$EP++zo=i3l0w~(rrLcog{>evsb#R?NlV^!j0qOSi@>g`>uTI@M+L%rIuSuBl#Ps( zQAIpO4|y`@a;bYoH0V~BATaG!RZe#4-TIzDOne4Bz|3xn_jQm1A`SNg=$zsGxF0)e zTst5py3Q>FRa-?Juf6snV98r`h|TTaC)y>KK)3oHz~XuLbQ*|$09Tu6Vb;^e+}jvw z=K^eHgRG2c=a|uuxCT|bYT8!NCMhRCa|$F5m^$#}F4H})qnjZs)f4Zh$_z9;kNYEL zI|}Oe{!x-zIi2eOYd>FXGV}!-Hz^zf<*QGxxp5eapP&;EdoVYRv`E~nXYU`AVK9{- z*51w7m-TzzUcFFK)8D*{S`S&XBCWsO8N)zZ<&iHa=#Bnh8w4nSJ_?=TJJrwGZ={5V zcY8>ec$1_ze(n}gaX;j1%Umq4&A)`Am2I(+kXPuimwnSP3UPc;di=_J-BGsI=60s5 z{zLwq*ns)r%qi4n;G-Lg3S z*;rwS06frS!1S?3mgC$$XXy&A$FP8BgEt03L#OW>b---%8_1~Q>9-O6aoxc->Wt+j zZh0|ZTep6SGU!4__RJpuJ;bsj4Z|E|sA-V>a$amZSGP4jX4fZmXlN}m*+7bT12yG7O0VCUtm%-aD!H2{nE zF?hv?ep<9ysO(V-WM?t651-jT%R%#y_M7{J9mC_Pw;P^WMsV7O`rU@B1V5$K4BKrI z&$i`@$^PWE6)DW-v?eeCW%QQlh86(=35sA;Uy$kqF*J6kHtAWCw~sS01QLoPd+yat zk}>#QiT)@?{VR+vZW$9TNFy<{*eSHkRI9}&3LGg*!MFn8P*ivtfJZ~O;p%BaHxVD2 zME~KplKU(Gwdlt1ZPukIbw{?-`3z1SG+BVBdQ8|kVNmSr=eZ!|R$tnV zReZKXIuu8pY6OF=>j(zDJ!-0~&Dz>@2ZZ3QS8oKCJSAqE7|bGl=RaCW>l_0bBN>l; zoePmSz}4=rtKEFr z>S^#Y=%!GZ{zYf!2H)j5U?wxJ3c_O9TcPKXyOw60o0gm$xZ!L#oz$Fh_zi-z>x3-J zwNXYaL6Yz04j+dsPnI83J)PeLz2ALXf;h^#U}3xmV42|GQ^Y--F5 z$;Z>~&h<{n)5puD1Vw_Ww9}=#L`76`?uDnntdQh32dpGdXlksrheVY9h&SYWCqtW7w|*Yfz_K-G zQ+jD3H1{bJ8(AzbfBc?>qDgmUY*(OeKhsGU^A!>bO!8%F?h8uq!|eea?l4)ayh;2^vqX5FF$6s!i<3gnm4&tZ>H<2ci6=P<3}UP6;knqU8w0yFD1i11)bfw zV_P9j^7UDx$Q>2!%-llT()F9lV%zp@H|us}$5K%aiJrlHgiWD)$2uwX{#v+QLP5kg z;jRMx;sMHrcJA%<4F2mg_}6IdZ-1d86^nb_?3>l#OQ66A3vN&o(dge-5v|lwBQha? z5yIYih0JMAs#&Z|W|Zj}qLj|f^~&wQiqdG1Iq5%0a1ZE{YH9b5VDR(1a#O#&T+uh! 
zuHucjt(jgt@2CDwk3qAo=Ht=gxRxWoZZ=o)y3OIJzyjG-t>4tBdN8qD-9}SN&*Pe2 znGS`6RY~0j*i7qLUMWX)k$LbEz|*39AFaTj zUEYrVRYCRBbrSUgwP%R&?7ITi)Ty($>kT$@9744of9F8c0}YInV>) zOvuw-w4FJp3JeMHb0u zi=q`d(zt)1AQ85QdDDt6QAwEiTUfz%0hkp&dFe$4z{M7l*oB2zb z#-AM zuLwPZ6&7nJXXxl4X|QLewg{d@2+vc@eNj=}y7MpM>_`k;XO*3S9?lYUH~I;m}mE)9{70+HcbPoI#Q$(Yh4w?2tMhVO~zQ+8t4g`84j~pPLs&J90Mh>Z)MK$pI@6 z!nOUj8QnE|jcT(_+HY=w{A$hzZ_-VUO*1;Nd+xuTh6c;?XQlO8LeR=Wa*8^;1gMfK3AZYS{3q14_xaAVLDgrkH?WmSFa~%BR-LfvXOxNwJGHbFrEr@eGkv=J4lyITEqOd_Wz`7}=bd$m zwrY6?DeS^GwwIF#XAxj1>s{Kt%eigi{7>kxv9v}Hyl1m*(`U`gQ_iK7=rRAz>Zse1 z;+HNpn>8@Rv!}C1U$!sp-S1z#n6+#ySLUZ|^uIWm)MzN$xeGSQZpc z)F`H~-2d<<&52zd*v3Iw_((c&zJ#T_QKMu`H=n*>d=tnTCHl-Fk!089-8sd(bb7J( zB-A6+qs2dZH5svxhrIDTmM@z?7%?%d^kwvW zDXoOQT1aLAD?3CeN~@uhJK5eO;bF{uKat8Yfx+0auC&NOGparR&88$9ggtjq%9|2m z2~)^KXz6c`*Ei;oi_2K1`wzg&o0`QxCB7A-+ zJWa^!gt5MvH>3VRs#4^)3z*~HxzovI=OahqNxyNJXA2n1ef7LXWof*Kn?olfv~Bf% z6`upo+6A?uNe;=|zH@4QyLrxMLSV@;gdxazXI{ZRsq5pymyL^}qBA>-0Cm+Fi52#I zi0b`W2NxZC!NWE6A~=Hm_2Kr^0wwLC6Nr}9(R?D9qkEJg5vi<5>jS*c*Eg^ttfq33 zXh9~nxeYJV(iR%@aKj7&FbXWp^ESef2c#r%EeivKcoozI zUV)|NZ-%^5v|mVs-P+b zgB){!rJ?IcBJC)3Iz~4C75b$#_CW$mp2TIcx$vUI%~5sGnTgM6fg<0fT(XFX=2vkG z=C<@M1K&d4UdG+`1NK#gWUjskRi#6hXE#<3US&9R^{`pTiRd~VixyS8Hj3tGaH6rK z=eyX=b?v7!=1#&6ZP&E7er<=-| z+POx=aK(A!Hcp=nqHY_P`M$ss(V%nIdeO}UAqn^};l2yyhJh?7 zu)~`oe9A12lQ8Y5G*xETH06*8C5RF#p23^sfaZ#U~?AdFaLMN&d&R<{AC(dKL|7|liI0!Uui)0MkgW|}BC z*fzl^C*Fm(zXqX}fz$7Re+?pf4f7L$Q;4Bwxn*e}?+#Lk^=_*&XxF@@1|!Ub`VVh?FrtKO}~$DildwNF$=U<~Oh_v78jtDD>8 z_~^h0Dp?UOk!QlX`YTkzzC)rS-KILiiR$cXkAq2Jc|Dae*KS-7U~CN>=*vWNs3vv) z8JL4H)6>~8Y?`HQ!@s-4x><2CnbsD*iNO?0hqM{*YE zwZaG->`%+GT>m^Lmn-D+LzN>0J(x`NH;8)rBi7gcN!+O!ftE3M#37OQbFSKwyh$h~ zw3?A?%19>PLIuqB*L}om6ycu|kl-YG``d)W_{E`~S4-03BMKM=&<2^u8*K|b(T@wp z^5+IRLm0U3%|ihXF1`FNetg8V`xik5u|m_fY)D>IqT&t+bRy06)&2yj0?M)3RJ7lW zv2mjFnMlzl?{9Vw$CZn%nK2g}apIysF}{h)HY;)OXA@!prdpx?|QjTj#+2R6GY^% zIk@o#z4A)kEFM}nR&d5@6f?bwn~h{TU!Vg@TH5FTq$wk!*;n>UTLEItKFoky8cKmC z2sv*5%)RI=ie&@NxX~xQcN>(O&Z01yp>F=MoxMkYCs4buD7O0RJFGsOi}UB@NQnTS zWb92Hl80T?RwotGb8@KwaSs>W>~bw~laF(iVn+(kAt!gSI;-dzJWB<$zp|R}sbu;S z$x|qSE~l-l!UP)Nnib5BJ$%Wu;~puArg&kc3t45IIR1m2D}zWv$}f zy7aq;rU8TQDAQC>%j6ePzWC)ckCjC9Mq13Gw(cM234P(WUqfHIKln{=7uPXl7Ln9& z{y(GRLix&q^h zN-US6vn9knp^fS@2(*qR>zvfbb4c~l#5Xh423?^irP3jU@^UI3XWaG{&|b9iy}CbI zmt#N5HgU~Su=l?+aGst$^F<7mSYPJ#^xrzkEfh@^_4y`U{53mxSp^$Tm6S2PcrZRx zK{@opJTN8jA|UI9U?50{#1dXN$6|*e0iN6Qa)#}~4GT$)%`AQ3Wy)nH4P4tWzM~Pk z8w0})a>DKYcS&}fo-&$A+a;I7rjAE#TAulpG7eFmJuzH!XjS;KEz~2@R*^5(#^WQ- z^T%$|nud-g0_OZyNWV zYchxHodJkQc@&=_s7`l=MU%&t2v(4U8y?Umb62vsBLKGy#<0`>T3C`~&NA;4q06l+ zO5(n8O32{*$;LJ`E{1fei~WW_)84pMfh;<{CtUzT4>bq!z2v zq?XU8c(8T+wNrFAj?m|+r>5E5cG+&tOEB~N>13}J5^5KDEBzBA-?Lrv3HP5Z^Jq)i z5b(>gsZ_oV>iK!AD*yp3jBqd^-GNYBpDvPycWk5xhT(?OPb``C>J}f^+$$vrbIO04 z6GzPB4UPvkdgZuonfXwf|27oo#BrLPlm*DQ|BG8tykRTZ*(RkJIwTynNAt|4ixk_w za~GWKW2(P_JZ)0Qw@+k=Wska7B)pj9Enu!s@WI6rzv3NRYFh&WMDIQ?>br%=DK7#O z?H;j-eUzQLKL}P_tdOGS9#U4<6h`uMRee-CnHpx6@(0n*eyN-?Jk`)M4rCtQKMme6 zL}L(EN4L>*R{gN=Wtp~Z5Sm*33j+Y`he&t2!7eBw8jP|h~dz7`xN{I0Oj6-_MFG9)=Q6Ve>^HY zD}KMV3GMh5;hBk`OImKizUtbKkSnx0I2vNr;n<$haW$`gH(xPgYUp0NEYtQJh+Co~ zo_eW7d>@X&lc4>TqLL_9FaVr_1F-I@I9hb9n6Q>06v=h{tjBd)c7;>z@L)X8os?yF zIm4bgn==<7wcuP{5ZKsqRjx$$Lw zDjs795hc{>l|87vmi-A7JM3Laf3se3;=e)u@Ml&r!IEUsTBL-A{>@w19H-aQ98u}{WJnz$wp`!8#3>N3DqX_Rd^~QIZBmxKC^Wse z6-xJkfD8d|*%3mTNs=PF;ICJMb&ux^ZL&J;P6$evn0E^WUk+yOTHA6ehF;0d@vDS} zLryr#OISb)`4SsJ1{Rp9Bp(?Dm+gvys}BvDAnhhS(6!vn95+vY|p}t?Ltvc zpULloq!ZK2at(Cq3{~>@nTtSW%CF+dTlZ?*(ouyXNQ9-+4Y{r-pD1a(WgM^y2{)9DJ)8$fG-E>gS?0odGKmPTgoSD 
zs@TkU2^O^-eQ?ToPout?Q3_3jtHn3&Q@#pD9i>3nx7w!UrzAE>Aw|XWv??XM92!a+ z3rwOys2DLJHVkW;Dx zMnOjvv%-8auRGT;CVLm8vRp7km+x`j^je*}F`hX#FzaG>XE*0M5u9wgpA_;CoWp?m(*9GC0RceyO`;(hHN^-e7tlp=^6g9(~YPg z7Z_RrwNoC%0{ACbw4I&3f>jaj?uy^ZQV8E@1F!u5AA-k~48>ptwk8ZrWA(c>;Obz0 z6Acs9E++oxXPdqig6Cf>h=Oo2FjdF&@^W)*h&5Og@O@D5BJ?kkqCe@OIsJ4g;T{S_ ziK@Mss{5nk-X`lLj4oeAF`X{LSPjemE4{E&ol#0@XlnSAA`mP36HCr2OzhCg*qMCt z=|=!W-ooia1~6YU2%+I0ku$&Hp=wM5y5;+(X=WkLYFH8J{N~Qt>e#h=X>M?~Vj#10 zn8aTv^90|7-Qw+f%lY(L*jfYr;^K0;c?%0Xk_fE;5BY$q26=mF*T~-{lUyGZP@Cg@Jhvs{wG|xk zAa5k7FhIl)bySxLEJ-$9cZh1X(7F>5K7rhScuvuHGaA zD;nfwpaj7gm@<_&7t{7uPql?BkL5=GjKGd?L>VlW23Fd3TgiO3F_;lIz8##e%q-)2CU;1gkRi?&Gukn1RxI zAG*?#2kY(JfJ;%B?^YdndI|+hsUY_H%JTvh^ZQnaZD**d0PtBk(P!I^=?FO|aemUf zp2G(RR139Z6u*I`L*H3ts#V7dOThlu4!~b5sOt)C$HXUc;NCr`KL3-5CVUElYk>a% zF0B`7qG4$yJ;9ul|DYURBcBld&Fd_!YSPDNpFTKrwhLDjvY%i2ZR9r%?gR@lFed3k z!72P-H1#hdq$f_&4~(L0WohT*t9QA!cK2E4Pv!RF9?js0VrFJpEINJrjKKhDeVgY6 zbp^&}O?!fZ@Ww_C&S06hW=gaJ3De2xbV~S3T<)=iR}H0W<*>}LXuD)eil5a|XAxFt zgCa}U7W|ar`acO72lANidFkqQHDq-p5n_FFc2aNgJq!pf?xMO4GR{z-t9EDk{AOXRWeL?aQi8!{hYH;vJ_mf3Io; zdaUB?OVbQT+e_y>HF=lz&Cc(HSr2DA1jJ}a>L!Txda3= zKC`-+oPzSb=qV(9HpL~12%-7sNY2J~q=(!NO9qI_C&Xz}k!;$=S!t0r_KG>{ zLp|axi3v|2hB1;vqq1DQ6L^!l-w=ZEQ*S2*GmWxMzef_{0^q6q#*Vc2EbS9tDbsyL z+Ic6w`ad>9afZD!Kr6N(Aw8IXuMZl9>H8_yuh>Q0?u7PmoJyt#W?%)%;*cHU2)i*C zE83q6nkud0eIOc2ck#AX-@yw>1!|_i6D$E#pmNI8{*yi1*_i@Um%fkv2i5=2@f3m8 zpMBA!w!eGFFdGs6}e!I}GR(B}O4f))`6wq`OwVJ<|%S)2urkL%5 zuvNlyL^llxwEojWV6&*gX%#hK!K#b;ffBJ z)m@i3;&IUc?zhw~+vRTZ`Y045YV@vNI?qA3LE2__(*aQkb^NQyXGvPB?h>g#e|8kS zb;cP!uxQH|Cu79;kyMv#u|8aO&z0hL0cgI;AC$Y4SC2%wA1|ahP(#NNmskw6MC~83 zCoF#iDHui)ryx?x%)yXxXv&R zYfgMYVFX;+vV0f)Ok*8TkRX1IHBdjGXV2es)7ZhA3e2(p>-oh|yM$H6wcfJ6d^X>+ zjQ}$Vf6O)^2}mub%eOiyIGgp;j?xD$Z;vh^sg%3foE9Jr!u&m*ZUC_Cjl-|(FJw1g z!G;wD9k=9(13TvmMIv}N`^{_OAfif-$B0S~G|c9~PTI_yUS1EV9E`v34Q$G4l9S%` zsO3eMqlMP8ET0ZCI=pbPe*&xTE9ly!vC_-y>*<^?uS)JFNK-k@&_=bYeeE3_DzLs> z{N15g2%i@_?@C_CWHxPZe*E!PV4E@3n#9!K7v4IUF{JI$c$fEX`mqPX7)Wni2n-Eg zd~WgSHw5pG_E7FXBuH08nG6lztuy3HrqFD$QveExBZ7gl1oOA8W>$GqZcXhYsRTTSfGPdd1)-Tgtvu#TNx7i^U>Onq z7fZHh4PSF}=c{I!Cg@fx^x_J2-Fw?7ncZ^IgMP7Gv~#j?YgO9E7cxEBq{+-Nry z$9+$=nRpG-z2PSLAuD(i8Th*TfrNr0i=*%! 
zA2IBn4>8QM>UyEsL|Nde?Na8=>a;;U#`C+AKqK_J+CEw}luKKrG3w zo-juRR2Yz7$N1*O<3o*R8aW)$15d7pesob=1FRU`Af_7a10kKlMOkc zRa-fa3C83&&qc(YaXfn@E+q~ldz_E&K9}2^s;{O-Qp#DqTX^??G&AEc{(Czh29g8m zpo$VVLV*%;xpvJ{zyKKEZi4nBPJytdAOge#^Vhl3_tC_^O{%dC&dHWADk*)xuNK~m z4GdB!DkIE@k1N%_iTSQiF-l-lgj>R(WoGfM?sb(>h+x>Sc{S;;Y)aC~p^432#SbQydZAr`P@exZgjFJdHXomjBUF;k?#5DBAK0askB0=YoGit}+??d-=qPK~pM_pGM}f|lQ8vBJYnTJ}O9`?mzg9S1HY8|h8Hf)$ z?3b5x^NC*-nVU;!7Z@3_Mg@q5hDBVX-73Ji6G* z4p$u?GugyO_2F^pBA=FXV|Ra1QSP?jyd7p5kae1B2|Oe%nNZAngvw5b&O?-) zQ0lBkzsf|C3cYe)hx-oBgXg)dQo6v4gYVZtnKo{UYKD?zRKgEl7gR}&OT{Aap*RW% z_UXlFge?<-(_yb)T4>9wOP`i|Qgstsr*-WK!=>OJkzAh1*wl%#6@35UX?yG5Dl1PO zWz%GmrV&4M&reigTGv{w`#N+3znER?_{f<9?wCZE*3X-{qXM{WN%_a$tV0_8#8KyQ zry_302TZ`E^%m`~7n~I*3H(MHoE(JyX6Z8B7IWf%WsLBAeumlv(J7fCfKW9)DNiRF z@&!M$D%{MrN^%~#CzZQfY zf}a(G>IOCDhzLEO{_M1CQZ}vTGrygHU=7=8(OHl~FFf9N2A}ehk`P%U0shZdNOo@u zO#4@AiX5gW_>0gsR%}H@;gm*GH|5TE^u72Vc72{yR31ICCRLkM+qq`UMYqQ?3siL6 zhs@a(cZ+1x3ehk~1ITO@;gY2tTPhpaZcemYY?HuAs<&lvPD}3&D{II2H5WSs2@mO!y(>D*pB93(D%8LfLmzvHcvubMNxZ z26i33I5C(wkrf>BsmN7$FW>;>zr0ho=3$W8Mc)w^=Uzhgi9Pz%t3--^~6)I>cOe;e6VM620k7So5OzS*+lYucMeBKKzY1f?dFEfplxW zVZIb1pchG0y-0TOsO2~qv5eg8C^3v^3DNe<%*<4@Jii$X--9bBC&|PeyRi#4H$Q8B zT2q-b8BfnS)cDo{+|Y*d0tt1vX;MWe`A0HWoOR2sapydEc0zFcX%)GJ8SA~;8d}Um zMm37v=9$ratyx?E!B(8H@ms4;Y*|tnpKijkLw{|g-dNNeg!9N#`0#v0G}QVue(SKk z)3=>fQVSZ@EL92%wQb99u!A0q<4p>C3-;2dSg5kXtj+r&9@ zyJ$i$PZ?Is%j&SF>#6Tau*jYhtIZ4bYW|D7?MjUA0`_|BdT~{qTuNd6-qMWGGktV4Q75zYt=KPZZvUbH@Zoou9>6E28ceE z?YWkiC0Yjz_Y>IV>OOt0{vjd1;kv%5s}$u1d?rfs-hKeSD z1Z8OHuJ#G1QOu_fak!=wDxh!!kO!%P*hZYjr{$CkWj92`lf*)fe>3TSK?Eq(*W;cx zmnY;OxAFBWR3;PY_sEaHVuE~qi<;pnohyPqm6R>J0yl8Urn~k{Q2Y~jY*Ajo=%K!9 zcq{Y5uNq{V-;%oIz8>LQ?*@>G^`4s_JBK^^{Xt^g*a&Tt(>g*iS69y5E?4C`hv)HX zuuS)Ca{3s~!REcyw5Nh8@YOdKOrh=8!?g!1coQ8Kl^!JfpX{4eb>xC>ZMYZ&vZkh| z6N+q~`9~66e<%UWk@_LDI`n!4cyUg@eXYd{^-H^il|aX4&Jou)zhwz#u<>`dHJqog zlitf?rU3|$*yu&{(`=(Srhw*3V~By0WQ&@_MpwYg7RPoia3>E%R1h$p1q*r!sGS6h z$&18)80M7AU?+wZtrro-==e1!d7RZLQTI^{xlvb0jp^MKyIFJg(P*AO-Pi{A*JF^s0YI3nuWHF;j& z11wED))$j-6<$sRh_^-k&^3LJv?UALLdzrj$CIGALci9FNz!&*t!O=J4q9{T!35Zu zF5VY%-AG$fC+W31WR)vi1=}o{u($^(iNQ`S#eqRrb5kzJ{)CxTUvF8QHC^Pu{gE9W zchV`xIot8-ObLR&Hmya01ogdo?N_nQ&E~w5zx;}BhF}SMm}P2^D6t<*+x8dTyW{)6D$)j1<^@4G_zMR}G8+F{XM^~a6 z!#;g{EX{?UANLCK^UZ)Iq0*<65g3_Qbx?#HISW^a2zfqm6OY#KZ~%O9Dr2{&vB4}f z0@Ou9c5v@9n-Mgu=z{ObQG%dmt>5e?fdpKm*GoE)3WvdpsI#9u0{J@~`TGWtw7T9U z6x9DYfK&gJqs*l4Q>&Z+26wffH_!;7Qg0`oV%A@&A`zC`Tr{YvbOZq^)PtWGTDYG~ z7^U|fCk)$NvPO$!6<>vgb_tTm!!pJYQPommL(YUJ859%hauX@`Gs%tr!ZRwWkv8)2 zKglK#1$_lKP`YkZHZ-L!U?DF_7lRtdDbhl%e+Bu;Ukb{B^804QJn%i_L4t-*|Lxm! 
z|MgPdB9Aiwn$Z{-?=VoHYYxac<&A?*0w{$71w#{NzxM!}AMIfxRQYB~Th8dur$qLs z_g>c|cX|U(a?&4j@9#a)gm70$d|vR5!KN38SGtC;z%XaX@5FaZs{ zvT}gIUw2E4@R_gLz0X&yh>nTp`@mH6`vn9HW8<9`+nfSXZ0zD-ukF3vvrA{Zl+4o7 z+|s6r_-qMiADd>g^!e4Vo-PZ$9t(?COGDguWpH&=k<%?!fEU|5dAupSdh3F*B-9lx z==m{S!81jH99A^E_$$?sy9&o4kB*$f00vT{uAUk~1w&~;c|56JB)hWGypy|xpDAMv z@~eH-_MIFxMJ*0&7w6_v&&$Mui(71MNV;@nDs`MEZaCpJO}Tqk@8~?4sAvMTZHNyd z5BjX|8=fFbRX{qB*k9|RR3~&H6SWTgI+OpW_;d_gJu&0rJotP@CPknS_49Vk^Q!)H zS|Hz`sKGKmVyc1B+h0KZ93^TOOTVNFj1}+`{440W6zYrRC=%-n2TMLH<$W|&cwe|rmk&l)2NC*L%Mw8KeCi`aE zyU~@t2d6r0W^@nU<3f4*sIQ>Ez^(z!Q>F_s4=|s%_?0^V$RpYFVgO%FM&fUIE=z-3 zZl<=P!K9=}hpevPqaMF)MxUg2n;s9}C20ecu&g@G!+v`iMD|tyNCwa0?1yX9>Fx%2 ztP6WN+iORs@f#IPY^n;*$|>jvC5iwUy418-Sps=MI^bLtF8jpFL_vJ;$)M<@II+%K z=Nq~=j5E~a(7+$;$4GIq8MHllVdti|0K-ZMIb5K)uC-FMy9+NC|9LK(F#Q@5_TnVy zgumGg&u_c)DUX=Xh}|oDN=TJl-cZ4s95r}#^ou^Il637A z+Ajogs&$)u#n&IH`sm{axVX5y^FGSSabR>%(csNP?OMEp3kbOA`he^;VY5rt`I3t& zN`~3)v^8e@ZB4A@*7d2JXE*rBZ8Rw%Lq;m`{dp=yJY|4UtY6QzeB%(frGViP{>R4V z=45VxR7qQIH<$t#_yAcKUk>PpyNDCJp*y7WrO>YCK@$1Y3{e-EjO_g%yCkneB{sz_ zfs34YJ1Q+ttk4H&zErNWiCZ|@9TSx)AcRWz167Ji;wvH$kH`xrOc00$0jLbh*>X3tz##e)6iq=J;wlBUFfXB9` zmBn~&81Y8$GJf5OnhOPSdhj@Y@^roCqK3P9EC-)#W-M$i}Gex)X@+q_QS z*Dl-jubae2(2eX9OQw3pMBUL)Te96Jxj9464nq&tHBHs{&E$rlW0?SZ+5S5hhZV7zt0mcGJY2^{9YE5dQ489bjC1H!9~HU2_qj45rIxH7?@huxeYaFP0Wlz zWn2x5kH?_FW$CZ+67K7?Sc&*J`Mrwx>Z6i^-?GfZxgVKhbH(DZy}WNG6s+ilv@qWE(&7h? zaqVn4qj!Oarbj#Z58DiQndg*eDWk2X_TQ;!e*BQfWC6WP$y9AlKFVN^5kWEZSmatH zHqbL}+3zP06))gYE4@Dw^pc{LBvd|JGK<@>bO@uzN5)Y6B+OetK`#F?EGd55SW)r{ zKIyEK{xl-E2yBxF2-Ep8uJAfNEpe41FLWYiv?QIpU{l{ z+9D`2#-{nb66R$HQw-51@e3%x&bBx&=p3D9;e;F@hq~)KtRW21f0qNFm#{lVkkgGH zS-is(D2D^tEA5LPH6r^7;+>Ec)(r*>9jLnUpF$2$C?ugnbL}sF@x1(kG9-Z+W*Rl+ z2bSthrl>N>tO&v&pfpIv zJ>mMo;=Lm2eGcFDTL6d(gM*ORoPJJ&k7b!}mH;`%zTB^=MMcWD{X^d~6#IiBvIo2E z4P;=If}~-&Jv?^5wJveQph=da`DOsz%^j??!CYC6_N#D zozwXT!m`{Yv2IMQV9MUW4OgD7RaJGg1oDsPlIE^$3^Ctq(J`=G?d2F;j8wFiBw@x1 z>(%1eT;5rc3)4-S_V9`1#e`&dVIBLN4C_%&A`f>PRy;^6fZ#O&=DCEOT`0e1Hj=63W^Xy)Vp;Y8ICFr$TJ)hD%#%S;ET>= zwg|(T@G0*)2OmKKS*m?`BFf&rDOP7ermFhj!U+28g`XHn$-y2=HGr-YPrd5AEP`Is zA_wB_C7tb2sM7ekvl5jOo3VyAYB+KMp8m`Ej*QizqaXIzPY0ec_HRvC%Q)}1YVxdz zGudNmQv#;OSlRw;{sc{c|3nV0?@WhnyW5m@f(?Z}T*- zCrWdL0sOkB4u$Jt&?Cg)K@j)nv!cfwPM?FS{cB7vnV6$gChHwCn%O{>sc_kJ?(Y_^+_BNu5{whaS7H zqaxsbyYq2|g+yG^c`Ez`hB_$}78mPG_fE^Qt?E|+$3^V!(;|8Xy znvNM!PLh1x*W}!9z|0@E8O6GBG|?1zGMm%;TsQ_ji7!=PcH$& zB|l1I3So}@8A+J=`_fw;Ry1~PU4k8Zn7|ItWADmvh|cxXa9YHs=x32@`%E&VBnbkU z;J8;j1Tu!FF9&|R79=vT|2qo+rqHsRpknT8iKWV08|ZlO&u-+c1IRRncJCWbzRs%o z22%NyUs(pt&RKeTZ@(3Xw$h#YLJIqC=iD2z78gW2`wzcj&@V`INBu~aeoxnQvT$_0d%3sn zy?mVE_*8$sl#-VE5oj3PLnx~_CG@m8-aSM#_ZnX22^;=siW?RRJN)*0|z#&K^-qlK)miYrv=I z`FX1QELP|gWw-9v#w{H**%=-OP14c!cHPfEYkz`v4pU#9W|KIP52p4H3B_x?bVqs=6JeOPgD`@K|B3!*a>uO0G6l`Ey9j13=-!#^)f z<#vzbQya`>D*JWNGb)U6R#iR!2$84re_ob&g|Wm_-uPi;!rYr>catTj!8mf79_44D z5Dsj2h9DJiR2UJr`=o~v=H+FjX}N;T^N+@f>4Ine^LeY+TKGKNzAJl1|U9o zffj%tOZ((;RkHXnh%+}y(S10{?O6=*FQLDaFs5*zE%7;Z>nZ^w52k(};Cz>C{K|py zB!HEMV)s_Z^3x+#ESPd@EZe5DVX47wHp!lR{C-N`U4K(QDY|loS>!;3eS3L`$lcJP z!}77N$Ivfw650UuZL66l7tllcm2hTOJJ1rys4E{XX3qjDYguCBb{wR(CtZG7*0<9G zcU4SE3-%jiYBu;6MvPq}e4u!;dK`^)t2dwGyjyBb+xanL!Jn{R|Hq_hiO0)7N;(h_ z38`hK$vnBNT7tL#{ITGe43UxvG(*MZ1~xbX!5=TWl{`!g)ELw<_M-ej#;{=BA5=8i zhfdpX#_>gmZO{+#Ch#7;axu`EhOw>faCxntuD{V003{(W>lRP2PqNSQ0-F=>gX6mF z{3nn8?b_UTd5RtAqKL;5$0jB|PrpMR9u%P`$7ehEJ3FJ_`~)BG(^5?|M5*j+=YFG6 z0z#?7zkoon37s)@yK$;WRSg~+WMSHVgeIAEdt;Gt_b~h2Tp2Hl!(0Y713wX`s# zn8-P{`uD9IYc{!^>U=}0t{B=Fg|&0oTX9nl$BUkBLg(@&UP0?gb`8ZfsQ$o6Brfsg zz(Y)jj>N$fp?<-#qzZGGGL7#}>879HELO@Pw8;kFe;LT3{O)vz7Yn%z&|wzt9JGzL 
zV~v{!;2lqB$Ys8;`n3*pP!M4I>bV`Gd?r>ZgT+uzWzuKb? z1H%|$YpC?hgv0z{V^*Q?^QQjJro^W2|_F2(G!j?$Uy zcR!{4I$G!ZA>OR8SbVR#1VxHZzFiaa9Y8+V=csL~1MN5J$sFg~V}0qb9$Yf{xA7GB z_pe!vbU!1m0qzRq2i4yNTBQ+FAqIrwT44NkBz`|w?fAQ!&;S~yP#_Brx|`F8P&_cG zE1V*8DEn-6(O?yjf?I#rp>)x6-8g0ZSYg_?!SgMgkU#sjLbu?Ji#jS+%;<>zV`%8$ z9aW0>nITAf?XwW7**VX__npiV^jALUBf@zBlpjqs7S-gmgn0qb0S6soXLnJ-W1Ty6 zP(Gkjwo+5(({YY<2Xv+71JPdwd%G7nYs)+`J>tUCu%UI#yT4t^^9LR;3N0d1cEcrb zFvzF?z29>8zpk;R$H@}A28BP@p;9H9>*zW2+z-=KWI0PXQv7-k}2 zW?XI8BJMtfA{VkTs8&9 zzwG??cN0DPstt+V4%?5(9$K}vZ}QyZw;Kiq@T(?DXqPVec;?i^JmioYfUN!2qJmD| zyRU&rMMIgY`xe(7HF|%Mj&|?6a`^kA-W$%vIpU?(kJu*cYj3raRvd*;>%4?oCQwg~ ztx;N>^R@tOQ`$X5_Dnmdln1B_{oqMfHip#cj6;2qRZ&`e;bet;!)bE;eW%-NxQEco zJb^Boi4FloSA&Kqqozbwuk_W?MwIDSQ}rC?Ft@m^NF&h+*&ksKsV`E3)9AUQ$f~G)W69Q)L4}( zZKWhHGMu8Oxidf5SlwnQ&)Zupx0WIgE4IOA{h5AooT#N>4112}n`I$OtE$>?<_I(% zh&-F6F!K)DjSlKnJK{KH9Hr2Rd-53Rohk@&fj302!Pd$&I`K-i%7)7X;=s8|XLK^f@2#^HP;1@z4$j*J9~@uYX@jHSr- zsgb?Zpk7kF$|xg#6LhzND78IJsMzme8A$i5#H!_E5xtI6`v{20PLNbM4)NASnIi1K z?0ri6wbw)A=EZNVKf6FTy15*F;O2FtCG*vmkY-0+mj3ZBXva0Bk4mvQ6B>)Lgdjx* zpuurp=K{4sI_@*^le|!$piNvl6nmF^d&9Rf8!}&wCr?aveZ;Bp)#6Y95#easG%A*uX)u`|_(1x4~G_#<~#gtrg-)*LbuKH)>1#El_^hb>$omnUT5<~*`5H|HF@=qs@fG7LZ2l1 z5nME$q|T_2TQa0Rv2b;kfGlk4`*v{LC=tv2GIt3dgK;_=P3J6c-eHAWuFF6x2cS0? z!O;-xnWz+ZqQiyK4g+j=brgWm4h%wKA?OPV_MVbN7!+3|TflX`rC@&+4MY1RC5G@9 zyOW@(Lsw{NwY+r2*ZKL5J(v-V{FI_lPagKcaoQOR{*<0dKYpCrZY^Biq87Xqw^*p= za7dkza6=v7#``P1&tPm+xfbAfih*Dy67XCN1+37D!0A=XzJSd#=hY6arcH%~rR%3i zQbjfV@`VL#`ccFTb*5?nO&fyxAIlUOB%5+$Nm4?1SIX)rY&CN@Q4z!5(ot$^<{zNk zH!PvA>D+#zf2TPFUE(bnQXmSE^+7b?$2MwvP7aH&60{KPlF4S}2_q^_U3=TIl=b_vgN9(ZE92mMe_XNz?NcVCb*zfW+8Z)DCsSSs zGCC);mf1<`ws(PsdnnA`xi)y%_DTUOofQoowPp|~1MDJuAh)o)deDfghPjRaO&(gY z<8`JV4J_~vPbbgstk7wljLgZ73>C1707_Zox;9wG$%||VL{>wvbnu{ZZZavyerOwT z3UJcCHn1YPW}7#!wYCC(?OzPE2kmHKjUMiTJ^bU|An?At0(uJ6q@6 z6bl<%`o9b#EqD)QPTnhbj$Bd522V1+o6p!|Zp%4jV{!-yFRmI#JdPp;v5 zw*>cLL+x|-a^JYSTH8^jcqSjZ>KhF+=199_IQAc^RL7#m^GQjWVmg`~UGmZ*?4Ni$ zt5QYoHDF)y;rR$;_WApRq*XI{$YIxIpXT};v^r_agV$QmU*7RyjDK{=No7go#>;_1*VgJp{W5iatdEQK>mJ?nL^!jE0 z>SdB-O^L}Kv_pnIVy>i;MS#((rC&8kD<5@%%O#{@@dS8!a?dJ4_2*i8Bqhq*xQ3YO zuMJxcD=< zJw`ncDro3Wf=z(XYgxt8neAzw6C>V^NHl`{4IO=U`Vwf3-#R-n6aU?Zz5{M*Bet29 zwISv+6OTbv69akFKKvr&aJyi(ESSc;=Kw7QJY5xV>`q-e)2hW_0V#wuB!&1BP;2oa z>snpdhyC-q&XhyAU%=yC9vq3T$coSsc_oIx!VtbmlunC&JnN2KLR-r9bl5h|uhx>S z`u&cng-J8lCBnHyu(%BozmP>#MlPl6dSguTfWblE9~K|afN(7O<`G;DnjoS~mL5U^ z&*P)RJe$t;rFfJ;@nTgUxo&6L_0w z`ds~h;9=&gua7{9qWwxyNdg|a^S6U(&loE7mXX6kRxu26U#5=%i(7Y&jo)r(wlRZo%IYo-DGZ z4VHsHqbl4joL5b5t0v21?a;nY3b^GT>*Vlm@Ty|w-i!M1kU_P_00rgi`vIX^;(-0p z`le#sdlfN-mGPwWtrbP;v+RHbr2rO?D;HoBEkXpV!1M`SRtACCf%@$!?!o-G_VoVcXnY9kq$rd>Xws>DQUgL+ z0n=ZL`3_Ko|Cbu{N8`?fTS3D~!xyn7Mk&?;_u3R=@j z3F3TnB$QtY#PrF;ys(cws1Y(Tt4yL!kL=_$w(2Lv{6Sl7#-Z1IJO|0I3e0>)w8zq> zI@;`GtYfNL7qpi=KaDQI1b8WG@Msw@tN}CP;``F%X&eMjuDF=B1aL|4_d0s<|ID|v znw^)h9IX*tF3G_dUfoG#H6Z2`uo#~q+wxH|aO~ba-WHtmhK2xlD*^1|&TE#WB+J7X z$uDi;tSXu5fLOuJp75XOUTX#q9Ct|Z`X!bcm<7-M3ovJ^X?Vw=7-8_b2h(8W^Qi0T z16ipp6--pkwl(UcBA(4fHgWlH;p2>AB{5_Vx^56?>$K#T+J)c@hLZcO^}X9t@b#h# zN6FfvAM7LFEil%n!1XGzvYsBR+;eaATKZ|saT~~;uc_1aHG$CrJj0jtL|-Vd!}X8% zlRlsJz*(=w>Iu%T;LEXsCA0 z+aop(HWbSRqUGvlRDCe}JxN3skjWmOJ|I@s}8+sLWTz zj^t#Eu?<77=rw;{GG>AW%72Dae$4gp^x!+>_R>yT(&)v(vcKK^;!+YDoP8#+pCpA zCZn}+`po^asYSiG>d;zCpD3fAA6}hz#EN|j-7qIQ#!)*TJuWAhl2n|G(&r3r-{2wb zanx%U2^{f>043(rodyN|IO0^+Ob%?>9D<12R;q?&emjD8Wv?HWiqVjf1#ch|Cf?Aa zrS2pE_@-^`yClj}?qfM#a$iPCBw* zI^onfPqX1dENgRJsa`F-X)X}`{G;=!{%Tr=N9L*j=hg$)c7}?&w0ce_z>fat#N;rI z^FUhePlHSNH7X2xrK))XgegF>l;3P{<|+;h@xbT!m9G529?$-ycPwPTW+QCI@CG(r 
zR`&w_@c`XQ0-B?k*1I+%P zH}L_ooZZ@?Hf7diPL|sV!v+@g-RmvIBBbqE?08H?iYQ{5Lv?Y=Lr$-;TEUudV&e6h zO@xCJMKhOEf~T|~IjTHBp;?Gfto4vQ7Wnla48M{hm`!P6be=x75Z5rw$uf|M#nSH5 zrktdOrPQ4pO;RJ`4=- z??;8r?KLeukoG2=Oy7G#Gnr-_We-{W%nDr1N*l8oiX5&~))rU}5Jhd0S;b1A<<$Nv z62YpqkH<79HlR;E4z9!JvNdVP&3nrg6TLacjzOntobM@B%2Pxmk#6DX=3sYzrxy9mB8lylg*iJwi@%Cnd`T{~Xo2IBi?E$Fmj|;!IrFr?b(9U#A zQr7&%$U67S0NHEgI8GvKZq6`3~;UeLzs--*FrCC&5RI)EKv_`f>n*K41AiqErCLvDIJN*-}%T`+}|MH@r!u+9mZR7kSY zsJaVfxjfi40~9Cwc~-sZjSHl#=9Xvb!jr!d7p~aMa?wXc<6G|2hXmClcs#_nro!uV zF)NU>ZpcMAqvC`EJc}RXei#3kb}XDo#dzFlvNX`qogkP4ZSs{~8w~-ewnY!`=M+w1 z8+{g9a)3>eJz8 zQMDBT*PkK*xsIzLP9|Ms>xsqN1W*p5L&iQHiW4wc`| zR>-=!drYy0r{5Q}*iYM#>S|{ohfo6?Ix5wYbn|y2BfRoyY#2&t5<>vv{J^zxrhTu4 zF)APsiE1B_u#w9{+c)5tWC0k^p#D3c@s|yijRqU=w^~>&udR3g9a_$3J@`vjz`qf+ zJK35nbG;Y#X)m7ENVAK^9W6gnGV!4%2%6RtP%qZSKk#gD}`;cF(y5(kNN_9~6`2o-op zl1^Fil2?Wj(2k1-gdnj|)^fc7WmwBFF63=?3o6N!|M`!%xkmG_s;X z9J7V~dNDi0M7pNq0}@_0rmm-L&!1Mo9t=-F)aC(abX=Yk_|2&`nrk%_@|;yy?@fJP zTrupBD4E;#tu#Oz&UhGLoAMVD|^ldXXZvW-$qbK6mmz`#`(tYF23WR{h_TXN-OZ36Kj zCI*#ETf(*u$nQ?-00Yb8pyXzE(hHnG)_KMwFjE;PzXA;oVl_1vlOEFi*xI)bm{ z8xgXO?6AgvY6}1Rxt+%c>7Bz%P&3g(0Km_Ck7+1_S^slq!0T*`pPiLoIqn%yX>_!U zLXYaTTqjNujP^XMa6{}PH1^!YZbn}G{Walp21qT=Zpn~n`yq5-theT(zB0=8d?NS@ zk)R51!}zus4pb;?J9HHqX7@kP^;U)?KDy?-XTR9w=aNjX@Pf0 z={q}`&>MtgvNf_y1k(3wN;btE=iT>k!zQ*sX#&3ihF$gj*b0;&hb65|ED291exmV( z6hI@LTu~c_wN(xsQn>nrA@4E&t%R1M9ionNrZV}kBN1?J4l;zR* zTcEh2m#z<^tXPGMd#Ga>==|v^a~su8?+Ny?m{;OR5fgJtX;`bMub;z7jL*~gMz^x= zeiyb@@e?&U@&vd6-?mg{WQVm_J{WF+uf|nD+*R07|lNO4gJYzR_|W@tU$ZpYDwJuI5YIWv^^cB&w;2Bthtn@t8OaSm-o!{# zv-IqQSl3$L1up0|bHZqHEv7c(E~AJ~l>*QejzM<$9)~muNPOn3o^a<6?dKb!iMfD} z5tWk)gW+4ApH<$AztZF)0aAUQ(#;!g7JvijdB3K8c|4Biy7?QfuwjWkex>wn-ijdk z-HQ2aO-QZ1DDyoi(KMWy%k3)Buoc*U`9aw0<42)Z%aHnIKL67mKs9n;QH?T_Tb7#~FsjoJJrc+@dgj~n)ZPU2I|epse@J7l#H8 z?(%9e77(3la^2aLEZp{ZuR1rbuWCaa1Pb;Y`bK0Zvs1H_!>(KHy zq1jxxZw_zLDa{+nT+YD7TyFS~7v6q`iH%^$k{Vti^IA~SUKN5N+-UAg^W>sof)Y#q z_F)X`_3Z}&K#y5Hq(=trR2V@7yS$wSHc~;X`!7h)b1|5P53c90pyibN9YBJ!0;M@m zSEQT~`U!gt-KH2&Bd+6AvDtt|dyxo1_63@54=&`>T@F+Dk)rJDm{>)pMy$Vi6gRKt zjuhlM^v$j1B%@}LWH%+kvW3|h=+GJC>a-CfLb z!H(_dPR~C~)M>Vy%X?;JtLF>w;kcO$Gri{AexIu)*B%P_kDgL~o&U`clCflR+2EMg z#R~)qQ^+I>xk++*dI49_AGA@hTPrNa%?H!%nK?j!bhf65b6Wzmn3OeiA^+uu;|?&6 zTtt^%=s1};w76)!K+LFt9FSxy85kH*&+?2oVqsKaz#ChVG!Jme@Pt7yu9w|bIo3CY zlLDckqhZV@4h@@$mC0*0b{YaA`q}!}jCyF8B}b;hk3?8{cELT7J4mo53%?ic!|OhK zP}%e0u%kkeg|^>#U1s%>7fbIV|GNSIZ=$j&z0Y6W#km4Q)(~SeKzLE$-@=Pkc>$ia z<~!wk$+3Dl;-Ou$*H5$8E!D@OCHhWi-)2Uzr9|ZSkPyyLX?*bj^&;E5ghC8w8KM&| zVYHIdpu!FTC_wQO0Hp3~+YBMDv;K9z{v&xz2@4=oYjy^c;2%NeAY$o3$@2DoR#sq1 z3LylQo1x=RDisJA8{ftV`Ojo)VIQdD6(pfRTm!zagnV4|!P8%(d&*X<0EKiFI~6yu z6GHIs?cj@H2HFT$Mih5VzBsO7AqY1nrc5MLX?>qxckoR9$F0qFxAhNWC(dV|Z%gV{ z9L@v=hxA<`chrTo_LddCxSQXJ+)G_7{*c-+FU6R`Xyae}jUglcpp^`&Sg-rKYN%;veT_4uKsd5E^bydSGCikv*|S zJrgtP?aRCy3NuY9+v z<5GrsoigW^ndt$AgKssK<|hl4M)L#cA|y~`$lOSXh|CF+%-r^)n)yHA^O}nOe!ZMr zuq!(5PZc?<^d%Jq5Hh}O~{XLF7g?P2?-at~zx?QFUN<3pra z;SX_fYMnr}f?l#CR^h11C3-2zdV$!kvmy$z6APZwXPyd%4Q@E`BJ(5WcmCgA@DEhb zUdZM}uH-QKWfcCK639QU`ZS>@F1sT+kAs6RrQ8y2s zQ8*-2WLC|9I}PfcLn~)H{OS)t-VN?god+aL%oUSP(=+o4THQG{TbJt5ac7}_DDL~8 zoF`1iSzZ=`CY2@^i0rn+eE=lZOzthfd;*5%sRiA)CojG9JdVR)?KYwAeQ*B&DJ~o( zz44ImZ2Ox9Jn?e~VZ}pz=(s?rlD!HEl5GB+51QPCecKJf0smId;-=+b9=e;|>_Tm# zMnT*~#<)TS>}4`4{FqxnTb99(ENHRNPKHfUmmN$@RD{#czb|duniWk10#UcMI$_$a z##7Pt50E@tV;prWkH__7DlW-)d7v?9p9_32Oq8<{%*r@opZUmqqbieNG`Mvi5U^M0 zhT;-4b91a!v{ZB~x3Lqb{4idt%}y4lIgMrVvd{!vhCQ z!gUA=)L`e#H&`%||Cu&?&1nAf5SjyVjk^auU!#=4S1jWDn>DOt{NVKGCF>4*ak+4^ 
zz^H$v)US=Xc*rEWv!M21uwa@qQY4oY)}P>$Pid3WAMSY{U+El>9K6*Jt*|C88w@kRgLn2#;-PYl31kc*F#3Z z8sW;9FvB$;+hXAiS2n(G1uY`fV+M#nI z`TbnV#Oa3O`k)jx_*l+Kl9dk21j#^Z0Ghsx=Ajd{0_3O z+xK|%>Dl=HFj%a*w0VdmGDcY%6fQYyL{WcK}e`0-U`Jc|PTL@brOO@Q$o*Szfj6BOb(43Owz<-rE`#L>|hF0c&&`Cc(yJ&D&d*Dl+`t zI@eM>lz+Ls88{qhvE)Bt(~_xb3Df{*p{?vN++2&VV-ykaeMzhnAx4le`-w6=MW`v! znxj*RoqbgACobW*8*Q^B_>bMU2c30Kvr1E3S&i9sVxq@JrTQ7Q5TsKmY zekGde*!CN*1@$Iv|DqFIX&bXk2oH9<{bX2G$tzS9gUKATW;+4S%c6ZJzy&%Tz4{T;ffI3 z_k4cValN_Zme1qb23J?7ghLRS;Ns#E=4jVAUM4LocdtAGPH`AYZTBOfw&v?qbxsu+ zw1&h(A40j;;(sn`mvH8irqlY_Ine(o)R&C!YgSDck}Nyx{+~K~>#xf;d6fHY)?6Gn zCz>_(q=h$InqSNDw6RZQEH%flM0ObuPgBf1sqDeiByRuJYX5VsAhTAF+PAYh$$LG~ zJ#_XfDVElghMn|-GFSg@X7i;{Xq(8}uGV4l+vn}0?zYcr)TP54j$y;ora@j#xU15S-~F)x@93xML0b3vI%{A^IilhoBg(#2f$%{RN-pkF zsmKiGe|QBK-2=+-Hoh566~-a10G#}G~lewGDZ3;yJ1lD6XZh}CFx zHOeCahR8^U;L=g(hv-n}XVHX+3)nwp_jpFH%|VR&mnLq%@T_ zhdw)u3Qyh?GkytlTJVP046O4WLa*2T)ZjbT`JZHAUpLJ0J5kZEp;rI zB@g|{_A$?srMJCV#RMfUjd!r1p2`T-f<-8o*0|q47`B6oOmoL6+M#WVQ%_*J-$2Mi zA`TyelEZq*$TftcvTwN($wkFNiLTVlL@nK0 zK*Mp>!orI)gW0#v>Ip#LLa(rBHj_8b8jf>{icgGHSv5{R1&x3-TzrTRd3+2-*0vqm zyb=VO4A9OD+SN$E8ur9HyXcgjyMg=pTs7&FZ^KSykXu!DNs(dwP=El-T~wMp0fQw6 zIHvErZyjblLYMpmeGsv_nB&hDf7^29H+31h zy_3m7~(u;Nw9knJuMChR}%B6Y)I`_oS4|{ zBjE3HspTvLvkH#bl|@nCwq7k@Z7uT$#t}44shklk>N;yf`8*J8iW1`re%!{`9-rt3 z$%;(4v$knAJDwwI3wv$iNLzWoV)w#>=1lM#Wpf;{g}7fB1p&X>=plFH-uOi<^BC^W zanpb5w00N`Fl6UzHDI^tnV-J+IFD7}!u)J~#1i6Ss*ZZfnhW~aT%J$0i-+6INQ$5<$d=0?q&~_U+mJoh5%ZySeDYHBpKb@D8ZElU1 z9=@U$Y^K+6(LwgOi^Zq!osr<3TzJ=kz>jVmf$c3&V~x#Ucft(hFkxTm=`tn`=_O@c zP(wnBv3E-1S%dz@j9H%$xGio8Koi%S3Uc@Dg(!7>UkVWG3ZI!M!vxCLR&46=VC%`+ zA<@MTa0%FfG^QP}YQY_*pq^MX4HZ$RG5L}E2?Ab?-x~gX zcTeIMuZ`428GV(v93!dgpM=py1icP`x=ZqQ|1zy%_gf!ZvUY2R7ae|`5?JOTI>z*- zosn!~k;o1+dW-wDOsV@^)St{60pAx!%`ADt9MO;-=znSEpka*1-uZv*E3kFP^Z2@j zE~X~x7TV*o6XNzb4$!+}zh3xyS!-E+7h*?Ss|XJg(UbasmAh?+=}4eAPSL{UCaW2+ zTj$L1topuJ3D^vfJd@n%zTnX+*U+t1QBt!_w>)LrAdQbPv-s#?SbaEUWEeU4bPQqY zQPwbI=0{k508lc@)4Bc=-2R6P)X#vG0PJy!zqGe;iS~lOz}@49EDi^hoLRM^us;T} zt>je6b1~Rj@a60nTdo4y9z56e_J2^h)V!8?b2JKrAzRX6-MRPrL3F%6)Bz4 zH5>v=rDF@f4rQd|r`(>M-ha;Im#?)BSW_-L-M5N!yhaM_@4lZ(uFo%O%n5T!ZAu@_ z$9CKF-??_%8NS{K9h0!eT$xH+^isRS41|tD@Eb#73eKk=nO!a`wI8^lcf&S`_zB>! z#^4{eK*NZc%v+$Gxv-e}H|>3UX8KQs?s5{nN4a&GoB7)oJs3Y}1K|@yQqlaHU|&Ac zd+HbQ$=(Z-Cbg?N^`z};bOo_ToP?`q!Y-os{3tOC)0dy%7Are-fw# zSVPcKqgX$HDTPQVpptpiLi%M$8wd%!6l84e-7!hO>r1uot*E)2`6}{N*kg7j>6zJl z1BRj)ytMcq;k3}LEmBU43$OAyHR<0>F8usSc2vsRFrD%SmUz;2%24>{lplB>Z6$hs z8)J`u-%c$CC0eiTJzw+j6*F$!Byw)eb$pAX32wKVZvwIEtn2z1i)-0-b5R#j7lMD? zxh)7|9ILRPC`m`qPUFswi|Ke>a-i_{Ckq2#j{7+dCbK_5eeTcw+cOomR)8DyqXNQ? 
zziEq@WhqcCDESa!50lis_B(LzC%xG^MUlC6UW}sjH}yoiyWR=0Q~Q88)>tFs_z|8} zJ9TWQ;yv_b4$DSHs|?iRszD;P&%!7SsS>gV85bEpD0!B~WC9M7{OZeGgfD(2(?^d5 zYv0vp8N7S?_CVD1vx*_*R_&>Urpe2?2?rh02bLU_81!+=pu^e3{wEphfR{XnZl;Go zFk6a+c@9(C2ScJ1= zNuiXrZld>@B_Z#aPX~ z!+e;gnURcDwQ6{WJ)}n;DXwX1vg^KV+>jHSlJ~>BeL^G5JBB_n(FfS_yfxJb)k(qD znlK{N6-0n*GHggh5~jKiw~oSC$o|d{&|4-!e1*J-bPNMkp#tU9Z4jRy(@`U@&O+s- z5C>{Xbc_xXG!&)`CUY@0T1}N2<%Yct7}4ni^Zk zU%4dt)80KLJ?sKeMtD`NDOjM+K8U^LYRh=SmKSnXCQVAxZsvabus(H~ofYvW_wczU z5o-fDWVGM1(iy}{HL1^@Ps5jY%F=rsJu%;QlI`1=tG`{14NAx-Oi&Y~nZP?pJ>Z5} z=M}J`GK+inhL^N zLLbloreX^YKP8|c!cW!ACVts=KL38T?ZT2e;e2j~g=BTI_rdsV+dLwW$m7$p5#taO&Ou-57mA2#L7<2DLj8l2%UCzS`IRyDwrHUy= zLEvz94v;(|*UXd%dDct|M{)8Okx4-U@E5jnVl zi!c~V%Nv5JchJ_nfI)jc31b3b0H%bmKJp1_3QkV2=|yg`>Qlr=RUdq@k%xqx>vRP_TVg6T14ne+gMZ^lUJoj9SxEVWUl=0+@J*9T*caHphU9a zxBDB9RKTG6q_rXDI@S5*$$~GSCF4ajl+7%xEyN~)#a~CzQv;u1;^=k`M=CgCb}QrK zkc2nT1Xm+(KZ>he4IbSP@V4716TILa(X!w)7P=;|6Zyx)?NRl}+AkJ8>y2SNPDW~S z4R4s@KhnF}W>@rlLxgy(tb%349t3or*PG?C;_uggP)a+9-o4Dc-FS(&cnqZ-9+`lv zxu~!pHR`+|A@8qG+temayd8I`a_m_k5k-Ch-$I?SfdFD?h9?!gR(~Yx{||4A620d5 z<#INRbUCBNYG&tS*~4Fw@n2##g#lcpf6H$yW z%+ENrL5WSEgZ2y)eW>MUQ6$(6(|u5P(C3C#S zak*ycRmHzDe2Q2YA5NsgDuV)!t|Mgcu_rBGmA zVP=8FhPaK3Mg4u;>fmisolM}*e2$YYoBfT@vtJ3t*yJURINU(u**#^}Oj4o*n`5u* zhgn7DJMhH7%;p2Gh>WDNH>ISaxxszmr{VESgZ#G^0H4q?2vq4&p~z5jxfQpPo<}@K z!6&kheIT0@`BoTBH+_Qu70}WIlFfY9NX#ZD2MEg=KdBeCdoSY8Y8)*Q{?s!($4IEV5t{O|d) zuw2K5C2amoOevpwTL%X>n!eT9Uap5lqsY~ZyAR?)YjQn~%(D>y`iP`xjZttLwk!Go z;5LD5#!2cEuKa-PP>-@s$pZ6_w*%0x|^~YsTal=KL5@0P4Z28)m zWnYnb7Wmpc>><1jd;x#|lIEq(rFPCYD&@vlD@-y0OOS6#=4qo6*2fRumxmSsZDzd+ zZTU1oKQSxFo(&33N+gF8gTL(bh@u!N-Ri!e4)Y~DoH~x0rUSObSxt-g)DA->pUN3I zL`D9y$L9ZX+W-C*Xt%}(G9-LQh>eXD60Eo6_b^$zLM z4Aer=i{00FNY;Yhpv>UO5*B~>0pq&5Y?Mn%3j1eRJ_Xidh)o{KU+f)(qi=Kt`#u!a z^#TkqcLtO`IPSEz#YP>Ze1bAu4W@HPPQZsj)5sUSAUa+7D@wCw4mZQA*0hP!teHK3k5*>GanH;<5Oi@es11C7GXl&0C>SDhI%5v>hX zin%#*fH=azwU19V@+rj;htR=R%jtBjBpx=c?|Qp{ZI*-mMGn(9fz2PBp(x{|i+2K- z_XqRt4MZM#J{q!f=(XK=L?q5mlWyJzk)?h!049YB4|wJ;j+$BV=3x2=(fH_kl&96p z51G-sz5w~HA@j8`$v!NB&DRW{_4Nw-3$81E2h)UYIlMl}UO8r~ayE7%&9Q|}N&jCq zyWhhp?3}FCN8uFOHf0$WNQ~WF_9!|Wb7__q-#jz+PXz|LxOryxpwu!#nMJeT zb%$JE3sc_4jNz{sVVGi4y78~AOI5qWJCq$WI}EE~=bgkkoNiUNDITYNt+VMY%K=X1 zD;$m(|9~I{MuKMk$qU!gn31_ctX(F(d_Kc47SR>ro-zU{=-L~LhQsxWw#6-XMu`Ae zcgk?fDkkc!3kA*~U$xYr8y$S_5qOuYsht%uN4B6Re^CZWgEC#h%i)ARfVC6mIw@w@|VNN~C7k!=m& zBWReGYe*z;-K@}~+YYPmN9d||+*|rq$9qjIu)aI=_IoWf8&Oixffn4=wK}8wLj_A< zAdei47D0ZI{enUFqrUL1h$x*RGVO4G%Pbp7Br;49aB}(@cE4-0VEw-TZMts_dux+TEmT2i4K3S z^+zm;*TJuA)Ag=2&ck)0oU91(eYLLUluyk_*%=aPTmXBahnzDxY0bQ&S(K)d$Ji^S z(&^sm=OPB$Sd-2tYUG$7T&~nU)LdUC{#4f#h`H4&IpeAn3}JBVsXGjJz^<00T`bC# z%f0M|QUp^&8eL!-rS^P`;?x2`b^5pKjQ2U#%C2rZtn6T8PmG?dib_idGhn_~;3Fr} z(xSoqXrTWs!MiEZHXkU+S@S9%-&%$VDQjo3UK@uJ;)6cwHoG+J-C2ePc%TM;1KN65 zhP1usc+euDePIu&8^jp1hQb%0YI^CkFDH+ha(Wi2ljooQwDOt-fjRJ3pK_=Ig`UG7 z?icX)_Fy>1haTZdGfA~jZ9934@ZGMQ`UH+yY_BO7Ku9hvOqZqA(;#giz=d=E9GIQm1;OPl^|?b-*!l(tK0rcYM|i zz;NnM(1zI;U?(YajjINYY%Dl`6dR%X`7B&>52#6!q`pCX3z3KV8OE~B^&l*l{!EX- zP3^vC7WTD zLtBEa`g^TNUg*>iu(#quSG|SSWyU&89@5LHRZ8;35v!gdjKb5P1HtBds`W44NfdK$7S2tc4a=x7~U={$*_Gvn}=NCrV%O4~8VydJq zB1}hUl!@@eR(3`TlrJi_xTvshkbS`J2&Z6d;qoxh>iEr2O3vdn*2rYG%7^3fU1IK~ zv0``2B7FoZOmwfemL>a{S2k`(E^;dZR9IOAHWn;!6O2qL6!9b3apCd&Yp#k!$D6m0Inea)&9r}+8+z2*fOcqe=r=a7x~x^7%-M`fCIRTBc&S!Km$YYd zM*z9RQe%x%HgxoP!_b+=P|^#llf2LG)~E~>(rD8LpUBH}1trM&I>X@TmBf>|z=AKj&7-NMeNTg1fj*oLpL^%gyicLckr*BPRR|f=%~i@p;z|-^c;j1B42)6*=jiJ2? 
zUBY!vn^a(Or?;j{fZ)9`Ds_D|We#S@IvaM%%%2Rz!he8eOj!+vWW9-OQE}NaQ2?<~ z=(PWcI5;@SY<_HE3O0Y#4%<>Bh__CtxtfCuY%$va+k5wa_zL|%<-_}v_N5q7gWCK` zB>8fCoGv2^&4b8c>*Au;>8PjFeS0TiqXUxM`zTk(KA&Yy^douCF^fKO^3@r%Ce-4 zmiL$8?gZUuQqw?{ANiA9^gBcKEbXgTud>)!+APO2AocIf#c)+G5-EX^SS(w< zYBv}Vz)|I8S#k;rVMwL>J3z!?gNV0{_4B@U-wQOm+(}&hO8tKof?b+?!dIDmAG;*B zJX-AdbhvkiX#SK zOAO2E2g>BeemDq~|ju3W?JaSShI-7Callh+;%NZgTv?Mb0#KQ(a8?^)A(7ma*7 zDmVPMckXZP2nR0ExNtOaN@KuLi8L|<-ez@dzVlmjBQekCb%P9E;(!M#PlLdqQSqSi zjn*SL_>ZEok6u~1JMgvg6>mwLWa5xkz4eIbe*!l>_{HxF0`1<3^Uj`$>bXlQA>Hdg z>6aeld_w5E`s8Mbqu=}AoO__mgG;uvq+-7xr+UW%X>P5}Pe0Jc0$OEg+dHP2jlZ#? zW*P|Bqf+CtH|@7ueBm97cIc0r`(VJ)m7VI5z9>%2DxO|xEONnhdRi!XnVB;TL2ZXU(>nLD?hciFV=lXKf`9l-}v zi;+aB-q-&T_WwtQ{e|ueoaldmiaoQYMvdbqz>+1Tq*&mJD~C?P?Q*?q5AA05?Wst4 zVe0*x$1(GIeM#2W-h$Kh5mDp@JI%Uy_E~n!nAe2fMp1HryeiT@L*>+dg|5O3n|Vh|qiKsBan zhB}85h*ps#C6mqN%z^4q+h;fFPY8<-bkYB1t?{S#!WlcB$0SThu~hc?!n;%;(fQc% zu7q{|IHDcCqh{5PmQ~}I3d6t2J~7unk_6U8f<0{U@_CFf`r@@7Eg4< zM}G#+0$(X9IQc0HFV^UGQu@CvHs(#?WM06N{kskVMriY;pF`K%a;L)t0}ZgPCBvb7 zj^*Og(j%j}w;j>nzgNR3_!@hBdoAc+{%3!Zgkqipsg(H>OFg?XQez^UtZ+x~V{=hB z=4{wepw4D**BAUJA3K6@XHKI;H1@x0J&B*$>@A42(i`aOqrcqrq4O-bC=dUW{|K=2 zIpD@c1jV%Ge;SPb)hj~s6MCJj6GZ2k%zuyQ&kv=I!5wc7D1qW_qqIN&`2RP2pSn)7 zpDl(9P%Qs?D%`Fn_nh$%Z! z0Gc;KS9PW}Xk1RY0N_!2-vKE9AwaeaOg*CPP5LF@yorPdN;Q`nxD0cu(wusRu^G0c zH5*^m*>AR3sMe(AHP@^QCn->~fc=YB+HkSA!{(R_+&u_Xo!_f8v&K2BCZ2I|amCt# zO$TIq_wL2kHKtr$9CrZ{T13WFNce> z+S}XV3PB^VJP(rRxymdDjfT&G%iDxavu5x%X5YV~~$;)3rPnixOMJM3b+G)73Qv8NG zjcMJVLMK+y_B1O-1PyNkGr&v&HN66fvhO%o{B`6HtzxT}Dlu9Nr5&gw9?wGfuatnA zzc6ggc^FJ8;5u6%N5lbA(scnnA3zK*5ax==c}=x0 zeW2sMzJQw#F@rI@cG8c6t>K+;iAU5MACfsu6>{&1z-K?@#yt_9?k5}P@pO~w>R+^D zEDWwYNs=C#!C1_qwY*y~R;ZNsbDI#Y8?NiN23Jgq4b8f2My@g*uJ(*P{0RfD)%)Yk zSH00K?KxWNz-E_8YgVslg;Taif-YfZEWaOL(K`^3dINtT{{_&VD69Ai^OTnso3NTw`rc`n< znqoDb%!?7af_;ADSa_0T-4vOSCDDcfZig7?q4-5S(*K4yqe4!JnMIKlACu_7d=jgCn=g|^;# z8$x}(hqbr2XIoR8fgu2hEpSJvF!s0*jfZjJ{Fm}g7sr|jMU+eMdulaDwp}-c>liLQ z2nmgjvFLz_X*c)B(MQfzwz04n_ktSr!2xNji6hK?j9MGmgncpBC!({|rF_ff@+*z^ zW+4Pl7uH~jTTph~?uoLk-c54V@wla`ygR~pXKt!eOXa^(+!o#M8Gz{gjY}}L+SSt& z{X3O_U4j+?0ihs?t72D@k|z0Kl22fA`gWnCV* z>!z4{ov&^S6ycA4ozy&Kc^i)Ro!8yTNLoCQH03zU$HyaFmk{mW=7N9x#0Dk3URB+$ z4ds0Fd^9w)sA;Qa?Wt;OCQ!OZfU(CBdenH5LDM%k`3 zS7#JLuTe8#C%p~hF^h)VfdrkMT}XL&Gz@#vBXZU#DsNWudxE6UGXkjqCNP>GDM2TO z0`#ST?49-2qTv01k~zJ!#)`D*bPLo-^NJnM06V`{tX#p;vYW?#^~at;r$y^f zse#y2c8{8h&4E{!mzRcsF_k{nxc-p@-O`*k4U%YReiH2{QM1Kd3wQT}<24|Lw|91m z$wGN)Zmy0s&rUPKmw+$%Snskw7Jme(&_}qr6XT8Ry!NHzi_pywQKt~Zj z!6lYpT0Ie>$_)Wn)<^&V+F*?>eO4?YZq3t<^|2ZhEvM}GiPawcFpjW?#m%C9C!L{adQetn2FxRN7% zp>6*OSgyzzzw0spVb_qWs3@dPhZZ|L!{|GZC(e<$i7Szj*sqTQ7RL2>b7GUZb@Ndy zm&Lg>!2aO0tpaVcifv99+vn>I7LkUu`A(zshQi0!SCB%=_I393h>mJWb*Z8sPHvg$3D zEtm>b*?a|iw;9LUQPByXhYOr#0*+j*FOX-$3MrSKSekg)CvBYbIv*Fn-DaHY0NsrM z5~RivI0z{UEw}HVq@pU_S?swE7r1^WIq-IdnwC~lXA}~kM*XOizvx(*ECNr*O#)A4 ze0h$=WJBLb8O~6=aoM2J1`+nyyZ%5{Djj-by35~^`FyA|ygBK&t_0*)688xql)?8U znM25~cX{Q|saFbC6A+sb5OLqcbt@5tE)e|7@hpwhyu)t#>mbpBx$qQ@~_42xv zy3J&D-zz-U2x)+pQVUFly1?oUubV_R#ku}5w1R5hcM1Dhn)p79&h5`Sl|SvAp7{vY z-u1YdafWV&HJs|t`QYsMWv6)65l&Q?iXX@JCK>%$>LPUwa(Ga@SLd%alJiCg1)cE8 zW07&I8Qx{5?WxBQldpKjX1}33pHBy3@6h+}!5Lhly@}9E7h{W|)wUF`>oXJQ*wvbT zY%%f8vBE3>8`^CQe70L(j_ZDNb?*A(F}1-K8k0-jjLqVwxBdp4XeWS>S$DGbv*)2Z z3hwo(fjD%|7ul_LWy<_YHed;ZXE8LwfCu2F{?x?J*Quqd7GYC(C)7AHJz4SsISg#CBeUVaKD$yf=}-i zXxFO%&eLm+U9iaDnVOKeaXca#q0MKM2Q^n?nnIHFnpN~moNq7}xMn#(3;`5Ka`+BL z3%Kd2WDKo{$Y71Ffm|&N28#e@Zg@Y8RprAn!uU17f;DV~zTGsl(L81?|KQNQducya zo9OrP+02$qj~B!F??2IYYJY6katGC=Pg698R(UB)RcDV+6(WuHWDk&Z-?I)bwIF8g 
zMv8q{65)R0Hf738$HSBP9sHPCmi11gt~>s%K>Q;#U14kD9;QiR7f9WIrKRRu=5@@Q zGLiaLfCqDTmJhc6&Eulh_IT0+*k|y{$D8m-f!YgfAq5|5{~+8wZfmak9U;wavf~IX z1SA}Zq6-o08$ATT0g3|EN@?Ii=F)=_JVvMH;NV1^#0)?H3|V(8jb-X>8oCX@`SWgM z|K8;@Mto0Z?55^V|EVa_*nA?0m9OIgvGra7E=Z4dLd)_t6@a@S0{7b z$&=-L%7ADgw7=4yN@8b$+IVrew-nI*gw;^I76qr&hZI-XU@u*SR6F@})}7gP-*A02 zKkl%i&_vF3pyaK1LK&x8vsdHROgVMtdMhRm{ZdU1Vsi^ASaJP4FVXNEDPj-oK_0n- zk8V8PmlBYDg%BKlRIch`JKk4{h*D$sjf8lxx|llcI}rG^ss`Fn5uY#u%Sr50&VXBf z)rd^^BOD?StEPjtFbUkyr+ObJ6lm2lKWLvzpSGT=dBe>5#WG*g_K%x|IgGb^2D zt`R5dx@pCNT8R+Ao~aG`1+o6_j$@LFvc#$pixDS{$i+B zDlQ(^{D761s+H+i=aQU2jN|55KrXg+)y!hL!WS>^bQxu~2O%tJfxYgZC}TRx@pjxw zFSl~C4{T?A1A__VDTq&s$UNr0%~U@*h*~+(3|&m;{AcJg!U6v3>oi3_(m`5eJtOzd zqG^~-hbYeWZm#>C-{H2y5MEE0@0_q#p5P~Pt{Yo>-T#mnK3&f@p z<#JXca?c|rQ-Ke;TD#q`29Nw!4&P5=~0j0 z=s{5uYqn!GzoBFWvaS>Yl?4mFya5b&3TJ^qU1zGz>&%5Lw?3UG9$;?nTd33MX1C+I z+LfD~qLMc*KGM;Y!XifOF@LA@?oBBb8Z+~Dk~M;661(LR)O|NbjjUk$sI0tf>FdQV z)BX?x+cz)UA9#~5kjc>6o-_*B87#Y0rH5t^-m7K(=Og$hi0PaA7U5-DwRrhjwPo2@ z?dm0Yul!MC)4_dtbEsj3s{$p2MSQva>J>`WH|Ya0Zg-At+I zjz~fAxQ=&Y#lTr)KmS4K))4Ylu;g^Lb(|dQixA7IKW6+=gtM!UJzOOIo~(ojsfNHZfGim&#cpD-`*&E7N|LU{KF zgx;V&%FVv>3Yft%ajbuH63GJWXt_wfm$UVUhx@v%%xrARA5v^TG#t#j+l{h~i+9SV zP*75~v#>XZVhNlIGS;r8=XhRCJ@28*5UIELS+_54bhZ^h&_B!P{w-y{cW&8D#{eTx zapK+6r#!pbB4cLZs7>kwF0ZVhV6}QRaTVO&3{HHUH8BQ{d29 zF|_T2h#3J^YoqQ=gQd#kn(WrcN|kyqRCR3wv^=*f;e{vkU6 z8tCrMFTP$kR+^8AgZV$-StK(+9|-KG{Dj-4M8y$}93J%VX_SPlrh1M!Q<$2X4y7A) zw_LX2a&ruAp1IvH&CKhm=9*4->FP z8K<;`#2Mz6AL?~(%C%L}`npeq+vRLjH7g5+DJYuWr;GR#k`+_9@n&}{keNw@Z3aL` zz31m{Vr_v6<9I)hEQ{%jEI>8d{9M$cFMxOF2coh`nd_xTu0k`h44SwKR=~<s%bQ~zDQZgbx!oV@Ut^yFR5$N7YMg>R=0Jd9#h?Z?6d=WYPTVn`kNRj@;Ab^#aZR0UCk$BHj}f;IIS@hxA}{tNh3ep zW%^~rt%Wbgd}miPjDoM7yxPw=jWSn&9z*4zG`tSSGkG%eiWAh5WCs! z@|~Pwz6DO~`*|q$f^ekUXs53D)}s}d08s@ue^C~<#Asy3Ii&F61Daah4{DRI{82v^ zJ~Tfo!LUW}UBbSiFZ3BL(Wi!#_Y6TIrHn2?hHDTON(VR9@F;`iH{2R#OzffGg*tYOVrh ztxW2jdgq-Avt>zFv%`1JPT%*yj$xs&?KF4^(4Z$;uLc z$8z%_hEEg}Q=i>`1e#>0?XnvVVr@5NycN4_S$?$S+V1F1+lCIFlWA+dRvxDopB6G3 z=p+88V4MSmHF#dU-gi4(BLxwZ)OEMMVw*NZ4BuCRsXi_E@a!XkTP)WcmS za#pqPN8?)LRq>i{)_(!+(k`CP8jbNQUP;KtCQK$tYEv5Q@`QlZHr(Z8JKj7dvU<`& zj$i8xKu7)oi-786&RMsOJIj-q7=%rJZ+2I^8b#=(ZQX-(9yZIVk(9@yw!AxVydm9> zzHWe3dtiuXuXfEX`Zmb?IU*@}T32**sxOXbYd0n$o-V|!EaT%Q2$d!A&Y2gciP#8ptmViZdI}SfZ z@FZ>w!fZ{%9D!8)(sN0iYd>xzQCK{S%L?nYCNYEX4jUMbK~9J5fub zgED{Z2F+v9zhHaXp#vo1V9bHOz5%ZP@^WITbAI*(pViEyUgIc$t@M2R@r*B?Si0G6O?*pt zb#(Dedh2{&F>j^ZqAhu1`7pM}JA=uwS4cOh37 zxFJUG%Ov*P8D8IT-O__AJ@-Al&xVhph>dK#k%mHb(E{q3T$8(?mU$RLrVEP(&W|R^ zu3tjEF83m+zPuaR$!9&UL%w?6z2sGEIIaPnlDZCak!ADoMm}`Y5%?Tj=M8R4B^EUu zYa{@P$JSIWOJ**O;+OBp-Z$=VXXA7|NT;iBn7;G@ucZ42{ygOlM!p!B2LLPTnr*y6 zTO+bO1H4M4B)Y*>LZW^-fh!89nJoM#Mx#Gs^sjQYv%^?ThlJ@D+)|r7vp}=ZUI)B> zliG^jV()EJjsXDPaC)q;OiooHM6_U7nA2Ij3oU6_@EkTT4iu z^0}W?N>f`=-dxrP3@WrR8{E0#RAB`8cC17y?s{KVC}@>toskL_G0HVeY_A z7tjQ4Jlzk!Iq|kzkf3(mPkG9&`U_O|LCE7a)M!>eWx*1>WBM}V0}6M*!DVpd&)59{ zjFkYXN@43d#)MlGc#1D1e!Ku>#CMi{e!3XPYrUGxIl5A=${i3gS)5gRI^%Sl60~e) zT?1ap7TiJe*aMwIcf?{?$OP2W#DX!CB`Yhd%QQQ6r-%07$^|qnD0V4*j$Zo-CKRx% zlCyHK^D-j)JkY*BCXw5uqJ{>bK?%*)?T(J4?wyW3q_^~Bf?W3>$Y&Dn)jB94=)TL% z#C)#LWK93!J&(4{&wz9$gqg{gM5!M8UHc31FMYXjE$e~L)6g3ZG@!ojB70Y_=-SL` zcamMx$-_3#sjBO>4ouBcZ??MVWIrHhKY6MD9=`jNbW~s%Pqoj7tbe_`Xqi7B3yH4r zXeH_EOp6I9Lzk=PywJMriBDsgOa-w+uCH@MM{j^4H6@M~R;^44Nchin!JHR(j{Oew zhBfu%0ynF-3F16}HxLmA0QgzAMU2wtmp_L9abr2VJ!BbE8s8a|+Bq#A%zm4e1wH zoMDgbs&$*ahP3fKKpU#^_Qs-bN`#wapE;$NZ7Y-oeoLzaM|-(ff8avwvLo?y3FCK{ z`{be*T|!U5qWB|pXAtv{5j+q1_Qm_Ty46bc17Nn(mcF&FUdWd#*cuTb&U3C}^lB0> zCco*BJ7QNPRWcCdvrPKwI>Wr`4L84;ET>rb7Ue1D2lsBHQzbFEboX+EF#KAn{w<-G 
z&qHMusW15@xf@%}LVGpw+FsR6FP571U9WFZ3pcy-G0C_tiOX2ZQ6bE< z^#fPA{%m+>qtwc-W>4De{Yh?T`$&!?!dudY@BTwDlW^^_pLvHoJL-26TxCNRoAlDt z?Yk8EW(KksXS(&=L+5I-7DXa})R^ds{GmPaCr>C|Y%2W^VD zcg@XL)a3`Mc%S>ubqRGWP?T!{1r8|uy@ZCa1 zfC$r?tZvoToB%^;VB;9RhV0Un}>@gu!xKl6ejKH+2Z-MlJt4=VEXOd7+Q+g9Y`#}$vz|ac9?0&zu zqmTq| zoBJfe$949(WUo^g91cHth*wO>^T$xjToq)G4tLZf7?z;OY%w1XSddDc> zvFGem8qV-NpfObabTORNazZLo4*;e2VsqBtVjS-6iIF_cci$W@jtc?XCsx6_1Y8D< z)2h8j6vSE?RjiWvXYDe_3hQhb3$`&$Etqjt8?^VUxIgInB-=RBd&4%1#kjY*IF1AG z47cK6LNDJjTNq(#A}=fh-v^CKvon>TaZV2=XnewP6->pF^eJ@>-4owGw0r)9kH%iUl3*qz5mWRU~?DL8tjwRjayX+|u93b`Ix?n{lVf zDD!necO+O6saKnvOjybV#m)ShfXTJ=pmw=xdI?&;i;IS^D9t`TYTcAy4(zs4S@hpu z0^FBb+24*c3R%o{m8+lUB%J&F9lSs_Q3*P>(`}bJYuV zW{re1*NT4UO1F&%{=c%iuEk(#AwO%@EEyj*|460*NcL>&+$Ob*3q@oqbS zk{<+#Wq4@VXzn`BV=xwL?9=kWFxdZ$L8SmkWDc*vWJp?Lvs|zLIvoGSGLSFgI7i@W zw$Fb?C5{-B<6Ew+XqMnseP3 z^Qhn_)~vT~pCUEgW*pS}Sba{940ru{%d`qm|I7FP#@bK!lhVH|q>J zLXAwR>YXz{0cSPZw7f?4u9QMEqIV}1xsaw9r{AQXtHa&VYl{dUW?bKwL{B@Z??c|l zBiA=t$f^i$+q~u^=PygVRGnke_?2&C*!>VpbGV_^Kq1PMKvHTUQ-$Sf<{OSolz25} zPcyK&2$oTyS=g)+IiEI(Z=}6?Li>6u)4(CSYI~}-T++j%q+crSC~P%LB8o~siPAl2 z8T0v&ihuIq&dhW*NonkfWgE49R+Ec|Jfd&SaB2mU^LqDBP;H~hjgG8c92a15?QlLl zxIBQS;loRzRF6yBd5^c0Go@)n0>J!?MGA93k4(;75W{&O5ftpfmMPrMu`K<(ilQ4K zQmbfGXj;ri<^4-apQLwXojw7QWL-I;uW(V_N@4`wN8GEMc!ThmxQ?)xmrM=wxsPZT zL2NrJSQ*gqFM{PUwE};B6}8H|BAmD5>)t&t_33PQ3tlM$GgP#GOYUZ&cKu=zFB+i7xt2t>xmME$X<-@Sn~jPOg=o zYLr}5sg0(Q3N1w|%HHdtL0kEFai}SnrxGucQ@wzDFy@A<_ zpm63EP|b|471#ZWV~AE!SqBLmm6y8dqDWf4A5e|N8lr2%20%$gJ&2Ii1~J3gV+RjKTGP83XH`t74EaS?IN$c6yb-+rULkPv>Q zRBd&Ho&gxl&<2R{$dReN(EGSN%0?a25g^I{6P&v@eIs9F2wWxoJ>Q4GtOtKGHLQ{4 z7A{uKBcQ(3v=Ms3g-RxV>rVSb_ z^J(^`%A6+|b~r;#05W+=FRrKtu;j9sY)}WiG}Tw+Sc}LkcFXBtWamJ3^)qXS9c6l& zSnjLylki&4O)2$H9GrbZm|Vid!(5JlYF=ix{G-MAV6LYuhH=LT&4bNs+S-H7 z#vMRLhvWNYimS|w>Se;eug%Sut#x%(kSVkXgK`0yi3I&!30??t*AsRR7rpYcs)N!98Gr_aeL=*l4%@95WeRb}+fx0-@}|l9>viW)Q`}n$;WQ)wUeeqlR$in|^VUnwc_l)%{#Y(-PFZSIl7QXnkX4E9hk%Vc zZLD3#5!l@RGVdA;nh^)PoB{1<@5eEuCw)LQx0Hg&lSCZ>&4a%dVGK zpr2wC*k1@~MR{b4%fEd%Z1fGZ8F9&9J=|S+jX-9#|7w_z3Q84%r!Q>MSm?Hz8ePCY zUsd&newR+g>+>df`AvJq-h%0>!}JAY`+TTAhxHg)^92PfoTh~BAA}?YXiC&2+}{ee zw)9l=i~``(7R%{MG_W|8Fn~z>GNpH_ee+>+(&2+!e)Yc#jQ;njEB5J&fo0nJ6^*eJ zyYy2z$pXGqdzFABZTFr=8uIkzN^4A&^=O`Qmf~ukZ(+qWOrk(a@cu zVVJofpI|x_G1aZE*;XI%&^&8Nr~{%3U_0qx955p{zNeEz_Xg&*Sx9OGFhE?p%Z_Pl zR24L}O0$>=m26p}TrlqyPq0D#M8I*J{G+$`Xo*3iMxNdL#Um5$jkOH8V{sq-oTvL| zmOsc)xC;e(U-`WTW3ZCDRPff^mm>QL0xyK*cw~~nbomz`IiVp2w;2#(^TfMQi>>vK zLvG}y8f*v2|6-HoD-h_@=k2PzN{lh^6MBI9l)3fyj3%z*`*`Z;08)K%c#Bnh&M87J zn2&~*q~BeWD#PH8eOtFJ{pICouIBjg44BHe>_c8I{)SQTVq?~Q`TZj~>5iR?V=h1p zzC@4*l%mdR4+-Y+0`)7)K}5yE?0vqF1MloQ`YN;#4Z*o3hO4s< zw??I8d9EDb3At~$=>m?gJRQ#NKIrG)P#+uY5 zIB`iUs_6_I*n_2Ft+%^6*KrFk!{#d?^Y+o#(D>;8s1U@Vw8_Q&C2L2Uhr9a60SioR z(PoWx9IH6^2ldXy!5{Gfr)BjW3K@#aZmMGv0_X1Pck9X*XYKicdDc#_Zt zn6Am>spR8AREH2v^aAxXG4R>Zcc@^iCjyuA>x#yU9nc2xr?}F;69t|1&-Rxi^WG;< z143c^bnw;mzEQVfz?d$;7`CRXnKCUh{d<$xU1d2!zw#%Rd7U`#XQ%!E#VPN27-ee* zvGm>(v0obZuFw3uoULVk@elF+#RGN+DgEISCH_yMM*K1@V%J_PZevfjSXEEo3>^fd9?O&_D@bn ziIs&_{AfB-xv6uPvqNB4DOgF zzj6X3$TnyY?Ri11B9NI#Uv06RfsPpt?|}&b!wy+H_wfa0$HSg>=q51q_zs2!tvUi) zY084qrbiW?bdBasA8BQc_uPRkCQL7UEwySnQauoA+t> z$sgb;<)fO{-g1q~WmGwE>*#+RK0Uw!Lzp(91oP-RpX)pOk38QvRj^-HSusdnA!JSH zHJ;C&xWE(UEB|di-L5EZaWAhXV7Jf?o2Uf4T)}$i;94E}$ADM2PSXVEYUt2E1*dZ# zr`m@)D+Zt4V$IZS7E3@pTQ-IFjIv9uQa>QujSLkh7tWn&{=|AJS6(M)2^|qD+DW?>MO`t{+o;s*+CE#oXIN7q->eyO_%#33AXQ&*r5>6 z;B10cyaSRY#V5mR9a*zyOKL`MFksiy8I&CJHbj*{}fiWVt*_WCPMoGlVltXk?p1&`-lDu^0K!0bPs zhd<#w-HmSY>5Ej)EIb}8Jftp-@p3lBhCh_Fh5HjoJD-sr<<@Y-NB$E&BiN4fe8F2t 
zwdWN}UNCu@cLXdi<1FbK_2T8ZYqJ&KLrVufupR@TOmL|4?YIiirGU+Ak?^Ju;2j5` z1M7nFxf_1Ul?(2%Z)bC7*@I)xcVp?4~(K-hlKTPN$aN zz-V?YVh9(B3P^&$#1IFf3-d2-x>NvLgK-sRppVdkBXe?rw*|Cl(q60FSVqn(GZaib zKP1b_$Y7W*wgTA-0QJ~2rGK@AYE)Rz08UygF1|-QLP^EY6`YfTjb^i7{3gS%z13?h zU$x)9?|d}vt&p5oP3RGzLh?NRZVi&t>gCbR>Bqx$gWcnhRD}_`_}>%E!=>SY-BHVm z>&xorCx0C=rELJUk$NGR)NBfpjBP{i3+SQz&dZ5-hk!d`R&&jN&Nav_>ixe9sz1U9 z8QKGRiZU#dC}F`H&a)V&%Iu~W1Uae4Wm_&YwCgH6xo$@rLqPnTz-O)^u1h7G{2WeJ zq+l9DL@Ox$)k(rPO8-nWJrm$@u64`+mWJQ)=IitPY?s2RN

2-jXy@IFPZsa@~~X zkNnZTORUEg54i`2ebuheLP-wn8)4;Mgrz_P_7B?`?dtL z4Ho@ZBJ|6vtKDmzB+gLGE!9E|Wzd=9ua6g}D*c@`l7uXPB{TMgXB6X+e5O!m*iL#= zCm4AuDCs2|qY^FUnmg*>(^xQViNOxHtzlHx`FLzp(elRNX!WXSe*;)WceHI=P+`dhD1SiuTXZA*Tf4 z=7I$F*vSQbE1W9LJ104PTAs++VCFh~8m(^9TX^oXW^GoJ81Z`TanfQ(5VFB@iNw>L zdgunI|6+lv%5A@!E(4(3P${N(0bP1QFE=Z1$ZUFUC*IrU`g-Z5~YGDoZ&I^zZf2e8CElO0m@!y%XpK(!1ib9_QcsL zu>4(=<0RF8flarGsiN=dMFTdQMKxMXGRx`IgaURivR~?Z9_usIGZzM(isuTC=j`De zmZP^tc;U9EPI(qA+aLW!O1$N&^BpdtuC;darlabxrG;OK|5Lj8lknoLbtgrLMMBK_ z(ZSBBMgTrAGGyl;O4uVs0DJjjlGG)3Def`J= zzV(#7MN1EZWNIqu_!ni)yR&ng2Ywf&v8gY-E(HrO-jy_Ynh}tLH9g#P@zoyf!RK{1 z%D*P~bqQIFg{oVzNQQt9TMh!Bmd#ErhgoHLilxO&RWH>4O|0`tiu1Lyz_sbc*G%?e zz@WcunzzmDf8#vonE-8`P&YmgeY(3@s$C82biYhF@0ris_r_}4ACe*nUz>K82iZQ@ zBdnCt(RC8@g3aB-#}}`J6T%(l+0oxJ96XO!Teu!1cma=WCS$iRzXk=ux%z0pW6R6WjFM0UD#Osxg&c}7IsmXaIrrj$Q%GYV8 z+mB&dpQb1Eb+$1jyFaC#oC`>nCbg-4np7srSywfrNs=E7R}u!%^(enq-otSMLZWCxM8qyuNQ zOWaYPq1b>=yOVo%)XRH&IIfSEp<1|`&3VPN4Vv`D$Y_QDvT8rj=zd}xGc@Eiq!^Qu z?MN6fzCM#bK8#PgO1A2X@!j@1w1D3E!BdmS7B9et*ie;~OD(=KU*bMHu1H|{Siir; z^A?Jje1?2>b`uXx@oZ-4Z?qoIg+!JL9|-w<%V}-gu!$;faPus>;_67q?%#AjZ@_e1 z378%~SFH5%UUl7G30 zegqNgvXAM>$0l^oZ`qPT<*aM1hL#tuazZpTGt3H#Chz7xy7UEf4P0Z%}ak@Cb)k=e1*rp5&{q{a>%< zv#--Puwhsylkj;#3=6Z)$Zh~nd>~HXM7N#XkcayocA|oU)tde3&yNAd9|*I|1|Z(% zvhQ9*Mvyg=EUhkU7aLT(+YXSpaLGftUy65v9hIn4Y$W6#^ll(68V1Wwe!Tb+bMfKX+V)jQ*))u`O~X-ej7=4E|lP&g+)ml7Lugw_0LImN`6%fVu?a;{a&(=-B-$bA$D z84%acP)&*-rROO43Epu{)Rv?jCP%eo-p+@>|hPGYYeVqV1U9 z;Ncc%IBNgm928(X?c{N5{*u$#@~z3t<_S?JD@LMS;cUoYz}bZ}D91)+{r#i|va3=q zlE>;L4!p2x-pIdDoyMg3V#GKi$W0=X#AUDWJ?IJ*tfu4@PK_F`jP;g}^7GU1Is)5o z&g@ck8X3pj0`5p1@{dLx9zLB~wYMnfbGz7G-PGvNsS=AZ?n^i*bW@j4jO@EV^+Ee0 zr*OJjw^8q_-(2!4SW2RkC1Lyp+3M+X&s?*B*A%QaKjm!i$;Sh?TU{Cc>{8xegJq^n z)puP~3yL|up-9oEWIML5idR^pC={f*{#}@m(}Ga%Xff?L{iT`uSZUstnf11oH_nGA z$9I$UBk;RQL;%L8bBF7Li%J;rzhbXnR^^pj@)c!vddo=2-Q=U>ejjD-K6EL!LfuTN z|Ik+kz4G%bxKz&X8@ilBWO2D4WBmaItv21x?Oc$^MBE0IZ%pkGvwSCKiKOULUJpMcV`lV%Um00qms&|7<7!-p%S)!k+E5bYLinFdrGF|;0ENbze>DTV8 zX5!#X#N)3&!5orv$KFhG$YEhM>-kj>L+5;kD?ZTUNkgOh1sBJo*r5Ne)0-2z_2c*W zTIn1wr4FjsPn|ctoE)7p#QAf_yPM&9d<9kb8{HIT!^{*t@ZT`CFbK&ZBkEPc) zI&AtPIeS_1idENFFaP~_i}J+%-FD)qbTxKZ+NwU~SsQ)5GOAyHMeN>RomT7euVgja znVT)sV%5|<{53{6J@WST*Xu96zu5b(#I680vlG&Y=DxQX@Lwm0`DEjV5R z?k8d&@MZCGR(Ml?K(F^$CNm?;9;VBiH``xfU}ZM3IH30a=KrTJPVeU7)wmF4&V5Y4 zvcb{uVAI^{@AFKQ4>kt@uP90Hee#_DieS`*-VL6|^Hv%JxbA3rwSA3(`S%V`Ez2Li zY|E!~orn#Yzs}5b{<6A9l!6V>-M6o_1*mi^Z%C0%u7&;Du4Cv&K>T%$szpRHFfi> z@AsMh54juX>FfPF$lm_y+sM0YFT;N9_PKp`)i&>#ul%oSe>{whscby_=FOY;z|&U$ zFZ=q<>U{QW&8mZOzY5o~3Ga+JA334_#C_Lv=Dym!$K2Ldud~~H?|`AE=FF86er6`HWp$}&@tQUFe->2U zn!f&1aFEsW&YqI#7Z)hhmp*6hFV%T$!f~>&%J%+0meZ{ETVopg9-96Ak#Dem#rXp> zK7IO?DL-8_^jdKo@URvU5uS|>dh?3=dMbJj7h0BKn+CwvG$15~7>NwcY0TW5u77C( z*WqvPZeMq5=K168E*%;mz?-xos9FBb7Hj?WTiFCE!d_?oX;~5E@qoR%bA!Hau6Ky3 z_r`#-vTts(-zV&8YPxr;eR;`k3E$6Ad9$aUKY#YEMeVaQjKDI_^5PZkw?$#KMc!N}O8)fuQ&pD2M88|dstvzu+uJ|+7g@{eC45is+9x~NOCKxO zz0^r_H2U`B*|sZ1i@!+5y|sD1d4J?-8^zz>N@q=+TDp5)zl4rUZ~oQvC9ALfyt91m z{mool+g^NC&%WvSE9T4P1=8NdMMeKse=YkP=kYx#_;;JkZa$L}=R%e{f_h)%JUxcJBtB(&c93rC~<8iy$0%4q53q9P`zuj`uXu3gC{dt-56eO6XN zqpDO-(#LOicZ=&sY+ALw_NH5R-diokx^m@LRr~tY-)zI*Kfcto zWlL)I^>wkEN?u;N%X#6h(c31w{jy*8m3j(iYG`VfeoeZ4C1EDpg@|oszy*azOP zwr%!WxpJ-P1$XUQvsu{%6W&(Mt*rhG9CZaQwsg(p_nLq5)z;OauP4o)RMc_rxES#6 z6Ro}L?`H)}*txWHFR(u^8)9j@x4&=Y#JI!rzBAp8Ulg}LYs#$n(j4utQL{q#h^#LZ zSzmsI=uuG|Wi_F^Fsf}dgfK_Kunw?|CY8~oGMZFIvj(=l-l*7U={Q!MHlyx)G_Z~y%EK91e|XiZ4&-1l{@b*}R~ z*SeE%%)xrimr7sC$;qv;`N8T0{Le#9PG0W6K8IJ1HrRa%|NAKHg!MtW(hikLIk|7- zY^=UN8J#^ng1vpJH^XSUde^y+jvw2%{_qu}b?<`LtPN7R5x?zLlTVSZ(&BEV!OaGt 
z@h`g^dJa(gJHwR=?*1yWxli33bpL+*k5S+M?@24yO_%h*v)_;HH(uP~5 z|NXh#i63e|{_ijRuyqn$`rnUluKaU={}@?ZW*QQ=KBQ6uxH$9ok^lrf910YJi+_@ zP3fh~9{$574;@M-b)Gw)I+5L%NY-eN=F)maF$Kgr{FU+^0;5O&5os{wD%ZnF!N;xC zdSGKMj#*7+vJa4Us=^^Q5Ed{<(o8QWHskZ2Y%Ia{aUnI6M-2x6{hJ2aGP>(37Q zYwxG;XiBe)dnw9V$Mhwgs|m3nt!`~?wWxDQ;2X?V<8kv>Gc+jSp$2#5ryg^R=slJ8 zL~{P9zI$5Co38{v4%R3&r-wIizuh!s*8SdSjcnkZc>CMh2NG#Km9bM|mfA%aT(Du6 zCTsfT+Qg}K4O|u24T=WO<+emK$93uY(NDUVQEr@!;fWU|p&V6A!^?*WIzfUau|}v` zG4nz|!4^*8)r&QxYU)T$#8KIri41)v*Ip`(KOgSVwPuGnqQu3VE3_Wu8w8TN2y^t# zMUV6*tcJZ9{sXU%gq?gMuse!#8wx2I*)OnTN&4XgB;4CWW_L}bRK9^)lti@cr`(o4 zi}AZ|xoiz)R0ehAws`MWO7V;`mHC>&|JW79lylTF=vX=49l;`jx zVe4W0kwo7vockw%8!#hTR+H?ZrGrOTH&s_Mzv9O&uzqaC z4h@F*<07i4aGF7ZElR>G>9H{&CzIwHrtH53_ugI=byMd~dOBaW=jmuQn4C~oRYzbS`r|eSMwlhokHUQs6d~mo2_xHtmlCz4iZ((4zE?m-gfG)$fVRLQH zV=Re%*}h%FDf6?Oo~1S8hV&gCjL*r12Eokt0u@$UxQDVwy4ftJwx)ZS?#>Q)pYN3? z$^@+2PN`+}D5|Fp7Nm<|tvuY>*dno6+j4fqu<@C60Mm*_=3M>Wj9Jd6EZ4?yJE#D`OnDt(K$Ya=Vmevz$$PsV%8E)Y}>~lO1mv?TMZo~JfB`MQo11wuTy8iWm zAwW5O`q}Gb8>YG&T(z`&-rarpw7|{-mr)wvX=%-g_UQ6t+&=e=ei?m-pf3r#Z>FaE zNT+in**8(=ZS7N?l+i5GQKM}u*!bO00J9I`LR%+-&nb_8*R}h;_q9RABZEO6TdUu$ zt|K)_kIC#a2k%@6hzc&Oo(sAx!!K-W;m91i@d2_mE`O9T2oHENfy;g+$~R7AS;@o z?;mYZF;9|G98K^O+&lRC79nAz!b3;;t5GVcdrWXz%yQZrs*%gl4?R#z&b&CH+^)3- zbK7>(m%G@1D__!z`4{t=&ppf{eECijoBW6XM6C zf;+l|9SZKh-}wBgB!qlZ2--xxKfsb)DG23(SXVa;M7!OwZ4h`GdwAORB@7i zsCBV0Dp#4#ZI5%vGP5+ycF|kT&WOhBoX40F(=w|SCNe(9_zsZrt_m)MaeC9#Lh`W& zld(@S1d(`we{iGk*a31rVYIu$dUB#Dn-z#7?7#?n#NUo!+b*bCka%5Ei{3Z&6X7G> z3nVf9XJd>on>Wk~bens}1oN=Y zb2mL;t-$Ujgn;Eo?(9It$x>f6cF=^x&6M;sz(`h*P5VH1A%<$aCKu9d;R+7O@1Qda zWrOAZK@I#rL2JW)8yC4_znmD2h|i1{%{Py*pXQlXx|yv6@kSr(5Q!{cf6t%we%Gfn z1lN&t*cIxW^ci~n%b#=p6b_fXw!sIF zX8-l&KNkEcFz($2CGpSU|5+S4xql)DCDK1>1t{|mC;&kJe+@BbhD=~ z<@NUWD`nAqmTN;yL}34ut*vcsd9lLT&Jy?PcfCa}(+$GW&W2cF)!L2lG5dl4s2lfu zhVkMV{t#1I)tk~?>QQ&sIP6@JdO;z6nXOL4$d(vSeJ+2#<~pBEt3En(P%mV(K6<*0 z=C$-;lK=La`b5t}B4*i3<-+ri`0XKhy#*#grlBglu2mY}lB!vtt-Sy256csVjeaK@ z_35dO7F%3AB2IZN_7pnV9+)h>e}5a%e4)Q-E-Qp$Of;K&_0-33_`)Hx<@Yb`O@gsl zZ1}rBw$_GEt{)0%jGL|(bd(JR*EQe!{xa*vp|uH~4KW6Vm`u)O0g2eA(`d$v^zrdI z?&+yEGgO^IR5d-4zSrYEtVP+&wWdv_b+bz1nQF{e$_MtuEKCo4c|5hHDM5j)X}M_Q zwJ4xls^^$Q`17lR9&YyjeAfYk<3iz4LNziBp?>E4RZTCYRs8tkmzwc3chsVI*~Rfz*Qe)?zs@$+ zQ;T``+b$iQltc37`W?oc?Ea8O$)HKkrZTKx;_kkm4t|*I)+CzF)>N%6XzuOlu{w3? z)X7t)4#UM2dp4M)?e$Pvw@#;V`6BvGeg#Rg=Lk_{3nwSabNXqwLy(HD+#e_Z<^eW! zRcqzs&U*k8xRl0!b6K7Wt2O%I*P^Qv?OBSxq`1i8P@3v-C-$jB=WXD0`NLuK<|v9F zr)IX6>-F;LnjqB^6?P;Ga|esXv6`8xA+M3zh#*ymtisY#VuFGub$3B_*EYdExjzn^ zAc*<@D_!9j1k0VLK;+Qu4M#S>WoY+t(zM-KhK-B%!{Ocb+I^bGn0YA{O1`<+b9>yY zic1c)451ZO^awwGCcN`7|HUfxrx8YkDUHUzI_&}z*6E@q|TYG;}KIV zmLBz{d?I^fAr-M}M65(_!9;tI>d_-`m7%NzO{$2`UTx7=sB`GflccD$B(})OR>*7&`ie97uCGIs(d=;zm7d!YdZbp-9QDW zj}S6YzOyQDV13qspzC1ki9U2YdQWyeJbwK6O(#Ow#Os7Q9>tU*Y~WQoziM}eZ)i)? 
z4&d%P-98%2Q~hMs+G3$wL_|a&9OXdp@MdmbN#b~0I_>u>tHX}wXPU=G)-TVs=|aBt zeYr*M{7?Uz{OLd7U@7@v^x=;~LndFH?pJU$yIF>NeOEVh_8I30ji{=8XXeSvwVjOw znbFd_Ja6(NQyidbEbA!D3hhu%8 z7RT}$42hOf(K}`%VfA4f3!=%?qs^v~fqgL>H*VxJ8l?sh*`>v%cyss$#Gd2-O{UY1 zt@@J%pU-^!_;EBl*>HVZit)r*_t(gzV)m=;DX(3tYimtrDdO26h0XSTby2*{dgtsA5+daTMLA>dbn4Wd zGF*G60aj@2)f6B7q0?HSO;N`)JBl>0$D>w%6c)j?J~|Wts2MP6vnj7W?5i#NchpAC z40_QDA3we>WD+bPL>gQx{ZGLvSR4o%+@NX_9^a8;YEAZRxF5o#=>GWp(!1ejVTr1S zIg0`gZFy1P-F$sxXP$M+=6yeY_mz_VuEA=$g#HcekZ#oM@S{hM;-PlYAb7uy6E7rh zJ#^Iyu>7>{k_n1l!_nyN03(~=-DQ5=`R@iR13t_Q8GgQ2WpmwZZ9tJq6yO$rFo4{w zNy4p1>j-5H<(CR7WTOqS)vdbjN9!VISW`u0q6;R@z?tlsQ`Lc>iA<0LWJmP5VTBkc zCxt{s9U_2-4*E6}@sKy?nUuKjsh-_v_0eKXS28?1D}3n_0M;A5muV_tcFWVgmWh=C z42`}*xDboDrLke(tvT0r+NL!zWm2JQZMYvgLgY|T^=2rK;c(=4)bf&^o}MQ6kcN%0 z-#_iaP|W&@-4t?6qlh_?gS!{{y;qj%X@GBu?xlmL6e0-Z5C1FJf;PO=rF-u6-Mf0e z4uj>#^Nuv95SH25OU4kq>}ExboqJ@{I_oBagh4PcXxaojzmcXE8(4h0^4lOR>=a;5 z?kn!Ysi;w`CS|@PWM>+QAYDX9jG?_610+sTa4_0R{`CHf%d2Qt6g}^{<~nwLcfhH@ zAsOWyij=40s-}@iht?YJKnH04d6ioK7#md&4TtQ_bW0iTq_gw65)71Rv=itQA05xT z56NV#%7LV@%L+%lUzE%NdTG#-|A>-C4REj=$jRMntt*!Gp@d`8~}z^Sdgm}#f*#c z%4w9$gkX1iGzdCO`5a4`alS*2Qd_zXz0h7Jm9X!66BB^o7<-qusY~Bq6E*ALZ#t1+ zS-gi~W#%mcjG`WPXY#A7=H}-58nVkJL6F zw^rR)kQ?KVnBUO8gK`&t`cIL6{kV8xhF!xZCR$4QDSmJf7&S6w$=|c(pi{nGiz^-& zUY|Isz#->;6mJxjh$<-{JsW|pR%y+)g^?&`wiin2>`>~KXP*sQ!68y)q7>N=cEv@N zkd)C-Z`lLBq-3aK7NxaQkKgr|d3Vs<%Fo)IiJR}`3mawTWYOcIA~xhtBEvIQ8!rU%L^vm@npO>zF#y<8Sg=D_6nX_q>V)Oc<=Lpl#4i*yn%sPIHUdv*2{0s|4(KaR zP!6uys^OTMUFKeK1lYh=a?Fzj2x+pqW9CO z$;#}BtPrBL8g_@j|6Htgbm#rUp~8_U8tflym8Ix=99xhZTIE6h~Q{$bvodZG!z^1A!-Q)N&r%=w#~-RFLsuTm? z!zypvo2czxHFi=4ft0B0O&!a*(EG6ohNNaW##b%-$64+d!={5Ka4>y_2#=WC3{a7&Wn5Pt(SuNSppq_?6j+CGlp z_BGElFeKeCiVZfzE?0i5rNxFbYIQAeuMSS|xe~30#EhoKncgV|88h7|lg?XcHI+TB zf@^PzYV>$^``ll%0OAo5TU~b7>GxBIQZ?+acRN~|Lm>X~W#>nKZ1|g0|81__b6uli zv=2g^)HDYiR)cdk{mN=Z>ixrsz#D*r)LVp{2dxqMa*bhA{JQO|B)yJ!ATZjV(QmJ~ zHsa$PX54>=BE6E*P#*LPB|vb|PP=hSJn9bYrHEKfs?X3XZ8+9T%Y{-{h13dKC}0)(>^=&^DJx#WImuRHGtgE=fxiekkW+|*~Hb!@=9s! 
zBgvLI{WYQ9RFgGPmye!0&v~muShTgvI-niU5H2|NTk^xo5~w8|Cfwq?rcuEaODNfs zlnwW?=^x1jnieKFOM|~yiP#M$f06d@bd?SKPkIfs-qB*;aq(;^B-z!SL1=z;jdEI5 z;Hw*3b#bEi*~E4Hfmvf<*ojC?e5QEjYirerlgo`Vp^OEQkn}8!b0EO?R-lFG&1KP8 zs$*s^O*Lxxroqh_kbT$XEkyU+XnyZtGuwB+)yhH83XHkDZwRCh`}rsGKy5)>or&l@ zh4_SqYF%HsPg~}2Wbflsq2qU4CbDh^m5R86_F`Q)l7UW)kLy1p9rk)fwoqY;Qb$mNMy(7?0oHbUi;@ zC2BY{lv`4bgva0)_t5E(5rV2I{_<%Tu1g6RR01;>z@sT zN+7g-sj)>s7eYa_Hp4mQ`OKOAq+q=1yfQZPM{n zkvDt)9YF9WZlnr?>Ipv0F=`9IO^-0wHJha6$GrJ@6${AJE7yWZ{2-VINOSX-xMUjA z#gKhA?gVgnke1zS76iC&l1MBTN7VzKExYsU@r`Xq?CEn&egNX}U?zWUTHPElgtE&~{M4rY$wN3%QVrLZrHdP_DRE@CAZ~8ReA;+Y7rkbgC z=;~K~CnJ~nG#Y@TD$@5jkQRPJjX<6fbjR6{15$L&_tj7QYS9o@+(;KSb3{@N&>8lq(?;eUKTZ=nuxm>st;5IPdU z@Uc6$=Tl@gQRg0TupQaPTR2JT%WqckfnAvf4WKKrTl|21M34Y5-_ zPL9b=j^+m2wjpUOTO!Lcqo>s+@2o?+Y&3VKRUcknyu07$Xr2`fvc(EiG}1|+IaEPk zbQl+si;MU0aU15ak1CPKv(e1i0Pci)F;oB`-|L_b`RijgzH~j^1)hyJq)WoMM_ptx zN&2B6Irq*_FRwO3{3E)O0#N@j$P_RL5eikHDj_X5K6-WeZ`0v~tL@#7<+M?UX+s@i z^JRg6aS1xkj6cZ&RP^3mg*$Sj5-|-74DUwCLfdIW7oCO=Cz7`4X#_tTPixAlpX-Hy z7xj|yy}RB%cBbc=vuT#nP^tw&-==buFF^e79>^F6bg|F6a3^M3ufu428E$cId`pz5 zr5S*Ji%i0lscL3lIQW&Si9SU-Po_aJX{!MT&FlS}7xPmN8T)rSm!8S!XY8$;ULQ3Q z;o`OJcQ9=>p?`ju>Md&Fj{+-C@G+11fJ$n~@!wCC=)_Ih@_`iWH;I_KU4?_nn~)G_ zo4z+0mUS<`AK8Ma`tk?f^_92}UwFC3*xWRz5b$J6n_=+q-30+65-yEPy(@&N9ATO6 zvz^=S*VmgNW5mZS*eSBT&3Y?M21Ta{O30Li3H!bgICH`ly>m^#hXsR&XPOiolYk(b z2f1w}U5lqjUaUu^0F)&n=nlZd5{~P%PhslSD8C*wSS?R z{WZwBD5^>9Or;CpqgsNgzR%HUqw(8LtlLfw3Gwvx01up;m68CL*!(c*%-f$Q^^$VZauj z4T7u2`epUrANJ^s3H|^nU&Fetbsro9?oTtIwURBg#>$>vwMo_3nYIQ6>ryZITp;tI zZMqS!UVvz_t-BP8TF8jU+Dk$7ze)Mczk0bF>Ok>rHokav7}nLV4Mdfa%bC!_aIs(_ z?n0EdiLyc#z5JjwWrug!=mLko3jl7HZKMk|a9~)Khb-vdnrqc@j6VO{!U?JI6g#`-g0hh>Lh+D}dMH3uL**o`uAqc;FqTOt0p zoNFNV4>L+VvU;lipmud3;RlS}agX|eKvxIJGPhA0z`DzMouL3MwG-%^2JMUQx_hlg zm|BvN^(&-tSNLD8?n@{3!2-dhBaaAVMYl%F*+HB0&o6z%f2b2zXu}Vm?$EZH_csMYqBfg7uOPKH&8!XrSdi1LLAsZ zONqY=j;NO#Jf?vn8qZgN!rhtHo@J;5tf6byvv=)=OuN8tC|bp)fe%f_ZXCLPVDhmy zji?=242)-oCurnoNE|k#sylT@ef}t?r0_0F9rgi50Lt}Eyjb^~Yq(pdvL4?p^?M9) zZh9cF|As3Cz;2?)5yEVx79x*0xfR41tE77LIyhEC*LcS77mUE8T!x#qD{NMc1GO=qmER!rztEpij=#L9vh z1oVv!GY|;?D%0k-D)%ZPgzuo`;}B67K6%R(%;g+mS%9`@z0pKRjYRIoA9am<`WFjU&GQg?uJ*EG6hg_TPeIV&a$ z^148>Tnx@~aKbFp4d3c^6Lr=t=ClD=kIq#>S#K(b5QrANMKkC*;WjQA>)%#2_gCKu ztm_!Bbvl8(4{*fL*P~)Tm=iWvR-$m``c^3-w9u>i@9i`;(R77G)6IoR&{``V_yT|B zqMAw6tP0{z6-19+-hOK`CaCf8!29wK6H^^eD9LHJ{b$JO_hjcJ4R-0$R1{?M7}-W; z!{ES^RbarZgg!I^hL+s&JxbRCng`w5a{}3htK^BuevE1ap*~M0s3IAnSA~Uz$hsYV zhwk9m9`}L*t&mxu>xfO1Q@$akS*+l zF(g-lds3YH;`^8KdTBgBbvH5MY|B$Fjjd4OELvNuMjYQ=*(8H#@tW>0)81rT`sJkA zm?_5KB2nk3SKo^b#0WpZKOs0c-gxjv_VHi(c{Obb#L``gT;xA zJg=??Hhyi;$C=DdErk|dMz?N&zds8qUh?(p*ImI9IeGG=6;Si#p+<>2xWw9Iktr-P z@{qmR)Z|bg^_qRL(D<91s&>vFDLUN+Fup+%g9}Bpm>h|y5u!v$#5bAP+t*Hk_w6}f zq3R1tT6+*M{N_Bn0NR_P#@1D7Xneor#hDa3!ZrX;$neZB;85> zfpQ=FsVH~Xy2zk2;HMxwE{+h-a}_~*Bp?NMW9uO=R1}~)(F`tbW`BO+7iNpI9X-%@ zNE!#%y{)J*(A4)~cTZCy&LCSE-2MB=FfzS8Zkm=43xO<}*Cb0z?;O-=Dw_t`*1r3h z&o$M^coc-`jmzVk&7vHJ2W2RwLN%ZW8a3ivzEj$^A-2Qrsf z1n9L19+QH6i;9g;Mi>X?>pab`zJ1VKezC)4-4)Zc zz8gV!Jm|r0e52ekv%kHKzuUL(Q?`<2|%3zR&MO9jk^VQZmihG1u55< zY)ETU??s^AETE8qUBsOb+buOi{Z)hG+cBuY2jqK4S_&8m__Kf+z?0{EtAOUyEWXlUp`wsHw7Ln7TcR5+Eeze4I~@D!NcD5TbCAQl#wYjSPm+~ z(BI$xqQj-Os;ralSP<+`mS7jkP-(Z_GXxe28<|kxIO%%JGS*X0o4$D1&kosrB}OfC zKYinr>*+)J&vdu3pgRDh@YvYj7oYn3BBtk~OU^%`)=eici>7i%ONoAg1@!ncb>9|j zPEs=Xwrm=4fIUzw&(U*ps&JsbuOn~6Ev2(Mgo%`j*CO&Kpex;>7O0eBzTirogQk;y zA+v7*`U)DeCwoEXBT~Cxn0$#%$LraI5NXy~+gut0y*S+kyKy@j4^b zbrg^U6~Mvr=0rsraNhL(g=?0}KlP2JL53v>0`osj52Sxr6)@7rSG<1ELVmGog<(UY z8?wXRLz0d6ID(r+wbGLtB>r2w8p#q(D;gVi80=~j# 
za5WsKg@EVRY3c+sb{Xnj0gDTwnhYHU8dDFa913EShFahe)}#l5_56X3pE7XoU#xOt zw?sOgt)*AYY)TK3p67~IAKsmF?SztT>HeMOI#4swWKPmdH5hx)IaSXC9x)10iO!4# z&F^?*qEh(DA*eiUgho!#SdJg?s^Qj@LCB1x?-KaLu4qQ_yS)v{Agko-&@t zLf^w0daMdx9E$j>FJBPcS_(a7;Cv!n)Ad@)-$|2lY zf*Mv?h9PLoEbm8_&ZiH<)sFh@d8UN)IP{bg1YD3}D#%Z}x!pQn8~KxZ8YOCx{XZGc zbo|$M+(!wujV?=dJ6AZ}v3Gw>1bsJ&DnE4&B~djryN#gmAgm}uE#t0-w%#xwrwfJa zm!X#X#%O}=w!&0`~4`rLBg;_ibmYEhv-z_@z?+=2hVcKpXML~?(@Wx>i5416;0|)ItUH_g( zE2B4HHY6iEtR5?1L9;dv*|!OCLcmzvty{OPpbr|dN@y`h6Hjg0)lCTWRx~vPdjcSR zv}B&)4HcmbfMogep-*8@AgEJH`S4Ju8Ce6q#TBpsN1zd{kZO*ce)yQ%nTstuQCW)@ z1-j&*$qY2BBLc3GYe0mH^xXr)Ei-H=q&DsgeeNNMBzPeifhDMIJ4G=lE%mnpz2}WH?|>Sl1~qr&y(y83=Gu|7ayEqK0reFjrCr z7f`trnmGk+rzA89gfo^QfJLO7Ogr|BmCIC%bdN-g!0%Qk3MRY9I3mSZiW-<__A#X+m?GGG_+-Bx_)6$De`4H`jvtMhCdAQ2Pm~ zkvE6ZmV`WYWJIHpRO4QCc(22}6qRuV$Mv_|x0N$F;9vgeDKW4+>i6I33_=1!=%$3j z@JI8K85rha7U^fB+wCqzg3&Q95P~;{lElR_{!s-_^})i8DL}Ml1^^;UbW|Ywr(U z7>y=R5WPItd;uC3&1i&mX3#)J*GDrI4lSXT$Zw&(fBP~ky!dCCWvoysV8BiBtK~+{ zLHU{bpc(h8g$MFg&p-K%+(SM2;(F_^`3jF0?2mt{zxxZ_-cOIOH>lU|$}~FN7=N`% zF~|4{rPR`+N9DAAS5~^w#9v)n5-CeDaB2ca~VP&zIM*mws9tGhxPkD-`cH z=S){F$etC;207sp66d-W3(c8uX4NX)!6a88$MhDlpn7_I%ac5pU;eHh%JZ4UI5N;( zQL5!)Y6poh42Qo4W+P@nqmTLO(2!0{XCe#O z5ua*E3Ekd&!1pC+EH+4PTV7x|nC;DEXe!Zol2P9LCJ_!cgNP=#M)svgLdh}0FGs6D zNWAr4;4#%hqF*W_veN%bBw41*a4ME~v$g6L^;FiDJem-mFyJhyA(7LF?1nZuK4$Vxkk@#xDG1FeTjzL=BAmA zmch`PLPM@dA|^Nzmbyz;&1#|pM$Z>v8cyoA+)yzI-&G2vT2w$-D(GOq3hk(>syZ-- zr(-+9Sg^J$^J6fo^CBAcU@fI=OIC{}DLZ!hy#DpJy>)v~PlN^uX4(I74LUa{xh&|} zVC%Uayz6w_gkBCgZZ?dhj2c($H22_)O&s){o}%9ssmPG*<05Y-8XBeQ0IXbMGjklZ zJy3^P4cKV3u#TxpTIwlaW%%a$H3SC)Xa&O+p(duu>dnsc_Z1s?I}9SnbVw+e7z3!(u4D)f@rs=R1x98lL37^Bg^2=gqL@>Xgy%otR3 z#Rj#)rJ-$q#>rJ?3e1@b>1Qt5s94~P@b9kScj3pZ@n^R)mIFKmnKjU(USa+zxUuc1 z^k`zD-aP*qO@*@}$gv(~JS;s6FZ0ofbwhQv#T%XKXgv-_W~soul|Zu&v0*knki*?u z|NeKQi(@~a7H^4H_DGVY=Fa&;HbHJwFj3e+A+hc_SjI#otStBmj!hsyMq^)t8fHOA z@>^Xf`*X#^oZN>$eB|0>bnf4CkKaV)g)&mHa?gKkn9oh+*YR@AcZq#vJVJ8-F}osh z-A7MSSI9cYLikKH$e?TjdUzdeBurSaku963Ly4|_ITVUoM7Vc{)Tfpgp;5dMWLz*^}7JCH0?#X?>RI?lWMiL6_;dlOKwfMNE;J9Ih?5nR5GY)*%d z0ZTI!MtM-$uwN(ZW>|d&VpmD z3Cr>IN}heD6a76Rb#|r>^6KGs%ghrMi*T=r>v9(6ZwMZ5Y@~C=7tj z9E==X09l81nD)W2O55)UL|-c)5Byu0YhzwVmYw?iCh z3hU_m=ljFo9YeBoa(NNZU|Lj2S}Kel;d&~jWAKY^NngP!`~1~U*}NWw0&>jvg(Kl( z2Db_D?~rjj!DU$ob0S2PbP(|?qYh{c72P~DvQ`0(VNBH($3FbeZ95bNodFt}umT3z z9()b9>bcTi{@YhAxg&6MOuyT|Zj-%uO#AN}Zoi4T4z~te`0Fo!5!4@l*Z{YLyhWq) z@ECjk_v-((Lg?jJf8TDq_4r@$125nBEGeS85r0_cyElQW;mR{^Aty@^@?h zKFx%`;s9R0r1GDug^ptU@8$pJ#8)yF7UiE6|MTad{+(F=gv$S+!ioBo1*F^AEBSG<#}4Cs%-iZu|^9M zN$rd$0J)AfE0sihS`uB7VmtBN*~ak3oW-bKe31}CP${hOl~g-P`WS>TanO3@kgz{d!(Gsb9J#!N=_Gdh|o}&1!x|tpP!M#MbX|g(UIgvv6 zc6Q_yU3sSZiXe=D&mfUprmI`9Q9CSkMnl5bv)NIrn;NVdOX4g?f53a5txtbVzf(PQ z5)VI{Fi$ntq|Zd-bD0F&`JwfT&&bRZp~CTuhnBRHL)9l5=Nl-Yqdv0%%I$Ph{vt2% zjAq;-)}c}RhVO)M$baz}FD7S{;aua8Mv9uL&z`FGv7~+Q(9s%CKQ$X*i_r`i;~Z_& z&LS}u&BHqD>s#Q5UNi$f3~7|6Zd?jVUBu4Q3Chco`Vn)i1W)-f*KLnZ$bwKX$Tf_{ zB+f*V4~!<`ZO1akEoo<$#EZW#KEs=L1gt{gFZVonrK&4iH-SXoFP-{X^R&F{Q?>p~ z3%OJaC&njA6{~*OCV6vsp3{lH=hY*^&WI~lg?G{DEL+>=^q32exgU||SzP=#`I0xs zzNL)Q9`+FjKN%dd45^GAt?5YA>7AWgbmHVJ*GQi`Flrv`pzU18Jj$*;f@=xQxzZi; z-Ng}Z$wti1(&VRgdXxqJ{HoRoT6%dfu0qN);hiH_pB>JTpDpR1&3UcYE%eA630u9T z@0Fewm2qcPw|p&G9mA}Nq|;j7CYW_!*YUX~u%lhRlQ?QwJNILd1zDB3r?iJdj38G_ zS6$$YWwic0!-ayYJ<*ZgPRN}k(dW5Ac--!G(G({~k@*WFl#cV#6|IMsl8S{9= zHi@f2_34NUn~c=C>)W@a(r`9+GLt^Sf4Gyja|?4pXv(&Qjx*b;G02EAOV?Zdn)4aQ zULfk=myR&E>8dFQMvb>!8ESZr^V}_qYN~$E@7$c;fn^XHwD0(uux)L}GY0C1ex~as zuo-ah-$aqqG45OCJMg$orPUpRj;xd-j}Tu2`pE)}7G;X-#NSEv>1Md=51I2s%>p|H 
z&LP=W9JsQPa@c{26|Tj7{mF)kRn8q|a-BHoxrJYw&+D5Ap1*u_@s~&H80=_7jC2xf zofnfs9}{(EC^*nbCiuCWqqyUVI;Z)`{`__#NfEPyrL6Nc^U)?u-gbAsea8@yy!4H| zDP;g_T@sp+bm-wovM68cSBBb~$4!QDb3U6eGso{vUcQ>jFQqCbhz4g$Y3Wn+qm2d= zwx*ONe!J+~ko2SwmG0`o*BpOwEnB(dYv%8ap_;gcUXEayTQW+RsmT|Vl1JFaGsKpm zQ`E|ek#?@O?%o8p;X3BoZAr1LA`ekZs6FRJPmR@nij`V%B{@blDkZ?2uFtj|e?c+R zYg=7tX4FP+H`BZM%BjT6l|=8Z@5=B!5;kXN>R3s8s3YGo;jPckqS${+8Hj!=czsqn z|C&QI)6E{bob^>)dZ?rmfDSl*HzQVa5iSHbU5~1q7Iyt zm`0tmo0oRp^j(^J6HZBbJd=wvz3Hn#)5-MBpDFEtYuqFO9Gkc1O2h@#ViXT~Ax1;)mEd)W%?6UvOHnhyk` z!wAE=LGa9AZ)aeXDZME>)Q?`5YnHmCGLr6ln%_RF-j3(C%evPlpT?D#Iho@-I2lVd z`ZUw^f`=x1+eMjd^F*DACWZu?e@g~iNiEc(%$oIQ`KIO26DM9A#ibe^o#@13^COtk zqWV*9dJ2=`bDSEh@TYaUyc6s$1D*zBNMp{<7gxbHr9Irw%hx;~ms_aDl*UER)|?z{ z=;Lv3w(2D>gF=&tm-f7lRLhEfX46;3f zq9mW?O)-*JJ)PprY~k^zf6h8xe}{kfgYY2R1ADLFz^E34{?`FNFWx%6wN8iPFE+f` z@Ni<}I{QRG)V#?4X`M-_vcGW5N4Z5$mEUeo*&m`oA@Ef+Jxy064;aXmr za_oK8iq(zG{X%m6PycA)$PQNbFeYrL@0jIkF)g>9X5aFEbNAZo)>NR${o?75pcKX*jt7N&H2VV^kLbY3Tq%*Nm# zD<)JiSh_KT--=SY=ye@YG@-^!BD6CZ?hL^R?75551#E$4QFydDy-7bmJa-ZzFEJ$h ze297JLUc7xy;pRuyWUbhl32}TL`}vFH8UAS3gSQlakScSFMf)5>t8KG z@o}qVraHlSnbanASh zvd77(pDhJB$Raq;7A1XHjN1g|K-|}n^WwodM$39h-_3L&E%9$dzoW-Vg*+_TS?vja!>j&U9?l~kKL ze#Q&4b8IXzA5~sjuJ#sIwqO;ftL-N^5v8fkc%CbTxs0cXc$`#`1V5@fYT6Z&yj&li zYvZ9=Cl)Ub^JgckZHAf8*rR^SiS6zyW$4lf*cNhM8WQ8P2+P^g?Ki_BO*f#wys7JN zF`dnkMnx?ut-SL)wT05ZdPGyN9#O?ZzrPyckrL>#0r zQ%bEY4BZk%V?-=RCy3Ho^h&VRUbtQ?b0ZVFh5XOw-kiS`nfB^(Nlhw$XI8a&K-w!k zcI)bi*PM<;qhzv}^7LqKh9tUS(QH(Weu`DZNzM(;XzJA4ySqKqPqpz#ZeeBYoWwrK z79>Jk?qqz(L;AnvL3{h+H)Y6@{9xet{AcXXQXK1jxPrk< z>=Q19(o-Kmah#G?J+J9WAvcVy7p71mLrBGgv(7l%qyW3RfChT!O||qpf*-e2DtYQz z9eSzhA-gDpSNhg5;U|sPHn{{5=0(mjdVCgkMt3deX z61J1_IIq*M5JG||vV18tYjC5_nCiDUWFm|iBv-tTdNCOO1NBc!_J%zA-S4*=@~kA* z>&rpw^5B=mj&yU}slBU?9`RNCB6GP;GS>9r=Q}N13gndxh$OQz!#Mo8i_7ODaRO{} zIBtlO{K&kfGpWdu5?UA6E?}?%GR@BKPY79>amJ-ev(26unT8lpV(NVP&BaXr_7b!H z-gw^YHedPQ?UuAvF+TOF{KJm8p;h{)# zlx@T?adz#TL7bmD(?7}=rz!}IX$TU!Y@@hU(2_&_Ue_I_?);X$fAYp)4e=cR@KdVK z;Otb*LRH;<-V*g;4DL0Nrz86XC&GfB&hho1A&w84=QM);o#SSUtsn5z z1#>?Ze+{lrk$v4cT*a(~pnI&I+i`hb;4oqDsFpNPf5w!4k{4o*Jr}8JIQM-?-KH$M zFJ)Sa9E0RVvv#cP#sy!28TCQXcINy%7A5F2{DW)XnLjDlM&}lG?t-K8YZ>PloxkrDX?m{l;HBQnUQO z*n7{YtgrQ3l*E`A6^#WH5iAjvs`QRgY!p%H9R;Kpk>0IXP*4PvE(jv+#h2c(0U}L$ z2Lb6#r1v|QF+1+F^S^i8G0vBB#^y_k_`big)>Gy)=UhSSLL8q7<_#}BPj!~W&FTou z2|LNT#0;8??aP~+R31%l6MLMMcsh%k-pbS!Xfkbd#ZG0sbRx&kF1j$ac7)c&OM6#d zxuanJO~ufd^6WM-g^Fv#V%7FKiM;{J%)=$TXPHK4r@a+JrQE$j@12;lw&@l*StuxM znc7jKAa(>Rp$=}9>`f;*ugHFB6NZGa8*j}a>Ep! 
z@=&@cj*m?2D|-7vtzudau|36D4?p+gDjnwBcl>Z!{x8Hrcrk14iQ;5s{J;uHzkQ;j z%nn97bi^wr=F57W3zNGB6Gw&eW*3cBTmJ-G1w}0vXA1R97;h!c{h=^FbHlDp?UJU+ z@N@Cc$&2I4*}eA$b7tg+DWQjK+lsx5s;j5($w*()+&GrNM==HTI4R)z;^2j`e?wc7 zr!_e8d~LgeCf(~D1)XzQx+p!b>c)(-{SBXuW*^UH9M2RV{R?hAQGBq@JJC`zySMAp z=cA#<1&@;6I6j;DsLz(S)Fo>FV@!E(YFAK!-&_lIWYRy+E4xA`=s@jN??T7={5IVY zm&S$OfTdZ@{5Al*Rd<1gIgW8UFin^CDt##(Dq?(pEq5%cDfexLg|22#iy#R`js^)8 zm00nV*%l@p^-G#QW9IJpya-qO4#`b4dvpyhi6sMCz1YU*J-U6gX_0ed7xk5$N}gs$ zE09>fFYYH?*6i4E&#FJzezCq;-tif4UW#I9Kv_1^jMzgVH+(qb9m^t0I^CC#Lq&!i zPJ?2jIkOo}lXlAEvbJ-nopE^*bUM140 zlK2|E%EkVuRDEEl>Hb1j)A4nLbJX1XJ1JdLv(dc?l%3*~?d_Vcglrc+iTce%(Cegh zGS+qmF&#RPck->E_RhSsoO#Djq;&=*d!$6S^|7ORE5ok6nKDNOIO?6seJnTo7` zR>ww)Iop0czSOyI-lfS@ZNpK4a2Z%t^K~E~@u492mAJ zLYVcN5g9M#%i}#XT6T?TEVaklx@}L7^(D>NqJ4!?0+icU{Wy6bE79AjcYL>~Zc1X; z(*Fe@>)X`-9U$wsxn1FgF+)q?L)zA#z%ck)z|L?@HT1ok$S)F4{L{+&YhnNYNfCIW z{zO~}Ia8OOM+duR_-J*3=G@$Ui-x9$v1sUT1E+s6 z$%DE5svFVccQQ;fS+Z`*mUVD2>R%W&-bwN zS9?rIda^g2@zz-=(7EfrJnbIlmYbSNN8BVcJU2wOx{Q~N5%=RUA{QhySbMi#`%dtU zy&!rXu`{Bf3HcXQ?r%olg|kGD)U{)&V@I7xQU20k{#TZIe?76x1?AbH^}}Uhu(S`n z&2F2dw60rm=gR|c7yYhYG1oa8V%Wua=sP;twtU8Y_fb4s? z1?sg_Ew#&&;t+zQhLj6*K;U?bSPD-ByVyJT%jj%_25NF*u&cKbe{STwgiB{b5VSIRi?GcTPk7Y#KJYV#mxM34L9#YO>XI87nXi+j4R~D1SC6 z1AZ7?(3!d4ElLtMYf?D9=sqa^sOzyMC4Gnf~P_$3sym zgT0%c&A5fU5pFXaS#%34g}X(0X2o=7EKFq^!}XvMt_$z_!KXA!ODoq&2_74lbOjIv zk3IcHBgMa8+=qIJ=3|E{H~kGrdgGuadm0=ZywTfstn-*F^k}oquU@?xfJM^nqLt?w zA%~xUc$k%irJyl}hS?mNtaXfo@ceGdaZG+wxPp=mZC)*WycdQW8dTd%F^IcR`S*c| z8Jl((?!&iAZ(%R=mo+ka4nJWddh&Ndd{1=*hxEb$&<2vbg<)tj*!S%D^sH<>%^$`M zxUj0~YP#*)H9M)W|9U<&WcG{x%B|vX&uN0~!_pLp?TPT4ij!riBeY`5jc=q;eAmm6 zGa!gwzNuHx;PdPH_4V*~4hcC<4Gu88bSX&!zP$_PJ9g}VbWu;P7{BLZ)RIB98*K~8 zcbyziDkv-*fFg^1tN~I~{^oVR{(4qjT^)~NPYld<3`X0pU;ha%lFE1v)|X&#Nz)@c z*FNgE@Xdq~|3j0hr1eKU;3aAHLN7)^Iryu%FXcl=ASxP_fBl#zJu5pqyL&cNVyoA$ zx7R%mu52`vlB-v(I_S9w5f}^eG*oa+Y33SBb^$yWYQWjw=)I=pIgFkRnqfQipm;)M zE+5J5-%ZQS4`W)Bo-<~V|r&uGJ<_+ zX=qrwdV8Z0f=W2jE|kz~<+)@Y?ILbV@9{U=E#M3I9{S%1?kiVeKVqiL=K$v)c)y&M zD_w=B+gErbF2Cv<^Bdp(Qk;ZI^0-xfpTS~?2_bJTEhz~6J{3jeFlav%bM`!1_BR5vZ%17PVCSbPT($dl}+Q<)zzkGvcJWpGP5o2THres5J zxlpl~yLZ>l&d##3u}Q=B3l1Q@N_x=nXhJ7h4VRn?JOP`xZHt9wG!`%U%jBf3DkUHw z;Et!Kg060ayn=$?!-r2lf4+F*=1um!dyhYO@L*lgU=|GK3W|ybp{K}l7>kHBEwH5B z#&_o4Hva7rVc8;=KHUPtI{eco{ivv@7|qN}c-tzeCbf#ua!iKNy*#k5BC0@^J$v@^ z^D8khFs#N>gPEIBf;vC+xp6f;67XAtA$Sz5eAqcSWMQ$OAS-+Kl-{RLpZc(-MO7)>{`^@!AbF4M z-iPa$L?Yp%D#^;qii0f_92_!s|NeU|Oh{M${IgR{PMS&evwJ_TSmB5J+yU#35D5Pb z>K$TYV&)*+8Bce0ae)AGAE)YzCvP*`-ZAuA!aWC6|Ja!shp1=I*hED&!o$O36{5ZH zwI2oA4W*m$jOljlNc8pHtd()`a9&;>n@(O%L_|c*lfx_gI5q0dTf_6rRUKyMOH(k# zv4O%vetrQR;R`>h<@sEnZ}x0h2$$msFMwu!(H$zwLo5UwR}oMEk;I03%;HU*OAt0F z!tbk07e*uss;ZCo^nWUT7$|5NT1U|`t=lzleAljBChd8-&>C7!(pJ^=G$!gC!du)T zQVx${zVoktN^ZboQ-VFnn>Re$l1k=KY zmo8o6@UnuJ=O8pkXCaKF^$w8T3-!0Xdd9_YNIcBO91a~iMAFeVF^3nsckhlbUpx*$ zVbhEA*I=roFx-?H_d$~-1}}S{CgveAeO$<|*8cU^Uty?l1yL>3s>2X1*`;z12XnXrJE7&MMh43t}07fo)4izFzurEq6s@#o$#>UM}{jiAd zt;utBrrmn*kIKqQfr@b$uNq-d8Bk#|VzRU_E8wl<%c}6q2;umK#1f1Twr2H{^9P7= z@f{{-NX6BY^l!tEEPi5Q;>fh;-Mfk~D202!dn+Ukb^8u5lvl&)H3qf@S(m!+KtCv+ zpq;CZ8#S0$jFCSEQKXY%{q9?d>}7%};U+hV??A zt05~^G+AUSa&=iQvq|Lk;D>hh<~k-eoBWqA|421!lqXLhkl$3f(=mw8c(=Q|d-7<2 zj?37M~ctNhE|zA_Z=i0R;*Q3X?<0 z6$9AM2U0#tXVtL`z#&NqJBMz|7DMY$lz_FcN{v;~c07h%u%Aaoii8`ShOkK-Rh)W` z163>0Tu#ml?vyJ?CM8=pe5Hzy4dg+KDC%ku75)7E4aPM`*m!vrQ2EwjLB+!rCH~!2 zb}4CT=_)9e!*L77*z3na!$NytYU>1D3EXvP64a8Py}L>)H3wIO@vu1T!7bVDRurrX zybEm|_r=ukkJ3I+jU~W^_>acMMtbkl_(Yf)#>2zi7>oKC9+5oKJ-qm^n4FBe-}~A1 z;{92O<75%ykc5k^O6{z`Q2+xwmS=7f@92$(bXdj)6Z2zL61B-&3cqo>Dz#o88+seR 
ziLW+Fy^Xy?@s2XFFuyct#<$+NcQ4g6{%V0Z%;FPU9b4b2^^PKpMML#u1fNs#nyR3I z=hG`(i#?kE&Xl@u4sNWqPfx5bgJzgKi1+W_k@v%A)^Jc9m5GS+l-cc`vb?$V-snz_A@P%&gu8>`BscE3 z8d@j6`}faHa@T8R*+{{Qvl4cwW^A8}OG|6uCYy5Z)sM*()`D4hP4MF+mECJn#E#*4 z!*`vPQ!A_H+4OyzgeT_6XRsvBke8R2W|gdmD&Amnq*VjX5oxmxEiIbFx|oDSyh(HW zbvs-q9@vevsC|-vU4Y7&Gxv8fZ`l$JO|C4wE7Bd?ki${YBvrGb5Gp(R?N9)`sgH6l!6EqPey#YegDyBO;{yU+nXb0B zHbRO&@~th3%@AShXCD~D9&xBez@O_mzLcdr3d?o%+O@tm35+!;8q1jTA>Xxy6~yC$ zAAkIDZ5Q-zFE4I##XFJ-Zueo4nP^{Ls^_5Jyg3pstf#Qf71Y%Op%gqvH9C)MGVme% z8W#7;5FxAALsWcsEG*!80%S$p7BULTPavto)Ht>c-J`FbL8e= zAmfvmnCRouYh!oH8T1`-ndFZBO zWC+nRi>bmrNIvsYw}tXt!5uHJI#?3y>Q}FPJJOn6#=L&b8U=VIl$s7=d~N>EfO4hL?Df{1-p|!Ks;X2KhU!e^l6}?>?NQHHX}g zr^5>LuxyMyZ(Ns|{xYNpmX?vJMz#q@-P?W04UHVBhCK4C7B%zbGzM<8Qv^BlzCT!ApWvW7(ICDo@XO_QofLPQjC!ndL zVfLca3$X<9F}uu!hyBBfpCQ3ymPeD<=!Sa_fKWmJ#hQ13PO{_L@{pyFCPzKPB;i;O!bt+anH>-n+!&mx< zi~oTr#cyqD{!vgFdg;BlZ^cGs2 z9}rVxoepE_+%FXsNF6vQepJ9Y2aTW;Tn6tj?c25MFphYL4L|n=s~6s3wb&wSncViU za&w=BYe_X6YRWpXQJvs=!e>~%Jt(Uc=VsK$ZzZ9^1kEd!N;Lc}jj%GlOid}HDk=5K zMg<1TF*?ate(CAyaBE3GAF0maFX}Y$yv=3&l;l`3bq=oYNTmTc&>*4&1J8}2WQJz`Zjj-xMLz~Cefxuw=D-@3Ql z7{?-!I_x|-I$!{zXTSXX{E~{nlUSDg>{)QHsRB??6D6}Ht_Y@?JJ#y?Ur#P?$1Rg+ z;^E-Oro?AZx;53Jwdr#MWYQC1%L^w+jQP}9zq7w-zE=K~66EF+rMwlYIdVa=d(B%i z&LVCIWV^xQ#~5#neEX3jN6gBjloQkup5hyOE6`6&U~xBz(W85K`aL%Bkf&S@tJ#-J z`L1o=pkPSR+qXu@SWY;zhlAezE&dA*G4!6HZnNE69%ZRzp6j^3=ce81TeojlVguGB zXb4LFvL994z3swPo#QsSA7f<$c*sj7hzY}L%AOdsWN>B5d;R)#X=i3;Ch4Enl(D1V zgFAHSE^RbyNS2CZ6&IiwAny`OFB&0^ky#`2QWPv975j2W6{0U?!LT8PGR9_ zd&jB8{!Tr`=s_T#v3y@cs4jeMfEWa+aqQoJ5{sW2>N=(7*&A$=5L5=IEPkvSzXLjL zLsV4UCMb^z%3IOF(e99F8PY*C8(!9vZiF43`F_>a>(?h$F4_zEo(X)$u4{YMM_7)Spk!l%{<6`hB*NwE)$2gQ78 zX$jZpL+Yy|*$?5?rhtH;h-F>Yi88AeMW6Y5@q-7?0H3Qz?PUSG`srzve78kD2-V+M z+vZ;EHOO?MMRQyUr#G@ZBs(7NQHobR3)7{m_(n?y4smfU81~tFIx>+5?lFEI;EeK) zu*Z*$>K<_6v(1~+IM2$Ji@5JQ6{z2LQW;KbNCY2rU-{W2%o9J^8t9-qmpIa4#_)qH zd|Nzba2P}+PUbjtLpwSby$4nA1dMf6(cAMl4?&6_DCU@?H9Ipi^BmUv9^1#id>Pud z9u>d?3yze|Zzo8RB47XtW}268P{5n+M}L45bt2xqd$)TUf1wI4OPlHFF3?w$lvLxa zLghf#CVdp(2F>TspT9w$+Q3Gh%Cl1nH&)Z#X|e445bnWPl*kBYXKKhv;4_IZ0)PEm zy5bqTR#XALsI|8mqV+pwSCm!W+^hjBQw8)MYrR_=y5pXZ<7l=#*Q|%XS?&9 zuV7&C4Effq!5-Vx1ZJ^R>)H8vIBc}>*vP3T8{9(yq;Fe}2a!g4i6J*c(i)gYS>D<* zlxE9I{E)Fdtf=#?xX3K{5v?7A*1aI1$Mz?HdA36hl^kg(%`O-isN1@{`jJ>iH`UNm zb|PWI)FcR5N{@aG);8+n_^hdXoGs{4?h#=SI`*`NFq%3a6b&@}lB)>|H`ua2xRq}r@-L6OWPkjGiDX=S$1$toaE z=6$*fTV))#Oo;&<81>-V34p|dYk%D;pvQ2;GP;G=sfG2PX3=UwkFB0Gm zs{BO)9IwKBjzMI%1hR-n`RzKKHI@vZuS|DJ`x=MqGvVx8FrD}#;=9Lq;ICmp-7Io} zB4C;zFKP`P5bTn(tbVtm813G?dBenG2SoK$2=8h;PoGG%v+Axcq7A5E@Jq$h z{eT@FePg z4+}F?P;68D91Yz7G3D&&Iy8IZ$EN|gqRi&?ZD7{dVqFcTbPV z?vRby7dx)*>Uka!2N&4J6nA8GTB@`kTO*%Gs^F^~Vh$gEGp%|4;FNXib6|lUNA8kS zHY`4+^Cc!|Q78x?SG0;UlL)j6{9eFQZQ!y&0?&o_Tib4)@Y>E@+Tpc*k(o>3+1>Ex zh59_KTwLX`U9PUK=4@Uv($YLGOEs}dC7r#07-Ns64USXJUFj|3*n_#mi_C-3sBFHB zkMnvqI?-OL%ZUpAih-ac#~8@^LD6Q8tfbU9jl;;XU0q$YMNQ7Mu{!yAxEdZ5%dk~` zT(GdXC{v@Bgeo5!u~XOd`i&b5xTXYqRga$M3Ma^*+aZ_XHIS|EfpU2Cn6PeRVbR=Z#n30 zH$H{KH4F38bGsC*;)fM4#JqWwD;TO1mX4flz{CR}9PQ{iWg$Bp8~KJhMNik*yjbkl z>QJteZab(SQX&VYU$zFdj0U#qGq@sIdT;!l=s5$ty`-k zr*~@z4=&h#;@`h#&r#&*8r051B5ib9IS!Q{m^e^}V;NuU6RUHhw7sjcPY!CmKq`%m zq$i8YUjBRTB(~%01W3qUhdWI^N}3?WO7kO4sU`6=PjQ-)@(zu_mfSN>cghG3&CSh; z=$oAoQ>n9~B>6?H(*a)3vaPym45cO?dsb47-|IFa8dooSehYO0CgT5|mf) z0;Ni~U)(Ha(;ZF@CU?E_(r3|q7*=cgm#@?)+8=XO>v7AKWXj=Co8j@Xu}Z|&1OQAt zhe$tJS}Kc_&p5S=(!Vqfrt9&rg0C}bQR_|oJ@GKmIFr-tS(j=ZIb5~X*n%O*%xL_6 z5+qeTG6#s1%Nt#1g})rv$V>V92R?1GZd%54vcgAR=_X&ke0gJjn*ZwKtLZn;1=p99 zlysh2s#~>c)x`$WZZ-?$N~KqFdq;4RRLO72sl37bMH=&(jp_$4#Hc=roNa#JRPI1| 
zt5(1BboP{Oe^r5*D4spK4{F9J>A=!}b*p#IhS)Btv!$$CyEesH?lfEBLyNZ5SD}s^ zTBBOw*4y!3?%hxxnRfrm#v;r4V`8E0s0m`gx>t9o7x`pX9`d*Nd75wKnpNA+x;zL9 zGQH(z*8ziwuoJdf8BUY2yOd!WdUK15!x3s=RDO8mXqQjU%~tB=U){9`u7OdFT2TyE z5)z}F!90_MizNpsbG)2op`7U{0H0 zPPxH9dj14;LR|;AyJF9e9B6Gl8>fIBr64vWH=y@Dm)4qUs&cb*IDWkTLH*8{Pd9Y; zK6Gt2lNl8G^uB&4qCY7OX(*S!hxGrNOsfj1Y?R@>R$s|G=XD%tuKa{51_~c z#&%P!6!`>)gc=agZkrdULvZTGjT_CH+U4!(owIHrFg3W`O~p`WEfo-_Zg0mbJjIl(*ihT;qu{<#X3a9Ez{j67yJQejm!lJ z4b?dd<*yJV8!4k#Yr18ambH4;tGxY4V$1d&2sHu~rh})Z(t!V5*;N5tfgw~wW4pMM zA^>cbQ)S(%PV0*b3L3?=Ei5Pm1igj&s&$0hb0e^_7cUMbH^7g0i0$?LNSq50gF2VM zqZo3>0~!+AeahhlvL!G{IRk1#L&L(4L07dXIP*gLW*J^`I?xsm*P+~>36$Dndh(U2 zF21B}ckqLnkP@*Qsdy}u9RwQ5d(C@{WfoLN+Upas&zL`1~rjH4{YF}infB5 ziosSB|0t?0tO@-5mK?2dQp!T&fE-FMoarTytO}e@ZJI#?KsUM7EKDhC!77*>YE;0f z!8u9E$xII@3aua!{mDH*qi+|MPo+}HI9n|qnVL*#TZQSP&vi(Va@M@rU|dK_N($Jq z!;&3X1>qShUPOMa1W<3(p-E&Z{+!wME`l9h7bQGVMUF2xrJq0D1JG2Cs*K>;)ASUYSu^nO0teeG7qqtf9_s zmC?Ezuas4CK`l!09CGB8b#N4-KUeg&BH;&4xlx){Kx$0ZlTIs1!u2|z5&qi~i2 zi;hnF#@_yu{^jkRtH($IGRlo&2ZWg>G57ODYEJ7#<(t#YX<_(3oiE@$j%Joo7v(@Y z`@V%#KI5vRXbBCdK$O`fB(zbzmwVU7C=`WjwFf&Q#W2r?oor|}3kdK15K}ueGldMN^Fo z!!wp)@~3+%7hSpZ>C@>Z61Z{f+>)LVRWvT#o2LO3nF3i=W(osy0)wY!zG!hWjRgrK zpa5}<4}7qoLIT2*HuDym@7$xaem^@`w4?54(8U6;Mx$&NxQwx6Ex>BpG3>rZKCgr& zJj^(-DGaC}av0(Fl)WE>hEgzQ3oLMPahXWZUo6`t{5shW043s#fG03Qs$#aL7AfIT zcnD~&!SL5$GW8E1ocv&}>}!xf@XO!kai~MQ#1hg{S{!32Cy?f6z`v7KG2_IsQA$B~ z+;I?+*;rsBV!kI+{vCxEqRPl)n=w!NA0P3epYq)h3D+!~VEyER(!%!L!pTOH9?;Sd3uj-MnCJ0slCQGuTwg&hs zgRn^Cdy6_R9v0PV{Kvs>(RS)*c|-dJaa;xG%-s>Q1L{Oj%f>@8A*I`kD=JQ-j4+^r zF1N*56W-=)nLh(k)G)f+$70G_=|vwKKY#6+7oePNxmht7KQ1UD zq7I+tgPv%P2HIG5h>1bh zd1{EvEaTa3m0{0f_&^)73)B@p{dd0wjSIWZrF4jx_h*A}beZ?V_k^{-;<4xJuPsWu z%lJJU@63p8!iE_u;mnkm3k=nRE+hHsWj!jL~k&8K|hJvyR;B2a1(K^o}tR-pW@ftGVt_f3$M32It z3pPs)45ERpNW9U*v1fA#(Ua&LOaVa5j$wM#6mpFp{@mdhnJ{+ZWR-AW`B5HBPbouk5hxTBI8bbB+u%wZl_ZB?DL6k3&Q;@Lx@z1PgpUy+@jqLdVZO z(WDv-G(dGB?+Z>5#~9BM)AfU5{s6iVn`AH)e&;TMIoohxyE?AZ8*>;5Nf+GIwu9#` z1FKQ*qhi>%jYt0$Ned1`IAQn2V6J|@C~|I@88b5X6TW|2DSuNNBMu_$J;0!)@m>{y zwQ-ykA0FL815SH*f{{`X6C(z+^qlHO$OyAkfOy9cZ2@|?mdFCy;5YiPQ}P!=^cuuj z54AXmrv|62;oO|!`g#>4T=XB02#?4r2mfY@vq}rfntqt(QadvN-YB9bCN zSx!KyEc^BiV$pa|n6bzd(D5pTJRYLky7f7j4dhr00h?t)dcq@$1TQQWbrh)*2%8Pd zY!NTTBY!vx@hT1ko+y87DKa8Oab`e8M_~$T(QGqFwb!v|&!?S^lJ=XxKu+U8-b_T} zY?z;46praTx`v+P13=FhU?7Ayhx12jfxM+y_sV(R*mU^tVPfYGcy!=n{s$gRUvhEq z!Zx9cPt;x&Al#9$L0JIr36Kv7@$Kr>tG+%y5lCk0LqB~Q9!|jZW@~2M6P@DnLcmx}p6Dct;dA(|d|$nq|91+VJS8CDnB6 z{}8}JwU(~>>__vbo^D}IO@4ik;Me=Jd?JBc1<+}sA{sz+x^w%sl>>#+gFv8!F!t

4&*Y) zgffWO&u=%R`i6F?o&+>td~6dlKs!}%oJ254YvzIV%$Utrf%Q89{#qZ3V~?Ws%wo^6 zu+;hMQ1nINFhwDeV>bzu=Y|)(dzv-=jS6-RI8A(*YO90mW#El*Uq4CJY&Q{fPKAgl z*$&iDxr~1SN7n;sJpn9EhC4lA5M!2@rCanFF*ko{@_;Q~QA*YIUk1_46-2O;C4jz8 zu^0Ug*-s>p0If(0&k#Ux14mpbyvWJXvHzZ6+Xu{8%-HbsP=**|W6a?s#J^lwZw?+Vnla z?`9XGPrS(YA5(V2_Ew~)m*y{YIqrPH5ejq~pRKV(JmJcnr-5$6fZDk_L#&t#_^|5H zy_NOotyr;w-=t{HA7mj{OpL+}}k&Ifwr%2;h0*CB#7zRM6S}$YH z`4kVn4_uP;R>6DCPtkA_@*-h=s??{Fhm1MT$jEL%1ypLDoR>I;7W^V%x~_oD`XVj} z#pWX%ZWa#ws%7G8+#u;czz=;%iHPazGa~yDBRTdPrJq$kJU{z8_7OuTSc0_(=JEsM zqJs;dm9x53u>HjaNJXS1(M8?0baKQ7D=CRwZI%{!(*#L26QQ6xA+%0pSEKrzsr@RH zV2qZZAv?)qtR=8x6a;4+JfV!SxG#p#TEZwu*;!eOISLKoAbOGI9}p1s!ifFA{c+pp z9|-P2NL9W2%gU7u7EiP>o%+4n+7YEjCFUR`LcSdSHe)GXW+Y8LAQ*0NQ}z zz;wW}Bt7xF%v?lXi21NcCiIxs9RUyM4A`E-frD8XQ1_#PuP4+cP_H&;g?W2>UQt6Sa#cM}7vW-}<2fsHCA}JmQd`xf{YHme>chLldBoK1Z>2W2CpM>nXOH zF+dIaFm{a@jP`H>Pz~BuG*Z#K*ZANuM1V7M7I~g9cVzey#Zj&k-Wy>wlBA1puQbF> z5KtU3IDwO8$v~{YEo{H^t5IF_O7x=s*d{_HK-j)#+lM?V*4N!73|3re#w>%A_1WV~ zQ$Ftg3SrI*!H+kzyu!mubzgSF0F4#M3ZxAnHIWg6V&wZ*I$+?hUyfJ zQNFP?L(3P74uM9JkQ}k?=nxx7Dnm>vXqy>b3&5@#xxg2d5l$+94?K*JwvY-HaWG<# zs8?oSWaJ(!5*c{26Lbu$ugx1agrg&g#hOvxlHZMj8hc*Ey5}yrL_l-sDcC@`M-V)V zbvQh*(=Hh#aU(Qfv_SMOyj}`S?eVqLCn!o}W34 z?@w;vFJne|)0%CsqN}UxW`@#)pwW_r1pK(ZC((H+y`wAPR-9?bqdz?j(4zjq{k1J! z(W+|G&0@@bOlAKuoH$vDG3t2wFKf3x!Nj-xHUofwg~&xIMY|0IWVH_G6;R z+f1mztQlBiT)l;Dao?{orazJO#7_Gkq$5#YBWo77=K&?Dqg?RQMFI}$29ZG>?{CQt zGaG(~&*%0TV61Vj@qbyY-#$qB?lEIR0YGzWPOm}Lz40&!7dtbk2F}MNYwPiJ^e!>a zpYQ+kLm0m4(Qr4HMZ^DMzh^Lj@DYcg*JB9)&lEk|S9!+A#9iXzt??hlckcWHz1mj+ z2F;Cs*8d2W06yp*9uCPDH?8B?X8^9(=lT~m{qj-K(Y=;9jZv14P4S}caeVRr^a(Op zsI>sQ7KRCn2jk;EH6qFQ1XHBe+2)n`-ng}lZU2bgSn;BXmqidNZx03c5Vs!k7!%0L zy@5FB+3isr&^RMPJsBIbx}^O7auo{a&OHGYU-G}x9yioxZuqOt=p9HIi|M@9n z|6*%%1fR#3ktLs8jcfR7{ z4)4YXW{CXNuCBp<;pZjYzqKg;{nLMH?f?6%uyXzep8u~|Ie$0g*yF&A{Rt*CJ<{qE ziR=XNNd1e`XlT*K9FPJJwjK%xCk5-DXoS}PV>I}q|0G&Zs5=PhIY;w z9E;mMv>Ds-+)YAVCb`yqDF-tQ(ko;71vCtXWZO_1*!Ee4?m-7Y+5yx9EQFr^B0~q1 z31|wA1<2N0q#r>23OFJ!$bTQrOUM5hZT%~4*q%o7iHhTq+R%d&%#{e=A-)eTjQB+V z4l4O^!qUe)fQ;<=+<@Z)*3Y;dAbcFP3@PO4vytE?QL+i`+eLHH3YcRJknHjFrx6ik zwt!@#@`-EoA+%binBo~wziD~o-uLx1Wm-pryynw>rP$|Z+HFR!uC81sTwV>8LGD-~ zv$}2VM3m=2Hv&`K zXp9>f?S_X(kA3==i69d!HnCz*=lw7@9^;122ZaxTXMJX;#hkszkK+BGH83#v^GS3o zEiB(#2eB7s`vzV5`ue`oUNkiIe|$d)_*2z$H}*}odKQygHa-Yx5iwhuonBITQgTQU{UEtIue zG^NB4pthEZKOsOHQ4C2z5JKSh1XOht#Y-z`gm(Pn4jjSwRuS+NpC_TN%e{Ygkt}tL z`)Ym*0@4DOzA^opwQF{m+>EnPqgnuL^bR%dq3VzE5BWUdn1uv2EYBO+@feX(zL&Y8h%I^8b-){?z8-oC9~c&$$VcLdeYKZikNQgKR7 zP8sF;Qa;*`ub=M>6HvaAN_nWuns9_hpYv%&AM50PPnfROqLi#1$`qHJ8iyf~w z_O7D2DE^Nf_SdkyefVp+Jpu(ExY|R^iRiZsngQY~gJs#;^#hIOrteq7cXgG8Z%9Z2 zA03wOe`y6`5ozS0L?9d>yz^+xl+ufJ ze2A0{nB)KgIEu`N===$z7mqU*IIZ%%2f@Mdvp9EHVqity%W&WkXv>z8Mb{D5xT7>p&;R+FKV=Lbrha zn&?$XZ-)jFW4Rc>>|oeZ!6=Zn9RVzA3}9LWGg3rSB8MeA5X$IOr?r^%;ncN3EVS9t z3PS3Eup0Z6qyY5Qo9XF$KC~>Gj6P%}^b`=aY9YfS`x4PA1|2Nqb)haUGnv#qTsnF2 z&lXK^4-t}AFv7jb< z*VlhFpd!V`0X|Xg?l2+(V$5$mTY90R_&_)#x7>kt#A;?H?Ft3jO}bDrU&54GMV zAO!X$I2s`}`Y+I8{if&_Zv?g=P0;!R*Vk{}pxd$}8eX72sivU-b=kzl#ogzT8p#2q zdx&0%C~treYxj+Db90kOgTeX;{S9}@m+5CfEI_lb!z@3=u!0#Rp)HyDSbhjh;kZUs zh+R-V*s9!u4k5_=bdbO)f_jlLBILY8SP8*6Z*w$oL?`g%Ye8^0(yk)NzI-*Y9*6nW z0@qmBj<^%~1B4Hx8j~07`90 z$HRKhN9Gumj7iJGVq*f6<5MAF_3stFLoU9q>>>i0b=3`$vlOOkPC>= z*=U2&GJz$5F36_q4QCr;9Ai7)b1J(RVV<3jkFVO1i2Tv(681UjLumLK2b`c;G--Gd zPSnTZI6Usl#6+o0c7Lm53+xU)0QX>#0!ywEV5Duw>y@6uh|0vu08$3p#4)T=HJ7FK zrM2&F11}`*3Z#^D971uo8?F&tdrvJdX%~M-7HZr;6cYtjMXD~CEJLXqIO{dWkmE9Rq~Pccnk_2UevL zw%l-&cd`(~Um!yVc8^IfyLe!sX<+}=Vn%u2lwTI(sO>Wu)tJ5#O%FIGu0#phF$P4| 
z2$_h$gIe=q_!h72!V#oN`1I~U&&~kx@z#DZsZpp~2`1+;I-gD~RPcVQ?$EJ+9rEBP z#QncrF*!lryad!C;7{!?u~m?lClP44G5G}97r0R<{HwPQy93EW9=pHm56;DL)Nf_m zfUVYkcI{qXxz%*yl*KY2@3MXhV2-*3KXgGx&;Pa1AWI1R}8e2avZEYxea3uks9~sweNT?`n3VhgYxR6)UxKp z5(4f8h79Qg(9x345dnFaQ&2~^#M%ZP3Pda}u#!?G3Vipg%rD))G#+DpB8t))b;lfo z=&F&TE~vPcpF=(GG*+->*exGjR{JE>Y5lldVaj2xT(|}% zd@;;Y5J>){eQmcFJzbRzf+~rA-Sciv=tBdcUM8#QeEr^q>H=fv2606rHYj>g6cJF9ZyWR3+1UpxZfQeW96#ruY7mPT6(LoL7{KmdiVMD1OhPO#F`-6c$q>=!P7n~ zfpUR_NrxPyD-St2{7TuFTXNjd+Z#=)B5ykiqv*iKNLDR^K*?mEAN1Wp%i9?kV$fkS zUR^`N9`Hgy_fdp_a3=f+^!}h|MW=wU0m~4Lc=y>a7rR^OUz$t8qhfvTxVE*(>tM`y0H!Ltb@lCtX>xCS0*GAi< zgle(C30(tuiNw{S77MQ^_0!D-kv3?W2^~uW)Ts{A9%2-85|a}J$nC&6iGm3e`4AMn z0I9i4A)vlP_S`)t(KQ8rTY*@|ee2w@>hW##<9n1rv{$IHpx7Yxyl& zgv=Y0j`_s@yl==);<6&futcipG}j+ZdxiRK>`vawv6n1p-3Y+~v!7;;1J~_;p0&+6 zwD9@bv6CZd60UR4L3RaES-BUDg(=D)BGwG&EyePtV_;x)cXto+Qu4u}=Iov?<-wID zkUy!%P?-@=Gw`Hr+FhrGFD&*;-ad9`Gbv<;x6r?hufkhFmdKbiTDQtcfjQh+f!udia*2F_WBUlgIn=|AyF$njpSJKoB zM$N(xI%I2(W3?_LwoVjY91l7dXV8jPRqZ<0Lnp|glNg*x)0cAAYRYdoR zDI!LFKUNp)!~X=AiT5W8=Bgx7f#+NO+gfQ9mSA$kVGZnu(7ftU7^5&TIZ^*96Wgpd z;@BE8wSz%d6`BxuDxP0u!3@5*$@2_grz8(!{K(VN3wJhtm?NR1YtR)#Ka3z%E4g0W zw?h+&Azo@(LKVB5#6lvGjko{wZUfQLilE*om_ZX@i@Y;P(2H?(^Raiod-nmo23lw6w5o!@QMzJuE~Z*^dE5)UK7I14)?v z{AW^DBATHL`v6{B3aD~S&`Et(_tjq&tX`}a6SNc0F?)G~xdwhrc5=)7PCl#8?<3muViJ%FYfRM>>yLTuXTy@wm9 zl4!6zp3o=J!`uq|3$?qgNK@Oeck#m$Y{(NZ0aO>Dz!9R=F38O*p%ulNWx?V=Y4jp(|)58dXM+<_~pzENYKL@Y6qqh~I@F-4&p5_KnzGl1eC^bELR*=8=*_0EaqJd4bYye*9Ble+${XKw3>87 z5JS`#1>c5fm08$3m2v_br4I`s)OxJ0EB#kj)4bdMZ)I!_4<_Xqo3Sq2q#to5fKhF@ zH6yDGuZX~-HoK)xo#NLFUfUde$i3~uu4n*Q0y?Xj^WEM3IMk|ihswQ#`w0>?KRueh z6^WCC#8cs!UtgtTcMXU<4~=R1(*V_*fPm~Vh6T7yBuOYaN30DG0JeGWbvcBDgh~#| zI$`N)=wtrr8Hj`9vuG!kuY z^X73^;+nBEi4)74nUF1UqALttykH>_bRNHrxBlFHdC6m}w2jxYEhh%EgYtt*@E0=N z8Yz{CE(Y?m^d!v+HO+jt)`2lE2HwJFze!@o&4>0*Ga&|rC5gnl&UJQlgrkvCy{nb) zrbT22TiLa9GVso>ajZ~DN17(o6u!G*Vvam-gh%W`_4i^{j%EKoX+;#E5E8*rFsj{! z&X6D;j%vDB#$YYg2k8*K9+q!4l%Tw{nyo$k{V`xPp9as0zN@37gQ(bBClPveFi#Nt zi4y64L>sh@q^0I^8_ru(8(I?}oTTuhJ>#+b! 
GIT binary patch
(base85-encoded binary image data omitted)
zXn)21u(5_hVOInLZ}Pk;6b+=1ud{q%R8SwuI65DdgMLVh^CpNjpQ0PI-MRIKSqgM+ zc~gWBKxfP>YR6teLXM{7Lp(>=(Sa-vml^6t7#0rX8+OwO7k6*(Wff`=SCetwty}w) zHT`nV0upT=d;RX#RUy>7xw%_Ql4;qc=%=oB9O4K~q!G%06UnH)r3YA(G@eQD_ne>l z0{_h?_>-=ms?5O6V->gB;`a~_@eRX2i z*k-<3Tt!l;su=()Hl-U&SM|g&hQBl|J(O}~37U)$Y4>~;Bo`aoyXd}xXA&OBLI@=a z!dWS(h>3Oxy5hI#Jb^!Rd`iL2%133Jy-DNWZSUkWkv|~Othc}Bl{0Kw z54nV`TG8YOe!tG9kFkU`@|D>=h@$=0vAIwOV2!r=B^1%?bZ&qgMT_l-ls7&AVIRNM zXJ==V)*Mi!K%zqo8(LeSk6b)-6ZS^_a|BTjkfJ(o3OYa(0cG$T5q*zS=L9aprxu;- zzQXQ(JZL6nCwYSM-$nIprTT(~AaTL@aPv#@0ai;T*d*`_OmtO5<s!uFK55$<=<*zJ8EvK1KW4YGHPQ z()+xWHJ>w)?ulY~z$j*i) zkYplQJJ~@ zxWyA_{P8iGs>zStKIByw8020c5NPoym=;}}aGSwex#eQ9efxrB`cYp^!C_i*Y2N5^Jo%B4c43D9c(IctJT6NZKxSHpQTU37A7 zu2FxVhdulmHED<505EQ80grjuP{Zosj}x#iC@3I8neUqqD$Sv26t}KV%ov_%Jav+U z;5mo>y;;+V@v3|Gc|53Rp5qLiEnx+{y4x&-vfZ@LRWrf-^D)QPv98u^Zg79T6%FJ8M3F z{<<*tdm5cgL&m?6!lou@TTuUe62bdcgy>3Eg!RTYz>=gbLAYjc9?1mB)ey-H8h%44 z#&Sm$fOj@+;*gOTnidwp^ZKvk4zV~&pWQqO*K(EOO#LS6Tkh3$ea;GHs@QF2H#v;O-6P0%zFCcuQ z;pNv!-iyPgbg4(Qw8A=Fdp404E|E>xj_;7LJB;po^Ysne8nZ0ba&fcK0Ba%xY^aa< zq0|9|O1KlKXfbIt2`?jXiBHh%JnGc{SYPhZrTh{qNRL-qIsZ(0L`%fu%HnjLP#UWj zcl|}yB9**-SolZIr*t)C`4)L44LhF{35|A%^!#R}v=?cxm2WagW4|zZ@-_a;>D-WK zjb96g@)>QL42H|EJz-E%#Q!MKo#onIJxkZ{(UBUaoOv!q7;3QjjJqV}TYi=ajwRS7Pfsgteu^m_i?_gr&GZ`xFe^W~4k7LQkM=j?n#HA#W| ziCrbZb#)ptS0@!k-hKMC3EFMn5H|l>O3zcg^xI|K#L=0MmX{F{+6+XXeE2X0G#s1s z^z^Fx9MCo8+_Pt`nVDJs)!$Fg+|JG254K*2m(zW2k{cwQqJR9)pJ;W434{!Pd-byO zkEN|`F#7r3$fS%FZ{NP9V`mp^Wv|_iFOmVQR1Ai|*E|dyJDM)*kslOrPrv{Cc?0ZV z#k+Su%Kd44n=hcimynda^_j|6LQ3ifq!yJB=-Cg{?Rsyk&Bt)5J%86waf%>5At25= zq+z}$nTH}X1=f63Ox5F)sV8m5|pA zJVAW_%@;3T+`W5uH#s%VOOvoqM~JWCoaXfR_us(5PftDj_b=;SR!;ngZSbc~pM|-0 zeCBV~e-0#$q=HvL#2}*)r9dHm2_Mh#zrKFAt3G_7#^9E+m6cWIw~kV8)2!czp&VJG zq=yid@A>zDhlhvb=RwdCZy{k^6LgV_i;Iq#`ScHPZ&g3gtgf??qxN5gk=#l=7AmFl zmn$kO8NRWnxQuts0s-33jGPXhId60A+BFSc`uh-e#Pu-T?aJj65ICM=r5~se|Ax?< z7#m{c(+Lv+Ia>_OdhGad`U*@}WJ0+yf;U_txv;o6frw`YJ!-x~U!W8RGxIH^r*{(@$vESn|~tpI4bHU0NbO*u0-w3 z>#?yrE;Oc?-CcY={naobfuEwK)LV{Jpm#lo%4=(BQG&fedzz2W!*k*X6{dlsIIBLK z6%)|Upa6Hz>!cYH5y1eh?j4+bGxXM7F#(+VlmBC9CkJwVc>o>PnTr?GnhSy~My@ny zhrB#=+R)GfHT`;O>iYcki#>Pb-T?wAy_`7gHPMEU5X3gz*XNIvPrC6^{YH^Cwzip9 zf1kt91`T-8t+G3I?09KEp^p@n0YQhpzW#vpi5K)slB-s&`Y2}KngvziPf)df_!ZtRaUKAchme83dGRcE>K=KmL(*v zN)K!K_RRum$?8jj|2qqEgD|5SOq1S;DJia!xu2}sAZvR>85tS3%X!I=`GF_GlTEsq z5Xr!}gZRa-$CD>YN=jPtY*~p8um5ApZbVJcAFE|#Wpg=(dwZ_~oU!3@_-AMD14I#W zJ;cMaj!DRlpS#)rcZqjkM*`96EF;%)2!CWb9Yvhez*)hVDAu zUjNcm&TkZz%MEl;uJL+7V|LcklG&!T(5kCM7CdtZYU&FzN8>flK%+YVhM@CGE9e>M2G4;Vkec*<6QV?QXY-hBS7K0I;Yz=8XS41gM&{T0l7HtuI) zVDJY(TyKt2P*m#xGJ)b*v6j}GDHP27w72j=$j`C!V*B)$KdS5DpPs9k5*Du(cXf5` zM!{Zzv6C%ybh`>p*w{Qj+m5q^O6V;x0%^^L?7}D42HPrWhFmLQ*G)l$`7)mK+78mt z|3f58gJ0zT#sBU1qG=+sFiL61<{bXrp&u{w;X>co?5X=>lB%=*Noc1&(9BO$J@BSV23F1oP`2_ z#(EHqwu6HeOo*C`&2Vzt!8N9NsjRoJaf+VhA zzwAK7tVNK(Lph}VEKH9M>Qp9RQhAi#Ux108F#UR1USO?S^=h}Su5LzlwjT~IilPnZ z;N)0+S%vt?#E@|DZAS-tm-n(P=A8ol*3ojx$gqK-2>UiaKW~l^=C$4(2X^CG0?eD? zHnikkWCH)>5BR|#{jRBr0TFV$xYgb6%eD9My!N;np@Z;#EQ6dF?XnbYc($-C$Z={G zB%3Gm^SL8~Z}GUaAH6{+P+9|+S1$V?kdEvzb@jCrC?eJ%MTt2aRaCS*Np*i=QTG9Q zj7Jm|-ReOWlKIQUS!fqhbkZW}_KA6(dV0JJ0I%uJogH$Qeo+;kSe7aiTA#Py zz8MNjrfT!Gmw2*jYHA)+RrMulTqo@Y2*o#5Rm$T^hiP{lssR7=>qjNc*|TTg;B*5U zUwirTWhhBHCm$a*s3>^{2Ql=~<1~1?2L=N0ugBrmXJlrIIzQa~9g~*%qaBkYOm1;_-nxS_YXbz~D4CyCbt! 
z*VOpoXZ~`nHuZ~6ja2P{u2Q)lJv~jC_&!ndHS0)Ns9s3gymZiECRS4}kGY*W0tQ;< zM-?rH?*%jKXa4byJ;}aMPj}g=xBry(fpArByuX`hvrtx#XO%#!#&zDZ5R+;4i}Ox0 zUJKiS;_%2cy$_%P)4@4jH6Rrd5HK~4#FOtb&VkEzH#GDb`fd6K2IXJBGNL=E=Y5Df zDtmd=gFO+d{`ie|0gB>lDdG`~jE;UE8MzH4@&v#ViQSF${3U39{2(Evq@bs!LYTaT zf66M!&zX=FVYPfRMQr(xgrMR@ZP;`qz|=S zI#>+t$Nky!&Lxlzt7+H~Fp2qQR#7gJQpK44OB%vm`cum-lKEpQ-@OZ5GQCoLch`Mr z??T1G&Z%C-6VNfXvotfi28Q6+C6}Ien{W!$w_sLeBlv)Gzt~QcflSD7o4$a_k7w1F zhAHfTiprRZ2c<%CenTE;0cKw6PE<`F-q5v}7(pyh?b^iz^&CVn6Xi z3601o3FN{!K=gsm*V2%#Y~bNZ*6P;`9dz(OO%zO&UdfBEu`SHe@j7mG@$1*$`}=Pq zM3r@Rnj}k}s00&jYSUTFTpKGV?C=QjH0bM&L8;GduTgR$3kaKASWI9p12b>5I4%BY zyJeo@;^f=Mw=e(ghXj5+%WR!r3VyLxp=QBsl3E&$(`fUasZUHe8sYOFEr80%T@vsV z`?S9M3nL;f#62W8Ve-AJ+~Z?!rkX47Zjf-B;F!zMWmh`~*th+@*xF zz?}#g4;E_ehsdea?%{ci%1%J6OI|GAX_*h@Y7tDgW0 zZhiZT7}MDE@qv+073qjR*mr&v1s)Zkvt0Mf)J)`geG?i;7$2VbFGgaSJY3jy@#;A(BnX<0G zC`hb_2KO4q5Muh@*VMd4nG^cx5zWrhC#0g7jyP8kidaAvcvO_{J)Ey`y4n7$8zKaO zof}yXY(!ez=PY&hX&}EdFta!FIW1A%2vwVJd7D6iZ6;y^r!pNgz>=0%mv^kC<+x46 z>3h}0ZUC^4bQCe+zgB|Pyd4vwOMbEDclw>hd38?F1;gzH`xX`!8tU9zA+Qw~u^x z&U>8qHc+294p-RFYnc?GfmjU^43g*!(r-QnKU4evVzey?D?n;r8_1j zCcTSaQBRaZuM~3I^B@*+q?tSld|tc|M3y*TaeF1b^wJNF zye;~zotdhC^;npYQN9nL*N|m81RKvWNxj%oRz`_ZroN`V?-M?(QYIq01>mj~NC3A` zuXE@2U;@Um75%-uMOl~Q-l3ZvN zlCqm%vvAd<`AkH9l9-Wqooq0rgux8q+o2`}Tak-FzWu)1yh&s>g;ye8A!&|KsqXIE7QCtCNwtqa4CDmk5 zxZqRaqzGg$KEmMAc1;^FyT2wUw^oddj7*8)bzq0eeGHPSTbLHR5=!R`o=bxNJW8~3 zAbAl?aCs)GFdRO1ETEfAN=%Hj?jwkhT^gbF(EI?GgoH%6lUv_Wyx~I&zmB~3<$rMe z2CkSnmJg7L`_Lrj_%Kja}{Zf>4|`4=+Rm@|q4_5}@3ByyuzxwR2-cy}aM z>RRBC_#-Y_pp6$09LyjSg*$-Tc;Dko4;r^hnHa!YFC|Gi=RHg%9W^ACF7rUd%(oDt z9d!2m!L+_DFE0-`jobcsqu9OD*A@SyluBCX)RC+1?&MXW3Rp+c4R&wg90R%kd`uV( z$wZ%by60eX$x32b#i!%-Jwj1=LjtvOrMg}cV9fI?p-QL4&OJsQjBr*upNUY6hSKe@ zpwv@*5y#ZjwpHLZD*2z9XHkCo1VNruco6#pk&Z4JZ;Udf?td=bW|xlaNKaA3J51m_ zE+;3~+uOT7knDc)$I#&!AO=gAoKApTS7j_NEG&dJfdLiQ7dLfO5dx+qY+%ISfrz3~ zB5Oei=?Dz9fw@$%JaI2Q_0(7gD@rxOW)dRGbqP&IR}8CYLhZ<9shC6hYs@1y1RjgdwZs*dJ27pwRrDLmHWiRc1w3{VQ>bgruh*R3oZa- z2alls7BvtcwQ`tW@(%a`Qltu})R8Zrj~8eLr)66u(r6I+>ERbH@kxVi0@ZUfA|hfZ zIzT~k%L@(1PFvq--kr1$3A>Dxj|!IMO6wIB=Xb6?Uf4~2<_iPzhNM{=6PtDk4zAgk z1f`6eoHZ2fMJ}}KHnWL1=&l@%LigEUyGTOvVxy;D1Jq8^4}SKf)vcoFj7g?`>8s7L zXU`taLx)Tj$1!&KPCd094X1aw8EY~z$+3MeYeQ626i*;tm|rkggE$(W+G)Bc=xJ$a z_9+}Vs}7N|J;o2^KjP{@6o3L?wUpz)PFEo*DT9t%{^C)HPY^~NrFU^~C`a31H4_ul z>yc!%eg>M-qIgYaJtK}A;$QrNnp;zble2t%tCjkY+N1piNS{Vm8&KlPyL`Z`^dpaW zb~;V#)Q{bp`Ge9CY(DCi(F9XZ-eP>P5nt>$)>tjNf>J(q;>3QG6y&E6qE`k~5fpJa zJA@Jl{Wphl_bj?{QZ|?}9XUs)iCw3MBIfNeU)nuBei@16cx+MC9rs8HJ)2XGY&?hY2x2^7;gE65cGQN z=EgKc>jZuDg6PEJv&=7E3?f0n95wQO!kt(iIk$E=wvYR=0zUB5_aI7AJZPw?6*M&N z0`ZgM%a;oRjaizC{IwX<%3TkP*rO;zd;AnM*!Y`USXqM_8?{m8B%lbib8#VMgc420 zi6$f)sypu3ZS<#o+t#hX71#P}W4UBy*+W7?P$n}HeKIKG*JEi3ntTxiczSrq0EmMr zry@0HsumS#^83Dr!_xDF(TGGBPsVqocNW)pyTJguMH>gMxGoNMQ+59No0LHGM-@;;?v(}@jh_QLgcGNMXfu3{yZt4k*Au6FFKi+a3HvE0f~k7JR%w?uu`BkAZo3sKRZ*^ zPl07nKzdP=buq(u=3#V3$TeR71AK}rx{Z#Hn~iEeWl5CW=pGunFhc*)L_uK{0LtC) z@as{+rsW`F(BuVXW5sW|3gnL8$U#BDO`!e*f`a@M;(cDdI!K=U#f!sRtB48-`Zq1P z5{Eu&fQtZ9V(vyPv#2R8+U>X3JDYapw3GDupC56}lB~+FQQ8c0Y)EN$HyjiUPvSRNz|2X_h<$RZR_W=L?Llz^y663mOXR zHg0psvUquUvD9MFIF@{}Tje>z@hOuR)ILY0Yz0ZqtTTn#iFd(HbXmMnpd zc|rl8RH{`aT$0{V&M7ey8J_b9M&L7$fsBe>H5>OQ??gVC8fo1&Uh-Fnjg2iZCOZT0 zl!oKw>Fl*YEr)e=8PTr00qn$uk*JK|q9SRC28=N$Chc3OFOzSp7@Ix&GACPGn9km6m@sQm&^zje8!v##G>{OFQ# zp(EWwFxmUkVJQH#4AvBDHSI$C5=(-6c_iP7x9FF6i#u{txB2eNB1a&?V*O_(6$9r&ci2}U?dVMvS z=fIFY&HD+Eb)3A3N)6?@q{HD2ev$x^A;)y29bAzc1H-H-mp;|!IFx~ z%0WN4Uq?nAMQ{EW`bi1)J@0<~`J)V2*gZUKII{hU+(8=tw4RH+zzYfbD*pOlRtB&i 
zWan3ioS$R4W}0Uy_%ZOkbQLJGZ()nVdUGVN3W&vYMvVMla#O}(Oop40N-daX2^X? zqxV5`{(L`Y|IF{-xG-Cc8qm_EAVayjl3VE>1T8r5HwfmhT4_UStCMnp;LLx1Gz&DW zzVQFsFo?eefX)L!b5Qe3Q>G&03m5pXC+T*V*WXQ;@#cWgJMlN?xg#L;?{{Z{%QMKa z(j2-Pp~_8;6A)Zfeg~>oyPqGKNgHh6`OgB*W&)HT6yHHkLjHDe-kn>w%0P)^oUi3L zj!ljZnz;u^XhU(qR50)tsy+Q8XTkYDxBWoL9OfO*)XrL4`Yy;k+uh}Z3sT>Gn&V_ z_SmGL`5f%!+R=(;L;}*Pf(ini#Gh`LBKs!x)2AxP+ESPEC*XNnAVIs%O`xtyV#UaSM^>LhfBjVPRUb zMX0bx{j3ffU41gX((FVXAsd?n2Z=miHm?> zj-?|Sd=XOps1WA27TI7@jd^BfMr!=6(J`m!EB5w#osX-kQv3S){zAaLIBlS%54x7H zGS{!KiFr5d#&&{x8F$@cs4nR0Sy3hrt`xccX zm6Ecu1*$RBCrx~E6cXM`#k^m$&!2qJ{{?+?VW%>NGmrbvo!()3gp>s_v3YrUq_Y$% zCT5FMiJ5M0vROtG4+R`~ozir~Pc9oS%XJM8IV2Itw1nOpqSyuttl%0+Eu0Mf``H zMAymQ4G>@nNRs?905KyfWrHCjGBW_kLNVtrE-EVO+mG=tZoY+#++2oFkPr!eG5?Fq z5G7R#wZ?Z4zP|Uthv9&&NAn0{JU^hqTTer?jbRy8ggKr{3uZ#*&x_|Fdc}k29jXFi zL6yN|)6?6#23bd5VBfxN2)aY+yB~(Eo-7igpFwZe4D-mVY&TwE%7_dMHq$ zY#jrvhkBnC1&^;S@@4=b3b2Qad!g4HOM}Z+ZoD%-U6b3TD=Tfk7EPmMvPsVaOJ4!O z=w(S~2UF-+zr+Nb23W7?<|YNucs(Fs{rhwS#Ay8zkAkmNTFpIHd~;p-_N&R`bG#i^ z(?@WOk&zKmIzUUyDJgkcd>4oQu>ZQvH}*x?KHuX z7ScBW>H*6{s$kpIy*vCMOEW`Li+gMsGR-QKl{@F6Z{A!7K}0ZOKj;4a4Ctm;BF#6} zVg~9AC>Vo98?EsmAFs}?ji1~>Upk4~Zi2vF1wW*@=`EHl~&7?yDHGqOIK za2qf(#SW0B)mtmjhtr~u5%pW&rcszOGIIY>3+4MLJ#tO_1@{)eJ^_QgzM-Xj$n)s!~{ z4`d%K{prK@;t>lpjINM5pi8|TJ(0ZbX?OyPcjGglEvG9rr1Ji0;;uZ84Em7!KVxiVbvCza|%d9Ic+Xx zWFg)B%0BLm%gvW+FAXR4Y5qn$d1SBN3BSkVWSkpWH{&~U%>j>#xTwJQK zXAZk2+`k~A+t1c|FFg^u5Y9)<7owdRZ6tIs=*NaD(g%a>J*KhzVjb?hG)nwEL-~C@ zJ$`sotUQ_K^d&zHOFSgW1XX}1)D~ftTJzi2^)AP#G8kcmZWCC2uTXyjhydO}m_}8Z z42U$8?uQr8J@z*kA88lV&5JEi8Z9j?**Q5?f~25hWW0eb*;YntOfWv9fBN)>!U|(x zbc{mPN$`N}*tL31Gwag;CDM8C^R+g$Tj7)c0Y?!u{)`mA#$cIqQKgn9=ZUK*dp@Y`Xe+6nN}hIN_ekkc`% zzNFbZ5juJ9S~kE}$-<6kZH$$xCH9yGno|E3xw2PC5 z2?&im>_BhmdACbBT4nwXujwO_H_R~Kg0gYf-n}Zn7QnZP0GRY~n{uqci zD$oUAu82L@h$xJvTNPU6Yibz;@}XFjz>MbJ)(w>n4cjpLfVAqdlTYil-FIJIB|V6} zX2IxCiMqSHJ3<^&ElSjAzkhF|Z$PU6HN+V}b(V3%50Czcqf~uvkV}nnarv zgvRb-$AgdCKaRGGznc)mInnzCzJa0W1PWP#G*Kgv<~6b*RtVDBHe)HtbLfCa(k{3H)3o_C*WC!@LzfnJgs}nx5rIe&3R zhrbH5z+=6E&P7=%9>Q>rw?#GBbtp_o!-Eh~4v$chF-ZPDyuEom)@$20N~zFl+AC3D6MUFNAorW8sUQYtcwa#bpnDdQzV$UH8RG8YXb2@%PZhW$O&dhTbx&%59K z`RqUT`r}^rTB~whzu$Qt=W%?e^A`rcgsR>9_m{JU@-e$(k0Fy!Wdf8)_@I#boqmUq zxeH|HE-_w$h_E$radFq4<0Gts1H>(7`S5`IZh|iBmyD0tYmi}fcp;0z=7|cu0?8=L zAr}N>?Wsi5U};-vs95SVqXw=?b)w3$g1OjxN`z)zE*`Xd*RI1b7;~0(HQn8PsGUo1 z4Pu27ZtaL|*S@&>?wvZhGF8}<<%wX+<~NBS11zfQ;2?yfMP-RGkuB}@l!OS6Ais}a zH(rOd2QP}rMp03bb6vBXoSY4I8@|^S9-N!{ii(*jWJ1LDU}mQI{XqVkoUW$u538t{ z7-7>I4Herl+hY%Zz51p#fVZ2=8RL`ZWN00L}CrUl@~E+UCJ5w&M20K}}18+IIDPoyw74O5R2m>FHh zYAmAg!HKDW+yIrsPCq^P-C+BlEEqj%VxNzklzFjzKXK zaseW^bJ`5HTsu~dgG9B;rBFn20GKrJbtSy)?zTb7qS@$^T?Sp1I5!XAQGY8~&Bu2r z^7jQ9>04iS&X335`WH?5^LO<%|B+YT(TE)0WpEc8D8vR;@4My$O48cKE`-KNoO(Lo z4!%@7L#nyC8FRu$L|yIl*tTcSMvP_(z_h2X-P{dEe->Im1}VG{%+1ClMQH6Y0$;_M zmv$7O9+*mL;0S3Gdq1Av3rkcW(9j`|X1v|^kbw)Ixr6gF5hNtgRVjRJqGQ2<56cijjn+{@u_o2@4eyVYes7;%#WqR32*k# zQSCp83hyxW%W2hMiqzF?UQAW}P>?DED{{VUZkID-3XeDo!ydtF1ER;^q&VZP%n#pa$Wr4 z>}XD2Ub|x%PS6mW3%`Fp#^DC+PHyx`$uL$ixTa+rH{P*%jJBtxcux#_+D~F=`gdc% zd|KCIluprLZ%gTA{^v`R0jAY<&*S=*G?}4X01LM`#Ea!leOy7%C{v+CVHdcl0EcP| zu3@(X+^T-U2l6AuC{DV(6K~8OGzSnvS-KEJyPE8xKjfSmn;=^JI3czi!&AipZeRa< z_wE_2dqdqyKB>?&jC9HmiXhFyNiY1VeJ!wWlD3}B3pj<=Ru;x{X!e>+tXr zW4b>~fyw_opgw*i)>gKPjD zNw~-z6WhDjg8lk%cVZfP_z;`!9}xf%^~=xu#?Dw3cp*pEBD!+h`}P_&spth01X$L4T7bxQf% zxe~b>Q1ybFw}E9yZom$bTaUBKV6SkrF*4r@@zxCu31O>ElCyIpNS>LCb)mv4zk7Eg zgty;3)po1?u@EwQDp5r>UyGwNMNf$@B|!EO1jfM1wazHK&b3>PEg}iXvh;&PGX)YH zZ^iq1(dk-Fn4^}c8q>K#@)N$iC{JvYSTH5W7#<6&8ATOaX4}Y^AEN>b!i5FZ#=pk` 
zc>9!v$zE*Tdp4?;$mH1Rfw3`~+XFwKR2-mb4fdZob*dVg7FkYnw2!f0+x*4fY)mR< zh2?cCWLnQ>f;|8*6cku%Yw6486E`a zdP2APy(n;Kh^)`q_Vq%WTbfJ~^QjemL@slR@ObF$IGB`2tWZBY9tPNeP#_deRXRX)+t5 z3sS-%ynvN+@83+u1>h=|(7aVn!}sr3UcMjD21Rf#CLLOZ%Bc;J?-XeOp}pT+-O=gb(~L~om?Rq5POmcLML1YQgVpEK|BPx6IL(avEM9vF@{~DG zB3?7fAa?Ob_Y<6xvz(W-1(k8MkiCQR&T#w*BDSi*ppdaTaloD#(gy7l`-|6JT?;Ln z8oOxdj0_}tf|sYQz4bc(fW!-$lK(s_0us~Hp_B&HH!3HJoP6V(RD7Mt&+W!qPe%&x zHG)u6R8;GvWnq(!DZ3@&R<cMpT;V*;4HEW6+NZ`BUmOhrN|S|ew56zHw(?FOPz4qs=B~W%lQ=xa;aEb?gMl_2 zc&yQ%!v!433L>KVd@b%=$Q}%_UCV{Am7Yxx@)-gYC80Wg;P{ zZ=p5@^|Z`?X5U(f06qacE{4M)rDGS6Hlkj{^y$z=<3iJDmA?1%zKE1-rhdoD9#)`-d!?=nT0q*K$Ni8K;n=mUM%9wF@>@|Z*~NTbQStM)c`w4 z&C*_|8lxXbxCLe1+t9=LLuwDcIJ2eMC>{FV!_U{cZiB7h1)t!BQTABq&a3~5%kd2x zqcy44o`xZ{^C|;NpD%9Ro)*Dw5US&$f7=TJi6vLzK$_~js14ws~DMM28cX6)0M;S8>ShkvWq_Rxm>VhACWk4SgNY2p}|^&64kML^^Q*AHnyIyF{ zPg3hgSkp#wFtBm*uBnbHUWM+Jm3m-qz$nGsb;(1b6iU4-Il7!s*MI*?aKEo-&ka37 zh)!*za;qpxmHuaJ3GbLaSowo)0qaWe{*2wtV^nw(#4Y*2>*j4fbOS)5UL~b%sSaL`qII#^+bv0!D;1LJo8~$+wBzC?O#OC?FtFHuOo` zI$nRLEEFTOqjJxvIDj!@Im)KM1?1}HrU?3;rwn=^A|iXRS34@FS?4Gqb1fYmj@l3r zvkalGEog`dRtrp2ZhE{=88WQmnoIJhcs;m2fBbmNUppvh#nT1Xifbti3;CalKdzO3 zp8@`U`KUd2!Xn$uz5J#4YBz+S;M!i=CYSeiw3Pb z!E?GowxM4FAx3iC6F}eN64_Ny1B@7aq4q0RNelmyx!AgR}~J`RO6H z^FODIW4}tO*2(P*_dT~#1K1sYQ38gn4uQwncoH~R62$KyGfK(Rv#|LTTx{%Q&|`Ps zaJ}8*Vf&OcYgEzo7=(2HZ6p2%Lm;x&ina7UOaZMDSiG>VG1~xJ_KukcASD-09LGXN zlol$O!CJ_{^WWM#u3dg}31C9~b>Wyop$#{AFozu-Kpdi~`LjeyP2mRswl%ng52mp8qyG&H5^x0%VRVIj!tc2jk_rfT10LAOJvZ5RlpT zn0t~nawti(%lMQo3#&-$|Ka#o5_~El<2CsrD2-)9Y*;K!{Wc|Vi4MLO;Zc5=NW2jw zBP2Mbth#?4f`lC*jdW3I`=zfCjb=@IL0jL@yg8+&vJ&?eL(^Z4g_J#IfvvTGqq>2M z0LQW1$7kMPP^0#`S%rk*3WIAH?lr_+jX{e2eTJAp)Y(VgJlP4{cuM*L!9r9w)4Adf ziPENQ3BVw0YtR3hv(Ng7lYbYkH##_f*44EF9#54CNNR~o5IlLOqm?k|4xr#EABt?$ z>-H`zINqH**TF5mrj~+Z|dkv?%vLcoGJ3r~p}O@Y{$pEi1&HsN}LM z0RjrVYRJoEAIn83fiD!9P?z!XRqOvBI~e}5%J@{J?SK-!N&8P|&z~pp`s)rhBw@4( zw#muK;S8q^^BHXt$T!!2b;GgUJ0tr*yMGHsSDZk0ImrTYx?J)mVh9eYz_Cos&6gJ-yKy(l1%&$<5#SG3f&l5zI)Qnro^VU>(GtJn zw;w1T4?QPr`~w3+<1qEdeRyu92LlihiZ8c$eDYl#=pQLu%>#Fg%D4sSgoAf?U4(S# z;KP?)um(fk+WGN-=phgQ=v-^SXfs$IhmK4NH;S&{mfu8#Aqa`w#(5_uCKy%323saU zoE;cOCH~-*albd>d66-lX9omD$Zbe(a+kb;nFj+jRgc*C6KOSzPs+HuH067}|k) z2m+Q^Mq5Gi3=8pU0@}lOaoEvOc>VRbxCm^zBT%P#H&oWuEycAiE0KJ><_8+{gh3*c z28+JndIcW{#%syRlP8G`4oZc8o;_1V9Vf>DnhUa}5))DYel^AI>$w-}6Xr|KEZ=_E zB~6ggwIG3q5w#B;8B}#Aax8BH@D0NNwswy-9{fYdYZdX{R+5p0aAt&s`~ug1ADHBq zNVmH;3rMp2!=B%`ONk4lOD#P93=?R72}6wMzZAiv61#Vxn}h>Il|qUBR0N>D>dl)s zJiDFWV|q<6Q7x3`O$ZVGJ$7wH%8zIA|2rD?lIZz;oQ4jcf2gTK1G$dlAZ~r&1YuSW zwy+}yn0?6F%I=;XqU^eKLIV@&CC%hZnd}!%Vao?aL@cF)PU=suZvP*NZzpsdK3{>~(_ zBS7gQihn=CgV)sbqr8F9!UwrW;35wXVj6>}i3l3ODq%M;*I_FqqSRihs>BJ(hq0An z((vPHCtw(X;kS$Ei1c8-3_Gs{<|>l@bP2xPT?vVa0#EukT*k*CumrY}wVw7hEHUg< z;>0CJ>DRAc6Or1>mnIg4u8QMkF821z(4LVrgu%Ul5GG)!wSi0xQnbhqJJ2vJ0AN<_ zH=y5yJo7RdGOE*No%QptWvKnjKb)OwPpQ5s_rx+M->exmltjW1KV7_Op!fCoQ3dL=~U12@fMPE>MK5CwjMdsUvq zuK$eDL5%@QvA&*X<3x6Fh_2SS1}73$?9~@9Uf4W-0@tza;6?PfP{tE);{3D=tbL#E zwjr173i@iMwOxplgOf9tuul=S;1ut*oQIzfk$ItLOqfHO-$EC9eVd?Na0rd=Fq$-CYfzj{TaI2D?wmynLW=PzP!zM^Izl#K)Fsw=5S@wbs=INTApJld zi!A`EsP}+zl5b~A0&EZClx0R%Exb8dR)gXZInQ+t?JoOn3lkz90iTeNdOzPca*r{P z2LyQ}d@909eUELb8Zq_tYZYc6V!enxC9VZA5(>z6L>L5Mdj!W_y?V8)&@+=v&KUW& zZQJ%OHpPhfH?e%%d2e3BN)TlW zR#aBrf9ZnF!{3kdhQs5bP?}!`KMEw#xIqhee_}lrIN}i$`Y&bdBsw)PhfC4lba;lr z&_onnxC5;5Q|P&TB+##hGn|85Y2sLdB?zTER@N15ZI9X~i{Qk7vW$_5Nl{NvFJ_(7tv^n08AxY| zuaEfEKt{C>b5=rGfW3u^mt!>l8BR6AA%6If0kOptFwc>~{fNjBo*B z>tGJ3qTApk4qGCw^JA$&cFxhH;}~ysk1aiG@@j|-_!hle&He)ot1PWY5P(ujx%ibZ zWSJ~R@3RGUHIN9JT!6(2^y8TmHi_J;iHCM(1(?fZT*cSGgk+Ni;DvYxp{cyqLue0l 
z-s_PZj+*1qeG|wj?z*~4^an(?hxtk1@%5(zjEsyG=zRp1b2J59y}BCT%xXFRUTh=cW=F)1(dTK9zT{7XS_T0tDG_+& zoZ{l*usZ71Lo_9ca4WXulA`XvuAKh=%g_Hio0ya_ovuWvU<7za&QiZF_GXh6ky8*ccF+=@$}pH z9Psc&9R;+g8c%?*j_{Razm%fUXc&ZR&`dL2@Tr3|S*o%h{g1|(Kmk4xiKhE)RPE#j z4tR3nPD9-So|m%!JPauKC3ZDo+V5}~8-vTW1v~ymo&`I81nl_lR_6Y--ddLC6-!F33Y_kb*lD`8|0& z1?X=)eSAg)wnF@X&TY%^?^jR06bg?0Ufr9mprDY#lc1+)A=I1dkTdR4U})~*@+R?p zg`r!%k(=m=(2TK*wc%^C%(|V(74_}(B4!Hu?9r*m`L;*C3l_NXTw!aMyR8=ok8Kl?02BkdKl2=w6L|$0T ztRROn9Olt>OTmMXU&GJB*!%wdBJ_dz$91DnuM+WK@^d9zc-Way2uPcu6R6~w`|zP3 zEK(`edNH(mT^SPo>(Hw5Wa%s?-Uzfm*fh6SC7b2-y>74TH}65CJOOAxx9v!~2!J3g zHfOmN$!|)1ZbrGvMU#768ZHYTxckS1z%Q08CsDD_%fxtN%|ME zqRhmet{GhiL<23TWH&ih@Btl#_!89dP4uAyl^Xq%^YUy7C6Fi$k@k>!3!dCMz#lT} zS$WepO4~Amr?rwoFBb@{&q_6w(+zO!xj|*8)&mIjh@)fL{h**7h+L~d!!MFDKQ*!p z+PG%BpFe(JKQ%Z!aqC~YPt|NCT=_XTwbgn6?x#r?U2z;PgbZn|ee>NTd9nBrV{PAM zljYBy0IeTR!g8P{UQBoVreEoVM8bot?w9_cs;C4i>cFs7W95`DT{f?;*fJV8+bYL`$v3@Lhq^c zlxiM-ha!yO?*ZiBUUfOrTL7a9JR!GNC8~7bi-#y~mPZ~7t-|?4{Ss6*nxE6lrNnBFTO5&jS*@9E5d8dLz;`d?ssi_VD4nseG zKShb(zulPfDlk%$A=*pbDO!Q20$+K93(9!Ap3K5G@o&G2_%r-OiuP(S3Rp>;VL>cP zPJs%`>h>>}<)R~Bn3DXHYo*d~XjPRGIbwiiCMX8^M~V<$0V3k%5aP&iG4oETL85^H zIdFE?la5@TBC{##J9!`PIM%MUfkl#|4aUT8rPRxm zElg6;Y&6C+J97P(x>|b1!jmWF5-b8kZ4lm^sI_=J?aQ$CQv5{|Sn;uCkH2Jr4lv`^ zK`pI?Gme#$*FQRH1D(R0V3O4uy<|~?J-EJ&&e!*6$PhXiXGzY5Rj<@Of?ftIkE0E} zZ$N&2{-2ezg)UF7kNMt0RaED?|AuSTpI;v2PHZ}8o29zL!opk)_ZdTCz;Ms}c8r&spfCd~N7uMijASDaipSN~KT=Nd z7#-bDIo;uXSnT=t&<3yH?aq&OZ7H26PI;Jk>7kha_o=E+;i*CfTw+sP@+>#HtVr7e@o)%ENF&OnUs!w) zy*H%#ss4HAwB2yls!~Pw?c2A_s${(q@5(P*{oA=0G3Nam6xgS;Oo99|!;a_sKZ~Q| zg^Dk8_Aw?Ooj@#(Kf4+t)x`_d4Jn!?$}OloC+1-GU*86(UsQ!J>PiByf4i`Ouz(e- z!NK%VWgB%%^dOhO?NaznamQM2+4eOHZ!1BIob-{Aks*!{oj^Y;qg#zsFxV$qr#=W)?}Yw6rA2P7BMoxEjGjq_Ez)q0UcmCbC@G zH!Pyx(xUPta50NPwGe$&yEFUaq4Oxsnhyum#%JOD-?*vM_$ zwgS_pq?A<4VTwOX?AAlC0?)^8ow^e(y;>t{a%w67`ADQrlRqn|8}7n{Ar`EX{v^5a z<)VKJW5z;ZG`tSxis<;zL_kc$1AG!$RFXS(T)~WmiH(RrK8=hx%Tcn7DI^JvfnQLN z55I~nm;w9I;Z?2xQqp+YJ1u4?H?JNAkVZV0Wc);p77`Y|SX8u|bXf@6K`LPWm@Z#b zFVG2=U=@;-bpE_Nj_(!(o07Q!<h#4tz9>dO=$`Y_V)($gzekxc{=hx2 z;NP0fBx~R|ECgTdKOcsNUg9&eDkUWaOs7AdIG&~RhbfrF*Fqvso)Estij^zL6C!a6 z=zehMIlbGeuYJ8f`Nt+*b#(>cj_CKAo;?G$vjQzt^0cL%o*&NIK46`m$b?23eh|TY z5Iu&Y-LxNor3BzN9_hN-t8l`@=%R|HSexGoaSy0pGyRxsTm%EX`^u2e zQ2JnENQd_>Yt3Sx`ne;urU%LmpvMbJ-S0Stzm$f^4Ghey zcbs^g`t7{)*In4kiAFYcn%W91qVO<{>F11dvrvaA>G9E|I`wdIbk@(Nhvai3_kvY6qApuhGldh}dIT zx-_@@=`s_M>o4XIZA2ay(566j{%cR}BE*bHGohUW_@UT$1_ONvB2F7)FQDzkLkMn} z#t?864HNU@Eb>HPn%j$V@AmtbFFOZ^VNr>mkP2~y6W>3Md(8`DFxl4@uFArbg}|Ma zPGQ?xlHW%_v&b7a>Wa!PUZjU=@CwX2l}M3mFs>JjQXdBs@*XXgd-mLwD_4l`75yHq z?easQyd>-fkIL9|+UDC3mBi^I z!3M1Y78L27yq%cc~~f~cLHmy-D4z=ibs+zL4-+l>u75e!zfIc zrhPjg{>3`Hz=IP9!WJB1cb^W8j3k0dB0i`IT;;8(tDD_8>OQr{ZY7sow3G`Zey~3X zg%$09a4Wa$vf-Gr&=%-oP%aNZ>o($}u^aJ==O&-+Bp6C#P8nqDz?ybvl>V64*Xf=A zMXXLleERQTC6aBL8IYAn#nwYG| zUK__(t9Y6Ff)ZfDZ40!f3R^oK8Z2@9w{_ncT4mzPoWgYk56Favp)o zG^c8a(dn6(1VKdd9tw+Fl1CPy+NGQ}P$s~v4dDM-d=;wMFVqu~iiR6ql<%`{V+Bhxt!=V{ZlHA zq&}=PvMsNy;(|Y{oHOG?l1@lS$V1TQBtb4g(@&b8jp@YOl3=`>D&U-?4-_%gi9=N( z(YEf_vl=24}X}Blr+2jCvmdrA8I}O#$7_k}ax0&W9HS!;&Lk3DMempU#pM%>T#;9zw zX>0Rev;C`&cjqyI^=7_F)l3e88i7J#FFrFG0-~bh9}hwFDc~6G- zI{X+`$Wbr|3Rx5+B{5)UfiSB>!60P7Ta9Z|syO#a%01=MVeA>Aj|2)*+12%UZ9qpsxsXV6cy9U73u%ry6S+54%OUP*%w$;2zt0Mo~OtjEp z{%Muv=+J3Z_;5)4mn|1O`M; zt}i_o?;i?_xQXa<9!x7&4rF%2eZm6@PiT`uJQz0NLn*x&?n?Goqen&2C4$@6nx{Qf z!CXVzHGxpt3C+T5_)wvEsC{@DB3ZxTC#r<<}7`p%s1j1yV`dISC z@7Qz`@0%FkneiA933Mes-da_l11-=Ih+N3O+ymJh$rf@g>Xx@I{C;d$y%=cpW~AM* zF2KqbMxBjz`{1uoGD zz@4I0R}U;-g2$x!i<&~-Q;YPV%XoiZtsc{S&=o^wS)IXWjz1R~sRy!#Y|dJWiuCyD 
z(VBQ0z<5cb6Iifk3@LP7%9fP5RoaZa9JWj8USv-uy-jRV{%^~QFKyM2DoyG_eT_9N zN>seil1@W)wgGQ>_wv`KrY55BME6+H-p-EdgJFpTF_2spGz2-FK;>2e0Fyd5JKoJ` zJ))j$KsGiKRqjsqb0Cr_fS4MfpAh|cGdlV(GR^?Te$0Q@y*C0!@a5oOCcj+z z=-kU&-y!V{VuxETDcT*GIJh6a@9OG0fQQzC3$6j4D)F=N%FD-1GOb2cz4Rw+lznUj zRTx?+GE10Ff*d1kD*4PcH4JECh?xd7uW7IYQDmd5-GQqaT;p>IKMFb^;3+PP@S7=c zuErNLS=t* zM*I5cAMgE*p|k4(Y(FOT5wUtz zuU^Hp9CLOyxBHx*hEheW%Jyvj=+}@U0BVcj)TvXL50$sA-GVa&$-o9*-ujtH)Ko_oLwQr=avY}k3fUw22@`T6!sZB;k11hddej`J$HTF+bR>HE zK03Bp6rhaP@f49-Q1DmLr9b+}#IT5rglJ(fw<%%FCZ}hyxQ1MlEW- z*z)C}A(N}&2O072s7qg<~h^?K4KWjj0F(lk+1$Q=cprL7nviPA+=Q!^5& z7VsQ&7Gsiy6pshMF_CoC?M0VD!VNxt{D^1>WsD~F@e&XX#rsZgz^Ki;vkv>-_G(~M z6f5X(>jAmOS=>spA)W)63b9*ZO1+YnCQQ&foWbBAsQF`9PDg;=NR^k5pcaLcv(o#K)-DoKZwb0c5aTBpWlD-SFf82*5Bsf_pQQt#A}J~9r_K-I9)JJ58Elu>T29+- z!_CUh?H;LQ#eyY79Do4`m(W64xeAVJCL0j&8>Ka4R@{=7vfl?=B>)ejU;Y#XmXJBB z4Y>gASo~aM(!brSd%rkni+?9W2Vr^!v)_|!e}RwCUk@`5LVIT`P<-q5ZPKEDLa?&G z(%1=-|3!K+Y{kyrDJxLkZz3%d|#ptF;#1Gm%lSx3Hq}$qypi~9Z+5pT!liep> zHrL}IH2$$4eF@OkqfOjsP|-~_ik_znn?+OI3m-c!TB>Mmy_REvDGX}l>ba!v_+bGW z?73fN7Ft7jU1cyoC%w`myZ!$`dU3E*FX%QRZUGR zz!_KIHQxi~<9awZg|ay>Gdue#zFfpWkIk`+(P$Q-B+A6`?)5>v7&laBXM~ZaeHOC1o4F z@d^yiCS7VdCSlm-*a!?belXW`r2jHDhgruBcQ>is!O1}sn!56HDkt6(C@TlnawF

7 zfp*Gq>sK>+UxR8PdQCUK1#`#lH8gN_6S%vm4>xoiekLj|UI)_&md&cVDUecKgqVh3 zCWC|Q3X(}%x0{&`K-f#%?PQ=4kDe7-FMBddfhmzbeQ;!vAD9QU!@*Nt-Jm4^Wr5VI zbJ&TI)M5|DE7L(Oth_-FMPpndc6-ui5d0B{wiQO!#sCG;fYqYth_HDUvOOC~z>z9- zO(XX1U2>NaNw!2);26cL0-;@jUpM|^?o=J7U1?W1E09%qvBjz_zVs{q`^JVx=--vy z+;W=vpRQ5L3!|9b@T{Fs;8z;vl3V?6NdV6&8~wAG-*3nMR|j<4=ogBGgBeOPT=b0& z+a)C{P!0zK-1~~gF4mzAE_@~9Q zAWca(ye9$)A>*sheaovWmDa6Cp*$P$jZem^3=DS z+-`yUXwXSMT>1+ZQ!eapy>G4S(I(JE@8j|X#ZC$|UL~RNh=rjLz!;uy#@_SY3PS8Q zP#KV>#a=1!{(WA=AYZ~s+pIgy%#~UsmDk+iQi+F%a!8wPOj+k)`l%Lr?wat?0dNrqrunZJE(SkWgi~ zqB8wO=^R> z8s@U?ACr;%hRW?e11oMpf5L+Yfr#X?K~qUS6*pW~dyfU-Uv+U-yG5z%Wx^ zJZNTWdISf_-Je@u%`4gE`<4MoG5i+QSX5Z}kEIm{sg8JIyRScBSVS-XYxd|5tdh(* z{Doacao#q}ha!bcH=`$LJyDyt?UZV$sGvhBBU&@l`bjh|gP9LgGBMN=GX)09gubYx@$?azzBk%0;ePyLFj<7P-@3X#y9_zX}oXau5`Fpxst z$%9*a&Wb1MpxOWv)y$G`#^#y*lz7!-6d6c|1;!i_fsSnkP-{?l7nzczu``-SgKBcQ zNHz=T>ZIp)Jhr&4x!?dvRDWm1Ecz4TIZnPsuz*a9qH9xDbGXX`xQnJsVqxKWKyAnJ zf1?Q{0uX~9(%6vJO0$A#)vC_oeiJV2rpymbkaA9>zki7<2uQY7<}^mdRQ5J(GBPFw zJViNPjnm35|2Kiay1anuqxv^ES?4=Z4Q8%^FUX~Zc5CmTC;AZP41Oc4GdM>xr*VEZ zSPp{L%k(E#OyGbwNb|jomy90G0V?LipCI)!MU+&$y3bgY{rc_CfhIvbEBbJKMMMX@abTS zsNKyN08Lg?;GwEO`(V-_Zh;OHX|m6UGg1&Lv29M(d|?X}K()b%6Vs@Wc3tm>LG`~{ zfa_}tPP|EXugXV@h?(E?cVStNg}iy6?u-QK-lM)TZ^^+|SeXEH@UWkIIAD z>+I+8Z)cT$_j9CKg5Ez>Ni!z!FKUQ}QpL`IYc}97a7YH*`#l$0ig5)N;#i}xfQ%`(~T(Xu}gFNR5u}g*G#62cb&M3o;u;=%|=U zAP>EC)-JL5^d?+P6@*hmC_ghUZnOOS=j}#5xDfZXMQK88XriKMIwoWt-L|o3>P%tH(`RX^} zo_p9`$d=09S4?ZvI&}U&|tRY_#7w@Q!IqPa@95LKUj0^Vp_tQy#4p<^=2Wl8N2&*1m zyS&jt1oM(S4oxx9FD0A)pup_3cZlg}R_TG{vw6UI@*I8PnqNif^J!I(aN+bZ_MU?h z>M-#9upL-?Na0fzn@C;LrM#Ym@kvT0NduqioQcF-^)+GuEjMzAjtZQ{uusl(&3zX1 zbU?!Y^oDvuH9#-DE?6VbYqO}LddD)s_9cJR)TzM0s*Twl*BY z?Dnzbts~!&+ZiD*vGkqytx{6wQl$Darl~0W-&fcouJym0zDPVGtFDEw`(4Wm4#NMy zlr8cL165Akx$G4*W<}iSHq59`G#HPrZGa%dbAMJ3CT5h0TFfC-s|U1A2QDg9#Hbi@ zfE6^l^Vv;kYm~Z)Zr)?edN4hBj;;|3^#IRN1z z3uiiQgG|5g=dV+i#)xry=Dcx$T=xLnNxBuRWR8-5{?)3Cr!8_*`(UJ7TY~SIbA#yts{9XdtQ!dF@5Jt z$vS7wg;VTkzMJJqCSDa`BGobEN;w&?Q) zMcZh@3K#=QBKEwieAr!sod!Iwv_>xZ)~r6`hbk8AdxOZ8be#Uj8i+~qKke}6axx{|Lh!K`?@@ws+tu%#uKIPnQeXt zN&yo?g>_HhS7n2pGrh525W!+n%Pe+5MXDX0PekABnOX!7k1!e+mY|uJrP=% zH>2OiA7^9NyV2V~{UE&B_|nZ1XBC$?3A(d}Wnk9R?0VAqyY~PXg!~6|2Zsx;zv)V! zJXr8em9biTf6K%E~R6_&tWV3Am9_M!UwmMR5v_V$*ps+7bM)^ z@4Y=e^dLUh${KYGM}5du`R|PA-K)uDdwu_Gq-JvF{3QTsU;0fqbj_PvFMW~`x?}S? zd0!sdJnK76y>lYo9i4Lq;-GUmM2!MIEKzIgWHmRQS8z`#(bFq_fC<{J0*7VYJyaT&K!J9+8Kg zf%=)*UYySVC|XoZtQK#M+%;5kZjKGhj@;vlHhdhn!4!>jJWk5M(Jq;?WYNXOFHc~z z!FZO`si;@%x~V9xEGtOi-VfP(U*`^;z(j~zzwFD;MnJc;Z@F-yh#0?WUD~0sp1ji$ z&l2DH8;`!KxoF{h&PQl&uF+9wcz}ovX0ByN4n7i7@1f$C6f{iDl|$NxeuxrrT?>G! zvx2?l4M?hih?aor#Wbsc&j^?ICG-+SUAkpmhM51THrI91BeQN9MT~5>j#V6mY?%-O z6sz(bID`)a$wF?;YG*4j5`Wd0IhhY0Y$a-a99wRy;%;n(C<$JM%u!6A`DPPO-=F%z z+wEmR(NIqd`||U;?(ejM=9`T@nc9}rJPf;rV`tq2%y4yLN-=L*cyV@rI?+<%_J!Sp zKcS2u<9+sXu8KEjH$ZYH($_$}aU-R3bZAkq%AzRs8V3MjF^z+_#rLaRk}zabD5CAT z{XMJZ<;z%YvpRCaERj#%#eQ7mM@5M@lkAk0OG>c;E+6J&3r*Ih(9D#$UMi z8NXFJ1L8VXQjN(Kb`Y*rW2TA3wBA3THN?MzR>NYrrj+g;gajQ-4?1VRe}9kRa2NP5 z>Qm&q@yN)KyHkPaDnLE+0kt%FsVo9*-X)h@Au|%9(t!53a zc`&(HBe7Vei+dZ7&pt6BAwP=9KrD*}BX6YUQ?~5kSBoDr_CbjLM^8xFr?e}%);Aem zeB3&(*UCZuwYmTx9C8`Q__v`$I)MqNAf0mdbMRR9JY5d!_v%rd)zj(Wic~aLLLwpt zPJRz*TP+PAh;Vcj+vF6=^`)*fW(yY-bw$5^D<(!tz8sM(HE2;Vnq+@F+7@^AR=<6A3TwEkV=Bd{aVPTn$J#M)MqC z%ILgS1Ehr0OKk9KzK48!?IcamvENNfQZltty*)AI`Aq`8hYbzyt+^-dF=L)LRMJ2E zgfjC)-2;6wLUux1@OgG1aE%yKHBSa!qd8qE{xV-27gTdVXKP`n3~dC`)wF}g)`I>H zK~RV%pSb27e(ffqfB!({fX0%|9ZyzqC}J`7+!l0}gFko*S>hAeas zaozWAM%}H942u{8l+*;$z)Cy0JW5Rs>iT+m69-VmWBF)W9eJ)vPt66t&(-Z(R*~U! 
z$oD=VYLFF>->g7;c%%stZnX#}W59@(Tu?hTw-d4I^E_G}D`S0pqsJC^slBu88Z+$Qxtusu=}3$wNwcq$CMl(WN~oN(&!&pWWC>wr#@1dr2Qb=5D8Bo84uK$q(?YrI zLn0SG4v}f1$LDCU@1`)RZl?Ly4^!gWYUT_qp&BJZGw43(F|45pdtKtS1}X#zBr5D) zqxzI6PQY_Rvsy{|t-8Im$!2KMAb2vEcjjqK>^Ima>rocHT}tW*h1DR+3E`&pan}+-;uolVmdq4}jjEG2B@JhDdbBkzB$a7O(Yx)R5l-WB)mKTP zhN8zOhMu^R{Fg%+8pgz;w_-)+t`tZyVXWZ8uwHL+0^APjAceFJ0>l(xS;D9^OK|Az)>gcuk1cm;7`y@8 z*UfSJo;q>EkO+qB3RI)j7*>uXtfH%H`!pg-tBHjQnOZOf1|T`UPPbm)YhgSAFS%01 zU?=(@g8>ij{pXu*;a{r(c{P3N=6hm}a$cx@YjD)asjmnDdON>u9poSQbF2*M$W3r+ zRUCRZK5s`PLT4^`9W{97t6W+Kt-VltkZXM;|Ljof5-60GllW{RvSp(?eZQ58CN@K7 zdecBZoctY32S#uv5d9)d{`9DW1Ue02xZX%u3LxTYchG=+o0{6-LV6&~3E_3x>1y@K zA1Q<9gh6IMb@D-UZ~oDYgXIAkx?jL56=fcOr%F8^ierw-kl0)X)yOI^0xQvgh_?9R z(t=F>V!)FLAjZSUcn18@Ak9{W)@5jDXPvO=zFyFRV*%Rv#)d;4qh;2{cDA+;X$R0^ zv=9eM?AGmceqVLeO9lTp9{vszDjtgzGX58s_7MQ~&clxhlZETmGcTrwxURIJOr?z# zrvs(VEIOe*#?l2jQlK*fmKnAK0*>bA z8F8EEoz0;E|KcmSb*pG;CiDkjeK)vZun<{!d#N9YD-s~^36~X+P+$<(X?a|9gssI* zI;NxGVhwJ!;||wG@BK$@Y@!d2fB1@H9rsFQl#~^OL`0~8_D*~TdmVr{N^!2n?pLF9 z-DfRb5J=#B*ave&V|+LS3LgH3&JaL)#lAy+Toumk7F>VaS%PFD$&{6tk2PLFafKUj z6%A&Rh?7l)vJT~&)uP9uR(h+3FpMQ*}lM8WT?SsL~AVoOL9ZpvQbkd z6tXasXbxKTSeXxQn6OUy)Ap;=y1X z;=>42XxYB_e&teshq(K?(~620H7}n-xc++G-_IKx-^Z>Hc7_5E2g8v(OnMkCk{hRp zW*vZ?WH+={t^jhV@1dBe#sSlUE8WpO9U-^tf_21yrgX>c0162w_MsdB84XAN`Mc@pJQH7C(gy*CdNv;0D1A>HEh_H6ok5?V~rTcEXM%=7~K(BJdA_bRQJNNd+vtoEeCpgD>zF8*@9i+ z;kN2QX5xii?{vLnd^Z(|usVtY8!y4x9ahlVYCv%IlE(tzx1oLsaRmDX`B5ScrjCe1 zH}A@MgmisHw@mI4&L{=<=H3;hb6P`32Wq>5*7+vnT|tFJUCjnd=emqh2xLnN=v`M^ zO^uJ^Yc=Aoh)dA#7b0I5~U<7$u6 zLCu0*xi5sO!6+IsBaiQR5+XTv1=FcbuDmoyHzVu9?_cgW2Q^;|m@@^5Dwc`><~=s`n;UnF;2}+RuQHY{ zKwpv}y~&y+)+3#Iy$exq;+7LC69s;vBLcT^TSqVan;*E8p$WmMA(BG$yEvTy!#7$6 zit3lqER?}2JR5$5-w=FuNgL*~&Ytvg*)&0UAB!%1+K3phzmTd(a*e1`V@K4l|JLTT zgkk>4o1Zayvw=&KRCj~Lg-iE0%GX*(L5mACN^Pv<`!VS9GYiDb3wRLnpEiYe7>+5? 
zYQFuO+XwH)3z)dGe|Dun7?9hIgE$wV3}_R;Dy6z{ph+FB@N=qt;7nqH@Cb!V zf1ousq1GZ)BxBP!a(2E*rD&l93Ty|bz79q>O^MZXMDzq1;CdGnlvyml=(kr)w$|AaSU4Az~)d0;Jrw!cy!a^;DG>@UcmVYH+a3b6^A%Bh(# zOQVfl`I$g45Ked`4}BhiCXE-N=zSr)meKsy_;Y+ay_Ot~_xT?y0~r9BN7HHPM~FIa zM#2jKvk6l>5&D0A^!x*DOV3GqKL|>NN}ppcymQt=2U;6~cXpH!`-S0s!u`Vuq}Btf zVZdn0;0ACjs85=la1~bCSNS$rh|W!Q7-vp{L;FfQUqHk(`EXiXe01Arx8?AYak~>I znl%f>g@h{57%PTVSYO*q}4;Jj+x5+P(Q;jL;-)RpH zG0w*GmaSvU{tn3oTVeQBj>Xj@7eSsH|G@2u>7COMs8u@c@%^6Hdbx6Pa#9MIZ4uKt z4?6`;Zp1zM1B*%2*|xSCxd(tT3~3wTq6VzbD<&4+B`@o{$u3@gwZahpcAfF*X*D9E z8pSF$^7eCvUK)kI#)%t5lc?i;8quME9iyhNAa_Kmxd-Y~^=w<*3XF02sB@Woh=X}d=p{RaF>N_Yf`q#O}FDA(0(0Ouf-%@tuMm-*td zs;DP%XvRpIn$bL_9x81XwNMi6pOFDVwGL6K`$2qFcsu<@`%LY&cj+&ty)AUR+?DTw z=?Os~eRm(A=zze}j+c+^_=RwGBJ^kOf+4E{?1M9(O#Y*soFj(2$t76$FVwQLf&v0= zR?jh{HWd9JWU329Ac5uO(FLxOqITQNa@dpiRvjf^=AK7jk^Z6|Oy=`6@?o%H+W@rx@(bm7nch-2CA)$P2@{iXaR)Ip6nS zr!LT7U!R^r%wV(6EA%S3%#P~DW$!ZjB$dd^fcqTI!*3$~Z|1INQ1N2YeyZ7e=!VHQ zHN2#e_}uzDT1N2mxX&JWP-jT)+?n0EgNb+QmbP8|_sNmHycS6?NB?a+u_P&i9T%-l z2+R%g%apo@^V>QxL(HS09nXb$=bk9=nAK@B6Mf*Gy|Z&pc#Q zx9hh#WPU`NgEsI}B+eiM8j*KZxpDjZ3FjiI1t5b8@d{nM`6ZUcLBYw9Xpo3H=#J|F zX9R2DK*7Pk_LKI|MVrV(D`O`*PPx)@(8K|?S&B;3ZX{{PsA$o`F$cll?4BxLH>EHL zTp?E@??$0$N_yE*<49v}%SW<;;@h(n8;ALn`a zf{{0^J8$Q*7ntHQf0b1UgK-ReG-z_lXtd4xy7}zLw+Q`rMLr(8WAGlFgFZ!84h&;h zwjQvb;M)u(p%UfCp?T@}%nu%K*Xh$~jXZLIiZ(_7WEWvoL%aa&y59I27?-iL zCfjE~n!t^u>=(Nl8d`#x!)kps==-7^y~O$6SF~hRoie(Xrart^Gj{*210Gu8dy@ygep2pPl!Lgf~bHGwUO6+#p@fBy>2NT1|-5Ad(X+ zBc|BKKSh(_!!pnO^!4a#-Uh}kz-;h8bQ2lj2@xDs5}F_rA0YIQ&8<|O(uG`HTHD_@ zl1;}zGIinyyR9wvIe^eQU=XF;?%Zj0-3q^7{IUK2cDk(a@thbj{|1qR9xCTvpr<5U z(X!wRGM|t^Pzd>g`e+zp{sNFV@Uzejq*Ti>a0+2m+LR&mY2erg|6AZ#KoNVq^?Of` zbOcu53+|OPI`rC2J2|QbwzwVE*74Ve4bfmmk~{QP zEpUXBv76amdlS_PGUo^s&Ql*0q17B`60eZ^U}ES(Qj|eWnH-_v?3{xH?!f_@h|zp;Q4bpWAYu%RUH}g|9pNwOlgAN|@zFA1SaB9c2DwN2`@!1~2oZE!nxP3`CSfi_ z*Bc_8uOK>tErrPyM_cAaN-$L6UvZ`W3VJvn82rZk3qS}j7Rh~Jo^M+`2wFJ zXuwZL`my%lc)E*f521&HLB9ywMGj36r)D&EzrZcOLuwkx+2Y(B8#zoG*U(&_W#f%z zpM8Fc4mS`u;C%El&OMdujSLM5#~vB;3fn`P>X2392#Hj_cVgZgLah)t1l31Td3ba& z0+0cOgw5hzr=merfi5d$|8dV!{yo@4A_&BL=mVaLikP&tGyxW1vrg;wP4#trX98+k zQiwILc|#5@kh`E>^9v%dizs3fq%XD+8DG40Yc!XFmiw24LA6bb9K<3>&TNiB!G+*L z$iP3;{*RnGC9z}24jed1VRg>VTZzS0#=w}sjBKE|({fPb^2OYq1=RqyfPldyZ@r^& zTc2E@sitP*< z$RX}0pGac_IWC@W!jE`%VD&|ij-?(t0JDavW~3)P%eh4s2lHrGe+`p@F8>O%PZ)wm zguC3!ViAd1$+SXYYXg}^4liol*g}R{K;3Z;_uJaFLEH=C2x$MpS7-3o*#MdYy*l>d zuc;w}7|G27Y>Du2vAxmKREl0cbABGY4k2|x0X(fE0$q=&@Jpa}M5Zy@Z8SxB>nUba z=C@>&xvY#u`dm7;Y|hW*iA< zcxsL>%2NRCNMictnI$M7nqgWvL3I2{=E_O_3J}eLz^=c6Z9Re>R*YO!havZTJO)fJ z;Zh;o7Y~Y3UW`Oj!cZ&)ovBY`%`-7_`gJJc+*C18MqGq`-%_#;!-g)UpvnwYsCr>9 z?8d{Zh1Bx}6(X(u@+z*z-hqe&R#k%niK1Y#rifub}u?fW+{ zA|m%8Xvlp%sn4X=A0iz;1o3_9OCB;3Y~ck?uv&7cmhVKC>eXj0i5LwfH}K6}zCJt* zSk}LQjh-t}j$_fe1ht4JLlkz+x#*RUl#|OiID_zb!lm6jl!NpLNWb*6i@YLp92y{Q z7t8=Np&@7mdH)NXP|SCF1#U*9_@+V!6Q#F|6JE z;$EoO3-CvFM}Tb*6%ZWBA}q42M7CvEkP-$;J^1%u4PQ)t8PGJ_{z6aFt(?uvGECl!^L}N^`dvgZX+M(?ig;qUiGQd+Z!vNJbCgQ z5G6D2%^vv6p>|Lytd0D+5BCJz_1bmnz=i%uXl%|G?<&>{7NUl0BD*Xz}5j>l^kS5y=_Z}Mp`tf&~``v=T9QC!#O zKj;&q=|gAE&^go|l`e)KX-g@C5gWnY%wdylV$W}=)XB?4^dgE3$M9eZ+LqM@a?rsS z+A(LKU?Q1_Kb3^PX%e5vr;kkM$TQR4Sbwfw- zYHl&_adtpoZih=_kLTCGg`Xzw>va{j@O$4qt;|JEP`bd>LQ?uu%vfBDlsem?IqD1IXS`w zGEFm+WXHPdS-4hiehQ~wuik!XiPP=i4eXMmmzPSJTKl zR<6a759pWY1ewxoz9`kIs3WRn;r`7iy)NOWc%$Hyd2JhIDV5>ROT8Q|t^p|SBGf4S zExbDu9tRk=xrF;z#QF`^`B`ToxV)|HFoO{1($I5w*Xr~}6`)Q(oe*{iIU;8w3sh)$G@tOyH_sDu3_x-k9?&>@} zwWE)E4ACVNETKf(;NLgouQUrO+P&O>w>nmHS zxSj-zxYR<6Gk=V~m*X2?XZt_oYq8atSHt}W9=M>!oa@bpZ(m0*Gqy9FTe>G3G|r`k 
z&AfEAp+?uk0l#nG`Fc%K?A$0z_qx~ovuv9UQGR{*F3Scf1&ovjvi&=w{Ot`^{VXo= zC`;^ljn^&8oy$aNLsrNCwJxo9?Ud&q_vx!NzK?$VAZ+W}^y1O3;mVfr$_p*c#R!Pi zIIriiUP>s1VDcxI^KxPFlR`5=T4q%WZGtE;?KNe6m|E)<&pw})CAC&QjH^hMIPfsS9`PurU6rohjIG7f zwZd3fek<-U=u}407R9_Tf8CQh=Pu(v{FQS(+;@dyLC1Zy&W91Bit~|&yAs{tq-DV^3|hWdE-^Nr!^?YBL<54S9?we)C8lhVuOb<-!hIky><#QyP> zQ5JJ&Q+{k?xldnu?tsPFw<|es_5C+m{rGF)Yb^iPGuGHELAkWH^_))qfUSDk{DFI| zjUdEr6_*>{7VGxOJoKtn3(i$fv7ftq4iNBJaecmaG}h9-EAo`DzldAYiLDzcx0a0` zYh<_1boSgqX_X8ImPTHSb2YM{38V$E+^$xwR0%kTfN_qTz{R&?hg=X`Bk2rYV(s%` zuYAdLtsfV#3=+HdeOZ!3loy3#IXsYMtZS7z$V3V*G!P-d=`4=2_V=;?HkH${N87u@ zueg14##NZ;l2j{RirQ$$gXvD(M|(8a;y2S0J1y>=>o=_YyL0{5Sq&A`@-(Av4?et5 zC(BZ+x2;zg@btnSG4&kJjOU;JqLiA$iDgsd(~Ab{v9zaHjXCH+@a&I>mx0=v)4DDD z9Nf}uQDX|igj6-HfqSz{7TLwV&vlk1k}Fj$R!+}vk5=n?^x#snX&lJGM>r$Q0Iezn zNNR(hkBt?(BlYJrvb9vTM&9a|E@>Gqy$t>VksYP}3mU=f2py zp)C_VEn!er`!9Jytsy3l1x*ZsqMq#7IZhr?JRr`op3S^+=V6ZZShjUA)3<$-a3A#L z_Y%eR2pIBU`Stl&qcD47$^(l-?J0eJ5Ly^SLjTd|3fY-6rYpD@4d%~oZ~B%IJOz92 z$aY(!Cll)B^8DPq^SjzYHdS2#4NCFYaFs@1nx%{m^OW^i{ctdSIJlQZ7mU%vVd%q@ zm#r#s9v{(X6~*(-hd6>U#`M?bNyF}KD6Lp}lsM)#Nug}(JU)=59g3i&6iE>aDt|oo z%*$Vzc73aeCx+3bH=iv<^=az1T1d>p;V9*F_wim2`jXrx_rqW`UQX7VDC05WA-+Fl z%qrU8CnJlkF+7+{U+U51X4;4uBAn;qvtNUvxb?jFtSigI)Ru*T%@!5%w!BxdX5>xd z*eIvQe_YlMp)ijD0SZ>cQDRHj(n%bB1 z;_b(^k(*yR>|C_D7oZSN)JRy8;hsA&I!!TbYu6*9&+`2ZL&!@f0E+Wz^c!jdoTuu| zGX=B|F1ajo_RsN`O5@vQ&c6AbHOr-$^5eNUnnDNjxK$d*aP0IHGr3=rn}+kXp>!F9 zFZS?s?V7GV{6`~7Q!Y%+q1XBWqt)cP$1rT%1^;DS-y^$-fxC~DuO(|;%_lx zOAD**FA2pfO1@tuni%A|EPcU%K6}}~l`F%rN^^6Yy-ziIvEw)>$d>*wed+vg_^e}S zzI6F=VR}`a|FUJWs3Hwxqe*Iyf1S-j{XbNp+y;_!0d0%YpK}G8LyhwUgU*6aJxo+} zu87a3Y(Quu4I39-|L_IMi6tl-;8=O73*hD@O#i{m&#=T~<|?t&uMPGr-7L?ZKU;I) z!0DD^%?YlyU=vt(ihD=ilWm0Vz4)4p6utXsRcGYV`8~tog5mHytjF^5XT6QNAL-|8 z?p5aWA);{(rjNwXJGDUbAHl-uYkc363%h#dl}7oW`E@acbkFQQLH^2F!yg+MG7A)9 zB?PTWoGSgEBJD^vCtq7_ga63Ep(Nq~rJKX})Ok<4Uv&6pGG7Q<@AbM|3+dUxq?r(guSBB-lfRA*jD!vyW0~!ZOd2Fq}#<#w=$-2gOA`e<` zpS1o>aoJY7RBc~vQDP19`eVrUxW(~^lyxmP@-Ix6l}@61cjdj34kq!65w-uOXPFUo zWAB0XCLAIzq}(}yHRog{X*xC!W#YhgSIcE~zx2Ws%fve0@Te%-x0QhpQ;C>~;xo;+ zf1EO1hB((d5znTX-+7sep%Wr?5H|tgy!WTiPj#80|7B5-9WPNVq2Yy1l0a5=*b{M7qjrs1M#l)&v zd-c5yWqiIrZDNq12mT~fdD;0+PHh0c7X8%ey{zTol(X;Ut;HoCo-3+Ggpy;?{yP!S z@|b~(sXx<^M;a+;zCW`k`0W$2>5_0cPlQEa_fF!OXmKoB?; z5`Hp2)~7e0lc-vhJT_b%KXHyWkf0Q5Tkab#wN0Biz^|NBR@~?}NvZgck* z{B(ZT(P#QF1+FwPF7Ya+T1CZ5n$ z7WcOQZ_K96{~@uyW%N^StAzFNOX=l5-L`S@YtDUwd+?Rlx6PV-d7 z^`{>#k$btIZ6w^b1<70D!*#zp6?7&J*Vh(*^7BQMQ2GE5tGxSP3j4gYB0MAYM-9cy z#7c=SiDhnZbz2j>cj836Eo&C!T4D$1%b6IWYgYjj21=tWqbfTJYY~aJ7+@mPX1t#w z_Waz|vG18IdwrinLmb5RCS#|bis`)HFl`}B_43(0w zD{r(b6~VOU9(a7GDa)pc!R8=_(VV6J?HbevqMd_Kfl(?W`TTg^VEri;g0GNCE&4Kh z;>b6pzovAg4wP%9S7m0kZJbRWUD{l0s;QZ2hz2&`!Op9fG!E^wKvwR8#5_zRB6`6lHv~|k(j5ATwWL|K%!>2TM~K@pNY<cpJX(_xrZEFudE>1)Cn^{B zXk5sWsQz~M?^!`~eggt7>343Gn`=Wf{)M<7@w$AKIy=_o@)U)s{&mym-l^C@R%iFE zU0XO&Q7bHw%bh(I)tQM#4`|D)JU}b)BdtqiuCQv0D4s7SwXOG`l7*}?r}i*EMEDN; zUzWikvEN!Qu3XR6F(rQsQ4GG_0qYCBee&o2!C$5e->C3w!c;M=$G5kXf8h^Dc8<)q zI2uU|^g3doC4_Hkw`}*O%-=szAWeQ|nn|>wZ;PAPLbjrx`Zv$E-+y~tY5IAuRZ}GKxAW$=^9xk!tiLZO@(+akg>O8+>)!t) zAK}G>NiDJGV&}csQGUN&pSO@qtYzIh(qCIb ze%)~I*%~u}0v;>z)3ra+ykg>o}3Fs5|(dH?&}`?kXR3v5HN< zTp^#RcuAIU!bXmWYxTd2%X-kojhe7N$_uVo?D?DiHF+<|IwzWV-~SpWMLsK5p+WyH zes%xkEte-A;b#hOIU?Y`aJ!2I@1yF_{t9dS$CXfQS~GI@6$lp>%e&>-=~eF^a?ga^ zd;ZzhUns=$?`yqO+ZqIWfq*vkER{2v4Gf7j-J z9=%O+UO?8|K@BO~8m)1N@_=aYr*n`oz)7`IAk zY}>NQTYf{dnP$_7D?j-6?e4pZvo^~9r|WE&Dml$L>u#~H+nRqHsi0qHHyyoEvfs)i zXNdQn@5AO9c*i=$?~yY9cAXM`)701Rkk!!Cz_Z+U>?> z;IHqhsUl#r{i~h^SPv5wOUZLikiquS+CusHF!>rxuUn` zd&XIcPs8IzrDe(Nqocc$t=-Nw^>H2rL@(`*3RLYR#FVS 
z=u6KQD>14mExFQBZS3ySwhetz{PvPhJyEwz7F&Jk$#>>6XW9l(s5hs7CTaYkv6cm14z_6#ET~gY1x*=nbf9%_!&X|2)-r>;lhtv;4 zuRitEI$Js2w!75jyuQ8DieJkMzBH=ekfBVi8Wa!9r~Z-o88ID`TP0QT9#S)U?s%o2 z_}#SN?&{F)joZr(~qwG_BX_FVbJhG0sUn+?bgpZC&sqxwkK{ z{{!oam5k*3!yWlaM`e?>tkc-5m+pz_|DdMBH7%C^;AZkxZgJVVT32;y*H4e@Ssg{R zgW8+hBBg#xY-{sl;i6_%H%Ik5M`-i6?VyA<$TOoBCc4GT9k6nna^jFxC1>W92ZO_l zTbNy}&hf{EcNLdC)0^QYvFC7Pl#F$~U{uyRelR`GqpWMpfIg^LrQz0AQ?e<>JItnN zeptrBQ>or-ZmKeP8=uf#@E_IdvQIUy(WKAe>cyySWrlC)o6l&VRBAjPYgu;N;{$)@ z+6*3T(-;0S$DtynRyhNbI+AdP8(Jw*Xr^LP(3ctJ+y7*x_jPjwoPgUZ3}Y3kKZ$}TALxQ z@3Gg|?uB*pL91t$x7=7=mD7uyZ>OJL$nQ?b^>f*4p6dPf@}sCfyH@tq=$NEO^|~xD z*|??FwZM}4QRg1DdfF934dZa7$LvJ!PC5O2T9r$p&d@&IjVhzNZ3s`#@UfF^LVM;v# zO&8xjvL8C@BhRbzZtlFK8o<>{pq6t~YF!hoZm_yPmEZMtQt$itNhd2UYM+-QJMOI4 zUJ94ecf5(u^l!fUlA{t*?iK6Ne~EQ!bDzopy^Lnm6~K!2DCjea<7eIRSUhUYqRo3% zq0#1TFFpIMOHGwVT+g!PsC|V|UHOzVqo246?QzW#qf8Aqp5Z*ES$mvo4`L^m^zR-W zSH~x*;i78!QwXXGQ6%m2ReMY9aGL^PO|7c-9 zhxe%|vP>uJ!9Ci(qT>e1QRiM5Zf%&~t)809Zuh>L!!y(>&Z2!QK5lo^G~24HE5EU~ z#Cy@lUV9nkyH;J{N^6wVLuhmKE8REEjL)iWo~Tc(}XkT z=o=~pdOJBKS#c^EwO5y0dTxCrFlOxV)3j^zP9LCupcps|Rne+b!_=uBU0t>py^=By zJl!gb>oI8&A9dWC*W~y#c**zVX2y5~@twbocv9JMPDLtNV;i`7?AhiuC{A zwSL}b1WWvylRvq}cqRD>Qtx z?NVz?&0Ou4-htZ};^jxF(|3Q1^ma;*YUe(eVr$IX*mtly!Kry~*{PJM?W&8;HYcY> z1xDl96542+j4J=u4%kACudr!eGeF0m3wjJU72j}=FZYt~Ql$xPvV!8it=`<09pka8CddW(N{GW^z0E>*aYXWMMBtn!7n}TR z{$S0TB6h^a5j99OE@(Y3D;M_iUh(A3AtzeqE-oCt{_n^`Pcb` z?PPsXh!frNA8n|b313)AUJ(5VLu^xFh%J0+WF>AxD$w_bj_~#Kb#H@fS(b6a99AUD zkNce4Miq61FW>fn$q@caCs4S)PTxhh{8t&$LeUEs@q*~*Bb7YiAKHoh&F1&NP!fKH*a>+ht8foDkpghY0voL>itH{_5v^6TZ8}4= zz6*rwoBvco_%#x1nJcY?Z&8rM3!L7AQ;akj>;RWH}#I`Ll-xA?j z2g%BYmEJ} zCR(ln)r|2N=4tv+3x&BnSfRf|Q#-bKR$<%xS10#N#Eke9ST^0N@vOh4y^j`2PvU)i z{N~bzhCpjdm|o$zL2G;ZIl7ymH(9G?&hsZcW5K9-ljce zoLXM3p1|IfqAM_o&X(DxOZmIuX~Bc-jb6p76{S~g&d}C(KVCzhi5db zs{TAHdbcyJUU;%4Zp+asmPJ5l@0ii_IFkMG+xXfoEcq= zzen8)1G>93*5~>KEIO_?YP5qtAn+-4`8?WEG-kh+o^G9$EJ);Ly7Ox2Em_K@TtUTy zcJ82gz0P(~=zDJ7OK&bkn2O zW2na8K%1GJ5ns>h&eG|0X_)xn)v|_mcUM5X8*MY+{>w;9+Kj~u|85T)M30bH&5xiS zwGMvv1V8Nw>yNLw(fSVV@f0U(?Z3rOreqv#SvYNG z`a3=RocjjP*xFvYzF&4%%YvTKKiYVE8T;9NgY4gR`JV25N^abdll(-kbG-ZM#Zd{~ zF%d4+KSq2%cQ1Dw-9tZdYN_+9G2^F}^&fR~vf~4ExM!<<4^j7M^fVt3IJS3h>5F+F zE7;J(`4Qkt&l8+B$(})!^ZCC>Bb!l*ul9&|0FQfo%C4qqZ~2@e{Q>d3I2-g`YYsRQP;pF|Bo$V&Oy0@eW9uo)wQ+jlr}7y@9a^yZ-giD zcWSzgSiI`-q-$?>=?qyPAdiwOMxMp4I%B8cvJL?d$b*8znY5MKk3bT5`)c?sB;eGvDgh`E}Od87whRmOI5vwj>ZK#GOb3M2EVKHogQ@^uxHkLR_C4I zsvrHKrli&KPU+XNJ4xPpLC)1m$d%IA1^!lBn_u*W?`?ehiWA#6VAH8um1^Cwsh*}; z!M?bKJ)5iT)c(Wu<>~s&O1IwVJc%|Ht7Ag@cBFp&5~0nh+G-o`IVQ;7`l*+$P@wu~ z$ls`=LbLhK0{u;@1sX&7KKkdJcJ8^gi``bF@wR9QFHoc2i?jQjUBIR`yX%zwQX48x zN4pJXB5VCk>9kpp>ixVYN1WBo3*rrForagHLMN(J#iLld<;fqJ`@igM^qT9DPqU(V zt!eJ%44KI8{JMlawEIwUypvavxMKRNHH?_jrgidVoZa93EUKyI@q-~BI3HghKFutW zSiHFPu_IHZ`K(VOUp+ZKaWJG{P~lBPUU&Vb(OkP^tFoRu-xKFM8#1rU1nByF;?ei{ z%u;q*+oL|6m6*FR$wDc?Th_jDXRh2VB*?X{D|n60jXP~+Ps^$nIn~5zFJb692kK?S z4=rK$l>F8Z|`~0v&gSLvnek*jGIuN#2@pu0!Kyp2gL|&tVV3VqQ|ryaQw$8|SO`sX7&~ zG83tVV{1ZU)F~~U-;>%XpEW0>xL=-oX|#JZevI!scgaYp8T(=0m-B-%wvFfNGkGI= z1#|~miHlJ+wPcFQ7&q>BhOVVg&P43B+VB{wZ|k+>vt(5hhAwAOby%zF_|Lgr0GPu8WDKoEb$&q@hObFZ5!vambx|W)YVovcre(dT63a*?A3Xf>ZS2< zp6j?r=8v{m1n6~GB%P|JHnE)FQ~9MMU%bu?!g|C;u^ZG!Vh^YX=(3zf)bCmQ?B=pB zUb$ksCtVoJh)KzlfF%KH-!jY>__(DtY2VKj`_1w|ZN6*c;WHod?@<-Rf||EeTbFh= zhmQW~{JTeachiQxMg!)M;);&+*>#7l3g+C)^Icx2KiAn-yK~A;XYJ0=L6zoa)q?gW zL88Rm0tOt*ism%#&c)8Z_cm`oKd{@UZ<_C|kFEn<>ZzYxt*-dA&u$y-M2fCi?HRG) z0I#R+08_GvmEp)t%cmAf6gkvq<_Hp8qPh|eFyC7+B_#trt0%4_^ZiWZaxJne|AcmD z-c^N8HorczC2!5SJVQ;p1n)4z#*)C*5gVNxy7Qm(KQvRS@vaKu#U}79dIRjeBA4=# 
z+5LH)B@K_`w->D#+dJ!#PhJ!MBlB-wkGd%)&JWK+ko>~saEupfpP1>TBTcJ-AGt&d}J*Nv^AX%9LxsWzG(LmPf)op5$FDb>xO z`i$9QE4V~#*-v-b)WD5v%}cgtPL)$Nw{LvN+c-RaZ-@<%TEz9ukX7ZeH4HSWaM5S+ zSuryXC*R6PU#BPm0EUP3-k_aA4#6;?A5rl+@<;*HB&~l=*_}m*X<8^ z7WMi~G)sqDFNZDOxfq>hp1hr#$~qCIEl;eccUZ?#!TG`OH9Aiywegef>U6`)r-8Q?3Q%fP>!m4{wIyimclL~ul2++S@_nGOo<7*JBKBoy|G+FyTaO*DGd;$7 zx1V>A2^3d6sQ*mtw@|wyG9~;4TOO^Uqo~@c)@2`C+1bX;Qmqy{t2zR##kn`D)T!r)F{uTP7N9WRqp~#IqDDqBb+? z6vH0M)rPW6`j|bXHFsUC94=|~+;VE^^V_skwUcJNG|BCQ+S(o~X$Fs@GMHJ^6Ftan zA5Y z#F=3@HJY;cc_{(f4TqKXPMw)PG!bv--J>?lr@Lsa zkMrQgGrQRIF3OE*w4|rhZuZl!rC!dm$yJ$c+_*aopJ`P(k*_k=(0RNo#eLxqvI6o| zhkOEh1^Fk2hcdg^(JIulBSqY}f|2B>FC^L)&eMwS(tOq_=1Vps)lUN{Q6AkPw!Y; zg3a7tKd>3O-hC`yp1+;tB22zEG?Db|=2(EgA1`kQw#TJ``tMVl1lsMlkF}Panm7~k zUbtuQe+P0-lXIMZ``JJDzfx1tUw`@E-~VlS#Kiu0J^m?O{I7nH75HC$_+NeacLVYN zhd#^`Hb64a{48y>;=eAgl8@$a);jBveP8qYN0r3H5*egOzAXSKyE9)o853O@rD$G$ zq1H#^woX;@{3Ffm=vraIOTNPFWXnjEM=R-F3z%RBgem!T>W~I)W2*@Ei1adXu_KCR z$+I^Erqd@z^FT|-TUOfZbd2qpNdIyMsBxacS z4RPW;niqw8RU9joem^jrAsEfb8}Bb?1+7cJLi!?$d(od9URykvP0&ZnW}&})Y6w~y z&*z|5c0!=ckeLUN%9!*6k-G%+bP?vXxel48Ipw$AA}!#T*9vhhrHkxdlZy&Fw9$%9 zO*%P6%>rO6&9M3nu;fp&5rsm)p0MN>^KkLtCzZ4@v3BMLg+@ zWs`;W(r(hK4fKFkG0;|}GgTYspy#jlvLY*w6&9SN&F=)*|CSmnWRnd#`nhw~h>5ZA zS+fH-(%V0&(nl)uzCSB(MK|Qm@AcWK95>Q?$)a(qf~`j)t*@f1QBiokkJl#;5hS|9 zGC=354 zws+?r2c7-84+pMD#rpW{hd*Oc2Q zH}O@jmERW-aJnI9Jpnk>*Mh@efi%Dh)xD1C?|$^D9IngNpFC5jh`TK3uSUA_+~=h^ zVUo`9`*44igNvridLE&m+qq}rMhyX7k;$>btEu3Wguu1+r3upW=M z`us10#VdCnJK!=!vhez@yUEjJ^jEFYexPo^+Bq+D)mq2axJHdTKCXIO+}f!XaWQW- zuc{~-MBL7+Tp^>o>cswK?{vkN9a;YK+H~c)7TcyC?`a#_^ROc!r7)#1)!V12isPB) zO=XWIr?AI9nhqCtU&ZL>lJJ0W78z<P}B&*&GuFE+Sv^Hc?EJ*`j$hxbyO&DM1};$8221oCsDLC#Q_yon zMn<}nEHHhI?#6yIXv*{FzfCG;CmPGwgMcl1hB@JidzRmnje90CKK|SRW0jOoRNR&^ znE{Kp;@s44k?)NAo)CxyWG@BonIQDQu5v6CLg1jG!)2z$tx@DCt>F4Q(J7P6Ab23; z#Z5ME-pu)+qhwnX04_i9e|&uO=+U3k9`6p5x%0qbikS53i|OZSa|^os2&&K6*!U@C z;koBFY60x>;Muc{inJduS1T%39Sg*l^TQ{zeppx3y>hBw_FlKHN0coF$%zw08i!>Zgor25(J7tiOCyS4P>s2K4{jqAwduo0X+vdI9beq*LmOS9rPG2Q} z9skkl_)?#~qNtoEz?I3D6SD!Z`El8GFa;ek7e%L59ZH}~l0~Pu&ZV@juCDu-oy3{| zSC=w|K8U1F)}if*jhNV*kar8NnQk}rs|K!F2yaPANx?)`zj}9&f(Vhi0Dfk?1}_zt zDAu-?Wg^AeX3Z;Gm$5=5c%V>E)X?+t)G(rH?t%piuK&3LtF#O-??@v2yWC0Pj}dqX zgwG-19T$MpvwBW2sY{9WVu!rOa2L@<`sDw)2&ZBbGwWw_;Nwo2^W?UiHmitL-;^I= z4AzwyOSUKo*J((y;HUn59Vr4&^lb~uARwDRNCWc7q;4t^#b&xDsi*{Ly)VuFUx zcRtvyMCR|75YYh<29X0C&j3^hyAM2o23A(o#u5k24+7so?1P{wV!wGzK10CbB}-VO z(VWcM<$OLCzN;W35?p6$f^ja<4Jh1$!d^_Dc8M4_Fv`i1 zeF!3!tR75QnM)9)2Te>$F+5o$1?F7Qm;WZ$j6qEpe7yJL$B#SR+}tqY7>2|#i*Ocf z?M}l^zXGlnpr(SsKgA|$)uAa`t;L>H-~P9cQc{B8A%4U1LUl3Rh^Ve2C`hFtP@(F+ z(VjHCXOBW{u@CSlEWoA9QUuuJWNr%n240PMn3(m(ARCe^6dByjX8=Q5PLOPQJvj}n zn1a5)p%-J2!q#QbHag{fy`;wbXe4@^2`sKq=*Add%7HJI-$ASvNNaP_O35Yy5t0UM z?!kixUt4_m0b>m(PoLgjmku(GDuBQqrKN=s zd@~$FzXhU+ZM~D1yY`rv6~II8%rL9@^hvmXYorP6`9`7H%DGopgH{acBQz;~F{0h5 zGE{#mV;7{;=vHgV0pKCQ(F1rhhR|{WP7KSnMd(MqU7EC%OF=E6uV7tF0c-I6f`Y9k z8CFZ7bma~q1egNz30)``E}TH5W4qQ)2NxvZP^+=wZ7@mOz5yez86+|yj{>TZwv25E z<(yVcZpBjsHi(#MznuW!A7HOz;lOC64M|13!gjeEFi*HCgTesN z_}zc_m0T%e)`j4o-P_PhfSsV&c^zqLwTR%vMb>NUqzaIYOFZYLG%8@}X``U^Tn+xf zTa6_t#HLI-lp|k6yFO@t4>TXZ3@T`{3@F$z_6;-Tv?v6e5AiP{%wD3s0wYAkFE)(| z?0xos*%U2bPvVvrJpo;c2}t@@5St<JR}g%!6gR=NA_82o+gu8&&j_KY)qwPPfK5Xp2C>H? 
z1>R*!qA#byo*W^95@USr8^E!(ir{*|y?bE84R#kJ%L~}(a{tvb@AihLOCmpM5(gYX&$5478uW+&Oo@?BE5$VC?PnLJLbs(=oh-sqa8Fxz2m_LJ1r2cM zHpCPHN~Y2bA!qgIskhz4z5ihWNxBTY-@|^oog@X5>pyx6Nx-JbkOayBd%CID8h3?> zh-8;n#VvWTC%o!t2{w`G`@3w21kWAYii!!b4lSdtyV*J`XM`lJ5CfUKB%RSh%_|7AelOcxoD zH%UmEUK7A5K@}I$x2S+iMhM3N(C`XS?QLY8j@+FzyB+EzmV`B5vBM-gJDY>YKe4$K zGnf$kz}zC7G%z!}O?=aNDxy8aZVNQ;fuYl1FxIjMwqqPv;;Wcuc(VumDy=}xc@pa` zuRZcsO7#z@_&IsehC4rAhw1enKYfCC(x-5ClBG^n)}wGP6 zfbk?k1l<9H*ieWa9}#A`(Wla{9t%@nCEOe07ba!Wi}Xpuz|t~qhB=^PMh0Nd5?K%= zVK9XxheHmPM}kqYf!&jrIgC-S8vxL@F1f!e*aq}G*xlHpiuvDe2zNJpf>1IY%t^@< zFo7dHhG9(xMn>~$YHFUslD%pF^ac^x?TxnkCbf98!@*hW5c!S%Boa+WC>3clNoM8~ zusCt1j)h=%WI=L0&h7k1Z*45R8hIOT8oTv$aI=BFagpddt);krx zrhal4Hh+h+4UY$I@F^JaMbQ-o1e~*Qt1%|F1khIpp$#0Bm=sBc3Mg)j^fe=AnAd#7 zakX<%WQ{cW$jzvzJ=o}wKgh$v|M+`a+anw7Ui##f2sgz75;zFJdLTJA)TdvU2nRLM z?mV%Ictb*Xa32Vo|Mak1E7OX!ESK; zO&8#y`^UudJ458OwP3hFZ$;p_B+n6({!MI~pgQo}i<&bzj9#(JC z{{{Sj*h&dK00HLHfGo6c(4|t;r4-EO0Kd7=X86Zf(IfPT9^w3jR?s_zA)v-1fGmQU zBDnj<-!@$zlaU!*Z+zd3?nJapZX&6JDLh6qMc;7 zkF&MIh4~=3;b_E#lVS>qUPh3-UQ1;tv#*U7TB(_Xd2B)_Tn9vcm zgGh<6coP*7I;rD&lb0zZNOpo=7kXLveESAJIQ1}6jR6nSU!eLrEmUD|agj9cDv`P0 z7zDWNpNQn~p#^aNZyiFwFiOM@H3mCq_V6YnIEQ9Ye!Fhk4XiIQ@??E5`S`_6M+`$3 zQmw!k6J{5##qS9Jj3#AL{H189S*O>=*wS(-e5a1j?44TyK|Lhe=Y-PaED3$L4Z)}q zhj^i*Qne*SqmQGU`L8I79fIW40Ki_aHzbqNuH>U(EeVt7G2|oHAxYOIU$TaHVF0X8!#1T z53yI^JXH4l@kMB_zTFV*@oUZqdI{Ua@i!@niL07X4}{NBw%Iuu^ulKmEfdB+Bs%#g zSGb!1>NW;Iw9?Qq5LM_ubW}yfK$wM~L|EiXjOY}R$bYLP`VL^d9>RxHrwGK;Je05; zz$C(lklYYkT^Li}^$qA*DSnp(G^ z_ok8FRT6|^9N=r3{K|#UElP6Jo0PEH&kEbcfb_h9zaI4?# zn|zLET$DnI*QJdActu>>vr5!rTA!6G2E$eZU>vXzoamK8p-%=v=T8?QF`aZ%_)m53 z?EFN`eVdO5pNxZ$=S^Nl6ixrS)sECQkmfQ-WtP-7>fJGjJ4nf&6YPQ`DCBQ3b56;j0Ko=iwIrlZi!ZbqDZJ!?hoWS-$DrDx1kiqSxoF~D#{P= z$x$=R*CRhcWrA8Gy?jqt2!=o-z<|*q=^as6VG8t4APV`WHrXK)B(4kri+(Gp?jW;^ z^YpIZLADUS9O+Fb_7j<%FV09$S0s^a34&u*6n)$_7AW_ojZW#?O-cT2;nR~10?1)u z{hGwDxgo-S-*Xv6wZR|-855YgsK!SUGhz%$wFgQIqQwW3H9ld{jud467y`|wwUA|{ zT359seqtcWy}=5UNyHG|R&}=W4-ywUul1dHEkfHvI2a@)p_?2OTeT+|0>+#zWeW>; zm|`FUr$sOp5KhwF0*1tBwv%eU)$2DjCW zxC+1-gJM)`u=Ti&eiTG3mkCaR<6qXcX1qwG6T%%3Pi^fj=uVeSOG}8;@F*n&2yg=v z?^-#aQdx%}N?l|NI;J=mkDO4jEKpgLRgXBijLy(cLL`Fg-kf1xNis#m7bH^@J6WlR zI#*riul+3Kx54oDdEM!St&ek?_cb_yOi@_aB!?!XTHkMWw3pwykbZ?6jeyB`E_PKY zxhq0PKvk5$x<4eN!gXV$5dAw+K}qK>41~ch=;rXfx4yG0aG})|SX*r?Ak?=o30(xI zp=Bb+h=q_yOsuXB{8tA}O&K><>W~D8gGAzlW+_bZF);3tQXtA59Ru;&c=x->DhJ3; zF!qA5^ALSSh39mxBMMX`+(tn{1_YQ@MuGWcA|nIyeGXB!AQACwy(;hhXc??$3ewSC z7-RLZwl>$R7bZ4@ggvYF-@eAnYmzJ&#q!P}F^>)GsUvmWnl)DuDl?D=Jd<5W?)aoN ziliTFYIfm)&&sC``p?-M38Z@o>Nt}&k=S^!$=o8jf>_54QFL;bEzHaZ9>|pt>w%GU z`t|O(F(GdNf}nRvOs2kHmk|eK2?Zs`1Z=^0{P1Wky?Mt1RH{sy&X%v|^f(7-w7H@t za|L5s8Q{^0s<~}B(~=MFhbtKY{BBYu=pP}a9|9#gu^EP3Ozc`ch#76bO9fwt3cKNi zw4qNJW}<+nQV-S}HbL2Mbbu6t4Uhtgotw;_?y~mEx>%Q7~G{ICQQErKP4remXQX6C(A zOnwsbd@Q67@2cNI2q-{iuEsht9Oy@AAs*fZfXG0^V`#xRp5>GAxtpuM-A;j zP)*KnK+O96UKykn3`Tj9855t9Q;7gnme|9Mqi*$87t=|ZH-7YE5{dfl8}Mzfk&KU4 zxZxh)!l#jnjVN5j36R53-#eo_aE%*}Z$^2Zr1U~3`J45mlWev}07XtOONeiQpV1MW zU4USP_+nCc4PW4{2Rnu`V3yZ-fS^yX6og``-gk)_KRfJ{zmiZl?&i2kpLKJ|2 z;l>Pee_w&+_y1EAi*_{4lBGz4b2^Nas&h`87dj>(v>-)faOpD05y-)#?%*(+ zSU>v8JXCrZZ1C}s z9Z|?ALK)wN(^}G#gS{hm{z3;TO~KL8(Z~T%zIvM?cz16H3zTsvSCy5Qv%vcGM)xUM z{l%g{?T;1?NMlNfn&ct1Bm^q<4NKiV8o1OLo0gDsNm2Iq{WB_-bLHdC8J4l(bO<>Z zN$=Sy(1CKo6lfRcW^!4WKL|OyaO$Ez;+uFcHDEL;Ac&m*EtPe~fFY9a66!aN(?Tc5 zH~;Sw6hyui4k6?$2X_jmhmF8M1|vK{5i5eal7oPELu6VC2lR9bq%a7<_2i&r@gIjY zMu9ys6tA=O1?u|3I@7g(DP9N79XK7Cyi(zc~#nO)((;_?^uj)`E zELVgOa8Wi2NI28_hJyc7kU1TH;{XNb;6%%Ib168$VYX27K#)8rPQ@D$$&4UB@;i*& zh>lzT?OYK#8Q(?g)p4GZ?-ML 
z*^&*8ON0svE^Mw@o3%&tXu0*ojlq#VU$zDZQJ!i53LNj08gVed0Y9TWYY?no+bK{y z<)bez-n^Ge7!?U472%L8JqFgxzg=KRP8h3_n;kd-^AfyL=N7~arZkwQ`-hjPoIJ2U zX3OIeJ(H0M4F|ky^dHMFs0R@%kvutt_Z({SsLs)7y|TO&bT$^Gvq9Ob{~PV2J7|$g zNo4NcKq2)d6O&s}-cU*p(Jz z2LTJ9bgb9_rAoITRY1zn2e4uVJ4#1TV5m~0gGLb)>1CveiZZ}Zg`t;kpBXgCz3+Sf z|7Im?5yH%z-ziVodq2-vEhzK#uKH)s@r236q|QlZSaGBuNvo&QECQ|ldpO%Zg5@G2y)GL?`61KEqp=53wN zh#@{*m$JcB0!ycW8)7$*$Vf8;3&ue;N!L0c9-O0&_ISveL9G>5`K5CiK(NN%l@*9GLHF^D^Jb(DjoC4k;?kNS*<+%a!T1Yoz2 z5Y?oC*9qJP0_YiUApiP)K_e%V2Kap`DS7N625C~&G%=8t z+~q`i=0a>C04pb$)HPbRYkofZfZnDmkZ~49k`~~A*D-kjLC69ouV_Xrzi$@tn>7mO zNTLvgBTjtg+(X1|5mb@mi%2&7%;5O(<6|)BRvZKq4xhNkXDj3QA_267 zdX$oqQt$IC`0vAD#nF1YwCE}6ZAwnFp%1Y-lv>>XU_)>TphS?{a?wsiTV*)Sh{;-> z9K|2CdVQZFyex7wv_e%5g95vrcr3|BNu0BSs+^uEX&Ays@uh%?K8|5*y)X z!jY5UZsUfpC#O?3ogphtG6~#K5?O$&QLcc}-Hm&HRD3y6{zGTnOi5T93@OXeO3QPv z)>InkRZaM8n02XG(luzyR9tbUFeV{)1+q`zTn@#5Q2}=D$BKZUNr;VO@hR-lLLz0mvvTdH9Dqy5HX+y+hPJ= z#rjfl;9e0xgr}Z_Z1}4ah;wuVktUsu!K4s}hyJ!w%O^=&RMJvXYB?{ygreqx7E+G@ zSS(L+#Z3;$?YQ)!5RZot3@F6!B(f%?7NsnQ8^lbnQ`gpB%IWHebB-4I!6}_!jFuDZ zwK>LU+#=9pq;0wHBAtx5nz=`nEN&vzfHh zWGI&}69N!u8|diquVS7!lJjY;yWV&aAPbjD9h3T!z@Mn%YBH9r(nKxT(-4?II`tQ{z!=lW9dzRB}Kw21Bsi{8cW)A z1J(ab8jNlG;0%%*q5%%XKxlM46Rt!v>m5UD;k?%h6U_=H?hUw*p@4Lc#Pw}K@%)$v zQ>eFK>1)2){_PPf=E2t^ti1U*Cq5;u+1jOmNhdTMJR=4n-=tv@0psO|TTQ{ez~2uv-^Cx!HJchs zBLW0vTUK1!Yk-`*Ap2D6tqIFed&)1L2>Gy6H=9>PkdWbZ0fD+PTypk(k|4PG13+&$ zVv88d(iYLIjwI4FG%EvM0*>_>d{Q(Vtyp%?POWit*ngCrEsKScrdljox|GQYXjuu`vshw9*a4<^aXeE2zUylq=$QB~lrkp2%a>c6 z=nh2qQUp8%9w5e?Pu4k6xt>TJa2Y|8{A*$liNLQJ?MaU~-G$DLf^D&cq$i*6>sh~{ zHz9Udc$TQUyr#txL@M3@5F$aOa5>nm>Sg4}04CeNF6b_itbo8yszgBSh|T(y*hC^J z;XuZHA(Mgb**G7D;Ds>TzJNjHShkknO92}->R^V)^}!V1y?@^^U6{2JohbJ-K(r$j zKZktz2V?lzr?_bVK`)400t%Qma##2^;Yg8s10u$D;6ToVi=)hd9E+p|imI8=^)-(+O?teRnH!;w*al~$ z;|U!WV0~_1Q1>fLRD*Q@fiL9IP;Y;9bg;jT4aaDDkg+{sOnv$t9`CQT*MJO!a&rR!vCt+LU^St{>WhhCJ-U65wLkOt@s}hpLMbx^)h0 zfxm0!&&NIz(&QxWo_I4k0lH%miPPt6{nWn>?H_Lh*yKuCgd3IwXpD@CFArnwm_E~WIG9Fl~`204r2+6swfT< z@&mN-(ub2#az`8dYec|?kR%}FypO!ti3aVHh0qCdfFAjjvu5hM7wz7yzDV50bLi9j ziN^NfMf1@0dctAHHCzAj(etxriU^u*_1Lj!w>oP?S1#n4=TaY|vUc_sCG zj;WVvyLJ1-r@@>YTbT)?uyacaWZWADT82NBhK2Vf2kXj{|($Smv9p1E}G{>HGtUeX9~y)!SC46T3aL-MZw~lZQk&&>j?~D36+*C5?Qr9f0;88Sr zdU(A2w1H}6Uik3+ks*GX(J5&Ue`fu#yh4N6LaX{yH2(6I(a9d>y#ssuvTLPpy>V|s zk{WA}wRYCxrP%e%-A9fb_qUxt4^NsV<&im2?i&28(=W*&c{q4N`qB8H7`s+(|W>g7GA+qJvW0 zlsh-isWK>`%{a~4c&pf-Uq|nEjTN76n47T9zcfueA>1k_dNOcbSGOO)N z-y2?Io>W$-CyQ60Hs=GjC1X zXqw(&*hF0WD678wS$xa3Igx^cJ@YmfEBLl_JFOj8XAHhz20h6=q|kl0Z;g&={h)|i zc7lik)5Phpgh!9_+tM`#2Ilg9THD;IBh)neVB^t@6x0G%F&uNh^vaeb<~k&Zv0WM* z4fkrXQv=16%R6Xqj|J`5VT27o7?AB6bDyxJwUsohlrX!beY4!;(>xlB6&O4C4EMzLS(YH-1}Ea4CP>Rx4&-@A(3y9gAKGt6CqFG1}tMlyo$tGU0q} zT)fr`9uW`4 zZhd9mZW~mhv!!B69}hGA@31qPGwbw1CZ!Gh)nZF)g9;vcgqYF) zw0pSrRij5%)bPCr<3^NNMsMZZckTH$T+^J^-eec3l^0vz;^!Hld&7^OtI;f((0$%E zV4In=L&v__$pmjz+p;CwZtuQ2xRqug!m=E84e+<+(%>8Sb zwtXYg?zA|T&4JzwM+WA0@*V4%II}lwBD!6A%$+qUtKT%6rRe`nEq;c`YX#Ym0JR!F zkD#_OcWZeuwKvCd9a>~nY8d7CPnD|^-WFz#AMYoOmU%kZ(Nsn=^=hgIsF6A$;X#Z> zlZLr9OrDZEDsiQ8?W_uBodMH!Hklu`SCCOi^Z5yW0~-g6APY$k?OX*|t5PQWaD88T^GHRowt;`C z_iGy&YrDjnf}&&ck-TZAXb;~pRpLtp7u5x^?A0BzON_JSmx`zMWo|VJ(F-o{HgbB) zxDy{Q$(C?y%;}ZwdKDsb!}ZgL^(#E}+ZppHDMafsC5HnVeX4pA;)jDA^aZTN@cBtj zF~Z0~h3M&&{^g)0IrlV>QWnx!z0b7+>{&i^yC>e6FMU0u)Khd|@9rXAzAfXY`n2+v zHDr4x^NLQ4IMf@J6t^$U)$i;@=2@Mcdp|F+K4~&c#dhFy9` zyQTduZJeiTBNgtVaU-VVzt+9sovvQ&XTsj&$bLMjziNE+zFosRd2|t4;9=TgPiHhf zoCt~+t9G}Rvrg&DFxe6!v)xy{s`{J#8@nKL(;$mUS!>T+hq27?;?X4{?bk$0nBuHM zui1}>^ykYpaXx{c{~ePrzI1sNqirxwriQ7-)Z5FlcT13|9%*Qw^-d+IK*!J~!Z6am 
[GIT binary patch payload (base85-encoded binary data) omitted]
zErCE?W1ik3UWIb?@38-R@7xxOLWI2wE5fzgheq3Bi&g&xjehkv)=HkZVHq({7Rh|0 zDB{_v;1Bg~=!-YoFIG6Lr|F`61zMA9R76dbu%J-6&0oCrX|V#JOKSLtzDIGRd_jo4~3VL$n*brtA^sOyiM)MJ(mHuIf6L-Sa!VjQoMYy*2|5(QDie;P>N+@ar#eNeYXF0k6KQ>q`%0OSd zHpG6TAZ~b1lr~EszQ$23a{(Zsqtc&$@!sg!3d=cqYozpwqP=D$)W;_%%>N%B`-Adh zdker$bc>GR0SokVGKsQZLK^&^vtoZiQ3Tr1JdZS5I`gFHt#ZZOr z$%f`#h{!=7GT8VCTTnw0wR@SLpZ~vqaY3n+vKe*@O*U~Nj8S~>OAf`Skgxt%Z-{NC z_!&Bnu`JRdJ7ziXg@7x+5Bu!lmG4c=QLcFh8*(@7l9yj*th1dc?ke5N7wh8H|)Js1R{f%E6PG*>AZoiv{WR}yJIBqCj<#-Lp74GX4#hk{xUO&6I_nYv_j z-npf9L5sDk(o>jQzV!pS7!5kN_sTelaWN~5XQRIihht$7q;GxFTF$>`K7c>{)-wtE ze7Z{;ANjB;zczH_b!jSB%w|Bj7jH1jrzVCjmF!6Co%0USF(OlCRdPN#lnt?4Lr#do zR^s6@!We-JsO=oEKty;q@w?D4lyM+|;`2RwK*vRB< zSuXhu7vBY@GzOsHiyu(L#wW9V>>c=pI;wK_ zmwPWkx9?^X#uB^+FvzyrD|HGOYrJh^% z(1aN3hTD3S{vg^9Nm-A3w+;prdh~a8>ezvS+dsvBD)r{n50=<=8AFELV(~#}79v$=ZkcLw!AqRDH4Mf&Q#Z+?@cAPVi5DL?jrRHyh35%Y{9dNEB1s?4D=mKeJEz-OAj2L zWcz4Ni|RGSnBHDislp1Eo+X-r%oZ;W1O{5o$e|DjfQI+ojU^)`Wo<X|az*wVbZZ*#Lr=ClYDisYUN{r><7{b`Lv@n8#Xrn4 zPJ@;l#EhiZ!j!jqEVFPu%&<^_VPJ+uR#&`!y_=*(07&`*CtR2j>QQcqhiptR-oExx zD$$iF>Lfm|1d)&yTZ%$QEpDsQCY*7L;1jCk#>~$o$>?%LHb45;9jaR6(N)n(7cG71 zwp1R_=-73Dr@-zul|^`V;$kx|fAD{zp{Y_wX)^oeG(Zs`0)22NUz@D3<<6bBV8VQ% zT6BUvkdo&6(mWWfg0@Q8i2i=`mtR- zXk-*ke6pmy`A$+U7YvtU`HFUvtjYc37M6eeRwm34FHnY^o8~4eWOy9u$d*Zdg=r^u z8dT@50B;w`n5113ASUp7Bd2^-QL-va6l1jw)7PK3+ zZU440cX9#dmNm4HzO3n+!yun6e*O3X$uBg<`56Q0tXmQ+1?iOaZttEokXj<3?%k?) zY`RpssrH@{gdWS|T521us9al9p$&U96Q5!RnbKMV&Mtw-+v#rVjqSD zp)n!Zkg-I%B;fO<%kZzn)DpBB6b0Un(HD{^-;-qTpL4QsVF9-D)b!>b`fE_6iL-=# z7V=9=U9n(wAF>jdm6F=^)g33Ppiwk!6h)iqj5zldk5)XYta}h!^>=Wgmeo_c=2ymb z{p}Z@h7CRbv*z1or+)d)Wp~Sldkim|e7AYbfeFJ+qbv)*J2!6J>sj|R)eUvCoAiD= zI{)XE?Uv>Hc^+Rf+PIm|th4{>`mf)9`r(6L}1CuYRgLMcqv&YH95!Rg>(HgDpC zfgZm_e893(6@?sWuJ_qJV%9}FTHdw%Mqamhr2Va51XBEQ$!1i zyORyMHPdcmPv%qPGiH}>fW1ilZwK+$qBC&8&hQ`6wI7NWfR=CVLr6q_Z40WU+qu?B zu$l1ZPqvJzDVQn}tB!Tt`8%&g&37cnzDEq)9V1tcrHPYeVB-?+{UIpZLEZrq*rvy)UNF1CS=;cJ1nUWP2jp zb8>_;hGaa@Zl0#q`$@tBgCqJA*2v{!iJkl8m=*l5fk+$l?%Q`qr;kq?G&te3(GyHp z2V_4F?fXd!nKd$g8GLQeVTD28KB>E<8SZ~qQ_m4fM4%u)dm>2F0^^*pOb!`l=1J%q z9jRQ$Jv+hX+$RW@Ia;A;37{{P@eanUX;@80xLYfz9c$;Tx~uI|c=1AEeE;A=bg{NU z8XF|^IPX=M&-?UH>0pw&v6>_K=ZtepjF&yRr0XDo_w)S=JWm;K4qgLHn1?9O#G5;L zzE>!X_(HgI01KYLM1y58ZkRWGcm`m}tyS}xGxz$zf}Kfb<`!m}aaf^!gB+)|LOEp% zNWjS#ZRXftb}I|%pW@-`>#GC$l!u|4x)9r8{ z+SgUu)wKo;vHMss)qT>is{B!Px@1#ognF&w4cYqaUjs}Hb$c)<eLGneB_?N zjsiT-h*+q<4j3`FY(2zxeZB3?o1NwO&7gwpyXN`UYKsL(6{+WHwrXr45Y*BJVX(B? 
zA}?f-ZLt=}NR239&TtCj`rqzpX4ND~=yLHjC3a;O@}2}&#|L>LaBvL=X7UDn_*z2E zqlAD)&2bQ#6h{xNp~Jn!n4<;X-z)rudgX9Nk*dGn>Gt+STe?^_j+(d_+L@f|8R!z4 zKvjO*cI3J>NWUMyp5w+dn^!+Um@*kO>gb#GPpBr=H2E|0g7r9J$~~LHVA^$L4Yv`0 zo7;|17=aB3Z`^ZayPNc7%*?=rH+IdSc-0kAI8V&;ICyD;9>9T_nE%_4{kD)3oNP3w z7?_D0*316sWbHV+&-R%}jfO5OWW)oL?5-mM(x>rRQP zvx@%MpQoLCEI*#|v&lW7KrVGY5`5!{nrlU2E2<8TXi9XUxZFb4@*{_68&dFtXg4?N zU!8TH0JW{2(=ycdd%Pq#e2w(L`72#0K|rz|0lBpUeo+6Gb4hk9hOpT(86nninE7<1 z_Dv%2-S0P4sl7UQtJq0IKf0^InCWRaq9Wb92+-Bnsp>H78=cznQFU4+iDuXK#eS62 zw3S-i1PVkZ_i`PQ)y_i5__p%#iI}o&!IZL%Kc>`uoI1%3^@{5N_`Xj0fs9AERux|r z_YMox9{&@1zsNj8?8cz|j)*)Yqg!?{_i!wY1}zwuGSPQ+=1l$foKcfa)3u%v>uzJp z$Ut>l(xfQ|2dfL{UU}rhyU>YMaS3HjRw=eirNK7AK7wB`uxcC5vDYFfPNOBK8?%G^ z=pjpAnow7}VvT`eAKJO zemQ+OwO#9;PB-~`9jafZ_YS*#C{WkCpMIehmfgvthN?aBB<1sg!+kzjB4Fov$Sz)Y zQJa%kndGBrLS~1~S|k~VYUeo|$#2RYRUHJ;>!DT_>v}()E94U~eq8LF8XDMKCuM@3 z>>n`i7fkol4HmP{EoqieTM?f3M+?J=wV2^mxzcj~oLzcCLX19J)EhMYVXmf+%P+VSb9`&({x-DcV zS<{!*OYACM&y-@Y_Zj3nSWWq)on6X^o4$&w@>| zh|RxRUOj2({+7+oijROa?G+(s4<%~j08eOM9@60SLL7ik=GHCIjL_56?){VTsOqdu z%!=ASa=nX5Cd|yXLs!fR_};D3>1yMl{*A7JiJZQ!W^0q16E+g}{SLB%<|Q}Wm--m9 zQ6HZ*-Q(GGu%oC+pcJ@nw|7AMm=BaV+QG|MY?8@?wH@WQZlSe#ZTLb@{{G#=6t}%z zDF?MRkxJBzGaQ@oq>WjWm@llF2{J6Vqn~?dYQ&9~bL}>&&?M&98|$52vX5$|4A+ty z@d{?jx}w>+9#p5tWA~R&5A}k@y7M(5NY6zGG5`R=oV{jRTJw6Lnq&eB-F zetnVEY~aokvCk&Jdu84^ej0L)wt#hwB2Iicwvq_w8U>ZS=seF5hpfVwSI4)Rd_N3| zOv^g1v2W!ExI21FjZPsjaQ}i`?iv)fnmb}9rXKXzXL*UYUgLR?X$)z^o{ z?~eFZl=@6Ih0(Mpa=_I&`T-HCC08P=>fEP|kNIYQ%_+MY+uLHOyeu5=kIS&M%l#R< zN(6a*Xu|uuHsLc(^LsdLz*s-jW$PZK>7_J7-U@Dii!ss)QShXsEiK9o z^~ab~<=;t6G68%&B&VrjxsDApsT=8lbjk2H1zJtM#$jm z-N+GNgWRn7u^^X{XN^4!IrE};$GX!ARi53}@mPf#-6+3eEv;cz4;q$8eZB zgV5$Ag_m}sVENO-hYyXXPv3hZc+ff%^D>6VQguioaPab8UlC$;x-!c(d+EuH@u$B< z^Vky~E>W^i#N=swK_82l!63cu$i0@x=|-lO{czN=$ z)2u1JI%iY$yt4AG#|dr`fxF8(*F(6J$3*~~hX zigRlw@sxySi~hE;gV*Xw+>uOCiak55Qe9Eh22GC(dH(FnjMXjhG_4rxv*2SzxVynE zkRer;Y0^47^YQE5ru)0P`*?e&f3;qnrM0ZHkP^I^U%YxCU#M`r8k#vzr!9i(HMob~ zpPO?gPoBJh5dAe$)6-o>hn!bqxMD{9g^_Q=`&O*ob$kfLgzoaJ5Isz~x)N>Wd1!`B zj9GzdhkmkanbGR&s@7x4hw}*ZCN2aTGHe&4!I8ws^*k*$|J-4K2-Apzm)cY*vS^~^ z<&RopTEo&k$7`>(>M$|p@aEJJqclQ#R!vWKPN>d7hF}%PVc^TE=&o`ZFQNSS8rMAR z9u@v*nBIBRSVHpx>jn_7`{z9D8s8z}FxZ&*F|%1ABS&ebuD2`Gd)h#8dh7?Z<@9t; zVqx=OVotdQ$F~zern|yzn%zJ4b4A^&p*dmwt7n(tZ@1tkhHxiM1G0o<@5|(H8jz2^ zJ6iYt=ziT*i+y4p^m})o^jAsj}=gE>ihYuh2B115Hra8&3EGsv1@uKg;nrI(* z7e9>goeL-ho@I{Seoj`O8D~0U8q80*TDg`)wbfm@aE?nsfJ4setPF>={>~lU$@up~ zVE!b9xK+B$$N1>cRt4XG2#$4C*0x6*C>G)-@>mOyiNO{exd9;79E5XH^*4iXZnY0x z^N$3xdxkePKAforCFn#jY3Qn8=4>HAm;lsv95Zkq4zMUq7Kn{)hz;q=-h(NxzWnV*BQ6 zuE>ya&c{K@6&?)0<2laD17@qXIFxgG`%6l2q#m$aq6>pV3PW=VF4d46)V~=XTN?8Y zkjwJ4HlFTEec)X!7!;b_)+nU0L^P7Y zQ{`Pi{`#7h=kb)(q!H*fM61BKd~Tt|y7EK0+j**Vm!qV4x~1e%-X9x`diQgq_CrFm zi!$@Aleowx5m=bT$9_;=HUCI6LeXZgUpBZ1;@KAF)})0Jzk{HS2f@&ils71tnEf3= zZ}93e_R1te{7w(SwVu-?J@pn(`<%X^tpo`=v~Or?pCK>6?%J$Ay>(2S`#AjK0k4wk z)g2A{5pMeD#clSNRMT-q?l&P@wg~d^GAoq&Z`ZCONA2C^ILZ992cj6H9%~_!W0yLY zoC+7~;LtOK}&MlzjLY`VK+FJ)V(~k!h7m5I4wHa-l6-(EJyd2Bri$5=s3o_;LV6SyN{Bc zzl1+y7+dt{No7q%f5u5w9|;b{2Wj@;!dXORg)A>{a&iG}3W=INE_Px@z=&8qv*{Fo zpYNT$Vyk9Ybk~=2`09i2@DjH(4k1L4}K`Ct1Z*WHRW6j$YX*cxf*!O)QdxMJbU9$ zgWuRn60p>{{8W)&fE)wDx4mA}{HMpJ(B~xv?LHjMSGfu9#7TpQjAo}Dj0z<0P><~^ zT9U6RyId#pXg-q&Ht$@T;pEus@ng&-KvdiqixR!3K8CD=X~f#{ztsMY_wcWdGOTjD zQ>c@I#_z-X=vG;1`a~#IK$j_fZGpO)Y%`EfDiRVo^kMGY8YTK+C ze8C+dVKEv7t~yV@ z3a4gu#BV$iAL+cEJG_VU|5O3nw4v0y%3``D$4k7K3~B zn7S`xFV3n~jw1B>Q^0o&eBoluXdSANL#_J@GDu-*2^V*^YR4KdFgEtCgAps zTel{;y2Cf!HTt!*jMa_TS$3gM}qrWqN44DZ70K7`8=oqud8s9(mcUV3`(!yicesi}tQ 
zs;U4E^OMiVS*@a=o#A2_U}`1qY(BNLbd7N3k{PvcWbutkhs%hp%?HHLpSiQ+i1`57 z%3HNp;#HiF=K30bUmmrfsW&$_QuXqZIn-#^czd~H4a^FoNrxpdO+E4mWfPywU=LX` zrehDTeSfr0w?_go$a5K+lsZdL&Rd>Zk~YMSG3;5Q_v$V9K|t}rb7gZgo>1OxCH1IV z5-U1B#xPE5>~;>$dB014U-4hY@d52}9=mq!`)|r}74g#hZ`7MLBd2rduyd=$c;%MtZPf8Iqd0bXx0vQwX?cAxjoFIyy_|OOPqCX7oRO# z!1K&w9#QDZqvq+bG4@e$^*6QIPC-?We#;xAd>oO(^EK23Uhhlz*rVVeRk32{9qTtoRIcwoZSlU45s+yQc*T8u zb6X(sWWd&@YwC{VhR~>@qi#ow^v72dMg=?W{Qad>U4@lY0n}Xu?fI|W17pKLGW{FT)FH$Jg1+vS;cLu+CeZ0k9zIwG-_mgWmI^^ znws?)$<`Yc+$f@?kpqG6?f)%7E-cF#P)EB8>EaGleolbT<;8Mcv6{2ypnBD+Ny|Q# zmvs6e*1HCItUYfqLD&0Ucx>`4reKp1{nHOYaNf8zm@E)7^i)MqCjd>u@JDF#Y|OX0 z>i5Lm-4n7vG&3wr-%y7eAfG0&sNC_AIP#ipq+bEWZ(BA)S*KW-amXTUK(cI8>( zmj3RE6M3A<*y~g$5^~GURAYuots>%fzsWZlzVlEJa6t%`mTC6yzZ2o;Ym4?-?5%_W zd;OYzufx*{IySg%p70?<00WaZs#=S$G3LVJ+28ljOIbi^<1BQin6Ui|sX3ArZ1XSX zvTSvKzt4TE#4ibDb%6OpYb&hkv=;Gq>BX@+{IuV}mf{d=ct zq}B_#X!`FtrN=RFQe)_48A9JKcBGBiOMzh)brw|XHD;yPA~X<|YA0mh(oL`KG?CHA zmnaht1QPZqzaF)d`p8gbsosB=ofR*T!&WHT50EBNJefrB-o0rs;1cNYYif?g zRq;R|a%b|6))pMCbhRMQ5rkcm1pjq^)9;l5j6HQ)a6XAy9)Gc&So%8c9WCC2*jb*c z4kM30BU1j3nPzpQRY7gdv^2GA%H^~8=uYtAZz#|*e}xbj1T9Sb}JJgMT7C3eT*O+b4h!=&sT2yG`Ohya|2a zJU1Nh*6f3GTg4KMULKj&N+q$`e8Q}Ocqji*$>m5`l?N^hseqP3=us3wNEJph<7(Ek z)sT&<7O=zM2^qByqG3nMT3o{vs-_GtpMr@DxMGVLZij!}#_^S?anJYqWH<{T1#Qif zV5JNKlXuju$g9+V+dnUxrA56` z|DpOx43Yzp%=E%qf#^k}-&^pTd!y<;+Bh?*&jam}3D_qwkhP2&OD4s)dR76_x3%mn zJq+gWYxXbYzXU82p!*}5`a3Yl$?&F4t~XzZWB;dN)_Y_#wI4s!QppM@0+l>iQ01@YTfQm z(}zt%I<|}O1>O-5HIMZZ@XUeqCz+z2Pz8cXIWHs?Meq`)k)lp#h=0vS5^^7K5}wiBMi zf~RLH*EGoUYSDzhzfsvWA15Wy#bA5sLge%emN}FYc9D?j;4+>wX(J>uOA3v+y}pKe zCFh2@?ucHR%A5`qyK7uv}8SxrPe)E|?VRw#<&=V0d@yaSHKfQ*< zb&dK-^JD?T$VV2PQTE*>?o&kd*Cp?!>35=p(;dKKZW}3ZtaqF1s@?1CQXM$Q!gx`& zQ6>X_H$|4wOF zUbUA&eiG*`ftM@Zq5C4bD|SMmQvl2=D&1Xa`Z-RN=z3nxkqJwl)rR=umU@A`bi>uLMC^?^=nR;Cx+2mg z`4bmlmd_0;Qh7`h*$QT7Eo>tdm5dnB| zTOGksrQV?cNL5A$Th$br%MEDO455!@LL9kmuhQ z)+4O2eR98x-PPOE8!m?j+-OkxXI4FXZQ%s6tMi9|s{kur5fraDG+K>BHn{#ZT3c!d zI%A2hGz5|YDcKszK^O==wuGo%xz5TrC;Hu$bI?T+0A{r zQkVPmdiwVQcK_HB{P%!n5WopHA7ZmMy~ zoOXd7B?nhkt*dAN!p2`5Je0dF3bgjPmzX3)aI4P*kcKov+`q;M>BSt4Qy* zb9i$>Xm3e)2a>y5`9}>_68g&6^yYOCSx2P#D8FxmjWDW=n%+G+YLpNR{NYr&p>Om; zQShjLrVZUT%bjsqjIzG)ae4!QeJbuwz&w4mW4mR?MvssZpcFOLJTDHFBe9oKp=jd|NLJ!_@Wq;}V zpcUSMHX;(LU!H~0;}qw{&o=C0PXt_dc?;+5eu8`5mxorR$u-3XGQVWfa7Zlr4<%WF z=$xW-hRR=jY>?ukewEanK-epSZtWz|h5GVXjb0@C8?;D54HCTvJYrYTv7^YTwVPRg zUp?C*S)QPA#H$`8>kqIET{$u{#VAl${mLi=^^qNpT1X{fAc(^Z8V9=GzIk&iaQOtX zZ3X84X`h1{D&LDv%2~F@f7zQCH}At@`RXSJUzMNSH=4Lp*ed!Dg=q*_e($TF9HaQj zqMA7$B9Z<~!4D*z`D&N7=ZS6mUP{rpzilM7V}-xFpYUa&b@kM~pPoxaMW^Na?gLat zYfBRg#JHb`54VN_^cv5Y(U!CO)p#&D$_Lg1Vomc2_4VI>#{NGOyr}^w(p)pgN6RlHDbBwn+5dh1gAC-71kXjZ zL&!os79o!12@C%pD%CR=;c~sW`6h|@DChI82VqG z2gQ5sRS0AVD?5>%PhCjk*jAA8aYXiC@9~fFkd-GwL;KJv(ft3-F{-RiVs#STB==S3 z_LQGI|JyMdWyw}gn@cTJ61(n;5YyTz1i7z%_3sLc-Hw293qH8}A~X*?#JYa{CdK!S zq=&v4bXj48kEN5Ydd*)4tc3#brH$L5_`pO4-%|9(i!#j=iZdxTsIW&JtNwFp2-lp^#m>d>fj!bxUzFsVGQmE zNi-RPy?jTY?fcb@4wSF;TtmfR>!B4f>gw!^m89-p{pKm+O*-6{NbK_+z{<+A!L9k~ zcPJQp8@gqp$r3#D!)#~RH0Ycppj zr*Lx|p@2jLKvcaVB+G=edz^UkYXdQXE3o0}EaJgNs)M?-^hdl?>M_Z$uHbsPsyd-L z_E#G#&p$<%?Vr!Tk(KahfIs#ZwDYN?q?jyV`jjl?f^F#boBYZ%44(8}^U80tvS1|T zXT=#Az5X@Lgh^QmwG|RX3Rgf#UsKGp!(j@KC47@dbAw2?IHrm66FXEYeqz99Hv(XK z4P&MB!9o4`@=;^4vnLRo>)I7Ts*Y_e9F2vlfAwxE9TvA0fk+1{c>4m-Ti1vBDjjMS zpL>|xTfP?tvEFWJzoAIiUPSy9QOL28LXey^ojoJG+=-Q5e9YKIfYrB*Vm!jgaPL*Z z6Z&4rj=U0gi<_{Y0jXCJx5B#k-?%lX|2@cHLgbtjPuwO+4KRJ#(|y^=5H^p46bLCH z(6_yg{ajfg)g|>Nc|sF88S`Xrpb!D!XH8isxCl-V$YruJR;;)yeBau&mofU!V{>a( z2)?Ddq#{7h4@I*)T(F_8Gv)qk;*5W-4|kM7V22_91jZB}{t1jLgg^A0mh|md5bsrl 
zY=WLfzwnwoA9vayDhoP#pY(rRDu@DT=wa%^Z%Vg11fdv)vK4ncy1W;Gsa7ZO1lqRytXJI1S21r*^y^Kbf72fIUZ&lS9HnwM;fbZsM)sd zb=c9w$EtqKgH+{wcd( z#gzST^4qm4s9y>n%TCH?4H_YaZ_F(s7!$YVjuMkme7m6t+@9L6tjckoK$xhxx&+VM z0Wq9;!&%w}m(B-|nMIH#$<8DTC0{@#eWcnPl@;JS9A;O5tg)E$)Qj-`B5%h!qC^!+=rInFDLfLi)-ya>}vm6Zm9-Pij%y9}n4eJ{|jJvn1;x z8=oHFs{H5;*;$>_uuqjI>oErsIfGkpz1zW*zhMLgRpsf#0hB2O;tO_6k+vKmn4iEs zsk%xer2A{P0OsJl zZA9xq;hCOJbixiu%Z|vqPuHr;;wQq`#1 z1G`2a?=olXAJboS9e=EM!Oy3)-#Shsi*f^Iy&=`tFj^zb|fS-D>vm zzx;bQJAQ4*FPHx`{V`~1t0j&1uG~6tbmFlM&zk?IOZV#PZzCS&+__^JakIEzS=OkS zlsm)A*DpR2_xFqU_Y^5eD)1`S?#Zarc)h8VhMU;d6W!(EXL*V|(oX|1;Gtm8YkVlV zPUd$s8K-Qq{}Yg;i%4f*nVw7T;z-8*$7#{TtA9NkliCVS(|@?RE))SS33H3Cs;0Uv z>mj~X0G4T}3eTQ174csz+?Ah{|3^MxL*{7tbNk0cmV;3V439wU3mp@^`oG?Ak@?6X zLgC=hx4Ok`kSo*@$_9-4Cui#Xcp{sp;;GTUzqyms`NE>29HD_{fH!-iBQbZO@;U#K zer+j{y~x|#xP2ga-o}Al>wg0Ocs5?~7*ov5d=Le3q~*kUCSqY2hIEuE8gx_Ua%T|N z8;myifLD3a&u?V(XG0O|uYdigmlr9|hHiz0g$O^K^6+TrJd>_X8by2#9xVLyAhk#j z+G5T;w*6Q-wJ$-tF|K#Ey2htZ;aPrBJj*pkcu$)(tI({H2yQ9FmQOqKldlM$kq3QK zHVQ;OhZ}yn?H5aG-=zR}^xQ=#S+mG*EEmxfDXH9;`e*&)FT#p+cBH0sA9Uem!<8E? z4>0;gSH<6+B194a@HZOm2hRTk$=p{)W0eosS0=e9RKpE1=OnH8eaS!1{l~ZZKmfG2>I4sO*pUfsA6=ztT!>3Q#dkD?kFt`YdU!t(!^3unAVcSjL8h^1HtrVY= zk&&5c1N<){C9!>w+ywYA0`E}Uu0%9DuY)%DIJ6jtHk~H!4qN^KFGo+_h09f$l|zf| zDfGwI_;4_p_>tpR|F~D| zzjygW<92!_`@7#9kaP8=Md`5E6SwEb-t-K7WZ^aMhW|+SiVwLfqEqZ5bILD$6P0Wg z*H87Y#!DG%X&L@uxYK|UM^l`3u4#NU`tDBKe!I$JUZhz@Rxg^kGU~-8r<%OtA1s_| zZr5J=t0ptoA@fnn*rIUv{nN76SZ@1&2>b51sIIQ-u@a4n4K*raFA%`4AkBaUK>=w> zwIK)sf^?+Vv4IT;MFtT88H!4;V?z{_UW5TgL3$Y}!vMp;x6WV^lP^zR{&;`Bd8XZa z&)H}1wbx#&e{;D-oyRb{`R&JSPVTo;@%CA(o7pmT$;|5VMK#V@On(Wg`@9d%``he$ ztLi$^2Y1wQcwaQ*^UFKj;!bj_a)q=;d$z}yH(%&%zRT>aV6AxlO)h@WW<>7l{;mXS z`}{+h4y;E_9QuZ779(}k{oDY*Ahtzm!$&!f&LP^KtcYCKx7IpV!dIU|07~2w?AExgTSv4Q{Da-`}!rS5i_^dOBl^ zOY0u%fW$}7^r+kRCb%!EV+x;a-ZCn){VcsB+lTj{GTY-2N37SytziBe);=aBrtWGo zRsJMv&b`7(%!WW-L2qoOaqgDE%o!bA=A-eYL&F||0$Nl$?{xMkk1fV>;kij}4yiEd z9;}ra@vLs=xYfrSjj~pFFpujnsis>6deTn5{yLKC;dtp_yy<8UulDGy-xOFAf_Q;U zNpA1p%9CuDk*@W%k^=*=T|wN2r7u|RFS`7xoXEqY zPh%_9T*d1|PEzUJy8ZWxlOE0Xkk+#?W;LaJKU%$(ti;jksY9fz(7d7di00?zl^`oy zeEla1#Z*|Pqa^*g=Ej?Er!2f;uw)LSU^utezSO}#_mTz2J!F&eId5wtHN{?+MQg4v zxboHf+wa?Fn;C@6F_T#n?WCq4l_2HW5R_wTdoH0rZ>wBszsnL4Ro6v@b%(4I+E@8p zyEDSDaQ4u*%gSVkQ3oz`igZohviVYxwOh4?L)NQW`Cq*S6FoMw<7ZYc>Kge{Wn4RF zqhUdA`73t|_e^cNrCEh>7Wcd%-M8$BL)LF}!H0c}qBwtT*Uq6>?a^I==0|)xCS+uTzTm6$xw1l!_;u5-Z{DxQ-J1P`HH(V-g9wV%E?AkZf(_M zJ59;7>Wsd@i1^doSfgMKpQWhSz|9or zTK-f?`^b$ol{w$9-#%m@*FDglvWz3i z28Y5-3rF+CJD4lH`Gq4=eC?iuqSv74>#A#)UE9G^bf z){>fefGM+tRl0TmiBmGSs=6!GE^waEyPY!nT^3u^)+I1xsh4V;77E1s>nhwGWcNH_ z_Btg9Js9p6r1mGKX34Ru*LBvh)wHHXHI@14*>8Kd zjAn1VR@pw}nzAj_T{5WMq{{l^V5f*F>%8IUH~si3_EcThtO8TFbG@me8-RAF62yy(X&lJ&swYs~N*C0V$QP(UJKyqkyE!-hl)**GRH(jj}+w{M@FI zFGBt-E!P_jygor%y9q=7M5&SEX6)p;N@jG{73#>Wy43M>!7ITXDP874c>CF05BiqY z`5Ob9Bm?UNJa{@0mHEk8@MhVQwcE4Po+-=aJkg~KCVNb*vF0VF*c^-95%>1m+m{@l zh|~+2MyBn(4EdXWY@Y=G%3&vYo}zE%O&z3Sc*$|iaXgp zn{pX_g0C|@HrEOEwl@|V#TOpF;SyWFF-vd>XIoL~Y1Ty<@04`G{UQUg1+(K&BaP_- zlv~&4>emU}=+7&3d)k`PHD_YEwU7dxZDy^&t7w*+SS^2U$TESceZF5X!QG-*{^k+x z)#m2YOA!yTLF*zGeX1WhI?ylZBdA*WY|a>Vs&M+6qcj%WO#NsY4NAmBk2W=*Xq*E0 ze*Rn-ORl`2`Ocd84qN-)bvAEf7R%mwYw8{pxVryR#FJx8mslrO^JwPziuTx3VaI!W ztKPKRJh)&Y6zM8=yIE;<{!ozhdh4r$&M}-#@s;xJKOZ}EdPe`q(DCvmG@%8mh3&sY zT4pvi3w|BimSd7YZ{`VZ3^~HG4n4;leVN6KDQwqPE9zudvd*1cl4t8E-#q16sz{<%?c5pMF8zv9n+79{1FtGHa3eT5e_2NYGE6=`JEHqvC=dmo58SrZve8 z&{;c^D|=mB3x}tz7j5qi9F_a&^O;gk3f+BC8|PpY*ERQeKX+*qchMWxBpUOuANAh2 zkhMkb<d!cxr^tP| z*evtqIjh8#5;V?34o6SLqj1PWB(aZUs7@C=*Pb%CSg_4)(F@juQMxIQmu-EEDKKs% 
zWcud;1^TvI6c6eoEEv`V{fC_RB9FqY!Xn@37J*6mL(?9z#YS@(qZy@bb`L!yFFj@k z@-jAs_g-q|#8h$iJ++x=XOu8f&vDIZ8mV@doLFKsI-Fx)|9*U0wsdu(M~1_=im^1= z(`+XxN`6}8pZ^#*fjJO8qejpXE5HHnRM$f1EI#{Xudm93dyG>(3v@ibmlR>zAJ5V` z`_gU8D|amTUa{*b29J`md3-ON$;`pOG%_IxGC)+4mU!+hD@ppJyxd;!)NO30$a4X| zPWX)6ow$oU^Xk8i+VXwp~?ACs7<1-ivI z(F_a@64l|W%P|f3e8af!O*${zE6eb{C5dA{e5NbEs1!e_q4Ds;2i5ZO^0a(3=pSQc zN2A+3WA|i=!X?`~w%^N8C0VMC+-~03R~{22g~!qpMAQvZVA0XhH73xe6J58i7>HR5 zsRNbPUo@@z-ht34{O2_)ySZgOPe{-J7pux=I7yv?>>_wujPW&M9&=T28t?ik5#*7m!1?-7x9BNd9* z>hM1(G#Z7CvIV(OWBA_p@0w_WXGhFQbPq$H3rrbd^7zbNZ?gV=AVrye${xF&3e%R7 zvT`C+sSQvWYAY&U%!q*#jNY?jm!Sc^7`2#Z%d3ApLmg7q+;6$Y$7igxrTE$hsJ*XnW$Ew!mMRe?zbTBGWsM z|LZ7TE>}8L#{T(+!hZ`<5{9Y!3t9)^2!n&gzcDSj|9O2B6|_hz0n}&a5wUQ>P9^oe z!s#RbeQB!b=bi$q$~G-<ia|ai&#}>Pn?1$fR zyFO~^=%D+k5&h(e&7T$afddYt!ag>xF!TG`4huQT{VqVi!IMBmpHb8AMx*TEhd1Gj;%>bCA(D8pf!5Xm9Y{ZSTYiJ#J_AJpj zaKM|O88a^${IEn#gMBpQ`Pb|DiJuJLUI|1`6)nGS{K5yIF#ky`F!}aDpZ_}Cm|v15 z$Da4h3H&Wz4KIl<(COo!gT0Qyvb-b5wo@oab5p+G%9)J@cboT$h@r3|e`Bj;dv51g z&ZT2%8JiIUDFMO`1Vl7;Pkx9_zg&^oGvrh>c0{-FGi4vBx}>i z1kIvSwCebG^z6@b){lL>=UUSRJ&Ri#c7ERy&#sasQ$RXq#2_>)FAmd6@Cy7y0?hW@ z)V*on!(doQ;dYVl%4wAE?^=c?2~-7i5Dzm5{hFvEBfsEddclg*LY#jd`%)#@*Zr%; z{&|c-f^YB&?PX#(56n!N26Cu zODi0dekt-enpgi2fY*POD#^X*_x+psVLx~RT0BvF9F>@yER?bhCkyqA#EQRyj0EeS zhsB#za!)~i?-d1StQ@VKvD+&AzjxuWPnfe}!&2SJm-t9x{@QNsa_scKPBa&Z_h-X> zd;v+4zp+#4O%!&78JIFyHfR0`&b|0+>;Bs^T|m~S$%zwD>FL#melSVLJ%zvGc5+*8 z-}q~l4&LnD&^Fd!8e26>-9GZC zk`@v1)(`_l+dxg-hctn#P)f`X=k>olMWtua(V}<~EimosdQKdVKwW5q+CAC!lpU%s zzTeKU;G^%b1O&u?j8$F-?d|PhkE69~*Ja&7%x>AU%XN3mMH=xfe^=gEHbI%mKKJ&A z^MZtonU66)5n57mF#b6)`os1=OBUSBng5Z>XqbU3Bf56&Sp3C66I6b?e^0 ze247l^AB+*-}Rs8ghtr&=LgA&wtxSv4ANmluBHL>pWIH*&;R?dSUR<=tjvrYp)oOb z{?Okgib0rIiRZsj{r`A~+yDJ2r08Av?+cw}1^TugR;q3l__1NZ!HY>sLJ_b3uRO-< z2s#0Ty`ziCI6v@aS_z;Zn)4q)R4ED%8%^n#v|qu9!R+(K-(#jz6to(G7LTnjFWY}x zo1y4?!1@umeSu*G44uikhQT3!UB5d2e_UDa0D66a#+p$9*XGYh?*Xy+o9g4yU&hOlO`QJII=T93U1{`}h;Vtbn z=tdyJ+LBdWwD5P&u}wo!lJX~Ee&6@+9y^+rD?v5`bj{EFgc)t6H{?81u=qa$%tVN2 z-NFIUqU{HP*5ToE{3jRt_P_Ss=cnHTMRs?!{3jJ@^WPjN=T`lA=1_L7x{ z-i{P;1K{5r#?t>f0=FLlZZ#GvZ~|g3kdIL(3jgmzKJgi3RxHp4Gcp0as|e{iKTafW z`Qe2AdxFcgzc;9g{P_39^Xy0PuaeNB?!&3(=H@Gv z5uc@@tp?#)Q#Olf4Bms|_Je3?QU2|`+={H~2}s0gYwqtoh_*2x)-cp9{99<5M;`go z9$|R96FHx~x$q)70MmIWi4$*aVx>a9q@`GqSbSitp^UUW_e@(_vU&6777XOjMtc0| zEnsvML6+l}A{($uD&X0%1tlNJ2hbCXUAzuU((C^VcjFEr1^ErROd+BldIn0lp|* zi8R51t8+9kDHbO(I*^Ou<=uOy=w{=Jp_sUwD3B8^yamfqm;SWkUpBU06>{1nsDH;l zhw{iF41@7gfZ(?zH!6?`|4KBhF{BbaW71E1QO1H^+xJ>zL>-hIl2S>Bd-e1)tTABT zRRZ?FYaoM)#`b9;_5w(aZ!i(|23#;-fvmt-N_g@SNY3p=19_#vVN3;?voDFMp`a=P znS;@B4zbwphsuH!3kqV8s8dI+jV^eN36x=p<1B{R`0&Nj+2l-|*$GZq1O%2UXfaq~ z68SE93m8vie86^#WkS}7G+~r z3eJAF$rR)hyt=LA@eN$JJwAl^dE+^s2WjO?N(gkq%t09=qM|=PLSkyR%-`;0cwu?p zA7$-jQlq@;_>Z$kX(|aSVTMRriDVX?TaJStvp_D;t08GI!;J||d)P?dDSvo=txeBY z<&W@LpFVwhGMBXbF=!cxm?)9@c|5}n9D;CA15bf5oy;W;L|}~VQ&SU^w!$KRx*>3O z+{!;;s2kt+L`hLmC@K>0$20P$y;eVZ^kGR!2`MfqO4H`!SoA!*2X!NANFqdW3S!gA z+$Azj>WCRukKR!d%b>dc)lujvipn_wHs`41+>QC)fznjW9Mnk59r4 z68iseke?orMT{!0TQmrkSeU`$$7iL*IxLi17m1StP!BOVxjF|rN{mDeC5m1>h7>S< z9?(g2bcrN@9oFb%ZO3!xGMBh^Y;c1F?6X=HW<5(V)p9X<5vrG%A68Sp#}}l+&{Y!k#eQcmBF6tQ)*wNHUwMuu*@MdagP z@$+u|GtLnwpP(mPl0q zl&%HZTu`SzYEP_Ry^3-W&A`kN`^sHBe}3c>&B!*0Fr7zSQ{|ij1OP&kJrqE$*v(LT znOIA`a%D0>Bp^{;gwSn2JYWOP{yJC&kqrrp;W!YvlD0RQodqqOh}9Ez!~qzl!od6q z*k0+7cvwEdn?mti^{-py2!2W6cAKytkw1#kMzTi=v@tK2us1q-F^qEHZu$Ez#_S11W*U#vmeL5#R;l+`7Z`>oKy0(Hv|UP8PI*>eMD zO1Z7=_$g>2MAd?@&IYW{`}0e~dI6pGH5$WVnQ z`*WG>q-rZvvEoBG^5Vf^!~`7Mw=*12&EAAmqpH3BrW6ayyY@FcP*p{e62 
zUJG_>zVdzZC$KS#Kq`sq&dzBBl{PkJNwlNJAr6H2?fi7+&NY<(FVgbOa{_lGD!aEk zVNjI~amO}bK06<9ar=Oz-*JaAM|*|7b$Ox3$XT-GqNG27@L0Na++);k#O7kZ#P!5v zB8Wj8u}z+9Kf=_gDl|vp`v9dvG5cc)rmWW}V_}p+$d8Q)D8>?&ZCavD!*1f62a0CK z2;`6Tugt2MTMiA8HgZaW2eq(h1Rq}vcKCfNH6B{$>O@#boov7B9vCnuorGUgSlBX9 z`H9=nZ3dN|YPtu#aRiz6LJ;Q<9ScGLcOQ5yLS!7S`v{Iw6E9e11OUu9g@GSEjPJf^ zu7xr@=%am5k!V2{Pzh<9($KrDVdwK3OHgQ&8{%>+Xec^9o*#2yyGd+8i0S~+!|L_O zB=uhW4QKBjoqw7_Ir1brdLO7zFfEG8_!XNNR47%XN|bpdRr_3%bUb$kqZ7GlGNbup zP$Vu_x@F}mHJm;`1IwVMW}V))Z#@R*>m74Ml+)pp5a_#0PajM?JA_T{{?r~ohSA2u z_4B1+l1gGG*l~mS&n78>zx!fa)t)Vs9k=FJXKANzp~Tx61D3`T{+mjVtynjL9o7yy z8TKr6f_FA4ChbhL7iu+=$AZAL&FC! zt`q`GnrM|QE1z|3;lhP|Fdt|~j+KJ1hpe(*V^dT0Y7n?*YW#3A@ZpHC^mJ)bXNHBn zGMtByi0A@fnva=!*%1gN_y!315zroe=Rk#|%qLrT65aut@OoY$F;d1r+if2vd_F>; z&PnGIQW^-f-$ZXDCPP&A6o$Z;31=LAJsBBpD<;Y=6HAG!TDhS8CxN!++3yCq8rXQ< z44p0Y0h~`Wphe=*E9TyrI|?0lVkj}R{}Tr5va-3gtaM{*_^UdM5(N$M>ooC8)edSH zQ7Xbc($>|jtarlb$#YNAu z8z$cQh^$639*r6^0Ip$ zuv(yM<4EsMIaUkd^hV|uifC->+IZlUiYw-RUBq(zLG?EsW~4i4&H&W`Uq@uedr!O z$N77jvw#17%jD%+k5ElNkZ~^Q`Gj$mwZH+uPSgR0Lo$Cv8ecy@a~w>5xx~hVKln<7=Uc`tBIa1#(>vM`I#YiqfjzGrhOMFxqVzH2#=+Lf% zYkE%317|BkloEz#Yq`!#+UenR`feew8{-0C(sVaq01aR) zQ0IvUy1;ND?mB>Tfhi{Hpw>zZ{t=8{BRzU-0tRj|z!V1+_rC+qGqw2;l9&4q$F4Hu za&6j8wrN@b(2=!7r>E-?Qz_^qEz|X!k_+t;+9_x3do(yqoNFK+6pGQ4z}0JOpooUr z|Eb~OHYlZ>vI#qg1kN5qkf$(Lym6W$3U<;egd|!#lBC`H^(CO6ngwzTaV6As#%T?$ z93!Z3ytEd^Q|9N3ZF|+!Dlx+n<}u3gsD#*um2(FkusybIpp`!0Gx&=AoPQh@Y8-US z+M$KfD=MIDn(8Y%mK`iXG2Rpz=P@hQR~186Ab(?~ycHHN-4-q&)Ba zMv2IJK^^3@I2^awwWtGVX~9JhKSpxIfj9ag%NPxwPFHELPq7%puRo5RRNcQD)Zb@@ z+6C*U)jBXNhu*8Xm;8F|DH^22z1MPiErJ)purU2ov`Ax}098|Ivv2>9@m&10Vy(+LE?%%NA{9fDvjNkW#OIjbc_IaR+oq zaIv6uW6wuplCsK%&j7Yskl(h=o6SIs_RF2VMCK5JKys!7Appn5mXk%1&Vvm^8K$Fa&5!T6 zv%~`^3C`Nh$fO;sASWwM&Jm2xv=u$Pm@$mBBiSC=_J*jlKB)Oo2hET;B8b!^S?tB& z9970DjMwZ>o}vlF1p;IdR;gbmmv+L5dFZ3H`y*&GwBWlEV~7Ci1EOqYKjm}?t}+}G z7wCEC+i_9Zl*2d@r;}dXND7A7P?5tZJ9dlWB+61diPjH$skGKrh-UaZHkIU06QlkE zV>TmCgc68AWrRW_+~k!qK+i2_n^ct#qM=BPsPIIHE-7(+gpAKQ#!y3jyXwJ2gwYh;TS!~W0TRYP-)ogm64bym<-`+_+!k{S3(NNPDyimu)^LwW-mqBi z+^58049-Uatg&#eLEFjlUU~Q?nmr-hqjp#?%YvSfqao~elI<^vmNXQ8{K(!X-Sbu7 zD!JM(5_XHXkDb}K-qJrD7}o3LY*MQG;Wr>UkqH1&g<)9TN0lRG`_+s17V2B2%u1ZJk)jJPCr?a!`7Sj$lRe8_y@@f1T{ATvb$ z0qn;yMYuUg0YUAE4{1FuBxN?tH(<~_-E5S!L{J2$Lx!XeEixPjJ3<>ebSTIhe~y~5 z3Ds4Zkgmb3l4o}+{4g^6-oX_-eaJKZ#&Cnugk=WL!k8P_=|CHZovpdD6>8-_tzGtd z{WOx-_%S+lB+Z}f^_G{DGa~*G6Y7}EJQXC|%W}S4I>K;oD_e&ParI-k_O(33?)7X# z_FpRL-u*MfjU4Ho$8T%~a9By4Og#G`T6WWtj`?=Y)K?&+3>(z_$Uq=%trvp-qfZbv zpLCZL?n-1293vKN%hrfjJpopSp{f|j1F8dGNH8>M!6~wgY6@AzWg?5BlFd|5hz`f? zT|ux+KL$>r7@U`%mH^mrUA2038`eMsBetF+e6M@%hu3FFoXxjH#Y!I^x8~5D#iQh? 
zFPqvmqH}x)yaW$WREZghgpM;*SK_nUoixLOxWH;>a?B!l>rB2VBfa z_%Vp>kh<_gudf3aJ71Gb``X#NcHchlq)&0Wv-A_t#_U=LE-`ZmJ-!!yYpUr4GKMQ6 z%kgsRUc@NR(0+(D=8NBp!zQ!`81?j=F?weX{}w4etzEm;Y-=~9epOKL@Y@kHe-9^?c37gJypAq?>`#4ztG*cRJY>Z{wL|JN0mI0664c9eO0V&+_R7&Hh#DtP z*-Ml&uZPo)Fs%IhRx6_jogzKiBNrAO^J4?;gv@?3eM$zb%SLc-Q)-QLb@zapv~=Sy z`={v67SzM@!k^bJlDZ#x_y-vas`Hu<)TXJv0P5K6r4tBUBL)3*P_)>@e&}N%7o|lD z6pV_2Z@ktqRT;S@VitBMV6XX&+NnC{E2zK!zMv#+f(;tlIw91Jewihd#_+YPvii+-%pLK;#0*%lY-|$8k3GJtQam!^# z)T?nst7ice@ngVUOBPZK>q66si&7~|4kLw@zT{~vF|$3kemHis(7xp+ii(OaxJecJ zbYIpor?d#y!B&w1=99&J(6) z=zGzZUVQ_Kt*?oJfn|yZ%r79(si~)@S7e{^_z0uRnjHZ)7}3w%S>9?blt|R-qK3j> z=*EMSG$p-D0mYF((kRfM(b|JM(0Br3YxT<<2vV`KBl#PI4QppeSlLLviIGmQWS#L{~hB!)+_lGYo!G` zzoj5`WaSa|0(3L@NawSwHuNB!lUX(P@1?qCUqL z_|yEp4R@;j_Bvuy>RWMvH&m~`Tcl(={IKu1GO_6F>?bf1=|w0;XT)OhL-wNDY7~d` z1#f)i2U~qW@v3vOhGRvWt;?f!W4lZ>qpJ_Yc44adC;-$$R_&jjtsv~D<^F$bE6fS< zI6sM^lmwKXfEuH$QJy^i9}s*kU)>DB&>vCU`&*^J)Y`7g@2y{eGopE#B}i-wWt^Te zrDur9olX-kQ+llKeWNT{DIK8PVJj{Gpi>z`{%dIE5yQ_^Y^Qv!Igo3;rYxKWnh&1Y zr!F!=Yk_#Tten|`@#pE=udO}wGMU$tFR2SOM?6K+I9ukk>pEp5iKQq6#8cq=U6u%~ zY&Go3DJ{b<<8}+(1ZIS)QZ&@f!omV|{^a~lBJVwRHvfP?JR6*U2(54~zB0+GR+=36 zuwQ&YR)vmy^PjVfz`gXK8WQUcX#Y$Xzd3yV38;t&0FC_+TI9wy;J_tjqm2&!G|P}Z zA}W@rETHD*$3P$>wo;yLX{2YC#d1?4UiHM^6f&VZMLdR|;T4uX2`cH&_0m4vEA39X z^5%^auP;(QskV8byMe&7b-+|Z+euYQY?xZScXXNgo=8xK88AUppV#0Frdj}j9aj!; zG!(WYjILgMQ(GV|L7()wuy43QFZ z95^N^mXW=F12wqVf{RKa7Rd#p@6&)&yZmD?HKHCqeCgo3A+o_*oRHU0@tn#yRsytU z3n&0Ye1i;0g5_(X-;8I&CifZOYeJ^c0TlTqU})*)cM{YFa;z~ZwDr3h(5Zxg+(}MR zDVV*4opi{GRS_FxVp~oSCTvfVJ3f_h$B#?y{ORf{6~7kT_!5lK{>rJsuZ8CBB!){z z94ZCxe{fnzo6ghg_}2F=NYG7b!L%_9(CFTF^oB9h5r#7KsdfQqt_3z5(#InR#xBEg z)qj31ezPWO`^J8Fmh2&RLY9TaI<%HI2lcDbbOOaudBvs+9H z7BkL5G{3|)s5bgH^bv<3+5Xkt4atHVa33S?c3}^TYd$`ekk%`Es3<_^W@Ey8=%t{0 zD2Cw`31Wprm+S_xDTvQAL1?#t7gY=Yg&`&!y}VRY?}pIaiQ%TeEsm@!=3Qw z^8^^fNB6mya~G^ddpe@>NioqyE)w-blzICjG4%U=^|NE6tR-mAQCT%05;(=RH8j5b zz_*@TaTW(iDxc`@9l059Fstl#E6#oS%2Af<%|@tVzjs;p+{_i>VhCK1Ue=@yP;Q4 z*P_y~oQZWUe&lZ?aL>mW!q>z+eUB-m_is zB-8Ku^HYLVBn0Jzw9)eSLj81P)zq^N-Yq z^nMjikQOt^ZTOzjDk?t@`MkDl#i;?0gkU51&w!(B4yjEdsaksU;iMRy^r?}55YS;L zT-m=a3DNb-*lpVeA!X=5d}O)9-r+H-q}HE&mjRtaG~JD8I+N;g?@sG9X$@O01HuZa1#Jxb)2Jl#f$&IVUzw)bBsb$%jZj0^ z-MxGFp>|>uiXWNd+10$t-=L&En{=Fs>VpWEddUof~!y`5NrF9FDtKKQTe`eL6 z#J)pHB|@GjNz))c@)3G+ZE)o*g*Zjy5v?Y<(c!a?BO_^1QSS`Vpp~wLMd6Iym;E-X zq!VL!gbE?QA^(3MmeW+oj7C2p11?7AXO z8(@qPT~CLbiCZ>JcSNxfa%eY6^A7&MvTM6j7*-j?`;MsC#I$}+8Zoo5xIt2DIc`;< z-vi(1+2}ak5GI{4}UT~o%;o5sP9WN3p%7w$=r{=~k5OwRphB-4_D$mn=@Vw0e z&dEgbmbmV6f900=XKP)(8FiW9z@Ou{JKEdl09+MV?13Dld%j2a{5$WRr#xr6b91nH z7J$^X;LJISXU{OYTv`^uNV+9QdnBK|p0PtKsTQyXU#(qnhSbU#J35|*wetY5iv0;mAQ>Q*b?CB9uGxbosWe5Jt(=l?47M|3hj0{ET} zjE!>~N?9UrK2Soh+`TJ;bBAI!zLR}`vcMfi;X)vH-(2CnefgI4tM0vjlhz!vi1^!* zPqcSSbHjGZG3J#U?=m$trJ4{7?l^!j`*+v%oDez=vLz~Ulq@6zzg%H1@)R$qP|0Bi z_%_c9U+M$L09jz13|1N%Dz5LwLivq<-j zxWC;*Vwlkqnz01b5nUp&-CgYCZl`dON+jaRBB?n&GkP9IXR@#Yq`kMpSpey^ zFpgmlI!8Aax;<}+&JZ!f8U`gnRo1_=xUO!=)y1}Z&oynLUIRJL>7DHaSU@Y;$+zbB zxr1~@34O9M;GlK4r(geN$#cs{A8oF|R%{VsPI?@jJnbA}+d~>6RfWLK4LVR2YUd2d zk2a6KAu4Vc@*VKN55X1kbi&82+Bqi%24IV>-KXnPDBT`IvaXbq^2@>gv(7mVD}X|% z-Gl1CwYs{RD06Oryy5RqU!_mZoNhA0?jbKjS9w1FWsr^|J`wQ1BlhSEWL@k!(2nU& zPRSBSYb50g^^Q9-scxJuImCws`i4GUUFTil+Gar{zHtuAujRtQ#FRL?dm)^zt8;0O zCFZyt*eo`fba@Nl*gTm^L~WtfKN6OY>fx1KK5*gV#M~W+{##Vf$1^&U^Xr({P0DK8Rk=y>guFG^+W6X?$i=$3H}MQ9QCl_{j@v~BN^Edc<^Ww_m{t)R6d-#x z`Qjx4*1+}G8Pde;Z-k!dax)8a4- zInNMX2OqXr6NyPgU8yaD_>~i-Z19VaapK|y(vMH3yP#k_4(gG1jy@uj z!I_tnp0J47$N>Cfn%x@(u)3v>kZ)1An6LCzu5}X}9LV);Xqmd!g7`S#K7{3O3Q!$> 
zEkCSy4jH;nK|w)a7nitwlix%DI@>H%?b69rCxZgJf`**%l`0bPCzEODuCF;D)p)v7PoA9j-@f6*@--k9DLQkTiFP~jC4luWgM#c?ET30*vnGWqRoKtuR8r+~h*a%%kX(?Fk{x(d6H2nZ9;gDN(}Z5ET? z3o>gelj{PC7}2LCZVi5hju7GTvOd?ah*vlNbs_qdxX=?3G=yGRZ`uA&;HvNM&mb|? zGNpDN5q9)EJCU+r0ex)jYP#gQK_WWjx%xQ6v>E#P=_OeK9Tyxucj$`Qz#Q2 zPv9@1&8ezNUH9a$@$jXaMJ~p9UjO3;7pIfZfvji%);c;i!+gh6=u*N&lqIS+3Y*ZI zNE%}52c~&=fWea&r)d>ILr3ODcP|JE>{`GeBGAPv@wA;liCQJhA7u-Dxk?#Js8>;i z+~SY9Shk|*NqWP_JE5LE+AT7Q*`x=H@Wrbt$xQKa(ASBIq|RKj<80t~sU0kEiD8Yw|Wr5;Yxo#Oy|Nu8UCaue-&x#Bc!W_ z$fVV?twG({j>g(?O#8U`nQRQ$PI5Q!PM#sdxn93Yc{Y(qx3ja;3Z8K5C`M6)VmVkx z{exXVe!Pg4I=dFqwFQM$f9e9%1CEGR#xmY=Z{Q<;CeqAF`MnRh%UDi-GhmZvT zZhZr21@TTnsP=M6>NP=RKp~Tp#k%F?1qKk`p_)FBBN<&iiZ%NS7V{x(M$rOMVLMWy z=78stK=l{bpNEp+7G7^kXW+bOz#(u8X;6iX8pZQGNj)fLT0mxQ06ckGcM5*VCd<>Q zeI_pQqmS&52 z0j=f;7{qemEfv;59gbk^Tq7xu16+#(n}7NtWT?0Z80;E&h?{O7%h4FVWS+LC-%!_A`SB*}7A3Fz9jNE!yi_z@$D3pu8{Vhfg z^T%KLVR^w*EH8F|O_X1D-($%ftj@{ea<3I1Cm66lnbTtH3BQ-*0?&Rd3c@=m6!W#m z|G1Nx&F4b+GuxY~{1tnZf4X2z3{-mXDg)mVL3O_Qllj7XUru2|YR8TeXE-8hIPi6i z{4d0F*%~H97w;dArw`}TZxT4Sh%}?8eLD>gF9!f=@vmO;w zta9c`Wxk2Ex3_oTo8wM|h83;n0l1~xXLP{l;{E+ctz8+6Y3Sve|9K+x71=Nl0 zG~Z;h(|o?~G>dsG;u1;9y_4Mw>PV{P5~*H6mY+)`_T`j69?hz(331E6-^@)LTc$=m zAy*F6O<=Jp+^M}WCDOj-o!4%SPE}ntf;2^a(G~tczrySMaB8nJe@L_F9DbO&)%VlU z-I`fZF%M$T*OBt3Upc&?2ISc1b?fE=$Q#%WZ^8O_7&f*)m`W*{OUg3*ZO#n;x`+HD zP{B284kZu@$H$2;g$-&xjsbWS z{5>LWcIQxb-4sOeMclR0oA3y?{ja;VU^D56*5IApAnocTy6fxf|Eia`HW|6&&Vrj? zPLkye@Msy6wWF@vyW_MDiE4={7W2k=F1x+qD_=8Up;6smCvRQji%{6&ss3t^x_*ZI z@VDy~qY&L%C?ITCwA3``Od^zRmsJ%a6Fr9d%m%D$QZ_HZrsUr4m`_={syir!+{dDC zf0lsc(#>({8z8ltih5rqZX1L>Q(;wpWv$qiAY@=O^g3`e2GV#@hhtDW6IF1B&~ntT z7V{QSLUrt3kRRqb*}rc_>h*8mkHmMOVn>Qd;f5S%0SPlN8zC2aV6k1%PQ_rM*Y6cf z(-%T=IoPg5*#`AY4%%tLX!pfxoJ2%;`R}RAi`(R@gWZQ(Z9r~$oXsQ;8e|MTx4Xwa zwG^uAk_r&Es1gUZf4Q{%>utr!%b>ykvVI$1b0YN1s#RlW#Y8OXD<4Qho=D%;BR$P` zPcbVFGqZLSGq;E3>bWX=lI9aD@&-kbA@#+s9nYR21Ww;cex2gK-@X;Xp36{lG2W2# z&})S^jC|>%T)NN28PZV&hc_&a!Cv{yA@w0~B;p@BXgXfmPJZH}KeCu`#~0d{MEDJH zv)#(8?>$j5j^E*T`ud~nOo@#FAN-ot1I$$Jb`{Q=b?4g3yI!igE0t&8NiZtB7`<{i z&+V?#>QW&~eRt(3Zo6u~l&=14p|`BsZ0ljwM3WTOHR|d2=+@PA(rz)`WF|E?5q1SGe6izH9}!nJeT38Yt-5)CKg})ws2j1Shs76b^Z>K z2{)LA*+RLJc5+jLSs(n`qf9eY&)YsRk?k}d&Tc%o*}Xqooz{LtCC!ojcHIIx?T!8I zm9q>d6=+(enf5d^*EC-*FMVElnCWu=y8KV<#G!%9to)n?PL||+ALGpIx!%-p??KaXhE zOSe3cWEjZE?{@2cG>#H`oSPF^gI&Kk-S&{Xo~t%V zbPnB-TQ=C@IK0v|Q1k}XQSN$Ov<06P?z<);TP_({ld!1ijhbzJvw`swm-}bs($$OuTml^*n24s% zm|D9{EH*NrMqz?CRf#pKGh4nj>w}m#GsCFMZIiHkt7g|AdzBkgh3S89Jm+HwB_O1? zM3be--YoCV8Z5}NZmLaA;CAj`QR^*~F6nQv=4XGEI^LS)Q;kfeI(J&ReGvCwt0!?s z++RypR8N9ZR6o0O8F>VM9SkpS3#RL#eZO)~thbc0k^jQaMVnqMWl(oKU!mzDpnAgf zLZoY4-44g_Ql6fDz;R&v1owz8lz;yVrFf0Id#auD zt^>|&KUZsK_AP3wIA^7#``26_yWzESE5@>6qt zLdN|%Sh+LZr7vqnPu?neNKEtMxWSwx5!AFQkMnuiWXN zw0}r?uWf&ju^RWjtCqT2Z`{RjY>$Oc@81&9e(Td#KYy*i96Ql$*iQCimLw-@hD-gt z^cg*E$Em%=oa_J(x#cd^0X646oVy>X4lhs`4nH*P)$!tqT(_%$3)^yy>#M#QC)$G? 
zM|cn4+28({vF0?>_S)>m*{kwatve(!E6AN~Mp2;c%Zuj|Zl;;aN_N?PPoJ;6RMmAe z4Q%aNnr-5#R7jzPEUD3#b~4>DU@G7A;prXui+fh%ro8oOc`$TbWjN&g&uL^)_f0$V z?iw?zp>AN($*sycYCSX7xa zRXBWd*kiE+FWW;MLwAqZ`^}*j4(|0=v6dB|NB?B6K71}*)cw5ewegL2J>-UJM5b1h zmQ?O!auuJs=ylqwht?!?-|$sAKOn_UEa9rPwuf4$apr1%8Q2wfaoF~KeB@yF3e%I1 zFH~ATY!7AemTft%=RRD<)(TC~~Oj$p1q_4ZT{$snl@3y7$(Cb(RS|~a0H@5zj!44JLJ$}7~ zl6ccDgH=9<5}U_-IrgGIa{_x|yK>p$^cj_uBGHo0Y+W@G6OJ1fY7)+Hiw~ig8S3pMpKo?1 zz^i7Bn+u)%Kbu8J`L4LSw#}72QK7wLDp{op7gf?d%(ru&xd)Kftlx_|k1{P_?8p3d z1Ae$me|$<8`4n1@Bfnt(E8KVnKzn`JJuA-Nu9{!SwDS0+2P zb!PHE!=Qwx`MS!7ollmP=gwmhVw5BAzW*5Q??2{>zp{LDZ<*kapW?v(6kqy9{xkn8 zG5UUU$5z*$S3TzrH(ZScrX?CWJc+G!4No9`ObW`~X$vNbgO;*MN%WE;8Oo?4S z_B?O=@g%Z;D9Qe@pvCiU7Ls=t4g9&FvHtd5wV;^&(mM8=|Ge_m%Q?!_I!W^KkGZM< zMsF*p3>nFeD@y-k!`Kg-G$^n zYtPQtvGPuSq+>l8+&VbHIlrWZ7rc(6k`^GxjFdCoC`)%L;kqSD70kUEq{MnI$28qo zOaH9DIbPMsm0Blvk1lg>^x_L%ja;*nYY8nobLi}nea-q$GtF%>(-_S;YGNLlZh5r8 zq2kEQ$vX1S8G^RFwIhW^BYu^iD|}RoGfQ#(@JKOHF*QXMu ziHcg==}~_z!8=|nt4bc5tL#Y*@m{8ErAt$lwz9tJ+u`xt##MT}NkdapdFf@E>BXAV zsu1q3o_#J?DJL=#zn-QlJzqr+&fGXDS*_9ceUr8mx2|FT-IbiDD*n1({dK#0Z+{iB z7B%W_5I9U`nz~*jf`;T9WJ_;`P4hFYR$}haP>E5UunUr&Km}i8lPoO^or_Q|4hAF*ZceQ z9WQ#-87@I*Q{7Tnrdi^8M#ZiMIr(Cu@gmA6dZQxT`+|My8?`gtl|-$C*efO*$q${T zud-tHEW% zOUXRQ-YOcEa=B13ygs7c@_kQUMr@N&B-ZcO?k1$sJ)%bPVI@NvPcw%!$rH^%(QJ4d2N>c@^-Oncv|QJUp`U{68FjAgR)nZcIE_v~sq`Uh=N?Cg}E zkABP-yWZ#<-n(z=SKn)=b03CyZy9u6mD7{wUdWLIlcRYts`D^`_V*X&uj8u=3C)|*A$4YbIQarW4Z)=Z7in_ z-FYt-KT__as-(q?`@LyiBkFRgDWM+n`^4{HlN7Lf%_0@eiq!E>Qx29)d_>ji) z%&;@gt0v{qUhtBgcwU)3qGy)m8K|x5&7sBb+5X1 zmiQfAhizDRUvtF|8@c&x+jibOkLHvme1zwPn6eR{_Muxx%c!-X_U>+a+nxiT7M3xv z9_^Nd)emW8X1Wv_V{I?dxih43FLP=I`PKQ2LX!%Qn=~&ol)oN)b++?k?`@*8f`i!0 zV)4PRJNIPaiyrq}H_x}2*_KObvy4#664#bLcun^#z4IEP>mkKZ`uRCwYy^8^0OL z1}E6ty84|E@C@J9e#<|Fxia$gers1lE04`T4)ZMU zWN?3*GGB~dW2lberyDcA5e&Pm=DXwI}*+Vg;@rGmBSof{a5a z#TDvRUaWBvD!;g|zR7rvZ%w63S4vWpz$%u&<@TM=dT34QzdBLhucK#k+t(Flwr{-V zUROEQ;4@y>({}bHFG5;%#&2!~7N(-KLTC0ft--!0g;m$po!umg>+Z)22qlgB;`43A z+zsc^=|b@WkE=)XydTxrAI|Tm(>VcoZ9D6n-_zrT(&&8?5BM4f^hbPlTW!jI)uE>qxt^ERa{lI`}^8<7L2yKdc-|nazENwTX>I=>{@-f zw2XF3irxtG{FCmqk%A`SknE7SdX3|g(TI)|AK&$X~Wnq%zKlv)4X%BfuQ`dC5au#j{33W@6bj*?$> zvSf5>X2*N;wNt|@gdzl!->8pEv(!ElcGy7<%#Pq3#+;susX@05nW)SeyNx!F&7YWc z!WNISvPar&N2>{Vr0ROyFgB%SFFj&Bmi`y4dYp8B*TCd@0jdXL{|&ZB;1`I@VQ7&0}8b zVDFc%b&toF;Va0E5Jc{se3BGDzkbc{;wO6gu~+))-r#xXSNCW@Zqq-@AEn7%^VXJ} zM*mQ@q(-W1%)lza`)QmV8u~{5U_s#v2vWP$g2>P?k{aw8O8*M>21!WykxF`Kp!r%U@_#XNy{kuBP04;R) znu>k)$y?m6KWc}d{$?{cSGnnN3Ho+dCDwX1lRgf{Ars6$VeY96%DTg&GRe=rg-&1# zqu$UI=KRmiZ*LoT%wh-yj4g!FD-5#GL@I;tf|o>8CFZ#Vi7F=VSB&Pe)d&8*@Ud8~wFp({do`c-J zaLtKDuZ8{-kzv*HE3G6|sp+^M`;_A_glUp(7_RnW-+9^~@9hFXGICIg?Y>8crbyS7 zNMe{puY3#q?+n$+ocM=OsAWSP+p%`_q>2INaBJ)9DFe4J32N=Ks)s~|1>V7GjvO)VI0W0UqCM5L zp$Ax>6-;yz3NBvqFPi1foqs|xg@JqsTZjlaY?d&5htrGZ<|A2&?V|$)bF+J1QHN^& zrzF-e@r#^$snf7W=e5Y&dZ=a&4y?Gu`3&z*Q2z|X4`jD7{aNuEmw)H>)`1N%>KMOB z_?$HnD0$D~RV?}?@hHPAi{#5DwA7I!?BD)HtNkT^dW-+92_XCW2B(R91EZ8xr;RLa zw9^{pF4EZj3s?8A3=dM)r8_&Yj`2!s0p#jK5R=^bGz)R}wuzsR>>HQ!oaa7xy5`k? 
zOZd&b&+J4EDUTBp&|TDJIUqf%m3TBy3Zia5qDzKiv}Eh^K339yH^Bs$LO>OU3Ob5t zR{O%;u^fV8@>_giVk1lB$?+yh_?PcPD@4>uL|;f2GZRe*+I(BbS}qJgZX#`rWohm< zD_qG0Z<>8~oo=49gD<-_1@IeJyN)hptqS1;p5bki74P=euc{LCE9E zp|jyh&`G%gWm%)e<8b4;=zTIn#FAg>Z9I!B`P z^X1x^T+P9l{F|TppA24)R9C#eS&r23%e5Vtk=pjWg;?pC4VK{}vypipp_T?ne0g_% zv{Cp7;TefBK?d_;uknJppYw|_r3dF%%N}s;(OeRoM|3=ATzikOu-JBnpD%-fR6mxS zQX)pr;kpv>vdfRC)mxsWz}C71C;e?U44OLP9^V&yb{Yy8BB&mE$mF^a+7C)wt=4Ir zVq3UultGVpWH5xE9|vyLydH$e$<;!4#C?-oarWJzy```LoN{Tct@Y>JoB@Zb>~En~ z?Ot4`uOZx|9ZSR>(sGu)+$?0>FHzwz1id(hrm!Jdi@9VZZ*F$l1SUO7PswzW3FiKm z1jBRBac=Pn^wQjA*)t;LP#FNK%`O77j(sO-9VGZH4nLD+Ys=8sHORIp6KA z+qVIFpz>Q^cHS0&j=k**i5_ zf~kb;p)#&2#*D6}Z$u{+hBcW(kk0Lt+O^9nl+5FB8ckN}_%Ld~3`L2&TBWD05D=1; z#kMf8DubuNp>u^4MMWsVKVlwYUNNyMf6x;`)m2mBZuvxMK7{?@yU8$KWcdn~G)(nO z5S7YAW4R2`eZ)7-()8dO`Q3E+vep)UeJgqTR@6#w?1B5o%>yp&%ZG>nL_Ju+4G%l& z^Eei)I#%onA|bS1csikF{IU`3*@mDamLPiWNGMeJ$y6lK7?HF!1nY+Y?>C0bf4AoWZ5{NUIM~;4EsHlOu+Mwvcy~ zVOUqFrj9wNaqB4wNCSUsmZnnhwrdbN}gc~y@rh2HLMC%u_? z@8)v^`IzhdArqWu(0gTOF>_l$l!rvVP0ekE?{15!oHC;Ke;@&+qAiq3Wkf1}c3#om zGTkyhHhB^HxS{R?Zc@Cs#|yr))JjKH6K!rlx?s=}wcn#lK)F9+D(` zI(MinyDReglr*ymCfwJXSy|rZrjCD?s8vc$#{VFVwT4n7aor5B7<#B|K>%dON9$!yI9YE{JPk}v8JxS7c|>)Z~Wfou4DM5^5Zr#7YC_!T^RjL<8?Lyy;P1o;1-3!!!39hAGQdL&iC;zJliX zCgggL-@A^y-bp5zHggO_2Wt2f{QOYh2UHZDKi9shfL(T~_NHp}#!;e?mvc{kq;{7T z+0mAaE5!{U_)g3R?uhA+oyNf1OP%tb}FPOpxImT?yxV%i&#E;L&sh%Slb zcwB13CmN8Bnb;x|i@7i^!teCG;*J<>_FQUqqC>NljmsWOkfTHCc@xH6Ka)-LK##$% zhJdjAaF-sxNj!(uA!jk-Sm2Ebh+P=rFcMJ~eGa5ZT;v3EX2@I`W`wsv+ZsMKi*4pN zi1L;UwOv_+`Mplra+ECXzM|E!PSpx=7YZGi2pFaKP5$cW-da0Aqayv66x#Z$4##f4 z-ES#L(f~f|X;d?wJ}t*I6FW(XQ}h|vt~H)1_}N~!f1BUUi$9{)kJ}+8lSEiB(%02> zjl^zSAjonWnZe7B4bNNH5O}Ub!&?eBR9@)j)RN}Nt0w~1$RPp*Cg3AEEklx$*|21P z##>e1w`A-H_oufZO)X49nB+J7b7zd8#Pogkf+5iltJcg^V54&U%oej$&6_|8A~G1N z3HD91tdIA4bNqqoYYhjOEv^;Q`Gz~S+J5nK{AyqwZSOS zdUrtMyOksAF45}ZVfk#8{d{RrJ64x0&%vQA@e94a-E(h1jr~4qOE?8}1lfEaNb{9nKJoFq02|%`%9(uP3KN?>I5}#;qlVdo91luKo-H zNsCN9^XE#pT|Bhoz4(Vy%SnE(`aXhT9KUt@WbboB@ThiAd<$91EmI{Z1}GD`Xtncg zp%ILtBpFR7d%{Y&gLb>w{O_D^2h}%N`4p$x@sJ0~@{B2MIH8o~bgl#J{u2 zW+Hc8>1i)s8Y1}9KCzuRm=iTuWwg$QpCoE>HlC1GwbJfJ7cO+~6F7iEw-M2hZYzoL zyYa*pRi?H4*Vv0R($jhA?#U+z3t~iPXi+jVX=DPeh~xlu2yIJF{)q zC4mN{Og1u0;IlOXvspzP$-@{-<|AWprKsA;E4l$^tMynLR6POY!%m_zw@u*!w$68C zJg^0*E7UB6K4ig6&m?V;Jqqj}P;@_g{1@C`U)o^p-Lx zho_w0*-0Gxgd375adi>5i%elXpqx9=emxD@Oq6{LG5G)5O_Ncrji$j!xp|uu)(ccxHc0daurC@3NV@ju3nElgf>69?? 
z2LFS!B-uSV=ec38jbXfcUmEwwM zSXeM$K}FH`kL(?6U_SEuwcc|E-Em;A9XQ@^XHNANJ)m& zaBpBpcl-x(d`CjQo;=_EhJMpejvt3qkyz9_C?*n&(ecV+?jh7x;$e7*TV97rrguF- z0DIc?AmQ&boI8g{w^1zNh$5NaNNAor7Cw#=!KZ<%@@}&DnU>ur+;cIHyM5g>2Yt7w z$jkr()pm*BOyz)K^xtzLYC13Eo^iNOiQ1_?vz`)N^cq@nOd4Rka((d5;RlzUxftEc zyc8HSwN5=sxONS+Kvvf)`50;|e9(_HmeL9>$6l9dbr(K*m|u?TZT#}&V1oVUCY#&8 zbW8sW_6JZM949nVnSQ{M*{K<56TJD+EIm|IpKJ9{=(N~{_ zB;^><(+uw)q#P!Z#eQW08Lx$IdshhugY4axUvh1R|FtL7)jb*TbG|d88Q1$D0hbzW zq4`Mql`t~S$I5>Nh||yeD;Wnjo&YCANh-j-zGE;;CP~R0sE_d{42s&KdMH17p`Q(P zolYMBQyQ|u-U}BD6Sk|L1pi};$gPv7zJ;EA>ID=+VU)by^Pszy!1RWMTzVorlR(YZ`7!z5T9BcW(Wu$&V#n zFV2Dly|@Jnfz?exU|@D1`OBfU=?Jz*d?Pa)kx5BOkuE58Ibk+=qW0@go^^-&A0!Zt z4XXz^&S1$z(bw~z9CU~dpRIIiU~YC`0><{?Q^`QH81myDxH$g2W53bVVal^Te3;g< zp|}&L-WAxD7+aBl^@Zy(V4Ry6Byj9adP;YCmB$#85JG5q$hH(V5WP z8#SKY&8L2WQG*UtESQiZ1q!(x)yHwiP`td8$~(@t{5W`e^D71JYx%KthZ#md7?FZ2 zOer>_7-Iob(~arBr-55AI$AC>-6u-hbXd*pjS2Y}?2>0skaFT&@5$-R?cQPTTV5ZV3`1sbvUq}N?*0b zF8`b3{Pd4pGP)b}i+PgwGvo-0JM!s(&x0eoD1540R~Y02v`&B;;6!>B;!40@eb+xQ zxqMz^+VM^v6UPMO3}H%`%^;t!sF*ef5==S~Hb0q%gEuxg3)0F5bFMdSkxv@d8O5HH zYsbvIRn`DPVizDD#sGK1Nyq!jhj|VLqnfwFgM{F60Pc<={>B*7LMZJb-LuGy-Jvi` zvn>gp@NQ-6|2z&Jpd6089PrWI#MQYxVnQ*|s6b|(0|o#yCKFJk_!$HJNsw03Ti(N; z`Y*l_+CYkoLvs;%(~Q^K4d$q44Rz!8?^QHh;(K_-scTETLmCf6rj^|>-%-Xodd&9_ zt8{In@e1|ZB}Y~Iwg}gTu~%*56n@aCw0MQ$lIZd3C)2Y z^RqKfA)%q|aK5NHzj0KV-(Qa3|3P*(``x>DuU)_X!pQCC5-RhiC+lTq1{H9YLj~KI z-E}S)T^#LU``(gw9TR!Q@7}Gz7PDf|0gLf=T(8F6fjtd?EmK-rn&5Oko`n9*wI0lM zyWywIw|O)D{yUp%&u{d2f!>J^cz16RTH-JV9&|%%nkY6JUnay zI!emQ{JVB>F)}jJl^Z=ftJIy9xNUZJHZ?0tdC#5&oijBx2Lbu^c5Bw97(Bazbxc%+ z@Cns*t3oYKMtF0U?jU^FS$#ytv7von&&*Oy{r zH!h~3>D|ShSMlmq30f64FeZcfnE&aQ>--KoIZ0~V-|dZDWQ<9&E|_%YE424xw!A$^ zVm+((=f1wgQ3*WX*ZV0HHu5Lou2=Z(-R*#6`WjeLreQlkkhdn4sFUhjTfDQ)6;$t> zKOgHTnD+erRy^Lt>|P#wUo1#fr!s&EzIC9{mQ30!DJ?nuF}tW|9Ce1==9NMQHx&7c z;CwLzSjs0W8zCmk$F~5UA}~qbaJvi^upg;ibABKaChd5L&38+|+$m~lt*|BZy#+B8#)DTmCvtYbC!gOUT2>Dwf>pM1q05)!hJd-_MW zrVHFZ!j>L_GcqCDi6g81PnYbR9KkT1qepK5R6mT2yka+t@^JK=NrJccQuw_*w3)>k zT$U9pJYcn4J&7F}#Ymb__B^PK-U=$an%p1le!PJ^wN>FuDEl=u%3#Rki>b2iT`O9! zKNA!SBWiR15xdiNqcf9s7(85teGTs5WZDZQAXOG{v=`h%a8sKoWxxo1Bnj$~# z;)SrE+S7~YEFT)&)n$$_#=ytNw{h#%_va-PFT<-^&?WXEKH@2B>*JP|beHb#j_JnB zv$(T(Zh!W|u08wzxGF4^oVo>ETwF0DI*GcMfm-DR0^4xBHVO$TI66wK)vIc3WCvyR z0#jscTlWeEwPXK=g!7M$E-o%O6}|`*yDyB(?*QNjBJn^(mF?6J5=j zLd7p$uvOo@cC8STiq`>EqTp+{U08SzZpK!F><11(TddIwScF1h7q($Zbhg4<7gfd! z^2cE>SzJ`K0QO?pP9p*^c1-*s;0f=%7p0{!`SNF&>z6i&b^Uswl#V&>#FxOV)Z4ag zi*bch=pj8lZv?O}U%q@sFOW`ldSA?84-bzAPoA)%%X-n(BTY{I~;5)vb!gX~*No`3Mx?mOQ)o+qZ8yjCYwm4Gaz2`ume?^3?>~75Vqz zvLO19r-$`+D(hIaTk-sfsjUPYeCv9uyetsUp`8Dd} zM`ZsFs~)$$?N8aKqaITi^u%Ygn6a@j`&(>8PUPx@aqU&q4{VzLr#G?c5d(o01)Z}{A<-$xAsu-oJZq>NAbRoVHg*a zigK`REtWYPx1z1BO?7z`wqaeyH_bFo6o$3ysn&c$i>BPMzqJ4s5nsNXAgsFT^fkpu z>Vl=OYHL~0mQhksxmGL#OUq#&YZtuL<6!@DXb+_XTgyHlsZhSOCyqft;@s{vx3TedUKQHagtwE!qsG zr9jyEhq>|*^sU)ayuA9^qAhFJx~IFsF@{~nC3|aefZf@% z#qc7e0%{818oX`o+O>&Co-*P+Nga6v;s=)J>XhVb)kyWj&iZ1lUpA|EO-mfJu<(Wv z&6A6Alm>M0LRaMLIJ&RFqUxApPx05GNvWuT$he!Hij9|Z?!J*;buwgHg|4{I~dsUWPJIoK}`QfxaIsscb zU|qhB)$0xY%e;E^zY0-!ag>&c;NzgGTlH6UZFoN6OMr^tUqMco1X#} zPjebMnfH@E!z~{k!C#Tb%iuro!sQ3*WoGp7sqrAQbPF?>eGkCmP{!=F>K+NyRYqSKG z9XN2H}%S8&%#5oSklZ)jL`VXW`+)vGiE!azZdS=L-AVWY;TdY+(M z!YQf2Uda{{JVtbRrDvd6NX^Wwm!h_&^^_;}3s7m)NUS__<_x!l&83k?Fcxvb0;T)- zf;%+Tzu4Kh3X6-E!M9R!b+awdMoD>jFP~tfH;kfew5{@NJG^)!`4mO*o5>kVkd~6t zXR004p-@Ugz8!g}M4*g3t?=f;xyj#9t*kH9~HU93q}0#btPKun?O-Y+xGcyIPhPlBdIxkC2xjR^Y{I!mDr? 
zNV{K>a0X{xH^ZC-+?zW}q?K(;V`4t!2^-%Z|LZJjEsvcr zXjM~FBgeLy<{Yw>A`=UXIy)Daos8TXKE8)-?tF^NT3T8Xv#^CmTJi4(Rq}xLwg&6p zf9G*aKrZ%8kj^UoGg3PZa5j&@=?7M5D|uyHu3;rTAMg)M83Bd8bT=#G5(WvF+7A8r zp=e{X{ffK0e^{6=z9#>cEezN%MHRn1AC7yAa7e2+sEmAVYr70oZ?XF*4{S0^@IBGV z=8ukv;ggZ!#dgS-uU{`Y_%pPxukUkP+k!OR#9AqW4FYv9z1vndx-Gj zUVQw>jZ0vI%}roY@o61~MC>Dl%M4z;e(hI(2iw+apZs{n6B!l7!o_tROMNmB{J^6s z+!MFvRG%5$$M3&JK!6%n&@U@0_E=bKBFOX187BPY9#@#mAj2fbSBZ_&pMi8G!hX-eQ z+oq?|)6?hw-whk0A|n^RfB$~C!yoiYwQ-PY%(a_0v0Rdse7(bm7s402@xuq;w}j!LTsyFW)uZ}+!Uha1OL5mRnvZ~4*Z%04V zatR(l2E6b07@O0h;Q3PC1fz743M(p>^78VAHDoACW6=%z!OG{(iNl?AO^_;iZZtGB zI1{lyKWs8$^!fE9THhGH?Q zI+;s7y}+xPL3f{H?Ir|kon@S4La_vm}*`W>qt%irfq zfAZu?M8s+=8~efIb9DQgsUL5b^8xlMhM|6XowrxOx zh8wGdDh#)PqpHK$)YP-EP!6RH3#u~J>3l@9Ykqzd2{$A+xbH5C!pc*VF09>YF+Qzg zz_d3uKE7Q#pD*hLs@v`Ga8>>QGdh$mKCnb3K1taZ#rfJ^KVG_n zA4#Jr4n&Bgoxs#s|FQ>Dt`aEnlvm3%`1z0@xj`eW5LN1mqwZMBbBrv6yXlRW z`vML6NE?}k$#(2BBFzUepFF;x+qaLQnwL2Lox4+VH5Gj8{UEWRfhp|Q+@7r7CPLK!?#O=HI>$V#&JIe;61pWT+~P)VDkruYkwM{kP!#miW1|D$eXb7lON05+kkE@m0RDMvqQ zx!izc5_niyBhRsYM4S)Xb)FFk?*)i(Po(zD)5-6YO?>t4B35hPgD|Sn)+?0<8+Er^ zz2EG)vsK1%kP75d*ghREW;a)q8bu&GhCpZ)QlmF4JuUDo z)U*DCtv(9t{1o)`IIt1s2AJWExzBR4a5ZLdwI}+Ywr+s2V;=*?JcI)Gau_gB~0x@Yxl`&_z zYYCR(!zOQUi!VdpLf4v{F3{UVd*QUfp#Dy9@KwlG$@kA?<)K#sc~N~|y2@M>4>Y0q zs9ivKxF2|=4lLTVoC{yix)eCX3xO|QP3!PU)B|+OlP&4lw^QA@bB8lUAqc_qIBG!4 zyc>gqHi`O~baJDwJBx~n@TM#SRo3Z(xfu9@g);H2&~m@y)Qm!T14+v=bgC=j*F>tC zn{R|~++3_$c6>iJZv9Xfa7T1wN^71XD7{GMaV|z z6jKZVK*&$C{q~B(uOR|d4$&3~rbJC@kK@_geBoIo9<_66$E9RaecNjkFgTB$L}LY0`q;PF4>i4f!h z)MNFk|HD&ToY7pg4HKP^pHov)sSbu~1%!mW3~9xRN9_O>ov)EzzRw=6ShdQoW$*(K zho=d>>8`NdPOMC;Wwe_2Zd_puk9BGeH&=2W;HY&CfZ+XZ(1;V0lY2I=d;tndQc}`! z_{&nT{Z%4pSAjg>zf zuF$avV;>#&ZbxPqv8x^xJL2N98x%tHlhr!K(aF1ACYa$pb~*9=@&QkIKKu|5G)E)x zp$^>N-L_l9AlH!vE0q|NkLLNHPk&wb4Oqd`r%xlKDe3ETVsDp3LjhdeXV8$Nd*d%g z{v0+R+X}1(=$e?CDhA#JDkWGLzt^l-)!3N-(Se8?5f`VHf&)F2qd`TAvWqiAd#jy{ z)7U|N0h=#EE+70-vHaun^9N*PWo1_*Olm%3-0GF;#>ooCGsC=gA>#0G*0pQ2ukK49 z`Pzs@%AWg_4jno~8kbsINw~`#*rb{hP;p`cE^)d+X;gA+m8<<8 zM8lw@(L8J2pcw&|*H?}VDDJ>>C}4xn8dmRHxCZ!Ixz3|LV(0L^wghKU?SK zV11X0l}KeXV6lx+Fn-*oaX@(IZG2d}mOfCzM)UWJY+jgk1M3W~Uh3GQ~y5FGSA`e&u zpkp1YXFaju2TuUagXcI7`G;@Aw!jJ-bPz#5-mrZkONDoJ=a54TR-=c=y2&7GR(Fl} zHHjjNE(N?ek%3(!p8?7fqidQ20s>O81pWPxGGY+EBwz%!nelPlN*o0*w28Jij|>l| z+IJr&ufewCq9M>gF%q|4KhU}o50}z#!fsP|1BWzoZSQMIM8N59`m!Q!N z`kB@VLGLCxFd}!=^px;9PY>z4eOfFC1L*3-2LNba2ic551l$f4niooDtNUsKFX|I? 
z(oCyXmbK<#FX~aH6)N^)vcg-ttd!N!jYl~N25L@oMKp5*oqr=Ha3Mosx{bs z=Ynx2)SLltmN77tLdsfw7fp3=b;a0)b$iTTH3kHW;-NzTbbp+xDud`Bn-Z%sGaMGTh&lGBPrWP75znVTHt{gBwc#x~fBTcC}eTo1>fSC~UT# z5ggH}4_OQlbRS!vqo}C3Q9TDc+@%w)02viyzjQ(d=fm`LQB)&LNE{Fr+#mXM5hM?G zv3fsvu*S0k*XWf5BUL@ymZxW0tMC3;tff=Xj0cl-s_6=V{6>Q;5w9n1oS~@NmFt3l z{9t=5jFrtX9kGI{vJx!!{?@q;%P0~q0<_UPZe;sS0 zZrVcddR}R1!YLA#I=6a(l%d~P3Q>Y`M0U1BOhc*%bL1VV){)w$7k`6oK6R#PokVIa z*AB5yHtO>S{Wsvg8|m}LQdn5HEw@Oc)-Mi^!!%PpLdMXvZof517bTSM3DSIgm&Zlk z)v}v>Lvp<$J2$f&`!kEdC`nF}vcJty;R7SEapN<)4Q5XQ(VxN2oR{XJ850g)f#$NE zM+Ku^1D4=a=AP013F~1`=%YMp1^iVA&m3MF_Y$1ZAP%?S&+}t_dSS9|yy&HYhA~}l z0IJ|P(okc8n=A-}9_epokI<&XUXbMpu5dC}U9N9w2W}Z3Mz7)^YD-S(3@gxmVf*$z zdh|#&(8t~apgC-*P9}DYF9t9Q8^J08cTmx{MOk9JaE3uG==LM>9+nUQD530AT*C|o z#A7@Q@0-5U88E9tRVB%GddzyaAbJNyGwDe(R3rMwiz(LNr5iIW0yT`|NaGpeWg7B52(yF6W8zrzK1=ma=y0>eW(n?DHTJ^uP-)N=Qd%ouyxEVkW)gh*lTzH-%P+H_e$-!%9XQzDx zwYga<)Oww{Mp?pNRw47j@|Djjf~IZ)qS} z$(_Om*@$#kIJ^rk7oZ}j6QIR6D~DobnP9jlx(qGC+60|#LA~+TC_fX|B|Ui}QvGH5 zgQ=fK8_*=Jm%4+QWZ#icISz#nNXCJI%K=z8tTTm-Y`t-6xu1Ue_))iW3uT`UihH1w zd-v>SrLkVl7KOX=5mc1I;6q}nQgEe7CFv{ft#}3TKQ5D73G9AhJoh-@dKB~(xxwUf zps%8_VF>In5z^#NW4+=E9B$t%w35S1jJv?CV;J{v%g;}rK4BBX3FUjp@PM_Ey)~RX z5J&+Zh%}>X2!NM7Hu@IoOHR}$!;rr(XSYf0ik$mJ#FnW)p!=##hgF$(5?gC_5zV1(hS!4 zj5bAqf&&uLXso7djiLWRg0B zPar93P3v0&a!EK2u8HbL_$bB&v**x{8tJNCt6iD`dKT2aM!%`#u3b^MFuo)8q^}bm z&H(oQXjN)j+M%gb8hP5qHFg?4t-O*?uEfnQ@fDL3OsgK)5?JN*T~4+uN4P%UhQCO{QoeWQ3MK$u` z)LA08|Xx;^%}XpvuVIT=MH>`~B9lwAiQKSSBKZo`JPXtAx@ zcJxc)R2CL~Lw-rd{YP2OMq!1=a&R@G%^6L5eSLjDx3?FoNU5l($bnyh*otx^C9W~@ z0G=Mm#&4erP<|weJCrpf2Hqqm=HAVlH}!ktU%z}wi{$8AsU&ijzP7fut05&KH|gA2 zc?LAG{N6W_M!YM7_1q3clntaQrmr8Gy9L=j{jesWS4Zx#6DJlM85!YX6}@`p>mSX< z##VaQp7OLZ=-|dLi7NyGD|9<)!1($tUwpa8?qGkk5S8P^;KMR+1G!NdZ9TmTb5w41 zj!uR-_O(*os)3dA2GeIgHPpYuPqc8!kG*HVM9b)ndEg1WI789+X)5>T>>KqUq`rjP z$DAG(;qx$8dSD;$Hyeu5V}Vw;5BXaLWonLJ?@rEaj{{nJ@`6E6n*00ub{u`AB?u8F z^MRTaIg2?!erNVpvXZ9)pa|gBj~t36IaW^z*}9XufLy<+5G&Yl$0J3 zLV}5_2S!-lOu46`Iqn)B7WRtsvBhv;W~LY#k1;<&^j_4A9Z+CnXTOH9RQ34GryNfx zsI=ujga6~8yu)?h+^Xa1;=+vAq)V;SuKme123I<)zGZl1 ziqU1y1F2N*^!tX)-FzN40SjVB3yU7PQm_AwqAl+KFzQyVTK#}fo^_7@k`iY&<-|3E zwI3IO(u|px_r}D8Bas?{fc3rS#o#Yvf;1)t^09GE?_njQ4@F=KH&j{pP9L>KuS?yS zCzO?TG0}C9o)IEo6!y5w91sjLM%NDQ$E&<+-xYXGO@Hm|9Eoo!4$7^q*}Qpk#!>3l zN=i@V>({24P0h`gnQF;Hh%>(3=m~4D2G!`-U~%Tm8=>w>@^rAb8>5ycysoPY&|yP*-+CM$MMX_5@n%hrl8Ns#nXU`F%#WBVw+{8H zDtweXg1`b;;GVxP7g3&QQBbNCo8R>Dkw-he#5nCf|JwpDRm->|L&_`)Dmn2-cwXKD z5=n2)kZ!T{>x@6B6)aw1;gX=r(L8Gri@qQqt2aB_PyV%Q>oszwaLB*B_4M?F9Na`} zk*|-BasM(z{xD&&cjEQuq%#9;twQeIv&!9VsHK(=-le?a?YGAA&g_Pd?D};7Nr)NM*#=!g@#7rF%MfnFRH7jC(L>vGBT3=7nV(IfZ_qFpmtyExxG?< z^3aZ2)#L4}`-ZGU?7Me`9kmE^RX*t3lxVkKfGc6qo(I~FdC3H*JR_@bBA zfOpAh3nlcW_@DiPa&TR`*7^DQbko5>LC3L^&2j2ST(Jz)o_}dRpfjz-nq;VIi<_GC zS~@|u>6a$mC}X#CHShcIFs=NJXF|XW$h@GKd3e?QM->z!P%w}xBP#D4tGw$im35;z zF4ndbGH}9&$?wRk=GO#K0VHNM*Zb`#63hSYH)IgxmZ5iM-@c_O7c<<3cmBFP;3>}9 z*|J6pHotHt!ibu+e<&mGr$2eev)g6Mxb%dKM-4%mtw&mP0Ktnw;(^8sj2&L5ie z=9c5)dlEoh(<>&_`ED-&-Zs!RB)B2y%+WctduPHhj^jHb#m zsE44o1Vgq)S~|SeNUx4!GWh`^>j~*S?Kp}h&UeJyn8nQP=%F+>$sAJ8smmz7^U#Q> zN45tfgP1&s+;g|^04<;TjsL*m{olIRy{O|7#W!r&VBh=RFMSUF2sY?sX-`A+cNJA- zabsi1cn+xW37UA%e;!TBH82>YWwoc zRe^|KzY@eZpMsZp?yJT|UG00in@_4e&(2Pr+U+!~7&QuEHA%HPMtE4uy%k9x2*>1$ zHRRR9(PX=?@RZ$p!+LAvF9m!19e}boAoso}d|>m+4!Hbivfo521R|$H6w3ND7ogpO z%7~5k{3ujgf#g>|9vaSWIS_r(M!NBG3X+cYJutjxtvL;gLJgLIgREBErhuq&4V9wu z2QYuNc^A%0D?#eY5UU}-#kH5jl6OBUepHfhrC`#^=qG(q395TtYiI^Ix1m}j zAo+X{rviUq1>xMacj>S$qhjYsD>%0WsP96aOW^(=Bo8DuI1jW)-wq10$bD5(q6k9G zii@+Y21K&<5qtZLvMTg7ih&aJ`W*t9a&vQaU3sOQqidy%avh7N^Ax8aJ42Gp@fN@K 
zR>cWKGh?k|PqMPUB|3Jsw=X2j^AUB{28e{&`{M)7tE;K$_Ch-S0*!P^KBE$8eCwnf z6=r>wv3})sy`Yca4M-<->r}Thy7KzaO@IDDNLd)$!kgMa*6Vkq&a6 zvlxD3)F;dxfg#tF_zoH^F$){rELXfBZ6j-kj@my@tO{E?gue7>`q=fugAPnw^xGF|HFpnS51GC5()Idk83b6PNXP-&WF zP_(o@qj%4_Wt))DGK{Ggr5=A#gR+UJJ2B=F8yj1rl>PcXrXyaxe$8PG=s@tbauoj3gsL3-y?dkz$Q^rJ`ddDWypc^)^RUUO%+Ao_W>wzk=>)8bKic;;c= zF;t&BJIh0m=jBmg302%UP+Ed_o7#MF?&tZSjulHG>HxdbHZX7#G%^<$!}sxm6hFjy zkw;ZkRklOzN}0iS==?$HV9vwTHUL$Q8Ara> zDC2+hH3ilR1gR1UE7%a+5j1NmA7A(JAzfBoeSJ$FGgor*A(eS!xf8l+4DhVDk)D=z zT&)0&$e;(2YWwzi>n|0U5<#ZYhDJ3+dAc9!*{>Z4sY75YfQDY+LiI}Qb0`f0Auvx8 z;Y7VVdMDD>A=tYwW-*L8R!{i8KQ%>Fv`&<*A)LAKD+S z-tBk^3N2AV@#dx`TW!OH>%C{jF*RgMJbFHv2i`=NNG-R*fm%-Z|x}fLa$Fu8B z{TRMnJmnzkGw)3<%>Q_8^2m?bHqEN zKgR8%?{^i{QP$4zd6B3Wdi?j)(+bqa8=+Xmu_}W+rzo@`$H9sWTB(-Xtv~b5l{HV0 zjg1Y`j__iqN*n36e02W1*IkS~Jl@13JZJh9TH?ptScDM|+Ye z<`X7*?qOOCEj>bJyw63?H9elX9=1Tc%IqPhf!)o{mfjYS4d|^;&?IcY$nT}NxJ^-@ z54=SWsozC(=a#(~R-bH9FH!8*EA5Kr4(Y!qeGQYaU5Ht;U zYHIWY4j@)^`c*GvxWzv{y(AtD!}qOv$iSbRAjkj{X_B#ontE7OYkYLn6Afo8C+O4< z3Rxs$KoV`aQ7w=a5#X&j>`ZM0+=2iv9Ck7`LpEy5prao}*JQmcr+AvT$%zJTJ({sXi;P2J>`>`Yn9`8&38g z05Ostx)O?ikN=9Mi*^sZJ#YYdD|A!*#H)iK!$48EK=VcrPbb4;XwwOAUiXEX3Xstx zGhD_s^n9@@Du1*e;|Hm2C_4k+yXNdV=xXv*Wa>uK!5w4$HH{rQ0&CV>qD(HP0#8~! zE$|uA1Dh5X8Ss+OrzDvqGNHMY4Q(=!M}S%n5+m#cgoBUjOy4Csv@JK;NFs0D?b`cEd28V&x1*lt?=yWx_J*r>3++!v0VB zu3V^Ihr2OU_z+ziy7CVnKBQN3WR@@LE7@uaYk(rl3xl5*pv-Uz=R&YdbD2B~mPWK0 zTs;|kZLy6VhH+6^GkHrGR#3JG0}(}Z3+VYKr0a63e(D`Yo2t4p!s|O4g-VKw9+emI z{tF>J*Xzf()}G>sfd(UyFqz8hHz*-mNH$o+LJs~aentl}3wV?bYJ?zpZ6i`l%Lr)v z7XOe8Wj2oeHx*Ig#l?1u;@--8JNnv_{)xVSS<$ z@hM-vd9xQXXLP1O6ta!2qFqeVXTlY#VD$IekuiL;z?GOlI!)AQIFu6Q6(>Xt(Dj4~ z2~Y2%kIfiU3rfa%MnonYPdsj3Liu8B;8t)jSj%+SGDa!8mjR66yWH1i(f$4f7LffP3H_GdVmJnW0X+jF7<8T;(eMxkW3DUyQ z%5?|_HyXCUZnPrDKR-2<84Yd)NC4p8!2s2JRVP04N4#c?pcH`>`PvUv5w%e!SuaTj z0EKFQU{PQpa12H~OYVkkL@q%>#PGPM!z?o%Nsj#{6E2ody3fw8hxu7$Z1kOo2(obc z7`Q%1(C(6fwCEtGg59ZBR0LusjZ5vE78qf%lNQFhlP63~k3$rIW@#}7ly%((KtT63 zXD-9jqKunX*3MQ$1P8Cu&pH*i-AP$5D52RElt-oH8w?9Hh*;8d?x01__!wnR-r28o zgmFg)a52VrJGi-cc>EyFLzKTGHUNhaIdC3MQ6l zx(2bCG_s_nc_1_{$F%LcWM7Mo=nv{zCuk?hzkHmM#z@QEIOKKm>B>uN+I!(NLcjv@naKwi3zCA^+v!o2t!TeeElA? 
ze1^ax%kM7!2mYKaQ9^I+H|qW^iAXaVPN@#P+NVC{P`Vv4 zD?+d}+tY^vW44st&mkrRzrHpCWxTSkuI?h`QX`~xJQq5Y-sk`lH7|r{L|6+c#WED* zn90SgfS>;Zedx7T;v06Tt%+BMPzi(!?b4;kGxoxd!J zs1!dx1|sVx9lPd&L1%Gvb4h#L;Zp$O3fu&!;ZwDUDC*O6>CyPf&E1W*nc(h%42%Y% zOMKPe|Byo7V)T+DNIft2*0qN9?@APtuF8KaN#mC%Th;su{X$dRulC|KvT$?z0+7aw zeaBMy66g=~*fx>T0_a4wmn__h^j8XL%phz4J0J=e9>YWe!M-p6urt8OW=)EbEHwB+ zhO>Zr>=ce49O--(^R!V)B{CZW$Thgy22i^90L&xgK&a_e`4zl=8)EYw!0sNQcg`kg z#=&Qm3NnZ*a)kpUuZT>VhKbikdjT$MDHIY>Hm5KNH;6N3j5onq{_z%kaKs?=Vj^8k z6q==>tMO{l)HqrdDt$qmWMRXrDwf@NMMNHkhPQCQ?#n%25(ppAa2B#4^7=75_kS4s z@<6J)?fug{yv-#gQN1X01B!%bqDaQbPzsrcA~Q`YDkVdiDf3k3`6NjZ%5;(`32_X` z6gvE#P2IQqz4!O~{`_{1pWf6}32WUV+k{%Y0|6x8>vpq3VwXliQe zVe>(;(3&nFL*!?Ge28W;!5)n=S?JeDZpXhSJ8`})|D6@mo6frY&z439XQ6d7lt}<{ zhkfV_$g1_e^YlT4B*I~!_&w9#oNv5UYgTP-6s&$`ff-3JAtS^+1q-Ag$HvV;bjYuM z67CtAC2c}kL+j}?B75wZP|U-n2WK!1iTTUfZexdqBgbdC1IOR}Cr_>$AMH!+>3pU8i_W`5Ezgl-hf3=PHVR+x@rf|#CV2qjjOAT(aN8FnvPP?6*kQij-j#%+jWMf8E5 zxe%36Z8N}89j3+Q<>j~*%8i*9Uq^%FyKP*+n20iq-FaxGp=li9T1HuNoRLgI@ioxBpXydqFB!G@Mv<53BA7y{c*PLk_)qLCZjJ!f$7gO;m1zuqE$ zcrD7gces@{9tIdSEcL(9V(;=QuRwqXI2dAZw<8?VfE@@G8xHXuB%C*p5!TVaz6+lN zuK61NtQGJFv@U8N)VHmPz#Y9#?BX_(<4b zh>(#ndLJP?Ljx^={!siy*UI-p-C2y>)N*(zLgcp=01{JV7xRJObml!{ zi%~chDB+y5|7W7GKHdU&l^$|0qQOkkr6zrkazytiDG-R5T1YNtQ@{l)t3pt@R`fc4 zQVkL*s%k-8IhvfW&R_ZhZ3ypBXWpUF(SnJE{S3EqLE?;pdKV99IX)8ll7?#u`bmdV$O%@+?V+2r2{9 zt@nGzWV8W_lLjnYN>?$*d8!Bz@hb39^sGP5%VS0MV2KTnGr0rqY=^#|A!2`fEa}o! znZzmD196O;*o0CWVW>>qp?DXuJohsny-wc-2FMp30L>mA7w1u10?Q8sIJBZJ#!(m6 z^aA|}^&&BP{e(wO+FjO|ry`;fb91tL3)RHh&SHNM;np|ON(@U0aJ!og=jE{g0V7WW zQ9s%?MQro6ufG39d42g9m&v<+XY0T8pRxz@hmH)c_}nq_YWSrZVZCCKKQ-0H?6Fss zFx#0Unc%9CSaOp9MefR+^RCIHe5vl!JNuGxKDy*)fIqI_moHy#f~gJ!tIk3(|X(bWna?Il^&eM)`0JTWQ_7x9(BDwj=5v_zxk^JBWk|Ma&rkgZan| zNPbgPGzSWy@nzLvU)s@PTyz8H^? zfSS*sBa$?W;;f*fx$e@?ASj^^1e7s3M$8lJL{4FNN zg4RQ%g<#U=L4)pvYbrirQ!M{S$qmT>X-H2nP(S2d_v9j1+(LJ>CFHc~S2;DiO5v<@ z4H}%%=u>Iu&#dqjPz7^Zh+=Wzd;3wS^$S5t+uGU!M1m=cW4;=5b>x*Rvw?mr2h}SD zLm&k3PhY+e+l6M|vL6txmY@_uU&GXdPoR%}jp)Yat3&m?V`;SooZEv%rM*`E!)mV3t~ROirMM;M z?uf8`cgKKGe9G$I`iy--^LUbb&g>MU)KQ%xX%DAY{>2eAIecnXI3hc3eS|D08*vql zVa(sV@SNMACv&)clfY4NU$W${3vtU!@PEvlYv41}8%1{2imE3IhqJBMQ)XS9QaMKc zWL;GEpo@IL=+xQ%inhR*LCL#RW$EP=v**p)zql;%b@-;)TRb|G>^AeTZSoLE*kt!O z#oF$27#fprmQBu`>s71Tc<}zbv~Ba4=JFYe9?kyCM>;uejB#Sa&S2SqH2G?w*$1pqv*Dcyo*t+a~~>uH!NSabDRu!#hZS= zwXmb&5k|baebeh1wjY{HS*Sa71%D&;)zi~+moEd!gG)LVJQ9)XVJRwTjn7&5ThV91 z@>$+L!a1kk#;tT)jI27Dq9tXwV`N<1VK(I&Z@~gGXv=p$YSCl*FgljJ2+%DQqNHW; zsojr^XqQh3d>Y^y%=cg&zA8oZ&gfVceZllQDvpuCQj~4GU%7-HK2CA7EFL|LzX;n2 zb^J4KQ67GNd3@DS(sc*s&C3QwrVzp*oHuIH%+YA8&&ErkETja|4o<%vMP>HL>AoP$ zC!s79R1j@q+cWTW5oJZe*LfIR@`8qLm|+su&rhH`mug&>us)5UE`loi$^4}(9MKKO zn2|cvV4zDlZlQP_Zn4qE3rrg~CgNm56LAD-W-io)XX~4vU!H^ZfaLFsB~L@tk#@T9 zN?0!yVFI3Ys_#aYQx>jPh_(FUX)-3ShVL*4q#bkN4<8I=o`lq!;gwYVQ81fEEbLu=$tOSKu|5=k?+dzE@n1QZ*q|!Yck)B62Uwd>xTM%=sj^*>)F170;PE^OzC1aSmd2R z3idI_CZ3>e$XU2|?NUaoR2=;NaRwrA8^*#2w{V&0X{_rT`(>1vpF!S@E|&M+PtWkh z8Z0Lhdflwg{`>xKU%PtsV~7UP%A-)Y498`zN3ZD%-(6MNe}nB8MAg z(d038g@4;(Xa4#9UwN%Vy>eY^bwg zn7LOZtC{8`&7v&a@au?vJ196E+^{3|M1OIK_WFN6V7!mi%~YnJHBU`;*xjFb*G*Dn z+BC&y!`bA@$6F~SV7*hv2#LjLcck9_Bp z9W_f3X3qTECiw545#QYRLAU|F=U=}*=fi(K=eS2>XIm2c^rx0;Uyb9o4kNauK5QSE zm=25F22S&5zLIokIep8_|K+*-sq$j6n+cZF{aMR@EGKt%OV`;UGHDRcti9;h9&!AS zJz_dfQn*KRi4MoiyDnTo4mhR#)d%|YLMHuSo1Qa<4=x@(Ai;i#G4nbe_TR6YS@1t! 
z_eTIF;hj=?=YRctXO{z#_1?KNl8JsnTVI66Vp2D!I;c2dw}l07fZiS^I(E9w27pks# zeV^~b^?z(KU}>k#v(CUyyPsmo`9jP*G^YNE?`=7H4^-ZECcO2c-k_D+L}u+tIJ@|* zO);zTz{J2eQQlS?&Z*anLNgao!crcSh(dm(gUWw|-DB6BIVmwtR{}2TKStfC-4$~Cb3a5Hg5&ReDst=9b8%oVYM1#jXew^j7+yAWECY1LD7kh)` zsg}lqL9edD#)eGBN~-Ea-!Q|B`f2!NMt_Fx=(u%vjm!Zr4y?rHRwJ!JD^w(e7 z(SM`b==~&n^6rS&13T+Rd+mJ9Cd^C9&Q2K853jM8l%u&64h&~I$B!0$X4p;+i1H+K zrtJ~Xw-514a2~W8_)tL4QB(`PDtXs&vfh@jp>p)Q=+pWqPM)K07+HHgGhA)=dUYB- z3*9wnf9|{_r89P__mr;xzD$b(0Y~$e!7;t2g7R--i|I_x)H@|j<2kWTZ7J1{Tr-?$ z6J5;($uxQT=;NpCyIcwuR~!zV*ukDG86sgg$o;H{fJp#`Hv3;)ki1BA3F!0 znotnzEi zE64RBw0Q2Dv_wY4tu5wO4aaU2+T}H}@O*a(Xz73P@@ho?jn0v!OPQ)C>&M+kAHVEv z(6V@7(aUTvDV}a|!Gdb(dhl*;XlHEokJrWqx7g}&VgN7%=>h2)AqI5)OUQ`e}BZxRco}VdqZV4 zzL7JvDLL?Z4#?`EgS1-|O%Wf|{CK|ZCm1U7RYqt4AdztHV z^8RYLl#|{c(xUjG;B9Yi%i>hy7U$>r@y_cTF0D0dF*0q@c;Xzq@n)YfEuWDZNH@K5 zxb?`?3)#8bpjT*rE^j-l5?eVe{{ zi~UGegGX9&L~q%L0uQ5>VHXFZ7NgpN1}m%HzMir3O$B_Fhbw!_a-5mCo~~iAt~tst@YAH_bJoKOWv|{1mCB>^l(7qnAChYE3&zenNWP_& zV}q!CsLa|r$z-HkoR+MVW-(Y(@I@|YXS%D%`?AL4Yeg=I+%IRn^qzk8M&)JRz841N zB?ooxdgie*2ew;T2`{8bot#cm$n%%hYQ4I5^mpFLgS@kmyQ?va$Kjjci6|Qp+FG%_ zryHL(ciB(8*f~0vsmRvw@9mlNwN3ST>%AII^){s688pqd7+|jVs*ZT;@0^=&PN#K_ z*&T6Z-5qb|WLBa_o!4OJP;TCTVMj^s2L?;`seanoy)LY~y#_X9eB0q7IFar_eLF(` zm=|_=6}7+EMMC_g?!$Y^0c{;6F7mxTLph(>T;K0pKHxKEQ`|B1_vDrd?k<~R)yOq8 z-zt{^{w&|abK&%6?8 zm3`p>)+Jf`Oe!(B_bt?R_r>M>!ul2vZ0cf6A6?THm*_B(h7p?srHHN>iXV}Q^lN#3sA!C z305#`&eK2#>^mgxAXK8E9E?Y5DF*WaQUStpdT0CsikteuzCUxu?$rZVF4Jb7!s~>2q&`#jJ@=~f&4aCU%!(Mu)d8CvrxF_hq@MPi0rVvv2w8V?elFO=^={`|Wo27R zq-}GxNlL7i7|;!KbMhj9YWTl_5U3>HycwpD!nT%R&HKi`K6X#RIFK#~GWZ|#UTp_O z&<&H|@C3+=0^o;KA_hvT8Z*q~(x@*kWFxyahK)F3E=j747sUadzx6SJ5TZip(}D@P zVd>kqLs?B=0Ahhh`Q@;EDi@|{L8UE3vj(Ry1ZHW$jxC=OQ23evR!?fjV>Jd{zyJ2@ zk1!-jgZ-RLJ!VA#%3ykOvy?{6N&vdh(Rt67xnAh}Ju)I}XDpudvp;gb|0p6B_v>tz zl+=YN8=`Mj2ZtOWGU;cE=JRW$9PlQq01WmkBp0fv)UL9cbwO%?*5DI4pFMc5Y?OPx zhjp1K>oTYzG~p=m9@s-VWB{^C0Aq)r&sqKzNU{MJtBPG;=Xao!kkN=aV~{7P!zxmx zz>HoItlCoBhc2Qv-9GNi*^^+uvL?4TbaE(KiH2%SF-RdAv%cmzW6njfIlXzeZyFng zB5^^^dZKXuKWYDdM)nz!tZm9$_pkIX%n;yDoe#IN5Y$TyK&yqzq7R@NeGKHv1a0`} zCq4PJXi)%KozA1xI0rWEK2W*$Sh}rxgH24#m;mKzo75#NvfbnI2r%FATgqfJ!^G<- zW_P&fKOXh;xK7-AaXgK+kHs zKS~;}eGm5M z8ag^VO<+EN4O0u#_6;}hmDK>^e*!3E?%vx>EL+(<*Kj%aWPb&$LV zo~aUAoE1S%YTQ_2M|Rg=0$I=79HOhaZxn^&>64g2aC2khIM!u1j2h7SL*}Z$|MotD z1M};rN6UGv;(!c0VD!L6kOc8?Ebv~eqYQ9(<{5wpq#+8(w*#cE0+k&LsNx9E*J_xm zMnPt%R6B?AT=36&E#P7?sT{5`M+ccNp9v+pzUj;`bM^j@?C9HDuJ7iwXa8&CJ_i*X+$xAB&dX`m!|50ae>`@Z|G|fD$8&6Z zBOcnzKs|o8@$OaODMsBDhs|#sd4WRzA?WHN1=+9QJ#{{1Ugob)WHO%&`MX_Bo zOwq->pp9Y|I&Pa}M{E_G9kUJvygpBEcU=4shIg$iH5CbjeXkt$CWX3(Gn?^g(_`J~ ztAxT6dK1@e?3EkqfuD%~b4EUCKr~Fy6Ab-I1cso6=A&ng&`(*3p{-K%@B&xB$X#e< zs6*qRDn3c|IA~@b@R~YBI-K?G$t^9Z)abjI2%1& z0b(|B+?Tft3Lc+=^yJDKUkmgkEiFSN8#FSA^R6QS#$!a@y9Q*2+w{ix_nQXr!1Og^ z`unn9 zZ--ChVriiQsHArB!K8En31XL#PI)`gL*+Eddi{xK{>QIn&JVi;^>F~qT<~*@PNr&H zyQ#QJHBRMOP&%KeX!g&mv*cQ@{3Wp6ccZ8^$HE!hfM?CW;|+ft_NZD z;_Ylju*dTHKB}zoiPx87-L(#e#w4h*$o+J9mMRug<-Gw zJbDxh5~nutlsd5nLTtUdQ7<+aBkKy6ZT`XuUl4FM2-Od(s;gBoMHolnFwBXSA<3DY z(SnfxiMmuBTsD`e{+N9s8vjr~vq3v_rLOK2K=L%JwnLFm zo^0E?^)M`qt+l-f1ZnWBIf3JC6yoss*O4EH$JHxXc>K$3CM5L zjjFk@$_LP*d;)K5(`?@v3+&b7)-CR8XH^bxAxZRNP`pelSB4|_#h^>s&o!`6l3dXE zg`g**Aa>`4!5{8kRXlMUkdN_r472K!i(22|VnJiCfM~D+1_~a4%0mqwVor)A#m(z~ z0x9>xxU|U-Yf5Y6rq-Ml(-+qe5lWIhNAO2BL( zIdI*zc#cbYHDUosVmP056Ji^QL^QMh>`peb`jq0fahSzmrlG8R3W81?)C>DPv&A)* zp06k?8_agGXT5>`0S$cqT|Q_Yl0#Agf+&ruBh=IFBQJa-cVt5d>WvGb9O z0bPgb$eZ-fowI99B9epg?s#Ez+2R>LX8oIQFBtS)zD)i%ZP3y&+}G5gScgt|Jiqc} zXLdw)OtJ|M@9B1D+sG%X$0UzaNe!LOS$kH0&*Yx_Y9fDm*c&-Db_H!9xnwOE`j) 
zQn=!H$rBZ9`*pY5dltk@qrbq&?jhn=%ccl})Ne7ro45se=t;-_U)ZQ=34VZ37*81j zZW!O;2ynvJe}u>^9)jrJGNf&7=N7d7TZqhi;xcXgWnUDMCIz`JcJPO+xFlD6)GrzOUQFkADaKUMPshko^aIEqJ)W zDa&lXRz-?LxHDwk0&A5c*P4tY^d~CBgCl`6?E9koLfc7BfjRG%dNmI#rbnizn1x+U z$88h0uglR(=^VDW$QHzUXgF`72&kR5I`niMZn5`X9=% zJ_gtdMw_|b&Tf5$o=VK(@s$&YsY+cR(m9>vT1;bd{=37*Wv{Pvp527g)inP^bz$Xd zo_ z>eni+7DT!&Z=E*npJB)sV7%4z_+ow~Kf>DeIOB}1TlbLIKd60sS>~T9ilJPJL6Mts zL6vE1;C0uD3m4M2&Q}a&1`!-BX}tC!v9d`+j$8I|DP;Ph@a5#@nO+wycM&{tQmAfn zWQ{qO&DFHJ8C;?8VfnbkPQnuxk1xl$`O0I`g8HI!++wfx)g7Xgo3E3=Z3mBy7u)_^o0x zf+d@4aWJj$$HY~2PC=_Rb*nGUr!}&c8aeT&sO>UwVjJ~cXU9z<|Eu8{ym8zJ`}sQ- z%}3O>y|r{?e=Vkg**U_{(+C##hS!E;SeMMjoj37m7~>Xv&r7_YNodVT&_O1q=hEw< zJA%*m@M#;V%7t!ghPF%Qmy26PEnI!&Zz6hF%k^-2Kfev%GcF1AY1Qkl(^qQ$)0fZE z+(Lie%^@%qE3&A2>2u{4aQphNm`QggWr=6Pnu)CdY)_kp9ViSPGnVi;${=AIciXku z>gztMP3s!0dDH6xwx7s9O&;y;NSpaMmL9saP$O@>9Nxp6C^BTO>*iI-yQ%o9yU|LO z3(qB=bommpg6~VP;x_m8nXbu6Q7k8HV%8PS=aVk^tEE`CZp`s-(s&&*zu8YLxb0hW9b@p00m$pY@dU6)y zMBa-r3Bg56`CjeJ$`M9B{#d;Ui8>JyfzLG};zbuk?^pW9Ok5hNemvAjSL(w(^r79t zbIk}i)}`jycwEUppkU@>Pgf;_ zoRu3#4!kcIA2yfd-exZS-OBHGOhM5*gK>kQXRamHen(DA98%;@Ycf~6n=pM24<#`d zV!GX8QbyVqGGFC%`y2bT`ryyq!FuyV%dr_GJ!YRO)0xRGVT~WXk{VJMpu{KHQj5j6 zW8OThExdN_2KMZ54a?+h;ZTYXA@-6axBEFQ_N;SROwvfi8fKQHIdFi%e5}Y= zw70*|oRaRMQKMr)c~^AjPc9`isahw1Vm^c4Q!%K0JEX*KZ)Zrn)lhY~{m@R&j96#o zvJh%N;Y2-0qgLm2h-QALzHb0EZj`5ZaH4u-zxRcCJuT*g`YQ@WL>y1DbcE$ZU?v`) z$Et7aTMybvwIi4H=Oa;#iekN&~pqo4kH z(@xdi>ghg)!h*K7Zrr4So}--TU>TGJmwj+PerF=E zEMPz{hZnad#X03q+HPB`x+;b?rG37|c!5cIvvO@py$-c-l$8{E&e=C7gUcEp-Z6NR zNcEl==p?md47K5!w-E}LTDP%8Z@!{4=Xn{fHMBgT;^C%AB2+W_+%Dw6mwykP(^addKw+O-@r#w0OdEGhdhM zHcOnlODyj9oM)Zj=Md}DbD>AHt*^kI5_y$G6$F%98SfDkKa$xTk};kM2@@A& -export SLURM_ACCOUNT= - - -# Source code. -export MEGATRON_CODE_DIR= - - -# This variable is used to mount the relevant part of the filesystem -# inside the docker container. Note that the `MEGATRON_CODE_DIR` and the -# launch directory already get mounted; this variable should be used to -# mount the directories that contain the data and tokenizer files. -export DOCKER_MOUNT_DIR= - - -# Data and tokenizer files. -MEGATRON_DATA= -BPE_VOCAB_FILE= -BPE_MERGE_FILE= - - -# Megatron input parameters. -# `MEGATRON_EXTRA_PARAMS` can be used to provide any extra parameters -# that are not listed here. -export MEGATRON_PARAMS=" ${MEGATRON_EXTRA_PARAMS} \ - --tensor-model-parallel-size ${TP} \ - --pipeline-model-parallel-size ${PP} \ - --micro-batch-size ${MBS} \ - --global-batch-size ${GBS} \ - --num-layers ${NLS} \ - --hidden-size ${HS} \ - --num-attention-heads ${NAH} \ - --DDP-impl ${DDP} \ - --data-path ${MEGATRON_DATA} \ - --vocab-file ${BPE_VOCAB_FILE} \ - --merge-file ${BPE_MERGE_FILE} \ - --log-interval 5 \ - --seq-length 2048 \ - --max-position-embeddings 2048 \ - --train-iters 500 \ - --lr-decay-iters 320 \ - --lr 0.0001 \ - --min-lr 0.00001 \ - --lr-decay-style cosine \ - --lr-warmup-fraction 0.01 \ - --split 969,30,1 \ - --eval-iters 100 \ - --eval-interval 1000 \ - --clip-grad 1.0 \ - --fp16 \ - --loss-scale 8192 " - - +#!/bin/bash + + +# SLURM options. +export SLURM_PARTITION= +export SLURM_ACCOUNT= + + +# Source code. +export MEGATRON_CODE_DIR= + + +# This variable is used to mount the relevant part of the filesystem +# inside the docker container. Note that the `MEGATRON_CODE_DIR` and the +# launch directory already get mounted; this variable should be used to +# mount the directories that contain the data and tokenizer files. +export DOCKER_MOUNT_DIR= + + +# Data and tokenizer files. 
+MEGATRON_DATA= +BPE_VOCAB_FILE= +BPE_MERGE_FILE= + + +# Megatron input parameters. +# `MEGATRON_EXTRA_PARAMS` can be used to provide any extra parameters +# that are not listed here. +export MEGATRON_PARAMS=" ${MEGATRON_EXTRA_PARAMS} \ + --tensor-model-parallel-size ${TP} \ + --pipeline-model-parallel-size ${PP} \ + --micro-batch-size ${MBS} \ + --global-batch-size ${GBS} \ + --num-layers ${NLS} \ + --hidden-size ${HS} \ + --num-attention-heads ${NAH} \ + --DDP-impl ${DDP} \ + --data-path ${MEGATRON_DATA} \ + --vocab-file ${BPE_VOCAB_FILE} \ + --merge-file ${BPE_MERGE_FILE} \ + --log-interval 5 \ + --seq-length 2048 \ + --max-position-embeddings 2048 \ + --train-iters 500 \ + --lr-decay-iters 320 \ + --lr 0.0001 \ + --min-lr 0.00001 \ + --lr-decay-style cosine \ + --lr-warmup-fraction 0.01 \ + --split 969,30,1 \ + --eval-iters 100 \ + --eval-interval 1000 \ + --clip-grad 1.0 \ + --fp16 \ + --loss-scale 8192 " + + diff --git a/examples/academic_paper_scripts/sc21/SBATCH.sh b/examples/academic_paper_scripts/sc21/SBATCH.sh old mode 100644 new mode 100755 index 95431b9..4516a24 --- a/examples/academic_paper_scripts/sc21/SBATCH.sh +++ b/examples/academic_paper_scripts/sc21/SBATCH.sh @@ -1,13 +1,13 @@ -#!/bin/bash - - -sbatch -p ${SLURM_PARTITION} \ - -A ${SLURM_ACCOUNT} \ - --job-name=${JOB_NAME} \ - --nodes=${NNODES} \ - --export=MEGATRON_CODE_DIR,MEGATRON_PARAMS,DOCKER_MOUNT_DIR SRUN.sh - -exit 0 - - - +#!/bin/bash + + +sbatch -p ${SLURM_PARTITION} \ + -A ${SLURM_ACCOUNT} \ + --job-name=${JOB_NAME} \ + --nodes=${NNODES} \ + --export=MEGATRON_CODE_DIR,MEGATRON_PARAMS,DOCKER_MOUNT_DIR SRUN.sh + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/SRUN.sh b/examples/academic_paper_scripts/sc21/SRUN.sh old mode 100644 new mode 100755 index 52a9aff..717ff53 --- a/examples/academic_paper_scripts/sc21/SRUN.sh +++ b/examples/academic_paper_scripts/sc21/SRUN.sh @@ -1,18 +1,18 @@ -#!/bin/bash - -#SBATCH -t 0:30:00 --exclusive --mem=0 --overcommit --ntasks-per-node=8 - - -THIS_DIR=`pwd` -DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` -mkdir -p ${THIS_DIR}/logs - - -CMD="python -u ${MEGATRON_CODE_DIR}/pretrain_gpt.py ${MEGATRON_PARAMS}" - - -srun -l \ - --container-image "nvcr.io#nvidia/pytorch:20.12-py3" \ - --container-mounts "${THIS_DIR}:${THIS_DIR},${MEGATRON_CODE_DIR}:${MEGATRON_CODE_DIR},${DOCKER_MOUNT_DIR}:${DOCKER_MOUNT_DIR}" \ - --output=${THIS_DIR}/logs/%x_%j_$DATETIME.log sh -c "${CMD}" - +#!/bin/bash + +#SBATCH -t 0:30:00 --exclusive --mem=0 --overcommit --ntasks-per-node=8 + + +THIS_DIR=`pwd` +DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` +mkdir -p ${THIS_DIR}/logs + + +CMD="python -u ${MEGATRON_CODE_DIR}/pretrain_gpt.py ${MEGATRON_PARAMS}" + + +srun -l \ + --container-image "nvcr.io#nvidia/pytorch:20.12-py3" \ + --container-mounts "${THIS_DIR}:${THIS_DIR},${MEGATRON_CODE_DIR}:${MEGATRON_CODE_DIR},${DOCKER_MOUNT_DIR}:${DOCKER_MOUNT_DIR}" \ + --output=${THIS_DIR}/logs/%x_%j_$DATETIME.log sh -c "${CMD}" + diff --git a/examples/academic_paper_scripts/sc21/run_figure_11.sh b/examples/academic_paper_scripts/sc21/run_figure_11.sh old mode 100644 new mode 100755 index 2ec7d9e..ff0594a --- a/examples/academic_paper_scripts/sc21/run_figure_11.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_11.sh @@ -1,46 +1,46 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Pipeline-parallel size options = [1, 2, 4, 8]. -PP=1 - -# Batch size (global batch size) options = [8, 128]. 
-GBS=8 - - - - - -# Set pipeline-parallel size options. -NLS=$((3*PP)) -NNODES=${PP} - - -# Other params. -TP=8 -MBS=1 -HS=20480 -NAH=128 -DDP=local -MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " - - -# Name of the job. -export JOB_NAME=results_figure_11_pipeline_parallel_size_${PP}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Pipeline-parallel size options = [1, 2, 4, 8]. +PP=1 + +# Batch size (global batch size) options = [8, 128]. +GBS=8 + + + + + +# Set pipeline-parallel size options. +NLS=$((3*PP)) +NNODES=${PP} + + +# Other params. +TP=8 +MBS=1 +HS=20480 +NAH=128 +DDP=local +MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " + + +# Name of the job. +export JOB_NAME=results_figure_11_pipeline_parallel_size_${PP}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. `pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_12.sh b/examples/academic_paper_scripts/sc21/run_figure_12.sh old mode 100644 new mode 100755 index 11e5508..df06eb5 --- a/examples/academic_paper_scripts/sc21/run_figure_12.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_12.sh @@ -1,54 +1,54 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Interleaved schedule options = [YES, NO]. -INTERLEAVED=YES - -# Batch size (global batch size) options = [12, 24, 36, ..., 60]. -GBS=12 - - - - - -# Set interleaved schedule options. -if [ ${INTERLEAVED} == "YES" ]; then - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 " -elif [ ${INTERLEAVED} == "NO" ]; then - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -else - echo "Invalid configuration" - exit 1 -fi - - -# Other params. -TP=8 -PP=12 -MBS=1 -NLS=96 -HS=12288 -NAH=96 -DDP=local -NNODES=12 - - -# Name of the job. -export JOB_NAME=results_figure_12_interleaved_${INTERLEAVED}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Interleaved schedule options = [YES, NO]. +INTERLEAVED=YES + +# Batch size (global batch size) options = [12, 24, 36, ..., 60]. +GBS=12 + + + + + +# Set interleaved schedule options. +if [ ${INTERLEAVED} == "YES" ]; then + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 " +elif [ ${INTERLEAVED} == "NO" ]; then + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +else + echo "Invalid configuration" + exit 1 +fi + + +# Other params. +TP=8 +PP=12 +MBS=1 +NLS=96 +HS=12288 +NAH=96 +DDP=local +NNODES=12 + + +# Name of the job. +export JOB_NAME=results_figure_12_interleaved_${INTERLEAVED}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. 
`pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_13.sh b/examples/academic_paper_scripts/sc21/run_figure_13.sh old mode 100644 new mode 100755 index 7ba560e..2c75c60 --- a/examples/academic_paper_scripts/sc21/run_figure_13.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_13.sh @@ -1,46 +1,46 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Pipeline-parallel size options = [2, 4, 8, 16, 32]. -PP=2 - -# Batch size (global batch size) options = [32, 128]. -GBS=32 - - - - - -# Set pipeline-parallel and tensor-parallel size options. -TP=$((64/PP)) - - -# Other params. -MBS=1 -NLS=32 -HS=20480 -NAH=128 -DDP=local -MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -NNODES=8 - - -# Name of the job. -export JOB_NAME=results_figure_13_pipeline_parallel_size_${PP}_tensor_parallel_size_${TP}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Pipeline-parallel size options = [2, 4, 8, 16, 32]. +PP=2 + +# Batch size (global batch size) options = [32, 128]. +GBS=32 + + + + + +# Set pipeline-parallel and tensor-parallel size options. +TP=$((64/PP)) + + +# Other params. +MBS=1 +NLS=32 +HS=20480 +NAH=128 +DDP=local +MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +NNODES=8 + + +# Name of the job. +export JOB_NAME=results_figure_13_pipeline_parallel_size_${PP}_tensor_parallel_size_${TP}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. `pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_14.sh b/examples/academic_paper_scripts/sc21/run_figure_14.sh old mode 100644 new mode 100755 index 4b83879..87ac082 --- a/examples/academic_paper_scripts/sc21/run_figure_14.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_14.sh @@ -1,47 +1,47 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Pipeline-parallel size options = [2, 4, 8, 16, 32]. -PP=2 - -# Batch size (global batch size) options = [32, 512]. -GBS=32 - - - - - -# Set pipeline-parallel and data-parallel size options. -DP=$((64/PP)) - - -# Other params. -TP=1 -MBS=1 -NLS=32 -HS=3840 -NAH=32 -DDP=local -MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -NNODES=8 - - -# Name of the job. -export JOB_NAME=results_figure_14_pipeline_parallel_size_${PP}_data_parallel_size_${DP}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Pipeline-parallel size options = [2, 4, 8, 16, 32]. +PP=2 + +# Batch size (global batch size) options = [32, 512]. +GBS=32 + + + + + +# Set pipeline-parallel and data-parallel size options. +DP=$((64/PP)) + + +# Other params. +TP=1 +MBS=1 +NLS=32 +HS=3840 +NAH=32 +DDP=local +MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +NNODES=8 + + +# Name of the job. +export JOB_NAME=results_figure_14_pipeline_parallel_size_${PP}_data_parallel_size_${DP}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. 
`pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_15.sh b/examples/academic_paper_scripts/sc21/run_figure_15.sh old mode 100644 new mode 100755 index 547ad1d..f47150f --- a/examples/academic_paper_scripts/sc21/run_figure_15.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_15.sh @@ -1,47 +1,47 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Tensor-parallel size options = [2, 4, 8, 16, 32]. -TP=2 - -# Batch size (global batch size) options = [32, 128, 512]. -GBS=32 - - - - - -# Set tensor-parallel and data-parallel size options. -DP=$((64/TP)) - - -# Other params. -PP=1 -MBS=1 -NLS=32 -HS=3840 -NAH=32 -DDP=local -MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -NNODES=8 - - -# Name of the job. -export JOB_NAME=results_figure_15_tensor_parallel_size_${TP}_data_parallel_size_${DP}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Tensor-parallel size options = [2, 4, 8, 16, 32]. +TP=2 + +# Batch size (global batch size) options = [32, 128, 512]. +GBS=32 + + + + + +# Set tensor-parallel and data-parallel size options. +DP=$((64/TP)) + + +# Other params. +PP=1 +MBS=1 +NLS=32 +HS=3840 +NAH=32 +DDP=local +MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +NNODES=8 + + +# Name of the job. +export JOB_NAME=results_figure_15_tensor_parallel_size_${TP}_data_parallel_size_${DP}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. `pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_16.sh b/examples/academic_paper_scripts/sc21/run_figure_16.sh old mode 100644 new mode 100755 index 8c353a3..7f61257 --- a/examples/academic_paper_scripts/sc21/run_figure_16.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_16.sh @@ -1,43 +1,43 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Microbatch size options = [1, 2, 4, 8]. -MBS=1 - -# Batch size (global batch size) options = [128, 512]. -GBS=128 - - - - - -# Other params. -TP=8 -PP=8 -NLS=32 -HS=15360 -NAH=128 -DDP=local -MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -NNODES=8 - - -# Name of the job. -export JOB_NAME=results_figure_16_microbatch_size_${MBS}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Microbatch size options = [1, 2, 4, 8]. +MBS=1 + +# Batch size (global batch size) options = [128, 512]. +GBS=128 + + + + + +# Other params. +TP=8 +PP=8 +NLS=32 +HS=15360 +NAH=128 +DDP=local +MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +NNODES=8 + + +# Name of the job. +export JOB_NAME=results_figure_16_microbatch_size_${MBS}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. 
`pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_17.sh b/examples/academic_paper_scripts/sc21/run_figure_17.sh old mode 100644 new mode 100755 index d6899b3..6da59de --- a/examples/academic_paper_scripts/sc21/run_figure_17.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_17.sh @@ -1,54 +1,54 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Activation recomputation options = [YES, NO]. -ACTIVATION_RECOMPUTATION=YES - -# Batch size (global batch size) options = [1, 2, 4, ..., 256]. -GBS=1 - - - - - -# Set activation recomputation. -if [ ${ACTIVATION_RECOMPUTATION} == "YES" ]; then - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${ACTIVATION_RECOMPUTATION} == "NO" ]; then - MEGATRON_EXTRA_PARAMS="" -else - echo "Invalid configuration" - exit 1 -fi - - -# Other params. -TP=8 -PP=16 -MBS=1 -NLS=80 -HS=12288 -NAH=96 -DDP=local -NNODES=16 - - -# Name of the job. -export JOB_NAME=results_figure_17_activation_recomputation_${ACTIVATION_RECOMPUTATION}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ + +# Activation recomputation options = [YES, NO]. +ACTIVATION_RECOMPUTATION=YES + +# Batch size (global batch size) options = [1, 2, 4, ..., 256]. +GBS=1 + + + + + +# Set activation recomputation. +if [ ${ACTIVATION_RECOMPUTATION} == "YES" ]; then + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${ACTIVATION_RECOMPUTATION} == "NO" ]; then + MEGATRON_EXTRA_PARAMS="" +else + echo "Invalid configuration" + exit 1 +fi + + +# Other params. +TP=8 +PP=16 +MBS=1 +NLS=80 +HS=12288 +NAH=96 +DDP=local +NNODES=16 + + +# Name of the job. +export JOB_NAME=results_figure_17_activation_recomputation_${ACTIVATION_RECOMPUTATION}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. `pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_figure_18.sh b/examples/academic_paper_scripts/sc21/run_figure_18.sh old mode 100644 new mode 100755 index 88924fb..0ddd8a2 --- a/examples/academic_paper_scripts/sc21/run_figure_18.sh +++ b/examples/academic_paper_scripts/sc21/run_figure_18.sh @@ -1,54 +1,54 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ - -# Scatter-gather communication optimization options = [YES, NO]. -SCATTER_GATHER=YES - -# Batch size (global batch size) options = [12, 24, 36, ..., 60]. -GBS=12 - - - - - -# Set scatter-gather communication optimization options. -if [ ${SCATTER_GATHER} == "YES" ]; then - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 " -elif [ ${SCATTER_GATHER} == "NO" ]; then - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 --no-scatter-gather-tensors-in-pipeline " -else - echo "Invalid configuration" - exit 1 -fi - - -# Other params. -TP=8 -PP=12 -MBS=1 -NLS=96 -HS=12288 -NAH=96 -DDP=local -NNODES=12 - - -# Name of the job. -export JOB_NAME=results_figure_18_scatter_gather_${SCATTER_GATHER}_batch_size_${GBS} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. 
+# ================================ + +# Scatter-gather communication optimization options = [YES, NO]. +SCATTER_GATHER=YES + +# Batch size (global batch size) options = [12, 24, 36, ..., 60]. +GBS=12 + + + + + +# Set scatter-gather communication optimization options. +if [ ${SCATTER_GATHER} == "YES" ]; then + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 " +elif [ ${SCATTER_GATHER} == "NO" ]; then + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 2 --no-scatter-gather-tensors-in-pipeline " +else + echo "Invalid configuration" + exit 1 +fi + + +# Other params. +TP=8 +PP=12 +MBS=1 +NLS=96 +HS=12288 +NAH=96 +DDP=local +NNODES=12 + + +# Name of the job. +export JOB_NAME=results_figure_18_scatter_gather_${SCATTER_GATHER}_batch_size_${GBS} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. `pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/academic_paper_scripts/sc21/run_table_1.sh b/examples/academic_paper_scripts/sc21/run_table_1.sh old mode 100644 new mode 100755 index 1b15fb0..31884cc --- a/examples/academic_paper_scripts/sc21/run_table_1.sh +++ b/examples/academic_paper_scripts/sc21/run_table_1.sh @@ -1,145 +1,145 @@ -#!/bin/bash - -# ================================ -# Choose the case to run. -# ================================ -# model size options = [1.7B, 3.6B, 7.5B, 18B, 39B, 76B, 145B, 310B, 530B, 1T] -MODEL_SIZE=1.7B - - - - - - -if [ ${MODEL_SIZE} == "1.7B" ]; then - TP=1 - PP=1 - MBS=16 - GBS=512 - NLS=24 - HS=2304 - NAH=24 - DDP=torch - NNODES=4 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${MODEL_SIZE} == "3.6B" ]; then - TP=2 - PP=1 - MBS=16 - GBS=512 - NLS=30 - HS=3072 - NAH=32 - DDP=torch - NNODES=8 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${MODEL_SIZE} == "7.5B" ]; then - TP=4 - PP=1 - MBS=16 - GBS=512 - NLS=36 - HS=4096 - NAH=32 - DDP=torch - NNODES=16 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${MODEL_SIZE} == "18B" ]; then - TP=8 - PP=1 - MBS=8 - GBS=1024 - NLS=40 - HS=6144 - NAH=48 - DDP=torch - NNODES=32 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${MODEL_SIZE} == "39B" ]; then - TP=8 - PP=2 - MBS=4 - GBS=1536 - NLS=48 - HS=8192 - NAH=64 - DDP=local - NNODES=64 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -elif [ ${MODEL_SIZE} == "76B" ]; then - TP=8 - PP=4 - MBS=2 - GBS=1792 - NLS=60 - HS=10240 - NAH=80 - DDP=local - NNODES=128 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 5" -elif [ ${MODEL_SIZE} == "145B" ]; then - TP=8 - PP=8 - MBS=2 - GBS=2304 - NLS=80 - HS=12288 - NAH=96 - DDP=local - NNODES=192 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 5 " -elif [ ${MODEL_SIZE} == "310B" ]; then - TP=8 - PP=16 - MBS=1 - GBS=2160 - NLS=96 - HS=16384 - NAH=128 - DDP=local - NNODES=240 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 3 " -elif [ ${MODEL_SIZE} == "530B" ]; then - TP=8 - PP=35 - MBS=1 - GBS=2520 - NLS=105 - HS=20480 - NAH=128 - DDP=local - NNODES=315 - MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 1 " -elif [ ${MODEL_SIZE} == "1T" ]; then - TP=8 - PP=64 - MBS=1 - GBS=3072 - NLS=128 - HS=25600 - NAH=160 - DDP=local - NNODES=384 - 
MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " -else - echo "Invalid configuration" - exit 1 -fi - - -# Name of the job -export JOB_NAME=results_table_1_model_size_${MODEL_SIZE} - - -# Import the configs. -. `pwd`/CONFIG.sh - - -# Submit the job. -. `pwd`/SBATCH.sh - - -exit 0 - - - +#!/bin/bash + +# ================================ +# Choose the case to run. +# ================================ +# model size options = [1.7B, 3.6B, 7.5B, 18B, 39B, 76B, 145B, 310B, 530B, 1T] +MODEL_SIZE=1.7B + + + + + + +if [ ${MODEL_SIZE} == "1.7B" ]; then + TP=1 + PP=1 + MBS=16 + GBS=512 + NLS=24 + HS=2304 + NAH=24 + DDP=torch + NNODES=4 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${MODEL_SIZE} == "3.6B" ]; then + TP=2 + PP=1 + MBS=16 + GBS=512 + NLS=30 + HS=3072 + NAH=32 + DDP=torch + NNODES=8 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${MODEL_SIZE} == "7.5B" ]; then + TP=4 + PP=1 + MBS=16 + GBS=512 + NLS=36 + HS=4096 + NAH=32 + DDP=torch + NNODES=16 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${MODEL_SIZE} == "18B" ]; then + TP=8 + PP=1 + MBS=8 + GBS=1024 + NLS=40 + HS=6144 + NAH=48 + DDP=torch + NNODES=32 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${MODEL_SIZE} == "39B" ]; then + TP=8 + PP=2 + MBS=4 + GBS=1536 + NLS=48 + HS=8192 + NAH=64 + DDP=local + NNODES=64 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +elif [ ${MODEL_SIZE} == "76B" ]; then + TP=8 + PP=4 + MBS=2 + GBS=1792 + NLS=60 + HS=10240 + NAH=80 + DDP=local + NNODES=128 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 5" +elif [ ${MODEL_SIZE} == "145B" ]; then + TP=8 + PP=8 + MBS=2 + GBS=2304 + NLS=80 + HS=12288 + NAH=96 + DDP=local + NNODES=192 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 5 " +elif [ ${MODEL_SIZE} == "310B" ]; then + TP=8 + PP=16 + MBS=1 + GBS=2160 + NLS=96 + HS=16384 + NAH=128 + DDP=local + NNODES=240 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 3 " +elif [ ${MODEL_SIZE} == "530B" ]; then + TP=8 + PP=35 + MBS=1 + GBS=2520 + NLS=105 + HS=20480 + NAH=128 + DDP=local + NNODES=315 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform --num-layers-per-virtual-pipeline-stage 1 " +elif [ ${MODEL_SIZE} == "1T" ]; then + TP=8 + PP=64 + MBS=1 + GBS=3072 + NLS=128 + HS=25600 + NAH=160 + DDP=local + NNODES=384 + MEGATRON_EXTRA_PARAMS="--activations-checkpoint-method uniform " +else + echo "Invalid configuration" + exit 1 +fi + + +# Name of the job +export JOB_NAME=results_table_1_model_size_${MODEL_SIZE} + + +# Import the configs. +. `pwd`/CONFIG.sh + + +# Submit the job. +. 
`pwd`/SBATCH.sh + + +exit 0 + + + diff --git a/examples/bert/train_bert_340m_distributed.sh b/examples/bert/train_bert_340m_distributed.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/README.md b/examples/export/ptq_and_trtllm_export/README.md index abaa0d7..2605910 100644 --- a/examples/export/ptq_and_trtllm_export/README.md +++ b/examples/export/ptq_and_trtllm_export/README.md @@ -18,7 +18,7 @@ make -C docker release_build Once the container is built, install `nvidia-modelopt` and additional dependencies for sharded checkpoint support: ```sh pip install "nvidia-modelopt[all]~=0.13.0" --extra-index-url https://pypi.nvidia.com -pip install zarr tensorstore==0.1.45 +pip install zarr tensorstore!=0.1.46 ``` TensorRT-LLM quantization functionalities are currently packaged in `nvidia-modelopt`. You can find more documentation about `nvidia-modelopt` [here](https://nvidia.github.io/TensorRT-Model-Optimizer/). @@ -292,4 +292,4 @@ export trtllm_options=" \ trtllm-build ${trtllm_options} python examples/export/ptq_and_trtllm_export/trtllm_text_generation.py --tokenizer mistralai/Mixtral-8x7B-v0.1 -``` \ No newline at end of file +``` diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama2_7b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama2_7b.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_1_8b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_1_8b.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_8b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_llama3_8b.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_minitron_8b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_minitron_8b.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_mistral_12b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_mistral_12b.sh old mode 100644 new mode 100755 diff --git a/examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh b/examples/export/ptq_and_trtllm_export/ptq_trtllm_mixtral_8x7b.sh old mode 100644 new mode 100755 diff --git a/examples/gpt3/gpt_config.yaml b/examples/gpt3/gpt_config.yaml index 0625782..4f87c0a 100644 --- a/examples/gpt3/gpt_config.yaml +++ b/examples/gpt3/gpt_config.yaml @@ -1,300 +1,301 @@ -# WARNING: Yaml configs is currently an experimental feature -language_model: - # model architecture - num_layers: 24 - hidden_size: 1024 - num_attention_heads: 16 - num_query_groups: null - - ffn_hidden_size: null - kv_channels: null - hidden_dropout: 0.0 - attention_dropout: 0.0 - fp32_residual_connection: False - - apply_residual_connection_post_layernorm: False - layernorm_epsilon: 1.e-5 - layernorm_zero_centered_gamma: True - add_bias_linear: False - bias_activation_fusion: False - add_qkv_bias: False - gated_linear_unit: False - activation_func: swiglu - num_moe_experts: null - rotary_interleaved: False - window_size: null - - # initialization - init_method: null - init_method_std: 0.02 - output_layer_init_method: null - - # mixed-precision - apply_query_key_layer_scaling: False - attention_softmax_in_fp32: False - - # fusion - bias_swiglu_fusion: True - masked_softmax_fusion: True - persist_layer_norm: False - memory_efficient_layer_norm: False - bias_dropout_fusion: True - apply_rope_fusion: True - - # activation recomputation - recompute_granularity: null - recompute_method: null - recompute_num_layers: 
null - distribute_saved_activations: null - - # fp8 related - fp8: null - fp8_margin: 0 - fp8_interval: 1 - fp8_amax_history_len: 1 - fp8_amax_compute_algo: "most_recent" - fp8_wgrad: True - - # miscellaneous - clone_scatter_output_in_embedding: True - - normalization: "LayerNorm" # alt value supported by TE: "RMSNorm" - - # MoE related - moe_router_load_balancing_type: "aux_loss" - moe_router_topk: 2 - moe_router_topk_limited_devices: null - moe_grouped_gemm: False - moe_aux_loss_coeff: 0 # 1e-2 would be a good start value for load balance loss. - moe_z_loss_coeff: null # 1e-3 would be a good start value for z-loss - moe_input_jitter_eps: null - moe_token_dropping: False - -model_parallel: - # Model parallelism - tensor_model_parallel_size: 1 - context_parallel_size: 1 - pipeline_model_parallel_size: 1 - virtual_pipeline_model_parallel_size: null - sequence_parallel: True - expert_model_parallel_size: 1 - - # Initialization - perform_initialization: True - use_cpu_initialization: null - - # Training - fp16: False - bf16: True - params_dtype: null # Set from above arguments for core - timers: null - - # Optimizations - gradient_accumulation_fusion: True - async_tensor_model_parallel_allreduce: True - tp_comm_overlap: False - - # Debug Options - tp_comm_split_ag: True - tp_comm_atomic_ag: True - tp_comm_split_rs: True - tp_comm_atomic_rs: True - tp_comm_bulk_wgrad: True - tp_comm_bulk_dgrad: True - - # Parallelism - finalize_model_grads_func: null - - # Pipeline Parallel - pipeline_dtype: null - grad_scale_func: null - enable_autocast: False - autocast_dtype: null - variable_seq_lengths: False - num_microbatches_with_partial_activation_checkpoints: null - overlap_p2p_comm: False - batch_p2p_comm: True - batch_p2p_sync: True - use_ring_exchange_p2p: False - deallocate_pipeline_outputs: False - no_sync_func: null - grad_sync_func: null - param_sync_func: null - pipeline_model_parallel_split_rank: null - - # CPU Offloading - cpu_offloading: False - cpu_offloading_num_layers: 0 - _cpu_offloading_context: null - cpu_offloading_weights: False - cpu_offloading_activations: True - - # Timing - barrier_with_L1_time: True - -# training: -use_legacy_models: False -spec: null -micro_batch_size: 2 -global_batch_size: 128 -rampup_batch_size: [32, 32, 65324160] -check_for_nan_in_loss_and_grad: True -num_layers_per_virtual_pipeline_stage: null - -encoder_num_layers: null -decoder_num_layers: null -rotary_seq_len_interpolation_factor: null -add_position_embedding: False -make_vocab_size_divisible_by: 128 -group_query_attention: False - - -exit_signal_handler: False -exit_duration_in_mins: null -exit_interval: null - -untie_embeddings_and_output_weights: True -position_embedding_type: rope -rotary_percent: 0.5 -openai_gelu: False -squared_relu: False -swiglu: True -onnx_safe: null -bert_binary_head: True -max_position_embeddings: 4096 - -transformer_impl: local -use_flash_attn: False -seed: 1234 -data_parallel_random_init: False - -# Optimizer -optimizer: adam -lr: 2.5e-4 -lr_decay_style: cosine -lr_decay_iters: null -lr_decay_samples: 255126953 -lr_warmup_fraction: null -lr_warmup_iters: 0 -lr_warmup_samples: 81381 -lr_warmup_init: 0.0 -min_lr: 2.5e-5 -weight_decay: 0.1 -start_weight_decay: null -end_weight_decay: null -weight_decay_incr_style: constant -clip_grad: 1.0 -adam_beta1: 0.9 -adam_beta2: 0.95 -adam_eps: 1.e-08 -sgd_momentum: 0.9 -override_opt_param_scheduler: False -use_checkpoint_opt_param_scheduler: False - -# checkpointing arguments -save: null -save_interval: 20000 -no_save_optim: null 
-no_save_rng: null -load: null -no_load_optim: null -no_load_rng: null -finetune: False -use_checkpoint_args: False -exit_on_missing_checkpoint: False - -# loss arguments -loss_scale: null -initial_loss_scale: 4294967296 -min_loss_scale: 1.0 -loss_scale_window: 1000 -hysteresis: 2 -accumulate_allreduce_grads_in_fp32: False -fp16_lm_cross_entropy: False - -# distributed arguments -distributed_backend: nccl -distributed_timeout_minutes: 10 -overlap_grad_reduce: False -align_grad_reduce: True -overlap_param_gather: False -align_param_gather: False -scatter_gather_tensors_in_pipeline: True -local_rank: null -lazy_mpu_init: null -empty_unused_memory_level: 0 -standalone_embedding_stage: False -use_distributed_optimizer: False -nccl_communicator_config_path: null - -train_iters: null -eval_iters: 32 -eval_interval: 2000 -skip_train: False - -adlr_autoresume: False -adlr_autoresume_interval: 1000 - -# garbage collection -manual_gc: False -manual_gc_interval: 0 -manual_gc_eval: True - -tp_comm_overlap_cfg: null - -#data -data_path: null -split: '99,1,0' -train_data_path: null -valid_data_path: null -test_data_path: null -data_cache_path: null -mock_data: False -vocab_size: null -vocab_file: null -merge_file: null -vocab_extra_ids: 0 -seq_length: 4096 -encoder_seq_length: null -decoder_seq_length: null -retriever_seq_length: 256 -sample_rate: 1.0 -mask_prob: 0.15 -short_seq_prob: 0.1 -num_workers: 2 -tokenizer_type: GPTSentencePieceTokenizer -tokenizer_model: null -reset_position_ids: False -reset_attention_mask: False -eod_mask_loss: False -train_samples: 268554688 -dataloader_type: null - -#profile: -profile: False -profile_ranks: [0] -profile_step_end: 12 -profile_step_start: 10 - -#logging: -log_params_norm: True -log_num_zeros_in_grad: True -log_throughput: False -log_progress: False -timing_log_level: 0 -timing_log_option: minmax -tensorboard_log_interval: 1 -tensorboard_queue_size: 1000 -log_timers_to_tensorboard: False -log_validation_ppl_to_tensorboard: False -log_memory_to_tensorboard: False -log_world_size_to_tensorboard: False -log_loss_scale_to_tensorboard: True -wandb_project: '' -wandb_exp_name: '' -wandb_save_dir: '' -enable_one_logger: True -one_logger_project: megatron-lm -one_logger_run_name: null -log_interval: 100 -tensorboard_dir: null +# WARNING: Yaml configs is currently an experimental feature +language_model: + # model architecture + num_layers: 24 + hidden_size: 1024 + num_attention_heads: 16 + num_query_groups: null + + ffn_hidden_size: null + kv_channels: null + hidden_dropout: 0.0 + attention_dropout: 0.0 + fp32_residual_connection: False + + apply_residual_connection_post_layernorm: False + layernorm_epsilon: 1.e-5 + layernorm_zero_centered_gamma: True + add_bias_linear: False + bias_activation_fusion: False + add_qkv_bias: False + gated_linear_unit: False + activation_func: swiglu + num_moe_experts: null + rotary_interleaved: False + window_size: null + + # initialization + init_method: null + init_method_std: 0.02 + output_layer_init_method: null + + # mixed-precision + apply_query_key_layer_scaling: False + attention_softmax_in_fp32: False + + # fusion + bias_swiglu_fusion: True + masked_softmax_fusion: True + persist_layer_norm: False + memory_efficient_layer_norm: False + bias_dropout_fusion: True + apply_rope_fusion: True + + # activation recomputation + recompute_granularity: null + recompute_method: null + recompute_num_layers: null + distribute_saved_activations: null + + # fp8 related + fp8: null + fp8_margin: 0 + fp8_interval: 1 + fp8_amax_history_len: 1 + 
fp8_amax_compute_algo: "most_recent" + fp8_wgrad: True + + # miscellaneous + clone_scatter_output_in_embedding: True + + normalization: "LayerNorm" # alt value supported by TE: "RMSNorm" + + # MoE related + moe_router_load_balancing_type: "aux_loss" + moe_router_topk: 2 + moe_router_group_topk: null + moe_router_num_groups: null + moe_grouped_gemm: False + moe_aux_loss_coeff: 0 # 1e-2 would be a good start value for load balance loss. + moe_z_loss_coeff: null # 1e-3 would be a good start value for z-loss + moe_input_jitter_eps: null + moe_token_dropping: False + +model_parallel: + # Model parallelism + tensor_model_parallel_size: 1 + context_parallel_size: 1 + pipeline_model_parallel_size: 1 + virtual_pipeline_model_parallel_size: null + sequence_parallel: True + expert_model_parallel_size: 1 + + # Initialization + perform_initialization: True + use_cpu_initialization: null + + # Training + fp16: False + bf16: True + params_dtype: null # Set from above arguments for core + timers: null + + # Optimizations + gradient_accumulation_fusion: True + async_tensor_model_parallel_allreduce: True + tp_comm_overlap: False + + # Debug Options + tp_comm_split_ag: True + tp_comm_atomic_ag: True + tp_comm_split_rs: True + tp_comm_atomic_rs: True + tp_comm_bulk_wgrad: True + tp_comm_bulk_dgrad: True + + # Parallelism + finalize_model_grads_func: null + + # Pipeline Parallel + pipeline_dtype: null + grad_scale_func: null + enable_autocast: False + autocast_dtype: null + variable_seq_lengths: False + num_microbatches_with_partial_activation_checkpoints: null + overlap_p2p_comm: False + batch_p2p_comm: True + batch_p2p_sync: True + use_ring_exchange_p2p: False + deallocate_pipeline_outputs: False + no_sync_func: null + grad_sync_func: null + param_sync_func: null + pipeline_model_parallel_split_rank: null + + # CPU Offloading + cpu_offloading: False + cpu_offloading_num_layers: 0 + _cpu_offloading_context: null + cpu_offloading_weights: False + cpu_offloading_activations: True + + # Timing + barrier_with_L1_time: True + +# training: +use_legacy_models: False +spec: null +micro_batch_size: 2 +global_batch_size: 128 +rampup_batch_size: [32, 32, 65324160] +check_for_nan_in_loss_and_grad: True +num_layers_per_virtual_pipeline_stage: null + +encoder_num_layers: null +decoder_num_layers: null +rotary_seq_len_interpolation_factor: null +add_position_embedding: False +make_vocab_size_divisible_by: 128 +group_query_attention: False + + +exit_signal_handler: False +exit_duration_in_mins: null +exit_interval: null + +untie_embeddings_and_output_weights: True +position_embedding_type: rope +rotary_percent: 0.5 +openai_gelu: False +squared_relu: False +swiglu: True +onnx_safe: null +bert_binary_head: True +max_position_embeddings: 4096 + +transformer_impl: local +use_flash_attn: False +seed: 1234 +data_parallel_random_init: False + +# Optimizer +optimizer: adam +lr: 2.5e-4 +lr_decay_style: cosine +lr_decay_iters: null +lr_decay_samples: 255126953 +lr_warmup_fraction: null +lr_warmup_iters: 0 +lr_warmup_samples: 81381 +lr_warmup_init: 0.0 +min_lr: 2.5e-5 +weight_decay: 0.1 +start_weight_decay: null +end_weight_decay: null +weight_decay_incr_style: constant +clip_grad: 1.0 +adam_beta1: 0.9 +adam_beta2: 0.95 +adam_eps: 1.e-08 +sgd_momentum: 0.9 +override_opt_param_scheduler: False +use_checkpoint_opt_param_scheduler: False + +# checkpointing arguments +save: null +save_interval: 20000 +no_save_optim: null +no_save_rng: null +load: null +no_load_optim: null +no_load_rng: null +finetune: False +use_checkpoint_args: False 
+exit_on_missing_checkpoint: False + +# loss arguments +loss_scale: null +initial_loss_scale: 4294967296 +min_loss_scale: 1.0 +loss_scale_window: 1000 +hysteresis: 2 +accumulate_allreduce_grads_in_fp32: False +fp16_lm_cross_entropy: False + +# distributed arguments +distributed_backend: nccl +distributed_timeout_minutes: 10 +overlap_grad_reduce: False +align_grad_reduce: True +overlap_param_gather: False +align_param_gather: False +scatter_gather_tensors_in_pipeline: True +local_rank: null +lazy_mpu_init: null +empty_unused_memory_level: 0 +standalone_embedding_stage: False +use_distributed_optimizer: False +nccl_communicator_config_path: null + +train_iters: null +eval_iters: 32 +eval_interval: 2000 +skip_train: False + +adlr_autoresume: False +adlr_autoresume_interval: 1000 + +# garbage collection +manual_gc: False +manual_gc_interval: 0 +manual_gc_eval: True + +tp_comm_overlap_cfg: null + +#data +data_path: null +split: '99,1,0' +train_data_path: null +valid_data_path: null +test_data_path: null +data_cache_path: null +mock_data: False +vocab_size: null +vocab_file: null +merge_file: null +vocab_extra_ids: 0 +seq_length: 4096 +encoder_seq_length: null +decoder_seq_length: null +retriever_seq_length: 256 +sample_rate: 1.0 +mask_prob: 0.15 +short_seq_prob: 0.1 +num_workers: 2 +tokenizer_type: GPTSentencePieceTokenizer +tokenizer_model: null +reset_position_ids: False +reset_attention_mask: False +eod_mask_loss: False +train_samples: 268554688 +dataloader_type: null + +#profile: +profile: False +profile_ranks: [0] +profile_step_end: 12 +profile_step_start: 10 + +#logging: +log_params_norm: True +log_num_zeros_in_grad: True +log_throughput: False +log_progress: False +timing_log_level: 0 +timing_log_option: minmax +tensorboard_log_interval: 1 +tensorboard_queue_size: 1000 +log_timers_to_tensorboard: False +log_validation_ppl_to_tensorboard: False +log_memory_to_tensorboard: False +log_world_size_to_tensorboard: False +log_loss_scale_to_tensorboard: True +wandb_project: '' +wandb_exp_name: '' +wandb_save_dir: '' +enable_one_logger: True +one_logger_project: megatron-lm +one_logger_run_name: null +log_interval: 100 +tensorboard_dir: null diff --git a/examples/gpt3/hostfile_gpt_567B b/examples/gpt3/hostfile_gpt_567B new file mode 100644 index 0000000..e69de29 diff --git a/run_GPT-MOE_1nodes.sh b/examples/gpt3/run_gpt_567B_1nodes.sh old mode 100644 new mode 100755 similarity index 52% rename from run_GPT-MOE_1nodes.sh rename to examples/gpt3/run_gpt_567B_1nodes.sh index d38585e..32c5ac5 --- a/run_GPT-MOE_1nodes.sh +++ b/examples/gpt3/run_gpt_567B_1nodes.sh @@ -7,10 +7,10 @@ do fi done -mpirun -np 8 --allow-run-as-root \ - train_GPT-MOE_567B_1nodes.sh localhost --profiling=$profiling > output.log 2>&1 +mpirun -np 8 --allow-run-as-root \ + train_gpt_567B_1nodes.sh localhost --profiling=$profiling > output.log 2>&1 wait rm -rf CKPT -rm -rf mixtral_dataset/my-mixtral_text_document \ No newline at end of file +rm -rf mixtral_dataset/my-mixtral_text_document diff --git a/run_mixtral8x7B_2nodes.sh b/examples/gpt3/run_gpt_567B_multinodes.sh old mode 100644 new mode 100755 similarity index 67% rename from run_mixtral8x7B_2nodes.sh rename to examples/gpt3/run_gpt_567B_multinodes.sh index bd92ab0..10821c4 --- a/run_mixtral8x7B_2nodes.sh +++ b/examples/gpt3/run_gpt_567B_multinodes.sh @@ -7,13 +7,13 @@ do fi done -mpirun -np 16 --hostfile mixtralnodes \ +mpirun -np 512 --hostfile hostfile_gpt_567B \ --allow-run-as-root \ --bind-to none \ --mca plm_rsh_no_tree_spawn 1 \ - train_mixtral_8x7B_2nodes.sh node021 
--profiling=$profiling > output.log 2>&1 + train_gpt_567B_multinodes.sh node002 --profiling=$profiling > output.log 2>&1 wait rm -rf CKPT -#rm -rf mixtral_dataset/my-mixtral_text_document \ No newline at end of file +#rm -rf mixtral_dataset/my-mixtral_text_document diff --git a/examples/gpt3/train_gpt3_175b_distributed.sh b/examples/gpt3/train_gpt3_175b_distributed.sh old mode 100644 new mode 100755 index 7d2c01b..fbc7e38 --- a/examples/gpt3/train_gpt3_175b_distributed.sh +++ b/examples/gpt3/train_gpt3_175b_distributed.sh @@ -1,82 +1,82 @@ -#!/bin/bash - -# Runs the "175B" parameter model - -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -GPUS_PER_NODE=8 -# Change for multinode config -MASTER_ADDR=localhost -MASTER_PORT=6000 -NUM_NODES=1 -NODE_RANK=0 -WORLD_SIZE=$(($GPUS_PER_NODE*$NUM_NODES)) - -CHECKPOINT_PATH=$1 # -TENSORBOARD_LOGS_PATH=$2 # -VOCAB_FILE=$3 #/gpt2-vocab.json -MERGE_FILE=$4 #/gpt2-merges.txt -DATA_PATH=$5 #_text_document - -DISTRIBUTED_ARGS=( - --nproc_per_node $GPUS_PER_NODE - --nnodes $NUM_NODES - --master_addr $MASTER_ADDR - --master_port $MASTER_PORT -) - -GPT_MODEL_ARGS=( - --num-layers 96 - --hidden-size 12288 - --num-attention-heads 96 - --seq-length 2048 - --max-position-embeddings 2048 - --attention-backend auto # Can use (flash/fused/unfused/local) -) - -TRAINING_ARGS=( - --micro-batch-size 1 - --global-batch-size 1536 - --rampup-batch-size 16 16 5859375 - --train-iters 500000 - --weight-decay 0.1 - --adam-beta1 0.9 - --adam-beta2 0.95 - --init-method-std 0.006 - --clip-grad 1.0 - --fp16 - --lr 6.0e-5 - --lr-decay-style cosine - --min-lr 6.0e-6 - --lr-warmup-fraction .001 - --lr-decay-iters 430000 -) - -MODEL_PARALLEL_ARGS=( - --tensor-model-parallel-size 8 - --pipeline-model-parallel-size 16 -) - -DATA_ARGS=( - --data-path $DATA_PATH - --vocab-file $VOCAB_FILE - --merge-file $MERGE_FILE - --split 949,50,1 -) - -EVAL_AND_LOGGING_ARGS=( - --log-interval 100 - --save-interval 10000 - --eval-interval 1000 - --save $CHECKPOINT_PATH - --load $CHECKPOINT_PATH - --eval-iters 10 - --tensorboard-dir $TENSORBOARD_LOGS_PATH -) - -torchrun ${DISTRIBUTED_ARGS[@]} pretrain_gpt.py \ - ${GPT_MODEL_ARGS[@]} \ - ${TRAINING_ARGS[@]} \ - ${MODEL_PARALLEL_ARGS[@]} \ - ${DATA_ARGS[@]} \ - ${EVAL_AND_LOGGING_ARGS[@]} +#!/bin/bash + +# Runs the "175B" parameter model + +export CUDA_DEVICE_MAX_CONNECTIONS=1 + +GPUS_PER_NODE=8 +# Change for multinode config +MASTER_ADDR=localhost +MASTER_PORT=6000 +NUM_NODES=1 +NODE_RANK=0 +WORLD_SIZE=$(($GPUS_PER_NODE*$NUM_NODES)) + +CHECKPOINT_PATH=$1 # +TENSORBOARD_LOGS_PATH=$2 # +VOCAB_FILE=$3 #/gpt2-vocab.json +MERGE_FILE=$4 #/gpt2-merges.txt +DATA_PATH=$5 #_text_document + +DISTRIBUTED_ARGS=( + --nproc_per_node $GPUS_PER_NODE + --nnodes $NUM_NODES + --master_addr $MASTER_ADDR + --master_port $MASTER_PORT +) + +GPT_MODEL_ARGS=( + --num-layers 96 + --hidden-size 12288 + --num-attention-heads 96 + --seq-length 2048 + --max-position-embeddings 2048 + --attention-backend auto # Can use (flash/fused/unfused/local) +) + +TRAINING_ARGS=( + --micro-batch-size 1 + --global-batch-size 1536 + --rampup-batch-size 16 16 5859375 + --train-iters 500000 + --weight-decay 0.1 + --adam-beta1 0.9 + --adam-beta2 0.95 + --init-method-std 0.006 + --clip-grad 1.0 + --fp16 + --lr 6.0e-5 + --lr-decay-style cosine + --min-lr 6.0e-6 + --lr-warmup-fraction .001 + --lr-decay-iters 430000 +) + +MODEL_PARALLEL_ARGS=( + --tensor-model-parallel-size 8 + --pipeline-model-parallel-size 16 +) + +DATA_ARGS=( + --data-path $DATA_PATH + --vocab-file $VOCAB_FILE + --merge-file $MERGE_FILE + --split 
949,50,1 +) + +EVAL_AND_LOGGING_ARGS=( + --log-interval 100 + --save-interval 10000 + --eval-interval 1000 + --save $CHECKPOINT_PATH + --load $CHECKPOINT_PATH + --eval-iters 10 + --tensorboard-dir $TENSORBOARD_LOGS_PATH +) + +torchrun ${DISTRIBUTED_ARGS[@]} pretrain_gpt.py \ + ${GPT_MODEL_ARGS[@]} \ + ${TRAINING_ARGS[@]} \ + ${MODEL_PARALLEL_ARGS[@]} \ + ${DATA_ARGS[@]} \ + ${EVAL_AND_LOGGING_ARGS[@]} diff --git a/train_GPT-MOE_567B_1nodes.sh b/examples/gpt3/train_gpt_567B_1nodes.sh old mode 100644 new mode 100755 similarity index 88% rename from train_GPT-MOE_567B_1nodes.sh rename to examples/gpt3/train_gpt_567B_1nodes.sh index 1eac615..40bd6b7 --- a/train_GPT-MOE_567B_1nodes.sh +++ b/examples/gpt3/train_gpt_567B_1nodes.sh @@ -4,18 +4,23 @@ for para in $* do if [[ $para == --profiling* ]];then profiling=${para#*=} - export GPU_FLUSH_ON_EXECUTION=1 - export HIP_DIRECT_DISPATCH=0 fi done +# Runs GPT 567B model source /opt/dtk/env.sh -# Runs Mixtral 8x7B model + +# defauat env +CURRENT_DIR="$( cd "$( dirname "$0" )" && pwd )" +MEGATRON_PATH=$( dirname $( dirname ${CURRENT_DIR})) +export PYTHONPATH=${MEGATRON_PATH}:$PYTHONPATH +export GLOG_minloglevel=3 export CUDA_DEVICE_MAX_CONNECTIONS=1 export HSA_FORCE_FINE_GRAIN_PCIE=1 export OMP_NUM_THREADS=1 export GPU_MAX_HW_QUEUES=10 +# nccl env export NCCL_ALGO=Ring export NCCL_MIN_NCHANNELS=32 export NCCL_MAX_NCHANNELS=32 @@ -23,9 +28,10 @@ export NCCL_NET_GDR_LEVEL=7 export NCCL_NET_GDR_READ=1 export RCCL_SDMA_COPY_ENABLE=0 export NCCL_IB_HCA=mlx5_2:1,mlx5_3:1,mlx5_4:1,mlx5_5:1,mlx5_6:1,mlx5_7:1,mlx5_8:1,mlx5_9:1 -#export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" +export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" + +# enable BatchLinear export GROUPED_GEMM_BatchLinear=1 -export GLOG_minloglevel=3 RANK=$OMPI_COMM_WORLD_RANK LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK @@ -96,7 +102,6 @@ TRAINING_ARGS=( --bf16 --overlap-param-gather --overlap-grad-reduce - #--tp-comm-overlap ) TORCH_PROFIE_ARGS=( @@ -104,18 +109,10 @@ TORCH_PROFIE_ARGS=( --profile-ranks 0 1 2 3 4 5 6 7 --profile-step-start 3 --profile-step-end 4 - --profile-dir torch_prof_gpt_1nodes + --profile-dir torch_prof_gpt_1nodes_tp2-pp1-ep8-ep_tp1 --use-pytorch-profiler ) -HIP_PROFIE_ARGS=( - --profile - --profile-ranks 0 1 2 3 4 5 6 7 - --profile-step-start 4 - --profile-step-end 5 - --use-hip-profiler -) - MODEL_PARALLEL_ARGS=( --tensor-model-parallel-size 2 --pipeline-model-parallel-size 1 @@ -157,10 +154,6 @@ APP="python3 -u pretrain_gpt.py \ if [[ $profiling == "torch" ]]; then APP+=" ${TORCH_PROFIE_ARGS[@]}" -elif [[ $profiling == "hip" ]]; then - mkdir -p hip_prof_data - APP+=" ${HIP_PROFIE_ARGS[@]}" - APP="hipprof -d hip_prof_data --hip-trace --trace-off ${APP}" fi #for hygon cpu @@ -205,4 +198,4 @@ case ${LOCAL_RANK} in ${APP} #numactl --cpunodebind=7 --membind=7 ${APP} ;; -esac \ No newline at end of file +esac diff --git a/train_GPT-MOE_567B.sh b/examples/gpt3/train_gpt_567B_multinodes.sh old mode 100644 new mode 100755 similarity index 87% rename from train_GPT-MOE_567B.sh rename to examples/gpt3/train_gpt_567B_multinodes.sh index f298fa8..9751e08 --- a/train_GPT-MOE_567B.sh +++ b/examples/gpt3/train_gpt_567B_multinodes.sh @@ -4,18 +4,23 @@ for para in $* do if [[ $para == --profiling* ]];then profiling=${para#*=} - export GPU_FLUSH_ON_EXECUTION=1 - export HIP_DIRECT_DISPATCH=0 fi done +# Runs GPT 567B model source /opt/dtk/env.sh -# Runs Mixtral 8x7B model + +# defauat env +CURRENT_DIR="$( cd "$( dirname "$0" )" 
&& pwd )" +MEGATRON_PATH=$( dirname $( dirname ${CURRENT_DIR})) +export PYTHONPATH=${MEGATRON_PATH}:$PYTHONPATH +export GLOG_minloglevel=3 export CUDA_DEVICE_MAX_CONNECTIONS=1 export HSA_FORCE_FINE_GRAIN_PCIE=1 export OMP_NUM_THREADS=1 export GPU_MAX_HW_QUEUES=10 +# nccl env export NCCL_ALGO=Ring export NCCL_MIN_NCHANNELS=32 export NCCL_MAX_NCHANNELS=32 @@ -23,9 +28,10 @@ export NCCL_NET_GDR_LEVEL=7 export NCCL_NET_GDR_READ=1 export RCCL_SDMA_COPY_ENABLE=0 export NCCL_IB_HCA=mlx5_2:1,mlx5_3:1,mlx5_4:1,mlx5_5:1,mlx5_6:1,mlx5_7:1,mlx5_8:1,mlx5_9:1 -#export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" +export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" + +# enable BatchLinear export GROUPED_GEMM_BatchLinear=1 -export GLOG_minloglevel=3 RANK=$OMPI_COMM_WORLD_RANK LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK @@ -49,7 +55,7 @@ MODEL_ARGS=( --disable-bias-linear --seq-length 8192 --max-position-embeddings 32768 - --num-layers 64 + --num-layers 32 #64 --hidden-size 8192 --ffn-hidden-size 32768 --num-attention-heads 64 @@ -72,7 +78,7 @@ MOE_ARGS=( --moe-token-dispatcher-type alltoall --moe-expert-capacity-factor 0.5 --moe-pad-expert-input-to-capacity - --moe-grouped-gemm + #--moe-grouped-gemm ) DATA_ARGS=( @@ -84,7 +90,7 @@ DATA_ARGS=( TRAINING_ARGS=( --micro-batch-size 1 - --global-batch-size 4096 + --global-batch-size 1024 --lr 1e-4 --train-iters 10 --lr-decay-iters 320000 @@ -96,7 +102,6 @@ TRAINING_ARGS=( --bf16 --overlap-param-gather --overlap-grad-reduce - #--tp-comm-overlap ) TORCH_PROFIE_ARGS=( @@ -104,23 +109,16 @@ TORCH_PROFIE_ARGS=( --profile-ranks 0 1 2 3 4 5 6 7 --profile-step-start 3 --profile-step-end 4 - --profile-dir torch_prof_gpt + --profile-dir torch_prof_gpt_64nodes_tp2-pp16-ep16-ep_tp1-cp2 --use-pytorch-profiler ) -HIP_PROFIE_ARGS=( - --profile - --profile-ranks 0 1 2 3 4 5 6 7 - --profile-step-start 4 - --profile-step-end 5 - --use-hip-profiler -) - MODEL_PARALLEL_ARGS=( --tensor-model-parallel-size 2 --pipeline-model-parallel-size 16 --expert-model-parallel-size 16 --expert-tensor-parallel-size 1 + --context-parallel-size 2 --use-distributed-optimizer --sequence-parallel ) @@ -157,10 +155,6 @@ APP="python3 -u pretrain_gpt.py \ if [[ $profiling == "torch" ]]; then APP+=" ${TORCH_PROFIE_ARGS[@]}" -elif [[ $profiling == "hip" ]]; then - mkdir -p hip_prof_data - APP+=" ${HIP_PROFIE_ARGS[@]}" - APP="hipprof -d hip_prof_data --hip-trace --trace-off ${APP}" fi #for hygon cpu diff --git a/examples/inference/gpt/gpt_batch_inference.py b/examples/inference/gpt/gpt_batch_inference.py index 050b230..604408b 100644 --- a/examples/inference/gpt/gpt_batch_inference.py +++ b/examples/inference/gpt/gpt_batch_inference.py @@ -1,115 +1,200 @@ -import os -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import InferenceWrapperConfig -from pretrain_gpt import model_provider -import torch -import sys -from argparse import Namespace -from megatron.core.inference.engines.abstract_engine import AbstractEngine -from megatron.core.inference.engines.mcore_engine import MCoreEngine -from megatron.core.inference.sampling_params import SamplingParams -from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import GPTInferenceWrapper -from megatron.core.inference.inference_request import InferenceRequest -from megatron.core.inference.text_generation_controllers.text_generation_controller import TextGenerationController -from megatron.core.transformer.module import MegatronModule 
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.path.pardir, os.path.pardir))) - -from megatron.training import get_args -from megatron.training import get_tokenizer -from megatron.training.checkpointing import load_checkpoint -from megatron.core import mpu -from megatron.training.initialize import initialize_megatron -from megatron.training import get_model -from typing import List - -def add_text_generate_args(parser): - """Text generation arguments.""" - group = parser.add_argument_group(title='text generation') - - group.add_argument("--temperature", type=float, default=1.0, - help='Sampling temperature.') - group.add_argument("--top_k", type=int, default=1, - help='Top k sampling.') - group.add_argument("--top_p", type=float, default=0.0, - help='Top p sampling.') - group.add_argument("--return-log-probs", action='store_true', default=False, - help='Return the log probabilities of the final output tokens') - group.add_argument("--num-tokens-to-generate", type=int, default=30, - help='Number of tokens to generate for each prompt') - group.add_argument("--prompts", metavar='N', type=str, nargs='+', - help='Input prompts with each prompt within quotes and seperated by space') - group.add_argument("--max-batch-size", type=int, default=1, - help='Max number of prompts to process at once') - return parser - - -def get_inference_engine(args: Namespace, model: MegatronModule) -> AbstractEngine: - """Utility to get the relevant backend for running inference - - This function will automatically chose the TRTLLMBackend when possible, and if not revert to Mcore backend if the user does not specify any backends. TRT LLM Backend is not implmented yet. - - Args: - args (Namespace): The user arguments parsed from command line - model (MegatronModule): The megatron model . - - Returns: - AbstractBackend: The chosen backend - """ - tokenizer = get_tokenizer() - - inference_wrapper_config = InferenceWrapperConfig( - hidden_size=args.hidden_size, - inference_batch_times_seqlen_threshold=args.inference_batch_times_seqlen_threshold, - fp32_residual_connection=args.fp32_residual_connection, - params_dtype=args.params_dtype, - padded_vocab_size=args.padded_vocab_size - ) - - inference_wrapped_model = GPTInferenceWrapper(model, inference_wrapper_config) - text_generation_controller = TextGenerationController(inference_wrapped_model=inference_wrapped_model, tokenizer=tokenizer) - return MCoreEngine(text_generation_controller=text_generation_controller, max_batch_size=args.max_batch_size) - -def main(): - """Main program.""" - - # Note: The default args passed here can be overwritten by using appropriate params (check arguments.py file) - # Micro batch size is not needed to be set by user. 
(It is calculated based on inference-batch-times-seqlen-threshold argument) - initialize_megatron(extra_args_provider=add_text_generate_args, - args_defaults={'no_load_rng': True, - 'no_load_optim': True, - 'micro_batch_size': 1, - 'exit_on_missing_checkpoint': True}) - - # Set up model and load checkpoint - model = get_model(model_provider, wrap_with_ddp=False) - load_checkpoint(model, None, None) - model = model[0] - - args = get_args() - - inference_engine = get_inference_engine(args, model) - - sampling_params = SamplingParams( - temperature=args.temperature, - top_k=args.top_k, - top_p=args.top_p, - return_log_probs=args.return_log_probs, - num_tokens_to_generate=args.num_tokens_to_generate) - - results: List[InferenceRequest] = inference_engine.generate( - prompts=args.prompts, sampling_params=sampling_params - ) - - if torch.distributed.get_rank() == 0: - for idx, result in enumerate(results): - print(f' \n------------- RESULT FOR PROMPT {idx} --------------- ') - result = { - 'id': result.request_id, - 'input_prompt': result.prompt, - 'generated_text': result.generated_text, - 'generated_tokens' : result.generated_tokens - } - print(result) - -if __name__ == "__main__": - main() +import os +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from pretrain_gpt import model_provider +import torch +import sys +import time +import tqdm +import warnings +from argparse import Namespace +from megatron.core.inference.engines.abstract_engine import AbstractEngine +from megatron.core.inference.engines.mcore_engine import MCoreEngine +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference.inference_request import InferenceRequest +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) +from megatron.core.transformer.module import MegatronModule + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) +) + +from megatron.training import get_args +from megatron.training import get_tokenizer +from megatron.training.checkpointing import load_checkpoint +from megatron.core import mpu +from megatron.training.initialize import initialize_megatron +from megatron.training import get_model +import asyncio +from typing import AsyncIterator, List + + + +def add_text_generate_args(parser): + """Text generation arguments.""" + group = parser.add_argument_group(title='text generation') + + group.add_argument("--temperature", type=float, default=1.0, help='Sampling temperature.') + group.add_argument("--top_k", type=int, default=1, help='Top k sampling.') + group.add_argument("--top_p", type=float, default=0.0, help='Top p sampling.') + group.add_argument( + "--return-log-probs", + action='store_true', + default=False, + help='Return the log probabilities of the final output tokens', + ) + group.add_argument( + "--num-tokens-to-generate", + type=int, + default=30, + help='Number of tokens to generate for each prompt', + ) + group.add_argument( + "--prompts", + metavar='N', + type=str, + nargs='+', + help='Input prompts with each prompt within quotes and seperated by space', + ) + group.add_argument( + "--max-batch-size", type=int, default=8, dest="inference_max_requests", + help='Max number of prompts to process at once' + ) + group.add_argument("--stream", 
action="store_true", default=False, help="Stream output tokens") + return parser + + +def get_inference_engine(args: Namespace, model: MegatronModule) -> AbstractEngine: + """Utility to get the relevant backend for running inference + + This function will automatically chose the TRTLLMBackend when possible, and if not revert to Mcore backend if the user does not specify any backends. TRT LLM Backend is not implmented yet. + + Args: + args (Namespace): The user arguments parsed from command line + model (MegatronModule): The megatron model . + + Returns: + AbstractBackend: The chosen backend + """ + tokenizer = get_tokenizer() + + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=args.hidden_size, + inference_batch_times_seqlen_threshold=args.inference_batch_times_seqlen_threshold, + fp32_residual_connection=args.fp32_residual_connection, + params_dtype=args.params_dtype, + padded_vocab_size=args.padded_vocab_size, + inference_max_requests=args.inference_max_requests, + inference_max_seq_length=args.inference_max_seq_length, + ) + + inference_wrapped_model = GPTInferenceWrapper(model, inference_wrapper_config) + text_generation_controller = TextGenerationController(inference_wrapped_model=inference_wrapped_model, tokenizer=tokenizer) + return MCoreEngine(text_generation_controller=text_generation_controller) + + +async def generate( + inference_engine: MCoreEngine, + sampling_params: SamplingParams, + prompts: List[str], +) -> List[InferenceRequest]: + async def collect_stream(prompt, request_id, stream_generator): + print(f"Request {request_id}: {prompt}", end="", flush=True) + prev_idx = 0 + async for output in stream_generator: + print(output.generated_text[prev_idx:], end="", flush=True) + prev_idx = len(output.generated_text) + print() + + request_ids: List[str] = [ + inference_engine.add_request( + prompt=prompt, inference_parameters=sampling_params, streaming=True + ) + for prompt in prompts + ] + stream_generators = [inference_engine.get_stream_generator(request_id) for request_id in request_ids] + + tasks = [ + asyncio.create_task(collect_stream(prompt, request_id, stream_generator)) + for (prompt, request_id, stream_generator) in zip(prompts, request_ids, stream_generators) + ] + + await inference_engine.run_engine_async() + await asyncio.gather(*tasks) + + results: List[InferenceRequest] = [ + inference_engine.scheduler.completed_request_pool[request_id] for request_id in request_ids + ] + + return results + +def main(): + """Main program.""" + + # Note: The default args passed here can be overwritten by using appropriate params (check arguments.py file) + # Micro batch size is not needed to be set by user. 
(It is calculated based on inference-batch-times-seqlen-threshold argument) + initialize_megatron( + extra_args_provider=add_text_generate_args, + args_defaults={ + 'no_load_rng': True, + 'no_load_optim': True, + 'micro_batch_size': 1, + 'exit_on_missing_checkpoint': True, + }, + ) + + # Set up model and load checkpoint + model = get_model(model_provider, wrap_with_ddp=False) + load_checkpoint(model, None, None) + model = model[0] + + args = get_args() + + inference_engine = get_inference_engine(args, model) + + sampling_params = SamplingParams( + temperature=args.temperature, + top_k=args.top_k, + top_p=args.top_p, + return_log_probs=args.return_log_probs, + num_tokens_to_generate=args.num_tokens_to_generate, + ) + + if args.enable_cuda_graph: + print(f"Running warmup for CUDA graphs...") + inference_engine.generate( + prompts=args.prompts, sampling_params=sampling_params + ) + + start_time = time.perf_counter() + if args.stream: + results: List[InferenceRequest] = asyncio.run(generate(inference_engine, sampling_params, args.prompts)) + else: + results: List[InferenceRequest] = inference_engine.generate( + prompts=args.prompts, sampling_params=sampling_params, + ) + end_time = time.perf_counter() + latency = end_time - start_time + + if torch.distributed.get_rank() == 0: + for idx, result in enumerate(results): + print(f' \n------------- RESULT FOR PROMPT {idx} --------------- ') + result = { + 'id': result.request_id, + 'input_prompt': result.prompt, + 'generated_text': result.generated_text, + 'generated_tokens': result.generated_tokens, + 'latency': latency, + } + print(result) + + torch.distributed.destroy_process_group() + +if __name__ == "__main__": + main() diff --git a/examples/inference/llama_mistral/run_text_generation_llama3.1.sh b/examples/inference/llama_mistral/run_text_generation_llama3.1.sh old mode 100644 new mode 100755 index 06584f0..08db907 --- a/examples/inference/llama_mistral/run_text_generation_llama3.1.sh +++ b/examples/inference/llama_mistral/run_text_generation_llama3.1.sh @@ -1,56 +1,56 @@ -#!/bin/bash -# This example will start serving the Llama3.1-8B model -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NVTE_APPLY_QK_LAYER_SCALING=0 - -DISTRIBUTED_ARGS="--nproc_per_node 1 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr 0.0.0.0 \ - --master_port 6000" - -# Ensure CHECKPOINT and TOKENIZER_MODEL are provided -if [ -z "$1" ] || [ -z "$2" ]; then - echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." 
- echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" - exit 1 -fi - -# Assign command-line arguments to variables -CHECKPOINT=$1 -TOKENIZER_MODEL=$2 - -pip install flask-restful - -torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ - --use-checkpoint-args \ - --disable-bias-linear \ - --tokenizer-type HuggingFaceTokenizer \ - --tokenizer-model ${TOKENIZER_MODEL} \ - --transformer-impl transformer_engine \ - --normalization RMSNorm \ - --group-query-attention \ - --num-query-groups 8 \ - --no-masked-softmax-fusion \ - --attention-softmax-in-fp32 \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --untie-embeddings-and-output-weights \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 500000 \ - --use-rope-scaling \ - --use-rotary-position-embeddings \ - --swiglu \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --num-layers 32 \ - --hidden-size 4096 \ - --ffn-hidden-size 14336 \ - --load ${CHECKPOINT} \ - --num-attention-heads 32 \ - --max-position-embeddings 131072 \ - --bf16 \ - --micro-batch-size 1 \ - --seq-length 8192 +#!/bin/bash +# This example will start serving the Llama3.1-8B model +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 + +DISTRIBUTED_ARGS="--nproc_per_node 1 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr 0.0.0.0 \ + --master_port 6000" + +# Ensure CHECKPOINT and TOKENIZER_MODEL are provided +if [ -z "$1" ] || [ -z "$2" ]; then + echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." + echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" + exit 1 +fi + +# Assign command-line arguments to variables +CHECKPOINT=$1 +TOKENIZER_MODEL=$2 + +pip install flask-restful + +torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ + --use-checkpoint-args \ + --disable-bias-linear \ + --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-model ${TOKENIZER_MODEL} \ + --transformer-impl transformer_engine \ + --normalization RMSNorm \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --attention-softmax-in-fp32 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --untie-embeddings-and-output-weights \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 500000 \ + --use-rope-scaling \ + --use-rotary-position-embeddings \ + --swiglu \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --ffn-hidden-size 14336 \ + --load ${CHECKPOINT} \ + --num-attention-heads 32 \ + --max-position-embeddings 131072 \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length 8192 diff --git a/examples/inference/llama_mistral/run_text_generation_llama3.sh b/examples/inference/llama_mistral/run_text_generation_llama3.sh old mode 100644 new mode 100755 index c5fc410..fb23377 --- a/examples/inference/llama_mistral/run_text_generation_llama3.sh +++ b/examples/inference/llama_mistral/run_text_generation_llama3.sh @@ -1,55 +1,55 @@ -#!/bin/bash -# This example will start serving the Llama3-8B model -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NVTE_APPLY_QK_LAYER_SCALING=0 - -DISTRIBUTED_ARGS="--nproc_per_node 1 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr 0.0.0.0 \ - --master_port 6000" - -# Ensure CHECKPOINT and TOKENIZER_MODEL are provided -if [ -z "$1" ] || [ -z "$2" ]; then - echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." 
- echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" - exit 1 -fi - -# Assign command-line arguments to variables -CHECKPOINT=$1 -TOKENIZER_MODEL=$2 - -pip install flask-restful - -torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ - --use-checkpoint-args \ - --disable-bias-linear \ - --tokenizer-type HuggingFaceTokenizer \ - --tokenizer-model ${TOKENIZER_MODEL} \ - --transformer-impl transformer_engine \ - --normalization RMSNorm \ - --group-query-attention \ - --num-query-groups 8 \ - --no-masked-softmax-fusion \ - --attention-softmax-in-fp32 \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --untie-embeddings-and-output-weights \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 500000 \ - --use-rotary-position-embeddings \ - --swiglu \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --num-layers 32 \ - --hidden-size 4096 \ - --ffn-hidden-size 14336 \ - --load ${CHECKPOINT} \ - --num-attention-heads 32 \ - --max-position-embeddings 8192 \ - --bf16 \ - --micro-batch-size 1 \ - --seq-length 8192 +#!/bin/bash +# This example will start serving the Llama3-8B model +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 + +DISTRIBUTED_ARGS="--nproc_per_node 1 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr 0.0.0.0 \ + --master_port 6000" + +# Ensure CHECKPOINT and TOKENIZER_MODEL are provided +if [ -z "$1" ] || [ -z "$2" ]; then + echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." + echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" + exit 1 +fi + +# Assign command-line arguments to variables +CHECKPOINT=$1 +TOKENIZER_MODEL=$2 + +pip install flask-restful + +torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ + --use-checkpoint-args \ + --disable-bias-linear \ + --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-model ${TOKENIZER_MODEL} \ + --transformer-impl transformer_engine \ + --normalization RMSNorm \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --attention-softmax-in-fp32 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --untie-embeddings-and-output-weights \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 500000 \ + --use-rotary-position-embeddings \ + --swiglu \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --ffn-hidden-size 14336 \ + --load ${CHECKPOINT} \ + --num-attention-heads 32 \ + --max-position-embeddings 8192 \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length 8192 diff --git a/examples/inference/llama_mistral/run_text_generation_mistral.sh b/examples/inference/llama_mistral/run_text_generation_mistral.sh old mode 100644 new mode 100755 index 4358fd4..050de79 --- a/examples/inference/llama_mistral/run_text_generation_mistral.sh +++ b/examples/inference/llama_mistral/run_text_generation_mistral.sh @@ -1,53 +1,53 @@ -#!/bin/bash -# This example will start serving the Mistral-7B-v0.3 model -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -DISTRIBUTED_ARGS="--nproc_per_node 1 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr 0.0.0.0 \ - --master_port 6000" - -# Ensure CHECKPOINT and TOKENIZER_MODEL are provided -if [ -z "$1" ] || [ -z "$2" ]; then - echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." 
- echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" - exit 1 -fi - -# Assign command-line arguments to variables -CHECKPOINT=$1 -TOKENIZER_MODEL=$2 - -pip install flask-restful - -torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ - --tokenizer-type HuggingFaceTokenizer \ - --tokenizer-model ${TOKENIZER_MODEL} \ - --use-checkpoint-args \ - --apply-layernorm-1p \ - --transformer-impl transformer_engine \ - --normalization RMSNorm \ - --group-query-attention \ - --num-query-groups 8 \ - --no-masked-softmax-fusion \ - --use-flash-attn \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --swiglu \ - --ffn-hidden-size 14336 \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --num-layers 32 \ - --hidden-size 4096 \ - --load ${CHECKPOINT} \ - --num-attention-heads 32 \ - --max-position-embeddings 4096 \ - --bf16 \ - --micro-batch-size 1 \ - --seq-length 4096 \ - --seed 101 +#!/bin/bash +# This example will start serving the Mistral-7B-v0.3 model +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 + +DISTRIBUTED_ARGS="--nproc_per_node 1 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr 0.0.0.0 \ + --master_port 6000" + +# Ensure CHECKPOINT and TOKENIZER_MODEL are provided +if [ -z "$1" ] || [ -z "$2" ]; then + echo "Error: You must provide CHECKPOINT and TOKENIZER_MODEL as command-line arguments." + echo "Usage: $0 /path/to/checkpoint /path/to/tokenizer_model" + exit 1 +fi + +# Assign command-line arguments to variables +CHECKPOINT=$1 +TOKENIZER_MODEL=$2 + +pip install flask-restful + +torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ + --tokenizer-type HuggingFaceTokenizer \ + --tokenizer-model ${TOKENIZER_MODEL} \ + --use-checkpoint-args \ + --apply-layernorm-1p \ + --transformer-impl transformer_engine \ + --normalization RMSNorm \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --use-flash-attn \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --ffn-hidden-size 14336 \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --load ${CHECKPOINT} \ + --num-attention-heads 32 \ + --max-position-embeddings 4096 \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length 4096 \ + --seed 101 diff --git a/examples/inference/run_text_generation_server_345M.sh b/examples/inference/run_text_generation_server_345M.sh old mode 100644 new mode 100755 index e8e61ad..2394710 --- a/examples/inference/run_text_generation_server_345M.sh +++ b/examples/inference/run_text_generation_server_345M.sh @@ -1,31 +1,31 @@ -#!/bin/bash -# This example will start serving the 345M model. 
-DISTRIBUTED_ARGS="--nproc_per_node 1 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr localhost \ - --master_port 6000" - -CHECKPOINT= -VOCAB_FILE= -MERGE_FILE= - -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -pip install flask-restful - -torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --num-layers 24 \ - --hidden-size 1024 \ - --load ${CHECKPOINT} \ - --num-attention-heads 16 \ - --max-position-embeddings 1024 \ - --tokenizer-type GPT2BPETokenizer \ - --fp16 \ - --micro-batch-size 1 \ - --seq-length 1024 \ - --vocab-file $VOCAB_FILE \ - --merge-file $MERGE_FILE \ - --seed 42 +#!/bin/bash +# This example will start serving the 345M model. +DISTRIBUTED_ARGS="--nproc_per_node 1 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr localhost \ + --master_port 6000" + +CHECKPOINT= +VOCAB_FILE= +MERGE_FILE= + +export CUDA_DEVICE_MAX_CONNECTIONS=1 + +pip install flask-restful + +torchrun $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --num-layers 24 \ + --hidden-size 1024 \ + --load ${CHECKPOINT} \ + --num-attention-heads 16 \ + --max-position-embeddings 1024 \ + --tokenizer-type GPT2BPETokenizer \ + --fp16 \ + --micro-batch-size 1 \ + --seq-length 1024 \ + --vocab-file $VOCAB_FILE \ + --merge-file $MERGE_FILE \ + --seed 42 diff --git a/examples/inference/run_text_generation_server_345M_8_tensor_parallel.sh b/examples/inference/run_text_generation_server_345M_8_tensor_parallel.sh old mode 100644 new mode 100755 index 368cec3..8ca0c41 --- a/examples/inference/run_text_generation_server_345M_8_tensor_parallel.sh +++ b/examples/inference/run_text_generation_server_345M_8_tensor_parallel.sh @@ -1,29 +1,29 @@ -#!/bin/bash -# This example will start serving the 345M model that is partitioned 8 way tensor parallel -DISTRIBUTED_ARGS="--nproc_per_node 8 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr localhost \ - --master_port 6000" - -CHECKPOINT= -VOCAB_FILE= -MERGE_FILE= - -pip install flask-restful - -python -m torch.distributed.launch $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ - --tensor-model-parallel-size 8 \ - --pipeline-model-parallel-size 1 \ - --num-layers 24 \ - --hidden-size 1024 \ - --load ${CHECKPOINT} \ - --num-attention-heads 16 \ - --max-position-embeddings 1024 \ - --tokenizer-type GPT2BPETokenizer \ - --fp16 \ - --micro-batch-size 1 \ - --seq-length 1024 \ - --vocab-file $VOCAB_FILE \ - --merge-file $MERGE_FILE \ - --seed 42 +#!/bin/bash +# This example will start serving the 345M model that is partitioned 8 way tensor parallel +DISTRIBUTED_ARGS="--nproc_per_node 8 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr localhost \ + --master_port 6000" + +CHECKPOINT= +VOCAB_FILE= +MERGE_FILE= + +pip install flask-restful + +python -m torch.distributed.launch $DISTRIBUTED_ARGS tools/run_text_generation_server.py \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 1 \ + --num-layers 24 \ + --hidden-size 1024 \ + --load ${CHECKPOINT} \ + --num-attention-heads 16 \ + --max-position-embeddings 1024 \ + --tokenizer-type GPT2BPETokenizer \ + --fp16 \ + --micro-batch-size 1 \ + --seq-length 1024 \ + --vocab-file $VOCAB_FILE \ + --merge-file $MERGE_FILE \ + --seed 42 diff --git a/examples/mamba/run_text_gen_server_8b.sh b/examples/mamba/run_text_gen_server_8b.sh old mode 100644 new mode 100755 index 8d3137f..5c712ff --- a/examples/mamba/run_text_gen_server_8b.sh +++ 
b/examples/mamba/run_text_gen_server_8b.sh @@ -1,50 +1,50 @@ -#!/bin/bash - -# Use: ./run_text_gen_server_8b.sh -# To launch the client: python ../../tools/text_generation_cli.py - -CHECKPOINT_PATH=$1 -TOKENIZER_PATH=$2 - -DISTRIBUTED_ARGS="--nproc_per_node 1 \ - --nnodes 1 \ - --node_rank 0 \ - --master_addr localhost \ - --master_port 6000" - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NCCL_IB_TIMEOUT=19 -export NCCL_IB_QPS_PER_CONNECTION=4 - -export TRITON_CACHE_DIR="./triton-cache/" -export TRITON_CACHE_MANAGER="megatron.core.ssm.triton_cache_manager:ParallelFileCacheManager" - -torchrun $DISTRIBUTED_ARGS ../../tools/run_mamba_text_generation_server.py \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --untie-embeddings-and-output-weights \ - --num-layers 56 \ - --hidden-size 4096 \ - --load ${CHECKPOINT_PATH} \ - --num-attention-heads 32 \ - --group-query-attention \ - --num-query-groups 8 \ - --hybrid-attention-ratio 0.08 \ - --hybrid-mlp-ratio 0.5 \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --disable-bias-linear \ - --normalization RMSNorm \ - --seq-length 4096 \ - --max-position-embeddings 4096 \ - --position-embedding-type none \ - --tokenizer-type GPTSentencePieceTokenizer \ - --tokenizer-model ${TOKENIZER_PATH} \ - --distributed-backend nccl \ - --distributed-timeout-minutes 1440 \ - --bf16 \ - --micro-batch-size 1 \ - --use-mcore-models \ - --spec megatron.core.models.mamba.mamba_layer_specs mamba_stack_spec \ - --seed 42 +#!/bin/bash + +# Use: ./run_text_gen_server_8b.sh +# To launch the client: python ../../tools/text_generation_cli.py + +CHECKPOINT_PATH=$1 +TOKENIZER_PATH=$2 + +DISTRIBUTED_ARGS="--nproc_per_node 1 \ + --nnodes 1 \ + --node_rank 0 \ + --master_addr localhost \ + --master_port 6000" + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_IB_TIMEOUT=19 +export NCCL_IB_QPS_PER_CONNECTION=4 + +export TRITON_CACHE_DIR="./triton-cache/" +export TRITON_CACHE_MANAGER="megatron.core.ssm.triton_cache_manager:ParallelFileCacheManager" + +torchrun $DISTRIBUTED_ARGS ../../tools/run_mamba_text_generation_server.py \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --untie-embeddings-and-output-weights \ + --num-layers 56 \ + --hidden-size 4096 \ + --load ${CHECKPOINT_PATH} \ + --num-attention-heads 32 \ + --group-query-attention \ + --num-query-groups 8 \ + --hybrid-attention-ratio 0.08 \ + --hybrid-mlp-ratio 0.5 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --disable-bias-linear \ + --normalization RMSNorm \ + --seq-length 4096 \ + --max-position-embeddings 4096 \ + --position-embedding-type none \ + --tokenizer-type GPTSentencePieceTokenizer \ + --tokenizer-model ${TOKENIZER_PATH} \ + --distributed-backend nccl \ + --distributed-timeout-minutes 1440 \ + --bf16 \ + --micro-batch-size 1 \ + --use-mcore-models \ + --spec megatron.core.models.mamba.mamba_layer_specs mamba_stack_spec \ + --seed 42 diff --git a/examples/mamba/run_text_gen_server_8b_gpt3.sh b/examples/mamba/run_text_gen_server_8b_gpt3.sh old mode 100644 new mode 100755 diff --git a/examples/mamba/train.sh b/examples/mamba/train.sh old mode 100644 new mode 100755 index 3952a99..033a000 --- a/examples/mamba/train.sh +++ b/examples/mamba/train.sh @@ -1,105 +1,105 @@ -#!/bin/bash - -# Use: ./train.sh - -MODEL_SCALE="800M" # or "8B" - -case "${MODEL_SCALE}" in - "800M") - TENSOR_MODEL_PARALLEL_SIZE=1 - NUM_LAYERS=48 - HIDDEN_SIZE=1024 - NUM_ATTENTION_HEADS=16 - GLOBAL_BATCH_SIZE=32 - ;; - "8B") - 
TENSOR_MODEL_PARALLEL_SIZE=4 - NUM_LAYERS=56 - HIDDEN_SIZE=4096 - NUM_ATTENTION_HEADS=32 - GLOBAL_BATCH_SIZE=8 - ;; - *) - echo "Invalid version specified" - exit 1 - ;; -esac - -DATA_PATH=$1 -TOKENIZER_PATH=$2 - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NCCL_IB_TIMEOUT=19 -export NCCL_IB_QPS_PER_CONNECTION=4 - -CHECKPOINT_DIR="./checkpoints" -DATACACHE_DIR="./data-cache" -TENSORBOARD_DIR="./tensorboard" - -mkdir -p ${CHECKPOINT_DIR} -mkdir -p ${DATACACHE_DIR} -mkdir -p ${TENSORBOARD_DIR} - -export TRITON_CACHE_DIR="./triton-cache/" -export TRITON_CACHE_MANAGER="megatron.core.ssm.triton_cache_manager:ParallelFileCacheManager" - -SEQ_LEN=4096 -TRAIN_SAMPLES=73242188 # 300B tokens / 4096 -LR_WARMUP_SAMPLES=50000 -LR_DECAY_SAMPLES=73192188 # TRAIN_SAMPLES - LR_WARMUP_SAMPLES - -options=" \ - --tensor-model-parallel-size ${TENSOR_MODEL_PARALLEL_SIZE} \ - --sequence-parallel \ - --pipeline-model-parallel-size 1 \ - --use-distributed-optimizer \ - --overlap-param-gather \ - --overlap-grad-reduce \ - --untie-embeddings-and-output-weights \ - --init-method-std 0.02 \ - --position-embedding-type none \ - --num-layers ${NUM_LAYERS} \ - --hidden-size ${HIDDEN_SIZE} \ - --num-attention-heads ${NUM_ATTENTION_HEADS} \ - --group-query-attention \ - --num-query-groups 8 \ - --hybrid-attention-ratio 0.08 \ - --hybrid-mlp-ratio 0.5 \ - --seq-length ${SEQ_LEN} \ - --max-position-embeddings ${SEQ_LEN} \ - --train-samples ${TRAIN_SAMPLES} \ - --lr-warmup-samples ${LR_WARMUP_SAMPLES} \ - --lr-decay-samples ${LR_DECAY_SAMPLES} \ - --save ${CHECKPOINT_DIR} \ - --load ${CHECKPOINT_DIR} \ - --data-path ${DATA_PATH} \ - --data-cache-path ${DATACACHE_DIR} \ - --split 99,1,0 \ - --tokenizer-type GPTSentencePieceTokenizer \ - --tokenizer-model ${TOKENIZER_PATH} \ - --distributed-backend nccl \ - --micro-batch-size 4 \ - --global-batch-size ${GLOBAL_BATCH_SIZE} \ - --lr 2.5e-4 \ - --min-lr 2.5e-5 \ - --lr-decay-style cosine \ - --weight-decay 0.1 \ - --clip-grad 1.0 \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --disable-bias-linear \ - --normalization RMSNorm \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --log-interval 10 \ - --save-interval 2000 \ - --eval-interval 2000 \ - --eval-iters 32 \ - --bf16 \ - --use-mcore-models \ - --spec megatron.core.models.mamba.mamba_layer_specs mamba_stack_spec \ - --no-create-attention-mask-in-dataloader \ - --tensorboard-dir ${TENSORBOARD_DIR}" - -torchrun --nproc_per_node 8 ../../pretrain_mamba.py ${options} +#!/bin/bash + +# Use: ./train.sh + +MODEL_SCALE="800M" # or "8B" + +case "${MODEL_SCALE}" in + "800M") + TENSOR_MODEL_PARALLEL_SIZE=1 + NUM_LAYERS=48 + HIDDEN_SIZE=1024 + NUM_ATTENTION_HEADS=16 + GLOBAL_BATCH_SIZE=32 + ;; + "8B") + TENSOR_MODEL_PARALLEL_SIZE=4 + NUM_LAYERS=56 + HIDDEN_SIZE=4096 + NUM_ATTENTION_HEADS=32 + GLOBAL_BATCH_SIZE=8 + ;; + *) + echo "Invalid version specified" + exit 1 + ;; +esac + +DATA_PATH=$1 +TOKENIZER_PATH=$2 + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_IB_TIMEOUT=19 +export NCCL_IB_QPS_PER_CONNECTION=4 + +CHECKPOINT_DIR="./checkpoints" +DATACACHE_DIR="./data-cache" +TENSORBOARD_DIR="./tensorboard" + +mkdir -p ${CHECKPOINT_DIR} +mkdir -p ${DATACACHE_DIR} +mkdir -p ${TENSORBOARD_DIR} + +export TRITON_CACHE_DIR="./triton-cache/" +export TRITON_CACHE_MANAGER="megatron.core.ssm.triton_cache_manager:ParallelFileCacheManager" + +SEQ_LEN=4096 +TRAIN_SAMPLES=73242188 # 300B tokens / 4096 +LR_WARMUP_SAMPLES=50000 +LR_DECAY_SAMPLES=73192188 # TRAIN_SAMPLES - LR_WARMUP_SAMPLES + +options=" \ + 
--tensor-model-parallel-size ${TENSOR_MODEL_PARALLEL_SIZE} \ + --sequence-parallel \ + --pipeline-model-parallel-size 1 \ + --use-distributed-optimizer \ + --overlap-param-gather \ + --overlap-grad-reduce \ + --untie-embeddings-and-output-weights \ + --init-method-std 0.02 \ + --position-embedding-type none \ + --num-layers ${NUM_LAYERS} \ + --hidden-size ${HIDDEN_SIZE} \ + --num-attention-heads ${NUM_ATTENTION_HEADS} \ + --group-query-attention \ + --num-query-groups 8 \ + --hybrid-attention-ratio 0.08 \ + --hybrid-mlp-ratio 0.5 \ + --seq-length ${SEQ_LEN} \ + --max-position-embeddings ${SEQ_LEN} \ + --train-samples ${TRAIN_SAMPLES} \ + --lr-warmup-samples ${LR_WARMUP_SAMPLES} \ + --lr-decay-samples ${LR_DECAY_SAMPLES} \ + --save ${CHECKPOINT_DIR} \ + --load ${CHECKPOINT_DIR} \ + --data-path ${DATA_PATH} \ + --data-cache-path ${DATACACHE_DIR} \ + --split 99,1,0 \ + --tokenizer-type GPTSentencePieceTokenizer \ + --tokenizer-model ${TOKENIZER_PATH} \ + --distributed-backend nccl \ + --micro-batch-size 4 \ + --global-batch-size ${GLOBAL_BATCH_SIZE} \ + --lr 2.5e-4 \ + --min-lr 2.5e-5 \ + --lr-decay-style cosine \ + --weight-decay 0.1 \ + --clip-grad 1.0 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --disable-bias-linear \ + --normalization RMSNorm \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --log-interval 10 \ + --save-interval 2000 \ + --eval-interval 2000 \ + --eval-iters 32 \ + --bf16 \ + --use-mcore-models \ + --spec megatron.core.models.mamba.mamba_layer_specs mamba_stack_spec \ + --no-create-attention-mask-in-dataloader \ + --tensorboard-dir ${TENSORBOARD_DIR}" + +torchrun --nproc_per_node 8 ../../pretrain_mamba.py ${options} diff --git a/examples/mixtral/hostfile_mixtral_8x7B b/examples/mixtral/hostfile_mixtral_8x7B new file mode 100644 index 0000000..e69de29 diff --git a/run_mixtral8x7B_1nodes.sh b/examples/mixtral/run_mixtral_8x7B_1nodes.sh old mode 100644 new mode 100755 similarity index 100% rename from run_mixtral8x7B_1nodes.sh rename to examples/mixtral/run_mixtral_8x7B_1nodes.sh diff --git a/run_GPT-MOE.sh b/examples/mixtral/run_mixtral_8x7B_multinodes.sh old mode 100644 new mode 100755 similarity index 61% rename from run_GPT-MOE.sh rename to examples/mixtral/run_mixtral_8x7B_multinodes.sh index 9c16e0a..0fa5ded --- a/run_GPT-MOE.sh +++ b/examples/mixtral/run_mixtral_8x7B_multinodes.sh @@ -7,13 +7,13 @@ do fi done -mpirun -np 256 --hostfile gptnodes \ +mpirun -np 32 --hostfile hostfile_mixtral_8x7B \ --allow-run-as-root \ --bind-to none \ --mca plm_rsh_no_tree_spawn 1 \ - train_GPT-MOE_567B.sh node002 --profiling=$profiling > output.log 2>&1 + train_mixtral_8x7B_multinodes.sh node066 --profiling=$profiling > output.log 2>&1 wait rm -rf CKPT -#rm -rf mixtral_dataset/my-mixtral_text_document \ No newline at end of file +#rm -rf mixtral_dataset/my-mixtral_text_document diff --git a/train_mixtral_8x7B_1nodes.sh b/examples/mixtral/train_mixtral_8x7B_1nodes.sh similarity index 87% rename from train_mixtral_8x7B_1nodes.sh rename to examples/mixtral/train_mixtral_8x7B_1nodes.sh index 3420481..6e70fb1 100755 --- a/train_mixtral_8x7B_1nodes.sh +++ b/examples/mixtral/train_mixtral_8x7B_1nodes.sh @@ -4,18 +4,23 @@ for para in $* do if [[ $para == --profiling* ]];then profiling=${para#*=} - export GPU_FLUSH_ON_EXECUTION=1 - export HIP_DIRECT_DISPATCH=0 fi done -source /opt/dtk/env.sh # Runs Mixtral 8x7B model +source /opt/dtk/env.sh + +# defauat env +CURRENT_DIR="$( cd "$( dirname "$0" )" && pwd )" +MEGATRON_PATH=$( dirname $( dirname ${CURRENT_DIR})) +export 
PYTHONPATH=${MEGATRON_PATH}:$PYTHONPATH +export GLOG_minloglevel=3 export CUDA_DEVICE_MAX_CONNECTIONS=1 export HSA_FORCE_FINE_GRAIN_PCIE=1 export OMP_NUM_THREADS=1 export GPU_MAX_HW_QUEUES=10 +# nccl env export NCCL_ALGO=Ring export NCCL_MIN_NCHANNELS=32 export NCCL_MAX_NCHANNELS=32 @@ -23,9 +28,10 @@ export NCCL_NET_GDR_LEVEL=7 export NCCL_NET_GDR_READ=1 export RCCL_SDMA_COPY_ENABLE=0 export NCCL_IB_HCA=mlx5_2:1,mlx5_3:1,mlx5_4:1,mlx5_5:1,mlx5_6:1,mlx5_7:1,mlx5_8:1,mlx5_9:1 -#export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" +export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" + +# enable BatchLinear export GROUPED_GEMM_BatchLinear=1 -export GLOG_minloglevel=3 RANK=$OMPI_COMM_WORLD_RANK LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK @@ -75,7 +81,7 @@ MOE_ARGS=( --moe-token-dispatcher-type alltoall --moe-expert-capacity-factor 0.5 --moe-pad-expert-input-to-capacity - --moe-grouped-gemm + #--moe-grouped-gemm ) DATA_ARGS=( @@ -103,25 +109,17 @@ TRAINING_ARGS=( TORCH_PROFIE_ARGS=( --profile - --profile-ranks 0 1 2 3 4 5 6 7 8 + --profile-ranks 0 1 2 3 4 5 6 7 --profile-step-start 3 --profile-step-end 4 - --profile-dir torch_prof_mixtral_1nodes + --profile-dir torch_prof_mixtral_1nodes_tp2-pp1-ep8-ep_tp1 --use-pytorch-profiler ) -HIP_PROFIE_ARGS=( - --profile - --profile-ranks 0 1 2 3 4 5 6 7 8 - --profile-step-start 4 - --profile-step-end 5 - --use-hip-profiler -) - MODEL_PARALLEL_ARGS=( --tensor-model-parallel-size 2 --pipeline-model-parallel-size 1 - --expert-model-parallel-size 2 + --expert-model-parallel-size 8 --expert-tensor-parallel-size 1 --use-distributed-optimizer --sequence-parallel @@ -159,10 +157,6 @@ APP="python3 -u pretrain_gpt.py \ if [[ $profiling == "torch" ]]; then APP+=" ${TORCH_PROFIE_ARGS[@]}" -elif [[ $profiling == "hip" ]]; then - mkdir -p hip_prof_data - APP+=" ${HIP_PROFIE_ARGS[@]}" - APP="hipprof -d hip_prof_data --hip-trace --trace-off ${APP}" fi #for hygon cpu diff --git a/train_mixtral_8x7B_2nodes.sh b/examples/mixtral/train_mixtral_8x7B_multinodes.sh old mode 100644 new mode 100755 similarity index 85% rename from train_mixtral_8x7B_2nodes.sh rename to examples/mixtral/train_mixtral_8x7B_multinodes.sh index d0b3933..6413c90 --- a/train_mixtral_8x7B_2nodes.sh +++ b/examples/mixtral/train_mixtral_8x7B_multinodes.sh @@ -4,18 +4,23 @@ for para in $* do if [[ $para == --profiling* ]];then profiling=${para#*=} - export GPU_FLUSH_ON_EXECUTION=1 - export HIP_DIRECT_DISPATCH=0 fi done -source /opt/dtk/env.sh # Runs Mixtral 8x7B model +source /opt/dtk/env.sh + +# defauat env +CURRENT_DIR="$( cd "$( dirname "$0" )" && pwd )" +MEGATRON_PATH=$( dirname $( dirname ${CURRENT_DIR})) +export PYTHONPATH=${MEGATRON_PATH}:$PYTHONPATH +export GLOG_minloglevel=3 export CUDA_DEVICE_MAX_CONNECTIONS=1 export HSA_FORCE_FINE_GRAIN_PCIE=1 export OMP_NUM_THREADS=1 export GPU_MAX_HW_QUEUES=10 +# nccl env export NCCL_ALGO=Ring export NCCL_MIN_NCHANNELS=32 export NCCL_MAX_NCHANNELS=32 @@ -23,9 +28,10 @@ export NCCL_NET_GDR_LEVEL=7 export NCCL_NET_GDR_READ=1 export RCCL_SDMA_COPY_ENABLE=0 export NCCL_IB_HCA=mlx5_2:1,mlx5_3:1,mlx5_4:1,mlx5_5:1,mlx5_6:1,mlx5_7:1,mlx5_8:1,mlx5_9:1 -#export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" +export NCCL_TOPO_FILE="/public/home/xingjl/dependency/rccl-tests-0204/topo-input.xml" + +# enable BatchLinear export GROUPED_GEMM_BatchLinear=1 -export GLOG_minloglevel=3 RANK=$OMPI_COMM_WORLD_RANK LOCAL_RANK=$OMPI_COMM_WORLD_LOCAL_RANK @@ -99,9 +105,6 @@ TRAINING_ARGS=( 
--bf16 --overlap-param-gather --overlap-grad-reduce - --recompute-granularity full - --recompute-method uniform - --recompute-num-layers 1 ) TORCH_PROFIE_ARGS=( @@ -109,23 +112,15 @@ TORCH_PROFIE_ARGS=( --profile-ranks 0 1 2 3 8 9 10 11 --profile-step-start 3 --profile-step-end 4 - --profile-dir torch_prof_data_mixtral_2nodes + --profile-dir torch_prof_mixtral_4nodes_tp2-pp8-ep2-ep_tp1 --use-pytorch-profiler ) -HIP_PROFIE_ARGS=( - --profile - --profile-ranks 0 1 2 3 8 9 10 11 - --profile-step-start 4 - --profile-step-end 5 - --use-hip-profiler -) - MODEL_PARALLEL_ARGS=( - --tensor-model-parallel-size 4 - --pipeline-model-parallel-size 4 + --tensor-model-parallel-size 2 + --pipeline-model-parallel-size 8 --expert-model-parallel-size 2 - --expert-tensor-parallel-size 2 + --expert-tensor-parallel-size 1 --use-distributed-optimizer --sequence-parallel ) @@ -162,10 +157,6 @@ APP="python3 -u pretrain_gpt.py \ if [[ $profiling == "torch" ]]; then APP+=" ${TORCH_PROFIE_ARGS[@]}" -elif [[ $profiling == "hip" ]]; then - mkdir -p hip_prof_data - APP+=" ${HIP_PROFIE_ARGS[@]}" - APP="hipprof -d hip_prof_data --hip-trace --trace-off ${APP}" fi #for hygon cpu diff --git a/examples/mixtral/train_mixtral_8x7b_distributed.sh b/examples/mixtral/train_mixtral_8x7b_distributed.sh deleted file mode 100644 index ed44d60..0000000 --- a/examples/mixtral/train_mixtral_8x7b_distributed.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/bash - -# Runs Mixtral 8x7B model - -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -GPUS_PER_NODE=8 -# Change for multinode config -MASTER_ADDR=${MASTER_ADDR:-"localhost"} -MASTER_PORT=${MASTER_PORT:-"6000"} -NNODES=${SLURM_NNODES:-"1"} -NODE_RANK=${RANK:-"0"} -WORLD_SIZE=$(($GPUS_PER_NODE*$NNODES)) - -CHECKPOINT_PATH=$1 -TOKENIZER_MODEL=$2 -DATA_PATH=$3 - -DISTRIBUTED_ARGS=( - --nproc_per_node $GPUS_PER_NODE - --nnodes $NNODES - --node_rank $NODE_RANK - --master_addr $MASTER_ADDR - --master_port $MASTER_PORT -) - -MODEL_ARGS=( - --use-mcore-models - --disable-bias-linear - --seq-length 4096 - --max-position-embeddings 32768 - --num-layers 32 - --hidden-size 4096 - --ffn-hidden-size 14336 - --num-attention-heads 32 - --init-method-std 0.01 - --attention-dropout 0.0 - --hidden-dropout 0.0 - --normalization RMSNorm - --position-embedding-type rope - --swiglu - --untie-embeddings-and-output-weights - --group-query-attention - --num-query-groups 8 - --no-masked-softmax-fusion - --no-position-embedding - --rotary-base 1000000 -) - -MOE_ARGS=( - --num-experts 8 - --moe-router-topk 2 - --moe-router-load-balancing-type aux_loss - --moe-aux-loss-coeff 1e-2 - --moe-grouped-gemm - --moe-token-dispatcher-type alltoall - --overlap-param-gather - --overlap-grad-reduce -) - -DATA_ARGS=( - --tokenizer-type Llama2Tokenizer - --tokenizer-model ${TOKENIZER_MODEL} - --data-path $DATA_PATH - --split 99990,8,2 -) - -TRAINING_ARGS=( - --micro-batch-size 1 - --global-batch-size 256 - --lr 1e-4 - --train-iters 500000 - --lr-decay-iters 320000 - --lr-decay-style cosine - --min-lr 1.0e-5 - --weight-decay 0.1 - --lr-warmup-iters 500 - --clip-grad 1.0 - --bf16 -) - -MODEL_PARALLEL_ARGS=( - --tensor-model-parallel-size 1 - --pipeline-model-parallel-size 4 - --expert-model-parallel-size 8 - --use-distributed-optimizer - --sequence-parallel -) - -LOGGING_ARGS=( - --log-interval 1 \ - --save-interval 10000 \ - --eval-interval 1000 \ - --eval-iters 10 \ - --save $CHECKPOINT_PATH \ - --load $CHECKPOINT_PATH \ - --tensorboard-dir "${CHECKPOINT_PATH}/tensorboard" \ - --no-load-optim \ - --no-load-rng -) - -if [ -n "${WANDB_API_KEY}" ]; 
then - LOGGING_ARGS+=( - --wandb-project ${WANDB_PROJECT:-"Mixtral"} - --wandb-exp-name ${WANDB_NAME:-"Mixtral_8x7B"} - ) -fi - - -torchrun ${DISTRIBUTED_ARGS[@]} pretrain_gpt.py \ - ${MODEL_ARGS[@]} \ - ${MOE_ARGS[@]} \ - ${DATA_ARGS[@]} \ - ${TRAINING_ARGS[@]} \ - ${MODEL_PARALLEL_ARGS[@]} \ - ${LOGGING_ARGS[@]} diff --git a/examples/multimodal/combine_lm_vision_checkpoints.sh b/examples/multimodal/combine_lm_vision_checkpoints.sh index 52de16e..b6e3277 100644 --- a/examples/multimodal/combine_lm_vision_checkpoints.sh +++ b/examples/multimodal/combine_lm_vision_checkpoints.sh @@ -1,57 +1,57 @@ -#/bin/bash -MCORE_LM=$1 # -MCORE_VISION=$2 # -OUTPUT_DIR=$3 # -MODEL_TYPE=$4 # Model type. Default: Mistral CLIP example. - -if [[ $MODEL_TYPE == "nvlm" ]]; then - # NVLM TP=8 - python examples/multimodal/combine_state_dicts.py \ - --input \ - ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_04/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_04/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_05/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_05/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_06/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_06/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_07/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_07/model_optim_rng.pt \ - --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ - --output \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_04/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_05/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_06/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_07/model_optim_rng.pt -else - # Mistral CLIP example TP=4. 
- python examples/multimodal/combine_state_dicts.py \ - --input \ - ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ - --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ - --output \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ - ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt -fi - -echo 1 > ${OUTPUT_DIR}/latest_checkpointed_iteration.txt +#/bin/bash +MCORE_LM=$1 # +MCORE_VISION=$2 # +OUTPUT_DIR=$3 # +MODEL_TYPE=$4 # Model type. Default: Mistral CLIP example. + +if [[ $MODEL_TYPE == "nvlm" ]]; then + # NVLM TP=8 + python examples/multimodal/combine_state_dicts.py \ + --input \ + ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_07/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_07/model_optim_rng.pt \ + --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ + --output \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_04/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_05/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_06/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_07/model_optim_rng.pt +else + # Mistral CLIP example TP=4. 
+ python examples/multimodal/combine_state_dicts.py \ + --input \ + ${MCORE_LM}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${MCORE_LM}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + ${MCORE_VISION}/iter_0000001/mp_rank_03/model_optim_rng.pt \ + --prefixes language_model vision_model language_model vision_model language_model vision_model language_model vision_model \ + --output \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_00/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_01/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_02/model_optim_rng.pt \ + ${OUTPUT_DIR}/iter_0000001/mp_rank_03/model_optim_rng.pt +fi + +echo 1 > ${OUTPUT_DIR}/latest_checkpointed_iteration.txt diff --git a/examples/multimodal/config.py b/examples/multimodal/config.py index ee40460..2bee671 100644 --- a/examples/multimodal/config.py +++ b/examples/multimodal/config.py @@ -1,200 +1,280 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from dataclasses import dataclass - -import torch - -from megatron.training.activations import fast_gelu, quick_gelu, squared_relu - - -def get_language_model_config(config): - if config.language_model_type == "llama3_8b": - config.activation_func = torch.nn.functional.silu - config.add_bias_linear = False - config.bias_activation_fusion = False - config.gated_linear_unit = True - config.apply_query_key_layer_scaling = False - config.layernorm_zero_centered_gamma = ( - False # Zero centered gamma not supported for RMSNorm - ) - config.bias_dropout_fusion = False - config.apply_rope_fusion = False - config.attention_softmax_in_fp32 = True - config.ffn_hidden_size = 14336 - elif config.language_model_type == "mistral_7b": - config.activation_func = torch.nn.functional.silu - config.add_bias_linear = False - config.bias_activation_fusion = False - config.gated_linear_unit = True - config.apply_query_key_layer_scaling = False - config.layernorm_zero_centered_gamma = ( - False # Zero centered gamma not supported for RMSNorm - ) - config.bias_dropout_fusion = False - config.apply_rope_fusion = False - config.attention_softmax_in_fp32 = True - config.ffn_hidden_size = 14336 - elif config.language_model_type == "yi-34b": - config.activation_func = torch.nn.functional.silu - config.add_bias_linear = False - config.bias_activation_fusion = False - config.gated_linear_unit = True - config.apply_query_key_layer_scaling = False - config.layernorm_zero_centered_gamma = ( - False # Zero centered gamma not supported for RMSNorm - ) - config.bias_dropout_fusion = False - config.apply_rope_fusion = False - config.attention_softmax_in_fp32 = True - config.ffn_hidden_size = 20480 - elif config.language_model_type == "qwen2.5_7B": - config.activation_func = torch.nn.functional.silu - config.add_bias_linear = False - config.add_qkv_bias = True - config.bias_activation_fusion = False - config.gated_linear_unit = True - config.apply_query_key_layer_scaling = False - config.layernorm_zero_centered_gamma = ( - False # Zero centered gamma not supported for RMSNorm - ) - config.bias_dropout_fusion = False - config.apply_rope_fusion = False - config.attention_softmax_in_fp32 = True - config.ffn_hidden_size = 18944 - elif config.language_model_type == "qwen2.0_72B": - 
config.activation_func = torch.nn.functional.silu - config.add_bias_linear = False - config.add_qkv_bias = True - config.bias_activation_fusion = False - config.gated_linear_unit = True - config.apply_query_key_layer_scaling = False - config.layernorm_zero_centered_gamma = ( - False # Zero centered gamma not supported for RMSNorm - ) - config.bias_dropout_fusion = False - config.apply_rope_fusion = False - config.attention_softmax_in_fp32 = True - config.ffn_hidden_size = 29568 - else: - raise ValueError(f"unknown language model type {config.language_model_type}") - - return config - - -def get_vision_model_config(config, apply_query_key_layer_scaling): - if config.vision_model_type == "clip": - config.num_layers = 24 - config.num_attention_heads = 16 - config.add_bias_linear = True - config.add_qkv_bias = True - config.hidden_size = 1024 - config.hidden_dropout = 0.0 - config.attention_dropout = 0.0 - config.ffn_hidden_size = 4096 - config.gated_linear_unit = False - config.activation_func = quick_gelu - config.kv_channels = 64 - config.num_query_groups = 16 - config.layernorm_zero_centered_gamma = False - config.apply_query_key_layer_scaling = apply_query_key_layer_scaling - config.bias_activation_fusion = False - config.bias_dropout_fusion = False - config.attention_softmax_in_fp32 = True - config.normalization = 'LayerNorm' - config.apply_rope_fusion = False - elif config.vision_model_type == "siglip": - config.num_layers = 27 - config.num_attention_heads = 16 - config.add_bias_linear = True - config.add_qkv_bias = True - config.hidden_size = 1152 - config.hidden_dropout = 0.0 - config.attention_dropout = 0.0 - config.ffn_hidden_size = 4304 - config.gated_linear_unit = False - config.activation_func = fast_gelu - config.kv_channels = 72 - config.num_query_groups = 16 - config.layernorm_zero_centered_gamma = False - config.apply_query_key_layer_scaling = apply_query_key_layer_scaling - config.bias_activation_fusion = False - config.bias_dropout_fusion = False - config.attention_softmax_in_fp32 = True - config.normalization = 'LayerNorm' - config.apply_rope_fusion = False - config.qk_layernorm = False - config.layernorm_epsilon = 1e-6 - elif config.vision_model_type == "internvit": - config.num_layers = 45 - config.num_attention_heads = 32 # Padded for TP=8. - config.num_query_groups = 32 # Padded for TP=8. - config.kv_channels = 128 - config.add_bias_linear = True - config.add_qkv_bias = False - config.hidden_size = 3200 - config.hidden_dropout = 0.0 - config.attention_dropout = 0.0 - config.ffn_hidden_size = 12800 - config.gated_linear_unit = False - config.activation_func = torch.nn.functional.gelu - config.layernorm_zero_centered_gamma = False - config.apply_query_key_layer_scaling = apply_query_key_layer_scaling - config.bias_activation_fusion = False - config.bias_dropout_fusion = False - config.attention_softmax_in_fp32 = True - config.normalization = 'RMSNorm' - config.layernorm_epsilon = 1e-6 - config.apply_rope_fusion = False - else: - raise ValueError(f"unknown vision model type {config.vision_model_type}") - - return config - - -def get_vision_projection_config(config, hidden_size): - config.gated_linear_unit = False - config.bias_activation_fusion = False - config.add_bias_linear = False - config.hidden_size = hidden_size # Used as the vision projection output size, i.e., the input to the language model. 
- if config.language_model_type == "llama3_8b": - config.ffn_hidden_size = 14336 - config.activation_func = torch.nn.functional.gelu - elif config.language_model_type == "mistral_7b": - config.ffn_hidden_size = 14336 - config.activation_func = torch.nn.functional.gelu - config.normalization = None - elif config.language_model_type == "yi-34b": - config.ffn_hidden_size = 20480 - config.normalization = "LayerNorm" - config.activation_func = torch.nn.functional.gelu - elif config.language_model_type == "qwen2.5_7B": - config.ffn_hidden_size = 3584 - config.activation_func = torch.nn.functional.gelu - elif config.language_model_type == "qwen2.0_72B": - config.ffn_hidden_size = 29568 - config.normalization = "LayerNorm" - config.activation_func = torch.nn.functional.gelu - else: - raise ValueError(f"unknown language model type {config.language_model_type}") - - return config - - -@dataclass -class EvaluationConfig: - """Evaluation related configuration.""" - task: str - - temperature: float = 1.0 - top_p: float = 0.0 - top_k: int = 0 - - out_seq_length: int = 32 - - output_path: str = "" - - input_image_path: str = "" - gt_path: str = "" - - num_partitions: int = 1 - partition_id: int = 0 - num_samples_per_partition: int = 0 +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from dataclasses import dataclass + +import torch + +from megatron.training.activations import fast_gelu, quick_gelu, squared_relu + + +def get_language_model_config(config): + if config.language_model_type == "llama3_8b": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 14336 + elif config.language_model_type == "llama3.1_8b": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 14336 + elif config.language_model_type == "llama3.1_70B": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 28672 + elif config.language_model_type == "mistral_7b": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 14336 + elif config.language_model_type == "yi-34b": 
+ config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 20480 + elif config.language_model_type == "qwen2.5_7B": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.add_qkv_bias = True + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 18944 + elif config.language_model_type == "qwen2.0_72B": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.add_qkv_bias = True + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 29568 + elif config.language_model_type == "llama3.2_1b": + config.activation_func = torch.nn.functional.silu + config.add_bias_linear = False + config.bias_activation_fusion = False + config.gated_linear_unit = True + config.apply_query_key_layer_scaling = False + config.layernorm_zero_centered_gamma = ( + False # Zero centered gamma not supported for RMSNorm + ) + config.bias_dropout_fusion = False + config.apply_rope_fusion = False + config.attention_softmax_in_fp32 = True + config.ffn_hidden_size = 8192 + elif config.language_model_type.startswith("huggingface"): + # Loaded from HuggingFace config file. 
+ pass + else: + raise ValueError(f"unknown language model type {config.language_model_type}") + + return config + + +def get_vision_model_config(config, apply_query_key_layer_scaling): + if config.vision_model_type == "clip": + config.num_layers = 24 + config.num_attention_heads = 16 + config.add_bias_linear = True + config.add_qkv_bias = True + config.hidden_size = 1024 + config.hidden_dropout = 0.0 + config.attention_dropout = 0.0 + config.ffn_hidden_size = 4096 + config.gated_linear_unit = False + config.activation_func = quick_gelu + config.kv_channels = 64 + config.num_query_groups = 16 + config.layernorm_zero_centered_gamma = False + config.apply_query_key_layer_scaling = apply_query_key_layer_scaling + config.bias_activation_fusion = False + config.bias_dropout_fusion = False + config.attention_softmax_in_fp32 = True + config.normalization = 'LayerNorm' + config.apply_rope_fusion = False + elif config.vision_model_type == "siglip": + config.num_layers = 27 + config.num_attention_heads = 16 + config.add_bias_linear = True + config.add_qkv_bias = True + config.hidden_size = 1152 + config.hidden_dropout = 0.0 + config.attention_dropout = 0.0 + config.ffn_hidden_size = 4304 + config.gated_linear_unit = False + config.activation_func = fast_gelu + config.kv_channels = 72 + config.num_query_groups = 16 + config.layernorm_zero_centered_gamma = False + config.apply_query_key_layer_scaling = apply_query_key_layer_scaling + config.bias_activation_fusion = False + config.bias_dropout_fusion = False + config.attention_softmax_in_fp32 = True + config.normalization = 'LayerNorm' + config.apply_rope_fusion = False + config.qk_layernorm = False + config.layernorm_epsilon = 1e-6 + elif config.vision_model_type == "internvit": + config.num_layers = 45 + config.num_attention_heads = ((24 // config.tensor_model_parallel_size) + 1) * config.tensor_model_parallel_size + config.num_query_groups = config.num_attention_heads + config.add_bias_linear = True + config.add_qkv_bias = False + config.hidden_size = 3200 + config.hidden_dropout = 0.0 + config.attention_dropout = 0.0 + config.ffn_hidden_size = 12800 + config.gated_linear_unit = False + config.activation_func = torch.nn.functional.gelu + config.layernorm_zero_centered_gamma = False + config.apply_query_key_layer_scaling = apply_query_key_layer_scaling + config.bias_activation_fusion = False + config.bias_dropout_fusion = False + config.attention_softmax_in_fp32 = True + config.normalization = 'RMSNorm' + config.layernorm_epsilon = 1e-6 + config.apply_rope_fusion = False + elif config.vision_model_type == "radio": + config.num_layers = 32 + config.num_attention_heads = 16 + config.add_bias_linear = True + config.add_qkv_bias = True + config.hidden_size = 1280 + config.ffn_hidden_size = 5120 + config.gated_linear_unit = False + config.activation_func = fast_gelu + config.kv_channels = 80 + config.num_query_groups = 16 + config.layernorm_zero_centered_gamma = False + config.apply_query_key_layer_scaling = apply_query_key_layer_scaling + config.bias_activation_fusion = False + config.bias_dropout_fusion = False + config.attention_softmax_in_fp32 = True + config.normalization = 'LayerNorm' + config.apply_rope_fusion = False + config.qk_layernorm = False + config.layernorm_epsilon = 1e-6 + elif config.vision_model_type.startswith("huggingface"): + # Loaded from HuggingFace config file. 
+ pass + else: + raise ValueError(f"unknown vision model type {config.vision_model_type}") + + return config + + +def get_vision_projection_config(config, hidden_size): + config.gated_linear_unit = False + config.bias_activation_fusion = False + config.add_bias_linear = False + config.hidden_size = hidden_size # Used as the vision projection output size, i.e., the input to the language model. + if config.language_model_type == "llama3_8b": + config.ffn_hidden_size = 14336 + config.activation_func = torch.nn.functional.gelu + elif config.language_model_type == "llama3.1_8b": + config.ffn_hidden_size = 4096 + config.activation_func = torch.nn.functional.gelu + config.layernorm_epsilon = 1e-5 + config.add_bias_linear = True + config.normalization = "LayerNorm" + elif config.language_model_type == "mistral_7b": + config.ffn_hidden_size = 14336 + config.activation_func = torch.nn.functional.gelu + config.normalization = None + elif config.language_model_type == "yi-34b": + config.ffn_hidden_size = 20480 + config.normalization = "LayerNorm" + config.activation_func = torch.nn.functional.gelu + elif config.language_model_type == "qwen2.5_7B": + config.ffn_hidden_size = 3584 + config.activation_func = torch.nn.functional.gelu + elif config.language_model_type == "qwen2.0_72B": + config.ffn_hidden_size = 29568 + config.normalization = "LayerNorm" + config.activation_func = torch.nn.functional.gelu + elif config.language_model_type == "llama3.2_1b": + config.ffn_hidden_size = 2048 + config.activation_func = torch.nn.functional.gelu + config.normalization = "LayerNorm" + elif config.language_model_type.startswith("huggingface"): + config.activation_func = torch.nn.functional.gelu + from transformers import AutoConfig + hf_config = AutoConfig.from_pretrained(config.huggingface_model_name_or_path) + if "qwen" in hf_config.model_type: + config.ffn_hidden_size = 1536 + else: + raise ValueError(f"unknown language model type {config.language_model_type}") + + return config + + +@dataclass +class EvaluationConfig: + """Evaluation related configuration.""" + task: str + + temperature: float = 1.0 + top_p: float = 0.0 + top_k: int = 0 + + out_seq_length: int = 32 + + output_path: str = "" + + input_image_path: str = "" + gt_path: str = "" + + num_partitions: int = 1 + partition_id: int = 0 + num_samples_per_partition: int = 0 diff --git a/examples/multimodal/dataset_helpers.py b/examples/multimodal/dataset_helpers.py index ecbbc50..bd1be08 100644 --- a/examples/multimodal/dataset_helpers.py +++ b/examples/multimodal/dataset_helpers.py @@ -1,814 +1,906 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-import bisect -import dataclasses -import json -import re -import sys -import traceback -from dataclasses import dataclass -from typing import Dict, List, Optional, Tuple, Union - -from image_processing import get_visual_transform -from PIL import Image -from torchvision.transforms import ToPILImage -import numpy as np -import torch - -from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN, VIDEO_TOKEN -from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.energon import ( - Batch, - CaptioningSample, - DefaultTaskEncoder, - OCRSample, - Sample, - SimilarityInterleavedSample, - VQASample, - MultiChoiceVQASample -) -from megatron.energon.task_encoder.base import stateless -from megatron.training import get_args, get_tokenizer - - -@dataclass -class ImageTaskSample(Sample): - __key__: str - __restore_key__: Tuple[Union[str, int, tuple], ...] - __subflavor__: Dict - __subflavors__: Dict - # (c, h, w) - imgs: List[torch.Tensor] - num_tiles: List[int] - tokens: torch.Tensor - total_len: int # Total token count in the sample, including text and image tokens - labels: torch.Tensor = None - - -@dataclass -class ImageTaskSamplePacked(Sample): - """Dataclass to store a single packed sample (not a batch). - - P = Number of sub-samples in the packed sample - seq_len = Total sequence length - num_imgs = Number of images across all samples in the packed sample - """ - - __key__: str # Sample name - __restore_key__: Tuple[Union[str, int, tuple], ...] - __subflavor__: Dict # Sample metadata. Deprecated. - __subflavors__: Dict # Sample metadata. - tokens: torch.Tensor # Input tokens packed into a single tensor (seq_len,) - labels: torch.Tensor # Target tokens packed into a single tensor (seq_len,) - imgs: List[torch.Tensor] # Input images - num_tiles: List[int] # Number of tiles for each image of each sample (num_imgs) - max_length: int # Maximum length across sub-samples. - cu_lengths: List[int] # Cumulative length of each sub-sample in this packed sample incl. text and image tokens (P,) - - -# Typing for the resulting batch data after encode_batch() -@dataclass -class ImageTaskBatchPacked(Batch): - """Dataclass to store a batch of packed samples. - - N = Batch size - P = Number of samples in the packed sample - seq_len = Maximum sequence length - num_imgs = Number of images across all samples in the packed sample - """ - - __key__: List[str] # Sample names - __restore_key__: Tuple[Union[str, int, tuple], ...] - __subflavor__: Dict # Sample metadata. Deprecated. - __subflavors__: List[Dict] # Sample metadatas. - tokens: torch.Tensor # Input tokens packed and padded (N, seq_len) - labels: torch.Tensor # Target tokens packed and padded (N, seq_len) - imgs: torch.Tensor # All image tiles stacked into a single tensor (num_tiles, C, H, W) - num_tiles: List[List[int]] # Number of tiles per image (N, num_imgs) - max_lengths: List[int] # Maximum length across sub-samples (N,) - cu_lengths: List[List[int]] # Cumulative length of each sub-sample in each packed sample of the batch (N, P) - - -# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L19 -# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. 
-def search_for_fit(numbers: List[int], capacity: int) -> int: - """Finds the index of largest number that fits into the knapsack with the given capacity.""" - index = bisect.bisect(numbers, capacity) - return -1 if index == 0 else (index - 1) - - -# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L27 -# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. -def greedy_knapsack(item_sizes: List[int], samples: List, max_capacity: int) -> List: - """Greedy algorithm with binary search for the knapsack problem. - - Pack as many samples as possible given a maximum capacity and capacities of individual samples. - Used if sequence packing is enabled. - """ - assert len(item_sizes) == len(samples), "sample lengths and samples must have the same length." - - knapsacks = [] - - if len(item_sizes) == 0: - return knapsacks - - # Sort sample lengths and samples together. - sorted_item_sizes, sorted_samples = zip(*sorted(zip(item_sizes, samples), key=lambda x: x[0])) - sorted_item_sizes = list(sorted_item_sizes) - sorted_samples = list(sorted_samples) - - # Check if all samples fit in the knapsack capacity. - if sorted_item_sizes[-1] > max_capacity: - raise ValueError(f"knapsack: A sample is larger {sorted_item_sizes[-1]} than the max_sequence_length {max_capacity}.") - - while sorted_item_sizes: - current_knapsack = [] - remaining_capacity = max_capacity - - while True: - idx = search_for_fit(sorted_item_sizes, remaining_capacity) - if idx == -1: - break # Can't fit more samples. - - remaining_capacity -= sorted_item_sizes[idx] - - sorted_item_sizes.pop(idx) - sample = sorted_samples.pop(idx) - current_knapsack.append(sample) - - knapsacks.append(current_knapsack) - - return knapsacks - - -class TaskEncoder(DefaultTaskEncoder[OCRSample, OCRSample, ImageTaskBatchPacked, dict]): - """A simple task encoder for VLMs.""" - - def __init__( - self - ): - super().__init__() - - self.args = get_args() - - self.tokenizer = get_tokenizer() - with open(self.args.prompt_path, "r") as f: - self.manual_prompts = json.load(f) - self.dataloader_seq_length = self.args.dataloader_seq_length # Always return samples of this length. - self.packing_seq_length = self.args.packing_seq_length # Packing sequence length, if packing is enabled. - self.is_packing_enabled = self.args.packing_buffer_size is not None and self.args.packing_buffer_size > 0 - - if self.dataloader_seq_length and self.packing_seq_length: - assert self.dataloader_seq_length >= self.packing_seq_length, "dataloader sequence length must be greater than or equal to the packing sequence length" - - if self.is_packing_enabled: - assert self.packing_seq_length > 0, "packing sequence length must be set" - - self.num_image_embeddings_per_tile = get_num_image_embeddings( - self.args.img_h, - self.args.img_w, - self.args.patch_dim, - self.args.vision_model_type, - self.args.disable_vision_class_token, - 1, - self.args.pixel_shuffle, - self.args.use_tile_tags, - ) - - self.txt_to_token_dict = {} - - self.img_h, self.img_w = self.args.img_h, self.args.img_w - - # This map is used to reduce the number of tiles used per image if the number of tokens is - # larger than the decoder_seq_length. 
- self.num_tiles_degradation_map = {12:8, 8:6, 6:4, 4:2, 2:1, 1:1} - - def _get_total_seq_length(self, input_ids, num_tiles): - """Calculate expected sequence length given text tokens length and number of tiles.""" - total_num_images = len(num_tiles) - total_num_tiles = sum(num_tiles) - total_len = len(input_ids) + total_num_tiles * self.num_image_embeddings_per_tile - total_num_images - return total_len - - def _truncate_for_packing(self, input_ids, target, num_tiles): - """Truncate tokens and labels if they exceed packing sequence length.""" - total_num_images = len(num_tiles) - total_num_tiles = sum(num_tiles) - total_img_embeddings_len = total_num_tiles * self.num_image_embeddings_per_tile - max_text_tokens = self.packing_seq_length - total_img_embeddings_len + total_num_images - - input_ids = input_ids[:max_text_tokens] - target = target[:max_text_tokens] - - # If truncate causes all labels to be ignored, then skip the sample - if (target == IGNORE_INDEX).all(): - raise ValueError(f"all targets will be ignored after truncation: {input_ids}") - - return input_ids, target - - @stateless(restore_seeds=True) - def encode_sample(self, sample: Union[CaptioningSample, OCRSample, VQASample, SimilarityInterleavedSample]): - if isinstance(sample, OCRSample): - if "pdfa" in sample.__key__: - yield self.combined_ocr_encoder(sample, task_type='encode_pdf') - elif "multi" in sample.__key__: - yield self.combined_ocr_encoder(sample, task_type='_encode_ocr') - else: - yield self.combined_ocr_encoder(sample, task_type='encode_ocr_ref') - elif isinstance(sample, CaptioningSample): - yield self.encode_captioning(sample) - elif isinstance(sample, VQASample): - is_llava_training = sample.__subflavors__["is_llava_training"] if "is_llava_training" in sample.__subflavors__ else False - - if "llava" in sample.__key__ or is_llava_training: - yield self.encode_llava_pretrain(sample) - else: - yield self.encode_any_single_turn_vqa(sample) - elif isinstance(sample, SimilarityInterleavedSample): - yield self.encode_llava_sft(sample) - elif isinstance(sample, MultiChoiceVQASample): - yield self.encode_any_single_turn_vqa(sample) - else: - raise NotImplementedError("Sample format not supported", sample) - - def encode_captioning(self, sample: CaptioningSample): - """Encode CaptioningSample.""" - augment = sample.__subflavors__.get("augmentation") - - imgs = get_visual_transform( - sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, - self.args.vision_model_type, - ) - num_tiles = [len(imgs)] - - prompt_list = self.manual_prompts["CaptioningPretraining"]["raw"] - - prompt_idx = np.random.randint(len(prompt_list)) - cur_prompt = prompt_list[prompt_idx] - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + "\n" - - caption = sample.caption.strip() - - split_by_line_flag = sample.__subflavors__.get("SplitByLine") - if split_by_line_flag: - caption_list = caption.split('\n') - caption = np.random.choice(caption_list) - - conv = [ - # Note: no system message. 
- {"role": "user", "content": cur_prompt}, - {"role": "assistant", "content": caption}, - ] - - input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) - - if self.is_packing_enabled: - input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) - - return ImageTaskSample( - __key__=sample.__key__, - __restore_key__=sample.__restore_key__, - __subflavor__=None, - __subflavors__=sample.__subflavors__, - imgs=imgs, - num_tiles=num_tiles, - tokens=torch.tensor(input_ids), - labels=torch.tensor(target), - total_len=self._get_total_seq_length(input_ids, num_tiles), - ) - - def encode_llava_pretrain(self, sample: VQASample): - """Encode pretrain sample in LLAVA style.""" - augment = sample.__subflavors__.get("augmentation", False) - - imgs = get_visual_transform( - sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, - self.args.vision_model_type, - ) - num_tiles = [len(imgs)] - - # LLAVA training: override text-prompt with just the image. - conv = [ - # Note: no system message. - {"role": "user", "content": IMAGE_TOKEN + "\n"}, - {"role": "assistant", "content": sample.answers}, - ] - - input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) - - if self.is_packing_enabled: - input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) - - return ImageTaskSample( - __key__=sample.__key__, - __restore_key__=sample.__restore_key__, - __subflavor__=None, - __subflavors__=sample.__subflavors__, - imgs=imgs, - num_tiles=num_tiles, - tokens=torch.tensor(input_ids), - labels=torch.tensor(target), - total_len=self._get_total_seq_length(input_ids, num_tiles), - ) - - def encode_llava_sft(self, sample: SimilarityInterleavedSample): - """Encode SFT sample.""" - augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False - - has_image = False - if hasattr(sample, "images"): - # If this is a text-only sample and we are freezing the LM, - # then use a dummy input image. - if len(sample.images) == 0 and self.args.freeze_LM: - empty_img = Image.new('RGB', (self.args.img_w, self.args.img_h), (255, 255, 255)) - sample.images.append(empty_img) - if len(sample.images) > 0 and not has_video: - has_image = True - - # Note: Some tokenizers may ignore the system prompt. - conversation = [{"role": "system", "content": "Answer the questions."}] - # Format the conversation as a list of "user" / "assistant" turns. - for text in sample.texts: - error_msg = f"unexpected role {text['from']} in {sample.texts}" - assert text["from"] in ["human", "gpt"], error_msg - conversation.append({ - "role": "user" if text["from"] == "human" else "assistant", - "content": text["value"]}) - - # Replace the image tags with IMAGE_TOKEN and count the number of image tags - number_image_tags = 0 - image_tag_ids_list = [] - for turn in conversation: - if turn["role"] == "user": - image_tag_ids = [int(x) - 1 for x in re.findall(r"", turn["content"])] - image_tag_ids_list.extend(image_tag_ids) - turn["content"] = re.sub(r"", IMAGE_TOKEN, turn["content"]) - number_image_tags += turn["content"].count(IMAGE_TOKEN) - # For videos, we replace the image tag with the video tag - if has_video: - turn["content"] = turn["content"].replace(IMAGE_TOKEN, VIDEO_TOKEN) - - # We re-order the images in sample.images according to how they appear in the conversation. 
- if len(image_tag_ids_list) > 0: - sample.images = [sample.images[idx] for idx in image_tag_ids_list] - - # If there is only one image, but several image tags, we assume all the tags refer to the - # same image and duplicate the image: - if len(sample.images) == 1 and number_image_tags > 1: - sample.images = sample.images * number_image_tags - - number_of_images = len(sample.images) - # Fail if there are more image or video tags than image or videos: - error_msg = ( - f"Found {number_image_tags} image tags for {number_of_images} images. {sample.texts}") - assert number_image_tags <= number_of_images, error_msg - - # If there are less image of video tags than image or videos, prepend the tags to the first - # user message: - if number_image_tags < number_of_images: - for turn in conversation: - if turn["role"] == "user": - tag_to_add = VIDEO_TOKEN if has_video else IMAGE_TOKEN - turn["content"] = tag_to_add*(number_of_images-number_image_tags) + "\n" + turn["content"] - break - - input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - - if has_image: - imgs = [] - num_tiles = [] - max_num_tiles = self.args.max_num_tiles - # We keep a buffer of 4 tokens for the question, - # the rest can be used for image tokens. - max_image_token_allowed = self.args.decoder_seq_length - len(input_ids) - 4 - # We start by extracting as many tiles per image as possible, and decrease the max - # number of tiles if there are too many image tokens. - while True: - imgs = [] - num_tiles = [] - for img in sample.images: - img_tiles = get_visual_transform( - img, self.img_h, self.img_w, self.args.use_tiling, max_num_tiles, - self.args.use_thumbnail, augment, self.args.vision_model_type) - imgs += img_tiles - num_tiles += [len(img_tiles)] - if max_num_tiles == 1: - break - if sum(num_tiles) * self.token_per_img_tile > max_image_token_allowed: - if max_num_tiles in self.num_tiles_degradation_map: - max_num_tiles = self.num_tiles_degradation_map[max_num_tiles] - else: - raise RuntimeError(( - f"Tried to decrease the number of tiles {max_num_tiles} but it's not ", - f"defined in the degradation map {self.num_tiles_degradation_map}")) - else: - break - elif has_video: - # We don't use tiling for videos to limit the number of tokens. - use_tiling=False - # Grab the selected frames of the video as a tensor with shape - # fhwc: (num_frames, num_channels, height, width). - video_fchw = sample.images[0].permute(0, 1, 2, 3) - selected_frames = torch.linspace( - 0, video_fchw.shape[0] - 1, self.args.num_frames).long() - video_fchw = video_fchw[selected_frames] - imgs = [] - for video_chw in video_fchw: - to_pil = ToPILImage() - video_chw = to_pil(video_chw) - imgs += get_visual_transform( - video_chw, self.img_h, self.img_w, use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment, self.args.vision_model_type) - num_tiles = [len(imgs)] - else: - imgs = num_tiles = [] - - if self.is_packing_enabled: - input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) - - # Some final checks with respect to the number of image tokens and images on the tokenized - # conversation. There can still be errors, for instance if a non-video sample happens to - # have our pre-defined video token, or if the packing truncation removed a necessary image - # tag. 
- number_image_token = np.sum(input_ids == self.img_token_id) - error_msg = ( - f"Found {number_image_token} image tokens for len({num_tiles}) = {len(num_tiles)} image tiles in {conversation}.") - assert number_image_token == len(num_tiles), error_msg - error_msg = ( - f"Found sum({num_tiles}) = {np.sum(num_tiles)} tiles for {len(imgs)} images in {conversation}.") - assert np.sum(num_tiles) == len(imgs), error_msg - - return ImageTaskSample( - __key__=sample.__key__, - __restore_key__=sample.__restore_key__, - __subflavor__=None, - __subflavors__=sample.__subflavors__, - imgs=imgs, - num_tiles=num_tiles, - tokens=torch.tensor(input_ids), - labels=torch.tensor(target), - total_len=self._get_total_seq_length(input_ids, num_tiles), - ) - - def encode_any_single_turn_vqa(self, sample): - """Encode MultiChoiceVQA or VQA sample.""" - augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False - - if has_video: - # Grab the selected frames of the video as a tensor with shape - # fhwc: (num_frames, height, width, num_channels). - video_fhwc = sample.image.permute(0, 2, 3, 1) - selected_frames = torch.linspace( - 0, video_fhwc.shape[0] - 1, self.args.num_frames).long() - video_frame_fhwc = video_fhwc[selected_frames] - imgs = [] - for video_frame_hwc in video_frame_fhwc: - imgs += get_visual_transform( - video_frame_hwc, self.img_h, self.img_w, - self.args.use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment, self.args.vision_model_type) - else: - imgs = get_visual_transform( - sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment, self.args.vision_model_type, - ) - - num_tiles = [len(imgs)] - - if isinstance(sample, MultiChoiceVQASample): - cur_prompt = format_multichoice_question(sample.context, sample.choices) - if IMAGE_TOKEN not in cur_prompt: - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt - cur_answer = format_multichoice_answer(sample.correct_choice_idx) - elif isinstance(sample, VQASample): - if 'docvqa' in sample.__key__: - prompt_list = self.manual_prompts["VQASFT"]["docvqa"] - elif sample.__subflavors__.get("VQASFT"): - prompt_list = self.manual_prompts["VQASFT"]["raw"] - else: - prompt_list = ["{}"] - - prompt_idx = np.random.randint(len(prompt_list)) - cur_prompt = prompt_list[prompt_idx] - - cur_prompt = cur_prompt.format(sample.context) - - if IMAGE_TOKEN not in cur_prompt: - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt - - if isinstance(sample.answers, list): - answer_list = sample.answers - weight_list = np.array(sample.answer_weights).astype(np.float32) - weight_list = weight_list / np.sum(weight_list) - answer_idx = np.random.choice(weight_list.shape[0], 1, p=weight_list)[0] - cur_answer = answer_list[answer_idx] - else: - cur_answer = sample.answers - else: - raise NotImplementedError("Unsupported data type provided", sample) - - conversation = [ - {"role": "system", "content": "Answer the questions."}, - {"role": "user", "content": cur_prompt}, - {"role": "assistant", "content": str(cur_answer)}, - ] - - input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - - if self.is_packing_enabled: - input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) - - return ImageTaskSample( - __key__=sample.__key__, - __restore_key__=sample.__restore_key__, - __subflavor__=None, - __subflavors__=sample.__subflavors__, - 
imgs=imgs, - num_tiles=num_tiles, - tokens=torch.tensor(input_ids), - labels=torch.tensor(target), - total_len=self._get_total_seq_length(input_ids, num_tiles), - ) - - def combined_ocr_encoder(self, sample, task_type): - """Encode OCR samples.""" - augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False - - if task_type == "encode_pdf": - sample, cur_prompt, cur_answer = self.encode_pdf_prompt(sample) - elif task_type == "encode_ocr_ref": - sample, cur_prompt, cur_answer = self.encode_ocr_ref_prompt(sample) - elif task_type == "_encode_ocr": - sample, cur_prompt, cur_answer = self.encode_ocr_prompt(sample) - - imgs = get_visual_transform( - sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, - self.args.use_thumbnail, augment, self.args.vision_model_type, - ) - num_tiles = [len(imgs)] - - conversation = [ - {"role": "system", "content": "Answer the questions."}, - {"role": "user", "content": cur_prompt}, - {"role": "assistant", "content": str(cur_answer)}, - ] - - input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) - - if self.is_packing_enabled: - input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) - - return ImageTaskSample( - __key__=sample.__key__, - __restore_key__=sample.__restore_key__, - __subflavor__=None, - __subflavors__=sample.__subflavors__, - imgs=imgs, - num_tiles=num_tiles, - tokens=torch.tensor(input_ids), - labels=torch.tensor(target), - total_len=self._get_total_seq_length(input_ids, num_tiles), - ) - - def encode_pdf_prompt(self, sample: OCRSample) -> ImageTaskSample: - """Encode OCR sample.""" - prompt_list = self.manual_prompts["DocPretraining"]["raw"] - prompt_idx = np.random.randint(len(prompt_list)) - cur_prompt = prompt_list[prompt_idx] - if IMAGE_TOKEN not in cur_prompt: - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt - - # Make sure there is no extra IMAGE_TOKEN tag. 
- sample.text = sample.text.replace(IMAGE_TOKEN, "") - - caption = sample.text.strip() - - split_by_line_flag = sample.__subflavors__.get("SplitByLine") - if split_by_line_flag: - caption_list = caption.split('\n') - caption = np.random.choice(caption_list) - cur_answer = caption - - return sample, cur_prompt, cur_answer - - def encode_ocr_ref_prompt(self, sample: OCRSample) -> ImageTaskSample: - """Encode OCR sample.""" - ref = sample.text - region = sample.words_boxes - - # Make sure there is no extra IMAGE_TOKEN tag - ref = ref.replace(IMAGE_TOKEN, "") - - if len(region) == 4: - region = f"({region[0]},{region[1]}),({region[2]},{region[3]})" - else: - region = f"({region[0]},{region[1]}),({region[2]},{region[3]}),({region[4]},{region[5]}),({region[6]},{region[7]})" - - # Randomly choose between two tasks - task_idx = np.random.randint(2) - if task_idx == 0: - # Referring Grounding - prompt_list = self.manual_prompts["DocPretraining"]["referring_grounding"] - prompt_content = ref - answer = region - else: - # Grounded OCR - prompt_list = self.manual_prompts["DocPretraining"]["grounded_ocr"] - prompt_content = region - answer = ref - - prompt_idx = np.random.randint(len(prompt_list)) - cur_prompt = prompt_list[prompt_idx] - cur_prompt = cur_prompt.format(prompt_content) - if IMAGE_TOKEN not in cur_prompt: - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt - - return sample, cur_prompt, answer - - def bbox_coord_to_label(self, text, bbox): - """Format bbox coordinates as text.""" - assert len(bbox) == 4 or len(bbox) == 8 - - # Make sure there is no extra IMAGE_TOKEN tag - text = text.replace(IMAGE_TOKEN, "") - - if len(bbox) == 4: - label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]})" - else: - label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]}),({bbox[4]},{bbox[5]}),({bbox[6]},{bbox[7]})" - - return label_str - - def encode_ocr_prompt(self, sample: OCRSample) -> ImageTaskSample: - """Encode OCR sample.""" - if isinstance(sample.words_boxes[0], int): - answer = self.bbox_coord_to_label(sample.text, sample.words_boxes) - elif isinstance(sample.words_boxes[0], list): - answer = "" - for i, bbox in enumerate(sample.words_boxes): - answer += self.bbox_coord_to_label(sample.words_text[i], bbox) - - prompt_list = self.manual_prompts["DocPretraining"]["ocr_multi"] - prompt_idx = np.random.randint(len(prompt_list)) - cur_prompt = prompt_list[prompt_idx] - - if IMAGE_TOKEN not in cur_prompt: - cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt - cur_answer = answer - - return sample, cur_prompt, cur_answer - - def batch(self, samples: List[Union[ImageTaskSample, ImageTaskSamplePacked]]) -> ImageTaskBatchPacked: - # Stack images to [num_tiles, c, h, w]. If there are no images (text-only), then use a dummy image. - imgs = [img for s in samples for img in s.imgs] - if len(imgs) > 0: - imgs = torch.stack(imgs) - else: - imgs = torch.tensor([[0]], dtype=torch.float32) - - # If the user hasn't defined a target dataloader sequence length, then use the max along the sample lengths. - max_seq_len = self.dataloader_seq_length - if not max_seq_len: - max_seq_len = max(len(s.tokens) for s in samples) - - tokens = np.full((len(samples), max_seq_len), self.tokenizer.pad, dtype=np.int64) - # +1 to accommodate shift to left by one later. - labels = np.full((len(samples), max_seq_len + 1), self.tokenizer.pad, dtype=np.int64) - - for i, s in enumerate(samples): - # If the sample/target length exceeds the target sequence length, then truncate. 
- text_len = min(max_seq_len, len(s.tokens)) - target_len = min(max_seq_len+1, len(s.labels)) - - tokens[i, :text_len] = s.tokens[:text_len] - labels[i, :target_len] = s.labels[:target_len] - - num_tiles = torch.tensor([n for s in samples for n in s.num_tiles], dtype=torch.int32) - if len(num_tiles) == 0: - num_tiles = torch.tensor([[0]], dtype=torch.int32) - - # Cumulative sample lengths are needed for packing, otherwise use dummy values. - cu_lengths = torch.tensor([[0]], dtype=torch.int32) - max_lengths = torch.tensor([[0]], dtype=torch.int32) - - if self.is_packing_enabled: - cu_lengths = torch.stack([s.cu_lengths for s in samples]) - max_lengths = torch.tensor([s.max_length for s in samples], dtype=torch.int32) - - return ImageTaskBatchPacked( - __key__=[s.__key__ for s in samples], - __restore_key__=[s.__restore_key__ for s in samples], - __subflavor__=None, - __subflavors__=samples[0].__subflavors__, - tokens=tokens, - labels=labels, - imgs=imgs, - num_tiles=num_tiles, - cu_lengths=cu_lengths, - max_lengths=max_lengths, - ) - - def encode_batch(self, batch: ImageTaskBatchPacked) -> dict: - raw = dataclasses.asdict(batch) - del raw["__subflavors__"] - return raw - - def select_samples_to_pack(self, samples: List[ImageTaskSample]) -> List[List[ImageTaskSample]]: - """Selects which samples will be packed together. - - NOTE: Energon dataloader calls this method internally if packing is used. - Please see https://nvidia.github.io/Megatron-Energon/packing.html - """ - lengths = [sample.total_len for sample in samples] - - packed_samples = greedy_knapsack(lengths, samples, self.packing_seq_length) - - return packed_samples - - @stateless - def pack_selected_samples(self, samples: List[ImageTaskSample]) -> List[ImageTaskSamplePacked]: - """ - Function to pack a list of ImageTaskSample into a single ImageTaskSamplePacked. - - NOTE: Energon dataloader calls this method internally if packing is used. - Please see https://nvidia.github.io/Megatron-Energon/packing.html - - Args: - samples: List of ImageTaskSample instances to pack into one sample. - - Returns: - ImageTaskSamplePacked instance. - """ - packing_seq_len = self.packing_seq_length - - packed_tokens = [] - packed_labels = [] - packed_imgs = [] - - current_length = 0 - max_length = 0 - cu_lengths = [0] - - # Process each sample and build lists that we will concatenate to create the packed sample. - for _, sample in enumerate(samples): - sample_len = sample.total_len - - if sample_len > max_length: - max_length = sample_len - - # If adding this sample exceeds the max length, stop. - # This should not happen. The select_samples_to_pack method should have already ensured that the samples fit. - if current_length + sample_len > packing_seq_len: - raise ValueError(f"Packed sample exceeds the maximum sequence length of {packing_seq_len}: {samples}") - - # Add the sample's tokens and labels - packed_tokens.append(sample.tokens) - packed_labels.append(sample.labels) - - # Add the images - packed_imgs += sample.imgs - - current_length += sample_len - cu_lengths.append(current_length) - - # Concatenate packed tokens and labels. 
- packed_tokens = torch.cat(packed_tokens, dim=0) - packed_labels = torch.cat(packed_labels, dim=0) - - return ImageTaskSamplePacked( - __key__=",".join([s.__key__ for s in samples]), - __restore_key__=(), # Will be set by energon based on `samples` - __subflavor__=None, - __subflavors__=samples[0].__subflavors__, - tokens=packed_tokens, - labels=packed_labels, - imgs=packed_imgs, - cu_lengths=torch.tensor(cu_lengths, dtype=torch.int32), - max_length=max_length, - num_tiles=[n for s in samples for n in s.num_tiles], - ) - - -def print_error_handler(exc: Exception, key: Optional[str]): - print( - f"The following exception occurred in the dataloader for sample {key} and is skipped", - file=sys.stderr, - ) - traceback.print_exc() - - -def format_multichoice_question(question, multichoice_options): - """Format multi-choice question.""" - options_text = ["{}. {}\n".format(chr(ord('A') + i), option) for i, option in - zip(range(len(multichoice_options)), multichoice_options)] - options_text = "".join(options_text) - - options_text = f"{options_text}Answer with the option's letter from the given choices directly." - - return "{}\n{}".format(question, options_text) - - -def format_multichoice_answer(idx): - """Format multi-choice answer.""" - return chr(ord('A') + idx) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import bisect +import dataclasses +import json +import re +import sys +import traceback +from dataclasses import dataclass +from typing import Dict, List, Optional, Tuple, Union + +from image_processing import find_closest_aspect_ratio, find_closest_area_weighted_aspect_ratio, get_visual_transform +from PIL import Image +from torchvision.transforms import ToPILImage +import numpy as np +import torch + +from energon_util import OfflineTargetAspectRatioSample, SampleListSample +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN, VIDEO_TOKEN +from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings +from megatron.energon import ( + Batch, + CaptioningSample, + DefaultTaskEncoder, + OCRSample, + Sample, + SimilarityInterleavedSample, + VQASample, + MultiChoiceVQASample +) +from megatron.energon.task_encoder.base import stateless +from megatron.training import get_args, get_tokenizer + + +@dataclass +class ImageTaskSample(Sample): + __key__: str + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict + __subflavors__: Dict + # (c, h, w) + imgs: List[torch.Tensor] + num_tiles: List[int] + tokens: torch.Tensor + total_len: int # Total token count in the sample, including text and image tokens + labels: torch.Tensor = None + + +@dataclass +class ImageTaskSamplePacked(Sample): + """Dataclass to store a single packed sample (not a batch). + + P = Number of sub-samples in the packed sample + seq_len = Total sequence length + num_imgs = Number of images across all samples in the packed sample + """ + + __key__: str # Sample name + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict # Sample metadata. Deprecated. + __subflavors__: Dict # Sample metadata. + tokens: torch.Tensor # Input tokens packed into a single tensor (seq_len,) + labels: torch.Tensor # Target tokens packed into a single tensor (seq_len,) + imgs: List[torch.Tensor] # Input images + num_tiles: List[int] # Number of tiles for each image of each sample (num_imgs) + max_length: int # Maximum length across sub-samples. + cu_lengths: List[int] # Cumulative length of each sub-sample in this packed sample incl. 
text and image tokens (P,) + + +# Typing for the resulting batch data after encode_batch() +@dataclass +class ImageTaskBatchPacked(Batch): + """Dataclass to store a batch of packed samples. + + N = Batch size + P = Number of samples in the packed sample + seq_len = Maximum sequence length + num_imgs = Number of images across all samples in the packed sample + """ + + __key__: List[str] # Sample names + __restore_key__: Tuple[Union[str, int, tuple], ...] + __subflavor__: Dict # Sample metadata. Deprecated. + __subflavors__: List[Dict] # Sample metadatas. + tokens: torch.Tensor # Input tokens packed and padded (N, seq_len) + labels: torch.Tensor # Target tokens packed and padded (N, seq_len) + imgs: torch.Tensor # All image tiles stacked into a single tensor (num_tiles, C, H, W) + num_tiles: List[List[int]] # Number of tiles per image (N, num_imgs) + max_lengths: List[int] # Maximum length across sub-samples (N,) + cu_lengths: List[List[int]] # Cumulative length of each sub-sample in each packed sample of the batch (N, P) + + +# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L19 +# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. +def search_for_fit(numbers: List[int], capacity: int) -> int: + """Finds the index of largest number that fits into the knapsack with the given capacity.""" + index = bisect.bisect(numbers, capacity) + return -1 if index == 0 else (index - 1) + + +# Based on https://github.com/hiyouga/LLaMA-Factory/blob/641d0dab08d96a93c34657742213d8994d9ed476/src/llamafactory/data/processors/processor_utils.py#L27 +# Copyright (c) 2024 LLaMA-Factory. Apache license 2.0. +def greedy_knapsack(item_sizes: List[int], samples: List, max_capacity: int) -> List: + """Greedy algorithm with binary search for the knapsack problem. + + Pack as many samples as possible given a maximum capacity and capacities of individual samples. + Used if sequence packing is enabled. + """ + assert len(item_sizes) == len(samples), "sample lengths and samples must have the same length." + + knapsacks = [] + + if len(item_sizes) == 0: + return knapsacks + + # Sort sample lengths and samples together. + sorted_item_sizes, sorted_samples = zip(*sorted(zip(item_sizes, samples), key=lambda x: x[0])) + sorted_item_sizes = list(sorted_item_sizes) + sorted_samples = list(sorted_samples) + + # Check if all samples fit in the knapsack capacity. + if sorted_item_sizes[-1] > max_capacity: + raise ValueError(f"knapsack: A sample is larger {sorted_item_sizes[-1]} than the max_sequence_length {max_capacity}.") + + while sorted_item_sizes: + current_knapsack = [] + remaining_capacity = max_capacity + + while True: + idx = search_for_fit(sorted_item_sizes, remaining_capacity) + if idx == -1: + break # Can't fit more samples. + + remaining_capacity -= sorted_item_sizes[idx] + + sorted_item_sizes.pop(idx) + sample = sorted_samples.pop(idx) + current_knapsack.append(sample) + + knapsacks.append(current_knapsack) + + return knapsacks + + +class TaskEncoder(DefaultTaskEncoder[OCRSample, OCRSample, ImageTaskBatchPacked, dict]): + """A simple task encoder for VLMs.""" + + def __init__( + self + ): + super().__init__() + + self.args = get_args() + + self.tokenizer = get_tokenizer() + with open(self.args.prompt_path, "r") as f: + self.manual_prompts = json.load(f) + self.dataloader_seq_length = self.args.dataloader_seq_length # Always return samples of this length. 
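+ # Note on the two lengths: when packing is enabled, several encoded sub-samples are
+ # concatenated into one sequence of at most packing_seq_length tokens (text tokens plus
+ # image embeddings), and the final batch is padded to dataloader_seq_length, which is
+ # asserted below to be at least as large.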
+ self.packing_seq_length = self.args.packing_seq_length # Packing sequence length, if packing is enabled. + self.is_packing_enabled = self.args.packing_buffer_size is not None and self.args.packing_buffer_size > 0 + + if self.dataloader_seq_length and self.packing_seq_length: + assert self.dataloader_seq_length >= self.packing_seq_length, "dataloader sequence length must be greater than or equal to the packing sequence length" + + if self.is_packing_enabled: + assert self.packing_seq_length > 0, "packing sequence length must be set" + + self.num_image_embeddings_per_tile = get_num_image_embeddings( + self.args.img_h, + self.args.img_w, + self.args.patch_dim, + self.args.vision_model_type, + self.args.disable_vision_class_token, + 1, + self.args.pixel_shuffle, + self.args.use_tile_tags, + ) + + self.txt_to_token_dict = {} + + self.img_h, self.img_w = self.args.img_h, self.args.img_w + self.img_token_id = self.tokenizer.convert_tokens_to_ids(IMAGE_TOKEN) + # This map is used to reduce the number of tiles used per image if the number of tokens is + # larger than the decoder_seq_length. + self.num_tiles_degradation_map = {12:8, 8:6, 6:4, 4:2, 2:1, 1:1} + + self.find_closest_aspect_ratio_fn = ( + find_closest_area_weighted_aspect_ratio if self.args.use_area_weighted_aspect_ratio + else find_closest_aspect_ratio) + + def _get_total_seq_length(self, input_ids, num_tiles): + """Calculate expected sequence length given text tokens length and number of tiles.""" + total_num_images = len(num_tiles) + total_num_tiles = sum(num_tiles) + total_len = len(input_ids) + total_num_tiles * self.num_image_embeddings_per_tile - total_num_images + return total_len + + def _truncate_for_packing(self, input_ids, target, num_tiles): + """Truncate tokens and labels if they exceed packing sequence length.""" + total_num_images = len(num_tiles) + total_num_tiles = sum(num_tiles) + total_img_embeddings_len = total_num_tiles * self.num_image_embeddings_per_tile + max_text_tokens = self.packing_seq_length - total_img_embeddings_len + total_num_images + + input_ids = input_ids[:max_text_tokens] + target = target[:max_text_tokens] + + # If truncate causes all labels to be ignored, then skip the sample + if (target == IGNORE_INDEX).all(): + raise ValueError(f"all targets will be ignored after truncation: {input_ids}") + + return input_ids, target + + @stateless(restore_seeds=True) + def encode_sample(self, sample: Union[CaptioningSample, OCRSample, VQASample, SimilarityInterleavedSample]): + if isinstance(sample, OCRSample): + if "pdfa" in sample.__key__: + yield self.combined_ocr_encoder(sample, task_type='encode_pdf') + elif "multi" in sample.__key__: + yield self.combined_ocr_encoder(sample, task_type='_encode_ocr') + else: + yield self.combined_ocr_encoder(sample, task_type='encode_ocr_ref') + elif isinstance(sample, CaptioningSample): + yield self.encode_captioning(sample) + elif isinstance(sample, VQASample): + is_llava_training = sample.__subflavors__["is_llava_training"] if "is_llava_training" in sample.__subflavors__ else False + + if "llava" in sample.__key__ or is_llava_training: + yield self.encode_llava_pretrain(sample) + else: + yield self.encode_any_single_turn_vqa(sample) + elif isinstance(sample, SimilarityInterleavedSample): + yield self.encode_llava_sft(sample) + elif isinstance(sample, MultiChoiceVQASample): + yield self.encode_any_single_turn_vqa(sample) + # Because the SampleListSample is defined in the Megatron module but loaded by the Energon + # library, we need to resort to the more brittle check: 
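+ # (an isinstance() check would fail here because Energon loads that class under a
+ # different module path, so the class objects do not compare equal)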
+ elif type(sample).__name__ == "SampleListSample": + yield self.encode_sample_list(sample) + else: + raise NotImplementedError("Sample format not supported", sample) + + def encode_captioning(self, sample: CaptioningSample): + """Encode CaptioningSample.""" + augment = sample.__subflavors__.get("augmentation") + + imgs = get_visual_transform( + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + self.args.vision_model_type, find_closest_aspect_ratio_fn=self.find_closest_aspect_ratio_fn + ) + num_tiles = [len(imgs)] + + prompt_list = self.manual_prompts["CaptioningPretraining"]["raw"] + + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + "\n" + + caption = sample.caption.strip() + + split_by_line_flag = sample.__subflavors__.get("SplitByLine") + if split_by_line_flag: + caption_list = caption.split('\n') + caption = np.random.choice(caption_list) + + conv = [ + # Note: no system message. + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": caption}, + ] + + input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) + + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) + + return ImageTaskSample( + __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, + __subflavors__=sample.__subflavors__, + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), + ) + + def encode_llava_pretrain(self, sample: VQASample): + """Encode pretrain sample in LLAVA style.""" + augment = sample.__subflavors__.get("augmentation", False) + + imgs = get_visual_transform( + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, self.args.use_thumbnail, augment, + self.args.vision_model_type, find_closest_aspect_ratio_fn=self.find_closest_aspect_ratio_fn + ) + num_tiles = [len(imgs)] + + # LLAVA training: override text-prompt with just the image. + conv = [ + # Note: no system message. 
+ {"role": "user", "content": IMAGE_TOKEN + "\n"}, + {"role": "assistant", "content": sample.answers}, + ] + + input_ids, target = self.tokenizer.tokenize_conversation(conv, True, False) + + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) + + return ImageTaskSample( + __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, + __subflavors__=sample.__subflavors__, + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), + ) + + def encode_sample_list(self, samples: SampleListSample): + """We encode the list of samples using encode_llava_sft on each sample.""" + error_msg = ("You probably don't want to use online packing since SampleListSample is " + "usually used along offline packing.") + assert not self.is_packing_enabled, error_msg + encoded_samples = [] + current_length = 0 + for sample in samples.samples: + encoded_sample = self.encode_llava_sft(sample, truncate_for_sample_list_packing=True) + if current_length + encoded_sample.total_len > self.packing_seq_length: + break + else: + encoded_samples.append(encoded_sample) + current_length += encoded_sample.total_len + return self.pack_selected_samples(encoded_samples) + + def encode_llava_sft(self, sample: Union[SimilarityInterleavedSample, OfflineTargetAspectRatioSample], truncate_for_sample_list_packing=False): + """Encode SFT sample.""" + augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False + has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False + + # If the target aspect ratio are provided by the dataset, we use them instead of computing + # them with the self.find_closest_aspect_ratio_fn function. + local_find_closest_aspect_ratio_fn = self.find_closest_aspect_ratio_fn + if type(sample).__name__ == "OfflineTargetAspectRatioSample": + target_aspect_ratio = tuple(sample.target_aspect_ratio[0]) + assert target_aspect_ratio is not None, "Sample of type OfflineTargetAspectRatioSample needs to define the target aspect ratio." + local_find_closest_aspect_ratio_fn = lambda *args, **kwargs: target_aspect_ratio + + has_image = False + # We infer whether the sample has image or not. + if hasattr(sample, "images") and not has_video: + # If this is a text-only sample and we are freezing the LM, + # then use a dummy input image. + if len(sample.images) == 0 and self.args.freeze_LM: + empty_img = Image.new('RGB', (self.args.img_w, self.args.img_h), (255, 255, 255)) + sample.images.append(empty_img) + if len(sample.images) > 0: + has_image = True + + # Note: Some tokenizers may ignore the system prompt. + conversation = [{"role": "system", "content": "Answer the questions."}] + # Format the conversation as a list of "user" / "assistant" turns. 
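+ # Each element of sample.texts is expected to be a dict like
+ # {"from": "human" | "gpt", "value": "..."}, mapped to the "user"/"assistant" roles below.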
+ for text in sample.texts:
+ error_msg = f"unexpected role {text['from']} in {sample.texts}"
+ assert text["from"] in ["human", "gpt"], error_msg
+ conversation.append({
+ "role": "user" if text["from"] == "human" else "assistant",
+ "content": text["value"]})
+
+ # Replace numbered image tags (assumed here to look like <image-1>) with IMAGE_TOKEN and
+ # count the number of image tags.
+ number_image_tags = 0
+ image_tag_ids_list = []
+ for turn in conversation:
+ if turn["role"] == "user":
+ image_tag_ids = [int(x) - 1 for x in re.findall(r"<image-(\d+)>", turn["content"])]
+ image_tag_ids_list.extend(image_tag_ids)
+ turn["content"] = re.sub(r"<image-\d+>", IMAGE_TOKEN, turn["content"])
+ # For videos, we use the image token to locate where to put the frames.
+ if has_video:
+ turn["content"] = turn["content"].replace(VIDEO_TOKEN, IMAGE_TOKEN)
+ number_image_tags += turn["content"].count(IMAGE_TOKEN)
+
+ # We re-order the images in sample.images according to how they appear in the conversation.
+ if len(image_tag_ids_list) > 0:
+ sample.images = [sample.images[idx] for idx in image_tag_ids_list]
+
+ # If there is only one image, but several image tags, we assume all the tags refer to the
+ # same image and duplicate the image:
+ if not has_video and len(sample.images) == 1 and number_image_tags > 1:
+ sample.images = sample.images * number_image_tags
+
+ # We currently only support one video per sample.
+ number_of_images = 1 if has_video else len(sample.images)
+ # Fail if there are more image or video tags than images or videos:
+ error_msg = (
+ f"Found {number_image_tags} image tags for {number_of_images} images. {sample.texts}")
+ assert number_image_tags <= number_of_images, error_msg
+
+ # If there are fewer image or video tags than images or videos, prepend the tags to the first
+ # user message:
+ if number_image_tags < number_of_images:
+ for turn in conversation:
+ if turn["role"] == "user":
+ turn["content"] = IMAGE_TOKEN*(number_of_images-number_image_tags) + "\n" + turn["content"]
+ break
+
+ input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False)
+
+ if has_image:
+ imgs = []
+ num_tiles = []
+ max_num_tiles = self.args.max_num_tiles
+ # We keep a buffer of 4 tokens for the question,
+ # the rest can be used for image tokens.
+ max_image_token_allowed = self.args.decoder_seq_length - len(input_ids) - 4
+ # We start by extracting as many tiles per image as possible, and decrease the max
+ # number of tiles if there are too many image tokens.
+ while True:
+ imgs = []
+ num_tiles = []
+ for img in sample.images:
+ img_tiles = get_visual_transform(
+ img, self.img_h, self.img_w, self.args.use_tiling, max_num_tiles,
+ self.args.use_thumbnail, augment, self.args.vision_model_type,
+ find_closest_aspect_ratio_fn=local_find_closest_aspect_ratio_fn)
+ imgs += img_tiles
+ num_tiles += [len(img_tiles)]
+ if max_num_tiles == 1:
+ break
+ if sum(num_tiles) * self.num_image_embeddings_per_tile > max_image_token_allowed:
+ if max_num_tiles in self.num_tiles_degradation_map:
+ max_num_tiles = self.num_tiles_degradation_map[max_num_tiles]
+ else:
+ raise RuntimeError((
+ f"Tried to decrease the number of tiles {max_num_tiles} but it's not ",
+ f"defined in the degradation map {self.num_tiles_degradation_map}"))
+ else:
+ break
+ elif has_video:
+ # We don't use tiling for videos to limit the number of tokens.
+ use_tiling = False
+ # Grab the selected frames of the video as a tensor with shape
+ # fchw: (num_frames, num_channels, height, width).
+ video_fchw = sample.images.frames + if video_fchw.shape[0] == 0: + raise ValueError(f"Video {sample.__key__} {sample.__restore_key__} {sample.texts} has no frames.") + selected_frames = torch.linspace( + 0, video_fchw.shape[0] - 1, self.args.num_frames).long() + video_fchw = video_fchw[selected_frames] + imgs = [] + for video_chw in video_fchw: + to_pil = ToPILImage() + video_chw = to_pil(video_chw) + imgs += get_visual_transform( + video_chw, self.img_h, self.img_w, use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, + find_closest_aspect_ratio_fn=local_find_closest_aspect_ratio_fn) + num_tiles = [len(imgs)] + else: + imgs = num_tiles = [] + + if self.is_packing_enabled or truncate_for_sample_list_packing: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) + + # Some final checks with respect to the number of image tokens and images on the tokenized + # conversation. There can still be errors, for instance if a non-video sample happens to + # have our pre-defined video token, or if the packing truncation removed a necessary image + # tag. + number_image_token = np.sum(input_ids == self.img_token_id) + error_msg = ( + f"Found {number_image_token} image tokens for len({num_tiles}) = {len(num_tiles)} image tiles in {conversation}.") + assert number_image_token == len(num_tiles), error_msg + error_msg = ( + f"Found sum({num_tiles}) = {np.sum(num_tiles)} tiles for {len(imgs)} images in {conversation}.") + assert np.sum(num_tiles) == len(imgs), error_msg + + # We need to ensure that there are at least some trainable tokens in the sample. + assert self.target_has_trainable_tokens(input_ids, num_tiles, target), "Sample has no trainable tokens." + + return ImageTaskSample( + __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, + __subflavors__=sample.__subflavors__, + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), + ) + + def target_has_trainable_tokens(self, input_ids, num_tiles, target): + # Compute the loss mask based on extending the image tags with the proper + # number of image tokens, extracting the first self.args.decoder_seq_length tokens, and + # ensuring that some of these tokens have a loss mask > 0. + # Note that this is a bit hacky because we reproduce here parts of the logics which are in + # the model itself. Ideally, the data sampler would return the already processed inputs + # and targets to avoid this duplication. + expanded_target = target.copy() + expanded_target[input_ids==self.img_token_id] = self.img_token_id + expanded_target = self.replace_value_with_repetition( + expanded_target, self.img_token_id, + self.num_image_embeddings_per_tile * np.array(num_tiles), IGNORE_INDEX) + loss_mask = torch.ones(torch.tensor(expanded_target).size(), dtype=torch.float) + loss_mask[expanded_target == self.tokenizer.pad] = 0.0 # mask paddings + loss_mask[expanded_target == IGNORE_INDEX] = 0.0 # mask prompts + loss_mask = torch.cat((loss_mask[1:], torch.zeros((1,)))) + loss_mask = loss_mask[:self.args.decoder_seq_length] + return torch.sum(loss_mask) > 0 + + def replace_value_with_repetition(self, arr, token_to_replace, num_repetition, new_token): + """ + Replace every occurrence of value V in the input array with R repetitions of W. 
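+ For example (illustrative values): with arr=[7, 3, 7], token_to_replace=7,
+ num_repetition=[2, 3] and new_token=-100, the result is [-100, -100, 3, -100, -100, -100].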
+ + Args: + arr (Array): Input array to be modified + token_to_replace: token to be replaced + new_token: new token + num_repetition (Array): number of repetition of new token. + + Returns: + Array: New array with token_to_replace replaced by num_repetition repetitions of + new_token + """ + error_msg = "The number of image tokens must match the length of the tile tensor." + assert np.sum(arr==token_to_replace) == len(num_repetition), error_msg + result = [] + idx = 0 + for item in arr: + if item == token_to_replace: + # If the current item matches token_to_replace, add R copies of W + result.extend([new_token] * num_repetition[idx]) + idx += 1 + else: + # Otherwise, keep the original item + result.append(item) + + return np.array(result) + + def encode_any_single_turn_vqa(self, sample): + """Encode MultiChoiceVQA or VQA sample.""" + augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False + has_video = sample.__subflavors__['has_video'] if 'has_video' in sample.__subflavors__ else False + + if has_video: + # Grab the selected frames of the video as a tensor with shape + # fhwc: (num_frames, height, width, num_channels). + video_fhwc = sample.image.permute(0, 2, 3, 1) + selected_frames = torch.linspace( + 0, video_fhwc.shape[0] - 1, self.args.num_frames).long() + video_frame_fhwc = video_fhwc[selected_frames] + imgs = [] + for video_frame_hwc in video_frame_fhwc: + imgs += get_visual_transform( + video_frame_hwc, self.img_h, self.img_w, + self.args.use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, + find_closest_aspect_ratio_fn=self.find_closest_aspect_ratio_fn) + else: + imgs = get_visual_transform( + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, + find_closest_aspect_ratio_fn=self.find_closest_aspect_ratio_fn + ) + + num_tiles = [len(imgs)] + + if isinstance(sample, MultiChoiceVQASample): + cur_prompt = format_multichoice_question(sample.context, sample.choices) + if IMAGE_TOKEN not in cur_prompt: + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + cur_answer = format_multichoice_answer(sample.correct_choice_idx) + elif isinstance(sample, VQASample): + if 'docvqa' in sample.__key__: + prompt_list = self.manual_prompts["VQASFT"]["docvqa"] + elif sample.__subflavors__.get("VQASFT"): + prompt_list = self.manual_prompts["VQASFT"]["raw"] + else: + prompt_list = ["{}"] + + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + + cur_prompt = cur_prompt.format(sample.context) + + if IMAGE_TOKEN not in cur_prompt: + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + + if isinstance(sample.answers, list): + answer_list = sample.answers + weight_list = np.array(sample.answer_weights).astype(np.float32) + weight_list = weight_list / np.sum(weight_list) + answer_idx = np.random.choice(weight_list.shape[0], 1, p=weight_list)[0] + cur_answer = answer_list[answer_idx] + else: + cur_answer = sample.answers + else: + raise NotImplementedError("Unsupported data type provided", sample) + + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": str(cur_answer)}, + ] + + input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) + + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) + + return 
ImageTaskSample( + __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, + __subflavors__=sample.__subflavors__, + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), + ) + + def combined_ocr_encoder(self, sample, task_type): + """Encode OCR samples.""" + augment = sample.__subflavors__['augmentation'] if 'augmentation' in sample.__subflavors__ else False + + if task_type == "encode_pdf": + sample, cur_prompt, cur_answer = self.encode_pdf_prompt(sample) + elif task_type == "encode_ocr_ref": + sample, cur_prompt, cur_answer = self.encode_ocr_ref_prompt(sample) + elif task_type == "_encode_ocr": + sample, cur_prompt, cur_answer = self.encode_ocr_prompt(sample) + + imgs = get_visual_transform( + sample.image, self.img_h, self.img_w, self.args.use_tiling, self.args.max_num_tiles, + self.args.use_thumbnail, augment, self.args.vision_model_type, + find_closest_aspect_ratio_fn=self.find_closest_aspect_ratio_fn + ) + num_tiles = [len(imgs)] + + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": cur_prompt}, + {"role": "assistant", "content": str(cur_answer)}, + ] + + input_ids, target = self.tokenizer.tokenize_conversation(conversation, True, False) + + if self.is_packing_enabled: + input_ids, target = self._truncate_for_packing(input_ids, target, num_tiles) + + return ImageTaskSample( + __key__=sample.__key__, + __restore_key__=sample.__restore_key__, + __subflavor__=None, + __subflavors__=sample.__subflavors__, + imgs=imgs, + num_tiles=num_tiles, + tokens=torch.tensor(input_ids), + labels=torch.tensor(target), + total_len=self._get_total_seq_length(input_ids, num_tiles), + ) + + def encode_pdf_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + prompt_list = self.manual_prompts["DocPretraining"]["raw"] + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + if IMAGE_TOKEN not in cur_prompt: + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + + # Make sure there is no extra IMAGE_TOKEN tag. 
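+ # (stray image tokens in the document text would otherwise be treated as image
+ # placeholders by the model)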
+ sample.text = sample.text.replace(IMAGE_TOKEN, "") + + caption = sample.text.strip() + + split_by_line_flag = sample.__subflavors__.get("SplitByLine") + if split_by_line_flag: + caption_list = caption.split('\n') + caption = np.random.choice(caption_list) + cur_answer = caption + + return sample, cur_prompt, cur_answer + + def encode_ocr_ref_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + ref = sample.text + region = sample.words_boxes + + # Make sure there is no extra IMAGE_TOKEN tag + ref = ref.replace(IMAGE_TOKEN, "") + + if len(region) == 4: + region = f"({region[0]},{region[1]}),({region[2]},{region[3]})" + else: + region = f"({region[0]},{region[1]}),({region[2]},{region[3]}),({region[4]},{region[5]}),({region[6]},{region[7]})" + + # Randomly choose between two tasks + task_idx = np.random.randint(2) + if task_idx == 0: + # Referring Grounding + prompt_list = self.manual_prompts["DocPretraining"]["referring_grounding"] + prompt_content = ref + answer = region + else: + # Grounded OCR + prompt_list = self.manual_prompts["DocPretraining"]["grounded_ocr"] + prompt_content = region + answer = ref + + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + cur_prompt = cur_prompt.format(prompt_content) + if IMAGE_TOKEN not in cur_prompt: + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + + return sample, cur_prompt, answer + + def bbox_coord_to_label(self, text, bbox): + """Format bbox coordinates as text.""" + assert len(bbox) == 4 or len(bbox) == 8 + + # Make sure there is no extra IMAGE_TOKEN tag + text = text.replace(IMAGE_TOKEN, "") + + if len(bbox) == 4: + label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]})" + else: + label_str = f"{text}({bbox[0]},{bbox[1]}),({bbox[2]},{bbox[3]}),({bbox[4]},{bbox[5]}),({bbox[6]},{bbox[7]})" + + return label_str + + def encode_ocr_prompt(self, sample: OCRSample) -> ImageTaskSample: + """Encode OCR sample.""" + if isinstance(sample.words_boxes[0], int): + answer = self.bbox_coord_to_label(sample.text, sample.words_boxes) + elif isinstance(sample.words_boxes[0], list): + answer = "" + for i, bbox in enumerate(sample.words_boxes): + answer += self.bbox_coord_to_label(sample.words_text[i], bbox) + + prompt_list = self.manual_prompts["DocPretraining"]["ocr_multi"] + prompt_idx = np.random.randint(len(prompt_list)) + cur_prompt = prompt_list[prompt_idx] + + if IMAGE_TOKEN not in cur_prompt: + cur_prompt = IMAGE_TOKEN + "\n" + cur_prompt + cur_answer = answer + + return sample, cur_prompt, cur_answer + + def batch(self, samples: List[Union[ImageTaskSample, ImageTaskSamplePacked]]) -> ImageTaskBatchPacked: + # Stack images to [num_tiles, c, h, w]. If there are no images (text-only), then use a dummy image. + imgs = [img for s in samples for img in s.imgs] + if len(imgs) > 0: + imgs = torch.stack(imgs) + else: + imgs = torch.tensor([[0]], dtype=torch.float32) + + # If the user hasn't defined a target dataloader sequence length, then use the max along the sample lengths. + max_seq_len = self.dataloader_seq_length + if not max_seq_len: + max_seq_len = max(len(s.tokens) for s in samples) + + tokens = np.full((len(samples), max_seq_len), self.tokenizer.pad, dtype=np.int64) + # +1 to accommodate shift to left by one later. + labels = np.full((len(samples), max_seq_len + 1), self.tokenizer.pad, dtype=np.int64) + + for i, s in enumerate(samples): + # If the sample/target length exceeds the target sequence length, then truncate. 
+ text_len = min(max_seq_len, len(s.tokens)) + target_len = min(max_seq_len+1, len(s.labels)) + + tokens[i, :text_len] = s.tokens[:text_len] + labels[i, :target_len] = s.labels[:target_len] + + num_tiles = torch.tensor([n for s in samples for n in s.num_tiles], dtype=torch.int32) + if len(num_tiles) == 0: + num_tiles = torch.tensor([[0]], dtype=torch.int32) + + # Cumulative sample lengths are needed for packing, otherwise use dummy values. + cu_lengths = torch.tensor([[0]], dtype=torch.int32) + max_lengths = torch.tensor([[0]], dtype=torch.int32) + + if self.is_packing_enabled: + cu_lengths = torch.stack([s.cu_lengths for s in samples]) + max_lengths = torch.tensor([s.max_length for s in samples], dtype=torch.int32) + + return ImageTaskBatchPacked( + __key__=[s.__key__ for s in samples], + __restore_key__=[s.__restore_key__ for s in samples], + __subflavor__=None, + __subflavors__=samples[0].__subflavors__, + tokens=tokens, + labels=labels, + imgs=imgs, + num_tiles=num_tiles, + cu_lengths=cu_lengths, + max_lengths=max_lengths, + ) + + def encode_batch(self, batch: ImageTaskBatchPacked) -> dict: + raw = dataclasses.asdict(batch) + del raw["__subflavors__"] + return raw + + def select_samples_to_pack(self, samples: List[ImageTaskSample]) -> List[List[ImageTaskSample]]: + """Selects which samples will be packed together. + + NOTE: Energon dataloader calls this method internally if packing is used. + Please see https://nvidia.github.io/Megatron-Energon/packing.html + """ + lengths = [sample.total_len for sample in samples] + + packed_samples = greedy_knapsack(lengths, samples, self.packing_seq_length) + + return packed_samples + + @stateless + def pack_selected_samples(self, samples: List[ImageTaskSample]) -> List[ImageTaskSamplePacked]: + """ + Function to pack a list of ImageTaskSample into a single ImageTaskSamplePacked. + + NOTE: Energon dataloader calls this method internally if packing is used. + Please see https://nvidia.github.io/Megatron-Energon/packing.html + + Args: + samples: List of ImageTaskSample instances to pack into one sample. + + Returns: + ImageTaskSamplePacked instance. + """ + packing_seq_len = self.packing_seq_length + + packed_tokens = [] + packed_labels = [] + packed_imgs = [] + + current_length = 0 + max_length = 0 + cu_lengths = [0] + + # Process each sample and build lists that we will concatenate to create the packed sample. + for _, sample in enumerate(samples): + sample_len = sample.total_len + + if sample_len > max_length: + max_length = sample_len + + # If adding this sample exceeds the max length, stop. + # This should not happen. The select_samples_to_pack method should have already ensured that the samples fit. + if current_length + sample_len > packing_seq_len: + raise ValueError(f"Packed sample exceeds the maximum sequence length of {packing_seq_len}: {samples}") + + # Add the sample's tokens and labels + packed_tokens.append(sample.tokens) + packed_labels.append(sample.labels) + + # Add the images + packed_imgs += sample.imgs + + current_length += sample_len + cu_lengths.append(current_length) + + # Concatenate packed tokens and labels. 
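+ # After concatenation, cu_lengths holds the cumulative sub-sample lengths (text plus image
+ # embeddings), i.e. where each sub-sample starts and ends inside the packed sequence.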
+ packed_tokens = torch.cat(packed_tokens, dim=0) + packed_labels = torch.cat(packed_labels, dim=0) + + return ImageTaskSamplePacked( + __key__=",".join([s.__key__ for s in samples]), + __restore_key__=(), # Will be set by energon based on `samples` + __subflavor__=None, + __subflavors__=samples[0].__subflavors__, + tokens=packed_tokens, + labels=packed_labels, + imgs=packed_imgs, + cu_lengths=torch.tensor(cu_lengths, dtype=torch.int32), + max_length=max_length, + num_tiles=[n for s in samples for n in s.num_tiles], + ) + + +def print_error_handler(exc: Exception, key: Optional[str]): + print( + f"The following exception occurred in the dataloader for sample {key} and is skipped", + file=sys.stderr, + ) + traceback.print_exc() + + +def format_multichoice_question(question, multichoice_options): + """Format multi-choice question.""" + options_text = ["{}. {}\n".format(chr(ord('A') + i), option) for i, option in + zip(range(len(multichoice_options)), multichoice_options)] + options_text = "".join(options_text) + + options_text = f"{options_text}Answer with the option's letter from the given choices directly." + + return "{}\n{}".format(question, options_text) + + +def format_multichoice_answer(idx): + """Format multi-choice answer.""" + return chr(ord('A') + idx) diff --git a/examples/multimodal/energon_util.py b/examples/multimodal/energon_util.py new file mode 100644 index 0000000..36135ff --- /dev/null +++ b/examples/multimodal/energon_util.py @@ -0,0 +1,48 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import torch +import warnings +from dataclasses import dataclass +from typing import Any, List + +from megatron.energon import Sample +from megatron.energon.epathlib.epath import EPath +from megatron.energon.flavors.webdataset import DefaultDecoderWebdatasetFactory + + +@dataclass +class SampleListSample(Sample): + """Sample type for a list of samples of any type which needs to be packed together. + + This is useful for datasets which are packed offline. 
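+ Each element of `samples` is a regular Energon sample (for example a
+ SimilarityInterleavedSample); the task encoder is expected to encode each element and pack
+ the results into a single packed sample.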
+ """ + + #: The images of the sequence + samples: List[Any] + + +class SampleListWebdataset(DefaultDecoderWebdatasetFactory[SampleListSample]): + __sample_type__ = SampleListSample + + def __init__(self, path: EPath, **kwargs): + warnings.warn( + f"{type(self)} is deprecated, use the default instead and set the sample_type:\n" + f"To convert, update your {path}/.nv-meta/dataset.yaml to:\n" + f"# remove top-level __module__ and __class__\n" + f"sample_type:\n" + f" __module__: megatron.energon\n" + f" __class__: {self.__sample_type__.__name__}\n" + f"# Keep the remaining content", + DeprecationWarning, + ) + super().__init__(path, **kwargs) + + +@dataclass +class OfflineTargetAspectRatioSample(Sample): + """Sample type for image + text samples with target aspect ratio computed offline.""" + + #: The images of the sequence + images: List[torch.Tensor] + #: The texts of the sequence + texts: List[str] + target_aspect_ratio: List[List] diff --git a/examples/multimodal/evaluation/evaluate_infovqa.py b/examples/multimodal/evaluation/evaluate_infovqa.py new file mode 100644 index 0000000..2ee8d3e --- /dev/null +++ b/examples/multimodal/evaluation/evaluate_infovqa.py @@ -0,0 +1,48 @@ +import argparse +import json + +from evaluate_vqav2 import compute_vqa_accuracy +from evaluate_mmmu import get_input_output_paths + + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="InfoVQA") + + results = [] + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + results.append( + { + "question_id": res["sample_id"], + "answer": res["answer"], + "gt_answer": res["gt_answer"], + } + ) + + # Make order deterministic. + # results = sorted(results, key=lambda d: d["question_id"]) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def infovqa_eval(input_path): + """Run InfoVQA evaluation.""" + result_file_path = merge_input_files(input_path) + return compute_vqa_accuracy(result_file_path, task="InfoVQA") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + avg_acc = infovqa_eval(args.input_path) + + print(f"===== InfoVQA Accuracy {avg_acc:.2f}% =====") diff --git a/examples/multimodal/evaluation/evaluate_spdocvqa.py b/examples/multimodal/evaluation/evaluate_spdocvqa.py new file mode 100644 index 0000000..a5a4fd0 --- /dev/null +++ b/examples/multimodal/evaluation/evaluate_spdocvqa.py @@ -0,0 +1,48 @@ +import argparse +import json + +from evaluate_vqav2 import compute_vqa_accuracy +from evaluate_mmmu import get_input_output_paths + + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="SPDocVQA") + + results = [] + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + results.append( + { + "question_id": res["sample_id"], + "answer": res["answer"], + "gt_answer": res["gt_answer"], + } + ) + + # Make order deterministic. 
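+ # (the sort below is currently commented out, so the output order follows the input files)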
+ # results = sorted(results, key=lambda d: d["question_id"]) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def spdocvqa_eval(input_path): + """Run SPDocVQA evaluation.""" + result_file_path = merge_input_files(input_path) + return compute_vqa_accuracy(result_file_path, task="SPDocVQA") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + avg_acc = spdocvqa_eval(args.input_path) + + print(f"===== SPDocVQA Accuracy {avg_acc:.2f}% =====") diff --git a/examples/multimodal/evaluation/evaluate_vqav2.py b/examples/multimodal/evaluation/evaluate_vqav2.py index 7807d80..42ec6e6 100644 --- a/examples/multimodal/evaluation/evaluate_vqav2.py +++ b/examples/multimodal/evaluation/evaluate_vqav2.py @@ -1,109 +1,161 @@ -import argparse -import json - -from evaluate_mmmu import get_input_output_paths -from open_flamingo.eval.vqa_metric import VQAEval - - -def merge_input_files(input_path): - """Merge input files to a format compatible with the evaluator.""" - input_file_paths, output_file_path = get_input_output_paths(input_path, task="VQAv2") - - results = dict() - - for input_file_path in input_file_paths: - with open(input_file_path, "r") as input_file: - for line in input_file: - res = json.loads(line) - sample_id = res["sample_id"] - - # Skip possible duplicates. - if sample_id in results: - continue - - res["question_id"] = sample_id - results[sample_id] = res - - results = list(results.values()) - - with open(output_file_path, "w") as output_file: - json.dump(results, output_file) - - return output_file_path - - -def is_number(n: str): - """Check if input is a number.""" - try: - float(n) - return True - except ValueError: - return False - - -def compute_vqa_accuracy(result_file, task): - """Compute VQA accuracy.""" - merged_results = json.load(open(result_file)) - - vqa = VQAEval(vqa=None, vqaRes=None) - all_acc = [] - for res in merged_results: - pred = res["answer"] - pred = vqa.processPunctuation(pred) - pred = vqa.processDigitArticle(pred) - - gt = res["gt_answer"] - gt = [vqa.processPunctuation(ans) for ans in gt] - gt = [vqa.processDigitArticle(ans) for ans in gt] - - # ChartQA uses relaxed accuracy: - # "We consider an answer to be correct if it is within 5% of the gold answer. - # For non-numeric answers, we still need an exact match to consider an answer to be correct." - if task == "ChartQA": - acc = 0.0 - assert len(gt) == 1, "expected exactly one groundtruth answer." 
- gt = gt[0] - - pred = pred.rstrip("%") - gt = gt.rstrip("%") - - if is_number(pred) and is_number(gt): - pred = float(pred) - gt = float(gt) - if pred >= (gt * 0.95) and pred <= (gt * 1.05): - acc = 1.0 - elif pred == gt: - acc = 1.0 - - all_acc.append(acc) - elif task in ("VQAv2", "TextVQA"): - num_match = sum([pred == ans for ans in gt]) - acc = min(1.0, num_match / 3.0) - all_acc.append(acc) - elif task == "AI2D": - assert len(gt) == 1, f"Expected exactly 1 GT, got {gt}" - acc = pred == gt[0] - all_acc.append(acc) - else: - raise NotImplementedError(f"unknown task {task}") - - acc_avg = sum(all_acc) / len(all_acc) * 100 - - return acc_avg - - -def vqav2_eval(input_path): - """Run VQAv2 evaluation.""" - result_file = merge_input_files(input_path) - avg_acc = compute_vqa_accuracy(result_file, task="VQAv2") - return avg_acc - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('--input-path', type=str, help="Path to input file(s)") - args = parser.parse_args() - - avg_acc = vqav2_eval(args.input_path) - - print(f"===== VQAv2 Accuracy {avg_acc:.2f}% =====") +import argparse +import json + +from evaluate_mmmu import get_input_output_paths +from open_flamingo.eval.vqa_metric import VQAEval + +# ANLS score calculation based on https://github.com/shunk031/ANLS/blob/6472e1d71e84d6cee28e3c6d2e18564bafaa312d/anls/metrics/dist.py#L1 +# and https://github.com/shunk031/ANLS/blob/6472e1d71e84d6cee28e3c6d2e18564bafaa312d/anls/metrics/score.py#L6 +# MIT License. Copyright (c) 2022 Shunsuke KITADA +def levenshtein_distance(s1: str, s2: str) -> int: + + if len(s1) > len(s2): + s1, s2 = s2, s1 + + distances = list(range(len(s1) + 1)) + for i2, c2 in enumerate(s2): + dists = [i2 + 1] + for i1, c1 in enumerate(s1): + if c1 == c2: + dists.append(distances[i1]) + else: + dists.append(1 + min((distances[i1], distances[i1 + 1], dists[-1]))) + distances = dists + + return distances[-1] + + +def normalized_levenshtein_distance(s1: str, s2: str) -> float: + dist = levenshtein_distance(s1, s2) + length = max(len(s1.upper()), len(s2.upper())) + return 0.0 if length == 0 else dist / length + +def similarity_function(prediction: str, gold_label: str, threshold: float) -> float: + nl_score = normalized_levenshtein_distance(prediction, gold_label) + return 1 - nl_score if nl_score < threshold else 0.0 + +def anls_score( + prediction: str, gold_labels: List[str], threshold: float = 0.5 +) -> float: + + # not case sensitive, but space sensitive + y_pred = " ".join(prediction.strip().lower().split()) + + anls_scores: List[float] = [] + for gold_label in gold_labels: + + # not case sensitive, but space sensitive + y_true = " ".join(gold_label.strip().lower().split()) + + anls_score = similarity_function(y_pred, y_true, threshold) + anls_scores.append(anls_score) + + score = max(anls_scores) + + return score + +def merge_input_files(input_path): + """Merge input files to a format compatible with the evaluator.""" + input_file_paths, output_file_path = get_input_output_paths(input_path, task="VQAv2") + + results = dict() + + for input_file_path in input_file_paths: + with open(input_file_path, "r") as input_file: + for line in input_file: + res = json.loads(line) + sample_id = res["sample_id"] + + # Skip possible duplicates. 
+ if sample_id in results: + continue + + res["question_id"] = sample_id + results[sample_id] = res + + results = list(results.values()) + + with open(output_file_path, "w") as output_file: + json.dump(results, output_file) + + return output_file_path + + +def is_number(n: str): + """Check if input is a number.""" + try: + float(n) + return True + except ValueError: + return False + + +def compute_vqa_accuracy(result_file, task): + """Compute VQA accuracy.""" + merged_results = json.load(open(result_file)) + + vqa = VQAEval(vqa=None, vqaRes=None) + all_acc = [] + for res in merged_results: + pred = res["answer"] + pred = vqa.processPunctuation(pred) + pred = vqa.processDigitArticle(pred) + + gt = res["gt_answer"] + gt = [vqa.processPunctuation(ans) for ans in gt] + gt = [vqa.processDigitArticle(ans) for ans in gt] + + # ChartQA uses relaxed accuracy: + # "We consider an answer to be correct if it is within 5% of the gold answer. + # For non-numeric answers, we still need an exact match to consider an answer to be correct." + if task == "ChartQA": + acc = 0.0 + assert len(gt) == 1, "expected exactly one groundtruth answer." + gt = gt[0] + + pred = pred.rstrip("%") + gt = gt.rstrip("%") + + if is_number(pred) and is_number(gt): + pred = float(pred) + gt = float(gt) + if pred >= (gt * 0.95) and pred <= (gt * 1.05): + acc = 1.0 + elif pred == gt: + acc = 1.0 + + all_acc.append(acc) + elif task in ("VQAv2", "TextVQA"): + num_match = sum([pred == ans for ans in gt]) + acc = min(1.0, num_match / 3.0) + all_acc.append(acc) + elif task in ("SPDocVQA", "InfoVQA"): + acc = anls_score(prediction=pred, gold_labels=gt, threshold=0.5) + all_acc.append(acc) + elif task == "AI2D": + assert len(gt) == 1, f"Expected exactly 1 GT, got {gt}" + acc = pred == gt[0] + all_acc.append(acc) + else: + raise NotImplementedError(f"unknown task {task}") + + acc_avg = sum(all_acc) / len(all_acc) * 100 + + return acc_avg + + +def vqav2_eval(input_path): + """Run VQAv2 evaluation.""" + result_file = merge_input_files(input_path) + avg_acc = compute_vqa_accuracy(result_file, task="VQAv2") + return avg_acc + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--input-path', type=str, help="Path to input file(s)") + args = parser.parse_args() + + avg_acc = vqav2_eval(args.input_path) + + print(f"===== VQAv2 Accuracy {avg_acc:.2f}% =====") diff --git a/examples/multimodal/evaluation/evaluation_datasets.py b/examples/multimodal/evaluation/evaluation_datasets.py index 50a50d5..a2d3346 100644 --- a/examples/multimodal/evaluation/evaluation_datasets.py +++ b/examples/multimodal/evaluation/evaluation_datasets.py @@ -1,920 +1,948 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-"""Evaluation datasets.""" -import glob -import itertools -import json -import os -import re -from collections import defaultdict - -import numpy as np -import torch -from image_processing import get_visual_transform -from PIL import Image - -from megatron.training import print_rank_0 - - -def _get_partition_bounds( - total_num_samples, num_samples_per_partition, num_partitions, partition_id -): - if num_samples_per_partition == 0: - samples_per_partition = [ - int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1) - ] - return samples_per_partition[partition_id], samples_per_partition[partition_id + 1] - return num_samples_per_partition * partition_id, num_samples_per_partition * (partition_id + 1) - - -class VQADataset(torch.utils.data.Dataset): - """VQA evaluation dataset.""" - - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ): - samples = json.load(open(gt_path, encoding='utf-8')) - if "data" in samples: - samples = samples["data"] - - # Optionally, process only a subset of the input files. - if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(samples), num_samples_per_partition, num_partitions, partition_id - ) - samples = samples[lb:ub] - - self._keys = keys - self._samples = samples - self._input_image_path = input_image_path - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._samples) - - def __getitem__(self, idx): - sample = self._samples[idx] - - img_file = "{}/{}".format(self._input_image_path, sample[self._keys["image_id"]]) - if not os.path.exists(img_file): - img_file += ".jpg" - - if not os.path.exists(img_file): - img_file = img_file.replace('.jpg', '.png') - - img = Image.open(img_file) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - sample_id = idx - if "sample_id" in self._keys: - sample_id = sample[self._keys["sample_id"]] - - metadata = "" # Not used. - - return ( - torch.stack(imgs), - tile_count, - sample_id, - sample[self._keys["question"]], - sample[self._keys["answer"]], - metadata, - ) - - -class CaptioningDataset(torch.utils.data.Dataset): - """Captioning evaluation dataset.""" - - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ): - image_files = sorted(glob.glob(input_image_path + "/*")) - - # Optionally, process only a subset of the input files. 
- if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(image_files), num_samples_per_partition, num_partitions, partition_id - ) - image_files = image_files[lb:ub] - - gts = json.load(open(gt_path)) - answers = defaultdict(list) - for gt in gts["annotations"]: - answers[gt["image_id"]].append(gt['caption']) - - self._image_files = image_files - self._answers = answers - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._image_files) - - def __getitem__(self, idx): - img_file = self._image_files[idx] - image_id = int(img_file.split("_")[-1].split(".")[0]) - - img = Image.open(img_file) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - question = "" # Fixed for all samples. - metadata = "" # Not used. - - return torch.stack(imgs), tile_count, image_id, question, self._answers[image_id], metadata - - -class MMMUDataset(torch.utils.data.Dataset): - """MMMU evaluation dataset.""" - - def __init__( - self, - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - prompt_style, - vision_model_type, - ): - import datasets - from MMMU.mmmu.utils.data_utils import CAT_SHORT2LONG, load_yaml - - # The following downloads the MMMU dataset from HuggingFace and uses the API from the MMMU github repo to run MMMU evaluation. - all_mmmu_datasets = [] - - hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] - assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." - - for subject in CAT_SHORT2LONG.values(): - # Use a local copy of the dataset if exists (can be faster) or the HF one. - if os.path.exists(input_image_path): - subject_dataset = datasets.load_dataset( - os.path.join(input_image_path, subject), - split=datasets.Split.VALIDATION, - cache_dir=hf_datasets_cache, - verification_mode="no_checks", - ) - else: - subject_dataset = datasets.load_dataset( - "MMMU/MMMU", - subject, - split=datasets.Split.VALIDATION, - cache_dir=hf_datasets_cache, - ) - - all_mmmu_datasets.append(subject_dataset) - - dataset = datasets.concatenate_datasets(all_mmmu_datasets) - - dataset = [s for s in dataset if s['id'].startswith("val")] - - # Optionally, process only a subset of the input files. - if num_partitions > 0: - lb, ub = _get_partition_bounds( - len(dataset), num_samples_per_partition, num_partitions, partition_id - ) - dataset = dataset[lb:ub] - - # Using the LLaVA config from the MMMU repo. - config = load_yaml("examples/multimodal/MMMU/mmmu/configs/llava1.5.yaml") - for k, v in config.items(): - if isinstance(v, list): - assert len(v) == 1, "only one value supported." 
- config[k] = v[0] - - self._config = config - - self._dataset = dataset - - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._prompt_style = prompt_style - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._dataset) - - def __getitem__(self, idx): - from MMMU.mmmu.utils.data_utils import construct_prompt, process_single_sample - - sample = self._dataset[idx] - - # Use the single image approach from the MMMU repo. - if self._prompt_style == "single_image": - sample = process_single_sample(sample) - sample = construct_prompt(sample, self._config) - - img = sample["image"] - sample_imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - sample_num_tiles = [len(sample_imgs)] - - prompt = sample["final_input_prompt"] - for i in range(8): - prompt = prompt.replace(f"", "") - sample["final_input_prompt"] = f"\n{prompt}" - elif self._prompt_style == "vlmevalkit": - sample = construct_prompt(sample, self._config) - - if sample["question_type"] == "multiple-choice": - question = sample["question"] - - options = "" - for k, v in sample["index2ans"].items(): - options += f"{k}. {v}\n" - - final_prompt = f"{question}\n" - if "hint" in sample: - final_prompt += f"Hint: {sample['hint']}\n" - - if "task_instructions" in sample: - final_prompt += f"Task instructions: {sample['task_instructions']}\n" - - final_prompt += options - final_prompt += "Answer with the option's letter from the given choices directly." - - sample["final_input_prompt"] = final_prompt.rstrip() - else: - question = sample["question"] - final_prompt = f"{question}\n" - final_prompt += "Answer the question directly." - sample["final_input_prompt"] = final_prompt.rstrip() - - sample_imgs = [] - sample_num_tiles = [] - - img_indices = sorted(list(set(re.findall(r"" - - img = sample[img_key] - assert img is not None, f"{img_str} is in prompt but not in sample images" - - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - adjusted_max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) # List of tiles. - - sample_imgs.extend(imgs) - sample_num_tiles.append(len(imgs)) - - sample["final_input_prompt"] = " ".join([f'' for i in range(len(img_indices))]) + "\n" + sample["final_input_prompt"] - elif self._prompt_style == "multi_image": - sample = construct_prompt(sample, self._config) - - sample_imgs = [] - sample_num_tiles = [] - - img_indices = re.findall(r"" - - img = sample[img_key] - assert img is not None, f"{img_str} is in prompt but not in sample images" - - # Note: Only replace the current image tag. - sample["final_input_prompt"] = sample["final_input_prompt"].replace( - img_str, "", 1 - ) - - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - adjusted_max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) # List of tiles. - - sample_imgs.extend(imgs) - sample_num_tiles.append(len(imgs)) - - # Sanity check. - for i in range(1, 8): - assert ( - f"" not in sample["final_input_prompt"] - ), "prompt contains unhandled image tags" - else: - raise ValueError(f"unknown prompt style {self._prompt_style}") - - # MMMU specific metadata. 
- metadata = {"question_type": sample["question_type"]} - if sample["question_type"] == "multiple-choice": - metadata["index2ans"] = sample["index2ans"] - metadata["all_choices"] = sample["all_choices"] - - prompt = sample['final_input_prompt'] - - tile_count = torch.tensor(sample_num_tiles, dtype=torch.int) - - return ( - torch.stack(sample_imgs), - tile_count, - sample["id"], - prompt, - sample["answer"], - metadata, - ) - - -class VideoMMMEDataset(torch.utils.data.Dataset): - "Video MME evaluation dataset." - - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_frames, - vision_model_type, - ): - ground_truth_original = json.load(open(gt_path)) - ground_truth = [] - for gt in ground_truth_original: - video_path = gt["url"] - video_path = video_path.replace("https://www.youtube.com/watch?v=", "") - video_path = video_path.replace("https://m.youtube.com/watch?v=", "") - video_path = os.path.join(input_image_path, video_path + ".mp4") - if not os.path.exists(video_path): - continue - gt["video_path"] = video_path - ground_truth.append(gt) - - ground_truth = sorted(ground_truth, key=lambda gt: gt["video_path"]) - print_rank_0(f"Found {len(ground_truth)} videos to process.") - - if num_partitions > 0: - start_idx, end_idx = _get_partition_bounds( - len(ground_truth), num_samples_per_partition, num_partitions, partition_id - ) - ground_truth = ground_truth[start_idx:end_idx] - - self._ground_truth = ground_truth - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._num_frames = num_frames - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._ground_truth) - - def __getitem__(self, idx): - from torchvision.io import read_video - - gt = self._ground_truth[idx] - - video, _, _ = read_video(gt["video_path"], start_pts=0, end_pts=None, pts_unit='sec') - video = video.numpy() - selected_frames = torch.linspace(0, video.shape[0] - 1, self._num_frames).long() - video_frames = video[selected_frames] - if self._num_frames == 1: - video_frames = video_frames[None] - - imgs = list( - itertools.chain.from_iterable( - get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - for img in video_frames - ) - ) - - for question in gt["questions"]: - # Very hacky, but we essentially re-create gt holding only the - # question of interest. This is the make this generation script - # compatible with the Video MME evaluation script. 
- question_dict = { - "video_id": gt["video_id"], - "duration_category": gt["duration_category"], - "video_category": gt["video_category"], - "video_subcategory": gt["video_subcategory"], - "url": gt["url"], - "questions": [question], - } - - num_tiles = torch.tensor([len(imgs)], dtype=torch.int) - - answer = "" - metadata = "" - - return ( - torch.stack(imgs), - num_tiles, - question["question_id"], - question_dict, - answer, - metadata, - ) - - -class OCRBenchDataset(torch.utils.data.Dataset): - """OCRBench evaluation dataset.""" - - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ): - gt = json.load(open(gt_path, encoding='utf-8')) - - if num_partitions > 0: - start_idx, end_idx = _get_partition_bounds( - len(gt), num_samples_per_partition, num_partitions, partition_id - ) - gt = gt[start_idx:end_idx] - - self._input_image_path = input_image_path - self._gt = gt - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._gt) - - def __getitem__(self, idx): - img_path = os.path.join(self._input_image_path, self._gt[idx]['image_path']) - - img = Image.open(img_path) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - metadata = { - "dataset_name": self._gt[idx]["dataset_name"], - "data_type": self._gt[idx]["type"], - } - - return ( - torch.stack(imgs), - tile_count, - idx, - self._gt[idx]["question"], - self._gt[idx]["answers"], - metadata, - ) - - -class MathVistaDataset(torch.utils.data.Dataset): - """MathVista evaluation dataset.""" - - def __init__( - self, - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ): - import datasets - - hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] - assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." - - if os.path.exists(input_image_path): - dataset = datasets.load_dataset( - input_image_path, cache_dir=hf_datasets_cache, verification_mode="no_checks" - ) - else: - dataset = datasets.load_dataset( - "AI4Math/MathVista", split="testmini", cache_dir=hf_datasets_cache - ) - - if num_partitions > 0: - start_idx, end_idx = _get_partition_bounds( - len(dataset), num_samples_per_partition, num_partitions, partition_id - ) - dataset = dataset[start_idx:end_idx] - - self._dataset = dataset - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._dataset["pid"]) - - def __getitem__(self, idx): - # Already a PIL object. 
- img = self._dataset['decoded_image'][idx] - - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - question_id = self._dataset["pid"][idx] - question = self._dataset["question"][idx] - question_type = self._dataset["question_type"][idx] # free_form or multi_choice - query = self._dataset["query"][idx] - choices = self._dataset["choices"][idx] - answer = self._dataset["answer"][idx] - - if question_type == 'multi_choice': - start_chr = 'A' - choices_str = '' - index2ans = {} - all_choices = [] - for choice in choices: - all_choices.append(start_chr) - index2ans[start_chr] = choice - choices_str += f"{start_chr}. {choice}\n" - start_chr = chr(ord(start_chr) + 1) - - question = question + '\n' + choices_str - question = question + "Answer with the option's letter from the given choices directly." - answer = chr(ord('A') + choices.index(answer)) - else: - question = query.replace("Hint: ", "") - index2ans = {} - all_choices = [] - - metadata = { - "question_type": question_type, - "index2ans": index2ans, - "all_choices": all_choices, - } - - return torch.stack(imgs), tile_count, question_id, question, answer, metadata - - -class AI2DDataset(torch.utils.data.Dataset): - """AI2D evaluation dataset.""" - - def __init__( - self, - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - no_mask, - vision_model_type, - ): - with open(gt_path, 'r') as f: - jsonl = list(f) - - gt = [json.loads(json_str) for json_str in jsonl] - - if num_partitions > 0: - start_idx, end_idx = _get_partition_bounds( - len(gt), num_samples_per_partition, num_partitions, partition_id - ) - gt = gt[start_idx:end_idx] - - self._gt = gt - self._input_image_path = input_image_path - self._img_h = img_h - self._img_w = img_w - self._use_tiling = use_tiling - self._max_num_tiles = max_num_tiles - self._use_thumbnail = use_thumbnail - self._no_mask = no_mask - self._vision_model_type = vision_model_type - - def __len__(self): - return len(self._gt) - - def __getitem__(self, idx): - img_path = os.path.join(self._input_image_path, self._gt[idx]['image']) - if self._no_mask: - img_path.replace("AI2D_TEST", "AI2D_TEST_NO_MASK_IMAGES") - - img = Image.open(img_path) - imgs = get_visual_transform( - img, - self._img_h, - self._img_w, - self._use_tiling, - self._max_num_tiles, - self._use_thumbnail, - augment=False, - vision_model_type=self._vision_model_type, - ) - - tile_count = torch.tensor([len(imgs)], dtype=torch.int) - - metadata = "" # Not used. 
- - return ( - torch.stack(imgs), - tile_count, - self._gt[idx]["question_id"], - self._gt[idx]["question"], - self._gt[idx]["answer"], - metadata, - ) - - -def get_evaluation_dataset( - task, - input_image_path, - gt_path, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_samples_per_partition, - num_partitions, - partition_id, - num_frames, - vision_model_type, -): - """Get an evaluation dataset.""" - if task == "TextVQA": - keys = { - "image_id": "image_id", - "sample_id": "question_id", - "question": "question", - "answer": "answers", - } - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == "VQAv2": - keys = { - "image_id": "image", - "sample_id": "question_id", - "question": "question", - "answer": "answer", - } - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == "ChartQA": - keys = {"image_id": "imgname", "question": "query", "answer": "label"} - - dataset = VQADataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - keys, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == "captioning": - dataset = CaptioningDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == 'MMMU': - # Note: - # - prompt_style="single_image" uses only one image like in the MMMU repo example. - # - prompt_style="multi_image" uses multiple input images. - # - prompt_style="vlmevalkit" is similar to https://github.com/open-compass/VLMEvalKit/blob/5d3cebcf18ef4bfbadc3bd3ef80bdc7aad2c6557/vlmeval/vlm/internvl_chat.py#L499 - dataset = MMMUDataset( - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - prompt_style="single_image", - vision_model_type=vision_model_type, - ) - elif task == "VideoMME": - dataset = VideoMMMEDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_frames, - vision_model_type, - ) - elif task == "OCRBench": - dataset = OCRBenchDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == "MathVista": - dataset = MathVistaDataset( - input_image_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - vision_model_type, - ) - elif task == "AI2D": - dataset = AI2DDataset( - input_image_path, - gt_path, - num_samples_per_partition, - num_partitions, - partition_id, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - no_mask=False, - vision_model_type=vision_model_type, - ) - else: - raise NotImplementedError(f"unsupported task {task}") - - return dataset +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+"""Evaluation datasets.""" +import glob +import itertools +import json +import os +import re +from collections import defaultdict + +import numpy as np +import torch +from image_processing import get_visual_transform +from PIL import Image + +from megatron.training import print_rank_0 + + +def _get_partition_bounds( + total_num_samples, num_samples_per_partition, num_partitions, partition_id +): + if num_samples_per_partition == 0: + samples_per_partition = [ + int(x) for x in np.linspace(0, total_num_samples, num_partitions + 1) + ] + return samples_per_partition[partition_id], samples_per_partition[partition_id + 1] + return num_samples_per_partition * partition_id, num_samples_per_partition * (partition_id + 1) + + +class VQADataset(torch.utils.data.Dataset): + """VQA evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + samples = json.load(open(gt_path, encoding='utf-8')) + if "data" in samples: + samples = samples["data"] + + # Optionally, process only a subset of the input files. + if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(samples), num_samples_per_partition, num_partitions, partition_id + ) + samples = samples[lb:ub] + + self._keys = keys + self._samples = samples + self._input_image_path = input_image_path + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._samples) + + def __getitem__(self, idx): + sample = self._samples[idx] + + img_file = "{}/{}".format(self._input_image_path, sample[self._keys["image_id"]]) + if not os.path.exists(img_file): + img_file += ".jpg" + + if not os.path.exists(img_file): + img_file = img_file.replace('.jpg', '.png') + + img = Image.open(img_file) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + sample_id = idx + if "sample_id" in self._keys: + sample_id = sample[self._keys["sample_id"]] + + metadata = "" # Not used. + + return ( + torch.stack(imgs), + tile_count, + sample_id, + sample[self._keys["question"]], + sample[self._keys["answer"]], + metadata, + ) + + +class CaptioningDataset(torch.utils.data.Dataset): + """Captioning evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + image_files = sorted(glob.glob(input_image_path + "/*")) + + # Optionally, process only a subset of the input files. 
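+        # _get_partition_bounds (defined above) either splits the files evenly across
+        # num_partitions (when num_samples_per_partition == 0) or hands each partition a
+        # fixed-size slice starting at partition_id * num_samples_per_partition.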
+ if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(image_files), num_samples_per_partition, num_partitions, partition_id + ) + image_files = image_files[lb:ub] + + gts = json.load(open(gt_path)) + answers = defaultdict(list) + for gt in gts["annotations"]: + answers[gt["image_id"]].append(gt['caption']) + + self._image_files = image_files + self._answers = answers + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._image_files) + + def __getitem__(self, idx): + img_file = self._image_files[idx] + image_id = int(img_file.split("_")[-1].split(".")[0]) + + img = Image.open(img_file) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + question = "" # Fixed for all samples. + metadata = "" # Not used. + + return torch.stack(imgs), tile_count, image_id, question, self._answers[image_id], metadata + + +class MMMUDataset(torch.utils.data.Dataset): + """MMMU evaluation dataset.""" + + def __init__( + self, + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + prompt_style, + vision_model_type, + ): + import datasets + from MMMU.mmmu.utils.data_utils import CAT_SHORT2LONG, load_yaml + + # The following downloads the MMMU dataset from HuggingFace and uses the API from the MMMU github repo to run MMMU evaluation. + all_mmmu_datasets = [] + + hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] + assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." + + for subject in CAT_SHORT2LONG.values(): + # Use a local copy of the dataset if exists (can be faster) or the HF one. + if os.path.exists(input_image_path): + subject_dataset = datasets.load_dataset( + os.path.join(input_image_path, subject), + split=datasets.Split.VALIDATION, + cache_dir=hf_datasets_cache, + verification_mode="no_checks", + ) + else: + subject_dataset = datasets.load_dataset( + "MMMU/MMMU", + subject, + split=datasets.Split.VALIDATION, + cache_dir=hf_datasets_cache, + ) + + all_mmmu_datasets.append(subject_dataset) + + dataset = datasets.concatenate_datasets(all_mmmu_datasets) + + dataset = [s for s in dataset if s['id'].startswith("val")] + + # Optionally, process only a subset of the input files. + if num_partitions > 0: + lb, ub = _get_partition_bounds( + len(dataset), num_samples_per_partition, num_partitions, partition_id + ) + dataset = dataset[lb:ub] + + # Using the LLaVA config from the MMMU repo. + config = load_yaml("examples/multimodal/MMMU/mmmu/configs/llava1.5.yaml") + for k, v in config.items(): + if isinstance(v, list): + assert len(v) == 1, "only one value supported." 
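+                # Options that the YAML stores as single-element lists are unwrapped to scalars.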
+ config[k] = v[0] + + self._config = config + + self._dataset = dataset + + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._prompt_style = prompt_style + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._dataset) + + def __getitem__(self, idx): + from MMMU.mmmu.utils.data_utils import construct_prompt, process_single_sample + + sample = self._dataset[idx] + + # Use the single image approach from the MMMU repo. + if self._prompt_style == "single_image": + sample = process_single_sample(sample) + sample = construct_prompt(sample, self._config) + + img = sample["image"] + sample_imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + sample_num_tiles = [len(sample_imgs)] + + prompt = sample["final_input_prompt"] + for i in range(8): + prompt = prompt.replace(f"", "") + sample["final_input_prompt"] = f"\n{prompt}" + elif self._prompt_style == "vlmevalkit": + sample = construct_prompt(sample, self._config) + + if sample["question_type"] == "multiple-choice": + question = sample["question"] + + options = "" + for k, v in sample["index2ans"].items(): + options += f"{k}. {v}\n" + + final_prompt = f"{question}\n" + if "hint" in sample: + final_prompt += f"Hint: {sample['hint']}\n" + + if "task_instructions" in sample: + final_prompt += f"Task instructions: {sample['task_instructions']}\n" + + final_prompt += options + final_prompt += "Answer with the option's letter from the given choices directly." + + sample["final_input_prompt"] = final_prompt.rstrip() + else: + question = sample["question"] + final_prompt = f"{question}\n" + final_prompt += "Answer the question directly." + sample["final_input_prompt"] = final_prompt.rstrip() + + sample_imgs = [] + sample_num_tiles = [] + + img_indices = sorted(list(set(re.findall(r"" + + img = sample[img_key] + assert img is not None, f"{img_str} is in prompt but not in sample images" + + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + adjusted_max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) # List of tiles. + + sample_imgs.extend(imgs) + sample_num_tiles.append(len(imgs)) + + sample["final_input_prompt"] = " ".join([f'' for i in range(len(img_indices))]) + "\n" + sample["final_input_prompt"] + elif self._prompt_style == "multi_image": + sample = construct_prompt(sample, self._config) + + sample_imgs = [] + sample_num_tiles = [] + + img_indices = re.findall(r"" + + img = sample[img_key] + assert img is not None, f"{img_str} is in prompt but not in sample images" + + # Note: Only replace the current image tag. + sample["final_input_prompt"] = sample["final_input_prompt"].replace( + img_str, "", 1 + ) + + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + adjusted_max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) # List of tiles. + + sample_imgs.extend(imgs) + sample_num_tiles.append(len(imgs)) + + # Sanity check. + for i in range(1, 8): + assert ( + f"" not in sample["final_input_prompt"] + ), "prompt contains unhandled image tags" + else: + raise ValueError(f"unknown prompt style {self._prompt_style}") + + # MMMU specific metadata. 
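+        # For multiple-choice questions, index2ans and all_choices let downstream
+        # evaluation map a generated option letter back to its answer text.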
+ metadata = {"question_type": sample["question_type"]} + if sample["question_type"] == "multiple-choice": + metadata["index2ans"] = sample["index2ans"] + metadata["all_choices"] = sample["all_choices"] + + prompt = sample['final_input_prompt'] + + tile_count = torch.tensor(sample_num_tiles, dtype=torch.int) + + return ( + torch.stack(sample_imgs), + tile_count, + sample["id"], + prompt, + sample["answer"], + metadata, + ) + + +class VideoMMEDataset(torch.utils.data.Dataset): + "Video MME evaluation dataset." + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_frames, + vision_model_type, + ): + ground_truth_original = json.load(open(gt_path)) + ground_truth = [] + for gt in ground_truth_original: + video_path = gt["url"] + video_path = video_path.replace("https://www.youtube.com/watch?v=", "") + video_path = video_path.replace("https://m.youtube.com/watch?v=", "") + video_path = os.path.join(input_image_path, video_path + ".mp4") + if not os.path.exists(video_path): + continue + gt["video_path"] = video_path + ground_truth.append(gt) + + ground_truth = sorted(ground_truth, key=lambda gt: gt["video_path"]) + print_rank_0(f"Found {len(ground_truth)} videos to process.") + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(ground_truth), num_samples_per_partition, num_partitions, partition_id + ) + ground_truth = ground_truth[start_idx:end_idx] + + self._ground_truth = ground_truth + self._img_h = img_h + self._img_w = img_w + self._use_tiling = False + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._num_frames = num_frames + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._ground_truth) + + def __getitem__(self, idx): + from torchvision.io import read_video + + gt = self._ground_truth[idx] + + video, _, _ = read_video(gt["video_path"], start_pts=0, end_pts=None, pts_unit='sec') + video = video.numpy() + selected_frames = torch.linspace(0, video.shape[0] - 1, self._num_frames).long() + video_frames = video[selected_frames] + if self._num_frames == 1: + video_frames = video_frames[None] + + imgs = [] + for img in video_frames: + from torchvision.transforms import ToPILImage + to_pil = ToPILImage() + img = to_pil(img) + imgs += get_visual_transform( + img, self._img_h, self._img_w, self._use_tiling, self._max_num_tiles, + self._use_thumbnail, augment=False, vision_model_type=self._vision_model_type + ) + + for question in gt["questions"]: + # Very hacky, but we essentially re-create gt holding only the + # question of interest. This is the make this generation script + # compatible with the Video MME evaluation script. 
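+            # Note: the return at the end of this loop body fires on the first iteration,
+            # so only the first question of each video is emitted per __getitem__ call.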
+ question_dict = { + "video_id": gt["video_id"], + "duration_category": gt["duration_category"], + "video_category": gt["video_category"], + "video_subcategory": gt["video_subcategory"], + "url": gt["url"], + "questions": [question], + } + + num_tiles = torch.tensor([len(imgs)], dtype=torch.int) + + answer = "" + metadata = "" + + return ( + torch.stack(imgs), + num_tiles, + question["question_id"], + question_dict, + answer, + metadata, + ) + + +class OCRBenchDataset(torch.utils.data.Dataset): + """OCRBench evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + gt = json.load(open(gt_path, encoding='utf-8')) + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(gt), num_samples_per_partition, num_partitions, partition_id + ) + gt = gt[start_idx:end_idx] + + self._input_image_path = input_image_path + self._gt = gt + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._gt) + + def __getitem__(self, idx): + img_path = os.path.join(self._input_image_path, self._gt[idx]['image_path']) + + img = Image.open(img_path) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + metadata = { + "dataset_name": self._gt[idx]["dataset_name"], + "data_type": self._gt[idx]["type"], + } + + return ( + torch.stack(imgs), + tile_count, + idx, + self._gt[idx]["question"], + self._gt[idx]["answers"], + metadata, + ) + + +class MathVistaDataset(torch.utils.data.Dataset): + """MathVista evaluation dataset.""" + + def __init__( + self, + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ): + import datasets + + hf_datasets_cache = os.environ["HF_DATASETS_CACHE"] + assert hf_datasets_cache != "", "Please set the environment variable HF_DATASETS_CACHE." + + if os.path.exists(input_image_path): + dataset = datasets.load_dataset( + input_image_path, cache_dir=hf_datasets_cache, verification_mode="no_checks" + ) + else: + dataset = datasets.load_dataset( + "AI4Math/MathVista", split="testmini", cache_dir=hf_datasets_cache + ) + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(dataset), num_samples_per_partition, num_partitions, partition_id + ) + dataset = dataset[start_idx:end_idx] + + self._dataset = dataset + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._dataset["pid"]) + + def __getitem__(self, idx): + # Already a PIL object. 
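+        # The MathVista "decoded_image" column holds PIL images, so no file I/O is needed here.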
+ img = self._dataset['decoded_image'][idx] + + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + question_id = self._dataset["pid"][idx] + question = self._dataset["question"][idx] + question_type = self._dataset["question_type"][idx] # free_form or multi_choice + query = self._dataset["query"][idx] + choices = self._dataset["choices"][idx] + answer = self._dataset["answer"][idx] + + if question_type == 'multi_choice': + start_chr = 'A' + choices_str = '' + index2ans = {} + all_choices = [] + for choice in choices: + all_choices.append(start_chr) + index2ans[start_chr] = choice + choices_str += f"{start_chr}. {choice}\n" + start_chr = chr(ord(start_chr) + 1) + + question = question + '\n' + choices_str + question = question + "Answer with the option's letter from the given choices directly." + answer = chr(ord('A') + choices.index(answer)) + else: + question = query.replace("Hint: ", "") + index2ans = {} + all_choices = [] + + metadata = { + "question_type": question_type, + "index2ans": index2ans, + "all_choices": all_choices, + } + + return torch.stack(imgs), tile_count, question_id, question, answer, metadata + + +class AI2DDataset(torch.utils.data.Dataset): + """AI2D evaluation dataset.""" + + def __init__( + self, + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + no_mask, + vision_model_type, + ): + with open(gt_path, 'r') as f: + jsonl = list(f) + + gt = [json.loads(json_str) for json_str in jsonl] + + if num_partitions > 0: + start_idx, end_idx = _get_partition_bounds( + len(gt), num_samples_per_partition, num_partitions, partition_id + ) + gt = gt[start_idx:end_idx] + + self._gt = gt + self._input_image_path = input_image_path + self._img_h = img_h + self._img_w = img_w + self._use_tiling = use_tiling + self._max_num_tiles = max_num_tiles + self._use_thumbnail = use_thumbnail + self._no_mask = no_mask + self._vision_model_type = vision_model_type + + def __len__(self): + return len(self._gt) + + def __getitem__(self, idx): + img_path = os.path.join(self._input_image_path, self._gt[idx]['image']) + if self._no_mask: + img_path.replace("AI2D_TEST", "AI2D_TEST_NO_MASK_IMAGES") + + img = Image.open(img_path) + imgs = get_visual_transform( + img, + self._img_h, + self._img_w, + self._use_tiling, + self._max_num_tiles, + self._use_thumbnail, + augment=False, + vision_model_type=self._vision_model_type, + ) + + tile_count = torch.tensor([len(imgs)], dtype=torch.int) + + metadata = "" # Not used. 
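+        # Like the other datasets in this file, return the common 6-tuple:
+        # (stacked image tiles, tile count, sample id, question, answer, metadata).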
+ + return ( + torch.stack(imgs), + tile_count, + self._gt[idx]["question_id"], + self._gt[idx]["question"], + self._gt[idx]["answer"], + metadata, + ) + + +def get_evaluation_dataset( + task, + input_image_path, + gt_path, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_samples_per_partition, + num_partitions, + partition_id, + num_frames, + vision_model_type, +): + """Get an evaluation dataset.""" + if task == "TextVQA": + keys = { + "image_id": "image_id", + "sample_id": "question_id", + "question": "question", + "answer": "answers", + } + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "VQAv2": + keys = { + "image_id": "image", + "sample_id": "question_id", + "question": "question", + "answer": "answer", + } + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "ChartQA": + keys = {"image_id": "imgname", "question": "query", "answer": "label"} + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "captioning": + dataset = CaptioningDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == 'MMMU': + # Note: + # - prompt_style="single_image" uses only one image like in the MMMU repo example. + # - prompt_style="multi_image" uses multiple input images. 
+ # - prompt_style="vlmevalkit" is similar to https://github.com/open-compass/VLMEvalKit/blob/5d3cebcf18ef4bfbadc3bd3ef80bdc7aad2c6557/vlmeval/vlm/internvl_chat.py#L499 + dataset = MMMUDataset( + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + prompt_style="single_image", + vision_model_type=vision_model_type, + ) + elif task == "VideoMME": + dataset = VideoMMEDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_frames, + vision_model_type, + ) + elif task == "OCRBench": + dataset = OCRBenchDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "MathVista": + dataset = MathVistaDataset( + input_image_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "AI2D": + dataset = AI2DDataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + no_mask=False, + vision_model_type=vision_model_type, + ) + elif task == "SPDocVQA": + keys = {"sample_id": "questionId", "image_id": "image", "question": "question", "answer": "answers"} + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + elif task == "InfoVQA": + keys = {"sample_id": "questionId", "image_id": "image_local_name", "question": "question", "answer": "answers"} + + dataset = VQADataset( + input_image_path, + gt_path, + num_samples_per_partition, + num_partitions, + partition_id, + keys, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + vision_model_type, + ) + else: + raise NotImplementedError(f"unsupported task {task}") + + return dataset diff --git a/examples/multimodal/image_processing.py b/examples/multimodal/image_processing.py index ed9401c..3d3365d 100644 --- a/examples/multimodal/image_processing.py +++ b/examples/multimodal/image_processing.py @@ -1,118 +1,143 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. Except portions as noted which are Copyright (c) 2023 OpenGVLab and licensed under the MIT license found in LICENSE. -from torchvision import transforms as T -from torchvision.transforms import Compose -from torchvision.transforms.functional import InterpolationMode - - -IMAGENET_PIXEL_MEAN = [0.485, 0.456, 0.406] -IMAGENET_PIXEL_STD = [0.229, 0.224, 0.225] -SIGLIP_PIXEL_MEAN = [0.5, 0.5, 0.5] -SIGLIP_PIXEL_STD = [0.5, 0.5, 0.5] -CLIP_PIXEL_MEAN = [0.48145466, 0.4578275, 0.40821073] -CLIP_PIXEL_STD = [0.26862954, 0.26130258, 0.27577711] - - -pixel_statistics = { - "clip": (CLIP_PIXEL_MEAN, CLIP_PIXEL_STD), - "siglip": (SIGLIP_PIXEL_MEAN, SIGLIP_PIXEL_STD), - "internvit": (IMAGENET_PIXEL_MEAN, IMAGENET_PIXEL_STD), -} - - -def get_visual_transform(img, img_h, img_w, use_tiling=False, max_num_tiles=1, use_thumbnail=False, augment=False, vision_model_type="clip"): - pixel_mean, pixel_std = pixel_statistics[vision_model_type] - - assert not augment, "Image augmentation not implemented." 
- transform = build_transform(img_h, pixel_mean, pixel_std, vision_model_type) - - if use_tiling: - assert img_h == img_w, "dynamic tiling expects equal tile height and width" - imgs = dynamic_preprocess(img, min_num=1, max_num=max_num_tiles, image_size=img_h, use_thumbnail=use_thumbnail) - imgs = [transform(img) for img in imgs] - else: - imgs = [transform(img)] - - return imgs - - -# From https://github.com/OpenGVLab/InternVL/blob/c62fa4f7c850165d7386bdc48ac6bc5a6fab0864/internvl_chat/internvl/train/dataset.py#L685 -# Copyright (c) 2023 OpenGVLab. -def find_closest_aspect_ratio(aspect_ratio, target_ratios, width, height, image_size): - best_ratio_diff = float('inf') - best_ratio = (1, 1) - area = width * height - for ratio in target_ratios: - target_aspect_ratio = ratio[0] / ratio[1] - ratio_diff = abs(aspect_ratio - target_aspect_ratio) - if ratio_diff < best_ratio_diff: - best_ratio_diff = ratio_diff - best_ratio = ratio - elif ratio_diff == best_ratio_diff: - if area > 0.5 * image_size * image_size * ratio[0] * ratio[1]: - best_ratio = ratio - # print(f'width: {width}, height: {height}, best_ratio: {best_ratio}') - return best_ratio - - -# From https://github.com/OpenGVLab/InternVL/blob/c62fa4f7c850165d7386bdc48ac6bc5a6fab0864/internvl_chat/internvl/train/dataset.py#L702 -# Copyright (c) 2023 OpenGVLab. -def dynamic_preprocess(image, min_num=1, max_num=6, image_size=448, use_thumbnail=False): - orig_width, orig_height = image.size - aspect_ratio = orig_width / orig_height - - # calculate the existing image aspect ratio - target_ratios = set( - (i, j) for n in range(min_num, max_num + 1) for i in range(1, n + 1) for j in range(1, n + 1) if - i * j <= max_num and i * j >= min_num) - target_ratios = sorted(target_ratios, key=lambda x: x[0] * x[1]) - - # find the closest aspect ratio to the target - target_aspect_ratio = find_closest_aspect_ratio( - aspect_ratio, target_ratios, orig_width, orig_height, image_size) - - # calculate the target width and height - target_width = image_size * target_aspect_ratio[0] - target_height = image_size * target_aspect_ratio[1] - blocks = target_aspect_ratio[0] * target_aspect_ratio[1] - - # resize the image - resized_img = image.resize((target_width, target_height)) - processed_images = [] - for i in range(blocks): - box = ( - (i % (target_width // image_size)) * image_size, - (i // (target_width // image_size)) * image_size, - ((i % (target_width // image_size)) + 1) * image_size, - ((i // (target_width // image_size)) + 1) * image_size - ) - # split the image - split_img = resized_img.crop(box) - processed_images.append(split_img) - assert len(processed_images) == blocks - if use_thumbnail and len(processed_images) != 1: - thumbnail_img = image.resize((image_size, image_size)) - processed_images.append(thumbnail_img) - return processed_images - - -# Based on https://github.com/openai/CLIP/blob/dcba3cb2e2827b402d2701e7e1c7d9fed8a20ef1/clip/clip.py#L79 -# and https://github.com/OpenGVLab/InternVL/blob/aa521e6eb1df4cf153aa4118fcf13e673c055d46/internvl_chat/internvl/train/dataset.py#L276 -def build_transform(input_size, pixel_mean, pixel_std, vision_model_type): - if vision_model_type in ("siglip", "internvit"): - transform = T.Compose([ - T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), - T.Resize((input_size, input_size), interpolation=InterpolationMode.BICUBIC), - T.ToTensor(), - T.Normalize(mean=pixel_mean, std=pixel_std) - ]) - elif vision_model_type == "clip": - transform = Compose([ - T.Resize((input_size, input_size), 
interpolation=InterpolationMode.BICUBIC), - T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), - T.ToTensor(), - T.Normalize(mean=pixel_mean, std=pixel_std), - ]) - else: - raise NotImplementedError(f"image processing not defined for vision model {vision_model_type}") - - return transform +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. Except portions as noted which are Copyright (c) 2023 OpenGVLab and licensed under the MIT license found in LICENSE. +from torchvision import transforms as T +from torchvision.transforms import Compose +from torchvision.transforms.functional import InterpolationMode + + +IMAGENET_PIXEL_MEAN = [0.485, 0.456, 0.406] +IMAGENET_PIXEL_STD = [0.229, 0.224, 0.225] +SIGLIP_PIXEL_MEAN = [0.5, 0.5, 0.5] +SIGLIP_PIXEL_STD = [0.5, 0.5, 0.5] +CLIP_PIXEL_MEAN = [0.48145466, 0.4578275, 0.40821073] +CLIP_PIXEL_STD = [0.26862954, 0.26130258, 0.27577711] + + +pixel_statistics = { + "clip": (CLIP_PIXEL_MEAN, CLIP_PIXEL_STD), + "siglip": (SIGLIP_PIXEL_MEAN, SIGLIP_PIXEL_STD), + "internvit": (IMAGENET_PIXEL_MEAN, IMAGENET_PIXEL_STD), + "radio": (CLIP_PIXEL_MEAN, CLIP_PIXEL_STD), + "huggingface": (SIGLIP_PIXEL_MEAN, SIGLIP_PIXEL_STD), +} + + +# From https://github.com/OpenGVLab/InternVL/blob/c62fa4f7c850165d7386bdc48ac6bc5a6fab0864/internvl_chat/internvl/train/dataset.py#L685 +# Copyright (c) 2023 OpenGVLab. +def find_closest_aspect_ratio(aspect_ratio, target_ratios, width, height, image_size): + best_ratio_diff = float('inf') + best_ratio = (1, 1) + area = width * height + for ratio in target_ratios: + target_aspect_ratio = ratio[0] / ratio[1] + ratio_diff = abs(aspect_ratio - target_aspect_ratio) + if ratio_diff < best_ratio_diff: + best_ratio_diff = ratio_diff + best_ratio = ratio + elif ratio_diff == best_ratio_diff: + if area > 0.5 * image_size * image_size * ratio[0] * ratio[1]: + best_ratio = ratio + return best_ratio + + +def find_closest_area_weighted_aspect_ratio(aspect_ratio, target_ratios, width, height, image_size): + """ + Find the best number of tiles based on the aspect ratio and the area covered by the tiles. + """ + best_factor = float('-inf') + best_ratio = (1, 1) + area = width * height + for ratio in target_ratios: + target_aspect_ratio = ratio[0] / ratio[1] + factor_based_on_area_n_ratio = ( + min((ratio[0]*ratio[1]*image_size*image_size)/ area, 0.6) * + min(target_aspect_ratio/aspect_ratio, aspect_ratio/target_aspect_ratio)) + if factor_based_on_area_n_ratio > best_factor: + best_factor = factor_based_on_area_n_ratio + best_ratio = ratio + return best_ratio + + +def get_visual_transform( + img, img_h, img_w, use_tiling=False, max_num_tiles=1, use_thumbnail=False, augment=False, + vision_model_type="clip", find_closest_aspect_ratio_fn=find_closest_aspect_ratio): + pixel_mean, pixel_std = pixel_statistics[vision_model_type] + + assert not augment, "Image augmentation not implemented." + transform = build_transform(img_h, pixel_mean, pixel_std, vision_model_type) + + if use_tiling: + assert img_h == img_w, "dynamic tiling expects equal tile height and width" + imgs = dynamic_preprocess( + img, min_num=1, max_num=max_num_tiles, image_size=img_h, use_thumbnail=use_thumbnail, + find_closest_aspect_ratio_fn=find_closest_aspect_ratio_fn) + imgs = [transform(img) for img in imgs] + else: + imgs = [transform(img)] + + return imgs + + +# From https://github.com/OpenGVLab/InternVL/blob/c62fa4f7c850165d7386bdc48ac6bc5a6fab0864/internvl_chat/internvl/train/dataset.py#L702 +# Copyright (c) 2023 OpenGVLab. 
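+# dynamic_preprocess splits an image into up to `max_num` square tiles of size `image_size`, picking the
+# tile grid whose aspect ratio best matches the input (via `find_closest_aspect_ratio_fn`), and optionally
+# appends a thumbnail of the whole image as one extra tile.
+# Illustrative example (not part of the original code): a 1000x500 image with image_size=448 and max_num=6
+# selects a 2x1 grid, so the image is resized to 896x448 and cropped into two 448x448 tiles, plus one
+# 448x448 thumbnail when use_thumbnail=True.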
+def dynamic_preprocess( + image, min_num=1, max_num=6, image_size=448, use_thumbnail=False, + find_closest_aspect_ratio_fn=find_closest_aspect_ratio): + orig_width, orig_height = image.size + aspect_ratio = orig_width / orig_height + + # calculate the existing image aspect ratio + target_ratios = set( + (i, j) for n in range(min_num, max_num + 1) for i in range(1, n + 1) for j in range(1, n + 1) if + i * j <= max_num and i * j >= min_num) + target_ratios = sorted(target_ratios, key=lambda x: x[0] * x[1]) + + # find the closest aspect ratio to the target + target_aspect_ratio = find_closest_aspect_ratio_fn( + aspect_ratio, target_ratios, orig_width, orig_height, image_size) + + # calculate the target width and height + target_width = image_size * target_aspect_ratio[0] + target_height = image_size * target_aspect_ratio[1] + blocks = target_aspect_ratio[0] * target_aspect_ratio[1] + + # resize the image + resized_img = image.resize((target_width, target_height)) + processed_images = [] + for i in range(blocks): + box = ( + (i % (target_width // image_size)) * image_size, + (i // (target_width // image_size)) * image_size, + ((i % (target_width // image_size)) + 1) * image_size, + ((i // (target_width // image_size)) + 1) * image_size + ) + # split the image + split_img = resized_img.crop(box) + processed_images.append(split_img) + assert len(processed_images) == blocks + if use_thumbnail and len(processed_images) != 1: + thumbnail_img = image.resize((image_size, image_size)) + processed_images.append(thumbnail_img) + return processed_images + + +# Based on https://github.com/openai/CLIP/blob/dcba3cb2e2827b402d2701e7e1c7d9fed8a20ef1/clip/clip.py#L79 +# and https://github.com/OpenGVLab/InternVL/blob/aa521e6eb1df4cf153aa4118fcf13e673c055d46/internvl_chat/internvl/train/dataset.py#L276 +def build_transform(input_size, pixel_mean, pixel_std, vision_model_type): + if vision_model_type in ("siglip", "internvit", "radio", "huggingface"): + transform = T.Compose([ + T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), + T.Resize((input_size, input_size), interpolation=InterpolationMode.BICUBIC), + T.ToTensor(), + T.Normalize(mean=pixel_mean, std=pixel_std) + ]) + elif vision_model_type == "clip": + transform = Compose([ + T.Resize((input_size, input_size), interpolation=InterpolationMode.BICUBIC), + T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img), + T.ToTensor(), + T.Normalize(mean=pixel_mean, std=pixel_std), + ]) + else: + raise NotImplementedError(f"image processing not defined for vision model {vision_model_type}") + + return transform diff --git a/examples/multimodal/layer_specs.py b/examples/multimodal/layer_specs.py index 2e07dc8..0f170fa 100644 --- a/examples/multimodal/layer_specs.py +++ b/examples/multimodal/layer_specs.py @@ -1,135 +1,139 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-import torch - -from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add -from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.dot_product_attention import DotProductAttention -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.identity_op import IdentityOp -from megatron.core.transformer.mlp import MLP, MLPSubmodules -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules - -try: - from megatron.core.extensions.transformer_engine import ( - TEColumnParallelLinear, - TEDotProductAttention, - TELayerNormColumnParallelLinear, - TENorm, - TERowParallelLinear, - ) - - HAVE_TE = True -except ImportError: - HAVE_TE = False - -try: - import apex - - from megatron.core.fusions.fused_layer_norm import FusedLayerNorm - from megatron.core.transformer.torch_norm import WrappedTorchNorm - - HAVE_APEX = True - LNImpl = FusedLayerNorm -except ImportError: - import warnings - - from megatron.core.transformer.torch_norm import WrappedTorchNorm - - warnings.warn(f'Apex is not installed. Falling back to Torch Norm') - LNImpl = WrappedTorchNorm - - -def get_layer_spec(is_vit, normalization) -> ModuleSpec: - attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal - if normalization == "LayerNorm": - norm = LNImpl - elif normalization == "RMSNorm": - if HAVE_TE: - norm = TENorm - else: - version = torch.__version__.split('.') - version_geq_2_4 = ( - int(TORCH_VERSION[0]) > 2 - or ( - int(TORCH_VERSION[0]) == 2 - and int(TORCH_VERSION[1]) >= 4 - ) - ) - assert version_geq_2_4, "Torch version >= 2.4.0 is required for RMSNorm" - if HAVE_APEX: - warnings.warn(f'Apex does not support RMSNorm. Falling back to Torch Norm') - norm = WrappedTorchNorm - else: - raise RuntimeError("unknown normalization", normalization) - - mlp = get_mlp_module_spec(use_te=False) # doesn't include norm. - - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=norm, - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": attn_mask_type}, - submodules=SelfAttentionSubmodules( - linear_qkv=ColumnParallelLinear, - core_attention=DotProductAttention, - linear_proj=RowParallelLinear, - q_layernorm=IdentityOp, - k_layernorm=IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=norm, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - ), - ) - - -def get_layer_spec_te(is_vit=False) -> ModuleSpec: - attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal - - mlp = get_norm_mlp_module_spec_te() - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": attn_mask_type}, - submodules=SelfAttentionSubmodules( - linear_qkv=TELayerNormColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=TERowParallelLinear, - q_layernorm=IdentityOp, - k_layernorm=IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=IdentityOp, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - ), - ) - - -def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec: - # Dense MLP w/ or w/o TE modules. 
- return ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, - linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, - ), - ) - - -def get_norm_mlp_module_spec_te() -> ModuleSpec: - return ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=TELayerNormColumnParallelLinear, linear_fc2=TERowParallelLinear - ), - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import torch + +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.identity_op import IdentityOp +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules + +try: + from megatron.core.extensions.transformer_engine import ( + TEColumnParallelLinear, + TEDotProductAttention, + TELayerNormColumnParallelLinear, + TENorm, + TERowParallelLinear, + ) + + HAVE_TE = True +except ImportError: + HAVE_TE = False + +try: + import apex + + from megatron.core.fusions.fused_layer_norm import FusedLayerNorm + from megatron.core.transformer.torch_norm import WrappedTorchNorm + + HAVE_APEX = True + LNImpl = FusedLayerNorm +except ImportError: + import warnings + + from megatron.core.transformer.torch_norm import WrappedTorchNorm + + warnings.warn(f'Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm + + +def get_layer_spec(is_vit, normalization) -> ModuleSpec: + attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal + if normalization == "LayerNorm": + norm = LNImpl + elif normalization == "RMSNorm": + if HAVE_TE: + norm = TENorm + else: + version = torch.__version__.split('.') + version_geq_2_4 = ( + int(TORCH_VERSION[0]) > 2 + or ( + int(TORCH_VERSION[0]) == 2 + and int(TORCH_VERSION[1]) >= 4 + ) + ) + assert version_geq_2_4, "Torch version >= 2.4.0 is required for RMSNorm" + if HAVE_APEX: + warnings.warn(f'Apex does not support RMSNorm. Falling back to Torch Norm') + norm = WrappedTorchNorm + else: + raise RuntimeError("unknown normalization", normalization) + + mlp = get_mlp_module_spec(use_te=False) # doesn't include norm. + + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=norm, + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": attn_mask_type}, + submodules=SelfAttentionSubmodules( + linear_qkv=ColumnParallelLinear, + core_attention=DotProductAttention, + linear_proj=RowParallelLinear, + q_layernorm=IdentityOp, + k_layernorm=IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=norm, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + ), + ) + + +def get_layer_spec_te(is_vit=False, padding=False) -> ModuleSpec: + attn_mask_type = AttnMaskType.no_mask if is_vit else AttnMaskType.causal + # Padding mask is needed for e.g. Context Parallel. 
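+    # padding_causal keeps causal masking but additionally ignores padded positions, e.g. when samples are
+    # padded so that the sequence length divides evenly across context-parallel ranks.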
+ if padding: + assert not is_vit, "padding_causal mask not used with ViT" + attn_mask_type = AttnMaskType.padding_causal + + mlp = get_norm_mlp_module_spec_te() + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": attn_mask_type}, + submodules=SelfAttentionSubmodules( + linear_qkv=TELayerNormColumnParallelLinear, + core_attention=TEDotProductAttention, + linear_proj=TERowParallelLinear, + q_layernorm=IdentityOp, + k_layernorm=IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=IdentityOp, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + ), + ) + + +def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec: + # Dense MLP w/ or w/o TE modules. + return ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, + linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, + ), + ) + + +def get_norm_mlp_module_spec_te() -> ModuleSpec: + return ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TELayerNormColumnParallelLinear, linear_fc2=TERowParallelLinear + ), + ) diff --git a/examples/multimodal/model.py b/examples/multimodal/model.py index a28a428..feca9a9 100644 --- a/examples/multimodal/model.py +++ b/examples/multimodal/model.py @@ -1,216 +1,254 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import warnings -from copy import deepcopy - -import torch -from config import get_language_model_config, get_vision_model_config, get_vision_projection_config -from layer_specs import get_layer_spec, get_layer_spec_te, get_mlp_module_spec, get_norm_mlp_module_spec_te - -from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN, LLaVAModel -from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.training import get_args, get_tokenizer, print_rank_0 -from megatron.training.arguments import core_transformer_config_from_args - - -def model_provider( - pre_process=True, post_process=True, add_encoder=True, add_decoder=True, parallel_output=True -) -> LLaVAModel: - """Builds the model. - - Args: - pre_process (bool): Include the embedding layer in the gpt decoder (used with pipeline parallelism). Defaults to True. - post_process (bool): Include an output layer and a layernorm in the gpt decoder (used with pipeline parallelism). Defaults to True. - add_encoder (bool): Construct the encoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the encoder - will live on only a subset of the pipeline stages (specifically, only the first stage). - add_decoder (bool): Construct the decoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the decoder - will live on only a subset of the pipeline stages (specifically, every stage after the first one). - parallel_output (bool): Enable parallel model output. - - Returns: - model: A multimodal model. - """ - args = get_args() - assert args.ckpt_format == 'torch', "Only ckpt-format torch is supported for VLM training currently." 
- assert args.encoder_pipeline_model_parallel_size <= 1, "LLaVA does not support pp>1 for encoder on it's own pipeline rank" - - use_te = args.use_te - - print_rank_0('building a multimodal model ...') - - num_image_embeddings = get_num_image_embeddings( - args.img_h, - args.img_w, - args.patch_dim, - args.vision_model_type, - args.disable_vision_class_token, - 1, - args.pixel_shuffle, - args.use_tile_tags, - ) - old_seq_length = args.seq_length - args.seq_length = args.encoder_seq_length = num_image_embeddings - if torch.distributed.get_rank() == 0 and old_seq_length != args.seq_length: - warnings.warn( - f"Changed seq_length and encoder_seq_length (vision model sequence length) from {old_seq_length} to num_image_tokens ({num_image_embeddings})" - ) - - max_num_image_embeddings = (args.max_num_tiles + int(args.use_thumbnail)) * num_image_embeddings - - assert ( - args.decoder_seq_length is not None - ), "Please provide --decoder-seq-length to set the language model sequence length" - assert ( - args.decoder_seq_length > max_num_image_embeddings - ), "Language model sequence length must be greater than the maximum number of image embeddings" - if args.decoder_seq_length > args.max_position_embeddings: - args.max_position_embeddings = args.decoder_seq_length - warnings.warn( - f"Expanded max_position_embeddings to {args.max_position_embeddings} to accommodate the maximum language model sequence length" - ) - - base_config = core_transformer_config_from_args(get_args()) - base_config.language_model_type = args.language_model_type - base_config.vision_model_type = args.vision_model_type - base_config.calculate_per_token_loss = True - - language_config = deepcopy(base_config) - language_config = get_language_model_config(language_config) - - if use_te: - language_transformer_layer_spec = get_layer_spec_te( - is_vit=False - ) # TENorm detects LayerNorm/RMS automatically. - else: - language_transformer_layer_spec = get_layer_spec( - is_vit=False, normalization=language_config.normalization - ) - - vision_config = deepcopy(base_config) - vision_config = get_vision_model_config( - vision_config, apply_query_key_layer_scaling=args.apply_query_key_layer_scaling - ) - - vision_model_type = args.vision_model_type - if vision_model_type in ["clip", "siglip"]: - if use_te: - vision_transformer_layer_spec = get_layer_spec_te( - is_vit=True - ) # TENorm detects LayerNorm/RMS automatically. - else: - vision_transformer_layer_spec = get_layer_spec( - is_vit=True, normalization=vision_config.normalization - ) - elif vision_model_type == "internvit": - from nvlm.internvit import get_internvit_layer_spec - vision_transformer_layer_spec = get_internvit_layer_spec(use_te=use_te) - else: - raise RuntimeError("unsupported vision model type", vision_model_type) - - vision_projection_config = deepcopy(base_config) - vision_projection_config = get_vision_projection_config( - vision_projection_config, language_config.hidden_size - ) - - # --encoder-pipeline-model-parallel-size 1 will enable a separate pipeline stage for the vision model. - if args.encoder_pipeline_model_parallel_size > 0: - assert ( - args.encoder_pipeline_model_parallel_size == 1 - ), "vision model and projection can only live on 1 pipeline stage." 
- - if args.encoder_tensor_model_parallel_size > 0: - vision_config.tensor_model_parallel_size = args.encoder_tensor_model_parallel_size - vision_projection_config.tensor_model_parallel_size = ( - args.encoder_tensor_model_parallel_size - ) - - # Make sure vision model pipeline parallel size is not inherited from the language model pipeline parallel size. - # 0 is not a valid for the config value, hence max(1, ). - vision_config.pipeline_model_parallel_size = max(1, args.encoder_pipeline_model_parallel_size) - vision_projection_config.pipeline_model_parallel_size = vision_config.pipeline_model_parallel_size - - # Make sure the vision model does not inherit first and last pipeline num layers from the language model. - vision_config.first_pipeline_num_layers = vision_config.last_pipeline_num_layers = None - - if vision_projection_config.normalization: - vision_projection_layer_spec = get_norm_mlp_module_spec_te().submodules - else: - vision_projection_layer_spec = get_mlp_module_spec(use_te=use_te).submodules - - # Toggle --recompute* for the vision and language model separately. - if args.recompute_vision: - if vision_config.recompute_method is not None and vision_config.recompute_granularity is not None: - vision_config.recompute_num_layers = vision_config.num_layers - else: - vision_config.recompute_granularity = None - vision_config.recompute_method = None - vision_config.recompute_num_layers = None - - vision_projection_config.recompute_granularity = None - vision_projection_config.recompute_method = None - vision_projection_config.recompute_num_layers = None - - - tokenizer = get_tokenizer() - image_token_index = tokenizer.convert_tokens_to_ids(IMAGE_TOKEN) - - tile_tags = _get_tile_tags(args, tokenizer) - - model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_transformer_layer_spec, - language_vocab_size=args.padded_vocab_size, - language_max_sequence_length=args.decoder_seq_length, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_transformer_layer_spec, - drop_vision_class_token=args.disable_vision_class_token, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_layer_spec, - vision_projection_type="mlp", - allow_missing_vision_projection_checkpoint=args.allow_missing_vision_projection_checkpoint, - parallel_output=parallel_output, - language_position_embedding_type=args.position_embedding_type, - language_rotary_percent=args.rotary_percent, - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder, - img_h=args.img_h, - img_w=args.img_w, - patch_dim=args.patch_dim, - language_rotary_base=args.rotary_base, - language_rope_scaling=args.use_rope_scaling, - image_token_index=image_token_index, - pixel_shuffle=args.pixel_shuffle, - tile_tags=tile_tags, - ) - - model.freeze( - freeze_language_model=args.freeze_LM, - freeze_vision_model=args.freeze_ViT, - freeze_vision_projection=False, - ) - - return model - - -def _get_tile_tags(args, tokenizer): - """Tile tags are used in NVLM to surround image tiles with text tags.""" - if not args.use_tile_tags: - return None - - # We expect the tokenized length of the tags is same. 
- thumbnail_tag_text = "" - if args.tokenizer_prompt_format == "nvlm-yi-34b": - thumbnail_tag_text = "" - - assert args.max_num_tiles <= 6, "Up to 6 tile tags used" - tile_tags_text = [f"" for i in range(1, args.max_num_tiles + 1)] + [thumbnail_tag_text] - - start_idx = 0 - if tokenizer._prompt_config.has_bos: - start_idx = 1 - - # Convert to tokens [num_tiles, tile_seq_len]. - tile_tags = [tokenizer.tokenize(t)[start_idx:] for t in tile_tags_text] - - return tile_tags +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import warnings +from copy import deepcopy + +import torch +from config import get_language_model_config, get_vision_model_config, get_vision_projection_config +from layer_specs import get_layer_spec, get_layer_spec_te, get_mlp_module_spec, get_norm_mlp_module_spec_te + +from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN, LLaVAModel +from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings +from megatron.training import get_args, get_tokenizer, print_rank_0 +from megatron.training.arguments import core_transformer_config_from_args + + +def model_provider( + pre_process=True, post_process=True, add_encoder=True, add_decoder=True, parallel_output=True +) -> LLaVAModel: + """Builds the model. + + Args: + pre_process (bool): Include the embedding layer in the gpt decoder (used with pipeline parallelism). Defaults to True. + post_process (bool): Include an output layer and a layernorm in the gpt decoder (used with pipeline parallelism). Defaults to True. + add_encoder (bool): Construct the encoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the encoder + will live on only a subset of the pipeline stages (specifically, only the first stage). + add_decoder (bool): Construct the decoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the decoder + will live on only a subset of the pipeline stages (specifically, every stage after the first one). + parallel_output (bool): Enable parallel model output. + + Returns: + model: A multimodal model. 
+ """ + args = get_args() + assert args.encoder_pipeline_model_parallel_size <= 1, "LLaVA does not support pp>1 for encoder on it's own pipeline rank" + + use_te = args.use_te + + print_rank_0('building a multimodal model ...') + + num_image_embeddings = get_num_image_embeddings( + args.img_h, + args.img_w, + args.patch_dim, + args.vision_model_type, + args.disable_vision_class_token, + 1, + args.pixel_shuffle, + args.use_tile_tags, + ) + old_seq_length = args.seq_length + args.seq_length = args.encoder_seq_length = num_image_embeddings + if torch.distributed.get_rank() == 0 and old_seq_length != args.seq_length: + warnings.warn( + f"Changed seq_length and encoder_seq_length (vision model sequence length) from {old_seq_length} to num_image_tokens ({num_image_embeddings})" + ) + + max_num_image_embeddings = (args.max_num_tiles + int(args.use_thumbnail)) * num_image_embeddings + + assert ( + args.decoder_seq_length is not None + ), "Please provide --decoder-seq-length to set the language model sequence length" + assert ( + args.decoder_seq_length > max_num_image_embeddings + ), "Language model sequence length must be greater than the maximum number of image embeddings" + if args.decoder_seq_length > args.max_position_embeddings: + args.max_position_embeddings = args.decoder_seq_length + warnings.warn( + f"Expanded max_position_embeddings to {args.max_position_embeddings} to accommodate the maximum language model sequence length" + ) + + base_config = core_transformer_config_from_args(get_args()) + base_config.language_model_type = args.language_model_type + base_config.vision_model_type = args.vision_model_type + base_config.calculate_per_token_loss = True + + language_config = deepcopy(base_config) + language_config = get_language_model_config(language_config) + + if use_te: + # Padding mask needed for SP/CP. + padding = args.context_parallel_size > 1 and args.sequence_parallel + language_transformer_layer_spec = get_layer_spec_te( + is_vit=False, padding=padding + ) # TENorm detects LayerNorm/RMS automatically. 
+ else: + language_transformer_layer_spec = get_layer_spec( + is_vit=False, normalization=language_config.normalization + ) + + vision_model_type = args.vision_model_type + vision_config = deepcopy(base_config) + vision_config = get_vision_model_config( + vision_config, apply_query_key_layer_scaling=args.apply_query_key_layer_scaling + ) + if vision_model_type.startswith("huggingface"): + assert args.encoder_tensor_model_parallel_size < 2, "Huggingface vision encoders do not support --encoder-tensor-model-parallel-size > 1" + assert args.encoder_pipeline_model_parallel_size == 0, "Huggingface vision encoders do not support --encoder-pipeline-model-parallel-size > 0" + assert not args.sequence_parallel, "Huggingface models do not support --sequence-parallel" + assert args.context_parallel_size < 2, "Huggingface models do not support --context-parallel-size > 1" + assert args.vision_huggingface_model_name_or_path is not None, "Providing --vision-huggingface-model-name-or-path is necessary when using huggingface vision model" + + vision_config.huggingface_model_name_or_path = args.vision_huggingface_model_name_or_path + + from transformers import AutoConfig + huggingface_config = AutoConfig.from_pretrained(vision_config.huggingface_model_name_or_path) + vision_config.hidden_size = huggingface_config.hidden_size + + vision_model_type = args.vision_model_type + if vision_model_type in ["clip", "siglip", "radio"]: + if use_te: + vision_transformer_layer_spec = get_layer_spec_te( + is_vit=True + ) # TENorm detects LayerNorm/RMS automatically. + else: + vision_transformer_layer_spec = get_layer_spec( + is_vit=True, normalization=vision_config.normalization + ) + elif vision_model_type == "internvit": + from nvlm.internvit import get_internvit_layer_spec + vision_transformer_layer_spec = get_internvit_layer_spec(use_te=use_te) + elif vision_model_type.startswith("huggingface"): + vision_transformer_layer_spec = None + else: + raise RuntimeError("unsupported vision model type", vision_model_type) + + vision_projection_config = deepcopy(base_config) + + if base_config.language_model_type.startswith("huggingface"): + assert args.tensor_model_parallel_size == 1, "Huggingface models do not support --tensor-model-parallel-size > 1" + assert args.pipeline_model_parallel_size < 2, "Huggingface models do not support --pipeline-model-parallel-size > 1" + assert not args.sequence_parallel, "Huggingface models do not support --sequence-parallel" + assert args.context_parallel_size < 2, "Huggingface models do not support --context-parallel-size > 1" + assert args.language_huggingface_model_name_or_path is not None, "Providing --language-huggingface-model-name-or-path is necessary when using huggingface language model" + + language_config.huggingface_model_name_or_path = args.language_huggingface_model_name_or_path + # Pass to vision projection config so can choose the correct ffn hidden size + vision_projection_config.huggingface_model_name_or_path = args.language_huggingface_model_name_or_path + + vision_projection_config = get_vision_projection_config( + vision_projection_config, language_config.hidden_size + ) + + # --encoder-pipeline-model-parallel-size 1 will enable a separate pipeline stage for the vision model. + if args.encoder_pipeline_model_parallel_size > 0: + assert ( + args.encoder_pipeline_model_parallel_size == 1 + ), "vision model and projection can only live on 1 pipeline stage." 
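+        # When set, --encoder-tensor-model-parallel-size overrides the tensor parallel size of the vision
+        # model and projection below; otherwise they inherit the language model's tensor parallel size.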
+ + if args.encoder_tensor_model_parallel_size > 0: + vision_config.tensor_model_parallel_size = args.encoder_tensor_model_parallel_size + vision_projection_config.tensor_model_parallel_size = ( + args.encoder_tensor_model_parallel_size + ) + + # Make sure vision model pipeline parallel size is not inherited from the language model pipeline parallel size. + # 0 is not a valid for the config value, hence max(1, ). + vision_config.pipeline_model_parallel_size = max(1, args.encoder_pipeline_model_parallel_size) + vision_projection_config.pipeline_model_parallel_size = vision_config.pipeline_model_parallel_size + + # Make sure the vision model does not inherit first and last pipeline num layers from the language model. + vision_config.first_pipeline_num_layers = vision_config.last_pipeline_num_layers = None + + if vision_projection_config.normalization: + vision_projection_layer_spec = get_norm_mlp_module_spec_te().submodules + else: + vision_projection_layer_spec = get_mlp_module_spec(use_te=use_te).submodules + + # Toggle --recompute* for the vision and language model separately. + if args.recompute_vision: + if vision_config.recompute_method is not None and vision_config.recompute_granularity is not None: + vision_config.recompute_num_layers = vision_config.num_layers + else: + vision_config.recompute_granularity = None + vision_config.recompute_method = None + vision_config.recompute_num_layers = None + + vision_projection_config.recompute_granularity = None + vision_projection_config.recompute_method = None + vision_projection_config.recompute_num_layers = None + + # TODO: Vision model and projection do not use SP/CP yet. + vision_config.sequence_parallel = False + vision_config.context_parallel_size = 1 + vision_config.tp_comm_overlap = False + + vision_projection_config.sequence_parallel = False + vision_projection_config.context_parallel_size = 1 + vision_projection_config.tp_comm_overlap = False + + tokenizer = get_tokenizer() + image_token_index = tokenizer.convert_tokens_to_ids(IMAGE_TOKEN) + assert image_token_index is not None, f"IMAGE_TOKEN={IMAGE_TOKEN} needs to be added using the --special-tokens arg." 
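+    # NVLM-style tile tags (enabled with --use-tile-tags) wrap each image tile's tokens in a textual tag
+    # such as <tile_1>, <tile_2>, ..., with a separate tag for the global thumbnail tile.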
+ + tile_tags = _get_tile_tags(args, tokenizer) + + model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_transformer_layer_spec, + language_vocab_size=args.padded_vocab_size, + language_max_sequence_length=args.decoder_seq_length, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_transformer_layer_spec, + drop_vision_class_token=args.disable_vision_class_token, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_layer_spec, + vision_projection_type="mlp", + allow_missing_vision_projection_checkpoint=args.allow_missing_vision_projection_checkpoint, + parallel_output=parallel_output, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + language_position_embedding_type=args.position_embedding_type, + language_rotary_percent=args.rotary_percent, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + img_h=args.img_h, + img_w=args.img_w, + patch_dim=args.patch_dim, + language_rotary_base=args.rotary_base, + language_rope_scaling=args.use_rope_scaling, + image_token_index=image_token_index, + pixel_shuffle=args.pixel_shuffle, + tile_tags=tile_tags, + ) + + model.freeze( + freeze_language_model=args.freeze_LM, + freeze_vision_model=args.freeze_ViT, + freeze_vision_projection=False, + ) + + return model + + +def _get_tile_tags(args, tokenizer): + """Tile tags are used in NVLM to surround image tiles with text tags.""" + if not args.use_tile_tags: + return None + + # We expect the tokenized length of the tags is same. + thumbnail_tag_text = "" + if args.tokenizer_prompt_format == "nvlm-yi-34b": + thumbnail_tag_text = "" + + assert args.max_num_tiles <= 6, "Up to 6 tile tags used" + tile_tags_text = [f"" for i in range(1, args.max_num_tiles + 1)] + [thumbnail_tag_text] + + start_idx = 0 + if tokenizer._prompt_config.has_bos: + start_idx = 1 + + # Convert to tokens [num_tiles, tile_seq_len]. + tile_tags = [tokenizer.tokenize(t)[start_idx:] for t in tile_tags_text] + + return tile_tags diff --git a/examples/multimodal/model_converter/internvit_converter.py b/examples/multimodal/model_converter/internvit_converter.py index 48404c2..544e260 100644 --- a/examples/multimodal/model_converter/internvit_converter.py +++ b/examples/multimodal/model_converter/internvit_converter.py @@ -1,162 +1,162 @@ -import argparse -import os - -import torch -from transformers import AutoModel - - -def convert(model_name, output_path, tensor_parallel_size, use_te): - """Convert InternViT HF checkpoint to mcore.""" - hf_model = AutoModel.from_pretrained( - model_name, - trust_remote_code=True - ) - - hf_state_dict = hf_model.state_dict() - new_state_dicts = [{"model": dict()} for _ in range(tensor_parallel_size)] - - hidden_size = 3200 - num_heads = 25 - dim = 128 - - order = torch.ones(3 * hidden_size).long() - - for j in range(num_heads): - for i in range(dim): - order[i + dim*3*j] = j*dim+i - order[dim + i + dim*3*j] = j*dim+i+num_heads*dim - order[dim*2 + i + dim*3*j] = j*dim+i+num_heads*dim*2 - - for name, tensor in hf_state_dict.items(): - # Map parameter names to ones used in megatron. - new_name = "" - new_tensor = tensor - - # This is used for chunking some tensors to target tensor parallel size. 
- chunk_dim = None - - if "embeddings.class_embedding" in name: - new_name = "class_token" - elif "embeddings.patch_embedding.weight" in name: - new_name = "conv1.weight" - elif "embeddings.patch_embedding.bias" in name: - new_name = "conv1.bias" - elif "embeddings.position_embedding" in name: - new_name = "position_embeddings.weight" - new_tensor = new_tensor.squeeze(0) - elif "encoder.layers" in name: - layer_idx = name.split(".")[2] - - base = f"decoder.layers.{layer_idx}" - - head_dim = 128 - - if tensor_parallel_size == 1: - num_padded_heads = 25 - elif tensor_parallel_size == 8: - # Note: 25 is not divisible by 8 and we don't currently support uneven heads split with tensor parallelism. - # So we pad with dummy all-zero heads. Please use a nice even number of attention heads in your model. - num_padded_heads = 32 - else: - raise NotImplementedError("invalid tensor parallel size value:", tensor_parallel_size) - - if "ls1" in name: - new_name = f"{base}.ls1" - elif "ls2" in name: - new_name = f"{base}.ls2" - elif "attn.qkv.weight" in name: - new_name = f"{base}.self_attention.linear_qkv.weight" - num_tensors = 3 - padded_dim = head_dim * num_padded_heads * num_tensors - padded_tensor = torch.zeros((padded_dim, new_tensor.shape[-1]), dtype=new_tensor.dtype, device=new_tensor.device) - padded_tensor[:new_tensor.shape[0], :] = new_tensor[order] - new_tensor = padded_tensor - chunk_dim = 0 - elif "attn.q_norm.weight" in name: - new_name = f"{base}.self_attention.q_layernorm.weight" - num_tensors = 1 - padded_dim = head_dim * num_padded_heads * num_tensors - padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) - padded_tensor[:new_tensor.shape[0]] = new_tensor - new_tensor = padded_tensor - chunk_dim = 0 - elif "attn.k_norm.weight" in name: - new_name = f"{base}.self_attention.k_layernorm.weight" - num_tensors = 1 - padded_dim = head_dim * num_padded_heads * num_tensors - padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) - padded_tensor[:new_tensor.shape[0]] = new_tensor - new_tensor = padded_tensor - chunk_dim = 0 - elif "attn.proj.weight" in name: - new_name = f"{base}.self_attention.linear_proj.weight" - num_tensors = 1 - padded_dim = head_dim * num_padded_heads * num_tensors - padded_tensor = torch.zeros((new_tensor.shape[0], padded_dim), dtype=new_tensor.dtype, device=new_tensor.device) - padded_tensor[:, :new_tensor.shape[-1]] = new_tensor - new_tensor = padded_tensor - chunk_dim = 1 - elif "attn.proj.bias" in name: - new_name = f"{base}.self_attention.linear_proj.bias" - elif "mlp.fc1.weight" in name: - new_name = f"{base}.mlp.linear_fc1.weight" - chunk_dim = 0 - elif "mlp.fc1.bias" in name: - new_name = f"{base}.mlp.linear_fc1.bias" - chunk_dim = 0 - elif "mlp.fc2.weight" in name: - new_name = f"{base}.mlp.linear_fc2.weight" - chunk_dim = 1 - elif "mlp.fc2.bias" in name: - new_name = f"{base}.mlp.linear_fc2.bias" - elif "norm1" in name: - new_name = f"{base}.input_layernorm.weight" - elif "norm2" in name: - new_name = f"{base}.pre_mlp_layernorm.weight" - else: - raise RuntimeError("unexpected transformer layer name", name) - else: - raise RuntimeError("unexpected layer name", name) - - assert new_name != "", f"unexpected layer name {name}" - - # TE sets _extra_state (for FP8 purposes), so set an empty one here for compatibility. 
- extra_state_layers = ("linear_qkv", "linear_proj", "linear_fc1", "linear_fc2") - is_extra_state_layer = any([l in new_name for l in extra_state_layers]) - if use_te and is_extra_state_layer: - layer = new_name.split(".")[-2] - if layer in extra_state_layers: - extra_state_name = ( - new_name[: new_name.rfind(".") + 1] + "_extra_state" - ) # Replace the weight name. - for i in range(tensor_parallel_size): - new_state_dicts[i]["model"][extra_state_name] = None - - if chunk_dim is None: - new_tensors = [new_tensor for _ in range(tensor_parallel_size)] - else: - new_tensors = torch.chunk(new_tensor, tensor_parallel_size, dim=chunk_dim) - - for i in range(tensor_parallel_size): - new_state_dicts[i]["model"][new_name] = new_tensors[i].clone() - - for i in range(tensor_parallel_size): - output_dir_tp = os.path.join(output_path, f"iter_0000001/mp_rank_0{i}") - os.makedirs(output_dir_tp, exist_ok=True) - output_path_tp = os.path.join(output_dir_tp, "model_optim_rng.pt") - torch.save(new_state_dicts[i], output_path_tp) - print("saved file", output_path_tp) - - print("done") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="InternVIT HuggingFace to Mcore converter") - parser.add_argument("--model-name", type=str, default="OpenGVLab/InternViT-6B-448px-V1-5", help="Model name in HuggingFace") - parser.add_argument("--output-dir", type=str, required=True, help="Output directory for the mcore model.") - parser.add_argument("--use-te", action="store_true", default=True) - parser.add_argument("--tensor-parallel-size", type=int, required=True) - - args = parser.parse_args() - - convert(args.model_name, args.output_dir, args.tensor_parallel_size, args.use_te) +import argparse +import os + +import torch +from transformers import AutoModel + + +def convert(model_name, output_path, tensor_parallel_size, use_te): + """Convert InternViT HF checkpoint to mcore.""" + hf_model = AutoModel.from_pretrained( + model_name, + trust_remote_code=True + ) + + hf_state_dict = hf_model.state_dict() + new_state_dicts = [{"model": dict()} for _ in range(tensor_parallel_size)] + + hidden_size = 3200 + num_heads = 25 + dim = 128 + + order = torch.ones(3 * hidden_size).long() + + for j in range(num_heads): + for i in range(dim): + order[i + dim*3*j] = j*dim+i + order[dim + i + dim*3*j] = j*dim+i+num_heads*dim + order[dim*2 + i + dim*3*j] = j*dim+i+num_heads*dim*2 + + for name, tensor in hf_state_dict.items(): + # Map parameter names to ones used in megatron. + new_name = "" + new_tensor = tensor + + # This is used for chunking some tensors to target tensor parallel size. + chunk_dim = None + + if "embeddings.class_embedding" in name: + new_name = "class_token" + elif "embeddings.patch_embedding.weight" in name: + new_name = "conv1.weight" + elif "embeddings.patch_embedding.bias" in name: + new_name = "conv1.bias" + elif "embeddings.position_embedding" in name: + new_name = "position_embeddings.weight" + new_tensor = new_tensor.squeeze(0) + elif "encoder.layers" in name: + layer_idx = name.split(".")[2] + + base = f"decoder.layers.{layer_idx}" + + head_dim = 128 + + if tensor_parallel_size == 1: + num_padded_heads = 25 + elif tensor_parallel_size == 8: + # Note: 25 is not divisible by 8 and we don't currently support uneven heads split with tensor parallelism. + # So we pad with dummy all-zero heads. Please use a nice even number of attention heads in your model. 
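+                # With head_dim=128, 25 real heads give 3200 rows per Q/K/V; padding to 32 heads gives 4096,
+                # so each of the 8 tensor-parallel ranks holds exactly 4 (possibly dummy) heads.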
+ num_padded_heads = 32 + else: + raise NotImplementedError("invalid tensor parallel size value:", tensor_parallel_size) + + if "ls1" in name: + new_name = f"{base}.ls1" + elif "ls2" in name: + new_name = f"{base}.ls2" + elif "attn.qkv.weight" in name: + new_name = f"{base}.self_attention.linear_qkv.weight" + num_tensors = 3 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros((padded_dim, new_tensor.shape[-1]), dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0], :] = new_tensor[order] + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.q_norm.weight" in name: + new_name = f"{base}.self_attention.q_layernorm.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.k_norm.weight" in name: + new_name = f"{base}.self_attention.k_layernorm.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros(padded_dim, dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:new_tensor.shape[0]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 0 + elif "attn.proj.weight" in name: + new_name = f"{base}.self_attention.linear_proj.weight" + num_tensors = 1 + padded_dim = head_dim * num_padded_heads * num_tensors + padded_tensor = torch.zeros((new_tensor.shape[0], padded_dim), dtype=new_tensor.dtype, device=new_tensor.device) + padded_tensor[:, :new_tensor.shape[-1]] = new_tensor + new_tensor = padded_tensor + chunk_dim = 1 + elif "attn.proj.bias" in name: + new_name = f"{base}.self_attention.linear_proj.bias" + elif "mlp.fc1.weight" in name: + new_name = f"{base}.mlp.linear_fc1.weight" + chunk_dim = 0 + elif "mlp.fc1.bias" in name: + new_name = f"{base}.mlp.linear_fc1.bias" + chunk_dim = 0 + elif "mlp.fc2.weight" in name: + new_name = f"{base}.mlp.linear_fc2.weight" + chunk_dim = 1 + elif "mlp.fc2.bias" in name: + new_name = f"{base}.mlp.linear_fc2.bias" + elif "norm1" in name: + new_name = f"{base}.input_layernorm.weight" + elif "norm2" in name: + new_name = f"{base}.pre_mlp_layernorm.weight" + else: + raise RuntimeError("unexpected transformer layer name", name) + else: + raise RuntimeError("unexpected layer name", name) + + assert new_name != "", f"unexpected layer name {name}" + + # TE sets _extra_state (for FP8 purposes), so set an empty one here for compatibility. + extra_state_layers = ("linear_qkv", "linear_proj", "linear_fc1", "linear_fc2") + is_extra_state_layer = any([l in new_name for l in extra_state_layers]) + if use_te and is_extra_state_layer: + layer = new_name.split(".")[-2] + if layer in extra_state_layers: + extra_state_name = ( + new_name[: new_name.rfind(".") + 1] + "_extra_state" + ) # Replace the weight name. 
+ for i in range(tensor_parallel_size): + new_state_dicts[i]["model"][extra_state_name] = None + + if chunk_dim is None: + new_tensors = [new_tensor for _ in range(tensor_parallel_size)] + else: + new_tensors = torch.chunk(new_tensor, tensor_parallel_size, dim=chunk_dim) + + for i in range(tensor_parallel_size): + new_state_dicts[i]["model"][new_name] = new_tensors[i].clone() + + for i in range(tensor_parallel_size): + output_dir_tp = os.path.join(output_path, f"iter_0000001/mp_rank_0{i}") + os.makedirs(output_dir_tp, exist_ok=True) + output_path_tp = os.path.join(output_dir_tp, "model_optim_rng.pt") + torch.save(new_state_dicts[i], output_path_tp) + print("saved file", output_path_tp) + + print("done") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="InternVIT HuggingFace to Mcore converter") + parser.add_argument("--model-name", type=str, default="OpenGVLab/InternViT-6B-448px-V1-5", help="Model name in HuggingFace") + parser.add_argument("--output-dir", type=str, required=True, help="Output directory for the mcore model.") + parser.add_argument("--use-te", action="store_true", default=True) + parser.add_argument("--tensor-parallel-size", type=int, required=True) + + args = parser.parse_args() + + convert(args.model_name, args.output_dir, args.tensor_parallel_size, args.use_te) diff --git a/examples/multimodal/model_converter/radio_converter.py b/examples/multimodal/model_converter/radio_converter.py new file mode 100644 index 0000000..e681e3d --- /dev/null +++ b/examples/multimodal/model_converter/radio_converter.py @@ -0,0 +1,152 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import argparse +import os + +import torch + +def convert(output_path, tensor_parallel_size, use_te, version): + device = "cuda" + + model = torch.hub.load('NVlabs/RADIO', 'radio_model', version=version, progress=True) + + state_dict = model.state_dict() + new_state_dicts = [{"model": dict()} for _ in range(tensor_parallel_size)] + + # Indices from mapping pytorch multihead attention to megatron. + kv_channels = 80 + hidden_dim = 1280 + num_heads = 16 + indices = [] + for i in range(num_heads): + lb = i * kv_channels + ub = (i + 1) * kv_channels + indices.append(torch.arange(lb, ub, dtype=torch.int)) + indices.append(torch.arange(hidden_dim + lb, hidden_dim + ub, dtype=torch.int)) + indices.append(torch.arange(2 * hidden_dim + lb, 2 * hidden_dim + ub, dtype=torch.int)) + + indices = torch.cat(indices) + + for name, tensor in state_dict.items(): + # Map parameter names to ones used in megatron. + new_name = "" + new_tensor = tensor + if new_tensor.dtype == torch.float16: + new_tensor = new_tensor.to(torch.float32) + + # This is used for chunking some tensors to target tensor parallel size. 
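+        # chunk_dim=None replicates the tensor on every tensor-parallel rank; 0 splits the output dimension
+        # (column-parallel weights) and 1 splits the input dimension (row-parallel weights).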
+ chunk_dim = None + + if "summary_idxs" in name: + continue + elif "patch_generator" in name: + if "embedder" in name: + new_name = "embedder.weight" + chunk_dim = 0 + elif "cls_token" in name: + new_name = "class_token" + elif "pos_embed" in name: + new_name = "position_embeddings" + elif "input_conditioner" in name: + continue + elif "blocks" in name: + layer_idx = name.split(".")[2] + base = f"decoder.layers.{layer_idx}" + + if "attn.qkv.weight" in name: + new_name = f"{base}.self_attention.linear_qkv.weight" + new_tensor = new_tensor[indices] + chunk_dim = 0 + elif "attn.qkv.bias" in name: + new_name = f"{base}.self_attention.linear_qkv.bias" + new_tensor = new_tensor[indices] + chunk_dim = 0 + elif "attn.proj.weight" in name: + new_name = f"{base}.self_attention.linear_proj.weight" + chunk_dim = 1 + elif "attn.proj.bias" in name: + new_name = f"{base}.self_attention.linear_proj.bias" + elif "norm1.weight" in name: + new_name = f"{base}.input_layernorm.weight" + if use_te: + new_name = f"{base}.self_attention.linear_qkv.layer_norm_weight" + elif "norm1.bias" in name: + new_name = f"{base}.input_layernorm.bias" + if use_te: + new_name = f"{base}.self_attention.linear_qkv.layer_norm_bias" + elif "mlp.fc1.weight" in name: + new_name = f"{base}.mlp.linear_fc1.weight" + chunk_dim = 0 + elif "mlp.fc1.bias" in name: + new_name = f"{base}.mlp.linear_fc1.bias" + chunk_dim = 0 + elif "mlp.fc2.weight" in name: + new_name = f"{base}.mlp.linear_fc2.weight" + chunk_dim = 1 + elif "mlp.fc2.bias" in name: + new_name = f"{base}.mlp.linear_fc2.bias" + elif "norm2.weight" in name: + new_name = f"{base}.pre_mlp_layernorm.weight" + if use_te: + new_name = f"{base}.mlp.linear_fc1.layer_norm_weight" + elif "norm2.bias" in name: + new_name = f"{base}.pre_mlp_layernorm.bias" + if use_te: + new_name = f"{base}.mlp.linear_fc1.layer_norm_bias" + + assert new_name != "", f"unexpected layer name {name}" + + if chunk_dim is None: + new_tensors = [new_tensor for _ in range(tensor_parallel_size)] + else: + new_tensors = torch.chunk(new_tensor, tensor_parallel_size, dim=chunk_dim) + + for i in range(tensor_parallel_size): + # chunk() creates a view of a bigger tensor. clone() is used here to avoid excessive storage. + new_state_dicts[i]["model"][new_name] = new_tensors[i].clone() + + # TE sets _extra_state (for FP8 purposes), so set an empty one here for compatibility. + extra_state_layers = ("linear_qkv", "linear_proj", "linear_fc1", "linear_fc2") + is_extra_state_layer = any([l in new_name for l in extra_state_layers]) + if use_te and is_extra_state_layer: + layer = new_name.split(".")[-2] + if layer in extra_state_layers: + extra_state_name = ( + new_name[: new_name.rfind(".") + 1] + "_extra_state" + ) # Replace the weight name. + new_state_dicts[i]["model"][extra_state_name] = None + + for i in range(tensor_parallel_size): + output_dir_tp = os.path.join(output_path, "iter_0000001", f"mp_rank_0{i}") + os.makedirs(output_dir_tp) + output_path_tp = os.path.join(output_dir_tp, "model_optim_rng.pt") + torch.save(new_state_dicts[i], output_path_tp) + with open(os.path.join(output_path, "latest_checkpointed_iteration.txt"), "w") as f: + f.write("1") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=""" +Convert RADIO weights to megatron format. 
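+The converted checkpoint is written as iter_0000001/mp_rank_0<i>/model_optim_rng.pt together with a
+latest_checkpointed_iteration.txt file, which is the directory layout Megatron expects for --load.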
+ + +Example usage: +python radio_converter.py --output /some/output/folder --tensor-parallel-size 4 +""", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + + parser.add_argument( + "--output", type=str, required=True, help="output directory for megatron state dict file(s)" + ) + parser.add_argument( + "--tensor-parallel-size", type=int, default=1, help="model tensor parallel size" + ) + parser.add_argument("--use-te", action="store_true", help="Use Transformer Engine") + parser.add_argument("--version", type=str, default="radio_v2.5-h", help="Version of radio to load for conversion") + + args = parser.parse_args() + + convert(args.output, args.tensor_parallel_size, args.use_te, args.version) + + print("done.") diff --git a/examples/multimodal/multimodal_args.py b/examples/multimodal/multimodal_args.py index eb56118..22fadc9 100644 --- a/examples/multimodal/multimodal_args.py +++ b/examples/multimodal/multimodal_args.py @@ -1,79 +1,89 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN - - -def add_multimodal_extra_args(parser): - """Extra arguments.""" - group = parser.add_argument_group(title='multimodal arguments') - group.add_argument('--dataset-config', type=str, default=None) - group.add_argument("--prompt-path", type=str, default=None) - group.add_argument('--freeze-LM', action='store_true', default=False) - group.add_argument('--freeze-ViT', action='store_true', default=False) - group.add_argument('--language-model-type', type=str, required=True) - group.add_argument('--vision-model-type', type=str, default="clip") - group.add_argument("--disable-vision-class-token", action="store_true", default=False) - group.add_argument( - "--allow-missing-vision-projection-checkpoint", action="store_true", default=False - ) - group.add_argument("--use-te", action="store_true", default=False) - group.add_argument( - "--dataloader-save", type=str, default=None, help="Energon dataloader state save path" - ) - group.add_argument( - "--use-tiling", action="store_true", default=False, help="Use input image tiling" - ) - group.add_argument("--max-num-tiles", type=int, default=1, help="Maximum number of image tiles") - group.add_argument( - "--use-thumbnail", action="store_true", default=False, help="Add image thumbnail as a tile" - ) - group.add_argument( - "--dataloader-seq-length", - type=int, - help="Make dataloader to produce sequences of specific length.", - ) - group.add_argument( - "--num-frames", - type=int, - default=1, - help="Number of frames to regularly sample from the video as input to the model.", - ) - group.add_argument( - "--online-evaluation-config", type=str, help="Config file for online evaluation." - ) - group.add_argument( - "--special-tokens", - nargs="*", - default=[IMAGE_TOKEN], - help="Special tokens used in the multimodal model", - ) - group.add_argument( - "--tokenizer-prompt-format", - type=str, - choices=["mistral", "llama3", "chatml", "nvlm-yi-34b", "qwen2p0", "qwen2p5"], - required=True, - help="Prompt format to use with the tokenizer.", - ) - group.add_argument("--pixel-shuffle", action="store_true", default=False) - group.add_argument( - "--image-tag-type", - type=str, - choices=["nvlm", "internvl", ""], - default="", # Default: Image tag not used. 
- help="Surround image tokens with tags.", - ) - group.add_argument("--use-tile-tags", action="store_true", default=False, help="Use tile tags") - group.add_argument( - "--packing-buffer-size", - type=int, - default=None, # Packing is disabled by default. - help="Enable sample packing by setting the buffer size to > 0", - ) - group.add_argument( - "--packing-seq-length", type=int, default=0, help="Packing sequence length. Must be > 0 if using packing." - ) - group.add_argument( - "--recompute-vision", action="store_true", default=False, help="Enable activation checkpointing in the vision model" - ) - - - return parser +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN + + +def add_multimodal_extra_args(parser): + """Extra arguments.""" + group = parser.add_argument_group(title='multimodal arguments') + group.add_argument('--dataset-config', type=str, default=None) + group.add_argument("--prompt-path", type=str, default=None) + group.add_argument('--freeze-LM', action='store_true', default=False) + group.add_argument('--freeze-ViT', action='store_true', default=False) + group.add_argument('--language-model-type', type=str, required=True) + group.add_argument('--language-huggingface-model-name-or-path', type=str) + group.add_argument('--vision-model-type', type=str, default="clip") + group.add_argument('--vision-huggingface-model-name-or-path', type=str) + group.add_argument("--disable-vision-class-token", action="store_true", default=False) + group.add_argument( + "--allow-missing-vision-projection-checkpoint", action="store_true", default=False + ) + group.add_argument("--use-te", action="store_true", default=False) + group.add_argument( + "--dataloader-save", type=str, default=None, help="Energon dataloader state save path" + ) + group.add_argument( + "--use-tiling", action="store_true", default=False, help="Use input image tiling" + ) + group.add_argument("--max-num-tiles", type=int, default=1, help="Maximum number of image tiles") + group.add_argument( + "--use-thumbnail", action="store_true", default=False, help="Add image thumbnail as a tile" + ) + group.add_argument( + "--dataloader-seq-length", + type=int, + help="Make dataloader to produce sequences of specific length.", + ) + group.add_argument( + "--num-frames", + type=int, + default=1, + help="Number of frames to regularly sample from the video as input to the model.", + ) + group.add_argument( + "--online-evaluation-config", type=str, help="Config file for online evaluation." + ) + group.add_argument( + "--special-tokens", + nargs="*", + default=[IMAGE_TOKEN], + help="Special tokens used in the multimodal model", + ) + group.add_argument( + "--tokenizer-prompt-format", + type=str, + choices=["mistral", "llama3", "llama3p1", "chatml", "nvlm-yi-34b", "qwen2p0", "qwen2p5"], + required=True, + help="Prompt format to use with the tokenizer.", + ) + group.add_argument("--pixel-shuffle", action="store_true", default=False) + group.add_argument( + "--image-tag-type", + type=str, + choices=["nvlm", "internvl", ""], + default="", # Default: Image tag not used. + help="Surround image tokens with tags.", + ) + group.add_argument("--use-tile-tags", action="store_true", default=False, help="Use tile tags") + group.add_argument( + "--packing-buffer-size", + type=int, + default=None, # Packing is disabled by default. 
+ help="Enable sample packing by setting the buffer size to > 0", + ) + group.add_argument( + "--packing-seq-length", type=int, default=0, help="Packing sequence length. Must be > 0 if using packing." + ) + group.add_argument( + "--recompute-vision", action="store_true", default=False, help="Enable activation checkpointing in the vision model" + ) + group.add_argument( + "--use-loss-scaling", action="store_true", default=False, help="Scale loss based on conversation turn length (in tokens)." + ) + group.add_argument( + "--use-area-weighted-aspect-ratio", action="store_true", default=False, + help=( + "When --use-tiling is True, find the aspect ratio to use based on the original ", + "image aspect ratio and the area covered by the tiles.") + ) + + return parser diff --git a/examples/multimodal/nvlm/internvit.py b/examples/multimodal/nvlm/internvit.py index cd116ff..ac560ed 100644 --- a/examples/multimodal/nvlm/internvit.py +++ b/examples/multimodal/nvlm/internvit.py @@ -1,273 +1,279 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -"""" -NOTE: NVLM uses InternViT with tensor parallel (TP) size = 8. -Since InternViT has 25 attention heads and Megatron currently requires the number of attention heads -to be divisible by the TP size, we add 7 dummy zero attention heads to have 32 attention heads. - -This workaround requires some changes to how we compute RMSNorm, Attention etc. - -Additionally, InternViT introduces some unique features like Layer Scaling. - -Those code changes are gathered here. -""" -from functools import partial -from typing import Dict - -import torch - -from megatron.core.dist_checkpointing.mapping import ShardedStateDict -from megatron.core.extensions.transformer_engine import ( - TEColumnParallelLinear, - TEDotProductAttention, - TERowParallelLinear, -) -from megatron.core.parallel_state import ( - get_tensor_model_parallel_group, - get_tensor_model_parallel_rank, - get_tensor_model_parallel_world_size, -) -from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.dot_product_attention import DotProductAttention -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.mlp import MLP, MLPSubmodules -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.spec_utils import ModuleSpec, build_module -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules -from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint - - -class InternViTRMSNorm(MegatronModule): - - def __init__( - self, - config, - hidden_size: int, - eps: float = 1e-6, - sequence_parallel: bool = False, - compute_var: bool = False, - ): - """Custom RMSNorm for InternViT. - - Args: - config (TransformerConfig): Config. - hidden_size (int): Input hidden size. - eps (float): epsilon to use for the norm, default to 1e-6 - sequence_parallel (bool): Set to true if sequence parallelism is being used, - this marks the weights as needing to be allreduced. - compute_var (bool): Indicator to compute statistic manually. 
- """ - super().__init__(config=config) - self.config = config - self.eps = eps - self.weight = torch.nn.Parameter(torch.ones(hidden_size)) - self._compute_var = compute_var - - assert not sequence_parallel, "Sequence parallelism is not supported with InternViT." - - setattr(self.weight, 'sequence_parallel', sequence_parallel) - - def _norm(self, x, var): - if var is None: - var = x.pow(2).mean(-1, keepdim=True) - - return x * torch.rsqrt(var + self.eps) - - def forward(self, x): - """Run RMSNorm with an option to compute custom statistic.""" - var = None - if self._compute_var: - unpadded_hidden_size = self.config.hidden_size # 3200 - max_dim = x.shape[-1] # 128 - - x = x.reshape(x.size(0), x.size(1), -1) - var = self._gather_var(x.float().pow(2), max_dim) / unpadded_hidden_size - - output = self._norm(x.float(), var).type_as(x) - output = output * self.weight - - if self._compute_var: - output = output.reshape(output.size(0), output.size(1), -1, max_dim) - - return output - - def _gather_var(self, input_, max_dim, valid_ranks=6): - """Compute statistic across the non-dummy heads.""" - world_size = get_tensor_model_parallel_world_size() - assert world_size == 8, "tested only with TP=8" - - # Size and dimension. - last_dim = input_.dim() - 1 - rank = get_tensor_model_parallel_rank() - - if rank < valid_ranks: # Ranks 0-5 have 24 non-dummy attention heads. - var = input_.sum(-1, keepdim=True) - elif rank == valid_ranks: # Rank 6 has 1 non-dummy attention head. - var = input_[..., :max_dim].sum(-1, keepdim=True) - else: - var = input_.sum(-1, keepdim=True) * 0.0 # Zero-out the dummy heads. - - tensor_list = [torch.empty_like(var) for _ in range(world_size)] - tensor_list[rank] = var - torch.distributed.all_gather(tensor_list, var, group=get_tensor_model_parallel_group()) - - output = torch.cat(tensor_list, dim=last_dim).contiguous() - - return output.sum(-1, keepdim=True) - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata={}): - - # in InternVitSelfAttention the q_layernorm and k_layernorm weights - # are tensor-parallel so must be converted to sharded tensors - if 'q_layernorm' in prefix or 'k_layernorm' in prefix: - state_dict = self.state_dict(prefix='', keep_vars=True) - return make_sharded_tensors_for_checkpoint( - state_dict, prefix, {'weight': 0}, sharded_offsets - ) - else: - return super().sharded_state_dict(prefix, sharded_offsets, metadata) - - -def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec: - # Dense MLP w/ or w/o TE modules. - return ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, - linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, - ), - ) - - -# Handle InternViT's layer scaling. 
-def _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training): - x, bias = x_with_bias # unpack - residual = residual if residual.dtype == x.dtype else residual.to(x.dtype) - if bias is not None: - x = x + bias - out = torch.nn.functional.dropout(x, p=prob, training=training) - out = residual + out * ls - return out - else: - out = torch.nn.functional.dropout(x, p=prob, training=training) - out = residual + out * ls - return out - - -def bias_dropout_add_unfused_internvit(ls, training): - """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" - - def _bias_dropout_add(x_with_bias, residual, prob): - return _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training) - - return _bias_dropout_add - - -def get_bias_dropout_add_internvit(ls, training, fused): - """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" - assert not fused, "Fused bias-dropout-add not implemented for InternViT." - return bias_dropout_add_unfused_internvit(ls, training) - - -# Add InternViT specialties to our default TransformerLayer. -class InternViTTransformerLayer(TransformerLayer): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ls1 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) - self.ls2 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) - - self.self_attn_bda = partial(self.self_attn_bda, self.ls1) - self.mlp_bda = partial(self.mlp_bda, self.ls2) - - -# Override a few things that are special in InternViT and not supported by the SelfAttention class. -class InternViTSelfAttention(SelfAttention): - def __init__( - self, config: TransformerConfig, submodules: SelfAttentionSubmodules, *args, **kwargs - ): - super().__init__(config=config, submodules=submodules, *args, **kwargs) - - # Need to override linear_qkv, q_layernorm and k_layernorm. - qkv_bias = False - - self.linear_qkv = build_module( - submodules.linear_qkv, - self.config.hidden_size, - self.query_projection_size + 2 * self.kv_projection_size, - config=self.config, - init_method=self.config.init_method, - gather_output=False, - bias=qkv_bias, - skip_bias_add=False, - is_expert=False, - tp_comm_buffer_name='qkv', - ) - - qk_layernorm_hidden_size = ( - self.hidden_size_per_attention_head * self.num_attention_heads_per_partition - ) # 512 for internvit - - self.q_layernorm = build_module( - submodules.q_layernorm, - hidden_size=qk_layernorm_hidden_size, - config=self.config, - eps=self.config.layernorm_epsilon, - compute_var=True, - ) - - self.k_layernorm = build_module( - submodules.k_layernorm, - hidden_size=qk_layernorm_hidden_size, - config=self.config, - eps=self.config.layernorm_epsilon, - compute_var=True, - ) - - -class InternViTTEDotProductAttention(TEDotProductAttention): - """Adjusted Attention for InternViT""" - - def forward(self, *args, **kwargs): - """Regular TEDotProductAttention + zero-out dummy attention heads.""" - out = super().forward(*args, **kwargs) - - # This makes sure the dummy attention heads are zeroed out. 
- mask = torch.ones_like(out, dtype=out.dtype, device=out.device) - rank = get_tensor_model_parallel_rank() - max_dim = out.shape[-1] # 128 - valid_ranks = 6 - - if rank == valid_ranks: - mask[..., max_dim:] *= 0.0 - elif rank > valid_ranks: - mask *= 0.0 - out *= mask - - return out - - -def get_internvit_layer_spec(use_te) -> ModuleSpec: - mlp = get_mlp_module_spec(use_te) # no norm - - return ModuleSpec( - module=InternViTTransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=InternViTRMSNorm, - self_attention=ModuleSpec( - module=InternViTSelfAttention, - params={"attn_mask_type": AttnMaskType.no_mask}, - submodules=SelfAttentionSubmodules( - linear_qkv=TEColumnParallelLinear if use_te else ColumnParallelLinear, - core_attention=TEDotProductAttention if use_te else DotProductAttention, - linear_proj=TERowParallelLinear if use_te else RowParallelLinear, - q_layernorm=InternViTRMSNorm, - k_layernorm=InternViTRMSNorm, - ), - ), - self_attn_bda=get_bias_dropout_add_internvit, - pre_mlp_layernorm=InternViTRMSNorm, - mlp=mlp, - mlp_bda=get_bias_dropout_add_internvit, - ), - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""" +NOTE: NVLM uses InternViT with tensor parallel (TP) size = 8. +Since InternViT has 25 attention heads and Megatron currently requires the number of attention heads +to be divisible by the TP size, we add 7 dummy zero attention heads to have 32 attention heads. + +This workaround requires some changes to how we compute RMSNorm, Attention etc. + +Additionally, InternViT introduces some unique features like Layer Scaling. + +Those code changes are gathered here. +""" +from functools import partial + +import torch + +from megatron.core.utils import divide +from megatron.core.extensions.transformer_engine import ( + TEColumnParallelLinear, + TEDotProductAttention, + TERowParallelLinear, +) +from megatron.core.parallel_state import ( + get_tensor_model_parallel_group, + get_tensor_model_parallel_rank, + get_tensor_model_parallel_world_size, +) +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules +from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint + + +class InternViTRMSNorm(MegatronModule): + + def __init__( + self, + config, + hidden_size: int, + eps: float = 1e-6, + sequence_parallel: bool = False, + compute_var: bool = False, + ): + """Custom RMSNorm for InternViT. + + Args: + config (TransformerConfig): Config. + hidden_size (int): Input hidden size. + eps (float): epsilon to use for the norm, default to 1e-6 + sequence_parallel (bool): Set to true if sequence parallelism is being used, + this marks the weights as needing to be allreduced. + compute_var (bool): Indicator to compute statistic manually. 
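+ Note: when compute_var is True, only the non-dummy attention heads contribute
+ to the variance statistic (see _gather_var below).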
+ """ + super().__init__(config=config) + self.config = config + self.eps = eps + self.weight = torch.nn.Parameter(torch.ones(hidden_size)) + self._compute_var = compute_var + + assert not sequence_parallel, "Sequence parallelism is not supported with InternViT." + + setattr(self.weight, 'sequence_parallel', sequence_parallel) + + def _norm(self, x, var): + if var is None: + var = x.pow(2).mean(-1, keepdim=True) + + return x * torch.rsqrt(var + self.eps) + + def forward(self, x): + """Run RMSNorm with an option to compute custom statistic.""" + var = None + if self._compute_var: + unpadded_hidden_size = self.config.hidden_size # 3200 + max_dim = x.shape[-1] # 128 + + x = x.reshape(x.size(0), x.size(1), -1) + var = self._gather_var(x.float().pow(2), max_dim) / unpadded_hidden_size + + output = self._norm(x.float(), var).type_as(x) + output = output * self.weight + + if self._compute_var: + output = output.reshape(output.size(0), output.size(1), -1, max_dim) + + return output + + def _gather_var(self, input_, max_dim): + """Compute statistic across the non-dummy heads.""" + world_size = get_tensor_model_parallel_world_size() + + # Size and dimension. + last_dim = input_.dim() - 1 + rank = get_tensor_model_parallel_rank() + + num_attention_heads_per_partition = divide(self.config.num_attention_heads, world_size) + valid_ranks = 24 // num_attention_heads_per_partition + + residual_heads = 25 % num_attention_heads_per_partition + if residual_heads == 0: + residual_heads = num_attention_heads_per_partition + max_dim = max_dim * residual_heads + + if rank < valid_ranks: # Ranks without any dummy attention heads. + var = input_.sum(-1, keepdim=True) + elif rank == valid_ranks: # The only rank which may contain 'residual_heads' dummy attention heads. + var = input_[..., :max_dim].sum(-1, keepdim=True) + else: + var = input_.sum(-1, keepdim=True) * 0.0 # All heads in these ranks are dummy heads: Zero-out. + + tensor_list = [torch.empty_like(var) for _ in range(world_size)] + tensor_list[rank] = var + torch.distributed.all_gather(tensor_list, var, group=get_tensor_model_parallel_group()) + + output = torch.cat(tensor_list, dim=last_dim).contiguous() + + return output.sum(-1, keepdim=True) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata={}): + + # in InternVitSelfAttention the q_layernorm and k_layernorm weights + # are tensor-parallel so must be converted to sharded tensors + if 'q_layernorm' in prefix or 'k_layernorm' in prefix: + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint( + state_dict, prefix, {'weight': 0}, sharded_offsets + ) + else: + return super().sharded_state_dict(prefix, sharded_offsets, metadata) + + +def get_mlp_module_spec(use_te: bool = True) -> ModuleSpec: + # Dense MLP w/ or w/o TE modules. + return ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, + linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, + ), + ) + + +# Handle InternViT's layer scaling. 
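+# LayerScale multiplies each residual branch output by a learned per-channel vector
+# (ls1 for self-attention, ls2 for the MLP in InternViTTransformerLayer below); the
+# helpers here thread that vector through the bias-dropout-add path.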
+def _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training): + x, bias = x_with_bias # unpack + residual = residual if residual.dtype == x.dtype else residual.to(x.dtype) + if bias is not None: + x = x + bias + out = torch.nn.functional.dropout(x, p=prob, training=training) + out = residual + out * ls + return out + else: + out = torch.nn.functional.dropout(x, p=prob, training=training) + out = residual + out * ls + return out + + +def bias_dropout_add_unfused_internvit(ls, training): + """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" + + def _bias_dropout_add(x_with_bias, residual, prob): + return _bias_dropout_add_func_internvit(ls, x_with_bias, residual, prob, training) + + return _bias_dropout_add + + +def get_bias_dropout_add_internvit(ls, training, fused): + """Bias-dropout-add as in Megatron but with added LayerScaling handling.""" + assert not fused, "Fused bias-dropout-add not implemented for InternViT." + return bias_dropout_add_unfused_internvit(ls, training) + + +# Add InternViT specialties to our default TransformerLayer. +class InternViTTransformerLayer(TransformerLayer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.ls1 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) + self.ls2 = torch.nn.Parameter(torch.ones(self.config.hidden_size)) + + self.self_attn_bda = partial(self.self_attn_bda, self.ls1) + self.mlp_bda = partial(self.mlp_bda, self.ls2) + + +# Override a few things that are special in InternViT and not supported by the SelfAttention class. +class InternViTSelfAttention(SelfAttention): + def __init__( + self, config: TransformerConfig, submodules: SelfAttentionSubmodules, *args, **kwargs + ): + super().__init__(config=config, submodules=submodules, *args, **kwargs) + + # Need to override linear_qkv, q_layernorm and k_layernorm. + qkv_bias = False + + self.linear_qkv = build_module( + submodules.linear_qkv, + self.config.hidden_size, + self.query_projection_size + 2 * self.kv_projection_size, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=qkv_bias, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='qkv', + ) + + qk_layernorm_hidden_size = ( + self.hidden_size_per_attention_head * self.num_attention_heads_per_partition + ) # 512 for internvit + + self.q_layernorm = build_module( + submodules.q_layernorm, + hidden_size=qk_layernorm_hidden_size, + config=self.config, + eps=self.config.layernorm_epsilon, + compute_var=True, + ) + + self.k_layernorm = build_module( + submodules.k_layernorm, + hidden_size=qk_layernorm_hidden_size, + config=self.config, + eps=self.config.layernorm_epsilon, + compute_var=True, + ) + + +class InternViTTEDotProductAttention(TEDotProductAttention): + """Adjusted Attention for InternViT""" + + def forward(self, *args, **kwargs): + """Regular TEDotProductAttention + zero-out dummy attention heads.""" + out = super().forward(*args, **kwargs) + + # This makes sure the dummy attention heads are zeroed out. 
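+ # With TP=8 the 32 padded heads are split 4 per rank: ranks 0-5 hold only real
+ # heads, rank 6 holds one real head (the first max_dim channels of the last dim)
+ # plus dummies, and ranks above 6 hold only dummy heads, so they are masked out.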
+ mask = torch.ones_like(out, dtype=out.dtype, device=out.device) + rank = get_tensor_model_parallel_rank() + max_dim = out.shape[-1] # 128 + valid_ranks = 6 + + if rank == valid_ranks: + mask[..., max_dim:] *= 0.0 + elif rank > valid_ranks: + mask *= 0.0 + out *= mask + + return out + + +def get_internvit_layer_spec(use_te) -> ModuleSpec: + mlp = get_mlp_module_spec(use_te) # no norm + + return ModuleSpec( + module=InternViTTransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=InternViTRMSNorm, + self_attention=ModuleSpec( + module=InternViTSelfAttention, + params={"attn_mask_type": AttnMaskType.no_mask}, + submodules=SelfAttentionSubmodules( + linear_qkv=TEColumnParallelLinear if use_te else ColumnParallelLinear, + core_attention=TEDotProductAttention if use_te else DotProductAttention, + linear_proj=TERowParallelLinear if use_te else RowParallelLinear, + q_layernorm=InternViTRMSNorm, + k_layernorm=InternViTRMSNorm, + ), + ), + self_attn_bda=get_bias_dropout_add_internvit, + pre_mlp_layernorm=InternViTRMSNorm, + mlp=mlp, + mlp_bda=get_bias_dropout_add_internvit, + ), + ) diff --git a/examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/pretrain_qwen20_72b_internvit_6b.sh old mode 100644 new mode 100755 diff --git a/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh b/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh old mode 100644 new mode 100755 index 00f9435..a9ba430 --- a/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh +++ b/examples/multimodal/nvlm/pretrain_yi_34b_internvit_6b.sh @@ -1,154 +1,155 @@ -#!/bin/bash - -# Your SBATCH commands here if using SLURM. - -# Please launch this script from megatron-lm root. - -# Train a multimodal model. - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export TOKENIZERS_PARALLELISM="false" - -DEBUG=0 - -if [[ $BATCH -eq 0 ]]; then - DATETIME=`date +'%y-%m-%d-%H-%M-%S'` - MODEL_NAME="mcore-nous-yi34b-internvit-mlp-${DATETIME}" -else - MODEL_NAME="mcore-nous-yi34b-internvit-mlp" -fi - -WORKSPACE="" -SOURCE=`pwd` -OUTPUT_BASE="${WORKSPACE}/output" -OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" - -FINETUNE_DIR=${OUTPUT}/checkpoints -LOGS_DIR="${OUTPUT}/logs" -TENSORBOARD_DIR="${OUTPUT}/tensorboard" - -LOAD_NAME="combined-yi-34b-internvit-tp8-mcore" -CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}" - -DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/pretrain_blend.yaml" - - -if [[ $DEBUG -eq 1 ]]; then - MBZ=1 - BZ=1 - NW=0 - LI=1 - AD=0.0 - HD=0.0 - EXTRA_ARGS="" - ALLOW_NONDETERMINISTIC=1 -else - MBZ=1 - BZ=2048 - NW=8 - LI=5 - AD=0.1 - HD=0.1 - EXTRA_ARGS="" - ALLOW_NONDETERMINISTIC=1 -fi - -SEQ_LEN=256 # Image embeddings sequence length. -DECODER_SEQ_LEN=512 # Language model sequence length. 
-MAX_POS_EMBED=512 - - -OPTIONS=" \ - --swiglu \ - --use-distributed-optimizer \ - --num-workers ${NW} \ - --num-layers 60 \ - --hidden-size 7168 \ - --normalization RMSNorm \ - --num-attention-heads 56 \ - --exit-duration-in-mins 230 \ - --group-query-attention \ - --num-query-groups 8 \ - --ffn-hidden-size 20480 \ - --seq-length ${SEQ_LEN} \ - --decoder-seq-length ${DECODER_SEQ_LEN} \ - --max-position-embeddings ${MAX_POS_EMBED} \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model NousResearch/Nous-Hermes-2-Yi-34B \ - --tokenizer-prompt-format nvlm-yi-34b \ - --vocab-size 64000 \ - --make-vocab-size-divisible-by 1 \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 5000000 \ - --disable-bias-linear \ - --tensor-model-parallel-size 8 \ - --language-model-type yi-34b \ - --vision-model-type internvit \ - --micro-batch-size ${MBZ} \ - --global-batch-size ${BZ} \ - --train-samples 122880000 \ - --lr-decay-samples 25600000 \ - --lr-warmup-samples 83200 \ - --lr 1e-4 \ - --min-lr 2.5e-5 \ - --lr-decay-style cosine \ - --clip-grad 10.0 \ - --weight-decay 0.1 \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --init-method-std 0.014 \ - --attention-dropout ${AD} \ - --hidden-dropout ${HD} \ - --eod-mask-loss \ - --bf16 \ - --tensorboard-dir=${TENSORBOARD_DIR} \ - --freeze-LM \ - --freeze-ViT \ - --img-h 448 \ - --img-w 448 \ - --patch-dim 14 \ - --data-path ${DATA_TRAIN} \ - --dataloader-type external \ - --split 100,0,0 \ - --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ - --log-interval ${LI} \ - --save-interval 2000 \ - --eval-interval 500 \ - --eval-iters 10 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - ${EXTRA_ARGS} \ - --save ${FINETUNE_DIR} \ - --load ${FINETUNE_DIR} \ - --dataloader-save ${FINETUNE_DIR}/dataloader \ - --pretrained-checkpoint ${CHECKPOINT_DIR} \ - --allow-missing-vision-projection-checkpoint \ - --disable-vision-class-token \ - --use-te \ - --use-checkpoint-args \ - --ckpt-format torch \ - --pixel-shuffle \ - --image-tag-type nvlm - " - -export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} -export NVTE_APPLY_QK_LAYER_SCALING=0 - -# Interactive or batch mode -if [[ $BATCH -eq 0 ]]; then - torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} -else - run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" - - DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` - - srun -l --verbose \ - --container-image \ - --container-mounts "" \ - --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ - sh -c "${run_cmd}" - - set +x -fi +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. 
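+# BATCH selects the launch mode at the bottom of this script: 0 (or unset) runs
+# interactively with torchrun, any other value submits the job via srun.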
+ +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export TOKENIZERS_PARALLELISM="false" + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-${DATETIME}" +else + MODEL_NAME="mcore-nous-yi34b-internvit-mlp" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +LOAD_NAME="combined-yi-34b-internvit-tp8-mcore" +CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/pretrain_blend.yaml" + + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + LI=1 + AD=0.0 + HD=0.0 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +else + MBZ=1 + BZ=2048 + NW=8 + LI=5 + AD=0.1 + HD=0.1 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +fi + +SEQ_LEN=256 # Image embeddings sequence length. +DECODER_SEQ_LEN=512 # Language model sequence length. +MAX_POS_EMBED=512 + + +OPTIONS=" \ + --swiglu \ + --use-distributed-optimizer \ + --num-workers ${NW} \ + --num-layers 60 \ + --hidden-size 7168 \ + --normalization RMSNorm \ + --num-attention-heads 56 \ + --exit-duration-in-mins 230 \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 20480 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model NousResearch/Nous-Hermes-2-Yi-34B \ + --tokenizer-prompt-format nvlm-yi-34b \ + --vocab-size 64000 \ + --make-vocab-size-divisible-by 1 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 5000000 \ + --disable-bias-linear \ + --tensor-model-parallel-size 8 \ + --language-model-type yi-34b \ + --vision-model-type internvit \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --train-samples 122880000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --lr 1e-4 \ + --min-lr 2.5e-5 \ + --lr-decay-style cosine \ + --clip-grad 10.0 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --untie-embeddings-and-output-weights \ + --eod-mask-loss \ + --bf16 \ + --tensorboard-dir=${TENSORBOARD_DIR} \ + --freeze-LM \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --data-path ${DATA_TRAIN} \ + --dataloader-type external \ + --split 100,0,0 \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --log-interval ${LI} \ + --save-interval 2000 \ + --eval-interval 500 \ + --eval-iters 10 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + ${EXTRA_ARGS} \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --allow-missing-vision-projection-checkpoint \ + --disable-vision-class-token \ + --use-te \ + --use-checkpoint-args \ + --ckpt-format torch \ + --pixel-shuffle \ + --image-tag-type nvlm + " + +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} +export NVTE_APPLY_QK_LAYER_SCALING=0 + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + 
--output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh old mode 100644 new mode 100755 index e3b001c..165682e --- a/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh +++ b/examples/multimodal/nvlm/run_text_generation_qwen20_72b_internvit_6b.sh @@ -1,141 +1,141 @@ -#!/bin/bash - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NVTE_APPLY_QK_LAYER_SCALING=0 -export TOKENIZERS_PARALLELISM="false" - -INPUT_IMAGE_PATH="placeholder" -GROUNDTRUTH_PATH="placeholder" - -USE_TILING=0 -USE_PIXEL_SHUFFLE_ONLY=0 - -while [[ $# -gt 0 ]]; do - case $1 in - --input-image-path) - INPUT_IMAGE_PATH="$2" - shift - shift - ;; - -o|--output-path) - OUTPUT_PATH="$2" - shift - shift - ;; - -m|--model-path) - MODEL_PATH="$2" - shift - shift - ;; - --task) - TASK="$2" - shift - shift - ;; - -g|--gt-path) - GROUNDTRUTH_PATH="$2" - shift - shift - ;; - --use-tiling) - USE_TILING=1 - shift - shift - ;; - --use-pixel-shuffle-only) - USE_PIXEL_SHUFFLE_ONLY=1 - shift - shift - ;; - -*|--*) - echo "Invalid option $1" - exit 1 - ;; - esac -done - -# Please modify these as needed. -NUM_PARTITIONS=0 -START=0 -END=0 - -SEQ_LEN=1024 # Image embeddings sequence length. -DECODER_SEQ_LEN=8192 # Language model sequence length. -MAX_POS_EMBED=8192 - -# Additional arguments. -EXTRA_ARGS="" - -if [[ $USE_TILING -eq 1 ]]; then - EXTRA_ARGS+=" --pixel-shuffle --use-tiling --max-num-tiles 6 --use-thumbnail --use-tile-tags" - SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). -fi - -if [[ $USE_PIXEL_SHUFFLE_ONLY -eq 1 ]]; then - EXTRA_ARGS+=" --pixel-shuffle" - SEQ_LEN=256 -fi - -for PARTITION_ID in $( eval echo {$START..$END} ) -do - torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ - --attention-softmax-in-fp32 \ - --no-masked-softmax-fusion \ - --swiglu \ - --num-layers 80 \ - --hidden-size 8192 \ - --normalization RMSNorm \ - --norm-epsilon 1e-06 \ - --num-attention-heads 64 \ - --exit-on-missing-checkpoint \ - --group-query-attention \ - --num-query-groups 8 \ - --ffn-hidden-size 29568 \ - --load ${MODEL_PATH} \ - --seq-length ${SEQ_LEN} \ - --decoder-seq-length ${DECODER_SEQ_LEN} \ - --max-position-embeddings ${MAX_POS_EMBED} \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model Qwen/Qwen2-72B-Instruct \ - --tokenizer-prompt-format qwen2p0 \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --disable-bias-linear \ - --add-qkv-bias \ - --tensor-model-parallel-size 8 \ - --pipeline-model-parallel-size 1 \ - --language-model-type qwen2.0_72B \ - --vision-model-type internvit \ - --micro-batch-size 1 \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --bf16 \ - --freeze-LM \ - --freeze-ViT \ - --img-h 448 \ - --img-w 448 \ - --patch-dim 14 \ - --use-te \ - --transformer-impl transformer_engine \ - --use-checkpoint-args \ - --out-seq-length 16 \ - --temperature 1.0 \ - --patch-dim 14 \ - --seed 1234 \ - --top_k 1 \ - --no-load-rng \ - --no-load-optim \ - --num-partitions ${NUM_PARTITIONS} \ - --partition-id ${PARTITION_ID} \ - --output-path ${OUTPUT_PATH} \ - --gt-path ${GROUNDTRUTH_PATH} \ - --disable-vision-class-token \ - --input-image-path ${INPUT_IMAGE_PATH} \ - --gt-path ${GROUNDTRUTH_PATH} \ - ${EXTRA_ARGS} \ - --task ${TASK} \ - --image-tag-type nvlm \ - --ckpt-format torch -done +#!/bin/bash 
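+# Text generation / evaluation launcher for the NVLM Qwen2-72B + InternViT-6B model.
+# Task, model path, input images and ground truth are supplied via the flags parsed below.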
+ +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 +export TOKENIZERS_PARALLELISM="false" + +INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" + +USE_TILING=0 +USE_PIXEL_SHUFFLE_ONLY=0 + +while [[ $# -gt 0 ]]; do + case $1 in + --input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + --task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + --use-tiling) + USE_TILING=1 + shift + shift + ;; + --use-pixel-shuffle-only) + USE_PIXEL_SHUFFLE_ONLY=1 + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + +# Please modify these as needed. +NUM_PARTITIONS=0 +START=0 +END=0 + +SEQ_LEN=1024 # Image embeddings sequence length. +DECODER_SEQ_LEN=8192 # Language model sequence length. +MAX_POS_EMBED=8192 + +# Additional arguments. +EXTRA_ARGS="" + +if [[ $USE_TILING -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle --use-tiling --max-num-tiles 6 --use-thumbnail --use-tile-tags" + SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). +fi + +if [[ $USE_PIXEL_SHUFFLE_ONLY -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle" + SEQ_LEN=256 +fi + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --attention-softmax-in-fp32 \ + --no-masked-softmax-fusion \ + --swiglu \ + --num-layers 80 \ + --hidden-size 8192 \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --num-attention-heads 64 \ + --exit-on-missing-checkpoint \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 29568 \ + --load ${MODEL_PATH} \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model Qwen/Qwen2-72B-Instruct \ + --tokenizer-prompt-format qwen2p0 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --disable-bias-linear \ + --add-qkv-bias \ + --tensor-model-parallel-size 8 \ + --pipeline-model-parallel-size 1 \ + --language-model-type qwen2.0_72B \ + --vision-model-type internvit \ + --micro-batch-size 1 \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --bf16 \ + --freeze-LM \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --use-te \ + --transformer-impl transformer_engine \ + --use-checkpoint-args \ + --out-seq-length 16 \ + --temperature 1.0 \ + --patch-dim 14 \ + --seed 1234 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --disable-vision-class-token \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + ${EXTRA_ARGS} \ + --task ${TASK} \ + --image-tag-type nvlm \ + --ckpt-format torch +done diff --git a/examples/multimodal/nvlm/run_text_generation_qwen25_7b_internvit_video.sh b/examples/multimodal/nvlm/run_text_generation_qwen25_7b_internvit_video.sh new file mode 100755 index 0000000..df1e900 --- /dev/null +++ b/examples/multimodal/nvlm/run_text_generation_qwen25_7b_internvit_video.sh @@ -0,0 +1,129 @@ +#!/bin/bash + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 +export TOKENIZERS_PARALLELISM="false" + 
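+# Placeholder defaults; the option-parsing loop below overrides these from the command line.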
+INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" + +while [[ $# -gt 0 ]]; do + case $1 in + --input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + --input-metadata-path) + INPUT_METADATA_PATH="$2" + shift + shift + ;; + --num-frames) + NUM_FRAMES="$2" + shift + shift + ;; + -g|--groundtruth-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + --task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + + +# Please modify these as needed. +NUM_PARTITIONS=0 +START=0 +END=0 + +SEQ_LEN=256 +DECODER_SEQ_LEN=16384 + +EXTRA_ARGS=" --pixel-shuffle" + + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --attention-softmax-in-fp32 \ + --transformer-impl transformer_engine \ + --use-te \ + --use-checkpoint-args \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --language-model-type=qwen2.5_7B \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --tensor-model-parallel-size 4 \ + --pipeline-model-parallel-size 1 \ + --group-query-attention \ + --num-query-groups 4 \ + --num-layers 28 \ + --hidden-size 3584 \ + --ffn-hidden-size 18944 \ + --add-qkv-bias \ + --num-attention-heads 28 \ + --max-position-embeddings 32768 \ + --no-masked-softmax-fusion \ + --load ${MODEL_PATH} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model Qwen/Qwen2.5-7B-Instruct \ + --tokenizer-prompt-format qwen2p5 \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --out-seq-length 128 \ + --temperature 1.0 \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --seed 153 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --task ${TASK} \ + ${EXTRA_ARGS} \ + --special-tokens "" "" "" \ + --vision-model-type internvit \ + --num-frames ${NUM_FRAMES} \ + --ckpt-format torch +done diff --git a/examples/multimodal/nvlm/run_text_generation_qwen25_7b_siglip.sh b/examples/multimodal/nvlm/run_text_generation_qwen25_7b_siglip.sh old mode 100644 new mode 100755 index 3b62219..d66640f --- a/examples/multimodal/nvlm/run_text_generation_qwen25_7b_siglip.sh +++ b/examples/multimodal/nvlm/run_text_generation_qwen25_7b_siglip.sh @@ -1,111 +1,111 @@ -#!/bin/bash - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NVTE_APPLY_QK_LAYER_SCALING=0 -export TOKENIZERS_PARALLELISM="false" - -INPUT_IMAGE_PATH="placeholder" -GROUNDTRUTH_PATH="placeholder" - -while [[ $# -gt 0 ]]; do - case $1 in - -i|--input-image-path) - INPUT_IMAGE_PATH="$2" - shift - shift - ;; - -o|--output-path) - OUTPUT_PATH="$2" - shift - shift - ;; - -m|--model-path) - MODEL_PATH="$2" - shift - shift - ;; - -t|--task) - TASK="$2" - shift - shift - ;; - -g|--gt-path) - GROUNDTRUTH_PATH="$2" - shift - shift - ;; - -*|--*) - echo "Invalid option $1" - exit 1 - ;; - esac -done - -# Please modify these as needed. 
-NUM_PARTITIONS=0 -START=0 -END=0 - - -SEQ_LEN=256 -DECODER_SEQ_LEN=8192 -EXTRA_ARGS=" --pixel-shuffle --use-tiling --max-num-tiles 12 --use-thumbnail" - -for PARTITION_ID in $( eval echo {$START..$END} ) -do - torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ - --attention-softmax-in-fp32 \ - --transformer-impl transformer_engine \ - --use-te \ - --use-checkpoint-args \ - --normalization RMSNorm \ - --norm-epsilon 1e-06 \ - --language-model-type=qwen2.5_7B \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --tensor-model-parallel-size 4 \ - --pipeline-model-parallel-size 1 \ - --group-query-attention \ - --num-query-groups 4 \ - --num-layers 28 \ - --hidden-size 3584 \ - --ffn-hidden-size 18944 \ - --add-qkv-bias \ - --num-attention-heads 28 \ - --max-position-embeddings 32768 \ - --no-masked-softmax-fusion \ - --load ${MODEL_PATH} \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model Qwen/Qwen2.5-7B-Instruct \ - --tokenizer-prompt-format qwen2p5 \ - --bf16 \ - --micro-batch-size 1 \ - --seq-length ${SEQ_LEN} \ - --decoder-seq-length ${DECODER_SEQ_LEN} \ - --out-seq-length 128 \ - --temperature 1.0 \ - --img-h 448 \ - --img-w 448 \ - --patch-dim 14 \ - --seed 153 \ - --top_k 1 \ - --no-load-rng \ - --no-load-optim \ - --input-image-path ${INPUT_IMAGE_PATH} \ - --num-partitions ${NUM_PARTITIONS} \ - --partition-id ${PARTITION_ID} \ - --output-path ${OUTPUT_PATH} \ - --gt-path ${GROUNDTRUTH_PATH} \ - --task ${TASK} \ - ${EXTRA_ARGS} \ - --special-tokens "" "" "" \ - --vision-model-type siglip \ - --ckpt-format torch -done +#!/bin/bash + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 +export TOKENIZERS_PARALLELISM="false" + +INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" + +while [[ $# -gt 0 ]]; do + case $1 in + -i|--input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + -t|--task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + +# Please modify these as needed. 
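+# The evaluation set can be split into NUM_PARTITIONS shards; the loop below runs one
+# generation pass for each partition id from START to END.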
+NUM_PARTITIONS=0 +START=0 +END=0 + + +SEQ_LEN=256 +DECODER_SEQ_LEN=8192 +EXTRA_ARGS=" --pixel-shuffle --use-tiling --max-num-tiles 12 --use-thumbnail" + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --attention-softmax-in-fp32 \ + --transformer-impl transformer_engine \ + --use-te \ + --use-checkpoint-args \ + --normalization RMSNorm \ + --norm-epsilon 1e-06 \ + --language-model-type=qwen2.5_7B \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --tensor-model-parallel-size 4 \ + --pipeline-model-parallel-size 1 \ + --group-query-attention \ + --num-query-groups 4 \ + --num-layers 28 \ + --hidden-size 3584 \ + --ffn-hidden-size 18944 \ + --add-qkv-bias \ + --num-attention-heads 28 \ + --max-position-embeddings 32768 \ + --no-masked-softmax-fusion \ + --load ${MODEL_PATH} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model Qwen/Qwen2.5-7B-Instruct \ + --tokenizer-prompt-format qwen2p5 \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --out-seq-length 128 \ + --temperature 1.0 \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --seed 153 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --task ${TASK} \ + ${EXTRA_ARGS} \ + --special-tokens "" "" "" \ + --vision-model-type siglip \ + --ckpt-format torch +done diff --git a/examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh b/examples/multimodal/nvlm/run_text_generation_yi_34b_internvit_6b.sh old mode 100644 new mode 100755 diff --git a/examples/multimodal/nvlm/sft_34b_internvit.sh b/examples/multimodal/nvlm/sft_34b_internvit.sh old mode 100644 new mode 100755 index 0dff946..7cdc854 --- a/examples/multimodal/nvlm/sft_34b_internvit.sh +++ b/examples/multimodal/nvlm/sft_34b_internvit.sh @@ -1,160 +1,161 @@ -#!/bin/bash - -# Your SBATCH commands here if using SLURM. - -# Please launch this script from megatron-lm root. - -# Train a multimodal model. - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NCCL_ALGO=^NVLS -export TOKENIZERS_PARALLELISM="false" - - -DEBUG=0 - -if [[ $BATCH -eq 0 ]]; then - DATETIME=`date +'%y-%m-%d-%H-%M-%S'` - MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft-${DATETIME}" -else - MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft" -fi - -WORKSPACE="" -SOURCE=`pwd` -OUTPUT_BASE="${WORKSPACE}/output" -OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" - -FINETUNE_DIR=${OUTPUT}/checkpoints -LOGS_DIR="${OUTPUT}/logs" -TENSORBOARD_DIR="${OUTPUT}/tensorboard" - -LOAD_NAME="mcore-nous-yi34b-internvit-mlp" # From pretraining -CHECKPOINT_DIR="${WORKSPACE}/output/${LOAD_NAME}/checkpoints" - -DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/sft_blend.yaml" - - -if [[ $DEBUG -eq 1 ]]; then - MBZ=1 - BZ=1 - NW=0 - LI=1 - AD=0.0 - HD=0.0 - ALLOW_NONDETERMINISTIC=1 - - # Can run out of GPU memory in interactive memory without this. - # This is just for interactive testing purposes. Do not use for proper training. 
- EXTRA_ARGS=" --freeze-LM" -else - MBZ=1 - BZ=128 - NW=2 - LI=5 - AD=0.0 - HD=0.0 - ALLOW_NONDETERMINISTIC=1 - - EXTRA_ARGS="" -fi - -SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). -DECODER_SEQ_LEN=3200 # Language model sequence length. -MAX_POS_EMBED=3200 - -OPTIONS=" \ - --swiglu \ - --use-distributed-optimizer \ - --num-workers ${NW} \ - --num-layers 60 \ - --hidden-size 7168 \ - --normalization RMSNorm \ - --num-attention-heads 56 \ - --exit-duration-in-mins 230 \ - --group-query-attention \ - --num-query-groups 8 \ - --ffn-hidden-size 20480 \ - --seq-length ${SEQ_LEN} \ - --decoder-seq-length ${DECODER_SEQ_LEN} \ - --max-position-embeddings ${MAX_POS_EMBED} \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model NousResearch/Nous-Hermes-2-Yi-34B \ - --tokenizer-prompt-format nvlm-yi-34b \ - --vocab-size 64000 \ - --make-vocab-size-divisible-by 1 \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 5000000 \ - --disable-bias-linear \ - --tensor-model-parallel-size 8 \ - --language-model-type yi-34b \ - --vision-model-type internvit \ - --micro-batch-size ${MBZ} \ - --global-batch-size ${BZ} \ - --train-samples 30000000 \ - --lr-decay-samples 25600000 \ - --lr-warmup-samples 83200 \ - --lr 2e-6 \ - --min-lr 2.5e-7 \ - --lr-decay-style cosine \ - --split 100,0,0 \ - --clip-grad 10 \ - --weight-decay 0.1 \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --init-method-std 0.014 \ - --attention-dropout ${AD} \ - --hidden-dropout ${HD} \ - --eod-mask-loss \ - --bf16 \ - --tensorboard-dir=${TENSORBOARD_DIR} \ - --freeze-ViT \ - --img-h 448 \ - --img-w 448 \ - --patch-dim 14 \ - --data-path ${DATA_TRAIN} \ - --dataloader-type external \ - --dataloader-save ${FINETUNE_DIR}/dataloader \ - --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ - --log-interval ${LI} \ - --load ${FINETUNE_DIR} \ - --save ${FINETUNE_DIR} \ - --pretrained-checkpoint ${CHECKPOINT_DIR} \ - --save-interval 5000 \ - --eval-interval 500 \ - --eval-iters 10 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - ${EXTRA_ARGS} \ - --disable-vision-class-token \ - --use-te \ - --ckpt-format torch \ - --pixel-shuffle \ - --use-tiling \ - --max-num-tiles 6 \ - --use-thumbnail \ - --use-tile-tags \ - --image-tag-type nvlm - " - -export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} -export NVTE_APPLY_QK_LAYER_SCALING=0 - -# Interactive or batch mode -if [[ $BATCH -eq 0 ]]; then - torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} -else - run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" - - DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` - - srun -l --verbose \ - --container-image \ - --container-mounts "" \ - --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ - sh -c "${run_cmd}" - - set +x -fi +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. 
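+# SFT stage: resumes from the pretraining checkpoint selected by LOAD_NAME below;
+# BATCH selects interactive (torchrun) vs. batch (srun) launch as in the pretraining script.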
+ +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_ALGO=^NVLS +export TOKENIZERS_PARALLELISM="false" + + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft-${DATETIME}" +else + MODEL_NAME="mcore-nous-yi34b-internvit-mlp-sft" +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +LOAD_NAME="mcore-nous-yi34b-internvit-mlp" # From pretraining +CHECKPOINT_DIR="${WORKSPACE}/output/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/sft_blend.yaml" + + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + LI=1 + AD=0.0 + HD=0.0 + ALLOW_NONDETERMINISTIC=1 + + # Can run out of GPU memory in interactive memory without this. + # This is just for interactive testing purposes. Do not use for proper training. + EXTRA_ARGS=" --freeze-LM" +else + MBZ=1 + BZ=128 + NW=2 + LI=5 + AD=0.0 + HD=0.0 + ALLOW_NONDETERMINISTIC=1 + + EXTRA_ARGS="" +fi + +SEQ_LEN=261 # Image embeddings sequence length (256 image embeddings + 5 tile tag embeddings). +DECODER_SEQ_LEN=3200 # Language model sequence length. +MAX_POS_EMBED=3200 + +OPTIONS=" \ + --swiglu \ + --use-distributed-optimizer \ + --num-workers ${NW} \ + --num-layers 60 \ + --hidden-size 7168 \ + --normalization RMSNorm \ + --num-attention-heads 56 \ + --exit-duration-in-mins 230 \ + --group-query-attention \ + --num-query-groups 8 \ + --ffn-hidden-size 20480 \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model NousResearch/Nous-Hermes-2-Yi-34B \ + --tokenizer-prompt-format nvlm-yi-34b \ + --vocab-size 64000 \ + --make-vocab-size-divisible-by 1 \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 5000000 \ + --disable-bias-linear \ + --tensor-model-parallel-size 8 \ + --language-model-type yi-34b \ + --vision-model-type internvit \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --train-samples 30000000 \ + --lr-decay-samples 25600000 \ + --lr-warmup-samples 83200 \ + --lr 2e-6 \ + --min-lr 2.5e-7 \ + --lr-decay-style cosine \ + --split 100,0,0 \ + --clip-grad 10 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --untie-embeddings-and-output-weights \ + --eod-mask-loss \ + --bf16 \ + --tensorboard-dir=${TENSORBOARD_DIR} \ + --freeze-ViT \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --data-path ${DATA_TRAIN} \ + --dataloader-type external \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --log-interval ${LI} \ + --load ${FINETUNE_DIR} \ + --save ${FINETUNE_DIR} \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --save-interval 5000 \ + --eval-interval 500 \ + --eval-iters 10 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + ${EXTRA_ARGS} \ + --disable-vision-class-token \ + --use-te \ + --ckpt-format torch \ + --pixel-shuffle \ + --use-tiling \ + --max-num-tiles 6 \ + --use-thumbnail \ + --use-tile-tags \ + --image-tag-type nvlm + " + +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} +export NVTE_APPLY_QK_LAYER_SCALING=0 + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 
examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh b/examples/multimodal/nvlm/sft_qwen20_72b_internvit_6b.sh old mode 100644 new mode 100755 diff --git a/examples/multimodal/nvlm/sft_qwen2p5_7b_internvit_6b_video.sh b/examples/multimodal/nvlm/sft_qwen2p5_7b_internvit_6b_video.sh new file mode 100755 index 0000000..d7eb8e0 --- /dev/null +++ b/examples/multimodal/nvlm/sft_qwen2p5_7b_internvit_6b_video.sh @@ -0,0 +1,184 @@ +#!/bin/bash + +# Your SBATCH commands here if using SLURM. + +# Please launch this script from megatron-lm root. + +# Train a multimodal model. + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NCCL_ALGO=^NVLS +export TOKENIZERS_PARALLELISM=false + +USER=$SLURM_JOB_USER + +# Auto-detect batch or interactive mode. +which srun +BATCH=$((1-$?)) + +DEBUG=0 + +if [[ $BATCH -eq 0 ]]; then + DATETIME=`date +'%y-%m-%d-%H-%M-%S'` + MODEL_NAME="qwen2.5-7B-internvit-video-sft-nvlm-${DATETIME}" +else + MODEL_NAME="qwen2.5-7B-internvitp-video-sft-nvlm" + DEBUG=0 +fi + +WORKSPACE="" +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR="${OUTPUT}/checkpoints" +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +# From pretraining. The pretraining checkpoint should have tensor parallel size to 4. +LOAD_NAME="mcore-qwen2p5-7b-internvit-tp4" + +CHECKPOINT_DIR="${WORKSPACE}/output/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/nvlm/sft_blend.yaml" + +if [[ $DEBUG -eq 1 ]]; then + MBZ=1 + BZ=1 + NW=0 + AD=0.0 + HD=0.0 + LI=1 + # This is just for interactive testing purposes. Do not use for proper training. + EXTRA_ARGS="--freeze-LM" + ALLOW_NONDETERMINISTIC=1 +else + MBZ=1 + BZ=256 + NW=8 + AD=0.0 + HD=0.0 + LI=5 + EXTRA_ARGS="" + ALLOW_NONDETERMINISTIC=1 +fi + +USE_TILING=1 +SEQ_LEN=1024 +DECODER_SEQ_LEN=16384 +MAX_POS_EMBED=32768 +TRAIN_SAMPLES=6602173 +WARMUP_SAMPLES=198065 + + +if [[ $BATCH -eq 0 ]]; then + # Runs out of GPU memory in interactive memory without this. 
+ EXTRA_ARGS+="--freeze-LM" +fi + +if [[ $USE_TILING -eq 1 ]]; then + EXTRA_ARGS+=" --pixel-shuffle --use-tiling --max-num-tiles 12 --use-thumbnail" + SEQ_LEN=256 +fi + + +OPTIONS=" \ + --swiglu \ + --use-distributed-optimizer \ + --num-workers ${NW} \ + --num-layers 28 \ + --hidden-size 3584 \ + --norm-epsilon 1e-06 \ + --normalization RMSNorm \ + --num-attention-heads 28 \ + --exit-duration-in-mins 110 \ + --group-query-attention \ + --num-query-groups 4 \ + --ffn-hidden-size 18944 \ + --add-qkv-bias \ + --seq-length ${SEQ_LEN} \ + --decoder-seq-length ${DECODER_SEQ_LEN} \ + --max-position-embeddings ${MAX_POS_EMBED} \ + --dataloader-seq-length ${DECODER_SEQ_LEN} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model Qwen/Qwen2.5-7B-Instruct \ + --tokenizer-prompt-format qwen2p5 \ + --pixel-shuffle \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --disable-bias-linear \ + --pipeline-model-parallel-size 1 \ + --tensor-model-parallel-size 4 \ + --language-model-type qwen2.5_7B \ + --vision-model-type internvit \ + --micro-batch-size ${MBZ} \ + --global-batch-size ${BZ} \ + --lr 2e-6 \ + --min-lr 2.5e-7 \ + --train-samples ${TRAIN_SAMPLES} \ + --lr-warmup-samples ${WARMUP_SAMPLES} \ + --lr-decay-style cosine \ + --clip-grad 10 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --attention-dropout ${AD} \ + --hidden-dropout ${HD} \ + --eod-mask-loss \ + --bf16 \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --img-h 448 \ + --img-w 448 \ + --patch-dim 14 \ + --data-path ${DATA_TRAIN} \ + --dataloader-type external \ + --split 100,0,0 \ + --prompt-path ${SOURCE}/examples/multimodal/nvlm/nvlm_prompts.json \ + --log-interval ${LI} \ + --save-interval 500 \ + --eval-interval 500 \ + --eval-iters 10 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + ${EXTRA_ARGS} \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --distributed-timeout-minutes 60 \ + --allow-missing-vision-projection-checkpoint \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --disable-vision-class-token \ + --use-te \ + --ckpt-format torch \ + --num-frames 32 \ + --use-checkpoint-args \ + --image-tag-type internvl \ + --recompute-granularity full \ + --recompute-method block \ + --recompute-num-layers 28 \ + --recompute-vision \ +" + + +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${ALLOW_NONDETERMINISTIC} +export NVTE_APPLY_QK_LAYER_SCALING=0 + +# Interactive or batch mode +if [[ $BATCH -eq 0 ]]; then + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +else + run_cmd="python -u ${SOURCE}/examples/multimodal/train.py ${OPTIONS}" + + DATETIME=`date +'date_%y-%m-%d_time_%H-%M-%S'` + + srun -l --verbose \ + --container-image \ + --container-mounts "" \ + --output=${LOGS_DIR}/%x_%j_$DATETIME.log \ + sh -c "${run_cmd}" + + set +x +fi diff --git a/examples/multimodal/pretrain_mistral_clip.sh b/examples/multimodal/pretrain_mistral_clip.sh old mode 100644 new mode 100755 index 90b0053..6032a83 --- a/examples/multimodal/pretrain_mistral_clip.sh +++ b/examples/multimodal/pretrain_mistral_clip.sh @@ -1,128 +1,128 @@ -#!/bin/bash -# Pretrain a multimodal model. - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -MODEL_NAME="mcore-llava-mistral-7b-instruct-clip336-pretraining" - -# Check that the user has set an output path for model checkpoints. -if [[ -z $WORKSPACE ]]; then - echo "Please set WORKSPACE for storing your model checkpoints." 
- exit 1 -fi - -SOURCE=`pwd` -OUTPUT_BASE="${WORKSPACE}/output" -OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" - -FINETUNE_DIR=${OUTPUT}/checkpoints -LOGS_DIR="${OUTPUT}/logs" -TENSORBOARD_DIR="${OUTPUT}/tensorboard" - -if [[ -z $LOAD_NAME ]]; then - echo "Please set LOAD_NAME for input model name." - exit 1 -fi - -CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}/checkpoints" - -DATA_TRAIN="${SOURCE}/examples/multimodal/pretrain_dataset.yaml" - -DEBUG=0 -if [[ $DEBUG -eq 1 ]]; then - BZ=32 - NW=2 - HD=0.0 - LI=1 - EXTRA_ARGS="" - NONDETERMINISTIC_ATTN=1 -else - BZ=256 - NW=2 - HD=0.1 - LI=10 - EXTRA_ARGS="" - NONDETERMINISTIC_ATTN=1 -fi - -OPTIONS=" \ - --apply-layernorm-1p \ - --attention-softmax-in-fp32 \ - --use-checkpoint-args \ - --use-distributed-optimizer \ - --transformer-impl transformer_engine \ - --use-te \ - --normalization RMSNorm \ - --group-query-attention \ - --num-query-groups 8 \ - --no-masked-softmax-fusion \ - --num-workers ${NW} \ - --exit-duration-in-mins 230 \ - --use-flash-attn \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout ${HD} \ - --tensor-model-parallel-size 4 \ - --pipeline-model-parallel-size 1 \ - --num-layers 32 \ - --hidden-size 4096 \ - --num-attention-heads 32 \ - --seq-length 576 \ - --decoder-seq-length 1024 \ - --max-position-embeddings 4096 \ - --ffn-hidden-size 14336 \ - --train-iters 20000 \ - --micro-batch-size 1 \ - --global-batch-size ${BZ} \ - --lr-decay-iters 20000 \ - --lr-warmup-fraction .01 \ - --lr 0.00015 \ - --min-lr 1.0e-5 \ - --lr-decay-style cosine \ - --log-interval ${LI} \ - --eval-iters 10 \ - --eval-interval 1000 \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ - --tokenizer-prompt-format mistral \ - --data-path ${DATA_TRAIN} \ - --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ - --save-interval 1000 \ - --save ${FINETUNE_DIR} \ - --load ${FINETUNE_DIR} \ - --dataloader-save ${FINETUNE_DIR}/dataloader \ - --pretrained-checkpoint ${CHECKPOINT_DIR} \ - --split 100,0,0 \ - --clip-grad 1.0 \ - --weight-decay 1e-2 \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --init-method-std 0.014 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - --bf16 \ - --eod-mask-loss \ - --freeze-LM \ - --freeze-ViT \ - --patch-dim 14 \ - --img-h 336 \ - --img-w 336 \ - --dataloader-type external \ - --tensorboard-dir ${TENSORBOARD_DIR} \ - --language-model-type=mistral_7b \ - --disable-vision-class-token \ - ${EXTRA_ARGS} \ - --distributed-timeout-minutes 60 \ - --allow-missing-vision-projection-checkpoint \ - --ckpt-format torch -" - -export NVTE_APPLY_QK_LAYER_SCALING=0 -export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${NONDETERMINISTIC_ATTN} - +#!/bin/bash +# Pretrain a multimodal model. + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +MODEL_NAME="mcore-llava-mistral-7b-instruct-clip336-pretraining" + +# Check that the user has set an output path for model checkpoints. +if [[ -z $WORKSPACE ]]; then + echo "Please set WORKSPACE for storing your model checkpoints." + exit 1 +fi + +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +if [[ -z $LOAD_NAME ]]; then + echo "Please set LOAD_NAME for input model name." 
+ exit 1 +fi + +CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/pretrain_dataset.yaml" + +DEBUG=0 +if [[ $DEBUG -eq 1 ]]; then + BZ=32 + NW=2 + HD=0.0 + LI=1 + EXTRA_ARGS="" + NONDETERMINISTIC_ATTN=1 +else + BZ=256 + NW=2 + HD=0.1 + LI=10 + EXTRA_ARGS="" + NONDETERMINISTIC_ATTN=1 +fi + +OPTIONS=" \ + --apply-layernorm-1p \ + --attention-softmax-in-fp32 \ + --use-checkpoint-args \ + --use-distributed-optimizer \ + --transformer-impl transformer_engine \ + --use-te \ + --normalization RMSNorm \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --num-workers ${NW} \ + --exit-duration-in-mins 230 \ + --use-flash-attn \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout ${HD} \ + --tensor-model-parallel-size 4 \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --num-attention-heads 32 \ + --seq-length 576 \ + --decoder-seq-length 1024 \ + --max-position-embeddings 4096 \ + --ffn-hidden-size 14336 \ + --train-iters 20000 \ + --micro-batch-size 1 \ + --global-batch-size ${BZ} \ + --lr-decay-iters 20000 \ + --lr-warmup-fraction .01 \ + --lr 0.00015 \ + --min-lr 1.0e-5 \ + --lr-decay-style cosine \ + --log-interval ${LI} \ + --eval-iters 10 \ + --eval-interval 1000 \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ + --tokenizer-prompt-format mistral \ + --data-path ${DATA_TRAIN} \ + --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ + --save-interval 1000 \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --split 100,0,0 \ + --clip-grad 1.0 \ + --weight-decay 1e-2 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + --bf16 \ + --eod-mask-loss \ + --freeze-LM \ + --freeze-ViT \ + --patch-dim 14 \ + --img-h 336 \ + --img-w 336 \ + --dataloader-type external \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --language-model-type=mistral_7b \ + --disable-vision-class-token \ + ${EXTRA_ARGS} \ + --distributed-timeout-minutes 60 \ + --allow-missing-vision-projection-checkpoint \ + --ckpt-format torch +" + +export NVTE_APPLY_QK_LAYER_SCALING=0 +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${NONDETERMINISTIC_ATTN} + torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} \ No newline at end of file diff --git a/examples/multimodal/run_text_generation.py b/examples/multimodal/run_text_generation.py index cbde668..b4699fe 100644 --- a/examples/multimodal/run_text_generation.py +++ b/examples/multimodal/run_text_generation.py @@ -1,515 +1,595 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -"""Generate text using a vision language model.""" -import json -import logging -import os -import sys -from functools import partial - -# Add megatron to the path. 
-sys.path.append( - os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) -) - -import torch -import yaml -from config import EvaluationConfig -from evaluation.evaluation_datasets import get_evaluation_dataset -from model import model_provider -from multimodal_args import add_multimodal_extra_args - -from megatron.core import parallel_state -from megatron.core.enums import ModelType -from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN -from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.inference.text_generation.api import generate_and_post_process -from megatron.inference.text_generation.forward_step import ForwardStep -from megatron.inference.text_generation.communication import broadcast_int_list -from megatron.training import get_args, get_model, get_tokenizer, print_rank_0 -from megatron.training.checkpointing import load_checkpoint -from megatron.training.initialize import initialize_megatron - - -def add_text_generation_args(parser): - """Text generation arguments.""" - group = parser.add_argument_group(title='Vision language model text generation arguments') - - group.add_argument("--temperature", type=float, default=1.0, help='Sampling temperature.') - group.add_argument("--top_p", type=float, default=0.0, help='Top p sampling.') - group.add_argument("--top_k", type=int, default=0, help='Top k sampling.') - group.add_argument( - "--out-seq-length", type=int, default=128, help='Length of the output generated text.' - ) - group.add_argument("--output-path", type=str, help='Output file path') - group.add_argument('--input-image-path', type=str, help="Input image directory") - group.add_argument( - '--num-partitions', type=int, default=0, help="Number of partitions for inputs." - ) - group.add_argument('--partition-id', type=int, default=0, help="Partition index") - group.add_argument("--gt-path", type=str, help="Optional ground truth file") - group.add_argument( - "--task", - type=str, - choices=[ - "captioning", - "TextVQA", - "VQAv2", - "ChartQA", - "MMMU", - "VideoMME", - "OCRBench", - "MathVista", - "AI2D", - ], - help="Generation task to run", - ) - group.add_argument( - "--num-samples-per-partition", type=int, default=0, help="Number of samples per partition" - ) - group.add_argument("--config-path", type=str, help="Evaluation config file to use.") - - # Add common multimodal arguments needed for e.g. building the model. - parser = add_multimodal_extra_args(parser) - - return parser - - -def get_evaluation_dataloader( - task, - input_image_path, - gt_path, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_samples_per_partition, - num_partitions, - partition_id, - num_frames, - num_workers, - vision_model_type, -): - """Build evaluation dataset.""" - dataset = get_evaluation_dataset( - task, - input_image_path, - gt_path, - img_h, - img_w, - use_tiling, - max_num_tiles, - use_thumbnail, - num_samples_per_partition, - num_partitions, - partition_id, - num_frames, - vision_model_type, - ) - - dp_rank = parallel_state.get_data_parallel_rank() - dp_world_size = parallel_state.get_data_parallel_world_size() - - sampler = torch.utils.data.DistributedSampler( - dataset, shuffle=False, num_replicas=dp_world_size, rank=dp_rank - ) - # TODO: Batched inference is not supported yet. 
- dataloader = torch.utils.data.DataLoader( - dataset, batch_size=None, num_workers=num_workers, sampler=sampler, pin_memory=True - ) - - return dataloader - - -def generate_samples(model, config: EvaluationConfig, print_output): - """Text generation using a trained vision language model.""" - args = get_args() - - dataloader = get_evaluation_dataloader( - config.task, - config.input_image_path, - config.gt_path, - args.img_h, - args.img_w, - args.use_tiling, - args.max_num_tiles, - args.use_thumbnail, - config.num_samples_per_partition, - config.num_partitions, - config.partition_id, - args.num_frames, - args.num_workers, - args.vision_model_type, - ) - - num_img_embeddings_per_tile = get_num_image_embeddings( - args.img_h, - args.img_w, - args.patch_dim, - args.vision_model_type, - args.disable_vision_class_token, - 1, - args.pixel_shuffle, - args.use_tile_tags, - ) - - for idx, (imgs, num_tiles, sample_id, question, answers, metadata) in enumerate(dataloader): - imgs = imgs.to("cuda") - num_tiles = num_tiles.to("cuda") - - conv = get_conversation(config.task, question) - - forward_step = partial(VLMForwardStep, num_img_embeddings_per_tile, imgs, num_tiles, args.decoder_seq_length) - - if is_first_rank(): - resp_sentences, _, _, _ = generate_and_post_process( - model, - forward_step=forward_step, - prompts=[conv], - tokens_to_generate=config.out_seq_length, - top_k_sampling=config.top_k, - top_p_sampling=config.top_p, - add_BOS=False, - temperature=config.temperature, - random_seed=args.seed, - detokenize_segments=False, - data_parallel=True, - ) - - for generation in resp_sentences: - if isinstance(sample_id, torch.Tensor): - sample_id = sample_id.item() - - output = {"sample_id": sample_id} - - output_name = "" - if config.task == "captioning": - output_name = "caption" - elif config.task in ( - "TextVQA", - "VQAv2", - "ChartQA", - "OCRBench", - "MathVista", - "AI2D", - ): - output_name = "answer" - elif config.task in ("MMMU"): - output_name = "text" - elif config.task == "VideoMME": - output_name = "response" - output = question - else: - raise NotImplementedError("no output name defined for", config.task) - - prompt, generated = get_prompt_and_generated( - generation, args.tokenizer_prompt_format - ) - if config.task == "VideoMME": - output["questions"][0][output_name] = generated - else: - output["prompt"] = prompt - output[output_name] = generated - - if config.task == "captioning": - output["ground_truth"] = answers - elif config.task in ( - "TextVQA", - "VQAv2", - "ChartQA", - "OCRBench", - "MathVista", - "AI2D", - ): - if isinstance(answers, str): - answers = [answers] - output["gt_answer"] = answers - - if len(metadata) > 0: - output.update(metadata) - elif config.task == "MMMU": - output["prediction"] = generated - output.update(metadata) - else: - raise NotImplementedError("no output processing defined for", config.task) - - if print_output: - print(output) - - yield output - idx += 1 - else: - generate_and_post_process( - model, forward_step=forward_step, detokenize_segments=False, data_parallel=True - ) - - idx += 1 - - -def get_evaluation_config(): - """Get evaluation config from a config file or command-line arguments.""" - args = get_args() - if args.config_path: - with open(args.config_path, "r") as f: - config_dict = yaml.safe_load(f) - - config = EvaluationConfig(**config_dict) - else: - config = EvaluationConfig( - task=args.task, - temperature=args.temperature, - top_p=args.top_p, - top_k=args.top_k, - out_seq_length=args.out_seq_length, - 
output_path=args.output_path, - input_image_path=args.input_image_path, - gt_path=args.gt_path, - num_partitions=args.num_partitions, - partition_id=args.partition_id, - num_samples_per_partition=args.num_samples_per_partition, - ) - - # Default output path if not defined... - if not config.output_path: - os.makedirs("generated", exist_ok=True) - config.output_path = "generated/" + args.language_model_type - - return config - - -def is_first_rank(): - """First tensor and pipeline parallel rank.""" - return ( - parallel_state.is_pipeline_first_stage(ignore_virtual=True) - and parallel_state.get_tensor_model_parallel_rank() == 0 - ) - - -def get_output_path(config, dp_rank): - """Generation output path.""" - return ( - f"{config.output_path}-{config.task}-dprank={dp_rank}-partition={config.partition_id}.jsonl" - ) - - -def generate_and_write_samples(model, config, print_output=True): - """Generate text and write to an output file.""" - dp_rank = parallel_state.get_data_parallel_rank() - - if is_first_rank(): - output_path = get_output_path(config, dp_rank) - output_file = open(output_path, "w") - print(f"output path: {output_file.name}") - - with torch.no_grad(): - for output in generate_samples(model, config, print_output): - if is_first_rank(): - output_file.write(json.dumps(output) + "\n") - output_file.flush() - - if is_first_rank(): - output_file.close() - - -class VLMForwardStep(ForwardStep): - """Inference forward step for a multimodal model.""" - - def __init__( - self, - num_img_embeddings_per_tile, - images, - num_tiles, - decoder_seq_length, - model, - max_batch_size, - max_sequence_length, - ): - """Create multimodal forward step.""" - total_num_tiles = torch.sum(num_tiles).item() - num_img_embeddings = num_img_embeddings_per_tile * total_num_tiles - - super().__init__(model, max_batch_size, max_sequence_length + num_img_embeddings) - self._images = images - self._num_tiles = num_tiles - self._num_img_embeddings = num_img_embeddings - self.decoder_seq_length = decoder_seq_length - - self._recv_only_vision_embeds = False - pp_rank = parallel_state.get_pipeline_model_parallel_rank() - # Checks if the previous stage only has a vision encoder, and that the current stage has part of the LM decoder. - # In this case, the current stage should only receive vision embeddings. - if pp_rank > 0: - self._recv_only_vision_embeds = parallel_state.is_inside_encoder(pp_rank - 1) and (not parallel_state.is_inside_decoder(pp_rank - 1)) and parallel_state.is_inside_decoder() - - # Checks if the current stage only has a vision encoder - self._encoder_only = parallel_state.is_inside_encoder() and not parallel_state.is_inside_decoder() - - def _forward(self, tokens, position_ids, attention_mask): - return self.model( - self._images, - tokens, - position_ids, - attention_mask=None, - inference_params=self.inference_params, - num_image_tiles=self._num_tiles, - runtime_gather_output=True, - ) - - def __call__(self, tokens, position_ids, attention_mask): - num_image_tokens = (tokens == self.model.module.image_token_index).sum().item() - num_tokens = tokens.size(1) - recv_buffer_seq_length = None - if num_image_tokens > 0: - # When there are image tokens and this stage only receives vision embeddings, adjust the recv buffer seq length to match the image embeddings sequence length. - # If there are image tokens and this stage receives full embeddings, make sure we compensate for expansion of image tokens. 
- # Note that this will set a recv_buffer_seq_length for the encoder stage, this length is irrelevant since that recv buffer is never allocated. - if self._recv_only_vision_embeds: - recv_buffer_seq_length = self._num_img_embeddings - else: - recv_buffer_seq_length = min(self._num_img_embeddings + num_tokens - num_image_tokens, self.decoder_seq_length) - elif self._recv_only_vision_embeds: - # If this stage only receives vision embeddings and there are no image tokens we won't run the encoder and therefore shouldn't try to recv. - recv_buffer_seq_length = 0 - - # If the pipeline stage only has a vision encoder, then it only needs to run when there are image tokens - if not (self._encoder_only and num_image_tokens == 0): - output = super().__call__(tokens, position_ids, attention_mask, recv_buffer_seq_length=recv_buffer_seq_length) - else: - output = None - if isinstance(output, tuple): - logits, _ = output - else: - logits = output - - # On the first inference iteration, we compute image tokens. - # On every PP stage(although inference params should only matter for decoder), - # update the sequence length offset by the number of image tokens. - if num_tokens > 1 and num_image_tokens > 0: - if "image_tokens_count" not in self.inference_params.key_value_memory_dict: - self.inference_params.key_value_memory_dict["image_tokens_count"] = self._num_img_embeddings - - if self._num_img_embeddings + num_tokens - num_image_tokens > self.decoder_seq_length: - self.inference_params.sequence_len_offset += self.decoder_seq_length - num_tokens - else: - self.inference_params.sequence_len_offset += ( - self.inference_params.key_value_memory_dict["image_tokens_count"] - num_image_tokens - ) - - return logits - - -def get_conversation(task, question): - """Get a conversation for a given task and evaluation question.""" - conversation = [] - - # In all cases, the tokenizer adds possible header tokens for the assistant. - if task == "captioning": - conversation = [ - {"role": "system", "content": "Answer the questions."}, - { - "role": "user", - "content": f"{IMAGE_TOKEN}\nProvide a one-sentence caption for provided image.", - }, - ] - elif task in ("TextVQA", "VQAv2", "ChartQA"): - conversation = [ - {"role": "system", "content": "Answer the questions."}, - { - "role": "user", - "content": f"{IMAGE_TOKEN}\n{question}\nAnswer the question using a single word or phrase.", - }, - ] - elif task in ("OCRBench", "MathVista", "AI2D"): - conversation = [ - {"role": "system", "content": "Answer the questions."}, - {"role": "user", "content": f"{IMAGE_TOKEN}\n{question}"}, - ] - elif task == "MMMU": - conversation = [ - {"role": "system", "content": "Answer the questions."}, - {"role": "user", "content": question}, - ] - elif task == "VideoMME": - q = ( - "Select the best answer to the following multiple-choice " - "question based on the video. 
Respond with only the letter " - "(A, B, C, or D) of the correct option.\n" - ) - q += question["questions"][0]["question"] + "\n" - q += question["questions"][0]["choices"][0] + "\n" - q += question["questions"][0]["choices"][1] + "\n" - q += question["questions"][0]["choices"][2] + "\n" - q += question["questions"][0]["choices"][3] + "\n" - - conversation = [ - {"role": "system", "content": "Answer the questions."}, - {"role": "user", "content": f"{IMAGE_TOKEN}\n{question}"}, - ] - - return conversation - - -def get_prompt_and_generated(prompt_and_generation, prompt_format): - """Strip prompt and other unnecessary text from generation.""" - if prompt_format == "llama3": - splitted = prompt_and_generation.split("<|start_header_id|>assistant<|end_header_id|>\n\n") - prompt = splitted[0] - generated = splitted[1] - generated = generated.split("<|eot_id|>")[0] - elif prompt_format == "mistral": - splitted = prompt_and_generation.split("[/INST]") - prompt = splitted[0] - generated = splitted[1] - generated = generated.split("")[0] - elif prompt_format == "chatml": - splitted = prompt_and_generation.split("<|im_start|> assistant\n") - prompt = splitted[0] - generated = splitted[1] - generated = generated.split("<|im_end|>")[0] - elif prompt_format in ("nvlm-yi-34b", "qwen2p0", "qwen2p5"): - splitted = prompt_and_generation.split("<|im_start|>assistant\n") - prompt = splitted[0] - generated = splitted[1] - generated = generated.split("<|im_end|>")[0] - else: - raise ValueError(f"Prompt format {prompt_format} is not supported.") - - # Remove possible garbage. - generated = generated.strip() - generated = generated.split("\n\n")[0] - generated = generated.split("\n")[0] - - return prompt, generated - - -def main(): - """Vision language model text generation.""" - initialize_megatron(extra_args_provider=add_text_generation_args) - - if torch.distributed.get_rank() == 0: - logging.getLogger(__name__).warning( - "Models using pipeline parallelism are not supported yet." - ) - - args = get_args() - - def wrapped_model_provider(pre_process, post_process, add_encoder, add_decoder): - return model_provider(pre_process, post_process, add_encoder, add_decoder, parallel_output=False) - - # Set up model and load checkpoint. - model = get_model(wrapped_model_provider, model_type=ModelType.encoder_and_decoder, wrap_with_ddp=False) - - if args.load is not None: - _ = load_checkpoint(model, None, None) - - model = model[0] - - model.eval() - - config = get_evaluation_config() - - generate_and_write_samples(model, config) - - -if __name__ == "__main__": - main() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""Generate text using a vision language model.""" +import json +import logging +import os +import sys +from functools import partial + +# Add megatron to the path. 
+sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) +) + +import torch +import yaml +from config import EvaluationConfig +from evaluation.evaluation_datasets import get_evaluation_dataset +from model import model_provider +from multimodal_args import add_multimodal_extra_args + +from megatron.core import parallel_state +from megatron.core.enums import ModelType +from megatron.core.models.multimodal.llava_model import IMAGE_TOKEN +from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings +from megatron.inference.text_generation.api import generate_and_post_process +from megatron.inference.text_generation.forward_step import ForwardStep +from megatron.inference.text_generation.communication import broadcast_int_list +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.engines.mcore_engine import MCoreEngine +from megatron.core.inference.inference_request import InferenceRequest, VLMInferenceRequest +from megatron.core.inference.text_generation_controllers.vlm_text_generation_controller import ( + VLMTextGenerationController, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference.model_inference_wrappers.multimodal.vlm_inference_wrapper import ( + VLMInferenceWrapper, +) +from megatron.training import get_args, get_model, get_tokenizer, print_rank_0 +from megatron.training.checkpointing import load_checkpoint +from megatron.training.initialize import initialize_megatron + + +def add_text_generation_args(parser): + """Text generation arguments.""" + group = parser.add_argument_group(title='Vision language model text generation arguments') + + group.add_argument("--temperature", type=float, default=1.0, help='Sampling temperature.') + group.add_argument("--top_p", type=float, default=0.0, help='Top p sampling.') + group.add_argument("--top_k", type=int, default=0, help='Top k sampling.') + group.add_argument( + "--out-seq-length", type=int, default=128, help='Length of the output generated text.' + ) + group.add_argument("--output-path", type=str, help='Output file path') + group.add_argument('--input-image-path', type=str, help="Input image directory") + group.add_argument( + '--num-partitions', type=int, default=0, help="Number of partitions for inputs." + ) + group.add_argument('--partition-id', type=int, default=0, help="Partition index") + group.add_argument("--gt-path", type=str, help="Optional ground truth file") + group.add_argument( + "--task", + type=str, + choices=[ + "captioning", + "TextVQA", + "VQAv2", + "ChartQA", + "MMMU", + "VideoMME", + "OCRBench", + "MathVista", + "AI2D", + "InfoVQA", + "SPDocVQA", + ], + help="Generation task to run", + ) + group.add_argument( + "--num-samples-per-partition", type=int, default=0, help="Number of samples per partition" + ) + group.add_argument("--config-path", type=str, help="Evaluation config file to use.") + + group.add_argument("--use-mcore-inference", action="store_true", default=False, help="Use the MCore inference API") + + # Add common multimodal arguments needed for e.g. building the model. 
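+    # These shared flags are defined in multimodal_args.py and are expected to include, e.g.,
+    # --language-model-type, --vision-model-type and the tiling/pixel-shuffle options that
+    # generate_samples() below reads back via get_args().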
+ parser = add_multimodal_extra_args(parser) + + return parser + + +def get_evaluation_dataloader( + task, + input_image_path, + gt_path, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_samples_per_partition, + num_partitions, + partition_id, + num_frames, + num_workers, + vision_model_type, +): + """Build evaluation dataset.""" + dataset = get_evaluation_dataset( + task, + input_image_path, + gt_path, + img_h, + img_w, + use_tiling, + max_num_tiles, + use_thumbnail, + num_samples_per_partition, + num_partitions, + partition_id, + num_frames, + vision_model_type, + ) + + dp_rank = parallel_state.get_data_parallel_rank() + dp_world_size = parallel_state.get_data_parallel_world_size() + + sampler = torch.utils.data.DistributedSampler( + dataset, shuffle=False, num_replicas=dp_world_size, rank=dp_rank + ) + # TODO: Batched inference is not supported yet. + dataloader = torch.utils.data.DataLoader( + dataset, batch_size=None, num_workers=num_workers, sampler=sampler, pin_memory=True + ) + + return dataloader + + +def generate_samples(model, config: EvaluationConfig, print_output): + """Text generation using a trained vision language model.""" + args = get_args() + + dataloader = get_evaluation_dataloader( + config.task, + config.input_image_path, + config.gt_path, + args.img_h, + args.img_w, + args.use_tiling, + args.max_num_tiles, + args.use_thumbnail, + config.num_samples_per_partition, + config.num_partitions, + config.partition_id, + args.num_frames, + args.num_workers, + args.vision_model_type, + ) + + num_img_embeddings_per_tile = get_num_image_embeddings( + args.img_h, + args.img_w, + args.patch_dim, + args.vision_model_type, + args.disable_vision_class_token, + 1, + args.pixel_shuffle, + args.use_tile_tags, + ) + + if args.use_mcore_inference: + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=args.hidden_size, + inference_batch_times_seqlen_threshold=args.inference_batch_times_seqlen_threshold, + fp32_residual_connection=args.fp32_residual_connection, + params_dtype=args.params_dtype, + padded_vocab_size=args.padded_vocab_size, + ) + inference_wrapped_model = VLMInferenceWrapper(model, inference_wrapper_config) + tokenizer = get_tokenizer() + controller = VLMTextGenerationController( + inference_wrapped_model=inference_wrapped_model, tokenizer=tokenizer + ) + inference_engine = MCoreEngine( + controller, max_batch_size=1, random_seed=args.seed + ) + sampling_params = SamplingParams( + temperature=config.temperature, + top_k=config.top_k, + top_p=config.top_p, + num_tokens_to_generate=config.out_seq_length, + ) + + for idx, (imgs, num_tiles, sample_id, question, answers, metadata) in enumerate(dataloader): + imgs = imgs.to("cuda") + num_tiles = num_tiles.to("cuda") + + conv = get_conversation(config.task, question) + + if not args.use_mcore_inference: + forward_step = partial(VLMForwardStep, num_img_embeddings_per_tile, imgs, num_tiles, args.decoder_seq_length) + + + if is_first_rank(): + + if args.use_mcore_inference: + inference_request = VLMInferenceRequest( + request_id=inference_engine.get_new_request_id(), + prompt=conv, + prompt_tokens=controller.tokenize_prompt(conv), + inference_parameters=sampling_params, + num_img_embeddings_per_tile=num_img_embeddings_per_tile, + imgs=imgs, + num_tiles=num_tiles, + decoder_seq_length=args.decoder_seq_length, + ) + results: List[InferenceRequest] = inference_engine.generate( + inference_requests=[inference_request] + ) + + resp_sentences = [ + tokenizer.detokenize(result.prompt_tokens) + 
result.generated_text + for result in results + ] + else: + resp_sentences, _, _, _ = generate_and_post_process( + model, + forward_step=forward_step, + prompts=[conv], + tokens_to_generate=config.out_seq_length, + top_k_sampling=config.top_k, + top_p_sampling=config.top_p, + add_BOS=False, + temperature=config.temperature, + random_seed=args.seed, + detokenize_segments=False, + data_parallel=True, + ) + + for generation in resp_sentences: + if isinstance(sample_id, torch.Tensor): + sample_id = sample_id.item() + + output = {"sample_id": sample_id} + + output_name = "" + if config.task == "captioning": + output_name = "caption" + elif config.task in ( + "TextVQA", + "VQAv2", + "ChartQA", + "OCRBench", + "MathVista", + "AI2D", + "InfoVQA", + "SPDocVQA", + ): + output_name = "answer" + elif config.task in ("MMMU"): + output_name = "text" + elif config.task == "VideoMME": + output_name = "response" + output = question + else: + raise NotImplementedError("no output name defined for", config.task) + + prompt, generated = get_prompt_and_generated( + generation, args.tokenizer_prompt_format + ) + if config.task == "VideoMME": + output["questions"][0][output_name] = generated + else: + output["prompt"] = prompt + output[output_name] = generated + + if config.task == "captioning": + output["ground_truth"] = answers + elif config.task in ( + "TextVQA", + "VQAv2", + "ChartQA", + "OCRBench", + "MathVista", + "AI2D", + "InfoVQA", + "SPDocVQA", + ): + if isinstance(answers, str): + answers = [answers] + output["gt_answer"] = answers + + if len(metadata) > 0: + output.update(metadata) + elif config.task == "MMMU": + output["prediction"] = generated + output.update(metadata) + else: + raise NotImplementedError("no output processing defined for", config.task) + + if print_output: + print(output) + + yield output + idx += 1 + else: + if args.use_mcore_inference: + inference_request = VLMInferenceRequest( + request_id=inference_engine.get_new_request_id(), + prompt=conv, + prompt_tokens=controller.tokenize_prompt(conv), + inference_parameters=sampling_params, + num_img_embeddings_per_tile=num_img_embeddings_per_tile, + imgs=imgs, + num_tiles=num_tiles, + decoder_seq_length=args.decoder_seq_length, + ) + inference_engine.generate( + inference_requests=[inference_request] + ) + else: + generate_and_post_process( + model, forward_step=forward_step, detokenize_segments=False, data_parallel=True + ) + + idx += 1 + + +def get_evaluation_config(): + """Get evaluation config from a config file or command-line arguments.""" + args = get_args() + if args.config_path: + with open(args.config_path, "r") as f: + config_dict = yaml.safe_load(f) + + config = EvaluationConfig(**config_dict) + else: + config = EvaluationConfig( + task=args.task, + temperature=args.temperature, + top_p=args.top_p, + top_k=args.top_k, + out_seq_length=args.out_seq_length, + output_path=args.output_path, + input_image_path=args.input_image_path, + gt_path=args.gt_path, + num_partitions=args.num_partitions, + partition_id=args.partition_id, + num_samples_per_partition=args.num_samples_per_partition, + ) + + # Default output path if not defined... 
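+    # e.g. with --language-model-type mistral_7b and --task TextVQA, data-parallel rank 0 writes to
+    # generated/mistral_7b-TextVQA-dprank=0-partition=0.jsonl (see get_output_path() below).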
+ if not config.output_path: + os.makedirs("generated", exist_ok=True) + config.output_path = "generated/" + args.language_model_type + + return config + + +def is_first_rank(): + """First tensor and pipeline parallel rank.""" + return ( + parallel_state.is_pipeline_first_stage(ignore_virtual=True) + and parallel_state.get_tensor_model_parallel_rank() == 0 + ) + + +def get_output_path(config, dp_rank): + """Generation output path.""" + return ( + f"{config.output_path}-{config.task}-dprank={dp_rank}-partition={config.partition_id}.jsonl" + ) + + +def generate_and_write_samples(model, config, print_output=True): + """Generate text and write to an output file.""" + dp_rank = parallel_state.get_data_parallel_rank() + + if is_first_rank(): + output_path = get_output_path(config, dp_rank) + output_file = open(output_path, "w") + print(f"output path: {output_file.name}") + + with torch.no_grad(): + for output in generate_samples(model, config, print_output): + if is_first_rank(): + output_file.write(json.dumps(output) + "\n") + output_file.flush() + + if is_first_rank(): + output_file.close() + +class VLMForwardStep(ForwardStep): + """Inference forward step for a multimodal model.""" + + def __init__( + self, + num_img_embeddings_per_tile, + images, + num_tiles, + decoder_seq_length, + model, + max_batch_size, + max_sequence_length, + ): + """Create multimodal forward step.""" + total_num_tiles = torch.sum(num_tiles).item() + num_img_embeddings = num_img_embeddings_per_tile * total_num_tiles + + super().__init__(model, max_batch_size, max_sequence_length + num_img_embeddings) + self._images = images + self._num_tiles = num_tiles + self._num_img_embeddings = num_img_embeddings + self.decoder_seq_length = decoder_seq_length + + self._recv_only_vision_embeds = False + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + # Checks if the previous stage only has a vision encoder, and that the current stage has part of the LM decoder. + # In this case, the current stage should only receive vision embeddings. + if pp_rank > 0: + self._recv_only_vision_embeds = parallel_state.is_inside_encoder(pp_rank - 1) and (not parallel_state.is_inside_decoder(pp_rank - 1)) and parallel_state.is_inside_decoder() + + # Checks if the current stage only has a vision encoder + self._encoder_only = parallel_state.is_inside_encoder() and not parallel_state.is_inside_decoder() + + def _forward(self, tokens, position_ids, attention_mask): + return self.model( + self._images, + tokens, + position_ids, + attention_mask=None, + inference_params=self.inference_params, + num_image_tiles=self._num_tiles, + runtime_gather_output=True, + ) + + def __call__(self, tokens, position_ids, attention_mask): + num_image_tokens = (tokens == self.model.module.image_token_index).sum().item() + num_tokens = tokens.size(1) + recv_buffer_seq_length = None + if num_image_tokens > 0: + # When there are image tokens and this stage only receives vision embeddings, adjust the recv buffer seq length to match the image embeddings sequence length. + # If there are image tokens and this stage receives full embeddings, make sure we compensate for expansion of image tokens. + # Note that this will set a recv_buffer_seq_length for the encoder stage, this length is irrelevant since that recv buffer is never allocated. 
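+            # Illustrative arithmetic (hypothetical numbers, not from the repo): with 2 tiles at 576
+            # embeddings each, a 1024-token prompt containing 2 image tokens and decoder_seq_length
+            # 4096, a stage receiving full embeddings uses min(1152 + 1024 - 2, 4096) = 2174, while a
+            # stage receiving only vision embeddings uses 1152.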
+ if self._recv_only_vision_embeds: + recv_buffer_seq_length = self._num_img_embeddings + else: + recv_buffer_seq_length = min(self._num_img_embeddings + num_tokens - num_image_tokens, self.decoder_seq_length) + elif self._recv_only_vision_embeds: + # If this stage only receives vision embeddings and there are no image tokens we won't run the encoder and therefore shouldn't try to recv. + recv_buffer_seq_length = 0 + + # If the pipeline stage only has a vision encoder, then it only needs to run when there are image tokens + if not (self._encoder_only and num_image_tokens == 0): + output = super().__call__(tokens, position_ids, attention_mask, recv_buffer_seq_length=recv_buffer_seq_length) + else: + output = None + if isinstance(output, tuple): + logits, _ = output + else: + logits = output + + # On the first inference iteration, we compute image tokens. + # On every PP stage(although inference params should only matter for decoder), + # update the sequence length offset by the number of image tokens. + if num_tokens > 1 and num_image_tokens > 0: + if "image_tokens_count" not in self.inference_params.key_value_memory_dict: + self.inference_params.key_value_memory_dict["image_tokens_count"] = self._num_img_embeddings + + if self._num_img_embeddings + num_tokens - num_image_tokens > self.decoder_seq_length: + self.inference_params.sequence_len_offset += self.decoder_seq_length - num_tokens + else: + self.inference_params.sequence_len_offset += ( + self.inference_params.key_value_memory_dict["image_tokens_count"] - num_image_tokens + ) + + return logits + + +def get_conversation(task, question): + """Get a conversation for a given task and evaluation question.""" + conversation = [] + + # In all cases, the tokenizer adds possible header tokens for the assistant. + if task == "captioning": + conversation = [ + {"role": "system", "content": "Answer the questions."}, + { + "role": "user", + "content": f"{IMAGE_TOKEN}\nProvide a one-sentence caption for provided image.", + }, + ] + elif task in ("TextVQA", "VQAv2", "ChartQA", "InfoVQA", "SPDocVQA"): + conversation = [ + {"role": "system", "content": "Answer the questions."}, + { + "role": "user", + "content": f"{IMAGE_TOKEN}\n{question}\nAnswer the question using a single word or phrase.", + }, + ] + elif task in ("OCRBench", "MathVista", "AI2D"): + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": f"{IMAGE_TOKEN}\n{question}"}, + ] + elif task == "MMMU": + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": question}, + ] + elif task == "VideoMME": + q = ( + "Select the best answer to the following multiple-choice " + "question based on the video. 
Respond with only the letter " + "(A, B, C, or D) of the correct option.\n" + ) + q += question["questions"][0]["question"] + "\n" + q += question["questions"][0]["choices"][0] + "\n" + q += question["questions"][0]["choices"][1] + "\n" + q += question["questions"][0]["choices"][2] + "\n" + q += question["questions"][0]["choices"][3] + "\n" + + conversation = [ + {"role": "system", "content": "Answer the questions."}, + {"role": "user", "content": f"{IMAGE_TOKEN}\n{q}"}, + ] + + return conversation + + +def get_prompt_and_generated(prompt_and_generation, prompt_format): + """Strip prompt and other unnecessary text from generation.""" + if prompt_format in ("llama3", "llama3p1"): + splitted = prompt_and_generation.split("<|start_header_id|>assistant<|end_header_id|>\n\n") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("<|eot_id|>")[0] + elif prompt_format == "mistral": + splitted = prompt_and_generation.split("[/INST]") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("
</s>
")[0] + elif prompt_format == "chatml": + splitted = prompt_and_generation.split("<|im_start|> assistant\n") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("<|im_end|>")[0] + elif prompt_format in ("nvlm-yi-34b", "qwen2p0", "qwen2p5"): + splitted = prompt_and_generation.split("<|im_start|>assistant\n") + prompt = splitted[0] + generated = splitted[1] + generated = generated.split("<|im_end|>")[0] + else: + raise ValueError(f"Prompt format {prompt_format} is not supported.") + + # Remove possible garbage. + generated = generated.strip() + generated = generated.split("\n\n")[0] + generated = generated.split("\n")[0] + + return prompt, generated + + +def main(): + """Vision language model text generation.""" + initialize_megatron(extra_args_provider=add_text_generation_args) + + if torch.distributed.get_rank() == 0: + logging.getLogger(__name__).warning( + "Models using pipeline parallelism are not supported yet." + ) + + args = get_args() + + def wrapped_model_provider(pre_process, post_process, add_encoder, add_decoder): + return model_provider(pre_process, post_process, add_encoder, add_decoder, parallel_output=False) + + # Set up model and load checkpoint. + model = get_model(wrapped_model_provider, model_type=ModelType.encoder_and_decoder, wrap_with_ddp=False) + + if args.load is not None: + _ = load_checkpoint(model, None, None) + + model = model[0] + + model.eval() + + config = get_evaluation_config() + + generate_and_write_samples(model, config) + + +if __name__ == "__main__": + main() diff --git a/examples/multimodal/sft_mistral_clip.sh b/examples/multimodal/sft_mistral_clip.sh old mode 100644 new mode 100755 index 94ff208..57e6d46 --- a/examples/multimodal/sft_mistral_clip.sh +++ b/examples/multimodal/sft_mistral_clip.sh @@ -1,130 +1,130 @@ -#!/bin/bash -# Run SFT on a pretrained multimodal model - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -MODEL_NAME="mcore-llava-mistral-7b-instruct-clip336-sft" - -# Check that the user has set an output path for model checkpoints. -if [[ -z $WORKSPACE ]]; then - echo "Please set WORKSPACE for storing your model checkpoints." - exit 1 -fi - -SOURCE=`pwd` -OUTPUT_BASE="${WORKSPACE}/output" -OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" - -FINETUNE_DIR=${OUTPUT}/checkpoints -LOGS_DIR="${OUTPUT}/logs" -TENSORBOARD_DIR="${OUTPUT}/tensorboard" - -if [[ -z $LOAD_NAME ]]; then - echo "Please set LOAD_NAME for input model name." - exit 1 -fi - -if [[ -z $LOAD_ITER ]]; then - echo "Please set LOAD_ITER for pre-trained input model iteration." 
- exit 1 -fi - -CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}/checkpoints" - -DATA_TRAIN="${SOURCE}/examples/multimodal/sft_dataset.yaml" - -DEBUG=0 -if [[ $DEBUG -eq 1 ]]; then - BZ=8 - NW=1 - HD=0.0 - LI=1 - EXTRA_ARGS="" - NONDETERMINISTIC_ATTN=1 -else - BZ=128 - NW=2 - HD=0.1 - LI=10 - EXTRA_ARGS="" - NONDETERMINISTIC_ATTN=1 -fi - -OPTIONS=" \ - --apply-layernorm-1p \ - --attention-softmax-in-fp32 \ - --use-checkpoint-args \ - --use-distributed-optimizer \ - --transformer-impl transformer_engine \ - --use-te \ - --normalization RMSNorm \ - --group-query-attention \ - --num-query-groups 8 \ - --no-masked-softmax-fusion \ - --num-workers ${NW} \ - --exit-duration-in-mins 230 \ - --use-flash-attn \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout ${HD} \ - --tensor-model-parallel-size 4 \ - --pipeline-model-parallel-size 1 \ - --num-layers 32 \ - --hidden-size 4096 \ - --num-attention-heads 32 \ - --seq-length 576 \ - --decoder-seq-length 2048 \ - --max-position-embeddings 4096 \ - --ffn-hidden-size 14336 \ - --train-iters 20000 \ - --micro-batch-size 1 \ - --global-batch-size ${BZ} \ - --lr-decay-iters 20000 \ - --lr-warmup-fraction .01 \ - --lr 1e-6 \ - --min-lr 1e-7 \ - --lr-decay-style cosine \ - --log-interval ${LI} \ - --eval-iters 10 \ - --eval-interval 500 \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ - --tokenizer-prompt-format mistral \ - --data-path ${DATA_TRAIN} \ - --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ - --save-interval 500 \ - --save ${FINETUNE_DIR} \ - --load ${FINETUNE_DIR} \ - --pretrained-checkpoint ${CHECKPOINT_DIR} \ - --dataloader-save ${FINETUNE_DIR}/dataloader \ - --split 100,0,0 \ - --clip-grad 0.5 \ - --weight-decay 0.1 \ - --adam-beta1 0.9 \ - --adam-beta2 0.95 \ - --init-method-std 0.014 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - --eod-mask-loss \ - --freeze-ViT \ - --patch-dim 14 \ - --img-h 336 \ - --img-w 336 \ - --dataloader-type external \ - --tensorboard-dir ${TENSORBOARD_DIR} \ - --language-model-type=mistral_7b \ - --disable-vision-class-token \ - ${EXTRA_ARGS} \ - --distributed-timeout-minutes 60 \ - --ckpt-format torch -" - -export NVTE_APPLY_QK_LAYER_SCALING=0 -export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${NONDETERMINISTIC_ATTN} - -torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} +#!/bin/bash +# Run SFT on a pretrained multimodal model + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +MODEL_NAME="mcore-llava-mistral-7b-instruct-clip336-sft" + +# Check that the user has set an output path for model checkpoints. +if [[ -z $WORKSPACE ]]; then + echo "Please set WORKSPACE for storing your model checkpoints." + exit 1 +fi + +SOURCE=`pwd` +OUTPUT_BASE="${WORKSPACE}/output" +OUTPUT="${OUTPUT_BASE}/${MODEL_NAME}" + +FINETUNE_DIR=${OUTPUT}/checkpoints +LOGS_DIR="${OUTPUT}/logs" +TENSORBOARD_DIR="${OUTPUT}/tensorboard" + +if [[ -z $LOAD_NAME ]]; then + echo "Please set LOAD_NAME for input model name." + exit 1 +fi + +if [[ -z $LOAD_ITER ]]; then + echo "Please set LOAD_ITER for pre-trained input model iteration." 
+ exit 1 +fi + +CHECKPOINT_DIR="${WORKSPACE}/${LOAD_NAME}/checkpoints" + +DATA_TRAIN="${SOURCE}/examples/multimodal/sft_dataset.yaml" + +DEBUG=0 +if [[ $DEBUG -eq 1 ]]; then + BZ=8 + NW=1 + HD=0.0 + LI=1 + EXTRA_ARGS="" + NONDETERMINISTIC_ATTN=1 +else + BZ=128 + NW=2 + HD=0.1 + LI=10 + EXTRA_ARGS="" + NONDETERMINISTIC_ATTN=1 +fi + +OPTIONS=" \ + --apply-layernorm-1p \ + --attention-softmax-in-fp32 \ + --use-checkpoint-args \ + --use-distributed-optimizer \ + --transformer-impl transformer_engine \ + --use-te \ + --normalization RMSNorm \ + --group-query-attention \ + --num-query-groups 8 \ + --no-masked-softmax-fusion \ + --num-workers ${NW} \ + --exit-duration-in-mins 230 \ + --use-flash-attn \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout ${HD} \ + --tensor-model-parallel-size 4 \ + --pipeline-model-parallel-size 1 \ + --num-layers 32 \ + --hidden-size 4096 \ + --num-attention-heads 32 \ + --seq-length 576 \ + --decoder-seq-length 2048 \ + --max-position-embeddings 4096 \ + --ffn-hidden-size 14336 \ + --train-iters 20000 \ + --micro-batch-size 1 \ + --global-batch-size ${BZ} \ + --lr-decay-iters 20000 \ + --lr-warmup-fraction .01 \ + --lr 1e-6 \ + --min-lr 1e-7 \ + --lr-decay-style cosine \ + --log-interval ${LI} \ + --eval-iters 10 \ + --eval-interval 500 \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ + --tokenizer-prompt-format mistral \ + --data-path ${DATA_TRAIN} \ + --prompt-path ${SOURCE}/examples/multimodal/manual_prompts.json \ + --save-interval 500 \ + --save ${FINETUNE_DIR} \ + --load ${FINETUNE_DIR} \ + --pretrained-checkpoint ${CHECKPOINT_DIR} \ + --dataloader-save ${FINETUNE_DIR}/dataloader \ + --split 100,0,0 \ + --clip-grad 0.5 \ + --weight-decay 0.1 \ + --adam-beta1 0.9 \ + --adam-beta2 0.95 \ + --init-method-std 0.014 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + --eod-mask-loss \ + --freeze-ViT \ + --patch-dim 14 \ + --img-h 336 \ + --img-w 336 \ + --dataloader-type external \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --language-model-type=mistral_7b \ + --disable-vision-class-token \ + ${EXTRA_ARGS} \ + --distributed-timeout-minutes 60 \ + --ckpt-format torch +" + +export NVTE_APPLY_QK_LAYER_SCALING=0 +export NVTE_ALLOW_NONDETERMINISTIC_ALGO=${NONDETERMINISTIC_ATTN} + +torchrun --nproc_per_node 8 examples/multimodal/train.py ${OPTIONS} diff --git a/examples/multimodal/text_generation_mistral_clip.sh b/examples/multimodal/text_generation_mistral_clip.sh old mode 100644 new mode 100755 index c1ef7bc..ed12b38 --- a/examples/multimodal/text_generation_mistral_clip.sh +++ b/examples/multimodal/text_generation_mistral_clip.sh @@ -1,109 +1,109 @@ -#!/bin/bash - -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 -export NVTE_APPLY_QK_LAYER_SCALING=0 - -INPUT_IMAGE_PATH="placeholder" -GROUNDTRUTH_PATH="placeholder" -NUM_FRAMES=1 - -while [[ $# -gt 0 ]]; do - case $1 in - -i|--input-image-path) - INPUT_IMAGE_PATH="$2" - shift - shift - ;; - --num-frames) - NUM_FRAMES="$2" - shift - shift - ;; - -o|--output-path) - OUTPUT_PATH="$2" - shift - shift - ;; - -m|--model-path) - MODEL_PATH="$2" - shift - shift - ;; - -t|--task) - TASK="$2" - shift - shift - ;; - -g|--gt-path) - GROUNDTRUTH_PATH="$2" - shift - shift - ;; - -*|--*) - echo "Invalid option $1" - exit 1 - ;; - esac -done - -# Please modify these as needed. 
-NUM_PARTITIONS=0 -START=0 -END=0 - -for PARTITION_ID in $( eval echo {$START..$END} ) -do - torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ - --apply-layernorm-1p \ - --attention-softmax-in-fp32 \ - --use-flash-attn \ - --transformer-impl transformer_engine \ - --use-te \ - --use-checkpoint-args \ - --normalization RMSNorm \ - --language-model-type mistral_7b \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --position-embedding-type rope \ - --rotary-percent 1.0 \ - --rotary-base 1000000 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --tensor-model-parallel-size 4 \ - --pipeline-model-parallel-size 1 \ - --group-query-attention \ - --num-query-groups 8 \ - --num-layers 32 \ - --hidden-size 4096 \ - --ffn-hidden-size 14336 \ - --num-attention-heads 32 \ - --max-position-embeddings 4096 \ - --no-masked-softmax-fusion \ - --load ${MODEL_PATH} \ - --tokenizer-type MultimodalTokenizer \ - --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ - --tokenizer-prompt-format mistral \ - --bf16 \ - --micro-batch-size 1 \ - --seq-length 2048 \ - --out-seq-length 12 \ - --temperature 1.0 \ - --img-h 336 \ - --img-w 336 \ - --patch-dim 14 \ - --seed 153 \ - --top_k 1 \ - --no-load-rng \ - --no-load-optim \ - --input-image-path ${INPUT_IMAGE_PATH} \ - --num-partitions ${NUM_PARTITIONS} \ - --partition-id ${PARTITION_ID} \ - --output-path ${OUTPUT_PATH} \ - --gt-path ${GROUNDTRUTH_PATH} \ - --task ${TASK} \ - --disable-vision-class-token \ - --num-frames ${NUM_FRAMES} \ - --ckpt-format torch -done +#!/bin/bash + +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 +export NVTE_APPLY_QK_LAYER_SCALING=0 + +INPUT_IMAGE_PATH="placeholder" +GROUNDTRUTH_PATH="placeholder" +NUM_FRAMES=1 + +while [[ $# -gt 0 ]]; do + case $1 in + -i|--input-image-path) + INPUT_IMAGE_PATH="$2" + shift + shift + ;; + --num-frames) + NUM_FRAMES="$2" + shift + shift + ;; + -o|--output-path) + OUTPUT_PATH="$2" + shift + shift + ;; + -m|--model-path) + MODEL_PATH="$2" + shift + shift + ;; + -t|--task) + TASK="$2" + shift + shift + ;; + -g|--gt-path) + GROUNDTRUTH_PATH="$2" + shift + shift + ;; + -*|--*) + echo "Invalid option $1" + exit 1 + ;; + esac +done + +# Please modify these as needed. 
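+# NUM_PARTITIONS/START/END shard the evaluation data across runs: run_text_generation.py selects the
+# slice for --partition-id and writes one JSONL file per partition. For example (illustrative values),
+# NUM_PARTITIONS=4 with START=0 and END=3 runs partitions 0..3 in sequence, each producing its own
+# output file.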
+NUM_PARTITIONS=0 +START=0 +END=0 + +for PARTITION_ID in $( eval echo {$START..$END} ) +do + torchrun --nproc_per_node 8 examples/multimodal/run_text_generation.py \ + --apply-layernorm-1p \ + --attention-softmax-in-fp32 \ + --use-flash-attn \ + --transformer-impl transformer_engine \ + --use-te \ + --use-checkpoint-args \ + --normalization RMSNorm \ + --language-model-type mistral_7b \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --position-embedding-type rope \ + --rotary-percent 1.0 \ + --rotary-base 1000000 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --tensor-model-parallel-size 4 \ + --pipeline-model-parallel-size 1 \ + --group-query-attention \ + --num-query-groups 8 \ + --num-layers 32 \ + --hidden-size 4096 \ + --ffn-hidden-size 14336 \ + --num-attention-heads 32 \ + --max-position-embeddings 4096 \ + --no-masked-softmax-fusion \ + --load ${MODEL_PATH} \ + --tokenizer-type MultimodalTokenizer \ + --tokenizer-model mistralai/Mistral-7B-Instruct-v0.3 \ + --tokenizer-prompt-format mistral \ + --bf16 \ + --micro-batch-size 1 \ + --seq-length 2048 \ + --out-seq-length 12 \ + --temperature 1.0 \ + --img-h 336 \ + --img-w 336 \ + --patch-dim 14 \ + --seed 153 \ + --top_k 1 \ + --no-load-rng \ + --no-load-optim \ + --input-image-path ${INPUT_IMAGE_PATH} \ + --num-partitions ${NUM_PARTITIONS} \ + --partition-id ${PARTITION_ID} \ + --output-path ${OUTPUT_PATH} \ + --gt-path ${GROUNDTRUTH_PATH} \ + --task ${TASK} \ + --disable-vision-class-token \ + --num-frames ${NUM_FRAMES} \ + --ckpt-format torch +done diff --git a/examples/multimodal/train.py b/examples/multimodal/train.py index 1dc68d1..72b141d 100644 --- a/examples/multimodal/train.py +++ b/examples/multimodal/train.py @@ -1,300 +1,416 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -"""Pretrain or SFT multimodal.""" -import os -import sys -from functools import partial - -import torch -import yaml - -sys.path.append( - os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) -) - -from dataloader_provider import train_valid_test_dataloaders_provider, is_first_or_last_stage -from model import model_provider -from multimodal_args import add_multimodal_extra_args - -from megatron.core import mpu, tensor_parallel -from megatron.core.enums import ModelType -from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, LLaVAModel -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.parallel_state import ( - get_tensor_model_parallel_rank, - get_pipeline_model_parallel_world_size, - is_pipeline_last_stage, -) -from megatron.training import get_args, get_timers, get_tokenizer, pretrain -from megatron.training.utils import is_last_rank - - -def get_batch(data_iterator): - """Generate a batch - - Note: attn_mask_type in layer_specs.py sets the attention mask. Attention mask is None here. - """ - imgs = None - tokens = None - labels = None - loss_mask = None - attention_mask = None - position_ids = None - num_tiles = None - packed_seq_params = None - - args = get_args() - - # Dataloader doesn't run on the middle stages in a pipeline parallel model. - pp_size = get_pipeline_model_parallel_world_size() - if not is_first_or_last_stage(pp_size, args.encoder_pipeline_model_parallel_size): - # Note these are all set to None above. - return tokens, labels, loss_mask, attention_mask, position_ids, imgs, num_tiles, packed_seq_params - - # Broadcast data. 
- torch.cuda.nvtx.range_push("get_data") - if data_iterator is not None and get_tensor_model_parallel_rank() == 0: - data = next(data_iterator) - else: - data = None - - data_text = tensor_parallel.broadcast_data(["tokens"], data, torch.int64)["tokens"] - labels = tensor_parallel.broadcast_data(["labels"], data, torch.int64)["labels"] - - imgs = tensor_parallel.broadcast_data(["imgs"], data, torch.float32)["imgs"] - num_tiles = tensor_parallel.broadcast_data(["num_tiles"], data, torch.int32)["num_tiles"] - - cu_lengths = tensor_parallel.broadcast_data(["cu_lengths"], data, torch.int32)["cu_lengths"] - max_lengths = tensor_parallel.broadcast_data(["max_lengths"], data, torch.int32)["max_lengths"] - - # No image input (text-only sample) if the dataloader produced a dummy image. - if imgs.shape == torch.Size([1, 1]): - # FIXME: text-only data can cause a hang if the vision model is own its own pipeline rank and --freeze-ViT is enabled. - imgs = torch.tensor([], dtype=torch.float32, device=data_text.device) - num_tiles = torch.tensor([], dtype=torch.int, device=data_text.device) - - # Last pipeline parallel stage doesn't need images. - if pp_size > 1 and is_pipeline_last_stage(): - imgs = None - - # If cu_lengths and max_lengths are non-dummy, construct PackedSeqParams. Otherwise, leave it at None. - if cu_lengths.shape != torch.Size([1, 1]): - assert ( - cu_lengths.shape[0] == max_lengths.shape[0] == 1 - ), "micro-batch-size must be 1 for packing" - cu_lengths = cu_lengths[0] - max_lengths = max_lengths[0] - - packed_seq_params = PackedSeqParams( - qkv_format="thd", - cu_seqlens_q=cu_lengths, - cu_seqlens_kv=cu_lengths, - max_seqlen_q=max_lengths, - max_seqlen_kv=max_lengths, - ) - - torch.cuda.nvtx.range_pop() - - tokens_ = data_text.long() - - torch.cuda.nvtx.range_push("index tokens") - tokenizer = get_tokenizer() - text_length = tokens_.shape[1] - tokens = tokens_[:, :text_length].contiguous() - labels = labels[:, 1 : text_length + 1].contiguous() - - assert tokens.shape == labels.shape, f"tokens: {tokens.shape} != labels: {labels.shape}" - torch.cuda.nvtx.range_pop() - - torch.cuda.nvtx.range_push("get_ltor_masks_and_position_ids") - loss_mask, position_ids = get_ltor_masks_and_position_ids(tokens, labels, tokenizer.pad) - torch.cuda.nvtx.range_pop() - - return ( - tokens, - labels, - loss_mask, - attention_mask, - position_ids, - imgs, - num_tiles, - packed_seq_params, - ) - - -def get_ltor_masks_and_position_ids(input_ids, target, pad_token): - """Build masks and position id for left to right model.""" - seq_length = input_ids.shape[1] - - # Position ids. - position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) - position_ids = position_ids.unsqueeze(0).expand_as(input_ids) - - # Loss mask. 
- loss_mask = torch.ones(target.size(), dtype=torch.float, device=input_ids.device) - loss_mask[target == pad_token] = 0.0 # mask paddings - loss_mask[target == IGNORE_INDEX] = 0.0 # mask prompts - - return loss_mask, position_ids - - -def loss_func(loss_mask, output_tensor): - losses = output_tensor.float() - - loss_mask = loss_mask.contiguous().view(-1).float() - - total_tokens = loss_mask.sum() - total_loss = torch.sum(losses.view(-1) * loss_mask) - loss = torch.cat([total_loss.view(1), total_tokens.view(1)]) - - reporting_loss = loss.clone().detach() - torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) - - local_num_tokens = loss[1].clone().detach().to(torch.int) - - return (total_loss, local_num_tokens, {'lm loss': (reporting_loss[0], reporting_loss[1])}) - - -def forward_step(data_iterator, model: LLaVAModel): - """Forward training step. - - Args: - data_iterator (torch.utils.data.dataloader): Input data iterator - model: Multimodal model - - Returns: - output_tensor (torch.Tensor): Loss of shape [b, s] if labels are provided, otherwise logits of shape [b, s, vocab_size]. - loss_func (callable): Loss function with a loss mask specified. - """ - timers = get_timers() - - # Get the batch. - timers('batch-generator', log_level=2).start() - ( - tokens, - labels, - loss_mask, - attention_mask, - position_ids, - images, - num_image_tiles, - packed_seq_params, - ) = get_batch(data_iterator) - timers('batch-generator').stop() - - output_tensor, loss_mask = model( - images, - tokens, - position_ids, - attention_mask, - labels, - loss_mask, - num_image_tiles=num_image_tiles, - packed_seq_params=packed_seq_params, - ) - - return output_tensor, partial(loss_func, loss_mask) - - -def llava_embedding_ranks(pp_ranks): - """LLava's embedding ranks consist of the decoder's first and last ranks (ie, the ViT has no embeddings). - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - last_rank = pp_ranks[-1] - if len(pp_ranks) == 1 or pp_ranks[epp] == last_rank: - return [last_rank] - else: - return [pp_ranks[epp], last_rank] - - -def llava_position_embedding_ranks(pp_ranks): - """LLava's embedding ranks consist of the singular rank of the model or the decoder's first rank. - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - last_rank = pp_ranks[-1] - if len(pp_ranks) == 1: - return [last_rank] - else: - return [pp_ranks[epp]] - - -def run_online_eval(model): - """Run an evaluation benchmark during training.""" - args = get_args() - - # Online evaluation config is not defined. Do nothing. - if not args.online_evaluation_config: - return [] - - from config import EvaluationConfig - from run_text_generation import generate_and_write_samples - - with open(args.online_evaluation_config, "r") as f: - config_dict = yaml.safe_load(f) - - config = EvaluationConfig(**config_dict) - - # The inference code assumes the first rank is the leader. - # Tensorboard writer is on the last rank. - # We must write to a storage space that all ranks see. - output_dir = os.path.join(args.save, "online_eval") - os.makedirs(output_dir, exist_ok=True) - config.output_path = os.path.join(output_dir, args.language_model_type) - - # The actual generation. 
- generate_and_write_samples(model[0].module, config, print_output=False) - - # Make sure the first rank is done writing so that the last rank can run eval. - torch.distributed.barrier() - - if not is_last_rank(): - return [] - - # Run evaluation. - if config.task == "TextVQA": - from evaluate_textvqa import textvqa_eval - - avg_acc = textvqa_eval(config.output_path) - - return [{"TextVQA accuracy": avg_acc}] - else: - raise NotImplementedError(f"online evaluation of {config.task} not implemented yet") - - -def write_online_eval_to_tensorboard(data, iteration, writer): - """Write online evaluation data to Tensorboard.""" - if not writer: - return - - for item in data: - for k, v in item.items(): - writer.add_scalar(k, v, iteration) - - -if __name__ == "__main__": - - train_valid_test_dataloaders_provider.is_distributed = True - - pretrain( - train_valid_test_dataloaders_provider, - model_provider, - ModelType.encoder_and_decoder, - forward_step, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, - extra_args_provider=add_multimodal_extra_args, - process_non_loss_data_func=write_online_eval_to_tensorboard, - get_embedding_ranks=llava_embedding_ranks, - get_position_embedding_ranks=llava_position_embedding_ranks, - non_loss_data_func=run_online_eval, - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""Pretrain or SFT multimodal.""" +import math +import os +import sys +from functools import partial + +import torch +import yaml + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) +) + +from dataloader_provider import train_valid_test_dataloaders_provider, is_first_or_last_stage +from model import model_provider +from multimodal_args import add_multimodal_extra_args + +from megatron.core import mpu, tensor_parallel +from megatron.core.enums import ModelType +from megatron.core.models.multimodal import context_parallel +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, LLaVAModel +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.parallel_state import ( + get_tensor_model_parallel_rank, + get_pipeline_model_parallel_world_size, + is_pipeline_last_stage, +) +from megatron.training import get_args, get_timers, get_tokenizer, pretrain +from megatron.training.utils import is_last_rank, get_batch_on_this_cp_rank + + +def get_batch(data_iterator, image_token_index, img_seq_len): + """Generate a batch + + Note: attn_mask_type in layer_specs.py sets the attention mask. Attention mask is None here. + """ + imgs = None + tokens = None + labels = None + loss_mask = None + attention_mask = None + position_ids = None + num_tiles = None + packed_seq_params = None + + args = get_args() + + # Dataloader doesn't run on the middle stages in a pipeline parallel model. + pp_size = get_pipeline_model_parallel_world_size() + if not is_first_or_last_stage(pp_size, args.encoder_pipeline_model_parallel_size): + # Note these are all set to None above. + return tokens, labels, loss_mask, attention_mask, position_ids, imgs, num_tiles, packed_seq_params + + # Broadcast data. 
+    torch.cuda.nvtx.range_push("get_data")
+    if data_iterator is not None and get_tensor_model_parallel_rank() == 0:
+        data = next(data_iterator)
+    else:
+        data = None
+
+    data_text = tensor_parallel.broadcast_data(["tokens"], data, torch.int64)["tokens"]
+    labels = tensor_parallel.broadcast_data(["labels"], data, torch.int64)["labels"]
+
+    imgs = tensor_parallel.broadcast_data(["imgs"], data, torch.float32)["imgs"]
+    num_tiles = tensor_parallel.broadcast_data(["num_tiles"], data, torch.int32)["num_tiles"]
+
+    cu_lengths = tensor_parallel.broadcast_data(["cu_lengths"], data, torch.int32)["cu_lengths"]
+    max_lengths = tensor_parallel.broadcast_data(["max_lengths"], data, torch.int32)["max_lengths"]
+
+    # No image input (text-only sample) if the dataloader returned a dummy [1, 1] image.
+    if imgs.shape == torch.Size([1, 1]):
+        # FSDP can hang with text-only samples. A workaround is to run a valid dummy image through
+        # the vision model and then add the image embeddings with a zero multiplier.
+        if args.use_torch_fsdp2:
+            imgs = torch.zeros((1, 3, args.img_h, args.img_w), dtype=torch.float32, device=data_text.device)
+            num_tiles = torch.tensor([], dtype=torch.int, device=data_text.device)
+        else:
+            # A similar workaround is not needed without FSDP, so we can use an empty image.
+            # FIXME: text-only data can still cause a hang in the special case where
+            # the vision model is on its own pipeline rank and --freeze-ViT is enabled.
+            imgs = torch.tensor([], dtype=torch.float32, device=data_text.device)
+            num_tiles = torch.tensor([], dtype=torch.int, device=data_text.device)
+
+    # Last pipeline parallel stage doesn't need images.
+    if pp_size > 1 and is_pipeline_last_stage():
+        imgs = None
+
+    # If cu_lengths and max_lengths are non-dummy, construct PackedSeqParams. Otherwise, leave it at None.
+    if cu_lengths.shape != torch.Size([1, 1]):
+        assert (
+            cu_lengths.shape[0] == max_lengths.shape[0] == 1
+        ), "micro-batch-size must be 1 for packing"
+        cu_lengths = cu_lengths[0]
+        max_lengths = max_lengths[0]
+
+        packed_seq_params = PackedSeqParams(
+            qkv_format="thd",
+            cu_seqlens_q=cu_lengths,
+            cu_seqlens_kv=cu_lengths,
+            max_seqlen_q=max_lengths,
+            max_seqlen_kv=max_lengths,
+        )
+
+    torch.cuda.nvtx.range_pop()
+
+    tokens_ = data_text.long()
+
+    torch.cuda.nvtx.range_push("index tokens")
+    tokenizer = get_tokenizer()
+    text_length = tokens_.shape[1]
+    tokens = tokens_[:, :text_length].contiguous()
+    labels = labels[:, 1 : text_length + 1].contiguous()
+
+    assert tokens.shape == labels.shape, f"tokens: {tokens.shape} != labels: {labels.shape}"
+    torch.cuda.nvtx.range_pop()
+
+    torch.cuda.nvtx.range_push("get_ltor_masks_and_position_ids")
+    loss_mask, position_ids = get_ltor_masks_and_position_ids(tokens, labels, tokenizer.pad)
+    torch.cuda.nvtx.range_pop()
+
+    # If context parallel is enabled, inputs must be sharded across the CP ranks.
+    if args.context_parallel_size > 1 or args.sequence_parallel:
+        assert tokens.shape[0] == 1, "micro-batch-size > 1 not supported yet with CP"
+
+        num_image_tokens = torch.sum(tokens == image_token_index).item()
+        num_image_embeddings = num_image_tokens * img_seq_len - num_image_tokens
+        seq_len = text_length + num_image_embeddings
+
+        # CP expects the sequence length to be divisible by the CP size, so apply padding.
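For intuition, a rough standalone sketch of this kind of padding computation follows. It is not the repository's context_parallel.get_padding; the divisibility factor here is an assumption (TransformerEngine's load-balanced context parallelism typically wants chunks divisible by 2 x CP size, and sequence parallelism wants divisibility by the TP size).

    import math

    def illustrative_get_padding(seq_len, cp_size, tp_size, sequence_parallel):
        # Assumed divisibility factor; the real helper may choose it differently.
        divisor = 1
        if cp_size > 1:
            divisor = math.lcm(divisor, 2 * cp_size)
        if sequence_parallel:
            divisor = math.lcm(divisor, tp_size)
        padded_len = math.ceil(seq_len / divisor) * divisor
        return padded_len - seq_len

    # e.g. seq_len=1001, cp_size=2, tp_size=4, sequence_parallel=True -> divisor 4, padding 3.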
+        mp_padding_needed = context_parallel.get_padding(
+            seq_len, args.context_parallel_size,
+            args.tensor_model_parallel_size, args.sequence_parallel,
+        )
+        tokens, position_ids, labels, loss_mask = [torch.nn.functional.pad(item, (0, mp_padding_needed)) for item in (tokens, position_ids, labels, loss_mask)]
+
+        # Get PackedSeqParams that indicate the amount of padding for TransformerEngine.
+        packed_seq_params = context_parallel.get_packed_seq_params(tokens, num_image_embeddings, mp_padding_needed, args.context_parallel_size, True)
+
+    return (
+        tokens,
+        labels,
+        loss_mask,
+        attention_mask,
+        position_ids,
+        imgs,
+        num_tiles,
+        packed_seq_params,
+    )
+
+
+def get_ltor_masks_and_position_ids(input_ids, target, pad_token):
+    """Build masks and position id for left to right model."""
+    seq_length = input_ids.shape[1]
+
+    # Position ids.
+    position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
+    position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
+
+    # Loss mask.
+    loss_mask = torch.ones(target.size(), dtype=torch.float, device=input_ids.device)
+    loss_mask[target == pad_token] = 0.0  # mask paddings
+    loss_mask[target == IGNORE_INDEX] = 0.0  # mask prompts
+
+    return loss_mask, position_ids
+
+
+def get_mask_start_and_end_idx(arr):
+    """
+    Return a list of (start, end) index tuples for the contiguous non-zero sub-arrays of arr.
+
+    For instance, if arr = [0, 1, 0, 0, 1, 1]
+    get_mask_start_and_end_idx(arr) = [(1, 1), (4, 5)]
+    such that arr[1:1+1] = [1] and arr[4:5+1] = [1, 1]
+    """
+    mask = (arr != 0)
+
+    mask_int = mask.int()
+
+    diff = mask_int[1:] - mask_int[:-1]
+    start_indices = (diff == 1).nonzero(as_tuple=False).flatten() + 1
+    end_indices = (diff == -1).nonzero(as_tuple=False).flatten()
+    if len(mask) == 0:
+        return []
+    if mask[0]:
+        start_indices = torch.cat((torch.tensor([0], device=arr.device), start_indices))
+    if mask[-1]:
+        end_indices = torch.cat((end_indices, torch.tensor([len(arr) - 1], device=arr.device)))
+    sequences = list(zip(start_indices.tolist(), end_indices.tolist()))
+    return sequences
+
+
+def scaled_loss_func(loss_mask, output_tensor):
+    """
+    Scaled loss function.
+
+    Scale the loss for each conversation turn using the formula:
+
+    1 / sum_j[ sqrt(length(loss_turn_j)) ] * sum_i[ sum(loss_turn_i) / sqrt(length(loss_turn_i)) ]
+
+    Where we use the loss mask to infer the start / end of the conversation turns.
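A quick worked example of the formula above (values chosen for illustration only): with two turns containing 4 and 9 labelled tokens and a uniform per-token loss of 1.0, the scaled loss is still 1.0, so long turns do not dominate short ones.

    import math

    turn_lengths = [4, 9]        # labelled tokens per conversation turn
    turn_loss_sums = [4.0, 9.0]  # uniform per-token loss of 1.0, summed per turn

    base = sum(math.sqrt(n) for n in turn_lengths)  # sqrt(4) + sqrt(9) = 5
    scaled = sum(s / math.sqrt(n) for s, n in zip(turn_loss_sums, turn_lengths)) / base
    assert abs(scaled - 1.0) < 1e-9  # (4 / 2 + 9 / 3) / 5 == 1.0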
+ """ + losses = output_tensor.float() + + loss_list = [] + num_valid_labels_list = [] + for idx in range(losses.shape[0]): + loss_this_sample = losses[idx] + turn_start_end_list = get_mask_start_and_end_idx(loss_mask[idx]) + for turn_start, turn_end in turn_start_end_list: + # compute loss for each turn + loss_this_turn = loss_this_sample[turn_start:turn_end+1].sum() + assert (1 - loss_mask)[idx][turn_start:turn_end+1].sum() < 1.0 + num_valid_labels_this_turn = turn_end - turn_start + 1 + loss_this_turn = loss_this_turn / num_valid_labels_this_turn + loss_list.append(loss_this_turn) + # append num of valid labels for each turn + num_valid_labels_list.append(num_valid_labels_this_turn) + base_num = sum([math.sqrt(each) for each in num_valid_labels_list]) + for idx in range(len(loss_list)): + # normalize loss for each turn + loss_list[idx] = loss_list[idx] * math.sqrt(num_valid_labels_list[idx]) / base_num + + total_loss = torch.stack(loss_list).sum() + total_tokens = torch.ones_like(total_loss) + + loss = torch.cat([total_loss.view(1), total_tokens.view(1)]) + + reporting_loss = loss.clone().detach() + torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) + + local_num_tokens = loss[1].clone().detach().to(torch.int) + + return ( + total_loss, + local_num_tokens, + {'lm loss': (reporting_loss[0], reporting_loss[1])}, + ) + + +def loss_func(loss_mask, output_tensor): + args = get_args() + + losses = output_tensor.float() + + loss_mask = loss_mask.contiguous().view(-1).float() + + total_tokens = loss_mask.sum() + total_loss = torch.sum(losses.view(-1) * loss_mask) + loss = torch.cat([total_loss.view(1), total_tokens.view(1)]) + + if args.context_parallel_size > 1: + torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) + + reporting_loss = loss.clone().detach() + torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) + + local_num_tokens = loss[1].clone().detach().to(torch.int) + + # We multiply by context parallel size because later there will be a divide by CP(+DP) size. + return ( + loss[0] * args.context_parallel_size, + local_num_tokens, + {'lm loss': (reporting_loss[0], reporting_loss[1])} + ) + + +def forward_step(data_iterator, model: LLaVAModel): + """Forward training step. + + Args: + data_iterator (torch.utils.data.dataloader): Input data iterator + model: Multimodal model + + Returns: + output_tensor (torch.Tensor): Loss of shape [b, s] if labels are provided, otherwise logits of shape [b, s, vocab_size]. + loss_func (callable): Loss function with a loss mask specified. + """ + timers = get_timers() + + # Get the batch. + timers('batch-generator', log_level=2).start() + ( + tokens, + labels, + loss_mask, + attention_mask, + position_ids, + images, + num_image_tiles, + packed_seq_params, + ) = get_batch(data_iterator, model.module.module.image_token_index, model.module.module.img_seq_len) + timers('batch-generator').stop() + + output_tensor, loss_mask = model( + images, + tokens, + position_ids, + attention_mask, + labels, + loss_mask, + num_image_tiles=num_image_tiles, + packed_seq_params=packed_seq_params, + ) + args = get_args() + if args.use_loss_scaling: + loss_function = partial(scaled_loss_func, loss_mask) + else: + loss_function = partial(loss_func, loss_mask) + + return output_tensor, loss_function + + +def llava_embedding_ranks(pp_ranks): + """LLava's embedding ranks consist of the decoder's first and last ranks (ie, the ViT has no embeddings). 
+ Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + last_rank = pp_ranks[-1] + if len(pp_ranks) == 1 or pp_ranks[epp] == last_rank: + return [last_rank] + else: + return [pp_ranks[epp], last_rank] + + +def llava_position_embedding_ranks(pp_ranks): + """LLava's embedding ranks consist of the singular rank of the model or the decoder's first rank. + Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + last_rank = pp_ranks[-1] + if len(pp_ranks) == 1: + return [last_rank] + else: + return [pp_ranks[epp]] + + +def run_online_eval(model): + """Run an evaluation benchmark during training.""" + args = get_args() + + # Online evaluation config is not defined. Do nothing. + if not args.online_evaluation_config: + return [] + + from config import EvaluationConfig + from run_text_generation import generate_and_write_samples + + with open(args.online_evaluation_config, "r") as f: + config_dict = yaml.safe_load(f) + + config = EvaluationConfig(**config_dict) + + # The inference code assumes the first rank is the leader. + # Tensorboard writer is on the last rank. + # We must write to a storage space that all ranks see. + output_dir = os.path.join(args.save, "online_eval") + os.makedirs(output_dir, exist_ok=True) + config.output_path = os.path.join(output_dir, args.language_model_type) + + # The actual generation. + generate_and_write_samples(model[0].module, config, print_output=False) + + # Make sure the first rank is done writing so that the last rank can run eval. + torch.distributed.barrier() + + if not is_last_rank(): + return [] + + # Run evaluation. 
+ if config.task == "TextVQA": + from evaluate_textvqa import textvqa_eval + + avg_acc = textvqa_eval(config.output_path) + + return [{"TextVQA accuracy": avg_acc}] + else: + raise NotImplementedError(f"online evaluation of {config.task} not implemented yet") + + +def write_online_eval_to_tensorboard(data, iteration, writer): + """Write online evaluation data to Tensorboard.""" + if not writer: + return + + for item in data: + for k, v in item.items(): + writer.add_scalar(k, v, iteration) + + +if __name__ == "__main__": + + train_valid_test_dataloaders_provider.is_distributed = True + + pretrain( + train_valid_test_dataloaders_provider, + model_provider, + ModelType.encoder_and_decoder, + forward_step, + args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, + extra_args_provider=add_multimodal_extra_args, + process_non_loss_data_func=write_online_eval_to_tensorboard, + get_embedding_ranks=llava_embedding_ranks, + get_position_embedding_ranks=llava_position_embedding_ranks, + non_loss_data_func=run_online_eval, + ) diff --git a/examples/retro/preprocess_data.sh b/examples/retro/preprocess_data.sh old mode 100644 new mode 100755 diff --git a/examples/retro/train_retro_2b_distributed.sh b/examples/retro/train_retro_2b_distributed.sh old mode 100644 new mode 100755 diff --git a/examples/t5/train_t5_220m_distributed.sh b/examples/t5/train_t5_220m_distributed.sh old mode 100644 new mode 100755 index 62e6f9d..8793a99 --- a/examples/t5/train_t5_220m_distributed.sh +++ b/examples/t5/train_t5_220m_distributed.sh @@ -1,78 +1,78 @@ -#!/bin/bash - -# Runs the "220M" parameter model - -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -GPUS_PER_NODE=8 -# Change for multinode config -MASTER_ADDR=localhost -MASTER_PORT=6000 -NUM_NODES=1 -NODE_RANK=0 -WORLD_SIZE=$(($GPUS_PER_NODE*$NUM_NODES)) - -CHECKPOINT_PATH=$1 # -TENSORBOARD_DIR=$2 # -VOCAB_FILE=$3 #/bert-large-cased-vocab.txt -DATA_PATH=$4 #_text_document - -DISTRIBUTED_ARGS=" - --nproc_per_node $GPUS_PER_NODE \ - --nnodes $NUM_NODES \ - --node_rank $NODE_RANK \ - --master_addr $MASTER_ADDR \ - --master_port $MASTER_PORT -" - -T5_ARGS=" - --encoder-num-layers 12 \ - --decoder-num-layers 12 \ - --hidden-size 768 \ - --num-attention-heads 12 \ - --kv-channels 64 \ - --ffn-hidden-size 3072 \ - --encoder-seq-length 512 \ - --decoder-seq-length 128 \ - --max-position-embeddings 512 \ - --micro-batch-size 64 \ - --global-batch-size 512 \ - --lr 0.0001 \ - --train-iters 1000000 \ - --lr-decay-iters 1000000 \ - --lr-decay-style linear \ - --min-lr 0.00001 \ - --weight-decay 1e-2 \ - --lr-warmup-fraction .01 \ - --clip-grad 1.0 \ - --bf16 \ - --vocab-extra-ids 100 \ - --init-method-std 0.015 \ - --transformer-impl transformer_engine \ - --tensor-model-parallel-size 1 \ - --pipeline-model-parallel-size 1 \ - --attention-backend auto \ -" - -DATA_ARGS=" - --data-path $DATA_PATH \ - --vocab-file $VOCAB_FILE \ - --tokenizer-type BertWordPieceCase \ - --split 99982,9,9 \ -" - -OUTPUT_ARGS=" - --log-interval 100 \ - --tensorboard-dir ${TENSORBOARD_DIR} \ - --save-interval 500 \ - --eval-interval 1000 \ - --eval-iters 10 -" - -torchrun $DISTRIBUTED_ARGS pretrain_t5.py \ - $T5_ARGS \ - $DATA_ARGS \ - $OUTPUT_ARGS \ - --distributed-backend nccl \ - --save $CHECKPOINT_PATH \ - --load $CHECKPOINT_PATH \ +#!/bin/bash + +# Runs the "220M" parameter model + +export CUDA_DEVICE_MAX_CONNECTIONS=1 + +GPUS_PER_NODE=8 +# Change for multinode config +MASTER_ADDR=localhost +MASTER_PORT=6000 +NUM_NODES=1 +NODE_RANK=0 +WORLD_SIZE=$(($GPUS_PER_NODE*$NUM_NODES)) + +CHECKPOINT_PATH=$1 # 
+TENSORBOARD_DIR=$2 # +VOCAB_FILE=$3 #/bert-large-cased-vocab.txt +DATA_PATH=$4 #_text_document + +DISTRIBUTED_ARGS=" + --nproc_per_node $GPUS_PER_NODE \ + --nnodes $NUM_NODES \ + --node_rank $NODE_RANK \ + --master_addr $MASTER_ADDR \ + --master_port $MASTER_PORT +" + +T5_ARGS=" + --encoder-num-layers 12 \ + --decoder-num-layers 12 \ + --hidden-size 768 \ + --num-attention-heads 12 \ + --kv-channels 64 \ + --ffn-hidden-size 3072 \ + --encoder-seq-length 512 \ + --decoder-seq-length 128 \ + --max-position-embeddings 512 \ + --micro-batch-size 64 \ + --global-batch-size 512 \ + --lr 0.0001 \ + --train-iters 1000000 \ + --lr-decay-iters 1000000 \ + --lr-decay-style linear \ + --min-lr 0.00001 \ + --weight-decay 1e-2 \ + --lr-warmup-fraction .01 \ + --clip-grad 1.0 \ + --bf16 \ + --vocab-extra-ids 100 \ + --init-method-std 0.015 \ + --transformer-impl transformer_engine \ + --tensor-model-parallel-size 1 \ + --pipeline-model-parallel-size 1 \ + --attention-backend auto \ +" + +DATA_ARGS=" + --data-path $DATA_PATH \ + --vocab-file $VOCAB_FILE \ + --tokenizer-type BertWordPieceCase \ + --split 99982,9,9 \ +" + +OUTPUT_ARGS=" + --log-interval 100 \ + --tensorboard-dir ${TENSORBOARD_DIR} \ + --save-interval 500 \ + --eval-interval 1000 \ + --eval-iters 10 +" + +torchrun $DISTRIBUTED_ARGS pretrain_t5.py \ + $T5_ARGS \ + $DATA_ARGS \ + $OUTPUT_ARGS \ + --distributed-backend nccl \ + --save $CHECKPOINT_PATH \ + --load $CHECKPOINT_PATH \ diff --git a/gptnodes b/gptnodes deleted file mode 100644 index 523cbf3..0000000 --- a/gptnodes +++ /dev/null @@ -1,32 +0,0 @@ -node002 slots=8 -node003 slots=8 -node004 slots=8 -node005 slots=8 -node006 slots=8 -node020 slots=8 -node021 slots=8 -node022 slots=8 -node033 slots=8 -node034 slots=8 -node035 slots=8 -node036 slots=8 -node037 slots=8 -node038 slots=8 -node039 slots=8 -node040 slots=8 -node041 slots=8 -node042 slots=8 -node043 slots=8 -node044 slots=8 -node045 slots=8 -node046 slots=8 -node047 slots=8 -node048 slots=8 -node056 slots=8 -node057 slots=8 -node058 slots=8 -node059 slots=8 -node060 slots=8 -node061 slots=8 -node062 slots=8 -node063 slots=8 diff --git a/megatron/core/datasets/blended_dataset.py b/megatron/core/datasets/blended_dataset.py index be0b7a4..6b027fa 100644 --- a/megatron/core/datasets/blended_dataset.py +++ b/megatron/core/datasets/blended_dataset.py @@ -1,201 +1,201 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - -import hashlib -import json -import logging -import os -import time -from collections import OrderedDict -from typing import Dict, List, Optional, Tuple, Union - -import numpy -import torch - -from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig -from megatron.core.datasets.megatron_dataset import MegatronDataset -from megatron.core.datasets.utils import normalize -from megatron.core.utils import log_single_rank - -logger = logging.getLogger(__name__) - -_VERBOSE = False - - -class BlendedDataset(torch.utils.data.Dataset): - """Conjugating class for a set of MegatronDataset instances - - Args: - datasets (List[MegatronDataset]): The MegatronDataset instances to blend - - weights (List[Union[int, float]]): The weights that determine the dataset blend ratios - - size (Optional[int]): The number of samples to draw from the blend. If None, for each dataset index idx draw exactly weights[idx] samples from datasets[idx]. 
- - config (BlendedMegatronDatasetConfig): The config - - Raises: - RuntimeError: When the dataset has fewer or more samples than 'size' post-initialization - """ - - def __init__( - self, - datasets: List[MegatronDataset], - weights: List[Union[int, float]], - size: Optional[int], - config: BlendedMegatronDatasetConfig, - ) -> None: - assert len(datasets) == len(weights) - assert len(datasets) < 32767 - assert all(map(lambda _: type(_) == type(datasets[0]), datasets)) - assert all(map(lambda _: _.index_split == datasets[0].index_split, datasets)) - assert all(map(lambda _: _ > 0, weights)) - assert all(map(lambda _: type(_) == type(weights[0]), weights)) - if size is None and isinstance(weights[0], float): - assert all(map(lambda _: _ == int(_), weights)) - - # Alert user to unnecessary blending - if len(datasets) == 1: - log_single_rank( - logger, logging.WARNING, f"Building a BlendedDataset for a single MegatronDataset" - ) - - if size is not None: - weights = normalize(weights) - - self.datasets = datasets - self.split = self.datasets[0].index_split - self.weights = weights - self.size = size - self.config = config - - unique_identifiers = OrderedDict() - unique_identifiers["class"] = type(self).__name__ - unique_identifiers["datasets"] = [dataset.unique_identifiers for dataset in self.datasets] - unique_identifiers["split"] = self.split.name - unique_identifiers["weights"] = self.weights - unique_identifiers["size"] = self.size - unique_identifiers["renormalize_blend_weights"] = self.config.renormalize_blend_weights - - self.unique_description = json.dumps( - unique_identifiers, indent=4, default=lambda obj: obj.unique_identifiers - ) - self.unique_description_hash = hashlib.md5( - self.unique_description.encode("utf-8") - ).hexdigest() - - self.built_anew_on_cache_miss = False - - self.dataset_index, self.dataset_sample_index = self._build_indices() - - def __len__(self) -> int: - return self.dataset_index.shape[0] - - def __getitem__(self, idx: int) -> Dict[str, Union[int, numpy.ndarray]]: - dataset_id = self.dataset_index[idx] - dataset_sample_id = self.dataset_sample_index[idx] - return {"dataset_id": dataset_id, **self.datasets[dataset_id][dataset_sample_id]} - - def _build_indices(self) -> Tuple[numpy.ndarray, numpy.ndarray]: - """Build and optionally cache the dataset index and the dataset sample index - - The dataset index is a 1-D mapping which determines the dataset to query. The dataset - sample index is a 1-D mapping which determines the sample to request from the queried - dataset. 
- - Returns: - Tuple[numpy.ndarray, numpy.ndarray]: The dataset index and the dataset sample index - """ - path_to_cache = self.config.path_to_cache - - if path_to_cache: - get_path_to = lambda suffix: os.path.join( - path_to_cache, - f"{self.unique_description_hash}-{type(self).__name__}-{self.split.name}-{suffix}", - ) - path_to_description = get_path_to("description.txt") - path_to_dataset_index = get_path_to("dataset_index.npy") - path_to_dataset_sample_index = get_path_to("dataset_sample_index.npy") - cache_hit = all( - map( - os.path.isfile, - [path_to_description, path_to_dataset_index, path_to_dataset_sample_index], - ) - ) - else: - cache_hit = False - - if not path_to_cache or (not cache_hit and torch.distributed.get_rank() == 0): - log_single_rank( - logger, logging.INFO, f"Build and save the {type(self).__name__} indices" - ) - self.built_anew_on_cache_miss = True - - # Build the dataset and dataset sample indexes - log_single_rank( - logger, logging.INFO, f"\tBuild and save the dataset and dataset sample indexes" - ) - t_beg = time.time() - from megatron.core.datasets import helpers - - if self.size is not None: - dataset_index = numpy.zeros(self.size, dtype=numpy.int16) - dataset_sample_index = numpy.zeros(self.size, dtype=numpy.int64) - helpers.build_blending_indices( - dataset_index, - dataset_sample_index, - self.weights, - len(self.datasets), - self.size, - _VERBOSE, - ) - else: - size = sum(self.weights) - dataset_index = numpy.zeros(size, dtype=numpy.int16) - dataset_sample_index = numpy.zeros(size, dtype=numpy.int64) - helpers.build_exhaustive_blending_indices( - dataset_index, dataset_sample_index, self.weights, len(self.datasets) - ) - - if path_to_cache: - os.makedirs(path_to_cache, exist_ok=True) - # Write the description - with open(path_to_description, "wt") as writer: - writer.write(self.unique_description) - # Save the indexes - numpy.save(path_to_dataset_index, dataset_index, allow_pickle=True) - numpy.save(path_to_dataset_sample_index, dataset_sample_index, allow_pickle=True) - else: - log_single_rank( - logger, - logging.WARNING, - f"Unable to save the {type(self).__name__} indexes because path_to_cache is None", - ) - - t_end = time.time() - log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") - - return dataset_index, dataset_sample_index - - log_single_rank(logger, logging.INFO, f"Load the {type(self).__name__} indices") - - log_single_rank( - logger, logging.INFO, f"\tLoad the dataset index from {path_to_dataset_index}" - ) - t_beg = time.time() - dataset_index = numpy.load(path_to_dataset_index, allow_pickle=True, mmap_mode='r') - t_end = time.time() - log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") - - log_single_rank( - logger, - logging.INFO, - f"\tLoad the dataset sample index from {path_to_dataset_sample_index}", - ) - t_beg = time.time() - dataset_sample_index = numpy.load( - path_to_dataset_sample_index, allow_pickle=True, mmap_mode='r' - ) - t_end = time.time() - log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") - - return dataset_index, dataset_sample_index +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
+ +import hashlib +import json +import logging +import os +import time +from collections import OrderedDict +from typing import Dict, List, Optional, Tuple, Union + +import numpy +import torch + +from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig +from megatron.core.datasets.megatron_dataset import MegatronDataset +from megatron.core.datasets.utils import normalize +from megatron.core.utils import log_single_rank + +logger = logging.getLogger(__name__) + +_VERBOSE = False + + +class BlendedDataset(torch.utils.data.Dataset): + """Conjugating class for a set of MegatronDataset instances + + Args: + datasets (List[MegatronDataset]): The MegatronDataset instances to blend + + weights (List[Union[int, float]]): The weights that determine the dataset blend ratios + + size (Optional[int]): The number of samples to draw from the blend. If None, for each + dataset index idx draw exactly weights[idx] samples from datasets[idx]. + + config (BlendedMegatronDatasetConfig): The config + + Raises: + RuntimeError: When the dataset has fewer or more samples than 'size' post-initialization + """ + + def __init__( + self, + datasets: List[MegatronDataset], + weights: List[Union[int, float]], + size: Optional[int], + config: BlendedMegatronDatasetConfig, + ) -> None: + assert len(datasets) == len(weights) + assert len(datasets) < 32767 + assert all(map(lambda _: type(_) == type(datasets[0]), datasets)) + assert all(map(lambda _: _.index_split == datasets[0].index_split, datasets)) + assert all(map(lambda _: _ > 0, weights)) + assert all(map(lambda _: type(_) == type(weights[0]), weights)) + if size is None and isinstance(weights[0], float): + assert all(map(lambda _: _ == int(_), weights)) + + # Alert user to unnecessary blending + if len(datasets) == 1: + log_single_rank( + logger, logging.WARNING, f"Building a BlendedDataset for a single MegatronDataset" + ) + + if size is not None: + weights = normalize(weights) + + self.datasets = datasets + self.split = self.datasets[0].index_split + self.weights = weights + self.size = size + self.config = config + + unique_identifiers = OrderedDict() + unique_identifiers["class"] = type(self).__name__ + unique_identifiers["datasets"] = [dataset.unique_identifiers for dataset in self.datasets] + unique_identifiers["split"] = self.split.name + unique_identifiers["weights"] = self.weights + unique_identifiers["size"] = self.size + + self.unique_description = json.dumps( + unique_identifiers, indent=4, default=lambda obj: obj.unique_identifiers + ) + self.unique_description_hash = hashlib.md5( + self.unique_description.encode("utf-8") + ).hexdigest() + + self.built_anew_on_cache_miss = False + + self.dataset_index, self.dataset_sample_index = self._build_indices() + + def __len__(self) -> int: + return self.dataset_index.shape[0] + + def __getitem__(self, idx: int) -> Dict[str, Union[int, numpy.ndarray]]: + dataset_id = self.dataset_index[idx] + dataset_sample_id = self.dataset_sample_index[idx] + return {"dataset_id": dataset_id, **self.datasets[dataset_id][dataset_sample_id]} + + def _build_indices(self) -> Tuple[numpy.ndarray, numpy.ndarray]: + """Build and optionally cache the dataset index and the dataset sample index + + The dataset index is a 1-D mapping which determines the dataset to query. The dataset + sample index is a 1-D mapping which determines the sample to request from the queried + dataset. 
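For intuition, the two index arrays built here act as a lookup table from a blended sample index to a (dataset, sample) pair. A hypothetical blend of two datasets could look like the following sketch (values are illustrative only).

    dataset_index = [0, 1, 0, 1, 1]         # which MegatronDataset serves blended sample i
    dataset_sample_index = [0, 0, 1, 1, 2]  # which sample to request from that dataset

    def fetch(datasets, i):
        # Mirrors BlendedDataset.__getitem__: pick the dataset, then the sample within it.
        return datasets[dataset_index[i]][dataset_sample_index[i]]

    # e.g. fetch(datasets, 4) returns datasets[1][2]; the blend has length 5.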
+ + Returns: + Tuple[numpy.ndarray, numpy.ndarray]: The dataset index and the dataset sample index + """ + path_to_cache = self.config.path_to_cache + + if path_to_cache: + get_path_to = lambda suffix: os.path.join( + path_to_cache, + f"{self.unique_description_hash}-{type(self).__name__}-{self.split.name}-{suffix}", + ) + path_to_description = get_path_to("description.txt") + path_to_dataset_index = get_path_to("dataset_index.npy") + path_to_dataset_sample_index = get_path_to("dataset_sample_index.npy") + cache_hit = all( + map( + os.path.isfile, + [path_to_description, path_to_dataset_index, path_to_dataset_sample_index], + ) + ) + else: + cache_hit = False + + if not path_to_cache or (not cache_hit and torch.distributed.get_rank() == 0): + log_single_rank( + logger, logging.INFO, f"Build and save the {type(self).__name__} indices" + ) + self.built_anew_on_cache_miss = True + + # Build the dataset and dataset sample indexes + log_single_rank( + logger, logging.INFO, f"\tBuild and save the dataset and dataset sample indexes" + ) + t_beg = time.time() + from megatron.core.datasets import helpers + + if self.size is not None: + dataset_index = numpy.zeros(self.size, dtype=numpy.int16) + dataset_sample_index = numpy.zeros(self.size, dtype=numpy.int64) + helpers.build_blending_indices( + dataset_index, + dataset_sample_index, + self.weights, + len(self.datasets), + self.size, + _VERBOSE, + ) + else: + size = sum(self.weights) + dataset_index = numpy.zeros(size, dtype=numpy.int16) + dataset_sample_index = numpy.zeros(size, dtype=numpy.int64) + helpers.build_exhaustive_blending_indices( + dataset_index, dataset_sample_index, self.weights, len(self.datasets) + ) + + if path_to_cache: + os.makedirs(path_to_cache, exist_ok=True) + # Write the description + with open(path_to_description, "wt") as writer: + writer.write(self.unique_description) + # Save the indexes + numpy.save(path_to_dataset_index, dataset_index, allow_pickle=True) + numpy.save(path_to_dataset_sample_index, dataset_sample_index, allow_pickle=True) + else: + log_single_rank( + logger, + logging.WARNING, + f"Cannot save the {type(self).__name__} indexes because path_to_cache is None", + ) + + t_end = time.time() + log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") + + return dataset_index, dataset_sample_index + + log_single_rank(logger, logging.INFO, f"Load the {type(self).__name__} indices") + + log_single_rank( + logger, logging.INFO, f"\tLoad the dataset index from {path_to_dataset_index}" + ) + t_beg = time.time() + dataset_index = numpy.load(path_to_dataset_index, allow_pickle=True, mmap_mode='r') + t_end = time.time() + log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") + + log_single_rank( + logger, + logging.INFO, + f"\tLoad the dataset sample index from {path_to_dataset_sample_index}", + ) + t_beg = time.time() + dataset_sample_index = numpy.load( + path_to_dataset_sample_index, allow_pickle=True, mmap_mode='r' + ) + t_end = time.time() + log_single_rank(logger, logging.DEBUG, f"\t> time elapsed: {t_end - t_beg:4f} seconds") + + return dataset_index, dataset_sample_index diff --git a/megatron/core/datasets/blended_megatron_dataset_builder.py b/megatron/core/datasets/blended_megatron_dataset_builder.py index c9cf4ab..e69e0a6 100644 --- a/megatron/core/datasets/blended_megatron_dataset_builder.py +++ b/megatron/core/datasets/blended_megatron_dataset_builder.py @@ -1,528 +1,579 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
- -import logging -import math -from concurrent.futures import ThreadPoolExecutor -from typing import Any, Callable, Iterable, List, Optional, Type, Union - -import numpy -import torch - -from megatron.core.datasets.blended_dataset import BlendedDataset -from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig -from megatron.core.datasets.megatron_dataset import LowLevelDataset, MegatronDataset -from megatron.core.datasets.utils import Split, normalize -from megatron.core.parallel_state import get_virtual_pipeline_model_parallel_rank -from megatron.core.utils import log_single_rank - -logger = logging.getLogger(__name__) - -MidLevelDataset = MegatronDataset - -TopLevelDataset = Union[BlendedDataset, MidLevelDataset] - -DistributedDataset = Union[ - TopLevelDataset, MidLevelDataset, LowLevelDataset, torch.utils.data.Dataset -] - - -class BlendedMegatronDatasetBuilder(object): - """Builder class for the BlendedDataset and MegatronDataset classes - - Args: - cls (Type[MegatronDataset]): The class to instantiate, must inherit from MegatronDataset - - sizes (List[Optional[int]]): The minimum total number of samples to draw, or None, per split - - is_built_on_rank (Callable): A callable which returns True if the dataset should be built on the current rank and False otherwise. It should be Megatron Core parallelism aware i.e. global rank, local group rank, and virtual rank may inform its return value. - - config (BlendedMegatronDatasetConfig): The config object which informs dataset creation - """ - - def __init__( - self, - cls: Type[MidLevelDataset], - sizes: List[int], - is_built_on_rank: Callable, - config: BlendedMegatronDatasetConfig, - ): - self.cls = cls - self.sizes = sizes - self.is_built_on_rank = is_built_on_rank - self.config = config - - log_single_rank( - logger, - logging.INFO, - f"Building dataset splits with cls={cls.__name__}, sizes={self.sizes}, and config={self.config}", - ) - - if not self.config.mock: - for split in Split: - size_is_none = self.sizes[split.value] is None - if self.config.blend_per_split is None: - weights_are_none = self.config.blend[1] is None - else: - if self.config.blend_per_split[split.value] is None: - continue - weights_are_none = self.config.blend_per_split[split.value][1] is None - if size_is_none: - assert ( - weights_are_none - ), f"size_is_none => weights_are_none fails for {split.name} split" - - if torch.distributed.is_initialized(): - gb_rank = torch.distributed.get_rank() - vp_rank = get_virtual_pipeline_model_parallel_rank() - if gb_rank == 0 and (vp_rank == 0 or vp_rank is None): - assert ( - self.is_built_on_rank() - ), "is_built_on_rank must return True when global rank = 0 and vp rank = 0" - - def build(self) -> List[Optional[TopLevelDataset]]: - """Build all dataset splits according to the provided blend(s) - - This method is distributed-aware and must be called on all ranks. - - The dataset splits returned can vary according to the config. Supply config.blend and - config.split to build BlendedDataset and/or MegatronDataset splits from the same - distribution. Supply config.blend_per_split to build BlendedDataset and/or MegatronDataset - splits from separate distributions. In either case, for each split, handle the following - cases: - - (1) The split is None - - do nothing - - (2) The split has one contributing dataset, and... 
- - (a) 'size' is not None - - Build a mid-level dataset with low-level dataset sampling in proportion to the size - - (b) 'size' is None - - Build mid-level datasets with no excess low-level dataset sampling - - (3) The split has multiple contributing datasets, and... - - (a) 'weights' is not None and 'size' is not None - - Build mid-level datasets with low-level dataset sampling in proportion to their weights and the size - - Build a top-level dataset of length marginally greater than 'size' with mid-level dataset sampling in proportion to their weights and the size - - (b) 'weights' is not None and 'size' is None - - Error - - (c) 'weights' is None and 'size' is not None - - Build mid-level datasets with no excess low-level dataset sampling - - Build a top-level dataset of length 'size' with mid-level dataset sampling in proportion to their lengths and the size - - - The 'size' of the top-level dataset is capped at the sum of the mid-level dataset lengths - - (d) 'weights' is None and 'size' is None - - Build mid-level datasets with no excess low-level dataset sampling - - Build a top-level dataset with no excess mid-level dataset sampling - - Returns: - List[Optional[TopLevelDataset]]: A list containing a dataset instance (or None) per split - """ - datasets = self._build_blended_dataset_splits() - - for dataset in datasets: - if dataset is not None and len(dataset) > 0: - if isinstance(dataset, BlendedDataset): - if dataset.built_anew_on_cache_miss or any( - x.built_anew_on_cache_miss for x in dataset.datasets - ): - log_single_rank( - logger, - logging.INFO, - f"Verifying NumPy indices for {type(dataset).__name__} {dataset.split.name} split", - ) - else: - log_single_rank( - logger, - logging.INFO, - f"NumPy indices for {type(dataset).__name__} {dataset.split.name} split are fully cached, skipping verification", - ) - continue - # Check blend size - assert dataset.size is None or dataset.size == dataset.dataset_index.shape[0] - # Check blend access of mid-level datasets - _, sizes = numpy.unique(dataset.dataset_index, return_counts=True) - for i, dataset_and_size in enumerate(zip(dataset.datasets, sizes)): - if len(dataset_and_size[0]) < dataset_and_size[1]: - raise IndexError( - f"The {dataset.split.name} blend oversamples (N = {dataset_and_size[1]}) {type(dataset_and_size[0]).__name__} {i} (len = {len(dataset_and_size[0])}). " - f"Set renormalize_blend_weights to True and re-run. File an issue if the problem is not resolved." - ) - - return datasets - - def _build_blended_dataset_splits(self) -> List[Optional[TopLevelDataset]]: - """Build all dataset splits according to the provided blend(s) - - See the BlendedMegatronDatasetBuilder.build alias for more information. 
- - Returns: - List[Optional[TopLevelDataset]]: A list containing a dataset instance (or None) per split - """ - ## - # Return fake "mock" datasets - ## - if self.config.mock: - split = self.config.split_matrix - try: - return self._build_megatron_dataset_splits(None, split, self.sizes) - except Exception as error: - raise Exception( - f"{self.cls.__name__} failed to build as a mock data generator" - ) from error - - ## - # All splits come from the same distribution - ## - elif self.config.blend: - prefixes, weights = self.config.blend - if weights is not None: - weights = normalize(weights) - - split = self.config.split_matrix - - # Blend consists of a single prefix - if len(prefixes) == 1 and weights is None: - return self._build_megatron_dataset_splits(prefixes[0], split, self.sizes) - - # Build the mid-level datasets - if weights is None: - sizes_per_dataset = [[None for split in Split] for prefix in prefixes] - else: - sizes_per_dataset = _get_size_per_split_per_dataset(weights, self.sizes) - - # build each dataset in parallel - megatron_datasets = self._build_megatron_datasets_parallel( - prefixes, split, sizes_per_dataset - ) - - # Build the top-level datasets - blended_datasets = [None] * len(Split) - for i in range(len(Split)): - if split[i] is not None: - weights_i = weights - if weights_i is not None and self.sizes[i] is not None: - size_per_dataset = list(zip(*sizes_per_dataset))[i] - size_i = sum(size_per_dataset) - if self.config.renormalize_blend_weights: - weights_i = list(map(lambda _size: _size / size_i, size_per_dataset)) - elif weights_i is None: - try: - weights_i = [ - len(megatron_dataset) for megatron_dataset in megatron_datasets[i] - ] - except TypeError: - weights_i = [0 for _ in prefixes] - if self.sizes[i] is not None: - size_i = min(self.sizes[i], sum(weights_i)) - else: - size_i = None # => the size will be sum(weights_i) - else: - raise RuntimeError - blended_datasets[i] = self.build_generic_dataset( - BlendedDataset, - self.is_built_on_rank, - True, # synchronize_ranks, default behavior to build on rank-0 first - megatron_datasets[i], - weights_i, - size_i, - self.config, - ) - - return blended_datasets - - ## - # Each split comes from a separate distribution - ## - else: - blended_datasets = [None] * len(Split) - for i in range(len(Split)): - split_spoof = [None] * len(Split) - split_spoof[i] = (0.0, 1.0) - sizes_spoof = [0] * len(Split) - sizes_spoof[i] = self.sizes[i] - - # Blend is provided for the split - blend = self.config.blend_per_split[i] - if blend is not None: - prefixes, weights = blend - if weights is not None: - weights = normalize(weights) - - # Blend consists of a sigle prefix - if len(prefixes) == 1: - blended_datasets[i] = self._build_megatron_dataset_splits( - prefixes[0], split_spoof, sizes_spoof - )[i] - continue - - # Build mid-level datasets - if weights is None: - sizes_per_dataset = [[None for split in Split] for prefix in prefixes] - else: - sizes_per_dataset = _get_size_per_split_per_dataset(weights, sizes_spoof) - - # build each dataset in parallel - megatron_datasets = self._build_megatron_datasets_parallel( - prefixes, split_spoof, sizes_per_dataset - )[i] - - # Build top-level dataset - if weights is not None and self.sizes[i] is not None: - size_per_dataset = list(zip(*sizes_per_dataset))[i] - size = sum(size_per_dataset) - if self.config.renormalize_blend_weights: - weights = list(map(lambda _size: _size / size, size_per_dataset)) - elif weights is None: - try: - weights = [ - len(megatron_dataset) for megatron_dataset in 
megatron_datasets - ] - except TypeError: - weights = [0 for _ in prefixes] - if self.sizes[i] is not None: - size = min(self.sizes[i], sum(weights)) - else: - size = None # => the size will be sum(weights) - else: - raise RuntimeError - blended_datasets[i] = self.build_generic_dataset( - BlendedDataset, - self.is_built_on_rank, - True, # synchronize_ranks, default behavior to build on rank-0 first - megatron_datasets, - weights, - size, - self.config, - ) - - return blended_datasets - - def _build_megatron_datasets_parallel( - self, prefixes: List[str], split: List[float], sizes_per_dataset: List[List[int]] - ) -> List[List[Optional[MegatronDataset]]]: - """Build the megatron datasets for a list of prefixes in parallel - - Args: - prefixes (List[str]): The list of prefix strings - - split (List[float]): The dataset split ratios (must sum to 1.00) - - sizes_per_dataset (List[List[int]]): The number of samples to request - per MegatronDataset per spilt - - Returns: - List[List[Optional[MegatronDataset]]]: For each split, have a list of - MegatronDataset per prefix - """ - - # Helper function to wrap the threading logic - def _threading_helper( - megatron_datasets: List[List[Optional[MegatronDataset]]], - num_workers: int, - prefixes: List[str], - split: List[float], - sizes_per_dataset: List[List[int]], - ) -> None: - with ThreadPoolExecutor(max_workers=num_workers) as executor: - all_futures = [] - for i in range(len(prefixes)): - all_futures.append( - executor.submit( - self._build_megatron_dataset_splits, - prefixes[i], - split, - sizes_per_dataset[i], - False, # synchronize_ranks, barrier is called in this function - ) - ) - for future in all_futures: - try: - megatron_datasets_split = future.result() - for j in range(len(megatron_datasets_split)): - megatron_datasets[j].append(megatron_datasets_split[j]) - except Exception as err: - raise err - - megatron_datasets = [[] for _ in range(len(Split))] - num_dataset_builder_threads = self.config.num_dataset_builder_threads - - if torch.distributed.is_initialized(): - rank = torch.distributed.get_rank() - # First, build on rank 0 - if rank == 0: - num_workers = num_dataset_builder_threads - if num_workers > 1: - # since only rank 0 is running, scale up the thread count - # but not too much to avoid overloading storage on miss path. - # if user set num_dataset_builder_threads to 1, - # i.e. meant for serial build, do not scale up. 
- num_workers *= min(2, max(1, torch.cuda.device_count())) - _threading_helper( - megatron_datasets, num_workers, prefixes, split, sizes_per_dataset - ) - - torch.distributed.barrier() - - # Then, build on other ranks; guaranteed to be data_cache hit - if rank != 0: - _threading_helper( - megatron_datasets, - num_dataset_builder_threads, - prefixes, - split, - sizes_per_dataset, - ) - else: - _threading_helper( - megatron_datasets, num_dataset_builder_threads, prefixes, split, sizes_per_dataset - ) - - return megatron_datasets - - def _build_megatron_dataset_splits( - self, - dataset_path: Optional[str], - split: List[float], - sizes: List[int], - synchronize_ranks: bool = True, - ) -> List[Optional[MidLevelDataset]]: - """Build each MidLevelDataset split from a single LowLevelDataset - - Args: - dataset_path (Optional[str]): The path on disk which defines the underlying LowLevelDataset, or None for mock dataset classes - - split (List[Tuple[float, float]]): The dataset split matrix - - sizes (List[int]): The number of total samples to draw from each split - - synchronize_ranks (bool): Whether to call barrier for rank-0 / barrier / other-ranks behavior. Set to False when we enforce this behavior at higher level. - - Returns: - List[Optional[MidLevelDataset]]: The MidLevelDataset (or None) per split - """ - # short-cut if we are not building on this rank - if torch.distributed.is_initialized() and not self.is_built_on_rank(): - for i in range(len(Split)): - if split[i] is not None and synchronize_ranks: - torch.distributed.barrier() - return [None] * len(Split) - - # Build the low level dataset - low_level_dataset = self.cls.build_low_level_dataset(dataset_path, self.config) - - # Build the split indices for the low level dataset - num_elements = self.cls.numel_low_level_dataset(low_level_dataset) - split_indices = [] - for i, _ in enumerate(Split): - if split[i] is not None: - beg = int(round(split[i][0] * float(num_elements))) - end = int(round(split[i][1] * float(num_elements))) - split_indices.append(numpy.arange(start=beg, stop=end, step=1, dtype=numpy.int32)) - else: - split_indices.append(None) - - # Build the mid level dataset - mid_level_datasets = [] - for i, _split in enumerate(Split): - if split[i] is None: - mid_level_datasets.append(None) - else: - mid_level_datasets.append( - self.build_generic_dataset( - self.cls, - self.is_built_on_rank, - synchronize_ranks, - low_level_dataset, - dataset_path, - split_indices[i], - sizes[i], - _split, - self.config, - ) - ) - - return mid_level_datasets - - @staticmethod - def build_generic_dataset( - cls: Union[Type[DistributedDataset], Callable], - is_built_on_rank: Callable, - synchronize_ranks: bool, - *args: Any, - ) -> Optional[Union[DistributedDataset, Iterable]]: - """Build the DistributedDataset - - Return None if and only if the underlying dataset class is not built on the current rank - and torch.distributed is initialized. - - Args: - cls (Union[Type[DistributedDataset], Callable]): The DistributedDataset class to be built. In special cases, e.g. when we are building the low level dataset for a RawMegatronDataset instance, we can accept a Callable which returns an Iterable. - - synchronize_ranks (bool): Whether to call barrier for rank-0 / barrier / other-ranks behavior. Set to False when we enforce this behavior at higher level. 
- - args (Tuple[Any]): The positional arguments used to build the provided DistributedDataset class - - Raises: - Exception: When the dataset constructor raises an OSError - - Returns: - Optional[Union[DistributedDataset, Iterable]]: The DistributedDataset instantion, the Iterable instantiation, or None - """ - if torch.distributed.is_initialized(): - rank = torch.distributed.get_rank() - - dataset = None - - # First, build on rank 0 - if rank == 0 and is_built_on_rank(): - try: - dataset = cls(*args) - except OSError as err: - log = ( - f"Failed to write dataset materials to the data cache directory. " - + f"Please supply a directory to which you have write access via " - + f"the path_to_cache attribute in BlendedMegatronDatasetConfig and " - + f"retry. Refer to the preserved traceback above for more information." - ) - raise Exception(log) from err - - if synchronize_ranks: - torch.distributed.barrier() - - # After, build on other ranks - if rank != 0 and is_built_on_rank(): - dataset = cls(*args) - - return dataset - - return cls(*args) - - -def _get_size_per_split_per_dataset( - normalized_weights: List[float], target_size_per_split: List[int] -) -> List[List[int]]: - """Determine the contribution of the MegatronDataset splits to the BlendedDataset splits - - Args: - normalized_weights (List[float]): e.g. [0.3, 0.7] - - target_size_per_split (List[int]): The number of samples to target for each BlendedDataset split - - Returns: - List[List[int]]: The number of samples to request per MegatronDataset per split - """ - assert numpy.isclose(sum(normalized_weights), 1.0) - - # Use 0.5% target margin to ensure we satiate the request - sizes_per_dataset = [ - [int(math.ceil(target_size * weight * 1.005)) for target_size in target_size_per_split] - for weight in normalized_weights - ] - - return sizes_per_dataset +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. + +import logging +import math +from concurrent.futures import ThreadPoolExecutor +from typing import Any, Callable, Iterable, List, Optional, Type, Union + +import numpy +import torch + +from megatron.core.datasets.blended_dataset import BlendedDataset +from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig +from megatron.core.datasets.megatron_dataset import LowLevelDataset, MegatronDataset +from megatron.core.datasets.utils import Split, normalize +from megatron.core.parallel_state import get_virtual_pipeline_model_parallel_rank +from megatron.core.utils import log_single_rank + +logger = logging.getLogger(__name__) + +MidLevelDataset = MegatronDataset + +TopLevelDataset = Union[BlendedDataset, MidLevelDataset] + +DistributedDataset = Union[ + TopLevelDataset, MidLevelDataset, LowLevelDataset, torch.utils.data.Dataset +] + + +class BlendedMegatronDatasetBuilder(object): + """Builder class for the BlendedDataset and MegatronDataset classes + + Args: + cls (Type[MegatronDataset]): The class to instantiate, must inherit from MegatronDataset + + sizes (List[Optional[int]]): The minimum total number of samples to draw, or None, per split + + is_built_on_rank (Callable): A callable which returns True if the dataset should be built on + the current rank and False otherwise. It should be Megatron Core parallelism aware i.e. + global rank, local group rank, and virtual rank may inform its return value. 
+ + config (BlendedMegatronDatasetConfig): The config object which informs dataset creation + """ + + def __init__( + self, + cls: Type[MidLevelDataset], + sizes: List[int], + is_built_on_rank: Callable, + config: BlendedMegatronDatasetConfig, + ): + self.cls = cls + self.sizes = sizes + self.is_built_on_rank = is_built_on_rank + self.config = config + + log_single_rank( + logger, + logging.INFO, + f"Building {cls.__name__} splits with sizes={self.sizes} and config={self.config}", + ) + + if not self.config.mock: + for split in Split: + size_is_none = self.sizes[split.value] is None + if self.config.blend_per_split is None: + weights_are_none = self.config.blend[1] is None + else: + if self.config.blend_per_split[split.value] is None: + continue + weights_are_none = self.config.blend_per_split[split.value][1] is None + if size_is_none: + assert ( + weights_are_none + ), f"size_is_none => weights_are_none fails for {split.name} split" + + if torch.distributed.is_initialized(): + gb_rank = torch.distributed.get_rank() + vp_rank = get_virtual_pipeline_model_parallel_rank() + if gb_rank == 0 and (vp_rank == 0 or vp_rank is None): + assert ( + self.is_built_on_rank() + ), "is_built_on_rank must return True when global rank = 0 and vp rank = 0" + + def build(self) -> List[Optional[TopLevelDataset]]: + """Build all dataset splits according to the provided blend(s) + + This method is distributed-aware and must be called on all ranks. + + The dataset splits returned can vary according to the config. Supply config.blend and + config.split to build BlendedDataset and/or MegatronDataset splits from the same + distribution. Supply config.blend_per_split to build BlendedDataset and/or MegatronDataset + splits from separate distributions. In either case, for each split, handle the following + cases: + + (1) The split is None + - do nothing + + (2) The split has one contributing dataset, and... + + (a) 'size' is not None + - Build a mid-level dataset with low-level dataset sampling in proportion to the + size + + (b) 'size' is None + - Build mid-level datasets with no excess low-level dataset sampling + + (3) The split has multiple contributing datasets, and... 
+ + (a) 'weights' is not None and 'size' is not None + - Build mid-level datasets with low-level dataset sampling in proportion to their + weights and the size + - Build a top-level dataset of length marginally greater than 'size' with mid-level + dataset sampling in proportion to their weights and the size + + (b) 'weights' is not None and 'size' is None + - Error + + (c) 'weights' is None and 'size' is not None + - Build mid-level datasets with no excess low-level dataset sampling + - Build a top-level dataset of length 'size' (capped at the sum of the mid-level + dataset lengths) with mid-level dataset sampling in proportion to their lengths + and the size + + (d) 'weights' is None and 'size' is None + - Build mid-level datasets with no excess low-level dataset sampling + - Build a top-level dataset with no excess mid-level dataset sampling + + Returns: + List[Optional[TopLevelDataset]]: A list containing a dataset instance (or None) per + split + """ + datasets = self._build_blended_dataset_splits() + + for dataset in datasets: + if dataset is not None and len(dataset) > 0: + if isinstance(dataset, BlendedDataset): + if dataset.built_anew_on_cache_miss or any( + x.built_anew_on_cache_miss for x in dataset.datasets + ): + log_single_rank( + logger, + logging.INFO, + ( + f"Verifying NumPy indices for {type(dataset).__name__} " + f"{dataset.split.name} split" + ), + ) + else: + log_single_rank( + logger, + logging.INFO, + ( + f"NumPy indices for {type(dataset).__name__} {dataset.split.name} " + f"split are fully cached, skipping verification" + ), + ) + continue + # Check blend size + assert dataset.size is None or dataset.size == dataset.dataset_index.shape[0] + # Check blend access of mid-level datasets + dataset_indices, dataset_sizes = numpy.unique( + dataset.dataset_index, return_counts=True + ) + for i, (index, size) in enumerate(zip(dataset_indices, dataset_sizes)): + if len(dataset.datasets[index]) < size: + raise IndexError( + f"The {dataset.split.name} blend oversamples the contributing " + f"datasets and, e.g., requests {size} samples from " + f"{type(dataset.datasets[index]).__name__} {i} with size " + f"{len(dataset.datasets[index])}. This is unexpected. " + f"Please file an issue." + ) + + return datasets + + def _build_blended_dataset_splits(self) -> List[Optional[TopLevelDataset]]: + """Build all dataset splits according to the provided blend(s) + + See the BlendedMegatronDatasetBuilder.build alias for more information. 
+ + Returns: + List[Optional[TopLevelDataset]]: A list containing a dataset instance (or None) per + split + """ + ## + # Return fake "mock" datasets + ## + if self.config.mock: + split = self.config.split_matrix + try: + return self._build_megatron_dataset_splits(None, split, self.sizes) + except Exception as error: + raise Exception( + f"{self.cls.__name__} failed to build as a mock data generator" + ) from error + + ## + # All splits come from the same distribution + ## + elif self.config.blend: + prefixes, weights = self.config.blend + if weights is not None: + weights = normalize(weights) + + split = self.config.split_matrix + + # Blend consists of a single prefix + if len(prefixes) == 1 and weights is None: + return self._build_megatron_dataset_splits(prefixes[0], split, self.sizes) + + # Build the mid-level datasets + if weights is None: + # Build only one "epoch" + sizes_per_dataset_buffer = [[None for split in Split] for prefix in prefixes] + else: + # The number of samples we plan to use per dataset + sizes_per_dataset_target = _get_size_per_split_per_dataset(weights, self.sizes) + # The number of samples we plan to build per dataset + sizes_per_dataset_buffer = _get_size_per_split_per_dataset( + weights, self.sizes, margin=0.5 + ) + + # Build each dataset in parallel + megatron_datasets = self._build_megatron_datasets_parallel( + prefixes, split, sizes_per_dataset_buffer + ) + + # Build the top-level datasets + blended_datasets = [None] * len(Split) + for i in range(len(Split)): + if split[i] is not None: + weights_i = weights + if weights_i is not None and self.sizes[i] is not None: + # Blend according to client-specified weights and client-specified size + size_per_dataset = list(zip(*sizes_per_dataset_target))[i] + size_i = sum(size_per_dataset) + elif weights_i is None: + # Blend according to dataset sizes as-is and (maybe) client-specified size + try: + weights_i = [ + len(megatron_dataset) for megatron_dataset in megatron_datasets[i] + ] + except TypeError: + weights_i = [0 for _ in prefixes] + if self.sizes[i] is not None: + size_i = min(self.sizes[i], sum(weights_i)) + else: + # Build exhaustive indices + size_i = None + else: + raise ValueError( + "Using client-specified weights requires client-specified size" + ) + blended_datasets[i] = self.build_generic_dataset( + BlendedDataset, + self.is_built_on_rank, + True, # synchronize_ranks, default behavior to build on rank-0 first + megatron_datasets[i], + weights_i, + size_i, + self.config, + ) + + return blended_datasets + + ## + # Each split comes from a separate distribution + ## + else: + blended_datasets = [None] * len(Split) + for i in range(len(Split)): + split_spoof = [None] * len(Split) + split_spoof[i] = (0.0, 1.0) + sizes_spoof = [0] * len(Split) + sizes_spoof[i] = self.sizes[i] + + # Blend is provided for the split + blend = self.config.blend_per_split[i] + if blend is not None: + prefixes, weights = blend + if weights is not None: + weights = normalize(weights) + + # Blend consists of a sigle prefix + if len(prefixes) == 1: + blended_datasets[i] = self._build_megatron_dataset_splits( + prefixes[0], split_spoof, sizes_spoof + )[i] + continue + + # Build mid-level datasets + if weights is None: + sizes_per_dataset_buffer = [ + [None for split in Split] for prefix in prefixes + ] + else: + # The number of samples we plan to use per dataset + sizes_per_dataset_target = _get_size_per_split_per_dataset( + weights, sizes_spoof + ) + # The number of samples we plan to build per dataset + sizes_per_dataset_buffer = 
_get_size_per_split_per_dataset( + weights, sizes_spoof, margin=0.5 + ) + + # Build each dataset in parallel + megatron_datasets = self._build_megatron_datasets_parallel( + prefixes, split_spoof, sizes_per_dataset_buffer + )[i] + + # Build top-level dataset + if weights is not None and self.sizes[i] is not None: + # Blend according to client-specified weights and client-specified size + size_per_dataset = list(zip(*sizes_per_dataset_target))[i] + size = sum(size_per_dataset) + elif weights is None: + # Blend according to dataset sizes as-is and (maybe) client-specified size + try: + weights = [ + len(megatron_dataset) for megatron_dataset in megatron_datasets + ] + except TypeError: + weights = [0 for _ in prefixes] + if self.sizes[i] is not None: + size = min(self.sizes[i], sum(weights)) + else: + # Build exhaustive indices + size = None + else: + raise RuntimeError + blended_datasets[i] = self.build_generic_dataset( + BlendedDataset, + self.is_built_on_rank, + True, # synchronize_ranks, default behavior to build on rank-0 first + megatron_datasets, + weights, + size, + self.config, + ) + + return blended_datasets + + def _build_megatron_datasets_parallel( + self, prefixes: List[str], split: List[float], sizes_per_dataset: List[List[int]] + ) -> List[List[Optional[MegatronDataset]]]: + """Build the megatron datasets for a list of prefixes in parallel + + Args: + prefixes (List[str]): The list of prefix strings + + split (List[float]): The dataset split ratios (must sum to 1.00) + + sizes_per_dataset (List[List[int]]): The number of samples to request + per MegatronDataset per spilt + + Returns: + List[List[Optional[MegatronDataset]]]: For each split, have a list of + MegatronDataset per prefix + """ + + # Helper function to wrap the threading logic + def _threading_helper( + megatron_datasets: List[List[Optional[MegatronDataset]]], + num_workers: int, + prefixes: List[str], + split: List[float], + sizes_per_dataset: List[List[int]], + ) -> None: + with ThreadPoolExecutor(max_workers=num_workers) as executor: + all_futures = [] + for i in range(len(prefixes)): + all_futures.append( + executor.submit( + self._build_megatron_dataset_splits, + prefixes[i], + split, + sizes_per_dataset[i], + False, # synchronize_ranks, barrier is called in this function + ) + ) + for future in all_futures: + try: + megatron_datasets_split = future.result() + for j in range(len(megatron_datasets_split)): + megatron_datasets[j].append(megatron_datasets_split[j]) + except Exception as err: + raise err + + megatron_datasets = [[] for _ in range(len(Split))] + num_dataset_builder_threads = self.config.num_dataset_builder_threads + + if torch.distributed.is_initialized(): + rank = torch.distributed.get_rank() + # First, build on rank 0 + if rank == 0: + num_workers = num_dataset_builder_threads + if num_workers > 1: + # since only rank 0 is running, scale up the thread count + # but not too much to avoid overloading storage on miss path. + # if user set num_dataset_builder_threads to 1, + # i.e. meant for serial build, do not scale up. 
+ num_workers *= min(2, max(1, torch.cuda.device_count())) + _threading_helper( + megatron_datasets, num_workers, prefixes, split, sizes_per_dataset + ) + + torch.distributed.barrier() + + # Then, build on other ranks; guaranteed to be data_cache hit + if rank != 0: + _threading_helper( + megatron_datasets, + num_dataset_builder_threads, + prefixes, + split, + sizes_per_dataset, + ) + else: + _threading_helper( + megatron_datasets, num_dataset_builder_threads, prefixes, split, sizes_per_dataset + ) + + return megatron_datasets + + def _build_megatron_dataset_splits( + self, + dataset_path: Optional[str], + split: List[float], + sizes: List[int], + synchronize_ranks: bool = True, + ) -> List[Optional[MidLevelDataset]]: + """Build each MidLevelDataset split from a single LowLevelDataset + + Args: + dataset_path (Optional[str]): The path on disk which defines the underlying + LowLevelDataset, or None for mock dataset classes + + split (List[Tuple[float, float]]): The dataset split matrix + + sizes (List[int]): The number of total samples to draw from each split + + synchronize_ranks (bool): Whether to call barrier for rank-0 / barrier / other-ranks + behavior. Set to False when we enforce this behavior at higher level. + + Returns: + List[Optional[MidLevelDataset]]: The MidLevelDataset (or None) per split + """ + # short-cut if we are not building on this rank + if torch.distributed.is_initialized() and not self.is_built_on_rank(): + for i in range(len(Split)): + if split[i] is not None and synchronize_ranks: + torch.distributed.barrier() + return [None] * len(Split) + + # Build the low level dataset + low_level_dataset = self.cls.build_low_level_dataset(dataset_path, self.config) + + # Build the split indices for the low level dataset + num_elements = self.cls.numel_low_level_dataset(low_level_dataset) + split_indices = [] + for i, _ in enumerate(Split): + if split[i] is not None: + beg = int(round(split[i][0] * float(num_elements))) + end = int(round(split[i][1] * float(num_elements))) + split_indices.append(numpy.arange(start=beg, stop=end, step=1, dtype=numpy.int32)) + else: + split_indices.append(None) + + # Build the mid level dataset + mid_level_datasets = [] + for i, _split in enumerate(Split): + if split[i] is None: + mid_level_datasets.append(None) + else: + mid_level_datasets.append( + self.build_generic_dataset( + self.cls, + self.is_built_on_rank, + synchronize_ranks, + low_level_dataset, + dataset_path, + split_indices[i], + sizes[i], + _split, + self.config, + ) + ) + + return mid_level_datasets + + @staticmethod + def build_generic_dataset( + cls: Union[Type[DistributedDataset], Callable], + is_built_on_rank: Callable, + synchronize_ranks: bool, + *args: Any, + ) -> Optional[Union[DistributedDataset, Iterable]]: + """Build the DistributedDataset + + Return None if and only if the underlying dataset class is not built on the current rank + and torch.distributed is initialized. + + Args: + cls (Union[Type[DistributedDataset], Callable]): The DistributedDataset class to be + built. In special cases, e.g. when we are building the low level dataset for a + RawMegatronDataset instance, we can accept a Callable which returns an Iterable. + + synchronize_ranks (bool): Whether to call barrier for rank-0 / barrier / other-ranks + behavior. Set to False when we enforce this behavior at higher level. 
+
+            args (Tuple[Any]): The positional arguments used to build the provided
+                DistributedDataset class
+
+        Raises:
+            Exception: When the dataset constructor raises an OSError
+
+        Returns:
+            Optional[Union[DistributedDataset, Iterable]]: The DistributedDataset instantiation,
+                the Iterable instantiation, or None
+        """
+        if torch.distributed.is_initialized():
+            rank = torch.distributed.get_rank()
+
+            dataset = None
+
+            # First, build on rank 0
+            if rank == 0 and is_built_on_rank():
+                try:
+                    dataset = cls(*args)
+                except OSError as err:
+                    log = (
+                        f"Failed to write dataset materials to the data cache directory. Please "
+                        f"supply a directory to which you have write access via the path_to_cache "
+                        f"attribute in BlendedMegatronDatasetConfig and retry. Refer to the "
+                        f"preserved traceback above for more information."
+                    )
+                    raise Exception(log) from err
+
+            if synchronize_ranks:
+                torch.distributed.barrier()
+
+            # After, build on other ranks
+            if rank != 0 and is_built_on_rank():
+                dataset = cls(*args)
+
+            return dataset
+
+        return cls(*args)
+
+
+def _get_size_per_split_per_dataset(
+    normalized_weights: List[float], target_size_per_split: List[int], margin: float = 0.0
+) -> List[List[int]]:
+    """Determine the contribution of the MegatronDataset splits to the BlendedDataset splits
+
+    Args:
+        normalized_weights (List[float]): e.g. [0.3, 0.7]
+
+        target_size_per_split (List[int]): The number of samples to target for each BlendedDataset
+            split
+
+        margin (float): The relative quantity of extra samples to build per split per dataset,
+            as a percentage
+
+    Returns:
+        List[List[int]]: The number of samples to request per MegatronDataset per split
+    """
+    assert numpy.isclose(sum(normalized_weights), 1.0)
+
+    # Use margin as buffer to ensure we satiate the request
+    sizes_per_dataset = [
+        [
+            int(math.ceil(math.ceil(target_size * weight) * (1 + margin / 100)))
+            for target_size in target_size_per_split
+        ]
+        for weight in normalized_weights
+    ]
+
+    return sizes_per_dataset
diff --git a/megatron/core/datasets/blended_megatron_dataset_config.py b/megatron/core/datasets/blended_megatron_dataset_config.py
index 52bc31f..f79d0eb 100644
--- a/megatron/core/datasets/blended_megatron_dataset_config.py
+++ b/megatron/core/datasets/blended_megatron_dataset_config.py
@@ -1,177 +1,172 @@
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
-
-import functools
-import logging
-import re
-from dataclasses import dataclass, field
-from typing import List, Optional, Tuple
-
-from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer
-from megatron.core.datasets.utils import Split, log_single_rank, normalize
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class BlendedMegatronDatasetConfig:
-    """Configuration object for Megatron Core datasets"""
-
-    random_seed: int
-    """The seed for all RNG during dataset creation."""
-
-    sequence_length: int
-    """The sequence length."""
-
-    blend: Optional[Tuple[List[str], Optional[List[float]]]] = None
-    """The blend, consisting of a list of dataset prefixes and optionally a list of dataset
-    weights. For example, [["dataset-path1", "dataset-path2"], [0.3, 0.7]]. When the weights are
-    None, they are inferred from the lengths of the contributing datasets. Not to be used with
-    'blend_per_split'. Defaults to None.
-    """
-
-    blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] = None
-    """A set of blends, as defined above, one for each split distribution. Not to be used with
-    'blend'. Defauls to None.
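To make the margin parameter concrete, here is a small worked sketch of the helper defined above; the weights and per-split targets are illustrative, and note that the margin is expressed as a percentage, so the 0.5 passed by the builder means 0.5%:

from megatron.core.datasets.blended_megatron_dataset_builder import _get_size_per_split_per_dataset

# weights [0.3, 0.7], requested sizes [1000, 100, 0] for train/valid/test, 0.5% margin
sizes = _get_size_per_split_per_dataset([0.3, 0.7], [1000, 100, 0], margin=0.5)
# ceil(ceil(1000 * 0.3) * 1.005) = 302, ceil(ceil(100 * 0.3) * 1.005) = 31, ...
# -> [[302, 31, 0], [704, 71, 0]]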
- """ - - renormalize_blend_weights: bool = False - """Renormalize the blend weights to account for mid-level dataset oversampling done to ensure - fulfillmenet of the of the requested number of samples. Defaults to False for backward - comparability in the data sample order. - """ - - split: Optional[str] = None - """The split string, a comma separated weighting for the dataset splits when drawing samples - from a single distribution. Not to be used with 'blend_per_split'. Defaults to None. - """ - - split_matrix: Optional[List[Tuple[float, float]]] = field(init=False, default=None) - """The split matrix consisting of non-overlapping book-ends of each split in order. For more - information, refer to 'convert_split_vector_to_split_matrix'. Created automatically from - 'split'. Not to be passed in to the constructor. - """ - - num_dataset_builder_threads: int = 1 - """The number of threads to use for dataset building.""" - - path_to_cache: Optional[str] = None - """Where all re-useable dataset indices are to be cached.""" - - mmap_bin_files: bool = True - """Whether to mmap the .bin files or use file pointers.""" - - mock: bool = field(init=False, default=False) - """Whether to bypass real data loading and validation in favor of mock data generation. - Created automatically from 'blend' and 'blend_per_split'. Not to be passed in to the - constructor. - """ - - tokenizer: Optional[MegatronTokenizer] = None - """The MegatronTokenizer instance or None. Required for datasets which do online tokenization.""" - - def __post_init__(self) -> None: - """Do asserts and set fields post init""" - if self.blend_per_split is not None and any(self.blend_per_split): - assert self.blend is None, "blend and blend_per_split are incompatible" - assert self.split is None, "split and blend_per_split are incompatible" - assert len(self.blend_per_split) == len( - Split - ), f"blend_per_split must contain {len(Split)} blends" - for split in Split: - if self.blend_per_split[split.value] is None: - log_single_rank( - logger, logging.INFO, f"blend not provided for {split.name} split" - ) - else: - assert self.blend_per_split[split.value][1] is None or len( - self.blend_per_split[split.value][0] - ) == len( - self.blend_per_split[split.value][1] - ), "blend per split prefixes and weights must be equal in number" - else: - if self.blend is not None: - assert self.blend[1] is None or len(self.blend[0]) == len( - self.blend[1] - ), "blend prefixes and weights must be equal in number" - assert self.split is not None, "split must be provided when blend is not None" - else: - self.mock = True - log_single_rank( - logger, - logging.INFO, - f"Let mock = True, as both blend and blend_per_split are None", - ) - self.split = "1,1,1" - log_single_rank( - logger, - logging.INFO, - f"Let split = {self.split}, an arbitrarily even split, as mock is True", - ) - split_vector = parse_and_normalize_split(self.split) - self.split_matrix = convert_split_vector_to_split_matrix(split_vector) - log_single_rank(logger, logging.INFO, f"Let split_matrix = {self.split_matrix}") - - -def parse_and_normalize_split(split: str) -> List[float]: - """Parse the dataset split ratios from a string - - Args: - split (str): The train valid test split string e.g. "99,1,0" - - Returns: - List[float]: The trian valid test split ratios e.g. 
[0.99, 0.01, 0.0] - """ - split = list(map(float, re.findall(r"[.0-9]+", split))) - split = split + [0.0 for _ in range(len(Split) - len(split))] - - assert len(split) == len(Split) - assert all(map(lambda _: _ >= 0.0, split)) - - split = normalize(split) - - return split - - -def convert_split_vector_to_split_matrix( - vector_a: List[float], vector_b: Optional[List[float]] = None -) -> List[Optional[Tuple[float, float]]]: - """Build the split matrix from one or optionally two contributing split vectors. - - Ex. a standard conversion: - - [0.99, 0.01, 0.0] -> [(0, 0.99), (0.99, 1.0), None] - - Ex. a conversion for Retro when Retro pretraining uses a [0.99, 0.01, 0.0] split and Retro - preprocessing used a [0.98, 0.02, 0.0] split: - - [0.99, 0.01, 0.0], [0.98, 0.02, 0.0] -> [(0, 0.98), (0.99, 1.0), None] - - Args: - vector_a (List[float]): The primary split vector - - vector_b (Optional[List[float]]): An optional secondary split vector which constrains the primary split vector. Defaults to None. - - Returns: - List[Tuple[float, float]]: The split matrix consisting of book-ends of each split in order - """ - if vector_b is None: - vector_b = vector_a - - # [.900, .090, .010] -> [0.00, .900, .990, 100] - expansion_a = functools.reduce(lambda a, b: a + [a[len(a) - 1] + b], [[0], *vector_a]) - expansion_b = functools.reduce(lambda a, b: a + [a[len(a) - 1] + b], [[0], *vector_b]) - - # [0.00, .900, .990, 100.0] -> [(0.00, .900), (.900, .990), (.990, 100)] - bookends_a = list(zip(expansion_a[:-1], expansion_a[1:])) - bookends_b = list(zip(expansion_b[:-1], expansion_b[1:])) - - # gather per-split overlap or None - matrix = [] - for bookend_a, bookend_b in zip(bookends_a, bookends_b): - if min(bookend_a[1], bookend_b[1]) <= max(bookend_a[0], bookend_b[0]): - overlap = None - else: - overlap = (max(bookend_a[0], bookend_b[0]), min(bookend_a[1], bookend_b[1])) - matrix.append(overlap) - - return matrix +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import functools +import logging +import re +from dataclasses import dataclass, field +from typing import List, Optional, Tuple + +from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer +from megatron.core.datasets.utils import Split, log_single_rank, normalize + +logger = logging.getLogger(__name__) + + +@dataclass +class BlendedMegatronDatasetConfig: + """Configuration object for Megatron Core datasets""" + + random_seed: int + """The seed for all RNG during dataset creation.""" + + sequence_length: int + """The sequence length.""" + + blend: Optional[Tuple[List[str], Optional[List[float]]]] = None + """The blend, consisting of a list of dataset prefixes and optionally a list of dataset + weights. For example, [["dataset-path1", "dataset-path2"], [0.3, 0.7]]. When the weights are + None, they are inferred from the lengths of the contributing datasets. Not to be used with + 'blend_per_split'. Defaults to None. + """ + + blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] = None + """A set of blends, as defined above, one for each split distribution. Not to be used with + 'blend'. Defauls to None. + """ + + split: Optional[str] = None + """The split string, a comma separated weighting for the dataset splits when drawing samples + from a single distribution. Not to be used with 'blend_per_split'. Defaults to None. 
+ """ + + split_matrix: Optional[List[Tuple[float, float]]] = field(init=False, default=None) + """The split matrix consisting of non-overlapping book-ends of each split in order. For more + information, refer to 'convert_split_vector_to_split_matrix'. Created automatically from + 'split'. Not to be passed in to the constructor. + """ + + num_dataset_builder_threads: int = 1 + """The number of threads to use for dataset building.""" + + path_to_cache: Optional[str] = None + """Where all re-useable dataset indices are to be cached.""" + + mmap_bin_files: bool = True + """Whether to mmap the .bin files or use file pointers.""" + + mock: bool = field(init=False, default=False) + """Whether to bypass real data loading and validation in favor of mock data generation. + Created automatically from 'blend' and 'blend_per_split'. Not to be passed in to the + constructor. + """ + + tokenizer: Optional[MegatronTokenizer] = None + """The MegatronTokenizer instance. Required for datasets that do online tokenization.""" + + def __post_init__(self) -> None: + """Do asserts and set fields post init""" + if self.blend_per_split is not None and any(self.blend_per_split): + assert self.blend is None, "blend and blend_per_split are incompatible" + assert self.split is None, "split and blend_per_split are incompatible" + assert len(self.blend_per_split) == len( + Split + ), f"blend_per_split must contain {len(Split)} blends" + for split in Split: + if self.blend_per_split[split.value] is None: + log_single_rank( + logger, logging.INFO, f"blend not provided for {split.name} split" + ) + else: + assert self.blend_per_split[split.value][1] is None or len( + self.blend_per_split[split.value][0] + ) == len( + self.blend_per_split[split.value][1] + ), "blend per split prefixes and weights must be equal in number" + else: + if self.blend is not None: + assert self.blend[1] is None or len(self.blend[0]) == len( + self.blend[1] + ), "blend prefixes and weights must be equal in number" + assert self.split is not None, "split must be provided when blend is not None" + else: + self.mock = True + log_single_rank( + logger, + logging.INFO, + f"Let mock = True, as both blend and blend_per_split are None", + ) + self.split = "1,1,1" + log_single_rank( + logger, + logging.INFO, + f"Let split = {self.split}, an arbitrarily even split, as mock is True", + ) + split_vector = parse_and_normalize_split(self.split) + self.split_matrix = convert_split_vector_to_split_matrix(split_vector) + log_single_rank(logger, logging.INFO, f"Let split_matrix = {self.split_matrix}") + + +def parse_and_normalize_split(split: str) -> List[float]: + """Parse the dataset split ratios from a string + + Args: + split (str): The train valid test split string e.g. "99,1,0" + + Returns: + List[float]: The trian valid test split ratios e.g. [0.99, 0.01, 0.0] + """ + split = list(map(float, re.findall(r"[.0-9]+", split))) + split = split + [0.0 for _ in range(len(Split) - len(split))] + + assert len(split) == len(Split) + assert all(map(lambda _: _ >= 0.0, split)) + + split = normalize(split) + + return split + + +def convert_split_vector_to_split_matrix( + vector_a: List[float], vector_b: Optional[List[float]] = None +) -> List[Optional[Tuple[float, float]]]: + """Build the split matrix from one or optionally two contributing split vectors. + + Ex. a standard conversion: + + [0.99, 0.01, 0.0] -> [(0, 0.99), (0.99, 1.0), None] + + Ex. 
a conversion for Retro when Retro pretraining uses a [0.99, 0.01, 0.0] split and Retro
+    preprocessing used a [0.98, 0.02, 0.0] split:
+
+    [0.99, 0.01, 0.0], [0.98, 0.02, 0.0] -> [(0, 0.98), (0.99, 1.0), None]
+
+    Args:
+        vector_a (List[float]): The primary split vector
+
+        vector_b (Optional[List[float]]): An optional secondary split vector which constrains the
+            primary split vector. Defaults to None.
+
+    Returns:
+        List[Tuple[float, float]]: The split matrix consisting of book-ends of each split in order
+    """
+    if vector_b is None:
+        vector_b = vector_a
+
+    # [.900, .090, .010] -> [0.00, .900, .990, 1.00]
+    expansion_a = functools.reduce(lambda a, b: a + [a[len(a) - 1] + b], [[0], *vector_a])
+    expansion_b = functools.reduce(lambda a, b: a + [a[len(a) - 1] + b], [[0], *vector_b])
+
+    # [0.00, .900, .990, 1.00] -> [(0.00, .900), (.900, .990), (.990, 1.00)]
+    bookends_a = list(zip(expansion_a[:-1], expansion_a[1:]))
+    bookends_b = list(zip(expansion_b[:-1], expansion_b[1:]))
+
+    # gather per-split overlap or None
+    matrix = []
+    for bookend_a, bookend_b in zip(bookends_a, bookends_b):
+        if min(bookend_a[1], bookend_b[1]) <= max(bookend_a[0], bookend_b[0]):
+            overlap = None
+        else:
+            overlap = (max(bookend_a[0], bookend_b[0]), min(bookend_a[1], bookend_b[1]))
+        matrix.append(overlap)
+
+    return matrix
diff --git a/megatron/core/dist_checkpointing/__init__.py b/megatron/core/dist_checkpointing/__init__.py
index eb7ad78..c9d059b 100644
--- a/megatron/core/dist_checkpointing/__init__.py
+++ b/megatron/core/dist_checkpointing/__init__.py
@@ -1,12 +1,12 @@
-# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
-
-from .core import check_is_distributed_checkpoint
-from .mapping import LocalNonpersistentObject, LocalNonpersitentObject, ShardedTensor
-from .serialization import (
-    load,
-    load_common_state_dict,
-    load_plain_tensors,
-    load_tensors_metadata,
-    remove_sharded_tensors,
-    save,
-)
+# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
+
+from .core import check_is_distributed_checkpoint
+from .mapping import LocalNonpersistentObject, ShardedObject, ShardedTensor
+from .serialization import (
+    load,
+    load_common_state_dict,
+    load_plain_tensors,
+    load_tensors_metadata,
+    remove_sharded_tensors,
+    save,
+)
diff --git a/megatron/core/dist_checkpointing/exchange_utils.py b/megatron/core/dist_checkpointing/exchange_utils.py
index 2106fe5..8a9b52c 100644
--- a/megatron/core/dist_checkpointing/exchange_utils.py
+++ b/megatron/core/dist_checkpointing/exchange_utils.py
@@ -1,519 +1,544 @@
-# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
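As a usage sketch of the two split helpers shown above (the values are taken from their docstring examples; the import path is the config module in this diff):

from megatron.core.datasets.blended_megatron_dataset_config import (
    convert_split_vector_to_split_matrix,
    parse_and_normalize_split,
)

vector = parse_and_normalize_split("99,1,0")           # -> [0.99, 0.01, 0.0]
matrix = convert_split_vector_to_split_matrix(vector)  # -> [(0, 0.99), (0.99, 1.0), None]

# Retro-style constraint with a second vector, per the docstring example:
constrained = convert_split_vector_to_split_matrix([0.99, 0.01, 0.0], [0.98, 0.02, 0.0])
# -> [(0, 0.98), (0.99, 1.0), None]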
- -"""Utilities for exchanging data between ranks.""" - -import logging -from collections import defaultdict -from functools import reduce -from itertools import zip_longest -from time import time -from typing import Dict, List, NamedTuple, Optional, Set, Tuple, TypeVar, cast - -import numpy as np -import torch - -from .core import CheckpointingException -from .dict_utils import nested_values -from .mapping import ShardedStateDict, ShardedTensor, is_main_replica -from .utils import _sharded_tensor_shard_id, _ShardId - -# TODO: remove TE references once the TE bug is fixed -# Check if Transformer Engine has Float8Tensor class -HAVE_TE_FLOAT8TENSOR = False -try: - from transformer_engine.pytorch.float8_tensor import Float8Tensor - - HAVE_TE_FLOAT8TENSOR = True -except (ImportError, ModuleNotFoundError): - # Float8Tensor not found - pass - - -def is_float8tensor(tensor: torch.Tensor) -> bool: - """Check if a tensor is a Transformer Engine Float8Tensor""" - return HAVE_TE_FLOAT8TENSOR and isinstance(tensor, Float8Tensor) - - -logger = logging.getLogger(__name__) - - -class ShardDistribution(NamedTuple): - """Represents a distribution of ShardedTensors. - - Given distribution is valid only for a specific parallelization group, - which is implicit here (not referenced by this class). - - Args: - main_rank_for_shard (Dict[_ShardId, int]): specifies which rank should hold - the main replica for a given shard - shards_in_this_group (Set[_ShardId]): which shards have a main replica - in this parallelization group - shard_to_metadata (Dict[_ShardId, ShardedTensor]): maps ShardedTensor - identifier to the original ShardedTensor - all_ranks_for_shard (Dict[_ShardId, List[int]]): specifies which ranks - need a given shard in a given parallelization group - - """ - - main_rank_for_shard: Dict[_ShardId, int] - shards_in_this_group: Set[_ShardId] - shard_to_metadata: Dict[_ShardId, ShardedTensor] - all_ranks_for_shard: Dict[_ShardId, List[int]] - - -def _shard_size(sh_ten: ShardedTensor): - """Returns size in bytes of a given sharded tensor.""" - if sh_ten.flattened_range is None: - numel = np.product(sh_ten.local_shape) - else: - numel = sh_ten.flattened_range.stop - sh_ten.flattened_range.start - return numel * torch._utils._element_size(sh_ten.dtype) - - -def _get_empty_tensor_for_exchange( - shard_id: _ShardId, - needed_shards: Dict[_ShardId, ShardedTensor], - unneeded_shards: Dict[_ShardId, ShardedTensor], - loaded_tensors: Dict[_ShardId, torch.Tensor], -) -> Tuple[torch.Tensor, Optional[torch.device]]: - """Determines the empty tensor to use for exchange. - - If shard_id is needed by this rank, it will be in the `unloaded_shards`. 
- Otherwise, the metadata for this tensor can be found in `shard_to_metadata` - - Args: - shard_id (_ShardId): shard_id that will be exchanged - needed_shards (Dict[_ShardId, ShardedTensor]): mapping from shard ids - to metadata for shards needed by this rank - unneeded_shards (Dict[_ShardId, ShardedTensor]): mapping from shard ids - to metadata for shards that can be discarded after exchange - loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping where useful tensors - are placed in - - Returns: - Tuple[torch.Tensor, Optional[torch.device]]: empty CUDA tensor to be exchanged, - and the device of the original state dict tensor (if there was any) - """ - local_unloaded_sh_ten = needed_shards.get(shard_id) - if local_unloaded_sh_ten is None: - orig_device = None # this tensor will be discarded anyway - sh_ten = unneeded_shards[shard_id] - if sh_ten.data is None: - sh_ten.init_data('cuda') - tensor = sh_ten.data - sh_ten.data = None # won't be used. free memory - else: - tensor = sh_ten.data - if tensor.device.type == 'cpu': - tensor = torch.empty_like(tensor, device='cuda') - else: - local_unloaded_sh_ten.init_data('cuda') - orig_device = local_unloaded_sh_ten.data.device - tensor = local_unloaded_sh_ten.data - if tensor.device.type == 'cpu': - tensor = torch.empty_like(tensor, device='cuda') - loaded_tensors[shard_id] = tensor - return tensor, orig_device - - -T = TypeVar('T') - - -def distribute_shards_to_ranks( - shard_to_ranks: Dict[T, List[int]], shard_to_size: Dict[T, int], num_ranks: int -) -> Dict[T, int]: - """Computes uniform distribution of workload across ranks, based on sizes. - - Currently, the assignment is greedy, based on: - 1. Firstly, the coverage of each shard - (how many ranks the shard is available on; lower coverage is assigned first) - 2. Secondly, the size of each shard (larger size is assigned first) - 3. Finally, shard id for differentiation. - - Third step is added because we rely on the fact that - the assignment is deterministic on all ranks. - - Args: - shard_to_ranks (Dict[T, List[int]]): mapping of rank access to shards - shard_to_size (Dict[T, int]): sizes of each shard - num_ranks (int): number of ranks in the parallelization group - - Returns (Dict[T, int]): assignment of shard to rank (which rank should do the work - to achieve maximal uniformity) - """ - shard_to_ranks = {k: tuple(v) for k, v in shard_to_ranks.items()} - shard_to_saving_rank = {} - rank_sizes = [(0, rank) for rank in range(num_ranks)] - - # start from tensors of lowest coverage, then go by tensor size from largest (hence minus size) - for shard_id, shard_ranks in sorted( - shard_to_ranks.items(), - key=lambda sh_id_ranks: ( - len(sh_id_ranks[1]), - -shard_to_size[sh_id_ranks[0]], - sh_id_ranks[0], - ), - ): - # assign greedily to the least occupied rank - size, rank = min((size, rank) for size, rank in rank_sizes if rank in shard_ranks) - - shard_to_saving_rank[shard_id] = rank - rank_sizes[rank] = (size + shard_to_size[shard_id], rank) - - logger.debug(f'distribute_shards_to_ranks distribution: {rank_sizes}') - - return shard_to_saving_rank - - -def determine_main_replica_uniform_distribution( - sharded_state_dict: ShardedStateDict, - parallelization_group: torch.distributed.ProcessGroup, - ignore_groups: bool = False, -) -> Optional[ShardDistribution]: - """Computes the save distribution. - - Should be used in conjunction with `distribute_main_replicas_with_precomputed_distribution` - which applies the computed save distribution. 
- - We rely on the fact that the assignment algorithm is deterministic on all ranks, - so there is no extra communication needed after metadata exchange. - - Args: - sharded_state_dict (ShardedStateDict): state dict to compute the distribution of - parallelization_group (ProcessGroup): distribution will be computed - within this process group - ignore_groups (bool, optional): whether the distribution defines groups. - This option is primarily used during loading, as it ensures that all replicas, - including non-main ones, are loaded by this parallelization group - Defaults to False. - - Returns (ShardDistribution, optional): distribution that can be used to apply the - parallelization. Returns None if the process_group is trivial (1 rank) - - """ - group_size = torch.distributed.get_world_size(group=parallelization_group) - if group_size <= 1: - return - local_shards = list( - sh_base - for sh_base in nested_values(sharded_state_dict) - if isinstance(sh_base, ShardedTensor) - ) - local_shards_no_data = [ten.without_data() for ten in local_shards] - - all_shards = [None] * torch.distributed.get_world_size(group=parallelization_group) - torch.distributed.all_gather_object( - all_shards, local_shards_no_data, group=parallelization_group - ) - - shard_to_ranks = defaultdict(list) - shard_to_size = {} - shard_to_metadata = {} - shards_in_this_parallelization_group: Set[_ShardId] = set() - for rank, rank_shards in enumerate(all_shards): - for sh_ten in rank_shards: - shard_id = _sharded_tensor_shard_id(sh_ten) - shard_to_ranks[shard_id].append(rank) - if shard_id not in shard_to_size: - shard_to_size[shard_id] = _shard_size(sh_ten) - shard_to_metadata[shard_id] = sh_ten - if is_main_replica(sh_ten.replica_id) or ignore_groups: - shards_in_this_parallelization_group.add(shard_id) - - shard_to_ranks = { - k: v for k, v in shard_to_ranks.items() if k in shards_in_this_parallelization_group - } - - shard_to_saving_rank = distribute_shards_to_ranks( - shard_to_ranks, shard_to_size, len(all_shards) - ) - - return ShardDistribution( - shard_to_saving_rank, - shards_in_this_parallelization_group, - shard_to_metadata, - shard_to_ranks, - ) - - -@torch.no_grad() -def exchange_loaded_tensors_gather_rounds( - loaded_tensors: Dict[_ShardId, torch.Tensor], - unloaded_shards: Dict[_ShardId, ShardedTensor], - shard_distribution: ShardDistribution = None, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, -) -> Dict[_ShardId, torch.Tensor]: - """Exchange the tensors loaded by different ranks with several all_gather calls. - - Groups tensors by dtype, divide tensors that will be exchanged into rounds - and execute all_gather for tensors from each round. - - Note: the loading is distributed across ranks based on total loaded size - in bytes, so there is no guarantee that number of rounds needed for each - rank will be similar, which might result in a lot of almost empty - all_gathers. The solution would be to group all tensors into a one - bytes tensor and do a single all_gather (with similarly sized messages). - - Args: - loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to tensors already loaded by this rank. - unloaded_shards (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to ShardedTensors that aren't loaded yet. - shard_distribution (ShardDistribution): distribution of all shards - parallelization_group (ProcessGroup, optional): process group used for load - distribution. 
Tensors will be exchanged within this group - - Returns: - Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors - needed by this rank to load a given state dict. Includes - previously loaded tensors (from `loaded_tensors` input) - """ - main_rank_for_shard, _, shard_to_metadata, all_ranks_for_shard = shard_distribution - local_rank = torch.distributed.get_rank(group=parallelization_group) - - all_loaded_tensors = dict(loaded_tensors) - - # Group by dtype so that we all_gather tensors of the same dtype - for dtype in sorted(set(map(lambda sh_ten: sh_ten.dtype, shard_to_metadata.values())), key=str): - - start = time() - # shards_by_rank maps rank to tensors loaded by this rank - shards_by_rank: List[List[torch.Tensor]] = [ - [] for _ in range(torch.distributed.get_world_size(group=parallelization_group)) - ] - for shard_id, rank in main_rank_for_shard.items(): - if len(all_ranks_for_shard[shard_id]) == 1: - assert all_ranks_for_shard[shard_id][0] == main_rank_for_shard[shard_id], ( - f'When there is only 1 ranks that needs a given shard,' - f' it should be the loading rank.' - f' Got: needs [{all_ranks_for_shard[shard_id][0]}]' - f' vs loads [{main_rank_for_shard[shard_id]}]' - ) - # Skipping the exchange since only the loading rank needs this tensor - # TODO: we can employ some optimizations even for `len(shard_to_ranks) > 1` - # case, e.g. P2P exchange. Currently handling this case saves most of the - # work though. - continue - if shard_to_metadata[shard_id].dtype == dtype: - shards_by_rank[rank].append(shard_id) - - # Transpose `shards_by_rank` to form exchange rounds - shards_by_round = zip_longest(*shards_by_rank, fillvalue=None) - for round_idx, round_shard_ids in enumerate(shards_by_round): - round_tensors = [] - orig_devices = {} - for rank, shard_id in enumerate(round_shard_ids): - if shard_id is None: - # if no more useful data, the given rank will exchange empty tensor - local_ten = torch.empty(0, dtype=dtype, device='cuda') - orig_device = None - else: - assert isinstance(shard_id, tuple), type(shard_id) - if rank == local_rank: - assert shard_id in all_loaded_tensors, (shard_id, all_loaded_tensors.keys()) - orig_device = all_loaded_tensors[shard_id] - all_loaded_tensors[shard_id] = all_loaded_tensors[shard_id].cuda() - local_ten = all_loaded_tensors[shard_id] - else: - local_ten, orig_device = _get_empty_tensor_for_exchange( - shard_id, unloaded_shards, shard_to_metadata, all_loaded_tensors - ) - # Because of a TE bug, we have to exchange a nominal dtype instead of FP8 - # It's ok to keep the nominal dtype after exchange, because TE will handle - # this during state dict load. 
- # TODO: remove it once the bug is fixed - if is_float8tensor(local_ten): - local_ten = local_ten.from_float8() - all_loaded_tensors[shard_id] = local_ten - - round_tensors.append(local_ten) - if orig_device is not None: - orig_devices[shard_id] = orig_device - - torch.distributed.all_gather( - list(round_tensors), - round_tensors[local_rank], - group=parallelization_group, - async_op=False, - ) - - # Move tensors back to CPU if originally was on CPU - for shard_id, orig_device in orig_devices.items(): - all_loaded_tensors[shard_id] = all_loaded_tensors[shard_id].to(orig_device) - - del round_tensors # remove tensor references - - end = time() - if torch.distributed.get_rank() == 0: - logger.debug(f'{dtype} exchange rounds all_gather schedule took {end - start}s') - - return all_loaded_tensors - - -def exchange_loaded_tensors_gather_object( - loaded_tensors: Dict[_ShardId, torch.Tensor], - unloaded_shards: Dict[_ShardId, ShardedTensor], - shard_distribution: ShardDistribution, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, -) -> Dict[_ShardId, torch.Tensor]: - """Exchange the tensors loaded by different ranks with a simple all_gather_object call. - - This version can be used for debugging purposes do to its simplistic - implementation. Shouldn't be used if performance is important. - - Args: - loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to tensors already loaded by this rank. - unloaded_shards (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to ShardedTensors that aren't loaded yet. - shard_distribution (ShardDistribution): distribution of all shards - parallelization_group (ProcessGroup, optional): process group used for load - distribution. Tensors will be exchanged within this group - - Returns: - Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors - needed by this rank to load a given state dict. Includes - previously loaded tensors (from `loaded_tensors` input) - - """ - all_loaded_tensors_list = [None] * torch.distributed.get_world_size(group=parallelization_group) - torch.distributed.all_gather_object( - all_loaded_tensors_list, loaded_tensors, group=parallelization_group - ) - all_loaded_tensors_list = cast(List[Dict[_ShardId, torch.Tensor]], all_loaded_tensors_list) - all_loaded_tensors = reduce(lambda x, y: {**x, **y}, all_loaded_tensors_list) - - # Error checks - if len(all_loaded_tensors) != sum(map(len, all_loaded_tensors_list)): - err_msg = 'Duplicate shard ids loaded by different ranks' - if torch.distributed.get_rank() == 0: - logger.error( - f'{err_msg}. Shards ids by rank:' - f' {[lt.keys() for lt in all_loaded_tensors_list]}' - ) - raise CheckpointingException(err_msg) - - return all_loaded_tensors - - -@torch.no_grad() -def exchange_loaded_tensors_broadcast( - loaded_tensors: Dict[_ShardId, torch.Tensor], - unloaded_shards: Dict[_ShardId, ShardedTensor], - shard_distribution: ShardDistribution, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, -) -> Dict[_ShardId, torch.Tensor]: - """Exchange the tensors loaded by different ranks by a series of broadcasts. - - For each rank for each loaded tensor do a broadcast to the whole group. - A reasonable tradeoff in terms of performance and simplicity. - - Args: - loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to tensors already loaded by this rank. 
- unloaded_shards (Dict[_ShardId, ShardedTensor]): mapping from ShardedTensor - shard ids to ShardedTensors that aren't loaded yet. - shard_distribution (ShardDistribution): distribution of all shards - parallelization_group (ProcessGroup, optional): process group used for load - distribution. Tensors will be exchanged within this group - - Returns: - Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors - needed by this rank to load a given state dict. Includes - previously loaded tensors (from `loaded_tensors` input) - """ - main_rank_for_shard, _, shard_to_metadata, all_ranks_for_shard = shard_distribution - local_rank = torch.distributed.get_rank(group=parallelization_group) - - all_loaded_tensors = dict(loaded_tensors) - - start = time() - - for idx, (shard_id, rank) in enumerate(main_rank_for_shard.items()): - if len(all_ranks_for_shard[shard_id]) == 1: - assert all_ranks_for_shard[shard_id][0] == main_rank_for_shard[shard_id], ( - f'When there is only 1 ranks that needs a given shard,' - f' it should be the loading rank.' - f'Got: needs [{all_ranks_for_shard[shard_id][0]}]' - f' vs loads [{main_rank_for_shard[shard_id]}]' - ) - # Skipping the exchange since only the loading rank needs this tensor - # TODO: we can employ some optimizations even for `len(shard_to_ranks) > 1` case, - # e.g. P2P exchange. Currently handling this case saves most of the work though. - continue - if rank == local_rank: - assert shard_id in all_loaded_tensors, (shard_id, all_loaded_tensors.keys()) - orig_device = all_loaded_tensors[shard_id].device - local_ten = all_loaded_tensors[shard_id].cuda() - else: - local_ten, orig_device = _get_empty_tensor_for_exchange( - shard_id, unloaded_shards, shard_to_metadata, all_loaded_tensors - ) - - # Because of a TE bug, we have to exchange a nominal dtype instead of FP8 - # It's ok to keep the nominal dtype after exchange, because TE will handle - # this during state dict load. - # TODO: remove it once the bug is fixed - if is_float8tensor(local_ten): - local_ten = local_ten.from_float8() - all_loaded_tensors[shard_id] = local_ten - - global_src_rank = ( - rank - if parallelization_group == None - else torch.distributed.get_global_rank(parallelization_group, rank) - ) - # We can do async_op=True only if there is no CPU-copy follow-up - torch.distributed.broadcast( - local_ten, - src=global_src_rank, - group=parallelization_group, - async_op=orig_device is None, - ) - # Move tensor back to CPU if originally was on CPU - if orig_device is not None: - all_loaded_tensors[shard_id] = local_ten.to(orig_device) - del local_ten - - end = time() - if torch.distributed.get_rank() == 0: - logger.debug(f'exchange broadcast schedule took {end - start}s') - - return all_loaded_tensors - - -def exchange_by_distribution( - loaded_tensors: Dict[_ShardId, torch.Tensor], - unloaded_shards: Dict[_ShardId, ShardedTensor], - shard_distribution: ShardDistribution = None, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, - exchange_algo='broadcast', -) -> Dict[_ShardId, torch.Tensor]: - """Exchange tensors loaded by different ranks using the specified exchange_algo. - - Args: - loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor - shard ids to tensors already loaded by this rank. - unloaded_shards (Dict[_ShardId, ShardedTensor]): mapping from ShardedTensor - shard ids to ShardedTensors that aren't loaded yet. 
- shard_distribution (ShardDistribution): distribution of all shards - parallelization_group (ProcessGroup, optional): process group used for load - distribution. Tensors will be exchanged within this group - exchange_algo (str): The algorithm used for performing exchanges. - Defaults to 'broadcast'. - - Returns: - Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors - needed by this rank to load a given state dict. Includes - previously loaded tensors (from `loaded_tensors` input) - """ - - if exchange_algo == 'gather_object': - exchange_fn = exchange_loaded_tensors_gather_object - elif exchange_algo == 'gather_rounds': - exchange_fn = exchange_loaded_tensors_gather_rounds - elif exchange_algo == 'broadcast': - exchange_fn = exchange_loaded_tensors_broadcast - else: - raise NotImplementedError(f'Unrecognized gather algorithm: {exchange_algo}') - return exchange_fn(loaded_tensors, unloaded_shards, shard_distribution, parallelization_group) +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +"""Utilities for exchanging data between ranks.""" + +import logging +from collections import defaultdict +from functools import reduce +from itertools import zip_longest +from typing import Any, Dict, List, NamedTuple, Optional, Set, Tuple, TypeVar, cast + +import numpy as np +import torch + +from .core import CheckpointingException +from .dict_utils import nested_values +from .mapping import ShardedStateDict, ShardedTensor, is_main_replica +from .utils import _sharded_tensor_shard_id, _ShardId, debug_time + +# TODO: remove TE references once the TE bug is fixed +# Check if Transformer Engine has Float8Tensor class +HAVE_TE_FLOAT8TENSOR = False +try: + from transformer_engine.pytorch.float8_tensor import Float8Tensor + + HAVE_TE_FLOAT8TENSOR = True +except (ImportError, ModuleNotFoundError): + # Float8Tensor not found + pass + + +def is_float8tensor(tensor: torch.Tensor) -> bool: + """Check if a tensor is a Transformer Engine Float8Tensor""" + return HAVE_TE_FLOAT8TENSOR and isinstance(tensor, Float8Tensor) + + +logger = logging.getLogger(__name__) + + +class ShardDistribution(NamedTuple): + """Represents a distribution of ShardedTensors. + + Given distribution is valid only for a specific parallelization group, + which is implicit here (not referenced by this class). 
+ + Args: + main_rank_for_shard (Dict[_ShardId, int]): specifies which rank should hold + the main replica for a given shard + shards_in_this_group (Set[_ShardId]): which shards have a main replica + in this parallelization group + shard_to_metadata (Dict[_ShardId, ShardedTensor]): maps ShardedTensor + identifier to the original ShardedTensor + all_ranks_for_shard (Dict[_ShardId, List[int]]): specifies which ranks + need a given shard in a given parallelization group + """ + + main_rank_for_shard: Dict[_ShardId, int] + shards_in_this_group: Set[_ShardId] + shard_to_metadata: Dict[_ShardId, ShardedTensor] + all_ranks_for_shard: Dict[_ShardId, List[int]] + + +def _shard_size(sh_ten: ShardedTensor): + """Returns size in bytes of a given sharded tensor.""" + if sh_ten.flattened_range is None: + numel = np.product(sh_ten.local_shape) + else: + numel = sh_ten.flattened_range.stop - sh_ten.flattened_range.start + return numel * torch._utils._element_size(sh_ten.dtype) + + +def _get_empty_tensor_for_exchange( + shard_id: _ShardId, + needed_shards: Dict[_ShardId, ShardedTensor], + unneeded_shards: Dict[_ShardId, ShardedTensor], + loaded_tensors: Dict[_ShardId, torch.Tensor], +) -> Tuple[torch.Tensor, Optional[torch.device]]: + """Determines the empty tensor to use for exchange. + + If shard_id is needed by this rank, it will be in the `unloaded_shards`. + Otherwise, the metadata for this tensor can be found in `shard_to_metadata` + + Args: + shard_id (_ShardId): shard_id that will be exchanged + needed_shards (Dict[_ShardId, ShardedTensor]): mapping from shard ids + to metadata for shards needed by this rank + unneeded_shards (Dict[_ShardId, ShardedTensor]): mapping from shard ids + to metadata for shards that can be discarded after exchange + loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping where useful tensors + are placed in + + Returns: + Tuple[torch.Tensor, Optional[torch.device]]: empty CUDA tensor to be exchanged, + and the device of the original state dict tensor (if there was any) + """ + local_unloaded_sh_ten = needed_shards.get(shard_id) + if local_unloaded_sh_ten is None: + orig_device = None # this tensor will be discarded anyway + sh_ten = unneeded_shards[shard_id] + if sh_ten.data is None: + sh_ten.init_data('cuda') + tensor = sh_ten.data + sh_ten.data = None # won't be used. free memory + else: + tensor = sh_ten.data + if tensor.device.type == 'cpu': + tensor = torch.empty_like(tensor, device='cuda') + else: + local_unloaded_sh_ten.init_data('cuda') + orig_device = local_unloaded_sh_ten.data.device + tensor = local_unloaded_sh_ten.data + if tensor.device.type == 'cpu': + tensor = torch.empty_like(tensor, device='cuda') + loaded_tensors[shard_id] = tensor + return tensor, orig_device + + +T = TypeVar('T') + + +def distribute_shards_to_ranks( + shard_to_ranks: Dict[T, List[int]], shard_to_size: Dict[T, int], num_ranks: int +) -> Dict[T, int]: + """Computes uniform distribution of workload across ranks, based on sizes. + + Currently, the assignment is greedy, based on: + 1. Firstly, the coverage of each shard + (how many ranks the shard is available on; lower coverage is assigned first) + 2. Secondly, the size of each shard (larger size is assigned first) + 3. Finally, shard id for differentiation. + + Third step is added because we rely on the fact that + the assignment is deterministic on all ranks. 
+ + Args: + shard_to_ranks (Dict[T, List[int]]): mapping of rank access to shards + shard_to_size (Dict[T, int]): sizes of each shard + num_ranks (int): number of ranks in the parallelization group + + Returns (Dict[T, int]): assignment of shard to rank (which rank should do the work + to achieve maximal uniformity) + """ + shard_to_ranks = {k: tuple(v) for k, v in shard_to_ranks.items()} + shard_to_saving_rank = {} + rank_sizes = [(0, rank) for rank in range(num_ranks)] + + # start from tensors of lowest coverage, then go by tensor size from largest (hence minus size) + for shard_id, shard_ranks in sorted( + shard_to_ranks.items(), + key=lambda sh_id_ranks: ( + len(sh_id_ranks[1]), + -shard_to_size[sh_id_ranks[0]], + sh_id_ranks[0], + ), + ): + # assign greedily to the least occupied rank + size, rank = min((size, rank) for size, rank in rank_sizes if rank in shard_ranks) + + shard_to_saving_rank[shard_id] = rank + rank_sizes[rank] = (size + shard_to_size[shard_id], rank) + + logger.debug(f'distribute_shards_to_ranks distribution: {rank_sizes}') + + return shard_to_saving_rank + + +def determine_main_replica_uniform_distribution( + sharded_state_dict: ShardedStateDict, + parallelization_group: torch.distributed.ProcessGroup, + ignore_groups: bool = False, +) -> Optional[ShardDistribution]: + """Computes the save distribution. + + Should be used in conjunction with `distribute_main_replicas_with_precomputed_distribution` + which applies the computed save distribution. + + We rely on the fact that the assignment algorithm is deterministic on all ranks, + so there is no extra communication needed after metadata exchange. + + Args: + sharded_state_dict (ShardedStateDict): state dict to compute the distribution of + parallelization_group (ProcessGroup): distribution will be computed + within this process group + ignore_groups (bool, optional): whether the distribution defines groups. + This option is primarily used during loading, as it ensures that all replicas, + including non-main ones, are loaded by this parallelization group + Defaults to False. + + Returns (ShardDistribution, optional): distribution that can be used to apply the + parallelization. 
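The greedy assignment above can be exercised in isolation; a toy sketch with hypothetical shard ids and byte sizes (the real keys are _ShardId tuples):

from megatron.core.dist_checkpointing.exchange_utils import distribute_shards_to_ranks

# "b" is visible on both ranks; low-coverage shards are placed first, then ties are
# broken by size and shard id, so the result is deterministic across ranks.
assignment = distribute_shards_to_ranks(
    shard_to_ranks={"a": [0], "b": [0, 1], "c": [1]},
    shard_to_size={"a": 100, "b": 300, "c": 200},
    num_ranks=2,
)
# With these toy values the greedy pass yields {"c": 1, "a": 0, "b": 0}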
Returns None if the process_group is trivial (1 rank) + + """ + group_size = torch.distributed.get_world_size(group=parallelization_group) + if group_size <= 1: + return + local_shards = list( + sh_base + for sh_base in nested_values(sharded_state_dict) + if isinstance(sh_base, ShardedTensor) + ) + local_shards_no_data = [ten.without_data() for ten in local_shards] + + all_shards = [None] * torch.distributed.get_world_size(group=parallelization_group) + torch.distributed.all_gather_object( + all_shards, local_shards_no_data, group=parallelization_group + ) + + shard_to_ranks = defaultdict(list) + shard_to_size = {} + shard_to_metadata = {} + shards_in_this_parallelization_group: Set[_ShardId] = set() + for rank, rank_shards in enumerate(all_shards): + for sh_ten in rank_shards: + shard_id = _sharded_tensor_shard_id(sh_ten) + shard_to_ranks[shard_id].append(rank) + if shard_id not in shard_to_size: + shard_to_size[shard_id] = _shard_size(sh_ten) + shard_to_metadata[shard_id] = sh_ten + if is_main_replica(sh_ten.replica_id) or ignore_groups: + shards_in_this_parallelization_group.add(shard_id) + + shard_to_ranks = { + k: v for k, v in shard_to_ranks.items() if k in shards_in_this_parallelization_group + } + + shard_to_saving_rank = distribute_shards_to_ranks( + shard_to_ranks, shard_to_size, len(all_shards) + ) + + return ShardDistribution( + shard_to_saving_rank, + shards_in_this_parallelization_group, + shard_to_metadata, + shard_to_ranks, + ) + + +@torch.no_grad() +@debug_time(f"exchange_loaded_tensors_gather_rounds", logger) +def exchange_loaded_tensors_gather_rounds( + loaded_tensors: Dict[_ShardId, torch.Tensor], + unloaded_shards: Dict[_ShardId, ShardedTensor], + shard_distribution: ShardDistribution = None, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, +) -> Dict[_ShardId, torch.Tensor]: + """Exchange the tensors loaded by different ranks with several all_gather calls. + + Groups tensors by dtype, divide tensors that will be exchanged into rounds + and execute all_gather for tensors from each round. + + Note: the loading is distributed across ranks based on total loaded size + in bytes, so there is no guarantee that number of rounds needed for each + rank will be similar, which might result in a lot of almost empty + all_gathers. The solution would be to group all tensors into a one + bytes tensor and do a single all_gather (with similarly sized messages). + + Args: + loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to tensors already loaded by this rank. + unloaded_shards (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to ShardedTensors that aren't loaded yet. + shard_distribution (ShardDistribution): distribution of all shards + parallelization_group (ProcessGroup, optional): process group used for load + distribution. Tensors will be exchanged within this group + + Returns: + Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors + needed by this rank to load a given state dict. 
Includes + previously loaded tensors (from `loaded_tensors` input) + """ + main_rank_for_shard, _, shard_to_metadata, all_ranks_for_shard = shard_distribution + local_rank = torch.distributed.get_rank(group=parallelization_group) + + all_loaded_tensors = dict(loaded_tensors) + + # Group by dtype so that we all_gather tensors of the same dtype + for dtype in sorted(set(map(lambda sh_ten: sh_ten.dtype, shard_to_metadata.values())), key=str): + + with debug_time(f"dtype_{dtype}"): + # shards_by_rank maps rank to tensors loaded by this rank + shards_by_rank: List[List[torch.Tensor]] = [ + [] for _ in range(torch.distributed.get_world_size(group=parallelization_group)) + ] + for shard_id, rank in main_rank_for_shard.items(): + if len(all_ranks_for_shard[shard_id]) == 1: + assert all_ranks_for_shard[shard_id][0] == main_rank_for_shard[shard_id], ( + f'When there is only 1 ranks that needs a given shard,' + f' it should be the loading rank.' + f' Got: needs [{all_ranks_for_shard[shard_id][0]}]' + f' vs loads [{main_rank_for_shard[shard_id]}]' + ) + # Skipping the exchange since only the loading rank needs this tensor + # TODO: we can employ some optimizations even for `len(shard_to_ranks) > 1` + # case, e.g. P2P exchange. Currently handling this case saves most of the + # work though. + continue + if shard_to_metadata[shard_id].dtype == dtype: + shards_by_rank[rank].append(shard_id) + + # Transpose `shards_by_rank` to form exchange rounds + shards_by_round = zip_longest(*shards_by_rank, fillvalue=None) + for round_idx, round_shard_ids in enumerate(shards_by_round): + round_tensors = [] + orig_devices = {} + for rank, shard_id in enumerate(round_shard_ids): + if shard_id is None: + # if no more useful data, the given rank will exchange empty tensor + local_ten = torch.empty(0, dtype=dtype, device='cuda') + orig_device = None + else: + assert isinstance(shard_id, tuple), type(shard_id) + if rank == local_rank: + assert shard_id in all_loaded_tensors, ( + shard_id, + all_loaded_tensors.keys(), + ) + orig_device = all_loaded_tensors[shard_id] + all_loaded_tensors[shard_id] = all_loaded_tensors[shard_id].cuda() + local_ten = all_loaded_tensors[shard_id] + else: + local_ten, orig_device = _get_empty_tensor_for_exchange( + shard_id, unloaded_shards, shard_to_metadata, all_loaded_tensors + ) + # Because of a TE bug, we have to exchange a nominal dtype instead of FP8 + # It's ok to keep the nominal dtype after exchange, because TE will handle + # this during state dict load. 
+ # TODO: remove it once the bug is fixed + if is_float8tensor(local_ten): + local_ten = local_ten.from_float8() + all_loaded_tensors[shard_id] = local_ten + + round_tensors.append(local_ten) + if orig_device is not None: + orig_devices[shard_id] = orig_device + + torch.distributed.all_gather( + list(round_tensors), + round_tensors[local_rank], + group=parallelization_group, + async_op=False, + ) + + # Move tensors back to CPU if originally was on CPU + for shard_id, orig_device in orig_devices.items(): + all_loaded_tensors[shard_id] = all_loaded_tensors[shard_id].to(orig_device) + + del round_tensors # remove tensor references + + return all_loaded_tensors + + +def exchange_loaded_tensors_gather_object( + loaded_tensors: Dict[_ShardId, torch.Tensor], + unloaded_shards: Dict[_ShardId, ShardedTensor], + shard_distribution: ShardDistribution, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, +) -> Dict[_ShardId, torch.Tensor]: + """Exchange the tensors loaded by different ranks with a simple all_gather_object call. + + This version can be used for debugging purposes do to its simplistic + implementation. Shouldn't be used if performance is important. + + Args: + loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to tensors already loaded by this rank. + unloaded_shards (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to ShardedTensors that aren't loaded yet. + shard_distribution (ShardDistribution): distribution of all shards + parallelization_group (ProcessGroup, optional): process group used for load + distribution. Tensors will be exchanged within this group + + Returns: + Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors + needed by this rank to load a given state dict. Includes + previously loaded tensors (from `loaded_tensors` input) + + """ + all_loaded_tensors_list = [None] * torch.distributed.get_world_size(group=parallelization_group) + torch.distributed.all_gather_object( + all_loaded_tensors_list, loaded_tensors, group=parallelization_group + ) + all_loaded_tensors_list = cast(List[Dict[_ShardId, torch.Tensor]], all_loaded_tensors_list) + all_loaded_tensors = reduce(lambda x, y: {**x, **y}, all_loaded_tensors_list) + + # Error checks + if len(all_loaded_tensors) != sum(map(len, all_loaded_tensors_list)): + err_msg = 'Duplicate shard ids loaded by different ranks' + if torch.distributed.get_rank() == 0: + logger.error( + f'{err_msg}. Shards ids by rank:' + f' {[lt.keys() for lt in all_loaded_tensors_list]}' + ) + raise CheckpointingException(err_msg) + + return all_loaded_tensors + + +def exchange_loaded_objects_gather_object( + loaded_objects: Dict[_ShardId, Any] +) -> Dict[_ShardId, Any]: + """Exchange the objects loaded by different ranks with a simple all_gather_object call. + + Args: + loaded_objects (Dict[_ShardId, Any]): mapping from shard ids to objects + already loaded by this rank. + + Returns: + Dict[_ShardId, Any]: dictionary mapping shard ids to objects needed by this rank to + load a given state dict. 
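The duplicate-shard check above relies on dict union silently dropping repeated keys, so a length mismatch between the merged dict and the per-rank dicts signals that two ranks loaded the same shard id. A single-process illustration with made-up shard ids (the real `_ShardId` values carry more structure):

from functools import reduce

# Per-rank results as they might come back from all_gather_object (hypothetical ids).
per_rank = [
    {'layer1.weight/shard_0': 'tensor_a'},
    {'layer2.weight/shard_0': 'tensor_b'},
    {'layer1.weight/shard_0': 'tensor_a_again'},  # same id loaded twice -> error case
]
merged = reduce(lambda x, y: {**x, **y}, per_rank)
assert len(merged) == 2 and sum(map(len, per_rank)) == 3  # mismatch => duplicates detected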
+ """ + all_loaded_objects_list = [None] * torch.distributed.get_world_size(group=None) + torch.distributed.all_gather_object(all_loaded_objects_list, loaded_objects, group=None) + all_loaded_objects_list = cast(List[Dict[_ShardId, Any]], all_loaded_objects_list) + all_loaded_objects = reduce(lambda x, y: {**x, **y}, all_loaded_objects_list) + + # Error checks + if len(all_loaded_objects) != sum(map(len, all_loaded_objects_list)): + err_msg = 'Duplicate shard ids loaded by different ranks' + if torch.distributed.get_rank() == 0: + logger.error( + f'{err_msg}. Shards ids by rank:' + f' {[lt.keys() for lt in all_loaded_objects_list]}' + ) + raise CheckpointingException(err_msg) + + return all_loaded_objects + + +@torch.no_grad() +@debug_time("exchange_loaded_tensors_broadcast", logger) +def exchange_loaded_tensors_broadcast( + loaded_tensors: Dict[_ShardId, torch.Tensor], + unloaded_shards: Dict[_ShardId, ShardedTensor], + shard_distribution: ShardDistribution, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, +) -> Dict[_ShardId, torch.Tensor]: + """Exchange the tensors loaded by different ranks by a series of broadcasts. + + For each rank for each loaded tensor do a broadcast to the whole group. + A reasonable tradeoff in terms of performance and simplicity. + + Args: + loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to tensors already loaded by this rank. + unloaded_shards (Dict[_ShardId, ShardedTensor]): mapping from ShardedTensor + shard ids to ShardedTensors that aren't loaded yet. + shard_distribution (ShardDistribution): distribution of all shards + parallelization_group (ProcessGroup, optional): process group used for load + distribution. Tensors will be exchanged within this group + + Returns: + Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors + needed by this rank to load a given state dict. Includes + previously loaded tensors (from `loaded_tensors` input) + """ + main_rank_for_shard, _, shard_to_metadata, all_ranks_for_shard = shard_distribution + local_rank = torch.distributed.get_rank(group=parallelization_group) + + all_loaded_tensors = dict(loaded_tensors) + + for idx, (shard_id, rank) in enumerate(main_rank_for_shard.items()): + if len(all_ranks_for_shard[shard_id]) == 1: + assert all_ranks_for_shard[shard_id][0] == main_rank_for_shard[shard_id], ( + f'When there is only 1 ranks that needs a given shard,' + f' it should be the loading rank.' + f'Got: needs [{all_ranks_for_shard[shard_id][0]}]' + f' vs loads [{main_rank_for_shard[shard_id]}]' + ) + # Skipping the exchange since only the loading rank needs this tensor + # TODO: we can employ some optimizations even for `len(shard_to_ranks) > 1` case, + # e.g. P2P exchange. Currently handling this case saves most of the work though. + continue + if rank == local_rank: + assert shard_id in all_loaded_tensors, (shard_id, all_loaded_tensors.keys()) + orig_device = all_loaded_tensors[shard_id].device + local_ten = all_loaded_tensors[shard_id].cuda() + else: + local_ten, orig_device = _get_empty_tensor_for_exchange( + shard_id, unloaded_shards, shard_to_metadata, all_loaded_tensors + ) + + # Because of a TE bug, we have to exchange a nominal dtype instead of FP8 + # It's ok to keep the nominal dtype after exchange, because TE will handle + # this during state dict load. 
+ # TODO: remove it once the bug is fixed + if is_float8tensor(local_ten): + local_ten = local_ten.from_float8() + all_loaded_tensors[shard_id] = local_ten + + global_src_rank = ( + rank + if parallelization_group == None + else torch.distributed.get_global_rank(parallelization_group, rank) + ) + # We can do async_op=True only if there is no CPU-copy follow-up + torch.distributed.broadcast( + local_ten, + src=global_src_rank, + group=parallelization_group, + async_op=orig_device is None, + ) + # Move tensor back to CPU if originally was on CPU + if orig_device is not None: + all_loaded_tensors[shard_id] = local_ten.to(orig_device) + del local_ten + + return all_loaded_tensors + + +def exchange_by_distribution( + loaded_tensors: Dict[_ShardId, torch.Tensor], + unloaded_shards: Dict[_ShardId, ShardedTensor], + shard_distribution: ShardDistribution, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, + exchange_algo='broadcast', +) -> Dict[_ShardId, torch.Tensor]: + """Exchange tensors loaded by different ranks using the specified exchange_algo. + + Args: + loaded_tensors (Dict[_ShardId, torch.Tensor]): mapping from ShardedTensor + shard ids to tensors already loaded by this rank. + unloaded_shards (Dict[_ShardId, ShardedTensor]): mapping from ShardedTensor + shard ids to ShardedTensors that aren't loaded yet. + shard_distribution (ShardDistribution): distribution of all shards + parallelization_group (ProcessGroup, optional): process group used for load + distribution. Tensors will be exchanged within this group + exchange_algo (str): The algorithm used for performing exchanges. + Defaults to 'broadcast'. + + Returns: + Dict[_ShardId, torch.Tensor]: dictionary mapping shard ids to tensors + needed by this rank to load a given state dict. Includes + previously loaded tensors (from `loaded_tensors` input) + """ + + assert shard_distribution is not None, 'Expecting distribution to perform exchange' + if exchange_algo == 'gather_object': + exchange_fn = exchange_loaded_tensors_gather_object + elif exchange_algo == 'gather_rounds': + exchange_fn = exchange_loaded_tensors_gather_rounds + elif exchange_algo == 'broadcast': + exchange_fn = exchange_loaded_tensors_broadcast + else: + raise NotImplementedError(f'Unrecognized gather algorithm: {exchange_algo}') + return exchange_fn(loaded_tensors, unloaded_shards, shard_distribution, parallelization_group) diff --git a/megatron/core/dist_checkpointing/mapping.py b/megatron/core/dist_checkpointing/mapping.py index d376c63..6bf3532 100644 --- a/megatron/core/dist_checkpointing/mapping.py +++ b/megatron/core/dist_checkpointing/mapping.py @@ -1,723 +1,725 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. - -""" Core library classes for representing sharding of tensors and objects. - -The main expected usage is wrapping torch.Tensors in state dicts with -ShardedTensor class (mostly with the ShardedTensor.from_rank_offsets classmethod). -""" - -import logging -from abc import ABC, abstractmethod -from dataclasses import dataclass, field, replace -from itertools import chain -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -import numpy as np -import torch - -from .core import CheckpointingException -from .dict_utils import dict_list_map_inplace - -logger = logging.getLogger(__name__) - -# These type definitions are just hints to differentiate a plain model state -# dict (StateDict) from a state dict with tensors replaced with ShardedTensors -# (ShardedStateDict). 
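For completeness, a brief caller sketch for the `exchange_by_distribution` dispatcher defined above; the argument values (which shards were loaded locally, which are still missing, and the precomputed distribution) are assumed to come from the surrounding load logic and are not constructed here.

from megatron.core.dist_checkpointing.exchange_utils import exchange_by_distribution

def finish_distributed_load(loaded_tensors, unloaded_shards, distribution, group=None):
    # After each rank has loaded only the shards assigned to it, exchange them so
    # that every rank ends up with every tensor it originally requested.
    return exchange_by_distribution(
        loaded_tensors,
        unloaded_shards,
        distribution,
        parallelization_group=group,
        exchange_algo='broadcast',  # 'gather_rounds' / 'gather_object' are the alternatives
    )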
-StateDict = Dict[str, Any] -CommonStateDict = Dict[str, Any] -ShardedStateDict = Dict[str, Any] -ReplicaId = Union[int, Tuple[int, ...]] - - -class ShardedBase(ABC): - """Base class for ShardedTensor and ShardedStateDict.""" - - key: str - data: object - replica_id: ReplicaId - - @abstractmethod - def validate_metadata_integrity(self): - """Codifies the constraints on metadata attributes.""" - - @abstractmethod - def without_data(self) -> 'ShardedBase': - """Returns a new ShardedBase instance with data=None.""" - raise NotImplementedError - - -@dataclass -class ShardedTensor(ShardedBase): - """Represents a mapping between a local tensor and a global tensor. - - Global tensor is assumed to consist of many local tensors distributed - between different processes. - - Args: - key: unique identifier of a global tensor - data: local tensor data. Can be None only for consistency validation - dtype: tensor dtype - local_shape: local tensor shape - global_shape: global tensor shape - global_offset: offset of a local tensor in a global tensor, - specified in number of tensor elements - axis_fragmentations: global tensor fragmentation of each axis - replica_id: indicates given local tensor's replication wrt. - local tensors in different processes - prepend_axis_num: number of axes prepended to the local tensor to - reflect global tensor shape. The behavior is similar to - unsqueezing the local tensor. - allow_shape_mismatch: if True, during loading, the global shape of - a stored tensor does not have to match the expected global shape. - Useful for representing tensors with flexible shape, - e.g. padded. - flattened_range: specifies a slice that should be applied to a - flattened tensor with `local_shape` in order to get - the tensor stored as `data` - """ - - key: str - data: Optional[torch.Tensor] = field(repr=False) - dtype: torch.dtype - local_shape: Tuple[int, ...] - global_shape: Tuple[int, ...] - global_offset: Tuple[int, ...] - axis_fragmentations: Optional[Tuple[int, ...]] - replica_id: ReplicaId = 0 - prepend_axis_num: int = 0 - allow_shape_mismatch: bool = False - flattened_range: Optional[slice] = None - - def __post_init__(self): - self.validate_metadata_integrity() - - def validate_metadata_integrity(self) -> None: - """Codifies the constraints on metadata attributes. - - Meeting those constraints is guaranteed when instantiating a ShardedTensor - class with `from_rank_offsets` or `from_rank_offsets_flat` constructors. 
- - Returns: - None - """ - has_flattened_range = self.flattened_range is not None - if self.data is not None: - if self.data.dtype != self.dtype: - raise CheckpointingException( - f'Data dtype should match `dtype` attribute for {self}' - ) - if not has_flattened_range and self.data.shape != self.local_shape: - raise CheckpointingException( - f'Data shape should match `local_shape` attribute for {self}' - ) - if has_flattened_range: - if self.data.ndim != 1: - raise CheckpointingException(f'Data should be 1D for a flattened {self}') - real_data = self.data - try: - self.data = None - self.init_data(device='meta') - if self.data.shape != real_data.shape: - raise CheckpointingException( - f'Data shape {real_data.shape} doesnt match' - f' expected {self.data.shape} for {self}' - ) - finally: - self.data = real_data - - if len(self.global_shape) != len(self.global_offset): - raise CheckpointingException( - f'Global offset dimensions should be equal to global shape dimensions for {self}' - ) - if len(self.local_shape) + self.prepend_axis_num != len(self.global_shape): - raise CheckpointingException( - f'Local shape together with `prepend_axis_num` dimensions should be ' - f'equal to global shape dimensions for {self}' - ) - - for off, sh in zip(self.global_offset[self.prepend_axis_num :], self.local_shape): - if off % sh != 0: - raise CheckpointingException( - f'Global offset ({off}) must be divisible by local shape ({sh}) for {self}.' - ) - - if has_flattened_range and self.flattened_range.step is not None: - raise CheckpointingException( - f'`step` argument in the flattened range of a ShardedTensor is not supported.' - ) - - def global_slice(self) -> Tuple[Union[int, slice], ...]: - """ - Returns a tuple of int and slice objects representing a slice of the - global tensor that this ShardedTensor corresponds to. - """ - assert len(self.global_offset) == len(self.local_shape) + self.prepend_axis_num - return tuple( - chain( - (off for off in self.global_offset[: self.prepend_axis_num]), - ( - slice(off, off + sh) - for off, sh in zip( - self.global_offset[self.prepend_axis_num :], self.local_shape - ) - ), - ) - ) - - def global_coordinates(self) -> Tuple[np.ndarray, ...]: - """ - Returns a tuple of np.ndarrays representing the coordinates of the global tensor - that this ShardedTensor corresponds to. - """ - if self.flattened_range is None: - raise CheckpointingException( - f'`global_coordinates` is undefined for' - f' {self.__class__.__name__} without `flattened_range`' - ) - - local_coords = self.local_coordinates() - assert len(local_coords) + self.prepend_axis_num == len(self.global_offset), ( - len(local_coords), - self, - ) - global_coords = tuple( - c + off - for c, off in zip((0,) * self.prepend_axis_num + local_coords, self.global_offset) - ) - return global_coords - - def local_coordinates(self) -> Tuple[np.ndarray, ...]: - """ - Returns a tuple of np.ndarrays representing the coordinates of the local tensor - that this ShardedTensor corresponds to. - """ - if self.flattened_range is None: - raise CheckpointingException( - f'`local_coordinates` is undefined for' - f' {self.__class__.__name__} without `flattened_range`' - ) - - # TODO: np.unravel_index? - mask = np.zeros(np.product(self.local_shape), dtype=bool) - mask[self.flattened_range] = True - return np.nonzero(mask.reshape(self.local_shape)) - - def local_chunk_offset_in_global(self) -> Tuple[int, ...]: - """Offset of a local chunk in a global array of chunks. 
- - Returns: - Tuple[int, ...]: the offset of the whole local chunk in a global array of chunks. - """ - assert len(self.global_offset) == len(self.local_shape) + self.prepend_axis_num - chunk_offset = list(self.global_offset[: self.prepend_axis_num]) - for off, sh in zip(self.global_offset[self.prepend_axis_num :], self.local_shape): - assert off % sh == 0, str(self) - chunk_offset.append(off // sh) - return tuple(chunk_offset) - - def max_allowed_chunks(self) -> Tuple[int, ...]: - """ - Returns the maximum allowed chunks for this ShardedTensor. - """ - chunks = [] - for axis_sh, axis_fragm in zip(self.global_shape, self.axis_fragmentations): - if not self.allow_shape_mismatch and axis_sh % axis_fragm != 0: - raise CheckpointingException( - f'Axis shape ({axis_sh}) not divisible by axis fragmentation ({axis_fragm}' - ) - axis_chunk_size = axis_sh // axis_fragm - chunks.append(axis_chunk_size) - return tuple(chunks) - - def without_data(self): - return replace(self, data=None) - - @classmethod - def from_rank_offsets( - cls, - key: str, - data: torch.Tensor, - *rank_offsets: Tuple[int, int, int], - replica_id: ReplicaId = 0, - prepend_axis_num: int = 0, - flattened_range: None = None, - **init_kwargs, - ): - """Allows to construct the ShardedTensor given offset specified in process ranks. - - Args: - key (str): unique key - data (torch.Tensor): local tensor data - rank_offsets (Tuple[int, int, int]): each tuple - (axis, axis_rank_offset, axis_fragm) says that if - global tensor is divided into `axis_fragm` fragment along `axis` - axis, then local tensor data corresponds to the `axis_rank_offset` chunk. - replica_id (ReplicaId): see ShardedTensor - prepend_axis_num (int): see ShardedTensor - flattened_range (None): must be None when using this constructor - init_kwargs: passed to ShardedTensor.__init__ - """ - if flattened_range is not None: - raise ValueError( - 'Cannot instantiate a flat ShardedTensor with `from_rank_offsets` method.' - ' Use `from_rank_offsets_flat` instead' - ) - global_offset = [0] * (data.ndim + prepend_axis_num) - global_shape = ([1] * prepend_axis_num) + list(data.shape) - axis_fragmentations = [1] * (data.ndim + prepend_axis_num) - _seen_axis = set() - for axis, axis_rank_offset, axis_fragm in rank_offsets: - if axis < 0 or axis_rank_offset < 0 or axis_fragm < 1 or axis_rank_offset >= axis_fragm: - raise CheckpointingException(f'Invalid rank offsets: {rank_offsets} for key {key}.') - _seen_axis.add(axis) - - local_axis_shape = 1 if axis < prepend_axis_num else data.shape[axis - prepend_axis_num] - global_shape[axis] = axis_fragm * local_axis_shape - global_offset[axis] = axis_rank_offset * local_axis_shape - axis_fragmentations[axis] = axis_fragm - - return cls( - key, - data, - data.dtype, - tuple(data.shape), - tuple(global_shape), - tuple(global_offset), - tuple(axis_fragmentations), - replica_id, - prepend_axis_num, - flattened_range=flattened_range, - **init_kwargs, - ) - - @classmethod - def from_rank_offsets_flat( - cls, - key: str, - data: torch.Tensor, - non_flat_local_shape: Tuple[int, ...], - *args, - flattened_range: Optional[slice] = None, - **kwargs, - ): - """Allows to construct a *flattened* ShardedTensor given offset specified in process ranks. - - Args: - key (str): - data (torch.Tensor): this should be a flattened data tensor - non_flat_local_shape (Tuple[int, ...]): expected local shape of a non-flat chunk - *args: passed unchanged to the `from_rank_offsets` constructor - flattened_range (slice): see ShardedTensor. 
Defaults to None, but must be set to - a non-None slice. - **kwargs: - - Returns: - ShardedTensor: constructed ShardedTensor instance - """ - if flattened_range is None: - raise CheckpointingException( - 'Cannot instantiate a non-flat ShardedTensor with `from_rank_offsets_flat` method.' - ' Use `from_rank_offsets` instead' - ) - if data.ndim != 1: - raise CheckpointingException( - f'Flattened ShardedTensor requires 1D data, got shape: {data.shape}' - ) - if flattened_range.stop - flattened_range.start != data.numel(): - raise CheckpointingException( - f'Flattened ShardedTensor data length ({data.numel()}) must meet the ' - f'slice length: {flattened_range.stop - flattened_range.start}' - ) - - non_flat_data_meta = torch.empty(*non_flat_local_shape, dtype=data.dtype, device='meta') - sh_ten = cls.from_rank_offsets(key, non_flat_data_meta, *args, **kwargs) - instance = replace(sh_ten, data=data, flattened_range=flattened_range) - instance.validate_metadata_integrity() - return instance - - def init_data(self, device: Union[str, torch.device], init_fn=torch.empty): - """ - Initialize the tensor data of this ShardedTensor. - - Only called if `data` attribute is None. - - Args: - device (Union[str, torch.device]): device to place the tensor on - init_fn (Callable, optional): function to use to initialize the tensor. - Defaults to `torch.empty`. - """ - if self.data is not None: - return - self.data = init_fn(self.local_shape, dtype=self.dtype, device=device) - if self.flattened_range is not None: - self.data = self.data.flatten()[self.flattened_range.start : self.flattened_range.stop] - - def narrow(self, dim: int, start: int, length: int) -> List['ShardedTensor']: - """This is an analogue of torch.narrow for ShardedTensors. - - Narrowing assumes that we narrow a local tensor on each rank. - This has consequences on local_shape, global_shape, global_offset, etc. - - Args: - dim (int): dimension to narrow. Doesn't include prepended axes. - start (int): start element - length (int): length of the slice - - Returns: - List[ShardedTensor]: narrowed ShardedTensors. For non-flat tensors, - the list will always have 1 element. For flat ShardedTensors the number of - elements varies depending on `dim` and on overlap, because flat - tensors must be contiguous. In particular the list can be empty. 
- """ - prepended_dim = dim + self.prepend_axis_num - local_length_along_dim = self.local_shape[dim] - - def _update_tuple(x, ind, val): - x = list(x) - x[ind] = val - return tuple(x) - - def _safe_div(x, y): - assert x % y == 0, (x, y) - return x // y - - # Decrease global shape and global offset by `length / local_length_along_dim` - assert ( - self.global_shape[prepended_dim] % local_length_along_dim == 0 - ), f'Only regular grid of local tensors is supported for narrowing, got: {self}' - assert ( - self.global_offset[prepended_dim] % local_length_along_dim == 0 - ), f'Only regular grid of local tensors is supported for narrowing, got: {self}' - global_shape = _update_tuple( - self.global_shape, - prepended_dim, - _safe_div(self.global_shape[prepended_dim] * length, local_length_along_dim), - ) - global_offset = _update_tuple( - self.global_offset, - prepended_dim, - _safe_div(self.global_offset[prepended_dim] * length, local_length_along_dim), - ) - - if self.flattened_range is None: - new_data = self.data.narrow(dim, start, length) - # always a single result tensor - return [ - replace( - self, - data=new_data, - local_shape=new_data.shape, - global_shape=global_shape, - global_offset=global_offset, - ) - ] - else: - if dim != 0: - raise CheckpointingException( - f'Narrowing along the first axis is supported for now only, got dim={dim}' - ) - - # If dim=0, we will always get 0 or 1 resulting tensor. - # If dim>1, in general there can be more result tensors (e.g. max 3 for dim=1) - - # For on original flat ShardedTensor of local shape [3, 4] and - # flattened_range=slice(5, 10), - # the X signs mark the actual (flat) data in `self.data` - # notice 12 (3*4) total "virtual" elements, out of which 5 is actual data. - # flat original: [.....XXXXX..] - - # If we narrow to start=1, length=1 in the original local shape dimensions, - # the overlapping flat slice would be: - # narrow to: [....XXXX....] - # flat overlap: [.....XXX....] - - # Now `data` is flattened and sliced, so we must compute local_shape manually - local_shape = _update_tuple(self.local_shape, dim, length) - other_dims_volume = np.prod( - _update_tuple(local_shape, dim, 1) - ) # 4 in the example above - volume_before_split = other_dims_volume * start # 4 in the example above - volume_of_split = other_dims_volume * length # 4 in the example above - - flat_slice_start_shifted = ( - self.flattened_range.start - volume_before_split - ) # 5 - 4 = 1 in the example above - flat_slice_stop_shifted = ( - self.flattened_range.stop - volume_before_split - ) # 10 - 4 = 6 in the example above - - # Find an intersection of - # (flat_slice_start_shifted, flat_slice_stop_shifted) vs (0, volume_of_split) - - if flat_slice_stop_shifted <= 0 or flat_slice_start_shifted >= volume_of_split: - return [] # no intersection - - # new_flattened_range = slice(1, 4) in the example above - new_flattened_range = slice( - max(flat_slice_start_shifted, 0), min(flat_slice_stop_shifted, volume_of_split) - ) - # Apply the intersection to the flattened data tensor. 
- # Compute start and slice appropriate length - intersection_slice_start = ( - new_flattened_range.start - flat_slice_start_shifted - ) # 0 in the example above - new_data = self.data[ - intersection_slice_start : intersection_slice_start - + new_flattened_range.stop - - new_flattened_range.start - ] - - return [ - replace( - self, - data=new_data, - local_shape=local_shape, - global_shape=global_shape, - global_offset=global_offset, - flattened_range=new_flattened_range, - ) - ] - - -def is_main_replica(replica_id: ReplicaId): - """Checks if given `replica_id` is considered as main. - - "Main" replica is: - - integer 0 - - or an iterable with all 0 elements - - It is the application responsibility to set correct replicas for sharded tensors. - - Args: - replica_id (Union[int, Tuple[int, ...]]): replica id - - Returns: - (bool): True for a "main" replica - """ - if isinstance(replica_id, int): - return replica_id == 0 - return all(r == 0 for r in replica_id) - - -class LocalNonpersistentObject: - """Object that should not be stored in a checkpoint, but restored locally. - - Wrapping any object inside the state dict with LocalNonpersistentObject - will result in: - - during saving, this object will *not* be stored in the checkpoint - - during loading, a local version of this object will be placed in a state dict - """ - - def __init__(self, obj): - self.obj = obj - - def unwrap(self): - """Returns the original object.""" - return self.obj - - -# TODO: Delete once NeMo fixes typo. -LocalNonpersitentObject = LocalNonpersistentObject - - -@dataclass -class ShardedObject(ShardedBase): - """Represents a mapping between a local object and a global object. - - Global object is assumed to consist of many local objects distributed - between different processes. - - NOTE: Contrary to ShardedTensor, it's impossible to change global object - sharding. Conceptually, ShardedObject is a fully-sharded ShardedTensor - with atomic arbitrary typed elements. - - Args: - key: unique identifier of a global tensor - data: local object data. Can be None only for consistency validation - global_shape: global object shape - global_offset: offset of a local object in a global object, specified in number of shards - replica_id: indicates local object replication wrt. local objects in different processes - """ - - key: str - data: object - global_shape: Tuple[int, ...] - global_offset: Tuple[int, ...] - replica_id: ReplicaId = 0 - - def __post_init__(self): - self.validate_metadata_integrity() - - def validate_metadata_integrity(self): - if len(self.global_shape) != len(self.global_offset): - raise CheckpointingException( - f'Global offset dimensions should be equal to global shape dimensions for {self}' - ) - - def without_data(self): - return replace(self, data=None) - - @property - def unique_key(self): - """returns a unique key for this object""" - return ( - f'{self.key}/shard_' - f'{".".join(map(str, self.global_offset))}_' - f'{".".join(map(str, self.global_shape))}' - ) - - def __str__(self): - return f'{self.__class__.__name__}(key=\'{self.key}\')' - - @classmethod - def empty_from_unique_key(cls, unique_key, replica_id: ReplicaId = 0) -> 'ShardedObject': - """Instantiates a ShardedObject from a unique key. - - Args: - unique_key: a string of the form - /shard__ - replica_id: indicates local object replication wrt. 
- local objects in different processes - - Returns: - a ShardedObject with data=None - """ - key, shard_key = unique_key.split('/') - shard_str, offset, shape = shard_key.split('_') - assert shard_str == 'shard' - offset = tuple(map(int, offset.split('.'))) - shape = tuple(map(int, shape.split('.'))) - if len(shape) + 1 == len(offset): - # This is a backward-compatible fix. We don't know the last - # element of global shape so set it to -1. - shape += (-1,) - return cls(key, None, shape, offset, replica_id) - - -FactoryBuildFn = Callable[[str, torch.Tensor, ReplicaId, Optional[slice]], ShardedStateDict] -FactoryMergeFn = Callable[[StateDict], torch.Tensor] - - -@dataclass -class ShardedTensorFactory(ShardedBase): - """Allows to apply transformations to tensors before/after serialization. - - The essence of those transformations is that they can be applied to - optimizer states the same way they are applied to the model params. - The ultimate state dict with sharded tensors must depend functionally on - `build_fn` arguments (key, data, replica_id, flattened_range), - which will be provided by the optimizer. - - Builder creates a sub-state-dict out of a tensor before saving, and merger - merges the corresponding state dict after loading. - - Args: - key (str): unique identifier of the factory - data (torch.Tensor): original model parameter that will be further - transformed by this factory - build_fn (callable): function that transforms the original tensor - to a sharded state dict - merge_fn (callable): function that transforms loaded subtree back - into a single tensor (inverse of `build_fn`) - replica_id (ReplicaId): indicates factory replication wrt. - factories in different processes - flattened_range (slice, optional): indicates additional flattening - applied to the ShardedTensors produced by the factory - """ - - key: str - data: torch.Tensor - build_fn: FactoryBuildFn - merge_fn: FactoryMergeFn - replica_id: ReplicaId = 0 - flattened_range: Optional[slice] = None - - def build(self): - """Builds a ShardedStateDict from the original tensor""" - return self.build_fn(self.key, self.data, self.replica_id, self.flattened_range) - - def validate_metadata_integrity(self): - """No reasonable checks can be applied""" - pass - - def without_data(self): - return replace(self, data=None) - - -def apply_factories(sharded_state_dict: ShardedStateDict): - """Turn ShardedTensorFactories into ShardedTensors *in-place*. - - Args: - sharded_state_dict (ShardedStateDict): state dict possibly - containing ShardedTensorFactory objects - - Returns: - None: state dict is modified in place - """ - - def apply(x): - if isinstance(x, ShardedTensorFactory): - x = x.build() - return x - - dict_list_map_inplace(apply, sharded_state_dict) - - -def apply_factory_merges( - x1: StateDict, x2: ShardedStateDict, key: Tuple[str, ...] = () -) -> StateDict: - """Apply merges defined by ShardedTensorFactories *in-place*. - - Args: - x1 (StateDict): state dict loaded from the checkpoint - x2 (ShardedStateDict): subset of `x1` (in terms of dict keys) - with ShardedTensorFactory - as (possibly nested) values that define how to - merge objects from the `x1` state dict - key (Tuple[str, ...]): current key in a recursive call. 
- Used only for reporting meaningful errors - - Returns: - StateDict: `x1` modified in-place - """ - if isinstance(x2, ShardedTensorFactory): - return x2.merge_fn(x1) - - # There rest is almost the same as the `merge` function from `dict_utils` - if isinstance(x1, dict) and isinstance(x2, dict): - for k, v2 in x2.items(): - if k not in x1: - raise ValueError( - f'Different dict keys encountered in `apply_factory_merges` ' - f'({x1.keys()} vs {x2.keys()})' - ) - else: - x1[k] = apply_factory_merges(x1[k], v2, key=key + (k,)) - elif isinstance(x1, list) and isinstance(x2, list): - if len(x1) != len(x2): - err_msg = ( - f'Cannot merge two lists with different lengths ' - f'({len(x1)} and {len(x2)}, encountered at key {key})' - ) - logger.error(err_msg + f'\nx1: {x1}\nx2: {x2}') - raise ValueError(err_msg) - for i, v2 in enumerate(x2): - x1[i] = apply_factory_merges(x1[i], v2, key=key + (i,)) - elif isinstance(x1, list) and isinstance(x2, dict): - for k, v2 in x2.items(): - if not isinstance(k, int): - raise ValueError( - f'Invalid dict key {k} non-integer type encountered ' - f'in a list-dict merge at level {key}' - ) - if k >= len(x1): - raise ValueError( - f'Dict key {k} out of bound for list of length' - f'{len(x1)} (encountered at level {key})' - ) - x1[k] = apply_factory_merges(x1[k], v2, key=key + (k,)) - else: - raise ValueError( - f'Duplicate non-dict and non-list values encountered: `{x1}` and `{x2} (at key {key})`' - ) - return x1 +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Core library classes for representing sharding of tensors and objects. + +The main expected usage is wrapping torch.Tensors in state dicts with +ShardedTensor class (mostly with the ShardedTensor.from_rank_offsets classmethod). +""" + +import logging +from abc import ABC, abstractmethod +from dataclasses import dataclass, field, replace +from itertools import chain +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import numpy as np +import torch + +from .core import CheckpointingException +from .dict_utils import dict_list_map_inplace + +logger = logging.getLogger(__name__) + +# These type definitions are just hints to differentiate a plain model state +# dict (StateDict) from a state dict with tensors replaced with ShardedTensors +# (ShardedStateDict). +StateDict = Dict[str, Any] +CommonStateDict = Dict[str, Any] +ShardedStateDict = Dict[str, Any] +ReplicaId = Union[int, Tuple[int, ...]] + + +class ShardedBase(ABC): + """Base class for ShardedTensor and ShardedStateDict.""" + + key: str + data: object + replica_id: ReplicaId + + @abstractmethod + def validate_metadata_integrity(self): + """Codifies the constraints on metadata attributes.""" + + @abstractmethod + def without_data(self) -> 'ShardedBase': + """Returns a new ShardedBase instance with data=None.""" + raise NotImplementedError + + +@dataclass +class ShardedTensor(ShardedBase): + """Represents a mapping between a local tensor and a global tensor. + + Global tensor is assumed to consist of many local tensors distributed + between different processes. + + Args: + key: unique identifier of a global tensor + data: local tensor data. Can be None only for consistency validation + dtype: tensor dtype + local_shape: local tensor shape + global_shape: global tensor shape + global_offset: offset of a local tensor in a global tensor, + specified in number of tensor elements + axis_fragmentations: global tensor fragmentation of each axis + replica_id: indicates given local tensor's replication wrt. 
+ local tensors in different processes + prepend_axis_num: number of axes prepended to the local tensor to + reflect global tensor shape. The behavior is similar to + unsqueezing the local tensor. + allow_shape_mismatch: if True, during loading, the global shape of + a stored tensor does not have to match the expected global shape. + Useful for representing tensors with flexible shape, + e.g. padded. + flattened_range: specifies a slice that should be applied to a + flattened tensor with `local_shape` in order to get + the tensor stored as `data` + """ + + key: str + data: Optional[torch.Tensor] = field(repr=False) + dtype: torch.dtype + local_shape: Tuple[int, ...] + global_shape: Tuple[int, ...] + global_offset: Tuple[int, ...] + axis_fragmentations: Optional[Tuple[int, ...]] + replica_id: ReplicaId = 0 + prepend_axis_num: int = 0 + allow_shape_mismatch: bool = False + flattened_range: Optional[slice] = None + + def __post_init__(self): + self.validate_metadata_integrity() + + def validate_metadata_integrity(self) -> None: + """Codifies the constraints on metadata attributes. + + Meeting those constraints is guaranteed when instantiating a ShardedTensor + class with `from_rank_offsets` or `from_rank_offsets_flat` constructors. + + Returns: + None + """ + has_flattened_range = self.flattened_range is not None + if self.data is not None: + if self.data.dtype != self.dtype: + raise CheckpointingException( + f'Data dtype should match `dtype` attribute for {self}' + ) + if not has_flattened_range and self.data.shape != self.local_shape: + raise CheckpointingException( + f'Data shape should match `local_shape` attribute for {self}' + ) + if has_flattened_range: + if self.data.ndim != 1: + raise CheckpointingException(f'Data should be 1D for a flattened {self}') + real_data = self.data + try: + self.data = None + self.init_data(device='meta') + if self.data.shape != real_data.shape: + raise CheckpointingException( + f'Data shape {real_data.shape} doesnt match' + f' expected {self.data.shape} for {self}' + ) + finally: + self.data = real_data + + if len(self.global_shape) != len(self.global_offset): + raise CheckpointingException( + f'Global offset dimensions should be equal to global shape dimensions for {self}' + ) + if len(self.local_shape) + self.prepend_axis_num != len(self.global_shape): + raise CheckpointingException( + f'Local shape together with `prepend_axis_num` dimensions should be ' + f'equal to global shape dimensions for {self}' + ) + + for off, sh in zip(self.global_offset[self.prepend_axis_num :], self.local_shape): + # NOTE: In custom FSDP, we have a case where a new parameter shard is created locally. + # For example, consider parameters [p0, p1, p2] sharded across GPU0 and GPU1. + # GPU0 receives p0 and a portion of p1, while GPU1 receives the + # remaining portion of p1 and p2. + # As a result, there is no parameter shard of p2 on GPU0, and + # the shape of p2 on GPU0 is zero. + if sh != 0 and off % sh != 0: + raise CheckpointingException( + f'Global offset ({off}) must be divisible by local shape ({sh}) for {self}.' + ) + + if has_flattened_range and self.flattened_range.step is not None: + raise CheckpointingException( + f'`step` argument in the flattened range of a ShardedTensor is not supported.' + ) + + def global_slice(self) -> Tuple[Union[int, slice], ...]: + """ + Returns a tuple of int and slice objects representing a slice of the + global tensor that this ShardedTensor corresponds to. 
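The relaxed divisibility check above (skipping zero-sized local axes) is what lets custom FSDP register a parameter for which this rank holds no elements. A minimal, hypothetical illustration constructing the dataclass directly with invented numbers; real callers go through the `from_rank_offsets*` constructors instead:

import torch
from megatron.core.dist_checkpointing.mapping import ShardedTensor

# GPU0 holds none of parameter p2: the local shard is empty but still carries a
# non-zero global offset. With the `sh != 0` guard, __post_init__ validation passes;
# without it, `off % sh` would fail on the zero-sized axis.
empty_shard = ShardedTensor(
    key='p2',
    data=torch.empty(0, dtype=torch.float32),
    dtype=torch.float32,
    local_shape=(0,),
    global_shape=(16,),
    global_offset=(16,),
    axis_fragmentations=(1,),
)
assert empty_shard.data.numel() == 0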
+ """ + assert len(self.global_offset) == len(self.local_shape) + self.prepend_axis_num + return tuple( + chain( + (off for off in self.global_offset[: self.prepend_axis_num]), + ( + slice(off, off + sh) + for off, sh in zip( + self.global_offset[self.prepend_axis_num :], self.local_shape + ) + ), + ) + ) + + def global_coordinates(self) -> Tuple[np.ndarray, ...]: + """ + Returns a tuple of np.ndarrays representing the coordinates of the global tensor + that this ShardedTensor corresponds to. + """ + if self.flattened_range is None: + raise CheckpointingException( + f'`global_coordinates` is undefined for' + f' {self.__class__.__name__} without `flattened_range`' + ) + + local_coords = self.local_coordinates() + assert len(local_coords) + self.prepend_axis_num == len(self.global_offset), ( + len(local_coords), + self, + ) + global_coords = tuple( + c + off + for c, off in zip((0,) * self.prepend_axis_num + local_coords, self.global_offset) + ) + return global_coords + + def local_coordinates(self) -> Tuple[np.ndarray, ...]: + """ + Returns a tuple of np.ndarrays representing the coordinates of the local tensor + that this ShardedTensor corresponds to. + """ + if self.flattened_range is None: + raise CheckpointingException( + f'`local_coordinates` is undefined for' + f' {self.__class__.__name__} without `flattened_range`' + ) + + # TODO: np.unravel_index? + mask = np.zeros(np.product(self.local_shape), dtype=bool) + mask[self.flattened_range] = True + return np.nonzero(mask.reshape(self.local_shape)) + + def local_chunk_offset_in_global(self) -> Tuple[int, ...]: + """Offset of a local chunk in a global array of chunks. + + Returns: + Tuple[int, ...]: the offset of the whole local chunk in a global array of chunks. + """ + assert len(self.global_offset) == len(self.local_shape) + self.prepend_axis_num + chunk_offset = list(self.global_offset[: self.prepend_axis_num]) + for off, sh in zip(self.global_offset[self.prepend_axis_num :], self.local_shape): + assert off % sh == 0, str(self) + chunk_offset.append(off // sh) + return tuple(chunk_offset) + + def max_allowed_chunks(self) -> Tuple[int, ...]: + """ + Returns the maximum allowed chunks for this ShardedTensor. + """ + chunks = [] + for axis_sh, axis_fragm in zip(self.global_shape, self.axis_fragmentations): + if not self.allow_shape_mismatch and axis_sh % axis_fragm != 0: + raise CheckpointingException( + f'Axis shape ({axis_sh}) not divisible by axis fragmentation ({axis_fragm}' + ) + axis_chunk_size = axis_sh // axis_fragm + chunks.append(axis_chunk_size) + return tuple(chunks) + + def without_data(self): + return replace(self, data=None) + + @classmethod + def from_rank_offsets( + cls, + key: str, + data: torch.Tensor, + *rank_offsets: Tuple[int, int, int], + replica_id: ReplicaId = 0, + prepend_axis_num: int = 0, + flattened_range: None = None, + **init_kwargs, + ): + """Allows to construct the ShardedTensor given offset specified in process ranks. + + Args: + key (str): unique key + data (torch.Tensor): local tensor data + rank_offsets (Tuple[int, int, int]): each tuple + (axis, axis_rank_offset, axis_fragm) says that if + global tensor is divided into `axis_fragm` fragment along `axis` + axis, then local tensor data corresponds to the `axis_rank_offset` chunk. 
+ replica_id (ReplicaId): see ShardedTensor + prepend_axis_num (int): see ShardedTensor + flattened_range (None): must be None when using this constructor + init_kwargs: passed to ShardedTensor.__init__ + """ + if flattened_range is not None: + raise ValueError( + 'Cannot instantiate a flat ShardedTensor with `from_rank_offsets` method.' + ' Use `from_rank_offsets_flat` instead' + ) + global_offset = [0] * (data.ndim + prepend_axis_num) + global_shape = ([1] * prepend_axis_num) + list(data.shape) + axis_fragmentations = [1] * (data.ndim + prepend_axis_num) + _seen_axis = set() + for axis, axis_rank_offset, axis_fragm in rank_offsets: + if axis < 0 or axis_rank_offset < 0 or axis_fragm < 1 or axis_rank_offset >= axis_fragm: + raise CheckpointingException(f'Invalid rank offsets: {rank_offsets} for key {key}.') + _seen_axis.add(axis) + + local_axis_shape = 1 if axis < prepend_axis_num else data.shape[axis - prepend_axis_num] + global_shape[axis] = axis_fragm * local_axis_shape + global_offset[axis] = axis_rank_offset * local_axis_shape + axis_fragmentations[axis] = axis_fragm + + return cls( + key, + data, + data.dtype, + tuple(data.shape), + tuple(global_shape), + tuple(global_offset), + tuple(axis_fragmentations), + replica_id, + prepend_axis_num, + flattened_range=flattened_range, + **init_kwargs, + ) + + @classmethod + def from_rank_offsets_flat( + cls, + key: str, + data: torch.Tensor, + non_flat_local_shape: Tuple[int, ...], + *args, + flattened_range: Optional[slice] = None, + **kwargs, + ): + """Allows to construct a *flattened* ShardedTensor given offset specified in process ranks. + + Args: + key (str): + data (torch.Tensor): this should be a flattened data tensor + non_flat_local_shape (Tuple[int, ...]): expected local shape of a non-flat chunk + *args: passed unchanged to the `from_rank_offsets` constructor + flattened_range (slice): see ShardedTensor. Defaults to None, but must be set to + a non-None slice. + **kwargs: + + Returns: + ShardedTensor: constructed ShardedTensor instance + """ + if flattened_range is None: + raise CheckpointingException( + 'Cannot instantiate a non-flat ShardedTensor with `from_rank_offsets_flat` method.' + ' Use `from_rank_offsets` instead' + ) + if data.ndim != 1: + raise CheckpointingException( + f'Flattened ShardedTensor requires 1D data, got shape: {data.shape}' + ) + if flattened_range.stop - flattened_range.start != data.numel(): + raise CheckpointingException( + f'Flattened ShardedTensor data length ({data.numel()}) must meet the ' + f'slice length: {flattened_range.stop - flattened_range.start}' + ) + + non_flat_data_meta = torch.empty(*non_flat_local_shape, dtype=data.dtype, device='meta') + sh_ten = cls.from_rank_offsets(key, non_flat_data_meta, *args, **kwargs) + instance = replace(sh_ten, data=data, flattened_range=flattened_range) + instance.validate_metadata_integrity() + return instance + + def init_data(self, device: Union[str, torch.device], init_fn=torch.empty): + """ + Initialize the tensor data of this ShardedTensor. + + Only called if `data` attribute is None. + + Args: + device (Union[str, torch.device]): device to place the tensor on + init_fn (Callable, optional): function to use to initialize the tensor. + Defaults to `torch.empty`. 
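A short usage sketch for the `from_rank_offsets` constructor above, under a hypothetical tensor-parallel layout (the key name and sizes are invented):

import torch
from megatron.core.dist_checkpointing.mapping import ShardedTensor

# A 16x32 global weight split along axis 0 across 4 ranks; this rank holds a 4x32 slice.
tp_rank, tp_size = 1, 4
local_weight = torch.zeros(4, 32)

sh_ten = ShardedTensor.from_rank_offsets(
    'decoder.linear.weight',   # checkpoint key (hypothetical)
    local_weight,
    (0, tp_rank, tp_size),     # (axis, axis_rank_offset, axis_fragm)
)
assert sh_ten.global_shape == (16, 32)
assert sh_ten.global_offset == (4, 0)
assert sh_ten.axis_fragmentations == (4, 1)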
+ """ + if self.data is not None: + return + self.data = init_fn(self.local_shape, dtype=self.dtype, device=device) + if self.flattened_range is not None: + self.data = self.data.flatten()[self.flattened_range.start : self.flattened_range.stop] + + def narrow(self, dim: int, start: int, length: int) -> List['ShardedTensor']: + """This is an analogue of torch.narrow for ShardedTensors. + + Narrowing assumes that we narrow a local tensor on each rank. + This has consequences on local_shape, global_shape, global_offset, etc. + + Args: + dim (int): dimension to narrow. Doesn't include prepended axes. + start (int): start element + length (int): length of the slice + + Returns: + List[ShardedTensor]: narrowed ShardedTensors. For non-flat tensors, + the list will always have 1 element. For flat ShardedTensors the number of + elements varies depending on `dim` and on overlap, because flat + tensors must be contiguous. In particular the list can be empty. + """ + prepended_dim = dim + self.prepend_axis_num + local_length_along_dim = self.local_shape[dim] + + def _update_tuple(x, ind, val): + x = list(x) + x[ind] = val + return tuple(x) + + def _safe_div(x, y): + assert x % y == 0, (x, y) + return x // y + + # Decrease global shape and global offset by `length / local_length_along_dim` + assert ( + self.global_shape[prepended_dim] % local_length_along_dim == 0 + ), f'Only regular grid of local tensors is supported for narrowing, got: {self}' + assert ( + self.global_offset[prepended_dim] % local_length_along_dim == 0 + ), f'Only regular grid of local tensors is supported for narrowing, got: {self}' + global_shape = _update_tuple( + self.global_shape, + prepended_dim, + _safe_div(self.global_shape[prepended_dim] * length, local_length_along_dim), + ) + global_offset = _update_tuple( + self.global_offset, + prepended_dim, + _safe_div(self.global_offset[prepended_dim] * length, local_length_along_dim), + ) + + if self.flattened_range is None: + new_data = self.data.narrow(dim, start, length) + # always a single result tensor + return [ + replace( + self, + data=new_data, + local_shape=new_data.shape, + global_shape=global_shape, + global_offset=global_offset, + ) + ] + else: + if dim != 0: + raise CheckpointingException( + f'Narrowing along the first axis is supported for now only, got dim={dim}' + ) + + # If dim=0, we will always get 0 or 1 resulting tensor. + # If dim>1, in general there can be more result tensors (e.g. max 3 for dim=1) + + # For on original flat ShardedTensor of local shape [3, 4] and + # flattened_range=slice(5, 10), + # the X signs mark the actual (flat) data in `self.data` + # notice 12 (3*4) total "virtual" elements, out of which 5 is actual data. + # flat original: [.....XXXXX..] + + # If we narrow to start=1, length=1 in the original local shape dimensions, + # the overlapping flat slice would be: + # narrow to: [....XXXX....] + # flat overlap: [.....XXX....] 
+ + # Now `data` is flattened and sliced, so we must compute local_shape manually + local_shape = _update_tuple(self.local_shape, dim, length) + other_dims_volume = np.prod( + _update_tuple(local_shape, dim, 1) + ) # 4 in the example above + volume_before_split = other_dims_volume * start # 4 in the example above + volume_of_split = other_dims_volume * length # 4 in the example above + + flat_slice_start_shifted = ( + self.flattened_range.start - volume_before_split + ) # 5 - 4 = 1 in the example above + flat_slice_stop_shifted = ( + self.flattened_range.stop - volume_before_split + ) # 10 - 4 = 6 in the example above + + # Find an intersection of + # (flat_slice_start_shifted, flat_slice_stop_shifted) vs (0, volume_of_split) + + if flat_slice_stop_shifted <= 0 or flat_slice_start_shifted >= volume_of_split: + return [] # no intersection + + # new_flattened_range = slice(1, 4) in the example above + new_flattened_range = slice( + max(flat_slice_start_shifted, 0), min(flat_slice_stop_shifted, volume_of_split) + ) + # Apply the intersection to the flattened data tensor. + # Compute start and slice appropriate length + intersection_slice_start = ( + new_flattened_range.start - flat_slice_start_shifted + ) # 0 in the example above + new_data = self.data[ + intersection_slice_start : intersection_slice_start + + new_flattened_range.stop + - new_flattened_range.start + ] + + return [ + replace( + self, + data=new_data, + local_shape=local_shape, + global_shape=global_shape, + global_offset=global_offset, + flattened_range=new_flattened_range, + ) + ] + + +def is_main_replica(replica_id: ReplicaId): + """Checks if given `replica_id` is considered as main. + + "Main" replica is: + - integer 0 + - or an iterable with all 0 elements + + It is the application responsibility to set correct replicas for sharded tensors. + + Args: + replica_id (Union[int, Tuple[int, ...]]): replica id + + Returns: + (bool): True for a "main" replica + """ + if isinstance(replica_id, int): + return replica_id == 0 + return all(r == 0 for r in replica_id) + + +class LocalNonpersistentObject: + """Object that should not be stored in a checkpoint, but restored locally. + + Wrapping any object inside the state dict with LocalNonpersistentObject + will result in: + - during saving, this object will *not* be stored in the checkpoint + - during loading, a local version of this object will be placed in a state dict + """ + + def __init__(self, obj): + self.obj = obj + + def unwrap(self): + """Returns the original object.""" + return self.obj + + +@dataclass +class ShardedObject(ShardedBase): + """Represents a mapping between a local object and a global object. + + Global object is assumed to consist of many local objects distributed + between different processes. + + NOTE: Contrary to ShardedTensor, it's impossible to change global object + sharding. Conceptually, ShardedObject is a fully-sharded ShardedTensor + with atomic arbitrary typed elements. + + Args: + key: unique identifier of a global tensor + data: local object data. Can be None only for consistency validation + global_shape: global object shape + global_offset: offset of a local object in a global object, specified in number of shards + replica_id: indicates local object replication wrt. local objects in different processes + """ + + key: str + data: object + global_shape: Tuple[int, ...] + global_offset: Tuple[int, ...] 
+ replica_id: ReplicaId = 0 + + def __post_init__(self): + self.validate_metadata_integrity() + + def validate_metadata_integrity(self): + if len(self.global_shape) != len(self.global_offset): + raise CheckpointingException( + f'Global offset dimensions should be equal to global shape dimensions for {self}' + ) + + def without_data(self): + return replace(self, data=None) + + @property + def unique_key(self): + """returns a unique key for this object""" + return ( + f'{self.key}/shard_' + f'{".".join(map(str, self.global_offset))}_' + f'{".".join(map(str, self.global_shape))}' + ) + + def __str__(self): + return f'{self.__class__.__name__}(key=\'{self.key}\')' + + @classmethod + def empty_from_unique_key(cls, unique_key, replica_id: ReplicaId = 0) -> 'ShardedObject': + """Instantiates a ShardedObject from a unique key. + + Args: + unique_key: a string of the form + /shard__ + replica_id: indicates local object replication wrt. + local objects in different processes + + Returns: + a ShardedObject with data=None + """ + key, shard_key = unique_key.split('/') + shard_str, offset, shape = shard_key.split('_') + assert shard_str == 'shard' + offset = tuple(map(int, offset.split('.'))) + shape = tuple(map(int, shape.split('.'))) + if len(shape) + 1 == len(offset): + # This is a backward-compatible fix. We don't know the last + # element of global shape so set it to -1. + shape += (-1,) + return cls(key, None, shape, offset, replica_id) + + +FactoryBuildFn = Callable[[str, torch.Tensor, ReplicaId, Optional[slice]], ShardedStateDict] +FactoryMergeFn = Callable[[StateDict], torch.Tensor] + + +@dataclass +class ShardedTensorFactory(ShardedBase): + """Allows to apply transformations to tensors before/after serialization. + + The essence of those transformations is that they can be applied to + optimizer states the same way they are applied to the model params. + The ultimate state dict with sharded tensors must depend functionally on + `build_fn` arguments (key, data, replica_id, flattened_range), + which will be provided by the optimizer. + + Builder creates a sub-state-dict out of a tensor before saving, and merger + merges the corresponding state dict after loading. + + Args: + key (str): unique identifier of the factory + data (torch.Tensor): original model parameter that will be further + transformed by this factory + build_fn (callable): function that transforms the original tensor + to a sharded state dict + merge_fn (callable): function that transforms loaded subtree back + into a single tensor (inverse of `build_fn`) + replica_id (ReplicaId): indicates factory replication wrt. + factories in different processes + flattened_range (slice, optional): indicates additional flattening + applied to the ShardedTensors produced by the factory + """ + + key: str + data: torch.Tensor + build_fn: FactoryBuildFn + merge_fn: FactoryMergeFn + replica_id: ReplicaId = 0 + flattened_range: Optional[slice] = None + + def build(self): + """Builds a ShardedStateDict from the original tensor""" + return self.build_fn(self.key, self.data, self.replica_id, self.flattened_range) + + def validate_metadata_integrity(self): + """No reasonable checks can be applied""" + pass + + def without_data(self): + return replace(self, data=None) + + +def apply_factories(sharded_state_dict: ShardedStateDict): + """Turn ShardedTensorFactories into ShardedTensors *in-place*. 
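To make the factory contract above concrete, here is a small, hypothetical round trip in which a fused weight is checkpointed as two separate ShardedTensors and reassembled on load with `apply_factories` and `apply_factory_merges` (both defined in this module); the key names and shapes are invented:

import torch
from megatron.core.dist_checkpointing.mapping import (
    ShardedTensor,
    ShardedTensorFactory,
    apply_factories,
    apply_factory_merges,
)

def build_fn(key, tensor, replica_id, flattened_range):
    # Split the fused tensor into two ShardedTensors before saving.
    assert flattened_range is None  # keep the sketch simple
    gate, up = tensor.chunk(2, dim=0)
    return {
        'gate': ShardedTensor.from_rank_offsets(f'{key}.gate', gate, replica_id=replica_id),
        'up': ShardedTensor.from_rank_offsets(f'{key}.up', up, replica_id=replica_id),
    }

def merge_fn(sub_state_dict):
    # Inverse of build_fn: reassemble the fused tensor after loading.
    return torch.cat([sub_state_dict['gate'], sub_state_dict['up']], dim=0)

fused = torch.randn(8, 4)
factories = {'mlp.fused_w': ShardedTensorFactory('mlp.fused_w', fused, build_fn, merge_fn)}

sharded_sd = dict(factories)
apply_factories(sharded_sd)  # factory replaced in-place by its build_fn output

# After the checkpoint has been read, the loaded sub-dict is merged back into one tensor.
loaded = {'mlp.fused_w': {'gate': fused[:4].clone(), 'up': fused[4:].clone()}}
restored = apply_factory_merges(loaded, factories)
assert torch.equal(restored['mlp.fused_w'], fused)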
+ + Args: + sharded_state_dict (ShardedStateDict): state dict possibly + containing ShardedTensorFactory objects + + Returns: + None: state dict is modified in place + """ + + def apply(x): + if isinstance(x, ShardedTensorFactory): + x = x.build() + return x + + dict_list_map_inplace(apply, sharded_state_dict) + + +def apply_factory_merges( + x1: StateDict, x2: ShardedStateDict, key: Tuple[str, ...] = () +) -> StateDict: + """Apply merges defined by ShardedTensorFactories *in-place*. + + Args: + x1 (StateDict): state dict loaded from the checkpoint + x2 (ShardedStateDict): subset of `x1` (in terms of dict keys) + with ShardedTensorFactory + as (possibly nested) values that define how to + merge objects from the `x1` state dict + key (Tuple[str, ...]): current key in a recursive call. + Used only for reporting meaningful errors + + Returns: + StateDict: `x1` modified in-place + """ + if isinstance(x2, ShardedTensorFactory): + return x2.merge_fn(x1) + + # There rest is almost the same as the `merge` function from `dict_utils` + if isinstance(x1, dict) and isinstance(x2, dict): + for k, v2 in x2.items(): + if k not in x1: + raise ValueError( + f'Different dict keys encountered in `apply_factory_merges` ' + f'({x1.keys()} vs {x2.keys()})' + ) + else: + x1[k] = apply_factory_merges(x1[k], v2, key=key + (k,)) + elif isinstance(x1, list) and isinstance(x2, list): + if len(x1) != len(x2): + err_msg = ( + f'Cannot merge two lists with different lengths ' + f'({len(x1)} and {len(x2)}, encountered at key {key})' + ) + logger.error(err_msg + f'\nx1: {x1}\nx2: {x2}') + raise ValueError(err_msg) + for i, v2 in enumerate(x2): + x1[i] = apply_factory_merges(x1[i], v2, key=key + (i,)) + elif isinstance(x1, list) and isinstance(x2, dict): + for k, v2 in x2.items(): + if not isinstance(k, int): + raise ValueError( + f'Invalid dict key {k} non-integer type encountered ' + f'in a list-dict merge at level {key}' + ) + if k >= len(x1): + raise ValueError( + f'Dict key {k} out of bound for list of length' + f'{len(x1)} (encountered at level {key})' + ) + x1[k] = apply_factory_merges(x1[k], v2, key=key + (k,)) + else: + raise ValueError( + f'Duplicate non-dict and non-list values encountered: `{x1}` and `{x2} (at key {key})`' + ) + return x1 diff --git a/megatron/core/dist_checkpointing/serialization.py b/megatron/core/dist_checkpointing/serialization.py index 600dd87..b8c7bad 100644 --- a/megatron/core/dist_checkpointing/serialization.py +++ b/megatron/core/dist_checkpointing/serialization.py @@ -1,424 +1,424 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. - -""" Entrypoints for saving and loading the distributed checkpoints. - -Functions `load` and `save` are equivalents of `torch.load` and `torch.save` -but expect torch.Tensors to be wrapped with classes from the `mapping module`. -Additionally, `load` expects the sharded state dict argument as a guidance for -loading the sharded tensors. -""" - -import logging -from pathlib import Path -from typing import Callable, Dict, Optional, Set, Tuple, Union - -import torch - -from . 
import ShardedTensor -from .core import CheckpointingConfig, save_config -from .dict_utils import extract_matching_values, merge -from .mapping import ( - CheckpointingException, - CommonStateDict, - ShardedObject, - ShardedStateDict, - StateDict, - apply_factory_merges, -) -from .state_dict_transformation import load_preprocess, save_preprocess -from .strategies.async_utils import AsyncRequest -from .strategies.base import ( - AsyncSaveShardedStrategy, - LoadCommonStrategy, - LoadShardedStrategy, - SaveCommonStrategy, - SaveShardedStrategy, - StrategyAction, - get_default_strategy, -) -from .utils import extract_sharded_base -from .validation import ( - StrictHandling, - determine_global_metadata, - parse_strict_flag, - validate_integrity_and_strict_load, - validate_sharded_objects_handling, - verify_checkpoint_and_load_strategy, -) - -logger = logging.getLogger(__name__) - - -# flat state dict with sharded objects without any data -CkptShardedMetadata = Dict[str, Union[ShardedTensor, ShardedObject]] - - -def load( - sharded_state_dict: ShardedStateDict, - checkpoint_dir: str, - sharded_strategy: Union[LoadShardedStrategy, Tuple[str, int], None] = None, - common_strategy: Union[LoadCommonStrategy, Tuple[str, int], None] = None, - validate_access_integrity: bool = True, - strict: Union[str, StrictHandling] = StrictHandling.ASSUME_OK_UNEXPECTED, -) -> Union[StateDict, Tuple[StateDict, Set[str], Set[str]]]: - """Loading entrypoint. - - In the steps below, the following verbs refer to corresponding objects: - - load = load from checkpoint - - extract = extract from sharded_state_dict - - add = add to the final state dict - Steps: - 1. Load common state dict and form the base of the result state dict - 2. Apply factories to sharded_state_dict - 3. Extract LocalNonPersistentObject and add - 4. (optional) Extract ShardedObjects, load and add - 5. Extract ShardedBase, load, apply factory merges and add - - Args: - sharded_state_dict (ShardedStateDict): state dict of the existing model - populated with ShardedTensors. Used as a mapping to determine which - parts of global tensors stored in the checkpoint should be loaded. - checkpoint_dir (str): directory with the checkpoint - sharded_strategy (LoadShardedStrategy, Tuple[str, int], optional): - configures loading behavior for sharded tensors - common_strategy (LoadCommonStrategy, Tuple[str, int], optional): - configures loading behavior for common data - validate_access_integrity (bool default = True): checks if each tensor shard is accessed - exactly once (as main replica) by some process - strict (StrictHandling, str, optional): determines the behavior in case of a mismatch - between the requested sharded state dict and the checkpoint. See `StrictHandling` docs - for more details. Some values affect the return value of this function - (missing and unexpected keys are returned). - Defaults to `True` (StrictHandling.ASSUME_OK_UNEXPECTED) which doesn't - incur any performance overhead. Other recommended values - are: `False` (StrictHandling.LOG_UNEXPECTED) which logs only unexpected keys - or `StrictHandling.RETURN_ALL` which returns all mismatch keys. - - Returns: - StateDict or Tuple[StateDict, Set[str], Set[str]]: in most cases only - the loaded state dict is returned. 
If `strict` flag was set to - """ - sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( - checkpoint_dir, sharded_strategy, common_strategy - ) - - checkpoint_dir = Path(checkpoint_dir) - common_state_dict = common_strategy.load_common(checkpoint_dir) - - sharded_state_dict, nonpersistent_state_dict, sh_ten_factories = load_preprocess( - sharded_state_dict - ) - merge(common_state_dict, nonpersistent_state_dict) - - # At this point we are only dealing with ShardedBase objects - sharded_state_dict, _ = extract_sharded_base(sharded_state_dict) - - # Validation - ckpt_sharded_metadata = None - local_metadata, global_metadata = None, None - strict = parse_strict_flag(strict) - if StrictHandling.requires_explicit_ckpt_mismatch_check(strict): - ckpt_sharded_metadata = load_sharded_metadata( - str(checkpoint_dir), sharded_strategy, common_strategy - ) - if validate_access_integrity or StrictHandling.requires_global_app_metadata(strict): - local_metadata, global_metadata = determine_global_metadata(sharded_state_dict) - - sharded_state_dict, missing_keys, unexpected_keys = validate_integrity_and_strict_load( - sharded_state_dict, - strict, - validate_access_integrity, - local_metadata, - global_metadata, - ckpt_sharded_metadata, - ) - - # ShardedBase loading - if not sharded_strategy.can_handle_sharded_objects: - validate_sharded_objects_handling(sharded_strategy, common_strategy) - sharded_objects_state_dict, sharded_state_dict = extract_matching_values( - sharded_state_dict, lambda v: isinstance(v, ShardedObject) - ) - sharded_objects = common_strategy.load_sharded_objects( - sharded_objects_state_dict, checkpoint_dir - ) - merge(common_state_dict, sharded_objects) - - loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) - - merge(common_state_dict, loaded_state_dict) - - loaded_state_dict = apply_factory_merges(common_state_dict, sh_ten_factories) - - if StrictHandling.requires_returning_mismatch_keys(strict): - return common_state_dict, missing_keys, unexpected_keys - else: - return common_state_dict - - -def load_common_state_dict(checkpoint_dir: Path) -> StateDict: - """Load common (non-sharded) objects state dict from the checkpoint. - - Args: - checkpoint_dir (Path): checkpoint directory - - Returns: - StateDict: state dict with non-sharded objects from the checkpoint - """ - sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy(str(checkpoint_dir)) - return common_strategy.load_common(checkpoint_dir) - - -def load_tensors_metadata( - checkpoint_dir: str, sharded_strategy: Union[LoadShardedStrategy, None] = None -) -> CkptShardedMetadata: - """Load tensors metadata from the checkpoint. - - Returns a dictionary similar to a sharded state dict, but note that - the dictionary keys are simply ShardedTensor keys (contrary to the - actual sharded state dicts where keys correspond to state dict keys). - - Dict values are ShardedTensors without any sharding (so, the only useful - information is tensors global shape and dtype). - - Concrete implementation depends on the loading strategy. If no strategy is - given, a default for a given backend is used. - - Args: - checkpoint_dir (str): checkpoint directory to load from - sharded_strategy (LoadShardedStrategy, optional): sharded strategy to load metadata. - Defaults to None - in this case a default load strategy for a given checkpoint type - is used. 
- - Returns: - CkptShardedMetadata: flat state dict without data describing ShardedTensors - in the checkpoint - """ - sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( - checkpoint_dir, sharded_strategy - ) - return sharded_strategy.load_tensors_metadata(Path(checkpoint_dir)) - - -def load_sharded_metadata( - checkpoint_dir: str, - sharded_strategy: Union[LoadShardedStrategy, None] = None, - common_strategy: Union[LoadCommonStrategy, None] = None, -) -> CkptShardedMetadata: - """Load sharded metadata from the checkpoint. - - Similar to `load_tensors_metadata`, but includes also ShardedObjects. - - Returns a dictionary similar to a sharded state dict, but note that - the dictionary keys are simply ShardedTensor keys (contrary to the - actual sharded state dicts where keys correspond to state dict keys). - - Dict values are ShardedTensors without any sharding (so, the only useful - information is tensors global shape and dtype). - - Concrete implementation depends on the loading strategy. If no strategy is - given, a default for a given backend is used. - - Args: - checkpoint_dir (str): checkpoint directory to load from - sharded_strategy (LoadShardedStrategy, optional): sharded strategy to load metadata. - Defaults to None - in this case a default load strategy for a given checkpoint type - is used. - common_strategy (LoadCommonStrategy, optional): common strategy to load metadata. - Defaults to None - in this case a default load strategy for a given checkpoint type is - used. This strategy won't be used unless `sharded_strategy` can't handle ShardedObjects - - Returns: - CkptShardedMetadata: flat state dict without data describing ShardedTensors - and ShardedObjects in the checkpoint - """ - sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( - checkpoint_dir, sharded_strategy, common_strategy - ) - sharded_metadata = sharded_strategy.load_sharded_metadata(Path(checkpoint_dir)) - if not sharded_strategy.can_handle_sharded_objects: - validate_sharded_objects_handling(sharded_strategy, common_strategy) - common_metadata = common_strategy.load_sharded_metadata(Path(checkpoint_dir)) - sharded_metadata = merge(sharded_metadata, common_metadata) - return sharded_metadata - - -def load_plain_tensors(checkpoint_dir: str) -> StateDict: - """Load checkpoint tensors without any sharding and plain structure. - - NOTE: common state dict is NOT included. - - Args: - checkpoint_dir (str): checkpoint directory to load the tensors from. - - Returns: - StateDict: checkpoint state dict containing only torch.Tensors. - """ - sharded_state_dict = load_tensors_metadata(checkpoint_dir) - # Don't validate integrity because shards will be overlapped - # if world_size > 1 (all processes load whole tensors) - return load(sharded_state_dict, checkpoint_dir, validate_access_integrity=False) - - -# -# def load_plain_tensors_and_objects(checkpoint_dir: str) -> StateDict: -# """Load checkpoint tensors and objects without any sharding and plain structure. -# -# NOTE: state dict structure might be different than the one used for checkpoint saving. -# NOTE: common state dict is NOT included. -# -# Args: -# checkpoint_dir (str): checkpoint directory to load the state dict from. -# -# Returns: -# StateDict: complete checkpoint state dict without any sharding. 
-# """ -# sharded_state_dict = load_tensors_metadata(checkpoint_dir) -# # Don't validate integrity because shards will be overlapped -# # if world_size > 1 (all processes load whole tensors) -# return load(sharded_state_dict, checkpoint_dir, validate_access_integrity=False) - - -def remove_sharded_tensors(checkpoint_dir: str, key_prefix: str): - """determine the appropriate sharding strategy and delegate removal to the sharded strategy""" - sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy(checkpoint_dir) - sharded_strategy.remove_sharded_tensors(checkpoint_dir, key_prefix) - - -def save( - sharded_state_dict: ShardedStateDict, - checkpoint_dir: str, - sharded_strategy: Union[SaveShardedStrategy, Tuple[str, int], None] = None, - common_strategy: Union[SaveCommonStrategy, Tuple[str, int], None] = None, - validate_access_integrity: bool = True, - async_sharded_save: bool = False, - preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, -) -> Optional[AsyncRequest]: - """Saving entrypoint. - - Extracts ShardedTensors from the given state dict. Rank 0 saves the - "regular" part of the checkpoint to common torch file. - The ShardedTensors are saved according to a strategy specified by the - config. - - Steps: - 1. Apply factories - 2. Extract and discard LocalNonPersistentObject - 3. Extract all ShardedBase object - 4. Save all other objects to common.pt - 5. (optional) Extract and save ShardedObjects - 6. Save all ShardedBase objects - 7. Write metadata.json file with backend and version metadata. - - Step (6) can be performed asynchronously (see `async_sharded_save`), in this - case the actual save is embodied in the returned async request and can be - scheduled by the external caller. For async request, step (7) is added as - one of the finalization functions, so that metadata.json is written only - if the checkpoint is complete. - - Args: - sharded_state_dict (ShardedStateDict): state dict of the populated with - ShardedTensors. Used as a mapping to determine how local tensors - should be saved as global tensors in the checkpoint. - checkpoint_dir (str): directory to save the checkpoint to - sharded_strategy (SaveShardedStrategy, Tuple[str, int], optional): - configures sharded tensors saving behavior and backend - common_strategy (SaveCommonStrategy, Tuple[str, int], optional): - configures common data saving behavior and backend - validate_access_integrity (bool default = True): checks if each tensor shard is accessed - exactly once (as main replica) by some process. - It also makes sure the common state dict is consistant across all ranks - async_sharded_save (bool, optional): if True, for the sharded state dict part - an async save implementation will be called, with the AsyncRequest - being returned to the caller. Note that it is the caller responsibility to - actually schedule the async save. Defaults to False. - preprocess_common_before_consistancy_check (Callable[[CommonStateDict], StateDict], None): - A callable function that will preprocess the common state dict (i.e can be used to - remove keys that we expect to be different in the state dict). The function must not - modify the original state dict - - Returns: - AsyncRequest (optional): if `async_sharded_save` is True, returns - async request that should be scheduled by the caller of this function. - None otherwise. 
- """ - checkpoint_dir = Path(checkpoint_dir) - - if torch.distributed.get_rank() == 0: - if not checkpoint_dir.exists(): - raise CheckpointingException( - f'Checkpoint destination directory does not exist: {checkpoint_dir}' - ) - - if next(checkpoint_dir.iterdir(), None) is not None: - raise CheckpointingException( - f'Checkpoint destination directory ({checkpoint_dir}) is not empty' - ) - - if common_strategy is not None: - raise NotImplementedError('The only supported common strategy is torch') - - if sharded_strategy is None: - sharded_strategy = get_default_save_sharded_strategy() - if not isinstance(sharded_strategy, SaveShardedStrategy): - assert isinstance(sharded_strategy, tuple), type(sharded_strategy) - sharded_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, *sharded_strategy) - - if common_strategy is None: - common_strategy = get_default_save_common_strategy() - if not isinstance(common_strategy, SaveCommonStrategy): - assert isinstance(common_strategy, tuple), type(common_strategy) - common_strategy = get_default_strategy(StrategyAction.SAVE_COMMON, *common_strategy) - - sharded_state_dict, state_dict = save_preprocess( - sharded_state_dict, validate_access_integrity, preprocess_common_before_consistancy_check - ) - - common_strategy.save_common(state_dict, checkpoint_dir) - - if not sharded_strategy.can_handle_sharded_objects: - validate_sharded_objects_handling(sharded_strategy, common_strategy) - sharded_objects_state_dict, sharded_state_dict = extract_matching_values( - sharded_state_dict, lambda v: isinstance(v, ShardedObject) - ) - common_strategy.save_sharded_objects(sharded_objects_state_dict, checkpoint_dir) - - def metadata_finalize_fn(): - if torch.distributed.get_rank() == 0: - save_config( - CheckpointingConfig(sharded_strategy.backend, sharded_strategy.version), - checkpoint_dir, - ) - torch.distributed.barrier() - - if not async_sharded_save: - sharded_strategy.save(sharded_state_dict, checkpoint_dir) - metadata_finalize_fn() - return - - if not isinstance(sharded_strategy, AsyncSaveShardedStrategy): - raise CheckpointingException( - f'Cannot apply async_save to non-async strategy {sharded_strategy}' - ) - async_request = sharded_strategy.async_save(sharded_state_dict, checkpoint_dir) - async_request.finalize_fns.append(metadata_finalize_fn) - return async_request - - -def get_default_save_sharded_strategy( - backend: str = 'torch_dist', version: int = 1 -) -> SaveShardedStrategy: - """Get default save sharded strategy.""" - return get_default_strategy(StrategyAction.SAVE_SHARDED, backend, version) - - -def get_default_save_common_strategy( - backend: str = 'torch', version: int = 1 -) -> SaveCommonStrategy: - """Get default save common strategy.""" - return get_default_strategy(StrategyAction.SAVE_COMMON, backend, version) - - -def get_default_load_sharded_strategy(checkpoint_dir: str) -> LoadShardedStrategy: - """Get default load sharded strategy.""" - return verify_checkpoint_and_load_strategy(checkpoint_dir)[0] +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Entrypoints for saving and loading the distributed checkpoints. + +Functions `load` and `save` are equivalents of `torch.load` and `torch.save` +but expect torch.Tensors to be wrapped with classes from the `mapping module`. +Additionally, `load` expects the sharded state dict argument as a guidance for +loading the sharded tensors. 
+""" + +import logging +from pathlib import Path +from typing import Callable, Dict, Optional, Set, Tuple, Union + +import torch + +from . import ShardedTensor +from .core import CheckpointingConfig, save_config +from .dict_utils import extract_matching_values, merge +from .mapping import ( + CheckpointingException, + CommonStateDict, + ShardedObject, + ShardedStateDict, + StateDict, + apply_factory_merges, +) +from .state_dict_utils import load_preprocess, save_preprocess +from .strategies.async_utils import AsyncRequest +from .strategies.base import ( + AsyncSaveShardedStrategy, + LoadCommonStrategy, + LoadShardedStrategy, + SaveCommonStrategy, + SaveShardedStrategy, + StrategyAction, + get_default_strategy, +) +from .utils import extract_sharded_base +from .validation import ( + StrictHandling, + determine_global_metadata, + parse_strict_flag, + validate_integrity_and_strict_load, + validate_sharded_objects_handling, + verify_checkpoint_and_load_strategy, +) + +logger = logging.getLogger(__name__) + + +# flat state dict with sharded objects without any data +CkptShardedMetadata = Dict[str, Union[ShardedTensor, ShardedObject]] + + +def load( + sharded_state_dict: ShardedStateDict, + checkpoint_dir: str, + sharded_strategy: Union[LoadShardedStrategy, Tuple[str, int], None] = None, + common_strategy: Union[LoadCommonStrategy, Tuple[str, int], None] = None, + validate_access_integrity: bool = True, + strict: Union[str, StrictHandling] = StrictHandling.ASSUME_OK_UNEXPECTED, +) -> Union[StateDict, Tuple[StateDict, Set[str], Set[str]]]: + """Loading entrypoint. + + In the steps below, the following verbs refer to corresponding objects: + - load = load from checkpoint + - extract = extract from sharded_state_dict + - add = add to the final state dict + Steps: + 1. Load common state dict and form the base of the result state dict + 2. Apply factories to sharded_state_dict + 3. Extract LocalNonPersistentObject and add + 4. (optional) Extract ShardedObjects, load and add + 5. Extract ShardedBase, load, apply factory merges and add + + Args: + sharded_state_dict (ShardedStateDict): state dict of the existing model + populated with ShardedTensors. Used as a mapping to determine which + parts of global tensors stored in the checkpoint should be loaded. + checkpoint_dir (str): directory with the checkpoint + sharded_strategy (LoadShardedStrategy, Tuple[str, int], optional): + configures loading behavior for sharded tensors + common_strategy (LoadCommonStrategy, Tuple[str, int], optional): + configures loading behavior for common data + validate_access_integrity (bool default = True): checks if each tensor shard is accessed + exactly once (as main replica) by some process + strict (StrictHandling, str, optional): determines the behavior in case of a mismatch + between the requested sharded state dict and the checkpoint. See `StrictHandling` docs + for more details. Some values affect the return value of this function + (missing and unexpected keys are returned). + Defaults to `True` (StrictHandling.ASSUME_OK_UNEXPECTED) which doesn't + incur any performance overhead. Other recommended values + are: `False` (StrictHandling.LOG_UNEXPECTED) which logs only unexpected keys + or `StrictHandling.RETURN_ALL` which returns all mismatch keys. + + Returns: + StateDict or Tuple[StateDict, Set[str], Set[str]]: in most cases only + the loaded state dict is returned. 
If `strict` flag was set to + """ + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( + checkpoint_dir, sharded_strategy, common_strategy + ) + + checkpoint_dir = Path(checkpoint_dir) + common_state_dict = common_strategy.load_common(checkpoint_dir) + + sharded_state_dict, nonpersistent_state_dict, sh_ten_factories = load_preprocess( + sharded_state_dict + ) + merge(common_state_dict, nonpersistent_state_dict) + + # At this point we are only dealing with ShardedBase objects + sharded_state_dict, _ = extract_sharded_base(sharded_state_dict) + + # Validation + ckpt_sharded_metadata = None + local_metadata, global_metadata = None, None + strict = parse_strict_flag(strict) + if StrictHandling.requires_explicit_ckpt_mismatch_check(strict): + ckpt_sharded_metadata = load_sharded_metadata( + str(checkpoint_dir), sharded_strategy, common_strategy + ) + if validate_access_integrity or StrictHandling.requires_global_app_metadata(strict): + local_metadata, global_metadata = determine_global_metadata(sharded_state_dict) + + sharded_state_dict, missing_keys, unexpected_keys = validate_integrity_and_strict_load( + sharded_state_dict, + strict, + validate_access_integrity, + local_metadata, + global_metadata, + ckpt_sharded_metadata, + ) + + # ShardedBase loading + if not sharded_strategy.can_handle_sharded_objects: + validate_sharded_objects_handling(sharded_strategy, common_strategy) + sharded_objects_state_dict, sharded_state_dict = extract_matching_values( + sharded_state_dict, lambda v: isinstance(v, ShardedObject) + ) + sharded_objects = common_strategy.load_sharded_objects( + sharded_objects_state_dict, checkpoint_dir + ) + merge(common_state_dict, sharded_objects) + + loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) + + merge(common_state_dict, loaded_state_dict) + + loaded_state_dict = apply_factory_merges(common_state_dict, sh_ten_factories) + + if StrictHandling.requires_returning_mismatch_keys(strict): + return common_state_dict, missing_keys, unexpected_keys + else: + return common_state_dict + + +def load_common_state_dict(checkpoint_dir: Path) -> StateDict: + """Load common (non-sharded) objects state dict from the checkpoint. + + Args: + checkpoint_dir (Path): checkpoint directory + + Returns: + StateDict: state dict with non-sharded objects from the checkpoint + """ + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy(str(checkpoint_dir)) + return common_strategy.load_common(checkpoint_dir) + + +def load_tensors_metadata( + checkpoint_dir: str, sharded_strategy: Union[LoadShardedStrategy, None] = None +) -> CkptShardedMetadata: + """Load tensors metadata from the checkpoint. + + Returns a dictionary similar to a sharded state dict, but note that + the dictionary keys are simply ShardedTensor keys (contrary to the + actual sharded state dicts where keys correspond to state dict keys). + + Dict values are ShardedTensors without any sharding (so, the only useful + information is tensors global shape and dtype). + + Concrete implementation depends on the loading strategy. If no strategy is + given, a default for a given backend is used. + + Args: + checkpoint_dir (str): checkpoint directory to load from + sharded_strategy (LoadShardedStrategy, optional): sharded strategy to load metadata. + Defaults to None - in this case a default load strategy for a given checkpoint type + is used. 
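A sketch of how the `load` entrypoint above is typically driven (illustrative only: the checkpoint path is hypothetical, `from_rank_offsets` with no offsets is assumed to describe an unsharded tensor, and torch.distributed is assumed to be initialized):

import torch

from megatron.core.dist_checkpointing import ShardedTensor
from megatron.core.dist_checkpointing.serialization import load
from megatron.core.dist_checkpointing.validation import StrictHandling

ckpt_dir = '/checkpoints/iter_0000100'   # hypothetical checkpoint directory

# A trivially "sharded" request: one unsharded tensor stored under 'embedding.weight'.
sharded_sd = {
    'embedding.weight': ShardedTensor.from_rank_offsets(
        'embedding.weight', torch.empty(16, 8)
    )
}

# Default strict handling: only the merged state dict is returned.
state_dict = load(sharded_sd, ckpt_dir)

# With RETURN_ALL, missing and unexpected keys are reported as well.
state_dict, missing_keys, unexpected_keys = load(
    sharded_sd, ckpt_dir, strict=StrictHandling.RETURN_ALL
)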
+ + Returns: + CkptShardedMetadata: flat state dict without data describing ShardedTensors + in the checkpoint + """ + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( + checkpoint_dir, sharded_strategy + ) + return sharded_strategy.load_tensors_metadata(Path(checkpoint_dir)) + + +def load_sharded_metadata( + checkpoint_dir: str, + sharded_strategy: Union[LoadShardedStrategy, None] = None, + common_strategy: Union[LoadCommonStrategy, None] = None, +) -> CkptShardedMetadata: + """Load sharded metadata from the checkpoint. + + Similar to `load_tensors_metadata`, but includes also ShardedObjects. + + Returns a dictionary similar to a sharded state dict, but note that + the dictionary keys are simply ShardedTensor keys (contrary to the + actual sharded state dicts where keys correspond to state dict keys). + + Dict values are ShardedTensors without any sharding (so, the only useful + information is tensors global shape and dtype). + + Concrete implementation depends on the loading strategy. If no strategy is + given, a default for a given backend is used. + + Args: + checkpoint_dir (str): checkpoint directory to load from + sharded_strategy (LoadShardedStrategy, optional): sharded strategy to load metadata. + Defaults to None - in this case a default load strategy for a given checkpoint type + is used. + common_strategy (LoadCommonStrategy, optional): common strategy to load metadata. + Defaults to None - in this case a default load strategy for a given checkpoint type is + used. This strategy won't be used unless `sharded_strategy` can't handle ShardedObjects + + Returns: + CkptShardedMetadata: flat state dict without data describing ShardedTensors + and ShardedObjects in the checkpoint + """ + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy( + checkpoint_dir, sharded_strategy, common_strategy + ) + sharded_metadata = sharded_strategy.load_sharded_metadata(Path(checkpoint_dir)) + if not sharded_strategy.can_handle_sharded_objects: + validate_sharded_objects_handling(sharded_strategy, common_strategy) + common_metadata = common_strategy.load_sharded_metadata(Path(checkpoint_dir)) + sharded_metadata = merge(sharded_metadata, common_metadata) + return sharded_metadata + + +def load_plain_tensors(checkpoint_dir: str) -> StateDict: + """Load checkpoint tensors without any sharding and plain structure. + + NOTE: common state dict is NOT included. + + Args: + checkpoint_dir (str): checkpoint directory to load the tensors from. + + Returns: + StateDict: checkpoint state dict containing only torch.Tensors. + """ + sharded_state_dict = load_tensors_metadata(checkpoint_dir) + # Don't validate integrity because shards will be overlapped + # if world_size > 1 (all processes load whole tensors) + return load(sharded_state_dict, checkpoint_dir, validate_access_integrity=False) + + +# +# def load_plain_tensors_and_objects(checkpoint_dir: str) -> StateDict: +# """Load checkpoint tensors and objects without any sharding and plain structure. +# +# NOTE: state dict structure might be different than the one used for checkpoint saving. +# NOTE: common state dict is NOT included. +# +# Args: +# checkpoint_dir (str): checkpoint directory to load the state dict from. +# +# Returns: +# StateDict: complete checkpoint state dict without any sharding. 
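The metadata helpers above allow inspecting a checkpoint without loading any tensor data; a minimal sketch (the path is hypothetical):

from megatron.core.dist_checkpointing.serialization import (
    load_plain_tensors,
    load_tensors_metadata,
)

ckpt_dir = '/checkpoints/iter_0000100'   # hypothetical checkpoint directory

# Keys are ShardedTensor keys; values carry only global shape and dtype, no data.
for key, sh_ten in load_tensors_metadata(ckpt_dir).items():
    print(key, tuple(sh_ten.global_shape), sh_ten.dtype)

# Load full (unsharded) tensors on every rank; the common state dict is not included.
plain = load_plain_tensors(ckpt_dir)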
+# """ +# sharded_state_dict = load_tensors_metadata(checkpoint_dir) +# # Don't validate integrity because shards will be overlapped +# # if world_size > 1 (all processes load whole tensors) +# return load(sharded_state_dict, checkpoint_dir, validate_access_integrity=False) + + +def remove_sharded_tensors(checkpoint_dir: str, key_prefix: str): + """determine the appropriate sharding strategy and delegate removal to the sharded strategy""" + sharded_strategy, common_strategy = verify_checkpoint_and_load_strategy(checkpoint_dir) + sharded_strategy.remove_sharded_tensors(checkpoint_dir, key_prefix) + + +def save( + sharded_state_dict: ShardedStateDict, + checkpoint_dir: str, + sharded_strategy: Union[SaveShardedStrategy, Tuple[str, int], None] = None, + common_strategy: Union[SaveCommonStrategy, Tuple[str, int], None] = None, + validate_access_integrity: bool = True, + async_sharded_save: bool = False, + preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, +) -> Optional[AsyncRequest]: + """Saving entrypoint. + + Extracts ShardedTensors from the given state dict. Rank 0 saves the + "regular" part of the checkpoint to common torch file. + The ShardedTensors are saved according to a strategy specified by the + config. + + Steps: + 1. Apply factories + 2. Extract and discard LocalNonPersistentObject + 3. Extract all ShardedBase object + 4. Save all other objects to common.pt + 5. (optional) Extract and save ShardedObjects + 6. Save all ShardedBase objects + 7. Write metadata.json file with backend and version metadata. + + Step (6) can be performed asynchronously (see `async_sharded_save`), in this + case the actual save is embodied in the returned async request and can be + scheduled by the external caller. For async request, step (7) is added as + one of the finalization functions, so that metadata.json is written only + if the checkpoint is complete. + + Args: + sharded_state_dict (ShardedStateDict): state dict of the populated with + ShardedTensors. Used as a mapping to determine how local tensors + should be saved as global tensors in the checkpoint. + checkpoint_dir (str): directory to save the checkpoint to + sharded_strategy (SaveShardedStrategy, Tuple[str, int], optional): + configures sharded tensors saving behavior and backend + common_strategy (SaveCommonStrategy, Tuple[str, int], optional): + configures common data saving behavior and backend + validate_access_integrity (bool default = True): checks if each tensor shard is accessed + exactly once (as main replica) by some process. + It also makes sure the common state dict is consistant across all ranks + async_sharded_save (bool, optional): if True, for the sharded state dict part + an async save implementation will be called, with the AsyncRequest + being returned to the caller. Note that it is the caller responsibility to + actually schedule the async save. Defaults to False. + preprocess_common_before_consistancy_check (Callable[[CommonStateDict], StateDict], None): + A callable function that will preprocess the common state dict (i.e can be used to + remove keys that we expect to be different in the state dict). The function must not + modify the original state dict + + Returns: + AsyncRequest (optional): if `async_sharded_save` is True, returns + async request that should be scheduled by the caller of this function. + None otherwise. 
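A sketch of the synchronous and asynchronous call patterns described above (illustrative only: the directories are hypothetical and must already exist and be empty, torch.distributed is assumed to be initialized, and `AsyncCallsQueue` is used with the schedule/finalize interface visible elsewhere in this patch, which may differ in the reworked async_utils):

import torch

from megatron.core.dist_checkpointing import ShardedTensor
from megatron.core.dist_checkpointing.serialization import save
from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue

# Minimal ShardedTensor-populated state dict (a real model would provide this).
sharded_sd = {
    'embedding.weight': ShardedTensor.from_rank_offsets(
        'embedding.weight', torch.empty(16, 8)
    )
}

# Synchronous save: blocks until the shards and metadata.json are written.
save(sharded_sd, '/checkpoints/iter_0000200')

# Asynchronous save: the caller schedules the returned request and later
# finalizes it; metadata.json is written only at finalization.
queue = AsyncCallsQueue()
request = save(sharded_sd, '/checkpoints/iter_0000300', async_sharded_save=True)
queue.schedule_async_request(request)
# ... training continues ...
queue.maybe_finalize_async_calls(blocking=True)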
+ """ + checkpoint_dir = Path(checkpoint_dir) + + if torch.distributed.get_rank() == 0: + if not checkpoint_dir.exists(): + raise CheckpointingException( + f'Checkpoint destination directory does not exist: {checkpoint_dir}' + ) + + if next(checkpoint_dir.iterdir(), None) is not None: + raise CheckpointingException( + f'Checkpoint destination directory ({checkpoint_dir}) is not empty' + ) + + if common_strategy is not None: + raise NotImplementedError('The only supported common strategy is torch') + + if sharded_strategy is None: + sharded_strategy = get_default_save_sharded_strategy() + if not isinstance(sharded_strategy, SaveShardedStrategy): + assert isinstance(sharded_strategy, tuple), type(sharded_strategy) + sharded_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, *sharded_strategy) + + if common_strategy is None: + common_strategy = get_default_save_common_strategy() + if not isinstance(common_strategy, SaveCommonStrategy): + assert isinstance(common_strategy, tuple), type(common_strategy) + common_strategy = get_default_strategy(StrategyAction.SAVE_COMMON, *common_strategy) + + sharded_state_dict, state_dict = save_preprocess( + sharded_state_dict, validate_access_integrity, preprocess_common_before_consistancy_check + ) + + common_strategy.save_common(state_dict, checkpoint_dir) + + if not sharded_strategy.can_handle_sharded_objects: + validate_sharded_objects_handling(sharded_strategy, common_strategy) + sharded_objects_state_dict, sharded_state_dict = extract_matching_values( + sharded_state_dict, lambda v: isinstance(v, ShardedObject) + ) + common_strategy.save_sharded_objects(sharded_objects_state_dict, checkpoint_dir) + + def metadata_finalize_fn(): + if torch.distributed.get_rank() == 0: + save_config( + CheckpointingConfig(sharded_strategy.backend, sharded_strategy.version), + checkpoint_dir, + ) + torch.distributed.barrier() + + if not async_sharded_save: + sharded_strategy.save(sharded_state_dict, checkpoint_dir) + metadata_finalize_fn() + return + + if not isinstance(sharded_strategy, AsyncSaveShardedStrategy): + raise CheckpointingException( + f'Cannot apply async_save to non-async strategy {sharded_strategy}' + ) + async_request = sharded_strategy.async_save(sharded_state_dict, checkpoint_dir) + async_request.finalize_fns.append(metadata_finalize_fn) + return async_request + + +def get_default_save_sharded_strategy( + backend: str = 'torch_dist', version: int = 1 +) -> SaveShardedStrategy: + """Get default save sharded strategy.""" + return get_default_strategy(StrategyAction.SAVE_SHARDED, backend, version) + + +def get_default_save_common_strategy( + backend: str = 'torch', version: int = 1 +) -> SaveCommonStrategy: + """Get default save common strategy.""" + return get_default_strategy(StrategyAction.SAVE_COMMON, backend, version) + + +def get_default_load_sharded_strategy(checkpoint_dir: str) -> LoadShardedStrategy: + """Get default load sharded strategy.""" + return verify_checkpoint_and_load_strategy(checkpoint_dir)[0] diff --git a/megatron/core/dist_checkpointing/state_dict_transformation.py b/megatron/core/dist_checkpointing/state_dict_transformation.py deleted file mode 100644 index c8f01dd..0000000 --- a/megatron/core/dist_checkpointing/state_dict_transformation.py +++ /dev/null @@ -1,270 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. 
- -""" Utilities for transforming state_dict, including a tensor-aware implementation.""" - -import logging -from time import time -from typing import Any, Callable, Optional - -import torch - -from .dict_utils import dict_list_map_inplace, extract_matching_values, merge, nested_values -from .exchange_utils import determine_main_replica_uniform_distribution, exchange_by_distribution -from .mapping import ( - CommonStateDict, - ShardedObject, - ShardedStateDict, - ShardedTensor, - ShardedTensorFactory, - StateDict, - apply_factories, - apply_factory_merges, -) -from .utils import ( - _sharded_object_id, - _sharded_tensor_shard_id, - extract_nonpersistent, - extract_sharded_base, -) -from .validation import determine_global_metadata, validate_sharding_integrity - -logger = logging.getLogger(__name__) - - -def save_preprocess( - sharded_state_dict: ShardedStateDict, - validate_access_integrity: bool = True, - preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, -): - """Preprocesses the given state dictionary by applying factories, - discarding non-persistent data and extracting the common state dictionary. - Optionally, it can validate sharding integrity. - - Args: - sharded_state_dict (ShardedStateDict): The initial state dictionary to be preprocessed. - validate_access_integrity (bool): If True, triggers validation of sharding integrity. - preprocess_common_before_consistancy_check (callable, None): A callable function - that will preprocess the common state dict (i.e can be used to remove keys - that we expect to be different in the state dict) - - Returns: - Tuple[ShardedStateDict, dict]: - The preprocessed sharded state dictionary and the common state dictionary. - """ - apply_factories(sharded_state_dict) - _, sharded_state_dict = extract_nonpersistent(sharded_state_dict) - sharded_part, common_state_dict = extract_sharded_base(sharded_state_dict) - if validate_access_integrity: - preprocessed_common_state_dict = common_state_dict - if preprocess_common_before_consistancy_check: - preprocessed_common_state_dict = preprocess_common_before_consistancy_check( - common_state_dict - ) - validate_sharding_integrity( - determine_global_metadata(sharded_part)[1], - common_state_dict=preprocessed_common_state_dict, - ) - return sharded_part, common_state_dict - - -def load_preprocess(sharded_state_dict: ShardedStateDict): - """Preprocesses the given state dictionary by applying factories - and extracting non-persistent data, without modifying the original dictionary. - - Args: - sharded_state_dict (ShardedStateDict): - The initial state dictionary to be processed (remains unchanged). - - Returns: - Tuple[ShardedStateDict, dict, dict]: - - A preprocessed copy of the sharded state dictionary. - - A dictionary containing non-persistent state data. - - A dictionary of `ShardedTensorFactory` instances. 
- """ - # Create a copy of sharded_state_dict as the passed in state dict may have - # references that prevent tensors from being deallocated - sharded_state_dict, _ = extract_matching_values(sharded_state_dict, lambda x: True) - - sh_ten_factories, _ = extract_matching_values( - sharded_state_dict, - lambda x: isinstance(x, ShardedTensorFactory), - return_lists_as_dicts=True, - ) - apply_factories(sharded_state_dict) - - # Data inside sh_ten_factories no longer needed so delete them to reduce memory usage - dict_list_map_inplace(ShardedTensorFactory.without_data, sh_ten_factories) - # Non-persistent objects - nonpersistent_state_dict, sharded_state_dict = extract_nonpersistent(sharded_state_dict) - dict_list_map_inplace(lambda o: o.unwrap(), nonpersistent_state_dict) - return sharded_state_dict, nonpersistent_state_dict, sh_ten_factories - - -def prepare_state_dict_for_save( - sharded_state_dict: ShardedStateDict, - async_prepare: bool = False, - algo: str = 'atomic', - validate_access_integrity: bool = True, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, - to_cpu: bool = True, -): - """Creates a tensor-aware state dictionary that can be saved using the Local Checkpoint Manager. - - Args: - sharded_state_dict (ShardedStateDict): The initial state dictionary. - async_prepare (bool): If True, enables asynchronous preparation. - algo (str): The algorithm used to create the tensor-aware state dictionary. - validate_access_integrity (bool): If True, validates sharding integrity. - parallelization_group (torch.distributed.ProcessGroup): - The process group used for exchanges to avoid duplications. - to_cpu (bool): If True, moves all tensors from device to CPU. - - Returns: - ShardedStateDict: The tensor-aware state dictionary. - """ - - _start = time() - - if async_prepare: - raise NotImplementedError('Async state_dict preparation is not yet implemented') - if algo != 'atomic' and algo != 'fully_parallel': - raise NotImplementedError( - 'Only "atomic" and "fully_parallel" sharding algorithms are supported.' 
- ) - fully_parallel = algo == 'fully_parallel' - - sharded_part, common_state_dict = save_preprocess(sharded_state_dict, validate_access_integrity) - sharded_tensors = [] - sharded_objects = [] - for sh_base in nested_values(sharded_part): - if isinstance(sh_base, ShardedTensor): - sharded_tensors.append(sh_base) - else: - assert isinstance(sh_base, ShardedObject) - sharded_objects.append(sh_base) - if fully_parallel: - shard_to_saving_rank, _, shard_to_metadata = determine_main_replica_uniform_distribution( - sharded_part, parallelization_group, True - ) - - raw_tensors, raw_objects = {}, {} - for ten in sharded_tensors: - shard_id = _sharded_tensor_shard_id(ten) - if not fully_parallel or shard_to_saving_rank[shard_id] == torch.distributed.get_rank(): - # TODO cover creating copies on host in CheckpointManager.save() - if to_cpu: - raw_tensors[shard_id] = ten.data.to("cpu", non_blocking=True) - else: - raw_tensors[shard_id] = ten.data - ten.data = None - for obj in sharded_objects: - raw_objects[_sharded_object_id(obj)] = obj.data - obj.data = None - - logger.debug(f'prepare_state_dict_for_save took {time() - _start}') - - state_dict_for_save = { - 'raw_tensors': raw_tensors, - 'raw_objects': raw_objects, - 'common': common_state_dict, - 'sharded_state_dict': sharded_part, - } - if fully_parallel: - state_dict_for_save['shard_to_rank'] = shard_to_saving_rank - state_dict_for_save['shard_to_metadata'] = shard_to_metadata - return state_dict_for_save - - -def recreate_state_dict_after_load( - sharded_state_dict: ShardedStateDict, - loaded_state_dict: ShardedStateDict, - algo: str = 'atomic', - exchange_algo: str = 'broadcast', - validate_access_integrity: bool = True, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, -): - """Creates a final sharded state dictionary from a tensor-aware state dictionary. - - Args: - sharded_state_dict (ShardedStateDict): - The initial sharded state dictionary generated from the model. - loaded_state_dict (ShardedStateDict): - Tensor-aware state dictionary used to fill in missing data in the sharded state. - algo (str): The algorithm used to reconstruct the state dictionary - from the tensor-aware state dictionary. - exchange_algo (str): The algorithm used for tensor exchanges during retrieval. - validate_access_integrity (bool): If True, performs validation of sharding integrity. - parallelization_group (torch.distributed.ProcessGroup): - The process group used for efficient exchanges during retrieval. - - Returns: - ShardedStateDict: The finalized sharded state dictionary. - """ - - if algo != 'atomic' and algo != 'fully_parallel': - raise NotImplementedError( - 'Only "atomic" and "fully_parallel" sharding algorithms are supported.' 
- ) - fully_parallel = algo == 'fully_parallel' - - # __adding__ common part - recreated_state_dict, _ = extract_matching_values(loaded_state_dict["common"], lambda x: True) - - if not sharded_state_dict: - return recreated_state_dict - # TODO validate laoded_state_dict["sharded_state_dict"] and sharded_state_dict are compatible - - sharded_state_dict, nonpersistent_state_dict, sh_ten_factories = load_preprocess( - sharded_state_dict - ) - # __adding__ nonpersistent part - merge(recreated_state_dict, nonpersistent_state_dict) - - sharded_part, _ = extract_sharded_base(sharded_state_dict) - if validate_access_integrity: - validate_sharding_integrity(determine_global_metadata(sharded_part)[1]) - - # load sharded tensors and sharded objects to sharded_part - loaded_tensors = loaded_state_dict['raw_tensors'] - # TODO cover restoring the original device (H2D) in CheckpointManager.load() - for k, v in loaded_tensors.items(): - loaded_tensors[k] = v.cuda() # H2D - if fully_parallel: - distribution = ( - loaded_state_dict['shard_to_rank'], - None, - loaded_state_dict['shard_to_metadata'], - ) - unloaded_shards = {} - for sh_base in nested_values(sharded_part): - if isinstance(sh_base, ShardedTensor): - shard_id = _sharded_tensor_shard_id(sh_base) - if shard_id not in loaded_tensors: - unloaded_shards[shard_id] = sh_base - loaded_tensors = exchange_by_distribution( - loaded_tensors, unloaded_shards, distribution, parallelization_group, exchange_algo - ) - loaded_objects = loaded_state_dict['raw_objects'] - - def load_sharded_base(x: Any): - if isinstance(x, ShardedTensor): - shard_id = _sharded_tensor_shard_id(x) - if shard_id not in loaded_tensors: - raise Exception( - 'The current local checkpoint implementation assumes' - 'consistent tensor sharding during load and save operations.' - f'However, the expected shard {x} (ID: {shard_id})' - f'was not found in the checkpoint. (IDs: {loaded_tensors.keys()})' - ) - x = loaded_tensors[shard_id] - if isinstance(x, ShardedObject): - object_id = _sharded_object_id(x) - assert object_id in loaded_objects, (x, object_id, loaded_objects.keys()) - x = loaded_objects[object_id] - return x - - dict_list_map_inplace(load_sharded_base, sharded_part) - sharded_part = apply_factory_merges(sharded_part, sh_ten_factories) - # __adding__ sharded_part - merge(recreated_state_dict, sharded_part) - return recreated_state_dict diff --git a/megatron/core/dist_checkpointing/state_dict_utils.py b/megatron/core/dist_checkpointing/state_dict_utils.py new file mode 100644 index 0000000..74de4fd --- /dev/null +++ b/megatron/core/dist_checkpointing/state_dict_utils.py @@ -0,0 +1,112 @@ +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Utilities for transforming state_dict.""" + +from typing import Callable, Union + +from .dict_utils import dict_list_map_inplace, extract_matching_values +from .mapping import ( + CommonStateDict, + ShardedStateDict, + ShardedTensor, + ShardedTensorFactory, + StateDict, + apply_factories, +) +from .utils import extract_nonpersistent, extract_sharded_base +from .validation import determine_global_metadata, validate_sharding_integrity + + +def save_preprocess( + sharded_state_dict: ShardedStateDict, + validate_access_integrity: bool = True, + preprocess_common_before_consistancy_check: Callable[[CommonStateDict], StateDict] = None, +): + """Preprocesses the given state dictionary by applying factories, + discarding non-persistent data and extracting the common state dictionary. 
+ Optionally, it can validate sharding integrity. + + Args: + sharded_state_dict (ShardedStateDict): The initial state dictionary to be preprocessed. + validate_access_integrity (bool): If True, triggers validation of sharding integrity. + preprocess_common_before_consistancy_check (callable, None): A callable function + that will preprocess the common state dict (i.e can be used to remove keys + that we expect to be different in the state dict) + + Returns: + Tuple[ShardedStateDict, dict]: + The preprocessed sharded state dictionary and the common state dictionary. + """ + apply_factories(sharded_state_dict) + _, sharded_state_dict = extract_nonpersistent(sharded_state_dict) + sharded_part, common_state_dict = extract_sharded_base(sharded_state_dict) + sharded_part = filter_out_empty_flatten_tensor(sharded_part) + if validate_access_integrity: + preprocessed_common_state_dict = common_state_dict + if preprocess_common_before_consistancy_check: + preprocessed_common_state_dict = preprocess_common_before_consistancy_check( + common_state_dict + ) + validate_sharding_integrity( + determine_global_metadata(sharded_part)[1], + common_state_dict=preprocessed_common_state_dict, + ) + return sharded_part, common_state_dict + + +def load_preprocess(sharded_state_dict: ShardedStateDict): + """Preprocesses the given state dictionary by applying factories + and extracting non-persistent data, without modifying the original dictionary. + + Args: + sharded_state_dict (ShardedStateDict): + The initial state dictionary to be processed (remains unchanged). + + Returns: + Tuple[ShardedStateDict, dict, dict]: + - A preprocessed copy of the sharded state dictionary. + - A dictionary containing non-persistent state data. + - A dictionary of `ShardedTensorFactory` instances. + """ + # Create a copy of sharded_state_dict as the passed in state dict may have + # references that prevent tensors from being deallocated + sharded_state_dict, _ = extract_matching_values(sharded_state_dict, lambda x: True) + sharded_state_dict = filter_out_empty_flatten_tensor(sharded_state_dict) + + sh_ten_factories, _ = extract_matching_values( + sharded_state_dict, + lambda x: isinstance(x, ShardedTensorFactory), + return_lists_as_dicts=True, + ) + apply_factories(sharded_state_dict) + + # Data inside sh_ten_factories no longer needed so delete them to reduce memory usage + dict_list_map_inplace(ShardedTensorFactory.without_data, sh_ten_factories) + # Non-persistent objects + nonpersistent_state_dict, sharded_state_dict = extract_nonpersistent(sharded_state_dict) + dict_list_map_inplace(lambda o: o.unwrap(), nonpersistent_state_dict) + return sharded_state_dict, nonpersistent_state_dict, sh_ten_factories + + +def filter_out_empty_flatten_tensor(sharded_state_dict: Union[dict, list]): + """ + Filter out ShardedTensors with empty flatten_range. + These tensors can cause the PyTorch check in failure. + + Args: + sharded_state_dict: state dict possibly containing ShardedTensor objects + """ + # Filter out ShardedTensors with empty flatten_range. + # These tensors can cause the PyTorch check in + # `TorchShardedTensor._init_from_local_shards_and_global_metadata` to fail. + # This situation may occur in custom Fully Sharded Data Parallel (FSDP) cases. 
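+    # For example, a ShardedTensor with flattened_range == slice(5, 5)
+    # (start == stop, i.e. zero local elements) is dropped by the predicate
+    # below, while tensors with a non-empty range, or no flattening at all,
+    # are kept unchanged.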
+ sharded_state_dict, _ = extract_matching_values( + sharded_state_dict, + lambda v: not ( + isinstance(v, ShardedTensor) + and v.flattened_range + and v.flattened_range.start == v.flattened_range.stop + ), + ) + + return sharded_state_dict diff --git a/megatron/core/dist_checkpointing/strategies/async_utils.py b/megatron/core/dist_checkpointing/strategies/async_utils.py index 7cdda8a..29b2d5f 100644 --- a/megatron/core/dist_checkpointing/strategies/async_utils.py +++ b/megatron/core/dist_checkpointing/strategies/async_utils.py @@ -1,224 +1,543 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -""" -This module provides an async utilities which allow to start -a checkpoint save process in the background. -""" -import logging -from collections import deque -from time import time -from typing import Callable, List, NamedTuple, Optional, Tuple - -import torch -from torch import multiprocessing as mp - -logger = logging.getLogger(__name__) - - -class AsyncRequest(NamedTuple): - """Represents an async request that needs to be scheduled for execution. - - Args: - async_fn (Callable, optional): async function to call. None represents noop. - async_fn_args (Tuple): args to pass to `async_fn`. - finalize_fns (List[Callable]): list of functions to call to finalize the request. - These functions will be called synchronously after `async_fn` is done - *on all ranks*. - """ - - async_fn: Optional[Callable] - async_fn_args: Tuple - finalize_fns: List[Callable] - is_frozen: bool = False - - def add_finalize_fn(self, fn: Callable) -> None: - """Adds a new finalize function to the request. - - Args: - fn (Callable): function to add to the async request. This function - will be called *after* existing finalization functions. - - Returns: - None - """ - if self.is_frozen: - raise RuntimeError('Cannot add finalization functions to a frozen AsyncRequest') - self.finalize_fns.append(fn) - - def execute_sync(self) -> None: - """Helper to synchronously execute the request. - - This logic is equivalent to what should happen in case of the async call. - """ - if self.async_fn is not None: - self.async_fn(*self.async_fn_args) - torch.distributed.barrier() - for finalize_fn in self.finalize_fns: - finalize_fn() - - def freeze(self) -> 'AsyncRequest': - """Freezes the async request, disallowing adding new finalization functions. - - Returns: - AsyncRequest: new async request with all same fields except for the - `is_frozen` flag. - """ - return self._replace(is_frozen=True) - - -class DistributedAsyncCaller: - """Wrapper around mp.Process that ensures correct semantic of distributed finalization. - - Starts process asynchronously and allows checking if all processes on all ranks are done. - """ - - def __init__(self): - self.process: Optional[mp.Process] = None - self.start_time: Optional[float] = None - - def schedule_async_call(self, async_fn: Optional[Callable], save_args: Tuple) -> None: - """Spawn a process with `async_fn` as the target. - - This method must be called on all ranks. - - Args: - async_fn (Callable, optional): async function to call. If None, - no process will be started. - save_args (Tuple): async function args. 
- """ - if async_fn is None: - return # nothing to do - start_sync = time() - torch.cuda.synchronize() - end_sync = time() - logger.debug( - f"rank: {torch.distributed.get_rank()}, takes {end_sync - start_sync} to finish D2H " - ) - - ctx = mp.get_context('fork') - self.start_time = time() - self.process = ctx.Process(target=async_fn, args=save_args) - self.process.start() - init_time = time() - logger.debug( - f"rank: {torch.distributed.get_rank()}, takes {init_time - self.start_time} to schedule async ckpt " - ) - - def is_current_async_call_done(self, blocking=False) -> bool: - """Check if async save is finished on all ranks. - - For semantic correctness, requires rank synchronization in each check. - This method must be called on all ranks. - - Args: - blocking (bool, optional): if True, will wait until the call is done - on all ranks. Otherwise, returns immediately if at least one rank - is still active. Defaults to False. - - Returns: - bool: True if all ranks are done (immediately of after active wait - if `blocking` is True), False if at least one rank is still active. - """ - # The following takes the same overhead as torch.distributed.barrier (single integer all-reduce) - is_alive = int(self.process.is_alive()) if self.process is not None else 0 - ten = torch.tensor([is_alive], dtype=torch.int, device=torch.cuda.current_device()) - logger.debug( - f"rank: {torch.distributed.get_rank()}, DistributedAsyncCaller is_alive: {is_alive}" - ) - torch.distributed.all_reduce(ten) - if ten[0] > 0 and not blocking: - return False - else: - if self.process is not None: - logger.debug(f"rank: {torch.distributed.get_rank()}, joining self.process") - self.process.join() - self.process = None - - logger.debug( - f"DistributedAsyncCaller: Async process join finished after {time() - self.start_time:.2f}s from forking" - ) - self.start_time = None - return True - - -class _ActiveAsyncRequest(NamedTuple): - """Helper to represent an active async call. - - Args: - idx (int): index of the call (starting from 0) - async_caller (DistributedAsyncCaller): async caller instance that represents - the async process handling the async request - async_request (AsyncRequest): async request that is being called - """ - - idx: int - async_caller: DistributedAsyncCaller - async_request: AsyncRequest - - -class AsyncCallsQueue: - """Manages a queue of async calls. - - Allows adding a new async call with `schedule_async_request` and finalizing - active calls with `maybe_finalize_async_calls`. - """ - - def __init__(self): - self.async_calls: deque[_ActiveAsyncRequest] = deque([]) - self.call_idx: int = -1 - - def schedule_async_request(self, async_request: AsyncRequest) -> int: - """Start a new async call and add it to a queue of active async calls. - - This method must be called on all ranks. - - Args: - async_request (AsyncRequest): async request to start. - - Returns: - int: index of the async call that was started. - This can help the user keep track of the async calls. - """ - self.call_idx += 1 - async_caller = DistributedAsyncCaller() - async_request = async_request.freeze() - async_caller.schedule_async_call(async_request.async_fn, async_request.async_fn_args) - self.async_calls.append(_ActiveAsyncRequest(self.call_idx, async_caller, async_request)) - return self.call_idx - - def maybe_finalize_async_calls(self, blocking=False) -> List[int]: - """Finalizes all available calls. - - This method must be called on all ranks. 
- - Args: - blocking (bool, optional): if True, will wait until all active requests - are done. Otherwise, finalizes only the async request that already - finished. Defaults to False. - Returns: - List[int]: list of indices (as returned by `schedule_async_request`) - of async calls that have been successfully finalized. - """ - call_idx_finalized = [] - while self.async_calls: - next_async_done = self.async_calls[0].async_caller.is_current_async_call_done(blocking) - if not next_async_done: - break - call_idx, _, async_request = self.async_calls.popleft() - for finalize_fn in async_request.finalize_fns: - finalize_fn() - ten = torch.tensor([call_idx], dtype=torch.int, device=torch.cuda.current_device()) - torch.distributed.all_reduce(ten, op=torch.distributed.ReduceOp.MAX) - assert ( - ten.item() == call_idx - ), 'Unmatched async calls. That probably means not all ranks are participating in async finalization' - call_idx_finalized.append(call_idx) - return call_idx_finalized - - def get_num_unfinalized_calls(self): - """Get the number of active async calls.""" - return len(self.async_calls) - - def close(self): - """Finalize all calls upon closing.""" - self.maybe_finalize_async_calls(blocking=True) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +""" +This module provides an async utilities which allow to start +a checkpoint save process in the background. +""" +import gc +import logging +from abc import ABC, abstractmethod +from collections import deque +from contextlib import contextmanager +from queue import Empty +from time import sleep, time +from typing import Callable, Dict, List, NamedTuple, Optional, Tuple + +import torch +from torch import multiprocessing as mp + +from ..utils import debug_time + +logger = logging.getLogger(__name__) + + +@contextmanager +def _disable_gc(): + """Temporarily disables GC.""" + gc_enabled = gc.isenabled() + try: + if gc_enabled: + gc.disable() + yield + finally: + if gc_enabled: + gc.enable() + + +class AsyncRequest(NamedTuple): + """Represents an async request that needs to be scheduled for execution. + + Args: + async_fn (Callable, optional): async function to call. None represents noop. + async_fn_args (Tuple): args to pass to `async_fn`. + finalize_fns (List[Callable]): list of functions to call to finalize the request. + These functions will be called synchronously after `async_fn` is done + *on all ranks*. + async_fn_kwargs (Tuple): kwargs to pass to `async_fn`. + preload_fn (Callable): preload function to stage tensors from GPU to Host. + This should be self-contained with a proper list of arguments with `partial`. + is_frozen (Bool): a flag to indicate this async request can be modified or not. + call_idx (int): index variable used to order async requests for synchronization + in preloading and writing tensors on the async caller + + """ + + async_fn: Optional[Callable] + async_fn_args: Tuple + finalize_fns: List[Callable] + async_fn_kwargs: Dict = {} + preload_fn: Callable = None + is_frozen: bool = False + call_idx: int = 0 + + def add_finalize_fn(self, fn: Callable) -> None: + """Adds a new finalize function to the request. + + Args: + fn (Callable): function to add to the async request. This function + will be called *after* existing finalization functions. + + Returns: + None + """ + if self.is_frozen: + raise RuntimeError('Cannot add finalization functions to a frozen AsyncRequest') + self.finalize_fns.append(fn) + + def execute_sync(self) -> None: + """Helper to synchronously execute the request. 
+ + This logic is equivalent to what should happen in case of the async call. + """ + if self.async_fn is not None: + self.async_fn(*self.async_fn_args) + torch.distributed.barrier() + for finalize_fn in self.finalize_fns: + finalize_fn() + + def freeze(self) -> 'AsyncRequest': + """Freezes the async request, disallowing adding new finalization functions. + + Returns: + AsyncRequest: new async request with all same fields except for the + `is_frozen` flag. + """ + return self._replace(is_frozen=True) + + +class AsyncCaller(ABC): + """Wrapper around mp.Process that ensures correct semantic of distributed finalization. + + Starts process asynchronously and allows checking if all processes on all ranks are done. + """ + + @abstractmethod + def schedule_async_call(self, async_req: AsyncRequest) -> None: + """Schedule `async_req` with some process forking or reusing + persistent worker + + This method must be called on all ranks. + + Args: + async_req (AsyncRequest): `AsyncRequest` object containing to + start async process + """ + raise NotImplementedError("This should be implemented") + + @abstractmethod + def is_current_async_call_done(self, blocking: bool, no_dist: bool) -> bool: + """Check if async save is finished on all ranks. + + For semantic correctness, requires rank synchronization in each check. + This method must be called on all ranks. + + Args: + blocking (bool, optional): if True, will wait until the call is done + on all ranks. Otherwise, returns immediately if at least one rank + is still active. Defaults to False. + no_dist (bool, Optional): if True, training ranks simply check its + asynchronous checkpoint writer without synchronization. + + Returns: + bool: True if all ranks are done (immediately of after active wait + if `blocking` is True), False if at least one rank is still active. + + """ + raise NotImplementedError("This should be implemented") + + def sync_all_async_calls(self, is_alive: int) -> bool: + """Check if all ranks have completed async checkpoint writing + + Args: + is_alive (bool): if True, the current async request is not completed + + Returns: + bool: True if all ranks are done, False if at least one rank is still active. + + """ + ten = torch.tensor([is_alive], dtype=torch.int, device=torch.cuda.current_device()) + torch.distributed.all_reduce(ten) + return ten[0] == 0 + + @abstractmethod + def close(self): + """Terminate the async caller at exit of an application or some termination conditions""" + logger.info(f"AsyncCaller: {torch.distributed.get_rank()}, Destroying Async Caller") + + def __del__(self): + self.close() + + +class TemporalAsyncCaller(AsyncCaller): + """Wrapper around mp.Process that ensures correct semantic of distributed finalization. + + Starts process asynchronously and allows checking if all processes on all ranks are done. + """ + + def __init__(self): + self.process: Optional[mp.Process] = None + self.start_time: Optional[float] = None + + @_disable_gc() + def schedule_async_call(self, async_req: AsyncRequest) -> None: + """Spawn a process with `async_fn` as the target. + + This method must be called on all ranks. + + Args: + async_fn (Callable, optional): async function to call. If None, + no process will be started. 
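# Illustrative sketch (not part of this patch) of the completion check that
# `AsyncCaller.sync_all_async_calls` builds on: every rank contributes a 0/1
# "still writing" flag and a single integer all-reduce (SUM by default) decides
# whether any rank is still busy. The helper name is hypothetical.
import torch

def _all_ranks_done(local_still_alive: bool) -> bool:
    flag = torch.tensor(
        [int(local_still_alive)], dtype=torch.int, device=torch.cuda.current_device()
    )
    torch.distributed.all_reduce(flag)  # SUM across ranks
    return flag.item() == 0  # zero means no rank is still writing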
+ async_req (AsyncRequest): `AsyncRequest` object containing to + start async process + """ + if async_req.async_fn is None: + return # nothing to do + + async_fn_args = list(async_req.async_fn_args) + if async_req.preload_fn: + # If there's a preload_fn in `async_req`, we call this func + # to do the defined action in `async_req.preload_fn` to + # stage GPU tensors to its defined destination + async_fn_args[1] = async_req.preload_fn() + + rank = torch.distributed.get_rank() + start_sync = time() + torch.cuda.synchronize() + end_sync = time() + logger.debug(f"rank: {rank}, takes {end_sync - start_sync} to finish D2H ") + + ctx = mp.get_context('fork') + self.start_time = time() + self.process = ctx.Process( + target=async_req.async_fn, args=async_fn_args, kwargs=async_req.async_fn_kwargs + ) + self.process.start() + init_time = time() + logger.debug(f"rank: {rank}, takes {init_time - self.start_time} to schedule async ckpt ") + + def is_current_async_call_done(self, blocking: bool = False, no_dist: bool = False) -> bool: + """Check if async save is finished on all ranks. + + For semantic correctness, requires rank synchronization in each check. + This method must be called on all ranks. + + Args: + blocking (bool, optional): if True, will wait until the call is done + on all ranks. Otherwise, returns immediately if at least one rank + is still active. Defaults to False. + no_dist (bool, Optional): if True, training ranks simply check its + asynchronous checkpoint writer without synchronization. + + Returns: + bool: True if all ranks are done (immediately of after active wait + if `blocking` is True), False if at least one rank is still active. + """ + # The following takes the same overhead + # as torch.distributed.barrier (single integer all-reduce) + is_alive = int(self.process.is_alive()) if self.process is not None else 0 + is_done = not is_alive if no_dist else self.sync_all_async_calls(is_alive) + + if not is_done and blocking: + self.close() + is_done = True + return is_done + + def close(self): + if self.process: + logger.debug(f"rank: {torch.distributed.get_rank()}, joining self.process") + self.process.join() + self.process = None + logger.debug( + "TemporalAsyncCaller: Async process join finished " + f"after {time() - self.start_time:.2f}s from forking" + ) + self.start_time = None + + +class PersistentAsyncCaller(AsyncCaller): + """Wrapper around mp.Process that ensures correct semantic of distributed finalization. + + Starts process asynchronously and allows checking if all processes on all ranks are done. + """ + + def __init__(self): + self.process: mp.Process = None + self.start_time: Optional[float] = None + ctx = mp.get_context('spawn') + # main queue to deliver `AsyncRequest` from host to the ckpt worker + self.queue: mp.JoinableQueue = ctx.JoinableQueue() + # Queue used to synchronize for the completion of preloading tensors to host + # between a trainer and ckpt worker + self.preload_q: mp.JoinableQueue = ctx.JoinableQueue() + # Queue used to inform trainer when the saving is completed + self.comp_q: mp.Queue = ctx.Queue() + self.cur_item: int = None + self.cur_idx: int = -1 + + def schedule_async_call(self, async_req: AsyncRequest) -> None: + """Put `AsyncRequest` to the Persistent Async Caller + + This method must be called on all ranks. + + Args: + async_fn (Callable, optional): async function to call. If None, + no process will be started. 
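# Hedged sketch of a `preload_fn` as described by the AsyncRequest docstring: a
# self-contained callable (e.g. built with functools.partial) that stages GPU
# tensors to host memory and returns the staged object, which the caller then
# substitutes as the second positional argument of `async_fn`. The names
# `_stage_to_host` and `gpu_state_dict` are assumptions for illustration.
from functools import partial

import torch

def _stage_to_host(state_dict):
    # Copy every CUDA tensor to CPU so the forked/spawned writer never touches CUDA.
    return {
        k: v.detach().cpu() if isinstance(v, torch.Tensor) else v
        for k, v in state_dict.items()
    }

preload_fn = partial(_stage_to_host, gpu_state_dict)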
+ async_req (AsyncRequest): `AsyncRequest` object containing to + schedule a checkpointing request + """ + if async_req.async_fn is None: + return # nothing to do + + start_sync = end_sync = None + + self.start_time = time() + if self.process is None: + ctx = mp.get_context('spawn') + logger.info( + f"PersistentAsyncCaller: {torch.distributed.get_rank()}, Starting Async Caller" + ) + self.process: mp.Process = ctx.Process( + target=PersistentAsyncCaller.async_loop, + args=( + torch.distributed.get_rank(), + self.queue, + self.preload_q, + self.comp_q, + logger.getEffectiveLevel(), + ), + ) + self.process.start() + logger.info( + f"PersistentAsyncCaller: {torch.distributed.get_rank()}, Started Async Caller" + ) + + if async_req.preload_fn: + self.preload_q.put(async_req.call_idx) + self.queue.put(async_req) + logger.debug(f"rank: {torch.distributed.get_rank()}, put {async_req.call_idx}") + + if async_req.preload_fn: + start_sync = time() + # Synchronize for pre-staging tensors + self.preload_q.join() + end_sync = time() + logger.debug( + f"rank: {torch.distributed.get_rank()}, " + f"takes {end_sync - start_sync} to finish D2H " + ) + + init_time = time() + logger.debug( + f"rank: {torch.distributed.get_rank()}, takes {init_time - self.start_time} " + "to schedule async ckpt " + ) + + def is_current_async_call_done(self, blocking: bool = False, no_dist: bool = False) -> bool: + """Check if async save is finished on all ranks. + + For semantic correctness, requires rank synchronization in each check. + This method must be called on all ranks. + + Args: + blocking (bool, optional): if True, will wait until the call is done + on all ranks. Otherwise, returns immediately if at least one rank + is still active. Defaults to False. + no_dist (bool, Optional): if True, training ranks simply check its + asynchronous checkpoint writer without synchronization. + + Returns: + bool: True if all ranks are done (immediately of after active wait + if `blocking` is True), False if at least one rank is still active. + """ + + is_alive: bool = False + + if self.process: + while self.cur_item is None: + try: + # Retrieve comp call_idx without waiting + self.cur_item = self.comp_q.get_nowait() + except Empty: + # This method is called after any `AsyncRequest` is pushed to the main loop + # So, the background writing is still active + # before the worker put call_idx to `comp_q` + if not blocking: + is_alive = True + break + sleep(0.1) + + if self.cur_item is not None: + logger.debug( + f"rank: {torch.distributed.get_rank()}, item: {self.cur_item}" + f" is completed, {is_alive}" + ) + + is_done = not is_alive if no_dist else self.sync_all_async_calls(is_alive) + # This is set to False when blocking == False so this routine is called again + # to simply call `sync_all_async_calls` to check if other ranks complete the writing + if is_done: + # The current request is completed globally. Reset the current item for polling. 
+            logger.debug(
+                f"rank: {torch.distributed.get_rank()}, item: {self.cur_item}"
+                f" is completed globally, {is_done}"
+            )
+            self.cur_item = None
+
+        return is_done
+
+    def close(self):
+        """Ask the persistent worker to exit and join its process."""
+        logger.info(
+            f"PersistentAsyncCaller: {torch.distributed.get_rank()}, Destroying Async Caller"
+        )
+        if self.process:
+            self.queue.put('DONE')
+            self.queue.join()
+            self.process.join()
+            self.process = None
+
+    @staticmethod
+    @_disable_gc()
+    def async_loop(
+        rank: int,
+        queue: mp.JoinableQueue,
+        preload_q: mp.JoinableQueue,
+        comp_q: mp.Queue,
+        log_level: int = logging.INFO,
+    ):
+        """Main function for the persistent checkpoint worker.
+
+        The persistent worker is created once and terminated at exit or
+        when the application calls `close()` explicitly.
+
+        This routine receives an `AsyncRequest`, runs its `preload_fn` first and
+        puts the call index in `preload_q` to inform the trainer to proceed.
+        When the `async_fn` from the request is completed (background saving is done),
+        it puts the call index into `comp_q` to notify the trainer of the completion.
+
+        Args:
+            rank (int): the rank of the trainer where the persistent worker is created.
+            queue (mp.JoinableQueue): the main queue used to receive `AsyncRequest`s
+                from the training rank
+            preload_q (mp.JoinableQueue): a queue to inform the trainer that preloading of
+                tensors from GPU to host (or another dedicated location) is completed
+            comp_q (mp.Queue): a queue to inform the training rank of the completion of a
+                scheduled async checkpoint request
+            log_level (int, optional): log level for this spawned process, aligned with
+                the training rank's logging level
+
+        """
+        logger = logging.getLogger(__name__)
+        logger.setLevel(log_level)
+        logger.info(f"PersistentAsyncCaller: persistent ckpt worker for {rank} has started")
+        while True:
+            item = queue.get()
+            if isinstance(item, str) and item == 'DONE':
+                queue.task_done()
+                break
+            elif isinstance(item, AsyncRequest):
+                async_fn_args = list(item.async_fn_args)
+                if item.preload_fn:
+                    call_idx = preload_q.get()
+                    # the 2nd arg is the state dict
+                    async_fn_args[1] = item.preload_fn()
+                    logger.debug(f"{rank} has completed D2H of {call_idx}")
+                    preload_q.task_done()
+                item.async_fn(*async_fn_args, **item.async_fn_kwargs)
+                logger.debug(f"{rank} has completed saving {item.call_idx}")
+                comp_q.put(item.call_idx)
+                queue.task_done()
+
+        logger.info(f"PersistentAsyncCaller: persistent ckpt worker for {rank} has terminated")
+
+
+class _ActiveAsyncRequest(NamedTuple):
+    """Helper to represent an active async call.
+
+    Args:
+        idx (int): index of the call (starting from 0)
+        async_caller (AsyncCaller): async caller instance that represents
+            the async process handling the async request
+        async_request (AsyncRequest): async request that is being called
+    """
+
+    idx: int
+    async_caller: AsyncCaller
+    async_request: AsyncRequest
+
+
+class AsyncCallsQueue:
+    """Manages a queue of async calls.
+
+    Allows adding a new async call with `schedule_async_request` and finalizing
+    active calls with `maybe_finalize_async_calls`.
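# Condensed sketch (an illustration, not part of this patch) of the trainer/worker
# handshake that PersistentAsyncCaller implements with the three queues used by
# `async_loop` above:
#
#   trainer                              persistent worker (async_loop)
#   preload_q.put(call_idx)
#   queue.put(async_req)          ---->  item = queue.get()
#                                        preload_q.get(); item.preload_fn(); preload_q.task_done()
#   preload_q.join()   # returns once D2H staging is acknowledged
#                                        item.async_fn(...); comp_q.put(item.call_idx); queue.task_done()
#   comp_q.get_nowait()  # polled later by is_current_async_call_done()
#   queue.put('DONE')    # close(): asks the worker loop to exit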
+    """
+
+    def __init__(self, persistent: bool = False):
+        self.async_calls: deque[_ActiveAsyncRequest] = deque([])
+        self.call_idx: int = -1
+        self.persistent: bool = persistent
+        self.persistent_caller: AsyncCaller = None
+
+    def _get_async_caller(self):
+        """Return a fresh TemporalAsyncCaller or the shared PersistentAsyncCaller."""
+        if not self.persistent:
+            return TemporalAsyncCaller()
+        if self.persistent_caller is None:
+            self.persistent_caller = PersistentAsyncCaller()
+        return self.persistent_caller
+
+    def schedule_async_request(self, async_request: AsyncRequest) -> int:
+        """Start a new async call and add it to a queue of active async calls.
+
+        This method must be called on all ranks.
+
+        Args:
+            async_request (AsyncRequest): async request to start.
+
+        Returns:
+            int: index of the async call that was started.
+                This can help the user keep track of the async calls.
+        """
+        self.call_idx += 1
+        async_caller = self._get_async_caller()
+        # Backward compatibility for local checkpointing built with the old AsyncRequest
+        if len(async_request._fields) != len(AsyncRequest._fields):
+            async_request = AsyncRequest(**async_request._asdict())
+
+        async_request = async_request._replace(call_idx=self.call_idx)
+        finalize_fns = async_request.finalize_fns
+        async_request = async_request._replace(finalize_fns=None)
+        async_request = async_request.freeze()
+        async_caller.schedule_async_call(async_request)
+        self.async_calls.append(_ActiveAsyncRequest(self.call_idx, async_caller, finalize_fns))
+        return self.call_idx
+
+    def maybe_finalize_async_calls(self, blocking=False, no_dist=False) -> List[int]:
+        """Finalizes all available calls.
+
+        This method must be called on all ranks.
+
+        Args:
+            blocking (bool, optional): if True, will wait until all active requests
+                are done. Otherwise, finalizes only the async request that already
+                finished. Defaults to False.
+            no_dist (bool, optional): if True, skips the cross-rank synchronization
+                when checking for completion. Defaults to False.
+
+        Returns:
+            List[int]: list of indices (as returned by `schedule_async_request`)
+                of async calls that have been successfully finalized.
+        """
+        call_idx_finalized = []
+        while self.async_calls:
+            next_async_done = self.async_calls[0].async_caller.is_current_async_call_done(
+                blocking, no_dist
+            )
+            if not next_async_done:
+                break
+            with debug_time("finalize", logger):
+                call_idx, _, finalize_fns = self.async_calls.popleft()
+                ten = torch.tensor([call_idx], dtype=torch.int, device=torch.cuda.current_device())
+                torch.distributed.all_reduce(ten, op=torch.distributed.ReduceOp.MAX)
+                assert ten.item() == call_idx, (
+                    'Unmatched async calls. That probably means not all ranks are '
+                    'participating in async finalization'
+                )
+                for finalize_fn in finalize_fns:
+                    finalize_fn()
+                call_idx_finalized.append(call_idx)
+        return call_idx_finalized
+
+    def get_num_unfinalized_calls(self):
+        """Get the number of active async calls."""
+        return len(self.async_calls)
+
+    def close(self):
+        """Finalize all calls upon closing."""
+        self.maybe_finalize_async_calls(blocking=True)
+        if self.persistent and self.persistent_caller:
+            self.persistent_caller.close()
diff --git a/megatron/core/dist_checkpointing/strategies/base.py b/megatron/core/dist_checkpointing/strategies/base.py
index cdcdd49..7409a6a 100644
--- a/megatron/core/dist_checkpointing/strategies/base.py
+++ b/megatron/core/dist_checkpointing/strategies/base.py
@@ -1,227 +1,228 @@
-# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
-
-""" Strategies base interfaces.
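# Hedged usage sketch for the AsyncCallsQueue defined above; `write_fn`, `ckpt_path`
# and `staged_state_dict` are hypothetical placeholders - the real requests are
# produced by the async save strategies.
calls_queue = AsyncCallsQueue(persistent=True)  # reuse one spawned worker across saves

request = AsyncRequest(
    async_fn=write_fn, async_fn_args=(ckpt_path, staged_state_dict), finalize_fns=[]
)
request.add_finalize_fn(lambda: logger.info('checkpoint finalized'))

call_idx = calls_queue.schedule_async_request(request)  # must be called on all ranks
# ... training continues ...
finalized = calls_queue.maybe_finalize_async_calls(blocking=False)  # reap finished saves
calls_queue.close()  # blocks until every outstanding save is finalized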
""" - -from abc import ABC, abstractmethod -from collections import defaultdict -from enum import Enum -from pathlib import Path -from typing import Any, DefaultDict, Union - -from ..mapping import CheckpointingException, ShardedStateDict, StateDict -from .async_utils import AsyncCallsQueue, AsyncRequest - - -class StrategyAction(Enum): - """Specifies save vs load and sharded vs common action.""" - - LOAD_COMMON = 'load_common' - LOAD_SHARDED = 'load_sharded' - SAVE_COMMON = 'save_common' - SAVE_SHARDED = 'save_sharded' - - -default_strategies: DefaultDict[str, dict[tuple, Any]] = defaultdict(dict) - -async_calls = AsyncCallsQueue() - - -def get_default_strategy(action: StrategyAction, backend: str, version: int): - """Retrieves a default strategy for a given action, backend and version.""" - try: - if backend == 'zarr': - error_hint = ' Please install `zarr` and `tensorstore<=0.1.45` packages' - from .tensorstore import register_default_tensorstore_strategies - - register_default_tensorstore_strategies() - from .zarr import register_default_zarr_strategies - - register_default_zarr_strategies() - elif backend == 'torch_dist': - error_hint = ' Please use PyTorch version >=2.1' - from .torch import register_default_torch_strategies - - register_default_torch_strategies() - except ImportError as e: - raise CheckpointingException( - f'Cannot import a default strategy for: {(action.value, backend, version)}. ' - f'Error: {e}. Hint: {error_hint}' - ) from e - try: - return default_strategies[action.value][(backend, version)] - except KeyError as e: - raise CheckpointingException( - f'Cannot find a default strategy for: {(action.value, backend, version)}' - ) from e - - -def register_default_strategy( - action: StrategyAction, - backend: str, - version: int, - strategy: Union['SaveStrategyBase', 'LoadStrategyBase'], -): - """Adds a given strategy to the registry of default strategies. - - Args: - action (StrategyAction): specifies save/load and sharded/common - backend (str): backend that the strategy becomes a default for - version (int): version that the strategy becomes a default for - strategy (SaveStrategyBase, LoadStrategyBase): strategy to register - """ - default_strategies[action.value][(backend, version)] = strategy - - -class LoadStrategyBase(ABC): - """Base class for a load strategy. Requires implementing checks for compatibility with a - given checkpoint version.""" - - @abstractmethod - def check_backend_compatibility(self, loaded_backend): - """Verifies if this strategy is compatible with `loaded_backend`.""" - raise NotImplementedError - - @abstractmethod - def check_version_compatibility(self, loaded_version): - """Verifies if this strategy is compatible with `loaded_version`.""" - raise NotImplementedError - - @property - def can_handle_sharded_objects(self): - """Returns whether or not this strategy can handle loading ShardedObjects.""" - return False - - -class SaveStrategyBase(ABC): - """Base class for a save strategy. 
Requires defining a backend type and - version of the saved format.""" - - def __init__(self, backend: str, version: int): - self.backend = backend - self.version = version - - @property - def can_handle_sharded_objects(self): - """Returns whether or not this strategy can handle saving ShardedObjects.""" - return False - - def __str__(self): - return f'{self.__class__.__name__}({self.backend}, {self.version})' - - -class LoadCommonStrategy(LoadStrategyBase): - """Load strategy for common (non-sharded) objects""" - - @abstractmethod - def load_common(self, checkpoint_dir: Path): - """Load common part of the checkpoint.""" - raise NotImplementedError - - @abstractmethod - def load_sharded_objects( - self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path - ): - """Load sharded objects from the checkpoint.""" - raise NotImplementedError - - def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: - """Load just the metadata from the checkpoint.""" - if not self.can_handle_sharded_objects: - return {} - raise NotImplementedError - - -class LoadShardedStrategy(LoadStrategyBase): - """Load strategy for sharded tensors""" - - @abstractmethod - def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - """Load the sharded part of the checkpoint.""" - raise NotImplementedError - - @abstractmethod - def load_tensors_metadata(self, checkpoint_dir: Path): - """Load tensors metadata from the checkpoint for ShardedTensors. - - Returns a dictionary similar to a sharded state dict, but note that - the dictionary keys are simply ShardedTensor keys (contrary to the - actual sharded state dicts where keys correspond to state dict keys). - - Dict values are ShardedTensors without any data and sharding (so, the - only useful information is tensors global shape and dtype). - """ - raise NotImplementedError( - f'Loading only tensors metadata not implemented for {self.__class__.__name__}' - ) - - def load_sharded_metadata(self, checkpoint_dir: Path): - """Load sharded metadata from the checkpoint for ShardedTensors and ShardedObjects. - - Returns a dictionary similar to a sharded state dict, but note that - the dictionary keys are simply sharded keys (contrary to the - actual sharded state dicts where keys correspond to state dict keys). - - Dict values are ShardedTensors or ShardedObjects without any data and sharding. 
- """ - if not self.can_handle_sharded_objects: - return self.load_tensors_metadata(checkpoint_dir) - raise NotImplementedError( - f'Loading only sharded metadata not implemented for {self.__class__.__name__}' - ) - - def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): - """Remove all tensors whose key starts with key_prefix""" - raise NotImplementedError - - -class SaveCommonStrategy(SaveStrategyBase): - """Save strategy for common (non-sharded) objects""" - - @abstractmethod - def save_common(self, common_state_dict: StateDict, checkpoint_dir: Path): - """Save common part of the state dict.""" - raise NotImplementedError - - def save_sharded_objects( - self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path - ): - """Save sharded objects from the state dict.""" - raise NotImplementedError - - -class SaveShardedStrategy(SaveStrategyBase): - """Save strategy for sharded tensors""" - - @abstractmethod - def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - """Save the sharded part of the state dict.""" - raise NotImplementedError - - -class AsyncSaveShardedStrategy(SaveShardedStrategy): - """Save strategy suitable for async save.""" - - @abstractmethod - def async_save( - self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path - ) -> AsyncRequest: - """Perform preparation and return an AsyncRequest to the external caller. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to save - checkpoint_dir (Path): checkpoint target directory - - Returns: - AsyncRequest: represents the async save function and finalization function. - It is the caller responsibility to actually schedule the async save. - """ - raise NotImplementedError - - def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - """Each async strategy can be trivially used as a sync strategy.""" - async_request = self.async_save(sharded_state_dict, checkpoint_dir) - # multiprocessing routines may cause issue when called on parent process - # We keep this verbose call for now - global async_calls - async_calls.schedule_async_request(async_request) - async_calls.maybe_finalize_async_calls(blocking=True) +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Strategies base interfaces. 
""" + +from abc import ABC, abstractmethod +from collections import defaultdict +from enum import Enum +from pathlib import Path +from typing import Any, DefaultDict, Union + +from ..mapping import CheckpointingException, ShardedStateDict, StateDict +from .async_utils import AsyncCallsQueue, AsyncRequest + + +class StrategyAction(Enum): + """Specifies save vs load and sharded vs common action.""" + + LOAD_COMMON = 'load_common' + LOAD_SHARDED = 'load_sharded' + SAVE_COMMON = 'save_common' + SAVE_SHARDED = 'save_sharded' + + +default_strategies: DefaultDict[str, dict[tuple, Any]] = defaultdict(dict) + +async_calls = AsyncCallsQueue() + + +def get_default_strategy(action: StrategyAction, backend: str, version: int): + """Retrieves a default strategy for a given action, backend and version.""" + error_hint: str = None + try: + if backend == 'zarr': + error_hint = ' Please install `zarr` and `tensorstore!=0.1.46` packages' + from .tensorstore import register_default_tensorstore_strategies + + register_default_tensorstore_strategies() + from .zarr import register_default_zarr_strategies + + register_default_zarr_strategies() + elif backend == 'torch_dist': + error_hint = ' Please use PyTorch version >=2.1' + from .torch import register_default_torch_strategies + + register_default_torch_strategies() + except ImportError as e: + raise CheckpointingException( + f'Cannot import a default strategy for: {(action.value, backend, version)}. ' + f'Error: {e}. Hint: {error_hint}' + ) from e + try: + return default_strategies[action.value][(backend, version)] + except KeyError as e: + raise CheckpointingException( + f'Cannot find a default strategy for: {(action.value, backend, version)}' + ) from e + + +def register_default_strategy( + action: StrategyAction, + backend: str, + version: int, + strategy: Union['SaveStrategyBase', 'LoadStrategyBase'], +): + """Adds a given strategy to the registry of default strategies. + + Args: + action (StrategyAction): specifies save/load and sharded/common + backend (str): backend that the strategy becomes a default for + version (int): version that the strategy becomes a default for + strategy (SaveStrategyBase, LoadStrategyBase): strategy to register + """ + default_strategies[action.value][(backend, version)] = strategy + + +class LoadStrategyBase(ABC): + """Base class for a load strategy. Requires implementing checks for compatibility with a + given checkpoint version.""" + + @abstractmethod + def check_backend_compatibility(self, loaded_backend): + """Verifies if this strategy is compatible with `loaded_backend`.""" + raise NotImplementedError + + @abstractmethod + def check_version_compatibility(self, loaded_version): + """Verifies if this strategy is compatible with `loaded_version`.""" + raise NotImplementedError + + @property + def can_handle_sharded_objects(self): + """Returns whether or not this strategy can handle loading ShardedObjects.""" + return False + + +class SaveStrategyBase(ABC): + """Base class for a save strategy. 
Requires defining a backend type and + version of the saved format.""" + + def __init__(self, backend: str, version: int): + self.backend = backend + self.version = version + + @property + def can_handle_sharded_objects(self): + """Returns whether or not this strategy can handle saving ShardedObjects.""" + return False + + def __str__(self): + return f'{self.__class__.__name__}({self.backend}, {self.version})' + + +class LoadCommonStrategy(LoadStrategyBase): + """Load strategy for common (non-sharded) objects""" + + @abstractmethod + def load_common(self, checkpoint_dir: Path): + """Load common part of the checkpoint.""" + raise NotImplementedError + + @abstractmethod + def load_sharded_objects( + self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path + ): + """Load sharded objects from the checkpoint.""" + raise NotImplementedError + + def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: + """Load just the metadata from the checkpoint.""" + if not self.can_handle_sharded_objects: + return {} + raise NotImplementedError + + +class LoadShardedStrategy(LoadStrategyBase): + """Load strategy for sharded tensors""" + + @abstractmethod + def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + """Load the sharded part of the checkpoint.""" + raise NotImplementedError + + @abstractmethod + def load_tensors_metadata(self, checkpoint_dir: Path): + """Load tensors metadata from the checkpoint for ShardedTensors. + + Returns a dictionary similar to a sharded state dict, but note that + the dictionary keys are simply ShardedTensor keys (contrary to the + actual sharded state dicts where keys correspond to state dict keys). + + Dict values are ShardedTensors without any data and sharding (so, the + only useful information is tensors global shape and dtype). + """ + raise NotImplementedError( + f'Loading only tensors metadata not implemented for {self.__class__.__name__}' + ) + + def load_sharded_metadata(self, checkpoint_dir: Path): + """Load sharded metadata from the checkpoint for ShardedTensors and ShardedObjects. + + Returns a dictionary similar to a sharded state dict, but note that + the dictionary keys are simply sharded keys (contrary to the + actual sharded state dicts where keys correspond to state dict keys). + + Dict values are ShardedTensors or ShardedObjects without any data and sharding. 
+ """ + if not self.can_handle_sharded_objects: + return self.load_tensors_metadata(checkpoint_dir) + raise NotImplementedError( + f'Loading only sharded metadata not implemented for {self.__class__.__name__}' + ) + + def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): + """Remove all tensors whose key starts with key_prefix""" + raise NotImplementedError + + +class SaveCommonStrategy(SaveStrategyBase): + """Save strategy for common (non-sharded) objects""" + + @abstractmethod + def save_common(self, common_state_dict: StateDict, checkpoint_dir: Path): + """Save common part of the state dict.""" + raise NotImplementedError + + def save_sharded_objects( + self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path + ): + """Save sharded objects from the state dict.""" + raise NotImplementedError + + +class SaveShardedStrategy(SaveStrategyBase): + """Save strategy for sharded tensors""" + + @abstractmethod + def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + """Save the sharded part of the state dict.""" + raise NotImplementedError + + +class AsyncSaveShardedStrategy(SaveShardedStrategy): + """Save strategy suitable for async save.""" + + @abstractmethod + def async_save( + self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path + ) -> AsyncRequest: + """Perform preparation and return an AsyncRequest to the external caller. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to save + checkpoint_dir (Path): checkpoint target directory + + Returns: + AsyncRequest: represents the async save function and finalization function. + It is the caller responsibility to actually schedule the async save. + """ + raise NotImplementedError + + def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + """Each async strategy can be trivially used as a sync strategy.""" + async_request = self.async_save(sharded_state_dict, checkpoint_dir) + # multiprocessing routines may cause issue when called on parent process + # We keep this verbose call for now + global async_calls + async_calls.schedule_async_request(async_request) + async_calls.maybe_finalize_async_calls(blocking=True) diff --git a/megatron/core/dist_checkpointing/strategies/cached_metadata_filesystem_reader.py b/megatron/core/dist_checkpointing/strategies/cached_metadata_filesystem_reader.py new file mode 100644 index 0000000..a6fc2c5 --- /dev/null +++ b/megatron/core/dist_checkpointing/strategies/cached_metadata_filesystem_reader.py @@ -0,0 +1,38 @@ +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +""" FS Reader with metadata cached support. """ + +import os +from typing import Union + +from torch.distributed.checkpoint import FileSystemReader, Metadata + + +class CachedMetadataFileSystemReader(FileSystemReader): + """ + Extends FileSystemReader to cache metadata for improved performance. + + Attributes: + _cached_metadata (Metadata or None): Cached metadata from the file system. + """ + + def __init__(self, path: Union[str, os.PathLike]) -> None: + """ + Initialize with file system path. + + Args: + path (Union[str, os.PathLike]): Path to the checkpoint directory or file. + """ + super().__init__(path=path) + self._cached_metadata = None + + def read_metadata(self) -> Metadata: + """ + Read metadata from file system, caching for subsequent calls. + + Returns: + Metadata: Checkpoint metadata. 
+ """ + if self._cached_metadata is None: + self._cached_metadata = super().read_metadata() + return self._cached_metadata diff --git a/megatron/core/dist_checkpointing/strategies/common.py b/megatron/core/dist_checkpointing/strategies/common.py index f2c87b4..8e51b5c 100644 --- a/megatron/core/dist_checkpointing/strategies/common.py +++ b/megatron/core/dist_checkpointing/strategies/common.py @@ -1,157 +1,157 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -""" Common strategies. """ - -import logging -import os -from pathlib import Path - -import torch - -from megatron.core.dist_checkpointing.mapping import ShardedStateDict, StateDict -from megatron.core.dist_checkpointing.strategies.base import ( - SaveCommonStrategy, - StrategyAction, - register_default_strategy, -) - -from ..dict_utils import dict_list_map_inplace, nested_values -from ..mapping import CheckpointingException, ShardedObject, is_main_replica -from ..strategies.base import LoadCommonStrategy - -COMMON_STATE_FNAME = 'common.pt' - -logger = logging.getLogger(__name__) - - -def register_default_common_strategies(): - """Register default common strategies.""" - register_default_strategy(StrategyAction.LOAD_COMMON, 'torch', 1, TorchCommonLoadStrategy()) - register_default_strategy( - StrategyAction.SAVE_COMMON, 'torch', 1, TorchCommonSaveStrategy('torch', 1) - ) - - -class TorchCommonSaveStrategy(SaveCommonStrategy): - """Common save strategy leveraging native torch save/load.""" - - def save_common(self, common_state_dict: StateDict, checkpoint_dir: Path): - """Save common part of the state dict.""" - if torch.distributed.get_rank() == 0: - torch.save(common_state_dict, checkpoint_dir / COMMON_STATE_FNAME) - - def save_sharded_objects( - self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path - ): - """Save sharded objects from the state dict.""" - for sh_obj in nested_values(sharded_objects_state_dict): - if is_main_replica(sh_obj.replica_id): - save_path = checkpoint_dir / f'{sh_obj.unique_key}.pt' - os.makedirs(save_path.parent, exist_ok=True) - torch.save(sh_obj.data, save_path) - - def can_handle_sharded_objects(self): - """This strategy can handle ShardedObjects.""" - return True - - -class TorchCommonLoadStrategy(LoadCommonStrategy): - """Common load strategy leveraging native torch save/load.""" - - def load_common(self, checkpoint_dir: Path): - """Load common (non-sharded) objects state dict from the checkpoint. - - Args: - checkpoint_dir (Path): checkpoint directory - - Returns: - StateDict: state dict with non-sharded objects from the checkpoint - """ - load_path = Path(checkpoint_dir) / COMMON_STATE_FNAME - try: - return torch.load(load_path, map_location='cpu') - except FileNotFoundError as e: - err_msg = f'Common file {load_path} does not exist' - ckpt_files = [f.name for f in checkpoint_dir.iterdir()] - logger.debug(f'{err_msg}. Checkpoint directory content: {ckpt_files}') - raise CheckpointingException(err_msg) from e - - def load_sharded_objects( - self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path - ): - """Replaces all ShardedObject from a given state dict with values loaded from the - checkpoint. - - Args: - sharded_objects_state_dict (ShardedStateDict): - sharded state dict defining what objects should be loaded. 
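# Hedged usage sketch for CachedMetadataFileSystemReader above: the first load parses
# the checkpoint's `.metadata` file, later calls reuse the cached object. `ckpt_dir`
# and `sharded_state_dict` are hypothetical; the load call is plain
# torch.distributed.checkpoint usage, not an API added by this patch.
import torch.distributed.checkpoint as dcp

reader = CachedMetadataFileSystemReader(ckpt_dir)
dcp.load_state_dict(state_dict=sharded_state_dict, storage_reader=reader)
metadata = reader.read_metadata()  # served from the cache on repeated calls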
- checkpoint_dir (Path): checkpoint directory - - Returns: - None: sharded state dict is modified in place - """ - - def load_sharded_object(sh_obj: ShardedObject): - sh_obj.data = None - load_path = checkpoint_dir / f'{sh_obj.unique_key}.pt' - try: - loaded_obj = torch.load(load_path) - except FileNotFoundError as e: - # Backward compatible logic: previously the save format was incorrect - old_load_path = (checkpoint_dir / sh_obj.unique_key).with_suffix('.pt') - try: - loaded_obj = torch.load(old_load_path) - except FileNotFoundError: - err_msg = f'Object shard {load_path} not found' - obj_subdir = checkpoint_dir / sh_obj.key - if obj_subdir.exists(): - obj_files = [f.name for f in obj_subdir.iterdir()] - logger.debug( - f'{err_msg}. Object {sh_obj.key} directory content: {obj_files}' - ) - else: - ckpt_files = [f.name for f in checkpoint_dir.iterdir()] - logger.debug( - f'{err_msg}. Object {sh_obj.key} directory does not exist. Checkpoint' - f' directory content: {ckpt_files}' - ) - raise CheckpointingException(err_msg) from e - return loaded_obj - - return dict_list_map_inplace(load_sharded_object, sharded_objects_state_dict) - - def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: - sharded_metadata = {} - for subdir in checkpoint_dir.iterdir(): - if not subdir.is_dir(): - continue - shard_files = list(subdir.glob('shard_*.pt')) - if not shard_files: - continue - sh_objs = [] - for shard_file in shard_files: - full_key = f'{subdir.name}/{shard_file.stem}' - sh_objs.append(ShardedObject.empty_from_unique_key(full_key)) - - # This is a backward-compatibility fix, where the last global shape is missing in the - # name - if sh_objs[0].global_shape[-1] < 0: - max_last_offset = max(map(lambda sh_obj: sh_obj.global_offset[-1], sh_objs)) - for sh_obj in sh_objs: - sh_obj.global_shape = (*sh_obj.global_shape[:-1], max_last_offset + 1) - - # Update the sharded state dict - for sh_obj in sh_objs: - sharded_metadata[sh_obj.unique_key] = sh_obj - return sharded_metadata - - @property - def can_handle_sharded_objects(self): - """This strategy can handle ShardedObjects.""" - return True - - def check_backend_compatibility(self, loaded_version): - pass - - def check_version_compatibility(self, loaded_version): - pass +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +""" Common strategies. 
""" + +import logging +import os +from pathlib import Path + +import torch + +from megatron.core.dist_checkpointing.mapping import ShardedStateDict, StateDict +from megatron.core.dist_checkpointing.strategies.base import ( + SaveCommonStrategy, + StrategyAction, + register_default_strategy, +) + +from ..dict_utils import dict_list_map_inplace, nested_values +from ..mapping import CheckpointingException, ShardedObject, is_main_replica +from ..strategies.base import LoadCommonStrategy + +COMMON_STATE_FNAME = 'common.pt' + +logger = logging.getLogger(__name__) + + +def register_default_common_strategies(): + """Register default common strategies.""" + register_default_strategy(StrategyAction.LOAD_COMMON, 'torch', 1, TorchCommonLoadStrategy()) + register_default_strategy( + StrategyAction.SAVE_COMMON, 'torch', 1, TorchCommonSaveStrategy('torch', 1) + ) + + +class TorchCommonSaveStrategy(SaveCommonStrategy): + """Common save strategy leveraging native torch save/load.""" + + def save_common(self, common_state_dict: StateDict, checkpoint_dir: Path): + """Save common part of the state dict.""" + if torch.distributed.get_rank() == 0: + torch.save(common_state_dict, checkpoint_dir / COMMON_STATE_FNAME) + + def save_sharded_objects( + self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path + ): + """Save sharded objects from the state dict.""" + for sh_obj in nested_values(sharded_objects_state_dict): + if is_main_replica(sh_obj.replica_id): + save_path = checkpoint_dir / f'{sh_obj.unique_key}.pt' + os.makedirs(save_path.parent, exist_ok=True) + torch.save(sh_obj.data, save_path) + + def can_handle_sharded_objects(self): + """This strategy can handle ShardedObjects.""" + return True + + +class TorchCommonLoadStrategy(LoadCommonStrategy): + """Common load strategy leveraging native torch save/load.""" + + def load_common(self, checkpoint_dir: Path): + """Load common (non-sharded) objects state dict from the checkpoint. + + Args: + checkpoint_dir (Path): checkpoint directory + + Returns: + StateDict: state dict with non-sharded objects from the checkpoint + """ + load_path = Path(checkpoint_dir) / COMMON_STATE_FNAME + try: + return torch.load(load_path, map_location='cpu', weights_only=False) + except FileNotFoundError as e: + err_msg = f'Common file {load_path} does not exist' + ckpt_files = [f.name for f in checkpoint_dir.iterdir()] + logger.debug(f'{err_msg}. Checkpoint directory content: {ckpt_files}') + raise CheckpointingException(err_msg) from e + + def load_sharded_objects( + self, sharded_objects_state_dict: ShardedStateDict, checkpoint_dir: Path + ): + """Replaces all ShardedObject from a given state dict with values loaded from the + checkpoint. + + Args: + sharded_objects_state_dict (ShardedStateDict): + sharded state dict defining what objects should be loaded. 
+ checkpoint_dir (Path): checkpoint directory + + Returns: + None: sharded state dict is modified in place + """ + + def load_sharded_object(sh_obj: ShardedObject): + sh_obj.data = None + load_path = checkpoint_dir / f'{sh_obj.unique_key}.pt' + try: + loaded_obj = torch.load(load_path, weights_only=False) + except FileNotFoundError as e: + # Backward compatible logic: previously the save format was incorrect + old_load_path = (checkpoint_dir / sh_obj.unique_key).with_suffix('.pt') + try: + loaded_obj = torch.load(old_load_path, weights_only=False) + except FileNotFoundError: + err_msg = f'Object shard {load_path} not found' + obj_subdir = checkpoint_dir / sh_obj.key + if obj_subdir.exists(): + obj_files = [f.name for f in obj_subdir.iterdir()] + logger.debug( + f'{err_msg}. Object {sh_obj.key} directory content: {obj_files}' + ) + else: + ckpt_files = [f.name for f in checkpoint_dir.iterdir()] + logger.debug( + f'{err_msg}. Object {sh_obj.key} directory does not exist. Checkpoint' + f' directory content: {ckpt_files}' + ) + raise CheckpointingException(err_msg) from e + return loaded_obj + + return dict_list_map_inplace(load_sharded_object, sharded_objects_state_dict) + + def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: + sharded_metadata = {} + for subdir in checkpoint_dir.iterdir(): + if not subdir.is_dir(): + continue + shard_files = list(subdir.glob('shard_*.pt')) + if not shard_files: + continue + sh_objs = [] + for shard_file in shard_files: + full_key = f'{subdir.name}/{shard_file.stem}' + sh_objs.append(ShardedObject.empty_from_unique_key(full_key)) + + # This is a backward-compatibility fix, where the last global shape is missing in the + # name + if sh_objs[0].global_shape[-1] < 0: + max_last_offset = max(map(lambda sh_obj: sh_obj.global_offset[-1], sh_objs)) + for sh_obj in sh_objs: + sh_obj.global_shape = (*sh_obj.global_shape[:-1], max_last_offset + 1) + + # Update the sharded state dict + for sh_obj in sh_objs: + sharded_metadata[sh_obj.unique_key] = sh_obj + return sharded_metadata + + @property + def can_handle_sharded_objects(self): + """This strategy can handle ShardedObjects.""" + return True + + def check_backend_compatibility(self, loaded_version): + pass + + def check_version_compatibility(self, loaded_version): + pass diff --git a/megatron/core/dist_checkpointing/strategies/filesystem_async.py b/megatron/core/dist_checkpointing/strategies/filesystem_async.py index 47ab4d1..ef7053b 100644 --- a/megatron/core/dist_checkpointing/strategies/filesystem_async.py +++ b/megatron/core/dist_checkpointing/strategies/filesystem_async.py @@ -1,439 +1,496 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -""" Storage writer for PyT Distributed format allowing asynchronous save. 
""" -import gc -import logging -import os -import queue -from contextlib import contextmanager -from itertools import chain -from pathlib import Path -from time import time -from typing import Callable, Dict, List, Optional, Tuple, Union - -import psutil -import torch -from torch import multiprocessing as mp -from torch.distributed.checkpoint import FileSystemWriter -from torch.distributed.checkpoint.filesystem import DEFAULT_SUFFIX, _StoragePrefix, _write_item -from torch.distributed.checkpoint.planner import SavePlan, SavePlanner, WriteItem, WriteItemType -from torch.distributed.checkpoint.storage import WriteResult -from torch.futures import Future - -logger = logging.getLogger(__name__) - -WriteBucket = Tuple[Path, str, Tuple[list, list]] # represents writes to a single file - -_results_queue = None - - -def _get_write_results_queue(): - global _results_queue - if _results_queue is None: - ctx = mp.get_context('spawn') - _results_queue = ctx.Manager().Queue() - return _results_queue - - -@contextmanager -def _disable_gc(): - """Temporarily disables GC.""" - gc_enabled = gc.isenabled() - try: - if gc_enabled: - gc.disable() - yield - finally: - if gc_enabled: - gc.enable() - - -class FileSystemWriterAsync(FileSystemWriter): - """ - Async-enabled implementation of FileSystemWriter using file IO. - - This class doesn't spawn the async process itself, relies on the external async mechanism. - - Flow: - 1. Call `write_data` - 2. Externally start async process with `get_save_function_and_args` function and args - 3. The async function to call is `writer_proxy_func` which calls - `write_preloaded_data` in multiple processes - - After saving is finalized on all ranks: - 4. Call `super().finish` with the results gathered in `self.writer_result` - - Note that step (3) above can also be called synchronously. - - Currently, it's assumed that a separate writer is created for each ckpt save - (intermediate state is stored as writer attributes). - """ - - def __init__(self, *args, separation_hint: Optional[str] = None, **kwargs): - super().__init__(*args, **kwargs) - if not self.single_file_per_rank: - raise NotImplementedError( - 'single_file_per_rank flag not supported for FileSystemWriterAsync' - ) - - # Intermediate state between preparation and finalization - self.write_buckets: Optional[List[WriteBucket]] = None - self.results_queue: Optional[mp.Queue] = None - self.separation_hint = separation_hint - - def prepare_write_data(self, plan: SavePlan, planner: SavePlanner) -> None: - """ - First stage of async saving. Copy data to CPU and plan the local saving. 
- - Args: - plan (SavePlan): save plan generated by the PyT Distributed compatible planner - planner (SavePlanner): save planner used to resolve the bytes and tensor data - - Returns: None, but stores the save plan in `self.write_buckets` - """ - storage_plan: _StoragePrefix = plan.storage_data - start = time() - logger.debug(f"thread_count: {self.thread_count}, time: {start}") - if self.separation_hint: - assert ( - self.thread_count > 1 - ), "thread_count must be at least 2 if separation_hint is provided" - bins = self.thread_count // 2 if self.separation_hint is not None else self.thread_count - item_buckets = _split_by_size_and_type(bins, plan.items, self.separation_hint) - logger.debug(f"bucket_prep, time: {time() - start}") - - start = time() - # move tensors from GPU to CPU before starting async writing - # We do D2H synchronously for now - file_count = 0 - - def gen_file(prefix=""): - nonlocal file_count - file_name = f"{prefix}{storage_plan.prefix}{file_count}{DEFAULT_SUFFIX}" - file_count += 1 - return file_name - - # Prepare bytes / tensor data in each bucket, which will be assigned to each writer process - self.write_buckets = [] - for group_name, group_buckets in _split_by_separation_hint( - item_buckets, self.separation_hint - ).items(): - for bucket in group_buckets: - bytes_data = [ - (item, planner.resolve_data(item)) - for item in bucket - if item.type == WriteItemType.BYTE_IO - ] - tensor_data = [ - (item, planner.resolve_data(item).detach().to("cpu", non_blocking=True)) - for item in bucket - if item.type != WriteItemType.BYTE_IO - ] - if len(bytes_data) > 0 or len(tensor_data) > 0: - file_name = gen_file(prefix=group_name) - self.write_buckets.append( - (self.path / file_name, file_name, (bytes_data, tensor_data)) - ) - - # Check if there is anything to write on this rank - if len(self.write_buckets) > 0: - assert len(self.write_buckets) <= self.thread_count, ( - len(self.write_buckets), - self.thread_count, - ) - self.results_queue = _get_write_results_queue() - else: - self.results_queue = None - end = time() - logger.debug(f"D2H and push, time: {end - start}") - - def get_save_function_and_args(self) -> Tuple[Optional[Callable], Tuple]: - """ - Get function that saves the data to storage along with its arguments. - Allows the external caller to apply the save function synchronously or asynchronously. - - Returns: None (if there is nothing to write on this rank) or a tuple of: - - the function that saves the data - - arguments to that function - """ - if not self.write_buckets: - return None, () - return (self.write_preloaded_data_multiproc, (self.write_buckets, self.results_queue)) - - @staticmethod - @_disable_gc() - def write_preloaded_data_multiproc( - write_buckets: List[WriteBucket], global_results_queue: mp.Queue - ) -> None: - """ - Performs saving data to storage with multiple processes. - - Starts predefined number of processes and uses 2 queues to make sure the results - are complete: - - local_results_queue - to send the actual results - - count_queue - small queue to mark worker as completed - - Using just one queue disallowed proper exception handling. - - This method is meant to be run in a forked subprocess. - Triggering GC during execution leads to CUDA errors - (cleaning up tensors owned by the parent process). - To prevent this, we disable the GC explicitly for this function with _disable_gc. 
- - Args: - write_buckets (List[WriteBucket]): write plan - global_results_queue (mp.Queue): mp.Queue to collect Dict[List[WriteResults]] - (or an Exception) from parallel write processes to the main training process - Returns: None - """ - w_start = time() - write_results_or_exc: Union[dict, Exception] = dict() - ctx = mp.get_context('fork') - local_results_queue = ctx.Queue() - count_queue = ctx.JoinableQueue() - p_list = [] - for i, write_bucket in enumerate(write_buckets): - try: - count_queue.put(i) - p_list.append( - ctx.Process( - target=FileSystemWriterAsync.write_preloaded_data, - args=(i, write_bucket, local_results_queue, count_queue, True), - ) - ) - except Exception as e: - err_msg = f'An error is caught while a proc {i} is created, error: {e}' - logger.error(err_msg) - write_results_or_exc = RuntimeError(err_msg) - - if not isinstance(write_results_or_exc, Exception): - for p in p_list: - p.start() - - logger.debug('FileSystemWriterAsync: collecting worker results...') - - # To make sure all nodes are completed - count_queue.join() - # At this point, all workers completed, so the queue should have exactly - # `len(write_buckets)` items - for proc_idx in range(len(write_buckets)): - try: - local_proc_idx, local_results_or_exc = local_results_queue.get() - except queue.Empty: - write_results_or_exc = RuntimeError( - f'Unexpected empty `local_results_queue`' - f' (got only {proc_idx}/{len(write_buckets)} items)' - ) - break - else: - if isinstance(local_results_or_exc, Exception): - err_msg = ( - f"Local process {local_proc_idx} encountered" - f" an error: {local_results_or_exc}" - ) - logger.error(err_msg) - write_results_or_exc = local_results_or_exc - break - else: - assert isinstance(local_results_or_exc, list), type(local_results_or_exc) - write_results_or_exc[local_proc_idx] = local_results_or_exc - p_list[local_proc_idx].join() - - logger.debug('FileSystemWriterAsync: collected worker results successfully') - - global_results_queue.put(write_results_or_exc) - - w_end = time() - logger.debug( - f"{w_end}, rank: {torch.distributed.get_rank()}," - f" write(sync,parallel): {w_end - w_start}" - ) - - @staticmethod - @_disable_gc() - def write_preloaded_data( - local_proc_idx: int, - write_bucket: WriteBucket, - results_queue: mp.SimpleQueue, - count_queue: mp.JoinableQueue, - use_fsync: bool, - ) -> None: - """ - Performs actual data saving to storage. - - Args: - local_proc_idx (int): index of a local process that performs writing - write_bucket (WriteBucket): data to write to storage - results_queue (mp.Queue): queue to return the write results - to the proxy checkpoint process. - count_queue (mp.JoinableQueue): queue to marks worker task as completed - use_fsync (bool): if True, calls os.fsync at the end of saving - - Returns: None, the write result are put into the `queue` - """ - mem_before = _process_memory() - - local_results = [] - try: - file_name, storage_key, (bytes_data, tensor_data) = write_bucket - with open(file_name, "wb") as stream: - for write_item, data in bytes_data: - local_results.append(_write_item(stream, data, write_item, storage_key)) - - for write_item, tensor in tensor_data: - assert tensor.is_cpu - local_results.append(_write_item(stream, tensor, write_item, storage_key)) - - if use_fsync: - os.fsync(stream.fileno()) - local_output = (local_proc_idx, local_results) - except Exception as e: - local_output = (local_proc_idx, e) - - results_queue.put(local_output) - # Signal this process is done. 
- count_queue.get() - count_queue.task_done() - - mem_after = _process_memory() - logger.debug( - f"{local_proc_idx} consumed: {mem_after - mem_before}," - f" before: {mem_before}, after: {mem_after}" - ) - - def write_data(self, plan: SavePlan, planner: SavePlanner) -> Future[List[WriteResult]]: - """Write all items from ``plan``.""" - raise NotImplementedError('write_data not implemented for FileSystemWriterAsync') - - def retrieve_write_results(self) -> List[WriteResult]: - """ - Turn the latest dict including write results from `self.results_queue` - into a single results lists. Includes error check. - - Returns (List[WriteResult]): the list of write results - from all local processes performing the save. - - """ - assert self.write_buckets is not None - - if self.results_queue is None: - write_results_or_exc = {} - else: - try: - write_results_or_exc = self.results_queue.get_nowait() - except queue.Empty: - raise RuntimeError(f'results_queue should not be empty') - - if isinstance(write_results_or_exc, Exception): - raise RuntimeError(f'Worker failure: {write_results_or_exc}') from write_results_or_exc - write_results: dict = write_results_or_exc - if len(write_results) != len(self.write_buckets): - raise RuntimeError( - f'Incomplete worker results (expected {len(self.write_buckets)},' - f' got {len(write_results)}. This probably indicates a worker failure.' - ) - return list(chain.from_iterable(write_results.values())) - - -def _split_by_size_and_type( - bins: int, items: List[WriteItem], separation_hint: Optional[str] = None -) -> List[List[WriteItem]]: - """ - Splits write items according to item size into close to uniform bins. - - Same as torch.distributed.checkpoint.filesystem._split_by_size_and_type, - but with a fixed _item_size function. 
- - Args: - bins (int): numbers of bins to split to - items (List[WriteItem]): list of write items - - Returns (List[List[WriteItem]]): write items split to bins - """ - if bins == 1: - return [items] - - bytes_items = [wi for wi in items if wi.type == WriteItemType.BYTE_IO] - tensor_items = [wi for wi in items if wi.type != WriteItemType.BYTE_IO] - - buckets: List[List[WriteItem]] = [[] for _ in range(bins)] - bucket_sizes = [0 for _ in range(bins)] - - tensor_items.sort(key=_item_size, reverse=True) - - # Assign bytes with a simple round-robin - for i, item in enumerate(bytes_items): - buckets[i % bins].append(item) - - # Then, assign tensors according to their sizes - for item in tensor_items: - # TODO replace with headq - idx = min(enumerate(bucket_sizes), key=lambda x: x[1])[0] - buckets[idx].append(item) - bucket_sizes[idx] += _item_size(item) - - return buckets - - -def _split_by_separation_hint( - buckets: List[List[WriteItem]], separation_hint: Optional[str] = None -) -> Dict[str, List[List[WriteItem]]]: - """ - Splits buckets into those whose keys begin with the separation_hint and those whose keys do not - - Args: - buckets (List[List[WriteItem]]): buckets to split - separation_hint (Optional[str]): optional prefix to split on - - Returns (Dict[str, List[List[WriteItem]]]): a dictionary - mapping the prefix to the relevant buckets - """ - bins = len(buckets) - buckets_with_separation_hint = {} - if separation_hint is not None: - buckets_default = [[] for _ in range(bins)] - buckets_hint = [[] for _ in range(bins)] - for i in range(bins): - for item in buckets[i]: - if item.index.fqn.startswith(separation_hint): - buckets_hint[i].append(item) - else: - buckets_default[i].append(item) - buckets_with_separation_hint[""] = buckets_default - buckets_with_separation_hint[separation_hint] = buckets_hint - else: - buckets_with_separation_hint[""] = buckets - return buckets_with_separation_hint - - -def _item_size(item: WriteItem) -> int: - """ - Calculates size (in bytes) of a single write item. - - Same as torch.distributed.checkpoint.filesystem._item_size, - but fixes computing chunk size (with item.tensor_data.chunk.sizes) - - Args: - item (WriteItem): write item to compute the size of - - Returns (int): size of an item in bytes - """ - size = 1 - assert item.tensor_data is not None - # can't use math.prod as PT needs to support older python - for s in item.tensor_data.chunk.sizes: - size *= s - - dtype = item.tensor_data.properties.dtype - return size * torch._utils._element_size(dtype) - - -def _process_memory() -> int: - """ - Get memory used by current process. - - Returns (int): memory used by current process - """ - process = psutil.Process(os.getpid()) - mem_info = process.memory_info() - return mem_info.rss +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +""" Storage writer for PyT Distributed format allowing asynchronous save. 
""" +import dataclasses +import logging +import os +import queue +from functools import partial +from heapq import heappop, heappush +from itertools import chain +from operator import itemgetter +from pathlib import Path +from time import time +from typing import Callable, Dict, List, Optional, Tuple, Union + +import psutil +import torch +from torch import multiprocessing as mp +from torch.distributed.checkpoint import FileSystemWriter +from torch.distributed.checkpoint.filesystem import DEFAULT_SUFFIX, _StoragePrefix, _write_item +from torch.distributed.checkpoint.planner import SavePlan, SavePlanner, WriteItem, WriteItemType +from torch.distributed.checkpoint.storage import WriteResult +from torch.futures import Future + +from .async_utils import _disable_gc + +logger = logging.getLogger(__name__) + +WriteBucket = Tuple[Path, str, Tuple[list, list]] # represents writes to a single file + +_results_queue = None + + +def _get_write_results_queue(): + global _results_queue + if _results_queue is None: + ctx = mp.get_context('spawn') + _results_queue = ctx.Manager().Queue() + return _results_queue + + +class FileSystemWriterAsync(FileSystemWriter): + """ + Async-enabled implementation of FileSystemWriter using file IO. + + This class doesn't spawn the async process itself, relies on the external async mechanism. + + Flow: + 1. Call `write_data` + 2. Externally start async process with `get_save_function_and_args` function and args + 3. The async function to call is `writer_proxy_func` which calls + `write_preloaded_data` in multiple processes + + After saving is finalized on all ranks: + 4. Call `super().finish` with the results gathered in `self.writer_result` + + Note that step (3) above can also be called synchronously. + + Currently, it's assumed that a separate writer is created for each ckpt save + (intermediate state is stored as writer attributes). + """ + + def __init__(self, *args, separation_hint: Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + if not self.single_file_per_rank: + raise NotImplementedError( + 'single_file_per_rank flag not supported for FileSystemWriterAsync' + ) + + self.can_run_decentralized_global_plan: bool = True + + # Intermediate state between preparation and finalization + self.write_buckets: Optional[List[WriteBucket]] = None + self.results_queue: Optional[mp.Queue] = None + self.separation_hint = separation_hint + + def prepare_write_data(self, plan: SavePlan, planner: SavePlanner) -> None: + """ + First stage of async saving. Copy data to CPU and plan the local saving. 
+ + Args: + plan (SavePlan): save plan generated by the PyT Distributed compatible planner + planner (SavePlanner): save planner used to resolve the bytes and tensor data + + Returns: None, but stores the save plan in `self.write_buckets` + """ + storage_plan: _StoragePrefix = plan.storage_data + start = time() + logger.debug(f"thread_count: {self.thread_count}, time: {start}") + if self.separation_hint: + assert ( + self.thread_count > 1 + ), "thread_count must be at least 2 if separation_hint is provided" + bins = self.thread_count // 2 if self.separation_hint is not None else self.thread_count + item_buckets = _split_by_size_and_type(bins, plan.items) + logger.debug(f"bucket_prep, time: {time() - start}") + + start = time() + # move tensors from GPU to CPU before starting async writing + # We do D2H synchronously for now + file_count = 0 + + def gen_file(prefix=""): + nonlocal file_count + file_name = f"{prefix}{storage_plan.prefix}{file_count}{DEFAULT_SUFFIX}" + file_count += 1 + return file_name + + def _clone_if_needed(ten: torch.Tensor): + """Clone if we detect incontiguous storage for CPU tensors + + Makes sure we perform a `clone` only if we detect incontiguous storage, + so that we don't blow up host memory unnecessarily. + + TODO: For persistent worker, this work should be changed to move the cpu tensor + to shared_memory. + """ + ten = ten.detach() + if ten.device.type != "cpu": + # We do D2H later when the async_request is scheduled for both sync / async + # checkpointing + return ten + is_view = ten.untyped_storage().size() != ten.numel() * ten.itemsize + return ten.clone() if is_view else ten + + # Prepare bytes / tensor data in each bucket, which will be assigned to each writer process + self.write_buckets = [] + for group_name, group_buckets in _split_by_separation_hint( + item_buckets, self.separation_hint + ).items(): + for bucket in group_buckets: + bytes_data = [ + (item, planner.resolve_data(item)) + for item in bucket + if item.type == WriteItemType.BYTE_IO + ] + tensor_data = [ + (item, _clone_if_needed(planner.resolve_data(item))) + for item in bucket + if item.type != WriteItemType.BYTE_IO + ] + if len(bytes_data) > 0 or len(tensor_data) > 0: + file_name = gen_file(prefix=group_name) + self.write_buckets.append( + (self.path / file_name, file_name, (bytes_data, tensor_data)) + ) + + # Check if there is anything to write on this rank + if len(self.write_buckets) > 0: + assert len(self.write_buckets) <= self.thread_count, ( + len(self.write_buckets), + self.thread_count, + ) + self.results_queue = _get_write_results_queue() + else: + self.results_queue = None + end = time() + logger.debug(f"D2H and push, time: {end - start}") + + def get_save_function_and_args(self) -> Tuple[Optional[Callable], Optional[Callable], List]: + """ + Get function that saves the data to storage along with its arguments. + Allows the external caller to apply the save function synchronously or asynchronously. + + Returns: None (if there is nothing to write on this rank) or a tuple of: + 1) the function that saves the data. + 2) the function that stages the GPU tensors to a destination for async checkpointing. + This function should be self-contained. + 3) arguments to that function in 1). 
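# A minimal sketch of how an external checkpoint routine might drive this writer, following the
# three-step flow from the class docstring. The function name, the `plan`, `planner` and
# `writer` objects, and the way the staged buckets are substituted into the argument list are
# assumptions for illustration only; the actual orchestration lives in the async checkpointing
# utilities (async_utils) and is not shown in this file.
def _example_async_save(writer, plan, planner):
    # Stage 1: copy/clone data to CPU-friendly form and build per-file write buckets.
    writer.prepare_write_data(plan, planner)

    # Stage 2: obtain the save callable, the optional D2H staging callable and the save args.
    save_fn, stage_fn, save_args = writer.get_save_function_and_args()
    if save_fn is None:
        return []  # nothing to write on this rank

    if stage_fn is not None:
        # Staging returns new (host-resident) buckets; here we simply substitute them for the
        # original buckets at position 1 of the argument list shown above (an assumption about
        # how a caller could wire this up, not the exact mechanism used by async_utils).
        save_args[1] = stage_fn()

    # Stage 3: run the save. An async caller would hand `save_fn` and `save_args` to a separate
    # process instead of calling it inline; per the docstring, calling it synchronously also works.
    save_fn(*save_args)

    # Stage 4: after all ranks finished, collect the per-process write results.
    return writer.retrieve_write_results()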
+ """ + if not self.write_buckets: + return None, None, () + return ( + self.write_preloaded_data_multiproc, + partial(self.preload_tensors, self.write_buckets, True), + [torch.distributed.get_rank(), self.write_buckets, self.results_queue], + ) + + @staticmethod + def preload_tensors(write_buckets: List[WriteBucket], non_blocking=True) -> List[WriteBucket]: + """Preload tensors in state_dict to host memory through CPU memory + Args: + write_buckets(List): List of `WriteBucket`, + which includes what to be saved in a checkpoint + non_blocking (bool, optional): knob to enable pinned D2H memcpy. Default is True. + """ + result = [] + + for bucket in write_buckets: + file_name, storage_key, (bytes_data, tensor_data) = bucket + tensor_data = [ + (item, tensor.to("cpu", non_blocking=non_blocking)) for item, tensor in tensor_data + ] + result.append((file_name, storage_key, (bytes_data, tensor_data))) + if non_blocking: + torch.cuda.synchronize() + return result + + @staticmethod + @_disable_gc() + def write_preloaded_data_multiproc( + rank, write_buckets: List[WriteBucket], global_results_queue: mp.Queue + ) -> None: + """ + Performs saving data to storage with multiple processes. + + Starts predefined number of processes and uses 2 queues to make sure the results + are complete: + - local_results_queue - to send the actual results + - count_queue - small queue to mark worker as completed + + Using just one queue disallowed proper exception handling. + + This method is meant to be run in a forked subprocess. + Triggering GC during execution leads to CUDA errors + (cleaning up tensors owned by the parent process). + To prevent this, we disable the GC explicitly for this function with _disable_gc. + + Args: + write_buckets (List[WriteBucket]): write plan + global_results_queue (mp.Queue): mp.Queue to collect Dict[List[WriteResults]] + (or an Exception) from parallel write processes to the main training process + Returns: None + """ + logger = logging.getLogger(__name__) + w_start = time() + write_results_or_exc: Union[dict, Exception] = dict() + ctx = mp.get_context('fork') + local_results_queue = ctx.Queue() + count_queue = ctx.JoinableQueue() + p_list = [] + for i, write_bucket in enumerate(write_buckets): + try: + count_queue.put(i) + p_list.append( + ctx.Process( + target=FileSystemWriterAsync.write_preloaded_data, + args=(i, write_bucket, local_results_queue, count_queue, True), + ) + ) + except Exception as e: + err_msg = f'An error is caught while a proc {i} is created, error: {e}' + logger.error(err_msg) + write_results_or_exc = RuntimeError(err_msg) + + if not isinstance(write_results_or_exc, Exception): + for p in p_list: + p.start() + + logger.debug('FileSystemWriterAsync: collecting worker results...') + + # To make sure all nodes are completed + count_queue.join() + # At this point, all workers completed, so the queue should have exactly + # `len(write_buckets)` items + for proc_idx in range(len(write_buckets)): + try: + local_proc_idx, local_results_or_exc = local_results_queue.get() + except queue.Empty: + write_results_or_exc = RuntimeError( + f'Unexpected empty `local_results_queue`' + f' (got only {proc_idx}/{len(write_buckets)} items)' + ) + break + else: + if isinstance(local_results_or_exc, Exception): + err_msg = ( + f"Local process {local_proc_idx} encountered" + f" an error: {local_results_or_exc}" + ) + logger.error(err_msg) + write_results_or_exc = local_results_or_exc + break + assert isinstance(local_results_or_exc, list), type(local_results_or_exc) + 
                write_results_or_exc[local_proc_idx] = local_results_or_exc
+                p_list[local_proc_idx].join()
+
+        logger.debug('FileSystemWriterAsync: collected worker results successfully')
+
+        global_results_queue.put(write_results_or_exc)
+
+        w_end = time()
+        logger.debug(f"{w_end}, rank: {rank}," f" write(sync,parallel): {w_end - w_start}")
+
+    @staticmethod
+    @_disable_gc()
+    def write_preloaded_data(
+        local_proc_idx: int,
+        write_bucket: WriteBucket,
+        results_queue: mp.SimpleQueue,
+        count_queue: mp.JoinableQueue,
+        use_fsync: bool,
+    ) -> None:
+        """
+        Performs actual data saving to storage.
+
+        Args:
+            local_proc_idx (int): index of a local process that performs writing
+            write_bucket (WriteBucket): data to write to storage
+            results_queue (mp.Queue): queue to return the write results
+                to the proxy checkpoint process.
+            count_queue (mp.JoinableQueue): queue used to mark the worker task as completed
+            use_fsync (bool): if True, calls os.fsync at the end of saving
+
+        Returns: None, the write results are put into the `results_queue`
+        """
+        logger = logging.getLogger(__name__)
+        logger.debug(f'{local_proc_idx} started')
+        mem_before = _process_memory()
+
+        local_results = []
+        try:
+            file_name, storage_key, (bytes_data, tensor_data) = write_bucket
+            with open(file_name, "wb") as stream:
+                for write_item, data in bytes_data:
+                    local_results.append(_write_item(stream, data, write_item, storage_key))
+
+                for write_item, tensor in tensor_data:
+                    assert tensor.is_cpu
+                    local_results.append(_write_item(stream, tensor, write_item, storage_key))
+
+                if use_fsync:
+                    os.fsync(stream.fileno())
+            local_output = (local_proc_idx, local_results)
+        except Exception as e:
+            logger.debug(f'{local_proc_idx} failed')
+            local_output = (local_proc_idx, e)
+
+        results_queue.put(local_output)
+        # Signal this process is done.
+        count_queue.get()
+        count_queue.task_done()
+
+        mem_after = _process_memory()
+        logger.debug(
+            f"{local_proc_idx} consumed: {mem_after - mem_before},"
+            f" before: {mem_before}, after: {mem_after}"
+        )
+
+    def write_data(self, plan: SavePlan, planner: SavePlanner) -> Future[List[WriteResult]]:
+        """Write all items from ``plan``."""
+        raise NotImplementedError('write_data not implemented for FileSystemWriterAsync')
+
+    def retrieve_write_results(self) -> List[WriteResult]:
+        """
+        Turn the latest dict of write results from `self.results_queue`
+        into a single list of results. Includes an error check.
+
+        Returns (List[WriteResult]): the list of write results
+            from all local processes performing the save.
+        """
+        assert self.write_buckets is not None
+
+        if self.results_queue is None:
+            write_results_or_exc = {}
+        else:
+            try:
+                write_results_or_exc = self.results_queue.get_nowait()
+            except queue.Empty:
+                raise RuntimeError('results_queue should not be empty')
+
+        if isinstance(write_results_or_exc, Exception):
+            raise RuntimeError(f'Worker failure: {write_results_or_exc}') from write_results_or_exc
+        write_results: dict = write_results_or_exc
+        if len(write_results) != len(self.write_buckets):
+            raise RuntimeError(
+                f'Incomplete worker results (expected {len(self.write_buckets)},'
+                f' got {len(write_results)}). This probably indicates a worker failure.'
+            )
+        return list(chain.from_iterable(write_results.values()))
+
+    def prepare_decentralized_global_plan(self, local_plan: SavePlan) -> SavePlan:
+        """Instead of assigning indices by plan order, uses PyT rank (same outcome).
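# Self-contained toy of the two-queue completion pattern used by `write_preloaded_data_multiproc`
# and `write_preloaded_data` above: a JoinableQueue marks tasks as done while a separate queue
# carries results or exceptions back to the parent. The worker below is a stand-in for the real
# writer (squares stand in for write results); it is a sketch, not part of the checkpointing API.
import multiprocessing as _mp


def _toy_worker(idx, results_queue, count_queue):
    try:
        result = (idx, idx * idx)  # pretend "write results"
    except Exception as exc:  # illustrative only
        result = (idx, exc)
    results_queue.put(result)
    # Mark one task as completed so the parent's count_queue.join() can return.
    count_queue.get()
    count_queue.task_done()


def _toy_parallel_write(num_tasks=4):
    ctx = _mp.get_context('fork')
    results_queue = ctx.Queue()
    count_queue = ctx.JoinableQueue()
    procs = []
    for i in range(num_tasks):
        count_queue.put(i)
        procs.append(ctx.Process(target=_toy_worker, args=(i, results_queue, count_queue)))
    for p in procs:
        p.start()
    count_queue.join()  # returns only once every worker called task_done()
    collected = {}
    for _ in range(num_tasks):
        idx, result_or_exc = results_queue.get()
        if isinstance(result_or_exc, Exception):
            raise RuntimeError(f'worker {idx} failed') from result_or_exc
        collected[idx] = result_or_exc
    for p in procs:
        p.join()
    return collected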
+ + Args: + local_plan (SavePlan): local plan to turn to a global plan + (without interactions with other ranks) + + Returns: + SavePlan - locally transformed plan equivalent to the plan that would be + created by the coordinator + """ + return dataclasses.replace( + local_plan, storage_data=_StoragePrefix(f"__{torch.distributed.get_rank()}_") + ) + + +def _split_by_size_and_type(bins: int, items: List[WriteItem]) -> List[List[WriteItem]]: + """ + Splits write items according to item size into close to uniform bins. + + Same as torch.distributed.checkpoint.filesystem._split_by_size_and_type, + but with a fixed _item_size function. + + Args: + bins (int): numbers of bins to split to + items (List[WriteItem]): list of write items + + Returns (List[List[WriteItem]]): write items split to bins + """ + if bins == 1: + return [items] + + bytes_items: List[WriteItem] = [] + tensor_items: List[WriteItem] = [] + for wi in items: + container = bytes_items if wi.type == WriteItemType.BYTE_IO else tensor_items + container.append(wi) + + buckets: List[List[WriteItem]] = [[] for _ in range(bins)] + bucket_sizes = [0 for _ in range(bins)] + + # Assign bytes with a simple round-robin + for i, item in enumerate(bytes_items): + buckets[i % bins].append(item) + + # Sort tensor items by size in decreasing order once and store the size with item + sized_tensors = [(item, _item_size(item)) for item in tensor_items] + sized_tensors.sort(key=itemgetter(1), reverse=True) + + # Use a min heap for bin assignment + # Store (total_size_of_bin, bin_index) tuples + heap: List[Tuple[int, int]] = [(0, i) for i in range(bins)] + + # Assign tensors using heap + for item, size in sized_tensors: + total_bin_size, bin_idx = heappop(heap) + buckets[bin_idx].append(item) + heappush(heap, (total_bin_size + size, bin_idx)) + + return buckets + + +def _split_by_separation_hint( + buckets: List[List[WriteItem]], separation_hint: Optional[str] = None +) -> Dict[str, List[List[WriteItem]]]: + """ + Splits buckets into those whose keys begin with the separation_hint and those whose keys do not + + Args: + buckets (List[List[WriteItem]]): buckets to split + separation_hint (Optional[str]): optional prefix to split on + + Returns (Dict[str, List[List[WriteItem]]]): a dictionary + mapping the prefix to the relevant buckets + """ + bins = len(buckets) + buckets_with_separation_hint = {} + if separation_hint is not None: + buckets_default = [[] for _ in range(bins)] + buckets_hint = [[] for _ in range(bins)] + for i in range(bins): + for item in buckets[i]: + if item.index.fqn.startswith(separation_hint): + buckets_hint[i].append(item) + else: + buckets_default[i].append(item) + buckets_with_separation_hint[""] = buckets_default + buckets_with_separation_hint[separation_hint] = buckets_hint + else: + buckets_with_separation_hint[""] = buckets + return buckets_with_separation_hint + + +def _item_size(item: WriteItem) -> int: + """ + Calculates size (in bytes) of a single write item. 
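# Standalone illustration of the min-heap bin packing used by the new `_split_by_size_and_type`:
# sort items by size (largest first), then always place the next item into the currently lightest
# bin. Plain integer "sizes" stand in for WriteItems here; this is a sketch of the idea, not the
# function itself.
from heapq import heappop as _heappop, heappush as _heappush
from typing import List as _List


def _toy_split_by_size(bins: int, sizes: _List[int]) -> _List[_List[int]]:
    buckets = [[] for _ in range(bins)]
    heap = [(0, i) for i in range(bins)]  # (total size in bin, bin index)
    for size in sorted(sizes, reverse=True):
        total, idx = _heappop(heap)
        buckets[idx].append(size)
        _heappush(heap, (total + size, idx))
    return buckets


# Example: 3 bins end up with nearly equal totals (8, 9 and 9).
assert sorted(map(sum, _toy_split_by_size(3, [8, 7, 5, 4, 2]))) == [8, 9, 9]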
+ + Same as torch.distributed.checkpoint.filesystem._item_size, + but fixes computing chunk size (with item.tensor_data.chunk.sizes) + + Args: + item (WriteItem): write item to compute the size of + + Returns (int): size of an item in bytes + """ + size = 1 + assert item.tensor_data is not None + # can't use math.prod as PT needs to support older python + for s in item.tensor_data.chunk.sizes: + size *= s + + dtype = item.tensor_data.properties.dtype + return size * torch._utils._element_size(dtype) + + +def _process_memory() -> int: + """ + Get memory used by current process. + + Returns (int): memory used by current process + """ + process = psutil.Process(os.getpid()) + mem_info = process.memory_info() + return mem_info.rss diff --git a/megatron/core/dist_checkpointing/strategies/fully_parallel.py b/megatron/core/dist_checkpointing/strategies/fully_parallel.py index 56523da..3fb46c5 100644 --- a/megatron/core/dist_checkpointing/strategies/fully_parallel.py +++ b/megatron/core/dist_checkpointing/strategies/fully_parallel.py @@ -1,439 +1,515 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import logging -from pathlib import Path -from time import time -from typing import Dict, Optional, Tuple - -import torch -import torch.distributed as dist - -from megatron.core.dist_checkpointing import ShardedTensor -from megatron.core.dist_checkpointing.core import CheckpointingException -from megatron.core.dist_checkpointing.dict_utils import ( - dict_list_map_inplace, - extract_matching_values, - merge, - nested_values, -) -from megatron.core.dist_checkpointing.exchange_utils import ( - ShardDistribution, - determine_main_replica_uniform_distribution, - exchange_by_distribution, -) -from megatron.core.dist_checkpointing.mapping import ShardedStateDict, StateDict, is_main_replica -from megatron.core.dist_checkpointing.strategies.base import ( - AsyncSaveShardedStrategy, - LoadShardedStrategy, - SaveShardedStrategy, -) -from megatron.core.dist_checkpointing.utils import _sharded_tensor_shard_id, _ShardId -from megatron.core.dist_checkpointing.validation import ( - determine_global_metadata, - validate_sharding_integrity, -) - -logger = logging.getLogger(__name__) - - -class FullyParallelSaveStrategyWrapper(AsyncSaveShardedStrategy): - """Wraps arbitrary strategy and distributes the save during `save`. - - The save distribution happens without any *data* communication. - Only the *metadata* is exchanged and based on data replication on different - ranks, we try to distribute the save as uniformly as possible. - - This wrapper assumes, that setting `replica_id` to 0 will make the - underlying strategy do the saving on current rank. All the other `replica_id`s - are set to 1. - - Currently, the save distribution is realized with a greedy algorithm - described in `distribute_shards_to_ranks`. - - Args: - strategy (SaveShardedStrategy): base strategy to wrap - parallelization_group (ProcessGroup, optional): process group to use for save - distribution. Note that this doesn't have to match exactly the - data distribution, but should cover the replication pattern - to maximize performance. Defaults to the whole world. - do_cache_distribution (bool, optional): whether to cache the save distribution - from previous calls. Should be set to True only if the state dict - structure between the calls is always the same. Defaults to True. 
- """ - - def __init__( - self, - strategy: SaveShardedStrategy, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, - do_cache_distribution: bool = False, - ): - super().__init__(strategy.backend, strategy.version) - self.base_strategy = strategy - self.parallelization_group = parallelization_group - self.do_cache_distribution = do_cache_distribution - - self.cached_distribution: Optional[ShardDistribution] = None - - def async_save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - if not isinstance(self.base_strategy, AsyncSaveShardedStrategy): - raise CheckpointingException( - f'Cannot apply async_save to non-async base strategy {self.base_strategy}' - ) - self.apply_saving_parallelization(sharded_state_dict) - return self.base_strategy.async_save(sharded_state_dict, checkpoint_dir) - - def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - self.apply_saving_parallelization(sharded_state_dict) - return self.base_strategy.save(sharded_state_dict, checkpoint_dir) - - def apply_saving_parallelization(self, sharded_state_dict: ShardedStateDict) -> None: - """Distributes the save across ranks by exchanging metadata. - - Exchanges metadata from the state dict and computes the uniform - (as close as possible) distribution of saves among the ranks. - - If `self.do_cache_distribution` is True, caches the distribution between - the calls and subsequent distributions happen without any inter-rank - communication. - - Args: - sharded_state_dict (ShardedStateDict): state dict to distribute the saving - - Returns: None - """ - start = time() - if self.do_cache_distribution and self.cached_distribution is not None: - logger.debug(f'Apply *cached* save parallelization') - precomputed_distribution = self.cached_distribution - else: - logger.debug(f'Apply save parallelization') - precomputed_distribution = determine_main_replica_uniform_distribution( - sharded_state_dict, self.parallelization_group - ) - - distribute_main_replicas_with_precomputed_distribution( - sharded_state_dict, self.parallelization_group, precomputed_distribution - ) - if self.cached_distribution is None: - # First time applying the parallelization - validate_sharding_integrity(determine_global_metadata(sharded_state_dict)[1]) - if self.do_cache_distribution: - self.cached_distribution = precomputed_distribution - end = time() - logger.debug(f"parallel save sharding, time: {end - start}") - - @property - def can_handle_sharded_objects(self): - return self.base_strategy.can_handle_sharded_objects - - -class FullyParallelLoadStrategyWrapper(LoadShardedStrategy): - """Wraps arbitrary load strategy and distributes the load during `load`. - - See `load` method docs for details. - - Args: - strategy (LoadShardedStrategy): base strategy to wrap - parallelization_group (ProcessGroup, optional): process group to use for load - distribution. Note that this doesn't have to match exactly the - data distribution, but should cover the replication pattern - to maximize performance. Defaults to the whole world. - In most cases, it's recommended to set it to the DP group. - do_cache_distribution (bool, optional): whether to cache the load distribution - from previous calls. Should be set to True only if the state dict - structure between the calls is always the same. Defaults to False, - since the loading in general happens only once during training. - Note that the load distribution *cannot* be reused as a save distribution, - because save/load is not fully symmetrical. 
- exchange_algo (str): algorithm to use for exchanging the data. - Options: - - broadcast - each rank broadcasts individual tensors to others - - gather_object (default) - ranks all_gather_object the whole loaded state dicts - - gather_rounds (default) - ranks all gather individual tensors in rounds - See method docs for more details. - """ - - def __init__( - self, - strategy: LoadShardedStrategy, - parallelization_group: Optional[torch.distributed.ProcessGroup] = None, - do_cache_distribution: bool = False, - exchange_algo: str = 'broadcast', - ): - super().__init__() - self.base_strategy = strategy - if parallelization_group is None: - parallelization_group = ( - dist.GroupMember.WORLD - ) # explicit group needed for torch.distributed.get_global_rank call - self.parallelization_group = parallelization_group - self.do_cache_distribution = do_cache_distribution - self.exchange_algo = exchange_algo - - self.cached_distribution: Optional[ShardDistribution] = None - - def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path) -> StateDict: - """Distributes the load and calls underlying strategy only for parts of the state dict. - - Steps: - 1. Load metadata is exchanged between the ranks in the parallelization group. - 2. Each rank deterministically plans the load for the whole workload - so that the loads are as uniform as possible. - 3. Each ranks loads its planned shard of the checkpoint. - 4. All ranks exchange the loaded shards. - - Internode communication is involved in steps (1) (with metadata) - and (4) (with actual data). Storage interaction is involved in step (3). - - Currently, the load distribution (step 2) is realized with a greedy algorithm - described in `distribute_shards_to_ranks` (same as for saving distribution). - - Currently, the shards are all gathered between all ranks in the parallelization - group. This might not be optimal (some ranks do not need all tensors), - but it's a reasonable approximation for an optimal exchange in most scenarios. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to load - checkpoint_dir (Path): checkpoint directory to load from - - Returns: - StateDict: loaded state dict. The state dict should be equivalent to - a state dict that would be loaded with the underlying strategy - without this wrapper. - """ - if torch.distributed.get_world_size(self.parallelization_group) <= 1: - return self.base_strategy.load(sharded_state_dict, checkpoint_dir) - - # Step 1 and 2: exchange load metadata and distribute the load - start = time() - precomputed_distribution = self.apply_loading_parallelization(sharded_state_dict) - assert ( - precomputed_distribution is not None - ), 'Expecting non-trivial distribution for non-trivial parallelization group' - end = time() - logger.debug(f'self.apply_loading_parallelization took {end - start}s') - start = end - - # Step 3: load part of the checkpoint. - # Load only sharded objects first. 
ShardedTensors will be loaded separately - # so that we can keep track of sharded tensors loaded by this rank - (sharded_tensors, sharded_state_dict, to_load_shards, unloaded_shards) = ( - self._defer_loading_sharded_tensors(sharded_state_dict) - ) - loaded_state_dict = self.base_strategy.load(sharded_state_dict, checkpoint_dir) - - end = time() - logger.debug(f'Base load of ShardedObjects took {end - start}s') - start = end - - # Load sharded tensors separately - loaded_tensors = self.base_strategy.load(to_load_shards, checkpoint_dir) - - end = time() - logger.debug(f'Base load of ShardedTensors took {end - start}s') - start = end - - # Step 4: exchange data between ranks - logger.debug(f'Applying parallel load with algo {self.exchange_algo}') - all_loaded_tensors = exchange_by_distribution( - loaded_tensors, - unloaded_shards, - precomputed_distribution, - self.parallelization_group, - self.exchange_algo, - ) - if not set(unloaded_shards.keys()).issubset(all_loaded_tensors.keys()): - missing_shards = set(unloaded_shards.keys()) - all_loaded_tensors.keys() - raise CheckpointingException( - f'Missing shards after fully parallel loading: {missing_shards}' - ) - - sync_start = time() - torch.cuda.synchronize() - end = time() - logger.debug(f'torch.cuda.synchronize took {end - sync_start}s') - logger.debug(f'self.exchange_loaded_tensors took {end - start}s') - - self.fill_in_deferred_sharded_tensors(sharded_tensors, all_loaded_tensors) - merge(loaded_state_dict, sharded_tensors) - return loaded_state_dict - - def _defer_loading_sharded_tensors( - self, sharded_state_dict: ShardedStateDict - ) -> Tuple[ - ShardedStateDict, - ShardedStateDict, - Dict[_ShardId, ShardedTensor], - Dict[_ShardId, ShardedTensor], - ]: - """Divides state dict into parts loaded by this vs other ranks. - - ShardedTensors with main replica_id will be loaded by this rank, - others will be received by other ranks (after loading from storage). - - Args: - sharded_state_dict (ShardedStateDict): state dict with ShardedTensor - that will be divided. - - Returns: a tuple of: - - ShardedStateDict: sub-state dict only with ShardedTensors - - ShardedStateDict: sub-state dict with non-ShardedTensors - - Dict[_ShardId, ShardedTensor]: ShardedTensor are uniquely identified - by shard ids. This is a mapping from shard id to a corresponding - ShardedTensor for tensors loaded by *this* rank - - Dict[_ShardId, ShardedTensor]: mapping from shard id to a corresponding - ShardedTensor for tensors loaded by *other* ranks - """ - to_load_shards = {} - unloaded_shards = {} - - sharded_tensors, sharded_state_dict = extract_matching_values( - sharded_state_dict, lambda v: isinstance(v, ShardedTensor) - ) - - def wrap_non_main_replicas(x): - if isinstance(x, ShardedTensor): - # Assign shard to be loaded or not - if is_main_replica(x.replica_id): - to_load_shards[_sharded_tensor_shard_id(x)] = x - else: - unloaded_shards[_sharded_tensor_shard_id(x)] = x - return x - - dict_list_map_inplace(wrap_non_main_replicas, sharded_tensors) - return sharded_tensors, sharded_state_dict, to_load_shards, unloaded_shards - - def apply_loading_parallelization( - self, sharded_state_dict: ShardedStateDict - ) -> Optional[ShardDistribution]: - """Distributes the load across ranks by exchanging metadata. - - Exchanges metadata from the state dict and computes the uniform - (as close as possible) distribution of loads among the ranks. - Marks ShardedTensors to be loaded by the current rank with replica_id 0 - (and others with non 0 values). 
- - If `self.do_cache_distribution` is True, caches the distribution between - the calls and subsequent distributions happen without any inter-rank - communication. - - Args: - sharded_state_dict (ShardedStateDict): state dict to distribute the loading - - Returns: - ShardDistribution (optional): the computed loading distribution - """ - if self.do_cache_distribution and self.cached_distribution is not None: - logger.debug(f'Apply *cached* load parallelization') - precomputed_distribution = self.cached_distribution - else: - logger.debug(f'Apply load parallelization') - precomputed_distribution = determine_main_replica_uniform_distribution( - sharded_state_dict, self.parallelization_group, True - ) - - distribute_main_replicas_with_precomputed_distribution( - sharded_state_dict, self.parallelization_group, precomputed_distribution - ) - if self.do_cache_distribution: - self.cached_distribution = precomputed_distribution - - return precomputed_distribution - - def fill_in_deferred_sharded_tensors( - self, sharded_state_dict: ShardedStateDict, loaded_tensors: Dict[_ShardId, torch.Tensor] - ) -> None: - """Fill in tensors not loaded by current rank with tensors from `loaded_tensors` map. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to fill in. - ShardedTensors are completely replaced with corresponding torch.Tensors. - loaded_tensors (Dict[_ShardId, torch.Tensor]): dict allowing to map - ShardedTensor from the sharded_state_dict to loaded tensors. - - Returns: - - """ - - def fill_in_sharded_tensor(x): - if isinstance(x, ShardedTensor): - try: - x = loaded_tensors[_sharded_tensor_shard_id(x)] - except KeyError as e: - raise CheckpointingException( - f'Missing loaded tensor shard: {_sharded_tensor_shard_id(x)}' - ) from e - - return x - - dict_list_map_inplace(fill_in_sharded_tensor, sharded_state_dict) - - @property - def can_handle_sharded_objects(self): - return self.base_strategy.can_handle_sharded_objects - - def load_tensors_metadata(self, checkpoint_dir: Path): - return self.base_strategy.load_tensors_metadata(checkpoint_dir) - - def load_sharded_metadata(self, checkpoint_dir: Path): - return self.base_strategy.load_sharded_metadata(checkpoint_dir) - - def check_backend_compatibility(self, loaded_version): - return self.base_strategy.check_backend_compatibility(loaded_version) - - def check_version_compatibility(self, loaded_version): - return self.base_strategy.check_version_compatibility(loaded_version) - - -def distribute_main_replicas_with_precomputed_distribution( - sharded_state_dict: ShardedStateDict, - parallelization_group: torch.distributed.ProcessGroup, - precomputed_distribution: Optional[ShardDistribution], -): - """Applies the save distribution computed with `determine_main_replica_uniform_distribution`. - - Based on rank assignment, sets replica ids of the shards saved by current rank to 0 - and all the other replica ids to 1. - - Args: - sharded_state_dict (ShardedStateDict): state dict to apply the save distribution to - parallelization_group (ProcessGroup): distribution will be applied within this - process group. Must match with the process group passed to - `determine_main_replica_uniform_distribution`. 
- precomputed_distribution (ShardDistribution): distribution computed with - `determine_main_replica_uniform_distribution` - - Returns: None - - Example replica ids of tensors A, B, C before distribution: - rank0: A: (0, 0, 0), B: (0, 0, 0), C: (0, 0, 0) - rank1: A: (0, 0, 1), B: (0, 0, 1), C: (0, 0, 1) - rank2: A: (0, 0, 2), B: (0, 0, 2), C: (0, 0, 2) - - Replicas after distribution for the example above: - rank0: A: 0, B: 1, C: 1 - rank1: A: 1, B: 0, C: 1 - rank2: A: 1, B: 1, C: 0 - """ - if torch.distributed.get_world_size(group=parallelization_group) <= 1: - return - if precomputed_distribution is None: - raise ValueError( - 'precomputed_distribution must be not None for non-trivial parallelization group' - ) - - local_shards = list( - sh_base - for sh_base in nested_values(sharded_state_dict) - if isinstance(sh_base, ShardedTensor) - ) - - rank_within_dp_group = torch.distributed.get_rank(parallelization_group) - for sh_ten in local_shards: - shard_id = _sharded_tensor_shard_id(sh_ten) - if ( - shard_id in precomputed_distribution.shards_in_this_group - and rank_within_dp_group == precomputed_distribution.main_rank_for_shard[shard_id] - ): - sh_ten.replica_id = 0 - else: - sh_ten.replica_id = 1 +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import logging +from pathlib import Path +from time import time +from typing import Any, Callable, Dict, Optional, Tuple, TypeVar + +import torch +import torch.distributed as dist +from torch.distributed.checkpoint import Metadata + +from megatron.core.dist_checkpointing import ShardedObject, ShardedTensor +from megatron.core.dist_checkpointing.core import CheckpointingException +from megatron.core.dist_checkpointing.dict_utils import ( + dict_list_map_inplace, + extract_matching_values, + merge, + nested_values, +) +from megatron.core.dist_checkpointing.exchange_utils import ( + ShardDistribution, + determine_main_replica_uniform_distribution, + exchange_by_distribution, + exchange_loaded_objects_gather_object, +) +from megatron.core.dist_checkpointing.mapping import ShardedStateDict, StateDict, is_main_replica +from megatron.core.dist_checkpointing.strategies.base import ( + AsyncSaveShardedStrategy, + LoadShardedStrategy, + SaveShardedStrategy, +) +from megatron.core.dist_checkpointing.utils import ( + _sharded_object_id, + _sharded_tensor_shard_id, + _ShardId, + debug_time, +) +from megatron.core.dist_checkpointing.validation import ( + determine_global_metadata, + validate_sharding_integrity, +) + +logger = logging.getLogger(__name__) + +T = TypeVar('T', ShardedObject, ShardedTensor) + + +class FullyParallelSaveStrategyWrapper(AsyncSaveShardedStrategy): + """Wraps arbitrary strategy and distributes the save during `save`. + + The save distribution happens without any *data* communication. + Only the *metadata* is exchanged and based on data replication on different + ranks, we try to distribute the save as uniformly as possible. + + This wrapper assumes, that setting `replica_id` to 0 will make the + underlying strategy do the saving on current rank. All the other `replica_id`s + are set to 1. + + Currently, the save distribution is realized with a greedy algorithm + described in `distribute_shards_to_ranks`. + + Args: + strategy (SaveShardedStrategy): base strategy to wrap + parallelization_group (ProcessGroup, optional): process group to use for save + distribution. Note that this doesn't have to match exactly the + data distribution, but should cover the replication pattern + to maximize performance. 
Defaults to the whole world. + do_cache_distribution (bool, optional): whether to cache the save distribution + from previous calls. Should be set to True only if the state dict + structure between the calls is always the same. Defaults to True. + """ + + def __init__( + self, + strategy: SaveShardedStrategy, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, + do_cache_distribution: bool = False, + ): + super().__init__(strategy.backend, strategy.version) + self.base_strategy = strategy + self.parallelization_group = parallelization_group + self.do_cache_distribution = do_cache_distribution + + self.cached_distribution: Optional[ShardDistribution] = None + + def async_save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + if not isinstance(self.base_strategy, AsyncSaveShardedStrategy): + raise CheckpointingException( + f'Cannot apply async_save to non-async base strategy {self.base_strategy}' + ) + self.apply_saving_parallelization(sharded_state_dict) + return self.base_strategy.async_save(sharded_state_dict, checkpoint_dir) + + def save(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + self.apply_saving_parallelization(sharded_state_dict) + return self.base_strategy.save(sharded_state_dict, checkpoint_dir) + + def apply_saving_parallelization(self, sharded_state_dict: ShardedStateDict) -> None: + """Distributes the save across ranks by exchanging metadata. + + Exchanges metadata from the state dict and computes the uniform + (as close as possible) distribution of saves among the ranks. + + If `self.do_cache_distribution` is True, caches the distribution between + the calls and subsequent distributions happen without any inter-rank + communication. + + Args: + sharded_state_dict (ShardedStateDict): state dict to distribute the saving + + Returns: None + """ + start = time() + if self.do_cache_distribution and self.cached_distribution is not None: + logger.debug(f'Apply *cached* save parallelization') + precomputed_distribution = self.cached_distribution + else: + logger.debug(f'Apply save parallelization') + precomputed_distribution = determine_main_replica_uniform_distribution( + sharded_state_dict, self.parallelization_group + ) + + distribute_main_replicas_with_precomputed_distribution( + sharded_state_dict, self.parallelization_group, precomputed_distribution + ) + if self.cached_distribution is None: + # First time applying the parallelization + validate_sharding_integrity(determine_global_metadata(sharded_state_dict)[1]) + if self.do_cache_distribution: + self.cached_distribution = precomputed_distribution + end = time() + logger.debug(f"parallel save sharding, time: {end - start}") + + @property + def can_handle_sharded_objects(self): + return self.base_strategy.can_handle_sharded_objects + + +class FullyParallelLoadStrategyWrapper(LoadShardedStrategy): + """Wraps arbitrary load strategy and distributes the load during `load`. + + See `load` method docs for details. + + Args: + strategy (LoadShardedStrategy): base strategy to wrap + parallelization_group (ProcessGroup, optional): process group to use for load + distribution. Note that this doesn't have to match exactly the + data distribution, but should cover the replication pattern + to maximize performance. Defaults to the whole world. + In most cases, it's recommended to set it to the DP group. + do_cache_distribution (bool, optional): whether to cache the load distribution + from previous calls. 
Should be set to True only if the state dict + structure between the calls is always the same. Defaults to False, + since the loading in general happens only once during training. + Note that the load distribution *cannot* be reused as a save distribution, + because save/load is not fully symmetrical. + exchange_algo (str): algorithm to use for exchanging the data. + Options: + - broadcast - each rank broadcasts individual tensors to others + - gather_object (default) - ranks all_gather_object the whole loaded state dicts + - gather_rounds (default) - ranks all gather individual tensors in rounds + See method docs for more details. + """ + + def __init__( + self, + strategy: LoadShardedStrategy, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, + do_cache_distribution: bool = False, + exchange_algo: str = 'broadcast', + ): + super().__init__() + self.base_strategy = strategy + if parallelization_group is None: + parallelization_group = ( + dist.GroupMember.WORLD + ) # explicit group needed for torch.distributed.get_global_rank call + self.parallelization_group = parallelization_group + self.do_cache_distribution = do_cache_distribution + self.exchange_algo = exchange_algo + + self.cached_distribution: Optional[ShardDistribution] = None + self.cached_global_metadata: Optional[Metadata] = None + + @debug_time("FullyParallelLoadStrategyWrapper.load", logger) + def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path) -> StateDict: + """Distributes the load and calls underlying strategy only for parts of the state dict. + + Steps: + 1. Load metadata is exchanged between the ranks in the parallelization group. + 2. Each rank deterministically plans the load for the whole workload + so that the loads are as uniform as possible. + 3. Each ranks loads its planned shard of the checkpoint. + 4. All ranks exchange the loaded shards. + + Internode communication is involved in steps (1) (with metadata) + and (4) (with actual data). Storage interaction is involved in step (3). + + Currently, the load distribution (step 2) is realized with a greedy algorithm + described in `distribute_shards_to_ranks` (same as for saving distribution). + + Currently, the shards are all gathered between all ranks in the parallelization + group. This might not be optimal (some ranks do not need all tensors), + but it's a reasonable approximation for an optimal exchange in most scenarios. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to load + checkpoint_dir (Path): checkpoint directory to load from + + Returns: + StateDict: loaded state dict. The state dict should be equivalent to + a state dict that would be loaded with the underlying strategy + without this wrapper. + """ + + loaded_state_dict = {} + + if torch.distributed.get_world_size(self.parallelization_group) <= 1: + return self.base_strategy.load(sharded_state_dict, checkpoint_dir) + + # Step 1 and 2: exchange load metadata and distribute the load + with debug_time("self.apply_loading_parallelization", logger): + precomputed_distribution: ShardDistribution | None = self.apply_loading_parallelization( + sharded_state_dict + ) + assert ( + precomputed_distribution is not None + ), 'Expecting non-trivial distribution for non-trivial parallelization group' + + # Step 3: load part of the checkpoint. + # Load only sharded objects first. 
ShardedTensors will be loaded separately + # so that we can keep track of sharded tensors loaded by this rank + (sharded_tensors, sharded_state_dict, to_load_shards, unloaded_shards) = ( + self._defer_loading_sharded_tensors(sharded_state_dict) + ) + + (sharded_objects, sharded_state_dict, to_load_objects, unloaded_objects) = ( + self._defer_loading_sharded_objects(sharded_state_dict) + ) + + assert ( + len(sharded_state_dict) == 0 + ), "sharded_state_dict is not empty after deferring tensors and objects" + with debug_time("base_load_ShardedObjects", logger): + # Load sharded objects first + loaded_objects = self.base_strategy.load(to_load_objects, checkpoint_dir) + + with debug_time("base_load_ShardedTensors", logger): + # Load sharded tensors separately + loaded_tensors = self.base_strategy.load(to_load_shards, checkpoint_dir) + + with debug_time("self.exchange_loaded_tensors", logger): + + # Step 4: exchange data between ranks + logger.debug(f'Applying parallel load with algo {self.exchange_algo}') + all_loaded_tensors = exchange_by_distribution( + loaded_tensors, + unloaded_shards, + precomputed_distribution, + self.parallelization_group, + self.exchange_algo, + ) + if not set(unloaded_shards.keys()).issubset(all_loaded_tensors.keys()): + missing_shards = set(unloaded_shards.keys()) - all_loaded_tensors.keys() + raise CheckpointingException( + f'Missing shards after fully parallel loading: {missing_shards}' + ) + + with debug_time("torch.cuda.synchronize", logger): + torch.cuda.synchronize() + + all_loaded_objects = exchange_loaded_objects_gather_object(loaded_objects) + + if not set(unloaded_objects.keys()).issubset(all_loaded_objects.keys()): + missing_object_shards = set(unloaded_objects.keys()) - all_loaded_objects.keys() + raise CheckpointingException( + f'Missing object shards after fully parallel loading: {missing_object_shards}' + ) + torch.cuda.synchronize() + + self.fill_in_deferred_sharded_tensors(sharded_tensors, all_loaded_tensors) + self.fill_in_deferred_sharded_objects(sharded_objects, all_loaded_objects) + + merge(loaded_state_dict, sharded_objects) + merge(loaded_state_dict, sharded_tensors) + if hasattr(self.base_strategy, "cached_global_metadata"): + self.cached_global_metadata = self.base_strategy.cached_global_metadata + return loaded_state_dict + + @staticmethod + def _defer_loading_sharded_objects( + sharded_state_dict: ShardedStateDict, + ) -> Tuple[ + ShardedStateDict, + ShardedStateDict, + Dict[_ShardId, ShardedObject], + Dict[_ShardId, ShardedObject], + ]: + return _defer_loading_sharded_items(sharded_state_dict, ShardedObject, _sharded_object_id) + + @staticmethod + def _defer_loading_sharded_tensors( + sharded_state_dict: ShardedStateDict, + ) -> Tuple[ + ShardedStateDict, + ShardedStateDict, + Dict[_ShardId, ShardedTensor], + Dict[_ShardId, ShardedTensor], + ]: + return _defer_loading_sharded_items( + sharded_state_dict, ShardedTensor, _sharded_tensor_shard_id + ) + + @staticmethod + def fill_in_deferred_sharded_objects( + sharded_state_dict: ShardedStateDict, loaded_objects: Dict[_ShardId, Any] + ) -> None: + """Fill in objects not loaded by current rank with objects from `loaded_objects` map. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to fill in. + ShardedObjects are completely replaced with corresponding objects. + loaded_objects (Dict[_ShardId, Any]): dict allowing to map + ShardedObject from the sharded_state_dict to loaded objects. 
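# Hedged illustration of the "as uniform as possible" greedy assignment that the save/load
# wrappers rely on (the real algorithm is `distribute_shards_to_ranks` in exchange_utils and is
# not shown in this diff): each shard is replicated on a set of candidate ranks, and a simple
# greedy pass assigns it to whichever candidate currently carries the least total size. The shard
# names and sizes below are made up for the example.
def _toy_distribute_shards(shard_to_ranks, shard_sizes, num_ranks):
    rank_load = {rank: 0 for rank in range(num_ranks)}
    main_rank_for_shard = {}
    # Place the largest shards first so the heavy ones get spread out before the small ones.
    for shard in sorted(shard_sizes, key=shard_sizes.get, reverse=True):
        best = min(shard_to_ranks[shard], key=lambda rank: rank_load[rank])
        main_rank_for_shard[shard] = best
        rank_load[best] += shard_sizes[shard]
    return main_rank_for_shard


# Two DP-replicated shards plus one unique shard; the replicated ones land on different ranks.
_assignment = _toy_distribute_shards(
    shard_to_ranks={'wte': [0, 1], 'dense': [0, 1], 'head': [1]},
    shard_sizes={'wte': 100, 'dense': 80, 'head': 10},
    num_ranks=2,
)
assert _assignment == {'wte': 0, 'dense': 1, 'head': 1}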
+ + Returns: + None + """ + _fill_in_deferred_sharded_items( + sharded_state_dict, loaded_objects, ShardedObject, _sharded_object_id + ) + + @staticmethod + def fill_in_deferred_sharded_tensors( + sharded_state_dict: ShardedStateDict, loaded_tensors: Dict[_ShardId, torch.Tensor] + ) -> None: + """Fill in tensors not loaded by current rank with tensors from `loaded_tensors` map. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to fill in. + ShardedTensors are completely replaced with corresponding torch.Tensors. + loaded_tensors (Dict[_ShardId, torch.Tensor]): dict allowing to map + ShardedTensor from the sharded_state_dict to loaded tensors. + + Returns: + None + """ + _fill_in_deferred_sharded_items( + sharded_state_dict, loaded_tensors, ShardedTensor, _sharded_tensor_shard_id + ) + + def apply_loading_parallelization( + self, sharded_state_dict: ShardedStateDict + ) -> Optional[ShardDistribution]: + """Distributes the load across ranks by exchanging metadata. + + Exchanges metadata from the state dict and computes the uniform + (as close as possible) distribution of loads among the ranks. + Marks ShardedTensors to be loaded by the current rank with replica_id 0 + (and others with non 0 values). + + If `self.do_cache_distribution` is True, caches the distribution between + the calls and subsequent distributions happen without any inter-rank + communication. + + Args: + sharded_state_dict (ShardedStateDict): state dict to distribute the loading + + Returns: + ShardDistribution (optional): the computed loading distribution + """ + if self.do_cache_distribution and self.cached_distribution is not None: + logger.debug(f'Apply *cached* load parallelization') + precomputed_distribution = self.cached_distribution + else: + logger.debug(f'Apply load parallelization') + precomputed_distribution = determine_main_replica_uniform_distribution( + sharded_state_dict, self.parallelization_group, True + ) + + distribute_main_replicas_with_precomputed_distribution( + sharded_state_dict, self.parallelization_group, precomputed_distribution + ) + if self.do_cache_distribution: + self.cached_distribution = precomputed_distribution + + return precomputed_distribution + + @property + def can_handle_sharded_objects(self): + return self.base_strategy.can_handle_sharded_objects + + def load_tensors_metadata(self, checkpoint_dir: Path): + return self.base_strategy.load_tensors_metadata(checkpoint_dir) + + def load_sharded_metadata(self, checkpoint_dir: Path): + return self.base_strategy.load_sharded_metadata(checkpoint_dir) + + def check_backend_compatibility(self, loaded_version): + return self.base_strategy.check_backend_compatibility(loaded_version) + + def check_version_compatibility(self, loaded_version): + return self.base_strategy.check_version_compatibility(loaded_version) + + +def distribute_main_replicas_with_precomputed_distribution( + sharded_state_dict: ShardedStateDict, + parallelization_group: torch.distributed.ProcessGroup, + precomputed_distribution: Optional[ShardDistribution], +): + """Applies the save distribution computed with `determine_main_replica_uniform_distribution`. + + Based on rank assignment, sets replica ids of the shards saved by current rank to 0 + and all the other replica ids to 1. + + Args: + sharded_state_dict (ShardedStateDict): state dict to apply the save distribution to + parallelization_group (ProcessGroup): distribution will be applied within this + process group. 
Must match with the process group passed to + `determine_main_replica_uniform_distribution`. + precomputed_distribution (ShardDistribution): distribution computed with + `determine_main_replica_uniform_distribution` + + Returns: None + + Example replica ids of tensors A, B, C before distribution: + rank0: A: (0, 0, 0), B: (0, 0, 0), C: (0, 0, 0) + rank1: A: (0, 0, 1), B: (0, 0, 1), C: (0, 0, 1) + rank2: A: (0, 0, 2), B: (0, 0, 2), C: (0, 0, 2) + + Replicas after distribution for the example above: + rank0: A: 0, B: 1, C: 1 + rank1: A: 1, B: 0, C: 1 + rank2: A: 1, B: 1, C: 0 + """ + if torch.distributed.get_world_size(group=parallelization_group) <= 1: + return + if precomputed_distribution is None: + raise ValueError( + 'precomputed_distribution must be not None for non-trivial parallelization group' + ) + + local_shards = list( + sh_base + for sh_base in nested_values(sharded_state_dict) + if isinstance(sh_base, ShardedTensor) + ) + + rank_within_dp_group = torch.distributed.get_rank(parallelization_group) + for sh_ten in local_shards: + shard_id = _sharded_tensor_shard_id(sh_ten) + if ( + shard_id in precomputed_distribution.shards_in_this_group + and rank_within_dp_group == precomputed_distribution.main_rank_for_shard[shard_id] + ): + sh_ten.replica_id = 0 + else: + sh_ten.replica_id = 1 + + +def _defer_loading_sharded_items( + sharded_state_dict: ShardedStateDict, item_type: type, shard_id_func: Callable[[T], _ShardId] +) -> Tuple[ShardedStateDict, ShardedStateDict, Dict[_ShardId, T], Dict[_ShardId, T]]: + """Divides state dict into parts loaded by this vs other ranks. + + Args: + sharded_state_dict (ShardedStateDict): state dict with sharded items + that will be divided. + item_type: The type of sharded item (ShardedObject or ShardedTensor) + shard_id_func: Function to get the shard ID for the item type + + Returns: a tuple of: + - ShardedStateDict: sub-state dict only with sharded items + - ShardedStateDict: sub-state dict with non-sharded items + - Dict[_ShardId, T]: mapping from shard id to items loaded by *this* rank + - Dict[_ShardId, T]: mapping from shard id to items loaded by *other* ranks + """ + to_load_shards = {} + unloaded_shards = {} + + sharded_items, remaining_state_dict = extract_matching_values( + sharded_state_dict, lambda v: isinstance(v, item_type) + ) + + def wrap_non_main_replicas(x: Any) -> Any: + if isinstance(x, item_type): + shard_id = shard_id_func(x) + if is_main_replica(x.replica_id): + to_load_shards[shard_id] = x + else: + unloaded_shards[shard_id] = x + return x + + dict_list_map_inplace(wrap_non_main_replicas, sharded_items) + return sharded_items, remaining_state_dict, to_load_shards, unloaded_shards + + +def _fill_in_deferred_sharded_items( + sharded_state_dict: ShardedStateDict, + loaded_items: Dict[_ShardId, Any], + item_type: type, + shard_id_func: Callable[[T], _ShardId], +) -> None: + """Helper function to fill in items not loaded by current rank.""" + + def fill_in_sharded_item(x: Any) -> Any: + if isinstance(x, item_type): + try: + x = loaded_items[shard_id_func(x)] + except KeyError as e: + raise CheckpointingException( + f'Missing loaded item shard: {shard_id_func(x)}' + ) from e + return x + + dict_list_map_inplace(fill_in_sharded_item, sharded_state_dict) diff --git a/megatron/core/dist_checkpointing/strategies/resharding.py b/megatron/core/dist_checkpointing/strategies/resharding.py index c1c2bce..6b5aeb2 100644 --- a/megatron/core/dist_checkpointing/strategies/resharding.py +++ b/megatron/core/dist_checkpointing/strategies/resharding.py 
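# Hedged usage sketch for the wrappers defined in fully_parallel.py above (assuming the megatron
# package is importable): both wrap an existing sharded strategy and take a parallelization group,
# typically the data-parallel group. The base strategies passed in are placeholders supplied by
# the caller; constructor arguments match the signatures shown in the diff.
import torch.distributed as dist

from megatron.core.dist_checkpointing.strategies.fully_parallel import (
    FullyParallelLoadStrategyWrapper,
    FullyParallelSaveStrategyWrapper,
)


def _wrap_strategies(base_save_strategy, base_load_strategy, data_parallel_group=None):
    group = data_parallel_group or dist.GroupMember.WORLD
    save_strategy = FullyParallelSaveStrategyWrapper(
        base_save_strategy, parallelization_group=group, do_cache_distribution=True
    )
    load_strategy = FullyParallelLoadStrategyWrapper(
        base_load_strategy, parallelization_group=group, exchange_algo='broadcast'
    )
    return save_strategy, load_strategy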
@@ -1,315 +1,318 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -""" Performant resharding of flattened tensors. - -Tensors that are first sharded (e.g. across TP) and then flattened cause -very irregular access patterns during loading. The idea for performant save/load -is to store tensors with global shape [X, Y, Z] and local shape [x, y, z] -as tensors with global shape [X // x, Y // y, Z // z, x * y * z] and -local shape [1, 1, 1, x * y * z]. This allows parallel save of tensors along the -last (flattened) dimension. During loading, some additional resharding is needed. -""" -import logging -import math -from dataclasses import dataclass -from itertools import product -from typing import Any, Dict, Optional, Tuple, Union - -import numpy as np -import torch -from torch.distributed.checkpoint import ChunkStorageMetadata -from torch.distributed.checkpoint.resharding import _shards_get_overlap_region_wrt_saved_tensor - -from megatron.core.dist_checkpointing import ShardedTensor -from megatron.core.dist_checkpointing.core import CheckpointingException -from megatron.core.dist_checkpointing.dict_utils import ( - dict_list_map_inplace, - extract_matching_values, -) -from megatron.core.dist_checkpointing.mapping import ( - ReplicaId, - ShardedStateDict, - ShardedTensorFactory, - StateDict, - apply_factories, - apply_factory_merges, -) - -logger = logging.getLogger(__name__) - - -@dataclass -class TensorReformulationMetadata: - """Metadata needed to restore the original tensor shape. - - Args: - ckpt_orig_global_shape (Tuple[int, ...]): original global shape of the tensor - saved in the checkpoint. This is the global shape of the application, - further reformulated into `ckpt_reform_global_shape` while saving. - ckpt_reform_global_shape (Tuple[int, ...]): reformulated global shape of the tensor - saved in the checkpoint. This is the actual saved shape. - """ - - ckpt_orig_global_shape: Tuple[int, ...] - ckpt_reform_global_shape: Tuple[int, ...] - - def __post_init__(self): - assert self.ckpt_orig_global_shape - - -def nd_flattened_tensor_reformulated_global_shape(sh_ten: ShardedTensor) -> Tuple[int, ...]: - """Reformulated global shape of the flattened N-D ShardedTensor. - - N-D tensor global shape [X, Y, Z] and local shape [x, y, z] - is reformulated into global shape [X // x, Y // y, Z // z, x * y * z] and - local shape [1, 1, 1, x * y * z], to allow parallel save of tensors along the - last (flattened) dimension. - - Args: - sh_ten (ShardedTensor): flattened N-D ShardedTensor (N > 1) - - Returns: - Tuple[int, ...]: reformulated tensor shape - """ - assert is_nd_flattened_tensor(sh_ten), sh_ten - return sh_ten.axis_fragmentations + (int(np.prod(sh_ten.local_shape)),) - - -def is_nd_flattened_tensor(sh_ten: Any) -> bool: - """Checks if ShardedTensor is flattened and more than 1-dimensional - - Args: - sh_ten (Any): any object - - Returns: - bool: whether the given object is a flattened ShardedTensor and is N-dimensional (N > 1) - """ - return ( - isinstance(sh_ten, ShardedTensor) - and sh_ten.flattened_range is not None - and len(sh_ten.global_shape) > 1 - ) - - -# information needed to restore. 
With current implementation, this is a nested state dict -# with ShardedTensorFactories which is basically a ShardedStateDict type -ReformulationRestoreMetadata = ShardedStateDict - - -def apply_nd_flattened_tensors_reformulation( - sharded_state_dict: ShardedStateDict, - reformulation_metadata: Dict[str, TensorReformulationMetadata], -) -> Tuple[ShardedStateDict, ReformulationRestoreMetadata]: - """Applies N-D reformulation to a given sharded state dict. - - After applying the method and loading the reformulated state dict, - the `restore_nd_flattened_tensors_formulation` needs to be applied. - - Current implementation uses ShardedTensorFactories for convenience of - restoring the original structure, but it's just an implementation detail. - Turns N-D ShardedTensors into factories and immediately applies them, - keeping the data needed to restore the original structure. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict potentially - with tensors to reformulate. - reformulation_metadata (Dict[str, TensorReformulationMetadata]): dict - containing all metadata needed for reformulating tensors in `sharded_state_dict`. - for each N-D flattened tensor `sh_ten` in `sharded_state_dict` there must be an - entry with `sh_ten.key`. - - Returns: - tuple: - ShardedStateDict - reformulated sharded state dict - ReformulationRestoreMetadata - data needed to restore the original formulation - with `restore_nd_flattened_tensors_formulation` - """ - - def maybe_reformulate_nd_flattened_tensor(sh_ten: Any): - if not isinstance(sh_ten, ShardedTensor) or not is_nd_flattened_tensor(sh_ten): - return sh_ten - # N-D flattened ShardedTensor - try: - sh_ten_reformulation_metadata = reformulation_metadata[sh_ten.key] - except KeyError as e: - raise CheckpointingException( - f'Missing reformulation metadata for tensor {sh_ten}. Existing keys: {reformulation_metadata.keys()}' - ) from e - - ckpt_actual_saved_shape = sh_ten_reformulation_metadata.ckpt_reform_global_shape - app_actual_load_shape = nd_flattened_tensor_reformulated_global_shape(sh_ten) - if ckpt_actual_saved_shape == app_actual_load_shape: - # Same shape - no need to reshard - return sh_ten - - return reformulate_single_nd_flattened_tensor(sh_ten, sh_ten_reformulation_metadata) - - # Turn N-D tensors into factories and immediately apply them - dict_list_map_inplace(maybe_reformulate_nd_flattened_tensor, sharded_state_dict) - sh_ten_factories, _ = extract_matching_values( - sharded_state_dict, - lambda x: isinstance(x, ShardedTensorFactory), - return_lists_as_dicts=True, - ) - apply_factories(sharded_state_dict) - - # Unlink `data` pointers to free memory - def unlink_data(x): - x.data = None - return x - - dict_list_map_inplace(unlink_data, sh_ten_factories) - return sharded_state_dict, sh_ten_factories - - -def restore_nd_flattened_tensors_formulation( - state_dict: StateDict, formulation_restore_metadata: ReformulationRestoreMetadata -) -> StateDict: - """Restores the original state dict from a reformulated form. - - Inverse of `apply_nd_flattened_tensors_reformulation`. - - Args: - state_dict (StateDict): state dict obtained by loading a reformulated - sharded state dict. 
- formulation_restore_metadata (ReformulationRestoreMetadata): metadata returned by - `apply_nd_flattened_tensors_reformulation` function - - Returns: - StateDict: state dict with the original tensors formulation restored - """ - return apply_factory_merges(state_dict, formulation_restore_metadata) - - -def reformulate_single_nd_flattened_tensor( - sh_ten: ShardedTensor, reformulation_metadata: TensorReformulationMetadata -) -> Union[Any, ShardedTensorFactory]: - """Reformulates shapes of a single N-D flattened ShardedTensor. - - We need to define a pair of transformations: - - turn N-D ShardedTensor with original formulation into multiple reformulated ShardedTensors - - merge multiple reformulated loaded torch.Tensors into a single original tensor - Current implementation uses ShardedTensorFactories as a convenient mechanism - for specifying and keeping track of those transformations. - - Args: - sh_ten (ShardedTensor): sharded tensor to reformulate. - reformulation_metadata (TensorReformulationMetadata): metadata needed to - perform the reformulation - - Returns: - ShardedTensorFactory: factory that keeps information how to reformulate - (build) the ShardedTensor and then restore original formulation (merge) - after loading. - """ - rmd = reformulation_metadata - # Data won't be needed - remove unnecessary tensor references - sh_ten = sh_ten.without_data() - - # Based on reformulation_metadata, determine other tensor shapes and metadata - ckpt_axis_fragmentation = rmd.ckpt_reform_global_shape[:-1] - for sh, fragm in zip(rmd.ckpt_orig_global_shape, ckpt_axis_fragmentation): - assert sh % fragm == 0, (sh_ten, rmd.ckpt_reform_global_shape) - ckpt_local_shape_with_prepended_axis = tuple( - sh // fragm for sh, fragm in zip(rmd.ckpt_orig_global_shape, ckpt_axis_fragmentation) - ) - assert ( - ckpt_local_shape_with_prepended_axis[: sh_ten.prepend_axis_num] - == (1,) * sh_ten.prepend_axis_num - ), (ckpt_local_shape_with_prepended_axis, sh_ten) - ckpt_local_shape = ckpt_local_shape_with_prepended_axis[sh_ten.prepend_axis_num :] - - # Iterate over reformulated shapes needed by the application and from checkpoint, - # and generate new ShardedTensors that match the checkpoint sharding. 
- overlap_dim_offsets = [] - assert len(ckpt_axis_fragmentation) == len(sh_ten.axis_fragmentations), ( - ckpt_axis_fragmentation, - sh_ten, - ) - for dim, (app_chunk_dim_offset, ckpt_fragm, app_fragm) in enumerate( - zip( - sh_ten.local_chunk_offset_in_global(), - ckpt_axis_fragmentation, - sh_ten.axis_fragmentations, - ) - ): - # without `int`, it's an exact offset of the app shard expressed in ckpt_local_shape units - first_overlap_dim_offset = int(ckpt_fragm / app_fragm * app_chunk_dim_offset) - # `math.ceil` argument is an exact offset of the app next shard expressed in ckpt_local_shape units - next_overlap_dim_offset = math.ceil(ckpt_fragm / app_fragm * (app_chunk_dim_offset + 1)) - overlap_dim_offsets.append(range(first_overlap_dim_offset, next_overlap_dim_offset)) - - logger.debug( - f'Generated the following number of overlap shards for each dimension: {list(map(len, overlap_dim_offsets))}' - f' for fragmentation ckpt {ckpt_axis_fragmentation} vs app {sh_ten.axis_fragmentations} and chunk offset {sh_ten.local_chunk_offset_in_global()}' - ) - reformulated_sh_tens = {} - for chunk_offset in product(*overlap_dim_offsets): - global_offset = tuple( - chunk_off * chunk_shape - for chunk_off, chunk_shape in zip(chunk_offset, ckpt_local_shape_with_prepended_axis) - ) - reformulated_sh_tens[(global_offset, ckpt_local_shape)] = ShardedTensor( - sh_ten.key, - None, - sh_ten.dtype, - ckpt_local_shape, - rmd.ckpt_orig_global_shape, - global_offset, - ckpt_axis_fragmentation, - sh_ten.replica_id, - sh_ten.prepend_axis_num, - sh_ten.allow_shape_mismatch, - flattened_range=slice(0, rmd.ckpt_reform_global_shape[-1]), # whole ckpt shard - ) - - # Now, we have to define the transformations from application sharding - # to checkpoint sharding. - - @torch.no_grad() - def sh_ten_build_fn(*args, **kwargs): - # Here we simply return the precomputed tensors. - return reformulated_sh_tens - - @torch.no_grad() - def sh_ten_merge_fn(sub_state_dict): - # This is the non-flattened local tensor with original formulation - # that we are going to fill with shards loaded from the checkpoint. - app_non_flat_ten = torch.empty( - sh_ten.local_shape, - dtype=sh_ten.dtype, - device=sh_ten.data.device if sh_ten.data is not None else None, - ) - - assert len(sub_state_dict) > 0 - for (ckpt_global_offset, ckpt_local_shape), ckpt_ten in sub_state_dict.items(): - # For each ckpt shard, we fill the appropriate application shard part - dest_ten = app_non_flat_ten - src_ten = ckpt_ten.view(ckpt_local_shape) - # We don't need narrowing over `prepend_axis_num` axes so we take the [sh_ten.prepend_axis_num:] offsets slice - for ( - dim, - offset_for_saved_tensor, - offset_for_current_tensor, - length, - ) in _shards_get_overlap_region_wrt_saved_tensor( - saved_shard=ChunkStorageMetadata( - ckpt_global_offset[sh_ten.prepend_axis_num :], ckpt_local_shape - ), - current_shard=ChunkStorageMetadata( - sh_ten.global_offset[sh_ten.prepend_axis_num :], sh_ten.local_shape - ), - ): - src_ten = src_ten.narrow(dim, offset_for_saved_tensor, length) - dest_ten = dest_ten.narrow(dim, offset_for_current_tensor, length) - dest_ten.copy_(src_ten) - return app_non_flat_ten.flatten()[sh_ten.flattened_range] - - return ShardedTensorFactory( - sh_ten.key, - sh_ten.data, - sh_ten_build_fn, - sh_ten_merge_fn, - sh_ten.replica_id, - sh_ten.flattened_range, - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +""" Performant resharding of flattened tensors. + +Tensors that are first sharded (e.g. 
across TP) and then flattened cause +very irregular access patterns during loading. The idea for performant save/load +is to store tensors with global shape [X, Y, Z] and local shape [x, y, z] +as tensors with global shape [X // x, Y // y, Z // z, x * y * z] and +local shape [1, 1, 1, x * y * z]. This allows parallel save of tensors along the +last (flattened) dimension. During loading, some additional resharding is needed. +""" +import logging +import math +from dataclasses import dataclass +from itertools import product +from typing import Any, Dict, Tuple, Union + +import numpy as np +import torch +from torch.distributed.checkpoint import ChunkStorageMetadata +from torch.distributed.checkpoint.resharding import _shards_get_overlap_region_wrt_saved_tensor + +from megatron.core.dist_checkpointing import ShardedTensor +from megatron.core.dist_checkpointing.core import CheckpointingException +from megatron.core.dist_checkpointing.dict_utils import ( + dict_list_map_inplace, + extract_matching_values, +) +from megatron.core.dist_checkpointing.mapping import ( + ShardedStateDict, + ShardedTensorFactory, + StateDict, + apply_factories, + apply_factory_merges, +) + +logger = logging.getLogger(__name__) + + +@dataclass +class TensorReformulationMetadata: + """Metadata needed to restore the original tensor shape. + + Args: + ckpt_orig_global_shape (Tuple[int, ...]): original global shape of the tensor + saved in the checkpoint. This is the global shape of the application, + further reformulated into `ckpt_reform_global_shape` while saving. + ckpt_reform_global_shape (Tuple[int, ...]): reformulated global shape of the tensor + saved in the checkpoint. This is the actual saved shape. + """ + + ckpt_orig_global_shape: Tuple[int, ...] + ckpt_reform_global_shape: Tuple[int, ...] + + def __post_init__(self): + assert self.ckpt_orig_global_shape + + +def nd_flattened_tensor_reformulated_global_shape(sh_ten: ShardedTensor) -> Tuple[int, ...]: + """Reformulated global shape of the flattened N-D ShardedTensor. + + N-D tensor global shape [X, Y, Z] and local shape [x, y, z] + is reformulated into global shape [X // x, Y // y, Z // z, x * y * z] and + local shape [1, 1, 1, x * y * z], to allow parallel save of tensors along the + last (flattened) dimension. + + Args: + sh_ten (ShardedTensor): flattened N-D ShardedTensor (N > 1) + + Returns: + Tuple[int, ...]: reformulated tensor shape + """ + assert is_nd_flattened_tensor(sh_ten), sh_ten + return sh_ten.axis_fragmentations + (int(np.prod(sh_ten.local_shape)),) + + +def is_nd_flattened_tensor(sh_ten: Any) -> bool: + """Checks if ShardedTensor is flattened and more than 1-dimensional + + Args: + sh_ten (Any): any object + + Returns: + bool: whether the given object is a flattened ShardedTensor and is N-dimensional (N > 1) + """ + return isinstance(sh_ten, ShardedTensor) and sh_ten.flattened_range is not None + + +# information needed to restore. With current implementation, this is a nested state dict +# with ShardedTensorFactories which is basically a ShardedStateDict type +ReformulationRestoreMetadata = ShardedStateDict + + +def apply_nd_flattened_tensors_reformulation( + sharded_state_dict: ShardedStateDict, + reformulation_metadata: Dict[str, TensorReformulationMetadata], +) -> Tuple[ShardedStateDict, ReformulationRestoreMetadata]: + """Applies N-D reformulation to a given sharded state dict. + + After applying the method and loading the reformulated state dict, + the `restore_nd_flattened_tensors_formulation` needs to be applied. 
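To make the shape arithmetic described in the module docstring concrete, here is a minimal, self-contained sketch using plain tuples instead of ShardedTensor; the helper name and the numbers are illustrative only.

import numpy as np

def reformulated_global_shape(global_shape, local_shape):
    # [X, Y, Z] global with [x, y, z] local chunks becomes
    # [X // x, Y // y, Z // z, x * y * z]: one cell per local chunk plus a
    # flattened last axis that each rank can write in parallel.
    axis_fragmentations = tuple(g // l for g, l in zip(global_shape, local_shape))
    return axis_fragmentations + (int(np.prod(local_shape)),)

# An [8, 16, 4] tensor sharded into [4, 4, 4] local chunks is saved as a
# [2, 4, 1, 64] tensor; each rank owns a [1, 1, 1, 64] slice along the last axis.
assert reformulated_global_shape((8, 16, 4), (4, 4, 4)) == (2, 4, 1, 64)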
+ + Current implementation uses ShardedTensorFactories for convenience of + restoring the original structure, but it's just an implementation detail. + Turns N-D ShardedTensors into factories and immediately applies them, + keeping the data needed to restore the original structure. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict potentially + with tensors to reformulate. + reformulation_metadata (Dict[str, TensorReformulationMetadata]): dict + containing all metadata needed for reformulating tensors in `sharded_state_dict`. + for each N-D flattened tensor `sh_ten` in `sharded_state_dict` there must be an + entry with `sh_ten.key`. + + Returns: + tuple: + ShardedStateDict - reformulated sharded state dict + ReformulationRestoreMetadata - data needed to restore the original formulation + with `restore_nd_flattened_tensors_formulation` + """ + + def maybe_reformulate_nd_flattened_tensor(sh_ten: Any): + if not isinstance(sh_ten, ShardedTensor) or not is_nd_flattened_tensor(sh_ten): + return sh_ten + # N-D flattened ShardedTensor + try: + sh_ten_reformulation_metadata = reformulation_metadata[sh_ten.key] + except KeyError as e: + # Handle legacy checkpointing where 1-D flatten tensor metadata was not saved + if len(sh_ten.global_shape) == 1: + return sh_ten + raise CheckpointingException( + f'Missing reformulation metadata for tensor {sh_ten}. ' + f'Existing keys: {reformulation_metadata.keys()}' + ) from e + + ckpt_actual_saved_shape = sh_ten_reformulation_metadata.ckpt_reform_global_shape + app_actual_load_shape = nd_flattened_tensor_reformulated_global_shape(sh_ten) + if ckpt_actual_saved_shape == app_actual_load_shape: + # Same shape - no need to reshard + return sh_ten + + return reformulate_single_nd_flattened_tensor(sh_ten, sh_ten_reformulation_metadata) + + # Turn N-D tensors into factories and immediately apply them + dict_list_map_inplace(maybe_reformulate_nd_flattened_tensor, sharded_state_dict) + sh_ten_factories, _ = extract_matching_values( + sharded_state_dict, + lambda x: isinstance(x, ShardedTensorFactory), + return_lists_as_dicts=True, + ) + apply_factories(sharded_state_dict) + + # Unlink `data` pointers to free memory + def unlink_data(x): + x.data = None + return x + + dict_list_map_inplace(unlink_data, sh_ten_factories) + return sharded_state_dict, sh_ten_factories + + +def restore_nd_flattened_tensors_formulation( + state_dict: StateDict, formulation_restore_metadata: ReformulationRestoreMetadata +) -> StateDict: + """Restores the original state dict from a reformulated form. + + Inverse of `apply_nd_flattened_tensors_reformulation`. + + Args: + state_dict (StateDict): state dict obtained by loading a reformulated + sharded state dict. + formulation_restore_metadata (ReformulationRestoreMetadata): metadata returned by + `apply_nd_flattened_tensors_reformulation` function + + Returns: + StateDict: state dict with the original tensors formulation restored + """ + return apply_factory_merges(state_dict, formulation_restore_metadata) + + +def reformulate_single_nd_flattened_tensor( + sh_ten: ShardedTensor, reformulation_metadata: TensorReformulationMetadata +) -> Union[Any, ShardedTensorFactory]: + """Reformulates shapes of a single N-D flattened ShardedTensor. 
+ + We need to define a pair of transformations: + - turn N-D ShardedTensor with original formulation into multiple reformulated ShardedTensors + - merge multiple reformulated loaded torch.Tensors into a single original tensor + Current implementation uses ShardedTensorFactories as a convenient mechanism + for specifying and keeping track of those transformations. + + Args: + sh_ten (ShardedTensor): sharded tensor to reformulate. + reformulation_metadata (TensorReformulationMetadata): metadata needed to + perform the reformulation + + Returns: + ShardedTensorFactory: factory that keeps information how to reformulate + (build) the ShardedTensor and then restore original formulation (merge) + after loading. + """ + rmd = reformulation_metadata + # Data won't be needed - remove unnecessary tensor references + sh_ten = sh_ten.without_data() + + # Based on reformulation_metadata, determine other tensor shapes and metadata + ckpt_axis_fragmentation = rmd.ckpt_reform_global_shape[:-1] + for sh, fragm in zip(rmd.ckpt_orig_global_shape, ckpt_axis_fragmentation): + assert sh % fragm == 0, (sh_ten, rmd.ckpt_reform_global_shape) + ckpt_local_shape_with_prepended_axis = tuple( + sh // fragm for sh, fragm in zip(rmd.ckpt_orig_global_shape, ckpt_axis_fragmentation) + ) + assert ( + ckpt_local_shape_with_prepended_axis[: sh_ten.prepend_axis_num] + == (1,) * sh_ten.prepend_axis_num + ), (ckpt_local_shape_with_prepended_axis, sh_ten) + ckpt_local_shape = ckpt_local_shape_with_prepended_axis[sh_ten.prepend_axis_num :] + + # Iterate over reformulated shapes needed by the application and from checkpoint, + # and generate new ShardedTensors that match the checkpoint sharding. + overlap_dim_offsets = [] + assert len(ckpt_axis_fragmentation) == len(sh_ten.axis_fragmentations), ( + ckpt_axis_fragmentation, + sh_ten, + ) + for dim, (app_chunk_dim_offset, ckpt_fragm, app_fragm) in enumerate( + zip( + sh_ten.local_chunk_offset_in_global(), + ckpt_axis_fragmentation, + sh_ten.axis_fragmentations, + ) + ): + # without `int`, it's an exact offset of the app shard expressed in ckpt_local_shape units + first_overlap_dim_offset = int(ckpt_fragm / app_fragm * app_chunk_dim_offset) + # `math.ceil` argument is an exact offset of the app next shard expressed + # in ckpt_local_shape units + next_overlap_dim_offset = math.ceil(ckpt_fragm / app_fragm * (app_chunk_dim_offset + 1)) + overlap_dim_offsets.append(range(first_overlap_dim_offset, next_overlap_dim_offset)) + + logger.debug( + f'Generated the following number of overlap shards for each dimension: ' + f'{list(map(len, overlap_dim_offsets))} for fragmentation ckpt ' + f'{ckpt_axis_fragmentation} vs app {sh_ten.axis_fragmentations} ' + f'and chunk offset {sh_ten.local_chunk_offset_in_global()}' + ) + reformulated_sh_tens = {} + for chunk_offset in product(*overlap_dim_offsets): + global_offset = tuple( + chunk_off * chunk_shape + for chunk_off, chunk_shape in zip(chunk_offset, ckpt_local_shape_with_prepended_axis) + ) + reformulated_sh_tens[(global_offset, ckpt_local_shape)] = ShardedTensor( + sh_ten.key, + None, + sh_ten.dtype, + ckpt_local_shape, + rmd.ckpt_orig_global_shape, + global_offset, + ckpt_axis_fragmentation, + sh_ten.replica_id, + sh_ten.prepend_axis_num, + sh_ten.allow_shape_mismatch, + flattened_range=slice(0, rmd.ckpt_reform_global_shape[-1]), # whole ckpt shard + ) + + # Now, we have to define the transformations from application sharding + # to checkpoint sharding. 
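A small numeric illustration of the overlap-offset computation above, as a standalone sketch (the fragmentation counts and offsets are made up): with 8 checkpoint chunks and 2 application chunks along the first dimension, application chunk (1, 0) has to read checkpoint chunks 4 through 7.

import math
from itertools import product

def overlapping_ckpt_chunk_offsets(ckpt_fragmentations, app_fragmentations, app_chunk_offset):
    # Per dimension, find the range of checkpoint chunks overlapping the given
    # application chunk, then enumerate their cartesian product.
    per_dim_ranges = []
    for ckpt_fragm, app_fragm, app_off in zip(
        ckpt_fragmentations, app_fragmentations, app_chunk_offset
    ):
        first = int(ckpt_fragm / app_fragm * app_off)
        nxt = math.ceil(ckpt_fragm / app_fragm * (app_off + 1))
        per_dim_ranges.append(range(first, nxt))
    return list(product(*per_dim_ranges))

# Checkpoint saved with an (8, 2) chunk grid, application loading with a (2, 2) grid:
# application chunk (1, 0) overlaps checkpoint chunks (4, 0) through (7, 0).
assert overlapping_ckpt_chunk_offsets((8, 2), (2, 2), (1, 0)) == [
    (4, 0), (5, 0), (6, 0), (7, 0)
]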
+ + @torch.no_grad() + def sh_ten_build_fn(*args, **kwargs): + # Here we simply return the precomputed tensors. + return reformulated_sh_tens + + @torch.no_grad() + def sh_ten_merge_fn(sub_state_dict): + # This is the non-flattened local tensor with original formulation + # that we are going to fill with shards loaded from the checkpoint. + app_non_flat_ten = torch.empty( + sh_ten.local_shape, + dtype=sh_ten.dtype, + device=sh_ten.data.device if sh_ten.data is not None else None, + ) + + assert len(sub_state_dict) > 0 + for (ckpt_global_offset, ckpt_local_shape), ckpt_ten in sub_state_dict.items(): + # For each ckpt shard, we fill the appropriate application shard part + dest_ten = app_non_flat_ten + src_ten = ckpt_ten.view(ckpt_local_shape) + # We don't need narrowing over `prepend_axis_num` axes so we take + # the [sh_ten.prepend_axis_num:] offsets slice + for ( + dim, + offset_for_saved_tensor, + offset_for_current_tensor, + length, + ) in _shards_get_overlap_region_wrt_saved_tensor( + saved_shard=ChunkStorageMetadata( + ckpt_global_offset[sh_ten.prepend_axis_num :], ckpt_local_shape + ), + current_shard=ChunkStorageMetadata( + sh_ten.global_offset[sh_ten.prepend_axis_num :], sh_ten.local_shape + ), + ): + src_ten = src_ten.narrow(dim, offset_for_saved_tensor, length) + dest_ten = dest_ten.narrow(dim, offset_for_current_tensor, length) + dest_ten.copy_(src_ten) + return app_non_flat_ten.flatten()[sh_ten.flattened_range] + + return ShardedTensorFactory( + sh_ten.key, + sh_ten.data, + sh_ten_build_fn, + sh_ten_merge_fn, + sh_ten.replica_id, + sh_ten.flattened_range, + ) diff --git a/megatron/core/dist_checkpointing/strategies/state_dict_saver.py b/megatron/core/dist_checkpointing/strategies/state_dict_saver.py index 7b35209..200304a 100644 --- a/megatron/core/dist_checkpointing/strategies/state_dict_saver.py +++ b/megatron/core/dist_checkpointing/strategies/state_dict_saver.py @@ -1,162 +1,247 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -""" State dict saver for PyT Distributed format allowing asynchronous save. """ - -from logging import getLogger -from time import time -from typing import TYPE_CHECKING, Optional, Tuple, cast - -import torch -import torch.distributed as dist -from torch.distributed.checkpoint import CheckpointException -from torch.distributed.checkpoint.default_planner import DefaultSavePlanner -from torch.distributed.checkpoint.metadata import STATE_DICT_TYPE, Metadata -from torch.distributed.checkpoint.planner import SavePlan, SavePlanner -from torch.distributed.checkpoint.utils import _DistWrapper, _get_failure_dict - -if TYPE_CHECKING: - from .filesystem_async import FileSystemWriterAsync - - -logger = getLogger(__name__) - - -def save_state_dict_async_plan( - state_dict: STATE_DICT_TYPE, - storage_writer: 'FileSystemWriterAsync', - process_group: Optional[dist.ProcessGroup] = None, - coordinator_rank: int = 0, - planner: Optional[SavePlanner] = None, - cached_ckpt_structure: Optional[Tuple[SavePlan, SavePlan, bool]] = None, -) -> Tuple[Tuple['FileSystemWriterAsync', Metadata, _DistWrapper], SavePlan, bool]: - """ - First stage of saving a state dict to storage. - - This is an async adjustment of torch.distributed.checkpoint.state_dict_saver. - In order to support async save, saving should be split into three parts: - 1. Planning - 2. Actual saving - 3. Finalization - - Out of these, step (2) *must* happen asynchronously. - The first step is realized with this function. 
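As an aside, here is a hedged sketch of how a caller can drive all three phases with this module. It assumes an initialized torch.distributed process group and an already prepared PyT Distributed state dict; the local names are illustrative, and the write function is invoked synchronously purely for brevity (in practice it is scheduled asynchronously, e.g. through an AsyncRequest).

from megatron.core.dist_checkpointing.strategies.filesystem_async import FileSystemWriterAsync
from megatron.core.dist_checkpointing.strategies.state_dict_saver import (
    save_state_dict_async_finalize,
    save_state_dict_async_plan,
)

def async_save_sketch(pyt_state_dict, checkpoint_dir):
    writer = FileSystemWriterAsync(checkpoint_dir, thread_count=2)
    # Phase 1: planning (collective and synchronous).
    plan_result = save_state_dict_async_plan(pyt_state_dict, writer)
    storage_writer, global_metadata, dist_wrapper = plan_result[0]
    # Phase 2: the actual tensor writes; normally these run asynchronously.
    save_fn, save_args = storage_writer.get_save_function_and_args()
    save_fn(*save_args)
    # Phase 3: finalization (gather write results, coordinator writes .metadata).
    save_state_dict_async_finalize(storage_writer, global_metadata, dist_wrapper)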
- - The planning part consists of several steps, described here: - https://pytorch.org/docs/stable/distributed.checkpoint.html#torch.distributed.checkpoint.SavePlanner - - Args: - state_dict (STATE_DICT_TYPE): state dict to save - storage_writer (FileSystemWriterAsync): in current version only an instance of - FileSystemWriterAsync - process_group (dist.ProcessGroup, optional): process group used for save planning - coordinator_rank (int, optional): coordinator rank for planning. Defaults to 0. - planner (SavePlanner, optional): save planner for torch.distributed.checkpoint format - cached_ckpt_structure (Tuple[SavePlan, SavePlan, bool], Optional): - Each object of this tuple will be used in the order as following - cached_central_plan (SavePlan): a globally coordinated save plan - cached in the previous iteration - cached_local_plan (SavePlan): a local plan - cached in the previous iteration - validated_cache_reuse (bool): boolean value to tell global_metadata and planning dict - is consistent over iterations - - Returns: Tuple of: - - storage writer (the one passed as input) - - metadata from planning - - distributed wrapper used for planning - The return value of this function should be passed as an input to - `save_state_dict_async_finalize` and cached_plan to skip `reduce_scatter` at planning. - """ - cached_central_plan, cached_local_plan, validated_cache_reuse = (None, None, False) - if cached_ckpt_structure: - cached_central_plan, cached_local_plan, validated_cache_reuse = cached_ckpt_structure - - rank = torch.distributed.get_rank() if torch.distributed.is_initialized() else 0 - dist_wrapper = _DistWrapper(process_group, True, coordinator_rank) - if planner is None: - planner = DefaultSavePlanner() - assert planner is not None - - global_metadata = None - logger.debug(f"rank: {rank}, starting state dict save") - local_plan = cached_local_plan - - def local_step(): - nonlocal local_plan - assert planner is not None - # PyTorch 2.4 introduced additional `metadata` argument, - # we have to reference `is_coordinator` args by name - planner.set_up_planner(state_dict, is_coordinator=dist_wrapper.is_coordinator) - storage_writer.set_up_storage_writer(dist_wrapper.is_coordinator) - if not validated_cache_reuse and local_plan is None: - local_plan = planner.create_local_plan() - local_plan = storage_writer.prepare_local_plan(local_plan) - return local_plan - - def global_step(all_local_plans): - nonlocal global_metadata - assert planner is not None - all_local_plans, global_metadata = planner.create_global_plan(all_local_plans) - all_local_plans = storage_writer.prepare_global_plan(all_local_plans) - return all_local_plans - - # Execute local and global planning - start_plan = time() - if validated_cache_reuse and cached_central_plan: - logger.debug(f"rank: {rank}, Passed cache reusable") - local_step() - central_plan = cached_central_plan - else: - central_plan = dist_wrapper.reduce_scatter("plan", local_step, global_step) - central_plan = planner.finish_plan(central_plan) - end_plan = time() - logger.debug(f"rank: {rank}, plan time: {end_plan - start_plan}") - # Prepare async writing of tensors. 
- # The `storage_writer` will store the information about tensors it needs to save - start = time() - storage_writer.prepare_write_data(central_plan, planner) - end = time() - logger.debug(f"{time()} rank: {rank}, write(async) time: {end - start}") - return ( - (storage_writer, cast(Metadata, global_metadata), dist_wrapper), - central_plan, - local_plan, - cached_central_plan == central_plan, - ) - - -def save_state_dict_async_finalize( - storage_writer: 'FileSystemWriterAsync', global_metadata: Metadata, dist_wrapper: _DistWrapper -) -> None: - """ - Finalization of save_state_dict_async_plan. - - The input arguments are the same as the save_state_dict_async_plan output, - the `write_results` are retrieved from the storage_writer. - - Args: - storage_writer (FileSystemWriterAsync): storage writer used for planning - global_metadata (Metadata): metadata created during planning - dist_wrapper (_DistWrapper): distributed wrapper created during planning - - Returns: None - """ - write_results = storage_writer.retrieve_write_results() - - # Gather the write results that will be saved to the metadata file. - gather_start = time() - all_results = dist_wrapper.gather_object(write_results) - gather_end = time() - logger.debug(f"{gather_end}, {torch.distributed.get_rank()}, gather: {gather_end-gather_start}") - - # Store the metadata on coordinator rank - if dist_wrapper.is_coordinator: - node_failures = _get_failure_dict(all_results) - if len(node_failures) == 0: - assert global_metadata is not None - write_start = time() - storage_writer.finish(global_metadata, all_results) - write_end = time() - logger.debug(f"{write_end}, metadata_write: {write_end - write_start}") - else: - raise CheckpointException("write", node_failures) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +""" State dict saver for PyT Distributed format allowing asynchronous save. 
""" + +from logging import getLogger +from time import time +from typing import TYPE_CHECKING, List, Optional, Tuple, Union + +import torch +import torch.distributed as dist +from torch.distributed.checkpoint import CheckpointException +from torch.distributed.checkpoint.default_planner import DefaultSavePlanner +from torch.distributed.checkpoint.metadata import STATE_DICT_TYPE, Metadata +from torch.distributed.checkpoint.planner import SavePlan, SavePlanner +from torch.distributed.checkpoint.utils import _DistWrapper, _get_failure_dict + +if TYPE_CHECKING: + from .filesystem_async import FileSystemWriterAsync + from .torch import MCoreSavePlanner + + +logger = getLogger(__name__) + +from dataclasses import fields + + +def _compare_dataclasses(obj1, obj2): + if type(obj1) != type(obj2): + return f"Objects are of different types: {type(obj1)} and {type(obj2)}" + + differences = [] + for field in fields(obj1): + value1 = getattr(obj1, field.name) + value2 = getattr(obj2, field.name) + if value1 != value2: + differences.append(f"{field.name}: {value1} != {value2}") + + return differences if differences else "All fields are equal" + + +def save_state_dict_async_plan( + state_dict: STATE_DICT_TYPE, + storage_writer: 'FileSystemWriterAsync', + process_group: Optional[dist.ProcessGroup] = None, + coordinator_rank: int = 0, + planner: Optional[Union[SavePlanner, 'MCoreSavePlanner']] = None, + cached_ckpt_structure: Optional[Tuple[SavePlan, SavePlan, bool]] = None, + loaded_all_plans: Optional[List[SavePlan]] = None, +) -> Tuple[Tuple['FileSystemWriterAsync', Union[Metadata, None], _DistWrapper], SavePlan, bool]: + """ + First stage of saving a state dict to storage. + + This is an async adjustment of torch.distributed.checkpoint.state_dict_saver. + In order to support async save, saving should be split into three parts: + 1. Planning + 2. Actual saving + 3. Finalization + + Out of these, step (2) *must* happen asynchronously. + The first step is realized with this function. + + The planning part consists of several steps, described here: + https://pytorch.org/docs/stable/distributed.checkpoint.html#torch.distributed.checkpoint.SavePlanner + + Args: + state_dict (STATE_DICT_TYPE): state dict to save + storage_writer (FileSystemWriterAsync): in current version only an instance of + FileSystemWriterAsync + process_group (dist.ProcessGroup, optional): process group used for save planning + coordinator_rank (int, optional): coordinator rank for planning. Defaults to 0. + planner (SavePlanner, optional): save planner for torch.distributed.checkpoint format + cached_ckpt_structure (Tuple[SavePlan, SavePlan, bool], Optional): + Each object of this tuple will be used in the order as following + cached_central_plan (SavePlan): a globally coordinated save plan + cached in the previous iteration + cached_local_plan (SavePlan): a local plan + cached in the previous iteration + validated_cache_reuse (bool): boolean value to tell global_metadata and planning dict + is consistent over iterations + + Returns: Tuple of: + - storage writer (the one passed as input) + - metadata from planning (or None if we reuse cached global metadata) + - distributed wrapper used for planning + The return value of this function should be passed as an input to + `save_state_dict_async_finalize` and cached_plan to skip `reduce_scatter` at planning. 
+ """ + cached_central_plan, cached_local_plan, validated_cache_reuse = (None, None, False) + if cached_ckpt_structure: + cached_central_plan, cached_local_plan, validated_cache_reuse = cached_ckpt_structure + + rank = torch.distributed.get_rank() if torch.distributed.is_initialized() else 0 + dist_wrapper = _DistWrapper(process_group, True, coordinator_rank) + if planner is None: + planner = DefaultSavePlanner() + assert planner is not None + + global_metadata = None + logger.debug(f"rank: {rank}, starting state dict save") + local_plan = cached_local_plan + global_md_verify_reuse = False + + def local_step(): + nonlocal local_plan + assert planner is not None + # PyTorch 2.4 introduced additional `metadata` argument, + # we have to reference `is_coordinator` args by name + planner.set_up_planner(state_dict, is_coordinator=dist_wrapper.is_coordinator) + storage_writer.set_up_storage_writer(dist_wrapper.is_coordinator) + if not validated_cache_reuse and local_plan is None: + local_plan = planner.create_local_plan() + local_plan = storage_writer.prepare_local_plan(local_plan) + return local_plan + + def global_step(all_local_plans): + nonlocal global_metadata + assert planner is not None + all_local_plans, global_metadata = planner.create_global_plan(all_local_plans) + all_local_plans = storage_writer.prepare_global_plan(all_local_plans) + return all_local_plans + + # Execute local and global planning + # Ideally we want to use the cached plan. Otherwise if the planner and storage_writer + # allow it (`can_run_decentralized_global_plan`) we gather the plans to create + # the metadata but prepare the plans independently on each rank. + # In the worst case we have to reduce_scatter all the plans. + start_plan = time() + if validated_cache_reuse and cached_central_plan: + logger.debug(f"rank: {rank}, Passed cache reusable") + local_step() + central_plan = cached_central_plan + elif getattr(planner, 'can_run_decentralized_global_plan', False) and getattr( + storage_writer, 'can_run_decentralized_global_plan', False + ): + local_plan = local_step() + global_md_verify_reuse = verify_global_md_reuse( + loaded_all_plans, local_plan, rank, dist_wrapper + ) + + if not loaded_all_plans or not global_md_verify_reuse: + all_local_plans = dist_wrapper.gather_object(local_plan) + if dist_wrapper.is_coordinator: + _, global_metadata = planner.create_global_plan(all_local_plans) + global_metadata.all_local_plans = all_local_plans + else: + logger.debug(f"rank: {rank}, Passed cached global metadata") + global_metadata = None + local_plan = planner.create_decentralized_global_plan(local_plan) + local_plan = storage_writer.prepare_decentralized_global_plan(local_plan) + central_plan = local_plan + else: + central_plan = dist_wrapper.reduce_scatter("plan", local_step, global_step) + central_plan = planner.finish_plan(central_plan) + end_plan = time() + logger.debug(f"rank: {rank}, plan time: {end_plan - start_plan}") + # Prepare async writing of tensors. 
+ # The `storage_writer` will store the information about tensors it needs to save + start = time() + storage_writer.prepare_write_data(central_plan, planner) + end = time() + logger.debug(f"{time()} rank: {rank}, write(async) time: {end - start}") + return ( + (storage_writer, global_metadata, dist_wrapper), + central_plan, + local_plan, + cached_central_plan == central_plan, + global_md_verify_reuse, + ) + + +def verify_global_md_reuse( + loaded_all_plans: List[SavePlan], local_plan: SavePlan, rank: int, dist_wrapper: _DistWrapper +) -> bool: + """ + Verifies that global metadata reuse is possible by checking the loaded plans from the + checkpoint are consistent, which means we have the same settings when resuming training. + Args: + loaded_all_plans: List[SavePlan], The loaded plans from the checkpoint + (stored in checkpoint metadata). + local_plan: SavePlan, The local save plan. + rank: Current process rank. + dist_wrapper (_DistWrapper): distributed wrapper created during planning + + Returns: True iff the global metadata reuse is possible. + + """ + logger.debug(f"verifying reuse of global metadata") + if not loaded_all_plans: + global_md_verify_reuse = False + logger.debug("loaded global metadata reuse verification: no loaded plans passed") + + elif len(loaded_all_plans) == dist_wrapper.get_world_size(): + local_verify_reuse = all( + getattr(local_plan, f.name) == getattr(loaded_all_plans[rank], f.name) + for f in fields(local_plan) + if f.name != 'storage_data' + ) + + if not local_verify_reuse: + logger.debug( + f"local_verify_reuse is False: diffs -" + f" {_compare_dataclasses(local_plan, loaded_all_plans[rank])}" + ) + all_results = torch.tensor([local_verify_reuse], dtype=torch.int, device='cuda') + torch.distributed.all_reduce(all_results, op=torch.distributed.ReduceOp.MIN) + # Check if all reduced results are True + global_md_verify_reuse = all_results.item() == 1 + else: + global_md_verify_reuse = False + return global_md_verify_reuse + + +def save_state_dict_async_finalize( + storage_writer: 'FileSystemWriterAsync', global_metadata: Metadata, dist_wrapper: _DistWrapper +) -> None: + """ + Finalization of save_state_dict_async_plan. + + The input arguments are the same as the save_state_dict_async_plan output, + the `write_results` are retrieved from the storage_writer. + + Args: + storage_writer (FileSystemWriterAsync): storage writer used for planning + global_metadata (Metadata): metadata created during planning + dist_wrapper (_DistWrapper): distributed wrapper created during planning + + Returns: None + """ + write_results = storage_writer.retrieve_write_results() + + # Gather the write results that will be saved to the metadata file. 
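(Aside on the collective used in `verify_global_md_reuse` above: the MIN all-reduce over 0/1 flags is simply a logical AND across ranks, as the single-process illustration below shows; the per-rank flags are made up.)

# Each rank sets a flag saying whether its local plan matches the plan loaded
# from the checkpoint metadata; a MIN reduction is 1 only if every rank agreed.
per_rank_flags = [1, 1, 0, 1]            # e.g. rank 2 could not reuse its plan
global_md_verify_reuse = min(per_rank_flags) == 1
assert global_md_verify_reuse is False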
+ gather_start = time() + all_results = dist_wrapper.gather_object(write_results) + gather_end = time() + logger.debug(f"{gather_end}, {torch.distributed.get_rank()}, gather: {gather_end-gather_start}") + + # Store the metadata on coordinator rank + if dist_wrapper.is_coordinator: + node_failures = _get_failure_dict(all_results) + if len(node_failures) == 0: + assert global_metadata is not None + write_start = time() + storage_writer.finish(global_metadata, all_results) + write_end = time() + logger.debug(f"{write_end}, metadata_write: {write_end - write_start}") + else: + raise CheckpointException("write", node_failures) diff --git a/megatron/core/dist_checkpointing/strategies/torch.py b/megatron/core/dist_checkpointing/strategies/torch.py index ea95254..ec647eb 100644 --- a/megatron/core/dist_checkpointing/strategies/torch.py +++ b/megatron/core/dist_checkpointing/strategies/torch.py @@ -1,939 +1,1010 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. - -""" Strategies using PyTorch distributed.checkpoint as an underlying format. """ -import io -import os -import pickle -import warnings -from collections import ChainMap, defaultdict -from dataclasses import dataclass -from itertools import product -from logging import getLogger -from pathlib import Path -from typing import Any, Dict, Iterable, List, Optional, Tuple, Union, cast - -import torch -from packaging.version import Version as PkgVersion -from torch.distributed import checkpoint -from torch.distributed._shard.metadata import ShardMetadata -from torch.distributed._shard.sharded_tensor import Shard -from torch.distributed._shard.sharded_tensor import ShardedTensor as TorchShardedTensor -from torch.distributed._shard.sharded_tensor import ShardedTensorMetadata, TensorProperties -from torch.distributed.checkpoint import ( - BytesStorageMetadata, - DefaultLoadPlanner, - DefaultSavePlanner, - FileSystemReader, - FileSystemWriter, - LoadPlan, - Metadata, - ReadItem, - SavePlan, - TensorStorageMetadata, - WriteItem, -) -from torch.distributed.checkpoint._nested_dict import FLATTEN_MAPPING, unflatten_state_dict -from torch.distributed.checkpoint._traverse import OBJ_PATH, traverse_state_dict -from torch.distributed.checkpoint.metadata import Metadata -from torch.distributed.checkpoint.planner_helpers import _create_write_items - -from ...utils import get_torch_version, is_torch_min_version -from ..core import CheckpointingException -from ..dict_utils import nested_values -from ..mapping import ( - ShardedBase, - ShardedObject, - ShardedStateDict, - ShardedTensor, - StateDict, - is_main_replica, -) -from .async_utils import AsyncRequest -from .base import ( - AsyncSaveShardedStrategy, - LoadShardedStrategy, - StrategyAction, - register_default_strategy, -) -from .filesystem_async import FileSystemWriterAsync -from .resharding import ( - TensorReformulationMetadata, - apply_nd_flattened_tensors_reformulation, - is_nd_flattened_tensor, - nd_flattened_tensor_reformulated_global_shape, - restore_nd_flattened_tensors_formulation, -) -from .state_dict_saver import save_state_dict_async_finalize, save_state_dict_async_plan - -try: - if not torch.cuda.is_available(): - raise ImportError - from transformer_engine.pytorch.float8_tensor import Float8Tensor - - HAVE_TE = True -except ImportError: - HAVE_TE = False - -try: - from torch.distributed._tensor import DTensor - - HAVE_DTENSOR = True -except ImportError: - HAVE_DTENSOR = False - -_metadata_fn: str = ".metadata" - - -def register_default_torch_strategies(): - """Register 
default strategies related to PyT Distributed backend.""" - register_default_strategy( - StrategyAction.LOAD_SHARDED, 'torch_dist', 1, TorchDistLoadShardedStrategy() - ) - register_default_strategy( - StrategyAction.SAVE_SHARDED, 'torch_dist', 1, TorchDistSaveShardedStrategy('torch_dist', 1) - ) - - -logger = getLogger(__name__) - - -def flatten_state_dict( - state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, Dict[str, OBJ_PATH]]: - """Flattens state dict into a single level dict. - - It's a copy of torch.distributed.checkpoint._nested_dict.flatten_state_dict - which also accepts ShardedBase tensors as terminal objects - - Args: - state_dict (ShardedStateDict): state dict to be flattened - - Returns (tuple): flattened state dict and a mapping allowing to recreate the original one - - """ - flattened = {} - mappings = {} - - def flat_copy(path: OBJ_PATH, value: Any) -> None: - new_fqn = ".".join(map(str, path)) - if new_fqn in flattened: - raise ValueError(f"duplicated flatten key {new_fqn}") - flattened[new_fqn] = value - mappings[new_fqn] = path - - traverse_state_dict(state_dict, flat_copy, lambda x: isinstance(x, (torch.Tensor, ShardedBase))) - return flattened, mappings - - -def sharded_tensor_to_torch_sharded_tensor( - sh_tens: List[ShardedTensor], rank: Optional[int] = None -) -> TorchShardedTensor: - """Convert MCore ShardedTensor to PyT ShardedTensor. PyT requires information about all chunks. - - On high-level, this function follows the logic of - torch.distributed.fsdp._shard_utils._create_chunk_sharded_tensor. - Additionally, it saves `prepend_axis_num` and `has_flattened_range` (specific to MCore) - as attributes for further restoration in `_unwrap_pyt_sharded_tensor`. - - NOTE: this function assumes regular (grid) sharding of the MCore ShardedTensor. - The only local irregularities could be introduced with a `flattened_range` attribute. - - This function handles 3 different type of ShardedTensors: - 1. Non-flat regular ShardedTensors (`not has_flattened_range`) - 2. 1D flattened ShardedTensors (`is_flattened_range_1d`) - 3. N-D flattened ShardedTensors (`has_flattened_range`) - - (1) and (2) type are saved according to their original shape. - Type (3) however requires global shape adjustment for efficiency: - we treat [X, Y, Z] global shape tensor with local shape [x, y, z] - as a [X // x, Y // y, Z // z, x * y * z] tensor with last axis - partitioned according to `flattened_range` slices. - This will need special handling while resharding. - - Args: - sh_tens (List[ShardedTensor]): list of sharded tensors to convert - rank (int, optional): current process rank passed to PyT ShardedTensor. - If None, assumes rank in the default pg. - - Returns (TorchShardedTensor): PyT ShardedTensor containing all passed shards. 
- - """ - if rank is None: - rank = torch.distributed.get_rank() - - some_sh_ten = sh_tens[0] - has_flattened_range = some_sh_ten.flattened_range is not None - is_flattened_range_1d = has_flattened_range and len(some_sh_ten.global_shape) == 1 - - for sh_ten in sh_tens: - assert (sh_ten.flattened_range is not None) == has_flattened_range, sh_tens - if not sh_ten.data.is_contiguous(): - sh_ten.data = sh_ten.data.contiguous() - - local_global_offsets = {} - - prepend_axis_num = sh_tens[0].prepend_axis_num - # Determine local shards according to tensor type (see docs) - if is_flattened_range_1d: - # Type (2) case: 1D flattened ShardedTensors - for sh_ten in sh_tens: - assert len(sh_ten.global_offset) == 1, sh_ten - assert sh_ten.prepend_axis_num == 0, sh_ten - local_global_offsets.setdefault(sh_ten.global_offset, []).append(sh_ten) - - global_shape = some_sh_ten.global_shape - offsets_shape = ( - some_sh_ten.local_shape - ) # local shape is not flattened, we need it for chunk offsets - - local_shards = [ - Shard.from_tensor_and_offsets( - sh_ten.data, - [ - sh_ten.global_offset[0] + sh_ten.flattened_range.start - ], # additional flattened offset - rank, - ) - for sh_ten in sh_tens - ] - - elif has_flattened_range: - # Type (3) case: N-D flattened ShardedTensors - for sh_ten in sh_tens: - local_global_offsets.setdefault(sh_ten.local_chunk_offset_in_global(), []).append( - sh_ten - ) - assert sh_ten.data.ndim == 1, sh_ten - sh_ten.data = sh_ten.data.view((1,) * len(sh_ten.global_shape) + (-1,)) - - # Global shape reformulation: - global_shape = nd_flattened_tensor_reformulated_global_shape(some_sh_ten) - offsets_shape = (1,) * len( - some_sh_ten.global_shape - ) # reformulated global shape has shape equal ti number of local chunks - - local_shards = [ - Shard.from_tensor_and_offsets( - sh_ten.data, - list( - sh_ten.local_chunk_offset_in_global() + (sh_ten.flattened_range.start,) - ), # additional flattened offset - rank, - ) - for sh_ten in sh_tens - ] - else: - # Type (1) case: non-flat regular ShardedTensors - for sh_ten in sh_tens: - local_global_offsets.setdefault(sh_ten.global_offset, []).append(sh_ten) - sh_ten.data = sh_ten.data.view( - (1,) * prepend_axis_num + sh_ten.local_shape - ) # adjust to prepended_axis_num - - global_shape = some_sh_ten.global_shape - offsets_shape = some_sh_ten.data.shape # includes prepended axes - - local_shards = [ - Shard.from_tensor_and_offsets( - sh_ten.data, list(sh_ten.global_offset), rank # simple case - ) - for sh_ten in sh_tens - ] - - # Create a ShardedTensor without invoking communication. 
Determine global shards - world_size = torch.distributed.get_world_size() - shard_metadata = [] - # NOTE: here we assume a regular grid of shards - for fragment_offsets in product(*map(range, some_sh_ten.axis_fragmentations)): - offset = tuple(map(lambda x: x[0] * x[1], zip(fragment_offsets, offsets_shape))) - if offset in local_global_offsets: - # local shard - placement = f"rank:{rank}/cuda" - for sh_ten in local_global_offsets[offset]: - if is_flattened_range_1d: - offset = (sh_ten.global_offset[0] + sh_ten.flattened_range.start,) - size = sh_ten.data.shape - elif has_flattened_range: - assert offset == sh_ten.local_chunk_offset_in_global() - # This is not an actual offset, but an offset of the whole shard - # This is needed for a PyT Dist internal integrity check - offset = sh_ten.local_chunk_offset_in_global() + (0,) - size = (1,) * len(offsets_shape) + global_shape[-1:] - else: - size = sh_ten.data.shape - shard_metadata.append(ShardMetadata(offset, size, placement)) - - else: - # pylint: disable=line-too-long - # for shards from other ranks we provide simplistic data - this information will be discarded - # during TorchShardedTensor._init_from_local_shards_and_global_metadata call. - # Due to a bug in PyT 24.05 container we must specify some concrete rank within a world size. - # The exact rank doesn't matter as long as it's different than my rank - hence (rank + 1) % WS. - placement = f"rank:{(rank + 1) % world_size}/cuda" - if has_flattened_range and not is_flattened_range_1d: - offset = offset + (0,) - size = (1,) * len(offsets_shape) + global_shape[-1:] - else: - size = offsets_shape - shard_metadata.append(ShardMetadata(offset, size, placement)) - - tensor = some_sh_ten.data - sharded_tensor_metadata = ShardedTensorMetadata( - shards_metadata=shard_metadata, - size=torch.Size(global_shape), - tensor_properties=TensorProperties( - dtype=tensor.dtype, - layout=tensor.layout, - requires_grad=tensor.requires_grad, - memory_format=torch.contiguous_format, - pin_memory=tensor.is_pinned(), - ), - ) - pyt_sh_ten = TorchShardedTensor._init_from_local_shards_and_global_metadata( - local_shards, sharded_tensor_metadata=sharded_tensor_metadata, process_group=None - ) - # Store MCore related data as PyTShardedTensor attribute. - # This won't be stored in the checkpoint, only for runtime purposes - pyt_sh_ten.mcore_sh_ten = sh_ten.without_data() - pyt_sh_ten.mcore_metadata = {} - if has_flattened_range and not is_flattened_range_1d: - pyt_sh_ten.mcore_metadata['nd_reformulated_orig_global_shape'] = sh_ten.global_shape - return pyt_sh_ten - - -def mcore_to_pyt_state_dict( - state_dict: Dict[str, List[ShardedBase]], - is_loading: bool = False, - init_device: torch.device = torch.device("cpu"), -) -> Dict[str, Union[TorchShardedTensor, io.BytesIO]]: - """Convert state dict with ShardedTensors and ShardedObjects - to state dict compatible with PyT Dist format. - - Operates in-place and returns the original state dict. - - Args: - state_dict (Dict[str, List[ShardedBase]]): flattened state dict, where values - are lists of either ShardedTensor or ShardedObjects. - is_loading (bool, optional): flag indicating if loading or saving. Defaults to False. - init_device (torch.device, optional): device to initialize potentially missing tensors - during loading. Defaults to 'cpu'. - - Returns (Dict[str, Union[TorchShardedTensor, io.BytesIO]]): original dictionary with values - converted either into PyT ShardedTensors or io.BytesIO. 
- - """ - rank = torch.distributed.get_rank() - pyt_state_dict = {} - - def _mcore_to_torch_sharded_tensor(sh_tens: List[ShardedTensor]) -> TorchShardedTensor: - """Build a PyT ShardedTensor from given shards. - - During loading: - - if data is None, initialize it with an empty tensor (will be used to copy the data into) - - if `allow_shape_mismatch` is True, the data is initialized with zeros - prior to loading (not all parts of the tensor will be read from the checkpoint) - """ - assert all(isinstance(sh_ten, ShardedTensor) for sh_ten in sh_tens), sh_tens - for sh_ten in sh_tens: - if sh_ten.data is None: - if is_loading: - sh_ten.init_data( - init_device, - init_fn=torch.zeros if sh_ten.allow_shape_mismatch else torch.empty, - ) - else: - raise CheckpointingException(f'`data` attr is None for {sh_ten}') - else: - sh_ten.data = sh_ten.data.detach() - if sh_ten.allow_shape_mismatch and is_loading: - sh_ten.data.zero_() - - torch_sh_ten = sharded_tensor_to_torch_sharded_tensor(sh_tens, rank) - torch_sh_ten.key = sh_tens[0].key - return torch_sh_ten - - def _mcore_to_torch_sharded_object(sh_objs: List[ShardedObject]) -> io.BytesIO: - """Build io.BytesIO from given sharded objects data.""" - assert all(isinstance(sh_obj, ShardedObject) for sh_obj in sh_objs), sh_objs - serialized_data = io.BytesIO() - torch.save([sh_obj.data for sh_obj in sh_objs], serialized_data) - return serialized_data - - for k, v in state_dict.items(): - if isinstance(v[0], ShardedTensor): - v = cast(List[ShardedTensor], v) - pyt_state_dict[k] = _mcore_to_torch_sharded_tensor(v) - else: - v = cast(List[ShardedObject], v) - pyt_state_dict[k] = _mcore_to_torch_sharded_object(v) - - return pyt_state_dict - - -def _unwrap_pyt_sharded_tensor(sh_ten: TorchShardedTensor) -> List[torch.Tensor]: - """Unwrap tensor from PyT ShardedTensor instance. - - If `prepend_axis_num` was non-zero (which is specific to MCore ShardedTensor) - then the tensor has additional singleton dimensions which should be squeezed. 
- """ - mcore_sh_ten = sh_ten.mcore_sh_ten - ret_tensors = [] - for sh in sh_ten.local_shards(): - ten = sh.tensor - if mcore_sh_ten.flattened_range is not None: - assert ten.shape[:-1] == (1,) * (len(ten.shape) - 1), ten.shape - ten = ten.view(-1) - else: - for _ in range(mcore_sh_ten.prepend_axis_num): - ten = ten.squeeze(0) - ret_tensors.append(ten) - return ret_tensors - - -def _replace_state_dict_keys_with_sharded_keys( - sharded_state_dict: ShardedStateDict, keep_only_main_replica: bool = False -) -> Tuple[Dict[str, List[ShardedBase]], FLATTEN_MAPPING, Dict[str, List[str]]]: - """Group ShardedBase objects by keys and - return mappings required for recreating the original dict.""" - flat_sd, flat_mapping = flatten_state_dict(sharded_state_dict) - rename_mapping = defaultdict(list) - new_flat_sd = defaultdict(list) - for k, sh_base in flat_sd.items(): - assert isinstance(sh_base, ShardedBase), type(sh_base) - key = sh_base.unique_key if isinstance(sh_base, ShardedObject) else sh_base.key - if is_main_replica(sh_base.replica_id) or not keep_only_main_replica: - rename_mapping[key].append(k) - new_flat_sd[key].append(sh_base) - return new_flat_sd, flat_mapping, rename_mapping - - -def _replace_sharded_keys_with_state_dict_keys( - state_dict: Dict[str, List[Union[torch.Tensor, io.BytesIO]]], - flat_mapping: FLATTEN_MAPPING, - rename_mapping: Dict[str, List[str]], -): - """Inverse of _replace_state_dict_keys_with_sharded_keys.""" - recovered_sd = {} - for k, tensors in state_dict.items(): - assert len(tensors) == len(rename_mapping[k]) - for ten, recovered_k in zip(tensors, rename_mapping[k]): - recovered_sd[recovered_k] = ten - - return unflatten_state_dict(recovered_sd, flat_mapping) - - -def _restore_dict_types(x: Union[dict, list, Any], keys_template: Union[dict, list, Any]): - """Recursively update `x` keys, based on `keys_template`.""" - if isinstance(keys_template, dict): - assert isinstance(x, dict), type(x) - for k, v in keys_template.items(): - if not isinstance(k, str): - assert str(k) in x, (k, x.keys) - x[k] = x.pop(str(k)) - _restore_dict_types(x[k], v) - elif isinstance(keys_template, list): - assert isinstance(x, list), type(x) - for x_val, templ_val in zip(x, keys_template): - _restore_dict_types(x_val, templ_val) - - -@dataclass(frozen=True) -class MCoreSavePlan(SavePlan): - """SavePlan with MCore specific data.""" - - mcore_data: Dict[str, Dict[str, Any]] = None # Mcore related data about each tensor - - -class MCoreSavePlanner(DefaultSavePlanner): - """Differs with the default planner by saving BytesIO objects on all ranks. - - In the integration of MCore with PyT Distributed format, BytesIO objects - come from ShardedObjects, which should be treated as separate objects on each rank - (not common on all ranks). - - Also, the objects are already packed in io.BytesIO, so no need to redo it - in transform_object. - """ - - def __init__( - self, - *args, - dedup_replicated_tensors: Optional[bool] = None, - nd_flattened_global_shapes: Optional[Dict[str, Tuple[int, ...]]] = None, - **kwargs, - ) -> None: - # `dedup_replicated_tensors` was deprecated in 2.3; this check avoids warnings - # during saving. 
- if get_torch_version() <= PkgVersion("2.2"): - kwargs['dedup_replicated_tensors'] = dedup_replicated_tensors - super().__init__(*args, **kwargs) - self.nd_flattened_global_shapes = nd_flattened_global_shapes or {} - - def create_local_plan(self) -> SavePlan: - """Adds IOBytes write request on non-coordinator ranks.""" - - # NOTE: for PyT 2.4.0a0 we can't rely on `create_default_local_save_plan` because - # some alpha versions (specifically 2.4.0a0+f70bd71a48 in 24.06 NGC PyTorch container) - # add iobytes request only on coordinator ranks and some alpha versions - # (specifically 2.4.0a0+3bcc3cddb5 in 24.07 NGC PyTorch container) - # add those requests on all ranks. We inline a simplified version of this method below. - write_items = [] - for fqn, obj in self.state_dict.items(): - assert not HAVE_DTENSOR or not isinstance( - obj, DTensor - ) # translation from MCore ShardedTensors shouldn't result in DTensors - # Create write requests for tensor and bytes values. - # For MCore, these should be already non-duplicates. - write_items += _create_write_items(fqn, obj) - - self.plan = MCoreSavePlan( - items=write_items, - planner_data=self.mappings, - mcore_data={ - k: sh_ten.mcore_metadata - for k, sh_ten in self.state_dict.items() - if isinstance(sh_ten, TorchShardedTensor) - }, - ) - return self.plan - - def create_global_plan(self, all_plans: List[MCoreSavePlan]) -> Tuple[List[SavePlan], Metadata]: - """Merges MCore data for all plans.""" - global_plan, metadata = super().create_global_plan(all_plans) - metadata.mcore_data = dict(ChainMap(*(plan.mcore_data for plan in all_plans))) - return global_plan, metadata - - def transform_object(self, write_item: WriteItem, object: Any): - """Make no transformations - bytes objects are already serialized.""" - return object - - -class MCoreLoadPlanner(DefaultLoadPlanner): - """Adds global shape validation to the default planner. - - If global shape validation can be ignored (shouldn't!), the default - load planner can be used. - """ - - def __init__( - self, *args, shapes_validation_sharded_tensors: Iterable[ShardedTensor] = (), **kwargs - ) -> None: - super().__init__(*args, **kwargs) - self.shapes_validation_sharded_tensors = shapes_validation_sharded_tensors - self._intermediate_read_item_and_target: Optional[Tuple[ReadItem, torch.Tensor]] = None - - def _validate_global_shapes(self, metadata, sharded_tensors): - for sh_ten in sharded_tensors: - if sh_ten.key not in metadata.state_dict_metadata: - raise KeyError( - f"{sh_ten.key} from model not in state dict:" - f" {sorted(metadata.state_dict_metadata.keys())}" - ) - loaded_shape = metadata.state_dict_metadata[sh_ten.key].size - if not is_nd_flattened_tensor(sh_ten): - expected_shape = sh_ten.global_shape - else: - expected_shape = nd_flattened_tensor_reformulated_global_shape(sh_ten) - if loaded_shape != expected_shape: - _msg = ( - f'Global shape mismatch for loaded ({loaded_shape})' - f' and expected ({expected_shape}) tensor' - f' for key {sh_ten.key}' - ) - raise CheckpointingException(_msg) - - def create_local_plan(self) -> LoadPlan: - """Runs additional shapes validation.""" - self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) - return super().create_local_plan() - - def resolve_tensor(self, read_item: ReadItem): - """Override to add FP8 support. - - Narrowing the Float8Tensor can create incontiguous tensors and there are - no `copy` kernels for such cases. 
This method creates a contiguous FP8 - tensors so that the subsequent `copy_` in FileSystemReader succeeds. - Note that this requires tracking the original tensor - (as `self._intermediate_read_item_and_target` attribute) - and restoring it in `commit_tensor` method. - """ - target_tensor = super().resolve_tensor(read_item) - if ( - not target_tensor.is_contiguous() - and HAVE_TE - and isinstance(target_tensor, Float8Tensor) - ): - self._intermediate_read_item_and_target = (read_item, target_tensor) - target_tensor = Float8Tensor.make_like( - target_tensor, data=target_tensor._data.contiguous() - ) - return target_tensor - - def commit_tensor(self, read_item: ReadItem, tensor: torch.Tensor) -> None: - """Restores the original FP8 tensor saved in `resolve_tensor`.""" - if self._intermediate_read_item_and_target is not None: - interm_read_item, target_tensor = self._intermediate_read_item_and_target - assert ( - interm_read_item is read_item - ), '`commit_tensor` method should be called right after `resolve_tensor`' - target_tensor.copy_(tensor) - tensor = target_tensor - self._intermediate_read_item_and_target = None - return super().commit_tensor(read_item, tensor) - - -class TorchDistSaveShardedStrategy(AsyncSaveShardedStrategy): - """Async save strategy for the PyT Distributed format. - - The idea is to translate MCore ShardedTensors into PyT ShardedTensors - and use the async-adjusted torch.distributed.checkpoint saving mechanism - provided by the FileSystemWriterAsync writer. - """ - - def __init__( - self, - backend: str, - version: int, - keep_only_main_replica: bool = True, - thread_count: int = 2, - cached_metadata: bool = False, - separation_hint: str = None, - ): - """Adds parameters specific to PyT Distributed format - Args: - backend (str): format backend string - version (int): format version - keep_only_main_replica (bool, optional): PyT Distributed has a mechanism - for deduplication, but replica_id aware deduplication is more coherent. - Default is True (recommended to keep it). - thread_count (int, optional): threads to use during saving. - Affects the number of files in the checkpoint (saving ranks * num_threads). - cached_metadata (bool, optional): Enables using cached global metadata to avoid - gathering local metadata every checkpointing invocation - separation_hint(str, optional): If provided, all tensors whose keys have this - prefix will be saved to a separate file. 
- """ - super().__init__(backend, version) - self.keep_only_main_replica = keep_only_main_replica - self.thread_count = thread_count - - # Cached SavePlans to skip plan in `save_state_dict_async_plan` - # cached outcome of `SavePlan.prepare_global_plan`, - # which aggregates local plans from all ranks - self.cached_central_plan: SavePlan = None - # cached outcome of `SavePlan.prepare_local_plan` describes how local state_dict is written - self.cached_local_plan: SavePlan = None - # Cached global metadata, only `coordinator` for dist-ckpt holds - # if central plans are consistent over iters - self.cached_global_metadata: Metadata = None - # This variable records if the ckpt structures are consistent - # so the following checkpoint savings reuse `cached_global_metadata` - self.validated_cache_reuse: bool = False - # The knob to enable cached metadata communication in saving - self.use_cached_ckpt_structure: bool = cached_metadata - - self.separation_hint = separation_hint - - def async_save( - self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path - ) -> AsyncRequest: - """Translates MCore ShardedTensors to PyT ShardedTensors & saves in PyT Distributed format. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to save - checkpoint_dir (Path): checkpoint directory - - Returns: None - """ - # Translate the state dict - (sharded_state_dict, flat_mapping, rename_mapping) = ( - _replace_state_dict_keys_with_sharded_keys( - sharded_state_dict, self.keep_only_main_replica - ) - ) - pyt_state_dict = mcore_to_pyt_state_dict(sharded_state_dict, False) - # Use PyT saving mechanism - writer = FileSystemWriterAsync( - checkpoint_dir, separation_hint=self.separation_hint, thread_count=self.thread_count - ) - # This should be set differently if we run in a smaller process group than the default - coordinator = 0 - # Try twice to validate the generated `central_plan` is the same across iterations - # If so, reuse `cached_central_plan` and `cached_global_metadata` - # From the 3rd iteration, `save_state_dict_async_plan` will not generate `global_metadata` - # (return None) so `self.cached_global_metadata` is reused - args_cached_plans = None - if self.use_cached_ckpt_structure: - args_cached_plans = ( - self.cached_central_plan, - self.cached_local_plan, - self.validated_cache_reuse, - ) - - ( - save_state_dict_ret, - self.cached_central_plan, - self.cached_local_plan, - self.validated_cache_reuse, - ) = save_state_dict_async_plan( - pyt_state_dict, - writer, - None, - coordinator, - planner=MCoreSavePlanner(dedup_replicated_tensors=not self.keep_only_main_replica), - cached_ckpt_structure=args_cached_plans, - ) - rank = torch.distributed.get_rank() - if self.use_cached_ckpt_structure: - if self.validated_cache_reuse: - logger.debug(f"rank: {rank}, cache validated") - if save_state_dict_ret[1]: # when global_metadata is not cached - self.cached_global_metadata = save_state_dict_ret[1] # Cache Metadata - # Only Coordinator rank holds cached global_metadata - # (None is returned for global_metadata) - elif coordinator == rank: - logger.debug(f"rank: {rank}, reuse metadata, {save_state_dict_ret[1]}") - save_state_dict_ret = list(save_state_dict_ret) - save_state_dict_ret[1] = self.cached_global_metadata - - return self._get_save_and_finalize_callbacks(writer, save_state_dict_ret) - - def _get_save_and_finalize_callbacks(self, writer, save_state_dict_ret) -> AsyncRequest: - save_fn_args = writer.get_save_function_and_args() - save_fn, save_args = save_fn_args - - def finalize_fn(): - 
save_state_dict_async_finalize(*save_state_dict_ret) - torch.distributed.barrier() - - return AsyncRequest(save_fn, save_args, [finalize_fn]) - - def can_handle_sharded_objects(self): - return True - - -def get_reformulation_metadata( - sharded_state_dict: ShardedStateDict, checkpoint_dir: Path -) -> Dict[str, TensorReformulationMetadata]: - """Reads MCore data for N-D flattened tensors from checkpoint metadata during ckpt load. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to load - checkpoint_dir (Path): checkpoint directory - - Returns: - Dict[str, TensorReformulationMetadata] - dictionary that maps keys of every - N-D flattened tensor from the sharded_state_dict to its original global shape - as stored in `mcore_data` in the checkpoint. - """ - ckpt_metadata = FileSystemReader(checkpoint_dir).read_metadata() - reformulation_metadata = {} - for sh_ten in nested_values(sharded_state_dict): - if not is_nd_flattened_tensor(sh_ten): - continue - try: - ckpt_global_shape = ckpt_metadata.mcore_data[sh_ten.key][ - 'nd_reformulated_orig_global_shape' - ] - except KeyError as e: - raise CheckpointingException( - f'Cannot find global shape metadata for N-D flattened tensor {sh_ten} ' - f'in checkpoint metadata: {ckpt_metadata.mcore_data}' - ) from e - - reformulation_metadata[sh_ten.key] = TensorReformulationMetadata( - ckpt_global_shape, ckpt_metadata.state_dict_metadata[sh_ten.key].size - ) - return reformulation_metadata - - -class TorchDistLoadShardedStrategy(LoadShardedStrategy): - """Basic load strategy for the PyT Distributed format.""" - - def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path) -> StateDict: - """Translates MCore ShardedTensors to PyT ShardedTensors & loads from PyT Distributed fmt. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict with mapping - information to instruct loading - checkpoint_dir (Path): checkpoint directory - - Returns: loaded state dict - """ - # Apply N-D tensors resharding - sharded_state_dict, formulation_restore_data = apply_nd_flattened_tensors_reformulation( - sharded_state_dict, get_reformulation_metadata(sharded_state_dict, checkpoint_dir) - ) - - flexible_shape_sharded_tensors = [ - sh_ten - for sh_ten in nested_values(sharded_state_dict) - if isinstance(sh_ten, ShardedTensor) and not sh_ten.allow_shape_mismatch - ] - - orig_sharded_state_dict = sharded_state_dict - # MCore state dict to PyT Distributed compatible - (sharded_state_dict, flat_mapping, rename_mapping) = ( - _replace_state_dict_keys_with_sharded_keys(sharded_state_dict) - ) - pyt_state_dict = mcore_to_pyt_state_dict(sharded_state_dict, True) - # Load PyT Distributed format - checkpoint.load_state_dict( - pyt_state_dict, - FileSystemReader(checkpoint_dir), - planner=MCoreLoadPlanner( - shapes_validation_sharded_tensors=flexible_shape_sharded_tensors - ), - ) - pyt_state_dict = cast( - Dict[str, Union[TorchShardedTensor, List[io.BytesIO]]], pyt_state_dict - ) - # Unwrap ShardedTensors and return to original state dict - mcore_state_dict = { - k: v if not isinstance(v, TorchShardedTensor) else _unwrap_pyt_sharded_tensor(v) - for k, v in pyt_state_dict.items() - } - mcore_state_dict = _replace_sharded_keys_with_state_dict_keys( - mcore_state_dict, flat_mapping, rename_mapping - ) - _restore_dict_types(mcore_state_dict, orig_sharded_state_dict) - # Apply N-D tensors resharding postprocessing - mcore_state_dict = restore_nd_flattened_tensors_formulation( - mcore_state_dict, formulation_restore_data - ) - return 
mcore_state_dict - - def load_tensors_metadata(self, checkpoint_dir: Path, metadata: Metadata = None): - """Uses tensors metadata stored in the metadata file.""" - if metadata is None: - fs_reader = FileSystemReader(checkpoint_dir) - metadata = fs_reader.read_metadata() - - mcore_data = getattr(metadata, 'mcore_data', {}) - sharded_metadata = {} - for k, tp in metadata.state_dict_metadata.items(): - if not isinstance(tp, TensorStorageMetadata): - continue # load only tensors - - nd_orig_global_shape = mcore_data.get(k, {}).get('nd_reformulated_orig_global_shape') - if nd_orig_global_shape is None: - # Regular tensor - sharded_metadata[k] = ShardedTensor.from_rank_offsets( - k, torch.empty(tp.size, **tp.properties.__dict__, device='meta') - ).without_data() - else: - # N-D flattened tensor - unflat_ten = torch.empty( - nd_orig_global_shape, **tp.properties.__dict__, device='meta' - ) - flat_ten = unflat_ten.flatten() - sharded_metadata[k] = ShardedTensor.from_rank_offsets_flat( - k, - flat_ten, - unflat_ten.shape, - flattened_range=slice(0, unflat_ten.numel()), # whole slice - ).without_data() - - return sharded_metadata - - def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: - """Uses tensors and objects metadata stored in the metadata file.""" - fs_reader = FileSystemReader(checkpoint_dir) - metadata = fs_reader.read_metadata() - - sharded_metadata = {} - for metadata_key, storage_metadata in metadata.state_dict_metadata.items(): - if not isinstance(storage_metadata, BytesStorageMetadata): - continue - sh_obj = ShardedObject.empty_from_unique_key(metadata_key) - sharded_metadata[sh_obj.unique_key] = sh_obj - - sharded_metadata.update(self.load_tensors_metadata(checkpoint_dir, metadata)) - return sharded_metadata - - def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): - """Removes checkpoint files whose keys have the given prefix. - - Performs the following steps: - 1. checks whether there are files that start with the key_prefix - 2. loads metadata - 3. removes all entries from the metadata that start with the key_prefix - 4. resaves the new metadata and removes the old metadata - 5. removes the relevant files - """ - - assert is_torch_min_version( - "2.3.0" - ), f'torch >= 2.3.0 is required for remove_sharded_tensors' - - distckpt_files = [f for f in os.listdir(checkpoint_dir) if f.endswith("distcp")] - files_to_remove = [f for f in distckpt_files if f.startswith(key_prefix)] - - if not files_to_remove: - warnings.warn( - f'There are no files in {checkpoint_dir} that begin with "{key_prefix}".' - f' Skipping removal.' 
- ) - return - - fs_reader = FileSystemReader(checkpoint_dir) - original_metadata = fs_reader.read_metadata() - - new_state_dict_metadata = {} - new_planner_data = {} - new_storage_data = {} - for k in original_metadata.state_dict_metadata.keys(): - if k.startswith(key_prefix): - continue - new_state_dict_metadata[k] = original_metadata.state_dict_metadata[k] - for k in original_metadata.planner_data.keys(): - if k.startswith(key_prefix): - continue - new_planner_data[k] = original_metadata.planner_data[k] - for k in original_metadata.storage_data.keys(): - if k.fqn.startswith(key_prefix): - continue - new_storage_data[k] = original_metadata.storage_data[k] - metadata = Metadata( - state_dict_metadata=new_state_dict_metadata, - planner_data=new_planner_data, - storage_data=new_storage_data, - ) - fs_writer = FileSystemWriter(checkpoint_dir) - metadata_filename = cast(Path, fs_writer.fs.concat_path(fs_writer.path, _metadata_fn)) - tmp_path = cast( - metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.tmp") - ) - old_path = cast( - metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.bck") - ) - ## save the new metadata - with fs_writer.fs.create_stream(tmp_path, "wb") as metadata_file: - pickle.dump(metadata, metadata_file) - try: - os.fsync(metadata_file.fileno()) - except AttributeError: - os.sync() - ## move the old metadata - fs_writer.fs.rename(fs_writer.metadata_path, old_path) - try: - ## rename the new metadata - fs_writer.fs.rename(tmp_path, fs_writer.metadata_path) - - ## finally, remove the files we want to drop - for f in files_to_remove: - fs_writer.fs.rm_file(checkpoint_dir / f) - except Exception as e: - fs_writer.fs.rename(old_path, fs_writer.metadata_path) - raise e - else: - fs_writer.fs.rm_file(old_path) - - def can_handle_sharded_objects(self): - return True - - def check_backend_compatibility(self, loaded_version): - pass # TODO - - def check_version_compatibility(self, loaded_version): - pass # TODO +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Strategies using PyTorch distributed.checkpoint as an underlying format. 
""" +import io +import os +import pickle +import warnings +from collections import ChainMap, defaultdict +from dataclasses import dataclass +from itertools import product +from logging import getLogger +from pathlib import Path +from typing import Any, Dict, Iterable, List, Optional, Tuple, Union, cast + +import torch +from packaging.version import Version as PkgVersion +from torch.distributed import checkpoint +from torch.distributed._shard.metadata import ShardMetadata +from torch.distributed._shard.sharded_tensor import Shard +from torch.distributed._shard.sharded_tensor import ShardedTensor as TorchShardedTensor +from torch.distributed._shard.sharded_tensor import ShardedTensorMetadata, TensorProperties +from torch.distributed.checkpoint import ( + BytesStorageMetadata, + DefaultLoadPlanner, + DefaultSavePlanner, + FileSystemReader, + FileSystemWriter, + LoadPlan, + Metadata, + ReadItem, + SavePlan, + TensorStorageMetadata, + WriteItem, +) +from torch.distributed.checkpoint._nested_dict import FLATTEN_MAPPING, unflatten_state_dict +from torch.distributed.checkpoint._traverse import OBJ_PATH, traverse_state_dict +from torch.distributed.checkpoint.metadata import Metadata +from torch.distributed.checkpoint.planner_helpers import _create_write_items + +from ...utils import get_torch_version, is_torch_min_version +from ..core import CheckpointingException +from ..dict_utils import nested_values +from ..mapping import ( + ShardedBase, + ShardedObject, + ShardedStateDict, + ShardedTensor, + StateDict, + is_main_replica, +) +from .async_utils import AsyncRequest +from .base import ( + AsyncSaveShardedStrategy, + LoadShardedStrategy, + StrategyAction, + register_default_strategy, +) +from .cached_metadata_filesystem_reader import CachedMetadataFileSystemReader +from .filesystem_async import FileSystemWriterAsync +from .resharding import ( + TensorReformulationMetadata, + apply_nd_flattened_tensors_reformulation, + is_nd_flattened_tensor, + nd_flattened_tensor_reformulated_global_shape, + restore_nd_flattened_tensors_formulation, +) +from .state_dict_saver import save_state_dict_async_finalize, save_state_dict_async_plan + +try: + if not torch.cuda.is_available(): + raise ImportError + from transformer_engine.pytorch.float8_tensor import Float8Tensor + + HAVE_TE = True +except ImportError: + HAVE_TE = False + +try: + from torch.distributed._tensor import DTensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + +_metadata_fn: str = ".metadata" + + +def register_default_torch_strategies(): + """Register default strategies related to PyT Distributed backend.""" + register_default_strategy( + StrategyAction.LOAD_SHARDED, 'torch_dist', 1, TorchDistLoadShardedStrategy() + ) + register_default_strategy( + StrategyAction.SAVE_SHARDED, 'torch_dist', 1, TorchDistSaveShardedStrategy('torch_dist', 1) + ) + + +logger = getLogger(__name__) + + +def flatten_state_dict( + state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, Dict[str, OBJ_PATH]]: + """Flattens state dict into a single level dict. 
+ + It's a copy of torch.distributed.checkpoint._nested_dict.flatten_state_dict + which also accepts ShardedBase tensors as terminal objects + + Args: + state_dict (ShardedStateDict): state dict to be flattened + + Returns (tuple): flattened state dict and a mapping allowing to recreate the original one + + """ + flattened = {} + mappings = {} + + def flat_copy(path: OBJ_PATH, value: Any) -> None: + new_fqn = ".".join(map(str, path)) + if new_fqn in flattened: + raise ValueError(f"duplicated flatten key {new_fqn}") + flattened[new_fqn] = value + mappings[new_fqn] = path + + traverse_state_dict(state_dict, flat_copy, lambda x: isinstance(x, (torch.Tensor, ShardedBase))) + return flattened, mappings + + +def sharded_tensor_to_torch_sharded_tensor( + sh_tens: List[ShardedTensor], + rank: Optional[int] = None, + load_legacy_1d_flatten_tensors: bool = False, +) -> TorchShardedTensor: + """Convert MCore ShardedTensor to PyT ShardedTensor. PyT requires information about all chunks. + + On high-level, this function follows the logic of + torch.distributed.fsdp._shard_utils._create_chunk_sharded_tensor. + Additionally, it saves `prepend_axis_num` and `has_flattened_range` (specific to MCore) + as attributes for further restoration in `_unwrap_pyt_sharded_tensor`. + + NOTE: this function assumes regular (grid) sharding of the MCore ShardedTensor. + The only local irregularities could be introduced with a `flattened_range` attribute. + + This function handles 2 different type of ShardedTensors: + 1. Non-flat regular ShardedTensors (`not has_flattened_range`) + 2. N-D flattened ShardedTensors (`has_flattened_range`) + + (1) type are saved according to their original shape. + Type (2) however requires global shape adjustment for efficiency: + we treat [X, Y, Z] global shape tensor with local shape [x, y, z] + as a [X // x, Y // y, Z // z, x * y * z] tensor with last axis + partitioned according to `flattened_range` slices. + This will need special handling while resharding. + + Args: + sh_tens (List[ShardedTensor]): list of sharded tensors to convert + rank (int, optional): current process rank passed to PyT ShardedTensor. + If None, assumes rank in the default pg. + load_legacy_1d_flatten_tensors (bool, optional): flag indicating if 1-D flattened tensors + should be loaded in a legacy way. Defaults to False. + + Returns (TorchShardedTensor): PyT ShardedTensor containing all passed shards. 
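+
+    Illustrative example (added for clarity; shapes are hypothetical): a tensor with
+    global shape [8, 16] and local shape [4, 8] is reformulated as a [2, 2, 32] tensor
+    whose last (flattened) axis is partitioned according to the `flattened_range`
+    slices of the local chunks.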
+
+    """
+    if rank is None:
+        rank = torch.distributed.get_rank()
+
+    some_sh_ten = sh_tens[0]
+    has_flattened_range = some_sh_ten.flattened_range is not None
+
+    for sh_ten in sh_tens:
+        assert (sh_ten.flattened_range is not None) == has_flattened_range, sh_tens
+        if not sh_ten.data.is_contiguous():
+            sh_ten.data = sh_ten.data.contiguous()
+
+    if load_legacy_1d_flatten_tensors and len(some_sh_ten.global_shape) == 1:
+        # Legacy 1-D flattened tensors are loaded as non-flat regular ShardedTensors
+        has_flattened_range = False
+
+    local_global_offsets = {}
+
+    prepend_axis_num = sh_tens[0].prepend_axis_num
+    # Determine local shards according to tensor type (see docs)
+    if has_flattened_range:
+        # Type (2) case: N-D flattened ShardedTensors
+        for sh_ten in sh_tens:
+            local_global_offsets.setdefault(sh_ten.local_chunk_offset_in_global(), []).append(
+                sh_ten
+            )
+            assert sh_ten.data.ndim == 1, sh_ten
+            sh_ten.data = sh_ten.data.view((1,) * len(sh_ten.global_shape) + (-1,))
+
+        # Global shape reformulation:
+        global_shape = nd_flattened_tensor_reformulated_global_shape(some_sh_ten)
+        offsets_shape = (1,) * len(
+            some_sh_ten.global_shape
+        )  # reformulated global shape has shape equal to the number of local chunks
+
+        local_shards = [
+            Shard.from_tensor_and_offsets(
+                sh_ten.data,
+                list(
+                    sh_ten.local_chunk_offset_in_global() + (sh_ten.flattened_range.start,)
+                ),  # additional flattened offset
+                rank,
+            )
+            for sh_ten in sh_tens
+        ]
+    else:
+        # Type (1) case: non-flat regular ShardedTensors
+        for sh_ten in sh_tens:
+            local_global_offsets.setdefault(sh_ten.global_offset, []).append(sh_ten)
+            sh_ten.data = sh_ten.data.view(
+                (1,) * prepend_axis_num + sh_ten.local_shape
+            )  # adjust to prepended_axis_num
+
+        global_shape = some_sh_ten.global_shape
+        offsets_shape = some_sh_ten.data.shape  # includes prepended axes
+
+        local_shards = [
+            Shard.from_tensor_and_offsets(
+                sh_ten.data, list(sh_ten.global_offset), rank  # simple case
+            )
+            for sh_ten in sh_tens
+        ]
+
+    # Create a ShardedTensor without invoking communication. Determine global shards
+    world_size = torch.distributed.get_world_size()
+    shard_metadata = []
+    # NOTE: here we assume a regular grid of shards
+    for fragment_offsets in product(*map(range, some_sh_ten.axis_fragmentations)):
+        offset = tuple(map(lambda x: x[0] * x[1], zip(fragment_offsets, offsets_shape)))
+        if offset in local_global_offsets:
+            # local shard
+            placement = f"rank:{rank}/cuda"
+            for sh_ten in local_global_offsets[offset]:
+                if has_flattened_range:
+                    assert offset == sh_ten.local_chunk_offset_in_global()
+                    # This is not an actual offset, but an offset of the whole shard
+                    # This is needed for a PyT Dist internal integrity check
+                    offset = sh_ten.local_chunk_offset_in_global() + (0,)
+                    size = (1,) * len(offsets_shape) + global_shape[-1:]
+                else:
+                    size = sh_ten.data.shape
+                shard_metadata.append(ShardMetadata(offset, size, placement))
+
+        else:
+            # pylint: disable=line-too-long
+            # for shards from other ranks we provide simplistic data - this information will be discarded
+            # during TorchShardedTensor._init_from_local_shards_and_global_metadata call.
+            # Due to a bug in PyT 24.05 container we must specify some concrete rank within a world size.
+            # The exact rank doesn't matter as long as it's different from my rank - hence (rank + 1) % WS.
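+            # Illustrative note (added for clarity): e.g. with rank=1 and world_size=4,
+            # the placeholder placement below becomes "rank:2/cuda".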
+ placement = f"rank:{(rank + 1) % world_size}/cuda" + if has_flattened_range: + offset = offset + (0,) + size = (1,) * len(offsets_shape) + global_shape[-1:] + else: + size = offsets_shape + shard_metadata.append(ShardMetadata(offset, size, placement)) + + tensor = some_sh_ten.data + sharded_tensor_metadata = ShardedTensorMetadata( + shards_metadata=shard_metadata, + size=torch.Size(global_shape), + tensor_properties=TensorProperties( + dtype=tensor.dtype, + layout=tensor.layout, + requires_grad=tensor.requires_grad, + memory_format=torch.contiguous_format, + pin_memory=tensor.is_pinned(), + ), + ) + pyt_sh_ten = TorchShardedTensor._init_from_local_shards_and_global_metadata( + local_shards, sharded_tensor_metadata=sharded_tensor_metadata, process_group=None + ) + # Store MCore related data as PyTShardedTensor attribute. + # This won't be stored in the checkpoint, only for runtime purposes + pyt_sh_ten.mcore_sh_ten = sh_ten.without_data() + pyt_sh_ten.mcore_metadata = {} + if has_flattened_range: + pyt_sh_ten.mcore_metadata['nd_reformulated_orig_global_shape'] = sh_ten.global_shape + return pyt_sh_ten + + +def mcore_to_pyt_state_dict( + state_dict: Dict[str, List[ShardedBase]], + is_loading: bool = False, + init_device: torch.device = torch.device("cpu"), + load_legacy_1d_flatten_tensors: bool = False, +) -> Dict[str, Union[TorchShardedTensor, io.BytesIO]]: + """Convert state dict with ShardedTensors and ShardedObjects + to state dict compatible with PyT Dist format. + + Operates in-place and returns the original state dict. + + Args: + state_dict (Dict[str, List[ShardedBase]]): flattened state dict, where values + are lists of either ShardedTensor or ShardedObjects. + is_loading (bool, optional): flag indicating if loading or saving. Defaults to False. + init_device (torch.device, optional): device to initialize potentially missing tensors + during loading. Defaults to 'cpu'. + + Returns (Dict[str, Union[TorchShardedTensor, io.BytesIO]]): original dictionary with values + converted either into PyT ShardedTensors or io.BytesIO. + + """ + rank = torch.distributed.get_rank() + pyt_state_dict = {} + + def _mcore_to_torch_sharded_tensor(sh_tens: List[ShardedTensor]) -> TorchShardedTensor: + """Build a PyT ShardedTensor from given shards. 
+ + During loading: + - if data is None, initialize it with an empty tensor (will be used to copy the data into) + - if `allow_shape_mismatch` is True, the data is initialized with zeros + prior to loading (not all parts of the tensor will be read from the checkpoint) + """ + assert all(isinstance(sh_ten, ShardedTensor) for sh_ten in sh_tens), sh_tens + for sh_ten in sh_tens: + if sh_ten.data is None: + if is_loading: + sh_ten.init_data( + init_device, + init_fn=torch.zeros if sh_ten.allow_shape_mismatch else torch.empty, + ) + else: + raise CheckpointingException(f'`data` attr is None for {sh_ten}') + else: + sh_ten.data = sh_ten.data.detach() + if sh_ten.allow_shape_mismatch and is_loading: + sh_ten.data.zero_() + + torch_sh_ten = sharded_tensor_to_torch_sharded_tensor( + sh_tens, rank, load_legacy_1d_flatten_tensors + ) + torch_sh_ten.key = sh_tens[0].key + return torch_sh_ten + + def _mcore_to_torch_sharded_object(sh_objs: List[ShardedObject]) -> io.BytesIO: + """Build io.BytesIO from given sharded objects data.""" + assert all(isinstance(sh_obj, ShardedObject) for sh_obj in sh_objs), sh_objs + serialized_data = io.BytesIO() + torch.save([sh_obj.data for sh_obj in sh_objs], serialized_data) + return serialized_data + + for k, v in state_dict.items(): + if isinstance(v[0], ShardedTensor): + v = cast(List[ShardedTensor], v) + pyt_state_dict[k] = _mcore_to_torch_sharded_tensor(v) + else: + v = cast(List[ShardedObject], v) + pyt_state_dict[k] = _mcore_to_torch_sharded_object(v) + + return pyt_state_dict + + +def _unwrap_pyt_sharded_tensor(sh_ten: TorchShardedTensor) -> List[torch.Tensor]: + """Unwrap tensor from PyT ShardedTensor instance. + + If `prepend_axis_num` was non-zero (which is specific to MCore ShardedTensor) + then the tensor has additional singleton dimensions which should be squeezed. 
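+
+    Illustrative example (hypothetical shapes): with `prepend_axis_num=2`, a local
+    shard stored with shape (1, 1, 4096) is returned with shape (4096,).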
+ """ + mcore_sh_ten = sh_ten.mcore_sh_ten + ret_tensors = [] + for sh in sh_ten.local_shards(): + ten = sh.tensor + if mcore_sh_ten.flattened_range is not None: + assert ten.shape[:-1] == (1,) * (len(ten.shape) - 1), ten.shape + ten = ten.view(-1) + else: + for _ in range(mcore_sh_ten.prepend_axis_num): + ten = ten.squeeze(0) + ret_tensors.append(ten) + return ret_tensors + + +def _replace_state_dict_keys_with_sharded_keys( + sharded_state_dict: ShardedStateDict, keep_only_main_replica: bool = False +) -> Tuple[Dict[str, List[ShardedBase]], FLATTEN_MAPPING, Dict[str, List[str]]]: + """Group ShardedBase objects by keys and + return mappings required for recreating the original dict.""" + flat_sd, flat_mapping = flatten_state_dict(sharded_state_dict) + rename_mapping = defaultdict(list) + new_flat_sd = defaultdict(list) + for k, sh_base in flat_sd.items(): + assert isinstance(sh_base, ShardedBase), type(sh_base) + key = sh_base.unique_key if isinstance(sh_base, ShardedObject) else sh_base.key + if is_main_replica(sh_base.replica_id) or not keep_only_main_replica: + rename_mapping[key].append(k) + new_flat_sd[key].append(sh_base) + return new_flat_sd, flat_mapping, rename_mapping + + +def _replace_sharded_keys_with_state_dict_keys( + state_dict: Dict[str, List[Union[torch.Tensor, io.BytesIO]]], + flat_mapping: FLATTEN_MAPPING, + rename_mapping: Dict[str, List[str]], +): + """Inverse of _replace_state_dict_keys_with_sharded_keys.""" + recovered_sd = {} + for k, tensors in state_dict.items(): + assert len(tensors) == len(rename_mapping[k]) + for ten, recovered_k in zip(tensors, rename_mapping[k]): + recovered_sd[recovered_k] = ten + + return unflatten_state_dict(recovered_sd, flat_mapping) + + +def _restore_dict_types(x: Union[dict, list, Any], keys_template: Union[dict, list, Any]): + """Recursively update `x` keys, based on `keys_template`.""" + if isinstance(keys_template, dict): + assert isinstance(x, dict), type(x) + for k, v in keys_template.items(): + if not isinstance(k, str): + assert str(k) in x, (k, x.keys) + x[k] = x.pop(str(k)) + _restore_dict_types(x[k], v) + elif isinstance(keys_template, list): + assert isinstance(x, list), type(x) + for x_val, templ_val in zip(x, keys_template): + _restore_dict_types(x_val, templ_val) + + +@dataclass(frozen=True) +class MCoreSavePlan(SavePlan): + """SavePlan with MCore specific data.""" + + mcore_data: Dict[str, Dict[str, Any]] = None # Mcore related data about each tensor + + +class MCoreSavePlanner(DefaultSavePlanner): + """Differs with the default planner by saving BytesIO objects on all ranks. + + In the integration of MCore with PyT Distributed format, BytesIO objects + come from ShardedObjects, which should be treated as separate objects on each rank + (not common on all ranks). + + Also, the objects are already packed in io.BytesIO, so no need to redo it + in transform_object. + """ + + def __init__( + self, + *args, + dedup_replicated_tensors: Optional[bool] = None, + nd_flattened_global_shapes: Optional[Dict[str, Tuple[int, ...]]] = None, + can_run_decentralized_global_plan: bool = True, + **kwargs, + ) -> None: + # `dedup_replicated_tensors` was deprecated in 2.3; this check avoids warnings + # during saving. 
+ if get_torch_version() <= PkgVersion("2.2"): + kwargs['dedup_replicated_tensors'] = dedup_replicated_tensors + super().__init__(*args, **kwargs) + self.nd_flattened_global_shapes = nd_flattened_global_shapes or {} + self.can_run_decentralized_global_plan = can_run_decentralized_global_plan + if can_run_decentralized_global_plan: + assert ( + not dedup_replicated_tensors + ), 'Cannot run decentralized plan with dedup_replicated_tensors=True' + assert ( + not self.flatten_state_dict + ), 'Cannot run decentralized plan with flatten_state_dict=True' + + def create_local_plan(self) -> SavePlan: + """Adds IOBytes write request on non-coordinator ranks.""" + + # NOTE: for PyT 2.4.0a0 we can't rely on `create_default_local_save_plan` because + # some alpha versions (specifically 2.4.0a0+f70bd71a48 in 24.06 NGC PyTorch container) + # add iobytes request only on coordinator ranks and some alpha versions + # (specifically 2.4.0a0+3bcc3cddb5 in 24.07 NGC PyTorch container) + # add those requests on all ranks. We inline a simplified version of this method below. + write_items = [] + for fqn, obj in self.state_dict.items(): + assert not HAVE_DTENSOR or not isinstance( + obj, DTensor + ) # translation from MCore ShardedTensors shouldn't result in DTensors + # Create write requests for tensor and bytes values. + # For MCore, these should be already non-duplicates. + write_items += _create_write_items(fqn, obj) + + self.plan = MCoreSavePlan( + items=write_items, + planner_data=self.mappings, + mcore_data={ + k: sh_ten.mcore_metadata + for k, sh_ten in self.state_dict.items() + if isinstance(sh_ten, TorchShardedTensor) + }, + ) + return self.plan + + def create_global_plan(self, all_plans: List[MCoreSavePlan]) -> Tuple[List[SavePlan], Metadata]: + """Merges MCore data for all plans.""" + global_plan, metadata = super().create_global_plan(all_plans) + metadata.mcore_data = dict(ChainMap(*(plan.mcore_data for plan in all_plans))) + return global_plan, metadata + + def create_decentralized_global_plan(self, local_plan: SavePlan) -> SavePlan: + """Nothing to do, just some checks. + + Args: + local_plan (SavePlan): local plan to turn to a global plan + (without interactions with other ranks) + + Returns: + SavePlan - locally transformed plan equivalent to the plan that would be + created by the coordinator + """ + assert ( + not self.flatten_state_dict + ), 'Cannot run decentralized plan with flatten_state_dict=True' + assert not local_plan.planner_data, 'Planner data should be empty with decentralized plan' + return local_plan + + def transform_object(self, write_item: WriteItem, object: Any): + """Make no transformations - bytes objects are already serialized.""" + return object + + +class MCoreLoadPlanner(DefaultLoadPlanner): + """Adds global shape validation to the default planner. + + If global shape validation can be ignored (shouldn't!), the default + load planner can be used. 
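+
+    The validation compares each ShardedTensor's expected global shape (or its
+    N-D flattened reformulation) with the shape recorded in the checkpoint
+    metadata and raises a CheckpointingException on mismatch.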
+ """ + + def __init__( + self, *args, shapes_validation_sharded_tensors: Iterable[ShardedTensor] = (), **kwargs + ) -> None: + super().__init__(*args, **kwargs) + self.shapes_validation_sharded_tensors = shapes_validation_sharded_tensors + self._intermediate_read_item_and_target: Optional[Tuple[ReadItem, torch.Tensor]] = None + + def _validate_global_shapes(self, metadata, sharded_tensors): + for sh_ten in sharded_tensors: + if sh_ten.key not in metadata.state_dict_metadata: + raise KeyError( + f"{sh_ten.key} from model not in state dict:" + f" {sorted(metadata.state_dict_metadata.keys())}" + ) + loaded_shape = metadata.state_dict_metadata[sh_ten.key].size + if not is_nd_flattened_tensor(sh_ten): + expected_shape = sh_ten.global_shape + else: + expected_shape = nd_flattened_tensor_reformulated_global_shape(sh_ten) + if loaded_shape != expected_shape: + if is_nd_flattened_tensor(sh_ten) and len(sh_ten.global_shape) == 1: + # Handle legacy 1-D flattened tensors checkpoint format + # where the global shape is not stored in the metadata + expected_shape = sh_ten.global_shape + if loaded_shape == expected_shape: + continue + _msg = ( + f'Global shape mismatch for loaded ({loaded_shape})' + f' and expected ({expected_shape}) tensor' + f' for key {sh_ten.key}' + ) + raise CheckpointingException(_msg) + + def create_local_plan(self) -> LoadPlan: + """Runs additional shapes validation.""" + self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) + return super().create_local_plan() + + def resolve_tensor(self, read_item: ReadItem): + """Override to add FP8 support. + + Narrowing the Float8Tensor can create incontiguous tensors and there are + no `copy` kernels for such cases. This method creates a contiguous FP8 + tensors so that the subsequent `copy_` in FileSystemReader succeeds. + Note that this requires tracking the original tensor + (as `self._intermediate_read_item_and_target` attribute) + and restoring it in `commit_tensor` method. + """ + target_tensor = super().resolve_tensor(read_item) + if ( + not target_tensor.is_contiguous() + and HAVE_TE + and isinstance(target_tensor, Float8Tensor) + ): + self._intermediate_read_item_and_target = (read_item, target_tensor) + target_tensor = Float8Tensor.make_like( + target_tensor, data=target_tensor._data.contiguous() + ) + return target_tensor + + def commit_tensor(self, read_item: ReadItem, tensor: torch.Tensor) -> None: + """Restores the original FP8 tensor saved in `resolve_tensor`.""" + if self._intermediate_read_item_and_target is not None: + interm_read_item, target_tensor = self._intermediate_read_item_and_target + assert ( + interm_read_item is read_item + ), '`commit_tensor` method should be called right after `resolve_tensor`' + target_tensor.copy_(tensor) + tensor = target_tensor + self._intermediate_read_item_and_target = None + return super().commit_tensor(read_item, tensor) + + +class TorchDistSaveShardedStrategy(AsyncSaveShardedStrategy): + """Async save strategy for the PyT Distributed format. + + The idea is to translate MCore ShardedTensors into PyT ShardedTensors + and use the async-adjusted torch.distributed.checkpoint saving mechanism + provided by the FileSystemWriterAsync writer. 
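+
+    Illustrative usage sketch (the exact call site depends on the surrounding
+    checkpointing code):
+
+        strategy = TorchDistSaveShardedStrategy('torch_dist', 1, thread_count=2)
+        async_request = strategy.async_save(sharded_state_dict, checkpoint_dir)
+        # `async_request` bundles the save function, its args and finalize callbacks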
+    """
+
+    def __init__(
+        self,
+        backend: str,
+        version: int,
+        keep_only_main_replica: bool = True,
+        thread_count: int = 2,
+        cached_metadata: bool = False,
+        separation_hint: str = None,
+    ):
+        """Adds parameters specific to PyT Distributed format.
+
+        Args:
+            backend (str): format backend string
+            version (int): format version
+            keep_only_main_replica (bool, optional): PyT Distributed has a mechanism
+                for deduplication, but replica_id aware deduplication is more coherent.
+                Default is True (recommended to keep it).
+            thread_count (int, optional): threads to use during saving.
+                Affects the number of files in the checkpoint (saving ranks * num_threads).
+            cached_metadata (bool, optional): Enables using cached global metadata to avoid
+                gathering local metadata every checkpointing invocation.
+            separation_hint (str, optional): If provided, all tensors whose keys have this
+                prefix will be saved to a separate file.
+        """
+        super().__init__(backend, version)
+        self.keep_only_main_replica = keep_only_main_replica
+        self.thread_count = thread_count
+
+        # Cached SavePlans to skip plan in `save_state_dict_async_plan`
+        # cached outcome of `SavePlan.prepare_global_plan`,
+        # which aggregates local plans from all ranks
+        self.cached_central_plan: SavePlan = None
+        # cached outcome of `SavePlan.prepare_local_plan` describes how local state_dict is written
+        self.cached_local_plan: SavePlan = None
+        # Cached global metadata, only `coordinator` for dist-ckpt holds
+        # if central plans are consistent over iters
+        self.cached_global_metadata: Metadata = None
+        # This variable records if the ckpt structures are consistent
+        # so the following checkpoint savings reuse `cached_global_metadata`
+        self.validated_cache_reuse: bool = False
+        # The knob to enable cached metadata communication in saving
+        self.use_cached_ckpt_structure: bool = cached_metadata
+
+        self.separation_hint = separation_hint
+
+        self.validated_loaded_metadata_reuse = False
+
+    def async_save(
+        self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path
+    ) -> AsyncRequest:
+        """Translates MCore ShardedTensors to PyT ShardedTensors & saves in PyT Distributed format.
+
+        Args:
+            sharded_state_dict (ShardedStateDict): sharded state dict to save
+            checkpoint_dir (Path): checkpoint directory
+
+        Returns: AsyncRequest bundling the save function, its arguments and finalize callbacks
+        """
+        # Translate the state dict
+        (sharded_state_dict, flat_mapping, rename_mapping) = (
+            _replace_state_dict_keys_with_sharded_keys(
+                sharded_state_dict, self.keep_only_main_replica
+            )
+        )
+        pyt_state_dict = mcore_to_pyt_state_dict(sharded_state_dict, False)
+        # Use PyT saving mechanism
+        writer = FileSystemWriterAsync(
+            checkpoint_dir, separation_hint=self.separation_hint, thread_count=self.thread_count
+        )
+        # This should be set differently if we run in a smaller process group than the default
+        coordinator = 0
+        # Try twice to validate the generated `central_plan` is the same across iterations
+        # If so, reuse `cached_central_plan` and `cached_global_metadata`
+        # From the 3rd iteration, `save_state_dict_async_plan` will not generate `global_metadata`
+        # (return None) so `self.cached_global_metadata` is reused
+        args_cached_plans = None
+        loaded_all_plans = None
+        if self.use_cached_ckpt_structure:
+            loaded_all_plans = getattr(self.cached_global_metadata, "all_local_plans", None)
+            if loaded_all_plans is None:
+                logger.debug(
+                    "no all_local_plans in metadata - can't verify global metadata reuse..."
+ ) + + args_cached_plans = ( + self.cached_central_plan, + self.cached_local_plan, + self.validated_cache_reuse, + ) + + ( + save_state_dict_ret, + self.cached_central_plan, + self.cached_local_plan, + self.validated_cache_reuse, + self.validated_loaded_metadata_reuse, + ) = save_state_dict_async_plan( + pyt_state_dict, + writer, + None, + coordinator, + planner=MCoreSavePlanner( + dedup_replicated_tensors=not self.keep_only_main_replica, flatten_state_dict=False + ), + cached_ckpt_structure=args_cached_plans, + loaded_all_plans=loaded_all_plans, + ) + rank = torch.distributed.get_rank() + if self.use_cached_ckpt_structure: + if ( + loaded_all_plans + and self.cached_global_metadata + and self.validated_loaded_metadata_reuse + ): + if coordinator == rank: + logger.debug( + f"rank: {rank}, reuse global metadata from loaded" + f" .metadata, {save_state_dict_ret[1]}" + ) + save_state_dict_ret = list(save_state_dict_ret) + save_state_dict_ret[1] = self.cached_global_metadata + + elif self.validated_cache_reuse: + logger.debug(f"rank: {rank}, cache validated") + if save_state_dict_ret[1]: # when global_metadata is not cached + self.cached_global_metadata = save_state_dict_ret[1] # Cache Metadata + # Only Coordinator rank holds cached global_metadata + # (None is returned for global_metadata) + elif coordinator == rank: + logger.debug( + f"rank: {rank}, reuse global metadata cached from previous" + f" save iteration, {save_state_dict_ret[1]}" + ) + save_state_dict_ret = list(save_state_dict_ret) + save_state_dict_ret[1] = self.cached_global_metadata + + return self._get_save_and_finalize_callbacks(writer, save_state_dict_ret) + + def _get_save_and_finalize_callbacks(self, writer, save_state_dict_ret) -> AsyncRequest: + save_fn_args = writer.get_save_function_and_args() + save_fn, preload_fn, save_args = save_fn_args + + def finalize_fn(): + save_state_dict_async_finalize(*save_state_dict_ret) + torch.distributed.barrier() + + return AsyncRequest(save_fn, save_args, [finalize_fn], preload_fn=preload_fn) + + def can_handle_sharded_objects(self): + return True + + +def get_reformulation_metadata( + sharded_state_dict: ShardedStateDict, checkpoint_dir: Path +) -> Dict[str, TensorReformulationMetadata]: + """Reads MCore data for N-D flattened tensors from checkpoint metadata during ckpt load. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to load + checkpoint_dir (Path): checkpoint directory + + Returns: + Dict[str, TensorReformulationMetadata] - dictionary that maps keys of every + N-D flattened tensor from the sharded_state_dict to its original global shape + as stored in `mcore_data` in the checkpoint. + """ + ckpt_metadata = FileSystemReader(checkpoint_dir).read_metadata() + reformulation_metadata = {} + for sh_ten in nested_values(sharded_state_dict): + if not is_nd_flattened_tensor(sh_ten): + continue + try: + ckpt_global_shape = ckpt_metadata.mcore_data[sh_ten.key][ + 'nd_reformulated_orig_global_shape' + ] + except KeyError as e: + if len(sh_ten.global_shape) == 1: + warnings.warn( + f'Legacy checkpoint format detected for 1-D flattened tensor {sh_ten}. ' + 'Skip metadata reformulation.' 
+ ) + continue + raise CheckpointingException( + f'Cannot find global shape metadata for N-D flattened tensor {sh_ten} ' + f'in checkpoint metadata: {ckpt_metadata.mcore_data}' + ) from e + + reformulation_metadata[sh_ten.key] = TensorReformulationMetadata( + ckpt_global_shape, ckpt_metadata.state_dict_metadata[sh_ten.key].size + ) + return reformulation_metadata + + +class TorchDistLoadShardedStrategy(LoadShardedStrategy): + """Basic load strategy for the PyT Distributed format.""" + + def __init__(self): + self.cached_global_metadata: Optional[Metadata] = None + super().__init__() + + def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path) -> StateDict: + """Translates MCore ShardedTensors to PyT ShardedTensors & loads from PyT Distributed fmt. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict with mapping + information to instruct loading + checkpoint_dir (Path): checkpoint directory + + Returns: loaded state dict + """ + # Apply N-D tensors resharding + reformulation_metadata = get_reformulation_metadata(sharded_state_dict, checkpoint_dir) + sharded_state_dict, formulation_restore_data = apply_nd_flattened_tensors_reformulation( + sharded_state_dict, reformulation_metadata + ) + + # Check if there are legacy 1-D flattened tensors in the checkpoint + has_legacy_1d_flattened_tensors = False + for sh_ten in nested_values(sharded_state_dict): + if is_nd_flattened_tensor(sh_ten) and sh_ten.key not in reformulation_metadata: + has_legacy_1d_flattened_tensors = True + break + + flexible_shape_sharded_tensors = [ + sh_ten + for sh_ten in nested_values(sharded_state_dict) + if isinstance(sh_ten, ShardedTensor) and not sh_ten.allow_shape_mismatch + ] + + orig_sharded_state_dict = sharded_state_dict + # MCore state dict to PyT Distributed compatible + (sharded_state_dict, flat_mapping, rename_mapping) = ( + _replace_state_dict_keys_with_sharded_keys(sharded_state_dict) + ) + pyt_state_dict = mcore_to_pyt_state_dict( + sharded_state_dict, True, load_legacy_1d_flatten_tensors=has_legacy_1d_flattened_tensors + ) + # Load PyT Distributed format + fsr = CachedMetadataFileSystemReader(checkpoint_dir) + checkpoint.load_state_dict( + pyt_state_dict, + fsr, + planner=MCoreLoadPlanner( + shapes_validation_sharded_tensors=flexible_shape_sharded_tensors + ), + ) + + self.cached_global_metadata = ( + fsr.read_metadata() + ) # no storage interaction thanks to caching + + pyt_state_dict = cast( + Dict[str, Union[TorchShardedTensor, List[io.BytesIO]]], pyt_state_dict + ) + # Unwrap ShardedTensors and return to original state dict + mcore_state_dict = { + k: v if not isinstance(v, TorchShardedTensor) else _unwrap_pyt_sharded_tensor(v) + for k, v in pyt_state_dict.items() + } + mcore_state_dict = _replace_sharded_keys_with_state_dict_keys( + mcore_state_dict, flat_mapping, rename_mapping + ) + _restore_dict_types(mcore_state_dict, orig_sharded_state_dict) + # Apply N-D tensors resharding postprocessing + mcore_state_dict = restore_nd_flattened_tensors_formulation( + mcore_state_dict, formulation_restore_data + ) + return mcore_state_dict + + def load_tensors_metadata(self, checkpoint_dir: Path, metadata: Metadata = None): + """Uses tensors metadata stored in the metadata file.""" + if metadata is None: + fs_reader = FileSystemReader(checkpoint_dir) + metadata = fs_reader.read_metadata() + + mcore_data = getattr(metadata, 'mcore_data', {}) + sharded_metadata = {} + for k, tp in metadata.state_dict_metadata.items(): + if not isinstance(tp, TensorStorageMetadata): + continue # 
load only tensors + + nd_orig_global_shape = mcore_data.get(k, {}).get('nd_reformulated_orig_global_shape') + if nd_orig_global_shape is None: + # Regular tensor + sharded_metadata[k] = ShardedTensor.from_rank_offsets( + k, torch.empty(tp.size, **tp.properties.__dict__, device='meta') + ).without_data() + else: + # N-D flattened tensor + unflat_ten = torch.empty( + nd_orig_global_shape, **tp.properties.__dict__, device='meta' + ) + flat_ten = unflat_ten.flatten() + sharded_metadata[k] = ShardedTensor.from_rank_offsets_flat( + k, + flat_ten, + unflat_ten.shape, + flattened_range=slice(0, unflat_ten.numel()), # whole slice + ).without_data() + + return sharded_metadata + + def load_sharded_metadata(self, checkpoint_dir: Path) -> ShardedStateDict: + """Uses tensors and objects metadata stored in the metadata file.""" + fs_reader = FileSystemReader(checkpoint_dir) + metadata = fs_reader.read_metadata() + + sharded_metadata = {} + for metadata_key, storage_metadata in metadata.state_dict_metadata.items(): + if not isinstance(storage_metadata, BytesStorageMetadata): + continue + sh_obj = ShardedObject.empty_from_unique_key(metadata_key) + sharded_metadata[sh_obj.unique_key] = sh_obj + + sharded_metadata.update(self.load_tensors_metadata(checkpoint_dir, metadata)) + return sharded_metadata + + def remove_sharded_tensors(self, checkpoint_dir: str, key_prefix: str): + """Removes checkpoint files whose keys have the given prefix. + + Performs the following steps: + 1. checks whether there are files that start with the key_prefix + 2. loads metadata + 3. removes all entries from the metadata that start with the key_prefix + 4. resaves the new metadata and removes the old metadata + 5. removes the relevant files + """ + + assert is_torch_min_version( + "2.3.0" + ), f'torch >= 2.3.0 is required for remove_sharded_tensors' + + distckpt_files = [f for f in os.listdir(checkpoint_dir) if f.endswith("distcp")] + files_to_remove = [f for f in distckpt_files if f.startswith(key_prefix)] + + if not files_to_remove: + warnings.warn( + f'There are no files in {checkpoint_dir} that begin with "{key_prefix}".' + f' Skipping removal.' 
+ ) + return + + fs_reader = FileSystemReader(checkpoint_dir) + original_metadata = fs_reader.read_metadata() + + new_state_dict_metadata = {} + new_planner_data = {} + new_storage_data = {} + for k in original_metadata.state_dict_metadata.keys(): + if k.startswith(key_prefix): + continue + new_state_dict_metadata[k] = original_metadata.state_dict_metadata[k] + for k in original_metadata.planner_data.keys(): + if k.startswith(key_prefix): + continue + new_planner_data[k] = original_metadata.planner_data[k] + for k in original_metadata.storage_data.keys(): + if k.fqn.startswith(key_prefix): + continue + new_storage_data[k] = original_metadata.storage_data[k] + metadata = Metadata( + state_dict_metadata=new_state_dict_metadata, + planner_data=new_planner_data, + storage_data=new_storage_data, + ) + fs_writer = FileSystemWriter(checkpoint_dir) + metadata_filename = cast(Path, fs_writer.fs.concat_path(fs_writer.path, _metadata_fn)) + tmp_path = cast( + metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.tmp") + ) + old_path = cast( + metadata_filename, fs_writer.fs.concat_path(fs_writer.path, f"{_metadata_fn}.bck") + ) + ## save the new metadata + with fs_writer.fs.create_stream(tmp_path, "wb") as metadata_file: + pickle.dump(metadata, metadata_file) + try: + os.fsync(metadata_file.fileno()) + except AttributeError: + os.sync() + ## move the old metadata + fs_writer.fs.rename(fs_writer.metadata_path, old_path) + try: + ## rename the new metadata + fs_writer.fs.rename(tmp_path, fs_writer.metadata_path) + + ## finally, remove the files we want to drop + for f in files_to_remove: + fs_writer.fs.rm_file(checkpoint_dir / f) + except Exception as e: + fs_writer.fs.rename(old_path, fs_writer.metadata_path) + raise e + else: + fs_writer.fs.rm_file(old_path) + + def can_handle_sharded_objects(self): + return True + + def check_backend_compatibility(self, loaded_version): + pass # TODO + + def check_version_compatibility(self, loaded_version): + pass # TODO diff --git a/megatron/core/dist_checkpointing/strategies/two_stage.py b/megatron/core/dist_checkpointing/strategies/two_stage.py index 72e60bc..b8a5094 100644 --- a/megatron/core/dist_checkpointing/strategies/two_stage.py +++ b/megatron/core/dist_checkpointing/strategies/two_stage.py @@ -1,254 +1,268 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. - -""" 2-stage checkpoint loading. 
""" -import os -import time -from collections import defaultdict -from dataclasses import dataclass -from functools import partial, wraps -from itertools import chain -from logging import DEBUG, INFO, StreamHandler, getLogger -from operator import attrgetter, itemgetter -from pathlib import Path -from typing import Iterable, List, NamedTuple, Optional, Tuple, Union - -import torch - -from ..dict_utils import dict_list_map_inplace, map_reduce, nested_values -from ..mapping import ShardedStateDict, ShardedTensor, StateDict -from .base import LoadShardedStrategy -from .tensorstore import TensorStoreLoadShardedStrategy, _load_from_array, open_ts_array -from .zarr import flatten_range, load_zarr_based_sharded_metadata - -_import_trigger = None - - -timers = defaultdict(list) - -logger = getLogger(__name__) - - -def timed(verbose=True): - def timed_dec(fn): - name = fn.__name__ - - @wraps(fn) - def wrapped(*args, **kwargs): - if verbose: - logger.debug(f'{name} init') - start = time.time() - ret = fn(*args, **kwargs) - took = time.time() - start - if verbose: - logger.debug(f'{name} took {took}s') - timers[name].append(took) - return ret - - return wrapped - - return timed_dec - - -@dataclass -class _ShardedTensorMetadata: - global_rank: int - sharded_tensor_no_data: ShardedTensor - dist_group_rank: Tuple[int] # id of distributed group - dist_group_ranks: Tuple[int] # id of distributed group - data_size: Optional[int] = None # bytes - - -def sharded_tensor_chunk_id(sharded_tensor: ShardedTensor): - return (sharded_tensor.key, sharded_tensor.global_offset) - - -class TwoStageDataParallelLoadShardedStrategy(LoadShardedStrategy): - """Loads one checkpoint replica from storage and broadcasts to other nodes. - - This strategy loads checkpoint from storage on minimal set of nodes - and distributes the checkpoint to other nodes with torch.distributed. - Loading is performed with tensorstore. - - Steps: - 0. (optional) create Gloo distributed groups - 1. Exchange ShardedTensors metadata between all nodes - 2. Align needed tensors within DP groups - 3. For each globally unique tensor: - 3.a) on one of the ranks load it from storage to CPU and move to CUDA - 3.b) allocate CUDA tensor on other ranks - 3.c) broadcast within DP group - 3.d) copy tensor content to the model param location - 3.e) free tensor buffers from a) and b) - - Notes: - 1. Loading and broadcasting is done sequentially to avoid both host and device OOMs - 2. 
There is a lot of overlap potential between all three steps done for each tensor: - 2.a) loading from storage to numpy - 2.b) moving CPU tensors to CUDA - 2.c) broadcast - """ - - def __init__(self, data_parallel_group, cpu_transfer=True): - super().__init__() - - self.cpu_transfer = cpu_transfer - self.data_parallel_group_orig = data_parallel_group - self.data_parallel_group = None if cpu_transfer else data_parallel_group - self.dp_group_ranks = tuple( - sorted(torch.distributed.get_process_group_ranks(data_parallel_group)) - ) - self.dp_group_rank = torch.distributed.get_rank(self.data_parallel_group_orig) - self.global_rank = torch.distributed.get_rank() - - def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): - self.maybe_init_gloo_group() - all_tensors_sorted = self._build_load_plan(sharded_state_dict) - self._exchange_loaded_tensors(all_tensors_sorted, sharded_state_dict, checkpoint_dir) - # TODO: fix hang in summarize_load_times - # self.summarize_load_times() - return sharded_state_dict - - def summarize_load_times(self): - torch.distributed.barrier() - logger.info('Checkpoint loading finished. Summary:') - # TODO: `timers` keys are not guaranteed to be the same across ranks which causes hangs - for key, times in sorted(timers.items()): - times_sum = sum(times) - max_times = torch.tensor([times_sum], device='cuda') - avg_times = torch.tensor([times_sum], device='cuda') - torch.distributed.all_reduce(max_times, op=torch.distributed.ReduceOp.MAX) - torch.distributed.all_reduce(avg_times, op=torch.distributed.ReduceOp.SUM) - avg_times /= torch.distributed.get_world_size() - if torch.distributed.get_rank() == 0: - logger.info(f'{key}: max {max_times[0]}, avg {avg_times[0]}') - - @timed(verbose=False) - def load_tensor_from_storage(self, checkpoint_dir, ten_meta: _ShardedTensorMetadata): - logger.debug(f'_load_from_array({ten_meta.sharded_tensor_no_data.key}) init') - ret = _load_from_array( - ten_meta.sharded_tensor_no_data, - checkpoint_dir, - load_directly_on_device=False, - apply_flattened_range=False, - ) - logger.debug(f'_load_from_array({ten_meta.sharded_tensor_no_data.key}) DONE') - return ret - - @timed() - def maybe_init_gloo_group(self): - if not self.cpu_transfer: - return - all_groups = [None] * torch.distributed.get_world_size() - torch.distributed.all_gather_object(all_groups, self.dp_group_ranks) - all_groups = set(tuple(sorted(gr)) for gr in all_groups) - for group_ranks in sorted(all_groups): - gloo_pg = torch.distributed.new_group(ranks=group_ranks, backend='gloo') - if self.global_rank in group_ranks: - self.data_parallel_group = gloo_pg - assert self.dp_group_rank == torch.distributed.get_rank(self.data_parallel_group) - - def check_backend_compatibility(self, loaded_version): - pass # TODO - - def check_version_compatibility(self, loaded_version): - pass # TODO - - @timed() - def _build_load_plan( - self, sharded_state_dict: ShardedStateDict - ) -> List[_ShardedTensorMetadata]: - local_meta = [ - _ShardedTensorMetadata( - self.global_rank, - sharded_ten.without_data(), - self.dp_group_rank, - self.dp_group_ranks, - ) - for sharded_ten in nested_values(sharded_state_dict) - ] - all_meta = [None] * torch.distributed.get_world_size(group=self.data_parallel_group) - torch.distributed.all_gather_object(all_meta, local_meta, group=self.data_parallel_group) - all_meta = list(chain.from_iterable(all_meta)) - all_tensors_sorted = self.deduplicate_chunks(all_meta) - return all_tensors_sorted - - @timed() - def deduplicate_chunks(self, ten_metas: 
List[_ShardedTensorMetadata]): - """Group tensors by chunk and then pick the tensor with the lowest rank. - - NOTE: with proper loading overlap, loading from randomized ranks - (instead of the smallest one) could be beneficial here. - """ - ten_metas = map_reduce( - ten_metas, - key_fn=lambda meta: sharded_tensor_chunk_id(meta.sharded_tensor_no_data), - reduce_fn=partial(min, key=attrgetter('dist_group_rank')), - ) - all_metas_sorted = list(map(itemgetter(1), sorted(ten_metas.items()))) - return all_metas_sorted - - @timed() - def _exchange_loaded_tensors( - self, ten_metas: List[_ShardedTensorMetadata], sharded_state_dict, checkpoint_dir - ): - logger.debug(f'_exchange_loaded_tensors, num ten_metas: {len(ten_metas)}') - for ten_meta in ten_metas: - - src_rank = torch.distributed.get_global_rank( - self.data_parallel_group, ten_meta.dist_group_rank - ) - - if self.dp_group_rank == ten_meta.dist_group_rank: - exchange_tensor = self.load_tensor_from_storage(checkpoint_dir, ten_meta) - if not self.cpu_transfer: - exchange_tensor = exchange_tensor.cuda() - else: - # TODO: for non-flattened ranges we could reuse the buffer from the start here - exchange_tensor = torch.empty( - ten_meta.sharded_tensor_no_data.local_shape, - device='cpu' if self.cpu_transfer else 'cuda', - dtype=ten_meta.sharded_tensor_no_data.dtype, - ) - - logger.debug( - f'exchange {ten_meta.sharded_tensor_no_data.key}, {exchange_tensor.shape}({exchange_tensor.numel()}), broadcast({src_rank} -> {self.dp_group_ranks})' - ) - torch.distributed.broadcast( - exchange_tensor, group=self.data_parallel_group, src=src_rank - ) - self._distribute_data_to_state_dict(ten_meta, exchange_tensor, sharded_state_dict) - logger.debug(f'exchange {ten_meta.sharded_tensor_no_data.key} done') - - # free buffer memory - exchange_tensor = None - - @timed(verbose=False) - def _distribute_data_to_state_dict( - self, - ten_meta: _ShardedTensorMetadata, - loaded_ten: torch.Tensor, - sharded_state_dict: ShardedStateDict, - ): - tensor_key = sharded_tensor_chunk_id(ten_meta.sharded_tensor_no_data) - - def _fill_in_data(t: Union[ShardedTensor, torch.Tensor]): - if not isinstance(t, ShardedTensor) or sharded_tensor_chunk_id(t) != tensor_key: - # already filled-in or key not matching - return t - sharded_tensor: ShardedTensor = t - x = loaded_ten - if sharded_tensor.flattened_range is not None: - x = flatten_range(sharded_tensor, x) - - # Reuse existing buffer - sharded_tensor.data.data.copy_(x) - return sharded_tensor.data - - dict_list_map_inplace(_fill_in_data, sharded_state_dict) - - def load_tensors_metadata(self, checkpoint_dir: Path): - def get_ts_shape_dtype(path): - arr = open_ts_array(path) - return arr.shape, arr.dtype.numpy_dtype - - return load_zarr_based_sharded_metadata(checkpoint_dir, get_ts_shape_dtype) +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" 2-stage checkpoint loading. 
""" +import time +from collections import defaultdict +from dataclasses import dataclass +from functools import partial, wraps +from itertools import chain +from logging import getLogger +from operator import attrgetter, itemgetter +from pathlib import Path +from typing import List, Optional, Tuple, Union + +import torch + +from ..dict_utils import dict_list_map_inplace, map_reduce, nested_values +from ..mapping import ShardedStateDict, ShardedTensor +from .base import LoadShardedStrategy +from .tensorstore import _load_from_array, open_ts_array +from .zarr import flatten_range, load_zarr_based_sharded_metadata + +_import_trigger = None + + +timers = defaultdict(list) + +logger = getLogger(__name__) +logger.warning( + 'megatron.core.dist_checkpointing.two_stage module is deprecated' + ' and will be removed in Megatron-Core v0.12. Please use' + ' FullyParallelLoadStrategyWrapper to accomplish a parallelized checkpoint load.' +) + + +def timed(verbose=True): + """Timing decorator.""" + + def timed_dec(fn): + name = fn.__name__ + + @wraps(fn) + def wrapped(*args, **kwargs): + if verbose: + logger.debug(f'{name} init') + start = time.time() + ret = fn(*args, **kwargs) + took = time.time() - start + if verbose: + logger.debug(f'{name} took {took}s') + timers[name].append(took) + return ret + + return wrapped + + return timed_dec + + +@dataclass +class _ShardedTensorMetadata: + global_rank: int + sharded_tensor_no_data: ShardedTensor + dist_group_rank: Tuple[int] # id of distributed group + dist_group_ranks: Tuple[int] # id of distributed group + data_size: Optional[int] = None # bytes + + +def sharded_tensor_chunk_id(sharded_tensor: ShardedTensor): + """Id of a sharded tensor.""" + return (sharded_tensor.key, sharded_tensor.global_offset) + + +class TwoStageDataParallelLoadShardedStrategy(LoadShardedStrategy): + """Loads one checkpoint replica from storage and broadcasts to other nodes. + + This strategy loads checkpoint from storage on minimal set of nodes + and distributes the checkpoint to other nodes with torch.distributed. + Loading is performed with tensorstore. + + Steps: + 0. (optional) create Gloo distributed groups + 1. Exchange ShardedTensors metadata between all nodes + 2. Align needed tensors within DP groups + 3. For each globally unique tensor: + 3.a) on one of the ranks load it from storage to CPU and move to CUDA + 3.b) allocate CUDA tensor on other ranks + 3.c) broadcast within DP group + 3.d) copy tensor content to the model param location + 3.e) free tensor buffers from a) and b) + + Notes: + 1. Loading and broadcasting is done sequentially to avoid both host and device OOMs + 2. 
There is a lot of overlap potential between all three steps done for each tensor: + 2.a) loading from storage to numpy + 2.b) moving CPU tensors to CUDA + 2.c) broadcast + """ + + def __init__(self, data_parallel_group, cpu_transfer=True): + super().__init__() + + self.cpu_transfer = cpu_transfer + self.data_parallel_group_orig = data_parallel_group + self.data_parallel_group = None if cpu_transfer else data_parallel_group + self.dp_group_ranks = tuple( + sorted(torch.distributed.get_process_group_ranks(data_parallel_group)) + ) + self.dp_group_rank = torch.distributed.get_rank(self.data_parallel_group_orig) + self.global_rank = torch.distributed.get_rank() + + def load(self, sharded_state_dict: ShardedStateDict, checkpoint_dir: Path): + """Main load method.""" + self.maybe_init_gloo_group() + all_tensors_sorted = self._build_load_plan(sharded_state_dict) + self._exchange_loaded_tensors(all_tensors_sorted, sharded_state_dict, checkpoint_dir) + # TODO: fix hang in summarize_load_times + # self.summarize_load_times() + return sharded_state_dict + + def summarize_load_times(self): + """Summarize load times.""" + torch.distributed.barrier() + logger.info('Checkpoint loading finished. Summary:') + # TODO: `timers` keys are not guaranteed to be the same across ranks which causes hangs + for key, times in sorted(timers.items()): + times_sum = sum(times) + max_times = torch.tensor([times_sum], device='cuda') + avg_times = torch.tensor([times_sum], device='cuda') + torch.distributed.all_reduce(max_times, op=torch.distributed.ReduceOp.MAX) + torch.distributed.all_reduce(avg_times, op=torch.distributed.ReduceOp.SUM) + avg_times /= torch.distributed.get_world_size() + if torch.distributed.get_rank() == 0: + logger.info(f'{key}: max {max_times[0]}, avg {avg_times[0]}') + + @timed(verbose=False) + def load_tensor_from_storage(self, checkpoint_dir, ten_meta: _ShardedTensorMetadata): + """Load tensor from storage.""" + logger.debug(f'_load_from_array({ten_meta.sharded_tensor_no_data.key}) init') + ret = _load_from_array( + ten_meta.sharded_tensor_no_data, + checkpoint_dir, + load_directly_on_device=False, + apply_flattened_range=False, + ) + logger.debug(f'_load_from_array({ten_meta.sharded_tensor_no_data.key}) DONE') + return ret + + @timed() + def maybe_init_gloo_group(self): + """Create Gloo groups.""" + if not self.cpu_transfer: + return + all_groups = [None] * torch.distributed.get_world_size() + torch.distributed.all_gather_object(all_groups, self.dp_group_ranks) + all_groups = set(tuple(sorted(gr)) for gr in all_groups) + for group_ranks in sorted(all_groups): + # "two_stage" module will be deprecated, so not replace new_group() + # with ...parallel_state.create_group() func setting group_desc here. 
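+ # new_group() is a collective call: every rank creates every DP group in the
+ # same (sorted) order and keeps only the group that contains its own rank.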
+ gloo_pg = torch.distributed.new_group(ranks=group_ranks, backend='gloo') + if self.global_rank in group_ranks: + self.data_parallel_group = gloo_pg + assert self.dp_group_rank == torch.distributed.get_rank(self.data_parallel_group) + + def check_backend_compatibility(self, loaded_version): + pass # TODO + + def check_version_compatibility(self, loaded_version): + pass # TODO + + @timed() + def _build_load_plan( + self, sharded_state_dict: ShardedStateDict + ) -> List[_ShardedTensorMetadata]: + local_meta = [ + _ShardedTensorMetadata( + self.global_rank, + sharded_ten.without_data(), + self.dp_group_rank, + self.dp_group_ranks, + ) + for sharded_ten in nested_values(sharded_state_dict) + ] + all_meta = [None] * torch.distributed.get_world_size(group=self.data_parallel_group) + torch.distributed.all_gather_object(all_meta, local_meta, group=self.data_parallel_group) + all_meta = list(chain.from_iterable(all_meta)) + all_tensors_sorted = self.deduplicate_chunks(all_meta) + return all_tensors_sorted + + @timed() + def deduplicate_chunks(self, ten_metas: List[_ShardedTensorMetadata]): + """Group tensors by chunk and then pick the tensor with the lowest rank. + + NOTE: with proper loading overlap, loading from randomized ranks + (instead of the smallest one) could be beneficial here. + """ + ten_metas = map_reduce( + ten_metas, + key_fn=lambda meta: sharded_tensor_chunk_id(meta.sharded_tensor_no_data), + reduce_fn=partial(min, key=attrgetter('dist_group_rank')), + ) + all_metas_sorted = list(map(itemgetter(1), sorted(ten_metas.items()))) + return all_metas_sorted + + @timed() + def _exchange_loaded_tensors( + self, ten_metas: List[_ShardedTensorMetadata], sharded_state_dict, checkpoint_dir + ): + logger.debug(f'_exchange_loaded_tensors, num ten_metas: {len(ten_metas)}') + for ten_meta in ten_metas: + + src_rank = torch.distributed.get_global_rank( + self.data_parallel_group, ten_meta.dist_group_rank + ) + + if self.dp_group_rank == ten_meta.dist_group_rank: + exchange_tensor = self.load_tensor_from_storage(checkpoint_dir, ten_meta) + if not self.cpu_transfer: + exchange_tensor = exchange_tensor.cuda() + else: + # TODO: for non-flattened ranges we could reuse the buffer from the start here + exchange_tensor = torch.empty( + ten_meta.sharded_tensor_no_data.local_shape, + device='cpu' if self.cpu_transfer else 'cuda', + dtype=ten_meta.sharded_tensor_no_data.dtype, + ) + + logger.debug( + f'exchange {ten_meta.sharded_tensor_no_data.key}, {exchange_tensor.shape}\ +({exchange_tensor.numel()}), broadcast({src_rank} -> {self.dp_group_ranks})' + ) + torch.distributed.broadcast( + exchange_tensor, group=self.data_parallel_group, src=src_rank + ) + self._distribute_data_to_state_dict(ten_meta, exchange_tensor, sharded_state_dict) + logger.debug(f'exchange {ten_meta.sharded_tensor_no_data.key} done') + + # free buffer memory + exchange_tensor = None + + @timed(verbose=False) + def _distribute_data_to_state_dict( + self, + ten_meta: _ShardedTensorMetadata, + loaded_ten: torch.Tensor, + sharded_state_dict: ShardedStateDict, + ): + tensor_key = sharded_tensor_chunk_id(ten_meta.sharded_tensor_no_data) + + def _fill_in_data(t: Union[ShardedTensor, torch.Tensor]): + if not isinstance(t, ShardedTensor) or sharded_tensor_chunk_id(t) != tensor_key: + # already filled-in or key not matching + return t + sharded_tensor: ShardedTensor = t + x = loaded_ten + if sharded_tensor.flattened_range is not None: + x = flatten_range(sharded_tensor, x) + + # Reuse existing buffer + sharded_tensor.data.data.copy_(x) + return 
sharded_tensor.data + + dict_list_map_inplace(_fill_in_data, sharded_state_dict) + + def load_tensors_metadata(self, checkpoint_dir: Path): + def get_ts_shape_dtype(path): + arr = open_ts_array(path) + return arr.shape, arr.dtype.numpy_dtype + + return load_zarr_based_sharded_metadata(checkpoint_dir, get_ts_shape_dtype) diff --git a/megatron/core/dist_checkpointing/tensor_aware_state_dict.py b/megatron/core/dist_checkpointing/tensor_aware_state_dict.py new file mode 100644 index 0000000..6f3d11b --- /dev/null +++ b/megatron/core/dist_checkpointing/tensor_aware_state_dict.py @@ -0,0 +1,347 @@ +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Utilities for transforming state_dict, including a tensor-aware implementation.""" + +import logging +from dataclasses import dataclass +from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple + +import torch +from nvidia_resiliency_ext.checkpointing.local.base_state_dict import TensorAwareStateDict + +from .dict_utils import dict_list_map_inplace, dict_list_map_outplace, merge, nested_values +from .exchange_utils import ( + ShardDistribution, + determine_main_replica_uniform_distribution, + exchange_by_distribution, +) +from .mapping import ShardedObject, ShardedStateDict, ShardedTensor, StateDict, apply_factory_merges +from .state_dict_utils import load_preprocess, save_preprocess +from .utils import ( + _sharded_object_id, + _sharded_tensor_shard_id, + debug_time, + extract_sharded_base, + zip_strict, +) +from .validation import determine_global_metadata, validate_sharding_integrity + +logger = logging.getLogger(__name__) + + +@dataclass +class MCoreTensorAwareStateDict(TensorAwareStateDict): + """ + MCore-specific class defining the interface between the MCore state dict and checkpoint manager. + + This class distinguishes between raw objects, the common state dict, and sharded state dicts + (tensor parts). It also handles optional metadata needed for fully parallel save/load. + """ + + common: StateDict + sharded_state_dict: ShardedStateDict + _is_hollow: bool = False + + @staticmethod + def _validate_params(algo): + if algo != 'atomic' and algo != 'fully_parallel': + raise NotImplementedError( + 'Only "atomic" and "fully_parallel" sharding algorithms are supported.' 
+ ) + + @staticmethod + def _get_distribution( + fully_parallel, sharded_part, parallelization_group, cached_distribution=None + ): + if fully_parallel: + if cached_distribution is None: + distribution = determine_main_replica_uniform_distribution( + sharded_part, parallelization_group, True + ) + logger.debug(f'MCore_TASD._get_distribution calculated distribution') + else: + distribution = cached_distribution + logger.debug(f'MCore_TASD._get_distribution used cache') + else: + distribution = (None, None, None, None) + logger.debug(f'MCore_TASD._get_distribution returned empty distribution') + return distribution + + @staticmethod + def _remove_redundant_data( + fully_parallel, sharded_part, shard_to_saving_rank, parallelization_group + ): + if fully_parallel: + for sh_base in nested_values(sharded_part): + # TODO remove redundant objects as well + if isinstance(sh_base, ShardedTensor): + shard_id = _sharded_tensor_shard_id(sh_base) + if shard_to_saving_rank[shard_id] != torch.distributed.get_rank( + group=parallelization_group + ): + sh_base.data = None + + @classmethod + @debug_time("from_state_dict", logger) + def from_state_dict( + cls, + sharded_state_dict: ShardedStateDict, + algo: str = 'fully_parallel', + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, + cached_metadata: ShardDistribution = None, + ) -> Tuple[TensorAwareStateDict, ShardDistribution]: + """ + Constructs a TensorAwareStateDict from a sharded state dictionary. + + This method preprocesses the input `sharded_state_dict`, validates parameters, + and extracts the necessary data to create an instance of `MCoreTensorAwareStateDict`. + + Args: + sharded_state_dict: The input sharded state dictionary to be converted. + algo (str, optional): Initialization algorithm. Defaults to 'fully_parallel'. + - 'fully_parallel' enables fully parallel initialization. + parallelization_group (Optional): A distributed process group for parallelization. + cached_metadata (Optional): Precomputed metadata from previous saves. + - Reuses data that doesn't need recalculation, optimizing the creation process. + + Returns: + TensorAwareStateDict: An instance initialized with the provided sharded state dictionary + and optional cached metadata. + - The metadata is stored in memory to speed up future saves. + """ + with debug_time("_get_distribution", logger): + cls._validate_params(algo) + fully_parallel = algo == 'fully_parallel' + sharded_part, common_state_dict = save_preprocess( + sharded_state_dict, cached_metadata is None + ) + cacheable_distribution = cls._get_distribution( + fully_parallel, sharded_part, parallelization_group, cached_metadata + ) + if cacheable_distribution is not None: + shard_to_saving_rank, _, _, _ = cacheable_distribution + cls._remove_redundant_data( + fully_parallel, sharded_part, shard_to_saving_rank, parallelization_group + ) + + return ( + MCoreTensorAwareStateDict(common=common_state_dict, sharded_state_dict=sharded_part), + cacheable_distribution, + ) + + @property + def is_hollow(self): + """ + True iff tensors had been extracted and have not been inserted back yet. + """ + return self._is_hollow + + @property + def _sharded_tensors(self): + # Three possible states for sharded_tensor: + # 1. sharded_tensor with data (.data = tensor) + # 2. sharded_tensor hollow (.data = None, .orig_device = orig_device) + # 3. 
removed sharded_tensor (.data = None, no device information) + # TODO: Consider simplifying by removing the entire sharded_tensor instead of just the data + if self.is_hollow: + for sh_base in nested_values(self.sharded_state_dict): + # FIXME: Hacky way to store the original device of the popped tensor + if isinstance(sh_base, ShardedTensor) and hasattr(sh_base, 'orig_device'): + yield sh_base + else: + for sh_base in nested_values(self.sharded_state_dict): + if isinstance(sh_base, ShardedTensor) and sh_base.data is not None: + yield sh_base + + @property + def tensors(self) -> Iterator[torch.Tensor]: + """ + Get the tensor data from the state dict. + """ + assert not self.is_hollow # TODO raise exception + return map(lambda sh_ten: sh_ten.data, self._sharded_tensors) + + @property + def common_state_dict(self) -> Dict: + """ + Get the common state dict from the state dict. + """ + return self.common + + def pop_tensors(self) -> List[torch.Tensor]: + """ + Extracts the tensor data from the wrapped state dict, preserving metadata. + + Replaces the tensor data in sharded_tensors with device type of extracted tensors. + After this operation, the state dictionary is "hollow", containing no tensor data. + Further calls to `pop_tensor` will raise an error. + + @return List of extracted tensors + """ + assert not self.is_hollow # TODO raise exception + result = [] + for sh_ten in self._sharded_tensors: + result.append(sh_ten.data) + # FIXME: Hacky way to store the original device, which is not included in the metadata + setattr(sh_ten, 'orig_device', sh_ten.data.device.type) + sh_ten.data = None + self._is_hollow = True + return result + + def insert_tensors(self, tensor_data: Iterable[torch.Tensor]): + """ + Reverse of `pop_tensors`. Replaces device type in sharded_tensors with actual values + Value of `self` is considered to be the same after: + ``` + self.insert_tensors(self.pop_tensors()) + ``` + """ + assert self.is_hollow # TODO raise exception + for sh_ten, ten in zip_strict(self._sharded_tensors, tensor_data): + # FIXME: Hacky way to store the original device + if sh_ten.orig_device == ten.device.type: + delattr(sh_ten, 'orig_device') + # Tensor might be on non-original device + sh_ten.data = ten + self._is_hollow = False + + def init_tensors(self): + """ + Initializes empty tensors with the same properties as the original tensors. + + This function should only be called after the original tensors have been popped. + It ensures that the newly created empty tensors match the shape, + dtype, and device of the originals, but contain no data. + """ + assert self.is_hollow # TODO raise exception + for sh_ten in self._sharded_tensors: + # Hacky way to retrieve the original device + sh_ten.init_data(sh_ten.orig_device) + delattr(sh_ten, 'orig_device') + self._is_hollow = False + + def copy_tensors_to_cpu(self, non_blocking=False): + """ + Stores CPU copies of tensors in the state_dict, replacing the originals, + but without destroying them. + The original devices are remembered for restoration with restore_tensor_device(). + Using non_blocking=True allows for asynchronous copying. 
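+
+ Example (sketch; assumes `tasd` is an MCoreTensorAwareStateDict instance):
+ tasd.copy_tensors_to_cpu(non_blocking=True)
+ torch.cuda.synchronize() # wait for the asynchronous device-to-CPU copies
+ # ... persist or hand off the CPU tensors ...
+ tasd.restore_tensor_device() # move tensors back to their original devices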
+ """ + assert not self.is_hollow # TODO raise exception + for sh_ten in self._sharded_tensors: + if sh_ten.data.device.type == 'cpu': + # Skip cloning if it's already confirmed to be a copy + if not hasattr(sh_ten, 'orig_device'): + sh_ten.data = sh_ten.data.clone() + else: + # FIXME: Hacky way to store the original device + if not hasattr(sh_ten, 'orig_device'): + setattr(sh_ten, 'orig_device', sh_ten.data.device.type) + sh_ten.data = sh_ten.data.detach().to("cpu", non_blocking=non_blocking) + + def restore_tensor_device(self, non_blocking=True): + """ + Restores all tensors to their original devices, if a move is required. + Using non_blocking=True allows for asynchronous copying. + """ + assert not self.is_hollow # TODO raise exception + for sh_ten in self._sharded_tensors: + # FIXME: Hacky way to store the original device + if hasattr(sh_ten, 'orig_device'): + sh_ten.data = sh_ten.data.to(sh_ten.orig_device, non_blocking=non_blocking) + delattr(sh_ten, 'orig_device') + + def _insert_sharded_data( + self, fully_parallel, sharded_part, parallelization_group, exchange_algo + ): + loaded_tensors = {} + for sh_ten in self._sharded_tensors: + loaded_tensors[_sharded_tensor_shard_id(sh_ten)] = sh_ten.data + if fully_parallel: + with debug_time("_get_distribution", logger): + distribution = self._get_distribution( + fully_parallel, sharded_part, parallelization_group + ) + if distribution is not None: + unloaded_shards = {} + for sh_base in nested_values(sharded_part): + # TODO retrieve redundant ShardedObjects once removed in _remove_redundant_data + if isinstance(sh_base, ShardedTensor): + shard_id = _sharded_tensor_shard_id(sh_base) + if shard_id not in loaded_tensors: + unloaded_shards[shard_id] = sh_base + + with debug_time("exchange_by_distribution", logger): + loaded_tensors = exchange_by_distribution( + loaded_tensors, + unloaded_shards, + distribution, + parallelization_group, + exchange_algo, + ) + torch.cuda.synchronize() + loaded_objects = {} + for sh_base in nested_values(self.sharded_state_dict): + if not isinstance(sh_base, ShardedTensor): + assert isinstance(sh_base, ShardedObject) + loaded_objects[_sharded_object_id(sh_base)] = sh_base.data + + def load_sharded_base(x: Any): + if isinstance(x, ShardedTensor): + shard_id = _sharded_tensor_shard_id(x) + assert shard_id in loaded_tensors, (x, shard_id, loaded_tensors.keys()) + x = loaded_tensors[shard_id] + if isinstance(x, ShardedObject): + object_id = _sharded_object_id(x) + assert object_id in loaded_objects, (x, object_id, loaded_objects.keys()) + x = loaded_objects[object_id] + return x + + dict_list_map_inplace(load_sharded_base, sharded_part) + + @debug_time("to_state_dict", logger) + def to_state_dict( + self, + sharded_state_dict: ShardedStateDict, + algo: str = 'atomic', + exchange_algo: str = 'broadcast', + validate_access_integrity: bool = True, + parallelization_group: Optional[torch.distributed.ProcessGroup] = None, + ): + """ + Convert tensor-aware dict back to the original state_dict + """ + with debug_time("load_preprocess_and_state_dict_manipulations", logger): + assert not self.is_hollow # TODO raise exception + self._validate_params(algo) + fully_parallel = algo == 'fully_parallel' + + # __adding__ common part + recreated_state_dict = dict_list_map_outplace(lambda x: x, self.common) + + if not sharded_state_dict: + return recreated_state_dict + # TODO validate self.sharded_state_dict"] and sharded_state_dict are compatible + + sharded_state_dict, nonpersistent_state_dict, sh_ten_factories = load_preprocess( + 
sharded_state_dict + ) + # __adding__ nonpersistent part + merge(recreated_state_dict, nonpersistent_state_dict) + + sharded_part, _ = extract_sharded_base(sharded_state_dict) + + if validate_access_integrity: + with debug_time("validate_sharding_integrity", logger): + validate_sharding_integrity(determine_global_metadata(sharded_part)[1]) + + # load sharded tensors and sharded objects to sharded_part + with debug_time("_insert_sharded_data", logger): + self._insert_sharded_data( + fully_parallel, sharded_part, parallelization_group, exchange_algo + ) + with debug_time("apply_factory_merges", logger): + sharded_part = apply_factory_merges(sharded_part, sh_ten_factories) + # __adding__ sharded_part + merge(recreated_state_dict, sharded_part) + return recreated_state_dict diff --git a/megatron/core/dist_checkpointing/utils.py b/megatron/core/dist_checkpointing/utils.py index 9186e47..815c950 100644 --- a/megatron/core/dist_checkpointing/utils.py +++ b/megatron/core/dist_checkpointing/utils.py @@ -1,219 +1,319 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. - -""" Helpers for manipulating sharded tensors and sharded state dicts. """ - -from typing import Dict, Optional, Tuple - -from .dict_utils import dict_list_map_inplace, extract_matching_values -from .mapping import ( - LocalNonpersistentObject, - ShardedBase, - ShardedObject, - ShardedStateDict, - ShardedTensor, - ShardedTensorFactory, - StateDict, -) - -# _ShardId uniquely identifies a ShardedTensor. This is a subset of ShardedTensor -# attributes: key (str), global_offset (tuple) and flattened_range (optional tuple) -_ShardId = Tuple[str, tuple, Optional[tuple]] - - -def _sharded_tensor_shard_id(sharded_tensor: ShardedTensor) -> _ShardId: - """Unique id of the sharded tensor data. - - Should yield the same value for same data replicated on different ranks. - - Args: - sharded_tensor (ShardedTensor): sharded tensor representing the data shard - - Returns (tuple): unique id of a data shard - """ - f_range = sharded_tensor.flattened_range - return ( - sharded_tensor.key, - sharded_tensor.global_offset, - None if f_range is None else (f_range.start, f_range.stop), - ) - - -def _sharded_object_id(sharded_object: ShardedObject) -> _ShardId: - """Unique id of the sharded object data. - - Should yield the same value for same data replicated on different ranks. - - Args: - sharded_object (ShardedObject): sharded object representing the data shard - - Returns (tuple): unique id of a data shard - """ - return (sharded_object.key, sharded_object.global_offset, sharded_object.global_shape) - - -def extract_sharded_tensors( - sharded_state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, StateDict]: - """Extract a dict consisting of only ShardedTensor objects - from a given state dict with any objects. - - Args: - sharded_state_dict: state dict possibly containing ShardedTensor objects - - Returns: - Tuple[ShardedStateDict, StateDict]: tuple of: - - state dict with all ShardedTensor (keeping the original state dict structure) - - state dict with all objects other than ShardedTensor - (keeping the original state dict structure) - """ - return extract_matching_values(sharded_state_dict, lambda v: isinstance(v, ShardedTensor)) - - -def extract_sharded_tensors_and_factories( - sharded_state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, StateDict]: - """Extract a dict consisting of only ShardedTensor and ShardedTensorFactory objects - from a given state dict with any objects. 
- - Args: - sharded_state_dict: - state dict possibly containing ShardedTensor and ShardedTensorFactory objects - - Returns: - Tuple[ShardedStateDict, StateDict]: tuple of: - - state dict with all ShardedTensor and ShardedTensorFactory - (keeping the original state dict structure) - - state dict with all other objects (keeping the original state dict structure) - """ - return extract_matching_values( - sharded_state_dict, lambda v: isinstance(v, (ShardedTensor, ShardedTensorFactory)) - ) - - -def extract_sharded_tensors_or_nonpersistent( - sharded_state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, StateDict]: - """Extract a dict consisting of only ShardedTensor, ShardedTensorFactory - and LocalNonpersistentObject objects from a given state dict with any objects. - - Args: - sharded_state_dict: state dict possibly containing ShardedTensor, ShardedTensorFactory - and LocalNonpersistentObject objects - - Returns: - Tuple[ShardedStateDict, StateDict]: tuple of: - - state dict with all ShardedTensor, ShardedTensorFactory and LocalNonpersistentObject - (keeping the original state dict structure) - - state dict with all other objects (keeping the original state dict structure) - """ - return extract_matching_values( - sharded_state_dict, - lambda v: isinstance(v, (ShardedTensor, LocalNonpersistentObject, ShardedTensorFactory)), - ) - - -def extract_sharded_base( - sharded_state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, StateDict]: - """Extract a dict consisting of only ShardedBase from a given state dict with any objects. - - Args: - sharded_state_dict: state dict possibly containing ShardedBase objects - - Returns: - Tuple[ShardedStateDict, StateDict]: tuple of: - - state dict with all ShardedBase objects (keeping the original state dict structure) - - state dict with all other objects (keeping the original state dict structure) - """ - return extract_matching_values(sharded_state_dict, lambda v: isinstance(v, ShardedBase)) - - -def extract_nonpersistent( - sharded_state_dict: ShardedStateDict, -) -> Tuple[ShardedStateDict, StateDict]: - """Extract a dict consisting of only LocalNonpersistentObjects from a given state dict. - - Args: - sharded_state_dict: state dict possibly containing LocalNonpersistentObjects - - Returns: - Tuple[ShardedStateDict, StateDict]: tuple of: - - state dict with all LocalNonpersistentObjects - (keeping the original state dict structure) - - state dict with all other objects (keeping the original state dict structure) - """ - - return extract_matching_values( - sharded_state_dict, lambda v: isinstance(v, LocalNonpersistentObject) - ) - - -def add_prefix_for_sharding(sharded_state_dict: ShardedStateDict, prefix: str): - """Prepend a given prefix to all ShardedBase objects in a given state dict *in-place*. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict - prefix (str): prefix to be prepended - - Returns: - None: state dict is modified in-place - """ - - def add_prefix(t): - if isinstance(t, ShardedBase): - t.key = f'{prefix}{t.key}' - return t - - dict_list_map_inplace(add_prefix, sharded_state_dict) - - -def replace_prefix_for_sharding( - sharded_state_dict: ShardedStateDict, old_prefix: str, new_prefix: str -): - """Replaces the given prefix in *all* sharded keys in a given state dict. - - Errors out if some key does not begin with a given prefix. 
- - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to replace keys in - old_prefix (str): prefix to be replaced in each key - new_prefix (str): new prefix - - Returns: - None: state dict is modified in place - """ - - def _replace_prefix(x): - if isinstance(x, (ShardedTensor, ShardedTensorFactory, ShardedObject)): - if not x.key.startswith(old_prefix): - raise ValueError(f'Expected {x.key} to begin with prefix {old_prefix}') - x.key = f'{new_prefix}{x.key[len(old_prefix):]}' # str.removeprefix in Python >= 3.9 - return x - - dict_list_map_inplace(_replace_prefix, sharded_state_dict) - - -def apply_prefix_mapping(sharded_state_dict: ShardedStateDict, prefix_map: Dict[str, str]): - """Replaces prefixes *only in keys matching* with one of prefixes in the map. - - Args: - sharded_state_dict (ShardedStateDict): sharded state dict to replace keys in - prefix_map (Dict[str, str]): - map of old->new prefixes. The first matching prefix for each key is used - - Returns: - None: state dict is modified in place - """ - - def _replace_prefixes(x): - if not isinstance(x, (ShardedTensor, ShardedTensorFactory, ShardedObject)): - return x - for old_prefix, new_prefix in prefix_map.items(): - if x.key.startswith(old_prefix): - x.key = ( - f'{new_prefix}{x.key[len(old_prefix):]}' # str.removeprefix in Python >= 3.9 - ) - break - return x - - dict_list_map_inplace(_replace_prefixes, sharded_state_dict) +# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. + +""" Helpers for manipulating sharded tensors and sharded state dicts. """ +import logging +from contextlib import contextmanager +from time import time +from typing import Dict, Optional, Tuple + +from .dict_utils import dict_list_map_inplace, extract_matching_values +from .mapping import ( + LocalNonpersistentObject, + ShardedBase, + ShardedObject, + ShardedStateDict, + ShardedTensor, + ShardedTensorFactory, + StateDict, +) + +# _ShardId uniquely identifies a ShardedTensor. This is a subset of ShardedTensor +# attributes: key (str), global_offset (tuple) and flattened_range (optional tuple) +_ShardId = Tuple[str, tuple, Optional[tuple]] + + +def zip_strict(*args): + """ + Alternative to Python's builtin zip(..., strict=True) (available in 3.10+). + Apart from providing functionality in earlier versions of Python is also more verbose. + (Python's zip does not print lengths, only which iterable has finished earlier) + """ + args = [list(a) for a in args] + lens = [len(a) for a in args] + assert len(set(lens)) <= 1, f"Tried to zip iterables of unequal lengths: {lens}!" + return zip(*args) + + +def _sharded_tensor_shard_id(sharded_tensor: ShardedTensor) -> _ShardId: + """Unique id of the sharded tensor data. + + Should yield the same value for same data replicated on different ranks. + + Args: + sharded_tensor (ShardedTensor): sharded tensor representing the data shard + + Returns (tuple): unique id of a data shard + """ + f_range = sharded_tensor.flattened_range + return ( + sharded_tensor.key, + sharded_tensor.global_offset, + None if f_range is None else (f_range.start, f_range.stop), + ) + + +def _sharded_object_id(sharded_object: ShardedObject) -> _ShardId: + """Unique id of the sharded object data. + + Should yield the same value for same data replicated on different ranks. 
+ + Args: + sharded_object (ShardedObject): sharded object representing the data shard + + Returns (tuple): unique id of a data shard + """ + return (sharded_object.key, sharded_object.global_offset, sharded_object.global_shape) + + +def extract_sharded_tensors( + sharded_state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, StateDict]: + """Extract a dict consisting of only ShardedTensor objects + from a given state dict with any objects. + + Args: + sharded_state_dict: state dict possibly containing ShardedTensor objects + + Returns: + Tuple[ShardedStateDict, StateDict]: tuple of: + - state dict with all ShardedTensor (keeping the original state dict structure) + - state dict with all objects other than ShardedTensor + (keeping the original state dict structure) + """ + return extract_matching_values(sharded_state_dict, lambda v: isinstance(v, ShardedTensor)) + + +def extract_sharded_tensors_and_factories( + sharded_state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, StateDict]: + """Extract a dict consisting of only ShardedTensor and ShardedTensorFactory objects + from a given state dict with any objects. + + Args: + sharded_state_dict: + state dict possibly containing ShardedTensor and ShardedTensorFactory objects + + Returns: + Tuple[ShardedStateDict, StateDict]: tuple of: + - state dict with all ShardedTensor and ShardedTensorFactory + (keeping the original state dict structure) + - state dict with all other objects (keeping the original state dict structure) + """ + return extract_matching_values( + sharded_state_dict, lambda v: isinstance(v, (ShardedTensor, ShardedTensorFactory)) + ) + + +def extract_sharded_tensors_or_nonpersistent( + sharded_state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, StateDict]: + """Extract a dict consisting of only ShardedTensor, ShardedTensorFactory + and LocalNonpersistentObject objects from a given state dict with any objects. + + Args: + sharded_state_dict: state dict possibly containing ShardedTensor, ShardedTensorFactory + and LocalNonpersistentObject objects + + Returns: + Tuple[ShardedStateDict, StateDict]: tuple of: + - state dict with all ShardedTensor, ShardedTensorFactory and LocalNonpersistentObject + (keeping the original state dict structure) + - state dict with all other objects (keeping the original state dict structure) + """ + return extract_matching_values( + sharded_state_dict, + lambda v: isinstance(v, (ShardedTensor, LocalNonpersistentObject, ShardedTensorFactory)), + ) + + +def extract_sharded_base( + sharded_state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, StateDict]: + """Extract a dict consisting of only ShardedBase from a given state dict with any objects. + + Args: + sharded_state_dict: state dict possibly containing ShardedBase objects + + Returns: + Tuple[ShardedStateDict, StateDict]: tuple of: + - state dict with all ShardedBase objects (keeping the original state dict structure) + - state dict with all other objects (keeping the original state dict structure) + """ + return extract_matching_values(sharded_state_dict, lambda v: isinstance(v, ShardedBase)) + + +def extract_nonpersistent( + sharded_state_dict: ShardedStateDict, +) -> Tuple[ShardedStateDict, StateDict]: + """Extract a dict consisting of only LocalNonpersistentObjects from a given state dict. 
+ + Args: + sharded_state_dict: state dict possibly containing LocalNonpersistentObjects + + Returns: + Tuple[ShardedStateDict, StateDict]: tuple of: + - state dict with all LocalNonpersistentObjects + (keeping the original state dict structure) + - state dict with all other objects (keeping the original state dict structure) + """ + + return extract_matching_values( + sharded_state_dict, lambda v: isinstance(v, LocalNonpersistentObject) + ) + + +def add_prefix_for_sharding(sharded_state_dict: ShardedStateDict, prefix: str): + """Prepend a given prefix to all ShardedBase objects in a given state dict *in-place*. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict + prefix (str): prefix to be prepended + + Returns: + None: state dict is modified in-place + """ + + def add_prefix(t): + if isinstance(t, ShardedBase): + t.key = f'{prefix}{t.key}' + return t + + dict_list_map_inplace(add_prefix, sharded_state_dict) + + +def replace_prefix_for_sharding( + sharded_state_dict: ShardedStateDict, old_prefix: str, new_prefix: str +): + """Replaces the given prefix in *all* sharded keys in a given state dict. + + Errors out if some key does not begin with a given prefix. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to replace keys in + old_prefix (str): prefix to be replaced in each key + new_prefix (str): new prefix + + Returns: + None: state dict is modified in place + """ + + def _replace_prefix(x): + if isinstance(x, (ShardedTensor, ShardedTensorFactory, ShardedObject)): + if not x.key.startswith(old_prefix): + raise ValueError(f'Expected {x.key} to begin with prefix {old_prefix}') + x.key = f'{new_prefix}{x.key[len(old_prefix):]}' # str.removeprefix in Python >= 3.9 + return x + + dict_list_map_inplace(_replace_prefix, sharded_state_dict) + + +def apply_prefix_mapping(sharded_state_dict: ShardedStateDict, prefix_map: Dict[str, str]): + """Replaces prefixes *only in keys matching* with one of prefixes in the map. + + Args: + sharded_state_dict (ShardedStateDict): sharded state dict to replace keys in + prefix_map (Dict[str, str]): + map of old->new prefixes. The first matching prefix for each key is used + + Returns: + None: state dict is modified in place + """ + + def _replace_prefixes(x): + if not isinstance(x, (ShardedTensor, ShardedTensorFactory, ShardedObject)): + return x + for old_prefix, new_prefix in prefix_map.items(): + if x.key.startswith(old_prefix): + x.key = ( + f'{new_prefix}{x.key[len(old_prefix):]}' # str.removeprefix in Python >= 3.9 + ) + break + return x + + dict_list_map_inplace(_replace_prefixes, sharded_state_dict) + + +fallback_logger = logging.getLogger(__name__) +__LOGGER_NAME_STACK = [] +__LOGGER_STACK = [] + + +@contextmanager +def logger_stack(name: Optional[str] = None, current_logger: Optional[logging.Logger] = None): + """Context manager for managing logger and name stack. + + Temporarily pushes a logger and/or name onto their respective stacks, allowing hierarchical + logging and contextual logger usage. Ensures the logger stack is restored afterward. + + Args: + name (str, optional): Name to add to the logger stack. Defaults to None. + current_logger (logging.Logger, optional): Logger to use. Defaults to the last logger in + the stack or a fallback if none exist. + + Yields: + Tuple[str, logging.Logger]: A tuple with the concatenated logger name stack and + the current logger for the block. 
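+ Nested invocations concatenate their names with dots, so a block entered as
+ logger_stack("inner") inside logger_stack("outer") is reported as "outer.inner".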
+ + Example: + with logger_stack("scope", logger): + logger.info("Log within 'scope'") + """ + if name: + __LOGGER_NAME_STACK.append(name) + if current_logger: + __LOGGER_STACK.append(current_logger) + last_logger = current_logger + elif __LOGGER_STACK: + last_logger = __LOGGER_STACK[-1] + else: + last_logger = fallback_logger + try: + yield ".".join(__LOGGER_NAME_STACK), last_logger + finally: + if name and __LOGGER_NAME_STACK: + __LOGGER_NAME_STACK.pop(-1) + if current_logger and __LOGGER_STACK: + __LOGGER_STACK.pop(-1) + + +@contextmanager +def debug_time( + name: str, logger: Optional[logging.Logger] = None, threshold: float = float("-inf"), level=None +): + """Simple context manager for timing functions/code blocks. + + Args: + name (str): Label describing the code being measured. + logger (logging.Logger, optional): Logger for output. Defaults to the lowest logger. + threshold (float, optional): Minimum time (seconds) to log. Skips logging if faster. + level (int, optional): Logging level. Defaults to DEBUG if `threshold` is unset; + WARNING otherwise. + """ + with logger_stack(name, logger) as (stacked_name, last_logger): + start = time() + try: + yield + finally: + result = time() - start + if result < threshold: + return + if level is None: + level = logging.DEBUG if threshold == float("-inf") else logging.WARNING + last_logger.log(level, f"{stacked_name} took {result:.4f}s") + + +def debug_msg(msg: str): + """Logs a debug message using the current logger stack. + + This function formats and logs a debug message with the current logger + and name stack, preserving context from the logger_stack context manager. + + Args: + msg (str): The message to be logged at the debug level. + + Example: + debug_msg("Checkpoint initialized") + # Logs: "scope_name Checkpoint initialized" if called within logger_stack("scope_name") + """ + with logger_stack(None, None) as (stacked_name, last_logger): + last_logger.debug(f"{stacked_name} {msg}") diff --git a/megatron/core/distributed/custom_fsdp/__init__.py b/megatron/core/distributed/custom_fsdp/__init__.py new file mode 100644 index 0000000..f907aca --- /dev/null +++ b/megatron/core/distributed/custom_fsdp/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from .fully_sharded_data_parallel import FullyShardedDataParallel diff --git a/megatron/core/distributed/custom_fsdp/fully_sharded_data_parallel.py b/megatron/core/distributed/custom_fsdp/fully_sharded_data_parallel.py new file mode 100644 index 0000000..381e8a4 --- /dev/null +++ b/megatron/core/distributed/custom_fsdp/fully_sharded_data_parallel.py @@ -0,0 +1,687 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
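+
+# Training-loop usage sketch (hypothetical variable names; argument order follows
+# FullyShardedDataParallel.__init__ below):
+#
+#     model = FullyShardedDataParallel(
+#         config, ddp_config, model,
+#         fsdp_unit_modules=[TransformerLayer, LanguageModelEmbedding],
+#     )
+#     model.zero_grad_buffer()      # at the start of every iteration
+#     loss = model(...)             # forward / backward as usual
+#     loss.backward()
+#     model.finish_grad_sync()      # before the optimizer step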
+
+import functools
+import logging
+from contextlib import contextmanager
+from enum import Enum, auto
+from typing import Any, Dict, List, Optional, Tuple
+
+import torch
+import torch.nn as nn
+from torch.utils._pytree import tree_flatten, tree_unflatten
+
+from megatron.core import parallel_state
+from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk
+from megatron.core.distributed.custom_fsdp.param_and_grad_buffer import (
+ AllGatherPipeline,
+ BucketingPolicy,
+ GradReducePipeline,
+ ParamAndGradBuffer,
+ PrefetchOrder,
+)
+from megatron.core.distributed.data_parallel_base import _BaseDataParallel
+from megatron.core.distributed.distributed_data_parallel_config import DistributedDataParallelConfig
+from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding
+from megatron.core.transformer.transformer_config import TransformerConfig
+from megatron.core.transformer.transformer_layer import TransformerLayer
+from megatron.core.utils import is_float8tensor, is_submodule, log_single_rank
+
+logger = logging.getLogger(__name__)
+
+
+class TrainingState(Enum):
+ """States of an FSDP parameter group, which are coupled with
+ the sharding activity of parameters and gradients during training."""
+
+ # From pre-forward until post-forward, where parameters should be unsharded
+ FORWARD = auto()
+ # Prior to backward computation, where parameters should be unsharded
+ PRE_BACKWARD = auto()
+ # After backward computation, where gradients should be re-sharded
+ POST_BACKWARD = auto()
+ # Before and after module forward computation, or before pre-backward and
+ # after post-backward states, where no un/sharding activity happens
+ IDLE = auto()
+
+
+class FullyShardedDataParallel(_BaseDataParallel):
+ """Fully Sharded Data Parallel training for MCore models.
+
+ A distributed training wrapper that shards model parameters, gradients and optimizer
+ states across data parallel workers. Integrates seamlessly with MCore's tensor
+ and expert parallelism features.
+
+ We support the following modes:
+ - no_shard: Traditional data parallel training without parameter sharding.
+ - optim: Shards optimizer states (and the main weights used for mixed-precision
+ training); this is conceptually close to "ZeRO-1". The `optim_grads` and
+ `optim_grads_params` modes below also shard the main weights during
+ mixed-precision training, even where this is not spelled out.
+ - optim_grads: Shards gradients and optimizer states; this is conceptually close to "ZeRO-2".
+ - optim_grads_params: Shards parameters, gradients and optimizer states; this
+ is conceptually close to "ZeRO-3".
+
+ Key Features:
+ - Compatible with MCore's tensor, context and expert parallelism
+ - Automatic mixed precision training (BF16/FP8)
+ - Gradient accumulation and bucketing
+ - Optimized activation recompute with shard-aware communication: when recomputing
+ a whole Transformer layer, parameters are gathered once for both the recomputation
+ and the backward computation
+ - Compatible with MCore's distributed checkpointing
+
+ Args:
+ config: Transformer config object.
+ ddp_config: FullyShardedDataParallel config object.
+ module: Underlying model.
+ fsdp_unit_modules: List of modules that should be treated as FSDP units,
+ i.e., the minimum releasable model unit. If not provided, defaults to
+ [TransformerLayer, LanguageModelEmbedding] for GPT-like models.
+ disable_bucketing: If true, force assign all parameters to a single bucket.
If false, + use standard bucketing policy: assign parameters to smaller buckets and all-reduce + per bucket. + Examples: + >>> model = GPTModel(config) + >>> model = FullyShardedDataParallel( + ... config, + ... model, + ... ddp_config, + ... fsdp_unit_modules = [TransformerLayer, LanguageModelEmbedding], + ... ) + """ + + # TODO: add hybrid FSDP (shard model states in a partial DP domain) + def __init__( + self, + config: TransformerConfig, + ddp_config: DistributedDataParallelConfig, + module: torch.nn.Module, + fsdp_unit_modules: Optional[List[torch.nn.Module]] = None, + disable_bucketing: bool = False, + device: Optional[torch.device] = None, + ): + super().__init__(config=config, module=module) + if has_config_logger_enabled(config): + log_config_to_disk(config, locals(), prefix=type(self).__name__) + + self.module = module + self.ddp_config = ddp_config + log_single_rank( + logger, + logging.INFO, + f'Setting up DistributedDataParallel with config {self.ddp_config}', + ) + + self.bucket_size = self.ddp_config.bucket_size + if disable_bucketing: + self.bucket_size = None + self.device = device if device else torch.cuda.current_device() + + self.param_to_bucket_group = {} + + if fsdp_unit_modules is not None: + self.fsdp_unit_modules = fsdp_unit_modules + else: + self.fsdp_unit_modules = [TransformerLayer] + if not getattr(self.module, "share_embeddings_and_output_weights", False): + self.fsdp_unit_modules.append(LanguageModelEmbedding) + self.main_weights = True + self.data_parallel_group = parallel_state.get_data_parallel_group( + with_context_parallel=True + ) + self.expert_data_parallel_group = parallel_state.get_expert_data_parallel_group() + + # Determine if we should delay the gradient reduction. + self.is_delay_grad_reduce = self.ddp_config.data_parallel_sharding_strategy in [ + "no_shard", + "optim", + ] + + if self.ddp_config.data_parallel_sharding_strategy == "optim_grads_params": + assert self.ddp_config.overlap_param_gather + if not self.is_delay_grad_reduce: + assert self.ddp_config.overlap_grad_reduce + self._init_fsdp_param_and_grad_buffer() + self._register_fsdp_hooks(self.module) + + # Delete references to weight_tensor if they exist since we don't want two parameter copies + # if we re-mapped parameters (which happens when we use the distributed optimizer). + # This is a temporary workaround around a TE bug that is fixed with + # https://github.com/NVIDIA/TransformerEngine/pull/719. + @torch.no_grad() + def unmap_weight_tensor(m): + if hasattr(m, 'weight_tensor'): + m.weight_tensor = None + + self.module.apply(unmap_weight_tensor) + + def _init_fsdp_param_and_grad_buffer(self): + if self.config.calculate_per_token_loss: + # We don't need to scale the gradients in this case. + gradient_scaling_factor = None + expert_gradient_scaling_factor = None + else: + if self.ddp_config.average_in_collective: + # FIXME(@jianbinc): Will fix this issue based on Parallel Folding's EDP patch MR. + raise Exception("Not supported") + else: + data_parallel_world_size = parallel_state.get_data_parallel_world_size( + with_context_parallel=True + ) + gradient_scaling_factor = 1.0 / data_parallel_world_size + expert_gradient_scaling_factor = 1.0 / data_parallel_world_size + + # Initialize the param and grad buffer. 
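+ # The buffer groups parameters into buckets according to the bucketing policy and
+ # owns the (optionally sharded) parameter and gradient storage used below.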
+ self.data_parallel_sharding_strategy = self.ddp_config.data_parallel_sharding_strategy + self.param_to_name = {p: name for name, p in self.module.named_parameters()} + self.param_and_grad_buffer = ParamAndGradBuffer( + self.ddp_config, + self.module, + bucketing_policy=BucketingPolicy( + suggested_bucket_size=self.bucket_size, + fsdp_unit_modules=( + # Only when model weights need to be sharded, we need to + # identify the minimum releasable model unit, which is the + # FSDP Unit Module. + self.fsdp_unit_modules + if self.data_parallel_sharding_strategy == "optim_grads_params" + else [] + ), + data_parallel_sharding_strategy=self.data_parallel_sharding_strategy, + ), + data_parallel_group=self.data_parallel_group, + expert_data_parallel_group=self.expert_data_parallel_group, + preserve_fp32_weights=self.ddp_config.preserve_fp32_weights, + grad_reduce_in_fp32=self.ddp_config.grad_reduce_in_fp32, + gradient_scaling_factor=gradient_scaling_factor, + expert_gradient_scaling_factor=expert_gradient_scaling_factor, + device=self.device, + reset_parameters_for_meta_device_init_module=self.config.init_model_with_meta_device, + ) + self.param_and_grad_buffer + + self.side_stream_for_buffer_copy_and_grad_accum = torch.cuda.Stream() + + # Initialize the reduce-scatter pipeline. + self.grad_reduce_pipeline = GradReducePipeline( + self.param_and_grad_buffer, cuda_stream=self.side_stream_for_buffer_copy_and_grad_accum + ) + + # Initialize the all-gather pipeline. + self.all_gather_pipeline = AllGatherPipeline(self.param_and_grad_buffer) + + self.suggested_RS_queue_capacity = self.ddp_config.suggested_communication_unit_size + self.suggested_AG_prefetch_size = self.ddp_config.suggested_communication_unit_size + + def _register_fsdp_hooks(self, root_module): + """Register necessary hooks for Fully Sharded Data Parallel (FSDP) execution on the model. + + This function sets up various hooks required for FSDP operations, including parameter + resharding/unsharding and gradient handling. The registered hooks are: + - Pre-forward hook: Unshards parameters before forward pass + - Post-forward hook: Reshards parameters after forward pass + - Pre-backward hook: Unshards parameters before backward pass + - Post-backward hook: Reshards parameters after backward pass + - Gradient accumulation hook: Handles gradient accumulation and reduction across devices + + Args: + root_module: The PyTorch module to register FSDP hooks on + + Note: + These hooks are essential for FSDP's memory efficiency as they manage: + 1. Dynamic parameter sharding/unsharding to reduce memory footprint + 2. Proper gradient synchronization across distributed processes + 3. Gradient accumulation for large batch training + + Returns: + None + """ + + # Initialize module training state. + for m in root_module.modules(): + setattr(m, "_training_state", TrainingState.IDLE) + + self.forward_pre_hooks = {} + self.forward_hooks = {} + self.backward_pre_hooks = {} + + """ + An FSDP unit is a module designed to manage the lifecycle of model parameters + in Fully Sharded Data Parallel (FSDP) training. It ensures that parameters + are only used within the module and are released immediately after + the forward and backward computations are completed. + This approach is crucial for efficient memory management, as releasing + parameters too early can lead to issues if other computations depend on them. + + `optim` and `optim_grads` do not require FSDP units because they do not + shard model parameters. 
+ """ + if self.data_parallel_sharding_strategy != "optim_grads_params": + fsdp_unit_modules = [] + else: + fsdp_unit_modules = self.fsdp_unit_modules + + def release_module_parameters(module, *unused): + for param in module.parameters(): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + self.all_gather_pipeline.release_bucket(bucket_id) + + if not self.ddp_config.keep_fp8_transpose_cache_when_using_custom_fsdp: + release_params_fp8_transpose_cache(module.parameters()) + + def release_params_fp8_transpose_cache(params): + for param in params: + if is_float8tensor(param): + param._transpose_invalid = True + param._transpose = None + + def all_gather_module_parameters( + module, + *unused, + prefetch=True, + prefetch_order=PrefetchOrder.FORWARD_PASS_ORDER, + wait_bucket_ready=True, + ): + wait_list = [] + ag_pipeline = self.all_gather_pipeline + for param in module.parameters(): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + ag_pipeline.queue_bucket_to_all_gather( + bucket_id, + prefetch=prefetch, + prefetch_order=prefetch_order, + suggested_AG_prefetch_size=self.suggested_AG_prefetch_size, + ) + wait_list.append(bucket_id) + + if wait_bucket_ready: + for bucket_id in wait_list: + ag_pipeline.wait_bucket_ready(bucket_id) + + def _post_backward(module, *unused): + release_module_parameters(module) + module._training_state = TrainingState.IDLE + + def _pre_forward(module: nn.Module, args: Tuple[Any, ...], kwargs: Dict[str, Any]): + input_training_state = module._training_state + fsdp_forward_prefetch = True + if input_training_state == TrainingState.PRE_BACKWARD: + # In activation recomputation case, we need to cancel forward prefetch. + fsdp_forward_prefetch = False + else: + module._training_state = TrainingState.FORWARD + + if isinstance(module, tuple(fsdp_unit_modules)): + wait_list = [] + for param in module.parameters(): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + self.all_gather_pipeline.queue_bucket_to_all_gather( + bucket_id, + prefetch=fsdp_forward_prefetch, + suggested_AG_prefetch_size=self.suggested_AG_prefetch_size, + ) + wait_list.append(bucket_id) + for bucket_id in wait_list: + self.all_gather_pipeline.wait_bucket_ready(bucket_id) + + if not torch.is_grad_enabled(): + return args, kwargs + + # Register the backward function to release the parameters. + args_list, args_spec = tree_flatten(args) + kwargs_list, kwargs_spec = tree_flatten(kwargs) + args_kwargs_list = list(args_list) + list(kwargs_list) + inp_tensor_indices: List[int] = [] + inp_tensors: List[torch.Tensor] = [] + for i, obj in enumerate(args_kwargs_list): + if torch.is_tensor(obj) and obj.requires_grad: + inp_tensor_indices.append(i) + inp_tensors.append(obj) + if len(inp_tensors) == 0: + return args, kwargs + inp_tensors = RegisterFSDPBackwardFunction.apply( + functools.partial(_post_backward, module), *inp_tensors + ) + for inp_tensor_idx, inp_tensor in zip(inp_tensor_indices, inp_tensors): + args_kwargs_list[inp_tensor_idx] = inp_tensor + args_list = args_kwargs_list[: len(args_list)] + kwargs_list = args_kwargs_list[len(args_list) :] + args = tree_unflatten(args_list, args_spec) + kwargs = tree_unflatten(kwargs_list, kwargs_spec) + + return args, kwargs + else: + # All-gather the parameters in every forward pass for FSDP. 
+ for param in module.parameters(recurse=False): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + self.all_gather_pipeline.queue_bucket_to_all_gather( + bucket_id, + prefetch=fsdp_forward_prefetch, + suggested_AG_prefetch_size=self.suggested_AG_prefetch_size, + ) + for param in module.parameters(recurse=False): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + self.all_gather_pipeline.wait_bucket_ready(bucket_id) + + return args, kwargs + + if self.ddp_config.overlap_param_gather: + fsdp_modules = [] + for name, module in root_module.named_modules(): + if self.ddp_config.data_parallel_sharding_strategy == "optim_grads_params": + if any(is_submodule(module, fsdp_module) for fsdp_module in fsdp_modules): + continue + + if isinstance(module, tuple(fsdp_unit_modules)): + fsdp_modules.append(module) + + self.forward_pre_hooks[f'module {name} parameter all-gather'] = ( + module.register_forward_pre_hook(_pre_forward, prepend=True, with_kwargs=True) + ) + + def _pre_backward(module: nn.Module, *unused): + module._training_state = TrainingState.PRE_BACKWARD + if isinstance(module, tuple(fsdp_unit_modules)): + all_gather_module_parameters( + module, prefetch_order=PrefetchOrder.BACKWARD_PASS_ORDER + ) + + def _root_pre_backward(module: nn.Module, *unused): + """Marks the module's training state as 'pre_backward' before the + backprop, this function is registered on the root module. + + This marking enables us to determine whether forward pass needs to + perform reshard/unshard operations in activation recomputation + scenarios. + """ + for module in root_module.modules(): + if isinstance(module, tuple(fsdp_unit_modules)): + module._training_state = TrainingState.PRE_BACKWARD + for param in module.parameters(): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + self.all_gather_pipeline.wait_bucket_ready(bucket_id, empty_ok=True) + self.all_gather_pipeline.release_bucket(bucket_id) + + def _post_forward(module: nn.Module, input: Any, output: Any): + # When composing with module-hook-based activation checkpointing, the + # post-backward hook is responsible for the reshard + if module._training_state == TrainingState.PRE_BACKWARD: + return output + + release_module_parameters(module) + module._training_state = TrainingState.IDLE + + return output + + def _release_module_fp8_transpose_cache(module: nn.Module, *unused): + release_params_fp8_transpose_cache(module.parameters(recurse=False)) + + if self.data_parallel_sharding_strategy == "optim_grads_params": + fsdp_modules = [] + for name, module in root_module.named_modules(): + if any(is_submodule(module, fsdp_module) for fsdp_module in fsdp_modules): + continue + + if isinstance(module, tuple(fsdp_unit_modules)): + fsdp_modules.append(module) + self.forward_hooks[f"release module {name} parameters"] = ( + module.register_forward_hook(_post_forward, prepend=False) + ) + self.backward_pre_hooks[f"all-gather module {name} parameters"] = ( + module.register_full_backward_pre_hook(_pre_backward) + ) + elif not self.ddp_config.keep_fp8_transpose_cache_when_using_custom_fsdp: + self.forward_hooks[f"remove module {name} fp8 transpose cache"] = ( + module.register_forward_hook( + _release_module_fp8_transpose_cache, prepend=False + ) + ) + self._root_pre_backward_hook_handle = root_module.register_full_backward_pre_hook( + _root_pre_backward + ) + + def _make_param_hook(param: torch.nn.Parameter): + """ + Creates the all-reduce / reduce-scatter hook for backprop. 
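+
+ The returned hook accumulates (or copies) param.grad into param.main_grad and,
+ when overlap_grad_reduce is enabled, marks the parameter as ready so that its
+ bucket can be reduced asynchronously.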
+ """ + + wait_previous_grad_reduce = not self.is_delay_grad_reduce + + # FIXME: Use insert forward op to replace grad acc hook, which will + # be lost after parameter data movement. For example, module.cuda() + # will cause the registered grad acc hook to be lost. + def param_hook(*unused): + if param.requires_grad: + if self.ddp_config.overlap_grad_reduce: + assert ( + param.grad is not None + ), 'param.grad being None is not safe when overlap_grad_reduce is True' + + if param.grad is not None and ( + not param.grad_added_to_main_grad or getattr(param, 'zero_out_wgrad', False) + ): + if self.is_delay_grad_reduce: + param.main_grad.add_(param.grad.data) + else: + param.main_grad.copy_(param.grad.data) + param.grad = None + + if self.ddp_config.overlap_grad_reduce and ( + not self.is_delay_grad_reduce or self.is_last_microbatch + ): + gr_pipeline = self.grad_reduce_pipeline + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + gr_pipeline.place_bucket(bucket_id) + go_rs = gr_pipeline.mark_item_ready(param, async_rs=True) + if go_rs and wait_previous_grad_reduce: + gr_pipeline.wait_for_previous_grad_reduce( + recommeded_queue_capacity=self.suggested_RS_queue_capacity + ) + + return param_hook + + # Register backward gradient accumulation hook for each parameter. + self.grad_accs = [] + for param in root_module.parameters(): + bucket_id = self.param_and_grad_buffer.param_to_param_group[param] + wbuf = self.param_and_grad_buffer.parameter_groups[bucket_id].model_weight_buffer + if param.requires_grad: + if wbuf and wbuf.is_data_distributed: + wbuf.fetch_bucket(and_allocate_params_data=True) + + # Expand so we get access to grad_fn. + param_tmp = param.expand_as(param) + # Get the gradient accumulator function. + grad_acc = param_tmp.grad_fn.next_functions[0][0] + grad_acc.register_hook(_make_param_hook(param)) + self.grad_accs.append(grad_acc) + + if wbuf and wbuf.is_data_distributed: + wbuf.free_bucket_storage() + + @contextmanager + def no_sync(self): + """ + Context manager that turns off gradient synchronization. + For grads shard mode there will actually always be gradient sync happening. + """ + # FIXME: Better handling of grads shard mode and no_sync in the training loop so that + # the code doesn't bog down developers. + self.is_last_microbatch = False + try: + yield + finally: + self.is_last_microbatch = True + + def start_param_sync(self, *unused, force_sync: bool = False, force_dispatch: bool = False): + """ + Initiates param sync (all-gather) communication operations for all model parameters. + + By default, when overlap_param_gather is set to True, dispatches asynchronous communication + calls; when overlap_param_gather is set to False, calls synchronous communication + ops. Can override this default behavior using flags below. + + Args: + force_sync (bool, optional): force synchronous collective regardless of + other settings. + force_dispatch (bool, optional): force dispatch regardless of other settings. + """ + if not force_sync and self.ddp_config.overlap_param_gather: + # All-gather the first bucket before the forward pass. 
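+            # The remaining buckets are queued (and prefetched) lazily by the
+            # forward pre-hooks as their owning modules are reached, so only
+            # bucket 0 needs an eager all-gather here.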
+ self.all_gather_pipeline.queue_bucket_to_all_gather(bucket_id=0, prefetch=False) + else: + self.all_gather_pipeline.reset() + for bucket_id in range(self.all_gather_pipeline.num_buckets): + self.all_gather_pipeline.all_gather_bucket_and_set_items( + bucket_id=bucket_id, async_op=True + ) + group = self.param_and_grad_buffer.parameter_groups[bucket_id] + if group.model_weight_buffer is None: + continue + + if group.model_weight_buffer.is_data_distributed: + # If model weight is sharded, we wait for the all-gather to complete and + # then release the bucket immediately to save memory usage. + self.all_gather_pipeline.wait_bucket_ready(bucket_id) + for bucket_id in range(self.all_gather_pipeline.num_buckets): + self.all_gather_pipeline.wait_bucket_ready(bucket_id) + + def start_grad_sync(self, *unused): + """ + Initiates grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, dispatches asynchronous communication + calls. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + if not self.ddp_config.overlap_grad_reduce: + if self.data_parallel_sharding_strategy == "no_shard": + self.param_and_grad_buffer.all_reduce_gradients( + async_op=self.ddp_config.overlap_grad_reduce + ) + else: + self.param_and_grad_buffer.reduce_scatter_gradients() + + def finish_grad_sync(self): + """ + Finishes grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, waits for asynchronous communication + calls to complete. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + if self.ddp_config.overlap_grad_reduce: + self.grad_reduce_pipeline.wait_for_previous_grad_reduce(0) + self.grad_reduce_pipeline.reset() + else: + self.start_grad_sync() + + self.param_and_grad_buffer.update_main_grads() + + if self.ddp_config.overlap_param_gather: + self.all_gather_pipeline.reset() + + def optimizer_named_parameters(self) -> List[Tuple[str, torch.Tensor]]: + """ + Returns a list of tuples containing the main weights and their corresponding names + for mixed-precision training, to be used by the optimizer for updates. + + Returns: + List[Tuple[str, torch.Tensor]]: A list of tuples, where each tuple + contains a main weight tensor and its corresponding name. + """ + return self.param_and_grad_buffer.optimizer_named_parameters + + def scale_gradients(self, scaling_factor: float): + """Scale all gradients inside the buffers by `scaling_factor`.""" + self.param_and_grad_buffer.scale_gradients(scaling_factor) + + def zero_grad_buffer(self): + """ + Zeros out all grad buffers. Needs to be called at the beginning of each + training iteration. + """ + for param in self.module.parameters(): + if param.requires_grad: + param.grad_added_to_main_grad = False + self.param_and_grad_buffer.zero_grad() + + def broadcast_params(self): + """ + Syncs parameters across all DP ranks. 
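+        Typically called once after model construction so that all
+        data-parallel replicas start from identical weights; expert parameters
+        are broadcast over the expert (data-modulo-expert) parallel group
+        instead of the regular data-parallel group.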
+ """ + for param in self.module.parameters(): + is_expert_parallel = not getattr(param, 'allreduce', True) + + if is_expert_parallel: + data_parallel_group = parallel_state.get_data_modulo_expert_parallel_group( + with_context_parallel=True + ) + else: + data_parallel_group = parallel_state.get_data_parallel_group( + with_context_parallel=True + ) + torch.distributed.broadcast( + param.data, + src=torch.distributed.get_global_rank(data_parallel_group, 0), + group=data_parallel_group, + ) + + def load_state_dict(self, state_dict, strict=True): + """ + Copies parameters and buffers from state_dict into the wrapped module and its + descendants. If strict is True, then the keys of state_dict must exactly match + the keys returned by this module’s state_dict() function. + """ + if self.ddp_config.data_parallel_sharding_strategy == "optim_grads_params": + # make a copy of the state_dict to avoid modifying the input state_dict + state_dict = state_dict.copy() + state_dict_extra_states = {} + for key in list(state_dict.keys()): + if key.endswith("_extra_state"): + state_dict_extra_states[key] = state_dict[key] + del state_dict[key] + self.module.load_state_dict(state_dict_extra_states, strict=False) + + prefix = "module." + buffer = self.param_and_grad_buffer + for param_groups in buffer.parameter_groups: + wbuf = param_groups.model_weight_buffer + for model_param in wbuf.params: + if is_float8tensor(model_param): + fp8_meta = model_param._fp8_meta['scaling_fwd'] + fp8_meta_index = model_param._fp8_meta_index + model_param._scale_inv.copy_(fp8_meta.scale_inv[fp8_meta_index]) + + param_name = f"{buffer.param_to_name[model_param]}"[len(prefix) :] + if param_name in state_dict: + if wbuf and wbuf.is_data_distributed: + model_param.fully_shard_param_local_shard.data.copy_( + state_dict[param_name] + ) + else: + model_param.data.copy_(state_dict[param_name]) + del state_dict[param_name] + self.module.load_state_dict(state_dict, strict=False) + return + self.module.load_state_dict(state_dict, strict=strict) + + +class RegisterFSDPBackwardFunction(torch.autograd.Function): + """ + Register a backward function that will be called after the backward pass + of the model. This function is used to release the parameters after the + backward pass. + """ + + @staticmethod + def forward(ctx, post_backward, *inputs: torch.Tensor): + ctx.post_backward = post_backward + return inputs + + @staticmethod + def backward(ctx, *grads: torch.Tensor): + ctx.post_backward() + return (None,) + grads diff --git a/megatron/core/distributed/custom_fsdp/param_and_grad_buffer.py b/megatron/core/distributed/custom_fsdp/param_and_grad_buffer.py new file mode 100644 index 0000000..a29768a --- /dev/null +++ b/megatron/core/distributed/custom_fsdp/param_and_grad_buffer.py @@ -0,0 +1,1971 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
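+"""Flat parameter and gradient buffers for MCore Custom FSDP.
+
+This module provides the bucketing policy and flat per-bucket storage used by
+the custom FSDP implementation (``BucketingPolicy``, ``DataParallelBuffer``,
+``ParamAndGradBuffer``), the temporary bucket allocators used to materialize
+full buckets around communication, and the gradient-reduce pipeline built on
+top of them (``GradReducePipeline``).
+"""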
+ +import dataclasses +import gc +import inspect +import logging +import math +import traceback +import warnings +from collections import namedtuple +from contextlib import ExitStack +from enum import Enum +from typing import Any, List, Optional, Tuple + +import torch + +from megatron.core import parallel_state +from megatron.core.distributed.distributed_data_parallel_config import DistributedDataParallelConfig +from megatron.core.tensor_parallel import get_cuda_rng_tracker +from megatron.core.utils import ( + is_float8tensor, + is_submodule, + is_te_min_version, + log_on_each_pipeline_stage, +) + +try: + from transformer_engine.pytorch import fp8_model_init + + # This will be used when "--use-fp8-params" is enabled. + # When BF16/FP16 parameters don't exist, we need to cast the FP32 main parameters to + # FP8 directly in the optimizer. + from transformer_engine.pytorch.cpp_extensions import cast_to_fp8 +except: + pass + +try: + from transformer_engine.pytorch.module.base import TransformerEngineBaseModule +except: + pass + + +logger = logging.getLogger(__name__) + + +def _p_assert(cond: Any, s: str, raise_assertion_error: bool = True) -> None: + """Alternate to ``assert`` when in the backward context to print the error + message ``s`` since otherwise, it is swallowed. + """ + if not cond: + print(s) + traceback.print_stack() + if raise_assertion_error: + raise AssertionError(s) + + +def _alloc_storage(tensor: torch.Tensor, size: torch.Size) -> None: + """ + Allocate storage for ``tensor`` with the given size. + + Returns: + bool: ``True`` if this method allocated storage and ``False`` if the + storage was already allocated. + """ + with torch.no_grad(): + if not torch.distributed._functional_collectives.is_torchdynamo_compiling(): + already_allocated = tensor._typed_storage()._size() == size.numel() + if not already_allocated: + tensor_storage_size = tensor._typed_storage()._size() + _p_assert( + tensor_storage_size == 0, + "Tensor storage should have been resized to be 0 but got PLACEHOLDEr", + ) + tensor._typed_storage()._resize_(size.numel()) + + +def _free_storage(tensor: torch.Tensor): + """ + Frees the underlying storage of ``tensor``. + + Returns: + bool: ``True`` if the method freed the storage and ``False`` if the + storage was already freed. + """ + with torch.no_grad(): + if not torch.distributed._functional_collectives.is_torchdynamo_compiling(): + already_freed = tensor._typed_storage()._size() == 0 + if not already_freed: + _p_assert( + tensor.storage_offset() == 0, + "Freeing a tensor's storage is unsafe when it is not the sole occupant\n" + f"storage offset: {tensor.storage_offset()}\n" + f"storage size: {tensor._typed_storage()._size()}\n" + f"tensor shape: {tensor.shape}", + ) + tensor._typed_storage()._resize_(0) + + +TensorItemIndex = namedtuple( + 'TensorItemIndex', ['global_data_index', 'size', 'item_id', 'bucket_id', 'shape'] +) +BucketIndex = namedtuple('BucketIndex', ['bucket_id', 'global_data_index', 'size', 'items']) +ShardBucketIndex = namedtuple( + 'ShardBucketIndex', + ['bucket_id', 'global_data_index', 'local_data_index', 'bucket_data_index', 'size'], +) + + +@dataclasses.dataclass +class BucketingPolicy: + """ + A policy for bucketing in Fully Sharded Data Parallel (FSDP) training. + + Attributes: + suggested_bucket_size (int): The suggested size of each bucket in num of elements. + fsdp_unit_modules (list): A list of module classes that are treated as a + single unit for FSDP bucketing. 
+ data_parallel_sharding_strategy (str): The strategy used for sharding + data parallel modules. + + Note: + This policy is used to configure the bucketing behavior in FSDP training. + """ + + suggested_bucket_size: Optional[int] = 40_000_000 + fsdp_unit_modules: List[torch.nn.Module] = dataclasses.field(default_factory=list) + data_parallel_sharding_strategy: str = 'no_shard' + + +def _pad(number_to_be_padded: int, divisor: int) -> int: + return int(math.ceil(number_to_be_padded / divisor) * divisor) + + +def build_data_parallel_buffer_index( + elements: List[torch.Size], + data_parallel_rank: int, + data_parallel_world_size: int, + is_data_distributed: bool, + ddp_config: DistributedDataParallelConfig, + bucket_id: int = 0, +) -> Tuple[int, List[tuple], List[tuple], List[tuple]]: + """ + Assuming that all input tensor elements are consecutively compose a global + buffer, give the index range of every tensor, every bucket and every in + bucket local buffer. + + Args: + elements (List[torch.Size]): List of input tensor. + data_parallel_rank (int): Rank of the current process in the data parallel group. + data_parallel_world_size (int): World size of the data parallel group. + bucket_id (int, optional): The id of the bucket. Defaults to 0. + + Returns: + Tuple[int, List[tuple], List[tuple], List[tuple]]: The index range of every tensor, + every bucket and every in bucket local buffer. + """ + + def _pad_if_needed(data_index: int) -> int: + """ + Pads data indices if using distributed optimizer (to ensure uniform sharding). + """ + if ddp_config.data_parallel_sharding_strategy != 'no_shard': + # Workaround for TE bug causing cuBLAS to pick an incompatible algorithm. + # This also helps cuBLAS pick more efficient algorithms for GEMMs. + # We now ensure that all buckets start at a memory address that is 256-byte + # aligned (128 values since params and grads use >= 16-bit precision). + return _pad(data_index, math.lcm(data_parallel_world_size, 128)) + return data_index + + def add_item(item_id, item, bucket, item_index_map, bucket_id): + bucket.append(item) + bucket_size = sum([it.numel() for it in bucket]) + item_index_map.append( + TensorItemIndex( + data_index + bucket_size - item.numel(), + item.numel(), + item_id=item_id, + bucket_id=bucket_id, + shape=item, + ) + ) + + item_index_map = [] + bucket = [] + data_index = 0 + for item_id, item in enumerate(elements): + add_item(item_id, item, bucket, item_index_map, bucket_id) + + bucket_size = sum([it.numel() for it in bucket]) + bucket_size = _pad_if_needed(bucket_size) + bucket_index = BucketIndex( + bucket_id, + data_index, + bucket_size, + items=list(filter(lambda x: x.bucket_id == bucket_id, item_index_map)), + ) + + shard_size = bucket_index.size // data_parallel_world_size + bucket_data_index = shard_size * data_parallel_rank + global_data_index = bucket_index.global_data_index + bucket_data_index + + if is_data_distributed: + shard_bucket_index = ShardBucketIndex( + bucket_id, global_data_index, 0, bucket_data_index, shard_size + ) + else: + shard_bucket_index = ShardBucketIndex( + bucket_id, global_data_index, global_data_index, bucket_data_index, shard_size + ) + + return item_index_map, bucket_index, shard_bucket_index + + +@dataclasses.dataclass +class Bucket: + """ + A container for holding data in Fully Sharded Data Parallel (FSDP) training. + + Attributes: + data (torch.Tensor): A tensor containing the data elements + grouped together in a bucket. 
+ data_operation_event (Optional[torch.cuda.Event]): An optional CUDA event + used to synchronize data operations. + status (Any): An optional status object used to track the state of the bucket. + + Note: + Buckets are used to optimize communication in FSDP training by + grouping small tensors together. + """ + + data: torch.Tensor + data_operation_event: Optional[torch.cuda.Event] = None + status: Any = None + + +class TemporaryBucketAllocator: + """ + A utility class for managing temporary buckets (buffers) used in FSDP + operations like parameters unshard and gradients reduction. + + This allocator handles the dynamic allocation and deallocation of temporary memory buffers + needed during FSDP (Fully Sharded Data Parallel) operations, particularly for parameters + unshard and gradients reduction. It helps optimize memory usage by allowing temporary + buckets to be released when no longer needed. + + Key Features: + - Dynamic allocation of temporary buckets for FSDP operations + - Memory-efficient management of temporary buffers + - Support for both parameters unshard and gradients reduction operations + - Automatic cleanup of unused buckets to save memory + + Usage: + ```python + # Create an allocator instance + allocator = TemporaryBucketAllocator(name="gpt_parameters") + + # Allocate a temporary bucket + temp_bucket = allocator.allocate(size=1024, dtype=torch.float32) + + # Use the temporary bucket for FSDP operations + # ... perform all-gather or reduce-scatter ... + + # Free the bucket when done + allocator.free(temp_bucket) + ``` + + Note: + It's important to release temporary buckets after use to prevent memory leaks + and optimize memory usage during training. + """ + + def __init__(self): + self.buckets = {} + + def allocate( + self, bucket_id: int, size: int, dtype: torch.dtype, device: torch.device + ) -> Bucket: + """ + allocate a temporary bucket. + """ + if bucket_id not in self.buckets: + self.buckets[bucket_id] = Bucket(data=torch.empty(size, dtype=dtype, device=device)) + return self.buckets[bucket_id] + + def free(self, bucket_id: int): + """ + free a temporary bucket. + """ + if bucket_id in self.buckets: + _free_storage(self.buckets[bucket_id].data) + del self.buckets[bucket_id] + + +class StorageResizeBasedBucketAllocator(TemporaryBucketAllocator): + """ + A specialized temporary bucket allocator that resizes the storage of temporary buckets + based on the required size. + """ + + def __init__(self): + self.buckets = {} # {bucket_id: Bucket} + + def allocate( + self, bucket_id: int, size: int, dtype: torch.dtype, device: torch.device + ) -> Bucket: + """ + allocate a temporary bucket. + """ + if bucket_id not in self.buckets: + self.buckets[bucket_id] = Bucket(data=torch.empty(size, dtype=dtype, device=device)) + bucket = self.buckets[bucket_id] + _alloc_storage(bucket.data, torch.Size([size])) + return bucket + + def free(self, bucket_id: int): + """ + free a temporary bucket. + """ + if bucket_id in self.buckets: + _free_storage(self.buckets[bucket_id].data) + + +class RotaryBucketAllocator(TemporaryBucketAllocator): + """A specialized temporary bucket allocator that implements a circular buffer recycling strategy + to minimize memory fragmentation in FSDP operations. + + RotaryBucketAllocator extends TemporaryBucketAllocator by maintaining a limited pool of + pre-allocated buffers that are reused in a circular manner. 
This approach helps prevent + memory fragmentation that typically occurs with frequent allocation and deallocation of + temporary buffers during FSDP operations. + + Key Features: + - Circular buffer recycling strategy for memory efficiency + - Reduced memory fragmentation compared to dynamic allocation + - Pre-allocated buffer pool for faster access + - Automatic buffer reuse without explicit deallocation + + Usage: + ```python + # Create a rotary allocator + allocator = RotaryBucketAllocator(name="gpt_parameters") + + # Get a temporary buffer from the pool + temp_bucket = allocator.allocate(size=1024, dtype=torch.float32) + + # Use the temporary bucket for FSDP operations + # ... perform all-gather or reduce-scatter ... + + # Free the bucket when done, make it in idle buffer pool + allocator.free(temp_bucket) + ``` + """ + + def __init__(self, name: str): + self.name = name + self.num_global_buffer = 0 + self.idle_buffer = [] # [buffer_id] + self.using_buffer = {} # {bucket_id: buffer_id} + self.buckets = {} + + def allocate( + self, bucket_id: int, size: int, dtype: torch.dtype, device: torch.device + ) -> Bucket: + """ + allocate a temporary bucket. + """ + + def _get_global_buffer(buffer_id: int): + return parallel_state.get_global_memory_buffer().get_tensor( + [size], dtype=dtype, name=self._get_gbuf_name(buffer_id) + ) + + if bucket_id in self.using_buffer: + buffer_id = self.using_buffer[bucket_id] + return Bucket(data=_get_global_buffer(buffer_id)) + + if len(self.idle_buffer) == 0: + # allocate new buffer + buffer_id = self.num_global_buffer + self.num_global_buffer += 1 + self.idle_buffer.append(buffer_id) + + buffer_id = self.idle_buffer.pop(0) + self.using_buffer[bucket_id] = buffer_id + return Bucket(data=_get_global_buffer(buffer_id)) + + def _get_gbuf_name(self, buffer_id: int): + return f"{self.name}_{buffer_id}" + + def free(self, bucket_id: int): + """ + free a temporary bucket. + """ + if bucket_id in self.using_buffer: + buffer_id = self.using_buffer.pop(bucket_id) + self.idle_buffer.append(buffer_id) + + +class DataParallelBuffer: + """ + A class that manages the data parallel buffer for Fully Sharded Data Parallel (FSDP) training. 
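+
+    Each instance owns one flat buffer backing every tensor of a single bucket.
+    When ``is_data_distributed`` is True, only the local shard of the bucket
+    stays resident and the full bucket is materialized in a temporary bucket
+    just for communication.
+
+    Illustrative sketch of the underlying index computation (the sizes and the
+    ``ddp_config`` instance are placeholders, not taken from a real run):
+
+    ```python
+    item_index_map, bucket_index, shard_bucket_index = build_data_parallel_buffer_index(
+        [torch.Size([4]), torch.Size([6])],  # two parameters in one bucket
+        data_parallel_rank=0,
+        data_parallel_world_size=2,
+        is_data_distributed=True,
+        ddp_config=ddp_config,
+    )
+    # bucket_index.size is the (alignment-padded) bucket length, and
+    # shard_bucket_index.size == bucket_index.size // 2 is what this rank stores.
+    ```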
+ """ + + def __init__( + self, + ddp_config: DistributedDataParallelConfig, + params: List[torch.nn.Parameter], + is_data_distributed: bool, + bucket_id: int, + dtype: Optional[torch.dtype] = None, + device: Optional[torch.device] = None, + data_parallel_group: Optional[torch.distributed.ProcessGroup] = None, + temporary_bucket_allocator: Optional[TemporaryBucketAllocator] = None, + init_meta_only: bool = False, + is_dtype_float8: bool = False, + gradient_scaling_factor: Optional[float] = None, + ) -> None: + self.ddp_config = ddp_config + self.params = params + _param_dtype = {p.dtype for p in self.params} + assert len(_param_dtype) == 1, f'params have different dtypes: {_param_dtype}' + self.is_data_distributed = is_data_distributed + self.bucket_id = bucket_id + self.dtype = dtype if dtype else next(iter(_param_dtype)) + self.device = device + self.data_parallel_group = data_parallel_group + self.dp_rank = torch.distributed.get_rank(group=self.data_parallel_group) + self.dp_world_size = torch.distributed.get_world_size(group=self.data_parallel_group) + self.temporary_bucket_allocator = ( + temporary_bucket_allocator if temporary_bucket_allocator else TemporaryBucketAllocator() + ) + self.is_dtype_float8 = is_dtype_float8 + self.gradient_scaling_factor = gradient_scaling_factor + + (self.item_index_map, self.bucket_index, self.shard_bucket_index) = ( + build_data_parallel_buffer_index( + [p.shape for p in self.params], + self.dp_rank, + self.dp_world_size, + is_data_distributed, + ddp_config, + bucket_id=bucket_id, + ) + ) + + self.data_size = ( + self.bucket_index.size if not is_data_distributed else self.shard_bucket_index.size + ) + if init_meta_only: + self.data = None + else: + self.data = torch.empty(self.data_size, dtype=self.dtype, device=device) + + self.param_idx = {p: i for i, p in enumerate(self.params)} + self.placeholder_bucket = None + self.placeholder_items = {} + + def fetch_bucket( + self, dtype: Optional[torch.dtype] = None, and_allocate_params_data: bool = False + ) -> Bucket: + """ + Fetch a communication buffer for data-parallel operations. + + The size of the bucket is defined by the `DataParallelBuffer` instance. + If `and_allocate_params_data` is True, this method resets the parameter + data stored in the `DataParallelBuffer` instance. + + Args: + dtype (Optional[torch.dtype], optional): The data type of the tensor + to fetch a buffer for. Defaults to None. + and_allocate_params_data (bool, optional): Whether to allocate and + reset parameter data. Defaults to False. + + Returns: + Bucket: The communication buffer for the specified data type. + """ + if dtype is None: + dtype = self.dtype + bucket_index = self.bucket_index + + if not self.is_data_distributed and dtype == self.dtype: + bucket = Bucket( + data=self.data[ + bucket_index.global_data_index : bucket_index.global_data_index + + bucket_index.size + ] + ) + else: + bucket = self.temporary_bucket_allocator.allocate( + bucket_id=bucket_index.bucket_id, + size=bucket_index.size, + dtype=dtype, + device=self.device, + ) + + if and_allocate_params_data: + for p in self.params: + item_id = self.param_idx[p] + if is_float8tensor(p): + p._data = self.get_item_from_bucket(bucket, item_id).view(p.shape) + else: + p.data = self.get_item_from_bucket(bucket, item_id).view(p.shape) + + return bucket + + def free_bucket_storage(self, and_free_params_data: bool = False): + """ + Release the storage of a temporary communication bucket. + + If the bucket is temporary, this method frees its storage. 
+ If `and_free_params_data` is True, this method also releases the storage + of the parameter data stored in the `DataParallelBuffer` instance. + + Args: + and_free_params_data (bool, optional): Whether to also release the + storage of the parameter data. Defaults to False. + + Returns: + None + """ + if not self.is_data_distributed: + return + + self.temporary_bucket_allocator.free(self.bucket_index.bucket_id) + if and_free_params_data: + if self.placeholder_bucket is None: + self.placeholder_bucket = Bucket( + data=torch.empty(self.bucket_index.size, dtype=self.dtype, device=self.device) + ) + for p in self.params: + item_id = self.param_idx[p] + self.placeholder_items[item_id] = self.get_item_from_bucket( + self.placeholder_bucket, item_id + ).view(p.shape) + _free_storage(self.placeholder_bucket.data) + for p in self.params: + item_id = self.param_idx[p] + if is_float8tensor(p): + p._data = self.placeholder_items[item_id] + else: + p.data = self.placeholder_items[item_id] + + def _get_item_slice_in_shard(self, item_id: int) -> Tuple[int, int]: + item_index = self.item_index_map[item_id] + shard_bucket_index = self.shard_bucket_index + + item_global_start = item_index.global_data_index + item_global_end = item_index.global_data_index + item_index.size + shard_bucket_start = shard_bucket_index.global_data_index + shard_bucket_end = shard_bucket_index.global_data_index + shard_bucket_index.size + + if item_global_start > shard_bucket_end or item_global_end < shard_bucket_start: + return (0, 0) + + start = max(item_global_start, shard_bucket_start) - item_global_start + end = min(item_global_end, shard_bucket_end) - item_global_start + + return (start, end) + + # pylint: disable=missing-function-docstring + def locate_item_in_global_item(self, item_id: int) -> Tuple[int, int]: + item_index = self.item_index_map[item_id] + if not self.is_data_distributed: + return (0, item_index.size) + + slice_start, slice_end = self._get_item_local_shard_index(item_id) + if slice_start == slice_end: + return (0, 0) + + local_shard_index_to_global_index_offset = ( + self.shard_bucket_index.global_data_index - self.shard_bucket_index.local_data_index + ) + slice_start += local_shard_index_to_global_index_offset + slice_end += local_shard_index_to_global_index_offset + return ( + slice_start - item_index.global_data_index, + slice_end - item_index.global_data_index, + ) + + def _get_item_local_shard_index(self, item_id: int) -> Tuple[int, int]: + slice_start, slice_end = self._get_item_slice_in_shard(item_id) + if slice_start == slice_end: + return (0, 0) + + item_index = self.item_index_map[item_id] + shard_bucket_index = self.shard_bucket_index + offset = ( + item_index.global_data_index + - shard_bucket_index.global_data_index + + shard_bucket_index.local_data_index + ) + + return (offset + slice_start, offset + slice_end) + + def _get_item_local_index(self, item_id: int) -> Tuple[int, int]: + if not self.is_data_distributed: + item_index = self.item_index_map[item_id] + return (item_index.global_data_index, item_index.global_data_index + item_index.size) + + return self._get_item_local_shard_index(item_id) + + def set_item(self, item_id: int, item_data: torch.Tensor) -> None: + """ + Update a tensor item managed by the `DataParallelBuffer` instance. + + The storage of the item is mapped to the communication bucket. + This method updates the item data and ensures consistency with the bucket. + + Args: + item_id (int): The ID of the tensor item to update. 
+ item_data (torch.Tensor): The new data for the tensor item. + + Returns: + None + """ + if self.is_data_distributed: + slice_start, slice_end = self._get_item_slice_in_shard(item_id) + item_data = item_data.flatten()[slice_start:slice_end] + local_index_start, local_index_end = self._get_item_local_index(item_id) + shard = self.data[local_index_start:local_index_end] + if shard.numel() > 0: + shard.data.copy_(item_data.flatten()) + + def get_item(self, item_id: int, only_shard: bool = False) -> torch.Tensor: + """ + Retrieve a tensor item managed by the `DataParallelBuffer` instance. + + The storage of the item is mapped to the communication bucket. + If `only_shard` is True, returns only the shard of the item corresponding + to the current process. + Otherwise, returns the entire item. + + Args: + item_id (int): The ID of the tensor item to retrieve. + only_shard (bool, optional): Whether to return only the shard of the + item. Defaults to False. + + Returns: + torch.Tensor: The retrieved tensor item. + """ + if only_shard: + start, end = self._get_item_local_shard_index(item_id) + else: + start, end = self._get_item_local_index(item_id) + + return self.data[start:end] + + def get_item_from_bucket(self, bucket: Bucket, item_id: int): + """get item from bucket.""" + item_index = self.item_index_map[item_id] + bucket_index = self.bucket_index + start_index = item_index.global_data_index - bucket_index.global_data_index + end_index = start_index + item_index.size + item = bucket.data[start_index:end_index] + return item + + def get_shard_from_bucket(self, bucket: Bucket): + """Get the local sharding of the bucket.""" + shard_bucket_index = self.shard_bucket_index + offset = shard_bucket_index.bucket_data_index + shard_size = shard_bucket_index.size + shard = bucket.data[offset : offset + shard_size] + return shard + + def get_shard_from_local_buffer(self) -> torch.Tensor: + """Get the local sharding of the bucket.""" + index = self.shard_bucket_index + return self.data[index.local_data_index : index.local_data_index + index.size] + + +@dataclasses.dataclass +class ParameterGroup: + """ + A group of model parameters with associated metadata for data-parallel training. + + This dataclass encapsulates a list of PyTorch parameters and additional information + necessary for managing data-parallel operations, such as data type, gradient requirements, + and buffer assignments. + """ + + params: List[torch.nn.Parameter] + dtype: Optional[torch.dtype] = None + is_expert_param: bool = False + requires_grad: Optional[bool] = None + fsdp_unit_id: Optional[int] = None + data_parallel_world_size: Optional[int] = None + model_weight_buffer: Optional[DataParallelBuffer] = None + main_weight_buffer: Optional[DataParallelBuffer] = None + main_grad_buffer: Optional[DataParallelBuffer] = None + + +def _get_parameter_groups( + module: torch.nn.Module, policy: BucketingPolicy, meta_device_init_fp8_params: dict +): + """ + Get the parameter group for the given module and parameters. + """ + param_to_name = {p: name for name, p in module.named_parameters()} + fsdp_units = [] + if policy.fsdp_unit_modules: + param_to_id = {} + for i, p in enumerate(module.parameters()): + param_to_id[p] = i + fsdp_modules = [] + for m in module.modules(): + # Skip nested FSDP module. 
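+            # When FSDP unit modules are nested, only the outermost instance is
+            # treated as a unit; parameters of inner instances are already
+            # covered by the enclosing unit's buckets.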
+            if any(is_submodule(m, fsdp_module) for fsdp_module in fsdp_modules):
+                continue
+            if isinstance(m, tuple(policy.fsdp_unit_modules)):
+                fsdp_units.append([param_to_name[p] for p in m.parameters()])
+                fsdp_modules.append(m)
+
+    def _does_param_require_new_bucket(param):
+        """
+        Split shared embedding parameters into a separate bucket if using a distributed
+        optimizer that makes use of reduce-scatters instead of all-reduces.
+        This ensures that the first and last pipeline stage partition optimizer state
+        for the shared embedding parameters the same way across DP replicas, allowing
+        the DP reduce-scatter to be before the embedding all-reduce.
+        """
+        return (
+            getattr(param, "shared_embedding", False)
+            and policy.data_parallel_sharding_strategy != "no_shard"
+        )
+
+    is_expert_parameter = lambda p: not getattr(p, 'allreduce', True)
+
+    # Step 1: Group the parameters according to their execution order and attributes.
+    parameter_groups = []
+    for name, param in module.named_parameters():
+        param_attrs = dict(
+            dtype=(
+                "float8"
+                if is_float8tensor(param) or meta_device_init_fp8_params.get(name, False)
+                else param.dtype
+            ),
+            is_expert_param=is_expert_parameter(param),
+            requires_grad=param.requires_grad,
+            fsdp_unit_id=None,
+        )
+        for fsdp_unit_id, fsdp_unit in enumerate(fsdp_units):
+            if name in fsdp_unit:
+                param_attrs["fsdp_unit_id"] = fsdp_unit_id
+                break
+
+        found_group = False
+        for param_group in parameter_groups:
+            group_attrs = {
+                key: value for key, value in param_group.__dict__.items() if key in param_attrs
+            }
+            if group_attrs == param_attrs:
+                param_group.params.append(param)
+                found_group = True
+                break
+
+        if not found_group:
+            parameter_groups.append(ParameterGroup([param], **param_attrs))
+
+    # Step 2: Bucket the parameters based on the suggested bucket size.
+    suggested_bucket_size = policy.suggested_bucket_size
+    bucket_groups = []
+    for group in parameter_groups:
+        bucket = []
+
+        basic_attrs = {
+            key: value
+            for key, value in group.__dict__.items()
+            if key in ['dtype', 'is_expert_param', 'requires_grad', 'fsdp_unit_id']
+        }
+        for param in group.params:
+            if _does_param_require_new_bucket(param):
+                if len(bucket) > 0:
+                    bucket_groups.append(ParameterGroup(bucket, **basic_attrs))
+                bucket_groups.append(ParameterGroup([param], **basic_attrs))
+                bucket = []
+                continue
+
+            bucket.append(param)
+            if (
+                group.fsdp_unit_id is None
+                and suggested_bucket_size
+                and sum([p.numel() for p in bucket]) >= suggested_bucket_size
+            ):
+                bucket_groups.append(ParameterGroup(bucket, **basic_attrs))
+                bucket = []
+                continue
+
+        if bucket:
+            bucket_groups.append(ParameterGroup(bucket, **basic_attrs))
+
+    param_to_param_group = {}
+    for group_id, group in enumerate(bucket_groups):
+        for param in group.params:
+            param_to_param_group[param] = group_id
+
+    # Log buckets for all PP stages.
+ if ( + parallel_state.get_data_parallel_rank(with_context_parallel=True) == 0 + and parallel_state.get_tensor_model_parallel_rank() == 0 + ): + log_strs = [] + log_strs.append(f'Number of parameter groups for FSDP: {len(bucket_groups)}') + for index, group in enumerate(bucket_groups): + numel = 0 + for param in group.params: + numel += param.numel() + log_strs.append( + f"Params for group {index+1} ({numel} elements, dtype {group.dtype}, " + f"has_weight_buffer: {group.model_weight_buffer is not None}, " + f"has_grad_buffer: {group.main_grad_buffer is not None}, " + f"has_main_weight_buffer: {group.main_weight_buffer is not None}):" + ) + for param in group.params: + log_strs.append(f'\t{param_to_name[param]}') + log_on_each_pipeline_stage(logger, logging.INFO, '\n'.join(log_strs)) + + return (bucket_groups, fsdp_units, param_to_param_group) + + +class ParamAndGradBuffer: + """A class that manages parameter grouping, buffer allocation, and + communication operations for data-parallel distributed training. + + This class provides functionality to: + 1. Group parameters based on their data types and communication group sizes + 2. Create contiguous buffers for model weights, gradients, and high-precision + main weights + 3. Handle parameter unsharding, gradient reduction, and weight + synchronization operations + + Key Features: + - Efficient parameter grouping based on data types and communication patterns + - Memory-efficient contiguous buffer allocation + - Support for mixed-precision training with main weights + - Distributed operations including parameters all-gather and gradients + reduce-scatter/all-reduce + - Synchronized weight updates between model and main weights + + Note: + This class is designed for distributed training scenarios where efficient + parameter management and communication are crucial for performance. + + Args: + ddp_config (DistributedDataParallelConfig): The distributed data parallel + configuration. + module (torch.nn.Module): The module whose parameters are to be grouped + and flatten. + bucketing_policy (BucketingPolicy): The bucketing policy. + data_parallel_group (torch.distributed.ProcessGroup): The data parallel group. + expert_data_parallel_group (Optional[torch.distributed.ProcessGroup]): + The expert data parallel group. + preserve_fp32_weights (bool): Whether to preserve FP32 weights. + grad_reduce_in_fp32 (bool): Whether to reduce gradients in FP32. + gradient_scaling_factor (Optional[float]): The gradient scaling factor. + expert_gradient_scaling_factor (Optional[float]): The expert gradient + scaling factor. + device (torch.device): The parameter and gradient buffer device. + only_create_grad_buffer_and_main_weight_buffer_for_param_requires_grad (bool): + Whether to only create the gradient buffer and main weight buffer + for parameters that require gradients. Default is True. 
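+
+    Usage (illustrative sketch; in practice the buffer is constructed by the
+    custom FSDP wrapper rather than by user code, and ``TransformerLayer`` is
+    just a placeholder for whichever FSDP unit module class is configured):
+
+        ```python
+        buffer = ParamAndGradBuffer(
+            ddp_config=ddp_config,
+            module=model,
+            bucketing_policy=BucketingPolicy(
+                suggested_bucket_size=40_000_000,
+                fsdp_unit_modules=[TransformerLayer],
+                data_parallel_sharding_strategy='optim_grads_params',
+            ),
+            data_parallel_group=parallel_state.get_data_parallel_group(
+                with_context_parallel=True
+            ),
+        )
+        buffer.zero_grad()                  # start of every iteration
+        # ... forward/backward fills param.main_grad ...
+        buffer.update_main_grads()          # expose grads to the optimizer
+        buffer.copy_main_weights_to_model_weights()  # after the optimizer step
+        ```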
+ """ + + def __init__( + self, + ddp_config: DistributedDataParallelConfig, + module: torch.nn.Module, + bucketing_policy: BucketingPolicy, + data_parallel_group: torch.distributed.ProcessGroup, + expert_data_parallel_group: Optional[torch.distributed.ProcessGroup] = None, + preserve_fp32_weights: bool = True, + grad_reduce_in_fp32: bool = True, + gradient_scaling_factor: Optional[float] = None, + expert_gradient_scaling_factor: Optional[float] = None, + device: torch.device = torch.device('cuda'), + only_create_grad_buffer_and_main_weight_buffer_for_param_requires_grad: bool = True, + reset_parameters_for_meta_device_init_module: bool = False, + ): + self.ddp_config = ddp_config + self.module = module + self.bucketing_policy = bucketing_policy + self.param_to_name = {p: name for name, p in self.module.named_parameters()} + self.preserve_fp32_weights = preserve_fp32_weights + self.grad_reduce_in_fp32 = grad_reduce_in_fp32 + self.data_parallel_group = data_parallel_group + self.expert_data_parallel_group = expert_data_parallel_group + self.params = list(module.parameters()) + self.gradient_scaling_factor = gradient_scaling_factor + self.expert_gradient_scaling_factor = expert_gradient_scaling_factor + self.device = device + self.only_create_grad_buffer_and_main_weight_buffer_for_param_requires_grad = ( + only_create_grad_buffer_and_main_weight_buffer_for_param_requires_grad + ) + self.reset_parameters_for_meta_device_init_module = ( + reset_parameters_for_meta_device_init_module + ) + + # Mark fp8 param. + meta_device_init_fp8_params = {} + if reset_parameters_for_meta_device_init_module: + for m in module.modules(): + if not isinstance(m, TransformerEngineBaseModule): + continue + for name, param in m.named_parameters(recurse=False): + # The fp8 param initialized from the meta device may NOT be + # an fp8 tensor, according to the internal logic of the TE + # to determine whether this parameter is fp8 or not. + fp8_meta_index = m.param_init_meta[name].fp8_meta_index + if m.primary_weights_in_fp8 and fp8_meta_index is not None: + meta_device_init_fp8_params[self.param_to_name[param]] = True + + # Get the parameter groups. + (self.parameter_groups, self.fsdp_units, self.param_to_param_group) = _get_parameter_groups( + module, bucketing_policy, meta_device_init_fp8_params + ) + + self._init_each_parameter_group_buffers(meta_device_init_fp8_params) + + # Initialize the optimizer named parameters. + self.optimizer_named_parameters = self._init_optimizer_named_parameters() + + def _init_each_parameter_group_buffers(self, meta_device_init_fp8_params): + """ + Initialize the buffers for each parameter group. 
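+
+        The sharding strategy decides which buffers keep only a local shard:
+        'no_shard' keeps everything replicated, 'optim' shards only the FP32
+        main weights, 'optim_grads' additionally shards the main gradients,
+        and 'optim_grads_params' also shards the model weights themselves.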
+ """ + data_parallel_sharding_strategy = self.ddp_config.data_parallel_sharding_strategy + if data_parallel_sharding_strategy == 'no_shard': + is_model_weight_buffer_distributed = False + is_main_weight_buffer_distributed = False + is_grad_buffer_distributed = False + elif data_parallel_sharding_strategy == 'optim': + is_model_weight_buffer_distributed = False + is_main_weight_buffer_distributed = True + is_grad_buffer_distributed = False + elif data_parallel_sharding_strategy == 'optim_grads': + is_model_weight_buffer_distributed = False + is_main_weight_buffer_distributed = True + is_grad_buffer_distributed = True + elif data_parallel_sharding_strategy == 'optim_grads_params': + is_model_weight_buffer_distributed = True + is_main_weight_buffer_distributed = True + is_grad_buffer_distributed = True + else: + raise ValueError( + f'Invalid data_parallel_sharding_strategy: {data_parallel_sharding_strategy}' + ) + + self.memory_allocator_for_model_weight_buffer = StorageResizeBasedBucketAllocator() + self.buffer_all_in_one = True + + preserve_fp32_weights = self.preserve_fp32_weights + grad_reduce_in_fp32 = self.grad_reduce_in_fp32 + buffer_size = {torch.float32: 0, torch.float16: 0, torch.bfloat16: 0, "float8": 0} + for group_id, group in enumerate(self.parameter_groups): + dp_group = ( + self.data_parallel_group + if not group.is_expert_param + else self.expert_data_parallel_group + ) + group.data_parallel_world_size = torch.distributed.get_world_size(group=dp_group) + gradient_scaling_factor = ( + self.gradient_scaling_factor + if not group.is_expert_param + else self.expert_gradient_scaling_factor + ) + one_param = group.params[0] + is_dtype_float8 = is_float8tensor(one_param) or meta_device_init_fp8_params.get( + self.param_to_name[one_param], False + ) + if is_dtype_float8: + param_dtype = torch.uint8 + grad_dtype = torch.bfloat16 + else: + param_dtype = group.params[0].dtype + grad_dtype = param_dtype + should_create_grad_buffer_or_main_weight_buffer = ( + not self.only_create_grad_buffer_and_main_weight_buffer_for_param_requires_grad + or group.requires_grad + ) + + # Initialize the model weight buffer. + if data_parallel_sharding_strategy != 'no_shard': + group.model_weight_buffer = DataParallelBuffer( + self.ddp_config, + group.params, + is_data_distributed=is_model_weight_buffer_distributed + and group.data_parallel_world_size > 1, + dtype=param_dtype, + device=self.device, + data_parallel_group=dp_group, + init_meta_only=True, + is_dtype_float8=is_dtype_float8, + temporary_bucket_allocator=self.memory_allocator_for_model_weight_buffer, + bucket_id=group_id, + ) + + # Initialize the main weight buffer. + if should_create_grad_buffer_or_main_weight_buffer and preserve_fp32_weights: + group.main_weight_buffer = DataParallelBuffer( + self.ddp_config, + group.params, + is_data_distributed=is_main_weight_buffer_distributed + and group.data_parallel_world_size > 1, + dtype=torch.float32, + device=self.device, + data_parallel_group=dp_group, + init_meta_only=True, + bucket_id=group_id, + ) + + # Initialize the main grad buffer. 
+ if should_create_grad_buffer_or_main_weight_buffer: + group.main_grad_buffer = DataParallelBuffer( + self.ddp_config, + group.params, + is_data_distributed=is_grad_buffer_distributed + and group.data_parallel_world_size > 1, + dtype=torch.float32 if grad_reduce_in_fp32 else grad_dtype, + device=self.device, + data_parallel_group=dp_group, + init_meta_only=True, + is_dtype_float8=not grad_reduce_in_fp32 and grad_dtype is torch.uint8, + gradient_scaling_factor=gradient_scaling_factor, + bucket_id=group_id, + ) + if grad_reduce_in_fp32: + buffer_size[torch.float32] += group.main_grad_buffer.data_size + elif group.main_grad_buffer.is_dtype_float8: + buffer_size["float8"] += group.main_grad_buffer.data_size + else: + buffer_size[group.main_grad_buffer.dtype] += group.main_grad_buffer.data_size + + reset_context_args = {"init_param_with_fp8": self.ddp_config.fp8_param_gather} + module_reset_flag = {} + if self.reset_parameters_for_meta_device_init_module: + self.param_to_direct_module = {} + for name, m in self.module.named_modules(): + for p in m.parameters(recurse=False): + self.param_to_direct_module[p] = (name, m) + + meta_params_numel = 0 + cuda_params_numel = 0 + cpu_params_numel = 0 + for group in self.parameter_groups: + for p in group.params: + if p.is_meta: + meta_params_numel += p.numel() + elif p.device.type == 'cuda': + cuda_params_numel += p.numel() + else: + cpu_params_numel += p.numel() + log_str = ( + f"Meta params numel: {meta_params_numel / 1_000_000:.2f} M, " + f"CUDA params numel: {cuda_params_numel / 1_000_000:.2f} M, " + f"CPU params numel: {cpu_params_numel / 1_000_000:.2f} M" + ) + log_on_each_pipeline_stage(logger, logging.INFO, log_str) + + # Initialize the model weight buffer data of each parameter group. + for group in self.parameter_groups: + wbuf = group.model_weight_buffer + if wbuf: + wbuf.data = torch.empty(wbuf.data_size, dtype=wbuf.dtype, device=self.device) + bucket = wbuf.fetch_bucket() + mbuf = group.main_weight_buffer + if mbuf: + mbuf.data = torch.empty(mbuf.data_size, dtype=mbuf.dtype, device=self.device) + for item_id, p in enumerate(group.params): + if wbuf: + if self.reset_parameters_for_meta_device_init_module and p.is_meta: + m_name, m = self.param_to_direct_module[p] + if not module_reset_flag.get(m_name, False) and hasattr( + m, "reset_parameters" + ): + old_params = list(m.parameters(recurse=False)) + + # If the GPU memory over threshold, empty cache to leave + # some memory for initialization of the model on the + # CUDA device. 
+ if check_gpu_memory(threshold=0.5): + gc.collect() + torch.cuda.empty_cache() + + m.to_empty(device=self.device, recurse=False) + if is_te_min_version("0.9.0") and not isinstance( + m, TransformerEngineBaseModule + ): + reset_context_args["with_cuda_rng_tracker"] = True + with ResetParametersContext(**reset_context_args): + m.reset_parameters() + module_reset_flag[m_name] = True + new_params = list(m.parameters(recurse=False)) + + self._reset_parameters(old_params, new_params) + p = group.params[item_id] + assert not p.is_meta, (self.param_to_name[p], module_reset_flag) + wbuf.set_item(item_id, p.data) + + # reset the parameter data to the buffer + old_param_data = p.data + new_param_data = wbuf.get_item_from_bucket(bucket, item_id).view(p.shape) + if is_float8tensor(p): + p._data = new_param_data + else: + p.data = new_param_data + assert old_param_data._base is None + p.data.detach().copy_(old_param_data) + del old_param_data + if mbuf: + if hasattr(p, 'get_high_precision_init_val'): + mbuf.set_item(item_id, p.get_high_precision_init_val()) + p.clear_high_precision_init_val() + else: + mbuf.set_item(item_id, p) + + if wbuf and wbuf.is_data_distributed: + """ + When MCore Custom FSDP `optim_grads_params` is enabled, + it is necessary to save the tensor local shard. This local shard is + accessible through the `fully_shard_param_local_shard` + attribute of the tensor. + + This attribute contains the local shard of the fully + sharded parameter, which is essential for correctly + saving and loading the model state when using + `optim_grads_params` with FSDP. + + Example: + >>> # Assuming `tensor` is a fully sharded parameter + >>> local_shard = tensor.fully_shard_param_local_shard + >>> # Save the local shard as needed + """ + local_shard = wbuf.get_item(item_id, only_shard=True) + local_shard.fsdp_shard_orig_param = p + p.fully_shard_param_local_shard = local_shard + p.fully_shard_param_local_index = wbuf.locate_item_in_global_item(item_id) + + def disable_shard_param_to_function(*unused): + """Prevents users from accessing the 'to' operation + on parameters after sharding. + + This restriction helps maintain data integrity and + proper sharding behavior by disabling direct 'to' + device/dtype operations on sharded parameters. + """ + raise RuntimeError( + "Your model is wrapped by MCore Custom FSDP. All " + "parameter dtypes and devices must be set before FSDP " + "wrapping. After FSDP wrapping, parameter storage " + "is sharded and you cannot modify parameter " + "dtypes or devices." + ) + + setattr(p, 'to', disable_shard_param_to_function) + + def disable_shard_param_cpu_function(*unused): + warnings.warn( + "The parameters are sharded by custom fsdp, " + "and no actual cpu operation is performed." + ) + return torch.empty([], device='cpu') + + setattr(p, 'cpu', disable_shard_param_cpu_function) + + if wbuf and wbuf.is_data_distributed: + wbuf.free_bucket_storage() + + # Allocate the main_weight buffer and main_grad buffer data in one buffer. 
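+        # With `buffer_all_in_one`, a single contiguous tensor is allocated per
+        # dtype (fp32 / fp16 / bf16 / uint8 for float8) and each group's buffer
+        # is carved out of it as a slice via a running offset, avoiding many
+        # small allocations.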
+ if self.buffer_all_in_one: + self.buffer = { + torch.float32: torch.empty( + buffer_size[torch.float32], dtype=torch.float32, device=self.device + ), + torch.float16: torch.empty( + buffer_size[torch.float16], dtype=torch.float16, device=self.device + ), + torch.bfloat16: torch.empty( + buffer_size[torch.bfloat16], dtype=torch.bfloat16, device=self.device + ), + "float8": torch.empty(buffer_size["float8"], dtype=torch.uint8, device=self.device), + } + offset = {torch.float32: 0, torch.float16: 0, torch.bfloat16: 0, "float8": 0} + + def _alloc(dtype, size): + if self.buffer_all_in_one: + if dtype == torch.uint8: + dtype = "float8" + data = self.buffer[dtype][offset[dtype] : offset[dtype] + size] + offset[dtype] += size + return data + return torch.empty(size, dtype=dtype, device=self.device) + + # Initialize the main grad buffer data of each parameter group. + for group in self.parameter_groups: + gbuf = group.main_grad_buffer + if not gbuf: + continue + gbuf.data = _alloc(gbuf.dtype, gbuf.data_size) + gbuf.data.zero_() + for item_id, p in enumerate(group.params): + p.fsdp_managed_main_grad = gbuf.get_item(item_id) + p._gbuf = gbuf + p._item_id = item_id + + def main_grad_getter(p): + # Make sure main_grad memory storage ready. + bucket = p._gbuf.fetch_bucket() + gbuf = p._gbuf + item_id = p._item_id + if bucket.status == GradBucketStatus.GRAD_REDUCING: + if bucket.data_operation_event: + bucket.data_operation_event.wait() + bucket.data_operation_event = None + # Here it is assumed that main_grad is taken out and do + # gradient accumulation and should not be freed up before + # gradient reduction. + bucket.status = GradBucketStatus.GRAD_ACCUMULATING + return gbuf.get_item_from_bucket(bucket, item_id).view(p.shape) + + setattr(p.__class__, 'main_grad', property(main_grad_getter)) + + if gbuf.is_data_distributed: + gbuf.free_bucket_storage() + + gc.collect() + torch.cuda.empty_cache() + + def _reset_parameters(self, old_params, new_params): + assert len(old_params) == len(new_params) + param_map = {} + for old_param, new_param in zip(old_params, new_params): + param_map[old_param] = new_param + self.param_to_name[new_param] = self.param_to_name[old_param] + del self.param_to_name[old_param] + + self.param_to_param_group[new_param] = self.param_to_param_group[old_param] + del self.param_to_param_group[old_param] + + self.param_to_direct_module[new_param] = self.param_to_direct_module[old_param] + del self.param_to_direct_module[old_param] + + for item_id, p in enumerate(self.params): + if p in param_map: + new_p = param_map[p] + self.params[item_id] = new_p + + for group in self.parameter_groups: + for item_id, p in enumerate(group.params): + if p not in param_map: + continue + new_p = param_map[p] + group.params[item_id] = new_p + for buf in [ + group.model_weight_buffer, + group.main_weight_buffer, + group.main_grad_buffer, + ]: + if buf is None: + continue + buf.param_idx[new_p] = buf.param_idx[p] + del buf.param_idx[p] + + def scale_gradients(self, scaling_factor: float) -> None: + """Scale the gradient data by `scaling_factor`.""" + for group in self.parameter_groups: + if group.main_grad_buffer is None: + continue + group.main_grad_buffer.data *= scaling_factor + + def zero_grad(self): + """ + Zero out the underlying grad_buffer and reset all buckets in preparation + for the next iteration of training. 
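+
+        Called from the wrapping module's ``zero_grad_buffer()`` so that
+        gradient accumulation starts from zeroed main-grad storage.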
+ """ + for _, param in self.optimizer_named_parameters: + if param.grad is not None and param.grad._base is None: + # For tensors that are not referenced, trying to use storage + # resize to make memory free immediately. + _free_storage(param.grad) + param.grad = None + + for group in self.parameter_groups: + if group.main_grad_buffer is None: + continue + group.main_grad_buffer.data.zero_() + + def _init_optimizer_named_parameters(self) -> List[Tuple[str, torch.nn.Parameter]]: + named_parameters = [] + for pg in self.parameter_groups: + if pg.main_grad_buffer is None: + continue + + optimizer_state_is_shard = pg.main_grad_buffer.is_data_distributed or ( + pg.main_weight_buffer and pg.main_weight_buffer.is_data_distributed + ) + for item_id, orig_param in enumerate(pg.params): + if pg.main_weight_buffer: + param = pg.main_weight_buffer.get_item( + item_id, only_shard=optimizer_state_is_shard + ) + elif pg.model_weight_buffer: + param = pg.model_weight_buffer.get_item( + item_id, only_shard=optimizer_state_is_shard + ) + else: + param = orig_param + + def set_param_attribute_closure(param, orig_param): + def set_param_attribute(): + for attr_name in [ + 'requires_grad', + 'sequence_parallel', + 'shared', + 'tensor_model_parallel', + 'partition_dim', + 'partition_stride', + 'is_embedding_or_output_parameter', + ]: + if hasattr(orig_param, attr_name): + setattr(param, attr_name, getattr(orig_param, attr_name)) + + return set_param_attribute + + setattr(param, 'reset_attribute', set_param_attribute_closure(param, orig_param)) + setattr(param, 'orig_param', orig_param) + param.reset_attribute() + named_parameters.append((self.param_to_name[orig_param], param)) + + return named_parameters + + def update_main_grads(self): + """Update the main gradients for preparing the optimizer step.""" + for _, param in self.optimizer_named_parameters: + param.reset_attribute() + orig_param = param.orig_param + group = self.parameter_groups[self.param_to_param_group[orig_param]] + item_id = group.main_grad_buffer.param_idx[orig_param] + optimizer_grad = group.main_grad_buffer.get_item( + item_id, only_shard=group.main_weight_buffer.is_data_distributed + ) + setattr( + param, + 'grad', + optimizer_grad.to(param.dtype) if optimizer_grad.numel() > 0 else None, + ) + + @property + def num_buckets(self): + """Return the number of buckets.""" + return len(self.parameter_groups) + + @torch.no_grad() + def copy_main_weights_to_model_weights(self): + """Update the model weights from the main weights.""" + for pg in self.parameter_groups: + mbuf = pg.main_weight_buffer + wbuf = pg.model_weight_buffer + if mbuf is None: + continue + + for param in pg.params: + item_id = mbuf.param_idx[param] + if wbuf: + if wbuf.is_data_distributed or mbuf.is_data_distributed: + model_param = wbuf.get_item(item_id, only_shard=True) + main_weight = mbuf.get_item(item_id, only_shard=True) + else: + model_param = wbuf.get_item(item_id) + main_weight = mbuf.get_item(item_id) + else: + assert not mbuf.is_data_distributed + model_param = param + main_weight = pg.main_weight_buffer.get_item(item_id) + + if model_param.numel() == 0: + continue + + if is_float8tensor(param): + # 1. When "--fp8-param-gather" is disabled, the main param + # is first casted to BF16/FP16, and then casted to FP8, so + # the amax_history is calculated using BF16/FP16 param. + # 2. When "--fp8-param-gather" is enabled, we can cast the + # FP32 main param to FP8 directly, which results in slightly + # different results with higher performance. 
In theory, this
+                    # does not affect convergence.
+                    # TODO: The following code maintains the logic of the point-1
+                    # above. It can be deleted if it is not necessary.
+                    main_weight = main_weight.to(param.dtype)
+                    cast_to_fp8(
+                        main_weight.view(1, -1),
+                        param._fp8_meta['scaling_fwd'],
+                        param._fp8_meta_index,
+                        param._fp8_dtype,
+                        out=model_param.view(1, -1),
+                    )
+                else:
+                    model_param.data.copy_(main_weight.view(model_param.shape))
+
+    @torch.no_grad()
+    def copy_model_weights_to_main_weights(self):
+        """Copy the model weights to the main weights."""
+        for group in self.parameter_groups:
+            mbuf = group.main_weight_buffer
+            if mbuf is None:
+                continue
+            wbuf = group.model_weight_buffer
+            if mbuf.is_data_distributed:
+                copyin_data = wbuf.get_shard_from_local_buffer()
+            else:
+                copyin_data = wbuf.data
+            assert mbuf.data.numel() == copyin_data.numel(), (
+                f"Main weight buffer size {mbuf.data.numel()} does not match "
+                f"model weight buffer size {copyin_data.numel()}"
+            )
+            mbuf.data.copy_(copyin_data.data)
+
+    def all_gather_parameters(self, async_op: bool = True):
+        """All gather the parameters.
+        Args:
+            async_op (bool, optional): Whether to do the all-gather
+                asynchronously. Defaults to True.
+        """
+        assert all(
+            [not g.model_weight_buffer.is_data_distributed for g in self.parameter_groups]
+        ), 'all_gather_parameters() should only be called when parameters are not sharded.'
+
+        all_gather_ops = []
+        for g in self.parameter_groups:
+            shard = g.model_weight_buffer.get_shard_from_local_buffer()
+            all_gather_handler = torch.distributed.all_gather_into_tensor(
+                output_tensor=g.model_weight_buffer.data,
+                input_tensor=shard,
+                group=g.model_weight_buffer.data_parallel_group,
+                async_op=async_op,
+            )
+            if async_op:
+                all_gather_ops.append(all_gather_handler)
+
+        for op in all_gather_ops:
+            op.wait()
+
+    def reduce_scatter_gradients(self, async_op: bool = True):
+        """Reduce scatter the gradients.
+        Args:
+            async_op (bool, optional): Whether to do the reduce-scatter
+                asynchronously. Defaults to True.
+        """
+        assert all(
+            [
+                not g.main_grad_buffer.is_data_distributed
+                for g in self.parameter_groups
+                if g.main_grad_buffer
+            ]
+        ), 'reduce_scatter_gradients() should only be called when gradients are not sharded.'
+
+        reduce_scatter_ops = []
+        for g in self.parameter_groups:
+            gbuf = g.main_grad_buffer
+            if gbuf is None:
+                continue
+            scaling_factor = gbuf.gradient_scaling_factor
+            reduce_op = gradient_reduce_preprocessing(gbuf.data, scaling_factor, self.ddp_config)
+            reduce_scatter_handler = torch.distributed.reduce_scatter_tensor(
+                output=gbuf.get_shard_from_local_buffer(),
+                input=gbuf.data,
+                op=reduce_op,
+                group=g.main_grad_buffer.data_parallel_group,
+                async_op=async_op,
+            )
+
+            if async_op:
+                reduce_scatter_ops.append(reduce_scatter_handler)
+
+        for op in reduce_scatter_ops:
+            op.wait()
+
+    def all_reduce_gradients(self, async_op: bool = False):
+        """All reduce the gradients.
+        Args:
+            async_op (bool, optional): Whether to do the all-reduce
+                asynchronously. Defaults to False.
+        """
+        assert all(
+            [
+                not g.main_grad_buffer.is_data_distributed
+                for g in self.parameter_groups
+                if g.main_grad_buffer
+            ]
+        ), 'all_reduce_gradients() should only be called when gradients are not sharded.'
+
+        all_reduce_ops = []
+        for g in self.parameter_groups:
+            gbuf = g.main_grad_buffer
+            if gbuf is None:
+                continue
+            scaling_factor = gbuf.gradient_scaling_factor
+            reduce_op = gradient_reduce_preprocessing(gbuf.data, scaling_factor, self.ddp_config)
+            all_reduce_handler = torch.distributed.all_reduce(
+                gbuf.data, op=reduce_op, group=gbuf.data_parallel_group, async_op=async_op
+            )
+            if async_op:
+                all_reduce_ops.append(all_reduce_handler)
+
+        for op in all_reduce_ops:
+            op.wait()
+
+
+class BucketStatus(Enum):
+    """
+    An enumeration of possible statuses for a data-parallel communication bucket.
+
+    Attributes:
+        EMPTY (int): The bucket is empty and not in use.
+        COMMUNICATING (int): The bucket is currently being used for communication.
+        READY_TO_USE (int): The bucket is filled with data and ready for use.
+    """
+
+    EMPTY = 1
+    COMMUNICATING = 2
+    READY_TO_USE = 3
+
+
+class GradBucketStatus(Enum):
+    """
+    An enumeration of possible statuses for a gradient bucket.
+
+    Attributes:
+        GRAD_ACCUMULATING (int): The gradient bucket is currently accumulating gradients.
+        GRAD_REDUCING (int): The gradient bucket is currently reducing gradients.
+    """
+
+    GRAD_ACCUMULATING = 1
+    GRAD_REDUCING = 2
+
+
+class GradReducePipeline:
+    """
+    Pipeline for reducing gradients.
+    """
+
+    def __init__(
+        self,
+        param_and_grad_buffer: ParamAndGradBuffer,
+        cuda_stream: Optional[torch.cuda.Stream] = None,
+        check_nans: bool = False,
+    ) -> None:
+        self.buffer = param_and_grad_buffer
+        self.grad_reduce_queue = []
+        self.bucket_status = {
+            i: BucketStatus.EMPTY
+            for i in range(self.buffer.num_buckets)
+            if self.buffer.parameter_groups[i].main_grad_buffer
+        }
+        self.buckets = {}
+        self.cuda_stream = cuda_stream
+        self.check_nans = check_nans
+
+    @property
+    def num_buckets(self):
+        """Return the number of buckets."""
+        return self.buffer.num_buckets
+
+    def reset(self):
+        """Reset the pipeline state."""
+        assert len(self.grad_reduce_queue) == 0, (
+            f"There are still pending reduce-scatter tasks, it is not safe to reset. "
+            f"items: {self.grad_reduce_queue}, bucket_status: {self.bucket_status}."
+        )
+        for bucket_id, _ in self.bucket_status.items():
+            gbuf = self.buffer.parameter_groups[bucket_id].main_grad_buffer
+            gbuf.free_bucket_storage()
+            self.bucket_status[bucket_id] = BucketStatus.EMPTY
+        assert all([status is BucketStatus.EMPTY for status in self.bucket_status.values()]), (
+            f"There are still pending buckets, it is not safe to reset. "
+            f"bucket_status: {self.bucket_status}."
+        )
+
+        self.buckets = {}
+
+    def place_bucket(self, bucket_id: int) -> bool:
+        """Place a full-size bucket by bucket id.
+        Args:
+            bucket_id (int): The bucket id.
+        Returns:
+            bool: True if the bucket is placed successfully.
+        """
+        assert bucket_id in self.bucket_status, f"Bucket {bucket_id} is not in the bucket status."
+        bucket_status = self.bucket_status[bucket_id]
+        if bucket_status == BucketStatus.READY_TO_USE:
+            return False
+        if bucket_status == BucketStatus.COMMUNICATING:
+            self.wait_for_previous_grad_reduce(0)
+
+        assert bucket_id not in self.buckets, f"Bucket {bucket_id} is already allocated."
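+        # Fetch the full-size bucket storage for this parameter group and set up
+        # the per-bucket bookkeeping (requires_grad_items / items) that
+        # mark_item_ready() uses to decide when the whole bucket is ready.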
+
+        gbuf = self.buffer.parameter_groups[bucket_id].main_grad_buffer
+        bucket = gbuf.fetch_bucket()
+        requires_grad_items = sum([p.requires_grad for p in gbuf.params])
+        setattr(bucket, 'requires_grad_items', requires_grad_items)
+        setattr(bucket, 'items', [])
+
+        self.buckets[bucket_id] = bucket
+        self.bucket_status[bucket_id] = BucketStatus.READY_TO_USE
+        return True
+
+    def wait_for_previous_grad_reduce(
+        self, recommeded_queue_size: int = 1, recommeded_queue_capacity: Optional[int] = None
+    ):
+        """
+        Wait for the previous reduce-scatter/all-reduce to finish.
+        Args:
+            recommeded_queue_size (int, optional): The recommended queue size. Defaults to 1.
+            recommeded_queue_capacity (Optional[int], optional): The recommended queue capacity.
+                Defaults to None.
+        """
+        if recommeded_queue_capacity is not None:
+            queue_space = sum(
+                [
+                    self.buffer.parameter_groups[bucket_id].main_grad_buffer.bucket_index.size
+                    for _, _, bucket_id in self.grad_reduce_queue
+                ]
+            )
+            while queue_space > recommeded_queue_capacity:
+                grad_reduce_event, free_up_grad_bucket, bucket_id = self.grad_reduce_queue.pop(0)
+                grad_reduce_event.wait()
+                free_up_grad_bucket()
+                queue_space -= self.buffer.parameter_groups[
+                    bucket_id
+                ].main_grad_buffer.bucket_index.size
+        else:
+            recommeded_queue_size = max(0, min(recommeded_queue_size, self.buffer.num_buckets - 1))
+            while len(self.grad_reduce_queue) > recommeded_queue_size:
+                grad_reduce_event, free_up_grad_bucket, _ = self.grad_reduce_queue.pop(0)
+                grad_reduce_event.wait()
+                free_up_grad_bucket()
+
+    def mark_item_ready(self, item: torch.Tensor, async_rs: bool = False) -> bool:
+        """Mark the item ready for reduce-scatter/all-reduce.
+        Args:
+            item (torch.Tensor): The item to be marked.
+            async_rs (bool, optional): Whether to do the reduce-scatter/all-reduce
+                asynchronously. Defaults to False.
+        Returns:
+            bool: True if the bucket's reduce-scatter/all-reduce was launched.
+        """
+        bucket_id = self.buffer.param_to_param_group[item]
+        assert bucket_id in self.buckets, f"Bucket {bucket_id} is not allocated."
+
+        bucket = self.buckets[bucket_id]
+        bucket.items.append(item)
+        assert len(bucket.items) <= bucket.requires_grad_items, "Too many items in the bucket."
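+        # The collective for this bucket is launched only after every
+        # grad-producing parameter in it has been marked ready; until then we
+        # just record the item and report that nothing was launched.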
+ if len(bucket.items) != bucket.requires_grad_items: + return False + + self.bucket_status[bucket_id] = BucketStatus.COMMUNICATING + + current_stream = torch.cuda.current_stream() + reduce_scatter_stream = ( + self.cuda_stream if self.cuda_stream is not None else torch.cuda.current_stream() + ) + reduce_scatter_stream.wait_stream(current_stream) + with torch.cuda.stream(reduce_scatter_stream): + gbuf = self.buffer.parameter_groups[bucket_id].main_grad_buffer + scaling_factor = gbuf.gradient_scaling_factor + reduce_op = gradient_reduce_preprocessing(gbuf.data, scaling_factor, gbuf.ddp_config) + if gbuf.ddp_config.data_parallel_sharding_strategy == 'no_shard': + torch.distributed.all_reduce( + bucket.data, op=reduce_op, group=gbuf.data_parallel_group + ) + else: + grad_shard = gbuf.get_shard_from_bucket(bucket) + grad_shard = torch.empty_like(grad_shard) + torch.distributed.reduce_scatter_tensor( + output=grad_shard, + input=bucket.data, + op=reduce_op, + group=gbuf.data_parallel_group, + ) + if gbuf.is_data_distributed: + # Gradient accumulate on local buffer + local_buffer = gbuf.get_shard_from_local_buffer() + local_buffer += grad_shard + reduce_scatter_view_out_event = reduce_scatter_stream.record_event() + bucket.data_operation_event = reduce_scatter_view_out_event + bucket.status = GradBucketStatus.GRAD_REDUCING + del self.buckets[bucket_id] + + def get_closure(): + def free_up_grad_bucket(): + nonlocal gbuf, local_buffer, bucket_id, bucket + if self.check_nans: + assert not torch.isnan( + local_buffer + ).any(), f"NaN detected in bucket {bucket_id}: {local_buffer}" + + # There is a special case where this bucket is taken for + # gradient accumulating before it has a chance to be free-up (here), + # in which case we free-up here because there is still + # subsequent gradient reducing to be done on this bucket. + if gbuf.is_data_distributed and bucket.status != GradBucketStatus.GRAD_ACCUMULATING: + gbuf.free_bucket_storage() + self.bucket_status[bucket_id] = BucketStatus.EMPTY + + return free_up_grad_bucket + + free_up_grad_bucket = get_closure() + + if async_rs: + self.grad_reduce_queue.append( + (reduce_scatter_view_out_event, free_up_grad_bucket, bucket_id) + ) + return True + + free_up_grad_bucket() + + return True + + +class PrefetchOrder(Enum): + """ + An enumeration of possible prefetch orders for data-parallel operations. + + Attributes: + FORWARD_PASS_ORDER (int): Prefetch in the order of forward pass computation. + BACKWARD_PASS_ORDER (int): Prefetch in the order of backward pass computation. + """ + + FORWARD_PASS_ORDER = 0 + BACKWARD_PASS_ORDER = 1 + + +class AllGatherPipeline: + """ + Pipeline for all-gathering parameters. + """ + + def __init__(self, param_and_grad_buffer: ParamAndGradBuffer) -> None: + self.buffer = param_and_grad_buffer + self.param_gather_event_map = {} + self.bucket_status = {i: BucketStatus.EMPTY for i in range(self.buffer.num_buckets)} + self.bucket_can_be_released = {i: False for i in range(self.buffer.num_buckets)} + + @property + def num_buckets(self): + """Return the number of buckets.""" + return self.buffer.num_buckets + + def reset(self): + """Reset the pipeline state.""" + if len(self.param_gather_event_map) > 0: + warnings.warn( + "There are still pending all-gather tasks, process them." 
+ f"Bucket status: {self.bucket_status}.", + UserWarning, + ) + while len(self.param_gather_event_map) > 0: + bucket_id = next(iter(self.param_gather_event_map)) + self.wait_bucket_ready(bucket_id) + for bucket_id in self.bucket_can_be_released: + self.bucket_can_be_released[bucket_id] = True + self.recycle_unused_buckets() + + assert all([status is BucketStatus.EMPTY for status in self.bucket_status.values()]), ( + f"There are still working buckets, it is not safe to reset. " + f"bucket_status: {self.bucket_status}." + ) + assert all( + [not can_be_released for can_be_released in self.bucket_can_be_released.values()] + ), ( + f"The bucket can be released table is in an abnormal state, not safe to reset. " + f"bucket_can_be_released: {self.bucket_can_be_released}." + ) + + def queue_bucket_to_all_gather( + self, + bucket_id: int, + prefetch: bool = False, + prefetch_order: PrefetchOrder = PrefetchOrder.FORWARD_PASS_ORDER, + suggested_AG_prefetch_size: Optional[int] = None, + ): + """Performs an asynchronous all-gather operation by queuing the task bucket into + a dedicated queue (NCCL CUDA Stream). + + This function is a part of FSDP (Fully Sharded Data Parallel) + implementation that handles the all-gather operation in a queue-based + manner. Instead of executing the all-gather immediately, it enqueues + the operation into a task queue, which helps manage system resources and + prevents overwhelming the GPU memory and communication bandwidth. + + The queued all-gather operation will: + * Collect distributed sharded parameters from all participating processes + * Reconstruct the full parameter tensor + + Args: + bucket_id (int): The bucket ID to be queued for all-gathering. + prefetch (bool, optional): Whether to prefetch the next bucket. Defaults to False. + prefetch_order (PrefetchOrder, optional): The order of prefetching. + Defaults to PrefetchOrder.FORWARD_PASS_ORDER. + suggested_AG_prefetch_size (Optional[int], optional): + The suggested prefetch size for all-gathering. Defaults to None. + """ + parameter_groups = self.buffer.parameter_groups + ag_buckets = [bucket_id] + + # If prefetch is enabled, we will add prefetch buckets to ag_buckets. + if prefetch: + if suggested_AG_prefetch_size is not None: + all_gather_size = parameter_groups[bucket_id].model_weight_buffer.bucket_index.size + while all_gather_size < suggested_AG_prefetch_size: + if prefetch_order == PrefetchOrder.FORWARD_PASS_ORDER: + next_bucket_id = bucket_id + 1 + else: + next_bucket_id = bucket_id - 1 + if next_bucket_id < 0 or next_bucket_id >= self.buffer.num_buckets: + break + + next_group = parameter_groups[next_bucket_id] + ag_buckets.append(next_bucket_id) + + all_gather_size += next_group.model_weight_buffer.bucket_index.size + bucket_id = next_bucket_id + else: + if prefetch_order == PrefetchOrder.FORWARD_PASS_ORDER: + next_bucket_id = bucket_id + 1 + else: + next_bucket_id = bucket_id - 1 + if next_bucket_id >= 0 and next_bucket_id < self.buffer.num_buckets: + ag_buckets.append(next_bucket_id) + + # Launch all-gather operations for all buckets in ag_buckets. 
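+        # Buckets that are already gathered or still in flight are skipped inside
+        # all_gather_bucket_and_set_items(), so queuing a bucket twice is harmless.
+        # Illustration (assumed numbers, not defaults): with
+        # suggested_AG_prefetch_size=200e6 elements and buckets of ~80e6 elements,
+        # the prefetch loop above queues the current bucket plus two more.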
+ for bucket_id in ag_buckets: + self.all_gather_bucket_and_set_items(bucket_id, async_op=True) + + def wait_bucket_ready(self, bucket_id, empty_ok=False): + """Wait for the bucket to be ready.""" + if self.bucket_status[bucket_id] == BucketStatus.READY_TO_USE: + return + if self.bucket_status[bucket_id] == BucketStatus.EMPTY: + if empty_ok: + return + raise ValueError(f"Bucket {bucket_id} is empty.") + + param_gather_event, mark_bucket_ready_to_use = self.param_gather_event_map.pop(bucket_id) + param_gather_event.wait() + mark_bucket_ready_to_use() + + @torch.no_grad() + def release_bucket(self, bucket_id: int): + """Release the bucket.""" + if self.bucket_status[bucket_id] == BucketStatus.EMPTY: + return + + if self.bucket_status[bucket_id] == BucketStatus.COMMUNICATING: + raise ValueError(f"Bucket {bucket_id} is communicating.") + + wbuf = self.buffer.parameter_groups[bucket_id].model_weight_buffer + wbuf.free_bucket_storage() + self.bucket_status[bucket_id] = BucketStatus.EMPTY + + def recycle_unused_buckets(self): + """Recycle the unused buckets.""" + for bucket_id, can_be_released in self.bucket_can_be_released.items(): + if can_be_released: + self.release_bucket(bucket_id) + self.bucket_can_be_released[bucket_id] = False + + @torch.no_grad() + def all_gather_bucket_and_set_items(self, bucket_id: int, async_op: bool = False) -> None: + """All-gather the bucket and set the items.""" + self.bucket_can_be_released[bucket_id] = False + if self.bucket_status[bucket_id] != BucketStatus.EMPTY: + return + + self.bucket_status[bucket_id] = BucketStatus.COMMUNICATING + wbuf = self.buffer.parameter_groups[bucket_id].model_weight_buffer + + # Lazy release the unused buckets. + self.recycle_unused_buckets() + bucket = wbuf.fetch_bucket(and_allocate_params_data=True) + param_gather_event = torch.distributed.all_gather_into_tensor( + output_tensor=bucket.data, + input_tensor=wbuf.get_shard_from_local_buffer(), + group=wbuf.data_parallel_group, + async_op=async_op, + ) + + def get_closure(): + @torch.no_grad() + def mark_bucket_ready_to_use(): + nonlocal wbuf, bucket_id + self.bucket_status[bucket_id] = BucketStatus.READY_TO_USE + + return mark_bucket_ready_to_use + + mark_bucket_ready_to_use = get_closure() + + if async_op: + self.param_gather_event_map[bucket_id] = (param_gather_event, mark_bucket_ready_to_use) + return + mark_bucket_ready_to_use() + + +@torch.no_grad() +def gradient_reduce_preprocessing(grad_data, scaling_factor, ddp_config): + """ + Gradient reduce preprocessing for gradient averaging and gradient scaling. + """ + + if scaling_factor is None: + reduce_op = torch.distributed.ReduceOp.SUM + elif ddp_config.average_in_collective: + reduce_op = torch.distributed.ReduceOp.AVG + elif ddp_config.gradient_reduce_div_fusion and grad_data.dtype != torch.bfloat16: + reduce_op = torch.distributed._make_nccl_premul_sum(scaling_factor) + else: + grad_data.mul_(scaling_factor) + reduce_op = torch.distributed.ReduceOp.SUM + + return reduce_op + + +def check_gpu_memory(threshold=0.9): + """ + Check if the GPU memory is over the threshold. + Args: + threshold (float, optional): The threshold to check if the GPU memory is over. + Defaults to 0.9. + Returns: + bool: True if the GPU memory is over the threshold. 
+ """ + if not torch.cuda.is_available(): + return False + device = torch.cuda.current_device() + allocated = torch.cuda.memory_allocated(device) + reserved = torch.cuda.memory_reserved(device) + total = torch.cuda.get_device_properties(device).total_memory + + allocated_ratio = allocated / total + reserved_ratio = reserved / total + + near_full = allocated_ratio >= threshold or reserved_ratio >= threshold + + if near_full: + log_on_each_pipeline_stage( + logger, + logging.INFO, + f"GPU Memory: Allocated: {allocated_ratio:.2%}, Reserved: {reserved_ratio:.2%}", + ) + return near_full + + +class ResetParametersContext: + """ + Context manager for resetting parameters for meta device initialization module. + """ + + def __init__(self, init_param_with_fp8=False, with_cuda_rng_tracker=False): + self.init_param_with_fp8 = init_param_with_fp8 + self.with_cuda_rng_tracker = with_cuda_rng_tracker + + def __enter__(self): + self.stack = ExitStack() + if self.init_param_with_fp8: + args = {"enabled": True} + if "preserve_high_precision_init_val" in inspect.signature(fp8_model_init).parameters: + args["preserve_high_precision_init_val"] = True + self.stack.enter_context(fp8_model_init(**args)) + + if self.with_cuda_rng_tracker: + self.stack.enter_context(get_cuda_rng_tracker().fork()) + + return self + + def __exit__(self, *exc_details): + self.stack.__exit__(*exc_details) diff --git a/megatron/core/distributed/data_parallel_base.py b/megatron/core/distributed/data_parallel_base.py index aed576a..24ab894 100644 --- a/megatron/core/distributed/data_parallel_base.py +++ b/megatron/core/distributed/data_parallel_base.py @@ -1,96 +1,96 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from contextlib import contextmanager - -import torch - -from ..transformer.module import MegatronModule -from ..transformer.transformer_config import TransformerConfig - - -class _BaseDataParallel(MegatronModule): - """A template class for DistributedDataParallel implementations.""" - - def __init__(self, config: TransformerConfig, module: torch.nn.Module): - super().__init__(config=config) - self.module = module - - def forward(self, *inputs, **kwargs): - """ - Calls the wrapped module's forward() method. - """ - return self.module(*inputs, **kwargs) - - @contextmanager - def no_sync(self): - """ - Context manager that turns off gradient synchronization. - """ - try: - yield - finally: - pass - - def start_grad_sync(self, *unused): - """ - Initiates grad sync (all-reduce or reduce-scatter) communication operations - for all model gradients. - - When overlap_grad_reduce is set to True, dispatches asynchronous communication - calls. When overlap_grad_reduce is set to False, calls synchronous - communication ops. - """ - pass - - def scale_gradients(self, scaling_factor: float) -> None: - """Scale all gradients inside the buffers by `scaling_factor`.""" - pass - - def finish_grad_sync(self): - """ - Finishes grad sync (all-reduce or reduce-scatter) communication operations - for all model gradients. - - When overlap_grad_reduce is set to True, waits for asynchronous communication - calls to complete. When overlap_grad_reduce is set to False, calls synchronous - communication ops. - """ - pass - - def zero_grad_buffer(self): - """ - Zeros out all grad buffers. Needs to be called at the beginning of each - training iteration. - """ - pass - - def broadcast_params(self): - """ - Syncs parameters across all DP ranks. 
- """ - pass - - def state_dict(self, prefix='', keep_vars=False): - """ - Returns a dictionary containing references to the whole state of the - wrapped module. - - Both parameters and persistent buffers (e.g. running averages) are included. - Keys are corresponding parameter and buffer names. Parameters and buffers - set to None are not included. - """ - return self.module.state_dict(prefix=prefix, keep_vars=keep_vars) - - def state_dict_for_save_checkpoint(self, prefix='', keep_vars=False): - """ - Returns wrapped module's state_dict for checkpoint saving. - """ - return self.module.state_dict_for_save_checkpoint(prefix=prefix, keep_vars=keep_vars) - - def load_state_dict(self, state_dict, strict=True): - """ - Copies parameters and buffers from state_dict into the wrapped module and its - descendants. If strict is True, then the keys of state_dict must exactly match - the keys returned by this module’s state_dict() function. - """ - self.module.load_state_dict(state_dict, strict=strict) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from contextlib import contextmanager + +import torch + +from ..transformer.module import MegatronModule +from ..transformer.transformer_config import TransformerConfig + + +class _BaseDataParallel(MegatronModule): + """A template class for DistributedDataParallel implementations.""" + + def __init__(self, config: TransformerConfig, module: torch.nn.Module): + super().__init__(config=config) + self.module = module + + def forward(self, *inputs, **kwargs): + """ + Calls the wrapped module's forward() method. + """ + return self.module(*inputs, **kwargs) + + @contextmanager + def no_sync(self): + """ + Context manager that turns off gradient synchronization. + """ + try: + yield + finally: + pass + + def start_grad_sync(self, *unused): + """ + Initiates grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, dispatches asynchronous communication + calls. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + pass + + def scale_gradients(self, scaling_factor: float) -> None: + """Scale all gradients inside the buffers by `scaling_factor`.""" + pass + + def finish_grad_sync(self): + """ + Finishes grad sync (all-reduce or reduce-scatter) communication operations + for all model gradients. + + When overlap_grad_reduce is set to True, waits for asynchronous communication + calls to complete. When overlap_grad_reduce is set to False, calls synchronous + communication ops. + """ + pass + + def zero_grad_buffer(self): + """ + Zeros out all grad buffers. Needs to be called at the beginning of each + training iteration. + """ + pass + + def broadcast_params(self): + """ + Syncs parameters across all DP ranks. + """ + pass + + def state_dict(self, prefix='', keep_vars=False, destination=None): + """ + Returns a dictionary containing references to the whole state of the + wrapped module. + + Both parameters and persistent buffers (e.g. running averages) are included. + Keys are corresponding parameter and buffer names. Parameters and buffers + set to None are not included. + """ + return self.module.state_dict(prefix=prefix, keep_vars=keep_vars, destination=destination) + + def state_dict_for_save_checkpoint(self, prefix='', keep_vars=False): + """ + Returns wrapped module's state_dict for checkpoint saving. 
+ """ + return self.module.state_dict_for_save_checkpoint(prefix=prefix, keep_vars=keep_vars) + + def load_state_dict(self, state_dict, strict=True): + """ + Copies parameters and buffers from state_dict into the wrapped module and its + descendants. If strict is True, then the keys of state_dict must exactly match + the keys returned by this module’s state_dict() function. + """ + self.module.load_state_dict(state_dict, strict=strict) diff --git a/megatron/core/distributed/distributed_data_parallel_config.py b/megatron/core/distributed/distributed_data_parallel_config.py index fbcd930..fe7cec5 100644 --- a/megatron/core/distributed/distributed_data_parallel_config.py +++ b/megatron/core/distributed/distributed_data_parallel_config.py @@ -1,49 +1,78 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from dataclasses import dataclass -from typing import Optional - - -@dataclass -class DistributedDataParallelConfig: - """Configuration for DistributedDataParallel.""" - - grad_reduce_in_fp32: bool = False - """If true, reduce grads in fp32.""" - - overlap_grad_reduce: bool = False - """If true, overlap grad all-reduce / reduce-scatter with backward compute.""" - - overlap_param_gather: bool = False - """If true, overlap param all-gather with forward compute.""" - - align_param_gather: bool = False - """If true, all PP stages will launch param all-gathers simultaneously. Otherwise, each - PP stage will independently launch as needed. - """ - - use_distributed_optimizer: bool = False - """If true, issue reduce-scatter collectives to aggregate gradients and clean up - originally allocated model parameters, otherwise issue all-reduce collectives. - """ - - num_distributed_optimizer_instances: int = 1 - """Sets the factor by which the DP domain is sharded to have the partial DistOpt - enabled. Defaults to 1, which means DistOpt is across entire DP domain. - """ - - check_for_nan_in_grad: bool = False - """ If true, check for NaNs in gradients _before_ communication collective.""" - - bucket_size: Optional[int] = None - """Maximum number of parameters in each bucket. If unspecified, MCore uses a default - value of max(40000000, 1000000 * dp_size) parameters (larger DP sizes need larger - buckets to ensure collectives do not become latency-bound).""" - - average_in_collective: bool = False - """If true, compute average in collective directly, as opposed to dividing by the - dp_size first and then computing sum in the collective.""" - - fp8_param_gather: bool = False - """If true, keep the compute param in fp8 (do not use any other intermediate dtype) and - perform the param all-gather in fp8.""" +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class DistributedDataParallelConfig: + """Configuration for DistributedDataParallel.""" + + grad_reduce_in_fp32: bool = False + """If true, reduce grads in fp32.""" + + overlap_grad_reduce: bool = False + """If true, overlap grad all-reduce / reduce-scatter with backward compute.""" + + overlap_param_gather: bool = False + """If true, overlap param all-gather with forward compute.""" + + align_param_gather: bool = False + """If true, all PP stages will launch param all-gathers simultaneously. Otherwise, each + PP stage will independently launch as needed. 
+ """ + + use_distributed_optimizer: bool = False + """If true, issue reduce-scatter collectives to aggregate gradients and clean up + originally allocated model parameters, otherwise issue all-reduce collectives. + """ + + num_distributed_optimizer_instances: int = 1 + """Sets the factor by which the DP domain is sharded to have the partial DistOpt + enabled. Defaults to 1, which means DistOpt is across entire DP domain. + """ + + check_for_nan_in_grad: bool = False + """If true, check for NaNs and Infs in gradients _before_ communication collective.""" + + check_for_large_grads: bool = False + """If true, check for unexpectedly large gradients _before_ communication collective.""" + + bucket_size: Optional[int] = None + """Maximum number of parameters in each bucket. If unspecified, MCore uses a default + value of max(40000000, 1000000 * dp_size) parameters (larger DP sizes need larger + buckets to ensure collectives do not become latency-bound).""" + + pad_buckets_for_high_nccl_busbw: bool = False + """If true, make sure the bucket size is divisible by a large power of 2 (2^16) to + ensure NCCL collectives have high bus bandwidth at large DP counts, since NCCL + message size (which for ring algorithms is bucket_size / dp_size) apparently needs + to be divisible by a power of 2 for high busbw.""" + + average_in_collective: bool = False + """If true, compute average in collective directly, as opposed to dividing by the + dp_size first and then computing sum in the collective.""" + + fp8_param_gather: bool = False + """If true, keep the compute param in fp8 (do not use any other intermediate dtype) and + perform the param all-gather in fp8.""" + + use_custom_fsdp: bool = False + """If true, use the FSDP code path for DDP.""" + + data_parallel_sharding_strategy: str = 'no_shard' + """Sharding strategy for FSDP. Valid values are 'no_shard', 'optim', + 'optim_grads', 'optim_grads_params'.""" + + gradient_reduce_div_fusion: bool = True + """If true, perform gradient reduce and division fusion.""" + + suggested_communication_unit_size: int = 400_000_000 + """When batch communication is needed across multiple buckets, + this environment variable guides the size of communication unit size.""" + + preserve_fp32_weights: bool = True + """If true, preserve fp32 weights in the custom FSDP ParamAndGradBuffer.""" + + keep_fp8_transpose_cache_when_using_custom_fsdp: bool = False + """If true, keep the fp8 transpose cache when using custom FSDP.""" diff --git a/megatron/core/distributed/finalize_model_grads.py b/megatron/core/distributed/finalize_model_grads.py index db31fc0..e04da87 100644 --- a/megatron/core/distributed/finalize_model_grads.py +++ b/megatron/core/distributed/finalize_model_grads.py @@ -1,284 +1,325 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from typing import List, Optional, Union - -import torch -from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors - -try: - from torch.distributed._tensor import DTensor, distribute_tensor - - HAVE_DTENSOR = True -except ImportError: - HAVE_DTENSOR = False - -from .. import parallel_state -from ..transformer.transformer_config import TransformerConfig -from ..utils import get_attr_wrapped_model, get_model_config - - -def _unshard_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: - """ - Unshards the input tensor if it is a DTensor and otherwise returns the - tensor unmodified. - - Args: - tensor (Union[torch.Tensor, DTensor]): The tensor to potentially unshard. 
- - Returns: - An unsharded version of the input tensor if it is a DTensor, or the - input tensor unmodified if it is not a DTensor. - """ - if HAVE_DTENSOR and isinstance(tensor, DTensor): - unsharded_tensor = tensor.full_tensor() - for k, v in vars(tensor).items(): - setattr(unsharded_tensor, k, v) - return unsharded_tensor - return tensor - - -def _reshard_if_dtensor( - tensor_to_shard: torch.Tensor, reference_tensor: Union[torch.Tensor, "DTensor"] -) -> Union[torch.Tensor, "DTensor"]: - """ - Reshards the input tensor to match the sharding configuration of the - reference tensor if the reference tensor is a DTensor. Otherwise, returns - the reference tensor unmodified. - - Args: - tensor_to_shard (torch.Tensor): The tensor to be potentially sharded. - reference_tensor (Union[torch.Tensor, DTensor]): The reference tensor - for the sharding configuration. - - Returns: - Union[torch.Tensor, DTensor]: The sharded tensor matching the reference tensor's - configuration, or the reference tensor itself if it is not a DTensor. - """ - if HAVE_DTENSOR and isinstance(reference_tensor, DTensor): - sharded_tensor = distribute_tensor( - tensor_to_shard, - device_mesh=reference_tensor.device_mesh, - placements=reference_tensor.placements, - ) - for k, v in vars(reference_tensor).items(): - setattr(sharded_tensor, k, v) - return sharded_tensor - return reference_tensor - - -def _allreduce_conditional_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): - """ - All-reduce conditional embedding grads. - - Reduce grads across all the pp stages to ensure that parameters of the conditional embedders - (e.g., timestep embedder, FPS embedder, label embedder) stay in sync. - This is for the models with replicated embedders on each PP / VPP rank, like diffusion models. - """ - - if parallel_state.get_pipeline_model_parallel_world_size() > 1 and getattr( - config, "has_cond_embedder", False - ): - grads_dict = {} - for model_chunk in model: - for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): - if param.requires_grad and getattr(param, 'pipeline_parallel', False): - grad = param.main_grad - if name in grads_dict: - # Add all the virtual PP rank's gradients to - # the first local virtual PP rank. - grads_dict[name][0].add_(grad) - # Append to the end for later update after cross-rank reduce. - grads_dict[name].append(grad) - else: - grads_dict[name] = [grad] - if grads_dict: - # All-reduce the gradient on the first VPP rank. - grads = [param_grad[0] for _, param_grad in grads_dict.items()] - coalesced = _flatten_dense_tensors(grads) - torch.distributed.all_reduce( - coalesced, group=parallel_state.get_pipeline_model_parallel_group() - ) - for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): - buf.copy_(synced) - - # Update the gradients on other VPP ranks. - for grads in grads_dict.values(): - for grad in grads[1:]: - grad.copy_(grads[0]) - - -def _allreduce_word_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): - """ - All-reduce word embedding grads. - - Reduce grads across first and last stages to ensure that word_embeddings parameters stay in - sync. 
- """ - - if ( - parallel_state.is_rank_in_embedding_group(ignore_virtual=True) - and torch.distributed.get_world_size(parallel_state.get_embedding_group()) > 1 - ): - if parallel_state.is_pipeline_first_stage(ignore_virtual=True): - model_module = model[0] - elif parallel_state.is_pipeline_last_stage(ignore_virtual=True): - model_module = model[-1] - else: # We do not support an interleaved schedule for models with encoders yet. - model_module = model[0] - - model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) - if model_module.share_embeddings_and_output_weights: - weight = model_module.shared_embedding_or_output_weight() - grad_attr = "main_grad" if hasattr(weight, "main_grad") else "grad" - orig_grad = getattr(weight, grad_attr) - grad = _unshard_if_dtensor(orig_grad) - torch.distributed.all_reduce(grad, group=parallel_state.get_embedding_group()) - setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) - - -def _allreduce_position_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): - """ - All-reduce position_embeddings grad across encoder and decoder stages to ensure that position - embeddings parameters stay in sync. - """ - if ( - parallel_state.is_rank_in_position_embedding_group() - and torch.distributed.get_world_size(parallel_state.get_position_embedding_group()) > 1 - ): - if parallel_state.is_pipeline_first_stage(ignore_virtual=True): - model_module = model[0] - elif parallel_state.is_pipeline_last_stage(ignore_virtual=True): - model_module = model[-1] - else: # We do not support an interleaved schedule for models with encoders yet. - model_module = model[0] - - model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) - assert hasattr(model_module, 'position_embeddings') - weight = model_module.position_embeddings.weight - grad_attr = "main_grad" if hasattr(weight, "main_grad") else "grad" - orig_grad = getattr(weight, grad_attr) - grad = _unshard_if_dtensor(orig_grad) - torch.distributed.all_reduce(grad, group=parallel_state.get_position_embedding_group()) - setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) - - -def _allreduce_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): - """ - All-reduce both word and position embeddings. - """ - _allreduce_word_embedding_grads(model, config) - _allreduce_position_embedding_grads(model, config) - - -def _allreduce_layernorm_grads(model: List[torch.nn.Module], config: TransformerConfig): - """ - All-reduce layernorm grads (for sequence parallelism). 
- """ - - # All-reduce layernorm parameters across model parallel nodes - # when sequence parallelism is used - if parallel_state.get_tensor_model_parallel_world_size() > 1 and ( - config.sequence_parallel or config.qk_layernorm - ): - params = [] - grads = [] - for model_chunk in model: - for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): - if ( - param.requires_grad - and getattr(param, 'sequence_parallel', False) - or 'q_layernorm' in name - or 'k_layernorm' in name - ): - params.append(param) - grad_attr = "main_grad" if hasattr(param, "main_grad") else "grad" - grad = getattr(param, grad_attr) - grad = _unshard_if_dtensor(grad) - grads.append(grad.data) - if grads: - coalesced = _flatten_dense_tensors(grads) - torch.distributed.all_reduce( - coalesced, group=parallel_state.get_tensor_model_parallel_group() - ) - for param, buf, synced in zip( - params, grads, _unflatten_dense_tensors(coalesced, grads) - ): - buf.copy_(synced) - grad_attr = "main_grad" if hasattr(param, "main_grad") else "grad" - orig_grad = getattr(param, grad_attr) - setattr(param, grad_attr, _reshard_if_dtensor(buf, orig_grad)) - - -def finalize_model_grads(model: List[torch.nn.Module], num_tokens: Optional[torch.Tensor] = None): - """ - All-reduce all model grads across DP replicas, layernorm grads for sequence parallelism, - embedding grads across first and last pipeline stages (if not tied), - scale gradients by `num_tokens`. - """ - - config = get_model_config(model[0]) - - # All-reduce / reduce-scatter across DP replicas. - if config.timers is not None: - config.timers('all-grads-sync', log_level=1).start(barrier=config.barrier_with_L1_time) - for model_chunk in model: - model_chunk.finish_grad_sync() - if config.timers is not None: - config.timers('all-grads-sync').stop() - - # All-reduce t_embedder grads (for pp & vpp of DiT). - if config.timers is not None: - config.timers('conditional-embedder-grads-all-reduce', log_level=1).start( - barrier=config.barrier_with_L1_time - ) - _allreduce_conditional_embedding_grads(model, config) - if config.timers is not None: - config.timers('conditional-embedder-grads-all-reduce').stop() - - # All-reduce layer-norm grads (for sequence parallelism). - if config.timers is not None: - config.timers('layernorm-grads-all-reduce', log_level=1).start( - barrier=config.barrier_with_L1_time - ) - _allreduce_layernorm_grads(model, config) - if config.timers is not None: - config.timers('layernorm-grads-all-reduce').stop() - - # All-reduce embedding grads (for pipeline parallelism). - if config.timers is not None: - config.timers('embedding-grads-all-reduce', log_level=1).start( - barrier=config.barrier_with_L1_time - ) - _allreduce_embedding_grads(model, config) - if config.timers is not None: - config.timers('embedding-grads-all-reduce').stop() - - # normalize gradients for per-token loss normalization. - # if we are using by the number of tokens, then we use that as a divisor. this number - # will be the total number of non-padded tokens in the global batch. - if num_tokens is not None: - - # the number of tokens is only present on the last stage, so broadcast it - # to the other ranks in the pipeline parallel group. 
- last_rank = parallel_state.get_pipeline_model_parallel_last_rank() - pp_group = parallel_state.get_pipeline_model_parallel_group() - - if not isinstance(last_rank, list): - assert not isinstance(last_rank, list) - last_rank = [last_rank] - assert not isinstance(pp_group, list) - pp_group = [pp_group] - - # need to do a broadcast for every pp group, even though num_tokens should be the same. - num_tokens_list = [] - for lr, group in zip(last_rank, pp_group): - torch.distributed.broadcast(num_tokens, src=lr, group=group) - num_tokens_list.append(torch.clone(num_tokens)) - assert all(x.item() == num_tokens_list[0] for x in num_tokens_list) - - # all-reduce across DP ranks. - torch.distributed.all_reduce(num_tokens, group=parallel_state.get_data_parallel_group()) - for model_chunk in model: - if num_tokens > 0: - scaling = 1.0 / num_tokens - model_chunk.scale_gradients(scaling) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from typing import List, Optional, Union + +import torch +from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors + +try: + from torch.distributed._tensor import DTensor, distribute_tensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + +from .. import parallel_state +from ..transformer.moe.moe_utils import get_updated_expert_bias +from ..transformer.transformer_config import TransformerConfig +from ..utils import get_attr_wrapped_model, get_model_config + + +def _get_main_grad_attr(param: torch.nn.Parameter, use_custom_fsdp: bool = False): + if use_custom_fsdp: + return "fsdp_managed_main_grad" + if hasattr(param, "main_grad"): + return "main_grad" + return "grad" + + +def _unshard_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: + """ + Unshards the input tensor if it is a DTensor and otherwise returns the + tensor unmodified. + + Args: + tensor (Union[torch.Tensor, DTensor]): The tensor to potentially unshard. + + Returns: + An unsharded version of the input tensor if it is a DTensor, or the + input tensor unmodified if it is not a DTensor. + """ + if HAVE_DTENSOR and isinstance(tensor, DTensor): + unsharded_tensor = tensor.full_tensor() + for k, v in vars(tensor).items(): + setattr(unsharded_tensor, k, v) + return unsharded_tensor + return tensor + + +def _reshard_if_dtensor( + tensor_to_shard: torch.Tensor, reference_tensor: Union[torch.Tensor, "DTensor"] +) -> Union[torch.Tensor, "DTensor"]: + """ + Reshards the input tensor to match the sharding configuration of the + reference tensor if the reference tensor is a DTensor. Otherwise, returns + the reference tensor unmodified. + + Args: + tensor_to_shard (torch.Tensor): The tensor to be potentially sharded. + reference_tensor (Union[torch.Tensor, DTensor]): The reference tensor + for the sharding configuration. + + Returns: + Union[torch.Tensor, DTensor]: The sharded tensor matching the reference tensor's + configuration, or the reference tensor itself if it is not a DTensor. + """ + if HAVE_DTENSOR and isinstance(reference_tensor, DTensor): + sharded_tensor = distribute_tensor( + tensor_to_shard, + device_mesh=reference_tensor.device_mesh, + placements=reference_tensor.placements, + ) + for k, v in vars(reference_tensor).items(): + setattr(sharded_tensor, k, v) + return sharded_tensor + return reference_tensor + + +def _allreduce_conditional_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce conditional embedding grads. 
+ + Reduce grads across all the pp stages to ensure that parameters of the conditional embedders + (e.g., timestep embedder, FPS embedder, label embedder) stay in sync. + This is for the models with replicated embedders on each PP / VPP rank, like diffusion models. + """ + + if parallel_state.get_pipeline_model_parallel_world_size() > 1 and getattr( + config, "has_cond_embedder", False + ): + grads_dict = {} + for model_chunk in model: + for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): + if param.requires_grad and getattr(param, 'pipeline_parallel', False): + grad = param.main_grad + if name in grads_dict: + # Add all the virtual PP rank's gradients to + # the first local virtual PP rank. + grads_dict[name][0].add_(grad) + # Append to the end for later update after cross-rank reduce. + grads_dict[name].append(grad) + else: + grads_dict[name] = [grad] + if grads_dict: + # All-reduce the gradient on the first VPP rank. + grads = [param_grad[0] for _, param_grad in grads_dict.items()] + coalesced = _flatten_dense_tensors(grads) + torch.distributed.all_reduce( + coalesced, group=parallel_state.get_pipeline_model_parallel_group() + ) + for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): + buf.copy_(synced) + + # Update the gradients on other VPP ranks. + for grads in grads_dict.values(): + for grad in grads[1:]: + grad.copy_(grads[0]) + + +def _allreduce_word_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce word embedding grads. + + Reduce grads across first and last stages to ensure that word_embeddings parameters stay in + sync. + """ + + if ( + parallel_state.is_rank_in_embedding_group(ignore_virtual=True) + and torch.distributed.get_world_size(parallel_state.get_embedding_group()) > 1 + ): + if parallel_state.is_pipeline_first_stage(ignore_virtual=True): + model_module = model[0] + elif parallel_state.is_pipeline_last_stage(ignore_virtual=True): + model_module = model[-1] + else: # We do not support an interleaved schedule for models with encoders yet. + model_module = model[0] + + ddp_config = model_module.ddp_config + model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) + if model_module.share_embeddings_and_output_weights: + weight = model_module.shared_embedding_or_output_weight() + grad_attr = _get_main_grad_attr(weight, ddp_config.use_custom_fsdp) + orig_grad = getattr(weight, grad_attr) + grad = _unshard_if_dtensor(orig_grad) + torch.distributed.all_reduce(grad, group=parallel_state.get_embedding_group()) + setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) + + +def _allreduce_position_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce position_embeddings grad across encoder and decoder stages to ensure that position + embeddings parameters stay in sync. + """ + if ( + parallel_state.is_rank_in_position_embedding_group() + and torch.distributed.get_world_size(parallel_state.get_position_embedding_group()) > 1 + ): + if parallel_state.is_pipeline_first_stage(ignore_virtual=True): + model_module = model[0] + elif parallel_state.is_pipeline_last_stage(ignore_virtual=True): + model_module = model[-1] + else: # We do not support an interleaved schedule for models with encoders yet. 
+ model_module = model[0] + + ddp_config = model_module.ddp_config + model_module = get_attr_wrapped_model(model_module, 'pre_process', return_model_obj=True) + assert hasattr(model_module, 'position_embeddings') + weight = model_module.position_embeddings.weight + grad_attr = _get_main_grad_attr(weight, ddp_config.use_custom_fsdp) + orig_grad = getattr(weight, grad_attr) + grad = _unshard_if_dtensor(orig_grad) + torch.distributed.all_reduce(grad, group=parallel_state.get_position_embedding_group()) + setattr(weight, grad_attr, _reshard_if_dtensor(grad, orig_grad)) + + +def _allreduce_embedding_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce both word and position embeddings. + """ + _allreduce_word_embedding_grads(model, config) + _allreduce_position_embedding_grads(model, config) + + +def _allreduce_layernorm_grads(model: List[torch.nn.Module], config: TransformerConfig): + """ + All-reduce layernorm grads (for sequence parallelism). + """ + + # All-reduce layernorm parameters across model parallel nodes + # when sequence parallelism is used + if parallel_state.get_tensor_model_parallel_world_size() > 1 and ( + config.sequence_parallel or config.qk_layernorm + ): + params = [] + grads = [] + for model_chunk in model: + for name, param in get_attr_wrapped_model(model_chunk, 'named_parameters')(): + if param.requires_grad and ( + getattr(param, 'sequence_parallel', False) + or 'q_layernorm' in name + or 'k_layernorm' in name + ): + params.append(param) + grad_attr = _get_main_grad_attr(param, config.use_custom_fsdp) + grad = getattr(param, grad_attr) + grad = _unshard_if_dtensor(grad) + grads.append(grad.data) + if grads: + coalesced = _flatten_dense_tensors(grads) + torch.distributed.all_reduce( + coalesced, group=parallel_state.get_tensor_model_parallel_group() + ) + for param, buf, synced in zip( + params, grads, _unflatten_dense_tensors(coalesced, grads) + ): + buf.copy_(synced) + grad_attr = _get_main_grad_attr(param, config.use_custom_fsdp) + orig_grad = getattr(param, grad_attr) + setattr(param, grad_attr, _reshard_if_dtensor(buf, orig_grad)) + + +def _update_router_expert_bias(model: List[torch.nn.Module], config: TransformerConfig): + """ + Update the expert bias of the router for a global batch. + This requires all-reduce of local_tokens_per_expert across TPxCPxDP ranks + """ + tokens_per_expert_list = [] + expert_bias_list = [] + for model_chunk in model: + for module in get_attr_wrapped_model(model_chunk, 'modules')(): + if hasattr(module, 'expert_bias'): + tokens_per_expert_list.append(module.local_tokens_per_expert) + expert_bias_list.append(module.expert_bias) + # For hybrid models with both MoE and Dense layers, this list can be empty. 
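+    # When expert biases are present, each one is nudged toward a balanced load:
+    # biases of under-loaded experts go up and those of over-loaded experts go
+    # down, with step size config.moe_router_bias_update_rate (see
+    # get_updated_expert_bias for the exact update rule).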
+ if len(expert_bias_list) == 0: + return + stacked_tokens_per_expert = torch.stack(tokens_per_expert_list, dim=0) + stacked_expert_bias = torch.stack(expert_bias_list, dim=0) + stacked_updated_expert_bias = get_updated_expert_bias( + stacked_tokens_per_expert, stacked_expert_bias, config.moe_router_bias_update_rate + ) + + for tokens_per_expert, expert_bias, updated_expert_bias in zip( + tokens_per_expert_list, expert_bias_list, stacked_updated_expert_bias + ): + tokens_per_expert.zero_() + expert_bias.copy_(updated_expert_bias) + + +def finalize_model_grads(model: List[torch.nn.Module], num_tokens: Optional[torch.Tensor] = None): + """ + All-reduce all model grads across DP replicas, layernorm grads for sequence parallelism, + embedding grads across first and last pipeline stages (if not tied), + scale gradients by `num_tokens`. + """ + + config = get_model_config(model[0]) + + # All-reduce / reduce-scatter across DP replicas. + if config.timers is not None: + config.timers('all-grads-sync', log_level=1).start(barrier=config.barrier_with_L1_time) + for model_chunk in model: + model_chunk.finish_grad_sync() + if config.timers is not None: + config.timers('all-grads-sync').stop() + + # All-reduce t_embedder grads (for pp & vpp of DiT). + if config.timers is not None: + config.timers('conditional-embedder-grads-all-reduce', log_level=1).start( + barrier=config.barrier_with_L1_time + ) + _allreduce_conditional_embedding_grads(model, config) + if config.timers is not None: + config.timers('conditional-embedder-grads-all-reduce').stop() + + # All-reduce layer-norm grads (for sequence parallelism). + if config.timers is not None: + config.timers('layernorm-grads-all-reduce', log_level=1).start( + barrier=config.barrier_with_L1_time + ) + _allreduce_layernorm_grads(model, config) + if config.timers is not None: + config.timers('layernorm-grads-all-reduce').stop() + + # All-reduce embedding grads (for pipeline parallelism). + if config.timers is not None: + config.timers('embedding-grads-all-reduce', log_level=1).start( + barrier=config.barrier_with_L1_time + ) + _allreduce_embedding_grads(model, config) + if config.timers is not None: + config.timers('embedding-grads-all-reduce').stop() + + if config.moe_router_enable_expert_bias: + _update_router_expert_bias(model, config) + + # normalize gradients for per-token loss normalization. + # if we are using by the number of tokens, then we use that as a divisor. this number + # will be the total number of non-padded tokens in the global batch. + if num_tokens is not None: + + # the number of tokens is only present on the last stage, so broadcast it + # to the other ranks in the pipeline parallel group. + last_rank = parallel_state.get_pipeline_model_parallel_last_rank() + pp_group = parallel_state.get_pipeline_model_parallel_group() + + if not isinstance(last_rank, list): + assert not isinstance(last_rank, list) + last_rank = [last_rank] + assert not isinstance(pp_group, list) + pp_group = [pp_group] + + # need to do a broadcast for every pp group, even though num_tokens should be the same. + num_tokens_list = [] + for lr, group in zip(last_rank, pp_group): + torch.distributed.broadcast(num_tokens, src=lr, group=group) + num_tokens_list.append(torch.clone(num_tokens)) + assert all(x.item() == num_tokens_list[0] for x in num_tokens_list) + + # all-reduce across DP ranks. 
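+        # The summed token count then becomes the per-token scaling divisor
+        # applied below via model_chunk.scale_gradients().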
+ torch.distributed.all_reduce(num_tokens, group=parallel_state.get_data_parallel_group()) + for model_chunk in model: + if num_tokens > 0: + scaling = 1.0 / num_tokens + model_chunk.scale_gradients(scaling) diff --git a/megatron/core/distributed/param_and_grad_buffer.py b/megatron/core/distributed/param_and_grad_buffer.py index 5095a7c..5929498 100644 --- a/megatron/core/distributed/param_and_grad_buffer.py +++ b/megatron/core/distributed/param_and_grad_buffer.py @@ -1,836 +1,882 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import logging -import math -from contextlib import nullcontext -from enum import Enum -from typing import Dict, List, Optional - -import torch -from torch.distributed import _coalescing_manager - -from megatron.core.rerun_state_machine import get_rerun_state_machine - -from ..utils import is_float8tensor, is_torch_min_version, log_on_each_pipeline_stage -from .distributed_data_parallel_config import DistributedDataParallelConfig - -logger = logging.getLogger(__name__) - - -if is_torch_min_version("1.13.0"): - dist_all_gather_func = torch.distributed.all_gather_into_tensor - dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor -else: - dist_all_gather_func = torch.distributed._all_gather_base - dist_reduce_scatter_func = torch.distributed._reduce_scatter_base - - -class BufferType(Enum): - """ - Enumeration for buffer type. - """ - - PARAM = 1 - GRAD = 2 - - -def shard_buffer(buffer: torch.Tensor, data_parallel_world_size: int): - """ - Shard buffer into data_parallel_world_size chunks of equal size. - """ - assert buffer.numel() % data_parallel_world_size == 0 - shard_size = buffer.numel() // data_parallel_world_size - sharded_buffer = [ - buffer[(r * shard_size) : ((r + 1) * shard_size)] for r in range(data_parallel_world_size) - ] - return sharded_buffer - - -class _ParamAndGradBucket: - """ - Bucket to keep track of a subset of the model's parameters and gradients. - - Args: - params: List of parameters whose gradients are collated in this bucket. - param_data: View in _ParamAndGradBuffer.param_data that this bucket is responsible for. - grad_data: View in _ParamAndGradBuffer.grad_data that this bucket is responsible for. - offset: Offset of this bucket's view in the larger _ParamAndGradBuffer. - numel_unpadded: Number of unpadded elements in bucket. - gradient_scaling_factor: This factor is utilized to scale gradients prior to their - communication. Its application is twofold: it facilitates the averaging of gradients - and the scaling of gradients in the context of the Mixture of Experts (MoE) model. - bucket_id: Index of bucket in buffer. - """ - - def __init__( - self, - params: List[torch.nn.Parameter], - param_data: Optional[torch.Tensor], - grad_data: torch.Tensor, - offset: int, - numel_unpadded: int, - gradient_scaling_factor: float, - bucket_id: int, - ): - self.params_list = params - self.params = set(params) - # Make sure there are no duplicate params. - assert len(self.params_list) == len(self.params) - self.param_data = param_data - self.grad_data = grad_data - # The distributed optimizer needs to keep track of this bucket's offset - # within the full grad_buffer. - self.offset = offset - self.numel_unpadded = numel_unpadded - self.gradient_scaling_factor = gradient_scaling_factor - self.bucket_id = bucket_id - - -class _ParamAndGradBucketGroup: - """ - Put multiple buckets into a group so that their communications can be aggregated together. 
- Provides functionality to register when params in the bucket group have grads ready to be - synced; an asynchronous communication call is automatically launched when _all_ params in - the bucket group have grads ready. - - Args: - buckets: A list of buckets. - ddp_config: DistributedDataParallel config object. - collective_group: intra_distributed_optimizer_instance_group if using distributed - optimizer, data_parallel_group if not. - collective_group_size: World size using the intra data-parallel group. - """ - - def __init__( - self, - buckets: List[_ParamAndGradBucket], - ddp_config: DistributedDataParallelConfig, - collective_group: torch.distributed.ProcessGroup, - collective_group_size: int, - ): - self.buckets = buckets - self.ddp_config = ddp_config - - if self.ddp_config.use_distributed_optimizer: - self.intra_distributed_optimizer_instance_group = collective_group - self.intra_distributed_optimizer_instance_size = collective_group_size - self.intra_distributed_optimizer_instance_rank = torch.distributed.get_rank( - group=collective_group - ) - else: - self.data_parallel_group = collective_group - - # State for bookkeeping: params is the set of parameters this bucket group is - # responsible for, params_with_grad is the set of parameters with grads - # available. When overlap_grad_reduce is True, communication (all-reduce - # or reduce-scatter) is issued when params_with_grad equals params. - self.param_to_bucket = {} - self.params = set() - for bucket in self.buckets: - for param in bucket.params_list: - self.param_to_bucket[param] = bucket - self.params.add(param) - - self.next_param_gather_bucket_group = None - - if self.ddp_config.num_distributed_optimizer_instances > 1: - self.inter_distributed_optimizer_instance_group = None - self.communication_stream = None - - self.reset() - self.param_gather_handle = None - self.param_gather_dispatched = False - self.grad_reduce_handle = None - - def reset(self): - """ - Reset metadata in bucket group in preparation for the next iteration of training. - """ - self.params_with_grad = set() - self.is_last_microbatch = True - - def check_for_nan_in_grad(self): - """ - Make sure norm of grads in bucket are not NaN prior to data-parallel - all-reduce / reduce-scatter. - """ - rerun_state_machine = get_rerun_state_machine() - for i in range(len(self.buckets)): - rerun_state_machine.validate_result( - result=self.buckets[i].grad_data.norm(p=2), - rejection_func=torch.isnan, - message=f"found NaN in local grad norm for bucket #{i} " - f"in backward pass before data-parallel communication collective", - tolerance=0.001, # 0.1% tolerance to account for non-deterministic FA backward - fatal=True, - ) - - def start_param_sync(self, force_sync: bool = False): - """ - Initiates all necessary param all-gathers for this bucket. - - When ddp_config.overlap_param_gather is set to True, dispatches an asynchronous - communication call (unless force_sync is True). When ddp_config.overlap_param_gather - is set to False, makes synchronous call. - - Args: - force_sync (bool, optional): force synchronous collective regardless of - other settings if true. - """ - assert self.ddp_config.use_distributed_optimizer - - if force_sync: - if self.param_gather_handle is not None: - self.param_gather_handle.wait() - self.param_gather_handle = None - return - else: - assert self.param_gather_handle is None - - async_op = self.ddp_config.overlap_param_gather and not force_sync - # Coalesce communication kernels across buckets in the bucket group. 
- with _coalescing_manager( - self.intra_distributed_optimizer_instance_group, async_ops=async_op - ) as cm: - for bucket in self.buckets: - local_data_view = shard_buffer( - bucket.param_data, self.intra_distributed_optimizer_instance_size - )[self.intra_distributed_optimizer_instance_rank] - dist_all_gather_func( - bucket.param_data, - local_data_view, - group=self.intra_distributed_optimizer_instance_group, - async_op=async_op, - ) - if async_op: - self.param_gather_handle = cm - else: - # When using `_coalescing_manager`, even if a synchronous op (async_op=False) is used, - # `cm` is not None, which is different from when `_coalescing_manager` is not used in - # which case the torch.distributed._all_gather_base() will return None. In order to - # maintain consistency with prior code, we need to manually set communication handle to - # None. - self.param_gather_handle = None - self.param_gather_dispatched = True - - def finish_param_sync(self, skip_next_bucket_dispatch: bool = False): - """ - Finishes param sync communication operation for this bucket. Dispatches - next bucket's param sync if available, unless skip_next_bucket_dispatch - is True. - - When ddp_config.overlap_param_gather is set to True, waits for asynchronous - communication call to complete (and dispatches one if one is not already - outstanding). Throws assertion error if ddp_config.overlap_param_gather is set to - False. - - Args: - skip_next_bucket_dispatch (bool, optional): if true, dispatch next - bucket's communication if available. - """ - assert self.ddp_config.use_distributed_optimizer - assert self.ddp_config.overlap_param_gather - - # If current bucket's param AG has not been dispatched, dispatch it now (e.g., first - # AG bucket in first model chunk if ddp_config.align_param_gather is False). - if not self.param_gather_dispatched: - self.start_param_sync() - - if self.param_gather_handle is not None: - self.param_gather_handle.wait() - self.param_gather_handle = None - # Dispatch next bucket's asynchronous param AG. - if self.next_param_gather_bucket_group is not None and not skip_next_bucket_dispatch: - self.next_param_gather_bucket_group.start_param_sync() - - def start_grad_sync(self): - """ - Initiates grad sync (all-reduce or reduce-scatter) communication operations - for all buckets in the bucket group. - - When ddp_config.overlap_grad_reduce is set to True, dispatches an asynchronous - communication call. When ddp_config.overlap_grad_reduce is set to False, makes - synchronous call. - """ - assert ( - self.grad_reduce_handle is None - ), 'Should not have multiple communication calls outstanding at once' - - if self.ddp_config.check_for_nan_in_grad: - self.check_for_nan_in_grad() - - # gradient_scaling_factor already takes into account whether we are computing - # an average or sum in the data-parallel collective. - for bucket in self.buckets: - if bucket.gradient_scaling_factor != 1.0: - bucket.grad_data *= bucket.gradient_scaling_factor - - # Decide reduce_op. - reduce_op = torch.distributed.ReduceOp.SUM - if self.ddp_config.average_in_collective: - reduce_op = torch.distributed.ReduceOp.AVG - - # We use the following stream synchronization for the gradient reduction - # within and across DistOpt instances. - - # Compute Stream: -------------Gradient compute------------------- - # Comm. Stream: ------(wait for NCCL)-----(wait for NCCL)------- - # NCCL Stream: -------RS------ -------AR------ - - # Use async communications only when overlap_grad_reduce is True. 
- async_op = ( - self.ddp_config.overlap_grad_reduce - and self.ddp_config.num_distributed_optimizer_instances == 1 - ) - if ( - self.ddp_config.num_distributed_optimizer_instances > 1 - and self.ddp_config.overlap_grad_reduce - ): - # Assign a communication stream if we have multiple DistOpt instances and we - # need to overlap communication. - stream_context = torch.cuda.stream(self.communication_stream) - - # The RS/AR communication stream needs to wait for the default stream - # to complete its gradient computation before launching the next - # gradient reduction collective. - self.communication_stream.wait_stream(torch.cuda.default_stream()) - else: - stream_context = nullcontext() - - if self.ddp_config.use_distributed_optimizer: - communication_group = self.intra_distributed_optimizer_instance_group - else: - communication_group = self.data_parallel_group - - # Coalesce communication kernels across buckets in the bucket group. - with stream_context, _coalescing_manager(communication_group, async_ops=async_op) as cm: - for bucket in self.buckets: - if self.ddp_config.use_distributed_optimizer: - local_data_view = shard_buffer( - bucket.grad_data, self.intra_distributed_optimizer_instance_size - )[self.intra_distributed_optimizer_instance_rank] - dist_reduce_scatter_func( - local_data_view, - bucket.grad_data, - op=reduce_op, - group=communication_group, - async_op=async_op, - ) - else: - torch.distributed.all_reduce( - bucket.grad_data, op=reduce_op, group=communication_group, async_op=async_op - ) - - # With multiple DistOpt instances, we need to all-reduce across instances. - if ( - self.ddp_config.use_distributed_optimizer - and self.ddp_config.num_distributed_optimizer_instances > 1 - ): - - # Create a new coalescing manager for the inter-instance all-reduce. - with stream_context, _coalescing_manager( - self.inter_distributed_optimizer_instance_group, async_ops=async_op - ) as cm: - for bucket in self.buckets: - local_data_view = shard_buffer( - bucket.grad_data, self.intra_distributed_optimizer_instance_size - )[self.intra_distributed_optimizer_instance_rank] - - torch.distributed.all_reduce( - local_data_view, - op=reduce_op, - group=self.inter_distributed_optimizer_instance_group, - async_op=async_op, - ) - - if async_op: - self.grad_reduce_handle = cm - else: - # When using `_coalescing_manager`, even if a synchronous op (async_op=False) is used, - # `cm` is not None, which is different from when `_coalescing_manager` is not used in - # which case the torch.distributed._reduce_scatter_base() will return None. In order to - # maintain consistency with prior code, we need to manually set communication handle to - # None. - self.grad_reduce_handle = None - - def finish_grad_sync(self): - """ - Finishes grad sync (all-reduce or reduce-scatter) communication operations - for all buckets in the bucket group. - - When ddp_config.overlap_grad_reduce is set to True, waits for asynchronous - communication call to complete. When ddp_config.overlap_grad_reduce is set to False, - makes synchronous call. - """ - self.param_gather_dispatched = False - # If overlap_grad_reduce is False, start (and finish) synchronous communication call here. - if not self.ddp_config.overlap_grad_reduce: - self.start_grad_sync() - return - # When using multiple DistOpt instances, we don't need to sync here as we launch - # communications on a separate communication stream. 
- if self.ddp_config.num_distributed_optimizer_instances > 1: - torch.cuda.default_stream().wait_stream(self.communication_stream) - return - assert self.grad_reduce_handle is not None, ( - f'Communication call has not been issued for this bucket ' - f'({len(self.params_with_grad)}/{len(self.params)} params have grad available)' - ) - self.grad_reduce_handle.wait() - self.grad_reduce_handle = None - - def register_grad_ready(self, param: torch.nn.Parameter): - """ - Registers grads for the passed-in param to be "ready" for grad sync. - - When the number of microbatches is greater than 1, we only want to register - grads as ready when processing the last microbatch and ddp_config.overlap_grad_reduce - is True. - """ - assert ( - self.ddp_config.overlap_grad_reduce - ), 'register_grad_ready() should only be called when overlap_grad_reduce is True' - if self.is_last_microbatch: - assert param in self.param_to_bucket, 'Param is not in the bucket group' - assert param not in self.params_with_grad, 'Cannot set grad twice' - self.params_with_grad.add(param) - # If all params in bucket group have grads available, issue communication call. - if len(self.params_with_grad) == len(self.params): - self.start_grad_sync() - - -class _ParamAndGradBuffer: - """ - Groups parameters and gradients into a contiguous buffer, and then breaks the buffer into - buckets with roughly `bucket_size` parameters each. - - Args: - ddp_config: DistributedDataParallel config object. - param_dtype: Type of param tensor. - grad_dtype: Type of grad tensor. - params: List of parameters whose parameters and gradients are collated in the underlying - tensor. - data_parallel_group: Data-parallel process group. - bucket_size: The rough size of each bucket in terms of number of parameters. - param_to_name: Mapping from `torch.nn.Parameter` to name (for logging purposes). - gradient_scaling_factor: This factor is utilized to scale gradients prior to their - communication. Its application is twofold: it facilitates the averaging of gradients - and the scaling of gradients in the context of the Mixture of Experts (MoE) model. - param_indices: The index of each param among the params with same dtype, if a param is fp8, - use its "fake" high precision dtype to determine which params have same dtype with it. - These indices are needed when loading a non-native-fp8 checkpoint in native-fp8 mode. - """ - - def __init__( - self, - ddp_config: DistributedDataParallelConfig, - param_dtype: torch.dtype, - grad_dtype: torch.dtype, - params: List[torch.nn.Parameter], - data_parallel_group: torch.distributed.ProcessGroup, - bucket_size: int, - param_to_name: Dict[torch.nn.Parameter, str], - gradient_scaling_factor: float, - param_indices: List[int], - ): - self.ddp_config = ddp_config - self.params = params - self.param_indices = param_indices - - # Check that params are unique. - unique_params = set() - for param in params: - assert param not in unique_params - unique_params.add(param) - del unique_params - - # Store attributes that will be needed later. - self.param_dtype = param_dtype - self.grad_dtype = grad_dtype - self.data_parallel_group = data_parallel_group - self.data_parallel_world_size = torch.distributed.get_world_size( - group=self.data_parallel_group - ) - self.gradient_scaling_factor = gradient_scaling_factor - - # Data structures to store underlying buckets and relevant indexing data. - self.buckets = [] - self.param_to_bucket = {} # Param -> bucket mapping. 
- self.param_index_map = {} # Param -> location in buffer mapping (used in dist. optimizer). - - def _pad(number_to_be_padded: int, divisor: int) -> int: - return int(math.ceil(number_to_be_padded / divisor) * divisor) - - def _pad_end_of_bucket_if_needed(bucket_end_index: int) -> int: - """ - Pads end index of bucket if using distributed optimizer (to ensure uniform sharding). - """ - if self.ddp_config.use_distributed_optimizer: - # Workaround for TE bug causing cuBLAS to pick an incompatible algorithm. - # This also helps cuBLAS pick more efficient algorithms for GEMMs. - # We now ensure that all buckets start at a memory address that is 256-byte - # aligned (128 values since params and grads use >= 16-bit precision). - return _pad(bucket_end_index, math.lcm(self.data_parallel_world_size, 128)) - return bucket_end_index - - def _pad_start_of_param_if_needed(param_start_index: int) -> int: - """ - Pads start index of param if using distributed optimizer (to ensure "good" alignment). - """ - if self.ddp_config.use_distributed_optimizer: - # Ensure that params start at 128-byte aligned addresses (64 values - # since params are >= 16-bit precision). - return _pad(param_start_index, 64) - return param_start_index - - # First, figure out how many elements should be in the underlying buffer storage. - # Note that if we need to split the buffer into smaller buckets, each of these - # might need to be padded as well (if using the distributed optimizer). - param_start_index = 0 - bucket_start_index = param_start_index - bucket_params = set() - self.bucket_indices = [] - per_bucket_numel_unpadded = [] - bucket_id = 0 - - def _update_bucket_metadata(param_end_index: int) -> int: - """ - Record metadata for the bucket starting at bucket_start_index and ending with the - passed-in param_end_index. Returns the bucket's end_index. - """ - nonlocal bucket_start_index, bucket_params, bucket_id - per_bucket_numel_unpadded.append(param_end_index - bucket_start_index) - bucket_end_index = _pad_end_of_bucket_if_needed(param_end_index) - - # Record metadata of new bucket. - self.bucket_indices.append((bucket_start_index, bucket_end_index)) - bucket_start_index = bucket_end_index - - # Prepare for next bucket. - bucket_params = set() - bucket_id += 1 - - # Return the potentially padded bucket_end_index. - return bucket_end_index - - def _does_param_require_new_bucket(param): - """ - Split shared embedding parameters into separate bucket if using distributed - optimizer that makes use of reduce-scatters instead of all-reduces. - This ensures that the first and last pipeline stage partition optimizer state - for the shared embedding parameters the same way across DP replicas, allowing - the DP reduce-scatter to be before the embedding all-reduce. - """ - return ( - getattr(param, "shared_embedding", False) - and self.ddp_config.use_distributed_optimizer - ) - - for param in params[::-1]: - # Iterate through parameters in reverse order to roughly follow backprop order. - - this_numel = param.data.nelement() - param_start_index = _pad_start_of_param_if_needed(param_start_index) - - # Create bucket with collected parameters if current param needs its own bucket. - if _does_param_require_new_bucket(param): - # We are creating a bucket for the already accumulated parameters, whose params - # end at the current param_start_index. - if self.ddp_config.use_distributed_optimizer: - # Make sure new bucket is appropriately padded. 
- if param_start_index % self.data_parallel_world_size != 0: - param_start_index = _pad_end_of_bucket_if_needed(param_start_index) - if len(bucket_params) > 0: - bucket_end_index = _update_bucket_metadata(param_start_index) - - param_end_index = param_start_index + this_numel - self.param_index_map[param] = (param_start_index, param_end_index, bucket_id) - bucket_params.add(param) - - # If we have enough elements already or the current param is part of the shared - # embedding layer and needs a separate bucket, form a new bucket. - if ( - bucket_size is not None and (param_end_index - bucket_start_index) >= bucket_size - ) or _does_param_require_new_bucket(param): - bucket_end_index = _update_bucket_metadata(param_end_index) - param_start_index = bucket_end_index - else: - param_start_index = param_end_index - - # Add remaining params to a new bucket. - if len(bucket_params) > 0: - bucket_end_index = _update_bucket_metadata(param_end_index) - - # Next, create underlying storage for buffer (with numel elements that includes - # padding as necessary). - self.numel = bucket_end_index - self.numel_unpadded = sum(per_bucket_numel_unpadded) - assert self.numel_unpadded <= self.numel - if self.ddp_config.use_distributed_optimizer: - assert self.numel % self.data_parallel_world_size == 0 - else: - assert self.numel == self.numel_unpadded - - self.param_data = None - # Only re-map param tensors if using distributed optimizer. - if self.ddp_config.use_distributed_optimizer: - self.param_data = torch.zeros( - self.numel, - dtype=self.param_dtype, - device=torch.cuda.current_device(), - requires_grad=False, - ) - self.grad_data = torch.zeros( - self.numel, - dtype=self.grad_dtype, - device=torch.cuda.current_device(), - requires_grad=False, - ) - - # Finally, map param.data and param.main_grad fields to buffers. - bucket_params = [] - bucket_start_index = 0 - cur_bucket_id = 0 - for param in params[::-1]: - param_start_index, param_end_index, bucket_id = self.param_index_map[param] - - # Assign param.data to appropriate segment of self.param_data. - if self.param_data is not None: - old_param_data = param.data - new_param_data = self._get( - param.data.shape, param_start_index, buffer_type=BufferType.PARAM - ) - if is_float8tensor(param): - param._data = new_param_data - else: - param.data = new_param_data - assert old_param_data._base is None - # Copy tensor values (from initialization or checkpoint). - param.data.detach().copy_(old_param_data) - del old_param_data - - param.main_grad = self._get( - param.data.shape, param_start_index, buffer_type=BufferType.GRAD - ) - if bucket_id != cur_bucket_id: - bucket_end_index = _pad_end_of_bucket_if_needed(param_start_index) - self.buckets.append( - self._new_bucket( - bucket_params=bucket_params, - start_index=bucket_start_index, - end_index=bucket_end_index, - numel_unpadded=per_bucket_numel_unpadded[cur_bucket_id], - bucket_id=cur_bucket_id, - ) - ) - bucket_start_index = bucket_end_index - bucket_params = [] - assert cur_bucket_id + 1 == len(self.buckets) - assert bucket_id == cur_bucket_id + 1 - cur_bucket_id = bucket_id - bucket_params.append(param) - - # Add remaining params to a new bucket. - if len(bucket_params) > 0: - bucket_end_index = _pad_end_of_bucket_if_needed(param_end_index) - self.buckets.append( - self._new_bucket( - bucket_params=bucket_params, - start_index=bucket_start_index, - end_index=bucket_end_index, - numel_unpadded=per_bucket_numel_unpadded[cur_bucket_id], - bucket_id=cur_bucket_id, - ) - ) - - # Log buckets for all PP stages. 
- log_strs = [] - log_strs.append( - f'Number of buckets for gradient all-reduce / reduce-scatter: {len(self.buckets)}' - ) - for index, bucket in enumerate(self.buckets): - numel = 0 - for param in bucket.params: - numel += param.data.nelement() - log_strs.append(f'Params for bucket {index+1} ({numel} elements):') - for param in bucket.params: - log_strs.append(f'\t{param_to_name[param]}') - log_on_each_pipeline_stage(logger, logging.INFO, '\n'.join(log_strs)) - - def scale_gradients(self, scaling_factor: float) -> None: - """Scale the gradient data by `scaling_factor`.""" - self.grad_data *= scaling_factor - - def _get(self, shape: torch.Size, start_index: int, buffer_type: BufferType) -> torch.Tensor: - """ - Return a tensor with the input `shape` as a view into the 1-D data starting at - `start_index`. - """ - end_index = start_index + shape.numel() - assert end_index <= self.numel, 'Requested tensor is out of buffer range' - if buffer_type == BufferType.PARAM: - assert self.param_data is not None - buffer_tensor = self.param_data[start_index:end_index] - elif buffer_type == BufferType.GRAD: - buffer_tensor = self.grad_data[start_index:end_index] - else: - raise Exception("Illegal buffer type provided to GradBuffer._get() function") - buffer_tensor = buffer_tensor.view(shape) - return buffer_tensor - - def _new_bucket( - self, - bucket_params: List[torch.nn.Parameter], - start_index: int, - end_index: int, - numel_unpadded: int, - bucket_id: int, - ) -> _ParamAndGradBucket: - """ - Helper function that creates a new bucket. Also updates param->bucket mapping. - """ - - # Assert that indices are correctly padded (if needed), and that bucket - # position is same as originally computed. - if self.ddp_config.use_distributed_optimizer: - assert start_index % self.data_parallel_world_size == 0 - assert end_index % self.data_parallel_world_size == 0 - assert (start_index, end_index) == self.bucket_indices[bucket_id] - - # Get appropriate view into global _ParamAndGradBuffer. - bucketed_param_data = None - if self.param_data is not None: - bucketed_param_data = self._get( - torch.Size([end_index - start_index]), start_index, buffer_type=BufferType.PARAM - ) - bucketed_grad_data = self._get( - torch.Size([end_index - start_index]), start_index, buffer_type=BufferType.GRAD - ) - bucket = _ParamAndGradBucket( - params=bucket_params, - param_data=bucketed_param_data, - grad_data=bucketed_grad_data, - offset=start_index, - numel_unpadded=numel_unpadded, - gradient_scaling_factor=self.gradient_scaling_factor, - bucket_id=bucket_id, - ) - for bucket_param in bucket_params: - assert bucket_param not in self.param_to_bucket - self.param_to_bucket[bucket_param] = bucket - - return bucket - - def reset(self): - """ - Zero out the underlying grad_buffer. - """ - self.grad_data.zero_() - - -def partition_buckets( - buffers: List[_ParamAndGradBuffer], force_single_bucket_group: bool = False -) -> List[_ParamAndGradBucketGroup]: - """ - Automatically regroup the buckets of input buffers and return a list of bucket groups. - - In some scenarios, we need to put buckets from different buffers into a group so that their - communication can be aggregated. 
- - For example, when there are both fp8 weights and bf16 biases in the model and virtual - pipeline parallelism is enabled, each model chunk will have an fp8 bucket and a bf16 bucket, - which doubles the number of communication kernels, and because of the use of - CUDA_DEVICE_MAX_CONNECTIONS=1, having multiple back-to-back communications will prevent the - overlap of communication kernels with computation kernels. - - The grouping strategy is: - 1. If force_single_bucket_group is True, put all buckets across all buffers into a single - bucket group. - 2. If force_single_bucket_group is False, when there is no fp8 buffer in the input buffers, - let each bucket group have only one bucket. - 3. If force_single_bucket_group is False, when using fp8 params, merge all non-fp8 buckets - into the last fp8 bucket group. - - Since the non-fp8 parameters (typically the biases of various layers) are relatively - small, they are likely to be grouped into a single non-fp8 bucket. - - The fp8 buckets start from the end of the model, i.e., the first bucket corresponds to - the end of the model, while the last bucket corresponds to the beginning. - - If we combine the non-fp8 bucket with the first fp8 bucket, we cannot initiate the - reduce-scatter to synchronize gradients after the backward pass at the end of the model - has completed. This is because we need to wait for the non-fp8 params from the beginning - layers to obtain their gradients. - - Combining the non-fp8 bucket with the last fp8 bucket can help avoid this issue. - - Args: - buffers (list): list of input buffers. - single_bucket_group_per_buffer (bool, optional): force group all buckets in each buffer - into a single bucket group. - """ - - if len(buffers) == 0: - return [] - - dtype_to_buffer_map = {} - for buffer in buffers: - dtype = buffer.param_dtype - # Make sure that the param_dtype of any two buffers is different. - assert dtype not in dtype_to_buffer_map - dtype_to_buffer_map[dtype] = buffer - - # Case 1: Put all buckets into a single bucket group if force_single_bucket_group is True. - if force_single_bucket_group: - buckets = [] - ddp_config = buffers[0].ddp_config - data_parallel_group = buffers[0].data_parallel_group - data_parallel_world_size = buffers[0].data_parallel_world_size - for buffer in buffers: - assert ddp_config == buffer.ddp_config - assert data_parallel_group == buffer.data_parallel_group - assert data_parallel_world_size == buffer.data_parallel_world_size - buckets.extend(buffer.buckets) - - bucket_group = _ParamAndGradBucketGroup( - buckets, ddp_config, data_parallel_group, data_parallel_world_size - ) - return [bucket_group] - - if torch.uint8 not in dtype_to_buffer_map: - # Case 2: When there is no fp8 buffer in the input buffers, let each bucket group have - # only one bucket. - bucket_groups = [] - for buffer in buffers: - for bucket in buffer.buckets: - bucket_groups.append( - _ParamAndGradBucketGroup( - [bucket], - buffer.ddp_config, - buffer.data_parallel_group, - buffer.data_parallel_world_size, - ) - ) - return bucket_groups - else: - # Case 3: When using fp8 params, merge all non-fp8 buckets into the last fp8 bucket group. - non_fp8_buckets = [] - for buffer in buffers: - if buffer.param_dtype != torch.uint8: - for bucket in buffer.buckets: - non_fp8_buckets.append(bucket) - - bucket_groups = [] - fp8_buffer = dtype_to_buffer_map[torch.uint8] - for bucket in fp8_buffer.buckets: - if len(bucket_groups) == len(fp8_buffer.buckets) - 1: - # The last bucket group. 
- group_buckets = [bucket] + non_fp8_buckets - else: - # The first N-1 bucket groups. - group_buckets = [bucket] - bucket_groups.append( - _ParamAndGradBucketGroup( - group_buckets, - buffer.ddp_config, - buffer.data_parallel_group, - buffer.data_parallel_world_size, - ) - ) - return bucket_groups +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import logging +import math +import warnings +from contextlib import nullcontext +from enum import Enum +from functools import partial +from typing import Dict, List, Optional + +import torch +from torch.distributed import _coalescing_manager + +from megatron.core.rerun_state_machine import get_rerun_state_machine + +from ..utils import is_float8tensor, is_torch_min_version, log_on_each_pipeline_stage +from .distributed_data_parallel_config import DistributedDataParallelConfig + +logger = logging.getLogger(__name__) + + +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor + dist_reduce_scatter_func = torch.distributed.reduce_scatter_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + dist_reduce_scatter_func = torch.distributed._reduce_scatter_base + + +class BufferType(Enum): + """ + Enumeration for buffer type. + """ + + PARAM = 1 + GRAD = 2 + + +def shard_buffer(buffer: torch.Tensor, data_parallel_world_size: int): + """ + Shard buffer into data_parallel_world_size chunks of equal size. + """ + assert buffer.numel() % data_parallel_world_size == 0 + shard_size = buffer.numel() // data_parallel_world_size + sharded_buffer = [ + buffer[(r * shard_size) : ((r + 1) * shard_size)] for r in range(data_parallel_world_size) + ] + return sharded_buffer + + +class _ParamAndGradBucket: + """ + Bucket to keep track of a subset of the model's parameters and gradients. + + Args: + params: List of parameters whose gradients are collated in this bucket. + param_data: View in _ParamAndGradBuffer.param_data that this bucket is responsible for. + grad_data: View in _ParamAndGradBuffer.grad_data that this bucket is responsible for. + offset: Offset of this bucket's view in the larger _ParamAndGradBuffer. + numel_unpadded: Number of unpadded elements in bucket. + gradient_scaling_factor: This factor is utilized to scale gradients prior to their + communication. Its application is twofold: it facilitates the averaging of gradients + and the scaling of gradients in the context of the Mixture of Experts (MoE) model. + bucket_id: Index of bucket in buffer. + """ + + def __init__( + self, + params: List[torch.nn.Parameter], + param_data: Optional[torch.Tensor], + grad_data: torch.Tensor, + offset: int, + numel_unpadded: int, + gradient_scaling_factor: float, + bucket_id: int, + ): + self.params_list = params + self.params = set(params) + # Make sure there are no duplicate params. + assert len(self.params_list) == len(self.params) + self.param_data = param_data + self.grad_data = grad_data + # The distributed optimizer needs to keep track of this bucket's offset + # within the full grad_buffer. + self.offset = offset + self.numel_unpadded = numel_unpadded + self.gradient_scaling_factor = gradient_scaling_factor + self.bucket_id = bucket_id + + +class _ParamAndGradBucketGroup: + """ + Put multiple buckets into a group so that their communications can be aggregated together. 
+ Provides functionality to register when params in the bucket group have grads ready to be + synced; an asynchronous communication call is automatically launched when _all_ params in + the bucket group have grads ready. + + Args: + buckets: A list of buckets. + ddp_config: DistributedDataParallel config object. + collective_group: intra_distributed_optimizer_instance_group if using distributed + optimizer, data_parallel_group if not. + collective_group_size: World size using the intra data-parallel group. + """ + + def __init__( + self, + buckets: List[_ParamAndGradBucket], + ddp_config: DistributedDataParallelConfig, + collective_group: torch.distributed.ProcessGroup, + collective_group_size: int, + ): + self.buckets = buckets + self.ddp_config = ddp_config + + if self.ddp_config.use_distributed_optimizer: + self.intra_distributed_optimizer_instance_group = collective_group + self.intra_distributed_optimizer_instance_size = collective_group_size + self.intra_distributed_optimizer_instance_rank = torch.distributed.get_rank( + group=collective_group + ) + else: + self.data_parallel_group = collective_group + + # State for bookkeeping: params is the set of parameters this bucket group is + # responsible for, params_with_grad is the set of parameters with grads + # available. When overlap_grad_reduce is True, communication (all-reduce + # or reduce-scatter) is issued when params_with_grad equals params. + self.param_to_bucket = {} + self.params = set() + for bucket in self.buckets: + for param in bucket.params_list: + self.param_to_bucket[param] = bucket + self.params.add(param) + + self.next_param_gather_bucket_group = None + + if self.ddp_config.num_distributed_optimizer_instances > 1: + self.inter_distributed_optimizer_instance_group = None + self.communication_stream = None + + self.reset() + self.param_gather_handle = None + self.param_gather_dispatched = False + self.grad_reduce_handle = None + + def reset(self): + """ + Reset metadata in bucket group in preparation for the next iteration of training. + """ + self.params_with_grad = set() + self.is_last_microbatch = True + + def check_grads(self, check_for_nan_or_inf, check_for_large): + """ + Make sure norm of grads in bucket are not NaN prior to data-parallel + all-reduce / reduce-scatter. 
+ """ + rerun_state_machine = get_rerun_state_machine() + for i in range(len(self.buckets)): + grad_norm = self.buckets[i].grad_data.norm(p=2) + # check for NaN, Inf and unexpectedly large grads + if check_for_nan_or_inf: + rerun_state_machine.validate_result( + result=grad_norm, + rejection_func=torch.isnan, + message=f"found NaN in local grad norm for bucket #{i} " + f"in backward pass before data-parallel communication collective", + tolerance=0.001, # 0.1% tolerance to account for non-deterministic FA backward + fatal=True, + ) + rerun_state_machine.validate_result( + result=grad_norm, + rejection_func=torch.isinf, + message=f"found Inf in local grad norm for bucket #{i} " + f"in backward pass before data-parallel communication collective", + tolerance=0.001, # 0.1% tolerance to account for non-deterministic FA backward + fatal=True, + ) + if check_for_large: + rerun_state_machine.validate_result( + result=grad_norm, + rejection_func=partial( + rerun_state_machine.is_unexpectedly_large, threshold=10, context="grads" + ), + message=f"found unexpected large grads in bucket #{i} " + f"in backward pass before data-parallel communication collective", + tolerance=0.001, # 0.1% tolerance to account for non-deterministic FA backward + fatal=False, + ) + + def start_param_sync(self, force_sync: bool = False): + """ + Initiates all necessary param all-gathers for this bucket. + + When ddp_config.overlap_param_gather is set to True, dispatches an asynchronous + communication call (unless force_sync is True). When ddp_config.overlap_param_gather + is set to False, makes synchronous call. + + Args: + force_sync (bool, optional): force synchronous collective regardless of + other settings if true. + """ + assert self.ddp_config.use_distributed_optimizer + + if force_sync: + if self.param_gather_handle is not None: + self.param_gather_handle.wait() + self.param_gather_handle = None + return + else: + assert self.param_gather_handle is None + + async_op = self.ddp_config.overlap_param_gather and not force_sync + # Coalesce communication kernels across buckets in the bucket group. + with _coalescing_manager( + self.intra_distributed_optimizer_instance_group, async_ops=async_op + ) as cm: + for bucket in self.buckets: + local_data_view = shard_buffer( + bucket.param_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + dist_all_gather_func( + bucket.param_data, + local_data_view, + group=self.intra_distributed_optimizer_instance_group, + async_op=async_op, + ) + if async_op: + self.param_gather_handle = cm + else: + # When using `_coalescing_manager`, even if a synchronous op (async_op=False) is used, + # `cm` is not None, which is different from when `_coalescing_manager` is not used in + # which case the torch.distributed._all_gather_base() will return None. In order to + # maintain consistency with prior code, we need to manually set communication handle to + # None. + self.param_gather_handle = None + self.param_gather_dispatched = True + + def finish_param_sync(self, skip_next_bucket_dispatch: bool = False): + """ + Finishes param sync communication operation for this bucket. Dispatches + next bucket's param sync if available, unless skip_next_bucket_dispatch + is True. + + When ddp_config.overlap_param_gather is set to True, waits for asynchronous + communication call to complete (and dispatches one if one is not already + outstanding). Throws assertion error if ddp_config.overlap_param_gather is set to + False. 
+ + Args: + skip_next_bucket_dispatch (bool, optional): if true, dispatch next + bucket's communication if available. + """ + assert self.ddp_config.use_distributed_optimizer + assert self.ddp_config.overlap_param_gather + + # If current bucket's param AG has not been dispatched, dispatch it now (e.g., first + # AG bucket in first model chunk if ddp_config.align_param_gather is False). + if not self.param_gather_dispatched: + self.start_param_sync() + + if self.param_gather_handle is not None: + self.param_gather_handle.wait() + self.param_gather_handle = None + # Dispatch next bucket's asynchronous param AG only if it has not been dispatched yet. + if self.next_param_gather_bucket_group is not None and not skip_next_bucket_dispatch: + if self.next_param_gather_bucket_group.param_gather_dispatched: + warnings.warn( + "The next bucket's parameter all-gather operation has already been " + "dispatched. This may be caused by a mismatch between the order of " + "parameter registration and forward pass execution, which will " + "hurt the communication-computation overlap performance." + ) + else: + self.next_param_gather_bucket_group.start_param_sync() + + def start_grad_sync(self): + """ + Initiates grad sync (all-reduce or reduce-scatter) communication operations + for all buckets in the bucket group. + + When ddp_config.overlap_grad_reduce is set to True, dispatches an asynchronous + communication call. When ddp_config.overlap_grad_reduce is set to False, makes + synchronous call. + """ + assert ( + self.grad_reduce_handle is None + ), 'Should not have multiple communication calls outstanding at once' + + if self.ddp_config.check_for_nan_in_grad or self.ddp_config.check_for_large_grads: + self.check_grads( + check_for_nan_or_inf=self.ddp_config.check_for_nan_in_grad, + check_for_large=self.ddp_config.check_for_large_grads, + ) + + # gradient_scaling_factor already takes into account whether we are computing + # an average or sum in the data-parallel collective. + for bucket in self.buckets: + if bucket.gradient_scaling_factor != 1.0: + bucket.grad_data *= bucket.gradient_scaling_factor + + # Decide reduce_op. + reduce_op = torch.distributed.ReduceOp.SUM + if self.ddp_config.average_in_collective: + reduce_op = torch.distributed.ReduceOp.AVG + + # We use the following stream synchronization for the gradient reduction + # within and across DistOpt instances. + + # Compute Stream: -------------Gradient compute------------------- + # Comm. Stream: ------(wait for NCCL)-----(wait for NCCL)------- + # NCCL Stream: -------RS------ -------AR------ + + # Use async communications only when overlap_grad_reduce is True. + async_op = ( + self.ddp_config.overlap_grad_reduce + and self.ddp_config.num_distributed_optimizer_instances == 1 + ) + if ( + self.ddp_config.num_distributed_optimizer_instances > 1 + and self.ddp_config.overlap_grad_reduce + ): + # Assign a communication stream if we have multiple DistOpt instances and we + # need to overlap communication. + stream_context = torch.cuda.stream(self.communication_stream) + + # The RS/AR communication stream needs to wait for the default stream + # to complete its gradient computation before launching the next + # gradient reduction collective. 
+ self.communication_stream.wait_stream(torch.cuda.default_stream()) + else: + stream_context = nullcontext() + + if self.ddp_config.use_distributed_optimizer: + communication_group = self.intra_distributed_optimizer_instance_group + else: + communication_group = self.data_parallel_group + + # Coalesce communication kernels across buckets in the bucket group. + with stream_context, _coalescing_manager(communication_group, async_ops=async_op) as cm: + for bucket in self.buckets: + if self.ddp_config.use_distributed_optimizer: + local_data_view = shard_buffer( + bucket.grad_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + dist_reduce_scatter_func( + local_data_view, + bucket.grad_data, + op=reduce_op, + group=communication_group, + async_op=async_op, + ) + else: + torch.distributed.all_reduce( + bucket.grad_data, op=reduce_op, group=communication_group, async_op=async_op + ) + + # With multiple DistOpt instances, we need to all-reduce across instances. + if ( + self.ddp_config.use_distributed_optimizer + and self.ddp_config.num_distributed_optimizer_instances > 1 + ): + + # Create a new coalescing manager for the inter-instance all-reduce. + with stream_context, _coalescing_manager( + self.inter_distributed_optimizer_instance_group, async_ops=async_op + ) as cm: + for bucket in self.buckets: + local_data_view = shard_buffer( + bucket.grad_data, self.intra_distributed_optimizer_instance_size + )[self.intra_distributed_optimizer_instance_rank] + + torch.distributed.all_reduce( + local_data_view, + op=reduce_op, + group=self.inter_distributed_optimizer_instance_group, + async_op=async_op, + ) + + if async_op: + self.grad_reduce_handle = cm + else: + # When using `_coalescing_manager`, even if a synchronous op (async_op=False) is used, + # `cm` is not None, which is different from when `_coalescing_manager` is not used in + # which case the torch.distributed._reduce_scatter_base() will return None. In order to + # maintain consistency with prior code, we need to manually set communication handle to + # None. + self.grad_reduce_handle = None + + def finish_grad_sync(self): + """ + Finishes grad sync (all-reduce or reduce-scatter) communication operations + for all buckets in the bucket group. + + When ddp_config.overlap_grad_reduce is set to True, waits for asynchronous + communication call to complete. When ddp_config.overlap_grad_reduce is set to False, + makes synchronous call. + """ + self.param_gather_dispatched = False + # If overlap_grad_reduce is False, start (and finish) synchronous communication call here. + if not self.ddp_config.overlap_grad_reduce: + self.start_grad_sync() + return + # When using multiple DistOpt instances, we don't need to sync here as we launch + # communications on a separate communication stream. + if self.ddp_config.num_distributed_optimizer_instances > 1: + torch.cuda.default_stream().wait_stream(self.communication_stream) + return + assert self.grad_reduce_handle is not None, ( + f'Communication call has not been issued for this bucket ' + f'({len(self.params_with_grad)}/{len(self.params)} params have grad available)' + ) + self.grad_reduce_handle.wait() + self.grad_reduce_handle = None + + def register_grad_ready(self, param: torch.nn.Parameter): + """ + Registers grads for the passed-in param to be "ready" for grad sync. + + When the number of microbatches is greater than 1, we only want to register + grads as ready when processing the last microbatch and ddp_config.overlap_grad_reduce + is True. 
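The stream diagram in the comments above describes a standard producer/consumer handshake between the default (compute) stream and the dedicated communication stream: the communication stream waits for gradients to finish, and finish_grad_sync later makes the default stream wait for the reduction. A minimal standalone sketch of that pattern, assuming a CUDA device is available (illustrative only, not tied to Megatron's classes):

    import torch

    comm_stream = torch.cuda.Stream()
    grads = torch.ones(1024, device="cuda")   # stand-in for a bucket's grad_data

    # the backward pass would produce `grads` on the default stream here
    comm_stream.wait_stream(torch.cuda.default_stream())  # don't reduce half-written grads
    with torch.cuda.stream(comm_stream):
        grads.mul_(0.5)                                    # stand-in for reduce-scatter / all-reduce
    # later, as in finish_grad_sync, the compute stream waits for the reduction
    torch.cuda.default_stream().wait_stream(comm_stream)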
+ """ + assert ( + self.ddp_config.overlap_grad_reduce + ), 'register_grad_ready() should only be called when overlap_grad_reduce is True' + if self.is_last_microbatch: + assert param in self.param_to_bucket, 'Param is not in the bucket group' + assert param not in self.params_with_grad, 'Cannot set grad twice' + self.params_with_grad.add(param) + # If all params in bucket group have grads available, issue communication call. + if len(self.params_with_grad) == len(self.params): + self.start_grad_sync() + + +class _ParamAndGradBuffer: + """ + Groups parameters and gradients into a contiguous buffer, and then breaks the buffer into + buckets with roughly `bucket_size` parameters each. + + Args: + ddp_config: DistributedDataParallel config object. + param_dtype: Type of param tensor. + grad_dtype: Type of grad tensor. + params: List of parameters whose parameters and gradients are collated in the underlying + tensor. + data_parallel_group: Data-parallel process group. + bucket_size: The rough size of each bucket in terms of number of parameters. + param_to_name: Mapping from `torch.nn.Parameter` to name (for logging purposes). + gradient_scaling_factor: This factor is utilized to scale gradients prior to their + communication. Its application is twofold: it facilitates the averaging of gradients + and the scaling of gradients in the context of the Mixture of Experts (MoE) model. + param_indices: The index of each param among the params with same dtype, if a param is fp8, + use its "fake" high precision dtype to determine which params have same dtype with it. + These indices are needed when loading a non-native-fp8 checkpoint in native-fp8 mode. + """ + + def __init__( + self, + ddp_config: DistributedDataParallelConfig, + param_dtype: torch.dtype, + grad_dtype: torch.dtype, + params: List[torch.nn.Parameter], + data_parallel_group: torch.distributed.ProcessGroup, + bucket_size: int, + param_to_name: Dict[torch.nn.Parameter, str], + gradient_scaling_factor: float, + param_indices: List[int], + ): + self.ddp_config = ddp_config + self.params = params + self.param_indices = param_indices + + # Check that params are unique. + unique_params = set() + for param in params: + assert param not in unique_params + unique_params.add(param) + del unique_params + + # Store attributes that will be needed later. + self.param_dtype = param_dtype + self.grad_dtype = grad_dtype + self.data_parallel_group = data_parallel_group + self.data_parallel_world_size = torch.distributed.get_world_size( + group=self.data_parallel_group + ) + self.gradient_scaling_factor = gradient_scaling_factor + + # Data structures to store underlying buckets and relevant indexing data. + self.buckets = [] + self.param_to_bucket = {} # Param -> bucket mapping. + self.param_index_map = {} # Param -> location in buffer mapping (used in dist. optimizer). + + def _pad(number_to_be_padded: int, divisor: int) -> int: + return int(math.ceil(number_to_be_padded / divisor) * divisor) + + def _pad_end_of_bucket_if_needed(bucket_end_index: int) -> int: + """ + Pads end index of bucket if using distributed optimizer (to ensure uniform sharding). + """ + if self.ddp_config.use_distributed_optimizer: + # Workaround for TE bug causing cuBLAS to pick an incompatible algorithm. + # This also helps cuBLAS pick more efficient algorithms for GEMMs. + # We now ensure that all buckets start at a memory address that is 256-byte + # aligned (128 values since params and grads use >= 16-bit precision). 
+ if self.ddp_config.pad_buckets_for_high_nccl_busbw: + # Make sure the bucket size is divisible by a large power of 2 (2^16) to + # ensure NCCL collectives have high bus bandwidth at large DP counts, + # since NCCL message size (which for ring algorithms is bucket_size / + # dp_size) apparently needs to be divisible by a power of 2 for high busbw. + bucket_size_divisor = math.lcm(self.data_parallel_world_size, 128, 2**16) + else: + bucket_size_divisor = math.lcm(self.data_parallel_world_size, 128) + return _pad(bucket_end_index, bucket_size_divisor) + return bucket_end_index + + def _pad_start_of_param_if_needed(param_start_index: int) -> int: + """ + Pads start index of param if using distributed optimizer (to ensure "good" alignment). + """ + if self.ddp_config.use_distributed_optimizer: + # Ensure that params start at 128-byte aligned addresses (64 values + # since params are >= 16-bit precision). + return _pad(param_start_index, 64) + return param_start_index + + # First, figure out how many elements should be in the underlying buffer storage. + # Note that if we need to split the buffer into smaller buckets, each of these + # might need to be padded as well (if using the distributed optimizer). + param_start_index = 0 + bucket_start_index = param_start_index + bucket_params = set() + self.bucket_indices = [] + per_bucket_numel_unpadded = [] + bucket_id = 0 + + def _update_bucket_metadata(param_end_index: int) -> int: + """ + Record metadata for the bucket starting at bucket_start_index and ending with the + passed-in param_end_index. Returns the bucket's end_index. + """ + nonlocal bucket_start_index, bucket_params, bucket_id + per_bucket_numel_unpadded.append(param_end_index - bucket_start_index) + bucket_end_index = _pad_end_of_bucket_if_needed(param_end_index) + + # Record metadata of new bucket. + self.bucket_indices.append((bucket_start_index, bucket_end_index)) + bucket_start_index = bucket_end_index + + # Prepare for next bucket. + bucket_params = set() + bucket_id += 1 + + # Return the potentially padded bucket_end_index. + return bucket_end_index + + def _does_param_require_new_bucket(param): + """ + Split shared embedding parameters into separate bucket if using distributed + optimizer that makes use of reduce-scatters instead of all-reduces. + This ensures that the first and last pipeline stage partition optimizer state + for the shared embedding parameters the same way across DP replicas, allowing + the DP reduce-scatter to be before the embedding all-reduce. + """ + return ( + getattr(param, "shared_embedding", False) + and self.ddp_config.use_distributed_optimizer + ) + + for param in params[::-1]: + # Iterate through parameters in reverse order to roughly follow backprop order. + + this_numel = param.data.nelement() + param_start_index = _pad_start_of_param_if_needed(param_start_index) + + # Create bucket with collected parameters if current param needs its own bucket. + if _does_param_require_new_bucket(param): + # We are creating a bucket for the already accumulated parameters, whose params + # end at the current param_start_index. + if self.ddp_config.use_distributed_optimizer: + # Make sure new bucket is appropriately padded. 
+ if param_start_index % self.data_parallel_world_size != 0: + param_start_index = _pad_end_of_bucket_if_needed(param_start_index) + if len(bucket_params) > 0: + bucket_end_index = _update_bucket_metadata(param_start_index) + + param_end_index = param_start_index + this_numel + self.param_index_map[param] = (param_start_index, param_end_index, bucket_id) + bucket_params.add(param) + + # If we have enough elements already or the current param is part of the shared + # embedding layer and needs a separate bucket, form a new bucket. + if ( + bucket_size is not None and (param_end_index - bucket_start_index) >= bucket_size + ) or _does_param_require_new_bucket(param): + bucket_end_index = _update_bucket_metadata(param_end_index) + param_start_index = bucket_end_index + else: + param_start_index = param_end_index + + # Add remaining params to a new bucket. + if len(bucket_params) > 0: + bucket_end_index = _update_bucket_metadata(param_end_index) + + # Next, create underlying storage for buffer (with numel elements that includes + # padding as necessary). + self.numel = bucket_end_index + self.numel_unpadded = sum(per_bucket_numel_unpadded) + assert self.numel_unpadded <= self.numel + if self.ddp_config.use_distributed_optimizer: + assert self.numel % self.data_parallel_world_size == 0 + else: + assert self.numel == self.numel_unpadded + + self.param_data = None + # Only re-map param tensors if using distributed optimizer. + if self.ddp_config.use_distributed_optimizer: + self.param_data = torch.zeros( + self.numel, + dtype=self.param_dtype, + device=torch.cuda.current_device(), + requires_grad=False, + ) + self.grad_data = torch.zeros( + self.numel, + dtype=self.grad_dtype, + device=torch.cuda.current_device(), + requires_grad=False, + ) + + # Finally, map param.data and param.main_grad fields to buffers. + bucket_params = [] + bucket_start_index = 0 + cur_bucket_id = 0 + for param in params[::-1]: + param_start_index, param_end_index, bucket_id = self.param_index_map[param] + + # Assign param.data to appropriate segment of self.param_data. + if self.param_data is not None: + old_param_data = param.data + new_param_data = self._get( + param.data.shape, param_start_index, buffer_type=BufferType.PARAM + ) + if is_float8tensor(param): + param._data = new_param_data + else: + param.data = new_param_data + assert old_param_data._base is None + # Copy tensor values (from initialization or checkpoint). + param.data.detach().copy_(old_param_data) + del old_param_data + + param.main_grad = self._get( + param.data.shape, param_start_index, buffer_type=BufferType.GRAD + ) + if bucket_id != cur_bucket_id: + bucket_end_index = _pad_end_of_bucket_if_needed(param_start_index) + self.buckets.append( + self._new_bucket( + bucket_params=bucket_params, + start_index=bucket_start_index, + end_index=bucket_end_index, + numel_unpadded=per_bucket_numel_unpadded[cur_bucket_id], + bucket_id=cur_bucket_id, + ) + ) + bucket_start_index = bucket_end_index + bucket_params = [] + assert cur_bucket_id + 1 == len(self.buckets) + assert bucket_id == cur_bucket_id + 1 + cur_bucket_id = bucket_id + bucket_params.append(param) + + # Add remaining params to a new bucket. + if len(bucket_params) > 0: + bucket_end_index = _pad_end_of_bucket_if_needed(param_end_index) + self.buckets.append( + self._new_bucket( + bucket_params=bucket_params, + start_index=bucket_start_index, + end_index=bucket_end_index, + numel_unpadded=per_bucket_numel_unpadded[cur_bucket_id], + bucket_id=cur_bucket_id, + ) + ) + + # Log buckets for all PP stages. 
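Before the per-bucket logging below, note that the bucket-end padding applied above is plain round-up-to-a-multiple arithmetic. A worked example, assuming a data-parallel size of 24 (illustrative values only):

    import math

    def pad(n, divisor):
        return int(math.ceil(n / divisor) * divisor)

    dp_size = 24
    # default: bucket ends are padded to a multiple of lcm(dp_size, 128)
    pad(1_000_003, math.lcm(dp_size, 128))            # -> 1_000_320
    # with pad_buckets_for_high_nccl_busbw, also to a multiple of 2**16
    pad(1_000_003, math.lcm(dp_size, 128, 2 ** 16))   # -> 1_179_648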
+ log_strs = [] + log_strs.append( + f'Number of buckets for gradient all-reduce / reduce-scatter: {len(self.buckets)}' + ) + for index, bucket in enumerate(self.buckets): + numel = 0 + for param in bucket.params: + numel += param.data.nelement() + log_strs.append( + f"Params for bucket {index+1} ({numel} elements, " + f"{bucket.grad_data.nelement()} padded size):" + ) + for param in bucket.params: + log_strs.append(f'\t{param_to_name[param]}') + log_on_each_pipeline_stage(logger, logging.INFO, '\n'.join(log_strs)) + + def scale_gradients(self, scaling_factor: float) -> None: + """Scale the gradient data by `scaling_factor`.""" + self.grad_data *= scaling_factor + + def _get(self, shape: torch.Size, start_index: int, buffer_type: BufferType) -> torch.Tensor: + """ + Return a tensor with the input `shape` as a view into the 1-D data starting at + `start_index`. + """ + end_index = start_index + shape.numel() + assert end_index <= self.numel, 'Requested tensor is out of buffer range' + if buffer_type == BufferType.PARAM: + assert self.param_data is not None + buffer_tensor = self.param_data[start_index:end_index] + elif buffer_type == BufferType.GRAD: + buffer_tensor = self.grad_data[start_index:end_index] + else: + raise Exception("Illegal buffer type provided to GradBuffer._get() function") + buffer_tensor = buffer_tensor.view(shape) + return buffer_tensor + + def _new_bucket( + self, + bucket_params: List[torch.nn.Parameter], + start_index: int, + end_index: int, + numel_unpadded: int, + bucket_id: int, + ) -> _ParamAndGradBucket: + """ + Helper function that creates a new bucket. Also updates param->bucket mapping. + """ + + # Assert that indices are correctly padded (if needed), and that bucket + # position is same as originally computed. + if self.ddp_config.use_distributed_optimizer: + assert start_index % self.data_parallel_world_size == 0 + assert end_index % self.data_parallel_world_size == 0 + assert (start_index, end_index) == self.bucket_indices[bucket_id] + + # Get appropriate view into global _ParamAndGradBuffer. + bucketed_param_data = None + if self.param_data is not None: + bucketed_param_data = self._get( + torch.Size([end_index - start_index]), start_index, buffer_type=BufferType.PARAM + ) + bucketed_grad_data = self._get( + torch.Size([end_index - start_index]), start_index, buffer_type=BufferType.GRAD + ) + bucket = _ParamAndGradBucket( + params=bucket_params, + param_data=bucketed_param_data, + grad_data=bucketed_grad_data, + offset=start_index, + numel_unpadded=numel_unpadded, + gradient_scaling_factor=self.gradient_scaling_factor, + bucket_id=bucket_id, + ) + for bucket_param in bucket_params: + assert bucket_param not in self.param_to_bucket + self.param_to_bucket[bucket_param] = bucket + + return bucket + + def reset(self): + """ + Zero out the underlying grad_buffer. + """ + self.grad_data.zero_() + + +def partition_buckets( + buffers: List[_ParamAndGradBuffer], force_single_bucket_group: bool = False +) -> List[_ParamAndGradBucketGroup]: + """ + Automatically regroup the buckets of input buffers and return a list of bucket groups. + + In some scenarios, we need to put buckets from different buffers into a group so that their + communication can be aggregated. 
+ + For example, when there are both fp8 weights and bf16 biases in the model and virtual + pipeline parallelism is enabled, each model chunk will have an fp8 bucket and a bf16 bucket, + which doubles the number of communication kernels, and because of the use of + CUDA_DEVICE_MAX_CONNECTIONS=1, having multiple back-to-back communications will prevent the + overlap of communication kernels with computation kernels. + + The grouping strategy is: + 1. If force_single_bucket_group is True, put all buckets across all buffers into a single + bucket group. + 2. If force_single_bucket_group is False, when there is no fp8 buffer in the input buffers, + let each bucket group have only one bucket. + 3. If force_single_bucket_group is False, when using fp8 params, merge all non-fp8 buckets + into the last fp8 bucket group. + - Since the non-fp8 parameters (typically the biases of various layers) are relatively + small, they are likely to be grouped into a single non-fp8 bucket. + - The fp8 buckets start from the end of the model, i.e., the first bucket corresponds to + the end of the model, while the last bucket corresponds to the beginning. + - If we combine the non-fp8 bucket with the first fp8 bucket, we cannot initiate the + reduce-scatter to synchronize gradients after the backward pass at the end of the model + has completed. This is because we need to wait for the non-fp8 params from the beginning + layers to obtain their gradients. + - Combining the non-fp8 bucket with the last fp8 bucket can help avoid this issue. + + Args: + buffers (list): list of input buffers. + single_bucket_group_per_buffer (bool, optional): force group all buckets in each buffer + into a single bucket group. + """ + + if len(buffers) == 0: + return [] + + dtype_to_buffer_map = {} + for buffer in buffers: + dtype = buffer.param_dtype + # Make sure that the param_dtype of any two buffers is different. + assert dtype not in dtype_to_buffer_map + dtype_to_buffer_map[dtype] = buffer + + # Case 1: Put all buckets into a single bucket group if force_single_bucket_group is True. + if force_single_bucket_group: + buckets = [] + ddp_config = buffers[0].ddp_config + data_parallel_group = buffers[0].data_parallel_group + data_parallel_world_size = buffers[0].data_parallel_world_size + for buffer in buffers: + assert ddp_config == buffer.ddp_config + assert data_parallel_group == buffer.data_parallel_group + assert data_parallel_world_size == buffer.data_parallel_world_size + buckets.extend(buffer.buckets) + + bucket_group = _ParamAndGradBucketGroup( + buckets, ddp_config, data_parallel_group, data_parallel_world_size + ) + return [bucket_group] + + if torch.uint8 not in dtype_to_buffer_map: + # Case 2: When there is no fp8 buffer in the input buffers, let each bucket group have + # only one bucket. + bucket_groups = [] + for buffer in buffers: + for bucket in buffer.buckets: + bucket_groups.append( + _ParamAndGradBucketGroup( + [bucket], + buffer.ddp_config, + buffer.data_parallel_group, + buffer.data_parallel_world_size, + ) + ) + return bucket_groups + else: + # Case 3: When using fp8 params, merge all non-fp8 buckets into the last fp8 bucket group. + non_fp8_buckets = [] + for buffer in buffers: + if buffer.param_dtype != torch.uint8: + for bucket in buffer.buckets: + non_fp8_buckets.append(bucket) + + bucket_groups = [] + fp8_buffer = dtype_to_buffer_map[torch.uint8] + for bucket in fp8_buffer.buckets: + if len(bucket_groups) == len(fp8_buffer.buckets) - 1: + # The last bucket group. 
+ group_buckets = [bucket] + non_fp8_buckets + else: + # The first N-1 bucket groups. + group_buckets = [bucket] + bucket_groups.append( + _ParamAndGradBucketGroup( + group_buckets, + buffer.ddp_config, + buffer.data_parallel_group, + buffer.data_parallel_world_size, + ) + ) + return bucket_groups diff --git a/megatron/core/distributed/torch_fully_sharded_data_parallel.py b/megatron/core/distributed/torch_fully_sharded_data_parallel.py index 6d2e84e..40a840e 100644 --- a/megatron/core/distributed/torch_fully_sharded_data_parallel.py +++ b/megatron/core/distributed/torch_fully_sharded_data_parallel.py @@ -1,115 +1,123 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from typing import List - -import torch - -try: - from torch.distributed import DeviceMesh - from torch.distributed._composable.fsdp import fully_shard - - HAVE_FSDP = True -except ImportError: - HAVE_FSDP = False - -from .. import parallel_state, tensor_parallel -from ..models.common.embeddings.language_model_embedding import LanguageModelEmbedding -from ..models.common.embeddings.rotary_pos_embedding import RotaryEmbedding -from ..transformer.transformer_config import TransformerConfig -from ..transformer.transformer_layer import TransformerLayer -from .data_parallel_base import _BaseDataParallel - - -class TorchFullyShardedDataParallel(_BaseDataParallel): - """ - Enables fully sharded data parallelism by wrapping the given model with - the PyTorch FSDP2 API: - https://github.com/pytorch/torchtitan/blob/main/docs/fsdp.md - To utilize this class, PyTorch version >= 2.4.0 is required. - - Args: - config: Transformer config object. - module: Underlying model. - sub_modules_to_wrap: List of sub_modules to shard with FSDP. - Parameters within each sub_module will be all-gathered just-in-time. - The default list includes the following submodules derived from the - GPT model architecture: - TransformerLayer (all Transformer layers) - LanguageModelEmbedding (initial embedding layer) - RotaryEmbedding (initial RoPE layer) - tensor_parallel.ColumnParallelLinear (final output layer) - """ - - def __init__( - self, - config: TransformerConfig, - module: torch.nn.Module, - sub_modules_to_wrap: List[torch.nn.Module] = [ - TransformerLayer, - LanguageModelEmbedding, - RotaryEmbedding, - tensor_parallel.ColumnParallelLinear, - ], - **kwargs - ): - - assert ( - HAVE_FSDP - ), 'TorchFullyShardedDataParallel requires PyTorch >= 2.4.0 with FSDP 2 support.' - - super().__init__(config=config, module=module) - self.data_parallel_group = parallel_state.get_data_parallel_group( - with_context_parallel=True - ) - - mesh = DeviceMesh.from_group(self.data_parallel_group, "cuda") - - kwargs = {"mesh": mesh} - - def save_custom_attrs(module): - custom_attrs = {} - for name, param in module.named_parameters(): - attrs = vars(param) - custom_attrs[name] = {k: v for k, v in attrs.items()} - return custom_attrs - - def restore_custom_attrs(module, custom_attrs): - for name, param in module.named_parameters(): - if name in custom_attrs: - for attr_name, attr_value in custom_attrs[name].items(): - setattr(param, attr_name, attr_value) - - # Save the custom attributes on Parameters before FSDP overwrites them. - # See https://github.com/pytorch/pytorch/issues/136929. - attrs = save_custom_attrs(self.module) - - prev_module = None - for sub_module in self.module.modules(): - # Wrap individual submodules to fetch parameters just-in-time rather than - # conservatively fetching all parameters at the start of each iteration. 
- # See https://github.com/pytorch/pytorch/issues/114299. - if any( - isinstance(sub_module, sub_module_to_wrap) - for sub_module_to_wrap in sub_modules_to_wrap - ): - fully_shard(sub_module, **kwargs) - - # Explicitly set the FSDP backward prefetch schedule to prevent activation - # recomputation from disrupting the automatically generated default schedule. - if config.recompute_granularity is not None: - sub_module.set_modules_to_backward_prefetch( - [prev_module] if prev_module else [] - ) - prev_module = sub_module - - # Wrap the root module as required by the FSDP API. - # See https://github.com/pytorch/pytorch/issues/114299. - fully_shard(self.module, **kwargs) - - restore_custom_attrs(self.module, attrs) - - def load_state_dict(self, state_dict, strict=True): - """ - No-op because tensors are already loaded in-place by - `_load_base_checkpoint` with FSDP2.""" - pass +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from typing import List + +import torch + +try: + from torch.distributed import DeviceMesh + from torch.distributed._composable.fsdp import fully_shard + + HAVE_FSDP = True +except ImportError: + HAVE_FSDP = False + +from megatron.core.utils import is_float8tensor + +from .. import parallel_state, tensor_parallel +from ..models.common.embeddings.language_model_embedding import LanguageModelEmbedding +from ..models.common.embeddings.rotary_pos_embedding import RotaryEmbedding +from ..transformer.transformer_config import TransformerConfig +from ..transformer.transformer_layer import TransformerLayer +from .data_parallel_base import _BaseDataParallel +from .distributed_data_parallel_config import DistributedDataParallelConfig + + +class TorchFullyShardedDataParallel(_BaseDataParallel): + """ + Enables fully sharded data parallelism by wrapping the given model with + the PyTorch FSDP2 API: + https://github.com/pytorch/torchtitan/blob/main/docs/fsdp.md + To utilize this class, PyTorch version >= 2.4.0 is required. + + Args: + config: Transformer config object. + ddp_config: DistributedDataParallel config object. + module: Underlying model. + sub_modules_to_wrap: List of sub_modules to shard with FSDP. + Parameters within each sub_module will be all-gathered just-in-time. + The default list includes the following submodules derived from the + GPT model architecture: + TransformerLayer (all Transformer layers) + LanguageModelEmbedding (initial embedding layer) + RotaryEmbedding (initial RoPE layer) + tensor_parallel.ColumnParallelLinear (final output layer) + """ + + def __init__( + self, + config: TransformerConfig, + ddp_config: DistributedDataParallelConfig, + module: torch.nn.Module, + sub_modules_to_wrap: List[torch.nn.Module] = [ + TransformerLayer, + LanguageModelEmbedding, + RotaryEmbedding, + tensor_parallel.ColumnParallelLinear, + ], + ): + + assert ( + HAVE_FSDP + ), 'TorchFullyShardedDataParallel requires PyTorch >= 2.4.0 with FSDP 2 support.' 
+ + super().__init__(config=config, module=module) + self.data_parallel_group = parallel_state.get_data_parallel_group( + with_context_parallel=True + ) + + kwargs = {"mesh": DeviceMesh.from_group(self.data_parallel_group, "cuda")} + + def save_custom_attrs(module): + custom_attrs = {} + for name, param in module.named_parameters(): + attrs = vars(param) + if is_float8tensor(param): + # disable fp8 transpose cache and perform transposing fp8 weights + # at each micro-batch because torch-FSDP doesn't recognize the + # micro-batch id, thus removing unnecessary memory stores + attrs['_fp8_attrs']['transpose_invalid'] = False + del attrs['_fp8_attrs']['transpose'] + custom_attrs[name] = {k: v for k, v in attrs.items()} + return custom_attrs + + def restore_custom_attrs(module, custom_attrs): + for name, param in module.named_parameters(): + if name in custom_attrs: + for attr_name, attr_value in custom_attrs[name].items(): + setattr(param, attr_name, attr_value) + + # Save the custom attributes on Parameters before FSDP overwrites them. + # See https://github.com/pytorch/pytorch/issues/136929. + attrs = save_custom_attrs(self.module) + + prev_module = None + for sub_module in self.module.modules(): + # Wrap individual submodules to fetch parameters just-in-time rather than + # conservatively fetching all parameters at the start of each iteration. + # See https://github.com/pytorch/pytorch/issues/114299. + if any( + isinstance(sub_module, sub_module_to_wrap) + for sub_module_to_wrap in sub_modules_to_wrap + ): + fully_shard(sub_module, **kwargs) + + # Explicitly set the FSDP backward prefetch schedule to prevent activation + # recomputation from disrupting the automatically generated default schedule. + if config.recompute_granularity is not None: + sub_module.set_modules_to_backward_prefetch( + [prev_module] if prev_module else [] + ) + prev_module = sub_module + + # Wrap the root module as required by the FSDP API. + # See https://github.com/pytorch/pytorch/issues/114299. 
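+        # The root wrap also covers any remaining parameters that were not captured by the
+        # per-submodule wraps above.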
+ fully_shard(self.module, **kwargs) + + restore_custom_attrs(self.module, attrs) + + def load_state_dict(self, state_dict, strict=True): + """ + No-op because tensors are already loaded in-place by + `_load_base_checkpoint` with FSDP2.""" + pass diff --git a/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py b/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py index 7a1401f..d3cd7ff 100644 --- a/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py +++ b/megatron/core/export/trtllm/model_to_trllm_mapping/default_conversion_dict.py @@ -21,6 +21,10 @@ DEFAULT_CONVERSION_DICT = { 'decoder.layers.mlp.linear_fc1.bias': TRTLLMLayers.mlp_fc_bias, 'decoder.layers.mlp.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight, 'decoder.layers.mlp.linear_fc2.bias': TRTLLMLayers.mlp_projection_bias, + # EXPERTS + 'decoder.layers.mlp.experts.experts.linear_fc1.weight': TRTLLMLayers.mlp_fc_weight_mixture_of_experts, + 'decoder.layers.mlp.experts.experts.linear_fc2.weight': TRTLLMLayers.mlp_projection_weight_mixture_of_experts, + 'decoder.layers.mlp.router.weight': TRTLLMLayers.mlp_router_weight, # FINAL LAYER NORM 'decoder.final_layernorm.weight': TRTLLMLayers.final_layernorm_weight, 'decoder.final_layernorm.bias': TRTLLMLayers.final_layernorm_bias, diff --git a/megatron/core/extensions/transformer_engine.py b/megatron/core/extensions/transformer_engine.py index a89e272..29914b8 100644 --- a/megatron/core/extensions/transformer_engine.py +++ b/megatron/core/extensions/transformer_engine.py @@ -1,1273 +1,1359 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import dataclasses -import io -import os -import pickle -import warnings -from typing import Callable - -import torch -import transformer_engine as te -from packaging.version import Version as PkgVersion -from torch import Tensor -from torch.nn.parameter import Parameter - -from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding -from megatron.core.model_parallel_config import ModelParallelConfig -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.parallel_state import ( - get_context_parallel_global_ranks, - get_context_parallel_group, - get_expert_data_parallel_rank, - get_expert_model_parallel_rank, - get_expert_model_parallel_world_size, - get_expert_tensor_parallel_group, - get_expert_tensor_parallel_rank, - get_expert_tensor_parallel_world_size, - get_hierarchical_context_parallel_groups, - get_tensor_model_parallel_group, - get_tensor_model_parallel_rank, - get_tensor_model_parallel_world_size, -) -from megatron.core.tensor_parallel import get_cuda_rng_tracker, get_expert_parallel_rng_tracker_name -from megatron.core.tensor_parallel.layers import ( - _initialize_affine_weight_cpu, - set_tensor_model_parallel_attributes, -) -from megatron.core.tensor_parallel.utils import divide -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint -from megatron.core.utils import get_te_version, is_te_min_version - - -def _get_extra_te_kwargs(config: TransformerConfig): - extra_transformer_engine_kwargs = {"params_dtype": config.params_dtype} - - if is_te_min_version("0.12.0"): - if config.use_cpu_initialization: - extra_transformer_engine_kwargs["device"] = 'cpu' - else: - extra_transformer_engine_kwargs["device"] = torch.cuda.current_device() - return 
extra_transformer_engine_kwargs - - -def condition_init_method(config, init_method): - """Condition TE init_method on config.perform_initialization.""" - return init_method if config.perform_initialization else (lambda w: None) - - -class TENorm: - """ - A conditional wrapper to initialize an instance of Transformer-Engine's - `LayerNorm` or `RMSNorm` based on input - """ - - # TODO should we ditch normalization config and just use spec to choose LayerNorm vs RMSNorm? - def __new__(cls, config: TransformerConfig, hidden_size: int, eps: float = 1e-5): - if config.normalization == "LayerNorm": - instance = te.pytorch.LayerNorm( - hidden_size=hidden_size, - eps=eps, - sequence_parallel=config.sequence_parallel, - zero_centered_gamma=config.layernorm_zero_centered_gamma, - **_get_extra_te_kwargs(config), - ) - elif config.normalization == "RMSNorm": - assert hasattr( - te.pytorch, "RMSNorm" - ), "Transformer-Engine >= v0.11 required to use this feature" - instance = te.pytorch.RMSNorm( - hidden_size=hidden_size, - eps=eps, - sequence_parallel=config.sequence_parallel, - zero_centered_gamma=config.layernorm_zero_centered_gamma, - **_get_extra_te_kwargs(config), - ) - else: - raise Exception('Only LayerNorm and RMSNorm are curently supported') - - return instance - - -class TELinear(te.pytorch.Linear): - """ - Wrapper for the Transformer-Engine's `Linear` layer. - - Note that if Megatron's parallel_state has not been initialized - yet, the tp_group passed to TE will be None and must be set later - via set_tensor_parallel_group(). - """ - - def __init__( - self, - input_size: int, - output_size: int, - *, - parallel_mode: str, - config: ModelParallelConfig, - init_method: Callable, - bias: bool, - skip_bias_add: bool, - skip_weight_param_allocation: bool, - tp_comm_buffer_name: str = None, - is_expert: bool = False, - ): - self.config = config - - # TE returns a zero length Tensor when bias=False and - # return_bias=True, but we prefer None. So in that case we - # tell TE to not return the bias, and return None - # ourselves. This way our forward always returns two values - # and we don't have to deal with the zero length Tensor. - self.te_return_bias = skip_bias_add and bias - self.is_first_microbatch = True - self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache - if skip_weight_param_allocation: - raise ValueError( - 'Transformer Engine linear layers do not support skip_weight_param_allocation' - ) - - extra_kwargs = _get_extra_te_kwargs(config) - - if is_te_min_version("0.8.0"): - if self.config.tp_comm_overlap: - if is_te_min_version("1.5.0"): - # Use old overlap flags if they were supplied instead - extra_kwargs["ub_overlap_ag"] = ( - self.config.tp_comm_overlap_ag - if hasattr(self.config, "tp_comm_overlap_ag") - else self.config.tp_comm_split_ag or self.config.tp_comm_atomic_ag - ) - extra_kwargs["ub_overlap_rs"] = ( - self.config.tp_comm_overlap_rs - if hasattr(self.config, "tp_comm_overlap_rs") - else self.config.tp_comm_split_rs or self.config.tp_comm_atomic_rs - ) - # Disable ub overlap for experts. - if is_expert: - extra_kwargs["ub_overlap_ag"] = False - extra_kwargs["ub_overlap_rs"] = False - else: - extra_kwargs["ub_split_ag"] = self.config.tp_comm_split_ag - extra_kwargs["ub_atomic_gemm_ag"] = self.config.tp_comm_atomic_ag - extra_kwargs["ub_split_rs"] = self.config.tp_comm_split_rs - extra_kwargs["ub_atomic_gemm_rs"] = self.config.tp_comm_atomic_rs - # Disable ub overlap for experts. 
- if is_expert: - extra_kwargs["ub_split_ag"] = False - extra_kwargs["ub_atomic_gemm_ag"] = False - extra_kwargs["ub_split_rs"] = False - extra_kwargs["ub_atomic_gemm_rs"] = False - if is_te_min_version("1.0.0", check_equality=False): - assert ( - tp_comm_buffer_name is not None - ), "Buffer name should be set to configure communication overlap settings" - extra_kwargs["ub_name"] = tp_comm_buffer_name - - self.expert_parallel = self.config.expert_model_parallel_size > 1 - if is_expert: - rng_tracker_name = get_expert_parallel_rng_tracker_name() - else: - rng_tracker_name = None - if is_te_min_version("1.7.0"): - extra_kwargs["rng_tracker_name"] = rng_tracker_name - - # Disable communications in TE when using TP or EP by making TE agnostic of model parallel. - if is_expert: - tp_group = get_expert_tensor_parallel_group(check_initialized=False) - tp_size = get_expert_tensor_parallel_world_size() - else: - tp_group = get_tensor_model_parallel_group(check_initialized=False) - tp_size = get_tensor_model_parallel_world_size() - explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) - - if explicit_expert_comm: - if parallel_mode == "column": - output_size = divide(output_size, tp_size) - elif parallel_mode == "row": - input_size = divide(input_size, tp_size) - parallel_mode = None - tp_size = 1 - tp_group = None - - super().__init__( - in_features=input_size, - out_features=output_size, - sequence_parallel=self.config.sequence_parallel, - fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, - tp_group=tp_group, - tp_size=tp_size, - get_rng_state_tracker=( - get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None - ), - init_method=condition_init_method(config, init_method), - bias=bias, - return_bias=self.te_return_bias, - parallel_mode=parallel_mode, - **extra_kwargs, - ) - - for param in self.parameters(): - setattr(param, 'allreduce', not (is_expert and self.expert_parallel)) - - def forward(self, x): - """Forward.""" - _is_first_microbatch = ( - None if self.disable_parameter_transpose_cache else self.is_first_microbatch - ) - out = super().forward(x, is_first_microbatch=_is_first_microbatch) - self.is_first_microbatch = False - - # TE only returns a tuple when return_bias is True, otherwise - # it returns a single Tensor, we always want to return two - # values regardless of the arguments. - if self.te_return_bias: - return out - return out, None - - -class TELayerNormColumnParallelLinear(te.pytorch.LayerNormLinear): - """ - Wrapper for the Transformer-Engine's `LayerNormLinear` layer that combines - layernorm and linear layers - """ - - def __init__( - self, - input_size: int, - output_size: int, - *, - config: TransformerConfig, - init_method: Callable, - gather_output: bool, - bias: bool, - skip_bias_add: bool, - is_expert: bool, - skip_weight_param_allocation: bool = False, - tp_comm_buffer_name: str = None, - ): - self.config = config - - if gather_output: - raise ValueError('Transformer Engine linear layers do not support gather_output = True') - - if is_expert: - raise ValueError('Transformer Engine linear layers do not yet support MoE') - - if skip_weight_param_allocation: - raise ValueError( - 'Transformer Engine linear layers do not support skip_weight_param_allocation' - ) - - # TE returns a zero length Tensor when bias=False and - # return_bias=True, but we prefer None. So in that case we - # tell TE to not return the bias, and return None - # ourselves. 
This way our forward always returns two values - # and we don't have to deal with the zero length Tensor. - self.te_return_bias = skip_bias_add and bias - self.is_first_microbatch = True - self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache - extra_kwargs = _get_extra_te_kwargs(config) - - # Only Transformer-Engine version >= 0.11.0 supports `RMSNorm` - if is_te_min_version("0.11.0"): - extra_kwargs["normalization"] = self.config.normalization - elif self.config.normalization != "LayerNorm": - te_version = get_te_version() - raise ValueError( - f"Transformer Engine v{te_version} does not support {self.config.normalization}." - ) - - if is_te_min_version("0.8.0"): - if self.config.tp_comm_overlap: - extra_kwargs["ub_bulk_wgrad"] = self.config.tp_comm_bulk_wgrad - extra_kwargs["ub_bulk_dgrad"] = self.config.tp_comm_bulk_dgrad - if is_te_min_version("1.5.0", check_equality=False): - # Use old overlap flags if they were supplied instead - extra_kwargs["ub_overlap_ag"] = ( - self.config.tp_comm_overlap_ag - if hasattr(self.config, "tp_comm_overlap_ag") - else self.config.tp_comm_split_ag or self.config.tp_comm_atomic_ag - ) - if is_te_min_version("1.6.0.dev0", check_equality=False): - extra_kwargs["ub_overlap_rs_dgrad"] = ( - self.config.tp_comm_overlap_rs_dgrad - if hasattr(self.config, "tp_comm_overlap_rs_dgrad") - else False - ) - if tp_comm_buffer_name == 'qkv' and self.config.tp_comm_overlap_disable_qkv: - extra_kwargs["ub_overlap_ag"] = False - extra_kwargs["ub_overlap_rs_dgrad"] = False - - if tp_comm_buffer_name == 'fc1' and self.config.tp_comm_overlap_disable_fc1: - extra_kwargs["ub_overlap_ag"] = False - extra_kwargs["ub_overlap_rs_dgrad"] = False - else: - extra_kwargs["ub_atomic_gemm_ag"] = self.config.tp_comm_atomic_ag - extra_kwargs["ub_split_ag"] = self.config.tp_comm_split_ag - if is_te_min_version("1.0.0", check_equality=False): - assert ( - tp_comm_buffer_name is not None - ), "Buffer name should be set to configure communication overlap settings" - extra_kwargs["ub_name"] = tp_comm_buffer_name - - super().__init__( - in_features=input_size, - out_features=output_size, - eps=self.config.layernorm_epsilon, - sequence_parallel=self.config.sequence_parallel, - fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, - tp_group=get_tensor_model_parallel_group(check_initialized=False), - tp_size=self.config.tensor_model_parallel_size, - get_rng_state_tracker=( - get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None - ), - init_method=( - condition_init_method(config, init_method) - if not config.use_cpu_initialization - else lambda w: None - ), - bias=bias, - return_bias=self.te_return_bias, - parallel_mode="column", - return_layernorm_output=False, - zero_centered_gamma=self.config.layernorm_zero_centered_gamma, - **extra_kwargs, - ) - - world_size = get_tensor_model_parallel_world_size() - rank = get_tensor_model_parallel_rank() - - if config.use_cpu_initialization: - output_size_per_partition = divide(output_size, world_size) - _ = _initialize_affine_weight_cpu( - self.weight, - output_size, - input_size, - output_size_per_partition, - 0, - init_method=condition_init_method(config, init_method), - stride=1, - return_master_weight=False, - rank=rank, - world_size=world_size, - skip_set_tensor_parallel_attributes=True, - ) - if bias: - self.bias = Parameter( - torch.empty(output_size_per_partition, dtype=config.params_dtype) - ) - set_tensor_model_parallel_attributes(self.bias, True, 0, 1) - with torch.no_grad(): - 
self.bias.zero_() - setattr(self.bias, 'allreduce', True) - - def forward(self, x): - """Forward.""" - _is_first_microbatch = ( - None if self.disable_parameter_transpose_cache else self.is_first_microbatch - ) - out = super().forward(x, is_first_microbatch=_is_first_microbatch) - self.is_first_microbatch = False - - # TE only returns a tuple when return_bias is True, otherwise - # it returns a single Tensor, we always want to return two - # values regardless of the arguments. - if self.te_return_bias: - return out - return out, None - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """Sharding along axis 0, bias sharded""" - state_dict = self.state_dict(prefix='', keep_vars=True) - return make_sharded_tensors_for_checkpoint( - state_dict, prefix, {'weight': 0, 'bias': 0}, sharded_offsets - ) - - -class TEColumnParallelLinear(TELinear): - """ - Wrapper for the Transformer-Engine's `Linear` layer but specialized similar - to megatron's `ColumnParallelLinear` layer. - """ - - def __init__( - self, - input_size: int, - output_size: int, - *, - config: ModelParallelConfig, - init_method: Callable, - gather_output: bool, - bias: bool, - skip_bias_add: bool, - is_expert: bool, - skip_weight_param_allocation: bool = False, - tp_comm_buffer_name: str = None, - ): - if gather_output: - raise ValueError('Transformer Engine linear layers do not support gather_output = True') - - super().__init__( - input_size=input_size, - output_size=output_size, - parallel_mode="column", - config=config, - init_method=( - condition_init_method(config, init_method) - if not config.use_cpu_initialization - else lambda w: None - ), - bias=bias, - skip_bias_add=skip_bias_add, - is_expert=is_expert, - skip_weight_param_allocation=skip_weight_param_allocation, - tp_comm_buffer_name=tp_comm_buffer_name, - ) - - if config.use_cpu_initialization: - if is_expert: - world_size = get_expert_tensor_parallel_world_size() - rank = get_expert_tensor_parallel_rank() - else: - world_size = get_tensor_model_parallel_world_size() - rank = get_tensor_model_parallel_rank() - output_size_per_partition = divide(output_size, world_size) - _ = _initialize_affine_weight_cpu( - self.weight, - output_size, - input_size, - output_size_per_partition, - 0, - init_method=condition_init_method(config, init_method), - stride=1, - return_master_weight=False, - rank=rank, - world_size=world_size, - skip_set_tensor_parallel_attributes=True, - ) - if bias: - self.bias = Parameter( - torch.empty(output_size_per_partition, dtype=config.params_dtype) - ) - set_tensor_model_parallel_attributes(self.bias, True, 0, 1) - with torch.no_grad(): - self.bias.zero_() - setattr(self.bias, 'allreduce', True) - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """Sharding along axis 0, bias sharded""" - state_dict = self.state_dict(prefix='', keep_vars=True) - return make_sharded_tensors_for_checkpoint( - state_dict, prefix, {'weight': 0, 'bias': 0}, sharded_offsets - ) - - -class TERowParallelLinear(TELinear): - """ - Wrapper for the Transformer-Engine's `Linear` layer but specialized similar - to megatron's `RowParallelLinear` layer. 
- """ - - def __init__( - self, - input_size: int, - output_size: int, - *, - config: ModelParallelConfig, - init_method: Callable, - bias: bool, - input_is_parallel: bool, - skip_bias_add: bool, - is_expert: bool, - tp_comm_buffer_name: str = None, - ): - if not input_is_parallel: - raise ValueError( - "Transformer Engine linear layers do not support input_is_parallel = False" - ) - - super().__init__( - input_size=input_size, - output_size=output_size, - parallel_mode="row", - config=config, - init_method=( - condition_init_method(config, init_method) - if not config.use_cpu_initialization - else lambda w: None - ), - bias=bias, - skip_bias_add=skip_bias_add, - skip_weight_param_allocation=False, # We don't currently use this for row parallel layers # pylint: disable=line-too-long - is_expert=is_expert, - tp_comm_buffer_name=tp_comm_buffer_name, - ) - if config.use_cpu_initialization: - if is_expert: - world_size = get_expert_tensor_parallel_world_size() - rank = get_expert_tensor_parallel_rank() - else: - world_size = get_tensor_model_parallel_world_size() - rank = get_tensor_model_parallel_rank() - input_size_per_partition = divide(input_size, world_size) - self.master_weight = _initialize_affine_weight_cpu( - self.weight, - output_size, - input_size, - input_size_per_partition, - 1, - init_method=condition_init_method(config, init_method), - stride=1, - return_master_weight=False, - params_dtype=config.params_dtype, - rank=rank, - world_size=world_size, - skip_set_tensor_parallel_attributes=True, - ) - if bias: - self.bias = Parameter(torch.empty(output_size, dtype=config.params_dtype)) - # Always initialize bias to zero. - with torch.no_grad(): - self.bias.zero_() - setattr(self.bias, 'allreduce', True) - setattr(self.bias, 'sequence_parallel', config.sequence_parallel) - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """Sharding along axis 1, bias not sharded""" - state_dict = self.state_dict(prefix='', keep_vars=True) - return make_sharded_tensors_for_checkpoint( - state_dict, prefix, {'weight': 1}, sharded_offsets - ) - - -class TEDotProductAttention(te.pytorch.DotProductAttention): - """ - Wrapper for the Transformer-Engine's `DotProductAttention` layer that also - has "flash attention" enabled. - - Note that if Megatron's parallel_state has not been initialized yet, the - tp_group and cp_group passed to TE will be None and must be set later - via set_tensor_parallel_group() and set_context_parallel_group(). - """ - - cp_stream: torch.cuda.Stream = None - - def __init__( - self, - config: TransformerConfig, - layer_number: int, - attn_mask_type: AttnMaskType, - attention_type: str, - attention_dropout: float = None, - softmax_scale: float = None, - k_channels: int = None, - v_channels: int = None, - cp_comm_type: str = "p2p", - ): - self.config = config - self.te_forward_mask_type = False - self.qkv_format: str = 'sbhd' - - if self.config.apply_query_key_layer_scaling != bool( - int(os.getenv('NVTE_APPLY_QK_LAYER_SCALING', '0')) - ): - raise ValueError( - f"apply_query_key_layer_scaling is {self.config.apply_query_key_layer_scaling} " - f"but environment variable NVTE_APPLY_QK_LAYER_SCALING is " - f"{os.getenv('NVTE_APPLY_QK_LAYER_SCALING')}. Transformer Engine does not support " - f"setting query key layer scaling via argument, so these two must match." 
- ) - - extra_kwargs = {} - if is_te_min_version("0.11.0"): - extra_kwargs["num_gqa_groups"] = self.config.num_query_groups - elif self.config.num_query_groups != self.config.num_attention_heads: - raise ValueError( - f"Transformer Engine v{get_te_version()} does not support Grouped Query Attention, " - f"use a newer version of Transformer Engine. " - f"(num_query_groups ({self.config.num_query_groups}) != " - f"num_attention_heads ({self.config.num_attention_heads}))" - ) - - if is_te_min_version("0.10.0"): - extra_kwargs["attention_type"] = attention_type - # older version don't need attention_type - - if is_te_min_version("0.12.0", check_equality=False): - self.te_forward_mask_type = True - - # This check is important as CP config can be disabled while having a valid CP group - # Example - Disabling CP for encoder while a valid CP group exists for decoder - if self.config.context_parallel_size > 1: - assert is_te_min_version( - "1.0.0" - ), "Only Transformer-Engine version >= 1.0.0 supports context parallelism!" - if getattr(TEDotProductAttention, "cp_stream") is None: - TEDotProductAttention.cp_stream = torch.cuda.Stream() - extra_kwargs["cp_group"] = get_context_parallel_group(check_initialized=False) - extra_kwargs["cp_global_ranks"] = get_context_parallel_global_ranks( - check_initialized=False - ) - extra_kwargs["cp_stream"] = TEDotProductAttention.cp_stream - if is_te_min_version("1.10.0"): - if cp_comm_type is None: - extra_kwargs["cp_comm_type"] = "p2p" - elif cp_comm_type == "a2a+p2p": - assert is_te_min_version("1.12.0"), ( - f"Transformer-Engine v{get_te_version()} must be >= 1.12.0 to support" - "hierarchical cp commucation." - ) - extra_kwargs["cp_comm_type"] = "a2a+p2p" - extra_kwargs["cp_group"] = get_hierarchical_context_parallel_groups( - check_initialized=False - ) - else: - extra_kwargs["cp_comm_type"] = cp_comm_type - - if self.config.deterministic_mode: - if int(os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO", "1")) != 0: - raise RuntimeError( - "deterministic_mode is on and we are using DotProductAttention from " - "Transformer Engine, but NVTE_ALLOW_NONDETERMINISTIC_ALGO is not 0. " - f"Currently set to: {os.getenv('NVTE_ALLOW_NONDETERMINISTIC_ALGO', 'not set')}." - ) - - if config.window_size is not None: - # Check version - assert is_te_min_version("1.2.0"), ( - f"Transformer-Engine v{get_te_version()} must be >= 1.2.0 to support" - "sliding window attention." - ) - extra_kwargs['window_size'] = config.window_size - - if is_te_min_version("1.10.0"): - # TE 1.10.0 introduces the ability to set the different k and v channels - kv_channels = ( - (k_channels, v_channels) - if k_channels is not None and v_channels is not None - else self.config.kv_channels - ) - extra_kwargs['softmax_scale'] = softmax_scale - else: - kv_channels = self.config.kv_channels - - self.kept_packed_seq_params = set( - field.name for field in dataclasses.fields(PackedSeqParams) - ) - if get_te_version() < PkgVersion("1.3.0"): - # TE 1.3.0 introduces precomputing max_seqlen to remove unnecessary kernels and D2H - # copies (#555) - # These two arguments did not exist prior to 1.3.0 - self.kept_packed_seq_params.discard("max_seqlen_q") - self.kept_packed_seq_params.discard("max_seqlen_kv") - - if get_te_version() < PkgVersion("1.10.0"): - # TE 1.8.0 introduces cu_seqlens_padded which is the cu_seqlens with paddings counted - # in each individual sequence in THD format dataset - # These two arguments did not exist prior to 1.8.0. 
Full support added in 1.10.0 (#1012) - self.kept_packed_seq_params.discard("cu_seqlens_q_padded") - self.kept_packed_seq_params.discard("cu_seqlens_kv_padded") - - super().__init__( - num_attention_heads=self.config.num_attention_heads, - kv_channels=kv_channels, - attention_dropout=( - self.config.attention_dropout if attention_dropout is None else attention_dropout - ), - attn_mask_type=attn_mask_type.name, - sequence_parallel=self.config.sequence_parallel, - tp_size=self.config.tensor_model_parallel_size, - get_rng_state_tracker=( - get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None - ), - tp_group=get_tensor_model_parallel_group(check_initialized=False), - layer_number=layer_number, - **extra_kwargs, - ) - - def forward( - self, - query: Tensor, - key: Tensor, - value: Tensor, - attention_mask: Tensor, - attn_mask_type: AttnMaskType, - attention_bias: Tensor = None, - packed_seq_params: PackedSeqParams = None, - ): - """Forward.""" - packed_seq_kwargs = ( - {key: getattr(packed_seq_params, key) for key in self.kept_packed_seq_params} - if packed_seq_params is not None - else {} - ) - # overwrite self.qkv_format depending on self.config.apply_rope_fusion, which can be set - # after init - if self.config.apply_rope_fusion and is_te_min_version("0.13.0", check_equality=False): - self.qkv_format = 'bshd' - - qkv_format = packed_seq_kwargs.get('qkv_format', self.qkv_format) - - # WAR for peak memory usage. - # See https://gitlab-master.nvidia.com/ADLR/megatron-lm/-/merge_requests/2388 - if self.config.apply_rope_fusion and qkv_format == 'bshd': - query, key, value = [x.transpose(0, 1).contiguous() for x in (query, key, value)] - # In PyTorch, the following two tensors are in fact the same: - # Tensor with shape (1, S, H, D) and stride (S*H*D, H*D, D, 1) - # Tensor with shape (1, S, H, D) and stride (H*D, H*D, D, 1) - # Stride for a dimension that is 1 has no meaning, so tensors created two different ways - # can have same shape but different strides. - # We unify them to the first one to pass the stride check in TE - if value.shape == key.shape and value.shape[0] == 1 and value.stride() != key.stride(): - value = value.as_strided(value.shape, key.stride()) - - attention_bias_kwargs = {} - if attention_bias is not None: - assert is_te_min_version("1.2.0"), ( - f"Transformer-Engine v{get_te_version()} must be >= 1.2.0 to support" - "`attention_bias`." - ) - attention_bias_kwargs = dict( - core_attention_bias_type='post_scale_bias', core_attention_bias=attention_bias - ) - - if self.te_forward_mask_type: - if qkv_format == 'thd' and is_te_min_version("1.7.0"): - # thd format uses flash attention with cuDNN kernel which requires is_padding=True, - # so the only acceptable mask types are `padding_causal` and `padding`. These do not - # necessarily indicate there are padded tokens in the sequence. 
- if attn_mask_type == AttnMaskType.causal: - attn_mask_type = AttnMaskType.padding_causal - elif attn_mask_type == AttnMaskType.no_mask: - attn_mask_type = AttnMaskType.padding - core_attn_out = super().forward( - query, - key, - value, - attention_mask, - attn_mask_type=attn_mask_type.name, - **attention_bias_kwargs, - **packed_seq_kwargs, - ) - else: - core_attn_out = super().forward( - query, key, value, attention_mask, **attention_bias_kwargs, **packed_seq_kwargs - ) - - if self.config.apply_rope_fusion and qkv_format == 'bshd': - return core_attn_out.transpose(0, 1) - else: - return core_attn_out - - -if is_te_min_version("1.9.0.dev0"): - - class TEGroupedLinear(te.pytorch.BatchLinear if int(os.getenv("GROUPED_GEMM_BatchLinear", '0')) else te.pytorch.GroupedLinear): - """ - Wrapper for the Transformer-Engine's `GroupedLinear` layer. - - Note that if Megatron's parallel_state has not been initialized - yet, the tp_group passed to TE will be None and must be set later - via set_tensor_parallel_group(). - """ - - def __init__( - self, - num_gemms: int, - input_size: int, - output_size: int, - *, - parallel_mode: str, - config: ModelParallelConfig, - init_method: Callable, - bias: bool, - skip_bias_add: bool, - is_expert: bool = False, - tp_comm_buffer_name: str = None, - ): - self.config = config - - # TE returns a zero length Tensor when bias=False and - # return_bias=True, but we prefer None. So in that case we - # tell TE to not return the bias, and return None - # ourselves. This way our forward always returns two values - # and we don't have to deal with the zero length Tensor. - self.te_return_bias = skip_bias_add and bias - self.is_first_microbatch = True - self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache - - extra_kwargs = _get_extra_te_kwargs(config) - extra_kwargs["ub_name"] = tp_comm_buffer_name - - self.expert_parallel = self.config.expert_model_parallel_size > 1 - if is_expert: - extra_kwargs["rng_tracker_name"] = get_expert_parallel_rng_tracker_name() - - # The comms between TP and EP group is explicitly handled by MoE token dispatcher. - # So we disable comms by making TE agnostic of model parallel. 
- if is_expert: - tp_group = get_expert_tensor_parallel_group(check_initialized=False) - tp_size = get_expert_tensor_parallel_world_size() - else: - tp_group = get_tensor_model_parallel_group(check_initialized=False) - tp_size = get_tensor_model_parallel_world_size() - self.explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) - - if self.explicit_expert_comm: - if parallel_mode == "column": - output_size = divide(output_size, tp_size) - elif parallel_mode == "row": - input_size = divide(input_size, tp_size) - parallel_mode = None - tp_size = 1 - tp_group = None - - super().__init__( - num_gemms=num_gemms, - in_features=input_size, - out_features=output_size, - sequence_parallel=self.config.sequence_parallel, - fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, - tp_group=tp_group, - tp_size=tp_size, - get_rng_state_tracker=( - get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None - ), - init_method=condition_init_method(config, init_method), - bias=bias, - return_bias=self.te_return_bias, - parallel_mode=parallel_mode, - **extra_kwargs, - ) - - for param in self.parameters(): - setattr(param, 'allreduce', not (is_expert and self.expert_parallel)) - - def merge_extra_states( - self, - state_dict, - prefix, - local_metadata, - strict, - missing_keys, - unexpected_keys, - error_msgs, - ): - """ - Merge multiple "_extra_state" into one. - """ - self.init_fp8_metadata(num_gemms=self.num_gemms) - fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration - - try: - state_list = [ - state_dict.pop(f"{prefix}_extra_state{i}") for i in range(1, self.num_gemms) - ] - except KeyError: - # "_extra_state{i}" only exists for dist-ckpt. Return for torch native ckpt. - return - - if not fp8_checkpoint: - return - state_list = [state_dict.pop(f"{prefix}_extra_state")] + state_list - state_list = [self._decode_extra_state(state) for state in state_list] - extra_fp8_variables = state_list[0]['extra_fp8_variables'] - extra_fp8_variables['num_gemms'] = self.num_gemms - extra_state = { - "scale_fwd": torch.cat( - [state['scale_fwd'].view(-1, 1) for state in state_list], dim=1 - ).view(-1), - "scale_inv_fwd": torch.cat( - [state['scale_inv_fwd'].view(-1, 1) for state in state_list], dim=1 - ).view(-1), - "amax_history_fwd": torch.cat( - [state['amax_history_fwd'].view(-1, 1) for state in state_list], dim=1 - ).view(self.fp8_meta["recipe"].amax_history_len, -1), - "scale_bwd": torch.cat( - [state['scale_bwd'].view(-1, 1) for state in state_list], dim=1 - ).view(-1), - "scale_inv_bwd": torch.cat( - [state['scale_inv_bwd'].view(-1, 1) for state in state_list], dim=1 - ).view(-1), - "amax_history_bwd": torch.cat( - [state['amax_history_bwd'].view(-1, 1) for state in state_list], dim=1 - ).view(self.fp8_meta["recipe"].amax_history_len, -1), - "extra_fp8_variables": extra_fp8_variables, - } - state_dict[f"{prefix}_extra_state"] = self._encode_extra_state(extra_state) - - self._register_load_state_dict_pre_hook(merge_extra_states, with_module=True) - - def forward(self, x, m_splits): - """Forward.""" - _is_first_microbatch = ( - None if self.disable_parameter_transpose_cache else self.is_first_microbatch - ) - out = super().forward(x, m_splits, is_first_microbatch=_is_first_microbatch) - self.is_first_microbatch = False - - # TE only returns a tuple when return_bias is True, otherwise - # it returns a single Tensor, we always want to return two - # values regardless of the arguments. 
- if self.te_return_bias: - return out - return out, None - - def _encode_extra_state(self, state): - state_serialized = io.BytesIO() - torch.save(state, state_serialized) - return state_serialized - - def _decode_extra_state(self, state): - if isinstance(state, torch.Tensor): - return pickle.loads(state.detach().cpu().numpy().tobytes()) - elif isinstance(state, io.BytesIO): - state.seek(0) - return torch.load(state, map_location="cuda") - else: - raise RuntimeError("Unsupported checkpoint format.") - - def _split_extra_state(self, state): - fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration - - if not fp8_checkpoint: - return [state] * self.num_gemms - - state = self._decode_extra_state(state) - extra_states = [] - extra_fp8_variables = state['extra_fp8_variables'] - extra_fp8_variables['num_gemms'] = 1 - for gemm_idx in range(self.num_gemms): - tmp_state = { - "scale_fwd": state['scale_fwd'].view(3, -1)[:, gemm_idx], - "scale_inv_fwd": state['scale_inv_fwd'].view(3, -1)[:, gemm_idx], - "amax_history_fwd": state['amax_history_fwd'].view( - self.fp8_meta["recipe"].amax_history_len, 3, -1 - )[:, :, gemm_idx], - "scale_bwd": state['scale_bwd'].view(2, -1)[:, gemm_idx], - "scale_inv_bwd": state['scale_inv_bwd'].view(2, -1)[:, gemm_idx], - "amax_history_bwd": state['amax_history_bwd'].view( - self.fp8_meta["recipe"].amax_history_len, 2, -1 - )[:, :, gemm_idx], - "extra_fp8_variables": extra_fp8_variables, - } - extra_states.append(self._encode_extra_state(tmp_state)) - return extra_states - - def _sharded_state_dict_grouped( - self, tp_axis_map, prefix='', sharded_offsets=(), metadata=None - ): - """ - prefix should be module_name to make keys identical to sequetial ones. - """ - sharded_state_dict = {} - full_state_dict = self.state_dict(prefix='', keep_vars=True) - num_global_experts = get_expert_model_parallel_world_size() * self.num_gemms - local_expert_indices_offset = get_expert_model_parallel_rank() * self.num_gemms - ep_axis = len(sharded_offsets) - extra_states = self._split_extra_state(full_state_dict['_extra_state']) - for gemm_idx in range(self.num_gemms): - state_dict = { - f'{gemm_idx}.weight': full_state_dict[f'weight{gemm_idx}'], - f'{gemm_idx}._extra_state': extra_states[gemm_idx], - } - if self.use_bias: - state_dict[f'{gemm_idx}.bias'] = full_state_dict[f'bias{gemm_idx}'] - sub_sd = make_sharded_tensors_for_checkpoint( - state_dict, - '', - tp_axis_map, - ( - *sharded_offsets, - (ep_axis, local_expert_indices_offset + gemm_idx, num_global_experts), - ), - ) - # Remove expert layers indexing from sharded keys - replace_prefix_for_sharding(sub_sd, f'{gemm_idx}.', prefix) - sharded_state_dict.update( - { - f'{prefix}weight{gemm_idx}': sub_sd[f'{gemm_idx}.weight'], - f'{prefix}_extra_state{"" if gemm_idx == 0 else gemm_idx}': sub_sd[ - f'{gemm_idx}._extra_state' - ], - } - ) - if self.use_bias: - sharded_state_dict[f'{prefix}bias{gemm_idx}'] = sub_sd[f'{gemm_idx}.bias'] - # Adjust replica ids - replication along DP modulo EP - for k, sh_ten in sharded_state_dict.items(): - replica_id = sh_ten.replica_id - assert ( - len(replica_id) == 3 - ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' - sh_ten.replica_id = (*replica_id[:2], get_expert_data_parallel_rank()) - return sharded_state_dict - - class TEColumnParallelGroupedLinear(TEGroupedLinear): - """ - Wrapper for the Transformer-Engine's `GroupedLinear` layer but specialized - to column-parallel style. 
- """ - - def __init__( - self, - num_gemms: int, - input_size: int, - output_size: int, - *, - config: ModelParallelConfig, - init_method: Callable, - bias: bool, - skip_bias_add: bool, - is_expert: bool, - tp_comm_buffer_name: str = None, - ): - - super().__init__( - num_gemms=num_gemms, - input_size=input_size, - output_size=output_size, - parallel_mode="column", - config=config, - init_method=condition_init_method(config, init_method), - bias=bias, - skip_bias_add=skip_bias_add, - is_expert=is_expert, - tp_comm_buffer_name=tp_comm_buffer_name, - ) - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """ - For each gemm, sharding along axis 0, bias sharded. - Assume sharded_offsets[-1] is the expert parallel offset. - """ - tp_axis_map = {} - for gemm_idx in range(self.num_gemms): - tp_axis_map.update({f'{gemm_idx}.weight': 0, f'{gemm_idx}.bias': 0}) - return super()._sharded_state_dict_grouped( - tp_axis_map, prefix, sharded_offsets, metadata - ) - - class TERowParallelGroupedLinear(TEGroupedLinear): - """ - Wrapper for the Transformer-Engine's `GroupedLinear` layer but specialized - to row-parallel style. - """ - - def __init__( - self, - num_gemms: int, - input_size: int, - output_size: int, - *, - config: ModelParallelConfig, - init_method: Callable, - bias: bool, - skip_bias_add: bool, - is_expert: bool, - tp_comm_buffer_name: str = None, - ): - - super().__init__( - num_gemms=num_gemms, - input_size=input_size, - output_size=output_size, - parallel_mode="row", - config=config, - init_method=condition_init_method(config, init_method), - bias=bias, - skip_bias_add=skip_bias_add, - is_expert=is_expert, - tp_comm_buffer_name=tp_comm_buffer_name, - ) - - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """ - For each gemm, sharding along axis 1, bias not sharded. - Assume sharded_offsets[-1] is the expert parallel offset. - """ - tp_axis_map = {f'{gemm_idx}.weight': 1 for gemm_idx in range(self.num_gemms)} - return super()._sharded_state_dict_grouped( - tp_axis_map, prefix, sharded_offsets, metadata - ) - -else: - - TEGroupedLinear = None - TEColumnParallelGroupedLinear = None - TERowParallelGroupedLinear = None - - -class TEDelayedScaling(te.common.recipe.DelayedScaling): - """ - Wrapper for the Transformer-Engine's `DelayedScaling` layer. 
- """ - - def __init__( - self, - config: ModelParallelConfig, - fp8_format: int, - override_linear_precision: tuple = (False, False, False), - ): - extra_kwargs = _get_extra_te_kwargs(config) - if is_te_min_version("1.6.0.dev0"): - extra_kwargs["fp8_dpa"] = config.fp8_dot_product_attention - extra_kwargs["fp8_mha"] = config.fp8_multi_head_attention - if get_te_version() < PkgVersion("1.8.0"): - extra_kwargs["interval"] = config.fp8_interval - elif config.fp8_interval != 1: - warnings.warn("fp8_interval is deprecated and ignored from Transformer-Engine v1.8.0.") - - super().__init__( - margin=config.fp8_margin, - fp8_format=fp8_format, - amax_compute_algo=config.fp8_amax_compute_algo, - amax_history_len=config.fp8_amax_history_len, - override_linear_precision=override_linear_precision, - **extra_kwargs, - ) - - -class TECudaRNGStatesTracker(te.pytorch.distributed.CudaRNGStatesTracker): - """Wraps TransformerEngine's CudaRNGStatesTracker so that it is - interchangeable with Megatron's RNG tracker""" - - def is_initialized(self): - """Checks if the internal RNG state has been set wirth set_states().""" - return self._is_initialized - - def reset(self): - """Reset the internal RNG state.""" - super().reset() - self._is_initialized = False - - def set_states(self, states): - """Set the internal RNG state.""" - super().set_states(states) - self._is_initialized = True - - def add(self, name, seed): - """Track the rng state.""" - super().add(name, seed) - self._is_initialized = True - - -def te_checkpoint( - forward_func, - distribute_saved_activations, - get_rng_state_tracker, - tp_group, - hidden_states, - attention_mask, - context, - context_mask, - rotary_pos_emb, -): - """Checkpointing with Transformer-Engine.""" - from transformer_engine.pytorch.distributed import checkpoint - - if is_te_min_version("1.5.0"): - return checkpoint( - forward_func, - hidden_states, - attention_mask, - context, - context_mask, - rotary_pos_emb, - distribute_saved_activations=distribute_saved_activations, - get_rng_state_tracker=get_rng_state_tracker, - tp_group=tp_group, - ) - else: - return checkpoint( - forward_func, - distribute_saved_activations, - get_rng_state_tracker, - tp_group, - hidden_states, - attention_mask, - context, - context_mask, - rotary_pos_emb, - ) - - -try: - - from transformer_engine.pytorch.attention import _SplitAlongDim - - SplitAlongDim = _SplitAlongDim.apply - -except ImportError: - - SplitAlongDim = None - -try: - - from transformer_engine.pytorch.cpu_offload import ( - get_cpu_offload_context as _get_cpu_offload_context, - ) - - def get_cpu_offload_context( - enabled, num_layers, model_layers, activation_offloading, weight_offloading - ): - """Get CPU offload context and sync function.""" - if is_te_min_version("1.10.0.dev0"): - context, sync_func = _get_cpu_offload_context( - enabled, num_layers, model_layers, activation_offloading, weight_offloading - ) - else: - context, sync_func = _get_cpu_offload_context( - enabled, num_layers, activation_offloading, weight_offloading - ) - - return context, sync_func - -except ImportError: - - get_cpu_offload_context = None - -try: - - from transformer_engine.pytorch.attention import FusedRoPEFunc - - def fused_apply_rotary_pos_emb( - t: torch.Tensor, freqs: torch.Tensor, transpose_output_memory: bool = False - ) -> torch.Tensor: - """Apply rotary positional embedding to input tensor T in `sbhd` format.""" - if transpose_output_memory: - warnings.warn( - "transpose_output_memory is not supported by TE's fused RoPE and will be ignored." 
- ) - return FusedRoPEFunc.apply(t, freqs, "sbhd") - - def fused_apply_rotary_pos_emb_thd( - t: torch.Tensor, - cu_seqlens: torch.Tensor, - freqs: torch.Tensor, - cp_size: int = 1, - cp_rank: int = 0, - ) -> torch.Tensor: - """ - Apply rotary positional embedding to input tensor T in `thd` format with CP support. - """ - if is_te_min_version("1.11.0", check_equality=False): - return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens, cp_size, cp_rank) - else: - return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens) - -except ImportError: - - pass - -try: - - from transformer_engine.pytorch import Fp8Padding, Fp8Unpadding # pylint: disable=unused-import - -except ImportError: - - Fp8Padding = None - Fp8Unpadding = None +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import dataclasses +import io +import os +import pickle +import warnings +from typing import Any, Callable, Optional + +import torch +import transformer_engine as te +from packaging.version import Version as PkgVersion +from torch import Tensor +from torch.nn.parameter import Parameter + +from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding +from megatron.core.model_parallel_config import ModelParallelConfig +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.parallel_state import ( + get_context_parallel_global_ranks, + get_context_parallel_group, + get_expert_data_parallel_rank, + get_expert_model_parallel_rank, + get_expert_model_parallel_world_size, + get_expert_tensor_parallel_group, + get_expert_tensor_parallel_rank, + get_expert_tensor_parallel_world_size, + get_hierarchical_context_parallel_groups, + get_tensor_model_parallel_group, + get_tensor_model_parallel_rank, + get_tensor_model_parallel_world_size, +) +from megatron.core.tensor_parallel import get_cuda_rng_tracker, get_expert_parallel_rng_tracker_name +from megatron.core.tensor_parallel.layers import ( + _initialize_affine_weight_cpu, + set_tensor_model_parallel_attributes, +) +from megatron.core.tensor_parallel.random import get_data_parallel_rng_tracker_name +from megatron.core.tensor_parallel.utils import divide +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.utils import make_sharded_tensors_for_checkpoint +from megatron.core.utils import get_te_version, is_te_min_version + + +def _get_extra_te_kwargs(config: TransformerConfig): + extra_transformer_engine_kwargs = {"params_dtype": config.params_dtype} + + if is_te_min_version("0.12.0"): + if config.use_cpu_initialization: + extra_transformer_engine_kwargs["device"] = 'cpu' + elif config.init_model_with_meta_device: + extra_transformer_engine_kwargs["device"] = "meta" + else: + extra_transformer_engine_kwargs["device"] = torch.cuda.current_device() + return extra_transformer_engine_kwargs + + +def condition_init_method(config, init_method): + """Condition TE init_method on config.perform_initialization.""" + return init_method if config.perform_initialization else (lambda w: None) + + +class TENorm: + """ + A conditional wrapper to initialize an instance of Transformer-Engine's + `LayerNorm` or `RMSNorm` based on input + """ + + # TODO should we ditch normalization config and just use spec to choose LayerNorm vs RMSNorm? 
+    def __new__(cls, config: TransformerConfig, hidden_size: int, eps: float = 1e-5):
+        if config.normalization == "LayerNorm":
+            instance = te.pytorch.LayerNorm(
+                hidden_size=hidden_size,
+                eps=eps,
+                sequence_parallel=config.sequence_parallel,
+                zero_centered_gamma=config.layernorm_zero_centered_gamma,
+                **_get_extra_te_kwargs(config),
+            )
+        elif config.normalization == "RMSNorm":
+            assert hasattr(
+                te.pytorch, "RMSNorm"
+            ), "Transformer-Engine >= v0.11 required to use this feature"
+            instance = te.pytorch.RMSNorm(
+                hidden_size=hidden_size,
+                eps=eps,
+                sequence_parallel=config.sequence_parallel,
+                zero_centered_gamma=config.layernorm_zero_centered_gamma,
+                **_get_extra_te_kwargs(config),
+            )
+        else:
+            raise Exception('Only LayerNorm and RMSNorm are currently supported')
+
+        return instance
+
+
+class TELinear(te.pytorch.Linear):
+    """
+    Wrapper for the Transformer-Engine's `Linear` layer.
+
+    Note that if Megatron's parallel_state has not been initialized
+    yet, the tp_group passed to TE will be None and must be set later
+    via set_tensor_parallel_group().
+
+    parallel_mode currently supports 3 different values:
+        - "column": Split the weight matrix along output dimension (used in TEColumnParallelLinear)
+        - "row": Split the weight matrix along input dimension (used in TERowParallelLinear)
+        - "duplicated": No tensor parallelism and weight is duplicated across TP ranks
+        - Note: For expert linear layers, we will disable communication logic here
+            as TP communication is handled in token_dispatcher.
+    """
+
+    def __init__(
+        self,
+        input_size: int,
+        output_size: int,
+        *,
+        parallel_mode: Optional[str],
+        config: ModelParallelConfig,
+        init_method: Callable,
+        bias: bool,
+        skip_bias_add: bool,
+        skip_weight_param_allocation: bool,
+        tp_comm_buffer_name: Optional[str] = None,
+        is_expert: bool = False,
+    ):
+        self.config = config
+
+        # TE returns a zero length Tensor when bias=False and
+        # return_bias=True, but we prefer None. So in that case we
+        # tell TE to not return the bias, and return None
+        # ourselves. This way our forward always returns two values
+        # and we don't have to deal with the zero length Tensor.
+        self.te_return_bias = skip_bias_add and bias
+        self.is_first_microbatch = True
+        self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache
+        if skip_weight_param_allocation:
+            raise ValueError(
+                'Transformer Engine linear layers do not support skip_weight_param_allocation'
+            )
+
+        extra_kwargs = _get_extra_te_kwargs(config)
+
+        if is_te_min_version("0.8.0"):
+            if self.config.tp_comm_overlap:
+                if is_te_min_version("1.5.0"):
+                    # Use old overlap flags if they were supplied instead
+                    extra_kwargs["ub_overlap_ag"] = (
+                        self.config.tp_comm_overlap_ag
+                        if hasattr(self.config, "tp_comm_overlap_ag")
+                        else self.config.tp_comm_split_ag or self.config.tp_comm_atomic_ag
+                    )
+                    extra_kwargs["ub_overlap_rs"] = (
+                        self.config.tp_comm_overlap_rs
+                        if hasattr(self.config, "tp_comm_overlap_rs")
+                        else self.config.tp_comm_split_rs or self.config.tp_comm_atomic_rs
+                    )
+                    # Disable ub overlap for experts.
+                    if is_expert:
+                        extra_kwargs["ub_overlap_ag"] = False
+                        extra_kwargs["ub_overlap_rs"] = False
+                else:
+                    extra_kwargs["ub_split_ag"] = self.config.tp_comm_split_ag
+                    extra_kwargs["ub_atomic_gemm_ag"] = self.config.tp_comm_atomic_ag
+                    extra_kwargs["ub_split_rs"] = self.config.tp_comm_split_rs
+                    extra_kwargs["ub_atomic_gemm_rs"] = self.config.tp_comm_atomic_rs
+                    # Disable ub overlap for experts.
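+                    # As noted in the class docstring, TP communication for expert layers is
+                    # handled by the MoE token dispatcher, so userbuffer overlap is not used here.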
+ if is_expert: + extra_kwargs["ub_split_ag"] = False + extra_kwargs["ub_atomic_gemm_ag"] = False + extra_kwargs["ub_split_rs"] = False + extra_kwargs["ub_atomic_gemm_rs"] = False + if is_te_min_version("1.0.0", check_equality=False): + assert ( + tp_comm_buffer_name is not None + ), "Buffer name should be set to configure communication overlap settings" + extra_kwargs["ub_name"] = tp_comm_buffer_name + + self.expert_parallel = self.config.expert_model_parallel_size > 1 + if is_expert: + rng_tracker_name = get_expert_parallel_rng_tracker_name() + else: + if parallel_mode == "duplicated": + rng_tracker_name = get_data_parallel_rng_tracker_name() + else: + rng_tracker_name = None + if is_te_min_version("1.7.0"): + extra_kwargs["rng_tracker_name"] = rng_tracker_name + + te_parallel_mode = parallel_mode + if parallel_mode == "duplicated": + # Handle non-parallel case + tp_group = None + tp_size = 1 + explicit_expert_comm = False + te_parallel_mode = None + else: + # Disable communications in TE when using TP or EP by + # making TE agnostic of model parallel. + if is_expert: + tp_group = get_expert_tensor_parallel_group(check_initialized=False) + tp_size = get_expert_tensor_parallel_world_size() + else: + tp_group = get_tensor_model_parallel_group(check_initialized=False) + tp_size = get_tensor_model_parallel_world_size() + explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) + + if explicit_expert_comm: + if parallel_mode == "column": + output_size = divide(output_size, tp_size) + elif parallel_mode == "row": + input_size = divide(input_size, tp_size) + te_parallel_mode = None + tp_size = 1 + tp_group = None + + super().__init__( + in_features=input_size, + out_features=output_size, + sequence_parallel=self.config.sequence_parallel, + fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, + tp_group=tp_group, + tp_size=tp_size, + get_rng_state_tracker=( + get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None + ), + init_method=condition_init_method(config, init_method), + bias=bias, + return_bias=self.te_return_bias, + parallel_mode=te_parallel_mode, + **extra_kwargs, + ) + + for param in self.parameters(): + if is_expert: + # Reduce the gradient on the expert_data_parallel group for expert linear layers + setattr(param, 'allreduce', not self.expert_parallel) + else: + # Reduce the gradient on DP group + setattr(param, 'allreduce', True) + if parallel_mode == "duplicated": + # Reduce the gradient further on the TP group since the weight is + # duplicated across TP ranks + setattr(param, 'sequence_parallel', self.config.sequence_parallel) + + def forward(self, x): + """Forward.""" + _is_first_microbatch = ( + None if self.disable_parameter_transpose_cache else self.is_first_microbatch + ) + out = super().forward(x, is_first_microbatch=_is_first_microbatch) + self.is_first_microbatch = False + + # TE only returns a tuple when return_bias is True, otherwise + # it returns a single Tensor, we always want to return two + # values regardless of the arguments. 
+ if self.te_return_bias: + return out + return out, None + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Replicate cross TP/DP.""" + + # Provide the dist-ckpt support when TELinear is directly used + # It can only happen with duplicated parallel mode + assert ( + self.parallel_mode == None + ), "TELinear sharded_state_dict can only be used with duplicated parallel mode" + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint(state_dict, prefix, None, sharded_offsets) + + +class TELayerNormColumnParallelLinear(te.pytorch.LayerNormLinear): + """ + Wrapper for the Transformer-Engine's `LayerNormLinear` layer that combines + layernorm and linear layers + """ + + def __init__( + self, + input_size: int, + output_size: int, + *, + config: TransformerConfig, + init_method: Callable, + gather_output: bool, + bias: bool, + skip_bias_add: bool, + is_expert: bool, + skip_weight_param_allocation: bool = False, + tp_comm_buffer_name: Optional[str] = None, + ): + self.config = config + + if gather_output: + raise ValueError('Transformer Engine linear layers do not support gather_output = True') + + if is_expert: + raise ValueError('Transformer Engine linear layers do not yet support MoE') + + if skip_weight_param_allocation: + raise ValueError( + 'Transformer Engine linear layers do not support skip_weight_param_allocation' + ) + + # TE returns a zero length Tensor when bias=False and + # return_bias=True, but we prefer None. So in that case we + # tell TE to not return the bias, and return None + # ourselves. This way our forward always returns two values + # and we don't have to deal with the zero length Tensor. + self.te_return_bias = skip_bias_add and bias + self.is_first_microbatch = True + self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache + extra_kwargs = _get_extra_te_kwargs(config) + + # Only Transformer-Engine version >= 0.11.0 supports `RMSNorm` + if is_te_min_version("0.11.0"): + extra_kwargs["normalization"] = self.config.normalization + elif self.config.normalization != "LayerNorm": + te_version = get_te_version() + raise ValueError( + f"Transformer Engine v{te_version} does not support {self.config.normalization}." 
+ ) + + if is_te_min_version("0.8.0"): + if self.config.tp_comm_overlap: + extra_kwargs["ub_bulk_wgrad"] = self.config.tp_comm_bulk_wgrad + extra_kwargs["ub_bulk_dgrad"] = self.config.tp_comm_bulk_dgrad + if is_te_min_version("1.5.0", check_equality=False): + # Use old overlap flags if they were supplied instead + extra_kwargs["ub_overlap_ag"] = ( + self.config.tp_comm_overlap_ag + if hasattr(self.config, "tp_comm_overlap_ag") + else self.config.tp_comm_split_ag or self.config.tp_comm_atomic_ag + ) + if is_te_min_version("1.6.0.dev0", check_equality=False): + extra_kwargs["ub_overlap_rs_dgrad"] = ( + self.config.tp_comm_overlap_rs_dgrad + if hasattr(self.config, "tp_comm_overlap_rs_dgrad") + else False + ) + if tp_comm_buffer_name == 'qkv' and self.config.tp_comm_overlap_disable_qkv: + extra_kwargs["ub_overlap_ag"] = False + extra_kwargs["ub_overlap_rs_dgrad"] = False + + if tp_comm_buffer_name == 'fc1' and self.config.tp_comm_overlap_disable_fc1: + extra_kwargs["ub_overlap_ag"] = False + extra_kwargs["ub_overlap_rs_dgrad"] = False + else: + extra_kwargs["ub_atomic_gemm_ag"] = self.config.tp_comm_atomic_ag + extra_kwargs["ub_split_ag"] = self.config.tp_comm_split_ag + if is_te_min_version("1.0.0", check_equality=False): + assert ( + tp_comm_buffer_name is not None + ), "Buffer name should be set to configure communication overlap settings" + extra_kwargs["ub_name"] = tp_comm_buffer_name + + super().__init__( + in_features=input_size, + out_features=output_size, + eps=self.config.layernorm_epsilon, + sequence_parallel=self.config.sequence_parallel, + fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, + tp_group=get_tensor_model_parallel_group(check_initialized=False), + tp_size=self.config.tensor_model_parallel_size, + get_rng_state_tracker=( + get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None + ), + init_method=( + condition_init_method(config, init_method) + if not config.use_cpu_initialization + else lambda w: None + ), + bias=bias, + return_bias=self.te_return_bias, + parallel_mode="column", + return_layernorm_output=False, + zero_centered_gamma=self.config.layernorm_zero_centered_gamma, + **extra_kwargs, + ) + + world_size = get_tensor_model_parallel_world_size() + rank = get_tensor_model_parallel_rank() + + if config.use_cpu_initialization: + output_size_per_partition = divide(output_size, world_size) + _ = _initialize_affine_weight_cpu( + self.weight, + output_size, + input_size, + output_size_per_partition, + 0, + init_method=condition_init_method(config, init_method), + stride=1, + return_master_weight=False, + rank=rank, + world_size=world_size, + skip_set_tensor_parallel_attributes=True, + ) + if bias: + self.bias = Parameter( + torch.empty(output_size_per_partition, dtype=config.params_dtype) + ) + set_tensor_model_parallel_attributes(self.bias, True, 0, 1) + with torch.no_grad(): + self.bias.zero_() + setattr(self.bias, 'allreduce', True) + + def forward(self, x): + """Forward.""" + _is_first_microbatch = ( + None if self.disable_parameter_transpose_cache else self.is_first_microbatch + ) + out = super().forward(x, is_first_microbatch=_is_first_microbatch) + self.is_first_microbatch = False + + # TE only returns a tuple when return_bias is True, otherwise + # it returns a single Tensor, we always want to return two + # values regardless of the arguments. 
+ if self.te_return_bias: + return out + return out, None + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Sharding along axis 0, bias sharded""" + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint( + state_dict, prefix, {'weight': 0, 'bias': 0}, sharded_offsets + ) + + def __repr__(self): + return ( + f"{type(self).__name__}(in_features={self.in_features}, " + f"out_features={self.out_features}, bias={self.use_bias}, TP={self.tp_size})" + ) + + +class TEColumnParallelLinear(TELinear): + """ + Wrapper for the Transformer-Engine's `Linear` layer but specialized similar + to megatron's `ColumnParallelLinear` layer. + """ + + def __init__( + self, + input_size: int, + output_size: int, + *, + config: ModelParallelConfig, + init_method: Callable, + gather_output: bool, + bias: bool, + skip_bias_add: bool, + is_expert: bool, + skip_weight_param_allocation: bool = False, + tp_comm_buffer_name: Optional[str] = None, + ): + if gather_output: + raise ValueError('Transformer Engine linear layers do not support gather_output = True') + + super().__init__( + input_size=input_size, + output_size=output_size, + parallel_mode="column", + config=config, + init_method=( + condition_init_method(config, init_method) + if not config.use_cpu_initialization + else lambda w: None + ), + bias=bias, + skip_bias_add=skip_bias_add, + is_expert=is_expert, + skip_weight_param_allocation=skip_weight_param_allocation, + tp_comm_buffer_name=tp_comm_buffer_name, + ) + + if config.use_cpu_initialization: + if is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() + else: + world_size = get_tensor_model_parallel_world_size() + rank = get_tensor_model_parallel_rank() + output_size_per_partition = divide(output_size, world_size) + _ = _initialize_affine_weight_cpu( + self.weight, + output_size, + input_size, + output_size_per_partition, + 0, + init_method=condition_init_method(config, init_method), + stride=1, + return_master_weight=False, + rank=rank, + world_size=world_size, + skip_set_tensor_parallel_attributes=True, + ) + if bias: + self.bias = Parameter( + torch.empty(output_size_per_partition, dtype=config.params_dtype) + ) + set_tensor_model_parallel_attributes(self.bias, True, 0, 1) + with torch.no_grad(): + self.bias.zero_() + setattr(self.bias, 'allreduce', True) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Sharding along axis 0, bias sharded""" + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint( + state_dict, prefix, {'weight': 0, 'bias': 0}, sharded_offsets + ) + + def __repr__(self): + return ( + f"{type(self).__name__}(in_features={self.in_features}, " + f"out_features={self.out_features}, bias={self.use_bias}, TP={self.tp_size})" + ) + + +class TERowParallelLinear(TELinear): + """ + Wrapper for the Transformer-Engine's `Linear` layer but specialized similar + to megatron's `RowParallelLinear` layer. 
+ """ + + def __init__( + self, + input_size: int, + output_size: int, + *, + config: ModelParallelConfig, + init_method: Callable, + bias: bool, + input_is_parallel: bool, + skip_bias_add: bool, + is_expert: bool, + tp_comm_buffer_name: Optional[str] = None, + ): + if not input_is_parallel: + raise ValueError( + "Transformer Engine linear layers do not support input_is_parallel = False" + ) + + super().__init__( + input_size=input_size, + output_size=output_size, + parallel_mode="row", + config=config, + init_method=( + condition_init_method(config, init_method) + if not config.use_cpu_initialization + else lambda w: None + ), + bias=bias, + skip_bias_add=skip_bias_add, + skip_weight_param_allocation=False, # We don't currently use this for row parallel layers # pylint: disable=line-too-long + is_expert=is_expert, + tp_comm_buffer_name=tp_comm_buffer_name, + ) + if config.use_cpu_initialization: + if is_expert: + world_size = get_expert_tensor_parallel_world_size() + rank = get_expert_tensor_parallel_rank() + else: + world_size = get_tensor_model_parallel_world_size() + rank = get_tensor_model_parallel_rank() + input_size_per_partition = divide(input_size, world_size) + self.master_weight = _initialize_affine_weight_cpu( + self.weight, + output_size, + input_size, + input_size_per_partition, + 1, + init_method=condition_init_method(config, init_method), + stride=1, + return_master_weight=False, + params_dtype=config.params_dtype, + rank=rank, + world_size=world_size, + skip_set_tensor_parallel_attributes=True, + ) + if bias: + self.bias = Parameter(torch.empty(output_size, dtype=config.params_dtype)) + # Always initialize bias to zero. + with torch.no_grad(): + self.bias.zero_() + setattr(self.bias, 'allreduce', True) + setattr(self.bias, 'sequence_parallel', config.sequence_parallel) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Sharding along axis 1, bias not sharded""" + state_dict = self.state_dict(prefix='', keep_vars=True) + return make_sharded_tensors_for_checkpoint( + state_dict, prefix, {'weight': 1}, sharded_offsets + ) + + def __repr__(self): + return ( + f"{type(self).__name__}(in_features={self.in_features}, " + f"out_features={self.out_features}, bias={self.use_bias}, TP={self.tp_size})" + ) + + +class TEDotProductAttention(te.pytorch.DotProductAttention): + """ + Wrapper for the Transformer-Engine's `DotProductAttention` layer that also + has "flash attention" enabled. + + Note that if Megatron's parallel_state has not been initialized yet, the + tp_group and cp_group passed to TE will be None and must be set later + via set_tensor_parallel_group() and set_context_parallel_group(). + """ + + cp_stream: torch.cuda.Stream = None + + def __init__( + self, + config: TransformerConfig, + layer_number: int, + attn_mask_type: AttnMaskType, + attention_type: str, + attention_dropout: Optional[float] = None, + softmax_scale: Optional[float] = None, + k_channels: Optional[int] = None, + v_channels: Optional[int] = None, + cp_comm_type: str = "p2p", + ): + self.config = config + self.te_forward_mask_type = False + self.qkv_format: str = 'sbhd' + + if self.config.apply_query_key_layer_scaling != bool( + int(os.getenv('NVTE_APPLY_QK_LAYER_SCALING', '0')) + ): + raise ValueError( + f"apply_query_key_layer_scaling is {self.config.apply_query_key_layer_scaling} " + f"but environment variable NVTE_APPLY_QK_LAYER_SCALING is " + f"{os.getenv('NVTE_APPLY_QK_LAYER_SCALING')}. 
Transformer Engine does not support " + f"setting query key layer scaling via argument, so these two must match." + ) + + extra_kwargs: dict[str, Any] = {} + if is_te_min_version("0.11.0"): + extra_kwargs["num_gqa_groups"] = self.config.num_query_groups + elif self.config.num_query_groups != self.config.num_attention_heads: + raise ValueError( + f"Transformer Engine v{get_te_version()} does not support Grouped Query Attention, " + f"use a newer version of Transformer Engine. " + f"(num_query_groups ({self.config.num_query_groups}) != " + f"num_attention_heads ({self.config.num_attention_heads}))" + ) + + if is_te_min_version("0.10.0"): + extra_kwargs["attention_type"] = attention_type + # older versions don't need attention_type + + if is_te_min_version("0.12.0", check_equality=False): + self.te_forward_mask_type = True + + # This check is important as CP config can be disabled while having a valid CP group + # Example - Disabling CP for encoder while a valid CP group exists for decoder + if self.config.context_parallel_size > 1: + assert is_te_min_version( + "1.0.0" + ), "Only Transformer-Engine version >= 1.0.0 supports context parallelism!" + if getattr(TEDotProductAttention, "cp_stream") is None: + TEDotProductAttention.cp_stream = torch.cuda.Stream() + extra_kwargs["cp_group"] = get_context_parallel_group(check_initialized=False) + extra_kwargs["cp_global_ranks"] = get_context_parallel_global_ranks( + check_initialized=False + ) + extra_kwargs["cp_stream"] = TEDotProductAttention.cp_stream + if is_te_min_version("1.10.0"): + if cp_comm_type is None: + extra_kwargs["cp_comm_type"] = "p2p" + elif cp_comm_type == "a2a+p2p": + assert is_te_min_version("1.12.0"), ( + f"Transformer-Engine v{get_te_version()} must be >= 1.12.0 to support" + "hierarchical cp communication." + ) + extra_kwargs["cp_comm_type"] = "a2a+p2p" + extra_kwargs["cp_group"] = get_hierarchical_context_parallel_groups( + check_initialized=False + ) + else: + extra_kwargs["cp_comm_type"] = cp_comm_type + + if self.config.deterministic_mode: + if int(os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO", "1")) != 0: + raise RuntimeError( + "deterministic_mode is on and we are using DotProductAttention from " + "Transformer Engine, but NVTE_ALLOW_NONDETERMINISTIC_ALGO is not 0. " + f"Currently set to: {os.getenv('NVTE_ALLOW_NONDETERMINISTIC_ALGO', 'not set')}." + ) + + if config.window_size is not None: + # Check version + assert is_te_min_version("1.2.0"), ( + f"Transformer-Engine v{get_te_version()} must be >= 1.2.0 to support" + "sliding window attention."
+ ) + extra_kwargs['window_size'] = config.window_size + + if is_te_min_version("1.10.0"): + # TE 1.10.0 introduces the ability to set the different k and v channels + kv_channels = ( + (k_channels, v_channels) + if k_channels is not None and v_channels is not None + else self.config.kv_channels + ) + extra_kwargs['softmax_scale'] = softmax_scale + else: + kv_channels = self.config.kv_channels + + self.kept_packed_seq_params = set( + field.name for field in dataclasses.fields(PackedSeqParams) + ) + if get_te_version() < PkgVersion("1.3.0"): + # TE 1.3.0 introduces precomputing max_seqlen to remove unnecessary kernels and D2H + # copies (#555) + # These two arguments did not exist prior to 1.3.0 + self.kept_packed_seq_params.discard("max_seqlen_q") + self.kept_packed_seq_params.discard("max_seqlen_kv") + + if get_te_version() < PkgVersion("1.10.0"): + # TE 1.8.0 introduces cu_seqlens_padded which is the cu_seqlens with paddings counted + # in each individual sequence in THD format dataset + # These two arguments did not exist prior to 1.8.0. Full support added in 1.10.0 (#1012) + self.kept_packed_seq_params.discard("cu_seqlens_q_padded") + self.kept_packed_seq_params.discard("cu_seqlens_kv_padded") + + super().__init__( + num_attention_heads=self.config.num_attention_heads, + kv_channels=kv_channels, + attention_dropout=( + self.config.attention_dropout if attention_dropout is None else attention_dropout + ), + attn_mask_type=attn_mask_type.name, + sequence_parallel=self.config.sequence_parallel, + tp_size=self.config.tensor_model_parallel_size, + get_rng_state_tracker=( + get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None + ), + tp_group=get_tensor_model_parallel_group(check_initialized=False), + layer_number=layer_number, + **extra_kwargs, + ) + + def forward( + self, + query: Tensor, + key: Tensor, + value: Tensor, + attention_mask: Tensor, + attn_mask_type: AttnMaskType, + attention_bias: Tensor = None, + packed_seq_params: PackedSeqParams = None, + ): + """Forward.""" + packed_seq_kwargs = ( + {key: getattr(packed_seq_params, key) for key in self.kept_packed_seq_params} + if packed_seq_params is not None + else {} + ) + # overwrite self.qkv_format depending on self.config.apply_rope_fusion, which can be set + # after init + if self.config.apply_rope_fusion and is_te_min_version("0.13.0", check_equality=False): + self.qkv_format = 'bshd' + + qkv_format = packed_seq_kwargs.get('qkv_format', self.qkv_format) + + # WAR for peak memory usage. + # See https://gitlab-master.nvidia.com/ADLR/megatron-lm/-/merge_requests/2388 + if self.config.apply_rope_fusion and qkv_format == 'bshd': + query, key, value = [x.transpose(0, 1).contiguous() for x in (query, key, value)] + # In PyTorch, the following two tensors are in fact the same: + # Tensor with shape (1, S, H, D) and stride (S*H*D, H*D, D, 1) + # Tensor with shape (1, S, H, D) and stride (H*D, H*D, D, 1) + # Stride for a dimension that is 1 has no meaning, so tensors created two different ways + # can have same shape but different strides. + # We unify them to the first one to pass the stride check in TE + if value.shape == key.shape and value.shape[0] == 1 and value.stride() != key.stride(): + value = value.as_strided(value.shape, key.stride()) + + attention_bias_kwargs = {} + if attention_bias is not None: + assert is_te_min_version("1.2.0"), ( + f"Transformer-Engine v{get_te_version()} must be >= 1.2.0 to support" + "`attention_bias`." 
+ ) + attention_bias_kwargs = dict( + core_attention_bias_type='post_scale_bias', core_attention_bias=attention_bias + ) + + if self.te_forward_mask_type: + if qkv_format == 'thd' and is_te_min_version("1.7.0"): + # thd format uses flash attention with cuDNN kernel which requires is_padding=True, + # so the only acceptable mask types are `padding_causal` and `padding`. These do not + # necessarily indicate there are padded tokens in the sequence. + if attn_mask_type == AttnMaskType.causal: + attn_mask_type = AttnMaskType.padding_causal + elif attn_mask_type == AttnMaskType.no_mask: + attn_mask_type = AttnMaskType.padding + core_attn_out = super().forward( + query, + key, + value, + attention_mask, + attn_mask_type=attn_mask_type.name, + **attention_bias_kwargs, + **packed_seq_kwargs, + ) + else: + core_attn_out = super().forward( + query, key, value, attention_mask, **attention_bias_kwargs, **packed_seq_kwargs + ) + + if self.config.apply_rope_fusion and qkv_format == 'bshd': + return core_attn_out.transpose(0, 1) + else: + return core_attn_out + + +if is_te_min_version("1.9.0.dev0"): + + class TEGroupedLinear(te.pytorch.BatchLinear if int(os.getenv("GROUPED_GEMM_BatchLinear", '0')) else te.pytorch.GroupedLinear): + """ + Wrapper for the Transformer-Engine's `GroupedLinear` layer. + + Note that if Megatron's parallel_state has not been initialized + yet, the tp_group passed to TE will be None and must be set later + via set_tensor_parallel_group(). + """ + + def __init__( + self, + num_gemms: int, + input_size: int, + output_size: int, + *, + parallel_mode: Optional[str], + config: ModelParallelConfig, + init_method: Callable, + bias: bool, + skip_bias_add: bool, + is_expert: bool = False, + tp_comm_buffer_name: Optional[str] = None, + ): + self.config = config + + # TE returns a zero length Tensor when bias=False and + # return_bias=True, but we prefer None. So in that case we + # tell TE to not return the bias, and return None + # ourselves. This way our forward always returns two values + # and we don't have to deal with the zero length Tensor. + self.te_return_bias = skip_bias_add and bias + self.is_first_microbatch = True + self.disable_parameter_transpose_cache = self.config.disable_parameter_transpose_cache + + extra_kwargs = _get_extra_te_kwargs(config) + extra_kwargs["ub_name"] = tp_comm_buffer_name + + self.expert_parallel = self.config.expert_model_parallel_size > 1 + if is_expert: + extra_kwargs["rng_tracker_name"] = get_expert_parallel_rng_tracker_name() + + # The comms between TP and EP group is explicitly handled by MoE token dispatcher. + # So we disable comms by making TE agnostic of model parallel. 
+ if is_expert: + tp_group = get_expert_tensor_parallel_group(check_initialized=False) + tp_size = get_expert_tensor_parallel_world_size() + else: + tp_group = get_tensor_model_parallel_group(check_initialized=False) + tp_size = get_tensor_model_parallel_world_size() + self.explicit_expert_comm = is_expert and (tp_size > 1 or self.expert_parallel) + + if self.explicit_expert_comm: + if parallel_mode == "column": + output_size = divide(output_size, tp_size) + elif parallel_mode == "row": + input_size = divide(input_size, tp_size) + parallel_mode = None + tp_size = 1 + tp_group = None + + super().__init__( + num_gemms=num_gemms, + in_features=input_size, + out_features=output_size, + sequence_parallel=self.config.sequence_parallel, + fuse_wgrad_accumulation=self.config.gradient_accumulation_fusion, + tp_group=tp_group, + tp_size=tp_size, + get_rng_state_tracker=( + get_cuda_rng_tracker if get_cuda_rng_tracker().is_initialized() else None + ), + init_method=condition_init_method(config, init_method), + bias=bias, + return_bias=self.te_return_bias, + parallel_mode=parallel_mode, + **extra_kwargs, + ) + + for param in self.parameters(): + setattr(param, 'allreduce', not (is_expert and self.expert_parallel)) + + def merge_extra_states( + self, + state_dict, + prefix, + local_metadata, + strict, + missing_keys, + unexpected_keys, + error_msgs, + ): + """ + Merge multiple "_extra_state" into one. + """ + self.init_fp8_metadata(num_gemms=self.num_gemms) + fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration + + try: + state_list = [ + state_dict.pop(f"{prefix}_extra_state{i}") for i in range(1, self.num_gemms) + ] + except KeyError: + # "_extra_state{i}" only exists for dist-ckpt. Return for torch native ckpt. + return + + if not fp8_checkpoint: + return + state_list = [state_dict.pop(f"{prefix}_extra_state")] + state_list + state_list = [self._decode_extra_state(state) for state in state_list] + extra_fp8_variables = state_list[0]['extra_fp8_variables'] + extra_fp8_variables['num_gemms'] = self.num_gemms + extra_state = { + "scale_fwd": torch.cat( + [state['scale_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "scale_inv_fwd": torch.cat( + [state['scale_inv_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "amax_history_fwd": torch.cat( + [state['amax_history_fwd'].view(-1, 1) for state in state_list], dim=1 + ).view(self.fp8_meta["recipe"].amax_history_len, -1), + "scale_bwd": torch.cat( + [state['scale_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "scale_inv_bwd": torch.cat( + [state['scale_inv_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(-1), + "amax_history_bwd": torch.cat( + [state['amax_history_bwd'].view(-1, 1) for state in state_list], dim=1 + ).view(self.fp8_meta["recipe"].amax_history_len, -1), + "extra_fp8_variables": extra_fp8_variables, + } + state_dict[f"{prefix}_extra_state"] = self._encode_extra_state(extra_state) + + self._register_load_state_dict_pre_hook(merge_extra_states, with_module=True) + + def forward(self, x, m_splits): + """Forward.""" + _is_first_microbatch = ( + None if self.disable_parameter_transpose_cache else self.is_first_microbatch + ) + out = super().forward(x, m_splits, is_first_microbatch=_is_first_microbatch) + self.is_first_microbatch = False + + # TE only returns a tuple when return_bias is True, otherwise + # it returns a single Tensor, we always want to return two + # values regardless of the arguments. 
+ if self.te_return_bias: + return out + return out, None + + def _encode_extra_state(self, state): + state_serialized = io.BytesIO() + torch.save(state, state_serialized) + return state_serialized + + def _decode_extra_state(self, state): + if isinstance(state, torch.Tensor): + return pickle.loads(state.detach().cpu().numpy().tobytes()) + elif isinstance(state, io.BytesIO): + state.seek(0) + return torch.load(state, map_location="cuda") + else: + raise RuntimeError("Unsupported checkpoint format.") + + def _split_extra_state(self, state): + fp8_checkpoint = self.fp8_meta["fp8_checkpoint"] or self.fp8 or self.fp8_calibration + + if not fp8_checkpoint: + return [state] * self.num_gemms + + state = self._decode_extra_state(state) + extra_states = [] + extra_fp8_variables = state['extra_fp8_variables'] + extra_fp8_variables['num_gemms'] = 1 + for gemm_idx in range(self.num_gemms): + tmp_state = { + "scale_fwd": state['scale_fwd'].view(3, -1)[:, gemm_idx], + "scale_inv_fwd": state['scale_inv_fwd'].view(3, -1)[:, gemm_idx], + "amax_history_fwd": state['amax_history_fwd'].view( + self.fp8_meta["recipe"].amax_history_len, 3, -1 + )[:, :, gemm_idx], + "scale_bwd": state['scale_bwd'].view(2, -1)[:, gemm_idx], + "scale_inv_bwd": state['scale_inv_bwd'].view(2, -1)[:, gemm_idx], + "amax_history_bwd": state['amax_history_bwd'].view( + self.fp8_meta["recipe"].amax_history_len, 2, -1 + )[:, :, gemm_idx], + "extra_fp8_variables": extra_fp8_variables, + } + extra_states.append(self._encode_extra_state(tmp_state)) + return extra_states + + def _sharded_state_dict_grouped( + self, tp_axis_map, prefix='', sharded_offsets=(), metadata=None + ): + """ + prefix should be module_name to make keys identical to sequential ones. + """ + sharded_state_dict = {} + full_state_dict = self.state_dict(prefix='', keep_vars=True) + num_global_experts = get_expert_model_parallel_world_size() * self.num_gemms + local_expert_indices_offset = get_expert_model_parallel_rank() * self.num_gemms + ep_axis = len(sharded_offsets) + extra_states = self._split_extra_state(full_state_dict['_extra_state']) + for gemm_idx in range(self.num_gemms): + state_dict = { + f'{gemm_idx}.weight': full_state_dict[f'weight{gemm_idx}'], + f'{gemm_idx}._extra_state': extra_states[gemm_idx], + } + if self.use_bias: + state_dict[f'{gemm_idx}.bias'] = full_state_dict[f'bias{gemm_idx}'] + sub_sd = make_sharded_tensors_for_checkpoint( + state_dict, + '', + tp_axis_map, + ( + *sharded_offsets, + (ep_axis, local_expert_indices_offset + gemm_idx, num_global_experts), + ), + ) + # Remove expert layers indexing from sharded keys + replace_prefix_for_sharding(sub_sd, f'{gemm_idx}.', prefix) + sharded_state_dict.update( + { + f'{prefix}weight{gemm_idx}': sub_sd[f'{gemm_idx}.weight'], + f'{prefix}_extra_state{"" if gemm_idx == 0 else gemm_idx}': sub_sd[ + f'{gemm_idx}._extra_state' + ], + } + ) + if self.use_bias: + sharded_state_dict[f'{prefix}bias{gemm_idx}'] = sub_sd[f'{gemm_idx}.bias'] + # Adjust replica ids - replication along DP modulo EP + for k, sh_ten in sharded_state_dict.items(): + replica_id = sh_ten.replica_id + assert ( + len(replica_id) == 3 + ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' + if getattr(sh_ten, "is_data_parallel_fully_shard", False): + edp_replica_id = 0 + else: + edp_replica_id = get_expert_data_parallel_rank() + sh_ten.replica_id = (*replica_id[:2], edp_replica_id) + return sharded_state_dict + + class TEColumnParallelGroupedLinear(TEGroupedLinear): + """ + Wrapper for the Transformer-Engine's
`GroupedLinear` layer but specialized + to column-parallel style. + """ + + def __init__( + self, + num_gemms: int, + input_size: int, + output_size: int, + *, + config: ModelParallelConfig, + init_method: Callable, + bias: bool, + skip_bias_add: bool, + is_expert: bool, + tp_comm_buffer_name: Optional[str] = None, + ): + + super().__init__( + num_gemms=num_gemms, + input_size=input_size, + output_size=output_size, + parallel_mode="column", + config=config, + init_method=condition_init_method(config, init_method), + bias=bias, + skip_bias_add=skip_bias_add, + is_expert=is_expert, + tp_comm_buffer_name=tp_comm_buffer_name, + ) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """ + For each gemm, sharding along axis 0, bias sharded. + Assume sharded_offsets[-1] is the expert parallel offset. + """ + tp_axis_map = {} + for gemm_idx in range(self.num_gemms): + tp_axis_map.update({f'{gemm_idx}.weight': 0, f'{gemm_idx}.bias': 0}) + return super()._sharded_state_dict_grouped( + tp_axis_map, prefix, sharded_offsets, metadata + ) + + class TERowParallelGroupedLinear(TEGroupedLinear): + """ + Wrapper for the Transformer-Engine's `GroupedLinear` layer but specialized + to row-parallel style. + """ + + def __init__( + self, + num_gemms: int, + input_size: int, + output_size: int, + *, + config: ModelParallelConfig, + init_method: Callable, + bias: bool, + skip_bias_add: bool, + is_expert: bool, + tp_comm_buffer_name: Optional[str] = None, + ): + + super().__init__( + num_gemms=num_gemms, + input_size=input_size, + output_size=output_size, + parallel_mode="row", + config=config, + init_method=condition_init_method(config, init_method), + bias=bias, + skip_bias_add=skip_bias_add, + is_expert=is_expert, + tp_comm_buffer_name=tp_comm_buffer_name, + ) + + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """ + For each gemm, sharding along axis 1, bias not sharded. + Assume sharded_offsets[-1] is the expert parallel offset. + """ + tp_axis_map = {f'{gemm_idx}.weight': 1 for gemm_idx in range(self.num_gemms)} + return super()._sharded_state_dict_grouped( + tp_axis_map, prefix, sharded_offsets, metadata + ) + +else: + + TEGroupedLinear = None # type: ignore[assignment, misc] + TEColumnParallelGroupedLinear = None # type: ignore[assignment, misc] + TERowParallelGroupedLinear = None # type: ignore[assignment, misc] + + +class TEDelayedScaling(te.common.recipe.DelayedScaling): + """ + Wrapper for the Transformer-Engine's `DelayedScaling` layer. 
+ """ + + def __init__( + self, + config: ModelParallelConfig, + fp8_format: int, + override_linear_precision: tuple = (False, False, False), + ): + extra_kwargs = _get_extra_te_kwargs(config) + if is_te_min_version("1.6.0.dev0"): + extra_kwargs["fp8_dpa"] = config.fp8_dot_product_attention + extra_kwargs["fp8_mha"] = config.fp8_multi_head_attention + if get_te_version() < PkgVersion("1.8.0"): + extra_kwargs["interval"] = config.fp8_interval + elif config.fp8_interval != 1: + warnings.warn("fp8_interval is deprecated and ignored from Transformer-Engine v1.8.0.") + + super().__init__( + margin=config.fp8_margin, + fp8_format=fp8_format, + amax_compute_algo=config.fp8_amax_compute_algo, + amax_history_len=config.fp8_amax_history_len, + override_linear_precision=override_linear_precision, + **extra_kwargs, + ) + + +class TECudaRNGStatesTracker(te.pytorch.distributed.CudaRNGStatesTracker): + """Wraps TransformerEngine's CudaRNGStatesTracker so that it is + interchangeable with Megatron's RNG tracker""" + + def __init__(self): + super().__init__() + self.reset() + + def is_initialized(self): + """Checks if the internal RNG state has been set wirth set_states().""" + return self._is_initialized + + def reset(self): + """Reset the internal RNG state.""" + super().reset() + self._is_initialized = False + + def set_states(self, states): + """Set the internal RNG state.""" + super().set_states(states) + self._is_initialized = True + + def add(self, name, seed): + """Track the rng state.""" + super().add(name, seed) + self._is_initialized = True + + +def te_checkpoint( + forward_func, + distribute_saved_activations, + get_rng_state_tracker, + tp_group, + hidden_states, + attention_mask, + context, + context_mask, + rotary_pos_emb, +): + """Checkpointing with Transformer-Engine.""" + from transformer_engine.pytorch.distributed import checkpoint + + if is_te_min_version("1.5.0"): + return checkpoint( + forward_func, + hidden_states, + attention_mask, + context, + context_mask, + rotary_pos_emb, + distribute_saved_activations=distribute_saved_activations, + get_rng_state_tracker=get_rng_state_tracker, + tp_group=tp_group, + ) + else: + return checkpoint( + forward_func, + distribute_saved_activations, + get_rng_state_tracker, + tp_group, + hidden_states, + attention_mask, + context, + context_mask, + rotary_pos_emb, + ) + + +try: + + from transformer_engine.pytorch.attention import _SplitAlongDim + + SplitAlongDim = _SplitAlongDim.apply + +except ImportError: + + SplitAlongDim = None + +try: + + from transformer_engine.pytorch.cpu_offload import ( + get_cpu_offload_context as _get_cpu_offload_context, + ) + + def get_cpu_offload_context( + enabled, num_layers, model_layers, activation_offloading, weight_offloading + ): + """Get CPU offload context and sync function.""" + if is_te_min_version("1.10.0.dev0"): + context, sync_func = _get_cpu_offload_context( + enabled, num_layers, model_layers, activation_offloading, weight_offloading + ) + else: + context, sync_func = _get_cpu_offload_context( + enabled, num_layers, activation_offloading, weight_offloading + ) + + return context, sync_func + +except ImportError: + + get_cpu_offload_context = None # type: ignore[assignment, misc] + +try: + + from transformer_engine.pytorch.attention import FusedRoPEFunc + + def fused_apply_rotary_pos_emb( + t: torch.Tensor, freqs: torch.Tensor, transpose_output_memory: bool = False + ) -> torch.Tensor: + """Apply rotary positional embedding to input tensor T in `sbhd` format.""" + if transpose_output_memory: + 
warnings.warn( + "transpose_output_memory is not supported by TE's fused RoPE and will be ignored." + ) + return FusedRoPEFunc.apply(t, freqs, "sbhd") + + def fused_apply_rotary_pos_emb_thd( + t: torch.Tensor, + cu_seqlens: torch.Tensor, + freqs: torch.Tensor, + cp_size: int = 1, + cp_rank: int = 0, + ) -> torch.Tensor: + """ + Apply rotary positional embedding to input tensor T in `thd` format with CP support. + """ + if is_te_min_version("1.11.0", check_equality=False): + return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens, cp_size, cp_rank) + else: + return FusedRoPEFunc.apply(t, freqs, "thd", cu_seqlens) + +except ImportError: + + pass + +try: + + from transformer_engine.pytorch import Fp8Padding, Fp8Unpadding # pylint: disable=unused-import + +except ImportError: + + Fp8Padding = None + Fp8Unpadding = None + +try: + + from transformer_engine.pytorch.permutation import ( + moe_permute, + moe_sort_chunks_by_index, + moe_unpermute, + ) + + fused_permute = moe_permute + fused_unpermute = moe_unpermute + fused_sort_chunks_by_index = moe_sort_chunks_by_index + +except ImportError: + + fused_permute = None + fused_unpermute = None + fused_sort_chunks_by_index = None diff --git a/megatron/core/inference/ammo_support/__init__.py b/megatron/core/inference/ammo_support/__init__.py deleted file mode 100644 index 12be50c..0000000 --- a/megatron/core/inference/ammo_support/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import warnings - -warnings.warn( - "The 'megatron.core.inference.ammo_support' module is deprecated and will be removed in a future release. " - "Please use megatron.core.inference.modelopt_support instead", - DeprecationWarning, -) diff --git a/megatron/core/inference/ammo_support/gpt/model_specs.py b/megatron/core/inference/ammo_support/gpt/model_specs.py deleted file mode 100644 index ba3bd9f..0000000 --- a/megatron/core/inference/ammo_support/gpt/model_specs.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from megatron.core.inference.modelopt_support.gpt.model_specs import get_gpt_layer_modelopt_spec diff --git a/megatron/core/inference/ammo_support/gpt/state_dict_hooks.py b/megatron/core/inference/ammo_support/gpt/state_dict_hooks.py deleted file mode 100644 index 8532366..0000000 --- a/megatron/core/inference/ammo_support/gpt/state_dict_hooks.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from megatron.core.inference.modelopt_support.gpt.state_dict_hooks import ( - mcore_gpt_load_legacy_state_dict_pre_hook, - mcore_gpt_load_te_state_dict_pre_hook, -) diff --git a/megatron/core/inference/async_stream.py b/megatron/core/inference/async_stream.py new file mode 100644 index 0000000..c742dcb --- /dev/null +++ b/megatron/core/inference/async_stream.py @@ -0,0 +1,67 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# Copyright 2025 The vLLM authors. +# +# This code was adopted from https://github.com/vllm-project/vllm/ +# This source code is licensed under the Apache license found in the +# LICENSE file in the root directory of this source tree. + +import asyncio +from typing import Any, AsyncGenerator, Callable, Optional, Type, Union + +from megatron.core.inference.inference_request import InferenceRequest + +STOP_ITERATION = Exception() + + +class AsyncStream: + """ + Class for encapsulating an asynchronous stream of InferenceRequest outputs. 
+ + Adopted from https://github.com/vllm-project/vllm/blob/eb881ed006ca458b052905e33f0d16dbb428063a/vllm/v1/engine/async_stream.py # pylint: disable=line-too-long + """ + + def __init__(self, request_id: str, cancel: Callable[[str], None]) -> None: + self._request_id = request_id + self._cancel = cancel + self._queue: asyncio.Queue = asyncio.Queue() + self._finished = False + self._loop = asyncio.get_running_loop() + + def put(self, item: Union[InferenceRequest, Exception]) -> None: + """Adds a new value to the stream""" + if not self._finished: + self._loop.call_soon_threadsafe(self._queue.put_nowait, item) + + def finish(self, exception: Optional[Union[BaseException, Type[BaseException]]] = None) -> None: + """Completes the stream by adding a sentinel value""" + if not self._finished: + self._finished = True + self._loop.call_soon_threadsafe( + self._queue.put_nowait, + exception if self._is_raisable(exception) else STOP_ITERATION, + ) + + @property + def finished(self) -> bool: + """Whether the stream has finished""" + return self._finished + + async def generator(self) -> AsyncGenerator[InferenceRequest, None]: + """Creates an AsyncGenerator over the stream queue""" + try: + while True: + result = await self._queue.get() + if self._is_raisable(result): + if result == STOP_ITERATION: + return + raise result + yield result + except GeneratorExit: + self._cancel(self._request_id) + raise asyncio.CancelledError from None + + @staticmethod + def _is_raisable(value: Any): + return isinstance(value, BaseException) or ( + isinstance(value, type) and issubclass(value, BaseException) + ) diff --git a/megatron/core/inference/communication_utils.py b/megatron/core/inference/communication_utils.py index 0c23a58..53d3eb4 100644 --- a/megatron/core/inference/communication_utils.py +++ b/megatron/core/inference/communication_utils.py @@ -1,50 +1,54 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import torch - -from megatron.core import parallel_state - - -def _is_cuda(tensor): - """Check if a tensor is not none and is cuda.""" - assert tensor is not None - assert tensor.is_cuda - - -def broadcast_from_last_pipeline_stage(size, dtype, tensor=None): - """Broadcast a tensor from last pipeline stage to all ranks.""" - - if parallel_state.is_pipeline_last_stage(): - _is_cuda(tensor) - assert tensor.is_contiguous() - else: - tensor = torch.empty(size, dtype=dtype, device=torch.cuda.current_device()) - # Get the group and corresponding source rank. - src = parallel_state.get_pipeline_model_parallel_last_rank() - group = parallel_state.get_pipeline_model_parallel_group() - torch.distributed.broadcast(tensor, src, group) - return tensor - - -def recv_from_prev_pipeline_rank_(recv_buffer=None): - """Receive from previous pipeline stage and update the - input buffer inplace.""" - recv_prev_op = torch.distributed.P2POp( - torch.distributed.irecv, recv_buffer, parallel_state.get_pipeline_model_parallel_prev_rank() - ) - reqs = torch.distributed.batch_isend_irecv([recv_prev_op]) - for req in reqs: - req.wait() - # To protect against race condition when using batch_isend_irecv(). - torch.cuda.synchronize() - - -def send_to_next_pipeline_rank(tensor=None): - """Send output to the next pipeline stage.""" - send_next_op = torch.distributed.P2POp( - torch.distributed.isend, tensor, parallel_state.get_pipeline_model_parallel_next_rank() - ) - reqs = torch.distributed.batch_isend_irecv([send_next_op]) - for req in reqs: - req.wait() - # To protect against race condition when using batch_isend_irecv().
- torch.cuda.synchronize() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import torch + +from megatron.core import parallel_state + + +def _is_cuda(tensor): + """Check if a tensor is not none and is cuda.""" + assert tensor is not None + assert tensor.is_cuda + + +def broadcast_from_last_pipeline_stage(size, dtype, tensor=None): + """Broadcast a tensor from last pipeline stage to all ranks.""" + + if parallel_state.is_pipeline_last_stage(): + assert size == list( + tensor.shape + ), f"Expected tensor of shape {size} but got {list(tensor.shape)}" + assert dtype == tensor.dtype, f"Expected tensor of type {dtype} but got {tensor.dtype}" + _is_cuda(tensor) + assert tensor.is_contiguous() + else: + tensor = torch.empty(size, dtype=dtype, device=torch.cuda.current_device()) + # Get the group and corresponding source rank. + src = parallel_state.get_pipeline_model_parallel_last_rank() + group = parallel_state.get_pipeline_model_parallel_group() + torch.distributed.broadcast(tensor, src, group) + return tensor + + +def recv_from_prev_pipeline_rank_(recv_buffer=None): + """Receive from previous pipeline stage and update the + input buffer inplace.""" + recv_prev_op = torch.distributed.P2POp( + torch.distributed.irecv, recv_buffer, parallel_state.get_pipeline_model_parallel_prev_rank() + ) + reqs = torch.distributed.batch_isend_irecv([recv_prev_op]) + for req in reqs: + req.wait() + # To protect against race condition when using batch_isend_irecv(). + torch.cuda.synchronize() + + +def send_to_next_pipeline_rank(tensor=None): + """Send output to the next pipeline stage.""" + send_next_op = torch.distributed.P2POp( + torch.distributed.isend, tensor, parallel_state.get_pipeline_model_parallel_next_rank() + ) + reqs = torch.distributed.batch_isend_irecv([send_next_op]) + for req in reqs: + req.wait() + # To protect against race condition when using batch_isend_irecv(). + torch.cuda.synchronize() diff --git a/megatron/core/inference/engines/mcore_engine.py b/megatron/core/inference/engines/mcore_engine.py index 28ef46b..d080b3f 100644 --- a/megatron/core/inference/engines/mcore_engine.py +++ b/megatron/core/inference/engines/mcore_engine.py @@ -1,120 +1,228 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from typing import Dict, List - -import torch - -from megatron.core.inference.engines.abstract_engine import AbstractEngine -from megatron.core.inference.inference_request import InferenceRequest -from megatron.core.inference.sampling_params import SamplingParams -from megatron.core.inference.scheduler import Scheduler -from megatron.core.inference.text_generation_controllers.text_generation_controller import ( - TextGenerationController, -) - - -class MCoreEngine(AbstractEngine): - """The Megatron core backend constructor - - This is the backend that does a simple forward pass on the model. - Supports any model that is callable (Accepts the inputs and outputs the tensor) - - Args: - text_generation_controller (TextGenerationController): A text generation - controller that will be used to define how to preprocess prompts, generate - outputs and detokenizer the output tokens. - max_batch_size : The maxinum number of requests to process at once - random_seed (int, optional): Use a random seed if you want deterministic - results. Defaults to None. 
- """ - - def __init__( - self, - text_generation_controller: TextGenerationController, - max_batch_size, - random_seed: int = None, - ): - self.text_generation_controller = text_generation_controller - self.random_seed = random_seed - self.scheduler = Scheduler(max_batch_size=max_batch_size) - - def generate( - self, - prompts: List[str], - add_BOS: bool = False, - encoder_prompts: List[str] = None, - common_inference_params: SamplingParams = None, - sampling_params: SamplingParams = None, - ) -> dict: - """The megatron core inference backend generate function - - This backend returns the output generations as a dictionary. - It returns the prompt tokens along with the generated tokens, the prompt - plus the generated string and the output log probabilities if requested - - Args: - prompts (List[str]): All the prompts as a list of strings - add_BOS (bool): Whether to add BOS token to beginning of prompts - encoder_prompts (List[dict]): All the encoder prompts as a list of strings - common_inference_params: Deprecated. Only used for backward compatibility with - MCore <= 0.9.0. Use `sampling_params` going forward. - sampling_params (SamplingParams): The request-level sampling parameters - - Returns: - List[InferenceRequest]: The output is list of inference requests containing the - generated tokens, texts and log probs if required - """ - # TODO :M core- get rng state tracker - - if common_inference_params: - sampling_params = common_inference_params - - if self.random_seed: - torch.random.manual_seed(self.random_seed) - - for i in range(len(prompts)): - prompt = prompts[i] - encoder_prompt = encoder_prompts[i] if encoder_prompts is not None else None - prompt_tokens = self.text_generation_controller.tokenize_prompt(prompt, add_BOS) - - self.scheduler.add_request( - prompt=prompt, - prompt_tokens=prompt_tokens, - encoder_prompt=encoder_prompt, - inference_parameters=sampling_params, - ) - - self.run_engine() - - result: List[InferenceRequest] = self.scheduler.completed_request_pool.values() - return result - - def run_engine(self): - """Main functionality to run inference - - Runs the engine until there are no requests in the queue. - - Args: - dynamic_generation (bool, optional): Set this to True, if you want - to enable dynamic batching. Mainly used with an inference server. - Defaults to False. - """ - while self.scheduler.have_requests_pending(): - active_requests: Dict[int, InferenceRequest] = self.scheduler.active_request_pool.copy() - result_dict: Dict[int, InferenceRequest] = ( - self.text_generation_controller.generate_all_output_tokens_static_batch( - active_requests - ) - ) - - self.scheduler.update_requests_pools(result_dict=result_dict) - - # TODO: Later for dynamic batching we will do something like this - """ - if dynamic_batching: - result_dict: Dict[ - int, InferenceRequest - ] = self.text_generation_controller.generate_output_tokens_one_step_dynamic_batch( - active_requests - ) - self.scheduler.update_requests_pools(result_dict=result_dict) - """ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+import asyncio +import warnings +from collections import OrderedDict +from typing import AsyncGenerator, Dict, List, Optional, Union + +import torch + +from megatron.core.inference.async_stream import AsyncStream +from megatron.core.inference.engines.abstract_engine import AbstractEngine +from megatron.core.inference.inference_request import InferenceRequest +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.scheduler import Scheduler +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) + + +class MCoreEngine(AbstractEngine): + """The Megatron core backend constructor + + This is the backend that does a simple forward pass on the model. + Supports any model that is callable (Accepts the inputs and outputs the tensor) + + Args: + text_generation_controller (TextGenerationController): A text generation + controller that will be used to define how to preprocess prompts, generate + outputs and detokenize the output tokens. + max_batch_size (int, optional): The maximum number of requests to process at once. + Will be set from the InferenceWrapperConfig in `text_generation_controller` by + default. + random_seed (int, optional): Use a random seed if you want deterministic + results. Defaults to None. + """ + + def __init__( + self, + text_generation_controller: TextGenerationController, + max_batch_size: Optional[int] = None, + random_seed: Optional[int] = None, + ): + inference_wrapper_config = ( + text_generation_controller.inference_wrapped_model.inference_wrapper_config + ) + inference_max_batch_size = inference_wrapper_config.inference_max_requests + if max_batch_size is None: + max_batch_size = inference_max_batch_size + elif max_batch_size > inference_max_batch_size: + warnings.warn( + f"Engine `max_batch_size` ({max_batch_size}) > " + f"`inference_max_requests` in `inference_wrapper_config` " + f"({inference_max_batch_size}); setting `max_batch_size` to " + f"{inference_max_batch_size}", + UserWarning, + ) + max_batch_size = inference_max_batch_size + self.text_generation_controller = text_generation_controller + self.random_seed = random_seed + self.scheduler = Scheduler(max_batch_size=max_batch_size) + + def get_new_request_id(self) -> str: + """Gets a new request id from the scheduler""" + return self.scheduler.get_new_request_id() + + def add_request( + self, + prompt: Optional[str] = None, + add_BOS: bool = False, + encoder_prompt: Optional[str] = None, + inference_parameters: Optional[SamplingParams] = None, + streaming: bool = False, + inference_request: Optional[InferenceRequest] = None, + ) -> str: + """ + Adds a request to the scheduler and returns the request ID. + + Args: + prompt (str): A prompt string + add_BOS (bool): Whether to add BOS token to beginning of the prompt + encoder_prompt (str): The encoder prompt string + inference_parameters (SamplingParams): The inference parameters + streaming (bool): Whether to stream incremental outputs for this request + inference_request (InferenceRequest, optional): A fully constructed request. + Defaults to None. + + Returns: + The newly created request ID.
+ """ + assert ( + prompt is not None or inference_request is not None + ), f"At least one of `prompt` or `inference_request` must be specified" + + if inference_request is None: + prompt_tokens = self.text_generation_controller.tokenize_prompt(prompt, add_BOS) + else: + prompt_tokens = inference_request.prompt_tokens + + return self.scheduler.add_request( + prompt=prompt, + prompt_tokens=prompt_tokens, + encoder_prompt=encoder_prompt, + inference_parameters=inference_parameters, + streaming=streaming, + inference_request=inference_request, + ) + + def get_stream_generator( + self, request_id: str + ) -> Union[AsyncGenerator[InferenceRequest, None], None]: + """Returns the stream generator for the given request ID if it exists.""" + stream = self.scheduler.streams.get(request_id, None) + if stream is not None: + return stream.generator() + return None + + def generate( + self, + prompts: Optional[List[str]] = None, + add_BOS: bool = False, + encoder_prompts: Optional[List[str]] = None, + common_inference_params: Optional[SamplingParams] = None, + sampling_params: Optional[SamplingParams] = None, + inference_requests: Optional[List[InferenceRequest]] = None, + ) -> List[InferenceRequest]: + """The megatron core inference backend generate function + + This backend returns the output generations as a dictionary. + It returns the prompt tokens along with the generated tokens, the prompt + plus the generated string and the output log probabilities if requested + + Args: + prompts (List[str]): All the prompts as a list of strings + add_BOS (bool): Whether to add BOS token to beginning of prompts + encoder_prompts (List[dict]): All the encoder prompts as a list of strings + common_inference_params: Deprecated. Only used for backward compatibility with + MCore <= 0.9.0. Use `sampling_params` going forward. + sampling_params (SamplingParams): The request-level sampling parameters + inference_requests (List[InferenceRequest]): A pre-populated list of inference requests + + Returns: + List[InferenceRequest]: The output is list of inference requests containing the + generated tokens, texts and log probs if required + """ + # TODO :M core- get rng state tracker + + request_ids: List[str] = [] + + if self.random_seed: + torch.random.manual_seed(self.random_seed) + + if inference_requests is None: + assert prompts is not None + + if common_inference_params: + sampling_params = common_inference_params + + for i in range(len(prompts)): + prompt = prompts[i] + encoder_prompt = encoder_prompts[i] if encoder_prompts is not None else None + request_id = self.add_request( + prompt=prompt, + encoder_prompt=encoder_prompt, + inference_parameters=sampling_params, + ) + request_ids.append(request_id) + else: + for inference_request in inference_requests: + request_ids.append(inference_request.request_id) + self.scheduler.add_request(inference_request=inference_request) + + self.run_engine() + + result: List[InferenceRequest] = [ + self.scheduler.completed_request_pool[request_id] for request_id in request_ids + ] + return result + + def run_engine(self): + """Main functionality to run inference + + Runs the engine until there are no requests in the queue. + + Args: + dynamic_generation (bool, optional): Set this to True, if you want + to enable dynamic batching. Mainly used with an inference server. + Defaults to False. 
+ """ + while self.scheduler.have_requests_pending(): + active_requests: Dict[str, InferenceRequest] = self.scheduler.active_request_pool.copy() + active_streams: Dict[str, AsyncStream] = OrderedDict() + for request_id in active_requests: + if (stream := self.scheduler.streams.get(request_id, None)) is not None: + assert isinstance(stream, AsyncStream), stream + active_streams[request_id] = stream + result_dict: Dict[str, InferenceRequest] = ( + self.text_generation_controller.generate_all_output_tokens_static_batch( + active_requests, active_streams + ) + ) + + self.scheduler.update_requests_pools(result_dict=result_dict) + + # TODO: Later for dynamic batching we will do something like this + """ + if dynamic_batching: + result_dict: Dict[ + str, InferenceRequest + ] = self.text_generation_controller.generate_output_tokens_one_step_dynamic_batch( + active_requests + ) + self.scheduler.update_requests_pools(result_dict=result_dict) + """ + + def _wrapped_run_engine(self, cuda_device): + """ + Explicitly sets the CUDA device before running the engine. + + This is to ensure that the CUDA device is correctly propagated when running + in a new thread context. + """ + torch.cuda.set_device(cuda_device) + self.run_engine() + + async def run_engine_async(self): + """Runs the engine asynchronously using asyncio""" + loop = asyncio.get_running_loop() + + await loop.run_in_executor(None, self._wrapped_run_engine, torch.cuda.current_device()) diff --git a/megatron/core/inference/inference_request.py b/megatron/core/inference/inference_request.py index ea0d67b..398a99a 100644 --- a/megatron/core/inference/inference_request.py +++ b/megatron/core/inference/inference_request.py @@ -1,39 +1,52 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from dataclasses import dataclass -from enum import Enum -from typing import List - -import torch - -from megatron.core.inference.sampling_params import SamplingParams - - -# class syntax -class Status(Enum): - """Enum for status""" - - WAITING_IN_QUEUE = 1 - ACTIVE_AND_GENERATING_TOKENS = 2 - ACTIVE_BUT_NOT_GENERATING_TOKENS = 3 - COMPLETED = 4 - - -@dataclass -class InferenceRequest: - """Class for one inference request - - Containing relevant data for an inference request - - """ - - request_id: str - prompt: str - inference_parameters: SamplingParams - prompt_tokens: List[int] - arrival_time: float - status: Status - encoder_prompt: str = None - generated_text: str = None - generated_tokens: torch.Tensor = None - generated_log_probs: torch.Tensor = None - generated_length: int = 0 +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
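[Editor's note: hypothetical usage sketch, not part of the patch.] The engine above is driven either synchronously via generate() or asynchronously via add_request()/run_engine_async(). The sketch below assumes a `controller` (a TextGenerationController wrapping an inference-wrapped model and tokenizer) built elsewhere, and leaves SamplingParams at its defaults since its fields are defined outside this hunk.

import asyncio

from megatron.core.inference.engines.mcore_engine import MCoreEngine
from megatron.core.inference.sampling_params import SamplingParams


def batch_generate(controller):
    # max_batch_size is clamped to inference_wrapper_config.inference_max_requests.
    engine = MCoreEngine(text_generation_controller=controller, random_seed=1234)
    requests = engine.generate(
        prompts=["Hello, my name is", "The capital of France is"],
        sampling_params=SamplingParams(),  # per-request sampling options
    )
    # generate() returns the completed InferenceRequest objects in submission order.
    return [r.generated_text for r in requests]


async def stream_one(controller, prompt):
    engine = MCoreEngine(text_generation_controller=controller)
    request_id = engine.add_request(prompt=prompt, streaming=True)
    runner = asyncio.create_task(engine.run_engine_async())
    stream = engine.get_stream_generator(request_id)
    if stream is not None:  # present when the scheduler registered an AsyncStream
        async for partial in stream:  # yields InferenceRequest snapshots
            print(partial.generated_text)
    await runner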
+from dataclasses import dataclass +from enum import Enum +from typing import List, Optional + +import torch + +from megatron.core.inference.sampling_params import SamplingParams + + +# class syntax +class Status(Enum): + """Enum for status""" + + WAITING_IN_QUEUE = 1 + ACTIVE_AND_GENERATING_TOKENS = 2 + ACTIVE_BUT_NOT_GENERATING_TOKENS = 3 + COMPLETED = 4 + + +@dataclass(kw_only=True) +class InferenceRequest: + """Class for one inference request + + Containing relevant data for an inference request + + """ + + request_id: str + prompt: str + inference_parameters: Optional[SamplingParams] = None + prompt_tokens: Optional[List[int]] = None + arrival_time: Optional[float] = None + status: Optional[Status] = None + encoder_prompt: Optional[str] = None + generated_text: Optional[str] = None + segments: Optional[List[str]] = None + generated_segments: Optional[List[str]] = None + generated_sequence_lengths: Optional[List[int]] = None + generated_tokens: Optional[torch.Tensor] = None + generated_log_probs: Optional[torch.Tensor] = None + generated_length: Optional[int] = None + + +@dataclass(kw_only=True) +class VLMInferenceRequest(InferenceRequest): + """Class for a VLM inference request""" + + num_img_embeddings_per_tile: int + imgs: torch.Tensor + num_tiles: torch.Tensor + decoder_seq_length: int diff --git a/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py index 647c4d1..fbaa94c 100644 --- a/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/abstract_model_inference_wrapper.py @@ -1,238 +1,315 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import abc -import math -from typing import Iterable, List, Union - -import torch - -from megatron.core import parallel_state, tensor_parallel -from megatron.core.inference.communication_utils import ( - recv_from_prev_pipeline_rank_, - send_to_next_pipeline_rank, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.inference_params import InferenceParams -from megatron.core.models.gpt.gpt_model import GPTModel - - -# pylint: disable=line-too-long -class AbstractModelInferenceWrapper(abc.ABC): - """Abstract inference wrapper - - Extend this to create a version for your model. - """ - - def __init__( - self, - model: Union['LegacyGPTModel', GPTModel], - inference_wrapper_config: InferenceWrapperConfig, - ): - """Constructor for the model inference wrapper - - The wrapper prepares the model for inference, provides the required input data and runs the forward pass. - - Args: - model (Union[GPTModel, LegacyGPTModel]): The actual GPT model (MCore or MLM) - inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc. - """ - assert not isinstance( - model, Iterable - ), 'interleaving schedule is not supported for inference' - self.model = model - self.inference_wrapper_config = inference_wrapper_config - self.pipeline_communication_dtype = ( - torch.float - if self.inference_wrapper_config.fp32_residual_connection - else self.inference_wrapper_config.params_dtype - ) - - def prep_model_for_inference(self, prompts_tokens: torch.Tensor): - """A utility function for preparing model for inference - - The function gets called once before the auto regressive inference loop. 
It puts the model in eval mode , and gets some model and inference data parameters. Extend this to build position ids ,attention mask etc, so that required slices can be extracted during the forward pass. - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] - - """ - self.model.eval() - - # For TP only model both is_pp_first_stage and _is_pp_last_stage returns True - self.model_is_pipeline_parallel = not ( - parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() - ) - self.prompts_tokens = prompts_tokens - batch_size, max_sequence_length = self.prompts_tokens.shape - self.inference_params = InferenceParams(batch_size, max_sequence_length) - - @abc.abstractmethod - def get_batch_for_context_window(self) -> List: - """Returns the input data for inference - - This function gets called iteratively in the inference loop . It can be used to extract relevant input from the prompt tokens, attention mask etc. required for each step in inference. - - """ - pass - - def forward_pass_without_pipeline_parallel(self, inference_input: List) -> torch.Tensor: - """Utility to carry out simple forward pass for TP or no model parallel models - - Runs a very simple forward pass for model. Used in the case of models without any parallelism or only tensor parallelism. - - Args: - inference_input (List): A list containg the inputs for the gpt model [tokens, position ids, attention mask] - - Returns: - torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] - """ - tokens, position_ids, attention_mask = inference_input - logits = self.model( - tokens, position_ids, attention_mask, inference_params=self.inference_params - ) - logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) - self.inference_params.sequence_len_offset += tokens.size(1) - - return logits - - def _allocate_recv_buffer(self, batch_size, seq_len): - """Receive happens between the layers with size [seq_len, batch_size, hidden_size].""" - recv_size = (seq_len, batch_size, self.inference_wrapper_config.hidden_size) - return torch.empty( - recv_size, dtype=self.pipeline_communication_dtype, device=torch.cuda.current_device() - ) - - def forward_pass_with_pipeline_parallel_small_input_batch( - self, inference_input: List - ) -> torch.Tensor: - """Utility to carry out forward pass for PP models with very small inputs - - If a model is pipeline parallel, yet, the input global batch is very small, we compute a foward pass on the entire global batch, rather than splitting it up into micro batches and doing something more complex as in the forward_pass_with_pipeline_parallel_large_input_batch method - - Args: - inference_input (List): A list containg the inputs for the gpt model [tokens, position ids, attention mask] - - Returns: - torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] - """ - tokens, position_ids, attention_mask = inference_input - batch_size, seq_len = tokens.shape - recv_buffer = None - if not parallel_state.is_pipeline_first_stage(): - recv_buffer = self._allocate_recv_buffer(batch_size, seq_len) - recv_from_prev_pipeline_rank_(recv_buffer) - - self.model.set_input_tensor(recv_buffer) - output_tensor = self.model( - tokens, position_ids, attention_mask, inference_params=self.inference_params - ) - - if not parallel_state.is_pipeline_last_stage(): - send_to_next_pipeline_rank(output_tensor.type(dtype=self.pipeline_communication_dtype)) - - self.inference_params.sequence_len_offset += seq_len - - logits = 
None - if parallel_state.is_pipeline_last_stage(): - logits = output_tensor - logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) - - return logits - - def forward_pass_with_pipeline_parallel_large_input_batch( - self, inference_input: List - ) -> torch.Tensor: - """Utility to carry out forward pass PP models. - - Runs the forward pass for models which are pipeline parallel. This is more complex than forward_pass_with_pipeline_parallel_small_input_batch coz this splits the global batch into small micro batches and runs them through the model. - - Args: - inference_input (List): A list containg the inputs for the gpt model [tokens, position ids, attention mask] - - Returns: - torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] - """ - tokens, position_ids, attention_mask = inference_input - micro_batch_size = max( - 1, - self.inference_wrapper_config.inference_batch_times_seqlen_threshold // tokens.size(1), - ) - batch_size, seq_len = tokens.shape - # Round up to account for the last partial micro batch if present - num_micro_batches = math.ceil(batch_size / micro_batch_size) - - logits = None - # Preallocate memory for output logits. - if parallel_state.is_pipeline_last_stage(): - logits = torch.empty( - (batch_size, seq_len, self.inference_wrapper_config.padded_vocab_size), - dtype=torch.float32, - device=torch.cuda.current_device(), - ) - - recv_buffer = None - if not parallel_state.is_pipeline_first_stage(): - recv_buffer = self._allocate_recv_buffer(micro_batch_size, seq_len) - for micro_batch_index in range(num_micro_batches): - start = micro_batch_index * micro_batch_size - end = min(start + micro_batch_size, batch_size) - tokens2use = tokens[start:end, ...] - position_ids2use = position_ids[start:end, ...] - current_micro_batch_size = end - start - - # Need to change recv buffer shape for the last partial microbatch (if exists) - if current_micro_batch_size != micro_batch_size: - recv_buffer = self._allocate_recv_buffer(current_micro_batch_size, seq_len) - - if not parallel_state.is_pipeline_first_stage(): - recv_from_prev_pipeline_rank_(recv_buffer) - - self.model.set_input_tensor(recv_buffer) - output_tensor = self.model( - tokens2use, position_ids2use, attention_mask, inference_params=self.inference_params - ) - - if not parallel_state.is_pipeline_last_stage(): - send_to_next_pipeline_rank(output_tensor) - - self.inference_params.batch_size_offset += current_micro_batch_size - - if parallel_state.is_pipeline_last_stage(): - output_tensor = tensor_parallel.gather_from_tensor_model_parallel_region( - output_tensor - ) - logits[start:end, ...] = output_tensor - - # Once done with all micro batches, we reset batch size offset and seq len offset - self.inference_params.sequence_len_offset += seq_len - self.inference_params.batch_size_offset = 0 - - # NOTE: Only returns the logits on the last pipeline stage - return logits - - def run_one_forward_step(self, inference_input: List) -> torch.Tensor: - """The forward pass of the model for inference - - Appropriate utility is called for the forward pass depending on the type of model parallelism used - - Args: - inference_input (List): A list containg the inputs for the gpt model [tokens, position ids, attention mask] - - Returns: - torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size]. The logits are returned only in the last pipeline stage for PP models. 
- """ - if self.model_is_pipeline_parallel: - tokens = inference_input[0] - current_batch_size, seq_len = tokens.shape - # If input batch is large, we need to split into micro batches and run the forward pass - if ( - current_batch_size * seq_len - > self.inference_wrapper_config.inference_batch_times_seqlen_threshold - ): - return self.forward_pass_with_pipeline_parallel_large_input_batch(inference_input) - else: - # If input batch is very small we can do a simple forward pass on the entire global batch - return self.forward_pass_with_pipeline_parallel_small_input_batch(inference_input) - else: - return self.forward_pass_without_pipeline_parallel(inference_input) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import abc +import math +from typing import Any, Dict, Iterable, Optional, Union + +import torch + +from megatron.core import parallel_state, tensor_parallel +from megatron.core.inference.communication_utils import ( + recv_from_prev_pipeline_rank_, + send_to_next_pipeline_rank, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference_params import InferenceParams +from megatron.core.models.gpt.gpt_model import GPTModel + + +# pylint: disable=line-too-long +class AbstractModelInferenceWrapper(abc.ABC): + """Abstract inference wrapper + + Extend this to create a version for your model. + """ + + def __init__( + self, + model: Union['LegacyGPTModel', GPTModel], # type: ignore[name-defined] + inference_wrapper_config: InferenceWrapperConfig, + ): + """Constructor for the model inference wrapper + + The wrapper prepares the model for inference, provides the required input data and runs the forward pass. + + Args: + model (Union[GPTModel, LegacyGPTModel]): The actual GPT model (MCore or MLM) + inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc. + """ + assert not isinstance( + model, Iterable + ), 'interleaving schedule is not supported for inference' + self.model = model + self.inference_wrapper_config = inference_wrapper_config + self.pipeline_communication_dtype = ( + torch.float + if self.inference_wrapper_config.fp32_residual_connection + else self.inference_wrapper_config.params_dtype + ) + + max_batch_size = self.inference_wrapper_config.inference_max_requests + max_sequence_length = self.inference_wrapper_config.inference_max_seq_length + self.inference_params = InferenceParams(max_batch_size, max_sequence_length) + + def prep_model_for_inference(self, prompts_tokens: torch.Tensor): + """A utility function for preparing model for inference + + The function gets called once before the auto regressive inference loop. + It puts the model in eval mode. + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + + """ + self.model.eval() + + # For TP only model both is_pp_first_stage and _is_pp_last_stage returns True + self.model_is_pipeline_parallel = not ( + parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() + ) + + self.inference_params.reset() + + @abc.abstractmethod + def prep_inference_input(self, prompt_tokens) -> Dict[str, Any]: + """Prepares the inference input data. + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + + Returns: + A dict with all the inference input needed for the batch. 
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def get_batch_for_context_window(self, *args, **kwargs) -> Dict[str, Any]: + """Returns the input data for inference + + This function gets called iteratively in the inference loop . It can be used to extract relevant input from the prompt tokens, attention mask etc. required for each step in inference. + + """ + raise NotImplementedError() + + def _forward(self, inference_input): + """Runs a forward pass of the model. + + Args: + inference_input(Dict[str, Any]): The input data. + inference_params(InferenceParams): The inference parameters. + + Returns: + The model output logits. + """ + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + attention_mask = inference_input["attention_mask"] + return self.model( + tokens, position_ids, attention_mask, inference_params=self.inference_params + ) + + def _get_batch_size_and_seq_len( + self, tokens: torch.Tensor, recv_buffer_seq_len: Optional[int] = None + ): + """ + Returns the batch size and sequence length based on the tokens tensor and recv_buffer_seq_len. + + Args: + tokens (torch.Tensor): The input tensor of shape (batch_size, seq_len). + recv_buffer_seq_len (int, optional): An optional recv buffer sequence length. + + Returns: + tuple: A tuple (batch_size, seq_len), where batch_size is the first dimension of tokens + and seq_len is either the second dimension or recv_buffer_seq_len. + """ + batch_size = tokens.shape[0] + seq_len = recv_buffer_seq_len if recv_buffer_seq_len is not None else tokens.shape[1] + return batch_size, seq_len + + def _allocate_recv_buffer(self, batch_size, seq_len): + """Receive happens between the layers with size [seq_len, batch_size, hidden_size].""" + recv_size = (seq_len, batch_size, self.inference_wrapper_config.hidden_size) + return torch.empty( + recv_size, dtype=self.pipeline_communication_dtype, device=torch.cuda.current_device() + ) + + def forward_pass_without_pipeline_parallel( + self, inference_input: Dict[str, Any] + ) -> torch.Tensor: + """Utility to carry out simple forward pass for TP or no model parallel models + + Runs a very simple forward pass for model. Used in the case of models without any parallelism or only tensor parallelism. + + Args: + inference_input (Dict[str, Any]): A dict containg the inputs for the gpt model [tokens, position ids, attention mask] + + Returns: + torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] + """ + tokens = inference_input["tokens"] + logits = self._forward(inference_input) + logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) + self.inference_params.sequence_len_offset += tokens.size(1) + + return logits + + def forward_pass_with_pipeline_parallel_small_input_batch( + self, inference_input: Dict[str, Any], recv_buffer_seq_len: Optional[int] = None + ) -> torch.Tensor: + """Utility to carry out forward pass for PP models with very small inputs + + If a model is pipeline parallel, yet, the input global batch is very small, we compute a foward pass on the entire global batch, rather than splitting it up into micro batches and doing something more complex as in the forward_pass_with_pipeline_parallel_large_input_batch method + + Args: + inference_input (Dict[str, Any]): A dict containing the inputs for the gpt model [tokens, position ids, attention mask] + recv_buffer_seq_len (int): An optional sequence length for the pipeline parallel recv buffer. 
+ + Returns: + torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] + """ + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + attention_mask = inference_input["attention_mask"] + batch_size, seq_len = self._get_batch_size_and_seq_len(tokens, recv_buffer_seq_len) + recv_buffer = None + if not parallel_state.is_pipeline_first_stage(): + recv_buffer = self._allocate_recv_buffer(batch_size, seq_len) + recv_from_prev_pipeline_rank_(recv_buffer) + + self.model.set_input_tensor(recv_buffer) + output_tensor = self._forward(inference_input) + + if not parallel_state.is_pipeline_last_stage(): + send_to_next_pipeline_rank(output_tensor.type(dtype=self.pipeline_communication_dtype)) + + self.inference_params.sequence_len_offset += seq_len + + logits = None + if parallel_state.is_pipeline_last_stage(): + logits = output_tensor + logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) + + # Explicitly cast logits to expected dtype + logits = logits.to(self.inference_wrapper_config.params_dtype) + + return logits + + def forward_pass_with_pipeline_parallel_large_input_batch( + self, inference_input: Dict[str, Any], recv_buffer_seq_len=None + ) -> torch.Tensor: + """Utility to carry out forward pass PP models. + + Runs the forward pass for models which are pipeline parallel. + This is more complex than forward_pass_with_pipeline_parallel_small_input_batch because + this splits the global batch into small micro batches and runs them through the model. + + Args: + inference_input (Dict[str, Any]): A dict containg the inputs for the gpt model [tokens, position ids, attention mask] + recv_buffer_seq_len (int): An optional sequence length for the pipeline parallel recv buffer. + + Returns: + torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] + """ + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + attention_mask = inference_input["attention_mask"] + micro_batch_size = max( + 1, + self.inference_wrapper_config.inference_batch_times_seqlen_threshold // tokens.size(1), + ) + batch_size, seq_len = self._get_batch_size_and_seq_len(tokens, recv_buffer_seq_len) + # Round up to account for the last partial micro batch if present + num_micro_batches = math.ceil(batch_size / micro_batch_size) + + logits = None + # Preallocate memory for output logits. + if parallel_state.is_pipeline_last_stage(): + logits = torch.empty( + (batch_size, seq_len, self.inference_wrapper_config.padded_vocab_size), + dtype=self.pipeline_communication_dtype, + device=torch.cuda.current_device(), + ) + + recv_buffer = None + if not parallel_state.is_pipeline_first_stage(): + recv_buffer = self._allocate_recv_buffer(micro_batch_size, seq_len) + for micro_batch_index in range(num_micro_batches): + start = micro_batch_index * micro_batch_size + end = min(start + micro_batch_size, batch_size) + tokens2use = tokens[start:end, ...] + position_ids2use = position_ids[start:end, ...] 
+ current_micro_batch_size = end - start + + # Need to change recv buffer shape for the last partial microbatch (if exists) + if current_micro_batch_size != micro_batch_size: + recv_buffer = self._allocate_recv_buffer(current_micro_batch_size, seq_len) + + if not parallel_state.is_pipeline_first_stage(): + recv_from_prev_pipeline_rank_(recv_buffer) + + self.model.set_input_tensor(recv_buffer) + output_tensor = self._forward( + { + "tokens": tokens2use, + "position_ids": position_ids2use, + "attention_mask": attention_mask, + } + ) + + if not parallel_state.is_pipeline_last_stage(): + send_to_next_pipeline_rank(output_tensor) + + self.inference_params.batch_size_offset += current_micro_batch_size + + if parallel_state.is_pipeline_last_stage(): + output_tensor = tensor_parallel.gather_from_tensor_model_parallel_region( + output_tensor + ) + assert logits is not None + logits[start:end, ...] = output_tensor + + # Explicitly cast logits to expected dtype + logits = logits.to(self.inference_wrapper_config.params_dtype) + + # Once done with all micro batches, we reset batch size offset and seq len offset + self.inference_params.sequence_len_offset += seq_len + self.inference_params.batch_size_offset = 0 + + # NOTE: Only returns the logits on the last pipeline stage + return logits + + def run_one_forward_step( + self, inference_input: Dict[str, Any], recv_buffer_seq_len: Optional[int] = None + ) -> torch.Tensor: + """The forward pass of the model for inference + + Appropriate utility is called for the forward pass depending on the type of model parallelism used + + Args: + inference_input (Dict[str, Any]): A dict containg the inputs for the gpt model [tokens, position ids, attention mask] + recv_buffer_seq_len (int): An optional sequence length for the pipeline parallel recv buffer. + + Returns: + torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size]. The logits are returned only in the last pipeline stage for PP models. + """ + if self.model_is_pipeline_parallel: + tokens = inference_input["tokens"] + current_batch_size, seq_len = self._get_batch_size_and_seq_len( + tokens, recv_buffer_seq_len + ) + # If input batch is large, we need to split into micro batches and run the forward pass + if ( + current_batch_size * seq_len + > self.inference_wrapper_config.inference_batch_times_seqlen_threshold + ): + return self.forward_pass_with_pipeline_parallel_large_input_batch( + inference_input, recv_buffer_seq_len + ) + else: + # If input batch is very small we can do a simple forward pass on the entire global batch + return self.forward_pass_with_pipeline_parallel_small_input_batch( + inference_input, recv_buffer_seq_len + ) + else: + return self.forward_pass_without_pipeline_parallel(inference_input) diff --git a/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py index 166ed5e..5af4b09 100644 --- a/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/gpt/gpt_inference_wrapper.py @@ -1,90 +1,102 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
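[Editor's note: illustrative sketch, not part of the patch.] The wrapper API above is normally driven step by step by a text generation controller: prep_model_for_inference() once, prep_inference_input() once, then get_batch_for_context_window() plus run_one_forward_step() per decode step. As a concrete instance of the routing logic: with inference_batch_times_seqlen_threshold=1000, a prefill of batch 8 x 512 tokens (4096 > 1000) takes the micro-batched path with micro_batch_size = max(1, 1000 // 512) = 1 and 8 micro batches, while single-token decode steps take the single-pass path. The greedy-decoding sketch below assumes a concrete subclass such as the GPTInferenceWrapper defined further below.

import torch


def greedy_decode(wrapper, prompt_tokens: torch.Tensor, num_new_tokens: int) -> torch.Tensor:
    # prompt_tokens: [batch_size, prompt_len], already padded to a common length.
    wrapper.prep_model_for_inference(prompt_tokens)  # eval mode, reset InferenceParams
    tokens = torch.cat(
        [
            prompt_tokens,
            torch.zeros(  # placeholder ids for the tokens to be generated
                prompt_tokens.size(0),
                num_new_tokens,
                dtype=prompt_tokens.dtype,
                device=prompt_tokens.device,
            ),
        ],
        dim=1,
    )
    inference_input = wrapper.prep_inference_input(tokens)  # tokens / position_ids / attention_mask
    prompt_len = prompt_tokens.size(1)
    context_start = 0
    for context_end in range(prompt_len, tokens.size(1)):
        step_input = wrapper.get_batch_for_context_window(
            inference_input, context_start, context_end
        )
        logits = wrapper.run_one_forward_step(step_input)
        if logits is not None:  # logits only materialize on the last PP stage;
            # a real controller broadcasts the sampled token to all PP ranks.
            tokens[:, context_end] = logits[:, -1, :].argmax(dim=-1)
        context_start = context_end  # with the KV cache warm, feed only the new token next step
    return tokens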
-from typing import List, Tuple - -import torch - -from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( - AbstractModelInferenceWrapper, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.models.gpt import GPTModel - - -# pylint: disable=line-too-long -class GPTInferenceWrapper(AbstractModelInferenceWrapper): - """Inference wrapper for GPT model""" - - def __init__(self, model: GPTModel, inference_wrapper_config: InferenceWrapperConfig): - """Constructor for the model inference wrapper - - The wrapper prepares the model for inference, provides the required input data, and runs the forward pass - - Args: - model (GPTModel): The GPT model (MCore or legacy) - inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc - """ - super().__init__(model, inference_wrapper_config) - - def prep_model_for_inference(self, prompts_tokens: torch.Tensor): - """A utility function for preparing model for inference - - This function is called before the forward pass. It puts the model in eval mode, builds position ids, and creates attention masks so that required slices can be extracted during the forward pass. - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] - """ - - super().prep_model_for_inference(prompts_tokens=prompts_tokens) - self.attention_mask, self.position_ids = self._build_attention_mask_and_position_ids( - prompts_tokens - ) - - def _build_attention_mask_and_position_ids( - self, prompts_tokens: torch.Tensor - ) -> Tuple[torch.Tensor, torch.Tensor]: - """Builds the full attention mask and position ids for the input tokens - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] - - Returns: - Tuple[torch.Tensor, torch.Tensor]: The attention mask of shape [1, 1, max_seq_len, max_seq_len] and position ids of shape [batch_size, max_seq_len] - """ - seq_length = prompts_tokens.size(1) - attention_mask = torch.tril( - torch.ones((1, seq_length, seq_length), device=prompts_tokens.device) - ).view(1, 1, seq_length, seq_length) - # Convert to boolean - attention_mask = attention_mask < 0.5 - - position_ids = ( - torch.arange(seq_length, dtype=torch.long, device=prompts_tokens.device) - .unsqueeze(0) - .expand_as(prompts_tokens) - ) - - return attention_mask, position_ids - - def get_batch_for_context_window( - self, context_start_position: int, context_end_position: int - ) -> List: - """Returns the inference data given context window - - This function gets called iteratively in a loop . Given the start and end context positions , it extracts the appropriate data. - - Args: - context_start_position (int): Start of the context window. During the first inference step it is mostly 0 - context_end_position (int): End of the context window. During the last inference step it will mostly be the max generated sequence length. - - Returns: - List: A list of inputs that will be used by your model in the forward step - """ - tokens2use = self.prompts_tokens[:, context_start_position:context_end_position] - positions2use = self.position_ids[:, context_start_position:context_end_position] - attention_mask2use = self.attention_mask[ - ..., context_start_position:context_end_position, :context_end_position - ] - data_at_step_idx = [tokens2use, positions2use, attention_mask2use] - return data_at_step_idx +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+from typing import Any, Dict, Tuple + +import torch + +from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( + AbstractModelInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.models.gpt import GPTModel + + +# pylint: disable=line-too-long +class GPTInferenceWrapper(AbstractModelInferenceWrapper): + """Inference wrapper for GPT model""" + + def __init__(self, model: GPTModel, inference_wrapper_config: InferenceWrapperConfig): + """Constructor for the model inference wrapper + + The wrapper prepares the model for inference, provides the required input data, and runs the forward pass + + Args: + model (GPTModel): The GPT model (MCore or legacy) + inference_wrapper_config (InferenceWrapperConfig): Has info like hidden size, vocab size etc + """ + super().__init__(model, inference_wrapper_config) + + def prep_inference_input(self, prompts_tokens: torch.Tensor) -> Dict[str, Any]: + """Prepares the inference input data. + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + + Returns: + A dict with all the inference input needed for the batch. + """ + attention_mask, position_ids = self._build_attention_mask_and_position_ids(prompts_tokens) + return { + "tokens": prompts_tokens, + "attention_mask": attention_mask, + "position_ids": position_ids, + } + + def _build_attention_mask_and_position_ids( + self, prompts_tokens: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Builds the full attention mask and position ids for the input tokens + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The attention mask of shape [1, 1, max_seq_len, max_seq_len] and position ids of shape [batch_size, max_seq_len] + """ + seq_length = prompts_tokens.size(1) + attention_mask = torch.tril( + torch.ones((1, seq_length, seq_length), device=prompts_tokens.device) + ).view(1, 1, seq_length, seq_length) + # Convert to boolean + attention_mask = attention_mask < 0.5 + + position_ids = ( + torch.arange(seq_length, dtype=torch.long, device=prompts_tokens.device) + .unsqueeze(0) + .expand_as(prompts_tokens) + ) + + return attention_mask, position_ids + + def get_batch_for_context_window( + self, + inference_input: Dict[str, Any], + context_start_position: int, + context_end_position: int, + ) -> Dict[str, Any]: + """Returns the inference data given context window + + This function gets called iteratively in a loop . Given the start and end context positions , it extracts the appropriate data. + + Args: + inference_input (Dict[str, Any]): The inference input for the batch. + context_start_position (int): Start of the context window. During the first inference step it is mostly 0 + context_end_position (int): End of the context window. During the last inference step it will mostly be the max generated sequence length. 
+ + Returns: + Dict[str, Any]: A dict of inputs that will be used by your model in the forward step + """ + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + attention_mask = inference_input["attention_mask"] + tokens2use = tokens[:, context_start_position:context_end_position] + positions2use = position_ids[:, context_start_position:context_end_position] + attention_mask2use = attention_mask[ + ..., context_start_position:context_end_position, :context_end_position + ] + return { + "tokens": tokens2use, + "position_ids": positions2use, + "attention_mask": attention_mask2use, + } diff --git a/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py b/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py index 14ca0f6..a746f8c 100644 --- a/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py +++ b/megatron/core/inference/model_inference_wrappers/inference_wrapper_config.py @@ -1,44 +1,50 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from dataclasses import dataclass - -import torch - - -@dataclass -class InferenceWrapperConfig: - """Config for the model inference wrapper - - NOTE : All the arguments here are obtained from arguments.py file - """ - - hidden_size: int - """Receive happens between the layers during PP with size [seq_len, batch_size, hidden_size]""" - - params_dtype: torch.dtype - """Can be torch.float or torch.half if --fp16 is used, or torch.bfloat16 if --bf16 is used""" - - inference_batch_times_seqlen_threshold: int - """if (batch-size * sequence-length) is smaller than this threshold then we will not pipeline - the batch.""" - - padded_vocab_size: int - """The final padded vocab size (Padded to make it divisible by - --make-vocab-size-divisible-by value)""" - - fp32_residual_connection: bool = False - """Move residual connections to fp32. Obtained from arguments.py""" - - def add_attributes(self, attribute_value_pair: dict): - """Utility to add more attributes to inference params - - Use this method to pass in a custom dictionary to add more configs to the instance created. - Use as follows: - c = InferenceWrapperConfig - c.add_attributes({'precision':'fp32'}) - - Args: - attribute_value_pair (dict): A dictionary containing attributes as the key names and - corresponding values. - """ - for key, value in attribute_value_pair.items(): - setattr(self, key, value) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from dataclasses import dataclass + +import torch + + +@dataclass +class InferenceWrapperConfig: + """Config for the model inference wrapper + + NOTE : All the arguments here are obtained from arguments.py file + """ + + hidden_size: int + """Receive happens between the layers during PP with size [seq_len, batch_size, hidden_size]""" + + params_dtype: torch.dtype + """Can be torch.float or torch.half if --fp16 is used, or torch.bfloat16 if --bf16 is used""" + + inference_batch_times_seqlen_threshold: int + """if (batch-size * sequence-length) is smaller than this threshold then we will not pipeline + the batch.""" + + padded_vocab_size: int + """The final padded vocab size (Padded to make it divisible by + --make-vocab-size-divisible-by value)""" + + inference_max_requests: int = 8 + """ Maximum number of requests for inference (prefill & decode). Necessary for CUDA graphs. """ + + inference_max_seq_length: int = 2560 + """ Maximum sequence length for inference (prefill & decode). Necessary for CUDA graphs. 
""" + + fp32_residual_connection: bool = False + """Move residual connections to fp32. Obtained from arguments.py""" + + def add_attributes(self, attribute_value_pair: dict): + """Utility to add more attributes to inference params + + Use this method to pass in a custom dictionary to add more configs to the instance created. + Use as follows: + c = InferenceWrapperConfig + c.add_attributes({'precision':'fp32'}) + + Args: + attribute_value_pair (dict): A dictionary containing attributes as the key names and + corresponding values. + """ + for key, value in attribute_value_pair.items(): + setattr(self, key, value) diff --git a/megatron/core/inference/model_inference_wrappers/multimodal/vlm_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/multimodal/vlm_inference_wrapper.py new file mode 100644 index 0000000..96acca6 --- /dev/null +++ b/megatron/core/inference/model_inference_wrappers/multimodal/vlm_inference_wrapper.py @@ -0,0 +1,208 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from typing import Any, Dict + +import torch + +from megatron.core import parallel_state +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference_params import InferenceParams + + +# pylint: disable=line-too-long +class VLMInferenceWrapper(GPTInferenceWrapper): + """Inference wrapper for VLMs""" + + def prep_model_for_inference(self, prompts_tokens: torch.Tensor): + """A utility function for preparing model for inference + + The function gets called once before the auto regressive inference loop. + It puts the model in eval mode. + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + + """ + super().prep_model_for_inference(prompts_tokens) + + # For TP only model both is_pp_first_stage and _is_pp_last_stage returns True + self.model_is_pipeline_parallel = not ( + parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() + ) + + self._recv_only_vision_embeds = False + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + # Checks if the previous stage only has a vision encoder, and that the current stage + # has part of the LM decoder. In this case, the current stage should only receive + # vision embeddings. + if pp_rank > 0: + self._recv_only_vision_embeds = ( + parallel_state.is_inside_encoder(pp_rank - 1) + and (not parallel_state.is_inside_decoder(pp_rank - 1)) + and parallel_state.is_inside_decoder() + ) + + # Checks if the current stage only has a vision encoder + self._encoder_only = ( + parallel_state.is_inside_encoder() and not parallel_state.is_inside_decoder() + ) + + # For TP only model both is_pp_first_stage and _is_pp_last_stage returns True + self.model_is_pipeline_parallel = not ( + parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() + ) + + def prep_inference_input( + self, + prompts_tokens: torch.Tensor, + num_img_embeddings_per_tile: int, + images: torch.Tensor, + num_tiles: torch.Tensor, + decoder_seq_length: int, + ): + """Prepares the inference input data. 
+ + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + num_img_embeddings_per_tile (int): The number of image embeddings per tile + images (torch.Tensor): The image embeddings + num_tiles (torch.Tensor): The number of tiles for each input image + decoder_seq_length (int): The decoder sequence length + """ + inference_input = super().prep_inference_input(prompts_tokens) + + total_num_tiles = torch.sum(num_tiles).item() + num_img_embeddings = num_img_embeddings_per_tile * total_num_tiles + + batch_size, max_sequence_length = prompts_tokens.shape + self.inference_params = InferenceParams( + batch_size, max_sequence_length + num_img_embeddings + ) + + inference_input["images"] = images + inference_input["num_tiles"] = num_tiles + inference_input["num_img_embeddings"] = num_img_embeddings + inference_input["decoder_seq_length"] = decoder_seq_length + + return inference_input + + def get_batch_for_context_window( + self, + inference_input: Dict[str, Any], + context_start_position: int, + context_end_position: int, + ) -> Dict[str, Any]: + """Returns the inference data given context window + + This function gets called iteratively in a loop . Given the start and end context positions , it extracts the appropriate data. + + Args: + inference_input (Dict[str, Any]): The inference input for the batch. + context_start_position (int): Start of the context window. During the first inference step it is mostly 0 + context_end_position (int): End of the context window. During the last inference step it will mostly be the max generated sequence length. + + Returns: + Dict[str, Any]: A dict of inputs that will be used by your model in the forward step + """ + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + images = inference_input["images"] + num_tiles = inference_input["num_tiles"] + num_img_embeddings = inference_input["num_img_embeddings"] + decoder_seq_length = inference_input["decoder_seq_length"] + + tokens2use = tokens[:, context_start_position:context_end_position] + positions2use = position_ids[:, context_start_position:context_end_position] + + return { + "tokens": tokens2use, + "position_ids": positions2use, + "images": images, + "num_tiles": num_tiles, + "num_img_embeddings": num_img_embeddings, + "decoder_seq_length": decoder_seq_length, + } + + def _forward(self, inference_input: Dict[str, Any]): + """Runs a forward pass of the model. + + Args: + inference_input(Dict[str, Any]): The input data. + + Returns: + The model output logits. 
+ """ + images = inference_input["images"] + tokens = inference_input["tokens"] + position_ids = inference_input["position_ids"] + num_image_tiles = inference_input["num_tiles"] + + output = self.model( + images, + tokens, + position_ids=position_ids, + attention_mask=None, + inference_params=self.inference_params, + num_image_tiles=num_image_tiles, + runtime_gather_output=True, + ) + if isinstance(output, tuple): + logits, _ = output + else: + logits = output + return logits + + def run_one_forward_step(self, inference_input: Dict[str, Any]) -> torch.Tensor: + tokens = inference_input["tokens"] + num_image_tokens = (tokens == self.model.module.image_token_index).sum().item() + num_img_embeddings = inference_input["num_img_embeddings"] + decoder_seq_length = inference_input["decoder_seq_length"] + num_tokens = tokens.size(1) + recv_buffer_seq_len = None + if num_image_tokens > 0: + # When there are image tokens and this stage only receives vision embeddings, + # adjust the recv buffer seq length to match the image embeddings sequence length. + # If there are image tokens and this stage receives full embeddings, make sure we + # compensate for expansion of image tokens. + # Note that this will set a recv_buffer_seq_len for the encoder stage, + # this length is irrelevant since that recv buffer is never allocated. + if self._recv_only_vision_embeds: + recv_buffer_seq_len = num_img_embeddings + else: + recv_buffer_seq_len = min( + num_img_embeddings + num_tokens - num_image_tokens, decoder_seq_length + ) + elif self._recv_only_vision_embeds: + # If this stage only receives vision embeddings and there are no image tokens + # we won't run the encoder and therefore shouldn't try to recv. + recv_buffer_seq_len = 0 + + # If the pipeline stage only has a vision encoder, then it only needs to + # run when there are image tokens + if not (self._encoder_only and num_image_tokens == 0): + output = super().run_one_forward_step( + inference_input, recv_buffer_seq_len=recv_buffer_seq_len + ) + else: + output = None + logits = output + + # On the first inference iteration, we compute image tokens. + # On every PP stage(although inference params should only matter for decoder), + # update the sequence length offset by the number of image tokens. + if num_tokens > 1 and num_image_tokens > 0: + if "image_tokens_count" not in self.inference_params.key_value_memory_dict: + self.inference_params.key_value_memory_dict["image_tokens_count"] = ( + num_img_embeddings + ) + + if num_img_embeddings + num_tokens - num_image_tokens > decoder_seq_length: + self.inference_params.sequence_len_offset += decoder_seq_length - num_tokens + else: + self.inference_params.sequence_len_offset += ( + self.inference_params.key_value_memory_dict["image_tokens_count"] + - num_image_tokens + ) + + return logits diff --git a/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py b/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py index 2e5f846..f076528 100644 --- a/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py +++ b/megatron/core/inference/model_inference_wrappers/t5/t5_inference_wrapper.py @@ -1,215 +1,225 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
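[Editor's note: worked example, illustrative only; all numbers are hypothetical.] The image-token bookkeeping in VLMInferenceWrapper above can be made concrete with a small calculation: two images of three tiles each, 576 embeddings per tile, a 4096-token decoder, and a 64-token prompt containing 2 image placeholder tokens.

num_img_embeddings_per_tile = 576
num_tiles_total = 3 + 3
num_img_embeddings = num_img_embeddings_per_tile * num_tiles_total   # 3456
decoder_seq_length = 4096
num_tokens, num_image_tokens = 64, 2

# prep_inference_input sizes the KV cache for text plus the expanded image embeddings:
#   InferenceParams(batch_size, max_sequence_length + num_img_embeddings)

# A stage that receives full embeddings compensates for image-token expansion:
recv_buffer_seq_len = min(
    num_img_embeddings + num_tokens - num_image_tokens, decoder_seq_length
)  # min(3518, 4096) = 3518

# After the prefill step, run_one_forward_step advances the sequence offset by an
# extra (image embeddings minus the placeholder tokens they replace), on top of the
# per-step offset handled by the base class:
extra_sequence_len_offset = num_img_embeddings - num_image_tokens      # 3454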
-from collections import deque -from typing import Any, List, Tuple - -import numpy -import torch - -from megatron.core import tensor_parallel -from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset -from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( - AbstractModelInferenceWrapper, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.models.T5 import T5Model - - -# pylint: disable=line-too-long -class T5InferenceWrapper(AbstractModelInferenceWrapper): - """Constructor for the model inference wrapper - - The wrapper prepares the model for inference, provides the required input - data, and runs the forward pass - - Args: - model (T5Model): The T5 model (MCore or legacy) - inference_wrapper_config (InferenceWrapperConfig): The command line arguments that were passed - use_local (bool): Whether the T5 model's transformer impl - is local (vs transformer_engine) - """ - - def __init__( - self, - model: T5Model, - inference_wrapper_config: InferenceWrapperConfig, - use_local: bool = False, - ): - super().__init__(model, inference_wrapper_config) - self.use_local = use_local - - def prep_model_for_inference( - self, prompts_tokens: torch.Tensor, encoder_prompts: List[str] = None, tokenizer: Any = None - ): - """A utility function for preparing model for inference - - This function is called before the forward pass. It puts the model in eval mode, builds - position ids, and creates attention masks so that required slices can be extracted during - the forward pass. - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] - encoder_prompts (dict): List of string of encoder input prompts - tokenizer (_type_): Tokenizer used for tokenizing and detokenizing text - """ - - super().prep_model_for_inference(prompts_tokens=prompts_tokens) - - # get max_sequence_length - if hasattr(self.model, "module"): # if self.model is Float16Module - max_sequence_length = self.model.module.max_sequence_length - else: - max_sequence_length = self.model.max_sequence_length - - encoder_prompts_tokens_list = [ - self.tokenize_encoder_prompt(encoder_prompt, tokenizer) - for encoder_prompt in encoder_prompts - ] - self.batch_encoder_prompts_tokens = self.pad_encoder_prompts_tokens( - encoder_prompts_tokens_list, max_sequence_length, tokenizer - ) - - # create batch mask for encoder_prompt (self.batch_input_tokens) and - # decoder_input (self.prompts_tokens), similar to megatron/core/datasets/t5_dataset.py - decoder_prompts_tokens = self.prompts_tokens.cpu().numpy() - encoder_prompts_tokens = self.batch_encoder_prompts_tokens.cpu().numpy() - self.batch_mask_encoder = [] - self.batch_mask_decoder = [] - for i in range(len(self.prompts_tokens)): - mask_encoder = encoder_prompts_tokens[i] == tokenizer.pad - mask_decoder = decoder_prompts_tokens[i] == tokenizer.pad - self.batch_mask_encoder.append(mask_encoder) - self.batch_mask_decoder.append(mask_decoder) - self.batch_mask_encoder = torch.tensor(numpy.array(self.batch_mask_encoder)).cuda() - self.batch_mask_decoder = torch.tensor(numpy.array(self.batch_mask_decoder)).cuda() - - def tokenize_encoder_prompt( - self, encoder_prompt: str, tokenizer - ) -> Tuple[torch.Tensor, torch.Tensor]: - """Utility to tokenize the encoder_prompt - - Args: - encoder_prompt (str): The encoder_prompt - tokenizer (_type_): Tokenizer used for tokenizing and detokenizing string - - Returns: - torch.Tensor: Returns 
the tokenized prompt - """ - - # if there is the word "" in prompt, replacing it with special_additional_token, - # similar to processing step in megatron/core/datasets/t5_dataset.py - divided_encoder_prompt_list = encoder_prompt.split("") - masks_count = len(divided_encoder_prompt_list) - 1 - sentinels = deque(tokenizer.additional_special_tokens_ids) - - encoder_prompt_tokens = [] - for divided_encoder_prompt in divided_encoder_prompt_list: - divided_encoder_prompt_tokens = tokenizer.tokenize(divided_encoder_prompt) - encoder_prompt_tokens.extend(divided_encoder_prompt_tokens) - if masks_count > 0: - sentinel = sentinels.popleft() - encoder_prompt_tokens.extend([sentinel]) - masks_count -= 1 - - return encoder_prompt_tokens - - def pad_encoder_prompts_tokens( - self, encoder_prompts_tokens_list: List[List[int]], max_sequence_length: int, tokenizer - ) -> torch.Tensor: - """Method to pad input prompts - - Given a list of prompts, pad them all to uniform length - - Args: - encoder_prompts_tokens_list (List[List[int]]): A list containing the - encoder_input_tokens - max_sequence_length (int): Maximum of the length of the encoder inputs tokens - tokenizer (_type_): Tokenizer used for tokenizing and detokenizing text - - Returns: - torch.Tensor: A torch tensor of shape [bs, max_sequence_length] - """ - - for encoder_prompt_tokens in encoder_prompts_tokens_list: - padding_size = max_sequence_length - len(encoder_prompt_tokens) - encoder_prompt_tokens.extend([tokenizer.pad] * padding_size) - - return torch.tensor(encoder_prompts_tokens_list).cuda() - - def get_batch_for_context_window( - self, context_start_position: int, context_end_position: int - ) -> List: - """Returns the inference data given context window - - This function gets called iteratively in a loop . Given the start and end context - positions , it extracts the appropriate data. - - Args: - context_start_position (int): Start of the context window. During - the first inference step it is mostly 0 - context_end_position (int): End of the context window. During the - last inference step it will mostly be the max generated sequence length. - - Returns: - List: A list of inputs that will be used by your model in the forward step - """ - - # T5 inference not yet support kv_cache - encoder_tokens2use = self.batch_encoder_prompts_tokens - decoder_tokens2use = self.prompts_tokens[:, :context_end_position] - encoder_mask2use = self.batch_mask_encoder - decoder_mask2use = self.batch_mask_decoder[:, :context_end_position] - - # Configure attention mask based on different conditions - # (e.g., transformer-impl, TE versions, TE backends) - [encoder_mask2use, decoder_mask2use, encoder_decoder_mask2use] = ( - T5MaskedWordPieceDataset.config_attention_mask( - encoder_tokens2use, - decoder_tokens2use, - encoder_mask2use, - decoder_mask2use, - self.use_local, - ) - ) - - data_at_step_idx = [ - encoder_tokens2use, - decoder_tokens2use, - encoder_mask2use, - decoder_mask2use, - encoder_decoder_mask2use, - ] - - return data_at_step_idx - - def forward_pass_without_pipeline_parallel(self, inference_input: List) -> torch.Tensor: - """Utility to carry out simple forward pass for TP or no model parallel models - - Runs a very simple forward pass for model. Used in the case of models without - any parallelism or only tensor parallelism. 
- - Args: - inference_input (List): A list containg the inputs for the gpt - model [tokens, position ids, attention mask] - - Returns: - torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] - """ - [encoder_tokens, decoder_tokens, encoder_mask, decoder_mask, encoder_decoder_mask] = ( - inference_input - ) - tokens = decoder_tokens - - # T5 inference not yet support kv_cache - logits = self.model( - encoder_tokens, - decoder_tokens, - encoder_mask, - decoder_mask, - encoder_decoder_mask, - inference_params=None, - ) - logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) - - return logits +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from collections import deque +from typing import Any, Dict, List, Optional + +import numpy +import torch + +from megatron.core import tensor_parallel +from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset +from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( + AbstractModelInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.models.T5 import T5Model +from megatron.core.utils import get_attr_wrapped_model + + +# pylint: disable=line-too-long +class T5InferenceWrapper(AbstractModelInferenceWrapper): + """Constructor for the model inference wrapper + + The wrapper prepares the model for inference, provides the required input + data, and runs the forward pass + + Args: + model (T5Model): The T5 model (MCore or legacy) + inference_wrapper_config (InferenceWrapperConfig): The command line arguments that were passed + use_local (bool): Whether the T5 model's transformer impl + is local (vs transformer_engine) + """ + + def __init__( + self, + model: T5Model, + inference_wrapper_config: InferenceWrapperConfig, + use_local: bool = False, + ): + super().__init__(model, inference_wrapper_config) + self.use_local = use_local + + def prep_inference_input( + self, + prompts_tokens: torch.Tensor, + encoder_prompts: Optional[List[str]] = None, + tokenizer: Any = None, + ) -> Dict[str, Any]: + """Prepares the inference input data. + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_seq_len] + encoder_prompts (dict): List of string of encoder input prompts + tokenizer (_type_): Tokenizer used for tokenizing and detokenizing text + + Returns: + A dict with all the inference input needed for the batch. 
+ """ + # get max_sequence_length + max_sequence_length = get_attr_wrapped_model(self.model, "max_sequence_length") + + encoder_prompts_tokens_list = [ + self.tokenize_encoder_prompt(encoder_prompt, tokenizer) + for encoder_prompt in encoder_prompts + ] + batch_encoder_prompts_tokens = self.pad_encoder_prompts_tokens( + encoder_prompts_tokens_list, max_sequence_length, tokenizer + ) + + # create batch mask for encoder_prompt (self.batch_input_tokens) and + # decoder_input (prompts_tokens), similar to megatron/core/datasets/t5_dataset.py + decoder_prompts_tokens = prompts_tokens + encoder_prompts_tokens = batch_encoder_prompts_tokens + decoder_prompts_tokens_numpy = decoder_prompts_tokens.cpu().numpy() + encoder_prompts_tokens_numpy = encoder_prompts_tokens.cpu().numpy() + batch_mask_encoder = [] + batch_mask_decoder = [] + for i in range(len(prompts_tokens)): + mask_encoder = encoder_prompts_tokens_numpy[i] == tokenizer.pad + mask_decoder = decoder_prompts_tokens_numpy[i] == tokenizer.pad + batch_mask_encoder.append(mask_encoder) + batch_mask_decoder.append(mask_decoder) + batch_mask_encoder = torch.tensor(numpy.array(batch_mask_encoder)).cuda() + batch_mask_decoder = torch.tensor(numpy.array(batch_mask_decoder)).cuda() + + return { + "encoder_tokens": encoder_prompts_tokens, + "decoder_tokens": decoder_prompts_tokens, + "encoder_mask": batch_mask_encoder, + "decoder_mask": batch_mask_decoder, + } + + def tokenize_encoder_prompt(self, encoder_prompt: str, tokenizer) -> torch.Tensor: + """Utility to tokenize the encoder_prompt + + Args: + encoder_prompt (str): The encoder_prompt + tokenizer (_type_): Tokenizer used for tokenizing and detokenizing string + + Returns: + torch.Tensor: Returns the tokenized prompt + """ + + # if there is the word "" in prompt, replacing it with special_additional_token, + # similar to processing step in megatron/core/datasets/t5_dataset.py + divided_encoder_prompt_list = encoder_prompt.split("") + masks_count = len(divided_encoder_prompt_list) - 1 + sentinels = deque(tokenizer.additional_special_tokens_ids) + + encoder_prompt_tokens = [] + for divided_encoder_prompt in divided_encoder_prompt_list: + divided_encoder_prompt_tokens = tokenizer.tokenize(divided_encoder_prompt) + encoder_prompt_tokens.extend(divided_encoder_prompt_tokens) + if masks_count > 0: + sentinel = sentinels.popleft() + encoder_prompt_tokens.extend([sentinel]) + masks_count -= 1 + + return encoder_prompt_tokens + + def pad_encoder_prompts_tokens( + self, encoder_prompts_tokens_list: List[List[int]], max_sequence_length: int, tokenizer + ) -> torch.Tensor: + """Method to pad input prompts + + Given a list of prompts, pad them all to uniform length + + Args: + encoder_prompts_tokens_list (List[List[int]]): A list containing the + encoder_input_tokens + max_sequence_length (int): Maximum of the length of the encoder inputs tokens + tokenizer (_type_): Tokenizer used for tokenizing and detokenizing text + + Returns: + torch.Tensor: A torch tensor of shape [bs, max_sequence_length] + """ + + for encoder_prompt_tokens in encoder_prompts_tokens_list: + padding_size = max_sequence_length - len(encoder_prompt_tokens) + encoder_prompt_tokens.extend([tokenizer.pad] * padding_size) + + return torch.tensor(encoder_prompts_tokens_list).cuda() + + def get_batch_for_context_window( + self, + inference_input: Dict[str, Any], + context_start_position: int, + context_end_position: int, + ) -> Dict[str, Any]: + """Returns the inference data given context window + + This function gets called iteratively in a 
loop . Given the start and end context + positions , it extracts the appropriate data. + + Args: + inference_input (Dict[str, Any]): The inference input for the batch. + context_start_position (int): Start of the context window. During + the first inference step it is mostly 0 + context_end_position (int): End of the context window. During the + last inference step it will mostly be the max generated sequence length. + + Returns: + Dict: A dict of inputs that will be used by your model in the forward step + """ + + # T5 inference not yet support kv_cache + encoder_tokens2use = inference_input["encoder_tokens"] + decoder_tokens2use = inference_input["decoder_tokens"][:, :context_end_position] + encoder_mask2use = inference_input["encoder_mask"] + decoder_mask2use = inference_input["decoder_mask"][:, :context_end_position] + + # Configure attention mask based on different conditions + # (e.g., transformer-impl, TE versions, TE backends) + [encoder_mask2use, decoder_mask2use, encoder_decoder_mask2use] = ( + T5MaskedWordPieceDataset.config_attention_mask( + encoder_tokens2use, + decoder_tokens2use, + encoder_mask2use, + decoder_mask2use, + self.use_local, + ) + ) + + return { + "encoder_tokens": encoder_tokens2use, + "decoder_tokens": decoder_tokens2use, + "encoder_mask": encoder_mask2use, + "decoder_mask": decoder_mask2use, + "encoder_decoder_mask": encoder_decoder_mask2use, + } + + def forward_pass_without_pipeline_parallel( + self, inference_input: Dict[str, Any] + ) -> torch.Tensor: + """Utility to carry out simple forward pass for TP or no model parallel models + + Runs a very simple forward pass for model. Used in the case of models without + any parallelism or only tensor parallelism. + + Args: + inference_input (Dict[str, Any]): A dict containg the inputs for the gpt + model [tokens, position ids, attention mask] + + Returns: + torch.Tensor: The output logits of shape [batch_size, seq_len, padded_vocab_size] + """ + encoder_tokens = inference_input["encoder_tokens"] + decoder_tokens = inference_input["decoder_tokens"] + encoder_mask = inference_input["encoder_mask"] + decoder_mask = inference_input["decoder_mask"] + encoder_decoder_mask = inference_input["encoder_decoder_mask"] + tokens = decoder_tokens + + # T5 inference not yet support kv_cache + logits = self.model( + encoder_tokens, + decoder_tokens, + encoder_mask, + decoder_mask, + encoder_decoder_mask, + inference_params=None, + ) + logits = tensor_parallel.gather_from_tensor_model_parallel_region(logits) + + return logits diff --git a/megatron/core/inference/modelopt_support/__init__.py b/megatron/core/inference/modelopt_support/__init__.py index f8eb8f3..4da0530 100644 --- a/megatron/core/inference/modelopt_support/__init__.py +++ b/megatron/core/inference/modelopt_support/__init__.py @@ -1,8 +1,10 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -"""Integrations with NVIDIA TensorRT Model Optimizer (referred as ModelOpt). - -ModelOpt is a library comprising state-of-the-art model optimization techniques including quantization and sparsity to -compress model for efficient inference on NVIDIA GPUs. ModelOpt is integrated with Megatron-core to provide a seamless -experience for users to optimize their Megatron-core models for inference. More details on ModelOpt including -installation and usage can be found at https://github.com/NVIDIA/TensorRT-Model-Optimizer. -""" +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +"""Integrations with NVIDIA TensorRT Model Optimizer (referred as ModelOpt). 
+ +ModelOpt is a library comprising state-of-the-art model optimization techniques +including quantization and sparsity to compress model for efficient inference on +NVIDIA GPUs. ModelOpt is integrated with Megatron-core to provide a seamless +experience for users to optimize their Megatron-core models for inference. +More details on ModelOpt including installation and usage can be found at +https://github.com/NVIDIA/TensorRT-Model-Optimizer. +""" diff --git a/megatron/core/inference/modelopt_support/gpt/model_specs.py b/megatron/core/inference/modelopt_support/gpt/model_specs.py index 4d422bc..b11232a 100644 --- a/megatron/core/inference/modelopt_support/gpt/model_specs.py +++ b/megatron/core/inference/modelopt_support/gpt/model_specs.py @@ -1,63 +1,68 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.extensions.transformer_engine import TEDotProductAttention, TENorm -from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add -from megatron.core.models.gpt.gpt_layer_specs import _get_mlp_module_spec -from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.identity_op import IdentityOp -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules - - -# Use this spec for ModelOpt PTQ and TensorRT-LLM export -def get_gpt_layer_modelopt_spec( - num_experts: int = None, - moe_grouped_gemm: bool = False, - remap_te_layernorm: bool = False, - qk_layernorm: bool = False, -) -> ModuleSpec: - """Mix the native spec with TENorm. - - This is essentially the native local spec except for the layernorm implementation - is using TENorm from Transformer-Engine. The issue is that FusedLayerNorm from apex - has stopped supporting RMSNorm needed by llama. - """ - mlp = _get_mlp_module_spec( - use_te=False, num_experts=num_experts, moe_grouped_gemm=moe_grouped_gemm, fp8=False - ) - sharded_state_dict_keys_map = {} - if remap_te_layernorm: - if num_experts: - sharded_state_dict_keys_map = { - 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_' - } - else: - sharded_state_dict_keys_map = { - 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', - 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', - } - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=TENorm, - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=SelfAttentionSubmodules( - linear_qkv=ColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=RowParallelLinear, - q_layernorm=TENorm if qk_layernorm else IdentityOp, - k_layernorm=TENorm if qk_layernorm else IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=TENorm, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - # Map TE-layernorm-fusion keys back - sharded_state_dict_keys_map=sharded_state_dict_keys_map, - ), - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
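# Illustrative sketch (not part of the patch) of what the remap_te_layernorm key maps above
# and below accomplish: checkpoint keys for a separate layernorm module are renamed to the
# Transformer-Engine layernorm-fusion names so the same weights keep loading across the two
# layouts. The `remap_key` helper is a hypothetical stand-in, not a Megatron API.
key_map = {
    'input_layernorm.': 'self_attention.linear_qkv.layer_norm_',
    'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_',
}

def remap_key(key: str, key_map: dict) -> str:
    # Rewrite the first matching segment of a state-dict key.
    for src, dst in key_map.items():
        if src in key:
            return key.replace(src, dst)
    return key

assert remap_key('decoder.layers.0.input_layernorm.weight', key_map) == \
    'decoder.layers.0.self_attention.linear_qkv.layer_norm_weight'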
+ +from typing import Optional + +from megatron.core.extensions.transformer_engine import TEDotProductAttention, TENorm +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.models.gpt.gpt_layer_specs import get_mlp_module_spec +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.identity_op import IdentityOp +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules + + +# Use this spec for ModelOpt PTQ and TensorRT-LLM export +def get_gpt_layer_modelopt_spec( + num_experts: Optional[int] = None, + local_core_attention: bool = False, + moe_grouped_gemm: bool = False, + remap_te_layernorm: bool = False, + qk_layernorm: bool = False, +) -> ModuleSpec: + """Mix the native spec with TENorm. + + This is essentially the native local spec except for the layernorm implementation + is using TENorm from Transformer-Engine. The issue is that FusedLayerNorm from apex + has stopped supporting RMSNorm needed by llama. + """ + core_attention = DotProductAttention if local_core_attention else TEDotProductAttention + mlp = get_mlp_module_spec( + use_te=False, num_experts=num_experts, moe_grouped_gemm=moe_grouped_gemm, fp8=False + ) + sharded_state_dict_keys_map = {} + if remap_te_layernorm: + if num_experts: + sharded_state_dict_keys_map = { + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_' + } + else: + sharded_state_dict_keys_map = { + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', + 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', + } + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=TENorm, + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=ColumnParallelLinear, + core_attention=core_attention, + linear_proj=RowParallelLinear, + q_layernorm=TENorm if qk_layernorm else IdentityOp, + k_layernorm=TENorm if qk_layernorm else IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=TENorm, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + # Map TE-layernorm-fusion keys back + sharded_state_dict_keys_map=sharded_state_dict_keys_map, + ), + ) diff --git a/megatron/core/inference/modelopt_support/mamba/__init__.py b/megatron/core/inference/modelopt_support/mamba/__init__.py new file mode 100644 index 0000000..a1f3599 --- /dev/null +++ b/megatron/core/inference/modelopt_support/mamba/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. diff --git a/megatron/core/inference/modelopt_support/mamba/model_specs.py b/megatron/core/inference/modelopt_support/mamba/model_specs.py new file mode 100644 index 0000000..b270868 --- /dev/null +++ b/megatron/core/inference/modelopt_support/mamba/model_specs.py @@ -0,0 +1,89 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
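# Usage sketch for the ModelOpt GPT layer spec defined above (illustrative, not part of the
# patch). Passing the resulting spec to a GPT model as its transformer layer spec is the usual
# pattern, but that surrounding model construction is an assumption here.
from megatron.core.inference.modelopt_support.gpt.model_specs import get_gpt_layer_modelopt_spec

modelopt_layer_spec = get_gpt_layer_modelopt_spec(
    num_experts=None,            # dense (non-MoE) model
    local_core_attention=True,   # new flag: DotProductAttention instead of TEDotProductAttention
    remap_te_layernorm=True,     # remap TE layernorm-fusion checkpoint keys
    qk_layernorm=False,
)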
+ +from megatron.core.extensions.transformer_engine import TEDotProductAttention, TENorm +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.ssm.mamba_block import MambaStack, MambaStackSubmodules +from megatron.core.ssm.mamba_layer import MambaLayer, MambaLayerSubmodules +from megatron.core.ssm.mamba_mixer import MambaMixer, MambaMixerSubmodules +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules + + +# Use this spec for ModelOpt PTQ and TensorRT-LLM export +def get_mamba_stack_modelopt_spec( + local_core_attention: bool = False, remap_te_layernorm: bool = False +) -> ModuleSpec: + """Mix the native spec with TENorm. + + This is essentially the native local spec except for the layernorm implementation + is using TENorm from Transformer-Engine. + """ + mamba_state_dict_keys_map = {} + transformer_state_dict_keys_map = {} + if remap_te_layernorm: + mamba_state_dict_keys_map = {'norm.': 'mixer.in_proj.layer_norm_'} + transformer_state_dict_keys_map = { + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', + 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', + } + + mamba_layer = ModuleSpec( + module=MambaLayer, + submodules=MambaLayerSubmodules( + norm=TENorm, + mixer=ModuleSpec( + module=MambaMixer, + submodules=MambaMixerSubmodules( + in_proj=ColumnParallelLinear, out_proj=RowParallelLinear + ), + ), + mamba_bda=get_bias_dropout_add, + sharded_state_dict_keys_map=mamba_state_dict_keys_map, + ), + ) + + core_attention = DotProductAttention if local_core_attention else TEDotProductAttention + attention_layer = ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=TENorm, + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=ColumnParallelLinear, + core_attention=core_attention, + linear_proj=RowParallelLinear, + ), + ), + self_attn_bda=get_bias_dropout_add, + sharded_state_dict_keys_map=transformer_state_dict_keys_map, + ), + ) + + mlp_layer = ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + pre_mlp_layernorm=TENorm, + mlp=ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=ColumnParallelLinear, linear_fc2=RowParallelLinear + ), + ), + mlp_bda=get_bias_dropout_add, + sharded_state_dict_keys_map=transformer_state_dict_keys_map, + ), + ) + + return ModuleSpec( + module=MambaStack, + submodules=MambaStackSubmodules( + mamba_layer=mamba_layer, attention_layer=attention_layer, mlp_layer=mlp_layer + ), + ) diff --git a/megatron/core/inference/sampling_params.py b/megatron/core/inference/sampling_params.py index 8ffcb63..d73a612 100644 --- a/megatron/core/inference/sampling_params.py +++ b/megatron/core/inference/sampling_params.py @@ -1,35 +1,36 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from dataclasses import dataclass - - -@dataclass -class SamplingParams: - """Inference parameters sent along with the prompts. 
- This class contains request-level attributes that control the sampling techniques used when - generating text. This is distinct from megatron.core.InferenceParams, which is sets model-level - inference attributes such as the maximum sequence length, and contains the KV cache. - - For an explanation of these parameters refer to this blog - https://ivibudh.medium.com/a-guide-to-controlling-llm-model-output-exploring-top-k-top-p-and- - temperature-parameters-ed6a31313910 - """ - - temperature: float = 1.0 - top_k: int = 0 - top_p: float = 0.0 - return_log_probs: bool = False - num_tokens_to_generate: int = 30 - - def add_attributes(self, attribute_value_pair: dict): - """Utility to add more attributes to sampling params - - Use this method to pass in a custom dictionary to add more sampling parameter attributes. - c = SamplingParams - c.add_attributes({'min_length':4, 'eod_id':153}) - - Args: - attribute_value_pair (dict): A dictionary containing attributes as the key names and - their values as the values. - """ - for key, value in attribute_value_pair.items(): - setattr(self, key, value) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from dataclasses import dataclass + + +@dataclass +class SamplingParams: + """Inference parameters sent along with the prompts. + This class contains request-level attributes that control the sampling techniques used when + generating text. This is distinct from megatron.core.InferenceParams, which is sets model-level + inference attributes such as the maximum sequence length, and contains the KV cache. + + For an explanation of these parameters refer to this blog + https://ivibudh.medium.com/a-guide-to-controlling-llm-model-output-exploring-top-k-top-p-and- + temperature-parameters-ed6a31313910 + """ + + temperature: float = 1.0 + top_k: int = 0 + top_p: float = 0.0 + return_log_probs: bool = False + return_segments: bool = False # Whether to return individually detokenized tokens + num_tokens_to_generate: int = 30 + + def add_attributes(self, attribute_value_pair: dict): + """Utility to add more attributes to sampling params + + Use this method to pass in a custom dictionary to add more sampling parameter attributes. + c = SamplingParams + c.add_attributes({'min_length':4, 'eod_id':153}) + + Args: + attribute_value_pair (dict): A dictionary containing attributes as the key names and + their values as the values. + """ + for key, value in attribute_value_pair.items(): + setattr(self, key, value) diff --git a/megatron/core/inference/scheduler.py b/megatron/core/inference/scheduler.py index ef17723..d3afcb0 100644 --- a/megatron/core/inference/scheduler.py +++ b/megatron/core/inference/scheduler.py @@ -1,127 +1,175 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import time -import typing -from collections import OrderedDict -from typing import Dict - -import torch - -from megatron.core.inference.inference_request import InferenceRequest, Status -from megatron.core.inference.sampling_params import SamplingParams -from megatron.core.inference.utils import Counter - - -class Scheduler: - """Scheduler for handling requests to inference engine - - This class is responsible for handing of all the incomign requests - - Args: - max_batch_size (int): The max batch size that we can pass to the - inference engine at a time. 
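# Usage sketch for SamplingParams above (illustrative, not part of the patch). The new
# `return_segments` flag requests per-token detokenized segments alongside the full text.
from megatron.core.inference.sampling_params import SamplingParams

params = SamplingParams(
    temperature=0.8,
    top_p=0.9,                 # nucleus sampling; leave top_k at 0 when top_p is used
    return_log_probs=True,
    return_segments=True,      # newly added field
    num_tokens_to_generate=64,
)
# Extra attributes can still be attached dynamically, as in the class docstring:
params.add_attributes({'min_length': 4, 'eod_id': 153})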
- """ - - def __init__(self, max_batch_size: int): - self.max_batch_size = max_batch_size - self.active_request_pool: Dict[int, InferenceRequest] = OrderedDict() - self.waiting_request_pool: Dict[int, InferenceRequest] = OrderedDict() - self.completed_request_pool: Dict[int, InferenceRequest] = OrderedDict() - self.request_counter = Counter() - - def add_request( - self, - prompt: str, - prompt_tokens: torch.Tensor, - encoder_prompt: str = None, - inference_parameters: SamplingParams = None, - arrival_time: float = None, - ): - """Add an incoming request - - This method will add the request to either the active pool or the waiting pool - depending on the batch size. - - Args: - prompt (str): Input prompt string - prompt_tokens (torch.Tensor): A torch tensor having the input prompts tokenized - encoder_prompt (str): Encoder input string - inference_parameters (SamplingParams): The inference parameters - arrival_time (float, optional): The incoming request time. Defaults to None. - """ - request_id = str(next(self.request_counter)) - - if arrival_time is None: - arrival_time = time.time() - - status = ( - Status.ACTIVE_BUT_NOT_GENERATING_TOKENS - if len(self.active_request_pool) < self.max_batch_size - else Status.WAITING_IN_QUEUE - ) - - inference_request = InferenceRequest( - request_id=request_id, - prompt=prompt, - inference_parameters=inference_parameters, - arrival_time=arrival_time, - prompt_tokens=prompt_tokens, - status=status, - encoder_prompt=encoder_prompt, - ) - - if status == status.ACTIVE_BUT_NOT_GENERATING_TOKENS: - self.active_request_pool[request_id] = inference_request - else: - self.waiting_request_pool[request_id] = inference_request - - def have_requests_pending(self) -> bool: - """Method to check if there are requests pending - - This method returns False only when there are no active requests or waiting requests. - """ - num_requests_pending = len(self.active_request_pool) + len(self.waiting_request_pool) - return num_requests_pending > 0 - - def add_earliest_waiting_request_to_active_pool(self): - """Utility to add the waiting request to active pool - - This method will add the earliest request (FIFO) that is in the waiting request - pool to the active request pool. - """ - assert ( - len(self.active_request_pool) < self.max_batch_size - ), "Active request pool is already full. Cant add any more requests" - if len(self.waiting_request_pool) > 0: - (earliest_waiting_request_request_id, earliest_waiting_request) = ( - self.waiting_request_pool.popitem(last=False) - ) - earliest_waiting_request.status = Status.ACTIVE_BUT_NOT_GENERATING_TOKENS - self.active_request_pool[earliest_waiting_request_request_id] = earliest_waiting_request - - def update_requests_pools(self, result_dict: typing.OrderedDict[int, InferenceRequest] = None): - """Update request pool status - - This method will full up the active request pool, if it has less than max batch size - elements from the waiting request pool. - If provided with a request dict, it will put the completed requests into the completed - request pool and add waiting request into active pool. - - Args: - result (typing.OrderedDict[int, InferenceRequest], optional): The result returned - by the engine. A dictionary with keys as the request ids, and values as the - requests. Defaults to None - """ - for result_request_id in list(result_dict.keys()): - active_request = self.active_request_pool[result_request_id] - - # If a request has completed put it into the completed request pool. 
- if active_request.status == Status.COMPLETED: - completed_request = self.active_request_pool.pop(result_request_id) - self.completed_request_pool[result_request_id] = completed_request - - # If the active request pool is not full, add waiting requests in FIFO order - while ( - len(self.active_request_pool) < self.max_batch_size - and len(self.waiting_request_pool) > 0 - ): - self.add_earliest_waiting_request_to_active_pool() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import functools +import time +import typing +from collections import OrderedDict +from typing import Dict, Optional, Type, Union + +import torch + +from megatron.core.inference.async_stream import AsyncStream +from megatron.core.inference.inference_request import InferenceRequest, Status +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.utils import Counter + + +class Scheduler: + """Scheduler for handling requests to inference engine + + This class is responsible for handing of all the incomign requests + + Args: + max_batch_size (int): The max batch size that we can pass to the + inference engine at a time. + request_type (InferenceRequest): The class to use for instantiating new requests. + """ + + def __init__(self, max_batch_size): + self.max_batch_size = max_batch_size + self.requests: Dict[str, InferenceRequest] = OrderedDict() + self.streams: Dict[str, AsyncStream] = OrderedDict() + self.active_request_pool: Dict[str, InferenceRequest] = OrderedDict() + self.waiting_request_pool: Dict[str, InferenceRequest] = OrderedDict() + self.completed_request_pool: Dict[str, InferenceRequest] = OrderedDict() + self.request_counter = Counter() + + def get_new_request_id(self) -> str: + """Gets a new request id""" + request_id = str(next(self.request_counter)) + return request_id + + def add_request( + self, + prompt: Optional[str] = None, + prompt_tokens: Optional[torch.Tensor] = None, + encoder_prompt: Optional[str] = None, + inference_parameters: Optional[SamplingParams] = None, + arrival_time: Optional[float] = None, + streaming: bool = False, + inference_request: Optional[InferenceRequest] = None, + ) -> str: + """Add an incoming request + + This method will add the request to either the active pool or the waiting pool + depending on the batch size. + + Args: + prompt (str): Input prompt string + prompt_tokens (torch.Tensor): A torch tensor having the input prompts tokenized + encoder_prompt (str): Encoder input string + inference_parameters (SamplingParams): The inference parameters + arrival_time (float, optional): The incoming request time. Defaults to None. + streaming (bool, optional): Whether to asynchronously stream tokens for this request. + inference_request (InferenceRequest, optional): A fully constructed request. + Defaults to None. + + Returns: + The request_id for the new request. 
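# Example (illustrative sketch, not part of the patch) of enqueueing and aborting a request
# with the updated Scheduler API. The prompt tensor is a toy placeholder; `streaming=True`
# additionally registers an AsyncStream for the request.
import torch
from megatron.core.inference.sampling_params import SamplingParams
from megatron.core.inference.scheduler import Scheduler

scheduler = Scheduler(max_batch_size=4)
request_id = scheduler.add_request(
    prompt="Hello world",
    prompt_tokens=torch.tensor([1, 2, 3]),        # placeholder token ids
    inference_parameters=SamplingParams(num_tokens_to_generate=16),
    streaming=True,
)
assert scheduler.have_requests_pending()
scheduler.abort_request(request_id)               # finishes the request's stream, if any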
+ """ + status = ( + Status.ACTIVE_BUT_NOT_GENERATING_TOKENS + if len(self.active_request_pool) < self.max_batch_size + else Status.WAITING_IN_QUEUE + ) + + if inference_request is None: + assert prompt is not None + assert prompt_tokens is not None + + request_id = self.get_new_request_id() + + if arrival_time is None: + arrival_time = time.time() + + inference_request = InferenceRequest( + request_id=request_id, + prompt=prompt, + inference_parameters=inference_parameters, + arrival_time=arrival_time, + prompt_tokens=prompt_tokens, + status=status, + encoder_prompt=encoder_prompt, + ) + else: + request_id = inference_request.request_id + inference_request.status = status + if inference_request.arrival_time is None: + inference_request.arrival_time = time.time() + + self.requests[request_id] = inference_request + + if streaming: + abort_request = functools.partial(self.abort_request, request_id=request_id) + self.streams[request_id] = AsyncStream(request_id, abort_request) + + if status == status.ACTIVE_BUT_NOT_GENERATING_TOKENS: + self.active_request_pool[request_id] = inference_request + else: + self.waiting_request_pool[request_id] = inference_request + + return request_id + + def have_requests_pending(self) -> bool: + """Method to check if there are requests pending + + This method returns False only when there are no active requests or waiting requests. + """ + num_requests_pending = len(self.active_request_pool) + len(self.waiting_request_pool) + return num_requests_pending > 0 + + def add_earliest_waiting_request_to_active_pool(self): + """Utility to add the waiting request to active pool + + This method will add the earliest request (FIFO) that is in the waiting request + pool to the active request pool. + """ + assert ( + len(self.active_request_pool) < self.max_batch_size + ), "Active request pool is already full. Cant add any more requests" + if len(self.waiting_request_pool) > 0: + (earliest_waiting_request_request_id, earliest_waiting_request) = ( + self.waiting_request_pool.popitem(last=False) + ) + earliest_waiting_request.status = Status.ACTIVE_BUT_NOT_GENERATING_TOKENS + self.active_request_pool[earliest_waiting_request_request_id] = earliest_waiting_request + + def update_requests_pools( + self, result_dict: Optional[typing.OrderedDict[str, InferenceRequest]] = None + ): + """Update request pool status + + This method will full up the active request pool, if it has less than max batch size + elements from the waiting request pool. + If provided with a request dict, it will put the completed requests into the completed + request pool and add waiting request into active pool. + + Args: + result (typing.OrderedDict[str, InferenceRequest], optional): The result returned + by the engine. A dictionary with keys as the request ids, and values as the + requests. Defaults to None + """ + for result_request_id in list(result_dict.keys()): + active_request = self.active_request_pool[result_request_id] + + # If a request has completed put it into the completed request pool. 
+ if active_request.status == Status.COMPLETED: + completed_request = self.active_request_pool.pop(result_request_id) + self.completed_request_pool[result_request_id] = completed_request + + # If the active request pool is not full, add waiting requests in FIFO order + while ( + len(self.active_request_pool) < self.max_batch_size + and len(self.waiting_request_pool) > 0 + ): + self.add_earliest_waiting_request_to_active_pool() + + def abort_request( + self, + request_id: str, + *, + exception: Optional[Union[BaseException, Type[BaseException]]] = None + ): + """Cancels the given request""" + stream = self.streams.get(request_id, None) + if stream is not None: + stream.finish(exception=exception) diff --git a/megatron/core/inference/text_generation_controllers/encoder_decoder_text_generation_controller.py b/megatron/core/inference/text_generation_controllers/encoder_decoder_text_generation_controller.py index 0c2a41b..f50ba90 100644 --- a/megatron/core/inference/text_generation_controllers/encoder_decoder_text_generation_controller.py +++ b/megatron/core/inference/text_generation_controllers/encoder_decoder_text_generation_controller.py @@ -1,35 +1,38 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from typing import OrderedDict - -import torch - -from megatron.core.inference.inference_request import InferenceRequest -from megatron.core.inference.text_generation_controllers.text_generation_controller import ( - TextGenerationController, -) - - -class EncoderDecoderTextGenerationController(TextGenerationController): - """The text generation controller for encoder-decoder architecture - - This class inherits from TextGenerationController, adding features - relating to encoder input encoder_prompt - - """ - - def prep_model_for_inference( - self, prompts_tokens: torch.Tensor, active_requests: OrderedDict[int, InferenceRequest] - ): - """Preparing batch for inference, using respective wrapper's prep_model_for_inference method - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] - active_requests (OrderedDict[int, InferenceRequest]): The input active requests - """ - encoder_prompts = list( - map(lambda request: request.encoder_prompt, active_requests.values()) - ) - - self.inference_wrapped_model.prep_model_for_inference( - prompts_tokens=prompts_tokens, encoder_prompts=encoder_prompts, tokenizer=self.tokenizer - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from typing import Any, Dict, OrderedDict + +import torch + +from megatron.core.inference.inference_request import InferenceRequest +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) + + +class EncoderDecoderTextGenerationController(TextGenerationController): + """The text generation controller for encoder-decoder architecture + + This class inherits from TextGenerationController, adding features + relating to encoder input encoder_prompt + + """ + + def prep_inference_input( + self, prompts_tokens: torch.Tensor, active_requests: OrderedDict[str, InferenceRequest] + ) -> Dict[str, Any]: + """Preparing input data for inference, using respective wrapper's prep_inference_input method # pylint: disable=line-too-long + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] + active_requests (OrderedDict[str, InferenceRequest]): The input active requests + + Returns: + A dict of the inference input for the current batch. 
+ """ + encoder_prompts = list( + map(lambda request: request.encoder_prompt, active_requests.values()) + ) + + return self.inference_wrapped_model.prep_inference_input( + prompts_tokens, encoder_prompts, tokenizer=self.tokenizer + ) diff --git a/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py b/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py index f97df13..54627c2 100644 --- a/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py +++ b/megatron/core/inference/text_generation_controllers/simple_text_generation_controller.py @@ -1,5 +1,5 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.inference.text_generation_controllers.text_generation_controller import ( # noqa: F401 # pylint: disable=unused-import - TextGenerationController as SimpleTextGenerationController, -) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( # noqa: F401 # pylint: disable=unused-import + TextGenerationController as SimpleTextGenerationController, +) diff --git a/megatron/core/inference/text_generation_controllers/text_generation_controller.py b/megatron/core/inference/text_generation_controllers/text_generation_controller.py index f15c819..f1a4ae4 100644 --- a/megatron/core/inference/text_generation_controllers/text_generation_controller.py +++ b/megatron/core/inference/text_generation_controllers/text_generation_controller.py @@ -1,400 +1,674 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from typing import List, OrderedDict, Tuple - -import torch -import torch.nn.functional as F - -from megatron.core import parallel_state -from megatron.core.inference.communication_utils import broadcast_from_last_pipeline_stage -from megatron.core.inference.inference_request import InferenceRequest, Status -from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( - AbstractModelInferenceWrapper, -) -from megatron.core.inference.sampling_params import SamplingParams - - -class TextGenerationController: - """The text generation controller (the main sampling loop) - - This class tokenizes the input, runs inference, samples from logits, and detokenizes the output. 
- - Args: - inference_wrapped_model (AbstractModelInferenceWrapper): A model that - is wrapped using the specs given in the abstract_model_inference_wrapper.py - tokenizer (_type_): Tokenizer used for tokenizing and detokenizing the prompts - """ - - def __init__(self, inference_wrapped_model: AbstractModelInferenceWrapper, tokenizer): - self.inference_wrapped_model = inference_wrapped_model - self.tokenizer = tokenizer - - # For models without pipeline parallelism, is_first_stage and is_last_stage returns True - self.model_is_pipeline_parallel = not ( - parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() - ) - - def tokenize_prompt( - self, prompt: str, add_BOS: bool = False - ) -> Tuple[torch.Tensor, torch.Tensor]: - """Utility to tokenize the input prompts - - Args: - prompt (str): The input prompt - - Returns: - torch.Tensor: Returns the tokenized prompt - """ - prompt_tokens = self.tokenizer.tokenize(prompt) - - if add_BOS: - prompt_tokens = [self.tokenizer.bos] + prompt_tokens - - return prompt_tokens - - def detokenize_generations(self, prompt_tokens_with_generated_tokens: torch.Tensor) -> str: - """Detokenize the output generations - - Args: - prompt_tokens_with_generated_tokens (torch.Tensor): The input prompt - tokens plus the generated tokens - - Returns: - str: The detokenized output - """ - tokens = prompt_tokens_with_generated_tokens.cpu().numpy().tolist() - return self.tokenizer.detokenize(tokens) - - def sample_from_logits( - self, - last_token_logits: torch.Tensor, - sampling_params: SamplingParams = None, - vocab_size: int = None, - **kwargs - ) -> torch.Tensor: - """Samples the logits to generate outputs - - Given the logits of the last token, this function samples it - according to the parameters defined in sampling_params - and returns the samples - - Args: - last_token_logits (torch.Tensor): The last token logits. A tensor of - size [batch_size, vocab_size] - sampling_params (SamplingParams): The parameters to use for inference. - vocab_size (int): Obtained from the tokenizer. Defaults to None - - Returns: - torch.Tensor: 1D tensor of the sampled logits with [batch_size] elements - """ - - if kwargs.get('common_inference_params'): - sampling_params = kwargs['common_inference_params'] - - top_p = sampling_params.top_p - top_k = sampling_params.top_k - temperature = sampling_params.temperature - - assert not (top_k > 0 and top_p > 0), 'Cannot have top-p and top-k both greater than zero' - assert top_p <= 1.0, 'top-p should be in (0,1]' - - def modify_logits_for_top_k_filtering(logits, top_k): - """Set the logits for none top-k values to -inf.""" - filter_ = logits < torch.topk(logits, top_k)[0][..., -1, None] - logits.masked_fill_(filter_, float('-Inf')) - - def modify_logits_for_top_p_filtering(logits, top_p): - """Set the logits for none top-p values to -inf.""" - # First sort and calculate cumulative sum of probabilities. - sorted_logits, sorted_indices = torch.sort(logits, descending=True) - cumulative_probs = sorted_logits.softmax(dim=-1).cumsum(dim=-1) - - # Filteration based on the cumulative sum. - filter_ = cumulative_probs > top_p - # This shift by 1 is weird and I cannot justify it. This existed - # in the original implementation: - # https://github.com/ari-holtzman/degen/blob/master/gen.py - # and I guess it is needed so keeping it for now. - filter_[:, 1:] = filter_[:, :-1].clone() - # Make sure we at least have one token to select from. 
- filter_[..., 0] = 0 - - # Fill in the filtered part - filter_ = filter_.scatter(1, sorted_indices, filter_) - logits.masked_fill_(filter_, float('-Inf')) - - # Greedy sampling - if top_k == 1: - sampled_logits = torch.argmax(last_token_logits, dim=-1) - else: - last_token_logits = last_token_logits.clone() - if temperature != 1.0: - last_token_logits.div_(temperature) - - if top_k > 1: - assert top_k <= last_token_logits.size(1), 'top-k is larger than logit size.' - if vocab_size: - assert top_k < vocab_size, 'top-k is larger than vocab size.' - modify_logits_for_top_k_filtering(last_token_logits, top_k) - - elif top_p > 0.0: - modify_logits_for_top_p_filtering(last_token_logits, top_p) - - # After filtering, we need to recalculate the distribution. - probabilities = last_token_logits.softmax(dim=-1) - sampled_logits = torch.multinomial(probabilities, num_samples=1).view(-1) - - # If vocab size is provided, make sure the samples are in in the range [0, vocab-size). - if vocab_size: - sampled_logits = torch.clamp(sampled_logits, min=0, max=(vocab_size - 1)) - return sampled_logits - - def update_generation_status( - self, - updated_prompts_tokens: torch.Tensor, - generation_started: torch.Tensor, - current_context_end_position: int, - is_generation_done_tensor: torch.Tensor, - generated_sequence_lengths: torch.Tensor, - ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: - """Checks which prompts have reached an end condition - - We check which prompts have reached an end condition and set the corresponding - flags of the is_generation_done_tensor to True. The generated sequence lengths - increase as we keep generating, until that prompts hits an end condition. The - generation_started tensor determines which prompts have started generating. - - Args: - updated_prompts_tokens (torch.Tensor): The prompts tokens updated with the latest - generated tokens. A tensor of shape [batch_size, max_seq_len] - (i.e max_seq_len = max_prompt_len + tokens_to_generate) - generation_started (torch.Tensor): A boolean tensor of shape [batch_size]. True - indicates the prompt at that index has started generating tokens. - current_context_end_position (int): An integer indicating which position to - extract from the prompts tokens to get the latest generated tokens. - is_generation_done_tensor (torch.Tensor): A boolean tensor of shape [batch_size]. - True indicates the prompt at that index has reached end condition. - generated_sequence_lengths (torch.Tensor): A int tensor of shape [batch_size]. - Each value represents the generated sequence lengths for that prompt. - - Returns: - Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: Returns the boolean - is_generation_done_tensor and the generated_sequence_lengths after updating it - """ - latest_samples = updated_prompts_tokens[:, current_context_end_position] - # Make sure we are checking eod criterion only for prompts that have started generating - # (i.e) We only look at the generated tokenns and not the input tokens. 
- reached_eod = (latest_samples == self.tokenizer.eod) & generation_started - is_generation_done_tensor = is_generation_done_tensor | reached_eod - # We increment generated sequence lengths when that prompt has not hit the - # EOD and generation has started - generated_sequence_lengths += ~is_generation_done_tensor & generation_started - - return is_generation_done_tensor, generated_sequence_lengths - - def pad_input_prompt_tokens( - self, - batch_prompt_tokens_list: List[List[int]], - max_prompt_length_in_batch: int, - num_tokens_to_generate: int, - ) -> torch.Tensor: - """Method to pad input prompts - - Given a list of prompts, pad them all to uniform length - - Args: - batch_prompt_tokens_list (List[List[int]]): A list containing the prompt tokens - max_prompt_length_in_batch (int): Maximum of the length of the input prompt tokens - num_tokens_togenerate (int): The number of tokens to generate for each prompt - - Returns: - torch.Tensor: A torch tensor of shape [bs, max_seq_len] (i.e) - max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate, - with extra indices for each tensor padded with mask id. - """ - max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate - - for prompt_tokens in batch_prompt_tokens_list: - padding_size = max_seq_len - len(prompt_tokens) - prompt_tokens.extend([self.tokenizer.eod] * padding_size) - - return torch.tensor(batch_prompt_tokens_list).cuda() - - def generate_output_tokens_dynamic_batch( - self, active_requests: OrderedDict[int, InferenceRequest] - ) -> OrderedDict[int, InferenceRequest]: - """Utility to generate the output tokens and probabilities for the prompts - - This utility generates the output tokens for a dynamic batch. It will run one forward step - at a time, and pass control back to the engine, which will update the request pool and call - this method again. - - Args: - active_requests (OrderedDict[int, InferenceRequest]): The input active requests. - - Returns: - OrderedDict[int, InferenceRequest]: The result for each of the incoming requests - after running one forward step. - """ - raise Exception("Not implemented yet") - - def generate_all_output_tokens_static_batch( - self, active_requests: OrderedDict[int, InferenceRequest] - ) -> OrderedDict[int, InferenceRequest]: - """Utility to generate the all the output tokens and probabilities for the prompts . - - This utility generates the output tokens for a static batch. It runs the forward steps till - all prompts complete generation, updates the status of these requests to completed, adds - the generated result and returns these requests - - Args: - active_requests (OrderedDict[int, InferenceRequest]): The input active requests. 
- - Returns: - OrderedDict[int, InferenceRequest]: The result for each of the incoming requests - """ - batch_prompt_tokens_list = list( - map(lambda request: request.prompt_tokens, active_requests.values()) - ) - prompt_lengths_in_batch = torch.tensor( - [len(prompt_tokens) for prompt_tokens in batch_prompt_tokens_list] - ).cuda() - max_prompt_length_in_batch = max(prompt_lengths_in_batch) - min_prompt_length_in_batch = min(prompt_lengths_in_batch) - - # For batch inference the inference params are the same for all request - sampling_params: SamplingParams = list(active_requests.values())[0].inference_parameters - - # max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate - batch_prompt_tokens = self.pad_input_prompt_tokens( - batch_prompt_tokens_list, - max_prompt_length_in_batch=max_prompt_length_in_batch, - num_tokens_to_generate=sampling_params.num_tokens_to_generate, - ) - batch_size, max_sequence_length = batch_prompt_tokens.shape - - # Pre allocate log probs tensor - output_log_probs = None - if sampling_params.return_log_probs: - output_log_probs = torch.empty( - (batch_size, max_sequence_length - 1), dtype=torch.float32 - ).cuda() - - # An array to check which of the prompts have reached end of generation condition - is_generation_done_tensor = torch.zeros(batch_size, dtype=torch.bool).cuda() - - # An array to act as a counter to keep track of generated sequence lengths - generated_sequence_lengths = torch.zeros(batch_size).cuda() - - with torch.no_grad(): - - self.prep_model_for_inference( - prompts_tokens=batch_prompt_tokens, active_requests=active_requests - ) - - context_start_position = 0 - # Pick the context window that we need to pass through the network. - for context_end_position in range(min_prompt_length_in_batch, max_sequence_length): - - inference_input = self.inference_wrapped_model.get_batch_for_context_window( - context_start_position, context_end_position - ) - - # Returns the final logits of shape [batch_size, context_length, vocab_size] - # Note: This is returned in all TP ranks or last PP stage in PP models - logits = self.inference_wrapped_model.run_one_forward_step(inference_input) - if self.model_is_pipeline_parallel: - context_length = context_end_position - context_start_position - logits = broadcast_from_last_pipeline_stage( - [batch_size, context_length, self.tokenizer.vocab_size], - dtype=self.inference_wrapped_model.inference_wrapper_config.params_dtype, - tensor=logits, - ) - - # Indicates which of the input prompts have started generating tokens. 
- # A 1D boolean tensor with [batch_size] elements (i.e) The shortest - # prompts will start generating first and so on - generation_started = prompt_lengths_in_batch <= context_end_position - last_token_logits = logits[:, -1, :] - sampled_logits = self.sample_from_logits( - last_token_logits, sampling_params, self.tokenizer.vocab_size - ) - - # Substitute the sampled logits only for only the prompts that - # have started generating tokens - batch_prompt_tokens[generation_started, context_end_position] = sampled_logits[ - generation_started - ] - - if sampling_params.return_log_probs: - log_probs = F.log_softmax(logits, dim=2) - indices = torch.unsqueeze( - batch_prompt_tokens[ - :, (context_start_position + 1) : (context_end_position + 1) - ], - 2, - ) - # Get the log probabilities for only the prompt tokens - output_log_probs[:, context_start_position:context_end_position] = torch.gather( - log_probs, 2, indices - ).squeeze(2) - - context_start_position = context_end_position - - # Check end of generation status for each tensor - # and update generated sequence lengths - (is_generation_done_tensor, generated_sequence_lengths) = ( - self.update_generation_status( - updated_prompts_tokens=batch_prompt_tokens, - generation_started=generation_started, - current_context_end_position=context_end_position, - is_generation_done_tensor=is_generation_done_tensor, - generated_sequence_lengths=generated_sequence_lengths, - ) - ) - # Boolean flag indicating if all prompts are finished - all_prompts_done = torch.all(is_generation_done_tensor) - if all_prompts_done: - break - - # Include all the generated tokens - batch_prompt_tokens_with_generations = batch_prompt_tokens[:, : (context_end_position + 1)] - if sampling_params.return_log_probs: - output_log_probs = output_log_probs[:, :context_end_position] - - generated_sequence_lengths[ - generated_sequence_lengths > sampling_params.num_tokens_to_generate - ] = sampling_params.num_tokens_to_generate - - for idx, request in enumerate(active_requests.values()): - input_prompt_length = int(prompt_lengths_in_batch[idx]) - # Shorter prompts might have generated more than required tokens. So we trim them down - required_sequence_length = int( - min(generated_sequence_lengths[idx], sampling_params.num_tokens_to_generate) - ) - # Extract only the generated tokens - required_result_tokens = batch_prompt_tokens_with_generations[ - idx, input_prompt_length : (input_prompt_length + required_sequence_length) - ] - - request.generated_length = required_sequence_length - request.generated_tokens = required_result_tokens - request.generated_log_probs = ( - None - if output_log_probs is None - else output_log_probs[idx, input_prompt_length:required_sequence_length] - ) - request.status = Status.COMPLETED - request.generated_text = self.detokenize_generations(required_result_tokens) - - return active_requests - - def prep_model_for_inference( - self, prompts_tokens: torch.Tensor, active_requests: OrderedDict[int, InferenceRequest] - ): - """Preparing batch for inference, using respective wrapper's prep_model_for_inference method - - Args: - prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] - active_requests (OrderedDict[int, InferenceRequest]): The input active requests - """ - self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=prompts_tokens) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
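# Standalone sketch (illustrative, not part of the patch) of the top-k / top-p filtering that
# sample_from_logits below implements: logits outside the kept set are pushed to -inf before
# softmax and multinomial sampling. Values here are arbitrary toy numbers.
import torch

logits = torch.tensor([[2.0, 1.0, 0.5, -1.0]])

# Top-k (k=2): keep only the 2 largest logits.
top_k = 2
kth_value = torch.topk(logits, top_k)[0][..., -1, None]
top_k_filtered = logits.masked_fill(logits < kth_value, float('-inf'))

# Top-p (p=0.9): keep the smallest prefix of sorted tokens whose cumulative probability
# exceeds p, always retaining at least one token.
top_p = 0.9
sorted_logits, sorted_indices = torch.sort(logits, descending=True)
cumulative_probs = sorted_logits.softmax(dim=-1).cumsum(dim=-1)
remove = cumulative_probs > top_p
remove[:, 1:] = remove[:, :-1].clone()   # shift so the token that crosses p is kept
remove[..., 0] = 0
remove = remove.scatter(1, sorted_indices, remove)
top_p_filtered = logits.masked_fill(remove, float('-inf'))

probs = top_p_filtered.softmax(dim=-1)
next_token = torch.multinomial(probs, num_samples=1).view(-1)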
+import concurrent +import copy +import functools +from typing import Any, Dict, List, Optional, OrderedDict, Tuple, Union + +import torch +import torch.nn.functional as F + +from megatron.core import parallel_state +from megatron.core.inference.async_stream import AsyncStream +from megatron.core.inference.communication_utils import broadcast_from_last_pipeline_stage +from megatron.core.inference.inference_request import InferenceRequest, Status +from megatron.core.inference.model_inference_wrappers.abstract_model_inference_wrapper import ( + AbstractModelInferenceWrapper, +) +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.transformer.cuda_graphs import create_cudagraphs +from megatron.core.utils import get_model_config + + +class TextGenerationController: + """The text generation controller (the main sampling loop) + + This class tokenizes the input, runs inference, samples from logits, and detokenizes the output. + + Args: + inference_wrapped_model (AbstractModelInferenceWrapper): A model that + is wrapped using the specs given in the abstract_model_inference_wrapper.py + tokenizer (_type_): Tokenizer used for tokenizing and detokenizing the prompts + """ + + def __init__(self, inference_wrapped_model: AbstractModelInferenceWrapper, tokenizer): + self.inference_wrapped_model = inference_wrapped_model + self.tokenizer = tokenizer + + # For models without pipeline parallelism, is_first_stage and is_last_stage returns True + self.model_is_pipeline_parallel = not ( + parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() + ) + + def tokenize_prompt( + self, prompt: str, add_BOS: bool = False + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Utility to tokenize the input prompts + + Args: + prompt (str): The input prompt + + Returns: + torch.Tensor: Returns the tokenized prompt + """ + prompt_tokens = self.tokenizer.tokenize(prompt) + + if add_BOS: + prompt_tokens = [self.tokenizer.bos] + prompt_tokens + + return prompt_tokens + + def detokenize_generations( + self, + tokens_gpu_tensor: torch.Tensor, + lengths_gpu_tensor: torch.Tensor, + detokenize_segments: bool, + ) -> tuple[str, Optional[List[List[str]]]]: + """Detokenize the generated tokens. + + Args: + tokens_gpu_tensor (torch.Tensor): Tensor containing the tokens + lengths_gpu_tensor (torch.Tensor): Tensor containing the lengths of each sequence + detokenize_segments (bool): If True, returns individually detokenized tokens. If False, + returns None as second element. Helpful for understanding per-token boundaries in + generated text. 
+ + Returns: + tuple[str, List[str] | None]: A tuple containing: + - str: The complete detokenized text + - List[str] | None: List of segmented tokens if detokenize_segments is True, else None + """ + # TODO(helenn): Unify with `detokenize_generations` from legacy textgen path + + if not detokenize_segments: + tokens = tokens_gpu_tensor.cpu().numpy().tolist() + return self.tokenizer.detokenize(tokens), None + + prompts_plus_generations: List[str] = [] + prompts_plus_generations_segments: List[List[str]] = [] + + tokens_gpu_tensor = torch.unsqueeze(tokens_gpu_tensor, 0) + tokens = tokens_gpu_tensor.cpu().numpy().tolist() + lengths = lengths_gpu_tensor.cpu().numpy().tolist() + + for sequence_tokens, length in zip(tokens, lengths): + sequence_tokens = sequence_tokens[:length] + detok_str = self.tokenizer.detokenize(sequence_tokens) + prompts_plus_generations.append(detok_str) + offsets = self.tokenizer.offsets(sequence_tokens, detok_str) + words = [ + detok_str[start:end] for start, end in zip(offsets, offsets[1:] + [len(detok_str)]) + ] + + prompts_plus_generations_segments.append(words) + + text = self.tokenizer.detokenize(tokens[0]) + + return text, prompts_plus_generations_segments + + def sample_from_logits( + self, + last_token_logits: torch.Tensor, + sampling_params: Optional[SamplingParams] = None, + vocab_size: Optional[int] = None, + **kwargs, + ) -> torch.Tensor: + """Samples the logits to generate outputs + + Given the logits of the last token, this function samples it + according to the parameters defined in sampling_params + and returns the samples + + Args: + last_token_logits (torch.Tensor): The last token logits. A tensor of + size [batch_size, vocab_size] + sampling_params (SamplingParams): The parameters to use for inference. + vocab_size (int): Obtained from the tokenizer. Defaults to None + + Returns: + torch.Tensor: 1D tensor of the sampled logits with [batch_size] elements + """ + + if kwargs.get('common_inference_params'): + sampling_params = kwargs['common_inference_params'] + + top_p = sampling_params.top_p + top_k = sampling_params.top_k + temperature = sampling_params.temperature + + assert not (top_k > 0 and top_p > 0), 'Cannot have top-p and top-k both greater than zero' + assert top_p <= 1.0, 'top-p should be in (0,1]' + + def modify_logits_for_top_k_filtering(logits, top_k): + """Set the logits for none top-k values to -inf.""" + filter_ = logits < torch.topk(logits, top_k)[0][..., -1, None] + logits.masked_fill_(filter_, float('-Inf')) + + def modify_logits_for_top_p_filtering(logits, top_p): + """Set the logits for none top-p values to -inf.""" + # First sort and calculate cumulative sum of probabilities. + sorted_logits, sorted_indices = torch.sort(logits, descending=True) + cumulative_probs = sorted_logits.softmax(dim=-1).cumsum(dim=-1) + + # Filteration based on the cumulative sum. + filter_ = cumulative_probs > top_p + # This shift by 1 is weird and I cannot justify it. This existed + # in the original implementation: + # https://github.com/ari-holtzman/degen/blob/master/gen.py + # and I guess it is needed so keeping it for now. + filter_[:, 1:] = filter_[:, :-1].clone() + # Make sure we at least have one token to select from. 
+ filter_[..., 0] = 0 + + # Fill in the filtered part + filter_ = filter_.scatter(1, sorted_indices, filter_) + logits.masked_fill_(filter_, float('-Inf')) + + # Greedy sampling + if top_k == 1: + sampled_logits = torch.argmax(last_token_logits, dim=-1) + else: + last_token_logits = last_token_logits.clone() + if temperature != 1.0: + last_token_logits.div_(temperature) + + if top_k > 1: + assert top_k <= last_token_logits.size(1), 'top-k is larger than logit size.' + if vocab_size: + assert top_k < vocab_size, 'top-k is larger than vocab size.' + modify_logits_for_top_k_filtering(last_token_logits, top_k) + + elif top_p > 0.0: + modify_logits_for_top_p_filtering(last_token_logits, top_p) + + # After filtering, we need to recalculate the distribution. + probabilities = last_token_logits.softmax(dim=-1) + sampled_logits = torch.multinomial(probabilities, num_samples=1).view(-1) + + # If vocab size is provided, make sure the samples are in in the range [0, vocab-size). + if vocab_size: + sampled_logits = torch.clamp(sampled_logits, min=0, max=(vocab_size - 1)) + return sampled_logits + + def update_generation_status( + self, + updated_prompts_tokens: torch.Tensor, + generation_started: torch.Tensor, + current_context_end_position: int, + is_generation_done_tensor: torch.Tensor, + generated_sequence_lengths: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Checks which prompts have reached an end condition + + We check which prompts have reached an end condition and set the corresponding + flags of the is_generation_done_tensor to True. The generated sequence lengths + increase as we keep generating, until that prompts hits an end condition. The + generation_started tensor determines which prompts have started generating. + + Args: + updated_prompts_tokens (torch.Tensor): The prompts tokens updated with the latest + generated tokens. A tensor of shape [batch_size, max_seq_len] + (i.e max_seq_len = max_prompt_len + tokens_to_generate) + generation_started (torch.Tensor): A boolean tensor of shape [batch_size]. True + indicates the prompt at that index has started generating tokens. + current_context_end_position (int): An integer indicating which position to + extract from the prompts tokens to get the latest generated tokens. + is_generation_done_tensor (torch.Tensor): A boolean tensor of shape [batch_size]. + True indicates the prompt at that index has reached end condition. + generated_sequence_lengths (torch.Tensor): A int tensor of shape [batch_size]. + Each value represents the generated sequence lengths for that prompt. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Returns the boolean + is_generation_done_tensor and the generated_sequence_lengths after updating it + """ + latest_samples = updated_prompts_tokens[:, current_context_end_position] + # Make sure we are checking eod criterion only for prompts that have started generating + # (i.e) We only look at the generated tokenns and not the input tokens. 
+ reached_eod = (latest_samples == self.tokenizer.eod) & generation_started + is_generation_done_tensor = is_generation_done_tensor | reached_eod + # We increment generated sequence lengths when that prompt has not hit the + # EOD and generation has started + generated_sequence_lengths += ~is_generation_done_tensor & generation_started + + return is_generation_done_tensor, generated_sequence_lengths.int() + + def pad_input_prompt_tokens( + self, + batch_prompt_tokens_list: List[List[int]], + max_prompt_length_in_batch: int, + num_tokens_to_generate: int, + ) -> torch.Tensor: + """Method to pad input prompts + + Given a list of prompts, pad them all to uniform length + + Args: + batch_prompt_tokens_list (List[List[int]]): A list containing the prompt tokens + max_prompt_length_in_batch (int): Maximum of the length of the input prompt tokens + num_tokens_togenerate (int): The number of tokens to generate for each prompt + + Returns: + torch.Tensor: A torch tensor of shape [bs, max_seq_len] (i.e) + max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate, + """ + max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate + + for prompt_tokens in batch_prompt_tokens_list: + padding_size = max_seq_len - len(prompt_tokens) + prompt_tokens.extend([self.tokenizer.eod] * padding_size) + + return torch.tensor(batch_prompt_tokens_list, device=torch.cuda.current_device()) + + def generate_output_tokens_dynamic_batch( + self, active_requests: OrderedDict[str, InferenceRequest] + ) -> OrderedDict[str, InferenceRequest]: + """Utility to generate the output tokens and probabilities for the prompts + + This utility generates the output tokens for a dynamic batch. It will run one forward step + at a time, and pass control back to the engine, which will update the request pool and call + this method again. + + Args: + active_requests (OrderedDict[str, InferenceRequest]): The input active requests. + + Returns: + OrderedDict[str, InferenceRequest]: The result for each of the incoming requests + after running one forward step. + """ + raise Exception("Not implemented yet") + + def generate_all_output_tokens_static_batch( + self, + active_requests: OrderedDict[str, InferenceRequest], + active_streams: Optional[OrderedDict[str, AsyncStream]] = None, + ) -> OrderedDict[str, InferenceRequest]: + """Utility to generate the all the output tokens and probabilities for the prompts . + + This utility generates the output tokens for a static batch. It runs the forward steps till + all prompts complete generation, updates the status of these requests to completed, adds + the generated result and returns these requests + + Args: + active_requests (OrderedDict[str, InferenceRequest]): The input active requests. + + Returns: + OrderedDict[str, InferenceRequest]: The result for each of the incoming requests + """ + assert all(request.prompt_tokens is not None for request in active_requests.values()) + + # Perform a deep copy so that the request prompt tokens do not get modified. 
+ batch_prompt_tokens_list: List[List[int]] = list( + map( + lambda request: copy.deepcopy(request.prompt_tokens), # type: ignore[arg-type] + active_requests.values(), + ) + ) + prompt_lengths_in_batch = torch.tensor( + [len(prompt_tokens) for prompt_tokens in batch_prompt_tokens_list], + device=torch.cuda.current_device(), + ) + max_prompt_length_in_batch = max(prompt_lengths_in_batch) + min_prompt_length_in_batch = min(prompt_lengths_in_batch) + + # For batch inference the inference params are the same for all request + sampling_params: SamplingParams = list(active_requests.values())[0].inference_parameters + + # max_seq_len = max_prompt_length_in_batch + num_tokens_to_generate + batch_prompt_tokens = self.pad_input_prompt_tokens( + batch_prompt_tokens_list, + max_prompt_length_in_batch=max_prompt_length_in_batch, + num_tokens_to_generate=sampling_params.num_tokens_to_generate, + ) + batch_size, max_sequence_length = batch_prompt_tokens.shape + + # Verify that output sequence length is within configured limit + # TODO(ksanthanam): Raise TokenOverflowError once !2518 is merged + inference_max_sequence_length = ( + self.inference_wrapped_model.inference_wrapper_config.inference_max_seq_length + ) + assert max_sequence_length <= inference_max_sequence_length, ( + f"Maximum allowed sequence length was set to {inference_max_sequence_length} tokens " + f"but requested generation of {max_sequence_length} tokens" + ) + + # Pre allocate log probs tensor + output_log_probs = None + if sampling_params.return_log_probs: + output_log_probs = torch.empty( + (batch_size, max_sequence_length - 1), + dtype=torch.float32, + device=torch.cuda.current_device(), + ) + + # An array to check which of the prompts have reached end of generation condition + is_generation_done_tensor = torch.zeros( + batch_size, dtype=torch.bool, device=torch.cuda.current_device() + ) + + # An array to act as a counter to keep track of generated sequence lengths + generated_sequence_lengths = torch.zeros( + batch_size, device=torch.cuda.current_device() + ).cuda() + + # Use padded vocab size because tokenizer vocab size might not include padding + # to nearest power of 2 + vocab_size = self.inference_wrapped_model.inference_wrapper_config.padded_vocab_size + + # Check whether CUDA graphs are enabled + enable_cuda_graph = get_model_config(self.inference_wrapped_model.model).enable_cuda_graph + + streaming_enabled = active_streams is not None and len(active_streams) > 0 + if streaming_enabled: + # Start a separate thread for streaming tokens to avoid blocking the + # main computation + streaming_idx: List[int] = [ + i + for (i, request_id) in enumerate(active_requests.keys()) + if request_id in active_streams + ] + streaming_request_ids: List[str] = list(active_streams.keys()) + streams: List[AsyncStream] = list(active_streams.values()) + streaming_requests: List[InferenceRequest] = [ + active_requests[request_id] for request_id in streaming_request_ids + ] + streaming_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + stream_tokens = functools.partial(self.stream_tokens, sampling_params) + + with torch.no_grad(): + + self.inference_wrapped_model.prep_model_for_inference( + prompts_tokens=batch_prompt_tokens + ) + + inference_input: Dict[str, Any] = self.prep_inference_input( + prompts_tokens=batch_prompt_tokens, active_requests=active_requests + ) + + assert ( + not self.inference_wrapped_model.inference_params.decode_mode + ), f"Generation must start in prefill mode" + + context_start_position = 0 + # Pick the 
context window that we need to pass through the network. + for context_end_position in range(min_prompt_length_in_batch, max_sequence_length): + + inference_input_for_context_window: Dict[str, Any] = ( + self.inference_wrapped_model.get_batch_for_context_window( + inference_input, context_start_position, context_end_position + ) + ) + + # Disable attention mask when using CUDA graphs for decode + if ( + enable_cuda_graph + and self.inference_wrapped_model.inference_params.decode_mode + and "attention_mask" in inference_input_for_context_window + ): + inference_input_for_context_window["attention_mask"] = None + + # Returns the final logits of shape [batch_size, context_length, vocab_size] + # Note: This is returned in all TP ranks or last PP stage in PP models + logits = self.inference_wrapped_model.run_one_forward_step( + inference_input_for_context_window + ) + + if enable_cuda_graph: + create_cudagraphs() + + if self.model_is_pipeline_parallel: + context_length = context_end_position - context_start_position + logits = broadcast_from_last_pipeline_stage( + [batch_size, context_length, vocab_size], + dtype=self.inference_wrapped_model.inference_wrapper_config.params_dtype, + tensor=logits, + ) + + # Indicates which of the input prompts have started generating tokens. + # A 1D boolean tensor with [batch_size] elements (i.e) The shortest + # prompts will start generating first and so on + generation_started = prompt_lengths_in_batch <= context_end_position + last_token_logits = logits[:, -1, :] + sampled_logits = self.sample_from_logits( + last_token_logits, sampling_params, vocab_size + ) + + # Substitute the sampled logits only for the prompts that + # have started generating tokens + batch_prompt_tokens[generation_started, context_end_position] = sampled_logits[ + generation_started + ] + + if sampling_params.return_log_probs: + log_probs = F.log_softmax(logits, dim=2) + indices = torch.unsqueeze( + batch_prompt_tokens[ + :, (context_start_position + 1) : (context_end_position + 1) + ], + 2, + ) + # Get the log probabilities for only the prompt tokens + assert output_log_probs is not None + output_log_probs[:, context_start_position:context_end_position] = torch.gather( + log_probs, 2, indices + ).squeeze(2) + + context_start_position = context_end_position + + # Check end of generation status for each tensor + # and update generated sequence lengths + (is_generation_done_tensor, generated_sequence_lengths) = ( + self.update_generation_status( + updated_prompts_tokens=batch_prompt_tokens, + generation_started=generation_started, + current_context_end_position=context_end_position, + is_generation_done_tensor=is_generation_done_tensor, + generated_sequence_lengths=generated_sequence_lengths, + ) + ) + + # Stream intermediate outputs + if streaming_enabled: + streaming_executor.submit( + stream_tokens, + streaming_request_ids, + streaming_requests, + streams, + generation_started[streaming_idx].cpu(), + is_generation_done_tensor[streaming_idx].cpu(), + batch_prompt_tokens[streaming_idx].cpu(), + prompt_lengths_in_batch[streaming_idx].cpu(), + generated_sequence_lengths[streaming_idx].cpu(), + ( + output_log_probs[streaming_idx].cpu() + if output_log_probs is not None + else [None] * len(streaming_idx) + ), + ) + + # Boolean flag indicating if all prompts are finished + all_prompts_done = torch.all(is_generation_done_tensor) + if all_prompts_done: + break + + # Change to decode mode if all prefill is complete + if torch.all(generation_started): + 
self.inference_wrapped_model.inference_params.enable_decode_mode() + + # Close all streams + if streaming_enabled: + streaming_executor.shutdown() + for stream in streams: + stream.finish() + + # Include all the generated tokens + batch_prompt_tokens_with_generations = batch_prompt_tokens[:, : (context_end_position + 1)] + if sampling_params.return_log_probs: + assert output_log_probs is not None + output_log_probs = output_log_probs[:, :context_end_position] + + generated_sequence_lengths[ + generated_sequence_lengths > sampling_params.num_tokens_to_generate + ] = sampling_params.num_tokens_to_generate + + for idx, request in enumerate(active_requests.values()): + input_prompt_length = int(prompt_lengths_in_batch[idx]) + # Shorter prompts might have generated more than required tokens. So we trim them down + required_sequence_length = int( + min(generated_sequence_lengths[idx], sampling_params.num_tokens_to_generate) + ) + # Extract only the generated tokens + required_result_tokens = batch_prompt_tokens_with_generations[ + idx, input_prompt_length : (input_prompt_length + required_sequence_length) + ] + generated_sequence_lengths = generated_sequence_lengths.to(dtype=torch.int32) + request.generated_sequence_lengths = generated_sequence_lengths.to(dtype=torch.int32) + request.generated_length = required_sequence_length + request.generated_tokens = required_result_tokens + + request.prompt_log_probs = ( + None + if output_log_probs is None + else output_log_probs[idx, :input_prompt_length].cpu().numpy().tolist() + ) + + request.generated_log_probs = ( + None + if output_log_probs is None + else output_log_probs[ + idx, + input_prompt_length - 1 : (input_prompt_length + required_sequence_length - 1), + ] + .cpu() + .numpy() + .tolist() + ) + request.status = Status.COMPLETED + + text, segments = self.detokenize_generations( + batch_prompt_tokens_with_generations[idx], + input_prompt_length + generated_sequence_lengths, + sampling_params.return_segments, + ) + request.text = text # Inference server returns prompts & generations together + if sampling_params.return_segments: + request.segments = segments[0] + request.generated_text = text[len(request.prompt) :] + return active_requests + + def prep_inference_input( + self, prompts_tokens: torch.Tensor, active_requests: OrderedDict[str, InferenceRequest] + ) -> Dict[str, Any]: + """Preparing input data for inference, using respective wrapper's prep_inference_input method # pylint: disable=line-too-long + + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] + active_requests (OrderedDict[str, InferenceRequest]): The input active requests + + Returns: + A dict of the inference input for the current batch. + """ + return self.inference_wrapped_model.prep_inference_input(prompts_tokens) + + def stream_tokens( + self, + sampling_params: SamplingParams, + request_ids: List[str], + requests: List[InferenceRequest], + streams: List[AsyncStream], + generation_started: List[bool], + is_generation_done: List[bool], + tokens: torch.Tensor, + prompt_lengths: List[int], + generated_lengths: List[int], + output_log_probs: Union[torch.Tensor, None], + ): + """Asynchronously streams tokens for the given requests. + + Args: + sampling_params (SamplingParams): The sampling parameters. + request_ids (List[str]): The request IDs. + request (List[InferenceRequest]): The requests. + stream (List[AsyncStream]): The streams over which to send tokens. + generation_started (List[bool]): Whether the decode step has started. 
+ is_generation_done (List[bool]): Whether generation has completed. + tokens (torch.Tensor): The tokens for this request. + prompt_lengths (List[int]): The number of prompt tokens for each request. + generated_lengths (List[int]): The number of output tokens for each request. + output_log_probs (torch.Tensor, optional): The log probs for each request. + """ + + def stream_token( + request_id: str, + request: InferenceRequest, + stream: AsyncStream, + generation_started: bool, + is_generation_done: bool, + tokens: torch.Tensor, + prompt_length: int, + generated_length: int, + output_log_probs: Union[torch.Tensor, None], + ): + """Asynchronously streams a token for the given request.""" + + if not generation_started or stream.finished: + return + + num_tokens_to_generate = sampling_params.num_tokens_to_generate + return_segments = sampling_params.return_segments + detokenize_streaming_text = not getattr( + sampling_params, "no_detokenize_streaming_text", False + ) + + generated_tokens = tokens[prompt_length : prompt_length + generated_length] + + if detokenize_streaming_text: + generated_text, generated_segments = self.detokenize_generations( + generated_tokens, prompt_length + generated_length, return_segments + ) + else: + generated_text = "" + generated_segments = [] + + if output_log_probs is not None: + generated_log_probs = ( + output_log_probs[prompt_length - 1 : prompt_length + generated_length - 1] + .cpu() + .numpy() + .tolist() + ) + else: + generated_log_probs = None + + stream.put( + InferenceRequest( + request_id=request_id, + prompt=request.prompt, + inference_parameters=request.inference_parameters, + prompt_tokens=request.prompt_tokens, + arrival_time=request.arrival_time, + status=request.status, + encoder_prompt=request.encoder_prompt, + generated_text=generated_text, + generated_segments=generated_segments, + generated_tokens=generated_tokens, + generated_log_probs=generated_log_probs, + generated_length=generated_length, + ) + ) + + if is_generation_done or generated_length == num_tokens_to_generate: + stream.finish() + + ret = map( + stream_token, + request_ids, + requests, + streams, + generation_started, + is_generation_done, + tokens, + prompt_lengths, + generated_lengths, + output_log_probs, + ) + list(ret) diff --git a/megatron/core/inference/text_generation_controllers/vlm_text_generation_controller.py b/megatron/core/inference/text_generation_controllers/vlm_text_generation_controller.py new file mode 100644 index 0000000..1d92947 --- /dev/null +++ b/megatron/core/inference/text_generation_controllers/vlm_text_generation_controller.py @@ -0,0 +1,40 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from typing import OrderedDict + +import torch + +from megatron.core.inference.inference_request import InferenceRequest, VLMInferenceRequest +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) + + +class VLMTextGenerationController(TextGenerationController): + """The text generation controller for VLMs""" + + def prep_inference_input( + self, prompts_tokens: torch.Tensor, active_requests: OrderedDict[str, InferenceRequest] + ): + """Preparing input data for inference, using respective wrapper's prep_inference_input method # pylint: disable=line-too-long + + Currently only supports batch size 1 inference. 
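# [Editor's illustration, not part of the patch] Index arithmetic used by
# generate_all_output_tokens_static_batch (earlier in this hunk) when it splits one
# request's results out of the shared token buffer: for a prompt of length P that
# generated G tokens, the generated tokens sit at [P, P + G), while their log-probs sit
# at [P - 1, P + G - 1) because position t of output_log_probs holds the log-prob of the
# token at position t + 1.
prompt_length, generated_length = 5, 3
tokens = list(range(10))                                  # stand-in for one row of the buffer
generated_tokens = tokens[prompt_length : prompt_length + generated_length]
log_prob_slice = slice(prompt_length - 1, prompt_length + generated_length - 1)
assert generated_tokens == [5, 6, 7]
assert (log_prob_slice.start, log_prob_slice.stop) == (4, 7)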
+ + Args: + prompts_tokens (torch.Tensor): A tensor of shape [batch_size, max_sequence_length] + active_requests (OrderedDict[str, InferenceRequest]): The input active requests + """ + assert len(active_requests) == 1, f"VLM inference currently only supports batch size 1" + + request = list(active_requests.values())[0] + + assert isinstance( + request, VLMInferenceRequest + ), f"Found inference request of type {type(request)}, expected VLMInferenceRequest" + + return self.inference_wrapped_model.prep_inference_input( + prompts_tokens, + request.num_img_embeddings_per_tile, + request.imgs, + request.num_tiles, + request.decoder_seq_length, + ) diff --git a/megatron/core/inference_params.py b/megatron/core/inference_params.py index 0db49e3..846ceb7 100644 --- a/megatron/core/inference_params.py +++ b/megatron/core/inference_params.py @@ -1,31 +1,100 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -class InferenceParams: - """Inference parameters that are passed to the main model in order - to efficienly calculate and store the context during inference.""" - - def __init__(self, max_batch_size, max_sequence_length): - self.max_sequence_length = max_sequence_length - self.max_batch_size = max_batch_size - self.sequence_len_offset = 0 - self.batch_size_offset = 0 - self.key_value_memory_dict = {} - - def swap_key_value_dict(self, batch_idx): - "swap between batches" - if len(self.key_value_memory_dict) == 0: - raise ValueError("should not swap when dict in empty") - - for layer_number in self.key_value_memory_dict.keys(): - inference_key_memory, inference_value_memory = self.key_value_memory_dict[layer_number] - assert ( - len(batch_idx) == inference_key_memory.shape[1] - ) # make sure batch size is the same - new_inference_key_memory = inference_key_memory[:, batch_idx] - new_inference_value_memory = inference_value_memory[:, batch_idx] - self.key_value_memory_dict[layer_number] = ( - new_inference_key_memory, - new_inference_value_memory, - ) - - def __str__(self): - return f"InferenceParams(max_seq_len = {self.max_sequence_length}, max_batch_size = {self.max_batch_size}, sequence_len_offset = {self.sequence_len_offset}, batch_size_offset = {self.batch_size_offset}, key_value_memory_dict = {self.key_value_memory_dict.keys()})" +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
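# [Editor's illustration, not part of the patch] The VLMTextGenerationController added
# above only accepts a single active request; its prep_inference_input forwards these
# fields of the VLMInferenceRequest to the wrapped model. The SimpleNamespace and all
# values below are placeholders, not real API objects.
from types import SimpleNamespace
import torch

fake_vlm_request = SimpleNamespace(
    num_img_embeddings_per_tile=576,        # illustrative value
    imgs=torch.zeros(1, 3, 336, 336),       # illustrative image tensor
    num_tiles=torch.tensor([1]),
    decoder_seq_length=4096,
)
# Passed positionally after the padded prompt tokens, in this order:
forwarded = (
    fake_vlm_request.num_img_embeddings_per_tile,
    fake_vlm_request.imgs,
    fake_vlm_request.num_tiles,
    fake_vlm_request.decoder_seq_length,
)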
+class InferenceParams:
+    """Inference parameters that are passed to the main model in order
+    to efficiently calculate and store the context during inference."""
+
+    def __init__(self, max_batch_size, max_sequence_length):
+        self.max_sequence_length = max_sequence_length
+        self.max_batch_size = max_batch_size
+        self.current_batch_size = max_batch_size  # Required for bookkeeping variable-sized batches
+        self.sequence_len_offset = 0
+        self.batch_size_offset = 0
+        self.decode_mode = False
+        self.key_value_memory_dict = {}
+
+    def swap_key_value_dict(self, batch_idx):
+        "swap between batches"
+        if len(self.key_value_memory_dict) == 0:
+            raise ValueError("should not swap when dict is empty")
+
+        for layer_number in self.key_value_memory_dict.keys():
+            inference_key_memory, inference_value_memory = self.key_value_memory_dict[layer_number]
+            assert (
+                len(batch_idx) == inference_key_memory.shape[1]
+            )  # make sure batch size is the same
+            new_inference_key_memory = inference_key_memory[:, batch_idx]
+            new_inference_value_memory = inference_value_memory[:, batch_idx]
+            self.key_value_memory_dict[layer_number] = (
+                new_inference_key_memory,
+                new_inference_value_memory,
+            )
+
+    def enable_prefill_mode(self):
+        """
+        Indicates the generation loop is in the prefill phase (still processing
+        input prompt tokens). This should be enabled if the generation loop is
+        encoding prompt tokens for *any* request in a batch.
+        """
+        self.decode_mode = False
+
+    def enable_decode_mode(self):
+        """
+        Indicates the generation loop is in the decode phase (generating new output
+        tokens). This should only be enabled if the generation loop has fully encoded
+        the prompts for *all* requests in a batch.
+        """
+        self.decode_mode = True
+
+    def reset(self):
+        """Resets the inference state for a new batch."""
+        self.current_batch_size = self.max_batch_size
+        self.sequence_len_offset = 0
+        self.batch_size_offset = 0
+        self.enable_prefill_mode()
+
+    def __str__(self):
+        return (
+            f"InferenceParams(max_seq_len = {self.max_sequence_length}, "
+            f"max_batch_size = {self.max_batch_size}, "
+            f"current_batch_size = {self.current_batch_size}, "
+            f"sequence_len_offset = {self.sequence_len_offset}, "
+            f"batch_size_offset = {self.batch_size_offset}, "
+            f"key_value_memory_dict = {self.key_value_memory_dict.keys()}, "
+            f"decode_mode = {self.decode_mode})"
+        )
+
+    def __eq__(self, other):
+
+        if not isinstance(other, InferenceParams):
+            return False
+
+        # Check all attributes match
+        basic_attrs = [
+            'max_sequence_length',
+            'max_batch_size',
+            'current_batch_size',
+            'sequence_len_offset',
+            'batch_size_offset',
+        ]
+
+        if not all(hasattr(other, attr) for attr in basic_attrs):
+            return False
+
+        # Check dictionary keys match; i.e.
the same number of layers are cached + if self.key_value_memory_dict.keys() != other.key_value_memory_dict.keys(): + return False + + # Check each tensor tuple in the dictionary + for key in self.key_value_memory_dict: + self_tensors = self.key_value_memory_dict[key] + other_tensors = other.key_value_memory_dict[key] + + # Compare each key, value tensor in the tuple + for self_tensor, other_tensor in zip(self_tensors, other_tensors): + if ( + self_tensor.data_ptr() != other_tensor.data_ptr() + or self_tensor.shape != other_tensor.shape + ): + return False + return True diff --git a/megatron/core/jit.py b/megatron/core/jit.py index c35c41b..5b1dfff 100644 --- a/megatron/core/jit.py +++ b/megatron/core/jit.py @@ -7,18 +7,4 @@ from megatron.core.utils import is_torch_min_version jit_fuser = torch.jit.script # nvFuser is deprecated in PyTorch JIT starting from 2.2 if is_torch_min_version("2.2.0a0"): - jit_fuser = torch.compile(mode='max-autotune-no-cudagraphs') - -# Decorator to disable Torch Dynamo -# See: https://github.com/NVIDIA/TransformerEngine/issues/308 -no_torch_dynamo = lambda recursive=True: lambda func: func -if torch.__version__ >= "2": - import torch._dynamo - - if torch.__version__ >= "2.1": - no_torch_dynamo = lambda recursive=True: lambda f: torch._dynamo.disable( - f, recursive=recursive - ) - else: - # no "recursive" option in pyTorch 2.0 - it acts as if recursive was True - no_torch_dynamo = lambda recursive=True: torch._dynamo.disable + jit_fuser = torch.compile diff --git a/megatron/core/model_parallel_config.py b/megatron/core/model_parallel_config.py index 46a03f6..cf6b20f 100644 --- a/megatron/core/model_parallel_config.py +++ b/megatron/core/model_parallel_config.py @@ -1,387 +1,392 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from dataclasses import dataclass -from typing import Callable, ContextManager, Optional - -import torch - - -@dataclass -class ModelParallelConfig: - """Base configuration for Megatron Core - - The initialization function has an argument for each parameter. - """ - - ################### - # Model parallelism - ################### - tensor_model_parallel_size: int = 1 - """Intra-layer model parallelism. Splits tensors across GPU ranks.""" - - pipeline_model_parallel_size: int = 1 - """Inter-layer model parallelism. Splits transformer layers across GPU ranks.""" - - virtual_pipeline_model_parallel_size: Optional[int] = None - """Interleaved pipeline parallelism is used to improve performance by reducing the pipeline - bubble. Considers a transformer block as a list of smaller transformer (virtual) blocks. - The number of virtual blocks per pipeline model parallel rank is the virtual model parallel - size. See Efficient Large-Scale Language Model Training on GPU Clusters Using Megatron-LM: - arxiv.org/pdf/2104.04473.pdf for more details. - """ - - sequence_parallel: bool = False - """Makes tensor parallelism more memory efficient for LLMs (20B+) by parallelizing layer norms - and dropout sequentially. See Reducing Activation Recomputation in Large Transformer Models - (https://arxiv.org/abs/2205.05198) for more details. - """ - - context_parallel_size: int = 1 - """Splits network input along sequence dimension across GPU ranks.""" - - hierarchical_context_parallel_sizes: Optional[list[int]] = None - """Degrees of the hierarchical context parallelism. Users should provide a list to specify - the sizes for different levels. 
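# [Editor's illustration, not part of the patch] Lifecycle of the prefill/decode flag
# introduced on InferenceParams earlier in this hunk, assuming this patch is applied:
# reset() returns the object to prefill mode for a new batch, and enable_decode_mode()
# is only called once every prompt in the batch has been fully encoded (see the
# static-batch generation loop above).
from megatron.core.inference_params import InferenceParams

params = InferenceParams(max_batch_size=8, max_sequence_length=2048)
assert not params.decode_mode              # starts in prefill mode
params.enable_decode_mode()                # all prompts encoded; switch to per-token decode
assert params.decode_mode
params.reset()                             # new batch: offsets cleared, back to prefill mode
assert params.sequence_len_offset == 0 and not params.decode_mode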
Taking the a2a+p2p cp comm type as example, it contains - groups of two levels, so the first value of the list indicates the group size of the a2a - communication type, and the second value indicates the group size of the p2p communication - type. - """ - - expert_model_parallel_size: int = 1 - """Distributes Moe Experts across sub data parallel dimension.""" - - expert_tensor_parallel_size: Optional[int] = None - """Intra-layer tensor model parallelsm for expert layer. Splits tensors across GPU ranks.""" - - moe_extended_tp: bool = False - """NOTE: Deprecated from MCore v0.10. This flag is ignored. - Its functionality is replaced by expert_tensor_parallel_size. - """ - - ################### - # Initialization - ################### - perform_initialization: bool = True - """If true, weights are initialized. This option can be useful when you know you are going to - load values from a checkpoint. - """ - - use_cpu_initialization: bool = False - """When set to False, we initialize the weights directly on the GPU. CPU initialization is the - same regardless of tensor model parallelism, but GPU initialization is not. Transferring - weights from CPU to GPU can take a significant amount of time for large models. - """ - - ################### - # Training - ################### - fp16: bool = False - """If true, train with fp16 mixed precision training.""" - - bf16: bool = False - """If true, train with bf16 mixed precision training.""" - - params_dtype: torch.dtype = torch.float32 - """dtype used when intializing the weights.""" - - timers: Optional[Callable] = None - """Timers object to call for various timing functions. See megatron.core.timers.Timers""" - - finalize_model_grads_func: Optional[Callable] = None - """Function that finalizes gradients on all workers. Could include ensuring that grads are - all-reduced across data parallelism, pipeline parallelism, and sequence parallelism - dimensions. - """ - - grad_scale_func: Optional[Callable] = None - """If using loss scaling, this function should take the loss and return the scaled loss. If - None, no function is called on the loss. - """ - - no_sync_func: Optional[Callable] = None - """Function that creates a context that suppresses asynchronous data-parallel communication. If - the model is an instance of core.distributed.DistributedDataParallel, the default is to use - core.distributed.DistributedDataParallel.no_sync. - """ - - grad_sync_func: Optional[Callable] = None - """Function that launches asynchronous gradient reductions (e.g. distributed optimizer gradient - reduce-scatters). The function should take one argument: an iterable of parameters whose - gradients are to be synchronized. - """ - - param_sync_func: Optional[Callable] = None - """Function that launches asynchronous parameter synchronizations (e.g. distributed optimizer - parameter all-gathers). The function should take one argument: an iterable of parameters to - be synchronized. - """ - - deterministic_mode: bool = False - """If true, code that has deterministic execution will be chosen. This usually - means slower execution, but is good for debugging and testing. Defaults to False.""" - - enable_autocast: bool = False - """If true runs the forward step function inside torch.autocast context.""" - - autocast_dtype: Optional[torch.dtype] = None - """dtype to pass to torch.amp.autocast when enabled. 
If None, is set to pipeline_dtype.""" - - num_microbatches_with_partial_activation_checkpoints: Optional[int] = None - """If int, set the number of microbatches where not all of the layers will be checkpointed and - recomputed. The rest of the microbatches within the window of maximum outstanding - microbatches will recompute all layers (either full recompute or selective recompute). If - None, the checkpoint and recompute will be left up to the forward_step function. - - """ - - ################### - # Optimizations - ################### - gradient_accumulation_fusion: bool = False - """If true, fuses weight gradient accumulation to GEMMs. Requires the custom CUDA extension - fused_weight_gradient_mlp_cuda module. To use gradient_accumulation_fusion you must install - APEX with --cpp_ext and --cuda_ext. For example: "pip install --global-option=\"--cpp_ext\" - --global-option=\"--cuda_ext\" ". Note that the extension requires CUDA>=11. Otherwise, you - must turn off gradient accumulation fusion. - """ - - async_tensor_model_parallel_allreduce: bool = False - """NOTE: Deprecated. This flag is ignored.""" - - use_te_rng_tracker: bool = False - """If true, uses RNG state tracker in TransformerEngine if exists. - """ - - tp_comm_overlap: bool = False - """If true, allows overlapping of Linear layer execution with tensor parallel communication - collectives like AllGather/ReduceScatter. Overlapping is done for the linear layers wherever - possible during the forward and the backward pass. - """ - - tp_comm_bulk_wgrad: bool = True - """If true, allows All-Gather overlap with Bprop activation gradient GEMM. Don't care if - tp_comm_overlap is False. - """ - - tp_comm_bulk_dgrad: bool = True - """If true, allows Reduce-Scatter overlap with Bprop weight gradient GEMM. Don't care if - tp_comm_overlap is False. - """ - - tp_comm_overlap_ag: bool = True - """If true, allows All-Gather overlap with GEMM by pipelining the GEMM and All-Gather. - Don't care if tp_comm_overlap is False. - """ - - tp_comm_overlap_rs: bool = True - """If true, allows Reduce-Scatter overlap with GEMM by pipelining the GEMM and Reduce-Scatter. - Don't care if tp_comm_overlap is False. - """ - - tp_comm_overlap_rs_dgrad: bool = False - """If true, allows Reduce-Scatter overlap with DGRAD GEMM by pipelining the - GEMM and Reduce-Scatter splits. Don't care if tp_comm_overlap is False. - """ - - tp_comm_split_ag: bool = True - """Deprecated from TransformerEngine v1.6.0. - If true, allows All-Gather overlap with Fprop GEMM by pipelining the GEMM and All-Gather - splits. Don't care if tp_comm_overlap is False. - """ - - tp_comm_atomic_ag: bool = False - """Deprecated from TransformerEngine v1.6.0. - If true, allows All-Gather overlap with Fprop GEMM by pipelining the GEMM and All-Gather - both done atomically. Don't care if tp_comm_overlap is False. - """ - - tp_comm_split_rs: bool = True - """Deprecated from TransformerEngine v1.6.0. - If true, allows Reduce-Scatter overlap with Fprop GEMM by pipelining the GEMM and - Reduce-Scatter splits. Don't care if tp_comm_overlap is False. - """ - - tp_comm_atomic_rs: bool = False - """Deprecated from TransformerEngine v1.6.0. - If true, allows Reduce-Scatter overlap with Fprop GEMM by pipelining the GEMM and - Reduce-Scatter both done atomically. Don't care if tp_comm_overlap is False. - """ - - cross_entropy_loss_fusion: bool = False - """If this is enabled, the fused cross entropy implementation would be used. - Defaults to False. 
- """ - - tp_comm_overlap_disable_qkv: bool = False - """ - If true, the AllGather -> Gemm overlap for QKV gets disabled - """ - - tp_comm_overlap_disable_fc1: bool = False - """ - If true, the AllGather -> Gemm overlap for FC1 layer of MLP gets disabled - """ - - tp_comm_bootstrap_backend: str = 'nccl' - """ - Set the bootstrapping backend out of 'nccl', 'mpi', and 'gloo' - """ - - ################### - # Pipeline Parallel - ################### - pipeline_dtype: torch.dtype = None - """dtype used in p2p communication, usually params_dtype""" - - variable_seq_lengths: bool = False - """Support for variable sequence lengths across microbatches. Setting this communicates the size - of tensors during pipeline parallelism communication, because of this extra overhead it - should only be set if the sequence length varies by microbatch within a global batch. - """ - - overlap_p2p_comm: bool = False - """When True some of the peer to peer communication for pipeline parallelism will overlap with - computation. Must be False if batch_p2p_comm is true. - """ - - batch_p2p_comm: bool = True - """Use batch_isend_irecv instead of individual isend/irecv calls. Must be False if - overlap_p2p_comm is True. - """ - - batch_p2p_sync: bool = True - """When using batch_isend_irecv, do a cuda.device.synchronize afterward to work around a bug in - older version of PyTorch. - """ - - use_ring_exchange_p2p: bool = False - """Use custom ring_exchange kernel instead of torch.distributed.batch_isend_irecv(). Requires - custom built torch with torch.distributed.ring_exchange. - """ - - deallocate_pipeline_outputs: bool = False - """If True, output data is deallocated after the tensor is sent to the next pipeline stage. - Helps with saving memory, does nothing when pipeline parallel is not used. - """ - - defer_embedding_wgrad_compute: bool = False - """If true, defers the embedding WGRAD GEMMs while pipeline flush is - taking place enabling us to hide pipeline flush latency. Defaults to False. - """ - - wgrad_deferral_limit: int = 0 - """This value tunes the number of micro-batches for which the embedding weight gradient compute - needs to be deferred to pipeline flush, this argument is invalid if - `defer_embedding_wgrad_compute` is False. - Defaults to 0, which means all micro-batches are deferred. - """ - - pipeline_model_parallel_split_rank: Optional[int] = None - """If int, rank where encoder and decoder should be split in cases where the model has both an - encoder and decoder (e.g., T5). Ignored if None. - """ - - overlap_p2p_comm_warmup_flush: bool = False - """If true, overlap communication and computation in warm up and flush phase. - Only valid when overlap_p2p_comm is True and batch_p2p_comm is False. - Defaults to False. - """ - - microbatch_group_size_per_vp_stage: Optional[int] = None - """This value specifies the number of micro-batches that are executed - at a time for a given virtual stage (both forward and backward). - Default (in __post_init__() method below) to pipeline_parallel_size - which specifies a depth-first schedule. 
- Example: for PP=2 VP=2, when microbatch_group_size_per_vp_stage=2, - num_microbatches = 4, we have - rank 0 | 0 1 0 1 2 3 2 3 - rank 1 | 0 1 0 1 2 3 2 3 - When microbatch_group_size_per_vp_stage=3, num_microbatches = 5, - we have - rank 0 | 0 1 2 0 1 2 3 4 3 4 - rank 1 | 0 1 2 0 1 2 3 4 3 4 - """ - - ################### - # CPU Offloading - ################### - cpu_offloading: bool = False - """When set to True, all the activations are offloaded to the CPU asynchronously.""" - - cpu_offloading_num_layers: int = 0 - """Tells the number of transformer layers for which activations has to be offloaded.""" - - _cpu_offloading_context: Optional[ContextManager] = ( - None - # Used for internal use only, not to be set by a user. - # TODO: Need to move to the 'right' place when possible. - ) - """For internal use only, do not set.""" - - cpu_offloading_activations: bool = True - """If True, offloads the activations to CPU.""" - - cpu_offloading_weights: bool = True - """If True, offloads the weights to CPU.""" - - ################### - # Timing - ################### - barrier_with_L1_time: bool = True - """If true, use barrier with level 1 time measurements. It is up to the user to make sure - calling barrier with their timers will not result in hangs. This can happen if for example - the user adds a level 1 timer that is not called by all ranks. - """ - - def __post_init__(self): - """Python dataclass method that is used to modify attributes after initialization. - See https://docs.python.org/3/library/dataclasses.html#post-init-processing for more - details. - """ - if self.sequence_parallel: - if self.tensor_model_parallel_size <= 1: - raise ValueError("Can not use sequence paralllelism without tensor parallelism") - - if self.expert_tensor_parallel_size is None: - self.expert_tensor_parallel_size = self.tensor_model_parallel_size - - if self.pipeline_model_parallel_size > 1: - if self.pipeline_dtype is None: - raise ValueError( - "When using pipeline parallelism, pipeline_dtype must be specified" - ) - - if self.autocast_dtype is None: - self.autocast_dtype = self.params_dtype - - if self.defer_embedding_wgrad_compute and self.pipeline_model_parallel_size == 1: - raise ValueError( - "Cannot defer embedding wgrad compute when pipeline model parallel is not used" - ) - - if self.defer_embedding_wgrad_compute and not self.gradient_accumulation_fusion: - raise ValueError( - "Cannot defer embedding wgrad compute when gradient accumulation fusion is not used" - ) - - if self.defer_embedding_wgrad_compute and self.wgrad_deferral_limit < 0: - raise ValueError( - "Wgrad deferral limit should be greater than or equal to 0 when it is enabled!" - ) - - if self.expert_model_parallel_size > 1 and self.tensor_model_parallel_size > 1: - if self.sequence_parallel is False: - raise ValueError( - "When using expert parallelism and tensor parallelism, " - "sequence parallelism must be used" - ) - - if self.microbatch_group_size_per_vp_stage is None: - self.microbatch_group_size_per_vp_stage = self.pipeline_model_parallel_size - - if self.overlap_p2p_comm_warmup_flush: - if not self.overlap_p2p_comm or self.batch_p2p_comm: - raise ValueError( - "Pipeline parallel communication overlapping in warmup and flush is only " - "compatible with overlap_p2p_comm but not batch_p2p_comm." - ) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
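# [Editor's illustration, not part of the patch] Two of the validation rules enforced by
# ModelParallelConfig.__post_init__ (the dataclass re-added below): sequence_parallel
# requires tensor_model_parallel_size > 1, and pipeline_model_parallel_size > 1 requires
# pipeline_dtype to be set. The sizes used here are illustrative only.
import torch
from megatron.core.model_parallel_config import ModelParallelConfig

config = ModelParallelConfig(
    tensor_model_parallel_size=2,
    pipeline_model_parallel_size=2,
    pipeline_dtype=torch.bfloat16,   # mandatory because pipeline_model_parallel_size > 1
    sequence_parallel=True,          # allowed because tensor_model_parallel_size > 1
)
# microbatch_group_size_per_vp_stage defaults to the pipeline parallel size.
assert config.microbatch_group_size_per_vp_stage == 2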
+ +from dataclasses import dataclass +from typing import Callable, ContextManager, Optional + +import torch + + +@dataclass +class ModelParallelConfig: + """Base configuration for Megatron Core + + The initialization function has an argument for each parameter. + """ + + ################### + # Model parallelism + ################### + tensor_model_parallel_size: int = 1 + """Intra-layer model parallelism. Splits tensors across GPU ranks.""" + + pipeline_model_parallel_comm_backend: Optional[str] = None + """Configuring backend option of pipeline parallel communication (e.g., nccl, ucc) + If None, the default backend will be used. + """ + + pipeline_model_parallel_size: int = 1 + """Inter-layer model parallelism. Splits transformer layers across GPU ranks.""" + + virtual_pipeline_model_parallel_size: Optional[int] = None + """Interleaved pipeline parallelism is used to improve performance by reducing the pipeline + bubble. Considers a transformer block as a list of smaller transformer (virtual) blocks. + The number of virtual blocks per pipeline model parallel rank is the virtual model parallel + size. See Efficient Large-Scale Language Model Training on GPU Clusters Using Megatron-LM: + arxiv.org/pdf/2104.04473.pdf for more details. + """ + + sequence_parallel: bool = False + """Makes tensor parallelism more memory efficient for LLMs (20B+) by parallelizing layer norms + and dropout sequentially. See Reducing Activation Recomputation in Large Transformer Models + (https://arxiv.org/abs/2205.05198) for more details. + """ + + context_parallel_size: int = 1 + """Splits network input along sequence dimension across GPU ranks.""" + + hierarchical_context_parallel_sizes: Optional[list[int]] = None + """Degrees of the hierarchical context parallelism. Users should provide a list to specify + the sizes for different levels. Taking the a2a+p2p cp comm type as example, it contains + groups of two levels, so the first value of the list indicates the group size of the a2a + communication type, and the second value indicates the group size of the p2p communication + type. + """ + + expert_model_parallel_size: int = 1 + """Distributes Moe Experts across sub data parallel dimension.""" + + expert_tensor_parallel_size: Optional[int] = None + """Intra-layer tensor model parallelsm for expert layer. Splits tensors across GPU ranks.""" + + moe_extended_tp: bool = False + """NOTE: Deprecated from MCore v0.10. This flag is ignored. + Its functionality is replaced by expert_tensor_parallel_size. + """ + + ################### + # Initialization + ################### + perform_initialization: bool = True + """If true, weights are initialized. This option can be useful when you know you are going to + load values from a checkpoint. + """ + + use_cpu_initialization: bool = False + """When set to False, we initialize the weights directly on the GPU. CPU initialization is the + same regardless of tensor model parallelism, but GPU initialization is not. Transferring + weights from CPU to GPU can take a significant amount of time for large models. + """ + + ################### + # Training + ################### + fp16: bool = False + """If true, train with fp16 mixed precision training.""" + + bf16: bool = False + """If true, train with bf16 mixed precision training.""" + + params_dtype: torch.dtype = torch.float32 + """dtype used when intializing the weights.""" + + timers: Optional[Callable] = None + """Timers object to call for various timing functions. 
See megatron.core.timers.Timers""" + + finalize_model_grads_func: Optional[Callable] = None + """Function that finalizes gradients on all workers. Could include ensuring that grads are + all-reduced across data parallelism, pipeline parallelism, and sequence parallelism + dimensions. + """ + + grad_scale_func: Optional[Callable] = None + """If using loss scaling, this function should take the loss and return the scaled loss. If + None, no function is called on the loss. + """ + + no_sync_func: Optional[Callable] = None + """Function that creates a context that suppresses asynchronous data-parallel communication. If + the model is an instance of core.distributed.DistributedDataParallel, the default is to use + core.distributed.DistributedDataParallel.no_sync. + """ + + grad_sync_func: Optional[Callable] = None + """Function that launches asynchronous gradient reductions (e.g. distributed optimizer gradient + reduce-scatters). The function should take one argument: an iterable of parameters whose + gradients are to be synchronized. + """ + + param_sync_func: Optional[Callable] = None + """Function that launches asynchronous parameter synchronizations (e.g. distributed optimizer + parameter all-gathers). The function should take one argument: an iterable of parameters to + be synchronized. + """ + + deterministic_mode: bool = False + """If true, code that has deterministic execution will be chosen. This usually + means slower execution, but is good for debugging and testing. Defaults to False.""" + + enable_autocast: bool = False + """If true runs the forward step function inside torch.autocast context.""" + + autocast_dtype: Optional[torch.dtype] = None + """dtype to pass to torch.amp.autocast when enabled. If None, is set to pipeline_dtype.""" + + num_microbatches_with_partial_activation_checkpoints: Optional[int] = None + """If int, set the number of microbatches where not all of the layers will be checkpointed and + recomputed. The rest of the microbatches within the window of maximum outstanding + microbatches will recompute all layers (either full recompute or selective recompute). If + None, the checkpoint and recompute will be left up to the forward_step function. + + """ + + ################### + # Optimizations + ################### + gradient_accumulation_fusion: bool = False + """If true, fuses weight gradient accumulation to GEMMs. Requires the custom CUDA extension + fused_weight_gradient_mlp_cuda module. To use gradient_accumulation_fusion you must install + APEX with --cpp_ext and --cuda_ext. For example: "pip install --global-option=\"--cpp_ext\" + --global-option=\"--cuda_ext\" ". Note that the extension requires CUDA>=11. Otherwise, you + must turn off gradient accumulation fusion. + """ + + async_tensor_model_parallel_allreduce: bool = False + """NOTE: Deprecated. This flag is ignored.""" + + use_te_rng_tracker: bool = False + """If true, uses RNG state tracker in TransformerEngine if exists. + """ + + tp_comm_overlap: bool = False + """If true, allows overlapping of Linear layer execution with tensor parallel communication + collectives like AllGather/ReduceScatter. Overlapping is done for the linear layers wherever + possible during the forward and the backward pass. + """ + + tp_comm_bulk_wgrad: bool = True + """If true, allows All-Gather overlap with Bprop activation gradient GEMM. Don't care if + tp_comm_overlap is False. + """ + + tp_comm_bulk_dgrad: bool = True + """If true, allows Reduce-Scatter overlap with Bprop weight gradient GEMM. 
Don't care if + tp_comm_overlap is False. + """ + + tp_comm_overlap_ag: bool = True + """If true, allows All-Gather overlap with GEMM by pipelining the GEMM and All-Gather. + Don't care if tp_comm_overlap is False. + """ + + tp_comm_overlap_rs: bool = True + """If true, allows Reduce-Scatter overlap with GEMM by pipelining the GEMM and Reduce-Scatter. + Don't care if tp_comm_overlap is False. + """ + + tp_comm_overlap_rs_dgrad: bool = False + """If true, allows Reduce-Scatter overlap with DGRAD GEMM by pipelining the + GEMM and Reduce-Scatter splits. Don't care if tp_comm_overlap is False. + """ + + tp_comm_split_ag: bool = True + """Deprecated from TransformerEngine v1.6.0. + If true, allows All-Gather overlap with Fprop GEMM by pipelining the GEMM and All-Gather + splits. Don't care if tp_comm_overlap is False. + """ + + tp_comm_atomic_ag: bool = False + """Deprecated from TransformerEngine v1.6.0. + If true, allows All-Gather overlap with Fprop GEMM by pipelining the GEMM and All-Gather + both done atomically. Don't care if tp_comm_overlap is False. + """ + + tp_comm_split_rs: bool = True + """Deprecated from TransformerEngine v1.6.0. + If true, allows Reduce-Scatter overlap with Fprop GEMM by pipelining the GEMM and + Reduce-Scatter splits. Don't care if tp_comm_overlap is False. + """ + + tp_comm_atomic_rs: bool = False + """Deprecated from TransformerEngine v1.6.0. + If true, allows Reduce-Scatter overlap with Fprop GEMM by pipelining the GEMM and + Reduce-Scatter both done atomically. Don't care if tp_comm_overlap is False. + """ + + cross_entropy_loss_fusion: bool = False + """If this is enabled, the fused cross entropy implementation would be used. + Defaults to False. + """ + + tp_comm_overlap_disable_qkv: bool = False + """ + If true, the AllGather -> Gemm overlap for QKV gets disabled + """ + + tp_comm_overlap_disable_fc1: bool = False + """ + If true, the AllGather -> Gemm overlap for FC1 layer of MLP gets disabled + """ + + tp_comm_bootstrap_backend: str = 'nccl' + """ + Set the bootstrapping backend out of 'nccl', 'mpi', and 'gloo' + """ + + ################### + # Pipeline Parallel + ################### + pipeline_dtype: torch.dtype = None + """dtype used in p2p communication, usually params_dtype""" + + variable_seq_lengths: bool = False + """Support for variable sequence lengths across microbatches. Setting this communicates the size + of tensors during pipeline parallelism communication, because of this extra overhead it + should only be set if the sequence length varies by microbatch within a global batch. + """ + + overlap_p2p_comm: bool = False + """When True some of the peer to peer communication for pipeline parallelism will overlap with + computation. Must be False if batch_p2p_comm is true. + """ + + batch_p2p_comm: bool = True + """Use batch_isend_irecv instead of individual isend/irecv calls. Must be False if + overlap_p2p_comm is True. + """ + + batch_p2p_sync: bool = True + """When using batch_isend_irecv, do a cuda.device.synchronize afterward to work around a bug in + older version of PyTorch. + """ + + use_ring_exchange_p2p: bool = False + """Use custom ring_exchange kernel instead of torch.distributed.batch_isend_irecv(). Requires + custom built torch with torch.distributed.ring_exchange. + """ + + deallocate_pipeline_outputs: bool = False + """If True, output data is deallocated after the tensor is sent to the next pipeline stage. + Helps with saving memory, does nothing when pipeline parallel is not used. 
+ """ + + defer_embedding_wgrad_compute: bool = False + """If true, defers the embedding WGRAD GEMMs while pipeline flush is + taking place enabling us to hide pipeline flush latency. Defaults to False. + """ + + wgrad_deferral_limit: int = 0 + """This value tunes the number of micro-batches for which the embedding weight gradient compute + needs to be deferred to pipeline flush, this argument is invalid if + `defer_embedding_wgrad_compute` is False. + Defaults to 0, which means all micro-batches are deferred. + """ + + pipeline_model_parallel_split_rank: Optional[int] = None + """If int, rank where encoder and decoder should be split in cases where the model has both an + encoder and decoder (e.g., T5). Ignored if None. + """ + + overlap_p2p_comm_warmup_flush: bool = False + """If true, overlap communication and computation in warm up and flush phase. + Only valid when overlap_p2p_comm is True and batch_p2p_comm is False. + Defaults to False. + """ + + microbatch_group_size_per_vp_stage: Optional[int] = None + """This value specifies the number of micro-batches that are executed + at a time for a given virtual stage (both forward and backward). + Default (in __post_init__() method below) to pipeline_parallel_size + which specifies a depth-first schedule. + Example: for PP=2 VP=2, when microbatch_group_size_per_vp_stage=2, + num_microbatches = 4, we have + rank 0 | 0 1 0 1 2 3 2 3 + rank 1 | 0 1 0 1 2 3 2 3 + When microbatch_group_size_per_vp_stage=3, num_microbatches = 5, + we have + rank 0 | 0 1 2 0 1 2 3 4 3 4 + rank 1 | 0 1 2 0 1 2 3 4 3 4 + """ + + ################### + # CPU Offloading + ################### + cpu_offloading: bool = False + """When set to True, all the activations are offloaded to the CPU asynchronously.""" + + cpu_offloading_num_layers: int = 0 + """Tells the number of transformer layers for which activations has to be offloaded.""" + + _cpu_offloading_context: Optional[ContextManager] = ( + None + # Used for internal use only, not to be set by a user. + # TODO: Need to move to the 'right' place when possible. + ) + """For internal use only, do not set.""" + + cpu_offloading_activations: bool = True + """If True, offloads the activations to CPU.""" + + cpu_offloading_weights: bool = True + """If True, offloads the weights to CPU.""" + + ################### + # Timing + ################### + barrier_with_L1_time: bool = True + """If true, use barrier with level 1 time measurements. It is up to the user to make sure + calling barrier with their timers will not result in hangs. This can happen if for example + the user adds a level 1 timer that is not called by all ranks. + """ + + def __post_init__(self): + """Python dataclass method that is used to modify attributes after initialization. + See https://docs.python.org/3/library/dataclasses.html#post-init-processing for more + details. 
+ """ + if self.sequence_parallel: + if self.tensor_model_parallel_size <= 1: + raise ValueError("Can not use sequence paralllelism without tensor parallelism") + + if self.expert_tensor_parallel_size is None: + self.expert_tensor_parallel_size = self.tensor_model_parallel_size + + if self.pipeline_model_parallel_size > 1: + if self.pipeline_dtype is None: + raise ValueError( + "When using pipeline parallelism, pipeline_dtype must be specified" + ) + + if self.autocast_dtype is None: + self.autocast_dtype = self.params_dtype + + if self.defer_embedding_wgrad_compute and self.pipeline_model_parallel_size == 1: + raise ValueError( + "Cannot defer embedding wgrad compute when pipeline model parallel is not used" + ) + + if self.defer_embedding_wgrad_compute and not self.gradient_accumulation_fusion: + raise ValueError( + "Cannot defer embedding wgrad compute when gradient accumulation fusion is not used" + ) + + if self.defer_embedding_wgrad_compute and self.wgrad_deferral_limit < 0: + raise ValueError( + "Wgrad deferral limit should be greater than or equal to 0 when it is enabled!" + ) + + if self.expert_model_parallel_size > 1 and self.tensor_model_parallel_size > 1: + if self.sequence_parallel is False: + raise ValueError( + "When using expert parallelism and tensor parallelism, " + "sequence parallelism must be used" + ) + + if self.microbatch_group_size_per_vp_stage is None: + self.microbatch_group_size_per_vp_stage = self.pipeline_model_parallel_size + + if self.overlap_p2p_comm_warmup_flush: + if not self.overlap_p2p_comm or self.batch_p2p_comm: + raise ValueError( + "Pipeline parallel communication overlapping in warmup and flush is only " + "compatible with overlap_p2p_comm but not batch_p2p_comm." + ) diff --git a/megatron/core/models/T5/t5_model.py b/megatron/core/models/T5/t5_model.py index 462fbfc..6833559 100644 --- a/megatron/core/models/T5/t5_model.py +++ b/megatron/core/models/T5/t5_model.py @@ -10,9 +10,11 @@ from megatron.core.config_logger import has_config_logger_enabled, log_config_to from megatron.core.dist_checkpointing.mapping import ShardedStateDict from megatron.core.enums import ModelType from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding +from megatron.core.models.common.embeddings.relative_pos_embedding import RelativePositionEmbedding from megatron.core.models.common.embeddings.rotary_pos_embedding import RotaryEmbedding from megatron.core.models.common.language_module.language_module import LanguageModule from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.tensor_parallel.mappings import scatter_to_tensor_model_parallel_region from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.spec_utils import ModuleSpec from megatron.core.transformer.transformer_block import TransformerBlock @@ -135,9 +137,13 @@ class T5Model(LanguageModule): fp16_lm_cross_entropy: bool = False, parallel_output: bool = True, share_embeddings_and_output_weights: bool = False, - position_embedding_type: Literal['learned_absolute', 'rope'] = 'learned_absolute', + position_embedding_type: Literal[ + 'learned_absolute', 'rope', 'relative' + ] = 'learned_absolute', rotary_percent: float = 1.0, seq_len_interpolation_factor: Optional[float] = None, + relative_attention_num_buckets: int = 32, + relative_attention_max_distance: int = 128, add_encoder: bool = True, add_decoder: bool = True, ): @@ -193,6 +199,23 @@ class T5Model(LanguageModule): 
use_cpu_initialization=self.config.use_cpu_initialization, ) + # Relative Position Embeddings + if self.position_embedding_type == 'relative': + self.encoder_relative_pos_emb = RelativePositionEmbedding( + bidirectional=True, + init_method=self.config.init_method, + num_attention_heads=self.config.num_attention_heads, + relative_attention_num_buckets=relative_attention_num_buckets, + relative_attention_max_distance=relative_attention_max_distance, + ) + self.decoder_relative_pos_emb = RelativePositionEmbedding( + bidirectional=False, + init_method=self.config.init_method, + num_attention_heads=self.config.num_attention_heads, + relative_attention_num_buckets=relative_attention_num_buckets, + relative_attention_max_distance=relative_attention_max_distance, + ) + # Transformer encoder encoder_spec, decoder_spec = ( self.transformer_encoder_layer_spec, @@ -284,6 +307,27 @@ class T5Model(LanguageModule): ) rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len) + # Relative positional embeddings + encoder_attention_bias_parallel = None + if self.position_embedding_type == 'relative': + query_seq_length = RelativePositionEmbedding.get_relative_seq_len( + inference_params, self.encoder, encoder_input, self.config + ) + key_seq_length = query_seq_length + attention_bias = self.encoder_relative_pos_emb(query_seq_length, key_seq_length) + + # Scatter attention_bias to TP ranks + # First, reshape [1, num_head, seqlen_q, seqlen_kv] to + # [1, seqlen_q, seqlen_kv, num_head] to be scatter along + # the last (num_heads dimension) + attention_bias = torch.permute(attention_bias, (0, 2, 3, 1)) + # Then, scatter to TP region + attention_bias_parallel = scatter_to_tensor_model_parallel_region(attention_bias) + # Lastly, revert the dimension back to [1, num_head, seqlen_q, seqlen_kv] + encoder_attention_bias_parallel = torch.permute( + attention_bias_parallel, (0, 3, 1, 2) + ) + # Run encoder. if self.add_encoder: encoder_hidden_states = self.encoder( @@ -291,6 +335,7 @@ class T5Model(LanguageModule): attention_mask=encoder_attn_mask, inference_params=inference_params, rotary_pos_emb=rotary_pos_emb, + attention_bias=encoder_attention_bias_parallel, ) else: encoder_hidden_states = self.encoder_hidden_state @@ -315,10 +360,29 @@ class T5Model(LanguageModule): rotary_pos_emb = None if self.position_embedding_type == 'rope': rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( - inference_params, self.encoder, encoder_input, self.config, packed_seq_params + inference_params, self.decoder, decoder_input, self.config, packed_seq_params ) rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len) + # Relative positional embeddings + decoder_attention_bias_parallel = None + if self.position_embedding_type == 'relative': + query_seq_length = RelativePositionEmbedding.get_relative_seq_len( + inference_params, self.decoder, decoder_input, self.config + ) + key_seq_length = query_seq_length + attention_bias = self.decoder_relative_pos_emb(query_seq_length, key_seq_length) + + # Scatter attention_bias to TP ranks + # First, reshape [1, num_head, seqlen_q, seqlen_kv] to + # [1, seqlen_q, seqlen_kv, num_head] to be scatter along + # the last (num_heads dimension) + attention_bias = torch.permute(attention_bias, (0, 2, 3, 1)) + # Then, scatter to TP region + attention_bias_parallel = scatter_to_tensor_model_parallel_region(attention_bias) + # Lastly, revert the dimension back to [1, num_head, seqlen_q, seqlen_kv] + decoder_attention_bias_parallel = torch.permute(attention_bias_parallel, (0, 3, 1, 2)) + # Run decoder. 
decoder_hidden_states = self.decoder( hidden_states=decoder_input, @@ -327,12 +391,15 @@ class T5Model(LanguageModule): context_mask=encoder_decoder_attn_mask, inference_params=inference_params, rotary_pos_emb=rotary_pos_emb, + attention_bias=decoder_attention_bias_parallel, ) if self.post_process: - lm_logits = self.lm_head( - decoder_hidden_states, self.shared_embedding_or_output_weight() - ) + output_weight = None + if self.share_embeddings_and_output_weights: + output_weight = self.shared_embedding_or_output_weight() + lm_logits = self.lm_head(decoder_hidden_states, word_embeddings_weight=output_weight) + if lm_labels is None: # [s b h] => [b s h] return lm_logits.transpose(0, 1).contiguous() diff --git a/megatron/core/models/common/embeddings/relative_pos_embedding.py b/megatron/core/models/common/embeddings/relative_pos_embedding.py new file mode 100644 index 0000000..af17bce --- /dev/null +++ b/megatron/core/models/common/embeddings/relative_pos_embedding.py @@ -0,0 +1,173 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +import logging +import math +from typing import Callable + +import torch +from torch import Tensor, nn + +from megatron.core.inference_params import InferenceParams +from megatron.core.transformer.transformer_block import TransformerBlock +from megatron.core.transformer.transformer_config import TransformerConfig + +logger = logging.getLogger(__name__) + + +__all__ = ['RelativePositionEmbedding'] + + +class RelativePositionEmbedding(nn.Module): + """Relative Position Embedding for language model. + + Args: + + """ + + def __init__( + self, + bidirectional: bool, + init_method: Callable, + num_attention_heads: int, + relative_attention_num_buckets: int = 32, + relative_attention_max_distance: int = 128, + ) -> None: + super().__init__() + + self.bidirectional = bidirectional + self.relative_attention_num_buckets = relative_attention_num_buckets + self.relative_attention_max_distance = relative_attention_max_distance + self.relative_attention_bias = torch.nn.Embedding( + self.relative_attention_num_buckets, num_attention_heads + ) + init_method(self.relative_attention_bias.weight) + + def _relative_position_bucket( + self, relative_position, bidirectional=True, num_buckets=32, max_distance=128 + ): + """ + Adapted from HuggingFace T5 Model: + https://github.com/huggingface/transformers/blob/329f5dbf97a5cb2473914c88c05aa3dcb242e19a/ + src/transformers/models/t5/modeling_t5.py#L397 + + Translate relative position to a bucket number for relative attention. + The relative position is defined as memory_position - query_position, i.e. the + distance in tokens from the attending position to the attended-to position. + If bidirectional=False, then positive relative positions are invalid. We use + smaller buckets for small absolute relative_position and larger buckets for + larger absolute relative_positions. All relative positions >=max_distance map + to the same bucket. All relative positions <=-max_distance map to the same bucket. + This should allow for more graceful generalization to longer sequences than the + model has been trained on. 
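# [Editor's illustration, not part of the patch] The bucketing scheme described above on
# concrete numbers for the unidirectional case (num_buckets=32, max_distance=128): the
# first 16 buckets cover exact distances 0..15, the next 16 cover distances 16..127 on a
# logarithmic scale, and every distance >= 128 falls into the last bucket.
import math

def bucket_for_distance(d: int, num_buckets: int = 32, max_distance: int = 128) -> int:
    max_exact = num_buckets // 2                       # 16 exact buckets
    if d < max_exact:
        return d
    log_bucket = max_exact + int(
        math.log(d / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact)
    )
    return min(log_bucket, num_buckets - 1)

assert bucket_for_distance(3) == 3        # exact regime
assert bucket_for_distance(20) == 17      # logarithmic regime
assert bucket_for_distance(500) == 31     # clamped to the final bucket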
+ + Args: + relative_position: an int32 Tensor + bidirectional: a boolean - whether the attention is bidirectional + num_buckets: an integer + max_distance: an integer + Returns: + a Tensor with the same shape as relative_position, + containing int32 values in the range [0, num_buckets) + """ + relative_buckets = 0 + if bidirectional: + num_buckets //= 2 + relative_buckets += (relative_position > 0).to(torch.long) * num_buckets + relative_position = torch.abs(relative_position) + else: + relative_position = -torch.min(relative_position, torch.zeros_like(relative_position)) + # now relative_position is in the range [0, inf) + + # half of the buckets are for exact increments in positions + max_exact = num_buckets // 2 + is_small = relative_position < max_exact + + # The other half of the buckets are for logarithmically bigger + # bins in positions up to max_distance + relative_position_if_large = max_exact + ( + torch.log(relative_position.float() / max_exact) + / math.log(max_distance / max_exact) + * (num_buckets - max_exact) + ).to(torch.long) + relative_position_if_large = torch.min( + relative_position_if_large, torch.full_like(relative_position_if_large, num_buckets - 1) + ) + + relative_buckets += torch.where(is_small, relative_position, relative_position_if_large) + return relative_buckets + + def _compute_bias(self, query_length, key_length): + """ + Adapted from HuggingFace T5 Model + https://github.com/huggingface/transformers/blob/329f5dbf97a5cb2473914c88c05aa3dcb242e19a/ + src/transformers/models/t5/modeling_t5.py#L444C9-L444C21 + + Compute binned relative position bias + + Args: + query_length (int): The length of the query sequence + (e.g., the input sequence in attention). + key_length (int): The length of the key sequence + (e.g., the sequence to compare against in attention). + + Returns: + torch.Tensor: A tensor representing the relative position bias, with shape + (1, num_heads, query_length, key_length). + """ + device = self.relative_attention_bias.weight.device + context_position = torch.arange(query_length, dtype=torch.long, device=device)[:, None] + memory_position = torch.arange(key_length, dtype=torch.long, device=device)[None, :] + + relative_position = memory_position - context_position # shape(query_length,key_length) + relative_position_bucket = self._relative_position_bucket( + relative_position, # shape (query_length, key_length) + bidirectional=self.bidirectional, + num_buckets=self.relative_attention_num_buckets, + max_distance=self.relative_attention_max_distance, + ) + values = self.relative_attention_bias( + relative_position_bucket + ) # shape(query_length,key_length,num_heads) + values = values.permute([2, 0, 1]).unsqueeze( + 0 + ) # shape(1, num_heads,query_length,key_length) + return values + + @staticmethod + def get_relative_seq_len( + inference_params: InferenceParams, + transformer: TransformerBlock, + transformer_input: Tensor, + transformer_config: TransformerConfig, + ) -> float: + """Function to get the rotary sequence length. 
+ + Args: + inference_params : Used during Inference time + transformer (TransformerBlock): The transformer block (decoder/encoder) used + by the model + transformer_input (Tensor): Input tensor to the transformer + transformer_config (TransformerConfig): Transformer config used by the model + + Returns: + float: The rotary sequence length + """ + if inference_params is not None: + relative_seq_len = inference_params.max_sequence_length + else: + if transformer.input_tensor is not None: + relative_seq_len = transformer.input_tensor.size(0) + else: + relative_seq_len = transformer_input.size(0) + + if transformer_config.sequence_parallel: + relative_seq_len *= transformer_config.tensor_model_parallel_size + + return relative_seq_len + + def forward(self, query_seq_length, key_seq_length): + """ + Args: + Returns: + """ + return self._compute_bias(query_seq_length, key_seq_length) diff --git a/megatron/core/models/common/embeddings/rotary_pos_embedding.py b/megatron/core/models/common/embeddings/rotary_pos_embedding.py index c2837c6..407cc10 100644 --- a/megatron/core/models/common/embeddings/rotary_pos_embedding.py +++ b/megatron/core/models/common/embeddings/rotary_pos_embedding.py @@ -1,213 +1,215 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from megatron.core.transformer.transformer_config import TransformerConfig - from megatron.core.transformer.transformer_block import TransformerBlock - from megatron.core.inference_params import InferenceParams - from megatron.core.packed_seq_params import PackedSeqParams - -import logging -import math -from functools import lru_cache - -import torch -from torch import Tensor, nn - -from megatron.core import parallel_state -from megatron.core.models.common.embeddings.rope_utils import ( # for backward compatibility; pylint: disable=unused-import - _apply_rotary_pos_emb_bshd, - _apply_rotary_pos_emb_thd, - _rotate_half, - apply_rotary_pos_emb, - get_pos_emb_on_this_cp_rank, -) - -logger = logging.getLogger(__name__) - - -__all__ = ['RotaryEmbedding'] - - -class RotaryEmbedding(nn.Module): - """Rotary Embedding for language model. - - Args: - kv_channels (int): Projection weights dimension in multi-head attention. Obtained - from transformer config - rotary_percent (float): Percent of rotary dimension to use for rotary position - embeddings. - rotary_interleaved (bool, optional): If True, interleaved rotary position embeddings. - Defaults to False. - seq_len_interpolation_factor (float, optional): scale of linearly interpolating RoPE - for longer sequences. The value must be a float larger than 1.0. Defaults to None - rotary_base (int, optional): Base period for rotary position embeddings. Defaults to - 10000. - rope_scaling (bool, optional): Apply rope scaling as used in llama 3.1 - use_cpu_initialization (bool, optional): If False, initialize the inv_freq directly - on the GPU. 
Defaults to False - """ - - def __init__( - self, - kv_channels: int, - rotary_percent: float, - rotary_interleaved: bool = False, - seq_len_interpolation_factor: float = None, - rotary_base: int = 10000, - rope_scaling: bool = False, - use_cpu_initialization: bool = False, - ) -> None: - super().__init__() - - dim = kv_channels - if rotary_percent < 1.0: - dim = int(dim * rotary_percent) - self.rotary_interleaved = rotary_interleaved - - self.seq_len_interpolation_factor = seq_len_interpolation_factor - device = 'cpu' if use_cpu_initialization else torch.cuda.current_device() - self.inv_freq = 1.0 / ( - rotary_base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim) - ) - - if rope_scaling: - self.inv_freq = self._apply_scaling(self.inv_freq) - - def _apply_scaling( - self, - freqs, - factor=8, - low_freq_factor=1, - high_freq_factor=4, - original_max_position_embeddings=8192, - ): - # This implementation is adapted from: - # https://github.com/huggingface/transformers/blob/2a5a6ad18aa22e98429bb5ecb880660328030ea0/src/transformers/modeling_rope_utils.py#L303-L343 - - factor = factor # `8` in the original implementation - low_freq_factor = low_freq_factor # `1` in the original implementation - high_freq_factor = high_freq_factor # `4` in the original implementation - old_context_len = original_max_position_embeddings # `8192` in the original implementation - - low_freq_wavelen = old_context_len / low_freq_factor - high_freq_wavelen = old_context_len / high_freq_factor - - wavelen = 2 * math.pi / freqs - # wavelen < high_freq_wavelen: do nothing - # wavelen > low_freq_wavelen: divide by factor - inv_freq_llama = torch.where(wavelen > low_freq_wavelen, freqs / factor, freqs) - # otherwise: interpolate between the two, using a smooth factor - smooth_factor = (old_context_len / wavelen - low_freq_factor) / ( - high_freq_factor - low_freq_factor - ) - smoothed_inv_freq = ( - 1 - smooth_factor - ) * inv_freq_llama / factor + smooth_factor * inv_freq_llama - is_medium_freq = ~(wavelen < high_freq_wavelen) * ~(wavelen > low_freq_wavelen) - inv_freq_llama = torch.where(is_medium_freq, smoothed_inv_freq, inv_freq_llama) - - return inv_freq_llama - - def get_freqs_non_repeated(self, max_seq_len: int, offset: int = 0) -> Tensor: - """Generates matrix of frequencies based on positions in the sequence, - used to create positional encodings""" - seq = ( - torch.arange(max_seq_len, device=self.inv_freq.device, dtype=self.inv_freq.dtype) - + offset - ) - - if self.seq_len_interpolation_factor is not None: - seq *= 1 / self.seq_len_interpolation_factor - - freqs = torch.outer(seq, self.inv_freq) # [seq len, dim] - - return freqs - - def get_cos_sin(self, max_seq_len: int, offset: int = 0) -> (Tensor, Tensor): - """Cosine and sine values for RoPE are precomputed for all positions up to the maximum - sequence length""" - freqs = self.get_freqs_non_repeated(max_seq_len, offset) - cos = torch.cos(freqs) - sin = torch.sin(freqs) - return cos, sin - - @lru_cache(maxsize=32) - def forward(self, max_seq_len: int, offset: int = 0, packed_seq: bool = False) -> Tensor: - """Forward pass of RoPE embedding. - - Args: - max_seq_len (int): Maximum size of sequence - offset (int, optional): RoPE offset. Defaults to 0. - packed_seq (bool, optional): Whether to use packed sequence. Defaults to False. - - Returns: - Tensor: Embeddings after applying RoPE. 
- """ - if self.inv_freq.device.type == 'cpu': - # move `inv_freq` to GPU once at the first micro-batch forward pass - self.inv_freq = self.inv_freq.to(device=torch.cuda.current_device()) - - freqs = self.get_freqs_non_repeated(max_seq_len, offset) - # first part even vector components, second part odd vector components, - # 2 * dim in dimension size - if not self.rotary_interleaved: - emb = torch.cat((freqs, freqs), dim=-1) - else: - emb = torch.stack((freqs.view(-1, 1), freqs.view(-1, 1)), dim=-1).view( - freqs.shape[0], -1 - ) - # emb [seq_length, .., dim] - emb = emb[:, None, None, :] - if parallel_state.get_context_parallel_world_size() > 1 and not packed_seq: - # slice rotary_pos_emb along sequence dimension and select the parition of the current - # CP rank - emb = get_pos_emb_on_this_cp_rank(emb, 0) - return emb - - def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): - state_dict.pop(f'{prefix}inv_freq', None) - return super()._load_from_state_dict(state_dict, prefix, *args, **kwargs) - - def get_rotary_seq_len( - self, - inference_params: InferenceParams, - transformer: TransformerBlock, - transformer_input: Tensor, - transformer_config: TransformerConfig, - packed_seq_params: PackedSeqParams, - ) -> float: - """Function to get the rotary sequence length. - - Args: - inference_params : Used during Inference time - transformer (TransformerBlock): The transformer block (decoder/encoder) used - by the model - transformer_input (Tensor): Input tensor to the transformer - transformer_config (TransformerConfig): Transformer config used by the model - packed_seq_params (PackedSeqParams): Packed sequence params - - Returns: - float: The rotary sequence length - """ - if packed_seq_params is not None: - # max_seqlen are the max sequence length in the packed sequence before being divived - # by the tp and cp size. - return max(packed_seq_params.max_seqlen_q, packed_seq_params.max_seqlen_kv) - elif inference_params is not None: - rotary_seq_len = inference_params.max_sequence_length - else: - if transformer.input_tensor is not None: - rotary_seq_len = transformer.input_tensor.size(0) - else: - rotary_seq_len = transformer_input.size(0) - - if transformer_config.sequence_parallel: - rotary_seq_len *= transformer_config.tensor_model_parallel_size - - rotary_seq_len *= transformer_config.context_parallel_size - - return rotary_seq_len +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from megatron.core.transformer.transformer_config import TransformerConfig + from megatron.core.transformer.transformer_block import TransformerBlock + from megatron.core.inference_params import InferenceParams + from megatron.core.packed_seq_params import PackedSeqParams + +import logging +import math +from functools import lru_cache + +import torch +from torch import Tensor, nn + +from megatron.core import parallel_state +from megatron.core.models.common.embeddings.rope_utils import ( # for backward compatibility; pylint: disable=unused-import + _apply_rotary_pos_emb_bshd, + _apply_rotary_pos_emb_thd, + _rotate_half, + apply_rotary_pos_emb, + get_pos_emb_on_this_cp_rank, +) + +logger = logging.getLogger(__name__) + + +__all__ = ['RotaryEmbedding'] + + +class RotaryEmbedding(nn.Module): + """Rotary Embedding for language model. + + Args: + kv_channels (int): Projection weights dimension in multi-head attention. 
Obtained + from transformer config + rotary_percent (float): Percent of rotary dimension to use for rotary position + embeddings. + rotary_interleaved (bool, optional): If True, interleaved rotary position embeddings. + Defaults to False. + seq_len_interpolation_factor (float, optional): scale of linearly interpolating RoPE + for longer sequences. The value must be a float larger than 1.0. Defaults to None + rotary_base (int, optional): Base period for rotary position embeddings. Defaults to + 10000. + rope_scaling (bool, optional): Apply rope scaling as used in llama 3.x. + rope_scaling_factor (float, optional): rope scaling factor in llama 3.x. Defaults to 8. + use_cpu_initialization (bool, optional): If False, initialize the inv_freq directly + on the GPU. Defaults to False + """ + + def __init__( + self, + kv_channels: int, + rotary_percent: float, + rotary_interleaved: bool = False, + seq_len_interpolation_factor: float = None, + rotary_base: int = 10000, + rope_scaling: bool = False, + rope_scaling_factor: float = 8.0, + use_cpu_initialization: bool = False, + ) -> None: + super().__init__() + + dim = kv_channels + if rotary_percent < 1.0: + dim = int(dim * rotary_percent) + self.rotary_interleaved = rotary_interleaved + + self.seq_len_interpolation_factor = seq_len_interpolation_factor + device = 'cpu' if use_cpu_initialization else torch.cuda.current_device() + self.inv_freq = 1.0 / ( + rotary_base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim) + ) + + if rope_scaling: + self.inv_freq = self._apply_scaling(self.inv_freq, factor=rope_scaling_factor) + + def _apply_scaling( + self, + freqs, + factor=8, + low_freq_factor=1, + high_freq_factor=4, + original_max_position_embeddings=8192, + ): + # This implementation is adapted from: + # https://github.com/huggingface/transformers/blob/2a5a6ad18aa22e98429bb5ecb880660328030ea0/src/transformers/modeling_rope_utils.py#L303-L343 + + factor = factor # `8` in the original implementation + low_freq_factor = low_freq_factor # `1` in the original implementation + high_freq_factor = high_freq_factor # `4` in the original implementation + old_context_len = original_max_position_embeddings # `8192` in the original implementation + + low_freq_wavelen = old_context_len / low_freq_factor + high_freq_wavelen = old_context_len / high_freq_factor + + wavelen = 2 * math.pi / freqs + # wavelen < high_freq_wavelen: do nothing + # wavelen > low_freq_wavelen: divide by factor + inv_freq_llama = torch.where(wavelen > low_freq_wavelen, freqs / factor, freqs) + # otherwise: interpolate between the two, using a smooth factor + smooth_factor = (old_context_len / wavelen - low_freq_factor) / ( + high_freq_factor - low_freq_factor + ) + smoothed_inv_freq = ( + 1 - smooth_factor + ) * inv_freq_llama / factor + smooth_factor * inv_freq_llama + is_medium_freq = ~(wavelen < high_freq_wavelen) * ~(wavelen > low_freq_wavelen) + inv_freq_llama = torch.where(is_medium_freq, smoothed_inv_freq, inv_freq_llama) + + return inv_freq_llama + + def get_freqs_non_repeated(self, max_seq_len: int, offset: int = 0) -> Tensor: + """Generates matrix of frequencies based on positions in the sequence, + used to create positional encodings""" + seq = ( + torch.arange(max_seq_len, device=self.inv_freq.device, dtype=self.inv_freq.dtype) + + offset + ) + + if self.seq_len_interpolation_factor is not None: + seq *= 1 / self.seq_len_interpolation_factor + + freqs = torch.outer(seq, self.inv_freq) # [seq len, dim] + + return freqs + + def get_cos_sin(self, max_seq_len: 
int, offset: int = 0) -> (Tensor, Tensor): + """Cosine and sine values for RoPE are precomputed for all positions up to the maximum + sequence length""" + freqs = self.get_freqs_non_repeated(max_seq_len, offset) + cos = torch.cos(freqs) + sin = torch.sin(freqs) + return cos, sin + + @lru_cache(maxsize=32) + def forward(self, max_seq_len: int, offset: int = 0, packed_seq: bool = False) -> Tensor: + """Forward pass of RoPE embedding. + + Args: + max_seq_len (int): Maximum size of sequence + offset (int, optional): RoPE offset. Defaults to 0. + packed_seq (bool, optional): Whether to use packed sequence. Defaults to False. + + Returns: + Tensor: Embeddings after applying RoPE. + """ + if self.inv_freq.device.type == 'cpu': + # move `inv_freq` to GPU once at the first micro-batch forward pass + self.inv_freq = self.inv_freq.to(device=torch.cuda.current_device()) + + freqs = self.get_freqs_non_repeated(max_seq_len, offset) + # first part even vector components, second part odd vector components, + # 2 * dim in dimension size + if not self.rotary_interleaved: + emb = torch.cat((freqs, freqs), dim=-1) + else: + emb = torch.stack((freqs.view(-1, 1), freqs.view(-1, 1)), dim=-1).view( + freqs.shape[0], -1 + ) + # emb [seq_length, .., dim] + emb = emb[:, None, None, :] + if parallel_state.get_context_parallel_world_size() > 1 and not packed_seq: + # slice rotary_pos_emb along sequence dimension and select the parition of the current + # CP rank + emb = get_pos_emb_on_this_cp_rank(emb, 0) + return emb + + def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): + state_dict.pop(f'{prefix}inv_freq', None) + return super()._load_from_state_dict(state_dict, prefix, *args, **kwargs) + + def get_rotary_seq_len( + self, + inference_params: InferenceParams, + transformer: TransformerBlock, + transformer_input: Tensor, + transformer_config: TransformerConfig, + packed_seq_params: PackedSeqParams, + ) -> float: + """Function to get the rotary sequence length. + + Args: + inference_params : Used during Inference time + transformer (TransformerBlock): The transformer block (decoder/encoder) used + by the model + transformer_input (Tensor): Input tensor to the transformer + transformer_config (TransformerConfig): Transformer config used by the model + packed_seq_params (PackedSeqParams): Packed sequence params + + Returns: + float: The rotary sequence length + """ + if packed_seq_params is not None: + # max_seqlen are the max sequence length in the packed sequence before being divived + # by the tp and cp size. + return max(packed_seq_params.max_seqlen_q, packed_seq_params.max_seqlen_kv) + elif inference_params is not None: + rotary_seq_len = inference_params.max_sequence_length + else: + if transformer is not None and transformer.input_tensor is not None: + rotary_seq_len = transformer.input_tensor.size(0) + else: + rotary_seq_len = transformer_input.size(0) + + if transformer_config.sequence_parallel: + rotary_seq_len *= transformer_config.tensor_model_parallel_size + + rotary_seq_len *= transformer_config.context_parallel_size + + return rotary_seq_len diff --git a/megatron/core/models/gpt/gpt_layer_specs.py b/megatron/core/models/gpt/gpt_layer_specs.py index d0e48c1..225626f 100644 --- a/megatron/core/models/gpt/gpt_layer_specs.py +++ b/megatron/core/models/gpt/gpt_layer_specs.py @@ -1,350 +1,383 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
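As a worked example of the Llama-style RoPE scaling applied in RotaryEmbedding._apply_scaling above, the sketch below builds inv_freq for a toy rotary dimension and shows that low-frequency components (wavelengths longer than the original 8192-token context) are divided by the scaling factor while high-frequency components are left unchanged. The standalone function name and the dimension, base, and factor values are illustrative assumptions.

import math
import torch

def llama_rope_scaling(inv_freq, factor=8.0, low_freq_factor=1.0,
                       high_freq_factor=4.0, old_context_len=8192):
    # Same piecewise rule as _apply_scaling: keep short wavelengths, divide
    # long wavelengths by `factor`, and smoothly interpolate in between.
    low_freq_wavelen = old_context_len / low_freq_factor
    high_freq_wavelen = old_context_len / high_freq_factor
    wavelen = 2 * math.pi / inv_freq
    scaled = torch.where(wavelen > low_freq_wavelen, inv_freq / factor, inv_freq)
    smooth = (old_context_len / wavelen - low_freq_factor) / (high_freq_factor - low_freq_factor)
    smoothed = (1 - smooth) * scaled / factor + smooth * scaled
    is_medium = (wavelen >= high_freq_wavelen) & (wavelen <= low_freq_wavelen)
    return torch.where(is_medium, smoothed, scaled)

dim, base = 64, 10000
inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim))
scaled = llama_rope_scaling(inv_freq)
print(scaled[0] / inv_freq[0])    # ~1.0: highest frequency is unchanged
print(scaled[-1] / inv_freq[-1])  # ~1/8: lowest frequency is stretched by the factor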
- -import warnings -from typing import Optional - -from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add -from megatron.core.models.gpt.moe_module_specs import get_moe_module_spec -from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.dot_product_attention import DotProductAttention -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.identity_op import IdentityOp -from megatron.core.transformer.mlp import MLP, MLPSubmodules -from megatron.core.transformer.multi_latent_attention import ( - MLASelfAttention, - MLASelfAttentionSubmodules, -) -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_block import ( - TransformerBlockSubmodules, - get_num_layers_to_build, -) -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules -from megatron.core.utils import is_te_min_version - -try: - from megatron.core.extensions.transformer_engine import ( - TEColumnParallelLinear, - TEDotProductAttention, - TELayerNormColumnParallelLinear, - TENorm, - TERowParallelLinear, - ) - - HAVE_TE = True -except ImportError: - HAVE_TE = False - -try: - import apex # pylint: disable=unused-import - - from megatron.core.fusions.fused_layer_norm import FusedLayerNorm - - HAVE_APEX = True - LNImpl = FusedLayerNorm -except ImportError: - from megatron.core.transformer.torch_norm import WrappedTorchNorm - - warnings.warn('Apex is not installed. Falling back to Torch Norm') - LNImpl = WrappedTorchNorm - - -def get_gpt_layer_with_transformer_engine_spec( - num_experts: Optional[int] = None, - moe_grouped_gemm: Optional[bool] = False, - qk_layernorm: Optional[bool] = False, - multi_latent_attention: Optional[bool] = False, - fp8: Optional[str] = None, # pylint: disable=unused-arguments - moe_use_legacy_grouped_gemm: Optional[bool] = False, -) -> ModuleSpec: - """Use this spec to use lower-level Transformer Engine modules (required for fp8 training). - - - Args: - num_experts (int, optional): Number of experts. Defaults to None. - moe_grouped_gemm (bool, optional): To use Grouped GEMM. Defaults to False. - qk_layernorm (bool, optional): To use layernorm for queries/keys. Defaults to False. - fp8 (str, optional): Deprecated. For temporary Nemo compatibility. - moe_use_legacy_grouped_gemm (bool, optional): Force use the legacy GroupedMLP. - Defaults to False. - - Returns: - ModuleSpec: Module specification with TE modules - """ - if fp8 is not None: - warnings.warn( - 'The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated' - ' and will be removed soon. Please update your code accordingly.' 
- ) - - mlp = _get_mlp_module_spec( - use_te=True, - num_experts=num_experts, - moe_grouped_gemm=moe_grouped_gemm, - moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, - ) - - if multi_latent_attention: - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=TENorm, - self_attention=ModuleSpec( - module=MLASelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=MLASelfAttentionSubmodules( - linear_q_proj=TEColumnParallelLinear, - linear_q_down_proj=TEColumnParallelLinear, - linear_q_up_proj=TEColumnParallelLinear, - linear_kv_down_proj=TEColumnParallelLinear, - linear_kv_up_proj=TEColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=TERowParallelLinear, - q_layernorm=TENorm if qk_layernorm else IdentityOp, - kv_layernorm=TENorm if qk_layernorm else IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=TENorm if num_experts else IdentityOp, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - ), - ) - else: - - # TENorm significantly harms convergence when used - # for QKLayerNorm if TE Version < 1.9; - # we instead use the Apex implementation. - qk_norm = TENorm if is_te_min_version("1.9.0") else FusedLayerNorm - - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=SelfAttentionSubmodules( - linear_qkv=TELayerNormColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=TERowParallelLinear, - q_layernorm=qk_norm if qk_layernorm else IdentityOp, - k_layernorm=qk_norm if qk_layernorm else IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=TENorm if num_experts else IdentityOp, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - ), - ) - - -def get_gpt_layer_local_spec( - num_experts: Optional[int] = None, - moe_grouped_gemm: Optional[bool] = False, - qk_layernorm: Optional[bool] = False, - multi_latent_attention: Optional[bool] = False, - fp8: Optional[str] = None, # pylint: disable=unused-arguments - moe_use_legacy_grouped_gemm: Optional[bool] = False, -) -> ModuleSpec: - """Use this spec for an implementation using only modules in Megatron-Core. - - - Args: - num_experts (int, optional): Number of experts. Defaults to None. - moe_grouped_gemm (bool, optional): To use Grouped GEMM. Defaults to False. - qk_layernorm (bool, optional): To use layernorm for queries/keys. Defaults to False. - fp8 (str, optional): Deprecated. For temporary Nemo compatibility. - moe_use_legacy_grouped_gemm (bool, optional): Force use the legacy GroupedMLP. - Defaults to False. - - Returns: - ModuleSpec: Module specification with Megatron-Core modules - """ - if fp8 is not None: - warnings.warn( - 'The fp8 argument in "get_gpt_layer_local_spec" has been deprecated' - ' and will be removed soon. Please update your code accordingly.' 
- ) - - mlp = _get_mlp_module_spec( - use_te=False, - num_experts=num_experts, - moe_grouped_gemm=moe_grouped_gemm, - moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, - ) - - if multi_latent_attention: - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=LNImpl, - self_attention=ModuleSpec( - module=MLASelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=MLASelfAttentionSubmodules( - linear_q_proj=ColumnParallelLinear, - linear_q_down_proj=ColumnParallelLinear, - linear_q_up_proj=ColumnParallelLinear, - linear_kv_down_proj=ColumnParallelLinear, - linear_kv_up_proj=ColumnParallelLinear, - core_attention=DotProductAttention, - linear_proj=RowParallelLinear, - q_layernorm=LNImpl if qk_layernorm else IdentityOp, - kv_layernorm=LNImpl if qk_layernorm else IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=LNImpl, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - ), - ) - else: - return ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - input_layernorm=LNImpl, - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=SelfAttentionSubmodules( - linear_qkv=ColumnParallelLinear, - core_attention=DotProductAttention, - linear_proj=RowParallelLinear, - q_layernorm=LNImpl if qk_layernorm else IdentityOp, - k_layernorm=LNImpl if qk_layernorm else IdentityOp, - ), - ), - self_attn_bda=get_bias_dropout_add, - pre_mlp_layernorm=LNImpl, - mlp=mlp, - mlp_bda=get_bias_dropout_add, - sharded_state_dict_keys_map={ - 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', - 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', - }, - ), - ) - - -def _get_mlp_module_spec( - use_te: Optional[bool] = True, - num_experts: Optional[int] = None, - moe_grouped_gemm: Optional[bool] = False, - fp8: Optional[str] = None, # pylint: disable=unused-arguments - moe_use_legacy_grouped_gemm: Optional[bool] = False, -) -> ModuleSpec: - """Helper function to get module spec for MLP/MoE""" - if fp8 is not None: - warnings.warn( - 'The fp8 argument in "_get_mlp_module_spec" has been deprecated' - ' and will be removed soon. Please update your code accordingly.' - ) - - if num_experts is None: - # Dense MLP w/ or w/o TE modules. - return ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=TELayerNormColumnParallelLinear if use_te else ColumnParallelLinear, - linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, - ), - ) - else: - # Mixture of experts with modules in megatron core. - return get_moe_module_spec( - use_te=use_te, - num_experts=num_experts, - moe_grouped_gemm=moe_grouped_gemm, - moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, - ) - - -def get_gpt_decoder_block_spec( - config: TransformerConfig, use_transformer_engine: bool -) -> TransformerBlockSubmodules: - """GPT block spec.""" - if use_transformer_engine: - layer_norm_impl = TENorm - else: - layer_norm_impl = LNImpl - - # Layer specs. 
- dense_layer_spec = ( - get_gpt_layer_with_transformer_engine_spec( - num_experts=None, - moe_grouped_gemm=False, - qk_layernorm=config.qk_layernorm, - multi_latent_attention=config.multi_latent_attention, - moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, - ) - if use_transformer_engine - else get_gpt_layer_local_spec( - num_experts=None, - moe_grouped_gemm=False, - qk_layernorm=config.qk_layernorm, - multi_latent_attention=config.multi_latent_attention, - moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, - ) - ) - moe_layer_spec = ( - get_gpt_layer_with_transformer_engine_spec( - num_experts=config.num_moe_experts, - moe_grouped_gemm=config.moe_grouped_gemm, - qk_layernorm=config.qk_layernorm, - multi_latent_attention=config.multi_latent_attention, - moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, - ) - if use_transformer_engine - else get_gpt_layer_local_spec( - num_experts=config.num_moe_experts, - moe_grouped_gemm=config.moe_grouped_gemm, - qk_layernorm=config.qk_layernorm, - multi_latent_attention=config.multi_latent_attention, - moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, - ) - ) - - # Parse config.moe_layer_freq to determine the pattern of expert/dense layers. - # 0 stands for dense layers, 1 stands for expert layers. - # For integer N: Creates a pattern with one expert layer every N layers. - # For string pattern: Evaluates the str directly (e.g. "[1,0,1]" for alternating expert/dense). - if isinstance(config.moe_layer_freq, int): - moe_layer_pattern = [ - 1 if (i % config.moe_layer_freq == 0) else 0 for i in range(config.num_layers) - ] - elif isinstance(config.moe_layer_freq, list): - moe_layer_pattern = config.moe_layer_freq - assert len(moe_layer_pattern) == config.num_layers, ( - f"Invalid length of moe_layer_pattern: {len(moe_layer_pattern)}, " - f"expected {config.num_layers}, " - f"current moe layer pattern: {config.moe_layer_freq}" - ) - else: - raise ValueError( - f"Invalid moe_layer_freq: {type(config.moe_layer_freq)}, {config.moe_layer_freq}" - ) - - # Create the layer specs for the model. - layer_specs = [] - for layer_number in range(config.num_layers): - if moe_layer_pattern[layer_number] == 1: - layer_specs.append(moe_layer_spec) - elif moe_layer_pattern[layer_number] == 0: - layer_specs.append(dense_layer_spec) - else: - raise ValueError(f"Invalid layer pattern: {moe_layer_pattern}") - - # Slice the layer specs to only include the layers that are built in this pipeline stage. - # Note: MCore layer_number starts at 1 - offset = TransformerLayer._get_layer_offset(config) - num_layers_to_build = get_num_layers_to_build(config) - layer_specs = layer_specs[offset : offset + num_layers_to_build] - - # Block spec. - block_spec = TransformerBlockSubmodules(layer_specs=layer_specs, layer_norm=layer_norm_impl) - - return block_spec +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import warnings +from typing import Optional + +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.models.gpt.moe_module_specs import get_moe_module_spec +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.identity_op import IdentityOp +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.multi_latent_attention import ( + MLASelfAttention, + MLASelfAttentionSubmodules, +) +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_block import ( + TransformerBlockSubmodules, + get_num_layers_to_build, +) +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import ( + TransformerLayer, + TransformerLayerSubmodules, + get_transformer_layer_offset, +) +from megatron.core.utils import is_te_min_version + +try: + from megatron.core.extensions.transformer_engine import ( + TEColumnParallelLinear, + TEDotProductAttention, + TELayerNormColumnParallelLinear, + TENorm, + TERowParallelLinear, + ) + + HAVE_TE = True +except ImportError: + HAVE_TE = False + +try: + import apex # pylint: disable=unused-import + + from megatron.core.fusions.fused_layer_norm import FusedLayerNorm + + HAVE_APEX = True + LNImpl = FusedLayerNorm +except ImportError: + from megatron.core.transformer.torch_norm import WrappedTorchNorm + + warnings.warn('Apex is not installed. Falling back to Torch Norm') + LNImpl = WrappedTorchNorm + + +def get_gpt_layer_with_transformer_engine_spec( + num_experts: Optional[int] = None, + moe_grouped_gemm: Optional[bool] = False, + qk_layernorm: Optional[bool] = False, + multi_latent_attention: Optional[bool] = False, + fp8: Optional[str] = None, # pylint: disable=unused-arguments + moe_use_legacy_grouped_gemm: Optional[bool] = False, +) -> ModuleSpec: + """Use this spec to use lower-level Transformer Engine modules (required for fp8 training). + + + Args: + num_experts (int, optional): Number of experts. Defaults to None. + moe_grouped_gemm (bool, optional): To use Grouped GEMM. Defaults to False. + qk_layernorm (bool, optional): To use layernorm for queries/keys. Defaults to False. + fp8 (str, optional): Deprecated. For temporary Nemo compatibility. + moe_use_legacy_grouped_gemm (bool, optional): Force use the legacy GroupedMLP. + Defaults to False. + + Returns: + ModuleSpec: Module specification with TE modules + """ + if fp8 is not None: + warnings.warn( + 'The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated' + ' and will be removed soon. Please update your code accordingly.' 
+ ) + + mlp = get_mlp_module_spec( + use_te=True, + num_experts=num_experts, + moe_grouped_gemm=moe_grouped_gemm, + moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, + ) + + if multi_latent_attention: + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=TENorm, + self_attention=ModuleSpec( + module=MLASelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=MLASelfAttentionSubmodules( + linear_q_proj=TEColumnParallelLinear, + linear_q_down_proj=TEColumnParallelLinear, + linear_q_up_proj=( + TELayerNormColumnParallelLinear + if qk_layernorm + else TEColumnParallelLinear + ), + linear_kv_down_proj=TEColumnParallelLinear, + linear_kv_up_proj=( + TELayerNormColumnParallelLinear + if qk_layernorm + else TEColumnParallelLinear + ), + core_attention=TEDotProductAttention, + linear_proj=TERowParallelLinear, + q_layernorm=IdentityOp, + kv_layernorm=IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=TENorm if num_experts else IdentityOp, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + ), + ) + else: + + # TENorm significantly harms convergence when used + # for QKLayerNorm if TE Version < 1.9; + # we instead use the Apex implementation. + qk_norm = TENorm if is_te_min_version("1.9.0") else FusedLayerNorm + + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=TELayerNormColumnParallelLinear, + core_attention=TEDotProductAttention, + linear_proj=TERowParallelLinear, + q_layernorm=qk_norm if qk_layernorm else IdentityOp, + k_layernorm=qk_norm if qk_layernorm else IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=TENorm if num_experts else IdentityOp, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + ), + ) + + +def get_gpt_layer_local_spec( + num_experts: Optional[int] = None, + moe_grouped_gemm: Optional[bool] = False, + qk_layernorm: Optional[bool] = False, + multi_latent_attention: Optional[bool] = False, + fp8: Optional[str] = None, # pylint: disable=unused-arguments + moe_use_legacy_grouped_gemm: Optional[bool] = False, +) -> ModuleSpec: + """Use this spec for an implementation using only modules in Megatron-Core. + + + Args: + num_experts (int, optional): Number of experts. Defaults to None. + moe_grouped_gemm (bool, optional): To use Grouped GEMM. Defaults to False. + qk_layernorm (bool, optional): To use layernorm for queries/keys. Defaults to False. + fp8 (str, optional): Deprecated. For temporary Nemo compatibility. + moe_use_legacy_grouped_gemm (bool, optional): Force use the legacy GroupedMLP. + Defaults to False. + + Returns: + ModuleSpec: Module specification with Megatron-Core modules + """ + if fp8 is not None: + warnings.warn( + 'The fp8 argument in "get_gpt_layer_local_spec" has been deprecated' + ' and will be removed soon. Please update your code accordingly.' 
+ ) + + mlp = get_mlp_module_spec( + use_te=False, + num_experts=num_experts, + moe_grouped_gemm=moe_grouped_gemm, + moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, + ) + + if multi_latent_attention: + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=LNImpl, + self_attention=ModuleSpec( + module=MLASelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=MLASelfAttentionSubmodules( + linear_q_proj=ColumnParallelLinear, + linear_q_down_proj=ColumnParallelLinear, + linear_q_up_proj=ColumnParallelLinear, + linear_kv_down_proj=ColumnParallelLinear, + linear_kv_up_proj=ColumnParallelLinear, + core_attention=DotProductAttention, + linear_proj=RowParallelLinear, + q_layernorm=LNImpl if qk_layernorm else IdentityOp, + kv_layernorm=LNImpl if qk_layernorm else IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=LNImpl, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + ), + ) + else: + return ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + input_layernorm=LNImpl, + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=ColumnParallelLinear, + core_attention=DotProductAttention, + linear_proj=RowParallelLinear, + q_layernorm=LNImpl if qk_layernorm else IdentityOp, + k_layernorm=LNImpl if qk_layernorm else IdentityOp, + ), + ), + self_attn_bda=get_bias_dropout_add, + pre_mlp_layernorm=LNImpl, + mlp=mlp, + mlp_bda=get_bias_dropout_add, + sharded_state_dict_keys_map={ + 'input_layernorm.': 'self_attention.linear_qkv.layer_norm_', + 'pre_mlp_layernorm.': 'mlp.linear_fc1.layer_norm_', + }, + ), + ) + + +def _get_mlp_module_spec( + use_te: Optional[bool] = True, + num_experts: Optional[int] = None, + moe_grouped_gemm: Optional[bool] = False, + fp8: Optional[str] = None, # pylint: disable=unused-arguments + moe_use_legacy_grouped_gemm: Optional[bool] = False, +): + warnings.warn( + """This private function is on a deprecation track. Please switch to `get_mlp_module_spec` + since it will be removed in a future release.""" + ) + + return get_mlp_module_spec( + use_te=use_te, + num_experts=num_experts, + moe_grouped_gemm=moe_grouped_gemm, + fp8=fp8, + moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, + ) + + +def get_mlp_module_spec( + use_te: Optional[bool] = True, + num_experts: Optional[int] = None, + moe_grouped_gemm: Optional[bool] = False, + fp8: Optional[str] = None, # pylint: disable=unused-arguments + moe_use_legacy_grouped_gemm: Optional[bool] = False, +) -> ModuleSpec: + """Helper function to get module spec for MLP/MoE""" + if fp8 is not None: + warnings.warn( + 'The fp8 argument in "_get_mlp_module_spec" has been deprecated' + ' and will be removed soon. Please update your code accordingly.' + ) + + if num_experts is None: + # Dense MLP w/ or w/o TE modules. + return ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TELayerNormColumnParallelLinear if use_te else ColumnParallelLinear, + linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, + ), + ) + else: + # Mixture of experts with modules in megatron core. 
+ return get_moe_module_spec( + use_te=use_te, + num_experts=num_experts, + moe_grouped_gemm=moe_grouped_gemm, + moe_use_legacy_grouped_gemm=moe_use_legacy_grouped_gemm, + ) + + +def get_gpt_decoder_block_spec( + config: TransformerConfig, use_transformer_engine: bool +) -> TransformerBlockSubmodules: + """GPT block spec.""" + if use_transformer_engine: + layer_norm_impl = TENorm + else: + layer_norm_impl = LNImpl + + # Layer specs. + dense_layer_spec = ( + get_gpt_layer_with_transformer_engine_spec( + num_experts=None, + moe_grouped_gemm=False, + qk_layernorm=config.qk_layernorm, + multi_latent_attention=config.multi_latent_attention, + moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, + ) + if use_transformer_engine + else get_gpt_layer_local_spec( + num_experts=None, + moe_grouped_gemm=False, + qk_layernorm=config.qk_layernorm, + multi_latent_attention=config.multi_latent_attention, + moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, + ) + ) + moe_layer_spec = ( + get_gpt_layer_with_transformer_engine_spec( + num_experts=config.num_moe_experts, + moe_grouped_gemm=config.moe_grouped_gemm, + qk_layernorm=config.qk_layernorm, + multi_latent_attention=config.multi_latent_attention, + moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, + ) + if use_transformer_engine + else get_gpt_layer_local_spec( + num_experts=config.num_moe_experts, + moe_grouped_gemm=config.moe_grouped_gemm, + qk_layernorm=config.qk_layernorm, + multi_latent_attention=config.multi_latent_attention, + moe_use_legacy_grouped_gemm=config.moe_use_legacy_grouped_gemm, + ) + ) + + # Parse config.moe_layer_freq to determine the pattern of expert/dense layers. + # 0 stands for dense layers, 1 stands for expert layers. + # For integer N: Creates a pattern with one expert layer every N layers. + # For string pattern: Evaluates the str directly (e.g. "[1,0,1]" for alternating expert/dense). + if isinstance(config.moe_layer_freq, int): + moe_layer_pattern = [ + 1 if (i % config.moe_layer_freq == 0) else 0 for i in range(config.num_layers) + ] + elif isinstance(config.moe_layer_freq, list): + moe_layer_pattern = config.moe_layer_freq + assert len(moe_layer_pattern) == config.num_layers, ( + f"Invalid length of moe_layer_pattern: {len(moe_layer_pattern)}, " + f"expected {config.num_layers}, " + f"current moe layer pattern: {config.moe_layer_freq}" + ) + else: + raise ValueError( + f"Invalid moe_layer_freq: {type(config.moe_layer_freq)}, {config.moe_layer_freq}" + ) + + # Create the layer specs for the model. + layer_specs = [] + for layer_number in range(config.num_layers): + if moe_layer_pattern[layer_number] == 1: + layer_specs.append(moe_layer_spec) + elif moe_layer_pattern[layer_number] == 0: + layer_specs.append(dense_layer_spec) + else: + raise ValueError(f"Invalid layer pattern: {moe_layer_pattern}") + + # Slice the layer specs to only include the layers that are built in this pipeline stage. + # Note: MCore layer_number starts at 1 + offset = get_transformer_layer_offset(config) + num_layers_to_build = get_num_layers_to_build(config) + layer_specs = layer_specs[offset : offset + num_layers_to_build] + + # Block spec. 
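A small worked example of the moe_layer_freq expansion parsed above: an integer N yields a pattern with one expert layer every N layers, while an explicit list is used as-is. The helper name and example values below are illustrative, not part of the file.

def expand_moe_layer_pattern(moe_layer_freq, num_layers):
    # Mirrors the parsing above: 1 marks an MoE layer, 0 marks a dense layer.
    if isinstance(moe_layer_freq, int):
        return [1 if i % moe_layer_freq == 0 else 0 for i in range(num_layers)]
    if isinstance(moe_layer_freq, list):
        assert len(moe_layer_freq) == num_layers, "pattern must cover every layer"
        return moe_layer_freq
    raise ValueError(f"Invalid moe_layer_freq: {moe_layer_freq!r}")

print(expand_moe_layer_pattern(2, 6))          # [1, 0, 1, 0, 1, 0]
print(expand_moe_layer_pattern([1, 0, 1], 3))  # [1, 0, 1]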
+ block_spec = TransformerBlockSubmodules(layer_specs=layer_specs, layer_norm=layer_norm_impl) + + return block_spec diff --git a/megatron/core/models/gpt/gpt_model.py b/megatron/core/models/gpt/gpt_model.py index be8cdce..8f50cbe 100644 --- a/megatron/core/models/gpt/gpt_model.py +++ b/megatron/core/models/gpt/gpt_model.py @@ -1,309 +1,331 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from collections import OrderedDict -from typing import Dict, Literal, Optional - -from torch import Tensor - -from megatron.core import InferenceParams, tensor_parallel -from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk -from megatron.core.dist_checkpointing.mapping import ShardedStateDict -from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding -from megatron.core.models.common.embeddings.rotary_pos_embedding import RotaryEmbedding -from megatron.core.models.common.language_module.language_module import LanguageModule -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.transformer.enums import ModelType -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_block import TransformerBlock -from megatron.core.transformer.transformer_config import TransformerConfig - - -class GPTModel(LanguageModule): - """GPT Transformer language model. - - Args: - config (TransformerConfig): - Transformer config - transformer_layer_spec (ModuleSpec): - Specifies module to use for transformer layers - vocab_size (int): - Vocabulary size - max_sequence_length (int): - maximum size of sequence. This is used for positional embedding - pre_process (bool, optional): - Include embedding layer (used with pipeline parallelism). Defaults to True. - post_process (bool, optional): - Include an output layer (used with pipeline parallelism). Defaults to True. - fp16_lm_cross_entropy (bool, optional): - Defaults to False. - parallel_output (bool, optional): - Do not gather the outputs, keep them split across tensor - parallel ranks. Defaults to True. - share_embeddings_and_output_weights (bool, optional): - When True, input embeddings and output logit weights are shared. Defaults to False. - position_embedding_type (Literal[learned_absolute,rope], optional): - Position embedding type.. Defaults to 'learned_absolute'. - rotary_percent (float, optional): - Percent of rotary dimension to use for rotary position embeddings. - Ignored unless position_embedding_type is 'rope'. Defaults to 1.0. - rotary_base (int, optional): - Base period for rotary position embeddings. Ignored unless - position_embedding_type is 'rope'. - Defaults to 10000. - scatter_embedding_sequence_parallel (bool, optional): - Whether embeddings should be scattered across sequence parallel - region or not. Defaults to True. - seq_len_interpolation_factor (Optional[float], optional): - scale of linearly interpolating RoPE for longer sequences. - The value must be a float larger than 1.0. Defaults to None. 
- """ - - def __init__( - self, - config: TransformerConfig, - transformer_layer_spec: ModuleSpec, - vocab_size: int, - max_sequence_length: int, - pre_process: bool = True, - post_process: bool = True, - fp16_lm_cross_entropy: bool = False, - parallel_output: bool = True, - share_embeddings_and_output_weights: bool = False, - position_embedding_type: Literal['learned_absolute', 'rope', 'none'] = 'learned_absolute', - rotary_percent: float = 1.0, - rotary_base: int = 10000, - rope_scaling: bool = False, - scatter_embedding_sequence_parallel: bool = True, - seq_len_interpolation_factor: Optional[float] = None, - ) -> None: - super().__init__(config=config) - - if has_config_logger_enabled(config): - log_config_to_disk(config, locals(), prefix=type(self).__name__) - - self.transformer_layer_spec: ModuleSpec = transformer_layer_spec - self.vocab_size = vocab_size - self.max_sequence_length = max_sequence_length - self.pre_process = pre_process - self.post_process = post_process - self.fp16_lm_cross_entropy = fp16_lm_cross_entropy - self.parallel_output = parallel_output - self.share_embeddings_and_output_weights = share_embeddings_and_output_weights - self.position_embedding_type = position_embedding_type - - # megatron core pipelining currently depends on model type - # TODO: remove this dependency ? - self.model_type = ModelType.encoder_or_decoder - - # These 4 attributes are needed for TensorRT-LLM export. - self.max_position_embeddings = max_sequence_length - self.rotary_percent = rotary_percent - self.rotary_base = rotary_base - self.rotary_scaling = rope_scaling - - if self.pre_process: - self.embedding = LanguageModelEmbedding( - config=self.config, - vocab_size=self.vocab_size, - max_sequence_length=self.max_sequence_length, - position_embedding_type=position_embedding_type, - scatter_to_sequence_parallel=scatter_embedding_sequence_parallel, - ) - - if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: - self.rotary_pos_emb = RotaryEmbedding( - kv_channels=self.config.kv_channels, - rotary_percent=rotary_percent, - rotary_interleaved=self.config.rotary_interleaved, - seq_len_interpolation_factor=seq_len_interpolation_factor, - rotary_base=rotary_base, - rope_scaling=rope_scaling, - use_cpu_initialization=self.config.use_cpu_initialization, - ) - - # Transformer. - self.decoder = TransformerBlock( - config=self.config, - spec=transformer_layer_spec, - pre_process=self.pre_process, - post_process=self.post_process, - ) - - # Output - if post_process: - if self.config.defer_embedding_wgrad_compute: - # The embedding activation buffer preserves a reference to the input activations - # of the final embedding projection layer GEMM. It will hold the activations for - # all the micro-batches of a global batch for the last pipeline stage. Once we are - # done with all the back props for all the microbatches for the last pipeline stage, - # it will be in the pipeline flush stage. During this pipeline flush we use the - # input activations stored in embedding activation buffer and gradient outputs - # stored in gradient buffer to calculate the weight gradients for the embedding - # final linear layer. 
- self.embedding_activation_buffer = [] - self.grad_output_buffer = [] - else: - self.embedding_activation_buffer = None - self.grad_output_buffer = None - - self.output_layer = tensor_parallel.ColumnParallelLinear( - config.hidden_size, - self.vocab_size, - config=config, - init_method=config.init_method, - bias=False, - skip_bias_add=False, - gather_output=not self.parallel_output, - skip_weight_param_allocation=self.pre_process - and self.share_embeddings_and_output_weights, - embedding_activation_buffer=self.embedding_activation_buffer, - grad_output_buffer=self.grad_output_buffer, - ) - - if self.pre_process or self.post_process: - self.setup_embeddings_and_output_layer() - - if has_config_logger_enabled(self.config): - log_config_to_disk( - self.config, self.state_dict(), prefix=f'{type(self).__name__}_init_ckpt' - ) - - def set_input_tensor(self, input_tensor: Tensor) -> None: - """Sets input tensor to the model. - - See megatron.model.transformer.set_input_tensor() - - Args: - input_tensor (Tensor): Sets the input tensor for the model. - """ - # This is usually handled in schedules.py but some inference code still - # gives us non-lists or None - if not isinstance(input_tensor, list): - input_tensor = [input_tensor] - - assert len(input_tensor) == 1, 'input_tensor should only be length 1 for gpt/bert' - self.decoder.set_input_tensor(input_tensor[0]) - - def forward( - self, - input_ids: Tensor, - position_ids: Tensor, - attention_mask: Tensor, - decoder_input: Tensor = None, - labels: Tensor = None, - inference_params: InferenceParams = None, - packed_seq_params: PackedSeqParams = None, - extra_block_kwargs: dict = None, - runtime_gather_output: Optional[bool] = None, - ) -> Tensor: - """Forward function of the GPT Model This function passes the input tensors - through the embedding layer, and then the decoeder and finally into the post - processing layer (optional). - - It either returns the Loss values if labels are given or the final hidden units - - Args: - runtime_gather_output (bool): Gather output at runtime. Default None means - `parallel_output` arg in the constructor will be used. - """ - # If decoder_input is provided (not None), then input_ids and position_ids are ignored. - # Otherwise, apply embedding layer on input_ids and position_ids to get decoder_input. - - # Decoder embedding. - if decoder_input is not None: - pass - elif self.pre_process: - decoder_input = self.embedding(input_ids=input_ids, position_ids=position_ids) - else: - # intermediate stage of pipeline - # decoder will get hidden_states from encoder.input_tensor - decoder_input = None - - # Rotary positional embeddings (embedding is None for PP intermediate devices) - rotary_pos_emb = None - rotary_pos_cos = None - rotary_pos_sin = None - if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: - if not self.training and self.config.flash_decode: - # Flash decoding uses precomputed cos and sin for RoPE - rotary_pos_cos, rotary_pos_sin = self.rotary_pos_emb.get_cos_sin( - inference_params.max_sequence_length - ) - else: - rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( - inference_params, self.decoder, decoder_input, self.config, packed_seq_params - ) - rotary_pos_emb = self.rotary_pos_emb( - rotary_seq_len, - packed_seq=packed_seq_params is not None - and packed_seq_params.qkv_format == 'thd', - ) - - # Run decoder. 
- hidden_states = self.decoder( - hidden_states=decoder_input, - attention_mask=attention_mask, - inference_params=inference_params, - rotary_pos_emb=rotary_pos_emb, - rotary_pos_cos=rotary_pos_cos, - rotary_pos_sin=rotary_pos_sin, - packed_seq_params=packed_seq_params, - **(extra_block_kwargs or {}), - ) - - if not self.post_process: - return hidden_states - - # logits and loss - output_weight = None - if self.share_embeddings_and_output_weights: - output_weight = self.shared_embedding_or_output_weight() - logits, _ = self.output_layer( - hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output - ) - - if has_config_logger_enabled(self.config): - payload = OrderedDict( - { - 'input_ids': input_ids, - 'position_ids': position_ids, - 'attention_mask': attention_mask, - 'decoder_input': decoder_input, - 'logits': logits, - } - ) - log_config_to_disk(self.config, payload, prefix='input_and_logits') - - if labels is None: - # [s b h] => [b s h] - return logits.transpose(0, 1).contiguous() - - loss = self.compute_language_model_loss(labels, logits) - - return loss - - def sharded_state_dict( - self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[Dict] = None - ) -> ShardedStateDict: - """Sharded state dict implementation for GPTModel backward-compatibility - (removing extra state). - - Args: - prefix (str): Module name prefix. - sharded_offsets (tuple): PP related offsets, expected to be empty at this module level. - metadata (Optional[Dict]): metadata controlling sharded state dict creation. - - Returns: - ShardedStateDict: sharded state dict for the GPTModel - """ - sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata) - output_layer_extra_state_key = f'{prefix}output_layer._extra_state' - - # Old GPT checkpoints only stored the output layer weight key. So we remove the - # _extra_state key but check that it doesn't contain any data anyway - output_extra_state = sharded_state_dict.pop(output_layer_extra_state_key, None) - assert not ( - output_extra_state and output_extra_state.data - ), f'Expected output layer extra state to be empty, got: {output_extra_state}' - - return sharded_state_dict +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +from collections import OrderedDict +from typing import Dict, Literal, Optional + +import torch +from torch import Tensor + +from megatron.core import InferenceParams, tensor_parallel +from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding +from megatron.core.models.common.embeddings.rotary_pos_embedding import RotaryEmbedding +from megatron.core.models.common.language_module.language_module import LanguageModule +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.transformer.enums import ModelType +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_block import TransformerBlock +from megatron.core.transformer.transformer_config import TransformerConfig + + +class GPTModel(LanguageModule): + """GPT Transformer language model. + + Args: + config (TransformerConfig): + Transformer config + transformer_layer_spec (ModuleSpec): + Specifies module to use for transformer layers + vocab_size (int): + Vocabulary size + max_sequence_length (int): + maximum size of sequence. 
This is used for positional embedding + pre_process (bool, optional): + Include embedding layer (used with pipeline parallelism). Defaults to True. + post_process (bool, optional): + Include an output layer (used with pipeline parallelism). Defaults to True. + fp16_lm_cross_entropy (bool, optional): + Defaults to False. + parallel_output (bool, optional): + Do not gather the outputs, keep them split across tensor + parallel ranks. Defaults to True. + share_embeddings_and_output_weights (bool, optional): + When True, input embeddings and output logit weights are shared. Defaults to False. + position_embedding_type (Literal[learned_absolute,rope], optional): + Position embedding type.. Defaults to 'learned_absolute'. + rotary_percent (float, optional): + Percent of rotary dimension to use for rotary position embeddings. + Ignored unless position_embedding_type is 'rope'. Defaults to 1.0. + rotary_base (int, optional): + Base period for rotary position embeddings. Ignored unless + position_embedding_type is 'rope'. + Defaults to 10000. + rope_scaling (bool, optional): Toggle RoPE scaling. + rope_scaling_factor (float): RoPE scaling factor. Default 8. + scatter_embedding_sequence_parallel (bool, optional): + Whether embeddings should be scattered across sequence parallel + region or not. Defaults to True. + seq_len_interpolation_factor (Optional[float], optional): + scale of linearly interpolating RoPE for longer sequences. + The value must be a float larger than 1.0. Defaults to None. + """ + + def __init__( + self, + config: TransformerConfig, + transformer_layer_spec: ModuleSpec, + vocab_size: int, + max_sequence_length: int, + pre_process: bool = True, + post_process: bool = True, + fp16_lm_cross_entropy: bool = False, + parallel_output: bool = True, + share_embeddings_and_output_weights: bool = False, + position_embedding_type: Literal['learned_absolute', 'rope', 'none'] = 'learned_absolute', + rotary_percent: float = 1.0, + rotary_base: int = 10000, + rope_scaling: bool = False, + rope_scaling_factor: float = 8.0, + scatter_embedding_sequence_parallel: bool = True, + seq_len_interpolation_factor: Optional[float] = None, + ) -> None: + super().__init__(config=config) + + if has_config_logger_enabled(config): + log_config_to_disk(config, locals(), prefix=type(self).__name__) + + self.transformer_layer_spec: ModuleSpec = transformer_layer_spec + self.vocab_size = vocab_size + self.max_sequence_length = max_sequence_length + self.pre_process = pre_process + self.post_process = post_process + self.fp16_lm_cross_entropy = fp16_lm_cross_entropy + self.parallel_output = parallel_output + self.share_embeddings_and_output_weights = share_embeddings_and_output_weights + self.position_embedding_type = position_embedding_type + + # megatron core pipelining currently depends on model type + # TODO: remove this dependency ? + self.model_type = ModelType.encoder_or_decoder + + # These 4 attributes are needed for TensorRT-LLM export. 
+ self.max_position_embeddings = max_sequence_length + self.rotary_percent = rotary_percent + self.rotary_base = rotary_base + self.rotary_scaling = rope_scaling + + if self.pre_process: + self.embedding = LanguageModelEmbedding( + config=self.config, + vocab_size=self.vocab_size, + max_sequence_length=self.max_sequence_length, + position_embedding_type=position_embedding_type, + scatter_to_sequence_parallel=scatter_embedding_sequence_parallel, + ) + + if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: + self.rotary_pos_emb = RotaryEmbedding( + kv_channels=self.config.kv_channels, + rotary_percent=rotary_percent, + rotary_interleaved=self.config.rotary_interleaved, + seq_len_interpolation_factor=seq_len_interpolation_factor, + rotary_base=rotary_base, + rope_scaling=rope_scaling, + rope_scaling_factor=rope_scaling_factor, + use_cpu_initialization=self.config.use_cpu_initialization, + ) + + # Cache for RoPE tensors which do not change between iterations. + self.rotary_pos_emb_cache = {} + + # Transformer. + self.decoder = TransformerBlock( + config=self.config, + spec=transformer_layer_spec, + pre_process=self.pre_process, + post_process=self.post_process, + ) + + # Output + if post_process: + if self.config.defer_embedding_wgrad_compute: + # The embedding activation buffer preserves a reference to the input activations + # of the final embedding projection layer GEMM. It will hold the activations for + # all the micro-batches of a global batch for the last pipeline stage. Once we are + # done with all the back props for all the microbatches for the last pipeline stage, + # it will be in the pipeline flush stage. During this pipeline flush we use the + # input activations stored in embedding activation buffer and gradient outputs + # stored in gradient buffer to calculate the weight gradients for the embedding + # final linear layer. + self.embedding_activation_buffer = [] + self.grad_output_buffer = [] + else: + self.embedding_activation_buffer = None + self.grad_output_buffer = None + + self.output_layer = tensor_parallel.ColumnParallelLinear( + config.hidden_size, + self.vocab_size, + config=config, + init_method=config.init_method, + bias=False, + skip_bias_add=False, + gather_output=not self.parallel_output, + skip_weight_param_allocation=self.pre_process + and self.share_embeddings_and_output_weights, + embedding_activation_buffer=self.embedding_activation_buffer, + grad_output_buffer=self.grad_output_buffer, + ) + + if self.pre_process or self.post_process: + self.setup_embeddings_and_output_layer() + + if has_config_logger_enabled(self.config): + log_config_to_disk( + self.config, self.state_dict(), prefix=f'{type(self).__name__}_init_ckpt' + ) + + def set_input_tensor(self, input_tensor: Tensor) -> None: + """Sets input tensor to the model. + + See megatron.model.transformer.set_input_tensor() + + Args: + input_tensor (Tensor): Sets the input tensor for the model. 
+ """ + # This is usually handled in schedules.py but some inference code still + # gives us non-lists or None + if not isinstance(input_tensor, list): + input_tensor = [input_tensor] + + assert len(input_tensor) == 1, 'input_tensor should only be length 1 for gpt/bert' + self.decoder.set_input_tensor(input_tensor[0]) + + def forward( + self, + input_ids: Tensor, + position_ids: Tensor, + attention_mask: Tensor, + decoder_input: Tensor = None, + labels: Tensor = None, + inference_params: InferenceParams = None, + packed_seq_params: PackedSeqParams = None, + extra_block_kwargs: dict = None, + runtime_gather_output: Optional[bool] = None, + ) -> Tensor: + """Forward function of the GPT Model This function passes the input tensors + through the embedding layer, and then the decoeder and finally into the post + processing layer (optional). + + It either returns the Loss values if labels are given or the final hidden units + + Args: + runtime_gather_output (bool): Gather output at runtime. Default None means + `parallel_output` arg in the constructor will be used. + """ + # If decoder_input is provided (not None), then input_ids and position_ids are ignored. + # Otherwise, apply embedding layer on input_ids and position_ids to get decoder_input. + + # Decoder embedding. + if decoder_input is not None: + pass + elif self.pre_process: + decoder_input = self.embedding(input_ids=input_ids, position_ids=position_ids) + else: + # intermediate stage of pipeline + # decoder will get hidden_states from encoder.input_tensor + decoder_input = None + + # Rotary positional embeddings (embedding is None for PP intermediate devices) + rotary_pos_emb = None + rotary_pos_cos = None + rotary_pos_sin = None + if self.position_embedding_type == 'rope' and not self.config.multi_latent_attention: + if not self.training and self.config.flash_decode and inference_params: + # Flash decoding uses precomputed cos and sin for RoPE + rotary_pos_cos, rotary_pos_sin = self.rotary_pos_emb_cache.setdefault( + inference_params.max_sequence_length, + self.rotary_pos_emb.get_cos_sin(inference_params.max_sequence_length), + ) + else: + rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( + inference_params, self.decoder, decoder_input, self.config, packed_seq_params + ) + rotary_pos_emb = self.rotary_pos_emb( + rotary_seq_len, + packed_seq=packed_seq_params is not None + and packed_seq_params.qkv_format == 'thd', + ) + if ( + (self.config.enable_cuda_graph or self.config.flash_decode) + and rotary_pos_cos is not None + and inference_params + ): + sequence_len_offset = torch.tensor( + [inference_params.sequence_len_offset] * inference_params.current_batch_size, + dtype=torch.int32, + device=rotary_pos_cos.device, # Co-locate this with the rotary tensors + ) + else: + sequence_len_offset = None + + # Run decoder. 
+ hidden_states = self.decoder( + hidden_states=decoder_input, + attention_mask=attention_mask, + inference_params=inference_params, + rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, + packed_seq_params=packed_seq_params, + sequence_len_offset=sequence_len_offset, + **(extra_block_kwargs or {}), + ) + + if not self.post_process: + return hidden_states + + # logits and loss + output_weight = None + if self.share_embeddings_and_output_weights: + output_weight = self.shared_embedding_or_output_weight() + logits, _ = self.output_layer( + hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output + ) + + if has_config_logger_enabled(self.config): + payload = OrderedDict( + { + 'input_ids': input_ids, + 'position_ids': position_ids, + 'attention_mask': attention_mask, + 'decoder_input': decoder_input, + 'logits': logits, + } + ) + log_config_to_disk(self.config, payload, prefix='input_and_logits') + + if labels is None: + # [s b h] => [b s h] + return logits.transpose(0, 1).contiguous() + + loss = self.compute_language_model_loss(labels, logits) + + return loss + + def sharded_state_dict( + self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[Dict] = None + ) -> ShardedStateDict: + """Sharded state dict implementation for GPTModel backward-compatibility + (removing extra state). + + Args: + prefix (str): Module name prefix. + sharded_offsets (tuple): PP related offsets, expected to be empty at this module level. + metadata (Optional[Dict]): metadata controlling sharded state dict creation. + + Returns: + ShardedStateDict: sharded state dict for the GPTModel + """ + sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata) + output_layer_extra_state_key = f'{prefix}output_layer._extra_state' + + # Old GPT checkpoints only stored the output layer weight key. So we remove the + # _extra_state key but check that it doesn't contain any data anyway + output_extra_state = sharded_state_dict.pop(output_layer_extra_state_key, None) + assert not ( + output_extra_state and output_extra_state.data + ), f'Expected output layer extra state to be empty, got: {output_extra_state}' + + return sharded_state_dict diff --git a/megatron/core/models/gpt/moe_module_specs.py b/megatron/core/models/gpt/moe_module_specs.py index 513eedd..1d53a3b 100644 --- a/megatron/core/models/gpt/moe_module_specs.py +++ b/megatron/core/models/gpt/moe_module_specs.py @@ -1,81 +1,81 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import warnings -from typing import Optional - -from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear -from megatron.core.transformer.mlp import MLPSubmodules -from megatron.core.transformer.moe.experts import GroupedMLP, SequentialMLP, TEGroupedMLP -from megatron.core.transformer.moe.moe_layer import MoELayer, MoESubmodules -from megatron.core.transformer.moe.shared_experts import SharedExpertMLP -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.utils import get_te_version, is_te_min_version - -try: - from megatron.core.extensions.transformer_engine import ( - TEColumnParallelGroupedLinear, - TEColumnParallelLinear, - TERowParallelGroupedLinear, - TERowParallelLinear, - ) - - HAVE_TE = True -except ImportError: - HAVE_TE = False - - -def get_moe_module_spec( - use_te: Optional[bool] = True, - num_experts: Optional[int] = None, - moe_grouped_gemm: Optional[bool] = False, - moe_use_legacy_grouped_gemm: Optional[bool] = False, -) -> ModuleSpec: - """Helper function to get module spec for MoE""" - assert num_experts is not None - - mlp = MLPSubmodules( - linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, - linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, - ) - - # experts spec - if moe_grouped_gemm: - ## use GroupedMLP - if use_te and TEColumnParallelGroupedLinear is not None and not moe_use_legacy_grouped_gemm: - ## use TEGroupedLinear - expert_module = TEGroupedMLP - expert_submodule = MLPSubmodules( - linear_fc1=TEColumnParallelGroupedLinear, linear_fc2=TERowParallelGroupedLinear - ) - else: - ## use legacy GroupedMLP - expert_module = GroupedMLP - expert_submodule = None - warnings.warn( - 'The legacy GroupedMLP will be deprecated in Megatron-Core v0.12.0. ' - 'Please update the TransformerEngine to version>=1.7.0 and use TEGroupedMLP.' - ) - else: - ## use SequentialMLP - expert_module = SequentialMLP - if use_te and not is_te_min_version("1.7.0.dev0"): - warnings.warn( - "Only transformer-engine>=1.7.0 supports MoE experts, " - f"but your version is {get_te_version()}. Use local linear implementation instead." - ) - expert_submodule = MLPSubmodules( - linear_fc1=ColumnParallelLinear, linear_fc2=RowParallelLinear - ) - else: - expert_submodule = mlp - - experts = ModuleSpec(module=expert_module, submodules=expert_submodule) - - # shared experts spec - shared_experts = ModuleSpec(module=SharedExpertMLP, params={"gate": False}, submodules=mlp) - - # MoE module spec - moe_module_spec = ModuleSpec( - module=MoELayer, submodules=MoESubmodules(experts=experts, shared_experts=shared_experts) - ) - return moe_module_spec +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import warnings +from typing import Optional + +from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.mlp import MLPSubmodules +from megatron.core.transformer.moe.experts import GroupedMLP, SequentialMLP, TEGroupedMLP +from megatron.core.transformer.moe.moe_layer import MoELayer, MoESubmodules +from megatron.core.transformer.moe.shared_experts import SharedExpertMLP +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.utils import get_te_version, is_te_min_version + +try: + from megatron.core.extensions.transformer_engine import ( + TEColumnParallelGroupedLinear, + TEColumnParallelLinear, + TERowParallelGroupedLinear, + TERowParallelLinear, + ) + + HAVE_TE = True +except ImportError: + HAVE_TE = False + + +def get_moe_module_spec( + use_te: Optional[bool] = True, + num_experts: Optional[int] = None, + moe_grouped_gemm: Optional[bool] = False, + moe_use_legacy_grouped_gemm: Optional[bool] = False, +) -> ModuleSpec: + """Helper function to get module spec for MoE""" + assert num_experts is not None + + mlp = MLPSubmodules( + linear_fc1=TEColumnParallelLinear if use_te else ColumnParallelLinear, + linear_fc2=TERowParallelLinear if use_te else RowParallelLinear, + ) + + # experts spec + if moe_grouped_gemm: + ## use GroupedMLP + if use_te and TEColumnParallelGroupedLinear is not None and not moe_use_legacy_grouped_gemm: + ## use TEGroupedLinear + expert_module = TEGroupedMLP + expert_submodule = MLPSubmodules( + linear_fc1=TEColumnParallelGroupedLinear, linear_fc2=TERowParallelGroupedLinear + ) + else: + ## use legacy GroupedMLP + expert_module = GroupedMLP + expert_submodule = None + warnings.warn( + 'The legacy GroupedMLP will be deprecated in Megatron-Core v0.12.0. ' + 'Please update the TransformerEngine to version>=1.7.0 and use TEGroupedMLP.' + ) + else: + ## use SequentialMLP + expert_module = SequentialMLP + if use_te and not is_te_min_version("1.7.0.dev0"): + warnings.warn( + "Only transformer-engine>=1.7.0 supports MoE experts, " + f"but your version is {get_te_version()}. Use local linear implementation instead." + ) + expert_submodule = MLPSubmodules( + linear_fc1=ColumnParallelLinear, linear_fc2=RowParallelLinear + ) + else: + expert_submodule = mlp + + experts = ModuleSpec(module=expert_module, submodules=expert_submodule) + + # shared experts spec + shared_experts = ModuleSpec(module=SharedExpertMLP, params={"gate": False}, submodules=mlp) + + # MoE module spec + moe_module_spec = ModuleSpec( + module=MoELayer, submodules=MoESubmodules(experts=experts, shared_experts=shared_experts) + ) + return moe_module_spec diff --git a/megatron/core/models/huggingface/__init__.py b/megatron/core/models/huggingface/__init__.py new file mode 100644 index 0000000..e2e0b01 --- /dev/null +++ b/megatron/core/models/huggingface/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +from .module import HuggingFaceModule, build_hf_model diff --git a/megatron/core/models/huggingface/clip_model.py b/megatron/core/models/huggingface/clip_model.py new file mode 100644 index 0000000..a151749 --- /dev/null +++ b/megatron/core/models/huggingface/clip_model.py @@ -0,0 +1,22 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+ +from transformers import AutoModel + +from megatron.core.models.huggingface import HuggingFaceModule + + +class ClipHuggingFaceModel(HuggingFaceModule): + """ + Wrapper for CLIP HuggingFace models + """ + + def __init__(self, config): + super().__init__(config) + self.model = AutoModel.from_pretrained(config.huggingface_model_name_or_path) + + def forward(self, *args, **kwargs): + """Forward function""" + x = self.model(*args, **kwargs) + x = x['last_hidden_state'] + + return x diff --git a/megatron/core/models/huggingface/module.py b/megatron/core/models/huggingface/module.py new file mode 100644 index 0000000..bb1f7e8 --- /dev/null +++ b/megatron/core/models/huggingface/module.py @@ -0,0 +1,50 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +from transformers import AutoConfig, AutoModel + +from megatron.core.transformer.module import MegatronModule + + +class HuggingFaceModule(MegatronModule): + """ + Basic module for huggingface + """ + + def __init__(self, config): + super().__init__(config=config) + + def set_input_tensor(self, input_tensor): + """Dummy function for set_input_tensor""" + self.input_tensor = input_tensor + + +class AutoHuggingFaceModel(HuggingFaceModule): + """ + Wrapper for HuggingFace AutoModel + """ + + def __init__(self, config): + super().__init__(config) + self.model = AutoModel.from_pretrained(config.huggingface_model_name_or_path) + + def forward(self, *args, **kwargs): + """Forward function""" + return self.model(*args, **kwargs) + + +def build_hf_model(config): + """Builds huggingface wrapper model given config""" + hf_config = AutoConfig.from_pretrained(config.huggingface_model_name_or_path) + + if "qwen" in hf_config.model_type: + from megatron.core.models.huggingface.qwen_model import QwenHuggingFaceModel + + model = QwenHuggingFaceModel(config) + elif "vit" in hf_config.model_type: + from megatron.core.models.huggingface.clip_model import ClipHuggingFaceModel + + model = ClipHuggingFaceModel(config) + else: + raise NotImplementedError(f"Huggingface model type {hf_config.model_type} is not supported") + + return model diff --git a/megatron/core/models/huggingface/qwen_model.py b/megatron/core/models/huggingface/qwen_model.py new file mode 100644 index 0000000..3a02057 --- /dev/null +++ b/megatron/core/models/huggingface/qwen_model.py @@ -0,0 +1,36 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+ +from transformers.models.qwen2 import Qwen2ForCausalLM + +from megatron.core.models.huggingface import HuggingFaceModule + + +class QwenHuggingFaceModel(HuggingFaceModule): + """ + Wrapper for Qwen LM HuggingFace models + """ + + def __init__(self, config): + super().__init__(config) + self.model = Qwen2ForCausalLM.from_pretrained(config.huggingface_model_name_or_path) + + def forward(self, *args, **kwargs): + """Forward function""" + combined_embeddings = kwargs['decoder_input'].permute(1, 0, 2) + x = self.model( + position_ids=None, # TODO: I guess we're just assuming no custom pos ids + attention_mask=kwargs['attention_mask'], + inputs_embeds=combined_embeddings, + labels=kwargs['labels'], + ) + + if kwargs['labels'] is not None: + x = x["loss"] + else: + x = x["logits"] + + return x + + def embedding(self, input_ids, position_ids=None): + """Function to run process tokens with input embeddings""" + return self.model.get_input_embeddings()(input_ids).transpose(1, 0).contiguous() diff --git a/megatron/core/models/mamba/mamba_layer_specs.py b/megatron/core/models/mamba/mamba_layer_specs.py index e5fa9ef..97ddd20 100644 --- a/megatron/core/models/mamba/mamba_layer_specs.py +++ b/megatron/core/models/mamba/mamba_layer_specs.py @@ -1,67 +1,67 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from megatron.core.extensions.transformer_engine import ( - TEDotProductAttention, - TELayerNormColumnParallelLinear, - TERowParallelLinear, -) -from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add -from megatron.core.ssm.mamba_block import MambaStack, MambaStackSubmodules -from megatron.core.ssm.mamba_layer import MambaLayer, MambaLayerSubmodules -from megatron.core.ssm.mamba_mixer import MambaMixer, MambaMixerSubmodules -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.mlp import MLP, MLPSubmodules -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules - -mamba_stack_spec = ModuleSpec( - module=MambaStack, - submodules=MambaStackSubmodules( - mamba_layer=ModuleSpec( - module=MambaLayer, - submodules=MambaLayerSubmodules( - mixer=ModuleSpec( - module=MambaMixer, - submodules=MambaMixerSubmodules( - in_proj=TELayerNormColumnParallelLinear, out_proj=TERowParallelLinear - ), - ), - mamba_bda=get_bias_dropout_add, - ), - ), - # Started with spec from gpt_layer_specs.py (with MLP removed) - # Using the TE spec because we had problems getting the non-TE spec - # working - attention_layer=ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - self_attention=ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=SelfAttentionSubmodules( - linear_qkv=TELayerNormColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=TERowParallelLinear, - ), - ), - self_attn_bda=get_bias_dropout_add, - ), - ), - # Started with spec from gpt_layer_specs.py - # Using the TE spec because we had problems getting the non-TE spec - # working - mlp_layer=ModuleSpec( - module=TransformerLayer, - submodules=TransformerLayerSubmodules( - mlp=ModuleSpec( - module=MLP, - submodules=MLPSubmodules( - linear_fc1=TELayerNormColumnParallelLinear, linear_fc2=TERowParallelLinear - ), - ), - mlp_bda=get_bias_dropout_add, - ), - ), - ), -) +# Copyright (c) 2023, NVIDIA CORPORATION. 
All rights reserved. + +from megatron.core.extensions.transformer_engine import ( + TEDotProductAttention, + TELayerNormColumnParallelLinear, + TERowParallelLinear, +) +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.ssm.mamba_block import MambaStack, MambaStackSubmodules +from megatron.core.ssm.mamba_layer import MambaLayer, MambaLayerSubmodules +from megatron.core.ssm.mamba_mixer import MambaMixer, MambaMixerSubmodules +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.mlp import MLP, MLPSubmodules +from megatron.core.transformer.spec_utils import ModuleSpec +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules + +mamba_stack_spec = ModuleSpec( + module=MambaStack, + submodules=MambaStackSubmodules( + mamba_layer=ModuleSpec( + module=MambaLayer, + submodules=MambaLayerSubmodules( + mixer=ModuleSpec( + module=MambaMixer, + submodules=MambaMixerSubmodules( + in_proj=TELayerNormColumnParallelLinear, out_proj=TERowParallelLinear + ), + ), + mamba_bda=get_bias_dropout_add, + ), + ), + # Started with spec from gpt_layer_specs.py (with MLP removed) + # Using the TE spec because we had problems getting the non-TE spec + # working + attention_layer=ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + self_attention=ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=TELayerNormColumnParallelLinear, + core_attention=TEDotProductAttention, + linear_proj=TERowParallelLinear, + ), + ), + self_attn_bda=get_bias_dropout_add, + ), + ), + # Started with spec from gpt_layer_specs.py + # Using the TE spec because we had problems getting the non-TE spec + # working + mlp_layer=ModuleSpec( + module=TransformerLayer, + submodules=TransformerLayerSubmodules( + mlp=ModuleSpec( + module=MLP, + submodules=MLPSubmodules( + linear_fc1=TELayerNormColumnParallelLinear, linear_fc2=TERowParallelLinear + ), + ), + mlp_bda=get_bias_dropout_add, + ), + ), + ), +) diff --git a/megatron/core/models/multimodal/context_parallel.py b/megatron/core/models/multimodal/context_parallel.py new file mode 100644 index 0000000..8115fca --- /dev/null +++ b/megatron/core/models/multimodal/context_parallel.py @@ -0,0 +1,99 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +"""Multimodal Sequence Parallel (SP) and Context Parallel (CP) functionality.""" + +import torch + +from megatron.core.packed_seq_params import PackedSeqParams + + +def get_padding( + seq_len, cp_size, tp_size, has_sp, decoder_tp_comm_overlap=False, decoder_seq_len=None +): + """Calculate padding needed for SP and/or CP. + + Args: + seq_len (int): Model sequence length. + cp_size (int): Context parallel size. + tp_size (int): Tensor parallel size. + has_sp (bool): Model uses sequence parallelism. + decoder_tp_comm_overlap (bool): Decoder (LLM) uses tensor parallel communication overlap. + decoder_seq_len (int): Decoder (LLM) maximum sequence length. + + Returns: + padding (int): Padding needed given model configuration. + """ + + padding = 0 + # TP Comm overlap is performed with combined text+image embeddings. 
+ if has_sp and decoder_tp_comm_overlap: + # If TP Comm Overlap is enabled for combined text+image embedding in LM backbone, + # user needs to provide decoder_seq_len with any potential padding needed for SP+CP + assert ( + decoder_seq_len is not None + ), "Please provide decoder seq length when using TP comm overlap for LM backbone" + padding = decoder_seq_len - seq_len + elif has_sp or cp_size > 1: + padding_factor = 1 + if has_sp and cp_size > 1: + # Padding to multiple of tp_size * cp_size * 2 when using CP + SP. + padding_factor = tp_size * cp_size * 2 + elif cp_size > 1: + padding_factor = cp_size * 2 + elif has_sp: + padding_factor = tp_size + + padding = int((seq_len + padding_factor - 1) // padding_factor * padding_factor) - seq_len + + return padding + + +def get_packed_seq_params(tokens, img_seq_len, padding_needed, cp_size, use_packed_sequence=False): + """Get PackedSeqParams for CP. + + Args: + tokens (torch.Tensor): [batch, seq_len] input tokens. + img_seq_len (int): Image sequence length. + padding_needed (int): Padding to add. + cp_size (int): Context parallel size. + use_packed_sequence (bool): Uses sequence packing. + + Returns: + packed_seq_params (PackedSeqParams): Parameters to be sent to Transformer Engine. + """ + batch_size = tokens.shape[0] + # Calculate the valid token seq len that LM backbone should compute on + combined_valid_seqlen = tokens.shape[1] + img_seq_len - padding_needed + cu_seqlens = torch.arange( + 0, + (batch_size + 1) * (combined_valid_seqlen), + step=(combined_valid_seqlen), + dtype=torch.int32, + device=tokens.device, + ) + # Calculate the total padded token seq len + combined_padded_seqlen = tokens.shape[1] + img_seq_len + cu_seqlens_padded = None + qkv_format = 'sbhd' + if cp_size > 1 and (padding_needed > 0 or use_packed_sequence): + # Provide cu_seqlens__padded for CP support + cu_seqlens_padded = torch.arange( + 0, + (batch_size + 1) * (combined_padded_seqlen), + step=(combined_padded_seqlen), + dtype=torch.int32, + device=tokens.device, + ) + # CP with padding mask type requires THD format + qkv_format = 'thd' + + packed_seq_params = PackedSeqParams( + cu_seqlens_q=cu_seqlens, + cu_seqlens_kv=cu_seqlens, + cu_seqlens_q_padded=cu_seqlens_padded, + cu_seqlens_kv_padded=cu_seqlens_padded, + max_seqlen_q=combined_padded_seqlen, + max_seqlen_kv=combined_padded_seqlen, + qkv_format=qkv_format, + ) + + return packed_seq_params diff --git a/megatron/core/models/multimodal/llava_model.py b/megatron/core/models/multimodal/llava_model.py index 3de68b5..09f83ac 100644 --- a/megatron/core/models/multimodal/llava_model.py +++ b/megatron/core/models/multimodal/llava_model.py @@ -1,924 +1,958 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-import logging -from collections import namedtuple -from functools import partial -from typing import List, Optional - -import torch - -from megatron.core import InferenceParams, tensor_parallel -from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk -from megatron.core.models.gpt import GPTModel -from megatron.core.models.vision.clip_vit_model import CLIPViTModel, get_num_image_embeddings -from megatron.core.models.vision.multimodal_projector import MultimodalProjector -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.parallel_state import get_context_parallel_group, get_context_parallel_world_size -from megatron.core.transformer import MegatronModule -from megatron.core.transformer.spec_utils import ModuleSpec -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import log_single_rank - -try: - import transformer_engine # pylint: disable=unused-import - from transformer_engine.pytorch.distributed import gather_along_first_dim - - from megatron.core.extensions.transformer_engine import TEDotProductAttention - from megatron.core.utils import is_te_min_version - - HAVE_TE = True -except: - HAVE_TE = False - if get_context_parallel_world_size() > 1: - raise RuntimeError("ContextParallelism requires TransformerEngine support, but not found.") - - -IGNORE_INDEX = -100 # ID for labels that should be ignored. -# Image token index can be tokenizer dependent so the default value does not work in all cases. -DEFAULT_IMAGE_TOKEN_INDEX = -200 -IMAGE_TOKEN = "" -VIDEO_TOKEN = "

-Click here. - -```bash -#!/bin/bash - -# Runs Mixtral 8x7B model on 32 H100/A100 GPUs -# The Dropless MoE suffers from an imbalanced token distribution at the early stage of training (the first few hundred iterations), which may lead to poor performance and out-of-memory (OOM) issues. -# To check the performance of a Dropless MoE model, we should run the model for at least 500 iterations or resume from trained checkpoints. - -export CUDA_DEVICE_MAX_CONNECTIONS=1 - -GPUS_PER_NODE=8 -# Change for multinode config -MASTER_ADDR=${MASTER_ADDR:-"localhost"} -MASTER_PORT=${MASTER_PORT:-"6000"} -NNODES=${NNODES:-"1"} -NODE_RANK=${RANK:-"0"} -WORLD_SIZE=$(($GPUS_PER_NODE*$NNODES)) - -CHECKPOINT_PATH=$1 -TOKENIZER_MODEL=$2 -DATA_PATH=$3 - -DISTRIBUTED_ARGS=( - --nproc_per_node $GPUS_PER_NODE - --nnodes $NNODES - --node_rank $NODE_RANK - --master_addr $MASTER_ADDR - --master_port $MASTER_PORT -) - -MODEL_ARGS=( - --disable-bias-linear - --seq-length 4096 - --max-position-embeddings 32768 - --num-layers 32 - --hidden-size 4096 - --ffn-hidden-size 14336 - --num-attention-heads 32 - --init-method-std 0.01 - --attention-dropout 0.0 - --hidden-dropout 0.0 - --normalization RMSNorm - --position-embedding-type rope - --swiglu - --untie-embeddings-and-output-weights - --group-query-attention - --num-query-groups 8 - --no-masked-softmax-fusion - --no-position-embedding -) - -MOE_ARGS=( - --num-experts 8 - --expert-model-parallel-size 8 - --moe-router-load-balancing-type aux_loss # options: aux_loss, sinkhorn, None. Default is aux_loss. - --moe-router-topk 2 - --moe-aux-loss-coeff 1e-2 - --moe-grouped-gemm -) - -DATA_ARGS=( - --tokenizer-type Llama2Tokenizer - --tokenizer-model ${TOKENIZER_MODEL} - --data-path $DATA_PATH - --split 99990,8,2 -) - -TRAINING_ARGS=( - --micro-batch-size 1 - --global-batch-size 128 - --lr 1e-4 - --train-iters 500000 - --lr-decay-iters 320000 - --lr-decay-style cosine - --min-lr 1.0e-5 - --weight-decay 0.1 - --lr-warmup-iters 500 - --clip-grad 1.0 - --bf16 - --overlap-grad-reduce - --overlap-param-gather -) - -MODEL_PARALLEL_ARGS=( - --tensor-model-parallel-size 1 - --pipeline-model-parallel-size 4 - --num-layers-per-virtual-pipeline-stage 8 - --sequence-parallel - --use-distributed-optimizer -) - -LOGGING_ARGS=( - --log-interval 1 \ - --save-interval 10000 \ - --eval-interval 1000 \ - --eval-iters 10 \ - --save $CHECKPOINT_PATH \ - --load $CHECKPOINT_PATH \ - --tensorboard-dir "${CHECKPOINT_PATH}/tensorboard" \ - --no-load-optim \ - --no-load-rng -) - -if [ -n "${WANDB_API_KEY}" ]; then - LOGGING_ARGS+=( - --wandb-project ${WANDB_PROJECT:-"Mixtral-Finetuning"} - --wandb-exp-name ${WANDB_NAME:-"Mixtral_8x7B"} - ) -fi - -torchrun ${DISTRIBUTED_ARGS[@]} pretrain_gpt.py \ - ${MODEL_ARGS[@]} \ - ${MOE_ARGS[@]} \ - ${DATA_ARGS[@]} \ - ${TRAINING_ARGS[@]} \ - ${MODEL_PARALLEL_ARGS[@]} \ - ${LOGGING_ARGS[@]} -``` -
- -# Performance Best Practice - -### Tuning Guide of Parallel Mappings - -To find a good parallel mapping that help you achieve a high throughput of a new model, there are some general rule that could help. Here is an overview of properties in different aspects for each parallel strategy. - -| Parallel Strategy | Peak Activation Memory | Weight Memory | Optimizer states | Communication (Per-Layer) | -|:-----------------:|:-------------------------------:|:--------------:|:---------------------------------:|:-------------------------:| -| TP | 1/N (with SP on) | 1/N | 1/N | High | -| EP | 1 | 1/N in MoELayer| 1/N | Medium | -| PP | 1 (>1 with virtual pipeline) | 1/N | 1/N | Medium | -| CP | 1/N | 1 | 1/N (with distributed optimizer) | Medium | -| DP | 1 | 1 | 1/N (with distributed optimizer) | Low | - -For a specific model, the best parallel mapping varies based on the model architecture, trained sequence length and the hardware platform. -Here we provide some general rules to get better performance: -1. Keep the model parallism size as small as possible. - - For the large language models, model parallism is often required to prevent OOM, but it will bring communication overhead and hurt performance. - - With distributed optimizer, master weights and optimizer states will be sharded across all DP ranks with slight communication overhead. - So try to reduce the model parallism size and increase data parallism size when there are lots of free GPU memory during training. -2. Ensure the EPxTP communication winthin the NVLink domain. - - Communications of EP and TP should remain within the NVLink domain as much as possible, as both are communication-intensive. - - If the model is too large and requires scaling across multiple nodes, consider PP before TP and EP. See item 3 for details. -3. Use Pipeline Parallelism to scale the model further. - - Enable Virtual Pipeline Parallelism(VPP) to reduce pp bubbles when PP_size >= 2 by setting `num_layers_per_virtual_pipeline_stage`. - - VPP_size tuning: the legal values of vpp_size are all common divisors of num_layers/pp_size, E.g., num_layers=24, pp_size=4, then we can pick vpp_size from {1, 2, 3, 6}. The larger the vpp_size, the lower the pipeline bubbles, while the larger number of P2P communications between each PP stages. Empirically a value in the middle often gives the best trade-off. `VPP_size=num_layers / PP_size / num_layers_per_virtual_pipeline_stage` -4. Prefer EP over TP for the expert layer when possible: - - TP saves more memory than EP, but EP can achieve better GEMM efficiency and less communication overhead than TP. - - If EP size increased to the number of expert, the local token permutation/un-permutation for experts computation are omitted. - - Simplify the computation graph of MoE layers, more convenient for performing potential comm-computation overlapping. - - In practice, EP8TP1 is better than EP4TP2 for 8x7B. -5. Enable Context Parallelism for long context training. - - The efficiency of CP largely depends on whether its communication can be overlapped with computation. - - Emperically, use CP when sequence length >= 8K. - -### MoE Parallel Folding - -MoE Parallel Folding separates the MoE related parallel groups from Dense groups. -1. Traditional MoE parallel groups are entangled with dense by using a 5-dimension parallel group generator with default order `tp-cp-ep-dp-pp`. The EP group in MoE is a sub-group of DP in Attention. -2. 
With MoE Parallel Fodling, we use a parallel group generator with `tp-cp-dp-pp` for Attention, and another with `tp-ep-dp-pp` for MoE. The EPxTP group in MoE is a sub-group of DPxCPxTP in Attention. - -By setting `--expert-tensor-parallel-size`, we can set MoE-specific TP size. - -#### Advantages of MoE Parallel Folding -1. The CP and EP group are folded together by defualt, such that: - 1. It reduces the minimal required GPUs to turn on both CP and EP. For example, the traditional way with (CP=8, EP=8) needs at least 64 GPUs, for now it only requires 8 GPUs. - 2. The CP and EP communication can be both put in the NVLink domain. -2. We can set different TP sizes for Attention and MoE part. - 1. For MoE, EP is often more efficient than TP. But in the traditional way, only using EP can get OOM for most models. - 2. With MoE parallel folding, we can turn on TP for Attention part and setting TP=1 for MoE models, which often gets better MFU. - -### End-to-End Training Practice -**Use the latest NVIDIA PyTorch or NeMo Docker Image** -- [NGC PyTorch Image](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch) -- [NGC NeMo Image](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo) - -**Token Dispatcher Choices** -- Token Dispatcher sends tokens to the designated expert, involves tensor rearangement and communications. -- Dispatcher `allgather` is the default option. It achieves better performance and efficiency when only tensor parallelism is used or when the Top-k value is very large. -- Dispatcher `alltoall` is recommended if expert parallelism is applied. -- Dispatcher `alltoall_seq` is the original implementation of `alltoall` and is retained for potential compatibility risk. - -**Enable Communication Overlap** -- Enable `--overlap-param-gather` and `--overlap-grad-reduce` with distributed optimizer. -- Enable `--tp-comm-overlap` when TP>1. -- Enable p2p comm overlap when PP > 1 by setting `num_layers_per_virtual_pipeline_stage`. - -**Enable GroupedGEMM when num_local_experts>1 with `--moe-grouped-gemm`** -- GroupedGEMM has higher efficiency than vanilla sequential GEMMs for each expert. -- Recommend to use the TE version of Grouped GEMM (by upgrading to MCore v0.8 and TE v1.9), which support Gradient Accumulation Fusion and FP8 Training. - -**OOM Caused by Token Distribution Imbalance when Training From Scratch** -MoE suffers from a severe load imbalance issue when the router is under-trained, leading to the model easily running out of memory (OOM), which typically occurs in the first 100~300 steps when training from scratch. -Therefore, there are two recommended ways during the first 200 steps to avoid the OOM problem, which can be removed after the token distribution is more stable: -1. Increase the `expert-tensor-parallel-size` and decrease `expert-model-parallel-size` to replace EP with TP in MoELayer, this can prevent the load imbalancing between EP ranks. Since current ETP implementation has some memeory overhead, you can further enable activation recomputation only for MoE Layer by adding `--moe-layer-recompute`. -2. Setting capacity factor to a relatively small number like 1.0 by adding `--moe-token-capacity-factor 1.0`. 
- -### Reference Best Parallel Mapping - -Here are the reference parallel mappings of MCore v0.8 for Mixtral 8x7B and 8x22B models: -| Model | Vocab Size| Dispatcher | Precision | #GPUs | SEQ LEN | TP | EP | PP | VP | MBS | GBS | -|:-----------------------:|:---------:|:----------:|:---------:|:-----:|:-------:|:--:|:--:|:--:|:--:|:---:|:---:| -| Mixtral 8x7B(Dropless) | 32K | All-to-All | BF16 | 64 | 4096 | 1 | 8 | 4 | 8 | 1 | 256 | -| Mixtral 8x22B(Dropless) | 32K | All-to-All | BF16 | 128 | 4096 | 4 | 2 | 8 | 7 | 1 | 256 | - -Detailed Benchmark Information: -Server: -- 8xH100 80GB HBM3 -- NVLink 4th Generation -- InfiniBand 8x400 Gbit/s - -Docker Image: +# Megatron Core MoE + +Megatron-Core MoE provides comprehensive parallelism strategies, seamlessly integrating Expert Parallelism with tensor, data, sequence, and pipeline parallelism. With MCore v0.9, we've achieved remarkable performance of **468 TFLOPS** for Mixtral 8X7B bf16 training. Additionally, we support state-of-the-art MoE model architectures including DeepSeek-V3 and Qwen-MoE. + +### What's New +- **Support for DeepSeek-V3 architecture** + - Enable TP for MLA and DeepSeek-V3 + - Support aux-loss-free load balancing strategy + - Support node-limited routing +- **Support DeepSeek's DeepEP for efficient token dispatching and combining** +- Add fusion for token permutation and unpermutation +- Support Uneven virtual pipeline parallel split + +### Parallelism +- **Expert Parallelism** + - A specific method of parallelism for MoE models, where experts are partitioned onto different workers and each worker processes a different batch of training samples, each worker process one or more experts for each MoE layer. +- **3D Parallelism**: Data Parallelism, Tensor Parallelism, Pipeline Parallelism + - Note: When using MoE with expert parallelism and tensor parallelism, sequence parallelism must be enabled. +- **Context Parallelism**: + - Split the sequence dimension to support long context training. +- **Richer parallel mappings**: EP can be combined with DP/TP/PP/CP for handling larger MoE variants. +- **MoE Parallel Folding**: Support for setting different parallelism strategies for Attention and MoE components, enabling more flexible and efficient model sharding. See detailed documentation below. +- **Full distributed optimizer support.** + +### Router and Load Balancing +- Router type: + - Top-K MLP router +- Load Balancing algorithms: + - Sinkhorn (S-BASE) + - Aux loss / Load balancing loss + - Aux-loss-free load balancing strategy + +### Performance Optimizations +- (Experimental) **DeepEP** is integrated for efficient token communication in large-scale MoE training. +- GroupedGEMM when num local experts > 1 + - Supported dtype: bf16 + - Performance improvements for larger MoE models +- Enable `--tp-comm-overlap` for MoE +- FP8 training support + +### Token Dispatch Mechanism +- Dropless / No token drop +- Token drop, with or without padding to capacity +- Token permutation / Unpermutation fusion + +### Ease of use +- Checkpoint converter for Mixtral models, see the [example](https://github.com/NVIDIA/Megatron-LM/tree/main/examples/mixtral) for details. 
+- MoE Layer Frequency to customize the hybrid MoE/Dense layer architecture +- Distributed checkpoining +- Per-layer logging +- Upcycling Support + +## Upcoming features +- TopK Router Fusion +- Multi-token Prediction + +# User Guide + +## Usage + +### Quick Start +To train a top-2 MoE model with 8 experts and auxiliary loss, include the following arguments: + +```bash +--num-experts 8 +--expert-model-parallel-size 8 +--moe-grouped-gemm +--moe-permute-fusion +--moe-router-load-balancing-type aux_loss # options: aux_loss, sinkhorn, none. Default is aux_loss. +--moe-router-topk 2 +--moe-aux-loss-coeff 1e-2 +--use-distributed-optimizer +--moe-token-dispatcher-type alltoall +``` + +To enable the token drop mechanism, such as GShard and SwitchTransformer, include the following arguments: + +```bash +--moe-expert-capacity-factor 1.0 +--moe-pad-expert-input-to-capacity # Optional +``` + +The following figure illustrates differenting dropping strategies in MCore: + + + +1. The default dropless strategy will not drop or pad any token. +2. By setting `--moe-expert-capacity-factor`, the tokens exceed the capacity of expert will be dropped based on their selected probabilities. + The dropping is performed before the token exchange operation between EP ranks when EP > 1. + The formula of capacity is `capacity = num_tokens_per_rank * topk * capacity_factor / num_experts`. +3. By setting `--moe-pad-expert-input-to-capacity`, the experts with tokens less than capacity will be padded to the capacity. + +### Fine-tuning Mixtral Models +Megatron-Core has full support for Mixtral MoE models, and we provide the checkpoint converter for Mixtral models from huggingface format to MCore format. + + +### Distributed Checkpointing +MCore v0.7 introduced fully parallel and asynchronous saving capabilities to distributed checkpointing, +which addresses the issues of low efficiency in the traditional checkpoint saving methods. +It also solved the problem of incompatibility between checkpoints of different parallel mappings in the traditional format. +With the new distributed checkpointing solution, MCore can achieve flexible parallelism configurations by saving and loading the unified format checkpoints. +Compared to native PyTorch solution, MCore achieves up to 50x reduction in checkpointing overhead. + +From MCore v0.8, MoE supports Distributed Checkpointing, which means users can save and load with any combination of parallelism and it is currently available, including expert parallel. +1. Loading weight and distributed optimizer states with TPxCPxEPxPP resharding with SequentialMLP is supported in version 0.8. +2. GroupedMLP weight resharding is supported in version 0.8.0 and optimizer state resharding is supported in version 0.10.0. Switching between GroupedMLP/SequentialMLP when loading and saving is partially supported. +3. TEGroupedMLP has fully support on distributed checkpointing and is fully exchangable with SequentialMLP in version 0.9.0. +4. Optimizer state resharding cannot do across EP=1 with EP>1 due to the different optimizer type. + +Usage +- `--ckpt-format torch_dist` The main argument, it will attempt to save and load using distributed checkpointing. +- `--auto-detect-ckpt-format` With this, it can load both distributed checkpointing and legacy checkpointing. 
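+
+For illustration, a minimal sketch of how these two flags can be combined when migrating an existing run (the checkpoint paths below are hypothetical and the remaining training arguments are omitted):
+
+```bash
+# Auto-detect the format of the checkpoint in --load (legacy or torch_dist);
+# subsequent saves to --save are written in the distributed torch_dist format.
+--ckpt-format torch_dist
+--auto-detect-ckpt-format
+--load /path/to/existing_checkpoint   # hypothetical path
+--save /path/to/new_checkpoint        # hypothetical path
+```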
+ +Checkpoint compatibility across SequentialMLP, GroupedMLP, and TEGroupedMLP: +```text + ┌───────────────┐ ┌───────────────┐ ┌───────────────┐ + │ GroupedMLP │ │ SequentialMLP │ │ TEGroupedMLP │ + │ │ │ │ │ │ + │ │ │ │ │ │ + │ ┌───────────┐ │ │ ┌───────────┐ │ │ ┌───────────┐ │ + │ │legacy ckpt│ │ │ │legacy ckpt│ │ │ │legacy ckpt│ │ + │ └─────┬─────┘ │ │ └─────┬─────┘ │ │ └─────┬─────┘ │ + │ ▼ │ │ ▼ │ │ ▼ │ + │ ┌─────────┐ │ │ ┌─────────┐ │ │ ┌─────────┐ │ + │ │dist ckpt│ │ │ │dist ckpt│ │ │ │dist ckpt│ │ +┌──►│ │ weight │ │◄────────►│ │ weight │ │◄────────►│ │ weight │ │◄──┐ +│ │ └─────────┘ │ │ └─────────┘ │ │ └─────────┘ │ │ +└───┼───────────────┼──────────┼───────────────┼──────────┼───────────────┼───┘ + │┌─────────────┐│ │┌─────────────┐│ │┌─────────────┐│ + ││ dist ckpt ││ ││ dist ckpt ││ ││ dist ckpt ││ + ││optim states ││ ││optim states ││◄────────►││optim states ││ + │└─────────────┘│ │└─────────────┘│ │└─────────────┘│ + └───────────────┘ └───────────────┘ └───────────────┘ +``` + +Best practices for distributed checkpointing: +1. Convert a legacy checkpoint to a distributed checkpoint. To achieve this, we can add both `--ckpt-format torch_dist --auto-detect-ckpt-format`, then it will load the legacy one and save as the distributed checkpoint format later when the training progress tries to save checkpoints. +2. Convert checkpoint of the legacy GroupedMLP to TEGroupedMLP. This is only supported for the weight parts. To achieve this, we can use the above method to convert the legacy checkpoint to a distributed checkpoint of the legacy GroupedMLP. After updating the libraries and using TEGroupedMLP, we can directly load the previously saved checkpoint by adding argument `--no-load-optim`. + +### Shared Experts +MCore v0.9 introduced the shared expert feature. We can enable this feature by setting suitable `--moe-shared-expert-intermediate-size`. + +The parallelism patterns of the shared experts follow the settings of the dense part, i.e., the attention module. The shared experts are not distributed but replicated in EP ranks. + +We also have an experimental feature that tries to overlap the communications and computations in the shared experts and the dispatcher. +We can set `--moe-shared-expert-overlap` and use `alltoall` dispatcher to enable it. +The overlapping relies on the envirionment setting `CUDA_DEVICE_MAX_CONNECTIONS=1`. +The `AllGather` and `ReduceScatter` communications in the shared experts are overlapped with `permute`/`unpermute` in the dispatcher. +The `MLP` computation part in the shared experts are overlapped with the `AlltoAll` communications in the dispatcher. +Both the forward and the backward pass can overlap. But to get the overlapping in the backward pass, the PyTorch version should `>= 2.2.0`. + +### Upcycling +Use `--moe-use-upcycling` to enable upcycling, which loads the dense model from the `--load` directory, converts it to an MoE model at runtime, and starts training. The converted model is saved to the `--save` path before training begins. Upcycling is built on distributed checkpointing, supporting parallel modes different from existing dense checkpoints, such as arbitrary expert parallelism during upcycling. + +We currently only support the default upcycling strategy, which duplicates the existing MLP to multiple experts, with each expert starting from a copy of the MLP. In the future, we will support more state-of-the-art upcycling strategies, such as Granular upcycling from [our recent research work](https://arxiv.org/abs/2410.07524). 
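+
+A minimal sketch of the relevant arguments (the paths and expert count below are illustrative only):
+
+```bash
+# Load a dense checkpoint from --load, convert it to an 8-expert MoE model at
+# runtime, and save the converted checkpoint to --save before training begins.
+--moe-use-upcycling
+--num-experts 8
+--expert-model-parallel-size 8
+--load /path/to/dense_checkpoint   # hypothetical path
+--save /path/to/moe_checkpoint     # hypothetical path
+```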
+ +Note: The MoE model structure is defined through script arguments. All MoE-related arguments (such as `--num-experts`) can be customized; however, other model structure arguments must be consistent with those of the dense model. + +### Leverage DeepSeek's DeepEP for High-Performance Cross-Node Token Dispatching +- [DeepSeek-DeepEP](https://github.com/deepseek-ai/deepep) provides a highly optimized implementation for MoE token dispatching and combining operations, specifically designed for large-scale MoE training scenarios. +- DeepEP is particularly recommended for training large-scale, fine-grained MoE architectures such as DeepSeek-V3 and other advanced MoE models. +- To enable DeepEP in your training configuration, simply set `--moe-token-dispatcher-type=flex` and `--moe-enable-deepep` in your command line arguments. + +### MoE Related Arguments +| Item | Description | +| --- | --- | +| --num-experts | Number of Experts in MoE (None means no MoE) | +| --expert-model-parallel-size | Degree of expert model parallelism. Default is 1. | +| --moe-ffn-hidden-size | MoE Feed-Forward Network hidden size. Default is None. | + +
+ View all MoE related arguments. + +| Item | Description | +| --- | --- | +| --num-experts | Number of Experts in MoE (None means no MoE) | +| --expert-model-parallel-size | Degree of expert model parallelism. Default is 1. | +| --moe-ffn-hidden-size | MoE Feed-Forward Network hidden size. Default is None. | +| --expert-tensor-parallel-size | Degree of tensor model parallelism of expert layer. Default is same to --tensor-model-parallel-size. | +| --moe-layer-freq | Frequency between MoE layers and Dense layers. Accepts either: 1) An integer N for 1:N ratio (one expert layer for every N-1 dense layers), 2) A string "N" for the same ratio, or 3) A string with Python list expression for custom patterns like `([1]*3+[0]*1)*3` which gives [1,1,1,0,1,1,1,0,1,1,1,0] where 1=expert layer and 0=dense layer. Examples: `([0]+[1]*23)` for 1 dense layer followed by 23 experts layers, `([1]*3+[0]*2)*2` for three expert layers followed by two dense layers, repeated twice. Default is 1. | +| --moe-grouped-gemm | When there are multiple experts per rank, launch multiple local GEMM kernels in multiple streams to improve the utilization and performance with GroupedLinear in TransformerEngine. | +| --moe-router-load-balancing-type | Determines the load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss used in GShard and SwitchTransformer; "seq_aux_loss" corresponds to the load balancing loss used in DeepSeekV2, which computes the loss for each individual sample; "sinkhorn" corresponds to the balancing algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss". | +| --moe-router-topk | Number of experts to route to for each token. The default is 2. | +| --moe-router-score-function | Score function for MoE routing. Can be "softmax" or "sigmoid". Default is "softmax". | +| --moe-router-pre-softmax | Enable pre-softmax routing for MoE, which means softmax is before the top-k selection. By default, softmax is done after top-k. | +| --moe-router-num-groups | Number of groups to divide experts into for group-limited routing. When using group-limited routing: 1) Experts are divided into equal-sized groups, 2) For each token, a subset of groups are selected based on routing scores (sum of top-2 expert scores within each group), 3) From these selected groups, moe_router_topk experts are chosen. Two common use cases: 1) Device-limited routing: Set equal to expert parallel size (EP) to limit each token to experts on a subset of devices (See DeepSeek-V2: https://arxiv.org/pdf/2405.04434) 2) Node-limited routing: Set equal to number of nodes in EP group to limit each token to experts on a subset of nodes (See DeepSeek-V3: https://arxiv.org/pdf/2412.19437)) | +| --moe-router-group-topk | Number of selected groups for group-limited routing. | +| --moe-router-topk-scaling-factor | Scaling factor for routing score in top-k selection, only works when --moe-router-pre-softmax enabled. Defaults to None, which means no scaling. | +| --moe-router-enable-expert-bias | TopK routing with dynamic per-expert bias in the aux-loss-free load balancing strategy. The routing decision is based on the sum of the routing scores and the expert bias. See https://arxiv.org/abs/2408.15664 for details. | +| --moe-router-bias-update-rate | The expert bias is updated based on the number of assigned tokens to each expert in a global batch, where the bias is increased for experts with less assigned tokens and decreased for experts with more assigned tokens. 
Default is 1e-3 same as that used in DeepSeekV3. | +| --moe-aux-loss-coeff | Scaling coefficient for the aux loss: a starting value of 1e-2 is recommended. Default is 0.0. | +| --moe-z-loss-coeff | Scaling coefficient for the z-loss: a starting value of 1e-3 is recommended. Default is None. | +| --moe-input-jitter-eps | Add noise to the input tensor by applying jitter with a specified epsilon value. Default is None. | +| --moe-token-dispatcher-type | Determines the token dispatcher type. Choices are "allgather", "alltoall" and "alltoall_seq". Default is "allgather". We recommend using 'alltoall' if expert parallelism is applied. We have upgraded the "alltoall" dispatcher in place during MCore v0.9, while retaining the original implementation, renamed as "alltoall_seq".| +| --moe-enable-deepep | (Experimental) Enable DeepSeek/DeepEP for efficient token dispatching and combine in MoE models. Only works with flex token dispatcher by setting --moe-token-dispatcher-type=flex. | +| --moe-per-layer-logging | Enable per-layer logging for MoE, currently supports auxiliary loss and z loss. | +| --moe-expert-capacity-factor | The capacity factor for each expert, None means no token will be dropped. Default is None. | +| --moe-pad-expert-input-to-capacity | Pads the input for each expert to match the expert capacity length, effective only after the --moe-expert-capacity-factor is set. | +| --moe-token-drop-policy | The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with the lowest probabilities will be dropped. If "position", tokens at the end of each batch will be dropped. | +| --moe-layer-recompute | Enable activation checkpointing for moe_layer, should be used when memory is not sufficient. | +| --moe-permute-fusion | Fuse token rearrangement ops during token dispatching. | +| --moe-shared-expert-intermediate-size | Set shared expert total ffn hidden size. It should be equal to `num_shared_experts * ffn_size_of_each_shared_expert` if there are multiple shared experts. None means no shared expert. | +| --moe-shared-expert-overlap | (Experimental, may changed) If this is set, the communications/computations in the shared experts and the dispatcher will overlap (The `alltoall` dispatcher is needed.) Otherwise, the shared expert runs after the routed experts. | +| --moe-use-upcycling | Load the dense model checkpoint, convert it into an MoE model at runtime and start training. The converted model will be saved to the path specified by `--save` before training begins. Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model.| + +
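+
+As an illustration only (the values below are hypothetical, not tuned recommendations), the group-limited routing arguments above might be combined as follows for a node-limited routing setup:
+
+```bash
+--moe-router-topk 8
+--moe-router-num-groups 4       # e.g. number of nodes in the EP group
+--moe-router-group-topk 2       # tokens are routed within 2 selected groups
+--moe-router-score-function sigmoid
+--moe-router-enable-expert-bias
+```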
+ +## MoE training example: +
+Click here. + +```bash +#!/bin/bash + +# Runs Mixtral 8x7B model on 32 H100/A100 GPUs +# The Dropless MoE suffers from an imbalanced token distribution at the early stage of training (the first few hundred iterations), which may lead to poor performance and out-of-memory (OOM) issues. +# To check the performance of a Dropless MoE model, we should run the model for at least 500 iterations or resume from trained checkpoints. + +export CUDA_DEVICE_MAX_CONNECTIONS=1 + +GPUS_PER_NODE=8 +# Change for multinode config +MASTER_ADDR=${MASTER_ADDR:-"localhost"} +MASTER_PORT=${MASTER_PORT:-"6000"} +NNODES=${NNODES:-"1"} +NODE_RANK=${RANK:-"0"} +WORLD_SIZE=$(($GPUS_PER_NODE*$NNODES)) + +CHECKPOINT_PATH=$1 +TOKENIZER_MODEL=$2 +DATA_PATH=$3 + +DISTRIBUTED_ARGS=( + --nproc_per_node $GPUS_PER_NODE + --nnodes $NNODES + --node_rank $NODE_RANK + --master_addr $MASTER_ADDR + --master_port $MASTER_PORT +) + +MODEL_ARGS=( + --disable-bias-linear + --seq-length 4096 + --max-position-embeddings 32768 + --num-layers 32 + --hidden-size 4096 + --ffn-hidden-size 14336 + --num-attention-heads 32 + --init-method-std 0.01 + --attention-dropout 0.0 + --hidden-dropout 0.0 + --normalization RMSNorm + --position-embedding-type rope + --swiglu + --untie-embeddings-and-output-weights + --group-query-attention + --num-query-groups 8 + --no-masked-softmax-fusion + --no-position-embedding +) + +MOE_ARGS=( + --num-experts 8 + --expert-model-parallel-size 8 + --moe-router-load-balancing-type aux_loss # options: aux_loss, sinkhorn, None. Default is aux_loss. + --moe-router-topk 2 + --moe-aux-loss-coeff 1e-2 + --moe-grouped-gemm + --moe-permute-fusion +) + +DATA_ARGS=( + --tokenizer-type Llama2Tokenizer + --tokenizer-model ${TOKENIZER_MODEL} + --data-path $DATA_PATH + --split 99990,8,2 +) + +TRAINING_ARGS=( + --micro-batch-size 1 + --global-batch-size 128 + --lr 1e-4 + --train-iters 500000 + --lr-decay-iters 320000 + --lr-decay-style cosine + --min-lr 1.0e-5 + --weight-decay 0.1 + --lr-warmup-iters 500 + --clip-grad 1.0 + --bf16 + --overlap-grad-reduce + --overlap-param-gather +) + +MODEL_PARALLEL_ARGS=( + --tensor-model-parallel-size 1 + --pipeline-model-parallel-size 4 + --num-layers-per-virtual-pipeline-stage 8 + --sequence-parallel + --use-distributed-optimizer +) + +LOGGING_ARGS=( + --log-interval 1 \ + --save-interval 10000 \ + --eval-interval 1000 \ + --eval-iters 10 \ + --save $CHECKPOINT_PATH \ + --load $CHECKPOINT_PATH \ + --tensorboard-dir "${CHECKPOINT_PATH}/tensorboard" \ + --no-load-optim \ + --no-load-rng +) + +if [ -n "${WANDB_API_KEY}" ]; then + LOGGING_ARGS+=( + --wandb-project ${WANDB_PROJECT:-"Mixtral-Finetuning"} + --wandb-exp-name ${WANDB_NAME:-"Mixtral_8x7B"} + ) +fi + +torchrun ${DISTRIBUTED_ARGS[@]} pretrain_gpt.py \ + ${MODEL_ARGS[@]} \ + ${MOE_ARGS[@]} \ + ${DATA_ARGS[@]} \ + ${TRAINING_ARGS[@]} \ + ${MODEL_PARALLEL_ARGS[@]} \ + ${LOGGING_ARGS[@]} +``` +
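+Assuming the script above is saved as `train_mixtral_8x7b.sh` (a hypothetical filename), a 4-node launch matching its parallel sizes (TP=1, PP=4, EP=8 on 32 GPUs) might look like the sketch below; the hostnames and paths are placeholders:
+
+```bash
+# Run on every node, setting RANK to that node's index (0..3) and MASTER_ADDR to node 0.
+MASTER_ADDR=<node0-hostname> MASTER_PORT=6000 NNODES=4 RANK=<node_rank> \
+bash train_mixtral_8x7b.sh \
+    /path/to/checkpoints/mixtral-8x7b \
+    /path/to/tokenizer.model \
+    /path/to/my-corpus_text_document
+```
+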
+
+
+# Performance Best Practice
+
+### Tuning Guide of Parallel Mappings
+
+To find a good parallel mapping that helps you achieve high throughput for a new model, there are some general rules that can help. Here is an overview of the properties of each parallel strategy in different aspects.
+
+| Parallel Strategy | Peak Activation Memory | Weight Memory | Optimizer states | Communication (Per-Layer) |
+|:-----------------:|:-------------------------------:|:--------------:|:---------------------------------:|:-------------------------:|
+| TP | 1/N (with SP on) | 1/N | 1/N | High |
+| EP | 1 | 1/N in MoELayer | 1/N | Medium |
+| PP | 1 (>1 with virtual pipeline) | 1/N | 1/N | Medium |
+| CP | 1/N | 1 | 1/N (with distributed optimizer) | Medium |
+| DP | 1 | 1 | 1/N (with distributed optimizer) | Low |
+
+For a specific model, the best parallel mapping varies with the model architecture, the training sequence length, and the hardware platform.
+Here we provide some general rules for getting better performance:
+1. Keep the model parallelism size as small as possible.
+    - For large language models, model parallelism is often required to prevent OOM, but it brings communication overhead and hurts performance.
+    - With the distributed optimizer, master weights and optimizer states are sharded across all DP ranks with only slight communication overhead.
+    So try to reduce the model parallelism size and increase the data parallelism size when there is plenty of free GPU memory during training.
+2. Keep the EPxTP communication within the NVLink domain.
+    - Communications of EP and TP should remain within the NVLink domain as much as possible, as both are communication-intensive.
+    - If the model is too large and requires scaling across multiple nodes, consider PP before TP and EP. See item 3 for details.
+3. Use Pipeline Parallelism to scale the model further.
+    - Enable Virtual Pipeline Parallelism (VPP) to reduce PP bubbles when PP_size >= 2 by setting `num_layers_per_virtual_pipeline_stage`.
+    - VPP_size tuning: the legal values of vpp_size are all common divisors of num_layers/pp_size. E.g., with num_layers=24 and pp_size=4, we can pick vpp_size from {1, 2, 3, 6}. The larger the vpp_size, the lower the pipeline bubbles, but the larger the number of P2P communications between PP stages. Empirically, a value in the middle often gives the best trade-off. `VPP_size = num_layers / PP_size / num_layers_per_virtual_pipeline_stage` (see the sketch after this list).
+4. Prefer EP over TP for the expert layer when possible:
+    - TP saves more memory than EP, but EP can achieve better GEMM efficiency and less communication overhead than TP.
+    - If the EP size is increased to the number of experts, the local token permutation/un-permutation for expert computation is omitted.
+    - EP also simplifies the computation graph of the MoE layers, which makes it more convenient to overlap communication with computation.
+    - In practice, EP8TP1 is better than EP4TP2 for 8x7B.
+5. Enable Context Parallelism for long-context training.
+    - The efficiency of CP largely depends on whether its communication can be overlapped with computation.
+    - Empirically, use CP when the sequence length >= 8K.
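+
+A quick way to enumerate the legal VPP configurations mentioned in item 3 is sketched below; the layer and PP counts are the example values from that item:
+
+```bash
+#!/bin/bash
+# Legal vpp_size values are the divisors of num_layers / pp_size.
+NUM_LAYERS=24
+PP_SIZE=4
+LAYERS_PER_STAGE=$((NUM_LAYERS / PP_SIZE))   # 6 layers per PP stage
+
+for VPP_SIZE in $(seq 1 ${LAYERS_PER_STAGE}); do
+    if (( LAYERS_PER_STAGE % VPP_SIZE == 0 )); then
+        echo "vpp_size=${VPP_SIZE} -> --num-layers-per-virtual-pipeline-stage $((LAYERS_PER_STAGE / VPP_SIZE))"
+    fi
+done
+# Prints vpp_size 1, 2, 3, 6 with per-virtual-stage layer counts 6, 3, 2, 1.
+```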
+
+### MoE Parallel Folding
+
+MoE Parallel Folding separates the MoE-related parallel groups from the Dense groups.
+1. Traditional MoE parallel groups are entangled with the dense groups by using a 5-dimension parallel group generator with the default order `tp-cp-ep-dp-pp`. The EP group in MoE is a sub-group of DP in Attention.
+2. With MoE Parallel Folding, we use a parallel group generator with `tp-cp-dp-pp` for Attention, and another with `tp-ep-dp-pp` for MoE. The EPxTP group in MoE is a sub-group of DPxCPxTP in Attention.
+
+By setting `--expert-tensor-parallel-size`, we can set an MoE-specific TP size.
+
+#### Advantages of MoE Parallel Folding
+1. The CP and EP groups are folded together by default, such that:
+    1. It reduces the minimum number of GPUs required to turn on both CP and EP. For example, the traditional way with (CP=8, EP=8) needs at least 64 GPUs, whereas now it only requires 8 GPUs.
+    2. The CP and EP communication can both be kept within the NVLink domain.
+2. We can set different TP sizes for the Attention and MoE parts.
+    1. For MoE, EP is often more efficient than TP. But in the traditional way, using only EP causes OOM for most models.
+    2. With MoE parallel folding, we can turn on TP for the Attention part and set TP=1 for the MoE part, which often gives better MFU.
+
+### End-to-End Training Practice
+**Use the latest NVIDIA PyTorch or NeMo Docker Image**
+- [NGC PyTorch Image](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch)
+- [NGC NeMo Image](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo)
+
+**Token Dispatcher Choices**
+- The token dispatcher sends tokens to the designated experts; this involves tensor rearrangement and communication.
+- Dispatcher `allgather` is the default option. It achieves better performance and efficiency when only tensor parallelism is used or when the Top-k value is very large.
+- Dispatcher `alltoall` is recommended if expert parallelism is applied.
+- Dispatcher `alltoall_seq` is the original implementation of `alltoall` and is retained to mitigate potential compatibility risks.
+- Dispatcher `flex` is a new dispatcher that decouples the communication group from model parallelism. Currently, only the DeepEP backend is supported, enabled by setting `--moe-enable-deepep`.
+
+**Enable Communication Overlap**
+- Enable `--overlap-param-gather` and `--overlap-grad-reduce` with the distributed optimizer.
+- Enable `--tp-comm-overlap` when TP > 1.
+- Enable P2P comm overlap when PP > 1 by setting `num_layers_per_virtual_pipeline_stage`.
+
+**Enable GroupedGEMM when num_local_experts > 1 with `--moe-grouped-gemm`**
+- GroupedGEMM has higher efficiency than vanilla sequential GEMMs for each expert.
+- We recommend the TE version of Grouped GEMM (by upgrading to MCore v0.8 and TE v1.9), which supports Gradient Accumulation Fusion and FP8 training.
+
+**OOM Caused by Token Distribution Imbalance when Training From Scratch**
+MoE suffers from a severe load imbalance issue when the router is under-trained, which makes the model prone to running out of memory (OOM); this typically occurs in the first 100~300 steps when training from scratch.
+Therefore, there are two recommended mitigations (sketched below) for roughly the first 200 steps, which can be removed once the token distribution becomes more stable:
+1. Increase `expert-tensor-parallel-size` and decrease `expert-model-parallel-size` to replace EP with TP in the MoELayer; this prevents load imbalance between EP ranks. Since the current ETP implementation has some memory overhead, you can further enable activation recomputation only for the MoE layer by adding `--moe-layer-recompute`.
+2. Set the capacity factor to a relatively small number, such as 1.0, by adding `--moe-expert-capacity-factor 1.0`.
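+
+For example, the two mitigations above map to command-line arguments roughly as sketched below (the array name and the ETP/EP sizes are placeholders to tune for your setup); they can be dropped once the router has stabilized:
+
+```bash
+# Mitigation 1: replace EP with ETP in the MoE layer and recompute MoE activations.
+EARLY_STAGE_MOE_ARGS=(
+    --expert-tensor-parallel-size 8   # placeholder: raise ETP ...
+    --expert-model-parallel-size 1    # ... while lowering EP
+    --moe-layer-recompute             # offset the extra ETP memory overhead
+)
+
+# Mitigation 2: drop overflow tokens with a small capacity factor.
+# EARLY_STAGE_MOE_ARGS=(
+#     --moe-expert-capacity-factor 1.0
+# )
+```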
+ +**Leverage DeepSeek's DeepEP for High-Performance Cross-Node Token Dispatching** +- The primary advantage of DeepEP is its cross-node token communication efficiency, which delivers substantial performance improvements when deploying expert parallelism across multiple nodes with large TopK values. +- To enable DeepEP in your training configuration, simply set `--moe-token-dispatcher-type=flex` and `--moe-enable-deepep` in your command line arguments. + +### Reference Best Parallel Mapping + +Here are the reference parallel mappings of MCore v0.8 for Mixtral 8x7B and 8x22B models: +| Model | Vocab Size| Dispatcher | Precision | #GPUs | SEQ LEN | TP | EP | PP | VP | MBS | GBS | +|:-----------------------:|:---------:|:----------:|:---------:|:-----:|:-------:|:--:|:--:|:--:|:--:|:---:|:---:| +| Mixtral 8x7B(Dropless) | 32K | All-to-All | BF16 | 64 | 4096 | 1 | 8 | 4 | 8 | 1 | 256 | +| Mixtral 8x22B(Dropless) | 32K | All-to-All | BF16 | 128 | 4096 | 4 | 2 | 8 | 7 | 1 | 256 | + +Detailed Benchmark Information: +Server: +- 8xH100 80GB HBM3 +- NVLink 4th Generation +- InfiniBand 8x400 Gbit/s + +Docker Image: - PyTorch 24.09 with TransformerEngine v1.11 \ No newline at end of file diff --git a/megatron/core/transformer/moe/experts.py b/megatron/core/transformer/moe/experts.py index dbb2590..811ef78 100644 --- a/megatron/core/transformer/moe/experts.py +++ b/megatron/core/transformer/moe/experts.py @@ -1,853 +1,889 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import itertools -from copy import deepcopy -from functools import partial, wraps -from math import ceil -from typing import Optional, Tuple - -import torch -import torch.nn.functional as F -from torch.nn.parameter import Parameter - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing import ShardedTensor -from megatron.core.dist_checkpointing.mapping import ( - LocalNonpersistentObject, - ReplicaId, - ShardedStateDict, - ShardedTensorFactory, -) -from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding -from megatron.core.fusions.fused_bias_geglu import bias_geglu_impl -from megatron.core.fusions.fused_bias_gelu import bias_gelu_impl -from megatron.core.fusions.fused_bias_swiglu import bias_swiglu_impl -from megatron.core.jit import jit_fuser -from megatron.core.tensor_parallel.layers import ( - _initialize_affine_weight_cpu, - _initialize_affine_weight_gpu, -) -from megatron.core.tensor_parallel.utils import divide -from megatron.core.transformer.mlp import MLP, MLPSubmodules, apply_swiglu_sharded_factory -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.moe import grouped_gemm_util as gg -from megatron.core.transformer.spec_utils import build_module -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.utils import make_sharded_object_for_checkpoint - -try: - - from megatron.core.extensions.transformer_engine import Fp8Padding, Fp8Unpadding - - HAVE_TE = True - -except ImportError: - - HAVE_TE = False - - -def expert_dist_ckpt_decorator(func): - """Decorator of shared_state_dict in expert layer for distributed checkpoint. - - Since !1940, the TP size for Expert layer can be different with Attention. - To make distributed checkpoint work in such cases, we use a decorator to - replace the default TP parallel states with expert-TP parallel states. 
- """ - - @wraps(func) - def wrapper(*args, **kwargs): - # Store original states - original_rank = parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK - original_size = parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE - original_group = parallel_state._TENSOR_MODEL_PARALLEL_GROUP - try: - # Set new states - parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = ( - parallel_state.get_expert_tensor_parallel_rank() - ) - parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = ( - parallel_state.get_expert_tensor_parallel_world_size() - ) - parallel_state._TENSOR_MODEL_PARALLEL_GROUP = ( - parallel_state.get_expert_tensor_parallel_group() - ) - - # Execute the function - result = func(*args, **kwargs) - finally: - # Restore original states - parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = original_rank - parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = original_size - parallel_state._TENSOR_MODEL_PARALLEL_GROUP = original_group - return result - - return wrapper - - -class GroupedMLP(MegatronModule): - """An efficient implementation of the Experts layer using GroupedGEMM. - - Executes multiple experts in parallel to maximize computational efficiency. - """ - - def __init__(self, num_local_experts: int, config: TransformerConfig): - super().__init__(config=config) - self.config: TransformerConfig = config - self.num_local_experts = num_local_experts - gg.assert_grouped_gemm_is_available() - assert ( - config.add_bias_linear == False - ), "bias not supported in Grouped GEMM yet, please set '--disable-bias-linear' instead." - - self.expert_parallel = config.expert_model_parallel_size > 1 - if self.config.gated_linear_unit: - if self.config.activation_func not in (F.silu, F.gelu): - raise ValueError("Activation function must be silu or gelu when using GroupedMLP.") - - @jit_fuser - def glu(x): - x = torch.chunk(x, 2, dim=-1) - return self.config.activation_func(x[0]) * x[1] - - self.activation_func = glu - else: - self.activation_func = self.config.activation_func - - # How many feature each rank holds for fc1 and fc2, respectively. - tp_size = parallel_state.get_expert_tensor_parallel_world_size() - tp_rank = parallel_state.get_expert_tensor_parallel_rank() - - fc1_output_size = self.config.moe_ffn_hidden_size * self.num_local_experts - if config.gated_linear_unit: - # Project to 4h. If using swiglu double the output width, - # see https://arxiv.org/pdf/2002.05202.pdf - fc1_output_size *= 2 - fc1_output_size_per_partition = divide(fc1_output_size, tp_size) - - fc2_input_size = self.config.moe_ffn_hidden_size * self.num_local_experts - fc2_input_size_per_partition = divide(fc2_input_size, tp_size) - - # Note: The current kernel implementations of grouped_gemm - # does not support transposition with CUTLASS grouped GEMM - # (https://github.com/fanshiqing/grouped_gemm/blob/main/csrc/grouped_gemm.cu#L355-L358) - # and as a result we avoid allocate the transpose of weights. - # Initialize weight. 
- if config.use_cpu_initialization: - self.weight1 = Parameter( - torch.empty( - self.config.hidden_size, - fc1_output_size_per_partition, - dtype=config.params_dtype, - ) - ) - self.weight2 = Parameter( - torch.empty( - fc2_input_size_per_partition, self.config.hidden_size, dtype=config.params_dtype - ) - ) - if config.perform_initialization: - _initialize_affine_weight_cpu( - self.weight1, - self.config.hidden_size, - fc1_output_size, - fc1_output_size_per_partition, - partition_dim=1, - init_method=config.init_method, - params_dtype=config.params_dtype, - rank=tp_rank, - world_size=tp_size, - ) - _initialize_affine_weight_cpu( - self.weight2, - fc2_input_size, - self.config.hidden_size, - fc2_input_size_per_partition, - partition_dim=0, - init_method=config.output_layer_init_method, - params_dtype=config.params_dtype, - rank=tp_rank, - world_size=tp_size, - ) - else: - self.weight1 = Parameter( - torch.empty( - self.config.hidden_size, - fc1_output_size_per_partition, - device=torch.cuda.current_device(), - dtype=config.params_dtype, - ) - ) - self.weight2 = Parameter( - torch.empty( - fc2_input_size_per_partition, - self.config.hidden_size, - device=torch.cuda.current_device(), - dtype=config.params_dtype, - ) - ) - if config.perform_initialization: - _initialize_affine_weight_gpu( - self.weight1, config.init_method, partition_dim=1, is_expert=True - ) - _initialize_affine_weight_gpu( - self.weight2, config.output_layer_init_method, partition_dim=0, is_expert=True - ) - setattr(self.weight1, 'allreduce', not self.expert_parallel) - setattr(self.weight2, 'allreduce', not self.expert_parallel) - - def remove_extra_states_check(self, incompatible_keys): - """ - Remove _extra_state from unexpected keys. - These keys are for dist ckpt compatibility with SequentialMLP. - """ - keys = deepcopy(incompatible_keys.unexpected_keys) - for key in keys: - if '_extra_state' in key: - incompatible_keys.unexpected_keys.remove(key) - - self.register_load_state_dict_post_hook(remove_extra_states_check) - - def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor): - """Forward step of the GroupedMLP.""" - if permuted_local_hidden_states.nelement() != 0: - # Reshape the weights for the grouped GEMMs. - w1 = self.weight1.view(self.num_local_experts, self.config.hidden_size, -1) - w2 = self.weight2.view(self.num_local_experts, -1, self.config.hidden_size) - - fc1_output = gg.ops.gmm( - permuted_local_hidden_states, w1, tokens_per_expert, trans_b=False - ) - - intermediate_parallel = self.activation_func(fc1_output) - - fc2_output = gg.ops.gmm(intermediate_parallel, w2, tokens_per_expert, trans_b=False) - else: - # No token is allocated for local experts. - assert torch.count_nonzero(tokens_per_expert) == 0 - - # Make sure params of experts still have gradients even given zero tokens. - w1 = self.weight1.view(self.config.hidden_size, -1) - w2 = self.weight2.view(-1, self.config.hidden_size) - h = torch.matmul(permuted_local_hidden_states, w1) - h = self.activation_func(h) - h = torch.matmul(h, w2) - - fc2_output = h - - return fc2_output, None - - @expert_dist_ckpt_decorator - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """ - Maps local expert to global experts. - The sharded_state_dict for the weight parts are compatible with the SequentialMLP, - whereas the optimizer states are not due to the limitation from weight transposing. - That is, for finetuning scenario, the checkpoint is compatible with the SequentialMLP. 
- """ - sharded_state_dict = {} - num_global_experts = ( - parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts - ) - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) - tp_size = parallel_state.get_tensor_model_parallel_world_size() - tp_rank = parallel_state.get_tensor_model_parallel_rank() - - prepend_axis_num = len(sharded_offsets) - replica_id = (0, 0, parallel_state.get_expert_data_parallel_rank()) - - local_ffn_dim_size = ( - self.weight2.numel() // self.num_local_experts // self.config.hidden_size - ) - - @torch.no_grad() - def sh_ten_build_fn( - key: str, - t: torch.Tensor, - replica_id: ReplicaId, - flattened_range: Optional[slice], - tp_axis: int, - with_glu: bool, - ): - # TODO: write a generic implementation to cover both cases with and without GLU - if tp_axis == 1: - # weight1 - if with_glu: - last_dim_size = local_ffn_dim_size * 2 - else: - last_dim_size = local_ffn_dim_size - real_shape = (self.num_local_experts, self.config.hidden_size, last_dim_size) - elif tp_axis == 0: - # weight2 - real_shape = (self.num_local_experts, local_ffn_dim_size, self.config.hidden_size) - assert with_glu == False - else: - raise ValueError("tp_axis should be 0 or 1.") - if flattened_range is None: - # weights - t = t.view(real_shape).transpose(-1, -2) - # change tp_axis due to the transposing - tp_axis = 1 - tp_axis - if with_glu: - local_tensors = torch.chunk(t, 2, -2) - sub_states = [ - ShardedTensor.from_rank_offsets( - key, - local_tensors[0].contiguous(), - *sharded_offsets, - ( - prepend_axis_num, - parallel_state.get_expert_model_parallel_rank(), - parallel_state.get_expert_model_parallel_world_size(), - ), - (prepend_axis_num + 1, tp_rank, tp_size * 2), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - ), - ShardedTensor.from_rank_offsets( - key, - local_tensors[1].contiguous(), - *sharded_offsets, - ( - prepend_axis_num, - parallel_state.get_expert_model_parallel_rank(), - parallel_state.get_expert_model_parallel_world_size(), - ), - (prepend_axis_num + 1, tp_size + tp_rank, tp_size * 2), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - ), - ] - else: - sub_states = ShardedTensor.from_rank_offsets( - key, - t.contiguous(), - *sharded_offsets, - ( - prepend_axis_num, - parallel_state.get_expert_model_parallel_rank(), - parallel_state.get_expert_model_parallel_world_size(), - ), - (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - ) - else: - # flattened optmizer states - # the non-flattened weight shape is [local_expert_num, hidden_size, ffn_size] - # - # For the case without GLU, it is straightforward, we just need to split each - # expert along the dim-0. - # - # For the case with GLU, we need to split the experts along dim-0 and split the - # two tensors for GLU along dim-2. - # To split along the non-first dim, we need to chunk the tensor into small pieces, - # since they belong to different tenors and are interleaved in the flattened space. - # Refer to the below sketch graph. - # |................| |........|........| - # |............FFFF| |........|....BBBB| - # |FFFFFFFFFFFFFFFF| -> |AAAAAAAA|BBBBBBBB| - # |FFFFFFFFFFFFFFFF| |AAAAAAAA|BBBBBBBB| - # |FF..............| |AA......|........| - # |................| |........|........| - # - # But too many chunks have severe performance issues. 
We merge these chunks during - # the save process along with some length information and recover them during the - # load process. - assert t.ndim == 1, (key, t.shape) - if with_glu: - non_flat_local_shape = (1, self.config.hidden_size, local_ffn_dim_size) - chunk_numel = local_ffn_dim_size - sub_states = [] - start_pos = 0 - for local_expert_idx in range(self.num_local_experts): - first_glu_idx = -1 - w_start_range = -1 - v_start_range = -1 - w_tensors = [] - v_tensors = [] - w_lens = [] - v_lens = [] - for input_dim_idx in range(self.config.hidden_size): - for glu_idx in range(2): - local_idx = ( - local_expert_idx * self.config.hidden_size * 2 - + input_dim_idx * 2 - + glu_idx - ) - if ( - flattened_range.start < chunk_numel * (local_idx + 1) - and flattened_range.stop > chunk_numel * local_idx - ): - if first_glu_idx == -1: - first_glu_idx = glu_idx - end_pos = min( - flattened_range.stop, - chunk_numel * (local_idx + 1) - flattened_range.start, - ) - local_tensor = t[start_pos:end_pos] - local_flattened_range = slice( - max(0, flattened_range.start - chunk_numel * local_idx), - min( - chunk_numel, - flattened_range.stop - chunk_numel * local_idx, - ), - ) - assert ( - len(local_tensor) - == local_flattened_range.stop - local_flattened_range.start - ) - start_pos += len(local_tensor) - expert_global_idx = ( - local_expert_indices_offset + local_expert_idx - ) - if glu_idx == 0: - w_tensors.append(local_tensor) - w_lens.append(len(local_tensor)) - if w_start_range == -1: - w_start_range = max( - 0, flattened_range.start - chunk_numel * local_idx - ) - else: - v_tensors.append(local_tensor) - v_lens.append(len(local_tensor)) - if v_start_range == -1: - v_start_range = max( - 0, flattened_range.start - chunk_numel * local_idx - ) - sub_states.append( - { - 'w_tensors': ShardedTensor.from_rank_offsets_flat( - key, - ( - torch.cat(w_tensors, -1) - if len(w_tensors) > 0 - else torch.Tensor() - ), - non_flat_local_shape, - *sharded_offsets, - (prepend_axis_num, expert_global_idx, num_global_experts), - (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size * 2), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - flattened_range=slice( - w_start_range, w_start_range + sum(w_lens) - ), - ), - 'w_lens': LocalNonpersistentObject(w_lens), - 'v_tensors': ShardedTensor.from_rank_offsets_flat( - key, - ( - torch.cat(v_tensors, -1) - if len(v_tensors) > 0 - else torch.Tensor() - ), - non_flat_local_shape, - *sharded_offsets, - (prepend_axis_num, expert_global_idx, num_global_experts), - ( - prepend_axis_num + 1 + tp_axis, - tp_rank + tp_size, - tp_size * 2, - ), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - flattened_range=slice( - v_start_range, v_start_range + sum(v_lens) - ), - ), - 'v_lens': LocalNonpersistentObject(v_lens), - 'first_glu_idx': LocalNonpersistentObject(first_glu_idx), - } - ) - else: - non_flat_local_shape = ( - real_shape[0] // self.num_local_experts, - *real_shape[1:], - ) - chunk_numel = local_ffn_dim_size * self.config.hidden_size - sub_states = [] - start_pos = 0 - for local_expert_idx in range(self.num_local_experts): - if ( - flattened_range.start < chunk_numel * (local_expert_idx + 1) - and flattened_range.stop > chunk_numel * local_expert_idx - ): - end_pos = min( - flattened_range.stop, - chunk_numel * (local_expert_idx + 1) - flattened_range.start, - ) - local_tensor = t[start_pos:end_pos] - local_flattened_range = slice( - max(0, flattened_range.start - chunk_numel * local_expert_idx), - min( - chunk_numel, - flattened_range.stop - chunk_numel 
* local_expert_idx, - ), - ) - assert ( - len(local_tensor) - == local_flattened_range.stop - local_flattened_range.start - ) - start_pos += len(local_tensor) - expert_global_idx = local_expert_indices_offset + local_expert_idx - sub_states.append( - ShardedTensor.from_rank_offsets_flat( - key, - local_tensor, - non_flat_local_shape, - *sharded_offsets, - (prepend_axis_num, expert_global_idx, num_global_experts), - (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size), - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - flattened_range=local_flattened_range, - ) - ) - return sub_states - - @torch.no_grad() - def sh_ten_merge_fn(sub_state_dict, tp_axis: int, with_glu: bool): - if tp_axis == 1: - # weight1 - weight_shape = (self.config.hidden_size, -1) - elif tp_axis == 0: - # weight2 - weight_shape = (-1, self.config.hidden_size) - assert with_glu == False - else: - raise ValueError("tp_axis should be 0 or 1.") - if isinstance(sub_state_dict, list) and isinstance(sub_state_dict[0], dict): - # flattened tensor with glu - res = [] - for local_expert_dict in sub_state_dict: - w_tensors = torch.split( - local_expert_dict['w_tensors'], local_expert_dict['w_lens'] - ) - v_tensors = torch.split( - local_expert_dict['v_tensors'], local_expert_dict['v_lens'] - ) - first_glu_idx = local_expert_dict['first_glu_idx'] - if first_glu_idx == 0: - res += [ - x for x in itertools.chain(*itertools.zip_longest(w_tensors, v_tensors)) - ] - else: - res += [ - x for x in itertools.chain(*itertools.zip_longest(v_tensors, w_tensors)) - ] - return torch.cat(res) - elif isinstance(sub_state_dict, list) and sub_state_dict[0].ndim == 1: - # flattened tensor without glu - return torch.cat(sub_state_dict) - else: - if with_glu: - sub_state_dict = torch.cat(sub_state_dict, -2) - return sub_state_dict.transpose(-1, -2).reshape(weight_shape) - - state_dict = self.state_dict(prefix='', keep_vars=True) - for name, tensor in state_dict.items(): - if name == 'weight1': - tp_axis = 1 - with_glu = self.config.gated_linear_unit - wkey = f'{prefix}experts.linear_fc1.weight' - else: - tp_axis = 0 - with_glu = False - wkey = f'{prefix}experts.linear_fc2.weight' - sharded_state_dict[f'{prefix}{name}'] = ShardedTensorFactory( - wkey, - tensor, - partial(sh_ten_build_fn, tp_axis=tp_axis, with_glu=with_glu), - partial(sh_ten_merge_fn, tp_axis=tp_axis, with_glu=with_glu), - replica_id, - ) - - replica_id = ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_expert_data_parallel_rank(), - ) - # Add fake _extra_state to be compatible with SequentialMLP - for expert_local_idx in range(self.num_local_experts): - expert_global_idx = local_expert_indices_offset + expert_local_idx - expert_sharded_offsets = ( - *sharded_offsets, - (len(sharded_offsets), expert_global_idx, num_global_experts), - ) - for mod in ['linear_fc1', 'linear_fc2']: - sharded_state_dict[f'{prefix}expert{expert_global_idx}.{mod}._extra_state'] = ( - make_sharded_object_for_checkpoint( - None, - f'{prefix}experts.{mod}._extra_state', - expert_sharded_offsets, - replica_id, - ) - ) - - return sharded_state_dict - - -class TEGroupedMLP(MegatronModule): - """An efficient implementation of the Experts layer using TE's GroupedLinear. - - Executes multiple experts in parallel to maximize computational efficiency. 
- """ - - def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): - super().__init__(config=config) - self.num_local_experts = num_local_experts - self.input_size = self.config.hidden_size - - # Double the output width with gated linear unit, see https://arxiv.org/pdf/2002.05202.pdf - ffn_hidden_size = self.config.moe_ffn_hidden_size - if self.config.gated_linear_unit: - ffn_hidden_size *= 2 - - self.linear_fc1 = build_module( - submodules.linear_fc1, - self.num_local_experts, - self.input_size, - ffn_hidden_size, - config=self.config, - init_method=self.config.init_method, - bias=self.config.add_bias_linear, - skip_bias_add=True, - is_expert=True, - tp_comm_buffer_name='fc1', - ) - - self.activation_func = self.config.activation_func - - self.linear_fc2 = build_module( - submodules.linear_fc2, - self.num_local_experts, - self.config.moe_ffn_hidden_size, - self.config.hidden_size, - config=self.config, - init_method=self.config.output_layer_init_method, - bias=self.config.add_bias_linear, - skip_bias_add=True, - is_expert=True, - tp_comm_buffer_name='fc2', - ) - - if self.config.fp8: - assert HAVE_TE, "FP8 requires TE." - self.fp8_padding = Fp8Padding(self.num_local_experts) - self.fp8_unpadding = Fp8Unpadding(self.num_local_experts) - - def forward( - self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor - ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: - """Forward of TEGroupedMLP - - Args: - permuted_local_hidden_states (torch.Tensor): The permuted input hidden states of the - local experts. - tokens_per_expert (torch.Tensor): The number of tokens per expert. - - Return: - output (torch.Tensor): The output of the local experts. - """ - tokens_per_expert = tokens_per_expert.tolist() - if self.config.fp8: - actual_tokens_per_expert = tokens_per_expert - permuted_local_hidden_states, tokens_per_expert = self.fp8_padding( - permuted_local_hidden_states, tokens_per_expert - ) - - intermediate_parallel, bias_parallel = self.linear_fc1( - permuted_local_hidden_states, tokens_per_expert - ) - - if self.config.bias_activation_fusion: - if self.activation_func == F.gelu: - if self.config.gated_linear_unit: - intermediate_parallel = bias_geglu_impl(intermediate_parallel, bias_parallel) - else: - assert self.config.add_bias_linear is True - intermediate_parallel = bias_gelu_impl(intermediate_parallel, bias_parallel) - elif self.activation_func == F.silu and self.config.gated_linear_unit: - intermediate_parallel = bias_swiglu_impl( - intermediate_parallel, - bias_parallel, - self.config.activation_func_fp8_input_store, - ) - else: - raise ValueError("Only support fusion of gelu and swiglu") - else: - if bias_parallel is not None: - shape = intermediate_parallel.shape - intermediate_parallel = torch.cat( - [ - t + b - for t, b in zip( - torch.split( - intermediate_parallel.view(-1, shape[-1]), tokens_per_expert - ), - bias_parallel, - ) - ] - ).view(shape) - if self.config.gated_linear_unit: - - def glu(x): - x = torch.chunk(x, 2, dim=-1) - return self.config.activation_func(x[0]) * x[1] - - intermediate_parallel = glu(intermediate_parallel) - else: - intermediate_parallel = self.activation_func(intermediate_parallel) - - output, output_bias = self.linear_fc2(intermediate_parallel, tokens_per_expert) - - # upad and concat the output - if self.config.fp8: - output = self.fp8_unpadding(output, actual_tokens_per_expert) - - return output, output_bias - - @expert_dist_ckpt_decorator - def sharded_state_dict( - self, prefix: str = '', 
sharded_offsets: tuple = (), metadata: Optional[dict] = None - ) -> ShardedStateDict: - """ - Maps local expert to global experts. - The sharded state dict is interchangable with SequentialMLP's. - """ - sharded_state_dict = {} - for name, module in self._modules.items(): - sub_sd = module.sharded_state_dict(f'{name}.', sharded_offsets, metadata) - if name == 'linear_fc1' and self.config.gated_linear_unit: - num_global_experts = ( - parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts - ) - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) - ep_axis = len(sharded_offsets) - for i in range(self.num_local_experts): - new_sharded_offsets = ( - *sharded_offsets, - (ep_axis, local_expert_indices_offset + i, num_global_experts), - ) - for k in (f'{name}.weight{i}', f'{name}.bias{i}'): - if k in sub_sd: - sub_sd[k] = apply_swiglu_sharded_factory(sub_sd[k], new_sharded_offsets) - # Add prefix here to match sequential's keys - replace_prefix_for_sharding(sub_sd, f'{name}.', f'{prefix}experts.{name}.') - sharded_state_dict.update({f"{prefix}{k}": v for k, v in sub_sd.items()}) - return sharded_state_dict - - -class SequentialMLP(MegatronModule): - """An implementation of the Experts layer using a sequence of MLP layers. - - This class executes each expert sequentially. - """ - - def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): - super().__init__(config=config) - self.add_bias = config.add_bias_linear - self.num_local_experts = num_local_experts - self.local_experts = torch.nn.ModuleList() - - assert ( - self.config.moe_ffn_hidden_size == self.config.ffn_hidden_size - ), "Please use GroupedMLP or TEGroupedMLP when moe_ffn_hidden_size is \ - different from ffn_hidden_size" - for _ in range(self.num_local_experts): - expert = MLP(self.config, submodules, is_expert=True) - self.local_experts.append(expert) - - def _pad_tensor_for_fp8(self, hidden): - """Padding tensor shape to multiples of 16.""" - actual_num_tokens = hidden.shape[0] - divisor = 16 - padded_num_tokens = ceil(actual_num_tokens / divisor) * divisor - actual_num_tokens - if padded_num_tokens > 0: - pad_tensor = torch.zeros( - padded_num_tokens, hidden.shape[1], dtype=hidden.dtype, device=hidden.device - ) - hidden = torch.cat((hidden, pad_tensor), dim=0) - return hidden - - def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor): - """Forward step of the SequentialMLP.""" - if self.num_local_experts == 1: - if self.config.fp8: - hidden = self._pad_tensor_for_fp8(permuted_local_hidden_states) - output, output_bias = self.local_experts[0](hidden) - output = output[: permuted_local_hidden_states.shape[0]] - else: - output, output_bias = self.local_experts[0](permuted_local_hidden_states) - - return output, output_bias - else: - tokens_per_expert = tokens_per_expert.tolist() - tokens_list = torch.split(permuted_local_hidden_states, tokens_per_expert) - - output_local_list = [] - output_bias_list = [] - - for expert, tokens in zip(self.local_experts, tokens_list): - if self.config.fp8: - hidden = self._pad_tensor_for_fp8(tokens) - output, output_bias = expert(hidden) - output = output[: tokens.shape[0]] - else: - output, output_bias = expert(tokens) - output_local_list.append(output) - if self.add_bias: - output_bias_list.append(output_bias.expand_as(output)) - - output_local = torch.cat(output_local_list, dim=0) - if self.add_bias: - output_bias_local = 
torch.cat(output_bias_list, dim=0) - else: - output_bias_local = None - - return output_local, output_bias_local - - @expert_dist_ckpt_decorator - def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): - """Maps local expert to global experts.""" - sharded_state_dict = {} - num_global_experts = ( - parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts - ) - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) - - expert_sharded_prefix = f'{prefix}experts.' - for expert_local_idx, expert in enumerate(self.local_experts): - expert_global_idx = local_expert_indices_offset + expert_local_idx - expert_state_dict_prefix = f'{prefix}local_experts.{expert_local_idx}.' - expert_sharded_offsets = ( - *sharded_offsets, - (len(sharded_offsets), expert_global_idx, num_global_experts), - ) - - expert_state_dict = expert.sharded_state_dict( - expert_state_dict_prefix, expert_sharded_offsets, metadata - ) - # Remove expert layers indexing from sharded keys - replace_prefix_for_sharding( - expert_state_dict, expert_state_dict_prefix, expert_sharded_prefix - ) - # Adjust replica ids - replication along DP modulo EP - for k, sh_ten in expert_state_dict.items(): - replica_id = sh_ten.replica_id - assert ( - len(replica_id) == 3 - ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' - sh_ten.replica_id = ( - *replica_id[:2], - parallel_state.get_expert_data_parallel_rank(), - ) - - sharded_state_dict.update(expert_state_dict) - return sharded_state_dict +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import copy +import itertools +from copy import deepcopy +from functools import partial, wraps +from math import ceil +from typing import Optional, Tuple + +import torch +import torch.nn.functional as F +from torch.nn.parameter import Parameter + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing import ShardedTensor +from megatron.core.dist_checkpointing.mapping import ( + LocalNonpersistentObject, + ReplicaId, + ShardedStateDict, + ShardedTensorFactory, +) +from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding +from megatron.core.fusions.fused_bias_geglu import bias_geglu_impl +from megatron.core.fusions.fused_bias_gelu import bias_gelu_impl +from megatron.core.fusions.fused_bias_swiglu import bias_swiglu_impl +from megatron.core.jit import jit_fuser +from megatron.core.tensor_parallel.layers import ( + _initialize_affine_weight_cpu, + _initialize_affine_weight_gpu, +) +from megatron.core.tensor_parallel.utils import divide +from megatron.core.transformer.mlp import MLP, MLPSubmodules, apply_swiglu_sharded_factory +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.moe import grouped_gemm_util as gg +from megatron.core.transformer.spec_utils import build_module +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.utils import ( + make_sharded_object_for_checkpoint, + sharded_state_dict_default, +) + +try: + + from megatron.core.extensions.transformer_engine import Fp8Padding, Fp8Unpadding + + HAVE_TE = True + +except ImportError: + + HAVE_TE = False + + +def expert_dist_ckpt_decorator(func): + """Decorator of shared_state_dict in expert layer for distributed checkpoint. + + Since !1940, the TP size for Expert layer can be different with Attention. 
+ To make distributed checkpoint work in such cases, we use a decorator to + replace the default TP parallel states with expert-TP parallel states. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + # Store original states + original_rank = parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK + original_size = parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE + original_group = parallel_state._TENSOR_MODEL_PARALLEL_GROUP + try: + # Set new states + parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = ( + parallel_state.get_expert_tensor_parallel_rank() + ) + parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = ( + parallel_state.get_expert_tensor_parallel_world_size() + ) + parallel_state._TENSOR_MODEL_PARALLEL_GROUP = ( + parallel_state.get_expert_tensor_parallel_group() + ) + + # Execute the function + result = func(*args, **kwargs) + finally: + # Restore original states + parallel_state._MPU_TENSOR_MODEL_PARALLEL_RANK = original_rank + parallel_state._MPU_TENSOR_MODEL_PARALLEL_WORLD_SIZE = original_size + parallel_state._TENSOR_MODEL_PARALLEL_GROUP = original_group + return result + + return wrapper + + +class GroupedMLP(MegatronModule): + """An efficient implementation of the Experts layer using GroupedGEMM. + + Executes multiple experts in parallel to maximize computational efficiency. + """ + + def __init__(self, num_local_experts: int, config: TransformerConfig): + super().__init__(config=config) + self.config: TransformerConfig = config + self.num_local_experts = num_local_experts + gg.assert_grouped_gemm_is_available() + assert ( + config.add_bias_linear == False + ), "bias not supported in Grouped GEMM yet, please set '--disable-bias-linear' instead." + + self.expert_parallel = config.expert_model_parallel_size > 1 + if self.config.gated_linear_unit: + if self.config.activation_func not in (F.silu, F.gelu): + raise ValueError("Activation function must be silu or gelu when using GroupedMLP.") + + @jit_fuser + def glu(x): + x = torch.chunk(x, 2, dim=-1) + return self.config.activation_func(x[0]) * x[1] + + self.activation_func = glu + else: + self.activation_func = self.config.activation_func + + # How many feature each rank holds for fc1 and fc2, respectively. + tp_size = parallel_state.get_expert_tensor_parallel_world_size() + tp_rank = parallel_state.get_expert_tensor_parallel_rank() + + fc1_output_size = self.config.moe_ffn_hidden_size * self.num_local_experts + if config.gated_linear_unit: + # Project to 4h. If using swiglu double the output width, + # see https://arxiv.org/pdf/2002.05202.pdf + fc1_output_size *= 2 + fc1_output_size_per_partition = divide(fc1_output_size, tp_size) + + fc2_input_size = self.config.moe_ffn_hidden_size * self.num_local_experts + fc2_input_size_per_partition = divide(fc2_input_size, tp_size) + + # Note: The current kernel implementations of grouped_gemm + # does not support transposition with CUTLASS grouped GEMM + # (https://github.com/fanshiqing/grouped_gemm/blob/main/csrc/grouped_gemm.cu#L355-L358) + # and as a result we avoid allocate the transpose of weights. + # Initialize weight. 
+ if config.use_cpu_initialization: + self.weight1 = Parameter( + torch.empty( + self.config.hidden_size, + fc1_output_size_per_partition, + dtype=config.params_dtype, + ) + ) + self.weight2 = Parameter( + torch.empty( + fc2_input_size_per_partition, self.config.hidden_size, dtype=config.params_dtype + ) + ) + if config.perform_initialization: + _initialize_affine_weight_cpu( + self.weight1, + self.config.hidden_size, + fc1_output_size, + fc1_output_size_per_partition, + partition_dim=1, + init_method=config.init_method, + params_dtype=config.params_dtype, + rank=tp_rank, + world_size=tp_size, + ) + _initialize_affine_weight_cpu( + self.weight2, + fc2_input_size, + self.config.hidden_size, + fc2_input_size_per_partition, + partition_dim=0, + init_method=config.output_layer_init_method, + params_dtype=config.params_dtype, + rank=tp_rank, + world_size=tp_size, + ) + else: + self.weight1 = Parameter( + torch.empty( + self.config.hidden_size, + fc1_output_size_per_partition, + device=torch.cuda.current_device(), + dtype=config.params_dtype, + ) + ) + self.weight2 = Parameter( + torch.empty( + fc2_input_size_per_partition, + self.config.hidden_size, + device=torch.cuda.current_device(), + dtype=config.params_dtype, + ) + ) + if config.perform_initialization: + _initialize_affine_weight_gpu( + self.weight1, config.init_method, partition_dim=1, is_expert=True + ) + _initialize_affine_weight_gpu( + self.weight2, config.output_layer_init_method, partition_dim=0, is_expert=True + ) + setattr(self.weight1, 'allreduce', not self.expert_parallel) + setattr(self.weight2, 'allreduce', not self.expert_parallel) + + def remove_extra_states_check(self, incompatible_keys): + """ + Remove _extra_state from unexpected keys. + These keys are for dist ckpt compatibility with SequentialMLP. + """ + keys = deepcopy(incompatible_keys.unexpected_keys) + for key in keys: + if '_extra_state' in key: + incompatible_keys.unexpected_keys.remove(key) + + self.register_load_state_dict_post_hook(remove_extra_states_check) + + def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor): + """Forward step of the GroupedMLP.""" + if permuted_local_hidden_states.nelement() != 0: + # Reshape the weights for the grouped GEMMs. + w1 = self.weight1.view(self.num_local_experts, self.config.hidden_size, -1) + w2 = self.weight2.view(self.num_local_experts, -1, self.config.hidden_size) + + fc1_output = gg.ops.gmm( + permuted_local_hidden_states, w1, tokens_per_expert, trans_b=False + ) + + intermediate_parallel = self.activation_func(fc1_output) + + fc2_output = gg.ops.gmm(intermediate_parallel, w2, tokens_per_expert, trans_b=False) + else: + # No token is allocated for local experts. + assert torch.count_nonzero(tokens_per_expert) == 0 + + # Make sure params of experts still have gradients even given zero tokens. + w1 = self.weight1.view(self.config.hidden_size, -1) + w2 = self.weight2.view(-1, self.config.hidden_size) + h = torch.matmul(permuted_local_hidden_states, w1) + h = self.activation_func(h) + h = torch.matmul(h, w2) + + fc2_output = h + + return fc2_output, None + + @expert_dist_ckpt_decorator + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """ + Maps local expert to global experts. + The sharded_state_dict for the weight parts are compatible with the SequentialMLP, + whereas the optimizer states are not due to the limitation from weight transposing. + That is, for finetuning scenario, the checkpoint is compatible with the SequentialMLP. 
+ """ + sharded_state_dict = {} + num_global_experts = ( + parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts + ) + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) + tp_size = parallel_state.get_tensor_model_parallel_world_size() + tp_rank = parallel_state.get_tensor_model_parallel_rank() + + prepend_axis_num = len(sharded_offsets) + replica_id = (0, 0, parallel_state.get_expert_data_parallel_rank()) + + local_ffn_dim_size = ( + self.weight2.numel() // self.num_local_experts // self.config.hidden_size + ) + + @torch.no_grad() + def sh_ten_build_fn( + key: str, + t: torch.Tensor, + replica_id: ReplicaId, + flattened_range: Optional[slice], + tp_axis: int, + with_glu: bool, + ): + # TODO: write a generic implementation to cover both cases with and without GLU + if tp_axis == 1: + # weight1 + if with_glu: + last_dim_size = local_ffn_dim_size * 2 + else: + last_dim_size = local_ffn_dim_size + real_shape = (self.num_local_experts, self.config.hidden_size, last_dim_size) + elif tp_axis == 0: + # weight2 + real_shape = (self.num_local_experts, local_ffn_dim_size, self.config.hidden_size) + assert with_glu == False + else: + raise ValueError("tp_axis should be 0 or 1.") + if flattened_range is None: + # weights + t = t.view(real_shape).transpose(-1, -2) + # change tp_axis due to the transposing + tp_axis = 1 - tp_axis + if with_glu: + local_tensors = torch.chunk(t, 2, -2) + sub_states = [ + ShardedTensor.from_rank_offsets( + key, + local_tensors[0].contiguous(), + *sharded_offsets, + ( + prepend_axis_num, + parallel_state.get_expert_model_parallel_rank(), + parallel_state.get_expert_model_parallel_world_size(), + ), + (prepend_axis_num + 1, tp_rank, tp_size * 2), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + ), + ShardedTensor.from_rank_offsets( + key, + local_tensors[1].contiguous(), + *sharded_offsets, + ( + prepend_axis_num, + parallel_state.get_expert_model_parallel_rank(), + parallel_state.get_expert_model_parallel_world_size(), + ), + (prepend_axis_num + 1, tp_size + tp_rank, tp_size * 2), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + ), + ] + else: + sub_states = ShardedTensor.from_rank_offsets( + key, + t.contiguous(), + *sharded_offsets, + ( + prepend_axis_num, + parallel_state.get_expert_model_parallel_rank(), + parallel_state.get_expert_model_parallel_world_size(), + ), + (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + ) + else: + # flattened optmizer states + # the non-flattened weight shape is [local_expert_num, hidden_size, ffn_size] + # + # For the case without GLU, it is straightforward, we just need to split each + # expert along the dim-0. + # + # For the case with GLU, we need to split the experts along dim-0 and split the + # two tensors for GLU along dim-2. + # To split along the non-first dim, we need to chunk the tensor into small pieces, + # since they belong to different tenors and are interleaved in the flattened space. + # Refer to the below sketch graph. + # |................| |........|........| + # |............FFFF| |........|....BBBB| + # |FFFFFFFFFFFFFFFF| -> |AAAAAAAA|BBBBBBBB| + # |FFFFFFFFFFFFFFFF| |AAAAAAAA|BBBBBBBB| + # |FF..............| |AA......|........| + # |................| |........|........| + # + # But too many chunks have severe performance issues. 
We merge these chunks during + # the save process along with some length information and recover them during the + # load process. + assert t.ndim == 1, (key, t.shape) + if with_glu: + non_flat_local_shape = (1, self.config.hidden_size, local_ffn_dim_size) + chunk_numel = local_ffn_dim_size + sub_states = [] + start_pos = 0 + for local_expert_idx in range(self.num_local_experts): + first_glu_idx = -1 + w_start_range = -1 + v_start_range = -1 + w_tensors = [] + v_tensors = [] + w_lens = [] + v_lens = [] + expert_global_idx = local_expert_indices_offset + local_expert_idx + for input_dim_idx in range(self.config.hidden_size): + for glu_idx in range(2): + local_idx = ( + local_expert_idx * self.config.hidden_size * 2 + + input_dim_idx * 2 + + glu_idx + ) + if ( + flattened_range.start < chunk_numel * (local_idx + 1) + and flattened_range.stop > chunk_numel * local_idx + ): + if first_glu_idx == -1: + first_glu_idx = glu_idx + end_pos = min( + flattened_range.stop, + chunk_numel * (local_idx + 1) - flattened_range.start, + ) + local_tensor = t[start_pos:end_pos] + local_flattened_range = slice( + max(0, flattened_range.start - chunk_numel * local_idx), + min( + chunk_numel, + flattened_range.stop - chunk_numel * local_idx, + ), + ) + assert ( + len(local_tensor) + == local_flattened_range.stop - local_flattened_range.start + ) + start_pos += len(local_tensor) + if glu_idx == 0: + w_tensors.append(local_tensor) + w_lens.append(len(local_tensor)) + if w_start_range == -1: + w_start_range = max( + 0, flattened_range.start - chunk_numel * local_idx + ) + else: + v_tensors.append(local_tensor) + v_lens.append(len(local_tensor)) + if v_start_range == -1: + v_start_range = max( + 0, flattened_range.start - chunk_numel * local_idx + ) + sub_states.append( + { + 'w_tensors': ShardedTensor.from_rank_offsets_flat( + key, + ( + torch.cat(w_tensors, -1) + if len(w_tensors) > 0 + else torch.Tensor() + ), + non_flat_local_shape, + *sharded_offsets, + ( + prepend_axis_num, + expert_global_idx, # pylint: disable=E0606 + num_global_experts, + ), + (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size * 2), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + flattened_range=slice( + w_start_range, w_start_range + sum(w_lens) + ), + ), + 'w_lens': LocalNonpersistentObject(w_lens), + 'v_tensors': ShardedTensor.from_rank_offsets_flat( + key, + ( + torch.cat(v_tensors, -1) + if len(v_tensors) > 0 + else torch.Tensor() + ), + non_flat_local_shape, + *sharded_offsets, + (prepend_axis_num, expert_global_idx, num_global_experts), + ( + prepend_axis_num + 1 + tp_axis, + tp_rank + tp_size, + tp_size * 2, + ), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + flattened_range=slice( + v_start_range, v_start_range + sum(v_lens) + ), + ), + 'v_lens': LocalNonpersistentObject(v_lens), + 'first_glu_idx': LocalNonpersistentObject(first_glu_idx), + } + ) + else: + non_flat_local_shape = ( + real_shape[0] // self.num_local_experts, + *real_shape[1:], + ) + chunk_numel = local_ffn_dim_size * self.config.hidden_size + sub_states = [] + start_pos = 0 + for local_expert_idx in range(self.num_local_experts): + if ( + flattened_range.start < chunk_numel * (local_expert_idx + 1) + and flattened_range.stop > chunk_numel * local_expert_idx + ): + end_pos = min( + flattened_range.stop, + chunk_numel * (local_expert_idx + 1) - flattened_range.start, + ) + local_tensor = t[start_pos:end_pos] + local_flattened_range = slice( + max(0, flattened_range.start - chunk_numel * local_expert_idx), + min( + chunk_numel, + 
flattened_range.stop - chunk_numel * local_expert_idx, + ), + ) + assert ( + len(local_tensor) + == local_flattened_range.stop - local_flattened_range.start + ) + start_pos += len(local_tensor) + expert_global_idx = local_expert_indices_offset + local_expert_idx + sub_states.append( + ShardedTensor.from_rank_offsets_flat( + key, + local_tensor, + non_flat_local_shape, + *sharded_offsets, + (prepend_axis_num, expert_global_idx, num_global_experts), + (prepend_axis_num + 1 + tp_axis, tp_rank, tp_size), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + flattened_range=local_flattened_range, + ) + ) + return sub_states + + @torch.no_grad() + def sh_ten_merge_fn(sub_state_dict, tp_axis: int, with_glu: bool): + if tp_axis == 1: + # weight1 + weight_shape = (self.config.hidden_size, -1) + elif tp_axis == 0: + # weight2 + weight_shape = (-1, self.config.hidden_size) + assert with_glu == False + else: + raise ValueError("tp_axis should be 0 or 1.") + if isinstance(sub_state_dict, list) and isinstance(sub_state_dict[0], dict): + # flattened tensor with glu + res = [] + for local_expert_dict in sub_state_dict: + w_tensors = torch.split( + local_expert_dict['w_tensors'], local_expert_dict['w_lens'] + ) + v_tensors = torch.split( + local_expert_dict['v_tensors'], local_expert_dict['v_lens'] + ) + first_glu_idx = local_expert_dict['first_glu_idx'] + if first_glu_idx == 0: + res += [ + x for x in itertools.chain(*itertools.zip_longest(w_tensors, v_tensors)) + ] + else: + res += [ + x for x in itertools.chain(*itertools.zip_longest(v_tensors, w_tensors)) + ] + return torch.cat(res) + elif isinstance(sub_state_dict, list) and sub_state_dict[0].ndim == 1: + # flattened tensor without glu + return torch.cat(sub_state_dict) + else: + if with_glu: + sub_state_dict = torch.cat(sub_state_dict, -2) + return sub_state_dict.transpose(-1, -2).reshape(weight_shape) + + state_dict = self.state_dict(prefix='', keep_vars=True) + for name, tensor in state_dict.items(): + if name == 'weight1': + tp_axis = 1 + with_glu = self.config.gated_linear_unit + wkey = f'{prefix}experts.linear_fc1.weight' + else: + tp_axis = 0 + with_glu = False + wkey = f'{prefix}experts.linear_fc2.weight' + + """ + When MCore Custom FSDP `optim_grads_params` is enabled, it is necessary to save the tensor local shard. + This local shard is accessible through the `fully_shard_param_local_shard` attribute of the tensor. + + This attribute contains the local shard of the fully sharded parameter, which is essential for + correctly saving and loading the model state when using `optim_grads_params` with FSDP. 
+ + Example: + >>> # Assuming `tensor` is a fully sharded parameter + >>> local_shard = tensor.fully_shard_param_local_shard + >>> # Save the local shard as needed + """ + this_replica_id = list(copy.deepcopy(replica_id)) + if hasattr(tensor, 'fully_shard_param_local_shard'): + if tensor.fully_shard_param_local_shard.numel() == 0: + continue + flattened_range = slice(*tensor.fully_shard_param_local_index) + tensor = tensor.fully_shard_param_local_shard + this_replica_id[-1] = 0 + else: + flattened_range = None + + sharded_state_dict[f'{prefix}{name}'] = ShardedTensorFactory( + wkey, + tensor, + partial(sh_ten_build_fn, tp_axis=tp_axis, with_glu=with_glu), + partial(sh_ten_merge_fn, tp_axis=tp_axis, with_glu=with_glu), + tuple(this_replica_id), + flattened_range=flattened_range, + ) + + replica_id = ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_expert_data_parallel_rank(), + ) + # Add fake _extra_state to be compatible with SequentialMLP + for expert_local_idx in range(self.num_local_experts): + expert_global_idx = local_expert_indices_offset + expert_local_idx + expert_sharded_offsets = ( + *sharded_offsets, + (len(sharded_offsets), expert_global_idx, num_global_experts), + ) + for mod in ['linear_fc1', 'linear_fc2']: + sharded_state_dict[f'{prefix}expert{expert_global_idx}.{mod}._extra_state'] = ( + make_sharded_object_for_checkpoint( + None, + f'{prefix}experts.{mod}._extra_state', + expert_sharded_offsets, + replica_id, + ) + ) + + return sharded_state_dict + + +class TEGroupedMLP(MegatronModule): + """An efficient implementation of the Experts layer using TE's GroupedLinear. + + Executes multiple experts in parallel to maximize computational efficiency. + """ + + def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): + super().__init__(config=config) + self.num_local_experts = num_local_experts + self.input_size = self.config.hidden_size + + # Double the output width with gated linear unit, see https://arxiv.org/pdf/2002.05202.pdf + ffn_hidden_size = self.config.moe_ffn_hidden_size + if self.config.gated_linear_unit: + ffn_hidden_size *= 2 + + self.linear_fc1 = build_module( + submodules.linear_fc1, + self.num_local_experts, + self.input_size, + ffn_hidden_size, + config=self.config, + init_method=self.config.init_method, + bias=self.config.add_bias_linear, + skip_bias_add=True, + is_expert=True, + tp_comm_buffer_name='fc1', + ) + + self.activation_func = self.config.activation_func + + self.linear_fc2 = build_module( + submodules.linear_fc2, + self.num_local_experts, + self.config.moe_ffn_hidden_size, + self.config.hidden_size, + config=self.config, + init_method=self.config.output_layer_init_method, + bias=self.config.add_bias_linear, + skip_bias_add=True, + is_expert=True, + tp_comm_buffer_name='fc2', + ) + + if self.config.fp8: + assert HAVE_TE, "FP8 requires TE." + self.fp8_padding = Fp8Padding(self.num_local_experts) + self.fp8_unpadding = Fp8Unpadding(self.num_local_experts) + + def forward( + self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor + ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: + """Forward of TEGroupedMLP + + Args: + permuted_local_hidden_states (torch.Tensor): The permuted input hidden states of the + local experts. + tokens_per_expert (torch.Tensor): The number of tokens per expert. + + Return: + output (torch.Tensor): The output of the local experts. 
+ """ + tokens_per_expert = tokens_per_expert.tolist() + if self.config.fp8: + actual_tokens_per_expert = tokens_per_expert + permuted_local_hidden_states, tokens_per_expert = self.fp8_padding( + permuted_local_hidden_states, tokens_per_expert + ) + + intermediate_parallel, bias_parallel = self.linear_fc1( + permuted_local_hidden_states, tokens_per_expert + ) + + if self.config.bias_activation_fusion: + if self.activation_func == F.gelu: + if self.config.gated_linear_unit: + intermediate_parallel = bias_geglu_impl(intermediate_parallel, bias_parallel) + else: + assert self.config.add_bias_linear is True + intermediate_parallel = bias_gelu_impl(intermediate_parallel, bias_parallel) + elif self.activation_func == F.silu and self.config.gated_linear_unit: + intermediate_parallel = bias_swiglu_impl( + intermediate_parallel, + bias_parallel, + self.config.activation_func_fp8_input_store, + ) + else: + raise ValueError("Only support fusion of gelu and swiglu") + else: + if bias_parallel is not None: + shape = intermediate_parallel.shape + intermediate_parallel = torch.cat( + [ + t + b + for t, b in zip( + torch.split( + intermediate_parallel.view(-1, shape[-1]), tokens_per_expert + ), + bias_parallel, + ) + ] + ).view(shape) + if self.config.gated_linear_unit: + + def glu(x): + x = torch.chunk(x, 2, dim=-1) + return self.config.activation_func(x[0]) * x[1] + + intermediate_parallel = glu(intermediate_parallel) + else: + intermediate_parallel = self.activation_func(intermediate_parallel) + + output, output_bias = self.linear_fc2(intermediate_parallel, tokens_per_expert) + + # upad and concat the output + if self.config.fp8: + output = self.fp8_unpadding(output, actual_tokens_per_expert) + + return output, output_bias + + @expert_dist_ckpt_decorator + def sharded_state_dict( + self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[dict] = None + ) -> ShardedStateDict: + """ + Maps local expert to global experts. + The sharded state dict is interchangable with SequentialMLP's. + """ + sharded_state_dict = {} + for name, module in self._modules.items(): + sub_sd = sharded_state_dict_default(module, f'{name}.', sharded_offsets, metadata) + if name == 'linear_fc1' and self.config.gated_linear_unit: + num_global_experts = ( + parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts + ) + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) + ep_axis = len(sharded_offsets) + for i in range(self.num_local_experts): + new_sharded_offsets = ( + *sharded_offsets, + (ep_axis, local_expert_indices_offset + i, num_global_experts), + ) + for k in (f'{name}.weight{i}', f'{name}.bias{i}'): + if k in sub_sd: + sub_sd[k] = apply_swiglu_sharded_factory(sub_sd[k], new_sharded_offsets) + # Add prefix here to match sequential's keys + replace_prefix_for_sharding(sub_sd, f'{name}.', f'{prefix}experts.{name}.') + sharded_state_dict.update({f"{prefix}{k}": v for k, v in sub_sd.items()}) + return sharded_state_dict + + +class SequentialMLP(MegatronModule): + """An implementation of the Experts layer using a sequence of MLP layers. + + This class executes each expert sequentially. 
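+    Its sharded state dict is interchangeable with GroupedMLP's and TEGroupedMLP's,
+    so checkpoints can be loaded across the different expert implementations.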
+ """ + + def __init__(self, num_local_experts, config: TransformerConfig, submodules: MLPSubmodules): + super().__init__(config=config) + self.add_bias = config.add_bias_linear + self.num_local_experts = num_local_experts + self.local_experts = torch.nn.ModuleList() + + assert ( + self.config.moe_ffn_hidden_size == self.config.ffn_hidden_size + ), "Please use GroupedMLP or TEGroupedMLP when moe_ffn_hidden_size is \ + different from ffn_hidden_size" + for _ in range(self.num_local_experts): + expert = MLP(self.config, submodules, is_expert=True) + self.local_experts.append(expert) + + def _pad_tensor_for_fp8(self, hidden): + """Padding tensor shape to multiples of 16.""" + actual_num_tokens = hidden.shape[0] + divisor = 16 + padded_num_tokens = ceil(actual_num_tokens / divisor) * divisor - actual_num_tokens + if padded_num_tokens > 0: + pad_tensor = torch.zeros( + padded_num_tokens, hidden.shape[1], dtype=hidden.dtype, device=hidden.device + ) + hidden = torch.cat((hidden, pad_tensor), dim=0) + return hidden + + def forward(self, permuted_local_hidden_states: torch.Tensor, tokens_per_expert: torch.Tensor): + """Forward step of the SequentialMLP.""" + if self.num_local_experts == 1: + if self.config.fp8: + hidden = self._pad_tensor_for_fp8(permuted_local_hidden_states) + output, output_bias = self.local_experts[0](hidden) + output = output[: permuted_local_hidden_states.shape[0]] + else: + output, output_bias = self.local_experts[0](permuted_local_hidden_states) + + return output, output_bias + else: + tokens_per_expert = tokens_per_expert.tolist() + tokens_list = torch.split(permuted_local_hidden_states, tokens_per_expert) + + output_local_list = [] + output_bias_list = [] + + for expert, tokens in zip(self.local_experts, tokens_list): + if self.config.fp8: + hidden = self._pad_tensor_for_fp8(tokens) + output, output_bias = expert(hidden) + output = output[: tokens.shape[0]] + else: + output, output_bias = expert(tokens) + output_local_list.append(output) + if self.add_bias: + output_bias_list.append(output_bias.expand_as(output)) + + output_local = torch.cat(output_local_list, dim=0) + if self.add_bias: + output_bias_local = torch.cat(output_bias_list, dim=0) + else: + output_bias_local = None + + return output_local, output_bias_local + + @expert_dist_ckpt_decorator + def sharded_state_dict(self, prefix='', sharded_offsets=(), metadata=None): + """Maps local expert to global experts.""" + sharded_state_dict = {} + num_global_experts = ( + parallel_state.get_expert_model_parallel_world_size() * self.num_local_experts + ) + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) + + expert_sharded_prefix = f'{prefix}experts.' + for expert_local_idx, expert in enumerate(self.local_experts): + expert_global_idx = local_expert_indices_offset + expert_local_idx + expert_state_dict_prefix = f'{prefix}local_experts.{expert_local_idx}.' 
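+            # Each local expert is re-keyed under its global expert index so the
+            # checkpoint layout does not depend on the expert-parallel split.
+            # Illustrative mapping (hypothetical values):
+            # >>> ep_rank, num_local_experts, expert_local_idx = 2, 4, 1
+            # >>> ep_rank * num_local_experts + expert_local_idx  # expert_global_idx == 9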
+ expert_sharded_offsets = ( + *sharded_offsets, + (len(sharded_offsets), expert_global_idx, num_global_experts), + ) + + expert_state_dict = expert.sharded_state_dict( + expert_state_dict_prefix, expert_sharded_offsets, metadata + ) + # Remove expert layers indexing from sharded keys + replace_prefix_for_sharding( + expert_state_dict, expert_state_dict_prefix, expert_sharded_prefix + ) + # Adjust replica ids - replication along DP modulo EP + for k, sh_ten in expert_state_dict.items(): + replica_id = sh_ten.replica_id + assert ( + len(replica_id) == 3 + ), f'Expected replica_id for {k} to be in (PP, TP, DP) format, got: {replica_id}' + + is_custom_fsdp_shard_tensor = getattr(sh_ten, "is_data_parallel_fully_shard", False) + if is_custom_fsdp_shard_tensor: + sh_ten.replica_id = (*replica_id[:2], 0) + continue + + sh_ten.replica_id = ( + *replica_id[:2], + parallel_state.get_expert_data_parallel_rank(), + ) + + sharded_state_dict.update(expert_state_dict) + return sharded_state_dict diff --git a/megatron/core/transformer/moe/fused_a2a.py b/megatron/core/transformer/moe/fused_a2a.py new file mode 100644 index 0000000..ebd9311 --- /dev/null +++ b/megatron/core/transformer/moe/fused_a2a.py @@ -0,0 +1,202 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# Portions of this code are from DeepSeek DeepEP project +# Copyright (c) 2025 DeepSeek +# Licensed under the MIT License - https://github.com/deepseek-ai/DeepEP/blob/main/LICENSE + +try: + from deep_ep import Buffer + + HAVE_DEEP_EP = True +except ImportError: + HAVE_DEEP_EP = False + +import torch + +_buffer = None + + +def get_hidden_bytes(x: torch.Tensor) -> int: + """Calculate the number of hidden bytes for a tensor. + + Args: + x (torch.Tensor): Input tensor + + Returns: + int: Number of hidden bytes + """ + return x.size(1) * max(x.element_size(), 2) + + +def get_buffer(group: torch.distributed.ProcessGroup, hidden_bytes: int): + """Get or create a buffer for all-to-all communication. 
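+
+    The buffer is cached in the module-level `_buffer` and re-allocated only when the
+    process group changes or a larger NVL/RDMA buffer size hint is requested.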
+ + Args: + group (torch.distributed.ProcessGroup): Process group for communication + hidden_bytes (int): Number of hidden bytes needed + + Returns: + Buffer: Communication buffer + """ + global _buffer + num_nvl_bytes, num_rdma_bytes = 0, 0 + for config in ( + Buffer.get_dispatch_config(group.size()), + Buffer.get_combine_config(group.size()), + ): + # Split long line for PEP8 compliance + num_nvl_bytes = max( + config.get_nvl_buffer_size_hint(hidden_bytes, group.size()), num_nvl_bytes + ) + num_rdma_bytes = max( + config.get_rdma_buffer_size_hint(hidden_bytes, group.size()), num_rdma_bytes + ) + + # Allocate buffer if not existed or not enough buffer + # NOTES: the adaptive routing configuration of the network **must be off** + if ( + _buffer is None + or _buffer.group != group + or _buffer.num_nvl_bytes < num_nvl_bytes + or _buffer.num_rdma_bytes < num_rdma_bytes + ): + _buffer = Buffer(group, num_nvl_bytes, num_rdma_bytes) + return _buffer + + +class FusedDispatch(torch.autograd.Function): + """Fused dispatch operation for MoE routing combining computation and communication.""" + + @staticmethod + def forward(ctx, x, token_indices, token_probs, num_experts, group, previous_event=None): + """Forward pass of fused dispatch.""" + # Calculate layout before actual dispatch + buffer = get_buffer(group, get_hidden_bytes(x)) + ( + num_tokens_per_rank, + num_tokens_per_rdma_rank, + num_tokens_per_expert, + is_token_in_rank, + previous_event, + ) = buffer.get_dispatch_layout( + token_indices, + num_experts, + previous_event=None, + async_finish=False, + allocate_on_comm_stream=False, + ) + + # Do MoE dispatch + # NOTES: the CPU will wait for GPU's signal to arrive, + # so this is not compatible with CUDA graph + ( + recv_x, + recv_token_indices, + recv_token_probs, + num_recv_tokens_per_expert_list, + handle, + event, + ) = buffer.dispatch( + x, + topk_idx=token_indices, + topk_weights=token_probs.float(), + num_tokens_per_rank=num_tokens_per_rank, + num_tokens_per_rdma_rank=num_tokens_per_rdma_rank, + is_token_in_rank=is_token_in_rank, + num_tokens_per_expert=num_tokens_per_expert, + previous_event=None, + async_finish=False, + allocate_on_comm_stream=False, + ) + + ctx.buffer = buffer + ctx.handle = handle + ctx.event = event + tokens_per_expert = torch.tensor(num_recv_tokens_per_expert_list) + + return (recv_x, recv_token_indices, recv_token_probs, tokens_per_expert, handle) + + @staticmethod + def backward( + ctx, grad_output, grad_token_indices, grad_token_probs, grad_tokens_per_expert, grad_handle + ): + """Backward pass of fused dispatch.""" + buffer = ctx.buffer + handle = ctx.handle + + grad_x, grad_token_probs, event = buffer.combine( + grad_output.contiguous(), + handle, + topk_weights=grad_token_probs.float(), + previous_event=None, + async_finish=False, + allocate_on_comm_stream=False, + ) + return grad_x, None, grad_token_probs, None, None, None + + +class FusedCombine(torch.autograd.Function): + """Fused combine operation for MoE output combining computation and communication.""" + + @staticmethod + def forward(ctx, x, group, handle, previous_event=None): + """Forward pass of fused combine.""" + buffer = get_buffer(group, get_hidden_bytes(x)) + combined_x, _, event = buffer.combine( + x, handle=handle, async_finish=False, previous_event=None, allocate_on_comm_stream=False + ) + ctx.handle = handle + ctx.buffer = buffer + + return combined_x, event + + @staticmethod + def backward(ctx, grad_output, previous_event=None): + """Backward pass of fused combine.""" + buffer = ctx.buffer 
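+        # Combine sums expert outputs back onto their source ranks, so its backward
+        # scatters the incoming gradient out again: a dispatch that reuses the
+        # communication handle saved in forward (mirroring FusedDispatch.backward,
+        # which calls buffer.combine).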
+ grad_x, _, _, _, _, event = buffer.dispatch( + grad_output.contiguous(), + handle=ctx.handle, + previous_event=previous_event, + async_finish=False, + allocate_on_comm_stream=False, + ) + return grad_x, None, None, None + + +if HAVE_DEEP_EP: + + def fused_dispatch(x, token_indices, token_probs, num_experts, group, previous_event=None): + """Perform fused dispatch operation if deep_ep is available. + + Args: + x: Input tensor [num_tokens, hidden_size] + token_indices: Token routing indices [num_tokens, topk] + token_probs: Token routing probabilities [num_tokens, topk] + num_experts: Number of experts + group: Process group + previous_event: Previous CUDA event + + Returns: + Result of FusedDispatch + """ + return FusedDispatch.apply( + x.contiguous(), token_indices, token_probs, num_experts, group, previous_event + ) + + def fused_combine(x, group, handle, previous_event=None): + """Perform fused combine operation if deep_ep is available. + + Args: + x: Input tensor + group: Process group + handle: Communication handle + previous_event: Previous CUDA event + + Returns: + Result of FusedCombine + """ + return FusedCombine.apply(x, group, handle, previous_event) + +else: + fused_dispatch = None + fused_combine = None diff --git a/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py b/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py index dd5f447..13d9da8 100644 --- a/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py +++ b/megatron/core/transformer/moe/legacy_a2a_token_dispatcher.py @@ -1,314 +1,317 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from typing import List, Optional, Tuple - -import torch -import torch.distributed - -from megatron.core import parallel_state, tensor_parallel -from megatron.core.transformer.moe.moe_utils import ( - get_capacity, - permute, - sort_chunks_by_idxs, - unpermute, -) -from megatron.core.transformer.moe.token_dispatcher import MoETokenDispatcher -from megatron.core.transformer.transformer_config import TransformerConfig - - -class MoEAlltoAllSEQTokenDispatcher(MoETokenDispatcher): - """ - The legacy implementation of the AlltoAll-based token dispatcher, which handles token - dispatching on the sequence level instead of token level. The core of this implementation - lies in each device dispatching on the entire sequence, with the hidden state being partitioned. - - Note: This class is a replica of the MoEAlltoAllTokenDispatcher from version 0.8. - """ - - def __init__( - self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig - ) -> None: - """ - Initialize the AlltoAll token dispatcher. - - Args: - num_local_experts (int): Number of local experts on the current device. - local_expert_indices (List[int]): Indices of local experts on the current device. - config (TransformerConfig): Configuration for the transformer model. 
- """ - super().__init__(config=config) - self.hidden_shape = None - self.num_input_tokens = None - self.num_local_experts = num_local_experts - self.num_experts = config.num_moe_experts - assert self.num_local_experts > 0, "Expected at least one expert" - self.local_expert_indices = local_expert_indices - assert ( - len(self.local_expert_indices) == self.num_local_experts - ), "Invalid local expert indices" - for i in range(len(self.local_expert_indices) - 1): - assert ( - self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 - ), "local_expert_indices must be continous" - self.ep_size = config.expert_model_parallel_size - self.tp_size = config.tensor_model_parallel_size - self.probs = None - self.input_splits = None - self.output_splits = None - # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent - # to each local expert by all ranks. - self.num_global_tokens_per_local_expert_cpu = None - input_chunk_idxs = torch.arange(self.num_experts) - # [num_local_experts, ep_size]. Sort the input chunks by local experts. - self.sort_input_by_local_experts = ( - input_chunk_idxs.reshape(-1, self.num_local_experts).T.ravel().tolist() - ) - # [ep_size, num_local_experts]. Restore the output chunks by local experts. - self.restore_output_by_local_experts = ( - input_chunk_idxs.reshape(self.num_local_experts, -1).T.ravel().tolist() - ) - - # Token drop and padding. - # We need to keep track of the token num if we drop tokens without padding them. - self.num_out_tokens = None - # Drop and pad the input to capacity. - self.drop_and_pad = self.config.moe_pad_expert_input_to_capacity - if self.drop_and_pad: - assert self.config.moe_expert_capacity_factor is not None - self.capacity = None - - # A cuda stream synchronization is needed in self.token_permutation() - # in some cases, because there are several non-blocking DtoH data - # transfers called in self.preprocess(). The synchronization happens - # at different points based on MoE settings as late as possible. - # Valid sync points are "before_permutation_1", "before_ep_alltoall", - # "before_finish", and "no_sync". - self.cuda_sync_point = "no_sync" - - def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: - """ - Preprocess routing map for AlltoAll communication and token permutation. - This method computes the number of tokens assigned to each expert based on - the routing map. It also initializes the necessary data structures for - AlltoAll communication, such as input and output splits, and the mapping - between global tokens and local experts. - - Args: - routing_map (torch.Tensor): The mapping of tokens to experts, with shape - [num_tokens, num_experts]. - - Returns: - torch.Tensor: Tensor containing the number of tokens assigned to local expert. - """ - num_local_tokens_per_expert = routing_map.sum(dim=0).long() - # num_local_tokens_per_expert: [num_experts] - - ep_size = self.config.expert_model_parallel_size - if self.drop_and_pad: - # Drop and pad the input to capacity. 
- num_tokens = routing_map.size(0) * self.config.moe_router_topk - self.capacity = get_capacity( - num_tokens=num_tokens, - num_experts=self.num_experts, - capacity_factor=self.config.moe_expert_capacity_factor, - ) - self.num_out_tokens = self.capacity * self.num_experts - num_tokens_per_local_expert = torch.full( - (self.num_local_experts,), self.capacity * self.ep_size, dtype=torch.long - ) - self.num_global_tokens_per_local_expert_cpu = torch.full( - (self.num_experts * self.tp_size,), self.capacity, dtype=torch.long - ) - return num_tokens_per_local_expert - elif self.config.moe_expert_capacity_factor is not None: - # Token drop but no pad. A synchronization is needed before the first - # permutation to get the `num_out_tokens` CPU value. - self.num_out_tokens = num_local_tokens_per_expert.sum().to( - torch.device("cpu"), non_blocking=True - ) - self.cuda_sync_point = "before_permutation_1" - else: - # Dropless - self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk - if self.ep_size > 1 or self.num_local_experts > 1: - # Token dropless and enable ep. A synchronization is needed before expert parallel - # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. - self.cuda_sync_point = "before_ep_alltoall" - else: - # Token dropless and no ep. A synchronization is needed to get the - # `tokens_per_expert` CPU value. - self.cuda_sync_point = "before_finish" - - if ep_size > 1: - # =================================================== - # Calculate input_splits, output_splits for alltoall-v. - # =================================================== - self.input_splits = ( - num_local_tokens_per_expert.reshape(ep_size, self.num_local_experts) - .sum(axis=1) - .to(torch.device("cpu"), non_blocking=True) - .numpy() - ) - num_global_tokens_per_expert = tensor_parallel.gather_from_sequence_parallel_region( - num_local_tokens_per_expert, group=self.ep_group - ).reshape(ep_size, self.num_experts) - self.num_global_tokens_per_local_expert = num_global_tokens_per_expert[ - :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 - ] - self.output_splits = ( - self.num_global_tokens_per_local_expert.sum(axis=-1) - .to(torch.device("cpu"), non_blocking=True) - .numpy() - ) - num_tokens_per_local_expert = self.num_global_tokens_per_local_expert.sum(axis=0).to( - torch.device("cpu"), non_blocking=True - ) - # =================================================== - # num_global_tokens_per_expert: [ep_size, num_experts] - # num_global_tokens_per_local_expert: [ep_size, num_local_experts] - # num_tokens_per_local_expert: [num_local_experts] - # =================================================== - else: - self.num_global_tokens_per_local_expert = num_local_tokens_per_expert.reshape( - -1, self.num_experts - ) - num_tokens_per_local_expert = num_local_tokens_per_expert.to( - torch.device("cpu"), non_blocking=True - ) - - if self.num_local_experts > 1: - self.num_global_tokens_per_local_expert_cpu = ( - self.num_global_tokens_per_local_expert.view(-1, self.num_local_experts).to( - torch.device("cpu"), non_blocking=True - ) - ) - - return num_tokens_per_local_expert - - def token_permutation( - self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor - ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Dispatch tokens to local experts using AlltoAll communication. - - Args: - hidden_states (torch.Tensor): Input token embeddings. - probs (torch.Tensor): Probs of tokens assigned to experts. - Shape: [num_tokens, num_experts]. 
- routing_map (torch.Tensor): Mapping of tokens assigned to experts. - Shape: [num_tokens, num_experts]. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: - - Permuted token embeddings for local experts. - - Number of tokens per expert. - """ - # Preprocess: Get the metadata for communication, permutation and computation operations. - self.hidden_shape = hidden_states.shape - self.probs = probs - self.routing_map = routing_map - assert probs.dim() == 2, "Expected 2D tensor for probs" - assert routing_map.dim() == 2, "Expected 2D tensor for routing map" - hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) - tokens_per_expert = self.preprocess(routing_map) - - # Perform tensor parallel AlltoAll communication - # hidden_states: [S*B/TP, H] -> [S*B, H/TP] - if parallel_state.get_tensor_model_parallel_world_size() > 1: - hidden_states = tensor_parallel.all_to_all_sp2hp(hidden_states) - - # Permutation 1: input to AlltoAll input - self.hidden_shape_before_permute = hidden_states.shape - if self.cuda_sync_point == "before_permutation_1": - torch.cuda.current_stream().synchronize() - permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( - hidden_states, routing_map, num_out_tokens=self.num_out_tokens - ) - - # Perform expert parallel AlltoAll communication - if self.cuda_sync_point == "before_ep_alltoall": - torch.cuda.current_stream().synchronize() - global_input_tokens = tensor_parallel.all_to_all( - parallel_state.get_expert_model_parallel_group(), - permutated_local_input_tokens, - self.output_splits, - self.input_splits, - ) - - # Permutation 2: Sort tokens by local expert. - if self.num_local_experts > 1: - global_input_tokens = sort_chunks_by_idxs( - global_input_tokens, - self.num_global_tokens_per_local_expert_cpu.ravel(), - self.sort_input_by_local_experts, - ) - - # Perform tensor parallel AllGather on the hidden dimension to obtain the input tokens. - # global_input_tokens: [SEQL, H/TP] -> [SEQL, H] - if parallel_state.get_tensor_model_parallel_world_size() > 1: - global_input_tokens = tensor_parallel.all_gather_last_dim_from_tensor_parallel_region( - global_input_tokens - ) - if self.cuda_sync_point == "before_finish": - torch.cuda.current_stream().synchronize() - - return global_input_tokens, tokens_per_expert - - def token_unpermutation( - self, hidden_states: torch.Tensor, bias: torch.Tensor = None - ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: - """ - Reverse the token permutation to restore the original order. - - Args: - hidden_states (torch.Tensor): Output from local experts. - bias (torch.Tensor, optional): Bias tensor (not supported). - - Returns: - Tuple[torch.Tensor, Optional[torch.Tensor]]: - - Unpermuted token embeddings in the original order. - - None (bias is not supported). - """ - assert bias is None, "Bias is not supported in MoEAlltoAllTokenDispatcher" - - # Perform tensor parallel Reduce-Scatter - # hidden_states: [SEQL, H] -> [SEQL, H/TP] - if parallel_state.get_tensor_model_parallel_world_size() > 1: - hidden_states = tensor_parallel.reduce_scatter_last_dim_to_tensor_parallel_region( - hidden_states - ) - - # Unpermutation 2: Unsort tokens by local expert. 
- if self.num_local_experts > 1: - hidden_states = sort_chunks_by_idxs( - hidden_states, - self.num_global_tokens_per_local_expert_cpu.T.ravel(), - self.restore_output_by_local_experts, - ) - - # Perform expert parallel AlltoAll communication - # hidden_states: [SEQL, H] -> [SEQL, H/TP] - permutated_local_input_tokens = tensor_parallel.all_to_all( - parallel_state.get_expert_model_parallel_group(), - hidden_states, - self.input_splits, - self.output_splits, - ) - - # Unpermutation 1: AlltoAll output to output - output = unpermute( - permutated_local_input_tokens, - self.reversed_local_input_permutation_mapping, - probs=self.probs, - restore_shape=self.hidden_shape_before_permute, - routing_map=self.routing_map, - ) - - # Perform tensor parallel AlltoAll communication - # output: [S*B, H/TP] -> [S*B/TP, H] - if parallel_state.get_tensor_model_parallel_world_size() > 1: - output = tensor_parallel.all_to_all_hp2sp(output) - - # Reshape the output tensor - output = output.view(self.hidden_shape) - return output, None +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +# type: ignore +# This file will be deprecated soon. We won't fix the mypy type checks. + +from typing import List, Optional, Tuple + +import torch +import torch.distributed + +from megatron.core import parallel_state, tensor_parallel +from megatron.core.transformer.moe.moe_utils import ( + get_capacity, + permute, + sort_chunks_by_idxs, + unpermute, +) +from megatron.core.transformer.moe.token_dispatcher import MoETokenDispatcher +from megatron.core.transformer.transformer_config import TransformerConfig + + +class MoEAlltoAllSEQTokenDispatcher(MoETokenDispatcher): + """ + The legacy implementation of the AlltoAll-based token dispatcher, which handles token + dispatching on the sequence level instead of token level. The core of this implementation + lies in each device dispatching on the entire sequence, with the hidden state being partitioned. + + Note: This class is a replica of the MoEAlltoAllTokenDispatcher from version 0.8. + """ + + def __init__( + self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig + ) -> None: + """ + Initialize the AlltoAll token dispatcher. + + Args: + num_local_experts (int): Number of local experts on the current device. + local_expert_indices (List[int]): Indices of local experts on the current device. + config (TransformerConfig): Configuration for the transformer model. + """ + super().__init__(config=config) + self.hidden_shape = None + self.num_input_tokens = None + self.num_local_experts = num_local_experts + self.num_experts = config.num_moe_experts + assert self.num_local_experts > 0, "Expected at least one expert" + self.local_expert_indices = local_expert_indices + assert ( + len(self.local_expert_indices) == self.num_local_experts + ), "Invalid local expert indices" + for i in range(len(self.local_expert_indices) - 1): + assert ( + self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 + ), "local_expert_indices must be continous" + self.ep_size = config.expert_model_parallel_size + self.tp_size = config.tensor_model_parallel_size + self.probs = None + self.input_splits = None + self.output_splits = None + # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent + # to each local expert by all ranks. + self.num_global_tokens_per_local_expert_cpu = None + input_chunk_idxs = torch.arange(self.num_experts) + # [num_local_experts, ep_size]. Sort the input chunks by local experts. 
+ self.sort_input_by_local_experts = input_chunk_idxs.reshape( + -1, self.num_local_experts + ).T.ravel() + # [ep_size, num_local_experts]. Restore the output chunks by local experts. + self.restore_output_by_local_experts = input_chunk_idxs.reshape( + self.num_local_experts, -1 + ).T.ravel() + + # Token drop and padding. + # We need to keep track of the token num if we drop tokens without padding them. + self.num_out_tokens = None + # Drop and pad the input to capacity. + self.drop_and_pad = self.config.moe_pad_expert_input_to_capacity + if self.drop_and_pad: + assert self.config.moe_expert_capacity_factor is not None + self.capacity = None + + # A cuda stream synchronization is needed in self.token_permutation() + # in some cases, because there are several non-blocking DtoH data + # transfers called in self.preprocess(). The synchronization happens + # at different points based on MoE settings as late as possible. + # Valid sync points are "before_permutation_1", "before_ep_alltoall", + # "before_finish", and "no_sync". + self.cuda_sync_point = "no_sync" + + def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: + """ + Preprocess routing map for AlltoAll communication and token permutation. + This method computes the number of tokens assigned to each expert based on + the routing map. It also initializes the necessary data structures for + AlltoAll communication, such as input and output splits, and the mapping + between global tokens and local experts. + + Args: + routing_map (torch.Tensor): The mapping of tokens to experts, with shape + [num_tokens, num_experts]. + + Returns: + torch.Tensor: Tensor containing the number of tokens assigned to local expert. + """ + num_local_tokens_per_expert = routing_map.sum(dim=0).long() + # num_local_tokens_per_expert: [num_experts] + + ep_size = self.config.expert_model_parallel_size + if self.drop_and_pad: + # Drop and pad the input to capacity. + num_tokens = routing_map.size(0) * self.config.moe_router_topk + self.capacity = get_capacity( + num_tokens=num_tokens, + num_experts=self.num_experts, + capacity_factor=self.config.moe_expert_capacity_factor, + ) + self.num_out_tokens = self.capacity * self.num_experts + num_tokens_per_local_expert = torch.full( + (self.num_local_experts,), self.capacity * self.ep_size, dtype=torch.long + ) + self.num_global_tokens_per_local_expert_cpu = torch.full( + (self.num_experts * self.tp_size,), self.capacity, dtype=torch.long + ) + return num_tokens_per_local_expert + elif self.config.moe_expert_capacity_factor is not None: + # Token drop but no pad. A synchronization is needed before the first + # permutation to get the `num_out_tokens` CPU value. + self.num_out_tokens = num_local_tokens_per_expert.sum().to( + torch.device("cpu"), non_blocking=True + ) + self.cuda_sync_point = "before_permutation_1" + else: + # Dropless + self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk + if self.ep_size > 1 or self.num_local_experts > 1: + # Token dropless and enable ep. A synchronization is needed before expert parallel + # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. + self.cuda_sync_point = "before_ep_alltoall" + else: + # Token dropless and no ep. A synchronization is needed to get the + # `tokens_per_expert` CPU value. + self.cuda_sync_point = "before_finish" + + if ep_size > 1: + # =================================================== + # Calculate input_splits, output_splits for alltoall-v. 
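+            # input_splits[r]:  number of local tokens this rank sends to EP rank r
+            #                   (summed over that rank's local experts).
+            # output_splits[r]: number of tokens received from EP rank r for this
+            #                   rank's local experts. Both must reach the CPU before
+            #                   the variable-sized all-to-all can be launched.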
+ # =================================================== + self.input_splits = ( + num_local_tokens_per_expert.reshape(ep_size, self.num_local_experts) + .sum(axis=1) + .to(torch.device("cpu"), non_blocking=True) + .numpy() + ) + num_global_tokens_per_expert = tensor_parallel.gather_from_sequence_parallel_region( + num_local_tokens_per_expert, group=self.ep_group + ).reshape(ep_size, self.num_experts) + self.num_global_tokens_per_local_expert = num_global_tokens_per_expert[ + :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ] + self.output_splits = ( + self.num_global_tokens_per_local_expert.sum(axis=-1) + .to(torch.device("cpu"), non_blocking=True) + .numpy() + ) + num_tokens_per_local_expert = self.num_global_tokens_per_local_expert.sum(axis=0).to( + torch.device("cpu"), non_blocking=True + ) + # =================================================== + # num_global_tokens_per_expert: [ep_size, num_experts] + # num_global_tokens_per_local_expert: [ep_size, num_local_experts] + # num_tokens_per_local_expert: [num_local_experts] + # =================================================== + else: + self.num_global_tokens_per_local_expert = num_local_tokens_per_expert.reshape( + -1, self.num_experts + ) + num_tokens_per_local_expert = num_local_tokens_per_expert.to( + torch.device("cpu"), non_blocking=True + ) + + if self.num_local_experts > 1: + self.num_global_tokens_per_local_expert_cpu = ( + self.num_global_tokens_per_local_expert.view(-1, self.num_local_experts).to( + torch.device("cpu"), non_blocking=True + ) + ) + + return num_tokens_per_local_expert + + def token_permutation( + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Dispatch tokens to local experts using AlltoAll communication. + + Args: + hidden_states (torch.Tensor): Input token embeddings. + probs (torch.Tensor): Probs of tokens assigned to experts. + Shape: [num_tokens, num_experts]. + routing_map (torch.Tensor): Mapping of tokens assigned to experts. + Shape: [num_tokens, num_experts]. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - Permuted token embeddings for local experts. + - Number of tokens per expert. + """ + # Preprocess: Get the metadata for communication, permutation and computation operations. 
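+        # The dispatch then proceeds in stages: an sp->hp all-to-all across TP ranks,
+        # a permutation that groups tokens by destination expert, an all-to-all-v
+        # across EP ranks, a sort of the received chunks by local expert, and an
+        # all-gather of the hidden dimension when TP > 1.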
+ self.hidden_shape = hidden_states.shape + self.probs = probs + self.routing_map = routing_map + assert probs.dim() == 2, "Expected 2D tensor for probs" + assert routing_map.dim() == 2, "Expected 2D tensor for routing map" + hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) + tokens_per_expert = self.preprocess(routing_map) + + # Perform tensor parallel AlltoAll communication + # hidden_states: [S*B/TP, H] -> [S*B, H/TP] + if parallel_state.get_tensor_model_parallel_world_size() > 1: + hidden_states = tensor_parallel.all_to_all_sp2hp(hidden_states) + + # Permutation 1: input to AlltoAll input + self.hidden_shape_before_permute = hidden_states.shape + if self.cuda_sync_point == "before_permutation_1": + torch.cuda.current_stream().synchronize() + permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( + hidden_states, routing_map, num_out_tokens=self.num_out_tokens + ) + + # Perform expert parallel AlltoAll communication + if self.cuda_sync_point == "before_ep_alltoall": + torch.cuda.current_stream().synchronize() + global_input_tokens = tensor_parallel.all_to_all( + parallel_state.get_expert_model_parallel_group(), + permutated_local_input_tokens, + self.output_splits, + self.input_splits, + ) + + # Permutation 2: Sort tokens by local expert. + if self.num_local_experts > 1: + global_input_tokens = sort_chunks_by_idxs( + global_input_tokens, + self.num_global_tokens_per_local_expert_cpu.ravel(), + self.sort_input_by_local_experts, + ) + + # Perform tensor parallel AllGather on the hidden dimension to obtain the input tokens. + # global_input_tokens: [SEQL, H/TP] -> [SEQL, H] + if parallel_state.get_tensor_model_parallel_world_size() > 1: + global_input_tokens = tensor_parallel.all_gather_last_dim_from_tensor_parallel_region( + global_input_tokens + ) + if self.cuda_sync_point == "before_finish": + torch.cuda.current_stream().synchronize() + + return global_input_tokens, tokens_per_expert + + def token_unpermutation( + self, hidden_states: torch.Tensor, bias: torch.Tensor = None + ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: + """ + Reverse the token permutation to restore the original order. + + Args: + hidden_states (torch.Tensor): Output from local experts. + bias (torch.Tensor, optional): Bias tensor (not supported). + + Returns: + Tuple[torch.Tensor, Optional[torch.Tensor]]: + - Unpermuted token embeddings in the original order. + - None (bias is not supported). + """ + assert bias is None, "Bias is not supported in MoEAlltoAllTokenDispatcher" + + # Perform tensor parallel Reduce-Scatter + # hidden_states: [SEQL, H] -> [SEQL, H/TP] + if parallel_state.get_tensor_model_parallel_world_size() > 1: + hidden_states = tensor_parallel.reduce_scatter_last_dim_to_tensor_parallel_region( + hidden_states + ) + + # Unpermutation 2: Unsort tokens by local expert. 
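+        # (Reverses the expert-major ordering applied in token_permutation so the
+        # chunks line up with the splits expected by the reverse all-to-all.)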
+ if self.num_local_experts > 1: + hidden_states = sort_chunks_by_idxs( + hidden_states, + self.num_global_tokens_per_local_expert_cpu.T.ravel(), + self.restore_output_by_local_experts, + ) + + # Perform expert parallel AlltoAll communication + # hidden_states: [SEQL, H] -> [SEQL, H/TP] + permutated_local_input_tokens = tensor_parallel.all_to_all( + parallel_state.get_expert_model_parallel_group(), + hidden_states, + self.input_splits, + self.output_splits, + ) + + # Unpermutation 1: AlltoAll output to output + output = unpermute( + permutated_local_input_tokens, + self.reversed_local_input_permutation_mapping, + probs=self.probs, + restore_shape=self.hidden_shape_before_permute, + routing_map=self.routing_map, + ) + + # Perform tensor parallel AlltoAll communication + # output: [S*B, H/TP] -> [S*B/TP, H] + if parallel_state.get_tensor_model_parallel_world_size() > 1: + output = tensor_parallel.all_to_all_hp2sp(output) + + # Reshape the output tensor + output = output.view(self.hidden_shape) + return output, None diff --git a/megatron/core/transformer/moe/moe_layer.py b/megatron/core/transformer/moe/moe_layer.py index ea0b0b1..592b0cd 100644 --- a/megatron/core/transformer/moe/moe_layer.py +++ b/megatron/core/transformer/moe/moe_layer.py @@ -1,147 +1,151 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from abc import ABC, abstractmethod -from dataclasses import dataclass -from typing import Union - -import torch - -from megatron.core import parallel_state, tensor_parallel -from megatron.core.transformer.mlp import MLPSubmodules -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.moe.legacy_a2a_token_dispatcher import MoEAlltoAllSEQTokenDispatcher -from megatron.core.transformer.moe.router import TopKRouter -from megatron.core.transformer.moe.token_dispatcher import ( - MoEAllGatherTokenDispatcher, - MoEAlltoAllTokenDispatcher, -) -from megatron.core.transformer.spec_utils import ModuleSpec, build_module -from megatron.core.transformer.transformer_config import TransformerConfig - - -@dataclass -class MoESubmodules: - """MoE Layer Submodule spec""" - - experts: Union[ModuleSpec, type] = None - shared_experts: Union[ModuleSpec, type] = None - - -class BaseMoELayer(MegatronModule, ABC): - """Base class for a mixture of experts layer. - - Args: - config (TransformerConfig): Configuration object for the transformer model. 
- """ - - def __init__(self, config: TransformerConfig, layer_number: int = None): - super(BaseMoELayer, self).__init__(config) - self.config = config - self.expert_parallel_size = parallel_state.get_expert_model_parallel_world_size() - assert self.expert_parallel_size > 0, "Expected non-negative expert parallel size" - - assert self.config.num_moe_experts % self.expert_parallel_size == 0 - self.num_local_experts = self.config.num_moe_experts // self.expert_parallel_size - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) - - self.use_shared_expert = self.config.moe_shared_expert_intermediate_size is not None - self.shared_expert_overlap = self.config.moe_shared_expert_overlap - - self.local_expert_indices = [ - local_expert_indices_offset + i for i in range(self.num_local_experts) - ] - assert all(map(lambda x: x < self.config.num_moe_experts, self.local_expert_indices)) - self.router = None - self.experts = None - self.shared_experts = None - self.token_dispatcher = None - self.layer_number = layer_number - - @abstractmethod - def forward(self, hidden_states): - """Forward method for the MoE layer.""" - pass - - def set_layer_number(self, layer_number: int): - """Set the layer number for the MoE layer.""" - self.layer_number = layer_number - self.router.set_layer_number(layer_number) - - -class MoELayer(BaseMoELayer): - """Mixture of experts Layer **currently only supports no token dropping**. - - Args: - BaseMoELayer (MegatronModule): Base class for MoE layers - """ - - def __init__( - self, config: TransformerConfig, submodules: MLPSubmodules = None, layer_number: int = None - ): - self.submodules = submodules - super(MoELayer, self).__init__(config=config, layer_number=layer_number) - self.moe_layer_recompute = config.moe_layer_recompute - - # Initialize router - self.router = TopKRouter(config=self.config) - - # Initialize token dispatcher - if config.moe_token_dispatcher_type == "allgather": - self.token_dispatcher = MoEAllGatherTokenDispatcher( - self.num_local_experts, self.local_expert_indices, config=self.config - ) - elif config.moe_token_dispatcher_type == "alltoall": - self.token_dispatcher = MoEAlltoAllTokenDispatcher( - self.num_local_experts, self.local_expert_indices, config=self.config - ) - elif config.moe_token_dispatcher_type == "alltoall_seq": - self.token_dispatcher = MoEAlltoAllSEQTokenDispatcher( - self.num_local_experts, self.local_expert_indices, config=self.config - ) - else: - raise ValueError( - f"Unsupported token dispatcher type: {config.moe_token_dispatcher_type}" - ) - - # Initialize experts - self.experts = build_module(self.submodules.experts, self.num_local_experts, self.config) - - # Initialize shared experts - if self.use_shared_expert: - self.shared_experts = build_module(self.submodules.shared_experts, config=self.config) - if self.shared_expert_overlap: - self.token_dispatcher.set_shared_experts(self.shared_experts) - - def forward(self, hidden_states: torch.Tensor): - if ( - self.training - and self.config.tensor_model_parallel_size > 1 - and not self.config.sequence_parallel - ): - raise ValueError( - "During training, performance may degrade if MoE and tensor parallelism" - "are enabled without also enabling sequence parallelism." 
- ) - - # process MoE - def custom_forward(hidden_states): - probs, routing_map = self.router(hidden_states) - (dispatched_input, tokens_per_expert) = self.token_dispatcher.token_permutation( - hidden_states, probs, routing_map - ) - expert_output, mlp_bias = self.experts(dispatched_input, tokens_per_expert) - output, mlp_bias = self.token_dispatcher.token_unpermutation(expert_output, mlp_bias) - if self.use_shared_expert and not self.shared_expert_overlap: - # if shared_expert_overlap is True, the expert calculation happens in - # the token_dispatcher to overlap communications and computations - output += self.shared_experts(hidden_states) - return output, mlp_bias - - if self.moe_layer_recompute: - output, mlp_bias = tensor_parallel.checkpoint(custom_forward, False, hidden_states) - else: - output, mlp_bias = custom_forward(hidden_states) - - return output, mlp_bias +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Union + +import torch + +from megatron.core import parallel_state, tensor_parallel +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.moe.legacy_a2a_token_dispatcher import MoEAlltoAllSEQTokenDispatcher +from megatron.core.transformer.moe.router import TopKRouter +from megatron.core.transformer.moe.token_dispatcher import ( + MoEAllGatherTokenDispatcher, + MoEAlltoAllTokenDispatcher, + MoEFlexTokenDispatcher, +) +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import TransformerConfig + + +@dataclass +class MoESubmodules: + """MoE Layer Submodule spec""" + + experts: Union[ModuleSpec, type] = None + shared_experts: Union[ModuleSpec, type] = None + + +class BaseMoELayer(MegatronModule, ABC): + """Base class for a mixture of experts layer. + + Args: + config (TransformerConfig): Configuration object for the transformer model. + """ + + def __init__(self, config: TransformerConfig, layer_number: int = None): + super(BaseMoELayer, self).__init__(config) + self.config = config + self.expert_parallel_size = parallel_state.get_expert_model_parallel_world_size() + assert self.expert_parallel_size > 0, "Expected non-negative expert parallel size" + + assert self.config.num_moe_experts % self.expert_parallel_size == 0 + self.num_local_experts = self.config.num_moe_experts // self.expert_parallel_size + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) + + self.use_shared_expert = self.config.moe_shared_expert_intermediate_size is not None + self.shared_expert_overlap = self.config.moe_shared_expert_overlap + + self.local_expert_indices = [ + local_expert_indices_offset + i for i in range(self.num_local_experts) + ] + assert all(map(lambda x: x < self.config.num_moe_experts, self.local_expert_indices)) + self.router = None + self.experts = None + self.shared_experts = None + self.token_dispatcher = None + self.layer_number = layer_number + + @abstractmethod + def forward(self, hidden_states): + """Forward method for the MoE layer.""" + pass + + def set_layer_number(self, layer_number: int): + """Set the layer number for the MoE layer.""" + self.layer_number = layer_number + self.router.set_layer_number(layer_number) + + +class MoELayer(BaseMoELayer): + """Mixture of experts Layer **currently only supports no token dropping**. 
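+
+    Routing is performed by a TopKRouter and token movement by one of the allgather,
+    alltoall, alltoall_seq, or flex token dispatchers, selected through
+    config.moe_token_dispatcher_type.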
+ + Args: + BaseMoELayer (MegatronModule): Base class for MoE layers + """ + + def __init__( + self, config: TransformerConfig, submodules: MoESubmodules = None, layer_number: int = None + ): + self.submodules = submodules + super(MoELayer, self).__init__(config=config, layer_number=layer_number) + self.moe_layer_recompute = config.moe_layer_recompute + + # Initialize router + self.router = TopKRouter(config=self.config) + + # Initialize token dispatcher + if config.moe_token_dispatcher_type == "allgather": + self.token_dispatcher = MoEAllGatherTokenDispatcher( + self.num_local_experts, self.local_expert_indices, config=self.config + ) + elif config.moe_token_dispatcher_type == "alltoall": + self.token_dispatcher = MoEAlltoAllTokenDispatcher( + self.num_local_experts, self.local_expert_indices, config=self.config + ) + elif config.moe_token_dispatcher_type == "alltoall_seq": + self.token_dispatcher = MoEAlltoAllSEQTokenDispatcher( + self.num_local_experts, self.local_expert_indices, config=self.config + ) + elif config.moe_token_dispatcher_type == "flex": + self.token_dispatcher = MoEFlexTokenDispatcher( + self.num_local_experts, self.local_expert_indices, config=self.config + ) + else: + raise ValueError( + f"Unsupported token dispatcher type: {config.moe_token_dispatcher_type}" + ) + + # Initialize experts + self.experts = build_module(self.submodules.experts, self.num_local_experts, self.config) + + # Initialize shared experts + if self.use_shared_expert: + self.shared_experts = build_module(self.submodules.shared_experts, config=self.config) + if self.shared_expert_overlap: + self.token_dispatcher.set_shared_experts(self.shared_experts) + + def forward(self, hidden_states: torch.Tensor): + if ( + self.training + and self.config.tensor_model_parallel_size > 1 + and not self.config.sequence_parallel + ): + raise ValueError( + "During training, performance may degrade if MoE and tensor parallelism" + "are enabled without also enabling sequence parallelism." + ) + + # process MoE + def custom_forward(hidden_states): + probs, routing_map = self.router(hidden_states) + (dispatched_input, tokens_per_expert) = self.token_dispatcher.token_permutation( + hidden_states, probs, routing_map + ) + expert_output, mlp_bias = self.experts(dispatched_input, tokens_per_expert) + output, mlp_bias = self.token_dispatcher.token_unpermutation(expert_output, mlp_bias) + if self.use_shared_expert and not self.shared_expert_overlap: + # if shared_expert_overlap is True, the expert calculation happens in + # the token_dispatcher to overlap communications and computations + output = output + self.shared_experts(hidden_states) + return output, mlp_bias + + if self.moe_layer_recompute: + output, mlp_bias = tensor_parallel.checkpoint(custom_forward, False, hidden_states) + else: + output, mlp_bias = custom_forward(hidden_states) + + return output, mlp_bias diff --git a/megatron/core/transformer/moe/moe_utils.py b/megatron/core/transformer/moe/moe_utils.py index e8735a9..cf65b39 100644 --- a/megatron/core/transformer/moe/moe_utils.py +++ b/megatron/core/transformer/moe/moe_utils.py @@ -1,518 +1,655 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import math -from typing import Optional - -import torch - -from megatron.core import parallel_state - - -def switch_load_balancing_loss_func( - probs: torch.Tensor, - tokens_per_expert: torch.Tensor, - topk: int, - moe_aux_loss_coeff: float, - sequence_partition_group=None, -): - """Calculate the auxiliary loss for load balancing. 
- Refer to the Switch Transformer paper (https://arxiv.org/abs/2101.03961) for details. - - Args: - probs (torch.Tensor): Softmax probabilities output by the router for each token. - Shape in [num_tokens, num_experts]. - tokens_per_expert (torch.Tensor): Number of tokens assigned to each expert. - Shape in [num_experts] - topk (int): The number of experts selected for each token. - moe_aux_loss_coeff (float): The coefficient for the auxiliary loss. - sequence_partition_group (optional): The parallel group over which the sequence is - partitioned. If None, no partitioning is applied. - Defaults to None. - - Returns: - torch.Tensor: The auxiliary loss for load balancing. - """ - num_sub_sequence = 1 - - # If the sequence is partitioned by certain parallelism strategies like Sequence Parallelism - # or Context Parallelism, compute the gradient of the auxiliary loss with respect to the full - # sequence. - if sequence_partition_group is not None: - # We can keep `aggregated_probs_per_expert` local since we don't need the gradient for - # `tokens_per_expert`, saving one allreduce operation for `aggregated_probs_per_expert`. - num_sub_sequence = torch.distributed.get_world_size(sequence_partition_group) - torch.distributed.all_reduce(tokens_per_expert, group=sequence_partition_group) - - num_tokens = probs.shape[0] * num_sub_sequence - num_experts = probs.shape[1] - - # The formula of aux_loss: aux_loss = sum((probs_per_expert/num_tokens) * - # (tokens_per_expert/(num_tokens*topk))) * num_experts * moe_aux_loss_coeff. - # This can be simplified to fuse the division and multiplication operations. - aggregated_probs_per_expert = probs.sum(dim=0) - aux_loss = torch.sum(aggregated_probs_per_expert * tokens_per_expert) * ( - num_experts * moe_aux_loss_coeff / (num_tokens * num_tokens * topk) - ) - return aux_loss - - -def sequence_load_balancing_loss_func( - probs: torch.Tensor, - routing_map: torch.Tensor, - tokens_per_expert: torch.Tensor, - batch_size: int, - seq_length: int, - topk: int, - moe_aux_loss_coeff: float, - sequence_partition_group=None, -): - """ - Calculate the auxiliary loss in sequence-level by computing the loss for each individual sample. - Refer to the DeepSeek-V2 huggingface repo - (https://huggingface.co/deepseek-ai/DeepSeek-V2) for details. - """ - num_sub_sequence = 1 - - # If the sequence is partitioned by certain parallelism strategies like Sequence Parallelism - # or Context Parallelism, compute the gradient of the auxiliary loss with respect to the full - # sequence. - if sequence_partition_group is not None: - # We can keep `aggregated_probs_per_expert` local since we don't need the gradient for - # `tokens_per_expert`, saving one allreduce operation for `aggregated_probs_per_expert`. - num_sub_sequence = torch.distributed.get_world_size(sequence_partition_group) - torch.distributed.all_reduce(tokens_per_expert, group=sequence_partition_group) - - assert num_sub_sequence == 1, "Do not support sequence aux loss in sequence partition case" - - num_experts = probs.shape[1] - - probs_for_aux_loss = probs.view(seq_length, batch_size, -1) - cost_coeff = routing_map.view(seq_length, batch_size, -1).sum(dim=0).float() - cost_coeff.div_(seq_length * topk / num_experts) - seq_aux_loss = (cost_coeff * probs_for_aux_loss.mean(dim=0)).sum(dim=1).mean() - seq_aux_loss *= moe_aux_loss_coeff - - return seq_aux_loss - - -def z_loss_func(logits, z_loss_coeff): - """Encourages the router's logits to remain small to enhance stability. 
- Please refer to the ST-MoE paper (https://arxiv.org/pdf/2202.08906.pdf) for details. - - Args: - logits (torch.Tensor): The logits of the router. - - Returns: - torch.Tensor: The logits after applying the z-loss. - """ - - z_loss = torch.mean(torch.square(torch.logsumexp(logits, dim=-1))) * z_loss_coeff - return z_loss - - -def sinkhorn(cost: torch.Tensor, tol: float = 0.0001): - """Sinkhorn based MoE routing function""" - cost = torch.exp(cost) - d0 = torch.ones(cost.size(0), device=cost.device, dtype=cost.dtype) - d1 = torch.ones(cost.size(1), device=cost.device, dtype=cost.dtype) - - eps = 0.00000001 - error = 1e9 - d1_old = d1 - while error > tol: - d0 = (1 / d0.size(0)) * 1 / (torch.sum(d1 * cost, 1) + eps) - d1 = (1 / d1.size(0)) * 1 / (torch.sum(d0.unsqueeze(1) * cost, 0) + eps) - error = torch.mean(torch.abs(d1_old - d1)) - d1_old = d1 - return d1 * cost * d0.unsqueeze(1) - - -def get_capacity(num_tokens: int, num_experts: int, capacity_factor: float, min_capacity=None): - """ - Calculate the capacity of each expert. - - Args: - num_tokens (int): num of the input tokens. - num_experts (int): num of the experts. - capacity_factor (float): Capacity factor. - min_capacity (int, optional): Minimum capacity. Defaults to None. - - Returns: - Tensor: Capacity of each expert. - """ - capacity = math.ceil((num_tokens / num_experts) * capacity_factor) - if min_capacity is not None and capacity < min_capacity: - capacity = min_capacity - return capacity - - -class MoEAuxLossAutoScaler(torch.autograd.Function): - """An AutoScaler that triggers the backward pass and scales the grad for auxiliary loss.""" - - main_loss_backward_scale: torch.Tensor = torch.tensor(1.0) - - @staticmethod - def forward(ctx, output: torch.Tensor, aux_loss: torch.Tensor): - """Preserve the aux_loss by storing it in the context to avoid garbage collection. - - Args: - output (torch.Tensor): The output tensor. - aux_loss (torch.Tensor): The auxiliary loss tensor. - - Returns: - torch.Tensor: The output tensor. - """ - ctx.save_for_backward(aux_loss) - return output - - @staticmethod - def backward(ctx, grad_output: torch.Tensor): - """Compute and scale the gradient for auxiliary loss.. - - Args: - grad_output (torch.Tensor): The gradient of the output. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: The gradient of the output, scaled auxiliary loss - gradient. - """ - (aux_loss,) = ctx.saved_tensors - aux_loss_backward_scale = MoEAuxLossAutoScaler.main_loss_backward_scale - scaled_aux_loss_grad = torch.ones_like(aux_loss) * aux_loss_backward_scale - return grad_output, scaled_aux_loss_grad - - @staticmethod - def set_loss_scale(scale: torch.Tensor): - """set the scale of the aux loss. - - Args: - scale (torch.Tensor): The scale value to set. Please ensure that the scale passed in - matches the scale of the main_loss. - """ - MoEAuxLossAutoScaler.main_loss_backward_scale = scale - - -def permute(tokens, routing_map, num_out_tokens: int = None): - """Permute the tokens and probs based on the mask. - Tokens with the same designated expert will be grouped together. - The shape of mask is [tokens, num_experts], it indicates which experts were selected - by each token. - - Args: - tokens (torch.Tensor): The input token tensor, [num_tokens, hidden]. - routing_map (torch.Tensor): The sparse token to expert mapping, [num_tokens, num_experts]. - num_out_tokens (int, optional): The number of output tokens. If None, it's set to - the number of input tokens. 
- """ - num_tokens, hidden = tokens.shape - num_experts = routing_map.shape[1] - - # mask [num_tokens, num_experts] -> [num_experts, num_tokens] - routing_map = routing_map.bool().T.contiguous() - - # Create a dense expert-to-token mapping from the sparse token-to-expert mapping - token_indices = ( - torch.arange(num_tokens, device=routing_map.device).unsqueeze(0).expand(num_experts, -1) - ) - sorted_indices = token_indices.masked_select(routing_map) - - # use the mapping to permute the tokens - permuted_input = tokens.index_select(0, sorted_indices) - - return permuted_input, sorted_indices - - -def unpermute( - permuted_tokens: torch.Tensor, - sorted_indices: torch.Tensor, - restore_shape: torch.Size, - probs: torch.Tensor = None, - routing_map: torch.Tensor = None, -): - """ - Restore the original order of tokens after permutation. If probs are provided, it - will also apply them to the tokens before restoring the order. - - Args: - permuted_tokens (torch.Tensor): The permuted token tensor. - sorted_indices (torch.Tensor): The indices used to sort the tokens. - restore_shape (torch.Size): The shape of the unpermuted tensor. - probs (torch.Tensor, optional): The unpermuted probs tensor, - routing_map (torch.Tensor, optional): Token to expert mapping, shape - [num_tokens, num_experts]. - - Returns: - torch.Tensor: The tokens restored to their original order. - """ - _, hidden = restore_shape - - if probs is not None: - assert routing_map is not None, "Mask must be provided to permute the probs." - permuted_probs = probs.T.contiguous().masked_select(routing_map.T.contiguous()) - permuted_tokens = permuted_tokens * permuted_probs.unsqueeze(-1) - - # Create an output tensor filled with zeros - output_tokens = torch.zeros( - restore_shape, device=permuted_tokens.device, dtype=permuted_tokens.dtype - ) - # Scatter add the permuted_input back to the original positions - output_tokens.scatter_add_(0, sorted_indices.unsqueeze(1).expand(-1, hidden), permuted_tokens) - return output_tokens - - -def sort_chunks_by_idxs(input: torch.Tensor, split_sizes: torch.Tensor, sorted_idxs: torch.Tensor): - """Split and sort the input tensor based on the split_sizes and sorted indices.""" - input = torch.split(input, split_sizes.tolist(), dim=0) - output = torch.cat([input[i] for i in sorted_idxs], dim=0) - return output - - -def device_limited_topk( - scores: torch.Tensor, - topk: int, - num_tokens: int, - num_experts: int, - moe_router_topk_limited_devices: int, -): - """Perform top-k routing on a subset of expert parallel ranks. - - Selects N ranks for each token, then conducts top-k selection among experts on these devices. - See DeepSeek-V2 technical report (https://arxiv.org/pdf/2405.04434) for details. - - Args: - scores (torch.Tensor): Softmax scores from the router. - topk (int): The number of experts to select for each token. - num_tokens (int): The number of tokens. - num_experts (int): The number of experts. - moe_router_topk_limited_devices (int): Number of expert parallel ranks to consider for - each token during routing. None means no device limitation. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: Probs and indices tensor. 
- """ - - # Organize the experts into groups - num_group = ( - parallel_state.get_expert_model_parallel_world_size() - ) # num_group equals to expert parallel size - group_scores = scores.view(num_tokens, num_group, -1).max(dim=-1).values - group_idx = torch.topk(group_scores, k=moe_router_topk_limited_devices, dim=-1, sorted=False)[1] - group_mask = torch.zeros_like(group_scores) - group_mask.scatter_(1, group_idx, 1) - - # Mask the experts based on selection groups - score_mask = ( - group_mask.unsqueeze(-1) - .expand(num_tokens, num_group, num_experts // num_group) - .reshape(num_tokens, -1) - ) - - masked_scores = scores.masked_fill(~score_mask.bool(), 0.0) - probs, top_indices = torch.topk(masked_scores, k=topk, dim=-1) - - return probs, top_indices - -@torch.compile(options={"triton.cudagraphs": True, "triton.cudagraph_trees": False}) -def topk_softmax_with_capacity( - logits: torch.Tensor, - topk: int, - capacity_factor: Optional[float] = None, - pad_to_capacity: bool = False, - drop_policy: str = "probs", - use_pre_softmax: bool = False, - moe_router_topk_limited_devices: int = None, - moe_router_topk_scaling_factor: float = None, - deterministic_mode: bool = False, -): - """Apply capacity and padding to the top-k selection. - Args: - logits (torch.Tensor): Logits tensor. - topk (int): The number of experts to select for each token. - capacity_factor (int): The capacity factor of each expert. Will drop tokens if the number - of tokens exceeds the capacity. - pad_to_capacity (bool): Whether to need padding in token drop mode. - drop_policy (str): The policy to drop tokens. Can be either "prob" or "position". - If "prob", the tokens with the lowest probabilities will be dropped. - If "position", tokens at the end of each batch will be dropped. - use_pre_softmax (bool): Whether to apply softmax before top-k selection. - moe_router_topk_limited_devices (int): Number of expert parallel ranks to consider for - each token during routing. None means no device limitation. - moe_router_topk_scaling_factor (float): Scaling factor for routing score in top-k - selection, only works when use_pre_softmax enabled. - deterministic_mode (bool): Deprecated. - Returns: - Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: - - routing_probs (torch.Tensor): A tensor of shape [num_tokens, num_experts] containing - the routing probabilities for each token to each expert. - - routing_map (torch.Tensor): A mask tensor of shape [num_tokens, num_experts] - indicating which experts were selected for each token. True values represent - the selected experts. - - tokens_per_expert (torch.Tensor): A tensor of shape [num_experts] containing - the number of local tokens assigned to each expert before dropping and padding. - """ - assert logits.dim() == 2, f"Expected 2D logits [num_tokens, num_experts], got {logits.dim()}." - num_tokens = logits.shape[0] - num_experts = logits.shape[1] - if use_pre_softmax: - # Pre softmax - scores = torch.softmax(logits, dim=-1, dtype=torch.float32).type_as(logits) - - if moe_router_topk_limited_devices: - probs, top_indices = device_limited_topk( - scores, topk, num_tokens, num_experts, moe_router_topk_limited_devices - ) - else: - probs, top_indices = torch.topk(scores, k=topk, dim=1) - - # Normalize the probs. - if moe_router_topk_scaling_factor: - probs = probs * moe_router_topk_scaling_factor - else: - # Post softmax - if topk == 1: - # Requires applying softmax before selecting the top-k when k is 1, - # since softmax on a [num_tokens, 1] would yield a zero gradient. 
- raise ValueError("Please use --moe-router-pre-softmax when topk is 1.") - assert ( - moe_router_topk_scaling_factor is None - ), "moe_router_topk_scaling_factor is not supported with post-softmax" - if moe_router_topk_limited_devices: - scores, top_indices = device_limited_topk( - logits, topk, num_tokens, num_experts, moe_router_topk_limited_devices - ) - else: - scores, top_indices = torch.topk(logits, k=topk, dim=1) - probs = torch.softmax(scores, dim=-1, dtype=torch.float32).type_as(logits) - - # TODO Try using element-wise operations instead of scatter? - topk_masked_gates = torch.zeros_like(logits).scatter(1, top_indices, probs) - topk_map = torch.zeros_like(logits).int().scatter(1, top_indices, 1).bool() - tokens_per_expert = topk_map.sum(dim=0) - - if capacity_factor is None: - # TopK without capacity - return topk_masked_gates, topk_map, tokens_per_expert - else: - # TopK with capacity - expert_capacity = get_capacity( - num_tokens=num_tokens * topk, num_experts=num_experts, capacity_factor=capacity_factor - ) - - # Maskout exceeded tokens - if drop_policy == "probs": - _, capacity_indices = torch.topk( - topk_masked_gates, k=expert_capacity, dim=0, sorted=False - ) - capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() - elif drop_policy == "position": - _, capacity_indices = torch.topk(topk_map.int(), k=expert_capacity, dim=0, sorted=False) - capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() - else: - raise ValueError(f"Invalid drop_policy: {drop_policy}") - - if pad_to_capacity: - final_map = capacity_mask - final_probs = topk_masked_gates * final_map - else: - # Get exceed mask and maskout exceeded probs and indices - final_map = torch.logical_and(topk_map, capacity_mask) - final_probs = topk_masked_gates * final_map - return final_probs, final_map, tokens_per_expert - - -def save_to_aux_losses_tracker( - name: str, - loss: torch.Tensor, - layer_number: int, - num_layers: int, - reduce_group: torch.distributed.ProcessGroup = None, - avg_group: torch.distributed.ProcessGroup = None, -): - """Save the auxiliary loss for logging. - Args: - name (str): The name of the loss. - loss (torch.Tensor): The loss tensor. - layer_number (int): Layer index of the loss. - num_layers (int): The number of total layers. - reduce_group (torch.distributed.ProcessGroup): The group for reducing the loss. - mean_group (torch.distributed.ProcessGroup): The group for averaging the loss. - """ - # Skip aux loss logging if layer_number is None. - if layer_number is None: - return - - tracker = parallel_state.get_moe_layer_wise_logging_tracker() - if name not in tracker: - tracker[name] = {} - tracker[name]["values"] = torch.zeros(num_layers, device=loss.device) - tracker[name]["values"][layer_number - 1] += loss.detach() # Aggregate the loss for the layer. - tracker[name]["reduce_group"] = reduce_group - tracker[name]["avg_group"] = avg_group - - -def clear_aux_losses_tracker(): - """Clear the auxiliary losses.""" - tracker = parallel_state.get_moe_layer_wise_logging_tracker() - for name in tracker: - tracker[name]["values"].zero_() - tracker[name]["reduce_group"] = None - tracker[name]["avg_group"] = None - - -def reduce_aux_losses_tracker_across_ranks(): - """Collect and reduce the auxiliary losses across ranks.""" - tracker = parallel_state.get_moe_layer_wise_logging_tracker() - for name in tracker: - values = tracker[name]["values"] - # Collect aux losses across PP. 
- torch.distributed.all_reduce( - values, group=parallel_state.get_pipeline_model_parallel_group() - ) - # Reduce aux losses across ranks. - if tracker[name].get('reduce_group') is not None: - torch.distributed.all_reduce(values, group=tracker[name].get('reduce_group')) - if tracker[name].get('avg_group') is not None: - torch.distributed.all_reduce( - values, group=tracker[name]['avg_group'], op=torch.distributed.ReduceOp.AVG - ) - - -def track_moe_metrics( - loss_scale, iteration, writer, wandb_writer=None, total_loss_dict=None, per_layer_logging=False -): - """Track the MoE metrics for logging.""" - # Aux loss logging - reduce_aux_losses_tracker_across_ranks() - tracker = parallel_state.get_moe_layer_wise_logging_tracker() - if writer is not None: - aux_losses = {k: v['values'].float() * loss_scale for k, v in tracker.items()} - for name, loss_list in aux_losses.items(): - if total_loss_dict is not None: - if name not in total_loss_dict: - total_loss_dict[name] = loss_list.mean() - else: - total_loss_dict[name] += loss_list.mean() - - # currently when using add_scalars, - # torch.utils.add_scalars makes each timer its own run, which - # polutes the runs list, so we just add each as a scalar - writer.add_scalar(name, loss_list.mean(), iteration) - if per_layer_logging: - for i, loss in enumerate(loss_list.tolist()): - writer.add_scalar(f"moe/{name}_layer_{i}", loss, iteration) - - # W&B logging lacks support for logging multiple scalars simultaneously. - # As a workaround, we log each scalar individually first, then we can create - # a custom panel to manually group them to a single plot. - if wandb_writer: - wandb_writer.log({f"{name}": loss_list.mean()}, iteration) - if per_layer_logging: - wandb_writer.log( - { - f"moe/{name}_layer_{i}": loss - for i, loss in enumerate(loss_list.tolist()) - }, - iteration, - ) - - clear_aux_losses_tracker() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import math +from typing import Optional + +import torch + +from megatron.core import parallel_state +from megatron.core.tensor_parallel.mappings import gather_from_sequence_parallel_region + +try: + from megatron.core.extensions.transformer_engine import ( + fused_permute, + fused_sort_chunks_by_index, + fused_unpermute, + ) + + HAVE_TE = True +except ImportError: + HAVE_TE = False + + +def switch_load_balancing_loss_func( + probs: torch.Tensor, + tokens_per_expert: torch.Tensor, + topk: int, + moe_aux_loss_coeff: float, + sequence_partition_group=None, +): + """Calculate the auxiliary loss for load balancing. + Refer to the Switch Transformer paper (https://arxiv.org/abs/2101.03961) for details. + + Args: + probs (torch.Tensor): Softmax probabilities output by the router for each token. + Shape in [num_tokens, num_experts]. + tokens_per_expert (torch.Tensor): Number of tokens assigned to each expert. + Shape in [num_experts] + topk (int): The number of experts selected for each token. + moe_aux_loss_coeff (float): The coefficient for the auxiliary loss. + sequence_partition_group (optional): The parallel group over which the sequence is + partitioned. If None, no partitioning is applied. + Defaults to None. + + Returns: + torch.Tensor: The auxiliary loss for load balancing. + """ + num_sub_sequence = 1 + + # If the sequence is partitioned by certain parallelism strategies like Sequence Parallelism + # or Context Parallelism, compute the gradient of the auxiliary loss with respect to the full + # sequence. 
+ if sequence_partition_group is not None: + # We can keep `aggregated_probs_per_expert` local since we don't need the gradient for + # `tokens_per_expert`, saving one allreduce operation for `aggregated_probs_per_expert`. + num_sub_sequence = torch.distributed.get_world_size(sequence_partition_group) + torch.distributed.all_reduce(tokens_per_expert, group=sequence_partition_group) + + num_tokens = probs.shape[0] * num_sub_sequence + num_experts = probs.shape[1] + + # The formula of aux_loss: aux_loss = sum((probs_per_expert/num_tokens) * + # (tokens_per_expert/(num_tokens*topk))) * num_experts * moe_aux_loss_coeff. + # This can be simplified to fuse the division and multiplication operations. + aggregated_probs_per_expert = probs.sum(dim=0) + aux_loss = torch.sum(aggregated_probs_per_expert * tokens_per_expert) * ( + num_experts * moe_aux_loss_coeff / (num_tokens * num_tokens * topk) + ) + return aux_loss + + +def sequence_load_balancing_loss_func( + probs: torch.Tensor, + routing_map: torch.Tensor, + batch_size: int, + seq_length: int, + topk: int, + moe_aux_loss_coeff: float, + sequence_partition_group=None, +): + """ + Calculate the auxiliary loss in sequence-level by computing the loss for each individual sample. + Refer to the DeepSeek-V2 huggingface repo + (https://huggingface.co/deepseek-ai/DeepSeek-V2) for details. + + Args: + probs (torch.Tensor): Softmax probabilities output by the router for each token. + Shape in [num_tokens, num_experts]. + routing_map (torch.Tensor): Mapping of tokens to experts assignment. + Shape in [num_tokens, num_experts]. + batch_size (int): Batch size to process. + seq_length (int): Sequence length to process. + topk (int): Number of experts to route to for each token. + moe_aux_loss_coeff (float): Scaling coefficient for the auxiliary loss. + sequence_partition_group (optional): The parallel group over which the sequence is + partitioned. If None, no partitioning is applied. + Defaults to None. + + Returns: + torch.Tensor: The sequence auxiliary loss for load balancing. + """ + num_sub_sequence = 1 + num_experts = probs.shape[1] + + probs_for_aux_loss = probs.view(seq_length, batch_size, -1) + routing_map = routing_map.view(seq_length, batch_size, -1) + + # If the sequence is partitioned by certain parallelism strategies like Sequence Parallelism + # or Context Parallelism, compute the gradient of the auxiliary loss with respect to the full + # sequence. + if sequence_partition_group is not None: + num_sub_sequence = torch.distributed.get_world_size(sequence_partition_group) + seq_length *= num_sub_sequence + probs_for_aux_loss = gather_from_sequence_parallel_region( + probs_for_aux_loss, group=sequence_partition_group + ) + + cost_coeff = routing_map.sum(dim=0, dtype=torch.float).div_(seq_length * topk / num_experts) + seq_aux_loss = (cost_coeff * probs_for_aux_loss.mean(dim=0)).sum(dim=1).mean() + seq_aux_loss *= moe_aux_loss_coeff + + return seq_aux_loss + + +def z_loss_func(logits, z_loss_coeff): + """Encourages the router's logits to remain small to enhance stability. + Please refer to the ST-MoE paper (https://arxiv.org/pdf/2202.08906.pdf) for details. + + Args: + logits (torch.Tensor): The logits of the router. + + Returns: + torch.Tensor: The logits after applying the z-loss. 
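For intuition, the fused expression above can be checked against the textbook Switch Transformer form of the loss. A minimal sketch (illustrative only; naive_switch_aux_loss is a hypothetical helper, not part of this patch):

import torch

def naive_switch_aux_loss(probs, tokens_per_expert, topk, coeff):
    # Textbook form: num_experts * coeff * sum_e(mean router prob of e * fraction of tokens sent to e)
    num_tokens, num_experts = probs.shape
    mean_prob = probs.mean(dim=0)                               # [num_experts]
    load_fraction = tokens_per_expert / (num_tokens * topk)     # [num_experts]
    return num_experts * coeff * torch.sum(mean_prob * load_fraction)

probs = torch.softmax(torch.randn(16, 4), dim=-1)               # 16 tokens, 4 experts, topk=1
tokens_per_expert = torch.tensor([5.0, 3.0, 6.0, 2.0])
fused = torch.sum(probs.sum(dim=0) * tokens_per_expert) * (4 * 0.01 / (16 * 16 * 1))
assert torch.allclose(fused, naive_switch_aux_loss(probs, tokens_per_expert, topk=1, coeff=0.01))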
+ """ + + z_loss = torch.mean(torch.square(torch.logsumexp(logits, dim=-1))) * z_loss_coeff + return z_loss + + +def sinkhorn(cost: torch.Tensor, tol: float = 0.0001): + """Sinkhorn based MoE routing function""" + cost = torch.exp(cost) + d0 = torch.ones(cost.size(0), device=cost.device, dtype=cost.dtype) + d1 = torch.ones(cost.size(1), device=cost.device, dtype=cost.dtype) + + eps = 0.00000001 + error = 1e9 + d1_old = d1 + while error > tol: + d0 = (1 / d0.size(0)) * 1 / (torch.sum(d1 * cost, 1) + eps) + d1 = (1 / d1.size(0)) * 1 / (torch.sum(d0.unsqueeze(1) * cost, 0) + eps) + error = torch.mean(torch.abs(d1_old - d1)) + d1_old = d1 + return d1 * cost * d0.unsqueeze(1) + + +def get_capacity(num_tokens: int, num_experts: int, capacity_factor: float, min_capacity=None): + """ + Calculate the capacity of each expert. + + Args: + num_tokens (int): num of the input tokens. + num_experts (int): num of the experts. + capacity_factor (float): Capacity factor. + min_capacity (int, optional): Minimum capacity. Defaults to None. + + Returns: + Tensor: Capacity of each expert. + """ + capacity = math.ceil((num_tokens / num_experts) * capacity_factor) + if min_capacity is not None and capacity < min_capacity: + capacity = min_capacity + return capacity + + +class MoEAuxLossAutoScaler(torch.autograd.Function): + """An AutoScaler that triggers the backward pass and scales the grad for auxiliary loss.""" + + main_loss_backward_scale: torch.Tensor = torch.tensor(1.0) + + @staticmethod + def forward(ctx, output: torch.Tensor, aux_loss: torch.Tensor): + """Preserve the aux_loss by storing it in the context to avoid garbage collection. + + Args: + output (torch.Tensor): The output tensor. + aux_loss (torch.Tensor): The auxiliary loss tensor. + + Returns: + torch.Tensor: The output tensor. + """ + ctx.save_for_backward(aux_loss) + return output + + @staticmethod + def backward(ctx, grad_output: torch.Tensor): + """Compute and scale the gradient for auxiliary loss.. + + Args: + grad_output (torch.Tensor): The gradient of the output. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The gradient of the output, scaled auxiliary loss + gradient. + """ + (aux_loss,) = ctx.saved_tensors + aux_loss_backward_scale = MoEAuxLossAutoScaler.main_loss_backward_scale + scaled_aux_loss_grad = torch.ones_like(aux_loss) * aux_loss_backward_scale + return grad_output, scaled_aux_loss_grad + + @staticmethod + def set_loss_scale(scale: torch.Tensor): + """set the scale of the aux loss. + + Args: + scale (torch.Tensor): The scale value to set. Please ensure that the scale passed in + matches the scale of the main_loss. + """ + MoEAuxLossAutoScaler.main_loss_backward_scale = scale + + +def permute( + tokens, + routing_map, + num_out_tokens: Optional[int] = None, + fused: bool = False, + drop_and_pad: bool = False, +): + """Permute the tokens and probs based on the mask. + Tokens with the same designated expert will be grouped together. + The shape of mask is [tokens, num_experts], it indicates which experts were selected + by each token. + + When drop_and_pad=True, in routing_map, the number of non-zeros in each column equals to + expert capacity. This function exploits this feature to use ops that support cuda graph. + + Args: + tokens (torch.Tensor): The input token tensor, [num_tokens, hidden]. + routing_map (torch.Tensor): The sparse token to expert mapping, [num_tokens, num_experts]. + num_out_tokens (int, optional): The number of output tokens. If None, it's set to + the number of input tokens. 
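A minimal sketch of how the autoscaler above is intended to be used (illustrative toy tensors; in practice the router attaches the loss to its activations): the aux loss rides along the forward pass unchanged, and its gradient is injected during the main backward pass with the registered scale.

import torch

activation = torch.randn(4, 8, requires_grad=True)
router_logits = torch.randn(4, 2, requires_grad=True)
aux_loss = torch.mean(torch.square(torch.logsumexp(router_logits, dim=-1)))  # toy aux loss

# Attach the aux loss; the forward output is numerically unchanged.
activation = MoEAuxLossAutoScaler.apply(activation, aux_loss)

# Must match the scale applied to the main loss (e.g. 1 / num_microbatches).
MoEAuxLossAutoScaler.set_loss_scale(torch.tensor(0.5))

activation.sum().backward()
# router_logits.grad now holds 0.5 * d(aux_loss)/d(router_logits).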
+ fused (bool, optional): Whether use the fused permute function. + drop_and_pad (bool, optional): Whether or not the token dispatcher uses token-drop + and pads the number of tokens to the expert capacity. + If set to true, routing_map has a fixed number of non-zeros + in each column. + """ + if fused: + if not HAVE_TE or fused_permute is None: + raise ValueError("fused_permute is not available. Please install TE >= 2.1.0.") + return fused_permute(tokens, routing_map, num_out_tokens) + + num_tokens, hidden = tokens.shape + num_experts = routing_map.shape[1] + if drop_and_pad and not (num_out_tokens is None): + capacity = num_out_tokens // num_experts + assert not routing_map.requires_grad + # mask [num_tokens, num_experts] -> [num_experts, num_tokens] + routing_map = routing_map.to(dtype=torch.int8).T.contiguous() + # use argsort to put indices of all non-zeros in the beginning of list + # and keep the first `capacity` number of indices + sorted_indices = routing_map.argsort(dim=-1, descending=True, stable=True)[ + :, :capacity + ].contiguous() + # flatten from [num_experts, capacity] to 1D + sorted_indices = sorted_indices.view(-1) + else: + # mask [num_tokens, num_experts] -> [num_experts, num_tokens] + routing_map = routing_map.bool().T.contiguous() + + # Create a dense expert-to-token mapping from the sparse token-to-expert mapping + token_indices = ( + torch.arange(num_tokens, device=routing_map.device).unsqueeze(0).expand(num_experts, -1) + ) + sorted_indices = token_indices.masked_select(routing_map) + + # use the mapping to permute the tokens + permuted_input = tokens.index_select(0, sorted_indices) + + return permuted_input, sorted_indices + + +def unpermute( + permuted_tokens: torch.Tensor, + sorted_indices: torch.Tensor, + restore_shape: torch.Size, + probs: torch.Tensor = None, + routing_map: torch.Tensor = None, + fused: bool = False, + drop_and_pad: bool = False, +): + """ + Restore the original order of tokens after permutation. If probs are provided, it + will also apply them to the tokens before restoring the order. + + When drop_and_pad=True, the tensors will have the following properties: + - In routing_map, the number of non-zeros in each column equals to expert capacity + - The size of sorted_indices equals to num_experts * capacity, each split of `capacity` + contains the indices of tokens routed to an expert. + This function exploits these features to use ops that support cuda graph. + + Args: + permuted_tokens (torch.Tensor): The permuted token tensor. + sorted_indices (torch.Tensor): The indices used to sort the tokens. + restore_shape (torch.Size): The shape of the unpermuted tensor. + probs (torch.Tensor, optional): The unpermuted probs tensor, + routing_map (torch.Tensor, optional): Token to expert mapping, shape + [num_tokens, num_experts]. + fused (bool, optional): Whether use the fused unpermute function. + drop_and_pad (bool, optional): Whether or not the token dispatcher uses token-drop + and pads the number of tokens to the expert capacity. + + Returns: + torch.Tensor: The tokens restored to their original order. + """ + if fused: + if not HAVE_TE or fused_unpermute is None: + raise ValueError("fused_unpermute is not available. Please install TE >= 2.1.0.") + return fused_unpermute(permuted_tokens, sorted_indices, probs, restore_shape) + + _, hidden = restore_shape + + if probs is not None: + assert routing_map is not None, "Mask must be provided to permute the probs." 
+ if drop_and_pad: + num_experts = routing_map.size(1) + num_permuted_tokens = sorted_indices.size(0) + capacity = num_permuted_tokens // num_experts + num_unpermuted_tokens = probs.size(0) + + # [num_unpermuted_tokens, num_experts] -> num_experts * num_unpermuted_tokens + probs_T_1D = probs.T.contiguous().view(-1) + + # get 1D indices of the probs selected by routing_map + indices_dim0 = torch.arange(num_experts, device=routing_map.device).unsqueeze(-1) + indices_dim1 = sorted_indices.view(num_experts, capacity) + indices_1D = (indices_dim0 * num_unpermuted_tokens + indices_dim1).view(-1) + + # get probs from indices + permuted_probs = probs_T_1D.index_select(0, indices_1D) + else: + permuted_probs = probs.T.contiguous().masked_select(routing_map.T.contiguous()) + permuted_tokens = permuted_tokens * permuted_probs.unsqueeze(-1) + + # Create an output tensor filled with zeros + output_tokens = torch.zeros( + restore_shape, device=permuted_tokens.device, dtype=permuted_tokens.dtype + ) + # Scatter add the permuted_input back to the original positions + output_tokens.scatter_add_(0, sorted_indices.unsqueeze(1).expand(-1, hidden), permuted_tokens) + return output_tokens + + +def sort_chunks_by_idxs( + input: torch.Tensor, split_sizes: torch.Tensor, sorted_idxs: torch.Tensor, fused: bool = False +): + """Split and sort the input tensor based on the split_sizes and sorted indices.""" + if fused: + if not HAVE_TE or fused_sort_chunks_by_index is None: + raise ValueError( + "fused_sort_chunks_by_index is not available. Please install TE >= 2.1.0." + ) + return fused_sort_chunks_by_index(input, split_sizes, sorted_idxs) + + input = torch.split(input, split_sizes.tolist(), dim=0) + output = torch.cat([input[i] for i in sorted_idxs.tolist()], dim=0) + return output + + +def group_limited_topk( + scores: torch.Tensor, + topk: int, + num_tokens: int, + num_experts: int, + num_groups: int, + group_topk: int, +): + """Perform top-k routing on a subset of expert groups. + + When using group-limited routing: + 1. Experts are divided into 'moe_router_num_groups' equal-sized groups + 2. For each token, 'moe_router_group_topk' groups are selected based on routing scores + (specifically, the sum of top-2 expert scores within each group) + 3. From these selected groups, 'moe_router_topk' individual experts are chosen + + Two common use cases: + - Device-limited routing: Set 'moe_router_num_groups' equal to expert parallel size (EP) + to limit each token to experts on a subset of devices + (See DeepSeek-V2: https://arxiv.org/pdf/2405.04434) + + - Node-limited routing: Set 'moe_router_num_groups' equal to number of nodes in EP group + to limit each token to experts on a subset of nodes + (See DeepSeek-V3: https://arxiv.org/pdf/2412.19437) + + Args: + scores (torch.Tensor): Softmax scores generated by the router. + topk (int): The number of experts to select for each token. + num_tokens (int): The number of tokens. + num_experts (int): The number of experts. + num_groups (int): Number of groups for routed experts. + group_topk (int): Number of groups selected for each token. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Probs and indices tensor. 
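A round-trip sketch for the permute/unpermute pair defined above (illustrative; single-expert-per-token case, so the scatter-add in unpermute restores the input exactly):

import torch

tokens = torch.arange(12.0).reshape(4, 3)        # 4 tokens, hidden size 3
routing_map = torch.tensor([[1, 0],              # token 0 -> expert 0
                            [0, 1],              # token 1 -> expert 1
                            [1, 0],              # token 2 -> expert 0
                            [0, 1]], dtype=torch.bool)

permuted, sorted_indices = permute(tokens, routing_map)
# permuted groups tokens by expert: rows [0, 2, 1, 3] of the original tensor.

restored = unpermute(permuted, sorted_indices, restore_shape=tokens.shape)
assert torch.equal(restored, tokens)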
+    """
+    # Organize the experts into groups
+    group_scores = scores.view(num_tokens, num_groups, -1).topk(2, dim=-1)[0].sum(dim=-1)
+    group_idx = torch.topk(group_scores, k=group_topk, dim=-1, sorted=False)[1]
+    group_mask = torch.zeros_like(group_scores)
+    group_mask.scatter_(1, group_idx, 1)
+
+    # Mask the experts based on selection groups
+    score_mask = (
+        group_mask.unsqueeze(-1)
+        .expand(num_tokens, num_groups, num_experts // num_groups)
+        .reshape(num_tokens, -1)
+    )
+
+    masked_scores = scores.masked_fill(~score_mask.bool(), float('-inf'))
+    probs, top_indices = torch.topk(masked_scores, k=topk, dim=-1)
+
+    return probs, top_indices
+
+@torch.compile(options={"triton.cudagraphs": True, "triton.cudagraph_trees": False})
+def topk_softmax_with_capacity(
+    logits: torch.Tensor,
+    topk: int,
+    capacity_factor: Optional[float] = None,
+    pad_to_capacity: bool = False,
+    drop_policy: str = "probs",
+    use_pre_softmax: bool = False,
+    num_groups: Optional[int] = None,
+    group_topk: Optional[int] = None,
+    scaling_factor: Optional[float] = None,
+    deterministic_mode: bool = False,
+    score_function: str = "softmax",
+    expert_bias: Optional[torch.Tensor] = None,
+):
+    """Apply capacity and padding to the top-k selection.
+    Args:
+        logits (torch.Tensor): Logits tensor.
+        topk (int): The number of experts to select for each token.
+        capacity_factor (float): The capacity factor of each expert. Will drop tokens if the number
+            of tokens exceeds the capacity.
+        pad_to_capacity (bool): Whether to pad the routing results to the expert capacity in
+            token drop mode.
+        drop_policy (str): The policy to drop tokens. Can be either "probs" or "position".
+            If "probs", the tokens with the lowest probabilities will be dropped.
+            If "position", tokens at the end of each batch will be dropped.
+        use_pre_softmax (bool): Whether to apply softmax before top-k selection.
+        num_groups (int): Number of groups for routed experts.
+        group_topk (int): Number of selected groups for each token.
+        scaling_factor (float): Scaling factor of routing score in top-k selection.
+        deterministic_mode (bool): Deprecated.
+        score_function (str): The score function to use. Can be either "softmax" or "sigmoid".
+        expert_bias (torch.Tensor): The bias added to logits for expert routing.
+
+    Returns:
+        Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
+            - routing_probs (torch.Tensor): A tensor of shape [num_tokens, num_experts] containing
+              the routing probabilities for each token to each expert.
+            - routing_map (torch.Tensor): A mask tensor of shape [num_tokens, num_experts]
+              indicating which experts were selected for each token. True values represent
+              the selected experts.
+            - tokens_per_expert (torch.Tensor): A tensor of shape [num_experts] containing
+              the number of local tokens assigned to each expert before dropping and padding.
+    """
+    assert logits.dim() == 2, f"Expected 2D logits [num_tokens, num_experts], got {logits.dim()}."
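As a toy illustration of the group-limited routing described above (values are arbitrary): 8 experts split into 4 groups of 2, with each token restricted to its 2 best-scoring groups before the final top-2 expert selection.

import torch

scores = torch.softmax(torch.randn(5, 8), dim=-1)   # 5 tokens, 8 experts
probs, top_indices = group_limited_topk(
    scores=scores, topk=2, num_tokens=5, num_experts=8, num_groups=4, group_topk=2
)
# top_indices: [5, 2] expert ids, all falling inside each token's 2 selected groups;
# probs: the corresponding routing scores.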
+ num_tokens, num_experts = logits.shape + + def compute_topk(scores, topk, num_groups=None, group_topk=None): + if group_topk: + return group_limited_topk( + scores=scores, + topk=topk, + num_tokens=num_tokens, + num_experts=num_experts, + num_groups=num_groups, + group_topk=group_topk, + ) + else: + return torch.topk(scores, k=topk, dim=1) + + if score_function == "softmax": + if use_pre_softmax: + scores = torch.softmax(logits, dim=-1, dtype=torch.float32).type_as(logits) + probs, top_indices = compute_topk(scores, topk, num_groups, group_topk) + else: + scores, top_indices = compute_topk(logits, topk, num_groups, group_topk) + probs = torch.softmax(scores, dim=-1, dtype=torch.float32).type_as(logits) + elif score_function == "sigmoid": + scores = torch.sigmoid(logits) + if expert_bias is not None: + scores_for_routing = scores + expert_bias + _, top_indices = compute_topk(scores_for_routing, topk, num_groups, group_topk) + scores = torch.gather(scores, dim=1, index=top_indices).type_as(logits) + else: + scores, top_indices = compute_topk(scores, topk, num_groups, group_topk) + probs = scores / (scores.sum(dim=-1, keepdim=True) + 1e-20) if topk > 1 else scores + else: + raise ValueError(f"Invalid score_function: {score_function}") + + if scaling_factor: + probs = probs * scaling_factor + + # TODO Try using element-wise operations instead of scatter? + topk_masked_gates = torch.zeros_like(logits).scatter(1, top_indices, probs) + topk_map = torch.zeros_like(logits).int().scatter(1, top_indices, 1).bool() + tokens_per_expert = topk_map.sum(dim=0) + + if capacity_factor is None: + # TopK without capacity + return topk_masked_gates, topk_map, tokens_per_expert + else: + # TopK with capacity + expert_capacity = get_capacity( + num_tokens=num_tokens * topk, num_experts=num_experts, capacity_factor=capacity_factor + ) + + # Maskout exceeded tokens + if drop_policy == "probs": + _, capacity_indices = torch.topk( + topk_masked_gates, k=expert_capacity, dim=0, sorted=False + ) + capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() + elif drop_policy == "position": + _, capacity_indices = torch.topk(topk_map.int(), k=expert_capacity, dim=0, sorted=False) + capacity_mask = torch.zeros_like(logits).scatter(0, capacity_indices, 1).bool() + else: + raise ValueError(f"Invalid drop_policy: {drop_policy}") + + if pad_to_capacity: + final_map = capacity_mask + final_probs = topk_masked_gates * final_map + else: + # Get exceed mask and maskout exceeded probs and indices + final_map = torch.logical_and(topk_map, capacity_mask) + final_probs = topk_masked_gates * final_map + return final_probs, final_map, tokens_per_expert + + +def save_to_aux_losses_tracker( + name: str, + loss: torch.Tensor, + layer_number: int, + num_layers: int, + reduce_group: torch.distributed.ProcessGroup = None, + avg_group: torch.distributed.ProcessGroup = None, +): + """Save the auxiliary loss for logging. + Args: + name (str): The name of the loss. + loss (torch.Tensor): The loss tensor. + layer_number (int): Layer index of the loss. + num_layers (int): The number of total layers. + reduce_group (torch.distributed.ProcessGroup): The group for reducing the loss. + mean_group (torch.distributed.ProcessGroup): The group for averaging the loss. + """ + # Skip aux loss logging if layer_number is None. 
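A usage sketch for topk_softmax_with_capacity as defined above (illustrative shapes and hyperparameters):

import torch

logits = torch.randn(16, 8)                      # 16 tokens, 8 experts
probs, routing_map, tokens_per_expert = topk_softmax_with_capacity(
    logits,
    topk=2,
    capacity_factor=1.25,                        # expert capacity = ceil(16 * 2 / 8 * 1.25) = 5
    pad_to_capacity=False,
    drop_policy="probs",
    score_function="softmax",
)
# probs and routing_map are dense [16, 8]; tokens_per_expert counts assignments
# per expert before any dropping or padding is applied.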
+    if layer_number is None:
+        return
+
+    tracker = parallel_state.get_moe_layer_wise_logging_tracker()
+    if name not in tracker:
+        tracker[name] = {}
+        tracker[name]["values"] = torch.zeros(num_layers, device=loss.device)
+    tracker[name]["values"][layer_number - 1] += loss.detach()  # Aggregate the loss for the layer.
+    tracker[name]["reduce_group"] = reduce_group
+    tracker[name]["avg_group"] = avg_group
+
+
+def clear_aux_losses_tracker():
+    """Clear the auxiliary losses."""
+    tracker = parallel_state.get_moe_layer_wise_logging_tracker()
+    for name in tracker:
+        tracker[name]["values"].zero_()
+        tracker[name]["reduce_group"] = None
+        tracker[name]["avg_group"] = None
+
+
+def reduce_aux_losses_tracker_across_ranks():
+    """Collect and reduce the auxiliary losses across ranks."""
+    tracker = parallel_state.get_moe_layer_wise_logging_tracker()
+    for name in tracker:
+        values = tracker[name]["values"]
+        # Collect aux losses across PP.
+        torch.distributed.all_reduce(
+            values, group=parallel_state.get_pipeline_model_parallel_group()
+        )
+        # Reduce aux losses across ranks.
+        if tracker[name].get('reduce_group') is not None:
+            torch.distributed.all_reduce(values, group=tracker[name].get('reduce_group'))
+        if tracker[name].get('avg_group') is not None:
+            torch.distributed.all_reduce(
+                values, group=tracker[name]['avg_group'], op=torch.distributed.ReduceOp.AVG
+            )
+
+
+def track_moe_metrics(
+    loss_scale, iteration, writer, wandb_writer=None, total_loss_dict=None, per_layer_logging=False
+):
+    """Track the MoE metrics for logging."""
+    # Aux loss logging
+    reduce_aux_losses_tracker_across_ranks()
+    tracker = parallel_state.get_moe_layer_wise_logging_tracker()
+    if writer is not None:
+        aux_losses = {k: v['values'].float() * loss_scale for k, v in tracker.items()}
+        for name, loss_list in aux_losses.items():
+            if total_loss_dict is not None:
+                if name not in total_loss_dict:
+                    total_loss_dict[name] = loss_list.mean()
+                else:
+                    total_loss_dict[name] += loss_list.mean()
+
+            # currently when using add_scalars,
+            # torch.utils.add_scalars makes each timer its own run, which
+            # pollutes the runs list, so we just add each as a scalar
+            writer.add_scalar(name, loss_list.mean(), iteration)
+            if per_layer_logging:
+                for i, loss in enumerate(loss_list.tolist()):
+                    writer.add_scalar(f"moe/{name}_layer_{i}", loss, iteration)
+
+            # W&B logging lacks support for logging multiple scalars simultaneously.
+            # As a workaround, we log each scalar individually first, then we can create
+            # a custom panel to manually group them to a single plot.
+            if wandb_writer:
+                wandb_writer.log({f"{name}": loss_list.mean()}, iteration)
+                if per_layer_logging:
+                    wandb_writer.log(
+                        {
+                            f"moe/{name}_layer_{i}": loss
+                            for i, loss in enumerate(loss_list.tolist())
+                        },
+                        iteration,
+                    )
+
+    clear_aux_losses_tracker()
+
+
+def get_updated_expert_bias(tokens_per_expert, expert_bias, expert_bias_update_rate):
+    """Update expert bias for biased expert routing. See https://arxiv.org/abs/2408.15664v1#
+
+    Args:
+        tokens_per_expert (torch.Tensor): The number of tokens assigned to each expert.
+        expert_bias (torch.Tensor): The bias for each expert.
+        expert_bias_update_rate (float): The update rate for the expert bias.
+ """ + with torch.no_grad(): + # All Reduce Across TPxCPxDP group + torch.distributed.all_reduce( + tokens_per_expert, + group=parallel_state.get_tensor_and_data_parallel_group(with_context_parallel=True), + ) + average_tokens = tokens_per_expert.sum(dim=-1, keepdim=True) / tokens_per_expert.shape[-1] + offset = average_tokens - tokens_per_expert + updated_expert_bias = expert_bias + torch.sign(offset) * expert_bias_update_rate + return updated_expert_bias diff --git a/megatron/core/transformer/moe/router.py b/megatron/core/transformer/moe/router.py index 82d1029..268b429 100644 --- a/megatron/core/transformer/moe/router.py +++ b/megatron/core/transformer/moe/router.py @@ -1,334 +1,364 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -from abc import ABC, abstractmethod -from functools import partial -from typing import Callable - -import torch - -from megatron.core import parallel_state -from megatron.core.tensor_parallel import gather_from_sequence_parallel_region -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.moe.moe_utils import ( - MoEAuxLossAutoScaler, - save_to_aux_losses_tracker, - sequence_load_balancing_loss_func, - sinkhorn, - switch_load_balancing_loss_func, - topk_softmax_with_capacity, - z_loss_func, -) -from megatron.core.transformer.transformer_config import TransformerConfig - - -class Router(ABC, MegatronModule): - """Base Router class""" - - def __init__(self, config: TransformerConfig) -> None: - """ - Initialize the Router module. - - Args: - config (TransformerConfig): Configuration object for the Transformer model. - """ - super().__init__(config) - self.config = config - self.num_experts = self.config.num_moe_experts - self.moe_aux_loss_func = None - self.layer_number = None - - # Initialize the gate weights. - # TODO: Add support for GPU initialization, which requires updating the golden values. - self.weight = torch.nn.Parameter( - torch.empty((self.config.num_moe_experts, self.config.hidden_size), dtype=torch.float32) - ) - if config.perform_initialization: - config.init_method(self.weight) - self.weight.data = self.weight.data.to(dtype=config.params_dtype) - setattr(self.weight, 'sequence_parallel', config.sequence_parallel) - - def gating(self, input: torch.Tensor): - """Forward pass of the router gate. - - Args: - input (torch.Tensor): Input tensor. - - Returns: - torch.Tensor: Logits tensor. - """ - if self.weight.device.type == 'cpu': - # move weights to GPU - self.weight.data = self.weight.data.to(device=torch.cuda.current_device()) - logits = torch.nn.functional.linear(input, self.weight) - return logits - - @abstractmethod - def routing(self, logits: torch.Tensor): - """Routing function. - - Args: - logits (torch.Tensor): Logits tensor. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment - probabilities and mapping. - """ - raise NotImplementedError("Routing function not implemented.") - - @abstractmethod - def forward(self, input: torch.Tensor): - """ - Forward pass of the router. - - Args: - input (torch.Tensor): Input tensor. - """ - raise NotImplementedError("Forward function not implemented.") - - def set_layer_number(self, layer_number: int): - """Set the layer number for the router.""" - self.layer_number = layer_number - - -class TopKRouter(Router): - """Route each token to the top-k experts.""" - - def __init__(self, config: TransformerConfig) -> None: - """Initialize the zero token dropping router. 
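Ignoring the all-reduce (a single-process sketch with illustrative values), the sign-based update in get_updated_expert_bias behaves as follows:

import torch

tokens_per_expert = torch.tensor([10.0, 2.0, 4.0, 16.0])   # imbalanced expert load
expert_bias = torch.zeros(4)
update_rate = 1e-3

average_tokens = tokens_per_expert.mean()
offset = average_tokens - tokens_per_expert
expert_bias = expert_bias + torch.sign(offset) * update_rate
# Underloaded experts 1 and 2 get +1e-3, overloaded experts 0 and 3 get -1e-3,
# nudging the biased sigmoid routing toward a balanced token assignment.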
- - Args: - config (TransformerConfig): The configuration for the transformer model. - """ - super().__init__(config=config) - self.topk = self.config.moe_router_topk - self.routing_type = self.config.moe_router_load_balancing_type - self.input_jitter = None - - def sinkhorn_load_balancing(self, logits: torch.Tensor): - """Apply sinkhorn routing to the logits tensor. - - Args: - logits (torch.Tensor): The logits tensor. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment - probabilities and mask. - """ - - def _sinkhorn_activation(logits): - if self.topk == 1: - logits = torch.sigmoid(logits) - else: # k > 1 - logits = torch.softmax(logits, dim=-1, dtype=torch.float32).type_as(logits) - return logits - - assert self.config.moe_aux_loss_coeff == 0, "Sinkhorn routing does not support aux loss." - if self.training: - with torch.no_grad(): - norm_logits = sinkhorn( - logits.to(dtype=torch.float32) - ) # explicit fp32 conversion for stability - _, indices = torch.topk(norm_logits, k=self.topk, dim=1) - logits = _sinkhorn_activation(logits) - else: - logits = _sinkhorn_activation(logits) - _, indices = torch.topk(logits, k=self.topk, dim=1) - map = torch.zeros_like(logits).int().scatter(1, indices, 1).bool() - scores = logits * map - return scores, map - - def aux_loss_load_balancing(self, logits: torch.Tensor): - """Apply loss-based load balancing to the logits tensor. - - Args: - logits (torch.Tensor): the logits tensor after gating, shape: [num_tokens, num_experts]. - - Returns: - probs (torch.Tensor): The probabilities of token to experts assignment. - routing_map (torch.Tensor): The mask of token to experts assignment. - """ - probs, routing_map, tokens_per_expert = topk_softmax_with_capacity( - logits, - self.topk, - capacity_factor=self.config.moe_expert_capacity_factor, - pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, - drop_policy=self.config.moe_token_drop_policy, - use_pre_softmax=self.config.moe_router_pre_softmax, - moe_router_topk_limited_devices=self.config.moe_router_topk_limited_devices, - moe_router_topk_scaling_factor=self.config.moe_router_topk_scaling_factor, - deterministic_mode=self.config.deterministic_mode, - ) - - if self.training: - # Apply load balancing loss - scores = torch.softmax(logits, dim=-1, dtype=torch.float32) - aux_loss_func = partial( - switch_load_balancing_loss_func, - probs=scores, - tokens_per_expert=tokens_per_expert, - topk=self.topk, - ) - probs = self.apply_load_balancing_loss( - activation=probs, load_balancing_loss_func=aux_loss_func - ) - return probs, routing_map - - def seq_aux_loss_load_balancing(self, logits: torch.Tensor, bsz: int, seq_length: int): - """Apply loss-based load balancing to the logits tensor.""" - - probs, routing_map, tokens_per_expert = topk_softmax_with_capacity( - logits, - self.topk, - capacity_factor=self.config.moe_expert_capacity_factor, - pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, - drop_policy=self.config.moe_token_drop_policy, - use_pre_softmax=self.config.moe_router_pre_softmax, - moe_router_topk_limited_devices=self.config.moe_router_topk_limited_devices, - moe_router_topk_scaling_factor=self.config.moe_router_topk_scaling_factor, - deterministic_mode=self.config.deterministic_mode, - ) - - if self.training: - scores = torch.softmax(logits, dim=-1, dtype=torch.float32) - aux_loss_func = partial( - sequence_load_balancing_loss_func, - probs=scores, - routing_map=routing_map, - tokens_per_expert=tokens_per_expert, - batch_size=bsz, - 
seq_length=seq_length, - topk=self.topk, - ) - probs = self.apply_load_balancing_loss( - activation=probs, load_balancing_loss_func=aux_loss_func - ) - - return probs, routing_map - - def apply_load_balancing_loss( - self, activation: torch.Tensor, load_balancing_loss_func: Callable - ): - """Calculate auxiliary loss, attach gradient function to activation and add to logging.""" - moe_aux_loss_coeff = self.config.moe_aux_loss_coeff - sequence_partition_group = None - if self.config.moe_token_dispatcher_type == "alltoall_seq": - sequence_partition_group = parallel_state.get_context_parallel_group() - moe_aux_loss_coeff /= parallel_state.get_tensor_model_parallel_world_size() - else: - sequence_partition_group = parallel_state.get_tensor_and_context_parallel_group() - - aux_loss = load_balancing_loss_func( - moe_aux_loss_coeff=moe_aux_loss_coeff, sequence_partition_group=sequence_partition_group - ) - save_to_aux_losses_tracker( - "load_balancing_loss", - aux_loss / moe_aux_loss_coeff, - self.layer_number, - self.config.num_layers, - reduce_group=sequence_partition_group, - ) - activation = MoEAuxLossAutoScaler.apply(activation, aux_loss) - return activation - - def apply_z_loss(self, logits): - """Encourages the router's logits to remain small to enhance stability. - Please refer to the ST-MoE paper (https://arxiv.org/pdf/2202.08906.pdf) for details. - - Args: - logits (torch.Tensor): The logits of the router. - - Returns: - torch.Tensor: The logits after applying the z-loss. - """ - if self.config.moe_z_loss_coeff is not None and self.training: - moe_z_loss_coeff = ( - self.config.moe_z_loss_coeff - / parallel_state.get_tensor_and_context_parallel_world_size() - ) - z_loss = z_loss_func(logits, moe_z_loss_coeff) - logits = MoEAuxLossAutoScaler.apply(logits, z_loss) - save_to_aux_losses_tracker( - "z_loss", z_loss / moe_z_loss_coeff, self.layer_number, self.config.num_layers - ) - return logits - - def apply_input_jitter(self, input: torch.Tensor): - """Add noise to the input tensor. - Refer to https://arxiv.org/abs/2101.03961. - - Args: - input (Tensor): Input tensor. - - Returns: - Tensor: Jittered input. - """ - if self.config.moe_input_jitter_eps is not None: - eps = self.config.moe_input_jitter_eps - if self.input_jitter is None: - self.input_jitter = torch.distributions.uniform.Uniform( - torch.tensor(1.0 - eps, device=input.device), - torch.tensor(1.0 + eps, device=input.device), - ).rsample - return input * self.input_jitter(input.shape) - else: - return input - - def routing(self, logits: torch.Tensor): - """Top-k routing function - - Args: - logits (torch.Tensor): Logits tensor after gating. - - Returns: - probs (torch.Tensor): The probabilities of token to experts assignment. - routing_map (torch.Tensor): The mapping of token to experts assignment, - with shape [num_tokens, num_experts]. 
- """ - seq_length, bsz = logits.shape[:2] - logits = logits.view(-1, self.config.num_moe_experts) - - # Apply Z-Loss - logits = self.apply_z_loss(logits) - - if self.config.moe_token_dispatcher_type == "alltoall_seq": - # Gather the logits from the TP region - logits = gather_from_sequence_parallel_region(logits) - - if self.routing_type == "sinkhorn": - scores, routing_map = self.sinkhorn_load_balancing(logits) - elif self.routing_type == "aux_loss": - scores, routing_map = self.aux_loss_load_balancing(logits) - elif self.routing_type == "seq_aux_loss": - scores, routing_map = self.seq_aux_loss_load_balancing(logits, bsz, seq_length) - elif self.routing_type == "none": - # A naive top-k routing without load balancing - scores, routing_map, _ = topk_softmax_with_capacity( - logits, - self.topk, - capacity_factor=self.config.moe_expert_capacity_factor, - pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, - drop_policy=self.config.moe_token_drop_policy, - use_pre_softmax=self.config.moe_router_pre_softmax, - moe_router_topk_scaling_factor=self.config.moe_router_topk_scaling_factor, - deterministic_mode=self.config.deterministic_mode, - ) - else: - raise ValueError(f"Unsupported MoE routing type: {self.routing_type}") - - return scores, routing_map - - def forward(self, input: torch.Tensor): - """ - Forward pass of the router. - - Args: - input (torch.Tensor): Input tensor. - """ - - # Apply input jitter - input = self.apply_input_jitter(input) - logits = self.gating(input) - - scores, routing_map = self.routing(logits) - - return scores, routing_map +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +from abc import ABC, abstractmethod +from functools import partial +from typing import Callable + +import torch + +from megatron.core import parallel_state +from megatron.core.tensor_parallel import gather_from_sequence_parallel_region +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.moe.moe_utils import ( + MoEAuxLossAutoScaler, + save_to_aux_losses_tracker, + sequence_load_balancing_loss_func, + sinkhorn, + switch_load_balancing_loss_func, + topk_softmax_with_capacity, + z_loss_func, +) +from megatron.core.transformer.transformer_config import TransformerConfig + + +class Router(ABC, MegatronModule): + """Base Router class""" + + def __init__(self, config: TransformerConfig) -> None: + """ + Initialize the Router module. + + Args: + config (TransformerConfig): Configuration object for the Transformer model. + """ + super().__init__(config) + self.config = config + self.num_experts = self.config.num_moe_experts + self.moe_aux_loss_func = None + self.layer_number = None + + # Initialize the gate weights. + # TODO: Add support for GPU initialization, which requires updating the golden values. + self.weight = torch.nn.Parameter( + torch.empty((self.config.num_moe_experts, self.config.hidden_size), dtype=torch.float32) + ) + if config.perform_initialization: + config.init_method(self.weight) + self.weight.data = self.weight.data.to(dtype=config.params_dtype) + setattr(self.weight, 'sequence_parallel', config.sequence_parallel) + + def gating(self, input: torch.Tensor): + """Forward pass of the router gate. + + Args: + input (torch.Tensor): Input tensor. + + Returns: + torch.Tensor: Logits tensor. 
+ """ + if self.weight.device.type == 'cpu': + # move weights to GPU + self.weight.data = self.weight.data.to(device=torch.cuda.current_device()) + logits = torch.nn.functional.linear(input, self.weight) + return logits + + @abstractmethod + def routing(self, logits: torch.Tensor): + """Routing function. + + Args: + logits (torch.Tensor): Logits tensor. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment + probabilities and mapping. + """ + raise NotImplementedError("Routing function not implemented.") + + @abstractmethod + def forward(self, input: torch.Tensor): + """ + Forward pass of the router. + + Args: + input (torch.Tensor): Input tensor. + """ + raise NotImplementedError("Forward function not implemented.") + + def set_layer_number(self, layer_number: int): + """Set the layer number for the router.""" + self.layer_number = layer_number + + +class TopKRouter(Router): + """Route each token to the top-k experts.""" + + def __init__(self, config: TransformerConfig) -> None: + """Initialize the zero token dropping router. + + Args: + config (TransformerConfig): The configuration for the transformer model. + """ + super().__init__(config=config) + self.topk = self.config.moe_router_topk + self.routing_type = self.config.moe_router_load_balancing_type + self.score_function = self.config.moe_router_score_function + self.input_jitter = None + + self.enable_expert_bias = self.config.moe_router_enable_expert_bias + if self.enable_expert_bias: + self.register_buffer( + 'local_tokens_per_expert', + torch.zeros(self.config.num_moe_experts, dtype=torch.float32), + persistent=False, + ) + self.register_buffer( + 'expert_bias', torch.zeros(self.config.num_moe_experts, dtype=torch.float32) + ) + else: + self.local_tokens_per_expert = None + self.expert_bias = None + + def sinkhorn_load_balancing(self, logits: torch.Tensor): + """Apply sinkhorn routing to the logits tensor. + + Args: + logits (torch.Tensor): The logits tensor. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: A tuple containing token assignment + probabilities and mask. + """ + + def _sinkhorn_activation(logits): + if self.topk == 1: + logits = torch.sigmoid(logits) + else: # k > 1 + logits = torch.softmax(logits, dim=-1, dtype=torch.float32).type_as(logits) + return logits + + assert self.config.moe_aux_loss_coeff == 0, "Sinkhorn routing does not support aux loss." + if self.training: + with torch.no_grad(): + norm_logits = sinkhorn( + logits.to(dtype=torch.float32) + ) # explicit fp32 conversion for stability + _, indices = torch.topk(norm_logits, k=self.topk, dim=1) + logits = _sinkhorn_activation(logits) + else: + logits = _sinkhorn_activation(logits) + _, indices = torch.topk(logits, k=self.topk, dim=1) + map = torch.zeros_like(logits).int().scatter(1, indices, 1).bool() + scores = logits * map + return scores, map + + def aux_loss_load_balancing(self, logits: torch.Tensor): + """Apply loss-based load balancing to the logits tensor. + + Args: + logits (torch.Tensor): the logits tensor after gating, shape: [num_tokens, num_experts]. + + Returns: + probs (torch.Tensor): The probabilities of token to experts assignment. + routing_map (torch.Tensor): The mask of token to experts assignment. 
+ """ + probs, routing_map, tokens_per_expert = topk_softmax_with_capacity( + logits, + self.topk, + capacity_factor=self.config.moe_expert_capacity_factor, + pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, + drop_policy=self.config.moe_token_drop_policy, + use_pre_softmax=self.config.moe_router_pre_softmax, + num_groups=self.config.moe_router_num_groups, + group_topk=self.config.moe_router_group_topk, + scaling_factor=self.config.moe_router_topk_scaling_factor, + deterministic_mode=self.config.deterministic_mode, + score_function=self.score_function, + expert_bias=self.expert_bias, + ) + + if self.training: + # Apply load balancing loss + scores = torch.softmax(logits, dim=-1, dtype=torch.float32) + aux_loss_func = partial( + switch_load_balancing_loss_func, + probs=scores, + tokens_per_expert=tokens_per_expert, + topk=self.topk, + ) + probs = self.apply_load_balancing_loss( + activation=probs, load_balancing_loss_func=aux_loss_func + ) + return probs, routing_map + + def seq_aux_loss_load_balancing(self, logits: torch.Tensor, bsz: int, seq_length: int): + """Apply loss-based load balancing to the logits tensor.""" + + probs, routing_map, tokens_per_expert = topk_softmax_with_capacity( + logits, + self.topk, + capacity_factor=self.config.moe_expert_capacity_factor, + pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, + drop_policy=self.config.moe_token_drop_policy, + use_pre_softmax=self.config.moe_router_pre_softmax, + num_groups=self.config.moe_router_num_groups, + group_topk=self.config.moe_router_group_topk, + scaling_factor=self.config.moe_router_topk_scaling_factor, + deterministic_mode=self.config.deterministic_mode, + score_function=self.score_function, + expert_bias=self.expert_bias, + ) + + if self.training: + scores = torch.softmax(logits, dim=-1, dtype=torch.float32) + aux_loss_func = partial( + sequence_load_balancing_loss_func, + probs=scores, + routing_map=routing_map, + batch_size=bsz, + seq_length=seq_length, + topk=self.topk, + ) + probs = self.apply_load_balancing_loss( + activation=probs, load_balancing_loss_func=aux_loss_func + ) + + return probs, routing_map + + def apply_load_balancing_loss( + self, activation: torch.Tensor, load_balancing_loss_func: Callable + ): + """Calculate auxiliary loss, attach gradient function to activation and add to logging.""" + moe_aux_loss_coeff = self.config.moe_aux_loss_coeff + if moe_aux_loss_coeff == 0: + return activation + sequence_partition_group = None + if self.config.moe_token_dispatcher_type == "alltoall_seq": + sequence_partition_group = parallel_state.get_context_parallel_group() + moe_aux_loss_coeff /= parallel_state.get_tensor_model_parallel_world_size() + elif parallel_state.get_tensor_and_context_parallel_world_size() > 1: + sequence_partition_group = parallel_state.get_tensor_and_context_parallel_group() + + aux_loss = load_balancing_loss_func( + moe_aux_loss_coeff=moe_aux_loss_coeff, sequence_partition_group=sequence_partition_group + ) + save_to_aux_losses_tracker( + "load_balancing_loss", + aux_loss / moe_aux_loss_coeff, + self.layer_number, + self.config.num_layers, + reduce_group=sequence_partition_group, + ) + activation = MoEAuxLossAutoScaler.apply(activation, aux_loss) + return activation + + def apply_z_loss(self, logits): + """Encourages the router's logits to remain small to enhance stability. + Please refer to the ST-MoE paper (https://arxiv.org/pdf/2202.08906.pdf) for details. + + Args: + logits (torch.Tensor): The logits of the router. 
+ + Returns: + torch.Tensor: The logits after applying the z-loss. + """ + if self.config.moe_z_loss_coeff is not None and self.training: + moe_z_loss_coeff = ( + self.config.moe_z_loss_coeff + / parallel_state.get_tensor_and_context_parallel_world_size() + ) + z_loss = z_loss_func(logits, moe_z_loss_coeff) + logits = MoEAuxLossAutoScaler.apply(logits, z_loss) + save_to_aux_losses_tracker( + "z_loss", z_loss / moe_z_loss_coeff, self.layer_number, self.config.num_layers + ) + return logits + + def apply_input_jitter(self, input: torch.Tensor): + """Add noise to the input tensor. + Refer to https://arxiv.org/abs/2101.03961. + + Args: + input (Tensor): Input tensor. + + Returns: + Tensor: Jittered input. + """ + if self.config.moe_input_jitter_eps is not None: + eps = self.config.moe_input_jitter_eps + if self.input_jitter is None: + self.input_jitter = torch.distributions.uniform.Uniform( + torch.tensor(1.0 - eps, device=input.device), + torch.tensor(1.0 + eps, device=input.device), + ).rsample + return input * self.input_jitter(input.shape) + else: + return input + + def routing(self, logits: torch.Tensor): + """Top-k routing function + + Args: + logits (torch.Tensor): Logits tensor after gating. + + Returns: + probs (torch.Tensor): The probabilities of token to experts assignment. + routing_map (torch.Tensor): The mapping of token to experts assignment, + with shape [num_tokens, num_experts]. + """ + seq_length, bsz = logits.shape[:2] + logits = logits.view(-1, self.config.num_moe_experts) + + # Apply Z-Loss + logits = self.apply_z_loss(logits) + + if self.config.moe_token_dispatcher_type == "alltoall_seq": + # Gather the logits from the TP region + logits = gather_from_sequence_parallel_region(logits) + + if self.routing_type == "sinkhorn": + scores, routing_map = self.sinkhorn_load_balancing(logits) + elif self.routing_type == "aux_loss": + scores, routing_map = self.aux_loss_load_balancing(logits) + elif self.routing_type == "seq_aux_loss": + scores, routing_map = self.seq_aux_loss_load_balancing(logits, bsz, seq_length) + elif self.routing_type == "none": + # A naive top-k routing without load balancing + scores, routing_map, _ = topk_softmax_with_capacity( + logits, + self.topk, + capacity_factor=self.config.moe_expert_capacity_factor, + pad_to_capacity=self.config.moe_pad_expert_input_to_capacity, + drop_policy=self.config.moe_token_drop_policy, + use_pre_softmax=self.config.moe_router_pre_softmax, + num_groups=self.config.moe_router_num_groups, + group_topk=self.config.moe_router_group_topk, + scaling_factor=self.config.moe_router_topk_scaling_factor, + deterministic_mode=self.config.deterministic_mode, + score_function=self.score_function, + expert_bias=self.expert_bias, + ) + else: + raise ValueError(f"Unsupported MoE routing type: {self.routing_type}") + # Prevent extra local tokens accumulation on evaluation or activation recomputation + if self.enable_expert_bias and torch.is_grad_enabled(): + with torch.no_grad(): + self.local_tokens_per_expert += routing_map.sum(dim=0) + + return scores, routing_map + + def forward(self, input: torch.Tensor): + """ + Forward pass of the router. + + Args: + input (torch.Tensor): Input tensor. 
+ """ + + # Apply input jitter + input = self.apply_input_jitter(input) + logits = self.gating(input) + + scores, routing_map = self.routing(logits) + + return scores, routing_map diff --git a/megatron/core/transformer/moe/token_dispatcher.py b/megatron/core/transformer/moe/token_dispatcher.py index dbd768d..37fee37 100644 --- a/megatron/core/transformer/moe/token_dispatcher.py +++ b/megatron/core/transformer/moe/token_dispatcher.py @@ -1,594 +1,901 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from abc import abstractmethod -from typing import List, Optional, Tuple - -import torch - -from megatron.core.parallel_state import ( - get_expert_model_parallel_group, - get_expert_tensor_and_model_parallel_group, - get_expert_tensor_parallel_group, - get_expert_tensor_parallel_rank, -) -from megatron.core.tensor_parallel import ( - all_to_all, - gather_from_sequence_parallel_region, - reduce_scatter_to_sequence_parallel_region, -) -from megatron.core.transformer.moe.moe_utils import ( - get_capacity, - permute, - sort_chunks_by_idxs, - unpermute, -) -from megatron.core.transformer.moe.shared_experts import SharedExpertMLP -from megatron.core.transformer.transformer_config import TransformerConfig - -""" We use the following notation throughout this file: - H: hidden size - B: micro batch size - S: sequence length - TP: tensor model parallel size - EP: expert model parallel size - num_local_tokens: S/TP*B - num_global_tokens: num_local_tokens*TP*EP -""" - - -class MoETokenDispatcher: - """ - MoE Token Dispatcher - """ - - def __init__(self, config: TransformerConfig) -> None: - """ - Initialize the MoE Token Dispatcher. - """ - self.config = config - self.shared_experts: Optional[SharedExpertMLP] = None - - self.tp_size = config.expert_tensor_parallel_size - self.ep_size = config.expert_model_parallel_size - - @property - def ep_group(self): - """Get expert model parallel group.""" - return get_expert_model_parallel_group() - - @property - def tp_group(self): - """Get expert tensor parallel group.""" - return get_expert_tensor_parallel_group() - - @property - def tp_rank(self): - """Get expert tensor parallel rank.""" - return get_expert_tensor_parallel_rank() - - @property - def tp_ep_group(self): - """Get expert tensor and model parallel group.""" - return get_expert_tensor_and_model_parallel_group() - - @abstractmethod - def token_permutation( - self, tokens: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor - ): - """Dispatch tokens to experts. - - Args: - tokens (torch.Tensor): Input tokens. - probs (torch.Tensor): The routing probability tensor [num_tokens, num_experts]. - routing_map (torch.Tensor): Token to expert mapping tensor. - - Returns: - torch.Tensor: Tokens tensor. - """ - raise NotImplementedError("Dispatch function not implemented.") - - @abstractmethod - def token_unpermutation(self, expert_output: torch.Tensor, bias: torch.Tensor = None): - """Restores the expert output to its original ordering. - - Args: - expert_output (torch.Tensor): The output tensor from the expert models. - bias (torch.Tensor): The bias tensor. - - Returns: - (torch.Tensor, torch.Tensor): Unpermuted activation and optional bias. - """ - raise NotImplementedError("Restore function not implemented.") - - def set_shared_experts(self, shared_experts): - """Set shared expert to the dispatcher.""" - self.shared_experts = shared_experts - - -class MoEAllGatherTokenDispatcher(MoETokenDispatcher): - """ - AllGather Based Token dispatcher. 
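To show how the new TopKRouter options introduced above fit together, a hypothetical configuration sketch (field values are illustrative; the remaining TransformerConfig fields and initialized model-parallel state are assumed):

from megatron.core.transformer.transformer_config import TransformerConfig

config = TransformerConfig(
    num_layers=2,
    hidden_size=128,
    num_attention_heads=8,
    num_moe_experts=16,
    moe_router_topk=4,
    moe_router_load_balancing_type="seq_aux_loss",    # sequence-level aux loss
    moe_router_score_function="sigmoid",               # sigmoid routing scores
    moe_router_enable_expert_bias=True,                # aux-loss-free bias balancing
    moe_router_num_groups=4,
    moe_router_group_topk=2,
    moe_aux_loss_coeff=1e-3,
)
router = TopKRouter(config)   # gate weight shape: [num_moe_experts, hidden_size]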
- Note that this allgather spans the communication domain of TP*EP: - """ - - def __init__( - self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig - ) -> None: - """ - Initialize the zero token dropping router. - """ - super().__init__(config=config) - self.num_local_experts = num_local_experts - assert self.num_local_experts > 0, "Expected at least one expert" - self.local_expert_indices = local_expert_indices - assert len(self.local_expert_indices) > 0, "Expected at least one local expert index" - self.router_topk = config.moe_router_topk - self.add_bias = config.add_bias_linear - - # self.local_probs: probs of global token assignment to local experts. - self.local_probs = None - - # self.global_local_map: 2D tensor. A mask of mapping between global and local tokens where - # each element is True if it's between the local_expert_indices. Only useful when cross - # device token permutation is enabled and **AllGahter** is performed. - self.global_local_map = None - - def token_permutation( - self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor - ): - """Dispatch tokens to local experts. It's composed of two stages: - (1) Gather the tokens across the expert parallel devices. After this stage, - each device receives all of the tokens assigned to its local set of experts - in its local HBM. - (2) Permute the tokens locally so that they are grouped by their expert - assignment. - - Args: - hidden_states: 3D tensor [S/TP, B, H]. Input tokens. - probs: 2D tensor [S/TP*B, num_experts]. Each row of probs contains - the probility distribution across `topk` experts for one local token. - routing_map: 2D tensor [S/TP*B, num_experts], representing token assignment to - global experts. - - Returns: - permuted_local_hidden_states: Permutation of tokens to local experts group. - tokens_per_expert: the number of tokens each local expert to process. - """ - self.hidden_shape = hidden_states.shape - # [S/TP, B, H] -> [S*B/TP, H] - hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) - - # Permute the tokens across the expert parallel devices. - if self.tp_size > 1 or self.ep_size > 1: - ## local_indices calculation - with torch.no_grad(): - # [num_local_tokens, num_experts] -> [num_global_tokens, num_experts], where: - # num_local_tokens=(S/TP)*B, num_global_tokens=S*B*EP - routing_map = gather_from_sequence_parallel_region( - routing_map, group=self.tp_ep_group - ) - - ## local_probs calculation - # max_prob: [S/TP*B, num_experts] -> global_probs: [S*B*EP, num_experts] - probs = gather_from_sequence_parallel_region(probs, group=self.tp_ep_group) - - # Note that this allgather spans the communication domain of TP*EP. - # [(S/TP)*B, H] -> [((S/TP)*B)*(TP*EP), H] = [S*B*EP, H] - hidden_states = gather_from_sequence_parallel_region( - hidden_states, group=self.tp_ep_group, use_global_buffer=True - ) - self.hidden_shape_before_permute = hidden_states.shape - - # The routing map and probs that for local experts. 
- self.local_map = routing_map[ - :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 - ].contiguous() - self.local_probs = probs[ - :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 - ].contiguous() - - tokens_per_expert = self.local_map.sum(dim=0).long().cpu() - - (permuted_local_hidden_states, self.reversed_local_input_permutation_mapping) = permute( - hidden_states, self.local_map - ) - - return permuted_local_hidden_states, tokens_per_expert - - def token_unpermutation(self, hidden_states: torch.Tensor, bias: torch.Tensor = None): - """ - Reverse process of `dispatch()` which permutes the output of local - experts locallay and across expert parallel rank into the original order to - produce the final output. - - Args: - hidden_states: 2D tensor [num_permuted_tokens_for_local_experts, H], - output of local experts. - bias (optional): The bias tensor. - - Returns: - output_total: un-permuted updated hidden states output from all local experts - with shape of [S/TP, B, H] - """ - # Scale the expert output prior to reduction and subsequent to local unpermutation if k > 1. - # Unpermute the expert output and bias - permuted_probs = self.local_probs.T.contiguous().masked_select( - self.local_map.T.contiguous() - ) - hidden_states = hidden_states * permuted_probs.unsqueeze(-1) - unpermuted_local_hidden = unpermute( - hidden_states, - self.reversed_local_input_permutation_mapping, - restore_shape=self.hidden_shape_before_permute, - ) - - unpermuted_local_bias = None - if self.add_bias: - assert bias is not None - bias = bias * permuted_probs.unsqueeze(-1) - unpermuted_local_bias = unpermute( - bias, - self.reversed_local_input_permutation_mapping, - restore_shape=self.hidden_shape_before_permute, - ) - - output_total = unpermuted_local_hidden - output_bias_total = unpermuted_local_bias - - # Unpermute the tokens across ranks. - if self.tp_size > 1 or self.ep_size > 1: - output_total = reduce_scatter_to_sequence_parallel_region( - output_total, group=self.tp_ep_group - ) - if self.add_bias: - # Unpermute the bias across expert parallel devices. - # bias is duplicated across tensor parallelism ranks; - output_bias_total = ( - reduce_scatter_to_sequence_parallel_region( - output_bias_total, group=self.tp_ep_group - ) - / self.tp_size - ) - - output_total = output_total.view(self.hidden_shape) - if self.add_bias: - output_bias_total = output_bias_total.view(self.hidden_shape) - - return output_total, output_bias_total - - -class MoEAlltoAllTokenDispatcher(MoETokenDispatcher): - """ - AlltoAll-based token dispatcher. - - The workflow of AlltoAll token dispatcher is as follows: - (1) preprocess(): calculate necessary metadata for communication and permute - (2) token_permutation(): permute->A2A(EP)->AG(TP)->sort_chunk(if num_local_experts>1) - (3) token_unpermutation(): sort_chunk(if num_local_experts>1)->RS(TP)->A2A(EP)->unpermute - """ - - def __init__( - self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig - ) -> None: - """ - Initialize the AlltoAll token dispatcher. - - Args: - num_local_experts (int): Number of local experts on the current device. - local_expert_indices (List[int]): Indices of local experts on the current device. - config (TransformerConfig): Configuration for the transformer model. 
- """ - super().__init__(config=config) - self.hidden_shape = None - self.num_local_experts = num_local_experts - self.num_experts = config.num_moe_experts - assert self.num_local_experts > 0, "Expected at least one expert" - self.local_expert_indices = local_expert_indices - assert ( - len(self.local_expert_indices) == self.num_local_experts - ), "Invalid local expert indices" - for i in range(len(self.local_expert_indices) - 1): - assert ( - self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 - ), "local_expert_indices must be continous" - self.probs = None - - # [ep_size]. Represents the number of tokens sent by the current rank to other - # EP ranks. - self.input_splits = None - # [ep_size]. Represents the number of tokens received by the current rank from - # other EP ranks. - self.output_splits = None - # [tp_size]. Represents the number of tokens received by the current rank from - # other TP ranks. - self.output_splits_tp = None - # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent - # to each local expert by all ranks. - self.num_global_tokens_per_local_expert_cpu = None - input_chunk_idxs = torch.arange(self.num_experts * self.tp_size) - # [num_local_experts, tp_size * ep_size]. Sort the input chunks by local experts. - self.sort_input_by_local_experts = ( - input_chunk_idxs.reshape(-1, self.num_local_experts).T.ravel().tolist() - ) - # [tp_size * ep_size, num_local_experts]. Restore the output chunks by local experts. - self.restore_output_by_local_experts = ( - input_chunk_idxs.reshape(self.num_local_experts, -1).T.ravel().tolist() - ) - - # Token drop and padding. - # We need to keep track of the token num if we drop tokens without padding them. - self.num_out_tokens = None - # Drop and pad the input to capacity. - self.drop_and_pad = self.config.moe_pad_expert_input_to_capacity - if self.drop_and_pad: - assert self.config.moe_expert_capacity_factor is not None - self.capacity = None - - # A cuda stream synchronization is needed in self.token_permutation() in some cases, - # because there are several non-blocking DtoH data transfers called in self.preprocess(). - # The synchronization happens at different points based on MoE settings as late as possible. - # Valid sync points are "before_permutation_1", "before_ep_alltoall", "before_finish", - # and "no_sync". - self.cuda_sync_point = "no_sync" - - self.shared_experts = None - - def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: - """ - Preprocess token routing map for AlltoAll communication and token permutation. - - This method computes the number of tokens assigned to each expert based on the routing_map. - It also initializes the necessary data structures for AlltoAll communication, such as input - and output splits, and the mapping between global tokens and local experts. - - Args: - routing_map (torch.Tensor): The mapping of tokens to experts, with shape - [num_tokens, num_experts]. - - Returns: - torch.Tensor: Tensor containing the number of tokens assigned to local expert. - """ - # [num_experts], number of tokens assigned to each expert from the current rank's input. - num_local_tokens_per_expert = routing_map.sum(dim=0).long() - - if self.drop_and_pad: - # Drop and pad the input to capacity. 
- num_tokens = routing_map.size(0) * self.config.moe_router_topk - self.capacity = get_capacity( - num_tokens=num_tokens, - num_experts=self.num_experts, - capacity_factor=self.config.moe_expert_capacity_factor, - ) - self.num_out_tokens = self.capacity * self.num_experts - # [num_local_experts], number of tokens processed by each expert. - num_tokens_per_local_expert = torch.full( - (self.num_local_experts,), - self.capacity * self.tp_size * self.ep_size, - dtype=torch.long, - ) - # [tp_size * ep_size, num_local_experts]. - self.num_global_tokens_per_local_expert_cpu = torch.full( - (self.num_experts * self.tp_size,), self.capacity, dtype=torch.long - ) - return num_tokens_per_local_expert - elif self.config.moe_expert_capacity_factor is not None: - # Drop tokens to capacity, no padding. - # A synchronization is needed before the first - # permutation to get the `num_out_tokens` CPU value. - self.num_out_tokens = num_local_tokens_per_expert.sum().to( - torch.device("cpu"), non_blocking=True - ) - self.cuda_sync_point = "before_permutation_1" - else: - # Dropless - self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk - if self.ep_size > 1 or self.num_local_experts > 1: - # Token dropless and enable ep. A synchronization is needed before expert parallel - # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. - self.cuda_sync_point = "before_ep_alltoall" - else: - # Token dropless and no ep. A synchronization is needed before the returns - # to get the `tokens_per_expert` CPU value for - self.cuda_sync_point = "before_finish" - - if self.ep_size > 1 or self.tp_size > 1: - # =================================================== - # Calculate input_splits, output_splits for alltoall/allgather in variable size. - # =================================================== - self.input_splits = ( - num_local_tokens_per_expert.reshape(self.ep_size, self.num_local_experts) - .sum(axis=1) - .to(torch.device("cpu"), non_blocking=True) - .numpy() - ) - # Gather the global distribution of tokens across ranks. - # num_global_tokens_per_expert represents the number of tokens sent to each - # expert by all ranks. - # [tp_size, ep_size, num_experts] - num_global_tokens_per_expert = ( - gather_from_sequence_parallel_region( - num_local_tokens_per_expert, group=self.tp_ep_group - ) - .reshape(self.ep_size, self.tp_size, self.num_experts) - .transpose(0, 1) - ) - # [tp_size, ep_size, num_experts] -> [tp_size, ep_size, num_local_experts] - num_global_tokens_per_local_expert = num_global_tokens_per_expert[ - :, :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 - ].contiguous() - # [tp_size, ep_size, num_local_experts] -> [tp_size, ep_size] - num_global_tokens_per_rank = num_global_tokens_per_local_expert.sum(axis=2) - # [tp_size, ep_size] -> [ep_size] - # self.output_splits represents the number of tokens received by the current rank - # from other EP rank. - self.output_splits = ( - num_global_tokens_per_rank[self.tp_rank] - .to(torch.device("cpu"), non_blocking=True) - .numpy() - ) - # [tp_size, ep_size] -> [tp_size] - # self.output_splits_tp represents the number of tokens received by the current - # rank from other TP rank. 
- self.output_splits_tp = ( - num_global_tokens_per_rank.sum(axis=1) - .to(torch.device("cpu"), non_blocking=True) - .numpy() - ) - # [tp_size, ep_size, num_local_experts] -> [num_local_experts] - num_tokens_per_local_expert = num_global_tokens_per_local_expert.sum(dim=(0, 1)).to( - torch.device("cpu"), non_blocking=True - ) - else: - num_global_tokens_per_local_expert = num_local_tokens_per_expert.reshape( - self.num_experts - ) - num_tokens_per_local_expert = num_local_tokens_per_expert.to( - torch.device("cpu"), non_blocking=True - ) - - if self.num_local_experts > 1: - self.num_global_tokens_per_local_expert_cpu = num_global_tokens_per_local_expert.view( - -1, self.num_local_experts - ).to(torch.device("cpu"), non_blocking=True) - - return num_tokens_per_local_expert - - def token_permutation( - self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor - ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Dispatch tokens to local experts using AlltoAll communication. - - This method performs the following steps: - 1. Preprocess the routing map to get metadata for communication and permutation. - 2. Permute input tokens for AlltoAll communication. - 3. Perform expert parallel AlltoAll communication. - 4. Sort tokens by local expert (if multiple local experts exist). - - Args: - hidden_states (torch.Tensor): Input token embeddings. - probs (torch.Tensor): The probabilities of token to experts assignment. - routing_map (torch.Tensor): The mapping of token to experts assignment. - - Returns: - Tuple[torch.Tensor, torch.Tensor]: - - Permuted token embeddings for local experts. - - Number of tokens per expert. - """ - # Preprocess: Get the metadata for communication, permutation and computation operations. - self.hidden_shape = hidden_states.shape - self.probs = probs - self.routing_map = routing_map - assert probs.dim() == 2, "Expected 2D tensor for probs" - assert routing_map.dim() == 2, "Expected 2D tensor for token2expert mask" - assert routing_map.dtype == torch.bool, "Expected bool tensor for mask" - hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) - tokens_per_expert = self.preprocess(self.routing_map) - - if self.shared_experts is not None: - self.shared_experts.pre_forward_comm(hidden_states.view(self.hidden_shape)) - - # Permutation 1: input to AlltoAll input - self.hidden_shape_before_permute = hidden_states.shape - if self.cuda_sync_point == "before_permutation_1": - torch.cuda.current_stream().synchronize() - permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( - hidden_states, routing_map, num_out_tokens=self.num_out_tokens - ) - - # Perform expert parallel AlltoAll communication - if self.cuda_sync_point == "before_ep_alltoall": - torch.cuda.current_stream().synchronize() - global_input_tokens = all_to_all( - self.ep_group, permutated_local_input_tokens, self.output_splits, self.input_splits - ) - if self.shared_experts is not None: - self.shared_experts.linear_fc1_forward_and_act(global_input_tokens) - - if self.tp_size > 1: - global_input_tokens = gather_from_sequence_parallel_region( - global_input_tokens, - group=self.tp_group, - output_split_sizes=( - self.output_splits_tp.tolist() if self.output_splits_tp is not None else None - ), - ) - - # Permutation 2: Sort tokens by local expert. 
- if self.num_local_experts > 1: - global_input_tokens = sort_chunks_by_idxs( - global_input_tokens, - self.num_global_tokens_per_local_expert_cpu.ravel(), - self.sort_input_by_local_experts, - ) - - if self.cuda_sync_point == "before_finish": - torch.cuda.current_stream().synchronize() - - return global_input_tokens, tokens_per_expert - - def token_unpermutation( - self, hidden_states: torch.Tensor, bias: Optional[torch.Tensor] = None - ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: - """ - Reverse the token permutation to restore the original order. - - This method performs the following steps: - 1. Unsort tokens by local expert (if multiple local experts exist). - 2. Perform expert parallel AlltoAll communication to restore the original order. - 3. Unpermute tokens to restore the original order. - - Args: - hidden_states (torch.Tensor): Output from local experts. - bias (torch.Tensor, optional): Bias tensor (not supported). - - Returns: - Tuple[torch.Tensor, Optional[torch.Tensor]]: - - Unpermuted token embeddings in the original order. - - None (bias is not supported). - """ - assert bias is None, "Bias is not supported in MoEAlltoAllTokenDispatcher" - - # Unpermutation 2: Unsort tokens by local expert. - if self.num_local_experts > 1: - hidden_states = sort_chunks_by_idxs( - hidden_states, - self.num_global_tokens_per_local_expert_cpu.T.ravel(), - self.restore_output_by_local_experts, - ) - - if self.tp_size > 1: - hidden_states = reduce_scatter_to_sequence_parallel_region( - hidden_states, - group=self.tp_group, - input_split_sizes=( - self.output_splits_tp.tolist() if self.output_splits_tp is not None else None - ), - ) - - # Perform expert parallel AlltoAll communication - # hidden_states: [SEQL, H] -> [SEQL, H/TP] - permutated_local_input_tokens = all_to_all( - self.ep_group, hidden_states, self.input_splits, self.output_splits - ) - if self.shared_experts is not None: - self.shared_experts.linear_fc2_forward(permutated_local_input_tokens) - self.shared_experts.post_forward_comm() - - # Unpermutation 1: AlltoAll output to output - output = unpermute( - permutated_local_input_tokens, - self.reversed_local_input_permutation_mapping, - restore_shape=self.hidden_shape_before_permute, - probs=self.probs, - routing_map=self.routing_map, - ) - - # Reshape the output tensor - output = output.view(self.hidden_shape) - - # Add shared experts output - if self.shared_experts is not None: - shared_expert_output = self.shared_experts.get_output() - output += shared_expert_output - return output, None +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
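# Editorial sketch (not part of this patch): a minimal, self-contained illustration of
# the permute/unpermute semantics the dispatchers below rely on. Tokens are replicated
# once per selected expert and grouped expert-by-expert; unpermutation scatters (and
# sums) the expert outputs back into the original token slots.
import torch

hidden = torch.arange(4, dtype=torch.float32).unsqueeze(-1)           # 4 tokens, H=1
routing_map = torch.tensor([[1, 0], [1, 1], [0, 1], [1, 0]]).bool()   # 2 experts

# Permute: for each expert, gather the tokens routed to it (expert-major order).
token_idx = [routing_map[:, e].nonzero().flatten() for e in range(2)]
permuted = torch.cat([hidden[idx] for idx in token_idx])               # shape [5, 1]
tokens_per_expert = routing_map.sum(dim=0)                             # tensor([3, 2])

# Unpermute: scatter-add each expert's output back into the owning token's row.
restored = torch.zeros_like(hidden)
restored.index_add_(0, torch.cat(token_idx), permuted)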
+ +from abc import ABC, abstractmethod +from typing import List, Optional, Tuple + +import torch + +from megatron.core.parallel_state import ( + get_expert_model_parallel_group, + get_expert_tensor_and_model_parallel_group, + get_expert_tensor_parallel_group, + get_expert_tensor_parallel_rank, +) +from megatron.core.tensor_parallel import ( + all_to_all, + gather_from_sequence_parallel_region, + reduce_scatter_to_sequence_parallel_region, +) +from megatron.core.transformer.moe.fused_a2a import fused_combine, fused_dispatch +from megatron.core.transformer.moe.moe_utils import ( + get_capacity, + permute, + sort_chunks_by_idxs, + unpermute, +) +from megatron.core.transformer.moe.shared_experts import SharedExpertMLP +from megatron.core.transformer.transformer_config import TransformerConfig + +""" We use the following notation throughout this file: + H: hidden size + B: micro batch size + S: sequence length + TP: tensor model parallel size + EP: expert model parallel size + num_local_tokens: S/TP*B + num_global_tokens: num_local_tokens*TP*EP +""" + + +class MoETokenDispatcher: + """ + MoE Token Dispatcher + """ + + def __init__(self, config: TransformerConfig) -> None: + """ + Initialize the MoE Token Dispatcher. + """ + self.config = config + self.shared_experts: Optional[SharedExpertMLP] = None + + self.tp_size = config.expert_tensor_parallel_size + self.ep_size = config.expert_model_parallel_size + + @property + def ep_group(self): + """Get expert model parallel group.""" + return get_expert_model_parallel_group() + + @property + def tp_group(self): + """Get expert tensor parallel group.""" + return get_expert_tensor_parallel_group() + + @property + def tp_rank(self): + """Get expert tensor parallel rank.""" + return get_expert_tensor_parallel_rank() + + @property + def tp_ep_group(self): + """Get expert tensor and model parallel group.""" + return get_expert_tensor_and_model_parallel_group() + + @abstractmethod + def token_permutation( + self, tokens: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ): + """Dispatch tokens to experts. + + Args: + tokens (torch.Tensor): Input tokens. + probs (torch.Tensor): The routing probability tensor [num_tokens, num_experts]. + routing_map (torch.Tensor): Token to expert mapping tensor. + + Returns: + torch.Tensor: Tokens tensor. + """ + raise NotImplementedError("Dispatch function not implemented.") + + @abstractmethod + def token_unpermutation(self, expert_output: torch.Tensor, bias: torch.Tensor = None): + """Restores the expert output to its original ordering. + + Args: + expert_output (torch.Tensor): The output tensor from the expert models. + bias (torch.Tensor): The bias tensor. + + Returns: + (torch.Tensor, torch.Tensor): Unpermuted activation and optional bias. + """ + raise NotImplementedError("Restore function not implemented.") + + def set_shared_experts(self, shared_experts): + """Set shared expert to the dispatcher.""" + assert self.config.moe_shared_expert_overlap + self.shared_experts = shared_experts + + +class MoEAllGatherTokenDispatcher(MoETokenDispatcher): + """ + AllGather Based Token dispatcher. + Note that this allgather spans the communication domain of TP*EP: + """ + + def __init__( + self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig + ) -> None: + """ + Initialize the zero token dropping router. 
+ """ + super().__init__(config=config) + self.num_local_experts = num_local_experts + assert self.num_local_experts > 0, "Expected at least one expert" + self.local_expert_indices = local_expert_indices + assert len(self.local_expert_indices) > 0, "Expected at least one local expert index" + self.router_topk = config.moe_router_topk + self.add_bias = config.add_bias_linear + + # self.global_local_map: 2D tensor. A mask of mapping between global and local tokens where + # each element is True if it's between the local_expert_indices. Only useful when cross + # device token permutation is enabled and **AllGahter** is performed. + self.global_local_map = None + + def token_permutation( + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ): + """Dispatch tokens to local experts. It's composed of two stages: + (1) Gather the tokens across the expert parallel devices. After this stage, + each device receives all of the tokens assigned to its local set of experts + in its local HBM. + (2) Permute the tokens locally so that they are grouped by their expert + assignment. + + Args: + hidden_states: 3D tensor [S/TP, B, H]. Input tokens. + probs: 2D tensor [S/TP*B, num_experts]. Each row of probs contains + the probility distribution across `topk` experts for one local token. + routing_map: 2D tensor [S/TP*B, num_experts], representing token assignment to + global experts. + + Returns: + permuted_local_hidden_states: Permutation of tokens to local experts group. + tokens_per_expert: the number of tokens each local expert to process. + """ + self.hidden_shape = hidden_states.shape + # [S/TP, B, H] -> [S*B/TP, H] + hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) + + # Permute the tokens across the expert parallel devices. + if self.tp_size > 1 or self.ep_size > 1: + ## local_indices calculation + with torch.no_grad(): + # [num_local_tokens, num_experts] -> [num_global_tokens, num_experts], where: + # num_local_tokens=(S/TP)*B, num_global_tokens=S*B*EP + routing_map = gather_from_sequence_parallel_region( + routing_map, group=self.tp_ep_group + ) + + ## local_probs calculation + # max_prob: [S/TP*B, num_experts] -> global_probs: [S*B*EP, num_experts] + probs = gather_from_sequence_parallel_region(probs, group=self.tp_ep_group) + + # Note that this allgather spans the communication domain of TP*EP. + # [(S/TP)*B, H] -> [((S/TP)*B)*(TP*EP), H] = [S*B*EP, H] + hidden_states = gather_from_sequence_parallel_region( + hidden_states, group=self.tp_ep_group, use_global_buffer=True + ) + self.hidden_shape_before_permute = hidden_states.shape + + # The routing map and probs that for local experts. + self.local_map = routing_map[ + :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ].contiguous() + # probs of global token assignment to local experts. 
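# Editorial note (illustrative sizes, not from the patch): with 4 experts, EP=2 and
# local experts {2, 3} on this rank, local_expert_indices = [2, 3], so these slices
# keep columns 2..3 of the gathered [num_global_tokens, num_experts] map and probs;
# tokens_per_expert = local_map.sum(dim=0) then counts, per local expert, how many of
# the gathered tokens were routed to it.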
+ self.local_probs = probs[ + :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ].contiguous() + + tokens_per_expert = self.local_map.sum(dim=0).long().cpu() + + (permuted_local_hidden_states, self.reversed_local_input_permutation_mapping) = permute( + hidden_states, + self.local_map, + num_out_tokens=tokens_per_expert.sum(), + fused=self.config.moe_permute_fusion, + ) + + return permuted_local_hidden_states, tokens_per_expert + + def token_unpermutation(self, hidden_states: torch.Tensor, bias: torch.Tensor = None): + """ + Reverse process of `dispatch()` which permutes the output of local + experts locallay and across expert parallel rank into the original order to + produce the final output. + + Args: + hidden_states: 2D tensor [num_permuted_tokens_for_local_experts, H], + output of local experts. + bias (optional): The bias tensor. + + Returns: + output_total: un-permuted updated hidden states output from all local experts + with shape of [S/TP, B, H] + """ + # Scale the expert output prior to reduction and subsequent to local unpermutation if k > 1. + # Unpermute the expert output and bias + permuted_probs = self.local_probs.T.contiguous().masked_select( + self.local_map.T.contiguous() + ) + hidden_states = hidden_states * permuted_probs.unsqueeze(-1) + unpermuted_local_hidden = unpermute( + hidden_states, + self.reversed_local_input_permutation_mapping, + restore_shape=self.hidden_shape_before_permute, + routing_map=self.local_map, + fused=self.config.moe_permute_fusion, + ) + + unpermuted_local_bias = None + if self.add_bias: + assert bias is not None + bias = bias * permuted_probs.unsqueeze(-1) + unpermuted_local_bias = unpermute( + bias, + self.reversed_local_input_permutation_mapping, + restore_shape=self.hidden_shape_before_permute, + routing_map=self.local_map, + fused=self.config.moe_permute_fusion, + ) + + output_total = unpermuted_local_hidden + output_bias_total = unpermuted_local_bias + + # Unpermute the tokens across ranks. + if self.tp_size > 1 or self.ep_size > 1: + output_total = reduce_scatter_to_sequence_parallel_region( + output_total, group=self.tp_ep_group + ) + if self.add_bias: + # Unpermute the bias across expert parallel devices. + # bias is duplicated across tensor parallelism ranks; + output_bias_total = ( + reduce_scatter_to_sequence_parallel_region( + output_bias_total, group=self.tp_ep_group + ) + / self.tp_size + ) + + output_total = output_total.view(self.hidden_shape) + if self.add_bias: + output_bias_total = output_bias_total.view(self.hidden_shape) + + return output_total, output_bias_total + + +class MoEAlltoAllTokenDispatcher(MoETokenDispatcher): + """ + AlltoAll-based token dispatcher. + + The workflow of AlltoAll token dispatcher is as follows: + (1) preprocess(): calculate necessary metadata for communication and permute + (2) token_permutation(): permute->A2A(EP)->AG(TP)->sort_chunk(if num_local_experts>1) + (3) token_unpermutation(): sort_chunk(if num_local_experts>1)->RS(TP)->A2A(EP)->unpermute + """ + + def __init__( + self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig + ) -> None: + """ + Initialize the AlltoAll token dispatcher. + + Args: + num_local_experts (int): Number of local experts on the current device. + local_expert_indices (List[int]): Indices of local experts on the current device. + config (TransformerConfig): Configuration for the transformer model. 
+ """ + super().__init__(config=config) + self.num_local_experts = num_local_experts + assert config.num_moe_experts is not None + self.num_experts = config.num_moe_experts + assert self.num_local_experts > 0, "Expected at least one expert" + self.local_expert_indices = local_expert_indices + assert ( + len(self.local_expert_indices) == self.num_local_experts + ), "Invalid local expert indices" + for i in range(len(self.local_expert_indices) - 1): + assert ( + self.local_expert_indices[i] == self.local_expert_indices[i + 1] - 1 + ), "local_expert_indices must be continous" + + # [ep_size]. Represents the number of tokens sent by the current rank to other + # EP ranks. + self.input_splits = None + # [ep_size]. Represents the number of tokens received by the current rank from + # other EP ranks. + self.output_splits = None + # [tp_size]. Represents the number of tokens received by the current rank from + # other TP ranks. + self.output_splits_tp = None + self.permute_idx_device = torch.device("cuda") if self.config.moe_permute_fusion else None + input_chunk_idxs = torch.arange( + self.num_experts * self.tp_size, device=self.permute_idx_device + ) + # [num_local_experts, tp_size * ep_size]. Sort the input chunks by local experts. + self.sort_input_by_local_experts = input_chunk_idxs.reshape( + -1, self.num_local_experts + ).T.ravel() + # [tp_size * ep_size, num_local_experts]. Restore the output chunks by local experts. + self.restore_output_by_local_experts = input_chunk_idxs.reshape( + self.num_local_experts, -1 + ).T.ravel() + + # Token drop and padding. + # Drop and pad the input to capacity. + self.drop_and_pad = self.config.moe_pad_expert_input_to_capacity + if self.drop_and_pad: + assert self.config.moe_expert_capacity_factor is not None + self.moe_expert_capacity_factor = self.config.moe_expert_capacity_factor + self.capacity = None + + # A cuda stream synchronization is needed in self.token_permutation() in some cases, + # because there are several non-blocking DtoH data transfers called in self.preprocess(). + # The synchronization happens at different points based on MoE settings as late as possible. + # Valid sync points are "before_permutation_1", "before_ep_alltoall", "before_finish", + # and "no_sync". + self.cuda_sync_point = "no_sync" + + self.shared_experts = None + + def preprocess(self, routing_map: torch.Tensor) -> torch.Tensor: + """ + Preprocess token routing map for AlltoAll communication and token permutation. + + This method computes the number of tokens assigned to each expert based on the routing_map. + It also initializes the necessary data structures for AlltoAll communication, such as input + and output splits, and the mapping between global tokens and local experts. + + Args: + routing_map (torch.Tensor): The mapping of tokens to experts, with shape + [num_tokens, num_experts]. + + Returns: + torch.Tensor: Tensor containing the number of tokens assigned to local expert. + """ + # [num_experts], number of tokens assigned to each expert from the current rank's input. + num_local_tokens_per_expert = routing_map.sum(dim=0).long() + + if self.drop_and_pad: + # Drop and pad the input to capacity. + num_tokens = routing_map.size(0) * self.config.moe_router_topk + self.capacity = get_capacity( + num_tokens=num_tokens, + num_experts=self.num_experts, + capacity_factor=self.moe_expert_capacity_factor, + ) + self.num_out_tokens = self.capacity * self.num_experts + # [num_local_experts], number of tokens processed by each expert. 
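# Editorial note: a worked example of the drop-and-pad sizing above, assuming
# get_capacity() rounds up num_tokens * capacity_factor / num_experts (the usual
# definition; its body is not shown in this patch). With 8 routed tokens, topk=2,
# 4 experts and capacity_factor=1.0: num_tokens = 8 * 2 = 16,
# capacity = ceil(16 * 1.0 / 4) = 4, so num_out_tokens = 4 * 4 = 16 and every expert
# chunk is padded or truncated to 4 tokens.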
+ num_tokens_per_local_expert = torch.full( + (self.num_local_experts,), + self.capacity * self.tp_size * self.ep_size, + dtype=torch.long, + ) + # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent + # to each local expert by all ranks. + self.num_global_tokens_per_local_expert = torch.full( + (self.num_experts * self.tp_size,), + self.capacity, + dtype=torch.long, + device=self.permute_idx_device, + ) + return num_tokens_per_local_expert + elif self.config.moe_expert_capacity_factor is not None: + # Drop tokens to capacity, no padding. + # A synchronization is needed before the first + # permutation to get the `num_out_tokens` CPU value. + self.num_out_tokens = num_local_tokens_per_expert.sum().to( + torch.device("cpu"), non_blocking=True + ) + self.cuda_sync_point = "before_permutation_1" + else: + # Dropless + self.num_out_tokens = routing_map.size(0) * self.config.moe_router_topk + if self.ep_size > 1 or self.num_local_experts > 1: + # Token dropless and enable ep. A synchronization is needed before expert parallel + # AlltoAll communication to get the `input_splits` and `output_splits` CPU values. + self.cuda_sync_point = "before_ep_alltoall" + else: + # Token dropless and no ep. A synchronization is needed before the returns + # to get the `tokens_per_expert` CPU value for + self.cuda_sync_point = "before_finish" + + if self.ep_size > 1 or self.tp_size > 1: + # =================================================== + # Calculate input_splits, output_splits for alltoall/allgather in variable size. + # =================================================== + # [ep_size]. Represents the number of tokens sent by the current rank to other + # EP ranks. + self.input_splits = ( + num_local_tokens_per_expert.reshape(self.ep_size, self.num_local_experts) + .sum(axis=1) + .to(torch.device("cpu"), non_blocking=True) + .numpy() + ) + # Gather the global distribution of tokens across ranks. + # num_global_tokens_per_expert represents the number of tokens sent to each + # expert by all ranks. + # [tp_size, ep_size, num_experts] + num_global_tokens_per_expert = ( + gather_from_sequence_parallel_region( + num_local_tokens_per_expert, group=self.tp_ep_group + ) + .reshape(self.ep_size, self.tp_size, self.num_experts) + .transpose(0, 1) + ) + # [tp_size, ep_size, num_experts] -> [tp_size, ep_size, num_local_experts] + num_global_tokens_per_local_expert = num_global_tokens_per_expert[ + :, :, self.local_expert_indices[0] : self.local_expert_indices[-1] + 1 + ].contiguous() + # [tp_size, ep_size, num_local_experts] -> [tp_size, ep_size] + num_global_tokens_per_rank = num_global_tokens_per_local_expert.sum(axis=2) + # [tp_size, ep_size] -> [ep_size] + # self.output_splits represents the number of tokens received by the current rank + # from other EP rank. + self.output_splits = ( + num_global_tokens_per_rank[self.tp_rank] + .to(torch.device("cpu"), non_blocking=True) + .numpy() + ) + # [tp_size, ep_size] -> [tp_size] + # self.output_splits_tp represents the number of tokens received by the current + # rank from other TP rank. 
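# Editorial note: a toy example of the split bookkeeping above (illustrative sizes).
# With ep_size=2 and num_local_experts=2, num_local_tokens_per_expert = [3, 1, 2, 2]
# reshapes to [[3, 1], [2, 2]], so input_splits = [4, 4]: this rank sends 4 tokens to
# EP rank 0 and 4 to EP rank 1. Symmetrically, row tp_rank of
# num_global_tokens_per_rank gives output_splits, i.e. how many tokens arrive from
# each EP rank in the AlltoAll.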
+ self.output_splits_tp = ( + num_global_tokens_per_rank.sum(axis=1) + .to(torch.device("cpu"), non_blocking=True) + .numpy() + ) + # [tp_size, ep_size, num_local_experts] -> [num_local_experts] + num_tokens_per_local_expert = num_global_tokens_per_local_expert.sum(dim=(0, 1)).to( + torch.device("cpu"), non_blocking=True + ) + else: + num_global_tokens_per_local_expert = num_local_tokens_per_expert.reshape( + self.num_experts + ) + num_tokens_per_local_expert = num_local_tokens_per_expert.to( + torch.device("cpu"), non_blocking=True + ) + + if self.num_local_experts > 1: + # [tp_size * ep_size, num_local_experts]. Represents the number of tokens sent + # to each local expert by all ranks. + self.num_global_tokens_per_local_expert = num_global_tokens_per_local_expert.view( + -1, self.num_local_experts + ) + if not self.config.moe_permute_fusion: + self.num_global_tokens_per_local_expert = num_global_tokens_per_local_expert.to( + torch.device("cpu"), non_blocking=False + ) + + return num_tokens_per_local_expert + + def token_permutation( + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Dispatch tokens to local experts using AlltoAll communication. + + This method performs the following steps: + 1. Preprocess the routing map to get metadata for communication and permutation. + 2. Permute input tokens for AlltoAll communication. + 3. Perform expert parallel AlltoAll communication. + 4. Sort tokens by local expert (if multiple local experts exist). + + Args: + hidden_states (torch.Tensor): Input token embeddings. + probs (torch.Tensor): The probabilities of token to experts assignment. + routing_map (torch.Tensor): The mapping of token to experts assignment. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - Permuted token embeddings for local experts. + - Number of tokens per expert. + """ + # Preprocess: Get the metadata for communication, permutation and computation operations. 
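# Editorial summary of the sync points set in preprocess() and consumed below:
#   - drop_and_pad: all sizes are static, cuda_sync_point stays "no_sync";
#   - capacity_factor without padding: sync "before_permutation_1" so num_out_tokens
#     is valid on the CPU before the first permute;
#   - dropless with EP > 1 or multiple local experts: sync "before_ep_alltoall" so
#     input_splits/output_splits are valid before the AlltoAll;
#   - dropless, single local expert, no EP: sync "before_finish" so tokens_per_expert
#     is valid before returning.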
+ self.hidden_shape = hidden_states.shape + self.probs = probs + self.routing_map = routing_map + assert probs.dim() == 2, "Expected 2D tensor for probs" + assert routing_map.dim() == 2, "Expected 2D tensor for token2expert mask" + assert routing_map.dtype == torch.bool, "Expected bool tensor for mask" + hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) + tokens_per_expert = self.preprocess(self.routing_map) + + if self.shared_experts is not None: + self.shared_experts.pre_forward_comm(hidden_states.view(self.hidden_shape)) + + # Permutation 1: input to AlltoAll input + self.hidden_shape_before_permute = hidden_states.shape + if self.cuda_sync_point == "before_permutation_1": + torch.cuda.current_stream().synchronize() + permutated_local_input_tokens, self.reversed_local_input_permutation_mapping = permute( + hidden_states, + routing_map, + num_out_tokens=self.num_out_tokens, + fused=self.config.moe_permute_fusion, + drop_and_pad=self.drop_and_pad, + ) + + # Perform expert parallel AlltoAll communication + if self.cuda_sync_point == "before_ep_alltoall": + torch.cuda.current_stream().synchronize() + global_input_tokens = all_to_all( + self.ep_group, permutated_local_input_tokens, self.output_splits, self.input_splits + ) + if self.shared_experts is not None: + self.shared_experts.linear_fc1_forward_and_act(global_input_tokens) + + if self.tp_size > 1: + if self.output_splits_tp is None: + output_split_sizes = None + else: + output_split_sizes = self.output_splits_tp.tolist() + global_input_tokens = gather_from_sequence_parallel_region( + global_input_tokens, group=self.tp_group, output_split_sizes=output_split_sizes + ) + + # Permutation 2: Sort tokens by local expert. + if self.num_local_experts > 1: + if self.drop_and_pad: + global_input_tokens = ( + global_input_tokens.view( + self.tp_size * self.ep_size, + self.num_local_experts, + self.capacity, + *global_input_tokens.size()[1:], + ) + .transpose(0, 1) + .contiguous() + .flatten(start_dim=0, end_dim=2) + ) + else: + global_input_tokens = sort_chunks_by_idxs( + global_input_tokens, + self.num_global_tokens_per_local_expert.ravel(), + self.sort_input_by_local_experts, + fused=self.config.moe_permute_fusion, + ) + + if self.cuda_sync_point == "before_finish": + torch.cuda.current_stream().synchronize() + + return global_input_tokens, tokens_per_expert + + def token_unpermutation( + self, hidden_states: torch.Tensor, bias: Optional[torch.Tensor] = None + ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: + """ + Reverse the token permutation to restore the original order. + + This method performs the following steps: + 1. Unsort tokens by local expert (if multiple local experts exist). + 2. Perform expert parallel AlltoAll communication to restore the original order. + 3. Unpermute tokens to restore the original order. + + Args: + hidden_states (torch.Tensor): Output from local experts. + bias (torch.Tensor, optional): Bias tensor (not supported). + + Returns: + Tuple[torch.Tensor, Optional[torch.Tensor]]: + - Unpermuted token embeddings in the original order. + - None (bias is not supported). + """ + assert bias is None, "Bias is not supported in MoEAlltoAllTokenDispatcher" + + # Unpermutation 2: Unsort tokens by local expert. 
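# Editorial note (toy sizes): with tp_size * ep_size = 2 source ranks and 2 local
# experts, the expert output arrives grouped expert-major as chunks
# [r0e0, r1e0, r0e1, r1e1]; sort_chunks_by_idxs with restore_output_by_local_experts
# ([0, 2, 1, 3] in this case) regroups them rank-major as [r0e0, r0e1, r1e0, r1e1],
# so the reduce-scatter/AlltoAll that follows sees one contiguous chunk per rank.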
+ if self.num_local_experts > 1: + if self.drop_and_pad: + hidden_states = ( + hidden_states.view( + self.num_local_experts, + self.tp_size * self.ep_size, + self.capacity, + *hidden_states.size()[1:], + ) + .transpose(0, 1) + .contiguous() + .flatten(start_dim=0, end_dim=2) + ) + else: + hidden_states = sort_chunks_by_idxs( + hidden_states, + self.num_global_tokens_per_local_expert.T.ravel(), + self.restore_output_by_local_experts, + fused=self.config.moe_permute_fusion, + ) + + if self.tp_size > 1: + if self.output_splits_tp is None: + input_split_sizes = None + else: + input_split_sizes = self.output_splits_tp.tolist() + hidden_states = reduce_scatter_to_sequence_parallel_region( + hidden_states, group=self.tp_group, input_split_sizes=input_split_sizes + ) + + # Perform expert parallel AlltoAll communication + # hidden_states: [SEQL, H] -> [SEQL, H/TP] + permutated_local_input_tokens = all_to_all( + self.ep_group, hidden_states, self.input_splits, self.output_splits + ) + if self.shared_experts is not None: + self.shared_experts.linear_fc2_forward(permutated_local_input_tokens) + self.shared_experts.post_forward_comm() + + # Unpermutation 1: AlltoAll output to output + output = unpermute( + permutated_local_input_tokens, + self.reversed_local_input_permutation_mapping, + restore_shape=self.hidden_shape_before_permute, + probs=self.probs, + routing_map=self.routing_map, + fused=self.config.moe_permute_fusion, + drop_and_pad=self.drop_and_pad, + ) + + # Reshape the output tensor + output = output.view(self.hidden_shape) + + # Add shared experts output + if self.shared_experts is not None: + shared_expert_output = self.shared_experts.get_output() + output += shared_expert_output + return output, None + + +class _DispatchManager(ABC): + """ + A manager class to handle dispatch and combine processes for MoE models. + + DispatcherManager handles token dispatching according to the routing_map of format + [num_local_tokens, world_size, num_instances]. The routing_map is a 3D tensor where each + element indicates whether a token should be sent to a specific rank. + + num_instances is the maximum number of tokens instances dispatched into a target rank, it + can be the number of local experts, or the size of sub_group. + """ + + @abstractmethod + def setup_metadata(self, routing_map: torch.Tensor, probs: torch.Tensor): + """Set up metadata of routing_map and probs.""" + pass + + @abstractmethod + def dispatch(self, hidden_states: torch.Tensor) -> torch.Tensor: + """Dispatch the hidden_states according to the routing_map.""" + pass + + @abstractmethod + def combine(self, hidden_states: torch.Tensor) -> torch.Tensor: + """Combine the hidden_states after expert processing.""" + pass + + @abstractmethod + def get_dispached_metadata(self) -> torch.Tensor: + """Get the metadata of the dispatched hidden_states.""" + pass + + @abstractmethod + def get_permuted_hidden_states_by_experts(self, hidden_states: torch.Tensor) -> torch.Tensor: + """Get the permuted hidden states by instances.""" + pass + + @abstractmethod + def get_restored_hidden_states_by_experts(self, hidden_states: torch.Tensor) -> torch.Tensor: + """Get the restored hidden states by instances.""" + pass + + +class _DeepepManager(_DispatchManager): + """ + A manager class to handle fused all-to-all communication processes for MoE models using + DeepEP backend. See https://github.com/deepseek-ai/deepep for more details. 
+ + The workflow of the DeepEP dispatcher is: + (1) setup_metadata(): Process routing map and probabilities to prepare dispatch metadata + (2) dispatch(): + - Use fused kernel to permute tokens and perform all-to-all communication in single step + (3) get_permuted_hidden_states_by_instances(): + - Convert routing map and probabilities to multihot format + - Permute tokens using fused kernel + (4) get_restored_hidden_states_by_instances(): + - Reverse permutation using fused kernel + (5) combine(): + - Reverse process using fused kernel to unpermute and perform all-to-all in single step + + This implementation uses fused communication kernels (fused_dispatch/fused_combine) that + combine permutation and communication operations for improved efficiency compared to + separate permute+alltoall steps. + """ + + def __init__( + self, group: torch.distributed.ProcessGroup, router_topk: int, permute_fusion: bool = False + ): + self.group = group + self.router_topk = router_topk + + # Metadata + self.token_indices = None + self.token_probs = None + # Handle used for combine operation + self.handle = None + + self.permute_fusion = permute_fusion + + if fused_dispatch is None: + raise ImportError( + "DeepEP is not installed. Please install DeepEP package from " + "https://github.com/deepseek-ai/deepep." + ) + + def setup_metadata(self, routing_map: torch.Tensor, probs: torch.Tensor): + self.num_instances = routing_map.shape[-1] + + probs = probs.reshape(probs.shape[0], -1) + self.num_experts = probs.shape[-1] + self.token_probs, self.token_indices = torch.topk(probs, self.router_topk, dim=-1) + + def dispatch(self, hidden_states: torch.Tensor) -> torch.Tensor: + hidden_states, dispatched_indices, dispatched_probs, num_tokens_per_expert, handle = ( + fused_dispatch( + hidden_states, self.token_indices, self.token_probs, self.num_experts, self.group + ) + ) + self.handle = handle + self.tokens_per_expert = num_tokens_per_expert + self.dispatched_indices = dispatched_indices + self.dispatched_probs = dispatched_probs + + return hidden_states + + def _indices_to_multihot(self, indices, probs): + """ + Converts a tensor of indices to a multihot vector efficiently in PyTorch. + + Args: + indices (torch.Tensor): [num_tokens, topk] token indices, where -1 means masked out. + probs (torch.Tensor): [num_tokens, topk] token probabilities. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - routing_map: Multihot vector. + - probs: Multihot probabilities. + """ + batch_size = indices.shape[0] + multihot_routing_map = torch.zeros( + (batch_size, self.num_instances), dtype=torch.long, device=indices.device + ) + + multihot_probs = torch.zeros( + (batch_size, self.num_instances), dtype=torch.float, device=indices.device + ) + + mask = indices != -1 + valid_indices = indices[mask] + row_indices = torch.arange(batch_size, device=indices.device).repeat_interleave( + mask.sum(dim=1) + ) + multihot_routing_map[row_indices, valid_indices] = 1 + multihot_probs[row_indices, valid_indices] = probs[mask] + return multihot_routing_map.bool(), multihot_probs + + def get_dispached_metadata(self) -> torch.Tensor: + return self.dispatched_indices, self.dispatched_probs + + def get_number_of_tokens_per_expert(self) -> torch.Tensor: + """ + Get the number of tokens per expert. 
+ """ + return self.tokens_per_expert + + def combine(self, hidden_states: torch.Tensor) -> torch.Tensor: + hidden_states, event = fused_combine(hidden_states, self.group, self.handle) + return hidden_states + + def get_permuted_hidden_states_by_experts(self, hidden_states: torch.Tensor) -> torch.Tensor: + self.dispatched_routing_map, self.dispatched_probs = self._indices_to_multihot( + self.dispatched_indices, self.dispatched_probs + ) + self.hidden_shape_before_permute = hidden_states.shape + hidden_states, self.reversed_mapping_for_combine = permute( + hidden_states, + self.dispatched_routing_map, + num_out_tokens=sum(self.tokens_per_expert), + fused=self.permute_fusion, + ) + return hidden_states + + def get_restored_hidden_states_by_experts(self, hidden_states: torch.Tensor) -> torch.Tensor: + input_dtype = hidden_states.dtype + assert self.dispatched_probs.dtype == torch.float32, "DeepEP only supports float32 probs" + hidden_states = unpermute( + hidden_states, + self.reversed_mapping_for_combine, + restore_shape=self.hidden_shape_before_permute, + routing_map=self.dispatched_routing_map, + probs=self.dispatched_probs, + fused=self.permute_fusion, + ) + return hidden_states.to(input_dtype) + + +class MoEFlexTokenDispatcher(MoETokenDispatcher): + """ + Flexible token dispatcher for MoE models with Efficient-A2A communication kernels. + """ + + def __init__( + self, num_local_experts: int, local_expert_indices: List[int], config: TransformerConfig + ): + super().__init__(config) + + self.num_local_experts = num_local_experts + self.local_expert_indices = local_expert_indices + assert self.tp_size * self.ep_size > 1, "Flex token dispatcher requires TPxEP > 1" + assert ( + self.config.moe_enable_deepep + ), "DeepEP is not enabled. Please set --moe-enable-deepep to use DeepEP backend." + self._comm_manager = _DeepepManager( + self.tp_ep_group, + self.tp_size * self.config.moe_router_topk, + permute_fusion=self.config.moe_permute_fusion, + ) + + def set_shared_experts(self, shared_experts): + raise NotImplementedError("Shared experts overlap not supported in flex token dispatcher") + + def _initialize_metadata(self, routing_map: torch.Tensor, probs: torch.Tensor) -> torch.Tensor: + """ + Initialize the routing map and probs to a unified format covering the TPxEP group. + This design decouples the communication group from underlying model parallelism groups, + such that the communication strategy of tokens can be agnostic of TP size and EP size. + + This function expands the routing_map from shape [num_local_tokens, num_experts] to + [num_local_tokens, world_size, num_local_experts]. Each element in the routing_map + indicates whether a token should be sent to a specific rank. Specifically, the + routing_map is replicated across TP group since each TP ranks in a TP group should + receive the same tokens. 
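+
+        For example, with tp_size=2, ep_size=2 and num_local_experts=2 (4 experts in
+        total), a token routed to global expert 3 sets routing_map[token, rank, 1]
+        for both ranks of EP rank 1 (world indices 2 and 3), since expert 3 is local
+        expert 1 on EP rank 1 and its assignment is replicated across the TP group.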
+ """ + num_local_tokens = routing_map.shape[0] + world_size = self.tp_size * self.ep_size + # Organize routing map and probs to [num_local_tokens, world_size, num_local_experts] + routing_map = ( + routing_map.reshape(num_local_tokens, self.ep_size, 1, self.num_local_experts) + .expand(-1, -1, self.tp_size, -1) + .reshape(num_local_tokens, world_size, self.num_local_experts) + ).contiguous() + probs = ( + probs.reshape(num_local_tokens, self.ep_size, 1, self.num_local_experts) + .expand(-1, -1, self.tp_size, -1) + .reshape(num_local_tokens, world_size, self.num_local_experts) + ).contiguous() + return routing_map, probs + + def token_permutation( + self, hidden_states: torch.Tensor, probs: torch.Tensor, routing_map: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + self.hidden_shape = hidden_states.shape + hidden_states = hidden_states.view(-1, self.hidden_shape[-1]) + + # Initialize metadata + routing_map, probs = self._initialize_metadata(routing_map, probs) + + self._comm_manager.setup_metadata(routing_map, probs) + hidden_states = self._comm_manager.dispatch(hidden_states) + global_input_tokens = self._comm_manager.get_permuted_hidden_states_by_experts( + hidden_states + ) + tokens_per_expert = self._comm_manager.get_number_of_tokens_per_expert() + + return global_input_tokens, tokens_per_expert + + def token_unpermutation( + self, hidden_states: torch.Tensor, bias: Optional[torch.Tensor] = None + ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: + assert bias is None, "Bias is not supported in MoEFlexTokenDispatcher" + hidden_states = self._comm_manager.get_restored_hidden_states_by_experts(hidden_states) + hidden_states = self._comm_manager.combine(hidden_states) + + return hidden_states.view(self.hidden_shape), None diff --git a/megatron/core/transformer/multi_latent_attention.py b/megatron/core/transformer/multi_latent_attention.py index 67603c5..6d83ef7 100644 --- a/megatron/core/transformer/multi_latent_attention.py +++ b/megatron/core/transformer/multi_latent_attention.py @@ -1,387 +1,414 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - - -import math -from dataclasses import dataclass -from typing import Union - -import torch - -from megatron.core import parallel_state -from megatron.core.models.common.embeddings import ( - YarnRotaryEmbedding, - _yarn_get_mscale, - apply_rotary_pos_emb, -) -from megatron.core.transformer.attention import Attention -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.spec_utils import ModuleSpec, build_module -from megatron.core.transformer.transformer_config import MLATransformerConfig - - -@dataclass -class MLASelfAttentionSubmodules: - """Submodules for the MLA self-attention layer.""" - - linear_q_proj: Union[ModuleSpec, type] = None - linear_q_down_proj: Union[ModuleSpec, type] = None - linear_q_up_proj: Union[ModuleSpec, type] = None - linear_kv_down_proj: Union[ModuleSpec, type] = None - linear_kv_up_proj: Union[ModuleSpec, type] = None - core_attention: Union[ModuleSpec, type] = None - linear_proj: Union[ModuleSpec, type] = None - q_layernorm: Union[ModuleSpec, type] = None - kv_layernorm: Union[ModuleSpec, type] = None - - -class MultiLatentAttention(Attention): - """Multi-Latent Attention layer abstract class. - - This layer only contains common modules required for the "self attn" and - "cross attn" specializations. 
- """ - - def __init__( - self, - config: MLATransformerConfig, - submodules: Union[MLASelfAttentionSubmodules], - layer_number: int, - attn_mask_type: AttnMaskType, - attention_type: str, - cp_comm_type: str = None, - ) -> None: - world_size = parallel_state.get_tensor_model_parallel_world_size() - assert ( - world_size == 1 - ), "MLA is not supported with Tensor Parallelism yet, \ - use Expert Parallelism and Pipeline Parallelism for better performance." - - super().__init__( - config=config, - submodules=submodules, - layer_number=layer_number, - attention_type=attention_type, - attn_mask_type=attn_mask_type, - ) - - self.query_projection_size = self.config.v_head_dim * self.config.num_attention_heads - - self.q_head_dim = self.config.qk_head_dim + self.config.qk_pos_emb_head_dim - - mscale = _yarn_get_mscale(self.config.rotary_scaling_factor, self.config.mscale) - self.softmax_scale = mscale * mscale / math.sqrt(self.q_head_dim) - - self.rotary_pos_emb = YarnRotaryEmbedding( - self.config.qk_pos_emb_head_dim, - rotary_base=self.config.rotary_base, - scaling_factor=self.config.rotary_scaling_factor, - original_max_position_embeddings=self.config.max_position_embeddings, - beta_fast=self.config.beta_fast, - beta_slow=self.config.beta_slow, - mscale=self.config.mscale, - mscale_all_dim=self.config.mscale_all_dim, - ) - - self.core_attention = build_module( - submodules.core_attention, - config=self.config, - layer_number=self.layer_number, - attn_mask_type=self.attn_mask_type, - attention_type=self.attention_type, - softmax_scale=self.softmax_scale, - k_channels=self.q_head_dim, - v_channels=self.config.v_head_dim, - cp_comm_type=cp_comm_type, - ) - - # Output. - self.linear_proj = build_module( - submodules.linear_proj, - self.query_projection_size, - self.config.hidden_size, - config=self.config, - init_method=self.config.output_layer_init_method, - bias=self.config.add_bias_linear, - input_is_parallel=True, - skip_bias_add=True, - is_expert=False, - tp_comm_buffer_name='proj', - ) - - def forward( - self, - hidden_states, - attention_mask, - key_value_states=None, - inference_params=None, - rotary_pos_emb=None, - rotary_pos_cos=None, - rotary_pos_sin=None, - attention_bias=None, - packed_seq_params=None, - position_ids=None, - ): - """Forward pass for multi-latent attention""" - assert rotary_pos_emb is None, "Rotary position embeddings should not be passed into MLA." - assert attention_bias is None, "Attention bias should not be passed into MLA." - assert ( - rotary_pos_cos is None and rotary_pos_sin is None - ), "MLA does not support Flash Decoding" - - # hidden_states: [sq, b, h] - - # ===================== - # Query, Key, and Value - # ===================== - # Get the query, key and value tensors based on the type of attention - - # self or cross attn. 
- # query: [96, 1, 16, 128], key:[96, 1, 16, 128], value:[96, 1, 16, 128] - query, key, value = self.get_query_key_value_tensors( - hidden_states, - key_value_states, - position_ids, - packed_seq_params, - inference_params=inference_params, - ) - - # =================================================== - # Adjust key, value for inference - # =================================================== - # rotary_pos_emb = None - query, key, value, _, attn_mask_type = self._adjust_key_value_for_inference( - inference_params, query, key, value, rotary_pos_emb=None - ) - - # ================================== - # core attention computation - # ================================== - # Need corresponding TE change - if self.checkpoint_core_attention and self.training: - core_attn_out = self._checkpointed_attention_forward( - query, key, value, attention_mask, packed_seq_params=packed_seq_params - ) - else: - core_attn_out = self.core_attention( - query, - key, - value, - attention_mask, - packed_seq_params=packed_seq_params, - attn_mask_type=attn_mask_type, - ) - - if packed_seq_params is not None: - # reshape to same output shape as unpacked case - # (t, np, hn) -> (t, b=1, h=np*hn) - # t is the pack size = sum (sq_i) - # note that batch is a dummy dimension in the packed case - core_attn_out = core_attn_out.reshape(core_attn_out.size(0), 1, -1) - - # ================= - # Output. [sq, b, h] - # ================= - output, bias = self.linear_proj(core_attn_out) - - return output, bias - - -class MLASelfAttention(MultiLatentAttention): - """MLA Self-attention layer class - - Self-attention layer takes input with size [s, b, h] - and returns output of the same size. - """ - - def __init__( - self, - config: MLATransformerConfig, - submodules: MLASelfAttentionSubmodules, - layer_number: int, - attn_mask_type=AttnMaskType.padding, - cp_comm_type: str = None, - ): - super().__init__( - config=config, - submodules=submodules, - layer_number=layer_number, - attn_mask_type=attn_mask_type, - attention_type="self", - ) - - if self.config.q_lora_rank is None: - # Not projectiing query - self.linear_q_proj = build_module( - submodules.linear_q_proj, - self.config.hidden_size, - self.config.num_attention_heads * self.q_head_dim, - config=self.config, - init_method=self.config.init_method, - gather_output=False, - bias=False, - skip_bias_add=False, - is_expert=False, - ) - - else: - - self.linear_q_down_proj = build_module( - submodules.linear_q_down_proj, - self.config.hidden_size, - self.config.q_lora_rank, - config=self.config, - init_method=self.config.init_method, - gather_output=False, - bias=False, - skip_bias_add=False, - is_expert=False, - ) - - self.linear_q_up_proj = build_module( - submodules.linear_q_up_proj, - self.config.q_lora_rank, - self.config.num_attention_heads * self.q_head_dim, - config=self.config, - init_method=self.config.init_method, - gather_output=False, - bias=False, - skip_bias_add=False, - is_expert=False, - ) - - self.linear_kv_down_proj = build_module( - submodules.linear_kv_down_proj, - self.config.hidden_size, - self.config.kv_lora_rank + self.config.qk_pos_emb_head_dim, - config=self.config, - init_method=self.config.init_method, - gather_output=False, - bias=False, - skip_bias_add=False, - is_expert=False, - ) - - self.linear_kv_up_proj = build_module( - submodules.linear_kv_up_proj, - self.config.kv_lora_rank, - self.config.num_attention_heads * (self.config.qk_head_dim + self.config.v_head_dim), - config=self.config, - init_method=self.config.init_method, - gather_output=False, - 
bias=False, - skip_bias_add=False, - is_expert=False, - ) - - if self.config.q_lora_rank is not None: - self.q_layernorm = build_module( - submodules.q_layernorm, - hidden_size=self.config.q_lora_rank, - config=self.config, - eps=self.config.layernorm_epsilon, - ) - - self.kv_layernorm = build_module( - submodules.kv_layernorm, - hidden_size=self.config.kv_lora_rank, - config=self.config, - eps=self.config.layernorm_epsilon, - ) - - def get_query_key_value_tensors( - self, - hidden_states, - key_value_states=None, - position_ids=None, - packed_seq_params=None, - inference_params=None, - ): - """ - Derives `query`, `key` and `value` tensors from `hidden_states`. - """ - # s = sequence length, b = batch size, h = hidden size, n = num attention heads - # Attention heads [s, b, n*h] - assert ( - hidden_states.ndim == 3 - ), f"hidden_states should be 3D, [s, b, n*h], got {hidden_states.ndim}D" - q_len, bsz, _ = hidden_states.size() - - if self.config.q_lora_rank is not None: - q_compressed, _ = self.linear_q_down_proj(hidden_states) - q_compressed = self.q_layernorm(q_compressed) - q, _ = self.linear_q_up_proj(q_compressed) - else: - # hidden_states:[s, b, 2048], q: [s, b, n * 192] - q, _ = self.linear_q_proj(hidden_states) - - # q: [s, b, n, 192] - q = q.view(q_len, bsz, self.num_attention_heads_per_partition, self.q_head_dim) - - # q: [s, b, n, 128], q_pos_emb: [s, b, n, 64] - q_no_pe, q_pos_emb = torch.split( - q, [self.config.qk_head_dim, self.config.qk_pos_emb_head_dim], dim=-1 - ) - - # kv_combined: [s, b, 576] - kv_combined, _ = self.linear_kv_down_proj(hidden_states) - - # kv_compressed:[s, b, 512], k_pos_emb: [s, b, 64] - kv_compressed, k_pos_emb = torch.split( - kv_combined, [self.config.kv_lora_rank, self.config.qk_pos_emb_head_dim], dim=-1 - ) - - # kv: [s, b, 2048] - kv, _ = self.linear_kv_up_proj(self.kv_layernorm(kv_compressed)) - - # kv: [s, b, n, 256] - kv = kv.view( - q_len, - bsz, - self.num_attention_heads_per_partition, - self.config.qk_head_dim + self.config.v_head_dim, - ) - - # k_no_pe: [s, b, n, 128], value: [s, b, n, 128] - k_no_pe, value = torch.split(kv, [self.config.qk_head_dim, self.config.v_head_dim], dim=-1) - - # rotary_pos_emb:[s, b, 1, 64] - rotary_pos_emb = self.rotary_pos_emb(max_seq_len=self.config.max_position_embeddings) - - if len(rotary_pos_emb) == 2: - mscale = rotary_pos_emb[1] - rotary_pos_emb = rotary_pos_emb[0] - - if inference_params is not None: - # add offset to the sequence start for inference - sequence_start = inference_params.sequence_len_offset - sequence_end = sequence_start + q_len - rotary_pos_emb = rotary_pos_emb[sequence_start:sequence_end] - - # [s, b, 64] -> [s, b, 1, 64] - k_pos_emb = torch.unsqueeze(k_pos_emb, 2) - - if packed_seq_params is not None: - cu_seqlens_q = packed_seq_params.cu_seqlens_q - cu_seqlens_kv = packed_seq_params.cu_seqlens_kv - else: - cu_seqlens_q = cu_seqlens_kv = None - - # q_pos_emb: [s, b, n, 64], k_pos_emb:[s, b, 1, 64] - q_pos_emb = apply_rotary_pos_emb( - q_pos_emb, rotary_pos_emb, config=self.config, cu_seqlens=cu_seqlens_q, mscale=mscale - ) - k_pos_emb = apply_rotary_pos_emb( - k_pos_emb, rotary_pos_emb, config=self.config, cu_seqlens=cu_seqlens_kv, mscale=mscale - ) - - # query: [s, b, n, 192] - query = torch.cat([q_no_pe, q_pos_emb], dim=-1) - - # key: [s, b, n, 192] - k_pos_emb = k_pos_emb.expand(-1, -1, self.config.num_attention_heads, -1) - key = torch.cat([k_no_pe, k_pos_emb], dim=-1) - - query = query.contiguous() - key = key.contiguous() - value = value.contiguous() - - return query, key, 
value +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + + +import math +from dataclasses import dataclass +from typing import Union + +import torch + +from megatron.core.models.common.embeddings import ( + RotaryEmbedding, + YarnRotaryEmbedding, + _yarn_get_mscale, + apply_rotary_pos_emb, +) +from megatron.core.tensor_parallel.mappings import ( + gather_from_tensor_model_parallel_region, + scatter_to_sequence_parallel_region, +) +from megatron.core.transformer.attention import Attention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import MLATransformerConfig + + +@dataclass +class MLASelfAttentionSubmodules: + """Submodules for the MLA self-attention layer.""" + + linear_q_proj: Union[ModuleSpec, type] = None + linear_q_down_proj: Union[ModuleSpec, type] = None + linear_q_up_proj: Union[ModuleSpec, type] = None + linear_kv_down_proj: Union[ModuleSpec, type] = None + linear_kv_up_proj: Union[ModuleSpec, type] = None + core_attention: Union[ModuleSpec, type] = None + linear_proj: Union[ModuleSpec, type] = None + q_layernorm: Union[ModuleSpec, type] = None + kv_layernorm: Union[ModuleSpec, type] = None + + +class MultiLatentAttention(Attention): + """Multi-Latent Attention layer abstract class. + + This layer only contains common modules required for the "self attn" and + "cross attn" specializations. + """ + + def __init__( + self, + config: MLATransformerConfig, + submodules: Union[MLASelfAttentionSubmodules], + layer_number: int, + attn_mask_type: AttnMaskType, + attention_type: str, + cp_comm_type: str = None, + ) -> None: + + super().__init__( + config=config, + submodules=submodules, + layer_number=layer_number, + attention_type=attention_type, + attn_mask_type=attn_mask_type, + ) + + self.query_projection_size = self.config.v_head_dim * self.config.num_attention_heads + + self.q_head_dim = self.config.qk_head_dim + self.config.qk_pos_emb_head_dim + + # Overwrite the base class kv shape to support MLA inference + self.key_hidden_size = self.q_head_dim + self.val_hidden_size = self.config.v_head_dim + + mscale = _yarn_get_mscale(self.config.rotary_scaling_factor, self.config.mscale) + self.softmax_scale = mscale * mscale / math.sqrt(self.q_head_dim) + + if self.config.rope_type == "rope": + self.rotary_pos_emb = RotaryEmbedding( + self.config.qk_pos_emb_head_dim, + rotary_percent=self.config.rotary_percent, + rotary_base=self.config.rotary_base, + ) + elif self.config.rope_type == "yarn": + self.rotary_pos_emb = YarnRotaryEmbedding( + self.config.qk_pos_emb_head_dim, + rotary_base=self.config.rotary_base, + scaling_factor=self.config.rotary_scaling_factor, + original_max_position_embeddings=self.config.max_position_embeddings, + beta_fast=self.config.beta_fast, + beta_slow=self.config.beta_slow, + mscale=self.config.mscale, + mscale_all_dim=self.config.mscale_all_dim, + ) + else: + raise ValueError( + f"Unsupported RoPE type: {self.config.rope_type}, supported types are " + "'rope' and 'yarn'" + ) + + self.core_attention = build_module( + submodules.core_attention, + config=self.config, + layer_number=self.layer_number, + attn_mask_type=self.attn_mask_type, + attention_type=self.attention_type, + softmax_scale=self.softmax_scale, + k_channels=self.q_head_dim, + v_channels=self.config.v_head_dim, + cp_comm_type=cp_comm_type, + ) + + # Output. 
+ self.linear_proj = build_module( + submodules.linear_proj, + self.query_projection_size, + self.config.hidden_size, + config=self.config, + init_method=self.config.output_layer_init_method, + bias=self.config.add_bias_linear, + input_is_parallel=True, + skip_bias_add=True, + is_expert=False, + tp_comm_buffer_name='proj', + ) + + def forward( + self, + hidden_states, + attention_mask, + key_value_states=None, + inference_params=None, + rotary_pos_emb=None, + rotary_pos_cos=None, + rotary_pos_sin=None, + attention_bias=None, + packed_seq_params=None, + position_ids=None, + sequence_len_offset=None, + ): + """Forward pass for multi-latent attention""" + assert rotary_pos_emb is None, "Rotary position embeddings should not be passed into MLA." + assert attention_bias is None, "Attention bias should not be passed into MLA." + assert ( + rotary_pos_cos is None and rotary_pos_sin is None + ), "MLA does not support Flash Decoding" + + # hidden_states: [sq, b, h] + + # ===================== + # Query, Key, and Value + # ===================== + # Get the query, key and value tensors based on the type of attention - + # self or cross attn. + # query: [96, 1, 16, 128], key:[96, 1, 16, 128], value:[96, 1, 16, 128] + query, key, value = self.get_query_key_value_tensors( + hidden_states, + key_value_states, + position_ids, + packed_seq_params, + inference_params=inference_params, + ) + + # =================================================== + # Adjust key, value for inference + # =================================================== + # rotary_pos_emb = None + query, key, value, _, attn_mask_type = self._adjust_key_value_for_inference( + inference_params, query, key, value, rotary_pos_emb=None + ) + + # ================================== + # core attention computation + # ================================== + # Need corresponding TE change + if self.checkpoint_core_attention and self.training: + core_attn_out = self._checkpointed_attention_forward( + query, key, value, attention_mask, packed_seq_params=packed_seq_params + ) + else: + core_attn_out = self.core_attention( + query, + key, + value, + attention_mask, + packed_seq_params=packed_seq_params, + attn_mask_type=attn_mask_type, + ) + + if packed_seq_params is not None: + # reshape to same output shape as unpacked case + # (t, np, hn) -> (t, b=1, h=np*hn) + # t is the pack size = sum (sq_i) + # note that batch is a dummy dimension in the packed case + core_attn_out = core_attn_out.reshape(core_attn_out.size(0), 1, -1) + + # ================= + # Output. [sq, b, h] + # ================= + output, bias = self.linear_proj(core_attn_out) + + return output, bias + + +class MLASelfAttention(MultiLatentAttention): + """MLA Self-attention layer class + + Self-attention layer takes input with size [s, b, h] + and returns output of the same size. 
+ """ + + def __init__( + self, + config: MLATransformerConfig, + submodules: MLASelfAttentionSubmodules, + layer_number: int, + attn_mask_type=AttnMaskType.padding, + cp_comm_type: str = None, + ): + super().__init__( + config=config, + submodules=submodules, + layer_number=layer_number, + attn_mask_type=attn_mask_type, + attention_type="self", + ) + + if self.config.q_lora_rank is None: + # Not projectiing query + self.linear_q_proj = build_module( + submodules.linear_q_proj, + self.config.hidden_size, + self.config.num_attention_heads * self.q_head_dim, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + ) + + else: + + self.linear_q_down_proj = build_module( + submodules.linear_q_down_proj, + self.config.hidden_size, + self.config.q_lora_rank, + config=self.config, + init_method=self.config.init_method, + bias=False, + skip_bias_add=False, + gather_output=False, + is_expert=False, + ) + + self.linear_q_up_proj = build_module( + submodules.linear_q_up_proj, + self.config.q_lora_rank, + self.config.num_attention_heads * self.q_head_dim, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + ) + + self.linear_kv_down_proj = build_module( + submodules.linear_kv_down_proj, + self.config.hidden_size, + self.config.kv_lora_rank + self.config.qk_pos_emb_head_dim, + config=self.config, + init_method=self.config.init_method, + bias=False, + skip_bias_add=False, + gather_output=False, + is_expert=False, + ) + + self.linear_kv_up_proj = build_module( + submodules.linear_kv_up_proj, + self.config.kv_lora_rank, + self.config.num_attention_heads * (self.config.qk_head_dim + self.config.v_head_dim), + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + ) + + if self.config.q_lora_rank is not None: + self.q_layernorm = build_module( + submodules.q_layernorm, + hidden_size=self.config.q_lora_rank, + config=self.config, + eps=self.config.layernorm_epsilon, + ) + + self.kv_layernorm = build_module( + submodules.kv_layernorm, + hidden_size=self.config.kv_lora_rank, + config=self.config, + eps=self.config.layernorm_epsilon, + ) + + def get_query_key_value_tensors( + self, + hidden_states, + key_value_states=None, + position_ids=None, + packed_seq_params=None, + inference_params=None, + ): + """ + Derives `query`, `key` and `value` tensors from `hidden_states`. 
+ """ + # s = sequence length, b = batch size, h = hidden size, n = num attention heads + # Attention heads [s, b, n*h] + assert ( + hidden_states.ndim == 3 + ), f"hidden_states should be 3D, [s, b, n*h], got {hidden_states.ndim}D" + + if self.config.q_lora_rank is not None: + q_compressed, _ = self.linear_q_down_proj(hidden_states) + q_compressed = gather_from_tensor_model_parallel_region(q_compressed) + if self.config.sequence_parallel: + q_compressed = scatter_to_sequence_parallel_region(q_compressed) + q, _ = self.linear_q_up_proj(self.q_layernorm(q_compressed)) + else: + # hidden_states:[s, b, 2048], q: [s, b, n * 192] + q, _ = self.linear_q_proj(hidden_states) + + q_len, bsz, _ = q.size() + + # q: [s, b, n, 192] + q = q.view(q_len, bsz, self.num_attention_heads_per_partition, self.q_head_dim) + + # q: [s, b, n, 128], q_pos_emb: [s, b, n, 64] + q_no_pe, q_pos_emb = torch.split( + q, [self.config.qk_head_dim, self.config.qk_pos_emb_head_dim], dim=-1 + ) + + # kv_combined: [s, b, 576] + kv_combined, _ = self.linear_kv_down_proj(hidden_states) + kv_combined = gather_from_tensor_model_parallel_region(kv_combined) + + # kv_compressed:[s, b, 512], k_pos_emb: [s, b, 64] + kv_compressed, k_pos_emb = torch.split( + kv_combined, [self.config.kv_lora_rank, self.config.qk_pos_emb_head_dim], dim=-1 + ) + + if self.config.sequence_parallel: + kv_compressed = scatter_to_sequence_parallel_region(kv_compressed) + # kv: [s, b, 2048] + kv, _ = self.linear_kv_up_proj(self.kv_layernorm(kv_compressed)) + + # kv: [s, b, n, 256] + kv = kv.view( + q_len, + bsz, + self.num_attention_heads_per_partition, + self.config.qk_head_dim + self.config.v_head_dim, + ) + + # k_no_pe: [s, b, n, 128], value: [s, b, n, 128] + k_no_pe, value = torch.split(kv, [self.config.qk_head_dim, self.config.v_head_dim], dim=-1) + + rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( + inference_params, None, hidden_states, self.config, packed_seq_params + ) + + # rotary_pos_emb:[s, b, 1, 64] + mscale = 1.0 + if self.config.rope_type == "rope": + packed_seq = packed_seq_params is not None and packed_seq_params.qkv_format == 'thd' + rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len, packed_seq=packed_seq) + else: + rotary_pos_emb, mscale = self.rotary_pos_emb(rotary_seq_len) + + if inference_params is not None: + # add offset to the sequence start for inference + sequence_start = inference_params.sequence_len_offset + sequence_end = sequence_start + q_len + rotary_pos_emb = rotary_pos_emb[sequence_start:sequence_end] + + # [s, b, 64] -> [s, b, 1, 64] + k_pos_emb = torch.unsqueeze(k_pos_emb, 2) + + if packed_seq_params is not None: + cu_seqlens_q = packed_seq_params.cu_seqlens_q + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv + else: + cu_seqlens_q = cu_seqlens_kv = None + + # q_pos_emb: [s, b, n, 64], k_pos_emb:[s, b, 1, 64] + q_pos_emb = apply_rotary_pos_emb( + q_pos_emb, rotary_pos_emb, config=self.config, cu_seqlens=cu_seqlens_q, mscale=mscale + ) + k_pos_emb = apply_rotary_pos_emb( + k_pos_emb, rotary_pos_emb, config=self.config, cu_seqlens=cu_seqlens_kv, mscale=mscale + ) + + # query: [s, b, n, 192] + query = torch.cat([q_no_pe, q_pos_emb], dim=-1) + + # key: [s, b, n, 192] + k_pos_emb = k_pos_emb.expand(-1, -1, self.num_attention_heads_per_partition, -1) + key = torch.cat([k_no_pe, k_pos_emb], dim=-1) + + query = query.contiguous() + key = key.contiguous() + value = value.contiguous() + + return query, key, value diff --git a/megatron/core/transformer/transformer_block.py b/megatron/core/transformer/transformer_block.py 
index d40476d..e6cd8b7 100644 --- a/megatron/core/transformer/transformer_block.py +++ b/megatron/core/transformer/transformer_block.py @@ -1,618 +1,664 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from contextlib import nullcontext -from dataclasses import dataclass -from typing import List, Optional, Union - -import torch -from torch import Tensor - -from megatron.core import InferenceParams, parallel_state, tensor_parallel -from megatron.core.dist_checkpointing.mapping import ShardedStateDict -from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding -from megatron.core.fusions.fused_layer_norm import FusedLayerNorm -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.spec_utils import ModuleSpec, build_module -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import BaseTransformerLayer, TransformerLayer -from megatron.core.transformer.utils import sharded_state_dict_default -from megatron.core.utils import is_te_min_version, make_viewless_tensor - -try: - from megatron.core.extensions.transformer_engine import ( - TEDelayedScaling, - TENorm, - get_cpu_offload_context, - te_checkpoint, - ) - - HAVE_TE = True - LayerNormImpl = TENorm -except ImportError: - HAVE_TE = False - get_cpu_offload_context = None - - try: - import apex # pylint: disable=unused-import - - LayerNormImpl = FusedLayerNorm - - except ImportError: - from megatron.core.transformer.torch_norm import WrappedTorchNorm - - LayerNormImpl = WrappedTorchNorm - - -def get_num_layers_to_build(config: TransformerConfig) -> int: - """ - Determine the number of transformer layers to build for the current pipeline stage. - Args: - config (TransformerConfig): Configuration object containing transformer model parameters. - - Returns: - int: The number of layers to be built for the current pipeline stage. 
- """ - if config.first_pipeline_num_layers is not None or config.last_pipeline_num_layers is not None: - assert ( - parallel_state.get_virtual_pipeline_model_parallel_world_size() is None - ), "Uneven number of layer not compatible with interleaved pipeline schedule" - - # Number of layers to distribute over rest of pipeline stages - layers_to_distribute = config.num_layers - # Number of pipeline stages left for distributing transformer layers - pipeline_stages_left = parallel_state.get_pipeline_model_parallel_world_size() - - if config.first_pipeline_num_layers is not None: - layers_to_distribute -= config.first_pipeline_num_layers - pipeline_stages_left -= 1 - if parallel_state.is_pipeline_first_stage(): - return config.first_pipeline_num_layers - - if config.last_pipeline_num_layers is not None: - layers_to_distribute -= config.last_pipeline_num_layers - pipeline_stages_left -= 1 - if parallel_state.is_pipeline_last_stage(): - return config.last_pipeline_num_layers - - assert ( - layers_to_distribute % pipeline_stages_left == 0 - ), "With uneven pipelineing the left over layers must be divisible by left over stages" - num_layers_per_pipeline_rank = layers_to_distribute // pipeline_stages_left - else: - pipeline_ranks = config.pipeline_model_parallel_size - num_layers_per_pipeline_rank = config.num_layers // pipeline_ranks - - if parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None: - # Interleaved pipeline parallelism: - # Number of layers in each model chunk is the number of layers in the stage, - # divided by the number of model chunks in a stage. - # With 8 layers, 2 stages, and 4 model chunks, we want an assignment of - # layers to stages like (each list is a model chunk): - # Stage 0: [0] [2] [4] [6] - # Stage 1: [1] [3] [5] [7] - # With 8 layers, 2 stages, and 2 virtual stages, we want an assignment of - # layers to stages like (each list is a model chunk): - # Stage 0: [0, 1] [4, 5] - # Stage 1: [2, 3] [6, 7] - - vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size() - - num_layers_per_virtual_rank = num_layers_per_pipeline_rank // vp_size - - num_layers_to_build = num_layers_per_virtual_rank - - else: - # Non-interleaved pipeline parallelism: - # Each stage gets a contiguous set of layers. - - num_layers_to_build = num_layers_per_pipeline_rank - - return num_layers_to_build - - -@dataclass -class TransformerBlockSubmodules: - """ - Dataclass for specifying the submodules of a transformer block. - - This class defines the structure for configuring the layers and normalization - within a transformer block, allowing for flexible and customizable architecture designs. - - Args: - layer_specs (List[ModuleSpec], optional): A list of module specifications for - the layers within the transformer block. Each specification typically - defines a complete transformer layer (e.g., self-attention, feed-forward network). - layer_norm (Optional[Union[ModuleSpec, torch.nn.Module]], optional): Specification - or instance of the layer normalization to be applied. - """ - - layer_specs: List[ModuleSpec] = None - layer_norm: Optional[Union[ModuleSpec, torch.nn.Module]] = None - - -def _get_block_submodules( - config: TransformerConfig, spec: Union[TransformerBlockSubmodules, ModuleSpec] -) -> TransformerBlockSubmodules: - """ - Retrieve or construct TransformerBlockSubmodules based on the provided specification. - - Args: - config (TransformerConfig): Configuration object for the transformer model. 
- spec (Union[TransformerBlockSubmodules, ModuleSpec]): Specification for the - transformer block submodules. Can be either a TransformerBlockSubmodules - instance or a ModuleSpec. - - Returns: - TransformerBlockSubmodules: The submodules for the transformer block. - """ - - # Transformer block submodules. - if isinstance(spec, TransformerBlockSubmodules): - return spec - - # ModuleSpec here is generally assumed to be for a transformer layer that - # is implemented in `transformer_layer.py` or if it subclasses - # `BaseTransformerLayer` from the `transformer_layer.py` file. - elif isinstance(spec, ModuleSpec): - if issubclass(spec.module, TransformerBlock): - return spec.submodules - elif issubclass(spec.module, BaseTransformerLayer): - num_layers = get_num_layers_to_build(config) - return TransformerBlockSubmodules( - layer_specs=[spec] * num_layers, layer_norm=LayerNormImpl - ) - else: - raise Exception(f"specialize for {spec.module.__name__}.") - else: - raise Exception(f"specialize for {type(spec).__name__}.") - - -class TransformerBlock(MegatronModule): - """Transformer class.""" - - def __init__( - self, - config: TransformerConfig, - spec: Union[TransformerBlockSubmodules, ModuleSpec], - post_layer_norm: bool = True, - pre_process: bool = True, - post_process: bool = True, - ): - super().__init__(config=config) - - self.submodules = _get_block_submodules(config, spec) - self.post_layer_norm = post_layer_norm - self.pre_process = pre_process - self.post_process = post_process - # Dictionary to store CUDA graphs. Number of items in the dictionary = len(self.layers). - # Item `i` in the dictionary is a list of `N` CUDA graphs for layer 'i' where N is the - # number of microbatches. Multiple CUDA graphs per layer is required to support - # pipelining which requires running FWD graph of multiple microbatches before BWD graph. - # To enable CUDA graph, this dictionary should be populated in the model training script - # with the graphs returned by make_graphed_callables API before the first trainng step. - self.cuda_graphs = {} - self.current_microbatch = -1 - - # required for pipeline parallel schedules - self.input_tensor = None - - self.checkpoint_core_attention = self.config.recompute_granularity == 'selective' - - if get_cpu_offload_context is not None: - (self.offload_context, self.group_prefetch_offload_commit_async) = ( - get_cpu_offload_context( - self.config.cpu_offloading, - self.config.cpu_offloading_num_layers, - self.config.num_layers, - self.config.cpu_offloading_activations, - self.config.cpu_offloading_weights, - ) - ) - self.config._cpu_offloading_context = ( - self.offload_context if self.config.cpu_offloading else None - ) - else: - assert ( - self.config.cpu_offloading is False - ), "CPU Offloading is enabled when TE is not present" - - self.offload_context, self.group_prefetch_offload_commit_async = nullcontext(), None - self.config._cpu_offloading_context = None - - self._build_layers() - self.num_layers_per_pipeline_rank = len(self.layers) - self.tp_only_amax_red = config.tp_only_amax_red - - def _build_layers(self): - # Transformer layers. - # @jcasper can we improve how we deal with layer_number? - # currently it's only used in CoreAttention? 
- # if self.apply_query_key_layer_scaling: - # coeff = self.layer_number - # self.norm_factor *= coeff - def build_layer(layer_spec, layer_number): - return build_module(layer_spec, config=self.config, layer_number=layer_number) - - # offset is implicit in TransformerLayer - self.layers = torch.nn.ModuleList( - [ - build_layer(layer_spec, i + 1) - for i, layer_spec in enumerate(self.submodules.layer_specs) - ] - ) - - # @TODO: add back standalone_embedding_stage (see issue #293) - # In pipeline parallelism, we want to add this LN only to the last stage of the pipeline - # self.post_process and self.post_layer_norm guide this behavior - if self.submodules.layer_norm and self.post_process and self.post_layer_norm: - self.final_layernorm = build_module( - self.submodules.layer_norm, - config=self.config, - hidden_size=self.config.hidden_size, - eps=self.config.layernorm_epsilon, - ) - else: - self.final_layernorm = None # Either this or nn.Identity - - def _get_layer(self, layer_number: int): - return self.layers[layer_number] - - def _checkpointed_forward( - self, - hidden_states: Tensor, - attention_mask: Tensor, - context: Tensor, - context_mask: Tensor, - rotary_pos_emb: Tensor, - attention_bias: Tensor, - packed_seq_params: PackedSeqParams, - ): - """Forward method with activation checkpointing.""" - - def custom(start: int, end: int): - def custom_forward( - hidden_states, attention_mask, context, context_mask, rotary_pos_emb - ): - for index in range(start, end): - layer = self._get_layer(index) - hidden_states, context = layer( - hidden_states=hidden_states, - attention_mask=attention_mask, - context=context, - context_mask=context_mask, - rotary_pos_emb=rotary_pos_emb, - attention_bias=attention_bias, - inference_params=None, - packed_seq_params=packed_seq_params, - ) - return hidden_states, context - - return custom_forward - - def checkpoint_handler(forward_func): - """Determines whether to use the `te_checkpoint` or `tensor_parallel.checkpoint`""" - if self.config.fp8: - return te_checkpoint( - forward_func, - self.config.distribute_saved_activations, - tensor_parallel.random.get_cuda_rng_tracker, - parallel_state.get_tensor_model_parallel_group(), - hidden_states, - attention_mask, - context, - context_mask, - rotary_pos_emb, - ) - else: - return tensor_parallel.checkpoint( - forward_func, - self.config.distribute_saved_activations, - hidden_states, - attention_mask, - context, - context_mask, - rotary_pos_emb, - ) - - if self.config.recompute_method == 'uniform': - # Uniformly divide the total number of Transformer layers and checkpoint - # the input activation of each divided chunk. - # A method to further reduce memory usage reducing checkpoints. - layer_idx = 0 - while layer_idx < self.num_layers_per_pipeline_rank: - hidden_states, context = checkpoint_handler( - custom(layer_idx, layer_idx + self.config.recompute_num_layers) - ) - - layer_idx += self.config.recompute_num_layers - - elif self.config.recompute_method == 'block': - # Checkpoint the input activation of only a set number of individual - # Transformer layers and skip the rest. - # A method fully use the device memory removing redundant re-computation. - recompute_skip_num_layers = 0 - for layer_idx in range(self.num_layers_per_pipeline_rank): - # Skip recomputation when input grad computation is not needed. - # Need to have at least one input tensor with gradient computation - # for re-enterant autograd engine. 
- if self.config.fp8 and not hidden_states.requires_grad: - recompute_skip_num_layers += 1 - if ( - layer_idx >= recompute_skip_num_layers - and layer_idx < self.config.recompute_num_layers + recompute_skip_num_layers - ): - hidden_states, context = checkpoint_handler(custom(layer_idx, layer_idx + 1)) - else: - hidden_states, context = custom(layer_idx, layer_idx + 1)( - hidden_states, attention_mask, context, context_mask, rotary_pos_emb - ) - else: - raise ValueError("Invalid activation recompute method.") - - return hidden_states - - def set_input_tensor(self, input_tensor: Tensor): - """Set input tensor to be used instead of forward()'s input. - - When doing pipeline parallelism the input from the previous - stage comes from communication, not from the input, so the - model's forward_step_func won't have it. This function is thus - used by internal code to bypass the input provided by the - forward_step_func""" - self.input_tensor = input_tensor - - def get_cuda_graph_optional_args( - self, - attention_mask: Tensor, - context: Tensor, - context_mask: Tensor, - rotary_pos_emb: Tensor, - attention_bias: Tensor, - inference_params: InferenceParams, - packed_seq_params: PackedSeqParams, - ): - """Get optional tensor arguments for CUDA graph.""" - - optional_inputs = {} - optional_inputs['is_first_microbatch'] = self.current_microbatch == 0 - try: - import transformer_engine.pytorch as te # pylint: disable=unused-import - - if is_te_min_version("1.10.0", check_equality=False): - assert not any( - [attention_mask, context, context_mask, rotary_pos_emb] - ), "Keyword Arguments not supported with CUDA graph." - else: - optional_inputs['attention_mask'] = attention_mask - optional_inputs['context'] = context - optional_inputs['context_mask'] = context_mask - optional_inputs['rotary_pos_emb'] = rotary_pos_emb - except ImportError: - raise RuntimeError("CUDAGraph requires TransformerEngine, but not installed") - return optional_inputs - - def forward( - self, - hidden_states: Tensor, - attention_mask: Tensor, - context: Tensor = None, - context_mask: Tensor = None, - rotary_pos_emb: Tensor = None, - rotary_pos_cos: Tensor = None, - rotary_pos_sin: Tensor = None, - attention_bias: Tensor = None, - inference_params: InferenceParams = None, - packed_seq_params: PackedSeqParams = None, - ): - """ - Perform the forward pass through the transformer block. - - This method handles the core computation of the transformer, including - self-attention, optional cross-attention, and feed-forward operations. - - Args: - hidden_states (Tensor): Input tensor of shape [s, b, h] where s is the - sequence length, b is the batch size, and h is the hidden size. - attention_mask (Tensor): Boolean tensor of shape [1, 1, s, s] for masking - self-attention. - context (Tensor, optional): Context tensor for cross-attention. - context_mask (Tensor, optional): Mask for cross-attention context - rotary_pos_emb (Tensor, optional): Rotary positional embeddings. - attention_bias (Tensor): Bias tensor for Q * K.T of shape in shape broadcastable - to [b, num_head, sq, skv], e.g. [1, 1, sq, skv]. - Used as an alternative to apply attention mask for TE cuDNN attention. - inference_params (InferenceParams, optional): Parameters for inference-time - optimizations. - packed_seq_params (PackedSeqParams, optional): Parameters for packed sequence - processing. - - Returns: - Union[Tensor, Tuple[Tensor, Tensor]]: The output hidden states tensor of shape - [s, b, h], and optionally the updated context tensor if cross-attention is used. 
- """ - - if not self.pre_process: - # See set_input_tensor() - hidden_states = self.input_tensor - - # Viewless tensor. - # - We only need to create a viewless tensor in the case of micro batch - # size (mbs) == 1, since in this case, 'hidden_states.transpose()' - # above creates a view tensor, and '.contiguous()' is a pass-through. - # For mbs >= 2, '.contiguous()' creates a new tensor, eliminating - # the need to make it viewless. - # - # However, we don't explicitly check mbs == 1 here because - # make_viewless_tensor() has negligible overhead when its input - # is already viewless. - # - # - For the 'else' case above, calling make_viewless_tensor() here is - # likely redundant, since p2p_communication.py (likely originator) - # already creates viewless tensors. That said, make_viewless_tensor() - # is called here to be future-proof and corner-case-proof. - hidden_states = make_viewless_tensor(inp=hidden_states, requires_grad=True, keep_graph=True) - - if self.config.sequence_parallel: - rng_context = tensor_parallel.get_cuda_rng_tracker().fork() - else: - rng_context = nullcontext() - - if self.config.fp8: - import transformer_engine # To keep out TE dependency when not training in fp8 - - if self.config.fp8 == "e4m3": - fp8_format = transformer_engine.common.recipe.Format.E4M3 - elif self.config.fp8 == "hybrid": - fp8_format = transformer_engine.common.recipe.Format.HYBRID - else: - raise ValueError("E4M3 and HYBRID are the only supported FP8 formats.") - - fp8_recipe = TEDelayedScaling( - config=self.config, - fp8_format=fp8_format, - override_linear_precision=(False, False, not self.config.fp8_wgrad), - ) - fp8_group = None - if parallel_state.model_parallel_is_initialized(): - fp8_group = parallel_state.get_amax_reduction_group( - with_context_parallel=True, tp_only_amax_red=self.tp_only_amax_red - ) - fp8_context = transformer_engine.pytorch.fp8_autocast( - enabled=True, fp8_recipe=fp8_recipe, fp8_group=fp8_group - ) - else: - fp8_context = nullcontext() - - with rng_context, fp8_context: - # Forward pass. - if self.config.recompute_granularity == 'full' and self.training: - hidden_states = self._checkpointed_forward( - hidden_states=hidden_states, - attention_mask=attention_mask, - context=context, - context_mask=context_mask, - rotary_pos_emb=rotary_pos_emb, - attention_bias=attention_bias, - packed_seq_params=packed_seq_params, - ) - else: - for l_no, layer in enumerate(self.layers): - with self.offload_context: - layer.use_cudagraph = True - if (len(self.cuda_graphs) == 0) or (not self.training): - hidden_states, context = layer( - hidden_states=hidden_states, - attention_mask=attention_mask, - context=context, - context_mask=context_mask, - rotary_pos_emb=rotary_pos_emb, - rotary_pos_cos=rotary_pos_cos, - rotary_pos_sin=rotary_pos_sin, - attention_bias=attention_bias, - inference_params=inference_params, - packed_seq_params=packed_seq_params, - ) - else: - # CUDA graph replay for layer `l_no` and microbatch - # `self.current_microbatch`. TransformerEngine versions>=1.10 - # allow keyword arguments with CUDA graph. However, CUDA graph - # acccepts only Tensor inputs and Tensor outputs. Hence, - # `inference_params` and `packed_seq_params` are excluded from - # input list while output is limited to `hidden_states`. - cg_index = self.current_microbatch % len(self.cuda_graphs[l_no]) - assert not any( - [inference_params, packed_seq_params] - ), "CUDA graph accepts only Tensor inputs." 
- optional_inputs = self.get_cuda_graph_optional_args( - attention_mask, - context, - context_mask, - rotary_pos_emb, - attention_bias, - inference_params, - packed_seq_params, - ) - hidden_states = self.cuda_graphs[l_no][cg_index]( - hidden_states, **optional_inputs - ) - - if ( - torch.is_grad_enabled() - and self.config.cpu_offloading - and self.group_prefetch_offload_commit_async is not None - ): - hidden_states = self.group_prefetch_offload_commit_async(hidden_states) - - # Final layer norm. - if self.final_layernorm is not None: - hidden_states = self.final_layernorm(hidden_states) - # TENorm produces a "viewed" tensor. This will result in schedule.py's - # deallocate_output_tensor() throwing an error, so a viewless tensor is - # created to prevent this. - hidden_states = make_viewless_tensor( - inp=hidden_states, requires_grad=True, keep_graph=True - ) - - return hidden_states - - def sharded_state_dict( - self, prefix: str = '', sharded_offsets: tuple = (), metadata: dict = None - ) -> ShardedStateDict: - """ - Generate a sharded state dictionary for the transformer block. - - Args: - prefix (str, optional): Prefix to be added to all keys in the state dict. - Defaults to an empty string. - sharded_offsets (tuple, optional): Tuple of sharding offsets. - metadata (dict, optional): Additional metadata for sharding. - Can specify if layers are non-homogeneous. Defaults to None. - - Returns: - ShardedStateDict: A dictionary containing the sharded state of the model. - """ - assert not sharded_offsets, "Unexpected sharded offsets" - non_homogeneous_layers = metadata is not None and metadata.get( - 'non_homogeneous_layers', False - ) - if isinstance(self.config.moe_layer_freq, int): - if self.config.moe_layer_freq > 1: - non_homogeneous_layers = True - elif isinstance(self.config.moe_layer_freq, list): - non_homogeneous_layers = True - - sharded_state_dict = {} - - layer_prefix = f'{prefix}layers.' - num_layers = self.config.num_layers - for layer in self.layers: - offset = TransformerLayer._get_layer_offset(self.config) - - global_layer_offset = layer.layer_number - 1 # self.layer_number starts at 1 - state_dict_prefix = f'{layer_prefix}{global_layer_offset - offset}.' # module list index in TransformerBlock # pylint: disable=line-too-long - if non_homogeneous_layers: - sharded_prefix = f'{layer_prefix}{global_layer_offset}.' - sharded_pp_offset = [] - else: - sharded_prefix = layer_prefix - sharded_pp_offset = [ - (0, global_layer_offset, num_layers) - ] # PP sharding offset for ShardedTensors - layer_sharded_state_dict = layer.sharded_state_dict( - state_dict_prefix, sharded_pp_offset, metadata - ) - replace_prefix_for_sharding(layer_sharded_state_dict, state_dict_prefix, sharded_prefix) - - sharded_state_dict.update(layer_sharded_state_dict) - - # Add modules other than self.layers - for name, module in self.named_children(): - if not module is self.layers: - sharded_state_dict.update( - sharded_state_dict_default( - module, f'{prefix}{name}.', sharded_offsets, metadata - ) - ) - - return sharded_state_dict +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
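The rewritten `get_num_layers_to_build` that follows supports uneven first and last pipeline stages: the explicitly sized stages are peeled off, and the remaining layers must divide evenly across the remaining ranks. The helper below mirrors that arithmetic with hypothetical config values (not taken from the patch), just to make the divisibility check concrete.

```python
# Back-of-the-envelope check of the uneven pipeline split computed by the rewritten
# get_num_layers_to_build below; the config values here are hypothetical.
def middle_stage_layers(num_layers, pp_size, first=None, last=None):
    layers_left, stages_left = num_layers, pp_size
    if first is not None:
        layers_left -= first
        stages_left -= 1
    if last is not None:
        layers_left -= last
        stages_left -= 1
    assert layers_left % stages_left == 0, "leftover layers must divide leftover stages"
    return layers_left // stages_left

# 61 layers over 8 stages with lighter first and last stages (e.g. embedding / loss heavy):
print(middle_stage_layers(num_layers=61, pp_size=8, first=5, last=8))  # -> 8 per middle stage
```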
+ +from contextlib import nullcontext +from dataclasses import dataclass +from typing import List, Optional, Union + +import torch +from torch import Tensor + +from megatron.core import InferenceParams, parallel_state, tensor_parallel +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.dist_checkpointing.utils import replace_prefix_for_sharding +from megatron.core.fusions.fused_layer_norm import FusedLayerNorm +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import BaseTransformerLayer, TransformerLayer +from megatron.core.transformer.utils import sharded_state_dict_default +from megatron.core.utils import is_te_min_version, make_viewless_tensor + +try: + from megatron.core.extensions.transformer_engine import ( + TEDelayedScaling, + TENorm, + get_cpu_offload_context, + te_checkpoint, + ) + + HAVE_TE = True + LayerNormImpl = TENorm +except ImportError: + HAVE_TE = False + get_cpu_offload_context = None + + try: + import apex # pylint: disable=unused-import + + LayerNormImpl = FusedLayerNorm + + except ImportError: + from megatron.core.transformer.torch_norm import WrappedTorchNorm + + LayerNormImpl = WrappedTorchNorm + + +def get_num_layers_to_build(config: TransformerConfig) -> int: + """ + Determine the number of transformer layers to build for the current pipeline stage. + Args: + config (TransformerConfig): Configuration object containing transformer model parameters. + + Returns: + int: The number of layers to be built for the current pipeline stage. + """ + if ( + config.num_layers_in_first_pipeline_stage is not None + or config.num_layers_in_last_pipeline_stage is not None + ): + + assert not ( + config.account_for_embedding_in_pipeline_split + or config.account_for_loss_in_pipeline_split + ), " \ + Does not support standalone embedding stage and standalone loss stage with uneven pp" + # Number of layers to distribute over rest of pipeline stages + layers_to_distribute = config.num_layers + # Number of pipeline stages left for distributing transformer layers + pipeline_stages_left = parallel_state.get_pipeline_model_parallel_world_size() + + # If the uneven first (last) pipeline stage is enabled, remove the specified number + # of layers to calculate the number of layers on each middle pipeline stage. + if config.num_layers_in_first_pipeline_stage is not None: + layers_to_distribute -= config.num_layers_in_first_pipeline_stage + pipeline_stages_left -= 1 + + if config.num_layers_in_last_pipeline_stage is not None: + layers_to_distribute -= config.num_layers_in_last_pipeline_stage + pipeline_stages_left -= 1 + + assert ( + layers_to_distribute % pipeline_stages_left == 0 + ), "With uneven pipelineing the left over layers must be divisible by left over stages" + num_layers_per_pipeline_rank = layers_to_distribute // pipeline_stages_left + + # If the uneven first (last) pipeline stage is enabled, return the specified number + # of layers for all virtual pipeline parallel stages within the first (last) pipeline + # parallel stage. 
+ if ( + parallel_state.is_pipeline_first_stage(ignore_virtual=True) + and config.num_layers_in_first_pipeline_stage is not None + ): + num_layers_per_pipeline_rank = config.num_layers_in_first_pipeline_stage + + if ( + parallel_state.is_pipeline_last_stage(ignore_virtual=True) + and config.num_layers_in_last_pipeline_stage is not None + ): + num_layers_per_pipeline_rank = config.num_layers_in_last_pipeline_stage + else: + # Include the embedding layer and loss layer into pipeline parallelism partition + num_layers = config.num_layers + if config.account_for_embedding_in_pipeline_split: + num_layers += 1 + + if config.account_for_loss_in_pipeline_split: + num_layers += 1 + + assert ( + num_layers % config.pipeline_model_parallel_size == 0 + ), "num_layers should be divisible by pipeline_model_parallel_size" + num_layers_per_pipeline_rank = num_layers // config.pipeline_model_parallel_size + + if parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None: + # Interleaved pipeline parallelism: + # Number of layers in each model chunk is the number of layers in the stage, + # divided by the number of model chunks in a stage. + # With 8 layers, 2 stages, and 4 model chunks, we want an assignment of + # layers to stages like (each list is a model chunk): + # Stage 0: [0] [2] [4] [6] + # Stage 1: [1] [3] [5] [7] + # With 8 layers, 2 stages, and 2 virtual stages, we want an assignment of + # layers to stages like (each list is a model chunk): + # Stage 0: [0, 1] [4, 5] + # Stage 1: [2, 3] [6, 7] + vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size() + + assert ( + num_layers_per_pipeline_rank % vp_size == 0 + ), "num_layers_per_pipeline_rank should be divisible by vp_size" + num_layers_per_virtual_rank = num_layers_per_pipeline_rank // vp_size + + num_layers_to_build = num_layers_per_virtual_rank + + else: + # Non-interleaved pipeline parallelism: + # Each stage gets a contiguous set of layers. + num_layers_to_build = num_layers_per_pipeline_rank + + # The embedding (or loss) layer cannot function as a standalone transformer layer + # Reduce the number of layers to construct by 1 on the first (or last) stage if the + # embedding (or loss) layer is included in the pipeline parallelism partition and placement. + if parallel_state.is_pipeline_first_stage() and config.account_for_embedding_in_pipeline_split: + num_layers_to_build -= 1 + assert num_layers_to_build >= 0, "Not enough layers in the first virtual pipeline stage" + + if parallel_state.is_pipeline_last_stage() and config.account_for_loss_in_pipeline_split: + num_layers_to_build -= 1 + assert num_layers_to_build >= 0, "Not enough layers in the last virtual pipeline stage" + + return num_layers_to_build + + +@dataclass +class TransformerBlockSubmodules: + """ + Dataclass for specifying the submodules of a transformer block. + + This class defines the structure for configuring the layers and normalization + within a transformer block, allowing for flexible and customizable architecture designs. + + Args: + layer_specs (List[ModuleSpec], optional): A list of module specifications for + the layers within the transformer block. Each specification typically + defines a complete transformer layer (e.g., self-attention, feed-forward network). + layer_norm (Optional[Union[ModuleSpec, torch.nn.Module]], optional): Specification + or instance of the layer normalization to be applied. 
+ """ + + layer_specs: List[ModuleSpec] = None + layer_norm: Optional[Union[ModuleSpec, torch.nn.Module]] = None + + +def _get_block_submodules( + config: TransformerConfig, spec: Union[TransformerBlockSubmodules, ModuleSpec] +) -> TransformerBlockSubmodules: + """ + Retrieve or construct TransformerBlockSubmodules based on the provided specification. + + Args: + config (TransformerConfig): Configuration object for the transformer model. + spec (Union[TransformerBlockSubmodules, ModuleSpec]): Specification for the + transformer block submodules. Can be either a TransformerBlockSubmodules + instance or a ModuleSpec. + + Returns: + TransformerBlockSubmodules: The submodules for the transformer block. + """ + + # Transformer block submodules. + if isinstance(spec, TransformerBlockSubmodules): + return spec + + # ModuleSpec here is generally assumed to be for a transformer layer that + # is implemented in `transformer_layer.py` or if it subclasses + # `BaseTransformerLayer` from the `transformer_layer.py` file. + elif isinstance(spec, ModuleSpec): + if issubclass(spec.module, TransformerBlock): + return spec.submodules + elif issubclass(spec.module, BaseTransformerLayer): + num_layers = get_num_layers_to_build(config) + return TransformerBlockSubmodules( + layer_specs=[spec] * num_layers, layer_norm=LayerNormImpl + ) + else: + raise Exception(f"specialize for {spec.module.__name__}.") + else: + raise Exception(f"specialize for {type(spec).__name__}.") + + +class TransformerBlock(MegatronModule): + """Transformer class.""" + + def __init__( + self, + config: TransformerConfig, + spec: Union[TransformerBlockSubmodules, ModuleSpec], + post_layer_norm: bool = True, + pre_process: bool = True, + post_process: bool = True, + ): + super().__init__(config=config) + + self.submodules = _get_block_submodules(config, spec) + self.post_layer_norm = post_layer_norm + self.pre_process = pre_process + self.post_process = post_process + # Dictionary to store CUDA graphs. Number of items in the dictionary = len(self.layers). + # Item `i` in the dictionary is a list of `N` CUDA graphs for layer 'i' where N is the + # number of microbatches. Multiple CUDA graphs per layer is required to support + # pipelining which requires running FWD graph of multiple microbatches before BWD graph. + # To enable CUDA graph, this dictionary should be populated in the model training script + # with the graphs returned by make_graphed_callables API before the first trainng step. 
+ self.cuda_graphs = {} + self.current_microbatch = -1 + + # required for pipeline parallel schedules + self.input_tensor = None + + self.checkpoint_core_attention = self.config.recompute_granularity == 'selective' + + if get_cpu_offload_context is not None: + (self.offload_context, self.group_prefetch_offload_commit_async) = ( + get_cpu_offload_context( + self.config.cpu_offloading, + self.config.cpu_offloading_num_layers, + self.config.num_layers, + self.config.cpu_offloading_activations, + self.config.cpu_offloading_weights, + ) + ) + self.config._cpu_offloading_context = ( + self.offload_context if self.config.cpu_offloading else None + ) + else: + assert ( + self.config.cpu_offloading is False + ), "CPU Offloading is enabled when TE is not present" + + self.offload_context, self.group_prefetch_offload_commit_async = nullcontext(), None + self.config._cpu_offloading_context = None + + self._build_layers() + self.num_layers_per_pipeline_rank = len(self.layers) + self.tp_only_amax_red = config.tp_only_amax_red + + def _build_layers(self): + # Transformer layers. + # @jcasper can we improve how we deal with layer_number? + # currently it's only used in CoreAttention? + # if self.apply_query_key_layer_scaling: + # coeff = self.layer_number + # self.norm_factor *= coeff + def build_layer(layer_spec, layer_number): + return build_module(layer_spec, config=self.config, layer_number=layer_number) + + # offset is implicit in TransformerLayer + self.layers = torch.nn.ModuleList( + [ + build_layer(layer_spec, i + 1) + for i, layer_spec in enumerate(self.submodules.layer_specs) + ] + ) + + # @TODO: add back account_for_embedding_in_pipeline_split (see issue #293) + # In pipeline parallelism, we want to add this LN only to the last stage of the pipeline + # self.post_process and self.post_layer_norm guide this behavior + if self.submodules.layer_norm and self.post_process and self.post_layer_norm: + self.final_layernorm = build_module( + self.submodules.layer_norm, + config=self.config, + hidden_size=self.config.hidden_size, + eps=self.config.layernorm_epsilon, + ) + else: + self.final_layernorm = None # Either this or nn.Identity + + def _get_layer(self, layer_number: int): + return self.layers[layer_number] + + def _checkpointed_forward( + self, + hidden_states: Tensor, + attention_mask: Tensor, + context: Tensor, + context_mask: Tensor, + rotary_pos_emb: Tensor, + attention_bias: Tensor, + packed_seq_params: PackedSeqParams, + ): + """Forward method with activation checkpointing.""" + + def custom(start: int, end: int): + def custom_forward( + hidden_states, attention_mask, context, context_mask, rotary_pos_emb + ): + for index in range(start, end): + layer = self._get_layer(index) + hidden_states, context = layer( + hidden_states=hidden_states, + attention_mask=attention_mask, + context=context, + context_mask=context_mask, + rotary_pos_emb=rotary_pos_emb, + attention_bias=attention_bias, + inference_params=None, + packed_seq_params=packed_seq_params, + ) + return hidden_states, context + + return custom_forward + + def checkpoint_handler(forward_func): + """Determines whether to use the `te_checkpoint` or `tensor_parallel.checkpoint`""" + if self.config.fp8: + return te_checkpoint( + forward_func, + self.config.distribute_saved_activations, + tensor_parallel.random.get_cuda_rng_tracker, + parallel_state.get_tensor_model_parallel_group(), + hidden_states, + attention_mask, + context, + context_mask, + rotary_pos_emb, + ) + else: + return tensor_parallel.checkpoint( + forward_func, + 
self.config.distribute_saved_activations, + hidden_states, + attention_mask, + context, + context_mask, + rotary_pos_emb, + ) + + if self.config.recompute_method == 'uniform': + # Uniformly divide the total number of Transformer layers and checkpoint + # the input activation of each divided chunk. + # A method to further reduce memory usage reducing checkpoints. + layer_idx = 0 + while layer_idx < self.num_layers_per_pipeline_rank: + hidden_states, context = checkpoint_handler( + custom(layer_idx, layer_idx + self.config.recompute_num_layers) + ) + + layer_idx += self.config.recompute_num_layers + + elif self.config.recompute_method == 'block': + # Checkpoint the input activation of only a set number of individual + # Transformer layers and skip the rest. + # A method fully use the device memory removing redundant re-computation. + recompute_skip_num_layers = 0 + for layer_idx in range(self.num_layers_per_pipeline_rank): + # Skip recomputation when input grad computation is not needed. + # Need to have at least one input tensor with gradient computation + # for re-enterant autograd engine. + if self.config.fp8 and not hidden_states.requires_grad: + recompute_skip_num_layers += 1 + if ( + layer_idx >= recompute_skip_num_layers + and layer_idx < self.config.recompute_num_layers + recompute_skip_num_layers + ): + hidden_states, context = checkpoint_handler(custom(layer_idx, layer_idx + 1)) + else: + hidden_states, context = custom(layer_idx, layer_idx + 1)( + hidden_states, attention_mask, context, context_mask, rotary_pos_emb + ) + else: + raise ValueError("Invalid activation recompute method.") + + return hidden_states + + def set_input_tensor(self, input_tensor: Tensor): + """Set input tensor to be used instead of forward()'s input. + + When doing pipeline parallelism the input from the previous + stage comes from communication, not from the input, so the + model's forward_step_func won't have it. This function is thus + used by internal code to bypass the input provided by the + forward_step_func""" + self.input_tensor = input_tensor + + def get_cuda_graph_optional_args( + self, + attention_mask: Tensor, + context: Tensor, + context_mask: Tensor, + rotary_pos_emb: Tensor, + attention_bias: Tensor, + inference_params: InferenceParams, + packed_seq_params: PackedSeqParams, + ): + """Get optional tensor arguments for CUDA graph.""" + + optional_inputs = {} + optional_inputs['is_first_microbatch'] = self.current_microbatch == 0 + try: + import transformer_engine.pytorch as te # pylint: disable=unused-import + + if is_te_min_version("1.10.0", check_equality=False): + assert not any( + [attention_mask, context, context_mask, rotary_pos_emb] + ), "Keyword Arguments not supported with CUDA graph." + else: + optional_inputs['attention_mask'] = attention_mask + optional_inputs['context'] = context + optional_inputs['context_mask'] = context_mask + optional_inputs['rotary_pos_emb'] = rotary_pos_emb + except ImportError: + raise RuntimeError("CUDAGraph requires TransformerEngine, but not installed") + return optional_inputs + + def forward( + self, + hidden_states: Tensor, + attention_mask: Tensor, + context: Tensor = None, + context_mask: Tensor = None, + rotary_pos_emb: Tensor = None, + rotary_pos_cos: Tensor = None, + rotary_pos_sin: Tensor = None, + attention_bias: Tensor = None, + inference_params: InferenceParams = None, + packed_seq_params: PackedSeqParams = None, + sequence_len_offset: Tensor = None, + ): + """ + Perform the forward pass through the transformer block. 
+ + This method handles the core computation of the transformer, including + self-attention, optional cross-attention, and feed-forward operations. + + Args: + hidden_states (Tensor): Input tensor of shape [s, b, h] where s is the + sequence length, b is the batch size, and h is the hidden size. + attention_mask (Tensor): Boolean tensor of shape [1, 1, s, s] for masking + self-attention. + context (Tensor, optional): Context tensor for cross-attention. + context_mask (Tensor, optional): Mask for cross-attention context + rotary_pos_emb (Tensor, optional): Rotary positional embeddings. + attention_bias (Tensor): Bias tensor for Q * K.T of shape in shape broadcastable + to [b, num_head, sq, skv], e.g. [1, 1, sq, skv]. + Used as an alternative to apply attention mask for TE cuDNN attention. + inference_params (InferenceParams, optional): Parameters for inference-time + optimizations. + packed_seq_params (PackedSeqParams, optional): Parameters for packed sequence + processing. + + Returns: + Union[Tensor, Tuple[Tensor, Tensor]]: The output hidden states tensor of shape + [s, b, h], and optionally the updated context tensor if cross-attention is used. + """ + + if not self.pre_process: + # See set_input_tensor() + hidden_states = self.input_tensor + + # Update the inference parameters with the current batch size in case it is variable + if inference_params and not self.training: + inference_params.current_batch_size = hidden_states.size(1) + + # Viewless tensor. + # - We only need to create a viewless tensor in the case of micro batch + # size (mbs) == 1, since in this case, 'hidden_states.transpose()' + # above creates a view tensor, and '.contiguous()' is a pass-through. + # For mbs >= 2, '.contiguous()' creates a new tensor, eliminating + # the need to make it viewless. + # + # However, we don't explicitly check mbs == 1 here because + # make_viewless_tensor() has negligible overhead when its input + # is already viewless. + # + # - For the 'else' case above, calling make_viewless_tensor() here is + # likely redundant, since p2p_communication.py (likely originator) + # already creates viewless tensors. That said, make_viewless_tensor() + # is called here to be future-proof and corner-case-proof. + hidden_states = make_viewless_tensor(inp=hidden_states, requires_grad=True, keep_graph=True) + + if self.config.sequence_parallel: + rng_context = tensor_parallel.get_cuda_rng_tracker().fork() + else: + rng_context = nullcontext() + + if self.config.fp8: + import transformer_engine # To keep out TE dependency when not training in fp8 + + if self.config.fp8 == "e4m3": + fp8_format = transformer_engine.common.recipe.Format.E4M3 + elif self.config.fp8 == "hybrid": + fp8_format = transformer_engine.common.recipe.Format.HYBRID + else: + raise ValueError("E4M3 and HYBRID are the only supported FP8 formats.") + + fp8_recipe = TEDelayedScaling( + config=self.config, + fp8_format=fp8_format, + override_linear_precision=(False, False, not self.config.fp8_wgrad), + ) + fp8_group = None + if parallel_state.model_parallel_is_initialized(): + fp8_group = parallel_state.get_amax_reduction_group( + with_context_parallel=True, tp_only_amax_red=self.tp_only_amax_red + ) + fp8_context = transformer_engine.pytorch.fp8_autocast( + enabled=True, fp8_recipe=fp8_recipe, fp8_group=fp8_group + ) + else: + fp8_context = nullcontext() + + with rng_context, fp8_context: + # Forward pass. 
+ if self.config.recompute_granularity == 'full' and self.training: + hidden_states = self._checkpointed_forward( + hidden_states=hidden_states, + attention_mask=attention_mask, + context=context, + context_mask=context_mask, + rotary_pos_emb=rotary_pos_emb, + attention_bias=attention_bias, + packed_seq_params=packed_seq_params, + ) + else: + for l_no, layer in enumerate(self.layers): + with self.offload_context: + layer.use_cudagraph = True + if (len(self.cuda_graphs) == 0) or (not self.training): + hidden_states, context = layer( + hidden_states=hidden_states, + attention_mask=attention_mask, + context=context, + context_mask=context_mask, + rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, + attention_bias=attention_bias, + inference_params=inference_params, + packed_seq_params=packed_seq_params, + sequence_len_offset=sequence_len_offset, + ) + else: + # CUDA graph replay for layer `l_no` and microbatch + # `self.current_microbatch`. TransformerEngine versions>=1.10 + # allow keyword arguments with CUDA graph. However, CUDA graph + # acccepts only Tensor inputs and Tensor outputs. Hence, + # `inference_params` and `packed_seq_params` are excluded from + # input list while output is limited to `hidden_states`. + cg_index = self.current_microbatch % len(self.cuda_graphs[l_no]) + assert not any( + [inference_params, packed_seq_params] + ), "CUDA graph accepts only Tensor inputs." + optional_inputs = self.get_cuda_graph_optional_args( + attention_mask, + context, + context_mask, + rotary_pos_emb, + attention_bias, + inference_params, + packed_seq_params, + ) + hidden_states = self.cuda_graphs[l_no][cg_index]( + hidden_states, **optional_inputs + ) + + if ( + torch.is_grad_enabled() + and self.config.cpu_offloading + and self.group_prefetch_offload_commit_async is not None + ): + hidden_states = self.group_prefetch_offload_commit_async(hidden_states) + + # Final layer norm. + if self.final_layernorm is not None: + hidden_states = self.final_layernorm(hidden_states) + # TENorm produces a "viewed" tensor. This will result in schedule.py's + # deallocate_output_tensor() throwing an error, so a viewless tensor is + # created to prevent this. + hidden_states = make_viewless_tensor( + inp=hidden_states, requires_grad=True, keep_graph=True + ) + + return hidden_states + + def sharded_state_dict( + self, prefix: str = '', sharded_offsets: tuple = (), metadata: dict = None + ) -> ShardedStateDict: + """ + Generate a sharded state dictionary for the transformer block. + + Args: + prefix (str, optional): Prefix to be added to all keys in the state dict. + Defaults to an empty string. + sharded_offsets (tuple, optional): Tuple of sharding offsets. + metadata (dict, optional): Additional metadata for sharding. + Can specify if layers are non-homogeneous. Defaults to None. + + Returns: + ShardedStateDict: A dictionary containing the sharded state of the model. + """ + assert not sharded_offsets, "Unexpected sharded offsets" + non_homogeneous_layers = metadata is not None and metadata.get( + 'non_homogeneous_layers', False + ) + if isinstance(self.config.moe_layer_freq, int): + if self.config.moe_layer_freq > 1: + non_homogeneous_layers = True + elif isinstance(self.config.moe_layer_freq, list): + non_homogeneous_layers = True + + sharded_state_dict = {} + + layer_prefix = f'{prefix}layers.' 
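# Illustrative sketch (made-up numbers) of the prefix/offset bookkeeping in
# the loop below: a pipeline rank owning global layers 4..7 stores them
# locally as layers.0 .. layers.3, while the sharded keys stay global (or
# carry a PP offset when layers are homogeneous).
example_prefix = 'decoder.'
example_layer_prefix = f'{example_prefix}layers.'
example_num_layers = 16
first_global_layer = 4                  # offset of this pipeline rank
non_homogeneous = False

for global_layer in range(first_global_layer, first_global_layer + 4):
    local_key = f'{example_layer_prefix}{global_layer - first_global_layer}.'
    if non_homogeneous:
        sharded_key = f'{example_layer_prefix}{global_layer}.'
        pp_offset = []
    else:
        sharded_key = example_layer_prefix
        pp_offset = [(0, global_layer, example_num_layers)]
    print(local_key, '->', sharded_key, pp_offset)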
+ num_layers = self.config.num_layers + for layer in self.layers: + offset = TransformerLayer._get_layer_offset(self.config) + + global_layer_offset = layer.layer_number - 1 # self.layer_number starts at 1 + state_dict_prefix = f'{layer_prefix}{global_layer_offset - offset}.' # module list index in TransformerBlock # pylint: disable=line-too-long + if non_homogeneous_layers: + sharded_prefix = f'{layer_prefix}{global_layer_offset}.' + sharded_pp_offset = [] + else: + sharded_prefix = layer_prefix + sharded_pp_offset = [ + (0, global_layer_offset, num_layers) + ] # PP sharding offset for ShardedTensors + layer_sharded_state_dict = layer.sharded_state_dict( + state_dict_prefix, sharded_pp_offset, metadata + ) + replace_prefix_for_sharding(layer_sharded_state_dict, state_dict_prefix, sharded_prefix) + + sharded_state_dict.update(layer_sharded_state_dict) + + # Add modules other than self.layers + for name, module in self.named_children(): + if not module is self.layers: + sharded_state_dict.update( + sharded_state_dict_default( + module, f'{prefix}{name}.', sharded_offsets, metadata + ) + ) + + return sharded_state_dict diff --git a/megatron/core/transformer/transformer_config.py b/megatron/core/transformer/transformer_config.py index adc97a5..683e51d 100644 --- a/megatron/core/transformer/transformer_config.py +++ b/megatron/core/transformer/transformer_config.py @@ -1,672 +1,932 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from dataclasses import dataclass -from typing import Callable, List, Optional, Tuple, Union - -import torch.nn.functional as F - -from megatron.core.transformer.enums import AttnBackend - -from ..model_parallel_config import ModelParallelConfig -from ..utils import get_te_version, init_method_normal, is_te_min_version, scaled_init_method_normal - - -@dataclass -class TransformerConfig(ModelParallelConfig): - """Configuration object for megatron-core transformers. - - The initialization function has an argument for each parameter, - including those in ModelParallelConfig. - """ - - #################### - # model architecture - #################### - num_layers: int = 0 - """Number of transformer layers in a transformer block.""" - - first_pipeline_num_layers: int = None - """Number of transformer layers on first pipeline stage. - None implies equal layer division across PP ranks.""" - - last_pipeline_num_layers: int = None - """Number of transformer layers on last pipeline stage. - None implies equal layer division across PP ranks.""" - - hidden_size: int = 0 - """Transformer hidden size.""" - - num_attention_heads: int = 0 - """Number of transformer attention heads.""" - - attention_backend: AttnBackend = AttnBackend.auto - """Attention backend to run. By default we let transformer engine - decide the best backend to run (except in the case of local). - If attention backend is local we use the local pytorch implementation in mcore. - Users can specify exact backend by changing this config. """ - - num_query_groups: int = None - """Number of query groups for group query attention. If None, normal attention is used.""" - - ffn_hidden_size: int = None - """Transformer Feed-Forward Network hidden size. This is set to 4*hidden_size - if not provided.""" - - kv_channels: int = None - """Projection weights dimension in multi-head attention. 
This is set to hidden_size // - num_attention_heads if not provided.""" - - hidden_dropout: float = 0.1 - """Dropout probability for transformer hidden state.""" - - attention_dropout: float = 0.1 - """Post attention dropout probability.""" - - fp32_residual_connection: bool = False - """If true, move residual connections to fp32.""" - - # @jcasper should we keep this option? - apply_residual_connection_post_layernorm: bool = False - """If True, uses the original BERT residule connection ordering.""" - - layernorm_epsilon: float = 1e-5 - """Epsilon value for any LayerNorm operations.""" - - layernorm_zero_centered_gamma: bool = False - """If set to True, the LayerNorm is adjusted to center the gamma values around 0. This improves - numerical stability.""" - - add_bias_linear: bool = True - """Include a bias term in all linear layers (QKV projections, after core attention, and two in - MLP layer).""" - - add_qkv_bias: bool = False - """Add a bias term only for QKV projections.""" - - gated_linear_unit: bool = False - """Use a gated linear unit for the first linear layer in the MLP.""" - - activation_func: Callable = F.gelu - """Activation function to use for the non-linearity in the MLP.""" - - activation_func_fp8_input_store: bool = False - """Store the input of MLP activation function in FP8 for backprop to save memory. - The stored input is casted back to the original precision before backprop compuatation.""" - - num_moe_experts: int = None - """Number of experts to use for MoE layer. When set, it replaces MLP with MoE layer. Set to None - for no MoE.""" - - rotary_interleaved: bool = False - """True is rotate pairs of even and odd dimensions (RoFormer style), False is rotate pairs of - first half and second half (LLaMa style). Default to False.""" - - window_size: Optional[Tuple[int, int]] = None - """If not None, then will use sliding window attention. The size of the window is specified by - the numbers inside the tuple; -1 is special value meaning "infinite window size".""" - - normalization: bool = "LayerNorm" - """Which norm to use for normalization layers, valid options are `LayerNorm` and `RMSNorm`.""" - - qk_layernorm: bool = False - """Whether to apply LayerNorm to the query and key embeddings.""" - - test_mode: bool = False - """Whether to run real-time tests.""" - - calculate_per_token_loss: bool = False - """Whether cross entropy loss is calculated over the actual number of non-padded tokens in the - global batch, versus the default behavior of assuming all tokens are non-padded.""" - - multi_latent_attention: bool = False - """Whether to use multi-latent attention.""" - - #################### - # initialization - #################### - init_method: Callable = None - """Method to initialize weights. Note that bias is always set to zero. Should be a function that - takes a single Tensor and initializes it. If None, will be set to - megatron.core.utils.init_method_normal(init_method_std) which is torch nn init normal with - mean=0.0 and std=init_method_std.""" - - output_layer_init_method: Callable = None - """Method to initialize weights of the output layer of both attention and MLP blocks. 
If None, - will be set to megatron.core.utils.scaled_init_method_normal(init_method_std) which is torch nn - init normal with mean=0.0 and std=init_method_std / math.sqrt(2.0 * num_layers).""" - - init_method_std: float = 0.02 - """Standard deviation of the zero mean normal for the default initialization method, not used if - init_method and output_layer_init_method are provided.""" - - #################### - # mixed-precision - #################### - apply_query_key_layer_scaling: bool = False - """If true, scale Q * K^T by 1 / layer-number. This improve numeric stability when training with - fp16.""" - - attention_softmax_in_fp32: bool = True - """If True, run attention masking and softmax in fp32. This should be True if - apply_query_key_layer_scaling is True.""" - - #################### - # fusion - #################### - bias_activation_fusion: bool = False - """If True, fuses bias addition and the activation function when possible.""" - - masked_softmax_fusion: bool = False - """If True, uses softmax fusion.""" - - persist_layer_norm: bool = False - """If True, uses the persistent fused layer norm kernel. This kernel only supports a fixed set - of hidden sizes.""" - - memory_efficient_layer_norm: bool = False - """If True, and using local layers (not from TransformerEngine), tells Apex to use the memory - efficient fused LayerNorm kernel. Ignored if not using LayerNorm.""" - - bias_dropout_fusion: bool = False # TODO: this should be bias_dropout_add_fusion? - """If True, uses bias dropout fusion.""" - - apply_rope_fusion: bool = False - """If True, use fused RoPE kernel.""" - - #################### - # activation recomputation - #################### - recompute_granularity: str = None - """Determines which type of activation recompute to use. Megatron-core supports 'selective' - activation checkpointing where only the memory intensive part of attention is checkpointed. - These memory intensive activations are also less compute intensive which makes activation - checkpointing more efficient for LLMs (20B+). See Reducing Activation Recomputation in Large - Transformer Models (https://arxiv.org/abs/2205.05198) for more details. 'full' will checkpoint - the entire transformer layer. If None, no recompute is performed and all activations are saved. - If set, must be 'selective' or 'full'. 'selective' always uses all layers. - """ - - recompute_method: str = None - """Determines which transformer layers will be recomputed. uniform will uniformly divide the - total number of transformer layers in a transformer block and recompute the input activation of - each divided chunk at the specified granularity. block will recompute the input activations for - only a set number of transformer layers per pipeline stage. The rest of the layers in the - pipeline stage will not have any activations recomputed. If None, and recompute is enabled, all - layers will do recomputation. If set, must be 'uniform' or 'block'.""" - - recompute_num_layers: int = None - """When recompute_method is uniform, recompute_num_layers is the number of transformer layers in - each uniformly divided recompute unit. When recompute_method is block, recompute_num_layers is - the number of transformer layers to recompute within each pipeline stage. 
Must be None for - 'selective' activation checkpointing.""" - - distribute_saved_activations: bool = None - """If True, distribute recomputed activations across the model parallel group.""" - - #################### - # fp8 related - #################### - fp8: str = None - """If set, enables the use of FP8 precision through Transformer Engine. There are 2 predefined - choices (1) 'e4m3' uniformly uses e4m3 for all FP8 tensors, (2) 'hybrid' uses e4m3 for all FP8 - activation and weight tensors and e5m2 for all FP8 output activation gradient tensors.""" - - fp8_margin: int = 0 - """Margin for the scaling factor computation.""" - - fp8_interval: int = 1 - """DEPRECATED from TransformerEngine v1.8.0. This flag is ignored. - Controls how often the scaling factor is recomputed. - """ - - fp8_amax_history_len: int = 1 - """The length of the amax history window used for scaling factor computation.""" - - fp8_amax_compute_algo: str = "most_recent" - """Algorithm used for choosing the `amax` value for the scaling factor computation. There are 2 - predefined choices: `max` chooses the largest `amax` in the history window, while `most_recent` - always chooses the most recently seen value. - - """ - - fp8_wgrad: bool = True - """When set to False, override FP8 config options and do the wgrad computation - in higher precision.""" - - fp8_dot_product_attention: bool = False - """When set to True, use the FP8 implementation of Dot Product Attention.""" - - fp8_multi_head_attention: bool = False - """When set to True, use the FP8 implementation of Multi Head Attention.""" - - tp_only_amax_red: bool = False - """When set to True, reduce the FP8 AMAX only in the TP or TP-CP domain""" - - #################### - # MoE related - #################### - moe_shared_expert_intermediate_size: int = None - """Shared expert total ffn hidden size. - It should be equal to 'num_shared_experts * ffn_size_of_each_shared_expert' if - there are multiple shared experts. - None means no shared expert.""" - - moe_shared_expert_overlap: bool = False - """Enable overlapping between shared expert computations and dispatcher communications. - Without this, the shared epxerts execute after the routed experts.""" - - moe_layer_freq: int = 1 - """Frequency between MoE layers and Dense layers. Accepts either: - - An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers. - - A string containing a Python list expression that defines a custom pattern, e.g.: - "([1]*3+[0]*1)*3" evaluates to [1,1,1,0,1,1,1,0,1,1,1,0] - where 1 indicates an expert layer and 0 indicates a dense layer.""" - - moe_ffn_hidden_size: int = None - """MoE Feed-Forward Network hidden size""" - - moe_router_load_balancing_type: str = "aux_loss" - """The load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss - used in GShard and SwitchTransformer; "seq_aux_loss" corresponds to the loss used in DeepSeekV2, - which computes the loss for each individual sample; "sinkhorn" corresponds to the balancing - algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss".""" - - moe_router_topk: int = 2 - """Number of experts to route to for each token.""" - - moe_router_topk_limited_devices: int = None - """Number of expert parallel ranks to consider for each token during routing. Perform top-k - routing on a subset of expert parallel ranks by first selecting N ranks for each token, then - conducting top-k selection among experts on these devices. 
None means no device limitation.""" - - moe_router_pre_softmax: bool = False - """Enable pre-softmax routing for MoE, which means softmax is before the top-k selection. - By default, softmax is done after top-k.""" - - moe_router_topk_scaling_factor: float = None - """Scaling factor for routing score in top-k selection, only works when moe_router_pre_softmax - enabled. Defaults to None, which means no scaling.""" - - moe_grouped_gemm: bool = False - """When there are multiple experts per rank, compress multiple local (potentially small) gemms - in a single kernel launch to improve the utilization and performance by leveraging the Grouped - GEMM feature introduced since CUTLASS 2.8 (https://github.com/fanshiqing/grouped_gemm). - """ - - moe_use_legacy_grouped_gemm: bool = False - """Use legacy GroupedMLP rather than TEGroupedMLP. - Note: The legacy one will be deprecated soon.""" - - moe_aux_loss_coeff: float = 0 # 1e-2 would be a good start value for load balance loss. - """Scaling coefficient for the aux loss. A starting value of 1e-2 is recommended.""" - - moe_z_loss_coeff: float = None # 1e-3 would be a good start value for z-loss - """Scaling coefficient for the z-loss. A starting value of 1e-3 is recommended.""" - - moe_input_jitter_eps: float = None - """Add noise to the input tensor by applying jitter with a specified epsilon value.""" - - moe_token_dropping: bool = False # TODO: Support token dropping. - """This feature involves selectively dropping and padding tokens for each expert to achieve a - specified capacity, similar to GShard, Switch-Transformer, and DeepSpeed-MoE. Note that this is - currently unsupported so should remain False.""" - - moe_token_dispatcher_type: str = "allgather" - """The type of token dispatcher to use. The default is 'allgather'. - Options are 'allgather' and 'alltoall'.""" - - moe_per_layer_logging: bool = False - """Enable per-layer logging for MoE, currently supports auxiliary loss and z loss.""" - - moe_expert_capacity_factor: float = None - """moe_expert_capacity_factor (float): The capacity factor for each expert, None means no token - will be dropped. The default is None.""" - - moe_pad_expert_input_to_capacity: bool = False - """moe_pad_expert_input_to_capacity (bool): If True, pads the input for each expert to match - the expert capacity length, effective only after the moe_expert_capacity_factor is set. The - default setting is False.""" - - moe_token_drop_policy: str = 'probs' - """The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with - the lowest probabilities will be dropped. If "position", tokens at the end of each batch will - be dropped. - """ - - moe_layer_recompute: bool = False - """Memory optimization: checkpointing moe_layer to save actiavtion memory.""" - - ################## - # Context Parallel - ################## - cp_comm_type: Union[str, List[str]] = None - """Inter-gpu communication type for context parallelism. - str: all layers share same communication type. - List[str]: each layer has its separate communication type. - cp_comm_type of each layer can be "p2p" or "all_gather" or "a2a" or "a2a+p2p". - "p2p": Exchange KV chunks with P2P communications in ring topology. P2P is async and can be - overlapped with attention compute. - "all_gather": All-gather to get full sequence of KV before attention. The all-gather is not - async, and cannot be overlapped. - "a2a": Like DeepSpeed Ulysses, scatter attention heads across the CP group, and gather to get - full sequence of QKV. 
- "a2a+p2p": A hierarchical implementation of context parallelism to attention. - It uses A2A communications in low-level CP groups (e.g., via NVLink), - and P2P communications in high-level CP groups (e.g., via IBLink). - """ - - #################### - # miscellaneous - #################### - clone_scatter_output_in_embedding: bool = True - """When set to True, clone the output of scatter_to_sequence_parallel_region in embedding layer - to facilitate garbage collection of input.""" - - disable_parameter_transpose_cache: bool = False - """When set to true, the parameter transposes are not cached for subsequent iterations.""" - - enable_cuda_graph: bool = False - """When set to true, TransformerLayer layers are swapped with a CUDA graphed version.""" - - cuda_graph_retain_backward_graph: bool = False - """When set to true, cudagraph backward passes will be graph captured with 'retain_grad=True' - This may enable cudagraphs for certain modules that are not completely cudagraph safe. For - more details, see: https://pytorch.org/docs/stable/generated/torch.Tensor.backward.html.""" - - external_cuda_graph: bool = False - """When set to true, TransformerLayer layers are swapped with user provided CUDA graphs.""" - - config_logger_dir: str = "" - """When non-empty, dumps entry-point configs to config_logger_dir""" - - flash_decode: bool = False - """ Use the optimized flash decoding kernel during inference. """ - - def __post_init__(self): - """Python dataclass method that is used to modify attributes after initialization. - See https://docs.python.org/3/library/dataclasses.html#post-init-processing for more - details. - """ - super().__post_init__() - if self.fp16 and self.bf16: - raise ValueError( - f'Only one of self.fp16: {self.fp16} and self.bf16 {self.bf16} should be True.' - ) - - if self.num_attention_heads % self.tensor_model_parallel_size != 0: - raise ValueError( - f"num_attention_heads ({self.num_attention_heads}) must be a multiple of " - f"tensor_model_parallel_size ({self.tensor_model_parallel_size})." - ) - - if self.ffn_hidden_size is None: - self.ffn_hidden_size = 4 * self.hidden_size - - if self.kv_channels is None: - self.kv_channels = self.hidden_size // self.num_attention_heads - - if self.num_query_groups is None: - self.num_query_groups = self.num_attention_heads - - if self.num_query_groups % self.tensor_model_parallel_size != 0: - raise ValueError( - f"num_query_groups ({self.num_query_groups}) must be a multiple of " - f"tensor_model_parallel_size ({self.tensor_model_parallel_size})." - ) - - if self.apply_query_key_layer_scaling: - self.attention_softmax_in_fp32 = True - - if self.expert_model_parallel_size > 1 and self.num_moe_experts is None: - raise ValueError('num_moe_experts must be non None to use expert-parallel.') - - if self.num_moe_experts is not None and self.num_moe_experts <= 0: - raise ValueError('num_moe_experts must be non-negative.') - - if self.moe_ffn_hidden_size is None: - self.moe_ffn_hidden_size = self.ffn_hidden_size - - if self.moe_shared_expert_intermediate_size is not None: - if self.moe_shared_expert_intermediate_size <= 0: - raise ValueError( - f'moe_shared_expert_intermediate_size must be ' - f'num_shared_experts * ffn_size_of_each_shared_expert, ' - f'but got {self.moe_shared_expert_intermediate_size}' - ) - if self.moe_shared_expert_overlap and self.moe_token_dispatcher_type not in [ - "alltoall" - ]: - raise ValueError( - f'moe_shared_expert_overlap only works with alltoall token dispatcher.' 
- ) - - if self.moe_expert_capacity_factor is not None: - if self.moe_token_dispatcher_type not in ["alltoall", "alltoall_seq"]: - raise ValueError( - 'moe_expert_capacity_factor only works with alltoall token dispatcher' - ) - if self.moe_expert_capacity_factor < 0: - self.moe_expert_capacity_factor = None - if self.moe_router_load_balancing_type not in ["aux_loss", "seq_aux_loss", "none"]: - raise ValueError( - 'moe_expert_capacity_factor only works with aux_loss or none load balancing' - ) - - if self.moe_pad_expert_input_to_capacity: - if self.moe_expert_capacity_factor is None: - raise ValueError( - 'moe_expert_capacity_factor must be set to use moe_pad_expert_input_to_capacity' - ) - - if self.cpu_offloading and ( - self.cpu_offloading_num_layers < 0 or self.cpu_offloading_num_layers >= self.num_layers - ): - raise ValueError( - f'CPU offloading can be done only for layers less than {self.num_layers}' - ) - - if self.cpu_offloading and self.pipeline_model_parallel_size > 1: - raise ValueError( - 'Currently there is no support for Pipeline parallelism with CPU offloading' - ) - - if self.cpu_offloading and self.recompute_granularity is not None: - raise ValueError( - 'CPU offloading does not work when activation recomputation is enabled' - ) - - if self.recompute_granularity is not None: - if self.recompute_granularity not in ['full', 'selective']: - raise ValueError( - f'When using recompute_granuarlity: {self.recompute_granularity} must be "full"' - 'or "selective".' - ) - - if self.recompute_method is not None: - if self.recompute_method not in ['block', 'uniform']: - raise ValueError( - f'recompute_method: {self.recompute_method} must be "block" or "uniform".' - ) - elif self.recompute_granularity != 'selective': - raise ValueError( - f'Using recompute_granularity: {self.recompute_granularity} so ' - 'recompute_method must be "block" or "uniform"' - ) - - if self.recompute_granularity != 'selective' and self.recompute_num_layers is None: - raise ValueError( - f'When using recompute_granularity: {self.recompute_granularity} ' - 'recompute_num_layers must be between ' - '1 and num_layers_per_pipeline_rank: ' - f'{self.num_layers // self.pipeline_model_parallel_size}' - ) - elif ( - self.recompute_granularity == 'selective' and self.recompute_num_layers is not None - ): - raise ValueError( - f'When using recompute_granularity: {self.recompute_granularity} ' - 'recompute_num_layers must be None.' - ) - - if self.distribute_saved_activations and self.sequence_parallel: - raise ValueError( - f'distribute_saved_activations: {self.distribute_saved_activations} must be ' - f'false when sequence parallel is enabled: {self.sequence_parallel}' - ) - - if self.virtual_pipeline_model_parallel_size is not None: - if not self.num_layers % self.virtual_pipeline_model_parallel_size == 0: - raise ValueError( - f'num_layers: {self.num_layers} must be divisible by ' - f'virtual_model_parallel_size {self.virtual_pipeline_model_parallel_size}' - ) - - if self.apply_query_key_layer_scaling: - self.attention_softmax_in_fp32 = True - - if self.bias_activation_fusion: - if self.activation_func not in [F.gelu, F.silu]: - raise ValueError( - "When bias_activation_fusion is True, activation function should be either " - "gelu or swiglu" - ) - if ( - self.activation_func == F.gelu - and not self.gated_linear_unit - and not self.add_bias_linear - ): - raise ValueError( - "When bias_activation_fusion is True, gated_linear_unit is False, " - "and activation function is gelu, add_bias_linear must also be True." 
- ) - - if self.activation_func_fp8_input_store: - if self.activation_func != F.silu or not self.gated_linear_unit: - raise ValueError("Storing activation input in FP8 is supported only for SwiGLU.") - - if self.apply_rope_fusion: - if self.rotary_interleaved: - raise ValueError("rotary_interleaved does not work with apply_rope_fusion.") - - from megatron.core.models.common.embeddings.rope_utils import ( - fused_apply_rotary_pos_emb, - fused_apply_rotary_pos_emb_thd, - ) - - if fused_apply_rotary_pos_emb is None and fused_apply_rotary_pos_emb_thd is None: - raise ValueError( - "apply_rope_fusion is not available. Please install TE >= 1.4 or Apex." - ) - - if self.multi_latent_attention and self.rotary_interleaved: - raise ValueError("rotary_interleaved does not work with multi_latent_attention.") - - if self.init_method is None: - self.init_method = init_method_normal(self.init_method_std) - - if self.output_layer_init_method is None: - self.output_layer_init_method = scaled_init_method_normal( - self.init_method_std, self.num_layers - ) - - if ( - self.moe_token_dispatcher_type == "alltoall_seq" - and self.tensor_model_parallel_size != self.expert_tensor_parallel_size - ): - raise ValueError( - "alltoall_seq dispatcher not support different TP size for MoE and Dense layer." - ) - - if self.num_moe_experts and self.fp8: - # TE version below 1.7.0 will raise Error when handle zeros tokens for expert - if not is_te_min_version("1.7.0.dev0"): - raise ValueError( - "Only transformer-engine>=1.7.0 supports MoE FP8 training, " - f"but your version is {get_te_version()}." - ) - - if self.moe_grouped_gemm and not is_te_min_version("1.11.0"): - raise ValueError( - "Only transformer-engine>=1.11.0 supports FP8 grouped gemm, " - f"but your version is {get_te_version()}." - ) - - if self.moe_router_topk_limited_devices: - if self.moe_router_topk_limited_devices > self.expert_model_parallel_size: - raise ValueError( - f"moe_router_topk_limited_devices: {self.moe_router_topk_limited_devices} " - f"must be smaller than expert_model_parallel_size " - f"{self.expert_model_parallel_size}" - ) - - if self.flash_decode and self.fp8: - raise ValueError("FP8 inference is currently not support with flash decoding.") - - if self.moe_token_dispatcher_type in ['allgather', 'alltoall_seq']: - if self.variable_seq_lengths is True: - raise ValueError( - f"Token dispatcher type: {self.moe_token_dispatcher_type} does not support " - f"variable sequence length, please use alltoall dispatcher instead." - ) - - if self.cp_comm_type is not None: - if isinstance(self.cp_comm_type, list): - assert len(self.cp_comm_type) == self.num_layers, ( - f"Length of cp_comm_type ({len(self.cp_comm_type)}) should equal to " - f"the total number of transformer layers ({self.num_layers})!" - ) - else: - assert isinstance( - self.cp_comm_type, str - ), "Unsupported communication type for context parallelism!" - - -@dataclass -class MLATransformerConfig(TransformerConfig): - """Configuration object for megatron-core Multi-Latent Attention (MLA) transformers. - - The initialization function has an argument for each parameter, including those in - ModelParallelConfig. Included YaRN RoPE parameters that is fused in MLA. 
- """ - - multi_latent_attention: bool = True - """Whether to use Multi-Latent Attention.""" - - q_lora_rank: int = 512 - """Rank of Query tensor's low rank representation.""" - - kv_lora_rank: int = 512 - """Rank of Key and Value tensors' low rank representation.""" - - qk_head_dim: int = 128 - """Dimension of the head in the QK projection. q_head_dim = qk_head_dim + qk_pos_emb_head_dim""" - - qk_pos_emb_head_dim: int = 64 - """Dimension of the position embedding in the QK projection.""" - - v_head_dim: int = 128 - """Dimension of the head in the V projection.""" - - rotary_base: float = 10000 - """Rotary base for the rotary embeddings.""" - - rotary_scaling_factor: float = 40 - """Rotary scaling factor for the rotary embeddings.""" - - normalization: str = "RMSNorm" - """Default normalization layer for MLA models is RMSNorm.""" - - max_position_embeddings: int = 163840 - """Maximum position embeddings for the original model.""" - - beta_fast: float = 32 - """Beta fast for YaRN RoPE.""" - - beta_slow: float = 1 - """Beta slow for YaRN RoPE.""" - - mscale: float = 0.707 - """Mscale for YaRN RoPE in Multi-Latent Attention.""" - - mscale_all_dim: float = 0.707 - """Mscale all dimensions for YaRN RoPE in Multi-Latent Attention.""" +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import warnings +from dataclasses import dataclass +from typing import Callable, List, Optional, Tuple, Union + +import torch.nn.functional as F + +from megatron.core.transformer.enums import AttnBackend + +from ..model_parallel_config import ModelParallelConfig +from ..utils import get_te_version, init_method_normal, is_te_min_version, scaled_init_method_normal + + +@dataclass +class TransformerConfig(ModelParallelConfig): + """Configuration object for megatron-core transformers. + + The initialization function has an argument for each parameter, + including those in ModelParallelConfig. + """ + + #################### + # model architecture + #################### + num_layers: int = 0 + """Number of transformer layers in a transformer block.""" + + num_layers_in_first_pipeline_stage: Optional[int] = None + """Number of transformer layers on first pipeline stage. + None implies equal layer division across PP ranks.""" + + num_layers_in_last_pipeline_stage: Optional[int] = None + """Number of transformer layers on last pipeline stage. + None implies equal layer division across PP ranks.""" + + account_for_embedding_in_pipeline_split: bool = False + """If set, the embedding layer will be treated as a standard transformer + layer in the context of partition and placement for pipeline parallelism.""" + + account_for_loss_in_pipeline_split: bool = False + """If set, the loss layer will be treated as a standard transformer + layer in the context of partition and placement for pipeline parallelism.""" + + hidden_size: int = 0 + """Transformer hidden size.""" + + num_attention_heads: int = 0 + """Number of transformer attention heads.""" + + attention_backend: AttnBackend = AttnBackend.auto + """Attention backend to run. By default we let transformer engine + decide the best backend to run (except in the case of local). + If attention backend is local we use the local pytorch implementation in mcore. + Users can specify exact backend by changing this config. """ + + softmax_scale: Optional[float] = None + """Softmax scale for attention scaling.""" + + num_query_groups: Optional[int] = None + """Number of query groups for group query attention. 
If None, normal attention is used.""" + + ffn_hidden_size: Optional[int] = None + """Transformer Feed-Forward Network hidden size. This is set to 4*hidden_size + if not provided.""" + + kv_channels: Optional[int] = None + """Projection weights dimension in multi-head attention. This is set to hidden_size // + num_attention_heads if not provided.""" + + hidden_dropout: float = 0.1 + """Dropout probability for transformer hidden state.""" + + attention_dropout: float = 0.1 + """Post attention dropout probability.""" + + fp32_residual_connection: bool = False + """If true, move residual connections to fp32.""" + + # @jcasper should we keep this option? + apply_residual_connection_post_layernorm: bool = False + """If True, uses the original BERT residule connection ordering.""" + + layernorm_epsilon: float = 1e-5 + """Epsilon value for any LayerNorm operations.""" + + layernorm_zero_centered_gamma: bool = False + """If set to True, the LayerNorm is adjusted to center the gamma values around 0. This improves + numerical stability.""" + + add_bias_linear: bool = True + """Include a bias term in all linear layers (QKV projections, after core attention, and two in + MLP layer).""" + + add_qkv_bias: bool = False + """Add a bias term only for QKV projections.""" + + gated_linear_unit: bool = False + """Use a gated linear unit for the first linear layer in the MLP.""" + + activation_func: Callable = F.gelu + """Activation function to use for the non-linearity in the MLP.""" + + activation_func_fp8_input_store: bool = False + """Store the input of MLP activation function in FP8 for backprop to save memory. + The stored input is casted back to the original precision before backprop compuatation.""" + + num_moe_experts: Optional[int] = None + """Number of experts to use for MoE layer. When set, it replaces MLP with MoE layer. Set to None + for no MoE.""" + + rotary_interleaved: bool = False + """True is rotate pairs of even and odd dimensions (RoFormer style), False is rotate pairs of + first half and second half (LLaMa style). Default to False.""" + + window_size: Optional[Tuple[int, int]] = None + """If not None, then will use sliding window attention. The size of the window is specified by + the numbers inside the tuple; -1 is special value meaning "infinite window size".""" + + normalization: str = "LayerNorm" + """Which norm to use for normalization layers, valid options are `LayerNorm` and `RMSNorm`.""" + + qk_layernorm: bool = False + """Whether to apply LayerNorm to the query and key embeddings.""" + + test_mode: bool = False + """Whether to run real-time tests.""" + + calculate_per_token_loss: bool = False + """Whether cross entropy loss is calculated over the actual number of non-padded tokens in the + global batch, versus the default behavior of assuming all tokens are non-padded.""" + + multi_latent_attention: bool = False + """Whether to use multi-latent attention.""" + + #################### + # initialization + #################### + init_method: Optional[Callable] = None + """Method to initialize weights. Note that bias is always set to zero. Should be a function that + takes a single Tensor and initializes it. If None, will be set to + megatron.core.utils.init_method_normal(init_method_std) which is torch nn init normal with + mean=0.0 and std=init_method_std.""" + + output_layer_init_method: Optional[Callable] = None + """Method to initialize weights of the output layer of both attention and MLP blocks. 
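# Sketch of how a SwiGLU MLP is expressed through the fields above (as
# implied by the SwiGLU-related checks in __post_init__ further down):
# SiLU activation plus a gated first linear layer. Values are illustrative.
import torch.nn.functional as F

swiglu_settings = dict(
    activation_func=F.silu,   # non-linearity used inside the MLP
    gated_linear_unit=True,   # first linear layer produces gate and value halves
    add_bias_linear=False,    # illustrative; biases are optional with SwiGLU
)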
If None, + will be set to megatron.core.utils.scaled_init_method_normal(init_method_std) which is torch nn + init normal with mean=0.0 and std=init_method_std / math.sqrt(2.0 * num_layers).""" + + init_method_std: float = 0.02 + """Standard deviation of the zero mean normal for the default initialization method, not used if + init_method and output_layer_init_method are provided.""" + + init_model_with_meta_device: bool = False + """ + If True, initializes the model with the meta device. This is helpful for + training of very large models. This feature is only works when custom fsdp is turned on. + """ + + #################### + # mixed-precision + #################### + apply_query_key_layer_scaling: bool = False + """If true, scale Q * K^T by 1 / layer-number. This improve numeric stability when training with + fp16.""" + + attention_softmax_in_fp32: bool = True + """If True, run attention masking and softmax in fp32. This should be True if + apply_query_key_layer_scaling is True.""" + + #################### + # fusion + #################### + bias_activation_fusion: bool = False + """If True, fuses bias addition and the activation function when possible.""" + + masked_softmax_fusion: bool = False + """If True, uses softmax fusion.""" + + persist_layer_norm: bool = False + """If True, uses the persistent fused layer norm kernel. This kernel only supports a fixed set + of hidden sizes.""" + + memory_efficient_layer_norm: bool = False + """If True, and using local layers (not from TransformerEngine), tells Apex to use the memory + efficient fused LayerNorm kernel. Ignored if not using LayerNorm.""" + + bias_dropout_fusion: bool = False # TODO: this should be bias_dropout_add_fusion? + """If True, uses bias dropout fusion.""" + + apply_rope_fusion: bool = False + """If True, use fused RoPE kernel.""" + + #################### + # activation recomputation + #################### + recompute_granularity: Optional[str] = None + """Determines which type of activation recompute to use. Megatron-core supports 'selective' + activation checkpointing where only the memory intensive part of attention is checkpointed. + These memory intensive activations are also less compute intensive which makes activation + checkpointing more efficient for LLMs (20B+). See Reducing Activation Recomputation in Large + Transformer Models (https://arxiv.org/abs/2205.05198) for more details. 'full' will checkpoint + the entire transformer layer. If None, no recompute is performed and all activations are saved. + If set, must be 'selective' or 'full'. 'selective' always uses all layers. + """ + + recompute_method: Optional[str] = None + """Determines which transformer layers will be recomputed. uniform will uniformly divide the + total number of transformer layers in a transformer block and recompute the input activation of + each divided chunk at the specified granularity. block will recompute the input activations for + only a set number of transformer layers per pipeline stage. The rest of the layers in the + pipeline stage will not have any activations recomputed. If None, and recompute is enabled, all + layers will do recomputation. If set, must be 'uniform' or 'block'.""" + + recompute_num_layers: Optional[int] = None + """When recompute_method is uniform, recompute_num_layers is the number of transformer layers in + each uniformly divided recompute unit. When recompute_method is block, recompute_num_layers is + the number of transformer layers to recompute within each pipeline stage. 
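# Valid combinations of the recompute fields documented above (enforced by
# the __post_init__ checks further down): 'selective' needs neither a method
# nor a layer count, while 'full' needs both a method and recompute_num_layers.
selective = dict(recompute_granularity='selective',
                 recompute_method=None,
                 recompute_num_layers=None)

full_uniform = dict(recompute_granularity='full',
                    recompute_method='uniform',
                    recompute_num_layers=2)      # layers per checkpointed chunk

full_block = dict(recompute_granularity='full',
                  recompute_method='block',
                  recompute_num_layers=4)        # layers recomputed per pipeline stage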
Must be None for + 'selective' activation checkpointing.""" + + distribute_saved_activations: Optional[bool] = None + """If True, distribute recomputed activations across the model parallel group.""" + + #################### + # fp8 related + #################### + fp8: Optional[str] = None + """If set, enables the use of FP8 precision through Transformer Engine. There are 2 predefined + choices (1) 'e4m3' uniformly uses e4m3 for all FP8 tensors, (2) 'hybrid' uses e4m3 for all FP8 + activation and weight tensors and e5m2 for all FP8 output activation gradient tensors.""" + + fp8_margin: int = 0 + """Margin for the scaling factor computation.""" + + fp8_interval: int = 1 + """DEPRECATED from TransformerEngine v1.8.0. This flag is ignored. + Controls how often the scaling factor is recomputed. + """ + + fp8_amax_history_len: int = 1 + """The length of the amax history window used for scaling factor computation.""" + + fp8_amax_compute_algo: str = "most_recent" + """Algorithm used for choosing the `amax` value for the scaling factor computation. There are 2 + predefined choices: `max` chooses the largest `amax` in the history window, while `most_recent` + always chooses the most recently seen value. + + """ + + fp8_wgrad: bool = True + """When set to False, override FP8 config options and do the wgrad computation + in higher precision.""" + + fp8_dot_product_attention: bool = False + """When set to True, use the FP8 implementation of Dot Product Attention.""" + + fp8_multi_head_attention: bool = False + """When set to True, use the FP8 implementation of Multi Head Attention.""" + + tp_only_amax_red: bool = False + """When set to True, reduce the FP8 AMAX only in the TP or TP-CP domain""" + + #################### + # MoE related + #################### + moe_shared_expert_intermediate_size: Optional[int] = None + """Shared expert total ffn hidden size. + It should be equal to 'num_shared_experts * ffn_size_of_each_shared_expert' if + there are multiple shared experts. + None means no shared expert.""" + + moe_shared_expert_overlap: bool = False + """Enable overlapping between shared expert computations and dispatcher communications. + Without this, the shared epxerts execute after the routed experts.""" + + moe_layer_freq: Union[int, List[int]] = 1 + """Frequency between MoE layers and Dense layers. Accepts either: + - An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers. + - A list that defines a custom pattern, e.g.: [1,1,1,0,1,1,1,0,1,1,1,0]""" + + moe_ffn_hidden_size: Optional[int] = None + """MoE Feed-Forward Network hidden size""" + + moe_router_load_balancing_type: str = "aux_loss" + """The load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss + used in GShard and SwitchTransformer; "seq_aux_loss" corresponds to the loss used in DeepSeekV2, + which computes the loss for each individual sample; "sinkhorn" corresponds to the balancing + algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss".""" + + moe_router_topk: int = 2 + """Number of experts to route to for each token.""" + + moe_router_topk_limited_devices: Optional[int] = None + """Number of EP ranks to consider for each token in group-limited routing, + DEPRECATED and replaced by moe_router_num_groups and moe_router_group_topk. + """ + + moe_router_num_groups: Optional[int] = None + """Number of groups to divide experts into for group-limited routing. + When using group-limited routing: + 1. 
Experts are divided into 'moe_router_num_groups' equal-sized groups + 2. For each token, 'moe_router_group_topk' groups are selected based on routing scores + (specifically, the sum of top-2 expert scores within each group) + 3. From these selected groups, 'moe_router_topk' individual experts are chosen + Two common use cases: + - Device-limited routing: Set 'moe_router_num_groups' equal to expert parallel size (EP) + to limit each token to experts on a subset of devices + (See DeepSeek-V2: https://arxiv.org/pdf/2405.04434) + - Node-limited routing: Set 'moe_router_num_groups' equal to number of nodes in EP group + to limit each token to experts on a subset of nodes + (See DeepSeek-V3: https://arxiv.org/pdf/2412.19437) + """ + + moe_router_group_topk: Optional[int] = None + """Number of selected groups for group-limited routing.""" + + moe_router_pre_softmax: bool = False + """Enable pre-softmax routing for MoE, which means softmax is before the top-k selection. + By default, softmax is done after top-k.""" + + moe_router_topk_scaling_factor: Optional[float] = None + """Scaling factor for routing score in top-k selection, only works when moe_router_pre_softmax + enabled. Defaults to None, which means no scaling.""" + + moe_router_score_function: str = "softmax" + """Score function for MoE routing. Can be "softmax" or "sigmoid".""" + + moe_router_enable_expert_bias: bool = False + """TopK routing with dynamic per-expert bias in the aux-loss-free load balancing strategy. + The routing decision is based on the sum of the routing scores and the expert bias. + See https://arxiv.org/abs/2408.15664 for details.""" + + moe_router_bias_update_rate: float = 1e-3 + """The expert bias is updated based on the number of assigned tokens to each expert + in a global batch, where the bias is increased for the experts with less assigned tokens + and decreased for the experts with more assigned tokens. + The default value 1e-3 is same as that used in DeepSeekV3.""" + + moe_grouped_gemm: bool = False + """When there are multiple experts per rank, compress multiple local (potentially small) gemms + in a single kernel launch to improve the utilization and performance by leveraging the Grouped + GEMM feature introduced since CUTLASS 2.8 (https://github.com/fanshiqing/grouped_gemm). + """ + + moe_use_legacy_grouped_gemm: bool = False + """Use legacy GroupedMLP rather than TEGroupedMLP. + Note: The legacy one will be deprecated soon.""" + + moe_aux_loss_coeff: float = 0 # 1e-2 would be a good start value for load balance loss. + """Scaling coefficient for the aux loss. A starting value of 1e-2 is recommended.""" + + moe_z_loss_coeff: Optional[float] = None # 1e-3 would be a good start value for z-loss + """Scaling coefficient for the z-loss. A starting value of 1e-3 is recommended.""" + + moe_input_jitter_eps: Optional[float] = None + """Add noise to the input tensor by applying jitter with a specified epsilon value.""" + + moe_token_dropping: bool = False + """This feature involves selectively dropping and padding tokens for each expert to achieve a + specified capacity, similar to GShard, Switch-Transformer, and DeepSpeed-MoE. Note that this is + currently unsupported so should remain False.""" + + moe_token_dispatcher_type: str = "allgather" + """The type of token dispatcher to use. The default is 'allgather'. 
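# Toy sketch of the group-limited routing described above (illustrative
# shapes and hyper-parameters): 8 experts split into 4 groups of 2, keep the
# top-1 group per token by the sum of its top-2 expert scores, then take the
# final top-k among experts in the surviving group(s).
import torch

num_experts, num_groups, group_topk, topk = 8, 4, 1, 2
experts_per_group = num_experts // num_groups
scores = torch.rand(3, num_experts)                      # [num_tokens, num_experts]

group_scores = (
    scores.view(-1, num_groups, experts_per_group)
    .topk(2, dim=-1).values.sum(dim=-1)                  # sum of top-2 per group
)
top_groups = group_scores.topk(group_topk, dim=-1).indices
group_mask = torch.zeros_like(group_scores).scatter_(1, top_groups, 1.0)
expert_mask = (
    group_mask.unsqueeze(-1)
    .expand(-1, num_groups, experts_per_group)
    .reshape(-1, num_experts)
)
topk_scores, topk_experts = (
    scores.masked_fill(expert_mask == 0, float('-inf')).topk(topk, dim=-1)
)
print(topk_experts)   # selected experts always fall inside the chosen groups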
+ Options are 'allgather','alltoall' and 'flex'.""" + + moe_enable_deepep: bool = False + """[Experimental] Enable DeepEP for efficient token dispatching and combine in MoE models.""" + + moe_per_layer_logging: bool = False + """Enable per-layer logging for MoE, currently supports auxiliary loss and z loss.""" + + moe_expert_capacity_factor: Optional[float] = None + """moe_expert_capacity_factor (float): The capacity factor for each expert, None means no token + will be dropped. The default is None.""" + + moe_pad_expert_input_to_capacity: bool = False + """moe_pad_expert_input_to_capacity (bool): If True, pads the input for each expert to match + the expert capacity length, effective only after the moe_expert_capacity_factor is set. The + default setting is False.""" + + moe_token_drop_policy: str = 'probs' + """The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with + the lowest probabilities will be dropped. If "position", tokens at the end of each batch will + be dropped. + """ + + moe_layer_recompute: bool = False + """Memory optimization: checkpointing moe_layer to save actiavtion memory.""" + + moe_permute_fusion: bool = False + """Fuse token rearrangement ops during token dispatching.""" + + ################## + # Context Parallel + ################## + cp_comm_type: Optional[Union[str, List[str]]] = None + """Inter-gpu communication type for context parallelism. + str: all layers share same communication type. + List[str]: each layer has its separate communication type. + cp_comm_type of each layer can be "p2p" or "all_gather" or "a2a" or "a2a+p2p". + "p2p": Exchange KV chunks with P2P communications in ring topology. P2P is async and can be + overlapped with attention compute. + "all_gather": All-gather to get full sequence of KV before attention. The all-gather is not + async, and cannot be overlapped. + "a2a": Like DeepSpeed Ulysses, scatter attention heads across the CP group, and gather to get + full sequence of QKV. + "a2a+p2p": A hierarchical implementation of context parallelism to attention. + It uses A2A communications in low-level CP groups (e.g., via NVLink), + and P2P communications in high-level CP groups (e.g., via IBLink). + """ + + ################## + # Cuda Graphs + ################## + enable_cuda_graph: bool = False + """When set to true, TransformerLayer layers are swapped with a CUDA graphed version.""" + + cuda_graph_use_single_mempool: bool = False + """When set to true, cudagraphs will be captured inside a single mempool, in which all + cudagraphs may only be used once per step. If false, cudagraphs may be reused across + microbatches. Enabling may reduce cudagraph memory overheads due to memory fragmentation, + however may greatly increase the number of cudagraphs created when the number of microbatches + is high.""" + + cuda_graph_retain_backward_graph: bool = False + """When set to true, cudagraph backward passes will be graph captured with 'retain_grad=True' + This may enable cudagraphs for certain modules that are not completely cudagraph safe. 
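# Sketch of the cp_comm_type contract documented above: either a single
# string shared by all layers, or a per-layer list whose length must match
# num_layers (the same rule is asserted in __post_init__ below). Values are
# illustrative.
num_layers = 4
cp_comm_type = ["p2p", "a2a+p2p", "a2a+p2p", "all_gather"]

if isinstance(cp_comm_type, list):
    assert len(cp_comm_type) == num_layers, "one communication type per layer"
else:
    assert isinstance(cp_comm_type, str), "one communication type for all layers"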
For + more details, see: https://pytorch.org/docs/stable/generated/torch.Tensor.backward.html.""" + + cuda_graph_warmup_steps: int = 3 + """Number of warmup steps for CUDA graphs""" + + external_cuda_graph: bool = False + """When set to true, TransformerLayer layers are swapped with user provided CUDA graphs.""" + + #################### + # miscellaneous + #################### + clone_scatter_output_in_embedding: bool = True + """When set to True, clone the output of scatter_to_sequence_parallel_region in embedding layer + to facilitate garbage collection of input.""" + + disable_parameter_transpose_cache: bool = False + """When set to true, the parameter transposes are not cached for subsequent iterations.""" + + config_logger_dir: str = "" + """When non-empty, dumps entry-point configs to config_logger_dir""" + + flash_decode: bool = False + """ Use the optimized flash decoding kernel during inference. """ + + use_te_rng_tracker: bool = False + """ Whether to use the TE or MCore version of the RNG tracker. """ + + inference_rng_tracker: bool = False + """ Whether we should instantiate a separate RNG tracker for inference. """ + + use_custom_fsdp: bool = False + """ Whether to use custom fsdp for training. """ + + def __post_init__(self): + """Python dataclass method that is used to modify attributes after initialization. + See https://docs.python.org/3/library/dataclasses.html#post-init-processing for more + details. + """ + super().__post_init__() + if self.fp16 and self.bf16: + raise ValueError( + f'Only one of self.fp16: {self.fp16} and self.bf16 {self.bf16} should be True.' + ) + + if self.num_attention_heads % self.tensor_model_parallel_size != 0: + raise ValueError( + f"num_attention_heads ({self.num_attention_heads}) must be a multiple of " + f"tensor_model_parallel_size ({self.tensor_model_parallel_size})." + ) + + if self.ffn_hidden_size is None: + self.ffn_hidden_size = 4 * self.hidden_size + + if self.kv_channels is None: + self.kv_channels = self.hidden_size // self.num_attention_heads + + if self.num_query_groups is None: + self.num_query_groups = self.num_attention_heads + + if self.num_query_groups % self.tensor_model_parallel_size != 0: + raise ValueError( + f"num_query_groups ({self.num_query_groups}) must be a multiple of " + f"tensor_model_parallel_size ({self.tensor_model_parallel_size})." + ) + + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + + if self.expert_model_parallel_size > 1 and self.num_moe_experts is None: + raise ValueError('num_moe_experts must be non None to use expert-parallel.') + + if self.num_moe_experts is not None and self.num_moe_experts <= 0: + raise ValueError('num_moe_experts must be non-negative.') + + if self.moe_ffn_hidden_size is None: + self.moe_ffn_hidden_size = self.ffn_hidden_size + + if self.moe_enable_deepep: + if self.moe_token_dispatcher_type != "flex": + raise ValueError("DeepEP backend is only supported with flex token dispatcher.") + + if self.moe_shared_expert_intermediate_size is not None: + if self.moe_shared_expert_intermediate_size <= 0: + raise ValueError( + f'moe_shared_expert_intermediate_size must be ' + f'num_shared_experts * ffn_size_of_each_shared_expert, ' + f'but got {self.moe_shared_expert_intermediate_size}' + ) + if self.moe_shared_expert_overlap and self.moe_token_dispatcher_type not in [ + "alltoall" + ]: + raise ValueError( + f'moe_shared_expert_overlap only works with alltoall token dispatcher.' 
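# Minimal usage sketch (assumes megatron.core is importable); the asserted
# defaults follow the derivation rules in __post_init__ below. Values are
# illustrative.
from megatron.core.transformer.transformer_config import TransformerConfig

config = TransformerConfig(num_layers=12, hidden_size=768, num_attention_heads=12)

assert config.ffn_hidden_size == 4 * 768     # default: 4 * hidden_size
assert config.kv_channels == 768 // 12       # default: hidden_size // num_attention_heads
assert config.num_query_groups == 12         # default: num_attention_heads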
+ ) + + if self.moe_expert_capacity_factor is not None: + if self.moe_expert_capacity_factor < 0: + self.moe_expert_capacity_factor = None + if self.moe_router_load_balancing_type not in ["aux_loss", "seq_aux_loss", "none"]: + raise ValueError( + 'moe_expert_capacity_factor only works with aux_loss or none load balancing' + ) + + if self.moe_pad_expert_input_to_capacity: + if self.moe_expert_capacity_factor is None: + raise ValueError( + 'moe_expert_capacity_factor must be set to use moe_pad_expert_input_to_capacity' + ) + + if self.cpu_offloading and ( + self.cpu_offloading_num_layers < 0 or self.cpu_offloading_num_layers >= self.num_layers + ): + raise ValueError( + f'CPU offloading can be done only for layers less than {self.num_layers}' + ) + + if self.cpu_offloading and self.pipeline_model_parallel_size > 1: + raise ValueError( + 'Currently there is no support for Pipeline parallelism with CPU offloading' + ) + + if self.cpu_offloading and self.recompute_granularity is not None: + raise ValueError( + 'CPU offloading does not work when activation recomputation is enabled' + ) + + if self.recompute_granularity is not None: + if self.recompute_granularity not in ['full', 'selective']: + raise ValueError( + f'When using recompute_granuarlity: {self.recompute_granularity} must be "full"' + 'or "selective".' + ) + + if self.recompute_method is not None: + if self.recompute_method not in ['block', 'uniform']: + raise ValueError( + f'recompute_method: {self.recompute_method} must be "block" or "uniform".' + ) + elif self.recompute_granularity != 'selective': + raise ValueError( + f'Using recompute_granularity: {self.recompute_granularity} so ' + 'recompute_method must be "block" or "uniform"' + ) + + if self.recompute_granularity != 'selective' and self.recompute_num_layers is None: + raise ValueError( + f'When using recompute_granularity: {self.recompute_granularity} ' + 'recompute_num_layers must be between ' + '1 and num_layers_per_pipeline_rank: ' + f'{self.num_layers // self.pipeline_model_parallel_size}' + ) + elif ( + self.recompute_granularity == 'selective' and self.recompute_num_layers is not None + ): + raise ValueError( + f'When using recompute_granularity: {self.recompute_granularity} ' + 'recompute_num_layers must be None.' 
+ ) + + if self.distribute_saved_activations and self.sequence_parallel: + raise ValueError( + f'distribute_saved_activations: {self.distribute_saved_activations} must be ' + f'false when sequence parallel is enabled: {self.sequence_parallel}' + ) + + if ( + self.num_layers_in_first_pipeline_stage is not None + or self.num_layers_in_last_pipeline_stage is not None + ) and ( + self.account_for_embedding_in_pipeline_split or self.account_for_loss_in_pipeline_split + ): + raise ValueError( + 'num_layers_in_first_pipeline_stage and num_layers_in_last_pipeline_stage cannot be' + 'set at the same time with account_for_embedding_in_pipeline_split' + 'and account_for_loss_in_pipeline_split' + ) + + if ( + self.num_layers_in_first_pipeline_stage is not None + or self.num_layers_in_last_pipeline_stage is not None + ): + pipeline_parallel_size = self.pipeline_model_parallel_size + num_layers = self.num_layers + + if self.num_layers_in_first_pipeline_stage is not None: + if self.num_layers_in_first_pipeline_stage <= 0: + raise ValueError('num_layers_in_first_pipeline_stage must be larger than 0') + + if self.virtual_pipeline_model_parallel_size is not None: + if ( + self.num_layers_in_first_pipeline_stage + % self.virtual_pipeline_model_parallel_size + != 0 + ): + raise ValueError( + f'number of layers at first stage: ' + f'{self.num_layers_in_first_pipeline_stage}' + f'must be divisible by virtual pipeline' + f'parallel degree {self.virtual_pipeline_model_parallel_size}' + ) + num_layers -= self.num_layers_in_first_pipeline_stage + pipeline_parallel_size -= 1 + + if self.num_layers_in_last_pipeline_stage is not None: + if self.num_layers_in_last_pipeline_stage <= 0: + raise ValueError('num_layers_in_last_pipeline_stage must be larger than 0') + + if self.virtual_pipeline_model_parallel_size is not None: + if ( + self.num_layers_in_last_pipeline_stage + % self.virtual_pipeline_model_parallel_size + != 0 + ): + raise ValueError( + f'number of layers at last stage: ' + f'{self.num_layers_in_last_pipeline_stage}' + f'must be divisible by virtual pipeline' + f'parallel degree {self.virtual_pipeline_model_parallel_size}' + ) + num_layers -= self.num_layers_in_last_pipeline_stage + pipeline_parallel_size -= 1 + + if not num_layers % pipeline_parallel_size == 0: + raise ValueError( + f'number of layers at middle stage: {num_layers} must be divisible by' + f'the middle pipeline model parallel size {pipeline_parallel_size}' + ) + + if self.virtual_pipeline_model_parallel_size is not None: + num_layers_per_middle_pipeline_rank = num_layers // pipeline_parallel_size + if ( + not num_layers_per_middle_pipeline_rank + % self.virtual_pipeline_model_parallel_size + == 0 + ): + raise ValueError( + f'number of layers on each middle pipeline rank:' + f'{num_layers_per_middle_pipeline_rank} must be divisible by virtual' + f'pipeline parallel degree {self.virtual_pipeline_model_parallel_size}' + ) + + if self.account_for_embedding_in_pipeline_split or self.account_for_loss_in_pipeline_split: + if self.virtual_pipeline_model_parallel_size is None: + pipeline_parallel_size = self.pipeline_model_parallel_size + + if self.account_for_embedding_in_pipeline_split: + pipeline_parallel_size -= 1 + + if self.account_for_loss_in_pipeline_split: + pipeline_parallel_size -= 1 + + if not self.num_layers % pipeline_parallel_size == 0: + raise ValueError( + f'number of middle layers: {self.num_layers} must be divisible by ' + f'middle pipeline_model_parallel_size {pipeline_parallel_size}' + ) + else: + num_layers = self.num_layers + if 
self.account_for_embedding_in_pipeline_split: + num_layers += 1 + + if self.account_for_loss_in_pipeline_split: + num_layers += 1 + + if not num_layers % self.pipeline_model_parallel_size == 0: + raise ValueError( + f'num_layers: {num_layers} after enable' + f'account_for_embedding_in_pipeline_split or ' + f'account_for_loss_in_pipeline_split must be divisible' + f'by pipeline_model_parallel_size ' + f'{self.pipeline_model_parallel_size}' + ) + + num_layers_per_pipeline_rank = num_layers // self.pipeline_model_parallel_size + if ( + not num_layers_per_pipeline_rank % self.virtual_pipeline_model_parallel_size + == 0 + ): + raise ValueError( + f'number of layers on each pipeline rank: {num_layers_per_pipeline_rank}' + f'(after enable account_for_embedding_in_pipeline_split or ' + f'account_for_loss_in_pipeline_split) must be divisible by' + f'virtual_pipeline_model_parallel_size' + f'{self.virtual_pipeline_model_parallel_size}' + ) + + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + + if self.bias_activation_fusion: + if self.activation_func not in [F.gelu, F.silu]: + raise ValueError( + "When bias_activation_fusion is True, activation function should be either " + "gelu or swiglu" + ) + if ( + self.activation_func == F.gelu + and not self.gated_linear_unit + and not self.add_bias_linear + ): + raise ValueError( + "When bias_activation_fusion is True, gated_linear_unit is False, " + "and activation function is gelu, add_bias_linear must also be True." + ) + + if self.activation_func_fp8_input_store: + if self.activation_func != F.silu or not self.gated_linear_unit: + raise ValueError("Storing activation input in FP8 is supported only for SwiGLU.") + + if self.apply_rope_fusion: + if self.rotary_interleaved: + raise ValueError("rotary_interleaved does not work with apply_rope_fusion.") + + from megatron.core.models.common.embeddings.rope_utils import ( + fused_apply_rotary_pos_emb, + fused_apply_rotary_pos_emb_thd, + ) + + if fused_apply_rotary_pos_emb is None and fused_apply_rotary_pos_emb_thd is None: + raise ValueError( + "apply_rope_fusion is not available. Please install TE >= 1.4 or Apex." + ) + + if self.multi_latent_attention: + raise ValueError("multi_latent_attention does not support apply_rope_fusion.") + + if self.multi_latent_attention and self.rotary_interleaved: + raise ValueError("rotary_interleaved does not work with multi_latent_attention.") + + if self.init_method is None: + self.init_method = init_method_normal(self.init_method_std) + + if self.output_layer_init_method is None: + self.output_layer_init_method = scaled_init_method_normal( + self.init_method_std, self.num_layers + ) + + if ( + self.moe_token_dispatcher_type == "alltoall_seq" + and self.tensor_model_parallel_size != self.expert_tensor_parallel_size + ): + raise ValueError( + "alltoall_seq dispatcher not support different TP size for MoE and Dense layer." + ) + + if self.moe_router_enable_expert_bias and self.moe_router_score_function != "sigmoid": + raise ValueError( + "Expert bias for aux-loss-free routing only supports sigmoid score function." + "Please set --moe-router-score-function sigmoid for sigmoid score function." + ) + + if self.num_moe_experts and self.fp8: + # TE version below 1.7.0 will raise Error when handle zeros tokens for expert + if not is_te_min_version("1.7.0.dev0"): + raise ValueError( + "Only transformer-engine>=1.7.0 supports MoE FP8 training, " + f"but your version is {get_te_version()}." 
+ ) + + if self.moe_grouped_gemm and not is_te_min_version("1.11.0"): + raise ValueError( + "Only transformer-engine>=1.11.0 supports FP8 grouped gemm, " + f"but your version is {get_te_version()}." + ) + + if ( + self.moe_router_topk == 1 + and self.moe_router_score_function == 'softmax' + and not self.moe_router_pre_softmax + and self.moe_router_load_balancing_type != 'sinkhorn' + ): + # Requires applying softmax before selecting the top-k when k is 1, + # since softmax on a [num_tokens, 1] would yield a zero gradient. + raise ValueError("Please use --moe-router-pre-softmax when topk is 1.") + + if self.moe_router_group_topk: + if self.moe_router_topk_limited_devices: + raise ValueError( + "moe_router_topk_limited_devices is deprecated and replaced by " + "moe_router_group_topk and moe_router_num_groups." + ) + if not self.moe_router_num_groups: + raise ValueError( + "When using group limited routing, moe_router_num_groups must be specified." + ) + else: + assert self.num_moe_experts % self.moe_router_num_groups == 0, ( + f"num_moe_experts ({self.num_moe_experts}) should be divisible by " + f"moe_router_num_groups ({self.moe_router_num_groups})." + ) + assert self.moe_router_group_topk <= self.moe_router_num_groups, ( + f"moe_router_group_topk ({self.moe_router_group_topk}) should be smaller than " + f"moe_router_num_groups ({self.moe_router_num_groups})." + ) + elif self.moe_router_topk_limited_devices: + warnings.warn( + "moe_router_topk_limited_devices is deprecated. Use moe_router_group_topk and " + "moe_router_num_groups instead." + ) + self.moe_router_group_topk = self.moe_router_topk_limited_devices + self.moe_router_num_groups = self.expert_model_parallel_size + + if self.flash_decode and self.fp8: + raise ValueError("FP8 inference is currently not support with flash decoding.") + + if self.enable_cuda_graph: + if self.cpu_offloading: + raise ValueError("CUDA graphs not supported with CPU offloading.") + if self.recompute_granularity: + raise ValueError("CUDA graphs not supported with activation recomputation.") + + if self.moe_token_dispatcher_type in ['allgather', 'alltoall_seq']: + if self.variable_seq_lengths is True: + raise ValueError( + f"Token dispatcher type: {self.moe_token_dispatcher_type} does not support " + f"variable sequence length, please use alltoall dispatcher instead." + ) + + if self.moe_permute_fusion: + from megatron.core.transformer.moe.moe_utils import ( + fused_permute, + fused_sort_chunks_by_index, + fused_unpermute, + ) + + if ( + fused_permute is None + or fused_sort_chunks_by_index is None + or fused_unpermute is None + ): + raise ValueError("fused permutation is not available. Please install TE >= 2.1.0.") + + if self.cp_comm_type is not None: + if isinstance(self.cp_comm_type, list): + assert len(self.cp_comm_type) == self.num_layers, ( + f"Length of cp_comm_type ({len(self.cp_comm_type)}) should equal to " + f"the total number of transformer layers ({self.num_layers})!" + ) + else: + assert isinstance( + self.cp_comm_type, str + ), "Unsupported communication type for context parallelism!" + + assert ( + self.pipeline_model_parallel_size > 0 + ), f"Pipeline model parallel size must be larger than 0 \ + when enable --standalone-embedding-stage and --standalone-loss-stage" + + +@dataclass +class MLATransformerConfig(TransformerConfig): + """Configuration object for megatron-core Multi-Latent Attention (MLA) transformers. + + The initialization function has an argument for each parameter, including those in + ModelParallelConfig. 
Included YaRN RoPE parameters that is fused in MLA. + """ + + multi_latent_attention: bool = True + """Whether to use Multi-Latent Attention.""" + + q_lora_rank: int = 512 + """Rank of Query tensor's low rank representation.""" + + kv_lora_rank: int = 512 + """Rank of Key and Value tensors' low rank representation.""" + + qk_head_dim: int = 128 + """Dimension of the head in the QK projection. q_head_dim = qk_head_dim + qk_pos_emb_head_dim""" + + qk_pos_emb_head_dim: int = 64 + """Dimension of the position embedding in the QK projection.""" + + v_head_dim: int = 128 + """Dimension of the head in the V projection.""" + + normalization: str = "RMSNorm" + """Default normalization layer for MLA models is RMSNorm.""" + + rope_type: str = "yarn" + """Type of RoPE to use. Default to yarn, options are rope and yarn.""" + + rotary_base: float = 10000 + """Rotary base for the rotary embeddings, used by rope and yarn.""" + + rotary_percent: float = 1.0 + """Rotary percent for the rotary embeddings, used by rope.""" + + rotary_scaling_factor: float = 40 + """Rotary scaling factor for the rotary embeddings, used by yarn.""" + + max_position_embeddings: int = 4096 + """Maximum position embeddings for the original model, used by yarn.""" + + beta_fast: float = 32 + """Beta fast for YaRN RoPE, used by yarn.""" + + beta_slow: float = 1 + """Beta slow for YaRN RoPE, used by yarn.""" + + mscale: float = 0.707 + """Mscale for YaRN RoPE in Multi-Latent Attention, used by yarn.""" + + mscale_all_dim: float = 0.707 + """Mscale all dimensions for YaRN RoPE in Multi-Latent Attention, used by yarn.""" diff --git a/megatron/core/transformer/transformer_layer.py b/megatron/core/transformer/transformer_layer.py index 0e7eabb..1566467 100644 --- a/megatron/core/transformer/transformer_layer.py +++ b/megatron/core/transformer/transformer_layer.py @@ -1,397 +1,502 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -from abc import ABC -from dataclasses import dataclass, field -from typing import Dict, Optional, Union - -import torch - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing.mapping import ShardedStateDict -from megatron.core.dist_checkpointing.utils import apply_prefix_mapping -from megatron.core.transformer.cuda_graphs import CudaGraphManager -from megatron.core.transformer.identity_op import IdentityFuncOp, IdentityOp -from megatron.core.transformer.module import MegatronModule -from megatron.core.transformer.spec_utils import ModuleSpec, build_module -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import make_viewless_tensor - - -@dataclass -class TransformerLayerSubmodules: - """ - Configuration class for specifying the submodules of a transformer layer. - - This class defines the structure and default implementations for various - components of a transformer layer, allowing for flexible customization - of the layer's architecture. - - Args: - input_layernorm (Union[ModuleSpec, type]): Specification for the input layer normalization. - self_attention (Union[ModuleSpec, type]): Specification for the self-attention mechanism. - self_attn_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation - after self-attention. - pre_cross_attn_layernorm (Union[ModuleSpec, type]): Specification for the layer - normalization before cross-attention. - cross_attention (Union[ModuleSpec, type]): Specification for the cross-attention mechanism. 
- cross_attn_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation - after cross-attention. - pre_mlp_layernorm (Union[ModuleSpec, type]): Specification for the layer normalization - before the MLP. - mlp (Union[ModuleSpec, type]): Specification for the MLP in Dense layer. - mlp_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation - after the MLP. - sharded_state_dict_keys_map (Dict[str, str]): Mapping for sharded tensor keys to be applied - in the `sharded_state_dict` method. - """ - - input_layernorm: Union[ModuleSpec, type] = IdentityOp - self_attention: Union[ModuleSpec, type] = IdentityOp - self_attn_bda: Union[ModuleSpec, type] = IdentityFuncOp - - pre_cross_attn_layernorm: Union[ModuleSpec, type] = IdentityOp - cross_attention: Union[ModuleSpec, type] = IdentityOp - cross_attn_bda: Union[ModuleSpec, type] = IdentityFuncOp - - pre_mlp_layernorm: Union[ModuleSpec, type] = IdentityOp - mlp: Union[ModuleSpec, type] = IdentityOp - mlp_bda: Union[ModuleSpec, type] = IdentityFuncOp - - # Mapping for sharded tensor keys to be applied in `sharded_state_dict` method - sharded_state_dict_keys_map: Dict[str, str] = field(default_factory=dict) - - -class BaseTransformerLayer(ABC): - """A common parent class for `TransformerLayer` like implementations. - - A dummy class that is subclassed by similar `TransformerLayer`s e.g. the - `TransformerLayer` in this file and possibly other `TransformerLayer` - implementations that aim to use `TransformerBlock` as the base module. - The main purpose is to check if any layer (or module) provided in the spec - is a subclass of this class to allow fanning-out of that spec for all the - layers in the `TransformerBlock`. See `_get_block_submodules` method - implementation in `transformer_block.py` file for more details. - """ - - def __init__(self): - pass - - -class TransformerLayer(MegatronModule, BaseTransformerLayer): - """A single transformer layer. - - Transformer layer takes input with size [s, b, h] and returns an - output of the same size. 
- """ - - def __init__( - self, - config: TransformerConfig, - submodules: TransformerLayerSubmodules, - layer_number: int = 1, - hidden_dropout: float = None, - ): - super().__init__(config=config) - - if config.enable_cuda_graph and self.training: - assert ( - not config.cpu_offloading and config.recompute_granularity is None - ), "Cudagraphs not supported" - self.cudagraph_manager = CudaGraphManager() - - self.submodules_config = submodules - self.layer_number = layer_number + TransformerLayer._get_layer_offset(self.config) - self.hidden_dropout = config.hidden_dropout if hidden_dropout is None else hidden_dropout - - # [Module 1: Input Layernorm] Optional Layernorm on the input data - # TODO: add pytorch only layernorm - self.input_layernorm = build_module( - submodules.input_layernorm, - config=self.config, - hidden_size=self.config.hidden_size, - eps=self.config.layernorm_epsilon, - ) - - attention_optional_kwargs = {} - if config.cp_comm_type is not None: - if isinstance(config.cp_comm_type, list): - attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type[self.layer_number] - else: - attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type - - # [Module 2: SelfAttention] - self.self_attention = build_module( - submodules.self_attention, - config=self.config, - layer_number=layer_number, - **attention_optional_kwargs, - ) - - # [Module 3: BiasDropoutFusion] - self.self_attn_bda = build_module(submodules.self_attn_bda) - - # [Module 4: Post SelfAttention] Optional Layernorm after self-attn - self.pre_cross_attn_layernorm = build_module( - submodules.pre_cross_attn_layernorm, - config=self.config, - hidden_size=self.config.hidden_size, - eps=self.config.layernorm_epsilon, - ) - - # [Module 5: CrossAttention] - self.cross_attention = build_module( - submodules.cross_attention, - config=self.config, - layer_number=layer_number, - **attention_optional_kwargs, - ) - - # [Module 6: BiasDropoutFusion] - self.cross_attn_bda = build_module(submodules.cross_attn_bda, config=self.config) - - # [Module 7: Pre MLP] Optional Layernorm before MLP - self.pre_mlp_layernorm = build_module( - submodules.pre_mlp_layernorm, - config=self.config, - hidden_size=self.config.hidden_size, - eps=self.config.layernorm_epsilon, - ) - # [Module 8: MLP block] - self.mlp = build_module(submodules.mlp, config=self.config) - if hasattr(self.mlp, 'set_layer_number'): - self.mlp.set_layer_number(self.layer_number) - - # [Module 9: BiasDropoutFusion] - self.mlp_bda = build_module(submodules.mlp_bda) - - # @jcasper how should we handle nvfuser? - # Set bias+dropout+add fusion grad_enable execution handler. 
- # TORCH_MAJOR = int(torch.__version__.split('.')[0]) - # TORCH_MINOR = int(torch.__version__.split('.')[1]) - # use_nvfuser = TORCH_MAJOR > 1 or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10) - # self.bias_dropout_add_exec_handler = nullcontext if use_nvfuser else torch.enable_grad - self.bias_dropout_add_exec_handler = torch.enable_grad - - @staticmethod - def _get_layer_offset(config: TransformerConfig): - """Get the index offset of current pipeline stage, given the level of pipelining.""" - pipeline_rank = parallel_state.get_pipeline_model_parallel_rank() - if not parallel_state.is_inside_encoder(): - pp_decoder_start = parallel_state.get_pipeline_model_parallel_decoder_start() - if pp_decoder_start is not None: - pipeline_rank = pipeline_rank - pp_decoder_start - - num_layers_per_pipeline_rank = config.num_layers // config.pipeline_model_parallel_size - - if parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None: - vp_rank = parallel_state.get_virtual_pipeline_model_parallel_rank() - vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size() - - total_num_layers = config.num_layers - num_layers_per_virtual_rank = num_layers_per_pipeline_rank // vp_size - total_virtual_chunks = total_num_layers // vp_size - offset = vp_rank * total_virtual_chunks + (pipeline_rank * num_layers_per_virtual_rank) - - else: - # Each stage gets a contiguous set of layers. - if config.pipeline_model_parallel_size > 1: - if ( - config.first_pipeline_num_layers is not None - or config.last_pipeline_num_layers is not None - ): - # Calculate number of pipelines for distributing layers - middle_pipeline_stages = config.pipeline_model_parallel_size - middle_pipeline_stages -= sum( - [ - 1 if x is not None else 0 - for x in ( - config.first_pipeline_num_layers, - config.last_pipeline_num_layers, - ) - ] - ) - - # Calculate layers to distribute - first_pipeline_offset = ( - 0 - if config.first_pipeline_num_layers is None - else config.first_pipeline_num_layers - ) - last_pipeline_offset = ( - 0 - if config.last_pipeline_num_layers is None - else config.last_pipeline_num_layers - ) - - middle_num_layers = ( - config.num_layers - first_pipeline_offset - last_pipeline_offset - ) - - if middle_pipeline_stages > 0: - num_layers_per_pipeline_rank = middle_num_layers // middle_pipeline_stages - else: - num_layers_per_pipeline_rank = 0 - - middle_pipeline_rank = ( - pipeline_rank - if config.first_pipeline_num_layers is None - else pipeline_rank - 1 - ) - - if pipeline_rank == 0: - offset = 0 - else: - offset = ( - middle_pipeline_rank * num_layers_per_pipeline_rank - ) + first_pipeline_offset - else: - offset = pipeline_rank * num_layers_per_pipeline_rank - else: - offset = 0 - - return offset - - def forward( - self, - hidden_states, - attention_mask=None, - context=None, - context_mask=None, - rotary_pos_emb=None, - rotary_pos_cos=None, - rotary_pos_sin=None, - attention_bias=None, - inference_params=None, - packed_seq_params=None, - ): - """ - Perform a forward pass through the transformer layer. - - This method implements the core computation of a transformer layer, including - self-attention, cross-attention (if applicable), and feed-forward operations. - - Args: - hidden_states (Tensor): Input tensor of shape [s, b, h] where s is sequence length, - b is batch size, and h is hidden size. - attention_mask (Tensor): Mask tensor for self-attention. - context (Tensor, optional): Context tensor for cross-attention. - context_mask (Tensor, optional): Mask tensor for cross-attention. 
- rotary_pos_emb (Tensor, optional): Rotary positional embeddings. - attention_bias (Tensor, optional): Bias tensor for Q * K.T. - inference_params (object, optional): Parameters for inference-time optimizations. - packed_seq_params (object, optional): Parameters for packed sequence processing. - - Returns: - Tuple[Tensor, Tensor]: A tuple containing: - output (Tensor): Transformed hidden states of shape [s, b, h]. - context (Tensor): Updated context tensor if cross-attention is used, - otherwise None. - """ - - # Residual connection. - residual = hidden_states - - # Optional Input Layer norm - input_layernorm_output = self.input_layernorm(hidden_states) - - # Self attention. - attention_output_with_bias = self.self_attention( - input_layernorm_output, - attention_mask=attention_mask, - inference_params=inference_params, - rotary_pos_emb=rotary_pos_emb, - rotary_pos_cos=rotary_pos_cos, - rotary_pos_sin=rotary_pos_sin, - attention_bias=attention_bias, - packed_seq_params=packed_seq_params, - ) - - # TODO: could we move `bias_dropout_add_exec_handler` itself - # inside the module provided in the `bias_dropout_add_spec` module? - with self.bias_dropout_add_exec_handler(): - hidden_states = self.self_attn_bda(self.training, self.config.bias_dropout_fusion)( - attention_output_with_bias, residual, self.hidden_dropout - ) - - # Residual connection. - residual = hidden_states - - # Optional Layer norm after self-attention - pre_cross_attn_layernorm_output = self.pre_cross_attn_layernorm(hidden_states) - - # Cross attention. - attention_output_with_bias = self.cross_attention( - pre_cross_attn_layernorm_output, - attention_mask=context_mask, - key_value_states=context, - inference_params=inference_params, - ) - - if isinstance(attention_output_with_bias, dict) and "context" in attention_output_with_bias: - context = attention_output_with_bias["context"] - - # TODO: could we move `bias_dropout_add_exec_handler` itself - # inside the module provided in the `bias_dropout_add_spec` module? - with self.bias_dropout_add_exec_handler(): - hidden_states = self.cross_attn_bda(self.training, self.config.bias_dropout_fusion)( - attention_output_with_bias, residual, self.hidden_dropout - ) - - # Residual connection. - residual = hidden_states - - # Optional Layer norm post the cross-attention. - pre_mlp_layernorm_output = self.pre_mlp_layernorm(hidden_states) - - # MLP. - mlp_output_with_bias = self.mlp(pre_mlp_layernorm_output) - - # TODO: could we move `bias_dropout_add_exec_handler` itself - # inside the module provided in the `bias_dropout_add_spec` module? - with self.bias_dropout_add_exec_handler(): - hidden_states = self.mlp_bda(self.training, self.config.bias_dropout_fusion)( - mlp_output_with_bias, residual, self.hidden_dropout - ) - - # Jit compiled function creates 'view' tensor. This tensor - # potentially gets saved in the MPU checkpoint function context, - # which rejects view tensors. While making a viewless tensor here - # won't result in memory savings (like the data loader, or - # p2p_communication), it serves to document the origin of this - # 'view' tensor. 
- output = make_viewless_tensor( - inp=hidden_states, requires_grad=hidden_states.requires_grad, keep_graph=True - ) - - # CUDA graph requires returned values to be Tensors - if self.config.external_cuda_graph and self.training: - return output - return output, context - - def sharded_state_dict( - self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[dict] = None - ) -> ShardedStateDict: - """ - Generate a sharded state dictionary for the transformer layer. - - Args: - prefix (str, optional): Prefix to be added to all keys in the state dict. - sharded_offsets (tuple, optional): Tuple of sharding offsets. - metadata (Optional[dict], optional): Additional metadata for sharding. - - Returns: - ShardedStateDict: A dictionary containing the sharded state of the transformer layer. - """ - sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata) - prefixed_map = { - f'{prefix}{k}': f'{prefix}{v}' - for k, v in self.submodules_config.sharded_state_dict_keys_map.items() - } - if prefixed_map: - apply_prefix_mapping(sharded_state_dict, prefixed_map) - return sharded_state_dict - - def __call__(self, *args, **kwargs): - if hasattr(self, 'cudagraph_manager'): - return self.cudagraph_manager(self, args, kwargs) - return super(MegatronModule, self).__call__(*args, **kwargs) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import warnings +from abc import ABC +from dataclasses import dataclass, field +from typing import Dict, Optional, Union + +import torch +import torch.distributed + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing.mapping import ShardedStateDict +from megatron.core.dist_checkpointing.utils import apply_prefix_mapping +from megatron.core.transformer.cuda_graphs import CudaGraphManager +from megatron.core.transformer.identity_op import IdentityFuncOp, IdentityOp +from megatron.core.transformer.module import MegatronModule +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import make_viewless_tensor + + +def get_transformer_layer_offset(config: TransformerConfig): + """Get the index offset of current pipeline stage, given the level of pipelining.""" + pipeline_rank = parallel_state.get_pipeline_model_parallel_rank() + if not parallel_state.is_inside_encoder(): + pp_decoder_start = parallel_state.get_pipeline_model_parallel_decoder_start() + if pp_decoder_start is not None: + pipeline_rank = pipeline_rank - pp_decoder_start + + if config.pipeline_model_parallel_size > 1: + + if ( + config.num_layers_in_first_pipeline_stage is not None + or config.num_layers_in_last_pipeline_stage is not None + ): + # Calculate number of pipeline stages to distribute the remaining Transformer + # layers after deducting the Transformer layers in the first or the last stages + middle_pipeline_stages = config.pipeline_model_parallel_size + middle_pipeline_stages -= sum( + [ + 1 if x is not None else 0 + for x in ( + config.num_layers_in_first_pipeline_stage, + config.num_layers_in_last_pipeline_stage, + ) + ] + ) + + # Calculate layers to distribute in each pipeline stage. If the + # num_layers_in_first_pipeline_stage and num_layers_in_last_pipeline_stage + # are not set, we will not enable uneven pipeline. All layers will be treated + # as middle layers. 
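+            # Worked example (illustrative, assuming config.num_layers = 32,
+            # pipeline_model_parallel_size = 4, num_layers_in_first_pipeline_stage = 4,
+            # num_layers_in_last_pipeline_stage = 4): the two middle stages split the
+            # remaining 24 layers (12 each); with a virtual pipeline size of 2, each
+            # virtual chunk on a middle stage then holds 6 layers.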
+ num_layers_in_first_pipeline_stage = ( + 0 + if config.num_layers_in_first_pipeline_stage is None + else config.num_layers_in_first_pipeline_stage + ) + num_layers_in_last_pipeline_stage = ( + 0 + if config.num_layers_in_last_pipeline_stage is None + else config.num_layers_in_last_pipeline_stage + ) + + middle_num_layers = ( + config.num_layers + - num_layers_in_first_pipeline_stage + - num_layers_in_last_pipeline_stage + ) + + if parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None: + vp_rank = parallel_state.get_virtual_pipeline_model_parallel_rank() + vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size() + + # Calculate number of layers in each virtual model chunk + # If the num_layers_in_first_pipeline_stage and + # num_layers_in_last_pipeline_stage are not set, all pipeline stages + # will be treated as middle pipeline stages in the calculation + num_layers_per_virtual_model_chunk_in_first_pipeline_stage = ( + 0 + if config.num_layers_in_first_pipeline_stage is None + else config.num_layers_in_first_pipeline_stage // vp_size + ) + + num_layers_per_virtual_model_chunk_in_last_pipeline_stage = ( + 0 + if config.num_layers_in_last_pipeline_stage is None + else config.num_layers_in_last_pipeline_stage // vp_size + ) + + num_layers_per_vritual_model_chunk_in_middle_pipeline_stage = ( + middle_num_layers // vp_size + ) + + # First stage + middle stage + last stage + total_virtual_chunks = ( + num_layers_per_virtual_model_chunk_in_first_pipeline_stage + + num_layers_per_vritual_model_chunk_in_middle_pipeline_stage + + num_layers_per_virtual_model_chunk_in_last_pipeline_stage + ) + + # Calculate the layer offset with interleaved uneven pipeline parallelism + if pipeline_rank == 0: + offset = vp_rank * total_virtual_chunks + else: + offset = ( + vp_rank * total_virtual_chunks + + num_layers_per_virtual_model_chunk_in_first_pipeline_stage + + (pipeline_rank - 1) + * ( + num_layers_per_vritual_model_chunk_in_middle_pipeline_stage + // middle_pipeline_stages + ) + ) + else: + if middle_pipeline_stages > 0: + num_layers_per_pipeline_rank = middle_num_layers // middle_pipeline_stages + else: + num_layers_per_pipeline_rank = 0 + + middle_pipeline_rank = ( + pipeline_rank + if config.num_layers_in_first_pipeline_stage is None + else pipeline_rank - 1 + ) + + if pipeline_rank == 0: + offset = 0 + else: + offset = ( + middle_pipeline_rank * num_layers_per_pipeline_rank + ) + num_layers_in_first_pipeline_stage + else: + num_layers = config.num_layers + + # Increase the number of layers by one if we include the embedding (loss) + # layer into pipeline parallelism partition and placement + if config.account_for_embedding_in_pipeline_split: + num_layers += 1 + + if config.account_for_loss_in_pipeline_split: + num_layers += 1 + + num_layers_per_pipeline_rank = num_layers // config.pipeline_model_parallel_size + + if parallel_state.get_virtual_pipeline_model_parallel_world_size() is not None: + vp_rank = parallel_state.get_virtual_pipeline_model_parallel_rank() + vp_size = parallel_state.get_virtual_pipeline_model_parallel_world_size() + + num_layers_per_virtual_rank = num_layers_per_pipeline_rank // vp_size + total_virtual_chunks = num_layers // vp_size + offset = vp_rank * total_virtual_chunks + ( + pipeline_rank * num_layers_per_virtual_rank + ) + + # Reduce the offset of embedding layer from the total layer number + if ( + config.account_for_embedding_in_pipeline_split + and not parallel_state.is_pipeline_first_stage() + ): + offset -= 1 + else: + offset = 
pipeline_rank * num_layers_per_pipeline_rank + + # Reduce the offset of embedding layer from the total layer number + if ( + config.account_for_embedding_in_pipeline_split + and not parallel_state.is_pipeline_first_stage() + ): + offset -= 1 + else: + offset = 0 + return offset + + +@dataclass +class TransformerLayerSubmodules: + """ + Configuration class for specifying the submodules of a transformer layer. + + This class defines the structure and default implementations for various + components of a transformer layer, allowing for flexible customization + of the layer's architecture. + + Args: + input_layernorm (Union[ModuleSpec, type]): Specification for the input layer normalization. + self_attention (Union[ModuleSpec, type]): Specification for the self-attention mechanism. + self_attn_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation + after self-attention. + pre_cross_attn_layernorm (Union[ModuleSpec, type]): Specification for the layer + normalization before cross-attention. + cross_attention (Union[ModuleSpec, type]): Specification for the cross-attention mechanism. + cross_attn_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation + after cross-attention. + pre_mlp_layernorm (Union[ModuleSpec, type]): Specification for the layer normalization + before the MLP. + mlp (Union[ModuleSpec, type]): Specification for the MLP in Dense layer. + mlp_bda (Union[ModuleSpec, type]): Specification for the bias-dropout-add operation + after the MLP. + sharded_state_dict_keys_map (Dict[str, str]): Mapping for sharded tensor keys to be applied + in the `sharded_state_dict` method. + """ + + input_layernorm: Union[ModuleSpec, type] = IdentityOp + self_attention: Union[ModuleSpec, type] = IdentityOp + self_attn_bda: Union[ModuleSpec, type] = IdentityFuncOp + + pre_cross_attn_layernorm: Union[ModuleSpec, type] = IdentityOp + cross_attention: Union[ModuleSpec, type] = IdentityOp + cross_attn_bda: Union[ModuleSpec, type] = IdentityFuncOp + + pre_mlp_layernorm: Union[ModuleSpec, type] = IdentityOp + mlp: Union[ModuleSpec, type] = IdentityOp + mlp_bda: Union[ModuleSpec, type] = IdentityFuncOp + + # Mapping for sharded tensor keys to be applied in `sharded_state_dict` method + sharded_state_dict_keys_map: Dict[str, str] = field(default_factory=dict) + + +class BaseTransformerLayer(ABC): + """A common parent class for `TransformerLayer` like implementations. + + A dummy class that is subclassed by similar `TransformerLayer`s e.g. the + `TransformerLayer` in this file and possibly other `TransformerLayer` + implementations that aim to use `TransformerBlock` as the base module. + The main purpose is to check if any layer (or module) provided in the spec + is a subclass of this class to allow fanning-out of that spec for all the + layers in the `TransformerBlock`. See `_get_block_submodules` method + implementation in `transformer_block.py` file for more details. + """ + + def __init__(self): + pass + + +class TransformerLayer(MegatronModule, BaseTransformerLayer): + """A single transformer layer. + + Transformer layer takes input with size [s, b, h] and returns an + output of the same size. 
+ """ + + def __init__( + self, + config: TransformerConfig, + submodules: TransformerLayerSubmodules, + layer_number: int = 1, + hidden_dropout: float = None, + ): + super().__init__(config=config) + + if config.enable_cuda_graph: + if not self.training: + # Cudagraphs for inference are only enabled with the flash decoding kernel + assert ( + self.config.flash_decode + ), "--flash-decode is required to use CUDA graphs during inference" + self.cudagraph_manager = CudaGraphManager(config) + + self.submodules_config = submodules + self.layer_number = layer_number + get_transformer_layer_offset(self.config) + self.hidden_dropout = config.hidden_dropout if hidden_dropout is None else hidden_dropout + + # [Module 1: Input Layernorm] Optional Layernorm on the input data + # TODO: add pytorch only layernorm + self.input_layernorm = build_module( + submodules.input_layernorm, + config=self.config, + hidden_size=self.config.hidden_size, + eps=self.config.layernorm_epsilon, + ) + + attention_optional_kwargs = {} + if config.cp_comm_type is not None: + if isinstance(config.cp_comm_type, list): + attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type[self.layer_number] + else: + attention_optional_kwargs["cp_comm_type"] = config.cp_comm_type + + # [Module 2: SelfAttention] + self.self_attention = build_module( + submodules.self_attention, + config=self.config, + layer_number=layer_number, + **attention_optional_kwargs, + ) + + # [Module 3: BiasDropoutFusion] + self.self_attn_bda = build_module(submodules.self_attn_bda) + + # [Module 4: Post SelfAttention] Optional Layernorm after self-attn + self.pre_cross_attn_layernorm = build_module( + submodules.pre_cross_attn_layernorm, + config=self.config, + hidden_size=self.config.hidden_size, + eps=self.config.layernorm_epsilon, + ) + + # [Module 5: CrossAttention] + self.cross_attention = build_module( + submodules.cross_attention, + config=self.config, + layer_number=layer_number, + **attention_optional_kwargs, + ) + + # [Module 6: BiasDropoutFusion] + self.cross_attn_bda = build_module(submodules.cross_attn_bda, config=self.config) + + # [Module 7: Pre MLP] Optional Layernorm before MLP + self.pre_mlp_layernorm = build_module( + submodules.pre_mlp_layernorm, + config=self.config, + hidden_size=self.config.hidden_size, + eps=self.config.layernorm_epsilon, + ) + # [Module 8: MLP block] + self.mlp = build_module(submodules.mlp, config=self.config) + if hasattr(self.mlp, 'set_layer_number'): + self.mlp.set_layer_number(self.layer_number) + + # [Module 9: BiasDropoutFusion] + self.mlp_bda = build_module(submodules.mlp_bda) + + # @jcasper how should we handle nvfuser? + # Set bias+dropout+add fusion grad_enable execution handler. + # TORCH_MAJOR = int(torch.__version__.split('.')[0]) + # TORCH_MINOR = int(torch.__version__.split('.')[1]) + # use_nvfuser = TORCH_MAJOR > 1 or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10) + # self.bias_dropout_add_exec_handler = nullcontext if use_nvfuser else torch.enable_grad + self.bias_dropout_add_exec_handler = torch.enable_grad + + @staticmethod + def _get_layer_offset(config: TransformerConfig): + """ + Get the layer offset for the current pipeline stage. + + Deprecated: please use `get_transformer_layer_offset` instead. + """ + + warnings.warn( + "TransformerLayer._get_layer_offset is deprecated." + "Please use get_transformer_layer_offset instead." 
+ ) + return get_transformer_layer_offset(config) + + def forward( + self, + hidden_states, + attention_mask=None, + context=None, + context_mask=None, + rotary_pos_emb=None, + rotary_pos_cos=None, + rotary_pos_sin=None, + attention_bias=None, + inference_params=None, + packed_seq_params=None, + sequence_len_offset=None, + ): + """ + Perform a forward pass through the transformer layer. + + This method implements the core computation of a transformer layer, including + self-attention, cross-attention (if applicable), and feed-forward operations. + + Args: + hidden_states (Tensor): Input tensor of shape [s, b, h] where s is sequence length, + b is batch size, and h is hidden size. + attention_mask (Tensor): Mask tensor for self-attention. + context (Tensor, optional): Context tensor for cross-attention. + context_mask (Tensor, optional): Mask tensor for cross-attention. + rotary_pos_emb (Tensor, optional): Rotary positional embeddings. + attention_bias (Tensor, optional): Bias tensor for Q * K.T. + inference_params (object, optional): Parameters for inference-time optimizations. + packed_seq_params (object, optional): Parameters for packed sequence processing. + + Returns: + Tuple[Tensor, Tensor]: A tuple containing: + output (Tensor): Transformed hidden states of shape [s, b, h]. + context (Tensor): Updated context tensor if cross-attention is used, + otherwise None. + """ + + # Residual connection. + residual = hidden_states + + # Optional Input Layer norm + input_layernorm_output = self.input_layernorm(hidden_states) + + # Self attention. + attention_output_with_bias = self.self_attention( + input_layernorm_output, + attention_mask=attention_mask, + inference_params=inference_params, + rotary_pos_emb=rotary_pos_emb, + rotary_pos_cos=rotary_pos_cos, + rotary_pos_sin=rotary_pos_sin, + attention_bias=attention_bias, + packed_seq_params=packed_seq_params, + sequence_len_offset=sequence_len_offset, + ) + + # TODO: could we move `bias_dropout_add_exec_handler` itself + # inside the module provided in the `bias_dropout_add_spec` module? + with self.bias_dropout_add_exec_handler(): + hidden_states = self.self_attn_bda(self.training, self.config.bias_dropout_fusion)( + attention_output_with_bias, residual, self.hidden_dropout + ) + + # Residual connection. + residual = hidden_states + + # Optional Layer norm after self-attention + pre_cross_attn_layernorm_output = self.pre_cross_attn_layernorm(hidden_states) + + # Cross attention. + attention_output_with_bias = self.cross_attention( + pre_cross_attn_layernorm_output, + attention_mask=context_mask, + key_value_states=context, + inference_params=inference_params, + ) + + if isinstance(attention_output_with_bias, dict) and "context" in attention_output_with_bias: + context = attention_output_with_bias["context"] + + # TODO: could we move `bias_dropout_add_exec_handler` itself + # inside the module provided in the `bias_dropout_add_spec` module? + with self.bias_dropout_add_exec_handler(): + hidden_states = self.cross_attn_bda(self.training, self.config.bias_dropout_fusion)( + attention_output_with_bias, residual, self.hidden_dropout + ) + + # Residual connection. + residual = hidden_states + + # Optional Layer norm post the cross-attention. + pre_mlp_layernorm_output = self.pre_mlp_layernorm(hidden_states) + + # MLP. + mlp_output_with_bias = self.mlp(pre_mlp_layernorm_output) + + # TODO: could we move `bias_dropout_add_exec_handler` itself + # inside the module provided in the `bias_dropout_add_spec` module? 
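+        # self.mlp_bda(training, fusion) returns a bias-dropout-add callable; the call
+        # below computes hidden_states = residual + dropout(mlp_output + bias) with
+        # dropout probability self.hidden_dropout (the fused variant is used when
+        # config.bias_dropout_fusion is enabled).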
+ with self.bias_dropout_add_exec_handler(): + hidden_states = self.mlp_bda(self.training, self.config.bias_dropout_fusion)( + mlp_output_with_bias, residual, self.hidden_dropout + ) + + # Jit compiled function creates 'view' tensor. This tensor + # potentially gets saved in the MPU checkpoint function context, + # which rejects view tensors. While making a viewless tensor here + # won't result in memory savings (like the data loader, or + # p2p_communication), it serves to document the origin of this + # 'view' tensor. + output = make_viewless_tensor( + inp=hidden_states, requires_grad=hidden_states.requires_grad, keep_graph=True + ) + + # CUDA graph requires returned values to be Tensors + if self.config.external_cuda_graph and self.training: + return output + return output, context + + def sharded_state_dict( + self, prefix: str = '', sharded_offsets: tuple = (), metadata: Optional[dict] = None + ) -> ShardedStateDict: + """ + Generate a sharded state dictionary for the transformer layer. + + Args: + prefix (str, optional): Prefix to be added to all keys in the state dict. + sharded_offsets (tuple, optional): Tuple of sharding offsets. + metadata (Optional[dict], optional): Additional metadata for sharding. + + Returns: + ShardedStateDict: A dictionary containing the sharded state of the transformer layer. + """ + sharded_state_dict = super().sharded_state_dict(prefix, sharded_offsets, metadata) + prefixed_map = { + f'{prefix}{k}': f'{prefix}{v}' + for k, v in self.submodules_config.sharded_state_dict_keys_map.items() + } + if prefixed_map: + apply_prefix_mapping(sharded_state_dict, prefixed_map) + return sharded_state_dict + + def __call__(self, *args, **kwargs): + # Training and validation mode CUDA graphs + if hasattr(self, 'cudagraph_manager') and kwargs.get('inference_params') is None: + return self.cudagraph_manager(self, args, kwargs) + # Inference mode. CUDA graphs are used in the decode phase only, when attn mask is None + elif ( + not self.training + and hasattr(self, 'cudagraph_manager') + and kwargs.get('inference_params') is not None + and kwargs['inference_params'].decode_mode + ): + assert ( + kwargs.get('attention_mask') is None + ), f"Attention mask must not be set when using CUDA graphs for decode" + return self.cudagraph_manager(self, args, kwargs) + return super(MegatronModule, self).__call__(*args, **kwargs) diff --git a/megatron/core/transformer/utils.py b/megatron/core/transformer/utils.py index 4781b68..4c0b65c 100644 --- a/megatron/core/transformer/utils.py +++ b/megatron/core/transformer/utils.py @@ -1,188 +1,195 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -"""Utilities for transformer layers.""" -from functools import lru_cache -from operator import itemgetter -from typing import Any, Dict, Iterable, Iterator, Optional, Tuple, Union - -import torch - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedStateDict, StateDict -from megatron.core.jit import jit_fuser -from megatron.core.utils import ( - make_sharded_tensor_for_checkpoint, - make_tp_sharded_tensor_for_checkpoint, -) - - -def get_linear_layer(rows, columns, init_method, perform_initialization=True): - """Simple linear layer with weight initialization.""" - layer = torch.nn.Linear(rows, columns) - if perform_initialization: # Take from modelparallel config - init_method(layer.weight) - with torch.no_grad(): - layer.bias.zero_() - return layer - - -@lru_cache(maxsize=32) -def get_default_causal_mask(sq: int) -> torch.Tensor: - """Return the causal upper triangular mask for softmax input.""" - return torch.triu(torch.ones(sq, sq, device="cuda"), diagonal=1).bool() - - -def attention_mask_func(attention_scores, attention_mask): - attention_scores.masked_fill_(attention_mask, -10000.0) - return attention_scores - - -@jit_fuser -def gelu_impl(x): - """OpenAI's gelu implementation.""" - return 0.5 * x * (1.0 + torch.tanh(0.7978845608028654 * x * (1.0 + 0.044715 * x * x))) - - -def openai_gelu(x): - return gelu_impl(x) - - -# This is actually Python equivalent of torch.nn.functional.gelu(), also with type hints for ONNX exporter -@jit_fuser -def erf_gelu(x): - return ( - x * 0.5 * (torch.erf(x / 1.41421).to(dtype=x.dtype) + torch.ones_like(x).to(dtype=x.dtype)) - ) - - -def make_sharded_tensors_for_checkpoint( - state_dict: StateDict, - prefix: str, - tensor_parallel_layers_axis_map: Optional[Dict[str, int]] = None, - sharded_offsets: Iterable[Tuple[int, int, int]] = (), - extra_state_suffix: str = '_extra_state', -): - """Wraps tensors from transformer layers with ShardedTensor or ShardedObject. - - For a given `state_dict`, wraps: - - all _extra_states with ShardedObject - - all tensors specified in tensor_parallel_layers_axis_map with TP and DP sharded ShardedTensor - - other values with DP sharded ShardedTensor - - Args: - state_dict (StateDict): state_dict to convert - prefix (str): prefix appended to keys in final state dict - tensor_parallel_layers_axis_map (Dict[str, int], optional): dict mapping layer - names to the axis for TP sharding - sharded_offsets (Iterable[Tuple[int, int, int]], optional): sharding already - applied (e.g. PP related), passed along to ShardedTensor - extra_state_suffix (str, default = '_extra_state'): layers with this - suffix will be wrapped with ShardedObject instead of ShardedTensor. 
- - """ - - if tensor_parallel_layers_axis_map is None: - tensor_parallel_layers_axis_map = {} - - sharded_state_dict = {} - for layer_name in state_dict.keys(): - tensor = state_dict[layer_name] - layer_key = f'{prefix}{layer_name}' - - if layer_name.endswith(extra_state_suffix): - sharded_state_dict[layer_key] = make_sharded_object_for_checkpoint( - tensor, layer_key, sharded_offsets - ) - - elif layer_name in tensor_parallel_layers_axis_map: - tp_axis = tensor_parallel_layers_axis_map[layer_name] - sharded_state_dict[layer_key] = make_tp_sharded_tensor_for_checkpoint( - tensor, layer_key, tp_axis, prepend_offsets=sharded_offsets - ) - - else: - sharded_state_dict[layer_key] = make_sharded_tensor_for_checkpoint( - tensor, layer_key, prepend_offsets=sharded_offsets - ) - - return sharded_state_dict - - -def make_sharded_object_for_checkpoint( - obj: Any, - key: str, - sharded_offsets: Iterable[Tuple[int, int, int]] = (), - replica_id: Union[None, int, Tuple[int, ...]] = None, - **kwargs, -): - """Helper for instantiating a non-sharded ShardedObject (replicated across TP and DP group). - - Args: - obj (object): any object to be sharded - key (str): unique identifier of the object - sharded_offsets (Iterable[Tuple[int, int, int]]): offsets normally - prepended to ShardedTensors, will be used as global offsets for - ShardedObject - replica_id (Union[None, int, Tuple[int, ...]]): replica id - """ - if replica_id is None: - replica_id = ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_data_parallel_rank(with_context_parallel=True), - ) - - return ShardedObject(key, obj, *_get_extra_state_offsets(sharded_offsets), replica_id, **kwargs) - - -def _get_extra_state_offsets( - sharded_offsets: Iterable[Tuple[int, int, int]] -) -> Tuple[Tuple[int, ...], Tuple[int, ...]]: - """Turns ShardedTensor offsets into offsets suitable for ShardedObject.""" - if sharded_offsets: - sharded_offsets = sorted(sharded_offsets, key=itemgetter(0)) # sort by axis - axis, extra_state_offset, extra_state_shape = zip(*sharded_offsets) - assert list(axis) == list( - range(len(axis)) - ), f'Expected contiguous axis for offsets: {sharded_offsets}' - else: - extra_state_shape = (1,) - extra_state_offset = (0,) - return extra_state_shape, extra_state_offset - - -def sharded_state_dict_default( - module: torch.nn.Module, - prefix: str = '', - sharded_offsets: Tuple[Tuple[int, int, int]] = (), - metadata: Optional[dict] = None, -) -> ShardedStateDict: - """Provides implementation for sharded_state_dict method for non-MegatronModules. - - Tries to call `module.sharded_state_dict` when possible, - otherwise uses regular state dict and assumes tensors are replicated across TP and DP. - - `keep_vars=True` is passed to module.state_dict so that optimizer states - can be sharded later on. - - Args: - module (torch.nn.Module): module which sharded state dict we want to obtain - prefix (str): prefix for the state dict keys - sharded_offsets (Tuple[Tuple[int, int, int]], optional): sharding already - applied (e.g. PP related) by sup-modules. 
Passed along to ShardedTensor - metadata (dict, optional): metadata passed to module sharded_state_dict method - - Returns: - dict: dictionary of state dict keys mapped to ShardedTensors - """ - - if hasattr(module, 'sharded_state_dict'): - module_sharded_sd = module.sharded_state_dict( - prefix=prefix, sharded_offsets=sharded_offsets, metadata=metadata - ) - else: - module_sd = module.state_dict(prefix='', keep_vars=True) - module_sharded_sd = make_sharded_tensors_for_checkpoint( - module_sd, prefix, {}, sharded_offsets - ) - return module_sharded_sd +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Utilities for transformer layers.""" +from functools import lru_cache +from operator import itemgetter +from typing import Any, Dict, Iterable, Optional, Tuple, Union + +import torch + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedStateDict, StateDict +from megatron.core.jit import jit_fuser +from megatron.core.utils import ( + make_sharded_tensor_for_checkpoint, + make_tp_sharded_tensor_for_checkpoint, +) + + +def get_linear_layer(rows, columns, init_method, perform_initialization=True): + """Simple linear layer with weight initialization.""" + layer = torch.nn.Linear(rows, columns) + if perform_initialization: # Take from modelparallel config + init_method(layer.weight) + with torch.no_grad(): + layer.bias.zero_() + return layer + + +@lru_cache(maxsize=32) +def get_default_causal_mask(sq: int) -> torch.Tensor: + """Return the causal upper triangular mask for softmax input.""" + return torch.triu(torch.ones(sq, sq, device="cuda"), diagonal=1).bool() + + +# pylint: disable=missing-function-docstring +def attention_mask_func(attention_scores, attention_mask): + attention_scores.masked_fill_(attention_mask, -10000.0) + return attention_scores + + +@jit_fuser +def gelu_impl(x): + """OpenAI's gelu implementation.""" + return 0.5 * x * (1.0 + torch.tanh(0.7978845608028654 * x * (1.0 + 0.044715 * x * x))) + + +# pylint: disable=missing-function-docstring +def openai_gelu(x): + return gelu_impl(x) + + +# This is actually Python equivalent of torch.nn.functional.gelu(), also with +# type hints for ONNX exporter +# pylint: disable=missing-function-docstring +@jit_fuser +def erf_gelu(x): + return ( + x * 0.5 * (torch.erf(x / 1.41421).to(dtype=x.dtype) + torch.ones_like(x).to(dtype=x.dtype)) + ) + + +def make_sharded_tensors_for_checkpoint( + state_dict: StateDict, + prefix: str, + tensor_parallel_layers_axis_map: Optional[Dict[str, int]] = None, + sharded_offsets: Iterable[Tuple[int, int, int]] = (), + extra_state_suffix: str = '_extra_state', +): + """Wraps tensors from transformer layers with ShardedTensor or ShardedObject. + + For a given `state_dict`, wraps: + - all _extra_states with ShardedObject + - all tensors specified in tensor_parallel_layers_axis_map with TP and DP sharded ShardedTensor + - other values with DP sharded ShardedTensor + + Args: + state_dict (StateDict): state_dict to convert + prefix (str): prefix appended to keys in final state dict + tensor_parallel_layers_axis_map (Dict[str, int], optional): dict mapping layer + names to the axis for TP sharding + sharded_offsets (Iterable[Tuple[int, int, int]], optional): sharding already + applied (e.g. PP related), passed along to ShardedTensor + extra_state_suffix (str, default = '_extra_state'): layers with this + suffix will be wrapped with ShardedObject instead of ShardedTensor. 
+ + """ + + if tensor_parallel_layers_axis_map is None: + tensor_parallel_layers_axis_map = {} + + sharded_state_dict = {} + for layer_name in state_dict.keys(): + tensor = state_dict[layer_name] + layer_key = f'{prefix}{layer_name}' + + if layer_name.endswith(extra_state_suffix): + sharded_state_dict[layer_key] = make_sharded_object_for_checkpoint( + tensor, layer_key, sharded_offsets + ) + + elif layer_name in tensor_parallel_layers_axis_map: + tp_axis = tensor_parallel_layers_axis_map[layer_name] + sharded_state_dict[layer_key] = make_tp_sharded_tensor_for_checkpoint( + tensor, layer_key, tp_axis, prepend_offsets=sharded_offsets + ) + + else: + sharded_state_dict[layer_key] = make_sharded_tensor_for_checkpoint( + tensor, layer_key, prepend_offsets=sharded_offsets + ) + + return sharded_state_dict + + +def make_sharded_object_for_checkpoint( + obj: Any, + key: str, + sharded_offsets: Iterable[Tuple[int, int, int]] = (), + replica_id: Union[None, int, Tuple[int, ...]] = None, + **kwargs, +): + """Helper for instantiating a non-sharded ShardedObject (replicated across TP and DP group). + + Args: + obj (object): any object to be sharded + key (str): unique identifier of the object + sharded_offsets (Iterable[Tuple[int, int, int]]): offsets normally + prepended to ShardedTensors, will be used as global offsets for + ShardedObject + replica_id (Union[None, int, Tuple[int, ...]]): replica id + """ + is_obj_fully_sharded = hasattr(obj, 'fully_shard_param_local_index') + assert not is_obj_fully_sharded, f"Fully sharded object not supported: {key}" + + if replica_id is None: + replica_id = ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_data_parallel_rank(with_context_parallel=True), + ) + + return ShardedObject(key, obj, *_get_extra_state_offsets(sharded_offsets), replica_id, **kwargs) + + +def _get_extra_state_offsets( + sharded_offsets: Iterable[Tuple[int, int, int]] +) -> Tuple[Tuple[int, ...], Tuple[int, ...]]: + """Turns ShardedTensor offsets into offsets suitable for ShardedObject.""" + if sharded_offsets: + sharded_offsets = sorted(sharded_offsets, key=itemgetter(0)) # sort by axis + axis, extra_state_offset, extra_state_shape = zip(*sharded_offsets) + assert list(axis) == list( + range(len(axis)) + ), f'Expected contiguous axis for offsets: {sharded_offsets}' + else: + extra_state_shape = (1,) + extra_state_offset = (0,) + return extra_state_shape, extra_state_offset + + +def sharded_state_dict_default( + module: torch.nn.Module, + prefix: str = '', + sharded_offsets: Tuple[Tuple[int, int, int]] = (), + metadata: Optional[dict] = None, +) -> ShardedStateDict: + """Provides implementation for sharded_state_dict method for non-MegatronModules. + + Tries to call `module.sharded_state_dict` when possible, + otherwise uses regular state dict and assumes tensors are replicated across TP and DP. + + `keep_vars=True` is passed to module.state_dict so that optimizer states + can be sharded later on. + + Args: + module (torch.nn.Module): module which sharded state dict we want to obtain + prefix (str): prefix for the state dict keys + sharded_offsets (Tuple[Tuple[int, int, int]], optional): sharding already + applied (e.g. PP related) by sup-modules. 
Passed along to ShardedTensor + metadata (dict, optional): metadata passed to module sharded_state_dict method + + Returns: + dict: dictionary of state dict keys mapped to ShardedTensors + """ + + if hasattr(module, 'sharded_state_dict'): + module_sharded_sd = module.sharded_state_dict( + prefix=prefix, sharded_offsets=sharded_offsets, metadata=metadata + ) + else: + module_sd = module.state_dict(prefix='', keep_vars=True) + module_sharded_sd = make_sharded_tensors_for_checkpoint( + module_sd, prefix, {}, sharded_offsets + ) + return module_sharded_sd diff --git a/megatron/core/utils.py b/megatron/core/utils.py index 3bb2804..0ad314a 100644 --- a/megatron/core/utils.py +++ b/megatron/core/utils.py @@ -1,1453 +1,1513 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -"""Utility functions used throughout Megatron core""" -import array -import hashlib -import logging -import math -import operator -import queue -import socket -import sys -import threading -import time -import traceback -from dataclasses import dataclass -from datetime import datetime -from functools import reduce -from importlib.metadata import version -from types import TracebackType -from typing import Any, Dict, List, Optional, Tuple, Type, Union - -import torch -from packaging.version import Version as PkgVersion - -try: - from torch.distributed._tensor import DTensor - - HAVE_DTENSOR = True -except ImportError: - HAVE_DTENSOR = False - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing.mapping import ShardedTensor - -logger = logging.getLogger(__name__) - - -try: - _torch_version = PkgVersion(torch.__version__) -except: - # This is a WAR for building docs, where torch is not actually imported - _torch_version = PkgVersion("0.0.0") -_te_version = None - - -def get_torch_version(): - """Get pytorch version from __version__; if not available use pip's. Use caching.""" - - def get_torch_version_str(): - import torch - - if hasattr(torch, '__version__'): - return str(torch.__version__) - else: - return version("torch") - - global _torch_version - if _torch_version is None: - _torch_version = PkgVersion(get_torch_version_str()) - return _torch_version - - -def get_te_version(): - """Get TE version from __version__; if not available use pip's. 
Use caching.""" - - def get_te_version_str(): - import transformer_engine as te - - if hasattr(te, '__version__'): - return str(te.__version__) - else: - return version("transformer-engine") - - global _te_version - if _te_version is None: - _te_version = PkgVersion(get_te_version_str()) - return _te_version - - -def is_te_min_version(version, check_equality=True): - """Check if minimum version of `transformer-engine` is installed.""" - if check_equality: - return get_te_version() >= PkgVersion(version) - return get_te_version() > PkgVersion(version) - - -def get_torch_version(): - """Get torch version from __version__.""" - - global _torch_version - return _torch_version - - -def is_torch_min_version(version, check_equality=True): - """Check if minimum version of `torch` is installed.""" - if check_equality: - return get_torch_version() >= PkgVersion(version) - return get_torch_version() > PkgVersion(version) - - -def ensure_divisibility(numerator, denominator): - """Ensure that numerator is divisible by the denominator.""" - assert numerator % denominator == 0, "{} is not divisible by {}".format(numerator, denominator) - - -def divide(numerator, denominator): - """Ensure that numerator is divisible by the denominator and return - the division value.""" - ensure_divisibility(numerator, denominator) - return numerator // denominator - - -def get_attr_wrapped_model(model, attr, allow_none=True, return_model_obj=False): - """Get an attribute from a wrapped model. - If return_model_obj is true, return the object that has the 'attr' attribute; - otherwise, return the attribute directly.""" - if isinstance(model, list): - raise RuntimeError("_get_attr_wrapped_model given a list of models") - - if allow_none: - - def condition(model, attr): - return not hasattr(model, attr) - - else: - - def condition(model, attr): - return getattr(model, attr, None) is None - - while condition(model, attr): - if not hasattr(model, "module"): - raise RuntimeError(f"_get_attr_wrapped_model couldn't find attribute {attr}") - - model = model.module - - if return_model_obj: - return model - return getattr(model, attr) - - -def get_model_type(model): - """Returns model_type attribute""" - return get_attr_wrapped_model(model, 'model_type') - - -def get_model_xattn(model): - """Returns whether the model has the xattn_needed attribute""" - try: - return get_attr_wrapped_model(model, 'xattn_needed') - except RuntimeError: - return False - - -def get_model_config(model): - """Returns the config attribute, allowed to return None""" - return get_attr_wrapped_model(model, 'config', allow_none=False) - - -class GlobalMemoryBuffer: - """Global buffer to avoid dynamic memory allocations. - Caller should ensure that buffers of the same name - are not used concurrently.""" - - def __init__(self): - self.buffer = {} - - def get_tensor(self, tensor_shape, dtype, name): - """ - Returns (potentially) a sub-tensor from the self.buffer for the given shape. - """ - required_len = reduce(operator.mul, tensor_shape, 1) - if ( - self.buffer.get((name, dtype), None) is None - or self.buffer[(name, dtype)].numel() < required_len - ): - self.buffer[(name, dtype)] = torch.empty( - required_len, dtype=dtype, device=torch.cuda.current_device(), requires_grad=False - ) - - return self.buffer[(name, dtype)][0:required_len].view(*tensor_shape) - - -def _kernel_make_viewless_tensor(inp, requires_grad): - """Make a viewless tensor. 
- - View tensors have the undesirable side-affect of retaining a reference - to the originally-viewed tensor, even after manually setting the '.data' - field. This method creates a new tensor that links to the old tensor's - data, without linking the viewed tensor, referenced via the '._base' - field. - """ - out = torch.empty((1,), dtype=inp.dtype, device=inp.device, requires_grad=requires_grad) - out.data = inp.data - return out - - -class MakeViewlessTensor(torch.autograd.Function): - """ - Autograd function to make a viewless tensor. - - This function should be used in cases where the computation graph needs - to be propagated, but we only want a viewless tensor (e.g., - ParallelTransformer's hidden_states). Call this function by passing - 'keep_graph = True' to 'make_viewless_tensor()'. - """ - - @staticmethod - def forward(ctx, inp, requires_grad): - """Runs the fwd pass of _kernel_make_viewless_tensor""" - return _kernel_make_viewless_tensor(inp, requires_grad) - - @staticmethod - def backward(ctx, grad_output): - """No-op""" - return grad_output, None - - -def make_viewless_tensor(inp, requires_grad, keep_graph): - """ - Entry-point for creating viewless tensors. - - This method should be used, rather than calling 'MakeViewlessTensor' - or '_kernel_make_viewless_tensor' directly. This method acts as a - switch for determining if an autograd function or a regular method - should be used to create the tensor. - """ - - # return tensor as-is, if not a 'view' - if inp._base is None: - return inp - - # create viewless tensor - if keep_graph: - return MakeViewlessTensor.apply(inp, requires_grad) - else: - return _kernel_make_viewless_tensor(inp, requires_grad) - - -def assert_viewless_tensor(tensor, extra_msg=None): - """Assert that a tensor is not a view (i.e., its '._base' field is - not set).""" - if isinstance(tensor, list): - [assert_viewless_tensor(t) for t in tensor] - return tensor - if not isinstance(tensor, torch.Tensor): - return tensor - assert tensor._base is None, ( - "Ensure tensor._base is None before setting tensor.data or storing " - "tensor to memory buffer. Otherwise, a memory leak will occur (and " - f"likely accumulate over iterations). {extra_msg}" - ) - return tensor - - -def safely_set_viewless_tensor_data(tensor, new_data_tensor): - """Safely set tensor's '.data' field. - - Check first that the tensor is viewless (i.e., '._base' not set). If not, - raise an exception. - """ - assert_viewless_tensor( - tensor, - extra_msg="FYI, tensor._base has shape %s, and new_data_tensor has shape %s." - % ("--" if tensor._base is None else tensor._base.shape, new_data_tensor.shape), - ) - tensor.data = new_data_tensor - - -def init_method_normal(sigma): - """Init method based on N(0, sigma).""" - - def init_(tensor): - return torch.nn.init.normal_(tensor, mean=0.0, std=sigma) - - return init_ - - -def scaled_init_method_normal(sigma, num_layers): - """Init method based on N(0, sigma/sqrt(2*num_layers).""" - std = sigma / math.sqrt(2.0 * num_layers) - - def init_(tensor): - return torch.nn.init.normal_(tensor, mean=0.0, std=std) - - return init_ - - -def log_single_rank(logger: logging.Logger, *args: Any, rank: int = 0, **kwargs: Any): - """If torch distributed is initialized, log only on rank - - Args: - logger (logging.Logger): The logger to write the logs - - args (Tuple[Any]): All logging.Logger.log positional arguments - - rank (int, optional): The rank to write on. Defaults to 0. 
- - kwargs (Dict[str, Any]): All logging.Logger.log keyword arguments - """ - if torch.distributed.is_initialized(): - if torch.distributed.get_rank() == rank: - logger.log(*args, **kwargs) - else: - logger.log(*args, **kwargs) - - -def log_on_each_pipeline_stage(logger: logging.Logger, *args: Any, **kwargs: Any): - """Log on first rank in each pipeline stage - - Args: - logger (logging.Logger): The logger to write the logs - - args (Tuple[Any]): All logging.Logger.log positional arguments - - kwargs (Dict[str, Any]): All logging.Logger.log keyword arguments - """ - assert torch.distributed.is_initialized() - - if ( - parallel_state.get_data_parallel_rank(with_context_parallel=True) == 0 - and parallel_state.get_tensor_model_parallel_rank() == 0 - ): - logger.log(*args, **kwargs) - - -def check_param_hashes_across_dp_replicas( - model: List[torch.nn.Module], cross_check: bool = False -) -> bool: - """Computes hashes of all parameters in model, all-gathers hashes across DP replicas, - and then checks for equality between the locally-computed hashes and those of other ranks. - - NOTE: This function computes SHA-1 hashes on the CPU and thus needs to move all param - tensors from GPU to CPU first; as a result, this function is not intended to be called - very frequently in the main training loop. - - Args: - model (List[torch.nn.Module]): List of model chunks whose parameter hashes need to - be checked. - cross_check (bool): If true, will check whether hashes match across all DP replicas. - - Returns: - True if all param hashes match with corresponding hash on DP replica 0 or - across all replicas if cross_check is enabled, False otherwise. - """ - - # Compute per-parameter hashes on this rank. - # Keep track of expert and non-expert parameters separately since they need to be - # all-gathered across different sets of ranks. - non_expert_params, expert_params = [], [] - local_non_expert_param_hashes, local_expert_param_hashes = [], [] - for model_chunk_id, model_chunk in enumerate(model): - for param_name, param in model_chunk.named_parameters(): - param_hash = torch.frombuffer( - array.array( - 'B', hashlib.sha1(param.data.to("cpu").float().numpy(force=True)).digest() - ), - dtype=torch.uint8, - ) - if getattr(param, 'allreduce', True): - non_expert_params.append((model_chunk_id, param_name, param)) - local_non_expert_param_hashes.append(param_hash) - else: - expert_params.append((model_chunk_id, param_name, param)) - local_expert_param_hashes.append(param_hash) - - # Use data-modulo-expert parallel group to all-gather expert param hashes, regular - # data-parallel group for non-expert param hashes. - all_param_hashes_match = True - for params, local_param_hashes, all_gather_group in zip( - [non_expert_params, expert_params], - [local_non_expert_param_hashes, local_expert_param_hashes], - [ - parallel_state.get_data_parallel_group_gloo(), - parallel_state.get_expert_data_parallel_group_gloo(), - ], - ): - # Collect per-parameter hashes across all ranks in group. - assert len(params) == len(local_param_hashes) - if len(params) == 0: - continue - local_param_hashes = torch.stack(local_param_hashes) - all_param_hashes = [ - torch.zeros_like(local_param_hashes) - for _ in range(torch.distributed.get_world_size(all_gather_group)) - ] - torch.distributed.all_gather(all_param_hashes, local_param_hashes, group=all_gather_group) - - # Make sure local per-parameter hash matches DP rank 0. 
- param_hashes_match = torch.equal(local_param_hashes, all_param_hashes[0]) - if not param_hashes_match: - for i, (model_chunk_id, param_name, param) in enumerate(params): - if not torch.equal(local_param_hashes[i], all_param_hashes[0][i]): - rank = torch.distributed.get_rank() - logger.info( - f"[Rank {rank}] Hash not matching for {param_name} in model chunk" - f"{model_chunk_id}" - ) - if cross_check: - # Make sure all ranks have the same hash. - all_param_hashes_match &= all( - map(lambda x: torch.equal(local_param_hashes, x), all_param_hashes) - ) - else: - all_param_hashes_match &= param_hashes_match - - return all_param_hashes_match - - -def make_tp_sharded_tensor_for_checkpoint( - tensor, key, tp_axis=0, replica_id=None, prepend_offsets=(), **kwargs -): - """Helper for instantiating a ShardedTensor where the `tp_axis` dimension - is sharded across TP group. - - Optionally, can provide offsets which prepend new dimensions to the tensor. - """ - prepend_axis_num = len(prepend_offsets) - - new_offsets = [] - tp_rank = parallel_state.get_tensor_model_parallel_rank() - dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) - tp_size = parallel_state.get_tensor_model_parallel_world_size() - dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) - dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) - - new_offsets.append((tp_axis + prepend_axis_num, tp_rank, tp_size)) - - if HAVE_DTENSOR and isinstance(tensor, DTensor): - # TP + FSDP2 sharding - dp_replica_id = 0 - tensor = tensor._local_tensor - - if tp_axis == 0: - # both FSDP2 and TP shards axis 0 - # default MCore uses tp-cp-ep-dp-pp - # FSDP2 is compatibile with TP, CP - new_offsets[0] = (prepend_axis_num, tp_rank * dp_size + dp_rank, tp_size * dp_size) - else: - # FSDP2 shards axis 0 and TP shards some other axis - new_offsets.append((prepend_axis_num, dp_rank, dp_size)) - - if replica_id is None: - replica_id = (0, 0, dp_replica_id) - - return ShardedTensor.from_rank_offsets( - key, - tensor, - *prepend_offsets, - *new_offsets, - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - **kwargs, - ) - - -def make_sharded_tensor_for_checkpoint(tensor, key, prepend_offsets=(), replica_id=None, **kwargs): - """Helper for instantiating a non-sharded ShardedTensor (replicated across TP and DP group). - - Optionally, can provide offsets which prepend new dimensions to the tensor. 
- """ - - prepend_axis_num = len(prepend_offsets) - - new_offsets = [] - dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) - dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) - dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) - - if HAVE_DTENSOR and isinstance(tensor, DTensor): - # FSDP2 sharding - dp_replica_id = 0 - tensor = tensor._local_tensor - new_offsets.append((prepend_axis_num, dp_rank, dp_size)) - - if replica_id is None: - replica_id = (0, parallel_state.get_tensor_model_parallel_rank(), dp_replica_id) - - return ShardedTensor.from_rank_offsets( - key, - tensor, - *prepend_offsets, - *new_offsets, - replica_id=replica_id, - prepend_axis_num=prepend_axis_num, - **kwargs, - ) - - -def to_local_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: - """Returns the local shard of the given tensor if it is a DTensor.""" - with torch.no_grad(): - return tensor.to_local() if HAVE_DTENSOR and isinstance(tensor, DTensor) else tensor - - -def get_data_parallel_group_if_dtensor( - tensor: Union[torch.Tensor, "DTensor"], data_parallel_group: "ProcessGroup" = None -) -> Optional["ProcessGroup"]: - """Gets the data parallel group of the given tensor if it is a DTensor.""" - if HAVE_DTENSOR and isinstance(tensor, DTensor): - current_group = tensor.device_mesh.get_group() - assert data_parallel_group is None or current_group == data_parallel_group - return current_group - return None - - -def prepare_input_tensors_for_wgrad_compute(grad_output, all_gathered_input): - """Ensure grad_output is stored in a contiguous buffer.""" - # Doing gather + slicing during the NeMo forward pass can make this tensor - # not be contiguous. PyTorch only checks if the tensor is contiguous, and only - # clones it if it's not contiguous: - # https://github.com/pytorch/pytorch/blob/c47cf9bc7f9e02f649ab4ed53fe4d35732c92ab6/torch/_refs/__init__.py#L2761 - grad_output = grad_output.contiguous() - # Convert the tensor shapes to 2D for execution compatibility - if grad_output.dim() == 3: - grad_output = grad_output.view( - grad_output.shape[0] * grad_output.shape[1], grad_output.shape[2] - ) - all_gathered_input = all_gathered_input.view( - all_gathered_input.shape[0] * all_gathered_input.shape[1], all_gathered_input.shape[2] - ) - - return grad_output, all_gathered_input - - -if is_torch_min_version("1.13.0"): - dist_all_gather_func = torch.distributed.all_gather_into_tensor -else: - dist_all_gather_func = torch.distributed._all_gather_base - - -def drain_embedding_wgrad_compute(config, embedding_activation_buffer, grad_output_buffer, weight): - """Helper for performing embedding wgrad GEMM's during the pipeline drain phase, pipelines the - AllGather and GEMM's. - - Should only be used when pipeline model parallelism and gradient accumulation - fusion are enabled. - """ - - assert len(embedding_activation_buffer) == len( - grad_output_buffer - ), "Length of activation and gradient buffers need to be equal!" 
- - import fused_weight_gradient_mlp_cuda - - from megatron.core.parallel_state import ( - get_global_memory_buffer, - get_tensor_model_parallel_group, - get_tensor_model_parallel_world_size, - ) - - input = embedding_activation_buffer.pop(0) - world_size = get_tensor_model_parallel_world_size() - dim_size = list(input.size()) - dim_size[0] = dim_size[0] * world_size - - all_gathered_input = [None, None] - if config.sequence_parallel: - all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, "mpu_0") - handle = dist_all_gather_func( - all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=False - ) - - all_gathered_input[0] = all_gather_buffer - all_gather_buffer = None - else: - all_gathered_input[0] = input - - input = None - - def wgrad_compute(all_gathered_input, grad_output, weight): - - grad_output, all_gathered_input = prepare_input_tensors_for_wgrad_compute( - grad_output, all_gathered_input - ) - - if config.gradient_accumulation_fusion: - if weight.main_grad.dtype == torch.float32: - fused_weight_gradient_mlp_cuda.wgrad_gemm_accum_fp32( - all_gathered_input, grad_output, weight.main_grad - ) - elif weight.main_grad.dtype in (torch.float16, torch.bfloat16): - fused_weight_gradient_mlp_cuda.wgrad_gemm_accum_fp16( - all_gathered_input, grad_output, weight.main_grad - ) - else: - raise RuntimeError("Unsupported gradient type for gradient accumulation fusion") - - # We have all_gathered_input list acting as a double buffer here, - # since we are pipelining the AllGather and GEMM,one buffer all gathers - # the input while the other buffer reads from it for the GEMM. We use i - # and (i+1) for indexing to enable this double buffering. - for i in range(len(embedding_activation_buffer)): - input = embedding_activation_buffer.pop(0) - if config.sequence_parallel: - name = "mpu_" + str((i + 1) % 2) - all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, name) - handle = dist_all_gather_func( - all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=True - ) - - all_gathered_input[(i + 1) % 2] = all_gather_buffer - all_gather_buffer = None - else: - all_gathered_input[(i + 1) % 2] = input - - grad_output = grad_output_buffer.pop(0) - wgrad_compute(all_gathered_input[i % 2], grad_output, weight) - drain_idx = (i + 1) % 2 - input, all_gathered_input[i % 2], grad_output = None, None, None - - if config.sequence_parallel: - handle.wait() - - grad_output = grad_output_buffer.pop(0) - wgrad_compute(all_gathered_input[drain_idx], grad_output, weight) - input, all_gathered_input[drain_idx], grad_output = None, None, None - - -def local_multi_tensor_applier(op, noop_flag_buffer, tensor_lists, *args): - """Multi tensor op applier""" - return op(2048 * 32, noop_flag_buffer, tensor_lists, *args) - - -# computes l2 norm for a list of contiguous tensors -# works as a drop-in replacement for amp_C.multi_tensor_l2norm -def local_multi_tensor_l2_norm(chunk_size, noop_flag, tensor_lists, per_tensor, *args): - """ - Computes l2 norm for a list of contiguous tensors - works as a drop-in replacement for amp_C.multi_tensor_l2norm - """ - l2 = [[(torch.norm(tensor)) for tensor in tensor_list] for tensor_list in tensor_lists] - l2_reduced = torch.norm(torch.tensor(l2)) - l2_cuda = torch.tensor([float(l2_reduced)], dtype=torch.float, device='cuda') - return l2_cuda, None - - -# works as a drop-in replacement for amp_C.multi_tensor_scale -def local_multi_tensor_scale(chunk_size, noop_flag, tensor_lists, scale): - """Works as 
a drop-in replacement for amp_C.multi_tensor_scale.""" - for src, dst in zip(tensor_lists[0], tensor_lists[1]): - dst.copy_(src * scale) - - -class _ValueWithRank: - """This is an internal class, not for use outside this module - - Attributes: - _rank (int): rank for the value - _value (float) : the value it stores, eg elapsed time - _unit (str) : unit for the value - """ - - def __init__(self, value: float, rank: int, unit: str = "") -> None: - """Initializer - - Args: - _value (float): the initial value with which it is inited - _rank (int): the rank number - _unit (str) : the unit of the value, eg ms or flops - """ - self._rank = rank - self._value = value - self._unit = unit - - def __lt__(self, other) -> bool: - """Check if value of self is smaller than other's value - - Args: - other (_ValueWithRank): The other object to compare with - - Returns: - bool: True if lhs._value of operand is less than rhs._value, else False - """ - return self._value < other._value - - def __gt__(self, other) -> bool: - """Check if value of self is larger than other's value - - Args: - other (_ValueWithRank): The other object to compare with - - Returns: - bool: True if lhs._value of operand is greater than rhs._value, else False - """ - return self._value > other._value - - def __call__(self) -> Tuple[float, int, str]: - """Returns the value, the rank, and unit as a Tuple - - Returns: - Tuple[float, int, str]: value, rank, unit - """ - return self._value, self._rank, self._unit - - def __str__(self) -> str: - """String representation of the object - - Returns: - str: strigified object - """ - - return f"{self._value:.2f}{self._unit}/{self._rank}" - - -@dataclass -class _StragglerData: - """This is an internal dataclass, not for use outside this module - - Attributes: - min_elapsed (_ValueWithRank) min iteration time across all ranks - max_elapsed (_ValueWithRank) max iteration time across all ranks - min_btime (_ValueWithRank) min cpu time across all ranks - max_btime (_ValueWithRank) max cpu time across all ranks - min_temp (_ValueWithRank): min gpu temp across all ranks - max_temp (_ValueWithRank): max gpu temp across all ranks - min_power (_ValueWithRank) min gpu power across all ranks - max_power (_ValueWithRank) max gpu power across all ranks - min_util (_ValueWithRank): min gpu util across all ranks - max_util (_ValueWithRank): max gpu util across all ranks - min_clock (_ValueWithRank): min gpu clock across all ranks - max_clock (_ValueWithRank) max gpu clock across all ranks - aflops (List[_ValueWithRank]): sorted array of (_ValueWithRank) - """ - - # gemm time - min_elapsed = _ValueWithRank(sys.float_info.max, 0, "ms") - max_elapsed = _ValueWithRank(sys.float_info.min, 0, "ms") - # get_batch time - min_btime = _ValueWithRank(sys.float_info.max, 0, "us") - max_btime = _ValueWithRank(sys.float_info.min, 0, "us") - # temp - min_temp = _ValueWithRank(sys.float_info.max, 0, "C") - max_temp = _ValueWithRank(sys.float_info.min, 0, "C") - # power - min_power = _ValueWithRank(sys.float_info.max, 0, "W") - max_power = _ValueWithRank(sys.float_info.min, 0, "W") - # util - min_util = _ValueWithRank(sys.float_info.max, 0, "%") - max_util = _ValueWithRank(sys.float_info.min, 0, "%") - # clock - min_clock = _ValueWithRank(sys.float_info.max, 0, "MHz") - max_clock = _ValueWithRank(sys.float_info.min, 0, "MHz") - aflops: Union[List[_ValueWithRank], None] = None - - -class StragglerDetector: - """Singleton Class implementing per rank Straggler Detector - - It use cuda events to time operation of choice using the - 
start and stop methods which can be directly invoked using - the class instance or can be used like a python context. - After collection, a report() method is available to display - the collected metrics. It is only supported if CUDA is - available. megatron/core/README_STRAGGLER.md for more info - - Note: - The instance and class attributes mentioned below are all - private to the class and has no use outside the class - - Attributes: - _off (bool): current state of the toggle - start (FunctionType): start method - stop (FunctionType): stop method - world (int): world size - rank (int): rank for this instance - mmcnt (int): number of ranks to report - port (int): control port - amp (float): amplification factor for TFLOPs, default 3.0 - toggle (bool): whether to start/stop detector collection - bdata (bool): when true, just collect get_batch - dev (int): cuda device - evt_q (LifoQueue): cuda event queue - start_gemm_ev (list[torch.cuda.Event]): cuda start event - stop_gemm_ev (list[torch.cuda.Event]): cuda stop event - start_data_ev (list[torch.cuda.Event]): cuda start event - stop_data_ev (list[torch.cuda.Event]): cuda stop event - start_gemm_tm (list[int]): start time (wallclock) - stop_gemm_tm (list[int]): stop time (wallclock) - start_data_tm (list[int]): start time for get_batch - stop_data_tm (list[int]): stop time for get_batch - sock (socket): the controller socket - ctrlr (Thread): the controller thread - """ - - _configured = False - """Indicates if the singleton instance is configured or not - """ - - def __new__(cls: Type["StragglerDetector"]) -> "StragglerDetector": - """Constructor - Creates an instance of the class if not created - - Args: - cls (Type['StragglerDetector']): The class type - - Returns: - StragglerDetector: the class instance - """ - - if not hasattr(cls, "_instance"): - cls._instance = super(StragglerDetector, cls).__new__(cls) - return cls._instance - - def __init__(self) -> None: - """Initializer - - The inital state of the StragglerDetector instance is disabled. - The enabled state is indicated using self._off member variable - and the proerty enabled. - """ - self._off: bool = True - self.start = self.null_method - self.stop = self.null_method - self.world: int = 0 - self.rank: int = 0 - self.mmcnt: int = 1 - self.port: int = 0 - self.amp: float = 3.0 - self.toggle: bool = False - self.bdata: bool = False - self.dev: Union[torch.device, int, None] = None - self.evt_q: Union[queue.LifoQueue, None] = None - self.start_gemm_ev: List[torch.cuda.Event] = [] - self.stop_gemm_ev: List[torch.cuda.Event] = [] - self.start_data_ev: List[torch.cuda.Event] = [] - self.stop_data_ev: List[torch.cuda.Event] = [] - self.start_gemm_tm: List[int] = [] - self.stop_gemm_tm: List[int] = [] - self.start_data_tm: List[int] = [] - self.stop_data_tm: List[int] = [] - self.sock: Union[socket.socket, None] = None - self.ctrlr: Union[threading.Thread, None] = None - - def configure( - self, - world: int, - rank: int, - mmcnt: int = 1, - amp: float = 3.0, - port: int = 65535, - prefill: int = 1024, - enabled: bool = False, - ) -> None: - """This method is called to configure the Singleton instance - - It should be called once per instantiation per process. - - Note: - The constructor keeps the state of instance disabled - i.e no collection will happen even when start/stop methods are - called. 
Only when enabled is True (self._off is True), the - start/stop method pointers get assigned the real collection - methods, otherwise they are initialized with null_method - - Args: - world (int): World Size - rank (int): The rank of this trainer - mmcnt (int, optional): Number of ranks to print for showing Min/Max Etpt. - Defaults to 1. - amp (float, optional): Set to 3.0 if we only use timers in fwd pass. - Defaults to 3.0. - port (int, optional): Control port, useful only for rank-0. Defaults to 65535. - prefill (int, optional): How many Events to pre-populate. Defaults to 1024. - enabled (bool, optional): Whether or not collection is enabled on startup. - Defaults to False. - """ - if StragglerDetector._configured: - # don't throw - return - StragglerDetector._configured = True - self.bdata = False - self.start = self.null_method - self.stop = self.null_method - self._off = True - # No CUDA, No Support - if torch.cuda.is_available(): - self._off = not enabled - self.world = world - self.rank = rank - self.mmcnt = mmcnt if mmcnt > 1 else 1 - self.amp = amp - self.port = port - self.toggle = False - self.bdata = False - self.evt_q = queue.LifoQueue() - self.start_gemm_ev = [] - self.stop_gemm_ev = [] - self.start_data_ev = [] - self.stop_data_ev = [] - self.start_gemm_tm = [] - self.stop_gemm_tm = [] - self.start_data_tm = [] - self.stop_data_tm = [] - backend = torch.distributed.get_backend() - if backend == "nccl": - self.dev = torch.cuda.current_device() - else: - self.dev = torch.device("cpu") - # cache some events - for _ in range(prefill): - self.evt_q.put(torch.cuda.Event(enable_timing=True)) - if self.rank == 0: - # Start the controller - self._controller() - if not self._off: - self.start = self.start_method - self.stop = self.stop_method - - def reset(self) -> None: - """This method is called to reset the metrics state of the instance - - It is generally called from within elapsed() after extracting per rank metrics. - """ - if self._off: - return - # Pool them - if self.evt_q is not None: - _ = [self.evt_q.put(ev) for ev in self.start_gemm_ev] - _ = [self.evt_q.put(ev) for ev in self.stop_gemm_ev] - _ = [self.evt_q.put(ev) for ev in self.start_data_ev] - _ = [self.evt_q.put(ev) for ev in self.stop_data_ev] - self.start_gemm_ev = [] - self.stop_gemm_ev = [] - self.start_data_ev = [] - self.stop_data_ev = [] - # Use regular timers - self.start_gemm_tm = [] - self.stop_gemm_tm = [] - self.start_data_tm = [] - self.stop_data_tm = [] - self.bdata = False - - def start_method(self) -> None: - """This method adds the start timers. - - Both cuda event and perf_counter are added. If bdata is set to - true from __call__, this method skips inserting cuda - timer. 
This way it can be used to measure time spent on - CPU - generally useful for timing get_batch() - """ - # Not reentrant - if self.evt_q is not None and self.evt_q.qsize() > 1: - sev = self.evt_q.get() # no try-catch - eev = self.evt_q.get() # no try-catch - else: - sev = torch.cuda.Event(enable_timing=True) - eev = torch.cuda.Event(enable_timing=True) - # First check if this start is for data - if self.bdata: - self.start_data_ev.append(sev) - self.stop_data_ev.append(eev) - self.start_data_tm.append(0) - self.stop_data_tm.append(0) - idx = len(self.stop_data_tm) - 1 - self.start_data_tm[idx] = time.perf_counter_ns() - self.start_data_ev[idx].record() - self.bdata = False - return - self.start_gemm_ev.append(sev) - self.stop_gemm_ev.append(eev) - self.start_gemm_tm.append(0) - self.stop_gemm_tm.append(0) - idx = len(self.stop_gemm_tm) - 1 - self.start_gemm_tm[idx] = time.perf_counter_ns() - self.start_gemm_ev[idx].record() - - def stop_method(self) -> None: - """This method adds the stop timers. - - Both cuda event and perf_counter are added. If bdata is set to - true from __call__, this method skips inserting cuda - timer. Also see start_method() - """ - # Not reentrant - # First check if this stop is for data - idx = len(self.stop_data_tm) - 1 - if idx >= 0 and self.stop_data_tm[idx] == 0: - self.stop_data_tm[idx] = time.perf_counter_ns() - self.stop_data_ev[idx].record() - return - idx = len(self.stop_gemm_tm) - 1 - if idx >= 0 and self.stop_gemm_tm[idx] == 0: - self.stop_gemm_tm[idx] = time.perf_counter_ns() - self.stop_gemm_ev[idx].record() - - def elapsed(self) -> Tuple[float, float, int, int, int, int]: - """This method is called from report(), or can be called directly - - It is called to collect all the elapsed time since last reset(). - It finally calls reset() - - Returns: - Tuple[float, float, int, int, int, int]: see below for returns - delta : time spent in kernel - batch_delta : time spent in get_batch - temp : observed gpu temp - power : observed gpu power - util : observed gpu utilization - clock : observed gpu clock - """ - if self._off: - # match with return below - return 0, 0, 0, 0, 0, 0 - ls_ev = len(self.start_gemm_ev) - le_ev = len(self.stop_gemm_ev) - ls_bs = len(self.start_data_ev) - ls_be = len(self.stop_data_ev) - delta = 0.0 - batch_delta = 0.0 - temp = 0 - power = 0 - clock = 0 - if ls_ev != le_ev: - logger.warning(f"Event Start/Stop out of sync {ls_ev}/{le_ev}") - elif ls_bs != ls_be: - logger.warning(f"get_batch Start/Stop out of sync {ls_bs}/{ls_be}") - else: - temp = torch.cuda.temperature() - power = torch.cuda.power_draw() - util = torch.cuda.utilization() - clock = torch.cuda.clock_rate() - torch.cuda.synchronize() - # Process Events - for i in range(ls_ev): - e_ev = self.start_gemm_ev[i].elapsed_time(self.stop_gemm_ev[i]) - e_tm = (self.stop_gemm_tm[i] - self.start_gemm_tm[i]) / 1e6 # ns to ms - # Pick the larger of Event and perf_counter time? 
- delta += max(e_ev, e_tm) - # Process get_batch - for i in range(ls_bs): - b_ev = self.start_data_ev[i].elapsed_time(self.stop_data_ev[i]) - b_tm = (self.stop_data_tm[i] - self.start_data_tm[i]) / 1e6 # ns to ms - # data fetching has prefetch, hence take the max, instead of avg - batch_delta = max(batch_delta, max(b_ev, b_tm)) - self.reset() # Prepare for next round - # time in ms, batch_delta in ms, check return above - return delta, batch_delta, temp, power, util, clock - - def report(self, total_flops: float = 0.0, log_interval: int = 0) -> bool: - """Function to log the min/max metircs and the associated rank over a time period - - It finds the slowest and fastest rank among all ranks. It should be - called by all ranks, but only rank-0 prints the analysis - At the end it checks, if the straggler detector should - remain active or if it should be deactivated. - - Args: - total_flops (float, optional): The theoretical flops over the period. Defaults to 0.0. - log_interval (int, optional): The training interval over which reporting is called(ms) - Defaults to 0. - - Returns: - bool: True if reported, else False - """ - ret = False - if not self._off and total_flops > 0.0 and log_interval > 0: - elapsed, btime, temp, power, util, clock = self.elapsed() # get raw time - # btime (get_batch time is max in the iteration) - ptime = elapsed / (log_interval * 1.0) # avg per iteration elapsed time, ms - api_flops = total_flops / (log_interval * 1.0) # avg per iteration flops, ms - apir_flops = api_flops / ( - ptime * 10**9 * self.world - ) # this is avg per iteration this rank's thruput, TFLOP/s (note 10**9), - et_flops = apir_flops / self.amp # Estimated TFLOPs, not tracing backward - - o_dt = self._min_max( - ptime, btime, float(temp), float(power), float(util), float(clock), et_flops - ) - if self.rank == 0 and o_dt is not None and o_dt.aflops is not None: - now = f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}]" - min_flops, min_frank, _ = o_dt.aflops[0]() - max_flops, max_frank, _ = o_dt.aflops[-1]() - logger.info( - f"{now} | " - f"MnRtt/Rnk: {o_dt.min_elapsed} | " - f"MxRtt/Rnk: {o_dt.max_elapsed} | " - f"MnPwr/Rnk: {o_dt.min_power} | " - f"MxPwr/Rnk: {o_dt.max_power} | " - f"MnTmp/Rnk: {o_dt.min_temp} | " - f"MxTmp/Rnk: {o_dt.max_temp} | " - f"MnUtl/Rnk: {o_dt.min_util} | " - f"MxUtl/Rnk: {o_dt.max_util} | " - f"MnClk/Rnk: {o_dt.min_clock} | " - f"MxClk/Rnk: {o_dt.max_clock} | " - f"MnDRtt/Rnk: {o_dt.min_btime} | " - f"MxDRtt/Rnk: {o_dt.max_btime} | " - f"MnEtpt/Rnk: {min_flops:.2f}TF/{min_frank} | " - f"MxEtpt/Rnk: {max_flops:.2f}TF/{max_frank}" - ) - if self.mmcnt > 1 and self.mmcnt < self.world: - line = f"^^^^ Bottom {self.mmcnt} Ranks with lowest Etpt(TF):" - for i in range(self.mmcnt): - line += f" {o_dt.aflops[i]}," - logger.info(line) - line = f"^^^^ Top {self.mmcnt} Ranks with highest Etpt(TF):" - shift = self.world - self.mmcnt - for i in range(self.mmcnt): - line += f" {o_dt.aflops[i+shift]}," - logger.info(line) - ret = True - - # Check/Communicate if tracking is turned off or on - self._check_toggle() - return ret - - def _check_toggle(self) -> None: - """Helper method to check if a request to toggle the collection state was made - - It checks iof collection state toggle req was made via the server listening on - rank-0 since last call to report(). Called by report(). 
Calling this method - indirectly from report() is the only way to activate the change that is made - via rank-0 - """ - # If no change just communicate the current - off = self._off - if self.rank == 0 and self.toggle: - off = not self._off - self.toggle = False - st = torch.tensor(off, dtype=torch.bool, device=self.dev) - torch.distributed.broadcast(st, 0) # Blocking - # save old switch - off = self._off - self._off = bool(st.item()) - if off != self._off: - if not self._off: - self.start = self.start_method - self.stop = self.stop_method - state = "ON" - else: - self.start = self.null_method - self.stop = self.null_method - state = "OFF" - if self.rank == 0: - logger.info(f"Toggling StragglerDetector State {state}") - - def _handler(self) -> None: - """Thread function for the controller. - - It is a tcp-server that listens on a port. Uses HTTP protocol. - If connected to it using curl, it indicates a toggle of the - collection state. The actual toggling happens at the end of - calling report() when _check_toggle() is called. - """ - resp = r"HTTP/1.0 200 OK\r\nConnection: Close\r\nContent-length: " - - if self.rank == 0: - state = "OFF" if self._off else "ON" - logger.info( - f"Controller ready to recv commands on port {self.port}. Current state {state}" - ) - while True and self.sock is not None: - try: - conn, _ = self.sock.accept() - _ = conn.recv(1024) - self.toggle = True - state = "ON" if self._off else "OFF" - msg = f"Will turn StragglerDetector {state} at next logging interval" - msg_len = len(msg) - final_resp = f"{resp}{msg_len}\r\n\r\n{msg}" - conn.send(final_resp.encode()) - conn.close() - logger.info(msg) - except Exception as err: - logger.error(f"Error in stragler handler.. {str(err)}") - return - - def _controller(self): - """Installs a controller listener that is used to toggle collection state. - - Called from configure(). Ignored for all ranks other than rank-0 - """ - try: - if self.rank == 0: - neth = "0.0.0.0" - netp = self.port - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self.sock.bind((neth, netp)) - self.sock.listen(128) - self.ctrlr = threading.Thread( - target=self._handler, args=(), name="straggler", daemon=True - ) - self.ctrlr.start() - except Exception as err: - logger.warning(f"StragglerDetector cannot be controlled.. 
{str(err)}") - - def _min_max( - self, - ptime: float, - btime: float, - temp: float, - power: float, - util: float, - clock: float, - flops: float, - ) -> Union[_StragglerData, None]: - """Helper function to find the min/max values - - Args: - ptime (float): avg per iteration gpu time - btime (float): avg per iteration cpu time - temp (float): gpu temp at the time of reporting - power (float): gpu power at the time of reporting - util (float): gpu util at the time of reporting - clock (float): gpu clock at the time of reporting - flops (float): estimated flops for the rank - - Returns: - Union[_StragglerData, None]: It contains the min/max of few metrics and the - corresponding rank it also has sorted list of - all (flops, rank) sorted by flops (aflops) - or returns None if collecton is disabled - """ - if self._off: - return None - # initialize output data object - o_dt = _StragglerData() - - prof_data: Dict[str, Union[int, float]] = {} - data_list: List[Dict[str, Union[int, float]]] = [] - prof_data["rank"] = self.rank - prof_data["time"] = ptime - prof_data["btime"] = btime - prof_data["temp"] = temp - prof_data["power"] = power - prof_data["util"] = util - prof_data["clock"] = clock - prof_data["flops"] = flops - - if self.rank == 0: - data_list = [prof_data] * self.world - - # this is blocking by default - torch.distributed.gather_object(prof_data, object_gather_list=data_list, dst=0) - - if self.rank == 0: - min_ctime = min(data_list, key=lambda k: k["time"]) # elapsed - max_ctime = max(data_list, key=lambda k: k["time"]) # elapsed - - min_cbatch = min(data_list, key=lambda k: k["btime"]) # batch time - max_cbatch = max(data_list, key=lambda k: k["btime"]) # batch time - - min_ctemp = min(data_list, key=lambda k: k["temp"]) # temp - max_ctemp = max(data_list, key=lambda k: k["temp"]) # temp - - min_cpower = min(data_list, key=lambda k: k["power"]) # power - max_cpower = max(data_list, key=lambda k: k["power"]) # power - - min_cutil = min(data_list, key=lambda k: k["util"]) # gpu util - max_cutil = max(data_list, key=lambda k: k["util"]) # gpu util - - min_cclock = min(data_list, key=lambda k: k["clock"]) # gpu clock - max_cclock = max(data_list, key=lambda k: k["clock"]) # gpu clock - - min_val = min_ctime["time"] - min_rank = min_ctime["rank"] - max_val = max_ctime["time"] - max_rank = max_ctime["rank"] - o_dt.min_elapsed = _ValueWithRank(min_val, int(min_rank), "ms") - o_dt.max_elapsed = _ValueWithRank(max_val, int(max_rank), "ms") - - min_val = min_cbatch["btime"] - min_rank = min_cbatch["rank"] - max_val = max_cbatch["btime"] - max_rank = max_cbatch["rank"] - o_dt.min_btime = _ValueWithRank(min_val, int(min_rank), "ms") - o_dt.max_btime = _ValueWithRank(max_val, int(max_rank), "ms") - - min_val = min_ctemp["temp"] - min_rank = min_ctemp["rank"] - max_val = max_ctemp["temp"] - max_rank = max_ctemp["rank"] - o_dt.min_temp = _ValueWithRank(min_val, int(min_rank), "C") - o_dt.max_temp = _ValueWithRank(max_val, int(max_rank), "C") - - min_val = min_cpower["power"] - min_rank = min_cpower["rank"] - max_val = max_cpower["power"] - max_rank = max_cpower["rank"] - o_dt.min_power = _ValueWithRank(min_val, int(min_rank), "W") - o_dt.max_power = _ValueWithRank(max_val, int(max_rank), "W") - - min_val = min_cutil["util"] - min_rank = min_cutil["rank"] - max_val = max_cutil["util"] - max_rank = max_cutil["rank"] - o_dt.min_util = _ValueWithRank(min_val, int(min_rank), "%") - o_dt.max_util = _ValueWithRank(max_val, int(max_rank), "%") - - min_val = min_cclock["clock"] - min_rank = 
min_cclock["rank"] - max_val = max_cclock["clock"] - max_rank = max_cclock["rank"] - o_dt.min_clock = _ValueWithRank(min_val, int(min_rank), "MHz") - o_dt.max_clock = _ValueWithRank(max_val, int(max_rank), "MHz") - - o_dt.aflops = [ - _ValueWithRank(d.get("flops", 0.0), int(d.get("rank", -1))) - for _, d in enumerate(data_list) - ] - o_dt.aflops.sort(key=lambda val_with_rank: val_with_rank()[0]) - # wait for everyone here - torch.distributed.barrier() - - return o_dt - - @property - def enabled(self) -> bool: - """Can be called to check the enabled state of the instance - - Note: - After the request to toggle the state, the - actual state change happens at end of call - to report() - """ - return not self._off - - @property - def configured(self) -> bool: - """Can be called to check if the instance is already configured - - Returns: - bool: returns True if configure was called and was a success, else False - """ - return StragglerDetector._configured - - @property - def my_rank(self): - """Can be called to get configured rank of this instance - - Returns: - int: Configured rank for this instance - """ - return self.rank - - @property - def world_size(self) -> int: - """Can be called to get configured world of this instance - - Returns: - int: World size configured for this instance - """ - return self.world - - def null_method(self) -> None: - """Default method to initialize start/stop method ptrs""" - pass - - def __enter__(self) -> "StragglerDetector": - """Define context/instance entry - - Returns: - StragglerDetector: the instance - """ - self.start() - return self - - def __call__(self, bdata: bool = False) -> "StragglerDetector": - """Callable for the instance. Set context state, - - Useful when the context is used for cpu timers only when bdata=True - - Args: - bdata (bool, optional): when true, only enables cpu timers. Defaults to False. - - Returns: - StragglerDetector: the instance - """ - self.bdata = bdata - return self - - def __exit__( - self, - ex_type: Optional[Type[BaseException]], - ex_val: Optional[BaseException], - ex_tb: Optional[TracebackType], - ) -> bool: - """Define context/instance exit, calls the stop method - - Args: - ex_type (Optional[Type[BaseException]]): Exception type - ex_val (Optional[BaseException]): _description_ - ex_tb (Optional[TracebackType]): _description_ - - Returns: - bool: True if the exception was handled - """ - # Should not suppress errors even if turned off - if ex_type is not None: - err = traceback.format_exception(ex_type, ex_val, ex_tb) - logger.warning(f"{str(ex_val)}\n{err}") - self.stop() - return False - - -# Singleton, global visibility -__straggler__ = StragglerDetector() -"""StragglerDetector: private module variable, not be directly accessed -""" - - -# Check if Transformer Engine has Float8Tensor class -HAVE_TE_FLOAT8TENSOR = False -try: - from transformer_engine.pytorch.float8_tensor import Float8Tensor - - HAVE_TE_FLOAT8TENSOR = True -except (ImportError, ModuleNotFoundError): - # Float8Tensor not found - pass - - -def is_float8tensor(tensor: torch.Tensor) -> bool: - """Check if a tensor is a Transformer Engine Float8Tensor""" - return HAVE_TE_FLOAT8TENSOR and isinstance(tensor, Float8Tensor) - - -######################## -### context parallel ### -######################## - - -def get_batch_on_this_cp_rank(batch: Dict[str, Any]): - """Slice batch input along sequence dimension into multiple chunks, - which are parallelized across GPUs in a context parallel group. 
- """ - - # With causal masking, each token only attends to its prior tokens. Simply split - # sequence into CP chunks can result in severe load imbalance. That's to say, chunks - # at the end of sequence have bigger workload than others. To address this issue, - # we split sequence into 2*CP ranks. Assuming CP=2, we then get 4 chunks, chunk_0 - # and chunk_3 are assigned to GPU0, chunk_1 and chunk_2 are assigned to GPU1, so - # that we can get balanced workload among GPUs in a context parallel group. - cp_size = parallel_state.get_context_parallel_world_size() - if cp_size > 1: - cp_rank = parallel_state.get_context_parallel_rank() - for key, val in batch.items(): - if val is not None: - seq_dim = 1 if key != 'attention_mask' else 2 - val = val.view( - *val.shape[0:seq_dim], - 2 * cp_size, - val.shape[seq_dim] // (2 * cp_size), - *val.shape[(seq_dim + 1) :], - ) - index = torch.tensor( - [cp_rank, (2 * cp_size - cp_rank - 1)], device="cpu", pin_memory=True - ).cuda(non_blocking=True) - val = val.index_select(seq_dim, index) - val = val.view(*val.shape[0:seq_dim], -1, *val.shape[(seq_dim + 2) :]) - batch[key] = val - - return batch +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +"""Utility functions used throughout Megatron core""" +import array +import functools +import hashlib +import logging +import math +import operator +import queue +import socket +import sys +import threading +import time +import traceback +from dataclasses import dataclass +from datetime import datetime +from functools import reduce +from importlib.metadata import version +from types import TracebackType +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +import torch +from packaging.version import Version as PkgVersion + +try: + from torch.distributed._tensor import DTensor + from torch.distributed.tensor.placement_types import Shard + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing.mapping import ShardedTensor + +logger = logging.getLogger(__name__) + + +try: + _torch_version = PkgVersion(torch.__version__) +except: + # This is a WAR for building docs, where torch is not actually imported + _torch_version = PkgVersion("0.0.0") +_te_version = None + + +def get_torch_version(): + """Get pytorch version from __version__; if not available use pip's. Use caching.""" + + def get_torch_version_str(): + import torch + + if hasattr(torch, '__version__'): + return str(torch.__version__) + else: + return version("torch") + + global _torch_version + if _torch_version is None: + _torch_version = PkgVersion(get_torch_version_str()) + return _torch_version + + +def get_te_version(): + """Get TE version from __version__; if not available use pip's. 
Use caching.""" + + def get_te_version_str(): + import transformer_engine as te + + if hasattr(te, '__version__'): + return str(te.__version__) + else: + return version("transformer-engine") + + global _te_version + if _te_version is None: + _te_version = PkgVersion(get_te_version_str()) + return _te_version + + +def is_te_min_version(version, check_equality=True): + """Check if minimum version of `transformer-engine` is installed.""" + if check_equality: + return get_te_version() >= PkgVersion(version) + return get_te_version() > PkgVersion(version) + + +def get_torch_version(): + """Get torch version from __version__.""" + + global _torch_version + return _torch_version + + +def is_torch_min_version(version, check_equality=True): + """Check if minimum version of `torch` is installed.""" + if check_equality: + return get_torch_version() >= PkgVersion(version) + return get_torch_version() > PkgVersion(version) + + +def ensure_divisibility(numerator, denominator): + """Ensure that numerator is divisible by the denominator.""" + assert numerator % denominator == 0, "{} is not divisible by {}".format(numerator, denominator) + + +def divide(numerator, denominator): + """Ensure that numerator is divisible by the denominator and return + the division value.""" + ensure_divisibility(numerator, denominator) + return numerator // denominator + + +def get_attr_wrapped_model(model, attr, allow_none=True, return_model_obj=False): + """Get an attribute from a wrapped model. + If return_model_obj is true, return the object that has the 'attr' attribute; + otherwise, return the attribute directly.""" + if isinstance(model, list): + raise RuntimeError("_get_attr_wrapped_model given a list of models") + + if allow_none: + + def condition(model, attr): + return not hasattr(model, attr) + + else: + + def condition(model, attr): + return getattr(model, attr, None) is None + + while condition(model, attr): + if not hasattr(model, "module"): + raise RuntimeError(f"_get_attr_wrapped_model couldn't find attribute {attr}") + + model = model.module + + if return_model_obj: + return model + return getattr(model, attr) + + +def get_model_type(model): + """Returns model_type attribute""" + return get_attr_wrapped_model(model, 'model_type') + + +def get_model_xattn(model): + """Returns whether the model has the xattn_needed attribute""" + try: + return get_attr_wrapped_model(model, 'xattn_needed') + except RuntimeError: + return False + + +def get_model_config(model): + """Returns the config attribute, allowed to return None""" + return get_attr_wrapped_model(model, 'config', allow_none=False) + + +class GlobalMemoryBuffer: + """Global buffer to avoid dynamic memory allocations. + Caller should ensure that buffers of the same name + are not used concurrently.""" + + def __init__(self): + self.buffer = {} + + def get_tensor(self, tensor_shape, dtype, name): + """ + Returns (potentially) a sub-tensor from the self.buffer for the given shape. + """ + required_len = reduce(operator.mul, tensor_shape, 1) + if ( + self.buffer.get((name, dtype), None) is None + or self.buffer[(name, dtype)].numel() < required_len + ): + self.buffer[(name, dtype)] = torch.empty( + required_len, dtype=dtype, device=torch.cuda.current_device(), requires_grad=False + ) + + return self.buffer[(name, dtype)][0:required_len].view(*tensor_shape) + + +def _kernel_make_viewless_tensor(inp, requires_grad): + """Make a viewless tensor. 
+ + View tensors have the undesirable side-affect of retaining a reference + to the originally-viewed tensor, even after manually setting the '.data' + field. This method creates a new tensor that links to the old tensor's + data, without linking the viewed tensor, referenced via the '._base' + field. + """ + out = torch.empty((1,), dtype=inp.dtype, device=inp.device, requires_grad=requires_grad) + out.data = inp.data + return out + + +class MakeViewlessTensor(torch.autograd.Function): + """ + Autograd function to make a viewless tensor. + + This function should be used in cases where the computation graph needs + to be propagated, but we only want a viewless tensor (e.g., + ParallelTransformer's hidden_states). Call this function by passing + 'keep_graph = True' to 'make_viewless_tensor()'. + """ + + @staticmethod + def forward(ctx, inp, requires_grad): + """Runs the fwd pass of _kernel_make_viewless_tensor""" + return _kernel_make_viewless_tensor(inp, requires_grad) + + @staticmethod + def backward(ctx, grad_output): + """No-op""" + return grad_output, None + + +def make_viewless_tensor(inp, requires_grad, keep_graph): + """ + Entry-point for creating viewless tensors. + + This method should be used, rather than calling 'MakeViewlessTensor' + or '_kernel_make_viewless_tensor' directly. This method acts as a + switch for determining if an autograd function or a regular method + should be used to create the tensor. + """ + + # return tensor as-is, if not a 'view' + if inp._base is None: + return inp + + # create viewless tensor + if keep_graph: + return MakeViewlessTensor.apply(inp, requires_grad) + else: + return _kernel_make_viewless_tensor(inp, requires_grad) + + +def assert_viewless_tensor(tensor, extra_msg=None): + """Assert that a tensor is not a view (i.e., its '._base' field is + not set).""" + if isinstance(tensor, list): + [assert_viewless_tensor(t) for t in tensor] + return tensor + if not isinstance(tensor, torch.Tensor): + return tensor + assert tensor._base is None, ( + "Ensure tensor._base is None before setting tensor.data or storing " + "tensor to memory buffer. Otherwise, a memory leak will occur (and " + f"likely accumulate over iterations). {extra_msg}" + ) + return tensor + + +def safely_set_viewless_tensor_data(tensor, new_data_tensor): + """Safely set tensor's '.data' field. + + Check first that the tensor is viewless (i.e., '._base' not set). If not, + raise an exception. + """ + assert_viewless_tensor( + tensor, + extra_msg="FYI, tensor._base has shape %s, and new_data_tensor has shape %s." + % ("--" if tensor._base is None else tensor._base.shape, new_data_tensor.shape), + ) + tensor.data = new_data_tensor + + +def init_method_normal(sigma): + """Init method based on N(0, sigma).""" + return functools.partial(torch.nn.init.normal_, mean=0.0, std=sigma) + + +def scaled_init_method_normal(sigma, num_layers): + """Init method based on N(0, sigma/sqrt(2*num_layers).""" + std = sigma / math.sqrt(2.0 * num_layers) + + return functools.partial(torch.nn.init.normal_, mean=0.0, std=std) + + +def log_single_rank(logger: logging.Logger, *args: Any, rank: int = 0, **kwargs: Any): + """If torch distributed is initialized, log only on rank + + Args: + logger (logging.Logger): The logger to write the logs + + args (Tuple[Any]): All logging.Logger.log positional arguments + + rank (int, optional): The rank to write on. Defaults to 0. 
+ + kwargs (Dict[str, Any]): All logging.Logger.log keyword arguments + """ + if torch.distributed.is_initialized(): + if torch.distributed.get_rank() == rank: + logger.log(*args, **kwargs) + else: + logger.log(*args, **kwargs) + + +def log_on_each_pipeline_stage(logger: logging.Logger, *args: Any, **kwargs: Any): + """Log on first rank in each pipeline stage + + Args: + logger (logging.Logger): The logger to write the logs + + args (Tuple[Any]): All logging.Logger.log positional arguments + + kwargs (Dict[str, Any]): All logging.Logger.log keyword arguments + """ + assert torch.distributed.is_initialized() + + if ( + parallel_state.get_data_parallel_rank(with_context_parallel=True) == 0 + and parallel_state.get_tensor_model_parallel_rank() == 0 + ): + logger.log(*args, **kwargs) + + +def check_param_hashes_across_dp_replicas( + model: List[torch.nn.Module], cross_check: bool = False +) -> bool: + """Computes hashes of all parameters in model, all-gathers hashes across DP replicas, + and then checks for equality between the locally-computed hashes and those of other ranks. + + NOTE: This function computes SHA-1 hashes on the CPU and thus needs to move all param + tensors from GPU to CPU first; as a result, this function is not intended to be called + very frequently in the main training loop. + + Args: + model (List[torch.nn.Module]): List of model chunks whose parameter hashes need to + be checked. + cross_check (bool): If true, will check whether hashes match across all DP replicas. + + Returns: + True if all param hashes match with corresponding hash on DP replica 0 or + across all replicas if cross_check is enabled, False otherwise. + """ + + # Compute per-parameter hashes on this rank. + # Keep track of expert and non-expert parameters separately since they need to be + # all-gathered across different sets of ranks. + non_expert_params, expert_params = [], [] + local_non_expert_param_hashes, local_expert_param_hashes = [], [] + for model_chunk_id, model_chunk in enumerate(model): + for param_name, param in model_chunk.named_parameters(): + param_hash = torch.frombuffer( + array.array( + 'B', hashlib.sha1(param.data.to("cpu").float().numpy(force=True)).digest() + ), + dtype=torch.uint8, + ) + if getattr(param, 'allreduce', True): + non_expert_params.append((model_chunk_id, param_name, param)) + local_non_expert_param_hashes.append(param_hash) + else: + expert_params.append((model_chunk_id, param_name, param)) + local_expert_param_hashes.append(param_hash) + + # Use data-modulo-expert parallel group to all-gather expert param hashes, regular + # data-parallel group for non-expert param hashes. + all_param_hashes_match = True + for params, local_param_hashes, all_gather_group in zip( + [non_expert_params, expert_params], + [local_non_expert_param_hashes, local_expert_param_hashes], + [parallel_state.get_data_parallel_group(), parallel_state.get_expert_data_parallel_group()], + ): + # Collect per-parameter hashes across all ranks in group. + assert len(params) == len(local_param_hashes) + if len(params) == 0: + continue + local_param_hashes = torch.stack(local_param_hashes).cuda() + all_param_hashes = [ + torch.zeros_like(local_param_hashes) + for _ in range(torch.distributed.get_world_size(all_gather_group)) + ] + torch.distributed.all_gather(all_param_hashes, local_param_hashes, group=all_gather_group) + + # Make sure local per-parameter hash matches DP rank 0. 
+ param_hashes_match = torch.equal(local_param_hashes, all_param_hashes[0]) + if not param_hashes_match: + for i, (model_chunk_id, param_name, param) in enumerate(params): + if not torch.equal(local_param_hashes[i], all_param_hashes[0][i]): + rank = torch.distributed.get_rank() + logger.info( + f"[Rank {rank}] Hash not matching for {param_name} in model chunk" + f"{model_chunk_id}" + ) + if cross_check: + # Make sure all ranks have the same hash. + all_param_hashes_match &= all( + map(lambda x: torch.equal(local_param_hashes, x), all_param_hashes) + ) + else: + all_param_hashes_match &= param_hashes_match + + return all_param_hashes_match + + +def make_tp_sharded_tensor_for_checkpoint( + tensor, key, tp_axis=0, replica_id=None, prepend_offsets=(), **kwargs +): + """Helper for instantiating a ShardedTensor where the `tp_axis` dimension + is sharded across TP group. + + Optionally, can provide offsets which prepend new dimensions to the tensor. + """ + prepend_axis_num = len(prepend_offsets) + + new_offsets = [] + tp_rank = parallel_state.get_tensor_model_parallel_rank() + dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) + tp_size = parallel_state.get_tensor_model_parallel_world_size() + dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) + dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) + + new_offsets.append((tp_axis + prepend_axis_num, tp_rank, tp_size)) + + if HAVE_DTENSOR and isinstance(tensor, DTensor): + # TP + FSDP2 sharding + dp_replica_id = 0 + tensor = tensor._local_tensor + + if tp_axis == 0: + # both FSDP2 and TP shards axis 0 + # default MCore uses tp-cp-ep-dp-pp + # FSDP2 is compatibile with TP, CP + new_offsets[0] = (prepend_axis_num, tp_rank * dp_size + dp_rank, tp_size * dp_size) + else: + # FSDP2 shards axis 0 and TP shards some other axis + new_offsets.append((prepend_axis_num, dp_rank, dp_size)) + + if replica_id is None: + replica_id = (0, 0, dp_replica_id) + + if hasattr(tensor, 'fully_shard_param_local_shard'): + assert len(replica_id) == 3, f'Expected replica_id format (PP, TP, DP), got: {replica_id}' + replica_id = (*replica_id[:2], 0) + + sh_ten = ShardedTensor.from_rank_offsets_flat( + key, + tensor.fully_shard_param_local_shard, + tensor.shape, + *prepend_offsets, + ( + tp_axis + prepend_axis_num, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ), + flattened_range=slice(*tensor.fully_shard_param_local_index), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + **kwargs, + ) + setattr(sh_ten, 'is_data_parallel_fully_shard', True) + return sh_ten + + return ShardedTensor.from_rank_offsets( + key, + tensor, + *prepend_offsets, + *new_offsets, + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + **kwargs, + ) + + +def make_sharded_tensor_for_checkpoint(tensor, key, prepend_offsets=(), replica_id=None, **kwargs): + """Helper for instantiating a non-sharded ShardedTensor (replicated across TP and DP group). + + Optionally, can provide offsets which prepend new dimensions to the tensor. 
+ """ + + prepend_axis_num = len(prepend_offsets) + + new_offsets = [] + dp_rank = parallel_state.get_data_parallel_rank(with_context_parallel=True) + dp_size = parallel_state.get_data_parallel_world_size(with_context_parallel=True) + dp_replica_id = parallel_state.get_data_parallel_rank(with_context_parallel=True) + + if HAVE_DTENSOR and isinstance(tensor, DTensor): + # FSDP2 sharding + dp_replica_id = 0 + tensor = get_full_tensor_if_necessary(tensor) + new_offsets.append((prepend_axis_num, dp_rank, dp_size)) + + if replica_id is None: + replica_id = (0, parallel_state.get_tensor_model_parallel_rank(), dp_replica_id) + + if hasattr(tensor, 'fully_shard_param_local_shard'): + assert len(replica_id) == 3, f'Expected replica_id format (PP, TP, DP), got: {replica_id}' + replica_id = (*replica_id[:2], 0) + + sh_ten = ShardedTensor.from_rank_offsets_flat( + key, + tensor.fully_shard_param_local_shard, + tensor.shape, + *prepend_offsets, + flattened_range=slice(*tensor.fully_shard_param_local_index), + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + **kwargs, + ) + setattr(sh_ten, 'is_data_parallel_fully_shard', True) + return sh_ten + + return ShardedTensor.from_rank_offsets( + key, + tensor, + *prepend_offsets, + *new_offsets, + replica_id=replica_id, + prepend_axis_num=prepend_axis_num, + **kwargs, + ) + + +def get_full_tensor_if_necessary(tensor): + """For DTensor gets full tensor if some ranks will not have a local copy""" + need_full_tensor = False + for i in range(tensor.device_mesh.ndim): + if ( + isinstance(tensor.placements[i], Shard) + and tensor.device_mesh.shape[i] > tensor.shape[tensor.placements[i].dim] + ): + need_full_tensor = True + break + + tensor = tensor.full_tensor() if need_full_tensor else tensor._local_tensor + + return tensor + + +def to_local_if_dtensor(tensor: Union[torch.Tensor, "DTensor"]) -> torch.Tensor: + """Returns the local shard of the given tensor if it is a DTensor.""" + with torch.no_grad(): + return tensor.to_local() if HAVE_DTENSOR and isinstance(tensor, DTensor) else tensor + + +def get_data_parallel_group_if_dtensor( + tensor: Union[torch.Tensor, "DTensor"], data_parallel_group: "ProcessGroup" = None +) -> Optional["ProcessGroup"]: + """Gets the data parallel group of the given tensor if it is a DTensor.""" + if HAVE_DTENSOR and isinstance(tensor, DTensor): + current_group = tensor.device_mesh.get_group() + assert data_parallel_group is None or current_group == data_parallel_group + return current_group + return None + + +def prepare_input_tensors_for_wgrad_compute(grad_output, all_gathered_input): + """Ensure grad_output is stored in a contiguous buffer.""" + # Doing gather + slicing during the NeMo forward pass can make this tensor + # not be contiguous. 
PyTorch only checks if the tensor is contiguous, and only + # clones it if it's not contiguous: + # https://github.com/pytorch/pytorch/blob/c47cf9bc7f9e02f649ab4ed53fe4d35732c92ab6/torch/_refs/__init__.py#L2761 + grad_output = grad_output.contiguous() + # Convert the tensor shapes to 2D for execution compatibility + if grad_output.dim() == 3: + grad_output = grad_output.view( + grad_output.shape[0] * grad_output.shape[1], grad_output.shape[2] + ) + all_gathered_input = all_gathered_input.view( + all_gathered_input.shape[0] * all_gathered_input.shape[1], all_gathered_input.shape[2] + ) + + return grad_output, all_gathered_input + + +if is_torch_min_version("1.13.0"): + dist_all_gather_func = torch.distributed.all_gather_into_tensor +else: + dist_all_gather_func = torch.distributed._all_gather_base + + +def drain_embedding_wgrad_compute(config, embedding_activation_buffer, grad_output_buffer, weight): + """Helper for performing embedding wgrad GEMM's during the pipeline drain phase, pipelines the + AllGather and GEMM's. + + Should only be used when pipeline model parallelism and gradient accumulation + fusion are enabled. + """ + + assert len(embedding_activation_buffer) == len( + grad_output_buffer + ), "Length of activation and gradient buffers need to be equal!" + + import fused_weight_gradient_mlp_cuda + + from megatron.core.parallel_state import ( + get_global_memory_buffer, + get_tensor_model_parallel_group, + get_tensor_model_parallel_world_size, + ) + + input = embedding_activation_buffer.pop(0) + world_size = get_tensor_model_parallel_world_size() + dim_size = list(input.size()) + dim_size[0] = dim_size[0] * world_size + + all_gathered_input = [None, None] + if config.sequence_parallel: + all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, "mpu_0") + handle = dist_all_gather_func( + all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=False + ) + + all_gathered_input[0] = all_gather_buffer + all_gather_buffer = None + else: + all_gathered_input[0] = input + + input = None + + def wgrad_compute(all_gathered_input, grad_output, weight): + + grad_output, all_gathered_input = prepare_input_tensors_for_wgrad_compute( + grad_output, all_gathered_input + ) + + if config.gradient_accumulation_fusion: + if weight.main_grad.dtype == torch.float32: + fused_weight_gradient_mlp_cuda.wgrad_gemm_accum_fp32( + all_gathered_input, grad_output, weight.main_grad + ) + elif weight.main_grad.dtype in (torch.float16, torch.bfloat16): + fused_weight_gradient_mlp_cuda.wgrad_gemm_accum_fp16( + all_gathered_input, grad_output, weight.main_grad + ) + else: + raise RuntimeError("Unsupported gradient type for gradient accumulation fusion") + + # We have all_gathered_input list acting as a double buffer here, + # since we are pipelining the AllGather and GEMM,one buffer all gathers + # the input while the other buffer reads from it for the GEMM. We use i + # and (i+1) for indexing to enable this double buffering. 
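+    # For example, at i=0 the asynchronous all-gather fills all_gathered_input[1] for
+    # the next microbatch while wgrad_compute() consumes all_gathered_input[0]; at i=1
+    # the roles swap, so the communication overlaps with the GEMM.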
+ for i in range(len(embedding_activation_buffer)): + input = embedding_activation_buffer.pop(0) + if config.sequence_parallel: + name = "mpu_" + str((i + 1) % 2) + all_gather_buffer = get_global_memory_buffer().get_tensor(dim_size, input.dtype, name) + handle = dist_all_gather_func( + all_gather_buffer, input, group=get_tensor_model_parallel_group(), async_op=True + ) + + all_gathered_input[(i + 1) % 2] = all_gather_buffer + all_gather_buffer = None + else: + all_gathered_input[(i + 1) % 2] = input + + grad_output = grad_output_buffer.pop(0) + wgrad_compute(all_gathered_input[i % 2], grad_output, weight) + drain_idx = (i + 1) % 2 + input, all_gathered_input[i % 2], grad_output = None, None, None + + if config.sequence_parallel: + handle.wait() + + grad_output = grad_output_buffer.pop(0) + wgrad_compute(all_gathered_input[drain_idx], grad_output, weight) + input, all_gathered_input[drain_idx], grad_output = None, None, None + + +def local_multi_tensor_applier(op, noop_flag_buffer, tensor_lists, *args): + """Multi tensor op applier""" + return op(2048 * 32, noop_flag_buffer, tensor_lists, *args) + + +# computes l2 norm for a list of contiguous tensors +# works as a drop-in replacement for amp_C.multi_tensor_l2norm +def local_multi_tensor_l2_norm(chunk_size, noop_flag, tensor_lists, per_tensor, *args): + """ + Computes l2 norm for a list of contiguous tensors + works as a drop-in replacement for amp_C.multi_tensor_l2norm + """ + l2 = [[(torch.norm(tensor)) for tensor in tensor_list] for tensor_list in tensor_lists] + l2_reduced = torch.norm(torch.tensor(l2)) + l2_cuda = torch.tensor([float(l2_reduced)], dtype=torch.float, device='cuda') + return l2_cuda, None + + +# works as a drop-in replacement for amp_C.multi_tensor_scale +def local_multi_tensor_scale(chunk_size, noop_flag, tensor_lists, scale): + """Works as a drop-in replacement for amp_C.multi_tensor_scale.""" + for src, dst in zip(tensor_lists[0], tensor_lists[1]): + dst.copy_(src * scale) + + +class _ValueWithRank: + """This is an internal class, not for use outside this module + + Attributes: + _rank (int): rank for the value + _value (float) : the value it stores, eg elapsed time + _unit (str) : unit for the value + """ + + def __init__(self, value: float, rank: int, unit: str = "") -> None: + """Initializer + + Args: + _value (float): the initial value with which it is inited + _rank (int): the rank number + _unit (str) : the unit of the value, eg ms or flops + """ + self._rank = rank + self._value = value + self._unit = unit + + def __lt__(self, other) -> bool: + """Check if value of self is smaller than other's value + + Args: + other (_ValueWithRank): The other object to compare with + + Returns: + bool: True if lhs._value of operand is less than rhs._value, else False + """ + return self._value < other._value + + def __gt__(self, other) -> bool: + """Check if value of self is larger than other's value + + Args: + other (_ValueWithRank): The other object to compare with + + Returns: + bool: True if lhs._value of operand is greater than rhs._value, else False + """ + return self._value > other._value + + def __call__(self) -> Tuple[float, int, str]: + """Returns the value, the rank, and unit as a Tuple + + Returns: + Tuple[float, int, str]: value, rank, unit + """ + return self._value, self._rank, self._unit + + def __str__(self) -> str: + """String representation of the object + + Returns: + str: strigified object + """ + + return f"{self._value:.2f}{self._unit}/{self._rank}" + + +@dataclass +class _StragglerData: + """This is 
an internal dataclass, not for use outside this module
+
+    Attributes:
+        min_elapsed (_ValueWithRank): min iteration time across all ranks
+        max_elapsed (_ValueWithRank): max iteration time across all ranks
+        min_btime (_ValueWithRank): min cpu time across all ranks
+        max_btime (_ValueWithRank): max cpu time across all ranks
+        min_temp (_ValueWithRank): min gpu temp across all ranks
+        max_temp (_ValueWithRank): max gpu temp across all ranks
+        min_power (_ValueWithRank): min gpu power across all ranks
+        max_power (_ValueWithRank): max gpu power across all ranks
+        min_util (_ValueWithRank): min gpu util across all ranks
+        max_util (_ValueWithRank): max gpu util across all ranks
+        min_clock (_ValueWithRank): min gpu clock across all ranks
+        max_clock (_ValueWithRank): max gpu clock across all ranks
+        aflops (List[_ValueWithRank]): list of _ValueWithRank sorted by flops
+    """
+
+    # gemm time
+    min_elapsed = _ValueWithRank(sys.float_info.max, 0, "ms")
+    max_elapsed = _ValueWithRank(sys.float_info.min, 0, "ms")
+    # get_batch time
+    min_btime = _ValueWithRank(sys.float_info.max, 0, "us")
+    max_btime = _ValueWithRank(sys.float_info.min, 0, "us")
+    # temp
+    min_temp = _ValueWithRank(sys.float_info.max, 0, "C")
+    max_temp = _ValueWithRank(sys.float_info.min, 0, "C")
+    # power
+    min_power = _ValueWithRank(sys.float_info.max, 0, "W")
+    max_power = _ValueWithRank(sys.float_info.min, 0, "W")
+    # util
+    min_util = _ValueWithRank(sys.float_info.max, 0, "%")
+    max_util = _ValueWithRank(sys.float_info.min, 0, "%")
+    # clock
+    min_clock = _ValueWithRank(sys.float_info.max, 0, "MHz")
+    max_clock = _ValueWithRank(sys.float_info.min, 0, "MHz")
+    aflops: Union[List[_ValueWithRank], None] = None
+
+
+class StragglerDetector:
+    """Singleton class implementing a per-rank Straggler Detector
+
+    It uses cuda events to time operations of choice using the
+    start and stop methods, which can be invoked directly on the
+    class instance or used as a python context.
+    After collection, a report() method is available to display
+    the collected metrics. It is only supported if CUDA is
+    available. See 
megatron/core/README_STRAGGLER.md for more info + + Note: + The instance and class attributes mentioned below are all + private to the class and has no use outside the class + + Attributes: + _off (bool): current state of the toggle + start (FunctionType): start method + stop (FunctionType): stop method + world (int): world size + rank (int): rank for this instance + mmcnt (int): number of ranks to report + port (int): control port + amp (float): amplification factor for TFLOPs, default 3.0 + toggle (bool): whether to start/stop detector collection + bdata (bool): when true, just collect get_batch + dev (int): cuda device + evt_q (LifoQueue): cuda event queue + start_gemm_ev (list[torch.cuda.Event]): cuda start event + stop_gemm_ev (list[torch.cuda.Event]): cuda stop event + start_data_ev (list[torch.cuda.Event]): cuda start event + stop_data_ev (list[torch.cuda.Event]): cuda stop event + start_gemm_tm (list[int]): start time (wallclock) + stop_gemm_tm (list[int]): stop time (wallclock) + start_data_tm (list[int]): start time for get_batch + stop_data_tm (list[int]): stop time for get_batch + sock (socket): the controller socket + ctrlr (Thread): the controller thread + """ + + _configured = False + """Indicates if the singleton instance is configured or not + """ + + def __new__(cls: Type["StragglerDetector"]) -> "StragglerDetector": + """Constructor + Creates an instance of the class if not created + + Args: + cls (Type['StragglerDetector']): The class type + + Returns: + StragglerDetector: the class instance + """ + + if not hasattr(cls, "_instance"): + cls._instance = super(StragglerDetector, cls).__new__(cls) + return cls._instance + + def __init__(self) -> None: + """Initializer + + The inital state of the StragglerDetector instance is disabled. + The enabled state is indicated using self._off member variable + and the proerty enabled. + """ + self._off: bool = True + self.start = self.null_method + self.stop = self.null_method + self.world: int = 0 + self.rank: int = 0 + self.mmcnt: int = 1 + self.port: int = 0 + self.amp: float = 3.0 + self.toggle: bool = False + self.bdata: bool = False + self.dev: Union[torch.device, int, None] = None + self.evt_q: Union[queue.LifoQueue, None] = None + self.start_gemm_ev: List[torch.cuda.Event] = [] + self.stop_gemm_ev: List[torch.cuda.Event] = [] + self.start_data_ev: List[torch.cuda.Event] = [] + self.stop_data_ev: List[torch.cuda.Event] = [] + self.start_gemm_tm: List[int] = [] + self.stop_gemm_tm: List[int] = [] + self.start_data_tm: List[int] = [] + self.stop_data_tm: List[int] = [] + self.sock: Union[socket.socket, None] = None + self.ctrlr: Union[threading.Thread, None] = None + + def configure( + self, + world: int, + rank: int, + mmcnt: int = 1, + amp: float = 3.0, + port: int = 65535, + prefill: int = 1024, + enabled: bool = False, + ) -> None: + """This method is called to configure the Singleton instance + + It should be called once per instantiation per process. + + Note: + The constructor keeps the state of instance disabled + i.e no collection will happen even when start/stop methods are + called. Only when enabled is True (self._off is True), the + start/stop method pointers get assigned the real collection + methods, otherwise they are initialized with null_method + + Args: + world (int): World Size + rank (int): The rank of this trainer + mmcnt (int, optional): Number of ranks to print for showing Min/Max Etpt. + Defaults to 1. + amp (float, optional): Set to 3.0 if we only use timers in fwd pass. + Defaults to 3.0. 
+ port (int, optional): Control port, useful only for rank-0. Defaults to 65535. + prefill (int, optional): How many Events to pre-populate. Defaults to 1024. + enabled (bool, optional): Whether or not collection is enabled on startup. + Defaults to False. + """ + if StragglerDetector._configured: + # don't throw + return + StragglerDetector._configured = True + self.bdata = False + self.start = self.null_method + self.stop = self.null_method + self._off = True + # No CUDA, No Support + if torch.cuda.is_available(): + self._off = not enabled + self.world = world + self.rank = rank + self.mmcnt = mmcnt if mmcnt > 1 else 1 + self.amp = amp + self.port = port + self.toggle = False + self.bdata = False + self.evt_q = queue.LifoQueue() + self.start_gemm_ev = [] + self.stop_gemm_ev = [] + self.start_data_ev = [] + self.stop_data_ev = [] + self.start_gemm_tm = [] + self.stop_gemm_tm = [] + self.start_data_tm = [] + self.stop_data_tm = [] + backend = torch.distributed.get_backend() + if backend == "nccl": + self.dev = torch.cuda.current_device() + else: + self.dev = torch.device("cpu") + # cache some events + for _ in range(prefill): + self.evt_q.put(torch.cuda.Event(enable_timing=True)) + if self.rank == 0: + # Start the controller + self._controller() + if not self._off: + self.start = self.start_method + self.stop = self.stop_method + + def reset(self) -> None: + """This method is called to reset the metrics state of the instance + + It is generally called from within elapsed() after extracting per rank metrics. + """ + if self._off: + return + # Pool them + if self.evt_q is not None: + _ = [self.evt_q.put(ev) for ev in self.start_gemm_ev] + _ = [self.evt_q.put(ev) for ev in self.stop_gemm_ev] + _ = [self.evt_q.put(ev) for ev in self.start_data_ev] + _ = [self.evt_q.put(ev) for ev in self.stop_data_ev] + self.start_gemm_ev = [] + self.stop_gemm_ev = [] + self.start_data_ev = [] + self.stop_data_ev = [] + # Use regular timers + self.start_gemm_tm = [] + self.stop_gemm_tm = [] + self.start_data_tm = [] + self.stop_data_tm = [] + self.bdata = False + + def start_method(self) -> None: + """This method adds the start timers. + + Both cuda event and perf_counter are added. If bdata is set to + true from __call__, this method skips inserting cuda + timer. This way it can be used to measure time spent on + CPU - generally useful for timing get_batch() + """ + # Not reentrant + if self.evt_q is not None and self.evt_q.qsize() > 1: + sev = self.evt_q.get() # no try-catch + eev = self.evt_q.get() # no try-catch + else: + sev = torch.cuda.Event(enable_timing=True) + eev = torch.cuda.Event(enable_timing=True) + # First check if this start is for data + if self.bdata: + self.start_data_ev.append(sev) + self.stop_data_ev.append(eev) + self.start_data_tm.append(0) + self.stop_data_tm.append(0) + idx = len(self.stop_data_tm) - 1 + self.start_data_tm[idx] = time.perf_counter_ns() + self.start_data_ev[idx].record() + self.bdata = False + return + self.start_gemm_ev.append(sev) + self.stop_gemm_ev.append(eev) + self.start_gemm_tm.append(0) + self.stop_gemm_tm.append(0) + idx = len(self.stop_gemm_tm) - 1 + self.start_gemm_tm[idx] = time.perf_counter_ns() + self.start_gemm_ev[idx].record() + + def stop_method(self) -> None: + """This method adds the stop timers. + + Both cuda event and perf_counter are added. If bdata is set to + true from __call__, this method skips inserting cuda + timer. 
Also see start_method()
+        """
+        # Not reentrant
+        # First check if this stop is for data
+        idx = len(self.stop_data_tm) - 1
+        if idx >= 0 and self.stop_data_tm[idx] == 0:
+            self.stop_data_tm[idx] = time.perf_counter_ns()
+            self.stop_data_ev[idx].record()
+            return
+        idx = len(self.stop_gemm_tm) - 1
+        if idx >= 0 and self.stop_gemm_tm[idx] == 0:
+            self.stop_gemm_tm[idx] = time.perf_counter_ns()
+            self.stop_gemm_ev[idx].record()
+
+    def elapsed(self) -> Tuple[float, float, int, int, int, int]:
+        """This method is called from report(), or can be called directly
+
+        It is called to collect all the elapsed time since last reset().
+        It finally calls reset()
+
+        Returns:
+            Tuple[float, float, int, int, int, int]: see below for returns
+                delta : time spent in kernel
+                batch_delta : time spent in get_batch
+                temp : observed gpu temp
+                power : observed gpu power
+                util : observed gpu utilization
+                clock : observed gpu clock
+        """
+        if self._off:
+            # match with return below
+            return 0, 0, 0, 0, 0, 0
+        ls_ev = len(self.start_gemm_ev)
+        le_ev = len(self.stop_gemm_ev)
+        ls_bs = len(self.start_data_ev)
+        ls_be = len(self.stop_data_ev)
+        delta = 0.0
+        batch_delta = 0.0
+        temp = 0
+        power = 0
+        util = 0
+        clock = 0
+        if ls_ev != le_ev:
+            logger.warning(f"Event Start/Stop out of sync {ls_ev}/{le_ev}")
+        elif ls_bs != ls_be:
+            logger.warning(f"get_batch Start/Stop out of sync {ls_bs}/{ls_be}")
+        else:
+            temp = torch.cuda.temperature()
+            power = torch.cuda.power_draw()
+            util = torch.cuda.utilization()
+            clock = torch.cuda.clock_rate()
+            torch.cuda.synchronize()
+            # Process Events
+            for i in range(ls_ev):
+                e_ev = self.start_gemm_ev[i].elapsed_time(self.stop_gemm_ev[i])
+                e_tm = (self.stop_gemm_tm[i] - self.start_gemm_tm[i]) / 1e6  # ns to ms
+                # Pick the larger of the Event and perf_counter times
+                delta += max(e_ev, e_tm)
+            # Process get_batch
+            for i in range(ls_bs):
+                b_ev = self.start_data_ev[i].elapsed_time(self.stop_data_ev[i])
+                b_tm = (self.stop_data_tm[i] - self.start_data_tm[i]) / 1e6  # ns to ms
+                # data fetching has prefetch, hence take the max, instead of avg
+                batch_delta = max(batch_delta, max(b_ev, b_tm))
+        self.reset()  # Prepare for next round
+        # time in ms, batch_delta in ms, check return above
+        return delta, batch_delta, temp, power, util, clock
+
+    def report(self, total_flops: float = 0.0, log_interval: int = 0) -> bool:
+        """Function to log the min/max metrics and the associated rank over a time period
+
+        It finds the slowest and fastest rank among all ranks. It should be
+        called by all ranks, but only rank-0 prints the analysis.
+        At the end it checks whether the straggler detector should
+        remain active or be deactivated.
+
+        Args:
+            total_flops (float, optional): The theoretical flops over the period. Defaults to 0.0.
+            log_interval (int, optional): The training interval over which reporting is called (ms).
+                Defaults to 0.
+ + Returns: + bool: True if reported, else False + """ + ret = False + if not self._off and total_flops > 0.0 and log_interval > 0: + elapsed, btime, temp, power, util, clock = self.elapsed() # get raw time + # btime (get_batch time is max in the iteration) + ptime = elapsed / (log_interval * 1.0) # avg per iteration elapsed time, ms + api_flops = total_flops / (log_interval * 1.0) # avg per iteration flops, ms + apir_flops = api_flops / ( + ptime * 10**9 * self.world + ) # this is avg per iteration this rank's thruput, TFLOP/s (note 10**9), + et_flops = apir_flops / self.amp # Estimated TFLOPs, not tracing backward + + o_dt = self._min_max( + ptime, btime, float(temp), float(power), float(util), float(clock), et_flops + ) + if self.rank == 0 and o_dt is not None and o_dt.aflops is not None: + now = f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}]" + min_flops, min_frank, _ = o_dt.aflops[0]() + max_flops, max_frank, _ = o_dt.aflops[-1]() + logger.info( + f"{now} | " + f"MnRtt/Rnk: {o_dt.min_elapsed} | " + f"MxRtt/Rnk: {o_dt.max_elapsed} | " + f"MnPwr/Rnk: {o_dt.min_power} | " + f"MxPwr/Rnk: {o_dt.max_power} | " + f"MnTmp/Rnk: {o_dt.min_temp} | " + f"MxTmp/Rnk: {o_dt.max_temp} | " + f"MnUtl/Rnk: {o_dt.min_util} | " + f"MxUtl/Rnk: {o_dt.max_util} | " + f"MnClk/Rnk: {o_dt.min_clock} | " + f"MxClk/Rnk: {o_dt.max_clock} | " + f"MnDRtt/Rnk: {o_dt.min_btime} | " + f"MxDRtt/Rnk: {o_dt.max_btime} | " + f"MnEtpt/Rnk: {min_flops:.2f}TF/{min_frank} | " + f"MxEtpt/Rnk: {max_flops:.2f}TF/{max_frank}" + ) + if self.mmcnt > 1 and self.mmcnt < self.world: + line = f"^^^^ Bottom {self.mmcnt} Ranks with lowest Etpt(TF):" + for i in range(self.mmcnt): + line += f" {o_dt.aflops[i]}," + logger.info(line) + line = f"^^^^ Top {self.mmcnt} Ranks with highest Etpt(TF):" + shift = self.world - self.mmcnt + for i in range(self.mmcnt): + line += f" {o_dt.aflops[i+shift]}," + logger.info(line) + ret = True + + # Check/Communicate if tracking is turned off or on + self._check_toggle() + return ret + + def _check_toggle(self) -> None: + """Helper method to check if a request to toggle the collection state was made + + It checks iof collection state toggle req was made via the server listening on + rank-0 since last call to report(). Called by report(). Calling this method + indirectly from report() is the only way to activate the change that is made + via rank-0 + """ + # If no change just communicate the current + off = self._off + if self.rank == 0 and self.toggle: + off = not self._off + self.toggle = False + st = torch.tensor(off, dtype=torch.bool, device=self.dev) + torch.distributed.broadcast(st, 0) # Blocking + # save old switch + off = self._off + self._off = bool(st.item()) + if off != self._off: + if not self._off: + self.start = self.start_method + self.stop = self.stop_method + state = "ON" + else: + self.start = self.null_method + self.stop = self.null_method + state = "OFF" + if self.rank == 0: + logger.info(f"Toggling StragglerDetector State {state}") + + def _handler(self) -> None: + """Thread function for the controller. + + It is a tcp-server that listens on a port. Uses HTTP protocol. + If connected to it using curl, it indicates a toggle of the + collection state. The actual toggling happens at the end of + calling report() when _check_toggle() is called. + """ + resp = r"HTTP/1.0 200 OK\r\nConnection: Close\r\nContent-length: " + + if self.rank == 0: + state = "OFF" if self._off else "ON" + logger.info( + f"Controller ready to recv commands on port {self.port}. 
Current state {state}" + ) + while True and self.sock is not None: + try: + conn, _ = self.sock.accept() + _ = conn.recv(1024) + self.toggle = True + state = "ON" if self._off else "OFF" + msg = f"Will turn StragglerDetector {state} at next logging interval" + msg_len = len(msg) + final_resp = f"{resp}{msg_len}\r\n\r\n{msg}" + conn.send(final_resp.encode()) + conn.close() + logger.info(msg) + except Exception as err: + logger.error(f"Error in stragler handler.. {str(err)}") + return + + def _controller(self): + """Installs a controller listener that is used to toggle collection state. + + Called from configure(). Ignored for all ranks other than rank-0 + """ + try: + if self.rank == 0: + neth = "0.0.0.0" + netp = self.port + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.sock.bind((neth, netp)) + self.sock.listen(128) + self.ctrlr = threading.Thread( + target=self._handler, args=(), name="straggler", daemon=True + ) + self.ctrlr.start() + except Exception as err: + logger.warning(f"StragglerDetector cannot be controlled.. {str(err)}") + + def _min_max( + self, + ptime: float, + btime: float, + temp: float, + power: float, + util: float, + clock: float, + flops: float, + ) -> Union[_StragglerData, None]: + """Helper function to find the min/max values + + Args: + ptime (float): avg per iteration gpu time + btime (float): avg per iteration cpu time + temp (float): gpu temp at the time of reporting + power (float): gpu power at the time of reporting + util (float): gpu util at the time of reporting + clock (float): gpu clock at the time of reporting + flops (float): estimated flops for the rank + + Returns: + Union[_StragglerData, None]: It contains the min/max of few metrics and the + corresponding rank it also has sorted list of + all (flops, rank) sorted by flops (aflops) + or returns None if collecton is disabled + """ + if self._off: + return None + # initialize output data object + o_dt = _StragglerData() + + prof_data: Dict[str, Union[int, float]] = {} + data_list: List[Dict[str, Union[int, float]]] = [] + prof_data["rank"] = self.rank + prof_data["time"] = ptime + prof_data["btime"] = btime + prof_data["temp"] = temp + prof_data["power"] = power + prof_data["util"] = util + prof_data["clock"] = clock + prof_data["flops"] = flops + + if self.rank == 0: + data_list = [prof_data] * self.world + + # this is blocking by default + torch.distributed.gather_object(prof_data, object_gather_list=data_list, dst=0) + + if self.rank == 0: + min_ctime = min(data_list, key=lambda k: k["time"]) # elapsed + max_ctime = max(data_list, key=lambda k: k["time"]) # elapsed + + min_cbatch = min(data_list, key=lambda k: k["btime"]) # batch time + max_cbatch = max(data_list, key=lambda k: k["btime"]) # batch time + + min_ctemp = min(data_list, key=lambda k: k["temp"]) # temp + max_ctemp = max(data_list, key=lambda k: k["temp"]) # temp + + min_cpower = min(data_list, key=lambda k: k["power"]) # power + max_cpower = max(data_list, key=lambda k: k["power"]) # power + + min_cutil = min(data_list, key=lambda k: k["util"]) # gpu util + max_cutil = max(data_list, key=lambda k: k["util"]) # gpu util + + min_cclock = min(data_list, key=lambda k: k["clock"]) # gpu clock + max_cclock = max(data_list, key=lambda k: k["clock"]) # gpu clock + + min_val = min_ctime["time"] + min_rank = min_ctime["rank"] + max_val = max_ctime["time"] + max_rank = max_ctime["rank"] + o_dt.min_elapsed = _ValueWithRank(min_val, int(min_rank), "ms") + 
o_dt.max_elapsed = _ValueWithRank(max_val, int(max_rank), "ms") + + min_val = min_cbatch["btime"] + min_rank = min_cbatch["rank"] + max_val = max_cbatch["btime"] + max_rank = max_cbatch["rank"] + o_dt.min_btime = _ValueWithRank(min_val, int(min_rank), "ms") + o_dt.max_btime = _ValueWithRank(max_val, int(max_rank), "ms") + + min_val = min_ctemp["temp"] + min_rank = min_ctemp["rank"] + max_val = max_ctemp["temp"] + max_rank = max_ctemp["rank"] + o_dt.min_temp = _ValueWithRank(min_val, int(min_rank), "C") + o_dt.max_temp = _ValueWithRank(max_val, int(max_rank), "C") + + min_val = min_cpower["power"] + min_rank = min_cpower["rank"] + max_val = max_cpower["power"] + max_rank = max_cpower["rank"] + o_dt.min_power = _ValueWithRank(min_val, int(min_rank), "W") + o_dt.max_power = _ValueWithRank(max_val, int(max_rank), "W") + + min_val = min_cutil["util"] + min_rank = min_cutil["rank"] + max_val = max_cutil["util"] + max_rank = max_cutil["rank"] + o_dt.min_util = _ValueWithRank(min_val, int(min_rank), "%") + o_dt.max_util = _ValueWithRank(max_val, int(max_rank), "%") + + min_val = min_cclock["clock"] + min_rank = min_cclock["rank"] + max_val = max_cclock["clock"] + max_rank = max_cclock["rank"] + o_dt.min_clock = _ValueWithRank(min_val, int(min_rank), "MHz") + o_dt.max_clock = _ValueWithRank(max_val, int(max_rank), "MHz") + + o_dt.aflops = [ + _ValueWithRank(d.get("flops", 0.0), int(d.get("rank", -1))) + for _, d in enumerate(data_list) + ] + o_dt.aflops.sort(key=lambda val_with_rank: val_with_rank()[0]) + # wait for everyone here + torch.distributed.barrier() + + return o_dt + + @property + def enabled(self) -> bool: + """Can be called to check the enabled state of the instance + + Note: + After the request to toggle the state, the + actual state change happens at end of call + to report() + """ + return not self._off + + @property + def configured(self) -> bool: + """Can be called to check if the instance is already configured + + Returns: + bool: returns True if configure was called and was a success, else False + """ + return StragglerDetector._configured + + @property + def my_rank(self): + """Can be called to get configured rank of this instance + + Returns: + int: Configured rank for this instance + """ + return self.rank + + @property + def world_size(self) -> int: + """Can be called to get configured world of this instance + + Returns: + int: World size configured for this instance + """ + return self.world + + def null_method(self) -> None: + """Default method to initialize start/stop method ptrs""" + pass + + def __enter__(self) -> "StragglerDetector": + """Define context/instance entry + + Returns: + StragglerDetector: the instance + """ + self.start() + return self + + def __call__(self, bdata: bool = False) -> "StragglerDetector": + """Callable for the instance. Set context state, + + Useful when the context is used for cpu timers only when bdata=True + + Args: + bdata (bool, optional): when true, only enables cpu timers. Defaults to False. 
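+
+        Example (illustrative usage):
+            ``with __straggler__(bdata=True): batch = get_batch(...)`` collects only the
+            CPU-side get_batch timing, while a plain ``with __straggler__():`` block
+            times the enclosed GPU work.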
+ + Returns: + StragglerDetector: the instance + """ + self.bdata = bdata + return self + + def __exit__( + self, + ex_type: Optional[Type[BaseException]], + ex_val: Optional[BaseException], + ex_tb: Optional[TracebackType], + ) -> bool: + """Define context/instance exit, calls the stop method + + Args: + ex_type (Optional[Type[BaseException]]): Exception type + ex_val (Optional[BaseException]): _description_ + ex_tb (Optional[TracebackType]): _description_ + + Returns: + bool: True if the exception was handled + """ + # Should not suppress errors even if turned off + if ex_type is not None: + err = traceback.format_exception(ex_type, ex_val, ex_tb) + logger.warning(f"{str(ex_val)}\n{err}") + self.stop() + return False + + +# Singleton, global visibility +__straggler__ = StragglerDetector() +"""StragglerDetector: private module variable, not be directly accessed +""" + + +# Check if Transformer Engine has Float8Tensor class +HAVE_TE_FLOAT8TENSOR = False +try: + from transformer_engine.pytorch.float8_tensor import Float8Tensor + + HAVE_TE_FLOAT8TENSOR = True +except (ImportError, ModuleNotFoundError): + # Float8Tensor not found + pass + + +def is_float8tensor(tensor: torch.Tensor) -> bool: + """Check if a tensor is a Transformer Engine Float8Tensor""" + return HAVE_TE_FLOAT8TENSOR and isinstance(tensor, Float8Tensor) + + +def is_submodule(module, parent_module, strict=True): + """ + Check if a module is a submodule of another module. + """ + if strict: + if module is parent_module: + return False + for m in parent_module.modules(): + if m is module: + return True + return False + + +######################## +### context parallel ### +######################## + + +def get_batch_on_this_cp_rank(batch: Dict[str, Any]): + """Slice batch input along sequence dimension into multiple chunks, + which are parallelized across GPUs in a context parallel group. + """ + + # With causal masking, each token only attends to its prior tokens. Simply split + # sequence into CP chunks can result in severe load imbalance. That's to say, chunks + # at the end of sequence have bigger workload than others. To address this issue, + # we split sequence into 2*CP ranks. Assuming CP=2, we then get 4 chunks, chunk_0 + # and chunk_3 are assigned to GPU0, chunk_1 and chunk_2 are assigned to GPU1, so + # that we can get balanced workload among GPUs in a context parallel group. + cp_size = parallel_state.get_context_parallel_world_size() + if cp_size > 1: + cp_rank = parallel_state.get_context_parallel_rank() + for key, val in batch.items(): + if val is not None: + seq_dim = 1 if key != 'attention_mask' else 2 + val = val.view( + *val.shape[0:seq_dim], + 2 * cp_size, + val.shape[seq_dim] // (2 * cp_size), + *val.shape[(seq_dim + 1) :], + ) + index = torch.tensor( + [cp_rank, (2 * cp_size - cp_rank - 1)], device="cpu", pin_memory=True + ).cuda(non_blocking=True) + val = val.index_select(seq_dim, index) + val = val.view(*val.shape[0:seq_dim], -1, *val.shape[(seq_dim + 2) :]) + batch[key] = val + + return batch diff --git a/megatron/inference/text_generation/generation.py b/megatron/inference/text_generation/generation.py index 13e53b3..2b3af8d 100644 --- a/megatron/inference/text_generation/generation.py +++ b/megatron/inference/text_generation/generation.py @@ -1,462 +1,476 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
- -"""Generation utilities.""" - -import torch -import torch.nn.functional as F - -from megatron.training import get_args, get_tokenizer -from megatron.core import mpu -from megatron.training.utils import get_ltor_masks_and_position_ids -from .communication import ( - copy_from_last_to_first_pipeline_stage, - broadcast_from_last_pipeline_stage, - broadcast_from_last_to_first_pipeline_stage) -from .forward_step import ForwardStep -from .sampling import sample -from .beam_utils import BeamHypotheses - -MAX_TOPK_LOGPROBS = 5 -NO_TOPK_LOGPROBS = None - -def score_and_return_on_first_stage(model, tokens: torch.Tensor, lengths: torch.Tensor): - """Function for just scoring. - - Args: - model: no interleaving is supported. - tokens: prompt tokens extended to be of size [b, max_prompt_length] - lengths: original prompt length, size: [b] - Note: Outside of model, other parameters only need to be available on - rank 0. - - Returns: - output_log_probs: log probability of the selected tokens. size: [b, s] - """ - - args = get_args() - - batch_size = tokens.size(0) - max_prompt_length = lengths.max().item() - assert max_prompt_length == tokens.size(1) - - if max_prompt_length > args.max_position_embeddings: - raise ValueError( - f"Length of prompt + tokens_to_generate longer than allowed {max_prompt_length} > {args.max_position_embeddings}" - ) - - if max_prompt_length * batch_size > args.max_tokens_to_oom: - raise ValueError( - f"Too many tokens. {max_prompt_length*batch_size} > {args.max_tokens_to_oom}" - ) - - # forward step. - forward_step = ForwardStep(model, batch_size, args.inference_max_seq_length) - - # =================== - # Pre-allocate memory - # =================== - - # Log probability of the sequence (prompt + generated tokens). - output_log_probs = None - output_topk_log_probs, output_topk_log_indices = None, None - output_log_probs_size = (batch_size, max_prompt_length - 1) - output_topk_log_probs_size = (batch_size, max_prompt_length, MAX_TOPK_LOGPROBS) - - if mpu.is_pipeline_last_stage(): - output_log_probs = torch.empty( - output_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device() - ) - - output_topk_log_probs = torch.empty( - output_topk_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device() - ) - - output_topk_log_indices = torch.empty( - output_topk_log_probs_size, dtype=torch.int64, device=torch.cuda.current_device() - ) - # ============= - # Run infernece - # ============= - with torch.no_grad(): - attention_mask, position_ids = _build_attention_mask_and_position_ids(tokens) - - # logits will be meanigful only in the last pipeline stage. - logits = forward_step(tokens, position_ids, attention_mask) - - if mpu.is_pipeline_last_stage(): - # Always the last stage should have an output. - assert logits is not None - log_probs = F.log_softmax(logits, dim=2) - - # Pick the tokens that we need to get the log - # probabilities for. Note that next input token is - # the token which we selected in the current logits, - # so shift by 1. - indices = torch.unsqueeze(tokens[:, 1:], 2) - output_log_probs = torch.gather(log_probs, 2, indices).squeeze(2) - torch.topk(log_probs, MAX_TOPK_LOGPROBS, dim=2, out=(output_topk_log_probs, output_topk_log_indices)) - - # ====================================== - # Broadcast to the first pipeline stage. 
- # ====================================== - output_topk_log_probs = broadcast_from_last_to_first_pipeline_stage( - output_topk_log_probs_size, torch.float32, output_topk_log_probs - ) - output_topk_log_indices = broadcast_from_last_to_first_pipeline_stage( - output_topk_log_probs_size, torch.int64, output_topk_log_indices - ) - output_log_probs = broadcast_from_last_to_first_pipeline_stage( - output_log_probs_size, torch.float32, output_log_probs - ) - - logprobs_topk = torch.return_types.topk((output_topk_log_probs, output_topk_log_indices)) - return tokens, lengths, output_log_probs, logprobs_topk - -def generate_tokens_probs_and_return_on_first_stage( - model, forward_step, tokens, lengths, - return_output_log_probs=False, - top_k=0, top_p=0.0, top_p_decay=0.0, top_p_bound=0.0, - temperature=1.0, - use_eod_token_for_early_termination=True, - stop_on_double_eol=False, - stop_on_eol=False, - prevent_newline_after_colon=True - ): - """Main token generation function. - - Args: - model: no interleaving is supported. - forward_step (ForwardStep): Class for running the model forward step. - tokens: prompt tokens extended to be of size [b, max-sequence-length] - lengths: original prompt length, size: [b] - return_output_log_probs: flag to calculate the log probability of - the generated tokens. Note that the log probability is the one - from the original logit. - top_k, top_p: top-k and top-p sampling parameters. - Note that top-k = 1 is gready. Also, these paramters are - exclusive meaning that: - if top-k > 0 then we expect top-p=0. - if top-p > 0 then we check for top-k=0. - temperature: sampling temperature. - use_eod_token_for_early_termination: if True, do early termination if - all the sequences have reached this token. - prevent_newline_after_colon: if True, it will disable generating new line \n after : - Note: Outside of model, other parameters only need to be available on - rank 0. - - Returns: Note that is size is adjusted to a lower value than - max-sequence-length if generation is terminated early. - tokens: prompt and generated tokens. size: [b, :] - generated_sequence_lengths: total length (including prompt) of - the generated sequence. size: [b] - output_log_probs: log probability of the selected tokens. size: [b, s] - """ - - args = get_args() - tokenizer = get_tokenizer() - - batch_size = tokens.size(0) - min_prompt_length = lengths.min().item() - max_sequence_length = tokens.size(1) - - if max_sequence_length > args.max_position_embeddings: - raise ValueError("Length of prompt + tokens_to_generate longer than allowed") - - if max_sequence_length * batch_size > args.max_tokens_to_oom: - raise ValueError("Too many tokens. " + str(max_sequence_length*batch_size)+ " is greater than "+str(args.max_tokens_to_oom)) - - # forward step. - forward_step = forward_step(model, batch_size, args.inference_max_seq_length) - - # Added termination_id to support the case that we want to terminate the - # generation once that id is generated. - if hasattr(args, 'eos_id'): - termination_id = args.eos_id - elif hasattr(tokenizer, 'eod'): - termination_id = tokenizer.eod - elif hasattr(tokenizer, 'eos_id'): - termination_id = tokenizer.eos_id - else: - raise AttributeError('No eod token found in tokenizer or args') - - # =================== - # Pre-allocate memory - # =================== - - # Log probability of the sequence (prompt + generated tokens). 
- output_log_probs = None - output_log_probs_size = (batch_size, max_sequence_length - 1) - # Lengths of generated seuquence including including prompts. - generated_sequence_lengths = None - if mpu.is_pipeline_last_stage(): - if return_output_log_probs: - output_log_probs = torch.empty(output_log_probs_size, - dtype=torch.float32, - device=torch.cuda.current_device()) - generated_sequence_lengths = torch.ones( - batch_size, dtype=torch.int64, - device=torch.cuda.current_device()) * max_sequence_length - - # Whether we have reached a termination id. - is_generation_done = torch.zeros(batch_size, dtype=torch.uint8, - device=torch.cuda.current_device()) - - # ============= - # Run infernece - # ============= - - with torch.no_grad(): - attention_mask, position_ids = _build_attention_mask_and_position_ids( - tokens) - prev_context_length = 0 - for context_length in range(min_prompt_length, max_sequence_length): - - # Pick the slice that we need to pass through the network. - tokens2use = tokens[:, prev_context_length:context_length] - positions2use = position_ids[:, prev_context_length:context_length] - attention_mask2use = attention_mask[ - ..., prev_context_length:context_length, :context_length] - - # logits will be meanigful only in the last pipeline stage. - logits = forward_step(tokens2use, positions2use, attention_mask2use) - - if mpu.is_pipeline_last_stage(): - if prevent_newline_after_colon: - logits[tokens2use[:, -1] == tokenizer.tokenize(':')[0], -1, tokenizer.tokenize('\n')[0]] = -1e10 # disable "\n" after ":" - # Always the last stage should have an output. - assert logits is not None - - # Sample. - last_token_logits = logits[:, -1, :] - new_sample = sample(last_token_logits, - top_k=top_k, - top_p=top_p, - temperature=temperature, - vocab_size=tokenizer.vocab_size) - if top_p > 0.0 and top_p_decay > 0.0: - top_p = top_p * top_p_decay - if top_p_bound > 0.0: - top_p = max(top_p, top_p_bound) - - # If a prompt length is smaller or equal th current context - # length, it means we have started generating tokens - started = lengths <= context_length - # Update the tokens. - tokens[started, context_length] = new_sample[started] - - # Calculate the log probabilities. - if return_output_log_probs: - log_probs = F.log_softmax(logits, dim=2) - if return_output_log_probs: - # Pick the tokens that we need to get the log - # probabilities for. Note that next input token is - # the token which we selected in the current logits, - # so shift by 1. - indices = torch.unsqueeze( - tokens[ - :, - (prev_context_length + 1):(context_length + 1)], - 2) - output_log_probs[:, - prev_context_length:context_length] = \ - torch.gather(log_probs, 2, indices).squeeze(2) - - # Update the tokens on the first stage so the next input to - # the network is correct. - copy_from_last_to_first_pipeline_stage(batch_size, torch.int64, - tokens[:, context_length]) - - # Update the context length for the next token generation. - prev_context_length = context_length - - # Check if all the sequences have hit the termination_id. 
- done = None - if mpu.is_pipeline_last_stage(): - # TODO(rprenger) These stopping methods are tokenizer dependent - # instead tokenization should be in the inference loop so stop sequences can be used - if stop_on_double_eol: - hit_double_eol = (new_sample == 628).byte() & started.byte() - hit_two_eols = (new_sample == 198).byte() & (tokens[:, context_length-1] == 198).byte() & started.byte() - done_token = hit_double_eol | hit_two_eols - elif stop_on_eol: - hit_double_eol = (new_sample == 628).byte() & started.byte() - hit_eol = (new_sample == 198).byte() & started.byte() - done_token = hit_double_eol | hit_eol - else: - done_token = (new_sample == termination_id).byte() & \ - started.byte() - - just_finished = (done_token & ~is_generation_done).bool() - generated_sequence_lengths[just_finished.view(-1)] = \ - context_length + 1 - is_generation_done = is_generation_done | done_token - done = torch.all(is_generation_done) - done = broadcast_from_last_pipeline_stage(1, torch.uint8, - tensor=done) - if use_eod_token_for_early_termination and done: - break - - # =================================================== - # Update the length of based on max generated length. - # =================================================== - - tokens = tokens[:, :(context_length + 1)] - if mpu.is_pipeline_last_stage(): - if return_output_log_probs: - output_log_probs = output_log_probs[:, :context_length] - - # ====================================== - # Broadcast to the first pipeline stage. - # ====================================== - - generated_sequence_lengths = broadcast_from_last_to_first_pipeline_stage( - batch_size, torch.int64, generated_sequence_lengths) - if return_output_log_probs: - output_log_probs_size = (batch_size, context_length) - output_log_probs = broadcast_from_last_to_first_pipeline_stage( - output_log_probs_size, torch.float32, output_log_probs) - - return tokens, generated_sequence_lengths, output_log_probs, NO_TOPK_LOGPROBS - -def beam_search_and_return_on_first_stage(model, forward_step, tokens, lengths, beam_size, stop_token, num_return_gen, length_penalty, prevent_newline_after_colon=True): - args = get_args() - tokenizer = get_tokenizer() - - batch_size = tokens.size(0) - assert(batch_size == 1) - prompt_length = lengths.item() - final_sequence_length = tokens.size(1) - final_sequence_length = min(final_sequence_length, args.max_position_embeddings) - - # If the context is too big, this happens - if prompt_length >= final_sequence_length: - raise ValueError("context length + tokens_to_generate too large") - - # forward step. - forward_step = forward_step(model, beam_size, final_sequence_length) - - beam_hyp = BeamHypotheses(beam_size, length_penalty) - best_batches = None - done = torch.zeros(1, dtype=torch.uint8, device=torch.cuda.current_device()) - scores = torch.zeros(beam_size, - dtype=torch.float32, - device=torch.cuda.current_device()).unsqueeze(1) - scores_size_tensor, tokens_size_tensor = None, None - # ============= - # Run infernece - # ============= - with torch.no_grad(): - tokens = tokens.repeat(beam_size, 1) - attention_mask, position_ids = _build_attention_mask_and_position_ids(tokens) - prev_context_length = 0 - for context_length in range(prompt_length, final_sequence_length): - - # Pick the slice that we need to pass through the network. 
- tokens2use = tokens[:, prev_context_length:context_length] - positions2use = position_ids[:, prev_context_length:context_length] - attention_mask2use = attention_mask[ - ..., prev_context_length:context_length, :context_length] - - # logits will be meanigful only in the last pipeline stage. - logits = forward_step(tokens2use, positions2use, attention_mask2use) - - if mpu.is_pipeline_last_stage(): - if prevent_newline_after_colon: - logits[tokens2use[:, -1] == tokenizer.tokenize(':')[0], -1, tokenizer.tokenize('\n')[0]] = -1e10 # disable "\n" after ":" - vocab_size = logits.size(2) - log_probs = F.log_softmax(logits, dim=2) - new_scores = log_probs[:, -1, :] + scores - - if context_length == prompt_length: # if this is the first one - sorted_scores, indices = torch.sort(new_scores[0,:], descending=True) - else: - sorted_scores, indices = torch.sort(new_scores.view(-1), descending=True) - - best_beam_ids = torch.div(indices[: 2 * beam_size], vocab_size).trunc().long() - best_words = indices[:2 * beam_size] % vocab_size - best_scores = sorted_scores[: 2 * beam_size] - - next_beams = [] - for beam_token_rank, (token_id, beam_score, beam_id) in enumerate( - zip(best_words, best_scores, best_beam_ids) - ): - if token_id.item() == stop_token: - # if beam_token does not belong to top num_beams tokens, it should not be added - is_beam_token_worse_than_top_num_beams = beam_token_rank >= beam_size - if is_beam_token_worse_than_top_num_beams: - continue - beam_hyp.add( - tokens[beam_id].clone(), - beam_score, - context_length + 1 - prompt_length - ) - else: - # add next predicted token since it is not eos_token - next_beams.append((token_id, beam_score, beam_id)) - - if len(next_beams) == beam_size: - break - - if beam_hyp.is_done(best_scores.max().item(), context_length + 1 - prompt_length): - done = torch.ones(1, dtype=torch.uint8, device=torch.cuda.current_device()) - - best_batches = tokens.new([item[2] for item in next_beams]) - tokens = tokens[best_batches,:] - tokens[:, context_length] = tokens.new([item[0] for item in next_beams]) - scores = scores.new([item[1] for item in next_beams]).unsqueeze(1) - - # torch.distributed.barrier() - done = broadcast_from_last_pipeline_stage(1, torch.uint8, done) - if done: - break - - # Update the tokens on the first stage so the next input to - # the network is correct. - copy_from_last_to_first_pipeline_stage(tokens.size(), torch.int64, - tokens) - - # set inference key values to make it consistent with best beam index - best_batches = broadcast_from_last_pipeline_stage(beam_size, torch.int64, best_batches) - forward_step.inference_params.swap_key_value_dict(best_batches) - - # Update the context length for the next token generation. 
- prev_context_length = context_length - - if mpu.is_pipeline_last_stage(): - # if cannot find stop token, add open beams to hyps - if not done: - for beam_id in range(beam_size): - beam_hyp.add(tokens[beam_id].clone(), scores[beam_id].squeeze(), context_length + 1 - prompt_length) - - # rank based on scores - sorted_hyps = sorted(beam_hyp.beams, key=lambda x: x[0], reverse=True) - num_return_gen = min(num_return_gen, len(sorted_hyps)) - scores = [sorted_hyps[i][0] for i in range(num_return_gen)] - tokens = [sorted_hyps[i][1] for i in range(num_return_gen)] - scores = torch.stack(scores, dim=0) - tokens = torch.stack(tokens, dim=0) - scores_size_tensor = torch.tensor(scores.shape, dtype=torch.int64, device=torch.cuda.current_device()) - tokens_size_tensor = torch.tensor(tokens.shape, dtype=torch.int64, device=torch.cuda.current_device()) - - scores_size_tensor = broadcast_from_last_pipeline_stage(1, torch.int64, scores_size_tensor) - tokens_size_tensor = broadcast_from_last_pipeline_stage(2, torch.int64, tokens_size_tensor) - - scores = broadcast_from_last_to_first_pipeline_stage(tuple(scores_size_tensor), torch.float32, scores) - tokens = broadcast_from_last_to_first_pipeline_stage(tuple(tokens_size_tensor), torch.int64, tokens) - - return tokens, scores - - -def _build_attention_mask_and_position_ids(tokens): - """Build the attention mask and postition ids for the input tokens.""" - - # Since we are not interested in loss-mask and reset attention/position - # is also False, eod_token is not used so it is safe to set it to None. - attention_mask, _, position_ids = get_ltor_masks_and_position_ids( - data=tokens, - eod_token=None, - reset_position_ids=False, - reset_attention_mask=False, - eod_mask_loss=False) - - return attention_mask, position_ids +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. + +"""Generation utilities.""" + +import torch +import torch.nn.functional as F + +from megatron.training import get_args, get_tokenizer +from megatron.core import mpu +from megatron.training.utils import get_ltor_masks_and_position_ids +from megatron.core.transformer.cuda_graphs import create_cudagraphs +from .communication import ( + copy_from_last_to_first_pipeline_stage, + broadcast_from_last_pipeline_stage, + broadcast_from_last_to_first_pipeline_stage) +from .forward_step import ForwardStep +from .sampling import sample +from .beam_utils import BeamHypotheses + +MAX_TOPK_LOGPROBS = 5 +NO_TOPK_LOGPROBS = None + +def score_and_return_on_first_stage(model, tokens: torch.Tensor, lengths: torch.Tensor): + """Function for just scoring. + + Args: + model: no interleaving is supported. + tokens: prompt tokens extended to be of size [b, max_prompt_length] + lengths: original prompt length, size: [b] + Note: Outside of model, other parameters only need to be available on + rank 0. + + Returns: + output_log_probs: log probability of the selected tokens. size: [b, s] + """ + + args = get_args() + + batch_size = tokens.size(0) + max_prompt_length = lengths.max().item() + assert max_prompt_length == tokens.size(1) + + if max_prompt_length > args.max_position_embeddings: + raise ValueError( + f"Length of prompt + tokens_to_generate longer than allowed {max_prompt_length} > {args.max_position_embeddings}" + ) + + if max_prompt_length * batch_size > args.max_tokens_to_oom: + raise ValueError( + f"Too many tokens. {max_prompt_length*batch_size} > {args.max_tokens_to_oom}" + ) + + # forward step. 
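+    # ForwardStep sets up the inference params (KV-cache bookkeeping) for batch_size
+    # sequences of up to args.inference_max_seq_length tokens; scoring then needs only
+    # a single forward pass over the full prompt.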
+    forward_step = ForwardStep(model, batch_size, args.inference_max_seq_length)
+
+    # ===================
+    # Pre-allocate memory
+    # ===================
+
+    # Log probability of the sequence (prompt + generated tokens).
+    output_log_probs = None
+    output_topk_log_probs, output_topk_log_indices = None, None
+    output_log_probs_size = (batch_size, max_prompt_length - 1)
+    output_topk_log_probs_size = (batch_size, max_prompt_length, MAX_TOPK_LOGPROBS)
+
+    if mpu.is_pipeline_last_stage():
+        output_log_probs = torch.empty(
+            output_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device()
+        )
+
+        output_topk_log_probs = torch.empty(
+            output_topk_log_probs_size, dtype=torch.float32, device=torch.cuda.current_device()
+        )
+
+        output_topk_log_indices = torch.empty(
+            output_topk_log_probs_size, dtype=torch.int64, device=torch.cuda.current_device()
+        )
+    # =============
+    # Run inference
+    # =============
+    with torch.no_grad():
+        attention_mask, position_ids = _build_attention_mask_and_position_ids(tokens)
+
+        # logits will be meaningful only in the last pipeline stage.
+        logits = forward_step(tokens, position_ids, attention_mask)
+
+        if mpu.is_pipeline_last_stage():
+            # Always the last stage should have an output.
+            assert logits is not None
+            log_probs = F.log_softmax(logits, dim=2).to(dtype=output_topk_log_probs.dtype)
+
+            # Pick the tokens that we need to get the log
+            # probabilities for. Note that next input token is
+            # the token which we selected in the current logits,
+            # so shift by 1.
+            indices = torch.unsqueeze(tokens[:, 1:], 2)
+            output_log_probs = torch.gather(log_probs, 2, indices).squeeze(2)
+            torch.topk(log_probs, MAX_TOPK_LOGPROBS, dim=2, out=(output_topk_log_probs, output_topk_log_indices))
+
+    # ======================================
+    # Broadcast to the first pipeline stage.
+    # ======================================
+    output_topk_log_probs = broadcast_from_last_to_first_pipeline_stage(
+        output_topk_log_probs_size, torch.float32, output_topk_log_probs
+    )
+    output_topk_log_indices = broadcast_from_last_to_first_pipeline_stage(
+        output_topk_log_probs_size, torch.int64, output_topk_log_indices
+    )
+    output_log_probs = broadcast_from_last_to_first_pipeline_stage(
+        output_log_probs_size, torch.float32, output_log_probs
+    )
+
+    logprobs_topk = torch.return_types.topk((output_topk_log_probs, output_topk_log_indices))
+    return tokens, lengths, output_log_probs, logprobs_topk
+
+def generate_tokens_probs_and_return_on_first_stage(
+        model, forward_step, tokens, lengths,
+        return_output_log_probs=False,
+        top_k=0, top_p=0.0, top_p_decay=0.0, top_p_bound=0.0,
+        temperature=1.0,
+        use_eod_token_for_early_termination=True,
+        stop_on_double_eol=False,
+        stop_on_eol=False,
+        prevent_newline_after_colon=True
+        ):
+    """Main token generation function.
+
+    Args:
+        model: no interleaving is supported.
+        forward_step (ForwardStep): Class for running the model forward step.
+        tokens: prompt tokens extended to be of size [b, max-sequence-length]
+        lengths: original prompt length, size: [b]
+        return_output_log_probs: flag to calculate the log probability of
+            the generated tokens. Note that the log probability is the one
+            from the original logit.
+        top_k, top_p: top-k and top-p sampling parameters.
+            Note that top-k = 1 is greedy. Also, these parameters are
+            exclusive meaning that:
+                if top-k > 0 then we expect top-p=0.
+                if top-p > 0 then we check for top-k=0.
+        temperature: sampling temperature.
+        use_eod_token_for_early_termination: if True, do early termination if
+            all the sequences have reached this token.
+        prevent_newline_after_colon: if True, disable generating a new line ("\n") after ":".
+        Note: Outside of model, other parameters only need to be available on
+        rank 0.
+
+    Returns: Note that the size is adjusted to a lower value than
+       max-sequence-length if generation is terminated early.
+        tokens: prompt and generated tokens. size: [b, :]
+        generated_sequence_lengths: total length (including prompt) of
+            the generated sequence. size: [b]
+        output_log_probs: log probability of the selected tokens. size: [b, s]
+    """
+
+    args = get_args()
+    tokenizer = get_tokenizer()
+
+    batch_size = tokens.size(0)
+    min_prompt_length = lengths.min().item()
+    max_sequence_length = tokens.size(1)
+
+    if max_sequence_length > args.max_position_embeddings:
+        raise ValueError("Length of prompt + tokens_to_generate longer than allowed")
+
+    if max_sequence_length * batch_size > args.max_tokens_to_oom:
+        raise ValueError("Too many tokens. " + str(max_sequence_length * batch_size) + " is greater than " + str(args.max_tokens_to_oom))
+
+    # forward step.
+    forward_step = forward_step(model, batch_size, args.inference_max_seq_length)
+
+    # Added termination_id to support the case that we want to terminate the
+    # generation once that id is generated.
+    if hasattr(args, 'eos_id'):
+        termination_id = args.eos_id
+    elif hasattr(tokenizer, 'eod'):
+        termination_id = tokenizer.eod
+    elif hasattr(tokenizer, 'eos_id'):
+        termination_id = tokenizer.eos_id
+    else:
+        raise AttributeError('No eod token found in tokenizer or args')
+
+    # ===================
+    # Pre-allocate memory
+    # ===================
+
+    # Log probability of the sequence (prompt + generated tokens).
+    output_log_probs = None
+    output_log_probs_size = (batch_size, max_sequence_length - 1)
+    # Lengths of generated sequences including prompts.
+    generated_sequence_lengths = None
+    if mpu.is_pipeline_last_stage():
+        if return_output_log_probs:
+            output_log_probs = torch.empty(output_log_probs_size,
+                                           dtype=torch.float32,
+                                           device=torch.cuda.current_device())
+        generated_sequence_lengths = torch.ones(
+                batch_size, dtype=torch.int64,
+                device=torch.cuda.current_device()) * max_sequence_length
+
+    # Whether we have reached a termination id.
+    is_generation_done = torch.zeros(batch_size, dtype=torch.uint8,
+                                     device=torch.cuda.current_device())
+
+    # =============
+    # Run inference
+    # =============
+
+    with torch.no_grad():
+        attention_mask, position_ids = _build_attention_mask_and_position_ids(
+            tokens)
+        prev_context_length = 0
+        for context_length in range(min_prompt_length, max_sequence_length):
+
+            prefill = context_length == min_prompt_length
+            if not prefill:
+                forward_step.inference_params.enable_decode_mode()
+
+            # Pick the slice that we need to pass through the network.
+            tokens2use = tokens[:, prev_context_length:context_length]
+            positions2use = position_ids[:, prev_context_length:context_length]
+
+            # Do not pass a variable-shape attention mask in the decode phase.
+            attention_mask2use = attention_mask[
+                ..., prev_context_length:context_length, :context_length] if prefill else None
+
+            # logits will be meaningful only in the last pipeline stage.
+            logits = forward_step(tokens2use, positions2use, attention_mask2use)
+
+            if args.enable_cuda_graph:
+                create_cudagraphs()
+
+            if mpu.is_pipeline_last_stage():
+                if prevent_newline_after_colon:
+                    logits[tokens2use[:, -1] == tokenizer.tokenize(':')[0], -1, tokenizer.tokenize('\n')[0]] = -1e10 # disable "\n" after ":"
+                # The last stage should always have an output.
+                assert logits is not None
+
+                # Sample.
+                last_token_logits = logits[:, -1, :]
+                new_sample = sample(last_token_logits,
+                                    top_k=top_k,
+                                    top_p=top_p,
+                                    temperature=temperature,
+                                    vocab_size=tokenizer.vocab_size)
+                if top_p > 0.0 and top_p_decay > 0.0:
+                    top_p = top_p * top_p_decay
+                    if top_p_bound > 0.0:
+                        top_p = max(top_p, top_p_bound)
+
+                # If a prompt length is smaller than or equal to the current
+                # context length, it means we have started generating tokens.
+                started = lengths <= context_length
+                # Update the tokens.
+                tokens[started, context_length] = new_sample[started]
+
+                # Calculate the log probabilities.
+                if return_output_log_probs:
+                    log_probs = F.log_softmax(logits, dim=2)
+                    if return_output_log_probs:
+                        # Pick the tokens that we need to get the log
+                        # probabilities for. Note that the next input token is
+                        # the token which we selected in the current logits,
+                        # so shift by 1.
+                        indices = torch.unsqueeze(
+                            tokens[
+                                :,
+                                (prev_context_length + 1):(context_length + 1)],
+                            2)
+                        output_log_probs[:,
+                                         prev_context_length:context_length] = \
+                            torch.gather(log_probs, 2, indices).squeeze(2)
+
+            # Update the tokens on the first stage so the next input to
+            # the network is correct.
+            copy_from_last_to_first_pipeline_stage(batch_size, torch.int64,
+                                                   tokens[:, context_length])
+
+            # Update the context length for the next token generation.
+            prev_context_length = context_length
+
+            # Check if all the sequences have hit the termination_id.
+            done = None
+            if mpu.is_pipeline_last_stage():
+                # TODO(rprenger) These stopping methods are tokenizer-dependent;
+                # instead, tokenization should be in the inference loop so stop sequences can be used.
+                if stop_on_double_eol:
+                    hit_double_eol = (new_sample == 628).byte() & started.byte()
+                    hit_two_eols = (new_sample == 198).byte() & (tokens[:, context_length-1] == 198).byte() & started.byte()
+                    done_token = hit_double_eol | hit_two_eols
+                elif stop_on_eol:
+                    hit_double_eol = (new_sample == 628).byte() & started.byte()
+                    hit_eol = (new_sample == 198).byte() & started.byte()
+                    done_token = hit_double_eol | hit_eol
+                else:
+                    done_token = (new_sample == termination_id).byte() & \
+                        started.byte()
+
+                just_finished = (done_token & ~is_generation_done).bool()
+                generated_sequence_lengths[just_finished.view(-1)] = \
+                    context_length + 1
+                is_generation_done = is_generation_done | done_token
+                done = torch.all(is_generation_done)
+            done = broadcast_from_last_pipeline_stage(1, torch.uint8,
+                                                      tensor=done)
+            if use_eod_token_for_early_termination and done:
+                break
+
+    # ===================================================
+    # Update the length based on the max generated length.
+    # ===================================================
+
+    tokens = tokens[:, :(context_length + 1)]
+    if mpu.is_pipeline_last_stage():
+        if return_output_log_probs:
+            output_log_probs = output_log_probs[:, :context_length]
+
+    # ======================================
+    # Broadcast to the first pipeline stage.
+    # ======================================
+
+    generated_sequence_lengths = broadcast_from_last_to_first_pipeline_stage(
+        batch_size, torch.int64, generated_sequence_lengths)
+    if return_output_log_probs:
+        output_log_probs_size = (batch_size, context_length)
+        output_log_probs = broadcast_from_last_to_first_pipeline_stage(
+            output_log_probs_size, torch.float32, output_log_probs)
+
+    return tokens, generated_sequence_lengths, output_log_probs, NO_TOPK_LOGPROBS
+
+def beam_search_and_return_on_first_stage(model, forward_step, tokens, lengths, beam_size, stop_token, num_return_gen, length_penalty, prevent_newline_after_colon=True):
+    args = get_args()
+    tokenizer = get_tokenizer()
+
+    batch_size = tokens.size(0)
+    assert(batch_size == 1)
+    prompt_length = lengths.item()
+    final_sequence_length = tokens.size(1)
+    final_sequence_length = min(final_sequence_length, args.max_position_embeddings)
+
+    # This happens if the context is too big.
+    if prompt_length >= final_sequence_length:
+        raise ValueError("context length + tokens_to_generate too large")
+
+    # forward step.
+    forward_step = forward_step(model, beam_size, final_sequence_length)
+
+    beam_hyp = BeamHypotheses(beam_size, length_penalty)
+    best_batches = None
+    done = torch.zeros(1, dtype=torch.uint8, device=torch.cuda.current_device())
+    scores = torch.zeros(beam_size,
+                         dtype=torch.float32,
+                         device=torch.cuda.current_device()).unsqueeze(1)
+    scores_size_tensor, tokens_size_tensor = None, None
+    # =============
+    # Run inference
+    # =============
+    with torch.no_grad():
+        tokens = tokens.repeat(beam_size, 1)
+        attention_mask, position_ids = _build_attention_mask_and_position_ids(tokens)
+        prev_context_length = 0
+        for context_length in range(prompt_length, final_sequence_length):
+
+            prefill = context_length == prompt_length
+
+            # Pick the slice that we need to pass through the network.
+            tokens2use = tokens[:, prev_context_length:context_length]
+            positions2use = position_ids[:, prev_context_length:context_length]
+
+            # Do not pass a variable-shape attention mask in the decode phase.
+            attention_mask2use = attention_mask[
+                ..., prev_context_length:context_length, :context_length] if not prefill else None
+
+            # logits will be meaningful only in the last pipeline stage.
+ logits = forward_step(tokens2use, positions2use, attention_mask2use) + + if mpu.is_pipeline_last_stage(): + if prevent_newline_after_colon: + logits[tokens2use[:, -1] == tokenizer.tokenize(':')[0], -1, tokenizer.tokenize('\n')[0]] = -1e10 # disable "\n" after ":" + vocab_size = logits.size(2) + log_probs = F.log_softmax(logits, dim=2) + new_scores = log_probs[:, -1, :] + scores + + if context_length == prompt_length: # if this is the first one + sorted_scores, indices = torch.sort(new_scores[0,:], descending=True) + else: + sorted_scores, indices = torch.sort(new_scores.view(-1), descending=True) + + best_beam_ids = torch.div(indices[: 2 * beam_size], vocab_size).trunc().long() + best_words = indices[:2 * beam_size] % vocab_size + best_scores = sorted_scores[: 2 * beam_size] + + next_beams = [] + for beam_token_rank, (token_id, beam_score, beam_id) in enumerate( + zip(best_words, best_scores, best_beam_ids) + ): + if token_id.item() == stop_token: + # if beam_token does not belong to top num_beams tokens, it should not be added + is_beam_token_worse_than_top_num_beams = beam_token_rank >= beam_size + if is_beam_token_worse_than_top_num_beams: + continue + beam_hyp.add( + tokens[beam_id].clone(), + beam_score, + context_length + 1 - prompt_length + ) + else: + # add next predicted token since it is not eos_token + next_beams.append((token_id, beam_score, beam_id)) + + if len(next_beams) == beam_size: + break + + if beam_hyp.is_done(best_scores.max().item(), context_length + 1 - prompt_length): + done = torch.ones(1, dtype=torch.uint8, device=torch.cuda.current_device()) + + best_batches = tokens.new([item[2] for item in next_beams]) + tokens = tokens[best_batches,:] + tokens[:, context_length] = tokens.new([item[0] for item in next_beams]) + scores = scores.new([item[1] for item in next_beams]).unsqueeze(1) + + # torch.distributed.barrier() + done = broadcast_from_last_pipeline_stage(1, torch.uint8, done) + if done: + break + + # Update the tokens on the first stage so the next input to + # the network is correct. + copy_from_last_to_first_pipeline_stage(tokens.size(), torch.int64, + tokens) + + # set inference key values to make it consistent with best beam index + best_batches = broadcast_from_last_pipeline_stage(beam_size, torch.int64, best_batches) + forward_step.inference_params.swap_key_value_dict(best_batches) + + # Update the context length for the next token generation. 
+            prev_context_length = context_length
+
+    if mpu.is_pipeline_last_stage():
+        # if we cannot find a stop token, add the open beams to the hypotheses
+        if not done:
+            for beam_id in range(beam_size):
+                beam_hyp.add(tokens[beam_id].clone(), scores[beam_id].squeeze(), context_length + 1 - prompt_length)
+
+        # rank based on scores
+        sorted_hyps = sorted(beam_hyp.beams, key=lambda x: x[0], reverse=True)
+        num_return_gen = min(num_return_gen, len(sorted_hyps))
+        scores = [sorted_hyps[i][0] for i in range(num_return_gen)]
+        tokens = [sorted_hyps[i][1] for i in range(num_return_gen)]
+        scores = torch.stack(scores, dim=0)
+        tokens = torch.stack(tokens, dim=0)
+        scores_size_tensor = torch.tensor(scores.shape, dtype=torch.int64, device=torch.cuda.current_device())
+        tokens_size_tensor = torch.tensor(tokens.shape, dtype=torch.int64, device=torch.cuda.current_device())
+
+    scores_size_tensor = broadcast_from_last_pipeline_stage(1, torch.int64, scores_size_tensor)
+    tokens_size_tensor = broadcast_from_last_pipeline_stage(2, torch.int64, tokens_size_tensor)
+
+    scores = broadcast_from_last_to_first_pipeline_stage(tuple(scores_size_tensor), torch.float32, scores)
+    tokens = broadcast_from_last_to_first_pipeline_stage(tuple(tokens_size_tensor), torch.int64, tokens)
+
+    return tokens, scores
+
+
+def _build_attention_mask_and_position_ids(tokens):
+    """Build the attention mask and position ids for the input tokens."""
+
+    # Since we are not interested in the loss mask, and reset_attention_mask /
+    # reset_position_ids are also False, eod_token is not used, so it is safe to set it to None.
+    attention_mask, _, position_ids = get_ltor_masks_and_position_ids(
+        data=tokens,
+        eod_token=None,
+        reset_position_ids=False,
+        reset_attention_mask=False,
+        eod_mask_loss=False)
+
+    return attention_mask, position_ids
diff --git a/megatron/inference/text_generation/tokenization.py b/megatron/inference/text_generation/tokenization.py
index 7610cd4..9fa3390 100644
--- a/megatron/inference/text_generation/tokenization.py
+++ b/megatron/inference/text_generation/tokenization.py
@@ -1,135 +1,136 @@
-# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
- -"""Tokenization utilities.""" - - -import torch - - -from megatron.core import parallel_state -from megatron.training import get_args, get_tokenizer -from .communication import broadcast_int_list, broadcast_tensor - - -def detokenize_generations(tokens_gpu_tensor, - lengths_gpu_tensor, - detokenize_segments): - """Detokenize the generated tokens.""" - - tokenizer = get_tokenizer() - prompts_plus_generations = [] - prompts_plus_generations_segments = [] - - tokens = tokens_gpu_tensor.cpu().numpy().tolist() - lengths = lengths_gpu_tensor.cpu().numpy().tolist() - for sequence_tokens, length in zip(tokens, lengths): - sequence_tokens = sequence_tokens[:length] - detok_str = tokenizer.detokenize(sequence_tokens) - prompts_plus_generations.append(detok_str) - if detokenize_segments: - try: - offsets = tokenizer.offsets(sequence_tokens, detok_str) - words = [ - detok_str[start:end] - for start, end in zip(offsets, offsets[1:] + [len(detok_str)]) - ] - except NotImplementedError: - words = [] - for token in sequence_tokens: - word = tokenizer.tokenizer.decoder[token] - word = bytearray([tokenizer.tokenizer.byte_decoder[c] for c in word]).decode( - "utf-8", errors="replace" - ) - words.append(word) - - prompts_plus_generations_segments.append(words) - - return tokens, prompts_plus_generations, prompts_plus_generations_segments - - -def tokenize_prompts(prompts=None, tokens_to_generate=None, - add_BOS=None, rank=0, data_parallel=False): - """Tokenize prompts and make them avaiable on all ranks. - - Args: - data_parallel (bool): Broadcast tokens across a single data parallel model replica. - """ - - # On all ranks set to None so we can pass them to functions - sizes_list = None - prompts_tokens_cuda_long_tensor = None - prompts_length_cuda_long_tensor = None - - # On the specified rank, build the above. - src_rank = torch.distributed.get_rank() - if data_parallel: - src_rank = parallel_state.get_data_parallel_src_rank() - - if src_rank == rank: - assert prompts is not None - assert tokens_to_generate is not None - # Tensor of tokens padded and their unpadded length. - prompts_tokens_cuda_long_tensor, prompts_length_cuda_long_tensor = \ - _tokenize_prompts_and_batch(prompts, tokens_to_generate, add_BOS) - # We need the sizes of these tensors for the boradcast - sizes_list = [prompts_tokens_cuda_long_tensor.size(0), # Batch size - prompts_tokens_cuda_long_tensor.size(1)] # Sequence lenght - - # First, broadcast the sizes. - sizes_tensor = broadcast_int_list(2, int_list=sizes_list, rank=rank, data_parallel=data_parallel) - - # Now that we have the sizes, we can boradcast the tokens - # and length tensors. - sizes = sizes_tensor.tolist() - prompts_tokens_cuda_long_tensor = broadcast_tensor( - sizes, torch.int64, tensor=prompts_tokens_cuda_long_tensor, rank=rank, data_parallel=data_parallel) - prompts_length_cuda_long_tensor = broadcast_tensor( - sizes[0], torch.int64, tensor=prompts_length_cuda_long_tensor, - rank=rank, data_parallel=data_parallel) - - return prompts_tokens_cuda_long_tensor, prompts_length_cuda_long_tensor - - -def _tokenize_prompts_and_batch(prompts, tokens_to_generate, add_BOS): - """Given a set of prompts and number of tokens to generate: - - tokenize prompts - - set the sequence length to be the max of length of prompts - plus the number of tokens we would like to generate - - pad all the sequences to this length so we can convert them - into a 2D tensor. - """ - - # Tokenize all the prompts. 
- tokenizer = get_tokenizer() - if hasattr(tokenizer, 'eod'): - eod_token = tokenizer.eod - elif hasattr(tokenizer, 'eos_id'): - eod_token = tokenizer.eos_id - else: - raise AttributeError('No eod token found in Tokenizer') - if add_BOS: - prompts_tokens = [[eod_token] + tokenizer.tokenize(prompt) - for prompt in prompts] - else: - prompts_tokens = [tokenizer.tokenize(prompt) for prompt in prompts] - - # Now we have a list of list of tokens which each list has a different - # size. We want to extend this list to: - # - incorporate the tokens that need to be generated - # - make all the sequences equal length. - # Get the prompts length. - prompts_length = [len(prompt_tokens) for prompt_tokens in prompts_tokens] - # Get the max prompts length. - max_prompt_len = max(prompts_length) - # Number of tokens in the each sample of the batch. - samples_length = max_prompt_len + tokens_to_generate - # Now update the list of list to be of the same size: samples_length. - for prompt_tokens, prompt_length in zip(prompts_tokens, prompts_length): - padding_size = samples_length - prompt_length - prompt_tokens.extend([eod_token] * padding_size) - - # Now we are in a structured format, we can convert to tensors. - prompts_tokens_tensor = torch.tensor(prompts_tokens, dtype=torch.long, device='cuda') - prompts_length_tensor = torch.tensor(prompts_length, dtype=torch.long, device='cuda') - - return prompts_tokens_tensor, prompts_length_tensor +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. + +"""Tokenization utilities.""" + + +import torch + + +from megatron.core import parallel_state +from megatron.training import get_args, get_tokenizer +from .communication import broadcast_int_list, broadcast_tensor + + +def detokenize_generations(tokens_gpu_tensor, + lengths_gpu_tensor, + detokenize_segments): + """Detokenize the generated tokens.""" + + tokenizer = get_tokenizer() + prompts_plus_generations = [] + prompts_plus_generations_segments = [] + + tokens = tokens_gpu_tensor.cpu().numpy().tolist() + lengths = lengths_gpu_tensor.cpu().numpy().tolist() + + for sequence_tokens, length in zip(tokens, lengths): + sequence_tokens = sequence_tokens[:length] + detok_str = tokenizer.detokenize(sequence_tokens) + prompts_plus_generations.append(detok_str) + if detokenize_segments: + try: + offsets = tokenizer.offsets(sequence_tokens, detok_str) + words = [ + detok_str[start:end] + for start, end in zip(offsets, offsets[1:] + [len(detok_str)]) + ] + except NotImplementedError: + words = [] + for token in sequence_tokens: + word = tokenizer.tokenizer.decoder[token] + word = bytearray([tokenizer.tokenizer.byte_decoder[c] for c in word]).decode( + "utf-8", errors="replace" + ) + words.append(word) + + prompts_plus_generations_segments.append(words) + + return tokens, prompts_plus_generations, prompts_plus_generations_segments + + +def tokenize_prompts(prompts=None, tokens_to_generate=None, + add_BOS=None, rank=0, data_parallel=False): + """Tokenize prompts and make them avaiable on all ranks. + + Args: + data_parallel (bool): Broadcast tokens across a single data parallel model replica. + """ + + # On all ranks set to None so we can pass them to functions + sizes_list = None + prompts_tokens_cuda_long_tensor = None + prompts_length_cuda_long_tensor = None + + # On the specified rank, build the above. 
+    src_rank = torch.distributed.get_rank()
+    if data_parallel:
+        src_rank = parallel_state.get_data_parallel_src_rank()
+
+    if src_rank == rank:
+        assert prompts is not None
+        assert tokens_to_generate is not None
+        # Tensor of tokens padded and their unpadded length.
+        prompts_tokens_cuda_long_tensor, prompts_length_cuda_long_tensor = \
+            _tokenize_prompts_and_batch(prompts, tokens_to_generate, add_BOS)
+        # We need the sizes of these tensors for the broadcast
+        sizes_list = [prompts_tokens_cuda_long_tensor.size(0), # Batch size
+                      prompts_tokens_cuda_long_tensor.size(1)] # Sequence length
+
+    # First, broadcast the sizes.
+    sizes_tensor = broadcast_int_list(2, int_list=sizes_list, rank=rank, data_parallel=data_parallel)
+
+    # Now that we have the sizes, we can broadcast the tokens
+    # and length tensors.
+    sizes = sizes_tensor.tolist()
+    prompts_tokens_cuda_long_tensor = broadcast_tensor(
+        sizes, torch.int64, tensor=prompts_tokens_cuda_long_tensor, rank=rank, data_parallel=data_parallel)
+    prompts_length_cuda_long_tensor = broadcast_tensor(
+        sizes[0], torch.int64, tensor=prompts_length_cuda_long_tensor,
+        rank=rank, data_parallel=data_parallel)
+
+    return prompts_tokens_cuda_long_tensor, prompts_length_cuda_long_tensor
+
+
+def _tokenize_prompts_and_batch(prompts, tokens_to_generate, add_BOS):
+    """Given a set of prompts and number of tokens to generate:
+        - tokenize prompts
+        - set the sequence length to be the max of length of prompts
+          plus the number of tokens we would like to generate
+        - pad all the sequences to this length so we can convert them
+          into a 2D tensor.
+    """
+
+    # Tokenize all the prompts.
+    tokenizer = get_tokenizer()
+    if hasattr(tokenizer, 'eod'):
+        eod_token = tokenizer.eod
+    elif hasattr(tokenizer, 'eos_id'):
+        eod_token = tokenizer.eos_id
+    else:
+        raise AttributeError('No eod token found in Tokenizer')
+    if add_BOS:
+        prompts_tokens = [[eod_token] + tokenizer.tokenize(prompt)
+                          for prompt in prompts]
+    else:
+        prompts_tokens = [tokenizer.tokenize(prompt) for prompt in prompts]
+
+    # Now we have a list of lists of tokens where each list has a different
+    # size. We want to extend this list to:
+    #   - incorporate the tokens that need to be generated
+    #   - make all the sequences equal length.
+    # Get the prompts length.
+    prompts_length = [len(prompt_tokens) for prompt_tokens in prompts_tokens]
+    # Get the max prompts length.
+    max_prompt_len = max(prompts_length)
+    # Number of tokens in each sample of the batch.
+    samples_length = max_prompt_len + tokens_to_generate
+    # Now update each list to be of the same size: samples_length.
+    for prompt_tokens, prompt_length in zip(prompts_tokens, prompts_length):
+        padding_size = samples_length - prompt_length
+        prompt_tokens.extend([eod_token] * padding_size)
+
+    # Now that we are in a structured format, we can convert to tensors.
+    prompts_tokens_tensor = torch.tensor(prompts_tokens, dtype=torch.long, device='cuda')
+    prompts_length_tensor = torch.tensor(prompts_length, dtype=torch.long, device='cuda')
+
+    return prompts_tokens_tensor, prompts_length_tensor
diff --git a/megatron/inference/text_generation_server.py b/megatron/inference/text_generation_server.py
index df1e672..d8a4e81 100644
--- a/megatron/inference/text_generation_server.py
+++ b/megatron/inference/text_generation_server.py
@@ -1,231 +1,234 @@
-# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
-import datetime -import json - -from flask import Flask, request, jsonify -from flask_restful import Resource, Api - -from megatron.inference.text_generation import generate_and_post_process -from megatron.inference.text_generation import beam_search_and_post_process -from megatron.inference.endpoints.common import send_do_generate, send_do_beam_search, LOCK -from megatron.inference.endpoints.completions import MegatronCompletions - - -class MegatronGenerate(Resource): - def __init__(self, model): - self.model = model - - def put(self): - if not "prompts" in request.get_json(): - return "prompts argument required", 400 - - if "max_len" in request.get_json(): - return "max_len is no longer used. Replace with tokens_to_generate", 400 - - if "sentences" in request.get_json(): - return "sentences is no longer used. Replace with prompts", 400 - - prompts = request.get_json()["prompts"] - if not isinstance(prompts, list): - return "prompts is not a list of strings", 400 - - if len(prompts) == 0: - return "prompts is empty", 400 - - if len(prompts) > 128: - return "Maximum number of prompts is 128", 400 - - tokens_to_generate = 64 # Choosing hopefully sane default. Full sequence is slow - if "tokens_to_generate" in request.get_json(): - tokens_to_generate = request.get_json()["tokens_to_generate"] - if not isinstance(tokens_to_generate, int): - return "tokens_to_generate must be an integer greater than 0" - if tokens_to_generate < 0: - return "tokens_to_generate must be an integer greater than or equal to 0" - - logprobs = False - if "logprobs" in request.get_json(): - logprobs = request.get_json()["logprobs"] - if not isinstance(logprobs, bool): - return "logprobs must be a boolean value" - - if tokens_to_generate == 0 and not logprobs: - return "tokens_to_generate=0 implies logprobs should be True" - - temperature = 1.0 - if "temperature" in request.get_json(): - temperature = request.get_json()["temperature"] - if not (isinstance(temperature, (int, float))): - return "temperature must be a positive number less than or equal to 1000.0" - if not (0.0 < temperature <= 100.0): - return "temperature must be a positive number less than or equal to 100.0" - - top_k = 0 - if "top_k" in request.get_json(): - top_k = request.get_json()["top_k"] - if not (isinstance(top_k, int)): - return "top_k must be an integer equal to or greater than 0 and less than or equal to 1000" - if not (0 <= top_k <= 1000): - return "top_k must be equal to or greater than 0 and less than or equal to 1000" - - top_p = 0.0 - if "top_p" in request.get_json(): - top_p = request.get_json()["top_p"] - if not (isinstance(top_p, float)): - return "top_p must be a positive float less than or equal to 1.0" - if top_p > 0.0 and top_k > 0.0: - return "cannot set both top-k and top-p samplings." 
- if not (0 <= top_p <= 1.0): - return "top_p must be less than or equal to 1.0" - - top_p_decay = 0.0 - if "top_p_decay" in request.get_json(): - top_p_decay = request.get_json()["top_p_decay"] - if not (isinstance(top_p_decay, float)): - return "top_p_decay must be a positive float less than or equal to 1.0" - if top_p == 0.0: - return "top_p_decay cannot be set without top_p" - if not (0 <= top_p_decay <= 1.0): - return "top_p_decay must be less than or equal to 1.0" - - top_p_bound = 0.0 - if "top_p_bound" in request.get_json(): - top_p_bound = request.get_json()["top_p_bound"] - if not (isinstance(top_p_bound, float)): - return "top_p_bound must be a positive float less than or equal to top_p" - if top_p == 0.0: - return "top_p_bound cannot be set without top_p" - if not (0.0 < top_p_bound <= top_p): - return "top_p_bound must be greater than 0 and less than top_p" - - add_BOS = False - if "add_BOS" in request.get_json(): - add_BOS = request.get_json()["add_BOS"] - if not isinstance(add_BOS, bool): - return "add_BOS must be a boolean value" - - if any([len(prompt) == 0 for prompt in prompts]) and not add_BOS: - return "Empty prompts require add_BOS=true" - - stop_on_double_eol = False - if "stop_on_double_eol" in request.get_json(): - stop_on_double_eol = request.get_json()["stop_on_double_eol"] - if not isinstance(stop_on_double_eol, bool): - return "stop_on_double_eol must be a boolean value" - - stop_on_eol = False - if "stop_on_eol" in request.get_json(): - stop_on_eol = request.get_json()["stop_on_eol"] - if not isinstance(stop_on_eol, bool): - return "stop_on_eol must be a boolean value" - - prevent_newline_after_colon = False - if "prevent_newline_after_colon" in request.get_json(): - prevent_newline_after_colon = request.get_json()["prevent_newline_after_colon"] - if not isinstance(prevent_newline_after_colon, bool): - return "prevent_newline_after_colon must be a boolean value" - - random_seed = -1 - if "random_seed" in request.get_json(): - random_seed = request.get_json()["random_seed"] - if not isinstance(random_seed, int): - return "random_seed must be integer" - if random_seed < 0: - return "random_seed must be a positive integer" - - no_log = False - if "no_log" in request.get_json(): - no_log = request.get_json()["no_log"] - if not isinstance(no_log, bool): - return "no_log must be a boolean value" - - beam_width = None - if "beam_width" in request.get_json(): - beam_width = request.get_json()["beam_width"] - if not isinstance(beam_width, int): - return "beam_width must be integer" - if beam_width < 1: - return "beam_width must be an integer > 1" - if len(prompts) > 1: - return "When doing beam_search, batch size must be 1" - - stop_token = 50256 - if "stop_token" in request.get_json(): - stop_token = request.get_json()["stop_token"] - if not isinstance(stop_token, int): - return "stop_token must be an integer" - - length_penalty = 1 - if "length_penalty" in request.get_json(): - length_penalty = request.get_json()["length_penalty"] - if not isinstance(length_penalty, float): - return "length_penalty must be a float" - - with LOCK: # Need to get lock to keep multiple threads from hitting code - - if not no_log: - print("request IP: " + str(request.remote_addr)) - print(json.dumps(request.get_json()), flush=True) - print("start time: ", datetime.datetime.now()) - - try: - if beam_width is not None: - send_do_beam_search() # Tell other ranks we're doing beam_search - response, response_seg, response_scores = beam_search_and_post_process( - self.model, - prompts=prompts, 
- tokens_to_generate=tokens_to_generate, - beam_size=beam_width, - add_BOS=add_BOS, - stop_token=stop_token, - num_return_gen=beam_width, # Returning whole beam - length_penalty=length_penalty, - prevent_newline_after_colon=prevent_newline_after_colon, - ) - - return jsonify( - {"text": response, "segments": response_seg, "scores": response_scores} - ) - else: - send_do_generate() # Tell other ranks we're doing generate - result = generate_and_post_process( - self.model, - prompts=prompts, - tokens_to_generate=tokens_to_generate, - return_output_log_probs=logprobs, - top_k_sampling=top_k, - top_p_sampling=top_p, - top_p_decay=top_p_decay, - top_p_bound=top_p_bound, - temperature=temperature, - add_BOS=add_BOS, - use_eod_token_for_early_termination=True, - stop_on_double_eol=stop_on_double_eol, - stop_on_eol=stop_on_eol, - prevent_newline_after_colon=prevent_newline_after_colon, - random_seed=random_seed, - ) - - response, response_seg, response_logprobs = result[:3] - response = { - "text": response, - "segments": response_seg, - "logprobs": response_logprobs, - } - - return jsonify(response) - - except ValueError as ve: - return ve.args[0] - print("end time: ", datetime.datetime.now()) - - -class MegatronServer(object): - def __init__(self, model): - self.app = Flask(__name__, static_url_path='') - api = Api(self.app) - api.add_resource(MegatronGenerate, '/api', resource_class_args=[model]) - api.add_resource(MegatronCompletions, '/completions', resource_class_args=[model]) - - def run(self, url, port): - self.app.run(url, threaded=True, debug=False, port=port) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import datetime +import json +import os +import sys + +from flask import Flask, request, jsonify +from flask_restful import Resource, Api + +from megatron.core.inference.sampling_params import SamplingParams +from megatron.inference.endpoints.common import send_do_generate, send_do_beam_search, LOCK +from megatron.inference.endpoints.completions import MegatronCompletions +from megatron.inference.text_generation import beam_search_and_post_process + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)) +) + + +class MegatronGenerate(Resource): + def __init__(self, engine, args): + self.engine = engine + self.args = args + + def put(self): + if not "prompts" in request.get_json(): + return "prompts argument required", 400 + + if "max_len" in request.get_json(): + return "max_len is no longer used. Replace with tokens_to_generate", 400 + + if "sentences" in request.get_json(): + return "sentences is no longer used. Replace with prompts", 400 + + prompts = request.get_json()["prompts"] + if not isinstance(prompts, list): + return "prompts is not a list of strings", 400 + + if len(prompts) == 0: + return "prompts is empty", 400 + + if len(prompts) > 128: + return "Maximum number of prompts is 128", 400 + + tokens_to_generate = 64 # Choosing hopefully sane default. 
Full sequence is slow + if "tokens_to_generate" in request.get_json(): + tokens_to_generate = request.get_json()["tokens_to_generate"] + if not isinstance(tokens_to_generate, int): + return "tokens_to_generate must be an integer greater than 0" + if tokens_to_generate < 0: + return "tokens_to_generate must be an integer greater than or equal to 0" + + logprobs = False + if "logprobs" in request.get_json(): + logprobs = request.get_json()["logprobs"] + if not isinstance(logprobs, bool): + return "logprobs must be a boolean value" + + if tokens_to_generate == 0 and not logprobs: + return "tokens_to_generate=0 implies logprobs should be True" + + temperature = 1.0 + if "temperature" in request.get_json(): + temperature = request.get_json()["temperature"] + if not (isinstance(temperature, (int, float))): + return "temperature must be a positive number less than or equal to 1000.0" + if not (0.0 < temperature <= 100.0): + return "temperature must be a positive number less than or equal to 100.0" + + top_k = 0 + if "top_k" in request.get_json(): + top_k = request.get_json()["top_k"] + if not (isinstance(top_k, int)): + return "top_k must be an integer equal to or greater than 0 and less than or equal to 1000" + if not (0 <= top_k <= 1000): + return "top_k must be equal to or greater than 0 and less than or equal to 1000" + + top_p = 0.0 + if "top_p" in request.get_json(): + top_p = request.get_json()["top_p"] + if not (isinstance(top_p, float)): + return "top_p must be a positive float less than or equal to 1.0" + if top_p > 0.0 and top_k > 0.0: + return "cannot set both top-k and top-p samplings." + if not (0 <= top_p <= 1.0): + return "top_p must be less than or equal to 1.0" + + top_p_decay = 0.0 + if "top_p_decay" in request.get_json(): + top_p_decay = request.get_json()["top_p_decay"] + if not (isinstance(top_p_decay, float)): + return "top_p_decay must be a positive float less than or equal to 1.0" + if top_p == 0.0: + return "top_p_decay cannot be set without top_p" + if not (0 <= top_p_decay <= 1.0): + return "top_p_decay must be less than or equal to 1.0" + + top_p_bound = 0.0 + if "top_p_bound" in request.get_json(): + top_p_bound = request.get_json()["top_p_bound"] + if not (isinstance(top_p_bound, float)): + return "top_p_bound must be a positive float less than or equal to top_p" + if top_p == 0.0: + return "top_p_bound cannot be set without top_p" + if not (0.0 < top_p_bound <= top_p): + return "top_p_bound must be greater than 0 and less than top_p" + + add_BOS = False + if "add_BOS" in request.get_json(): + add_BOS = request.get_json()["add_BOS"] + if not isinstance(add_BOS, bool): + return "add_BOS must be a boolean value" + + if any([len(prompt) == 0 for prompt in prompts]) and not add_BOS: + return "Empty prompts require add_BOS=true" + + stop_on_double_eol = False + if "stop_on_double_eol" in request.get_json(): + stop_on_double_eol = request.get_json()["stop_on_double_eol"] + if not isinstance(stop_on_double_eol, bool): + return "stop_on_double_eol must be a boolean value" + + stop_on_eol = False + if "stop_on_eol" in request.get_json(): + stop_on_eol = request.get_json()["stop_on_eol"] + if not isinstance(stop_on_eol, bool): + return "stop_on_eol must be a boolean value" + + prevent_newline_after_colon = False + if "prevent_newline_after_colon" in request.get_json(): + prevent_newline_after_colon = request.get_json()["prevent_newline_after_colon"] + if not isinstance(prevent_newline_after_colon, bool): + return "prevent_newline_after_colon must be a boolean value" + + 
random_seed = -1 + if "random_seed" in request.get_json(): + random_seed = request.get_json()["random_seed"] + if not isinstance(random_seed, int): + return "random_seed must be integer" + if random_seed < 0: + return "random_seed must be a positive integer" + + no_log = False + if "no_log" in request.get_json(): + no_log = request.get_json()["no_log"] + if not isinstance(no_log, bool): + return "no_log must be a boolean value" + + beam_width = None + if "beam_width" in request.get_json(): + beam_width = request.get_json()["beam_width"] + if not isinstance(beam_width, int): + return "beam_width must be integer" + if beam_width < 1: + return "beam_width must be an integer > 1" + if len(prompts) > 1: + return "When doing beam_search, batch size must be 1" + + stop_token = 50256 + if "stop_token" in request.get_json(): + stop_token = request.get_json()["stop_token"] + if not isinstance(stop_token, int): + return "stop_token must be an integer" + + length_penalty = 1 + if "length_penalty" in request.get_json(): + length_penalty = request.get_json()["length_penalty"] + if not isinstance(length_penalty, float): + return "length_penalty must be a float" + + with LOCK: # Need to get lock to keep multiple threads from hitting code + + if not no_log: + print("request IP: " + str(request.remote_addr)) + print(json.dumps(request.get_json()), flush=True) + print("start time: ", datetime.datetime.now()) + + try: + if beam_width is not None: + send_do_beam_search() # Tell other ranks we're doing beam_search + response, response_seg, response_scores = beam_search_and_post_process( + self.model, + prompts=prompts, + tokens_to_generate=tokens_to_generate, + beam_size=beam_width, + add_BOS=add_BOS, + stop_token=stop_token, + num_return_gen=beam_width, # Returning whole beam + length_penalty=length_penalty, + prevent_newline_after_colon=prevent_newline_after_colon, + ) + + return jsonify( + {"text": response, "segments": response_seg, "scores": response_scores} + ) + else: + send_do_generate() # Tell other ranks we're doing generate + + sampling_params = SamplingParams( + temperature=temperature, + top_k=top_k, + top_p=top_p, + return_segments=True, + return_log_probs=logprobs, + num_tokens_to_generate=tokens_to_generate, + ) + result = list( + self.engine.generate( + prompts=prompts, common_inference_params=sampling_params + ) + ) + response_dict = {"text": [x.prompt + x.generated_text for x in result]} + if sampling_params.return_log_probs: + response_logprobs = [x.prompt_log_probs + x.generated_log_probs for x in + result] + response_dict["logprobs"] = response_logprobs + if sampling_params.return_segments: + response_dict["segments"] = [x.segments for x in result] + + return jsonify(response_dict) + + except ValueError as ve: + return ve.args[0] + + +class MegatronServer(object): + def __init__(self, model, args=None): + self.app = Flask(__name__, static_url_path='') + api = Api(self.app) + api.add_resource(MegatronGenerate, '/api', resource_class_args=[model, args]) + api.add_resource(MegatronCompletions, '/completions', resource_class_args=[model]) + + def run(self, url, port): + self.app.run(url, threaded=True, debug=False, port=port) diff --git a/megatron/legacy/model/language_model.py b/megatron/legacy/model/language_model.py index 0173a4a..ce89390 100644 --- a/megatron/legacy/model/language_model.py +++ b/megatron/legacy/model/language_model.py @@ -15,9 +15,6 @@ from .module import MegatronModule from .transformer import ParallelTransformer from .utils import get_linear_layer, init_method_normal, 
scaled_init_method_normal -import torch._dynamo -torch._dynamo.config.suppress_errors = True - def parallel_lm_logits(input_, word_embeddings_weight, parallel_output, bias=None): """LM logits using word embedding weights.""" @@ -471,7 +468,6 @@ class TransformerLanguageModel(MegatronModule): else: raise Exception('Stage must have at least either encoder or decoder') - # @torch.compile(mode="max-autotune-no-cudagraphs") def forward( self, enc_input_ids, diff --git a/megatron/legacy/model/rms_norm.py b/megatron/legacy/model/rms_norm.py index 33e5baf..21ba00c 100644 --- a/megatron/legacy/model/rms_norm.py +++ b/megatron/legacy/model/rms_norm.py @@ -2,8 +2,6 @@ import torch from torch import nn -import torch._dynamo -torch._dynamo.config.suppress_errors = True class RMSNorm(torch.nn.Module): @@ -26,11 +24,9 @@ class RMSNorm(torch.nn.Module): setattr(self.weight, 'sequence_parallel', sequence_parallel) - @torch.compile(mode="max-autotune-no-cudagraphs") def _norm(self, x): return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) - @torch.compile(mode="max-autotune-no-cudagraphs") def forward(self, x): output = self._norm(x.float()).type_as(x) return output * self.weight diff --git a/megatron/legacy/model/transformer.py b/megatron/legacy/model/transformer.py index 7f65b17..6a5cd55 100644 --- a/megatron/legacy/model/transformer.py +++ b/megatron/legacy/model/transformer.py @@ -1,1895 +1,1805 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Transformer.""" -import math -import os -from contextlib import nullcontext -from typing import Optional - -import numpy as np -import torch -import torch.nn.functional as F - -from megatron import core -from megatron.core import mpu, tensor_parallel -from megatron.core.enums import ModelType -from megatron.legacy.model.enums import AttnMaskType, LayerType, AttnType -from megatron.legacy.model.fused_softmax import FusedScaleMaskSoftmax -from megatron.legacy.model.fused_bias_gelu import bias_gelu_impl -from megatron.core.models.common.embeddings import apply_rotary_pos_emb -from megatron.core.jit import jit_fuser -from megatron.core.num_microbatches_calculator import get_num_microbatches -from megatron.core.parallel_state import ( - get_expert_tensor_and_model_parallel_group, - get_tensor_model_parallel_group, -) -from megatron.core.tensor_parallel import ( - gather_from_sequence_parallel_region, - reduce_scatter_to_sequence_parallel_region, - get_cuda_rng_tracker, - get_data_parallel_rng_tracker_name, -) -from megatron.legacy.model.enums import AttnMaskType, AttnType, LayerType -from megatron.legacy.model.fused_bias_gelu import bias_gelu_impl -from megatron.legacy.model.fused_softmax import FusedScaleMaskSoftmax -from megatron.legacy.model.utils import ( - attention_mask_func, - erf_gelu, - get_norm, - openai_gelu, -) -from megatron.training import get_args, get_timers - -import torch._dynamo -torch._dynamo.config.suppress_errors = True - -from .module import MegatronModule - -try: - from einops import rearrange -except ImportError: - rearrange = None - -try: - from flash_attn.flash_attn_interface import flash_attn_unpadded_func -except ImportError: - try: - from flash_attn.flash_attn_interface import ( - flash_attn_varlen_func as flash_attn_unpadded_func, - ) - except ImportError: - flash_attn_unpadded_func = None - -try: - from flash_attn.flash_attn_triton import flash_attn_func -except ImportError: - flash_attn_func = None -""" We use the following notation throughout this file: - h: hidden size - n: number of attention 
heads - p: number of model parallel partitions - np: n/p - hp: h/p - hn: h/n - b: batch size - s: sequence length - l: number of layers - Transformer takes input of size [s, b, h] and returns a - tensor of the same size. We use the following arguments: - hyperparameters: transformer hyperparameters -""" - -class DropPath(MegatronModule): - """Drop paths (Stochastic Depth) per sample - (when applied in main path of residual blocks). - """ - - def __init__(self, drop_prob=0.): - super(DropPath, self).__init__() - self.drop_prob = drop_prob - - def forward(self, hidden_state): - if self.drop_prob == 0. or not self.training: - return hidden_state - keep_prob = 1 - self.drop_prob - # work with diff dim tensors, not just 2D ConvNets - # hidden_state: [s, b, h] - shape = (1,) + (hidden_state.shape[1],) + (1,) * (hidden_state.ndim - 2) - random_tensor = keep_prob + \ - torch.rand(shape, dtype=hidden_state.dtype, device=hidden_state.device) - random_tensor.floor_() # binarize - output = hidden_state.div(keep_prob) * random_tensor - return output - -class ParallelMLP(MegatronModule): - """MLP. - - MLP will take the input with h hidden state, project it to 4*h - hidden dimension, perform nonlinear transformation, and project the - state back into h hidden dimension. - """ - - def __init__(self, config, is_expert=False): - super(ParallelMLP, self).__init__() - args = get_args() - - self.add_bias = config.add_bias_linear - - ffn_hidden_size = config.ffn_hidden_size - if config.gated_linear_unit: - ffn_hidden_size *= 2 - - # Project to 4h. If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf - self.dense_h_to_4h = tensor_parallel.ColumnParallelLinear( - config.hidden_size, - ffn_hidden_size, - config=config, - init_method=config.init_method, - bias=self.add_bias, - gather_output=False, - skip_bias_add=True, - is_expert=is_expert, - ) - - self.bias_gelu_fusion = False - self.activation_func = None - self.swiglu = args.swiglu - - if args.openai_gelu: - self.activation_func = openai_gelu - elif args.onnx_safe: - self.activation_func = erf_gelu - elif args.swiglu: - @torch.compile(mode="max-autotune-no-cudagraphs") - def swiglu(x): - x = torch.chunk(x, 2, dim=-1) - return F.silu(x[0]) * x[1] - self.activation_func = swiglu - elif args.squared_relu: - def squared_relu(x): - return torch.pow(F.relu(x), 2) - self.activation_func = squared_relu - else: - self.bias_gelu_fusion = args.bias_gelu_fusion - self.activation_func = F.gelu - - # Project back to h. 
- self.dense_4h_to_h = tensor_parallel.RowParallelLinear( - config.ffn_hidden_size, - config.hidden_size, - config=config, - init_method=config.output_layer_init_method, - bias=self.add_bias, - skip_bias_add=True, - input_is_parallel=True, - is_expert=is_expert, - ) - - # @torch.compile(mode="max-autotune-no-cudagraphs") - def forward(self, hidden_states): - - # [s, b, 4hp] - intermediate_parallel, bias_parallel = self.dense_h_to_4h(hidden_states) - - if self.bias_gelu_fusion: - assert self.add_bias is True - assert self.activation_func == F.gelu - intermediate_parallel = bias_gelu_impl(intermediate_parallel, bias_parallel) - else: - if bias_parallel is not None: - intermediate_parallel = intermediate_parallel + bias_parallel - intermediate_parallel = self.activation_func(intermediate_parallel) - - # [s, b, h] - output, output_bias = self.dense_4h_to_h(intermediate_parallel) - return output, output_bias - -def sinkhorn(cost, tol=0.0001): - cost = torch.exp(cost) - d0 = torch.ones(cost.size(0), device=cost.device, dtype=cost.dtype) - d1 = torch.ones(cost.size(1), device=cost.device, dtype=cost.dtype) - - eps = 0.00000001 - error = 1e9 - d1_old = d1 - while error > tol: - d0 = (1/d0.size(0))*1/(torch.sum(d1*cost,1) + eps) - d1 = (1/d1.size(0))*1/(torch.sum(d0.unsqueeze(1)*cost,0)+eps) - error = torch.mean(torch.abs(d1_old-d1)) - d1_old = d1 - return d1*cost*d0.unsqueeze(1) - - -def get_router_linear_layer(config): - args = get_args() - router = torch.nn.Linear(args.hidden_size, args.num_experts, bias=False) - with get_cuda_rng_tracker().fork(get_data_parallel_rng_tracker_name()): - config.init_method(router.weight) - setattr(router.weight, 'sequence_parallel',config.sequence_parallel) - return router - - -class SwitchMLP(MegatronModule): - """ - Routes input to one of N MLP "experts" - """ - def __init__(self, config): - super(SwitchMLP, self).__init__() - args = get_args() - self.router = get_router_linear_layer(config) - self.expert_parallel_size = mpu.get_expert_model_parallel_world_size() - self.sequence_parallel = config.sequence_parallel - self.add_bias = config.add_bias_linear - - assert args.num_experts % self.expert_parallel_size == 0 - self.num_local_experts = args.num_experts // self.expert_parallel_size - local_expert_indices_offset = mpu.get_expert_model_parallel_rank() * self.num_local_experts - self.local_expert_indices = [local_expert_indices_offset + i for i in range(self.num_local_experts)] - - self.local_experts = torch.nn.ModuleList() - for i in range(self.num_local_experts): - self.local_experts.append(ParallelMLP(config, is_expert=True)) - - self.tp_ep_group = get_expert_tensor_and_model_parallel_group() - - def gather_indices(self, local_indices): - """ Gather tensors and concatinate along the first dimension.""" - world_size = torch.distributed.get_world_size(group=self.tp_ep_group) - # Bypass the function if we are using only 1 GPU. 
- if world_size == 1: - return local_indices - - dim_size = list(local_indices.size()) - dim_size[0] = dim_size[0] * world_size - - # TODO pre allocate memory - output = torch.empty(dim_size, dtype=local_indices.dtype, - device=torch.cuda.current_device()) - torch.distributed._all_gather_base( - output, local_indices.contiguous(), group=self.tp_ep_group - ) - return output - - def forward(self, hidden_states): - # hidden_states: [b, s, h] - args = get_args() - s = hidden_states.size(0) - b = hidden_states.size(1) - h = hidden_states.size(2) - route = self.router(hidden_states).view(-1, args.num_experts) - - # TODO (rprenger) Right now we're just using the sinkhorn algorithm - # for load balancing. There should be an option to do no load balancing - # and the algorithm and parametets should be further tested - if self.training: - with torch.no_grad(): - sinkroute = sinkhorn(route.detach().to(dtype=torch.float32)) - _, max_ind = torch.max(sinkroute, dim=1) - route = torch.sigmoid(route) - max_prob = route[torch.arange(route.size(0)), max_ind] - else: - route = torch.sigmoid(route) - max_prob, max_ind = torch.max(route, dim=1) - - max_prob = torch.unsqueeze(max_prob, 1) - hidden_states = hidden_states.view(-1, hidden_states.size(2)) - - # TODO (rprenger) TODO this could be made easier to read - # Converting [s, b, h] to [s*b, h]. - # Each vector could be routed differently - if self.sequence_parallel or (self.expert_parallel_size > 1): - global_hidden_states = \ - gather_from_sequence_parallel_region(hidden_states, group=self.tp_ep_group) - global_indices = self.gather_indices(max_ind) - else: - global_hidden_states = hidden_states - global_indices = max_ind - - output_total = torch.zeros_like(global_hidden_states) - if self.add_bias: - output_bias_total = torch.zeros_like(global_hidden_states) - - for expert_num, expert in enumerate(self.local_experts): - local_expert_index = self.local_expert_indices[expert_num] - local_indices = (global_indices == local_expert_index).nonzero() - hidden = global_hidden_states[local_indices, :] - output, output_bias = expert(hidden) - output_total[local_indices, :] = output - if self.add_bias: - output_bias = output_bias.expand_as(output) - output_bias_total[local_indices, :] = output_bias - - if self.sequence_parallel or (self.expert_parallel_size > 1): - output_total = \ - reduce_scatter_to_sequence_parallel_region(output_total, group=self.tp_ep_group) - if self.add_bias: - output_bias_total = \ - reduce_scatter_to_sequence_parallel_region(output_bias_total, group=self.tp_ep_group) - - # bias is duplicated across tensor parallelism ranks; - # reduce scatter reduces bias across tensor parallel_ranks - output_bias_total = \ - output_bias_total/mpu.get_tensor_model_parallel_world_size() - - output_total = output_total*max_prob - output_total = output_total.view(s, b, h) - if self.add_bias: - output_bias_total = output_bias_total*max_prob - output_bias_total = output_bias_total.view(s, b, h) - else: - output_bias_total = None - - return output_total, output_bias_total - - -class CoreAttention(MegatronModule): - - def __init__(self, layer_number, config, - attn_mask_type=AttnMaskType.padding): - super(CoreAttention, self).__init__() - self.fp16 = config.fp16 - self.bf16 = config.bf16 - - self.apply_query_key_layer_scaling = config.apply_query_key_layer_scaling - self.attention_softmax_in_fp32 = config.attention_softmax_in_fp32 - if self.apply_query_key_layer_scaling: - self.attention_softmax_in_fp32 = True - self.layer_number = max(1, layer_number) - 
self.attn_mask_type = attn_mask_type - self.sequence_parallel = config.sequence_parallel - - projection_size = config.kv_channels * config.num_attention_heads - - # Per attention head and per partition values. - world_size = mpu.get_tensor_model_parallel_world_size() - self.hidden_size_per_partition = core.utils.divide(projection_size, - world_size) - self.hidden_size_per_attention_head = core.utils.divide( - projection_size, config.num_attention_heads) - self.num_attention_heads_per_partition = core.utils.divide( - config.num_attention_heads, world_size) - - coeff = None - self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) - if self.apply_query_key_layer_scaling: - coeff = self.layer_number - self.norm_factor *= coeff - - self.scale_mask_softmax = FusedScaleMaskSoftmax( - self.fp16, self.bf16, - self.attn_mask_type, - config.masked_softmax_fusion, - attention_mask_func, - self.attention_softmax_in_fp32, - coeff) - - # Dropout. Note that for a single iteration, this layer will generate - # different outputs on different number of parallel partitions but - # on average it should not be partition dependent. - self.attention_dropout = torch.nn.Dropout(config.attention_dropout) - - def forward(self, query_layer, key_layer, - value_layer, attention_mask): - - # =================================== - # Raw attention scores. [b, np, s, s] - # =================================== - - # [b, np, sq, sk] - output_size = (query_layer.size(1), - query_layer.size(2), - query_layer.size(0), - key_layer.size(0)) - - # [sq, b, np, hn] -> [sq, b * np, hn] - query_layer = query_layer.reshape(output_size[2], - output_size[0] * output_size[1], -1) - # [sk, b, np, hn] -> [sk, b * np, hn] - key_layer = key_layer.view(output_size[3], - output_size[0] * output_size[1], -1) - - # preallocting input tensor: [b * np, sq, sk] - matmul_input_buffer = mpu.get_global_memory_buffer().get_tensor( - (output_size[0]*output_size[1], output_size[2], output_size[3]), - query_layer.dtype, "mpu") - - # Raw attention scores. [b * np, sq, sk] - matmul_result = torch.baddbmm( - matmul_input_buffer, - query_layer.transpose(0, 1), # [b * np, sq, hn] - key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] - beta=0.0, alpha=(1.0/self.norm_factor)) - - # change view to [b, np, sq, sk] - attention_scores = matmul_result.view(*output_size) - - # =========================== - # Attention probs and dropout - # =========================== - - # attention scores and attention mask [b, np, sq, sk] - attention_probs = self.scale_mask_softmax(attention_scores, - attention_mask) - - # This is actually dropping out entire tokens to attend to, which might - # seem a bit unusual, but is taken from the original Transformer paper. - if not self.sequence_parallel: - with tensor_parallel.get_cuda_rng_tracker().fork(): - attention_probs = self.attention_dropout(attention_probs) - else: - attention_probs = self.attention_dropout(attention_probs) - - # ========================= - # Context layer. [sq, b, hp] - # ========================= - - # value_layer -> context layer. 
- # [sk, b, np, hn] --> [b, np, sq, hn] - - # context layer shape: [b, np, sq, hn] - output_size = (value_layer.size(1), - value_layer.size(2), - query_layer.size(0), - value_layer.size(3)) - - # change view [sk, b * np, hn] - value_layer = value_layer.view(value_layer.size(0), - output_size[0] * output_size[1], -1) - - # change view [b * np, sq, sk] - attention_probs = attention_probs.view(output_size[0] * output_size[1], - output_size[2], -1) - - # matmul: [b * np, sq, hn] - context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) - - # change view [b, np, sq, hn] - context_layer = context_layer.view(*output_size) - - # [b, np, sq, hn] --> [sq, b, np, hn] - context_layer = context_layer.permute(2, 0, 1, 3).contiguous() - - # [sq, b, np, hn] --> [sq, b, hp] - new_context_layer_shape = context_layer.size()[:-2] + \ - (self.hidden_size_per_partition,) - context_layer = context_layer.view(*new_context_layer_shape) - - return context_layer - -class FlashSelfAttentionTorch(torch.nn.Module): - def __init__(self, causal=False, softmax_scale=None, attention_dropout=0.0, device=None, dtype=None): - super().__init__() - assert flash_attn_func is not None, ('Triton version of FlashAttention is not installed.') - assert rearrange is not None, 'Please install einops first, e.g., with pip install einops' - self.causal = causal - self.softmax_scale = softmax_scale - self.attention_dropout = attention_dropout - def forward(self, q, k, v): - """Implements the multihead softmax attention. - Arguments - --------- - q, k, v: The tensor containing the query, key, and value. (B, S, H, D) - """ - assert q.dtype in [torch.float16, torch.bfloat16] - assert q.is_cuda - if os.environ.get('USE_BSHD',None): - q, k, v = [rearrange(x, 's b h d -> b s h d').contiguous() - for x in (q, k, v)] - else: - q, k, v = [rearrange(x, 's b h d -> b h s d').contiguous() - for x in (q, k, v)] - output = SDPA(q, k, v, is_causal=self.causal, dropout_p=self.attention_dropout, scale=self.softmax_scale) - if os.environ.get('USE_BSHD',None): - output = rearrange(output, 'b s h d -> s b (h d)').contiguous() - else: - output = rearrange(output, 'b h s d -> s b (h d)').contiguous() - return output - -class FlashSelfAttention(torch.nn.Module): - """Implement the scaled dot product attention with softmax. - Arguments - --------- - softmax_scale: The temperature to use for the softmax attention. - (default: 1/sqrt(d_keys) where d_keys is computed at - runtime) - attention_dropout: The dropout rate to apply to the attention - (default: 0.0) - """ - def __init__(self, causal=False, softmax_scale=None, attention_dropout=0.0, - device=None, dtype=None): - super().__init__() - assert flash_attn_unpadded_func is not None, ('Please install FlashAttention first, ' - 'e.g., with pip install flash-attn') - assert rearrange is not None, 'Please install einops first, e.g., with pip install einops' - self.causal = causal - self.softmax_scale = softmax_scale - self.dropout_p = attention_dropout - - # Use FlashAttention-2 when args.use_flash_attn_ck is True - args = get_args() - self.flash_attn_func = flash_attn_unpadded_func - - def forward(self, q, k, v): - """Implements the multihead softmax attention. - Arguments - --------- - q, k, v: The tensor containing the query, key, and value. (B, S, H, D) - """ - - assert all((i.dtype in [torch.float16, torch.bfloat16] for i in (q,k,v))) - assert all((i.is_cuda for i in (q,k,v))) - - batch_size, seqlen_q = q.shape[0], q.shape[1] - seqlen_k = k.shape[1] - - q, k, v = [rearrange(x, 'b s ... 
-> (b s) ...') for x in [q, k, v]] - cu_seqlens_q = torch.arange(0, (batch_size + 1) * seqlen_q, step=seqlen_q, dtype=torch.int32, - device=q.device) - - if self.training: - # during training q,k,v always have same seqlen - assert seqlen_k == seqlen_q - - is_causal = self.causal - cu_seqlens_k = cu_seqlens_q - dropout_p = self.dropout_p - else: - # turn off FA causal mask after first inference autoregressive iteration - # only on first autoregressive step q,k,v have same seqlen - is_causal = seqlen_q == seqlen_k - cu_seqlens_k = torch.arange(0, (batch_size + 1) * seqlen_k, step=seqlen_k, dtype=torch.int32, - device=q.device) - dropout_p = 0 - - output = flash_attn_unpadded_func( - q, k, v, cu_seqlens_q, cu_seqlens_k, seqlen_q, seqlen_k, - dropout_p, - softmax_scale=self.softmax_scale, causal=is_causal - ) - - output = rearrange(output, '(b s) ... -> b s ...', b=batch_size) - return output - -class FlashSelfAttentionTriton(torch.nn.Module): - """Implement the scaled dot product attention with softmax. - Arguments - --------- - softmax_scale: The temperature to use for the softmax attention. - (default: 1/sqrt(d_keys) where d_keys is computed at - runtime) - attention_dropout: The dropout rate to apply to the attention - (default: 0.0) - """ - def __init__(self, causal=False, softmax_scale=None, attention_dropout=0.0, - device=None, dtype=None): - super().__init__() - assert flash_attn_func is not None, ('Triton version of FlashAttention is not installed.') - assert rearrange is not None, 'Please install einops first, e.g., with pip install einops' - self.causal = causal - self.softmax_scale = softmax_scale - self.dropout_p = attention_dropout - - def forward(self, q, k, v): - """Implements the multihead softmax attention. - Arguments - --------- - q, k, v: The tensor containing the query, key, and value. (B, S, H, D) - """ - assert q.dtype in [torch.float16, torch.bfloat16] - assert q.is_cuda - q, k, v = [rearrange(x, 's b h d -> b h s d').contiguous() - for x in (q, k, v)] - output = flash_attn_func(q, k, v, self.causal) - output = rearrange(output, 'b s h d -> h b (s d)').contiguous() - return output - -class ParallelAttention(MegatronModule): - """Parallel self-attention layer abstract class. - - Self-attention layer takes input with size [s, b, h] - and returns output of the same size. 
- """ - - def __init__(self, config, layer_number, - attention_type=AttnType.self_attn, - attn_mask_type=AttnMaskType.padding): - super(ParallelAttention, self).__init__() - args = get_args() - self.layer_number = max(1, layer_number) - self.attention_type = attention_type - self.attn_mask_type = attn_mask_type - self.params_dtype = config.params_dtype - self.sequence_parallel = config.sequence_parallel - self.config = config - self.group_query_attention = args.group_query_attention - self.num_query_groups = args.num_query_groups - - query_projection_size = config.kv_channels * config.num_attention_heads - if self.group_query_attention: - kv_projection_size = args.kv_channels * args.num_query_groups - else: - kv_projection_size = args.kv_channels * args.num_attention_heads - - self.use_flash_attn = (args.use_flash_attn_cutlass or args.use_flash_attn_triton or args.use_flash_attn_torch) \ - and attention_type == AttnType.self_attn \ - and self.attn_mask_type == AttnMaskType.causal - self.use_flash_attn_triton = args.use_flash_attn_triton - self.use_flash_attn_torch = args.use_flash_attn_torch - - if self.use_flash_attn: - if args.use_flash_attn_cutlass: - if flash_attn_unpadded_func is None: - raise ImportError('FlashAttention is not installed, please install with ' - 'pip install flash-attn') - if args.use_flash_attn_triton: - assert flash_attn_func != None, "Cannot import FlashAttention triton " - - assert attention_type == AttnType.self_attn, ('FlashAttention code path only supports ' - 'self-attention for now') - assert self.attn_mask_type == AttnMaskType.causal, ('FlashAttention code path only ' - 'supports causal mask for now') - if rearrange is None: - raise ImportError('einops is not installed, please install with pip install einops') - - # Per attention head and per partition values. - world_size = mpu.get_tensor_model_parallel_world_size() - self.hidden_size_per_attention_head = core.utils.divide( - query_projection_size, config.num_attention_heads) - self.num_attention_heads_per_partition = core.utils.divide( - config.num_attention_heads, world_size) - - if self.group_query_attention: - if args.num_query_groups % world_size != 0: - raise NotImplementedError('Currently the num_query_groups should be ' - 'a multiple of the tensor parallel size') - self.num_query_groups_per_partition = core.utils.divide( - args.num_query_groups, world_size) - else: - self.num_query_groups_per_partition = self.num_attention_heads_per_partition - - # Strided linear layer. 
- if attention_type == AttnType.self_attn: - self.query_key_value = tensor_parallel.ColumnParallelLinear( - config.hidden_size, - query_projection_size + 2 * kv_projection_size, - config=config, - init_method=config.init_method, - bias=args.add_bias_linear or args.add_qkv_bias, - gather_output=False) - else: - assert attention_type == AttnType.cross_attn - - if self.group_query_attention: - raise NotImplementedError("Grouped query attention not implemented for cross-attention.") - assert query_projection_size == kv_projection_size - - self.query = tensor_parallel.ColumnParallelLinear( - config.hidden_size, - query_projection_size, - config=config, - init_method=config.init_method, - bias=config.add_bias_linear, - gather_output=False) - - self.key_value = tensor_parallel.ColumnParallelLinear( - config.hidden_size, - 2 * kv_projection_size, - config=config, - init_method=config.init_method, - bias=config.add_bias_linear, - gather_output=False) - - self.core_attention = CoreAttention(self.layer_number, config, - self.attn_mask_type) - self.checkpoint_core_attention = config.recompute_granularity == 'selective' - - if self.use_flash_attn_triton: - self.core_attention_flash = FlashSelfAttentionTriton( - causal=True, attention_dropout=args.attention_dropout - ) - elif self.use_flash_attn_torch: - self.core_attention_flash = FlashSelfAttentionTorch(causal=True, attention_dropout=config.attention_dropout) - elif self.use_flash_attn: - self.core_attention_flash = FlashSelfAttention( - causal=True, attention_dropout=config.attention_dropout - ) - - # Output. - self.dense = tensor_parallel.RowParallelLinear( - query_projection_size, - config.hidden_size, - config=config, - init_method=config.output_layer_init_method, - bias=args.add_bias_linear, - input_is_parallel=True, - skip_bias_add=True) - - def _checkpointed_attention_forward(self, query_layer, key_layer, - value_layer, attention_mask, - rotary_pos_emb=None): - """Forward method with activation checkpointing.""" - def custom_forward(*inputs): - query_layer = inputs[0] - key_layer = inputs[1] - value_layer = inputs[2] - attention_mask = inputs[3] - output_ = self.core_attention(query_layer, key_layer, - value_layer, attention_mask) - return output_ - - q_pos_emb, k_pos_emb = (None, None) if rotary_pos_emb is None \ - else rotary_pos_emb - - hidden_states = tensor_parallel.checkpoint( - custom_forward, - False, query_layer, key_layer, value_layer, attention_mask, - q_pos_emb, k_pos_emb) - - return hidden_states - - def _allocate_memory(self, inference_max_sequence_len, batch_size, num_attention_heads): - return torch.empty( - inference_max_sequence_len, - batch_size, - num_attention_heads, - self.hidden_size_per_attention_head, - dtype=self.params_dtype, - device=torch.cuda.current_device()) - - def forward(self, hidden_states, attention_mask, - encoder_output=None, inference_params=None, - rotary_pos_emb=None): - # hidden_states: [sq, b, h] - - # ================================================= - # Pre-allocate memory for key-values for inference. 
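The inference pre-allocation described next amounts to reserving one key buffer and one value buffer of shape [max_seq, max_batch, kv_heads_per_partition, head_dim] per layer. A hedged, stand-alone sketch (names and sizes are illustrative only):

# Hedged sketch of per-layer KV-cache preallocation; shapes and names are illustrative.
import torch

def allocate_kv_cache(max_seq_len, max_batch, num_kv_heads, head_dim,
                      dtype=torch.float16, device="cpu"):
    # One buffer each for keys and values, indexed [seq, batch, kv_head, head_dim].
    key_memory = torch.empty(max_seq_len, max_batch, num_kv_heads, head_dim,
                             dtype=dtype, device=device)
    value_memory = torch.empty_like(key_memory)
    return key_memory, value_memory

key_value_memory_dict = {}
layer_number = 1
if layer_number not in key_value_memory_dict:
    key_value_memory_dict[layer_number] = allocate_kv_cache(2048, 4, 8, 128)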
- # ================================================= - is_first_step = False - if inference_params: - if self.layer_number not in inference_params.key_value_memory_dict: - inf_max_seq_len = inference_params.max_sequence_length - inf_max_batch_size = inference_params.max_batch_size - inference_key_memory = self._allocate_memory( - inf_max_seq_len, inf_max_batch_size, - self.num_query_groups_per_partition) - inference_value_memory = self._allocate_memory( - inf_max_seq_len, inf_max_batch_size, - self.num_query_groups_per_partition) - - inference_params.key_value_memory_dict[self.layer_number] = ( - inference_key_memory, inference_value_memory) - is_first_step = True - else: - inference_key_memory, inference_value_memory = \ - inference_params.key_value_memory_dict[self.layer_number] - - # ===================== - # Query, Key, and Value - # ===================== - if self.attention_type == AttnType.self_attn: - - # Attention heads [sq, b, h] --> [sq, b, ng * (np/ng + 2) * hn)] - mixed_x_layer, _ = self.query_key_value(hidden_states) - - # [sq, b, hp] --> [sq, b, ng, (np/ng + 2) * hn] - new_tensor_shape = mixed_x_layer.size()[:-1] + ( - self.num_query_groups_per_partition, - ( - (self.num_attention_heads_per_partition // self.num_query_groups_per_partition + 2) - * self.hidden_size_per_attention_head - ), - ) - mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) - - # [sq, b, ng, (np/ng + 2) * hn] --> [sq, b, ng, np/ng * hn], [sq, b, ng, hn], [sq, b, ng, hn] - (query_layer, - key_layer, - value_layer) = torch.split( - mixed_x_layer, - [ - ( - self.num_attention_heads_per_partition // self.num_query_groups_per_partition - * self.hidden_size_per_attention_head - ), - self.hidden_size_per_attention_head, - self.hidden_size_per_attention_head - ], - dim=3) - - # [sq, b, ng, np/ng * hn] -> [sq, b, np, hn] - - query_layer = query_layer.contiguous().view(query_layer.size(0), query_layer.size(1), -1, self.hidden_size_per_attention_head) - else: - # Attention heads [sk, b, h] --> [sk, b, (np * 2 * hn)] - mixed_kv_layer, _ = self.key_value(encoder_output) - - # [sk, b, (np * 2 * hn)] --> [sk, b, np, 2 * hn] - new_tensor_shape = mixed_kv_layer.size()[:-1] + \ - (self.num_attention_heads_per_partition, - 2 * self.hidden_size_per_attention_head) - mixed_kv_layer = mixed_kv_layer.view(*new_tensor_shape) - - # [sk, b, np, 2 * hn] --> 2 [sk, b, np, hn] - (key_layer, - value_layer) = tensor_parallel.split_tensor_along_last_dim(mixed_kv_layer, 2) - - # Attention head [sq, b, h] --> [sq, b, hp] - query_layer, _ = self.query(hidden_states) - # [sq, b, hp] --> [sq, b, np, hn] - new_tensor_shape = query_layer.size()[:-1] + \ - (self.num_attention_heads_per_partition, - self.hidden_size_per_attention_head) - query_layer = query_layer.view(*new_tensor_shape) - - # ================================== - # Adjust key and value for inference - # ================================== - - # duplicate the pos_emb for self attention - if rotary_pos_emb is not None: - if isinstance(rotary_pos_emb, tuple): - rotary_pos_emb = rotary_pos_emb - else: - rotary_pos_emb = ((rotary_pos_emb,) * 2) - - if inference_params: - batch_start = inference_params.batch_size_offset - batch_end = batch_start + key_layer.size(1) - assert batch_end <= inference_key_memory.size(1) - sequence_start = inference_params.sequence_len_offset - sequence_end = sequence_start + key_layer.size(0) - assert sequence_end <= inference_key_memory.size(0) - # Copy key and values. - inference_key_memory[sequence_start:sequence_end, - batch_start:batch_end, ...] 
= key_layer - inference_value_memory[sequence_start:sequence_end, - batch_start:batch_end, ...] = value_layer - key_layer = inference_key_memory[ - :sequence_end, batch_start:batch_end, ...] - value_layer = inference_value_memory[ - :sequence_end, batch_start:batch_end, ...] - - - # adjust the key rotary positional embedding - if rotary_pos_emb is not None: - q_pos_emb, k_pos_emb = rotary_pos_emb - # need to cross check this condition during inference - # if not set_inference_key_value_memory: - if not is_first_step: - # In inference, we compute one token at a time. - # Select the correct positional embedding - # (only the last token in the sequence) - q_pos_emb = q_pos_emb[sequence_end - 1 : sequence_end] - else: - # In the first forward pass of inference, - # we use the entire provided prefix. - # q_pos_emb here has the rope embeddings of the entire - # prefix + to-be-generated output so - # we slice to just the prefix. - q_pos_emb = q_pos_emb[:sequence_end, :, :, :] - k_pos_emb = k_pos_emb[:sequence_end, :, :, :] - rotary_pos_emb = (q_pos_emb, k_pos_emb) - - # ================================== - # core attention computation - # ================================== - - # expand the key_layer and value_layer [sk, b, ng, hn] -> [sk, b, np, hn] - if self.num_attention_heads_per_partition // self.num_query_groups_per_partition > 1: - key_layer = key_layer.repeat_interleave( - self.num_attention_heads_per_partition // self.num_query_groups_per_partition, - dim = 2 - ) - value_layer = value_layer.repeat_interleave( - self.num_attention_heads_per_partition // self.num_query_groups_per_partition, - dim = 2 - ) - - # apply relative positional encoding (rotary embedding) - if rotary_pos_emb is not None: - q_pos_emb, k_pos_emb = rotary_pos_emb - query_layer = apply_rotary_pos_emb(query_layer, q_pos_emb,self.config) - key_layer = apply_rotary_pos_emb(key_layer, k_pos_emb,self.config) - # TODO, can apply positional embedding to value_layer so it has - # absolute positional embedding. - # otherwise, only relative positional embedding takes effect - # value_layer = apply_rotary_pos_emb(value_layer, k_pos_emb) - - if not self.use_flash_attn: - if self.checkpoint_core_attention: - context_layer = self._checkpointed_attention_forward( - query_layer, key_layer, value_layer, attention_mask) - else: - context_layer = self.core_attention( - query_layer, key_layer, value_layer, attention_mask) - else: - if not self.use_flash_attn_triton and not self.use_flash_attn_torch: - query_layer, key_layer, value_layer = [rearrange(x, 's b ... -> b s ...').contiguous() - for x in (query_layer, key_layer, value_layer)] - - if not self.sequence_parallel: - with tensor_parallel.get_cuda_rng_tracker().fork(): - context_layer = self.core_attention_flash(query_layer, key_layer, value_layer) - else: - context_layer = self.core_attention_flash(query_layer, key_layer, value_layer) - - if not self.use_flash_attn_triton and not self.use_flash_attn_torch: - context_layer = rearrange(context_layer, 'b s h d -> s b (h d)').contiguous() - - # ================= - # Output. 
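A tiny illustration of the key/value head expansion used above (shapes invented): each of the ng KV heads is repeated np/ng times along the head dimension so the grouped-query layout matches the query heads before attention.

# Hedged sketch of expanding grouped KV heads to match query heads; sizes are invented.
import torch

sk, b, ng, hn = 8, 2, 2, 16        # seq, batch, KV groups, head dim
np_ = 8                            # query heads per partition
key = torch.randn(sk, b, ng, hn)

repeats = np_ // ng                # 4 query heads share each KV head
expanded = key.repeat_interleave(repeats, dim=2)   # [sk, b, np, hn]
assert expanded.shape == (sk, b, np_, hn)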
[sq, b, h] - # ================= - - output, bias = self.dense(context_layer) - - return output, bias - - -def bias_dropout_add(x, bias, residual, prob, training): - # type: (Tensor, Optional[Tensor], Tensor, float, bool) -> Tensor - if bias is not None: - x = x + bias - out = torch.nn.functional.dropout(x, p=prob, training=training) - out = residual + out - return out - - -def get_bias_dropout_add(training): - def _bias_dropout_add(x, bias, residual, prob): - return bias_dropout_add(x, bias, residual, prob, training) - return _bias_dropout_add - - -@jit_fuser -def bias_dropout_add_fused_train(x: torch.Tensor, - bias: Optional[torch.Tensor], - residual: torch.Tensor, - prob: float) -> torch.Tensor: - return bias_dropout_add(x, bias, residual, prob, True) - - -@jit_fuser -def bias_dropout_add_fused_inference(x: torch.Tensor, - bias: Optional[torch.Tensor], - residual: torch.Tensor, - prob: float) -> torch.Tensor: - return bias_dropout_add(x, bias, residual, prob, False) - - -class ParallelTransformerLayer(MegatronModule): - """A single transformer layer. - - Transformer layer takes input with size [s, b, h] and returns an - output of the same size. - """ - - def __init__(self, config, - layer_number, layer_type=LayerType.encoder, - self_attn_mask_type=AttnMaskType.padding, - drop_path_rate=0.): - args = get_args() - - super(ParallelTransformerLayer, self).__init__() - self.layer_number = layer_number - self.layer_type = layer_type - - self.apply_residual_connection_post_norm \ - = config.apply_residual_connection_post_layernorm - - self.bf16 = config.bf16 - self.fp32_residual_connection = config.fp32_residual_connection - - # Normalize the input data. - self.input_norm = get_norm(config) - - # Self attention. - self.self_attention = ParallelAttention( - config, - layer_number, - attention_type=AttnType.self_attn, - attn_mask_type=self_attn_mask_type) - self.hidden_dropout = config.hidden_dropout - self.bias_dropout_fusion = config.bias_dropout_fusion - self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0.0 else None - - # Normalize the attention output - self.post_attention_norm = get_norm(config) - - # Cross attention. - if self.layer_type in (LayerType.decoder, - LayerType.retro_decoder, - LayerType.retro_decoder_with_retriever, - LayerType.retro_encoder): - self.inter_attention = ParallelAttention( - config, - layer_number, - attention_type=AttnType.cross_attn) - # Normalize the attention output. - self.post_inter_attention_norm = get_norm(config) - - # MLP - if args.num_experts is not None: - self.mlp = SwitchMLP(config) - else: - self.mlp = ParallelMLP(config) - - # Set bias+dropout+add fusion grad_enable execution handler. 
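The bias-dropout-add helpers above compose into a single residual update; a minimal sketch of the same pattern without the jit fusion decorators (sizes invented):

# Hedged sketch of the bias + dropout + residual-add pattern; the real code swaps in
# jit-fused train/inference variants, which are omitted here.
from typing import Optional
import torch

def bias_dropout_add(x: torch.Tensor, bias: Optional[torch.Tensor],
                     residual: torch.Tensor, prob: float, training: bool) -> torch.Tensor:
    if bias is not None:
        x = x + bias
    out = torch.nn.functional.dropout(x, p=prob, training=training)
    return residual + out

s, b, h = 4, 2, 8
attention_output = torch.randn(s, b, h)
attention_bias = torch.zeros(s, b, h)     # already expand_as(residual), as above
residual = torch.randn(s, b, h)
norm_input = bias_dropout_add(attention_output, attention_bias, residual,
                              prob=0.1, training=True)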
- TORCH_MAJOR = int(torch.__version__.split('.')[0]) - TORCH_MINOR = int(torch.__version__.split('.')[1]) - use_nvfuser = TORCH_MAJOR > 1 or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10) - self.bias_dropout_add_exec_handler = \ - nullcontext if use_nvfuser else torch.enable_grad - - if args.retro_add_retriever: - self.retro_num_neighbors = args.retro_num_neighbors - self.retro_chunk_length = args.retro_chunk_length - self.retro_retrieved_length = \ - args.retro_num_retrieved_chunks * args.retro_chunk_length - - # Retriever (bi-directional transformer with cross attention) - if layer_type == LayerType.retro_decoder_with_retriever: - self.retriever = ParallelTransformer( - config=config, - model_type=ModelType.retro_encoder, - self_attn_mask_type=AttnMaskType.padding, - pre_process=True, - post_process=False, - ) - self._retriever_key = 'retriever' - else: - self.retriever = None - - def default_decoder_cross_attention(self, - encoder_output, - enc_dec_attn_mask, - norm_input, - norm_output, - bias_dropout_add_func): - '''Cross attention for a standard encoder-decoder model.''' - - # Attention. - attention_output, attention_bias = \ - self.inter_attention(norm_output, - enc_dec_attn_mask, - encoder_output=encoder_output) - - # Residual connection. - if self.apply_residual_connection_post_norm: - residual = norm_output - else: - residual = norm_input - - if attention_bias is not None: - attention_bias = attention_bias.expand_as(residual) - - # Bias-dropout-add. - with self.bias_dropout_add_exec_handler(): - norm_input = bias_dropout_add_func( - attention_output, - attention_bias, - residual, - self.hidden_dropout) - - # Normalize. - norm_output = self.post_inter_attention_norm(norm_input) - - return norm_input, norm_output - - def retro_encoder_cross_attention(self, - retriever_output, - norm_input, - norm_output, - bias_dropout_add_func): - """Cross attention for Retro encoder. - - Notation: - ns : Sequence length. - bs : Batch size. - d : Hidden size. - l : Number of chunks per sample (i.e., seq_length/chunk_length). - k : Number of neighbors. - r : Number of retrieved tokens (neighbors + continuation). - """ - - ns, bs, d = norm_output.shape # [r, bs * l * k, d] - - # Divide sequence dimension into chunks. - chunked_outputs = norm_output.reshape(self.retro_retrieved_length, - -1, - self.retro_num_neighbors, - d) - chunked_outputs_before_norm = \ - norm_input.reshape(self.retro_retrieved_length, -1, - self.retro_num_neighbors, d) # [r, bs*l, k, d] - - # Per-chunk attention. - norm_inputs = [] - norm_outputs = [] - for k in range(self.retro_num_neighbors): - - # Attention. - chunked_output = chunked_outputs[:,:,k].contiguous() - attention_output, attention_bias = \ - self.inter_attention( - chunked_output, # Q (neighbor embedding) - None, - encoder_output=retriever_output) # K, V (hidden act) - - # Residual connection. - if self.apply_residual_connection_post_norm: - residual = chunked_output - else: - residual = chunked_outputs_before_norm[:,:,k] - - # Re-enable torch grad to enable fused optimization. - with torch.enable_grad(): - norm_input = bias_dropout_add_func( - attention_output, - None if attention_bias is None else attention_bias.expand_as(residual), - residual, - self.hidden_dropout) - norm_inputs.append(norm_input) - - # Layer norm. - norm_output = self.post_inter_attention_norm(norm_input) - norm_outputs.append(norm_output) - - # Concatenate layer norms. 
- # norm_input : [r, k * bs * l, d] - # norm_output : [r, k * bs * l, d] - norm_input = torch.stack(norm_inputs, dim=1).reshape(ns, bs, d) - norm_output = torch.stack(norm_outputs, dim=1).reshape(ns, bs, d) - - return norm_input, norm_output - - def retro_decoder_cross_attention(self, - retriever_input, - retriever_output, - retriever_attn_mask, - norm_input, - norm_output, - inference_params, - bias_dropout_add_func): - """Cross attention for Retro decoder. - - Notation: - ns : Sequence length. - bs : Batch size. - d : Hidden size. - l : Number of chunks per sample (i.e., seq_length/chunk_length). - m : Number of tokens per chunk. - k : Number of neighbors. - r : Number of retrieved tokens (neighbors + continuation). - """ - - ns, bs, d = norm_output.shape - l = int(np.ceil(ns / self.retro_chunk_length)) - - # Retrieve neighbors. - if self.layer_type == LayerType.retro_decoder_with_retriever: - first_ns = ns % self.retro_chunk_length - if first_ns > 0: - first_chunk, rest_chunk = \ - norm_output[:first_ns], norm_output[first_ns:] - first_chunk = torch.nn.functional.pad( - first_chunk, - (0, 0, 0, 0, 0, self.retro_chunk_length - first_ns), - 'constant', - 0) - chunked_output = \ - torch.cat((first_chunk, rest_chunk), dim=0) # [l * m, bs, d] - else: - chunked_output = norm_output # [l * m, bs, d] - chunked_output = chunked_output \ - .reshape(l, self.retro_chunk_length, bs, d) \ - .permute(1, 2, 0, 3) \ - .reshape(self.retro_chunk_length, bs * l, d) \ - .contiguous() - - # Get Encoder Output - retriever_output = self.retriever( - hidden_states=retriever_input, - attention_mask=retriever_attn_mask, - retriever_output=chunked_output, - retriever_attn_mask=retriever_attn_mask, - inference_params=inference_params) # [r, k * bs * l , d] - retriever_output = retriever_output.reshape( - self.retro_retrieved_length * self.retro_num_neighbors, bs * l, d) # [r * k, bs * l, d] - - # Chunks. - pad = (ns - 1) % self.retro_chunk_length - attending_chunks = norm_output[pad:] - padded_chunks = torch.nn.functional.pad( - attending_chunks, - (0, 0, 0, 0, 0, self.retro_chunk_length - 1), - 'constant', 0) - padded_chunked_output = padded_chunks \ - .reshape(l, self.retro_chunk_length, bs, d) \ - .permute(1, 2, 0, 3) - padded_chunked_output = padded_chunked_output.reshape( - self.retro_chunk_length, bs * l, d).contiguous() - - # Encoder output. - attention_output, attention_bias = \ - self.inter_attention(padded_chunked_output, - None, - encoder_output=retriever_output) - - # Residual connection. - if self.apply_residual_connection_post_norm: - residual = norm_output - else: - residual = norm_input - - # Re-enable torch grad to enable fused optimization. 
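A hedged, shape-only sketch of the chunk folding used in the Retro decoder cross attention above: the sequence is padded to a multiple of the chunk length and chunks are folded into the batch dimension, [ns, bs, d] -> [m, bs*l, d] (numbers invented; the exact front/back padding in the patch differs slightly).

# Hedged sketch of Retro-style chunk folding; numbers are invented.
import numpy as np
import torch

ns, bs, d = 10, 2, 8
chunk_length = 4
x = torch.randn(ns, bs, d)

l = int(np.ceil(ns / chunk_length))                            # chunks per sample
pad = l * chunk_length - ns
x_padded = torch.nn.functional.pad(x, (0, 0, 0, 0, 0, pad))    # pad the sequence dim
folded = (x_padded
          .reshape(l, chunk_length, bs, d)
          .permute(1, 2, 0, 3)                                  # [m, bs, l, d]
          .reshape(chunk_length, bs * l, d))                    # chunks folded into batch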
- with torch.enable_grad(): - norm_input = bias_dropout_add_func( - attention_output, - None if attention_bias is None else attention_bias.expand_as(attention_output), - torch.zeros_like(attention_output), - self.hidden_dropout) - norm_input = norm_input \ - .reshape(self.retro_chunk_length, bs, l, d) \ - .permute(2, 0, 1, 3) # [l, m, bs, d] - norm_input = norm_input.reshape(self.retro_chunk_length * l, bs, d) - norm_input = torch.nn.functional.pad( - norm_input, - (0, 0, 0, 0, pad, 0), - 'constant', 0)[:ns] # [ns, b, d] - # TODO: better redesign with inference param - args = get_args() - norm_input = args.retro_attention_gate * norm_input + residual - - # Layer norm post the decoder attention - norm_output = self.post_inter_attention_norm(norm_input) - - return retriever_output, norm_input, norm_output - - # @torch.compile(mode="max-autotune-no-cudagraphs") - def forward(self, hidden_states, attention_mask, - encoder_output=None, enc_dec_attn_mask=None, - retriever_input=None, - retriever_output=None, - retriever_attn_mask=None, - inference_params=None, - rotary_pos_emb=None): - - # Update the params in case the retro param changes during inference - # TODO: better redesign with inference param - args = get_args() - if args.retro_add_retriever: - self.retro_num_neighbors = args.retro_num_neighbors - self.retro_chunk_length = args.retro_chunk_length - self.retro_retrieved_length = \ - args.retro_num_retrieved_chunks * args.retro_chunk_length - - # hidden_states: [s, b, h] - - # Layer norm at the beginning of the transformer layer. - norm_output = self.input_norm(hidden_states) - - # Self attention. - attention_output, attention_bias = \ - self.self_attention( - norm_output, - attention_mask, - inference_params=inference_params, - rotary_pos_emb=rotary_pos_emb) - - # Residual connection. - if self.apply_residual_connection_post_norm: - residual = norm_output - else: - residual = hidden_states - - if self.drop_path is None: - # jit scripting for a nn.module (with dropout) is not - # trigerring the fusion kernel. For now, we use two - # different nn.functional routines to account for varying - # dropout semantics during training and inference phases. - if self.bias_dropout_fusion: - if self.training: - bias_dropout_add_func = bias_dropout_add_fused_train - else: - bias_dropout_add_func = bias_dropout_add_fused_inference - else: - bias_dropout_add_func = get_bias_dropout_add(self.training) - - if attention_bias is not None: - attention_bias = attention_bias.expand_as(residual) - with self.bias_dropout_add_exec_handler(): - norm_input = bias_dropout_add_func( - attention_output, - attention_bias, - residual, - self.hidden_dropout) - else: - out = torch.nn.functional.dropout(attention_output + attention_bias, - p=self.hidden_dropout, - training=self.training) - norm_input = residual + self.drop_path(out) - - # Layer norm post the self attention. - norm_output = self.post_attention_norm(norm_input) - - # Cross attention. 
- if self.layer_type == LayerType.encoder: - pass - elif self.layer_type == LayerType.decoder: - norm_input, norm_output = \ - self.default_decoder_cross_attention( - encoder_output, - enc_dec_attn_mask, - norm_input, - norm_output, - bias_dropout_add_func) - elif self.layer_type == LayerType.retro_encoder: - norm_input, norm_output = \ - self.retro_encoder_cross_attention( - retriever_output, - norm_input, - norm_output, - bias_dropout_add_func) - elif self.layer_type in (LayerType.retro_decoder, - LayerType.retro_decoder_with_retriever): - retriever_output, norm_input, norm_output = \ - self.retro_decoder_cross_attention( - retriever_input, - retriever_output, - retriever_attn_mask, - norm_input, - norm_output, - inference_params, - bias_dropout_add_func) - else: - raise Exception("Unsupported layer type, '%s'." % - self.layer_type.name) - - # MLP. - mlp_output, mlp_bias = self.mlp(norm_output) - - # Second residual connection. - if self.apply_residual_connection_post_norm: - residual = norm_output - else: - residual = norm_input - - if self.drop_path is None: - if mlp_bias is not None: - mlp_bias = mlp_bias.expand_as(residual) - with self.bias_dropout_add_exec_handler(): - output = bias_dropout_add_func( - mlp_output, - mlp_bias, - residual, - self.hidden_dropout) - - # Jit compiled function creates 'view' tensor. This tensor - # potentially gets saved in the MPU checkpoint function context, - # which rejects view tensors. While making a viewless tensor here - # won't result in memory savings (like the data loader, or - # p2p_communication), it serves to document the origin of this - # 'view' tensor. - output = core.utils.make_viewless_tensor(inp = output, - requires_grad = output.requires_grad, - keep_graph = True) - - else: - if mlp_bias is not None: - mlp_output = mlp_output + mlp_bias - out = torch.nn.functional.dropout(mlp_output, - p=self.hidden_dropout, - training=self.training) - output = residual + self.drop_path(out) - - if self.layer_type == LayerType.retro_decoder_with_retriever: - return output, retriever_output - else: - return output - - -class NoopTransformerLayer(MegatronModule): - """A single 'no-op' transformer layer. - - The sole purpose of this layer is for when a standalone embedding layer - is used (i.e., args.standalone_embedding_stage == True). In this case, - zero transformer layers are assigned when pipeline rank == 0. Additionally, - when virtual pipeline rank >= 1, zero total model parameters are created - (virtual rank 0 contains the input embedding). This results in the model's - input and output tensors being the same, which causes an error when - performing certain memory optimiations on the output tensor (e.g., - deallocating it). Thus, this layer disconnects the input from the output - via a clone. Since ranks containing a no-op layer are generally under- - utilized (both compute and memory), there's no worry of any performance - degredation. 
- """ - - def __init__(self, layer_number): - super().__init__() - self.layer_number = layer_number - - def forward(self, hidden_states, attention_mask, - encoder_output=None, enc_dec_attn_mask=None, - inference_params=None): - return hidden_states.clone() - - -def _get_num_layers(args, model_type, is_decoder=False): - """Compute the number of transformer layers resident on the current rank.""" - is_encoder_and_decoder_model = (model_type == ModelType.encoder_and_decoder) - if model_type == ModelType.retro_encoder: - num_layers = args.retro_encoder_layers - elif mpu.get_pipeline_model_parallel_world_size() > 1: - assert not is_encoder_and_decoder_model, "This is no longer supported." - assert args.num_layers == args.encoder_num_layers - assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ - 'num_layers must be divisible by transformer_pipeline_model_parallel_size' - - # When a standalone embedding stage is used, all transformer layers - # are divided among pipeline rank >= 1, while on pipeline rank 0, - # ranks either contain the input embedding layer (virtual pp rank 0), - # or no layers at all (virtual pp rank >= 1). - num_layers = ( - 0 - if args.standalone_embedding_stage - and mpu.get_pipeline_model_parallel_rank() == 0 else - args.num_layers // args.transformer_pipeline_model_parallel_size - ) - else: - if not is_decoder: - num_layers = args.encoder_num_layers - else: - num_layers = args.decoder_num_layers - return num_layers - - -def _get_layer_type(model_type, default_layer_type, retro_layer_numbers, - layer_number): - args = get_args() - if args.retro_add_retriever and layer_number in retro_layer_numbers: - if model_type == ModelType.retro_decoder: - return LayerType.retro_decoder_with_retriever \ - if layer_number == retro_layer_numbers[0] \ - else LayerType.retro_decoder - elif model_type == ModelType.retro_encoder: - return LayerType.retro_encoder - else: - raise Exception("Unsupported model type, '%s'." % model_type) - else: - return default_layer_type - - -class ParallelTransformer(MegatronModule): - """Transformer class.""" - - def __init__(self, config, - model_type, layer_type=LayerType.encoder, - self_attn_mask_type=AttnMaskType.padding, - post_norm=True, - pre_process=True, - post_process=True, - drop_path_rate=0.0): - super(ParallelTransformer, self).__init__() - args = get_args() - - self.layer_type = layer_type - self.model_type = model_type - self.bf16 = config.bf16 - self.fp32_residual_connection = config.fp32_residual_connection - self.post_norm = post_norm - self.pre_process = pre_process - self.post_process = post_process - self.input_tensor = None - self.drop_path_rate = drop_path_rate - self.transformer_impl = args.transformer_impl - self.retro_add_retriever = args.retro_add_retriever - - # Store activation checkpoiting flag. - self.recompute_granularity = config.recompute_granularity - self.recompute_method = config.recompute_method - self.recompute_num_layers = config.recompute_num_layers - self.distribute_saved_activations = \ - config.distribute_saved_activations and not config.sequence_parallel - - self.sequence_parallel = config.sequence_parallel - - # Transformer Engine Init. 
- self.transformer_engine_v_0_10 = False - self.transformer_engine_v_0_11 = False - self.transformer_engine_v_0_8 = False - if self.transformer_impl == 'transformer_engine': - global transformer_engine - import transformer_engine - - if core.utils.is_te_min_version("0.8.0"): - self.transformer_engine_v_0_8 = True - if core.utils.is_te_min_version("0.10.0"): - self.transformer_engine_v_0_10 = True - if core.utils.is_te_min_version("0.11.0"): - self.transformer_engine_v_0_11 = True - - assert not args.squared_relu, ("TransformerEngine does not support squared " - "relu activation.") - - self.use_fp8 = args.fp8 is not None - self.fp8_recipe = None - self.fp8_group = None - if self.use_fp8: - assert args.transformer_impl == 'transformer_engine', \ - 'transformer-engine required for fp8 training and inference' - self.fp8_group = mpu.get_amax_reduction_group(tp_only_amax_red=config.tp_only_amax_red) - if args.fp8 == "e4m3": - fp8_format = transformer_engine.common.recipe.Format.E4M3 - elif args.fp8 == "hybrid": - fp8_format = transformer_engine.common.recipe.Format.HYBRID - else: - raise ValueError("The DelayedScaling recipe only supports E4M3 and HYBRID formats.") - self.fp8_recipe = transformer_engine.common.recipe.DelayedScaling( - margin=args.fp8_margin, - interval=args.fp8_interval, - fp8_format=fp8_format, - amax_history_len=args.fp8_amax_history_len, - amax_compute_algo=args.fp8_amax_compute_algo, - override_linear_precision=(False, False, not args.fp8_wgrad), - ) - - self.num_microbatches_in_previous_step = -1 - self.microbatch_count = 0 - self.checkpoint_core_attention = config.recompute_granularity == 'selective' - - # Number of layers. - self.num_layers = _get_num_layers(args, model_type, - layer_type==LayerType.decoder) - - self.drop_path_rates = [ - rate.item() for rate in - torch.linspace(0, self.drop_path_rate, config.num_layers)] - - self.retro_layer_numbers = None - if model_type == ModelType.retro_decoder: - retro_layer_start = 6 if config.num_layers <= 15 else 9 - self.retro_layer_numbers = \ - np.arange(retro_layer_start, args.num_layers + 1, 3).tolist() - if model_type == ModelType.retro_encoder: - self.retro_layer_numbers = [1] - - # Transformer layers. - if args.retro_add_retriever: - assert self.recompute_granularity != 'full', \ - "Full recompute not supported for Retro." - assert args.transformer_impl == 'local', \ - "Transformer engine does not support Retro layers." - def build_layer(layer_number): - if args.transformer_impl == 'local': - current_layer_type = _get_layer_type( - model_type, layer_type, self.retro_layer_numbers, - layer_number) - return ParallelTransformerLayer( - config, - layer_number, - layer_type=current_layer_type, - self_attn_mask_type=self_attn_mask_type, - drop_path_rate=self.drop_path_rates[layer_number - 1]) - else: - # This argument is only available from TE v0.10 onwards. - extra_transformer_engine_kwargs = {} - if self.transformer_engine_v_0_8: - extra_transformer_engine_kwargs["bias"] = args.add_bias_linear - if self.transformer_engine_v_0_10: - extra_transformer_engine_kwargs["activation"] = "swiglu" if args.swiglu else "gelu" - if self.transformer_engine_v_0_11: - extra_transformer_engine_kwargs["normalization"] = args.normalization - assert config.attention_softmax_in_fp32, "TransformerEngine only supports softmax compute in FP32." - assert ( - (bool(int(os.getenv("NVTE_APPLY_QK_LAYER_SCALING", "0"))) and args.fp16) == config.apply_query_key_layer_scaling - ), ("Unsupported config for apply_query_key_layer_scaling in TransformerEngine. 
If --apply-query-key-layer-scaling is " - "provided, set env-var NVTE_APPLY_QK_LAYER_SCALING=1 and you must be using fp16.") - return transformer_engine.pytorch.TransformerLayer( - config.hidden_size, - config.ffn_hidden_size, - config.num_attention_heads, - layernorm_epsilon=config.layernorm_epsilon, - hidden_dropout=config.hidden_dropout, - attention_dropout=config.attention_dropout, - init_method=config.init_method, - output_layer_init_method=config.output_layer_init_method, - layer_number=layer_number, - kv_channels=config.kv_channels, - self_attn_mask_type=self_attn_mask_type.name, - tp_group=mpu.get_tensor_model_parallel_group() if mpu.is_initialized() else None, - tp_size=mpu.get_tensor_model_parallel_world_size(), - get_rng_state_tracker=get_cuda_rng_tracker - if get_cuda_rng_tracker().is_initialized() - else None, - fuse_wgrad_accumulation=config.gradient_accumulation_fusion, - seq_length=args.seq_length, - micro_batch_size=args.micro_batch_size, - sequence_parallel=config.sequence_parallel, - params_dtype=config.params_dtype, - apply_residual_connection_post_layernorm=config.apply_residual_connection_post_layernorm, - output_layernorm=False, - layer_type="encoder", - drop_path_rate=self.drop_path_rates[layer_number - 1], - set_parallel_mode=True, - fuse_qkv_params=True, - **extra_transformer_engine_kwargs) - - if config.virtual_pipeline_model_parallel_size is not None: - assert config.num_layers % config.virtual_pipeline_model_parallel_size == 0, \ - 'num_layers_per_stage must be divisible by ' \ - 'virtual_pipeline_model_parallel_size' - assert args.model_type != ModelType.encoder_and_decoder - # Number of layers in each model chunk is the number of layers in the stage, - # divided by the number of model chunks in a stage. - self.num_layers = self.num_layers // config.virtual_pipeline_model_parallel_size - # With 8 layers, 2 stages, and 4 model chunks, we want an assignment of - # layers to stages like (each list is a model chunk): - # Stage 0: [0] [2] [4] [6] - # Stage 1: [1] [3] [5] [7] - # With 8 layers, 2 stages, and 2 virtual stages, we want an assignment of - # layers to stages like (each list is a model chunk): - # Stage 0: [0, 1] [4, 5] - # Stage 1: [2, 3] [6, 7] - offset = mpu.get_virtual_pipeline_model_parallel_rank() * ( - config.num_layers // config.virtual_pipeline_model_parallel_size) + \ - (mpu.get_pipeline_model_parallel_rank() * self.num_layers) - else: - # Each stage gets a contiguous set of layers. - if args.model_type == ModelType.encoder_and_decoder and \ - mpu.get_pipeline_model_parallel_world_size() > 1: - pipeline_rank = mpu.get_pipeline_model_parallel_rank() - if layer_type == LayerType.encoder: - offset = pipeline_rank * self.num_layers - else: - num_ranks_in_enc = args.pipeline_model_parallel_split_rank - offset = (pipeline_rank - num_ranks_in_enc) * self.num_layers - else: - offset = mpu.get_pipeline_model_parallel_rank() * self.num_layers - - if self.num_layers == 0: - # When a standalone embedding stage is used (e.g., - # args.standalone_embedding_stage == True), virtual pipeline ranks - # on pipeline rank 0 will have zero transformer layers assigned to - # them. This results in the model's input and output tensors to be - # the same, which will cause failure for certain output tensor - # optimizations (e.g., pipeline output deallocation). To remedy - # this, we assign a 'no-op' layer on these ranks, which will - # disconnect the input tensor from the output tensor. 
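The interleaved assignment spelled out in the comments above ("Stage 0: [0] [2] [4] [6] / Stage 1: [1] [3] [5] [7]") follows from the virtual-pipeline offset formula; a hedged sketch that reproduces that example with made-up sizes:

# Hedged sketch of the virtual-pipeline layer offset; reproduces the 8-layer example above.
def chunk_offset(num_layers_total, vp_size, pp_size, vp_rank, pp_rank):
    layers_per_chunk = num_layers_total // vp_size // pp_size
    return vp_rank * (num_layers_total // vp_size) + pp_rank * layers_per_chunk

num_layers, vp_size, pp_size = 8, 4, 2
for pp_rank in range(pp_size):
    chunks = []
    for vp_rank in range(vp_size):
        layers_per_chunk = num_layers // vp_size // pp_size          # 1 layer per chunk
        start = chunk_offset(num_layers, vp_size, pp_size, vp_rank, pp_rank)
        chunks.append(list(range(start, start + layers_per_chunk)))
    print(pp_rank, chunks)   # 0 -> [[0], [2], [4], [6]], 1 -> [[1], [3], [5], [7]]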
- self.num_layers = 1 - self.layers = torch.nn.ModuleList([ NoopTransformerLayer(1) ]) - else: - self.layers = torch.nn.ModuleList( - [build_layer(i + 1 + offset) for i in range(self.num_layers)]) - - # Update dropout rate for Retro encoder. - if model_type == ModelType.retro_encoder: - for layer in self.layers: - if layer.self_attention.use_flash_attn: - layer.self_attention.core_attention_flash.dropout_p = \ - torch.nn.Dropout(args.retro_encoder_attention_dropout) - else: - layer.self_attention.core_attention.attention_dropout.p =\ - args.retro_encoder_attention_dropout - layer.hidden_dropout = args.retro_encoder_hidden_dropout - - if self.post_process and self.post_norm: - # Final layer norm before output. - self.final_norm = get_norm(config) - - def _get_layer(self, layer_number): - return self.layers[layer_number] - - def _checkpointed_forward(self, hidden_states, attention_mask, - encoder_output, enc_dec_attn_mask, - rotary_pos_emb, is_first_microbatch): - """Forward method with activation checkpointing.""" - def custom(start, end): - def custom_forward(*args, **kwargs): - x_, *args = args - for index in range(start, end): - layer = self._get_layer(index) - x_ = layer(x_, *args, **kwargs) - return x_ - return custom_forward - - te_forward_kwargs = {} - if self.transformer_impl == 'transformer_engine': - te_forward_kwargs['is_first_microbatch'] = is_first_microbatch - if self.transformer_engine_v_0_10: - te_forward_kwargs['rotary_pos_emb'] = rotary_pos_emb - - if self.recompute_method == 'uniform': - # Uniformly divide the total number of Transformer layers and - # checkpoint the input activation of each divided chunk. - # A method to further reduce memory usage reducing checkpoints. - l = 0 - while l < self.num_layers: - if self.transformer_impl == 'transformer_engine': - hidden_states = transformer_engine.pytorch.checkpoint( - custom(l, l + self.recompute_num_layers), - self.distribute_saved_activations, - tensor_parallel.get_cuda_rng_tracker, - mpu.get_tensor_model_parallel_group(), - hidden_states, attention_mask, encoder_output, - enc_dec_attn_mask, **te_forward_kwargs) - else: - hidden_states = tensor_parallel.checkpoint( - custom(l, l + self.recompute_num_layers), - self.distribute_saved_activations, - hidden_states, attention_mask, - encoder_output, enc_dec_attn_mask, - None, None, None, None, rotary_pos_emb) - - l += self.recompute_num_layers - - elif self.recompute_method == 'block': - # Checkpoint the input activation of only a set number of individual - # Transformer layers and skip the rest. - # A method fully use the device memory removing redundant re-computation. 
- for l in range(self.num_layers): - if l < self.recompute_num_layers: - if self.transformer_impl == 'transformer_engine': - hidden_states = transformer_engine.pytorch.checkpoint( - custom(l, l + 1), - self.distribute_saved_activations, - tensor_parallel.get_cuda_rng_tracker, - mpu.get_tensor_model_parallel_group(), - hidden_states, attention_mask, encoder_output, - enc_dec_attn_mask, **te_forward_kwargs) - else: - hidden_states = tensor_parallel.checkpoint( - custom(l, l + 1), - self.distribute_saved_activations, - hidden_states, attention_mask, - encoder_output, enc_dec_attn_mask, - None, None, None, None, rotary_pos_emb) - else: - if self.transformer_impl == 'transformer_engine': - hidden_states = custom(l, l + 1)( - hidden_states, attention_mask, encoder_output, - enc_dec_attn_mask, **te_forward_kwargs) - else: - hidden_states = custom(l, l + 1)( - hidden_states, attention_mask, - encoder_output, enc_dec_attn_mask, - None, None, None, None, rotary_pos_emb) - else: - raise ValueError("Invalid activation recompute method.") - - return hidden_states - - def set_input_tensor(self, input_tensor): - """Set input tensor to be used instead of forward()'s input. - - When doing pipeline parallelism the input from the previous - stage comes from communication, not from the input, so the - model's forward_step_func won't have it. This function is thus - used by internal code to bypass the input provided by the - forward_step_func""" - self.input_tensor = input_tensor - - def forward(self, hidden_states, attention_mask, - encoder_output=None, enc_dec_attn_mask=None, - retriever_input=None, - retriever_output=None, - retriever_attn_mask=None, - inference_params=None, - rotary_pos_emb=None): - # hidden_states: [s, b, h] - - # Checks. - if inference_params: - assert self.recompute_granularity is None, \ - 'inference does not work with activation checkpointing' - - if not self.pre_process: - # See set_input_tensor() - hidden_states = self.input_tensor - - # Viewless tensor. - # - We only need to create a viewless tensor in the case of micro batch - # size (mbs) == 1, since in this case, 'hidden_states.transpose()' - # above creates a view tensor, and '.contiguous()' is a pass-through. - # For mbs >= 2, '.contiguous()' creates a new tensor, eliminating - # the need to make it viewless. - # - # However, we don't explicitly check mbs == 1 here because - # make_viewless_tensor() has negligible overhead when its input - # is already viewless. - # - # - For the 'else' case above, calling make_viewless_tensor() here is - # likely redundant, since p2p_communication.py (likely originator) - # already creates viewless tensors. That said, make_viewless_tensor() - # is called here to be future-proof and corner-case-proof. - hidden_states = core.utils.make_viewless_tensor( - hidden_states, - requires_grad=True, - keep_graph=True, - ) - - # RNG context. - if self.sequence_parallel: - rng_context = tensor_parallel.get_cuda_rng_tracker().fork() - else: - rng_context = nullcontext() - - # Forward layers. 
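A hedged, framework-free sketch of the two recompute schedules above: "uniform" checkpoints every chunk of recompute_num_layers consecutive layers, while "block" checkpoints only the first recompute_num_layers layers and runs the rest without recomputation (the checkpointing call itself is stubbed out).

# Hedged sketch of the 'uniform' vs 'block' activation-recompute schedules.
def recompute_plan(num_layers, recompute_num_layers, method):
    plan = []  # (start_layer, end_layer, checkpointed?)
    if method == "uniform":
        l = 0
        while l < num_layers:
            plan.append((l, l + recompute_num_layers, True))
            l += recompute_num_layers
    elif method == "block":
        for l in range(num_layers):
            plan.append((l, l + 1, l < recompute_num_layers))
    else:
        raise ValueError("Invalid activation recompute method.")
    return plan

print(recompute_plan(8, 2, "uniform"))   # four checkpointed chunks of two layers
print(recompute_plan(8, 2, "block"))     # first two layers checkpointed, rest run normally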
- with rng_context: - # The fp8_autocast context manager is a no-op when enabled=True - # The if...else serves to short circuit name resolution for fp8_autocast - with transformer_engine.pytorch.fp8_autocast( - enabled=self.use_fp8, - fp8_recipe=self.fp8_recipe, - fp8_group=self.fp8_group - ) if self.use_fp8 else nullcontext(): - # Determine if the current iteration is first microbatch - if self.num_microbatches_in_previous_step != get_num_microbatches(): - self.microbatch_count = 0 # Reset count on new batch size rampup interval - self.num_microbatches_in_previous_step = get_num_microbatches() - is_first_microbatch = self.microbatch_count % get_num_microbatches() == 0 - - # Forward pass. - if self.recompute_granularity == 'full': - hidden_states = self._checkpointed_forward(hidden_states, - attention_mask, - encoder_output, - enc_dec_attn_mask, - rotary_pos_emb, - is_first_microbatch) - else: - forward_kwargs = { - 'encoder_output': encoder_output, - 'enc_dec_attn_mask': enc_dec_attn_mask, - 'inference_params': inference_params, - } - - if self.transformer_impl == 'transformer_engine': - forward_kwargs['is_first_microbatch'] = is_first_microbatch - forward_kwargs['checkpoint_core_attention'] = self.checkpoint_core_attention - if self.transformer_engine_v_0_10: - forward_kwargs['rotary_pos_emb'] = rotary_pos_emb - else: - forward_kwargs['rotary_pos_emb'] = rotary_pos_emb - forward_kwargs['retriever_input'] = retriever_input - forward_kwargs['retriever_output'] = retriever_output - forward_kwargs['retriever_attn_mask'] = retriever_attn_mask - - for index in range(self.num_layers): - layer = self._get_layer(index) - - hidden_states = layer( - hidden_states, - attention_mask, - **forward_kwargs) - - # First Retro decoder layer returns both hidden_states - # and retriever_output. Make retriever_output available - # to subsequence Retro layers. - if isinstance(hidden_states, tuple): - assert len(hidden_states) == 2 - hidden_states, retriever_output = hidden_states - forward_kwargs["retriever_output"] = retriever_output - - # Skip counter update for eval and activation checkpointing - if torch.is_grad_enabled() and self.training: - self.microbatch_count += 1 - - # Final layer norm. - if self.post_process and self.post_norm: - hidden_states = self.final_norm(hidden_states) - - return hidden_states - - def load_state_dict(self, state_dict, strict=True): - """Customize load.""" - - # Handle renaming layernorm -> norm in component names - state_dict_ = {} - for key in state_dict.keys(): - # Bypass TransformerEngine module parameters. - if "layernorm_qkv" in key or "layernorm_mlp" in key: - state_dict_[key] = state_dict[key] - continue - newkey = key.replace("layernorm", "norm") - state_dict_[newkey] = state_dict[key] - - super().load_state_dict(state_dict_, strict) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
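The custom load_state_dict above renames "layernorm" to "norm" in parameter keys while leaving TransformerEngine fused modules untouched; a hedged, stand-alone sketch of that mapping (keys are made up):

# Hedged sketch of the layernorm -> norm key renaming done in load_state_dict above.
def remap_keys(state_dict):
    remapped = {}
    for key, value in state_dict.items():
        # TransformerEngine fused modules keep their original names.
        if "layernorm_qkv" in key or "layernorm_mlp" in key:
            remapped[key] = value
            continue
        remapped[key.replace("layernorm", "norm")] = value
    return remapped

old = {"layers.0.input_layernorm.weight": 1,
       "layers.0.self_attention.layernorm_qkv.weight": 2}
new = remap_keys(old)
# {'layers.0.input_norm.weight': 1, 'layers.0.self_attention.layernorm_qkv.weight': 2}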
+ +"""Transformer.""" +import math +import os +from contextlib import nullcontext +from typing import Optional + +import numpy as np +import torch +import torch.nn.functional as F + +from megatron import core +from megatron.core import mpu, tensor_parallel +from megatron.core.enums import ModelType +from megatron.legacy.model.enums import AttnMaskType, LayerType, AttnType +from megatron.legacy.model.fused_softmax import FusedScaleMaskSoftmax +from megatron.legacy.model.fused_bias_gelu import bias_gelu_impl +from megatron.core.models.common.embeddings import apply_rotary_pos_emb +from megatron.core.jit import jit_fuser +from megatron.core.num_microbatches_calculator import get_num_microbatches +from megatron.core.parallel_state import ( + get_expert_tensor_and_model_parallel_group, + get_tensor_model_parallel_group, +) +from megatron.core.tensor_parallel import ( + gather_from_sequence_parallel_region, + reduce_scatter_to_sequence_parallel_region, + get_cuda_rng_tracker, + get_data_parallel_rng_tracker_name, +) +from megatron.legacy.model.enums import AttnMaskType, AttnType, LayerType +from megatron.legacy.model.fused_bias_gelu import bias_gelu_impl +from megatron.legacy.model.fused_softmax import FusedScaleMaskSoftmax +from megatron.legacy.model.utils import ( + attention_mask_func, + erf_gelu, + get_norm, + openai_gelu, +) +from megatron.training import get_args, get_timers + +from .module import MegatronModule + +try: + from einops import rearrange +except ImportError: + rearrange = None + +try: + from flash_attn.flash_attn_interface import flash_attn_unpadded_func +except ImportError: + try: + from flash_attn.flash_attn_interface import ( + flash_attn_varlen_func as flash_attn_unpadded_func, + ) + except ImportError: + flash_attn_unpadded_func = None + +""" We use the following notation throughout this file: + h: hidden size + n: number of attention heads + p: number of model parallel partitions + np: n/p + hp: h/p + hn: h/n + b: batch size + s: sequence length + l: number of layers + Transformer takes input of size [s, b, h] and returns a + tensor of the same size. We use the following arguments: + hyperparameters: transformer hyperparameters +""" + +class DropPath(MegatronModule): + """Drop paths (Stochastic Depth) per sample + (when applied in main path of residual blocks). + """ + + def __init__(self, drop_prob=0.): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, hidden_state): + if self.drop_prob == 0. or not self.training: + return hidden_state + keep_prob = 1 - self.drop_prob + # work with diff dim tensors, not just 2D ConvNets + # hidden_state: [s, b, h] + shape = (1,) + (hidden_state.shape[1],) + (1,) * (hidden_state.ndim - 2) + random_tensor = keep_prob + \ + torch.rand(shape, dtype=hidden_state.dtype, device=hidden_state.device) + random_tensor.floor_() # binarize + output = hidden_state.div(keep_prob) * random_tensor + return output + +class ParallelMLP(MegatronModule): + """MLP. + + MLP will take the input with h hidden state, project it to 4*h + hidden dimension, perform nonlinear transformation, and project the + state back into h hidden dimension. + """ + + def __init__(self, config, is_expert=False): + super(ParallelMLP, self).__init__() + args = get_args() + + self.add_bias = config.add_bias_linear + + ffn_hidden_size = config.ffn_hidden_size + if config.gated_linear_unit: + ffn_hidden_size *= 2 + + # Project to 4h. 
If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf + self.dense_h_to_4h = tensor_parallel.ColumnParallelLinear( + config.hidden_size, + ffn_hidden_size, + config=config, + init_method=config.init_method, + bias=self.add_bias, + gather_output=False, + skip_bias_add=True, + is_expert=is_expert, + ) + + self.bias_gelu_fusion = False + self.activation_func = None + self.swiglu = args.swiglu + + if args.openai_gelu: + self.activation_func = openai_gelu + elif args.onnx_safe: + self.activation_func = erf_gelu + elif args.swiglu: + def swiglu(x): + x = torch.chunk(x, 2, dim=-1) + return F.silu(x[0]) * x[1] + self.activation_func = swiglu + elif args.squared_relu: + def squared_relu(x): + return torch.pow(F.relu(x), 2) + self.activation_func = squared_relu + else: + self.bias_gelu_fusion = args.bias_gelu_fusion + self.activation_func = F.gelu + + # Project back to h. + self.dense_4h_to_h = tensor_parallel.RowParallelLinear( + config.ffn_hidden_size, + config.hidden_size, + config=config, + init_method=config.output_layer_init_method, + bias=self.add_bias, + skip_bias_add=True, + input_is_parallel=True, + is_expert=is_expert, + ) + + def forward(self, hidden_states): + + # [s, b, 4hp] + intermediate_parallel, bias_parallel = self.dense_h_to_4h(hidden_states) + + if self.bias_gelu_fusion: + assert self.add_bias is True + assert self.activation_func == F.gelu + intermediate_parallel = bias_gelu_impl(intermediate_parallel, bias_parallel) + else: + if bias_parallel is not None: + intermediate_parallel = intermediate_parallel + bias_parallel + intermediate_parallel = self.activation_func(intermediate_parallel) + + # [s, b, h] + output, output_bias = self.dense_4h_to_h(intermediate_parallel) + return output, output_bias + +def sinkhorn(cost, tol=0.0001): + cost = torch.exp(cost) + d0 = torch.ones(cost.size(0), device=cost.device, dtype=cost.dtype) + d1 = torch.ones(cost.size(1), device=cost.device, dtype=cost.dtype) + + eps = 0.00000001 + error = 1e9 + d1_old = d1 + while error > tol: + d0 = (1/d0.size(0))*1/(torch.sum(d1*cost,1) + eps) + d1 = (1/d1.size(0))*1/(torch.sum(d0.unsqueeze(1)*cost,0)+eps) + error = torch.mean(torch.abs(d1_old-d1)) + d1_old = d1 + return d1*cost*d0.unsqueeze(1) + + +def get_router_linear_layer(config): + args = get_args() + router = torch.nn.Linear(args.hidden_size, args.num_experts, bias=False) + with get_cuda_rng_tracker().fork(get_data_parallel_rng_tracker_name()): + config.init_method(router.weight) + setattr(router.weight, 'sequence_parallel',config.sequence_parallel) + return router + + +class SwitchMLP(MegatronModule): + """ + Routes input to one of N MLP "experts" + """ + def __init__(self, config): + super(SwitchMLP, self).__init__() + args = get_args() + self.router = get_router_linear_layer(config) + self.expert_parallel_size = mpu.get_expert_model_parallel_world_size() + self.sequence_parallel = config.sequence_parallel + self.add_bias = config.add_bias_linear + + assert args.num_experts % self.expert_parallel_size == 0 + self.num_local_experts = args.num_experts // self.expert_parallel_size + local_expert_indices_offset = mpu.get_expert_model_parallel_rank() * self.num_local_experts + self.local_expert_indices = [local_expert_indices_offset + i for i in range(self.num_local_experts)] + + self.local_experts = torch.nn.ModuleList() + for i in range(self.num_local_experts): + self.local_experts.append(ParallelMLP(config, is_expert=True)) + + self.tp_ep_group = get_expert_tensor_and_model_parallel_group() + + def gather_indices(self, 
local_indices): + """ Gather tensors and concatinate along the first dimension.""" + world_size = torch.distributed.get_world_size(group=self.tp_ep_group) + # Bypass the function if we are using only 1 GPU. + if world_size == 1: + return local_indices + + dim_size = list(local_indices.size()) + dim_size[0] = dim_size[0] * world_size + + # TODO pre allocate memory + output = torch.empty(dim_size, dtype=local_indices.dtype, + device=torch.cuda.current_device()) + torch.distributed._all_gather_base( + output, local_indices.contiguous(), group=self.tp_ep_group + ) + return output + + def forward(self, hidden_states): + # hidden_states: [b, s, h] + args = get_args() + s = hidden_states.size(0) + b = hidden_states.size(1) + h = hidden_states.size(2) + route = self.router(hidden_states).view(-1, args.num_experts) + + # TODO (rprenger) Right now we're just using the sinkhorn algorithm + # for load balancing. There should be an option to do no load balancing + # and the algorithm and parametets should be further tested + if self.training: + with torch.no_grad(): + sinkroute = sinkhorn(route.detach().to(dtype=torch.float32)) + _, max_ind = torch.max(sinkroute, dim=1) + route = torch.sigmoid(route) + max_prob = route[torch.arange(route.size(0)), max_ind] + else: + route = torch.sigmoid(route) + max_prob, max_ind = torch.max(route, dim=1) + + max_prob = torch.unsqueeze(max_prob, 1) + hidden_states = hidden_states.view(-1, hidden_states.size(2)) + + # TODO (rprenger) TODO this could be made easier to read + # Converting [s, b, h] to [s*b, h]. + # Each vector could be routed differently + if self.sequence_parallel or (self.expert_parallel_size > 1): + global_hidden_states = \ + gather_from_sequence_parallel_region(hidden_states, group=self.tp_ep_group) + global_indices = self.gather_indices(max_ind) + else: + global_hidden_states = hidden_states + global_indices = max_ind + + output_total = torch.zeros_like(global_hidden_states) + if self.add_bias: + output_bias_total = torch.zeros_like(global_hidden_states) + + for expert_num, expert in enumerate(self.local_experts): + local_expert_index = self.local_expert_indices[expert_num] + local_indices = (global_indices == local_expert_index).nonzero() + hidden = global_hidden_states[local_indices, :] + output, output_bias = expert(hidden) + output_total[local_indices, :] = output + if self.add_bias: + output_bias = output_bias.expand_as(output) + output_bias_total[local_indices, :] = output_bias + + if self.sequence_parallel or (self.expert_parallel_size > 1): + output_total = \ + reduce_scatter_to_sequence_parallel_region(output_total, group=self.tp_ep_group) + if self.add_bias: + output_bias_total = \ + reduce_scatter_to_sequence_parallel_region(output_bias_total, group=self.tp_ep_group) + + # bias is duplicated across tensor parallelism ranks; + # reduce scatter reduces bias across tensor parallel_ranks + output_bias_total = \ + output_bias_total/mpu.get_tensor_model_parallel_world_size() + + output_total = output_total*max_prob + output_total = output_total.view(s, b, h) + if self.add_bias: + output_bias_total = output_bias_total*max_prob + output_bias_total = output_bias_total.view(s, b, h) + else: + output_bias_total = None + + return output_total, output_bias_total + + +class CoreAttention(MegatronModule): + + def __init__(self, layer_number, config, + attn_mask_type=AttnMaskType.padding): + super(CoreAttention, self).__init__() + self.fp16 = config.fp16 + self.bf16 = config.bf16 + + self.apply_query_key_layer_scaling = 
config.apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = config.attention_softmax_in_fp32 + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + self.layer_number = max(1, layer_number) + self.attn_mask_type = attn_mask_type + self.sequence_parallel = config.sequence_parallel + + projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. + world_size = mpu.get_tensor_model_parallel_world_size() + self.hidden_size_per_partition = core.utils.divide(projection_size, + world_size) + self.hidden_size_per_attention_head = core.utils.divide( + projection_size, config.num_attention_heads) + self.num_attention_heads_per_partition = core.utils.divide( + config.num_attention_heads, world_size) + + coeff = None + self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) + if self.apply_query_key_layer_scaling: + coeff = self.layer_number + self.norm_factor *= coeff + + self.scale_mask_softmax = FusedScaleMaskSoftmax( + self.fp16, self.bf16, + self.attn_mask_type, + config.masked_softmax_fusion, + attention_mask_func, + self.attention_softmax_in_fp32, + coeff) + + # Dropout. Note that for a single iteration, this layer will generate + # different outputs on a different number of parallel partitions, but + # on average it should not be partition dependent. + self.attention_dropout = torch.nn.Dropout(config.attention_dropout) + + def forward(self, query_layer, key_layer, + value_layer, attention_mask): + + # =================================== + # Raw attention scores. [b, np, s, s] + # =================================== + + # [b, np, sq, sk] + output_size = (query_layer.size(1), + query_layer.size(2), + query_layer.size(0), + key_layer.size(0)) + + # [sq, b, np, hn] -> [sq, b * np, hn] + query_layer = query_layer.reshape(output_size[2], + output_size[0] * output_size[1], -1) + # [sk, b, np, hn] -> [sk, b * np, hn] + key_layer = key_layer.view(output_size[3], + output_size[0] * output_size[1], -1) + + # preallocating input tensor: [b * np, sq, sk] + matmul_input_buffer = mpu.get_global_memory_buffer().get_tensor( + (output_size[0]*output_size[1], output_size[2], output_size[3]), + query_layer.dtype, "mpu") + + # Raw attention scores. [b * np, sq, sk] + matmul_result = torch.baddbmm( + matmul_input_buffer, + query_layer.transpose(0, 1), # [b * np, sq, hn] + key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] + beta=0.0, alpha=(1.0/self.norm_factor)) + + # change view to [b, np, sq, sk] + attention_scores = matmul_result.view(*output_size) + + # =========================== + # Attention probs and dropout + # =========================== + + # attention scores and attention mask [b, np, sq, sk] + attention_probs = self.scale_mask_softmax(attention_scores, + attention_mask) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. + if not self.sequence_parallel: + with tensor_parallel.get_cuda_rng_tracker().fork(): + attention_probs = self.attention_dropout(attention_probs) + else: + attention_probs = self.attention_dropout(attention_probs) + + # ========================= + # Context layer. [sq, b, hp] + # ========================= + + # value_layer -> context layer. 
+ # [sk, b, np, hn] --> [b, np, sq, hn] + + # context layer shape: [b, np, sq, hn] + output_size = (value_layer.size(1), + value_layer.size(2), + query_layer.size(0), + value_layer.size(3)) + + # change view [sk, b * np, hn] + value_layer = value_layer.view(value_layer.size(0), + output_size[0] * output_size[1], -1) + + # change view [b * np, sq, sk] + attention_probs = attention_probs.view(output_size[0] * output_size[1], + output_size[2], -1) + + # matmul: [b * np, sq, hn] + context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) + + # change view [b, np, sq, hn] + context_layer = context_layer.view(*output_size) + + # [b, np, sq, hn] --> [sq, b, np, hn] + context_layer = context_layer.permute(2, 0, 1, 3).contiguous() + + # [sq, b, np, hn] --> [sq, b, hp] + new_context_layer_shape = context_layer.size()[:-2] + \ + (self.hidden_size_per_partition,) + context_layer = context_layer.view(*new_context_layer_shape) + + return context_layer + + +class FlashSelfAttention(torch.nn.Module): + """Implement the scaled dot product attention with softmax. + Arguments + --------- + softmax_scale: The temperature to use for the softmax attention. + (default: 1/sqrt(d_keys) where d_keys is computed at + runtime) + attention_dropout: The dropout rate to apply to the attention + (default: 0.0) + """ + def __init__(self, causal=False, softmax_scale=None, attention_dropout=0.0, + device=None, dtype=None): + super().__init__() + assert flash_attn_unpadded_func is not None, ('Please install FlashAttention first, ' + 'e.g., with pip install flash-attn') + assert rearrange is not None, 'Please install einops first, e.g., with pip install einops' + self.causal = causal + self.softmax_scale = softmax_scale + self.dropout_p = attention_dropout + + def forward(self, q, k, v): + """Implements the multihead softmax attention. + Arguments + --------- + q, k, v: The tensor containing the query, key, and value. (B, S, H, D) + """ + + assert all((i.dtype in [torch.float16, torch.bfloat16] for i in (q,k,v))) + assert all((i.is_cuda for i in (q,k,v))) + + batch_size, seqlen_q = q.shape[0], q.shape[1] + seqlen_k = k.shape[1] + + q, k, v = [rearrange(x, 'b s ... -> (b s) ...') for x in [q, k, v]] + cu_seqlens_q = torch.arange(0, (batch_size + 1) * seqlen_q, step=seqlen_q, dtype=torch.int32, + device=q.device) + + if self.training: + # during training q,k,v always have same seqlen + assert seqlen_k == seqlen_q + + is_causal = self.causal + cu_seqlens_k = cu_seqlens_q + dropout_p = self.dropout_p + else: + # turn off FA causal mask after first inference autoregressive iteration + # only on first autoregressive step q,k,v have same seqlen + is_causal = seqlen_q == seqlen_k + cu_seqlens_k = torch.arange(0, (batch_size + 1) * seqlen_k, step=seqlen_k, dtype=torch.int32, + device=q.device) + dropout_p = 0 + + output = flash_attn_unpadded_func( + q, k, v, cu_seqlens_q, cu_seqlens_k, seqlen_q, seqlen_k, + dropout_p, + softmax_scale=self.softmax_scale, causal=is_causal + ) + + output = rearrange(output, '(b s) ... -> b s ...', b=batch_size) + return output + + +class ParallelAttention(MegatronModule): + """Parallel self-attention layer abstract class. + + Self-attention layer takes input with size [s, b, h] + and returns output of the same size. 
+ """ + + def __init__(self, config, layer_number, + attention_type=AttnType.self_attn, + attn_mask_type=AttnMaskType.padding): + super(ParallelAttention, self).__init__() + args = get_args() + self.layer_number = max(1, layer_number) + self.attention_type = attention_type + self.attn_mask_type = attn_mask_type + self.params_dtype = config.params_dtype + self.sequence_parallel = config.sequence_parallel + self.config = config + self.group_query_attention = args.group_query_attention + self.num_query_groups = args.num_query_groups + + query_projection_size = config.kv_channels * config.num_attention_heads + if self.group_query_attention: + kv_projection_size = args.kv_channels * args.num_query_groups + else: + kv_projection_size = args.kv_channels * args.num_attention_heads + + self.use_flash_attn = args.use_flash_attn \ + and attention_type == AttnType.self_attn \ + and self.attn_mask_type == AttnMaskType.causal + if self.use_flash_attn: + if flash_attn_unpadded_func is None: + raise ImportError('FlashAttention is not installed, please install with ' + 'pip install flash-attn') + assert attention_type == AttnType.self_attn, ('FlashAttention code path only supports ' + 'self-attention for now') + assert self.attn_mask_type == AttnMaskType.causal, ('FlashAttention code path only ' + 'supports causal mask for now') + if rearrange is None: + raise ImportError('einops is not installed, please install with pip install einops') + + # Per attention head and per partition values. + world_size = mpu.get_tensor_model_parallel_world_size() + self.hidden_size_per_attention_head = core.utils.divide( + query_projection_size, config.num_attention_heads) + self.num_attention_heads_per_partition = core.utils.divide( + config.num_attention_heads, world_size) + + if self.group_query_attention: + if args.num_query_groups % world_size != 0: + raise NotImplementedError('Currently the num_query_groups should be ' + 'a multiple of the tensor parallel size') + self.num_query_groups_per_partition = core.utils.divide( + args.num_query_groups, world_size) + else: + self.num_query_groups_per_partition = self.num_attention_heads_per_partition + + # Strided linear layer. + if attention_type == AttnType.self_attn: + self.query_key_value = tensor_parallel.ColumnParallelLinear( + config.hidden_size, + query_projection_size + 2 * kv_projection_size, + config=config, + init_method=config.init_method, + bias=args.add_bias_linear or args.add_qkv_bias, + gather_output=False) + else: + assert attention_type == AttnType.cross_attn + + if self.group_query_attention: + raise NotImplementedError("Grouped query attention not implemented for cross-attention.") + assert query_projection_size == kv_projection_size + + self.query = tensor_parallel.ColumnParallelLinear( + config.hidden_size, + query_projection_size, + config=config, + init_method=config.init_method, + bias=config.add_bias_linear, + gather_output=False) + + self.key_value = tensor_parallel.ColumnParallelLinear( + config.hidden_size, + 2 * kv_projection_size, + config=config, + init_method=config.init_method, + bias=config.add_bias_linear, + gather_output=False) + + self.core_attention = CoreAttention(self.layer_number, config, + self.attn_mask_type) + self.checkpoint_core_attention = config.recompute_granularity == 'selective' + + if self.use_flash_attn: + self.core_attention_flash = FlashSelfAttention( + causal=True, attention_dropout=config.attention_dropout + ) + + # Output. 
+ self.dense = tensor_parallel.RowParallelLinear( + query_projection_size, + config.hidden_size, + config=config, + init_method=config.output_layer_init_method, + bias=args.add_bias_linear, + input_is_parallel=True, + skip_bias_add=True) + + def _checkpointed_attention_forward(self, query_layer, key_layer, + value_layer, attention_mask, + rotary_pos_emb=None): + """Forward method with activation checkpointing.""" + def custom_forward(*inputs): + query_layer = inputs[0] + key_layer = inputs[1] + value_layer = inputs[2] + attention_mask = inputs[3] + output_ = self.core_attention(query_layer, key_layer, + value_layer, attention_mask) + return output_ + + q_pos_emb, k_pos_emb = (None, None) if rotary_pos_emb is None \ + else rotary_pos_emb + + hidden_states = tensor_parallel.checkpoint( + custom_forward, + False, query_layer, key_layer, value_layer, attention_mask, + q_pos_emb, k_pos_emb) + + return hidden_states + + def _allocate_memory(self, inference_max_sequence_len, batch_size, num_attention_heads): + return torch.empty( + inference_max_sequence_len, + batch_size, + num_attention_heads, + self.hidden_size_per_attention_head, + dtype=self.params_dtype, + device=torch.cuda.current_device()) + + def forward(self, hidden_states, attention_mask, + encoder_output=None, inference_params=None, + rotary_pos_emb=None): + # hidden_states: [sq, b, h] + + # ================================================= + # Pre-allocate memory for key-values for inference. + # ================================================= + is_first_step = False + if inference_params: + if self.layer_number not in inference_params.key_value_memory_dict: + inf_max_seq_len = inference_params.max_sequence_length + inf_max_batch_size = inference_params.max_batch_size + inference_key_memory = self._allocate_memory( + inf_max_seq_len, inf_max_batch_size, + self.num_query_groups_per_partition) + inference_value_memory = self._allocate_memory( + inf_max_seq_len, inf_max_batch_size, + self.num_query_groups_per_partition) + + inference_params.key_value_memory_dict[self.layer_number] = ( + inference_key_memory, inference_value_memory) + is_first_step = True + else: + inference_key_memory, inference_value_memory = \ + inference_params.key_value_memory_dict[self.layer_number] + + # ===================== + # Query, Key, and Value + # ===================== + if self.attention_type == AttnType.self_attn: + + # Attention heads [sq, b, h] --> [sq, b, ng * (np/ng + 2) * hn)] + mixed_x_layer, _ = self.query_key_value(hidden_states) + + # [sq, b, hp] --> [sq, b, ng, (np/ng + 2) * hn] + new_tensor_shape = mixed_x_layer.size()[:-1] + ( + self.num_query_groups_per_partition, + ( + (self.num_attention_heads_per_partition // self.num_query_groups_per_partition + 2) + * self.hidden_size_per_attention_head + ), + ) + mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) + + # [sq, b, ng, (np/ng + 2) * hn] --> [sq, b, ng, np/ng * hn], [sq, b, ng, hn], [sq, b, ng, hn] + (query_layer, + key_layer, + value_layer) = torch.split( + mixed_x_layer, + [ + ( + self.num_attention_heads_per_partition // self.num_query_groups_per_partition + * self.hidden_size_per_attention_head + ), + self.hidden_size_per_attention_head, + self.hidden_size_per_attention_head + ], + dim=3) + + # [sq, b, ng, np/ng * hn] -> [sq, b, np, hn] - + query_layer = query_layer.view(query_layer.size(0), query_layer.size(1), -1, self.hidden_size_per_attention_head) + else: + # Attention heads [sk, b, h] --> [sk, b, (np * 2 * hn)] + mixed_kv_layer, _ = self.key_value(encoder_output) + + # [sk, b, 
(np * 2 * hn)] --> [sk, b, np, 2 * hn] + new_tensor_shape = mixed_kv_layer.size()[:-1] + \ + (self.num_attention_heads_per_partition, + 2 * self.hidden_size_per_attention_head) + mixed_kv_layer = mixed_kv_layer.view(*new_tensor_shape) + + # [sk, b, np, 2 * hn] --> 2 [sk, b, np, hn] + (key_layer, + value_layer) = tensor_parallel.split_tensor_along_last_dim(mixed_kv_layer, 2) + + # Attention head [sq, b, h] --> [sq, b, hp] + query_layer, _ = self.query(hidden_states) + # [sq, b, hp] --> [sq, b, np, hn] + new_tensor_shape = query_layer.size()[:-1] + \ + (self.num_attention_heads_per_partition, + self.hidden_size_per_attention_head) + query_layer = query_layer.view(*new_tensor_shape) + + # ================================== + # Adjust key and value for inference + # ================================== + + # duplicate the pos_emb for self attention + if rotary_pos_emb is not None: + if isinstance(rotary_pos_emb, tuple): + rotary_pos_emb = rotary_pos_emb + else: + rotary_pos_emb = ((rotary_pos_emb,) * 2) + + if inference_params: + batch_start = inference_params.batch_size_offset + batch_end = batch_start + key_layer.size(1) + assert batch_end <= inference_key_memory.size(1) + sequence_start = inference_params.sequence_len_offset + sequence_end = sequence_start + key_layer.size(0) + assert sequence_end <= inference_key_memory.size(0), ("Current sequence length is " + "longer than expected maximum sequence length! Increase inference_max_seq_length.") + # Copy key and values. + inference_key_memory[sequence_start:sequence_end, + batch_start:batch_end, ...] = key_layer + inference_value_memory[sequence_start:sequence_end, + batch_start:batch_end, ...] = value_layer + key_layer = inference_key_memory[ + :sequence_end, batch_start:batch_end, ...] + value_layer = inference_value_memory[ + :sequence_end, batch_start:batch_end, ...] + + + # adjust the key rotary positional embedding + if rotary_pos_emb is not None: + q_pos_emb, k_pos_emb = rotary_pos_emb + # need to cross check this condition during inference + # if not set_inference_key_value_memory: + if not is_first_step: + # In inference, we compute one token at a time. + # Select the correct positional embedding + # (only the last token in the sequence) + q_pos_emb = q_pos_emb[sequence_end - 1 : sequence_end] + else: + # In the first forward pass of inference, + # we use the entire provided prefix. + # q_pos_emb here has the rope embeddings of the entire + # prefix + to-be-generated output so + # we slice to just the prefix. 
+ q_pos_emb = q_pos_emb[:sequence_end, :, :, :] + k_pos_emb = k_pos_emb[:sequence_end, :, :, :] + rotary_pos_emb = (q_pos_emb, k_pos_emb) + + # ================================== + # core attention computation + # ================================== + + # expand the key_layer and value_layer [sk, b, ng, hn] -> [sk, b, np, hn] + if self.num_attention_heads_per_partition // self.num_query_groups_per_partition > 1: + key_layer = key_layer.repeat_interleave( + self.num_attention_heads_per_partition // self.num_query_groups_per_partition, + dim = 2 + ) + value_layer = value_layer.repeat_interleave( + self.num_attention_heads_per_partition // self.num_query_groups_per_partition, + dim = 2 + ) + + # apply relative positional encoding (rotary embedding) + if rotary_pos_emb is not None: + q_pos_emb, k_pos_emb = rotary_pos_emb + query_layer = apply_rotary_pos_emb(query_layer, q_pos_emb,self.config) + key_layer = apply_rotary_pos_emb(key_layer, k_pos_emb,self.config) + # TODO, can apply positional embedding to value_layer so it has + # absolute positional embedding. + # otherwise, only relative positional embedding takes effect + # value_layer = apply_rotary_pos_emb(value_layer, k_pos_emb) + + if not self.use_flash_attn: + if self.checkpoint_core_attention: + context_layer = self._checkpointed_attention_forward( + query_layer, key_layer, value_layer, attention_mask) + else: + context_layer = self.core_attention( + query_layer, key_layer, value_layer, attention_mask) + else: + q, k, v = [rearrange(x, 's b ... -> b s ...').contiguous() + for x in (query_layer, key_layer, value_layer)] + if not self.sequence_parallel: + with tensor_parallel.get_cuda_rng_tracker().fork(): + context_layer = self.core_attention_flash(q, k, v) + else: + context_layer = self.core_attention_flash(q, k, v) + context_layer = rearrange(context_layer, 'b s h d -> s b (h d)').contiguous() + + # ================= + # Output. [sq, b, h] + # ================= + + output, bias = self.dense(context_layer) + + return output, bias + + +def bias_dropout_add(x, bias, residual, prob, training): + # type: (Tensor, Optional[Tensor], Tensor, float, bool) -> Tensor + if bias is not None: + x = x + bias + out = torch.nn.functional.dropout(x, p=prob, training=training) + out = residual + out + return out + + +def get_bias_dropout_add(training): + def _bias_dropout_add(x, bias, residual, prob): + return bias_dropout_add(x, bias, residual, prob, training) + return _bias_dropout_add + + +@jit_fuser +def bias_dropout_add_fused_train(x: torch.Tensor, + bias: Optional[torch.Tensor], + residual: torch.Tensor, + prob: float) -> torch.Tensor: + return bias_dropout_add(x, bias, residual, prob, True) + + +@jit_fuser +def bias_dropout_add_fused_inference(x: torch.Tensor, + bias: Optional[torch.Tensor], + residual: torch.Tensor, + prob: float) -> torch.Tensor: + return bias_dropout_add(x, bias, residual, prob, False) + + +class ParallelTransformerLayer(MegatronModule): + """A single transformer layer. + + Transformer layer takes input with size [s, b, h] and returns an + output of the same size. 
+ """ + + def __init__(self, config, + layer_number, layer_type=LayerType.encoder, + self_attn_mask_type=AttnMaskType.padding, + drop_path_rate=0.): + args = get_args() + + super(ParallelTransformerLayer, self).__init__() + self.layer_number = layer_number + self.layer_type = layer_type + + self.apply_residual_connection_post_norm \ + = config.apply_residual_connection_post_layernorm + + self.bf16 = config.bf16 + self.fp32_residual_connection = config.fp32_residual_connection + + # Normalize the input data. + self.input_norm = get_norm(config) + + # Self attention. + self.self_attention = ParallelAttention( + config, + layer_number, + attention_type=AttnType.self_attn, + attn_mask_type=self_attn_mask_type) + self.hidden_dropout = config.hidden_dropout + self.bias_dropout_fusion = config.bias_dropout_fusion + self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0.0 else None + + # Normalize the attention output + self.post_attention_norm = get_norm(config) + + # Cross attention. + if self.layer_type in (LayerType.decoder, + LayerType.retro_decoder, + LayerType.retro_decoder_with_retriever, + LayerType.retro_encoder): + self.inter_attention = ParallelAttention( + config, + layer_number, + attention_type=AttnType.cross_attn) + # Normalize the attention output. + self.post_inter_attention_norm = get_norm(config) + + # MLP + if args.num_experts is not None: + self.mlp = SwitchMLP(config) + else: + self.mlp = ParallelMLP(config) + + # Set bias+dropout+add fusion grad_enable execution handler. + TORCH_MAJOR = int(torch.__version__.split('.')[0]) + TORCH_MINOR = int(torch.__version__.split('.')[1]) + use_nvfuser = TORCH_MAJOR > 1 or (TORCH_MAJOR == 1 and TORCH_MINOR >= 10) + self.bias_dropout_add_exec_handler = \ + nullcontext if use_nvfuser else torch.enable_grad + + if args.retro_add_retriever: + self.retro_num_neighbors = args.retro_num_neighbors + self.retro_chunk_length = args.retro_chunk_length + self.retro_retrieved_length = \ + args.retro_num_retrieved_chunks * args.retro_chunk_length + + # Retriever (bi-directional transformer with cross attention) + if layer_type == LayerType.retro_decoder_with_retriever: + self.retriever = ParallelTransformer( + config=config, + model_type=ModelType.retro_encoder, + self_attn_mask_type=AttnMaskType.padding, + pre_process=True, + post_process=False, + ) + self._retriever_key = 'retriever' + else: + self.retriever = None + + def default_decoder_cross_attention(self, + encoder_output, + enc_dec_attn_mask, + norm_input, + norm_output, + bias_dropout_add_func): + '''Cross attention for a standard encoder-decoder model.''' + + # Attention. + attention_output, attention_bias = \ + self.inter_attention(norm_output, + enc_dec_attn_mask, + encoder_output=encoder_output) + + # Residual connection. + if self.apply_residual_connection_post_norm: + residual = norm_output + else: + residual = norm_input + + if attention_bias is not None: + attention_bias = attention_bias.expand_as(residual) + + # Bias-dropout-add. + with self.bias_dropout_add_exec_handler(): + norm_input = bias_dropout_add_func( + attention_output, + attention_bias, + residual, + self.hidden_dropout) + + # Normalize. + norm_output = self.post_inter_attention_norm(norm_input) + + return norm_input, norm_output + + def retro_encoder_cross_attention(self, + retriever_output, + norm_input, + norm_output, + bias_dropout_add_func): + """Cross attention for Retro encoder. + + Notation: + ns : Sequence length. + bs : Batch size. + d : Hidden size. 
+ l : Number of chunks per sample (i.e., seq_length/chunk_length). + k : Number of neighbors. + r : Number of retrieved tokens (neighbors + continuation). + """ + + ns, bs, d = norm_output.shape # [r, bs * l * k, d] + + # Divide sequence dimension into chunks. + chunked_outputs = norm_output.reshape(self.retro_retrieved_length, + -1, + self.retro_num_neighbors, + d) + chunked_outputs_before_norm = \ + norm_input.reshape(self.retro_retrieved_length, -1, + self.retro_num_neighbors, d) # [r, bs*l, k, d] + + # Per-chunk attention. + norm_inputs = [] + norm_outputs = [] + for k in range(self.retro_num_neighbors): + + # Attention. + chunked_output = chunked_outputs[:,:,k].contiguous() + attention_output, attention_bias = \ + self.inter_attention( + chunked_output, # Q (neighbor embedding) + None, + encoder_output=retriever_output) # K, V (hidden act) + + # Residual connection. + if self.apply_residual_connection_post_norm: + residual = chunked_output + else: + residual = chunked_outputs_before_norm[:,:,k] + + # Re-enable torch grad to enable fused optimization. + with torch.enable_grad(): + norm_input = bias_dropout_add_func( + attention_output, + None if attention_bias is None else attention_bias.expand_as(residual), + residual, + self.hidden_dropout) + norm_inputs.append(norm_input) + + # Layer norm. + norm_output = self.post_inter_attention_norm(norm_input) + norm_outputs.append(norm_output) + + # Concatenate layer norms. + # norm_input : [r, k * bs * l, d] + # norm_output : [r, k * bs * l, d] + norm_input = torch.stack(norm_inputs, dim=1).reshape(ns, bs, d) + norm_output = torch.stack(norm_outputs, dim=1).reshape(ns, bs, d) + + return norm_input, norm_output + + def retro_decoder_cross_attention(self, + retriever_input, + retriever_output, + retriever_attn_mask, + norm_input, + norm_output, + inference_params, + bias_dropout_add_func): + """Cross attention for Retro decoder. + + Notation: + ns : Sequence length. + bs : Batch size. + d : Hidden size. + l : Number of chunks per sample (i.e., seq_length/chunk_length). + m : Number of tokens per chunk. + k : Number of neighbors. + r : Number of retrieved tokens (neighbors + continuation). + """ + + ns, bs, d = norm_output.shape + l = int(np.ceil(ns / self.retro_chunk_length)) + + # Retrieve neighbors. + if self.layer_type == LayerType.retro_decoder_with_retriever: + first_ns = ns % self.retro_chunk_length + if first_ns > 0: + first_chunk, rest_chunk = \ + norm_output[:first_ns], norm_output[first_ns:] + first_chunk = torch.nn.functional.pad( + first_chunk, + (0, 0, 0, 0, 0, self.retro_chunk_length - first_ns), + 'constant', + 0) + chunked_output = \ + torch.cat((first_chunk, rest_chunk), dim=0) # [l * m, bs, d] + else: + chunked_output = norm_output # [l * m, bs, d] + chunked_output = chunked_output \ + .reshape(l, self.retro_chunk_length, bs, d) \ + .permute(1, 2, 0, 3) \ + .reshape(self.retro_chunk_length, bs * l, d) \ + .contiguous() + + # Get Encoder Output + retriever_output = self.retriever( + hidden_states=retriever_input, + attention_mask=retriever_attn_mask, + retriever_output=chunked_output, + retriever_attn_mask=retriever_attn_mask, + inference_params=inference_params) # [r, k * bs * l , d] + retriever_output = retriever_output.reshape( + self.retro_retrieved_length * self.retro_num_neighbors, bs * l, d) # [r * k, bs * l, d] + + # Chunks. 
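+ # Shift the sequence by `pad` tokens before chunking so that the cross
+ # attention remains causal with respect to the retrieved neighbors; the
+ # shift is undone by re-padding the front after attention.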
+ pad = (ns - 1) % self.retro_chunk_length + attending_chunks = norm_output[pad:] + padded_chunks = torch.nn.functional.pad( + attending_chunks, + (0, 0, 0, 0, 0, self.retro_chunk_length - 1), + 'constant', 0) + padded_chunked_output = padded_chunks \ + .reshape(l, self.retro_chunk_length, bs, d) \ + .permute(1, 2, 0, 3) + padded_chunked_output = padded_chunked_output.reshape( + self.retro_chunk_length, bs * l, d).contiguous() + + # Encoder output. + attention_output, attention_bias = \ + self.inter_attention(padded_chunked_output, + None, + encoder_output=retriever_output) + + # Residual connection. + if self.apply_residual_connection_post_norm: + residual = norm_output + else: + residual = norm_input + + # Re-enable torch grad to enable fused optimization. + with torch.enable_grad(): + norm_input = bias_dropout_add_func( + attention_output, + None if attention_bias is None else attention_bias.expand_as(attention_output), + torch.zeros_like(attention_output), + self.hidden_dropout) + norm_input = norm_input \ + .reshape(self.retro_chunk_length, bs, l, d) \ + .permute(2, 0, 1, 3) # [l, m, bs, d] + norm_input = norm_input.reshape(self.retro_chunk_length * l, bs, d) + norm_input = torch.nn.functional.pad( + norm_input, + (0, 0, 0, 0, pad, 0), + 'constant', 0)[:ns] # [ns, b, d] + # TODO: better redesign with inference param + args = get_args() + norm_input = args.retro_attention_gate * norm_input + residual + + # Layer norm post the decoder attention + norm_output = self.post_inter_attention_norm(norm_input) + + return retriever_output, norm_input, norm_output + + def forward(self, hidden_states, attention_mask, + encoder_output=None, enc_dec_attn_mask=None, + retriever_input=None, + retriever_output=None, + retriever_attn_mask=None, + inference_params=None, + rotary_pos_emb=None): + + # Update the params in case the retro param changes during inference + # TODO: better redesign with inference param + args = get_args() + if args.retro_add_retriever: + self.retro_num_neighbors = args.retro_num_neighbors + self.retro_chunk_length = args.retro_chunk_length + self.retro_retrieved_length = \ + args.retro_num_retrieved_chunks * args.retro_chunk_length + + # hidden_states: [s, b, h] + + # Layer norm at the beginning of the transformer layer. + norm_output = self.input_norm(hidden_states) + + # Self attention. + attention_output, attention_bias = \ + self.self_attention( + norm_output, + attention_mask, + inference_params=inference_params, + rotary_pos_emb=rotary_pos_emb) + + # Residual connection. + if self.apply_residual_connection_post_norm: + residual = norm_output + else: + residual = hidden_states + + if self.drop_path is None: + # jit scripting for a nn.module (with dropout) is not + # triggering the fusion kernel. For now, we use two + # different nn.functional routines to account for varying + # dropout semantics during training and inference phases. 
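+ # The fused variants bake the training/inference dropout mode into the
+ # jit-compiled function, so the appropriate one is selected here instead
+ # of branching inside the fused kernel.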
+ if self.bias_dropout_fusion: + if self.training: + bias_dropout_add_func = bias_dropout_add_fused_train + else: + bias_dropout_add_func = bias_dropout_add_fused_inference + else: + bias_dropout_add_func = get_bias_dropout_add(self.training) + + if attention_bias is not None: + attention_bias = attention_bias.expand_as(residual) + with self.bias_dropout_add_exec_handler(): + norm_input = bias_dropout_add_func( + attention_output, + attention_bias, + residual, + self.hidden_dropout) + else: + out = torch.nn.functional.dropout(attention_output + attention_bias, + p=self.hidden_dropout, + training=self.training) + norm_input = residual + self.drop_path(out) + + # Layer norm post the self attention. + norm_output = self.post_attention_norm(norm_input) + + # Cross attention. + if self.layer_type == LayerType.encoder: + pass + elif self.layer_type == LayerType.decoder: + norm_input, norm_output = \ + self.default_decoder_cross_attention( + encoder_output, + enc_dec_attn_mask, + norm_input, + norm_output, + bias_dropout_add_func) + elif self.layer_type == LayerType.retro_encoder: + norm_input, norm_output = \ + self.retro_encoder_cross_attention( + retriever_output, + norm_input, + norm_output, + bias_dropout_add_func) + elif self.layer_type in (LayerType.retro_decoder, + LayerType.retro_decoder_with_retriever): + retriever_output, norm_input, norm_output = \ + self.retro_decoder_cross_attention( + retriever_input, + retriever_output, + retriever_attn_mask, + norm_input, + norm_output, + inference_params, + bias_dropout_add_func) + else: + raise Exception("Unsupported layer type, '%s'." % + self.layer_type.name) + + # MLP. + mlp_output, mlp_bias = self.mlp(norm_output) + + # Second residual connection. + if self.apply_residual_connection_post_norm: + residual = norm_output + else: + residual = norm_input + + if self.drop_path is None: + if mlp_bias is not None: + mlp_bias = mlp_bias.expand_as(residual) + with self.bias_dropout_add_exec_handler(): + output = bias_dropout_add_func( + mlp_output, + mlp_bias, + residual, + self.hidden_dropout) + + # Jit compiled function creates 'view' tensor. This tensor + # potentially gets saved in the MPU checkpoint function context, + # which rejects view tensors. While making a viewless tensor here + # won't result in memory savings (like the data loader, or + # p2p_communication), it serves to document the origin of this + # 'view' tensor. + output = core.utils.make_viewless_tensor(inp = output, + requires_grad = output.requires_grad, + keep_graph = True) + + else: + if mlp_bias is not None: + mlp_output = mlp_output + mlp_bias + out = torch.nn.functional.dropout(mlp_output, + p=self.hidden_dropout, + training=self.training) + output = residual + self.drop_path(out) + + if self.layer_type == LayerType.retro_decoder_with_retriever: + return output, retriever_output + else: + return output + + +class NoopTransformerLayer(MegatronModule): + """A single 'no-op' transformer layer. + + The sole purpose of this layer is for when a standalone embedding layer + is used (i.e., args.account_for_embedding_in_pipeline_split == True). In this case, + zero transformer layers are assigned when pipeline rank == 0. Additionally, + when virtual pipeline rank >= 1, zero total model parameters are created + (virtual rank 0 contains the input embedding). This results in the model's + input and output tensors being the same, which causes an error when + performing certain memory optimizations on the output tensor (e.g., + deallocating it). 
Thus, this layer disconnects the input from the output + via a clone. Since ranks containing a no-op layer are generally under- + utilized (both compute and memory), there's no worry of any performance + degradation. + """ + + def __init__(self, layer_number): + super().__init__() + self.layer_number = layer_number + + def forward(self, hidden_states, attention_mask, + encoder_output=None, enc_dec_attn_mask=None, + inference_params=None): + return hidden_states.clone() + + +def _get_num_layers(args, model_type, is_decoder=False): + """Compute the number of transformer layers resident on the current rank.""" + is_encoder_and_decoder_model = (model_type == ModelType.encoder_and_decoder) + if model_type == ModelType.retro_encoder: + num_layers = args.retro_encoder_layers + elif mpu.get_pipeline_model_parallel_world_size() > 1: + assert not is_encoder_and_decoder_model, "This is no longer supported." + assert args.num_layers == args.encoder_num_layers + assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ + 'num_layers must be divisible by transformer_pipeline_model_parallel_size' + + # When a standalone embedding stage is used, all transformer layers + # are divided among pipeline rank >= 1, while on pipeline rank 0, + # ranks either contain the input embedding layer (virtual pp rank 0), + # or no layers at all (virtual pp rank >= 1). + num_layers = ( + 0 + if args.account_for_embedding_in_pipeline_split + and mpu.get_pipeline_model_parallel_rank() == 0 else + args.num_layers // args.transformer_pipeline_model_parallel_size + ) + else: + if not is_decoder: + num_layers = args.encoder_num_layers + else: + num_layers = args.decoder_num_layers + return num_layers + + +def _get_layer_type(model_type, default_layer_type, retro_layer_numbers, + layer_number): + args = get_args() + if args.retro_add_retriever and layer_number in retro_layer_numbers: + if model_type == ModelType.retro_decoder: + return LayerType.retro_decoder_with_retriever \ + if layer_number == retro_layer_numbers[0] \ + else LayerType.retro_decoder + elif model_type == ModelType.retro_encoder: + return LayerType.retro_encoder + else: + raise Exception("Unsupported model type, '%s'." % model_type) + else: + return default_layer_type + + +class ParallelTransformer(MegatronModule): + """Transformer class.""" + + def __init__(self, config, + model_type, layer_type=LayerType.encoder, + self_attn_mask_type=AttnMaskType.padding, + post_norm=True, + pre_process=True, + post_process=True, + drop_path_rate=0.0): + super(ParallelTransformer, self).__init__() + args = get_args() + + self.layer_type = layer_type + self.model_type = model_type + self.bf16 = config.bf16 + self.fp32_residual_connection = config.fp32_residual_connection + self.post_norm = post_norm + self.pre_process = pre_process + self.post_process = post_process + self.input_tensor = None + self.drop_path_rate = drop_path_rate + self.transformer_impl = args.transformer_impl + self.retro_add_retriever = args.retro_add_retriever + + # Store activation checkpointing flag. + self.recompute_granularity = config.recompute_granularity + self.recompute_method = config.recompute_method + self.recompute_num_layers = config.recompute_num_layers + self.distribute_saved_activations = \ + config.distribute_saved_activations and not config.sequence_parallel + + self.sequence_parallel = config.sequence_parallel + + # Transformer Engine Init. 
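+ # The version flags below gate keyword arguments that only newer
+ # Transformer Engine releases accept (bias from v0.8, activation from
+ # v0.10, normalization from v0.11).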
+ self.transformer_engine_v_0_10 = False + self.transformer_engine_v_0_11 = False + self.transformer_engine_v_0_8 = False + if self.transformer_impl == 'transformer_engine': + global transformer_engine + import transformer_engine + + if core.utils.is_te_min_version("0.8.0"): + self.transformer_engine_v_0_8 = True + if core.utils.is_te_min_version("0.10.0"): + self.transformer_engine_v_0_10 = True + if core.utils.is_te_min_version("0.11.0"): + self.transformer_engine_v_0_11 = True + + assert not args.squared_relu, ("TransformerEngine does not support squared " + "relu activation.") + + self.use_fp8 = args.fp8 is not None + self.fp8_recipe = None + self.fp8_group = None + if self.use_fp8: + assert args.transformer_impl == 'transformer_engine', \ + 'transformer-engine required for fp8 training and inference' + self.fp8_group = mpu.get_amax_reduction_group(tp_only_amax_red=config.tp_only_amax_red) + if args.fp8 == "e4m3": + fp8_format = transformer_engine.common.recipe.Format.E4M3 + elif args.fp8 == "hybrid": + fp8_format = transformer_engine.common.recipe.Format.HYBRID + else: + raise ValueError("The DelayedScaling recipe only supports E4M3 and HYBRID formats.") + self.fp8_recipe = transformer_engine.common.recipe.DelayedScaling( + margin=args.fp8_margin, + interval=args.fp8_interval, + fp8_format=fp8_format, + amax_history_len=args.fp8_amax_history_len, + amax_compute_algo=args.fp8_amax_compute_algo, + override_linear_precision=(False, False, not args.fp8_wgrad), + ) + + self.num_microbatches_in_previous_step = -1 + self.microbatch_count = 0 + self.checkpoint_core_attention = config.recompute_granularity == 'selective' + + # Number of layers. + self.num_layers = _get_num_layers(args, model_type, + layer_type==LayerType.decoder) + + self.drop_path_rates = [ + rate.item() for rate in + torch.linspace(0, self.drop_path_rate, config.num_layers)] + + self.retro_layer_numbers = None + if model_type == ModelType.retro_decoder: + retro_layer_start = 6 if config.num_layers <= 15 else 9 + self.retro_layer_numbers = \ + np.arange(retro_layer_start, args.num_layers + 1, 3).tolist() + if model_type == ModelType.retro_encoder: + self.retro_layer_numbers = [1] + + # Transformer layers. + if args.retro_add_retriever: + assert self.recompute_granularity != 'full', \ + "Full recompute not supported for Retro." + assert args.transformer_impl == 'local', \ + "Transformer engine does not support Retro layers." + def build_layer(layer_number): + if args.transformer_impl == 'local': + current_layer_type = _get_layer_type( + model_type, layer_type, self.retro_layer_numbers, + layer_number) + return ParallelTransformerLayer( + config, + layer_number, + layer_type=current_layer_type, + self_attn_mask_type=self_attn_mask_type, + drop_path_rate=self.drop_path_rates[layer_number - 1]) + else: + # This argument is only available from TE v0.10 onwards. + extra_transformer_engine_kwargs = {} + if self.transformer_engine_v_0_8: + extra_transformer_engine_kwargs["bias"] = args.add_bias_linear + if self.transformer_engine_v_0_10: + extra_transformer_engine_kwargs["activation"] = "swiglu" if args.swiglu else "gelu" + if self.transformer_engine_v_0_11: + extra_transformer_engine_kwargs["normalization"] = args.normalization + assert config.attention_softmax_in_fp32, "TransformerEngine only supports softmax compute in FP32." + assert ( + (bool(int(os.getenv("NVTE_APPLY_QK_LAYER_SCALING", "0"))) and args.fp16) == config.apply_query_key_layer_scaling + ), ("Unsupported config for apply_query_key_layer_scaling in TransformerEngine. 
If --apply-query-key-layer-scaling is " + "provided, set env-var NVTE_APPLY_QK_LAYER_SCALING=1 and you must be using fp16.") + return transformer_engine.pytorch.TransformerLayer( + config.hidden_size, + config.ffn_hidden_size, + config.num_attention_heads, + layernorm_epsilon=config.layernorm_epsilon, + hidden_dropout=config.hidden_dropout, + attention_dropout=config.attention_dropout, + init_method=config.init_method, + output_layer_init_method=config.output_layer_init_method, + layer_number=layer_number, + kv_channels=config.kv_channels, + self_attn_mask_type=self_attn_mask_type.name, + tp_group=mpu.get_tensor_model_parallel_group() if mpu.is_initialized() else None, + tp_size=mpu.get_tensor_model_parallel_world_size(), + get_rng_state_tracker=get_cuda_rng_tracker + if get_cuda_rng_tracker().is_initialized() + else None, + fuse_wgrad_accumulation=config.gradient_accumulation_fusion, + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + sequence_parallel=config.sequence_parallel, + params_dtype=config.params_dtype, + apply_residual_connection_post_layernorm=config.apply_residual_connection_post_layernorm, + output_layernorm=False, + layer_type="encoder", + drop_path_rate=self.drop_path_rates[layer_number - 1], + set_parallel_mode=True, + fuse_qkv_params=True, + **extra_transformer_engine_kwargs) + + if config.virtual_pipeline_model_parallel_size is not None: + assert config.num_layers % config.virtual_pipeline_model_parallel_size == 0, \ + 'num_layers_per_stage must be divisible by ' \ + 'virtual_pipeline_model_parallel_size' + assert args.model_type != ModelType.encoder_and_decoder + # Number of layers in each model chunk is the number of layers in the stage, + # divided by the number of model chunks in a stage. + self.num_layers = self.num_layers // config.virtual_pipeline_model_parallel_size + # With 8 layers, 2 stages, and 4 model chunks, we want an assignment of + # layers to stages like (each list is a model chunk): + # Stage 0: [0] [2] [4] [6] + # Stage 1: [1] [3] [5] [7] + # With 8 layers, 2 stages, and 2 virtual stages, we want an assignment of + # layers to stages like (each list is a model chunk): + # Stage 0: [0, 1] [4, 5] + # Stage 1: [2, 3] [6, 7] + offset = mpu.get_virtual_pipeline_model_parallel_rank() * ( + config.num_layers // config.virtual_pipeline_model_parallel_size) + \ + (mpu.get_pipeline_model_parallel_rank() * self.num_layers) + else: + # Each stage gets a contiguous set of layers. + if args.model_type == ModelType.encoder_and_decoder and \ + mpu.get_pipeline_model_parallel_world_size() > 1: + pipeline_rank = mpu.get_pipeline_model_parallel_rank() + if layer_type == LayerType.encoder: + offset = pipeline_rank * self.num_layers + else: + num_ranks_in_enc = args.pipeline_model_parallel_split_rank + offset = (pipeline_rank - num_ranks_in_enc) * self.num_layers + else: + offset = mpu.get_pipeline_model_parallel_rank() * self.num_layers + + if self.num_layers == 0: + # When a standalone embedding stage is used (e.g., + # args.account_for_embedding_in_pipeline_split == True), virtual pipeline ranks + # on pipeline rank 0 will have zero transformer layers assigned to + # them. This results in the model's input and output tensors to be + # the same, which will cause failure for certain output tensor + # optimizations (e.g., pipeline output deallocation). To remedy + # this, we assign a 'no-op' layer on these ranks, which will + # disconnect the input tensor from the output tensor. 
+ self.num_layers = 1 + self.layers = torch.nn.ModuleList([ NoopTransformerLayer(1) ]) + else: + self.layers = torch.nn.ModuleList( + [build_layer(i + 1 + offset) for i in range(self.num_layers)]) + + # Update dropout rate for Retro encoder. + if model_type == ModelType.retro_encoder: + for layer in self.layers: + if layer.self_attention.use_flash_attn: + layer.self_attention.core_attention_flash.dropout_p = \ + args.retro_encoder_attention_dropout + else: + layer.self_attention.core_attention.attention_dropout.p = \ + args.retro_encoder_attention_dropout + layer.hidden_dropout = args.retro_encoder_hidden_dropout + + if self.post_process and self.post_norm: + # Final layer norm before output. + self.final_norm = get_norm(config) + + def _get_layer(self, layer_number): + return self.layers[layer_number] + + def _checkpointed_forward(self, hidden_states, attention_mask, + encoder_output, enc_dec_attn_mask, + rotary_pos_emb, is_first_microbatch): + """Forward method with activation checkpointing.""" + def custom(start, end): + def custom_forward(*args, **kwargs): + x_, *args = args + for index in range(start, end): + layer = self._get_layer(index) + x_ = layer(x_, *args, **kwargs) + return x_ + return custom_forward + + te_forward_kwargs = {} + if self.transformer_impl == 'transformer_engine': + te_forward_kwargs['is_first_microbatch'] = is_first_microbatch + if self.transformer_engine_v_0_10: + te_forward_kwargs['rotary_pos_emb'] = rotary_pos_emb + + if self.recompute_method == 'uniform': + # Uniformly divide the total number of Transformer layers and + # checkpoint the input activation of each divided chunk. + # A method to further reduce memory usage by storing only one + # activation checkpoint per chunk of layers. + l = 0 + while l < self.num_layers: + if self.transformer_impl == 'transformer_engine': + hidden_states = transformer_engine.pytorch.checkpoint( + custom(l, l + self.recompute_num_layers), + self.distribute_saved_activations, + tensor_parallel.get_cuda_rng_tracker, + mpu.get_tensor_model_parallel_group(), + hidden_states, attention_mask, encoder_output, + enc_dec_attn_mask, **te_forward_kwargs) + else: + hidden_states = tensor_parallel.checkpoint( + custom(l, l + self.recompute_num_layers), + self.distribute_saved_activations, + hidden_states, attention_mask, + encoder_output, enc_dec_attn_mask, + None, None, None, None, rotary_pos_emb) + + l += self.recompute_num_layers + + elif self.recompute_method == 'block': + # Checkpoint the input activation of only a set number of individual + # Transformer layers and skip the rest. + # A method that makes fuller use of device memory by avoiding + # redundant re-computation for the remaining layers. 
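+ # Only the first `recompute_num_layers` layers are checkpointed; the
+ # remaining layers run without activation recomputation.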
+ for l in range(self.num_layers): + if l < self.recompute_num_layers: + if self.transformer_impl == 'transformer_engine': + hidden_states = transformer_engine.pytorch.checkpoint( + custom(l, l + 1), + self.distribute_saved_activations, + tensor_parallel.get_cuda_rng_tracker, + mpu.get_tensor_model_parallel_group(), + hidden_states, attention_mask, encoder_output, + enc_dec_attn_mask, **te_forward_kwargs) + else: + hidden_states = tensor_parallel.checkpoint( + custom(l, l + 1), + self.distribute_saved_activations, + hidden_states, attention_mask, + encoder_output, enc_dec_attn_mask, + None, None, None, None, rotary_pos_emb) + else: + if self.transformer_impl == 'transformer_engine': + hidden_states = custom(l, l + 1)( + hidden_states, attention_mask, encoder_output, + enc_dec_attn_mask, **te_forward_kwargs) + else: + hidden_states = custom(l, l + 1)( + hidden_states, attention_mask, + encoder_output, enc_dec_attn_mask, + None, None, None, None, rotary_pos_emb) + else: + raise ValueError("Invalid activation recompute method.") + + return hidden_states + + def set_input_tensor(self, input_tensor): + """Set input tensor to be used instead of forward()'s input. + + When doing pipeline parallelism the input from the previous + stage comes from communication, not from the input, so the + model's forward_step_func won't have it. This function is thus + used by internal code to bypass the input provided by the + forward_step_func""" + self.input_tensor = input_tensor + + def forward(self, hidden_states, attention_mask, + encoder_output=None, enc_dec_attn_mask=None, + retriever_input=None, + retriever_output=None, + retriever_attn_mask=None, + inference_params=None, + rotary_pos_emb=None): + # hidden_states: [s, b, h] + + # Checks. + if inference_params: + assert self.recompute_granularity is None, \ + 'inference does not work with activation checkpointing' + + if not self.pre_process: + # See set_input_tensor() + hidden_states = self.input_tensor + + # Viewless tensor. + # - We only need to create a viewless tensor in the case of micro batch + # size (mbs) == 1, since in this case, 'hidden_states.transpose()' + # above creates a view tensor, and '.contiguous()' is a pass-through. + # For mbs >= 2, '.contiguous()' creates a new tensor, eliminating + # the need to make it viewless. + # + # However, we don't explicitly check mbs == 1 here because + # make_viewless_tensor() has negligible overhead when its input + # is already viewless. + # + # - For the 'else' case above, calling make_viewless_tensor() here is + # likely redundant, since p2p_communication.py (likely originator) + # already creates viewless tensors. That said, make_viewless_tensor() + # is called here to be future-proof and corner-case-proof. + hidden_states = core.utils.make_viewless_tensor( + hidden_states, + requires_grad=True, + keep_graph=True, + ) + + # RNG context. + if self.sequence_parallel: + rng_context = tensor_parallel.get_cuda_rng_tracker().fork() + else: + rng_context = nullcontext() + + # Forward layers. 
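+ # All layers run inside a single RNG fork (used for sequence parallelism)
+ # and, when fp8 is enabled, a single fp8_autocast region.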
+ with rng_context: + # The fp8_autocast context manager is a no-op when enabled=False + # The if...else serves to short circuit name resolution for fp8_autocast + with transformer_engine.pytorch.fp8_autocast( + enabled=self.use_fp8, + fp8_recipe=self.fp8_recipe, + fp8_group=self.fp8_group + ) if self.use_fp8 else nullcontext(): + # Determine if the current iteration is the first microbatch + if self.num_microbatches_in_previous_step != get_num_microbatches(): + self.microbatch_count = 0 # Reset count on new batch size rampup interval + self.num_microbatches_in_previous_step = get_num_microbatches() + is_first_microbatch = self.microbatch_count % get_num_microbatches() == 0 + + # Forward pass. + if self.recompute_granularity == 'full': + hidden_states = self._checkpointed_forward(hidden_states, + attention_mask, + encoder_output, + enc_dec_attn_mask, + rotary_pos_emb, + is_first_microbatch) + else: + forward_kwargs = { + 'encoder_output': encoder_output, + 'enc_dec_attn_mask': enc_dec_attn_mask, + 'inference_params': inference_params, + } + + if self.transformer_impl == 'transformer_engine': + forward_kwargs['is_first_microbatch'] = is_first_microbatch + forward_kwargs['checkpoint_core_attention'] = self.checkpoint_core_attention + if self.transformer_engine_v_0_10: + forward_kwargs['rotary_pos_emb'] = rotary_pos_emb + else: + forward_kwargs['rotary_pos_emb'] = rotary_pos_emb + forward_kwargs['retriever_input'] = retriever_input + forward_kwargs['retriever_output'] = retriever_output + forward_kwargs['retriever_attn_mask'] = retriever_attn_mask + + for index in range(self.num_layers): + layer = self._get_layer(index) + + hidden_states = layer( + hidden_states, + attention_mask, + **forward_kwargs) + + # First Retro decoder layer returns both hidden_states + # and retriever_output. Make retriever_output available + # to subsequent Retro layers. + if isinstance(hidden_states, tuple): + assert len(hidden_states) == 2 + hidden_states, retriever_output = hidden_states + forward_kwargs["retriever_output"] = retriever_output + + # Skip counter update for eval and activation checkpointing + if torch.is_grad_enabled() and self.training: + self.microbatch_count += 1 + + # Final layer norm. + if self.post_process and self.post_norm: + hidden_states = self.final_norm(hidden_states) + + return hidden_states + + def load_state_dict(self, state_dict, strict=True): + """Customize load.""" + + # Handle renaming layernorm -> norm in component names + state_dict_ = {} + for key in state_dict.keys(): + # Bypass TransformerEngine module parameters. 
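+ # (TE fused modules keep "layernorm" in their parameter names, so the
+ # rename below must leave them untouched.)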
+ if "layernorm_qkv" in key or "layernorm_mlp" in key: + state_dict_[key] = state_dict[key] + continue + newkey = key.replace("layernorm", "norm") + state_dict_[newkey] = state_dict[key] + + super().load_state_dict(state_dict_, strict) diff --git a/megatron/legacy/model/utils.py b/megatron/legacy/model/utils.py index 46e7640..5762000 100644 --- a/megatron/legacy/model/utils.py +++ b/megatron/legacy/model/utils.py @@ -9,8 +9,6 @@ import torch from megatron.training import get_args from megatron.legacy.model import LayerNorm, RMSNorm from megatron.core.jit import jit_fuser -import torch._dynamo -torch._dynamo.config.suppress_errors = True def init_method_normal(sigma): """Init method based on N(0, sigma).""" @@ -60,7 +58,7 @@ def openai_gelu(x): def erf_gelu(x): return x * 0.5 * (torch.erf(x / 1.41421).to(dtype=x.dtype)+torch.ones_like(x).to(dtype=x.dtype)) -@torch.compile(mode="max-autotune-no-cudagraphs") + def get_norm(config): args = get_args() if args.normalization == "LayerNorm": diff --git a/megatron/training/arguments.py b/megatron/training/arguments.py index b1a5076..6fb1b1b 100644 --- a/megatron/training/arguments.py +++ b/megatron/training/arguments.py @@ -1,2223 +1,2419 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Megatron arguments.""" - -import argparse -import dataclasses -import json -import os -import types -import warnings -from packaging.version import Version as PkgVersion - -import torch -import torch.nn.functional as F - -from megatron.core.dist_checkpointing.validation import StrictHandling -from megatron.core.models.retro.utils import ( - get_config_path as get_retro_config_path, - get_gpt_data_dir as get_retro_data_dir, -) -from megatron.core.transformer import TransformerConfig, MLATransformerConfig -from megatron.core.transformer.enums import AttnBackend -from megatron.core.utils import is_torch_min_version -from megatron.training.activations import squared_relu -from megatron.training.utils import update_use_dist_ckpt - - -def parse_args(extra_args_provider=None, ignore_unknown_args=False): - """Parse all arguments.""" - parser = argparse.ArgumentParser(description='Megatron-LM Arguments', - allow_abbrev=False) - - # Standard arguments. - parser = _add_network_size_args(parser) - parser = _add_regularization_args(parser) - parser = _add_training_args(parser) - parser = _add_initialization_args(parser) - parser = _add_learning_rate_args(parser) - parser = _add_checkpointing_args(parser) - parser = _add_mixed_precision_args(parser) - parser = _add_distributed_args(parser) - parser = _add_validation_args(parser) - parser = _add_data_args(parser) - parser = _add_tokenizer_args(parser) - parser = _add_autoresume_args(parser) - parser = _add_biencoder_args(parser) - parser = _add_vision_args(parser) - parser = _add_moe_args(parser) - parser = _add_mla_args(parser) - parser = _add_logging_args(parser) - parser = _add_straggler_detector_args(parser) - parser = _add_inference_args(parser) - parser = _add_transformer_engine_args(parser) - parser = _add_retro_args(parser) - parser = _add_experimental_args(parser) - parser = _add_one_logger_args(parser) - parser = _add_ft_package_args(parser) - parser = _add_config_logger_args(parser) - parser = _add_rerun_machine_args(parser) - - # Custom arguments. - if extra_args_provider is not None: - parser = extra_args_provider(parser) - - # Parse. 
- if ignore_unknown_args: - args, _ = parser.parse_known_args() - else: - args = parser.parse_args() - - # Experimental yaml - if args.yaml_cfg is not None: - from .yaml_arguments import load_yaml - assert args.yaml_cfg and not args.use_legacy_models, \ - "Yaml config is not supported with legacy models." - args = load_yaml(args.yaml_cfg) - - - # Args from environment - # args.rank = int(os.getenv('RANK', '0')) - # args.world_size = int(os.getenv("WORLD_SIZE", '1')) - - return args - - -def load_retro_config(retro_project_dir): - '''Load Retro's config.json.''' - - # Retro config path. - retro_config_path = get_retro_config_path(retro_project_dir) - assert os.path.exists(retro_config_path), \ - "Retro project dir missing config.json." - - # Load retro config. - with open(retro_config_path) as f: - retro_config = types.SimpleNamespace(**json.load(f)) - - return retro_config - - -def load_retro_args(args): - """Load predefined args from Retro config (if applicable). - - When using Retro (or GPT for comparison purposes), data arguments are - overridden by the saved config.json within the Retro project directory. This - is to ensure that the data used for pretraining is consistent with the data - that was preprocessed using the Retro preprocessing pipeline (see - `tools/retro/preprocess_data.py`). - """ - - # Return if no project directory is specified. - if args.retro_project_dir is None: - return - - # Load retro config. - retro_config = load_retro_config(args.retro_project_dir) - - # Retro data path is relative to project dir (via hard or soft links). - data_dir = get_retro_data_dir(args.retro_project_dir) - data_path = list(retro_config.retro_gpt_data_path) - if len(data_path) % 2 == 0: - for i in range(len(data_path) - 1, -1, -2): - data_path[i] = os.path.join(data_dir, data_path[i]) - else: - assert len(data_path) == 1 - data_path[0] = os.path.join(data_dir, data_path[0]) - - # Update args. - args.data_cache_path = retro_config.retro_gpt_data_cache_path - args.data_path = data_path if args.data_path is None else args.data_path - args.eval_interval = retro_config.retro_gpt_eval_interval - args.eval_iters = retro_config.retro_gpt_eval_iters - args.global_batch_size = retro_config.retro_gpt_global_batch_size - args.max_position_embeddings = retro_config.retro_gpt_seq_length - args.merge_file = os.path.join( - args.retro_project_dir, - retro_config.retro_gpt_merge_file, - ) if retro_config.retro_gpt_merge_file is not None else None - args.seed = retro_config.retro_gpt_seed - args.seq_length = retro_config.retro_gpt_seq_length - args.tokenizer_model = os.path.join( - args.retro_project_dir, - retro_config.retro_gpt_tokenizer_model, - ) if retro_config.retro_gpt_tokenizer_model is not None else None - args.tokenizer_type = retro_config.retro_gpt_tokenizer_type - args.train_samples = retro_config.retro_gpt_train_samples - args.vocab_file = os.path.join( - args.retro_project_dir, - retro_config.retro_gpt_vocab_file, - ) if retro_config.retro_gpt_vocab_file is not None else None - - # Retro-specific args. - args.retro_block_size = retro_config.retro_block_size - args.retro_chunk_length = retro_config.retro_gpt_chunk_length - args.retro_neighbor_dirs = retro_config.retro_neighbor_dirs - args.retro_split_preprocessing = retro_config.retro_gpt_split - args.retro_bert_tokenizer_type = retro_config.retro_bert_tokenizer_type - args.retro_bert_vocab_file = retro_config.retro_bert_vocab_file - -def moe_freq_type(x): - """Frequency between MoE layers and Dense layers. 
- - Accepts either: - - An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers - - A string "N": Same as above, but provided as a string - - A string containing a Python list expression that defines a custom pattern, e.g.: - "([1]*3+[0]*1)*3" evaluates to [1,1,1,0,1,1,1,0,1,1,1,0] - where 1 indicates an expert layer and 0 indicates a dense layer. - This allows defining arbitrary patterns of expert and dense layers. - The pattern length must match the total number of transformer layers. - Examples: - "([0]+[1]*23)": 1 dense layer followed by 23 experts layers - "([1]*3+[0]*2)*2": Three expert layers followed by two dense layers, repeated twice. - """ - if isinstance(x, int): - return x - assert isinstance(x, str) - if '[' in x: - # it's a custom pattern - pattern = eval(x) - return pattern - else: - # it's a single int but in str - return int(x) - - -def validate_args(args, defaults={}): - - # Temporary - assert args.non_persistent_ckpt_type in ['global', None], \ - 'Currently only global checkpoints are supported' - - # Load saved args from Retro (if applicable). - load_retro_args(args) - - # Set args.use_dist_ckpt from args.ckpt_format. - update_use_dist_ckpt(args) - - - if args.encoder_pipeline_model_parallel_size == 0 and args.num_experts == 0: - assert args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size, "If non-MOE encoder shares first decoder pipeline rank it must have the same TP as the decoder." - - if args.encoder_tensor_model_parallel_size > 0: - assert args.num_attention_heads % args.encoder_tensor_model_parallel_size == 0 - assert args.encoder_tensor_model_parallel_size <= args.tensor_model_parallel_size, "We do not support encoders with more TP than the decoder." - - if args.encoder_pipeline_model_parallel_size > 0 and args.encoder_tensor_model_parallel_size == 0: - args.encoder_tensor_model_parallel_size = args.tensor_model_parallel_size - - encoder_model_size = args.encoder_tensor_model_parallel_size * args.encoder_pipeline_model_parallel_size * args.context_parallel_size - decoder_model_size = args.tensor_model_parallel_size * args.pipeline_model_parallel_size * args.context_parallel_size - total_model_size = encoder_model_size + decoder_model_size - - # Total model size. - assert args.world_size % total_model_size == 0, ( - f"world size ({args.world_size}) is not divisible by total_model_size ({encoder_model_size=} + {decoder_model_size=})" - ) - - if args.attention_backend == AttnBackend.local: - assert args.spec[0] == 'local' , '--attention-backend local is only supported with --spec local' - - # Pipeline model parallel size. - args.transformer_pipeline_model_parallel_size = ( - args.pipeline_model_parallel_size - 1 - if args.standalone_embedding_stage else - args.pipeline_model_parallel_size - ) - - args.data_parallel_size = args.world_size // total_model_size - - if args.rank == 0: - print('using world size: {}, data-parallel size: {}, ' - 'context-parallel size: {}, ' - 'hierarchical context-parallel sizes: {}' - 'tensor-model-parallel size: {}, ' - 'encoder-tensor-model-parallel size: {}, ' - 'pipeline-model-parallel size: {}, ' - 'encoder-pipeline-model-parallel size: {}'.format( - args.world_size, args.data_parallel_size, - args.context_parallel_size, - args.hierarchical_context_parallel_sizes, - args.tensor_model_parallel_size, - args.encoder_tensor_model_parallel_size, - args.pipeline_model_parallel_size, - args.encoder_pipeline_model_parallel_size), flush=True) - - # Checks. - - # Backwards compatibility. 
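As a worked example of the world-size bookkeeping above, using hypothetical parallelism sizes (the variable names mirror the args fields used in `validate_args`):

```python
# Hypothetical configuration.
world_size = 64
tensor_model_parallel_size = 4
pipeline_model_parallel_size = 2
context_parallel_size = 1
encoder_tensor_model_parallel_size = 0
encoder_pipeline_model_parallel_size = 0

encoder_model_size = (encoder_tensor_model_parallel_size
                      * encoder_pipeline_model_parallel_size
                      * context_parallel_size)              # 0 (no separate encoder ranks)
decoder_model_size = (tensor_model_parallel_size
                      * pipeline_model_parallel_size
                      * context_parallel_size)              # 4 * 2 * 1 = 8
total_model_size = encoder_model_size + decoder_model_size  # 8

# World size must be a multiple of the combined model-parallel size;
# what remains becomes the data-parallel dimension.
assert world_size % total_model_size == 0
data_parallel_size = world_size // total_model_size         # 64 // 8 = 8
```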
- if args.pipeline_model_parallel_split_rank is not None: - args.encoder_pipeline_model_parallel_size = args.pipeline_model_parallel_split_rank - args.pipeline_model_parallel_size -= args.encoder_pipeline_model_parallel_size - assert args.pipeline_model_parallel_size > 0 - - if args.hierarchical_context_parallel_sizes: - from numpy import prod - assert args.context_parallel_size == prod(args.hierarchical_context_parallel_sizes) - if "a2a+p2p" in args.cp_comm_type: - assert args.hierarchical_context_parallel_sizes is not None, \ - "--hierarchical-context-parallel-sizes must be set when a2a+p2p is used in cp comm" - - if args.expert_tensor_parallel_size is None: - args.expert_tensor_parallel_size = args.tensor_model_parallel_size - - # Deprecated arguments. - assert args.batch_size is None, '--batch-size argument is no longer ' \ - 'valid, use --micro-batch-size instead' - del args.batch_size - assert args.warmup is None, '--warmup argument is no longer valid, use ' \ - '--lr-warmup-fraction instead' - del args.warmup - assert args.model_parallel_size is None, '--model-parallel-size is no ' \ - 'longer valid, use --tensor-model-parallel-size instead' - del args.model_parallel_size - - if args.checkpoint_activations: - if args.rank == 0: - print('--checkpoint-activations is no longer valid, use --recompute-activations, ' - 'or, for more control, --recompute-granularity and --recompute-method.') - exit() - del args.checkpoint_activations - - if args.recompute_activations: - args.recompute_granularity = 'selective' - del args.recompute_activations - - # Set input defaults. - for key in defaults: - # For default to be valid, it should not be provided in the - # arguments that are passed to the program. We check this by - # ensuring the arg is set to None. - if getattr(args, key, None) is not None: - if args.rank == 0: - print('WARNING: overriding default arguments for {key}:{v} \ - with {key}:{v2}'.format(key=key, v=defaults[key], - v2=getattr(args, key)), - flush=True) - else: - setattr(args, key, defaults[key]) - - if args.data_path is not None and args.split is None: - legacy_default_split_value = '969, 30, 1' - if args.rank == 0: - print('WARNING: Please specify --split when using --data-path. Using legacy default value ' - f'of "{legacy_default_split_value}"') - args.split = legacy_default_split_value - - use_data_path = (args.data_path is not None) or (args.data_args_path is not None) - if use_data_path: - # Exactly one of the two has to be None if we use it. - assert (args.data_path is None) or (args.data_args_path is None) - use_per_split_data_path = any( - elt is not None - for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) or \ - args.per_split_data_args_path is not None - if use_per_split_data_path: - # Exactly one of the two has to be None if we use it. - assert any(elt is not None - for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) is False or \ - args.per_split_data_args_path is None - - # Batch size. 
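A minimal sketch of the `defaults` mechanism above: a default is applied only when the corresponding attribute is still `None`; an explicitly provided value is kept and merely triggers a warning. The argument names and values here are illustrative.

```python
import types

# Illustrative stand-in for the parsed argparse namespace.
args = types.SimpleNamespace(seq_length=None, hidden_size=1024, rank=0)
defaults = {'seq_length': 2048, 'hidden_size': 4096}

for key in defaults:
    if getattr(args, key, None) is not None:
        # Explicit value wins; validate_args only warns in this case.
        print(f'WARNING: overriding default {key}:{defaults[key]} '
              f'with {key}:{getattr(args, key)}')
    else:
        setattr(args, key, defaults[key])

print(args.seq_length, args.hidden_size)  # 2048 1024
```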
- assert args.micro_batch_size is not None - assert args.micro_batch_size > 0 - if args.global_batch_size is None: - args.global_batch_size = args.micro_batch_size * args.data_parallel_size - if args.rank == 0: - print('setting global batch size to {}'.format( - args.global_batch_size), flush=True) - assert args.global_batch_size > 0 - if args.decoder_first_pipeline_num_layers is None and args.decoder_last_pipeline_num_layers is None: - # Divisibility check not applicable for T5 models which specify encoder_num_layers - # and decoder_num_layers. - if args.num_layers is not None: - assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ - 'Number of layers should be divisible by the pipeline-model-parallel size' - if args.num_layers_per_virtual_pipeline_stage is not None: - if args.overlap_p2p_comm: - assert args.pipeline_model_parallel_size > 1, \ - 'When interleaved schedule is used, pipeline-model-parallel size '\ - 'should be greater than 1' - else: - assert args.pipeline_model_parallel_size > 2, \ - 'When interleaved schedule is used and p2p communication overlap is disabled, '\ - 'pipeline-model-parallel size should be greater than 2 to avoid having multiple '\ - 'p2p sends and recvs between same 2 ranks per communication batch' - assert args.num_layers is not None - # Double check divisibility check here since check above is if guarded. - assert args.num_layers % args.transformer_pipeline_model_parallel_size == 0, \ - 'Number of layers should be divisible by the pipeline-model-parallel size' - num_layers_per_pipeline_stage = args.num_layers // args.transformer_pipeline_model_parallel_size - assert num_layers_per_pipeline_stage % args.num_layers_per_virtual_pipeline_stage == 0, \ - 'Number of layers per pipeline stage must be divisible by number of layers per virtual pipeline stage' - args.virtual_pipeline_model_parallel_size = num_layers_per_pipeline_stage // \ - args.num_layers_per_virtual_pipeline_stage - else: - args.virtual_pipeline_model_parallel_size = None - # Overlap P2P communication is disabled if not using the interleaved schedule. - args.overlap_p2p_comm = False - args.align_param_gather = False - # Only print warning if PP size > 1. - if args.rank == 0 and args.pipeline_model_parallel_size > 1: - print('WARNING: Setting args.overlap_p2p_comm and args.align_param_gather to False ' - 'since non-interleaved schedule does not support overlapping p2p communication ' - 'and aligned param AG') - - if args.overlap_param_gather: - assert args.use_distributed_optimizer, \ - '--overlap-param-gather only supported with distributed optimizer' - assert args.overlap_grad_reduce, \ - 'Must use --overlap-param-gather with --overlap-grad-reduce' - assert not args.use_legacy_models, \ - '--overlap-param-gather only supported with MCore models' - - if getattr(args, "use_torch_fsdp2", False): - assert get_torch_version() >= PkgVersion("2.4"), \ - 'FSDP2 requires PyTorch >= 2.4.0 with FSDP 2 support.' 
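To make the interleaved-schedule arithmetic above concrete, a small worked example with hypothetical layer counts:

```python
# Hypothetical configuration.
num_layers = 48
transformer_pipeline_model_parallel_size = 4
num_layers_per_virtual_pipeline_stage = 3

# Layers must divide evenly across pipeline stages.
assert num_layers % transformer_pipeline_model_parallel_size == 0
num_layers_per_pipeline_stage = (
    num_layers // transformer_pipeline_model_parallel_size      # 48 // 4 = 12
)

# Each stage is further divided into virtual (interleaved) stages.
assert num_layers_per_pipeline_stage % num_layers_per_virtual_pipeline_stage == 0
virtual_pipeline_model_parallel_size = (
    num_layers_per_pipeline_stage // num_layers_per_virtual_pipeline_stage  # 12 // 3 = 4
)
```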
- assert args.pipeline_model_parallel_size == 1, \ - '--use-torch-fsdp2 is not supported with pipeline parallelism' - assert args.expert_model_parallel_size == 1, \ - '--use-torch-fsdp2 is not supported with expert parallelism' - assert not args.use_distributed_optimizer, \ - "--use-torch-fsdp2 is not supported with MCore's distributed optimizer" - assert not args.gradient_accumulation_fusion, \ - '--use-torch-fsdp2 is not supported with gradient accumulation fusion' - assert args.ckpt_format == 'torch_dist', \ - '--use-torch-fsdp2 requires --ckpt-format torch_dist' - assert args.untie_embeddings_and_output_weights, \ - '--use-torch-fsdp2 requires --untie-embeddings-and-output-weights' - assert not args.fp16, \ - '--use-torch-fsdp2 not supported with fp16 yet' - - if args.overlap_param_gather_with_optimizer_step: - assert args.use_distributed_optimizer, \ - '--overlap-param-gather-with-optimizer-step only supported with distributed optimizer' - assert args.overlap_param_gather, \ - 'Must use --overlap-param-gather-with-optimizer-step with --overlap-param-gather' - assert args.virtual_pipeline_model_parallel_size is not None, \ - '--overlap-param-gather-with-optimizer-step only supported with interleaved pipeline parallelism' - assert not args.use_dist_ckpt, \ - '--overlap-param-gather-with-optimizer-step not supported with distributed checkpointing yet' - - dtype_map = { - 'fp32': torch.float32, 'bf16': torch.bfloat16, 'fp16': torch.float16, 'fp8': torch.uint8, - } - args.main_grads_dtype = dtype_map[args.main_grads_dtype] - args.main_params_dtype = dtype_map[args.main_params_dtype] - args.exp_avg_dtype = dtype_map[args.exp_avg_dtype] - args.exp_avg_sq_dtype = dtype_map[args.exp_avg_sq_dtype] - - if args.fp8_param_gather: - assert args.use_distributed_optimizer, \ - '--fp8-param-gather only supported with distributed optimizer' - - # Parameters dtype. - args.params_dtype = torch.float - if args.fp16: - assert not args.bf16 - args.params_dtype = torch.half - # Turn off checking for NaNs in loss and grads if using dynamic loss scaling, - # where NaNs in grads / loss are signal to the loss scaler. - if not args.loss_scale: - args.check_for_nan_in_loss_and_grad = False - if args.rank == 0: - print('WARNING: Setting args.check_for_nan_in_loss_and_grad to False since ' - 'dynamic loss scaling is being used') - if args.bf16: - assert not args.fp16 - args.params_dtype = torch.bfloat16 - # bfloat16 requires gradient accumulation and all-reduce to - # be done in fp32. - if args.accumulate_allreduce_grads_in_fp32: - assert args.main_grads_dtype == torch.float32, \ - "--main-grads-dtype can only be fp32 when --accumulate-allreduce-grads-in-fp32 is set" - - if not args.accumulate_allreduce_grads_in_fp32 and args.main_grads_dtype == torch.float32: - args.accumulate_allreduce_grads_in_fp32 = True - if args.rank == 0: - print('accumulate and all-reduce gradients in fp32 for ' - 'bfloat16 data type.', flush=True) - - if args.rank == 0: - print('using {} for parameters ...'.format(args.params_dtype), - flush=True) - - if args.dataloader_type is None: - args.dataloader_type = 'single' - - # data - assert args.num_dataset_builder_threads > 0 - - # Consumed tokens. - args.consumed_train_samples = 0 - args.skipped_train_samples = 0 - args.consumed_valid_samples = 0 - - # Support for variable sequence lengths across batches/microbatches. - # set it if the dataloader supports generation of variable sequence lengths - # across batches/microbatches. 
Due to additional communication overhead - # during pipeline parallelism, it should not be set if sequence length - # is constant during training. - args.variable_seq_lengths = False - - # Iteration-based training. - if args.train_iters: - # If we use iteration-based training, make sure the - # sample-based options are off. - assert args.train_samples is None, \ - 'expected iteration-based training' - assert args.lr_decay_samples is None, \ - 'expected iteration-based learning rate decay' - assert args.lr_warmup_samples == 0, \ - 'expected iteration-based learning rate warmup' - assert args.rampup_batch_size is None, \ - 'expected no batch-size rampup for iteration-based training' - if args.lr_warmup_fraction is not None: - assert args.lr_warmup_iters == 0, \ - 'can only specify one of lr-warmup-fraction and lr-warmup-iters' - - # Sample-based training. - if args.train_samples: - # If we use sample-based training, make sure the - # iteration-based options are off. - assert args.train_iters is None, \ - 'expected sample-based training' - assert args.lr_decay_iters is None, \ - 'expected sample-based learning rate decay' - assert args.lr_warmup_iters == 0, \ - 'expected sample-based learnig rate warmup' - if args.lr_warmup_fraction is not None: - assert args.lr_warmup_samples == 0, \ - 'can only specify one of lr-warmup-fraction ' \ - 'and lr-warmup-samples' - - if args.num_layers is not None: - assert args.encoder_num_layers is None, \ - 'cannot have both num-layers and encoder-num-layers specified' - args.encoder_num_layers = args.num_layers - else: - assert args.encoder_num_layers is not None, \ - 'either num-layers or encoder-num-layers should be specified' - args.num_layers = args.encoder_num_layers - - # Check required arguments. - required_args = ['num_layers', 'hidden_size', 'num_attention_heads', - 'max_position_embeddings'] - for req_arg in required_args: - _check_arg_is_not_none(args, req_arg) - - # Checks. - if args.ffn_hidden_size is None: - if args.swiglu: - # reduce the dimnesion for MLP since projections happens on - # two linear layers. this keeps the number of paramters in - # the same ballpark as the counterpart with 4*h size - # we keep it a multiple of 64, which means the actual tensor size - # will be a multiple of 64 / tp_size - args.ffn_hidden_size = int((4 * args.hidden_size * 2 / 3) / 64) * 64 - else: - args.ffn_hidden_size = 4 * args.hidden_size - - if args.kv_channels is None: - assert args.hidden_size % args.num_attention_heads == 0 - args.kv_channels = args.hidden_size // args.num_attention_heads - - if args.seq_length is not None and args.context_parallel_size > 1: - assert args.seq_length % (args.context_parallel_size * 2) == 0, \ - 'seq-length should be a multiple of 2 * context-parallel-size ' \ - 'if context-parallel-size > 1.' - - if args.seq_length is not None: - assert args.encoder_seq_length is None - args.encoder_seq_length = args.seq_length - else: - assert args.encoder_seq_length is not None - args.seq_length = args.encoder_seq_length - - if args.seq_length is not None: - assert args.max_position_embeddings >= args.seq_length - if args.decoder_seq_length is not None: - assert args.max_position_embeddings >= args.decoder_seq_length - if args.lr is not None: - assert args.min_lr <= args.lr - if args.save is not None: - assert args.save_interval is not None - # Mixed precision checks. - if args.fp16_lm_cross_entropy: - assert args.fp16, 'lm cross entropy in fp16 only support in fp16 mode.' 
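A worked example of the default hidden-size derivations above; the hidden size and head count are hypothetical:

```python
hidden_size = 4096
num_attention_heads = 32
swiglu = True

# SwiGLU splits the MLP projection across two linear layers, so the default
# FFN size is scaled to roughly (8/3) * hidden_size and rounded down to a
# multiple of 64 to keep parameter counts near the 4*h baseline.
if swiglu:
    ffn_hidden_size = int((4 * hidden_size * 2 / 3) / 64) * 64   # 10880
else:
    ffn_hidden_size = 4 * hidden_size                            # 16384

# Per-head projection width defaults to hidden_size / num_attention_heads.
assert hidden_size % num_attention_heads == 0
kv_channels = hidden_size // num_attention_heads                  # 128
```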
- if args.fp32_residual_connection: - assert args.fp16 or args.bf16, \ - 'residual connection in fp32 only supported when using fp16 or bf16.' - - if args.moe_grouped_gemm: - assert args.bf16, 'Currently GroupedGEMM for MoE only supports bf16 dtype.' - dc = torch.cuda.get_device_capability() - assert dc[0] >= 8, "Unsupported compute capability for GroupedGEMM kernels." - - if args.weight_decay_incr_style == 'constant': - assert args.start_weight_decay is None - assert args.end_weight_decay is None - args.start_weight_decay = args.weight_decay - args.end_weight_decay = args.weight_decay - else: - assert args.start_weight_decay is not None - assert args.end_weight_decay is not None - - # Persistent fused layer norm. - if not is_torch_min_version("1.11.0a0"): - args.no_persist_layer_norm = True - if args.rank == 0: - print('Persistent fused layer norm kernel is supported from ' - 'pytorch v1.11 (nvidia pytorch container paired with v1.11). ' - 'Defaulting to no_persist_layer_norm=True') - - # Activation recomputing. - if args.distribute_saved_activations: - assert args.tensor_model_parallel_size > 1, 'can distribute ' \ - 'recomputed activations only across tensor model ' \ - 'parallel groups' - assert args.recompute_granularity == 'full', \ - 'distributed recompute activations is only '\ - 'application to full recompute granularity' - assert args.recompute_method is not None, \ - 'for distributed recompute activations to work you '\ - 'need to use a recompute method ' - assert is_torch_min_version("1.10.0a0"), \ - 'distributed recompute activations are supported for pytorch ' \ - 'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \ - f'pytorch version is v{get_torch_version()}.' - - if args.recompute_granularity == 'selective': - assert args.recompute_method is None, \ - 'recompute method is not yet supported for ' \ - 'selective recomputing granularity' - - # disable sequence parallelism when tp=1 - # to avoid change in numerics when - # sequence_parallelism is enabled. - if args.tensor_model_parallel_size == 1: - if args.sequence_parallel: - warnings.warn("Disabling sequence parallelism because tensor model parallelism is disabled") - args.sequence_parallel = False - - if args.tp_comm_overlap: - assert args.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled' - - # disable async_tensor_model_parallel_allreduce when - # model parallel memory optimization is enabled - if args.sequence_parallel: - args.async_tensor_model_parallel_allreduce = False - if getattr(args, "use_torch_fsdp2", False): - warnings.warn( - "Using sequence parallelism with FSDP2 together. Try not to using them " - "together since they require different CUDA_MAX_CONNECTIONS settings " - "for best performance. 
sequence parallelism requires setting the " - "environment variable CUDA_DEVICE_MAX_CONNECTIONS to 1 while FSDP2 " - "requires not setting CUDA_DEVICE_MAX_CONNECTIONS=1 for better parallelization.") - - if os.environ.get('CUDA_DEVICE_MAX_CONNECTIONS') != "1": - if args.sequence_parallel: - raise RuntimeError( - "Using sequence parallelism requires setting the environment variable " - "CUDA_DEVICE_MAX_CONNECTIONS to 1") - if args.async_tensor_model_parallel_allreduce: - raise RuntimeError( - "Using async gradient all reduce requires setting the environment " - "variable CUDA_DEVICE_MAX_CONNECTIONS to 1") - - # Disable bias gelu fusion if we are disabling bias altogether - if not args.add_bias_linear: - args.bias_gelu_fusion = False - - # Keep the 'add bias' args in sync; add_qkv_bias is more targeted. - if args.add_bias_linear: - args.add_qkv_bias = True - - # Retro checks. - if args.retro_add_retriever: - - # Train samples should be auto-loaded. - assert args.train_samples is not None, \ - "args.train_samples should be auto-loaded from the retro config." - - # Sequence parallelism unsupported. - assert not args.sequence_parallel, \ - "retro currently does not support sequence parallelism." - - # Pipeline parallelism unsupported. - assert args.pipeline_model_parallel_size == 1, \ - "retro currently does not support pipeline parallelism." - - if args.decoupled_lr is not None or args.decoupled_min_lr is not None: - assert not args.use_legacy_models, \ - '--decoupled-lr and --decoupled-min-lr is not supported in legacy models.' - - # FlashAttention - args.use_flash_attn = args.use_flash_attn_cutlass or args.use_flash_attn_triton or args.use_flash_attn_torch - - # Legacy RoPE arguments - if args.use_rotary_position_embeddings: - args.position_embedding_type = 'rope' - if args.rotary_interleaved and args.apply_rope_fusion: - raise RuntimeError('--rotary-interleaved does not work with rope_fusion.') - if args.rotary_interleaved and args.use_legacy_models: - raise RuntimeError('--rotary-interleaved is not supported in legacy models.') - if args.position_embedding_type != 'rope': - args.apply_rope_fusion = False - - # Would just need to add 'NoPE' as a position_embedding_type to support this, but for now - # don't allow it to keep things simple - if not args.add_position_embedding and args.position_embedding_type != 'rope': - raise RuntimeError('--no-position-embedding is deprecated, use --position-embedding-type') - - # MoE Spec check - if args.num_experts == 0: - args.num_experts = None - if args.num_experts is not None: - assert args.spec is None, "Model Spec must be None when using MoEs" - - if args.moe_ffn_hidden_size is None: - args.moe_ffn_hidden_size = args.ffn_hidden_size - - # Context parallel - if args.context_parallel_size > 1: - assert not args.use_legacy_models, "Context parallelism is not supported in legacy models." - - # Expert parallelism check - if args.expert_model_parallel_size > 1: - assert args.num_experts is not None, "num_experts must be non None to use expert model parallelism" - assert args.num_experts % args.expert_model_parallel_size == 0, \ - "Number of experts should be a multiple of expert model parallel_size." - assert not args.fp16, \ - "Expert parallelism is not supported with fp16 training." 
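For the expert-parallelism check above, a small worked example with hypothetical counts; the per-rank count is an inference from the divisibility requirement, not stated in the patch:

```python
num_experts = 8
expert_model_parallel_size = 4

# Experts are sharded across expert-parallel ranks, so the expert count
# must divide evenly; presumably each EP rank then owns
# num_experts // expert_model_parallel_size experts (2 here).
assert num_experts % expert_model_parallel_size == 0
experts_per_ep_rank = num_experts // expert_model_parallel_size   # 2
```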
- - # Distributed checkpointing checks - if args.use_dist_ckpt and args.use_legacy_models: - raise RuntimeError('--use-dist-ckpt is not supported in legacy models.') - - # Data blend checks - assert args.mock_data + \ - bool(args.data_path) + \ - any([args.train_data_path, args.valid_data_path, args.test_data_path]) \ - <= 1, "A single data source must be provided in training mode, else None" - - if args.use_tp_pp_dp_mapping: - assert args.context_parallel_size * args.expert_model_parallel_size <= 1, \ - "context_parallel and expert_model_parallel can't be used with tp-pp-dp mapping." - - # Deterministic mode - if args.deterministic_mode: - assert not args.use_flash_attn, "Flash attention can not be used in deterministic mode." - assert not args.cross_entropy_loss_fusion, "Cross Entropy Fusion is currently not deterministic." - - all_reduce_choices = ["Tree", "Ring", "CollnetDirect", "CollnetChain", "^NVLS"] - assert os.getenv("NCCL_ALGO", -1) != -1 and os.getenv("NCCL_ALGO") in all_reduce_choices, \ - f"NCCL_ALGO must be one of {all_reduce_choices}." - - torch.use_deterministic_algorithms(True) - - # Update the printed args to reflect that `apply_query_key_layer_scaling` also controls `attention_softmax_in_fp32` - if args.apply_query_key_layer_scaling: - args.attention_softmax_in_fp32 = True - - # Checkpointing - if args.ckpt_fully_parallel_save_deprecated and args.rank == 0: - print('--ckpt-fully-parallel-save flag is deprecated and has no effect.' - ' Use --no-ckpt-fully-parallel-save to disable parallel save.') - if ( - args.use_dist_ckpt - and not args.ckpt_fully_parallel_save - and args.use_distributed_optimizer - and args.rank == 0 - ): - print('Warning: With non-parallel ckpt save and DistributedOptimizer,' - ' it will be impossible to resume training with different parallelism.' - ' Consider removing flag --no-ckpt-fully-parallel-save.') - if args.use_dist_ckpt_deprecated and args.rank == 0: - print('--use-dist-ckpt is deprecated and has no effect.' - ' Use --ckpt-format to select the checkpoint format.') - if args.dist_ckpt_format_deprecated and args.rank == 0: - print('--dist-ckpt-format is deprecated and has no effect.' - ' Use --ckpt-format to select the checkpoint format.') - - # Inference args - if args.inference_batch_times_seqlen_threshold > -1: - assert args.pipeline_model_parallel_size > 1, \ - "--inference-batch-times-seqlen-threshold requires setting --pipeline-model-parallel-size > 1." - - # MoE upcycling check - if args.moe_use_upcycling: - assert args.save is not None, "When using upcycling, the --save option must be specified." - if not args.no_load_optim: - args.no_load_optim = True - print('Warning: disabling --no-load-optim for upcycling.') - if not args.no_load_rng: - args.no_load_rng = True - print('Warning: disabling --no-load-rng for upcycling.') - - # Print arguments. - _print_args("arguments", args) - - return args - - -def _print_args(title, args): - """Print arguments.""" - if args.rank == 0: - print(f'------------------------ {title} ------------------------', - flush=True) - str_list = [] - for arg in vars(args): - dots = '.' 
* (48 - len(arg)) - str_list.append(' {} {} {}'.format(arg, dots, getattr(args, arg))) - for arg in sorted(str_list, key=lambda x: x.lower()): - print(arg, flush=True) - print(f'-------------------- end of {title} ---------------------', - flush=True) - - -def _check_arg_is_not_none(args, arg): - assert getattr(args, arg) is not None, '{} argument is None'.format(arg) - - -def core_transformer_config_from_args(args, config_class=None): - - # Config class. - config_class = config_class or TransformerConfig - - if args.multi_latent_attention: - config_class = MLATransformerConfig - - # Translate args to core transformer configuration - kw_args = {} - for f in dataclasses.fields(config_class): - if hasattr(args, f.name): - kw_args[f.name] = getattr(args, f.name) - kw_args['persist_layer_norm'] = not args.no_persist_layer_norm - kw_args['layernorm_zero_centered_gamma'] = args.apply_layernorm_1p - kw_args['layernorm_epsilon'] = args.norm_epsilon - kw_args['deallocate_pipeline_outputs'] = True - kw_args['pipeline_dtype'] = args.params_dtype - kw_args['batch_p2p_comm'] = not args.overlap_p2p_comm - kw_args['num_moe_experts'] = args.num_experts - kw_args['rotary_interleaved'] = args.rotary_interleaved - kw_args['first_pipeline_num_layers']= args.decoder_first_pipeline_num_layers - kw_args['last_pipeline_num_layers']= args.decoder_last_pipeline_num_layers - if args.swiglu: - kw_args['activation_func'] = F.silu - kw_args['gated_linear_unit'] = True - kw_args['bias_activation_fusion'] = args.bias_swiglu_fusion - else: - kw_args['bias_activation_fusion'] = args.bias_gelu_fusion - if args.squared_relu: - assert not args.swiglu - kw_args['activation_func'] = squared_relu - if args.init_method_xavier_uniform: - kw_args['init_method'] = torch.nn.init.xavier_uniform_ - kw_args['scaled_init_method'] = torch.nn.init.xavier_uniform_ - if args.group_query_attention: - kw_args['num_query_groups'] = args.num_query_groups - else: - kw_args['num_query_groups'] = None - kw_args['config_logger_dir'] = args.config_logger_dir - - if len(args.cp_comm_type) == 1: - kw_args['cp_comm_type'] = args.cp_comm_type[0] - - # Return config. - return config_class(**kw_args) - - -def _add_transformer_engine_args(parser): - group = parser.add_argument_group(title='Transformer-Engine') - - group.add_argument('--fp8-format', default=None, - choices=['e4m3', 'hybrid'], - help='Which fp8 format scheme to use for FP8 tensors in the forward and backward pass', - dest='fp8') - group.add_argument('--fp8-margin', type=int, default=0, - help='Scaling margin for fp8', - dest='fp8_margin') - group.add_argument('--fp8-interval', type=int, default=1, - help='DEPRECATED. This flag is ignored. 
Scaling update interval for fp8', - dest='fp8_interval') - group.add_argument('--fp8-amax-history-len', type=int, default=1, - help='Number of steps for which amax history is recorded per tensor', - dest='fp8_amax_history_len') - group.add_argument('--fp8-amax-compute-algo', default='most_recent', - choices=['most_recent', 'max'], - help='Algorithm for computing amax from history', - dest='fp8_amax_compute_algo') - group.add_argument('--no-fp8-wgrad', action='store_false', - help='Execute wgrad in higher precision even for FP8 runs', - dest='fp8_wgrad') - group.add_argument('--transformer-impl', default='transformer_engine', - choices=['local', 'transformer_engine'], - help='Which Transformer implementation to use.') - group.add_argument('--fp8-param-gather', action='store_true', - help='Keep the compute param in fp8 (do not use any other intermediate ' - 'dtype) and perform the param all-gather in fp8.') - return parser - -def _add_inference_args(parser): - group = parser.add_argument_group(title='inference') - - group.add_argument('--inference-batch-times-seqlen-threshold', - type=int, default=-1, - help='If (batch-size * sequence-length) is smaller than this threshold' - 'then batches will not be split up for pipelining.' - 'Requires setting --pipeline-model-parallel-size > 1.' - 'Setting this to -1 indicates that batch pipelining is not used.') - group.add_argument('--max-tokens-to-oom', - type=int, default=12000, - help='Maximum number of tokens during inference' - 'tokens here is # in prompt + # to generate' - 'Allows us to throw an error before OOM crashes server') - group.add_argument('--output-bert-embeddings', action='store_true', - help='Output Bert embeddings (via mean pooling) from ' - 'model, rather than its binary head output or entire ' - 'hidden batch.') - group.add_argument('--bert-embedder-type', default="megatron", - choices=["megatron", "huggingface"], - help='Select either Megatron or Huggingface as the ' - 'Bert embedder.') - group.add_argument('--flash-decode', default=False, action="store_true", - help='Whether to use the flash decoding kernel.') - group.add_argument('--inference-max-seq-length', type=int, default=2560, - help='Maximum sequence length allocated for prefill during inference.', - dest='inference_max_seq_length') - return parser - - -def _add_retro_args(parser): - group = parser.add_argument_group(title='retro') - - group.add_argument('--retro-project-dir', default=None, - help='Retro project directory, which contains the ' - 'preprocessed data for pretraining. 
This directory ' - 'is built during preprocessing (see ' - 'tools/retro/README.md), and contains subdirectories ' - 'for the chunk database and pretraining neighbors.') - group.add_argument('--retro-add-retriever', - action='store_true', default=False, - help='Add a retriever to the transformer, for use in ' - 'pretraining a Retro model.') - group.add_argument('--retro-cyclic-train-iters', type=int, default=None, - help='Set number of training iterations for cyclic ' - 'Retro training.') - group.add_argument('--retro-encoder-layers', type=int, default=2, - help='Number of layers to use for the retrieval ' - 'encoder.') - group.add_argument('--retro-encoder-hidden-dropout', - type=float, default=0.1, help='Hidden dropout for ' - 'retrieval encoder.') - group.add_argument('--retro-encoder-attention-dropout', - type=float, default=0.1, help='Attention dropout for ' - 'retrieval encoder.') - group.add_argument("--retro-num-neighbors", type=int, default=2, - help='Number of neighbors to retrieve during ' - 'pretraining.') - group.add_argument("--retro-num-retrieved-chunks", type=int, default=2, - help='Number of chunks to retrieve from the retrieval ' - 'database.') - group.add_argument("--retro-attention-gate", type=float, default=1, - help="Gated cross attention.") - group.add_argument("--retro-no-verify-neighbor-count", action="store_false", - dest="retro_verify_neighbor_count", - help="Skip verifying that len(GPT dataset) == len(saved " - "neighbors).") - - # Enforce argument naming convention. - for action in group._group_actions: - prefix = action.dest.split("_")[0] - assert prefix == "retro", \ - "Retro args must be prefixed with '--retro-*', for consistent " \ - "styling. Please fix '%s'." % ", ".join(action.option_strings) - - return parser - - -def _add_network_size_args(parser): - group = parser.add_argument_group(title='network size') - - group.add_argument('--num-layers', type=int, default=None, - help='Number of transformer layers.') - group.add_argument('--encoder-num-layers', type=int, default=None, - help='Number of encoder transformer layers.') - group.add_argument('--decoder-num-layers', type=int, default=None, - help='Number of decoder transformer layers.') - group.add_argument('--hidden-size', type=int, default=None, - help='Tansformer hidden size.') - group.add_argument('--ffn-hidden-size', type=int, default=None, - help='Transformer Feed-Forward Network hidden size. ' - 'This is set to 4*hidden-size if not provided') - group.add_argument('--num-attention-heads', type=int, default=None, - help='Number of transformer attention heads.') - group.add_argument('--attention-backend', type=lambda attn_backend: AttnBackend[attn_backend], default=AttnBackend.auto, choices = list(AttnBackend), help='Attention backend to use (flash,fused,unfused,local,auto). Defaults to auto') - group.add_argument('--kv-channels', type=int, default=None, - help='Projection weights dimension in multi-head ' - 'attention. This is set to ' - ' args.hidden_size // args.num_attention_heads ' - 'if not provided.') - group.add_argument('--group-query-attention', action='store_true', - help='Use group-query attention.') - group.add_argument('--num-query-groups', type=int, default=1) - - group.add_argument('--max-position-embeddings', type=int, default=None, - help='Maximum number of position embeddings to use. 
' - 'This is the size of position embedding.') - group.add_argument('--position-embedding-type', type=str, default='learned_absolute', - choices=['learned_absolute', 'rope', 'none'], - help='Position embedding type.') - group.add_argument('--use-rotary-position-embeddings', action='store_true', - help='Use rotary positional embeddings or not. ' - 'Deprecated: use --position-embedding-type') - group.add_argument('--rotary-base', type=int, default=10000, - help='Base to use for rotary positional embeddings, default 10000') - group.add_argument('--rotary-percent', type=float, default=1.0, - help='Percent of rotary dimension to use, default 100%%') - group.add_argument('--rotary-interleaved', action='store_true', - help='Use interleaved rotary embedding.') - group.add_argument('--rotary-seq-len-interpolation-factor', type=int, default=None, - help='Sequence length interpolation factor for rotary embeddings.') - group.add_argument('--use-rope-scaling', action='store_true', - help='Apply rope scaling as used in llama3.1') - group.add_argument('--no-position-embedding', - action='store_false', - help='Disable position embedding. Deprecated: use --position-embedding-type', - dest='add_position_embedding') - group.add_argument('--make-vocab-size-divisible-by', type=int, default=128, - help='Pad the vocab size to be divisible by this value.' - 'This is added for computational efficieny reasons.') - group.add_argument('--normalization', default='LayerNorm', - choices=['LayerNorm', 'RMSNorm'], - help='Which normalization technique to use.') - group.add_argument('--norm-epsilon', type=float, default=1e-5, - help='Epsilon for layer norm and RMS norm.') - group.add_argument('--apply-layernorm-1p', action='store_true', - help='Adjust LayerNorm weights such that they are centered ' - 'around zero. This improves numerical stability.') - group.add_argument('--apply-residual-connection-post-layernorm', - action='store_true', - help='If set, use original BERT residula connection ' - 'ordering.') - group.add_argument('--openai-gelu', action='store_true', - help='Use OpenAIs GeLU implementation. 
This option' - 'should not be used unless for backward compatibility' - 'reasons.') - group.add_argument('--squared-relu', action='store_true', - help='Use squared relu activation instead of default gelu') - group.add_argument('--swiglu', action='store_true', - help='Use gated linear units and SiLU activation instead of default gelu') - group.add_argument('--onnx-safe', type=bool, required=False, - help='Use workarounds for known problems with ' - 'Torch ONNX exporter') - group.add_argument('--bert-no-binary-head', action='store_false', - help='Disable BERT binary head.', - dest='bert_binary_head') - group.add_argument('--untie-embeddings-and-output-weights', action='store_true', - help='Untie embeddings and output weights.') - group.add_argument('--multi-latent-attention', action='store_true', - help='Use multi-latent attention for model.') - return parser - - -def _add_straggler_detector_args(parser): - group = parser.add_argument_group(title='straggler') - group.add_argument('--log-straggler', action='store_true', - help='If set, tracks and logs straggler per GPU.') - group.add_argument('--disable-straggler-on-startup', action='store_true', - help='If set, StragglerDetector is disabled on startup.') - group.add_argument('--straggler-ctrlr-port', type=int, default=65535, - help='Port number to toggle StragglerDetector on/off at runtime') - group.add_argument('--straggler-minmax-count', type=int, default=1, - help='Number of ranks to report with high/low estimated throughput') - return parser - - -def _add_one_logger_args(parser): - group = parser.add_argument_group(title='one logger') - group.add_argument('--no-one-logger', action='store_false', - help='If set, disable using one_logger to track E2E metrics' - 'Note that one_logger is an internal tool and not ' - 'available externally. For installation, please go to ' - 'https://confluence.nvidia.com/display/MLWFO/Package+Repositories' - 'for more details', - dest='enable_one_logger') - group.add_argument('--one-logger-project', type=str, default='megatron-lm', - help='The one-logger project name. Will ignore if ' - '--no-one-logger is set') - group.add_argument('--one-logger-run-name', type=str, default=None, - help='The one-logger run name displayed. Will ignore if ' - '--no-one-logger is set') - group.add_argument('--one-logger-async', action='store_true', - help='If set, forces one_logger to use async mode.') - group.add_argument('--app-tag-run-name', type=str, default=None, - help='Jobs belonging to same training run, suppose to ' - 'have the same name. It will be used to track progress of ' - 'a training done over multiple different jobs') - group.add_argument('--app-tag-run-version', type=str, default='0.0.0', - help='The version of the training of which current job is ' - 'part of. It will be used to track the changes in the ' - 'application side which might change the performance ' - 'baseline') - return parser - - -def _add_ft_package_args(parser): - group = parser.add_argument_group(title='ft_package') - group.add_argument('--enable-ft-package', action='store_true', - help='If set, Fault Tolerance package is enabled. 
' - 'Note: This feature is for Nvidia internal use only.') - return parser - - -def _add_config_logger_args(parser): - group = parser.add_argument_group(title='config logger') - group.add_argument('--config-logger-dir', type=str, default='', - help='If set, will dump all configs to --config-logger-dir', - dest='config_logger_dir') - return parser - - -def _add_logging_args(parser): - group = parser.add_argument_group(title='logging') - - group.add_argument('--log-params-norm', action='store_true', - help='If set, calculate and log parameters norm.') - group.add_argument('--log-num-zeros-in-grad', action='store_true', - help='If set, calculate and log the number of zeros in gradient.') - group.add_argument('--log-throughput', action='store_true', - help='If set, calculate and log throughput per GPU.') - group.add_argument('--log-progress', action='store_true', - help='If set, log progress (in terms of number of processed tokens and ' - 'number of floating-point operations) to progress.txt file in checkpoint ' - 'directory.') - group.add_argument('--timing-log-level', type=int, - default=0, choices=range(0,3), - help='Granularity level to measure and report timing. ' - ' 0: report only iteration time and make sure timing ' - ' does not introduce extra overhead.' - ' 1: report timing for operations that are executed ' - ' very limited times (basically once) during ' - ' each iteration (such as gradient all-reduce) ' - ' 2: report timing for operations that migh be ' - ' executed numerous times during each iteration. ' - 'Note that setting the level to 1 or 2 might ' - 'cause increase in iteration time.') - group.add_argument('--no-barrier-with-level-1-timing', action='store_false', - help='If not set, use barrier with level 1 time ' - 'measurements. Note that this is up to the user ' - 'to make sure calling barrier with their timers ' - 'will not result in hangs. This can happen if for ' - 'example the user adds a level 1 timer that is not ' - 'called by all ranks.', - dest='barrier_with_L1_time') - group.add_argument('--timing-log-option', type=str, default='minmax', - choices=['max', 'minmax', 'all'], - help='Options for logging timing:' - ' max: report the max timing across all ranks' - ' minmax: report min and max timings across all ranks' - ' all: report timings of all ranks.') - group.add_argument('--tensorboard-log-interval', type=int, default=1, - help='Report to tensorboard interval.') - group.add_argument('--tensorboard-queue-size', type=int, default=1000, - help='Size of the tensorboard queue for pending events ' - 'and summaries before one of the ‘add’ calls forces a ' - 'flush to disk.') - group.add_argument('--log-timers-to-tensorboard', action='store_true', - help='If set, write timers to tensorboard.') - group.add_argument('--no-log-loss-scale-to-tensorboard', - action='store_false', - help='Disable loss-scale logging to tensorboard.', - dest='log_loss_scale_to_tensorboard') - group.add_argument('--log-validation-ppl-to-tensorboard', - action='store_true', - help='If set, write validation perplexity to ' - 'tensorboard.') - group.add_argument('--log-memory-to-tensorboard', - action='store_true', - help='Enable memory logging to tensorboard.') - group.add_argument('--log-world-size-to-tensorboard', - action='store_true', - help='Enable world size logging to tensorboard.') - group.add_argument('--wandb-project', type=str, default='', - help='The wandb project name. 
Ignore wandb by default.') - group.add_argument('--wandb-exp-name', type=str, default='', - help='The wandb experiment name.') - group.add_argument('--wandb-save-dir', type=str, default='', - help='Path to save the wandb results locally.') - group.add_argument('--logging-level', type=int, default=None, - help='Set default logging level') - return parser - - -def _add_regularization_args(parser): - group = parser.add_argument_group(title='regularization') - - group.add_argument('--attention-dropout', type=float, default=0.1, - help='Post attention dropout probability.') - group.add_argument('--hidden-dropout', type=float, default=0.1, - help='Dropout probability for hidden state transformer.') - group.add_argument('--weight-decay', type=float, default=0.01, - help='Weight decay coefficient for L2 regularization.') - group.add_argument('--start-weight-decay', type=float, - help='Initial weight decay coefficient for L2 regularization.') - group.add_argument('--end-weight-decay', type=float, - help='End of run weight decay coefficient for L2 regularization.') - group.add_argument('--weight-decay-incr-style', type=str, default='constant', - choices=['constant', 'linear', 'cosine'], - help='Weight decay increment function.') - group.add_argument('--clip-grad', type=float, default=1.0, - help='Gradient clipping based on global L2 norm.') - group.add_argument('--adam-beta1', type=float, default=0.9, - help='First coefficient for computing running averages ' - 'of gradient and its square') - group.add_argument('--adam-beta2', type=float, default=0.999, - help='Second coefficient for computing running averages ' - 'of gradient and its square') - group.add_argument('--adam-eps', type=float, default=1e-08, - help='Term added to the denominator to improve' - 'numerical stability') - group.add_argument('--sgd-momentum', type=float, default=0.9, - help='Momentum factor for sgd') - return parser - - -def _add_training_args(parser): - group = parser.add_argument_group(title='training') - - group.add_argument('--micro-batch-size', type=int, default=None, - help='Batch size per model instance (local batch size). ' - 'Global batch size is local batch size times data ' - 'parallel size times number of micro batches.') - group.add_argument('--batch-size', type=int, default=None, - help='Old batch size parameter, do not use. ' - 'Use --micro-batch-size instead') - group.add_argument('--global-batch-size', type=int, default=None, - help='Training batch size. If set, it should be a ' - 'multiple of micro-batch-size times data-parallel-size. ' - 'If this value is None, then ' - 'use micro-batch-size * data-parallel-size as the ' - 'global batch size. This choice will result in 1 for ' - 'number of micro-batches.') - group.add_argument('--rampup-batch-size', nargs='*', default=None, - help='Batch size ramp up with the following values:' - ' --rampup-batch-size ' - ' ' - ' ' - 'For example:' - ' --rampup-batch-size 16 8 300000 \\ ' - ' --global-batch-size 1024' - 'will start with global batch size 16 and over ' - ' (1024 - 16) / 8 = 126 intervals will increase' - 'the batch size linearly to 1024. In each interval' - 'we will use approximately 300000 / 126 = 2380 samples.') - group.add_argument('--decrease-batch-size-if-needed', action='store_true', default=False, - help='If set, decrease batch size if microbatch_size * dp_size' - 'does not divide batch_size. 
Useful for KSO (Keep Soldiering On)' - 'to continue making progress if number of healthy GPUs (and' - 'corresponding dp_size) does not support current batch_size.' - 'Old batch_size will be restored if training is re-started with' - 'dp_size that divides batch_size // microbatch_size.') - group.add_argument('--recompute-activations', action='store_true', - help='recompute activation to allow for training ' - 'with larger models, sequences, and batch sizes.') - group.add_argument('--recompute-granularity', type=str, default=None, - choices=['full', 'selective'], - help='Checkpoint activations to allow for training ' - 'with larger models, sequences, and batch sizes. ' - 'It is supported at two granularities 1) full: ' - 'whole transformer layer is recomputed, ' - '2) selective: core attention part of the transformer ' - 'layer is recomputed.') - group.add_argument('--no-check-for-nan-in-loss-and-grad', action='store_false', - help='Check for NaNs in loss and grad', - dest='check_for_nan_in_loss_and_grad') - group.add_argument('--check-for-spiky-loss', action='store_true', - help='Check for spiky loss', - dest='check_for_spiky_loss') - group.add_argument('--distribute-saved-activations', - action='store_true', - help='If set, distribute recomputed activations ' - 'across model parallel group.') - group.add_argument('--recompute-method', type=str, default=None, - choices=['uniform', 'block'], - help='1) uniform: uniformly divide the total number of ' - 'Transformer layers and recompute the input activation of ' - 'each divided chunk at specified granularity, ' - '2) recompute the input activations of only a set number of ' - 'individual Transformer layers per pipeline stage and do the ' - 'rest without any recomputing at specified granularity' - 'default) do not apply activations recompute to any layers') - group.add_argument('--recompute-num-layers', type=int, default=None, - help='1) uniform: the number of Transformer layers in each ' - 'uniformly divided recompute unit, ' - '2) block: the number of individual Transformer layers ' - 'to recompute within each pipeline stage.') - group.add_argument('--no-clone-scatter-output-in-embedding', action='store_false', - help='If not set, clone the output of the scatter in embedding layer to GC original tensor.', - dest='clone_scatter_output_in_embedding') - group.add_argument('--profile', action='store_true', - help='Enable nsys profiling. When using this option, nsys ' - 'options should be specified in commandline. An example ' - 'nsys commandline is `nsys profile -s none -t nvtx,cuda ' - '-o --force-overwrite true ' - '--capture-range=cudaProfilerApi ' - '--capture-range-end=stop`.') - group.add_argument('--profile-step-start', type=int, default=10, - help='Global step to start profiling.') - group.add_argument('--profile-step-end', type=int, default=12, - help='Global step to stop profiling.') - group.add_argument('--use-pytorch-profiler', action='store_true', - help='Use the built-in pytorch profiler. 
' - 'Useful if you wish to view profiles in tensorboard.', - dest='use_pytorch_profiler') - group.add_argument('--use-hip-profiler', action='store_true', - help='Use HIP PROFILER', - dest='use_hip_profiler') - group.add_argument('--profile-ranks', nargs='+', type=int, default=[0], - help='Global ranks to profile.') - group.add_argument('--profile-dir', type=str, default="./", - help='profile dir to save.') - group.add_argument('--record-memory-history', action="store_true", default=False, - help='Record memory history in last rank.') - group.add_argument('--memory-snapshot-path', type=str, default="snapshot.pickle", - help='Specifies where to dump the memory history pickle.') - group.add_argument('--tp-comm-overlap', action='store_true', help='Enables the ' - ' overlap of Tensor parallel communication and GEMM kernels.') - group.add_argument('--tp-comm-overlap-cfg', type=str, default=None, - help='Config file when tp_comm_overlap is enabled.') - group.add_argument('--disable-tp-comm-overlap-ag', action='store_false', - help=('Disables the All-Gather overlap with GEMM by ' - 'pipelining the GEMM and All-Gather.'), - dest='tp_comm_overlap_ag') - group.add_argument('--disable-tp-comm-overlap-rs', action='store_false', - help=('Disables the Reduce-Scatter overlap with GEMM by ' - 'pipelining the GEMM and Reduce-Scatter.'), - dest='tp_comm_overlap_rs') - group.add_argument('--tp-comm-overlap-rs-dgrad', action='store_true', - help = 'Enables the Reduce-Scatter overlap with dgrad GEMM.', - dest='tp_comm_overlap_rs_dgrad') - group.add_argument('--disable-tp-comm-bulk-dgrad', action='store_false', - help='Disables the All-Gather overlap with bprop activation gradient GEMM.', - dest='tp_comm_bulk_dgrad') - group.add_argument('--disable-tp-comm-bulk-wgrad', action='store_false', - help='Disables the Reduce-Scatter overlap with bprop weight gradient GEMM.', - dest='tp_comm_bulk_wgrad') - group.add_argument('--tp-comm-bootstrap-backend', default='nccl', type=str, - choices=['nccl', 'mpi', 'gloo'], - help='Set the bootstrapping backend of Tensor parallel communications.') - group.add_argument('--use-cpu-initialization', action='store_true', - default=None, - help='If set, initialize weights on the CPU. This eliminates init differences based on tensor parallelism.') - group.add_argument('--empty-unused-memory-level', default=0, type=int, - choices=[0, 1, 2], - help='Call torch.cuda.empty_cache() each iteration ' - '(training and eval), to reduce fragmentation.' - '0=off, 1=moderate, 2=aggressive.') - group.add_argument('--deterministic-mode', action='store_true', - help='Choose code that has deterministic execution. This usually ' - 'means slower execution, but is good for debugging and testing.') - group.add_argument('--check-weight-hash-across-dp-replicas-interval', type=int, default=None, - help='Interval to check weight hashes are same across DP replicas. 
If not specified, weight hashes not checked.') - group.add_argument('--calculate-per-token-loss', action='store_true', - help=('Scale cross entropy loss by the number of non-padded tokens in the ' - 'global batch, versus the default behavior of assuming all tokens are non-padded.')) - group.add_argument('--train-sync-interval', type=int, default=None, - help='Training CPU-GPU synchronization interval, to ensure that CPU is not running too far ahead of GPU.') - - # deprecated - group.add_argument('--checkpoint-activations', action='store_true', - help='Checkpoint activation to allow for training ' - 'with larger models, sequences, and batch sizes.') - group.add_argument('--train-iters', type=int, default=None, - help='Total number of iterations to train over all ' - 'training runs. Note that either train-iters or ' - 'train-samples should be provided.') - group.add_argument('--train-samples', type=int, default=None, - help='Total number of samples to train over all ' - 'training runs. Note that either train-iters or ' - 'train-samples should be provided.') - group.add_argument('--log-interval', type=int, default=100, - help='Report loss and timing interval.') - group.add_argument('--exit-interval', type=int, default=None, - help='Exit the program after the iteration is divisible ' - 'by this value.') - group.add_argument('--exit-duration-in-mins', type=int, default=None, - help='Exit the program after this many minutes.') - group.add_argument('--exit-signal-handler', action='store_true', - help='Dynamically save the checkpoint and shutdown the ' - 'training if SIGTERM is received') - group.add_argument('--tensorboard-dir', type=str, default=None, - help='Write TensorBoard logs to this directory.') - group.add_argument('--no-masked-softmax-fusion', - action='store_false', - help='Disable fusion of query_key_value scaling, ' - 'masking, and softmax.', - dest='masked_softmax_fusion') - group.add_argument('--no-bias-gelu-fusion', action='store_false', - help='Disable bias and gelu fusion.', - dest='bias_gelu_fusion') - group.add_argument('--no-bias-swiglu-fusion', action='store_false', - help='Disable bias and swiglu fusion, the fusion is ' - 'available only when using megatron-core.', - dest='bias_swiglu_fusion') - group.add_argument('--no-bias-dropout-fusion', action='store_false', - help='Disable bias and dropout fusion.', - dest='bias_dropout_fusion') - group.add_argument('--no-rope-fusion', action='store_false', - help='Disable rope fusion, the fusion is available ' - 'only when using megatron-core.', - dest='apply_rope_fusion') - group.add_argument('--cross-entropy-loss-fusion', action='store_true', - help='Enabled fusion of cross entropy loss calculation.', - dest='cross_entropy_loss_fusion') - group.add_argument('--use-flash-attn-cutlass', action='store_true', - help='use FlashAttention implementation of attention. 
' - 'https://arxiv.org/abs/2205.14135') - group.add_argument('--use-flash-attn-torch', action='store_true', - help='use FlashAttention implementation of attention using torch.') - group.add_argument('--use-flash-attn-triton', action='store_true', - help='use FlashAttention implementation of attention using Triton.') - group.add_argument('--disable-bias-linear', action='store_false', - help='Disable bias in the linear layers', - dest='add_bias_linear') - group.add_argument('--add-qkv-bias', action='store_true', - help='Enable bias only in the QKV linear layers', - dest='add_qkv_bias') - group.add_argument('--optimizer', type=str, default='adam', - choices=['adam', 'sgd'], - help='Optimizer function') - group.add_argument('--dataloader-type', type=str, default=None, - choices=['single', 'cyclic', 'external'], - help='Single pass vs multiple pass data loader') - group.add_argument('--no-async-tensor-model-parallel-allreduce', - action='store_false', - help='DEPRECATED. This flag is ignored.', - dest='async_tensor_model_parallel_allreduce') - group.add_argument('--no-persist-layer-norm', action='store_true', - help='Disable using persistent fused layer norm kernel. ' - 'This kernel supports only a set of hidden sizes. Please ' - 'check persist_ln_hidden_sizes if your hidden ' - 'size is supported.') - group.add_argument('--sequence-parallel', action='store_true', - help='Enable sequence parallel optimization.') - group.add_argument('--no-gradient-accumulation-fusion', - action='store_false', - help='Disable fusing gradient accumulation to weight ' - 'gradient computation of linear layers', - dest='gradient_accumulation_fusion') - group.add_argument('--use-mcore-models', action='store_true', - dest='deprecated_use_mcore_models', - help='DEPRECATED. Use the implementation from megatron core.' - 'Now ignored and mcore models are the default, use ' - '--use-legacy-models to not use core models.') - group.add_argument('--use-legacy-models', action='store_true', - help='Use the legacy Megatron models, not Megatron-Core models.') - group.add_argument('--manual-gc', action='store_true', - help='Disable the threshold-based default garbage ' - 'collector and trigger the garbage collection manually. ' - 'Manual garbage collection helps to align the timing of ' - 'the collection across ranks which mitigates the impact ' - 'of CPU-associated jitters. When the manual gc is enabled, ' - 'garbage collection is performed only at the start and the ' - 'end of the validation routine by default.') - group.add_argument('--manual-gc-interval', type=int, default=0, - help='Training step interval to trigger manual garbage ' - 'collection. When the value is set to 0, garbage ' - 'collection is not triggered between training steps.') - group.add_argument('--no-manual-gc-eval', action='store_false', - help='When using manual garbage collection, disable ' - 'garbage collection at the start and the end of each ' - 'evaluation run.', dest='manual_gc_eval') - group.add_argument('--disable-tp-comm-split-ag', action='store_false', - help='Disables the All-Gather overlap with fprop GEMM.', - dest='tp_comm_split_ag') - group.add_argument('--disable-tp-comm-split-rs', action='store_false', - help='Disables the Reduce-Scatter overlap with fprop GEMM.', - dest='tp_comm_split_rs') - - return parser - - -def _add_rerun_machine_args(parser): - group = parser.add_argument_group(title='rerun engine') - - group.add_argument('--error-injection-rate', type=int, default=0, - help='Rate at which to inject unexpected results, ' - 'e.g. 
1000 means once every 1000 result validations') - group.add_argument('--error-injection-type', type=str, default='transient_error', - choices=['correct_result', 'transient_error', 'persistent_error'], - help='Type of error to inject. ') - group.add_argument('--rerun-mode', type=str, default='disabled', - choices=['disabled', 'validate_results', 'report_stats'], - help='Use re-run engine to validate results (default) ' - 'or to emit stats on variability of computations due to ' - 'non-deterministic algorithms.') - - return parser - - -def _add_initialization_args(parser): - group = parser.add_argument_group(title='initialization') - - group.add_argument('--seed', type=int, default=1234, - help='Random seed used for python, numpy, ' - 'pytorch, and cuda.') - group.add_argument('--data-parallel-random-init', action='store_true', - help='Enable random initialization of params ' - 'across data parallel ranks') - group.add_argument('--init-method-std', type=float, default=0.02, - help='Standard deviation of the zero mean normal ' - 'distribution used for weight initialization.') - group.add_argument('--init-method-xavier-uniform', action='store_true', - help='Enable Xavier uniform parameter initialization') - - return parser - - -def _add_learning_rate_args(parser): - group = parser.add_argument_group(title='learning rate') - - group.add_argument('--lr', type=float, default=None, - help='Initial learning rate. Depending on decay style ' - 'and initial warmup, the learning rate at each ' - 'iteration would be different.') - group.add_argument('--lr-decay-style', type=str, default='linear', - choices=['constant', 'linear', 'cosine', 'inverse-square-root', 'WSD'], - help='Learning rate decay function.') - group.add_argument('--lr-wsd-decay-style', type=str, default='exponential', - choices=['exponential', 'linear', 'cosine'], - help='Decay style for the annealing phase of WSD'), - group.add_argument('--lr-decay-iters', type=int, default=None, - help='number of iterations to decay learning rate over,' - ' If None defaults to `--train-iters`') - group.add_argument('--lr-decay-samples', type=int, default=None, - help='number of samples to decay learning rate over,' - ' If None defaults to `--train-samples`') - group.add_argument('--lr-wsd-decay-samples', type=int, default=None, - help='number of samples for the annealing phase in the wsd schedule') - group.add_argument('--lr-wsd-decay-iters', type=int, default=None, - help='number of iterations for the annealing phase in the wsd schedule') - group.add_argument('--lr-warmup-fraction', type=float, default=None, - help='fraction of lr-warmup-(iters/samples) to use ' - 'for warmup (as a float)') - group.add_argument('--lr-warmup-iters', type=int, default=0, - help='number of iterations to linearly warmup ' - 'learning rate over.') - group.add_argument('--lr-warmup-samples', type=int, default=0, - help='number of samples to linearly warmup ' - 'learning rate over.') - group.add_argument('--lr-warmup-init', type=float, default=0.0, - help='Initial value for learning rate warmup. The ' - 'scheduler starts warmup from this value.') - group.add_argument('--warmup', type=int, default=None, - help='Old lr warmup argument, do not use. Use one of the' - '--lr-warmup-* arguments above') - group.add_argument('--min-lr', type=float, default=0.0, - help='Minimum value for learning rate. 
The scheduler' - 'clip values below this threshold.') - group.add_argument('--override-opt_param-scheduler', action='store_true', - help='Reset the values of the scheduler (learning rate,' - 'warmup iterations, minimum learning rate, maximum ' - 'number of iterations, and decay style from input ' - 'arguments and ignore values from checkpoints. Note' - 'that all the above values will be reset.') - group.add_argument('--use-checkpoint-opt_param-scheduler', action='store_true', - help='Use checkpoint to set the values of the scheduler ' - '(learning rate, warmup iterations, minimum learning ' - 'rate, maximum number of iterations, and decay style ' - 'from checkpoint and ignore input arguments.') - group.add_argument('--decoupled-lr', type=float, default=None, - help='Separate learning rate for the input and output layer') - group.add_argument('--decoupled-min-lr', type=float, default=None, - help='Minimum value for learning rate for the input and output layer. The scheduler' - 'clip values below this threshold') - - return parser - - -def _add_checkpointing_args(parser): - group = parser.add_argument_group(title='checkpointing') - - group.add_argument('--save', type=str, default=None, - help='Output directory to save checkpoints to.') - group.add_argument('--save-interval', '--persistent-save-interval', type=int, default=None, - help='Number of iterations between persistent checkpoint saves.') - group.add_argument('--no-save-optim', action='store_true', default=None, - help='Do not save current optimizer.') - group.add_argument('--no-save-rng', action='store_true', default=None, - help='Do not save current rng state.') - group.add_argument('--load', type=str, default=None, - help='Directory containing a model checkpoint.') - group.add_argument('--no-load-optim', action='store_true', default=None, - help='Do not load optimizer when loading checkpoint.') - group.add_argument('--no-load-rng', action='store_true', default=None, - help='Do not load rng state when loading checkpoint.') - group.add_argument('--non-persistent-save-interval', type=int, default=None, - help='Number of iterations between non-persistent saves.') - group.add_argument('--non-persistent-ckpt-type', type=str, default=None, - choices=['global', 'local', 'in_memory', None], - help='Type of non-persistent model checkpoints. ' - '"global" - Saved as a standard checkpoint (e.g., on Lustre) with old checkpoints being removed. ' - '"local" - [TBD] Each rank saves a portion of the checkpoint locally (e.g., on SSD/ramdisk). ' - '"in_memory" - [TBD] A special kind of local checkpoint that avoids serialization. ' - 'None - No non-persistent checkpointing (default option).') - group.add_argument('--non-persistent-global-ckpt-dir', type=str, default=None, - help='Directory containing global non-persistent model checkpoints.') - group.add_argument('--non-persistent-local-ckpt-dir', type=str, default=None, - help='Directory containing local non-persistent model checkpoints.') - group.add_argument('--non-persistent-local-ckpt-algo', type=str, default='fully_parallel', - choices=['fully_parallel', 'atomic'], - help='Algorithm for local non-persistent checkpointing.') - group.add_argument('--finetune', action='store_true', - help='Load model for finetuning. Do not load optimizer ' - 'or rng state from checkpoint and set iteration to 0. 
' - 'Assumed when loading a release checkpoint.') - group.add_argument('--pretrained-checkpoint', type=str, default=None, - help='Directory containing a pretrained model checkpoint for finetuning.') - group.add_argument('--ckpt-step', type=int, default=None, - help='Checkpoint step to load model from.') - group.add_argument('--no-initialization', action='store_false', - help='Do not perform initialization when building model, ' - 'can reduce startup time when definitely loading from a ' - 'checkpoint', - dest='perform_initialization') - group.add_argument('--use-checkpoint-args', action='store_true', - help='Override model-related command-line arguments with arguments from checkpoint') - group.add_argument('--use-mp-args-from-checkpoint-args', action='store_true', - help='Copy model parallelism command-line arguments from checkpoint') - group.add_argument('--no-use-tokenizer-model-from-checkpoint-args', action='store_false', - dest='use_tokenizer_model_from_checkpoint_args', - help='If set, do not use tokenizer model path from checkpoint') - group.add_argument('--exit-on-missing-checkpoint', action='store_true', - help="If '--load' is set, but checkpoint is not found " - "(e.g., path typo), then exit instead of random " - "initialization.") - group.add_argument('--use-dist-ckpt', action='store_true', - dest='use_dist_ckpt_deprecated', - help='Deprecated: see --ckpt-format.') - group.add_argument('--auto-detect-ckpt-format', action='store_true', - help='Determine if the checkpoint format is in legacy or distributed format.' - ' If False, expects distributed checkpoint iff args.ckpt_format != "torch".' - ' Might slow down loading a bit (double rank0 ckpt load).') - group.add_argument('--dist-ckpt-format', - dest='dist_ckpt_format_deprecated', - help='Deprecated: see --ckpt-format.') - group.add_argument('--ckpt-format', default='torch_dist', - choices=['torch', 'torch_dist', 'zarr'], - help='Checkpoint format to use.') - group.add_argument('--ckpt-convert-format', default=None, - choices=['torch', 'torch_dist', 'zarr'], - help='Checkpoint format for conversion.') - group.add_argument('--ckpt-convert-save', default=None, - help='Save directory for converted checkpoint.') - group.add_argument('--ckpt-convert-update-legacy-dist-opt-format', action='store_true', - help='When loading a checkpoint, update the legacy format ' - 'for the distributed optimizer, which previously used a ' - 'merged param/grad buffer and a different bucket mapping. ' - 'The legacy format was deprecated on Feb 13, 2024.') - group.add_argument('--ckpt-fully-parallel-save', action='store_true', - dest='ckpt_fully_parallel_save_deprecated', - help='Deprecated: see --no-ckpt-fully-parallel-save.') - group.add_argument('--no-ckpt-fully-parallel-save', action='store_false', - dest='ckpt_fully_parallel_save', - help='Disable applying full save parallelization across DP for' - ' distributed checkpoints. Depending on ckpt format' - ' might decrease the number of files in the checkpoint.' - ' Makes DistributedOptimizer checkpoint non-reshardable.') - group.add_argument('--async-save', action='store_true', default=None, - help='Apply async checkpointing save. 
Currently works only with' - '`torch_dist` distributed checkpoint format.') - group.add_argument('--ckpt-fully-parallel-load', action='store_true', - help='Apply full load parallelization across DP for' - ' distributed checkpoints.') - group.add_argument('--ckpt-assume-constant-structure', action='store_true', - help='If the model and optimizer state dict structure is' - 'constant throughout a *single training job*, it allows for' - 'different checkpointing performance optimizations.') - group.add_argument('--dist-ckpt-strictness', type=str, default='assume_ok_unexpected', - choices=[e.value for e in StrictHandling], - help='Determine handling of key mismatch during checkpoint load.' - ' Check StrictHandling docs for flags meaning.' - ' NOTE: This flag controls only distributed checkpoint' - ' load from storage, not loading state dict into the model.') - return parser - - -def _add_mixed_precision_args(parser): - group = parser.add_argument_group(title='mixed precision') - - group.add_argument('--fp16', action='store_true', - help='Run model in fp16 mode.') - group.add_argument('--bf16', action='store_true', - help='Run model in bfloat16 mode.') - group.add_argument('--loss-scale', type=float, default=None, - help='Static loss scaling, positive power of 2 ' - 'values can improve fp16 convergence. If None, dynamic' - 'loss scaling is used.') - group.add_argument('--initial-loss-scale', type=float, default=2**32, - help='Initial loss-scale for dynamic loss scaling.') - group.add_argument('--min-loss-scale', type=float, default=1.0, - help='Minimum loss scale for dynamic loss scaling.') - group.add_argument('--loss-scale-window', type=float, default=1000, - help='Window over which to raise/lower dynamic scale.') - group.add_argument('--hysteresis', type=int, default=2, - help='hysteresis for dynamic loss scaling') - group.add_argument('--fp32-residual-connection', action='store_true', - help='Move residual connections to fp32.') - group.add_argument('--apply-query-key-layer-scaling', action='store_true', - help='Scale Q * K^T by 1 / layer-number. ' - 'Useful for fp16 training. Also sets `attention_softmax_in_fp32` to True.') - group.add_argument('--attention-softmax-in-fp32', action='store_true', - help='Run attention masking and softmax in fp32.') - group.add_argument('--accumulate-allreduce-grads-in-fp32', - action='store_true', - help='Gradient accumulation and all-reduce in fp32.') - group.add_argument('--fp16-lm-cross-entropy', action='store_true', - help='Move the cross entropy unreduced loss calculation' - 'for lm head to fp16.') - - return parser - - -def _add_distributed_args(parser): - group = parser.add_argument_group(title='distributed') - - group.add_argument('--tensor-model-parallel-size', type=int, default=1, - help='Degree of tensor model parallelism.') - group.add_argument('--encoder-tensor-model-parallel-size', type=int, default=0, - help='Degree of tensor model parallelism for the encoder.') - group.add_argument('--pipeline-model-parallel-size', type=int, default=1, - help='Degree of pipeline model parallelism.') - group.add_argument('--encoder-pipeline-model-parallel-size', type=int, default=0, - help=('Degree of pipeline model parallelism in the encoder. This is ' - 'independent of the amount of pipeline in the decoder.')) - group.add_argument('--pipeline-model-parallel-split-rank', - type=int, default=None, - help=('Rank where encoder and decoder should be split. 
' - 'Deprecated; use --encoder-pipeline-model-parallel-size instead.')) - group.add_argument('--decoder-first-pipeline-num-layers', - type=int, default=None, - help=('The number of transformer layers on the first pipeline stage of the decoder. ' - 'Default None is even split of transformer layers across all pipeline stages')) - group.add_argument('--decoder-last-pipeline-num-layers', - type=int, default=None, - help=('The number of transformer layers on the last pipeline stage of the decoder. ' - 'Default None is even split of transformer layers across all pipeline stages')) - group.add_argument('--model-parallel-size', type=int, default=None, - help='Old model parallel argument, do not use. Use ' - '--tensor-model-parallel-size instead.') - group.add_argument('--num-layers-per-virtual-pipeline-stage', type=int, default=None, - help='Number of layers per virtual pipeline stage') - group.add_argument('--microbatch-group-size-per-virtual-pipeline-stage', type=int, default=None, - help='Number of contiguous microbatches per virtual pipeline stage', - dest='microbatch_group_size_per_vp_stage') - group.add_argument('--no-overlap-p2p-communication', action='store_false', - help='overlap pipeline parallel communication with forward and backward chunks in 1F1B', - dest='overlap_p2p_comm') - group.add_argument('--overlap-p2p-communication-warmup-flush', action='store_true', - default=False, help='if set, overlap pipeline parallel communication in warmup and flush', - dest='overlap_p2p_comm_warmup_flush') - group.add_argument('--distributed-backend', default='nccl', - choices=['nccl', 'gloo'], - help='Which backend to use for distributed training.') - group.add_argument('--distributed-timeout-minutes', type=int, default=10, - help='Timeout minutes for torch.distributed.') - group.add_argument('--overlap-grad-reduce', action='store_true', - default=False, help='If set, overlap DDP grad reduce.') - group.add_argument('--defer-embedding-wgrad-compute', action='store_true', - default=False, help='If set, defers the vocabulary projection linear layer weight' - 'gradient compute to pipeline flush.', dest='defer_embedding_wgrad_compute') - group.add_argument('--wgrad-deferral-limit', type=int, default=0, help='Number of micro-batches for which' - 'weight gradient computation of vocabulary projection is deferred, defaults to 0 which' - 'means all the micro-batches are deferred. Invalid if `defer-embedding-wgrad-compute`' - 'is not set') - group.add_argument('--no-align-grad-reduce', action='store_false', - help='If not set, all PP stages will launch gradient reduces simultaneously. ' - 'Otherwise, each PP stage will independently launch as needed.', - dest='align_grad_reduce') - group.add_argument('--ddp-bucket-size', type=int, default=None, - help='Bucket size for data-parallel communication') - group.add_argument('--ddp-average-in-collective', action='store_true', - default=False, help='If set, average directly in data-parallel communication collective.') - group.add_argument('--overlap-param-gather', action='store_true', - default=False, help='If set, overlap param all-gather in distributed optimizer.') - group.add_argument('--overlap-param-gather-with-optimizer-step', action='store_true', - default=False, help='If set, overlap param all-gather of first bucket with optimizer step.') - group.add_argument('--no-align-param-gather', action='store_false', - help='If not set, all PP stages will launch param all-gathers simultaneously. 
' - 'Otherwise, each PP stage will independently launch as needed.', - dest='align_param_gather') - group.add_argument('--no-scatter-gather-tensors-in-pipeline', action='store_false', - help='If not set, use scatter/gather to optimize communication of tensors in pipeline.', - dest='scatter_gather_tensors_in_pipeline') - group.add_argument('--use-ring-exchange-p2p', action='store_true', - default=False, help='If set, use custom-built ring exchange ' - 'for p2p communications. Note that this option will require ' - 'a custom built image that support ring-exchange p2p.') - group.add_argument('--local-rank', type=int, default=int(os.getenv('LOCAL_RANK', '0')), - help='local rank passed from distributed launcher.') - group.add_argument('--lazy-mpu-init', type=bool, required=False, - help='If set to True, initialize_megatron() ' - 'skips DDP initialization and returns function to ' - 'complete it instead.Also turns on ' - '--use-cpu-initialization flag. This is for ' - 'external DDP manager.' ) - group.add_argument('--standalone-embedding-stage', action='store_true', - default=False, help='If set, *input* embedding layer ' - 'is placed on its own pipeline stage, without any ' - 'transformer layers. (For T5, this flag currently only ' - 'affects the encoder embedding.)') - group.add_argument('--use-distributed-optimizer', action='store_true', - help='Use distributed optimizer.') - group.add_argument('--num-distributed-optimizer-instances', type=int, default=1, - help='Number of Distributed Optimizer copies across Data Parallel domain.') - group.add_argument('--use-torch-fsdp2', action='store_true', - help="Use the torch FSDP2 implementation. FSDP2 is not currently working with Pipeline Parallel." - "It is still not in a stable release stage, and may therefore contain bugs or other potential issues.") - group.add_argument('--context-parallel-size', type=int, default=1, - help='Degree of context parallelism.') - group.add_argument('--cp-comm-type', nargs='+', type=str, default=["p2p"], - help='Inter-gpu communication type for context parallelism: ' - 'p2p, a2a, allgather or a2a+p2p. If a single string is provided, ' - 'all layers will share the same communication type. Users can also ' - 'specify separated types for each layer like ' - '--cp-comm-type p2p p2p a2a a2a a2a+p2p a2a+p2p') - group.add_argument('--hierarchical-context-parallel-sizes', nargs='+', type=int, default=None, - help='Degrees of the hierarchical context parallelism. Users should ' - 'provide a list to specify the sizes for different levels. ' - '--hierarchical-context-parallel-sizes 2 4 indicates every two adjacent gpus ' - 'forms the first level of cp groups and the cp ranks with the same odevity ' - 'forms the second level of cp groups.') - group.add_argument('--nccl-communicator-config-path', type=str, default=None, - help='Path to the yaml file with NCCL communicator ' - 'configurations. The number of min/max thread groups and thread ' - 'group cluster size of each communicator can be configured by ' - 'setting `min_ctas`, `max_ctas`, and `cga_cluster_size`.') - group.add_argument('--use-tp-pp-dp-mapping', action='store_true', default=False, - help='If set, distributed ranks initialize order is changed ' - 'from tp-dp-pp to tp-pp-dp. 
Make sure EP and CP aren\'t used ' - 'with this option enabled') - group.add_argument('--rank', default=-1, type=int, - help='node rank for distributed training') - group.add_argument('--world-size', type=int, default=8, - help='number of nodes for distributed training') - group.add_argument('--dist-url', - help='Which master node url for distributed training.') - return parser - - -def _add_validation_args(parser): - group = parser.add_argument_group(title='validation') - - group.add_argument('--eval-iters', type=int, default=100, - help='Number of iterations to run for evaluation' - 'validation/test for.') - group.add_argument('--eval-interval', type=int, default=1000, - help='Interval between running evaluation on ' - 'validation set.') - group.add_argument("--test-mode", action="store_true", help='Run all real-time test alongside the experiment.') - group.add_argument('--skip-train', action='store_true', - default=False, help='If set, bypass the training loop, ' - 'optionally do evaluation for validation/test, and exit.') - - return parser - - -def _add_tokenizer_args(parser): - group = parser.add_argument_group(title='tokenizer') - group.add_argument('--vocab-size', type=int, default=None, - help='Size of vocab before EOD or padding.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file.') - group.add_argument('--merge-file', type=str, default=None, - help='Path to the BPE merge file.') - group.add_argument('--vocab-extra-ids', type=int, default=0, - help='Number of additional vocabulary tokens. ' - 'They are used for span masking in the T5 model') - group.add_argument('--tokenizer-type', type=str, - default=None, - choices=['BertWordPieceLowerCase', - 'BertWordPieceCase', - 'GPT2BPETokenizer', - 'SentencePieceTokenizer', - 'GPTSentencePieceTokenizer', - 'HuggingFaceTokenizer', - 'Llama2Tokenizer', - 'Llama3Tokenizer', - 'QwenTokenizer', - 'TikTokenizer', - 'MultimodalTokenizer', - 'NullTokenizer'], - help='What type of tokenizer to use.') - group.add_argument('--tokenizer-model', type=str, default=None, - help='Sentencepiece tokenizer model.') - group.add_argument('--tiktoken-pattern', type=str, default=None, - help='Which tiktoken pattern to use. Options: [v1, v2]') - group.add_argument('--tiktoken-num-special-tokens', type=int, default=1000, - help='Number of special tokens in tiktoken tokenizer') - group.add_argument('--tiktoken-special-tokens', type=str, nargs='+', default=None, - help='List of tiktoken special tokens, needs to have ["", "", ""]') - return parser - - -def _add_data_args(parser): - group = parser.add_argument_group(title='data and dataloader') - - group.add_argument('--data-path', nargs='*', default=None, - help='The weight and prefix list for a set of train, validation, and test' - 'datasets which split according to --split. The accepted formats are: ' - '(1) a single prefix, ' - '(2) a list of weight prefix pairs e.g. weight1 prefix1 weight2 prefix2, ' - '(3) a list of prefixes e.g. prefix1 prefix2. ' - 'For (3), weights are inferred from the lengths of the contributing datasets. ' - 'This argument is exclusive to the other independent --*-data-path arguments.') - group.add_argument('--renormalize-blend-weights', action='store_true', - help='Renormalize the blend weights to account for the mid-level dataset ' - 'oversampling done to ensure fulfillment of the requested number of ' - 'samples. Use this option if prompted. 
Defaults to False for backward ' - 'comparability in the data sample order.') - group.add_argument('--split', type=str, default=None, - help='Comma-separated list of proportions for training,' - ' validation, and test split. For example the split ' - '`90,5,5` will use 90%% of data for training, 5%% for ' - 'validation and 5%% for test.') - group.add_argument('--train-data-path', nargs='*', default=None, - help='The weight and prefix list for an independent train dataset. ' - 'Follows the same pattern rules as --data-path.') - group.add_argument('--valid-data-path', nargs='*', default=None, - help='The weight and prefix list for an independent validation dataset. ' - 'Follows the same pattern rules as --data-path.') - group.add_argument('--test-data-path', nargs='*', default=None, - help='The weight and prefix list for an independent test dataset. ' - 'Follows the same pattern rules as --data-path.') - group.add_argument('--data-args-path', type=str, default=None, - help='Path to data-args. Instead of feeding `--data-path` ' - 'with weighted dataset, we pass in a file path from which ' - 'we read that argument. This is useful when the list of data is ' - 'too big.') - group.add_argument('--per-split-data-args-path', type=str, default=None, - help='Path to per-split-data-args. Instead of feeding ' - '`--(train|valid|test)-data-path` with weighted dataset, ' - 'we pass in a file path from which we read those arguments. ' - 'This is useful when the list of data is too big. Format is a ' - 'json file with `train`, `valid, `test` keys') - group.add_argument('--data-cache-path', default=None, - help='Path to a directory to hold cached index files.') - group.add_argument('--no-mmap-bin-files', action='store_false', - help='Disable mmap-ing of .bin files.', - dest='mmap_bin_files') - group.add_argument('--mock-data', action='store_true', - help='Skip data loading and validation and opt for artificial ' - 'generation of mock data when an implementation is available.') - group.add_argument('--seq-length', type=int, default=None, - help='Maximum sequence length to process.') - group.add_argument('--encoder-seq-length', type=int, default=None, - help='Maximum encoder sequence length to process.' - 'This should be exclusive of --seq-length') - group.add_argument('--decoder-seq-length', type=int, default=None, - help="Maximum decoder sequence length to process.") - group.add_argument('--retriever-seq-length', type=int, default=256, - help='Maximum sequence length for the biencoder model ' - 'for retriever') - group.add_argument('--sample-rate', type=float, default=1.0, - help='sample rate for training data. 
Supposed to be 0 ' - ' < sample_rate < 1') - group.add_argument('--mask-prob', type=float, default=0.15, - help='Probability of replacing a token with mask.') - group.add_argument('--short-seq-prob', type=float, default=0.1, - help='Probability of producing a short sequence.') - group.add_argument('--num-workers', type=int, default=2, - help="Dataloader number of workers.") - group.add_argument('--reset-position-ids', action='store_true', - help='Reset posistion ids after end-of-document token.') - group.add_argument('--reset-attention-mask', action='store_true', - help='Reset self attention maske after ' - 'end-of-document token.') - group.add_argument('--eod-mask-loss', action='store_true', - help='Mask loss for the end of document tokens.') - group.add_argument('--no-create-attention-mask-in-dataloader', action='store_false', - help='If set, do not create attention_masks in dataloader.', - dest='create_attention_mask_in_dataloader') - group.add_argument('--num-dataset-builder-threads', type=int, default=1, - help='Number of parallel threads per rank for dataset builder') - group.add_argument('--s3-cache-path', type=str, default=None, - help='Path to cache index files when using s3 dataloader') - return parser - - -def _add_autoresume_args(parser): - group = parser.add_argument_group(title='autoresume') - - group.add_argument('--adlr-autoresume', action='store_true', - help='Enable autoresume on adlr cluster.') - group.add_argument('--adlr-autoresume-interval', type=int, default=1000, - help='Intervals over which check for autoresume' - 'termination signal') - - return parser - - -def _add_biencoder_args(parser): - group = parser.add_argument_group(title='biencoder') - - # network size - group.add_argument('--ict-head-size', type=int, default=None, - help='Size of block embeddings to be used in ICT and ' - 'REALM (paper default: 128)') - group.add_argument('--biencoder-projection-dim', type=int, default=0, - help='Size of projection head used in biencoder (paper' - ' default: 128)') - group.add_argument('--biencoder-shared-query-context-model', action='store_true', - help='Whether to share the parameters of the query ' - 'and context models or not') - - # checkpointing - group.add_argument('--ict-load', type=str, default=None, - help='Directory containing an ICTBertModel checkpoint') - group.add_argument('--bert-load', type=str, default=None, - help='Directory containing an BertModel checkpoint ' - '(needed to start ICT and REALM)') - - # data - group.add_argument('--titles-data-path', type=str, default=None, - help='Path to titles dataset used for ICT') - group.add_argument('--query-in-block-prob', type=float, default=0.1, - help='Probability of keeping query in block for ' - 'ICT dataset') - group.add_argument('--use-one-sent-docs', action='store_true', - help='Whether to use one sentence documents in ICT') - group.add_argument('--evidence-data-path', type=str, default=None, - help='Path to Wikipedia Evidence frm DPR paper') - - # training - group.add_argument('--retriever-report-topk-accuracies', nargs='+', type=int, - default=[], help="Which top-k accuracies to report " - "(e.g. 
'1 5 20')") - group.add_argument('--retriever-score-scaling', action='store_true', - help='Whether to scale retriever scores by inverse ' - 'square root of hidden size') - - # faiss index - group.add_argument('--block-data-path', type=str, default=None, - help='Where to save/load BlockData to/from') - group.add_argument('--embedding-path', type=str, default=None, - help='Where to save/load Open-Retrieval Embedding' - ' data to/from') - - # indexer - group.add_argument('--indexer-batch-size', type=int, default=128, - help='How large of batches to use when doing indexing ' - 'jobs') - group.add_argument('--indexer-log-interval', type=int, default=1000, - help='After how many batches should the indexer ' - 'report progress') - return parser - - -def _add_vision_args(parser): - group = parser.add_argument_group(title="vision") - - # general vision arguements - group.add_argument('--num-classes', type=int, default=1000, - help='num of classes in vision classificaiton task') - group.add_argument('--img-h', type=int, default=224, - help='Image height for vision classification task') - group.add_argument('--img-w', type=int, default=224, - help='Image height for vision classification task') - group.add_argument('--num-channels', type=int, default=3, - help='Number of channels in input image data') - group.add_argument('--patch-dim', type=int, default=16, - help='patch dimension') - group.add_argument('--classes-fraction', type=float, default=1.0, - help='training with fraction of classes.') - group.add_argument('--data-per-class-fraction', type=float, default=1.0, - help='training with fraction of data per class.') - group.add_argument('--no-data-sharding', action='store_false', - help='Disable data sharding.', - dest='data_sharding') - group.add_argument('--head-lr-mult', type=float, default=1.0, - help='learning rate multiplier for head during finetuning') - - # pretraining type and backbone selection` - group.add_argument('--vision-pretraining', action='store_true', - help='flag to indicate vision pretraining') - group.add_argument('--vision-pretraining-type', type=str, default='classify', - choices=['classify', 'inpaint', 'dino'], - help='pretraining objectives') - group.add_argument('--vision-backbone-type', type=str, default='vit', - choices=['vit', 'mit', 'swin'], - help='backbone types types') - group.add_argument('--swin-backbone-type', type=str, default='tiny', - choices=['tiny', 'base', 'h3'], - help='pretraining objectives') - # inpainting arguments - group.add_argument('--mask-type', type=str, default='random', - choices=['random', 'row'], - help='mask types') - group.add_argument('--mask-factor', type=float, default=1.0, - help='mask size scaling parameter') - - # dino arguments - group.add_argument('--iter-per-epoch', type=int, default=1250, - help='iterations per epoch') - group.add_argument('--dino-local-img-size', type=int, default=96, - help='Image size for vision classification task') - group.add_argument('--dino-local-crops-number', type=int, default=10, - help='Number of local crops') - group.add_argument('--dino-head-hidden-size', type=int, default=2048, - help='Hidden dimension size in dino head') - group.add_argument('--dino-bottleneck-size', type=int, default=256, - help='Bottle neck dimension in dino head ') - group.add_argument('--dino-freeze-last-layer', type=float, default=1, - help='Freezing last layer weights') - group.add_argument('--dino-norm-last-layer', action='store_true', - help='Disable Norm in last layer.') - group.add_argument('--dino-warmup-teacher-temp', 
type=float, default=0.04, - help='warump teacher temperature') - group.add_argument('--dino-teacher-temp', type=float, default=0.07, - help='teacher temperature') - group.add_argument('--dino-warmup-teacher-temp-epochs', type=int, default=30, - help='warmup teacher temperaure epochs') - - # regularization arguments - group.add_argument('--qk-layernorm', action='store_true', - help='Whether to layer normalize the q and k attention embeddings.') - - return parser - -def _add_moe_args(parser): - group = parser.add_argument_group(title="moe") - # General arguments - group.add_argument('--expert-model-parallel-size', type=int, default=1, - help='Degree of expert model parallelism.') - group.add_argument('--expert-tensor-parallel-size', type=int, default=None, - help='Degree of expert model parallelism. Default is None, which will be set to the value of --tensor-model-paralle-size.') - group.add_argument('--num-experts', type=int, default=None, - help='Number of Experts in MoE (None means no MoE)') - group.add_argument('--moe-layer-freq', type=moe_freq_type, default=1, - help='Frequency between MoE layers and Dense layers. Accepts either: ' - '- An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers ' - '- A string containing a Python list expression that defines a custom pattern, e.g.: ' - '"([1]*3+[0]*1)*3" evaluates to [1,1,1,0,1,1,1,0,1,1,1,0] ' - 'where 1 indicates an expert layer and 0 indicates a dense layer. ' - 'Examples: "([0]+[1]*23)": 1 dense layer followed by 23 experts layers, ' - '"([1]*3+[0]*2)*2": Three expert layers followed by two dense layers, repeated twice.') - group.add_argument('--moe-ffn-hidden-size', type=int, default=None, - help='The hidden size of each expert\'s feed-forward network (ffn). ' - 'If not specified, defaults to the ffn_hidden_size.') - group.add_argument('--moe-shared-expert-intermediate-size', type=int, default=None, - help='Shared expert total ffn hidden size. ' - 'It should be equal to "num_shared_experts * ffn_size_of_each_shared_expert" if there are multiple shared experts. ' - 'None means no shared expert.') - group.add_argument('--moe-shared-expert-overlap', action='store_true', - help='Enable overlapping between shared expert computations and dispatcher communications. ' - 'Without this, the shared epxerts execute after the routed experts. ' - 'Only effective when moe-shared-expert-intermediate-size is set.') - group.add_argument('--moe-grouped-gemm', action='store_true', - help='When there are multiple experts per rank, launch multiple local GEMM kernels in multiple streams to improve the utilization and performance with GroupedLinear in TransformerEngine.') - # Router arguments - group.add_argument('--moe-router-load-balancing-type', type=str, - choices=['aux_loss', 'seq_aux_loss', 'sinkhorn', 'none'], - default='aux_loss', - help='Determines the load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss used in GShard and SwitchTransformer; "seq_aux_loss" corresponds to the load balancing loss used in DeepSeekV2, which computes the loss for each individual sample; "sinkhorn" corresponds to the balancing algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss".') - group.add_argument('--moe-router-topk', type=int, default=2, - help='Number of experts to route to for each token. 
The default is 2.') - group.add_argument('--moe-router-pre-softmax', action='store_true', - help='Enable pre-softmax routing for MoE, which means softmax is before the top-k selection. By default, softmax is done after top-k.') - group.add_argument('--moe-router-topk-limited-devices', type=int, default=None, - help='Number of expert parallel ranks to consider for each token during routing. Perform top-k routing on a subset of expert parallel ranks by first selecting N ranks for each token, then conducting top-k selection among experts on these devices. Default is None, which means no limited devices.') - group.add_argument('--moe-router-topk-scaling-factor', type=float, default=None, - help='Scaling factor for routing score in top-k selection, only works when --moe-router-pre-softmax enabled. Defaults to None, which means no scaling.') - group.add_argument('--moe-use-legacy-grouped-gemm', action='store_true', - help='Use legacy GroupedMLP rather than TEGroupedMLP. Note: The legacy one will be deprecated soon.') - group.add_argument('--moe-aux-loss-coeff', type=float, default=0.0, - help='Scaling coefficient for the aux loss: a starting value of 1e-2 is recommended.') - group.add_argument('--moe-z-loss-coeff', type=float, default=None, - help='Scaling coefficient for the z-loss: a starting value of 1e-3 is recommended.') - group.add_argument('--moe-input-jitter-eps', type=float, default=None, - help='Add noise to the input tensor by applying jitter with a specified epsilon value.') - group.add_argument('--moe-token-dispatcher-type', type=str, - choices=['allgather', 'alltoall', 'alltoall_seq'], - default='allgather', - help="The type of token dispatcher to use. The default is 'allgather'. Options are 'allgather', 'alltoall' and 'alltoall_seq'. We recommend using 'alltoall' when applying expert parallelism. For more information, please refer to the documentation in core/moe/README.") - group.add_argument('--moe-per-layer-logging', action='store_true', - help='Enable per-layer logging for MoE, currently supports auxiliary loss and z loss.') - # Token dropping arguments - group.add_argument('--moe-expert-capacity-factor', type=float, default=None, - help='The capacity factor for each expert, None means no token will be dropped.') - group.add_argument('--moe-pad-expert-input-to-capacity', action='store_true', - help='Pads the input for each expert to match the expert capacity length, effective only after the --moe-expert-capacity-factor is set.') - group.add_argument('--moe-token-drop-policy', type=str, default='probs', choices=['probs', 'position'], - help='The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with the lowest probabilities will be dropped. If "position", tokens at the end of each batch will be dropped.') - group.add_argument('--moe-layer-recompute', action='store_true', - help='Enable checkpointing for moe_layer, should be used when memory is not sufficient.') - group.add_argument('--moe-extended-tp', action='store_true', - help='Deprecated. Use --expert-tensor-parallel-size instead.') - group.add_argument('--moe-use-upcycling', action='store_true', - help='Load a checkpoint of a dense model, convert it into an MoE model, and save the converted model to the path specified by --save. 
' - 'Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model.') - - return parser - -def _add_mla_args(parser): - group = parser.add_argument_group(title="mla") - group.add_argument('--q-lora-rank', type=int, default=None, - help="Rank of Query tensor's low rank representation.") - group.add_argument('--kv-lora-rank', type=int, default=32, - help="Rank of Key and Value tensors' low rank representation.") - group.add_argument('--qk-head-dim', type=int, default=128, - help="Dimension of the head in the QK projection. q_head_dim = qk_head_dim + qk_pos_emb_head_dim") - group.add_argument('--qk-pos-emb-head-dim', type=int, default=64, - help="Dimension of the position embedding in the QK projection.") - group.add_argument('--v-head-dim', type=int, default=128, - help="Dimension of the head in the V projection.") - group.add_argument('--rotary-scaling-factor', type=float, default=1.0, - help="Rotary scaling factor for the rotary embeddings.") - - return parser - -def _add_experimental_args(parser): - group = parser.add_argument_group(title='experimental') - - group.add_argument('--spec', type=str, default=None, nargs='*', - help='Specify the pair ' - 'that returns a spec to customize a model, transformer ' - 'block, or transformer layer, depending on the use case.' - 'To use local spec specify local as the argument.' - 'For more details, see the model class, ' - '`transformer_block.py`, or `transformer_layer.py`') - group.add_argument('--hybrid-attention-ratio', type=float, default=0.0, - help='Ratio of attention layers to total layers, in the ' - 'range [0.0, 1.0].') - group.add_argument('--hybrid-mlp-ratio', type=float, default=0.0, - help='Ratio of mlp layers to total layers, in the ' - 'range [0.0, 1.0].') - group.add_argument('--hybrid-override-pattern', type=str, default=None, - help='Force a specific hybrid layer pattern. The value' - 'should be a string of characters chosen from' - 'core.ssm.mamba_hybrid_layer_allocation.Symbols.' - 'If a value greater than 0.0 is supplied to any of the ' - 'hybrid ratio arguments, then the number of each type' - 'of layer in the override pattern must match number in' - 'the overidden pattern') - group.add_argument('--yaml-cfg', type=str, default=None, - help = 'Config file to add additional arguments') - - # Args of precision-aware optimizer - group.add_argument('--use-precision-aware-optimizer', action='store_true', - help='Use the precision-aware optimizer in TransformerEngine, which allows ' - 'setting the main params and optimizer states to lower precision, such as ' - 'fp16 and fp8.') - group.add_argument('--main-grads-dtype', default='fp32', choices=['fp32', 'bf16'], - help='Dtype of main grads when enabling precision-aware-optimizer') - group.add_argument('--main-params-dtype', default='fp32', choices=['fp32', 'fp16'], - help='Dtype of main params when enabling precision-aware-optimizer') - group.add_argument('--exp-avg-dtype', default='fp32', choices=['fp32', 'fp16', 'fp8'], - help='Dtype of exp_avg when enabling precision-aware-optimizer') - group.add_argument('--exp-avg-sq-dtype', default='fp32', choices=['fp32', 'fp16', 'fp8'], - help='Dtype of exp_avg_sq when enabling precision-aware-optimizer') - return parser +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
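(Editorial note, not part of the patch.) Many of the options removed above follow argparse's negated-flag pattern: the user-facing switch is the `--no-*` form, while `dest` keeps the positive attribute name and `action='store_false'` flips its default. A minimal, runnable sketch of that pattern, reusing two flag names that appear above:

import argparse

# Minimal sketch of the negated-flag pattern used by options such as
# --no-rope-fusion and --no-bias-gelu-fusion above; not part of this patch.
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('--no-rope-fusion', action='store_false', dest='apply_rope_fusion',
                    help='Disable rope fusion.')
parser.add_argument('--no-bias-gelu-fusion', action='store_false', dest='bias_gelu_fusion',
                    help='Disable bias and gelu fusion.')

args = parser.parse_args([])                    # no flags: both attributes default to True
assert args.apply_rope_fusion and args.bias_gelu_fusion

args = parser.parse_args(['--no-rope-fusion'])  # passing the flag stores False
assert not args.apply_rope_fusion and args.bias_gelu_fusion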
+ +"""Megatron arguments.""" + +import argparse +import dataclasses +import json +import os +import types +import warnings +from packaging.version import Version as PkgVersion + +import torch +import torch.nn.functional as F + +from megatron.core.dist_checkpointing.validation import StrictHandling +from megatron.core.models.retro.utils import ( + get_config_path as get_retro_config_path, + get_gpt_data_dir as get_retro_data_dir, +) +from megatron.core.rerun_state_machine import RerunStateMachine +from megatron.core.transformer import TransformerConfig, MLATransformerConfig +from megatron.core.transformer.enums import AttnBackend +from megatron.core.utils import ( + is_torch_min_version, + get_torch_version, +) +from megatron.training.activations import squared_relu +from megatron.training.utils import update_use_dist_ckpt, get_device_arch_version + + +def parse_args(extra_args_provider=None, ignore_unknown_args=False): + """Parse all arguments.""" + parser = argparse.ArgumentParser(description='Megatron-LM Arguments', + allow_abbrev=False) + + # Standard arguments. + parser = _add_network_size_args(parser) + parser = _add_regularization_args(parser) + parser = _add_training_args(parser) + parser = _add_initialization_args(parser) + parser = _add_learning_rate_args(parser) + parser = _add_checkpointing_args(parser) + parser = _add_mixed_precision_args(parser) + parser = _add_distributed_args(parser) + parser = _add_validation_args(parser) + parser = _add_data_args(parser) + parser = _add_tokenizer_args(parser) + parser = _add_autoresume_args(parser) + parser = _add_biencoder_args(parser) + parser = _add_vision_args(parser) + parser = _add_moe_args(parser) + parser = _add_mla_args(parser) + parser = _add_logging_args(parser) + parser = _add_straggler_detector_args(parser) + parser = _add_inference_args(parser) + parser = _add_transformer_engine_args(parser) + parser = _add_retro_args(parser) + parser = _add_experimental_args(parser) + parser = _add_one_logger_args(parser) + parser = _add_ft_package_args(parser) + parser = _add_config_logger_args(parser) + parser = _add_rerun_machine_args(parser) + + # Custom arguments. + if extra_args_provider is not None: + parser = extra_args_provider(parser) + + # Parse. + if ignore_unknown_args: + args, _ = parser.parse_known_args() + else: + args = parser.parse_args() + + # Experimental yaml + if args.yaml_cfg is not None: + from .yaml_arguments import load_yaml + assert args.yaml_cfg and not args.use_legacy_models, \ + "Yaml config is not supported with legacy models." + args = load_yaml(args.yaml_cfg) + + + # Args from environment + #args.rank = int(os.getenv('RANK', '0')) + #args.world_size = int(os.getenv("WORLD_SIZE", '1')) + + return args + + +def load_retro_config(retro_project_dir): + '''Load Retro's config.json.''' + + # Retro config path. + retro_config_path = get_retro_config_path(retro_project_dir) + assert os.path.exists(retro_config_path), \ + "Retro project dir missing config.json." + + # Load retro config. + with open(retro_config_path) as f: + retro_config = types.SimpleNamespace(**json.load(f)) + + return retro_config + + +def load_retro_args(args): + """Load predefined args from Retro config (if applicable). + + When using Retro (or GPT for comparison purposes), data arguments are + overridden by the saved config.json within the Retro project directory. 
This + is to ensure that the data used for pretraining is consistent with the data + that was preprocessed using the Retro preprocessing pipeline (see + `tools/retro/preprocess_data.py`). + """ + + # Return if no project directory is specified. + if args.retro_project_dir is None: + return + + # Load retro config. + retro_config = load_retro_config(args.retro_project_dir) + + # Retro data path is relative to project dir (via hard or soft links). + data_dir = get_retro_data_dir(args.retro_project_dir) + data_path = list(retro_config.retro_gpt_data_path) + if len(data_path) % 2 == 0: + for i in range(len(data_path) - 1, -1, -2): + data_path[i] = os.path.join(data_dir, data_path[i]) + else: + assert len(data_path) == 1 + data_path[0] = os.path.join(data_dir, data_path[0]) + + # Update args. + args.data_cache_path = retro_config.retro_gpt_data_cache_path + args.data_path = data_path if args.data_path is None else args.data_path + args.eval_interval = retro_config.retro_gpt_eval_interval + args.eval_iters = retro_config.retro_gpt_eval_iters + args.global_batch_size = retro_config.retro_gpt_global_batch_size + args.max_position_embeddings = retro_config.retro_gpt_seq_length + args.merge_file = os.path.join( + args.retro_project_dir, + retro_config.retro_gpt_merge_file, + ) if retro_config.retro_gpt_merge_file is not None else None + args.seed = retro_config.retro_gpt_seed + args.seq_length = retro_config.retro_gpt_seq_length + args.tokenizer_model = os.path.join( + args.retro_project_dir, + retro_config.retro_gpt_tokenizer_model, + ) if retro_config.retro_gpt_tokenizer_model is not None else None + args.tokenizer_type = retro_config.retro_gpt_tokenizer_type + args.train_samples = retro_config.retro_gpt_train_samples + args.vocab_file = os.path.join( + args.retro_project_dir, + retro_config.retro_gpt_vocab_file, + ) if retro_config.retro_gpt_vocab_file is not None else None + + # Retro-specific args. + args.retro_block_size = retro_config.retro_block_size + args.retro_chunk_length = retro_config.retro_gpt_chunk_length + args.retro_neighbor_dirs = retro_config.retro_neighbor_dirs + args.retro_split_preprocessing = retro_config.retro_gpt_split + args.retro_bert_tokenizer_type = retro_config.retro_bert_tokenizer_type + args.retro_bert_vocab_file = retro_config.retro_bert_vocab_file + +def moe_freq_type(x): + """Frequency between MoE layers and Dense layers. + + Accepts either: + - An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers + - A string "N": Same as above, but provided as a string + - A string containing a Python list expression that defines a custom pattern, e.g.: + "([1]*3+[0]*1)*3" evaluates to [1,1,1,0,1,1,1,0,1,1,1,0] + where 1 indicates an expert layer and 0 indicates a dense layer. + This allows defining arbitrary patterns of expert and dense layers. + The pattern length must match the total number of transformer layers. + Examples: + "([0]+[1]*23)": 1 dense layer followed by 23 experts layers + "([1]*3+[0]*2)*2": Three expert layers followed by two dense layers, repeated twice. 
+ """ + if isinstance(x, int): + return x + assert isinstance(x, str) + if '[' in x: + # it's a custom pattern + pattern = eval(x) + return pattern + else: + # it's a single int but in str + return int(x) + + +def validate_args(args, defaults={}): + + # Temporary + assert args.non_persistent_ckpt_type in ['global', 'local', None], \ + 'Currently only global and local checkpoints are supported' + if args.non_persistent_ckpt_type == 'local': + try: + from nvidia_resiliency_ext.checkpointing.local.ckpt_managers.local_manager import \ + LocalCheckpointManager + except ModuleNotFoundError as e: + raise RuntimeError('nvidia_resiliency_ext is required for local checkpointing') from e + + # Load saved args from Retro (if applicable). + load_retro_args(args) + + # Set args.use_dist_ckpt from args.ckpt_format. + if args.use_legacy_models: + assert args.ckpt_format == "torch", \ + "legacy model format only supports the 'torch' checkpoint format." + update_use_dist_ckpt(args) + + if args.encoder_pipeline_model_parallel_size == 0 and args.num_experts == 0: + assert args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size, "If non-MOE encoder shares first decoder pipeline rank it must have the same TP as the decoder." + + if args.encoder_tensor_model_parallel_size > 0: + assert args.num_attention_heads % args.encoder_tensor_model_parallel_size == 0 + assert args.encoder_tensor_model_parallel_size <= args.tensor_model_parallel_size, "We do not support encoders with more TP than the decoder." + + if args.encoder_pipeline_model_parallel_size > 0 and args.encoder_tensor_model_parallel_size == 0: + args.encoder_tensor_model_parallel_size = args.tensor_model_parallel_size + + encoder_model_size = args.encoder_tensor_model_parallel_size * args.encoder_pipeline_model_parallel_size * args.context_parallel_size + decoder_model_size = args.tensor_model_parallel_size * args.pipeline_model_parallel_size * args.context_parallel_size + total_model_size = encoder_model_size + decoder_model_size + + # Total model size. + assert args.world_size % total_model_size == 0, ( + f"world size ({args.world_size}) is not divisible by total_model_size ({encoder_model_size=} + {decoder_model_size=})" + ) + + if args.attention_backend == AttnBackend.local: + assert args.spec[0] == 'local' , '--attention-backend local is only supported with --spec local' + + # Pipeline model parallel size. + args.transformer_pipeline_model_parallel_size = args.pipeline_model_parallel_size + + args.data_parallel_size = args.world_size // total_model_size + + if args.rank == 0: + print('using world size: {}, data-parallel size: {}, ' + 'context-parallel size: {}, ' + 'hierarchical context-parallel sizes: {}' + 'tensor-model-parallel size: {}, ' + 'encoder-tensor-model-parallel size: {}, ' + 'pipeline-model-parallel size: {}, ' + 'encoder-pipeline-model-parallel size: {}'.format( + args.world_size, args.data_parallel_size, + args.context_parallel_size, + args.hierarchical_context_parallel_sizes, + args.tensor_model_parallel_size, + args.encoder_tensor_model_parallel_size, + args.pipeline_model_parallel_size, + args.encoder_pipeline_model_parallel_size), flush=True) + + # Checks. + + # Backwards compatibility. 
+ if args.pipeline_model_parallel_split_rank is not None: + args.encoder_pipeline_model_parallel_size = args.pipeline_model_parallel_split_rank + args.pipeline_model_parallel_size -= args.encoder_pipeline_model_parallel_size + assert args.pipeline_model_parallel_size > 0 + + if args.hierarchical_context_parallel_sizes: + from numpy import prod + assert args.context_parallel_size == prod(args.hierarchical_context_parallel_sizes) + if "a2a+p2p" in args.cp_comm_type: + assert args.hierarchical_context_parallel_sizes is not None, \ + "--hierarchical-context-parallel-sizes must be set when a2a+p2p is used in cp comm" + + if args.expert_tensor_parallel_size is None: + args.expert_tensor_parallel_size = args.tensor_model_parallel_size + + # Deprecated arguments. + assert args.batch_size is None, '--batch-size argument is no longer ' \ + 'valid, use --micro-batch-size instead' + del args.batch_size + assert args.warmup is None, '--warmup argument is no longer valid, use ' \ + '--lr-warmup-fraction instead' + del args.warmup + assert args.model_parallel_size is None, '--model-parallel-size is no ' \ + 'longer valid, use --tensor-model-parallel-size instead' + del args.model_parallel_size + + if args.checkpoint_activations: + if args.rank == 0: + print('--checkpoint-activations is no longer valid, use --recompute-activations, ' + 'or, for more control, --recompute-granularity and --recompute-method.') + exit() + del args.checkpoint_activations + + if args.recompute_activations: + args.recompute_granularity = 'selective' + del args.recompute_activations + + # Set input defaults. + for key in defaults: + # For default to be valid, it should not be provided in the + # arguments that are passed to the program. We check this by + # ensuring the arg is set to None. + if getattr(args, key, None) is not None: + if args.rank == 0: + print('WARNING: overriding default arguments for {key}:{v} \ + with {key}:{v2}'.format(key=key, v=defaults[key], + v2=getattr(args, key)), + flush=True) + else: + setattr(args, key, defaults[key]) + + if args.data_path is not None and args.split is None: + legacy_default_split_value = '969, 30, 1' + if args.rank == 0: + print('WARNING: Please specify --split when using --data-path. Using legacy default value ' + f'of "{legacy_default_split_value}"') + args.split = legacy_default_split_value + + use_data_path = (args.data_path is not None) or (args.data_args_path is not None) + if use_data_path: + # Exactly one of the two has to be None if we use it. + assert (args.data_path is None) or (args.data_args_path is None) + use_per_split_data_path = any( + elt is not None + for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) or \ + args.per_split_data_args_path is not None + if use_per_split_data_path: + # Exactly one of the two has to be None if we use it. + assert any(elt is not None + for elt in [args.train_data_path, args.valid_data_path, args.test_data_path]) is False or \ + args.per_split_data_args_path is None + + # Batch size. 
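(Editorial worked example with illustrative numbers, not part of the patch; the batch-size checks continue right below.) validate_args derives data_parallel_size from the world size and the combined model-parallel footprint, and, when --global-batch-size is not given, defaults it to micro_batch_size * data_parallel_size:

# Illustrative values only; mirrors the sizing arithmetic in validate_args above.
world_size = 64
tensor_mp, pipeline_mp, context_p = 4, 2, 1      # decoder TP/PP/CP
encoder_tp, encoder_pp = 0, 0                    # no separate encoder ranks

encoder_model_size = encoder_tp * encoder_pp * context_p   # 0
decoder_model_size = tensor_mp * pipeline_mp * context_p   # 8
total_model_size = encoder_model_size + decoder_model_size

assert world_size % total_model_size == 0
data_parallel_size = world_size // total_model_size        # 8

micro_batch_size = 2
global_batch_size = micro_batch_size * data_parallel_size  # default: 16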
+ assert args.micro_batch_size is not None + assert args.micro_batch_size > 0 + if args.global_batch_size is None: + args.global_batch_size = args.micro_batch_size * args.data_parallel_size + if args.rank == 0: + print('setting global batch size to {}'.format( + args.global_batch_size), flush=True) + assert args.global_batch_size > 0 + + # Uneven virtual pipeline parallelism + assert args.num_layers_per_virtual_pipeline_stage is None or args.num_virtual_stages_per_pipeline_rank is None, \ + '--num-layers-per-virtual-pipeline-stage and --num-virtual-stages-per-pipeline-rank cannot be set at the same time' + + if args.num_layers_per_virtual_pipeline_stage is not None or args.num_virtual_stages_per_pipeline_rank is not None: + if args.overlap_p2p_comm: + assert args.pipeline_model_parallel_size > 1, \ + 'When interleaved schedule is used, pipeline-model-parallel size '\ + 'should be greater than 1' + else: + assert args.pipeline_model_parallel_size > 2, \ + 'When interleaved schedule is used and p2p communication overlap is disabled, '\ + 'pipeline-model-parallel size should be greater than 2 to avoid having multiple '\ + 'p2p sends and recvs between same 2 ranks per communication batch' + + if args.num_virtual_stages_per_pipeline_rank is None: + assert args.decoder_first_pipeline_num_layers is None and args.decoder_last_pipeline_num_layers is None, \ + 'please use --num-virtual-stages-per-pipeline-rank to specify virtual pipeline parallel degree when enabling uneven pipeline parallelism' + num_layers = args.num_layers + + if args.account_for_embedding_in_pipeline_split: + num_layers += 1 + + if args.account_for_loss_in_pipeline_split: + num_layers += 1 + + assert num_layers % args.transformer_pipeline_model_parallel_size == 0, \ + 'number of layers of the model must be divisible by the pipeline model parallel size' + num_layers_per_pipeline_stage = num_layers // args.transformer_pipeline_model_parallel_size + + assert num_layers_per_pipeline_stage % args.num_layers_per_virtual_pipeline_stage == 0, \ + 'number of layers per pipeline stage must be divisible by the number of layers per virtual pipeline stage' + args.virtual_pipeline_model_parallel_size = num_layers_per_pipeline_stage // \ + args.num_layers_per_virtual_pipeline_stage + else: + args.virtual_pipeline_model_parallel_size = args.num_virtual_stages_per_pipeline_rank + else: + args.virtual_pipeline_model_parallel_size = None + # Overlap P2P communication is disabled if not using the interleaved schedule. + args.overlap_p2p_comm = False + args.align_param_gather = False + # Only print warning if PP size > 1. + if args.rank == 0 and args.pipeline_model_parallel_size > 1: + print('WARNING: Setting args.overlap_p2p_comm and args.align_param_gather to False ' + 'since non-interleaved schedule does not support overlapping p2p communication ' + 'and aligned param AG') + + if args.decoder_first_pipeline_num_layers is None and args.decoder_last_pipeline_num_layers is None: + # Divisibility check not applicable for T5 models which specify encoder_num_layers + # and decoder_num_layers.
+ if args.num_layers is not None: + num_layers = args.num_layers + + if args.account_for_embedding_in_pipeline_split: + num_layers += 1 + + if args.account_for_loss_in_pipeline_split: + num_layers += 1 + + assert num_layers % args.transformer_pipeline_model_parallel_size == 0, \ + 'Number of layers should be divisible by the pipeline-model-parallel size' + if args.rank == 0: + print(f"Number of virtual stages per pipeline stage: {args.virtual_pipeline_model_parallel_size}") + + if args.data_parallel_sharding_strategy == "optim_grads_params": + args.overlap_param_gather = True + args.overlap_grad_reduce = True + + if args.data_parallel_sharding_strategy == "optim_grads": + args.overlap_grad_reduce = True + + if args.overlap_param_gather: + assert args.use_distributed_optimizer, \ + '--overlap-param-gather only supported with distributed optimizer' + assert args.overlap_grad_reduce, \ + 'Must use --overlap-param-gather with --overlap-grad-reduce' + assert not args.use_legacy_models, \ + '--overlap-param-gather only supported with MCore models' + + if args.use_torch_fsdp2: + assert is_torch_min_version("2.4.0"), \ + 'FSDP2 requires PyTorch >= 2.4.0 with FSDP 2 support.' + assert args.pipeline_model_parallel_size == 1, \ + '--use-torch-fsdp2 is not supported with pipeline parallelism' + assert args.expert_model_parallel_size == 1, \ + '--use-torch-fsdp2 is not supported with expert parallelism' + assert not args.use_distributed_optimizer, \ + "--use-torch-fsdp2 is not supported with MCore's distributed optimizer" + assert not args.gradient_accumulation_fusion, \ + '--use-torch-fsdp2 is not supported with gradient accumulation fusion' + assert args.ckpt_format == 'torch_dist', \ + '--use-torch-fsdp2 requires --ckpt-format torch_dist' + assert args.untie_embeddings_and_output_weights, \ + '--use-torch-fsdp2 requires --untie-embeddings-and-output-weights' + assert not args.fp16, \ + '--use-torch-fsdp2 not supported with fp16 yet' + + if args.overlap_param_gather_with_optimizer_step: + assert args.use_distributed_optimizer, \ + '--overlap-param-gather-with-optimizer-step only supported with distributed optimizer' + assert args.overlap_param_gather, \ + 'Must use --overlap-param-gather-with-optimizer-step with --overlap-param-gather' + assert args.virtual_pipeline_model_parallel_size is not None, \ + '--overlap-param-gather-with-optimizer-step only supported with interleaved pipeline parallelism' + assert not args.use_dist_ckpt, \ + '--overlap-param-gather-with-optimizer-step not supported with distributed checkpointing yet' + + dtype_map = { + 'fp32': torch.float32, 'bf16': torch.bfloat16, 'fp16': torch.float16, 'fp8': torch.uint8, + } + map_dtype = lambda d: d if isinstance(d, torch.dtype) else dtype_map[d] + + args.main_grads_dtype = map_dtype(args.main_grads_dtype) + args.main_params_dtype = map_dtype(args.main_params_dtype) + args.exp_avg_dtype = map_dtype(args.exp_avg_dtype) + args.exp_avg_sq_dtype = map_dtype(args.exp_avg_sq_dtype) + + if args.fp8_param_gather: + assert args.use_distributed_optimizer, \ + '--fp8-param-gather only supported with distributed optimizer' + + if args.use_custom_fsdp: + assert args.use_distributed_optimizer, \ + '--use-custom-fsdp only supported with distributed optimizer' + + if args.data_parallel_sharding_strategy in ["optim_grads_params", "optim_grads"]: + warnings.warn('Please make sure your TransformerEngine support FSDP + gradient accumulation fusion') + assert args.gradient_accumulation_fusion is False, \ + "optim_grads_params optim_grads are not supported 
with gradient accumulation fusion" + + if args.data_parallel_sharding_strategy == "optim_grads_params": + assert args.check_weight_hash_across_dp_replicas_interval is None, \ + 'check_weight_hash_across_dp_replicas_interval is not supported with optim_grads_params' + + # Parameters dtype. + args.params_dtype = torch.float + if args.fp16: + assert not args.bf16 + args.params_dtype = torch.half + # Turn off checking for NaNs in loss and grads if using dynamic loss scaling, + # where NaNs in grads / loss are signal to the loss scaler. + if not args.loss_scale: + args.check_for_nan_in_loss_and_grad = False + if args.rank == 0: + print('WARNING: Setting args.check_for_nan_in_loss_and_grad to False since ' + 'dynamic loss scaling is being used') + if args.bf16: + assert not args.fp16 + args.params_dtype = torch.bfloat16 + # bfloat16 requires gradient accumulation and all-reduce to + # be done in fp32. + if args.accumulate_allreduce_grads_in_fp32: + assert args.main_grads_dtype == torch.float32, \ + "--main-grads-dtype can only be fp32 when --accumulate-allreduce-grads-in-fp32 is set" + + if args.grad_reduce_in_bf16: + args.accumulate_allreduce_grads_in_fp32 = False + elif not args.accumulate_allreduce_grads_in_fp32 and args.main_grads_dtype == torch.float32: + args.accumulate_allreduce_grads_in_fp32 = True + if args.rank == 0: + print('accumulate and all-reduce gradients in fp32 for ' + 'bfloat16 data type.', flush=True) + + if args.rank == 0: + print('using {} for parameters ...'.format(args.params_dtype), + flush=True) + + if args.dataloader_type is None: + args.dataloader_type = 'single' + + # data + assert args.num_dataset_builder_threads > 0 + + # Consumed tokens. + args.consumed_train_samples = 0 + args.skipped_train_samples = 0 + args.consumed_valid_samples = 0 + + # Support for variable sequence lengths across batches/microbatches. + # set it if the dataloader supports generation of variable sequence lengths + # across batches/microbatches. Due to additional communication overhead + # during pipeline parallelism, it should not be set if sequence length + # is constant during training. + args.variable_seq_lengths = False + + # Iteration-based training. + if args.train_iters: + # If we use iteration-based training, make sure the + # sample-based options are off. + assert args.train_samples is None, \ + 'expected iteration-based training' + assert args.lr_decay_samples is None, \ + 'expected iteration-based learning rate decay' + assert args.lr_warmup_samples == 0, \ + 'expected iteration-based learning rate warmup' + assert args.rampup_batch_size is None, \ + 'expected no batch-size rampup for iteration-based training' + if args.lr_warmup_fraction is not None: + assert args.lr_warmup_iters == 0, \ + 'can only specify one of lr-warmup-fraction and lr-warmup-iters' + + # Sample-based training. + if args.train_samples: + # If we use sample-based training, make sure the + # iteration-based options are off. 
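+        # For example, a sample-based run specifies --train-samples and
+        # --lr-decay-samples (and optionally --lr-warmup-samples) instead of
+        # --train-iters, --lr-decay-iters, and --lr-warmup-iters.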
+        assert args.train_iters is None, \
+            'expected sample-based training'
+        assert args.lr_decay_iters is None, \
+            'expected sample-based learning rate decay'
+        assert args.lr_warmup_iters == 0, \
+            'expected sample-based learning rate warmup'
+        if args.lr_warmup_fraction is not None:
+            assert args.lr_warmup_samples == 0, \
+                'can only specify one of lr-warmup-fraction ' \
+                'and lr-warmup-samples'
+
+    if args.num_layers is not None:
+        assert args.encoder_num_layers is None, \
+            'cannot have both num-layers and encoder-num-layers specified'
+        args.encoder_num_layers = args.num_layers
+    else:
+        assert args.encoder_num_layers is not None, \
+            'either num-layers or encoder-num-layers should be specified'
+        args.num_layers = args.encoder_num_layers
+
+    # Check required arguments.
+    required_args = ['num_layers', 'hidden_size', 'num_attention_heads',
+                     'max_position_embeddings']
+    for req_arg in required_args:
+        _check_arg_is_not_none(args, req_arg)
+
+    # Checks.
+    if args.ffn_hidden_size is None:
+        if args.swiglu:
+            # Reduce the dimension of the MLP since the projection happens on
+            # two linear layers. This keeps the number of parameters in
+            # the same ballpark as the counterpart with 4*h size.
+            # We keep it a multiple of 64, which means the actual tensor size
+            # will be a multiple of 64 / tp_size.
+            args.ffn_hidden_size = int((4 * args.hidden_size * 2 / 3) / 64) * 64
+        else:
+            args.ffn_hidden_size = 4 * args.hidden_size
+
+    if args.kv_channels is None:
+        assert args.hidden_size % args.num_attention_heads == 0
+        args.kv_channels = args.hidden_size // args.num_attention_heads
+
+    if args.seq_length is not None and args.context_parallel_size > 1:
+        assert args.seq_length % (args.context_parallel_size * 2) == 0, \
+            'seq-length should be a multiple of 2 * context-parallel-size ' \
+            'if context-parallel-size > 1.'
+
+    if args.seq_length is not None:
+        assert args.encoder_seq_length is None
+        args.encoder_seq_length = args.seq_length
+    else:
+        assert args.encoder_seq_length is not None
+        args.seq_length = args.encoder_seq_length
+
+    if args.seq_length is not None:
+        assert args.max_position_embeddings >= args.seq_length, \
+            f"max_position_embeddings ({args.max_position_embeddings}) must be greater than " \
+            f"or equal to seq_length ({args.seq_length})."
+    if args.decoder_seq_length is not None:
+        assert args.max_position_embeddings >= args.decoder_seq_length
+    if args.lr is not None:
+        assert args.min_lr <= args.lr
+    if args.save is not None:
+        assert args.save_interval is not None
+    # Mixed precision checks.
+    if args.fp16_lm_cross_entropy:
+        assert args.fp16, 'lm cross entropy in fp16 is only supported in fp16 mode.'
+    if args.fp32_residual_connection:
+        assert args.fp16 or args.bf16, \
+            'residual connection in fp32 only supported when using fp16 or bf16.'
+
+    if args.moe_grouped_gemm:
+        assert args.bf16, 'Currently GroupedGEMM for MoE only supports bf16 dtype.'
+        dc = torch.cuda.get_device_capability()
+        assert dc[0] >= 8, "Unsupported compute capability for GroupedGEMM kernels."
+
+    if args.weight_decay_incr_style == 'constant':
+        assert args.start_weight_decay is None
+        assert args.end_weight_decay is None
+        args.start_weight_decay = args.weight_decay
+        args.end_weight_decay = args.weight_decay
+    else:
+        assert args.start_weight_decay is not None
+        assert args.end_weight_decay is not None
+
+    # Persistent fused layer norm.
+    if not is_torch_min_version("1.11.0a0"):
+        args.no_persist_layer_norm = True
+        if args.rank == 0:
+            print('Persistent fused layer norm kernel is supported from '
+                  'pytorch v1.11 (nvidia pytorch container paired with v1.11). '
+                  'Defaulting to no_persist_layer_norm=True')
+
+    # Activation recomputing.
+    if args.distribute_saved_activations:
+        assert args.tensor_model_parallel_size > 1, 'can distribute ' \
+            'recomputed activations only across tensor model ' \
+            'parallel groups'
+        assert args.recompute_granularity == 'full', \
+            'distributed recompute of activations is only '\
+            'applicable to full recompute granularity'
+        assert args.recompute_method is not None, \
+            'for distributed recompute of activations to work you '\
+            'need to use a recompute method'
+        assert is_torch_min_version("1.10.0a0"), \
+            'distributed recompute activations are supported for pytorch ' \
+            'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \
+            f'pytorch version is v{get_torch_version()}.'
+
+    if args.recompute_granularity == 'selective':
+        assert args.recompute_method is None, \
+            'recompute method is not yet supported for ' \
+            'selective recomputing granularity'
+
+    # Disable sequence parallelism when tp=1 to avoid a change in numerics
+    # compared to runs where sequence_parallelism is enabled.
+    if args.tensor_model_parallel_size == 1:
+        if args.sequence_parallel:
+            warnings.warn("Disabling sequence parallelism because tensor model parallelism is disabled")
+        args.sequence_parallel = False
+
+    if args.tp_comm_overlap:
+        assert args.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled'
+
+    # Disable async_tensor_model_parallel_allreduce when
+    # model parallel memory optimization is enabled.
+    if args.sequence_parallel:
+        args.async_tensor_model_parallel_allreduce = False
+        if args.use_torch_fsdp2:
+            warnings.warn(
+                "Sequence parallelism and FSDP2 are being used together. Try not to use them "
+                "together since they require different CUDA_DEVICE_MAX_CONNECTIONS settings "
+                "for best performance. Sequence parallelism requires setting the "
+                "environment variable CUDA_DEVICE_MAX_CONNECTIONS to 1, while FSDP2 "
+                "requires not setting CUDA_DEVICE_MAX_CONNECTIONS=1 for better parallelization.")
+
+    if os.environ.get('CUDA_DEVICE_MAX_CONNECTIONS') != "1" and get_device_arch_version() < 10:
+        # The CUDA_DEVICE_MAX_CONNECTIONS requirement no longer exists since the Blackwell architecture.
+        if args.sequence_parallel:
+            warnings.warn(
+                "Using sequence parallelism requires setting the environment variable "
+                "CUDA_DEVICE_MAX_CONNECTIONS to 1")
+        if args.async_tensor_model_parallel_allreduce:
+            warnings.warn(
+                "Using async gradient all reduce requires setting the environment "
+                "variable CUDA_DEVICE_MAX_CONNECTIONS to 1")
+
+    # Disable bias gelu fusion if we are disabling bias altogether.
+    if not args.add_bias_linear:
+        args.bias_gelu_fusion = False
+
+    # Keep the 'add bias' args in sync; add_qkv_bias is more targeted.
+    if args.add_bias_linear:
+        args.add_qkv_bias = True
+
+    # Retro checks.
+    if args.retro_add_retriever:
+
+        # Train samples should be auto-loaded.
+        assert args.train_samples is not None, \
+            "args.train_samples should be auto-loaded from the retro config."
+
+        # Sequence parallelism unsupported.
+        assert not args.sequence_parallel, \
+            "retro currently does not support sequence parallelism."
+
+        # Pipeline parallelism unsupported.
+        assert args.pipeline_model_parallel_size == 1, \
+            "retro currently does not support pipeline parallelism."
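+    # In other words, a Retro run keeps --pipeline-model-parallel-size 1, does
+    # not pass --sequence-parallel, and relies on the retro config to populate
+    # args.train_samples.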
+
+    if args.decoupled_lr is not None or args.decoupled_min_lr is not None:
+        assert not args.use_legacy_models, \
+            '--decoupled-lr and --decoupled-min-lr are not supported in legacy models.'
+
+    # Legacy RoPE arguments
+    if args.use_rotary_position_embeddings:
+        args.position_embedding_type = 'rope'
+    if args.rotary_interleaved and args.apply_rope_fusion:
+        raise RuntimeError('--rotary-interleaved does not work with rope_fusion.')
+    if args.rotary_interleaved and args.use_legacy_models:
+        raise RuntimeError('--rotary-interleaved is not supported in legacy models.')
+    if args.position_embedding_type != 'rope':
+        args.apply_rope_fusion = False
+
+    # Would just need to add 'NoPE' as a position_embedding_type to support this, but for now
+    # don't allow it to keep things simple.
+    if not args.add_position_embedding and args.position_embedding_type != 'rope':
+        raise RuntimeError('--no-position-embedding is deprecated, use --position-embedding-type')
+
+    # Relative position embeddings arguments
+    if args.position_embedding_type == 'relative':
+        assert (
+            args.transformer_impl == "transformer_engine"
+        ), 'Local transformer implementation currently does not support attention bias-based position embeddings.'
+
+    # MoE Spec check
+    if args.num_experts == 0:
+        args.num_experts = None
+    if args.num_experts is not None:
+        assert args.spec is None, "Model Spec must be None when using MoEs"
+
+    if args.moe_ffn_hidden_size is None:
+        args.moe_ffn_hidden_size = args.ffn_hidden_size
+
+    # Context parallel
+    if args.context_parallel_size > 1:
+        assert not args.use_legacy_models, "Context parallelism is not supported in legacy models."
+
+    # Expert parallelism check
+    if args.expert_model_parallel_size > 1:
+        assert args.num_experts is not None, "num_experts must not be None when using expert model parallelism"
+        assert args.num_experts % args.expert_model_parallel_size == 0, \
+            "Number of experts should be a multiple of expert model parallel_size."
+        assert not args.fp16, \
+            "Expert parallelism is not supported with fp16 training."
+
+    # Distributed checkpointing checks
+    if args.use_dist_ckpt and args.use_legacy_models:
+        raise RuntimeError('--use-dist-ckpt is not supported in legacy models.')
+
+    # Data blend checks
+    assert args.mock_data + \
+           bool(args.data_path) + \
+           any([args.train_data_path, args.valid_data_path, args.test_data_path]) \
+           <= 1, "At most one data source may be provided: --mock-data, --data-path, " \
+           "or the per-split train/valid/test data paths."
+
+    # Deterministic mode
+    if args.deterministic_mode:
+        assert not args.use_flash_attn, "Flash attention cannot be used in deterministic mode."
+        assert not args.cross_entropy_loss_fusion, "Cross Entropy Fusion is currently not deterministic."
+
+        all_reduce_choices = ["Tree", "Ring", "CollnetDirect", "CollnetChain", "^NVLS"]
+        assert os.getenv("NCCL_ALGO", -1) != -1 and os.getenv("NCCL_ALGO") in all_reduce_choices, \
+            f"NCCL_ALGO must be one of {all_reduce_choices}."
+
+        torch.use_deterministic_algorithms(True)
+
+    # Update the printed args to reflect that `apply_query_key_layer_scaling` also controls `attention_softmax_in_fp32`.
+    if args.apply_query_key_layer_scaling:
+        args.attention_softmax_in_fp32 = True
+
+    if args.result_rejected_tracker_filename is not None:
+        # Append to passed-in args.iterations_to_skip.
+        iterations_to_skip_from_file = RerunStateMachine.get_skipped_iterations_from_tracker_file(
+            args.result_rejected_tracker_filename
+        )
+        args.iterations_to_skip.extend(iterations_to_skip_from_file)
+
+    # Make sure all functionality that requires Gloo process groups is disabled.
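+    # For example, a run launched with --disable-gloo-process-groups and
+    # --use-distributed-optimizer is expected to also use a distributed
+    # checkpoint format (args.use_dist_ckpt), as asserted below.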
+    if not args.enable_gloo_process_groups:
+        if args.use_distributed_optimizer:
+            # If using distributed optimizer, must use distributed checkpointing.
+            # Legacy checkpointing uses Gloo process groups to collect full distributed
+            # optimizer state in the CPU memory of DP rank 0.
+            assert args.use_dist_ckpt
+
+    # Checkpointing
+    if args.ckpt_fully_parallel_save_deprecated and args.rank == 0:
+        print('--ckpt-fully-parallel-save flag is deprecated and has no effect.'
+              ' Use --no-ckpt-fully-parallel-save to disable parallel save.')
+    if (
+        args.use_dist_ckpt
+        and not args.ckpt_fully_parallel_save
+        and args.use_distributed_optimizer
+        and args.rank == 0
+    ):
+        print('Warning: With non-parallel ckpt save and DistributedOptimizer,'
+              ' it will be impossible to resume training with different parallelism.'
+              ' Consider removing flag --no-ckpt-fully-parallel-save.')
+    if args.use_dist_ckpt_deprecated and args.rank == 0:
+        print('--use-dist-ckpt is deprecated and has no effect.'
+              ' Use --ckpt-format to select the checkpoint format.')
+    if args.dist_ckpt_format_deprecated and args.rank == 0:
+        print('--dist-ckpt-format is deprecated and has no effect.'
+              ' Use --ckpt-format to select the checkpoint format.')
+
+    # Inference args
+    if args.inference_batch_times_seqlen_threshold > -1:
+        assert args.pipeline_model_parallel_size > 1, \
+            "--inference-batch-times-seqlen-threshold requires setting --pipeline-model-parallel-size > 1."
+
+    # MoE upcycling check
+    if args.moe_use_upcycling:
+        assert args.save is not None, "When using upcycling, the --save option must be specified."
+        if not args.no_load_optim:
+            args.no_load_optim = True
+            print('Warning: enabling --no-load-optim for upcycling; optimizer state will not be loaded.')
+        if not args.no_load_rng:
+            args.no_load_rng = True
+            print('Warning: enabling --no-load-rng for upcycling; rng state will not be loaded.')
+
+    # Optimizer CPU offload check
+    if args.optimizer_cpu_offload:
+        assert args.use_precision_aware_optimizer, (
+            "The optimizer cpu offload must be used in conjunction with `--use-precision-aware-optimizer`, "
+            "as the hybrid device optimizer reuses the code path of this flag."
+        )
+
+    # MoE loss and include embedding and loss layer check
+    if args.num_experts is not None:
+        if args.moe_router_load_balancing_type != "none" or args.moe_z_loss_coeff is not None:
+            assert not args.account_for_embedding_in_pipeline_split, \
+                "Cannot support load balancing loss and z loss with --account-for-embedding-in-pipeline-split"
+            assert not args.account_for_loss_in_pipeline_split, \
+                "Cannot support load balancing loss and z loss with --account-for-loss-in-pipeline-split"
+
+    if args.non_persistent_ckpt_type == "local":
+        assert args.non_persistent_local_ckpt_dir is not None, \
+            "Tried to use local checkpointing without specifying --non-persistent-local-ckpt-dir!"
+    if args.replication:
+        assert args.replication_jump is not None, "--replication requires the value of --replication-jump!"
+        assert args.non_persistent_ckpt_type == "local", f"--replication requires args.non_persistent_ckpt_type == 'local', but got: {args.non_persistent_ckpt_type}"
+    elif args.replication_jump:
+        print("Warning: --replication-jump was specified despite not using replication. Ignoring.")
+        args.replication_jump = None
+
+    # Print arguments.
+    _print_args("arguments", args)
+
+    return args
+
+
+def _print_args(title, args):
+    """Print arguments."""
+    if args.rank == 0:
+        print(f'------------------------ {title} ------------------------',
+              flush=True)
+        str_list = []
+        for arg in vars(args):
+            dots = '.' 
* (48 - len(arg)) + str_list.append(' {} {} {}'.format(arg, dots, getattr(args, arg))) + for arg in sorted(str_list, key=lambda x: x.lower()): + print(arg, flush=True) + print(f'-------------------- end of {title} ---------------------', + flush=True) + + +def _check_arg_is_not_none(args, arg): + assert getattr(args, arg) is not None, '{} argument is None'.format(arg) + + +def core_transformer_config_from_args(args, config_class=None): + + # Config class. + config_class = config_class or TransformerConfig + + if args.multi_latent_attention: + config_class = MLATransformerConfig + + # Translate args to core transformer configuration + kw_args = {} + for f in dataclasses.fields(config_class): + if hasattr(args, f.name): + kw_args[f.name] = getattr(args, f.name) + kw_args['persist_layer_norm'] = not args.no_persist_layer_norm + kw_args['layernorm_zero_centered_gamma'] = args.apply_layernorm_1p + kw_args['layernorm_epsilon'] = args.norm_epsilon + kw_args['deallocate_pipeline_outputs'] = True + kw_args['pipeline_dtype'] = args.params_dtype + kw_args['batch_p2p_comm'] = not args.overlap_p2p_comm + kw_args['num_moe_experts'] = args.num_experts + kw_args['rotary_interleaved'] = args.rotary_interleaved + kw_args['num_layers_in_first_pipeline_stage']= args.decoder_first_pipeline_num_layers + kw_args['num_layers_in_last_pipeline_stage']= args.decoder_last_pipeline_num_layers + if args.swiglu: + kw_args['activation_func'] = F.silu + kw_args['gated_linear_unit'] = True + kw_args['bias_activation_fusion'] = args.bias_swiglu_fusion + else: + kw_args['bias_activation_fusion'] = args.bias_gelu_fusion + if args.squared_relu: + assert not args.swiglu + kw_args['activation_func'] = squared_relu + if args.init_method_xavier_uniform: + kw_args['init_method'] = torch.nn.init.xavier_uniform_ + kw_args['scaled_init_method'] = torch.nn.init.xavier_uniform_ + if args.group_query_attention: + kw_args['num_query_groups'] = args.num_query_groups + else: + kw_args['num_query_groups'] = None + kw_args['config_logger_dir'] = args.config_logger_dir + + if len(args.cp_comm_type) == 1: + kw_args['cp_comm_type'] = args.cp_comm_type[0] + + # Return config. + return config_class(**kw_args) + + +def _add_transformer_engine_args(parser): + group = parser.add_argument_group(title='Transformer-Engine') + + group.add_argument('--fp8-format', default=None, + choices=['e4m3', 'hybrid'], + help='Which fp8 format scheme to use for FP8 tensors in the forward and backward pass', + dest='fp8') + group.add_argument('--fp8-margin', type=int, default=0, + help='Scaling margin for fp8', + dest='fp8_margin') + group.add_argument('--fp8-interval', type=int, default=1, + help='DEPRECATED. This flag is ignored. 
Scaling update interval for fp8', + dest='fp8_interval') + group.add_argument('--fp8-amax-history-len', type=int, default=1, + help='Number of steps for which amax history is recorded per tensor', + dest='fp8_amax_history_len') + group.add_argument('--fp8-amax-compute-algo', default='most_recent', + choices=['most_recent', 'max'], + help='Algorithm for computing amax from history', + dest='fp8_amax_compute_algo') + group.add_argument('--no-fp8-wgrad', action='store_false', + help='Execute wgrad in higher precision even for FP8 runs', + dest='fp8_wgrad') + group.add_argument('--transformer-impl', default='transformer_engine', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + group.add_argument('--fp8-param-gather', action='store_true', + help='Keep the compute param in fp8 (do not use any other intermediate ' + 'dtype) and perform the param all-gather in fp8.') + group.add_argument('--te-rng-tracker', action='store_true', default=False, + help='Use the Transformer Engine version of the random number generator. ' + 'Required for CUDA graphs support.') + group.add_argument('--inference-rng-tracker', action='store_true', default=False, + help='Use a random number generator configured for inference.') + return parser + +def _add_inference_args(parser): + group = parser.add_argument_group(title='inference') + + group.add_argument('--inference-batch-times-seqlen-threshold', + type=int, default=-1, + help='If (batch-size * sequence-length) is smaller than this threshold' + 'then batches will not be split up for pipelining.' + 'Requires setting --pipeline-model-parallel-size > 1.' + 'Setting this to -1 indicates that batch pipelining is not used.') + group.add_argument('--max-tokens-to-oom', + type=int, default=12000, + help='Maximum number of tokens during inference' + 'tokens here is # in prompt + # to generate' + 'Allows us to throw an error before OOM crashes server') + group.add_argument('--output-bert-embeddings', action='store_true', + help='Output Bert embeddings (via mean pooling) from ' + 'model, rather than its binary head output or entire ' + 'hidden batch.') + group.add_argument('--bert-embedder-type', default="megatron", + choices=["megatron", "huggingface"], + help='Select either Megatron or Huggingface as the ' + 'Bert embedder.') + group.add_argument('--flash-decode', default=False, action="store_true", + help='Whether to use the flash decoding kernel.') + group.add_argument('--enable-cuda-graph', default=False, action="store_true", + help='Use CUDA graph capture and replay.') + group.add_argument("--cuda-graph-warmup-steps", type=int, default=3, + help="Number of CUDA graph warmup steps") + group.add_argument('--inference-max-requests', type=int, default=8, + help='Maximum number of requests for inference.', + dest='inference_max_batch_size') + group.add_argument('--inference-max-seq-length', type=int, default=2560, + help='Maximum sequence length expected for inference (prefill + decode).', + dest='inference_max_seq_length') + return parser + + +def _add_retro_args(parser): + group = parser.add_argument_group(title='retro') + + group.add_argument('--retro-project-dir', default=None, + help='Retro project directory, which contains the ' + 'preprocessed data for pretraining. 
This directory ' + 'is built during preprocessing (see ' + 'tools/retro/README.md), and contains subdirectories ' + 'for the chunk database and pretraining neighbors.') + group.add_argument('--retro-add-retriever', + action='store_true', default=False, + help='Add a retriever to the transformer, for use in ' + 'pretraining a Retro model.') + group.add_argument('--retro-cyclic-train-iters', type=int, default=None, + help='Set number of training iterations for cyclic ' + 'Retro training.') + group.add_argument('--retro-encoder-layers', type=int, default=2, + help='Number of layers to use for the retrieval ' + 'encoder.') + group.add_argument('--retro-encoder-hidden-dropout', + type=float, default=0.1, help='Hidden dropout for ' + 'retrieval encoder.') + group.add_argument('--retro-encoder-attention-dropout', + type=float, default=0.1, help='Attention dropout for ' + 'retrieval encoder.') + group.add_argument("--retro-num-neighbors", type=int, default=2, + help='Number of neighbors to retrieve during ' + 'pretraining.') + group.add_argument("--retro-num-retrieved-chunks", type=int, default=2, + help='Number of chunks to retrieve from the retrieval ' + 'database.') + group.add_argument("--retro-attention-gate", type=float, default=1, + help="Gated cross attention.") + group.add_argument("--retro-no-verify-neighbor-count", action="store_false", + dest="retro_verify_neighbor_count", + help="Skip verifying that len(GPT dataset) == len(saved " + "neighbors).") + + # Enforce argument naming convention. + for action in group._group_actions: + prefix = action.dest.split("_")[0] + assert prefix == "retro", \ + "Retro args must be prefixed with '--retro-*', for consistent " \ + "styling. Please fix '%s'." % ", ".join(action.option_strings) + + return parser + + +def _add_network_size_args(parser): + group = parser.add_argument_group(title='network size') + + group.add_argument('--num-layers', type=int, default=None, + help='Number of transformer layers.') + group.add_argument('--encoder-num-layers', type=int, default=None, + help='Number of encoder transformer layers.') + group.add_argument('--decoder-num-layers', type=int, default=None, + help='Number of decoder transformer layers.') + group.add_argument('--hidden-size', type=int, default=None, + help='Tansformer hidden size.') + group.add_argument('--ffn-hidden-size', type=int, default=None, + help='Transformer Feed-Forward Network hidden size. ' + 'This is set to 4*hidden-size if not provided') + group.add_argument('--num-attention-heads', type=int, default=None, + help='Number of transformer attention heads.') + group.add_argument('--attention-backend', type=lambda attn_backend: AttnBackend[attn_backend], default=AttnBackend.auto, choices = list(AttnBackend), help='Attention backend to use (flash,fused,unfused,local,auto). Defaults to auto') + group.add_argument('--kv-channels', type=int, default=None, + help='Projection weights dimension in multi-head ' + 'attention. This is set to ' + ' args.hidden_size // args.num_attention_heads ' + 'if not provided.') + group.add_argument('--group-query-attention', action='store_true', + help='Use group-query attention.') + group.add_argument('--num-query-groups', type=int, default=1) + + group.add_argument('--max-position-embeddings', type=int, default=None, + help='Maximum number of position embeddings to use. 
' + 'This is the size of position embedding.') + group.add_argument('--position-embedding-type', type=str, default='learned_absolute', + choices=['learned_absolute', 'rope', 'relative', 'none'], + help='Position embedding type.') + group.add_argument('--relative-attention-num-buckets', type=int, default=32, + help='Number of buckets for relative position embeddings.') + group.add_argument('--relative-attention-max-distance', type=int, default=128, + help='Maximum distance for relative position embeddings calculation.') + group.add_argument('--use-rotary-position-embeddings', action='store_true', + help='Use rotary positional embeddings or not. ' + 'Deprecated: use --position-embedding-type') + group.add_argument('--rotary-base', type=int, default=10000, + help='Base to use for rotary positional embeddings, default 10000') + group.add_argument('--rotary-percent', type=float, default=1.0, + help='Percent of rotary dimension to use, default 100%%') + group.add_argument('--rotary-interleaved', action='store_true', + help='Use interleaved rotary embedding.') + group.add_argument('--rotary-seq-len-interpolation-factor', type=int, default=None, + help='Sequence length interpolation factor for rotary embeddings.') + group.add_argument('--use-rope-scaling', action='store_true', + help='Apply rope scaling as used in llama3.x') + group.add_argument('--rope-scaling-factor', type=float, default=8.0, + help='Rope scaling factor in llama3.x models') + group.add_argument('--no-position-embedding', + action='store_false', + help='Disable position embedding. Deprecated: use --position-embedding-type', + dest='add_position_embedding') + group.add_argument('--make-vocab-size-divisible-by', type=int, default=128, + help='Pad the vocab size to be divisible by this value.' + 'This is added for computational efficieny reasons.') + group.add_argument('--normalization', default='LayerNorm', + choices=['LayerNorm', 'RMSNorm'], + help='Which normalization technique to use.') + group.add_argument('--norm-epsilon', type=float, default=1e-5, + help='Epsilon for layer norm and RMS norm.') + group.add_argument('--apply-layernorm-1p', action='store_true', + help='Adjust LayerNorm weights such that they are centered ' + 'around zero. This improves numerical stability.') + group.add_argument('--apply-residual-connection-post-layernorm', + action='store_true', + help='If set, use original BERT residula connection ' + 'ordering.') + group.add_argument('--openai-gelu', action='store_true', + help='Use OpenAIs GeLU implementation. 
This option' + 'should not be used unless for backward compatibility' + 'reasons.') + group.add_argument('--squared-relu', action='store_true', + help='Use squared relu activation instead of default gelu') + group.add_argument('--swiglu', action='store_true', + help='Use gated linear units and SiLU activation instead of default gelu') + group.add_argument('--onnx-safe', type=bool, required=False, + help='Use workarounds for known problems with ' + 'Torch ONNX exporter') + group.add_argument('--bert-no-binary-head', action='store_false', + help='Disable BERT binary head.', + dest='bert_binary_head') + group.add_argument('--untie-embeddings-and-output-weights', action='store_true', + help='Untie embeddings and output weights.') + group.add_argument('--multi-latent-attention', action='store_true', + help='Use multi-latent attention for model.') + return parser + + +def _add_straggler_detector_args(parser): + group = parser.add_argument_group(title='straggler') + group.add_argument('--log-straggler', action='store_true', + help='If set, tracks and logs straggler per GPU.') + group.add_argument('--disable-straggler-on-startup', action='store_true', + help='If set, StragglerDetector is disabled on startup.') + group.add_argument('--straggler-ctrlr-port', type=int, default=65535, + help='Port number to toggle StragglerDetector on/off at runtime') + group.add_argument('--straggler-minmax-count', type=int, default=1, + help='Number of ranks to report with high/low estimated throughput') + return parser + + +def _add_one_logger_args(parser): + group = parser.add_argument_group(title='one logger') + group.add_argument('--no-one-logger', action='store_false', + help='If set, disable using one_logger to track E2E metrics' + 'Note that one_logger is an internal tool and not ' + 'available externally. For installation, please go to ' + 'https://confluence.nvidia.com/display/MLWFO/Package+Repositories' + 'for more details', + dest='enable_one_logger') + group.add_argument('--one-logger-project', type=str, default='megatron-lm', + help='The one-logger project name. Will ignore if ' + '--no-one-logger is set') + group.add_argument('--one-logger-run-name', type=str, default=None, + help='The one-logger run name displayed. Will ignore if ' + '--no-one-logger is set') + group.add_argument('--one-logger-async', action='store_true', + help='If set, forces one_logger to use async mode.') + group.add_argument('--app-tag-run-name', type=str, default=None, + help='Jobs belonging to same training run, suppose to ' + 'have the same name. It will be used to track progress of ' + 'a training done over multiple different jobs') + group.add_argument('--app-tag-run-version', type=str, default='0.0.0', + help='The version of the training of which current job is ' + 'part of. It will be used to track the changes in the ' + 'application side which might change the performance ' + 'baseline') + return parser + + +def _add_ft_package_args(parser): + group = parser.add_argument_group(title='ft_package') + group.add_argument('--enable-ft-package', action='store_true', + help='If set, Fault Tolerance package is enabled. ' + 'Note: This feature is for Nvidia internal use only.') + group.add_argument('--calc-ft-timeouts', action='store_true', + help='If set, FT package will try to automatically compute the timeouts. 
' + 'Note: This feature is for Nvidia internal use only.') + return parser + + +def _add_config_logger_args(parser): + group = parser.add_argument_group(title='config logger') + group.add_argument('--config-logger-dir', type=str, default='', + help='If set, will dump all configs to --config-logger-dir', + dest='config_logger_dir') + return parser + + +def _add_logging_args(parser): + group = parser.add_argument_group(title='logging') + + group.add_argument('--log-params-norm', action='store_true', + help='If set, calculate and log parameters norm.') + group.add_argument('--log-num-zeros-in-grad', action='store_true', + help='If set, calculate and log the number of zeros in gradient.') + group.add_argument('--log-throughput', action='store_true', + help='If set, calculate and log throughput per GPU.') + group.add_argument('--log-progress', action='store_true', + help='If set, log progress (in terms of number of processed tokens and ' + 'number of floating-point operations) to progress.txt file in checkpoint ' + 'directory.') + group.add_argument('--timing-log-level', type=int, + default=0, choices=range(0,3), + help='Granularity level to measure and report timing. ' + ' 0: report only iteration time and make sure timing ' + ' does not introduce extra overhead.' + ' 1: report timing for operations that are executed ' + ' very limited times (basically once) during ' + ' each iteration (such as gradient all-reduce) ' + ' 2: report timing for operations that migh be ' + ' executed numerous times during each iteration. ' + 'Note that setting the level to 1 or 2 might ' + 'cause increase in iteration time.') + group.add_argument('--no-barrier-with-level-1-timing', action='store_false', + help='If not set, use barrier with level 1 time ' + 'measurements. Note that this is up to the user ' + 'to make sure calling barrier with their timers ' + 'will not result in hangs. This can happen if for ' + 'example the user adds a level 1 timer that is not ' + 'called by all ranks.', + dest='barrier_with_L1_time') + group.add_argument('--timing-log-option', type=str, default='minmax', + choices=['max', 'minmax', 'all'], + help='Options for logging timing:' + ' max: report the max timing across all ranks' + ' minmax: report min and max timings across all ranks' + ' all: report timings of all ranks.') + group.add_argument('--tensorboard-log-interval', type=int, default=1, + help='Report to tensorboard interval.') + group.add_argument('--tensorboard-queue-size', type=int, default=1000, + help='Size of the tensorboard queue for pending events ' + 'and summaries before one of the ‘add’ calls forces a ' + 'flush to disk.') + group.add_argument('--log-timers-to-tensorboard', action='store_true', + help='If set, write timers to tensorboard.') + group.add_argument('--no-log-loss-scale-to-tensorboard', + action='store_false', + help='Disable loss-scale logging to tensorboard.', + dest='log_loss_scale_to_tensorboard') + group.add_argument('--log-validation-ppl-to-tensorboard', + action='store_true', + help='If set, write validation perplexity to ' + 'tensorboard.') + group.add_argument('--log-memory-to-tensorboard', + action='store_true', + help='Enable memory logging to tensorboard.') + group.add_argument('--log-world-size-to-tensorboard', + action='store_true', + help='Enable world size logging to tensorboard.') + group.add_argument('--wandb-project', type=str, default='', + help='The wandb project name. 
Ignore wandb by default.') + group.add_argument('--wandb-exp-name', type=str, default='', + help='The wandb experiment name.') + group.add_argument('--wandb-save-dir', type=str, default='', + help='Path to save the wandb results locally.') + group.add_argument('--logging-level', type=int, default=None, + help='Set default logging level') + return parser + + +def _add_regularization_args(parser): + group = parser.add_argument_group(title='regularization') + + group.add_argument('--attention-dropout', type=float, default=0.1, + help='Post attention dropout probability.') + group.add_argument('--hidden-dropout', type=float, default=0.1, + help='Dropout probability for hidden state transformer.') + group.add_argument('--weight-decay', type=float, default=0.01, + help='Weight decay coefficient for L2 regularization.') + group.add_argument('--start-weight-decay', type=float, + help='Initial weight decay coefficient for L2 regularization.') + group.add_argument('--end-weight-decay', type=float, + help='End of run weight decay coefficient for L2 regularization.') + group.add_argument('--weight-decay-incr-style', type=str, default='constant', + choices=['constant', 'linear', 'cosine'], + help='Weight decay increment function.') + group.add_argument('--clip-grad', type=float, default=1.0, + help='Gradient clipping based on global L2 norm.') + group.add_argument('--adam-beta1', type=float, default=0.9, + help='First coefficient for computing running averages ' + 'of gradient and its square') + group.add_argument('--adam-beta2', type=float, default=0.999, + help='Second coefficient for computing running averages ' + 'of gradient and its square') + group.add_argument('--adam-eps', type=float, default=1e-08, + help='Term added to the denominator to improve' + 'numerical stability') + group.add_argument('--sgd-momentum', type=float, default=0.9, + help='Momentum factor for sgd') + return parser + + +def _add_training_args(parser): + group = parser.add_argument_group(title='training') + + group.add_argument('--micro-batch-size', type=int, default=None, + help='Batch size per model instance (local batch size). ' + 'Global batch size is local batch size times data ' + 'parallel size times number of micro batches.') + group.add_argument('--batch-size', type=int, default=None, + help='Old batch size parameter, do not use. ' + 'Use --micro-batch-size instead') + group.add_argument('--global-batch-size', type=int, default=None, + help='Training batch size. If set, it should be a ' + 'multiple of micro-batch-size times data-parallel-size. ' + 'If this value is None, then ' + 'use micro-batch-size * data-parallel-size as the ' + 'global batch size. This choice will result in 1 for ' + 'number of micro-batches.') + group.add_argument('--rampup-batch-size', nargs='*', default=None, + help='Batch size ramp up with the following values:' + ' --rampup-batch-size ' + ' ' + ' ' + 'For example:' + ' --rampup-batch-size 16 8 300000 \\ ' + ' --global-batch-size 1024' + 'will start with global batch size 16 and over ' + ' (1024 - 16) / 8 = 126 intervals will increase' + 'the batch size linearly to 1024. In each interval' + 'we will use approximately 300000 / 126 = 2380 samples.') + group.add_argument('--decrease-batch-size-if-needed', action='store_true', default=False, + help='If set, decrease batch size if microbatch_size * dp_size' + 'does not divide batch_size. 
Useful for KSO (Keep Soldiering On)' + 'to continue making progress if number of healthy GPUs (and' + 'corresponding dp_size) does not support current batch_size.' + 'Old batch_size will be restored if training is re-started with' + 'dp_size that divides batch_size // microbatch_size.') + group.add_argument('--recompute-activations', action='store_true', + help='recompute activation to allow for training ' + 'with larger models, sequences, and batch sizes.') + group.add_argument('--recompute-granularity', type=str, default=None, + choices=['full', 'selective'], + help='Checkpoint activations to allow for training ' + 'with larger models, sequences, and batch sizes. ' + 'It is supported at two granularities 1) full: ' + 'whole transformer layer is recomputed, ' + '2) selective: core attention part of the transformer ' + 'layer is recomputed.') + group.add_argument('--no-check-for-nan-in-loss-and-grad', action='store_false', + help='Check for NaNs in loss and grad', + dest='check_for_nan_in_loss_and_grad') + group.add_argument('--check-for-spiky-loss', action='store_true', + help='Check for spiky loss', + dest='check_for_spiky_loss') + group.add_argument('--check-for-large-grads', action='store_true', + help='Check for unexpectedly large grads', + dest='check_for_large_grads') + group.add_argument('--distribute-saved-activations', + action='store_true', + help='If set, distribute recomputed activations ' + 'across model parallel group.') + group.add_argument('--recompute-method', type=str, default=None, + choices=['uniform', 'block'], + help='1) uniform: uniformly divide the total number of ' + 'Transformer layers and recompute the input activation of ' + 'each divided chunk at specified granularity, ' + '2) recompute the input activations of only a set number of ' + 'individual Transformer layers per pipeline stage and do the ' + 'rest without any recomputing at specified granularity' + 'default) do not apply activations recompute to any layers') + group.add_argument('--recompute-num-layers', type=int, default=None, + help='1) uniform: the number of Transformer layers in each ' + 'uniformly divided recompute unit, ' + '2) block: the number of individual Transformer layers ' + 'to recompute within each pipeline stage.') + group.add_argument('--no-clone-scatter-output-in-embedding', action='store_false', + help='If not set, clone the output of the scatter in embedding layer to GC original tensor.', + dest='clone_scatter_output_in_embedding') + group.add_argument('--profile', action='store_true', + help='Enable nsys profiling. When using this option, nsys ' + 'options should be specified in commandline. 
An example ' + 'nsys commandline is `nsys profile -s none -t nvtx,cuda ' + '-o --force-overwrite true ' + '--capture-range=cudaProfilerApi ' + '--capture-range-end=stop`.') + group.add_argument('--profile-step-start', type=int, default=10, + help='Global step to start profiling.') + group.add_argument('--profile-step-end', type=int, default=12, + help='Global step to stop profiling.') + group.add_argument('--iterations-to-skip', nargs='+', type=int, default=[], + help='List of iterations to skip, empty by default.') + group.add_argument('--result-rejected-tracker-filename', type=str, default=None, + help='Optional name of file tracking `result_rejected` events.') + group.add_argument('--disable-gloo-process-groups', action='store_false', + dest='enable_gloo_process_groups', + help='Disables creation and usage of Gloo process groups.') + group.add_argument('--use-pytorch-profiler', action='store_true', + help='Use the built-in pytorch profiler. ' + 'Useful if you wish to view profiles in tensorboard.', + dest='use_pytorch_profiler') + group.add_argument('--profile-ranks', nargs='+', type=int, default=[0], + help='Global ranks to profile.') + group.add_argument('--profile-dir', type=str, default="./", + help='profile dir to save.') + group.add_argument('--record-memory-history', action="store_true", default=False, + help='Record memory history in last rank.') + group.add_argument('--memory-snapshot-path', type=str, default="snapshot.pickle", + help='Specifies where to dump the memory history pickle.') + group.add_argument('--tp-comm-overlap', action='store_true', help='Enables the ' + ' overlap of Tensor parallel communication and GEMM kernels.') + group.add_argument('--tp-comm-overlap-cfg', type=str, default=None, + help='Config file when tp_comm_overlap is enabled.') + group.add_argument('--disable-tp-comm-overlap-ag', action='store_false', + help=('Disables the All-Gather overlap with GEMM by ' + 'pipelining the GEMM and All-Gather.'), + dest='tp_comm_overlap_ag') + group.add_argument('--disable-tp-comm-overlap-rs', action='store_false', + help=('Disables the Reduce-Scatter overlap with GEMM by ' + 'pipelining the GEMM and Reduce-Scatter.'), + dest='tp_comm_overlap_rs') + group.add_argument('--tp-comm-overlap-rs-dgrad', action='store_true', + help = 'Enables the Reduce-Scatter overlap with dgrad GEMM.', + dest='tp_comm_overlap_rs_dgrad') + group.add_argument('--disable-tp-comm-bulk-dgrad', action='store_false', + help='Disables the All-Gather overlap with bprop activation gradient GEMM.', + dest='tp_comm_bulk_dgrad') + group.add_argument('--disable-tp-comm-bulk-wgrad', action='store_false', + help='Disables the Reduce-Scatter overlap with bprop weight gradient GEMM.', + dest='tp_comm_bulk_wgrad') + group.add_argument('--tp-comm-bootstrap-backend', default='nccl', type=str, + choices=['nccl', 'mpi', 'gloo'], + help='Set the bootstrapping backend of Tensor parallel communications.') + group.add_argument('--use-cpu-initialization', action='store_true', + default=None, + help='If set, initialize weights on the CPU. This eliminates init differences based on tensor parallelism.') + group.add_argument('--empty-unused-memory-level', default=0, type=int, + choices=[0, 1, 2], + help='Call torch.cuda.empty_cache() each iteration ' + '(training and eval), to reduce fragmentation.' + '0=off, 1=moderate, 2=aggressive.') + group.add_argument('--deterministic-mode', action='store_true', + help='Choose code that has deterministic execution. 
This usually ' + 'means slower execution, but is good for debugging and testing.') + group.add_argument('--check-weight-hash-across-dp-replicas-interval', type=int, default=None, + help='Interval to check weight hashes are same across DP replicas. If not specified, weight hashes not checked.') + group.add_argument('--calculate-per-token-loss', action='store_true', + help=('Scale cross entropy loss by the number of non-padded tokens in the ' + 'global batch, versus the default behavior of assuming all tokens are non-padded.')) + group.add_argument('--train-sync-interval', type=int, default=None, + help='Training CPU-GPU synchronization interval, to ensure that CPU is not running too far ahead of GPU.') + + # deprecated + group.add_argument('--checkpoint-activations', action='store_true', + help='Checkpoint activation to allow for training ' + 'with larger models, sequences, and batch sizes.') + group.add_argument('--train-iters', type=int, default=None, + help='Total number of iterations to train over all ' + 'training runs. Note that either train-iters or ' + 'train-samples should be provided.') + group.add_argument('--train-samples', type=int, default=None, + help='Total number of samples to train over all ' + 'training runs. Note that either train-iters or ' + 'train-samples should be provided.') + group.add_argument('--log-interval', type=int, default=100, + help='Report loss and timing interval.') + group.add_argument('--exit-interval', type=int, default=None, + help='Exit the program after the iteration is divisible ' + 'by this value.') + group.add_argument('--exit-duration-in-mins', type=int, default=None, + help='Exit the program after this many minutes.') + group.add_argument('--exit-signal-handler', action='store_true', + help='Dynamically save the checkpoint and shutdown the ' + 'training if SIGTERM is received') + group.add_argument('--tensorboard-dir', type=str, default=None, + help='Write TensorBoard logs to this directory.') + group.add_argument('--no-masked-softmax-fusion', + action='store_false', + help='Disable fusion of query_key_value scaling, ' + 'masking, and softmax.', + dest='masked_softmax_fusion') + group.add_argument('--no-bias-gelu-fusion', action='store_false', + help='Disable bias and gelu fusion.', + dest='bias_gelu_fusion') + group.add_argument('--no-bias-swiglu-fusion', action='store_false', + help='Disable bias and swiglu fusion, the fusion is ' + 'available only when using megatron-core.', + dest='bias_swiglu_fusion') + group.add_argument('--no-bias-dropout-fusion', action='store_false', + help='Disable bias and dropout fusion.', + dest='bias_dropout_fusion') + group.add_argument('--no-rope-fusion', action='store_false', + help='Disable rope fusion, the fusion is available ' + 'only when using megatron-core.', + dest='apply_rope_fusion') + group.add_argument('--cross-entropy-loss-fusion', action='store_true', + help='Enabled fusion of cross entropy loss calculation.', + dest='cross_entropy_loss_fusion') + group.add_argument('--use-flash-attn', action='store_true', + help='use FlashAttention implementation of attention. 
' + 'https://arxiv.org/abs/2205.14135') + group.add_argument('--disable-bias-linear', action='store_false', + help='Disable bias in the linear layers', + dest='add_bias_linear') + group.add_argument('--add-qkv-bias', action='store_true', + help='Enable bias only in the QKV linear layers', + dest='add_qkv_bias') + group.add_argument('--optimizer', type=str, default='adam', + choices=['adam', 'sgd'], + help='Optimizer function') + group.add_argument('--optimizer-cpu-offload', action='store_true', + help='Offload optimizer state to CPU') + group.add_argument('--optimizer-offload-fraction', type=float, default=1.0, + help='Ratio of optimizer state to offload to CPU') + group.add_argument('--use-torch-optimizer-for-cpu-offload', action='store_true', + help="Use torch.optim.Optimizer instead of Megatron's optimizer in optimizer cpu offload mode.") + group.add_argument('--overlap-cpu-optimizer-d2h-h2d', action='store_true', default=False, + help='Overlap CPU optimizer step, gradients D2H and updated parameters H2D.') + group.add_argument('--no-pin-cpu-grads', action='store_false', dest='pin_cpu_grads', + help='Disable pinning of CPU memory for gradients.') + group.add_argument('--no-pin-cpu-params', action='store_false', dest='pin_cpu_params', + help='Disable pinning of CPU memory for parameters.') + group.add_argument('--dataloader-type', type=str, default=None, + choices=['single', 'cyclic', 'external'], + help='Single pass vs multiple pass data loader') + group.add_argument('--no-async-tensor-model-parallel-allreduce', + action='store_false', + help='DEPRECATED. This flag is ignored.', + dest='async_tensor_model_parallel_allreduce') + group.add_argument('--no-persist-layer-norm', action='store_true', + help='Disable using persistent fused layer norm kernel. ' + 'This kernel supports only a set of hidden sizes. Please ' + 'check persist_ln_hidden_sizes if your hidden ' + 'size is supported.') + group.add_argument('--sequence-parallel', action='store_true', + help='Enable sequence parallel optimization.') + group.add_argument('--no-gradient-accumulation-fusion', + action='store_false', + help='Disable fusing gradient accumulation to weight ' + 'gradient computation of linear layers', + dest='gradient_accumulation_fusion') + group.add_argument('--use-mcore-models', action='store_true', + dest='deprecated_use_mcore_models', + help='DEPRECATED. Use the implementation from megatron core.' + 'Now ignored and mcore models are the default, use ' + '--use-legacy-models to not use core models.') + group.add_argument('--use-legacy-models', action='store_true', + help='Use the legacy Megatron models, not Megatron-Core models.') + group.add_argument('--manual-gc', action='store_true', + help='Disable the threshold-based default garbage ' + 'collector and trigger the garbage collection manually. ' + 'Manual garbage collection helps to align the timing of ' + 'the collection across ranks which mitigates the impact ' + 'of CPU-associated jitters. When the manual gc is enabled, ' + 'garbage collection is performed only at the start and the ' + 'end of the validation routine by default.') + group.add_argument('--manual-gc-interval', type=int, default=0, + help='Training step interval to trigger manual garbage ' + 'collection. 
When the value is set to 0, garbage ' + 'collection is not triggered between training steps.') + group.add_argument('--no-manual-gc-eval', action='store_false', + help='When using manual garbage collection, disable ' + 'garbage collection at the start and the end of each ' + 'evaluation run.', dest='manual_gc_eval') + group.add_argument('--disable-tp-comm-split-ag', action='store_false', + help='Disables the All-Gather overlap with fprop GEMM.', + dest='tp_comm_split_ag') + group.add_argument('--disable-tp-comm-split-rs', action='store_false', + help='Disables the Reduce-Scatter overlap with fprop GEMM.', + dest='tp_comm_split_rs') + group.add_argument('--pipeline-model-parallel-comm-backend', type=str, default=None, + choices=['nccl', 'ucc'], + help='Select a communicator backend for pipeline parallel communication. ' + 'If None, the default backend will be used.') + + return parser + + +def _add_rerun_machine_args(parser): + group = parser.add_argument_group(title='rerun engine') + + group.add_argument('--error-injection-rate', type=int, default=0, + help='Rate at which to inject unexpected results, ' + 'e.g. 1000 means once every 1000 result validations') + group.add_argument('--error-injection-type', type=str, default='transient_error', + choices=['correct_result', 'transient_error', 'persistent_error'], + help='Type of error to inject. ') + group.add_argument('--rerun-mode', type=str, default='disabled', + choices=['disabled', 'validate_results', 'report_stats'], + help='Use re-run engine to validate results (default) ' + 'or to emit stats on variability of computations due to ' + 'non-deterministic algorithms.') + + return parser + + +def _add_initialization_args(parser): + group = parser.add_argument_group(title='initialization') + + group.add_argument('--seed', type=int, default=1234, + help='Random seed used for python, numpy, ' + 'pytorch, and cuda.') + group.add_argument('--data-parallel-random-init', action='store_true', + help='Enable random initialization of params ' + 'across data parallel ranks') + group.add_argument('--init-method-std', type=float, default=0.02, + help='Standard deviation of the zero mean normal ' + 'distribution used for weight initialization.') + group.add_argument('--init-method-xavier-uniform', action='store_true', + help='Enable Xavier uniform parameter initialization') + + return parser + + +def _add_learning_rate_args(parser): + group = parser.add_argument_group(title='learning rate') + + group.add_argument('--lr', type=float, default=None, + help='Initial learning rate. 
Depending on decay style ' + 'and initial warmup, the learning rate at each ' + 'iteration would be different.') + group.add_argument('--lr-decay-style', type=str, default='linear', + choices=['constant', 'linear', 'cosine', 'inverse-square-root', 'WSD'], + help='Learning rate decay function.') + group.add_argument('--lr-wsd-decay-style', type=str, default='exponential', + choices=['exponential', 'linear', 'cosine'], + help='Decay style for the annealing phase of WSD'), + group.add_argument('--lr-decay-iters', type=int, default=None, + help='number of iterations to decay learning rate over,' + ' If None defaults to `--train-iters`') + group.add_argument('--lr-decay-samples', type=int, default=None, + help='number of samples to decay learning rate over,' + ' If None defaults to `--train-samples`') + group.add_argument('--lr-wsd-decay-samples', type=int, default=None, + help='number of samples for the annealing phase in the wsd schedule') + group.add_argument('--lr-wsd-decay-iters', type=int, default=None, + help='number of iterations for the annealing phase in the wsd schedule') + group.add_argument('--lr-warmup-fraction', type=float, default=None, + help='fraction of lr-warmup-(iters/samples) to use ' + 'for warmup (as a float)') + group.add_argument('--lr-warmup-iters', type=int, default=0, + help='number of iterations to linearly warmup ' + 'learning rate over.') + group.add_argument('--lr-warmup-samples', type=int, default=0, + help='number of samples to linearly warmup ' + 'learning rate over.') + group.add_argument('--lr-warmup-init', type=float, default=0.0, + help='Initial value for learning rate warmup. The ' + 'scheduler starts warmup from this value.') + group.add_argument('--warmup', type=int, default=None, + help='Old lr warmup argument, do not use. Use one of the' + '--lr-warmup-* arguments above') + group.add_argument('--min-lr', type=float, default=0.0, + help='Minimum value for learning rate. The scheduler' + 'clip values below this threshold.') + group.add_argument('--override-opt_param-scheduler', action='store_true', + help='Reset the values of the scheduler (learning rate,' + 'warmup iterations, minimum learning rate, maximum ' + 'number of iterations, and decay style from input ' + 'arguments and ignore values from checkpoints. Note' + 'that all the above values will be reset.') + group.add_argument('--use-checkpoint-opt_param-scheduler', action='store_true', + help='Use checkpoint to set the values of the scheduler ' + '(learning rate, warmup iterations, minimum learning ' + 'rate, maximum number of iterations, and decay style ' + 'from checkpoint and ignore input arguments.') + group.add_argument('--decoupled-lr', type=float, default=None, + help='Separate learning rate for the input and output layer') + group.add_argument('--decoupled-min-lr', type=float, default=None, + help='Minimum value for learning rate for the input and output layer. 
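# Illustrative sketch (editor's addition, not part of the patch): a WSD
# (warmup-stable-decay) schedule expressed through the learning-rate group above;
# the numbers are examples only.
import argparse

p = _add_learning_rate_args(argparse.ArgumentParser())
args = p.parse_args(['--lr', '3e-4',
                     '--lr-decay-style', 'WSD',
                     '--lr-wsd-decay-style', 'linear',
                     '--lr-warmup-iters', '2000',
                     '--min-lr', '3e-5'])
assert args.lr_decay_style == 'WSD' and args.lr_warmup_iters == 2000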
The scheduler' + 'clip values below this threshold') + + return parser + + +def _add_checkpointing_args(parser): + group = parser.add_argument_group(title='checkpointing') + + group.add_argument('--save', type=str, default=None, + help='Output directory to save checkpoints to.') + group.add_argument('--save-interval', '--persistent-save-interval', type=int, default=None, + help='Number of iterations between persistent checkpoint saves.') + group.add_argument('--no-save-optim', action='store_true', default=None, + help='Do not save current optimizer.') + group.add_argument('--no-save-rng', action='store_true', default=None, + help='Do not save current rng state.') + group.add_argument('--load', type=str, default=None, + help='Directory containing a model checkpoint.') + group.add_argument('--no-load-optim', action='store_true', default=None, + help='Do not load optimizer when loading checkpoint.') + group.add_argument('--no-load-rng', action='store_true', default=None, + help='Do not load rng state when loading checkpoint.') + group.add_argument('--non-persistent-save-interval', type=int, default=None, + help='Number of iterations between non-persistent saves.') + group.add_argument('--non-persistent-ckpt-type', type=str, default=None, + choices=['global', 'local', 'in_memory', None], + help='Type of non-persistent model checkpoints. ' + '"global" - Saved as a standard checkpoint (e.g., on Lustre) with old checkpoints being removed. ' + '"local" - Each rank saves a portion of the checkpoint locally (e.g., on SSD/ramdisk). ' + 'None - No non-persistent checkpointing (default option).') + group.add_argument('--non-persistent-global-ckpt-dir', type=str, default=None, + help='Directory containing global non-persistent model checkpoints.') + group.add_argument('--non-persistent-local-ckpt-dir', type=str, default=None, + help='Directory containing local non-persistent model checkpoints.') + group.add_argument('--non-persistent-local-ckpt-algo', type=str, default='fully_parallel', + choices=['fully_parallel', 'atomic'], + help='Algorithm for local non-persistent checkpointing.') + group.add_argument('--finetune', action='store_true', + help='Load model for finetuning. Do not load optimizer ' + 'or rng state from checkpoint and set iteration to 0. 
' + 'Assumed when loading a release checkpoint.') + group.add_argument('--pretrained-checkpoint', type=str, default=None, + help='Directory containing a pretrained model checkpoint for finetuning.') + group.add_argument('--ckpt-step', type=int, default=None, + help='Checkpoint step to load model from.') + group.add_argument('--no-initialization', action='store_false', + help='Do not perform initialization when building model, ' + 'can reduce startup time when definitely loading from a ' + 'checkpoint', + dest='perform_initialization') + group.add_argument('--use-checkpoint-args', action='store_true', + help='Override model-related command-line arguments with arguments from checkpoint') + group.add_argument('--use-mp-args-from-checkpoint-args', action='store_true', + help='Copy model parallelism command-line arguments from checkpoint') + group.add_argument('--no-use-tokenizer-model-from-checkpoint-args', action='store_false', + dest='use_tokenizer_model_from_checkpoint_args', + help='If set, do not use tokenizer model path from checkpoint') + group.add_argument('--exit-on-missing-checkpoint', action='store_true', + help="If '--load' is set, but checkpoint is not found " + "(e.g., path typo), then exit instead of random " + "initialization.") + group.add_argument('--use-dist-ckpt', action='store_true', + dest='use_dist_ckpt_deprecated', + help='Deprecated: see --ckpt-format.') + group.add_argument('--use-persistent-ckpt-worker', action='store_true', + help='Enables a persitent checkpoint worker for async save') + + group.add_argument('--auto-detect-ckpt-format', action='store_true', + help='Determine if the checkpoint format is in legacy or distributed format.' + ' If False, expects distributed checkpoint iff args.ckpt_format != "torch".' + ' Might slow down loading a bit (double rank0 ckpt load).') + group.add_argument('--dist-ckpt-format', + dest='dist_ckpt_format_deprecated', + help='Deprecated: see --ckpt-format.') + group.add_argument('--ckpt-format', default='torch_dist', + choices=['torch', 'torch_dist', 'zarr'], + help='Checkpoint format to use.') + group.add_argument('--ckpt-convert-format', default=None, + choices=['torch', 'torch_dist', 'zarr'], + help='Checkpoint format for conversion.') + group.add_argument('--ckpt-convert-save', default=None, + help='Save directory for converted checkpoint.') + group.add_argument('--ckpt-convert-update-legacy-dist-opt-format', action='store_true', + help='When loading a checkpoint, update the legacy format ' + 'for the distributed optimizer, which previously used a ' + 'merged param/grad buffer and a different bucket mapping. ' + 'The legacy format was deprecated on Feb 13, 2024.') + group.add_argument('--ckpt-fully-parallel-save', action='store_true', + dest='ckpt_fully_parallel_save_deprecated', + help='Deprecated: see --no-ckpt-fully-parallel-save.') + group.add_argument('--no-ckpt-fully-parallel-save', action='store_false', + dest='ckpt_fully_parallel_save', + help='Disable applying full save parallelization across DP for' + ' distributed checkpoints. Depending on ckpt format' + ' might decrease the number of files in the checkpoint.' + ' Makes DistributedOptimizer checkpoint non-reshardable.') + group.add_argument('--async-save', action='store_true', default=None, + help='Apply async checkpointing save. 
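# Illustrative sketch (editor's addition, not part of the patch): per the help
# text above, --async-save currently pairs only with the torch_dist checkpoint
# format; the path and interval below are placeholders.
import argparse

p = _add_checkpointing_args(argparse.ArgumentParser())
args = p.parse_args(['--save', '/tmp/ckpts', '--save-interval', '1000',
                     '--ckpt-format', 'torch_dist', '--async-save'])
assert args.async_save and args.ckpt_format == 'torch_dist'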
Currently works only with' + '`torch_dist` distributed checkpoint format.') + group.add_argument('--ckpt-fully-parallel-load', action='store_true', + help='Apply full load parallelization across DP for' + ' distributed checkpoints.') + group.add_argument('--ckpt-assume-constant-structure', action='store_true', + help='If the model and optimizer state dict structure is' + 'constant throughout a *single training job*, it allows for' + 'different checkpointing performance optimizations.') + group.add_argument('--dist-ckpt-strictness', type=str, default='assume_ok_unexpected', + choices=[e.value for e in StrictHandling], + help='Determine handling of key mismatch during checkpoint load.' + ' Check StrictHandling docs for flags meaning.' + ' NOTE: This flag controls only distributed checkpoint' + ' load from storage, not loading state dict into the model.') + return parser + + +def _add_mixed_precision_args(parser): + group = parser.add_argument_group(title='mixed precision') + + group.add_argument('--fp16', action='store_true', + help='Run model in fp16 mode.') + group.add_argument('--bf16', action='store_true', + help='Run model in bfloat16 mode.') + group.add_argument('--grad-reduce-in-bf16', action='store_true', + help='Reduce gradients in bfloat16.') + group.add_argument('--loss-scale', type=float, default=None, + help='Static loss scaling, positive power of 2 ' + 'values can improve fp16 convergence. If None, dynamic' + 'loss scaling is used.') + group.add_argument('--initial-loss-scale', type=float, default=2**32, + help='Initial loss-scale for dynamic loss scaling.') + group.add_argument('--min-loss-scale', type=float, default=1.0, + help='Minimum loss scale for dynamic loss scaling.') + group.add_argument('--loss-scale-window', type=float, default=1000, + help='Window over which to raise/lower dynamic scale.') + group.add_argument('--hysteresis', type=int, default=2, + help='hysteresis for dynamic loss scaling') + group.add_argument('--fp32-residual-connection', action='store_true', + help='Move residual connections to fp32.') + group.add_argument('--apply-query-key-layer-scaling', action='store_true', + help='Scale Q * K^T by 1 / layer-number. ' + 'Useful for fp16 training. Also sets `attention_softmax_in_fp32` to True.') + group.add_argument('--attention-softmax-in-fp32', action='store_true', + help='Run attention masking and softmax in fp32.') + group.add_argument('--accumulate-allreduce-grads-in-fp32', + action='store_true', + help='Gradient accumulation and all-reduce in fp32.') + group.add_argument('--fp16-lm-cross-entropy', action='store_true', + help='Move the cross entropy unreduced loss calculation' + 'for lm head to fp16.') + + return parser + + +def _add_distributed_args(parser): + group = parser.add_argument_group(title='distributed') + + group.add_argument('--tensor-model-parallel-size', type=int, default=1, + help='Degree of tensor model parallelism.') + group.add_argument('--encoder-tensor-model-parallel-size', type=int, default=0, + help='Degree of tensor model parallelism for the encoder.') + group.add_argument('--pipeline-model-parallel-size', type=int, default=1, + help='Degree of pipeline model parallelism.') + group.add_argument('--encoder-pipeline-model-parallel-size', type=int, default=0, + help=('Degree of pipeline model parallelism in the encoder. This is ' + 'independent of the amount of pipeline in the decoder.')) + group.add_argument('--pipeline-model-parallel-split-rank', + type=int, default=None, + help=('Rank where encoder and decoder should be split. 
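# Illustrative sketch (editor's addition, not part of the patch): with --fp16 and
# no static --loss-scale, dynamic loss scaling starts from --initial-loss-scale
# and is raised/lowered over --loss-scale-window steps with the given
# --hysteresis; the values below are examples only.
import argparse

p = _add_mixed_precision_args(argparse.ArgumentParser())
args = p.parse_args(['--fp16', '--initial-loss-scale', '65536',
                     '--loss-scale-window', '500', '--hysteresis', '2'])
assert args.fp16 and args.loss_scale is None        # None -> dynamic scaling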
' + 'Deprecated; use --encoder-pipeline-model-parallel-size instead.')) + group.add_argument('--decoder-first-pipeline-num-layers', + type=int, default=None, + help=('The number of transformer layers on the first pipeline stage of the decoder. ' + 'Default None is even split of transformer layers across all pipeline stages')) + group.add_argument('--decoder-last-pipeline-num-layers', + type=int, default=None, + help=('The number of transformer layers on the last pipeline stage of the decoder. ' + 'Default None is even split of transformer layers across all pipeline stages')) + group.add_argument('--model-parallel-size', type=int, default=None, + help='Old model parallel argument, do not use. Use ' + '--tensor-model-parallel-size instead.') + group.add_argument('--num-layers-per-virtual-pipeline-stage', type=int, default=None, + help='Number of layers per virtual pipeline stage') + group.add_argument('--num-virtual-stages-per-pipeline-rank', type=int, default=None, + help='Number of virtual pipeline stages per pipeline parallelism rank') + group.add_argument('--microbatch-group-size-per-virtual-pipeline-stage', type=int, default=None, + help='Number of contiguous microbatches per virtual pipeline stage', + dest='microbatch_group_size_per_vp_stage') + group.add_argument('--no-overlap-p2p-communication', action='store_false', + help='overlap pipeline parallel communication with forward and backward chunks in 1F1B', + dest='overlap_p2p_comm') + group.add_argument('--overlap-p2p-communication-warmup-flush', action='store_true', + default=False, help='if set, overlap pipeline parallel communication in warmup and flush', + dest='overlap_p2p_comm_warmup_flush') + group.add_argument('--distributed-backend', default='nccl', + choices=['nccl', 'gloo'], + help='Which backend to use for distributed training.') + group.add_argument('--distributed-timeout-minutes', type=int, default=10, + help='Timeout minutes for torch.distributed.') + group.add_argument('--overlap-grad-reduce', action='store_true', + default=False, help='If set, overlap DDP grad reduce.') + group.add_argument('--defer-embedding-wgrad-compute', action='store_true', + default=False, help='If set, defers the vocabulary projection linear layer weight' + 'gradient compute to pipeline flush.', dest='defer_embedding_wgrad_compute') + group.add_argument('--wgrad-deferral-limit', type=int, default=0, help='Number of micro-batches for which' + 'weight gradient computation of vocabulary projection is deferred, defaults to 0 which' + 'means all the micro-batches are deferred. Invalid if `defer-embedding-wgrad-compute`' + 'is not set') + group.add_argument('--no-align-grad-reduce', action='store_false', + help='If not set, all PP stages will launch gradient reduces simultaneously. 
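# Illustrative sketch (editor's addition, not part of the patch): a common
# tensor/pipeline-parallel layout combined with overlapped gradient reduction,
# parsed through the distributed group above; the sizes are examples, and the
# data-parallel degree is whatever world_size / (TP * PP * CP) leaves over.
import argparse

p = _add_distributed_args(argparse.ArgumentParser())
args = p.parse_args(['--tensor-model-parallel-size', '4',
                     '--pipeline-model-parallel-size', '2',
                     '--overlap-grad-reduce'])
assert args.overlap_grad_reduce and args.tensor_model_parallel_size == 4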
' + 'Otherwise, each PP stage will independently launch as needed.', + dest='align_grad_reduce') + group.add_argument('--ddp-num-buckets', type=int, default=None, + help='Number of buckets for data-parallel communication') + group.add_argument('--ddp-bucket-size', type=int, default=None, + help='Bucket size for data-parallel communication') + group.add_argument('--ddp-pad-buckets-for-high-nccl-busbw', action='store_true', + default=False, help='If set, make sure the bucket size is divisible by a large power ' + 'of 2 (2^16) to ensure NCCL collectives have high bus bandwidth at large DP counts, ' + 'since NCCL message size (which for ring algorithms is bucket_size / dp_size) ' + 'apparently needs to be divisible by a power of 2 for high busbw.') + group.add_argument('--ddp-average-in-collective', action='store_true', + default=False, help='If set, average directly in data-parallel communication collective.') + group.add_argument('--overlap-param-gather', action='store_true', + default=False, help='If set, overlap param all-gather in distributed optimizer.') + group.add_argument('--overlap-param-gather-with-optimizer-step', action='store_true', + default=False, help='If set, overlap param all-gather of first bucket with optimizer step.') + group.add_argument('--no-align-param-gather', action='store_false', + help='If not set, all PP stages will launch param all-gathers simultaneously. ' + 'Otherwise, each PP stage will independently launch as needed.', + dest='align_param_gather') + group.add_argument('--no-scatter-gather-tensors-in-pipeline', action='store_false', + help='If not set, use scatter/gather to optimize communication of tensors in pipeline.', + dest='scatter_gather_tensors_in_pipeline') + group.add_argument('--use-ring-exchange-p2p', action='store_true', + default=False, help='If set, use custom-built ring exchange ' + 'for p2p communications. Note that this option will require ' + 'a custom built image that support ring-exchange p2p.') + group.add_argument('--local-rank', type=int, default=int(os.getenv('LOCAL_RANK', '0')), + help='local rank passed from distributed launcher.') + group.add_argument('--lazy-mpu-init', type=bool, required=False, + help='If set to True, initialize_megatron() ' + 'skips DDP initialization and returns function to ' + 'complete it instead. Also turns on ' + '--use-cpu-initialization flag. This is for ' + 'external DDP manager.' 
) + group.add_argument('--account-for-embedding-in-pipeline-split', action='store_true', + default=False, help='If set, *input* embedding layer will be treated as a standard transformer' + 'layer in the context of partition and placement for pipeline parallelism.') + group.add_argument('--account-for-loss-in-pipeline-split', action='store_true', + default=False, help='If set, loss layer will be treated as a standard transformer' + 'layer in the context of partition and placement for pipeline parallelism.') + group.add_argument('--use-distributed-optimizer', action='store_true', + help='Use distributed optimizer.') + group.add_argument('--use-custom-fsdp', action='store_true', + help='Use the Megatron FSDP code path in DDP.') + group.add_argument('--init-model-with-meta-device', action='store_true') + group.add_argument('--data-parallel-sharding-strategy', type=str, default='no_shard', + choices=['no_shard', 'optim', 'optim_grads', 'optim_grads_params'], + help='Sharding strategy of data parallelism.') + group.add_argument('--no-gradient-reduce-div-fusion', action='store_false', dest='gradient_reduce_div_fusion', + help='If not set, fuse the division in gradient reduce.') + group.add_argument('--suggested-communication-unit-size', type=int, default=400_000_000, + help='When batch communication is needed across multiple buckets, ' + 'this environment variable guides the size of communication unit size.') + group.add_argument('--keep-fp8-transpose-cache-when-using-custom-fsdp', action='store_true', + help='If set, keep the fp8 transpose cache when using custom FSDP.') + group.add_argument('--num-distributed-optimizer-instances', type=int, default=1, + help='Number of Distributed Optimizer copies across Data Parallel domain.') + group.add_argument('--use-torch-fsdp2', action='store_true', + help="Use the torch FSDP2 implementation. FSDP2 is not currently working with Pipeline Parallel." + "It is still not in a stable release stage, and may therefore contain bugs or other potential issues.") + group.add_argument('--context-parallel-size', type=int, default=1, + help='Degree of context parallelism.') + group.add_argument('--cp-comm-type', nargs='+', type=str, default=["p2p"], + help='Inter-gpu communication type for context parallelism: ' + 'p2p, a2a, allgather or a2a+p2p. If a single string is provided, ' + 'all layers will share the same communication type. Users can also ' + 'specify separated types for each layer like ' + '--cp-comm-type p2p p2p a2a a2a a2a+p2p a2a+p2p') + group.add_argument('--hierarchical-context-parallel-sizes', nargs='+', type=int, default=None, + help='Degrees of the hierarchical context parallelism. Users should ' + 'provide a list to specify the sizes for different levels. ' + '--hierarchical-context-parallel-sizes 2 4 indicates every two adjacent gpus ' + 'forms the first level of cp groups and the cp ranks with the same odevity ' + 'forms the second level of cp groups.') + group.add_argument('--nccl-communicator-config-path', type=str, default=None, + help='Path to the yaml file with NCCL communicator ' + 'configurations. 
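# Illustrative sketch (editor's addition, not part of the patch): --cp-comm-type
# accepts either one value shared by all layers or a per-layer list, as described
# above; the layer count here is arbitrary.
import argparse

p = _add_distributed_args(argparse.ArgumentParser())
args = p.parse_args(['--context-parallel-size', '2',
                     '--cp-comm-type', 'p2p', 'p2p', 'a2a+p2p'])
assert args.cp_comm_type == ['p2p', 'p2p', 'a2a+p2p']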
The number of min/max thread groups and thread ' + 'group cluster size of each communicator can be configured by ' + 'setting `min_ctas`, `max_ctas`, and `cga_cluster_size`.') + group.add_argument('--use-tp-pp-dp-mapping', action='store_true', default=False, + help='If set, distributed ranks initialize order is changed ' + 'from tp-cp-ep-dp-pp to tp-cp-ep-pp-dp.') + group.add_argument('--replication', action='store_true', default=False, + help="If set, replication of local checkpoints is enabled. " + "Needs to be enabled on all ranks.") + group.add_argument('--replication-jump', default=None, type=int, + help="Specifies `J`, the spacing between ranks storing replicas of a given rank's data. " + "Replicas for rank `n` may be on ranks `n+J`, `n+2J`, ..., or `n-J`, `n-2J`, etc. " + "This flag has an effect only if --replication is used. " + "and must be consistent across all ranks.") + group.add_argument('--replication-factor', default=2, type=int, + help="Number of machines storing the replica of a given rank's data.") + group.add_argument('--rank', default=-1, type=int, + help='node rank for distributed training') + group.add_argument('--world-size', type=int, default=8, + help='number of nodes for distributed training') + group.add_argument('--dist-url', + help='Which master node url for distributed training.') + return parser + + +def _add_validation_args(parser): + group = parser.add_argument_group(title='validation') + + group.add_argument('--eval-iters', type=int, default=100, + help='Number of iterations to run for evaluation' + 'validation/test for.') + group.add_argument('--eval-interval', type=int, default=1000, + help='Interval between running evaluation on ' + 'validation set.') + group.add_argument("--test-mode", action="store_true", help='Run all real-time test alongside the experiment.') + group.add_argument('--skip-train', action='store_true', + default=False, help='If set, bypass the training loop, ' + 'optionally do evaluation for validation/test, and exit.') + + return parser + + +def _add_tokenizer_args(parser): + group = parser.add_argument_group(title='tokenizer') + group.add_argument('--vocab-size', type=int, default=None, + help='Size of vocab before EOD or padding.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file.') + group.add_argument('--merge-file', type=str, default=None, + help='Path to the BPE merge file.') + group.add_argument('--vocab-extra-ids', type=int, default=0, + help='Number of additional vocabulary tokens. ' + 'They are used for span masking in the T5 model') + group.add_argument('--tokenizer-type', type=str, + default=None, + choices=['BertWordPieceLowerCase', + 'BertWordPieceCase', + 'GPT2BPETokenizer', + 'SentencePieceTokenizer', + 'GPTSentencePieceTokenizer', + 'HuggingFaceTokenizer', + 'Llama2Tokenizer', + 'TikTokenizer', + 'MultimodalTokenizer', + 'NullTokenizer'], + help='What type of tokenizer to use.') + group.add_argument('--tokenizer-model', type=str, default=None, + help='Sentencepiece tokenizer model.') + group.add_argument('--tiktoken-pattern', type=str, default=None, + help='Which tiktoken pattern to use. 
Options: [v1, v2]') + group.add_argument('--tiktoken-num-special-tokens', type=int, default=1000, + help='Number of special tokens in tiktoken tokenizer') + group.add_argument('--tiktoken-special-tokens', type=str, nargs='+', default=None, + help='List of tiktoken special tokens, needs to have ["", "", ""]') + return parser + + +def _add_data_args(parser): + group = parser.add_argument_group(title='data and dataloader') + + group.add_argument('--data-path', nargs='*', default=None, + help='The weight and prefix list for a set of train, validation, and test' + 'datasets which split according to --split. The accepted formats are: ' + '(1) a single prefix, ' + '(2) a list of weight prefix pairs e.g. weight1 prefix1 weight2 prefix2, ' + '(3) a list of prefixes e.g. prefix1 prefix2. ' + 'For (3), weights are inferred from the lengths of the contributing datasets. ' + 'This argument is exclusive to the other independent --*-data-path arguments.') + group.add_argument('--split', type=str, default=None, + help='Comma-separated list of proportions for training,' + ' validation, and test split. For example the split ' + '`90,5,5` will use 90%% of data for training, 5%% for ' + 'validation and 5%% for test.') + group.add_argument('--train-data-path', nargs='*', default=None, + help='The weight and prefix list for an independent train dataset. ' + 'Follows the same pattern rules as --data-path.') + group.add_argument('--valid-data-path', nargs='*', default=None, + help='The weight and prefix list for an independent validation dataset. ' + 'Follows the same pattern rules as --data-path.') + group.add_argument('--test-data-path', nargs='*', default=None, + help='The weight and prefix list for an independent test dataset. ' + 'Follows the same pattern rules as --data-path.') + group.add_argument('--data-args-path', type=str, default=None, + help='Path to data-args. Instead of feeding `--data-path` ' + 'with weighted dataset, we pass in a file path from which ' + 'we read that argument. This is useful when the list of data is ' + 'too big.') + group.add_argument('--per-split-data-args-path', type=str, default=None, + help='Path to per-split-data-args. Instead of feeding ' + '`--(train|valid|test)-data-path` with weighted dataset, ' + 'we pass in a file path from which we read those arguments. ' + 'This is useful when the list of data is too big. Format is a ' + 'json file with `train`, `valid, `test` keys') + group.add_argument('--data-cache-path', default=None, + help='Path to a directory to hold cached index files.') + group.add_argument('--no-mmap-bin-files', action='store_false', + help='Disable mmap-ing of .bin files.', + dest='mmap_bin_files') + group.add_argument('--mock-data', action='store_true', + help='Skip data loading and validation and opt for artificial ' + 'generation of mock data when an implementation is available.') + group.add_argument('--seq-length', type=int, default=None, + help='Maximum sequence length to process.') + group.add_argument('--encoder-seq-length', type=int, default=None, + help='Maximum encoder sequence length to process.' + 'This should be exclusive of --seq-length') + group.add_argument('--decoder-seq-length', type=int, default=None, + help="Maximum decoder sequence length to process.") + group.add_argument('--retriever-seq-length', type=int, default=256, + help='Maximum sequence length for the biencoder model ' + 'for retriever') + group.add_argument('--sample-rate', type=float, default=1.0, + help='sample rate for training data. 
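# Illustrative sketch (editor's addition, not part of the patch): the accepted
# --data-path forms described above, written as argv fragments; the prefixes are
# placeholders, not real dataset paths.
import argparse

single   = ['--data-path', 'corpus_text_document']
weighted = ['--data-path', '0.7', 'web_text_document', '0.3', 'books_text_document']
uniform  = ['--data-path', 'web_text_document', 'books_text_document']  # weights inferred

args = _add_data_args(argparse.ArgumentParser()).parse_args(weighted + ['--split', '98,1,1'])
assert args.split == '98,1,1'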
Supposed to be 0 ' + ' < sample_rate < 1') + group.add_argument('--mask-prob', type=float, default=0.15, + help='Probability of replacing a token with mask.') + group.add_argument('--short-seq-prob', type=float, default=0.1, + help='Probability of producing a short sequence.') + group.add_argument('--num-workers', type=int, default=2, + help="Dataloader number of workers.") + group.add_argument('--reset-position-ids', action='store_true', + help='Reset posistion ids after end-of-document token.') + group.add_argument('--reset-attention-mask', action='store_true', + help='Reset self attention maske after ' + 'end-of-document token.') + group.add_argument('--eod-mask-loss', action='store_true', + help='Mask loss for the end of document tokens.') + group.add_argument('--no-create-attention-mask-in-dataloader', action='store_false', + help='If set, do not create attention_masks in dataloader.', + dest='create_attention_mask_in_dataloader') + group.add_argument('--num-dataset-builder-threads', type=int, default=1, + help='Number of parallel threads per rank for dataset builder') + group.add_argument('--s3-cache-path', type=str, default=None, + help='Path to cache index files when using s3 dataloader') + return parser + + +def _add_autoresume_args(parser): + group = parser.add_argument_group(title='autoresume') + + group.add_argument('--adlr-autoresume', action='store_true', + help='Enable autoresume on adlr cluster.') + group.add_argument('--adlr-autoresume-interval', type=int, default=1000, + help='Intervals over which check for autoresume' + 'termination signal') + + return parser + + +def _add_biencoder_args(parser): + group = parser.add_argument_group(title='biencoder') + + # network size + group.add_argument('--ict-head-size', type=int, default=None, + help='Size of block embeddings to be used in ICT and ' + 'REALM (paper default: 128)') + group.add_argument('--biencoder-projection-dim', type=int, default=0, + help='Size of projection head used in biencoder (paper' + ' default: 128)') + group.add_argument('--biencoder-shared-query-context-model', action='store_true', + help='Whether to share the parameters of the query ' + 'and context models or not') + + # checkpointing + group.add_argument('--ict-load', type=str, default=None, + help='Directory containing an ICTBertModel checkpoint') + group.add_argument('--bert-load', type=str, default=None, + help='Directory containing an BertModel checkpoint ' + '(needed to start ICT and REALM)') + + # data + group.add_argument('--titles-data-path', type=str, default=None, + help='Path to titles dataset used for ICT') + group.add_argument('--query-in-block-prob', type=float, default=0.1, + help='Probability of keeping query in block for ' + 'ICT dataset') + group.add_argument('--use-one-sent-docs', action='store_true', + help='Whether to use one sentence documents in ICT') + group.add_argument('--evidence-data-path', type=str, default=None, + help='Path to Wikipedia Evidence frm DPR paper') + + # training + group.add_argument('--retriever-report-topk-accuracies', nargs='+', type=int, + default=[], help="Which top-k accuracies to report " + "(e.g. 
'1 5 20')") + group.add_argument('--retriever-score-scaling', action='store_true', + help='Whether to scale retriever scores by inverse ' + 'square root of hidden size') + + # faiss index + group.add_argument('--block-data-path', type=str, default=None, + help='Where to save/load BlockData to/from') + group.add_argument('--embedding-path', type=str, default=None, + help='Where to save/load Open-Retrieval Embedding' + ' data to/from') + + # indexer + group.add_argument('--indexer-batch-size', type=int, default=128, + help='How large of batches to use when doing indexing ' + 'jobs') + group.add_argument('--indexer-log-interval', type=int, default=1000, + help='After how many batches should the indexer ' + 'report progress') + return parser + + +def _add_vision_args(parser): + group = parser.add_argument_group(title="vision") + + # general vision arguements + group.add_argument('--num-classes', type=int, default=1000, + help='num of classes in vision classificaiton task') + group.add_argument('--img-h', type=int, default=224, + help='Image height for vision classification task') + group.add_argument('--img-w', type=int, default=224, + help='Image height for vision classification task') + group.add_argument('--num-channels', type=int, default=3, + help='Number of channels in input image data') + group.add_argument('--patch-dim', type=int, default=16, + help='patch dimension') + group.add_argument('--classes-fraction', type=float, default=1.0, + help='training with fraction of classes.') + group.add_argument('--data-per-class-fraction', type=float, default=1.0, + help='training with fraction of data per class.') + group.add_argument('--no-data-sharding', action='store_false', + help='Disable data sharding.', + dest='data_sharding') + group.add_argument('--head-lr-mult', type=float, default=1.0, + help='learning rate multiplier for head during finetuning') + + # pretraining type and backbone selection` + group.add_argument('--vision-pretraining', action='store_true', + help='flag to indicate vision pretraining') + group.add_argument('--vision-pretraining-type', type=str, default='classify', + choices=['classify', 'inpaint', 'dino'], + help='pretraining objectives') + group.add_argument('--vision-backbone-type', type=str, default='vit', + choices=['vit', 'mit', 'swin'], + help='backbone types types') + group.add_argument('--swin-backbone-type', type=str, default='tiny', + choices=['tiny', 'base', 'h3'], + help='pretraining objectives') + # inpainting arguments + group.add_argument('--mask-type', type=str, default='random', + choices=['random', 'row'], + help='mask types') + group.add_argument('--mask-factor', type=float, default=1.0, + help='mask size scaling parameter') + + # dino arguments + group.add_argument('--iter-per-epoch', type=int, default=1250, + help='iterations per epoch') + group.add_argument('--dino-local-img-size', type=int, default=96, + help='Image size for vision classification task') + group.add_argument('--dino-local-crops-number', type=int, default=10, + help='Number of local crops') + group.add_argument('--dino-head-hidden-size', type=int, default=2048, + help='Hidden dimension size in dino head') + group.add_argument('--dino-bottleneck-size', type=int, default=256, + help='Bottle neck dimension in dino head ') + group.add_argument('--dino-freeze-last-layer', type=float, default=1, + help='Freezing last layer weights') + group.add_argument('--dino-norm-last-layer', action='store_true', + help='Disable Norm in last layer.') + group.add_argument('--dino-warmup-teacher-temp', 
type=float, default=0.04, + help='warump teacher temperature') + group.add_argument('--dino-teacher-temp', type=float, default=0.07, + help='teacher temperature') + group.add_argument('--dino-warmup-teacher-temp-epochs', type=int, default=30, + help='warmup teacher temperaure epochs') + + # regularization arguments + group.add_argument('--qk-layernorm', action='store_true', + help='Whether to layer normalize the q and k attention embeddings.') + + return parser + +def _add_moe_args(parser): + group = parser.add_argument_group(title="moe") + # General arguments + group.add_argument('--expert-model-parallel-size', type=int, default=1, + help='Degree of expert model parallelism.') + group.add_argument('--expert-tensor-parallel-size', type=int, default=None, + help='Degree of expert model parallelism. Default is None, which will be set to the value of --tensor-model-paralle-size.') + group.add_argument('--num-experts', type=int, default=None, + help='Number of Experts in MoE (None means no MoE)') + group.add_argument('--moe-layer-freq', type=moe_freq_type, default=1, + help='Frequency between MoE layers and Dense layers. Accepts either: ' + '- An integer N: Represents a 1:N ratio, meaning one expert layer for every N-1 dense layers ' + '- A string containing a Python list expression that defines a custom pattern, e.g.: ' + '"([1]*3+[0]*1)*3" evaluates to [1,1,1,0,1,1,1,0,1,1,1,0] ' + 'where 1 indicates an expert layer and 0 indicates a dense layer. ' + 'Examples: "([0]+[1]*23)": 1 dense layer followed by 23 experts layers, ' + '"([1]*3+[0]*2)*2": Three expert layers followed by two dense layers, repeated twice.') + group.add_argument('--moe-ffn-hidden-size', type=int, default=None, + help='The hidden size of each expert\'s feed-forward network (ffn). ' + 'If not specified, defaults to the ffn_hidden_size.') + group.add_argument('--moe-shared-expert-intermediate-size', type=int, default=None, + help='Shared expert total ffn hidden size. ' + 'It should be equal to "num_shared_experts * ffn_size_of_each_shared_expert" if there are multiple shared experts. ' + 'None means no shared expert.') + group.add_argument('--moe-shared-expert-overlap', action='store_true', + help='Enable overlapping between shared expert computations and dispatcher communications. ' + 'Without this, the shared epxerts execute after the routed experts. ' + 'Only effective when moe-shared-expert-intermediate-size is set.') + group.add_argument('--moe-grouped-gemm', action='store_true', + help='When there are multiple experts per rank, launch multiple local GEMM kernels in multiple streams to improve the utilization and performance with GroupedLinear in TransformerEngine.') + # Router arguments + group.add_argument('--moe-router-load-balancing-type', type=str, + choices=['aux_loss', 'seq_aux_loss', 'sinkhorn', 'none'], + default='aux_loss', + help='Determines the load balancing strategy for the router. "aux_loss" corresponds to the load balancing loss used in GShard and SwitchTransformer; "seq_aux_loss" corresponds to the load balancing loss used in DeepSeekV2, which computes the loss for each individual sample; "sinkhorn" corresponds to the balancing algorithm used in S-BASE, and "none" implies no load balancing. The default is "aux_loss".') + group.add_argument('--moe-router-score-function', type=str, + choices=['softmax', 'sigmoid'], + default='softmax', + help='Score function for MoE TopK routing. 
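# Illustrative sketch (editor's addition, not part of the patch): expanding the
# custom --moe-layer-freq pattern quoted in the help text above; presumably
# moe_freq_type evaluates string inputs along these lines (1 = expert layer,
# 0 = dense layer).
pattern = "([1]*3+[0]*1)*3"
layer_pattern = eval(pattern)
assert layer_pattern == [1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0]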
Can be "softmax" or "sigmoid".') + group.add_argument('--moe-router-topk', type=int, default=2, + help='Number of experts to route to for each token. The default is 2.') + group.add_argument('--moe-router-pre-softmax', action='store_true', + help='Enable pre-softmax routing for MoE, which means softmax is before the top-k selection. By default, softmax is done after top-k.') + group.add_argument('--moe-router-num-groups', type=int, default=None, + help='Number of groups to divide experts into for group-limited routing. When using group-limited routing: 1) Experts are divided into equal-sized groups, 2) For each token, a subset of groups are selected based on routing scores (sum of top-2 expert scores within each group), 3) From these selected groups, moe_router_topk experts are chosen.' + 'Two common use cases: 1) Device-limited routing: Set equal to expert parallel size (EP) to limit each token to experts on a subset of devices (See DeepSeek-V2: https://arxiv.org/pdf/2405.04434) 2) Node-limited routing: Set equal to number of nodes in EP group to limit each token to experts on a subset of nodes (See DeepSeek-V3: https://arxiv.org/pdf/2412.19437)') + group.add_argument('--moe-router-group-topk', type=int, default=None, + help='Number of selected groups for group-limited routing.') + group.add_argument('--moe-router-topk-scaling-factor', type=float, default=None, + help='Scaling factor for routing score in top-k selection, only works when --moe-router-pre-softmax enabled. Defaults to None, which means no scaling.') + group.add_argument('--moe-router-enable-expert-bias', action='store_true', + help='TopK routing with dynamic expert bias in the aux-loss-free load balancing strategy. ' + 'The routing decision is based on the sum of the routing scores and the expert bias. ' + 'See https://arxiv.org/abs/2408.15664 for details.') + group.add_argument('--moe-router-bias-update-rate', type=float, default=1e-3, + help='Expert bias update rate in the aux-loss-free load balancing strategy. ' + 'The expert bias is updated based on the number of assigned tokens to each expert in a global batch, ' + 'where the bias is increased for the experts with less assigned tokens and decreased for the experts with more assigned tokens. ' + 'The default value 1e-3 is same as that used in DeepSeekV3.') + group.add_argument('--moe-use-legacy-grouped-gemm', action='store_true', + help='Use legacy GroupedMLP rather than TEGroupedMLP. Note: The legacy one will be deprecated soon.') + group.add_argument('--moe-aux-loss-coeff', type=float, default=0.0, + help='Scaling coefficient for the aux loss: a starting value of 1e-2 is recommended.') + group.add_argument('--moe-z-loss-coeff', type=float, default=None, + help='Scaling coefficient for the z-loss: a starting value of 1e-3 is recommended.') + group.add_argument('--moe-input-jitter-eps', type=float, default=None, + help='Add noise to the input tensor by applying jitter with a specified epsilon value.') + group.add_argument('--moe-token-dispatcher-type', type=str, + choices=['allgather', 'alltoall', 'flex', 'alltoall_seq'], + default='allgather', + help="The type of token dispatcher to use. The default is 'allgather'. Options are 'allgather', 'alltoall' and 'alltoall_seq'. We recommend using 'alltoall' when applying expert parallelism. 
For more information, please refer to the documentation in core/moe/README.") + group.add_argument('--moe-enable-deepep', action='store_true', + help='[Experimental] Enable DeepSeek/DeepEP for efficient token dispatching and combine in MoE models. Only works with flex token dispatcher by setting --moe-token-dispatcher-type=flex.') + group.add_argument('--moe-per-layer-logging', action='store_true', + help='Enable per-layer logging for MoE, currently supports auxiliary loss and z loss.') + # Token dropping arguments + group.add_argument('--moe-expert-capacity-factor', type=float, default=None, + help='The capacity factor for each expert, None means no token will be dropped.') + group.add_argument('--moe-pad-expert-input-to-capacity', action='store_true', + help='Pads the input for each expert to match the expert capacity length, effective only after the --moe-expert-capacity-factor is set.') + group.add_argument('--moe-token-drop-policy', type=str, default='probs', choices=['probs', 'position'], + help='The policy to drop tokens. Can be either "probs" or "position". If "probs", the tokens with the lowest probabilities will be dropped. If "position", tokens at the end of each batch will be dropped.') + group.add_argument('--moe-layer-recompute', action='store_true', + help='Enable checkpointing for moe_layer, should be used when memory is not sufficient.') + group.add_argument('--moe-extended-tp', action='store_true', + help='Deprecated. Use --expert-tensor-parallel-size instead.') + group.add_argument('--moe-use-upcycling', action='store_true', + help='Load a checkpoint of a dense model, convert it into an MoE model, and save the converted model to the path specified by --save. ' + 'Upcycling is implemented on the top of distributed checkpointing, so it supports parallel modes different from the dense model.') + group.add_argument('--moe-permute-fusion', action='store_true', + help='Fuse token rearrangement ops during token dispatching.') + + return parser + +def _add_mla_args(parser): + group = parser.add_argument_group(title="mla") + group.add_argument('--q-lora-rank', type=int, default=None, + help="Rank of Query tensor's low rank representation.") + group.add_argument('--kv-lora-rank', type=int, default=32, + help="Rank of Key and Value tensors' low rank representation.") + group.add_argument('--qk-head-dim', type=int, default=128, + help="Dimension of the head in the QK projection. q_head_dim = qk_head_dim + qk_pos_emb_head_dim") + group.add_argument('--qk-pos-emb-head-dim', type=int, default=64, + help="Dimension of the position embedding in the QK projection.") + group.add_argument('--v-head-dim', type=int, default=128, + help="Dimension of the head in the V projection.") + group.add_argument('--rotary-scaling-factor', type=float, default=1.0, + help="Rotary scaling factor for the rotary embeddings.") + group.add_argument('--mscale', type=float, default=1.0, + help="Mscale for YaRN RoPE in multi-latent attention.") + group.add_argument('--mscale-all-dim', type=float, default=1.0, + help="Mscale all dimensions for YaRN RoPE in multi-latent attention.") + + return parser + +def _add_experimental_args(parser): + group = parser.add_argument_group(title='experimental') + + group.add_argument('--spec', type=str, default=None, nargs='*', + help='Specify the pair ' + 'that returns a spec to customize a model, transformer ' + 'block, or transformer layer, depending on the use case.' + 'To use local spec specify local as the argument.' 
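# Worked example (editor's addition, not part of the patch) of the relation quoted
# in the --qk-head-dim help: q_head_dim = qk_head_dim + qk_pos_emb_head_dim.
# With the defaults above, each query head is 128 + 64 = 192 dimensional.
qk_head_dim, qk_pos_emb_head_dim = 128, 64
assert qk_head_dim + qk_pos_emb_head_dim == 192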
+ 'For more details, see the model class, ' + '`transformer_block.py`, or `transformer_layer.py`') + group.add_argument('--hybrid-attention-ratio', type=float, default=0.0, + help='Ratio of attention layers to total layers, in the ' + 'range [0.0, 1.0].') + group.add_argument('--hybrid-mlp-ratio', type=float, default=0.0, + help='Ratio of mlp layers to total layers, in the ' + 'range [0.0, 1.0].') + group.add_argument('--hybrid-override-pattern', type=str, default=None, + help='Force a specific hybrid layer pattern. The value' + 'should be a string of characters chosen from' + 'core.ssm.mamba_hybrid_layer_allocation.Symbols.' + 'If a value greater than 0.0 is supplied to any of the ' + 'hybrid ratio arguments, then the number of each type' + 'of layer in the override pattern must match number in' + 'the overidden pattern') + group.add_argument('--yaml-cfg', type=str, default=None, + help = 'Config file to add additional arguments') + + # Args of precision-aware optimizer + group.add_argument('--use-precision-aware-optimizer', action='store_true', + help='Use the precision-aware optimizer in TransformerEngine, which allows ' + 'setting the main params and optimizer states to lower precision, such as ' + 'fp16 and fp8.') + group.add_argument('--main-grads-dtype', default='fp32', choices=['fp32', 'bf16'], + help='Dtype of main grads when enabling precision-aware-optimizer') + group.add_argument('--main-params-dtype', default='fp32', choices=['fp32', 'fp16'], + help='Dtype of main params when enabling precision-aware-optimizer') + group.add_argument('--exp-avg-dtype', default='fp32', choices=['fp32', 'fp16', 'fp8'], + help='Dtype of exp_avg when enabling precision-aware-optimizer') + group.add_argument('--exp-avg-sq-dtype', default='fp32', choices=['fp32', 'fp16', 'fp8'], + help='Dtype of exp_avg_sq when enabling precision-aware-optimizer') + return parser diff --git a/megatron/training/async_utils.py b/megatron/training/async_utils.py index 44530ad..96163ad 100644 --- a/megatron/training/async_utils.py +++ b/megatron/training/async_utils.py @@ -1,43 +1,65 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -""" -This module provides a singleton instance of AsyncCallsQueue which manages -the async checkpoint save calls. -""" -import logging - -from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue, AsyncRequest -from megatron.training import get_args -from megatron.training.utils import print_rank_0 - -logger = logging.getLogger(__name__) - -# Singleton manager of async calls -_async_calls_queue = AsyncCallsQueue() - - -def schedule_async_save(async_request: AsyncRequest): - """ Schedule the async save request. - - Args: - async_request (AsyncRequest): the async save request. - """ - _async_calls_queue.schedule_async_request(async_request) - - -def maybe_finalize_async_save(blocking: bool = False): - """ Finalizes active async save calls. - - Args: - blocking (bool, optional): if True, will wait until all active requests - are done. Otherwise, finalizes only the async request that already - finished. Defaults to False. - """ - args = get_args() - if not args.async_save: - return - - if blocking and _async_calls_queue.get_num_unfinalized_calls() > 0: - print_rank_0('Unfinalized async checkpoint saves. Finalizing them synchronously now.') - - _async_calls_queue.maybe_finalize_async_calls(blocking) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +""" +This module provides a singleton instance of AsyncCallsQueue which manages +the async checkpoint save calls. +""" +import logging + +from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue, AsyncRequest +from megatron.training import get_args +from megatron.training.utils import print_rank_0 + +logger = logging.getLogger(__name__) + +# Singleton manager of async calls +# The default is `TemporalAsyncCaller` +_async_calls_queue = AsyncCallsQueue() + + +def init_persistent_async_worker(): + global _async_calls_queue + # Recreate the async_calls_queue for persistent worker + # This duplicate step is for backward compatiblity + _async_calls_queue = AsyncCallsQueue(persistent=True) + + +def schedule_async_save(async_request: AsyncRequest): + """Schedule the async save request. + + Args: + async_request (AsyncRequest): the async save request. + """ + _async_calls_queue.schedule_async_request(async_request) + + +def maybe_finalize_async_save(blocking: bool = False, terminate=False): + """Finalizes active async save calls. + + Args: + blocking (bool, optional): if True, will wait until all active requests + are done. Otherwise, finalizes only the async request that already + finished. Defaults to False. + terminate (bool, optional): if True, the asynchronous queue will + be closed as the last action of this function. + """ + args = get_args() + if not args.async_save: + return + + if blocking and not is_empty_async_queue(): + print_rank_0('Unfinalized async checkpoint saves. Finalizing them synchronously now.') + + _async_calls_queue.maybe_finalize_async_calls(blocking, no_dist=False) + + if terminate: + _async_calls_queue.close() + + +def is_empty_async_queue() -> bool: + """Check if async calls queue is empty. This result is consistent across ranks. + + Returns: + bool: True if there is any ongoing async call. + """ + return _async_calls_queue.get_num_unfinalized_calls() == 0 diff --git a/megatron/training/checkpointing.py b/megatron/training/checkpointing.py index b51a6c7..a125ee7 100644 --- a/megatron/training/checkpointing.py +++ b/megatron/training/checkpointing.py @@ -1,1379 +1,1427 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -"""Input/output checkpointing.""" - -import contextlib -import os -import random -import shutil -import sys -import threading -from enum import Enum, auto -from logging import getLogger -from pathlib import Path - -import numpy as np -from time import time - -import torch - -from megatron.core import mpu, tensor_parallel, dist_checkpointing -from megatron.core.dist_checkpointing.mapping import ShardedObject -from megatron.core.dist_checkpointing.serialization import get_default_load_sharded_strategy -from megatron.core.dist_checkpointing.state_dict_transformation import ( - prepare_state_dict_for_save, - recreate_state_dict_after_load, -) -from megatron.core.dist_checkpointing.strategies.fully_parallel import \ - FullyParallelSaveStrategyWrapper, FullyParallelLoadStrategyWrapper -from megatron.core.num_microbatches_calculator import update_num_microbatches -from megatron.core.utils import is_float8tensor -from megatron.core.rerun_state_machine import get_rerun_state_machine -from .async_utils import schedule_async_save -from .global_vars import get_args, get_one_logger -from .utils import unwrap_model, print_rank_0, append_to_progress_log, is_last_rank -from ..core.dist_checkpointing.serialization import \ - get_default_save_sharded_strategy -from .one_logger_utils import on_save_checkpoint_start, on_save_checkpoint_success - -# [ModelOpt]: Import -try: - from modelopt.torch.opt.plugins import ( - save_modelopt_state, - save_sharded_modelopt_state, - restore_modelopt_state, - restore_sharded_modelopt_state, - ) - has_nvidia_modelopt = True -except Exception: - has_nvidia_modelopt = False - -_CHECKPOINT_VERSION = None - -logger = getLogger(__name__) -_NON_PERSISTENT_CKPT_SUBDIR = 'non_persistent' - -def set_checkpoint_version(value): - global _CHECKPOINT_VERSION - if _CHECKPOINT_VERSION is not None: - assert _CHECKPOINT_VERSION == value, \ - "checkpoint versions do not match" - _CHECKPOINT_VERSION = value - - -def get_checkpoint_version(): - global _CHECKPOINT_VERSION - return _CHECKPOINT_VERSION - - -def check_checkpoint_args(checkpoint_args): - """Ensure fixed arguments for a model are the same for the input - arguments and the one retrieved from checkpoint.""" - args = get_args() - - def _compare(arg_name, old_arg_name=None, default=None): - if old_arg_name is not None: - ckpt_arg_name = old_arg_name - else: - ckpt_arg_name = arg_name - if default is not None: - checkpoint_value = getattr(checkpoint_args, ckpt_arg_name, default) - else: - checkpoint_value = getattr(checkpoint_args, ckpt_arg_name) - args_value = getattr(args, arg_name) - error_message = '{} value from checkpoint ({}) is not equal to the ' \ - 'input argument value ({}).'.format( - arg_name, checkpoint_value, args_value) - assert checkpoint_value == args_value, error_message - - _compare('num_layers') - _compare('hidden_size') - _compare('num_attention_heads') - _compare('add_position_embedding', default=True) - if args.vocab_file: - _compare('max_position_embeddings') - _compare('make_vocab_size_divisible_by') - if not args.use_dist_ckpt: - _compare('padded_vocab_size') - _compare('tokenizer_type') - if args.data_parallel_random_init: - _compare('data_parallel_random_init') - if get_checkpoint_version() < 3.0: - _compare('tensor_model_parallel_size', - old_arg_name='model_parallel_size') - if get_checkpoint_version() >= 3.0 and not args.use_dist_ckpt: - _compare('tensor_model_parallel_size') - _compare('pipeline_model_parallel_size') - - -def ensure_directory_exists(filename, check_parent=True): - """Build filename's 
path if it does not already exists.""" - dirname = os.path.dirname(filename) if check_parent else filename - os.makedirs(dirname, exist_ok=True) - - -def get_checkpoint_name(checkpoints_path, iteration, release=False, - pipeline_parallel=None, - tensor_rank=None, pipeline_rank=None, - expert_parallel=None, expert_rank=None, - return_base_dir=False, basename="model_optim_rng.pt"): - """Determine the directory name for this rank's checkpoint.""" - if release: - directory = 'release' - else: - directory = 'iter_{:07d}'.format(iteration) - if return_base_dir: - common_path = os.path.join(checkpoints_path, directory) - return common_path - - # Use both the tensor and pipeline MP rank. - if pipeline_parallel is None: - pipeline_parallel = (mpu.get_pipeline_model_parallel_world_size() > 1) - if tensor_rank is None: - tensor_rank = mpu.get_tensor_model_parallel_rank() - if pipeline_rank is None: - pipeline_rank = mpu.get_pipeline_model_parallel_rank() - if expert_parallel is None: - expert_parallel = (mpu.get_expert_model_parallel_world_size() > 1) - if expert_rank is None: - expert_rank = mpu.get_expert_model_parallel_rank() - - # Use both the tensor and pipeline MP rank. If using the distributed - # optimizer, then the optimizer's path must additionally include the - # data parallel rank. - if not pipeline_parallel: - common_path = os.path.join(checkpoints_path, directory, - f'mp_rank_{tensor_rank:02d}') - else: - common_path = os.path.join(checkpoints_path, directory, - f'mp_rank_{tensor_rank:02d}_{pipeline_rank:03d}') - - if expert_parallel: - common_path = common_path + f'_{expert_rank:03d}' - - return os.path.join(common_path, basename) - - -def get_distributed_optimizer_checkpoint_name(model_checkpoint_name): - return os.path.join(os.path.dirname(model_checkpoint_name), - "distrib_optim.pt") - - -def find_checkpoint_rank_0(checkpoints_path, iteration, release=False): - """Finds the checkpoint for rank 0 without knowing if we are using - pipeline parallelism/expert parallelism or not. - - Since the checkpoint naming scheme changes if pipeline or expert - parallelism is present, we need to look for both naming schemes if - we don't know if the checkpoint has pipeline or expert parallelism. 
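# Worked example (editor's addition, not part of the patch) of the directory scheme
# implemented by get_checkpoint_name above: iteration 5000, tensor rank 1, pipeline
# rank 2, with pipeline parallelism and without expert parallelism, resolves to
#   <save>/iter_0005000/mp_rank_01_002/model_optim_rng.pt
print('iter_{:07d}'.format(5000) + '/' + f'mp_rank_{1:02d}_{2:03d}' + '/model_optim_rng.pt')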
- """ - - # Look for checkpoint with no pipelining and no expert parallelism - filename = get_checkpoint_name(checkpoints_path, iteration, release, - pipeline_parallel=False, - tensor_rank=0, pipeline_rank=0, - expert_parallel=False, expert_rank=0) - if os.path.isfile(filename): - return filename - - # Look for checkpoint with no pipelining and expert parallelism - filename = get_checkpoint_name(checkpoints_path, iteration, release, - pipeline_parallel=False, - tensor_rank=0, pipeline_rank=0, - expert_parallel=True, expert_rank=0) - if os.path.isfile(filename): - return filename - - # Look for checkpoint with pipelining and no expert parallelism - filename = get_checkpoint_name(checkpoints_path, iteration, release, - pipeline_parallel=True, - tensor_rank=0, pipeline_rank=0, - expert_parallel=False, expert_rank=0) - if os.path.isfile(filename): - return filename - - # Look for checkpoint with pipelining and expert parallelism - filename = get_checkpoint_name(checkpoints_path, iteration, release, - pipeline_parallel=True, - tensor_rank=0, pipeline_rank=0, - expert_parallel=True, expert_rank=0) - if os.path.isfile(filename): - return filename - - # Look for a distributed checkpoint - filename = get_checkpoint_name(checkpoints_path, iteration, release, - pipeline_parallel=True, - return_base_dir=True) - if dist_checkpointing.check_is_distributed_checkpoint(filename): - return filename - - return None - - -def get_checkpoint_tracker_filename(checkpoints_path): - - """Tracker file rescords the latest chckpoint during - training to restart from.""" - return os.path.join(checkpoints_path, 'latest_checkpointed_iteration.txt') - - -def checkpoint_exists(checkpoints_path): - if checkpoints_path is None: - return False - load_step = 'latest_checkpointed_iteration.txt' - return os.path.exists(os.path.join(checkpoints_path, load_step)) - - -def read_metadata(tracker_filename): - # Read the tracker file and either set the iteration or - # mark it as a release checkpoint. - iteration = 0 - release = False - with open(tracker_filename, 'r') as f: - metastring = f.read().strip() - try: - iteration = int(metastring) - except ValueError: - release = metastring == 'release' - if not release: - print_rank_0('ERROR: Invalid metadata file {}. Exiting'.format( - tracker_filename)) - sys.exit() - assert iteration > 0 or release, 'error parsing metadata file {}'.format( - tracker_filename) - - # Get the max iteration retrieved across the ranks. - if torch.distributed.is_initialized(): - iters_cuda = torch.tensor([iteration], dtype=torch.long, device='cuda') - torch.distributed.all_reduce(iters_cuda, op=torch.distributed.ReduceOp.MAX) - max_iter = iters_cuda[0].item() - - # We should now have all the same iteration. - # If not, print a warning and chose the maximum - # iteration across all ranks. 
- if iteration != max_iter: - rank = torch.distributed.get_rank() - print('WARNING: on rank {} found iteration {} in the ' - 'metadata while max iteration across the ranks ' - 'is {}, replacing it with max iteration.'.format( - rank, iteration, max_iter), flush=True) - else: - # When loading a checkpoint outside of training (for example, - # when editing it), we might not have torch distributed - # initialized, in this case, just assume we have the latest - max_iter = iteration - return max_iter, release - - -def get_rng_state(use_dist_ckpt: bool = False): - """ collect rng state across data parallel ranks """ - args = get_args() - rng_state = { - 'random_rng_state': random.getstate(), - 'np_rng_state': np.random.get_state(), - 'torch_rng_state': torch.get_rng_state(), - 'cuda_rng_state': torch.cuda.get_rng_state(), - 'rng_tracker_states': tensor_parallel.get_cuda_rng_tracker().get_states()} - - rng_state_list = None - if torch.distributed.is_initialized() and \ - mpu.get_data_parallel_world_size() > 1 and \ - args.data_parallel_random_init: - rng_state_list = \ - [None for i in range(mpu.get_data_parallel_world_size())] - torch.distributed.all_gather_object( - rng_state_list, - rng_state, - group=mpu.get_data_parallel_group()) - else: - rng_state_list = [rng_state] - - if use_dist_ckpt: - pp_rank = mpu.get_pipeline_model_parallel_rank() - pp_size = mpu.get_pipeline_model_parallel_world_size() - tp_rank = mpu.get_tensor_model_parallel_rank() - tp_size = mpu.get_tensor_model_parallel_world_size() - rng_state_list = ShardedObject('rng_state', rng_state_list, (pp_size, tp_size), (pp_rank, tp_rank), - replica_id=mpu.get_data_parallel_rank(with_context_parallel=True)) - - return rng_state_list - -class CheckpointType(Enum): - LEGACY = auto() - LOCAL = auto() - GLOBAL = auto() - -def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, - checkpointing_context=None, pipeline_rank=None, expert_rank=None, tensor_rank=None, pipeline_parallel=None, expert_parallel=None, non_persistent_ckpt=False, - train_data_iterator=None, ft_client=None, preprocess_common_state_dict_fn = None): - """Save a model, optimizer and optionally dataloader checkpoint. - - Checkpointing context is used to persist some checkpointing state - throughout a single job. Must be initialized externally (not used if None). - - If non_persistent_ckpt is True, - the checkpoint will be saved with special functionality for removing old checkpoints. - There are several types of non-persistent checkpoints: - "global" - Saved as a standard checkpoint (e.g., on Lustre) with old checkpoints being removed. - "local" - [TBD] Each rank saves a portion of the checkpoint locally (e.g., on SSD/ramdisk). - "in_memory" - [TBD] A special kind of local checkpoint that avoids serialization. - - Dataloader checkpoint is only saved if the dataloader supports it. Currently this applies only - to the Megatron Energon dataloader (multimodal) and not the built-in Megatron dataloader (text-only). - """ - start_ckpt = time() - args = get_args() - - # Prepare E2E metrics at start of save checkpoint - productive_metrics = on_save_checkpoint_start(args.async_save) - - # Only rank zero of the data parallel writes to the disk. - model = unwrap_model(model) - - # Handle non_persistent_ckpt flag. 
Besides overwriting `args.save` and - # `args.use_dist_ckpt`, non-persistent global ckpt requires no additional logic - ckpt_type = CheckpointType.GLOBAL if args.use_dist_ckpt else CheckpointType.LEGACY - save_dir = args.save - if non_persistent_ckpt: - if args.non_persistent_ckpt_type == 'global': - ckpt_type = CheckpointType.GLOBAL - save_dir = ( - args.non_persistent_global_ckpt_dir - if args.non_persistent_global_ckpt_dir - else os.path.join(save_dir, _NON_PERSISTENT_CKPT_SUBDIR) - ) - # TODO Can we ensure the previous checkpoint is saved? We don't want to allow two saves in parallel. - cleanup_old_non_persistent_checkpoint( - save_dir, leave_ckpt_num=1, do_async=args.async_save - ) - elif args.non_persistent_ckpt_type == 'local': - raise RuntimeError('LocalCheckpointManagers are not yet integrated') - ckpt_type = CheckpointType.LOCAL - save_dir = checkpointing_context['local_checkpoint_manager'].local_ckpt_dir - else: - assert False, 'Please use local or global non-persistent checkpoints' \ - f'(got: {args.non_persistent_ckpt_type})' - - ckpt_format = args.ckpt_format if ckpt_type == CheckpointType.GLOBAL else 'torch' - print_rank_0('saving checkpoint at iteration {:7d} to {} in {} format'.format( - iteration, save_dir, ckpt_format)) - - # Collect rng state across data parallel ranks. - rng_state = get_rng_state(ckpt_type != CheckpointType.LEGACY) - - # Collect rerun state across all ranks - rerun_state_machine = get_rerun_state_machine() - rerun_state = rerun_state_machine.state_dict( - data_iterator=train_data_iterator, use_dist_ckpt=ckpt_type != CheckpointType.LEGACY - ) - - # Checkpoint name. - return_base_dir = (ckpt_type != CheckpointType.LEGACY) - checkpoint_name = get_checkpoint_name(save_dir, iteration, release=False, pipeline_parallel=pipeline_parallel, - tensor_rank=tensor_rank, pipeline_rank=pipeline_rank, expert_parallel=expert_parallel, expert_rank=expert_rank, return_base_dir=return_base_dir) - - # Save dataloader state if the dataloader supports it (currently only Megatron Energon). - save_dataloader_state(train_data_iterator, iteration, getattr(args, "dataloader_save", None)) - - # Save distributed optimizer's custom parameter state. - if ( - args.use_distributed_optimizer - and not args.no_save_optim - and optimizer is not None - and ckpt_type == CheckpointType.LEGACY - ): - optim_checkpoint_name = \ - get_distributed_optimizer_checkpoint_name(checkpoint_name) - ensure_directory_exists(optim_checkpoint_name) - if not optimizer.is_stub_optimizer: - optimizer.save_parameter_state(optim_checkpoint_name) - - async_save_request = None - if args.async_save: - if ckpt_type == CheckpointType.LEGACY: - raise NotImplementedError('Async checkpoint save not implemented for legacy checkpoints') - elif ckpt_type == CheckpointType.GLOBAL and args.ckpt_format != 'torch_dist': - raise NotImplementedError(f'Async checkpoint save not implemented for {args.ckpt_format} distributed checkpoint format') - - rank = torch.distributed.get_rank() if torch.distributed.is_initialized() else 0 - - # Collect args, model, RNG. 
- if not torch.distributed.is_initialized() \ - or mpu.get_expert_data_parallel_rank() == 0 \ - or ckpt_type != CheckpointType.LEGACY: - optim_sd_kwargs = {} - if ckpt_type != CheckpointType.LEGACY and args.use_distributed_optimizer: - optim_sd_kwargs['sharding_type'] = ('fully_sharded_model_space' - if args.ckpt_fully_parallel_save - else 'dp_zero_gather_scatter') - print_rank_0(f'Storing distributed optimizer sharded state of type {optim_sd_kwargs["sharding_type"]}') - state_dict = generate_state_dict( - args, - model, - optimizer, - opt_param_scheduler, - rng_state, - use_dist_ckpt=ckpt_type != CheckpointType.LEGACY, - iteration=iteration, - optim_sd_kwargs=optim_sd_kwargs, - rerun_state=rerun_state, - ) - - if args.enable_ft_package and ft_client is not None: - state_dict["ft_state"] = ft_client.state_dict() - state_dict['num_floating_point_operations_so_far'] = num_floating_point_operations_so_far - if ckpt_type == CheckpointType.GLOBAL: - if not torch.distributed.is_initialized() or torch.distributed.get_rank() == 0: - # TODO Handle non-empty directories (e.g., after a crash during saving). - ensure_directory_exists(checkpoint_name, check_parent=False) - if checkpointing_context is not None and 'save_strategy' in checkpointing_context: - save_strategy = checkpointing_context['save_strategy'] - # Already saved once before - don't need to rerun sharding validation - validate_sharding_integrity = not args.ckpt_assume_constant_structure - else: - validate_sharding_integrity = True - save_strategy = get_default_save_sharded_strategy(args.ckpt_format) - if args.ckpt_assume_constant_structure and args.ckpt_format == 'torch_dist': - save_strategy.use_cached_ckpt_structure = args.ckpt_assume_constant_structure - if args.ckpt_fully_parallel_save: - save_strategy = FullyParallelSaveStrategyWrapper(save_strategy, mpu.get_data_parallel_group(with_context_parallel=True), - args.ckpt_assume_constant_structure) - # Store save strategy for future checkpoint saves - if checkpointing_context is not None: - checkpointing_context['save_strategy'] = save_strategy - end_ckpt = time() - logger.debug(f"rank: {rank}, takes {end_ckpt - start_ckpt} to prepare state dict for ckpt ") - async_save_request = dist_checkpointing.save(state_dict, checkpoint_name, save_strategy, - async_sharded_save=args.async_save, - validate_access_integrity=validate_sharding_integrity, - preprocess_common_before_consistancy_check=preprocess_common_state_dict_fn) - # [ModelOpt]: save sharded modelopt_state - if has_nvidia_modelopt: - save_sharded_modelopt_state(model, checkpoint_name, (args.ckpt_format, 1)) - else: - # [ModelOpt]: Inject modelopt_state into state_dict - if has_nvidia_modelopt: - save_modelopt_state(model, state_dict) - - if ckpt_type == CheckpointType.LOCAL: - state_dict_for_save = prepare_state_dict_for_save( - state_dict, algo=args.non_persistent_local_ckpt_algo - ) - async_save_request = checkpointing_context['local_checkpoint_manager'].save( - state_dict_for_save, iteration, is_async=bool(args.async_save) - ) - else: - assert ckpt_type == CheckpointType.LEGACY - # Save. 
- ensure_directory_exists(checkpoint_name) - torch.save(state_dict, checkpoint_name) - start_misc = time() - if not args.async_save: - assert async_save_request is None - # Wait so everyone is done (necessary) - if torch.distributed.is_initialized(): - torch.distributed.barrier() - - # And update the latest iteration - if not torch.distributed.is_initialized() \ - or torch.distributed.get_rank() == 0: - tracker_filename = get_checkpoint_tracker_filename(save_dir) - - if ckpt_type == CheckpointType.LOCAL: - def iter_finalize_fn(): - print_rank_0(' successfully saved local checkpoint from iteration {:7d}' - .format(iteration)) - if args.log_progress and args.async_save: - append_to_progress_log(f'Saved async local checkpoint\tIteration: {iteration}', - barrier=False) - else: - def iter_finalize_fn(): - with open(tracker_filename, 'w') as f: - f.write(str(iteration)) - print_rank_0(f' successfully saved checkpoint from iteration {int(iteration):7d} to {args.save} ' - f'[ t {(tensor_rank if tensor_rank is not None else mpu.get_tensor_model_parallel_rank()) + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' - f'p {(pipeline_rank if pipeline_rank is not None else mpu.get_pipeline_model_parallel_rank()) + 1}/{mpu.get_pipeline_model_parallel_world_size()} ]') - if args.log_progress and args.async_save: - append_to_progress_log(f'Saved async checkpoint\tIteration: {iteration}', - barrier=False) - - if args.async_save: - assert async_save_request is not None - async_save_request.add_finalize_fn(iter_finalize_fn) - else: - iter_finalize_fn() - - # Additional callback for one_logger (last rank) - if not torch.distributed.is_initialized() \ - or is_last_rank(): - def onelogger_finalize_fn(): - on_save_checkpoint_success(productive_metrics, args.async_save) - if args.async_save: - assert async_save_request is not None - async_save_request.add_finalize_fn(onelogger_finalize_fn) - else: - onelogger_finalize_fn() - - if args.async_save: - schedule_async_save(async_save_request) - print_rank_0(' scheduled an async checkpoint save at iteration {:7d} to {}' \ - .format(iteration, save_dir)) - - # Wait so everyone is done (not necessary) - if torch.distributed.is_initialized(): - torch.distributed.barrier() - - end_misc = time() - logger.debug(f"rank: {rank}, takes {end_misc - start_misc} to finalize ckpt save ") - - -def cleanup_old_non_persistent_checkpoint(save_dir, leave_ckpt_num=1, do_async=False): - if torch.distributed.is_initialized() and torch.distributed.get_rank() != 0: - return - save_dir = Path(save_dir) - - iter_prefix = "iter_" - iter_ckpts = save_dir.rglob(f'{iter_prefix}*') - sorted_iter_ckpts = sorted(iter_ckpts, key=lambda ckpt_name: int(ckpt_name.name[len(iter_prefix):])) - if not sorted_iter_ckpts: - return - rm_iter_ckpts = sorted_iter_ckpts[:-leave_ckpt_num] - print_rank_0(f'Non-persistent checkpoints scheduled for removal: {rm_iter_ckpts}') - print_rank_0(f'Non-persistent checkpoints to be kept: {sorted_iter_ckpts[-leave_ckpt_num:]}') - - def remove_iter_ckpts(_iter_ckpts): - for ckpt in _iter_ckpts: - shutil.rmtree(ckpt) - if do_async: - threading.Thread(target=remove_iter_ckpts, args=(rm_iter_ckpts,)).start() - else: - remove_iter_ckpts(rm_iter_ckpts) - - -def save_dataloader_state(train_iterator, iteration, dataloader_save_path): - """Saves dataloader state if the dataloader supports it. - - Currently, this is only used by Megatron Energon dataloader (multimodal) to store its state at a - specific iteration. 
The Megatron built-in dataloader (text-only) creates index files upfront - to track its state. - - If the provided dataloader has `save_state` method, then it is called to save the state. - Otherwise, no state is saved. - - Args: - train_iterator (iterable): Train dataloader. - iteration (int): Current iteration. - dataloader_save_path (str): Path where the dataloader state is saved. - """ - # If no dataloader or saving path is provided, then exit early. - if train_iterator is None or dataloader_save_path is None: - return - - # If dataloader doesn't support saving state, exit early. - if not hasattr(train_iterator, "save_state"): - return - - # Save dataloader state for each data parallel rank only once. - first_rank = mpu.is_pipeline_first_stage(ignore_virtual=True) and mpu.get_tensor_model_parallel_rank() == 0 - if not first_rank: - return - - dp_rank = mpu.get_data_parallel_rank() - print(f"saving dataloader checkpoint at iteration {iteration} to {dataloader_save_path}") - train_dataloader_state_dict = train_iterator.save_state() - data_state_save_path = get_checkpoint_name( - dataloader_save_path, iteration, - basename=f'train_dataloader_dprank{dp_rank:03d}.pt' - ) - - torch.distributed.barrier(group=mpu.get_data_parallel_group()) - - if mpu.get_data_parallel_rank() == 0: - ensure_directory_exists(data_state_save_path) - - torch.distributed.barrier(group=mpu.get_data_parallel_group()) - - dataloader_save_dict = {} - dataloader_save_dict['dataloader_state_dict'] = train_dataloader_state_dict - torch.save(dataloader_save_dict, data_state_save_path) - - -def generate_state_dict(args, model, optimizer, opt_param_scheduler, - rng_state, use_dist_ckpt=False, iteration=None, - optim_sd_kwargs=None, rerun_state=None): - # Arguments, iteration, and model. - state_dict = {} - state_dict['args'] = args - state_dict['checkpoint_version'] = 3.0 - if iteration is not None: - state_dict['iteration'] = iteration - - if len(model) == 1: - state_dict['model'] = (model[0].sharded_state_dict() - if use_dist_ckpt else - model[0].state_dict_for_save_checkpoint()) - else: - for i in range(len(model)): - mpu.set_virtual_pipeline_model_parallel_rank(i) - state_dict['model%d' % i] = ( - model[i].sharded_state_dict() - if use_dist_ckpt else - model[i].state_dict_for_save_checkpoint()) - # Optimizer stuff. - if not args.no_save_optim: - if optimizer is not None and not optimizer.is_stub_optimizer: - state_dict['optimizer'] = (optimizer.sharded_state_dict(state_dict, **(optim_sd_kwargs or {})) - if use_dist_ckpt else - optimizer.state_dict()) - if opt_param_scheduler is not None: - state_dict['opt_param_scheduler'] = \ - opt_param_scheduler.state_dict() - - # Rerun state - state_dict['rerun_state_machine'] = rerun_state - - # RNG states. 
- if not args.no_save_rng: - state_dict["rng_state"] = rng_state - return state_dict - - -def _transpose_first_dim(t, num_splits, num_splits_first, model): - input_shape = t.size() - # We use a self_attention module but the values extracted aren't - # specific to self attention so should work for cross attention as well - while hasattr(model, 'module'): - model = model.module - attention_module = model.language_model.encoder.layers[0].self_attention - hidden_size_per_attention_head = attention_module.hidden_size_per_attention_head - num_attention_heads_per_partition = attention_module.num_attention_heads_per_partition - if num_splits_first: - """[num_splits * np * hn, h] - -->(view) [num_splits, np, hn, h] - -->(tranpose) [np, num_splits, hn, h] - -->(view) [np * num_splits * hn, h] """ - - intermediate_shape = \ - (num_splits, num_attention_heads_per_partition, - hidden_size_per_attention_head) + input_shape[1:] - - t = t.view(*intermediate_shape) - t = t.transpose(0, 1).contiguous() - else: - """[np * hn * num_splits, h] - -->(view) [np, hn, num_splits, h] - -->(tranpose) [np, num_splits, hn, h] - -->(view) [np * num_splits * hn, h] """ - - intermediate_shape = \ - (num_attention_heads_per_partition, - hidden_size_per_attention_head, num_splits) +\ - input_shape[1:] - - t = t.view(*intermediate_shape) - t = t.transpose(1, 2).contiguous() - t = t.view(*input_shape) - - return t - - -def fix_query_key_value_ordering(model, checkpoint_version): - """Fix up query/key/value matrix ordering if checkpoint - version is smaller than 2.0 - """ - if checkpoint_version < 2.0: - if isinstance(model, list): - assert len(model)==1 - model = model[0] - for name, param in model.named_parameters(): - if name.endswith(('.query_key_value.weight', '.query_key_value.bias')): - if checkpoint_version == 0: - fixed_param = _transpose_first_dim(param.data, 3, True, model) - elif checkpoint_version == 1.0: - fixed_param = _transpose_first_dim(param.data, 3, False, model) - else: - print_rank_0(f"Invalid checkpoint version {checkpoint_version}.") - sys.exit() - param.data.copy_(fixed_param) - if name.endswith(('.key_value.weight', '.key_value.bias')): - if checkpoint_version == 0: - fixed_param = _transpose_first_dim(param.data, 2, True, model) - elif checkpoint_version == 1.0: - fixed_param = _transpose_first_dim(param.data, 2, False, model) - else: - print_rank_0(f"Invalid checkpoint version {checkpoint_version}.") - sys.exit() - param.data.copy_(fixed_param) - print_rank_0(" successfully fixed query-key-values ordering for" - " checkpoint version {}".format(checkpoint_version)) - - -def _get_non_persistent_iteration(non_persistent_global_dir, args, checkpointing_context=None): - if args.non_persistent_ckpt_type is None: - return -1 - elif args.non_persistent_ckpt_type == "global": - tracker_filename = get_checkpoint_tracker_filename(non_persistent_global_dir) - if os.path.isfile(tracker_filename): - iteration, release = read_metadata(tracker_filename) - if release: - raise RuntimeError('Non-persistent checkpoint can\'t be a release checkpoint') - else: - iteration = -1 - print_rank_0('WARNING: could not find the metadata file {}'.format(tracker_filename)) - print_rank_0(' will not load any non-persistent checkpoint') - return iteration - elif args.non_persistent_ckpt_type == "local": - raise RuntimeError('LocalCheckpointManagers are not yet integrated') - return checkpointing_context['local_checkpoint_manager'].get_latest_checkpoint_iteration() - else: - assert False, 'Please use local or global non-persistent 
checkpoints' \ - f'(got: {args.non_persistent_ckpt_type})' - - -def _load_non_persistent_base_checkpoint( - non_persistent_global_dir, - args, - rank0, - sharded_state_dict, - non_persistent_iteration, - checkpointing_context=None, -): - """ Load the base state_dict from a non-persistent distributed checkpoint. - Depending on the non_persistent_ckpt_type, different logic may be required. - """ - assert args.non_persistent_ckpt_type is not None - if args.non_persistent_ckpt_type == "global": - if not rank0: - print_rank_0( - f'Loading from a non-persistent checkpoint (non-persistent iter {non_persistent_iteration})' - ) - return _load_global_dist_base_checkpoint( - non_persistent_global_dir, args, rank0, sharded_state_dict, non_persistent_iteration, False - ) - elif args.non_persistent_ckpt_type == "local": - raise RuntimeError('LocalCheckpointManagers are not yet integrated') - intermediate_state_dict, checkpoint_name = checkpointing_context[ - 'local_checkpoint_manager' - ].load() - state_dict = recreate_state_dict_after_load( - sharded_state_dict, - intermediate_state_dict, - algo=args.non_persistent_local_ckpt_algo, - ) - return state_dict, checkpoint_name, False, CheckpointType.LOCAL - else: - assert False, 'Please use local or global non-persistent checkpoints' \ - f'(got: {args.non_persistent_ckpt_type})' - - -def _load_global_dist_base_checkpoint( - load_dir, args, rank0, sharded_state_dict, iteration, release -): - """ Load the base state_dict from the given directory containing the global distributed checkpoint """ - if rank0: - checkpoint_name = find_checkpoint_rank_0(load_dir, iteration, release) - state_dict = dist_checkpointing.load_common_state_dict(checkpoint_name) - return state_dict, checkpoint_name, release, CheckpointType.GLOBAL - - if sharded_state_dict is None: - assert not args.auto_detect_ckpt_format and not args.use_dist_ckpt, ( - args.auto_detect_ckpt_format, - args.use_dist_ckpt, - ) - raise RuntimeError( - 'Detected load from a distributed checkpoint, but neither --use-dist-ckpt nor --auto-detect-ckpt-format is set.' - ) - - checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=True) - load_strategy = get_default_load_sharded_strategy(checkpoint_name) - # NOTE: `args.ckpt_fully_parallel_load` applies to both persistent and non-persistent checkpoints. - if args.ckpt_fully_parallel_load: - load_strategy = FullyParallelLoadStrategyWrapper( - load_strategy, mpu.get_data_parallel_group(with_context_parallel=True) - ) - state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) - return state_dict, checkpoint_name, release, CheckpointType.GLOBAL - - -def _load_base_checkpoint( - load_dir, - args, - rank0=False, - sharded_state_dict=None, - checkpointing_context=None, -): - """ Load the base state_dict from the given directory - - If rank0 is true, just loads rank 0 checkpoint, ignoring arguments. 
- """ - # Try to load non-persistent checkpoint first - non_persistent_global_dir = ( - args.non_persistent_global_ckpt_dir - if args.non_persistent_global_ckpt_dir or load_dir is None - else os.path.join(load_dir, _NON_PERSISTENT_CKPT_SUBDIR) - ) - non_persistent_iteration = _get_non_persistent_iteration( - non_persistent_global_dir, args, checkpointing_context - ) - iteration, release = -1, False - tracker_filename = 'because load directory is not defined' - if load_dir is not None: - tracker_filename = get_checkpoint_tracker_filename(load_dir) - if os.path.isfile(tracker_filename): - iteration, release = read_metadata(tracker_filename) - if non_persistent_iteration != -1: # there is a non-persistent checkpoint - if non_persistent_iteration >= iteration: - return _load_non_persistent_base_checkpoint( - non_persistent_global_dir, - args, - rank0, - sharded_state_dict, - non_persistent_iteration, - checkpointing_context, - ) - else: - print_rank_0('WARNING: non-persistent checkpoints are older than persistent checkpoint') - - # Otherwise we are dealing with global checkpoints - # If no tracker file, return nothing - if iteration == -1: - if not rank0: - print_rank_0('WARNING: could not find the metadata file {}'.format(tracker_filename)) - print_rank_0(' will not load any checkpoints and will start from random') - # Conditionally exit if checkpoint not found. - if args.exit_on_missing_checkpoint: - print_rank_0(">> '--exit-on-missing-checkpoint' set ... exiting. <<") - if torch.distributed.is_initialized(): - torch.distributed.barrier() - sys.exit() - - return None, "", False, None - - # Determine the type of the checkpoint - checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=True) - is_dist_ckpt = dist_checkpointing.check_is_distributed_checkpoint(checkpoint_name) - if not rank0: - dist_infix = "distributed " if is_dist_ckpt else "" - if release: - print_rank_0(f' loading release {dist_infix}checkpoint from {load_dir}') - else: - print_rank_0( - f' loading {dist_infix}checkpoint from {load_dir} at iteration {iteration}' - ) - - # Handle global distributed checkpoint - if is_dist_ckpt: - return _load_global_dist_base_checkpoint( - load_dir, args, rank0, sharded_state_dict, iteration, release - ) - # Handle global legacy checkpoint - if rank0: - checkpoint_name = find_checkpoint_rank_0(load_dir, iteration, release) - else: - checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=False) - try: - state_dict = torch.load(checkpoint_name, map_location='cpu') - except ModuleNotFoundError: - from megatron.legacy.fp16_deprecated import loss_scaler - - # For backward compatibility. 
- if not rank0: - print_rank_0(' > deserializing using the old code structure ...') - sys.modules['fp16.loss_scaler'] = sys.modules['megatron.legacy.fp16_deprecated.loss_scaler'] - sys.modules['megatron.fp16.loss_scaler'] = sys.modules[ - 'megatron.legacy.fp16_deprecated.loss_scaler' - ] - sys.modules['megatron.model'] = sys.modules['megatron.legacy.model'] - state_dict = torch.load(checkpoint_name, map_location='cpu') - sys.modules.pop('fp16.loss_scaler', None) - sys.modules.pop('megatron.fp16.loss_scaler', None) - sys.modules.pop('megatron.model', None) - except Exception as e: - print('could not load the checkpoint') - print(e) - sys.exit() - - return state_dict, checkpoint_name, release, CheckpointType.LEGACY - - -def load_args_from_checkpoint( - args, load_arg='load', checkpointing_context=None -): - """Set required arguments from the checkpoint specified in the - arguments. - - Will overwrite arguments that have a non-None default value, but - will leave any arguments that default to None as set. - - Returns the same args NameSpace with the new values added/updated. - - If no checkpoint is specified in args, or if the checkpoint is - there but invalid, the arguments will not be modified - - """ - load_dir = getattr(args, load_arg) - - if load_dir is None: - print_rank_0('No load directory specified, using provided arguments.') - return args - - state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( - load_dir, - args, - rank0=True, - checkpointing_context=checkpointing_context, - ) - - # Args. - if not state_dict: - print_rank_0('Checkpoint not found to provide arguments, using provided arguments.') - return args - - if 'args' not in state_dict: - print_rank_0('Checkpoint provided does not have arguments saved, using provided arguments.') - return args - - checkpoint_args = state_dict['args'] - checkpoint_version = state_dict.get('checkpoint_version', 0) - args.iteration = state_dict['iteration'] - - # One-off conversion for foundation models - if hasattr(checkpoint_args, 'disable_bias_linear'): - setattr( - checkpoint_args, 'add_bias_linear', not getattr(checkpoint_args, 'disable_bias_linear') - ) - - def _set_arg(arg_name, old_arg_name=None, force=False): - if not force and getattr(args, arg_name, None) is not None: - return - - if old_arg_name is not None: - checkpoint_value = getattr(checkpoint_args, old_arg_name, None) - else: - checkpoint_value = getattr(checkpoint_args, arg_name, None) - - if checkpoint_value is not None: - print_rank_0(f"Setting {arg_name} to {checkpoint_value} from checkpoint") - setattr(args, arg_name, checkpoint_value) - else: - print_rank_0(f"Checkpoint did not provide arguments {arg_name}") - - # Model args. 
- _set_arg('num_layers') - _set_arg('hidden_size') - _set_arg('ffn_hidden_size') - _set_arg('seq_length') - _set_arg('num_attention_heads') - _set_arg('num_query_groups', force=True) - _set_arg('group_query_attention', force=True) - _set_arg('kv_channels') - _set_arg('max_position_embeddings') - _set_arg('position_embedding_type', force=True) - _set_arg('add_position_embedding', force=True) - _set_arg('use_rotary_position_embeddings', force=True) - _set_arg('rotary_base', force=True) - _set_arg('rotary_percent', force=True) - _set_arg('rotary_interleaved', force=True) - _set_arg('add_bias_linear', force=True) - _set_arg('add_qkv_bias', force=True) - _set_arg('squared_relu', force=True) - _set_arg('swiglu', force=True) - _set_arg('untie_embeddings_and_output_weights', force=True) - _set_arg('apply_layernorm_1p', force=True) - _set_arg('normalization', force=True) - _set_arg('apply_query_key_layer_scaling', force=True) - _set_arg('attention_dropout', force=True) - _set_arg('hidden_dropout', force=True) - - _set_arg('hybrid_override_pattern', force=True) - _set_arg('spec', force=True) - _set_arg('hybrid_attention_ratio', force=True) - _set_arg('hybrid_mlp_ratio', force=True) - - _set_arg('num_experts', force=True) - _set_arg('moe_layer_freq', force=True) - _set_arg('moe_ffn_hidden_size', force=True) - _set_arg('moe_router_topk', force=True) - _set_arg('moe_token_dispatcher_type', force=True) - _set_arg('moe_router_pre_softmax', force=True) - _set_arg('moe_grouped_gemm', force=True) - _set_arg('moe_shared_expert_intermediate_size', force=True) - - # Tokenizer args. - _set_arg('tokenizer_type', force=True) - # Using checkpoint version might not always be safe (e.g., if running on different cluster). - if args.use_tokenizer_model_from_checkpoint_args: - _set_arg('tokenizer_model', force=True) - _set_arg('tiktoken_pattern', force=True) - _set_arg('padded_vocab_size') - - # Checkpoint args. - _set_arg('ckpt_format') - - # Model parallelism args. - if args.use_mp_args_from_checkpoint_args: - if checkpoint_version < 3.0: - _set_arg('tensor_model_parallel_size', 'model_parallel_size') - else: - _set_arg('tensor_model_parallel_size', force=True) - _set_arg('pipeline_model_parallel_size', force=True) - _set_arg('virtual_pipeline_model_parallel_size', force=True) - _set_arg('num_layers_per_virtual_pipeline_stage') - _set_arg('expert_model_parallel_size', force=True) - - return args, checkpoint_args - - -def fix_fp8_params_lose_precision_when_loading_dist_ckpt(state_dict): - """ - When "--fp8-param-gather" and "--use-dist-ckpt" are both enabled, the state dict read from - dist-checkpoint loses precision (the weights read from checkpoint go through the process of - bf16/fp16 -> fp8 -> bf16/fp16). This function is implemented to solve this problem. - When "--fp8-param-gather" is disabled, this function doesn't modify anything. - """ - for key in state_dict.keys(): - if key.startswith('model'): - for _, sharded_tensor in state_dict[key].items(): - if is_float8tensor(sharded_tensor.data): - sharded_tensor.data = sharded_tensor.data.from_float8().cpu() - - -def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', strict=True, - ft_client=None, checkpointing_context=None, skip_load_to_model_and_opt=False): - """Load a model checkpoint and return the iteration. - strict (bool): whether to strictly enforce that the keys in - :attr:`state_dict` of the checkpoint match the names of - parameters and buffers in model. 
- skip_load_to_model_and_opt (bool): whether to call `load_state_dict` - for :attr:`model` and :attr:`optimizer`. In case of running FSDP2 - or other torch features that uses DTensor in state dict, the tensors - are already loaded in-place by `_load_base_checkpoint`. - """ - args = get_args() - load_dir = getattr(args, load_arg) - - # Finetuning directories - pretrained_dir = getattr(args, 'pretrained_checkpoint', None) - if pretrained_dir is not None and not checkpoint_exists(load_dir): - print_rank_0( - f'Checkpoint file not found in load directory {load_dir} attempting to finetune with checkpoint in {pretrained_dir}' - ) - load_dir = pretrained_dir - if not checkpoint_exists(load_dir): - raise FileNotFoundError("No checkpoint found in load directory or pretrained directory") - args.finetune = True - - model = unwrap_model(model) - - load_kwargs = {} - is_dist_ckpt = False - if ( - args.auto_detect_ckpt_format - or args.use_dist_ckpt - or args.non_persistent_save_interval is not None - ): - state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( - load_dir, - args, - rank0=True, - checkpointing_context=checkpointing_context, - ) - if args.enable_ft_package and ft_client is not None and state_dict is not None: - if 'ft_state' in state_dict: - ft_client.load_state_dict(state_dict['ft_state']) - else: - print_rank_0("ft_state is not present in state_dict") - is_dist_ckpt = ( - ckpt_type == CheckpointType.LOCAL - or dist_checkpointing.check_is_distributed_checkpoint(checkpoint_name) - ) - if is_dist_ckpt: - ckpt_tp_pp = ( - state_dict['args'].tensor_model_parallel_size, - state_dict['args'].pipeline_model_parallel_size, - getattr(state_dict['args'], 'encoder_tensor_model_parallel_size', 0), - getattr(state_dict['args'], 'encoder_pipeline_model_parallel_size', 0), - ) - run_tp_pp = ( - args.tensor_model_parallel_size, - args.pipeline_model_parallel_size, - # TODO: change this to args.encoder_tensor_model_parallel_size after 30th Nov 24 - getattr(args, 'encoder_tensor_model_parallel_size', 0), - getattr(args, 'encoder_pipeline_model_parallel_size', 0), - ) - mismatch_msg = "(TP, PP, encoder TP, encoder PP) mismatch after resume ({} vs {} from checkpoint)".format( - run_tp_pp, ckpt_tp_pp - ) - - # Determine if RNG state will be loaded - if (ckpt_tp_pp == run_tp_pp and not release and not args.finetune and not args.no_load_rng - and not getattr(state_dict['args'], 'no_save_rng', False)): - gen_sd_rng_state = get_rng_state(True) # we can load the rng state - else: - gen_sd_rng_state = None - if ckpt_tp_pp != run_tp_pp: - print_rank_0("{}: RNG state will be ignored".format(mismatch_msg)) - - optim_sd_kwargs = dict(is_loading=True) - # Determine if optimizer state will be loaded - if (not release and not args.finetune and not args.no_load_optim - and not getattr(state_dict['args'], 'no_save_optim', False)): - gen_sd_optim = optimizer - gen_sd_opt_param_scheduler = opt_param_scheduler - - if args.use_distributed_optimizer: - optim_sd_kwargs['sharding_type'] = ('fully_sharded_model_space' - if getattr(state_dict['args'], 'ckpt_fully_parallel_save', False) - else 'dp_zero_gather_scatter') - # This is for backwards-compatibility. 
Can be removed once 'fully_sharded_bucket_space' loading is removed - for maybe_dist_opt_optim_state in (state_dict['optimizer'], *state_dict['optimizer'].values()): - if 'param_state_sharding_type' in maybe_dist_opt_optim_state: - if maybe_dist_opt_optim_state['param_state_sharding_type'] == 'fully_sharded_bucket_space': - print_rank_0('Detected deprecated `fully_sharded_bucket_space` DistributedOptimizer checkpoint format') - optim_sd_kwargs['sharding_type'] = maybe_dist_opt_optim_state['param_state_sharding_type'] - break - - if ckpt_tp_pp != run_tp_pp and optim_sd_kwargs['sharding_type'] != 'fully_sharded_model_space': - raise RuntimeError(f"{mismatch_msg}: not supported for DistributedOptimizer with sharding type {optim_sd_kwargs['sharding_type']}." - f" Please use `--ckpt-fully-parallel-save` flag during checkpoint saving.") - else: - gen_sd_optim = None - gen_sd_opt_param_scheduler = None - - # Determine if rerun state will be loaded - if (ckpt_tp_pp == run_tp_pp and not release and not args.finetune): - rerun_state_machine = get_rerun_state_machine() - gen_sd_rerun_state = rerun_state_machine.state_dict( - data_iterator=None, use_dist_ckpt=True - ) - else: - gen_sd_rerun_state = None - if ckpt_tp_pp != run_tp_pp: - print_rank_0("{}: Rerun state will be ignored".format(mismatch_msg)) - - # [ModelOpt]: Initial loading from non-resume sharded checkpoint to a Distillation Model - # will result in key mismatch with loss modules potentially containing parameters, since - # it requires generating a state_dict before loading. Here we hide those modules if present. - with contextlib.ExitStack() as stack: # Allows multiple context managers for each model shard - if args.finetune and hasattr(model[0], "hide_loss_modules"): - for m in model: - stack.enter_context(m.hide_loss_modules()) - load_kwargs['sharded_state_dict'] = generate_state_dict( - args, model, gen_sd_optim, gen_sd_opt_param_scheduler, gen_sd_rng_state, - use_dist_ckpt=True, optim_sd_kwargs=optim_sd_kwargs, rerun_state=gen_sd_rerun_state - ) - - # When "--fp8-param-gather" is disabled, this function doesn't modify anything. - fix_fp8_params_lose_precision_when_loading_dist_ckpt(load_kwargs['sharded_state_dict']) - - state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( - load_dir, args, rank0=False, checkpointing_context=checkpointing_context, - **load_kwargs - ) - - if args.enable_ft_package and ft_client is not None and state_dict is not None: - if 'ft_state' in state_dict: - ft_client.load_state_dict(state_dict['ft_state']) - else: - print_rank_0("ft_state is not present in state_dict") - - # Checkpoint not loaded. - if state_dict is None: - # Iteration and num_floating_point_operations_so_far default to 0. - return 0, 0 - - # Set checkpoint version. - set_checkpoint_version(state_dict.get('checkpoint_version', 0)) - - # Set iteration. - if args.finetune or release: - iteration = 0 - else: - try: - iteration = state_dict['iteration'] - except KeyError: - try: # Backward compatible with older checkpoints - iteration = state_dict['total_iters'] - except KeyError: - print_rank_0('A metadata file exists but unable to load ' - 'iteration from checkpoint {}, exiting'.format(checkpoint_name)) - sys.exit() - num_floating_point_operations_so_far = state_dict.get('num_floating_point_operations_so_far', 0) - - # Check arguments. 
- assert args.consumed_train_samples == 0 - assert args.skipped_train_samples == 0 - assert args.consumed_valid_samples == 0 - if 'args' in state_dict and not args.finetune: - checkpoint_args = state_dict['args'] - check_checkpoint_args(checkpoint_args) - args.consumed_train_samples = getattr(checkpoint_args, - 'consumed_train_samples', 0) - args.skipped_train_samples = getattr(checkpoint_args, - 'skipped_train_samples', 0) - update_num_microbatches(consumed_samples=args.consumed_train_samples, verbose=True) - args.consumed_valid_samples = getattr(checkpoint_args, - 'consumed_valid_samples', 0) - else: - print_rank_0('could not find arguments in the checkpoint ...') - - # [ModelOpt]: loading modelopt_state (sharded or not) - if has_nvidia_modelopt: - if ckpt_type == CheckpointType.LOCAL: - raise NotImplementedError('Local checkpointing does not support model opt') - if not args.use_dist_ckpt: - restore_modelopt_state(model, state_dict) - else: - restore_sharded_modelopt_state(model, checkpoint_name) - - # Model. - strict = False if args.retro_add_retriever else strict - if not skip_load_to_model_and_opt: - if len(model) == 1: - model[0].load_state_dict(state_dict['model'], strict=strict) - else: - for i in range(len(model)): - mpu.set_virtual_pipeline_model_parallel_rank(i) - model[i].load_state_dict(state_dict['model%d' % i], strict=strict) - - # Fix up query/key/value matrix ordering if needed. - checkpoint_version = get_checkpoint_version() - print_rank_0(f' checkpoint version {checkpoint_version}') - fix_query_key_value_ordering(model, checkpoint_version) - - # Optimizer. - if not release and not args.finetune and not args.no_load_optim: - try: - # Load state dict. - if not skip_load_to_model_and_opt and optimizer is not None and not optimizer.is_stub_optimizer: - optimizer.load_state_dict(state_dict['optimizer']) - - # Load distributed optimizer's custom parameter state. - # For distributed checkpoint it's already loaded in load_state_dict above - if args.use_distributed_optimizer and not is_dist_ckpt: - # NOTE: this is a manual read of the tracker file. - # This code should not be reached when reading from a non_persistent checkpoint - assert not is_dist_ckpt - tracker_filename = get_checkpoint_tracker_filename(load_dir) - iteration, release = read_metadata(tracker_filename) - model_checkpoint_name = \ - get_checkpoint_name(load_dir, iteration, release) - optim_checkpoint_name = \ - get_distributed_optimizer_checkpoint_name( - model_checkpoint_name) - optimizer.load_parameter_state(optim_checkpoint_name, - update_legacy_format=args.ckpt_convert_update_legacy_dist_opt_format) - - # Load scheduler. - if opt_param_scheduler is not None: - if 'lr_scheduler' in state_dict: # backward compatbility - opt_param_scheduler.load_state_dict(state_dict['lr_scheduler']) - else: - opt_param_scheduler.load_state_dict(state_dict['opt_param_scheduler']) - except KeyError as e: - print_rank_0('Unable to load optimizer from checkpoint {}. ' - 'Specify --no-load-optim or --finetune to prevent ' - 'attempting to load the optimizer state, ' - 'exiting ...'.format(checkpoint_name)) - raise e - else: - if (args.fp16 or args.bf16) and optimizer is not None: - optimizer.reload_model_params() - - # rerun state - try: - if 'rerun_state_machine' in state_dict: - get_rerun_state_machine().load_state_dict(state_dict['rerun_state_machine']) - except Exception as e: - print(f"Unable to restore RerunMachine from checkpoint: {e}") - sys.exit() - - # rng states. 
- if not release and not args.finetune and not args.no_load_rng: - try: - if 'rng_state' in state_dict: - # access rng_state for data parallel rank - if args.data_parallel_random_init: - rng_state = state_dict['rng_state'][mpu.get_data_parallel_rank()] - else: - rng_state = state_dict['rng_state'][0] - random.setstate(rng_state['random_rng_state']) - np.random.set_state(rng_state['np_rng_state']) - torch.set_rng_state(rng_state['torch_rng_state']) - torch.cuda.set_rng_state(rng_state['cuda_rng_state']) - # Check for empty states array - if not rng_state['rng_tracker_states']: - raise KeyError - tensor_parallel.get_cuda_rng_tracker().set_states( - rng_state['rng_tracker_states']) - else: # backward compatability - random.setstate(state_dict['random_rng_state']) - np.random.set_state(state_dict['np_rng_state']) - torch.set_rng_state(state_dict['torch_rng_state']) - torch.cuda.set_rng_state(state_dict['cuda_rng_state']) - # Check for empty states array - if not state_dict['rng_tracker_states']: - raise KeyError - tensor_parallel.get_cuda_rng_tracker().set_states( - state_dict['rng_tracker_states']) - except KeyError: - print_rank_0('Unable to load rng state from checkpoint {}. ' - 'Specify --no-load-rng or --finetune to prevent ' - 'attempting to load the rng state, ' - 'exiting ...'.format(checkpoint_name)) - sys.exit() - - # Some utilities want to load a checkpoint without distributed being initialized - if torch.distributed.is_initialized(): - torch.distributed.barrier() - - print_rank_0(f' successfully loaded checkpoint from {load_dir} ' - f'[ t {mpu.get_tensor_model_parallel_rank() + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' - f'p {mpu.get_pipeline_model_parallel_rank() + 1}/{mpu.get_pipeline_model_parallel_world_size()} ] ' - f'at iteration {iteration}') - - torch.cuda.empty_cache() - return iteration, num_floating_point_operations_so_far - - -def load_biencoder_checkpoint(model, only_query_model=False, - only_context_model=False, custom_load_path=None): - """ - selectively load retrieval models for indexing/retrieving - from saved checkpoints - """ - - args = get_args() - - model = unwrap_model(model) - - load_path = custom_load_path if custom_load_path is not None else args.load - - tracker_filename = get_checkpoint_tracker_filename(load_path) - with open(tracker_filename, 'r') as f: - iteration = int(f.read().strip()) - - checkpoint_name = get_checkpoint_name(load_path, iteration, - args.use_distributed_optimizer, - release=False) - - if mpu.get_data_parallel_rank() == 0: - print('global rank {} is loading checkpoint {}'.format( - torch.distributed.get_rank(), checkpoint_name)) - - state_dict = torch.load(checkpoint_name, map_location='cpu') - ret_state_dict = state_dict['model'] - - if only_query_model: - ret_state_dict.pop('context_model') - if only_context_model: - ret_state_dict.pop('query_model') - - assert len(model) == 1 - model[0].load_state_dict(ret_state_dict) - torch.distributed.barrier() - - if mpu.get_data_parallel_rank() == 0: - print(' successfully loaded {}'.format(checkpoint_name)) - - return model +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +"""Input/output checkpointing.""" + +import contextlib +import os +import random +import shutil +import sys +import threading +from enum import Enum, auto +from logging import getLogger +from pathlib import Path + +import numpy as np +from time import time + +import torch + +from megatron.core import mpu, tensor_parallel, dist_checkpointing +from megatron.core.dist_checkpointing.mapping import ShardedObject +from megatron.core.dist_checkpointing.serialization import get_default_load_sharded_strategy +from megatron.core.dist_checkpointing.strategies.fully_parallel import \ + FullyParallelSaveStrategyWrapper, FullyParallelLoadStrategyWrapper +from megatron.core.num_microbatches_calculator import update_num_microbatches +from megatron.core.utils import is_float8tensor +from megatron.core.rerun_state_machine import get_rerun_state_machine +from .async_utils import schedule_async_save, is_empty_async_queue +from .global_vars import get_args, get_one_logger +from .utils import unwrap_model, print_rank_0, append_to_progress_log, is_last_rank +from ..core.dist_checkpointing.serialization import \ + get_default_save_sharded_strategy +from .one_logger_utils import on_save_checkpoint_start, on_save_checkpoint_success +from . import wandb_utils + +from . import ft_integration + +# [ModelOpt]: Import +try: + from modelopt.torch.opt.plugins import ( + save_modelopt_state, + save_sharded_modelopt_state, + restore_modelopt_state, + restore_sharded_modelopt_state, + ) + has_nvidia_modelopt = True +except Exception: + has_nvidia_modelopt = False + +_CHECKPOINT_VERSION = None + +logger = getLogger(__name__) +_NON_PERSISTENT_CKPT_SUBDIR = 'non_persistent' + +def set_checkpoint_version(value): + global _CHECKPOINT_VERSION + if _CHECKPOINT_VERSION is not None: + assert _CHECKPOINT_VERSION == value, \ + "checkpoint versions do not match" + _CHECKPOINT_VERSION = value + + +def get_checkpoint_version(): + global _CHECKPOINT_VERSION + return _CHECKPOINT_VERSION + + +def check_checkpoint_args(checkpoint_args): + """Ensure fixed arguments for a model are the same for the input + arguments and the one retrieved from checkpoint.""" + args = get_args() + + def _compare(arg_name, old_arg_name=None, default=None): + if old_arg_name is not None: + ckpt_arg_name = old_arg_name + else: + ckpt_arg_name = arg_name + if default is not None: + checkpoint_value = getattr(checkpoint_args, ckpt_arg_name, default) + else: + checkpoint_value = getattr(checkpoint_args, ckpt_arg_name) + args_value = getattr(args, arg_name) + error_message = '{} value from checkpoint ({}) is not equal to the ' \ + 'input argument value ({}).'.format( + arg_name, checkpoint_value, args_value) + assert checkpoint_value == args_value, error_message + + _compare('num_layers') + _compare('hidden_size') + _compare('num_attention_heads') + _compare('add_position_embedding', default=True) + if args.vocab_file: + _compare('max_position_embeddings') + _compare('make_vocab_size_divisible_by') + if not args.use_dist_ckpt: + _compare('padded_vocab_size') + _compare('tokenizer_type') + if args.data_parallel_random_init: + _compare('data_parallel_random_init') + if get_checkpoint_version() < 3.0: + _compare('tensor_model_parallel_size', + old_arg_name='model_parallel_size') + if get_checkpoint_version() >= 3.0 and not args.use_dist_ckpt: + _compare('tensor_model_parallel_size') + _compare('pipeline_model_parallel_size') + + +def ensure_directory_exists(filename, check_parent=True): + """Build filename's path if it does not already exists.""" + dirname = 
os.path.dirname(filename) if check_parent else filename + os.makedirs(dirname, exist_ok=True) + + +def get_checkpoint_name(checkpoints_path, iteration, release=False, + pipeline_parallel=None, + tensor_rank=None, pipeline_rank=None, + expert_parallel=None, expert_rank=None, + return_base_dir=False, basename="model_optim_rng.pt"): + """Determine the directory name for this rank's checkpoint.""" + if release: + directory = 'release' + else: + directory = 'iter_{:07d}'.format(iteration) + if return_base_dir: + common_path = os.path.join(checkpoints_path, directory) + return common_path + + # Use both the tensor and pipeline MP rank. + if pipeline_parallel is None: + pipeline_parallel = (mpu.get_pipeline_model_parallel_world_size() > 1) + if tensor_rank is None: + tensor_rank = mpu.get_tensor_model_parallel_rank() + if pipeline_rank is None: + pipeline_rank = mpu.get_pipeline_model_parallel_rank() + if expert_parallel is None: + expert_parallel = (mpu.get_expert_model_parallel_world_size() > 1) + if expert_rank is None: + expert_rank = mpu.get_expert_model_parallel_rank() + + # Use both the tensor and pipeline MP rank. If using the distributed + # optimizer, then the optimizer's path must additionally include the + # data parallel rank. + if not pipeline_parallel: + common_path = os.path.join(checkpoints_path, directory, + f'mp_rank_{tensor_rank:02d}') + else: + common_path = os.path.join(checkpoints_path, directory, + f'mp_rank_{tensor_rank:02d}_{pipeline_rank:03d}') + + if expert_parallel: + common_path = common_path + f'_{expert_rank:03d}' + + return os.path.join(common_path, basename) + + +def get_distributed_optimizer_checkpoint_name(model_checkpoint_name): + return os.path.join(os.path.dirname(model_checkpoint_name), + "distrib_optim.pt") + + +def find_checkpoint_rank_0(checkpoints_path, iteration, release=False): + """Finds the checkpoint for rank 0 without knowing if we are using + pipeline parallelism/expert parallelism or not. + + Since the checkpoint naming scheme changes if pipeline or expert + parallelism is present, we need to look for both naming schemes if + we don't know if the checkpoint has pipeline or expert parallelism. 
+ """ + + # Look for checkpoint with no pipelining and no expert parallelism + filename = get_checkpoint_name(checkpoints_path, iteration, release, + pipeline_parallel=False, + tensor_rank=0, pipeline_rank=0, + expert_parallel=False, expert_rank=0) + if os.path.isfile(filename): + return filename + + # Look for checkpoint with no pipelining and expert parallelism + filename = get_checkpoint_name(checkpoints_path, iteration, release, + pipeline_parallel=False, + tensor_rank=0, pipeline_rank=0, + expert_parallel=True, expert_rank=0) + if os.path.isfile(filename): + return filename + + # Look for checkpoint with pipelining and no expert parallelism + filename = get_checkpoint_name(checkpoints_path, iteration, release, + pipeline_parallel=True, + tensor_rank=0, pipeline_rank=0, + expert_parallel=False, expert_rank=0) + if os.path.isfile(filename): + return filename + + # Look for checkpoint with pipelining and expert parallelism + filename = get_checkpoint_name(checkpoints_path, iteration, release, + pipeline_parallel=True, + tensor_rank=0, pipeline_rank=0, + expert_parallel=True, expert_rank=0) + if os.path.isfile(filename): + return filename + + # Look for a distributed checkpoint + filename = get_checkpoint_name(checkpoints_path, iteration, release, + pipeline_parallel=True, + return_base_dir=True) + if dist_checkpointing.check_is_distributed_checkpoint(filename): + return filename + + return None + + +def get_checkpoint_tracker_filename(checkpoints_path): + + """Tracker file rescords the latest chckpoint during + training to restart from.""" + return os.path.join(checkpoints_path, 'latest_checkpointed_iteration.txt') + + +def checkpoint_exists(checkpoints_path): + if checkpoints_path is None: + return False + load_step = 'latest_checkpointed_iteration.txt' + return os.path.exists(os.path.join(checkpoints_path, load_step)) + + +def read_metadata(tracker_filename): + # Read the tracker file and either set the iteration or + # mark it as a release checkpoint. + iteration = 0 + release = False + with open(tracker_filename, 'r') as f: + metastring = f.read().strip() + try: + iteration = int(metastring) + except ValueError: + release = metastring == 'release' + if not release: + print_rank_0('ERROR: Invalid metadata file {}. Exiting'.format( + tracker_filename)) + sys.exit() + assert iteration > 0 or release, 'error parsing metadata file {}'.format( + tracker_filename) + + # Get the max iteration retrieved across the ranks. + if torch.distributed.is_initialized(): + iters_cuda = torch.tensor([iteration], dtype=torch.long, device='cuda') + torch.distributed.all_reduce(iters_cuda, op=torch.distributed.ReduceOp.MAX) + max_iter = iters_cuda[0].item() + + # We should now have all the same iteration. + # If not, print a warning and chose the maximum + # iteration across all ranks. 
+ if iteration != max_iter: + rank = torch.distributed.get_rank() + print('WARNING: on rank {} found iteration {} in the ' + 'metadata while max iteration across the ranks ' + 'is {}, replacing it with max iteration.'.format( + rank, iteration, max_iter), flush=True) + else: + # When loading a checkpoint outside of training (for example, + # when editing it), we might not have torch distributed + # initialized, in this case, just assume we have the latest + max_iter = iteration + return max_iter, release + + +def get_rng_state(use_dist_ckpt: bool = False): + """ collect rng state across data parallel ranks """ + args = get_args() + rng_state = { + 'random_rng_state': random.getstate(), + 'np_rng_state': np.random.get_state(), + 'torch_rng_state': torch.get_rng_state(), + 'cuda_rng_state': torch.cuda.get_rng_state(), + 'rng_tracker_states': tensor_parallel.get_cuda_rng_tracker().get_states()} + + rng_state_list = None + if torch.distributed.is_initialized() and \ + mpu.get_data_parallel_world_size() > 1 and \ + args.data_parallel_random_init: + rng_state_list = \ + [None for i in range(mpu.get_data_parallel_world_size())] + torch.distributed.all_gather_object( + rng_state_list, + rng_state, + group=mpu.get_data_parallel_group()) + else: + rng_state_list = [rng_state] + + if use_dist_ckpt: + pp_rank = mpu.get_pipeline_model_parallel_rank() + pp_size = mpu.get_pipeline_model_parallel_world_size() + tp_rank = mpu.get_tensor_model_parallel_rank() + tp_size = mpu.get_tensor_model_parallel_world_size() + rng_state_list = ShardedObject('rng_state', rng_state_list, (pp_size, tp_size), (pp_rank, tp_rank), + replica_id=mpu.get_data_parallel_rank(with_context_parallel=True)) + + return rng_state_list + +class CheckpointType(Enum): + LEGACY = auto() + LOCAL = auto() + GLOBAL = auto() + +def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, + checkpointing_context=None, pipeline_rank=None, expert_rank=None, tensor_rank=None, pipeline_parallel=None, expert_parallel=None, non_persistent_ckpt=False, + train_data_iterator=None, preprocess_common_state_dict_fn = None): + """Save a model, optimizer and optionally dataloader checkpoint. + + Checkpointing context is used to persist some checkpointing state + throughout a single job. Must be initialized externally (not used if None). + + If non_persistent_ckpt is True, + the checkpoint will be saved with special functionality for removing old checkpoints. + There are several types of non-persistent checkpoints: + "global" - Saved as a standard checkpoint (e.g., on Lustre) with old checkpoints being removed. + "local" - Each rank saves a portion of the checkpoint locally (e.g., on SSD/ramdisk). + + Dataloader checkpoint is only saved if the dataloader supports it. Currently this applies only + to the Megatron Energon dataloader (multimodal) and not the built-in Megatron dataloader (text-only). + """ + start_ckpt = time() + args = get_args() + + if args.async_save and not is_empty_async_queue(): + print_rank_0('WARNING: Starting a checkpoint save before previous has finished. Consider increasing the checkpoint interval.') + + # Prepare E2E metrics at start of save checkpoint + productive_metrics = on_save_checkpoint_start(args.async_save) + + # Monitor for the checkpointing timeout (no-op if FT is not enabled) + ft_integration.on_checkpointing_start() + + # Only rank zero of the data parallel writes to the disk. + model = unwrap_model(model) + + # Handle non_persistent_ckpt flag. 
Besides overwriting `args.save` and + # `args.use_dist_ckpt`, non-persistent global ckpt requires no additional logic + ckpt_type = CheckpointType.GLOBAL if args.use_dist_ckpt else CheckpointType.LEGACY + save_dir = args.save + if non_persistent_ckpt: + if args.non_persistent_ckpt_type == 'global': + ckpt_type = CheckpointType.GLOBAL + save_dir = ( + args.non_persistent_global_ckpt_dir + if args.non_persistent_global_ckpt_dir + else os.path.join(save_dir, _NON_PERSISTENT_CKPT_SUBDIR) + ) + # TODO Can we ensure the previous checkpoint is saved? We don't want to allow two saves in parallel. + cleanup_old_non_persistent_checkpoint( + save_dir, leave_ckpt_num=1, do_async=args.async_save + ) + elif args.non_persistent_ckpt_type == 'local': + ckpt_type = CheckpointType.LOCAL + save_dir = checkpointing_context['local_checkpoint_manager'].local_ckpt_dir + else: + assert False, 'Please use local or global non-persistent checkpoints' \ + f'(got: {args.non_persistent_ckpt_type})' + + ckpt_format = args.ckpt_format if ckpt_type == CheckpointType.GLOBAL else 'torch' + print_rank_0('saving checkpoint at iteration {:7d} to {} in {} format'.format( + iteration, save_dir, ckpt_format)) + + # Collect rng state across data parallel ranks. + rng_state = get_rng_state(ckpt_type != CheckpointType.LEGACY) + + # Collect rerun state across all ranks + rerun_state_machine = get_rerun_state_machine() + rerun_state = rerun_state_machine.state_dict( + data_iterator=train_data_iterator, use_dist_ckpt=ckpt_type != CheckpointType.LEGACY + ) + + # Checkpoint name. + return_base_dir = (ckpt_type != CheckpointType.LEGACY) + checkpoint_name = get_checkpoint_name(save_dir, iteration, release=False, pipeline_parallel=pipeline_parallel, + tensor_rank=tensor_rank, pipeline_rank=pipeline_rank, expert_parallel=expert_parallel, expert_rank=expert_rank, return_base_dir=return_base_dir) + + # Save dataloader state if the dataloader supports it (currently only Megatron Energon). + maybe_save_dataloader_state(train_data_iterator, iteration, getattr(args, "dataloader_save", None)) + + # Save distributed optimizer's custom parameter state. + if ( + args.use_distributed_optimizer + and not args.no_save_optim + and optimizer is not None + and ckpt_type == CheckpointType.LEGACY + ): + optim_checkpoint_name = \ + get_distributed_optimizer_checkpoint_name(checkpoint_name) + ensure_directory_exists(optim_checkpoint_name) + if not optimizer.is_stub_optimizer: + optimizer.save_parameter_state(optim_checkpoint_name) + + async_save_request = None + if args.async_save: + if ckpt_type == CheckpointType.LEGACY: + raise NotImplementedError('Async checkpoint save not implemented for legacy checkpoints') + elif ckpt_type == CheckpointType.GLOBAL and args.ckpt_format != 'torch_dist': + raise NotImplementedError(f'Async checkpoint save not implemented for {args.ckpt_format} distributed checkpoint format') + + rank = torch.distributed.get_rank() if torch.distributed.is_initialized() else 0 + + # Collect args, model, RNG. 
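+    # For legacy checkpoints, only expert-data-parallel rank 0 builds and writes the state
+    # dict (one file per model-parallel rank); distributed checkpoint formats produce a
+    # sharded state dict on every rank.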
+ if not torch.distributed.is_initialized() \ + or mpu.get_expert_data_parallel_rank() == 0 \ + or ckpt_type != CheckpointType.LEGACY: + optim_sd_kwargs = {} + if ckpt_type != CheckpointType.LEGACY and args.use_distributed_optimizer: + optim_sd_kwargs['sharding_type'] = ('fully_sharded_model_space' + if args.ckpt_fully_parallel_save + else 'dp_zero_gather_scatter') + print_rank_0(f'Storing distributed optimizer sharded state of type {optim_sd_kwargs["sharding_type"]}') + state_dict = generate_state_dict( + args, + model, + optimizer, + opt_param_scheduler, + rng_state, + use_dist_ckpt=ckpt_type != CheckpointType.LEGACY, + iteration=iteration, + optim_sd_kwargs=optim_sd_kwargs, + rerun_state=rerun_state, + ) + + state_dict['num_floating_point_operations_so_far'] = num_floating_point_operations_so_far + if ckpt_type == CheckpointType.GLOBAL: + if not torch.distributed.is_initialized() or torch.distributed.get_rank() == 0: + # TODO Handle non-empty directories (e.g., after a crash during saving). + ensure_directory_exists(checkpoint_name, check_parent=False) + if checkpointing_context is not None and 'save_strategy' in checkpointing_context: + save_strategy = checkpointing_context['save_strategy'] + # Already saved once before - don't need to rerun sharding validation + validate_sharding_integrity = not args.ckpt_assume_constant_structure + else: + validate_sharding_integrity = True + save_strategy = get_default_save_sharded_strategy(args.ckpt_format) + if args.ckpt_assume_constant_structure and args.ckpt_format == 'torch_dist': + save_strategy.use_cached_ckpt_structure = args.ckpt_assume_constant_structure + if checkpointing_context is not None and 'load_strategy' in checkpointing_context: + cached_global_metadata = getattr(checkpointing_context['load_strategy'], 'cached_global_metadata', None) + if cached_global_metadata is not None: + logger.debug("Plugging in the read metadata from the load strategy...") + save_strategy.cached_global_metadata = cached_global_metadata + else: + logger.debug("Failed to plug in the read metadata from the load strategy...") + + if args.ckpt_fully_parallel_save: + save_strategy = FullyParallelSaveStrategyWrapper(save_strategy, mpu.get_data_parallel_group(with_context_parallel=True), + args.ckpt_assume_constant_structure) + # Store save strategy for future checkpoint saves + if checkpointing_context is not None: + checkpointing_context['save_strategy'] = save_strategy + end_ckpt = time() + logger.debug(f"rank: {rank}, takes {end_ckpt - start_ckpt} to prepare state dict for ckpt ") + async_save_request = dist_checkpointing.save(state_dict, checkpoint_name, save_strategy, + async_sharded_save=args.async_save, + validate_access_integrity=validate_sharding_integrity, + preprocess_common_before_consistancy_check=preprocess_common_state_dict_fn) + # [ModelOpt]: save sharded modelopt_state + if has_nvidia_modelopt: + save_sharded_modelopt_state(model, checkpoint_name, (args.ckpt_format, 1)) + else: + # [ModelOpt]: Inject modelopt_state into state_dict + if has_nvidia_modelopt: + if ckpt_type == CheckpointType.LOCAL: + print_rank_0('WARNING: Local checkpointing does not support nvidia_modelopt.') + else: + save_modelopt_state(model, state_dict) + + end_ckpt = time() + logger.debug(f"rank: {rank}, takes {end_ckpt - start_ckpt} to prepare state dict for ckpt ") + if ckpt_type == CheckpointType.LOCAL: + try: + from megatron.core.dist_checkpointing.tensor_aware_state_dict import MCoreTensorAwareStateDict + except ModuleNotFoundError: + raise RuntimeError("The 
'nvidia_resiliency_ext' module is required for local " + "checkpointing but was not found. Please ensure it is installed.") + + algo = args.non_persistent_local_ckpt_algo + cached_metadata = None + if args.ckpt_assume_constant_structure and 'local_checkpoint_cache' in checkpointing_context: + cached_metadata = checkpointing_context['local_checkpoint_cache'] + state_dict_for_save, cacheable_metadata = MCoreTensorAwareStateDict.from_state_dict( + state_dict, algo=algo, cached_metadata=cached_metadata, + parallelization_group=mpu.get_data_parallel_group(with_context_parallel=True) + ) + async_save_request = checkpointing_context['local_checkpoint_manager'].save( + state_dict_for_save, iteration, is_async=bool(args.async_save) + ) + checkpointing_context['local_checkpoint_cache'] = cacheable_metadata + else: + assert ckpt_type == CheckpointType.LEGACY + # Save. + ensure_directory_exists(checkpoint_name) + torch.save(state_dict, checkpoint_name) + start_misc = time() + if ckpt_type != CheckpointType.LOCAL: + if not args.async_save: + assert async_save_request is None + # Wait so everyone is done (necessary) + if torch.distributed.is_initialized(): + torch.distributed.barrier() + + # And update the latest iteration + if not torch.distributed.is_initialized() \ + or torch.distributed.get_rank() == 0: + tracker_filename = get_checkpoint_tracker_filename(save_dir) + + if ckpt_type == CheckpointType.LOCAL: + def iter_finalize_fn(): + print_rank_0(' successfully saved local checkpoint from iteration {:7d}' + .format(iteration)) + if args.log_progress and args.async_save: + append_to_progress_log(f'Saved async local checkpoint\tIteration: {iteration}', + barrier=False) + else: + def iter_finalize_fn(): + with open(tracker_filename, 'w') as f: + f.write(str(iteration)) + print_rank_0(f' successfully saved checkpoint from iteration {int(iteration):7d} to {args.save} ' + f'[ t {(tensor_rank if tensor_rank is not None else mpu.get_tensor_model_parallel_rank()) + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' + f'p {(pipeline_rank if pipeline_rank is not None else mpu.get_pipeline_model_parallel_rank()) + 1}/{mpu.get_pipeline_model_parallel_world_size()} ]') + if args.log_progress and args.async_save: + append_to_progress_log(f'Saved async checkpoint\tIteration: {iteration}', + barrier=False) + + if args.async_save: + assert async_save_request is not None + async_save_request.add_finalize_fn(iter_finalize_fn) + else: + iter_finalize_fn() + + # Additional callback for one_logger (last rank) + if not torch.distributed.is_initialized() \ + or is_last_rank(): + def onelogger_finalize_fn(): + on_save_checkpoint_success(productive_metrics, args.async_save) + if args.async_save: + assert async_save_request is not None + async_save_request.add_finalize_fn(onelogger_finalize_fn) + else: + onelogger_finalize_fn() + + # Additional callback for wandb (last rank) + if not torch.distributed.is_initialized() \ + or is_last_rank(): + def wandb_finalize_fn(): + wandb_utils.on_save_checkpoint_success(checkpoint_name, get_checkpoint_tracker_filename(save_dir), save_dir, iteration) + if args.async_save: + assert async_save_request is not None + async_save_request.add_finalize_fn(wandb_finalize_fn) + else: + wandb_finalize_fn() + + if args.async_save: + schedule_async_save(async_save_request) + print_rank_0(' scheduled an async checkpoint save at iteration {:7d} to {}' \ + .format(iteration, save_dir)) + + # Wait so everyone is done (not necessary) + if torch.distributed.is_initialized(): + torch.distributed.barrier() + + 
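The tracker-file, one_logger, and wandb callbacks registered above all follow the same pattern: run immediately for a synchronous save, or register as finalize functions that the async machinery invokes once the background write completes. A toy sketch of that pattern (the class here is a stand-in, not the real async request API):

```python
from typing import Callable, List

class ToyAsyncSaveRequest:
    """Stand-in for the async save request object used above; illustration only."""
    def __init__(self) -> None:
        self._finalize_fns: List[Callable[[], None]] = []

    def add_finalize_fn(self, fn: Callable[[], None]) -> None:
        self._finalize_fns.append(fn)

    def finalize(self) -> None:
        # Invoked by the async checkpointing machinery after the write finishes.
        for fn in self._finalize_fns:
            fn()

def run_or_defer(async_save: bool, request, finalize_fn: Callable[[], None]) -> None:
    """Defer the callback to the async request, or run it right away for sync saves."""
    if async_save:
        request.add_finalize_fn(finalize_fn)
    else:
        finalize_fn()

req = ToyAsyncSaveRequest()
run_or_defer(True, req, lambda: print("tracker file updated"))
run_or_defer(True, req, lambda: print("wandb notified"))
req.finalize()  # both callbacks fire only after the (simulated) write completes
```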
end_misc = time() + logger.debug(f"rank: {rank}, takes {end_misc - start_misc} to finalize ckpt save ") + + ft_integration.on_checkpointing_end(is_async_finalization=False) + +def cleanup_old_non_persistent_checkpoint(save_dir, leave_ckpt_num=1, do_async=False): + if torch.distributed.is_initialized() and torch.distributed.get_rank() != 0: + return + save_dir = Path(save_dir) + + iter_prefix = "iter_" + iter_ckpts = save_dir.rglob(f'{iter_prefix}*') + sorted_iter_ckpts = sorted(iter_ckpts, key=lambda ckpt_name: int(ckpt_name.name[len(iter_prefix):])) + if not sorted_iter_ckpts: + return + rm_iter_ckpts = sorted_iter_ckpts[:-leave_ckpt_num] + print_rank_0(f'Non-persistent checkpoints scheduled for removal: {rm_iter_ckpts}') + print_rank_0(f'Non-persistent checkpoints to be kept: {sorted_iter_ckpts[-leave_ckpt_num:]}') + + def remove_iter_ckpts(_iter_ckpts): + for ckpt in _iter_ckpts: + shutil.rmtree(ckpt) + if do_async: + threading.Thread(target=remove_iter_ckpts, args=(rm_iter_ckpts,)).start() + else: + remove_iter_ckpts(rm_iter_ckpts) + + +def maybe_save_dataloader_state(train_iterator, iteration, dataloader_save_path): + """Saves dataloader state if the dataloader supports it. + + Currently, this is only used by Megatron Energon dataloader (multimodal) to store its state at a + specific iteration. The Megatron built-in dataloader (text-only) creates index files upfront + to track its state. + + If the provided dataloader has `save_state` method, then it is called to save the state. + Otherwise, no state is saved. + + Args: + train_iterator (iterable): Train dataloader. + iteration (int): Current iteration. + dataloader_save_path (str): Path where the dataloader state is saved. + """ + # If no dataloader or saving path is provided, exit early, otherwise, raise an error. + if train_iterator is None or dataloader_save_path is None or dataloader_save_path == "": + return + + # If dataloader doesn't support saving state, raise an error. + if not hasattr(train_iterator.iterable, "save_state"): + raise RuntimeError(f"Could not find a save_state for the train_iterator of type {type(train_iterator)}") + + # Save dataloader state for each data parallel rank only once. + first_rank = mpu.is_pipeline_first_stage(ignore_virtual=True) and mpu.get_tensor_model_parallel_rank() == 0 + if not first_rank: + return + + dp_rank = mpu.get_data_parallel_rank() + print(f"saving dataloader checkpoint at iteration {iteration} to {dataloader_save_path}") + train_dataloader_state_dict = train_iterator.iterable.save_state() + data_state_save_path = get_checkpoint_name( + dataloader_save_path, iteration, + basename=f'train_dataloader_dprank{dp_rank:03d}.pt' + ) + + torch.distributed.barrier(group=mpu.get_data_parallel_group()) + + if mpu.get_data_parallel_rank() == 0: + ensure_directory_exists(data_state_save_path) + + torch.distributed.barrier(group=mpu.get_data_parallel_group()) + + dataloader_save_dict = {} + dataloader_save_dict['dataloader_state_dict'] = train_dataloader_state_dict + torch.save(dataloader_save_dict, data_state_save_path) + + +def generate_state_dict(args, model, optimizer, opt_param_scheduler, + rng_state, use_dist_ckpt=False, iteration=None, + optim_sd_kwargs=None, rerun_state=None): + # Arguments, iteration, and model. 
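For orientation, the dictionary assembled in the body below has roughly this top-level layout for a single model chunk; the values shown are placeholder descriptions, not real checkpoint contents.

```python
# Illustrative shape of the state dict generate_state_dict returns (single model chunk).
example_layout = {
    "args": "Namespace with the full training configuration",
    "checkpoint_version": 3.0,
    "iteration": 1000,
    "model": "model[0].sharded_state_dict() or state_dict_for_save_checkpoint()",
    "optimizer": "optimizer state (skipped for no_save_optim or a stub optimizer)",
    "opt_param_scheduler": "learning-rate / param scheduler state",
    "rerun_state_machine": "rerun state collected by the caller",
    "rng_state": "per data-parallel-rank RNG states (skipped for no_save_rng)",
}
```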
+ state_dict = {} + state_dict['args'] = args + state_dict['checkpoint_version'] = 3.0 + if iteration is not None: + state_dict['iteration'] = iteration + + if len(model) == 1: + state_dict['model'] = (model[0].sharded_state_dict() + if use_dist_ckpt else + model[0].state_dict_for_save_checkpoint()) + else: + for i in range(len(model)): + mpu.set_virtual_pipeline_model_parallel_rank(i) + state_dict['model%d' % i] = ( + model[i].sharded_state_dict() + if use_dist_ckpt else + model[i].state_dict_for_save_checkpoint()) + # Optimizer stuff. + if not args.no_save_optim: + if optimizer is not None and not optimizer.is_stub_optimizer: + state_dict['optimizer'] = (optimizer.sharded_state_dict(state_dict, **(optim_sd_kwargs or {})) + if use_dist_ckpt else + optimizer.state_dict()) + if opt_param_scheduler is not None: + state_dict['opt_param_scheduler'] = \ + opt_param_scheduler.state_dict() + + # Rerun state + state_dict['rerun_state_machine'] = rerun_state + + # RNG states. + if not args.no_save_rng: + state_dict["rng_state"] = rng_state + return state_dict + + +def _transpose_first_dim(t, num_splits, num_splits_first, model): + input_shape = t.size() + # We use a self_attention module but the values extracted aren't + # specific to self attention so should work for cross attention as well + while hasattr(model, 'module'): + model = model.module + attention_module = model.language_model.encoder.layers[0].self_attention + hidden_size_per_attention_head = attention_module.hidden_size_per_attention_head + num_attention_heads_per_partition = attention_module.num_attention_heads_per_partition + if num_splits_first: + """[num_splits * np * hn, h] + -->(view) [num_splits, np, hn, h] + -->(tranpose) [np, num_splits, hn, h] + -->(view) [np * num_splits * hn, h] """ + + intermediate_shape = \ + (num_splits, num_attention_heads_per_partition, + hidden_size_per_attention_head) + input_shape[1:] + + t = t.view(*intermediate_shape) + t = t.transpose(0, 1).contiguous() + else: + """[np * hn * num_splits, h] + -->(view) [np, hn, num_splits, h] + -->(tranpose) [np, num_splits, hn, h] + -->(view) [np * num_splits * hn, h] """ + + intermediate_shape = \ + (num_attention_heads_per_partition, + hidden_size_per_attention_head, num_splits) +\ + input_shape[1:] + + t = t.view(*intermediate_shape) + t = t.transpose(1, 2).contiguous() + t = t.view(*input_shape) + + return t + + +def fix_query_key_value_ordering(model, checkpoint_version): + """Fix up query/key/value matrix ordering if checkpoint + version is smaller than 2.0 + """ + if checkpoint_version < 2.0: + if isinstance(model, list): + assert len(model)==1 + model = model[0] + for name, param in model.named_parameters(): + if name.endswith(('.query_key_value.weight', '.query_key_value.bias')): + if checkpoint_version == 0: + fixed_param = _transpose_first_dim(param.data, 3, True, model) + elif checkpoint_version == 1.0: + fixed_param = _transpose_first_dim(param.data, 3, False, model) + else: + print_rank_0(f"Invalid checkpoint version {checkpoint_version}.") + sys.exit() + param.data.copy_(fixed_param) + if name.endswith(('.key_value.weight', '.key_value.bias')): + if checkpoint_version == 0: + fixed_param = _transpose_first_dim(param.data, 2, True, model) + elif checkpoint_version == 1.0: + fixed_param = _transpose_first_dim(param.data, 2, False, model) + else: + print_rank_0(f"Invalid checkpoint version {checkpoint_version}.") + sys.exit() + param.data.copy_(fixed_param) + print_rank_0(" successfully fixed query-key-values ordering for" + " checkpoint version 
{}".format(checkpoint_version)) + + +def _get_non_persistent_iteration(non_persistent_global_dir, args, checkpointing_context=None): + if args.non_persistent_ckpt_type is None: + return -1 + elif args.non_persistent_ckpt_type == "global": + tracker_filename = get_checkpoint_tracker_filename(non_persistent_global_dir) + if os.path.isfile(tracker_filename): + iteration, release = read_metadata(tracker_filename) + if release: + raise RuntimeError('Non-persistent checkpoint can\'t be a release checkpoint') + else: + iteration = -1 + print_rank_0('WARNING: could not find the metadata file {}'.format(tracker_filename)) + print_rank_0(' will not load any non-persistent checkpoint') + return iteration + elif args.non_persistent_ckpt_type == "local": + return checkpointing_context['local_checkpoint_manager'].find_latest() + else: + assert False, 'Please use local or global non-persistent checkpoints' \ + f'(got: {args.non_persistent_ckpt_type})' + + +def _load_non_persistent_base_checkpoint( + non_persistent_global_dir, + args, + rank0, + sharded_state_dict, + non_persistent_iteration, + checkpointing_context=None, +): + """ Load the base state_dict from a non-persistent distributed checkpoint. + Depending on the non_persistent_ckpt_type, different logic may be required. + """ + assert args.non_persistent_ckpt_type is not None + if args.non_persistent_ckpt_type == "global": + if not rank0: + print_rank_0( + f'Loading from a non-persistent checkpoint (non-persistent iter {non_persistent_iteration})' + ) + return _load_global_dist_base_checkpoint( + non_persistent_global_dir, args, rank0, sharded_state_dict, non_persistent_iteration, False, + checkpointing_context=checkpointing_context + ) + elif args.non_persistent_ckpt_type == "local": + intermediate_state_dict, checkpoint_name = checkpointing_context[ + 'local_checkpoint_manager' + ].load() + state_dict = intermediate_state_dict.to_state_dict( + sharded_state_dict, + algo=args.non_persistent_local_ckpt_algo, + parallelization_group = mpu.get_data_parallel_group(with_context_parallel=True) + ) + return state_dict, checkpoint_name, False, CheckpointType.LOCAL + else: + assert False, 'Please use local or global non-persistent checkpoints' \ + f'(got: {args.non_persistent_ckpt_type})' + + +def _load_global_dist_base_checkpoint( + load_dir, args, rank0, sharded_state_dict, iteration, release, checkpointing_context=None +): + """ Load the base state_dict from the given directory containing the global distributed checkpoint """ + if rank0: + checkpoint_name = find_checkpoint_rank_0(load_dir, iteration, release) + state_dict = dist_checkpointing.load_common_state_dict(checkpoint_name) + return state_dict, checkpoint_name, release, CheckpointType.GLOBAL + + if sharded_state_dict is None: + assert not args.auto_detect_ckpt_format and not args.use_dist_ckpt, ( + args.auto_detect_ckpt_format, + args.use_dist_ckpt, + ) + raise RuntimeError( + 'Detected load from a distributed checkpoint, but neither --use-dist-ckpt nor --auto-detect-ckpt-format is set.' + ) + + checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=True) + load_strategy = get_default_load_sharded_strategy(checkpoint_name) + # NOTE: `args.ckpt_fully_parallel_load` applies to both persistent and non-persistent checkpoints. 
+ if args.ckpt_fully_parallel_load: + load_strategy = FullyParallelLoadStrategyWrapper( + load_strategy, mpu.get_data_parallel_group(with_context_parallel=True) + ) + if checkpointing_context is not None: + checkpointing_context["load_strategy"] = load_strategy + state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) + return state_dict, checkpoint_name, release, CheckpointType.GLOBAL + + +def _load_base_checkpoint( + load_dir, + args, + rank0=False, + sharded_state_dict=None, + checkpointing_context=None, +): + """ Load the base state_dict from the given directory + + If rank0 is true, just loads rank 0 checkpoint, ignoring arguments. + """ + # Try to load non-persistent checkpoint first + non_persistent_global_dir = ( + args.non_persistent_global_ckpt_dir + if args.non_persistent_global_ckpt_dir or load_dir is None + else os.path.join(load_dir, _NON_PERSISTENT_CKPT_SUBDIR) + ) + non_persistent_iteration = _get_non_persistent_iteration( + non_persistent_global_dir, args, checkpointing_context + ) + iteration, release = -1, False + tracker_filename = 'because load directory is not defined' + if load_dir is not None: + tracker_filename = get_checkpoint_tracker_filename(load_dir) + if os.path.isfile(tracker_filename): + iteration, release = read_metadata(tracker_filename) + if non_persistent_iteration != -1: # there is a non-persistent checkpoint + if non_persistent_iteration >= iteration: + return _load_non_persistent_base_checkpoint( + non_persistent_global_dir, + args, + rank0, + sharded_state_dict, + non_persistent_iteration, + checkpointing_context, + ) + else: + print_rank_0('WARNING: non-persistent checkpoints are older than persistent checkpoint') + + # Otherwise we are dealing with global checkpoints + # If no tracker file, return nothing + if iteration == -1: + if not rank0: + print_rank_0('WARNING: could not find the metadata file {}'.format(tracker_filename)) + print_rank_0(' will not load any checkpoints and will start from random') + # Conditionally exit if checkpoint not found. + if args.exit_on_missing_checkpoint: + print_rank_0(">> '--exit-on-missing-checkpoint' set ... exiting. <<") + if torch.distributed.is_initialized(): + torch.distributed.barrier() + sys.exit() + + return None, "", False, None + + # Determine the type of the checkpoint + checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=True) + is_dist_ckpt = dist_checkpointing.check_is_distributed_checkpoint(checkpoint_name) + if not rank0: + dist_infix = "distributed " if is_dist_ckpt else "" + if release: + print_rank_0(f' loading release {dist_infix}checkpoint from {load_dir}') + else: + print_rank_0( + f' loading {dist_infix}checkpoint from {load_dir} at iteration {iteration}' + ) + + # Handle global distributed checkpoint + if is_dist_ckpt: + return _load_global_dist_base_checkpoint( + load_dir, args, rank0, sharded_state_dict, iteration, release, checkpointing_context=checkpointing_context + ) + # Handle global legacy checkpoint + if rank0: + checkpoint_name = find_checkpoint_rank_0(load_dir, iteration, release) + else: + checkpoint_name = get_checkpoint_name(load_dir, iteration, release, return_base_dir=False) + try: + state_dict = torch.load(checkpoint_name, map_location='cpu') + except ModuleNotFoundError: + from megatron.legacy.fp16_deprecated import loss_scaler + + # For backward compatibility. 
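The backward-compatibility path below temporarily aliases old import paths in `sys.modules` so that `torch.load` can unpickle objects that were saved under module names which no longer exist. The general trick in isolation, with made-up module names:

```python
import sys
import types

# Pretend 'old_pkg.loss_scaler' was renamed to 'new_pkg.loss_scaler'. Objects pickled
# under the old path can be loaded again if that path resolves to the new module.
new_loss_scaler_module = types.ModuleType("new_pkg.loss_scaler")
sys.modules["old_pkg.loss_scaler"] = new_loss_scaler_module

# ... torch.load(checkpoint_name) would now resolve 'old_pkg.loss_scaler' while unpickling ...

# Remove the alias afterwards, just as the code below does with sys.modules.pop(...).
sys.modules.pop("old_pkg.loss_scaler", None)
```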
+ if not rank0: + print_rank_0(' > deserializing using the old code structure ...') + sys.modules['fp16.loss_scaler'] = sys.modules['megatron.legacy.fp16_deprecated.loss_scaler'] + sys.modules['megatron.fp16.loss_scaler'] = sys.modules[ + 'megatron.legacy.fp16_deprecated.loss_scaler' + ] + sys.modules['megatron.model'] = sys.modules['megatron.legacy.model'] + state_dict = torch.load(checkpoint_name, map_location='cpu') + sys.modules.pop('fp16.loss_scaler', None) + sys.modules.pop('megatron.fp16.loss_scaler', None) + sys.modules.pop('megatron.model', None) + except Exception as e: + print('could not load the checkpoint') + print(e) + sys.exit() + + return state_dict, checkpoint_name, release, CheckpointType.LEGACY + + +def load_args_from_checkpoint( + args, load_arg='load', checkpointing_context=None +): + """Set required arguments from the checkpoint specified in the + arguments. + + Will overwrite arguments that have a non-None default value, but + will leave any arguments that default to None as set. + + Returns the same args NameSpace with the new values added/updated. + + If no checkpoint is specified in args, or if the checkpoint is + there but invalid, the arguments will not be modified + + """ + load_dir = getattr(args, load_arg) + + if load_dir is None: + print_rank_0('No load directory specified, using provided arguments.') + return args + + state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( + load_dir, + args, + rank0=True, + checkpointing_context=checkpointing_context, + ) + + # Args. + if not state_dict: + print_rank_0('Checkpoint not found to provide arguments, using provided arguments.') + return args + + if 'args' not in state_dict: + print_rank_0('Checkpoint provided does not have arguments saved, using provided arguments.') + return args + + checkpoint_args = state_dict['args'] + checkpoint_version = state_dict.get('checkpoint_version', 0) + args.iteration = state_dict['iteration'] + + # One-off conversion for foundation models + if hasattr(checkpoint_args, 'disable_bias_linear'): + setattr( + checkpoint_args, 'add_bias_linear', not getattr(checkpoint_args, 'disable_bias_linear') + ) + + def _set_arg(arg_name, old_arg_name=None, force=False): + if not force and getattr(args, arg_name, None) is not None: + return + + if old_arg_name is not None: + checkpoint_value = getattr(checkpoint_args, old_arg_name, None) + else: + checkpoint_value = getattr(checkpoint_args, arg_name, None) + + if checkpoint_value is not None: + print_rank_0(f"Setting {arg_name} to {checkpoint_value} from checkpoint") + setattr(args, arg_name, checkpoint_value) + else: + print_rank_0(f"Checkpoint did not provide arguments {arg_name}") + + # Model args. 
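The `_set_arg` helper above implements a simple precedence rule: a value already present on `args` wins unless `force=True`, in which case the checkpoint value always wins. A self-contained illustration with toy namespaces (not the real argument sets):

```python
from types import SimpleNamespace

args = SimpleNamespace(hidden_size=None, normalization="LayerNorm")
checkpoint_args = SimpleNamespace(hidden_size=4096, normalization="RMSNorm")

def set_arg(name: str, force: bool = False) -> None:
    """Toy version of _set_arg: the checkpoint value is applied only when allowed."""
    if not force and getattr(args, name, None) is not None:
        return  # keep the value the user already provided
    value = getattr(checkpoint_args, name, None)
    if value is not None:
        setattr(args, name, value)

set_arg("hidden_size")                 # was None, so the checkpoint value 4096 is used
set_arg("normalization")               # user value "LayerNorm" is kept
set_arg("normalization", force=True)   # checkpoint value "RMSNorm" now wins
print(args)
```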
+ _set_arg('num_layers') + _set_arg('hidden_size') + _set_arg('ffn_hidden_size') + _set_arg('seq_length') + _set_arg('num_attention_heads') + _set_arg('num_query_groups', force=True) + _set_arg('group_query_attention', force=True) + _set_arg('kv_channels') + _set_arg('max_position_embeddings') + _set_arg('position_embedding_type', force=True) + _set_arg('add_position_embedding', force=True) + _set_arg('use_rotary_position_embeddings', force=True) + _set_arg('rotary_base', force=True) + _set_arg('rotary_percent', force=True) + _set_arg('rotary_interleaved', force=True) + _set_arg('add_bias_linear', force=True) + _set_arg('add_qkv_bias', force=True) + _set_arg('squared_relu', force=True) + _set_arg('swiglu', force=True) + _set_arg('untie_embeddings_and_output_weights', force=True) + _set_arg('apply_layernorm_1p', force=True) + _set_arg('normalization', force=True) + _set_arg('apply_query_key_layer_scaling', force=True) + _set_arg('attention_dropout', force=True) + _set_arg('hidden_dropout', force=True) + + _set_arg('hybrid_override_pattern', force=True) + _set_arg('spec', force=True) + _set_arg('hybrid_attention_ratio', force=True) + _set_arg('hybrid_mlp_ratio', force=True) + + _set_arg('num_experts', force=True) + _set_arg('moe_layer_freq', force=True) + _set_arg('moe_ffn_hidden_size', force=True) + _set_arg('moe_router_topk', force=True) + _set_arg('moe_token_dispatcher_type', force=True) + _set_arg('moe_router_pre_softmax', force=True) + _set_arg('moe_grouped_gemm', force=True) + _set_arg('moe_shared_expert_intermediate_size', force=True) + + # Tokenizer args. + _set_arg('tokenizer_type', force=True) + # Using checkpoint version might not always be safe (e.g., if running on different cluster). + if args.use_tokenizer_model_from_checkpoint_args: + _set_arg('tokenizer_model', force=True) + _set_arg('tiktoken_pattern', force=True) + _set_arg('padded_vocab_size') + + # Checkpoint args. + _set_arg('ckpt_format') + + # Model parallelism args. + if args.use_mp_args_from_checkpoint_args: + if checkpoint_version < 3.0: + _set_arg('tensor_model_parallel_size', 'model_parallel_size') + else: + _set_arg('tensor_model_parallel_size', force=True) + _set_arg('pipeline_model_parallel_size', force=True) + _set_arg('virtual_pipeline_model_parallel_size', force=True) + _set_arg('num_layers_per_virtual_pipeline_stage') + _set_arg('expert_model_parallel_size', force=True) + + return args, checkpoint_args + + +def fix_fp8_params_lose_precision_when_loading_dist_ckpt(state_dict): + """ + When "--fp8-param-gather" and "--use-dist-ckpt" are both enabled, the state dict read from + dist-checkpoint loses precision (the weights read from checkpoint go through the process of + bf16/fp16 -> fp8 -> bf16/fp16). This function is implemented to solve this problem. + When "--fp8-param-gather" is disabled, this function doesn't modify anything. + """ + for key in state_dict.keys(): + if key.startswith('model'): + for _, sharded_tensor in state_dict[key].items(): + if is_float8tensor(sharded_tensor.data): + sharded_tensor.data = sharded_tensor.data.from_float8().cpu() + + +def load_checkpoint(model, optimizer, opt_param_scheduler, load_arg='load', strict=True, + checkpointing_context=None, skip_load_to_model_and_opt=False): + """Load a model checkpoint and return the iteration. + strict (bool): whether to strictly enforce that the keys in + :attr:`state_dict` of the checkpoint match the names of + parameters and buffers in model. 
+ skip_load_to_model_and_opt (bool): whether to call `load_state_dict` + for :attr:`model` and :attr:`optimizer`. In case of running FSDP2 + or other torch features that uses DTensor in state dict, the tensors + are already loaded in-place by `_load_base_checkpoint`. + """ + args = get_args() + load_dir = getattr(args, load_arg) + + # Finetuning directories + pretrained_dir = getattr(args, 'pretrained_checkpoint', None) + if pretrained_dir is not None and not checkpoint_exists(load_dir): + print_rank_0( + f'Checkpoint file not found in load directory {load_dir} attempting to finetune with checkpoint in {pretrained_dir}' + ) + load_dir = pretrained_dir + if not checkpoint_exists(load_dir): + raise FileNotFoundError("No checkpoint found in load directory or pretrained directory") + args.finetune = True + + ddp_model = model + model = unwrap_model(ddp_model) + + load_kwargs = {} + is_dist_ckpt = False + if ( + args.auto_detect_ckpt_format + or args.use_dist_ckpt + or args.non_persistent_save_interval is not None + ): + state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( + load_dir, + args, + rank0=True, + checkpointing_context=checkpointing_context, + ) + + is_dist_ckpt = ( + ckpt_type == CheckpointType.LOCAL + or dist_checkpointing.check_is_distributed_checkpoint(checkpoint_name) + ) + if is_dist_ckpt: + ckpt_tp_pp = ( + state_dict['args'].tensor_model_parallel_size, + state_dict['args'].pipeline_model_parallel_size, + getattr(state_dict['args'], 'encoder_tensor_model_parallel_size', 0), + getattr(state_dict['args'], 'encoder_pipeline_model_parallel_size', 0), + ) + run_tp_pp = ( + args.tensor_model_parallel_size, + args.pipeline_model_parallel_size, + # TODO: change this to args.encoder_tensor_model_parallel_size after 30th Nov 24 + getattr(args, 'encoder_tensor_model_parallel_size', 0), + getattr(args, 'encoder_pipeline_model_parallel_size', 0), + ) + mismatch_msg = "(TP, PP, encoder TP, encoder PP) mismatch after resume ({} vs {} from checkpoint)".format( + run_tp_pp, ckpt_tp_pp + ) + + # Determine if RNG state will be loaded + if (ckpt_tp_pp == run_tp_pp and not release and not args.finetune and not args.no_load_rng + and not getattr(state_dict['args'], 'no_save_rng', False)): + gen_sd_rng_state = get_rng_state(True) # we can load the rng state + else: + gen_sd_rng_state = None + if ckpt_tp_pp != run_tp_pp: + print_rank_0("{}: RNG state will be ignored".format(mismatch_msg)) + + optim_sd_kwargs = dict(is_loading=True) + # Determine if optimizer state will be loaded + if (not release and not args.finetune and not args.no_load_optim + and not getattr(state_dict['args'], 'no_save_optim', False)): + gen_sd_optim = optimizer + gen_sd_opt_param_scheduler = opt_param_scheduler + + if args.use_distributed_optimizer: + optim_sd_kwargs['sharding_type'] = ('fully_sharded_model_space' + if getattr(state_dict['args'], 'ckpt_fully_parallel_save', False) + else 'dp_zero_gather_scatter') + # This is for backwards-compatibility. 
Can be removed once 'fully_sharded_bucket_space' loading is removed + for maybe_dist_opt_optim_state in (state_dict['optimizer'], *state_dict['optimizer'].values()): + if 'param_state_sharding_type' in maybe_dist_opt_optim_state: + if maybe_dist_opt_optim_state['param_state_sharding_type'] == 'fully_sharded_bucket_space': + print_rank_0('Detected deprecated `fully_sharded_bucket_space` DistributedOptimizer checkpoint format') + optim_sd_kwargs['sharding_type'] = maybe_dist_opt_optim_state['param_state_sharding_type'] + break + + if ckpt_tp_pp != run_tp_pp and optim_sd_kwargs['sharding_type'] != 'fully_sharded_model_space': + raise RuntimeError(f"{mismatch_msg}: not supported for DistributedOptimizer with sharding type {optim_sd_kwargs['sharding_type']}." + f" Please use `--ckpt-fully-parallel-save` flag during checkpoint saving.") + else: + gen_sd_optim = None + gen_sd_opt_param_scheduler = None + + # Determine if rerun state will be loaded + if ( + ckpt_tp_pp == run_tp_pp + and not release + and not args.finetune + and 'rerun_state_machine' in state_dict + ): + rerun_state_machine = get_rerun_state_machine() + gen_sd_rerun_state = rerun_state_machine.state_dict( + data_iterator=None, use_dist_ckpt=True + ) + else: + gen_sd_rerun_state = None + if ckpt_tp_pp != run_tp_pp: + print_rank_0("{}: Rerun state will be ignored".format(mismatch_msg)) + + # [ModelOpt]: IMPORTANT! Restoring modelopt_state (sharded or not) must be performed + # after the model instance has been created and before _load_base_checkpoint is called. + if has_nvidia_modelopt: + if ckpt_type == CheckpointType.LOCAL: + print_rank_0('WARNING: Local checkpointing does not support nvidia_modelopt.') + elif ckpt_type == CheckpointType.GLOBAL: + restore_modelopt_state(model, state_dict) + else: + restore_sharded_modelopt_state(model, checkpoint_name) + + # [ModelOpt]: Initial loading from non-resume sharded checkpoint to a Distillation Model + # will result in key mismatch with loss modules potentially containing parameters, since + # it requires generating a state_dict before loading. Here we hide those modules if present. + with contextlib.ExitStack() as stack: # Allows multiple context managers for each model shard + if args.finetune and hasattr(model[0], "hide_loss_modules"): + for m in model: + stack.enter_context(m.hide_loss_modules()) + load_kwargs['sharded_state_dict'] = generate_state_dict( + args, model, gen_sd_optim, gen_sd_opt_param_scheduler, gen_sd_rng_state, + use_dist_ckpt=True, optim_sd_kwargs=optim_sd_kwargs, rerun_state=gen_sd_rerun_state + ) + + # When "--fp8-param-gather" is disabled, this function doesn't modify anything. + fix_fp8_params_lose_precision_when_loading_dist_ckpt(load_kwargs['sharded_state_dict']) + + state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( + load_dir, args, rank0=False, checkpointing_context=checkpointing_context, + **load_kwargs + ) + + # Checkpoint not loaded. + if state_dict is None: + # Iteration and num_floating_point_operations_so_far default to 0. + return 0, 0 + + # Set checkpoint version. + set_checkpoint_version(state_dict.get('checkpoint_version', 0)) + + # Set iteration. 
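As a compact summary of the iteration bookkeeping that follows, this sketch restates the same rule using only the keys visible in this patch:

```python
def resolve_iteration(state_dict: dict, finetune: bool, release: bool) -> int:
    """Finetuning runs and release checkpoints restart at iteration 0."""
    if finetune or release:
        return 0
    if "iteration" in state_dict:
        return state_dict["iteration"]
    # Older checkpoints stored the counter under 'total_iters'.
    return state_dict["total_iters"]

assert resolve_iteration({"iteration": 5000}, finetune=False, release=False) == 5000
assert resolve_iteration({"iteration": 5000}, finetune=True, release=False) == 0
```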
+ if args.finetune or release: + iteration = 0 + else: + try: + iteration = state_dict['iteration'] + except KeyError: + try: # Backward compatible with older checkpoints + iteration = state_dict['total_iters'] + except KeyError: + print_rank_0('A metadata file exists but unable to load ' + 'iteration from checkpoint {}, exiting'.format(checkpoint_name)) + sys.exit() + num_floating_point_operations_so_far = state_dict.get('num_floating_point_operations_so_far', 0) + + # Check arguments. + assert args.consumed_train_samples == 0 + assert args.skipped_train_samples == 0 + assert args.consumed_valid_samples == 0 + if 'args' in state_dict and not args.finetune: + checkpoint_args = state_dict['args'] + check_checkpoint_args(checkpoint_args) + args.consumed_train_samples = getattr(checkpoint_args, + 'consumed_train_samples', 0) + args.skipped_train_samples = getattr(checkpoint_args, + 'skipped_train_samples', 0) + update_num_microbatches(consumed_samples=args.consumed_train_samples, verbose=True) + args.consumed_valid_samples = getattr(checkpoint_args, + 'consumed_valid_samples', 0) + else: + print_rank_0('could not find arguments in the checkpoint ...') + + # Model. + strict = False if args.retro_add_retriever else strict + if not skip_load_to_model_and_opt: + if len(ddp_model) == 1: + ddp_model[0].load_state_dict(state_dict['model'], strict=strict) + else: + for i in range(len(ddp_model)): + mpu.set_virtual_pipeline_model_parallel_rank(i) + ddp_model[i].load_state_dict(state_dict['model%d' % i], strict=strict) + + # Fix up query/key/value matrix ordering if needed. + checkpoint_version = get_checkpoint_version() + print_rank_0(f' checkpoint version {checkpoint_version}') + fix_query_key_value_ordering(model, checkpoint_version) + + # Optimizer. + if not release and not args.finetune and not args.no_load_optim: + try: + # Load state dict. + if not skip_load_to_model_and_opt and optimizer is not None and not optimizer.is_stub_optimizer: + optimizer.load_state_dict(state_dict['optimizer']) + + # Load distributed optimizer's custom parameter state. + # For distributed checkpoint it's already loaded in load_state_dict above + if args.use_distributed_optimizer and not is_dist_ckpt: + # NOTE: this is a manual read of the tracker file. + # This code should not be reached when reading from a non_persistent checkpoint + assert not is_dist_ckpt + tracker_filename = get_checkpoint_tracker_filename(load_dir) + iteration, release = read_metadata(tracker_filename) + model_checkpoint_name = \ + get_checkpoint_name(load_dir, iteration, release) + optim_checkpoint_name = \ + get_distributed_optimizer_checkpoint_name( + model_checkpoint_name) + optimizer.load_parameter_state(optim_checkpoint_name, + update_legacy_format=args.ckpt_convert_update_legacy_dist_opt_format) + + # Load scheduler. + if opt_param_scheduler is not None: + if 'lr_scheduler' in state_dict: # backward compatbility + opt_param_scheduler.load_state_dict(state_dict['lr_scheduler']) + else: + opt_param_scheduler.load_state_dict(state_dict['opt_param_scheduler']) + except KeyError as e: + print_rank_0('Unable to load optimizer from checkpoint {}. 
' + 'Specify --no-load-optim or --finetune to prevent ' + 'attempting to load the optimizer state, ' + 'exiting ...'.format(checkpoint_name)) + raise e + else: + if (args.fp16 or args.bf16) and optimizer is not None: + optimizer.reload_model_params() + + # rerun state + try: + if 'rerun_state_machine' in state_dict: + get_rerun_state_machine().load_state_dict(state_dict['rerun_state_machine']) + except Exception as e: + print(f"Unable to restore RerunMachine from checkpoint: {e}") + sys.exit() + + # rng states. + if not release and not args.finetune and not args.no_load_rng: + try: + if 'rng_state' in state_dict: + # access rng_state for data parallel rank + if args.data_parallel_random_init: + rng_state = state_dict['rng_state'][mpu.get_data_parallel_rank()] + else: + rng_state = state_dict['rng_state'][0] + random.setstate(rng_state['random_rng_state']) + np.random.set_state(rng_state['np_rng_state']) + torch.set_rng_state(rng_state['torch_rng_state']) + torch.cuda.set_rng_state(rng_state['cuda_rng_state']) + # Check for empty states array + if not rng_state['rng_tracker_states']: + raise KeyError + tensor_parallel.get_cuda_rng_tracker().set_states( + rng_state['rng_tracker_states']) + else: # backward compatability + random.setstate(state_dict['random_rng_state']) + np.random.set_state(state_dict['np_rng_state']) + torch.set_rng_state(state_dict['torch_rng_state']) + torch.cuda.set_rng_state(state_dict['cuda_rng_state']) + # Check for empty states array + if not state_dict['rng_tracker_states']: + raise KeyError + tensor_parallel.get_cuda_rng_tracker().set_states( + state_dict['rng_tracker_states']) + except KeyError: + print_rank_0('Unable to load rng state from checkpoint {}. ' + 'Specify --no-load-rng or --finetune to prevent ' + 'attempting to load the rng state, ' + 'exiting ...'.format(checkpoint_name)) + sys.exit() + + # Some utilities want to load a checkpoint without distributed being initialized + if torch.distributed.is_initialized(): + torch.distributed.barrier() + + print_rank_0(f' successfully loaded checkpoint from {load_dir} ' + f'[ t {mpu.get_tensor_model_parallel_rank() + 1}/{mpu.get_tensor_model_parallel_world_size()}, ' + f'p {mpu.get_pipeline_model_parallel_rank() + 1}/{mpu.get_pipeline_model_parallel_world_size()} ] ' + f'at iteration {iteration}') + + # Additional callback for wandb (last rank) + if not torch.distributed.is_initialized() \ + or is_last_rank(): + wandb_utils.on_load_checkpoint_success(checkpoint_name, load_dir) + + torch.cuda.empty_cache() + + if iteration > 0: + # Notify FT that a checkpoint was loaded. 
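The RNG restore above picks exactly one entry from the list produced by `get_rng_state`: the entry for this data-parallel rank when `args.data_parallel_random_init` seeded ranks differently, otherwise the single shared entry at index 0. A toy sketch of that indexing rule:

```python
def pick_rng_entry(rng_state_list, dp_rank: int, data_parallel_random_init: bool):
    """Mirror of the indexing used above when restoring RNG state."""
    index = dp_rank if data_parallel_random_init else 0
    return rng_state_list[index]

# With per-rank seeding every data-parallel rank restores its own entry;
# otherwise all ranks restore rng_state_list[0].
states = ["rank0-rng", "rank1-rng", "rank2-rng"]
assert pick_rng_entry(states, dp_rank=2, data_parallel_random_init=True) == "rank2-rng"
assert pick_rng_entry(states, dp_rank=2, data_parallel_random_init=False) == "rank0-rng"
```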
+ is_local_chkpt = (ckpt_type == CheckpointType.LOCAL) + ft_integration.on_checkpoint_loaded(is_local_chkpt=is_local_chkpt) + + return iteration, num_floating_point_operations_so_far + + +def load_biencoder_checkpoint(model, only_query_model=False, + only_context_model=False, custom_load_path=None): + """ + selectively load retrieval models for indexing/retrieving + from saved checkpoints + """ + + args = get_args() + + model = unwrap_model(model) + + load_path = custom_load_path if custom_load_path is not None else args.load + + tracker_filename = get_checkpoint_tracker_filename(load_path) + with open(tracker_filename, 'r') as f: + iteration = int(f.read().strip()) + + checkpoint_name = get_checkpoint_name(load_path, iteration, + args.use_distributed_optimizer, + release=False) + + if mpu.get_data_parallel_rank() == 0: + print('global rank {} is loading checkpoint {}'.format( + torch.distributed.get_rank(), checkpoint_name)) + + state_dict = torch.load(checkpoint_name, map_location='cpu') + ret_state_dict = state_dict['model'] + + if only_query_model: + ret_state_dict.pop('context_model') + if only_context_model: + ret_state_dict.pop('query_model') + + assert len(model) == 1 + model[0].load_state_dict(ret_state_dict) + torch.distributed.barrier() + + if mpu.get_data_parallel_rank() == 0: + print(' successfully loaded {}'.format(checkpoint_name)) + + return model diff --git a/megatron/training/ft_integration.py b/megatron/training/ft_integration.py index 2502627..420161d 100644 --- a/megatron/training/ft_integration.py +++ b/megatron/training/ft_integration.py @@ -1,110 +1,367 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -""" -FT Package Integration - -This file is part of the integration process for the FT package, a custom heartbeat-based -system developed by NVIDIA. The FT package monitors the ranks to detect hangs, gracefully -terminates the workload, and respawns it from the last checkpoints. It includes an auto -config feature that automatically sets up timeouts based on the observed time of iterations. - -Note: This tool is an internal NVIDIA tool and is not open source. This file does not -contain the FT package itself but supports its integration. -""" - -import types -from enum import Enum, auto -from . import global_vars - -class StateMachineActions(Enum): - NONE = auto() - SAVE_CHECKPOINT = auto() - TRAIN_HEARTBEAT = auto() - EVAL_HEARTBEAT = auto() - UPDATE_TIMEOUT = auto() - -class _TrainingStateMachine: - """ - This class encapsulates logic for determining when: - - FT timeouts can be updated (`.can_update_timeouts` property) - - `on_ ...` methods update the state and should be called from the corresponding places. 
- """ - - MIN_ITERS_FOR_TIMEOUT_UPDATE = 2 - - def __init__(self): - self.num_tr_iters_total = 0 - self.num_tr_iter_at_last_save = None - self.seen_checkpointing = False - self.timeouts_updated = False - - def on_save_checkpoint(self): - self.num_tr_iter_at_last_save = self.num_tr_iters_total - - def on_train_heartbeat(self): - self.num_tr_iters_total += 1 - if not self.seen_checkpointing and self.num_tr_iter_at_last_save is not None: - # detect mid-epoch checkpointing that makes hearbeat interval longer - iters_pre_save = self.num_tr_iter_at_last_save - iters_post_save = self.num_tr_iters_total - self.num_tr_iter_at_last_save - self.seen_checkpointing = iters_pre_save > 0 and iters_post_save > 0 - - def on_eval_heartbeat(self): - pass - - def on_timeouts_updated(self): - self.timeouts_updated = True - - @property - def can_update_timeouts(self) -> bool: - """ - Returns True if new timeouts can be computed. - `.on_timeouts_updated()` resets this property back to False. - """ - if self.timeouts_updated: - # timeouts are updated at most once per training run - return False - if self.num_tr_iters_total < self.MIN_ITERS_FOR_TIMEOUT_UPDATE: - # need a few training iters - return False - # check if there was checkoint saving - # this makes heartbeat iterval longer than usual. - return self.seen_checkpointing - - def perform_action(self, action: StateMachineActions): - if action == StateMachineActions.TRAIN_HEARTBEAT: - self.on_train_heartbeat() - elif action == StateMachineActions.SAVE_CHECKPOINT: - self.on_save_checkpoint() - elif action == StateMachineActions.EVAL_HEARTBEAT: - self.on_eval_heartbeat() - elif action == StateMachineActions.UPDATE_TIMEOUT: - self.on_timeouts_updated() - assert not self.can_update_timeouts - # No action for StateMachineActions.NONE - - -_GLOBAL_RANK_MONITOR_CLIENT = None -_GLOBAL_STATE_MACHINE = _TrainingStateMachine() - -def _set_rank_monitor_client(): - from nvidia_resiliency_ext.fault_tolerance import RankMonitorClient - cli = RankMonitorClient() - global _GLOBAL_RANK_MONITOR_CLIENT - global_vars._ensure_var_is_not_initialized(_GLOBAL_RANK_MONITOR_CLIENT, 'rank monitor client') - _GLOBAL_RANK_MONITOR_CLIENT = cli - -def get_rank_monitor_client(action=StateMachineActions.NONE): - global _GLOBAL_RANK_MONITOR_CLIENT, _GLOBAL_STATE_MACHINE - if _GLOBAL_RANK_MONITOR_CLIENT is None: - try: - _set_rank_monitor_client() - except ImportError: - _GLOBAL_RANK_MONITOR_CLIENT = None - _GLOBAL_STATE_MACHINE.perform_action(action) - return _GLOBAL_RANK_MONITOR_CLIENT - -def can_update_timeouts(): - global _GLOBAL_STATE_MACHINE - return _GLOBAL_STATE_MACHINE.can_update_timeouts +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +""" +Fault Tolerance (FT) package integration for Megatron-LM, using the FT section-based API. + +The FT package is included in "nvidia-resiliency-ext" +(https://github.com/NVIDIA/nvidia-resiliency-ext). + +NOTE: The workload must be run using the `ft_launcher` tool provided by `nvidia-resiliency-ext.` +NOTE: Calls to the public API of this module are no-ops if FT is not initialized +(`ft_integration.setup` was not called). +NOTE: Default distributed process group should be initialized before calling `ft_integration.setup` + +The "setup" FT section is opened during FT initialization and closed before the first training or +eval iteration. Training and evaluation steps are wrapped in the "step" section, but only after a +few warmup iterations. 
This is because the initial iterations may be slower, and we want the "step" +timeout to be short. These warmup steps, which are not wrapped in the "step" section, will fall into +the out-of-section area. All checkpoint-saving-related operations (including asynchronous +checkpointing finalization) are wrapped in the "checkpointing" section. + +If timeout calculation is enabled (--calc-ft-timeouts), +FT timeouts are updated after each checkpoint and at the end of the run. +Updated values are based on observed intervals. + +`ft_launcher` command example: +``` +ft_launcher \ + --rdzv_backend=c10d --rdzv_endpoint=${MASTER_ADDR}:${MASTER_PORT} \ + --nnodes=${NUM_NODES} --nproc-per-node=${NUM_GPUS_PER_NODE} \ + --ft-param-rank_section_timeouts=setup:600,step:180,checkpointing:420 \ + --ft-param-rank_out_of_section_timeout=300 \ + train_script_with_ft.py +``` +""" + +import argparse +import json +import os +import random +import signal +import sys +import threading +import time +from typing import Any, Optional + +import torch + +from . import global_vars +from .utils import is_rank0, print_rank_0 + +_GLOBAL_RANK_MONITOR_CLIENT = None + +_ft_state_path = None +_is_persistent_chkpt_loaded = False +_is_async_chkpt_enabled = False +_is_calculating_timeouts = False +_is_setup_section_open = False +_seen_checkpoints_cnt = 0 +_seen_tr_iters_cnt = 0 +_curr_eval_iter_idx = 0 + +_NUM_WARMUP_ITERS = 1 +_MIN_ITERS_FOR_STEP_TIMEOUT_UPDATE = 16 + + +def get_rank_monitor_client() -> Optional[Any]: + """Returns the underlying fault tolerance client instance + + Returns: + RankMonitorClient: rank monitor client instance, or None if FT was not initialized + """ + return _GLOBAL_RANK_MONITOR_CLIENT + + +def setup(args: argparse.Namespace) -> None: + """Initialize fault tolerance + + Args: + args (argparse.Namespace): parsed Megatron-LM command line arguments + + Raises: + ValueError: if invalid config is provided + """ + from nvidia_resiliency_ext.fault_tolerance import RankMonitorClient + + print_rank_0(f"FT: initializing...") + + checkpoint_dir = args.save + if not checkpoint_dir: + raise ValueError("checkpointing save dir must be set to enable fault tolerance") + if is_rank0() and not os.path.exists(checkpoint_dir): + # MLM checkpoint dir will be needed for saving FT state. + # it can happen before the checkpointing, so create it in advance + os.makedirs(checkpoint_dir, exist_ok=True) + + cli = RankMonitorClient() + global _GLOBAL_RANK_MONITOR_CLIENT + global_vars._ensure_var_is_not_initialized(_GLOBAL_RANK_MONITOR_CLIENT, 'rank monitor client') + _GLOBAL_RANK_MONITOR_CLIENT = cli + + global _ft_state_path + _ft_state_path = os.path.join(checkpoint_dir, "ft_state.json") + + global _is_async_chkpt_enabled + _is_async_chkpt_enabled = args.async_save + + global _is_calculating_timeouts + _is_calculating_timeouts = args.calc_ft_timeouts + + cli.init_workload_monitoring() + _load_state_if_exists() + print_rank_0(f"FT: initialized. Timeouts={cli.section_timeouts}") + + cli.start_section("setup") + global _is_setup_section_open + _is_setup_section_open = True + + +def on_training_step_start() -> None: + """Should be called before each training step""" + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + global _is_setup_section_open + if _is_setup_section_open: + rmon_cli.end_section("setup") + _is_setup_section_open = False + if _seen_tr_iters_cnt >= _NUM_WARMUP_ITERS: + rmon_cli.start_section("step") + # reset eval step index. 
we started training, so evaluation is done + global _curr_eval_iter_idx + _curr_eval_iter_idx = 0 + + +def on_training_step_end() -> None: + """Should be called after each training step""" + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + global _seen_tr_iters_cnt + if _seen_tr_iters_cnt >= _NUM_WARMUP_ITERS: + rmon_cli.end_section("step") + _seen_tr_iters_cnt += 1 + + +def on_eval_step_start() -> None: + """Should be called before each validation step""" + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + global _is_setup_section_open + if _is_setup_section_open: + # setup section can be open if there were no training iters before evaluation + rmon_cli.end_section("setup") + _is_setup_section_open = False + if _curr_eval_iter_idx >= _NUM_WARMUP_ITERS: + rmon_cli.start_section("step") + + +def on_eval_step_end() -> None: + """Should be called after each validation step""" + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + global _curr_eval_iter_idx + if _curr_eval_iter_idx >= _NUM_WARMUP_ITERS: + rmon_cli.end_section("step") + _curr_eval_iter_idx += 1 + + +def on_checkpointing_start() -> None: + """Should be called before each checkpoint-saving-related operation.""" + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + rmon_cli.start_section("checkpointing") + + +def on_checkpointing_end(is_async_finalization: bool) -> None: + """Should be called after each checkpoint-saving-related operation. + + Args: + is_async_finalization (bool): true if called after an async checkpointing finalization + """ + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + rmon_cli.end_section("checkpointing") + # async checkpointing finalization is called before each training iter, it can be no-op. + # let's try to update the timeouts only on the `save_checkpoint` + if not is_async_finalization: + global _seen_checkpoints_cnt + _seen_checkpoints_cnt += 1 + _maybe_update_timeouts() + + +def on_checkpoint_loaded(is_local_chkpt: bool) -> None: + """Should be called after a checkpoint was loaded + + Args: + is_local_chkpt (bool): true if it was a local checkpoint, false if global + """ + # checkpoint can be loaded during "setup" + # check if persistent checkpoint was loaded, + # in-memory checkpoint reading can be very fast, + # so we could underestimate the "setup" timeout + global _is_persistent_chkpt_loaded + _is_persistent_chkpt_loaded = not is_local_chkpt + + +def shutdown() -> None: + """Shutdowns fault folerance, updates the FT timeouts if possible""" + global _GLOBAL_RANK_MONITOR_CLIENT + rmon_cli = get_rank_monitor_client() + if rmon_cli is not None: + print_rank_0("FT: closing...") + _maybe_update_timeouts(is_closing_ft=True) + rmon_cli.shutdown_workload_monitoring() + print_rank_0("FT: closed.") + _GLOBAL_RANK_MONITOR_CLIENT = None + + +def _load_state_if_exists(): + rmon_cli = get_rank_monitor_client() + if os.path.exists(_ft_state_path): + with open(_ft_state_path, "r") as f: + ft_state = json.load(f) + rmon_cli.load_state_dict(ft_state) + print_rank_0(f"FT: loaded timeouts from {_ft_state_path}. {rmon_cli.section_timeouts}") + + +def _update_timeouts(selected_sections, calc_out_of_section): + print_rank_0( + f"FT: updating timeouts for: {selected_sections} " + + f"update out-of-section: {calc_out_of_section} ..." 
+ ) + rmon_cli = get_rank_monitor_client() + rmon_cli.calculate_and_set_section_timeouts( + selected_sections=selected_sections, calc_out_of_section=calc_out_of_section + ) + if is_rank0(): + ft_state = rmon_cli.state_dict() + with open(_ft_state_path, "w") as f: + json.dump(ft_state, f) + print_rank_0(f"FT: updated timeouts saved to {_ft_state_path}. {rmon_cli.section_timeouts}") + + +def _maybe_update_timeouts(is_closing_ft=False): + rmon_cli = get_rank_monitor_client() + if rmon_cli is None: + return + if not _is_calculating_timeouts: + return + + # Decide which section timeouts can be updated + sections_to_update = [] + + if _is_persistent_chkpt_loaded: + sections_to_update.append("setup") + else: + print_rank_0( + "FT: can't update the setup section timeout until persistent checkpoint is loaded" + ) + + if _seen_tr_iters_cnt >= _MIN_ITERS_FOR_STEP_TIMEOUT_UPDATE: + sections_to_update.append("step") + else: + print_rank_0("FT: need to see more training iterations to update the step section timeout") + + if _seen_checkpoints_cnt > 0: + if not _is_async_chkpt_enabled: + sections_to_update.append("checkpointing") + else: + # There can be too much checkpointing section time variability + # across runs with the async checkpointing, e.g. in some runs all checkpointing + # work can be parallelized (=short checkpointing sections) while in others we can + # hit a costly finalization. + print_rank_0( + "FT: can't update the checkpointing section timeout with async checkpointing" + ) + else: + print_rank_0("FT: checkpointing section is not updated until a checkpoint was saved") + + update_out_of_section = False + if is_closing_ft: + # with async checkpointing, "checkpointing" section is not updated, + # but still we want to see some checkpointing to ensure that is was a complete run + if {'setup', 'step'}.issubset(sections_to_update) and _seen_checkpoints_cnt > 0: + update_out_of_section = True + else: + print_rank_0( + "FT: the out-of-section timeout won't be updated until all FT sections were seen" + ) + + else: + print_rank_0("FT: the out-of-section timeout won't be updated as the FT is not closing yet") + + if sections_to_update or update_out_of_section: + _update_timeouts( + selected_sections=sections_to_update, calc_out_of_section=update_out_of_section + ) + + +def maybe_setup_simulated_fault() -> None: + """Sets a simulated fault, based on `FT_SIM_FAULT_DESC` env variable. 
+ Simulated fault description format: + rank_hung|rank_killed;rank_to_fail|"";base_delay + NOTE: This if for FT testing only + """ + + simulated_fault_desc = os.environ.get('FT_SIM_FAULT_DESC', None) + if not simulated_fault_desc: + return + fault_type: Any # silence mypy + rank_to_fail: Any # silence mypy + base_delay: Any # silence mypy + fault_type, rank_to_fail, base_delay = simulated_fault_desc.split(';') + fault_type = fault_type.strip() + rank_to_fail = rank_to_fail.strip() + rank_to_fail = int(rank_to_fail) if rank_to_fail else None + base_delay = float(base_delay.strip()) + + rng = random.Random() + + print_rank_0( + f"FT: Initializing simulated fault: {fault_type}," + + f"rank to fail: {rank_to_fail}, base delay: {base_delay}" + ) + + # rank that simulates a fault can be explicitly specified in the `rank_to_fail` field + # if not specified, it just picks a random rank + rank = torch.distributed.get_rank() + rand_rank = rng.randint(0, torch.distributed.get_world_size() - 1) + rank_to_fail = rank_to_fail if rank_to_fail is not None else rand_rank + rank_to_fail = torch.tensor([rank_to_fail], device=torch.cuda.current_device()) + torch.distributed.broadcast(rank_to_fail, 0) + rank_to_fail = int(rank_to_fail.item()) + + if rank != rank_to_fail: + # this rank is not going to simulate a fault, nothing more to do + return + + if fault_type == 'random': + fault_type = rng.choice(['rank_killed', 'rank_hung']) + + if fault_type == 'rank_killed': + target_pid = os.getpid() + elif fault_type == 'rank_hung': + target_pid = os.getpid() + else: + raise Exception(f"Unknown fault type {fault_type} expected one of: rank_killed, rank_hung.") + + # add some randomness to the delay + delay = base_delay + 0.2 * rng.random() * base_delay + + print_rank_0(f"FT: Selected fault={fault_type}; target rank={rank_to_fail}; delay={delay}") + + def __fault_thread(): + time.sleep(delay) + for of in [sys.stdout, sys.stderr]: + print( + f"\n####\nFT: Simulating fault: {fault_type}; rank to fail: {rank_to_fail}\n####\n", + file=of, + flush=True, + ) + if fault_type == 'rank_hung': + os.kill(target_pid, signal.SIGSTOP) + else: + os.kill(target_pid, signal.SIGKILL) + + fault_sim_thread = threading.Thread(target=__fault_thread) + fault_sim_thread.daemon = True + fault_sim_thread.start() diff --git a/megatron/training/initialize.py b/megatron/training/initialize.py index eebe42f..9154d2a 100644 --- a/megatron/training/initialize.py +++ b/megatron/training/initialize.py @@ -1,478 +1,504 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
- -"""Megatron initialization.""" -import logging -import random -import os -import time -import warnings - -import numpy as np -import torch -from datetime import timedelta - -from megatron.legacy import fused_kernels -from megatron.training import get_adlr_autoresume -from megatron.training import get_args -from megatron.training import get_tensorboard_writer -from megatron.core import mpu, tensor_parallel -from megatron.core.rerun_state_machine import initialize_rerun_state_machine, RerunErrorInjector, RerunDiagnostic, RerunMode -from megatron.training.arguments import parse_args, validate_args -from megatron.training.yaml_arguments import validate_yaml -from megatron.training.checkpointing import load_args_from_checkpoint -from megatron.training.global_vars import set_global_variables -from megatron.core.fusions.fused_bias_dropout import bias_dropout_add_fused_train -from megatron.core.fusions.fused_bias_gelu import bias_gelu -from megatron.core.fusions.fused_bias_swiglu import bias_swiglu -from megatron.core.utils import get_te_version, is_te_min_version, is_torch_min_version - -logger = logging.getLogger(__name__) - - -def initialize_megatron( - extra_args_provider=None, - args_defaults={}, - ignore_unknown_args=False, - allow_no_cuda=False, - skip_mpu_initialization=False, - get_embedding_ranks=None, - get_position_embedding_ranks=None -): - """Set global variables, initialize distributed, and - set autoresume and random seeds. - `allow_no_cuda` should not be set unless using megatron for cpu only - data processing. In general this arg should not be set unless you know - what you are doing. - Returns a function to finalize distributed env initialization - (optionally, only when args.lazy_mpu_init == True) - """ - if not allow_no_cuda: - # Make sure cuda is available. - assert torch.cuda.is_available(), "Megatron requires CUDA." - - # Parse arguments - args = parse_args(extra_args_provider, ignore_unknown_args) - - # Prep for checkpoint conversion. - if args.ckpt_convert_format is not None: - assert args.ckpt_convert_save is not None - assert args.load is not None - args.exit_on_missing_checkpoint = True - - if args.use_checkpoint_args or args_defaults.get("use_checkpoint_args", False): - assert args.load is not None, "--use-checkpoint-args requires --load argument" - load_args_from_checkpoint(args) - - if args.yaml_cfg is not None: - args = validate_yaml(args, args_defaults) - else: - validate_args(args, args_defaults) - - - # set global args, build tokenizer, and set adlr-autoresume, - # tensorboard-writer, and timers. - set_global_variables(args) - - # set logging level - setup_logging() - - # init rerun state - def state_save_func(): - return { - 'rng_tracker_states': tensor_parallel.get_cuda_rng_tracker().get_states() - } - - def state_restore_func(state_dict): - if state_dict['rng_tracker_states']: - tensor_parallel.get_cuda_rng_tracker().set_states(state_dict['rng_tracker_states']) - - args = get_args() - initialize_rerun_state_machine( - state_save_func=state_save_func, - state_restore_func=state_restore_func, - mode=RerunMode(args.rerun_mode), - error_injector=RerunErrorInjector( - error_injection_rate=args.error_injection_rate, - error_injection_type=RerunDiagnostic(args.error_injection_type), - ), - ) - - # torch.distributed initialization - def finish_mpu_init(): - args = get_args() - # Pytorch distributed. - _initialize_distributed(get_embedding_ranks, get_position_embedding_ranks) - - # Random seeds for reproducibility. 
- if args.rank == 0: - print("> setting random seeds to {} ...".format(args.seed)) - _set_random_seed(args.seed, args.data_parallel_random_init) - - if skip_mpu_initialization: - return None - - args = get_args() - if args.lazy_mpu_init: - # TODO is this still a necessary option? - args.use_cpu_initialization = True - # delayed initialization of DDP-related stuff - # We only set basic DDP globals - mpu.set_tensor_model_parallel_world_size(args.tensor_model_parallel_size) - # and return function for external DDP manager - # to call when it has DDP initialized - mpu.set_tensor_model_parallel_rank(args.rank) - return finish_mpu_init - else: - # Megatron's MPU is the master. Complete initialization right away. - finish_mpu_init() - - # Autoresume. - _init_autoresume() - - # Compile dependencies. - _compile_dependencies() - - if args.tp_comm_overlap: - #TODO: Should this be activated with just decoder-tp-comm-overlap too? - _initialize_tp_communicators() - - # No continuation function - return None - - -def _compile_dependencies(): - - args = get_args() - - # ========================= - # Compile dataset C++ code. - # ========================= - # TODO: move this to ninja - if torch.distributed.get_rank() == 0: - start_time = time.time() - print("> compiling dataset index builder ...") - from megatron.core.datasets.utils import compile_helpers - - compile_helpers() - print( - ">>> done with dataset index builder. Compilation time: {:.3f} " - "seconds".format(time.time() - start_time), - flush=True, - ) - - # ================== - # Load fused kernels - # ================== - - # Custom kernel constraints check. - seq_len = args.seq_length - attn_batch_size = ( - args.num_attention_heads / args.tensor_model_parallel_size - ) * args.micro_batch_size - # Constraints on sequence length and attn_batch_size to enable warp based - # optimization and upper triangular optimization (for causal mask) - custom_kernel_constraint = ( - seq_len > 16 - and seq_len <= 16384 - and seq_len % 4 == 0 - and attn_batch_size % 4 == 0 - ) - # Print a warning. - if not ( - (args.fp16 or args.bf16) - and custom_kernel_constraint - and args.masked_softmax_fusion - ): - if args.rank == 0: - print( - "WARNING: constraints for invoking optimized" - " fused softmax kernel are not met. We default" - " back to unfused kernel invocations.", - flush=True, - ) - - # Always build on rank zero first. - if torch.distributed.get_rank() == 0: - start_time = time.time() - print("> compiling and loading fused kernels ...", flush=True) - #fused_kernels.load(args) - torch.distributed.barrier() - else: - torch.distributed.barrier() - #fused_kernels.load(args) - # Simple barrier to make sure all ranks have passed the - # compilation phase successfully before moving on to the - # rest of the program. We think this might ensure that - # the lock is released. - torch.distributed.barrier() - if torch.distributed.get_rank() == 0: - print( - ">>> done with compiling and loading fused kernels. 
" - "Compilation time: {:.3f} seconds".format(time.time() - start_time), - flush=True, - ) - -def _initialize_tp_communicators(): - """ initializing the communicators with user buffers for high-performance tensor-model-parallel - communication overlap """ - - try: - import yaml - - import transformer_engine - from transformer_engine.pytorch import module as te_module - - except ImportError: - raise RuntimeError("Tensor Parallel Communication/GEMM Overlap optimization needs 'yaml' and " - "'transformer_engine' packages") - - args = get_args() - - if args.tp_comm_overlap_cfg is not None: - with open(args.tp_comm_overlap_cfg,"r") as stream: - ub_cfgs = yaml.safe_load(stream) - else: - ub_cfgs = {} - - if getattr(args, 'decoder_tp_comm_overlap', False): - input_shape = [(args.decoder_seq_length * args.micro_batch_size) // args.context_parallel_size , args.hidden_size] - else: - input_shape = [(args.seq_length * args.micro_batch_size) // args.context_parallel_size , args.hidden_size] - - if is_te_min_version("1.9.0"): - # The process group with the target bootstrap backend is created in Transformer Engine. - te_module.base.initialize_ub(shape = input_shape, tp_size = args.tensor_model_parallel_size, - use_fp8 = (args.fp8 is not None) , ub_cfgs = ub_cfgs, - bootstrap_backend = args.tp_comm_bootstrap_backend) - else: - if args.tp_comm_bootstrap_backend != 'mpi': - warnings.warn( - f"Transformer Engine v{get_te_version()} supports only MPI bootstrap backend." - ) - # Create a MPI process group to help with TP communication overlap bootstrap. - torch.distributed.new_group(backend='mpi') - - te_module.base.initialize_ub(shape = input_shape, tp_size = args.tensor_model_parallel_size, - use_fp8 = (args.fp8 is not None) , ub_cfgs = ub_cfgs) - -def _initialize_distributed(get_embedding_ranks, get_position_embedding_ranks): - """Initialize torch.distributed and core model parallel.""" - args = get_args() - - device_count = torch.cuda.device_count() - if torch.distributed.is_initialized(): - - if args.rank == 0: - print( - "torch distributed is already initialized, " - "skipping initialization ...", - flush=True, - ) - args.rank = torch.distributed.get_rank() - args.world_size = torch.distributed.get_world_size() - - else: - if args.rank == 0: - print("> initializing torch distributed ...", flush=True) - # Manually set the device ids. - if device_count > 0: - torch.cuda.set_device(args.local_rank) - device_id = torch.device(f'cuda:{args.local_rank}') - else: - device_id = None - - # Call the init process - init_process_group_kwargs = { - 'backend' : args.distributed_backend, - 'world_size': args.world_size, - 'rank': args.rank, - 'init_method': args.dist_url, - 'timeout': timedelta(minutes=args.distributed_timeout_minutes), - } - - torch.distributed.init_process_group(**init_process_group_kwargs) - - # Set the tensor model-parallel, pipeline model-parallel, and - # data-parallel communicators. 
- if device_count > 0: - if mpu.model_parallel_is_initialized(): - print("model parallel is already initialized") - else: - mpu.initialize_model_parallel( - args.tensor_model_parallel_size, - args.pipeline_model_parallel_size, - args.virtual_pipeline_model_parallel_size, - args.pipeline_model_parallel_split_rank, - context_parallel_size=args.context_parallel_size, - hierarchical_context_parallel_sizes=args.hierarchical_context_parallel_sizes, - expert_model_parallel_size=args.expert_model_parallel_size, - num_distributed_optimizer_instances=args.num_distributed_optimizer_instances, - expert_tensor_parallel_size=args.expert_tensor_parallel_size, - distributed_timeout_minutes=args.distributed_timeout_minutes, - nccl_communicator_config_path=args.nccl_communicator_config_path, - order='tp-cp-ep-dp-pp' if not args.use_tp_pp_dp_mapping else 'tp-pp-dp', - encoder_tensor_model_parallel_size=args.encoder_tensor_model_parallel_size, - encoder_pipeline_model_parallel_size=args.encoder_pipeline_model_parallel_size, - get_embedding_ranks=get_embedding_ranks, - get_position_embedding_ranks=get_position_embedding_ranks, - ) - if args.rank == 0: - print( - f"> initialized tensor model parallel with size " - f"{mpu.get_tensor_model_parallel_world_size()}" - ) - print( - f"> initialized pipeline model parallel with size " - f"{mpu.get_pipeline_model_parallel_world_size()}" - ) - - -def _init_autoresume(): - """Set autoresume start time.""" - autoresume = get_adlr_autoresume() - if autoresume: - torch.distributed.barrier() - autoresume.init() - torch.distributed.barrier() - - -def _set_random_seed(seed_, data_parallel_random_init=False): - """Set random seed for reproducability.""" - if seed_ is not None and seed_ > 0: - # Ensure that different pipeline MP stages get different seeds. 
- seed = seed_ + (100 * mpu.get_pipeline_model_parallel_rank()) - # Ensure different data parallel ranks get different seeds - if data_parallel_random_init: - seed = seed + (10 * mpu.get_data_parallel_rank()) - random.seed(seed) - np.random.seed(seed) - torch.manual_seed(seed) - if torch.cuda.device_count() > 0: - tensor_parallel.model_parallel_cuda_manual_seed(seed) - else: - raise ValueError("Seed ({}) should be a positive integer.".format(seed_)) - - -def write_args_to_tensorboard(): - """Write arguments to tensorboard.""" - args = get_args() - writer = get_tensorboard_writer() - if writer: - for arg in vars(args): - writer.add_text(arg, str(getattr(args, arg)), global_step=args.iteration) - - -def set_jit_fusion_options(): - """Set PyTorch JIT layer fusion options.""" - # flags required to enable jit fusion kernels - if is_torch_min_version("2.2.0a0"): - pass # we're using torch.compile for jit fusion - elif is_torch_min_version("1.10.0a0"): - # nvfuser - torch._C._jit_set_profiling_executor(True) - torch._C._jit_set_profiling_mode(True) - torch._C._jit_override_can_fuse_on_cpu(False) - torch._C._jit_override_can_fuse_on_gpu(False) - torch._C._jit_set_texpr_fuser_enabled(False) - torch._C._jit_set_nvfuser_enabled(False)#(True) - torch._C._debug_set_autodiff_subgraph_inlining(False) - else: - # legacy pytorch fuser - torch._C._jit_set_profiling_mode(False) - torch._C._jit_set_profiling_executor(False) - torch._C._jit_override_can_fuse_on_cpu(True) - torch._C._jit_override_can_fuse_on_gpu(True) - - _warmup_jit_function() - - -def _warmup_jit_function(): - """Compilie JIT functions before the main training steps""" - args = get_args() - if args.bf16: - dtype = torch.bfloat16 - elif args.fp16: - dtype = torch.float16 - else: - dtype = torch.float32 - - # Warmup fused bias+gelu - bias = torch.rand( - args.ffn_hidden_size // args.tensor_model_parallel_size, - dtype=dtype, - device="cuda", - ) - input = torch.rand( - ( - args.seq_length // args.context_parallel_size, - args.micro_batch_size, - args.ffn_hidden_size // args.tensor_model_parallel_size, - ), - dtype=dtype, - device="cuda", - ) - # Warmup JIT fusions with the input grad_enable state of both forward - # prop and recomputation - for bias_grad, input_grad in zip([True, True], [False, True]): - bias.requires_grad, input.requires_grad = bias_grad, input_grad - for _ in range(5): - if args.swiglu: - output = bias_swiglu(input, bias) - else: - output = bias_gelu(bias, input) - del bias, input, output - - # Warmup fused bias+dropout+add - if args.sequence_parallel: - seq_length = args.seq_length // mpu.get_tensor_model_parallel_world_size() - else: - seq_length = args.seq_length - input = torch.rand( - (seq_length // args.context_parallel_size, args.micro_batch_size, args.hidden_size), - dtype=dtype, - device="cuda", - ) - residual = torch.rand( - (seq_length // args.context_parallel_size, args.micro_batch_size, args.hidden_size), - dtype=dtype, - device="cuda", - ) - bias = torch.rand((args.hidden_size), dtype=dtype, device="cuda").expand_as( - residual - ) - dropout_rate = 0.1 - # Warmup JIT fusions with the input grad_enable state of both forward - # prop and recomputation - for input_grad, bias_grad, residual_grad in zip( - [False, True], [True, True], [True, True] - ): - input.requires_grad = input_grad - bias.requires_grad = bias_grad - residual.requires_grad = residual_grad - for _ in range(5): - output = bias_dropout_add_fused_train([input, bias], residual, dropout_rate) - del bias, input, residual, output - 
torch.cuda.empty_cache() - - -def setup_logging() -> None: - """ Sets the default logging level based on cmdline args and env vars. - - Precedence: - 1. Command line argument `--logging-level` - 2. Env var `MEGATRON_LOGGING_LEVEL` - 3. Default logging level (INFO) - - Returns: None - """ - args = get_args() - logging_level = None - env_logging_level = os.getenv('MEGATRON_LOGGING_LEVEL', None) - if env_logging_level is not None: - logging_level = int(env_logging_level) - if args.logging_level is not None: - logging_level = args.logging_level - - if logging_level is not None: - logger.info(f'Setting logging level to {logging_level}') - logging.getLogger().setLevel(logging_level) +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. + +"""Megatron initialization.""" +import logging +import os +import random +import time +import warnings +from datetime import timedelta + +import numpy as np +import torch + +from megatron.core import mpu, tensor_parallel +from megatron.core.fusions.fused_bias_dropout import bias_dropout_add_fused_train +from megatron.core.fusions.fused_bias_gelu import bias_gelu +from megatron.core.fusions.fused_bias_swiglu import bias_swiglu +from megatron.core.parallel_state import create_group +from megatron.core.rerun_state_machine import ( + RerunDiagnostic, + RerunErrorInjector, + RerunMode, + initialize_rerun_state_machine, +) +from megatron.core.utils import get_te_version, is_te_min_version, is_torch_min_version +from megatron.legacy import fused_kernels +from megatron.training import get_adlr_autoresume, get_args, get_tensorboard_writer +from megatron.training.arguments import parse_args, validate_args +from megatron.training.async_utils import init_persistent_async_worker +from megatron.training.checkpointing import load_args_from_checkpoint +from megatron.training.global_vars import set_global_variables +from megatron.training.yaml_arguments import validate_yaml + +logger = logging.getLogger(__name__) + + +def initialize_megatron( + extra_args_provider=None, + args_defaults={}, + ignore_unknown_args=False, + allow_no_cuda=False, + skip_mpu_initialization=False, + get_embedding_ranks=None, + get_position_embedding_ranks=None, +): + """Set global variables, initialize distributed, and + set autoresume and random seeds. + `allow_no_cuda` should not be set unless using megatron for cpu only + data processing. In general this arg should not be set unless you know + what you are doing. + Returns a function to finalize distributed env initialization + (optionally, only when args.lazy_mpu_init == True) + """ + if not allow_no_cuda: + # Make sure cuda is available. + assert torch.cuda.is_available(), "Megatron requires CUDA." + + # Parse arguments + args = parse_args(extra_args_provider, ignore_unknown_args) + + # Prep for checkpoint conversion. + if args.ckpt_convert_format is not None: + assert args.ckpt_convert_save is not None + assert args.load is not None + args.exit_on_missing_checkpoint = True + + if args.use_checkpoint_args or args_defaults.get("use_checkpoint_args", False): + assert args.load is not None, "--use-checkpoint-args requires --load argument" + assert args.non_persistent_ckpt_type != "local", ( + "--use-checkpoint-args is not supported with --non_persistent_ckpt_type=local. " + "Two-stage checkpoint loading is not implemented, and all arguments must be defined " + "before initializing LocalCheckpointManager." 
+ ) + load_args_from_checkpoint(args) + + if args.async_save and args.use_persistent_ckpt_worker: + init_persistent_async_worker() + + if args.yaml_cfg is not None: + args = validate_yaml(args, args_defaults) + else: + validate_args(args, args_defaults) + + # set global args, build tokenizer, and set adlr-autoresume, + # tensorboard-writer, and timers. + set_global_variables(args) + + # set logging level + setup_logging() + + # init rerun state + def state_save_func(): + return {'rng_tracker_states': tensor_parallel.get_cuda_rng_tracker().get_states()} + + def state_restore_func(state_dict): + if state_dict['rng_tracker_states']: + tensor_parallel.get_cuda_rng_tracker().set_states(state_dict['rng_tracker_states']) + + args = get_args() + initialize_rerun_state_machine( + state_save_func=state_save_func, + state_restore_func=state_restore_func, + mode=RerunMode(args.rerun_mode), + error_injector=RerunErrorInjector( + error_injection_rate=args.error_injection_rate, + error_injection_type=RerunDiagnostic(args.error_injection_type), + ), + result_rejected_tracker_filename=args.result_rejected_tracker_filename, + ) + + # torch.distributed initialization + def finish_mpu_init(): + args = get_args() + # Pytorch distributed. + _initialize_distributed(get_embedding_ranks, get_position_embedding_ranks) + + # Random seeds for reproducibility. + if args.rank == 0: + print("> setting random seeds to {} ...".format(args.seed)) + _set_random_seed( + args.seed, + args.data_parallel_random_init, + args.te_rng_tracker, + args.inference_rng_tracker, + ) + + if skip_mpu_initialization: + return None + + args = get_args() + if args.lazy_mpu_init: + # TODO is this still a necessary option? + args.use_cpu_initialization = True + # delayed initialization of DDP-related stuff + # We only set basic DDP globals + mpu.set_tensor_model_parallel_world_size(args.tensor_model_parallel_size) + # and return function for external DDP manager + # to call when it has DDP initialized + mpu.set_tensor_model_parallel_rank(args.rank) + return finish_mpu_init + else: + # Megatron's MPU is the master. Complete initialization right away. + finish_mpu_init() + + # Autoresume. + _init_autoresume() + + # Compile dependencies. + _compile_dependencies() + + if args.tp_comm_overlap: + # TODO: Should this be activated with just decoder-tp-comm-overlap too? + _initialize_tp_communicators() + + # No continuation function + return None + + +def _compile_dependencies(): + + args = get_args() + + # ========================= + # Compile dataset C++ code. + # ========================= + # TODO: move this to ninja + if torch.distributed.get_rank() == 0: + start_time = time.time() + print("> compiling dataset index builder ...") + from megatron.core.datasets.utils import compile_helpers + + compile_helpers() + print( + ">>> done with dataset index builder. Compilation time: {:.3f} " + "seconds".format(time.time() - start_time), + flush=True, + ) + + # ================== + # Load fused kernels + # ================== + + # Custom kernel constraints check. + seq_len = args.seq_length + attn_batch_size = ( + args.num_attention_heads / args.tensor_model_parallel_size + ) * args.micro_batch_size + # Constraints on sequence length and attn_batch_size to enable warp based + # optimization and upper triangular optimization (for causal mask) + custom_kernel_constraint = ( + seq_len > 16 and seq_len <= 16384 and seq_len % 4 == 0 and attn_batch_size % 4 == 0 + ) + # Print a warning. 
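The new initialize_megatron above keeps the deferred-initialization contract: it returns None in the common path and returns finish_mpu_init only when args.lazy_mpu_init is set. A minimal usage sketch (not part of this patch; the args_defaults values are hypothetical):

from megatron.training.initialize import initialize_megatron

finish_mpu_init = initialize_megatron(
    args_defaults={"tokenizer_type": "GPT2BPETokenizer"}  # hypothetical defaults
)
if finish_mpu_init is not None:
    # Only returned when args.lazy_mpu_init is set; an external DDP manager calls it once
    # it is ready to finish torch.distributed / model-parallel setup and random seeding.
    finish_mpu_init()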
+ if not ((args.fp16 or args.bf16) and custom_kernel_constraint and args.masked_softmax_fusion): + if args.rank == 0: + print( + "WARNING: constraints for invoking optimized" + " fused softmax kernel are not met. We default" + " back to unfused kernel invocations.", + flush=True, + ) + + # Always build on rank zero first. + if torch.distributed.get_rank() == 0: + start_time = time.time() + print("> compiling and loading fused kernels ...", flush=True) + #fused_kernels.load(args) + torch.distributed.barrier() + else: + torch.distributed.barrier() + #fused_kernels.load(args) + # Simple barrier to make sure all ranks have passed the + # compilation phase successfully before moving on to the + # rest of the program. We think this might ensure that + # the lock is released. + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + print( + ">>> done with compiling and loading fused kernels. " + "Compilation time: {:.3f} seconds".format(time.time() - start_time), + flush=True, + ) + + +def _initialize_tp_communicators(): + """initializing the communicators with user buffers for high-performance tensor-model-parallel + communication overlap""" + + try: + import transformer_engine + import yaml + from transformer_engine.pytorch import module as te_module + + except ImportError: + raise RuntimeError( + "Tensor Parallel Communication/GEMM Overlap optimization needs 'yaml' and " + "'transformer_engine' packages" + ) + + args = get_args() + + if args.tp_comm_overlap_cfg is not None: + with open(args.tp_comm_overlap_cfg, "r") as stream: + ub_cfgs = yaml.safe_load(stream) + else: + ub_cfgs = {} + + if getattr(args, 'decoder_tp_comm_overlap', False): + input_shape = [ + (args.decoder_seq_length * args.micro_batch_size) // args.context_parallel_size, + args.hidden_size, + ] + else: + input_shape = [ + (args.seq_length * args.micro_batch_size) // args.context_parallel_size, + args.hidden_size, + ] + + if is_te_min_version("1.9.0"): + # The process group with the target bootstrap backend is created in Transformer Engine. + te_module.base.initialize_ub( + shape=input_shape, + tp_size=args.tensor_model_parallel_size, + use_fp8=(args.fp8 is not None), + ub_cfgs=ub_cfgs, + bootstrap_backend=args.tp_comm_bootstrap_backend, + ) + else: + if args.tp_comm_bootstrap_backend != 'mpi': + warnings.warn( + f"Transformer Engine v{get_te_version()} supports only MPI bootstrap backend." + ) + # Create a MPI process group to help with TP communication overlap bootstrap. + create_group(backend='mpi', group_desc='TP_BOOTSTRAP_GROUP_MPI') + + te_module.base.initialize_ub( + shape=input_shape, + tp_size=args.tensor_model_parallel_size, + use_fp8=(args.fp8 is not None), + ub_cfgs=ub_cfgs, + ) + + +def _initialize_distributed(get_embedding_ranks, get_position_embedding_ranks): + """Initialize torch.distributed and core model parallel.""" + args = get_args() + + device_count = torch.cuda.device_count() + if torch.distributed.is_initialized(): + + if args.rank == 0: + print( + "torch distributed is already initialized, " "skipping initialization ...", + flush=True, + ) + args.rank = torch.distributed.get_rank() + args.world_size = torch.distributed.get_world_size() + + else: + + if args.rank == 0: + print("> initializing torch distributed ...", flush=True) + # Manually set the device ids. 
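A worked example of the user-buffer input shape computed by _initialize_tp_communicators above (all sizes are illustrative):

seq_length, micro_batch_size, context_parallel_size, hidden_size = 8192, 1, 2, 12288

input_shape = [
    (seq_length * micro_batch_size) // context_parallel_size,  # tokens handled per CP rank
    hidden_size,
]
assert input_shape == [4096, 12288]
# When decoder_tp_comm_overlap is set, decoder_seq_length is used in place of seq_length.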
+ if device_count > 0: + torch.cuda.set_device(args.local_rank) + device_id = torch.device(f'cuda:{args.local_rank}') + else: + device_id = None + + # Call the init process + init_process_group_kwargs = { + 'backend': args.distributed_backend, + 'world_size': args.world_size, + 'rank': args.rank, + 'init_method': args.dist_url, + 'timeout': timedelta(minutes=args.distributed_timeout_minutes), + } + + torch.distributed.init_process_group(**init_process_group_kwargs) + + # Set the tensor model-parallel, pipeline model-parallel, and + # data-parallel communicators. + if device_count > 0: + if mpu.model_parallel_is_initialized(): + print("model parallel is already initialized") + else: + mpu.initialize_model_parallel( + args.tensor_model_parallel_size, + args.pipeline_model_parallel_size, + args.virtual_pipeline_model_parallel_size, + args.pipeline_model_parallel_split_rank, + pipeline_model_parallel_comm_backend=args.pipeline_model_parallel_comm_backend, + context_parallel_size=args.context_parallel_size, + hierarchical_context_parallel_sizes=args.hierarchical_context_parallel_sizes, + expert_model_parallel_size=args.expert_model_parallel_size, + num_distributed_optimizer_instances=args.num_distributed_optimizer_instances, + expert_tensor_parallel_size=args.expert_tensor_parallel_size, + distributed_timeout_minutes=args.distributed_timeout_minutes, + nccl_communicator_config_path=args.nccl_communicator_config_path, + order='tp-cp-ep-dp-pp' if not args.use_tp_pp_dp_mapping else 'tp-cp-ep-pp-dp', + encoder_tensor_model_parallel_size=args.encoder_tensor_model_parallel_size, + encoder_pipeline_model_parallel_size=args.encoder_pipeline_model_parallel_size, + get_embedding_ranks=get_embedding_ranks, + get_position_embedding_ranks=get_position_embedding_ranks, + create_gloo_process_groups=args.enable_gloo_process_groups, + ) + if args.rank == 0: + print( + f"> initialized tensor model parallel with size " + f"{mpu.get_tensor_model_parallel_world_size()}" + ) + print( + f"> initialized pipeline model parallel with size " + f"{mpu.get_pipeline_model_parallel_world_size()}" + ) + + +def _init_autoresume(): + """Set autoresume start time.""" + autoresume = get_adlr_autoresume() + if autoresume: + torch.distributed.barrier() + autoresume.init() + torch.distributed.barrier() + + +def _set_random_seed( + seed_, data_parallel_random_init=False, te_rng_tracker=False, inference_rng_tracker=False +): + """Set random seed for reproducability.""" + if seed_ is not None and seed_ > 0: + # Ensure that different pipeline MP stages get different seeds. 
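The parallel sizes handed to mpu.initialize_model_parallel above have to tile the world size; the data-parallel size is what remains after tensor, pipeline, and context parallelism are accounted for. A small sanity-check sketch with hypothetical sizes (expert and encoder parallel sizes add further constraints not shown here):

world_size = 256
tensor_model_parallel_size = 8
pipeline_model_parallel_size = 4
context_parallel_size = 2

data_parallel_size = world_size // (
    tensor_model_parallel_size * pipeline_model_parallel_size * context_parallel_size
)
assert data_parallel_size == 4
assert (
    tensor_model_parallel_size
    * pipeline_model_parallel_size
    * context_parallel_size
    * data_parallel_size
    == world_size
)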
+ seed = seed_ + (100 * mpu.get_pipeline_model_parallel_rank()) + # Ensure different data parallel ranks get different seeds + if data_parallel_random_init: + seed = seed + (10 * mpu.get_data_parallel_rank()) + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.device_count() > 0: + tensor_parallel.model_parallel_cuda_manual_seed( + seed, te_rng_tracker, inference_rng_tracker + ) + else: + raise ValueError("Seed ({}) should be a positive integer.".format(seed_)) + + +def write_args_to_tensorboard(): + """Write arguments to tensorboard.""" + args = get_args() + writer = get_tensorboard_writer() + if writer: + for arg in vars(args): + writer.add_text(arg, str(getattr(args, arg)), global_step=args.iteration) + + +def set_jit_fusion_options(): + """Set PyTorch JIT layer fusion options.""" + # flags required to enable jit fusion kernels + if is_torch_min_version("2.2.0a0"): + pass # we're using torch.compile for jit fusion + elif is_torch_min_version("1.10.0a0"): + # nvfuser + torch._C._jit_set_profiling_executor(True) + torch._C._jit_set_profiling_mode(True) + torch._C._jit_override_can_fuse_on_cpu(False) + torch._C._jit_override_can_fuse_on_gpu(False) + torch._C._jit_set_texpr_fuser_enabled(False) + torch._C._jit_set_nvfuser_enabled(True) + torch._C._debug_set_autodiff_subgraph_inlining(False) + else: + # legacy pytorch fuser + torch._C._jit_set_profiling_mode(False) + torch._C._jit_set_profiling_executor(False) + torch._C._jit_override_can_fuse_on_cpu(True) + torch._C._jit_override_can_fuse_on_gpu(True) + + _warmup_jit_function() + + +def _warmup_jit_function(): + """Compilie JIT functions before the main training steps""" + args = get_args() + if args.bf16: + dtype = torch.bfloat16 + elif args.fp16: + dtype = torch.float16 + else: + dtype = torch.float32 + + # Warmup fused bias+gelu + bias = torch.rand( + args.ffn_hidden_size // args.tensor_model_parallel_size, dtype=dtype, device="cuda" + ) + input = torch.rand( + ( + args.seq_length // args.context_parallel_size, + args.micro_batch_size, + args.ffn_hidden_size // args.tensor_model_parallel_size, + ), + dtype=dtype, + device="cuda", + ) + # Warmup JIT fusions with the input grad_enable state of both forward + # prop and recomputation + for bias_grad, input_grad in zip([True, True], [False, True]): + bias.requires_grad, input.requires_grad = bias_grad, input_grad + for _ in range(5): + if args.swiglu: + output = bias_swiglu(input, bias) + else: + output = bias_gelu(bias, input) + del bias, input, output + + # Warmup fused bias+dropout+add + if args.sequence_parallel: + seq_length = args.seq_length // mpu.get_tensor_model_parallel_world_size() + else: + seq_length = args.seq_length + input = torch.rand( + (seq_length // args.context_parallel_size, args.micro_batch_size, args.hidden_size), + dtype=dtype, + device="cuda", + ) + residual = torch.rand( + (seq_length // args.context_parallel_size, args.micro_batch_size, args.hidden_size), + dtype=dtype, + device="cuda", + ) + bias = torch.rand((args.hidden_size), dtype=dtype, device="cuda").expand_as(residual) + dropout_rate = 0.1 + # Warmup JIT fusions with the input grad_enable state of both forward + # prop and recomputation + for input_grad, bias_grad, residual_grad in zip([False, True], [True, True], [True, True]): + input.requires_grad = input_grad + bias.requires_grad = bias_grad + residual.requires_grad = residual_grad + for _ in range(5): + output = bias_dropout_add_fused_train([input, bias], residual, dropout_rate) + del bias, input, residual, output + 
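The seed derivation in _set_random_seed above offsets the base seed per pipeline stage and, optionally, per data-parallel rank; a worked example with hypothetical ranks:

seed_ = 1234                          # base seed
pipeline_mp_rank = 3
data_parallel_rank = 7
data_parallel_random_init = True

seed = seed_ + 100 * pipeline_mp_rank         # -> 1534, distinct per pipeline stage
if data_parallel_random_init:
    seed = seed + 10 * data_parallel_rank     # -> 1604, also distinct per DP rank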
torch.cuda.empty_cache() + + +def setup_logging() -> None: + """Sets the default logging level based on cmdline args and env vars. + + Precedence: + 1. Command line argument `--logging-level` + 2. Env var `MEGATRON_LOGGING_LEVEL` + 3. Default logging level (INFO) + + Returns: None + """ + args = get_args() + logging_level = None + env_logging_level = os.getenv('MEGATRON_LOGGING_LEVEL', None) + if env_logging_level is not None: + logging_level = int(env_logging_level) + if args.logging_level is not None: + logging_level = args.logging_level + + if logging_level is not None: + logger.info(f'Setting logging level to {logging_level}') + logging.getLogger().setLevel(logging_level) diff --git a/megatron/training/one_logger_utils.py b/megatron/training/one_logger_utils.py index 3a45712..dd80546 100644 --- a/megatron/training/one_logger_utils.py +++ b/megatron/training/one_logger_utils.py @@ -1,463 +1,466 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import time, os - -from .global_vars import get_one_logger, get_args - - -def get_timestamp_in_ms(): - """Helper function to get timestamp in ms - - Returns: - [int]: [timestamp in ms] - """ - return round(time.time() * 1000.0) - - -def on_train_start(iteration, consumed_train_samples, train_samples, seq_length, - train_iters, save, async_save, log_throughput, - num_floating_point_operations_so_far): - """Function will be called at the start of train function to prepare and track E2E metrics. - - Args: - iteration (int): current iteration number - consumed_train_samples (int): consumed sample numbers so far - train_samples (int): total train sample number - seq_length (int): sequence length - train_iters (type): target iteration - save (str): output directory to save checkpoints to - async_save (bool): apply async checkpointing save - log_throughput (bool): log throughput or not - num_floating_point_operations_so_far (int): flops so far - """ - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - # Get app train loop start time - app_train_loop_start_time = get_timestamp_in_ms() - one_logger.store_set('app_train_loop_start_time', app_train_loop_start_time) - - # Set up initial values in store - one_logger.store_set('iteration_start', iteration) - one_logger.store_set('train_samples_start', consumed_train_samples) - - # Init accumulative metric values in one-logger store - one_logger.store_set('train_iterations_time_msecs_total', 0) - one_logger.store_set('tracked_train_iterations', iteration) - one_logger.store_set('validation_iterations_time_msecs_total', 0) - one_logger.store_set('tracked_validation_iterations', 0) - one_logger.store_set('save_checkpoint_count', 0) - one_logger.store_set('save_checkpoint_sync_time_total', 0.0) - - train_samples_target = train_samples - train_tokens_target = seq_length * train_samples_target - e2e_metrics = { - 'train_samples_start': consumed_train_samples, - 'train_iterations_start': iteration, - 'train_samples_target': train_samples_target, - 'train_iterations_target': train_iters, - 'train_tokens_target': train_tokens_target, - 'app_train_loop_start_time': app_train_loop_start_time, - 'is_save_checkpoint_enabled': save is not None, - 'save_checkpoint_strategy': 'async' if async_save else 'sync', - } - if log_throughput: - e2e_metrics.update({ - 'train_tflop_start': float(num_floating_point_operations_so_far) / (10**12), - }) - one_logger.log_metrics(e2e_metrics) - - -def _produce_e2e_metrics(log_throughput=False, throughput=None): - """ Generate APP 
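A small sketch of the level-resolution precedence that the new setup_logging above implements (the helper name here is hypothetical; only the ordering comes from the patch):

import os

def resolve_logging_level(cli_level=None):
    # Mirrors setup_logging: the env var is read first, then the command-line flag wins.
    level = None
    env_level = os.getenv('MEGATRON_LOGGING_LEVEL', None)
    if env_level is not None:
        level = int(env_level)
    if cli_level is not None:
        level = cli_level
    return level

assert resolve_logging_level(cli_level=10) == 10  # --logging-level beats MEGATRON_LOGGING_LEVEL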
metrics for E2E tracking - NOTE: always call this function after barrier call - - Args: - log_throughput (bool, optional): if log throughput or not. Defaults to False. - throughput (int, optional): throughput value to log. Defaults to None. - - Returns: - dict: all E2E metrics - """ - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - # Unpack and assign local vars - base_metrics = one_logger.store_get('get_e2e_base_metrics')() - (iteration, train_duration, eval_duration, eval_iterations, - total_flops, num_floating_point_operations_so_far, - consumed_train_samples, world_size, seq_length) = base_metrics.values() - - iteration_start = one_logger.store_get('iteration_start') - train_samples_start = one_logger.store_get('train_samples_start') - - train_samples = consumed_train_samples - train_samples_start - train_iterations = iteration - iteration_start - train_iterations_time_msecs_avg = (train_duration * 1000.0) / train_iterations - if eval_iterations: - validation_iterations_time_msecs_avg = (eval_duration * 1000.0) / eval_iterations - else: - validation_iterations_time_msecs_avg = None - - if not one_logger.store_has_key('first_logged_train_iterations_finish_time'): - one_logger.store_set( - 'first_logged_train_iterations_finish_time', - get_timestamp_in_ms() - ) - - train_tokens = train_samples * seq_length - - e2e_metrics = { - 'first_logged_train_iterations_finish_time': \ - one_logger.store_get('first_logged_train_iterations_finish_time'), - 'train_iterations_end': iteration, - 'train_samples_end': consumed_train_samples, - 'train_iterations': train_iterations, - 'train_samples': train_samples, - 'train_iterations_time_msecs_avg': train_iterations_time_msecs_avg, - 'validation_iterations_time_total': eval_duration, - 'validation_iterations_time_msecs_avg': validation_iterations_time_msecs_avg, - 'train_tokens': train_tokens, - 'train_iterations_time_total': train_duration, - 'last_logged_train_iterations_finish_time': get_timestamp_in_ms(), - } - - if log_throughput: - if train_duration: - train_throughput_per_gpu = total_flops / (train_duration * 10**12 * world_size) - else: - train_throughput_per_gpu = 0.0 - - train_throughput_per_gpu_max = one_logger.store_get('train_throughput_per_gpu_max') - if throughput: - train_throughput_per_gpu_max = max(throughput, train_throughput_per_gpu_max) - one_logger.store_set('train_throughput_per_gpu_max', train_throughput_per_gpu_max) - - throughput_metrics = { - 'train_tflop_end': float(num_floating_point_operations_so_far) / (10**12), - 'train_tflop': float(total_flops) / (10**12), - 'train_throughput_per_gpu': train_throughput_per_gpu, - 'train_throughput_per_gpu_max': train_throughput_per_gpu_max, - } - e2e_metrics.update(throughput_metrics) - - # Tracking minimal train/validation iteration duration metrics - # Minimal train iteration duration - current_train_iterations_time_msecs_total = train_duration * 1000.0 - current_train_iteration = iteration - prev_train_iterations_time_msecs_total = one_logger.store_get('train_iterations_time_msecs_total') - tracked_train_iterations = one_logger.store_get('tracked_train_iterations') - - if current_train_iteration > tracked_train_iterations: - train_iterations_time_msecs = ( - (current_train_iterations_time_msecs_total - prev_train_iterations_time_msecs_total) / - (current_train_iteration - tracked_train_iterations) - ) - - if not one_logger.store_has_key('train_iterations_time_msecs_min'): - train_iterations_time_msecs_min = train_iterations_time_msecs - 
else: - train_iterations_time_msecs_min = min( - one_logger.store_get('train_iterations_time_msecs_min'), - train_iterations_time_msecs - ) - one_logger.store_set('train_iterations_time_msecs_min', train_iterations_time_msecs_min) - one_logger.store_set('train_iterations_time_msecs_total', current_train_iterations_time_msecs_total) - one_logger.store_set('tracked_train_iterations', current_train_iteration) - - e2e_metrics.update({ - 'train_iterations_time_msecs_min': train_iterations_time_msecs_min - }) - - # Minimal validation iteration duration - current_validation_iterations_time_msecs_total = eval_duration * 1000.0 - current_validation_iteration = eval_iterations - prev_validation_iterations_time_msecs_total = \ - one_logger.store_get('validation_iterations_time_msecs_total') - tracked_validation_iterations = one_logger.store_get('tracked_validation_iterations') - - if current_validation_iteration > tracked_validation_iterations: - validation_iterations_time_msecs = ( - (current_validation_iterations_time_msecs_total - prev_validation_iterations_time_msecs_total) / - (current_validation_iteration - tracked_validation_iterations) - ) - - # Cache minimal validation iteration duration - if not one_logger.store_has_key('validation_iterations_time_msecs_min'): - validation_iterations_time_msecs_min = validation_iterations_time_msecs - else: - validation_iterations_time_msecs_min = min( - one_logger.store_get('validation_iterations_time_msecs_min'), - validation_iterations_time_msecs - ) - one_logger.store_set('validation_iterations_time_msecs_min', validation_iterations_time_msecs_min) - one_logger.store_set('validation_iterations_time_msecs_total', current_validation_iterations_time_msecs_total) - one_logger.store_set('tracked_validation_iterations', current_validation_iteration) - - e2e_metrics.update({ - 'validation_iterations_time_msecs_min': validation_iterations_time_msecs_min - }) - return e2e_metrics - - -def track_e2e_metrics(log_throughput=False, throughput=None): - """Track E2E application metrics with one-logger - - NOTE: the function should be called after barrier call. - - Args: - log_throughput (bool, optional): if log throughput or not. Defaults to False. - throughput (int, optional): throughput value to log. Defaults to None. - """ - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - e2e_metrics = _produce_e2e_metrics(log_throughput, throughput) - one_logger.log_metrics(e2e_metrics) - - -def on_save_checkpoint_start(async_save): - """Function to be called before save-checkpoint start to generate productive metrics to log after ckpt succeeds. 
- - Args: - async_save (bool): apply async checkpointing save - - Returns: - dict: productive metrics to be stored to DB after ckpt succeeds - """ - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - # Unpack and assign local vars - base_metrics = one_logger.store_get('get_e2e_base_metrics')() - (iteration, train_duration, eval_duration, eval_iterations, - total_flops, num_floating_point_operations_so_far, - consumed_train_samples, world_size, seq_length) = base_metrics.values() - - save_checkpoint_count = one_logger.store_get('save_checkpoint_count') + 1 - one_logger.store_set('save_checkpoint_count', save_checkpoint_count) - one_logger.log_metrics({ - 'train_iterations_save_checkpoint_end': iteration, - 'save_checkpoint_count': save_checkpoint_count, - }) - productive_metrics = { - 'train_tflop_productive_end': float(num_floating_point_operations_so_far) / (10**12), - 'train_iterations_productive_end': iteration, - 'train_samples_productive_end': consumed_train_samples, - 'train_iterations_time_total_productive': train_duration, - 'validation_iterations_time_total_productive': eval_duration, - } - if async_save: - productive_metrics.update({ - 'save_checkpoint_async_count': save_checkpoint_count, - }) - return productive_metrics - - -def on_pretrain_start(): - """ Function to be called at the start of pretrain function to track E2E meta data - """ - args = get_args() - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - job_name = os.environ.get('SLURM_JOB_NAME', None) - app_tag_run_name = job_name if not args.app_tag_run_name else args.app_tag_run_name - app_tag_run_version = args.app_tag_run_version - one_logger.store_set('app_tag_run_name', app_tag_run_name) - one_logger.store_set('app_tag_run_version', app_tag_run_version) - one_logger.store_set('train_throughput_per_gpu_max', 0.0) - - one_logger.log_metrics({ - 'train_iterations_warmup': 5, - 'data_parallel_size' : args.data_parallel_size, - 'context_parallel_size': args.context_parallel_size, - 'global_batch_size': args.global_batch_size, - 'micro_batch_size': args.micro_batch_size, - 'pipeline_model_parallel_size': args.pipeline_model_parallel_size, - 'tensor_model_parallel_size': args.tensor_model_parallel_size, - 'expert_model_parallel_size' : args.expert_model_parallel_size, - 'world_size': args.world_size, - 'model_seq_length': args.seq_length, - 'app_tag_run_name': app_tag_run_name, - 'app_tag_run_version': app_tag_run_version, - 'is_log_throughput_enabled': args.log_throughput, - 'app_run_type': 'training', - 'summary_data_schema_version': '1.0.0', - 'app_metrics_feature_tags': 'full', - }) - -def track_config_flags(train_iters, skip_train, do_train, do_valid, do_test, - dataloader_type, retro_project_dir, retro_cyclic_train_iters): - """Track flags about train/validation/test enablement - - Args: - train_iters (int): target train iteration number - skip_train (bool): flag to skip train iterations - do_train (bool): flags to do train - do_valid (bool): flags to do validation - do_test (bool): flags to do test - dataloader_type (str): dataloader type - retro_project_dir (str): Retro project directory - retro_cyclic_train_iters (int): iteration number for cyclic retro training - """ - one_logger = get_one_logger() - if one_logger: - with one_logger.get_context_manager(): - # Update train_iters for cyclic loader - if dataloader_type == 'cyclic' and retro_project_dir: - assert retro_cyclic_train_iters is not None - train_iters = 
retro_cyclic_train_iters - # Track if training is enabled. Can only be done once args.do_train is assigned after dataloader is built. - train_enabled = train_iters and (not skip_train) and do_train and train_iters > 0 - one_logger.log_metrics({ - 'is_train_iterations_enabled': train_enabled, - 'is_validation_iterations_enabled': bool(do_valid), - 'is_test_iterations_enabled': bool(do_test), - }) - -def on_save_checkpoint_success(productive_metrics, async_save): - """Function to be called after checkpointing succeeds and checkpoint is persisted for storing productive metrics - - Args: - productive_metrics (dict): productive related E2E metrics generated at the start of save checkpoint - async_save (bool): apply async checkpointing save - """ - one_logger = get_one_logger() - - if one_logger: - with one_logger.get_context_manager(): - # Accumulate train_iterations_time_total_productive for current iteration - prod_iteration = productive_metrics['train_iterations_productive_end'] - - # Log start timestamp of first iteration that was successfully checkpointed - if not one_logger.store_has_key('first_checkpoint_success'): - app_train_loop_start_time = one_logger.store_get('app_train_loop_start_time') - one_logger.store_set('first_checkpoint_success', True) - one_logger.log_metrics({ - 'first_saved_train_iterations_start_time': app_train_loop_start_time - }) - - # Handle possible out-of-order async checkpoint callbacks - need_update = True - if one_logger.store_has_key('iters_prod_max'): - need_update = prod_iteration > one_logger.store_get('iters_prod_max') - - if need_update: - # Update cache - one_logger.store_set('iters_prod_max', prod_iteration) - - if async_save: - save_checkpoint_sync_time_total_productive = \ - one_logger.store_pop(f'save_checkpoint_sync_time_total_productive:{prod_iteration}') - last_successful_save_checkpoint_sync_finish_time = \ - one_logger.store_pop(f'save_checkpoint_sync_finish_time:{prod_iteration}') - # Update productive metrics and log to DB - productive_metrics.update({ - 'save_checkpoint_sync_time_total_productive': save_checkpoint_sync_time_total_productive, - 'last_successful_save_checkpoint_sync_finish_time': last_successful_save_checkpoint_sync_finish_time - }) - one_logger.log_metrics(productive_metrics) - - -def on_save_checkpoint_end(save_checkpoint_duration, current_iteration, async_save): - """Function to be called after checkpointing ends - - Args: - save_checkpoint_duration (float): duration of current save checkpoint process - current_iteration (int): current train iteration step number - async_save (bool): apply async checkpointing save - """ - one_logger = get_one_logger() - if one_logger: - with one_logger.get_context_manager(): - save_checkpoint_sync_finish_time = get_timestamp_in_ms() - - # Track finish timestamp of the sync part of first successful save checkpoint - if (one_logger.store_has_key('first_checkpoint_success') - and not one_logger.store_has_key('first_successful_checkpoint_end')): - one_logger.store_set('first_successful_checkpoint_end', True) - one_logger.log_metrics({ - 'first_successful_save_checkpoint_sync_finish_time': save_checkpoint_sync_finish_time - }) - - save_checkpoint_sync_count = one_logger.store_get('save_checkpoint_count') - - # accumulate total sync checkpointing duration - save_checkpoint_sync_time_total = \ - one_logger.store_get('save_checkpoint_sync_time_total') + save_checkpoint_duration - one_logger.store_set('save_checkpoint_sync_time_total', save_checkpoint_sync_time_total) - - e2e_metrics = {} - if 
async_save: - # Cache total sync checkpointing duration - one_logger.store_set( - f'save_checkpoint_sync_time_total_productive:{current_iteration}', - save_checkpoint_sync_time_total - ) - # Cache finish time for current iteration - one_logger.store_set(f'save_checkpoint_sync_finish_time:{current_iteration}', - save_checkpoint_sync_finish_time) - else: - e2e_metrics.update({ - # Track productive total time directly for sync ckpt - 'save_checkpoint_sync_time_total_productive': save_checkpoint_sync_time_total, - 'last_successful_save_checkpoint_sync_finish_time': save_checkpoint_sync_finish_time, - }) - - # Tracking min & max value sync checkpointing duration - # For the first comparison - if not one_logger.store_has_key('save_checkpoint_sync_time_max'): - one_logger.store_set('save_checkpoint_sync_time_max', save_checkpoint_duration) - if not one_logger.store_has_key('save_checkpoint_sync_time_min'): - one_logger.store_set('save_checkpoint_sync_time_min', save_checkpoint_duration) - - save_checkpoint_sync_time_max = max( - one_logger.store_get('save_checkpoint_sync_time_max'), - save_checkpoint_duration - ) - save_checkpoint_sync_time_min = min( - one_logger.store_get('save_checkpoint_sync_time_min'), - save_checkpoint_duration - ) - one_logger.store_set('save_checkpoint_sync_time_max', save_checkpoint_sync_time_max) - one_logger.store_set('save_checkpoint_sync_time_min', save_checkpoint_sync_time_min) - e2e_metrics.update({ - 'save_checkpoint_sync_count': save_checkpoint_sync_count, - 'save_checkpoint_sync_time_max': save_checkpoint_sync_time_max, - 'save_checkpoint_sync_time_min': save_checkpoint_sync_time_min, - 'save_checkpoint_sync_time_total': save_checkpoint_sync_time_total, - }) - one_logger.log_metrics(e2e_metrics) - - -def track_app_tag(batch_size, world_size, seq_length): - """Track app_tag and app_tag ID - - Args: - batch_size (int): current batch size - world_size (int): the number of processes of current job - seq_length (int): current sequence length - """ - # Track app tag & app tag ID - one_logger = get_one_logger() - if one_logger: - with one_logger.get_context_manager(): - app_tag_run_name = one_logger.store_get('app_tag_run_name') - app_tag_run_version = one_logger.store_get('app_tag_run_version') - current_app_tag = (f'{app_tag_run_name}_{app_tag_run_version}_{batch_size}' - f'_{world_size}_{seq_length}') - one_logger.log_app_tag(current_app_tag) - - -def finish(): - """Flush E2E metrics to remote server - """ - one_logger = get_one_logger() - if one_logger: - with one_logger.get_context_manager(): - one_logger.finish() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import time, os + +from .global_vars import get_one_logger, get_args + +_one_logger_utils_version = "1.0.0-mlm" + + +def get_timestamp_in_ms(): + """Helper function to get timestamp in ms + + Returns: + [int]: [timestamp in ms] + """ + return round(time.time() * 1000.0) + + +def on_train_start(iteration, consumed_train_samples, train_samples, seq_length, + train_iters, save, async_save, log_throughput, + num_floating_point_operations_so_far): + """Function will be called at the start of train function to prepare and track E2E metrics. 
+ + Args: + iteration (int): current iteration number + consumed_train_samples (int): consumed sample numbers so far + train_samples (int): total train sample number + seq_length (int): sequence length + train_iters (type): target iteration + save (str): output directory to save checkpoints to + async_save (bool): apply async checkpointing save + log_throughput (bool): log throughput or not + num_floating_point_operations_so_far (int): flops so far + """ + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + # Get app train loop start time + app_train_loop_start_time = get_timestamp_in_ms() + one_logger.store_set('app_train_loop_start_time', app_train_loop_start_time) + + # Set up initial values in store + one_logger.store_set('iteration_start', iteration) + one_logger.store_set('train_samples_start', consumed_train_samples) + + # Init accumulative metric values in one-logger store + one_logger.store_set('train_iterations_time_msecs_total', 0) + one_logger.store_set('tracked_train_iterations', iteration) + one_logger.store_set('validation_iterations_time_msecs_total', 0) + one_logger.store_set('tracked_validation_iterations', 0) + one_logger.store_set('save_checkpoint_count', 0) + one_logger.store_set('save_checkpoint_sync_time_total', 0.0) + + train_samples_target = train_samples + train_tokens_target = seq_length * train_samples_target + e2e_metrics = { + 'train_samples_start': consumed_train_samples, + 'train_iterations_start': iteration, + 'train_samples_target': train_samples_target, + 'train_iterations_target': train_iters, + 'train_tokens_target': train_tokens_target, + 'app_train_loop_start_time': app_train_loop_start_time, + 'is_save_checkpoint_enabled': save is not None, + 'save_checkpoint_strategy': 'async' if async_save else 'sync', + } + if log_throughput: + e2e_metrics.update({ + 'train_tflop_start': float(num_floating_point_operations_so_far) / (10**12), + }) + one_logger.log_metrics(e2e_metrics) + + +def _produce_e2e_metrics(log_throughput=False, throughput=None): + """ Generate APP metrics for E2E tracking + NOTE: always call this function after barrier call + + Args: + log_throughput (bool, optional): if log throughput or not. Defaults to False. + throughput (int, optional): throughput value to log. Defaults to None. 
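A quick worked example of the token target that on_train_start above derives and logs (numbers are hypothetical):

seq_length = 4096
train_samples_target = 1_000_000
train_tokens_target = seq_length * train_samples_target
assert train_tokens_target == 4_096_000_000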
+ + Returns: + dict: all E2E metrics + """ + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + # Unpack and assign local vars + base_metrics = one_logger.store_get('get_e2e_base_metrics')() + (iteration, train_duration, eval_duration, eval_iterations, + total_flops_since_current_train_start, num_floating_point_operations_so_far, + consumed_train_samples, world_size, seq_length) = base_metrics.values() + + iteration_start = one_logger.store_get('iteration_start') + train_samples_start = one_logger.store_get('train_samples_start') + + train_samples = consumed_train_samples - train_samples_start + train_iterations = iteration - iteration_start + train_iterations_time_msecs_avg = (train_duration * 1000.0) / train_iterations + if eval_iterations: + validation_iterations_time_msecs_avg = (eval_duration * 1000.0) / eval_iterations + else: + validation_iterations_time_msecs_avg = None + + if not one_logger.store_has_key('first_logged_train_iterations_finish_time'): + one_logger.store_set( + 'first_logged_train_iterations_finish_time', + get_timestamp_in_ms() + ) + + train_tokens = train_samples * seq_length + + e2e_metrics = { + 'first_logged_train_iterations_finish_time': \ + one_logger.store_get('first_logged_train_iterations_finish_time'), + 'train_iterations_end': iteration, + 'train_samples_end': consumed_train_samples, + 'train_iterations': train_iterations, + 'train_samples': train_samples, + 'train_iterations_time_msecs_avg': train_iterations_time_msecs_avg, + 'validation_iterations_time_total': eval_duration, + 'validation_iterations_time_msecs_avg': validation_iterations_time_msecs_avg, + 'train_tokens': train_tokens, + 'train_iterations_time_total': train_duration, + 'last_logged_train_iterations_finish_time': get_timestamp_in_ms(), + } + + if log_throughput: + if train_duration: + train_throughput_per_gpu = total_flops_since_current_train_start / (train_duration * 10**12 * world_size) + else: + train_throughput_per_gpu = 0.0 + + train_throughput_per_gpu_max = one_logger.store_get('train_throughput_per_gpu_max') + if throughput: + train_throughput_per_gpu_max = max(throughput, train_throughput_per_gpu_max) + one_logger.store_set('train_throughput_per_gpu_max', train_throughput_per_gpu_max) + + throughput_metrics = { + 'train_tflop_end': float(num_floating_point_operations_so_far) / (10**12), + 'train_tflop': float(total_flops_since_current_train_start) / (10**12), + 'train_throughput_per_gpu': train_throughput_per_gpu, + 'train_throughput_per_gpu_max': train_throughput_per_gpu_max, + } + e2e_metrics.update(throughput_metrics) + + # Tracking minimal train/validation iteration duration metrics + # Minimal train iteration duration + current_train_iterations_time_msecs_total = train_duration * 1000.0 + current_train_iteration = iteration + prev_train_iterations_time_msecs_total = one_logger.store_get('train_iterations_time_msecs_total') + tracked_train_iterations = one_logger.store_get('tracked_train_iterations') + + if current_train_iteration > tracked_train_iterations: + train_iterations_time_msecs = ( + (current_train_iterations_time_msecs_total - prev_train_iterations_time_msecs_total) / + (current_train_iteration - tracked_train_iterations) + ) + + if not one_logger.store_has_key('train_iterations_time_msecs_min'): + train_iterations_time_msecs_min = train_iterations_time_msecs + else: + train_iterations_time_msecs_min = min( + one_logger.store_get('train_iterations_time_msecs_min'), + train_iterations_time_msecs + ) + 
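Worked example of the per-GPU throughput computed above, reported in TFLOP/s (all numbers are illustrative):

total_flops_since_current_train_start = 3.2e18  # FLOPs accumulated since this training run started
train_duration = 7200.0                         # seconds spent in training iterations
world_size = 512                                # number of GPUs

train_throughput_per_gpu = total_flops_since_current_train_start / (
    train_duration * 10**12 * world_size
)
# -> roughly 0.87 TFLOP/s per GPU; train_throughput_per_gpu_max keeps the best value seen so far.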
one_logger.store_set('train_iterations_time_msecs_min', train_iterations_time_msecs_min) + one_logger.store_set('train_iterations_time_msecs_total', current_train_iterations_time_msecs_total) + one_logger.store_set('tracked_train_iterations', current_train_iteration) + + e2e_metrics.update({ + 'train_iterations_time_msecs_min': train_iterations_time_msecs_min + }) + + # Minimal validation iteration duration + current_validation_iterations_time_msecs_total = eval_duration * 1000.0 + current_validation_iteration = eval_iterations + prev_validation_iterations_time_msecs_total = \ + one_logger.store_get('validation_iterations_time_msecs_total') + tracked_validation_iterations = one_logger.store_get('tracked_validation_iterations') + + if current_validation_iteration > tracked_validation_iterations: + validation_iterations_time_msecs = ( + (current_validation_iterations_time_msecs_total - prev_validation_iterations_time_msecs_total) / + (current_validation_iteration - tracked_validation_iterations) + ) + + # Cache minimal validation iteration duration + if not one_logger.store_has_key('validation_iterations_time_msecs_min'): + validation_iterations_time_msecs_min = validation_iterations_time_msecs + else: + validation_iterations_time_msecs_min = min( + one_logger.store_get('validation_iterations_time_msecs_min'), + validation_iterations_time_msecs + ) + one_logger.store_set('validation_iterations_time_msecs_min', validation_iterations_time_msecs_min) + one_logger.store_set('validation_iterations_time_msecs_total', current_validation_iterations_time_msecs_total) + one_logger.store_set('tracked_validation_iterations', current_validation_iteration) + + e2e_metrics.update({ + 'validation_iterations_time_msecs_min': validation_iterations_time_msecs_min + }) + return e2e_metrics + + +def track_e2e_metrics(log_throughput=False, throughput=None): + """Track E2E application metrics with one-logger + + NOTE: the function should be called after barrier call. + + Args: + log_throughput (bool, optional): if log throughput or not. Defaults to False. + throughput (int, optional): throughput value to log. Defaults to None. + """ + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + e2e_metrics = _produce_e2e_metrics(log_throughput, throughput) + one_logger.log_metrics(e2e_metrics) + + +def on_save_checkpoint_start(async_save): + """Function to be called before save-checkpoint start to generate productive metrics to log after ckpt succeeds. 
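The *_time_msecs_min metrics above are windowed: each update averages only the iterations completed since the previous measurement. A numeric sketch with hypothetical totals:

prev_total_msecs, prev_iteration = 120_000.0, 100   # totals at the previous measurement
curr_total_msecs, curr_iteration = 150_000.0, 125   # totals now

window_avg_msecs = (curr_total_msecs - prev_total_msecs) / (curr_iteration - prev_iteration)
assert window_avg_msecs == 1200.0
# The stored minimum keeps the smallest such window average seen during the run.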
+ + Args: + async_save (bool): apply async checkpointing save + + Returns: + dict: productive metrics to be stored to DB after ckpt succeeds + """ + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + # Unpack and assign local vars + base_metrics = one_logger.store_get('get_e2e_base_metrics')() + (iteration, train_duration, eval_duration, eval_iterations, + total_flops_since_current_train_start, num_floating_point_operations_so_far, + consumed_train_samples, world_size, seq_length) = base_metrics.values() + + save_checkpoint_count = one_logger.store_get('save_checkpoint_count') + 1 + one_logger.store_set('save_checkpoint_count', save_checkpoint_count) + one_logger.log_metrics({ + 'train_iterations_save_checkpoint_end': iteration, + 'save_checkpoint_count': save_checkpoint_count, + }) + productive_metrics = { + 'train_tflop_productive_end': float(num_floating_point_operations_so_far) / (10**12), + 'train_iterations_productive_end': iteration, + 'train_samples_productive_end': consumed_train_samples, + 'train_iterations_time_total_productive': train_duration, + 'validation_iterations_time_total_productive': eval_duration, + } + if async_save: + productive_metrics.update({ + 'save_checkpoint_async_count': save_checkpoint_count, + }) + return productive_metrics + + +def on_pretrain_start(): + """ Function to be called at the start of pretrain function to track E2E meta data + """ + args = get_args() + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + job_name = os.environ.get('SLURM_JOB_NAME', None) + app_tag_run_name = job_name if not args.app_tag_run_name else args.app_tag_run_name + app_tag_run_version = args.app_tag_run_version + one_logger.store_set('app_tag_run_name', app_tag_run_name) + one_logger.store_set('app_tag_run_version', app_tag_run_version) + one_logger.store_set('train_throughput_per_gpu_max', 0.0) + + one_logger.log_metrics({ + 'train_iterations_warmup': 5, + 'data_parallel_size' : args.data_parallel_size, + 'context_parallel_size': args.context_parallel_size, + 'global_batch_size': args.global_batch_size, + 'micro_batch_size': args.micro_batch_size, + 'pipeline_model_parallel_size': args.pipeline_model_parallel_size, + 'tensor_model_parallel_size': args.tensor_model_parallel_size, + 'expert_model_parallel_size' : args.expert_model_parallel_size, + 'world_size': args.world_size, + 'model_seq_length': args.seq_length, + 'app_tag_run_name': app_tag_run_name, + 'app_tag_run_version': app_tag_run_version, + 'is_log_throughput_enabled': args.log_throughput, + 'app_run_type': 'training', + 'summary_data_schema_version': '1.0.0', + 'app_metrics_feature_tags': 'full', + 'one_logger_utils_version': _one_logger_utils_version, + }) + +def track_config_flags(train_iters, skip_train, do_train, do_valid, do_test, + dataloader_type, retro_project_dir, retro_cyclic_train_iters): + """Track flags about train/validation/test enablement + + Args: + train_iters (int): target train iteration number + skip_train (bool): flag to skip train iterations + do_train (bool): flags to do train + do_valid (bool): flags to do validation + do_test (bool): flags to do test + dataloader_type (str): dataloader type + retro_project_dir (str): Retro project directory + retro_cyclic_train_iters (int): iteration number for cyclic retro training + """ + one_logger = get_one_logger() + if one_logger: + with one_logger.get_context_manager(): + # Update train_iters for cyclic loader + if dataloader_type == 'cyclic' and retro_project_dir: 
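on_pretrain_start above falls back to the SLURM job name when no explicit app tag run name is supplied; a short sketch (the environment value is hypothetical):

import os

args_app_tag_run_name = None                        # i.e. no explicit app tag run name given
job_name = os.environ.get('SLURM_JOB_NAME', None)   # e.g. 'gpt3-175b-run42'
app_tag_run_name = job_name if not args_app_tag_run_name else args_app_tag_run_name
# track_app_tag later combines it as f'{app_tag_run_name}_{version}_{batch_size}_{world_size}_{seq_length}'.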
+ assert retro_cyclic_train_iters is not None + train_iters = retro_cyclic_train_iters + # Track if training is enabled. Can only be done once args.do_train is assigned after dataloader is built. + train_enabled = train_iters and (not skip_train) and do_train and train_iters > 0 + one_logger.log_metrics({ + 'is_train_iterations_enabled': train_enabled, + 'is_validation_iterations_enabled': bool(do_valid), + 'is_test_iterations_enabled': bool(do_test), + }) + +def on_save_checkpoint_success(productive_metrics, async_save): + """Function to be called after checkpointing succeeds and checkpoint is persisted for storing productive metrics + + Args: + productive_metrics (dict): productive related E2E metrics generated at the start of save checkpoint + async_save (bool): apply async checkpointing save + """ + one_logger = get_one_logger() + + if one_logger: + with one_logger.get_context_manager(): + # Accumulate train_iterations_time_total_productive for current iteration + prod_iteration = productive_metrics['train_iterations_productive_end'] + + # Log start timestamp of first iteration that was successfully checkpointed + if not one_logger.store_has_key('first_checkpoint_success'): + app_train_loop_start_time = one_logger.store_get('app_train_loop_start_time') + one_logger.store_set('first_checkpoint_success', True) + one_logger.log_metrics({ + 'first_saved_train_iterations_start_time': app_train_loop_start_time + }) + + # Handle possible out-of-order async checkpoint callbacks + need_update = True + if one_logger.store_has_key('iters_prod_max'): + need_update = prod_iteration > one_logger.store_get('iters_prod_max') + + if need_update: + # Update cache + one_logger.store_set('iters_prod_max', prod_iteration) + + if async_save: + save_checkpoint_sync_time_total_productive = \ + one_logger.store_pop(f'save_checkpoint_sync_time_total_productive:{prod_iteration}') + last_successful_save_checkpoint_sync_finish_time = \ + one_logger.store_pop(f'save_checkpoint_sync_finish_time:{prod_iteration}') + # Update productive metrics and log to DB + productive_metrics.update({ + 'save_checkpoint_sync_time_total_productive': save_checkpoint_sync_time_total_productive, + 'last_successful_save_checkpoint_sync_finish_time': last_successful_save_checkpoint_sync_finish_time + }) + one_logger.log_metrics(productive_metrics) + + +def on_save_checkpoint_end(save_checkpoint_duration, current_iteration, async_save): + """Function to be called after checkpointing ends + + Args: + save_checkpoint_duration (float): duration of current save checkpoint process + current_iteration (int): current train iteration step number + async_save (bool): apply async checkpointing save + """ + one_logger = get_one_logger() + if one_logger: + with one_logger.get_context_manager(): + save_checkpoint_sync_finish_time = get_timestamp_in_ms() + + # Track finish timestamp of the sync part of first successful save checkpoint + if (one_logger.store_has_key('first_checkpoint_success') + and not one_logger.store_has_key('first_successful_checkpoint_end')): + one_logger.store_set('first_successful_checkpoint_end', True) + one_logger.log_metrics({ + 'first_successful_save_checkpoint_sync_finish_time': save_checkpoint_sync_finish_time + }) + + save_checkpoint_sync_count = one_logger.store_get('save_checkpoint_count') + + # accumulate total sync checkpointing duration + save_checkpoint_sync_time_total = \ + one_logger.store_get('save_checkpoint_sync_time_total') + save_checkpoint_duration + one_logger.store_set('save_checkpoint_sync_time_total', 
save_checkpoint_sync_time_total) + + e2e_metrics = {} + if async_save: + # Cache total sync checkpointing duration + one_logger.store_set( + f'save_checkpoint_sync_time_total_productive:{current_iteration}', + save_checkpoint_sync_time_total + ) + # Cache finish time for current iteration + one_logger.store_set(f'save_checkpoint_sync_finish_time:{current_iteration}', + save_checkpoint_sync_finish_time) + else: + e2e_metrics.update({ + # Track productive total time directly for sync ckpt + 'save_checkpoint_sync_time_total_productive': save_checkpoint_sync_time_total, + 'last_successful_save_checkpoint_sync_finish_time': save_checkpoint_sync_finish_time, + }) + + # Tracking min & max value sync checkpointing duration + # For the first comparison + if not one_logger.store_has_key('save_checkpoint_sync_time_max'): + one_logger.store_set('save_checkpoint_sync_time_max', save_checkpoint_duration) + if not one_logger.store_has_key('save_checkpoint_sync_time_min'): + one_logger.store_set('save_checkpoint_sync_time_min', save_checkpoint_duration) + + save_checkpoint_sync_time_max = max( + one_logger.store_get('save_checkpoint_sync_time_max'), + save_checkpoint_duration + ) + save_checkpoint_sync_time_min = min( + one_logger.store_get('save_checkpoint_sync_time_min'), + save_checkpoint_duration + ) + one_logger.store_set('save_checkpoint_sync_time_max', save_checkpoint_sync_time_max) + one_logger.store_set('save_checkpoint_sync_time_min', save_checkpoint_sync_time_min) + e2e_metrics.update({ + 'save_checkpoint_sync_count': save_checkpoint_sync_count, + 'save_checkpoint_sync_time_max': save_checkpoint_sync_time_max, + 'save_checkpoint_sync_time_min': save_checkpoint_sync_time_min, + 'save_checkpoint_sync_time_total': save_checkpoint_sync_time_total, + }) + one_logger.log_metrics(e2e_metrics) + + +def track_app_tag(batch_size, world_size, seq_length): + """Track app_tag and app_tag ID + + Args: + batch_size (int): current batch size + world_size (int): the number of processes of current job + seq_length (int): current sequence length + """ + # Track app tag & app tag ID + one_logger = get_one_logger() + if one_logger: + with one_logger.get_context_manager(): + app_tag_run_name = one_logger.store_get('app_tag_run_name') + app_tag_run_version = one_logger.store_get('app_tag_run_version') + current_app_tag = (f'{app_tag_run_name}_{app_tag_run_version}_{batch_size}' + f'_{world_size}_{seq_length}') + one_logger.log_app_tag(current_app_tag) + + +def finish(): + """Flush E2E metrics to remote server + """ + one_logger = get_one_logger() + if one_logger: + with one_logger.get_context_manager(): + one_logger.finish() diff --git a/megatron/training/tokenizer/multimodal_tokenizer.py b/megatron/training/tokenizer/multimodal_tokenizer.py index c5ea95c..a2dd669 100644 --- a/megatron/training/tokenizer/multimodal_tokenizer.py +++ b/megatron/training/tokenizer/multimodal_tokenizer.py @@ -1,274 +1,289 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Multimodal tokenizer.""" -from dataclasses import dataclass -from typing import Dict, List, Union - -import numpy as np - -from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer - -# Mark tokens that will be ignored in the loss function with this value. -# Same ignore_index in https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html -from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN - -IMAGE_TAGS = { - "nvlm": ("", ""), - "internvl": ("", ""), - "": None, # Image tag not used. 
-} - - -# The default mistral template raises exceptions so we use a custom one. -mistral_custom_template = """ -{{- bos_token }} -{%- for message in messages %} - {%- if message['role'] == 'user' %} - {{- '[INST] ' + message['content'] + '[/INST]' }} - {%- elif message['role'] == 'assistant' %} - {{- ' ' + message['content'] + eos_token}} - {%- endif %} -{%- endfor %} -{% if add_generation_prompt %}{{ ' ' }}{% endif %} -""" - - -nvlm_yi_34b_template = "{{- bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" - - -qwen2p0_custom_template = "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" - - - -@dataclass -class PromptConfig: - """Config options for different prompt formats.""" - - # How many tokens are used for the assistant prefix, e.g. "<|im_start|>assistant\n". - # Used for masking the assistant prefix. - assistant_prefix_len: int - # Padding token ID. - pad_token_id: int - # For overriding the default chat format template. - custom_chat_template: str - # If the tokenizer inserts BOS token by default. - has_bos: bool - # If the tokenizer supports a separate role for system messages. - has_system_role: bool - - -class MultimodalTokenizer(MegatronTokenizer): - """Multimodal Tokenizer.""" - - def __init__( - self, - tokenizer: MegatronTokenizer, - prompt_format: str, - special_tokens: List[str], - image_tag_type: str, - ): - """Tokenizer with a support for non-text inputs. - - Note: Currently, only HuggingFaceTokenizer is supported as the underlying text tokenizer. - - Args: - tokenizer (MegatronTokenizer): Underlying tokenizer. - prompt_format (str): Prompt format for the tokenizer. - special_tokens (List[str]): Non-text tokens. - image_tag_type (str): Image tag to apply, if any. For example . - """ - self._vocab_size = len(tokenizer) - - num_added_tokens = tokenizer.add_tokens(special_tokens, special_tokens=True) - assert num_added_tokens == len( - special_tokens - ), f"failed to add {len(special_tokens)} special tokens; only added {num_added_tokens}" - - self._tokenizer = tokenizer - - if prompt_format == "mistral": - # Mistral format doesn't have prefix for the assistant message. - self._prompt_config = PromptConfig( - assistant_prefix_len=0, - pad_token_id=tokenizer.unk_token_id, - custom_chat_template=mistral_custom_template, - has_bos=True, - has_system_role=False, - ) - elif prompt_format == "llama3": - # "<|start_header_id|>assistant<|end_header|>\n\n" is the prefix for assistant messages. 
- self._prompt_config = PromptConfig( - assistant_prefix_len=4, - pad_token_id=tokenizer.convert_tokens_to_ids("<|end_of_text|>"), - custom_chat_template=None, - has_bos=True, - has_system_role=True, - ) - elif prompt_format == "nvlm-yi-34b": - self._prompt_config = PromptConfig( - assistant_prefix_len=4, - pad_token_id=tokenizer.pad_token_id, - custom_chat_template=nvlm_yi_34b_template, - has_bos=True, - has_system_role=True, - ) - elif prompt_format == "chatml": - # "<|im_start|>assistant\n" is the prefix for assistant messages - self._prompt_config = PromptConfig( - assistant_prefix_len=3, - pad_token_id=tokenizer.pad_token_id, - custom_chat_template=None, - has_bos=False, - has_system_role=True, - ) - elif prompt_format == "qwen2p0": - # "<|im_start|>assistant\n" is the prefix for assistant messages - self._prompt_config = PromptConfig( - assistant_prefix_len=3, - pad_token_id=tokenizer.pad_token_id, - custom_chat_template=qwen2p0_custom_template, - has_bos=False, - has_system_role=True, - ) - else: - raise NotImplementedError("unknown multimodal tokenizer type", prompt_format) - - self._image_tag = IMAGE_TAGS[image_tag_type] - - def _apply_image_tag(self, text: Union[str, List[Dict]]): - """Surround with image tags such as and .""" - if self._image_tag is None: - return text - - replacement = f"{self._image_tag[0]}{IMAGE_TOKEN}{self._image_tag[1]}" - - if isinstance(text, list): - for turn in text: - turn["content"] = turn["content"].replace(IMAGE_TOKEN, replacement) - else: - text = text.replace(IMAGE_TOKEN, replacement) - - return text - - def tokenize(self, text: Union[str, List[Dict]]): - """Tokenize conversation or string input.""" - if isinstance(text, list): - # This code path is used by the inference code currently. - return self.tokenize_conversation(text, False, True).tolist() - - return self._encode(text) - - def _encode(self, text: str): - """Tokenize text input.""" - text = self._apply_image_tag(text) - return self._tokenizer.encode(text) - - def tokenize_conversation( - self, conversation: List[Dict], return_target: bool, add_generation_prompt: bool - ): - """Convert a conversation to tokens. - - Args: - conversation (List[Dict]): Sequence of system/user/assistant messages. - Must be in the following format: - [ - {"role": "user", "content": "something"}, - {"role": "assistant", "content": "something2"}, - ] - return_target (bool): Return target tokens with system and assistant masked. - add_generation_prompt (bool): Add assistant prefix to the end. - """ - # Skip system message if the tokenizer doesn't have a system role. - if not self._prompt_config.has_system_role and conversation[0]["role"] == "system": - conversation = conversation[1:] - - # Apply possible image tag. - conversation = self._apply_image_tag(conversation) - - tokens = self._tokenizer.apply_chat_template( - conversation, - tokenize=True, - add_generation_prompt=add_generation_prompt, - return_assistant_token_mask=False, - return_tensors="np", - chat_template=self._prompt_config.custom_chat_template, - )[0] - - if not return_target: - return tokens - - target = tokens.copy() - - # Mask system and user tokens in the target. - idx = 0 - for turn_idx, turn in enumerate(conversation): - if len(turn["content"]) == 0: - raise ValueError(f"empty turn in conversation: {conversation}. Skipping.") - - turn_tokens = self._tokenizer.apply_chat_template( - [turn], tokenize=True, chat_template=self._prompt_config.custom_chat_template - ) - - # There should be only one BOS at the very beginning. 
- # After the first turn, skip BOS token. - if self._prompt_config.has_bos and turn_idx > 0: - turn_tokens = turn_tokens[1:] - - turn_len = len(turn_tokens) - - role = turn["role"] - if role in ("system", "user"): - target[idx : idx + turn_len] = IGNORE_INDEX - elif role == "assistant": - if IMAGE_TOKEN in turn["content"]: - raise RuntimeError(f"{IMAGE_TOKEN} not allowed in assistant content!") - - if self._prompt_config.assistant_prefix_len > 0: - target[idx : idx + self._prompt_config.assistant_prefix_len] = IGNORE_INDEX - - assert np.allclose( - tokens[idx : idx + turn_len], turn_tokens - ), f"expected turn tokens to match tokens in conversation {conversation}" - - idx += turn_len - - assert idx == len(tokens), f"mismatch in target masking the conversation {conversation}" - - return tokens, target - - def convert_tokens_to_ids(self, tokens: List[str]): - """Convert tokens to IDs.""" - return self._tokenizer.convert_tokens_to_ids(tokens) - - def detokenize(self, tokens: List[int]): - """Detokenize tokens.""" - return self._tokenizer.decode(tokens) - - def get_special_tokens(self): - """Get special tokens.""" - return self._tokenizer.get_added_vocab() - - @property - def pad(self): - """Pad token ID.""" - return self._prompt_config.pad_token_id - - @property - def eod(self): - """End of sentence token ID.""" - return self._tokenizer.eos_token_id - - @property - def vocab(self): - """Vocab.""" - return NotImplementedError("not used") - - @property - def inv_vocab(self): - """Inverse vocab.""" - return NotImplementedError("not used") - - @property - def vocab_size(self): - """Vocabulary size.""" - return self._vocab_size +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Multimodal tokenizer.""" +from dataclasses import dataclass +from typing import Dict, List, Union + +import numpy as np + +from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer + +# Mark tokens that will be ignored in the loss function with this value. +# Same ignore_index in https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html +from megatron.core.models.multimodal.llava_model import IGNORE_INDEX, IMAGE_TOKEN + +IMAGE_TAGS = { + "nvlm": ("", ""), + "internvl": ("", ""), + "": None, # Image tag not used. +} + + +# The default mistral template raises exceptions so we use a custom one. +mistral_custom_template = """ +{{- bos_token }} +{%- for message in messages %} + {%- if message['role'] == 'user' %} + {{- '[INST] ' + message['content'] + '[/INST]' }} + {%- elif message['role'] == 'assistant' %} + {{- ' ' + message['content'] + eos_token}} + {%- endif %} +{%- endfor %} +{% if add_generation_prompt %}{{ ' ' }}{% endif %} +""" + + +nvlm_yi_34b_template = "{{- bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" + + +qwen2p0_custom_template = "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}" + + +# Note: this is the same template as https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct/blob/main/tokenizer_config.json#L2053 +# but we removed the forced system message. 
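+# Illustrative sketch of how this template is consumed: tokenize_conversation() below passes
+# it to the underlying HuggingFace tokenizer's apply_chat_template() as chat_template.
+# The conversation values here are made-up examples; hf_tokenizer stands for that tokenizer:
+#
+#     conversation = [
+#         {"role": "user", "content": "Describe the image."},
+#         {"role": "assistant", "content": "A cat on a sofa."},
+#     ]
+#     tokens = hf_tokenizer.apply_chat_template(
+#         conversation,
+#         tokenize=True,
+#         add_generation_prompt=False,
+#         return_tensors="np",
+#         chat_template=llama3p1_chat_template,
+#     )[0]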
+llama3p1_chat_template = """{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = none %}\n{%- endif %}\n\n{%- if system_message is not none %}{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{%-endif %}{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n""" + + + +@dataclass +class PromptConfig: + """Config options for different prompt formats.""" + + # How many tokens are used for the assistant prefix, e.g. "<|im_start|>assistant\n". + # Used for masking the assistant prefix. + assistant_prefix_len: int + # Padding token ID. + pad_token_id: int + # For overriding the default chat format template. + custom_chat_template: str + # If the tokenizer inserts BOS token by default. + has_bos: bool + # If the tokenizer supports a separate role for system messages. + has_system_role: bool + + +class MultimodalTokenizer(MegatronTokenizer): + """Multimodal Tokenizer.""" + + def __init__( + self, + tokenizer: MegatronTokenizer, + prompt_format: str, + special_tokens: List[str], + image_tag_type: str, + ): + """Tokenizer with a support for non-text inputs. + + Note: Currently, only HuggingFaceTokenizer is supported as the underlying text tokenizer. + + Args: + tokenizer (MegatronTokenizer): Underlying tokenizer. + prompt_format (str): Prompt format for the tokenizer. + special_tokens (List[str]): Non-text tokens. + image_tag_type (str): Image tag to apply, if any. For example . + """ + self._vocab_size = len(tokenizer) + + num_added_tokens = tokenizer.add_tokens(special_tokens, special_tokens=True) + assert num_added_tokens == len( + special_tokens + ), f"failed to add {len(special_tokens)} special tokens; only added {num_added_tokens}" + + self._tokenizer = tokenizer + + if prompt_format == "mistral": + # Mistral format doesn't have prefix for the assistant message. 
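+            # Illustrative note: because assistant_prefix_len is 0 here, tokenize_conversation()
+            # masks only the system/user spans in the target; the whole assistant turn is kept.
+            # Made-up example (see tokenize_conversation below):
+            #   conversation = [{"role": "user", "content": "Hi"},
+            #                   {"role": "assistant", "content": "Hello!"}]
+            #   tokens, target = tokenizer.tokenize_conversation(conversation, True, False)
+            #   # target: IGNORE_INDEX over the user span, real token ids over " Hello!" + EOS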
+ self._prompt_config = PromptConfig( + assistant_prefix_len=0, + pad_token_id=tokenizer.unk_token_id, + custom_chat_template=mistral_custom_template, + has_bos=True, + has_system_role=False, + ) + elif prompt_format == "llama3": + # "<|start_header_id|>assistant<|end_header|>\n\n" is the prefix for assistant messages. + self._prompt_config = PromptConfig( + assistant_prefix_len=4, + pad_token_id=tokenizer.convert_tokens_to_ids("<|end_of_text|>"), + custom_chat_template=None, + has_bos=True, + has_system_role=True, + ) + elif prompt_format in ("llama3p1", "llama3p2"): + # "<|start_header_id|>assistant<|end_header|>\n\n" is the prefix for assistant messages. + # That occupies 4 tokens and can be masked in the target. + self._prompt_config = PromptConfig( + assistant_prefix_len=4, + pad_token_id=tokenizer.convert_tokens_to_ids("<|finetune_right_pad_id|>"), + custom_chat_template=llama3p1_chat_template, + has_bos=True, + has_system_role=True, + ) + elif prompt_format == "nvlm-yi-34b": + self._prompt_config = PromptConfig( + assistant_prefix_len=4, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=nvlm_yi_34b_template, + has_bos=True, + has_system_role=True, + ) + elif prompt_format == "chatml": + # "<|im_start|>assistant\n" is the prefix for assistant messages + self._prompt_config = PromptConfig( + assistant_prefix_len=3, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=None, + has_bos=False, + has_system_role=True, + ) + elif prompt_format in ("qwen2p0", "qwen2p5"): + # "<|im_start|>assistant\n" is the prefix for assistant messages + self._prompt_config = PromptConfig( + assistant_prefix_len=3, + pad_token_id=tokenizer.pad_token_id, + custom_chat_template=qwen2p0_custom_template, + has_bos=False, + has_system_role=True, + ) + else: + raise NotImplementedError("unknown multimodal tokenizer type", prompt_format) + + self._image_tag = IMAGE_TAGS[image_tag_type] + + def _apply_image_tag(self, text: Union[str, List[Dict]]): + """Surround with image tags such as and .""" + if self._image_tag is None: + return text + + replacement = f"{self._image_tag[0]}{IMAGE_TOKEN}{self._image_tag[1]}" + + if isinstance(text, list): + for turn in text: + turn["content"] = turn["content"].replace(IMAGE_TOKEN, replacement) + else: + text = text.replace(IMAGE_TOKEN, replacement) + + return text + + def tokenize(self, text: Union[str, List[Dict]]): + """Tokenize conversation or string input.""" + if isinstance(text, list): + # This code path is used by the inference code currently. + return self.tokenize_conversation(text, False, True).tolist() + + return self._encode(text) + + def _encode(self, text: str): + """Tokenize text input.""" + text = self._apply_image_tag(text) + return self._tokenizer.encode(text) + + def tokenize_conversation( + self, conversation: List[Dict], return_target: bool, add_generation_prompt: bool + ): + """Convert a conversation to tokens. + + Args: + conversation (List[Dict]): Sequence of system/user/assistant messages. + Must be in the following format: + [ + {"role": "user", "content": "something"}, + {"role": "assistant", "content": "something2"}, + ] + return_target (bool): Return target tokens with system and assistant masked. + add_generation_prompt (bool): Add assistant prefix to the end. + """ + # Skip system message if the tokenizer doesn't have a system role. + if not self._prompt_config.has_system_role and conversation[0]["role"] == "system": + conversation = conversation[1:] + + # Apply possible image tag. 
+ conversation = self._apply_image_tag(conversation) + + tokens = self._tokenizer.apply_chat_template( + conversation, + tokenize=True, + add_generation_prompt=add_generation_prompt, + return_assistant_token_mask=False, + return_tensors="np", + chat_template=self._prompt_config.custom_chat_template, + )[0] + + if not return_target: + return tokens + + target = tokens.copy() + + # Mask system and user tokens in the target. + idx = 0 + for turn_idx, turn in enumerate(conversation): + if len(turn["content"]) == 0: + raise ValueError(f"empty turn in conversation: {conversation}. Skipping.") + + turn_tokens = self._tokenizer.apply_chat_template( + [turn], tokenize=True, chat_template=self._prompt_config.custom_chat_template + ) + + # There should be only one BOS at the very beginning. + # After the first turn, skip BOS token. + if self._prompt_config.has_bos and turn_idx > 0: + turn_tokens = turn_tokens[1:] + + turn_len = len(turn_tokens) + + role = turn["role"] + if role in ("system", "user"): + target[idx : idx + turn_len] = IGNORE_INDEX + elif role == "assistant": + if IMAGE_TOKEN in turn["content"]: + raise RuntimeError(f"{IMAGE_TOKEN} not allowed in assistant content!") + + if self._prompt_config.assistant_prefix_len > 0: + target[idx : idx + self._prompt_config.assistant_prefix_len] = IGNORE_INDEX + + assert np.allclose( + tokens[idx : idx + turn_len], turn_tokens + ), f"expected turn tokens to match tokens in conversation {conversation}" + + idx += turn_len + + assert idx == len(tokens), f"mismatch in target masking the conversation {conversation}" + + return tokens, target + + def convert_tokens_to_ids(self, tokens: List[str]): + """Convert tokens to IDs.""" + return self._tokenizer.convert_tokens_to_ids(tokens) + + def detokenize(self, tokens: List[int]): + """Detokenize tokens.""" + return self._tokenizer.decode(tokens) + + def get_special_tokens(self): + """Get special tokens.""" + return self._tokenizer.get_added_vocab() + + @property + def pad(self): + """Pad token ID.""" + return self._prompt_config.pad_token_id + + @property + def eod(self): + """End of sentence token ID.""" + return self._tokenizer.eos_token_id + + @property + def vocab(self): + """Vocab.""" + return NotImplementedError("not used") + + @property + def inv_vocab(self): + """Inverse vocab.""" + return NotImplementedError("not used") + + @property + def vocab_size(self): + """Vocabulary size.""" + return self._vocab_size diff --git a/megatron/training/tokenizer/tokenizer.py b/megatron/training/tokenizer/tokenizer.py index f3412a9..164d19a 100644 --- a/megatron/training/tokenizer/tokenizer.py +++ b/megatron/training/tokenizer/tokenizer.py @@ -1,919 +1,837 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -"""Megatron tokenizers.""" - -import base64 -import json -import math -import types -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Dict, List, Optional - -from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer - -from .bert_tokenization import FullTokenizer as FullBertTokenizer -from .gpt2_tokenization import GPT2Tokenizer -from megatron.training.tokenizer.multimodal_tokenizer import MultimodalTokenizer -from transformers import Qwen2Tokenizer - - -def build_tokenizer(args, **kwargs): - """Initialize tokenizer.""" - if args.rank == 0: - print('> building {} tokenizer ...'.format(args.tokenizer_type), flush=True) - - # Select and instantiate the tokenizer. 
- if args.tokenizer_type == 'BertWordPieceLowerCase': - assert args.vocab_file is not None - tokenizer = _BertWordPieceTokenizer( - vocab_file=args.vocab_file, lower_case=True, vocab_extra_ids=args.vocab_extra_ids - ) - elif args.tokenizer_type == 'BertWordPieceCase': - assert args.vocab_file is not None - tokenizer = _BertWordPieceTokenizer( - vocab_file=args.vocab_file, lower_case=False, vocab_extra_ids=args.vocab_extra_ids - ) - elif args.tokenizer_type == 'GPT2BPETokenizer': - assert args.vocab_file is not None - assert args.merge_file is not None - tokenizer = _GPT2BPETokenizer(args.vocab_file, args.merge_file) - elif args.tokenizer_type == 'SentencePieceTokenizer': - assert args.tokenizer_model is not None - tokenizer = _SentencePieceTokenizer( - args.tokenizer_model, vocab_extra_ids=args.vocab_extra_ids - ) - elif args.tokenizer_type == 'GPTSentencePieceTokenizer': - assert args.tokenizer_model is not None - tokenizer = _GPTSentencePieceTokenizer(args.tokenizer_model) - elif args.tokenizer_type == 'HuggingFaceTokenizer': - tokenizer = _HuggingFaceTokenizer(args.tokenizer_model, **kwargs) - elif args.tokenizer_type == 'Llama2Tokenizer': - assert args.tokenizer_model is not None - tokenizer = _Llama2Tokenizer(args.tokenizer_model) - elif args.tokenizer_type == 'Llama3Tokenizer': - assert args.tokenizer_model is not None - tokenizer = _Llama3Tokenizer(args.tokenizer_model) - elif args.tokenizer_type == 'QwenTokenizer': - tokenizer = _Qwen2Tokenizer(args.vocab_file, args.merge_file) - elif args.tokenizer_type == 'TikTokenizer': - assert args.tokenizer_model is not None - assert args.tiktoken_pattern is not None - assert args.tiktoken_pattern in {"v1", "v2"} - pattern = PATTERN_TIKTOKEN if args.tiktoken_pattern == "v1" else PATTERN_TIKTOKEN_V2 - tokenizer = CustomTikTokenizer( - path=args.tokenizer_model, - pattern=pattern, - vocab_size=args.vocab_size, - num_special_tokens=args.tiktoken_num_special_tokens, - special_tokens=args.tiktoken_special_tokens, - ) - elif args.tokenizer_type == 'NullTokenizer': - assert args.vocab_size is not None - tokenizer = _NullTokenizer(args.vocab_size) - elif args.tokenizer_type == "MultimodalTokenizer": - try: - import transformers - except ImportError: - raise ImportError( - "MultimodalTokenizer currently requires transformers library to be installed" - ) - - kwargs = dict() - if args.tokenizer_prompt_format == "nvlm-yi-34b": - kwargs = { - "from_slow": True, - "legacy": False, - "add_bos_token": True, - } - - # Currently, only HuggingFace tokenizers are supported. - underlying_tokenizer = transformers.AutoTokenizer.from_pretrained( - pretrained_model_name_or_path=args.tokenizer_model, **kwargs - ) - - tokenizer = MultimodalTokenizer( - underlying_tokenizer, - args.tokenizer_prompt_format, - args.special_tokens, - args.image_tag_type, - ) - else: - raise NotImplementedError('{} tokenizer is not ' 'implemented.'.format(args.tokenizer_type)) - - # Add vocab size (if not already set from a checkpoint). 
- if getattr(args, "padded_vocab_size", None) is None: - args.padded_vocab_size = _vocab_size_with_padding(tokenizer.vocab_size, args) - - return tokenizer - - -def _vocab_size_with_padding(orig_vocab_size, args, logging_enabled=True): - """Pad vocab size so it is divisible by model parallel size and - still having GPU friendly size.""" - - after = orig_vocab_size - multiple = args.make_vocab_size_divisible_by * args.tensor_model_parallel_size - after = int(math.ceil(after / multiple) * multiple) - if args.rank == 0 and logging_enabled: - print( - ' > padded vocab (size: {}) with {} dummy tokens ' - '(new size: {})'.format(orig_vocab_size, after - orig_vocab_size, after), - flush=True, - ) - return after - - -class _HuggingFaceTokenizer(MegatronTokenizer): - def __init__(self, pretrained_model_name_or_path, **kwargs): - super().__init__(pretrained_model_name_or_path, **kwargs) - try: - import transformers - except ImportError: - raise EnvironmentError( - f"The transformers library must be installed to use huggingface_tokenizer_provider" - ) - - # TODO(bnorick): download tokenizer once to lustre and use force offline to make sure all tasks read it from there - self._tokenizer = transformers.AutoTokenizer.from_pretrained( - pretrained_model_name_or_path=pretrained_model_name_or_path, **kwargs - ) - self._vocab = self._tokenizer.get_vocab() - self._inv_vocab = {token_id: token for token, token_id in self._vocab.items()} - - @property - def vocab_size(self): - return len(self._tokenizer) - - @property - def vocab(self): - """Dictionary from vocab text token to id token.""" - return self._vocab - - @property - def inv_vocab(self): - """Dictionary from vocab id token to text token.""" - return self._inv_vocab - - @property - def decoder(self): - return self._inv_vocab - - def tokenize(self, text, **kwargs): - return self._tokenizer(text, **kwargs).input_ids - - def detokenize(self, token_ids, **kwargs): - return self._tokenizer.decode(token_ids, **kwargs) - - def offsets(self, ids: list[int], text: str) -> list[int]: - retok_ids: "transformers.BatchEncoding" = self._tokenizer(text) - offsets, next_start_idx = [], 0 - for i in range(len(ids)): - span = retok_ids.token_to_chars(i) - if span is not None: - offsets.append(span.start) - next_start_idx = span.end - else: - offsets.append(next_start_idx) - return offsets - - @property - def eod(self): - return self._tokenizer.eos_token_id - - -class _BertWordPieceTokenizer(MegatronTokenizer): - """Original BERT wordpiece tokenizer.""" - - def __init__(self, vocab_file, lower_case=True, vocab_extra_ids=0): - super().__init__(vocab_file, lower_case=lower_case, vocab_extra_ids=vocab_extra_ids) - self.tokenizer = FullBertTokenizer(vocab_file, do_lower_case=lower_case) - self.cls_id = self.tokenizer.vocab['[CLS]'] - self.sep_id = self.tokenizer.vocab['[SEP]'] - self.pad_id = self.tokenizer.vocab['[PAD]'] - self.mask_id = self.tokenizer.vocab['[MASK]'] - self._additional_special_tokens = [] - - # (dsachan) Add BOS and EOS tokens - SPECIAL_TOKENS = {'eos_token': '[EOS]', 'bos_token': '[BOS]'} - self._bos_token = '[BOS]' - self.add_token(self._bos_token) - self._bos_token_id = self.vocab.get(self._bos_token) - - self._eos_token = '[EOS]' - self.add_token(self._eos_token) - self._eos_token_id = self.vocab.get(self._eos_token) - - # (dsachan) Add additional special tokens - # These can be used as sentinel tokens in T5 model inputs - additional_special_tokens = [] - additional_special_tokens.extend( - ["".format(i) for i in range(vocab_extra_ids)] - ) - 
self.add_additional_special_tokens(additional_special_tokens) - - def add_token(self, token): - if token not in self.vocab: - self.inv_vocab[self.vocab_size] = token - # self.vocab_size comes from len(vocab) - # and it will increase as we add elements - self.vocab[token] = self.vocab_size - - def add_additional_special_tokens(self, tokens_list): - setattr(self, "additional_special_tokens", tokens_list) - for value in tokens_list: - self.add_token(value) - - @property - def vocab_size(self): - return self.tokenizer.vocab_size() - - @property - def vocab(self): - return self.tokenizer.vocab - - @property - def inv_vocab(self): - return self.tokenizer.inv_vocab - - def tokenize(self, text): - text_tokens = self.tokenizer.tokenize(text) - return self.tokenizer.convert_tokens_to_ids(text_tokens) - - def decode(self, ids): - tokens = self.tokenizer.convert_ids_to_tokens(ids) - return self.tokenizer.convert_tokens_to_string(tokens) - - def detokenize(self, token_ids): - """Copy of decode() method for inference pipeline compatibility""" - return self.decode(token_ids) - - def decode_token_ids(self, token_ids): - tokens = self.tokenizer.convert_ids_to_tokens(token_ids) - exclude_list = ['[PAD]', '[CLS]'] - non_pads = [t for t in tokens if t not in exclude_list] - - result = "" - for s in non_pads: - if s.startswith("##"): - result += s[2:] - else: - result += " " + s - - return result - - @property - def cls(self): - return self.cls_id - - @property - def sep(self): - return self.sep_id - - @property - def pad(self): - return self.pad_id - - @property - def mask(self): - return self.mask_id - - @property - def bos(self): - """Id of the beginning of sentence token in the vocabulary.""" - return self._bos_token_id - - @property - def eos(self): - """Id of the end of sentence token in the vocabulary.""" - return self._eos_token_id - - @property - def eod(self): - """Copy of eod property for inference pipeline compatibility""" - return self.eos - - @property - def bos_token(self): - """Beginning of sentence token id""" - return self._bos_token - - @property - def eos_token(self): - """End of sentence token id""" - return self._eos_token - - @property - def additional_special_tokens(self): - """All the additional special tokens you may want to use (list of strings).""" - return self._additional_special_tokens - - @property - def additional_special_tokens_ids(self): - """Ids of all the additional special tokens in the vocabulary (list of integers).""" - return [self.vocab.get(token) for token in self._additional_special_tokens] - - @additional_special_tokens.setter - def additional_special_tokens(self, value): - self._additional_special_tokens = value - - -class _GPT2BPETokenizer(MegatronTokenizer): - """Original GPT2 BPE tokenizer.""" - - def __init__(self, vocab_file, merge_file): - super().__init__(vocab_file, merge_file) - - self.tokenizer = GPT2Tokenizer( - vocab_file, merge_file, errors='replace', special_tokens=[], max_len=None - ) - self.eod_id = self.tokenizer.encoder['<|endoftext|>'] - - @property - def vocab_size(self): - return len(self.tokenizer.encoder) - - @property - def vocab(self): - return self.tokenizer.encoder - - @property - def inv_vocab(self): - return self.tokenizer.decoder - - def tokenize(self, text): - return self.tokenizer.encode(text) - - def detokenize(self, token_ids): - return self.tokenizer.decode(token_ids) - - @property - def eod(self): - return self.eod_id - - -class _SentencePieceTokenizer(MegatronTokenizer): - """SentencePieceTokenizer-Megatron wrapper""" - - def 
__init__(self, model_file, vocab_extra_ids=0): - super().__init__(model_file, vocab_extra_ids=vocab_extra_ids) - - import sentencepiece - - self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=model_file) - self._initalize(vocab_extra_ids) - - def _populate_vocab(self): - self._vocab = {} - self._inv_vocab = {} - - for i in range(len(self.tokenizer)): - t = self.tokenizer.id_to_piece(i) - self._inv_vocab[i] = t - self._vocab[t] = i - - def _initalize(self, vocab_extra_ids): - self._populate_vocab() - self._special_tokens = {} - self._inv_special_tokens = {} - - self._t5_tokens = [] - - def _add_special_token(t): - if t not in self._vocab: - next_id = len(self._vocab) - self._vocab[t] = next_id - self._inv_vocab[next_id] = t - self._special_tokens[t] = self._vocab[t] - self._inv_special_tokens[self._vocab[t]] = t - - _add_special_token('') - self._cls_id = self._vocab[''] - _add_special_token('') - self._sep_id = self._vocab[''] - _add_special_token('') - self._eod_id = self._vocab[''] - _add_special_token('') - self._mask_id = self._vocab[''] - - pad_id = self.tokenizer.pad_id() - try: - pad_token = self.tokenizer.id_to_piece(pad_id) - except IndexError: - pad_token = '' - _add_special_token(pad_token) - self._pad_id = self._vocab[pad_token] - - bos_id = self.tokenizer.bos_id() - try: - bos_token = self.tokenizer.id_to_piece(bos_id) - except IndexError: - bos_token = '' - _add_special_token(bos_token) - self._bos_id = self._vocab[bos_token] - - eos_id = self.tokenizer.eos_id() - try: - eos_token = self.tokenizer.id_to_piece(eos_id) - except IndexError: - eos_token = '' - _add_special_token(eos_token) - self._eos_id = self._vocab[eos_token] - - for i in range(vocab_extra_ids): - t = "".format(i) - _add_special_token(t) - self._t5_tokens += [t] - - @property - def vocab_size(self): - return len(self._vocab) - - @property - def vocab(self): - return self._vocab - - @property - def inv_vocab(self): - return self._inv_vocab - - @property - def decoder(self): - return self._inv_vocab - - @property - def encoder(self): - return self._vocab - - # From: - # https://github.com/NVIDIA/NeMo/blob/c8fa217e811d60d11d014827c7f3845ff6c99ae7/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py#L89 - def tokenize(self, text): - ids = [] - idx = 0 - - while 1: - indices = {} - for token in self._special_tokens: - try: - indices[token] = text[idx:].index(token) - except ValueError: - continue - if len(indices) == 0: - break - - next_token = min(indices, key=indices.get) - next_idx = idx + indices[next_token] - - ids.extend(self.tokenizer.encode_as_ids(text[idx:next_idx])) - ids.append(self._special_tokens[next_token]) - idx = next_idx + len(next_token) - - ids.extend(self.tokenizer.encode_as_ids(text[idx:])) - return ids - - # From: - # https://github.com/NVIDIA/NeMo/blob/c8fa217e811d60d11d014827c7f3845ff6c99ae7/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py#L125 - def detokenize(self, ids): - text = "" - last_i = 0 - - for i, id in enumerate(ids): - if id in self._inv_special_tokens: - text += self.tokenizer.decode_ids(ids[last_i:i]) + " " - text += self._inv_special_tokens[id] + " " - last_i = i + 1 - - text += self.tokenizer.decode_ids(ids[last_i:]) - return text - - def offsets(self, ids: list[int], text: str) -> list[int]: - return [p.begin for p in self.tokenizer.decode_ids_as_immutable_proto(ids).pieces] - - @property - def cls(self): - return self._cls_id - - @property - def sep(self): - return self._sep_id - - @property - def pad(self): - return self._pad_id - - 
@property - def bos(self): - return self._bos_id - - @property - def eod(self): - return self._eod_id - - @property - def eos(self): - return self._eos_id - - @property - def mask(self): - return self._mask_id - - @property - def additional_special_tokens_ids(self): - return [self.vocab[k] for k in self._t5_tokens] - - -class _GPTSentencePieceTokenizer(_SentencePieceTokenizer): - """SentencePieceTokenizer-Megatron wrapper""" - - def __init__(self, model_file): - super().__init__(model_file, vocab_extra_ids=0) - - def _initalize(self, vocab_extra_ids): - self._populate_vocab() - - self._pad_id = self.tokenizer.pad_id() - self._bos_id = self.tokenizer.bos_id() - self._eos_id = self.tokenizer.eos_id() - - def tokenize(self, text): - return self.tokenizer.encode_as_ids(text) - - def detokenize(self, ids): - return self.tokenizer.decode_ids(ids) - - @property - def cls(self): - return -1 - - @property - def sep(self): - return -1 - - @property - def mask(self): - return -1 - - @property - def eod(self): - return self._eos_id - - @property - def additional_special_tokens_ids(self): - return None - - -class _Llama2Tokenizer(_SentencePieceTokenizer): - """SentencePieceTokenizer-Megatron wrapper""" - - def __init__(self, model_file): - super().__init__(model_file, vocab_extra_ids=0) - - def _initalize(self, vocab_extra_ids): - self._populate_vocab() - - # BOS / EOS token IDs - self.n_words: int = self.tokenizer.vocab_size() - self.bos_id: int = self.tokenizer.bos_id() - self.eos_id: int = self.tokenizer.eos_id() - self.pad_id: int = self.tokenizer.pad_id() - assert self.tokenizer.vocab_size() == self.tokenizer.get_piece_size() - - def tokenize(self, s: str, bos=True, eos=False): - '''Default args for text completion, not chat/dialog.''' - assert type(s) is str - t = self.tokenizer.encode(s) - if bos: - t = [self.bos_id] + t - if eos: - t = t + [self.eos_id] - return t - - def detokenize(self, ids): - return self.tokenizer.decode_ids(ids) - - @property - def cls(self): - return -1 - - @property - def sep(self): - return -1 - - @property - def mask(self): - return -1 - - @property - def eod(self): - return self.eos_id - - @property - def additional_special_tokens_ids(self): - return None - - -class _Llama3Tokenizer(MegatronTokenizer): - """tiktokenTokenizer-Megatron llama3 改写""" - # https://github.com/meta-llama/llama3/blob/main/llama/tokenizer.py - - def __init__(self, model_file): - super().__init__(model_file) - from pathlib import Path - import tiktoken - from tiktoken.load import load_tiktoken_bpe - tokenizer_path=model_file - special_tokens = [ - "<|begin_of_text|>", - "<|end_of_text|>", - "<|reserved_special_token_0|>", - "<|reserved_special_token_1|>", - "<|reserved_special_token_2|>", - "<|reserved_special_token_3|>", - "<|start_header_id|>", - "<|end_header_id|>", - "<|reserved_special_token_4|>", - "<|eot_id|>", # end of turn - ] + [f"<|reserved_special_token_{i}|>" for i in range (5, 256 - 5)] - mergeable_ranks = load_tiktoken_bpe(tokenizer_path) - self.tokenizer = tiktoken.Encoding(tokenizer_path, - pat_str = r"(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\r\n\p{L}\p{N}]?\p{L}+|\p{N}{1,3}| ?[^\s\p{L}\p{N}]+[\r\n]*|\s*[\r\n]+|\s+(?!\S)|\s+", - mergeable_ranks=mergeable_ranks, - special_tokens={token: len (mergeable_ranks) + i for i, token in enumerate (special_tokens)}, - ) - - self.eod_id = self.tokenizer.encode("<|end_of_text|>", allowed_special="all")[0] - @property - def vocab_size(self): - return self.tokenizer.n_vocab - - @property - def vocab(self): - return self.tokenizer.encode - - @property - 
def inv_vocab(self): - return self.tokenizer.encode - - def tokenize(self, text): - return self.tokenizer.encode(text) - - def detokenize(self, token_ids): - return self.tokenizer.encode(token_ids) - - @property - def eod(self): - return self.eod_id - -class _Qwen2Tokenizer(MegatronTokenizer): - def __init__(self, vocab_file, merge_file,extra_vocab_size=0): - super().__init__(vocab_file, merge_file) - self.tokenizer = Qwen2Tokenizer(vocab_file, merge_file) - self.extra_vocab_size = extra_vocab_size - self.tokenizer.add_special_tokens(special_tokens_dict=dict(pad_token="<|extra_0|>")) - - @property - def vocab_size(self): - return len(self.tokenizer.encoder) + self.extra_vocab_size - - @property - def vocab(self): - return self.tokenizer.encoder - - @property - def inv_vocab(self): - return self.tokenizer.decoder - - def tokenize(self, text): - return self.tokenizer.encode(text) - - def detokenize(self, token_ids): - return self.tokenizer.decode(token_ids) - - @property - def eod(self): - return self.tokenizer.eos_token_id - - @property - def eos_token(self): - return self.tokenizer.eos_token - - @property - def pad_token_id(self): - return self.tokenizer.pad_token_id - - -def reload_mergeable_ranks(path: str, max_vocab: Optional[int] = None) -> Dict[bytes, int]: - """ - Reload our tokenizer JSON file and convert it to Tiktoken format. - """ - from ..utils import print_rank_0 # To prevent circular import. - - assert path.endswith(".json") - - # reload vocab - with open(path, "r") as f: - vocab = json.load(f) - assert isinstance(vocab, list) - print_rank_0(f"Vocab size: {len(vocab)}") - if max_vocab is not None: - vocab = vocab[:max_vocab] - print_rank_0(f"Cutting vocab to first {len(vocab)} tokens.") - - # build ranks - ranks: Dict[bytes, int] = {} - for i, x in enumerate(vocab): - assert x.keys() == {"rank", "token_bytes", "token_str"} - assert x["rank"] == i - merge = base64.b64decode(x["token_bytes"]) - assert i >= 256 or merge == bytes([i]) - ranks[merge] = x["rank"] - - # sanity check - assert len(ranks) == len(vocab) - assert set(ranks.values()) == set(range(len(ranks))) - - return ranks - - -PATTERN_TIKTOKEN = ( - r"[^\r\n\p{L}\p{N}]?+\p{L}+|\p{N}| ?[^\s\p{L}\p{N}]++[\r\n]*|\s*[\r\n]|\s+(?!\S)|\s+" -) -PATTERN_TIKTOKEN_V2 = "[^\\r\\n\\p{L}\\p{N}]?[\\p{Lu}\\p{Lt}\\p{Lm}\\p{Lo}\\p{M}]*[\\p{Ll}\\p{Lm}\\p{Lo}\\p{M}]+|[^\\r\\n\\p{L}\\p{N}]?[\\p{Lu}\\p{Lt}\\p{Lm}\\p{Lo}\\p{M}]+[\\p{Ll}\\p{Lm}\\p{Lo}\\p{M}]*|\\p{N}| ?[^\\s\\p{L}\\p{N}]+[\\r\\n/]*|\\s*[\\r\\n]+|\\s+(?!\\S)|\\s+" - - -class CustomTikTokenizer(MegatronTokenizer): - def __init__( - self, - path: str, - pattern: str, - vocab_size: Optional[int], - num_special_tokens: int, - special_tokens: Optional[List[str]], - ): - super().__init__( - path, - pattern=pattern, - vocab_size=vocab_size, - num_special_tokens=num_special_tokens, - special_tokens=special_tokens, - ) - import tiktoken - - from .. import print_rank_0 # To prevent circular import. - - if vocab_size is None: - vocab_size = 2**17 # Fallback vocab size is 131072. 
- self._vocab_size = vocab_size - - SPECIAL_TOKENS = ["", "", ""] - if special_tokens is None: - special_tokens = SPECIAL_TOKENS.copy() - assert len(special_tokens) == len( - set(special_tokens) - ), f"Special tokens should be unique: {special_tokens}" - assert len(special_tokens) <= num_special_tokens < self._vocab_size - assert set(SPECIAL_TOKENS) <= set( - special_tokens - ), f"Custom special tokens should include {SPECIAL_TOKENS}" - - special_filler = [ - "".format(id=i) for i in range(len(special_tokens), num_special_tokens) - ] - if special_filler: - print_rank_0(f"Adding special tokens {special_filler[0]}, ..., {special_filler[-1]}") - special_tokens = special_tokens + special_filler - assert len(set(special_tokens)) == len(special_tokens) == num_special_tokens, special_tokens - inner_vocab_size = self._vocab_size - num_special_tokens - - token_to_id_without_special_tokens = reload_mergeable_ranks( - path, max_vocab=inner_vocab_size - ) - # Create space for special tokens. - token_to_id_without_special_tokens = { - t: i + num_special_tokens for t, i in token_to_id_without_special_tokens.items() - } - - special_tokens = {t: i for i, t in enumerate(special_tokens)} - self._unk_id = special_tokens[""] - self._bos_id = special_tokens[""] - self._eos_id = special_tokens[""] - - # Create tiktoken model. - self._model = tiktoken.Encoding( - name=Path(path).parent.name, - pat_str=pattern, - mergeable_ranks=token_to_id_without_special_tokens, - special_tokens=special_tokens, - ) - - # Create final _id_to_token and _token_to_id data structures with special tokens inserted - # into appropriate locations. - assert set(token_to_id_without_special_tokens.keys()).isdisjoint(set(special_tokens.keys())) - self._token_to_id = token_to_id_without_special_tokens.copy() - self._token_to_id.update(special_tokens) - self._id_to_token = {v: k for k, v in self._token_to_id.items()} - assert set(range(self._vocab_size)) == set(self._id_to_token.keys()) - - @property - def bos(self) -> int: - return self._bos_id - - @property - def eos(self) -> int: - return self._eos_id - - @property - def unk(self) -> int: - return self._unk_id - - @property - def eod(self) -> int: - return self._eos_id - - @property - def vocab(self): - return self._token_to_id - - @property - def inv_vocab(self): - return self._id_to_token - - def tokenize(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: - tokens = self._model.encode_ordinary(s) - if bos: - tokens = [self.bos, *tokens] - if eos: - tokens = [*tokens, self.eos] - - return tokens - - def detokenize(self, tokens: List[int]) -> str: - return self._model.decode(tokens) - - def offsets(self, ids: list[int], text: str) -> list[int]: - return self._model.decode_with_offsets(ids)[1] - - @property - def vocab_size(self) -> int: - return self._vocab_size - - @property - def encoder(self): - return self._token_to_id - - @property - def decoder(self): - return self._id_to_token - - -class _NullTokenizer(MegatronTokenizer): - def __init__(self, vocab_size): - super().__init__(None, vocab_size=vocab_size) - self._vocab_size_without_eod = int(vocab_size) - self._eod_id = self._vocab_size_without_eod - - def tokenize(self, text): - return [int(x) for x in text.split(' ')] - - def detokenize(self, ids): - text = [str(x) for x in ids] - return ' '.join(text) - - def offsets(self, ids: list[int], text: str) -> list[int]: - offsets, start_idx = [], 0 - for id_ in ids: - offsets.append(start_idx) - start_idx += 1 + len(str(id_)) - return offsets - - @property - def 
vocab_size(self): - return self._vocab_size_without_eod + 1 - - @property - def vocab(self): - raise NotImplementedError - - @property - def inv_vocab(self): - raise NotImplementedError - - @property - def cls(self): - return -1 - - @property - def sep(self): - return -1 - - @property - def mask(self): - return -1 - - @property - def eod(self): - return self._eod_id - - @property - def additional_special_tokens_ids(self): - return None +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +"""Megatron tokenizers.""" + +import base64 +import json +import math +import types +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Dict, List, Optional + +from megatron.core.datasets.megatron_tokenizer import MegatronTokenizer + +from .bert_tokenization import FullTokenizer as FullBertTokenizer +from .gpt2_tokenization import GPT2Tokenizer +from megatron.training.tokenizer.multimodal_tokenizer import MultimodalTokenizer + + +def build_tokenizer(args, **kwargs): + """Initialize tokenizer.""" + if args.rank == 0: + print('> building {} tokenizer ...'.format(args.tokenizer_type), flush=True) + + # Select and instantiate the tokenizer. + if args.tokenizer_type == 'BertWordPieceLowerCase': + assert args.vocab_file is not None + tokenizer = _BertWordPieceTokenizer( + vocab_file=args.vocab_file, lower_case=True, vocab_extra_ids=args.vocab_extra_ids + ) + elif args.tokenizer_type == 'BertWordPieceCase': + assert args.vocab_file is not None + tokenizer = _BertWordPieceTokenizer( + vocab_file=args.vocab_file, lower_case=False, vocab_extra_ids=args.vocab_extra_ids + ) + elif args.tokenizer_type == 'GPT2BPETokenizer': + assert args.vocab_file is not None + assert args.merge_file is not None + tokenizer = _GPT2BPETokenizer(args.vocab_file, args.merge_file) + elif args.tokenizer_type == 'SentencePieceTokenizer': + assert args.tokenizer_model is not None + tokenizer = _SentencePieceTokenizer( + args.tokenizer_model, vocab_extra_ids=args.vocab_extra_ids + ) + elif args.tokenizer_type == 'GPTSentencePieceTokenizer': + assert args.tokenizer_model is not None + tokenizer = _GPTSentencePieceTokenizer(args.tokenizer_model) + elif args.tokenizer_type == 'HuggingFaceTokenizer': + tokenizer = _HuggingFaceTokenizer(args.tokenizer_model, **kwargs) + elif args.tokenizer_type == 'Llama2Tokenizer': + assert args.tokenizer_model is not None + tokenizer = _Llama2Tokenizer(args.tokenizer_model) + elif args.tokenizer_type == 'TikTokenizer': + assert args.tokenizer_model is not None + assert args.tiktoken_pattern is not None + assert args.tiktoken_pattern in {"v1", "v2"} + pattern = PATTERN_TIKTOKEN if args.tiktoken_pattern == "v1" else PATTERN_TIKTOKEN_V2 + tokenizer = CustomTikTokenizer( + path=args.tokenizer_model, + pattern=pattern, + vocab_size=args.vocab_size, + num_special_tokens=args.tiktoken_num_special_tokens, + special_tokens=args.tiktoken_special_tokens, + ) + elif args.tokenizer_type == 'NullTokenizer': + assert args.vocab_size is not None + tokenizer = _NullTokenizer(args.vocab_size) + elif args.tokenizer_type == "MultimodalTokenizer": + try: + import transformers + except ImportError: + raise ImportError( + "MultimodalTokenizer currently requires transformers library to be installed" + ) + + kwargs = dict() + if args.tokenizer_prompt_format == "nvlm-yi-34b": + kwargs = { + "from_slow": True, + "legacy": False, + "add_bos_token": True, + } + + # Currently, only HuggingFace tokenizers are supported. 
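+        # Illustrative, made-up example of the argument values this branch consumes
+        # (attribute names mirror the code below; the concrete values are only examples):
+        #   args.tokenizer_model         = "meta-llama/Llama-3.1-8B-Instruct"
+        #   args.tokenizer_prompt_format = "llama3p1"
+        #   args.special_tokens          = ["<image>"]   # assumed extra image token
+        #   args.image_tag_type          = "internvl"    # or "nvlm", or "" for no tag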
+ underlying_tokenizer = transformers.AutoTokenizer.from_pretrained( + pretrained_model_name_or_path=args.tokenizer_model, **kwargs + ) + + tokenizer = MultimodalTokenizer( + underlying_tokenizer, + args.tokenizer_prompt_format, + args.special_tokens, + args.image_tag_type, + ) + else: + raise NotImplementedError('{} tokenizer is not ' 'implemented.'.format(args.tokenizer_type)) + + # Add vocab size (if not already set from a checkpoint). + if getattr(args, "padded_vocab_size", None) is None: + args.padded_vocab_size = _vocab_size_with_padding(tokenizer.vocab_size, args) + + return tokenizer + + +def _vocab_size_with_padding(orig_vocab_size, args, logging_enabled=True): + """Pad vocab size so it is divisible by model parallel size and + still having GPU friendly size.""" + + after = orig_vocab_size + multiple = args.make_vocab_size_divisible_by * args.tensor_model_parallel_size + after = int(math.ceil(after / multiple) * multiple) + if args.rank == 0 and logging_enabled: + print( + ' > padded vocab (size: {}) with {} dummy tokens ' + '(new size: {})'.format(orig_vocab_size, after - orig_vocab_size, after), + flush=True, + ) + return after + + +class _HuggingFaceTokenizer(MegatronTokenizer): + def __init__(self, pretrained_model_name_or_path, **kwargs): + super().__init__(pretrained_model_name_or_path, **kwargs) + try: + import transformers + except ImportError: + raise EnvironmentError( + f"The transformers library must be installed to use huggingface_tokenizer_provider" + ) + + # TODO(bnorick): download tokenizer once to lustre and use force offline to make sure all tasks read it from there + self._tokenizer = transformers.AutoTokenizer.from_pretrained( + pretrained_model_name_or_path=pretrained_model_name_or_path, **kwargs + ) + self._vocab = self._tokenizer.get_vocab() + self._inv_vocab = {token_id: token for token, token_id in self._vocab.items()} + + @property + def vocab_size(self): + return len(self._tokenizer) + + @property + def vocab(self): + """Dictionary from vocab text token to id token.""" + return self._vocab + + @property + def inv_vocab(self): + """Dictionary from vocab id token to text token.""" + return self._inv_vocab + + @property + def decoder(self): + return self._inv_vocab + + def tokenize(self, text, **kwargs): + return self._tokenizer(text, **kwargs).input_ids + + def detokenize(self, token_ids, **kwargs): + return self._tokenizer.decode(token_ids, **kwargs) + + def offsets(self, ids: list[int], text: str) -> list[int]: + retok_ids: "transformers.BatchEncoding" = self._tokenizer(text) + offsets, next_start_idx = [], 0 + for i in range(len(ids)): + span = retok_ids.token_to_chars(i) + if span is not None: + offsets.append(span.start) + next_start_idx = span.end + else: + offsets.append(next_start_idx) + return offsets + + @property + def eod(self): + return self._tokenizer.eos_token_id + + +class _BertWordPieceTokenizer(MegatronTokenizer): + """Original BERT wordpiece tokenizer.""" + + def __init__(self, vocab_file, lower_case=True, vocab_extra_ids=0): + super().__init__(vocab_file, lower_case=lower_case, vocab_extra_ids=vocab_extra_ids) + self.tokenizer = FullBertTokenizer(vocab_file, do_lower_case=lower_case) + self.cls_id = self.tokenizer.vocab['[CLS]'] + self.sep_id = self.tokenizer.vocab['[SEP]'] + self.pad_id = self.tokenizer.vocab['[PAD]'] + self.mask_id = self.tokenizer.vocab['[MASK]'] + self._additional_special_tokens = [] + + # (dsachan) Add BOS and EOS tokens + SPECIAL_TOKENS = {'eos_token': '[EOS]', 'bos_token': '[BOS]'} + self._bos_token = '[BOS]' + 
+        self.add_token(self._bos_token)
+        self._bos_token_id = self.vocab.get(self._bos_token)
+
+        self._eos_token = '[EOS]'
+        self.add_token(self._eos_token)
+        self._eos_token_id = self.vocab.get(self._eos_token)
+
+        # (dsachan) Add additional special tokens
+        # These can be used as sentinel tokens in T5 model inputs
+        additional_special_tokens = []
+        additional_special_tokens.extend(
+            ["<extra_id_{}>".format(i) for i in range(vocab_extra_ids)]
+        )
+        self.add_additional_special_tokens(additional_special_tokens)
+
+    def add_token(self, token):
+        if token not in self.vocab:
+            self.inv_vocab[self.vocab_size] = token
+            # self.vocab_size comes from len(vocab)
+            # and it will increase as we add elements
+            self.vocab[token] = self.vocab_size
+
+    def add_additional_special_tokens(self, tokens_list):
+        setattr(self, "additional_special_tokens", tokens_list)
+        for value in tokens_list:
+            self.add_token(value)
+
+    @property
+    def vocab_size(self):
+        return self.tokenizer.vocab_size()
+
+    @property
+    def vocab(self):
+        return self.tokenizer.vocab
+
+    @property
+    def inv_vocab(self):
+        return self.tokenizer.inv_vocab
+
+    def tokenize(self, text):
+        text_tokens = self.tokenizer.tokenize(text)
+        return self.tokenizer.convert_tokens_to_ids(text_tokens)
+
+    def decode(self, ids):
+        tokens = self.tokenizer.convert_ids_to_tokens(ids)
+        return self.tokenizer.convert_tokens_to_string(tokens)
+
+    def detokenize(self, token_ids):
+        """Copy of decode() method for inference pipeline compatibility"""
+        return self.decode(token_ids)
+
+    def decode_token_ids(self, token_ids):
+        tokens = self.tokenizer.convert_ids_to_tokens(token_ids)
+        exclude_list = ['[PAD]', '[CLS]']
+        non_pads = [t for t in tokens if t not in exclude_list]
+
+        result = ""
+        for s in non_pads:
+            if s.startswith("##"):
+                result += s[2:]
+            else:
+                result += " " + s
+
+        return result
+
+    @property
+    def cls(self):
+        return self.cls_id
+
+    @property
+    def sep(self):
+        return self.sep_id
+
+    @property
+    def pad(self):
+        return self.pad_id
+
+    @property
+    def mask(self):
+        return self.mask_id
+
+    @property
+    def bos(self):
+        """Id of the beginning of sentence token in the vocabulary."""
+        return self._bos_token_id
+
+    @property
+    def eos(self):
+        """Id of the end of sentence token in the vocabulary."""
+        return self._eos_token_id
+
+    @property
+    def eod(self):
+        """Copy of eod property for inference pipeline compatibility"""
+        return self.eos
+
+    @property
+    def bos_token(self):
+        """Beginning of sentence token id"""
+        return self._bos_token
+
+    @property
+    def eos_token(self):
+        """End of sentence token id"""
+        return self._eos_token
+
+    @property
+    def additional_special_tokens(self):
+        """All the additional special tokens you may want to use (list of strings)."""
+        return self._additional_special_tokens
+
+    @property
+    def additional_special_tokens_ids(self):
+        """Ids of all the additional special tokens in the vocabulary (list of integers)."""
+        return [self.vocab.get(token) for token in self._additional_special_tokens]
+
+    @additional_special_tokens.setter
+    def additional_special_tokens(self, value):
+        self._additional_special_tokens = value
+
+
+class _GPT2BPETokenizer(MegatronTokenizer):
+    """Original GPT2 BPE tokenizer."""
+
+    def __init__(self, vocab_file, merge_file):
+        super().__init__(vocab_file, merge_file)
+
+        self.tokenizer = GPT2Tokenizer(
+            vocab_file, merge_file, errors='replace', special_tokens=[], max_len=None
+        )
+        self.eod_id = self.tokenizer.encoder['<|endoftext|>']
+
+    @property
+    def vocab_size(self):
+        return len(self.tokenizer.encoder)
+
+    @property
+    def vocab(self):
+        return self.tokenizer.encoder
+
+    @property
+    def inv_vocab(self):
+        return self.tokenizer.decoder
+
+    def tokenize(self, text):
+        return self.tokenizer.encode(text)
+
+    def detokenize(self, token_ids):
+        return self.tokenizer.decode(token_ids)
+
+    @property
+    def eod(self):
+        return self.eod_id
+
+
+class _SentencePieceTokenizer(MegatronTokenizer):
+    """SentencePieceTokenizer-Megatron wrapper"""
+
+    def __init__(self, model_file, vocab_extra_ids=0):
+        super().__init__(model_file, vocab_extra_ids=vocab_extra_ids)
+
+        import sentencepiece
+
+        self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=model_file)
+        self._initalize(vocab_extra_ids)
+
+    def _populate_vocab(self):
+        self._vocab = {}
+        self._inv_vocab = {}
+
+        for i in range(len(self.tokenizer)):
+            t = self.tokenizer.id_to_piece(i)
+            self._inv_vocab[i] = t
+            self._vocab[t] = i
+
+    def _initalize(self, vocab_extra_ids):
+        self._populate_vocab()
+        self._special_tokens = {}
+        self._inv_special_tokens = {}
+
+        self._t5_tokens = []
+
+        def _add_special_token(t):
+            if t not in self._vocab:
+                next_id = len(self._vocab)
+                self._vocab[t] = next_id
+                self._inv_vocab[next_id] = t
+            self._special_tokens[t] = self._vocab[t]
+            self._inv_special_tokens[self._vocab[t]] = t
+
+        _add_special_token('<CLS>')
+        self._cls_id = self._vocab['<CLS>']
+        _add_special_token('<SEP>')
+        self._sep_id = self._vocab['<SEP>']
+        _add_special_token('<EOD>')
+        self._eod_id = self._vocab['<EOD>']
+        _add_special_token('<MASK>')
+        self._mask_id = self._vocab['<MASK>']
+
+        pad_id = self.tokenizer.pad_id()
+        try:
+            pad_token = self.tokenizer.id_to_piece(pad_id)
+        except IndexError:
+            pad_token = '<PAD>'
+        _add_special_token(pad_token)
+        self._pad_id = self._vocab[pad_token]
+
+        bos_id = self.tokenizer.bos_id()
+        try:
+            bos_token = self.tokenizer.id_to_piece(bos_id)
+        except IndexError:
+            bos_token = '<BOS>'
+        _add_special_token(bos_token)
+        self._bos_id = self._vocab[bos_token]
+
+        eos_id = self.tokenizer.eos_id()
+        try:
+            eos_token = self.tokenizer.id_to_piece(eos_id)
+        except IndexError:
+            eos_token = '<EOS>'
+        _add_special_token(eos_token)
+        self._eos_id = self._vocab[eos_token]
+
+        for i in range(vocab_extra_ids):
+            t = "<extra_id_{}>".format(i)
+            _add_special_token(t)
+            self._t5_tokens += [t]
+
+    @property
+    def vocab_size(self):
+        return len(self._vocab)
+
+    @property
+    def vocab(self):
+        return self._vocab
+
+    @property
+    def inv_vocab(self):
+        return self._inv_vocab
+
+    @property
+    def decoder(self):
+        return self._inv_vocab
+
+    @property
+    def encoder(self):
+        return self._vocab
+
+    # From:
+    # https://github.com/NVIDIA/NeMo/blob/c8fa217e811d60d11d014827c7f3845ff6c99ae7/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py#L89
+    def tokenize(self, text):
+        ids = []
+        idx = 0
+
+        while 1:
+            indices = {}
+            for token in self._special_tokens:
+                try:
+                    indices[token] = text[idx:].index(token)
+                except ValueError:
+                    continue
+            if len(indices) == 0:
+                break
+
+            next_token = min(indices, key=indices.get)
+            next_idx = idx + indices[next_token]
+
+            ids.extend(self.tokenizer.encode_as_ids(text[idx:next_idx]))
+            ids.append(self._special_tokens[next_token])
+            idx = next_idx + len(next_token)
+
+        ids.extend(self.tokenizer.encode_as_ids(text[idx:]))
+        return ids
+
+    # From:
+    # https://github.com/NVIDIA/NeMo/blob/c8fa217e811d60d11d014827c7f3845ff6c99ae7/nemo/collections/common/tokenizers/sentencepiece_tokenizer.py#L125
+    def detokenize(self, ids):
+        text = ""
+        last_i = 0
+
+        for i, id in enumerate(ids):
+            if id in self._inv_special_tokens:
+                text += self.tokenizer.decode_ids(ids[last_i:i]) + " "
text += self._inv_special_tokens[id] + " " + last_i = i + 1 + + text += self.tokenizer.decode_ids(ids[last_i:]) + return text + + def offsets(self, ids: list[int], text: str) -> list[int]: + return [p.begin for p in self.tokenizer.decode_ids_as_immutable_proto(ids).pieces] + + @property + def cls(self): + return self._cls_id + + @property + def sep(self): + return self._sep_id + + @property + def pad(self): + return self._pad_id + + @property + def bos(self): + return self._bos_id + + @property + def eod(self): + return self._eod_id + + @property + def eos(self): + return self._eos_id + + @property + def mask(self): + return self._mask_id + + @property + def additional_special_tokens_ids(self): + return [self.vocab[k] for k in self._t5_tokens] + + +class _GPTSentencePieceTokenizer(_SentencePieceTokenizer): + """SentencePieceTokenizer-Megatron wrapper""" + + def __init__(self, model_file): + super().__init__(model_file, vocab_extra_ids=0) + + def _initalize(self, vocab_extra_ids): + self._populate_vocab() + + self._pad_id = self.tokenizer.pad_id() + self._bos_id = self.tokenizer.bos_id() + self._eos_id = self.tokenizer.eos_id() + + def tokenize(self, text): + return self.tokenizer.encode_as_ids(text) + + def detokenize(self, ids): + return self.tokenizer.decode_ids(ids) + + @property + def cls(self): + return -1 + + @property + def sep(self): + return -1 + + @property + def mask(self): + return -1 + + @property + def eod(self): + return self._eos_id + + @property + def additional_special_tokens_ids(self): + return None + + +class _Llama2Tokenizer(_SentencePieceTokenizer): + """SentencePieceTokenizer-Megatron wrapper""" + + def __init__(self, model_file): + super().__init__(model_file, vocab_extra_ids=0) + + def _initalize(self, vocab_extra_ids): + self._populate_vocab() + + # BOS / EOS token IDs + self.n_words: int = self.tokenizer.vocab_size() + self.bos_id: int = self.tokenizer.bos_id() + self.eos_id: int = self.tokenizer.eos_id() + self.pad_id: int = self.tokenizer.pad_id() + assert self.tokenizer.vocab_size() == self.tokenizer.get_piece_size() + + def tokenize(self, s: str, bos=True, eos=False): + '''Default args for text completion, not chat/dialog.''' + assert type(s) is str + t = self.tokenizer.encode(s) + if bos: + t = [self.bos_id] + t + if eos: + t = t + [self.eos_id] + return t + + def detokenize(self, ids): + return self.tokenizer.decode_ids(ids) + + @property + def cls(self): + return -1 + + @property + def sep(self): + return -1 + + @property + def mask(self): + return -1 + + @property + def eod(self): + return self.eos_id + + @property + def additional_special_tokens_ids(self): + return None + + +def reload_mergeable_ranks(path: str, max_vocab: Optional[int] = None) -> Dict[bytes, int]: + """ + Reload our tokenizer JSON file and convert it to Tiktoken format. + """ + from ..utils import print_rank_0 # To prevent circular import. 
+
+    assert path.endswith(".json")
+
+    # reload vocab
+    with open(path, "r") as f:
+        vocab = json.load(f)
+    assert isinstance(vocab, list)
+    print_rank_0(f"Vocab size: {len(vocab)}")
+    if max_vocab is not None:
+        vocab = vocab[:max_vocab]
+        print_rank_0(f"Cutting vocab to first {len(vocab)} tokens.")
+
+    # build ranks
+    ranks: Dict[bytes, int] = {}
+    for i, x in enumerate(vocab):
+        assert x.keys() == {"rank", "token_bytes", "token_str"}
+        assert x["rank"] == i
+        merge = base64.b64decode(x["token_bytes"])
+        assert i >= 256 or merge == bytes([i])
+        ranks[merge] = x["rank"]
+
+    # sanity check
+    assert len(ranks) == len(vocab)
+    assert set(ranks.values()) == set(range(len(ranks)))
+
+    return ranks
+
+
+PATTERN_TIKTOKEN = (
+    r"[^\r\n\p{L}\p{N}]?+\p{L}+|\p{N}| ?[^\s\p{L}\p{N}]++[\r\n]*|\s*[\r\n]|\s+(?!\S)|\s+"
+)
+PATTERN_TIKTOKEN_V2 = "[^\\r\\n\\p{L}\\p{N}]?[\\p{Lu}\\p{Lt}\\p{Lm}\\p{Lo}\\p{M}]*[\\p{Ll}\\p{Lm}\\p{Lo}\\p{M}]+|[^\\r\\n\\p{L}\\p{N}]?[\\p{Lu}\\p{Lt}\\p{Lm}\\p{Lo}\\p{M}]+[\\p{Ll}\\p{Lm}\\p{Lo}\\p{M}]*|\\p{N}| ?[^\\s\\p{L}\\p{N}]+[\\r\\n/]*|\\s*[\\r\\n]+|\\s+(?!\\S)|\\s+"
+
+
+class CustomTikTokenizer(MegatronTokenizer):
+    def __init__(
+        self,
+        path: str,
+        pattern: str,
+        vocab_size: Optional[int],
+        num_special_tokens: int,
+        special_tokens: Optional[List[str]],
+    ):
+        super().__init__(
+            path,
+            pattern=pattern,
+            vocab_size=vocab_size,
+            num_special_tokens=num_special_tokens,
+            special_tokens=special_tokens,
+        )
+        import tiktoken
+
+        from .. import print_rank_0  # To prevent circular import.
+
+        if vocab_size is None:
+            vocab_size = 2**17  # Fallback vocab size is 131072.
+        self._vocab_size = vocab_size
+
+        SPECIAL_TOKENS = ["<unk>", "<s>", "</s>"]
+        if special_tokens is None:
+            special_tokens = SPECIAL_TOKENS.copy()
+        assert len(special_tokens) == len(
+            set(special_tokens)
+        ), f"Special tokens should be unique: {special_tokens}"
+        assert len(special_tokens) <= num_special_tokens < self._vocab_size
+        assert set(SPECIAL_TOKENS) <= set(
+            special_tokens
+        ), f"Custom special tokens should include {SPECIAL_TOKENS}"
+
+        special_filler = [
+            "<SPECIAL_{id}>".format(id=i) for i in range(len(special_tokens), num_special_tokens)
+        ]
+        if special_filler:
+            print_rank_0(f"Adding special tokens {special_filler[0]}, ..., {special_filler[-1]}")
+        special_tokens = special_tokens + special_filler
+        assert len(set(special_tokens)) == len(special_tokens) == num_special_tokens, special_tokens
+        inner_vocab_size = self._vocab_size - num_special_tokens
+
+        token_to_id_without_special_tokens = reload_mergeable_ranks(
+            path, max_vocab=inner_vocab_size
+        )
+        # Create space for special tokens.
+        token_to_id_without_special_tokens = {
+            t: i + num_special_tokens for t, i in token_to_id_without_special_tokens.items()
+        }
+
+        special_tokens = {t: i for i, t in enumerate(special_tokens)}
+        self._unk_id = special_tokens["<unk>"]
+        self._bos_id = special_tokens["<s>"]
+        self._eos_id = special_tokens["</s>"]
+
+        # Create tiktoken model.
+        self._model = tiktoken.Encoding(
+            name=Path(path).parent.name,
+            pat_str=pattern,
+            mergeable_ranks=token_to_id_without_special_tokens,
+            special_tokens=special_tokens,
+        )
+
+        # Create final _id_to_token and _token_to_id data structures with special tokens inserted
+        # into appropriate locations.
+ assert set(token_to_id_without_special_tokens.keys()).isdisjoint(set(special_tokens.keys())) + self._token_to_id = token_to_id_without_special_tokens.copy() + self._token_to_id.update(special_tokens) + self._id_to_token = {v: k for k, v in self._token_to_id.items()} + assert set(range(self._vocab_size)) == set(self._id_to_token.keys()) + + @property + def bos(self) -> int: + return self._bos_id + + @property + def eos(self) -> int: + return self._eos_id + + @property + def unk(self) -> int: + return self._unk_id + + @property + def eod(self) -> int: + return self._eos_id + + @property + def vocab(self): + return self._token_to_id + + @property + def inv_vocab(self): + return self._id_to_token + + def tokenize(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: + tokens = self._model.encode_ordinary(s) + if bos: + tokens = [self.bos, *tokens] + if eos: + tokens = [*tokens, self.eos] + + return tokens + + def detokenize(self, tokens: List[int]) -> str: + return self._model.decode(tokens) + + def offsets(self, ids: list[int], text: str) -> list[int]: + try: + return self._model.decode_with_offsets(ids)[1] + except UnicodeDecodeError: + # Tiktoken has an unnecessary check that raises UnicodeDecodeError + # from `text = b"".join(token_bytes).decode("utf-8", errors="strict")` + # which is not needed for our use case. So we re-implement it, without + # the check. + + token_bytes = self._model.decode_tokens_bytes(ids) + text_len = 0 + offsets = [] + for token in token_bytes: + offsets.append(max(0, text_len - (0x80 <= token[0] < 0xC0))) + text_len += sum(1 for c in token if not 0x80 <= c < 0xC0) + return offsets + + @property + def vocab_size(self) -> int: + return self._vocab_size + + @property + def encoder(self): + return self._token_to_id + + @property + def decoder(self): + return self._id_to_token + + +class _NullTokenizer(MegatronTokenizer): + def __init__(self, vocab_size): + super().__init__(None, vocab_size=vocab_size) + self._vocab_size_without_eod = int(vocab_size) + self._eod_id = self._vocab_size_without_eod + + def tokenize(self, text): + return [int(x) for x in text.split(' ')] + + def detokenize(self, ids): + text = [str(x) for x in ids] + return ' '.join(text) + + def offsets(self, ids: list[int], text: str) -> list[int]: + offsets, start_idx = [], 0 + for id_ in ids: + offsets.append(start_idx) + start_idx += 1 + len(str(id_)) + return offsets + + @property + def vocab_size(self): + return self._vocab_size_without_eod + 1 + + @property + def vocab(self): + raise NotImplementedError + + @property + def inv_vocab(self): + raise NotImplementedError + + @property + def cls(self): + return -1 + + @property + def sep(self): + return -1 + + @property + def mask(self): + return -1 + + @property + def eod(self): + return self._eod_id + + @property + def additional_special_tokens_ids(self): + return None diff --git a/megatron/training/training.py b/megatron/training/training.py index ad5d4fa..ea3702e 100644 --- a/megatron/training/training.py +++ b/megatron/training/training.py @@ -1,1957 +1,2049 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Pretrain utilities.""" - -import dataclasses -from datetime import datetime -import functools -import gc -import logging -import math -import os -import sys -from .log_handler import CustomHandler -# Make default logging level INFO, but filter out all log messages not from MCore. 
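For reference, the padding rule in _vocab_size_with_padding above rounds the raw tokenizer vocabulary up to the next multiple of make_vocab_size_divisible_by * tensor_model_parallel_size. A minimal standalone sketch of the same arithmetic (the sizes below are illustrative only, not taken from this patch):

import math

def padded_vocab_size(orig_vocab_size, make_vocab_size_divisible_by=128, tensor_model_parallel_size=8):
    # Round up to the nearest multiple, exactly as the helper above does.
    multiple = make_vocab_size_divisible_by * tensor_model_parallel_size
    return int(math.ceil(orig_vocab_size / multiple) * multiple)

# GPT-2's 50257-entry vocab padded for 8-way tensor parallelism becomes 51200.
assert padded_vocab_size(50257) == 51200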
-logging.basicConfig(handlers=[CustomHandler()], level=logging.INFO) -from .theoretical_memory_usage import report_theoretical_memory -import time -# The earliest we can measure the start time. -_TRAIN_START_TIME = time.time() -import torch - -from megatron.core import mpu, tensor_parallel -from megatron.core.utils import ( - check_param_hashes_across_dp_replicas, - get_model_config, - StragglerDetector, - is_float8tensor, -) -from megatron.training.checkpointing import load_checkpoint -from megatron.training.checkpointing import save_checkpoint -from megatron.training.checkpointing import checkpoint_exists -from megatron.legacy.model import Float16Module -from megatron.core.distributed import DistributedDataParallelConfig -from megatron.core.distributed import DistributedDataParallel as DDP -try: - from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP - - HAVE_FSDP2 = True -except ImportError: - HAVE_FSDP2 = False - -from megatron.core.distributed import finalize_model_grads -from megatron.core.enums import ModelType -from megatron.core.optimizer import get_megatron_optimizer, OptimizerConfig -from megatron.core.rerun_state_machine import ( - get_rerun_state_machine, - destroy_rerun_state_machine, - RerunDataIterator, - RerunMode, -) -from megatron.training.initialize import initialize_megatron -from megatron.training.initialize import write_args_to_tensorboard -from megatron.training.initialize import set_jit_fusion_options -from megatron.legacy.data.data_samplers import build_pretraining_data_loader -from megatron.core.optimizer_param_scheduler import OptimizerParamScheduler -from megatron.core.transformer.moe import upcycling_utils -from megatron.core.transformer.moe.moe_utils import track_moe_metrics -from megatron.core.parallel_state import ( - destroy_global_memory_buffer, - destroy_model_parallel, -) -from megatron.core.pipeline_parallel import get_forward_backward_func -from megatron.core.num_microbatches_calculator import ( - destroy_num_microbatches_calculator, - get_current_global_batch_size, - get_current_running_global_batch_size, - get_num_microbatches, - update_num_microbatches) - -from .async_utils import maybe_finalize_async_save -from .utils import ( - append_to_progress_log, - calc_params_l2_norm, - check_adlr_autoresume_termination, - logical_and_across_model_parallel_group, - reduce_max_stat_across_model_parallel_group, - is_last_rank, - print_rank_0, - print_rank_last, - report_memory, - unwrap_model, - update_use_dist_ckpt, -) -from .global_vars import ( - destroy_global_vars, - get_args, - get_signal_handler, - get_timers, - get_tensorboard_writer, - get_wandb_writer, - get_one_logger, -) -from . import one_logger_utils - -from . import ft_integration - -stimer = StragglerDetector() - - -def destroy_global_state(): - destroy_global_vars() - destroy_num_microbatches_calculator() - destroy_global_memory_buffer() - destroy_model_parallel() - destroy_rerun_state_machine() - - -def print_datetime(string): - """Note that this call will sync across all ranks.""" - torch.distributed.barrier() - time_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S') - print_rank_0(f'[{string}] datetime: {time_str} ') - - -def num_floating_point_operations(args, batch_size): - # Attention projection size. - query_projection_size = args.kv_channels * args.num_attention_heads - query_projection_to_hidden_size_ratio = query_projection_size / args.hidden_size - # Group Query Attention. 
- if not args.group_query_attention: - args.num_query_groups = args.num_attention_heads - # MoE. - num_experts_routed_to = 1 if args.num_experts is None else args.moe_router_topk - gated_linear_multiplier = 3 / 2 if args.swiglu else 1 - shared_expert_ffn_hidden_size = ( - 0 - if args.moe_shared_expert_intermediate_size is None - else args.moe_shared_expert_intermediate_size - ) - - # The 12x term below comes from the following factors; for more details, see - # "APPENDIX: FLOATING-POINT OPERATIONS" in https://arxiv.org/abs/2104.04473. - # - 3x: Each GEMM in the model needs to be performed 3 times (forward pass, - # backward wgrad [weight gradient], backward dgrad [data gradient]). - # - 2x: GEMMs of a particular size are stacked twice in the standard Transformer model - # architectures implemented in this codebase (e.g., h->ffn_h GEMM and ffn_h->h GEMM - # in MLP layer). - # - 2x: A GEMM of a m*n tensor with a n*k tensor requires 2mnk floating-point operations. - expansion_factor = 3 * 2 * 2 - - return ( - expansion_factor - * batch_size - * args.seq_length - * args.num_layers - * args.hidden_size - * args.hidden_size - * ( - # Attention. - ( - ( - 1 - + (args.num_query_groups / args.num_attention_heads) - + (args.seq_length / args.hidden_size) - ) * query_projection_to_hidden_size_ratio - ) - # MLP. - + ( - (args.ffn_hidden_size / args.hidden_size) - * num_experts_routed_to - * gated_linear_multiplier - ) - # Shared Experts. - + ((shared_expert_ffn_hidden_size / args.hidden_size) * gated_linear_multiplier) - # Logit. - + (args.padded_vocab_size / (2 * args.num_layers * args.hidden_size)) - ) - ) - - -def get_start_time_from_progress_log(): - """ - Gets start time of earliest job with same world size. Also returns the number - of floating-point operations completed in last saved checkpoint. - """ - args = get_args() - assert args.save is not None - progress_log_filename = os.path.join(args.save, "progress.txt") - - # start_time is time when job with same world size started. - # start_num_floating_point_operations is the number of floating-point operations - # completed when this job started. - # latest_num_floating_point_operations is the number of floating-point operations - # completed in most recent saved checkpoint. - start_time = None - start_num_floating_point_operations = None - latest_num_floating_point_operations = 0 - - def _get_field(string, type): - return type(string.split(': ')[1]) - - with open(progress_log_filename, 'r') as f: - for line in f: - line = line.strip() - line_tokens = line.split('\t') - world_size_in_line = _get_field(line_tokens[2], int) - if line_tokens[3] == "Saved checkpoint": - latest_num_floating_point_operations = \ - _get_field(line_tokens[7], float) - if world_size_in_line != args.world_size: - # Re-start search if we see a different world size. 
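The 12x expansion factor above (3 GEMM passes x 2 stacked GEMMs x 2 FLOPs per multiply-accumulate) makes the dominant MLP cost easy to estimate by hand. A minimal sketch with made-up model sizes, keeping only the MLP term and ignoring the attention, MoE and logit corrections:

batch_size, seq_length = 256, 4096
num_layers, hidden_size, ffn_hidden_size = 32, 4096, 16384

# MLP-only term of the formula above, with no SwiGLU gating and no MoE routing.
mlp_flops = 12 * batch_size * seq_length * num_layers * hidden_size * ffn_hidden_size
print(f"approx. MLP FLOPs per iteration: {mlp_flops:.2e}")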
- start_time = None - start_num_floating_point_operations = None - continue - if line_tokens[3] == "Starting job": - if start_time is None: - start_time = line_tokens[0] - start_num_floating_point_operations = \ - latest_num_floating_point_operations - assert start_time is not None and start_num_floating_point_operations is not None, \ - "Should have seen at least one 'Starting job' entry with same world_size" - return datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S'), \ - start_num_floating_point_operations - - -def preprocess_common_state_dict(common_state_dict): - import copy - # Convert args key of type namespace to dictionary - preprocessed_common_state_dict = copy.deepcopy(common_state_dict) - preprocessed_common_state_dict['args'] = vars(preprocessed_common_state_dict['args']) - # Remove rank and local rank from state dict if it exists, since they are expected to be different - preprocessed_common_state_dict['args'].pop('local_rank', None) - preprocessed_common_state_dict['args'].pop('rank', None) - return preprocessed_common_state_dict - - -def pretrain( - train_valid_test_dataset_provider, - model_provider, - model_type, - forward_step_func, - process_non_loss_data_func=None, - extra_args_provider=None, - args_defaults={}, - get_embedding_ranks=None, - get_position_embedding_ranks=None, - non_loss_data_func=None, -): - """Main training program. - - This function will run the followings in the order provided: - 1) initialize Megatron. - 2) setup model, optimizer and lr schedule using the model_provider. - 3) call train_val_test_data_provider to get train/val/test datasets. - 4) train the model using the forward_step_func. - - Args: - train_valid_test_dataset_provider: a function that takes the size of - train/valid/test dataset and returns `train, valid, test` datasets. - model_provider: a function that returns a vanilla version of the - model. By vanilla we mean a simple model on cpu with no fp16 or ddp. - model_type: an enum that specifies the type of model being trained. - forward_step_func: a function that takes a `data iterator` and `model`, - and returns a `loss` scalar with a dictionary with key:values being - the info we would like to monitor during training, for example - `lm-loss: value`. We also require that this function add - `batch generator` to the timers class. - process_non_loss_data_func: a function to post process outputs of the - network. It can be used for dumping output tensors (e.g images) to - tensorboard. It takes `collected data`(list of tensors), - `current iteration index` and `tensorboard writer` as arguments. - extra_args_provider: a function that takes a parser and adds arguments - to it. It is used for programs to add their own arguments. - args_defaults: a dictionary from argument-name to argument-value. It - to set already parse arguments. - get_embedding_ranks (TODO): - get_position_embedding_ranks (TODO): - non_loss_data_func (callable): A custom function to call during evaluation. - It can run e.g. benchmarks. - """ - - # Initalize and get arguments, timers, and Tensorboard writer. - initialize_megatron( - extra_args_provider=extra_args_provider, - args_defaults=args_defaults, - get_embedding_ranks=get_embedding_ranks, - get_position_embedding_ranks=get_position_embedding_ranks - ) - - args = get_args() - timers = get_timers() - - if args.log_progress: - append_to_progress_log("Starting job") - - # Set pytorch JIT layer fusion options and warmup JIT functions. 
- set_jit_fusion_options() - - # Adjust the startup time so it reflects the largest value. - # This will be closer to what scheduler will see (outside of - # image ... launches. - global _TRAIN_START_TIME - start_time_tensor = torch.tensor([_TRAIN_START_TIME], - dtype=torch.double, - device='cuda') - torch.distributed.all_reduce(start_time_tensor, - op=torch.distributed.ReduceOp.MIN) - _TRAIN_START_TIME = start_time_tensor.item() - - app_metrics = {} - app_metrics['app_start_time'] = round(_TRAIN_START_TIME * 1000.0) - app_metrics['app_model_init_start_time'] = round(_TRAIN_START_TIME * 1000.0) - - print_rank_0('time to initialize megatron (seconds): {:.3f}'.format( - time.time() - _TRAIN_START_TIME)) - print_datetime('after megatron is initialized') - app_metrics['app_model_init_finish_time'] = one_logger_utils.get_timestamp_in_ms() - - # Track E2E metrics on pretrain start - one_logger_utils.on_pretrain_start() - - # Context used for persisting some state between checkpoint saves. - if args.non_persistent_ckpt_type == 'local': - raise RuntimeError('LocalCheckpointManagers are not yet integrated') - checkpointing_context = { - 'local_checkpoint_manager': BasicLocalCheckpointManager( - args.non_persistent_local_ckpt_dir - ) - } - else: - checkpointing_context = {} - - # Model, optimizer, and learning rate. - timers('model-and-optimizer-setup', log_level=0).start(barrier=True) - app_metrics['app_build_optimizer_start_time'] = one_logger_utils.get_timestamp_in_ms() - model, optimizer, opt_param_scheduler = setup_model_and_optimizer( - model_provider, model_type, checkpointing_context=checkpointing_context) - - timers('model-and-optimizer-setup').stop() - print_datetime('after model, optimizer, and learning rate ' - 'scheduler are built') - app_metrics['app_build_optimizer_finish_time'] = one_logger_utils.get_timestamp_in_ms() - config = get_model_config(model[0]) - - # Data stuff. - app_metrics['app_build_dataiters_start_time'] = one_logger_utils.get_timestamp_in_ms() - timers('train/valid/test-data-iterators-setup', log_level=0).start( - barrier=True) - if args.virtual_pipeline_model_parallel_size is not None: - train_data_iterator = [] - valid_data_iterator = [] - test_data_iterator = [] - for i in range(len(model)): - mpu.set_virtual_pipeline_model_parallel_rank(i) - iterators = build_train_valid_test_data_iterators( - train_valid_test_dataset_provider) - train_data_iterator.append(iterators[0]) - valid_data_iterator.append(iterators[1]) - test_data_iterator.append(iterators[2]) - else: - train_data_iterator, valid_data_iterator, test_data_iterator \ - = build_train_valid_test_data_iterators( - train_valid_test_dataset_provider) - timers('train/valid/test-data-iterators-setup').stop() - print_datetime('after dataloaders are built') - app_metrics['app_build_dataiters_finish_time'] = one_logger_utils.get_timestamp_in_ms() - - # Track if training is enabled. Can only be done once args.do_train is assigned after dataloader is built. - one_logger_utils.track_config_flags(args.train_iters, args.skip_train, args.do_train, - args.do_valid, args.do_test, args.dataloader_type, - args.retro_project_dir, args.retro_cyclic_train_iters) - - if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: - ft_integration.get_rank_monitor_client().init_workload_monitoring() - ft_timeouts = ft_integration.get_rank_monitor_client().timeouts - print_rank_0(f"Fault tolerance client initialized. Timeouts: {ft_timeouts}") - - # Print setup timing. 
- print_rank_0('done with setup ...') - timers.log(['model-and-optimizer-setup', - 'train/valid/test-data-iterators-setup'], barrier=True) - - one_logger = get_one_logger() - one_logger and one_logger.log_metrics(app_metrics) - - if not args.skip_train: - print_rank_0('training ...') - - if args.dataloader_type == 'cyclic' and args.retro_project_dir: - assert args.retro_cyclic_train_iters is not None - args.train_iters = args.retro_cyclic_train_iters - print_rank_0("retro cyclic train iters : %d" % args.train_iters) - - iteration = 0 - if args.do_train and args.train_iters > 0: - iteration, num_floating_point_operations_so_far = train( - forward_step_func, - model, optimizer, opt_param_scheduler, - train_data_iterator, valid_data_iterator, - process_non_loss_data_func, config, checkpointing_context, - non_loss_data_func) - - print_datetime('after training is done') - - if args.save and iteration != 0 and iteration % args.save_interval != 0: - save_checkpoint(iteration, model, optimizer, opt_param_scheduler, - num_floating_point_operations_so_far, checkpointing_context, - train_data_iterator=train_data_iterator, - ft_client=ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.SAVE_CHECKPOINT), preprocess_common_state_dict_fn=preprocess_common_state_dict) - - one_logger and one_logger.log_metrics({ - 'app_train_loop_finish_time': one_logger_utils.get_timestamp_in_ms() - }) - - else: - print_rank_0('skipping training (--skip-train is on) ...') - - iteration = args.iteration - - if args.do_valid: - prefix = f'iteration {iteration} on validation set' - evaluate_and_print_results(prefix, forward_step_func, - valid_data_iterator, model, - iteration, process_non_loss_data_func, config, - verbose=True, write_to_tensorboard=not args.skip_train, - non_loss_data_func=non_loss_data_func) - - if args.do_test: - prefix = f'iteration {iteration} on test set' - evaluate_and_print_results(prefix, forward_step_func, - test_data_iterator, model, - iteration, process_non_loss_data_func, config, - verbose=True, write_to_tensorboard=not args.skip_train, - non_loss_data_func=non_loss_data_func) - - wandb_writer = get_wandb_writer() - if wandb_writer: - wandb_writer.finish() - maybe_finalize_async_save(blocking=True) - - one_logger and one_logger.log_metrics({ - 'app_finish_time': one_logger_utils.get_timestamp_in_ms() - }) - one_logger_utils.finish() - - -def update_train_iters(args): - - # For iteration-based training, we don't need to do anything - if args.train_iters: - return - - # Constant batch size with sample-based training. - if args.rampup_batch_size is None: - args.train_iters = args.train_samples // args.global_batch_size - - else: - # Sample based training with rampup batch size. - iterations = 0 - consumed_samples = 0 - # Rampup phase. - while consumed_samples <= int(args.rampup_batch_size[2]) and consumed_samples <= args.train_samples: - update_num_microbatches(consumed_samples, consistency_check=False) - consumed_samples += get_current_global_batch_size() - iterations += 1 - # Reset - update_num_microbatches(0, consistency_check=False) - # Constant phase - # Note that we throw away any partial last batch. 
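Without batch-size rampup, the sample-based branch of update_train_iters above reduces to integer division, dropping any partial final batch. A minimal sketch with illustrative numbers:

def iters_from_samples(train_samples: int, global_batch_size: int) -> int:
    # The partial last batch is discarded, matching the constant-batch-size branch above.
    return train_samples // global_batch_size

# 1,000,000 samples with a global batch size of 1024 -> 976 iterations.
assert iters_from_samples(1_000_000, 1024) == 976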
- if args.train_samples > consumed_samples: - iterations += (args.train_samples - consumed_samples) // \ - args.global_batch_size - args.train_iters = iterations - - print_rank_0(f'setting training iterations to {args.train_iters}') - - -def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap_with_ddp=True): - """Build the model.""" - args = get_args() - args.model_type = model_type - - # Build model. - if mpu.get_pipeline_model_parallel_world_size() > 1 and \ - args.virtual_pipeline_model_parallel_size is not None: - assert model_type != ModelType.encoder_and_decoder, \ - "Interleaved schedule not supported for model with both encoder and decoder" - model = [] - for i in range(args.virtual_pipeline_model_parallel_size): - mpu.set_virtual_pipeline_model_parallel_rank(i) - # Set pre_process and post_process only after virtual rank is set. - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - this_model = model_provider_func( - pre_process=pre_process, - post_process=post_process - ) - this_model.model_type = model_type - model.append(this_model) - else: - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - add_encoder = True - add_decoder = True - if model_type == ModelType.encoder_and_decoder: - if mpu.get_pipeline_model_parallel_world_size() > 1: - rank = mpu.get_pipeline_model_parallel_rank() - first_decoder_rank = args.encoder_pipeline_model_parallel_size - world_size = mpu.get_pipeline_model_parallel_world_size() - pre_process = rank == 0 or rank == first_decoder_rank - post_process = (rank == (first_decoder_rank - 1)) or (rank == (world_size - 1)) - add_encoder = mpu.is_inside_encoder(rank) - add_decoder = mpu.is_inside_decoder(rank) - model = model_provider_func( - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder) - else: - model = model_provider_func( - pre_process=pre_process, - post_process=post_process - ) - model.model_type = model_type - - if not isinstance(model, list): - model = [model] - - # Set tensor model parallel attributes if not set. - # Only parameters that are already tensor model parallel have these - # attributes set for them. We should make sure the default attributes - # are set for all params so the optimizer can use them. - for model_module in model: - for param in model_module.parameters(): - tensor_parallel.set_defaults_if_not_set_tensor_model_parallel_attributes(param) - - # Print number of parameters. - if mpu.get_data_parallel_rank() == 0: - print(' > number of parameters on (tensor, pipeline) ' - 'model parallel rank ({}, {}): {}'.format( - mpu.get_tensor_model_parallel_rank(), - mpu.get_pipeline_model_parallel_rank(), - sum([sum([p.nelement() for p in model_module.parameters()]) - for model_module in model])), flush=True) - - # GPU allocation. - for model_module in model: - model_module.cuda(torch.cuda.current_device()) - - # Fp16 conversion. - if args.fp16 or args.bf16: - model = [Float16Module(model_module, args) for model_module in model] - - # The model_module.bfloat16()/model_module.half() above will call the inplace copy of TE's - # Float8Tensor, which will write an unwanted value (amax calculated from the current fp8 - # param) to its amax_history. The following logic will correct the amax_history back. 
- for model_module in model: - for param in model_module.parameters(): - if is_float8tensor(param) and param._fp8_meta is not None: - fp8_meta = param._fp8_meta['scaling_fwd'] - fp8_meta_index = param._fp8_meta_index - if hasattr(param, 'get_high_precision_init_val'): - fp8_meta.amax_history[0][fp8_meta_index].copy_( - param.get_high_precision_init_val().abs().max() - ) - else: - fp8_meta.amax_history[0][fp8_meta_index] = 0 - - if wrap_with_ddp: - if getattr(args, "use_torch_fsdp2", False): - assert HAVE_FSDP2, "Torch FSDP2 requires torch>=2.4.0" - DP = torch_FSDP - else: - DP = DDP - - config = get_model_config(model[0]) - - kwargs = {} - for f in dataclasses.fields(DistributedDataParallelConfig): - if hasattr(args, f.name): - kwargs[f.name] = getattr(args, f.name) - kwargs['grad_reduce_in_fp32'] = args.accumulate_allreduce_grads_in_fp32 - kwargs['check_for_nan_in_grad'] = args.check_for_nan_in_loss_and_grad - kwargs['bucket_size'] = args.ddp_bucket_size - kwargs['average_in_collective'] = args.ddp_average_in_collective - ddp_config = DistributedDataParallelConfig(**kwargs) - - overlap_param_gather_with_optimizer_step = getattr(args, 'overlap_param_gather_with_optimizer_step', False) - model = [DP(config=config, - ddp_config=ddp_config, - module=model_chunk, - # Turn off bucketing for model_chunk 2 onwards, since communication for these - # model chunks is overlapped with compute anyway. - disable_bucketing=(model_chunk_idx > 0) or overlap_param_gather_with_optimizer_step) - for (model_chunk_idx, model_chunk) in enumerate(model)] - - # Broadcast params from data parallel src rank to other data parallel ranks. - if args.data_parallel_random_init: - for model_module in model: - model_module.broadcast_params() - - return model - - -def get_optimizer_param_scheduler(optimizer): - """Build the learning rate scheduler.""" - args = get_args() - - # Iteration-based training. - if args.train_iters: - if args.lr_decay_iters is None: - args.lr_decay_iters = args.train_iters - lr_decay_steps = args.lr_decay_iters * args.global_batch_size - wd_incr_steps = args.train_iters * args.global_batch_size - wsd_decay_steps = None - if args.lr_wsd_decay_iters is not None: - wsd_decay_steps = args.lr_wsd_decay_iters * args.global_batch_size - if args.lr_warmup_fraction is not None: - lr_warmup_steps = args.lr_warmup_fraction * lr_decay_steps - else: - lr_warmup_steps = args.lr_warmup_iters * args.global_batch_size - # Sample-based training. - elif args.train_samples: - # We need to set training iters for later use. Technically - # we need to adjust the training samples too (due to last - # batch being incomplete) but we leave it as is for now. 
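The DDP wrapping above fills DistributedDataParallelConfig by walking its dataclass fields and copying any attribute with the same name from the parsed args. A self-contained sketch of that pattern (ExampleConfig and the namespace below are made-up placeholders, not Megatron types):

import dataclasses
from argparse import Namespace

@dataclasses.dataclass
class ExampleConfig:
    bucket_size: int = 0
    average_in_collective: bool = False

def config_from_args(config_cls, args):
    # Copy only the fields that exist on both the dataclass and the args namespace.
    kwargs = {f.name: getattr(args, f.name)
              for f in dataclasses.fields(config_cls) if hasattr(args, f.name)}
    return config_cls(**kwargs)

cfg = config_from_args(ExampleConfig, Namespace(bucket_size=40_000_000, unrelated=1))
assert cfg.bucket_size == 40_000_000 and cfg.average_in_collective is False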
- update_train_iters(args) - if args.lr_decay_samples is None: - args.lr_decay_samples = args.train_samples - lr_decay_steps = args.lr_decay_samples - wd_incr_steps = args.train_samples - wsd_decay_steps = args.lr_wsd_decay_samples - if args.lr_warmup_fraction is not None: - lr_warmup_steps = args.lr_warmup_fraction * lr_decay_steps - else: - lr_warmup_steps = args.lr_warmup_samples - else: - raise Exception( - 'either train-iters or train-samples should be provided.') - - opt_param_scheduler = OptimizerParamScheduler( - optimizer, - init_lr=args.lr_warmup_init, - max_lr=args.lr, - min_lr=args.min_lr, - lr_warmup_steps=lr_warmup_steps, - lr_decay_steps=lr_decay_steps, - lr_decay_style=args.lr_decay_style, - start_wd=args.start_weight_decay, - end_wd=args.end_weight_decay, - wd_incr_steps=wd_incr_steps, - wd_incr_style=args.weight_decay_incr_style, - use_checkpoint_opt_param_scheduler=args.use_checkpoint_opt_param_scheduler, - override_opt_param_scheduler=args.override_opt_param_scheduler, - wsd_decay_steps=wsd_decay_steps, - lr_wsd_decay_style=args.lr_wsd_decay_style) - - return opt_param_scheduler - - -def setup_model_and_optimizer(model_provider_func, - model_type, - no_wd_decay_cond=None, - scale_lr_cond=None, - lr_mult=1.0, - checkpointing_context=None): - """Setup model and optimizer.""" - args = get_args() - timers = get_timers() - one_logger = get_one_logger() - - model = get_model(model_provider_func, model_type) - unwrapped_model = unwrap_model(model) - - kwargs = {} - for f in dataclasses.fields(OptimizerConfig): - if hasattr(args, f.name): - kwargs[f.name] = getattr(args, f.name) - config = OptimizerConfig(**kwargs) - config.timers = timers - optimizer = get_megatron_optimizer(config, model, no_wd_decay_cond, - scale_lr_cond, lr_mult) - opt_param_scheduler = get_optimizer_param_scheduler(optimizer) - - if args.moe_use_upcycling: - torch.distributed.barrier() - assert not checkpoint_exists( - args.save - ), ("The upcycling destination directory already exists. " - "Please check if --moe-use-upcycling is mistakenly enabled. " - "Upcycling should only be set for the first run when converting the dense model. " - "All subsequent runs should remove this flag. 
") - num_experts = args.num_experts - args.num_experts = None - expert_model_parallel_size = args.expert_model_parallel_size - args.expert_model_parallel_size = 1 - dense_model_for_upcycling = get_model(model_provider_func, model_type) - args.num_experts = num_experts - args.expert_model_parallel_size = expert_model_parallel_size - _, args.num_floating_point_operations_so_far = upcycling_utils.load_and_upcycle_model( - load_checkpoint, - unwrapped_model, - dense_model_for_upcycling, - load_kwargs = {'model': dense_model_for_upcycling, 'optimizer': None, 'opt_param_scheduler': None} - ) - args.iteration = 1 - save_checkpoint(args.iteration, model, None, None, args.num_floating_point_operations_so_far) - torch.distributed.barrier() - del dense_model_for_upcycling - if (args.fp16 or args.bf16) and optimizer is not None: - optimizer.reload_model_params() - print_rank_0(f'Upcycled checkpoint saved to {args.save}') - - if (args.load is not None or args.pretrained_checkpoint is not None) and not args.moe_use_upcycling: - one_logger and one_logger.log_metrics({ - 'load_checkpoint_start_time': one_logger_utils.get_timestamp_in_ms() - }) - timers('load-checkpoint', log_level=0).start(barrier=True) - - args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( - model, optimizer, opt_param_scheduler, - ft_client=ft_integration.get_rank_monitor_client(), checkpointing_context=checkpointing_context, - skip_load_to_model_and_opt=HAVE_FSDP2 and getattr(args, "use_torch_fsdp2", False)) - timers('load-checkpoint').stop(barrier=True) - timers.log(['load-checkpoint']) - one_logger and one_logger.log_metrics({ - 'load_checkpoint_finish_time': one_logger_utils.get_timestamp_in_ms(), - 'load_checkpoint_time': timers('load-checkpoint').active_time() - }) - else: - args.iteration = 0 - args.num_floating_point_operations_so_far = 0 - - # get model without FP16 and/or DDP wrappers - if args.iteration == 0 and len(unwrapped_model) == 1 \ - and hasattr(unwrapped_model[0], 'init_state_dict_from_bert'): - print_rank_0("Initializing ICT from pretrained BERT model") - unwrapped_model[0].init_state_dict_from_bert() - if args.fp16: - optimizer.reload_model_params() - - # Convert checkpoint format. - if args.ckpt_convert_format is not None: - load_ckpt_format = args.ckpt_format - args.ckpt_format = args.ckpt_convert_format - args.save = os.path.join(args.ckpt_convert_save, args.ckpt_convert_format) - update_use_dist_ckpt(args) - - save_checkpoint(args.iteration, model, optimizer, opt_param_scheduler, - args.num_floating_point_operations_so_far, - preprocess_common_state_dict_fn=preprocess_common_state_dict) - - print_rank_0("> converted checkpoint: %s -> %s." % (load_ckpt_format, args.ckpt_format)) - torch.distributed.barrier() - exit() - - return model, optimizer, opt_param_scheduler - - -def train_step(forward_step_func, data_iterator, - model, optimizer, opt_param_scheduler, config): - """Single training step.""" - args = get_args() - timers = get_timers() - - rerun_state_machine = get_rerun_state_machine() - while rerun_state_machine.should_run_forward_backward(data_iterator): - # Set grad to zero. - for model_chunk in model: - model_chunk.zero_grad_buffer() - optimizer.zero_grad() - - # Forward pass. 
- forward_backward_func = get_forward_backward_func() - losses_reduced = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=data_iterator, - model=model, - num_microbatches=get_num_microbatches(), - seq_length=args.seq_length, - micro_batch_size=args.micro_batch_size, - decoder_seq_length=args.decoder_seq_length, - forward_only=False) - should_checkpoint, should_exit, exit_code = rerun_state_machine.should_checkpoint_and_exit() - if should_exit: - return {}, True, should_checkpoint, should_exit, exit_code, None, None - - # Empty unused memory. - if args.empty_unused_memory_level >= 1: - torch.cuda.empty_cache() - - # Vision gradients. - if getattr(args, 'vision_pretraining', False) and args.vision_pretraining_type == "dino": - unwrapped_model = unwrap_model(model[0]) - unwrapped_model.cancel_gradients_last_layer(args.curr_iteration) - - # Update parameters. - - timers('optimizer', log_level=1).start(barrier=args.barrier_with_L1_time) - update_successful, grad_norm, num_zeros_in_grad = optimizer.step() - timers('optimizer').stop() - - # when freezing sub-models we may have a mixture of successful and unsucessful ranks, - # so we must gather across mp ranks - update_successful = logical_and_across_model_parallel_group(update_successful) - # grad_norm and num_zeros_in_grad will be None on ranks without trainable params, - # so we must gather across mp ranks - grad_norm = reduce_max_stat_across_model_parallel_group(grad_norm) - if args.log_num_zeros_in_grad: - num_zeros_in_grad = reduce_max_stat_across_model_parallel_group(num_zeros_in_grad) - - # Vision momentum. - if getattr(args, 'vision_pretraining', False) and args.vision_pretraining_type == "dino": - unwrapped_model = unwrap_model(model[0]) - unwrapped_model.update_momentum(args.curr_iteration) - - # Update learning rate. - if update_successful: - increment = get_num_microbatches() * \ - args.micro_batch_size * \ - args.data_parallel_size - opt_param_scheduler.step(increment=increment) - skipped_iter = 0 - else: - skipped_iter = 1 - - # Empty unused memory. - if args.empty_unused_memory_level >= 2: - torch.cuda.empty_cache() - - if mpu.is_pipeline_last_stage(ignore_virtual=True): - # Average loss across microbatches. - loss_reduced = {} - for key in losses_reduced[0].keys(): - numerator = 0 - denominator = 0 - for x in losses_reduced: - val = x[key] - # there is one dict per microbatch. in new reporting, we average - # over the total number of tokens across the global batch. - if isinstance(val, tuple) or isinstance(val, list): - numerator += val[0] - denominator += val[1] - else: - # legacy behavior. we average over the number of microbatches, - # and so the denominator is 1. - numerator += val - denominator += 1 - loss_reduced[key] = numerator / denominator - return loss_reduced, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad - return {}, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad - - -def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_rate, iteration, - loss_scale, report_memory_flag, skipped_iter, - grad_norm, params_norm, num_zeros_in_grad): - """Log training information such as losses, timing, ....""" - args = get_args() - timers = get_timers() - writer = get_tensorboard_writer() - wandb_writer = get_wandb_writer() - one_logger = get_one_logger() - - # Advanced, skipped, and Nan iterations. 
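The per-key reduction at the end of train_step above supports two reporting modes: (numerator, denominator) pairs averaged over tokens in the global batch, and plain per-microbatch values averaged over the number of microbatches. A standalone sketch with invented numbers:

def reduce_losses(losses_per_microbatch):
    reduced = {}
    for key in losses_per_microbatch[0]:
        numerator, denominator = 0.0, 0.0
        for per_mb in losses_per_microbatch:
            val = per_mb[key]
            if isinstance(val, (tuple, list)):
                # Token-weighted reporting: (sum over tokens, token count).
                numerator += val[0]
                denominator += val[1]
            else:
                # Legacy reporting: average over the number of microbatches.
                numerator += val
                denominator += 1
        reduced[key] = numerator / denominator
    return reduced

# Two microbatches with 1000 and 600 valid tokens respectively.
assert reduce_losses([{'lm loss': (2300.0, 1000)}, {'lm loss': (1500.0, 600)}])['lm loss'] == 2.375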
- advanced_iters_key = 'advanced iterations' - skipped_iters_key = 'skipped iterations' - nan_iters_key = 'nan iterations' - # Advanced iterations. - if not skipped_iter: - total_loss_dict[advanced_iters_key] = total_loss_dict.get( - advanced_iters_key, 0) + 1 - else: - if advanced_iters_key not in total_loss_dict: - total_loss_dict[advanced_iters_key] = 0 - # Skipped iterations. - total_loss_dict[skipped_iters_key] = total_loss_dict.get( - skipped_iters_key, 0) + skipped_iter - # Update losses and set nan iterations - got_nan = False - for key in loss_dict: - if not skipped_iter: - total_loss_dict[key] = total_loss_dict.get( - key, torch.tensor([0.0], dtype=torch.float, device='cuda')) + loss_dict[key] - else: - value = loss_dict[key].float().sum().item() - is_nan = value == float('inf') or \ - value == -float('inf') or \ - value != value - got_nan = got_nan or is_nan - total_loss_dict[nan_iters_key] = total_loss_dict.get( - nan_iters_key, 0) + int(got_nan) - - # Logging. - timers_to_log = [ - 'forward-backward', - 'forward-compute', - 'backward-compute', - 'batch-generator', - 'forward-recv', - 'forward-send', - 'backward-recv', - 'backward-send', - 'forward-send-forward-recv', - 'forward-send-backward-recv', - 'backward-send-forward-recv', - 'backward-send-backward-recv', - 'forward-backward-send-forward-backward-recv', - 'layernorm-grads-all-reduce', - 'embedding-grads-all-reduce', - 'all-grads-sync', - 'params-all-gather', - 'optimizer-copy-to-main-grad', - 'optimizer-unscale-and-check-inf', - 'optimizer-clip-main-grad', - 'optimizer-count-zeros', - 'optimizer-inner-step', - 'optimizer-copy-main-to-model-params', - 'optimizer'] - - # Calculate batch size. - batch_size = args.micro_batch_size * args.data_parallel_size * \ - get_num_microbatches() - - # Track app tag & app tag ID - one_logger_utils.track_app_tag(batch_size, args.world_size, args.seq_length) - - total_iterations = total_loss_dict[advanced_iters_key] + \ - total_loss_dict[skipped_iters_key] - - # learning rate will be None on ranks without trainable params, so we must gather across mp ranks - learning_rate = reduce_max_stat_across_model_parallel_group(learning_rate) - # Tensorboard values. - # Timer requires all the ranks to call. 
- if args.log_timers_to_tensorboard and \ - (iteration % args.tensorboard_log_interval == 0): - timers.write(timers_to_log, writer, iteration, - normalizer=total_iterations) - if writer and (iteration % args.tensorboard_log_interval == 0): - if args.record_memory_history and is_last_rank(): - snapshot = torch.cuda.memory._snapshot() - from pickle import dump - with open(args.memory_snapshot_path , 'wb') as f: - dump(snapshot, f) - - if wandb_writer: - wandb_writer.log({'samples vs steps': args.consumed_train_samples}, - iteration) - writer.add_scalar('learning-rate', learning_rate, iteration) - writer.add_scalar('learning-rate vs samples', learning_rate, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'learning-rate': learning_rate}, iteration) - if args.decoupled_lr is not None: - writer.add_scalar('decoupled-learning-rate', decoupled_learning_rate, iteration) - if args.skipped_train_samples > 0: - writer.add_scalar('skipped-train-samples', args.skipped_train_samples, iteration) - if wandb_writer: - wandb_writer.log({'skipped-train-samples': args.skipped_train_samples}, iteration) - writer.add_scalar('batch-size', batch_size, iteration) - writer.add_scalar('batch-size vs samples', batch_size, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'batch-size': batch_size}, iteration) - for key in loss_dict: - writer.add_scalar(key , loss_dict[key], iteration) - writer.add_scalar(key + ' vs samples', loss_dict[key], - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({key: loss_dict[key]}, iteration) - if args.log_loss_scale_to_tensorboard: - writer.add_scalar('loss-scale', loss_scale, iteration) - writer.add_scalar('loss-scale vs samples', loss_scale, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'loss-scale': loss_scale}, iteration) - if args.log_world_size_to_tensorboard: - writer.add_scalar('world-size', args.world_size, iteration) - writer.add_scalar('world-size vs samples', args.world_size, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'world-size': args.world_size}, iteration) - if grad_norm is not None: - writer.add_scalar('grad-norm', grad_norm, iteration) - writer.add_scalar('grad-norm vs samples', grad_norm, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'grad-norm': grad_norm}, iteration) - if num_zeros_in_grad is not None: - writer.add_scalar('num-zeros', num_zeros_in_grad, iteration) - writer.add_scalar('num-zeros vs samples', num_zeros_in_grad, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'num-zeros': num_zeros_in_grad}, iteration) - if params_norm is not None: - writer.add_scalar('params-norm', params_norm, iteration) - writer.add_scalar('params-norm vs samples', params_norm, - args.consumed_train_samples) - if wandb_writer: - wandb_writer.log({'params-norm': params_norm}, iteration) - if args.log_memory_to_tensorboard: - mem_stats = torch.cuda.memory_stats() - writer.add_scalar( - "mem-reserved-bytes", - mem_stats["reserved_bytes.all.current"], - iteration, - ) - writer.add_scalar( - "mem-allocated-bytes", - mem_stats["allocated_bytes.all.current"], - iteration, - ) - writer.add_scalar( - "mem-allocated-count", - mem_stats["allocation.all.current"], - iteration, - ) - if args.num_experts is not None: - moe_loss_scale = 1 / get_num_microbatches() - track_moe_metrics(moe_loss_scale, iteration, writer, wandb_writer, total_loss_dict, args.moe_per_layer_logging) - - if iteration % args.log_interval == 0: - elapsed_time = 
timers('interval-time').elapsed(barrier=True) - elapsed_time_per_iteration = elapsed_time / total_iterations - - throughput = num_floating_point_operations(args, batch_size) / ( - elapsed_time_per_iteration * 10**12 * args.world_size) - - one_logger_utils.track_e2e_metrics(args.log_throughput, throughput) - - if args.log_timers_to_tensorboard: - if writer: - writer.add_scalar('iteration-time', - elapsed_time_per_iteration, iteration) - if wandb_writer: - wandb_writer.log({'iteration-time': elapsed_time_per_iteration}, - iteration) - log_string = f" [{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}]" - log_string += ' iteration {:8d}/{:8d} |'.format( - iteration, args.train_iters) - log_string += ' consumed samples: {:12d} |'.format( - args.consumed_train_samples) - if args.skipped_train_samples > 0: - log_string += ' skipped samples: {:12d} |'.format( - args.skipped_train_samples) - log_string += ' elapsed time per iteration (ms): {:.1f} |'.format( - elapsed_time_per_iteration * 1000.0) - if args.log_throughput: - log_string += f' throughput per GPU (TFLOP/s/GPU): {throughput:.1f} |' - if args.log_timers_to_tensorboard: - if writer: - writer.add_scalar('throughput', throughput, iteration) - if wandb_writer: - wandb_writer.log({'throughput': throughput}, iteration) - # Decoupled_learning_rate should be not None only on first and last pipeline stage. - log_string += f' learning rate: {learning_rate:.6E} |' - if args.decoupled_lr is not None and (mpu.is_pipeline_first_stage(ignore_virtual=True) or - mpu.is_pipeline_last_stage(ignore_virtual=True)): - assert decoupled_learning_rate is not None - log_string += f' decoupled learning rate: {decoupled_learning_rate:.6E} |' - else: - assert decoupled_learning_rate is None - log_string += f' global batch size: {batch_size:5d} |' - for key in total_loss_dict: - if key not in [advanced_iters_key, skipped_iters_key, - nan_iters_key]: - avg = total_loss_dict[key].item() / \ - float(max(1, total_loss_dict[advanced_iters_key])) - if avg > 0.0: - log_string += ' {}: {:.6E} |'.format(key, avg) - total_loss_dict[key] = torch.tensor([0.0], dtype=torch.float, device='cuda') - log_string += f' loss scale: {loss_scale:.1f} |' - if grad_norm is not None: - log_string += f' grad norm: {grad_norm:.3f} |' - if num_zeros_in_grad is not None: - log_string += f' num zeros: {num_zeros_in_grad} |' - if params_norm is not None: - log_string += f' params norm: {params_norm:.3f} |' - log_string += ' number of skipped iterations: {:3d} |'.format( - total_loss_dict[skipped_iters_key]) - log_string += ' number of nan iterations: {:3d} |'.format( - total_loss_dict[nan_iters_key]) - total_loss_dict[advanced_iters_key] = 0 - total_loss_dict[skipped_iters_key] = 0 - total_loss_dict[nan_iters_key] = 0 - print_rank_last(log_string) - if report_memory_flag: - # Report memory after optimizer state has been initialized. - if torch.distributed.get_rank() == 0: - num_microbatches = get_num_microbatches() - report_theoretical_memory(args, num_microbatches=num_microbatches, verbose=True) - report_memory(f'(after {iteration} iterations)') - report_memory_flag = False - timers.log(timers_to_log, normalizer=args.log_interval) - - return report_memory_flag - - -def compute_throughputs_and_append_to_progress_log(iteration, - num_floating_point_operations_so_far): - args = get_args() - if args.save is None: - return - - # Compute job throughput. - # args.num_floating_point_operations_so_far keeps track of floating-point operations - # completed at the start of job. 
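The throughput figure logged above is per GPU: FLOPs for one iteration divided by the iteration time, the 10^12 TFLOP scaling, and the world size. A minimal sketch with invented numbers:

def tflops_per_gpu(flops_per_iteration: float, seconds_per_iteration: float, world_size: int) -> float:
    # Matches the scaling used in training_log above.
    return flops_per_iteration / (seconds_per_iteration * 10**12 * world_size)

# 4.0e15 FLOPs per iteration, 10 s per iteration, 64 GPUs -> 6.25 TFLOP/s/GPU.
assert tflops_per_gpu(4.0e15, 10.0, 64) == 6.25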
- global _TRAIN_START_TIME - job_throughput = \ - (num_floating_point_operations_so_far - - args.num_floating_point_operations_so_far) / ( - (time.time() - _TRAIN_START_TIME) * 10**12 * args.world_size) - - # Compute cumulative throughput since jobs of this world size were launched. - # `get_start_time_from_progress_log` returns start time and number of floating-point - # operations of first job of this world size. - start_time, start_num_floating_point_operations = get_start_time_from_progress_log() - elapsed_time = (datetime.now() - start_time).total_seconds() - cumulative_throughput = \ - (num_floating_point_operations_so_far - - start_num_floating_point_operations) / ( - elapsed_time * 10**12 * args.world_size) - - tokens_so_far = args.consumed_train_samples * args.seq_length - saved_ckpt_prefix = 'Saving async checkpoint' if args.async_save else 'Saved checkpoint' - append_to_progress_log(f"{saved_ckpt_prefix}\tIteration: {iteration}\t" - f"Job throughput: {job_throughput:.1f} TFLOP/s/GPU\t" - f"Cumulative throughput: {cumulative_throughput:.1f} TFLOP/s/GPU\t" - f"Floating-point operations: {num_floating_point_operations_so_far:.2e}\t" - f"Tokens (in billions): {tokens_so_far / 10**9:.2f}") - - -def enable_forward_pre_hook(model_chunks): - for model_chunk in model_chunks: - assert isinstance(model_chunk, DDP) - model_chunk.enable_forward_pre_hook() - - -def disable_forward_pre_hook(model_chunks, param_sync=True): - for model_chunk in model_chunks: - assert isinstance(model_chunk, DDP) - model_chunk.disable_forward_pre_hook(param_sync=param_sync) - - -def save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, - num_floating_point_operations_so_far, checkpointing_context, - non_persistent_ckpt=False, train_data_iterator=None): - args = get_args() - timers = get_timers() - - # Stop timer to get accurate train interval time and exclude checkpointing duration - timers('interval-time').stop() - # Extra barrier is added to make sure all ranks report the max time. 
- timer_key = 'save-checkpoint-non-persistent' if non_persistent_ckpt else 'save-checkpoint' - timers(timer_key, log_level=0).start(barrier=True) - save_checkpoint_start_time = timers('save-checkpoint').active_time() - - # Log E2E metrics before save-checkpoint - one_logger_utils.track_e2e_metrics() - if args.use_distributed_optimizer and args.overlap_param_gather: - disable_forward_pre_hook(model) - save_checkpoint(iteration, model, optimizer, opt_param_scheduler, - num_floating_point_operations_so_far, checkpointing_context, - non_persistent_ckpt=non_persistent_ckpt, train_data_iterator=train_data_iterator, - ft_client=ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.SAVE_CHECKPOINT), preprocess_common_state_dict_fn=preprocess_common_state_dict) - if args.use_distributed_optimizer and args.overlap_param_gather: - enable_forward_pre_hook(model) - timers(timer_key).stop(barrier=True) - timers.log([timer_key]) - save_checkpoint_finish_time = timers('save-checkpoint').active_time() - - # Log E2E metrics after save-checkpoint - one_logger_utils.track_e2e_metrics() - save_checkpoint_duration = save_checkpoint_finish_time - save_checkpoint_start_time - one_logger_utils.on_save_checkpoint_end(save_checkpoint_duration, iteration, args.async_save) - - if args.log_progress and not non_persistent_ckpt: - compute_throughputs_and_append_to_progress_log(iteration, - num_floating_point_operations_so_far) - - # Recover timing - timers('interval-time', log_level=0).start(barrier=True) - - -def post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, - num_floating_point_operations_since_last_log_event): - """Run all post-training-step functions (e.g., FT heartbeats, GC).""" - args = get_args() - - # Send heartbeat to FT package and update timeouts. - if args.enable_ft_package: - ft_client = ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.TRAIN_HEARTBEAT) - if ft_client is not None: - ft_client.send_heartbeat() - # TODO: We are always calculating timeouts in the current implementation. - # If we want to rely on manually setting these, then we need to add additional - # arguments to training and pass it here. - if ft_integration.can_update_timeouts(): - ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.UPDATE_TIMEOUT).calculate_and_set_timeouts() - print_rank_0(f'Updated FT timeouts. New values: \ - {ft_integration.get_rank_monitor_client().timeouts}') - - # Bring CPU and GPU back in sync if on right iteration. - if args.train_sync_interval and iteration % args.train_sync_interval == 0: - torch.cuda.synchronize() - - # Straggler detector. - if iteration % args.log_interval == 0 and args.log_straggler: - stimer.report(num_floating_point_operations_since_last_log_event, args.log_interval) - num_floating_point_operations_since_last_log_event = 0.0 - - # Check weight hash across DP replicas. - if args.check_weight_hash_across_dp_replicas_interval is not None and \ - iteration % args.check_weight_hash_across_dp_replicas_interval == 0: - if args.use_distributed_optimizer and args.overlap_param_gather: - disable_forward_pre_hook(model) - assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ - "Parameter hashes not matching across DP replicas" - torch.distributed.barrier() - print_rank_0(f">>> Weight hashes match after {iteration} iterations...") - if args.use_distributed_optimizer and args.overlap_param_gather: - enable_forward_pre_hook(model) - - # Autoresume. 
- if args.adlr_autoresume and \ - (iteration % args.adlr_autoresume_interval == 0): - check_adlr_autoresume_termination(iteration, model, optimizer, - opt_param_scheduler) - - # Profiling. - if args.profile and \ - iteration == args.profile_step_end and \ - torch.distributed.get_rank() in args.profile_ranks: - if args.use_pytorch_profiler: - assert prof is not None - prof.stop() - else: - torch.cuda.cudart().cudaProfilerStop() - - # Manual garbage collection. - if args.manual_gc: - if args.manual_gc_interval != 0 and iteration % args.manual_gc_interval == 0: - gc.collect() - - -def checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, - num_floating_point_operations_so_far, checkpointing_context, - train_data_iterator): - """Save checkpoint and decide whether to exit based on arguments (e.g., if - --exit-duration-in-mins is set). Actual exit happens in main training loop - based on the return value of this function.""" - args = get_args() - timers = get_timers() - - # Exit based on signal handler. - saved_checkpoint = False - if args.exit_signal_handler: - signal_handler = get_signal_handler() - if any(signal_handler.signals_received()): - if args.save: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - print_datetime('exiting program after receiving SIGTERM.') - - return True - - # Regular save (persistent and non-persistent). - if args.save and args.save_interval and \ - iteration % args.save_interval == 0: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - saved_checkpoint = True - - elif args.save and args.non_persistent_save_interval and \ - iteration % args.non_persistent_save_interval == 0: - timers('interval-time').stop() - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, - non_persistent_ckpt=True, train_data_iterator=train_data_iterator) - saved_checkpoint = True - timers('interval-time', log_level=0).start(barrier=True) - - # Exit based on duration. - if args.exit_duration_in_mins: - train_time = (time.time() - _TRAIN_START_TIME) / 60.0 - done_cuda = torch.tensor( - [train_time > args.exit_duration_in_mins], - dtype=torch.int, device='cuda') - torch.distributed.all_reduce( - done_cuda, op=torch.distributed.ReduceOp.MAX) - done = done_cuda.item() - if done: - if args.save and not saved_checkpoint: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - print_datetime(f'exiting program after {train_time} minutes') - - return True - - # Exit based on iterations. 
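The duration-based exit above makes every rank agree on the decision by MAX-reducing a local flag, so no rank exits alone. A single-process sketch of that pattern; the gloo backend, localhost rendezvous, and the numeric values are assumptions for illustration only:

import torch
import torch.distributed as dist

# Single-process process group just to make the collective runnable standalone.
dist.init_process_group(backend="gloo", init_method="tcp://127.0.0.1:29512",
                        rank=0, world_size=1)

exit_duration_in_mins = 0.0   # hypothetical limit; 0 makes the local vote True
train_time_mins = 1.0         # hypothetical elapsed training time

# Each rank votes locally; MAX-reduce means "exit if any rank wants to exit".
done = torch.tensor([train_time_mins > exit_duration_in_mins], dtype=torch.int)
dist.all_reduce(done, op=dist.ReduceOp.MAX)
print("exit training:", bool(done.item()))

dist.destroy_process_group()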
- if args.exit_interval and iteration % args.exit_interval == 0: - if args.save and not saved_checkpoint: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - torch.distributed.barrier() - print_datetime(f'exiting program at iteration {iteration}') - - return True - - return False - - -def train(forward_step_func, model, optimizer, opt_param_scheduler, - train_data_iterator, valid_data_iterator, - process_non_loss_data_func, config, checkpointing_context, non_loss_data_func): - """Training function: run train_step desired number of times, run validation, checkpoint.""" - args = get_args() - timers = get_timers() - one_logger = get_one_logger() - - # Write args to tensorboard - write_args_to_tensorboard() - - # Turn on training mode which enables dropout. - for model_module in model: - model_module.train() - - # Tracking loss. - total_loss_dict = {} - - # Iterations. - iteration = args.iteration - - # Track E2E metrics at the start of training. - one_logger_utils.on_train_start(iteration=iteration, consumed_train_samples=args.consumed_train_samples, - train_samples=args.train_samples, seq_length=args.seq_length, - train_iters=args.train_iters, save=args.save, async_save=args.async_save, - log_throughput=args.log_throughput, - num_floating_point_operations_so_far=args.num_floating_point_operations_so_far) - - num_floating_point_operations_so_far = args.num_floating_point_operations_so_far - - # Setup some training config params. - config.grad_scale_func = optimizer.scale_loss - config.timers = timers - if isinstance(model[0], DDP) and args.overlap_grad_reduce: - assert config.no_sync_func is None, \ - ('When overlap_grad_reduce is True, config.no_sync_func must be None; ' - 'a custom no_sync_func is not supported when overlapping grad-reduce') - config.no_sync_func = [model_chunk.no_sync for model_chunk in model] - if len(model) == 1: - config.no_sync_func = config.no_sync_func[0] - if args.align_grad_reduce: - config.grad_sync_func = [model_chunk.start_grad_sync for model_chunk in model] - if len(model) == 1: - config.grad_sync_func = config.grad_sync_func[0] - if args.overlap_param_gather and args.align_param_gather: - config.param_sync_func = [model_chunk.start_param_sync for model_chunk in model] - if len(model) == 1: - config.param_sync_func = config.param_sync_func[0] - config.finalize_model_grads_func = finalize_model_grads - - timers('interval-time', log_level=0).start(barrier=True) - print_datetime('before the start of training step') - report_memory_flag = True - pre_hook_enabled = False - should_exit = False - exit_code = 0 - - if args.manual_gc: - # Disable the default garbage collector and perform the collection manually. - # This is to align the timing of garbage collection across ranks. - assert args.manual_gc_interval >= 0, \ - 'Manual garbage collection interval should be larger than or equal to 0' - gc.disable() - gc.collect() - - # Singleton initialization of straggler detector. 
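The manual garbage-collection setup in the hunk above trades Python's automatic collections for explicit, interval-based ones, so GC pauses land on the same iteration on every rank. A toy sketch of that control flow (the interval and loop bounds are made up):

import gc

gc.disable()              # no automatic collections at arbitrary points
gc.collect()              # one full collection up front

manual_gc_interval = 100  # hypothetical interval
for iteration in range(1, 301):
    if manual_gc_interval != 0 and iteration % manual_gc_interval == 0:
        gc.collect()      # every rank collects at the same iteration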
- if args.log_straggler: - global stimer - world = torch.distributed.get_world_size() - rank = torch.distributed.get_rank() - mmcnt = args.straggler_minmax_count - stimer.configure(world, rank, - mmcnt = mmcnt, - enabled = not args.disable_straggler_on_startup, - port = args.straggler_ctrlr_port) - num_floating_point_operations_since_last_log_event = 0.0 - - num_microbatches = get_num_microbatches() - eval_duration = 0.0 - eval_iterations = 0 - - def get_e2e_base_metrics(): - """Get base metrics values for one-logger to calculate E2E tracking metrics. - """ - return { - 'iteration': iteration, - 'train_duration': timers('interval-time').active_time(), - 'eval_duration': eval_duration, - 'eval_iterations': eval_iterations, - 'total_flops': num_floating_point_operations_since_last_log_event, - 'num_floating_point_operations_so_far': num_floating_point_operations_so_far, - 'consumed_train_samples': args.consumed_train_samples, - 'world_size': args.world_size, - 'seq_length': args.seq_length - } - # Cache into one-logger for callback. - if one_logger: - with one_logger.get_context_manager(): - one_logger.store_set('get_e2e_base_metrics', get_e2e_base_metrics) - - prof = None - if args.profile and torch.distributed.get_rank() in args.profile_ranks and args.use_pytorch_profiler: - - def trace_handler(p): - from pathlib import Path - Path(f"{args.profile_dir}").mkdir(parents=True, exist_ok=True) - if args.rank in [0]: - print(p.key_averages(group_by_input_shape=True, - group_by_stack_n=5).table(sort_by="self_cuda_time_total", - row_limit=-1, - max_src_column_width=100, - max_name_column_width=280, - max_shapes_column_width=200)) - - p.export_chrome_trace("{path}/trace_rank{rank}_step{step}.json".format( - path=args.profile_dir, rank=torch.distributed.get_rank(), step=p.step_num)) - - prof = torch.profiler.profile( - activities=[ - torch.profiler.ProfilerActivity.CPU, - torch.profiler.ProfilerActivity.CUDA, - ], - schedule=torch.profiler.schedule( - wait=max(args.profile_step_start-1, 0), - warmup=1 if args.profile_step_start > 0 else 0, - active=args.profile_step_end-args.profile_step_start, - repeat=1), - record_shapes=True, - #on_trace_ready=torch.profiler.tensorboard_trace_handler('./torch_prof_data')) - on_trace_ready=trace_handler) - prof.start() - elif args.profile and torch.distributed.get_rank() in args.profile_ranks and args.use_hip_profiler: - import ctypes - roctracer = ctypes.cdll.LoadLibrary("/opt/dtk/roctracer/lib/libroctracer64.so") - - start_iteration = iteration - # Disable forward pre-hook to start training to ensure that errors in checkpoint loading - # or random initialization don't propagate to all ranks in first all-gather (which is a - # no-op if things work correctly). - if args.use_distributed_optimizer and args.overlap_param_gather: - disable_forward_pre_hook(model, param_sync=False) - # Also remove param_sync_func temporarily so that sync calls made in - # `forward_backward_func` are no-ops. - param_sync_func = config.param_sync_func - config.param_sync_func = None - pre_hook_enabled = False - # Also, check weight hash across DP replicas to be very pedantic. - if args.check_weight_hash_across_dp_replicas_interval is not None: - assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ - "Parameter hashes not matching across DP replicas" - torch.distributed.barrier() - print_rank_0(f">>> Weight hashes match after {iteration} iterations...") - - # Run training iterations till done. 
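The PyTorch profiler configured above is driven by a wait/warmup/active schedule derived from args.profile_step_start and args.profile_step_end, and it is advanced once per iteration via prof.step() in the training loop that follows. A CPU-only sketch of the same wiring; the step range and the stand-in workload are hypothetical:

import torch
from torch.profiler import ProfilerActivity, profile, schedule

profile_step_start, profile_step_end = 3, 5   # hypothetical step range

prof = profile(
    activities=[ProfilerActivity.CPU],
    schedule=schedule(wait=max(profile_step_start - 1, 0),
                      warmup=1 if profile_step_start > 0 else 0,
                      active=profile_step_end - profile_step_start,
                      repeat=1),
    on_trace_ready=lambda p: print(p.key_averages().table(sort_by="cpu_time_total", row_limit=5)),
)
prof.start()
for step in range(8):
    torch.randn(256, 256) @ torch.randn(256, 256)  # stand-in for one training step
    prof.step()                                    # advances the schedule each iteration
prof.stop()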
- while iteration < args.train_iters: - if args.profile and torch.distributed.get_rank() in args.profile_ranks: - if args.use_pytorch_profiler: - prof.step() - elif args.use_hip_profiler: - if iteration == args.profile_step_start: roctracer.roctracer_start() - if iteration == args.profile_step_end: roctracer.roctracer_stop() - elif iteration == args.profile_step_start: - torch.cuda.cudart().cudaProfilerStart() - torch.autograd.profiler.emit_nvtx(record_shapes=True).__enter__() - - maybe_finalize_async_save(blocking=False) - - # Update number of microbatches first without consistency check to decide if a - # checkpoint should be saved. If the number of microbatches is different - # from the previous iteration, save a checkpoint. Then run consistency check - # to make sure training configuration is still valid. - update_num_microbatches(args.consumed_train_samples, consistency_check=False, verbose=True) - if get_num_microbatches() != num_microbatches and iteration != 0: - assert get_num_microbatches() > num_microbatches, \ - (f"Number of microbatches should be increasing due to batch size rampup; " - f"instead going from {num_microbatches} to {get_num_microbatches()}") - if args.save is not None: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - num_microbatches = get_num_microbatches() - update_num_microbatches(args.consumed_train_samples, consistency_check=True, verbose=True) - - # Run training step. - args.curr_iteration = iteration - loss_dict, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad = \ - train_step(forward_step_func, - train_data_iterator, - model, - optimizer, - opt_param_scheduler, - config) - if should_checkpoint: - save_checkpoint_and_time(iteration, model, optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator=train_data_iterator) - if should_exit: - break - - # Enable forward pre-hooks after first set of forward and backward passes. - # When running in fp16, skip all NaN iterations until steady-state loss scaling value - # is reached. - if iteration == start_iteration: - if skipped_iter: - # Only enable forward pre-hook after a training step has successfully run. Relevant - # for fp16 codepath where first XX iterations are skipped until steady-state loss - # scale value is reached. - start_iteration = iteration + 1 - else: - # Enable forward pre-hook after training step has successfully run. All subsequent - # forward passes will use the forward pre-hook / `param_sync_func` in - # `forward_backward_func`. 
- if args.use_distributed_optimizer and args.overlap_param_gather: - enable_forward_pre_hook(model) - config.param_sync_func = param_sync_func - pre_hook_enabled = True - - iteration += 1 - batch_size = mpu.get_data_parallel_world_size() * \ - args.micro_batch_size * \ - get_num_microbatches() - args.consumed_train_samples += batch_size - num_skipped_samples_in_batch = (get_current_global_batch_size() - - get_current_running_global_batch_size()) - if args.decrease_batch_size_if_needed: - assert num_skipped_samples_in_batch >= 0 - else: - assert num_skipped_samples_in_batch == 0 - args.skipped_train_samples += num_skipped_samples_in_batch - num_floating_point_operations_in_batch = num_floating_point_operations(args, batch_size) - num_floating_point_operations_so_far += num_floating_point_operations_in_batch - num_floating_point_operations_since_last_log_event += num_floating_point_operations_in_batch - - # Logging. - if not optimizer.is_stub_optimizer: - loss_scale = optimizer.get_loss_scale().item() - else: - loss_scale = 1.0 - params_norm = None - - if args.log_params_norm: - params_norm = calc_params_l2_norm(model) - learning_rate = None - decoupled_learning_rate = None - for param_group in optimizer.param_groups: - if param_group['is_decoupled_lr']: - decoupled_learning_rate = param_group['lr'] - else: - learning_rate = param_group['lr'] - report_memory_flag = training_log(loss_dict, total_loss_dict, - learning_rate, - decoupled_learning_rate, - iteration, loss_scale, - report_memory_flag, skipped_iter, - grad_norm, params_norm, num_zeros_in_grad) - - # Evaluation. - if args.eval_interval and iteration % args.eval_interval == 0 and \ - args.do_valid: - timers('interval-time').stop() - if args.use_distributed_optimizer and args.overlap_param_gather: - disable_forward_pre_hook(model) - pre_hook_enabled = False - if args.manual_gc and args.manual_gc_eval: - # Collect all objects. - gc.collect() - prefix = f'iteration {iteration}' - timers('eval-time', log_level=0).start(barrier=True) - evaluate_and_print_results(prefix, forward_step_func, - valid_data_iterator, model, - iteration, process_non_loss_data_func, - config, verbose=False, write_to_tensorboard=True, - non_loss_data_func=non_loss_data_func) - eval_duration += timers('eval-time').elapsed() - eval_iterations += args.eval_iters - timers('eval-time').stop() - one_logger_utils.track_e2e_metrics() - - if args.manual_gc and args.manual_gc_eval: - # Collect only the objects created and used in evaluation. - gc.collect(generation=0) - if args.use_distributed_optimizer and args.overlap_param_gather: - enable_forward_pre_hook(model) - pre_hook_enabled = True - timers('interval-time', log_level=0).start(barrier=True) - - if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: - ft_integration.get_rank_monitor_client( - ft_integration.StateMachineActions.EVAL_HEARTBEAT).send_heartbeat() - - # Miscellaneous post-training-step functions (e.g., FT heartbeats, GC). - # Some of these only happen at specific iterations. - post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, - num_floating_point_operations_since_last_log_event) - - # Checkpoint and decide whether to exit. - should_exit = checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, - num_floating_point_operations_so_far, - checkpointing_context, train_data_iterator) - if should_exit: - break - - one_logger_utils.track_e2e_metrics() - - # Flush TensorBoard, WandB writers and one-logger. 
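Bookkeeping note for the sample counters above: one iteration consumes data-parallel-size x micro-batch-size x num-microbatches samples. A tiny worked example with hypothetical settings:

data_parallel_size = 8      # hypothetical
micro_batch_size = 2        # hypothetical
num_microbatches = 16       # hypothetical

batch_size = data_parallel_size * micro_batch_size * num_microbatches   # 256 samples per iteration
consumed_train_samples = 0
for iteration in range(1, 4):
    consumed_train_samples += batch_size
print(consumed_train_samples)   # 768 samples consumed after 3 iterations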
- writer = get_tensorboard_writer() - if writer: - writer.flush() - - # Close out pre-hooks if using distributed optimizer and overlapped param gather. - if pre_hook_enabled: - disable_forward_pre_hook(model) - - if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: - ft_integration.get_rank_monitor_client().shutdown_workload_monitoring() - - maybe_finalize_async_save(blocking=True) - - # If any exit conditions (signal handler, duration, iterations) have been reached, exit. - if should_exit: - wandb_writer = get_wandb_writer() - if wandb_writer: - wandb_writer.finish() - sys.exit(exit_code) - - return iteration, num_floating_point_operations_so_far - - -def evaluate(forward_step_func, - data_iterator, - model, - process_non_loss_data_func, - config, - verbose=False, - non_loss_data_func=None): - """Evaluation.""" - args = get_args() - timers = get_timers() - - timers('evaluate', log_level=0).start(barrier=True) - - if args.vision_pretraining and args.vision_pretraining_type == "dino": - from megatron.legacy.model.vision.knn_monitor import compute_feature_bank - compute_feature_bank(model) - - # Turn on evaluation mode which disables dropout. - for model_module in model: - model_module.eval() - - # Disable result validation during evaluation - rerun_state_machine = get_rerun_state_machine() - rerun_mode = rerun_state_machine.get_mode() - rerun_state_machine.set_mode(RerunMode.DISABLED) - - total_loss_dict = {} - - # make validation batch size independent from training batch size - eval_batch_size = args.global_batch_size - eval_num_microbatches = eval_batch_size // \ - (args.micro_batch_size * args.data_parallel_size) - - with torch.no_grad(): - iteration = 0 - if verbose: - print_rank_0(f'Evaluating on {args.eval_iters * eval_batch_size} samples') - while iteration < args.eval_iters: - iteration += 1 - if verbose: - print_rank_0(f'Evaluating iter {iteration}/{args.eval_iters}') - - forward_backward_func = get_forward_backward_func() - # Don't care about timing during evaluation - config.timers = None - loss_dicts = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=data_iterator, - model=model, - num_microbatches=eval_num_microbatches, - seq_length=args.seq_length, - micro_batch_size=args.micro_batch_size, - decoder_seq_length=args.decoder_seq_length, - forward_only=True) - config.timers = get_timers() - - # Empty unused memory - if args.empty_unused_memory_level >= 1: - torch.cuda.empty_cache() - - if mpu.is_pipeline_last_stage(ignore_virtual=True): - # Reduce across processes. 
- for loss_dict in loss_dicts: - for key in loss_dict: - if key not in total_loss_dict: - total_loss_dict[key] = torch.tensor([0.0, 0.0], dtype=torch.float).cuda() - val = loss_dict[key] - if isinstance(val, tuple) or isinstance(val, list): - total_loss_dict[key][0] += val[0] - total_loss_dict[key][1] += val[1] - else: - total_loss_dict[key][0] += val - total_loss_dict[key][1] += 1 - - args.consumed_valid_samples += eval_batch_size - - if args.exit_duration_in_mins: - train_time = (time.time() - _TRAIN_START_TIME) / 60.0 - done_cuda = torch.tensor( - [train_time > args.exit_duration_in_mins], - dtype=torch.int, device='cuda') - torch.distributed.all_reduce( - done_cuda, op=torch.distributed.ReduceOp.MAX) - done = done_cuda.item() - if done: - rerun_state_machine.set_mode(rerun_mode) - print_rank_0('Exiting during evaluation, timelimit reached') - return None, None, True - - collected_non_loss_data = None - if non_loss_data_func is not None: - collected_non_loss_data = non_loss_data_func(model) - elif process_non_loss_data_func is not None and is_last_rank(): - collected_non_loss_data = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=data_iterator, - model=model, - num_microbatches=get_num_microbatches(), - seq_length=args.seq_length, - micro_batch_size=args.micro_batch_size, - decoder_seq_length=args.decoder_seq_length, - forward_only=True, - collect_non_loss_data=True) - - # Move model back to the train mode. - for model_module in model: - model_module.train() - - for key in total_loss_dict: - numerator, denominator = total_loss_dict[key] - total_loss_dict[key] = numerator / denominator - - timers('evaluate').stop() - timers.log(['evaluate']) - - rerun_state_machine.set_mode(rerun_mode) - - rerun_state_machine.set_mode(rerun_mode) - - return total_loss_dict, collected_non_loss_data, False - -def evaluate_and_print_results(prefix, forward_step_func, - data_iterator, model, - iteration, process_non_loss_data_func, config, - verbose=False, write_to_tensorboard=True, non_loss_data_func=None): - """Helper function to evaluate and dump results on screen.""" - args = get_args() - if write_to_tensorboard: - writer = get_tensorboard_writer() - else: - writer = None - - wandb_writer = get_wandb_writer() - - total_loss_dict, collected_non_loss_data, timelimit = evaluate( - forward_step_func, data_iterator, model, - process_non_loss_data_func, config, verbose, non_loss_data_func) - # Timelimit hit during evaluation - if timelimit: - return - string = f' validation loss at {prefix} | ' - for key in total_loss_dict: - string += '{} value: {:.6E} | '.format(key, total_loss_dict[key].item()) - ppl = math.exp(min(20, total_loss_dict[key].item())) - string += '{} PPL: {:.6E} | '.format(key, ppl) - if writer: - writer.add_scalar('{} validation'.format(key), - total_loss_dict[key].item(), - iteration) - writer.add_scalar('{} validation vs samples'.format(key), - total_loss_dict[key].item(), - args.consumed_train_samples) - if args.log_validation_ppl_to_tensorboard: - writer.add_scalar('{} validation ppl'.format(key), ppl, - iteration) - writer.add_scalar('{} validation ppl vs samples'.format(key), - ppl, args.consumed_train_samples) - if wandb_writer and is_last_rank(): - wandb_writer.log({ - '{} validation'.format(key): total_loss_dict[key].item()}, - iteration) - - if process_non_loss_data_func is not None and writer and is_last_rank(): - process_non_loss_data_func(collected_non_loss_data, iteration, writer) - - length = len(string) + 1 - print_rank_last('-' * length) - 
print_rank_last(string) - print_rank_last('-' * length) - - -def cyclic_iter(iter): - while True: - for x in iter: - yield x - - -def get_train_valid_test_num_samples(): - """Train/valid/test num samples.""" - - args = get_args() - - # Number of train/valid/test samples. - if args.train_samples: - train_samples = args.train_samples - else: - train_samples = args.train_iters * args.global_batch_size - eval_iters = (args.train_iters // args.eval_interval + 1) * \ - args.eval_iters - test_iters = args.eval_iters - - return ( - train_samples, - eval_iters * args.global_batch_size, - test_iters * args.global_batch_size, - ) - - -def build_train_valid_test_datasets(build_train_valid_test_datasets_provider): - """Build pretraining datasets.""" - train_valid_test_num_samples = get_train_valid_test_num_samples() - print_rank_0(' > datasets target sizes (minimum size):') - print_rank_0(' train: {}'.format(train_valid_test_num_samples[0])) - print_rank_0(' validation: {}'.format(train_valid_test_num_samples[1])) - print_rank_0(' test: {}'.format(train_valid_test_num_samples[2])) - return build_train_valid_test_datasets_provider(train_valid_test_num_samples) - - -def build_train_valid_test_data_loaders( - build_train_valid_test_datasets_provider): - """Build pretraining data loaders.""" - - args = get_args() - - (train_dataloader, valid_dataloader, test_dataloader) = (None, None, None) - - print_rank_0('> building train, validation, and test datasets ...') - - # Backward compatibility, assume fixed batch size. - if args.iteration > 0 and args.consumed_train_samples == 0: - assert args.train_samples is None, \ - 'Only backward compatiblity support for iteration-based training' - args.consumed_train_samples = args.iteration * args.global_batch_size - if args.iteration > 0 and args.consumed_valid_samples == 0: - if args.train_samples is None: - args.consumed_valid_samples = (args.iteration // args.eval_interval) * \ - args.eval_iters * args.global_batch_size - - # Rely on distributed-aware core datasets, temporary - is_distributed = getattr(build_train_valid_test_datasets_provider, "is_distributed", False) - - # Construct the data pipeline - if is_distributed or mpu.get_tensor_model_parallel_rank() == 0: - - # Build datasets. - train_ds, valid_ds, test_ds = build_train_valid_test_datasets( - build_train_valid_test_datasets_provider) - # Build dataloders. - train_dataloader = build_pretraining_data_loader( - train_ds, args.consumed_train_samples) - if args.skip_train: - valid_dataloader = build_pretraining_data_loader(valid_ds, 0) - else: - valid_dataloader = build_pretraining_data_loader( - valid_ds, args.consumed_valid_samples) - test_dataloader = build_pretraining_data_loader(test_ds, 0) - - # Flags to know if we need to do training/validation/testing. 
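The target sample counts printed by build_train_valid_test_datasets above follow directly from the iteration counts in get_train_valid_test_num_samples. A worked example with hypothetical settings:

train_iters, global_batch_size = 500_000, 1536   # hypothetical
eval_interval, eval_iters = 1000, 10             # hypothetical

train_samples = train_iters * global_batch_size                                       # 768,000,000
valid_samples = (train_iters // eval_interval + 1) * eval_iters * global_batch_size   # 7,695,360
test_samples = eval_iters * global_batch_size                                         # 15,360
print(train_samples, valid_samples, test_samples)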
- do_train = train_dataloader is not None and args.train_iters > 0 - do_valid = valid_dataloader is not None and args.eval_iters > 0 - do_test = test_dataloader is not None and args.eval_iters > 0 - flags = torch.tensor( - [int(do_train), int(do_valid), int(do_test)], - dtype=torch.long, device='cuda') - else: - flags = torch.tensor([0, 0, 0], dtype=torch.long, device='cuda') - - torch.distributed.broadcast(flags, 0) - - args.do_train = getattr(args, "do_train", False) or flags[0].item() - args.do_valid = getattr(args, "do_valid", False) or flags[1].item() - args.do_test = getattr(args, "do_test", False) or flags[2].item() - - return train_dataloader, valid_dataloader, test_dataloader - - -def build_train_valid_test_data_iterators( - build_train_valid_test_datasets_provider): - """Build pretraining data iterators.""" - - args = get_args() - - # Build loaders. - train_dataloader, valid_dataloader, test_dataloader = \ - build_train_valid_test_data_loaders( - build_train_valid_test_datasets_provider) - - # Build iterators. - dl_type = args.dataloader_type - assert dl_type in ['single', 'cyclic', 'external'] - - def _get_iterator(dataloader_type, dataloader): - """Return dataset iterator.""" - if dataloader_type == "single": - return RerunDataIterator(iter(dataloader)) - elif dataloader_type == "cyclic": - return RerunDataIterator(iter(cyclic_iter(dataloader))) - elif dataloader_type == "external": - # External dataloader is passed through. User is expected to define how to iterate. - if isinstance(dataloader, list): - return [RerunDataIterator(d) for d in dataloader] - else: - return RerunDataIterator(dataloader) - else: - raise RuntimeError("unexpected dataloader type") - - if train_dataloader is not None: - train_data_iterator = _get_iterator(dl_type, train_dataloader) - else: - train_data_iterator = None - - if valid_dataloader is not None: - valid_data_iterator = _get_iterator(dl_type, valid_dataloader) - else: - valid_data_iterator = None - - if test_dataloader is not None: - test_data_iterator = _get_iterator(dl_type, test_dataloader) - else: - test_data_iterator = None - - return train_data_iterator, valid_data_iterator, test_data_iterator +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Pretrain utilities.""" + +import dataclasses +from datetime import datetime +import functools +import gc +import logging +import math +import os +import sys +from typing import List + +import torch.distributed +from .log_handler import CustomHandler +# Make default logging level INFO, but filter out all log messages not from MCore. +logging.basicConfig(handlers=[CustomHandler()], level=logging.INFO) +from .theoretical_memory_usage import report_theoretical_memory +import time +# The earliest we can measure the start time. 
+_TRAIN_START_TIME = time.time() +import torch + +from megatron.core import mpu, tensor_parallel +from megatron.core.utils import ( + check_param_hashes_across_dp_replicas, + get_model_config, + StragglerDetector, + is_float8tensor, +) +from megatron.training.checkpointing import load_checkpoint +from megatron.training.checkpointing import save_checkpoint +from megatron.training.checkpointing import checkpoint_exists +from megatron.legacy.model import Float16Module +from megatron.core.distributed import DistributedDataParallelConfig +from megatron.core.distributed import DistributedDataParallel as DDP +from megatron.core.distributed.custom_fsdp import FullyShardedDataParallel as custom_FSDP +try: + from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP + + HAVE_FSDP2 = True +except ImportError: + HAVE_FSDP2 = False + +from megatron.core.distributed import finalize_model_grads +from megatron.core.enums import ModelType +from megatron.core.optimizer import get_megatron_optimizer, OptimizerConfig +from megatron.core.rerun_state_machine import ( + get_rerun_state_machine, + destroy_rerun_state_machine, + RerunDataIterator, + RerunMode, +) +from megatron.training.initialize import initialize_megatron +from megatron.training.initialize import write_args_to_tensorboard +from megatron.training.initialize import set_jit_fusion_options +from megatron.training.utils import ( + get_batch_on_this_cp_rank, + get_batch_on_this_tp_rank, +) +from megatron.legacy.data.data_samplers import build_pretraining_data_loader +from megatron.core.optimizer_param_scheduler import OptimizerParamScheduler +from megatron.core.transformer.moe import upcycling_utils +from megatron.core.transformer.moe.moe_utils import track_moe_metrics +from megatron.core.parallel_state import ( + destroy_global_memory_buffer, + destroy_model_parallel, +) +from megatron.core.pipeline_parallel import get_forward_backward_func +from megatron.core.num_microbatches_calculator import ( + destroy_num_microbatches_calculator, + get_current_global_batch_size, + get_current_running_global_batch_size, + get_num_microbatches, + update_num_microbatches) + +from .async_utils import maybe_finalize_async_save +from .utils import ( + append_to_progress_log, + calc_params_l2_norm, + check_adlr_autoresume_termination, + logical_and_across_model_parallel_group, + reduce_max_stat_across_model_parallel_group, + is_last_rank, + print_rank_0, + print_rank_last, + report_memory, + unwrap_model, + update_use_dist_ckpt, +) +from .global_vars import ( + destroy_global_vars, + get_args, + get_signal_handler, + get_timers, + get_tensorboard_writer, + get_wandb_writer, + get_one_logger, +) +from . import one_logger_utils + +from . import ft_integration + +stimer = StragglerDetector() + + +def destroy_global_state(): + destroy_global_vars() + destroy_num_microbatches_calculator() + destroy_global_memory_buffer() + destroy_model_parallel() + destroy_rerun_state_machine() + + +def print_datetime(string): + """Note that this call will sync across all ranks.""" + torch.distributed.barrier() + time_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S') + print_rank_0(f'[{string}] datetime: {time_str} ') + + +def num_floating_point_operations(args, batch_size): + # Attention projection size. + query_projection_size = args.kv_channels * args.num_attention_heads + query_projection_to_hidden_size_ratio = query_projection_size / args.hidden_size + # Group Query Attention. 
+ if not args.group_query_attention: + args.num_query_groups = args.num_attention_heads + # MoE. + num_experts_routed_to = 1 if args.num_experts is None else args.moe_router_topk + gated_linear_multiplier = 3 / 2 if args.swiglu else 1 + shared_expert_ffn_hidden_size = ( + 0 + if args.moe_shared_expert_intermediate_size is None + else args.moe_shared_expert_intermediate_size + ) + if args.num_experts is None: + ffn_hidden_size = args.ffn_hidden_size + else: + ffn_hidden_size = args.moe_ffn_hidden_size + + # The 12x term below comes from the following factors; for more details, see + # "APPENDIX: FLOATING-POINT OPERATIONS" in https://arxiv.org/abs/2104.04473. + # - 3x: Each GEMM in the model needs to be performed 3 times (forward pass, + # backward wgrad [weight gradient], backward dgrad [data gradient]). + # - 2x: GEMMs of a particular size are stacked twice in the standard Transformer model + # architectures implemented in this codebase (e.g., h->ffn_h GEMM and ffn_h->h GEMM + # in MLP layer). + # - 2x: A GEMM of a m*n tensor with a n*k tensor requires 2mnk floating-point operations. + expansion_factor = 3 * 2 * 2 + + return ( + expansion_factor + * batch_size + * args.seq_length + * args.num_layers + * args.hidden_size + * args.hidden_size + * ( + # Attention. + ( + ( + 1 + + (args.num_query_groups / args.num_attention_heads) + + (args.seq_length / args.hidden_size) + ) * query_projection_to_hidden_size_ratio + ) + # MLP. + + ( + (ffn_hidden_size / args.hidden_size) + * num_experts_routed_to + * gated_linear_multiplier + ) + # Shared Experts. + + ((shared_expert_ffn_hidden_size / args.hidden_size) * gated_linear_multiplier) + # Logit. + + (args.padded_vocab_size / (2 * args.num_layers * args.hidden_size)) + ) + ) + + +def get_start_time_from_progress_log(): + """ + Gets start time of earliest job with same world size. Also returns the number + of floating-point operations completed in last saved checkpoint. + """ + args = get_args() + assert args.save is not None + progress_log_filename = os.path.join(args.save, "progress.txt") + + # start_time is time when job with same world size started. + # start_num_floating_point_operations is the number of floating-point operations + # completed when this job started. + # latest_num_floating_point_operations is the number of floating-point operations + # completed in most recent saved checkpoint. + start_time = None + start_num_floating_point_operations = None + latest_num_floating_point_operations = 0 + + def _get_field(string, type): + return type(string.split(': ')[1]) + + with open(progress_log_filename, 'r') as f: + for line in f: + line = line.strip() + line_tokens = line.split('\t') + world_size_in_line = _get_field(line_tokens[2], int) + if line_tokens[3] == "Saved checkpoint": + latest_num_floating_point_operations = \ + _get_field(line_tokens[7], float) + if world_size_in_line != args.world_size: + # Re-start search if we see a different world size. 
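As a sanity check on num_floating_point_operations above, the following evaluates its dense path (no MoE routing, no shared experts) with hypothetical GPT-style hyperparameters; none of these numbers come from the patch:

batch_size, seq_length = 256, 4096
num_layers, hidden_size, ffn_hidden_size = 32, 4096, 16384
num_attention_heads, num_query_groups, kv_channels = 32, 8, 128
padded_vocab_size = 128_256
swiglu = True

query_projection_size = kv_channels * num_attention_heads
query_projection_to_hidden_size_ratio = query_projection_size / hidden_size   # 1.0 here
gated_linear_multiplier = 3 / 2 if swiglu else 1

flops_per_global_batch = (
    12 * batch_size * seq_length * num_layers * hidden_size * hidden_size * (
        (1 + num_query_groups / num_attention_heads + seq_length / hidden_size)
        * query_projection_to_hidden_size_ratio                        # attention
        + (ffn_hidden_size / hidden_size) * gated_linear_multiplier    # MLP (SwiGLU)
        + padded_vocab_size / (2 * num_layers * hidden_size)           # logits
    )
)
print(f"{flops_per_global_batch / 1e15:.1f} PFLOPs per global batch")   # roughly 59 PFLOPs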
+ start_time = None + start_num_floating_point_operations = None + continue + if line_tokens[3] == "Starting job": + if start_time is None: + start_time = line_tokens[0] + start_num_floating_point_operations = \ + latest_num_floating_point_operations + assert start_time is not None and start_num_floating_point_operations is not None, \ + "Should have seen at least one 'Starting job' entry with same world_size" + return datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S'), \ + start_num_floating_point_operations + + +def preprocess_common_state_dict(common_state_dict): + import copy + # Convert args key of type namespace to dictionary + preprocessed_common_state_dict = copy.deepcopy(common_state_dict) + preprocessed_common_state_dict['args'] = vars(preprocessed_common_state_dict['args']) + # Remove rank and local rank from state dict if it exists, since they are expected to be different + preprocessed_common_state_dict['args'].pop('local_rank', None) + preprocessed_common_state_dict['args'].pop('rank', None) + return preprocessed_common_state_dict + + +def pretrain( + train_valid_test_dataset_provider, + model_provider, + model_type, + forward_step_func, + process_non_loss_data_func=None, + extra_args_provider=None, + args_defaults={}, + get_embedding_ranks=None, + get_position_embedding_ranks=None, + non_loss_data_func=None, +): + """Main training program. + + This function will run the followings in the order provided: + 1) initialize Megatron. + 2) setup model, optimizer and lr schedule using the model_provider. + 3) call train_val_test_data_provider to get train/val/test datasets. + 4) train the model using the forward_step_func. + + Args: + train_valid_test_dataset_provider: a function that takes the size of + train/valid/test dataset and returns `train, valid, test` datasets. + model_provider: a function that returns a vanilla version of the + model. By vanilla we mean a simple model on cpu with no fp16 or ddp. + model_type: an enum that specifies the type of model being trained. + forward_step_func: a function that takes a `data iterator` and `model`, + and returns a `loss` scalar with a dictionary with key:values being + the info we would like to monitor during training, for example + `lm-loss: value`. We also require that this function add + `batch generator` to the timers class. + process_non_loss_data_func: a function to post process outputs of the + network. It can be used for dumping output tensors (e.g images) to + tensorboard. It takes `collected data`(list of tensors), + `current iteration index` and `tensorboard writer` as arguments. + extra_args_provider: a function that takes a parser and adds arguments + to it. It is used for programs to add their own arguments. + args_defaults: a dictionary from argument-name to argument-value. It + to set already parse arguments. + get_embedding_ranks (TODO): + get_position_embedding_ranks (TODO): + non_loss_data_func (callable): A custom function to call during evaluation. + It can run e.g. benchmarks. + """ + + # Initalize and get arguments, timers, and Tensorboard writer. 
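preprocess_common_state_dict above strips per-rank fields (rank, local_rank) out of the saved args namespace, which appears intended so that the "common" part of the checkpoint state compares cleanly across ranks. A standalone sketch of that normalization; the namespace contents are made up:

import argparse
import copy

args = argparse.Namespace(rank=3, local_rank=1, lr=3e-4, seq_length=4096)   # hypothetical
common_state_dict = {'args': args, 'checkpoint_version': 3.0}

cleaned = copy.deepcopy(common_state_dict)
cleaned['args'] = vars(cleaned['args'])    # Namespace -> plain dict
cleaned['args'].pop('rank', None)          # expected to differ per rank
cleaned['args'].pop('local_rank', None)
print(cleaned['args'])                     # {'lr': 0.0003, 'seq_length': 4096}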
+ initialize_megatron( + extra_args_provider=extra_args_provider, + args_defaults=args_defaults, + get_embedding_ranks=get_embedding_ranks, + get_position_embedding_ranks=get_position_embedding_ranks + ) + + args = get_args() + timers = get_timers() + + if args.log_progress: + append_to_progress_log("Starting job") + + # Initialize fault tolerance + # NOTE: ft_integration functions other than `setup` are no-op if the FT is not initialized + if args.enable_ft_package: + ft_integration.setup(args) + ft_integration.maybe_setup_simulated_fault() + + # Set pytorch JIT layer fusion options and warmup JIT functions. + set_jit_fusion_options() + + # Adjust the startup time so it reflects the largest value. + # This will be closer to what scheduler will see (outside of + # image ... launches. + global _TRAIN_START_TIME + start_time_tensor = torch.tensor([_TRAIN_START_TIME], + dtype=torch.double, + device='cuda') + torch.distributed.all_reduce(start_time_tensor, + op=torch.distributed.ReduceOp.MIN) + _TRAIN_START_TIME = start_time_tensor.item() + + app_metrics = {} + app_metrics['app_start_time'] = round(_TRAIN_START_TIME * 1000.0) + app_metrics['app_model_init_start_time'] = round(_TRAIN_START_TIME * 1000.0) + + print_rank_0('time to initialize megatron (seconds): {:.3f}'.format( + time.time() - _TRAIN_START_TIME)) + print_datetime('after megatron is initialized') + app_metrics['app_model_init_finish_time'] = one_logger_utils.get_timestamp_in_ms() + + # Track E2E metrics on pretrain start + one_logger_utils.on_pretrain_start() + + # Context used for persisting some state between checkpoint saves. + if args.non_persistent_ckpt_type == 'local': + try: + from nvidia_resiliency_ext.checkpointing.local.ckpt_managers.local_manager import \ + LocalCheckpointManager + from nvidia_resiliency_ext.checkpointing.local.replication.group_utils import \ + parse_group_sequence, GroupWrapper + from nvidia_resiliency_ext.checkpointing.local.replication.strategies import \ + CliqueReplicationStrategy + except ModuleNotFoundError: + raise RuntimeError("The 'nvidia_resiliency_ext' module is required for local " + "checkpointing but was not found. Please ensure it is installed.") + + if args.replication: + repl_strategy = CliqueReplicationStrategy.from_replication_params( + args.replication_jump, + args.replication_factor + ) + else: + repl_strategy = None + + checkpointing_context = { + 'local_checkpoint_manager': LocalCheckpointManager(args.non_persistent_local_ckpt_dir, + repl_strategy=repl_strategy + ) + } + else: + checkpointing_context = {} + + # Model, optimizer, and learning rate. + timers('model-and-optimizer-setup', log_level=0).start(barrier=True) + app_metrics['app_build_optimizer_start_time'] = one_logger_utils.get_timestamp_in_ms() + model, optimizer, opt_param_scheduler = setup_model_and_optimizer( + model_provider, model_type, checkpointing_context=checkpointing_context) + + timers('model-and-optimizer-setup').stop() + print_datetime('after model, optimizer, and learning rate ' + 'scheduler are built') + app_metrics['app_build_optimizer_finish_time'] = one_logger_utils.get_timestamp_in_ms() + config = get_model_config(model[0]) + + # Data stuff. 
+ app_metrics['app_build_dataiters_start_time'] = one_logger_utils.get_timestamp_in_ms() + timers('train/valid/test-data-iterators-setup', log_level=0).start( + barrier=True) + if args.virtual_pipeline_model_parallel_size is not None: + train_data_iterator = [] + valid_data_iterator = [] + test_data_iterator = [] + for i in range(len(model)): + mpu.set_virtual_pipeline_model_parallel_rank(i) + iterators = build_train_valid_test_data_iterators( + train_valid_test_dataset_provider) + train_data_iterator.append(iterators[0]) + valid_data_iterator.append(iterators[1]) + test_data_iterator.append(iterators[2]) + else: + train_data_iterator, valid_data_iterator, test_data_iterator \ + = build_train_valid_test_data_iterators( + train_valid_test_dataset_provider) + timers('train/valid/test-data-iterators-setup').stop() + print_datetime('after dataloaders are built') + app_metrics['app_build_dataiters_finish_time'] = one_logger_utils.get_timestamp_in_ms() + + # Track if training is enabled. Can only be done once args.do_train is assigned after dataloader is built. + one_logger_utils.track_config_flags(args.train_iters, args.skip_train, args.do_train, + args.do_valid, args.do_test, args.dataloader_type, + args.retro_project_dir, args.retro_cyclic_train_iters) + + # Print setup timing. + print_rank_0('done with setup ...') + timers.log(['model-and-optimizer-setup', + 'train/valid/test-data-iterators-setup'], barrier=True) + + one_logger = get_one_logger() + one_logger and one_logger.log_metrics(app_metrics) + + if not args.skip_train: + print_rank_0('training ...') + + if args.dataloader_type == 'cyclic' and args.retro_project_dir: + assert args.retro_cyclic_train_iters is not None + args.train_iters = args.retro_cyclic_train_iters + print_rank_0("retro cyclic train iters : %d" % args.train_iters) + + iteration = 0 + if args.do_train and args.train_iters > 0: + iteration, num_floating_point_operations_so_far = train( + forward_step_func, + model, optimizer, opt_param_scheduler, + train_data_iterator, valid_data_iterator, + process_non_loss_data_func, config, checkpointing_context, + non_loss_data_func) + + print_datetime('after training is done') + + if args.save and iteration != 0 and iteration % args.save_interval != 0: + save_checkpoint(iteration, model, optimizer, opt_param_scheduler, + num_floating_point_operations_so_far, checkpointing_context, + train_data_iterator=train_data_iterator, + preprocess_common_state_dict_fn=preprocess_common_state_dict) + + one_logger and one_logger.log_metrics({ + 'app_train_loop_finish_time': one_logger_utils.get_timestamp_in_ms() + }) + + else: + print_rank_0('skipping training (--skip-train is on) ...') + + iteration = args.iteration + + if args.do_valid: + prefix = f'iteration {iteration} on validation set' + evaluate_and_print_results(prefix, forward_step_func, + valid_data_iterator, model, + iteration, process_non_loss_data_func, config, + verbose=True, write_to_tensorboard=not args.skip_train, + non_loss_data_func=non_loss_data_func) + + if args.do_test: + prefix = f'iteration {iteration} on test set' + evaluate_and_print_results(prefix, forward_step_func, + test_data_iterator, model, + iteration, process_non_loss_data_func, config, + verbose=True, write_to_tensorboard=not args.skip_train, + non_loss_data_func=non_loss_data_func) + + wandb_writer = get_wandb_writer() + if wandb_writer: + wandb_writer.finish() + + ft_integration.on_checkpointing_start() + maybe_finalize_async_save(blocking=True, terminate=True) + 
ft_integration.on_checkpointing_end(is_async_finalization=True) + + one_logger and one_logger.log_metrics({ + 'app_finish_time': one_logger_utils.get_timestamp_in_ms() + }) + + ft_integration.shutdown() + one_logger_utils.finish() + + +def update_train_iters(args): + + # For iteration-based training, we don't need to do anything + if args.train_iters: + return + + # Constant batch size with sample-based training. + if args.rampup_batch_size is None: + args.train_iters = args.train_samples // args.global_batch_size + + else: + # Sample based training with rampup batch size. + iterations = 0 + consumed_samples = 0 + # Rampup phase. + while consumed_samples <= int(args.rampup_batch_size[2]) and consumed_samples <= args.train_samples: + update_num_microbatches(consumed_samples, consistency_check=False) + consumed_samples += get_current_global_batch_size() + iterations += 1 + # Reset + update_num_microbatches(0, consistency_check=False) + # Constant phase + # Note that we throw away any partial last batch. + if args.train_samples > consumed_samples: + iterations += (args.train_samples - consumed_samples) // \ + args.global_batch_size + args.train_iters = iterations + + print_rank_0(f'setting training iterations to {args.train_iters}') + + +def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap_with_ddp=True): + """Build the model.""" + args = get_args() + args.model_type = model_type + + # Build model. + def build_model(): + if mpu.get_pipeline_model_parallel_world_size() > 1 and \ + args.virtual_pipeline_model_parallel_size is not None: + assert model_type != ModelType.encoder_and_decoder, \ + "Interleaved schedule not supported for model with both encoder and decoder" + model = [] + for i in range(args.virtual_pipeline_model_parallel_size): + mpu.set_virtual_pipeline_model_parallel_rank(i) + # Set pre_process and post_process only after virtual rank is set. + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + this_model = model_provider_func( + pre_process=pre_process, + post_process=post_process + ) + this_model.model_type = model_type + model.append(this_model) + else: + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + add_encoder = True + add_decoder = True + if model_type == ModelType.encoder_and_decoder: + if mpu.get_pipeline_model_parallel_world_size() > 1: + rank = mpu.get_pipeline_model_parallel_rank() + first_decoder_rank = args.encoder_pipeline_model_parallel_size + world_size = mpu.get_pipeline_model_parallel_world_size() + pre_process = rank == 0 or rank == first_decoder_rank + post_process = (rank == (first_decoder_rank - 1)) or (rank == (world_size - 1)) + add_encoder = mpu.is_inside_encoder(rank) + add_decoder = mpu.is_inside_decoder(rank) + model = model_provider_func( + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder) + else: + model = model_provider_func( + pre_process=pre_process, + post_process=post_process + ) + model.model_type = model_type + return model + if args.init_model_with_meta_device: + with torch.device('meta'): + model = build_model() + else: + model = build_model() + + if not isinstance(model, list): + model = [model] + + # Set tensor model parallel attributes if not set. + # Only parameters that are already tensor model parallel have these + # attributes set for them. We should make sure the default attributes + # are set for all params so the optimizer can use them. 
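For the no-ramp-up branch of update_train_iters above, the iteration count is just integer division of the sample budget by the global batch size, so any partial final batch is dropped. A quick evaluation with hypothetical numbers:

train_samples = 300_000_000   # hypothetical sample budget
global_batch_size = 1536      # hypothetical

train_iters = train_samples // global_batch_size
print(train_iters)            # 195312 (the half-batch remainder is discarded)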
+ for model_module in model: + for param in model_module.parameters(): + tensor_parallel.set_defaults_if_not_set_tensor_model_parallel_attributes(param) + + # Print number of parameters. + num_parameters = sum( + [sum([p.nelement() for p in model_module.parameters()]) + for model_module in model] + ) + if mpu.get_data_parallel_rank() == 0: + print(' > number of parameters on (tensor, pipeline) ' + 'model parallel rank ({}, {}): {}'.format( + mpu.get_tensor_model_parallel_rank(), + mpu.get_pipeline_model_parallel_rank(), + num_parameters), flush=True) + + # GPU allocation. + # For FSDP2, we don't allocate GPU memory here. We allocate GPU memory + # in the fully_shard function of FSDP2 instead. + if not (args.use_torch_fsdp2 and args.use_cpu_initialization) and not args.init_model_with_meta_device: + for model_module in model: + model_module.cuda(torch.cuda.current_device()) + + # Fp16 conversion. + if args.fp16 or args.bf16: + model = [Float16Module(model_module, args) for model_module in model] + + # The model_module.bfloat16()/model_module.half() above will call the inplace copy of TE's + # Float8Tensor, which will write an unwanted value (amax calculated from the current fp8 + # param) to its amax_history. The following logic will correct the amax_history back. + for model_module in model: + for param in model_module.parameters(): + if is_float8tensor(param) and param._fp8_meta is not None: + fp8_meta = param._fp8_meta['scaling_fwd'] + fp8_meta_index = param._fp8_meta_index + if hasattr(param, 'get_high_precision_init_val'): + fp8_meta.amax_history[0][fp8_meta_index].copy_( + param.get_high_precision_init_val().abs().max() + ) + else: + fp8_meta.amax_history[0][fp8_meta_index] = 0 + + if wrap_with_ddp: + if args.use_torch_fsdp2: + assert HAVE_FSDP2, "Torch FSDP2 requires torch>=2.4.0" + DP = torch_FSDP + elif args.use_custom_fsdp: + DP = custom_FSDP + else: + DP = DDP + + config = get_model_config(model[0]) + + kwargs = {} + for f in dataclasses.fields(DistributedDataParallelConfig): + if hasattr(args, f.name): + kwargs[f.name] = getattr(args, f.name) + kwargs['grad_reduce_in_fp32'] = args.accumulate_allreduce_grads_in_fp32 + kwargs['check_for_nan_in_grad'] = args.check_for_nan_in_loss_and_grad + kwargs['check_for_large_grads'] = args.check_for_large_grads + if args.ddp_num_buckets is not None: + assert args.ddp_bucket_size is None, \ + "Cannot specify both --ddp-num-buckets and --ddp-bucket-size" + assert args.ddp_num_buckets > 0, \ + "--ddp-num-buckets must be greater than 0" + kwargs['bucket_size'] = num_parameters // args.ddp_num_buckets + else: + kwargs['bucket_size'] = args.ddp_bucket_size + kwargs['pad_buckets_for_high_nccl_busbw'] = args.ddp_pad_buckets_for_high_nccl_busbw + kwargs['average_in_collective'] = args.ddp_average_in_collective + if args.use_custom_fsdp and args.use_precision_aware_optimizer: + kwargs["preserve_fp32_weights"] = False + ddp_config = DistributedDataParallelConfig(**kwargs) + + if not getattr(args, "use_torch_fsdp2", False): + # In the custom FSDP and DDP use path, we need to initialize the bucket size. + + # If bucket_size is not provided as an input, use sane default. + # If using very large dp_sizes, make buckets larger to ensure that chunks used in NCCL + # ring-reduce implementations are large enough to remain bandwidth-bound rather than + # latency-bound. 
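The comment above describes the default gradient-bucket sizing applied in the next hunk: scale the bucket with the data-parallel size so NCCL ring-reduce chunks stay bandwidth-bound, and fall back to a single unbounded bucket when grad-reduce overlap is off. A quick evaluation of that heuristic with hypothetical sizes:

data_parallel_world_size = 128   # hypothetical (with context parallelism folded in)
overlap_grad_reduce = True       # hypothetical

bucket_size = max(40_000_000, 1_000_000 * data_parallel_world_size)   # 128,000,000 elements
if not overlap_grad_reduce:
    bucket_size = None           # one bucket spanning the whole model
print(bucket_size)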
+ if ddp_config.bucket_size is None: + ddp_config.bucket_size = max( + 40000000, 1000000 * mpu.get_data_parallel_world_size(with_context_parallel=True) + ) + # Set bucket_size to infinity if overlap_grad_reduce is False. + if not ddp_config.overlap_grad_reduce: + ddp_config.bucket_size = None + + model = [DP(config=config, + ddp_config=ddp_config, + module=model_chunk, + # Turn off bucketing for model_chunk 2 onwards, since communication for these + # model chunks is overlapped with compute anyway. + disable_bucketing=(model_chunk_idx > 0) or args.overlap_param_gather_with_optimizer_step) + for (model_chunk_idx, model_chunk) in enumerate(model)] + + # Broadcast params from data parallel src rank to other data parallel ranks. + if args.data_parallel_random_init: + for model_module in model: + model_module.broadcast_params() + + return model + + +def get_optimizer_param_scheduler(optimizer): + """Build the learning rate scheduler.""" + args = get_args() + + # Iteration-based training. + if args.train_iters: + if args.lr_decay_iters is None: + args.lr_decay_iters = args.train_iters + lr_decay_steps = args.lr_decay_iters * args.global_batch_size + wd_incr_steps = args.train_iters * args.global_batch_size + wsd_decay_steps = None + if args.lr_wsd_decay_iters is not None: + wsd_decay_steps = args.lr_wsd_decay_iters * args.global_batch_size + if args.lr_warmup_fraction is not None: + lr_warmup_steps = args.lr_warmup_fraction * lr_decay_steps + else: + lr_warmup_steps = args.lr_warmup_iters * args.global_batch_size + # Sample-based training. + elif args.train_samples: + # We need to set training iters for later use. Technically + # we need to adjust the training samples too (due to last + # batch being incomplete) but we leave it as is for now. + update_train_iters(args) + if args.lr_decay_samples is None: + args.lr_decay_samples = args.train_samples + lr_decay_steps = args.lr_decay_samples + wd_incr_steps = args.train_samples + wsd_decay_steps = args.lr_wsd_decay_samples + if args.lr_warmup_fraction is not None: + lr_warmup_steps = args.lr_warmup_fraction * lr_decay_steps + else: + lr_warmup_steps = args.lr_warmup_samples + else: + raise Exception( + 'either train-iters or train-samples should be provided.') + + opt_param_scheduler = OptimizerParamScheduler( + optimizer, + init_lr=args.lr_warmup_init, + max_lr=args.lr, + min_lr=args.min_lr, + lr_warmup_steps=lr_warmup_steps, + lr_decay_steps=lr_decay_steps, + lr_decay_style=args.lr_decay_style, + start_wd=args.start_weight_decay, + end_wd=args.end_weight_decay, + wd_incr_steps=wd_incr_steps, + wd_incr_style=args.weight_decay_incr_style, + use_checkpoint_opt_param_scheduler=args.use_checkpoint_opt_param_scheduler, + override_opt_param_scheduler=args.override_opt_param_scheduler, + wsd_decay_steps=wsd_decay_steps, + lr_wsd_decay_style=args.lr_wsd_decay_style) + + return opt_param_scheduler + + +def setup_model_and_optimizer(model_provider_func, + model_type, + no_wd_decay_cond=None, + scale_lr_cond=None, + lr_mult=1.0, + checkpointing_context=None): + """Setup model and optimizer.""" + args = get_args() + timers = get_timers() + one_logger = get_one_logger() + + model = get_model(model_provider_func, model_type) + unwrapped_model = unwrap_model(model) + + kwargs = {} + for f in dataclasses.fields(OptimizerConfig): + if hasattr(args, f.name): + kwargs[f.name] = getattr(args, f.name) + config = OptimizerConfig(**kwargs) + config.timers = timers + optimizer = get_megatron_optimizer(config, model, no_wd_decay_cond, + scale_lr_cond, lr_mult, + 
use_gloo_process_groups=args.enable_gloo_process_groups) + opt_param_scheduler = get_optimizer_param_scheduler(optimizer) + + if args.moe_use_upcycling: + torch.distributed.barrier() + assert not checkpoint_exists( + args.save + ), ("The upcycling destination directory already exists. " + "Please check if --moe-use-upcycling is mistakenly enabled. " + "Upcycling should only be set for the first run when converting the dense model. " + "All subsequent runs should remove this flag. ") + num_experts = args.num_experts + args.num_experts = None + expert_model_parallel_size = args.expert_model_parallel_size + args.expert_model_parallel_size = 1 + dense_model_for_upcycling = get_model(model_provider_func, model_type) + args.num_experts = num_experts + args.expert_model_parallel_size = expert_model_parallel_size + _, args.num_floating_point_operations_so_far = upcycling_utils.load_and_upcycle_model( + load_checkpoint, + unwrapped_model, + dense_model_for_upcycling, + load_kwargs = {'model': dense_model_for_upcycling, 'optimizer': None, 'opt_param_scheduler': None} + ) + args.iteration = 1 + save_checkpoint(args.iteration, model, None, None, args.num_floating_point_operations_so_far) + torch.distributed.barrier() + del dense_model_for_upcycling + if (args.fp16 or args.bf16) and optimizer is not None: + optimizer.reload_model_params() + print_rank_0(f'Upcycled checkpoint saved to {args.save}') + + if (args.load is not None or args.pretrained_checkpoint is not None) and not args.moe_use_upcycling: + one_logger and one_logger.log_metrics({ + 'load_checkpoint_start_time': one_logger_utils.get_timestamp_in_ms() + }) + timers('load-checkpoint', log_level=0).start(barrier=True) + + args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( + model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context, + skip_load_to_model_and_opt=HAVE_FSDP2 and args.use_torch_fsdp2) + timers('load-checkpoint').stop(barrier=True) + timers.log(['load-checkpoint']) + one_logger and one_logger.log_metrics({ + 'load_checkpoint_finish_time': one_logger_utils.get_timestamp_in_ms(), + 'load_checkpoint_time': timers('load-checkpoint').active_time() + }) + else: + args.iteration = 0 + args.num_floating_point_operations_so_far = 0 + + # get model without FP16 and/or DDP wrappers + if args.iteration == 0 and len(unwrapped_model) == 1 \ + and hasattr(unwrapped_model[0], 'init_state_dict_from_bert'): + print_rank_0("Initializing ICT from pretrained BERT model") + unwrapped_model[0].init_state_dict_from_bert() + if args.fp16: + optimizer.reload_model_params() + + # Convert checkpoint format. + if args.ckpt_convert_format is not None: + load_ckpt_format = args.ckpt_format + args.ckpt_format = args.ckpt_convert_format + args.save = os.path.join(args.ckpt_convert_save, args.ckpt_convert_format) + update_use_dist_ckpt(args) + + save_checkpoint(args.iteration, model, optimizer, opt_param_scheduler, + args.num_floating_point_operations_so_far, + preprocess_common_state_dict_fn=preprocess_common_state_dict) + + print_rank_0("> converted checkpoint: %s -> %s." % (load_ckpt_format, args.ckpt_format)) + torch.distributed.barrier() + exit() + + return model, optimizer, opt_param_scheduler + + +def dummy_train_step(data_iterator): + """Single dummy training step.""" + num_microbatches = get_num_microbatches() + for _ in range(num_microbatches): + # Re-use methods used in get_batch() from pretrain_{gpt, mamba}.py. 
+ batch = get_batch_on_this_tp_rank(data_iterator) + batch = get_batch_on_this_cp_rank(batch) + + +def train_step(forward_step_func, data_iterator, + model, optimizer, opt_param_scheduler, config): + """Single training step.""" + args = get_args() + timers = get_timers() + + rerun_state_machine = get_rerun_state_machine() + while rerun_state_machine.should_run_forward_backward(data_iterator): + # Set grad to zero. + for model_chunk in model: + model_chunk.zero_grad_buffer() + optimizer.zero_grad() + + # Forward pass. + forward_backward_func = get_forward_backward_func() + losses_reduced = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=data_iterator, + model=model, + num_microbatches=get_num_microbatches(), + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + decoder_seq_length=args.decoder_seq_length, + forward_only=False) + should_checkpoint, should_exit, exit_code = rerun_state_machine.should_checkpoint_and_exit() + if should_exit: + return {}, True, should_checkpoint, should_exit, exit_code, None, None + + # Empty unused memory. + if args.empty_unused_memory_level >= 1: + torch.cuda.empty_cache() + + # Vision gradients. + if args.vision_pretraining and args.vision_pretraining_type == "dino": + unwrapped_model = unwrap_model(model[0]) + unwrapped_model.cancel_gradients_last_layer(args.curr_iteration) + + # Update parameters. + + timers('optimizer', log_level=1).start(barrier=args.barrier_with_L1_time) + update_successful, grad_norm, num_zeros_in_grad = optimizer.step() + timers('optimizer').stop() + + # when freezing sub-models we may have a mixture of successful and unsucessful ranks, + # so we must gather across mp ranks + update_successful = logical_and_across_model_parallel_group(update_successful) + # grad_norm and num_zeros_in_grad will be None on ranks without trainable params, + # so we must gather across mp ranks + grad_norm = reduce_max_stat_across_model_parallel_group(grad_norm) + if args.log_num_zeros_in_grad: + num_zeros_in_grad = reduce_max_stat_across_model_parallel_group(num_zeros_in_grad) + + # Vision momentum. + if args.vision_pretraining and args.vision_pretraining_type == "dino": + unwrapped_model = unwrap_model(model[0]) + unwrapped_model.update_momentum(args.curr_iteration) + + # Update learning rate. + if update_successful: + increment = get_num_microbatches() * \ + args.micro_batch_size * \ + args.data_parallel_size + opt_param_scheduler.step(increment=increment) + skipped_iter = 0 + else: + skipped_iter = 1 + + # Empty unused memory. + if args.empty_unused_memory_level >= 2: + torch.cuda.empty_cache() + + if mpu.is_pipeline_last_stage(ignore_virtual=True): + # Average loss across microbatches. + loss_reduced = {} + for key in losses_reduced[0].keys(): + numerator = 0 + denominator = 0 + for x in losses_reduced: + val = x[key] + # there is one dict per microbatch. in new reporting, we average + # over the total number of tokens across the global batch. + if isinstance(val, tuple) or isinstance(val, list): + numerator += val[0] + denominator += val[1] + else: + # legacy behavior. we average over the number of microbatches, + # and so the denominator is 1. 
+ numerator += val + denominator += 1 + loss_reduced[key] = numerator / denominator + return loss_reduced, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad + return {}, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad + + +def training_log(loss_dict, total_loss_dict, learning_rate, decoupled_learning_rate, iteration, + loss_scale, report_memory_flag, skipped_iter, + grad_norm, params_norm, num_zeros_in_grad): + """Log training information such as losses, timing, ....""" + args = get_args() + timers = get_timers() + writer = get_tensorboard_writer() + wandb_writer = get_wandb_writer() + one_logger = get_one_logger() + + # Advanced, skipped, and Nan iterations. + advanced_iters_key = 'advanced iterations' + skipped_iters_key = 'skipped iterations' + nan_iters_key = 'nan iterations' + # Advanced iterations. + if not skipped_iter: + total_loss_dict[advanced_iters_key] = total_loss_dict.get( + advanced_iters_key, 0) + 1 + else: + if advanced_iters_key not in total_loss_dict: + total_loss_dict[advanced_iters_key] = 0 + # Skipped iterations. + total_loss_dict[skipped_iters_key] = total_loss_dict.get( + skipped_iters_key, 0) + skipped_iter + # Update losses and set nan iterations + got_nan = False + for key in loss_dict: + if not skipped_iter: + total_loss_dict[key] = total_loss_dict.get( + key, torch.tensor([0.0], dtype=torch.float, device='cuda')) + loss_dict[key] + else: + value = loss_dict[key].float().sum().item() + is_nan = value == float('inf') or \ + value == -float('inf') or \ + value != value + got_nan = got_nan or is_nan + total_loss_dict[nan_iters_key] = total_loss_dict.get( + nan_iters_key, 0) + int(got_nan) + + # Logging. + timers_to_log = [ + 'forward-backward', + 'forward-compute', + 'backward-compute', + 'batch-generator', + 'forward-recv', + 'forward-send', + 'backward-recv', + 'backward-send', + 'forward-send-forward-recv', + 'forward-send-backward-recv', + 'backward-send-forward-recv', + 'backward-send-backward-recv', + 'forward-backward-send-forward-backward-recv', + 'layernorm-grads-all-reduce', + 'embedding-grads-all-reduce', + 'all-grads-sync', + 'params-all-gather', + 'optimizer-copy-to-main-grad', + 'optimizer-unscale-and-check-inf', + 'optimizer-clip-main-grad', + 'optimizer-count-zeros', + 'optimizer-inner-step', + 'optimizer-copy-main-to-model-params', + 'optimizer'] + + # Calculate batch size. + batch_size = args.micro_batch_size * args.data_parallel_size * \ + get_num_microbatches() + + # Track app tag & app tag ID + one_logger_utils.track_app_tag(batch_size, args.world_size, args.seq_length) + + total_iterations = total_loss_dict[advanced_iters_key] + \ + total_loss_dict[skipped_iters_key] + + # learning rate will be None on ranks without trainable params, so we must gather across mp ranks + learning_rate = reduce_max_stat_across_model_parallel_group(learning_rate) + # Tensorboard values. + # Timer requires all the ranks to call. 
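The per-key reduction at the end of train_step above accepts two reporting formats per microbatch: a (sum, count) pair for token-weighted averaging across the global batch, or a bare scalar that is simply averaged over microbatches. A small standalone sketch of that reduction with toy loss dicts:

def reduce_losses(losses_per_microbatch):
    """Average losses reported either as (numerator, denominator) pairs or bare scalars."""
    reduced = {}
    for key in losses_per_microbatch[0]:
        numerator, denominator = 0.0, 0.0
        for mb in losses_per_microbatch:
            val = mb[key]
            if isinstance(val, (tuple, list)):   # new style: (loss_sum, token_count)
                numerator += val[0]
                denominator += val[1]
            else:                                # legacy style: one scalar per microbatch
                numerator += val
                denominator += 1
        reduced[key] = numerator / denominator
    return reduced

# Two microbatches; 'lm loss' is token-weighted, 'aux loss' uses the legacy format.
print(reduce_losses([{'lm loss': (10.0, 4), 'aux loss': 0.5},
                     {'lm loss': (18.0, 8), 'aux loss': 0.7}]))
# {'lm loss': 2.333..., 'aux loss': 0.6}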
+ if args.log_timers_to_tensorboard and \ + (iteration % args.tensorboard_log_interval == 0): + timers.write(timers_to_log, writer, iteration, + normalizer=total_iterations) + if writer and (iteration % args.tensorboard_log_interval == 0): + if wandb_writer: + wandb_writer.log({'samples vs steps': args.consumed_train_samples}, + iteration) + writer.add_scalar('learning-rate', learning_rate, iteration) + writer.add_scalar('learning-rate vs samples', learning_rate, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'learning-rate': learning_rate}, iteration) + if args.decoupled_lr is not None: + writer.add_scalar('decoupled-learning-rate', decoupled_learning_rate, iteration) + if args.skipped_train_samples > 0: + writer.add_scalar('skipped-train-samples', args.skipped_train_samples, iteration) + if wandb_writer: + wandb_writer.log({'skipped-train-samples': args.skipped_train_samples}, iteration) + writer.add_scalar('batch-size', batch_size, iteration) + writer.add_scalar('batch-size vs samples', batch_size, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'batch-size': batch_size}, iteration) + for key in loss_dict: + writer.add_scalar(key , loss_dict[key], iteration) + writer.add_scalar(key + ' vs samples', loss_dict[key], + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({key: loss_dict[key]}, iteration) + if args.log_loss_scale_to_tensorboard: + writer.add_scalar('loss-scale', loss_scale, iteration) + writer.add_scalar('loss-scale vs samples', loss_scale, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'loss-scale': loss_scale}, iteration) + if args.log_world_size_to_tensorboard: + writer.add_scalar('world-size', args.world_size, iteration) + writer.add_scalar('world-size vs samples', args.world_size, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'world-size': args.world_size}, iteration) + if grad_norm is not None: + writer.add_scalar('grad-norm', grad_norm, iteration) + writer.add_scalar('grad-norm vs samples', grad_norm, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'grad-norm': grad_norm}, iteration) + if num_zeros_in_grad is not None: + writer.add_scalar('num-zeros', num_zeros_in_grad, iteration) + writer.add_scalar('num-zeros vs samples', num_zeros_in_grad, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'num-zeros': num_zeros_in_grad}, iteration) + if params_norm is not None: + writer.add_scalar('params-norm', params_norm, iteration) + writer.add_scalar('params-norm vs samples', params_norm, + args.consumed_train_samples) + if wandb_writer: + wandb_writer.log({'params-norm': params_norm}, iteration) + if args.log_memory_to_tensorboard: + mem_stats = torch.cuda.memory_stats() + writer.add_scalar( + "mem-reserved-bytes", + mem_stats["reserved_bytes.all.current"], + iteration, + ) + writer.add_scalar( + "mem-allocated-bytes", + mem_stats["allocated_bytes.all.current"], + iteration, + ) + writer.add_scalar( + "mem-max-allocated-bytes", + mem_stats["allocated_bytes.all.peak"], + iteration, + ) + writer.add_scalar( + "mem-allocated-count", + mem_stats["allocation.all.current"], + iteration, + ) + if args.num_experts is not None: + moe_loss_scale = 1 / get_num_microbatches() + track_moe_metrics(moe_loss_scale, iteration, writer, wandb_writer, total_loss_dict, args.moe_per_layer_logging) + + if iteration % args.log_interval == 0: + if args.record_memory_history and is_last_rank(): + snapshot = torch.cuda.memory._snapshot() + from pickle 
import dump + with open(args.memory_snapshot_path, 'wb') as f: + dump(snapshot, f) + + elapsed_time = timers('interval-time').elapsed(barrier=True) + elapsed_time_per_iteration = elapsed_time / total_iterations + + throughput = num_floating_point_operations(args, batch_size) / ( + elapsed_time_per_iteration * 10**12 * args.world_size) + + one_logger_utils.track_e2e_metrics(args.log_throughput, throughput) + + if args.log_timers_to_tensorboard: + if writer: + writer.add_scalar('iteration-time', + elapsed_time_per_iteration, iteration) + if wandb_writer: + wandb_writer.log({'iteration-time': elapsed_time_per_iteration}, + iteration) + log_string = f" [{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}]" + log_string += ' iteration {:8d}/{:8d} |'.format( + iteration, args.train_iters) + log_string += ' consumed samples: {:12d} |'.format( + args.consumed_train_samples) + if args.skipped_train_samples > 0: + log_string += ' skipped samples: {:12d} |'.format( + args.skipped_train_samples) + log_string += ' elapsed time per iteration (ms): {:.1f} |'.format( + elapsed_time_per_iteration * 1000.0) + if args.log_throughput: + log_string += f' throughput per GPU (TFLOP/s/GPU): {throughput:.1f} |' + if args.log_timers_to_tensorboard: + if writer: + writer.add_scalar('throughput', throughput, iteration) + if wandb_writer: + wandb_writer.log({'throughput': throughput}, iteration) + # Decoupled_learning_rate should be not None only on first and last pipeline stage. + log_string += f' learning rate: {learning_rate:.6E} |' + if args.decoupled_lr is not None and (mpu.is_pipeline_first_stage(ignore_virtual=True) or + mpu.is_pipeline_last_stage(ignore_virtual=True)): + assert decoupled_learning_rate is not None + log_string += f' decoupled learning rate: {decoupled_learning_rate:.6E} |' + else: + assert decoupled_learning_rate is None + log_string += f' global batch size: {batch_size:5d} |' + for key in total_loss_dict: + if key not in [advanced_iters_key, skipped_iters_key, + nan_iters_key]: + avg = total_loss_dict[key].item() / \ + float(max(1, total_loss_dict[advanced_iters_key])) + if avg > 0.0: + log_string += ' {}: {:.6E} |'.format(key, avg) + total_loss_dict[key] = torch.tensor([0.0], dtype=torch.float, device='cuda') + log_string += f' loss scale: {loss_scale:.1f} |' + if grad_norm is not None: + log_string += f' grad norm: {grad_norm:.3f} |' + if num_zeros_in_grad is not None: + log_string += f' num zeros: {num_zeros_in_grad} |' + if params_norm is not None: + log_string += f' params norm: {params_norm:.3f} |' + log_string += ' number of skipped iterations: {:3d} |'.format( + total_loss_dict[skipped_iters_key]) + log_string += ' number of nan iterations: {:3d} |'.format( + total_loss_dict[nan_iters_key]) + total_loss_dict[advanced_iters_key] = 0 + total_loss_dict[skipped_iters_key] = 0 + total_loss_dict[nan_iters_key] = 0 + print_rank_last(log_string) + if report_memory_flag: + # Report memory after optimizer state has been initialized. + if torch.distributed.get_rank() == 0: + num_microbatches = get_num_microbatches() + report_theoretical_memory(args, num_microbatches=num_microbatches, verbose=True) + report_memory(f'(after {iteration} iterations)') + report_memory_flag = False + timers.log(timers_to_log, normalizer=args.log_interval) + + return report_memory_flag + + +def compute_throughputs_and_append_to_progress_log(iteration, + num_floating_point_operations_so_far): + args = get_args() + if args.save is None: + return + + # Compute job throughput. 
+ # args.num_floating_point_operations_so_far keeps track of floating-point operations + # completed at the start of job. + global _TRAIN_START_TIME + job_throughput = \ + (num_floating_point_operations_so_far - + args.num_floating_point_operations_so_far) / ( + (time.time() - _TRAIN_START_TIME) * 10**12 * args.world_size) + + # Compute cumulative throughput since jobs of this world size were launched. + # `get_start_time_from_progress_log` returns start time and number of floating-point + # operations of first job of this world size. + start_time, start_num_floating_point_operations = get_start_time_from_progress_log() + elapsed_time = (datetime.now() - start_time).total_seconds() + cumulative_throughput = \ + (num_floating_point_operations_so_far - + start_num_floating_point_operations) / ( + elapsed_time * 10**12 * args.world_size) + + tokens_so_far = args.consumed_train_samples * args.seq_length + saved_ckpt_prefix = 'Saving async checkpoint' if args.async_save else 'Saved checkpoint' + append_to_progress_log(f"{saved_ckpt_prefix}\tIteration: {iteration}\t" + f"Job throughput: {job_throughput:.1f} TFLOP/s/GPU\t" + f"Cumulative throughput: {cumulative_throughput:.1f} TFLOP/s/GPU\t" + f"Floating-point operations: {num_floating_point_operations_so_far:.2e}\t" + f"Tokens (in billions): {tokens_so_far / 10**9:.2f}") + + +def enable_forward_pre_hook(model_chunks): + for model_chunk in model_chunks: + assert isinstance(model_chunk, DDP) + model_chunk.enable_forward_pre_hook() + + +def disable_forward_pre_hook(model_chunks, param_sync=True): + for model_chunk in model_chunks: + assert isinstance(model_chunk, DDP) + model_chunk.disable_forward_pre_hook(param_sync=param_sync) + + +def save_checkpoint_and_time(iteration, model, optimizer, opt_param_scheduler, + num_floating_point_operations_so_far, checkpointing_context, + non_persistent_ckpt=False, train_data_iterator=None): + args = get_args() + timers = get_timers() + + # Stop timer to get accurate train interval time and exclude checkpointing duration + timers('interval-time').stop() + # Extra barrier is added to make sure all ranks report the max time. 
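Both the per-iteration throughput logged in training_log and the job/cumulative throughput written to the progress log above use the same normalization: floating-point operations divided by wall-clock seconds, 10**12, and the world size, giving TFLOP/s per GPU. A worked example with assumed numbers:

# Illustrative values only.
flops_per_iteration = 4.8e15       # would come from num_floating_point_operations(args, batch_size)
elapsed_time_per_iteration = 12.5  # seconds
world_size = 64                    # GPUs

throughput = flops_per_iteration / (elapsed_time_per_iteration * 10**12 * world_size)
print(f"{throughput:.1f} TFLOP/s/GPU")         # 6.0 TFLOP/s/GPU

# Token accounting follows the same bookkeeping used for the progress log line.
consumed_train_samples, seq_length = 2_000_000, 4096
tokens_so_far = consumed_train_samples * seq_length
print(f"{tokens_so_far / 10**9:.2f}B tokens")  # 8.19B tokens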
+ timer_key = 'save-checkpoint-non-persistent' if non_persistent_ckpt else 'save-checkpoint' + timers(timer_key, log_level=0).start(barrier=True) + + # Log E2E metrics before save-checkpoint + one_logger_utils.track_e2e_metrics() + if should_disable_forward_pre_hook(args): + disable_forward_pre_hook(model) + save_checkpoint(iteration, model, optimizer, opt_param_scheduler, + num_floating_point_operations_so_far, checkpointing_context, + non_persistent_ckpt=non_persistent_ckpt, train_data_iterator=train_data_iterator, + preprocess_common_state_dict_fn=preprocess_common_state_dict) + if should_disable_forward_pre_hook(args): + enable_forward_pre_hook(model) + timers(timer_key).stop(barrier=True) + timers.log([timer_key]) + + # Log E2E metrics after save-checkpoint + one_logger_utils.track_e2e_metrics() + save_checkpoint_duration = timers(timer_key).elapsed() + one_logger_utils.on_save_checkpoint_end(save_checkpoint_duration, iteration, args.async_save) + + if args.log_progress and not non_persistent_ckpt: + compute_throughputs_and_append_to_progress_log(iteration, + num_floating_point_operations_so_far) + + # Recover timing + timers('interval-time', log_level=0).start(barrier=True) + + +def post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, + num_floating_point_operations_since_last_log_event): + """Run all post-training-step functions (e.g., FT heartbeats, GC).""" + args = get_args() + + # Bring CPU and GPU back in sync if on right iteration. + if args.train_sync_interval and iteration % args.train_sync_interval == 0: + torch.cuda.synchronize() + + # Straggler detector. + if iteration % args.log_interval == 0 and args.log_straggler: + stimer.report(num_floating_point_operations_since_last_log_event, args.log_interval) + num_floating_point_operations_since_last_log_event = 0.0 + + # Check weight hash across DP replicas. + if args.check_weight_hash_across_dp_replicas_interval is not None and \ + iteration % args.check_weight_hash_across_dp_replicas_interval == 0: + if should_disable_forward_pre_hook(args): + disable_forward_pre_hook(model) + assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ + "Parameter hashes not matching across DP replicas" + torch.distributed.barrier() + print_rank_0(f">>> Weight hashes match after {iteration} iterations...") + if should_disable_forward_pre_hook(args): + enable_forward_pre_hook(model) + + # Autoresume. + if args.adlr_autoresume and \ + (iteration % args.adlr_autoresume_interval == 0): + check_adlr_autoresume_termination(iteration, model, optimizer, + opt_param_scheduler) + + # Profiling. + if args.profile and \ + iteration == args.profile_step_end and \ + torch.distributed.get_rank() in args.profile_ranks: + if args.use_pytorch_profiler: + assert prof is not None + prof.stop() + else: + torch.cuda.cudart().cudaProfilerStop() + + # Manual garbage collection. + if args.manual_gc: + if args.manual_gc_interval != 0 and iteration % args.manual_gc_interval == 0: + gc.collect() + + +def checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, + num_floating_point_operations_so_far, checkpointing_context, + train_data_iterator): + """Save checkpoint and decide whether to exit based on arguments (e.g., if + --exit-duration-in-mins is set). Actual exit happens in main training loop + based on the return value of this function.""" + args = get_args() + timers = get_timers() + + # Exit based on signal handler. 
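post_training_step_callbacks above periodically asserts that parameters are bit-identical across data-parallel replicas via check_param_hashes_across_dp_replicas. The real helper lives in megatron.core and is more involved; the following is only a simplified sketch of the idea, using a cheap per-parameter checksum and an all_gather over an assumed, already-created data-parallel process group:

import torch
import torch.distributed as dist

def dp_replicas_agree(params, dp_group):
    """Rough sketch: compare a per-parameter checksum across all ranks of dp_group."""
    # One scalar per parameter; a production check would use a stronger hash.
    local = torch.stack([p.detach().float().sum() for p in params]).cuda()
    world = dist.get_world_size(group=dp_group)
    gathered = [torch.empty_like(local) for _ in range(world)]
    dist.all_gather(gathered, local, group=dp_group)
    return all(torch.equal(gathered[0], g) for g in gathered[1:])

# Usage (assumes torch.distributed is initialized and dp_group is the DP group):
# assert dp_replicas_agree(model.parameters(), dp_group), \
#     "Parameter checksums differ across DP replicas"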
+ saved_checkpoint = False + if args.exit_signal_handler: + signal_handler = get_signal_handler() + if any(signal_handler.signals_received()): + if args.save: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + print_datetime('exiting program after receiving SIGTERM.') + + return True + + # Regular save (persistent and non-persistent). + if args.save and args.save_interval and \ + iteration % args.save_interval == 0: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + saved_checkpoint = True + + elif args.save and args.non_persistent_save_interval and \ + iteration % args.non_persistent_save_interval == 0: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, + non_persistent_ckpt=True, train_data_iterator=train_data_iterator) + saved_checkpoint = True + + # Exit based on duration. + if args.exit_duration_in_mins: + train_time = (time.time() - _TRAIN_START_TIME) / 60.0 + done_cuda = torch.tensor( + [train_time > args.exit_duration_in_mins], + dtype=torch.int, device='cuda') + torch.distributed.all_reduce( + done_cuda, op=torch.distributed.ReduceOp.MAX) + done = done_cuda.item() + if done: + if args.save and not saved_checkpoint: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + print_datetime(f'exiting program after {train_time} minutes') + + return True + + # Exit based on iterations. + if args.exit_interval and iteration % args.exit_interval == 0: + if args.save and not saved_checkpoint: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + torch.distributed.barrier() + print_datetime(f'exiting program at iteration {iteration}') + + return True + + return False + + +def train(forward_step_func, model, optimizer, opt_param_scheduler, + train_data_iterator, valid_data_iterator, + process_non_loss_data_func, config, checkpointing_context, non_loss_data_func): + """Training function: run train_step desired number of times, run validation, checkpoint.""" + args = get_args() + timers = get_timers() + one_logger = get_one_logger() + + # Write args to tensorboard + write_args_to_tensorboard() + + # Turn on training mode which enables dropout. + for model_module in model: + model_module.train() + + # Tracking loss. + total_loss_dict = {} + + # Iterations. + iteration = args.iteration + # Make sure rerun_state_machine has the right iteration loaded from checkpoint. + rerun_state_machine = get_rerun_state_machine() + if rerun_state_machine.current_iteration != iteration: + print_rank_0(f"Setting rerun_state_machine.current_iteration to {iteration}...") + rerun_state_machine.current_iteration = iteration + + # Track E2E metrics at the start of training. 
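The duration-based exit above reaches a collective decision by all-reducing an integer flag with MAX, so a single rank hitting the time limit is enough to stop every rank; train_step's update_successful handling uses the mirror-image MIN reduction for a logical AND. A compact sketch of the two patterns (assumes an initialized process group and a CUDA device):

import torch
import torch.distributed as dist

def any_across_ranks(flag: bool, group=None) -> bool:
    t = torch.tensor([int(flag)], dtype=torch.int, device='cuda')
    dist.all_reduce(t, op=dist.ReduceOp.MAX, group=group)  # 1 if any rank set the flag
    return bool(t.item())

def all_across_ranks(flag: bool, group=None) -> bool:
    t = torch.tensor([int(flag)], dtype=torch.int, device='cuda')
    dist.all_reduce(t, op=dist.ReduceOp.MIN, group=group)  # 1 only if every rank set the flag
    return bool(t.item())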
+ one_logger_utils.on_train_start(iteration=iteration, consumed_train_samples=args.consumed_train_samples, + train_samples=args.train_samples, seq_length=args.seq_length, + train_iters=args.train_iters, save=args.save, async_save=args.async_save, + log_throughput=args.log_throughput, + num_floating_point_operations_so_far=args.num_floating_point_operations_so_far) + + num_floating_point_operations_so_far = args.num_floating_point_operations_so_far + + # Setup some training config params. + config.grad_scale_func = optimizer.scale_loss + config.timers = timers + if isinstance(model[0], (custom_FSDP, DDP)) and args.overlap_grad_reduce: + assert config.no_sync_func is None, \ + ('When overlap_grad_reduce is True, config.no_sync_func must be None; ' + 'a custom no_sync_func is not supported when overlapping grad-reduce') + config.no_sync_func = [model_chunk.no_sync for model_chunk in model] + if len(model) == 1: + config.no_sync_func = config.no_sync_func[0] + if args.align_grad_reduce: + config.grad_sync_func = [model_chunk.start_grad_sync for model_chunk in model] + if len(model) == 1: + config.grad_sync_func = config.grad_sync_func[0] + if args.overlap_param_gather and args.align_param_gather: + config.param_sync_func = [model_chunk.start_param_sync for model_chunk in model] + if len(model) == 1: + config.param_sync_func = config.param_sync_func[0] + config.finalize_model_grads_func = finalize_model_grads + + timers('interval-time', log_level=0).start(barrier=True) + print_datetime('before the start of training step') + report_memory_flag = True + pre_hook_enabled = False + should_exit = False + exit_code = 0 + + if args.manual_gc: + # Disable the default garbage collector and perform the collection manually. + # This is to align the timing of garbage collection across ranks. + assert args.manual_gc_interval >= 0, \ + 'Manual garbage collection interval should be larger than or equal to 0' + gc.disable() + gc.collect() + + # Singleton initialization of straggler detector. + if args.log_straggler: + global stimer + world = torch.distributed.get_world_size() + rank = torch.distributed.get_rank() + mmcnt = args.straggler_minmax_count + stimer.configure(world, rank, + mmcnt = mmcnt, + enabled = not args.disable_straggler_on_startup, + port = args.straggler_ctrlr_port) + num_floating_point_operations_since_last_log_event = 0.0 + + num_microbatches = get_num_microbatches() + eval_duration = 0.0 + eval_iterations = 0 + + def get_e2e_base_metrics(): + """Get base metrics values for one-logger to calculate E2E tracking metrics. + """ + num_floating_point_operations_since_current_train_start = \ + num_floating_point_operations_so_far - args.num_floating_point_operations_so_far + return { + 'iteration': iteration, + 'train_duration': timers('interval-time').active_time(), + 'eval_duration': eval_duration, + 'eval_iterations': eval_iterations, + 'total_flops_since_current_train_start': num_floating_point_operations_since_current_train_start, + 'num_floating_point_operations_so_far': num_floating_point_operations_so_far, + 'consumed_train_samples': args.consumed_train_samples, + 'world_size': args.world_size, + 'seq_length': args.seq_length + } + # Cache into one-logger for callback. 
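When overlap_grad_reduce is enabled above, config.no_sync_func is pointed at each model chunk's no_sync so gradient communication is skipped until the final microbatch. A sketch of the same idea using torch's native DistributedDataParallel.no_sync(), purely for illustration (Megatron's DDP wrapper exposes an equivalent no_sync, and the real scheduling happens inside forward_backward_func):

import contextlib

def accumulate_without_intermediate_syncs(ddp_model, microbatches):
    """Skip gradient all-reduce on all but the last microbatch."""
    for i, batch in enumerate(microbatches):
        last = (i == len(microbatches) - 1)
        ctx = contextlib.nullcontext() if last else ddp_model.no_sync()
        with ctx:
            loss = ddp_model(batch).sum()
            loss.backward()   # grads are reduced across ranks only on the last pass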
+ if one_logger: + with one_logger.get_context_manager(): + one_logger.store_set('get_e2e_base_metrics', get_e2e_base_metrics) + + prof = None + if args.profile and torch.distributed.get_rank() in args.profile_ranks and args.use_pytorch_profiler: + def trace_handler(p): + from pathlib import Path + Path(f"{args.profile_dir}").mkdir(parents=True, exist_ok=True) + if args.rank in [0]: + print(p.key_averages(group_by_input_shape=True, + group_by_stack_n=5).table(sort_by="self_cuda_time_total", + row_limit=-1, + max_src_column_width=100, + max_name_column_width=280, + max_shapes_column_width=200)) + + p.export_chrome_trace("{path}/trace_rank{rank}_step{step}.json".format( + path=args.profile_dir, rank=torch.distributed.get_rank(), step=p.step_num)) + + prof = torch.profiler.profile( + activities=[ + torch.profiler.ProfilerActivity.CPU, + torch.profiler.ProfilerActivity.CUDA, + ], + schedule=torch.profiler.schedule( + wait=max(args.profile_step_start-1, 0), + warmup=1 if args.profile_step_start > 0 else 0, + active=args.profile_step_end-args.profile_step_start, + repeat=1), + record_shapes=True, + #on_trace_ready=torch.profiler.tensorboard_trace_handler('./torch_prof_data')) + on_trace_ready=trace_handler) + prof.start() + + start_iteration = iteration + # Disable forward pre-hook to start training to ensure that errors in checkpoint loading + # or random initialization don't propagate to all ranks in first all-gather (which is a + # no-op if things work correctly). + if should_disable_forward_pre_hook(args): + disable_forward_pre_hook(model, param_sync=False) + # Also remove param_sync_func temporarily so that sync calls made in + # `forward_backward_func` are no-ops. + param_sync_func = config.param_sync_func + config.param_sync_func = None + pre_hook_enabled = False + # Also, check weight hash across DP replicas to be very pedantic. + if args.check_weight_hash_across_dp_replicas_interval is not None: + assert check_param_hashes_across_dp_replicas(model, cross_check=True), \ + "Parameter hashes not matching across DP replicas" + torch.distributed.barrier() + print_rank_0(f">>> Weight hashes match after {iteration} iterations...") + + # Run training iterations till done. + while iteration < args.train_iters: + if args.profile and torch.distributed.get_rank() in args.profile_ranks: + if args.use_pytorch_profiler: + prof.step() + elif iteration == args.profile_step_start: + torch.cuda.cudart().cudaProfilerStart() + torch.autograd.profiler.emit_nvtx(record_shapes=True).__enter__() + + ft_integration.on_checkpointing_start() + maybe_finalize_async_save(blocking=False) + ft_integration.on_checkpointing_end(is_async_finalization=True) + + # Update number of microbatches first without consistency check to decide if a + # checkpoint should be saved. If the number of microbatches is different + # from the previous iteration, save a checkpoint. Then run consistency check + # to make sure training configuration is still valid. 
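The PyTorch profiler above is driven by a schedule derived from --profile-step-start/--profile-step-end. With assumed values the phases work out roughly as follows (illustrative numbers only):

import torch

profile_step_start, profile_step_end = 10, 12   # assumed CLI values

schedule = torch.profiler.schedule(
    wait=max(profile_step_start - 1, 0),            # 9 idle steps
    warmup=1 if profile_step_start > 0 else 0,      # 1 warmup step
    active=profile_step_end - profile_step_start,   # 2 recorded steps
    repeat=1,
)
# prof.step() is called once per training iteration, so the recorded window spans
# profile_step_end - profile_step_start iterations starting around profile_step_start.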
+ update_num_microbatches(args.consumed_train_samples, consistency_check=False, verbose=True) + if get_num_microbatches() != num_microbatches and iteration != 0: + assert get_num_microbatches() > num_microbatches, \ + (f"Number of microbatches should be increasing due to batch size rampup; " + f"instead going from {num_microbatches} to {get_num_microbatches()}") + if args.save is not None: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + num_microbatches = get_num_microbatches() + update_num_microbatches(args.consumed_train_samples, consistency_check=True, verbose=True) + + # Completely skip iteration if needed. + if iteration in args.iterations_to_skip: + # Dummy train_step to fast forward train_data_iterator. + dummy_train_step(train_data_iterator) + iteration += 1 + batch_size = mpu.get_data_parallel_world_size() * \ + args.micro_batch_size * \ + get_num_microbatches() + args.consumed_train_samples += batch_size + args.skipped_train_samples += batch_size + continue + + # Run training step. + args.curr_iteration = iteration + ft_integration.on_training_step_start() + loss_dict, skipped_iter, should_checkpoint, should_exit, exit_code, grad_norm, num_zeros_in_grad = \ + train_step(forward_step_func, + train_data_iterator, + model, + optimizer, + opt_param_scheduler, + config) + ft_integration.on_training_step_end() + if should_checkpoint: + save_checkpoint_and_time(iteration, model, optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator=train_data_iterator) + if should_exit: + break + + # Enable forward pre-hooks after first set of forward and backward passes. + # When running in fp16, skip all NaN iterations until steady-state loss scaling value + # is reached. + if iteration == start_iteration: + if skipped_iter: + # Only enable forward pre-hook after a training step has successfully run. Relevant + # for fp16 codepath where first XX iterations are skipped until steady-state loss + # scale value is reached. + start_iteration = iteration + 1 + else: + # Enable forward pre-hook after training step has successfully run. All subsequent + # forward passes will use the forward pre-hook / `param_sync_func` in + # `forward_backward_func`. + if should_disable_forward_pre_hook(args): + enable_forward_pre_hook(model) + config.param_sync_func = param_sync_func + pre_hook_enabled = True + + iteration += 1 + batch_size = mpu.get_data_parallel_world_size() * \ + args.micro_batch_size * \ + get_num_microbatches() + args.consumed_train_samples += batch_size + num_skipped_samples_in_batch = (get_current_global_batch_size() - + get_current_running_global_batch_size()) + if args.decrease_batch_size_if_needed: + assert num_skipped_samples_in_batch >= 0 + else: + assert num_skipped_samples_in_batch == 0 + args.skipped_train_samples += num_skipped_samples_in_batch + num_floating_point_operations_in_batch = num_floating_point_operations(args, batch_size) + num_floating_point_operations_so_far += num_floating_point_operations_in_batch + num_floating_point_operations_since_last_log_event += num_floating_point_operations_in_batch + + # Logging. 
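After every step (including skipped iterations) the sample counters above advance by the current global batch size, the product of the data-parallel size, the micro-batch size, and the number of microbatches. For example, with assumed values:

data_parallel_size = 64
micro_batch_size = 2
num_microbatches = 8

batch_size = data_parallel_size * micro_batch_size * num_microbatches  # 1024 samples/iteration
consumed_train_samples = 0
for _ in range(10):                 # ten iterations
    consumed_train_samples += batch_size
print(consumed_train_samples)       # 10240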
+ if not optimizer.is_stub_optimizer: + loss_scale = optimizer.get_loss_scale().item() + else: + loss_scale = 1.0 + params_norm = None + + if args.log_params_norm: + params_norm = calc_params_l2_norm(model) + learning_rate = None + decoupled_learning_rate = None + for param_group in optimizer.param_groups: + if param_group['is_decoupled_lr']: + decoupled_learning_rate = param_group['lr'] + else: + learning_rate = param_group['lr'] + report_memory_flag = training_log(loss_dict, total_loss_dict, + learning_rate, + decoupled_learning_rate, + iteration, loss_scale, + report_memory_flag, skipped_iter, + grad_norm, params_norm, num_zeros_in_grad) + + # Evaluation. + if args.eval_interval and iteration % args.eval_interval == 0 and \ + args.do_valid: + timers('interval-time').stop() + if should_disable_forward_pre_hook(args): + disable_forward_pre_hook(model) + pre_hook_enabled = False + if args.manual_gc and args.manual_gc_eval: + # Collect all objects. + gc.collect() + prefix = f'iteration {iteration}' + timers('eval-time', log_level=0).start(barrier=True) + evaluate_and_print_results(prefix, forward_step_func, + valid_data_iterator, model, + iteration, process_non_loss_data_func, + config, verbose=False, write_to_tensorboard=True, + non_loss_data_func=non_loss_data_func) + eval_duration += timers('eval-time').elapsed() + eval_iterations += args.eval_iters + timers('eval-time').stop() + one_logger_utils.track_e2e_metrics() + + if args.manual_gc and args.manual_gc_eval: + # Collect only the objects created and used in evaluation. + gc.collect(generation=0) + if should_disable_forward_pre_hook(args): + enable_forward_pre_hook(model) + pre_hook_enabled = True + timers('interval-time', log_level=0).start(barrier=True) + + # Miscellaneous post-training-step functions (e.g., FT heartbeats, GC). + # Some of these only happen at specific iterations. + post_training_step_callbacks(model, optimizer, opt_param_scheduler, iteration, prof, + num_floating_point_operations_since_last_log_event) + + # Checkpoint and decide whether to exit. + should_exit = checkpoint_and_decide_exit(model, optimizer, opt_param_scheduler, iteration, + num_floating_point_operations_so_far, + checkpointing_context, train_data_iterator) + if should_exit: + break + + one_logger_utils.track_e2e_metrics() + + # Flush TensorBoard, WandB writers and one-logger. + writer = get_tensorboard_writer() + if writer: + writer.flush() + + # Close out pre-hooks if using distributed optimizer and overlapped param gather. + if pre_hook_enabled: + disable_forward_pre_hook(model) + + ft_integration.on_checkpointing_start() + # This will finalize all unfinalized async request and terminate + # a persistent async worker if persistent ckpt worker is enabled + maybe_finalize_async_save(blocking=True, terminate=True) + ft_integration.on_checkpointing_end(is_async_finalization=True) + if args.enable_ft_package and ft_integration.get_rank_monitor_client() is not None: + ft_integration.get_rank_monitor_client().shutdown_workload_monitoring() + + # If any exit conditions (signal handler, duration, iterations) have been reached, exit. 
+ if should_exit: + wandb_writer = get_wandb_writer() + if wandb_writer: + wandb_writer.finish() + ft_integration.shutdown() + sys.exit(exit_code) + + return iteration, num_floating_point_operations_so_far + + +def evaluate(forward_step_func, + data_iterator, + model, + process_non_loss_data_func, + config, + verbose=False, + non_loss_data_func=None): + """Evaluation.""" + args = get_args() + timers = get_timers() + + timers('evaluate', log_level=0).start(barrier=True) + + if args.vision_pretraining and args.vision_pretraining_type == "dino": + from megatron.legacy.model.vision.knn_monitor import compute_feature_bank + compute_feature_bank(model) + + # Turn on evaluation mode which disables dropout. + for model_module in model: + model_module.eval() + + # Disable result validation during evaluation + rerun_state_machine = get_rerun_state_machine() + rerun_mode = rerun_state_machine.get_mode() + rerun_state_machine.set_mode(RerunMode.DISABLED) + + total_loss_dict = {} + + # make validation batch size independent from training batch size + eval_batch_size = args.global_batch_size + eval_num_microbatches = eval_batch_size // \ + (args.micro_batch_size * args.data_parallel_size) + + with torch.no_grad(): + iteration = 0 + if verbose: + print_rank_0(f'Evaluating on {args.eval_iters * eval_batch_size} samples') + while iteration < args.eval_iters: + iteration += 1 + if verbose: + print_rank_0(f'Evaluating iter {iteration}/{args.eval_iters}') + + forward_backward_func = get_forward_backward_func() + # Don't care about timing during evaluation + config.timers = None + ft_integration.on_eval_step_start() + loss_dicts = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=data_iterator, + model=model, + num_microbatches=eval_num_microbatches, + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + decoder_seq_length=args.decoder_seq_length, + forward_only=True) + ft_integration.on_eval_step_end() + config.timers = get_timers() + + # Empty unused memory + if args.empty_unused_memory_level >= 1: + torch.cuda.empty_cache() + + if mpu.is_pipeline_last_stage(ignore_virtual=True): + # Reduce across processes. 
+ for loss_dict in loss_dicts: + for key in loss_dict: + if key not in total_loss_dict: + total_loss_dict[key] = torch.tensor([0.0, 0.0], dtype=torch.float).cuda() + val = loss_dict[key] + if isinstance(val, tuple) or isinstance(val, list): + total_loss_dict[key][0] += val[0] + total_loss_dict[key][1] += val[1] + else: + total_loss_dict[key][0] += val + total_loss_dict[key][1] += 1 + + args.consumed_valid_samples += eval_batch_size + + if args.exit_duration_in_mins: + train_time = (time.time() - _TRAIN_START_TIME) / 60.0 + done_cuda = torch.tensor( + [train_time > args.exit_duration_in_mins], + dtype=torch.int, device='cuda') + torch.distributed.all_reduce( + done_cuda, op=torch.distributed.ReduceOp.MAX) + done = done_cuda.item() + if done: + rerun_state_machine.set_mode(rerun_mode) + print_rank_0('Exiting during evaluation, timelimit reached') + return None, None, True + + collected_non_loss_data = None + if non_loss_data_func is not None: + collected_non_loss_data = non_loss_data_func(model) + elif process_non_loss_data_func is not None and is_last_rank(): + collected_non_loss_data = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=data_iterator, + model=model, + num_microbatches=get_num_microbatches(), + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + decoder_seq_length=args.decoder_seq_length, + forward_only=True, + collect_non_loss_data=True) + + # Move model back to the train mode. + for model_module in model: + model_module.train() + + for key in total_loss_dict: + numerator, denominator = total_loss_dict[key] + total_loss_dict[key] = numerator / denominator + + timers('evaluate').stop() + timers.log(['evaluate']) + + rerun_state_machine.set_mode(rerun_mode) + + rerun_state_machine.set_mode(rerun_mode) + + return total_loss_dict, collected_non_loss_data, False + +def evaluate_and_print_results(prefix, forward_step_func, + data_iterator, model, + iteration, process_non_loss_data_func, config, + verbose=False, write_to_tensorboard=True, non_loss_data_func=None): + """Helper function to evaluate and dump results on screen.""" + args = get_args() + if write_to_tensorboard: + writer = get_tensorboard_writer() + else: + writer = None + + wandb_writer = get_wandb_writer() + + total_loss_dict, collected_non_loss_data, timelimit = evaluate( + forward_step_func, data_iterator, model, + process_non_loss_data_func, config, verbose, non_loss_data_func) + # Timelimit hit during evaluation + if timelimit: + return + string = f' validation loss at {prefix} | ' + for key in total_loss_dict: + string += '{} value: {:.6E} | '.format(key, total_loss_dict[key].item()) + ppl = math.exp(min(20, total_loss_dict[key].item())) + string += '{} PPL: {:.6E} | '.format(key, ppl) + if writer: + writer.add_scalar('{} validation'.format(key), + total_loss_dict[key].item(), + iteration) + writer.add_scalar('{} validation vs samples'.format(key), + total_loss_dict[key].item(), + args.consumed_train_samples) + if args.log_validation_ppl_to_tensorboard: + writer.add_scalar('{} validation ppl'.format(key), ppl, + iteration) + writer.add_scalar('{} validation ppl vs samples'.format(key), + ppl, args.consumed_train_samples) + if wandb_writer and is_last_rank(): + wandb_writer.log({ + '{} validation'.format(key): total_loss_dict[key].item()}, + iteration) + + if process_non_loss_data_func is not None and writer and is_last_rank(): + process_non_loss_data_func(collected_non_loss_data, iteration, writer) + + length = len(string) + 1 + print_rank_last('-' * length) + 
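evaluate_and_print_results above reports perplexity as the exponential of the averaged validation loss, with the loss clamped at 20 before exponentiation so a diverged loss prints as a large finite number instead of overflowing. For instance:

import math

def ppl(avg_loss: float) -> float:
    return math.exp(min(20, avg_loss))

print(ppl(2.0))      # ~7.39
print(ppl(1000.0))   # capped at e**20, ~4.85e8, rather than inf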
print_rank_last(string) + print_rank_last('-' * length) + + +def cyclic_iter(iter): + while True: + for x in iter: + yield x + + +def get_train_valid_test_num_samples(): + """Train/valid/test num samples.""" + + args = get_args() + + # Number of train/valid/test samples. + if args.train_samples: + train_samples = args.train_samples + else: + train_samples = args.train_iters * args.global_batch_size + eval_iters = (args.train_iters // args.eval_interval + 1) * \ + args.eval_iters + test_iters = args.eval_iters + + return ( + train_samples, + eval_iters * args.global_batch_size, + test_iters * args.global_batch_size, + ) + + +def build_train_valid_test_datasets(build_train_valid_test_datasets_provider): + """Build pretraining datasets.""" + train_valid_test_num_samples = get_train_valid_test_num_samples() + print_rank_0(' > datasets target sizes (minimum size):') + print_rank_0(' train: {}'.format(train_valid_test_num_samples[0])) + print_rank_0(' validation: {}'.format(train_valid_test_num_samples[1])) + print_rank_0(' test: {}'.format(train_valid_test_num_samples[2])) + return build_train_valid_test_datasets_provider(train_valid_test_num_samples) + + +def build_train_valid_test_data_loaders( + build_train_valid_test_datasets_provider): + """Build pretraining data loaders.""" + + args = get_args() + + (train_dataloader, valid_dataloader, test_dataloader) = (None, None, None) + + print_rank_0('> building train, validation, and test datasets ...') + + # Backward compatibility, assume fixed batch size. + if args.iteration > 0 and args.consumed_train_samples == 0: + assert args.train_samples is None, \ + 'Only backward compatiblity support for iteration-based training' + args.consumed_train_samples = args.iteration * args.global_batch_size + if args.iteration > 0 and args.consumed_valid_samples == 0: + if args.train_samples is None: + args.consumed_valid_samples = (args.iteration // args.eval_interval) * \ + args.eval_iters * args.global_batch_size + + # Rely on distributed-aware core datasets, temporary + is_distributed = getattr(build_train_valid_test_datasets_provider, "is_distributed", False) + + # Construct the data pipeline + if is_distributed or mpu.get_tensor_model_parallel_rank() == 0: + + # Build datasets. + train_ds, valid_ds, test_ds = build_train_valid_test_datasets( + build_train_valid_test_datasets_provider) + # Build dataloders. + train_dataloader = build_pretraining_data_loader( + train_ds, args.consumed_train_samples) + if args.skip_train: + valid_dataloader = build_pretraining_data_loader(valid_ds, 0) + else: + valid_dataloader = build_pretraining_data_loader( + valid_ds, args.consumed_valid_samples) + test_dataloader = build_pretraining_data_loader(test_ds, 0) + + # Flags to know if we need to do training/validation/testing. 
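get_train_valid_test_num_samples above converts the iteration budget into dataset target sizes: validation runs eval_iters iterations after every eval_interval training iterations plus once more at the end, and test runs eval_iters once. A worked example with assumed settings:

# Assumed settings for illustration.
train_iters = 10_000
global_batch_size = 256
eval_interval = 1_000
eval_iters = 50

train_samples = train_iters * global_batch_size                   # 2,560,000
valid_iters = (train_iters // eval_interval + 1) * eval_iters     # (10 + 1) * 50 = 550
test_iters = eval_iters                                           # 50

print(train_samples, valid_iters * global_batch_size, test_iters * global_batch_size)
# 2560000 140800 12800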
+ do_train = train_dataloader is not None and args.train_iters > 0 + do_valid = valid_dataloader is not None and args.eval_iters > 0 + do_test = test_dataloader is not None and args.eval_iters > 0 + flags = torch.tensor( + [int(do_train), int(do_valid), int(do_test)], + dtype=torch.long, device='cuda') + else: + flags = torch.tensor([0, 0, 0], dtype=torch.long, device='cuda') + + torch.distributed.broadcast(flags, 0) + + args.do_train = getattr(args, "do_train", False) or flags[0].item() + args.do_valid = getattr(args, "do_valid", False) or flags[1].item() + args.do_test = getattr(args, "do_test", False) or flags[2].item() + + return train_dataloader, valid_dataloader, test_dataloader + + +def build_train_valid_test_data_iterators( + build_train_valid_test_datasets_provider): + """Build pretraining data iterators.""" + + args = get_args() + + # Build loaders. + train_dataloader, valid_dataloader, test_dataloader = \ + build_train_valid_test_data_loaders( + build_train_valid_test_datasets_provider) + + # Build iterators. + dl_type = args.dataloader_type + assert dl_type in ['single', 'cyclic', 'external'] + + def _get_iterator(dataloader_type, dataloader): + """Return dataset iterator.""" + if dataloader_type == "single": + return RerunDataIterator(iter(dataloader)) + elif dataloader_type == "cyclic": + return RerunDataIterator(iter(cyclic_iter(dataloader))) + elif dataloader_type == "external": + # External dataloader is passed through. User is expected to define how to iterate. + if isinstance(dataloader, list): + return [RerunDataIterator(d) for d in dataloader] + else: + return RerunDataIterator(dataloader) + else: + raise RuntimeError("unexpected dataloader type") + + if train_dataloader is not None: + train_data_iterator = _get_iterator(dl_type, train_dataloader) + else: + train_data_iterator = None + + if valid_dataloader is not None: + valid_data_iterator = _get_iterator(dl_type, valid_dataloader) + else: + valid_data_iterator = None + + if test_dataloader is not None: + test_data_iterator = _get_iterator(dl_type, test_dataloader) + else: + test_data_iterator = None + + return train_data_iterator, valid_data_iterator, test_data_iterator + + +def should_disable_forward_pre_hook(args): + """Block forward pre-hook for certain configurations.""" + return not args.use_custom_fsdp and args.use_distributed_optimizer and args.overlap_param_gather diff --git a/megatron/training/utils.py b/megatron/training/utils.py index 2f517d2..e392880 100644 --- a/megatron/training/utils.py +++ b/megatron/training/utils.py @@ -1,472 +1,534 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - -"""General utilities.""" -import json -import os -import sys -from datetime import datetime - -import torch - -try: - from transformer_engine.pytorch.optimizers import multi_tensor_applier, multi_tensor_l2norm -except ImportError: - try: - from amp_C import multi_tensor_l2norm - from apex.multi_tensor_apply import multi_tensor_applier - except ImportError: - - import warnings - warnings.warn( - f'Transformer Engine and Apex are not installed. 
' - 'Falling back to local implementations of ' - 'multi_tensor_applier and multi_tensor_l2norm' - ) - - from megatron.core.utils import ( - local_multi_tensor_l2_norm as multi_tensor_l2norm, - local_multi_tensor_applier as multi_tensor_applier, - ) - -from megatron.training import ( - get_args, - get_adlr_autoresume, -) -from megatron.core import DistributedDataParallel as DDP -from megatron.core import mpu -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core.tensor_parallel import param_is_not_tensor_parallel_duplicate -from megatron.core.utils import ( - get_batch_on_this_cp_rank, - get_data_parallel_group_if_dtensor, - to_local_if_dtensor, -) -from megatron.legacy.model import Float16Module -from megatron.legacy.model.module import param_is_not_shared - -try: - from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP - ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, torch_FSDP, Float16Module) -except ImportError: - ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, Float16Module) - - -def unwrap_model(model, module_instances=ALL_MODULE_WRAPPER_CLASSNAMES): - return_list = True - if not isinstance(model, list): - model = [model] - return_list = False - unwrapped_model = [] - for model_module in model: - while isinstance(model_module, module_instances): - model_module = model_module.module - unwrapped_model.append(model_module) - if not return_list: - return unwrapped_model[0] - return unwrapped_model - - -def calc_params_l2_norm(model): - """Calculate l2 norm of parameters """ - args = get_args() - if not isinstance(model, list): - model = [model] - # Seperate moe and dense params - params_data = [] - moe_params_data = [] - data_parallel_group = None - - for model_chunk in model: - for i, param in enumerate(model_chunk.parameters()): - data_parallel_group = get_data_parallel_group_if_dtensor(param, data_parallel_group) - is_not_tp_duplicate = param_is_not_tensor_parallel_duplicate(param) - if not (param.requires_grad and is_not_tp_duplicate): - continue - assert is_not_tp_duplicate - if not getattr(param, 'allreduce', True): - assert param_is_not_shared(param) - param = to_local_if_dtensor(param) - moe_params_data.append(param.data.float() if args.bf16 else param.data) - else: - if param_is_not_shared(param): - param = to_local_if_dtensor(param) - params_data.append(param.data.float() if args.bf16 else param.data) - - # Calculate dense param norm - dummy_overflow_buf = torch.tensor([0], dtype=torch.int, device='cuda') - if len(params_data) > 0: - norm, _ = multi_tensor_applier( - multi_tensor_l2norm, - dummy_overflow_buf, - [params_data], - False # no per-parameter norm - ) - norm_2 = norm * norm - else: - norm_2 = torch.tensor([0.0], dtype=torch.float32, device='cuda') - - if data_parallel_group is not None: - torch.distributed.all_reduce(norm_2, - op=torch.distributed.ReduceOp.SUM, - group=data_parallel_group) - - # Sum across all model-parallel GPUs(tensor + pipeline). - torch.distributed.all_reduce( - norm_2, - op=torch.distributed.ReduceOp.SUM, - group=mpu.get_model_parallel_group() - ) - # Calculate moe norm - if len(moe_params_data) > 0: - moe_norm, _ = multi_tensor_applier( - multi_tensor_l2norm, - dummy_overflow_buf, - [moe_params_data], - False # no per-parameter norm - ) - moe_norm_2 = moe_norm * moe_norm - # Sum across expert tensor, model and pipeline parallel GPUs. 
- torch.distributed.all_reduce( - moe_norm_2, - op=torch.distributed.ReduceOp.SUM, - group=mpu.get_expert_tensor_model_pipeline_parallel_group() - ) - norm_2 += moe_norm_2 - return norm_2.item() ** 0.5 - - -def average_losses_across_data_parallel_group(losses): - """Reduce a tensor of losses across all GPUs.""" - averaged_losses = torch.cat( - [loss.clone().detach().view(1) for loss in losses]) - torch.distributed.all_reduce(averaged_losses, - group=mpu.get_data_parallel_group()) - averaged_losses = averaged_losses / \ - torch.distributed.get_world_size(group=mpu.get_data_parallel_group()) - - return averaged_losses - - -def reduce_max_stat_across_model_parallel_group(stat: float) -> float: - """ - Ranks without an optimizer will have no grad_norm or num_zeros_in_grad stats. - We need to ensure the logging and writer rank has those values. - This function reduces a stat tensor across the model parallel group. - - We use an all_reduce max since the values have already been summed across optimizer ranks where possible - """ - if stat is None: - stat = -1.0 - stat = torch.tensor([stat], dtype=torch.float32, device=torch.cuda.current_device()) - torch.distributed.all_reduce( - stat, op=torch.distributed.ReduceOp.MAX, group=mpu.get_model_parallel_group() - ) - if stat.item() == -1.0: - return None - else: - return stat.item() - - -def logical_and_across_model_parallel_group(input: bool) -> bool: - """ - This function gathers a bool value across the model parallel group - """ - if input is True: - input = 1 - else: - input = 0 - input = torch.tensor([input], dtype=torch.int, device=torch.cuda.current_device()) - torch.distributed.all_reduce( - input, op=torch.distributed.ReduceOp.MIN, group=mpu.get_model_parallel_group() - ) - return bool(input.item()) - - -def report_memory(name): - """Simple GPU memory report.""" - mega_bytes = 1024.0 * 1024.0 - string = name + ' memory (MB)' - string += ' | allocated: {}'.format( - torch.cuda.memory_allocated() / mega_bytes) - string += ' | max allocated: {}'.format( - torch.cuda.max_memory_allocated() / mega_bytes) - string += ' | reserved: {}'.format( - torch.cuda.memory_reserved() / mega_bytes) - string += ' | max reserved: {}'.format( - torch.cuda.max_memory_reserved() / mega_bytes) - if mpu.get_data_parallel_rank() == 0: - print("[Rank {}] {}".format(torch.distributed.get_rank(), string), - flush=True) - - -def print_params_min_max_norm(optimizer, iteration): - """Print min, max, and norm of all parameters.""" - index = 0 - rank = torch.distributed.get_rank() - string = 'iteration, rank, index, tensor-model-parallel, min, max, norm\n' - optimizer_ = optimizer.optimizer - for param_group in optimizer_.param_groups: - for param in param_group['params']: - index += 1 - min_ = param.data.min() - max_ = param.data.max() - norm = torch.linalg.norm(param.data) - string += '{:7d}, {:4d}, {:4d}, {:2d}, '.format( - iteration, rank, index, int(param.tensor_model_parallel)) - string += '{:.6E}, {:.6E}, {:.6E}\n'.format(min_, max_, norm) - print(string, flush=True) - - -def check_adlr_autoresume_termination(iteration, model, - optimizer, opt_param_scheduler): - """Check for autoresume signal and exit if it is received.""" - from megatron.training.checkpointing import save_checkpoint - - args = get_args() - autoresume = get_adlr_autoresume() - # Add barrier to ensure consistnecy. 
- torch.distributed.barrier() - if autoresume.termination_requested(): - if args.save: - save_checkpoint(iteration, model, optimizer, opt_param_scheduler) - print_rank_0(">>> autoresume termination request found!") - if torch.distributed.get_rank() == 0: - autoresume.request_resume() - print_rank_0(">>> training terminated. Returning") - sys.exit(0) - - -def get_ltor_masks_and_position_ids(data, - eod_token, - reset_position_ids, - reset_attention_mask, - eod_mask_loss): - """Build masks and position id for left to right model.""" - - # Extract batch size and sequence length. - micro_batch_size, seq_length = data.size() - - # Attention mask (lower triangular). - if reset_attention_mask: - att_mask_batch = micro_batch_size - else: - att_mask_batch = 1 - attention_mask = torch.tril(torch.ones( - (att_mask_batch, seq_length, seq_length), device=data.device)).view( - att_mask_batch, 1, seq_length, seq_length) - - # Loss mask. - loss_mask = torch.ones(data.size(), dtype=torch.float, device=data.device) - if eod_mask_loss: - loss_mask[data == eod_token] = 0.0 - - # Position ids. - position_ids = torch.arange(seq_length, dtype=torch.long, - device=data.device) - position_ids = position_ids.unsqueeze(0).expand_as(data) - # We need to clone as the ids will be modifed based on batch index. - if reset_position_ids: - position_ids = position_ids.clone() - - if reset_position_ids or reset_attention_mask: - # Loop through the batches: - for b in range(micro_batch_size): - - # Find indecies where EOD token is. - eod_index = position_ids[b, data[b] == eod_token] - # Detach indecies from positions if going to modify positions. - if reset_position_ids: - eod_index = eod_index.clone() - - # Loop through EOD indecies: - prev_index = 0 - for j in range(eod_index.size()[0]): - i = eod_index[j] - # Mask attention loss. - if reset_attention_mask: - attention_mask[b, 0, (i + 1):, :(i + 1)] = 0 - # Reset positions. 
- if reset_position_ids: - position_ids[b, (i + 1):] -= (i + 1 - prev_index) - prev_index = i + 1 - - # Convert attention mask to binary: - attention_mask = (attention_mask < 0.5) - - return attention_mask, loss_mask, position_ids - - -def print_rank_0(message): - """If distributed is initialized, print only on rank 0.""" - if torch.distributed.is_initialized(): - if torch.distributed.get_rank() == 0: - print(message, flush=True) - else: - print(message, flush=True) - -def is_last_rank(): - return torch.distributed.get_rank() == ( - torch.distributed.get_world_size() - 1) - -def print_rank_last(message): - """If distributed is initialized, print only on last rank.""" - if torch.distributed.is_initialized(): - if is_last_rank(): - print(message, flush=True) - else: - print(message, flush=True) - - -def append_to_progress_log(string, barrier=True): - """Append given string to progress log.""" - args = get_args() - if args.save is None: - return - progress_log_filename = os.path.join(args.save, "progress.txt") - if barrier: - torch.distributed.barrier() - if torch.distributed.get_rank() == 0: - with open(progress_log_filename, 'a') as f: - job_id = os.getenv('SLURM_JOB_ID', '') - num_gpus = args.world_size - f.write(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\tJob ID: {job_id}\t" - f"# GPUs: {num_gpus}\t{string}\n") - - -def get_blend_and_blend_per_split(args): - """Get blend and blend_per_split from passed-in arguments.""" - use_data_path = args.data_path is not None or \ - args.data_args_path is not None - use_per_split_data_path = any( - elt is not None - for elt in [args.train_data_path, - args.valid_data_path, - args.test_data_path]) or \ - args.per_split_data_args_path is not None - - blend = None - blend_per_split = None - if use_data_path: - if args.data_args_path is not None: - assert args.data_path is None - with open(args.data_args_path, 'r') as f: - blend = get_blend_from_list(f.read().split()) - else: - assert args.data_path is not None - blend = get_blend_from_list(args.data_path) - elif use_per_split_data_path: - if args.per_split_data_args_path is not None: - with open(args.per_split_data_args_path, 'r') as f: - per_split_data_args = json.load(f) - # Each element in blend_per_split should be a list of files (and optional - # weights), so split string if needed. 
- for split in ["train", "valid", "test"]: - if isinstance(per_split_data_args[split], str): - per_split_data_args[split] = per_split_data_args[split].split() - - blend_per_split = [ - get_blend_from_list(per_split_data_args["train"]), - get_blend_from_list(per_split_data_args["valid"]), - get_blend_from_list(per_split_data_args["test"]) - ] - else: - blend_per_split = [ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ] - else: - blend, blend_per_split = None, None - - return blend, blend_per_split - - -def get_batch_on_this_tp_rank(data_iterator): - - args = get_args() - - def _broadcast(item): - if item is not None: - torch.distributed.broadcast(item, mpu.get_tensor_model_parallel_src_rank(), group=mpu.get_tensor_model_parallel_group()) - - if mpu.get_tensor_model_parallel_rank() == 0: - - if data_iterator is not None: - data = next(data_iterator) - else: - data = None - - batch = { - 'tokens': data["tokens"].cuda(non_blocking = True), - 'labels': data["labels"].cuda(non_blocking = True), - 'loss_mask': data["loss_mask"].cuda(non_blocking = True), - 'attention_mask': None if "attention_mask" not in data else data["attention_mask"].cuda(non_blocking = True), - 'position_ids': data["position_ids"].cuda(non_blocking = True) - } - - if args.pipeline_model_parallel_size == 1: - _broadcast(batch['tokens']) - _broadcast(batch['labels']) - _broadcast(batch['loss_mask']) - _broadcast(batch['attention_mask']) - _broadcast(batch['position_ids']) - - elif mpu.is_pipeline_first_stage(): - _broadcast(batch['tokens']) - _broadcast(batch['attention_mask']) - _broadcast(batch['position_ids']) - - elif mpu.is_pipeline_last_stage(): - _broadcast(batch['labels']) - _broadcast(batch['loss_mask']) - _broadcast(batch['attention_mask']) - - else: - - tokens=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) - labels=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) - loss_mask=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.float32 , device = torch.cuda.current_device()) - if args.create_attention_mask_in_dataloader: - attention_mask=torch.empty( - (args.micro_batch_size,1,args.seq_length,args.seq_length), dtype = torch.bool , device = torch.cuda.current_device() - ) - else: - attention_mask=None - position_ids=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) - - if args.pipeline_model_parallel_size == 1: - _broadcast(tokens) - _broadcast(labels) - _broadcast(loss_mask) - _broadcast(attention_mask) - _broadcast(position_ids) - - elif mpu.is_pipeline_first_stage(): - labels=None - loss_mask=None - - _broadcast(tokens) - _broadcast(attention_mask) - _broadcast(position_ids) - - elif mpu.is_pipeline_last_stage(): - tokens=None - position_ids=None - - _broadcast(labels) - _broadcast(loss_mask) - _broadcast(attention_mask) - - batch = { - 'tokens': tokens, - 'labels': labels, - 'loss_mask': loss_mask, - 'attention_mask': attention_mask, - 'position_ids': position_ids - } - - return batch - - -def update_use_dist_ckpt(args): - args.use_dist_ckpt = args.ckpt_format != "torch" +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
+ +"""General utilities.""" +import json +import os +import sys +from datetime import datetime + +import torch + +try: + from transformer_engine.pytorch.optimizers import multi_tensor_applier, multi_tensor_l2norm +except ImportError: + try: + from amp_C import multi_tensor_l2norm + from apex.multi_tensor_apply import multi_tensor_applier + except ImportError: + + import warnings + warnings.warn( + f'Transformer Engine and Apex are not installed. ' + 'Falling back to local implementations of ' + 'multi_tensor_applier and multi_tensor_l2norm' + ) + + from megatron.core.utils import ( + local_multi_tensor_l2_norm as multi_tensor_l2norm, + local_multi_tensor_applier as multi_tensor_applier, + ) + +from megatron.training import ( + get_args, + get_adlr_autoresume, +) +from megatron.core import DistributedDataParallel as DDP +from megatron.core.distributed.custom_fsdp import FullyShardedDataParallel as custom_FSDP +from megatron.core import mpu +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core.tensor_parallel import param_is_not_tensor_parallel_duplicate +from megatron.core.utils import ( + get_batch_on_this_cp_rank, + get_data_parallel_group_if_dtensor, + to_local_if_dtensor, +) +from megatron.legacy.model import Float16Module +from megatron.legacy.model.module import param_is_not_shared + +try: + from megatron.core.distributed import TorchFullyShardedDataParallel as torch_FSDP + ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, torch_FSDP, custom_FSDP, Float16Module) +except ImportError: + ALL_MODULE_WRAPPER_CLASSNAMES = (DDP, custom_FSDP, Float16Module) + + +def unwrap_model(model, module_instances=ALL_MODULE_WRAPPER_CLASSNAMES): + return_list = True + if not isinstance(model, list): + model = [model] + return_list = False + unwrapped_model = [] + for model_module in model: + while isinstance(model_module, module_instances): + model_module = model_module.module + unwrapped_model.append(model_module) + if not return_list: + return unwrapped_model[0] + return unwrapped_model + + +def calc_params_l2_norm(model, force_create_fp32_copy=False): + """Calculate l2 norm of parameters """ + args = get_args() + if not isinstance(model, list): + model = [model] + # Seperate moe and dense params + params_data = [] + moe_params_data = [] + sharded_params_data = [] + data_parallel_group = None + + custom_fsdp_all_param_is_shared = False + for model_chunk in model: + for param in model_chunk.parameters(): + data_parallel_group = get_data_parallel_group_if_dtensor(param, data_parallel_group) + is_not_tp_duplicate = param_is_not_tensor_parallel_duplicate(param) + if not is_not_tp_duplicate: + continue + assert is_not_tp_duplicate + if hasattr(param, "fully_shard_param_local_shard"): + param = param.fully_shard_param_local_shard + assert [getattr(p, "fully_shard_param_local_shard", None) is not None for p in model_chunk.parameters()] + custom_fsdp_all_param_is_shared = True + if param.numel() == 0: + continue + if not getattr(param, 'allreduce', True): + # TODO: Implement memory optimization for MoE parameters. 
+ assert param_is_not_shared(param) + param = to_local_if_dtensor(param) + moe_params_data.append(param.data.float() if args.bf16 else param.data) + else: + if param_is_not_shared(param): + param = to_local_if_dtensor(param) + if args.bf16: + if not force_create_fp32_copy and hasattr(param, 'main_param'): + if getattr(param, 'main_param_sharded', False): + if param.main_param is not None: + sharded_params_data.append(param.main_param) + else: + params_data.append(param.main_param) + else: + # Fallback to original logic of making a fp32 copy of the + # parameter if `.main_param` attribute is not available. + params_data.append(param.data.float()) + else: + params_data.append(param.data) + + # Calculate norm. + dummy_overflow_buf = torch.tensor([0], dtype=torch.int, device='cuda') + if len(params_data) > 0: + norm, _ = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [params_data], + False # no per-parameter norm. + ) + norm_2 = norm * norm + else: + norm_2 = torch.zeros((1,), dtype=torch.float32, device='cuda') + + if data_parallel_group is not None: + torch.distributed.all_reduce(norm_2, + op=torch.distributed.ReduceOp.SUM, + group=data_parallel_group) + + # Add norm contribution from params with sharded main_params. These norms need to be + # accumulated across the DP group since the main parameters are sharded because + # of distributed optimizer. + if len(sharded_params_data) > 0: + dummy_overflow_buf = torch.tensor([0], dtype=torch.int, device='cuda') + sharded_norm, _ = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [sharded_params_data], + False # no per-parameter norm. + ) + sharded_norm_2 = sharded_norm * sharded_norm + # Sum over all DP groups. + torch.distributed.all_reduce( + sharded_norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_data_parallel_group() + ) + norm_2 += sharded_norm_2 + + if custom_fsdp_all_param_is_shared: + torch.distributed.all_reduce(norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_data_parallel_group()) + + # Sum across all model-parallel GPUs (tensor + pipeline). + torch.distributed.all_reduce( + norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_model_parallel_group() + ) + + # Add norm contribution from expert layers in MoEs. + if len(moe_params_data) > 0: + moe_norm, _ = multi_tensor_applier( + multi_tensor_l2norm, + dummy_overflow_buf, + [moe_params_data], + False # no per-parameter norm. + ) + moe_norm_2 = moe_norm * moe_norm + + if custom_fsdp_all_param_is_shared: + torch.distributed.all_reduce(moe_norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_expert_data_parallel_group()) + + # Sum across expert tensor, model and pipeline parallel GPUs. + torch.distributed.all_reduce( + moe_norm_2, + op=torch.distributed.ReduceOp.SUM, + group=mpu.get_expert_tensor_model_pipeline_parallel_group() + ) + norm_2 += moe_norm_2 + + return norm_2.item() ** 0.5 + + +def average_losses_across_data_parallel_group(losses): + """Reduce a tensor of losses across all GPUs.""" + averaged_losses = torch.cat( + [loss.clone().detach().view(1) for loss in losses]) + torch.distributed.all_reduce(averaged_losses, + group=mpu.get_data_parallel_group()) + averaged_losses = averaged_losses / \ + torch.distributed.get_world_size(group=mpu.get_data_parallel_group()) + + return averaged_losses + + +def reduce_max_stat_across_model_parallel_group(stat: float) -> float: + """ + Ranks without an optimizer will have no grad_norm or num_zeros_in_grad stats. 
+ We need to ensure the logging and writer rank has those values. + This function reduces a stat tensor across the model parallel group. + + We use an all_reduce max since the values have already been summed across optimizer ranks where possible + """ + if stat is None: + stat = -1.0 + stat = torch.tensor([stat], dtype=torch.float32, device=torch.cuda.current_device()) + torch.distributed.all_reduce( + stat, op=torch.distributed.ReduceOp.MAX, group=mpu.get_model_parallel_group() + ) + if stat.item() == -1.0: + return None + else: + return stat.item() + + +def logical_and_across_model_parallel_group(input: bool) -> bool: + """ + This function gathers a bool value across the model parallel group + """ + if input is True: + input = 1 + else: + input = 0 + input = torch.tensor([input], dtype=torch.int, device=torch.cuda.current_device()) + torch.distributed.all_reduce( + input, op=torch.distributed.ReduceOp.MIN, group=mpu.get_model_parallel_group() + ) + return bool(input.item()) + + +def report_memory(name): + """Simple GPU memory report.""" + mega_bytes = 1024.0 * 1024.0 + string = name + ' memory (MB)' + string += ' | allocated: {}'.format( + torch.cuda.memory_allocated() / mega_bytes) + string += ' | max allocated: {}'.format( + torch.cuda.max_memory_allocated() / mega_bytes) + string += ' | reserved: {}'.format( + torch.cuda.memory_reserved() / mega_bytes) + string += ' | max reserved: {}'.format( + torch.cuda.max_memory_reserved() / mega_bytes) + if mpu.get_data_parallel_rank() == 0: + print("[Rank {}] {}".format(torch.distributed.get_rank(), string), + flush=True) + + +def print_params_min_max_norm(optimizer, iteration): + """Print min, max, and norm of all parameters.""" + index = 0 + rank = torch.distributed.get_rank() + string = 'iteration, rank, index, tensor-model-parallel, min, max, norm\n' + optimizer_ = optimizer.optimizer + for param_group in optimizer_.param_groups: + for param in param_group['params']: + index += 1 + min_ = param.data.min() + max_ = param.data.max() + norm = torch.linalg.norm(param.data) + string += '{:7d}, {:4d}, {:4d}, {:2d}, '.format( + iteration, rank, index, int(param.tensor_model_parallel)) + string += '{:.6E}, {:.6E}, {:.6E}\n'.format(min_, max_, norm) + print(string, flush=True) + + +def check_adlr_autoresume_termination(iteration, model, + optimizer, opt_param_scheduler): + """Check for autoresume signal and exit if it is received.""" + from megatron.training.checkpointing import save_checkpoint + + args = get_args() + autoresume = get_adlr_autoresume() + # Add barrier to ensure consistnecy. + torch.distributed.barrier() + if autoresume.termination_requested(): + if args.save: + save_checkpoint(iteration, model, optimizer, opt_param_scheduler) + print_rank_0(">>> autoresume termination request found!") + if torch.distributed.get_rank() == 0: + autoresume.request_resume() + print_rank_0(">>> training terminated. Returning") + sys.exit(0) + + +def get_ltor_masks_and_position_ids(data, + eod_token, + reset_position_ids, + reset_attention_mask, + eod_mask_loss): + """Build masks and position id for left to right model.""" + + # Extract batch size and sequence length. + micro_batch_size, seq_length = data.size() + + # Attention mask (lower triangular). + if reset_attention_mask: + att_mask_batch = micro_batch_size + else: + att_mask_batch = 1 + attention_mask = torch.tril(torch.ones( + (att_mask_batch, seq_length, seq_length), device=data.device)).view( + att_mask_batch, 1, seq_length, seq_length) + + # Loss mask. 
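+    # With eod_mask_loss set, positions holding the EOD token contribute no loss.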
+ loss_mask = torch.ones(data.size(), dtype=torch.float, device=data.device) + if eod_mask_loss: + loss_mask[data == eod_token] = 0.0 + + # Position ids. + position_ids = torch.arange(seq_length, dtype=torch.long, + device=data.device) + position_ids = position_ids.unsqueeze(0).expand_as(data) + # We need to clone as the ids will be modifed based on batch index. + if reset_position_ids: + position_ids = position_ids.clone() + + if reset_position_ids or reset_attention_mask: + # Loop through the batches: + for b in range(micro_batch_size): + + # Find indecies where EOD token is. + eod_index = position_ids[b, data[b] == eod_token] + # Detach indecies from positions if going to modify positions. + if reset_position_ids: + eod_index = eod_index.clone() + + # Loop through EOD indecies: + prev_index = 0 + for j in range(eod_index.size()[0]): + i = eod_index[j] + # Mask attention loss. + if reset_attention_mask: + attention_mask[b, 0, (i + 1):, :(i + 1)] = 0 + # Reset positions. + if reset_position_ids: + position_ids[b, (i + 1):] -= (i + 1 - prev_index) + prev_index = i + 1 + + # Convert attention mask to binary: + attention_mask = (attention_mask < 0.5) + + return attention_mask, loss_mask, position_ids + + +def print_rank_0(message): + """If distributed is initialized, print only on rank 0.""" + if torch.distributed.is_initialized(): + if torch.distributed.get_rank() == 0: + print(message, flush=True) + else: + print(message, flush=True) + +def is_rank0(): + """Returns true if called in the rank0, false otherwise""" + return torch.distributed.is_initialized() and torch.distributed.get_rank() == 0 + +def is_last_rank(): + return torch.distributed.get_rank() == ( + torch.distributed.get_world_size() - 1) + +def print_rank_last(message): + """If distributed is initialized, print only on last rank.""" + if torch.distributed.is_initialized(): + if is_last_rank(): + print(message, flush=True) + else: + print(message, flush=True) + +def get_device_arch_version(): + """Returns GPU arch version (8: Ampere, 9: Hopper, 10: Blackwell, ...)""" + return torch.cuda.get_device_properties(torch.device("cuda:0")).major + +def append_to_progress_log(string, barrier=True): + """Append given string to progress log.""" + args = get_args() + if args.save is None: + return + progress_log_filename = os.path.join(args.save, "progress.txt") + if barrier: + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + with open(progress_log_filename, 'a') as f: + job_id = os.getenv('SLURM_JOB_ID', '') + num_gpus = args.world_size + f.write(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\tJob ID: {job_id}\t" + f"# GPUs: {num_gpus}\t{string}\n") + + +def get_blend_and_blend_per_split(args): + """Get blend and blend_per_split from passed-in arguments.""" + use_data_path = args.data_path is not None or \ + args.data_args_path is not None + use_per_split_data_path = any( + elt is not None + for elt in [args.train_data_path, + args.valid_data_path, + args.test_data_path]) or \ + args.per_split_data_args_path is not None + + blend = None + blend_per_split = None + if use_data_path: + if args.data_args_path is not None: + assert args.data_path is None + with open(args.data_args_path, 'r') as f: + blend = get_blend_from_list(f.read().split()) + else: + assert args.data_path is not None + blend = get_blend_from_list(args.data_path) + elif use_per_split_data_path: + if args.per_split_data_args_path is not None: + with open(args.per_split_data_args_path, 'r') as f: + per_split_data_args = json.load(f) + # Each element in 
blend_per_split should be a list of files (and optional + # weights), so split string if needed. + for split in ["train", "valid", "test"]: + if isinstance(per_split_data_args[split], str): + per_split_data_args[split] = per_split_data_args[split].split() + + blend_per_split = [ + get_blend_from_list(per_split_data_args["train"]), + get_blend_from_list(per_split_data_args["valid"]), + get_blend_from_list(per_split_data_args["test"]) + ] + else: + blend_per_split = [ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path) + ] + else: + blend, blend_per_split = None, None + + return blend, blend_per_split + + +def get_batch_on_this_tp_rank(data_iterator): + + args = get_args() + + def _broadcast(item): + if item is not None: + torch.distributed.broadcast(item, mpu.get_tensor_model_parallel_src_rank(), group=mpu.get_tensor_model_parallel_group()) + + if mpu.get_tensor_model_parallel_rank() == 0: + + if data_iterator is not None: + data = next(data_iterator) + else: + data = None + + batch = { + 'tokens': data["tokens"].cuda(non_blocking = True), + 'labels': data["labels"].cuda(non_blocking = True), + 'loss_mask': data["loss_mask"].cuda(non_blocking = True), + 'attention_mask': None if "attention_mask" not in data else data["attention_mask"].cuda(non_blocking = True), + 'position_ids': data["position_ids"].cuda(non_blocking = True) + } + + if args.pipeline_model_parallel_size == 1: + _broadcast(batch['tokens']) + _broadcast(batch['labels']) + _broadcast(batch['loss_mask']) + _broadcast(batch['attention_mask']) + _broadcast(batch['position_ids']) + + elif mpu.is_pipeline_first_stage(): + _broadcast(batch['tokens']) + _broadcast(batch['attention_mask']) + _broadcast(batch['position_ids']) + + elif mpu.is_pipeline_last_stage(): + _broadcast(batch['labels']) + _broadcast(batch['loss_mask']) + _broadcast(batch['attention_mask']) + + else: + + tokens=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) + labels=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) + loss_mask=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.float32 , device = torch.cuda.current_device()) + if args.create_attention_mask_in_dataloader: + attention_mask=torch.empty( + (args.micro_batch_size,1,args.seq_length,args.seq_length), dtype = torch.bool , device = torch.cuda.current_device() + ) + else: + attention_mask=None + position_ids=torch.empty((args.micro_batch_size,args.seq_length), dtype = torch.int64 , device = torch.cuda.current_device()) + + if args.pipeline_model_parallel_size == 1: + _broadcast(tokens) + _broadcast(labels) + _broadcast(loss_mask) + _broadcast(attention_mask) + _broadcast(position_ids) + + elif mpu.is_pipeline_first_stage(): + labels=None + loss_mask=None + + _broadcast(tokens) + _broadcast(attention_mask) + _broadcast(position_ids) + + elif mpu.is_pipeline_last_stage(): + tokens=None + position_ids=None + + _broadcast(labels) + _broadcast(loss_mask) + _broadcast(attention_mask) + + batch = { + 'tokens': tokens, + 'labels': labels, + 'loss_mask': loss_mask, + 'attention_mask': attention_mask, + 'position_ids': position_ids + } + + return batch + + +def update_use_dist_ckpt(args): + args.use_dist_ckpt = args.ckpt_format != "torch" diff --git a/megatron/training/wandb_utils.py b/megatron/training/wandb_utils.py new file mode 100644 index 0000000..fc88dda --- /dev/null +++ 
b/megatron/training/wandb_utils.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
+
+from pathlib import Path
+from typing import Tuple
+
+from megatron.training.global_vars import get_wandb_writer
+from megatron.training.utils import print_rank_last
+
+
+def _get_wandb_artifact_tracker_filename(save_dir: str) -> Path:
+    """Wandb artifact tracker file records the latest artifact's wandb entity and project"""
+    return Path(save_dir) / "latest_wandb_artifact_path.txt"
+
+
+def _get_artifact_name_and_version(save_dir: Path, checkpoint_path: Path) -> Tuple[str, str]:
+    return save_dir.stem, checkpoint_path.stem
+
+
+def on_save_checkpoint_success(checkpoint_path: str, tracker_filename: str, save_dir: str, iteration: int) -> None:
+    """Called after checkpointing succeeds and the checkpoint is persisted, to log it as an artifact in W&B
+
+    Args:
+        checkpoint_path (str): path of the saved checkpoint
+        tracker_filename (str): path of the tracker filename for the checkpoint iteration
+        save_dir (str): path of the root save folder for all checkpoints
+        iteration (int): iteration of the checkpoint
+    """
+
+    wandb_writer = get_wandb_writer()
+
+    if wandb_writer:
+        metadata = {"iteration": iteration}
+        artifact_name, artifact_version = _get_artifact_name_and_version(Path(save_dir), Path(checkpoint_path))
+        artifact = wandb_writer.Artifact(artifact_name, type="model", metadata=metadata)
+        artifact.add_reference(f"file://{checkpoint_path}", checksum=False)
+        artifact.add_file(tracker_filename)
+        wandb_writer.run.log_artifact(artifact, aliases=[artifact_version])
+        wandb_tracker_filename = _get_wandb_artifact_tracker_filename(save_dir)
+        wandb_tracker_filename.write_text(f"{wandb_writer.run.entity}/{wandb_writer.run.project}")
+
+
+def on_load_checkpoint_success(checkpoint_path: str, load_dir: str) -> None:
+    """Called after a checkpoint is successfully loaded, to mark the corresponding artifact as used by the W&B run
+
+    Args:
+        checkpoint_path (str): path of the loaded checkpoint
+        load_dir (str): path of the root folder the checkpoint was loaded from
+    """
+
+    wandb_writer = get_wandb_writer()
+
+    if wandb_writer:
+        try:
+            artifact_name, artifact_version = _get_artifact_name_and_version(Path(load_dir), Path(checkpoint_path))
+            wandb_tracker_filename = _get_wandb_artifact_tracker_filename(load_dir)
+            artifact_path = ""
+            if wandb_tracker_filename.is_file():
+                artifact_path = wandb_tracker_filename.read_text().strip()
+                artifact_path = f"{artifact_path}/"
+            wandb_writer.run.use_artifact(f"{artifact_path}{artifact_name}:{artifact_version}")
+        except Exception:
+            print_rank_last(f" failed to find checkpoint {checkpoint_path} in wandb")
\ No newline at end of file
diff --git a/megatron/training/yaml_arguments.py b/megatron/training/yaml_arguments.py
index 622c917..2e6b2b1 100644
--- a/megatron/training/yaml_arguments.py
+++ b/megatron/training/yaml_arguments.py
@@ -1,458 +1,458 @@
-# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
- -"""Megatron arguments.""" - -import argparse -import dataclasses -import json -import os -import torch -import types - -from itertools import chain, starmap -from types import SimpleNamespace -import yaml, re, os -from types import SimpleNamespace - -import torch.nn.functional as F - -from megatron.core.transformer import TransformerConfig, MLATransformerConfig -from megatron.core.utils import get_torch_version, is_torch_min_version - -# Taken from https://stackoverflow.com/questions/65414773/parse-environment-variable-from-yaml-with-pyyaml -# Allows for yaml to use environment variables -env_pattern = re.compile(r".*?\${(.*?)}.*?") -def env_constructor(loader, node): - value = loader.construct_scalar(node) - for group in env_pattern.findall(value): - assert os.environ.get(group) is not None, f"environment variable {group} in yaml not found" - value = value.replace(f"${{{group}}}", os.environ.get(group)) - return value -yaml.add_implicit_resolver("!pathex", env_pattern) -yaml.add_constructor("!pathex", env_constructor) - - -str_dtype_to_torch = { - "float32" : torch.float32, - "float16" : torch.float16, - "bfloat16" : torch.bfloat16 -} - -def validate_yaml(args, defaults={}): - - # This is for legacy script env var setting - if type(args.data_path) is str: - # If no white space its a single path - split_data_path = args.data_path.split() - if len(split_data_path) != 1: - args.data_path = split_data_path - - # Tensor model parallel size. - args.model_parallel.tensor_model_parallel_size = min( - args.model_parallel.tensor_model_parallel_size, args.world_size) - assert args.world_size % args.model_parallel.tensor_model_parallel_size == 0, 'world size'\ - ' ({}) is not divisible by tensor model parallel size ({})'.format( - args.world_size, args.model_parallel.tensor_model_parallel_size) - # Pipeline model parallel size. - args.model_parallel.pipeline_model_parallel_size = min( - args.model_parallel.pipeline_model_parallel_size, - (args.world_size // args.model_parallel.tensor_model_parallel_size)) - args.model_parallel.transformer_pipeline_model_parallel_size = ( - args.model_parallel.pipeline_model_parallel_size - 1 - if args.standalone_embedding_stage else - args.model_parallel.pipeline_model_parallel_size - ) - # Checks. 
- model_parallel_size = args.model_parallel.pipeline_model_parallel_size * \ - args.model_parallel.tensor_model_parallel_size - assert args.world_size % (model_parallel_size * args.model_parallel.context_parallel_size) == 0, \ - 'world size ({}) is not divisible by tensor parallel size ({}) times ' \ - 'pipeline parallel size ({}) times context parallel size ({})'.format( - args.world_size, args.model_parallel.tensor_model_parallel_size, - args.model_parallel.pipeline_model_parallel_size, args.model_parallel.context_parallel_size) - - # data_parallel_size is not in model parallel config - args.data_parallel_size = args.world_size // (model_parallel_size * args.model_parallel.context_parallel_size) - if args.rank == 0: - print('using world size: {}, data-parallel size: {}, ' - 'context-parallel size: {} ' - 'tensor-model-parallel size: {}, ' - 'pipeline-model-parallel size: {} '.format( - args.world_size, args.data_parallel_size, - args.model_parallel.context_parallel_size, - args.model_parallel.tensor_model_parallel_size, - args.model_parallel.pipeline_model_parallel_size), flush=True) - if args.model_parallel.pipeline_model_parallel_size > 1: - if args.model_parallel.pipeline_model_parallel_split_rank is not None: - assert args.model_parallel.pipeline_model_parallel_split_rank < \ - args.model_parallel.pipeline_model_parallel_size, 'split rank needs'\ - ' to be less than pipeline model parallel size ({})'.format( - args.model_parallel.pipeline_model_parallel_size) - - if args.model_parallel.tp_comm_overlap: - assert args.model_parallel.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled' - - # Set input defaults. - for key in defaults: - # For default to be valid, it should not be provided in the - # arguments that are passed to the program. We check this by - # ensuring the arg is set to None. - if getattr(args, key, None) is not None: - if args.rank == 0: - print('WARNING: overriding default arguments for {key}:{v} \ - with {key}:{v2}'.format(key=key, v=defaults[key], - v2=getattr(args, key)), - flush=True) - else: - setattr(args, key, defaults[key]) - - # Batch size. 
- assert args.micro_batch_size is not None - assert args.micro_batch_size > 0 - if args.global_batch_size is None: - args.global_batch_size = args.micro_batch_size * args.data_parallel_size - if args.rank == 0: - print('setting global batch size to {}'.format( - args.global_batch_size), flush=True) - assert args.global_batch_size > 0 - - # num_layers_per_virtual_pipeline_stage is not insde model parallel for checkpointing - if args.num_layers_per_virtual_pipeline_stage is not None: - assert args.model_parallel.pipeline_model_parallel_size > 2, \ - 'pipeline-model-parallel size should be greater than 2 with ' \ - 'interleaved schedule' - assert args.language_model.num_layers % args.model_parallel.transformer_pipeline_model_parallel_size == 0, \ - 'number of layers should be divisible by the pipeline parallel size' - num_layers_per_pipeline_stage = args.language_model.num_layers // args.model_parallel.transformer_pipeline_model_parallel_size - assert num_layers_per_pipeline_stage % args.num_layers_per_virtual_pipeline_stage == 0, \ - 'number of layers per pipeline stage must be divisible number of layers per virtual pipeline stage' - args.model_parallel.virtual_pipeline_model_parallel_size = num_layers_per_pipeline_stage // \ - args.num_layers_per_virtual_pipeline_stage - else: - args.model_parallel.virtual_pipeline_model_parallel_size = None - # Overlap P2P communication is disabled if not using the interleaved schedule. - args.model_parallel.overlap_p2p_comm = False - if args.rank == 0: - print('WARNING: Setting args.overlap_p2p_comm to False since non-interleaved ' - 'schedule does not support overlapping p2p communication') - - if args.overlap_param_gather: - assert args.use_distributed_optimizer, \ - '--overlap-param-gather only supported with distributed optimizer' - assert args.overlap_grad_reduce, \ - '--overlap-grad-reduce should be turned on when using --overlap-param-gather' - - # Parameters dtype. - if args.model_parallel.fp16: - assert not args.model_parallel.bf16 - args.model_parallel.params_dtype = torch.half - if args.model_parallel.bf16: - assert not args.model_parallel.fp16 - args.model_parallel.params_dtype = torch.bfloat16 - # bfloat16 requires gradient accumulation and all-reduce to - # be done in fp32. - if not args.accumulate_allreduce_grads_in_fp32: - args.accumulate_allreduce_grads_in_fp32 = True - if args.rank == 0: - print('accumulate and all-reduce gradients in fp32 for ' - 'bfloat16 data type.', flush=True) - - if args.rank == 0: - print('using {} for parameters ...'.format(args.model_parallel.params_dtype), - flush=True) - - if args.dataloader_type is None: - args.dataloader_type = 'single' - - # Consumed tokens. - args.consumed_train_samples = 0 - args.consumed_valid_samples = 0 - - # Support for variable sequence lengths across batches/microbatches. - # set it if the dataloader supports generation of variable sequence lengths - # across batches/microbatches. Due to additional communication overhead - # during pipeline parallelism, it should not be set if sequence length - # is constant during training. - args.model_parallel.variable_seq_lengths = False - - # Iteration-based training. - if args.train_iters: - # If we use iteration-based training, make sure the - # sample-based options are off. 
- assert args.train_samples is None, \ - 'expected iteration-based training' - assert args.lr_decay_samples is None, \ - 'expected iteration-based learning rate decay' - assert args.lr_warmup_samples == 0, \ - 'expected iteration-based learning rate warmup' - assert args.rampup_batch_size is None, \ - 'expected no batch-size rampup for iteration-based training' - if args.lr_warmup_fraction is not None: - assert args.lr_warmup_iters == 0, \ - 'can only specify one of lr-warmup-fraction and lr-warmup-iters' - - # Sample-based training. - if args.train_samples: - # If we use sample-based training, make sure the - # iteration-based options are off. - assert args.train_iters is None, \ - 'expected sample-based training' - assert args.lr_decay_iters is None, \ - 'expected sample-based learning rate decay' - assert args.lr_warmup_iters == 0, \ - 'expected sample-based learnig rate warmup' - if args.lr_warmup_fraction is not None: - assert args.lr_warmup_samples == 0, \ - 'can only specify one of lr-warmup-fraction ' \ - 'and lr-warmup-samples' - - # How to handle this better - if args.language_model.num_layers is not None: - assert args.encoder_num_layers is None, \ - 'cannot have both num-layers and encoder-num-layers specified' - args.encoder_num_layers = args.language_model.num_layers - else: - assert args.encoder_num_layers is not None, \ - 'either num-layers or encoder-num-layers should be specified' - args.language_model.num_layers = args.encoder_num_layers - - # Check required arguments. - # removed max_position_embeddings from reqs - required_args = ['num_layers', 'hidden_size', 'num_attention_heads'] - for req_arg in required_args: - _check_arg_is_not_none(args.language_model, req_arg) - - # Checks. - if args.language_model.ffn_hidden_size is None: - if args.language_model.activation_func == "swiglu": - # reduce the dimnesion for MLP since projections happens on - # two linear layers. this keeps the number of paramters in - # the same ballpark as the counterpart with 4*h size - # we keep it a multiple of 64, which means the actual tensor size - # will be a multiple of 64 / tp_size - args.language_model.ffn_hidden_size = int((4 * args.language_model.hidden_size * 2 / 3) / 64) * 64 - else: - args.language_model.ffn_hidden_size = 4 * args.language_model.hidden_size - - if args.language_model.kv_channels is None: - assert args.language_model.hidden_size % args.language_model.num_attention_heads == 0 - args.language_model.kv_channels = args.language_model.hidden_size // args.language_model.num_attention_heads - - #TODO: Implement arguments for encoder-decoder - if args.seq_length is not None: - assert args.encoder_seq_length is None - args.encoder_seq_length = args.seq_length - else: - assert args.encoder_seq_length is not None - args.seq_length = args.encoder_seq_length - - if args.seq_length is not None: - assert args.max_position_embeddings >= args.seq_length - if args.decoder_seq_length is not None: - assert args.max_position_embeddings >= args.decoder_seq_length - if args.lr is not None: - assert args.min_lr <= args.lr - if args.save is not None: - assert args.save_interval is not None - # Mixed precision checks. - if args.fp16_lm_cross_entropy: - assert args.fp16, 'lm cross entropy in fp16 only support in fp16 mode.' - if args.language_model.fp32_residual_connection: - assert args.model_parallel.fp16 or args.model_parallel.bf16, \ - 'residual connection in fp32 only supported when using fp16 or bf16.' 
- - if args.language_model.moe_grouped_gemm: - assert args.model_parallel.bf16, 'Currently GroupedGEMM for MoE only supports bf16 dtype.' - dc = torch.cuda.get_device_capability() - assert dc[0] >= 8, "Unsupported compute capability for GroupedGEMM kernels." - - if args.weight_decay_incr_style == 'constant': - assert args.start_weight_decay is None - assert args.end_weight_decay is None - args.start_weight_decay = args.weight_decay - args.end_weight_decay = args.weight_decay - else: - assert args.start_weight_decay is not None - assert args.end_weight_decay is not None - - # Persistent fused layer norm. - if not is_torch_min_version("1.11.0a0"): - args.language_model.persist_layer_norm = False - if args.rank == 0: - print('Persistent fused layer norm kernel is supported from ' - 'pytorch v1.11 (nvidia pytorch container paired with v1.11). ' - 'Defaulting to no_persist_layer_norm=True') - - # Activation recomputing. - if args.language_model.distribute_saved_activations: - assert args.model_parallel.tensor_model_parallel_size > 1, 'can distribute ' \ - 'recomputed activations only across tensor model ' \ - 'parallel groups' - assert args.language_model.recompute_granularity == 'full', \ - 'distributed recompute activations is only '\ - 'application to full recompute granularity' - assert args.language_model.recompute_method is not None, \ - 'for distributed recompute activations to work you '\ - 'need to use a recompute method ' - assert is_torch_min_version("1.10.0a0"), \ - 'distributed recompute activations are supported for pytorch ' \ - 'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \ - f'pytorch version is v{get_torch_version()}.' - - if args.language_model.recompute_granularity == 'selective': - assert args.language_model.recompute_method is None, \ - 'recompute method is not yet supported for ' \ - 'selective recomputing granularity' - - # disable sequence parallelism when tp=1 - # to avoid change in numerics when - # sequence_parallelism is enabled. - if args.model_parallel.tensor_model_parallel_size == 1: - args.model_parallel.sequence_parallel = False - - # disable async_tensor_model_parallel_allreduce when - # model parallel memory optimization is enabled - if args.model_parallel.sequence_parallel: - args.model_parallel.async_tensor_model_parallel_allreduce = False - - if os.environ.get('CUDA_DEVICE_MAX_CONNECTIONS') != "1": - if args.model_parallel.sequence_parallel: - raise RuntimeError( - "Using sequence parallelism requires setting the environment variable " - "CUDA_DEVICE_MAX_CONNECTIONS to 1") - if args.model_parallel.async_tensor_model_parallel_allreduce: - raise RuntimeError( - "Using async gradient all reduce requires setting the environment " - "variable CUDA_DEVICE_MAX_CONNECTIONS to 1") - - # Retro checks. - if getattr(args, 'retro_add_retriever', False): - raise Exception("Retro untested for yaml args. See arguments.py.") - - # Sequence parallelism unsupported. - assert not args.sequence_parallel, \ - "retro currently does not support sequence parallelism." - - # Pipeline parallelism unsupported. - assert args.pipeline_model_parallel_size == 1, \ - "retro currently does not support pipeline parallelism." - - #TODO: Retro args loading not tested - # Load retro args (used by both Retro & GPT). - if getattr(args, 'retro_project_dir', None) is not None: - raise Exception("Retro untested for yaml args. 
See arguments.py.") - - if args.language_model.rotary_interleaved and args.language_model.apply_rope_fusion: - raise RuntimeError('--rotary-interleaved does not work with rope_fusion.') - - # MoE Spec check - if args.language_model.num_moe_experts is not None: - assert args.spec is None, "Model Spec must be None when using MoEs" - if args.model_parallel.tensor_model_parallel_size > 1: - assert args.model_parallel.sequence_parallel, \ - "When using MoE and tensor parallelism, sequence parallelism must be used." - - # Expert parallelism check - if args.model_parallel.expert_model_parallel_size > 1: - assert args.language_model.num_moe_experts is not None, "num_experts must be non None to use expert model parallelism" - assert args.language_model.num_moe_experts % args.model_parallel.expert_model_parallel_size == 0, \ - "Number of experts should be a multiple of expert model parallel_size." - assert not args.model_parallel.fp16, \ - "Expert parallelism is not supported with fp16 training." - - # Print arguments. - _print_args("arguments", args) - - #TODO: Added as much of the global initialization requires the model parallel arguments - args = SimpleNamespace(**args.__dict__, **args.model_parallel.__dict__) - args = SimpleNamespace(**args.__dict__, **args.language_model.__dict__) - # For GPT Layer spec in pretrain_gpt - args.num_experts = args.language_model.num_moe_experts - - return args - -def _print_args(title, args): - """Print arguments.""" - if args.rank == 0: - print(f'------------------------ {title} ------------------------', - flush=True) - str_list = [] - for arg in vars(args): - dots = '.' * (48 - len(arg)) - str_list.append(' {} {} {}'.format(arg, dots, getattr(args, arg))) - for arg in sorted(str_list, key=lambda x: x.lower()): - print(arg, flush=True) - print(f'-------------------- end of {title} ---------------------', - flush=True) - -def core_config_from_args(args, dataclass=TransformerConfig): - """Builds core config object from namespace args from given dataclass - - Raises exception if argument missing in args - - Args: - args(SimpleNamespace, optional): Namespace to pull argument values from - dataclass (dataclass, optional): Core dataclass config to pull argument names from - - - Returns: - SimpleNamespace: The returned namespace to build core config from - """ - kw_args = {} - for f in dataclasses.fields(dataclass): - if hasattr(args, f.name): - kw_args[f.name] = getattr(args, f.name) - else: - raise Exception(f"Missing argument {f.name} for {str(dataclass)} config") - return kw_args - -def _check_arg_is_not_none(args, arg): - assert getattr(args, arg) is not None, '{} argument is None'.format(arg) - -def core_transformer_config_from_yaml(args, transfomer_key = "language_model"): - # Combine transfomer config with model parallel args - args = SimpleNamespace(**vars(getattr(args, transfomer_key)), **vars(args.model_parallel)) - # Translate args to core transformer configuration - kw_args = core_config_from_args(args, TransformerConfig) - - # Hardcoded - kw_args['deallocate_pipeline_outputs'] = True - kw_args['pipeline_dtype'] = kw_args['params_dtype'] - kw_args['batch_p2p_comm'] = not args.overlap_p2p_comm - - assert args.activation_func in ["swiglu","squaredrelu","gelu"], f"{args.activation_func} is not a supported activation function" - if args.activation_func == "swiglu": - kw_args['activation_func'] = F.silu - kw_args['gated_linear_unit'] = True - kw_args['bias_activation_fusion'] = args.bias_swiglu_fusion - elif args.activation_func == "squaredrelu": - def 
squared_relu(x): - return torch.pow(F.relu(x), 2) - kw_args['activation_func'] = squared_relu - elif args.activation_func == "gelu": - kw_args['activation_func'] = F.gelu - if args.add_bias_linear: - kw_args['bias_activation_fusion'] = False - else: - kw_args['bias_activation_fusion'] = args.bias_activation_fusion - - if args.init_method == "xavier_uniform": - kw_args['init_method'] = torch.nn.init.xavier_uniform_ - kw_args['scaled_init_method'] = torch.nn.init.xavier_uniform_ - - # Return Transformer config. - if getattr(args, "multi_latent_attention", False): - return MLATransformerConfig(**kw_args) - else: - return TransformerConfig(**kw_args) - -def load_yaml(yaml_path): - print(f"warning using experimental yaml arguments feature, argparse arguments will be ignored") - with open(yaml_path, "r") as f: - config = yaml.load(f,Loader=yaml.FullLoader) - # Convert to nested namespace - config_namespace = json.loads(json.dumps(config), object_hook=lambda item: SimpleNamespace(**item)) - # Add config location to namespace - config_namespace.yaml_cfg = yaml_path - return config_namespace - +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Megatron arguments.""" + +import argparse +import dataclasses +import json +import os +import torch +import types + +from itertools import chain, starmap +from types import SimpleNamespace +import yaml, re, os +from types import SimpleNamespace + +import torch.nn.functional as F + +from megatron.core.transformer import TransformerConfig, MLATransformerConfig +from megatron.core.utils import get_torch_version, is_torch_min_version + +# Taken from https://stackoverflow.com/questions/65414773/parse-environment-variable-from-yaml-with-pyyaml +# Allows for yaml to use environment variables +env_pattern = re.compile(r".*?\${(.*?)}.*?") +def env_constructor(loader, node): + value = loader.construct_scalar(node) + for group in env_pattern.findall(value): + assert os.environ.get(group) is not None, f"environment variable {group} in yaml not found" + value = value.replace(f"${{{group}}}", os.environ.get(group)) + return value +yaml.add_implicit_resolver("!pathex", env_pattern) +yaml.add_constructor("!pathex", env_constructor) + + +str_dtype_to_torch = { + "float32" : torch.float32, + "float16" : torch.float16, + "bfloat16" : torch.bfloat16 +} + +def validate_yaml(args, defaults={}): + + # This is for legacy script env var setting + if type(args.data_path) is str: + # If no white space its a single path + split_data_path = args.data_path.split() + if len(split_data_path) != 1: + args.data_path = split_data_path + + # Tensor model parallel size. + args.model_parallel.tensor_model_parallel_size = min( + args.model_parallel.tensor_model_parallel_size, args.world_size) + assert args.world_size % args.model_parallel.tensor_model_parallel_size == 0, 'world size'\ + ' ({}) is not divisible by tensor model parallel size ({})'.format( + args.world_size, args.model_parallel.tensor_model_parallel_size) + # Pipeline model parallel size. + args.model_parallel.pipeline_model_parallel_size = min( + args.model_parallel.pipeline_model_parallel_size, + (args.world_size // args.model_parallel.tensor_model_parallel_size)) + args.model_parallel.transformer_pipeline_model_parallel_size = ( + args.model_parallel.pipeline_model_parallel_size - 1 + if args.account_for_embedding_in_pipeline_split else + args.model_parallel.pipeline_model_parallel_size + ) + # Checks. 
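+    # Illustrative sizing (example values, not from this config): world_size=16 with
+    # TP=2, PP=2 and CP=1 gives model_parallel_size=4 and data_parallel_size=16//(4*1)=4.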
+ model_parallel_size = args.model_parallel.pipeline_model_parallel_size * \ + args.model_parallel.tensor_model_parallel_size + assert args.world_size % (model_parallel_size * args.model_parallel.context_parallel_size) == 0, \ + 'world size ({}) is not divisible by tensor parallel size ({}) times ' \ + 'pipeline parallel size ({}) times context parallel size ({})'.format( + args.world_size, args.model_parallel.tensor_model_parallel_size, + args.model_parallel.pipeline_model_parallel_size, args.model_parallel.context_parallel_size) + + # data_parallel_size is not in model parallel config + args.data_parallel_size = args.world_size // (model_parallel_size * args.model_parallel.context_parallel_size) + if args.rank == 0: + print('using world size: {}, data-parallel size: {}, ' + 'context-parallel size: {} ' + 'tensor-model-parallel size: {}, ' + 'pipeline-model-parallel size: {} '.format( + args.world_size, args.data_parallel_size, + args.model_parallel.context_parallel_size, + args.model_parallel.tensor_model_parallel_size, + args.model_parallel.pipeline_model_parallel_size), flush=True) + if args.model_parallel.pipeline_model_parallel_size > 1: + if args.model_parallel.pipeline_model_parallel_split_rank is not None: + assert args.model_parallel.pipeline_model_parallel_split_rank < \ + args.model_parallel.pipeline_model_parallel_size, 'split rank needs'\ + ' to be less than pipeline model parallel size ({})'.format( + args.model_parallel.pipeline_model_parallel_size) + + if args.model_parallel.tp_comm_overlap: + assert args.model_parallel.sequence_parallel == True, 'Tensor parallel communication/GEMM overlap can happen only when sequence parallelism is enabled' + + # Set input defaults. + for key in defaults: + # For default to be valid, it should not be provided in the + # arguments that are passed to the program. We check this by + # ensuring the arg is set to None. + if getattr(args, key, None) is not None: + if args.rank == 0: + print('WARNING: overriding default arguments for {key}:{v} \ + with {key}:{v2}'.format(key=key, v=defaults[key], + v2=getattr(args, key)), + flush=True) + else: + setattr(args, key, defaults[key]) + + # Batch size. 
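+    # Example (illustrative values): micro_batch_size=2 with data_parallel_size=8
+    # yields a default global_batch_size of 16 when global_batch_size is left unset.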
+ assert args.micro_batch_size is not None + assert args.micro_batch_size > 0 + if args.global_batch_size is None: + args.global_batch_size = args.micro_batch_size * args.data_parallel_size + if args.rank == 0: + print('setting global batch size to {}'.format( + args.global_batch_size), flush=True) + assert args.global_batch_size > 0 + + # num_layers_per_virtual_pipeline_stage is not insde model parallel for checkpointing + if args.num_layers_per_virtual_pipeline_stage is not None: + assert args.model_parallel.pipeline_model_parallel_size > 2, \ + 'pipeline-model-parallel size should be greater than 2 with ' \ + 'interleaved schedule' + assert args.language_model.num_layers % args.model_parallel.transformer_pipeline_model_parallel_size == 0, \ + 'number of layers should be divisible by the pipeline parallel size' + num_layers_per_pipeline_stage = args.language_model.num_layers // args.model_parallel.transformer_pipeline_model_parallel_size + assert num_layers_per_pipeline_stage % args.num_layers_per_virtual_pipeline_stage == 0, \ + 'number of layers per pipeline stage must be divisible number of layers per virtual pipeline stage' + args.model_parallel.virtual_pipeline_model_parallel_size = num_layers_per_pipeline_stage // \ + args.num_layers_per_virtual_pipeline_stage + else: + args.model_parallel.virtual_pipeline_model_parallel_size = None + # Overlap P2P communication is disabled if not using the interleaved schedule. + args.model_parallel.overlap_p2p_comm = False + if args.rank == 0: + print('WARNING: Setting args.overlap_p2p_comm to False since non-interleaved ' + 'schedule does not support overlapping p2p communication') + + if args.overlap_param_gather: + assert args.use_distributed_optimizer, \ + '--overlap-param-gather only supported with distributed optimizer' + assert args.overlap_grad_reduce, \ + '--overlap-grad-reduce should be turned on when using --overlap-param-gather' + + # Parameters dtype. + if args.model_parallel.fp16: + assert not args.model_parallel.bf16 + args.model_parallel.params_dtype = torch.half + if args.model_parallel.bf16: + assert not args.model_parallel.fp16 + args.model_parallel.params_dtype = torch.bfloat16 + # bfloat16 requires gradient accumulation and all-reduce to + # be done in fp32. + if not args.accumulate_allreduce_grads_in_fp32: + args.accumulate_allreduce_grads_in_fp32 = True + if args.rank == 0: + print('accumulate and all-reduce gradients in fp32 for ' + 'bfloat16 data type.', flush=True) + + if args.rank == 0: + print('using {} for parameters ...'.format(args.model_parallel.params_dtype), + flush=True) + + if args.dataloader_type is None: + args.dataloader_type = 'single' + + # Consumed tokens. + args.consumed_train_samples = 0 + args.consumed_valid_samples = 0 + + # Support for variable sequence lengths across batches/microbatches. + # set it if the dataloader supports generation of variable sequence lengths + # across batches/microbatches. Due to additional communication overhead + # during pipeline parallelism, it should not be set if sequence length + # is constant during training. + args.model_parallel.variable_seq_lengths = False + + # Iteration-based training. + if args.train_iters: + # If we use iteration-based training, make sure the + # sample-based options are off. 
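+        # i.e. train_samples, lr_decay_samples and rampup_batch_size must be None,
+        # and lr_warmup_samples must remain at its default of 0.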
+ assert args.train_samples is None, \ + 'expected iteration-based training' + assert args.lr_decay_samples is None, \ + 'expected iteration-based learning rate decay' + assert args.lr_warmup_samples == 0, \ + 'expected iteration-based learning rate warmup' + assert args.rampup_batch_size is None, \ + 'expected no batch-size rampup for iteration-based training' + if args.lr_warmup_fraction is not None: + assert args.lr_warmup_iters == 0, \ + 'can only specify one of lr-warmup-fraction and lr-warmup-iters' + + # Sample-based training. + if args.train_samples: + # If we use sample-based training, make sure the + # iteration-based options are off. + assert args.train_iters is None, \ + 'expected sample-based training' + assert args.lr_decay_iters is None, \ + 'expected sample-based learning rate decay' + assert args.lr_warmup_iters == 0, \ + 'expected sample-based learnig rate warmup' + if args.lr_warmup_fraction is not None: + assert args.lr_warmup_samples == 0, \ + 'can only specify one of lr-warmup-fraction ' \ + 'and lr-warmup-samples' + + # How to handle this better + if args.language_model.num_layers is not None: + assert args.encoder_num_layers is None, \ + 'cannot have both num-layers and encoder-num-layers specified' + args.encoder_num_layers = args.language_model.num_layers + else: + assert args.encoder_num_layers is not None, \ + 'either num-layers or encoder-num-layers should be specified' + args.language_model.num_layers = args.encoder_num_layers + + # Check required arguments. + # removed max_position_embeddings from reqs + required_args = ['num_layers', 'hidden_size', 'num_attention_heads'] + for req_arg in required_args: + _check_arg_is_not_none(args.language_model, req_arg) + + # Checks. + if args.language_model.ffn_hidden_size is None: + if args.language_model.activation_func == "swiglu": + # reduce the dimnesion for MLP since projections happens on + # two linear layers. this keeps the number of paramters in + # the same ballpark as the counterpart with 4*h size + # we keep it a multiple of 64, which means the actual tensor size + # will be a multiple of 64 / tp_size + args.language_model.ffn_hidden_size = int((4 * args.language_model.hidden_size * 2 / 3) / 64) * 64 + else: + args.language_model.ffn_hidden_size = 4 * args.language_model.hidden_size + + if args.language_model.kv_channels is None: + assert args.language_model.hidden_size % args.language_model.num_attention_heads == 0 + args.language_model.kv_channels = args.language_model.hidden_size // args.language_model.num_attention_heads + + #TODO: Implement arguments for encoder-decoder + if args.seq_length is not None: + assert args.encoder_seq_length is None + args.encoder_seq_length = args.seq_length + else: + assert args.encoder_seq_length is not None + args.seq_length = args.encoder_seq_length + + if args.seq_length is not None: + assert args.max_position_embeddings >= args.seq_length + if args.decoder_seq_length is not None: + assert args.max_position_embeddings >= args.decoder_seq_length + if args.lr is not None: + assert args.min_lr <= args.lr + if args.save is not None: + assert args.save_interval is not None + # Mixed precision checks. + if args.fp16_lm_cross_entropy: + assert args.fp16, 'lm cross entropy in fp16 only support in fp16 mode.' + if args.language_model.fp32_residual_connection: + assert args.model_parallel.fp16 or args.model_parallel.bf16, \ + 'residual connection in fp32 only supported when using fp16 or bf16.' 
+ + if args.language_model.moe_grouped_gemm: + assert args.model_parallel.bf16, 'Currently GroupedGEMM for MoE only supports bf16 dtype.' + dc = torch.cuda.get_device_capability() + assert dc[0] >= 8, "Unsupported compute capability for GroupedGEMM kernels." + + if args.weight_decay_incr_style == 'constant': + assert args.start_weight_decay is None + assert args.end_weight_decay is None + args.start_weight_decay = args.weight_decay + args.end_weight_decay = args.weight_decay + else: + assert args.start_weight_decay is not None + assert args.end_weight_decay is not None + + # Persistent fused layer norm. + if not is_torch_min_version("1.11.0a0"): + args.language_model.persist_layer_norm = False + if args.rank == 0: + print('Persistent fused layer norm kernel is supported from ' + 'pytorch v1.11 (nvidia pytorch container paired with v1.11). ' + 'Defaulting to no_persist_layer_norm=True') + + # Activation recomputing. + if args.language_model.distribute_saved_activations: + assert args.model_parallel.tensor_model_parallel_size > 1, 'can distribute ' \ + 'recomputed activations only across tensor model ' \ + 'parallel groups' + assert args.language_model.recompute_granularity == 'full', \ + 'distributed recompute activations is only '\ + 'application to full recompute granularity' + assert args.language_model.recompute_method is not None, \ + 'for distributed recompute activations to work you '\ + 'need to use a recompute method ' + assert is_torch_min_version("1.10.0a0"), \ + 'distributed recompute activations are supported for pytorch ' \ + 'v1.10 and above (Nvidia Pytorch container >= 21.07). Current ' \ + f'pytorch version is v{get_torch_version()}.' + + if args.language_model.recompute_granularity == 'selective': + assert args.language_model.recompute_method is None, \ + 'recompute method is not yet supported for ' \ + 'selective recomputing granularity' + + # disable sequence parallelism when tp=1 + # to avoid change in numerics when + # sequence_parallelism is enabled. + if args.model_parallel.tensor_model_parallel_size == 1: + args.model_parallel.sequence_parallel = False + + # disable async_tensor_model_parallel_allreduce when + # model parallel memory optimization is enabled + if args.model_parallel.sequence_parallel: + args.model_parallel.async_tensor_model_parallel_allreduce = False + + if os.environ.get('CUDA_DEVICE_MAX_CONNECTIONS') != "1": + if args.model_parallel.sequence_parallel: + raise RuntimeError( + "Using sequence parallelism requires setting the environment variable " + "CUDA_DEVICE_MAX_CONNECTIONS to 1") + if args.model_parallel.async_tensor_model_parallel_allreduce: + raise RuntimeError( + "Using async gradient all reduce requires setting the environment " + "variable CUDA_DEVICE_MAX_CONNECTIONS to 1") + + # Retro checks. + if getattr(args, 'retro_add_retriever', False): + raise Exception("Retro untested for yaml args. See arguments.py.") + + # Sequence parallelism unsupported. + assert not args.sequence_parallel, \ + "retro currently does not support sequence parallelism." + + # Pipeline parallelism unsupported. + assert args.pipeline_model_parallel_size == 1, \ + "retro currently does not support pipeline parallelism." + + #TODO: Retro args loading not tested + # Load retro args (used by both Retro & GPT). + if getattr(args, 'retro_project_dir', None) is not None: + raise Exception("Retro untested for yaml args. 
See arguments.py.") + + if args.language_model.rotary_interleaved and args.language_model.apply_rope_fusion: + raise RuntimeError('--rotary-interleaved does not work with rope_fusion.') + + # MoE Spec check + if args.language_model.num_moe_experts is not None: + assert args.spec is None, "Model Spec must be None when using MoEs" + if args.model_parallel.tensor_model_parallel_size > 1: + assert args.model_parallel.sequence_parallel, \ + "When using MoE and tensor parallelism, sequence parallelism must be used." + + # Expert parallelism check + if args.model_parallel.expert_model_parallel_size > 1: + assert args.language_model.num_moe_experts is not None, "num_moe_experts must be non-None to use expert model parallelism" + assert args.language_model.num_moe_experts % args.model_parallel.expert_model_parallel_size == 0, \ + "Number of experts should be a multiple of expert_model_parallel_size." + assert not args.model_parallel.fp16, \ + "Expert parallelism is not supported with fp16 training." + + # Print arguments. + _print_args("arguments", args) + + # TODO: Added because much of the global initialization requires the model parallel arguments. + args = SimpleNamespace(**args.__dict__, **args.model_parallel.__dict__) + args = SimpleNamespace(**args.__dict__, **args.language_model.__dict__) + # For GPT Layer spec in pretrain_gpt + args.num_experts = args.language_model.num_moe_experts + + return args + +def _print_args(title, args): + """Print arguments.""" + if args.rank == 0: + print(f'------------------------ {title} ------------------------', + flush=True) + str_list = [] + for arg in vars(args): + dots = '.' * (48 - len(arg)) + str_list.append(' {} {} {}'.format(arg, dots, getattr(args, arg))) + for arg in sorted(str_list, key=lambda x: x.lower()): + print(arg, flush=True) + print(f'-------------------- end of {title} ---------------------', + flush=True) + +def core_config_from_args(args, dataclass=TransformerConfig): + """Builds keyword arguments for a core config dataclass from namespace args. + + Raises an exception if an argument required by the dataclass is missing from args. + + Args: + args (SimpleNamespace): Namespace to pull argument values from + dataclass (dataclass, optional): Core dataclass config to pull argument names from + + + Returns: + dict: Keyword arguments used to construct the core config + """ + kw_args = {} + for f in dataclasses.fields(dataclass): + if hasattr(args, f.name): + kw_args[f.name] = getattr(args, f.name) + else: + raise Exception(f"Missing argument {f.name} for {str(dataclass)} config") + return kw_args + +def _check_arg_is_not_none(args, arg): + assert getattr(args, arg) is not None, '{} argument is None'.format(arg) + +def core_transformer_config_from_yaml(args, transfomer_key = "language_model"): + # Combine transformer config with model parallel args + args = SimpleNamespace(**vars(getattr(args, transfomer_key)), **vars(args.model_parallel)) + # Translate args to core transformer configuration + kw_args = core_config_from_args(args, TransformerConfig) + + # Hardcoded + kw_args['deallocate_pipeline_outputs'] = True + kw_args['pipeline_dtype'] = kw_args['params_dtype'] + kw_args['batch_p2p_comm'] = not args.overlap_p2p_comm + + assert args.activation_func in ["swiglu","squaredrelu","gelu"], f"{args.activation_func} is not a supported activation function" + if args.activation_func == "swiglu": + kw_args['activation_func'] = F.silu + kw_args['gated_linear_unit'] = True + kw_args['bias_activation_fusion'] = args.bias_swiglu_fusion + elif args.activation_func == "squaredrelu": + def 
squared_relu(x): + return torch.pow(F.relu(x), 2) + kw_args['activation_func'] = squared_relu + elif args.activation_func == "gelu": + kw_args['activation_func'] = F.gelu + if args.add_bias_linear: + kw_args['bias_activation_fusion'] = False + else: + kw_args['bias_activation_fusion'] = args.bias_activation_fusion + + if args.init_method == "xavier_uniform": + kw_args['init_method'] = torch.nn.init.xavier_uniform_ + kw_args['scaled_init_method'] = torch.nn.init.xavier_uniform_ + + # Return Transformer config. + if getattr(args, "multi_latent_attention", False): + return MLATransformerConfig(**kw_args) + else: + return TransformerConfig(**kw_args) + +def load_yaml(yaml_path): + print(f"warning using experimental yaml arguments feature, argparse arguments will be ignored") + with open(yaml_path, "r") as f: + config = yaml.load(f,Loader=yaml.FullLoader) + # Convert to nested namespace + config_namespace = json.loads(json.dumps(config), object_hook=lambda item: SimpleNamespace(**item)) + # Add config location to namespace + config_namespace.yaml_cfg = yaml_path + return config_namespace + diff --git a/mixtralnodes b/mixtralnodes deleted file mode 100644 index 1ebdf38..0000000 --- a/mixtralnodes +++ /dev/null @@ -1,2 +0,0 @@ -node021 slots=8 -node022 slots=8 \ No newline at end of file diff --git a/pretrain_bert.py b/pretrain_bert.py index 35884ec..e54cb55 100644 --- a/pretrain_bert.py +++ b/pretrain_bert.py @@ -1,193 +1,192 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - -"""Pretrain BERT""" - -from functools import partial - -import torch -import torch.nn.functional as F - -from megatron.training import get_args -from megatron.training import get_tokenizer -from megatron.training import print_rank_0 -from megatron.training import get_timers -from megatron.core import tensor_parallel -from megatron.core.enums import ModelType -import megatron.legacy.model -from megatron.core.models.bert.bert_model import BertModel -from megatron.training import pretrain -from megatron.training.utils import average_losses_across_data_parallel_group -from megatron.training.arguments import core_transformer_config_from_args -from megatron.core.transformer.spec_utils import import_module -from megatron.core.models.bert.bert_layer_specs import bert_layer_with_transformer_engine_spec, bert_layer_local_spec -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.bert_dataset import BERTMaskedWordPieceDataset, BERTMaskedWordPieceDatasetConfig -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core import mpu, tensor_parallel - - -def model_provider(pre_process=True, post_process=True): - """Build the model.""" - - print_rank_0('building BERT model ...') - - args = get_args() - config = core_transformer_config_from_args(args) - num_tokentypes = 2 if args.bert_binary_head else 0 - - if args.use_legacy_models: - model = megatron.legacy.model.BertModel( - config=config, - num_tokentypes=num_tokentypes, - add_binary_head=args.bert_binary_head, - parallel_output=True, - pre_process=pre_process, - post_process=post_process) - else: - if args.spec is None: - transformer_layer_spec = bert_layer_with_transformer_engine_spec #default spec - elif args.spec[0] == 'local': - print_rank_0('Using Local spec for transformer layers') - transformer_layer_spec = bert_layer_local_spec - else : - transformer_layer_spec = import_module(args.spec) - - model = BertModel( - config=config, - 
transformer_layer_spec=transformer_layer_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - num_tokentypes=num_tokentypes, - add_binary_head=args.bert_binary_head, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - parallel_output=True, - pre_process=pre_process, - post_process=post_process) - - return model - - -def get_batch(data_iterator): - """Build the batch.""" - - # Items and their type. - keys = ['text', 'types', 'labels', - 'is_random', 'loss_mask', 'padding_mask'] - datatype = torch.int64 - - # Broadcast data. - if data_iterator is not None: - data = next(data_iterator) - else: - data = None - data_b = tensor_parallel.broadcast_data(keys, data, datatype) - - # Unpack. - tokens = data_b['text'].long() - types = data_b['types'].long() - sentence_order = data_b['is_random'].long() - loss_mask = data_b['loss_mask'].float() - lm_labels = data_b['labels'].long() - padding_mask = data_b['padding_mask'].long() - - return tokens, types, sentence_order, loss_mask, lm_labels, padding_mask - - -def loss_func(loss_mask, sentence_order, output_tensor): - lm_loss_, sop_logits = output_tensor - - lm_loss_ = lm_loss_.float() - loss_mask = loss_mask.float() - lm_loss = torch.sum( - lm_loss_.view(-1) * loss_mask.reshape(-1)) / loss_mask.sum() - - if sop_logits is not None: - sop_loss = F.cross_entropy(sop_logits.view(-1, 2).float(), - sentence_order.view(-1), - ignore_index=-1) - sop_loss = sop_loss.float() - loss = lm_loss + sop_loss - averaged_losses = average_losses_across_data_parallel_group( - [lm_loss, sop_loss]) - return loss, {'lm loss': averaged_losses[0], - 'sop loss': averaged_losses[1]} - else: - loss = lm_loss - averaged_losses = average_losses_across_data_parallel_group( - [lm_loss]) - return loss, {'lm loss': averaged_losses[0]} - - -def forward_step(data_iterator, model): - """Forward step.""" - args = get_args() - timers = get_timers() - - # Get the batch. - timers('batch-generator', log_level=2).start() - tokens, types, sentence_order, loss_mask, lm_labels, padding_mask = get_batch( - data_iterator) - timers('batch-generator').stop() - - if not args.bert_binary_head: - types = None - - # Forward pass through the model. 
- output_tensor = model(tokens, padding_mask, - tokentype_ids=types, lm_labels=lm_labels) - - return output_tensor, partial(loss_func, loss_mask, sentence_order) - - -def train_valid_test_datasets_provider(train_val_test_num_samples): - """Build train, valid, and test datasets.""" - args = get_args() - - tokenizer = get_tokenizer() - - config = BERTMaskedWordPieceDatasetConfig( - random_seed=args.seed, - sequence_length=args.seq_length, - blend=get_blend_from_list(args.data_path), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ], - renormalize_blend_weights=args.renormalize_blend_weights, - split=args.split, - path_to_cache=args.data_cache_path, - tokenizer=tokenizer, - masking_probability=args.mask_prob, - short_sequence_probability=args.short_seq_prob, - masking_max_ngram=3, - masking_do_full_word=True, - masking_do_permutation=False, - masking_use_longer_ngrams=False, - masking_use_geometric_distribution=False, - classification_head=args.bert_binary_head, - ) - - print_rank_0('> building train, validation, and test datasets ' - 'for BERT ...') - - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - BERTMaskedWordPieceDataset, - train_val_test_num_samples, - lambda: mpu.get_tensor_model_parallel_rank() == 0, - config, - ).build() - - print_rank_0("> finished creating BERT datasets ...") - - return train_ds, valid_ds, test_ds - - -if __name__ == "__main__": - - # Temporary for transition to core datasets - train_valid_test_datasets_provider.is_distributed = True - - pretrain(train_valid_test_datasets_provider, model_provider, - ModelType.encoder_or_decoder, - forward_step, args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'}) +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
+ +"""Pretrain BERT""" + +from functools import partial + +import torch +import torch.nn.functional as F + +from megatron.training import get_args +from megatron.training import get_tokenizer +from megatron.training import print_rank_0 +from megatron.training import get_timers +from megatron.core import tensor_parallel +from megatron.core.enums import ModelType +import megatron.legacy.model +from megatron.core.models.bert.bert_model import BertModel +from megatron.training import pretrain +from megatron.training.utils import average_losses_across_data_parallel_group +from megatron.training.arguments import core_transformer_config_from_args +from megatron.core.transformer.spec_utils import import_module +from megatron.core.models.bert.bert_layer_specs import bert_layer_with_transformer_engine_spec, bert_layer_local_spec +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.bert_dataset import BERTMaskedWordPieceDataset, BERTMaskedWordPieceDatasetConfig +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core import mpu, tensor_parallel + + +def model_provider(pre_process=True, post_process=True): + """Build the model.""" + + print_rank_0('building BERT model ...') + + args = get_args() + config = core_transformer_config_from_args(args) + num_tokentypes = 2 if args.bert_binary_head else 0 + + if args.use_legacy_models: + model = megatron.legacy.model.BertModel( + config=config, + num_tokentypes=num_tokentypes, + add_binary_head=args.bert_binary_head, + parallel_output=True, + pre_process=pre_process, + post_process=post_process) + else: + if args.spec is None: + transformer_layer_spec = bert_layer_with_transformer_engine_spec #default spec + elif args.spec[0] == 'local': + print_rank_0('Using Local spec for transformer layers') + transformer_layer_spec = bert_layer_local_spec + else : + transformer_layer_spec = import_module(args.spec) + + model = BertModel( + config=config, + transformer_layer_spec=transformer_layer_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + num_tokentypes=num_tokentypes, + add_binary_head=args.bert_binary_head, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + parallel_output=True, + pre_process=pre_process, + post_process=post_process) + + return model + + +def get_batch(data_iterator): + """Build the batch.""" + + # Items and their type. + keys = ['text', 'types', 'labels', + 'is_random', 'loss_mask', 'padding_mask'] + datatype = torch.int64 + + # Broadcast data. + if data_iterator is not None: + data = next(data_iterator) + else: + data = None + data_b = tensor_parallel.broadcast_data(keys, data, datatype) + + # Unpack. 
+ tokens = data_b['text'].long() + types = data_b['types'].long() + sentence_order = data_b['is_random'].long() + loss_mask = data_b['loss_mask'].float() + lm_labels = data_b['labels'].long() + padding_mask = data_b['padding_mask'].long() + + return tokens, types, sentence_order, loss_mask, lm_labels, padding_mask + + +def loss_func(loss_mask, sentence_order, output_tensor): + lm_loss_, sop_logits = output_tensor + + lm_loss_ = lm_loss_.float() + loss_mask = loss_mask.float() + lm_loss = torch.sum( + lm_loss_.view(-1) * loss_mask.reshape(-1)) / loss_mask.sum() + + if sop_logits is not None: + sop_loss = F.cross_entropy(sop_logits.view(-1, 2).float(), + sentence_order.view(-1), + ignore_index=-1) + sop_loss = sop_loss.float() + loss = lm_loss + sop_loss + averaged_losses = average_losses_across_data_parallel_group( + [lm_loss, sop_loss]) + return loss, {'lm loss': averaged_losses[0], + 'sop loss': averaged_losses[1]} + else: + loss = lm_loss + averaged_losses = average_losses_across_data_parallel_group( + [lm_loss]) + return loss, {'lm loss': averaged_losses[0]} + + +def forward_step(data_iterator, model): + """Forward step.""" + args = get_args() + timers = get_timers() + + # Get the batch. + timers('batch-generator', log_level=2).start() + tokens, types, sentence_order, loss_mask, lm_labels, padding_mask = get_batch( + data_iterator) + timers('batch-generator').stop() + + if not args.bert_binary_head: + types = None + + # Forward pass through the model. + output_tensor = model(tokens, padding_mask, + tokentype_ids=types, lm_labels=lm_labels) + + return output_tensor, partial(loss_func, loss_mask, sentence_order) + + +def train_valid_test_datasets_provider(train_val_test_num_samples): + """Build train, valid, and test datasets.""" + args = get_args() + + tokenizer = get_tokenizer() + + config = BERTMaskedWordPieceDatasetConfig( + random_seed=args.seed, + sequence_length=args.seq_length, + blend=get_blend_from_list(args.data_path), + blend_per_split=[ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path) + ], + split=args.split, + path_to_cache=args.data_cache_path, + tokenizer=tokenizer, + masking_probability=args.mask_prob, + short_sequence_probability=args.short_seq_prob, + masking_max_ngram=3, + masking_do_full_word=True, + masking_do_permutation=False, + masking_use_longer_ngrams=False, + masking_use_geometric_distribution=False, + classification_head=args.bert_binary_head, + ) + + print_rank_0('> building train, validation, and test datasets ' + 'for BERT ...') + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + BERTMaskedWordPieceDataset, + train_val_test_num_samples, + lambda: mpu.get_tensor_model_parallel_rank() == 0, + config, + ).build() + + print_rank_0("> finished creating BERT datasets ...") + + return train_ds, valid_ds, test_ds + + +if __name__ == "__main__": + + # Temporary for transition to core datasets + train_valid_test_datasets_provider.is_distributed = True + + pretrain(train_valid_test_datasets_provider, model_provider, + ModelType.encoder_or_decoder, + forward_step, args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'}) diff --git a/pretrain_gpt.py b/pretrain_gpt.py index e25b621..702a2f7 100644 --- a/pretrain_gpt.py +++ b/pretrain_gpt.py @@ -1,309 +1,325 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-"""Pretrain GPT.""" - -import os -import torch -from functools import partial -from contextlib import nullcontext -import inspect - -from typing import List, Optional, Tuple, Union -from megatron.training import get_args -from megatron.training import print_rank_0 -from megatron.training import get_timers -from megatron.training import get_tokenizer -from megatron.core import mpu -from megatron.core.enums import ModelType -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.gpt_dataset import GPTDatasetConfig -from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset -from megatron.core.rerun_state_machine import get_rerun_state_machine -import megatron.legacy.model -from megatron.core.models.gpt import GPTModel -from megatron.training import pretrain -from megatron.core.utils import StragglerDetector -from megatron.core.transformer.spec_utils import import_module -from megatron.training.utils import ( - get_batch_on_this_cp_rank, - get_batch_on_this_tp_rank, - get_blend_and_blend_per_split, -) -from megatron.training.arguments import core_transformer_config_from_args -from megatron.training.yaml_arguments import core_transformer_config_from_yaml -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_decoder_block_spec, - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) -import torch._dynamo -torch._dynamo.config.suppress_errors = True - -stimer = StragglerDetector() - -def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: - """Builds the model. - - If you set the use_legacy_models to True, it will return the legacy GPT model and if not the mcore GPT model. - - Args: - pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. 
- - - Returns: - Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model - """ - args = get_args() - use_te = args.transformer_impl == "transformer_engine" - - if args.record_memory_history: - torch.cuda.memory._record_memory_history(True, - # keep 100,000 alloc/free events from before the snapshot - trace_alloc_max_entries=100000, - - # record stack information for the trace events - trace_alloc_record_context=True) - - print_rank_0('building GPT model ...') - # Experimental loading arguments from yaml - if args.yaml_cfg is not None: - config = core_transformer_config_from_yaml(args, "language_model") - else: - config = core_transformer_config_from_args(args) - - if args.use_legacy_models: - model = megatron.legacy.model.GPTModel( - config, - num_tokentypes=0, - parallel_output=True, - pre_process=pre_process, - post_process=post_process, - ) - else: # using core models - if args.spec is not None: - transformer_layer_spec = import_module(args.spec) - else: - if args.num_experts: - # Define the decoder block spec - transformer_layer_spec = get_gpt_decoder_block_spec(config, use_transformer_engine=use_te) - else: - # Define the decoder layer spec - if use_te: - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - args.num_experts, args.moe_grouped_gemm, - args.qk_layernorm, args.multi_latent_attention, args.moe_use_legacy_grouped_gemm) - else: - transformer_layer_spec = get_gpt_layer_local_spec( - args.num_experts, args.moe_grouped_gemm, - args.qk_layernorm, args.multi_latent_attention, args.moe_use_legacy_grouped_gemm) - - build_model_context = nullcontext - build_model_context_args = {} - if args.fp8_param_gather: - try: - from transformer_engine.pytorch import fp8_model_init - - build_model_context = fp8_model_init - build_model_context_args["enabled"] = True - - # Check if fp8_model_init supports preserve_high_precision_init_val - if "preserve_high_precision_init_val" in inspect.signature(fp8_model_init).parameters: - build_model_context_args["preserve_high_precision_init_val"] = True - except: - raise RuntimeError("--fp8-param-gather requires `fp8_model_init` from TransformerEngine, but not found.") - - with build_model_context(**build_model_context_args): - model = GPTModel( - config=config, - transformer_layer_spec=transformer_layer_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - pre_process=pre_process, - post_process=post_process, - fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, - parallel_output=True, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - position_embedding_type=args.position_embedding_type, - rotary_percent=args.rotary_percent, - rotary_base=args.rotary_base, - rope_scaling=args.use_rope_scaling - ) - model = torch.compile(model,mode='max-autotune-no-cudagraphs') - print_rank_0(model) - - return model - - -def get_batch(data_iterator): - """Generate a batch.""" - - # TODO: this is pretty hacky, find a better way - if (not mpu.is_pipeline_first_stage()) and (not mpu.is_pipeline_last_stage()): - return None, None, None, None, None - - # get batches based on the TP rank you are on - batch = get_batch_on_this_tp_rank(data_iterator) - - # slice batch along sequence dimension for context parallelism - batch = get_batch_on_this_cp_rank(batch) - - return batch.values() - - -# define spiky loss as a variation of 20% or more -SPIKY_LOSS_PERC = 0.2 - - -def loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): - """Loss function. 
- - Args: - loss_mask (torch.Tensor): Used to mask out some portions of the loss - output_tensor (torch.Tensor): The tensor with the losses - - Returns: - the loss scalar for this micro-batch - the number of non-padded tokens in this microbatch - a dict containing reporting metrics on the loss and number of tokens across - the data parallel ranks - """ - args = get_args() - - losses = output_tensor.float() - loss_mask = loss_mask.view(-1).float() - total_tokens = loss_mask.sum() - loss = torch.cat([torch.sum(losses.view(-1) * loss_mask).view(1), total_tokens.view(1)]) - - if args.context_parallel_size > 1: - torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) - - # Check individual rank losses are not NaN prior to DP all-reduce. - rerun_state_machine = get_rerun_state_machine() - if args.check_for_nan_in_loss_and_grad: - rerun_state_machine.validate_result( - result=loss[0], - rejection_func=torch.isnan, - message="found NaN in local forward loss calculation", - tolerance=0.0, # forward pass calculations are determinisic - fatal=True, - ) - # Check for spiky loss - if args.check_for_spiky_loss: - rerun_state_machine.validate_result( - result=loss[0], - rejection_func=partial(rerun_state_machine.is_spiky_loss, threshold=SPIKY_LOSS_PERC), - message="Spiky loss", - tolerance=0.0, # forward pass calculations are determinisic - fatal=False, - ) - # Reduce loss for logging. - reporting_loss = loss.clone().detach() - torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) - - local_num_tokens = loss[1].clone().detach().to(torch.int) - return ( - loss[0] * args.context_parallel_size, - local_num_tokens, - {'lm loss': (reporting_loss[0], reporting_loss[1])}, - ) - - -def forward_step(data_iterator, model: GPTModel): - """Forward training step. - - Args: - data_iterator : Input data iterator - model (GPTModel): The GPT Model - """ - args = get_args() - timers = get_timers() - - # Get the batch. - timers('batch-generator', log_level=2).start() - global stimer - with stimer(bdata=True): - tokens, labels, loss_mask, attention_mask, position_ids = get_batch( - data_iterator) - timers('batch-generator').stop() - - with stimer: - output_tensor = model(tokens, position_ids, attention_mask, - labels=labels) - - return output_tensor, partial(loss_func, loss_mask) - - -def is_dataset_built_on_rank(): - return ( - mpu.is_pipeline_first_stage() or mpu.is_pipeline_last_stage() - ) and mpu.get_tensor_model_parallel_rank() == 0 - - -def core_gpt_dataset_config_from_args(args): - tokenizer = get_tokenizer() - - # Sometimes --data-path is too long, instead we parse it from a file. 
- blend: Optional[Tuple[List[str], Optional[List[float]]]] - blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] - blend, blend_per_split = get_blend_and_blend_per_split(args) - - return GPTDatasetConfig( - random_seed=args.seed, - sequence_length=args.seq_length, - blend=blend, - blend_per_split=blend_per_split, - renormalize_blend_weights=args.renormalize_blend_weights, - split=args.split, - num_dataset_builder_threads=args.num_dataset_builder_threads, - path_to_cache=args.data_cache_path, - mmap_bin_files=args.mmap_bin_files, - tokenizer=tokenizer, - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - create_attention_mask=args.create_attention_mask_in_dataloader, - s3_cache_path=args.s3_cache_path, - ) - - -def train_valid_test_datasets_provider(train_val_test_num_samples): - """Build the train test and validation datasets. - - Args: - train_val_test_num_samples : A list containing the number of samples in train test and validation. - """ - args = get_args() - - config = core_gpt_dataset_config_from_args(args) - - if args.mock_data: - dataset_type = MockGPTDataset - else: - dataset_type = GPTDataset - - print_rank_0("> building train, validation, and test datasets for GPT ...") - - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - dataset_type, - train_val_test_num_samples, - is_dataset_built_on_rank, - config - ).build() - - print_rank_0("> finished creating GPT datasets ...") - - return train_ds, valid_ds, test_ds - - -if __name__ == "__main__": - - # Temporary for transition to core datasets - train_valid_test_datasets_provider.is_distributed = True - - pretrain( - train_valid_test_datasets_provider, - model_provider, - ModelType.encoder_or_decoder, - forward_step, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, - ) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +"""Pretrain GPT.""" + +import os +import torch +from functools import partial +from contextlib import nullcontext +import inspect + +from typing import List, Optional, Tuple, Union +from megatron.training import get_args +from megatron.training import print_rank_0 +from megatron.training import get_timers +from megatron.training import get_tokenizer +from megatron.core import mpu +from megatron.core.enums import ModelType +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.gpt_dataset import GPTDatasetConfig +from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset +from megatron.core.rerun_state_machine import get_rerun_state_machine +import megatron.legacy.model +from megatron.core.models.gpt import GPTModel +from megatron.training import pretrain +from megatron.core.utils import StragglerDetector +from megatron.core.transformer.spec_utils import import_module +from megatron.training.utils import ( + get_batch_on_this_cp_rank, + get_batch_on_this_tp_rank, + get_blend_and_blend_per_split, +) +from megatron.training.arguments import core_transformer_config_from_args +from megatron.training.yaml_arguments import core_transformer_config_from_yaml +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_decoder_block_spec, + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) + + +stimer = StragglerDetector() + +def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: + """Builds the model. 
+ + If you set the use_legacy_models to True, it will return the legacy GPT model and if not the mcore GPT model. + + Args: + pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. + + + Returns: + Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model + """ + args = get_args() + use_te = args.transformer_impl == "transformer_engine" + + if args.record_memory_history: + torch.cuda.memory._record_memory_history(True, + # keep 100,000 alloc/free events from before the snapshot + trace_alloc_max_entries=100000, + + # record stack information for the trace events + trace_alloc_record_context=True) + + def oom_observer(device, alloc, device_alloc, device_free): + # snapshot right after an OOM happened + print('saving allocated state during OOM') + snapshot = torch.cuda.memory._snapshot() + from pickle import dump + dump(snapshot, open(f"oom_rank-{torch.distributed.get_rank()}_{args.memory_snapshot_path}", 'wb')) + + torch._C._cuda_attach_out_of_memory_observer(oom_observer) + + print_rank_0('building GPT model ...') + # Experimental loading arguments from yaml + if args.yaml_cfg is not None: + config = core_transformer_config_from_yaml(args, "language_model") + else: + config = core_transformer_config_from_args(args) + + if args.use_legacy_models: + model = megatron.legacy.model.GPTModel( + config, + num_tokentypes=0, + parallel_output=True, + pre_process=pre_process, + post_process=post_process, + ) + else: # using core models + if args.spec is not None: + transformer_layer_spec = import_module(args.spec) + else: + if args.num_experts: + # Define the decoder block spec + transformer_layer_spec = get_gpt_decoder_block_spec(config, use_transformer_engine=use_te) + else: + # Define the decoder layer spec + if use_te: + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + args.num_experts, args.moe_grouped_gemm, + args.qk_layernorm, args.multi_latent_attention, args.moe_use_legacy_grouped_gemm) + else: + transformer_layer_spec = get_gpt_layer_local_spec( + args.num_experts, args.moe_grouped_gemm, + args.qk_layernorm, args.multi_latent_attention, args.moe_use_legacy_grouped_gemm) + + build_model_context = nullcontext + build_model_context_args = {} + if args.fp8_param_gather: + try: + from transformer_engine.pytorch import fp8_model_init + + build_model_context = fp8_model_init + build_model_context_args["enabled"] = True + + # Check if fp8_model_init supports preserve_high_precision_init_val + if "preserve_high_precision_init_val" in inspect.signature(fp8_model_init).parameters: + build_model_context_args["preserve_high_precision_init_val"] = True + except: + raise RuntimeError("--fp8-param-gather requires `fp8_model_init` from TransformerEngine, but not found.") + + with build_model_context(**build_model_context_args): + model = GPTModel( + config=config, + transformer_layer_spec=transformer_layer_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + pre_process=pre_process, + post_process=post_process, + fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, + parallel_output=True, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + position_embedding_type=args.position_embedding_type, + rotary_percent=args.rotary_percent, + rotary_base=args.rotary_base, + rope_scaling=args.use_rope_scaling + ) + + return model + + +def 
get_batch(data_iterator): + """Generate a batch.""" + + # TODO: this is pretty hacky, find a better way + if (not mpu.is_pipeline_first_stage()) and (not mpu.is_pipeline_last_stage()): + return None, None, None, None, None + + # get batches based on the TP rank you are on + batch = get_batch_on_this_tp_rank(data_iterator) + + # slice batch along sequence dimension for context parallelism + batch = get_batch_on_this_cp_rank(batch) + + return batch.values() + + +# define spiky loss as a loss that's 10x the max loss observed +SPIKY_LOSS_FACTOR = 10 + + +def loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): + """Loss function. + + Args: + loss_mask (torch.Tensor): Used to mask out some portions of the loss + output_tensor (torch.Tensor): The tensor with the losses + + Returns: + the loss scalar for this micro-batch + the number of non-padded tokens in this microbatch + a dict containing reporting metrics on the loss and number of tokens across + the data parallel ranks + """ + args = get_args() + + losses = output_tensor.float() + loss_mask = loss_mask.view(-1).float() + total_tokens = loss_mask.sum() + loss = torch.cat([torch.sum(losses.view(-1) * loss_mask).view(1), total_tokens.view(1)]) + + if args.context_parallel_size > 1: + torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) + + # Check individual rank losses are not NaN prior to DP all-reduce. + rerun_state_machine = get_rerun_state_machine() + if args.check_for_nan_in_loss_and_grad: + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=torch.isnan, + message="found NaN in local forward loss calculation", + tolerance=0.0, # forward pass calculations are determinisic + fatal=True, + ) + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=torch.isinf, + message="found Inf in local forward loss calculation", + tolerance=0.0, # forward pass calculations are determinisic + fatal=True, + ) + # Check for spiky loss + if args.check_for_spiky_loss: + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=partial( + rerun_state_machine.is_unexpectedly_large, + threshold=SPIKY_LOSS_FACTOR, + context="loss", + ), + message="Spiky loss", + tolerance=0.0, # forward pass calculations are determinisic + fatal=False, + ) + # Reduce loss for logging. + reporting_loss = loss.clone().detach() + torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) + + local_num_tokens = loss[1].clone().detach().to(torch.int) + return ( + loss[0] * args.context_parallel_size, + local_num_tokens, + {'lm loss': (reporting_loss[0], reporting_loss[1])}, + ) + + +def forward_step(data_iterator, model: GPTModel): + """Forward training step. + + Args: + data_iterator : Input data iterator + model (GPTModel): The GPT Model + """ + args = get_args() + timers = get_timers() + + # Get the batch. 
+ timers('batch-generator', log_level=2).start() + global stimer + with stimer(bdata=True): + tokens, labels, loss_mask, attention_mask, position_ids = get_batch( + data_iterator) + timers('batch-generator').stop() + + with stimer: + output_tensor = model(tokens, position_ids, attention_mask, + labels=labels) + + return output_tensor, partial(loss_func, loss_mask) + + +def is_dataset_built_on_rank(): + return ( + mpu.is_pipeline_first_stage() or mpu.is_pipeline_last_stage() + ) and mpu.get_tensor_model_parallel_rank() == 0 + + +def core_gpt_dataset_config_from_args(args): + tokenizer = get_tokenizer() + + # Sometimes --data-path is too long, instead we parse it from a file. + blend: Optional[Tuple[List[str], Optional[List[float]]]] + blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] + blend, blend_per_split = get_blend_and_blend_per_split(args) + + return GPTDatasetConfig( + random_seed=args.seed, + sequence_length=args.seq_length, + blend=blend, + blend_per_split=blend_per_split, + split=args.split, + num_dataset_builder_threads=args.num_dataset_builder_threads, + path_to_cache=args.data_cache_path, + mmap_bin_files=args.mmap_bin_files, + tokenizer=tokenizer, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + create_attention_mask=args.create_attention_mask_in_dataloader, + s3_cache_path=args.s3_cache_path, + ) + + +def train_valid_test_datasets_provider(train_val_test_num_samples): + """Build the train test and validation datasets. + + Args: + train_val_test_num_samples : A list containing the number of samples in train test and validation. + """ + args = get_args() + + config = core_gpt_dataset_config_from_args(args) + + if args.mock_data: + dataset_type = MockGPTDataset + else: + dataset_type = GPTDataset + + print_rank_0("> building train, validation, and test datasets for GPT ...") + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + dataset_type, + train_val_test_num_samples, + is_dataset_built_on_rank, + config + ).build() + + print_rank_0("> finished creating GPT datasets ...") + + return train_ds, valid_ds, test_ds + + +if __name__ == "__main__": + + # Temporary for transition to core datasets + train_valid_test_datasets_provider.is_distributed = True + + pretrain( + train_valid_test_datasets_provider, + model_provider, + ModelType.encoder_or_decoder, + forward_step, + args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, + ) diff --git a/pretrain_mamba.py b/pretrain_mamba.py index df5fa9f..2963121 100644 --- a/pretrain_mamba.py +++ b/pretrain_mamba.py @@ -1,262 +1,272 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
-"""Pretrain Mamba.""" - -import os -import torch -from functools import partial -from typing import List, Optional, Tuple, Union - -from megatron.training import get_args -from megatron.training import print_rank_0 -from megatron.training import get_timers -from megatron.training import get_tokenizer -from megatron.core import mpu -from megatron.core.enums import ModelType -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.gpt_dataset import GPTDatasetConfig -from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset -from megatron.core.rerun_state_machine import get_rerun_state_machine -from megatron.core.models.mamba import MambaModel -from megatron.training import pretrain -from megatron.core.utils import StragglerDetector -from megatron.core.transformer.spec_utils import import_module -from megatron.training.utils import ( - get_batch_on_this_cp_rank, - get_batch_on_this_tp_rank, - get_blend_and_blend_per_split, -) -from megatron.training.arguments import core_transformer_config_from_args -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec - - -stimer = StragglerDetector() - -def count_parameters_in_layer(model, layer_name): - num_params = 0 - for name, param in model.named_parameters(): - if layer_name in name: - num_params += param.numel() - print_rank_0(f" - {name}: {param.numel()}") - return num_params - - -def model_provider(pre_process=True, post_process=True) -> MambaModel: - """Builds the model. - - Args: - pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. - - - Returns: - MambaModel: The returned model - """ - args = get_args() - - print_rank_0('building Mamba model ...') - config = core_transformer_config_from_args(get_args()) - - assert args.use_legacy_models == False, "Mamba only supported in Mcore!" 
- - if args.spec is not None: - mamba_stack_spec = import_module(args.spec) - else: - raise("You must provide a valid Mamba layer spec!") - - model = MambaModel( - config=config, - mamba_stack_spec=mamba_stack_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - pre_process=pre_process, - hybrid_attention_ratio=args.hybrid_attention_ratio, - hybrid_mlp_ratio=args.hybrid_mlp_ratio, - hybrid_override_pattern=args.hybrid_override_pattern, - post_process=post_process, - fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, - parallel_output=True, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - position_embedding_type=args.position_embedding_type, - rotary_percent=args.rotary_percent, - rotary_base=args.rotary_base - ) - - for l in range(model.decoder.num_layers_per_pipeline_rank): - layer_params = count_parameters_in_layer(model, f'decoder.layers.{l}.') - print_rank_0(f" == params layer {l}: {layer_params}") - - return model - - -def get_batch(data_iterator): - """Generate a batch.""" - - # TODO: this is pretty hacky, find a better way - if (not mpu.is_pipeline_first_stage()) and (not mpu.is_pipeline_last_stage()): - return None, None, None, None, None - - # get batches based on the TP rank you are on - batch = get_batch_on_this_tp_rank(data_iterator) - - # slice batch along sequence dimension for context parallelism - batch = get_batch_on_this_cp_rank(batch) - - return batch.values() - - -# define spiky loss as a variation of 20% or more -SPIKY_LOSS_PERC = 0.2 - - -def loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): - """Loss function. - - Args: - loss_mask (torch.Tensor): Used to mask out some portions of the loss - output_tensor (torch.Tensor): The tensor with the losses - - Returns: - the loss scalar for this micro-batch - the number of non-padded tokens in this microbatch - a dict containing reporting metrics on the loss and number of tokens across - the data parallel ranks - """ - args = get_args() - - losses = output_tensor.float() - loss_mask = loss_mask.view(-1).float() - total_tokens = loss_mask.sum() - loss = torch.cat([torch.sum(losses.view(-1) * loss_mask).view(1), total_tokens.view(1)]) - - if args.context_parallel_size > 1: - torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) - - # Check individual rank losses are not NaN prior to DP all-reduce. - rerun_state_machine = get_rerun_state_machine() - if args.check_for_nan_in_loss_and_grad: - rerun_state_machine.validate_result( - result=loss[0], - rejection_func=torch.isnan, - message="found NaN in local forward loss calculation", - tolerance=0.0, # forward pass calculations are determinisic - fatal=True, - ) - # Check for spiky loss - if args.check_for_spiky_loss: - rerun_state_machine.validate_result( - result=loss[0], - rejection_func=partial(rerun_state_machine.is_spiky_loss, threshold=SPIKY_LOSS_PERC), - message="Spiky loss", - tolerance=0.0, # forward pass calculations are determinisic - fatal=False, - ) - - # Reduce loss for logging. - reporting_loss = loss.clone().detach() - torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) - - local_num_tokens = loss[1].clone().detach().to(torch.int) - return ( - loss[0] * args.context_parallel_size, - local_num_tokens, - {'lm loss': (reporting_loss[0], reporting_loss[1])}, - ) - - -def forward_step(data_iterator, model: MambaModel): - """Forward training step. 
- - Args: - data_iterator : Input data iterator - model (MambaModel): The GPT Model - """ - args = get_args() - timers = get_timers() - - # Get the batch. - timers('batch-generator', log_level=2).start() - global stimer - with stimer(bdata=True): - tokens, labels, loss_mask, attention_mask, position_ids = get_batch( - data_iterator) - timers('batch-generator').stop() - - with stimer: - output_tensor = model(tokens, position_ids, attention_mask, - labels=labels) - - return output_tensor, partial(loss_func, loss_mask) - - -def is_dataset_built_on_rank(): - return ( - mpu.is_pipeline_first_stage() or mpu.is_pipeline_last_stage() - ) and mpu.get_tensor_model_parallel_rank() == 0 - - -def core_gpt_dataset_config_from_args(args): - tokenizer = get_tokenizer() - - # Sometimes --data-path is too long, instead we parse it from a file. - blend: Optional[Tuple[List[str], Optional[List[float]]]] - blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] - blend, blend_per_split = get_blend_and_blend_per_split(args) - - return GPTDatasetConfig( - random_seed=args.seed, - sequence_length=args.seq_length, - blend=blend, - blend_per_split=blend_per_split, - renormalize_blend_weights=args.renormalize_blend_weights, - split=args.split, - num_dataset_builder_threads=args.num_dataset_builder_threads, - path_to_cache=args.data_cache_path, - mmap_bin_files=args.mmap_bin_files, - tokenizer=tokenizer, - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - create_attention_mask=args.create_attention_mask_in_dataloader, - s3_cache_path=args.s3_cache_path, - ) - - -def train_valid_test_datasets_provider(train_val_test_num_samples): - """Build the train test and validation datasets. - - Args: - train_val_test_num_samples : A list containing the number of samples in train test and validation. - """ - args = get_args() - - config = core_gpt_dataset_config_from_args(args) - - if args.mock_data: - dataset_type = MockGPTDataset - else: - dataset_type = GPTDataset - - print_rank_0("> building train, validation, and test datasets for GPT ...") - - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - dataset_type, - train_val_test_num_samples, - is_dataset_built_on_rank, - config - ).build() - - print_rank_0("> finished creating GPT datasets ...") - - return train_ds, valid_ds, test_ds - - -if __name__ == "__main__": - - # Temporary for transition to core datasets - train_valid_test_datasets_provider.is_distributed = True - - pretrain(train_valid_test_datasets_provider, - model_provider, - ModelType.encoder_or_decoder, - forward_step, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+"""Pretrain Mamba.""" + +import os +import torch +from functools import partial +from typing import List, Optional, Tuple, Union + +from megatron.training import get_args +from megatron.training import print_rank_0 +from megatron.training import get_timers +from megatron.training import get_tokenizer +from megatron.core import mpu +from megatron.core.enums import ModelType +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.gpt_dataset import GPTDatasetConfig +from megatron.core.datasets.gpt_dataset import MockGPTDataset, GPTDataset +from megatron.core.rerun_state_machine import get_rerun_state_machine +from megatron.core.models.mamba import MambaModel +from megatron.training import pretrain +from megatron.core.utils import StragglerDetector +from megatron.core.transformer.spec_utils import import_module +from megatron.training.utils import ( + get_batch_on_this_cp_rank, + get_batch_on_this_tp_rank, + get_blend_and_blend_per_split, +) +from megatron.training.arguments import core_transformer_config_from_args +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec + + +stimer = StragglerDetector() + +def count_parameters_in_layer(model, layer_name): + num_params = 0 + for name, param in model.named_parameters(): + if layer_name in name: + num_params += param.numel() + print_rank_0(f" - {name}: {param.numel()}") + return num_params + + +def model_provider(pre_process=True, post_process=True) -> MambaModel: + """Builds the model. + + Args: + pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. + + + Returns: + MambaModel: The returned model + """ + args = get_args() + + print_rank_0('building Mamba model ...') + config = core_transformer_config_from_args(get_args()) + + assert args.use_legacy_models == False, "Mamba only supported in Mcore!" 
+ + if args.spec is not None: + mamba_stack_spec = import_module(args.spec) + else: + raise("You must provide a valid Mamba layer spec!") + + model = MambaModel( + config=config, + mamba_stack_spec=mamba_stack_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + pre_process=pre_process, + hybrid_attention_ratio=args.hybrid_attention_ratio, + hybrid_mlp_ratio=args.hybrid_mlp_ratio, + hybrid_override_pattern=args.hybrid_override_pattern, + post_process=post_process, + fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, + parallel_output=True, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + position_embedding_type=args.position_embedding_type, + rotary_percent=args.rotary_percent, + rotary_base=args.rotary_base + ) + + for l in range(model.decoder.num_layers_per_pipeline_rank): + layer_params = count_parameters_in_layer(model, f'decoder.layers.{l}.') + print_rank_0(f" == params layer {l}: {layer_params}") + + return model + + +def get_batch(data_iterator): + """Generate a batch.""" + + # TODO: this is pretty hacky, find a better way + if (not mpu.is_pipeline_first_stage()) and (not mpu.is_pipeline_last_stage()): + return None, None, None, None, None + + # get batches based on the TP rank you are on + batch = get_batch_on_this_tp_rank(data_iterator) + + # slice batch along sequence dimension for context parallelism + batch = get_batch_on_this_cp_rank(batch) + + return batch.values() + + +# define spiky loss as a loss that's 10x the max loss observed +SPIKY_LOSS_FACTOR = 10 + + +def loss_func(loss_mask: torch.Tensor, output_tensor: torch.Tensor): + """Loss function. + + Args: + loss_mask (torch.Tensor): Used to mask out some portions of the loss + output_tensor (torch.Tensor): The tensor with the losses + + Returns: + the loss scalar for this micro-batch + the number of non-padded tokens in this microbatch + a dict containing reporting metrics on the loss and number of tokens across + the data parallel ranks + """ + args = get_args() + + losses = output_tensor.float() + loss_mask = loss_mask.view(-1).float() + total_tokens = loss_mask.sum() + loss = torch.cat([torch.sum(losses.view(-1) * loss_mask).view(1), total_tokens.view(1)]) + + if args.context_parallel_size > 1: + torch.distributed.all_reduce(loss, group=mpu.get_context_parallel_group()) + + # Check individual rank losses are not NaN prior to DP all-reduce. + rerun_state_machine = get_rerun_state_machine() + if args.check_for_nan_in_loss_and_grad: + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=torch.isnan, + message="found NaN in local forward loss calculation", + tolerance=0.0, # forward pass calculations are determinisic + fatal=True, + ) + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=torch.isinf, + message="found Inf in local forward loss calculation", + tolerance=0.0, # forward pass calculations are determinisic + fatal=True, + ) + # Check for spiky loss + if args.check_for_spiky_loss: + rerun_state_machine.validate_result( + result=loss[0], + rejection_func=partial( + rerun_state_machine.is_unexpectedly_large, + threshold=SPIKY_LOSS_FACTOR, + context="loss", + ), + message="Spiky loss", + tolerance=0.0, # forward pass calculations are determinisic + fatal=False, + ) + + # Reduce loss for logging. 
+ reporting_loss = loss.clone().detach() + torch.distributed.all_reduce(reporting_loss, group=mpu.get_data_parallel_group()) + + local_num_tokens = loss[1].clone().detach().to(torch.int) + return ( + loss[0] * args.context_parallel_size, + local_num_tokens, + {'lm loss': (reporting_loss[0], reporting_loss[1])}, + ) + + +def forward_step(data_iterator, model: MambaModel): + """Forward training step. + + Args: + data_iterator : Input data iterator + model (MambaModel): The GPT Model + """ + args = get_args() + timers = get_timers() + + # Get the batch. + timers('batch-generator', log_level=2).start() + global stimer + with stimer(bdata=True): + tokens, labels, loss_mask, attention_mask, position_ids = get_batch( + data_iterator) + timers('batch-generator').stop() + + with stimer: + output_tensor = model(tokens, position_ids, attention_mask, + labels=labels) + + return output_tensor, partial(loss_func, loss_mask) + + +def is_dataset_built_on_rank(): + return ( + mpu.is_pipeline_first_stage() or mpu.is_pipeline_last_stage() + ) and mpu.get_tensor_model_parallel_rank() == 0 + + +def core_gpt_dataset_config_from_args(args): + tokenizer = get_tokenizer() + + # Sometimes --data-path is too long, instead we parse it from a file. + blend: Optional[Tuple[List[str], Optional[List[float]]]] + blend_per_split: Optional[List[Optional[Tuple[List[str], Optional[List[float]]]]]] + blend, blend_per_split = get_blend_and_blend_per_split(args) + + return GPTDatasetConfig( + random_seed=args.seed, + sequence_length=args.seq_length, + blend=blend, + blend_per_split=blend_per_split, + split=args.split, + num_dataset_builder_threads=args.num_dataset_builder_threads, + path_to_cache=args.data_cache_path, + mmap_bin_files=args.mmap_bin_files, + tokenizer=tokenizer, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + create_attention_mask=args.create_attention_mask_in_dataloader, + s3_cache_path=args.s3_cache_path, + ) + + +def train_valid_test_datasets_provider(train_val_test_num_samples): + """Build the train test and validation datasets. + + Args: + train_val_test_num_samples : A list containing the number of samples in train test and validation. + """ + args = get_args() + + config = core_gpt_dataset_config_from_args(args) + + if args.mock_data: + dataset_type = MockGPTDataset + else: + dataset_type = GPTDataset + + print_rank_0("> building train, validation, and test datasets for GPT ...") + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + dataset_type, + train_val_test_num_samples, + is_dataset_built_on_rank, + config + ).build() + + print_rank_0("> finished creating GPT datasets ...") + + return train_ds, valid_ds, test_ds + + +if __name__ == "__main__": + + # Temporary for transition to core datasets + train_valid_test_datasets_provider.is_distributed = True + + pretrain(train_valid_test_datasets_provider, + model_provider, + ModelType.encoder_or_decoder, + forward_step, + args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}) diff --git a/pretrain_retro.py b/pretrain_retro.py index 0aecbf1..ac15e46 100644 --- a/pretrain_retro.py +++ b/pretrain_retro.py @@ -1,245 +1,244 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -"""Pretrain Retro.""" - -from functools import partial -import torch - -from megatron.training import get_args -from megatron.training import get_timers -from megatron.training import get_tokenizer -from megatron.training import print_rank_0 -from megatron.training.arguments import core_transformer_config_from_args -from megatron.core import tensor_parallel -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core.datasets.retro.query.retro_dataset import get_retro_datasets -from megatron.core.datasets.retro.query.multi_split_gpt_dataset import MultiSplitGPTDataset, MultiSplitGPTDatasetConfig -from megatron.core.enums import ModelType -from megatron.core.models.retro import get_retro_decoder_block_spec, RetroConfig, RetroModel -from megatron.core.models.retro.utils import get_all_true_mask -from megatron.training import pretrain -from megatron.training.utils import get_ltor_masks_and_position_ids -from pretrain_gpt import ( - is_dataset_built_on_rank, - loss_func, - model_provider as default_model_provider, - train_valid_test_datasets_provider as gpt_train_valid_test_datasets_provider, -) - - -def get_retro_config(): - return core_transformer_config_from_args(get_args(), RetroConfig) - - -def core_model_provider(pre_process=True, post_process=True): - """Build the model using Megatron-Core.""" - - args = get_args() - config = get_retro_config() - - # NOTE: Experimental customization feature - if args.spec is not None: - block_spec = import_module(args.spec)() - else: - block_spec = get_retro_decoder_block_spec(config, use_transformer_engine=True) - - print_rank_0('building GPT model ...') - model = RetroModel( - config=config, - transformer_layer_spec=block_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - pre_process=pre_process, - post_process=post_process, - fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, - parallel_output=True, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - position_embedding_type=args.position_embedding_type, - rotary_percent=args.rotary_percent - ) - return model - - -def model_provider(pre_process=True, post_process=True): - """Build the model. - - Select between two different model classes: - 1. Default model (uses megatron.legacy.models/gpt_model.py). - 2. Core model (uses megatron/core/models/retro/model.py). - """ - - args = get_args() - if not args.use_legacy_models and args.retro_add_retriever: - provider = core_model_provider - else: - provider = default_model_provider - model = provider(pre_process=pre_process, post_process=post_process) - return model - - -def get_batch(data_iterator): - """Generate a batch""" - - args = get_args() - tokenizer = get_tokenizer() - config = get_retro_config() - - # Items and their type. - keys = ['text'] - if args.retro_add_retriever: - keys.append('neighbor_tokens') - datatype = torch.int64 - - # Broadcast data. - if data_iterator is not None: - data = next(data_iterator) - else: - data = None - - data_b = tensor_parallel.broadcast_data(keys, data, datatype) - - # Unpack. - tokens_ = data_b['text'].long() - labels = tokens_[:, 1:].contiguous() - tokens = tokens_[:, :-1].contiguous() - - # Get the masks and postition ids. 
- attention_mask, loss_mask, position_ids = get_ltor_masks_and_position_ids( - tokens, - tokenizer.eod, - args.reset_position_ids, - args.reset_attention_mask, - args.eod_mask_loss) - - if args.retro_add_retriever: - # note: [bs * l * k, r] - # note: 2x == neighbor, continuation - neighbor_tokens = data_b['neighbor_tokens'] \ - .view(-1, config.retro_retrieved_length).long() - _, _, neighbor_position_ids = get_ltor_masks_and_position_ids( - neighbor_tokens, - tokenizer.eod, - args.reset_position_ids, - args.reset_attention_mask, - args.eod_mask_loss) - neighbor_attention_mask = get_all_true_mask( - (1, 1, config.retro_retrieved_length, config.retro_retrieved_length), - neighbor_tokens.device) - return tokens, labels, loss_mask, attention_mask, position_ids, \ - neighbor_tokens, neighbor_attention_mask, neighbor_position_ids - - else: - return tokens, labels, loss_mask, attention_mask, position_ids - - -def forward_step(data_iterator, model): - """Forward step.""" - args = get_args() - timers = get_timers() - - # Get the batch. - timers('batch-generator').start() - if args.retro_add_retriever: - tokens, labels, loss_mask, attention_mask, position_ids, \ - neighbor_tokens, neighbor_attention_mask, neighbor_position_ids = \ - get_batch(data_iterator) - else: - tokens, labels, loss_mask, attention_mask, position_ids = get_batch( - data_iterator) - neighbor_tokens, neighbor_attention_mask, neighbor_position_ids = \ - None, None, None - timers('batch-generator').stop() - - # Model call. - if args.use_legacy_models: - forward_kwargs = { - "retriever_input_ids" : neighbor_tokens, - "retriever_position_ids" : neighbor_position_ids, - "retriever_attn_mask" : neighbor_attention_mask, - } - else: - if args.retro_add_retriever: - forward_kwargs = { - "context_input_ids" : neighbor_tokens, - "context_position_ids" : neighbor_position_ids, - "context_mask" : neighbor_attention_mask, - } - else: - forward_kwargs = {} - - output_tensor = model(tokens, position_ids, attention_mask, - labels=labels, **forward_kwargs) - - return output_tensor, partial(loss_func, loss_mask) - - -def train_valid_test_datasets_provider(train_valid_test_num_samples): - """Build train, valid, and test datasets.""" - args = get_args() - - # Dataset config. - retro_config = get_retro_config() - data_config = MultiSplitGPTDatasetConfig( - random_seed=args.seed, - sequence_length=args.seq_length, - blend=get_blend_from_list(args.data_path), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ], - renormalize_blend_weights=args.renormalize_blend_weights, - split=args.split, - split_preprocessing=retro_config.retro_split_preprocessing, - path_to_cache=args.data_cache_path, - return_document_ids=False, - tokenizer=get_tokenizer(), - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - ) - - # GPT datasets. - print_rank_0(" > multi-split gpt datasets.") - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - MultiSplitGPTDataset, - train_valid_test_num_samples, - is_dataset_built_on_rank, - data_config, - ).build() - - gpt_datasets = { - "train" : (train_ds, train_valid_test_num_samples[0]), - "valid" : (valid_ds, train_valid_test_num_samples[1]), - "test" : (test_ds, train_valid_test_num_samples[2]), - } - - # Retro datasets. 
- if args.retro_add_retriever: - return get_retro_datasets( - config=retro_config, - gpt_datasets=gpt_datasets, - sample_length=args.seq_length, - eod_token_id=get_tokenizer().eod, - ) - - # Multi-split GPT datasets. - else: - return ( - gpt_datasets["train"][0], - gpt_datasets["valid"][0], - gpt_datasets["test"][0], - ) - - -if __name__ == "__main__": - - # Temporary for transition to core datasets. - train_valid_test_datasets_provider.is_distributed = True - - pretrain(train_valid_test_datasets_provider, - model_provider, - ModelType.retro_decoder, - forward_step, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Pretrain Retro.""" + +from functools import partial +import torch + +from megatron.training import get_args +from megatron.training import get_timers +from megatron.training import get_tokenizer +from megatron.training import print_rank_0 +from megatron.training.arguments import core_transformer_config_from_args +from megatron.core import tensor_parallel +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core.datasets.retro.query.retro_dataset import get_retro_datasets +from megatron.core.datasets.retro.query.multi_split_gpt_dataset import MultiSplitGPTDataset, MultiSplitGPTDatasetConfig +from megatron.core.enums import ModelType +from megatron.core.models.retro import get_retro_decoder_block_spec, RetroConfig, RetroModel +from megatron.core.models.retro.utils import get_all_true_mask +from megatron.training import pretrain +from megatron.training.utils import get_ltor_masks_and_position_ids +from pretrain_gpt import ( + is_dataset_built_on_rank, + loss_func, + model_provider as default_model_provider, + train_valid_test_datasets_provider as gpt_train_valid_test_datasets_provider, +) + + +def get_retro_config(): + return core_transformer_config_from_args(get_args(), RetroConfig) + + +def core_model_provider(pre_process=True, post_process=True): + """Build the model using Megatron-Core.""" + + args = get_args() + config = get_retro_config() + + # NOTE: Experimental customization feature + if args.spec is not None: + block_spec = import_module(args.spec)() + else: + block_spec = get_retro_decoder_block_spec(config, use_transformer_engine=True) + + print_rank_0('building GPT model ...') + model = RetroModel( + config=config, + transformer_layer_spec=block_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + pre_process=pre_process, + post_process=post_process, + fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, + parallel_output=True, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + position_embedding_type=args.position_embedding_type, + rotary_percent=args.rotary_percent + ) + return model + + +def model_provider(pre_process=True, post_process=True): + """Build the model. + + Select between two different model classes: + 1. Default model (uses megatron.legacy.models/gpt_model.py). + 2. Core model (uses megatron/core/models/retro/model.py). 
+ """ + + args = get_args() + if not args.use_legacy_models and args.retro_add_retriever: + provider = core_model_provider + else: + provider = default_model_provider + model = provider(pre_process=pre_process, post_process=post_process) + return model + + +def get_batch(data_iterator): + """Generate a batch""" + + args = get_args() + tokenizer = get_tokenizer() + config = get_retro_config() + + # Items and their type. + keys = ['text'] + if args.retro_add_retriever: + keys.append('neighbor_tokens') + datatype = torch.int64 + + # Broadcast data. + if data_iterator is not None: + data = next(data_iterator) + else: + data = None + + data_b = tensor_parallel.broadcast_data(keys, data, datatype) + + # Unpack. + tokens_ = data_b['text'].long() + labels = tokens_[:, 1:].contiguous() + tokens = tokens_[:, :-1].contiguous() + + # Get the masks and postition ids. + attention_mask, loss_mask, position_ids = get_ltor_masks_and_position_ids( + tokens, + tokenizer.eod, + args.reset_position_ids, + args.reset_attention_mask, + args.eod_mask_loss) + + if args.retro_add_retriever: + # note: [bs * l * k, r] + # note: 2x == neighbor, continuation + neighbor_tokens = data_b['neighbor_tokens'] \ + .view(-1, config.retro_retrieved_length).long() + _, _, neighbor_position_ids = get_ltor_masks_and_position_ids( + neighbor_tokens, + tokenizer.eod, + args.reset_position_ids, + args.reset_attention_mask, + args.eod_mask_loss) + neighbor_attention_mask = get_all_true_mask( + (1, 1, config.retro_retrieved_length, config.retro_retrieved_length), + neighbor_tokens.device) + return tokens, labels, loss_mask, attention_mask, position_ids, \ + neighbor_tokens, neighbor_attention_mask, neighbor_position_ids + + else: + return tokens, labels, loss_mask, attention_mask, position_ids + + +def forward_step(data_iterator, model): + """Forward step.""" + args = get_args() + timers = get_timers() + + # Get the batch. + timers('batch-generator').start() + if args.retro_add_retriever: + tokens, labels, loss_mask, attention_mask, position_ids, \ + neighbor_tokens, neighbor_attention_mask, neighbor_position_ids = \ + get_batch(data_iterator) + else: + tokens, labels, loss_mask, attention_mask, position_ids = get_batch( + data_iterator) + neighbor_tokens, neighbor_attention_mask, neighbor_position_ids = \ + None, None, None + timers('batch-generator').stop() + + # Model call. + if args.use_legacy_models: + forward_kwargs = { + "retriever_input_ids" : neighbor_tokens, + "retriever_position_ids" : neighbor_position_ids, + "retriever_attn_mask" : neighbor_attention_mask, + } + else: + if args.retro_add_retriever: + forward_kwargs = { + "context_input_ids" : neighbor_tokens, + "context_position_ids" : neighbor_position_ids, + "context_mask" : neighbor_attention_mask, + } + else: + forward_kwargs = {} + + output_tensor = model(tokens, position_ids, attention_mask, + labels=labels, **forward_kwargs) + + return output_tensor, partial(loss_func, loss_mask) + + +def train_valid_test_datasets_provider(train_valid_test_num_samples): + """Build train, valid, and test datasets.""" + args = get_args() + + # Dataset config. 
+ retro_config = get_retro_config() + data_config = MultiSplitGPTDatasetConfig( + random_seed=args.seed, + sequence_length=args.seq_length, + blend=get_blend_from_list(args.data_path), + blend_per_split=[ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path) + ], + split=args.split, + split_preprocessing=retro_config.retro_split_preprocessing, + path_to_cache=args.data_cache_path, + return_document_ids=False, + tokenizer=get_tokenizer(), + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + ) + + # GPT datasets. + print_rank_0(" > multi-split gpt datasets.") + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + MultiSplitGPTDataset, + train_valid_test_num_samples, + is_dataset_built_on_rank, + data_config, + ).build() + + gpt_datasets = { + "train" : (train_ds, train_valid_test_num_samples[0]), + "valid" : (valid_ds, train_valid_test_num_samples[1]), + "test" : (test_ds, train_valid_test_num_samples[2]), + } + + # Retro datasets. + if args.retro_add_retriever: + return get_retro_datasets( + config=retro_config, + gpt_datasets=gpt_datasets, + sample_length=args.seq_length, + eod_token_id=get_tokenizer().eod, + ) + + # Multi-split GPT datasets. + else: + return ( + gpt_datasets["train"][0], + gpt_datasets["valid"][0], + gpt_datasets["test"][0], + ) + + +if __name__ == "__main__": + + # Temporary for transition to core datasets. + train_valid_test_datasets_provider.is_distributed = True + + pretrain(train_valid_test_datasets_provider, + model_provider, + ModelType.retro_decoder, + forward_step, + args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}) diff --git a/pretrain_t5.py b/pretrain_t5.py index 21e5d4d..6af2a27 100644 --- a/pretrain_t5.py +++ b/pretrain_t5.py @@ -1,307 +1,308 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - -"""Pretrain T5""" - -from copy import deepcopy -from functools import partial -from typing import Union - -import torch - -import megatron -from megatron.core import mpu, tensor_parallel -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.t5_dataset import ( - T5MaskedWordPieceDataset, - T5MaskedWordPieceDatasetConfig, -) -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core.enums import ModelType -from megatron.core.models.T5 import T5Model -from megatron.core.models.T5.t5_spec import ( - get_t5_decoder_with_local_block_spec, - get_t5_decoder_with_transformer_engine_block_spec, - get_t5_encoder_with_local_block_spec, - get_t5_encoder_with_transformer_engine_block_spec, -) -from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 -from megatron.training.arguments import core_transformer_config_from_args -from pretrain_gpt import loss_func - -""" -Pipeline parallelism for T5 - -T5 is a model architecture with both encoder and decoder blocks. -Consequently, pipeline parallelism is implemented slightly differently -compared to architectures like GPT and BERT. - -In particular, when pipeline_model_parallel_world_size > 1, each stage -either executes an encoder block or a decoder block. The ---pipeline-model-parallel-split-rank argument controls the rank at which -the split happens: all ranks lower than this argument execute the -encoder block, and all ranks equal to or higher than this argument value -execute the decoder block. 
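[Editor's note, not part of the patch] The MultiSplitGPTDatasetConfig built above accepts either a single blended --data-path or separate per-split paths. A rough illustration of the two shapes the config can end up with; the exact return convention of get_blend_from_list (interleaved weights and prefixes in, a (prefixes, weights) pair out, None weights when no weights are given) is an assumption and should be checked against megatron/core/datasets/utils.py:

    # Single blend shared by train/valid/test, e.g.
    #   --data-path 0.3 /data/wiki_text_document 0.7 /data/cc_text_document
    blend = (["/data/wiki_text_document", "/data/cc_text_document"], [0.3, 0.7])
    blend_per_split = None

    # Or one blend per split (no weights -> uniform), e.g.
    #   --train-data-path /data/train_text_document --valid-data-path /data/valid_text_document
    blend = None
    blend_per_split = [
        (["/data/train_text_document"], None),  # train
        (["/data/valid_text_document"], None),  # valid
        None,                                   # test split not provided
    ]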
- -In the encoder section of the model, only one tensor is sent downstream: -the intermediate encoder_hidden_state. In the decoder section of the -model, two tensors are sent downstream in the forward pass: the fully -computed encoder_hidden_state, and the intermediate decoder_hidden_state. - -In particular, these are the shapes of the tensors sent between -different workers: - If rank is in decoder section: - intermediate decoder_hidden_state (pre-transpose), - complete encoder_hidden_state (post-transpose). - If rank is at boundary between encoder and decoder sections: - complete encoder_hidden_state (post-transpose). - If rank is in encoder section: - intermediate encoder_hidden_state (pre-transpose). - -Additionally, we have code in the backward_step function in schedules.py -to accumulate the encoder_hidden_state gradient across skip connections -(encoder_hidden_state fed in as input to each layer in the decoder). -""" - - -def model_provider( - pre_process=True, post_process=True, add_encoder=True, add_decoder=True -) -> Union[megatron.legacy.model.T5Model, T5Model]: - """Builds the model. - - Args: - pre_process (bool, optional): Set to true if you need to - compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to - compute output logits/loss. Defaults to True. - add_encoder (bool, optional): Defaults to True - add_decoder (bool, optional): Defaults to True - Returns: - T5Model: The returned T5 model - """ - - args = get_args() - - assert ( - args.encoder_tensor_model_parallel_size == 0 - or args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size - ), f"Because word embeddings are shared between the encoder & decoder, these \ - have to have the same tensor parallel size." - - config = core_transformer_config_from_args(args) - if args.use_legacy_models: - model = megatron.legacy.model.T5Model( - config=config, - num_tokentypes=0, - parallel_output=True, - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder, - ) - else: - encoder_config = deepcopy(config) - encoder_config.num_layers = args.encoder_num_layers - - if args.pipeline_model_parallel_size > 1: - assert ( - args.encoder_pipeline_model_parallel_size > 0 - ), "Need to know how to shard the encoder & decoder." 
- - if args.encoder_pipeline_model_parallel_size > 0: - encoder_config.pipeline_model_parallel_size = args.encoder_pipeline_model_parallel_size - - encoder_layers_per_pipeline = ( - encoder_config.num_layers // encoder_config.pipeline_model_parallel_size - ) - decoder_layers_per_pipeline = config.num_layers // config.pipeline_model_parallel_size - - if args.transformer_impl == "local": - en_block_spec = get_t5_encoder_with_local_block_spec(encoder_layers_per_pipeline) - de_block_spec = get_t5_decoder_with_local_block_spec(decoder_layers_per_pipeline) - elif args.transformer_impl == "transformer_engine": - en_block_spec = get_t5_encoder_with_transformer_engine_block_spec( - encoder_layers_per_pipeline - ) - de_block_spec = get_t5_decoder_with_transformer_engine_block_spec( - decoder_layers_per_pipeline - ) - - print_rank_0('building T5 model ...') - model = T5Model( - config=config, - encoder_config=encoder_config, - transformer_encoder_layer_spec=en_block_spec, - transformer_decoder_layer_spec=de_block_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - pre_process=pre_process, - post_process=post_process, - fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, - parallel_output=True, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - position_embedding_type=args.position_embedding_type, - rotary_percent=args.rotary_percent, - add_encoder=add_encoder, - add_decoder=add_decoder, - ) - - return model - - -def get_batch(data_iterator, use_local): - """Build the batch.""" - - keys = ['text_enc', 'text_dec', 'labels', 'loss_mask', 'enc_mask', 'dec_mask'] - datatype = torch.int64 - - # Broadcast data. - if data_iterator is not None: - data = next(data_iterator) - else: - data = None - data_b = tensor_parallel.broadcast_data(keys, data, datatype) - - # Unpack. - tokens_enc = data_b['text_enc'].long() - tokens_dec = data_b['text_dec'].long() - labels = data_b['labels'].long() - loss_mask = data_b['loss_mask'].float() - enc_mask = data_b['enc_mask'] < 0.5 - dec_mask = data_b['dec_mask'] < 0.5 - - # Configure attention mask based on different conditions - # (e.g., transformer-impl, TE versions, TE backends) - enc_mask, dec_mask, enc_dec_mask = T5MaskedWordPieceDataset.config_attention_mask( - tokens_enc, tokens_dec, enc_mask, dec_mask, use_local - ) - - return tokens_enc, tokens_dec, loss_mask, labels, enc_mask, dec_mask, enc_dec_mask - - -def forward_step(data_iterator, model: T5Model): - """Forward training step. - - Args: - data_iterator : Input data iterator - model (T5Model): The T5 Model - """ - - args = get_args() - timers = get_timers() - - # Get the batch. - timers('batch generator', log_level=2).start() - use_local = args.transformer_impl == "local" - tokens_enc, tokens_dec, loss_mask, lm_labels, enc_mask, dec_mask, enc_dec_mask = get_batch( - data_iterator, use_local - ) - timers('batch generator').stop() - - # Forward model lm_labels - output_tensor = model( - tokens_enc, tokens_dec, enc_mask, dec_mask, enc_dec_mask, lm_labels=lm_labels - ) - - return output_tensor, partial(loss_func, loss_mask) - - -def train_valid_test_datasets_provider(train_val_test_num_samples: int): - """Build the train test and validation datasets. - - Args: - train_val_test_num_samples : A list containing the number of samples - in train test and validation. 
- """ - args = get_args() - - tokenizer = get_tokenizer() - - config = T5MaskedWordPieceDatasetConfig( - random_seed=args.seed, - sequence_length=args.encoder_seq_length, - sequence_length_decoder=args.decoder_seq_length, - blend=get_blend_from_list(args.data_path), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path), - ], - renormalize_blend_weights=args.renormalize_blend_weights, - split=args.split, - path_to_cache=args.data_cache_path, - tokenizer=tokenizer, - masking_probability=args.mask_prob, - short_sequence_probability=args.short_seq_prob, - masking_max_ngram=10, - masking_do_full_word=True, - masking_do_permutation=False, - masking_use_longer_ngrams=False, - masking_use_geometric_distribution=True, - ) - - print_rank_0('> building train, validation, and test datasets for T5 ...') - - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - T5MaskedWordPieceDataset, - train_val_test_num_samples, - lambda: mpu.get_tensor_model_parallel_rank() == 0, - config, - ).build() - - print_rank_0("> finished creating T5 datasets ...") - - return train_ds, valid_ds, test_ds - - -def t5_embedding_ranks(pp_ranks): - """T5's embedding ranks consist of the encoder's first rank, and - the decoder's first & last ranks. - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - first_rank = pp_ranks[0] - last_rank = pp_ranks[-1] - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - if len(pp_ranks) == 1: - return [first_rank] - elif pp_ranks[epp] not in (first_rank, last_rank): - return [first_rank, pp_ranks[epp], last_rank] - else: - return [first_rank, last_rank] - - -def t5_position_embedding_ranks(pp_ranks): - """T5's positional embeddings are the encoder & decoder first rank stages - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - if len(pp_ranks) == 1 or pp_ranks[0] == pp_ranks[epp]: - return [pp_ranks[0]] - else: - return [pp_ranks[0], pp_ranks[epp]] - - -if __name__ == "__main__": - - # Temporary for transition to core datasets - train_valid_test_datasets_provider.is_distributed = True - - pretrain( - train_valid_test_datasets_provider, - model_provider, - ModelType.encoder_and_decoder, - forward_step, - args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'}, - get_embedding_ranks=t5_embedding_ranks, - get_position_embedding_ranks=t5_position_embedding_ranks, - ) +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
+ +"""Pretrain T5""" + +from copy import deepcopy +from functools import partial +from typing import Union + +import torch + +import megatron +from megatron.core import mpu, tensor_parallel +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.t5_dataset import ( + T5MaskedWordPieceDataset, + T5MaskedWordPieceDatasetConfig, +) +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core.enums import ModelType +from megatron.core.models.T5 import T5Model +from megatron.core.models.T5.t5_spec import ( + get_t5_decoder_with_local_block_spec, + get_t5_decoder_with_transformer_engine_block_spec, + get_t5_encoder_with_local_block_spec, + get_t5_encoder_with_transformer_engine_block_spec, +) +from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 +from megatron.training.arguments import core_transformer_config_from_args +from pretrain_gpt import loss_func + +""" +Pipeline parallelism for T5 + +T5 is a model architecture with both encoder and decoder blocks. +Consequently, pipeline parallelism is implemented slightly differently +compared to architectures like GPT and BERT. + +In particular, when pipeline_model_parallel_world_size > 1, each stage +either executes an encoder block or a decoder block. The +--pipeline-model-parallel-split-rank argument controls the rank at which +the split happens: all ranks lower than this argument execute the +encoder block, and all ranks equal to or higher than this argument value +execute the decoder block. + +In the encoder section of the model, only one tensor is sent downstream: +the intermediate encoder_hidden_state. In the decoder section of the +model, two tensors are sent downstream in the forward pass: the fully +computed encoder_hidden_state, and the intermediate decoder_hidden_state. + +In particular, these are the shapes of the tensors sent between +different workers: + If rank is in decoder section: + intermediate decoder_hidden_state (pre-transpose), + complete encoder_hidden_state (post-transpose). + If rank is at boundary between encoder and decoder sections: + complete encoder_hidden_state (post-transpose). + If rank is in encoder section: + intermediate encoder_hidden_state (pre-transpose). + +Additionally, we have code in the backward_step function in schedules.py +to accumulate the encoder_hidden_state gradient across skip connections +(encoder_hidden_state fed in as input to each layer in the decoder). +""" + + +def model_provider( + pre_process=True, post_process=True, add_encoder=True, add_decoder=True +) -> Union[megatron.legacy.model.T5Model, T5Model]: + """Builds the model. + + Args: + pre_process (bool, optional): Set to true if you need to + compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to + compute output logits/loss. Defaults to True. + add_encoder (bool, optional): Defaults to True + add_decoder (bool, optional): Defaults to True + Returns: + T5Model: The returned T5 model + """ + + args = get_args() + + assert ( + args.encoder_tensor_model_parallel_size == 0 + or args.encoder_tensor_model_parallel_size == args.tensor_model_parallel_size + ), f"Because word embeddings are shared between the encoder & decoder, these \ + have to have the same tensor parallel size." 
+ + config = core_transformer_config_from_args(args) + if args.use_legacy_models: + model = megatron.legacy.model.T5Model( + config=config, + num_tokentypes=0, + parallel_output=True, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + ) + else: + encoder_config = deepcopy(config) + encoder_config.num_layers = args.encoder_num_layers + + if args.pipeline_model_parallel_size > 1: + assert ( + args.encoder_pipeline_model_parallel_size > 0 + ), "Need to know how to shard the encoder & decoder." + + if args.encoder_pipeline_model_parallel_size > 0: + encoder_config.pipeline_model_parallel_size = args.encoder_pipeline_model_parallel_size + + encoder_layers_per_pipeline = ( + encoder_config.num_layers // encoder_config.pipeline_model_parallel_size + ) + decoder_layers_per_pipeline = config.num_layers // config.pipeline_model_parallel_size + + if args.transformer_impl == "local": + en_block_spec = get_t5_encoder_with_local_block_spec(encoder_layers_per_pipeline) + de_block_spec = get_t5_decoder_with_local_block_spec(decoder_layers_per_pipeline) + elif args.transformer_impl == "transformer_engine": + en_block_spec = get_t5_encoder_with_transformer_engine_block_spec( + encoder_layers_per_pipeline + ) + de_block_spec = get_t5_decoder_with_transformer_engine_block_spec( + decoder_layers_per_pipeline + ) + + print_rank_0('building T5 model ...') + model = T5Model( + config=config, + encoder_config=encoder_config, + transformer_encoder_layer_spec=en_block_spec, + transformer_decoder_layer_spec=de_block_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + pre_process=pre_process, + post_process=post_process, + fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, + parallel_output=True, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + position_embedding_type=args.position_embedding_type, + rotary_percent=args.rotary_percent, + relative_attention_num_buckets=args.relative_attention_num_buckets, + relative_attention_max_distance=args.relative_attention_max_distance, + add_encoder=add_encoder, + add_decoder=add_decoder, + ) + + return model + + +def get_batch(data_iterator, use_local): + """Build the batch.""" + + keys = ['text_enc', 'text_dec', 'labels', 'loss_mask', 'enc_mask', 'dec_mask'] + datatype = torch.int64 + + # Broadcast data. + if data_iterator is not None: + data = next(data_iterator) + else: + data = None + data_b = tensor_parallel.broadcast_data(keys, data, datatype) + + # Unpack. + tokens_enc = data_b['text_enc'].long() + tokens_dec = data_b['text_dec'].long() + labels = data_b['labels'].long() + loss_mask = data_b['loss_mask'].float() + enc_mask = data_b['enc_mask'] < 0.5 + dec_mask = data_b['dec_mask'] < 0.5 + + # Configure attention mask based on different conditions + # (e.g., transformer-impl, TE versions, TE backends) + enc_mask, dec_mask, enc_dec_mask = T5MaskedWordPieceDataset.config_attention_mask( + tokens_enc, tokens_dec, enc_mask, dec_mask, use_local + ) + + return tokens_enc, tokens_dec, loss_mask, labels, enc_mask, dec_mask, enc_dec_mask + + +def forward_step(data_iterator, model: T5Model): + """Forward training step. + + Args: + data_iterator : Input data iterator + model (T5Model): The T5 Model + """ + + args = get_args() + timers = get_timers() + + # Get the batch. 
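[Editor's note, not part of the patch] The encoder and decoder block specs above are sized per pipeline stage, not per model, so each layer count is divided by its pipeline size first. A quick arithmetic sketch with hypothetical sizes (whether pipeline_model_parallel_size counts only decoder stages here is an assumption to verify against the argument parsing):

    encoder_num_layers = 12
    decoder_num_layers = 12
    encoder_pp_size = 1    # --encoder-pipeline-model-parallel-size
    decoder_pp_size = 3    # --pipeline-model-parallel-size

    encoder_layers_per_pipeline = encoder_num_layers // encoder_pp_size   # 12 layers on the single encoder stage
    decoder_layers_per_pipeline = decoder_num_layers // decoder_pp_size   # 4 layers per decoder pipeline stage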
+ timers('batch generator', log_level=2).start() + use_local = args.transformer_impl == "local" + tokens_enc, tokens_dec, loss_mask, lm_labels, enc_mask, dec_mask, enc_dec_mask = get_batch( + data_iterator, use_local + ) + timers('batch generator').stop() + + # Forward model lm_labels + output_tensor = model( + tokens_enc, tokens_dec, enc_mask, dec_mask, enc_dec_mask, lm_labels=lm_labels + ) + + return output_tensor, partial(loss_func, loss_mask) + + +def train_valid_test_datasets_provider(train_val_test_num_samples: int): + """Build the train test and validation datasets. + + Args: + train_val_test_num_samples : A list containing the number of samples + in train test and validation. + """ + args = get_args() + + tokenizer = get_tokenizer() + + config = T5MaskedWordPieceDatasetConfig( + random_seed=args.seed, + sequence_length=args.encoder_seq_length, + sequence_length_decoder=args.decoder_seq_length, + blend=get_blend_from_list(args.data_path), + blend_per_split=[ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path), + ], + split=args.split, + path_to_cache=args.data_cache_path, + tokenizer=tokenizer, + masking_probability=args.mask_prob, + short_sequence_probability=args.short_seq_prob, + masking_max_ngram=10, + masking_do_full_word=True, + masking_do_permutation=False, + masking_use_longer_ngrams=False, + masking_use_geometric_distribution=True, + ) + + print_rank_0('> building train, validation, and test datasets for T5 ...') + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + T5MaskedWordPieceDataset, + train_val_test_num_samples, + lambda: mpu.get_tensor_model_parallel_rank() == 0, + config, + ).build() + + print_rank_0("> finished creating T5 datasets ...") + + return train_ds, valid_ds, test_ds + + +def t5_embedding_ranks(pp_ranks): + """T5's embedding ranks consist of the encoder's first rank, and + the decoder's first & last ranks. + Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + first_rank = pp_ranks[0] + last_rank = pp_ranks[-1] + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + if len(pp_ranks) == 1: + return [first_rank] + elif pp_ranks[epp] not in (first_rank, last_rank): + return [first_rank, pp_ranks[epp], last_rank] + else: + return [first_rank, last_rank] + + +def t5_position_embedding_ranks(pp_ranks): + """T5's positional embeddings are the encoder & decoder first rank stages + Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + if len(pp_ranks) == 1 or pp_ranks[0] == pp_ranks[epp]: + return [pp_ranks[0]] + else: + return [pp_ranks[0], pp_ranks[epp]] + + +if __name__ == "__main__": + + # Temporary for transition to core datasets + train_valid_test_datasets_provider.is_distributed = True + + pretrain( + train_valid_test_datasets_provider, + model_provider, + ModelType.encoder_and_decoder, + forward_step, + args_defaults={'tokenizer_type': 'BertWordPieceLowerCase'}, + get_embedding_ranks=t5_embedding_ranks, + get_position_embedding_ranks=t5_position_embedding_ranks, + ) diff --git a/pretrain_vlm.py b/pretrain_vlm.py index 1870a77..831c909 100644 --- a/pretrain_vlm.py +++ b/pretrain_vlm.py @@ -1,465 +1,452 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
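[Editor's note, not part of the patch] t5_embedding_ranks and t5_position_embedding_ranks above pick out the pipeline stages that own the (tied) embeddings. A worked example for one pipeline group of four global ranks with a one-stage encoder:

    pp_ranks = [4, 5, 6, 7]   # global ranks of one pipeline group
    epp = 1                   # encoder occupies the first stage, so pp_ranks[epp] is the decoder's first rank

    # Word embeddings live on the encoder's first rank plus the decoder's first and last ranks.
    assert [pp_ranks[0], pp_ranks[epp], pp_ranks[-1]] == [4, 5, 7]

    # Position embeddings live on the encoder's and decoder's first ranks.
    assert [pp_ranks[0], pp_ranks[epp]] == [4, 5]

    # With epp == 0 (no separate encoder stage) both lists collapse:
    # word embeddings -> [4, 7], position embeddings -> [4].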
-"""Pretrain vision language model.""" -from copy import deepcopy -from functools import partial -import warnings - -import torch - -from megatron.core import parallel_state, tensor_parallel -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.multimodal_dataset import MockMultimodalDataset, MultimodalDatasetConfig -from megatron.core.enums import ModelType -from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.models.multimodal.llava_model import LLaVAModel, DEFAULT_IMAGE_TOKEN_INDEX -from megatron.core.models.multimodal.llava_spec import ( - decoder_model_with_transformer_engine_default_spec, - decoder_model_with_local_default_spec, -) -from megatron.core.models.vision.vit_layer_specs import ( - get_vit_layer_with_transformer_engine_spec, - get_vit_layer_with_local_spec, -) -from megatron.core.transformer.spec_utils import import_module -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 -from megatron.training.arguments import core_transformer_config_from_args -from megatron.training.utils import get_batch_on_this_cp_rank -from megatron.core import mpu -from pretrain_gpt import loss_func - -def calculate_model_parallel_padding(decoder_seq_len, text_only=False): - args = get_args() - cp_size = args.context_parallel_size - tp_size = args.tensor_model_parallel_size - - mp_padding_needed = 0 - # TP Comm overlap is performed with combined text+image embeddings. - # text_only flag skips using the full sequence length to calculate padding and uses - # the provided decoder_seq_len - if args.sequence_parallel and args.decoder_tp_comm_overlap and not text_only: - # If TP Comm Overlap is enabled for combined text+image embedding in LM backbone, - # user needs to provide decoder_seq_length with any potential padding needed for SP+CP - assert args.decoder_seq_length is not None, \ - "Please provide --decoder-seq-length when using TP Comm overlap for LM backbone" - mp_padding_needed = args.decoder_seq_length - decoder_seq_len - elif args.sequence_parallel or cp_size > 1: - if args.sequence_parallel and cp_size > 1: - # Padding to multiple of tp_size * cp_size*2 when using sequence parallel and context parallel - padding_factor = tp_size * cp_size * 2 - elif cp_size > 1: - padding_factor = cp_size * 2 - elif args.sequence_parallel: - padding_factor = tp_size - mp_padding_needed = int((decoder_seq_len + padding_factor - 1) // (padding_factor) * (padding_factor)) - decoder_seq_len - args.decoder_seq_length = decoder_seq_len + mp_padding_needed - else: - args.decoder_seq_length = decoder_seq_len - - return mp_padding_needed - -def model_provider( - pre_process=True, post_process=True, add_encoder=True, add_decoder=True, parallel_output=True -) -> LLaVAModel: - """Builds the model. - - Note: currently, only LLaVA model is supported. Follow-up changes will make this configurable. - - Args: - pre_process (bool): Include the embedding layer in the gpt decoder (used with pipeline parallelism). Defaults to True. - post_process (bool): Include an output layer and a layernorm in the gpt decoder (used with pipeline parallelism). Defaults to True. - add_encoder (bool): Construct the encoder module (used with pipeline parallelism). Defaults to True. 
When we use pipelining, the encoder - will live on only a subset of the pipeline stages (specifically, only the first stage). - add_decoder (bool): Construct the decoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the decoder - will live on only a subset of the pipeline stages (specifically, every stage after the first one). - parallel_output (bool): Enable model parallel output. - - Returns: - model (megatron.core.models.multimodal.llava_model.LLaVAModel): A multimodal model - """ - args = get_args() - vision_model_type = "clip" - - assert args.ckpt_format == 'torch', "Only ckpt-format torch is supported for VLM training currently." - - num_image_embeddings = get_num_image_embeddings( - args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, - class_token_len=1, pixel_shuffle=False, use_tile_tags=False - ) - - old_seq_length = args.seq_length - # dataloader-seq-length is required to determine the length of text seq len - if args.dataloader_seq_length is None: - args.dataloader_seq_length = args.seq_length - - # decoder_seq_len denotes the language model sequence length. - decoder_seq_len = args.dataloader_seq_length + num_image_embeddings - - # seq_length and encoder_seq_length denote the vision model sequence length. Override if the user provided something else. - args.seq_length = args.encoder_seq_length = num_image_embeddings - if torch.distributed.get_rank() == 0 and old_seq_length != args.seq_length: - warnings.warn( - f"Changed seq_length and encoder_seq_length (vision model sequence length) from {old_seq_length} to num_image_tokens ({num_image_embeddings})" - ) - mp_padding_needed = calculate_model_parallel_padding(decoder_seq_len) - - args.max_position_embeddings = max(args.max_position_embeddings, args.decoder_seq_length) - - print_rank_0('building a multimodal model ...') - language_transformer_config = core_transformer_config_from_args(get_args()) - if args.decoder_tp_comm_overlap: - assert args.transformer_impl == "transformer_engine", \ - "TransformerEngine is needed to support Decoder TP Comm overlap" - language_transformer_config.tp_comm_overlap = args.decoder_tp_comm_overlap - - if args.spec is not None: - language_transformer_layer_spec = import_module(args.spec) - elif args.transformer_impl == "transformer_engine": - language_transformer_layer_spec = decoder_model_with_transformer_engine_default_spec( - args.num_experts, args.moe_grouped_gemm - ) - else: # transformer_impl == "local" - language_transformer_layer_spec = decoder_model_with_local_default_spec( - args.num_experts, args.moe_grouped_gemm - ) - - # Prepare mask type for any required padding to support CP/SP sequence sharding. - if mp_padding_needed > 0: - if language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.causal: - language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding_causal - elif language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.no_mask: - language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding - - if args.transformer_impl == "transformer_engine": - vision_transformer_layer_spec = get_vit_layer_with_transformer_engine_spec() - else: # transformer_impl == "local" - vision_transformer_layer_spec = get_vit_layer_with_local_spec() - - # TODO: Make these configurable via input .yaml config. 
- vision_transformer_config = deepcopy(language_transformer_config) - vision_transformer_config.num_layers = args.encoder_num_layers - vision_transformer_config.first_pipeline_num_layers = None - vision_transformer_config.last_pipeline_num_layers = None - vision_transformer_config.vision_model_type = vision_model_type - vision_transformer_config.context_parallel_size = 1 # Force CP=1 for Vision Transformer - if vision_transformer_config.sequence_parallel: - print_rank_0("> Disabling Sequence parallelism in Vision Transformer. Not yet supported") - vision_transformer_config.sequence_parallel = False - if vision_transformer_config.tp_comm_overlap: - print_rank_0("> Disabling TP Comm overlap in Vision Transformer. Not yet supported") - vision_transformer_config.tp_comm_overlap = False - - vision_projection_type = "mlp" - vision_projection_config = deepcopy(language_transformer_config) - vision_projection_config.context_parallel_size = 1 # Force CP=1 for Vision Projection - if vision_projection_config.sequence_parallel: - print_rank_0("> Disabling Sequence parallelism in Vision Projection. Not yet supported") - vision_projection_config.sequence_parallel = False - if vision_projection_config.tp_comm_overlap: - print_rank_0("> Disabling TP Comm overlap in Vision Projection. Not yet supported") - vision_projection_config.tp_comm_overlap = False - - if args.encoder_pipeline_model_parallel_size > 0: - assert ( - args.encoder_pipeline_model_parallel_size == 1 - ), "ViT can only live on 1 pipeline stage." - vision_transformer_config.pipeline_model_parallel_size = ( - args.encoder_pipeline_model_parallel_size - ) - vision_projection_config.pipeline_model_parallel_size = ( - args.encoder_pipeline_model_parallel_size - ) - if args.encoder_tensor_model_parallel_size > 0: - vision_transformer_config.tensor_model_parallel_size = ( - args.encoder_tensor_model_parallel_size - ) - vision_projection_config.tensor_model_parallel_size = ( - args.encoder_tensor_model_parallel_size - ) - - vision_projection_modules = deepcopy(language_transformer_layer_spec.submodules.mlp.submodules) - - if args.virtual_pipeline_model_parallel_size: - raise NotImplementedError("virtual pipeline model parallelism is not supported yet.") - - model = LLaVAModel( - language_transformer_config=language_transformer_config, - language_transformer_layer_spec=language_transformer_layer_spec, - language_vocab_size=args.padded_vocab_size, - language_max_sequence_length=args.decoder_seq_length, - vision_transformer_config=vision_transformer_config, - vision_transformer_layer_spec=vision_transformer_layer_spec, - drop_vision_class_token=args.disable_vision_class_token, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_modules, - vision_projection_type=vision_projection_type, - parallel_output=parallel_output, - language_position_embedding_type=args.position_embedding_type, - language_rotary_percent=args.rotary_percent, - language_rope_scaling=args.use_rope_scaling, - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder, - img_h=args.img_h, - img_w=args.img_w, - patch_dim=args.patch_dim, - ) - - model.freeze( - freeze_language_model=args.freeze_LM, - freeze_vision_model=args.freeze_ViT, - freeze_vision_projection=False, - ) - - return model - - -def train_valid_test_datasets_provider(train_val_test_num_samples): - """Build the train test and validation datasets. 
- - Args: - train_val_test_num_samples : A list containing the number of samples in train, validation, and test sets. - - Returns: - train_ds, val_ds, test_ds (megatron.core.datasets.multimodal_dataset.MockMultimodalDataset): Train, validation, and test datasets, respectively. - """ - args = get_args() - - config = MultimodalDatasetConfig( - random_seed=args.seed, - split=args.split, - sequence_length=args.dataloader_seq_length, - tokenizer=get_tokenizer(), - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - image_h=args.img_h, - image_w=args.img_w, - preprocess_func=_preprocess_data_for_llava, - ) - - print_rank_0("> building train, validation, and test datasets for multimodal ...") - - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - MockMultimodalDataset, - train_val_test_num_samples, - lambda: parallel_state.get_tensor_model_parallel_rank() == 0, - config, - ).build() - - print_rank_0("> finished creating multimodal datasets ...") - - return train_ds, valid_ds, test_ds - - -def _preprocess_data_for_llava(data): - """Preprocess data sample to the format expected by a LLaVA model. - - Note: This doesn't support all the different modes in the official LLaVA repo yet. - - Args: - data (dict): Data sample with keys like 'image', 'tokens', etc. - - Returns: - data (dict): Processed data sample suitable for the model. - """ - # Prepend image token index to tokens. - data["tokens"] = torch.cat( - [ - DEFAULT_IMAGE_TOKEN_INDEX - * torch.ones(1, dtype=data["tokens"].dtype, device=data["tokens"].device), - data["tokens"], - ] - ) - # Prepend labels accordingly. - data["labels"] = torch.cat([data["tokens"][1].unsqueeze(0), data["labels"]]) - # Zero loss mask for the image token index. - data["loss_mask"] = torch.cat( - [ - torch.zeros(1, dtype=data["loss_mask"].dtype, device=data["loss_mask"].device), - data["loss_mask"], - ] - ) - # Add one more position id. - data["position_ids"] = torch.cat( - [data["position_ids"], data["position_ids"][-1].unsqueeze(0) + 1] - ) - - return data - -def get_batch(data_iterator): - """Generate a batch. - - Args: - data_iterator: Iterable dataset. - - Returns: - sample: A data sample with images, tokens, etc. - """ - def _get_packed_seq_params(tokens, img_seq_len, mp_padding_needed): - batch_size = tokens.shape[0] - # Calculate the valid token seq len that LM backbone should compute on - combined_valid_seqlen = tokens.shape[1] + img_seq_len - mp_padding_needed - cu_seqlens = torch.arange( - 0, (batch_size + 1) * (combined_valid_seqlen), step=(combined_valid_seqlen), dtype=torch.int32, device=tokens.device) - # Calculate the total padded token seq len - combined_padded_seqlen = tokens.shape[1] + img_seq_len - cu_seqlens_padded = None - qkv_format = 'sbhd' - if cp_size > 1: - # Provide cu_seqlens__padded for CP support - cu_seqlens_padded = torch.arange( - 0, (batch_size + 1) * (combined_padded_seqlen), step=(combined_padded_seqlen), dtype=torch.int32, device=tokens.device) - # CP with padding mask type requires THD format - qkv_format = 'thd' - packed_seq_params = PackedSeqParams( - cu_seqlens_q=cu_seqlens, - cu_seqlens_kv=cu_seqlens, - cu_seqlens_q_padded=cu_seqlens_padded, - cu_seqlens_kv_padded=cu_seqlens_padded, - max_seqlen_q=combined_padded_seqlen, - max_seqlen_kv=combined_padded_seqlen, - qkv_format=qkv_format, - ) - return packed_seq_params - - args = get_args() - cp_size = args.context_parallel_size - # Broadcast data. 
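[Editor's note, not part of the patch] The _get_packed_seq_params helper removed above treats every sample as one fixed-length segment, so the cumulative sequence lengths used for the THD format are just an arange. A small numeric sketch of that bookkeeping with hypothetical sizes:

    import torch

    batch_size = 2
    text_len = 1024           # padded text tokens per sample
    img_seq_len = 576         # image embeddings spliced in per sample
    mp_padding_needed = 32    # SP/CP padding carrying no valid tokens

    combined_valid_seqlen = text_len + img_seq_len - mp_padding_needed   # 1568
    combined_padded_seqlen = text_len + img_seq_len                      # 1600

    cu_seqlens = torch.arange(0, (batch_size + 1) * combined_valid_seqlen,
                              step=combined_valid_seqlen, dtype=torch.int32)
    cu_seqlens_padded = torch.arange(0, (batch_size + 1) * combined_padded_seqlen,
                                     step=combined_padded_seqlen, dtype=torch.int32)

    print(cu_seqlens.tolist())         # [0, 1568, 3136]
    print(cu_seqlens_padded.tolist())  # [0, 1600, 3200]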
- if data_iterator is not None: - data = next(data_iterator) - else: - data = None - - data_i = tensor_parallel.broadcast_data(["tokens", "position_ids", "labels"], data, torch.int64) - data_f = tensor_parallel.broadcast_data(["image", "loss_mask"], data, torch.float32) - - batch = dict() - packed_seq_params = None - image_token_mask = None - # Create batch with tokens and position_ids for CP sharding. - tokens = data_i["tokens"].long() - position_ids = data_i["position_ids"].long() - labels = data_i["labels"].long() - loss_mask = data_f["loss_mask"].float() - images = data_f["image"].float() - - if cp_size > 1 or args.sequence_parallel: - vision_model_type = "clip" - # Calculate the number of image embedding tokens will be added to text tokens - num_image_embeddings_per_tile = get_num_image_embeddings( - args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, 1 - ) - # Pad to make sure the text sequence can be sharded equally by CP chunks. - mp_padding_needed_for_text = calculate_model_parallel_padding(tokens.shape[1], text_only=True) - if mp_padding_needed_for_text > 0: - tokens, position_ids, labels, loss_mask = [torch.nn.functional.pad(item, (0, mp_padding_needed_for_text)) for item in (tokens, position_ids, labels, loss_mask)] - # Image token mask must be supplied before distributed sequence to CP ranks. - image_token_mask = tokens == DEFAULT_IMAGE_TOKEN_INDEX - num_images_per_sample = torch.sum(image_token_mask, dim=-1) - img_seq_len = (num_image_embeddings_per_tile * num_images_per_sample - num_images_per_sample).max() - packed_seq_params = _get_packed_seq_params(tokens, img_seq_len, mp_padding_needed_for_text) - - # slice batch along sequence dimension for context parallelism - batch = get_batch_on_this_cp_rank({"tokens": tokens, "position_ids": position_ids}) - attention_mask = None # Use the attention mask type defined in layer spec. Typically no mask for the vision model and causal mask for the vision model. - - return batch["tokens"], batch["position_ids"], labels, images, loss_mask, attention_mask, image_token_mask, packed_seq_params - - -def forward_step(data_iterator, model: LLaVAModel): - """Forward training step. - - Args: - data_iterator: Iterable dataset. - model (megatron.core.models.multimodal.llava_model.LLaVAModel): Multimodal model - - Returns: - output_tensor (torch.Tensor): Loss of shape [b, s] if labels are provided, otherwise logits of shape [b, s, vocab_size]. - loss_func (callable): Loss function with a loss mask specified. - """ - timers = get_timers() - - # Get the batch. 
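[Editor's note, not part of the patch] In the removed get_batch above, each image placeholder token is later expanded into num_image_embeddings_per_tile embeddings, so the extra sequence length contributed by images is (embeddings_per_tile - 1) per image, maximised over the batch. A quick numeric check with assumed values:

    import torch

    num_image_embeddings_per_tile = 576              # e.g. a 336x336 image with 14x14 patches
    num_images_per_sample = torch.tensor([2, 1])     # two samples in the micro-batch

    img_seq_len = (num_image_embeddings_per_tile * num_images_per_sample
                   - num_images_per_sample).max()
    print(img_seq_len.item())   # 1150: each placeholder token is replaced, not appended to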
- timers('batch-generator', log_level=2).start() - tokens, position_ids, labels, images, loss_mask, attention_mask, image_token_mask, packed_seq_params = get_batch(data_iterator) - timers('batch-generator').stop() - - output_tensor, loss_mask = model( - images, tokens, position_ids, attention_mask, labels, loss_mask, image_token_mask=image_token_mask, packed_seq_params=packed_seq_params - ) - - return output_tensor, partial(loss_func, loss_mask) - - -def add_vlm_extra_args(parser): - """Extra arguments.""" - group = parser.add_argument_group(title='vision language model specific arguments') - group.add_argument( - '--freeze-LM', action='store_true', default=False, help="Freeze language model weights" - ) - group.add_argument( - '--freeze-ViT', action='store_true', default=False, help="Freeze vision model (ViT) weights" - ) - group.add_argument( - "--disable-vision-class-token", - action="store_true", - default=False, - help="Drop vision model class token", - ) - group.add_argument("--dataloader-seq-length", type=int, help="Make dataloader to produce sequences of specific length.") - group.add_argument("--decoder-tp-comm-overlap", action="store_true", default=False, help="Enables the overlap of " - "Tensor parallel communication and GEMM kernels in Decoder only. " - "Please provide decoder-seq-length when using this feature.") - return parser - - -def llava_embedding_ranks(pp_ranks): - """LLava's embedding ranks consist of the decoder's first and last ranks (ie, the ViT has no embeddings). - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - last_rank = pp_ranks[-1] - if len(pp_ranks) == 1 or pp_ranks[epp] == last_rank: - return [last_rank] - else: - return [pp_ranks[epp], last_rank] - - -def llava_position_embedding_ranks(pp_ranks): - """LLava's embedding ranks consist of the singular rank of the model or the decoder's first rank. - Args: - pp_ranks: A list of global ranks that constitute a pipeline group. - """ - args = get_args() - - # encoder size is also the index to the first rank of the decoder. - epp = args.encoder_pipeline_model_parallel_size - - last_rank = pp_ranks[-1] - if len(pp_ranks) == 1: - return [last_rank] - else: - return [pp_ranks[epp]] - - -if __name__ == "__main__": - train_valid_test_datasets_provider.is_distributed = True - - pretrain( - train_valid_test_datasets_provider, - model_provider, - ModelType.encoder_and_decoder, - forward_step, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, - extra_args_provider=add_vlm_extra_args, - get_embedding_ranks=llava_embedding_ranks, - get_position_embedding_ranks=llava_position_embedding_ranks, - ) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+"""Pretrain vision language model.""" +from copy import deepcopy +from functools import partial +import warnings + +import torch + +from megatron.core import parallel_state, tensor_parallel +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.multimodal_dataset import MockMultimodalDataset, MultimodalDatasetConfig +from megatron.core.enums import ModelType +from megatron.core.models.vision.clip_vit_model import get_num_image_embeddings +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.models.multimodal.llava_model import LLaVAModel, DEFAULT_IMAGE_TOKEN_INDEX +from megatron.core.models.multimodal.llava_spec import ( + decoder_model_with_transformer_engine_default_spec, + decoder_model_with_local_default_spec, +) +from megatron.core.models.vision.vit_layer_specs import ( + get_vit_layer_with_transformer_engine_spec, + get_vit_layer_with_local_spec, +) +from megatron.core.transformer.spec_utils import import_module +from megatron.training import get_args, get_timers, get_tokenizer, pretrain, print_rank_0 +from megatron.training.arguments import core_transformer_config_from_args +from megatron.training.utils import get_batch_on_this_cp_rank +from megatron.core import mpu +from megatron.core.models.multimodal import context_parallel +from pretrain_gpt import loss_func + + +def model_provider( + pre_process=True, post_process=True, add_encoder=True, add_decoder=True, parallel_output=True +) -> LLaVAModel: + """Builds the model. + + Note: currently, only LLaVA model is supported. Follow-up changes will make this configurable. + + Args: + pre_process (bool): Include the embedding layer in the gpt decoder (used with pipeline parallelism). Defaults to True. + post_process (bool): Include an output layer and a layernorm in the gpt decoder (used with pipeline parallelism). Defaults to True. + add_encoder (bool): Construct the encoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the encoder + will live on only a subset of the pipeline stages (specifically, only the first stage). + add_decoder (bool): Construct the decoder module (used with pipeline parallelism). Defaults to True. When we use pipelining, the decoder + will live on only a subset of the pipeline stages (specifically, every stage after the first one). + parallel_output (bool): Enable model parallel output. + + Returns: + model (megatron.core.models.multimodal.llava_model.LLaVAModel): A multimodal model + """ + args = get_args() + vision_model_type = "clip" + + assert args.ckpt_format == 'torch', "Only ckpt-format torch is supported for VLM training currently." + assert not (args.context_parallel_size > 1 and args.pipeline_model_parallel_size > 1), "PP+CP is not yet supported by this script. \ + Current mock dataset does not support natively packed sequence dataset required for correct PP comm shapes." + + num_image_embeddings = get_num_image_embeddings( + args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, + class_token_len=1, pixel_shuffle=False, use_tile_tags=False + ) + + old_seq_length = args.seq_length + # dataloader-seq-length is required to determine the length of text seq len + if args.dataloader_seq_length is None: + args.dataloader_seq_length = args.seq_length + + # decoder_seq_len denotes the language model sequence length. 
+ decoder_seq_len = args.dataloader_seq_length + num_image_embeddings + + # seq_length and encoder_seq_length denote the vision model sequence length. Override if the user provided something else. + args.seq_length = args.encoder_seq_length = num_image_embeddings + if torch.distributed.get_rank() == 0 and old_seq_length != args.seq_length: + warnings.warn( + f"Changed seq_length and encoder_seq_length (vision model sequence length) from {old_seq_length} to num_image_tokens ({num_image_embeddings})" + ) + mp_padding_needed = context_parallel.get_padding( + decoder_seq_len, + args.context_parallel_size, + args.tensor_model_parallel_size, + args.sequence_parallel, + args.decoder_tp_comm_overlap, + args.decoder_seq_length + ) + args.decoder_seq_length = decoder_seq_len + mp_padding_needed + + args.max_position_embeddings = max(args.max_position_embeddings, args.decoder_seq_length) + + print_rank_0('building a multimodal model ...') + language_transformer_config = core_transformer_config_from_args(get_args()) + if args.decoder_tp_comm_overlap: + assert args.transformer_impl == "transformer_engine", \ + "TransformerEngine is needed to support Decoder TP Comm overlap" + language_transformer_config.tp_comm_overlap = args.decoder_tp_comm_overlap + + if args.spec is not None: + language_transformer_layer_spec = import_module(args.spec) + elif args.transformer_impl == "transformer_engine": + language_transformer_layer_spec = decoder_model_with_transformer_engine_default_spec( + args.num_experts, args.moe_grouped_gemm + ) + else: # transformer_impl == "local" + language_transformer_layer_spec = decoder_model_with_local_default_spec( + args.num_experts, args.moe_grouped_gemm + ) + + # Prepare mask type for any required padding to support CP/SP sequence sharding. + if mp_padding_needed > 0: + if language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.causal: + language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding_causal + elif language_transformer_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') == AttnMaskType.no_mask: + language_transformer_layer_spec.submodules.self_attention.params['attn_mask_type'] = AttnMaskType.padding + + if args.transformer_impl == "transformer_engine": + vision_transformer_layer_spec = get_vit_layer_with_transformer_engine_spec() + else: # transformer_impl == "local" + vision_transformer_layer_spec = get_vit_layer_with_local_spec() + + # TODO: Make these configurable via input .yaml config. + vision_transformer_config = deepcopy(language_transformer_config) + vision_transformer_config.num_layers = args.encoder_num_layers + vision_transformer_config.first_pipeline_num_layers = None + vision_transformer_config.last_pipeline_num_layers = None + vision_transformer_config.vision_model_type = vision_model_type + vision_transformer_config.context_parallel_size = 1 # Force CP=1 for Vision Transformer + if vision_transformer_config.sequence_parallel: + print_rank_0("> Disabling Sequence parallelism in Vision Transformer. Not yet supported") + vision_transformer_config.sequence_parallel = False + if vision_transformer_config.tp_comm_overlap: + print_rank_0("> Disabling TP Comm overlap in Vision Transformer. 
Not yet supported") + vision_transformer_config.tp_comm_overlap = False + + vision_projection_type = "mlp" + vision_projection_config = deepcopy(language_transformer_config) + vision_projection_config.context_parallel_size = 1 # Force CP=1 for Vision Projection + if vision_projection_config.sequence_parallel: + print_rank_0("> Disabling Sequence parallelism in Vision Projection. Not yet supported") + vision_projection_config.sequence_parallel = False + if vision_projection_config.tp_comm_overlap: + print_rank_0("> Disabling TP Comm overlap in Vision Projection. Not yet supported") + vision_projection_config.tp_comm_overlap = False + + if args.encoder_pipeline_model_parallel_size > 0: + assert ( + args.encoder_pipeline_model_parallel_size == 1 + ), "ViT can only live on 1 pipeline stage." + vision_transformer_config.pipeline_model_parallel_size = ( + args.encoder_pipeline_model_parallel_size + ) + vision_projection_config.pipeline_model_parallel_size = ( + args.encoder_pipeline_model_parallel_size + ) + if args.encoder_tensor_model_parallel_size > 0: + vision_transformer_config.tensor_model_parallel_size = ( + args.encoder_tensor_model_parallel_size + ) + vision_projection_config.tensor_model_parallel_size = ( + args.encoder_tensor_model_parallel_size + ) + + vision_projection_modules = deepcopy(language_transformer_layer_spec.submodules.mlp.submodules) + + if args.virtual_pipeline_model_parallel_size: + raise NotImplementedError("virtual pipeline model parallelism is not supported yet.") + + language_max_sequence_length = args.decoder_seq_length + if args.context_parallel_size > 1: + if args.use_packed_sequence or mp_padding_needed > 0: + # Use THD data format + language_max_sequence_length = args.decoder_seq_length * args.micro_batch_size + model = LLaVAModel( + language_transformer_config=language_transformer_config, + language_transformer_layer_spec=language_transformer_layer_spec, + language_vocab_size=args.padded_vocab_size, + language_max_sequence_length=language_max_sequence_length, + vision_transformer_config=vision_transformer_config, + vision_transformer_layer_spec=vision_transformer_layer_spec, + drop_vision_class_token=args.disable_vision_class_token, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_modules, + vision_projection_type=vision_projection_type, + parallel_output=parallel_output, + language_position_embedding_type=args.position_embedding_type, + language_rotary_percent=args.rotary_percent, + language_rope_scaling=args.use_rope_scaling, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + img_h=args.img_h, + img_w=args.img_w, + patch_dim=args.patch_dim, + ) + + model.freeze( + freeze_language_model=args.freeze_LM, + freeze_vision_model=args.freeze_ViT, + freeze_vision_projection=False, + ) + + return model + + +def train_valid_test_datasets_provider(train_val_test_num_samples): + """Build the train test and validation datasets. + + Args: + train_val_test_num_samples : A list containing the number of samples in train, validation, and test sets. + + Returns: + train_ds, val_ds, test_ds (megatron.core.datasets.multimodal_dataset.MockMultimodalDataset): Train, validation, and test datasets, respectively. 
+ """ + args = get_args() + + config = MultimodalDatasetConfig( + random_seed=args.seed, + split=args.split, + sequence_length=args.dataloader_seq_length, + tokenizer=get_tokenizer(), + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + image_h=args.img_h, + image_w=args.img_w, + preprocess_func=_preprocess_data_for_llava, + ) + + print_rank_0("> building train, validation, and test datasets for multimodal ...") + + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + MockMultimodalDataset, + train_val_test_num_samples, + lambda: parallel_state.get_tensor_model_parallel_rank() == 0, + config, + ).build() + + print_rank_0("> finished creating multimodal datasets ...") + + return train_ds, valid_ds, test_ds + + +def _preprocess_data_for_llava(data): + """Preprocess data sample to the format expected by a LLaVA model. + + Note: This doesn't support all the different modes in the official LLaVA repo yet. + + Args: + data (dict): Data sample with keys like 'image', 'tokens', etc. + + Returns: + data (dict): Processed data sample suitable for the model. + """ + # Prepend image token index to tokens. + data["tokens"] = torch.cat( + [ + DEFAULT_IMAGE_TOKEN_INDEX + * torch.ones(1, dtype=data["tokens"].dtype, device=data["tokens"].device), + data["tokens"], + ] + ) + # Prepend labels accordingly. + data["labels"] = torch.cat([data["tokens"][1].unsqueeze(0), data["labels"]]) + # Zero loss mask for the image token index. + data["loss_mask"] = torch.cat( + [ + torch.zeros(1, dtype=data["loss_mask"].dtype, device=data["loss_mask"].device), + data["loss_mask"], + ] + ) + # Add one more position id. + data["position_ids"] = torch.cat( + [data["position_ids"], data["position_ids"][-1].unsqueeze(0) + 1] + ) + + return data + + +def get_batch(data_iterator): + """Generate a batch. + + Args: + data_iterator: Iterable dataset. + + Returns: + sample: A data sample with images, tokens, etc. + """ + args = get_args() + cp_size = args.context_parallel_size + # Broadcast data. + if data_iterator is not None: + data = next(data_iterator) + else: + data = None + + data_i = tensor_parallel.broadcast_data(["tokens", "position_ids", "labels"], data, torch.int64) + data_f = tensor_parallel.broadcast_data(["image", "loss_mask"], data, torch.float32) + + batch = dict() + packed_seq_params = None + image_token_mask = None + # Create batch with tokens and position_ids for CP sharding. + tokens = data_i["tokens"].long() + position_ids = data_i["position_ids"].long() + labels = data_i["labels"].long() + loss_mask = data_f["loss_mask"].float() + images = data_f["image"].float() + + if cp_size > 1 or args.sequence_parallel: + vision_model_type = "clip" + # Calculate the number of image embedding tokens will be added to text tokens + num_image_embeddings_per_tile = get_num_image_embeddings( + args.img_h, args.img_w, args.patch_dim, vision_model_type, args.disable_vision_class_token, 1 + ) + # Pad to make sure the text sequence can be sharded equally by CP chunks. 
+ image_token_mask = tokens == DEFAULT_IMAGE_TOKEN_INDEX + num_images_per_sample = torch.sum(image_token_mask, dim=-1) + img_seq_len = (num_image_embeddings_per_tile * num_images_per_sample - num_images_per_sample).max() + mp_padding_needed_for_text = context_parallel.get_padding( + tokens.shape[1] + img_seq_len, + args.context_parallel_size, + args.tensor_model_parallel_size, + args.sequence_parallel, + args.decoder_tp_comm_overlap, + args.decoder_seq_length + ) + if mp_padding_needed_for_text > 0: + tokens, position_ids, labels, loss_mask = [torch.nn.functional.pad(item, (0, mp_padding_needed_for_text)) for item in (tokens, position_ids, labels, loss_mask)] + packed_seq_params = context_parallel.get_packed_seq_params(tokens, img_seq_len, mp_padding_needed_for_text, cp_size, args.use_packed_sequence) + + if packed_seq_params.qkv_format == 'thd': + # Reshape from [B, S] to [1, T] + tokens = ( + tokens.contiguous() + .view(tokens.shape[0] * tokens.shape[1]) + .unsqueeze(0) + ) + position_ids = ( + position_ids.contiguous() + .view(position_ids.shape[0] * position_ids.shape[1]) + .unsqueeze(0) + ) + labels = labels.view(labels.shape[0] * labels.shape[1]).unsqueeze(0) + loss_mask = loss_mask.view( + loss_mask.shape[0] * loss_mask.shape[1] + ).unsqueeze(0) + + attention_mask = None # Use the attention mask type defined in the layer spec. Typically no mask for the vision model and a causal mask for the language model. + + return tokens, position_ids, labels, images, loss_mask, attention_mask, packed_seq_params + + +def forward_step(data_iterator, model: LLaVAModel): + """Forward training step. + + Args: + data_iterator: Iterable dataset. + model (megatron.core.models.multimodal.llava_model.LLaVAModel): Multimodal model + + Returns: + output_tensor (torch.Tensor): Loss of shape [b, s] if labels are provided, otherwise logits of shape [b, s, vocab_size]. + loss_func (callable): Loss function with a loss mask specified. + """ + timers = get_timers() + + # Get the batch. + timers('batch-generator', log_level=2).start() + tokens, position_ids, labels, images, loss_mask, attention_mask, packed_seq_params = get_batch(data_iterator) + timers('batch-generator').stop() + + output_tensor, loss_mask = model( + images, tokens, position_ids, attention_mask, labels, loss_mask, packed_seq_params=packed_seq_params + ) + + return output_tensor, partial(loss_func, loss_mask) + + +def add_vlm_extra_args(parser): + """Extra arguments.""" + group = parser.add_argument_group(title='vision language model specific arguments') + group.add_argument( + '--freeze-LM', action='store_true', default=False, help="Freeze language model weights" + ) + group.add_argument( + '--freeze-ViT', action='store_true', default=False, help="Freeze vision model (ViT) weights" + ) + group.add_argument( + "--disable-vision-class-token", + action="store_true", + default=False, + help="Drop vision model class token", + ) + group.add_argument("--dataloader-seq-length", type=int, help="Make the dataloader produce sequences of a specific length.") + group.add_argument("--decoder-tp-comm-overlap", action="store_true", default=False, help="Enables the overlap of " + "Tensor parallel communication and GEMM kernels in Decoder only. 
" + "Please provide decoder-seq-length when using this feature.") + group.add_argument( + "--use-packed-sequence", + action="store_true", + default=False, + help="Use packed sequence", + ) + return parser + + +def llava_embedding_ranks(pp_ranks): + """LLava's embedding ranks consist of the decoder's first and last ranks (ie, the ViT has no embeddings). + Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + last_rank = pp_ranks[-1] + if len(pp_ranks) == 1 or pp_ranks[epp] == last_rank: + return [last_rank] + else: + return [pp_ranks[epp], last_rank] + + +def llava_position_embedding_ranks(pp_ranks): + """LLava's embedding ranks consist of the singular rank of the model or the decoder's first rank. + Args: + pp_ranks: A list of global ranks that constitute a pipeline group. + """ + args = get_args() + + # encoder size is also the index to the first rank of the decoder. + epp = args.encoder_pipeline_model_parallel_size + + last_rank = pp_ranks[-1] + if len(pp_ranks) == 1: + return [last_rank] + else: + return [pp_ranks[epp]] + + +if __name__ == "__main__": + train_valid_test_datasets_provider.is_distributed = True + + pretrain( + train_valid_test_datasets_provider, + model_provider, + ModelType.encoder_and_decoder, + forward_step, + args_defaults={'tokenizer_type': 'GPT2BPETokenizer'}, + extra_args_provider=add_vlm_extra_args, + get_embedding_ranks=llava_embedding_ranks, + get_position_embedding_ranks=llava_position_embedding_ranks, + ) diff --git a/pytest.ini b/pytest.ini index c75f3b9..4b8a75a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,6 @@ -# content of pytest.ini -[pytest] -markers = - internal: mark a test as a test to private/internal functions. \ No newline at end of file +# content of pytest.ini +[pytest] +markers = + internal: mark a test as a test to private/internal functions. 
+ flaky: mark flaky tests for LTS environment + flaky_in_dev: mark flaky tests for DEV environment diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 42ed710..0000000 --- a/requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ -trl -transformers >= 4.43.0 -packaging -six -regex -pyyaml -sentencepiece -pybind11 -blobfile - -# ==== test ==== -nltk -pytest -requests -wrapt -tensorboard -tensorboardX -scipy -psutil diff --git a/requirements/pytorch_24.01/requirements.txt b/requirements/pytorch_24.01/requirements.txt index 0fe7b92..b2d1702 100644 --- a/requirements/pytorch_24.01/requirements.txt +++ b/requirements/pytorch_24.01/requirements.txt @@ -1,15 +1,16 @@ -einops -flask-restful -nltk -pytest -pytest-cov -pytest_mock -pytest-random-order -sentencepiece -tiktoken -wrapt -zarr -wandb -triton==2.1.0 -tensorstore==0.1.45 -nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" \ No newline at end of file +einops +flask-restful +nltk +pytest +pytest_asyncio +pytest-cov +pytest_mock +pytest-random-order +sentencepiece +tiktoken +wrapt +zarr +wandb +triton==2.1.0 +tensorstore!=0.1.46,!=0.1.72 +nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" diff --git a/requirements/pytorch_24.07/requirements.txt b/requirements/pytorch_24.07/requirements.txt index 2fe096f..58bb239 100644 --- a/requirements/pytorch_24.07/requirements.txt +++ b/requirements/pytorch_24.07/requirements.txt @@ -1,14 +1,16 @@ -einops -flask-restful -nltk -pytest -pytest-cov -pytest_mock -pytest-random-order -sentencepiece -tiktoken -wrapt -zarr -wandb -tensorstore==0.1.45 -nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" \ No newline at end of file +einops +flask-restful +nltk +pytest +pytest_asyncio +pytest-cov +pytest_mock +pytest-random-order +sentencepiece +tiktoken +wrapt +zarr +wandb +tensorstore!=0.1.46 +nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" +nvidia-resiliency-ext diff --git a/requirements/pytorch_24.10/requirements.txt b/requirements/pytorch_24.10/requirements.txt new file mode 100644 index 0000000..ab93895 --- /dev/null +++ b/requirements/pytorch_24.10/requirements.txt @@ -0,0 +1,17 @@ +einops +flask-restful +nltk +pytest +pytest_asyncio +pytest-cov +pytest_mock +pytest-random-order +sentencepiece +tiktoken +wrapt +zarr +wandb +tensorstore!=0.1.46,!=0.1.72 +torch +nvidia-modelopt[torch]>=0.19.0; sys_platform != "darwin" +nvidia-resiliency-ext; sys_platform != "darwin" diff --git a/run.sh b/run.sh deleted file mode 100644 index 2401cc0..0000000 --- a/run.sh +++ /dev/null @@ -1,16 +0,0 @@ -export TORCHINDUCTOR_COORDINATE_DESCENT_TUNING=1 -export TORCHINDUCTOR_BENCHMARK_FUSION=1 -export TORCHINDUCTOR_BENCHMARK_MULTI_TEMPLATES=1 - -# export TORCHINDUCTOR_BENCHMARK_KERNEL=1 -export TORCHINDUCTOR_MAX_AUTOTUNE=1 - -#export FLASH_ATTENTION_PRINT_PARAM=1 -export TORCHINDUCTOR_CACHE_DIR=./cache - -# export USE_AOTRITON_FA=1 -# export USE_BSHD=1 # use fa bsdh layout -#for uniq kernel name -#export TORCHINDUCTOR_UNIQUE_KERNEL_NAMES=1 - -mpirun --allow-run-as-root -np 8 ./Llama_pretraining.sh localhost diff --git a/setup.py b/setup.py index 756348b..062f4ad 100644 --- a/setup.py +++ b/setup.py @@ -1,123 +1,128 @@ -"""Setup for pip package.""" - -import importlib.util -import subprocess -import os -import setuptools -from setuptools import Extension - -spec = importlib.util.spec_from_file_location('package_info', 'megatron/core/package_info.py') -package_info = importlib.util.module_from_spec(spec) -spec.loader.exec_module(package_info) - - -__contact_emails__ = 
package_info.__contact_emails__ -__contact_names__ = package_info.__contact_names__ -__description__ = package_info.__description__ -__download_url__ = package_info.__download_url__ -__homepage__ = package_info.__homepage__ -__keywords__ = package_info.__keywords__ -__license__ = package_info.__license__ -__package_name__ = package_info.__package_name__ -__repository_url__ = package_info.__repository_url__ -__version__ = package_info.__version__ - - -with open("megatron/core/README.md", "r", encoding='utf-8') as fh: - long_description = fh.read() -long_description_content_type = "text/markdown" - - -def req_file(filename, folder="requirements"): - environment = os.getenv("PY_ENV", "pytorch:24.07") - - with open(os.path.join(folder, environment, filename), encoding='utf-8') as f: - content = f.readlines() - # you may also want to remove whitespace characters - # Example: `\n` at the end of each line - return [x.strip() for x in content] - - -install_requires = req_file("requirements.txt") - -############################################################################### -# Extension Making # -# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # - -############################################################################### - -setuptools.setup( - name=__package_name__, - # Versions should comply with PEP440. For a discussion on single-sourcing - # the version across setup.py and the project code, see - # https://packaging.python.org/en/latest/single_source_version.html - version=__version__, - description=__description__, - long_description=long_description, - long_description_content_type=long_description_content_type, - # The project's main homepage. - url=__repository_url__, - download_url=__download_url__, - # Author details - author=__contact_names__, - author_email=__contact_emails__, - # maintainer Details - maintainer=__contact_names__, - maintainer_email=__contact_emails__, - # The licence under which the project is released - license=__license__, - classifiers=[ - # How mature is this project? 
Common values are - # 1 - Planning - # 2 - Pre-Alpha - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - # 6 - Mature - # 7 - Inactive - 'Development Status :: 5 - Production/Stable', - # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'Intended Audience :: Information Technology', - # Indicate what your project relates to - 'Topic :: Scientific/Engineering', - 'Topic :: Scientific/Engineering :: Mathematics', - 'Topic :: Scientific/Engineering :: Image Recognition', - 'Topic :: Scientific/Engineering :: Artificial Intelligence', - 'Topic :: Software Development :: Libraries', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Utilities', - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: BSD License', - # Supported python versions - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - # Additional Setting - 'Environment :: Console', - 'Natural Language :: English', - 'Operating System :: OS Independent', - ], - packages=setuptools.find_namespace_packages(include=["megatron.core", "megatron.core.*"]), - ext_modules=[ - Extension( - "megatron.core.datasets.helpers_cpp", - sources=["megatron/core/datasets/helpers.cpp"], - language="c++", - extra_compile_args=( - subprocess.check_output(["python3", "-m", "pybind11", "--includes"]) - .decode("utf-8") - .strip() - .split() - ) - + ['-O3', '-Wall', '-std=c++17'], - optional=True, - ) - ], - # Add in any packaged data. - include_package_data=True, - # PyPI package information. - keywords=__keywords__, - install_requires=install_requires, -) +"""Setup for pip package.""" + +import importlib.util +import subprocess +import os +import setuptools +from setuptools import Extension + +spec = importlib.util.spec_from_file_location('package_info', 'megatron/core/package_info.py') +package_info = importlib.util.module_from_spec(spec) +spec.loader.exec_module(package_info) + + +__contact_emails__ = package_info.__contact_emails__ +__contact_names__ = package_info.__contact_names__ +__description__ = package_info.__description__ +__download_url__ = package_info.__download_url__ +__homepage__ = package_info.__homepage__ +__keywords__ = package_info.__keywords__ +__license__ = package_info.__license__ +__package_name__ = package_info.__package_name__ +__repository_url__ = package_info.__repository_url__ +__version__ = package_info.__version__ + + +with open("megatron/core/README.md", "r", encoding='utf-8') as fh: + long_description = fh.read() +long_description_content_type = "text/markdown" + + +def req_file(filename, folder="requirements"): + environment = os.getenv("PY_ENV", "pytorch_24.10") + + content = [] + with open(os.path.join(folder, environment, filename), encoding='utf-8') as f: + content += f.readlines() + + with open(os.path.join("megatron", "core", "requirements.txt"), encoding='utf-8') as f: + content += f.readlines() + + # you may also want to remove whitespace characters + # Example: `\n` at the end of each line + return [x.strip() for x in content] + + +install_requires = req_file("requirements.txt") + +############################################################################### +# Extension Making # +# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # + +############################################################################### + +setuptools.setup( + name=__package_name__, + # 
Versions should comply with PEP440. For a discussion on single-sourcing + # the version across setup.py and the project code, see + # https://packaging.python.org/en/latest/single_source_version.html + version=__version__, + description=__description__, + long_description=long_description, + long_description_content_type=long_description_content_type, + # The project's main homepage. + url=__repository_url__, + download_url=__download_url__, + # Author details + author=__contact_names__, + author_email=__contact_emails__, + # maintainer Details + maintainer=__contact_names__, + maintainer_email=__contact_emails__, + # The licence under which the project is released + license=__license__, + classifiers=[ + # How mature is this project? Common values are + # 1 - Planning + # 2 - Pre-Alpha + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + # 6 - Mature + # 7 - Inactive + 'Development Status :: 5 - Production/Stable', + # Indicate who your project is intended for + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'Intended Audience :: Information Technology', + # Indicate what your project relates to + 'Topic :: Scientific/Engineering', + 'Topic :: Scientific/Engineering :: Mathematics', + 'Topic :: Scientific/Engineering :: Image Recognition', + 'Topic :: Scientific/Engineering :: Artificial Intelligence', + 'Topic :: Software Development :: Libraries', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: Utilities', + # Pick your license as you wish (should match "license" above) + 'License :: OSI Approved :: BSD License', + # Supported python versions + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + # Additional Setting + 'Environment :: Console', + 'Natural Language :: English', + 'Operating System :: OS Independent', + ], + packages=setuptools.find_namespace_packages(include=["megatron.core", "megatron.core.*"]), + ext_modules=[ + Extension( + "megatron.core.datasets.helpers_cpp", + sources=["megatron/core/datasets/helpers.cpp"], + language="c++", + extra_compile_args=( + subprocess.check_output(["python3", "-m", "pybind11", "--includes"]) + .decode("utf-8") + .strip() + .split() + ) + + ['-O3', '-Wall', '-std=c++17'], + optional=True, + ) + ], + # Add in any packaged data. + include_package_data=True, + # PyPI package information. + keywords=__keywords__, + install_requires=install_requires, +) diff --git a/tasks/orqa/evaluate_utils.py b/tasks/orqa/evaluate_utils.py index b7ce3fc..1a70216 100644 --- a/tasks/orqa/evaluate_utils.py +++ b/tasks/orqa/evaluate_utils.py @@ -1,175 +1,176 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
- -import torch - -from megatron.training import get_args, print_rank_0 -from megatron.training.checkpointing import load_biencoder_checkpoint -from megatron.legacy.data.orqa_wiki_dataset import get_open_retrieval_wiki_dataset -from megatron.legacy.data.realm_index import OpenRetreivalDataStore, FaissMIPSIndex -from megatron.legacy.model.biencoder_model import get_model_provider -from megatron.training import get_model -from tasks.orqa.unsupervised.nq import get_nq_dataset -from tasks.orqa.unsupervised.nq import get_one_epoch_nq_dataloader -from tasks.orqa.unsupervised.nq import process_nq_batch -from tasks.orqa.unsupervised.qa_utils import calculate_matches - - -class ORQAEvaluator(object): - def __init__(self): - args = get_args() - self.embedding_size = args.hidden_size - self.faiss_use_gpu = args.faiss_use_gpu - self.evidence_embedder_obj = None - self.evidence_dataset = None - self.mips_index = None - self.eval_dataset = None - - # Get Evidence (Wikipedia) dataset - self.get_evidence_dataset() - - # Load query encoder checkpoint - only_query_model = True - if args.biencoder_shared_query_context_model: - only_query_model = False - - model = get_model(get_model_provider(only_query_model=only_query_model, - biencoder_shared_query_context_model=args.biencoder_shared_query_context_model)) - - self.model = load_biencoder_checkpoint(model, - only_query_model=only_query_model) - - assert len(self.model) == 1 - self.model[0].eval() - - # Load faiss indexer - self.faiss_wrapper() - - def get_evidence_embedding(self): - # This will load the embedding from the embedding path - self.evidence_embedder_obj = OpenRetreivalDataStore(load_from_path=True) - - def get_evidence_dataset(self): - self.evidence_dataset = get_open_retrieval_wiki_dataset() - - def faiss_wrapper(self): - # Initialize FAISS wrapper on local rank = 0 as the evidence embeddings - # is distributed over all the GPUs in a node and FAISS is not - # thread-safe - args = get_args() - if args.local_rank == 0: - # Get evidence embeddings computed using context encoder - self.get_evidence_embedding() - - assert self.evidence_embedder_obj is not None - self.mips_index = FaissMIPSIndex(embed_size=self.embedding_size, - embed_data=self.evidence_embedder_obj, - use_gpu=self.faiss_use_gpu) - - # Wait for the FAISS index to be initialized in all the nodes - torch.distributed.barrier() - - def generate_query_vectors(self, qa_data, split): - - self.eval_dataset = get_nq_dataset(qa_data, split) - dataloader = get_one_epoch_nq_dataloader(self.eval_dataset) - - query_vectors = [] - reference_list = [] - - for batch in dataloader: - # batch also has query_tokens and query_pad_data - query_tokens, query_mask, query_types, \ - query_len, reference = process_nq_batch(batch) - - assert len(self.model) == 1 - unwrapped_model = self.model[0] - while not hasattr(unwrapped_model, 'embed_text'): - unwrapped_model = unwrapped_model.module - - with torch.no_grad(): - query_logits = unwrapped_model.embed_text( - unwrapped_model.query_model, query_tokens, - query_mask, query_types) - - reference_list.extend(reference) - query_vectors.extend(query_logits.split(1, dim=0)) - if len(query_vectors) % 100 == 0: - print_rank_0('Encoded queries {}'.format(len(query_vectors))) - - query_tensor = torch.cat(query_vectors, dim=0) - print_rank_0('Total encoded queries tensor {}'.format(query_tensor.size())) - - assert query_tensor.size(0) == len(self.eval_dataset) - return query_tensor, reference_list - - def evaluate(self, qa_data, split): - args = get_args() - query_tensor, 
reference_list = self.generate_query_vectors(qa_data, \ - split) - local_rank = args.local_rank - rank = torch.distributed.get_rank() - device_count = torch.cuda.device_count() - num_nodes = torch.distributed.get_world_size() // device_count - node_id = rank // device_count - - for node in range(num_nodes): - start_rank = node * device_count - end_rank = (node + 1) * device_count - ranks_list = list(range(start_rank, end_rank)) - node_group = torch.distributed.new_group(ranks=ranks_list) - - if node_id == node: - device_start_rank = start_rank - group = node_group - - input_ = torch.empty_like(query_tensor).copy_(query_tensor).detach_() - tensor_list = [torch.empty_like(input_) for _ in range(device_count)] - torch.distributed.all_gather(tensor_list, query_tensor, group=group) - - if local_rank == 0 and self.mips_index is not None: - all_query_tensor = torch.cat(tensor_list, dim=0).contiguous() - - distance, topkindex = self.mips_index.search_mips_index( - all_query_tensor, top_k=args.faiss_topk_retrievals, - reconstruct=False) - distance = torch.from_numpy(distance).cuda() - topkindex = torch.LongTensor(topkindex).cuda() - - if local_rank != 0: - distance = torch.empty(device_count * len(query_tensor), \ - args.faiss_topk_retrievals, dtype=torch.float32).cuda() - topkindex = torch.empty(device_count * len(query_tensor), \ - args.faiss_topk_retrievals, dtype=torch.int64).cuda() - - torch.distributed.broadcast(distance, src=device_start_rank, \ - group=group) - torch.distributed.broadcast(topkindex, src=device_start_rank, \ - group=group) - - distance = torch.split(distance, len(query_tensor), dim=0)\ - [local_rank] - topkindex = torch.split(topkindex, len(query_tensor), dim=0)\ - [local_rank] - - top_ids_and_scores = [] - for darray, topkarray in zip(distance, topkindex): - top_ids_and_scores.append((topkarray.tolist(), darray.tolist())) - - passages = self.evidence_dataset.id2text - match_stats = calculate_matches(passages, - reference_list, - top_ids_and_scores, - workers_num=args.num_workers, - match_type=args.faiss_match) - top_k_hits = match_stats.top_k_hits - - print_rank_0("{} SET RESULTS".format(split)) - print_rank_0("topk-{} documents hits {}".format( - args.faiss_topk_retrievals, top_k_hits)) - top_k_hits = [v / len(top_ids_and_scores) for v in top_k_hits] - print_rank_0("top-k documents hits accuracy {}".format(top_k_hits)) - - for i in args.retriever_report_topk_accuracies: - print_rank_0("top-{}: {:.2f}".format(i, top_k_hits[i-1] * 100)) - - return +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
+ +import torch + +from megatron.training import get_args, print_rank_0 +from megatron.training.checkpointing import load_biencoder_checkpoint +from megatron.legacy.data.orqa_wiki_dataset import get_open_retrieval_wiki_dataset +from megatron.legacy.data.realm_index import OpenRetreivalDataStore, FaissMIPSIndex +from megatron.legacy.model.biencoder_model import get_model_provider +from megatron.training import get_model +from megatron.core.parallel_state import create_group +from tasks.orqa.unsupervised.nq import get_nq_dataset +from tasks.orqa.unsupervised.nq import get_one_epoch_nq_dataloader +from tasks.orqa.unsupervised.nq import process_nq_batch +from tasks.orqa.unsupervised.qa_utils import calculate_matches + + +class ORQAEvaluator(object): + def __init__(self): + args = get_args() + self.embedding_size = args.hidden_size + self.faiss_use_gpu = args.faiss_use_gpu + self.evidence_embedder_obj = None + self.evidence_dataset = None + self.mips_index = None + self.eval_dataset = None + + # Get Evidence (Wikipedia) dataset + self.get_evidence_dataset() + + # Load query encoder checkpoint + only_query_model = True + if args.biencoder_shared_query_context_model: + only_query_model = False + + model = get_model(get_model_provider(only_query_model=only_query_model, + biencoder_shared_query_context_model=args.biencoder_shared_query_context_model)) + + self.model = load_biencoder_checkpoint(model, + only_query_model=only_query_model) + + assert len(self.model) == 1 + self.model[0].eval() + + # Load faiss indexer + self.faiss_wrapper() + + def get_evidence_embedding(self): + # This will load the embedding from the embedding path + self.evidence_embedder_obj = OpenRetreivalDataStore(load_from_path=True) + + def get_evidence_dataset(self): + self.evidence_dataset = get_open_retrieval_wiki_dataset() + + def faiss_wrapper(self): + # Initialize FAISS wrapper on local rank = 0 as the evidence embeddings + # is distributed over all the GPUs in a node and FAISS is not + # thread-safe + args = get_args() + if args.local_rank == 0: + # Get evidence embeddings computed using context encoder + self.get_evidence_embedding() + + assert self.evidence_embedder_obj is not None + self.mips_index = FaissMIPSIndex(embed_size=self.embedding_size, + embed_data=self.evidence_embedder_obj, + use_gpu=self.faiss_use_gpu) + + # Wait for the FAISS index to be initialized in all the nodes + torch.distributed.barrier() + + def generate_query_vectors(self, qa_data, split): + + self.eval_dataset = get_nq_dataset(qa_data, split) + dataloader = get_one_epoch_nq_dataloader(self.eval_dataset) + + query_vectors = [] + reference_list = [] + + for batch in dataloader: + # batch also has query_tokens and query_pad_data + query_tokens, query_mask, query_types, \ + query_len, reference = process_nq_batch(batch) + + assert len(self.model) == 1 + unwrapped_model = self.model[0] + while not hasattr(unwrapped_model, 'embed_text'): + unwrapped_model = unwrapped_model.module + + with torch.no_grad(): + query_logits = unwrapped_model.embed_text( + unwrapped_model.query_model, query_tokens, + query_mask, query_types) + + reference_list.extend(reference) + query_vectors.extend(query_logits.split(1, dim=0)) + if len(query_vectors) % 100 == 0: + print_rank_0('Encoded queries {}'.format(len(query_vectors))) + + query_tensor = torch.cat(query_vectors, dim=0) + print_rank_0('Total encoded queries tensor {}'.format(query_tensor.size())) + + assert query_tensor.size(0) == len(self.eval_dataset) + return query_tensor, reference_list + + def 
evaluate(self, qa_data, split): + args = get_args() + query_tensor, reference_list = self.generate_query_vectors(qa_data, \ + split) + local_rank = args.local_rank + rank = torch.distributed.get_rank() + device_count = torch.cuda.device_count() + num_nodes = torch.distributed.get_world_size() // device_count + node_id = rank // device_count + + for node in range(num_nodes): + start_rank = node * device_count + end_rank = (node + 1) * device_count + ranks_list = list(range(start_rank, end_rank)) + node_group = create_group(ranks=ranks_list, group_desc=f'QA_EVALUATOR_NODE_GROUP') + + if node_id == node: + device_start_rank = start_rank + group = node_group + + input_ = torch.empty_like(query_tensor).copy_(query_tensor).detach_() + tensor_list = [torch.empty_like(input_) for _ in range(device_count)] + torch.distributed.all_gather(tensor_list, query_tensor, group=group) + + if local_rank == 0 and self.mips_index is not None: + all_query_tensor = torch.cat(tensor_list, dim=0).contiguous() + + distance, topkindex = self.mips_index.search_mips_index( + all_query_tensor, top_k=args.faiss_topk_retrievals, + reconstruct=False) + distance = torch.from_numpy(distance).cuda() + topkindex = torch.LongTensor(topkindex).cuda() + + if local_rank != 0: + distance = torch.empty(device_count * len(query_tensor), \ + args.faiss_topk_retrievals, dtype=torch.float32).cuda() + topkindex = torch.empty(device_count * len(query_tensor), \ + args.faiss_topk_retrievals, dtype=torch.int64).cuda() + + torch.distributed.broadcast(distance, src=device_start_rank, \ + group=group) + torch.distributed.broadcast(topkindex, src=device_start_rank, \ + group=group) + + distance = torch.split(distance, len(query_tensor), dim=0)\ + [local_rank] + topkindex = torch.split(topkindex, len(query_tensor), dim=0)\ + [local_rank] + + top_ids_and_scores = [] + for darray, topkarray in zip(distance, topkindex): + top_ids_and_scores.append((topkarray.tolist(), darray.tolist())) + + passages = self.evidence_dataset.id2text + match_stats = calculate_matches(passages, + reference_list, + top_ids_and_scores, + workers_num=args.num_workers, + match_type=args.faiss_match) + top_k_hits = match_stats.top_k_hits + + print_rank_0("{} SET RESULTS".format(split)) + print_rank_0("topk-{} documents hits {}".format( + args.faiss_topk_retrievals, top_k_hits)) + top_k_hits = [v / len(top_ids_and_scores) for v in top_k_hits] + print_rank_0("top-k documents hits accuracy {}".format(top_k_hits)) + + for i in args.retriever_report_topk_accuracies: + print_rank_0("top-{}: {:.2f}".format(i, top_k_hits[i-1] * 100)) + + return diff --git a/tasks/orqa/unsupervised/qa_utils.py b/tasks/orqa/unsupervised/qa_utils.py old mode 100644 new mode 100755 diff --git a/tasks/orqa/unsupervised/tokenizers.py b/tasks/orqa/unsupervised/tokenizers.py old mode 100644 new mode 100755 diff --git a/tasks/vision/segmentation/metrics.py b/tasks/vision/segmentation/metrics.py old mode 100644 new mode 100755 diff --git a/tests/functional_tests/python_test_utils/common.py b/tests/functional_tests/python_test_utils/common.py index 1b21fa8..900965e 100644 --- a/tests/functional_tests/python_test_utils/common.py +++ b/tests/functional_tests/python_test_utils/common.py @@ -1,95 +1,284 @@ -import enum -import glob -import json -import logging -import os - -from tensorboard.backend.event_processing import event_accumulator - -# By default TB tries to be smart about what to load in memory to avoid OOM -# Since we expect every step to be there when we do our comparisons, we explicitly -# set the 
size guidance to 0 so that we load everything. It's okay given our tests -# are small/short. -SIZE_GUIDANCE = {event_accumulator.TENSORS: 0, event_accumulator.SCALARS: 0} - -logger = logging.getLogger() - - -class TypeOfTest(enum.Enum): - APPROX = 1 - DETERMINISTIC = 2 - - -TYPE_OF_TEST_TO_METRIC = { - TypeOfTest.DETERMINISTIC: ["lm loss", "num-zeros"], - TypeOfTest.APPROX: ["lm loss", "iteration-time", "mem-allocated-bytes"], -} - -METRIC_TO_THRESHOLD = { - "iteration-time": 0.8, - "mem-allocated-bytes": 3 * 1000 * 1000, # 3MB - "lm loss": 0.05, -} - - -def read_tb_logs_as_list(path, index=0): - """Reads a TensorBoard Events file from the input path, and returns the - summary specified as input as a list. - - Args: - path: str, path to the dir where the events file is located. - summary_name: str, name of the summary to read from the TB logs. - - Returns: - summary_list: list, the values in the read summary list, formatted as a list. - """ - files = glob.glob(f"{path}/events*tfevents*") - files += glob.glob(f"{path}/results/events*tfevents*") - - summaries = {} - - if not files: - logger.info(f"File not found matching: {path}/events* || {path}/results/events*") - return summaries - - files.sort(key=lambda x: os.path.getmtime(os.path.join(path, x))) - accumulators = [] - - if index == -1: - for event_file in files: - ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) - ea.Reload() - accumulators.append(ea) - else: - event_file = files[index] - ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) - ea.Reload() - accumulators.append(ea) - - for ea in accumulators: - for scalar_name in ea.Tags()["scalars"]: - if scalar_name in summaries: - summaries[scalar_name] += [round(x.value, 5) for x in ea.Scalars(scalar_name)] - else: - summaries[scalar_name] = [round(x.value, 5) for x in ea.Scalars(scalar_name)] - - print( - f"Extracted {len(summaries[scalar_name])} values of {scalar_name} from Tensorboard \ - logs. Here are the first 5 values: {summaries[scalar_name][:5]}" - ) - - return summaries - - -def load_expected_data(): - expected_metrics_file = os.getenv("EXPECTED_METRICS_FILE") - - if expected_metrics_file is None: - raise ValueError("Unknown EXPECTED_METRICS_FILE") - - with open(expected_metrics_file) as f: - if os.path.exists(expected_metrics_file): - with open(expected_metrics_file) as f: - return json.load(f) - else: - print(f"File {expected_metrics_file} not found!") +import enum +import glob +import json +import logging +import os +import pathlib +from typing import Callable, Dict, List, Optional, Union + +import numpy as np +import pydantic +from tensorboard.backend.event_processing import event_accumulator + +# By default TB tries to be smart about what to load in memory to avoid OOM +# Since we expect every step to be there when we do our comparisons, we explicitly +# set the size guidance to 0 so that we load everything. It's okay given our tests +# are small/short. 
+SIZE_GUIDANCE = {event_accumulator.TENSORS: 0, event_accumulator.SCALARS: 0} + +logger = logging.getLogger(__name__) + + +def approximate_threshold(rtol: float) -> Callable: + def _func(y_pred: List[Union[float, int]], y_true: List[Union[float, int]]): + return np.mean([np.mean(y_pred), np.mean(y_true)]) * rtol + + return _func + + +class TypeOfTestResult(enum.Enum): + APPROXIMATE = 1 + DETERMINISTIC = 2 + + +class Test(pydantic.BaseModel): + pass + + +class NotApproximateError(Exception): + """Raised if comparison is not within approximate bounds""" + + +class NotDeterminsticError(Exception): + """Raised if comparison is not exactly equal""" + + +class ApproximateTest(Test): + atol: Optional[Union[int, float]] = 0 + atol_func: Optional[Callable] = None + rtol: float = 1e-5 + + @property + def type_of_test_result(self) -> TypeOfTestResult: + return TypeOfTestResult.APPROXIMATE + + def error_message(self, metric_name: str) -> NotApproximateError: + return NotApproximateError(f"Approximate comparison of {metric_name}: FAILED") + + +class DeterministicTest(Test): + @property + def atol(self) -> Union[int, float]: + return 0 + + atol_func: Optional[Callable] = None + + @property + def rtol(self) -> float: + return 0.0 + + @property + def type_of_test_result(self) -> TypeOfTestResult: + return TypeOfTestResult.DETERMINISTIC + + def error_message(self, metric_name: str) -> NotDeterminsticError: + return NotDeterminsticError(f"Exact comparison of {metric_name}: FAILED") + + +class GoldenValueMetric(pydantic.BaseModel): + start_step: int + end_step: int + step_interval: int + values: Dict[int, Union[int, float, str]] + + def __repr__(self): + return f"Values ({self.start_step},{self.end_step},{self.step_interval}): {', '.join([str(f'({step}, {value})') for step, value in self.values.items()])}" + + +class GoldenValues(pydantic.RootModel): + root: Dict[str, GoldenValueMetric] + + +class MissingTensorboardLogsError(Exception): + """Raised if Tensorboard logs are not found""" + + +class UndefinedMetricError(Exception): + """Raised if a golden values metric has no test definition""" + + +class SkipMetricError(Exception): + """Raised if a metric shall be skipped""" + + +def read_tb_logs_as_list( + path, index: int = 0, train_iters: int = 50, start_idx: int = 1, step_size: int = 5 +) -> Optional[Dict[str, GoldenValueMetric]]: + """Reads TensorBoard Events files from the input path and returns the recorded + scalar summaries as golden-value metrics. + + Args: + path: str, path to the dir where the events file is located. + index: int, index of the events file to read; -1 reads and merges all files. + train_iters: int, number of train iterations covered by the logs. + start_idx: int, first step to include. + step_size: int, interval between included steps. + + Returns: + A dict mapping scalar names to GoldenValueMetric objects, or None if no events files are found. 
+ """ + files = glob.glob(f"{path}/events*tfevents*") + files += glob.glob(f"{path}/results/events*tfevents*") + + if not files: + logger.error(f"File not found matching: {path}/events* || {path}/results/events*") + return None + + files.sort(key=lambda x: os.path.getmtime(os.path.join(path, pathlib.Path(x).name))) + accumulators = [] + + if index == -1: + for event_file in files: + ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) + ea.Reload() + accumulators.append(ea) + else: + event_file = files[index] + ea = event_accumulator.EventAccumulator(event_file, size_guidance=SIZE_GUIDANCE) + ea.Reload() + accumulators.append(ea) + + summaries = {} + for ea in accumulators: + for scalar_name in ea.Tags()["scalars"]: + if scalar_name in summaries: + for x in ea.Scalars(scalar_name): + if x.step not in summaries[scalar_name]: + summaries[scalar_name][x.step] = round(x.value, 5) + + else: + summaries[scalar_name] = { + x.step: round(x.value, 5) for x in ea.Scalars(scalar_name) + } + + golden_values = {} + + for metric, values in summaries.items(): + + # Add missing values + values = { + k: (values[k] if k in values else "nan") + for k in range(1, train_iters + 1) + if k == start_idx or (k > start_idx and int(k) % step_size == 0) + } + + golden_values[metric] = GoldenValueMetric( + start_step=min(values.keys()), + end_step=max(values.keys()), + step_interval=step_size, + values=values, + ) + + # for metric_name, golden_value in golden_values.items(): + # logger.info( + # f"Extracted {golden_value.end_step} values of {metric_name} from Tensorboard logs. Here are the sampled values: {golden_value.values}" + # ) + + return golden_values + + +def read_golden_values_from_json( + golden_values_path: Union[str, pathlib.Path] +) -> Dict[str, GoldenValueMetric]: + with open(golden_values_path) as f: + if os.path.exists(golden_values_path): + with open(golden_values_path) as f: + return GoldenValues(**json.load(f)).root + + raise ValueError(f"File {golden_values_path} not found!") + + +def _filter_checks( + checks: List[Union[ApproximateTest, DeterministicTest]], filter_for_type_of_check +): + return [test for test in checks if test.type_of_test_result == filter_for_type_of_check] + + +def pipeline( + compare_approximate_results: bool, + golden_values: Dict[str, GoldenValueMetric], + tensorboard_logs: Dict[str, GoldenValueMetric], + checks: Dict[str, List[Union[ApproximateTest, DeterministicTest]]], +): + + all_test_passed = True + failed_metrics = [] + + for golden_value_key, golden_value in golden_values.items(): + + try: + if golden_value_key not in list(tensorboard_logs.keys()): + raise MissingTensorboardLogsError( + f"Metric {golden_value_key} not found in Tensorboard logs! Please modify `model_config.yaml` to record it." + ) + + if golden_value_key not in checks or (golden_value_key in checks and len(checks) == 0): + logger.debug( + "For metric `%s`, no check was defined. 
Will fall back to `DeterminsticTest` with exact thresholds.", + golden_value_key, + ) + test = DeterministicTest() + else: + # For approximate tests, we cannot use deterministic + if compare_approximate_results is True: + tests = _filter_checks(checks[golden_value_key], TypeOfTestResult.APPROXIMATE) + + # For deterministic, we can fall back to approximate + else: + tests = _filter_checks( + checks[golden_value_key], TypeOfTestResult.DETERMINISTIC + ) or _filter_checks(checks[golden_value_key], TypeOfTestResult.APPROXIMATE) + + if len(tests) != 1: + raise SkipMetricError( + f"No {'approximate' if compare_approximate_results is True else 'deterministic'} check found for {golden_value_key}: SKIPPED" + ) + + test = tests[0] + + golden_value_list = list(golden_value.values.values()) + actual_value_list = [ + value + for value_step, value in tensorboard_logs[golden_value_key].values.items() + if value_step in golden_value.values.keys() + ] + + if golden_value_key == "iteration-time": + actual_value_list = actual_value_list[3:-1] + golden_value_list = golden_value_list[3:-1] + logger.info( + "For metric `%s`, the first 3 and the last scalars are removed from the list to reduce noise.", + golden_value_key, + ) + + actual_value_list = [np.inf if type(v) is str else v for v in actual_value_list] + golden_value_list = [np.inf if type(v) is str else v for v in golden_value_list] + + if not np.allclose( + actual_value_list, + golden_value_list, + rtol=test.rtol, + atol=( + test.atol_func(actual_value_list, golden_value_list) + if test.atol_func is not None + else test.atol + ), + ): + logger.info("Actual values: %s", ", ".join([str(v) for v in actual_value_list])) + logger.info("Golden values: %s", ", ".join([str(v) for v in golden_value_list])) + raise test.error_message(golden_value_key) + + result = f"{test.type_of_test_result.name} test for metric {golden_value_key}: PASSED" + result_code = 0 + + except (NotApproximateError, NotDeterminsticError, MissingTensorboardLogsError) as e: + result = str(e) + result_code = 1 + except SkipMetricError: + logger.info(f"{test.type_of_test_result.name} test for {golden_value_key}: SKIPPED") + continue + + log_emitter = logger.info if result_code == 0 else logger.error + log_emitter(result) + if result_code == 1: + all_test_passed = False + failed_metrics.append(golden_value_key) + + assert all_test_passed, f"The following metrics failed: {', '.join(failed_metrics)}" diff --git a/tests/functional_tests/python_test_utils/conftest.py b/tests/functional_tests/python_test_utils/conftest.py new file mode 100644 index 0000000..c00bec1 --- /dev/null +++ b/tests/functional_tests/python_test_utils/conftest.py @@ -0,0 +1,61 @@ +from typing import Dict, List, Union + +import pytest + +from tests.functional_tests.python_test_utils import common + + +def pytest_addoption(parser): + """ + Additional command-line arguments passed to pytest. 
+ """ + parser.addoption( + "--allow-nondeterministic-algo", + action="store_true", + default=False, + help="If set, test system checks for approximate results.", + ) + parser.addoption("--golden-values-path", action="store", help="Path to golden values") + parser.addoption( + "--train-iters", action="store", default=100, help="Number of train iters", type=int + ) + parser.addoption("--tensorboard-path", action="store", help="Path to tensorboard records") + parser.addoption("--model-config-path", action="store", help="Path to model_config.yaml") + + +@pytest.fixture +def compare_approximate_results(request) -> bool: + """Simple fixture returning whether to check against results approximately.""" + return request.config.getoption("--allow-nondeterministic-algo") is True + + +@pytest.fixture +def golden_values(request): + """Simple fixture returning golden values.""" + return common.read_golden_values_from_json(request.config.getoption("--golden-values-path")) + + +@pytest.fixture +def train_iters(request): + """Simple fixture returning number of train iters.""" + return request.config.getoption("--train-iters") + + +@pytest.fixture +def tensorboard_logs(request, train_iters): + """Simple fixture returning tensorboard metrics.""" + return common.read_tb_logs_as_list( + request.config.getoption("--tensorboard-path"), train_iters=train_iters + ) + + +@pytest.fixture +def tensorboard_path(request): + """Simple fixture returning path to tensorboard logs.""" + return request.config.getoption("--tensorboard-path") + + +@pytest.fixture +def model_config_path(request): + """Simple fixture returning path to model_config.yaml.""" + return request.config.getoption("--model-config-path") diff --git a/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py b/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py index d046b25..39400ef 100644 --- a/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py +++ b/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py @@ -1,39 +1,62 @@ -import os - -os.environ["OPENBLAS_NUM_THREADS"] = "1" -import json - -import click - -from tests.functional_tests.python_test_utils import common - - -@click.command() -@click.option("--logs-dir", required=True, type=str, help="Path to Tensorboard logs") -@click.option("--output-path", required=False, type=str, help="Path to write golden values") -@click.option( - "--is-convergence-test/--is-normal-test", - type=bool, - help="Tensorboard index to extract", - default=False, -) -def collect_train_test_metrics(logs_dir: str, output_path: str, is_convergence_test: bool): - summaries = common.read_tb_logs_as_list(logs_dir, index=-1 if is_convergence_test else 0) - - train_metrics = { - metric_name: { - "start_step": 0, - "end_step": len(metric_values), - "step_interval": 5, - "values": metric_values[0 : len(metric_values) : 5], - } - for metric_name, metric_values in summaries.items() - } - - if output_path is not None: - with open(output_path, "w") as fh: - json.dump(train_metrics, fh) - - -if __name__ == "__main__": - collect_train_test_metrics() +import os + +os.environ["OPENBLAS_NUM_THREADS"] = "1" +import json +import logging + +import click +import yaml + +from tests.functional_tests.python_test_utils import common + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +@click.command() +@click.option("--logs-dir", required=True, type=str, help="Path to Tensorboard logs") 
+@click.option("--train-iters", required=True, type=int, help="Number of train iters") +@click.option("--output-path", required=False, type=str, help="Path to write golden values") +@click.option( + "--is-convergence-test/--is-normal-test", + type=bool, + help="Tensorboard index to extract", + default=False, +) +def collect_train_test_metrics( + logs_dir: str, train_iters: str, output_path: str, is_convergence_test: bool +): + summaries = common.read_tb_logs_as_list( + logs_dir, index=(0 if not is_convergence_test else -1), train_iters=train_iters, start_idx=1 + ) + + if summaries is None: + logger.warning("No tensorboard logs found, no golden values created.") + return + + summaries = { + golden_value_key: golden_value + for (golden_value_key, golden_value) in summaries.items() + if golden_value_key + in [ + "iteration-time", + "mem-allocated-bytes", + "mem-max-allocated-bytes", + "lm loss", + "num-zeros", + ] + } + + if output_path is not None: + with open(output_path, "w") as fh: + json.dump( + { + golden_value_key: golden_values.model_dump() + for golden_value_key, golden_values in summaries.items() + }, + fh, + ) + + +if __name__ == "__main__": + collect_train_test_metrics() diff --git a/tests/functional_tests/python_test_utils/test_ci_pipeline.py b/tests/functional_tests/python_test_utils/test_ci_pipeline.py deleted file mode 100644 index 9066248..0000000 --- a/tests/functional_tests/python_test_utils/test_ci_pipeline.py +++ /dev/null @@ -1,96 +0,0 @@ -import os -from typing import List, Union - -import numpy as np -import pytest - -from .common import ( - METRIC_TO_THRESHOLD, - TYPE_OF_TEST_TO_METRIC, - TypeOfTest, - load_expected_data, - read_tb_logs_as_list, -) - - -@pytest.fixture(params=load_expected_data().items()) -def expected_data(request): - return request.param - - -# If we require a variation of tests for any of the other pipelines we can just inherit this class. -class TestCIPipeline: - allow_nondeterministic = bool(int(os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO"))) - logs_dir = os.getenv("LOGS_DIR") - - # Replace symbol in namespace to fix function call result for lifetime of - # this class. - - def _test_helper(self, metric_type: str, metric_dict: List[Union[int, float]], test_type): - expected_list = metric_dict['values'] - print(f"The list of expected values: {expected_list} for metric {metric_type}") - - try: - actual_list = read_tb_logs_as_list(self.logs_dir)[metric_type] - except KeyError as e: - raise KeyError( - f"Required metric {metric_type} not found in TB logs. 
Please make sure your model \ -exports this metric as its required by the test case/golden values file" - ) from e - - if actual_list is None: - raise ValueError(f"No values of {metric_type} found in TB logs.") - - actual_list_sliced = actual_list[ - metric_dict["start_step"] : metric_dict["end_step"] : metric_dict["step_interval"] - ] - print(f"The list of actual values: {actual_list_sliced}") - - if metric_type == "iteration-time": - actual_list_sliced = actual_list_sliced[3:] - expected_list = expected_list[3:] - print("Removing first items of values for metric_type iteration-time") - - if test_type == TypeOfTest.DETERMINISTIC: - assert np.allclose( - actual_list_sliced, expected_list, rtol=0, atol=0 - ), f"Actual is not equal to Expected for {metric_type}" - elif test_type == TypeOfTest.APPROX: - assert np.allclose( - actual_list_sliced, expected_list, rtol=1e-5, atol=METRIC_TO_THRESHOLD[metric_type] - ), f"Actual is not equal to Expected for {metric_type}" - else: - raise ValueError(f"Unexpected test_type {test_type} provided") - - def test_approx(self, expected_data): - expected_metric, expected_values = expected_data - - if expected_metric in TYPE_OF_TEST_TO_METRIC[TypeOfTest.APPROX]: - self._test_helper(expected_metric, expected_values, TypeOfTest.APPROX) - else: - print(f"Skipping metric {expected_metric} for approximate as it is deterministic only.") - - @pytest.mark.skipif(allow_nondeterministic, reason="Cannot expect exact results") - def test_deterministic(self, expected_data): - expected_metric, expected_values = expected_data - - if expected_metric in TYPE_OF_TEST_TO_METRIC[TypeOfTest.DETERMINISTIC]: - self._test_helper(expected_metric, expected_values, TypeOfTest.DETERMINISTIC) - else: - print(f"Skipping metric {expected_metric} for deterministic as it is approximate only.") - - # # @TODO: This is inactive, do we want to activate it? - # def iteration_timing_node(self): - # expected_iteration_timing_avg = self.expected["train_step_timing_avg"] - # iteration_time = read_tb_logs_as_list(LOGS_DIR)["iteration-time"] - # idx = len(iteration_time) // 3 - # iteration_time_avg = sum(iteration_time[idx:]) / len(iteration_time[idx:]) - # assert ( - # expected_iteration_timing_avg - # == pytest.approx(expected=iteration_time_avg, rel=self.margin_time) - # ), f"The time per global step must be approximately {expected_iteration_timing_avg} but " - # "it is {iteration_time_avg}." - - -# if deterministic, then also approx -# if not determinstic, then also aprox diff --git a/tests/functional_tests/python_test_utils/test_fp8_ci_pipeline.py b/tests/functional_tests/python_test_utils/test_fp8_ci_pipeline.py deleted file mode 100644 index b6a9b61..0000000 --- a/tests/functional_tests/python_test_utils/test_fp8_ci_pipeline.py +++ /dev/null @@ -1,113 +0,0 @@ -import json -import os - -import numpy as np -import pytest -import scipy.stats as ss -from scipy.integrate import trapezoid - -from .common import read_tb_logs_as_list - -LOGS_DIR = os.getenv("LOGS_DIR") -EXPECTED_METRICS_FILE = os.getenv("EXPECTED_METRICS_FILE") - - -# If we require a variation of tests for any of the other pipelines we can just inherit this class. 
-class TestFP8CIPipeline: - margin_loss, margin_time = 0.2, 0.1 - auc_threshold, correlation_threshold = 0.01, 0.999 - expected = None - - def _setup(self): - if os.path.exists(EXPECTED_METRICS_FILE): - with open(EXPECTED_METRICS_FILE) as f: - self.expected = json.load(f) - if self.expected is None: - raise FileNotFoundError("Expected data is none") - - def _get_actual(self, loss_type): - actual_list = read_tb_logs_as_list(LOGS_DIR)[loss_type] - assert ( - actual_list is not None - ), f"No TensorBoard events file was found in the logs for {loss_type}." - return actual_list - - def _margin_test_helper(self, loss_type): - expected = self.expected[loss_type] - expected_list = np.array(expected["values"]) - actual_list = self._get_actual(loss_type) - actual_list_sliced = np.array( - actual_list[expected["start_step"] : expected["end_step"] : expected["step_interval"]] - ) - - max_diff_index = np.argmax(np.abs(actual_list_sliced - expected_list)) - max_diff = np.abs(actual_list_sliced[max_diff_index] - expected_list[max_diff_index]) - - print( - "[INFO - margin]: " - f"maximum absolute difference for {loss_type} is {max_diff} at index {max_diff_index}, " - f"Actual: {actual_list_sliced[max_diff_index]}, " - f"Expected: {expected_list[max_diff_index]}" - ) - assert np.allclose( - actual_list_sliced, expected_list, rtol=1e-5, atol=self.margin_loss - ), f"Actual is not equal to Expected for {loss_type}" - - def _auc_test_helper(self, loss_type): - expected = self.expected[loss_type] - expected_list = np.array(expected["values"]) - actual_list = self._get_actual(loss_type) - actual_list_sliced = np.array( - actual_list[expected["start_step"] : expected["end_step"] : expected["step_interval"]] - ) - - def compute_auc(y_values): - x_values = np.arange(0, len(y_values), 1) - area = trapezoid(y_values, x_values) - return round(area, 5) - - baseline_area = compute_auc(expected_list) - current_area = compute_auc(actual_list_sliced) - diff = abs(baseline_area - current_area) - - print( - f"[INFO - AUC]: AUC diff: {diff * 100 / baseline_area} %, current: {current_area}, " - f"baseline: {baseline_area}" - ) - assert (baseline_area <= 0) or (diff <= self.auc_threshold * baseline_area) - - def _correlation_test_helper(self, loss_type): - expected = self.expected[loss_type] - expected_list = np.array(expected["values"]) - actual_list = self._get_actual(loss_type) - actual_list_sliced = np.array( - actual_list[expected["start_step"] : expected["end_step"] : expected["step_interval"]] - ) - corr = ss.pearsonr(actual_list_sliced, expected_list).statistic - - print(f"[INFO - Corr]: Corr: {corr}") - assert corr > self.correlation_threshold - - @pytest.mark.xfail - def test_lm_loss_margin(self): - self._setup() - self._margin_test_helper("lm loss") - - def test_lm_loss_auc(self): - self._setup() - self._auc_test_helper("lm loss") - - @pytest.mark.xfail - def test_lm_loss_correlation(self): - self._setup() - self._correlation_test_helper("lm loss") - - def iteration_timing_node(self): - expected_iteration_timing_avg = self.expected["train_step_timing_avg"] - iteration_time = read_tb_logs_as_list(LOGS_DIR)["iteration-time"] - idx = len(iteration_time) // 3 - iteration_time_avg = sum(iteration_time[idx:]) / len(iteration_time[idx:]) - assert expected_iteration_timing_avg == pytest.approx( - expected=iteration_time_avg, rel=self.margin_time - ), f"The time per global step must be approximately {expected_iteration_timing_avg} but it \ -is {iteration_time_avg}." 
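The removed FP8 pipeline above compared the observed lm-loss curve against its golden values in three complementary ways: a pointwise margin check, an area-under-curve comparison, and a Pearson-correlation check. A minimal standalone sketch of those checks, using only numpy and scipy with illustrative placeholder arrays (not values from any real run), looks like this:

import numpy as np
import scipy.stats as ss
from scipy.integrate import trapezoid

# Placeholder curves; in the real test these come from the golden-values file
# and from the TensorBoard logs, sliced by start_step/end_step/step_interval.
expected = np.array([10.49, 10.48, 10.35, 10.04, 9.67])
actual = np.array([10.49, 10.48, 10.36, 10.05, 9.68])

# Margin check: every point must lie within an absolute tolerance of the baseline.
assert np.allclose(actual, expected, rtol=1e-5, atol=0.2)

# AUC check: the areas under the two curves must agree to within 1 %.
steps = np.arange(len(expected))
baseline_area = trapezoid(expected, steps)
current_area = trapezoid(actual, steps)
assert (baseline_area <= 0) or (abs(baseline_area - current_area) <= 0.01 * baseline_area)

# Correlation check: the two curves must be almost perfectly correlated.
assert ss.pearsonr(actual, expected).statistic > 0.999

The new test_regular_pipeline.py that follows replaces this trio with per-metric DeterministicTest/ApproximateTest checks driven by the golden-values file.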
diff --git a/tests/functional_tests/python_test_utils/test_regular_pipeline.py b/tests/functional_tests/python_test_utils/test_regular_pipeline.py new file mode 100644 index 0000000..7e4a788 --- /dev/null +++ b/tests/functional_tests/python_test_utils/test_regular_pipeline.py @@ -0,0 +1,57 @@ +import logging +from typing import Dict, List, Optional + +import numpy as np + +from tests.functional_tests.python_test_utils import common + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def test_regular_pipeline( + compare_approximate_results: bool, + golden_values: Dict[str, common.GoldenValueMetric], + tensorboard_logs: Dict[str, common.GoldenValueMetric], + checks: Optional[Dict[str, List[common.Test]]] = None, +): + if checks is None: + checks = { + "iteration-time": [common.ApproximateTest(atol=2.0, rtol=0)], + "mem-allocated-bytes": [ + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0) + ], + "mem-max-allocated-bytes": [ + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0) + ], + "lm loss": [ + common.DeterministicTest(), + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0), + ], + "num-zeros": [ + common.DeterministicTest(), + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.20), rtol=0), + ], + } + + if ( + len( + missing_metrics := [ + golden_metric + for golden_metric in checks.keys() + if golden_metric not in golden_values.keys() + ] + ) + > 0 + ): + logger.error( + f"The following metrics are required but not provided in golden values: {', '.join(missing_metrics)}" + ) + assert False + + common.pipeline( + compare_approximate_results=compare_approximate_results, + golden_values=golden_values, + tensorboard_logs=tensorboard_logs, + checks=checks, + ) diff --git a/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py b/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py index 61955e8..0f43f31 100644 --- a/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py +++ b/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py @@ -1,63 +1,77 @@ -import os - -os.environ["OPENBLAS_NUM_THREADS"] = "1" -import pytest - -from tests.functional_tests.python_test_utils.common import TypeOfTest, read_tb_logs_as_list - -LOGS_DIR = os.getenv("LOGS_DIR") -ALLOW_NONDETERMINISTIC = os.getenv("NVTE_ALLOW_NONDETERMINISTIC_ALGO") -STEP_INTERVAL = 5 - - -def collect_train_test_metrics(logs_dir, index): - train_loss_list = read_tb_logs_as_list(logs_dir, index)["lm loss"] - train_loss_list = [round(elem, 3) for elem in train_loss_list] - train_metrics = {"lm loss": train_loss_list[0 : len(train_loss_list) : STEP_INTERVAL]} - str_train_metrics = str(train_metrics).replace("'", '"') - print("\n ----------- The following are the metrics for ----------") - print(f"\n {str_train_metrics}", flush=True) - return train_metrics - - -class TestCIPipeline: - margin_loss = 0.005 - allow_nondeterministic = bool(int(ALLOW_NONDETERMINISTIC)) - train_metrics_100 = collect_train_test_metrics(LOGS_DIR, 0) - train_metrics_50_to_100 = collect_train_test_metrics(LOGS_DIR, 1) - - def _test_helper(self, loss_type, test_type): - expected = self.train_metrics_100[loss_type] - assert ( - len(expected) == 100 // STEP_INTERVAL - ), "Train metrics from first run (before checkpoint load) should \ -have {100 // STEP_INTERVAL} elements" - print("expected : " + str(expected)) - actual = 
self.train_metrics_50_to_100[loss_type] - assert ( - len(actual) == 50 // STEP_INTERVAL - ), "Train metrics from second run (after checkpoint load) should have \ -{50 // STEP_INTERVAL} elements" - print("actual : " + str(actual)) - start_idx_expected = len(expected) - len(actual) - print("start_idx_expected:", start_idx_expected) - # Here we will just be comparing values of actual and second half (50-100) of expected - for i, (expected_val, actual_val) in enumerate(zip(expected[start_idx_expected:], actual)): - step = start_idx_expected + i * STEP_INTERVAL - if test_type == TypeOfTest.APPROX: - assert actual_val == pytest.approx( - expected=expected_val, rel=self.margin_loss - ), f"The loss at step {step} should be approximately {expected_val} but it is \ -{actual_val}." - else: - assert ( - actual_val == expected_val - ), f"The value at step {step} should be {expected_val} but it is {actual_val}." - - @pytest.mark.skipif(allow_nondeterministic, reason="Nondeterministic is allowed.") - def test_lm_loss_deterministic(self): - self._test_helper("lm loss", TypeOfTest.DETERMINISTIC) - - @pytest.mark.skipif(not allow_nondeterministic, reason="Nondeterministic is not allowed.") - def test_lm_loss_nondeterministic(self): - self._test_helper("lm loss", TypeOfTest.APPROX) +import logging +from typing import Dict + +import numpy as np +import yaml + +from tests.functional_tests.python_test_utils import common, test_regular_pipeline + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def test_resume_checkpoint_pipeline( + compare_approximate_results: bool, tensorboard_path: str, train_iters: int +): + + first_run_values = common.read_tb_logs_as_list( + tensorboard_path, index=0, train_iters=train_iters, start_idx=(train_iters // 2) + 1 + ) + second_run_values = common.read_tb_logs_as_list( + tensorboard_path, index=1, train_iters=train_iters, start_idx=(train_iters // 2) + 1 + ) + + checks = { + "iteration-time": [common.ApproximateTest(atol=2.0, rtol=0)], + "mem-allocated-bytes": [ + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0) + ], + "mem-max-allocated-bytes": [ + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0) + ], + "lm loss": [ + common.DeterministicTest(), + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.05), rtol=0), + ], + "num-zeros": [ + common.DeterministicTest(), + common.ApproximateTest(atol_func=common.approximate_threshold(rtol=0.20), rtol=0), + ], + } + + if ( + len( + missing_metrics := [ + golden_metric + for golden_metric in checks.keys() + if golden_metric not in first_run_values.keys() + ] + ) + > 0 + ): + logger.error( + f"The following metrics are required but not logged during training: {', '.join(missing_metrics)}" + ) + assert False + + first_run_values = { + metric_name: metric_values + for (metric_name, metric_values) in first_run_values.items() + if metric_name in checks.keys() + } + + second_run_values = { + metric_name: metric_values + for (metric_name, metric_values) in second_run_values.items() + if metric_name in checks.keys() + } + + logger.info(first_run_values) + logger.info(second_run_values) + + test_regular_pipeline.test_regular_pipeline( + compare_approximate_results=compare_approximate_results, + golden_values=first_run_values, + tensorboard_logs=second_run_values, + checks=checks, + ) diff --git a/tests/functional_tests/shell_test_utils/_run_training.sh b/tests/functional_tests/shell_test_utils/_run_training.sh old mode 100644 new 
mode 100755 index b7757ce..2a4b9aa --- a/tests/functional_tests/shell_test_utils/_run_training.sh +++ b/tests/functional_tests/shell_test_utils/_run_training.sh @@ -1,93 +1,119 @@ -#!/bin/bash - -# This script can be used for model onboarding and testing. - -# For onboarding, it extract scalars from Tensorboard logs only. -# For testing, it compares extracted Tensorboard scalars against -# a set of `GOLDEN_VALUES`. - -set -euxo pipefail - -echo "------ARGUMENTS LIST --------" -for ARGUMENT in "$@"; do - KEY=$(echo $ARGUMENT | cut -f1 -d=) - - KEY_LENGTH=${#KEY} - VALUE="${ARGUMENT:$KEY_LENGTH+1}" - - export "$KEY"="$VALUE" - echo "$KEY=$VALUE" -done -echo "---------------------------------" - -# Check that mandatory vars are set -MANDATORY_VARS=( - "TRAINING_SCRIPT_PATH" - "TRAINING_PARAMS_PATH" - "OUTPUT_PATH" - "TENSORBOARD_PATH" - "CHECKPOINT_PATH" - "DATA_PATH" - "RUN_NUMBER" -) -for mandatory_var in "${MANDATORY_VARS[@]}"; do - if [[ -z "${!mandatory_var}" ]]; then - echo 'Providing $'$mandatory_var' is mandatory.' - exit 1 - fi -done - -cp $TRAINING_PARAMS_PATH "$TRAINING_PARAMS_PATH.${SLURM_PROCID}" -TRAINING_PARAMS_PATH="$TRAINING_PARAMS_PATH.${SLURM_PROCID}" - -# Envsubst model_params -cat $TRAINING_PARAMS_PATH | envsubst "$(env | cut -d= -f1 | sed -e 's/^/$/')" >$TRAINING_PARAMS_PATH.tmp -mv $TRAINING_PARAMS_PATH.tmp "$TRAINING_PARAMS_PATH" - -# Pull env vars to export -ENV_VARS=$(yq '... comments="" | .ENV_VARS | to_entries | .[] | [.key + "=" + .value] | join(" ")' "$TRAINING_PARAMS_PATH") -while IFS= read -r ARGUMENT; do - KEY=$(echo $ARGUMENT | cut -f1 -d=) - - KEY_LENGTH=${#KEY} - VALUE="${ARGUMENT:$KEY_LENGTH+1}" - - export "$KEY"="$VALUE" - echo "$KEY=$VALUE" -done <<< "$ENV_VARS" - -# Run before script -SCRIPT=$(cat "$TRAINING_PARAMS_PATH" | yq '.BEFORE_SCRIPT') -if [[ "$SCRIPT" != null ]]; then - eval "$SCRIPT" -fi; - -# Exit earlier to leave time for properly saving checkpoint -if [[ $(echo "$TRAINING_SCRIPT_PATH" | tr '[:upper:]' '[:lower:]') == *nemo* ]]; then - PARAMS="" - TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .MODEL_ARGS | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + "=" + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') - -else - # If this is a second run (of checkpoint-resume), we might want to use a - # different model configuration than during first time. So if key `MODEL_ARGS_2` - # exists we use it, otherwise we use the same as for the first run. - if [[ $RUN_NUMBER -eq 2 && $(yq 'has("MODEL_ARGS_2")' "$TRAINING_PARAMS_PATH") == true ]]; then - export KEY="MODEL_ARGS_2" - else - export KEY="MODEL_ARGS" - fi - - TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .[env(KEY)] | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + " " + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') - PARAMS="--exit-duration-in-mins $((($SLURM_JOB_END_TIME - $SLURM_JOB_START_TIME) / 60 - 15))" -fi - -# Extract training params -PARAMS="$PARAMS $TRAINING_PARAMS_FROM_CONFIG" - -# Set PYTHONPATH -export PYTHONPATH="$(pwd):${PYTHONPATH:-}" -export WANDB_API_KEY="${WANDB_API_KEY:-}" - -# Start training -python $TRAINING_SCRIPT_PATH $PARAMS - +#!/bin/bash + +# This script can be used for model onboarding and testing. + +# For onboarding, it extract scalars from Tensorboard logs only. +# For testing, it compares extracted Tensorboard scalars against +# a set of `GOLDEN_VALUES`. 
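+# Example invocation; every value below is a placeholder (the mandatory
+# variables are validated just below):
+#   bash tests/functional_tests/shell_test_utils/_run_training.sh \
+#     TRAINING_SCRIPT_PATH=pretrain_gpt.py \
+#     TRAINING_PARAMS_PATH=<test_case>/model_config.yaml \
+#     OUTPUT_PATH=/results \
+#     TENSORBOARD_PATH=/results/tensorboard \
+#     CHECKPOINT_SAVE_PATH=/results/checkpoints \
+#     CHECKPOINT_LOAD_PATH=/results/checkpoints \
+#     DATA_PATH=/data \
+#     RUN_NUMBER=1 \
+#     REPEAT=1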
+ +set -euxo pipefail + +echo "------ARGUMENTS LIST --------" +for ARGUMENT in "$@"; do + KEY=$(echo $ARGUMENT | cut -f1 -d=) + + KEY_LENGTH=${#KEY} + VALUE="${ARGUMENT:$KEY_LENGTH+1}" + + export "$KEY"="$VALUE" + echo "$KEY=$VALUE" +done +echo "---------------------------------" + +# Check that mandatory vars are set +MANDATORY_VARS=( + "TRAINING_SCRIPT_PATH" + "TRAINING_PARAMS_PATH" + "OUTPUT_PATH" + "TENSORBOARD_PATH" + "CHECKPOINT_SAVE_PATH" + "CHECKPOINT_LOAD_PATH" + "DATA_PATH" + "RUN_NUMBER" + "REPEAT" +) +for mandatory_var in "${MANDATORY_VARS[@]}"; do + if [[ -z "${!mandatory_var}" ]]; then + echo 'Providing $'$mandatory_var' is mandatory.' + exit 1 + fi +done + +# Envsubst model_params +cat $TRAINING_PARAMS_PATH | envsubst "$(env | cut -d= -f1 | sed -e 's/^/$/')" >$TRAINING_PARAMS_PATH.tmp +TRAINING_PARAMS_PATH="$TRAINING_PARAMS_PATH.tmp" + +# Pull env vars to export +ENV_VARS=$(yq '... comments="" | .ENV_VARS | to_entries | .[] | [.key + "=" + .value] | join(" ")' "$TRAINING_PARAMS_PATH") +while IFS= read -r ARGUMENT; do + KEY=$(echo $ARGUMENT | cut -f1 -d=) + + KEY_LENGTH=${#KEY} + VALUE="${ARGUMENT:$KEY_LENGTH+1}" + + export "$KEY"="$VALUE" + echo "$KEY=$VALUE" +done <<<"$ENV_VARS" + +# Run before script +BEFORE_SCRIPT=$(cat "$TRAINING_PARAMS_PATH" | yq '.BEFORE_SCRIPT') +if [[ "$BEFORE_SCRIPT" != null ]]; then + eval "$BEFORE_SCRIPT" +fi + +# Exit earlier to leave time for properly saving checkpoint +if [[ $(echo "$TRAINING_SCRIPT_PATH" | tr '[:upper:]' '[:lower:]') == *nemo* ]]; then + PARAMS="" + TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .MODEL_ARGS | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + "=" + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') + +else + # If this is a second run (of checkpoint-resume), we might want to use a + # different model configuration than during first time. So if key `MODEL_ARGS_2` + # exists we use it, otherwise we use the same as for the first run. + if [[ $RUN_NUMBER -eq 2 && $(yq 'has("MODEL_ARGS_2")' "$TRAINING_PARAMS_PATH") == true ]]; then + export KEY="MODEL_ARGS_2" + else + export KEY="MODEL_ARGS" + fi + + TRAINING_PARAMS_FROM_CONFIG=$(yq '... comments="" | .[env(KEY)] | to_entries | .[] | with(select(.value == "true"); .value = "") | [.key + " " + .value] | join("")' "$TRAINING_PARAMS_PATH" | tr '\n' ' ') + PARAMS="--exit-duration-in-mins $((($SLURM_JOB_END_TIME - $SLURM_JOB_START_TIME) / 60 - 15))" +fi + +# Extract training params +PARAMS="$PARAMS $TRAINING_PARAMS_FROM_CONFIG" + +# Set PYTHONPATH +export PYTHONPATH="$(pwd):${PYTHONPATH:-}" +export WANDB_API_KEY="${WANDB_API_KEY:-}" + +######## Distributed training settings. ######## +echo "------ARGUMENTS for SLURM ---" +MASTER_ADDR=${MASTER_ADDR:-localhost} +MASTER_PORT=${MASTER_PORT:-6000} +NUM_NODES=${NUM_NODES:-${SLURM_NNODES}} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +NODE_RANK=${SLURM_NODEID:-${SLURM_NODEID}} +LAST_RANK=7 +export LOG_DIR=$OUTPUT_PATH/logs/$REPEAT +mkdir -p $LOG_DIR + +DISTRIBUTED_ARGS=( + --nproc_per_node $GPUS_PER_NODE + --nnodes $NUM_NODES + --master_addr $MASTER_ADDR + --master_port $MASTER_PORT + --node_rank $SLURM_NODEID + --log-dir $LOG_DIR + --tee "0:3,7:3" + --redirects "3" +) + +# Start training +torchrun ${DISTRIBUTED_ARGS[@]} $TRAINING_SCRIPT_PATH $PARAMS || EXIT_CODE=$? 
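+# With the defaults above on a single node (SLURM_NNODES=1, SLURM_NODEID=0,
+# GPUS_PER_NODE=8, REPEAT=1) the launch roughly expands to the line below;
+# the training script and time budget are placeholders:
+#   torchrun --nproc_per_node 8 --nnodes 1 --master_addr localhost --master_port 6000 \
+#     --node_rank 0 --log-dir $OUTPUT_PATH/logs/1 --tee "0:3,7:3" --redirects "3" \
+#     pretrain_gpt.py --exit-duration-in-mins <minutes> <MODEL_ARGS from the YAML>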
+ +# Run after script +AFTER_SCRIPT=$(cat "$TRAINING_PARAMS_PATH" | yq '.AFTER_SCRIPT') +if [[ "$AFTER_SCRIPT" != null ]]; then + eval "$AFTER_SCRIPT" +fi diff --git a/tests/functional_tests/shell_test_utils/run_ci_test.sh b/tests/functional_tests/shell_test_utils/run_ci_test.sh old mode 100644 new mode 100755 index e585ab7..3101656 --- a/tests/functional_tests/shell_test_utils/run_ci_test.sh +++ b/tests/functional_tests/shell_test_utils/run_ci_test.sh @@ -1,103 +1,167 @@ -#!/bin/bash - -set -exo pipefail - -echo "------ARGUMENTS LIST --------" -for ARGUMENT in "$@"; do - echo $ARGUMENT - KEY=$(echo $ARGUMENT | cut -f1 -d=) - - KEY_LENGTH=${#KEY} - VALUE=$(eval echo ${ARGUMENT:$KEY_LENGTH+1}) - export "$KEY"="$VALUE" - echo "$KEY=$VALUE" -done -echo "---------------------------------" - -# Check that mandatory vars are set -MANDATORY_VARS=( - "TRAINING_SCRIPT_PATH" - "TRAINING_PARAMS_PATH" - "GOLDEN_VALUES_PATH" - "OUTPUT_PATH" - "TENSORBOARD_PATH" - "CHECKPOINT_PATH" - "DATA_PATH" - "DATA_CACHE_PATH" -) -for mandatory_var in "${MANDATORY_VARS[@]}"; do - if [[ -z "${!mandatory_var}" ]]; then - echo 'Providing $'$mandatory_var' is mandatory.' - exit 1 - fi -done - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -ROOT_DIR=$(realpath $SCRIPT_DIR/../../../) - -# Extract settings from params file -TEST_TYPE=$(cat $TRAINING_PARAMS_PATH \ - | yq '.TEST_TYPE') -NVTE_ALLOW_NONDETERMINISTIC_ALGO=$(cat $TRAINING_PARAMS_PATH \ - | yq '.ENV_VARS.NVTE_ALLOW_NONDETERMINISTIC_ALGO') -SKIP_PYTEST=$(cat $TRAINING_PARAMS_PATH \ - | yq '.ENV_VARS.SKIP_PYTEST') - -for i in $(seq 1 $N_REPEAT); -do - if [[ $i -gt 1 ]]; then - rm -rf $CHECKPOINT_PATH/* - fi - - # Training - export RUN_NUMBER=1 - bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh - - # Maybe checkpoint resume training - if [[ "$TEST_TYPE" == "ckpt-resume" ]]; then - if [[ ${SLURM_PROCID} -eq 0 ]]; then - rm -rf $CHECKPOINT_PATH/iter_0000100; - echo 50 > $CHECKPOINT_PATH/latest_checkpointed_iteration.txt; - fi - - export RUN_NUMBER=2 - bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh - fi - - if [[ ${SLURM_PROCID} -gt 0 ]]; then - continue - fi - - # Save run results - export PYTHONPATH=$ROOT_DIR - if [[ "$TEST_TYPE" == "release" ]]; then - EXTRACT_ARGS=("--is-convergence-test") - else - EXTRACT_ARGS=("--is-normal-test") - fi - python3 $ROOT_DIR/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py \ - --logs-dir $TENSORBOARD_PATH \ - --output-path ${OUTPUT_PATH}/$(basename $GOLDEN_VALUES_PATH) \ - "${EXTRACT_ARGS[@]}" - - # Maybe run tests - if [[ ${SKIP_PYTEST:-0} != 1 ]]; then - export NVTE_ALLOW_NONDETERMINISTIC_ALGO - export LOGS_DIR=$TENSORBOARD_PATH - - if [[ "$TEST_TYPE" == "ckpt-resume" ]]; then - echo "Running pytest 1st vs 2nd run comparison" - pytest -s $ROOT_DIR/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py - - elif [[ "$TEST_TYPE" == "regular" ]]; then - echo "Running pytest checks against golden values" - export EXPECTED_METRICS_FILE=$GOLDEN_VALUES_PATH - pytest -s $ROOT_DIR/tests/functional_tests/python_test_utils/test_ci_pipeline.py - - else - echo "Test type $TEST_TYPE not yet implemented." 
- fi - fi -done - - +#!/bin/bash + +set -exo pipefail + +echo "------ARGUMENTS LIST --------" +for ARGUMENT in "$@"; do + echo $ARGUMENT + KEY=$(echo $ARGUMENT | cut -f1 -d=) + + KEY_LENGTH=${#KEY} + VALUE=$(eval echo ${ARGUMENT:$KEY_LENGTH+1}) + export "$KEY"="$VALUE" + echo "$KEY=$VALUE" +done +echo "---------------------------------" + +# Check that mandatory vars are set +MANDATORY_VARS=( + "TRAINING_SCRIPT_PATH" + "TRAINING_PARAMS_PATH" + "GOLDEN_VALUES_PATH" + "OUTPUT_PATH" + "TENSORBOARD_PATH" + "CHECKPOINT_SAVE_PATH" + "CHECKPOINT_LOAD_PATH" + "DATA_PATH" + "DATA_CACHE_PATH" +) +for mandatory_var in "${MANDATORY_VARS[@]}"; do + if [[ -z "${!mandatory_var}" ]]; then + echo 'Providing $'$mandatory_var' is mandatory.' + exit 1 + fi +done + +RECORD_CHECKPOINTS=${RECORD_CHECKPOINTS:-"false"} + +TEST_TYPES=("regular" "ckpt-resume" "frozen-resume" "frozen-start" "release") + +mkdir -p $CHECKPOINT_SAVE_PATH +mkdir -p $CHECKPOINT_LOAD_PATH +_CHECKPOINT_LOAD_PATH=$CHECKPOINT_LOAD_PATH +_CHECKPOINT_SAVE_PATH=$CHECKPOINT_SAVE_PATH + +SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) +ROOT_DIR=$(realpath $SCRIPT_DIR/../../../) + +# Extract settings from params file +TEST_TYPE=$(cat $TRAINING_PARAMS_PATH | + yq '.TEST_TYPE') +NVTE_ALLOW_NONDETERMINISTIC_ALGO=$(cat $TRAINING_PARAMS_PATH | + yq '.ENV_VARS.NVTE_ALLOW_NONDETERMINISTIC_ALGO') +SKIP_PYTEST=$(cat $TRAINING_PARAMS_PATH | + yq '.ENV_VARS.SKIP_PYTEST') +TRAIN_ITERS=$(cat $TRAINING_PARAMS_PATH | + yq '.MODEL_ARGS."--train-iters" // "100"') + +for i in $(seq 1 $N_REPEAT); do + if [[ $i -gt 1 ]]; then + rm -rf $CHECKPOINT_SAVE_PATH/* + rm -rf /tmp/checkpoints/* + rm -rf $TENSORBOARD_PATH/* + fi + + # First run never loads from a checkpoint + export RUN_NUMBER=1 + export REPEAT=$i + export CHECKPOINT_SAVE_PATH=$_CHECKPOINT_SAVE_PATH + + if [[ "$TEST_TYPE" = "frozen-start" ]]; then + export CHECKPOINT_LOAD_PATH=$_CHECKPOINT_LOAD_PATH + else + export CHECKPOINT_LOAD_PATH=/tmp/checkpoints/ + fi + + if [[ "$TEST_TYPE" = "release" ]]; then + export CHECKPOINT_LOAD_PATH=$_CHECKPOINT_LOAD_PATH + export CHECKPOINT_SAVE_PATH=$_CHECKPOINT_SAVE_PATH + fi + + bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh + + if [[ "$TEST_TYPE" = "frozen-resume" && -z "$(ls -A "$_CHECKPOINT_LOAD_PATH" 2>/dev/null)" ]]; then + echo "No frozen checkpoint found. Will skip second run." 
+ + export CHECKPOINT_SAVE_PATH=$_CHECKPOINT_SAVE_PATH + rm -rf "$CHECKPOINT_SAVE_PATH/iter_0000$TRAIN_ITERS" + echo $((TRAIN_ITERS / 2)) >$CHECKPOINT_SAVE_PATH/latest_checkpointed_iteration.txt + break + fi + + if [[ "$TEST_TYPE" == "ckpt-resume" ]]; then + export CHECKPOINT_LOAD_PATH=$CHECKPOINT_SAVE_PATH + + rm -rf "$CHECKPOINT_LOAD_PATH/iter_0000$TRAIN_ITERS" + echo $((TRAIN_ITERS / 2)) >$CHECKPOINT_LOAD_PATH/latest_checkpointed_iteration.txt + + export RUN_NUMBER=2 + bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh + fi + + if [[ "$TEST_TYPE" == "frozen-resume" ]]; then + + # Checkpoint-resume tests load from prev run + export CHECKPOINT_LOAD_PATH=$_CHECKPOINT_LOAD_PATH + export CHECKPOINT_SAVE_PATH=/tmp/checkpoints/ + + export RUN_NUMBER=2 + bash $ROOT_DIR/tests/functional_tests/shell_test_utils/_run_training.sh + + export CHECKPOINT_SAVE_PATH=$_CHECKPOINT_SAVE_PATH + rm -rf "$CHECKPOINT_SAVE_PATH/iter_0000$TRAIN_ITERS" + echo $((TRAIN_ITERS / 2)) >$CHECKPOINT_SAVE_PATH/latest_checkpointed_iteration.txt + fi + + if [[ "$TEST_TYPE" == "release" ]]; then + SKIP_PYTEST=0 + fi + + if [[ ${RECORD_CHECKPOINTS} == "true" ]]; then + echo "Skipping Pytest during checkpoint recording." + SKIP_PYTEST=1 + fi + + # Maybe run tests + if [[ ${SKIP_PYTEST:-0} != 1 ]]; then + # Save run results + export PYTHONPATH=$ROOT_DIR + if [[ "$TEST_TYPE" == "release" ]]; then + EXTRACT_ARGS=("--is-convergence-test") + else + EXTRACT_ARGS=("--is-normal-test") + fi + + python3 $ROOT_DIR/tests/functional_tests/python_test_utils/get_test_results_from_tensorboard_logs.py \ + --logs-dir $TENSORBOARD_PATH \ + --train-iters $TRAIN_ITERS \ + --output-path ${OUTPUT_PATH}/$(basename $GOLDEN_VALUES_PATH) \ + "${EXTRACT_ARGS[@]}" + + export NVTE_ALLOW_NONDETERMINISTIC_ALGO + if [[ "${NVTE_ALLOW_NONDETERMINISTIC_ALGO}" == "1" ]]; then + ALLOW_NONDETERMINISTIC_ALGO_ARG="--allow-nondeterministic-algo" + fi + + echo "Running pytest checks against golden values" + + pytest -s -o log_cli=true --log-cli-level=info $ROOT_DIR/tests/functional_tests/python_test_utils/test_regular_pipeline.py \ + --golden-values-path $GOLDEN_VALUES_PATH \ + --tensorboard-path $TENSORBOARD_PATH \ + --model-config-path ${TRAINING_PARAMS_PATH} \ + $ALLOW_NONDETERMINISTIC_ALGO_ARG + + if [[ "$TEST_TYPE" == "ckpt-resume" || "$TEST_TYPE" == "frozen-resume" ]]; then + echo "Running pytest 1st vs 2nd run comparison" + pytest -s -o log_cli=true --log-cli-level=info $ROOT_DIR/tests/functional_tests/python_test_utils/test_resume_checkpoint_pipeline.py \ + --tensorboard-path $TENSORBOARD_PATH \ + --train-iters $TRAIN_ITERS \ + --model-config-path ${TRAINING_PARAMS_PATH} \ + $ALLOW_NONDETERMINISTIC_ALGO_ARG + fi + + if [[ ! " ${TEST_TYPES[*]} " =~ " ${TEST_TYPE} " ]]; then + echo "Test type $TEST_TYPE not yet implemented." 
+ fi + fi +done diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 0f6772f..596b254 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1,52 +1 @@ -{ "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49569, - 10.48173, - 10.48047, - 10.45353, - 10.44394, - 10.35611, - 10.13779, - 10.04017, - 9.86834, - 9.67307 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2254.0, - 2585.0, - 2101.0, - 2157.0, - 2241.0, - 2475.0, - 2890.0, - 3199.0, - 3524.0, - 3090.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.65829, - 1.27589, - 1.2782, - 1.32374, - 1.26543, - 1.26423, - 1.26203, - 1.54723, - 1.27297, - 1.26491 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49574, "5": 10.48424, "10": 10.49936, "15": 10.46628, "20": 10.44794, "25": 10.34964, "30": 10.17263, "35": 10.04261, "40": 9.90783, "45": 9.75774, "50": 9.67693}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2182.0, "5": 2584.0, "10": 2205.0, "15": 2539.0, "20": 2089.0, "25": 2604.0, "30": 2913.0, "35": 2967.0, "40": 2378.0, "45": 3923.0, "50": 3599.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1754654208.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.04517, "5": 1.25649, "10": 1.25549, "15": 1.2581, "20": 1.26387, "25": 1.25714, "30": 1.25866, "35": 1.26592, "40": 1.24291, "45": 1.23727, "50": 1.24404}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 26ee3ea..4e7ca3c 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.49574, 10.48174, 10.4804, 10.45344, 10.44396, 10.35607, 10.13786, 10.04016, 9.86838, 9.67302]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2182.0, 2462.0, 2158.0, 2112.0, 2291.0, 2485.0, 2953.0, 3287.0, 3440.0, 3059.0]}, "iteration_timing_avg": 0.8110379411764704} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49574, "5": 10.48398, "10": 10.49943, "15": 10.4663, "20": 10.44775, "25": 
10.34964, "30": 10.1728, "35": 10.04262, "40": 9.90767, "45": 9.75792, "50": 9.67684}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2182.0, "5": 2568.0, "10": 2108.0, "15": 2533.0, "20": 2151.0, "25": 2601.0, "30": 2801.0, "35": 3107.0, "40": 2294.0, "45": 3909.0, "50": 3482.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1754654208.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2300849152.0, "5": 3043311616.0, "10": 3043311616.0, "15": 3043311616.0, "20": 3043311616.0, "25": 3043311616.0, "30": 3043311616.0, "35": 3043311616.0, "40": 3043311616.0, "45": 3043311616.0, "50": 3043311616.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.67278, "5": 1.17709, "10": 1.18485, "15": 1.20778, "20": 1.16573, "25": 1.17871, "30": 1.16949, "35": 1.16897, "40": 1.16996, "45": 1.16571, "50": 1.17045}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 1293c0b..7973715 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..ebf7238 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49569, "5": 10.48402, "10": 10.49933, "15": 10.46635, "20": 10.44782, "25": 10.34968, "30": 10.17276, "35": 10.04265, "40": 9.90757, "45": 9.75784, "50": 9.67694, "55": 9.55383, "60": 9.45452, "65": 9.42152, "70": 9.30114, "75": 9.3222, "80": 9.26181, "85": 9.2967, "90": 9.23351, "95": 9.23792, "100": 9.10613}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2254.0, "5": 2635.0, "10": 2165.0, "15": 2534.0, "20": 2227.0, "25": 2559.0, "30": 2905.0, "35": 3026.0, "40": 2314.0, "45": 3924.0, "50": 3557.0, "55": 3573.0, "60": 2689.0, "65": 3434.0, "70": 3935.0, "75": 5047.0, "80": 3601.0, "85": 4133.0, "90": 4603.0, "95": 4291.0, "100": 3165.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1754654208.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0, "55": 1754654208.0, "60": 
1754654208.0, "65": 1754654208.0, "70": 1754654208.0, "75": 1754654208.0, "80": 1754654208.0, "85": 1754654208.0, "90": 1754654208.0, "95": 1754654208.0, "100": 1754654208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2503224320.0, "5": 3245686784.0, "10": 3245686784.0, "15": 3245686784.0, "20": 3245686784.0, "25": 3245686784.0, "30": 3245686784.0, "35": 3245686784.0, "40": 3245686784.0, "45": 3245686784.0, "50": 3245686784.0, "55": 3245686784.0, "60": 3245686784.0, "65": 3245686784.0, "70": 3245686784.0, "75": 3245686784.0, "80": 3245686784.0, "85": 3245686784.0, "90": 3245686784.0, "95": 3245686784.0, "100": 3245686784.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.19715, "5": 1.20808, "10": 1.19543, "15": 1.19563, "20": 1.42719, "25": 1.40901, "30": 1.18769, "35": 1.43684, "40": 1.18523, "45": 1.18204, "50": 1.18891, "55": 1.20368, "60": 1.19171, "65": 1.18981, "70": 1.17772, "75": 1.18903, "80": 1.17548, "85": 1.1753, "90": 1.36634, "95": 1.17827, "100": 1.17843}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..7c71e4a --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49574, "5": 10.48398, "10": 10.49943, "15": 10.4663, "20": 10.44775, "25": 10.34964, "30": 10.1728, "35": 10.04262, "40": 9.90767, "45": 9.75792, "50": 9.67684, "55": 9.55378, "60": 9.45458, "65": 9.42133, "70": 9.30109, "75": 9.32203, "80": 9.26184, "85": 9.29667, "90": 9.23332, "95": 9.23793, "100": 9.10611}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2182.0, "5": 2568.0, "10": 2108.0, "15": 2533.0, "20": 2151.0, "25": 2601.0, "30": 2801.0, "35": 3107.0, "40": 2294.0, "45": 3909.0, "50": 3482.0, "55": 3606.0, "60": 2653.0, "65": 3341.0, "70": 3849.0, "75": 5090.0, "80": 3613.0, "85": 4194.0, "90": 4618.0, "95": 4439.0, "100": 3224.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1754654208.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0, "55": 1754654208.0, "60": 1754654208.0, "65": 1754654208.0, "70": 1754654208.0, "75": 1754654208.0, "80": 1754654208.0, "85": 1754654208.0, "90": 1754654208.0, "95": 1754654208.0, "100": 1754654208.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.95742, "5": 1.16734, "10": 2.45473, "15": 1.45839, "20": 1.51474, "25": 1.15989, "30": 1.14801, "35": 1.14584, "40": 1.15517, "45": 1.14468, "50": 1.14969, "55": 1.15684, "60": 1.14892, "65": 1.14737, "70": 1.30233, "75": 1.37176, "80": 1.1466, "85": 1.24468, "90": 1.15157, "95": 1.15026, "100": 1.15254}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/model_config.yaml new file 
mode 100644 index 0000000..2b4dca7 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,46 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 24 + --hidden-size: 1024 + --num-attention-heads: 16 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 128 + --seq-length: 512 + --max-position-embeddings: 512 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 990000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-bert_00_text_sentence + --vocab-file: ${DATA_PATH}/vocab.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.0001 + --min-lr: 0.00001 + --lr-warmup-fraction: 0.01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --deterministic-mode: true + --use-checkpoint-args: true + --use-checkpoint-opt_param-scheduler: true + --no-gradient-accumulation-fusion: true + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --ckpt-format: torch + --attention-backend: unfused +TEST_TYPE: frozen-resume diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json index a1443c9..0549a87 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_dev.json @@ -1,70 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49566, - 10.48172, - 10.48046, - 10.45369, - 10.44391, - 10.35613, - 10.13791, - 10.04025, - 9.86848, - 9.67328 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2183.0, - 2571.0, - 2097.0, - 2118.0, - 2414.0, - 2464.0, - 2988.0, - 3223.0, - 3481.0, - 3046.0 - ] - }, - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1767237120.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0, - 1767237632.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.74859, - 1.16037, - 1.15664, - 1.28303, - 1.16087, - 1.1576, - 1.15188, - 1.1644, - 1.15171, - 1.38366 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49566, "5": 10.48412, "10": 10.49946, "15": 10.46625, "20": 10.44783, "25": 10.34967, "30": 10.17283, "35": 10.04281, "40": 9.90782, "45": 9.75786, "50": 9.67692}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2183.0, "5": 2683.0, "10": 2206.0, "15": 2493.0, "20": 2165.0, "25": 2528.0, "30": 2774.0, "35": 3054.0, "40": 2250.0, "45": 3947.0, "50": 3608.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1767237120.0, "5": 1767237120.0, 
"10": 1767237120.0, "15": 1767237120.0, "20": 1767237120.0, "25": 1768285696.0, "30": 1767237120.0, "35": 1767237120.0, "40": 1767237120.0, "45": 1767237120.0, "50": 1767237120.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.83402, "5": 1.12649, "10": 1.1312, "15": 1.12374, "20": 1.12209, "25": 1.13995, "30": 1.38104, "35": 1.14649, "40": 1.14975, "45": 1.14816, "50": 1.15079}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json index 1950cd0..5a7a837 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/golden_values_lts.json @@ -1,70 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49566, - 10.48166, - 10.48045, - 10.45348, - 10.44412, - 10.3561, - 10.13792, - 10.04026, - 9.86832, - 9.67306 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2183.0, - 2469.0, - 2115.0, - 2126.0, - 2281.0, - 2389.0, - 3013.0, - 3255.0, - 3491.0, - 3062.0 - ] - }, - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0, - 1767237120.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 14.75035, - 1.17988, - 1.18643, - 1.18301, - 1.19116, - 1.19494, - 1.54654, - 1.19342, - 1.1823, - 1.18039 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49566, "5": 10.48418, "10": 10.49947, "15": 10.46646, "20": 10.44777, "25": 10.34987, "30": 10.17278, "35": 10.04282, "40": 9.90771, "45": 9.75789, "50": 9.67683}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2183.0, "5": 2533.0, "10": 2162.0, "15": 2548.0, "20": 2180.0, "25": 2557.0, "30": 2908.0, "35": 2999.0, "40": 2252.0, "45": 3808.0, "50": 3622.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1767237120.0, "5": 1767237120.0, "10": 1767237120.0, "15": 1767237120.0, "20": 1767237120.0, "25": 1767237120.0, "30": 1767237120.0, "35": 1767237120.0, "40": 1767237120.0, "45": 1767237120.0, "50": 1767237120.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.23164, "5": 1.1819, "10": 1.18193, "15": 1.18147, "20": 1.18394, "25": 1.37105, "30": 1.18551, "35": 1.18659, "40": 1.18004, "45": 1.183, "50": 1.196}}} \ No 
newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml index 3815e30..d6743e1 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c03a361 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49574, "5": 10.48424, "10": 10.49936, "15": 10.46628, "20": 10.44794, "25": 10.34964, "30": 10.17263, "35": 10.04261, "40": 9.90783, "45": 9.75774, "50": 9.67693, "55": 9.55372, "60": 9.4546, "65": 9.42161, "70": 9.3011, "75": 9.32209, "80": 9.26181, "85": 9.2967, "90": 9.23338, "95": 9.2382, "100": 9.10601}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2182.0, "5": 2584.0, "10": 2205.0, "15": 2539.0, "20": 2089.0, "25": 2604.0, "30": 2913.0, "35": 2967.0, "40": 2378.0, "45": 3923.0, "50": 3599.0, "55": 3628.0, "60": 2617.0, "65": 3408.0, "70": 3944.0, "75": 4932.0, "80": 3598.0, "85": 4221.0, "90": 4643.0, "95": 4427.0, "100": 3170.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1754654208.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0, "55": 1754654208.0, "60": 1754654208.0, "65": 1754654208.0, "70": 1754654208.0, "75": 1754654208.0, "80": 1754654208.0, "85": 1754654208.0, "90": 1754654208.0, "95": 1754654208.0, "100": 1754654208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0, "55": 3055894528.0, "60": 3055894528.0, "65": 3055894528.0, "70": 3055894528.0, "75": 3055894528.0, "80": 3055894528.0, "85": 3055894528.0, "90": 3055894528.0, "95": 3055894528.0, "100": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.24712, "5": 1.27471, "10": 1.26014, "15": 1.26739, "20": 1.25462, "25": 1.25062, "30": 1.25515, "35": 1.25462, "40": 1.257, "45": 1.25361, "50": 1.25724, "55": 1.25002, "60": 1.25409, "65": 1.24828, "70": 1.44976, "75": 1.24651, "80": 1.45548, "85": 1.2481, "90": 1.25739, "95": 1.26824, "100": 1.25641}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..8b55b29 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49574, "5": 10.48398, "10": 10.49943, "15": 10.4663, "20": 10.44775, "25": 10.34964, "30": 10.1728, "35": 10.04262, "40": 9.90767, "45": 9.75792, "50": 9.67684, "55": 9.55378, "60": 9.45458, "65": 9.42133, "70": 9.30109, "75": 9.32203, "80": 9.26184, "85": 9.29667, "90": 9.23332, "95": 9.23793, "100": 9.10611}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2182.0, "5": 2568.0, "10": 2108.0, "15": 2533.0, "20": 2151.0, "25": 2601.0, "30": 2801.0, "35": 3107.0, "40": 2294.0, "45": 3909.0, "50": 3482.0, "55": 3606.0, "60": 2653.0, "65": 3341.0, "70": 3849.0, "75": 5090.0, "80": 3613.0, "85": 4194.0, "90": 4618.0, "95": 4439.0, "100": 3224.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1754654208.0, "5": 1754654208.0, "10": 1754654208.0, "15": 1755702784.0, "20": 1754654208.0, "25": 1754654208.0, "30": 1754654208.0, "35": 1754654208.0, "40": 1754654208.0, "45": 1754654208.0, "50": 1754654208.0, "55": 1754654208.0, "60": 1754654208.0, "65": 1754654208.0, "70": 1754654208.0, "75": 1754654208.0, "80": 1754654208.0, "85": 1754654208.0, "90": 1754654208.0, "95": 1754654208.0, "100": 1754654208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2300849152.0, "5": 3043311616.0, "10": 3043311616.0, "15": 3043311616.0, "20": 3043311616.0, "25": 3043311616.0, "30": 3043311616.0, "35": 3043311616.0, "40": 3043311616.0, "45": 3043311616.0, "50": 3043311616.0, "55": 3043311616.0, "60": 3043311616.0, "65": 3043311616.0, "70": 3043311616.0, "75": 3043311616.0, "80": 3043311616.0, "85": 3043311616.0, "90": 3043311616.0, "95": 3043311616.0, "100": 3043311616.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.69855, "5": 1.14588, "10": 1.142, "15": 1.13872, "20": 1.13346, "25": 1.13589, "30": 1.13376, "35": 1.13181, "40": 1.13363, "45": 1.1355, "50": 1.13479, "55": 1.16002, "60": 1.15257, "65": 1.1392, "70": 1.32661, "75": 1.1411, "80": 1.14105, "85": 1.15914, "90": 1.14305, "95": 1.14054, "100": 1.29661}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index e5f60e6..2a3bdc4 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git 
a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..51ae738 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49566, "5": 10.48412, "10": 10.49946, "15": 10.46625, "20": 10.44783, "25": 10.34967, "30": 10.17283, "35": 10.04281, "40": 9.90782, "45": 9.75786, "50": 9.67692, "55": 9.55379, "60": 9.45457, "65": 9.42149, "70": 9.30109, "75": 9.32221, "80": 9.26179, "85": 9.29668, "90": 9.23347, "95": 9.23813, "100": 9.10619}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2183.0, "5": 2683.0, "10": 2206.0, "15": 2493.0, "20": 2165.0, "25": 2528.0, "30": 2774.0, "35": 3054.0, "40": 2250.0, "45": 3947.0, "50": 3608.0, "55": 3626.0, "60": 2776.0, "65": 3410.0, "70": 3977.0, "75": 4842.0, "80": 3634.0, "85": 4149.0, "90": 4712.0, "95": 4379.0, "100": 3097.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1767237120.0, "5": 1767237120.0, "10": 1767237120.0, "15": 1767237120.0, "20": 1767237120.0, "25": 1767237120.0, "30": 1767237120.0, "35": 1767237120.0, "40": 1767237120.0, "45": 1767237120.0, "50": 1767237120.0, "55": 1767237120.0, "60": 1767237120.0, "65": 1767237120.0, "70": 1767237120.0, "75": 1767237120.0, "80": 1767237120.0, "85": 1767237120.0, "90": 1767237120.0, "95": 1767237120.0, "100": 1767237120.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0, "55": 3055894528.0, "60": 3055894528.0, "65": 3055894528.0, "70": 3055894528.0, "75": 3055894528.0, "80": 3055894528.0, "85": 3055894528.0, "90": 3055894528.0, "95": 3055894528.0, "100": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.79142, "5": 1.14146, "10": 1.17812, "15": 1.14757, "20": 1.13488, "25": 1.13555, "30": 1.16806, "35": 1.13623, "40": 1.13913, "45": 1.14262, "50": 1.13979, "55": 1.13753, "60": 1.13567, "65": 1.14117, "70": 1.13534, "75": 1.13522, "80": 1.13276, "85": 1.13285, "90": 1.13482, "95": 1.13479, "100": 1.13076}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..9c2d56b --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.49566, "5": 10.48418, "10": 10.49947, "15": 10.46646, "20": 10.44777, "25": 10.34987, "30": 10.17278, "35": 10.04282, "40": 9.90771, "45": 9.75789, "50": 9.67683, "55": 9.55376, "60": 9.45455, "65": 9.42139, "70": 9.30101, "75": 9.32207, "80": 9.26182, "85": 9.29681, "90": 9.23351, "95": 9.2381, 
"100": 9.10611}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2183.0, "5": 2533.0, "10": 2162.0, "15": 2548.0, "20": 2180.0, "25": 2557.0, "30": 2908.0, "35": 2999.0, "40": 2252.0, "45": 3808.0, "50": 3622.0, "55": 3598.0, "60": 2567.0, "65": 3371.0, "70": 4001.0, "75": 5046.0, "80": 3461.0, "85": 4137.0, "90": 4512.0, "95": 4417.0, "100": 3152.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1767237120.0, "5": 1767237120.0, "10": 1767237120.0, "15": 1767237120.0, "20": 1767237120.0, "25": 1767237120.0, "30": 1768285696.0, "35": 1767237120.0, "40": 1767237120.0, "45": 1767237120.0, "50": 1767237120.0, "55": 1767237120.0, "60": 1768285696.0, "65": 1767237120.0, "70": 1767237120.0, "75": 1767237120.0, "80": 1767237120.0, "85": 1767237120.0, "90": 1767237120.0, "95": 1767237120.0, "100": 1767237120.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2313432064.0, "5": 3055894528.0, "10": 3055894528.0, "15": 3055894528.0, "20": 3055894528.0, "25": 3055894528.0, "30": 3055894528.0, "35": 3055894528.0, "40": 3055894528.0, "45": 3055894528.0, "50": 3055894528.0, "55": 3055894528.0, "60": 3055894528.0, "65": 3055894528.0, "70": 3055894528.0, "75": 3055894528.0, "80": 3055894528.0, "85": 3055894528.0, "90": 3055894528.0, "95": 3055894528.0, "100": 3055894528.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.34148, "5": 1.17345, "10": 1.15021, "15": 1.17577, "20": 1.18238, "25": 1.42203, "30": 1.18928, "35": 1.18467, "40": 1.17861, "45": 1.18052, "50": 1.18213, "55": 1.19019, "60": 1.18562, "65": 1.1842, "70": 1.17896, "75": 1.17997, "80": 1.18574, "85": 1.18887, "90": 1.18285, "95": 1.18023, "100": 1.18199}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml index df52ea5..41f7839 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json index 83fd267..0edeba9 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.54308, 10.53881, 10.55633, 10.53805, 10.52589, 10.49568, 10.45958, 10.32846, 10.17264, 9.96952]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [22584.0, 20590.0, 27442.0, 22852.0, 22567.0, 20740.0, 23315.0]}, "iteration_timing_avg": 0.7692817647058824} +{"lm loss": 
{"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.54308, "5": 10.54015, "10": 10.54067, "15": 10.56164, "20": 10.54299, "25": 10.53253, "30": 10.45969, "35": 10.31933, "40": 10.18146, "45": 10.03915, "50": 9.91421}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1416466432.0, "5": 1416466432.0, "10": 1416466432.0, "15": 1416466432.0, "20": 2277237760.0, "25": 2277237760.0, "30": 2277237760.0, "35": 2277237760.0, "40": 2277237760.0, "45": 2277237760.0, "50": 2277237760.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4155522048.0, "5": 4155523072.0, "10": 4155523072.0, "15": 4155523072.0, "20": 5016294400.0, "25": 5016294400.0, "30": 5016294400.0, "35": 5016294400.0, "40": 5016294400.0, "45": 5016294400.0, "50": 5016294400.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.03874, "5": 0.9798, "10": 0.94533, "15": 0.84098, "20": 0.837, "25": 0.85187, "30": 0.85092, "35": 0.81519, "40": 0.79898, "45": 0.80833, "50": 1.05286}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 20512.0, "25": 29674.0, "30": 21582.0, "35": 23934.0, "40": 23635.0, "45": 32392.0, "50": 31688.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json index 83fd267..d1ec4fa 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.54308, 10.53881, 10.55633, 10.53805, 10.52589, 10.49568, 10.45958, 10.32846, 10.17264, 9.96952]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [22584.0, 20590.0, 27442.0, 22852.0, 22567.0, 20740.0, 23315.0]}, "iteration_timing_avg": 0.7692817647058824} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.54308, "5": 10.54015, "10": 10.54067, "15": 10.56164, "20": 10.54299, "25": 10.53253, "30": 10.45969, "35": 10.31933, "40": 10.18146, "45": 10.03915, "50": 9.91421}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1416466432.0, "5": 1416466432.0, "10": 1416466432.0, "15": 1416466432.0, "20": 2277237760.0, "25": 2277237760.0, "30": 2277237760.0, "35": 2277237760.0, "40": 2277237760.0, "45": 2277237760.0, "50": 2277237760.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4155522048.0, "5": 4155523072.0, "10": 4155523072.0, "15": 4155523072.0, "20": 5016294400.0, "25": 5016294400.0, "30": 5016294400.0, "35": 5016294400.0, "40": 5016294400.0, "45": 5016294400.0, "50": 5016294400.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.89307, "5": 0.78162, "10": 0.83533, "15": 0.81477, "20": 0.89929, "25": 1.00162, "30": 0.78191, "35": 0.79314, "40": 1.12991, "45": 0.97013, "50": 0.80459}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 20512.0, "25": 29674.0, "30": 21582.0, "35": 23934.0, "40": 23635.0, "45": 32392.0, "50": 31688.0}}} \ No 
newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml index d6ce45e..69c1b0a 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..49db3d7 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.54308, "5": 10.54015, "10": 10.54067, "15": 10.56164, "20": 10.54299, "25": 10.53253, "30": 10.45969, "35": 10.31933, "40": 10.18146, "45": 10.03915, "50": 9.91421, "55": 9.75787, "60": 9.62542, "65": 9.56458, "70": 9.44843, "75": 9.43593, "80": 9.35302, "85": 9.39268, "90": 9.29853, "95": 9.29715, "100": 9.17013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1416466432.0, "5": 1416466432.0, "10": 1416466432.0, "15": 1416466432.0, "20": 2277237760.0, "25": 2277237760.0, "30": 2277237760.0, "35": 2277237760.0, "40": 2277237760.0, "45": 2277237760.0, "50": 2277237760.0, "55": 2277237760.0, "60": 2277237760.0, "65": 2277237760.0, "70": 2277237760.0, "75": 2277237760.0, "80": 2277237760.0, "85": 2277237760.0, "90": 2277237760.0, "95": 2277237760.0, "100": 2277237760.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4155522048.0, "5": 4155523072.0, "10": 4155523072.0, "15": 4155523072.0, "20": 5016294400.0, "25": 5016294400.0, "30": 5016294400.0, "35": 5016294400.0, "40": 5016294400.0, "45": 5016294400.0, "50": 5016294400.0, "55": 5016294400.0, "60": 5016294400.0, "65": 5016294400.0, "70": 5016294400.0, "75": 5016294400.0, "80": 5016294400.0, "85": 5016294400.0, "90": 5016294400.0, "95": 5016294400.0, "100": 5016294400.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.16083, "5": 0.87236, "10": 0.96745, "15": 0.76125, "20": 0.80903, "25": 0.79936, "30": 0.91232, "35": 0.78252, "40": 0.7981, "45": 0.7982, "50": 1.16802, "55": 1.0754, "60": 0.81262, "65": 0.78986, "70": 1.17774, "75": 0.90398, "80": 0.89328, "85": 0.8043, "90": 0.8754, "95": 0.90921, "100": 0.82266}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 20512.0, "25": 29674.0, "30": 21582.0, "35": 23934.0, "40": 23635.0, "45": 32392.0, "50": 31688.0, "55": 30923.0, "60": 24642.0, "65": 26839.0, "70": 31192.0, "75": 40009.0, "80": 29301.0, "85": 31592.0, "90": 33685.0, "95": 33411.0, "100": 22706.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json 
b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..e7a0416 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.54308, "5": 10.54015, "10": 10.54067, "15": 10.56164, "20": 10.54299, "25": 10.53253, "30": 10.45969, "35": 10.31933, "40": 10.18146, "45": 10.03915, "50": 9.91421, "55": 9.75787, "60": 9.62542, "65": 9.56458, "70": 9.44843, "75": 9.43593, "80": 9.35302, "85": 9.39268, "90": 9.29853, "95": 9.29715, "100": 9.17013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1416466432.0, "5": 1416466432.0, "10": 1416466432.0, "15": 1416466432.0, "20": 2277237760.0, "25": 2277237760.0, "30": 2277237760.0, "35": 2277237760.0, "40": 2277237760.0, "45": 2277237760.0, "50": 2277237760.0, "55": 2277237760.0, "60": 2277237760.0, "65": 2277237760.0, "70": 2277237760.0, "75": 2277237760.0, "80": 2277237760.0, "85": 2277237760.0, "90": 2277237760.0, "95": 2277237760.0, "100": 2277237760.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4155522048.0, "5": 4155523072.0, "10": 4155523072.0, "15": 4155523072.0, "20": 5016294400.0, "25": 5016294400.0, "30": 5016294400.0, "35": 5016294400.0, "40": 5016294400.0, "45": 5016294400.0, "50": 5016294400.0, "55": 5016294400.0, "60": 5016294400.0, "65": 5016294400.0, "70": 5016294400.0, "75": 5016294400.0, "80": 5016294400.0, "85": 5016294400.0, "90": 5016294400.0, "95": 5016294400.0, "100": 5016294400.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 18.37849, "5": 0.83211, "10": 0.87143, "15": 0.80812, "20": 0.92064, "25": 1.00154, "30": 0.80774, "35": 0.78379, "40": 1.05451, "45": 0.79216, "50": 0.81105, "55": 1.02367, "60": 0.86175, "65": 0.80509, "70": 1.11382, "75": 0.77236, "80": 0.81252, "85": 0.95294, "90": 0.80408, "95": 0.76715, "100": 1.02921}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 20512.0, "25": 29674.0, "30": 21582.0, "35": 23934.0, "40": 23635.0, "45": 32392.0, "50": 31688.0, "55": 30923.0, "60": 24642.0, "65": 26839.0, "70": 31192.0, "75": 40009.0, "80": 29301.0, "85": 31592.0, "90": 33685.0, "95": 33411.0, "100": 22706.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 0a0c079..b9cc330 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 
5e5b762..226b2b7 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.43755, 10.43587, 10.44704, 10.44395, 10.44965, 10.44295, 10.32757, 10.23341, 10.09049, 9.93294]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [27979.0, 20991.0, 29735.0, 24779.0, 26808.0, 33075.0, 24387.0]}, "iteration_timing_avg": 0.7523635294117648} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.43755, "5": 10.43739, "10": 10.45582, "15": 10.45606, "20": 10.44388, "25": 10.42748, "30": 10.39565, "35": 10.24752, "40": 10.11101, "45": 9.99773, "50": 9.88142}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1210690048.0, "5": 1210690048.0, "10": 1210690048.0, "15": 1210690048.0, "20": 1952102912.0, "25": 1952102912.0, "30": 1952102912.0, "35": 1952102912.0, "40": 1952102912.0, "45": 1952102912.0, "50": 1952102912.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2127591936.0, "5": 2127592960.0, "10": 2127592960.0, "15": 2127592960.0, "20": 2867957248.0, "25": 2867957248.0, "30": 2867957248.0, "35": 2867957248.0, "40": 2867957248.0, "45": 2867957248.0, "50": 2867957248.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.58907, "5": 1.10935, "10": 1.09468, "15": 1.08912, "20": 1.39243, "25": 1.1296, "30": 1.11603, "35": 1.34495, "40": 1.39742, "45": 1.11931, "50": 1.12017}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 22585.0, "25": 33970.0, "30": 23056.0, "35": 26873.0, "40": 22716.0, "45": 35165.0, "50": 31348.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 5e5b762..f986360 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.43755, 10.43587, 10.44704, 10.44395, 10.44965, 10.44295, 10.32757, 10.23341, 10.09049, 9.93294]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [27979.0, 20991.0, 29735.0, 24779.0, 26808.0, 33075.0, 24387.0]}, "iteration_timing_avg": 0.7523635294117648} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.43755, "5": 10.43739, "10": 10.45582, "15": 10.45606, "20": 10.44388, "25": 10.42748, "30": 10.39565, "35": 10.24752, "40": 10.11101, "45": 9.99773, "50": 9.88142}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1210690048.0, "5": 1210690048.0, "10": 1210690048.0, "15": 1210690048.0, "20": 1952102912.0, "25": 1952102912.0, "30": 1952102912.0, "35": 1952102912.0, "40": 1952102912.0, "45": 1952102912.0, "50": 1952102912.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2127591936.0, "5": 2127592960.0, "10": 2127592960.0, "15": 2127592960.0, "20": 2867957248.0, "25": 2867957248.0, 
"30": 2867957248.0, "35": 2867957248.0, "40": 2867957248.0, "45": 2867957248.0, "50": 2867957248.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.34618, "5": 1.16528, "10": 1.16219, "15": 1.16386, "20": 1.18006, "25": 1.37981, "30": 1.17956, "35": 1.39195, "40": 1.18406, "45": 1.19213, "50": 1.18956}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 22585.0, "25": 33970.0, "30": 23056.0, "35": 26873.0, "40": 22716.0, "45": 35165.0, "50": 31348.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 40b2d06..f62eb63 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..15f1ad3 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.43755, "5": 10.43739, "10": 10.45582, "15": 10.45606, "20": 10.44388, "25": 10.42748, "30": 10.39565, "35": 10.24752, "40": 10.11101, "45": 9.99773, "50": 9.88142, "55": 9.73343, "60": 9.62126, "65": 9.55968, "70": 9.44177, "75": 9.43855, "80": 9.35357, "85": 9.38316, "90": 9.30523, "95": 9.30959, "100": 9.17509}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1210690048.0, "5": 1210690048.0, "10": 1210690048.0, "15": 1210690048.0, "20": 1952102912.0, "25": 1952102912.0, "30": 1952102912.0, "35": 1952102912.0, "40": 1952102912.0, "45": 1952102912.0, "50": 1952102912.0, "55": 1952102912.0, "60": 1952102912.0, "65": 1952102912.0, "70": 1952102912.0, "75": 1952102912.0, "80": 1952102912.0, "85": 1952102912.0, "90": 1952102912.0, "95": 1952102912.0, "100": 1952102912.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2127591936.0, "5": 2127592960.0, "10": 2127592960.0, "15": 2127592960.0, "20": 2867957248.0, "25": 2867957248.0, "30": 2867957248.0, "35": 2867957248.0, "40": 2867957248.0, "45": 2867957248.0, "50": 2867957248.0, "55": 2867957248.0, "60": 2867957248.0, "65": 2867957248.0, "70": 2867957248.0, "75": 2867957248.0, "80": 2867957248.0, "85": 2867957248.0, "90": 2867957248.0, "95": 2867957248.0, "100": 2867957248.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.13637, "5": 1.33125, "10": 1.3265, "15": 1.31217, "20": 1.64322, "25": 1.33727, "30": 1.34028, "35": 1.55181, "40": 1.6152, "45": 1.37118, "50": 1.37854, "55": 1.29942, "60": 1.29229, "65": 1.30075, "70": 1.29686, "75": 1.36267, "80": 1.3054, "85": 1.31603, "90": 1.28771, "95": 
1.29886, "100": 1.29338}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 22585.0, "25": 33970.0, "30": 23056.0, "35": 26873.0, "40": 22716.0, "45": 35165.0, "50": 31348.0, "55": 32824.0, "60": 23375.0, "65": 26746.0, "70": 30011.0, "75": 39617.0, "80": 31497.0, "85": 31636.0, "90": 32832.0, "95": 38873.0, "100": 24755.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..6283bba --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.43755, "5": 10.43739, "10": 10.45582, "15": 10.45606, "20": 10.44388, "25": 10.42748, "30": 10.39565, "35": 10.24752, "40": 10.11101, "45": 9.99773, "50": 9.88142, "55": 9.73343, "60": 9.62126, "65": 9.55968, "70": 9.44177, "75": 9.43855, "80": 9.35357, "85": 9.38316, "90": 9.30523, "95": 9.30959, "100": 9.17509}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1210690048.0, "5": 1210690048.0, "10": 1210690048.0, "15": 1210690048.0, "20": 1952102912.0, "25": 1952102912.0, "30": 1952102912.0, "35": 1952102912.0, "40": 1952102912.0, "45": 1952102912.0, "50": 1952102912.0, "55": 1952102912.0, "60": 1952102912.0, "65": 1952102912.0, "70": 1952102912.0, "75": 1952102912.0, "80": 1952102912.0, "85": 1952102912.0, "90": 1952102912.0, "95": 1952102912.0, "100": 1952102912.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2127591936.0, "5": 2127592960.0, "10": 2127592960.0, "15": 2127592960.0, "20": 2867957248.0, "25": 2867957248.0, "30": 2867957248.0, "35": 2867957248.0, "40": 2867957248.0, "45": 2867957248.0, "50": 2867957248.0, "55": 2867957248.0, "60": 2867957248.0, "65": 2867957248.0, "70": 2867957248.0, "75": 2867957248.0, "80": 2867957248.0, "85": 2867957248.0, "90": 2867957248.0, "95": 2867957248.0, "100": 2867957248.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.4846, "5": 1.29736, "10": 1.172, "15": 1.15505, "20": 1.16055, "25": 1.56732, "30": 1.18656, "35": 1.37506, "40": 1.17417, "45": 1.16819, "50": 1.17821, "55": 1.16442, "60": 1.19327, "65": 1.1497, "70": 1.17034, "75": 1.15327, "80": 1.1535, "85": 1.16731, "90": 1.15301, "95": 1.1516, "100": 1.16053}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 22585.0, "25": 33970.0, "30": 23056.0, "35": 26873.0, "40": 22716.0, "45": 35165.0, "50": 31348.0, "55": 32824.0, "60": 23375.0, "65": 26746.0, "70": 30011.0, "75": 39617.0, "80": 31497.0, "85": 31636.0, "90": 32832.0, "95": 38873.0, "100": 24755.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 567f459..8db8d5a 100644 --- a/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -20,8 
+20,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json index bfc68cb..ee8073d 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49411, - 10.4825, - 10.49242, - 10.47802, - 10.46608, - 10.35193, - 10.17693, - 10.07728, - 9.88753, - 9.68034 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1931.0, - 2555.0, - 2017.0, - 2135.0, - 2440.0, - 2464.0, - 3070.0, - 3006.0, - 2932.0, - 2303.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.94975, - 0.67196, - 0.67378, - 0.66862, - 0.69618, - 0.66936, - 0.67757, - 0.67189, - 0.67519, - 0.67762 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49405, "5": 10.49924, "10": 10.49606, "15": 10.48729, "20": 10.4657, "25": 10.39493, "30": 10.21023, "35": 10.0733, "40": 9.93987, "45": 9.75668, "50": 9.69018}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2018.0, "5": 2803.0, "10": 2242.0, "15": 2551.0, "20": 2294.0, "25": 2736.0, "30": 2631.0, "35": 2878.0, "40": 1867.0, "45": 4062.0, "50": 3040.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3375511040.0, "5": 3375511040.0, "10": 3375511040.0, "15": 3375511040.0, "20": 3375511040.0, "25": 3375511040.0, "30": 3375511040.0, "35": 3375511040.0, "40": 3375511040.0, "45": 3375511040.0, "50": 3375511040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4165166080.0, "5": 5630557184.0, "10": 5630557184.0, "15": 5630557184.0, "20": 5630557184.0, "25": 5630557184.0, "30": 5630557184.0, "35": 5630557184.0, "40": 5630557184.0, "45": 5630557184.0, "50": 5630557184.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.38574, "5": 0.7037, "10": 0.74678, "15": 0.67056, "20": 0.66842, "25": 0.93898, "30": 0.68891, "35": 0.87958, "40": 0.66027, "45": 0.66606, "50": 0.6644}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json index 25faec6..7c25cb8 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.49405, 10.48276, 10.49249, 10.47813, 10.46623, 10.35183, 10.17697, 10.07728, 9.8875, 9.68029]}, "num-zeros": {"start_step": 0, "end_step": 50, 
"step_interval": 5, "values": [2018.0, 2636.0, 2067.0, 2225.0, 2555.0, 2554.0, 2969.0, 2935.0, 2967.0, 2287.0]}, "iteration_timing_avg": 0.5847132352941178} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.49405, "5": 10.49933, "10": 10.49631, "15": 10.4873, "20": 10.46572, "25": 10.39496, "30": 10.2104, "35": 10.07333, "40": 9.94011, "45": 9.75651, "50": 9.69025}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2018.0, "5": 2740.0, "10": 2260.0, "15": 2649.0, "20": 2205.0, "25": 2675.0, "30": 2687.0, "35": 2930.0, "40": 1853.0, "45": 4016.0, "50": 2978.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3375511040.0, "5": 3375511040.0, "10": 3375511040.0, "15": 3375511040.0, "20": 3375511040.0, "25": 3375511040.0, "30": 3375511040.0, "35": 3375511040.0, "40": 3375511040.0, "45": 3375511040.0, "50": 3375511040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4153629696.0, "5": 5620069376.0, "10": 5620069376.0, "15": 5620069376.0, "20": 5620069376.0, "25": 5620069376.0, "30": 5620069376.0, "35": 5620069376.0, "40": 5620069376.0, "45": 5620069376.0, "50": 5620069376.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.76357, "5": 0.63754, "10": 0.61134, "15": 0.62429, "20": 0.60864, "25": 0.8008, "30": 0.61228, "35": 0.84121, "40": 0.6217, "45": 0.62022, "50": 0.61774}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml index 0360c72..6097e94 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 @@ -41,4 +41,4 @@ MODEL_ARGS: --bf16: true --ckpt-format: torch --attention-backend: unfused -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json index 915df96..2275b15 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.46796, - 10.45723, - 10.44911, - 10.44107, - 10.41739, - 10.34626, - 10.11387, - 10.0439, - 9.86702, - 9.679 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2404.0, - 2610.0, - 2173.0, - 2312.0, - 2371.0, - 2652.0, - 3089.0, - 3200.0, - 3497.0, - 3075.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 15.80389, - 0.94155, - 0.88518, - 1.22442, - 
0.86955, - 0.85166, - 1.02329, - 1.07525, - 0.90283, - 0.88308 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.4681, "5": 10.45367, "10": 10.45093, "15": 10.45825, "20": 10.42046, "25": 10.34044, "30": 10.18377, "35": 10.0388, "40": 9.89825, "45": 9.7511, "50": 9.67015}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2373.0, "5": 2811.0, "10": 2502.0, "15": 2735.0, "20": 2341.0, "25": 2828.0, "30": 2945.0, "35": 3125.0, "40": 2406.0, "45": 3739.0, "50": 3475.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2032164352.0, "5": 2032164352.0, "10": 2032164352.0, "15": 2032164352.0, "20": 2032164352.0, "25": 2032164352.0, "30": 2032164352.0, "35": 2032164352.0, "40": 2032164352.0, "45": 2032164352.0, "50": 2032164352.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4360259072.0, "5": 5220507136.0, "10": 5220507136.0, "15": 5220507136.0, "20": 5220507136.0, "25": 5220507136.0, "30": 5220507136.0, "35": 5220507136.0, "40": 5220507136.0, "45": 5220507136.0, "50": 5220507136.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.15888, "5": 1.41456, "10": 0.87396, "15": 0.86252, "20": 0.86858, "25": 1.09113, "30": 0.82733, "35": 0.83789, "40": 0.86729, "45": 1.13695, "50": 1.09113}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json index 6b516a3..fd0b12f 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.4681, - 10.45734, - 10.4491, - 10.44121, - 10.41764, - 10.34626, - 10.11384, - 10.04383, - 9.86686, - 9.67906 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2373.0, - 2593.0, - 2187.0, - 2325.0, - 2407.0, - 2627.0, - 3036.0, - 3109.0, - 3568.0, - 3019.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 22.86543, - 0.84168, - 0.92727, - 0.84734, - 0.93196, - 0.86308, - 0.86633, - 0.86112, - 0.87598, - 1.02461 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.4681, "5": 10.45367, "10": 10.45093, "15": 10.45815, "20": 10.42047, "25": 10.34052, "30": 10.18387, "35": 10.03878, "40": 9.89837, "45": 9.75113, "50": 9.67035}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2373.0, "5": 2811.0, "10": 2502.0, "15": 2700.0, "20": 2461.0, "25": 2883.0, "30": 2859.0, "35": 3009.0, "40": 2378.0, "45": 3799.0, "50": 3628.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2032164352.0, "5": 2032164352.0, "10": 2032164352.0, "15": 2032164352.0, "20": 2032164352.0, "25": 2032164352.0, "30": 2032164352.0, "35": 2032164352.0, "40": 2032164352.0, "45": 2032164352.0, "50": 2032164352.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4341384704.0, "5": 5201632768.0, 
"10": 5201632768.0, "15": 5201632768.0, "20": 5201632768.0, "25": 5201632768.0, "30": 5201632768.0, "35": 5201632768.0, "40": 5201632768.0, "45": 5201632768.0, "50": 5201632768.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 19.82133, "5": 0.83907, "10": 0.86137, "15": 0.79121, "20": 0.88909, "25": 0.97007, "30": 0.76254, "35": 0.78908, "40": 1.03257, "45": 0.78678, "50": 0.76108}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml index 5bb4ae6..9dc8c03 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 @@ -42,4 +42,4 @@ MODEL_ARGS: --bf16: true --ckpt-format: torch --attention-backend: unfused -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json index 65e3ca2..201d0bd 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.42085, - 10.42901, - 10.43576, - 10.40804, - 10.38463, - 10.32426, - 10.13148, - 10.04317, - 9.86257, - 9.65771 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3252.0, - 2595.0, - 3240.0, - 3429.0, - 3463.0, - 3509.0, - 4065.0, - 4114.0, - 4651.0, - 4253.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.83012, - 2.26196, - 2.22779, - 2.22677, - 2.23847, - 2.24307, - 2.23859, - 2.23544, - 2.2414, - 2.25107 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.4209, "5": 10.44499, "10": 10.4421, "15": 10.43146, "20": 10.40923, "25": 10.32639, "30": 10.18342, "35": 10.03454, "40": 9.91262, "45": 9.74932, "50": 9.66164}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2230.0, "5": 2902.0, "10": 3454.0, "15": 2607.0, "20": 3332.0, "25": 3721.0, "30": 3878.0, "35": 4165.0, "40": 3354.0, "45": 4875.0, "50": 4729.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1632405504.0, "5": 1632405504.0, "10": 1632405504.0, "15": 1632405504.0, "20": 1632405504.0, "25": 1632405504.0, "30": 1632405504.0, "35": 1632405504.0, "40": 1632405504.0, "45": 1632405504.0, "50": 1632405504.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2475474432.0, "5": 3175492096.0, "10": 3175492096.0, "15": 3175492096.0, "20": 3175493120.0, "25": 3175493120.0, 
"30": 3176545280.0, "35": 3176545280.0, "40": 3176545280.0, "45": 3176545280.0, "50": 3176545280.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.36768, "5": 2.77129, "10": 2.2485, "15": 2.24504, "20": 2.2713, "25": 2.27356, "30": 2.26503, "35": 2.2618, "40": 2.25789, "45": 2.58105, "50": 2.26297}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json index 4c21933..b069785 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.4209, - 10.42905, - 10.43557, - 10.40806, - 10.38457, - 10.32414, - 10.13167, - 10.04335, - 9.86262, - 9.65771 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2249.0, - 3640.0, - 3249.0, - 2318.0, - 3512.0, - 3601.0, - 4111.0, - 3175.0, - 4713.0, - 3320.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12.51144, - 2.1285, - 2.28886, - 2.24273, - 2.20818, - 2.20231, - 2.18786, - 2.17554, - 2.213, - 2.18811 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.4209, "5": 10.44497, "10": 10.4422, "15": 10.43154, "20": 10.40919, "25": 10.32623, "30": 10.18344, "35": 10.03437, "40": 9.91272, "45": 9.74952, "50": 9.66165}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2249.0, "5": 3813.0, "10": 2393.0, "15": 3636.0, "20": 2343.0, "25": 3815.0, "30": 3843.0, "35": 4191.0, "40": 3318.0, "45": 4876.0, "50": 4696.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1632405504.0, "5": 1632405504.0, "10": 1632405504.0, "15": 1632405504.0, "20": 1632405504.0, "25": 1632405504.0, "30": 1632405504.0, "35": 1632405504.0, "40": 1632405504.0, "45": 1632405504.0, "50": 1632405504.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2458703360.0, "5": 3155576320.0, "10": 3155576320.0, "15": 3155576320.0, "20": 3155576320.0, "25": 3155576320.0, "30": 3155576320.0, "35": 3155576320.0, "40": 3155576320.0, "45": 3155576320.0, "50": 3155576320.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.80095, "5": 2.34418, "10": 2.03688, "15": 2.03547, "20": 2.0237, "25": 2.06209, "30": 2.04226, "35": 2.19438, "40": 2.04294, "45": 2.0364, "50": 2.03778}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml index 4ef1092..5aa269c 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: 
${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 @@ -41,4 +41,4 @@ MODEL_ARGS: --bf16: true --ckpt-format: torch --attention-backend: unfused -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json index 428150f..9e26dfe 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json @@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49101, - 10.49526, - 10.48682, - 10.48817, - 10.49415, - 10.4724, - 10.42265, - 10.29901, - 10.1572, - 9.97594 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12.56945, - 0.58599, - 0.58451, - 0.68178, - 0.6056, - 0.609, - 0.59965, - 0.60618, - 0.60152, - 0.59945 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 34, - "step_interval": 5, - "values": [ - 17032.0, - 16918.0, - 19957.0, - 18761.0, - 25689.0, - 19897.0, - 22224.0 - ] - } -} \ No newline at end of file +{} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json index ab9cc2b..653085e 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json @@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.50096, - 10.48594, - 10.4936, - 10.48501, - 10.50417, - 10.4773, - 10.42154, - 10.29716, - 10.15831, - 9.96751 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12.85743, - 0.58922, - 0.54928, - 0.54147, - 0.56305, - 0.56895, - 0.56282, - 0.56247, - 0.56751, - 0.69574 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 34, - "step_interval": 5, - "values": [ - 16595.0, - 18537.0, - 19509.0, - 18532.0, - 26712.0, - 20164.0, - 20981.0 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.50096, "5": 10.49354, "10": 10.49659, "15": 10.46666, "20": 10.49707, "25": 10.47716, "30": 10.43665, "35": 10.30674, "40": 10.15647, "45": 10.03905, "50": 9.9192}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2278173696.0, "5": 2278173696.0, "10": 2278173696.0, "15": 2278173696.0, "20": 3743563776.0, "25": 3743563776.0, "30": 3743563776.0, "35": 3743563776.0, "40": 3743563776.0, "45": 3743563776.0, "50": 3743563776.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3788467712.0, "5": 3788468736.0, "10": 3788468736.0, "15": 3788468736.0, "20": 5254907392.0, "25": 5254907392.0, "30": 5254907392.0, "35": 5254907392.0, "40": 5254907392.0, "45": 5254907392.0, "50": 5254907392.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.64757, "5": 0.57904, "10": 0.55485, "15": 0.54428, "20": 0.56278, "25": 0.56384, 
"30": 0.5642, "35": 0.58037, "40": 0.59811, "45": 0.57054, "50": 0.56519}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 14368.0, "25": 19189.0, "30": 21709.0, "35": 18201.0, "40": 19483.0, "45": 24956.0, "50": 21241.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml index f45b7b3..0b7a609 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml @@ -21,8 +21,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 @@ -44,4 +44,4 @@ MODEL_ARGS: --apply-query-key-layer-scaling: true --ckpt-format: torch --attention-backend: unfused -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json index 9cd1672..9e26dfe 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json @@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.49734, - 10.49243, - 10.49325, - 10.50311, - 10.48985, - 10.4721, - 10.41217, - 10.2805, - 10.14052, - 9.94191 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 8.58282, - 2.06311, - 2.05789, - 2.24493, - 2.05273, - 2.05118, - 2.05666, - 2.04533, - 2.05152, - 2.04761 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 34, - "step_interval": 5, - "values": [ - 26081.0, - 18799.0, - 24479.0, - 23782.0, - 21056.0, - 19877.0, - 19774.0 - ] - } -} \ No newline at end of file +{} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json index a09f1d9..16d8b3c 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json @@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.48685, - 10.49276, - 10.48837, - 10.51348, - 10.49396, - 10.4755, - 10.41921, - 10.28044, - 10.14256, - 9.94738 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8221, - 1.96114, - 1.9401, - 2.22227, - 1.94508, - 1.94212, - 1.93958, - 1.94562, - 1.9442, - 1.94606 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 34, - "step_interval": 5, - "values": [ - 26876.0, - 19339.0, - 24146.0, - 23625.0, - 21440.0, - 17865.0, - 19282.0 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 10.48685, "5": 10.48748, "10": 10.49154, "15": 10.49856, "20": 10.49971, "25": 10.47921, "30": 10.44762, "35": 10.29221, "40": 10.1426, "45": 10.01072, "50": 9.88753}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1110956544.0, "5": 1110956544.0, "10": 1110956544.0, "15": 1110956544.0, "20": 1809925632.0, "25": 1809925632.0, "30": 1809925632.0, "35": 1809925632.0, "40": 1809925632.0, "45": 1809925632.0, "50": 1809925632.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2286739456.0, "5": 2286740480.0, "10": 2286740480.0, "15": 2286740480.0, "20": 2983612416.0, "25": 2983612416.0, "30": 2983612416.0, "35": 2983612416.0, "40": 2983612416.0, "45": 2983612416.0, "50": 2983612416.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.15965, "5": 1.93164, "10": 2.35467, "15": 1.92211, "20": 1.96054, "25": 1.91619, "30": 1.92166, "35": 1.91436, "40": 1.91896, "45": 1.92099, "50": 1.92773}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 21908.0, "25": "nan", "30": 21225.0, "35": 23321.0, "40": 20665.0, "45": 34638.0, "50": 29484.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml index d8832ea..d1f1819 100644 --- a/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml @@ -21,8 +21,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 990000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-bert_00_text_sentence --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 @@ -44,4 +44,4 @@ MODEL_ARGS: --apply-query-key-layer-scaling: true --ckpt-format: torch --attention-backend: unfused -TEST_TYPE: regular \ No newline at end of file +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.10.0.json new file mode 100644 index 0000000..ff36f31 --- /dev/null +++ b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": 10.51817, "5": 10.5175, "10": 10.51541, "15": 10.51677, "20": 10.13032, "25": 9.5518, "30": 9.44404, "35": 9.15174, "40": 9.05764, "45": 8.99256, "50": 8.75433, "55": 8.8141, "60": 8.47097, "65": 8.53559, "70": 8.20228, "75": 8.31011, "80": 7.96546, "85": 7.87162, "90": 7.66496, "95": 7.67741, "100": 7.63736, "105": 7.37164, "110": 7.52373, "115": 7.16816, "120": 6.99674, "125": 7.13817, "130": 6.93339, "135": 6.84533, "140": 7.05697, "145": 6.99313, "150": 6.99189, "155": 6.93579, "160": 6.91743, "165": 7.02675, "170": 6.84859, "175": 6.89494, "180": 6.58084, "185": 6.98964, "190": 6.97544, "195": 6.83777, "200": 6.86351, "205": 6.77972, "210": 6.8278, "215": 6.85589, "220": 6.64778, "225": 6.77656, "230": 6.83728, "235": 6.6403, "240": 6.56299, "245": 6.92769, "250": 6.67212, "255": 6.59089, "260": 6.6886, "265": 6.55814, "270": 6.47987, "275": 6.77552, 
"280": 6.61728, "285": 6.49133, "290": 6.58778, "295": 6.67183, "300": 6.35487, "305": 6.35101, "310": 6.95761, "315": 6.78118, "320": 6.73969, "325": 6.52394, "330": 6.67967, "335": 6.58673, "340": 6.44122, "345": 6.60091, "350": 6.4961, "355": 6.65303, "360": 6.55435, "365": 6.69325, "370": 6.52082, "375": 6.63467, "380": 6.35463, "385": 6.47242, "390": 6.52105, "395": 6.31308, "400": 6.58662, "405": 6.48288, "410": 6.58296, "415": 6.5595, "420": 6.49457, "425": 6.41838, "430": 6.56902, "435": 6.39676, "440": 6.56238, "445": 6.46871, "450": 6.62489, "455": 6.51513, "460": 6.82933, "465": 6.75526, "470": 6.4396, "475": 6.18229, "480": 6.48381, "485": 6.54946, "490": 6.21873, "495": 6.28572, "500": 6.52573, "505": 6.45653, "510": 6.50992, "515": 6.172, "520": 6.55684, "525": 6.50698, "530": 6.31881, "535": 6.13755, "540": 6.58936, "545": 6.42254, "550": 6.42606, "555": 6.39976, "560": 6.42189, "565": 6.30133, "570": 6.38344, "575": 6.587, "580": 6.46785, "585": 6.46954, "590": 6.21249, "595": 6.2525, "600": 6.44785, "605": 6.37774, "610": 6.45558, "615": 6.43486, "620": 6.47888, "625": 6.38285, "630": 6.47552, "635": 6.43607, "640": 6.34837, "645": 6.20656, "650": 6.30271, "655": 6.55191, "660": 6.33696, "665": 6.3171, "670": 6.42225, "675": 6.31522, "680": 6.38141, "685": 6.51535, "690": 6.29353, "695": 6.18153, "700": 6.09304, "705": 6.34929, "710": 6.1791, "715": 6.4177, "720": 6.42039, "725": 6.29226, "730": 6.38785, "735": 6.4945, "740": 6.63836, "745": 6.21078, "750": 6.22274, "755": 6.00822, "760": 6.54247, "765": 6.29186, "770": 6.43211, "775": 6.12405, "780": 6.26062, "785": 6.28638, "790": 6.45124, "795": 6.40875, "800": 6.1263, "805": 6.376, "810": 6.23893, "815": 6.16663, "820": 6.3575, "825": 6.62729, "830": 6.36179, "835": 6.19003, "840": 6.14768, "845": 6.4116, "850": 6.45332, "855": 6.48173, "860": 6.19826, "865": 6.36353, "870": 6.31679, "875": 6.23294, "880": 6.28484, "885": 6.35442, "890": 6.20864, "895": 6.2911, "900": 6.33959, "905": 6.23518, "910": 6.16488, "915": 6.44247, "920": 6.39225, "925": 6.17239, "930": 6.07364, "935": 6.44404, "940": 6.30443, "945": 6.06343, "950": 6.23702, "955": 6.02012, "960": 6.35362, "965": 6.18563, "970": 6.35383, "975": 6.08907, "980": 6.04181, "985": 6.33339, "990": 6.18815, "995": 6.33018, "1000": 6.23352, "1005": 6.4058, "1010": 6.19362, "1015": 6.11686, "1020": 6.08203, "1025": 6.35785, "1030": 6.11803, "1035": 6.03195, "1040": 6.01018, "1045": 6.60093, "1050": 6.34873, "1055": 6.09027, "1060": 6.16403, "1065": 5.85887, "1070": 6.25593, "1075": 5.94946, "1080": 6.10403, "1085": 6.13551, "1090": 6.02268, "1095": 6.25209, "1100": 6.08705, "1105": 6.29611, "1110": 6.19967, "1115": 6.1011, "1120": 6.16046, "1125": 6.14261, "1130": 5.95602, "1135": 6.32195, "1140": 6.40249, "1145": 6.20263, "1150": 6.09901, "1155": 6.03405, "1160": 6.24143, "1165": 6.22108, "1170": 6.39871, "1175": 6.05912, "1180": 6.13279, "1185": 6.24595, "1190": 6.3738, "1195": 6.2675, "1200": 6.02483, "1205": 6.24352, "1210": 6.24381, "1215": 6.08485, "1220": 6.06558, "1225": 6.41096, "1230": 6.22266, "1235": 6.00361, "1240": 6.57248, "1245": 6.07717, "1250": 5.98109, "1255": 6.08225, "1260": 6.1592, "1265": 6.05607, "1270": 5.94447, "1275": 6.10328, "1280": 5.88732, "1285": 5.91185, "1290": 6.11708, "1295": 6.10313, "1300": 6.24041, "1305": 5.96596, "1310": 6.03315, "1315": 6.02144, "1320": 6.07709, "1325": 6.25622, "1330": 6.06937, "1335": 6.03641, "1340": 6.21518, "1345": 6.06063, "1350": 5.92036, "1355": 6.26228, "1360": 6.32347, "1365": 6.07062, "1370": 
6.03043, "1375": 6.2884, "1380": 6.14084, "1385": 5.91489, "1390": 5.98972, "1395": 6.00301, "1400": 6.21583, "1405": 5.99994, "1410": 6.1255, "1415": 6.33421, "1420": 6.14871, "1425": 6.1415, "1430": 5.9813, "1435": 6.35489, "1440": 6.31621, "1445": 6.28693, "1450": 6.07265, "1455": 5.78189, "1460": 6.23124, "1465": 5.9566, "1470": 6.00064, "1475": 6.16614, "1480": 6.20798, "1485": 6.02411, "1490": 6.20953, "1495": 6.17788, "1500": 6.16957, "1505": 6.25605, "1510": 6.35487, "1515": 6.12147, "1520": 5.88284, "1525": 5.97354, "1530": 5.97133, "1535": 6.18283, "1540": 6.42884, "1545": 6.28868, "1550": 6.06395, "1555": 5.97533, "1560": 6.20183, "1565": 6.15308, "1570": 6.08513, "1575": 6.07142, "1580": 6.05003, "1585": 6.03634, "1590": 6.30695, "1595": 6.23795, "1600": 5.93011, "1605": 5.94669, "1610": 6.00829, "1615": 6.17491, "1620": 6.09426, "1625": 6.02182, "1630": 6.05703, "1635": 6.34675, "1640": 6.08607, "1645": 6.16877, "1650": 6.27363, "1655": 6.26811, "1660": 6.27462, "1665": 5.87312, "1670": 6.17929, "1675": 5.91525, "1680": 6.13344, "1685": 6.32443, "1690": 6.2644, "1695": 6.01757, "1700": 6.26122, "1705": 6.12344, "1710": 6.19754, "1715": 5.98887, "1720": 6.29412, "1725": 6.22684, "1730": 6.20743, "1735": 6.15486, "1740": 6.04205, "1745": 5.94038, "1750": 6.35924, "1755": 6.40246, "1760": 5.86366, "1765": 6.07505, "1770": 6.23461, "1775": 5.94154, "1780": 6.26637, "1785": 5.81716, "1790": 6.02314, "1795": 6.26227, "1800": 6.14013, "1805": 5.98584, "1810": 6.63134, "1815": 6.21314, "1820": 6.25555, "1825": 6.14411, "1830": 6.15875, "1835": 5.99699, "1840": 5.99038, "1845": 6.1556, "1850": 6.20895, "1855": 6.12019, "1860": 6.11647, "1865": 5.97461, "1870": 6.24845, "1875": 6.15245, "1880": 5.91023, "1885": 6.19165, "1890": 6.13825, "1895": 6.04321, "1900": 6.00342, "1905": 6.33723, "1910": 6.37854, "1915": 6.26141, "1920": 6.11687, "1925": 5.85322, "1930": 5.98664, "1935": 6.09483, "1940": 6.00604, "1945": 5.875, "1950": 6.09782, "1955": 5.96109, "1960": 6.13107, "1965": 6.07042, "1970": 6.04534, "1975": 6.09995, "1980": 6.39683, "1985": 6.30799, "1990": 5.99772, "1995": 6.07316, "2000": 6.00425, "2005": 6.16907, "2010": 6.27124, "2015": 6.05479, "2020": 6.20438, "2025": 6.249, "2030": 6.10151, "2035": 6.14185, "2040": 5.82018, "2045": 5.99301, "2050": 6.18173, "2055": 6.02603, "2060": 6.33486, "2065": 6.18433, "2070": 5.95901, "2075": 6.05091, "2080": 6.11619, "2085": 6.03563, "2090": 6.0318, "2095": 6.24495, "2100": 6.14167, "2105": 6.11567, "2110": 6.02763, "2115": 6.15262, "2120": 6.02194, "2125": 6.25424, "2130": 6.20629, "2135": 5.96906, "2140": 6.07103, "2145": 5.75309, "2150": 6.16524, "2155": 6.07608, "2160": 5.98145, "2165": 5.97813, "2170": 6.01644, "2175": 6.01627, "2180": 5.99747, "2185": 5.89013, "2190": 5.86032, "2195": 6.00796, "2200": 6.26774, "2205": 6.1098, "2210": 5.94775, "2215": 6.14702, "2220": 6.37079, "2225": 5.97553, "2230": 6.16571, "2235": 6.05938, "2240": 5.95049, "2245": 6.23155, "2250": 5.96877, "2255": 5.79235, "2260": 6.13931, "2265": 5.93813, "2270": 5.98911, "2275": 6.05952, "2280": 6.1092, "2285": 5.95013, "2290": 6.02738, "2295": 5.97596, "2300": 6.21482, "2305": 5.7741, "2310": 6.07717, "2315": 6.07771, "2320": 6.02548, "2325": 5.79078, "2330": 5.91772, "2335": 5.99312, "2340": 6.05866, "2345": 5.87771, "2350": 5.93828, "2355": 6.16529, "2360": 5.97877, "2365": 6.16995, "2370": 6.11135, "2375": 6.18199, "2380": 6.05936, "2385": 6.1578, "2390": 6.19628, "2395": 6.07546, "2400": 5.78213, "2405": 5.94596, "2410": 6.21064, "2415": 6.03963, "2420": 
6.08969, "2425": 5.83476, "2430": 6.3565, "2435": 6.17395, "2440": 5.93125, "2445": 5.83191, "2450": 6.03301, "2455": 5.69234, "2460": 5.98659, "2465": 6.25034, "2470": 5.84446, "2475": 5.93569, "2480": 5.9655, "2485": 6.0591, "2490": 6.0859, "2495": 6.12826, "2500": 6.07619, "2505": 5.95908, "2510": 5.81342, "2515": 6.04185, "2520": 6.2213, "2525": 5.81758, "2530": 5.96947, "2535": 5.96991, "2540": 6.16668, "2545": 5.96674, "2550": 6.00418, "2555": 5.72836, "2560": 6.20934, "2565": 5.85917, "2570": 6.08596, "2575": 5.69813, "2580": 5.91079, "2585": 5.9093, "2590": 6.12244, "2595": 6.13138, "2600": 6.04809, "2605": 6.04506, "2610": 6.30458, "2615": 6.07116, "2620": 5.98128, "2625": 6.11299, "2630": 6.1626, "2635": 6.16181, "2640": 5.92204, "2645": 6.01467, "2650": 6.03531, "2655": 5.86098, "2660": 5.84779, "2665": 6.17657, "2670": 6.00478, "2675": 5.97899, "2680": 6.13153, "2685": 5.98755, "2690": 6.06885, "2695": 5.96494, "2700": 6.31447, "2705": 6.07933, "2710": 6.13432, "2715": 5.84604, "2720": 5.97065, "2725": 6.19304, "2730": 6.05715, "2735": 6.17752, "2740": 5.88714, "2745": 6.11119, "2750": 5.98743, "2755": 5.99078, "2760": 5.87443, "2765": 6.18089, "2770": 6.03467, "2775": 6.23017, "2780": 6.20071, "2785": 6.02831, "2790": 6.03936, "2795": 5.87056, "2800": 6.12169, "2805": 5.99708, "2810": 6.1354, "2815": 6.09528, "2820": 6.0085, "2825": 5.87472, "2830": 6.28475, "2835": 5.95398, "2840": 6.09487, "2845": 5.88595, "2850": 5.85459, "2855": 6.16742, "2860": 5.83636, "2865": 6.09151, "2870": 6.06759, "2875": 6.11959, "2880": 5.91152, "2885": 5.93185, "2890": 6.05905, "2895": 6.07588, "2900": 5.78771, "2905": 6.10237, "2910": 6.00624, "2915": 6.16915, "2920": 5.9086, "2925": 5.77926, "2930": 6.03226, "2935": 6.03469, "2940": 6.19466, "2945": 5.90452, "2950": 6.10891, "2955": 6.00063, "2960": 5.9171, "2965": 5.88471, "2970": 5.88766, "2975": 5.92895, "2980": 6.14647, "2985": 6.1873, "2990": 5.9706, "2995": 6.26542, "3000": 6.1188, "3005": 5.8684, "3010": 6.31907, "3015": 5.86517, "3020": 5.82006, "3025": 5.88243, "3030": 6.00027, "3035": 5.98462, "3040": 6.25786, "3045": 5.8475, "3050": 6.22786, "3055": 5.88956, "3060": 5.9048, "3065": 5.94588, "3070": 6.14207, "3075": 5.84503, "3080": 5.89853, "3085": 6.08275, "3090": 6.17043, "3095": 6.03701, "3100": 6.10486, "3105": 5.99793, "3110": 5.84414, "3115": 6.06988, "3120": 6.28283, "3125": 6.19285, "3130": 5.98218, "3135": 6.12629, "3140": 5.85226, "3145": 6.36395, "3150": 6.01258, "3155": 6.25503, "3160": 6.04938, "3165": 6.07205, "3170": 6.20961, "3175": 6.02655, "3180": 6.02716, "3185": 6.06282, "3190": 5.99221, "3195": 5.8809, "3200": 6.06466, "3205": 5.9382, "3210": 5.98764, "3215": 5.97029, "3220": 5.89104, "3225": 5.79917, "3230": 6.12085, "3235": 5.8384, "3240": 6.12442, "3245": 6.14385, "3250": 5.97425, "3255": 6.0545, "3260": 5.94933, "3265": 6.07255, "3270": 6.06938, "3275": 6.07655, "3280": 6.0017, "3285": 5.8481, "3290": 6.07231, "3295": 5.77201, "3300": 5.9592, "3305": 5.98806, "3310": 5.84933, "3315": 5.45612, "3320": 5.9375, "3325": 5.86696, "3330": 5.9198, "3335": 6.04057, "3340": 6.01008, "3345": 5.96425, "3350": 5.88267, "3355": 5.92749, "3360": 5.78791, "3365": 6.28219, "3370": 6.08834, "3375": 5.85299, "3380": 6.17452, "3385": 5.93284, "3390": 5.88271, "3395": 5.99314, "3400": 6.03594, "3405": 5.98793, "3410": 6.2627, "3415": 5.96437, "3420": 5.76598, "3425": 6.257, "3430": 5.9041, "3435": 5.93149, "3440": 5.78047, "3445": 5.95834, "3450": 6.04761, "3455": 5.89934, "3460": 5.95712, "3465": 6.02859, "3470": 5.87991, 
"3475": 5.97779, "3480": 5.83089, "3485": 5.92359, "3490": 5.9974, "3495": 6.074, "3500": 6.00408, "3505": 6.12925, "3510": 6.03277, "3515": 6.05753, "3520": 6.02408, "3525": 6.07911, "3530": 5.91587, "3535": 6.10122, "3540": 6.01707, "3545": 6.23434, "3550": 6.02742, "3555": 6.20687, "3560": 6.03194, "3565": 6.01322, "3570": 6.01305, "3575": 5.75022, "3580": 6.11734, "3585": 5.8682, "3590": 5.97368, "3595": 5.85319, "3600": 5.66878, "3605": 6.05195, "3610": 6.13052, "3615": 5.94939, "3620": 5.97577, "3625": 6.17555, "3630": 5.85973, "3635": 6.06535, "3640": 6.22585, "3645": 5.82602, "3650": 6.05082, "3655": 5.85874, "3660": 5.98228, "3665": 5.82464, "3670": 6.1001, "3675": 5.95103, "3680": 5.99762, "3685": 5.89004, "3690": 5.91176, "3695": 6.05488, "3700": 5.9664, "3705": 6.02399, "3710": 5.93288, "3715": 5.78374, "3720": 6.0928, "3725": 6.20757, "3730": 5.97971, "3735": 5.90662, "3740": 5.87716, "3745": 5.99158, "3750": 6.1208, "3755": 5.81412, "3760": 6.07629, "3765": 5.72966, "3770": 6.11711, "3775": 5.92014, "3780": 6.22326, "3785": 6.01246, "3790": 5.90445, "3795": 6.38234, "3800": 5.71407, "3805": 6.04038, "3810": 5.99639, "3815": 5.97388, "3820": 5.91052, "3825": 6.01581, "3830": 5.69454, "3835": 5.82222, "3840": 5.92453, "3845": 5.86473, "3850": 5.75107, "3855": 6.0997, "3860": 6.0792, "3865": 5.80667, "3870": 5.8698, "3875": 5.91752, "3880": 6.05322, "3885": 6.07776, "3890": 5.93372, "3895": 6.02571, "3900": 5.93583, "3905": 5.77554, "3910": 5.98079, "3915": 5.93836, "3920": 6.0384, "3925": 6.05052, "3930": 5.82468, "3935": 6.00309, "3940": 5.97644, "3945": 5.80364, "3950": 5.71242, "3955": 5.64709, "3960": 5.6928, "3965": 5.8392, "3970": 5.95118, "3975": 6.03905, "3980": 6.01592, "3985": 5.88058, "3990": 5.89601, "3995": 6.06335, "4000": 5.92695, "4005": 5.83767, "4010": 5.90926, "4015": 6.05247, "4020": 5.72979, "4025": 5.9811, "4030": 5.85282, "4035": 5.61899, "4040": 6.1771, "4045": 5.99571, "4050": 5.91783, "4055": 6.17885, "4060": 5.84221, "4065": 5.8833, "4070": 5.96066, "4075": 5.86633, "4080": 5.83322, "4085": 6.04471, "4090": 6.10456, "4095": 5.74723, "4100": 5.75289, "4105": 6.14218, "4110": 5.64252, "4115": 6.01125, "4120": 5.9786, "4125": 5.84296, "4130": 5.94813, "4135": 6.13013, "4140": 5.88369, "4145": 6.04204, "4150": 5.89849, "4155": 5.90384, "4160": 5.70369, "4165": 5.86758, "4170": 5.9657, "4175": 5.57474, "4180": 6.11062, "4185": 5.98628, "4190": 5.84044, "4195": 6.09738, "4200": 5.57131, "4205": 5.80155, "4210": 6.02393, "4215": 5.88891, "4220": 5.82996, "4225": 5.7451, "4230": 5.8151, "4235": 5.78634, "4240": 5.8465, "4245": 5.84784, "4250": 6.11465, "4255": 6.01487, "4260": 5.89099, "4265": 6.08233, "4270": 6.03876, "4275": 5.78635, "4280": 5.96225, "4285": 5.44958, "4290": 5.85094, "4295": 5.75537, "4300": 6.0828, "4305": 5.79049, "4310": 5.88714, "4315": 6.00792, "4320": 5.61666, "4325": 5.91629, "4330": 5.87369, "4335": 6.09393, "4340": 5.64895, "4345": 5.98922, "4350": 6.18923, "4355": 6.0726, "4360": 5.94713, "4365": 5.9624, "4370": 6.12777, "4375": 5.81442, "4380": 6.2151, "4385": 5.95352, "4390": 5.90547, "4395": 5.85397, "4400": 5.96785, "4405": 6.06758, "4410": 5.69651, "4415": 5.68279, "4420": 5.97437, "4425": 5.99099, "4430": 5.73795, "4435": 5.6077, "4440": 5.94604, "4445": 5.98559, "4450": 5.98021, "4455": 5.98509, "4460": 5.91287, "4465": 5.75576, "4470": 6.01444, "4475": 5.97226, "4480": 5.8727, "4485": 5.83316, "4490": 5.72116, "4495": 6.08876, "4500": 5.99656, "4505": 6.06873, "4510": 5.7272, "4515": 6.04657, "4520": 6.02537, "4525": 
5.66054, "4530": 5.84621, "4535": 5.78212, "4540": 5.94376, "4545": 5.86302, "4550": 6.06617, "4555": 5.7309, "4560": 5.987, "4565": 5.96833, "4570": 5.8168, "4575": 6.02794, "4580": 5.7049, "4585": 5.98809, "4590": 5.97186, "4595": 5.7722, "4600": 5.96165, "4605": 5.89799, "4610": 5.82583, "4615": 5.9817, "4620": 6.10493, "4625": 5.71283, "4630": 5.83466, "4635": 5.85632, "4640": 5.76916, "4645": 5.82717, "4650": 5.78336, "4655": 5.95854, "4660": 6.04608, "4665": 5.75261, "4670": 6.06464, "4675": 6.22357, "4680": 5.9045, "4685": 5.673, "4690": 5.86325, "4695": 5.70763, "4700": 5.98168, "4705": 5.85174, "4710": 5.96109, "4715": 6.11571, "4720": 5.99887, "4725": 5.93275, "4730": 6.04045, "4735": 5.80809, "4740": 5.94978, "4745": 5.91324, "4750": 5.85717, "4755": 5.65827, "4760": 5.92101, "4765": 5.93362, "4770": 6.09067, "4775": 5.87135, "4780": 6.08819, "4785": 5.8731, "4790": 5.94972, "4795": 5.84614, "4800": 5.64225, "4805": 5.70128, "4810": 5.89968, "4815": 5.9997, "4820": 5.49148, "4825": 5.98043, "4830": 5.94247, "4835": 5.81849, "4840": 6.12999, "4845": 6.01971, "4850": 5.91519, "4855": 6.16703, "4860": 6.06059, "4865": 5.81255, "4870": 5.94657, "4875": 5.96162, "4880": 5.8084, "4885": 6.05684, "4890": 5.7932, "4895": 5.95185, "4900": 5.96264, "4905": 5.88461, "4910": 5.77821, "4915": 5.92909, "4920": 5.9975, "4925": 6.12044, "4930": 5.94594, "4935": 6.02831, "4940": 5.86724, "4945": 5.86159, "4950": 5.78799, "4955": 5.98601, "4960": 5.66078, "4965": 6.06929, "4970": 5.8495, "4975": 6.06051, "4980": 6.09698, "4985": 5.59213, "4990": 5.81326, "4995": 5.91457, "5000": 6.07364, "5005": 5.94825, "5010": 5.95511, "5015": 5.83173, "5020": 6.07714, "5025": 5.91783, "5030": 6.08646, "5035": 5.86503, "5040": 5.89568, "5045": 6.08986, "5050": 5.78338, "5055": 5.95993, "5060": 6.13316, "5065": 5.74849, "5070": 5.8501, "5075": 5.81988, "5080": 5.89304, "5085": 5.87833, "5090": 5.88168, "5095": 6.03774, "5100": 5.67416, "5105": 5.82695, "5110": 5.87161, "5115": 5.91142, "5120": 5.89405, "5125": 6.02768, "5130": 6.11376, "5135": 5.95289, "5140": 5.84183, "5145": 5.67579, "5150": 5.89657, "5155": 5.82823, "5160": 5.93903, "5165": 5.84558, "5170": 6.02827, "5175": 6.09318, "5180": 6.08378, "5185": 6.10424, "5190": 6.16258, "5195": 5.99145, "5200": 6.05603, "5205": 5.86883, "5210": 5.81461, "5215": 5.96669, "5220": 6.03105, "5225": 5.66095, "5230": 6.12338, "5235": 5.72441, "5240": 5.92626, "5245": 6.08622, "5250": 5.75723, "5255": 6.03679, "5260": 5.72606, "5265": 5.90968, "5270": 5.84854, "5275": 5.68685, "5280": 5.75602, "5285": 6.06916, "5290": 6.02692, "5295": 5.94059, "5300": 5.95885, "5305": 6.00005, "5310": 5.67134, "5315": 5.748, "5320": 5.63422, "5325": 5.87397, "5330": 5.66023, "5335": 5.8339, "5340": 5.73971, "5345": 5.78845, "5350": 5.9025, "5355": 5.92206, "5360": 6.07336, "5365": 5.77985, "5370": 5.85376, "5375": 5.6049, "5380": 5.91661, "5385": 6.12394, "5390": 5.82189, "5395": 5.87105, "5400": 5.95236, "5405": 5.68995, "5410": 5.46309, "5415": 6.06397, "5420": 5.9973, "5425": 5.90584, "5430": 5.67683, "5435": 5.977, "5440": 5.87155, "5445": 5.80642, "5450": 6.23539, "5455": 6.06937, "5460": 5.96546, "5465": 5.7541, "5470": 6.10511, "5475": 5.91961, "5480": 5.81743, "5485": 5.80802, "5490": 6.05039, "5495": 5.82617, "5500": 5.93446, "5505": 5.74063, "5510": 5.89965, "5515": 6.04098, "5520": 6.0738, "5525": 5.73201, "5530": 5.97738, "5535": 5.64904, "5540": 5.67747, "5545": 5.69992, "5550": 5.92799, "5555": 5.85669, "5560": 5.83134, "5565": 5.56101, "5570": 5.92158, "5575": 6.0932, 
"5580": 5.95349, "5585": 6.11276, "5590": 5.98373, "5595": 6.11635, "5600": 6.00274, "5605": 6.05319, "5610": 5.97314, "5615": 5.71298, "5620": 6.24565, "5625": 6.06485, "5630": 6.1334, "5635": 5.88619, "5640": 5.75622, "5645": 6.13513, "5650": 5.876, "5655": 5.69305, "5660": 6.13191, "5665": 5.81365, "5670": 5.73347, "5675": 5.924, "5680": 5.84027, "5685": 5.84329, "5690": 5.75277, "5695": 5.84447, "5700": 5.83425, "5705": 5.87433, "5710": 6.12193, "5715": 5.80536, "5720": 5.68856, "5725": 5.85027, "5730": 5.63776, "5735": 5.81068, "5740": 5.67376, "5745": 5.98577, "5750": 6.02996, "5755": 5.86208, "5760": 6.25156, "5765": 5.71894, "5770": 5.82406, "5775": 5.71575, "5780": 6.12775, "5785": 6.09879, "5790": 6.08178, "5795": 5.93148, "5800": 6.08462, "5805": 5.65564, "5810": 5.55791, "5815": 5.93041, "5820": 5.68461, "5825": 5.84204, "5830": 6.03798, "5835": 6.05667, "5840": 5.86697, "5845": 5.83712, "5850": 5.88625, "5855": 5.96867, "5860": 6.00277, "5865": 5.98086, "5870": 5.63804, "5875": 5.84283, "5880": 5.96442, "5885": 5.97583, "5890": 5.99021, "5895": 5.64368, "5900": 6.02987, "5905": 6.089, "5910": 5.78788, "5915": 5.95501, "5920": 5.71454, "5925": 5.81768, "5930": 5.78305, "5935": 5.9069, "5940": 5.85167, "5945": 5.86709, "5950": 5.90808, "5955": 5.81667, "5960": 5.90225, "5965": 5.79852, "5970": 5.91004, "5975": 5.77538, "5980": 5.99056, "5985": 5.7286, "5990": 5.8351, "5995": 5.97124, "6000": 5.92089, "6005": 6.03179, "6010": 5.88793, "6015": 5.88359, "6020": 6.07171, "6025": 5.89094, "6030": 5.94989, "6035": 5.82931, "6040": 5.82518, "6045": 6.00943, "6050": 5.80604, "6055": 5.95161, "6060": 5.8181, "6065": 6.00115, "6070": 5.75933, "6075": 5.59026, "6080": 5.78149, "6085": 5.87118, "6090": 6.03671, "6095": 5.97082, "6100": 5.57972, "6105": 5.72684, "6110": 6.0764, "6115": 6.0097, "6120": 6.03518, "6125": 5.69707, "6130": 5.95461, "6135": 5.54152, "6140": 5.87637, "6145": 5.75229, "6150": 5.8015, "6155": 5.85735, "6160": 5.78699, "6165": 5.97731, "6170": 5.99949, "6175": 5.98108, "6180": 5.94716, "6185": 5.59701, "6190": 5.87248, "6195": 5.89317, "6200": 5.69626, "6205": 5.45686, "6210": 5.7563, "6215": 5.63617, "6220": 5.88549, "6225": 5.88844, "6230": 5.69624, "6235": 5.86791, "6240": 5.82125, "6245": 5.92873, "6250": 5.96425, "6255": 5.7482, "6260": 5.91595, "6265": 5.79466, "6270": 5.93485, "6275": 5.95373, "6280": 5.7759, "6285": 5.60686, "6290": 5.84971, "6295": 5.71141, "6300": 5.74117, "6305": 5.92574, "6310": 5.61886, "6315": 5.60544, "6320": 5.86826, "6325": 5.93603, "6330": 5.84852, "6335": 5.91784, "6340": 6.0349, "6345": 5.87701, "6350": 5.83306, "6355": 5.81394, "6360": 5.72538, "6365": 6.01883, "6370": 5.77839, "6375": 5.63804, "6380": 5.91633, "6385": 5.82156, "6390": 5.69607, "6395": 6.04616, "6400": 5.77809, "6405": 5.97584, "6410": 5.80263, "6415": 5.97836, "6420": 5.87948, "6425": 5.98962, "6430": 5.79481, "6435": 5.58722, "6440": 5.90316, "6445": 5.9463, "6450": 5.97537, "6455": 5.91002, "6460": 5.972, "6465": 5.75623, "6470": 5.7302, "6475": 5.79067, "6480": 6.09147, "6485": 5.85549, "6490": 5.7152, "6495": 5.79304, "6500": 6.00552, "6505": 5.77009, "6510": 5.78143, "6515": 5.90694, "6520": 6.09193, "6525": 5.81564, "6530": 5.98241, "6535": 5.79532, "6540": 6.03136, "6545": 5.86131, "6550": 5.94544, "6555": 5.66157, "6560": 5.90597, "6565": 5.87107, "6570": 5.8972, "6575": 5.9105, "6580": 5.83622, "6585": 6.03871, "6590": 5.71132, "6595": 6.00204, "6600": 5.90645, "6605": 6.09848, "6610": 6.04391, "6615": 5.71854, "6620": 5.75878, "6625": 5.9494, "6630": 
5.70529, "6635": 5.85265, "6640": 5.72095, "6645": 5.66608, "6650": 5.79589, "6655": 6.06189, "6660": 5.82669, "6665": 5.89547, "6670": 6.07109, "6675": 5.74193, "6680": 5.93431, "6685": 5.89928, "6690": 5.79495, "6695": 5.91007, "6700": 5.65257, "6705": 6.01783, "6710": 5.71358, "6715": 5.82411, "6720": 5.98228, "6725": 5.98447, "6730": 5.54247, "6735": 5.82771, "6740": 5.94344, "6745": 5.72387, "6750": 5.65983, "6755": 5.68804, "6760": 5.8599, "6765": 5.94824, "6770": 5.89592, "6775": 5.71643, "6780": 5.75446, "6785": 5.89789, "6790": 5.60939, "6795": 5.84857, "6800": 5.66142, "6805": 5.72656, "6810": 5.83857, "6815": 5.9619, "6820": 5.58571, "6825": 6.01492, "6830": 6.04983, "6835": 5.84127, "6840": 6.04584, "6845": 5.79189, "6850": 5.62788, "6855": 5.83524, "6860": 6.03239, "6865": 5.67083, "6870": 5.79229, "6875": 5.85901, "6880": 6.10503, "6885": 5.96496, "6890": 5.91272, "6895": 5.77253, "6900": 5.90441, "6905": 6.00789, "6910": 5.78713, "6915": 5.90119, "6920": 6.10898, "6925": 5.82724, "6930": 5.98957, "6935": 5.94344, "6940": 5.82049, "6945": 5.63854, "6950": 5.91236, "6955": 5.63333, "6960": 5.81133, "6965": 5.69925, "6970": 5.85376, "6975": 5.85359, "6980": 5.89083, "6985": 5.91882, "6990": 5.73713, "6995": 5.89793, "7000": 5.82103, "7005": 5.90244, "7010": 5.72638, "7015": 5.95834, "7020": 5.64784, "7025": 5.8768, "7030": 5.90527, "7035": 5.91034, "7040": 5.82568, "7045": 5.90077, "7050": 5.82944, "7055": 6.15177, "7060": 5.72798, "7065": 5.52932, "7070": 5.87274, "7075": 5.82944, "7080": 5.81256, "7085": 6.05009, "7090": 5.91977, "7095": 5.70034, "7100": 5.87272, "7105": 5.82099, "7110": 5.79679, "7115": 5.78455, "7120": 5.80729, "7125": 5.78187, "7130": 5.90432, "7135": 5.65231, "7140": 5.95303, "7145": 5.74481, "7150": 5.84274, "7155": 5.70761, "7160": 5.54263, "7165": 5.78246, "7170": 5.92449, "7175": 5.75226, "7180": 5.83466, "7185": 6.01277, "7190": 5.7504, "7195": 6.00854, "7200": 5.36623, "7205": 5.82657, "7210": 5.75722, "7215": 5.70763, "7220": 5.98623, "7225": 5.87521, "7230": 5.83534, "7235": 5.78719, "7240": 5.81129, "7245": 5.75627, "7250": 5.94062, "7255": 5.74688, "7260": 5.90828, "7265": 5.80926, "7270": 5.65987, "7275": 5.61302, "7280": 5.68157, "7285": 6.09783, "7290": 5.78166, "7295": 5.82733, "7300": 6.03172, "7305": 5.80954, "7310": 5.75934, "7315": 5.67646, "7320": 5.82747, "7325": 5.94145, "7330": 5.89161, "7335": 5.81415, "7340": 6.12372, "7345": 5.89585, "7350": 5.89517, "7355": 5.76747, "7360": 5.86277, "7365": 5.97572, "7370": 5.92695, "7375": 5.9998, "7380": 5.62649, "7385": 5.60713, "7390": 5.37832, "7395": 5.84424, "7400": 6.0214, "7405": 5.6846, "7410": 5.53331, "7415": 5.74369, "7420": 5.74541, "7425": 5.71479, "7430": 5.66435, "7435": 5.90399, "7440": 5.78136, "7445": 5.99807, "7450": 5.6349, "7455": 5.73046, "7460": 5.92914, "7465": 5.66417, "7470": 5.90493, "7475": 5.893, "7480": 6.11112, "7485": 6.01242, "7490": 5.78887, "7495": 5.91944, "7500": 5.7222, "7505": 5.46517, "7510": 5.63186, "7515": 5.80544, "7520": 5.56263, "7525": 6.17304, "7530": 5.62589, "7535": 5.87513, "7540": 5.64947, "7545": 5.82008, "7550": 6.10426, "7555": 5.61303, "7560": 5.50947, "7565": 5.72512, "7570": 5.87354, "7575": 5.82948, "7580": 5.85046, "7585": 5.82411, "7590": 5.72754, "7595": 5.91624, "7600": 5.95791, "7605": 5.7625, "7610": 5.86581, "7615": 5.7302, "7620": 5.93153, "7625": 5.5793, "7630": 5.95246, "7635": 5.74829, "7640": 5.63638, "7645": 5.8871, "7650": 5.90615, "7655": 5.87861, "7660": 5.94713, "7665": 5.70738, "7670": 5.96559, "7675": 5.74314, "7680": 
5.78888, "7685": 5.58587, "7690": 6.02031, "7695": 5.72888, "7700": 5.88408, "7705": 5.83287, "7710": 5.95595, "7715": 5.99481, "7720": 5.86884, "7725": 5.8895, "7730": 5.81375, "7735": 5.93829, "7740": 5.83309, "7745": 5.77619, "7750": 5.83994, "7755": 5.97219, "7760": 6.1147, "7765": 5.59473, "7770": 5.76657, "7775": 5.74115, "7780": 5.85874, "7785": 5.6001, "7790": 5.884, "7795": 5.94862, "7800": 5.70014, "7805": 5.81943, "7810": 5.41855, "7815": 5.92261, "7820": 5.99822, "7825": 5.73806, "7830": 5.67513, "7835": 5.61757, "7840": 5.78784, "7845": 5.94562, "7850": 5.77884, "7855": 5.80756, "7860": 5.54746, "7865": 5.25609, "7870": 5.94755, "7875": 5.6905, "7880": 5.79617, "7885": 5.68094, "7890": 5.7579, "7895": 5.58516, "7900": 5.95009, "7905": 5.7974, "7910": 5.83403, "7915": 5.62173, "7920": 5.65485, "7925": 5.67705, "7930": 5.90316, "7935": 5.89119, "7940": 5.64913, "7945": 5.76924, "7950": 6.01473, "7955": 5.71644, "7960": 5.77164, "7965": 5.72695, "7970": 5.74712, "7975": 5.90801, "7980": 5.44652, "7985": 5.91901, "7990": 5.69615, "7995": 5.6298, "8000": 5.72284, "8005": 5.86332, "8010": 5.48412, "8015": 5.8511, "8020": 5.80825, "8025": 5.40491, "8030": 5.74312, "8035": 5.64714, "8040": 5.72874, "8045": 5.56588, "8050": 5.79171, "8055": 5.92568, "8060": 5.69134, "8065": 5.86486, "8070": 5.82576, "8075": 5.84657, "8080": 5.64556, "8085": 5.76015, "8090": 5.69718, "8095": 5.57212, "8100": 5.82403, "8105": 5.71188, "8110": 5.59603, "8115": 5.60957, "8120": 5.80626, "8125": 5.516, "8130": 5.93733, "8135": 5.6502, "8140": 5.8428, "8145": 5.80404, "8150": 6.05433, "8155": 5.9796, "8160": 5.73751, "8165": 5.66863, "8170": 5.83506, "8175": 6.02733, "8180": 5.68482, "8185": 5.63858, "8190": 5.71248, "8195": 5.62369, "8200": 5.75792, "8205": 5.5708, "8210": 5.75834, "8215": 5.54481, "8220": 5.67746, "8225": 5.73509, "8230": 5.90221, "8235": 5.63434, "8240": 5.60042, "8245": 5.55364, "8250": 5.87727, "8255": 5.77999, "8260": 5.7375, "8265": 5.69489, "8270": 5.80426, "8275": 5.6306, "8280": 5.81055, "8285": 5.53062, "8290": 5.67358, "8295": 5.81196, "8300": 5.73593, "8305": 5.69522, "8310": 5.84368, "8315": 5.48173, "8320": 5.73258, "8325": 5.9263, "8330": 5.80573, "8335": 5.83229, "8340": 5.67704, "8345": 5.79635, "8350": 5.50639, "8355": 5.82299, "8360": 5.77546, "8365": 5.66248, "8370": 5.76869, "8375": 5.69422, "8380": 5.85131, "8385": 5.78815, "8390": 5.60328, "8395": 5.67423, "8400": 5.81522, "8405": 5.68575, "8410": 5.71193, "8415": 5.84812, "8420": 5.82082, "8425": 5.81672, "8430": 5.43725, "8435": 5.49064, "8440": 5.73602, "8445": 5.40615, "8450": 5.83461, "8455": 5.6964, "8460": 5.71148, "8465": 5.41237, "8470": 5.91572, "8475": 5.74038, "8480": 5.44003, "8485": 5.86751, "8490": 5.75691, "8495": 5.50735, "8500": 5.85486, "8505": 5.46488, "8510": 5.94808, "8515": 5.609, "8520": 5.76642, "8525": 5.38807, "8530": 5.65796, "8535": 5.92774, "8540": 5.55606, "8545": 5.79748, "8550": 5.61649, "8555": 5.75795, "8560": 5.81342, "8565": 5.83405, "8570": 5.38373, "8575": 5.71012, "8580": 5.66105, "8585": 5.4942, "8590": 5.68779, "8595": 5.56707, "8600": 5.82228, "8605": 5.74586, "8610": 5.70183, "8615": 5.86348, "8620": 5.45154, "8625": 5.6085, "8630": 5.79859, "8635": 5.49512, "8640": 5.64539, "8645": 5.745, "8650": 5.70007, "8655": 5.59801, "8660": 5.6, "8665": 5.74373, "8670": 5.40236, "8675": 5.54804, "8680": 5.79269, "8685": 5.81167, "8690": 5.69646, "8695": 5.84499, "8700": 5.66007, "8705": 5.70066, "8710": 5.70913, "8715": 5.60484, "8720": 5.64372, "8725": 5.75667, "8730": 5.7823, 
"8735": 5.84955, "8740": 5.74703, "8745": 5.61787, "8750": 5.94007, "8755": 5.61608, "8760": 5.46402, "8765": 5.59276, "8770": 6.02898, "8775": 5.51498, "8780": 5.9845, "8785": 5.70372, "8790": 5.82905, "8795": 5.81061, "8800": 5.71435, "8805": 5.83535, "8810": 5.80174, "8815": 5.61371, "8820": 5.66594, "8825": 5.46885, "8830": 5.75579, "8835": 5.74373, "8840": 5.49764, "8845": 5.60756, "8850": 5.73135, "8855": 5.44014, "8860": 5.51977, "8865": 5.62161, "8870": 5.54434, "8875": 5.71215, "8880": 5.63476, "8885": 5.69293, "8890": 5.7104, "8895": 5.62832, "8900": 5.43094, "8905": 5.70281, "8910": 5.70314, "8915": 5.91589, "8920": 5.16861, "8925": 5.67876, "8930": 5.6559, "8935": 5.07527, "8940": 5.86749, "8945": 5.78208, "8950": 5.65786, "8955": 5.74818, "8960": 5.25266, "8965": 5.9926, "8970": 5.60364, "8975": 5.39638, "8980": 5.721, "8985": 5.67142, "8990": 5.8837, "8995": 5.45353, "9000": 5.50147, "9005": 5.49486, "9010": 5.75017, "9015": 5.63327, "9020": 5.75668, "9025": 5.67242, "9030": 5.4956, "9035": 5.80081, "9040": 5.90551, "9045": 5.70986, "9050": 5.76715, "9055": 5.72592, "9060": 5.73708, "9065": 5.5525, "9070": 5.65479, "9075": 5.66817, "9080": 5.65021, "9085": 5.34984, "9090": 5.66204, "9095": 5.40914, "9100": 5.55722, "9105": 5.76811, "9110": 5.78093, "9115": 5.59328, "9120": 5.66293, "9125": 5.57456, "9130": 5.5401, "9135": 5.73938, "9140": 5.86389, "9145": 5.66871, "9150": 5.82171, "9155": 5.56699, "9160": 5.41264, "9165": 5.57659, "9170": 5.64255, "9175": 5.73597, "9180": 5.43397, "9185": 5.63406, "9190": 5.6363, "9195": 5.6396, "9200": 5.58592, "9205": 5.75986, "9210": 5.72434, "9215": 5.68457, "9220": 5.48039, "9225": 5.67313, "9230": 5.86744, "9235": 5.65768, "9240": 5.51386, "9245": 5.79919, "9250": 5.73313, "9255": 5.55845, "9260": 5.35994, "9265": 5.75772, "9270": 5.66636, "9275": 5.53637, "9280": 5.36932, "9285": 5.79772, "9290": 5.61365, "9295": 5.90657, "9300": 5.70808, "9305": 5.7012, "9310": 5.33009, "9315": 5.62003, "9320": 5.58207, "9325": 5.49885, "9330": 5.54335, "9335": 5.82413, "9340": 5.50917, "9345": 5.77417, "9350": 5.48905, "9355": 5.42847, "9360": 5.51044, "9365": 5.29568, "9370": 5.35347, "9375": 5.65943, "9380": 5.59457, "9385": 5.55737, "9390": 5.63691, "9395": 5.57395, "9400": 5.58915, "9405": 5.29538, "9410": 5.4566, "9415": 5.48807, "9420": 5.56795, "9425": 5.75716, "9430": 5.37538, "9435": 5.09477, "9440": 5.59755, "9445": 5.63519, "9450": 5.5206, "9455": 5.44631, "9460": 5.62906, "9465": 5.48316, "9470": 5.68383, "9475": 5.6122, "9480": 5.33545, "9485": 5.65824, "9490": 5.72671, "9495": 5.63357, "9500": 5.63213, "9505": 5.84218, "9510": 5.61837, "9515": 5.42445, "9520": 5.50514, "9525": 5.8456, "9530": 5.70762, "9535": 5.51983, "9540": 5.42291, "9545": 5.48033, "9550": 5.41643, "9555": 5.70902, "9560": 5.55505, "9565": 5.46654, "9570": 5.54315, "9575": 5.64194, "9580": 5.48847, "9585": 5.33709, "9590": 5.60695, "9595": 5.4822, "9600": 5.58624, "9605": 5.37234, "9610": 5.31541, "9615": 5.09623, "9620": 5.54282, "9625": 5.49912, "9630": 5.71081, "9635": 5.52619, "9640": 5.62244, "9645": 5.41501, "9650": 5.75509, "9655": 5.62307, "9660": 5.3647, "9665": 5.30849, "9670": 5.72415, "9675": 5.23017, "9680": 5.5966, "9685": 5.77998, "9690": 5.51237, "9695": 5.48578, "9700": 5.37464, "9705": 5.61373, "9710": 5.68007, "9715": 5.54804, "9720": 5.65807, "9725": 5.3914, "9730": 5.46214, "9735": 5.40781, "9740": 5.72982, "9745": 5.48765, "9750": 5.33607, "9755": 5.42646, "9760": 5.58826, "9765": 5.62524, "9770": 5.19389, "9775": 5.58636, "9780": 5.57133, 
"9785": 5.33364, "9790": 5.45077, "9795": 5.58703, "9800": 5.50592, "9805": 5.47202, "9810": 5.23286, "9815": 5.49554, "9820": 5.43161, "9825": 5.60899, "9830": 5.36394, "9835": 5.50149, "9840": 5.40274, "9845": 5.74392, "9850": 5.35515, "9855": 5.6459, "9860": 5.44274, "9865": 5.31234, "9870": 5.57249, "9875": 5.5532, "9880": 5.00493, "9885": 5.5462, "9890": 5.65539, "9895": 5.37119, "9900": 5.51365, "9905": 5.33651, "9910": 5.50684, "9915": 5.24271, "9920": 5.66817, "9925": 5.54969, "9930": 5.4118, "9935": 5.54992, "9940": 5.37945, "9945": 5.50179, "9950": 5.5705, "9955": 5.28958, "9960": 5.69402, "9965": 5.57838, "9970": 5.49323, "9975": 5.50238, "9980": 5.62863, "9985": 5.5802, "9990": 5.76479, "9995": 5.37461, "10000": 5.37217, "10005": 5.36176, "10010": 5.54772, "10015": 5.73754, "10020": 5.7272, "10025": 5.60862, "10030": 5.0291, "10035": 5.60087, "10040": 5.441, "10045": 5.77326, "10050": 5.69889, "10055": 5.69791, "10060": 5.36611, "10065": 5.4376, "10070": 5.39917, "10075": 5.34005, "10080": 5.51841, "10085": 5.34771, "10090": 5.6845, "10095": 5.46646, "10100": 5.39041, "10105": 5.40078, "10110": 5.65632, "10115": 5.60608, "10120": 5.02873, "10125": 5.30725, "10130": 5.37288, "10135": 5.5169, "10140": 5.61937, "10145": 5.3742, "10150": 5.34816, "10155": 5.47273, "10160": 5.38916, "10165": 5.58045, "10170": 5.45593, "10175": 5.42438, "10180": 5.2709, "10185": 5.39061, "10190": 5.23709, "10195": 5.39072, "10200": 5.34009, "10205": 5.37003, "10210": 5.57952, "10215": 5.35967, "10220": 5.59501, "10225": 5.41379, "10230": 5.4957, "10235": 5.42919, "10240": 5.52993, "10245": 5.46694, "10250": 5.13013, "10255": 5.08857, "10260": 5.48043, "10265": 5.50391, "10270": 5.4464, "10275": 5.23887, "10280": 5.38853, "10285": 5.2312, "10290": 5.21898, "10295": 5.53407, "10300": 5.41463, "10305": 5.33047, "10310": 5.43449, "10315": 5.21006, "10320": 5.14259, "10325": 5.3647, "10330": 5.41698, "10335": 5.35589, "10340": 5.35679, "10345": 5.26156, "10350": 5.58519, "10355": 5.2316, "10360": 5.39321, "10365": 5.20598, "10370": 5.48835, "10375": 5.55115, "10380": 5.52903, "10385": 5.61277, "10390": 5.38487, "10395": 5.46827, "10400": 5.44774, "10405": 5.4081, "10410": 5.70945, "10415": 5.35008, "10420": 5.4518, "10425": 5.55035, "10430": 5.34628, "10435": 5.25202, "10440": 5.27857, "10445": 5.39387, "10450": 5.59736, "10455": 5.3822, "10460": 5.61593, "10465": 5.52211, "10470": 5.2085, "10475": 5.39076, "10480": 5.43431, "10485": 5.37379, "10490": 5.11247, "10495": 5.17833, "10500": 5.44333, "10505": 5.61455, "10510": 5.37368, "10515": 5.4714, "10520": 5.43458, "10525": 5.3523, "10530": 5.48889, "10535": 5.54636, "10540": 5.33585, "10545": 5.60668, "10550": 5.22712, "10555": 5.57587, "10560": 5.4946, "10565": 5.0012, "10570": 5.4399, "10575": 5.3784, "10580": 5.43564, "10585": 5.4542, "10590": 5.57402, "10595": 5.34212, "10600": 5.28003, "10605": 5.31644, "10610": 5.41609, "10615": 5.35182, "10620": 5.25325, "10625": 5.66255, "10630": 5.35798, "10635": 5.43567, "10640": 5.20932, "10645": 5.35234, "10650": 5.52953, "10655": 5.43858, "10660": 5.45034, "10665": 5.39874, "10670": 5.356, "10675": 5.39505, "10680": 5.26005, "10685": 5.30895, "10690": 5.67238, "10695": 5.3382, "10700": 5.61352, "10705": 5.36639, "10710": 5.2546, "10715": 4.7648, "10720": 5.36798, "10725": 5.08581, "10730": 5.38774, "10735": 5.2315, "10740": 5.50009, "10745": 5.31682, "10750": 4.95337, "10755": 5.46252, "10760": 5.3463, "10765": 5.39233, "10770": 5.26877, "10775": 5.36279, "10780": 5.40003, "10785": 5.13531, "10790": 5.09205, 
"10795": 5.41714, "10800": 5.24182, "10805": 5.37759, "10810": 5.1841, "10815": 5.20587, "10820": 5.45114, "10825": 5.54565, "10830": 5.24614, "10835": 5.36473, "10840": 5.19786, "10845": 5.18242, "10850": 5.5072, "10855": 5.30986, "10860": 5.40324, "10865": 5.52718, "10870": 5.68078, "10875": 5.53167, "10880": 5.12654, "10885": 5.38662, "10890": 5.39428, "10895": 5.24526, "10900": 5.30257, "10905": 5.38606, "10910": 5.46742, "10915": 5.36187, "10920": 5.4652, "10925": 5.31216, "10930": 5.27235, "10935": 5.52601, "10940": 5.4506, "10945": 5.45866, "10950": 5.04867, "10955": 5.18984, "10960": 5.27339, "10965": 5.26806, "10970": 5.1956, "10975": 5.28983, "10980": 4.98698, "10985": 5.25205, "10990": 5.28065, "10995": 5.32792, "11000": 5.06033, "11005": 5.24549, "11010": 5.37188, "11015": 5.19974, "11020": 5.24522, "11025": 5.08564, "11030": 5.3484, "11035": 5.22237, "11040": 5.34708, "11045": 5.07465, "11050": 5.27395, "11055": 5.5143, "11060": 5.3757, "11065": 5.27853, "11070": 5.26735, "11075": 5.35381, "11080": 5.13287, "11085": 5.22699, "11090": 5.28493, "11095": 5.19245, "11100": 5.34048, "11105": 5.28978, "11110": 5.24824, "11115": 5.31027, "11120": 4.82308, "11125": 5.3662, "11130": 5.54704, "11135": 5.19661, "11140": 5.17157, "11145": 5.3315, "11150": 5.59279, "11155": 5.27752, "11160": 5.03615, "11165": 5.30773, "11170": 5.28313, "11175": 5.21376, "11180": 5.01974, "11185": 5.11332, "11190": 5.4275, "11195": 5.08001, "11200": 5.05585, "11205": 5.27463, "11210": 5.24983, "11215": 5.51707, "11220": 5.10348, "11225": 5.34594, "11230": 5.19582, "11235": 5.23772, "11240": 5.1086, "11245": 5.47357, "11250": 5.38475, "11255": 5.17473, "11260": 5.3059, "11265": 4.97653, "11270": 5.26496, "11275": 5.265, "11280": 4.87211, "11285": 5.3036, "11290": 5.06537, "11295": 5.1824, "11300": 5.01714, "11305": 5.4564, "11310": 5.35135, "11315": 5.30915, "11320": 5.43378, "11325": 5.27055, "11330": 5.24777, "11335": 5.2721, "11340": 5.46113, "11345": 5.04206, "11350": 5.37593, "11355": 5.41239, "11360": 5.60221, "11365": 5.38167, "11370": 5.4196, "11375": 5.30141, "11380": 5.37361, "11385": 5.20055, "11390": 5.18606, "11395": 5.28461, "11400": 4.95284, "11405": 5.07589, "11410": 5.26203, "11415": 5.48228, "11420": 5.00538, "11425": 5.14265, "11430": 5.23302, "11435": 5.02339, "11440": 5.24131, "11445": 5.2604, "11450": 5.3917, "11455": 5.3178, "11460": 5.1791, "11465": 5.45179, "11470": 5.23893, "11475": 5.3124, "11480": 5.09538, "11485": 5.27877, "11490": 5.23733, "11495": 5.5879, "11500": 5.28776, "11505": 5.11349, "11510": 5.17614, "11515": 5.09869, "11520": 5.22095, "11525": 5.2381, "11530": 5.07422, "11535": 5.30826, "11540": 5.13735, "11545": 5.16093, "11550": 5.14439, "11555": 5.36028, "11560": 5.27581, "11565": 5.08411, "11570": 5.09748, "11575": 5.13256, "11580": 5.5915, "11585": 5.27239, "11590": 5.0515, "11595": 5.28831, "11600": 5.22951, "11605": 5.29821, "11610": 5.1794, "11615": 5.27238, "11620": 5.07543, "11625": 5.2532, "11630": 5.16624, "11635": 5.48732, "11640": 4.97976, "11645": 5.09481, "11650": 5.07, "11655": 5.4163, "11660": 5.14729, "11665": 5.22332, "11670": 5.29066, "11675": 5.01149, "11680": 5.24745, "11685": 5.3955, "11690": 5.2877, "11695": 5.31718, "11700": 5.27337, "11705": 5.17844, "11710": 5.0163, "11715": 5.01964, "11720": 5.24626, "11725": 5.18448, "11730": 5.16792, "11735": 5.21466, "11740": 5.22501, "11745": 5.16252, "11750": 5.16248, "11755": 5.16407, "11760": 5.2844, "11765": 5.28437, "11770": 5.04897, "11775": 5.53847, "11780": 5.28538, "11785": 5.53002, "11790": 
5.2765, "11795": 5.32103, "11800": 5.13019, "11805": 5.51958, "11810": 5.1034, "11815": 5.38569, "11820": 5.02823, "11825": 4.87016, "11830": 5.12661, "11835": 5.18867, "11840": 5.19562, "11845": 5.17948, "11850": 5.30315, "11855": 5.01859, "11860": 5.42594, "11865": 5.214, "11870": 5.4507, "11875": 5.04925, "11880": 5.28351, "11885": 4.99344, "11890": 5.27355, "11895": 4.94409, "11900": 5.33673, "11905": 5.16021, "11910": 5.36325, "11915": 5.11683, "11920": 5.27474, "11925": 5.14645, "11930": 5.14292, "11935": 5.10027, "11940": 4.93739, "11945": 5.37039, "11950": 5.11549, "11955": 5.03135, "11960": 4.90919, "11965": 5.11583, "11970": 5.20048, "11975": 4.99678, "11980": 5.04864, "11985": 5.16687, "11990": 5.12482, "11995": 5.12138, "12000": 5.08971, "12005": 5.04863, "12010": 5.23243, "12015": 4.9521, "12020": 5.26004, "12025": 5.12761, "12030": 4.93221, "12035": 4.90509, "12040": 5.11885, "12045": 5.35792, "12050": 5.33254, "12055": 4.95158, "12060": 5.24892, "12065": 5.25555, "12070": 5.161, "12075": 5.27294, "12080": 5.24702, "12085": 5.09961, "12090": 5.0043, "12095": 5.48174, "12100": 4.97703, "12105": 5.10722, "12110": 5.18597, "12115": 4.94918, "12120": 4.7691, "12125": 5.01242, "12130": 5.23206, "12135": 5.15475, "12140": 5.28888, "12145": 5.06497, "12150": 4.85407, "12155": 5.08306, "12160": 5.1481, "12165": 5.23872, "12170": 5.19311, "12175": 5.23483, "12180": 5.36383, "12185": 5.20117, "12190": 4.82771, "12195": 5.02846, "12200": 5.1913, "12205": 5.09558, "12210": 5.0232, "12215": 5.07948, "12220": 5.20316, "12225": 5.17347, "12230": 4.99102, "12235": 5.31941, "12240": 5.10751, "12245": 5.1069, "12250": 5.22126, "12255": 4.96663, "12260": 4.85554, "12265": 4.87446, "12270": 4.98725, "12275": 5.14349, "12280": 4.882, "12285": 4.92647, "12290": 5.17059, "12295": 5.25714, "12300": 5.48431, "12305": 5.1721, "12310": 5.27149, "12315": 5.03535, "12320": 5.00285, "12325": 5.28719, "12330": 5.2087, "12335": 5.37795, "12340": 4.96675, "12345": 5.3169, "12350": 4.93664, "12355": 5.1021, "12360": 5.01402, "12365": 4.77975, "12370": 5.10945, "12375": 4.87665, "12380": 5.21343, "12385": 5.22589, "12390": 5.09046, "12395": 5.15768, "12400": 5.13948, "12405": 5.32145, "12410": 5.16139, "12415": 5.30719, "12420": 5.39248, "12425": 5.24358, "12430": 4.92588, "12435": 5.02191, "12440": 4.98595, "12445": 5.24576, "12450": 5.08259, "12455": 5.068, "12460": 4.80591, "12465": 4.96585, "12470": 5.43363, "12475": 5.09508, "12480": 4.94249, "12485": 5.25178, "12490": 5.03435, "12495": 5.36914, "12500": 5.47981, "12505": 5.29396, "12510": 4.79058, "12515": 5.0575, "12520": 5.14624, "12525": 5.11066, "12530": 4.89231, "12535": 5.31974, "12540": 4.99776, "12545": 4.93234, "12550": 5.42742, "12555": 5.0198, "12560": 4.95947, "12565": 5.30194, "12570": 5.011, "12575": 5.15742, "12580": 4.9487, "12585": 5.24504, "12590": 4.81696, "12595": 5.20341, "12600": 5.21134, "12605": 5.10257, "12610": 5.21869, "12615": 5.14651, "12620": 5.32386, "12625": 5.02724, "12630": 5.11801, "12635": 5.30002, "12640": 4.92816, "12645": 5.29811, "12650": 4.97494, "12655": 5.0749, "12660": 5.13519, "12665": 5.07599, "12670": 5.01249, "12675": 5.32137, "12680": 4.90018, "12685": 4.95909, "12690": 5.22862, "12695": 4.88605, "12700": 5.00287, "12705": 5.13113, "12710": 4.94577, "12715": 4.98293, "12720": 4.9516, "12725": 5.04837, "12730": 4.94009, "12735": 4.8895, "12740": 5.11633, "12745": 4.78201, "12750": 4.80699, "12755": 5.05351, "12760": 4.73109, "12765": 5.18477, "12770": 5.01334, "12775": 5.05904, "12780": 5.25674, "12785": 
5.08532, "12790": 5.03848, "12795": 5.07792, "12800": 5.26545, "12805": 4.7628, "12810": 5.0439, "12815": 4.87974, "12820": 4.89723, "12825": 5.11194, "12830": 4.99866, "12835": 5.2359, "12840": 5.049, "12845": 5.09335, "12850": 4.74596, "12855": 5.05245, "12860": 5.0862, "12865": 5.10011, "12870": 4.93434, "12875": 5.19139, "12880": 5.00966, "12885": 5.10951, "12890": 5.40403, "12895": 5.06375, "12900": 4.86895, "12905": 5.12256, "12910": 4.89725, "12915": 4.9582, "12920": 5.22713, "12925": 5.02956, "12930": 5.04264, "12935": 4.97126, "12940": 5.28037, "12945": 4.75445, "12950": 5.21552, "12955": 4.77142, "12960": 5.04675, "12965": 5.0651, "12970": 4.74546, "12975": 5.05552, "12980": 4.95216, "12985": 4.91902, "12990": 4.87467, "12995": 5.09789, "13000": 5.05835, "13005": 5.07946, "13010": 4.7969, "13015": 4.97847, "13020": 5.01629, "13025": 5.07514, "13030": 5.07913, "13035": 4.91507, "13040": 5.06975, "13045": 5.06224, "13050": 5.03068, "13055": 4.9495, "13060": 5.23862, "13065": 5.28355, "13070": 5.14095, "13075": 5.22139, "13080": 4.97788, "13085": 5.24489, "13090": 5.06377, "13095": 5.12901, "13100": 5.01727, "13105": 5.12282, "13110": 5.02922, "13115": 4.85394, "13120": 5.09122, "13125": 4.96556, "13130": 5.12758, "13135": 4.98896, "13140": 4.86273, "13145": 5.41116, "13150": 5.15503, "13155": 5.26178, "13160": 4.88213, "13165": 4.9631, "13170": 5.2502, "13175": 5.05872, "13180": 4.64755, "13185": 5.27215, "13190": 4.96128, "13195": 5.25958, "13200": 4.85133, "13205": 5.31655, "13210": 4.77255, "13215": 5.14284, "13220": 4.82247, "13225": 5.19207, "13230": 5.00187, "13235": 5.20956, "13240": 5.00193, "13245": 5.26294, "13250": 4.83095, "13255": 4.68528, "13260": 5.00094, "13265": 5.18161, "13270": 5.10141, "13275": 5.07983, "13280": 4.78838, "13285": 5.3192, "13290": 5.12347, "13295": 5.19099, "13300": 5.11485, "13305": 4.8425, "13310": 4.8633, "13315": 5.11963, "13320": 5.12848, "13325": 4.96432, "13330": 4.91523, "13335": 5.05602, "13340": 4.91728, "13345": 4.73624, "13350": 5.09215, "13355": 4.9043, "13360": 4.95717, "13365": 4.93766, "13370": 4.93088, "13375": 5.02886, "13380": 5.07135, "13385": 4.8992, "13390": 5.25579, "13395": 5.11737, "13400": 4.84342, "13405": 4.94273, "13410": 4.77126, "13415": 4.85958, "13420": 4.98369, "13425": 4.9368, "13430": 4.97047, "13435": 4.98269, "13440": 4.8267, "13445": 4.82577, "13450": 5.0193, "13455": 4.81057, "13460": 4.99756, "13465": 4.97396, "13470": 5.0439, "13475": 4.95065, "13480": 5.07169, "13485": 4.96041, "13490": 5.24316, "13495": 5.12133, "13500": 4.77398, "13505": 4.95219, "13510": 5.20853, "13515": 4.94971, "13520": 5.11479, "13525": 4.92216, "13530": 4.79978, "13535": 4.82208, "13540": 5.08482, "13545": 4.83079, "13550": 5.0817, "13555": 5.05558, "13560": 5.23019, "13565": 5.10773, "13570": 4.94405, "13575": 4.98796, "13580": 4.67198, "13585": 5.37584, "13590": 5.09156, "13595": 4.98963, "13600": 4.9818, "13605": 4.9783, "13610": 5.12268, "13615": 4.82499, "13620": 4.96206, "13625": 5.17516, "13630": 4.76413, "13635": 4.74155, "13640": 5.00152, "13645": 4.9424, "13650": 4.77218, "13655": 4.72743, "13660": 4.97736, "13665": 5.05784, "13670": 4.84851, "13675": 5.19533, "13680": 5.26388, "13685": 4.96761, "13690": 4.94108, "13695": 5.07516, "13700": 4.92774, "13705": 5.00406, "13710": 4.95611, "13715": 4.85472, "13720": 4.85383, "13725": 4.87021, "13730": 4.84225, "13735": 4.98071, "13740": 5.17429, "13745": 5.05733, "13750": 4.89094, "13755": 4.74427, "13760": 4.68823, "13765": 5.02159, "13770": 5.08746, "13775": 4.83846, 
"13780": 4.7199, "13785": 5.05337, "13790": 4.78727, "13795": 4.78073, "13800": 4.89405, "13805": 4.84388, "13810": 4.96631, "13815": 4.77642, "13820": 4.79512, "13825": 4.96673, "13830": 5.10604, "13835": 4.92983, "13840": 5.18486, "13845": 4.92708, "13850": 4.81692, "13855": 4.84876, "13860": 5.03127, "13865": 4.80171, "13870": 5.14411, "13875": 4.83777, "13880": 4.9312, "13885": 4.98524, "13890": 4.90116, "13895": 4.81853, "13900": 4.58189, "13905": 4.94366, "13910": 4.9113, "13915": 5.04097, "13920": 4.75519, "13925": 5.17589, "13930": 4.63194, "13935": 5.07544, "13940": 5.21085, "13945": 4.96179, "13950": 5.03088, "13955": 4.68391, "13960": 4.93965, "13965": 5.09069, "13970": 5.08755, "13975": 4.92924, "13980": 4.90532, "13985": 4.91967, "13990": 4.97455, "13995": 4.92017, "14000": 5.06041, "14005": 4.90539, "14010": 4.88075, "14015": 4.79011, "14020": 5.06071, "14025": 4.91145, "14030": 4.96213, "14035": 4.78011, "14040": 4.735, "14045": 4.8533, "14050": 4.88, "14055": 5.07218, "14060": 4.9295, "14065": 5.07337, "14070": 4.80278, "14075": 5.03373, "14080": 4.69606, "14085": 5.08593, "14090": 5.07326, "14095": 4.84672, "14100": 4.95517, "14105": 4.87061, "14110": 4.78297, "14115": 5.07097, "14120": 5.01941, "14125": 4.49577, "14130": 5.03963, "14135": 4.84274, "14140": 5.06925, "14145": 4.66266, "14150": 4.85154, "14155": 4.85915, "14160": 4.77968, "14165": 4.95106, "14170": 4.97778, "14175": 4.84399, "14180": 4.80436, "14185": 4.62534, "14190": 5.05832, "14195": 5.00631, "14200": 4.54448, "14205": 5.09607, "14210": 5.00415, "14215": 4.92459, "14220": 4.71498, "14225": 5.27886, "14230": 4.92925, "14235": 4.74359, "14240": 4.85047, "14245": 4.92229, "14250": 5.19985, "14255": 5.22076, "14260": 4.82777, "14265": 4.96885, "14270": 5.24391, "14275": 5.01143, "14280": 4.96848, "14285": 4.67474, "14290": 5.16947, "14295": 5.00669, "14300": 4.69678, "14305": 4.87495, "14310": 5.19659, "14315": 4.86804, "14320": 5.11845, "14325": 5.10999, "14330": 4.60944, "14335": 5.04777, "14340": 4.68584, "14345": 4.89273, "14350": 4.97276, "14355": 4.68719, "14360": 4.87797, "14365": 4.82528, "14370": 4.89646, "14375": 4.94693, "14380": 4.79239, "14385": 5.1627, "14390": 4.91135, "14395": 4.67264, "14400": 4.85019, "14405": 4.94017, "14410": 4.80152, "14415": 5.07025, "14420": 5.18188, "14425": 4.96092, "14430": 4.96924, "14435": 5.17383, "14440": 4.5748, "14445": 4.61065, "14450": 5.03842, "14455": 4.55462, "14460": 4.84223, "14465": 4.96105, "14470": 4.91723, "14475": 4.82672, "14480": 4.89362, "14485": 4.89321, "14490": 4.99969, "14495": 4.79335, "14500": 4.91907, "14505": 5.15574, "14510": 4.59256, "14515": 4.90456, "14520": 4.90639, "14525": 5.0542, "14530": 4.56362, "14535": 5.22447, "14540": 4.95248, "14545": 4.8606, "14550": 4.86175, "14555": 4.74854, "14560": 4.60689, "14565": 4.78215, "14570": 4.96199, "14575": 5.07844, "14580": 4.9694, "14585": 4.75212, "14590": 4.79211, "14595": 4.94406, "14600": 4.76458, "14605": 4.75717, "14610": 4.79664, "14615": 5.04495, "14620": 4.7861, "14625": 4.80258, "14630": 5.0808, "14635": 4.81526, "14640": 4.76814, "14645": 5.06811, "14650": 4.95967, "14655": 4.82524, "14660": 5.02712, "14665": 4.93405, "14670": 4.44961, "14675": 5.00453, "14680": 4.66128, "14685": 4.63629, "14690": 4.65834, "14695": 4.70234, "14700": 4.97001, "14705": 4.84886, "14710": 4.77302, "14715": 5.06561, "14720": 4.73985, "14725": 4.82684, "14730": 4.597, "14735": 4.66732, "14740": 4.95551, "14745": 4.91261, "14750": 4.98104, "14755": 4.91475, "14760": 5.13584, "14765": 4.98991, "14770": 
4.94256, "14775": 4.49032, "14780": 4.78737, "14785": 4.83908, "14790": 4.72766, "14795": 4.79291, "14800": 4.69019, "14805": 4.64297, "14810": 4.92113, "14815": 4.58727, "14820": 4.57895, "14825": 4.63825, "14830": 5.11037, "14835": 4.49824, "14840": 5.0666, "14845": 5.14354, "14850": 4.49585, "14855": 4.47509, "14860": 5.02477, "14865": 4.73656, "14870": 5.03937, "14875": 4.78677, "14880": 4.97573, "14885": 4.94523, "14890": 4.7547, "14895": 4.711, "14900": 4.80398, "14905": 4.66544, "14910": 5.11436, "14915": 4.8928, "14920": 5.02061, "14925": 4.69705, "14930": 4.85289, "14935": 5.09179, "14940": 4.66701, "14945": 4.91429, "14950": 4.96155, "14955": 4.88416, "14960": 4.81647, "14965": 4.82959, "14970": 4.72983, "14975": 4.8859, "14980": 4.7443, "14985": 5.01679, "14990": 4.62507, "14995": 5.1501, "15000": 4.61726, "15005": 4.46448, "15010": 4.77228, "15015": 4.78616, "15020": 4.86662, "15025": 4.91126, "15030": 4.82187, "15035": 4.61415, "15040": 4.67773, "15045": 4.7885, "15050": 4.90907, "15055": 4.46094, "15060": 5.03479, "15065": 4.67986, "15070": 4.71613, "15075": 4.80371, "15080": 4.69624, "15085": 4.76377, "15090": 5.10905, "15095": 4.80407, "15100": 4.83957, "15105": 4.78592, "15110": 4.66904, "15115": 4.96919, "15120": 4.75555, "15125": 4.94584, "15130": 4.7302, "15135": 4.81139, "15140": 4.84251, "15145": 4.84664, "15150": 5.04692, "15155": 4.79099, "15160": 4.63616, "15165": 4.67299, "15170": 4.64371, "15175": 4.33728, "15180": 4.93077, "15185": 4.76129, "15190": 4.81275, "15195": 4.732, "15200": 5.00335, "15205": 4.68967, "15210": 5.00343, "15215": 4.99915, "15220": 4.91845, "15225": 4.88304, "15230": 4.56211, "15235": 4.90379, "15240": 4.70754, "15245": 4.79589, "15250": 4.57129, "15255": 5.02666, "15260": 4.63088, "15265": 4.8357, "15270": 4.65458, "15275": 5.09927, "15280": 4.84823, "15285": 4.71025, "15290": 4.88109, "15295": 5.11724, "15300": 4.62948, "15305": 4.87938, "15310": 4.76728, "15315": 4.64962, "15320": 4.91794, "15325": 5.11232, "15330": 4.74389, "15335": 5.06915, "15340": 4.76123, "15345": 4.73613, "15350": 4.57708, "15355": 4.86128, "15360": 4.61814, "15365": 4.91632, "15370": 4.86305, "15375": 4.91495, "15380": 4.56651, "15385": 4.66439, "15390": 4.88144, "15395": 4.53516, "15400": 4.82358, "15405": 4.51013, "15410": 4.59654, "15415": 4.7378, "15420": 4.98025, "15425": 4.78413, "15430": 4.6208, "15435": 4.64564, "15440": 4.88049, "15445": 4.83509, "15450": 4.60014, "15455": 5.06714, "15460": 4.63317, "15465": 4.86895, "15470": 4.67476, "15475": 4.69375, "15480": 4.44911, "15485": 4.82973, "15490": 4.87481, "15495": 4.73048, "15500": 4.73209, "15505": 4.70153, "15510": 4.80897, "15515": 4.91657, "15520": 4.74986, "15525": 4.86181, "15530": 4.49376, "15535": 4.89904, "15540": 4.71343, "15545": 4.42941, "15550": 4.66939, "15555": 4.98194, "15560": 4.83922, "15565": 4.71441, "15570": 4.87976, "15575": 4.76321, "15580": 4.61043, "15585": 4.85345, "15590": 4.75028, "15595": 4.96181, "15600": 4.82499, "15605": 4.94746, "15610": 5.02294, "15615": 4.90828, "15620": 4.75538, "15625": 4.63064, "15630": 4.35107, "15635": 4.95641, "15640": 4.74162, "15645": 4.72694, "15650": 4.838, "15655": 4.89808, "15660": 4.89726, "15665": 4.86149, "15670": 4.68214, "15675": 4.61003, "15680": 4.91538, "15685": 4.67333, "15690": 4.99196, "15695": 4.94638, "15700": 4.87047, "15705": 4.70504, "15710": 4.95721, "15715": 4.8336, "15720": 4.71177, "15725": 4.66986, "15730": 4.80267, "15735": 4.5262, "15740": 4.89773, "15745": 4.76169, "15750": 4.89063, "15755": 4.80249, "15760": 4.58412, 
"15765": 4.78709, "15770": 4.73682, "15775": 4.89219, "15780": 4.74371, "15785": 4.80648, "15790": 4.94953, "15795": 4.77252, "15800": 5.00844, "15805": 4.97006, "15810": 4.77161, "15815": 4.52479, "15820": 4.49663, "15825": 5.21024, "15830": 4.70069, "15835": 4.7152, "15840": 4.83244, "15845": 5.05392, "15850": 4.65416, "15855": 4.78907, "15860": 4.87932, "15865": 4.59653, "15870": 4.67887, "15875": 4.82706, "15880": 4.81861, "15885": 4.55304, "15890": 4.83067, "15895": 4.67164, "15900": 4.89679, "15905": 4.73252, "15910": 4.67523, "15915": 5.10474, "15920": 4.71633, "15925": 4.98675, "15930": 4.72251, "15935": 4.76102, "15940": 4.65008, "15945": 4.72186, "15950": 4.59063, "15955": 4.59168, "15960": 4.97179, "15965": 4.43611, "15970": 4.76462, "15975": 4.75878, "15980": 4.51832, "15985": 4.75727, "15990": 4.42976, "15995": 4.90496, "16000": 4.73644, "16005": 4.75611, "16010": 4.77518, "16015": 4.74127, "16020": 4.96216, "16025": 4.52848, "16030": 5.10113, "16035": 4.76771, "16040": 4.9631, "16045": 4.59155, "16050": 4.69177, "16055": 4.21813, "16060": 4.79882, "16065": 5.03091, "16070": 4.26489, "16075": 4.57745, "16080": 4.82988, "16085": 4.51359, "16090": 4.8553, "16095": 4.38598, "16100": 4.76893, "16105": 4.74054, "16110": 4.59574, "16115": 4.82994, "16120": 4.61522, "16125": 4.46617, "16130": 4.65964, "16135": 4.42344, "16140": 4.9383, "16145": 4.75281, "16150": 4.90806, "16155": 4.58389, "16160": 5.01088, "16165": 4.67459, "16170": 5.24962, "16175": 4.71061, "16180": 4.85859, "16185": 4.78086, "16190": 4.72136, "16195": 4.86534, "16200": 4.63469, "16205": 4.90568, "16210": 4.87132, "16215": 4.66195, "16220": 4.73631, "16225": 4.71388, "16230": 5.01414, "16235": 4.69234, "16240": 4.57211, "16245": 4.75379, "16250": 4.88843, "16255": 4.91551, "16260": 4.72797, "16265": 4.78156, "16270": 4.13823, "16275": 4.7422, "16280": 4.75245, "16285": 4.64212, "16290": 4.74042, "16295": 4.61102, "16300": 4.84722, "16305": 4.69841, "16310": 4.52174, "16315": 4.76673, "16320": 4.98793, "16325": 4.46588, "16330": 4.33829, "16335": 4.77882, "16340": 4.69208, "16345": 4.51736, "16350": 4.60563, "16355": 4.69158, "16360": 4.41956, "16365": 4.79952, "16370": 4.64817, "16375": 4.59589, "16380": 4.69301, "16385": 4.81637, "16390": 4.57045, "16395": 4.77635, "16400": 4.61618, "16405": 4.71955, "16410": 4.63093, "16415": 4.83192, "16420": 4.70967, "16425": 4.83068, "16430": 4.71843, "16435": 4.51644, "16440": 4.79729, "16445": 4.78431, "16450": 4.60779, "16455": 4.66745, "16460": 4.70261, "16465": 4.73449, "16470": 4.67175, "16475": 4.64561, "16480": 4.82412, "16485": 5.03595, "16490": 4.57843, "16495": 4.5867, "16500": 4.89045, "16505": 5.02283, "16510": 4.8717, "16515": 4.76853, "16520": 4.5538, "16525": 4.65332, "16530": 4.79114, "16535": 4.9551, "16540": 4.65485, "16545": 4.73293, "16550": 4.55559, "16555": 4.57983, "16560": 4.6274, "16565": 4.55136, "16570": 4.58413, "16575": 4.8398, "16580": 4.77564, "16585": 4.56099, "16590": 4.81681, "16595": 4.64738, "16600": 4.72925, "16605": 4.46543, "16610": 4.91362, "16615": 4.80961, "16620": 4.99331, "16625": 4.744, "16630": 4.66241, "16635": 4.91067, "16640": 4.95584, "16645": 4.48154, "16650": 4.3473, "16655": 4.76362, "16660": 4.70514, "16665": 4.70789, "16670": 4.66806, "16675": 4.80162, "16680": 4.57499, "16685": 4.61219, "16690": 4.99249, "16695": 4.85615, "16700": 4.60476, "16705": 4.87509, "16710": 4.75305, "16715": 4.50295, "16720": 4.72607, "16725": 4.61441, "16730": 4.53768, "16735": 4.56662, "16740": 4.58874, "16745": 4.88318, "16750": 4.07585, 
"16755": 4.30116, "16760": 4.63713, "16765": 4.50832, "16770": 4.90641, "16775": 4.63917, "16780": 4.85792, "16785": 4.43596, "16790": 4.58465, "16795": 4.72904, "16800": 4.65113, "16805": 4.53219, "16810": 4.47866, "16815": 4.3967, "16820": 4.59255, "16825": 4.4347, "16830": 4.83586, "16835": 4.71485, "16840": 4.71498, "16845": 4.74316, "16850": 4.79782, "16855": 4.66274, "16860": 4.82589, "16865": 4.67853, "16870": 4.47921, "16875": 4.56143, "16880": 4.71357, "16885": 4.85956, "16890": 4.54368, "16895": 4.57018, "16900": 4.73456, "16905": 4.64634, "16910": 4.5792, "16915": 4.54175, "16920": 4.87765, "16925": 4.7918, "16930": 4.70895, "16935": 4.90053, "16940": 4.56292, "16945": 4.61775, "16950": 4.68105, "16955": 4.35182, "16960": 4.60685, "16965": 4.63398, "16970": 4.53511, "16975": 4.64009, "16980": 4.72335, "16985": 4.47582, "16990": 4.51653, "16995": 4.79228, "17000": 4.81006, "17005": 4.55551, "17010": 4.76457, "17015": 4.57893, "17020": 4.89882, "17025": 4.54143, "17030": 4.6781, "17035": 4.64487, "17040": 4.83278, "17045": 4.62344, "17050": 4.46731, "17055": 4.57201, "17060": 4.79965, "17065": 4.5098, "17070": 4.71259, "17075": 4.68352, "17080": 4.88659, "17085": 4.51092, "17090": 4.3399, "17095": 4.44552, "17100": 4.87553, "17105": 4.76108, "17110": 4.75337, "17115": 4.88472, "17120": 4.70485, "17125": 4.58564, "17130": 4.6705, "17135": 4.51344, "17140": 4.61783, "17145": 4.78291, "17150": 4.69412, "17155": 4.62375, "17160": 4.26359, "17165": 4.73242, "17170": 4.44998, "17175": 4.90387, "17180": 4.48798, "17185": 4.63893, "17190": 4.82657, "17195": 5.01016, "17200": 4.67327, "17205": 4.80176, "17210": 4.57979, "17215": 4.70195, "17220": 4.47825, "17225": 4.56226, "17230": 4.92622, "17235": 4.57845, "17240": 4.64112, "17245": 4.93738, "17250": 4.84863, "17255": 4.69749, "17260": 4.69931, "17265": 4.57799, "17270": 5.10435, "17275": 4.80572, "17280": 4.71771, "17285": 4.57033, "17290": 4.80859, "17295": 4.54175, "17300": 4.63263, "17305": 4.89761, "17310": 4.71247, "17315": 4.63245, "17320": 4.52882, "17325": 4.4364, "17330": 4.51325, "17335": 4.52212, "17340": 4.66336, "17345": 4.82936, "17350": 4.75599, "17355": 4.42479, "17360": 4.71834, "17365": 4.75938, "17370": 4.48265, "17375": 4.66737, "17380": 4.62505, "17385": 4.65761, "17390": 4.66028, "17395": 4.59545, "17400": 4.59905, "17405": 4.90184, "17410": 4.6109, "17415": 5.06896, "17420": 4.46752, "17425": 4.5227, "17430": 4.67853, "17435": 4.75048, "17440": 4.74094, "17445": 4.83501, "17450": 4.62396, "17455": 4.5253, "17460": 4.51618, "17465": 4.57263, "17470": 4.57913, "17475": 4.71038, "17480": 4.6467, "17485": 4.74419, "17490": 4.70918, "17495": 4.4527, "17500": 4.42808, "17505": 4.61633, "17510": 4.3621, "17515": 4.67811, "17520": 4.42217, "17525": 4.68344, "17530": 4.79629, "17535": 4.47512, "17540": 4.77905, "17545": 4.79439, "17550": 4.89005, "17555": 4.62477, "17560": 4.37778, "17565": 4.43796, "17570": 4.53317, "17575": 4.79207, "17580": 4.62344, "17585": 4.56632, "17590": 4.78663, "17595": 4.20756, "17600": 4.33947, "17605": 4.77035, "17610": 4.40628, "17615": 4.49643, "17620": 4.69887, "17625": 4.48489, "17630": 4.59626, "17635": 4.85978, "17640": 4.68308, "17645": 4.49584, "17650": 4.58199, "17655": 4.70881, "17660": 4.36156, "17665": 4.54974, "17670": 4.88606, "17675": 4.85542, "17680": 4.83807, "17685": 4.72655, "17690": 4.87652, "17695": 4.55257, "17700": 4.4832, "17705": 4.79245, "17710": 4.66351, "17715": 4.8224, "17720": 4.90719, "17725": 4.3819, "17730": 4.75448, "17735": 4.50075, "17740": 4.6142, "17745": 
4.66771, "17750": 4.29231, "17755": 4.71925, "17760": 4.43914, "17765": 4.57809, "17770": 4.58769, "17775": 4.55025, "17780": 4.71631, "17785": 4.49097, "17790": 4.62366, "17795": 4.88061, "17800": 4.56311, "17805": 4.7476, "17810": 4.22324, "17815": 4.88838, "17820": 4.69264, "17825": 4.80549, "17830": 4.7681, "17835": 4.63043, "17840": 4.80906, "17845": 4.63118, "17850": 4.62579, "17855": 4.69438, "17860": 4.77084, "17865": 4.75619, "17870": 4.68389, "17875": 4.76077, "17880": 4.73793, "17885": 4.69999, "17890": 4.63289, "17895": 4.36349, "17900": 4.2935, "17905": 4.62394, "17910": 4.70538, "17915": 4.46943, "17920": 4.57111, "17925": 4.50366, "17930": 4.82982, "17935": 4.62689, "17940": 4.62205, "17945": 4.51045, "17950": 4.75139, "17955": 4.48931, "17960": 4.51317, "17965": 4.53564, "17970": 4.57989, "17975": 4.46925, "17980": 4.41075, "17985": 4.59817, "17990": 4.64773, "17995": 4.9858, "18000": 4.54267, "18005": 4.62839, "18010": 4.75788, "18015": 4.67409, "18020": 4.60056, "18025": 4.78308, "18030": 4.5976, "18035": 4.42175, "18040": 4.71128, "18045": 4.6728, "18050": 4.69279, "18055": 4.31758, "18060": 4.67305, "18065": 4.59785, "18070": 4.78982, "18075": 4.61025, "18080": 4.24851, "18085": 4.67895, "18090": 4.54553, "18095": 4.67201, "18100": 4.85044, "18105": 4.63047, "18110": 4.74025, "18115": 4.62119, "18120": 4.60827, "18125": 4.74019, "18130": 4.67008, "18135": 4.81161, "18140": 4.64753, "18145": 4.369, "18150": 4.62485, "18155": 4.46126, "18160": 4.84904, "18165": 4.51582, "18170": 4.53564, "18175": 4.64274, "18180": 4.55416, "18185": 4.58186, "18190": 4.48279, "18195": 4.59479, "18200": 4.74886, "18205": 4.49389, "18210": 4.41001, "18215": 4.84476, "18220": 4.39071, "18225": 4.71667, "18230": 4.49112, "18235": 4.62935, "18240": 4.79786, "18245": 4.60775, "18250": 4.63901, "18255": 4.6543, "18260": 4.58442, "18265": 4.51652, "18270": 4.80473, "18275": 4.48737, "18280": 4.56113, "18285": 4.89253, "18290": 4.64952, "18295": 4.81316, "18300": 4.60972, "18305": 4.33753, "18310": 4.69129, "18315": 4.18635, "18320": 4.28598, "18325": 4.50003, "18330": 4.46752, "18335": 4.76026, "18340": 5.00781, "18345": 4.58459, "18350": 4.60438, "18355": 4.52475, "18360": 4.49551, "18365": 4.44677, "18370": 4.60457, "18375": 4.50868, "18380": 4.67696, "18385": 4.62345, "18390": 4.74967, "18395": 4.65603, "18400": 4.5379, "18405": 4.65347, "18410": 4.52271, "18415": 4.5702, "18420": 4.63743, "18425": 4.64645, "18430": 4.75553, "18435": 4.24213, "18440": 4.44301, "18445": 4.7483, "18450": 4.39981, "18455": 4.63998, "18460": 4.31669, "18465": 4.54673, "18470": 4.28561, "18475": 4.87112, "18480": 4.56955, "18485": 4.57785, "18490": 4.76414, "18495": 4.6237, "18500": 4.73452, "18505": 4.7449, "18510": 4.47383, "18515": 4.36314, "18520": 4.60693, "18525": 4.35145, "18530": 4.74158, "18535": 4.74229, "18540": 4.44775, "18545": 4.6846, "18550": 4.90053, "18555": 4.70875, "18560": 4.31332, "18565": 4.59618, "18570": 4.67911, "18575": 4.55279, "18580": 4.82963, "18585": 4.5419, "18590": 4.81632, "18595": 4.64786, "18600": 4.36163, "18605": 4.73325, "18610": 4.51279, "18615": 4.69515, "18620": 4.36364, "18625": 4.42725, "18630": 4.33219, "18635": 4.40085, "18640": 4.856, "18645": 4.64445, "18650": 4.4747, "18655": 4.46273, "18660": 4.71444, "18665": 4.61387, "18670": 4.52895, "18675": 4.54565, "18680": 4.33753, "18685": 4.43054, "18690": 4.60592, "18695": 4.52619, "18700": 4.74638, "18705": 4.42479, "18710": 4.32703, "18715": 4.4658, "18720": 4.5123, "18725": 4.57057, "18730": 4.47144, "18735": 4.36268, 
"18740": 4.57819, "18745": 4.44156, "18750": 4.58138, "18755": 4.50182, "18760": 4.75272, "18765": 4.63178, "18770": 4.65315, "18775": 4.26451, "18780": 4.47413, "18785": 4.90897, "18790": 4.48637, "18795": 4.80837, "18800": 4.6657, "18805": 4.28007, "18810": 4.7012, "18815": 4.71722, "18820": 4.54938, "18825": 4.79182, "18830": 4.56919, "18835": 4.59487, "18840": 4.44581, "18845": 4.56059, "18850": 4.72333, "18855": 4.44977, "18860": 4.52308, "18865": 4.75003, "18870": 4.60174, "18875": 4.40419, "18880": 4.78091, "18885": 4.62416, "18890": 4.5369, "18895": 4.54541, "18900": 4.5916, "18905": 4.56475, "18910": 4.62544, "18915": 4.74832, "18920": 4.4313, "18925": 4.26308, "18930": 4.64015, "18935": 4.71589, "18940": 4.5718, "18945": 4.67173, "18950": 4.5568, "18955": 4.72252, "18960": 4.83922, "18965": 4.04581, "18970": 4.24623, "18975": 4.75319, "18980": 4.67134, "18985": 4.38808, "18990": 4.64328, "18995": 4.78835, "19000": 4.61253, "19005": 4.47447, "19010": 4.58417, "19015": 4.49477, "19020": 4.60018, "19025": 4.60011, "19030": 4.46917, "19035": 4.51305, "19040": 4.58187, "19045": 4.74315, "19050": 4.43168, "19055": 4.38998, "19060": 4.50199, "19065": 4.80845, "19070": 4.65877, "19075": 4.60243, "19080": 4.73796, "19085": 4.52475, "19090": 4.57921, "19095": 4.5585, "19100": 4.656, "19105": 4.80028, "19110": 4.4344, "19115": 4.43435, "19120": 4.13201, "19125": 4.37658, "19130": 4.70057, "19135": 4.53089, "19140": 4.58025, "19145": 4.59107, "19150": 4.53357, "19155": 4.37327, "19160": 4.65978, "19165": 4.64166, "19170": 4.42109, "19175": 4.36964, "19180": 4.74881, "19185": 4.70015, "19190": 4.52469, "19195": 4.70496, "19200": 4.44202, "19205": 4.43192, "19210": 4.6448, "19215": 4.49806, "19220": 4.58907, "19225": 4.68636, "19230": 4.31844, "19235": 4.6502, "19240": 4.58176, "19245": 4.59193, "19250": 4.26414, "19255": 4.63363, "19260": 4.5978, "19265": 4.44427, "19270": 4.62637, "19275": 4.42743, "19280": 4.58868, "19285": 4.71063, "19290": 4.43136, "19295": 4.64557, "19300": 4.5755, "19305": 4.72014, "19310": 4.2826, "19315": 4.22894, "19320": 4.83209, "19325": 4.46836, "19330": 4.7038, "19335": 4.35192, "19340": 4.64478, "19345": 4.5561, "19350": 4.67583, "19355": 4.62954, "19360": 4.58004, "19365": 4.87397, "19370": 4.773, "19375": 4.6667, "19380": 4.40798, "19385": 4.3882, "19390": 4.5454, "19395": 4.41139, "19400": 4.6563, "19405": 4.74907, "19410": 4.47842, "19415": 4.66354, "19420": 4.64515, "19425": 4.77727, "19430": 4.67154, "19435": 4.67746, "19440": 4.51631, "19445": 4.34048, "19450": 4.19112, "19455": 4.57009, "19460": 4.52358, "19465": 4.63305, "19470": 4.59146, "19475": 4.25064, "19480": 4.89052, "19485": 4.49461, "19490": 4.35244, "19495": 4.62929, "19500": 4.44557, "19505": 4.61865, "19510": 4.47805, "19515": 4.52399, "19520": 4.64291, "19525": 4.59414, "19530": 4.40508, "19535": 4.53949, "19540": 4.71058, "19545": 4.68448, "19550": 4.52389, "19555": 4.4384, "19560": 4.81034, "19565": 4.91339, "19570": 4.52993, "19575": 4.71901, "19580": 4.77623, "19585": 4.41676, "19590": 4.34956, "19595": 4.68046, "19600": 4.40916, "19605": 4.77361, "19610": 4.7495, "19615": 4.41667, "19620": 4.79909, "19625": 4.19617, "19630": 4.73191, "19635": 4.6578, "19640": 4.60394, "19645": 4.55133, "19650": 4.31026, "19655": 4.59461, "19660": 4.21976, "19665": 4.67517, "19670": 4.5575, "19675": 4.4969, "19680": 4.43156, "19685": 4.64169, "19690": 4.83713, "19695": 4.36072, "19700": 4.65441, "19705": 4.53829, "19710": 4.6117, "19715": 4.54582, "19720": 4.47752, "19725": 4.63546, "19730": 4.57761, 
"19735": 4.44372, "19740": 4.52101, "19745": 4.55598, "19750": 4.47943, "19755": 4.37928, "19760": 4.38406, "19765": 4.36144, "19770": 4.61172, "19775": 4.42726, "19780": 4.48209, "19785": 4.85932, "19790": 4.5821, "19795": 4.6069, "19800": 4.55334, "19805": 4.457, "19810": 4.72266, "19815": 4.58852, "19820": 4.88717, "19825": 4.54052, "19830": 4.83609, "19835": 4.81761, "19840": 4.37338, "19845": 4.74161, "19850": 4.76964, "19855": 4.46719, "19860": 4.45521, "19865": 4.51713, "19870": 4.5979, "19875": 4.46938, "19880": 4.50257, "19885": 4.69777, "19890": 4.5317, "19895": 4.48675, "19900": 4.44091, "19905": 4.47098, "19910": 4.73032, "19915": 4.30771, "19920": 4.56773, "19925": 4.42538, "19930": 4.33667, "19935": 4.87762, "19940": 4.71203, "19945": 4.5584, "19950": 4.80008, "19955": 4.50588, "19960": 4.43657, "19965": 4.38298, "19970": 4.37515, "19975": 4.50591, "19980": 4.64319, "19985": 4.49542, "19990": 4.64373, "19995": 4.30228, "20000": 4.81103}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": 146450944.0, "5": 146450944.0, "10": 146450944.0, "15": 146450944.0, "20": 224809984.0, "25": 225334272.0, "30": 225727488.0, "35": 224809984.0, "40": 224809984.0, "45": 225334272.0, "50": 224809984.0, "55": 224809984.0, "60": 224809984.0, "65": 224809984.0, "70": 225334272.0, "75": 225334272.0, "80": 224809984.0, "85": 225203200.0, "90": 225334272.0, "95": 225334272.0, "100": 225334272.0, "105": 224809984.0, "110": 225334272.0, "115": 224809984.0, "120": 224809984.0, "125": 225858560.0, "130": 224809984.0, "135": 224809984.0, "140": 224809984.0, "145": 225334272.0, "150": 225203200.0, "155": 224809984.0, "160": 225334272.0, "165": 225334272.0, "170": 224809984.0, "175": 225334272.0, "180": 225334272.0, "185": 224809984.0, "190": 224809984.0, "195": 224809984.0, "200": 225334272.0, "205": 225334272.0, "210": 224809984.0, "215": 225334272.0, "220": 225858560.0, "225": 224809984.0, "230": 224809984.0, "235": 224809984.0, "240": 225334272.0, "245": 224809984.0, "250": 224809984.0, "255": 224809984.0, "260": 225334272.0, "265": 224809984.0, "270": 225334272.0, "275": 225334272.0, "280": 225334272.0, "285": 225858560.0, "290": 225334272.0, "295": 225203200.0, "300": 225858560.0, "305": 225334272.0, "310": 225858560.0, "315": 225334272.0, "320": 224809984.0, "325": 225334272.0, "330": 225203200.0, "335": 224809984.0, "340": 225334272.0, "345": 225334272.0, "350": 224809984.0, "355": 225334272.0, "360": 224809984.0, "365": 224809984.0, "370": 225334272.0, "375": 225334272.0, "380": 225334272.0, "385": 225334272.0, "390": 224809984.0, "395": 224809984.0, "400": 225334272.0, "405": 225334272.0, "410": 224809984.0, "415": 225334272.0, "420": 225858560.0, "425": 224809984.0, "430": 224809984.0, "435": 224809984.0, "440": 225334272.0, "445": 225334272.0, "450": 224809984.0, "455": 225334272.0, "460": 225334272.0, "465": 225858560.0, "470": 224809984.0, "475": 224809984.0, "480": 224809984.0, "485": 225334272.0, "490": 224809984.0, "495": 224809984.0, "500": 225334272.0, "505": 225858560.0, "510": 225334272.0, "515": 225334272.0, "520": 225858560.0, "525": 224809984.0, "530": 225334272.0, "535": 224809984.0, "540": 224809984.0, "545": 224809984.0, "550": 225334272.0, "555": 225334272.0, "560": 225334272.0, "565": 224809984.0, "570": 225334272.0, "575": 225858560.0, "580": 225858560.0, "585": 224809984.0, "590": 224809984.0, "595": 224809984.0, "600": 224809984.0, "605": 224809984.0, "610": 225334272.0, "615": 225334272.0, "620": 225334272.0, "625": 
225334272.0, "630": 224809984.0, "635": 225334272.0, "640": 224809984.0, "645": 225858560.0, "650": 225334272.0, "655": 225334272.0, "660": 225858560.0, "665": 224809984.0, "670": 225334272.0, "675": 224809984.0, "680": 224809984.0, "685": 224809984.0, "690": 225203200.0, "695": 225334272.0, "700": 224809984.0, "705": 225334272.0, "710": 224809984.0, "715": 225334272.0, "720": 225858560.0, "725": 225334272.0, "730": 225334272.0, "735": 224809984.0, "740": 225334272.0, "745": 225858560.0, "750": 224809984.0, "755": 224809984.0, "760": 224809984.0, "765": 224809984.0, "770": 225334272.0, "775": 225334272.0, "780": 224809984.0, "785": 225334272.0, "790": 225334272.0, "795": 225858560.0, "800": 224809984.0, "805": 225334272.0, "810": 224809984.0, "815": 225334272.0, "820": 224809984.0, "825": 225334272.0, "830": 225334272.0, "835": 224809984.0, "840": 224809984.0, "845": 225858560.0, "850": 225334272.0, "855": 224809984.0, "860": 225334272.0, "865": 224809984.0, "870": 225334272.0, "875": 225858560.0, "880": 224809984.0, "885": 224809984.0, "890": 224809984.0, "895": 224809984.0, "900": 225858560.0, "905": 225858560.0, "910": 224809984.0, "915": 224809984.0, "920": 224809984.0, "925": 225334272.0, "930": 225334272.0, "935": 225203200.0, "940": 225334272.0, "945": 225858560.0, "950": 224809984.0, "955": 224809984.0, "960": 224809984.0, "965": 224809984.0, "970": 225334272.0, "975": 224809984.0, "980": 225334272.0, "985": 225334272.0, "990": 224809984.0, "995": 225334272.0, "1000": 224809984.0, "1005": 225203200.0, "1010": 224809984.0, "1015": 224809984.0, "1020": 224809984.0, "1025": 224809984.0, "1030": 224809984.0, "1035": 224809984.0, "1040": 225334272.0, "1045": 225334272.0, "1050": 225334272.0, "1055": 224809984.0, "1060": 225858560.0, "1065": 224809984.0, "1070": 225334272.0, "1075": 225858560.0, "1080": 224809984.0, "1085": 225334272.0, "1090": 225858560.0, "1095": 225334272.0, "1100": 225334272.0, "1105": 225334272.0, "1110": 225334272.0, "1115": 224809984.0, "1120": 224809984.0, "1125": 225334272.0, "1130": 224809984.0, "1135": 225858560.0, "1140": 224809984.0, "1145": 225334272.0, "1150": 224809984.0, "1155": 225334272.0, "1160": 224809984.0, "1165": 225334272.0, "1170": 224809984.0, "1175": 225334272.0, "1180": 225334272.0, "1185": 225334272.0, "1190": 225334272.0, "1195": 225334272.0, "1200": 225858560.0, "1205": 224809984.0, "1210": 224809984.0, "1215": 225334272.0, "1220": 225334272.0, "1225": 225334272.0, "1230": 225334272.0, "1235": 225334272.0, "1240": 225334272.0, "1245": 225334272.0, "1250": 225334272.0, "1255": 224809984.0, "1260": 224809984.0, "1265": 224809984.0, "1270": 224809984.0, "1275": 225334272.0, "1280": 224809984.0, "1285": 225334272.0, "1290": 225334272.0, "1295": 224809984.0, "1300": 224809984.0, "1305": 225334272.0, "1310": 224809984.0, "1315": 224809984.0, "1320": 225334272.0, "1325": 225334272.0, "1330": 224809984.0, "1335": 224809984.0, "1340": 225858560.0, "1345": 225334272.0, "1350": 224809984.0, "1355": 225334272.0, "1360": 225858560.0, "1365": 225334272.0, "1370": 225334272.0, "1375": 224809984.0, "1380": 224809984.0, "1385": 225727488.0, "1390": 224809984.0, "1395": 225334272.0, "1400": 225203200.0, "1405": 225858560.0, "1410": 224809984.0, "1415": 224809984.0, "1420": 225334272.0, "1425": 225334272.0, "1430": 225334272.0, "1435": 225334272.0, "1440": 225858560.0, "1445": 225334272.0, "1450": 225334272.0, "1455": 225334272.0, "1460": 224809984.0, "1465": 225334272.0, "1470": 225334272.0, "1475": 225858560.0, "1480": 225334272.0, "1485": 224809984.0, 
"1490": 225334272.0, "1495": 224809984.0, "1500": 224809984.0, "1505": 225334272.0, "1510": 224809984.0, "1515": 225334272.0, "1520": 224809984.0, "1525": 224809984.0, "1530": 225858560.0, "1535": 225334272.0, "1540": 225334272.0, "1545": 224809984.0, "1550": 225334272.0, "1555": 225334272.0, "1560": 224809984.0, "1565": 225334272.0, "1570": 225858560.0, "1575": 225334272.0, "1580": 224809984.0, "1585": 224809984.0, "1590": 225334272.0, "1595": 225334272.0, "1600": 225334272.0, "1605": 225203200.0, "1610": 224809984.0, "1615": 224809984.0, "1620": 224809984.0, "1625": 224809984.0, "1630": 225334272.0, "1635": 225858560.0, "1640": 225334272.0, "1645": 224809984.0, "1650": 224809984.0, "1655": 224809984.0, "1660": 224809984.0, "1665": 224809984.0, "1670": 224809984.0, "1675": 225334272.0, "1680": 224809984.0, "1685": 225334272.0, "1690": 225334272.0, "1695": 224809984.0, "1700": 225334272.0, "1705": 225334272.0, "1710": 224809984.0, "1715": 224809984.0, "1720": 225334272.0, "1725": 224809984.0, "1730": 225334272.0, "1735": 224809984.0, "1740": 224809984.0, "1745": 224809984.0, "1750": 225334272.0, "1755": 224809984.0, "1760": 224809984.0, "1765": 224809984.0, "1770": 224809984.0, "1775": 225334272.0, "1780": 225334272.0, "1785": 225334272.0, "1790": 225334272.0, "1795": 224809984.0, "1800": 225334272.0, "1805": 224809984.0, "1810": 225858560.0, "1815": 225334272.0, "1820": 225334272.0, "1825": 225334272.0, "1830": 225334272.0, "1835": 225858560.0, "1840": 225334272.0, "1845": 225334272.0, "1850": 225858560.0, "1855": 225334272.0, "1860": 224809984.0, "1865": 225334272.0, "1870": 224809984.0, "1875": 225334272.0, "1880": 225334272.0, "1885": 224809984.0, "1890": 225334272.0, "1895": 224809984.0, "1900": 225334272.0, "1905": 224809984.0, "1910": 225203200.0, "1915": 224809984.0, "1920": 225334272.0, "1925": 225334272.0, "1930": 225858560.0, "1935": 224809984.0, "1940": 225858560.0, "1945": 225203200.0, "1950": 224809984.0, "1955": 225334272.0, "1960": 225334272.0, "1965": 225334272.0, "1970": 224809984.0, "1975": 224809984.0, "1980": 225334272.0, "1985": 224809984.0, "1990": 225334272.0, "1995": 224809984.0, "2000": 225334272.0, "2005": 224809984.0, "2010": 225334272.0, "2015": 224809984.0, "2020": 225334272.0, "2025": 225334272.0, "2030": 225334272.0, "2035": 225334272.0, "2040": 225334272.0, "2045": 225727488.0, "2050": 225334272.0, "2055": 224809984.0, "2060": 224809984.0, "2065": 224809984.0, "2070": 225334272.0, "2075": 224809984.0, "2080": 225334272.0, "2085": 225334272.0, "2090": 225334272.0, "2095": 225858560.0, "2100": 224809984.0, "2105": 225334272.0, "2110": 225334272.0, "2115": 225858560.0, "2120": 225334272.0, "2125": 224809984.0, "2130": 224809984.0, "2135": 225334272.0, "2140": 224809984.0, "2145": 225858560.0, "2150": 225334272.0, "2155": 224809984.0, "2160": 224809984.0, "2165": 225334272.0, "2170": 224809984.0, "2175": 224809984.0, "2180": 224809984.0, "2185": 224809984.0, "2190": 225334272.0, "2195": 225334272.0, "2200": 225334272.0, "2205": 225858560.0, "2210": 224809984.0, "2215": 225203200.0, "2220": 225334272.0, "2225": 224809984.0, "2230": 225858560.0, "2235": 225203200.0, "2240": 224809984.0, "2245": 225334272.0, "2250": 225334272.0, "2255": 225334272.0, "2260": 225858560.0, "2265": 224809984.0, "2270": 224809984.0, "2275": 224809984.0, "2280": 224809984.0, "2285": 224809984.0, "2290": 224809984.0, "2295": 225334272.0, "2300": 225858560.0, "2305": 225858560.0, "2310": 225334272.0, "2315": 225334272.0, "2320": 224809984.0, "2325": 224809984.0, "2330": 224809984.0, 
"2335": 225334272.0, "2340": 225334272.0, "2345": 225334272.0, "2350": 225334272.0, "2355": 225334272.0, "2360": 225334272.0, "2365": 225334272.0, "2370": 224809984.0, "2375": 225334272.0, "2380": 225334272.0, "2385": 225334272.0, "2390": 225334272.0, "2395": 225334272.0, "2400": 225334272.0, "2405": 225334272.0, "2410": 225334272.0, "2415": 225334272.0, "2420": 225334272.0, "2425": 225334272.0, "2430": 224809984.0, "2435": 225334272.0, "2440": 225334272.0, "2445": 224809984.0, "2450": 225858560.0, "2455": 225334272.0, "2460": 225858560.0, "2465": 224809984.0, "2470": 225334272.0, "2475": 225334272.0, "2480": 225334272.0, "2485": 224809984.0, "2490": 225334272.0, "2495": 225334272.0, "2500": 225334272.0, "2505": 224809984.0, "2510": 225334272.0, "2515": 224809984.0, "2520": 225334272.0, "2525": 225334272.0, "2530": 225334272.0, "2535": 224809984.0, "2540": 225334272.0, "2545": 225858560.0, "2550": 224809984.0, "2555": 224809984.0, "2560": 225334272.0, "2565": 224809984.0, "2570": 225334272.0, "2575": 225334272.0, "2580": 225334272.0, "2585": 224809984.0, "2590": 225858560.0, "2595": 225858560.0, "2600": 225858560.0, "2605": 225727488.0, "2610": 224809984.0, "2615": 225334272.0, "2620": 225334272.0, "2625": 225334272.0, "2630": 225334272.0, "2635": 225334272.0, "2640": 225334272.0, "2645": 225334272.0, "2650": 225334272.0, "2655": 225334272.0, "2660": 225334272.0, "2665": 225334272.0, "2670": 225334272.0, "2675": 225334272.0, "2680": 224809984.0, "2685": 225334272.0, "2690": 225334272.0, "2695": 225334272.0, "2700": 225334272.0, "2705": 225334272.0, "2710": 225334272.0, "2715": 225334272.0, "2720": 224809984.0, "2725": 225334272.0, "2730": 225334272.0, "2735": 225334272.0, "2740": 225334272.0, "2745": 225334272.0, "2750": 225334272.0, "2755": 225334272.0, "2760": 225334272.0, "2765": 225334272.0, "2770": 225334272.0, "2775": 225334272.0, "2780": 224809984.0, "2785": 225334272.0, "2790": 225334272.0, "2795": 224809984.0, "2800": 225334272.0, "2805": 225334272.0, "2810": 225334272.0, "2815": 224809984.0, "2820": 225858560.0, "2825": 225334272.0, "2830": 224809984.0, "2835": 225334272.0, "2840": 225334272.0, "2845": 224809984.0, "2850": 225334272.0, "2855": 224809984.0, "2860": 224809984.0, "2865": 225334272.0, "2870": 225334272.0, "2875": 225334272.0, "2880": 225334272.0, "2885": 225334272.0, "2890": 225334272.0, "2895": 224809984.0, "2900": 225334272.0, "2905": 225334272.0, "2910": 225334272.0, "2915": 225334272.0, "2920": 225334272.0, "2925": 225334272.0, "2930": 225334272.0, "2935": 225334272.0, "2940": 225727488.0, "2945": 224809984.0, "2950": 225334272.0, "2955": 225334272.0, "2960": 225203200.0, "2965": 225334272.0, "2970": 224809984.0, "2975": 225334272.0, "2980": 225334272.0, "2985": 225334272.0, "2990": 224809984.0, "2995": 225334272.0, "3000": 225334272.0, "3005": 225858560.0, "3010": 225334272.0, "3015": 224809984.0, "3020": 225334272.0, "3025": 224809984.0, "3030": 225334272.0, "3035": 224809984.0, "3040": 225858560.0, "3045": 225334272.0, "3050": 224809984.0, "3055": 225334272.0, "3060": 225334272.0, "3065": 222843904.0, "3070": 223237120.0, "3075": 223237120.0, "3080": 223237120.0, "3085": 222843904.0, "3090": 223368192.0, "3095": 223237120.0, "3100": 223237120.0, "3105": 223368192.0, "3110": 223237120.0, "3115": 223237120.0, "3120": 223237120.0, "3125": 222843904.0, "3130": 223368192.0, "3135": 222843904.0, "3140": 222843904.0, "3145": 223237120.0, "3150": 223237120.0, "3155": 223237120.0, "3160": 222843904.0, "3165": 223368192.0, "3170": 223368192.0, "3175": 223237120.0, 
"3180": 223237120.0, "3185": 223237120.0, "3190": 223237120.0, "3195": 222843904.0, "3200": 223237120.0, "3205": 223237120.0, "3210": 223237120.0, "3215": 222843904.0, "3220": 223368192.0, "3225": 222843904.0, "3230": 222843904.0, "3235": 223237120.0, "3240": 222843904.0, "3245": 223237120.0, "3250": 223237120.0, "3255": 222843904.0, "3260": 222843904.0, "3265": 222843904.0, "3270": 223237120.0, "3275": 222843904.0, "3280": 223368192.0, "3285": 223237120.0, "3290": 222843904.0, "3295": 222843904.0, "3300": 222843904.0, "3305": 223237120.0, "3310": 223368192.0, "3315": 223368192.0, "3320": 222843904.0, "3325": 223237120.0, "3330": 223237120.0, "3335": 223237120.0, "3340": 222843904.0, "3345": 223237120.0, "3350": 222843904.0, "3355": 222843904.0, "3360": 223368192.0, "3365": 222843904.0, "3370": 222843904.0, "3375": 223237120.0, "3380": 222843904.0, "3385": 222843904.0, "3390": 222843904.0, "3395": 223237120.0, "3400": 223237120.0, "3405": 222843904.0, "3410": 223237120.0, "3415": 223237120.0, "3420": 222843904.0, "3425": 223368192.0, "3430": 223237120.0, "3435": 222843904.0, "3440": 223237120.0, "3445": 222843904.0, "3450": 223368192.0, "3455": 223237120.0, "3460": 222843904.0, "3465": 222843904.0, "3470": 223368192.0, "3475": 223237120.0, "3480": 223237120.0, "3485": 223237120.0, "3490": 223368192.0, "3495": 223237120.0, "3500": 223237120.0, "3505": 223368192.0, "3510": 223368192.0, "3515": 222843904.0, "3520": 223368192.0, "3525": 223368192.0, "3530": 222843904.0, "3535": 223237120.0, "3540": 223237120.0, "3545": 223237120.0, "3550": 223237120.0, "3555": 223368192.0, "3560": 223237120.0, "3565": 223237120.0, "3570": 223237120.0, "3575": 222843904.0, "3580": 223368192.0, "3585": 222843904.0, "3590": 223237120.0, "3595": 222843904.0, "3600": 223237120.0, "3605": 223368192.0, "3610": 222843904.0, "3615": 223368192.0, "3620": 222843904.0, "3625": 222843904.0, "3630": 223237120.0, "3635": 223368192.0, "3640": 223237120.0, "3645": 223368192.0, "3650": 222843904.0, "3655": 223368192.0, "3660": 222843904.0, "3665": 223368192.0, "3670": 223237120.0, "3675": 223237120.0, "3680": 223368192.0, "3685": 222843904.0, "3690": 223237120.0, "3695": 223237120.0, "3700": 223368192.0, "3705": 223368192.0, "3710": 223237120.0, "3715": 222843904.0, "3720": 223237120.0, "3725": 223368192.0, "3730": 223237120.0, "3735": 223368192.0, "3740": 223368192.0, "3745": 223368192.0, "3750": 223368192.0, "3755": 222843904.0, "3760": 223237120.0, "3765": 222843904.0, "3770": 223237120.0, "3775": 223237120.0, "3780": 223237120.0, "3785": 223237120.0, "3790": 222843904.0, "3795": 223237120.0, "3800": 223237120.0, "3805": 222843904.0, "3810": 223237120.0, "3815": 222843904.0, "3820": 222843904.0, "3825": 223237120.0, "3830": 222843904.0, "3835": 222843904.0, "3840": 222843904.0, "3845": 223237120.0, "3850": 223368192.0, "3855": 222843904.0, "3860": 222843904.0, "3865": 223237120.0, "3870": 222843904.0, "3875": 223368192.0, "3880": 223237120.0, "3885": 222843904.0, "3890": 223368192.0, "3895": 223237120.0, "3900": 222843904.0, "3905": 222843904.0, "3910": 222843904.0, "3915": 223368192.0, "3920": 223237120.0, "3925": 223237120.0, "3930": 222843904.0, "3935": 222843904.0, "3940": 222843904.0, "3945": 223237120.0, "3950": 223237120.0, "3955": 222843904.0, "3960": 222843904.0, "3965": 222843904.0, "3970": 223237120.0, "3975": 223237120.0, "3980": 223368192.0, "3985": 222843904.0, "3990": 222843904.0, "3995": 222843904.0, "4000": 222843904.0, "4005": 222843904.0, "4010": 223237120.0, "4015": 223237120.0, "4020": 222843904.0, 
"4025": 222843904.0, "4030": 223237120.0, "4035": 222843904.0, "4040": 223237120.0, "4045": 222843904.0, "4050": 222843904.0, "4055": 222843904.0, "4060": 222843904.0, "4065": 223368192.0, "4070": 222843904.0, "4075": 223237120.0, "4080": 222843904.0, "4085": 223368192.0, "4090": 223237120.0, "4095": 223237120.0, "4100": 223368192.0, "4105": 223237120.0, "4110": 223237120.0, "4115": 223237120.0, "4120": 223237120.0, "4125": 223368192.0, "4130": 223237120.0, "4135": 223237120.0, "4140": 223237120.0, "4145": 223237120.0, "4150": 223237120.0, "4155": 223237120.0, "4160": 223237120.0, "4165": 222843904.0, "4170": 222843904.0, "4175": 223237120.0, "4180": 223237120.0, "4185": 223237120.0, "4190": 223237120.0, "4195": 222843904.0, "4200": 223237120.0, "4205": 223237120.0, "4210": 223237120.0, "4215": 223368192.0, "4220": 223237120.0, "4225": 223237120.0, "4230": 223237120.0, "4235": 222843904.0, "4240": 223237120.0, "4245": 223237120.0, "4250": 223237120.0, "4255": 223237120.0, "4260": 223237120.0, "4265": 223237120.0, "4270": 223237120.0, "4275": 222843904.0, "4280": 223237120.0, "4285": 223237120.0, "4290": 223237120.0, "4295": 223237120.0, "4300": 223237120.0, "4305": 223237120.0, "4310": 223237120.0, "4315": 222843904.0, "4320": 223237120.0, "4325": 223368192.0, "4330": 223237120.0, "4335": 223237120.0, "4340": 223237120.0, "4345": 223237120.0, "4350": 222843904.0, "4355": 223237120.0, "4360": 223368192.0, "4365": 222843904.0, "4370": 222843904.0, "4375": 223237120.0, "4380": 223237120.0, "4385": 223237120.0, "4390": 223237120.0, "4395": 223237120.0, "4400": 223237120.0, "4405": 223368192.0, "4410": 223237120.0, "4415": 223368192.0, "4420": 223237120.0, "4425": 223237120.0, "4430": 223237120.0, "4435": 223237120.0, "4440": 223237120.0, "4445": 223237120.0, "4450": 223237120.0, "4455": 223237120.0, "4460": 223237120.0, "4465": 222843904.0, "4470": 223237120.0, "4475": 223237120.0, "4480": 222843904.0, "4485": 223368192.0, "4490": 223237120.0, "4495": 222843904.0, "4500": 223368192.0, "4505": 222843904.0, "4510": 223237120.0, "4515": 223237120.0, "4520": 223237120.0, "4525": 222843904.0, "4530": 223237120.0, "4535": 222843904.0, "4540": 223237120.0, "4545": 223237120.0, "4550": 223237120.0, "4555": 223237120.0, "4560": 223237120.0, "4565": 222843904.0, "4570": 222843904.0, "4575": 223237120.0, "4580": 223237120.0, "4585": 223237120.0, "4590": 223368192.0, "4595": 223237120.0, "4600": 223237120.0, "4605": 223237120.0, "4610": 222843904.0, "4615": 223237120.0, "4620": 222843904.0, "4625": 222843904.0, "4630": 223237120.0, "4635": 222843904.0, "4640": 223368192.0, "4645": 222843904.0, "4650": 223237120.0, "4655": 223237120.0, "4660": 222843904.0, "4665": 222843904.0, "4670": 223237120.0, "4675": 223368192.0, "4680": 222843904.0, "4685": 222843904.0, "4690": 223237120.0, "4695": 222843904.0, "4700": 222843904.0, "4705": 223237120.0, "4710": 222843904.0, "4715": 223237120.0, "4720": 223237120.0, "4725": 223237120.0, "4730": 223237120.0, "4735": 223237120.0, "4740": 222843904.0, "4745": 222843904.0, "4750": 223368192.0, "4755": 223237120.0, "4760": 223237120.0, "4765": 222843904.0, "4770": 223237120.0, "4775": 223237120.0, "4780": 222843904.0, "4785": 223237120.0, "4790": 223237120.0, "4795": 222843904.0, "4800": 222843904.0, "4805": 223237120.0, "4810": 223237120.0, "4815": 222843904.0, "4820": 223237120.0, "4825": 223237120.0, "4830": 223237120.0, "4835": 223237120.0, "4840": 222843904.0, "4845": 222843904.0, "4850": 222843904.0, "4855": 223237120.0, "4860": 223368192.0, "4865": 222843904.0, 
"4870": 223237120.0, "4875": 222843904.0, "4880": 223237120.0, "4885": 222843904.0, "4890": 222843904.0, "4895": 223237120.0, "4900": 222843904.0, "4905": 222843904.0, "4910": 222843904.0, "4915": 222843904.0, "4920": 222843904.0, "4925": 223237120.0, "4930": 223237120.0, "4935": 223237120.0, "4940": 223237120.0, "4945": 223237120.0, "4950": 223237120.0, "4955": 223237120.0, "4960": 223237120.0, "4965": 222843904.0, "4970": 223237120.0, "4975": 222843904.0, "4980": 223237120.0, "4985": 223237120.0, "4990": 223237120.0, "4995": 222843904.0, "5000": 223237120.0, "5005": 223237120.0, "5010": 223237120.0, "5015": 223237120.0, "5020": 223237120.0, "5025": 223368192.0, "5030": 222843904.0, "5035": 223237120.0, "5040": 223237120.0, "5045": 223237120.0, "5050": 223237120.0, "5055": 223237120.0, "5060": 223237120.0, "5065": 223237120.0, "5070": 223368192.0, "5075": 223237120.0, "5080": 223237120.0, "5085": 223368192.0, "5090": 222843904.0, "5095": 222843904.0, "5100": 223237120.0, "5105": 223237120.0, "5110": 222843904.0, "5115": 222843904.0, "5120": 223237120.0, "5125": 223237120.0, "5130": 223237120.0, "5135": 222843904.0, "5140": 222843904.0, "5145": 223237120.0, "5150": 222843904.0, "5155": 223237120.0, "5160": 222843904.0, "5165": 223237120.0, "5170": 223237120.0, "5175": 222843904.0, "5180": 223368192.0, "5185": 223237120.0, "5190": 223237120.0, "5195": 223237120.0, "5200": 223237120.0, "5205": 223237120.0, "5210": 223237120.0, "5215": 223368192.0, "5220": 222843904.0, "5225": 223237120.0, "5230": 223237120.0, "5235": 223237120.0, "5240": 222843904.0, "5245": 223237120.0, "5250": 223237120.0, "5255": 223237120.0, "5260": 223237120.0, "5265": 222843904.0, "5270": 223237120.0, "5275": 223237120.0, "5280": 222843904.0, "5285": 223237120.0, "5290": 223237120.0, "5295": 223237120.0, "5300": 223237120.0, "5305": 223237120.0, "5310": 223237120.0, "5315": 223237120.0, "5320": 223237120.0, "5325": 222843904.0, "5330": 222843904.0, "5335": 223368192.0, "5340": 223237120.0, "5345": 222843904.0, "5350": 223237120.0, "5355": 223237120.0, "5360": 222843904.0, "5365": 223237120.0, "5370": 223237120.0, "5375": 222843904.0, "5380": 222843904.0, "5385": 223237120.0, "5390": 223237120.0, "5395": 223237120.0, "5400": 222843904.0, "5405": 222843904.0, "5410": 223237120.0, "5415": 223237120.0, "5420": 222843904.0, "5425": 223237120.0, "5430": 223237120.0, "5435": 223237120.0, "5440": 222843904.0, "5445": 222843904.0, "5450": 223237120.0, "5455": 222843904.0, "5460": 222843904.0, "5465": 222843904.0, "5470": 222843904.0, "5475": 223237120.0, "5480": 223237120.0, "5485": 222843904.0, "5490": 222843904.0, "5495": 223368192.0, "5500": 223237120.0, "5505": 223368192.0, "5510": 222843904.0, "5515": 223368192.0, "5520": 222843904.0, "5525": 223237120.0, "5530": 223237120.0, "5535": 223237120.0, "5540": 222843904.0, "5545": 223368192.0, "5550": 222843904.0, "5555": 223237120.0, "5560": 223237120.0, "5565": 222843904.0, "5570": 222843904.0, "5575": 222843904.0, "5580": 223237120.0, "5585": 222843904.0, "5590": 222843904.0, "5595": 223237120.0, "5600": 223237120.0, "5605": 222843904.0, "5610": 223237120.0, "5615": 222843904.0, "5620": 222843904.0, "5625": 222843904.0, "5630": 223237120.0, "5635": 223237120.0, "5640": 223368192.0, "5645": 222843904.0, "5650": 223237120.0, "5655": 223237120.0, "5660": 223237120.0, "5665": 222843904.0, "5670": 223237120.0, "5675": 222843904.0, "5680": 223237120.0, "5685": 222843904.0, "5690": 223237120.0, "5695": 222843904.0, "5700": 222843904.0, "5705": 223368192.0, "5710": 223237120.0, 
"5715": 222843904.0, "5720": 223237120.0, "5725": 222843904.0, "5730": 222843904.0, "5735": 222843904.0, "5740": 222843904.0, "5745": 222843904.0, "5750": 222843904.0, "5755": 222843904.0, "5760": 223237120.0, "5765": 222843904.0, "5770": 222843904.0, "5775": 223237120.0, "5780": 222843904.0, "5785": 223237120.0, "5790": 223237120.0, "5795": 223237120.0, "5800": 222843904.0, "5805": 222843904.0, "5810": 222843904.0, "5815": 223368192.0, "5820": 222843904.0, "5825": 223237120.0, "5830": 222843904.0, "5835": 222843904.0, "5840": 223237120.0, "5845": 222843904.0, "5850": 222843904.0, "5855": 222843904.0, "5860": 223237120.0, "5865": 223237120.0, "5870": 222843904.0, "5875": 222843904.0, "5880": 223237120.0, "5885": 223237120.0, "5890": 223237120.0, "5895": 223237120.0, "5900": 223237120.0, "5905": 223237120.0, "5910": 223368192.0, "5915": 223237120.0, "5920": 223237120.0, "5925": 223368192.0, "5930": 222843904.0, "5935": 223237120.0, "5940": 223237120.0, "5945": 222843904.0, "5950": 223237120.0, "5955": 222843904.0, "5960": 222843904.0, "5965": 222843904.0, "5970": 223237120.0, "5975": 223368192.0, "5980": 223368192.0, "5985": 223237120.0, "5990": 223237120.0, "5995": 222843904.0, "6000": 223368192.0, "6005": 223237120.0, "6010": 223237120.0, "6015": 223237120.0, "6020": 223237120.0, "6025": 222843904.0, "6030": 223237120.0, "6035": 222843904.0, "6040": 223237120.0, "6045": 223237120.0, "6050": 223237120.0, "6055": 223237120.0, "6060": 223237120.0, "6065": 223237120.0, "6070": 223237120.0, "6075": 223368192.0, "6080": 223237120.0, "6085": 222843904.0, "6090": 222843904.0, "6095": 222843904.0, "6100": 223237120.0, "6105": 222843904.0, "6110": 222843904.0, "6115": 223368192.0, "6120": 223237120.0, "6125": 222843904.0, "6130": 223368192.0, "6135": 223237120.0, "6140": 223237120.0, "6145": 222843904.0, "6150": 223368192.0, "6155": 223237120.0, "6160": 222843904.0, "6165": 222843904.0, "6170": 222843904.0, "6175": 223368192.0, "6180": 223237120.0, "6185": 223237120.0, "6190": 222843904.0, "6195": 223237120.0, "6200": 223237120.0, "6205": 223237120.0, "6210": 223368192.0, "6215": 222843904.0, "6220": 223237120.0, "6225": 222843904.0, "6230": 223237120.0, "6235": 222843904.0, "6240": 222843904.0, "6245": 223237120.0, "6250": 223368192.0, "6255": 223237120.0, "6260": 223237120.0, "6265": 223368192.0, "6270": 222843904.0, "6275": 223237120.0, "6280": 222843904.0, "6285": 223368192.0, "6290": 223237120.0, "6295": 223237120.0, "6300": 222843904.0, "6305": 223237120.0, "6310": 223237120.0, "6315": 223237120.0, "6320": 223237120.0, "6325": 223368192.0, "6330": 222843904.0, "6335": 222843904.0, "6340": 223237120.0, "6345": 222843904.0, "6350": 223237120.0, "6355": 222843904.0, "6360": 223237120.0, "6365": 222843904.0, "6370": 222843904.0, "6375": 222843904.0, "6380": 223237120.0, "6385": 222843904.0, "6390": 223237120.0, "6395": 223237120.0, "6400": 223237120.0, "6405": 223237120.0, "6410": 223237120.0, "6415": 223237120.0, "6420": 222843904.0, "6425": 222843904.0, "6430": 222843904.0, "6435": 222843904.0, "6440": 223237120.0, "6445": 223368192.0, "6450": 222843904.0, "6455": 223368192.0, "6460": 222843904.0, "6465": 223368192.0, "6470": 222843904.0, "6475": 223237120.0, "6480": 223368192.0, "6485": 222843904.0, "6490": 223237120.0, "6495": 223237120.0, "6500": 223237120.0, "6505": 222843904.0, "6510": 223237120.0, "6515": 223237120.0, "6520": 223237120.0, "6525": 223237120.0, "6530": 222843904.0, "6535": 223237120.0, "6540": 222843904.0, "6545": 222843904.0, "6550": 223368192.0, "6555": 223237120.0, 
"6560": 223237120.0, "6565": 223237120.0, "6570": 223237120.0, "6575": 222843904.0, "6580": 222843904.0, "6585": 223237120.0, "6590": 223237120.0, "6595": 223237120.0, "6600": 222843904.0, "6605": 223237120.0, "6610": 222843904.0, "6615": 222843904.0, "6620": 222843904.0, "6625": 223237120.0, "6630": 222843904.0, "6635": 223237120.0, "6640": 223237120.0, "6645": 222843904.0, "6650": 223237120.0, "6655": 222843904.0, "6660": 223237120.0, "6665": 222843904.0, "6670": 222843904.0, "6675": 222843904.0, "6680": 223237120.0, "6685": 223237120.0, "6690": 222843904.0, "6695": 223237120.0, "6700": 223237120.0, "6705": 222843904.0, "6710": 223368192.0, "6715": 222843904.0, "6720": 222843904.0, "6725": 222843904.0, "6730": 223368192.0, "6735": 223237120.0, "6740": 222843904.0, "6745": 223237120.0, "6750": 222843904.0, "6755": 222843904.0, "6760": 222843904.0, "6765": 223368192.0, "6770": 223237120.0, "6775": 223237120.0, "6780": 223237120.0, "6785": 223237120.0, "6790": 223237120.0, "6795": 223237120.0, "6800": 223237120.0, "6805": 222843904.0, "6810": 222843904.0, "6815": 223237120.0, "6820": 223237120.0, "6825": 223368192.0, "6830": 223237120.0, "6835": 223237120.0, "6840": 223237120.0, "6845": 222843904.0, "6850": 222843904.0, "6855": 223237120.0, "6860": 222843904.0, "6865": 223237120.0, "6870": 223237120.0, "6875": 223237120.0, "6880": 223368192.0, "6885": 223237120.0, "6890": 223237120.0, "6895": 222843904.0, "6900": 222843904.0, "6905": 223368192.0, "6910": 223237120.0, "6915": 223237120.0, "6920": 223237120.0, "6925": 223237120.0, "6930": 223237120.0, "6935": 223368192.0, "6940": 223237120.0, "6945": 223237120.0, "6950": 223368192.0, "6955": 223237120.0, "6960": 223368192.0, "6965": 222843904.0, "6970": 223237120.0, "6975": 223237120.0, "6980": 222843904.0, "6985": 222843904.0, "6990": 223368192.0, "6995": 223237120.0, "7000": 223237120.0, "7005": 223368192.0, "7010": 223368192.0, "7015": 222843904.0, "7020": 223237120.0, "7025": 223368192.0, "7030": 222843904.0, "7035": 223237120.0, "7040": 223368192.0, "7045": 223237120.0, "7050": 223237120.0, "7055": 223237120.0, "7060": 223368192.0, "7065": 223368192.0, "7070": 222843904.0, "7075": 223237120.0, "7080": 222843904.0, "7085": 223237120.0, "7090": 223237120.0, "7095": 223368192.0, "7100": 223237120.0, "7105": 223237120.0, "7110": 222843904.0, "7115": 222843904.0, "7120": 223368192.0, "7125": 223368192.0, "7130": 223237120.0, "7135": 222843904.0, "7140": 223237120.0, "7145": 223368192.0, "7150": 223237120.0, "7155": 223237120.0, "7160": 223237120.0, "7165": 223368192.0, "7170": 223368192.0, "7175": 223368192.0, "7180": 223237120.0, "7185": 223237120.0, "7190": 222843904.0, "7195": 222843904.0, "7200": 223368192.0, "7205": 223237120.0, "7210": 223237120.0, "7215": 223237120.0, "7220": 223237120.0, "7225": 222843904.0, "7230": 223368192.0, "7235": 223237120.0, "7240": 222843904.0, "7245": 222843904.0, "7250": 222843904.0, "7255": 222843904.0, "7260": 223368192.0, "7265": 223237120.0, "7270": 223237120.0, "7275": 223368192.0, "7280": 223237120.0, "7285": 223368192.0, "7290": 223237120.0, "7295": 223237120.0, "7300": 223237120.0, "7305": 222843904.0, "7310": 223237120.0, "7315": 223237120.0, "7320": 223237120.0, "7325": 223237120.0, "7330": 222843904.0, "7335": 223237120.0, "7340": 223237120.0, "7345": 222843904.0, "7350": 223237120.0, "7355": 222843904.0, "7360": 223237120.0, "7365": 223237120.0, "7370": 222843904.0, "7375": 223368192.0, "7380": 222843904.0, "7385": 223368192.0, "7390": 223368192.0, "7395": 223237120.0, "7400": 223237120.0, 
"7405": 223368192.0, "7410": 223368192.0, "7415": 222843904.0, "7420": 223237120.0, "7425": 223237120.0, "7430": 223368192.0, "7435": 222843904.0, "7440": 223237120.0, "7445": 222843904.0, "7450": 223237120.0, "7455": 223368192.0, "7460": 223368192.0, "7465": 223237120.0, "7470": 223368192.0, "7475": 223237120.0, "7480": 223237120.0, "7485": 223237120.0, "7490": 223368192.0, "7495": 223237120.0, "7500": 222843904.0, "7505": 223237120.0, "7510": 223368192.0, "7515": 222843904.0, "7520": 222843904.0, "7525": 223237120.0, "7530": 223368192.0, "7535": 222843904.0, "7540": 223237120.0, "7545": 223368192.0, "7550": 222843904.0, "7555": 223237120.0, "7560": 223237120.0, "7565": 222843904.0, "7570": 223237120.0, "7575": 223237120.0, "7580": 222843904.0, "7585": 222843904.0, "7590": 222843904.0, "7595": 223368192.0, "7600": 222843904.0, "7605": 222843904.0, "7610": 223237120.0, "7615": 222843904.0, "7620": 222843904.0, "7625": 222843904.0, "7630": 222843904.0, "7635": 222843904.0, "7640": 222843904.0, "7645": 223237120.0, "7650": 223237120.0, "7655": 223237120.0, "7660": 222843904.0, "7665": 223368192.0, "7670": 223368192.0, "7675": 222843904.0, "7680": 223237120.0, "7685": 223368192.0, "7690": 223237120.0, "7695": 222843904.0, "7700": 223368192.0, "7705": 222843904.0, "7710": 223368192.0, "7715": 222843904.0, "7720": 223237120.0, "7725": 222843904.0, "7730": 222843904.0, "7735": 222843904.0, "7740": 223237120.0, "7745": 222843904.0, "7750": 222843904.0, "7755": 222843904.0, "7760": 223237120.0, "7765": 223237120.0, "7770": 223237120.0, "7775": 222843904.0, "7780": 223237120.0, "7785": 222843904.0, "7790": 223237120.0, "7795": 223237120.0, "7800": 223237120.0, "7805": 223237120.0, "7810": 223368192.0, "7815": 223237120.0, "7820": 223237120.0, "7825": 222843904.0, "7830": 222843904.0, "7835": 223368192.0, "7840": 222843904.0, "7845": 222843904.0, "7850": 223237120.0, "7855": 222843904.0, "7860": 222843904.0, "7865": 223237120.0, "7870": 223368192.0, "7875": 223237120.0, "7880": 222843904.0, "7885": 223237120.0, "7890": 223237120.0, "7895": 223237120.0, "7900": 223237120.0, "7905": 223237120.0, "7910": 223237120.0, "7915": 223368192.0, "7920": 223237120.0, "7925": 223368192.0, "7930": 223368192.0, "7935": 223368192.0, "7940": 223237120.0, "7945": 223237120.0, "7950": 223237120.0, "7955": 223368192.0, "7960": 223368192.0, "7965": 223368192.0, "7970": 223237120.0, "7975": 223237120.0, "7980": 223368192.0, "7985": 222843904.0, "7990": 223237120.0, "7995": 223237120.0, "8000": 223237120.0, "8005": 223237120.0, "8010": 223368192.0, "8015": 223368192.0, "8020": 222843904.0, "8025": 222843904.0, "8030": 223237120.0, "8035": 223237120.0, "8040": 223368192.0, "8045": 223237120.0, "8050": 223368192.0, "8055": 222843904.0, "8060": 222843904.0, "8065": 222843904.0, "8070": 223368192.0, "8075": 223368192.0, "8080": 223237120.0, "8085": 222843904.0, "8090": 223368192.0, "8095": 222843904.0, "8100": 223237120.0, "8105": 223237120.0, "8110": 222843904.0, "8115": 223368192.0, "8120": 222843904.0, "8125": 222843904.0, "8130": 222843904.0, "8135": 223237120.0, "8140": 222843904.0, "8145": 222843904.0, "8150": 223237120.0, "8155": 222843904.0, "8160": 223237120.0, "8165": 223368192.0, "8170": 222843904.0, "8175": 222843904.0, "8180": 223237120.0, "8185": 223368192.0, "8190": 223237120.0, "8195": 222843904.0, "8200": 223237120.0, "8205": 223237120.0, "8210": 223237120.0, "8215": 223237120.0, "8220": 223237120.0, "8225": 223237120.0, "8230": 222843904.0, "8235": 223237120.0, "8240": 223368192.0, "8245": 222843904.0, 
"8250": 222843904.0, "8255": 222843904.0, "8260": 223368192.0, "8265": 222843904.0, "8270": 223237120.0, "8275": 222843904.0, "8280": 223237120.0, "8285": 223237120.0, "8290": 222843904.0, "8295": 223237120.0, "8300": 222843904.0, "8305": 223368192.0, "8310": 223237120.0, "8315": 223237120.0, "8320": 223237120.0, "8325": 223237120.0, "8330": 223368192.0, "8335": 222843904.0, "8340": 223237120.0, "8345": 223237120.0, "8350": 222843904.0, "8355": 223237120.0, "8360": 223237120.0, "8365": 222843904.0, "8370": 223237120.0, "8375": 223237120.0, "8380": 223368192.0, "8385": 223237120.0, "8390": 223237120.0, "8395": 222843904.0, "8400": 223237120.0, "8405": 223237120.0, "8410": 223237120.0, "8415": 223237120.0, "8420": 222843904.0, "8425": 223237120.0, "8430": 223237120.0, "8435": 223237120.0, "8440": 223368192.0, "8445": 223237120.0, "8450": 223237120.0, "8455": 222843904.0, "8460": 222843904.0, "8465": 223237120.0, "8470": 223368192.0, "8475": 223368192.0, "8480": 222843904.0, "8485": 222843904.0, "8490": 223237120.0, "8495": 222843904.0, "8500": 223237120.0, "8505": 223237120.0, "8510": 222843904.0, "8515": 223237120.0, "8520": 222843904.0, "8525": 223368192.0, "8530": 222843904.0, "8535": 223237120.0, "8540": 223237120.0, "8545": 223237120.0, "8550": 222843904.0, "8555": 223237120.0, "8560": 223368192.0, "8565": 223368192.0, "8570": 222843904.0, "8575": 223237120.0, "8580": 223237120.0, "8585": 223237120.0, "8590": 222843904.0, "8595": 223368192.0, "8600": 223368192.0, "8605": 223237120.0, "8610": 222843904.0, "8615": 223237120.0, "8620": 223368192.0, "8625": 223368192.0, "8630": 222843904.0, "8635": 222843904.0, "8640": 222843904.0, "8645": 222843904.0, "8650": 222843904.0, "8655": 223237120.0, "8660": 222843904.0, "8665": 223237120.0, "8670": 222843904.0, "8675": 222843904.0, "8680": 222843904.0, "8685": 222843904.0, "8690": 223237120.0, "8695": 222843904.0, "8700": 223237120.0, "8705": 223237120.0, "8710": 222843904.0, "8715": 223368192.0, "8720": 223368192.0, "8725": 223368192.0, "8730": 222843904.0, "8735": 222843904.0, "8740": 223368192.0, "8745": 223237120.0, "8750": 222843904.0, "8755": 223237120.0, "8760": 223237120.0, "8765": 223237120.0, "8770": 223237120.0, "8775": 222843904.0, "8780": 222843904.0, "8785": 222843904.0, "8790": 223237120.0, "8795": 223237120.0, "8800": 223237120.0, "8805": 223237120.0, "8810": 223237120.0, "8815": 223237120.0, "8820": 223237120.0, "8825": 222843904.0, "8830": 223237120.0, "8835": 223368192.0, "8840": 223237120.0, "8845": 223237120.0, "8850": 223368192.0, "8855": 223237120.0, "8860": 223368192.0, "8865": 223368192.0, "8870": 223237120.0, "8875": 223237120.0, "8880": 223368192.0, "8885": 222843904.0, "8890": 222843904.0, "8895": 223237120.0, "8900": 223237120.0, "8905": 222843904.0, "8910": 223237120.0, "8915": 223237120.0, "8920": 223237120.0, "8925": 222843904.0, "8930": 222843904.0, "8935": 223237120.0, "8940": 222843904.0, "8945": 223237120.0, "8950": 222843904.0, "8955": 222843904.0, "8960": 222843904.0, "8965": 223237120.0, "8970": 222843904.0, "8975": 223237120.0, "8980": 223368192.0, "8985": 223237120.0, "8990": 222843904.0, "8995": 222843904.0, "9000": 223237120.0, "9005": 223237120.0, "9010": 222843904.0, "9015": 223368192.0, "9020": 223237120.0, "9025": 222843904.0, "9030": 223237120.0, "9035": 223368192.0, "9040": 223237120.0, "9045": 222843904.0, "9050": 222843904.0, "9055": 223237120.0, "9060": 222843904.0, "9065": 223237120.0, "9070": 222843904.0, "9075": 223237120.0, "9080": 223237120.0, "9085": 222843904.0, "9090": 223237120.0, 
"9095": 222843904.0, "9100": 223368192.0, "9105": 223237120.0, "9110": 223237120.0, "9115": 223237120.0, "9120": 223237120.0, "9125": 223368192.0, "9130": 223237120.0, "9135": 223237120.0, "9140": 223237120.0, "9145": 223237120.0, "9150": 222843904.0, "9155": 222843904.0, "9160": 223237120.0, "9165": 223237120.0, "9170": 222843904.0, "9175": 222843904.0, "9180": 223368192.0, "9185": 223368192.0, "9190": 223237120.0, "9195": 223237120.0, "9200": 223237120.0, "9205": 223237120.0, "9210": 223237120.0, "9215": 223368192.0, "9220": 223237120.0, "9225": 222843904.0, "9230": 222843904.0, "9235": 222843904.0, "9240": 223237120.0, "9245": 223368192.0, "9250": 223237120.0, "9255": 223237120.0, "9260": 223237120.0, "9265": 223237120.0, "9270": 222843904.0, "9275": 223237120.0, "9280": 223237120.0, "9285": 223237120.0, "9290": 223237120.0, "9295": 222843904.0, "9300": 223368192.0, "9305": 223237120.0, "9310": 222843904.0, "9315": 223237120.0, "9320": 223237120.0, "9325": 222843904.0, "9330": 223368192.0, "9335": 223368192.0, "9340": 223237120.0, "9345": 222843904.0, "9350": 222843904.0, "9355": 223237120.0, "9360": 223237120.0, "9365": 222843904.0, "9370": 222843904.0, "9375": 222843904.0, "9380": 222843904.0, "9385": 222843904.0, "9390": 223237120.0, "9395": 223237120.0, "9400": 222843904.0, "9405": 222843904.0, "9410": 222843904.0, "9415": 222843904.0, "9420": 222843904.0, "9425": 223237120.0, "9430": 223368192.0, "9435": 223237120.0, "9440": 222843904.0, "9445": 223237120.0, "9450": 223237120.0, "9455": 222843904.0, "9460": 222843904.0, "9465": 222843904.0, "9470": 222843904.0, "9475": 222843904.0, "9480": 222843904.0, "9485": 222843904.0, "9490": 223237120.0, "9495": 223237120.0, "9500": 222843904.0, "9505": 223237120.0, "9510": 222843904.0, "9515": 222843904.0, "9520": 222843904.0, "9525": 222843904.0, "9530": 223237120.0, "9535": 222843904.0, "9540": 223237120.0, "9545": 223237120.0, "9550": 223237120.0, "9555": 222843904.0, "9560": 223237120.0, "9565": 222843904.0, "9570": 222843904.0, "9575": 223237120.0, "9580": 222843904.0, "9585": 222843904.0, "9590": 223237120.0, "9595": 222843904.0, "9600": 223237120.0, "9605": 222843904.0, "9610": 223237120.0, "9615": 223237120.0, "9620": 222843904.0, "9625": 222843904.0, "9630": 223237120.0, "9635": 223237120.0, "9640": 222843904.0, "9645": 223237120.0, "9650": 223237120.0, "9655": 223237120.0, "9660": 222843904.0, "9665": 222843904.0, "9670": 222843904.0, "9675": 223237120.0, "9680": 222843904.0, "9685": 222843904.0, "9690": 222843904.0, "9695": 222843904.0, "9700": 223237120.0, "9705": 222843904.0, "9710": 223237120.0, "9715": 222843904.0, "9720": 223237120.0, "9725": 222843904.0, "9730": 223237120.0, "9735": 223368192.0, "9740": 222843904.0, "9745": 223237120.0, "9750": 223368192.0, "9755": 223237120.0, "9760": 223237120.0, "9765": 223237120.0, "9770": 223237120.0, "9775": 223237120.0, "9780": 222843904.0, "9785": 223237120.0, "9790": 223237120.0, "9795": 223237120.0, "9800": 223237120.0, "9805": 222843904.0, "9810": 223237120.0, "9815": 222843904.0, "9820": 223237120.0, "9825": 222843904.0, "9830": 223237120.0, "9835": 223237120.0, "9840": 222843904.0, "9845": 223237120.0, "9850": 223237120.0, "9855": 223237120.0, "9860": 223237120.0, "9865": 223237120.0, "9870": 222843904.0, "9875": 223237120.0, "9880": 222843904.0, "9885": 222843904.0, "9890": 223237120.0, "9895": 223237120.0, "9900": 223368192.0, "9905": 223237120.0, "9910": 223237120.0, "9915": 223237120.0, "9920": 222843904.0, "9925": 223237120.0, "9930": 222843904.0, "9935": 223237120.0, 
"9940": 222843904.0, "9945": 222843904.0, "9950": 223237120.0, "9955": 223237120.0, "9960": 223237120.0, "9965": 223368192.0, "9970": 223237120.0, "9975": 223237120.0, "9980": 223237120.0, "9985": 223237120.0, "9990": 223237120.0, "9995": 222843904.0, "10000": 223237120.0, "10005": 223237120.0, "10010": 223237120.0, "10015": 223237120.0, "10020": 223237120.0, "10025": 222843904.0, "10030": 223237120.0, "10035": 223237120.0, "10040": 223237120.0, "10045": 223237120.0, "10050": 223237120.0, "10055": 223237120.0, "10060": 223237120.0, "10065": 223237120.0, "10070": 222843904.0, "10075": 223237120.0, "10080": 222843904.0, "10085": 223237120.0, "10090": 223237120.0, "10095": 223237120.0, "10100": 223237120.0, "10105": 223237120.0, "10110": 222843904.0, "10115": 223237120.0, "10120": 223237120.0, "10125": 223237120.0, "10130": 223237120.0, "10135": 223237120.0, "10140": 222843904.0, "10145": 223237120.0, "10150": 223237120.0, "10155": 222843904.0, "10160": 222843904.0, "10165": 223237120.0, "10170": 223237120.0, "10175": 223237120.0, "10180": 223237120.0, "10185": 223237120.0, "10190": 222843904.0, "10195": 223237120.0, "10200": 223237120.0, "10205": 223237120.0, "10210": 223368192.0, "10215": 223237120.0, "10220": 222843904.0, "10225": 222843904.0, "10230": 223237120.0, "10235": 223237120.0, "10240": 222843904.0, "10245": 223237120.0, "10250": 222843904.0, "10255": 223237120.0, "10260": 223237120.0, "10265": 223237120.0, "10270": 223368192.0, "10275": 223237120.0, "10280": 223237120.0, "10285": 222843904.0, "10290": 223237120.0, "10295": 223237120.0, "10300": 222843904.0, "10305": 222843904.0, "10310": 223237120.0, "10315": 223237120.0, "10320": 222843904.0, "10325": 223237120.0, "10330": 223237120.0, "10335": 223237120.0, "10340": 223237120.0, "10345": 223237120.0, "10350": 222843904.0, "10355": 222843904.0, "10360": 223237120.0, "10365": 223237120.0, "10370": 223237120.0, "10375": 222843904.0, "10380": 223237120.0, "10385": 222843904.0, "10390": 223237120.0, "10395": 222843904.0, "10400": 223237120.0, "10405": 223237120.0, "10410": 223368192.0, "10415": 223237120.0, "10420": 222843904.0, "10425": 222843904.0, "10430": 223237120.0, "10435": 223237120.0, "10440": 223237120.0, "10445": 223237120.0, "10450": 223237120.0, "10455": 223237120.0, "10460": 223237120.0, "10465": 223237120.0, "10470": 222843904.0, "10475": 223237120.0, "10480": 223237120.0, "10485": 223237120.0, "10490": 223237120.0, "10495": 223237120.0, "10500": 223237120.0, "10505": 223237120.0, "10510": 223237120.0, "10515": 223237120.0, "10520": 223237120.0, "10525": 223237120.0, "10530": 223237120.0, "10535": 222843904.0, "10540": 223237120.0, "10545": 223237120.0, "10550": 222843904.0, "10555": 223237120.0, "10560": 222843904.0, "10565": 222843904.0, "10570": 223237120.0, "10575": 223237120.0, "10580": 223237120.0, "10585": 223237120.0, "10590": 223237120.0, "10595": 222843904.0, "10600": 222843904.0, "10605": 223237120.0, "10610": 222843904.0, "10615": 223237120.0, "10620": 223237120.0, "10625": 222843904.0, "10630": 223237120.0, "10635": 222843904.0, "10640": 223237120.0, "10645": 223237120.0, "10650": 222843904.0, "10655": 223237120.0, "10660": 223237120.0, "10665": 222843904.0, "10670": 223237120.0, "10675": 223237120.0, "10680": 223237120.0, "10685": 222843904.0, "10690": 223237120.0, "10695": 223237120.0, "10700": 223237120.0, "10705": 223237120.0, "10710": 223237120.0, "10715": 223237120.0, "10720": 222843904.0, "10725": 223237120.0, "10730": 222843904.0, "10735": 223237120.0, "10740": 222843904.0, "10745": 222843904.0, 
"10750": 222843904.0, "10755": 223237120.0, "10760": 222843904.0, "10765": 223237120.0, "10770": 223237120.0, "10775": 223237120.0, "10780": 223237120.0, "10785": 223237120.0, "10790": 222843904.0, "10795": 222843904.0, "10800": 223237120.0, "10805": 222843904.0, "10810": 223237120.0, "10815": 222843904.0, "10820": 223237120.0, "10825": 223237120.0, "10830": 223237120.0, "10835": 222843904.0, "10840": 223237120.0, "10845": 222843904.0, "10850": 223237120.0, "10855": 222843904.0, "10860": 222843904.0, "10865": 223237120.0, "10870": 223237120.0, "10875": 223237120.0, "10880": 223237120.0, "10885": 223237120.0, "10890": 223237120.0, "10895": 223237120.0, "10900": 223237120.0, "10905": 222843904.0, "10910": 222843904.0, "10915": 223237120.0, "10920": 223237120.0, "10925": 222843904.0, "10930": 223237120.0, "10935": 223237120.0, "10940": 222843904.0, "10945": 222843904.0, "10950": 223237120.0, "10955": 223368192.0, "10960": 223368192.0, "10965": 223237120.0, "10970": 223237120.0, "10975": 223237120.0, "10980": 223237120.0, "10985": 223237120.0, "10990": 222843904.0, "10995": 223237120.0, "11000": 222843904.0, "11005": 222843904.0, "11010": 223237120.0, "11015": 223237120.0, "11020": 223237120.0, "11025": 223368192.0, "11030": 223237120.0, "11035": 223237120.0, "11040": 222843904.0, "11045": 222843904.0, "11050": 223237120.0, "11055": 222843904.0, "11060": 223368192.0, "11065": 223237120.0, "11070": 223237120.0, "11075": 223237120.0, "11080": 223368192.0, "11085": 223368192.0, "11090": 223237120.0, "11095": 223237120.0, "11100": 222843904.0, "11105": 223237120.0, "11110": 223237120.0, "11115": 222843904.0, "11120": 222843904.0, "11125": 223237120.0, "11130": 222843904.0, "11135": 223237120.0, "11140": 222843904.0, "11145": 223237120.0, "11150": 223237120.0, "11155": 222843904.0, "11160": 223237120.0, "11165": 223237120.0, "11170": 222843904.0, "11175": 222843904.0, "11180": 223237120.0, "11185": 223237120.0, "11190": 223237120.0, "11195": 223237120.0, "11200": 222843904.0, "11205": 222843904.0, "11210": 222843904.0, "11215": 223237120.0, "11220": 223237120.0, "11225": 222843904.0, "11230": 223368192.0, "11235": 223237120.0, "11240": 223237120.0, "11245": 223237120.0, "11250": 223237120.0, "11255": 223237120.0, "11260": 223237120.0, "11265": 223237120.0, "11270": 223237120.0, "11275": 223237120.0, "11280": 223237120.0, "11285": 222843904.0, "11290": 223237120.0, "11295": 223237120.0, "11300": 222843904.0, "11305": 223237120.0, "11310": 223237120.0, "11315": 223237120.0, "11320": 223237120.0, "11325": 223237120.0, "11330": 223368192.0, "11335": 223237120.0, "11340": 223368192.0, "11345": 223237120.0, "11350": 223368192.0, "11355": 222843904.0, "11360": 223368192.0, "11365": 223237120.0, "11370": 223237120.0, "11375": 223237120.0, "11380": 223368192.0, "11385": 223237120.0, "11390": 223237120.0, "11395": 223368192.0, "11400": 223237120.0, "11405": 223368192.0, "11410": 223237120.0, "11415": 223368192.0, "11420": 223237120.0, "11425": 223368192.0, "11430": 223237120.0, "11435": 223237120.0, "11440": 222843904.0, "11445": 223237120.0, "11450": 223368192.0, "11455": 222843904.0, "11460": 223237120.0, "11465": 223237120.0, "11470": 223237120.0, "11475": 223237120.0, "11480": 223237120.0, "11485": 223368192.0, "11490": 222843904.0, "11495": 223368192.0, "11500": 223237120.0, "11505": 223368192.0, "11510": 223237120.0, "11515": 222843904.0, "11520": 223237120.0, "11525": 223368192.0, "11530": 223237120.0, "11535": 222843904.0, "11540": 223237120.0, "11545": 223237120.0, "11550": 223368192.0, "11555": 
223237120.0, "11560": 223237120.0, "11565": 223368192.0, "11570": 222843904.0, "11575": 223237120.0, "11580": 223368192.0, "11585": 222843904.0, "11590": 223237120.0, "11595": 222843904.0, "11600": 223368192.0, "11605": 223237120.0, "11610": 223368192.0, "11615": 223237120.0, "11620": 223368192.0, "11625": 223237120.0, "11630": 223368192.0, "11635": 223237120.0, "11640": 223237120.0, "11645": 223237120.0, "11650": 222843904.0, "11655": 223237120.0, "11660": 222843904.0, "11665": 223237120.0, "11670": 223237120.0, "11675": 223368192.0, "11680": 223237120.0, "11685": 223237120.0, "11690": 223368192.0, "11695": 223237120.0, "11700": 222843904.0, "11705": 222843904.0, "11710": 223237120.0, "11715": 223237120.0, "11720": 223237120.0, "11725": 223237120.0, "11730": 223237120.0, "11735": 223237120.0, "11740": 222843904.0, "11745": 222843904.0, "11750": 223237120.0, "11755": 223237120.0, "11760": 223368192.0, "11765": 222843904.0, "11770": 223237120.0, "11775": 222843904.0, "11780": 223237120.0, "11785": 223368192.0, "11790": 222843904.0, "11795": 223237120.0, "11800": 223237120.0, "11805": 222843904.0, "11810": 223237120.0, "11815": 223368192.0, "11820": 223368192.0, "11825": 223237120.0, "11830": 222843904.0, "11835": 223237120.0, "11840": 222843904.0, "11845": 222843904.0, "11850": 223237120.0, "11855": 223237120.0, "11860": 223237120.0, "11865": 223237120.0, "11870": 223237120.0, "11875": 223237120.0, "11880": 223237120.0, "11885": 223237120.0, "11890": 223237120.0, "11895": 223237120.0, "11900": 222843904.0, "11905": 223237120.0, "11910": 223237120.0, "11915": 223237120.0, "11920": 223237120.0, "11925": 223237120.0, "11930": 223237120.0, "11935": 223237120.0, "11940": 223237120.0, "11945": 223237120.0, "11950": 223237120.0, "11955": 223237120.0, "11960": 222843904.0, "11965": 223237120.0, "11970": 223237120.0, "11975": 223237120.0, "11980": 223237120.0, "11985": 223237120.0, "11990": 222843904.0, "11995": 222843904.0, "12000": 223237120.0, "12005": 223237120.0, "12010": 223237120.0, "12015": 223237120.0, "12020": 222843904.0, "12025": 223237120.0, "12030": 222843904.0, "12035": 223368192.0, "12040": 222843904.0, "12045": 223368192.0, "12050": 223237120.0, "12055": 222843904.0, "12060": 223237120.0, "12065": 223237120.0, "12070": 223237120.0, "12075": 222843904.0, "12080": 223237120.0, "12085": 223237120.0, "12090": 223368192.0, "12095": 223237120.0, "12100": 222843904.0, "12105": 223237120.0, "12110": 223237120.0, "12115": 222843904.0, "12120": 223237120.0, "12125": 223237120.0, "12130": 222843904.0, "12135": 222843904.0, "12140": 223237120.0, "12145": 222843904.0, "12150": 223237120.0, "12155": 223237120.0, "12160": 222843904.0, "12165": 223237120.0, "12170": 222843904.0, "12175": 222843904.0, "12180": 222843904.0, "12185": 223368192.0, "12190": 223237120.0, "12195": 223237120.0, "12200": 223237120.0, "12205": 222843904.0, "12210": 223237120.0, "12215": 223237120.0, "12220": 222843904.0, "12225": 222843904.0, "12230": 223237120.0, "12235": 222843904.0, "12240": 222843904.0, "12245": 222843904.0, "12250": 223237120.0, "12255": 222843904.0, "12260": 222843904.0, "12265": 222843904.0, "12270": 223237120.0, "12275": 223237120.0, "12280": 222843904.0, "12285": 223237120.0, "12290": 222843904.0, "12295": 222843904.0, "12300": 223237120.0, "12305": 222843904.0, "12310": 223237120.0, "12315": 222843904.0, "12320": 223368192.0, "12325": 223237120.0, "12330": 223237120.0, "12335": 222843904.0, "12340": 222843904.0, "12345": 223237120.0, "12350": 223237120.0, "12355": 223237120.0, "12360": 223237120.0, 
"12365": 222843904.0, "12370": 222843904.0, "12375": 222843904.0, "12380": 222843904.0, "12385": 222843904.0, "12390": 223368192.0, "12395": 223237120.0, "12400": 222843904.0, "12405": 222843904.0, "12410": 223237120.0, "12415": 222843904.0, "12420": 222843904.0, "12425": 223237120.0, "12430": 222843904.0, "12435": 222843904.0, "12440": 222843904.0, "12445": 223237120.0, "12450": 222843904.0, "12455": 223237120.0, "12460": 223237120.0, "12465": 222843904.0, "12470": 222843904.0, "12475": 222843904.0, "12480": 223237120.0, "12485": 222843904.0, "12490": 223237120.0, "12495": 223237120.0, "12500": 223237120.0, "12505": 222843904.0, "12510": 223237120.0, "12515": 223237120.0, "12520": 223237120.0, "12525": 223237120.0, "12530": 222843904.0, "12535": 222843904.0, "12540": 223368192.0, "12545": 222843904.0, "12550": 223237120.0, "12555": 222843904.0, "12560": 222843904.0, "12565": 223237120.0, "12570": 222843904.0, "12575": 223237120.0, "12580": 223368192.0, "12585": 222843904.0, "12590": 223368192.0, "12595": 223237120.0, "12600": 223237120.0, "12605": 223237120.0, "12610": 223237120.0, "12615": 223237120.0, "12620": 223237120.0, "12625": 222843904.0, "12630": 222843904.0, "12635": 223237120.0, "12640": 223237120.0, "12645": 223237120.0, "12650": 223237120.0, "12655": 222843904.0, "12660": 222843904.0, "12665": 222843904.0, "12670": 222843904.0, "12675": 223237120.0, "12680": 223237120.0, "12685": 223237120.0, "12690": 222843904.0, "12695": 222843904.0, "12700": 222843904.0, "12705": 222843904.0, "12710": 223237120.0, "12715": 223237120.0, "12720": 223237120.0, "12725": 223237120.0, "12730": 222843904.0, "12735": 223237120.0, "12740": 223237120.0, "12745": 223237120.0, "12750": 223237120.0, "12755": 222843904.0, "12760": 223237120.0, "12765": 223237120.0, "12770": 222843904.0, "12775": 223368192.0, "12780": 223237120.0, "12785": 222843904.0, "12790": 222843904.0, "12795": 223368192.0, "12800": 223237120.0, "12805": 223237120.0, "12810": 222843904.0, "12815": 222843904.0, "12820": 223237120.0, "12825": 222843904.0, "12830": 223368192.0, "12835": 222843904.0, "12840": 223237120.0, "12845": 223237120.0, "12850": 223237120.0, "12855": 223237120.0, "12860": 223237120.0, "12865": 222843904.0, "12870": 222843904.0, "12875": 223237120.0, "12880": 223237120.0, "12885": 222843904.0, "12890": 222843904.0, "12895": 222843904.0, "12900": 223368192.0, "12905": 222843904.0, "12910": 222843904.0, "12915": 223237120.0, "12920": 222843904.0, "12925": 223237120.0, "12930": 223368192.0, "12935": 223237120.0, "12940": 223237120.0, "12945": 222843904.0, "12950": 223237120.0, "12955": 222843904.0, "12960": 223237120.0, "12965": 223237120.0, "12970": 223368192.0, "12975": 223368192.0, "12980": 222843904.0, "12985": 222843904.0, "12990": 222843904.0, "12995": 223237120.0, "13000": 222843904.0, "13005": 222843904.0, "13010": 222843904.0, "13015": 223237120.0, "13020": 223237120.0, "13025": 223237120.0, "13030": 223237120.0, "13035": 223368192.0, "13040": 222843904.0, "13045": 223237120.0, "13050": 222843904.0, "13055": 223237120.0, "13060": 222843904.0, "13065": 223237120.0, "13070": 223368192.0, "13075": 222843904.0, "13080": 222843904.0, "13085": 223237120.0, "13090": 223368192.0, "13095": 223237120.0, "13100": 223237120.0, "13105": 223237120.0, "13110": 223237120.0, "13115": 223237120.0, "13120": 223237120.0, "13125": 222843904.0, "13130": 223237120.0, "13135": 222843904.0, "13140": 222843904.0, "13145": 222843904.0, "13150": 223368192.0, "13155": 223237120.0, "13160": 223237120.0, "13165": 223237120.0, "13170": 
222843904.0, "13175": 223237120.0, "13180": 223237120.0, "13185": 223237120.0, "13190": 223237120.0, "13195": 223237120.0, "13200": 223237120.0, "13205": 223237120.0, "13210": 223368192.0, "13215": 222843904.0, "13220": 223237120.0, "13225": 222843904.0, "13230": 223237120.0, "13235": 222843904.0, "13240": 222843904.0, "13245": 223237120.0, "13250": 222843904.0, "13255": 222843904.0, "13260": 223237120.0, "13265": 222843904.0, "13270": 223237120.0, "13275": 223237120.0, "13280": 223368192.0, "13285": 223237120.0, "13290": 223237120.0, "13295": 223237120.0, "13300": 223237120.0, "13305": 223237120.0, "13310": 222843904.0, "13315": 222843904.0, "13320": 222843904.0, "13325": 223237120.0, "13330": 223237120.0, "13335": 222843904.0, "13340": 222843904.0, "13345": 222843904.0, "13350": 222843904.0, "13355": 223237120.0, "13360": 223237120.0, "13365": 223237120.0, "13370": 223237120.0, "13375": 223237120.0, "13380": 222843904.0, "13385": 222843904.0, "13390": 222843904.0, "13395": 223237120.0, "13400": 223237120.0, "13405": 222843904.0, "13410": 223237120.0, "13415": 222843904.0, "13420": 223237120.0, "13425": 223237120.0, "13430": 222843904.0, "13435": 222843904.0, "13440": 222843904.0, "13445": 223237120.0, "13450": 223237120.0, "13455": 223237120.0, "13460": 222843904.0, "13465": 223237120.0, "13470": 223368192.0, "13475": 223237120.0, "13480": 222843904.0, "13485": 223237120.0, "13490": 222843904.0, "13495": 222843904.0, "13500": 223237120.0, "13505": 222843904.0, "13510": 223237120.0, "13515": 223237120.0, "13520": 222843904.0, "13525": 223237120.0, "13530": 222843904.0, "13535": 223237120.0, "13540": 223237120.0, "13545": 223237120.0, "13550": 222843904.0, "13555": 222843904.0, "13560": 223237120.0, "13565": 223368192.0, "13570": 222843904.0, "13575": 223368192.0, "13580": 223237120.0, "13585": 222843904.0, "13590": 223368192.0, "13595": 223368192.0, "13600": 223368192.0, "13605": 223237120.0, "13610": 223368192.0, "13615": 223237120.0, "13620": 223237120.0, "13625": 223368192.0, "13630": 223237120.0, "13635": 223237120.0, "13640": 223237120.0, "13645": 223237120.0, "13650": 223237120.0, "13655": 223237120.0, "13660": 223237120.0, "13665": 222843904.0, "13670": 223237120.0, "13675": 223368192.0, "13680": 223237120.0, "13685": 223237120.0, "13690": 223237120.0, "13695": 222843904.0, "13700": 223237120.0, "13705": 222843904.0, "13710": 222843904.0, "13715": 222843904.0, "13720": 222843904.0, "13725": 222843904.0, "13730": 222843904.0, "13735": 223237120.0, "13740": 223237120.0, "13745": 223237120.0, "13750": 223368192.0, "13755": 223368192.0, "13760": 223237120.0, "13765": 222843904.0, "13770": 223368192.0, "13775": 223237120.0, "13780": 223368192.0, "13785": 223237120.0, "13790": 223237120.0, "13795": 223237120.0, "13800": 223237120.0, "13805": 222843904.0, "13810": 223237120.0, "13815": 222843904.0, "13820": 223237120.0, "13825": 222843904.0, "13830": 222843904.0, "13835": 223237120.0, "13840": 223237120.0, "13845": 222843904.0, "13850": 222843904.0, "13855": 223237120.0, "13860": 223368192.0, "13865": 223237120.0, "13870": 223237120.0, "13875": 223237120.0, "13880": 222843904.0, "13885": 223237120.0, "13890": 222843904.0, "13895": 223237120.0, "13900": 223237120.0, "13905": 223368192.0, "13910": 223237120.0, "13915": 222843904.0, "13920": 223237120.0, "13925": 223237120.0, "13930": 223237120.0, "13935": 222843904.0, "13940": 222843904.0, "13945": 223237120.0, "13950": 222843904.0, "13955": 223237120.0, "13960": 222843904.0, "13965": 223237120.0, "13970": 223237120.0, "13975": 222843904.0, 
"13980": 223237120.0, "13985": 223237120.0, "13990": 223237120.0, "13995": 223237120.0, "14000": 222843904.0, "14005": 222843904.0, "14010": 222843904.0, "14015": 222843904.0, "14020": 222843904.0, "14025": 222843904.0, "14030": 223237120.0, "14035": 222843904.0, "14040": 223368192.0, "14045": 222843904.0, "14050": 223237120.0, "14055": 222843904.0, "14060": 223237120.0, "14065": 223237120.0, "14070": 223237120.0, "14075": 222843904.0, "14080": 222843904.0, "14085": 222843904.0, "14090": 222843904.0, "14095": 223237120.0, "14100": 223237120.0, "14105": 223237120.0, "14110": 222843904.0, "14115": 222843904.0, "14120": 222843904.0, "14125": 222843904.0, "14130": 222843904.0, "14135": 222843904.0, "14140": 223237120.0, "14145": 223237120.0, "14150": 222843904.0, "14155": 223237120.0, "14160": 223237120.0, "14165": 223237120.0, "14170": 222843904.0, "14175": 222843904.0, "14180": 223237120.0, "14185": 222843904.0, "14190": 222843904.0, "14195": 223237120.0, "14200": 223237120.0, "14205": 223237120.0, "14210": 223237120.0, "14215": 223368192.0, "14220": 223368192.0, "14225": 223237120.0, "14230": 222843904.0, "14235": 223237120.0, "14240": 223368192.0, "14245": 223237120.0, "14250": 223237120.0, "14255": 223237120.0, "14260": 223237120.0, "14265": 223237120.0, "14270": 223237120.0, "14275": 222843904.0, "14280": 223237120.0, "14285": 223237120.0, "14290": 223368192.0, "14295": 223237120.0, "14300": 222843904.0, "14305": 222843904.0, "14310": 223368192.0, "14315": 223237120.0, "14320": 223368192.0, "14325": 223237120.0, "14330": 223368192.0, "14335": 223237120.0, "14340": 222843904.0, "14345": 222843904.0, "14350": 223237120.0, "14355": 222843904.0, "14360": 222843904.0, "14365": 222843904.0, "14370": 223368192.0, "14375": 222843904.0, "14380": 222843904.0, "14385": 223237120.0, "14390": 223368192.0, "14395": 222843904.0, "14400": 223237120.0, "14405": 223368192.0, "14410": 222843904.0, "14415": 223237120.0, "14420": 223237120.0, "14425": 222843904.0, "14430": 222843904.0, "14435": 223368192.0, "14440": 222843904.0, "14445": 222843904.0, "14450": 223368192.0, "14455": 223237120.0, "14460": 223237120.0, "14465": 222843904.0, "14470": 222843904.0, "14475": 222843904.0, "14480": 222843904.0, "14485": 222843904.0, "14490": 223237120.0, "14495": 223237120.0, "14500": 223237120.0, "14505": 223237120.0, "14510": 223237120.0, "14515": 223237120.0, "14520": 222843904.0, "14525": 223237120.0, "14530": 223237120.0, "14535": 223368192.0, "14540": 222843904.0, "14545": 222843904.0, "14550": 223237120.0, "14555": 223237120.0, "14560": 223237120.0, "14565": 222843904.0, "14570": 222843904.0, "14575": 222843904.0, "14580": 222843904.0, "14585": 223237120.0, "14590": 222843904.0, "14595": 223368192.0, "14600": 223237120.0, "14605": 222843904.0, "14610": 223237120.0, "14615": 223368192.0, "14620": 223237120.0, "14625": 222843904.0, "14630": 222843904.0, "14635": 223237120.0, "14640": 222843904.0, "14645": 222843904.0, "14650": 222843904.0, "14655": 223237120.0, "14660": 223237120.0, "14665": 222843904.0, "14670": 223368192.0, "14675": 222843904.0, "14680": 222843904.0, "14685": 222843904.0, "14690": 223368192.0, "14695": 223237120.0, "14700": 223237120.0, "14705": 222843904.0, "14710": 222843904.0, "14715": 222843904.0, "14720": 223237120.0, "14725": 223368192.0, "14730": 223237120.0, "14735": 222843904.0, "14740": 223237120.0, "14745": 222843904.0, "14750": 223368192.0, "14755": 223237120.0, "14760": 223237120.0, "14765": 223237120.0, "14770": 223237120.0, "14775": 222843904.0, "14780": 223237120.0, "14785": 
222843904.0, "14790": 223237120.0, "14795": 222843904.0, "14800": 222843904.0, "14805": 223237120.0, "14810": 223237120.0, "14815": 223237120.0, "14820": 223368192.0, "14825": 223237120.0, "14830": 222843904.0, "14835": 222843904.0, "14840": 223237120.0, "14845": 223237120.0, "14850": 222843904.0, "14855": 223237120.0, "14860": 223237120.0, "14865": 222843904.0, "14870": 223237120.0, "14875": 223368192.0, "14880": 222843904.0, "14885": 223237120.0, "14890": 223237120.0, "14895": 223368192.0, "14900": 223368192.0, "14905": 223237120.0, "14910": 223368192.0, "14915": 223368192.0, "14920": 223237120.0, "14925": 223368192.0, "14930": 222843904.0, "14935": 223237120.0, "14940": 222843904.0, "14945": 223237120.0, "14950": 223237120.0, "14955": 222843904.0, "14960": 222843904.0, "14965": 222843904.0, "14970": 223237120.0, "14975": 222843904.0, "14980": 222843904.0, "14985": 222843904.0, "14990": 222843904.0, "14995": 222843904.0, "15000": 222843904.0, "15005": 223237120.0, "15010": 222843904.0, "15015": 223237120.0, "15020": 223237120.0, "15025": 223237120.0, "15030": 223368192.0, "15035": 222843904.0, "15040": 223237120.0, "15045": 223237120.0, "15050": 222843904.0, "15055": 222843904.0, "15060": 223237120.0, "15065": 223237120.0, "15070": 223237120.0, "15075": 223368192.0, "15080": 223368192.0, "15085": 222843904.0, "15090": 223368192.0, "15095": 223237120.0, "15100": 222843904.0, "15105": 223237120.0, "15110": 223237120.0, "15115": 223368192.0, "15120": 223368192.0, "15125": 223368192.0, "15130": 222843904.0, "15135": 223237120.0, "15140": 222843904.0, "15145": 222843904.0, "15150": 223237120.0, "15155": 222843904.0, "15160": 222843904.0, "15165": 222843904.0, "15170": 223237120.0, "15175": 222843904.0, "15180": 223237120.0, "15185": 223368192.0, "15190": 223368192.0, "15195": 223237120.0, "15200": 223237120.0, "15205": 223237120.0, "15210": 223237120.0, "15215": 223368192.0, "15220": 223237120.0, "15225": 223237120.0, "15230": 222843904.0, "15235": 223237120.0, "15240": 223237120.0, "15245": 223237120.0, "15250": 223237120.0, "15255": 223237120.0, "15260": 223237120.0, "15265": 223237120.0, "15270": 222843904.0, "15275": 223237120.0, "15280": 223368192.0, "15285": 223237120.0, "15290": 223368192.0, "15295": 223237120.0, "15300": 223237120.0, "15305": 222843904.0, "15310": 222843904.0, "15315": 223237120.0, "15320": 222843904.0, "15325": 223368192.0, "15330": 223368192.0, "15335": 223368192.0, "15340": 222843904.0, "15345": 223237120.0, "15350": 223237120.0, "15355": 223237120.0, "15360": 223368192.0, "15365": 222843904.0, "15370": 223237120.0, "15375": 223237120.0, "15380": 223368192.0, "15385": 223237120.0, "15390": 223237120.0, "15395": 223237120.0, "15400": 223237120.0, "15405": 222843904.0, "15410": 222843904.0, "15415": 223368192.0, "15420": 223237120.0, "15425": 223368192.0, "15430": 223237120.0, "15435": 223237120.0, "15440": 223237120.0, "15445": 223237120.0, "15450": 223237120.0, "15455": 222843904.0, "15460": 222843904.0, "15465": 223237120.0, "15470": 222843904.0, "15475": 223237120.0, "15480": 223368192.0, "15485": 223237120.0, "15490": 222843904.0, "15495": 223368192.0, "15500": 223237120.0, "15505": 223368192.0, "15510": 223237120.0, "15515": 222843904.0, "15520": 223237120.0, "15525": 223368192.0, "15530": 223368192.0, "15535": 223237120.0, "15540": 223237120.0, "15545": 223237120.0, "15550": 223237120.0, "15555": 223237120.0, "15560": 222843904.0, "15565": 222843904.0, "15570": 223237120.0, "15575": 223237120.0, "15580": 223237120.0, "15585": 223237120.0, "15590": 223368192.0, 
"15595": 223237120.0, "15600": 223237120.0, "15605": 223368192.0, "15610": 223368192.0, "15615": 222843904.0, "15620": 222843904.0, "15625": 223237120.0, "15630": 223368192.0, "15635": 222843904.0, "15640": 223368192.0, "15645": 223368192.0, "15650": 222843904.0, "15655": 223237120.0, "15660": 223237120.0, "15665": 223237120.0, "15670": 223368192.0, "15675": 223237120.0, "15680": 223237120.0, "15685": 222843904.0, "15690": 223237120.0, "15695": 223237120.0, "15700": 223237120.0, "15705": 222843904.0, "15710": 222843904.0, "15715": 223237120.0, "15720": 222843904.0, "15725": 223368192.0, "15730": 223237120.0, "15735": 223237120.0, "15740": 223237120.0, "15745": 222843904.0, "15750": 223237120.0, "15755": 223237120.0, "15760": 222843904.0, "15765": 223237120.0, "15770": 223368192.0, "15775": 222843904.0, "15780": 222843904.0, "15785": 223237120.0, "15790": 222843904.0, "15795": 223237120.0, "15800": 223237120.0, "15805": 222843904.0, "15810": 223237120.0, "15815": 222843904.0, "15820": 223237120.0, "15825": 223237120.0, "15830": 223237120.0, "15835": 223237120.0, "15840": 223237120.0, "15845": 223368192.0, "15850": 223368192.0, "15855": 223237120.0, "15860": 223237120.0, "15865": 223237120.0, "15870": 223237120.0, "15875": 223237120.0, "15880": 223368192.0, "15885": 223237120.0, "15890": 223237120.0, "15895": 222843904.0, "15900": 223237120.0, "15905": 223368192.0, "15910": 223237120.0, "15915": 222843904.0, "15920": 223237120.0, "15925": 223237120.0, "15930": 223368192.0, "15935": 223368192.0, "15940": 223237120.0, "15945": 222843904.0, "15950": 222843904.0, "15955": 223368192.0, "15960": 223368192.0, "15965": 223237120.0, "15970": 222843904.0, "15975": 223237120.0, "15980": 223368192.0, "15985": 222843904.0, "15990": 223237120.0, "15995": 223368192.0, "16000": 222843904.0, "16005": 223237120.0, "16010": 223237120.0, "16015": 222843904.0, "16020": 223237120.0, "16025": 223368192.0, "16030": 223237120.0, "16035": 223368192.0, "16040": 223237120.0, "16045": 223368192.0, "16050": 223237120.0, "16055": 223237120.0, "16060": 223237120.0, "16065": 223237120.0, "16070": 223368192.0, "16075": 223368192.0, "16080": 223368192.0, "16085": 223237120.0, "16090": 223237120.0, "16095": 223237120.0, "16100": 223368192.0, "16105": 223237120.0, "16110": 223237120.0, "16115": 223237120.0, "16120": 223368192.0, "16125": 222843904.0, "16130": 222843904.0, "16135": 223237120.0, "16140": 222843904.0, "16145": 223237120.0, "16150": 222843904.0, "16155": 223368192.0, "16160": 222843904.0, "16165": 222843904.0, "16170": 223368192.0, "16175": 223368192.0, "16180": 223368192.0, "16185": 223237120.0, "16190": 222843904.0, "16195": 223368192.0, "16200": 222843904.0, "16205": 222843904.0, "16210": 222843904.0, "16215": 223237120.0, "16220": 223237120.0, "16225": 223237120.0, "16230": 222843904.0, "16235": 223237120.0, "16240": 223237120.0, "16245": 222843904.0, "16250": 223237120.0, "16255": 222843904.0, "16260": 222843904.0, "16265": 222843904.0, "16270": 222843904.0, "16275": 223237120.0, "16280": 223237120.0, "16285": 222843904.0, "16290": 223237120.0, "16295": 223237120.0, "16300": 223237120.0, "16305": 223237120.0, "16310": 223237120.0, "16315": 223237120.0, "16320": 223368192.0, "16325": 223368192.0, "16330": 222843904.0, "16335": 223368192.0, "16340": 222843904.0, "16345": 223237120.0, "16350": 222843904.0, "16355": 223237120.0, "16360": 223237120.0, "16365": 223237120.0, "16370": 223237120.0, "16375": 223368192.0, "16380": 223237120.0, "16385": 223237120.0, "16390": 223368192.0, "16395": 222843904.0, "16400": 
223237120.0, "16405": 223237120.0, "16410": 223368192.0, "16415": 223237120.0, "16420": 223237120.0, "16425": 222843904.0, "16430": 222843904.0, "16435": 223237120.0, "16440": 223368192.0, "16445": 223237120.0, "16450": 223237120.0, "16455": 223237120.0, "16460": 223368192.0, "16465": 223237120.0, "16470": 222843904.0, "16475": 223237120.0, "16480": 223237120.0, "16485": 223237120.0, "16490": 223237120.0, "16495": 222843904.0, "16500": 222843904.0, "16505": 223237120.0, "16510": 223237120.0, "16515": 222843904.0, "16520": 223368192.0, "16525": 222843904.0, "16530": 222843904.0, "16535": 223237120.0, "16540": 222843904.0, "16545": 223237120.0, "16550": 223237120.0, "16555": 222843904.0, "16560": 223237120.0, "16565": 223237120.0, "16570": 222843904.0, "16575": 222843904.0, "16580": 222843904.0, "16585": 223237120.0, "16590": 223237120.0, "16595": 222843904.0, "16600": 223237120.0, "16605": 222843904.0, "16610": 222843904.0, "16615": 222843904.0, "16620": 223237120.0, "16625": 223368192.0, "16630": 223237120.0, "16635": 223368192.0, "16640": 223237120.0, "16645": 222843904.0, "16650": 223237120.0, "16655": 223237120.0, "16660": 223237120.0, "16665": 223237120.0, "16670": 222843904.0, "16675": 223237120.0, "16680": 223237120.0, "16685": 223237120.0, "16690": 223237120.0, "16695": 223237120.0, "16700": 223237120.0, "16705": 222843904.0, "16710": 222843904.0, "16715": 222843904.0, "16720": 222843904.0, "16725": 223237120.0, "16730": 223237120.0, "16735": 222843904.0, "16740": 222843904.0, "16745": 223237120.0, "16750": 223237120.0, "16755": 222843904.0, "16760": 223237120.0, "16765": 223368192.0, "16770": 223237120.0, "16775": 223368192.0, "16780": 222843904.0, "16785": 223237120.0, "16790": 222843904.0, "16795": 223237120.0, "16800": 222843904.0, "16805": 222843904.0, "16810": 223237120.0, "16815": 223368192.0, "16820": 223237120.0, "16825": 223237120.0, "16830": 223237120.0, "16835": 223237120.0, "16840": 222843904.0, "16845": 222843904.0, "16850": 222843904.0, "16855": 223237120.0, "16860": 223368192.0, "16865": 223237120.0, "16870": 223368192.0, "16875": 223368192.0, "16880": 223237120.0, "16885": 222843904.0, "16890": 223237120.0, "16895": 222843904.0, "16900": 222843904.0, "16905": 222843904.0, "16910": 222843904.0, "16915": 223237120.0, "16920": 223237120.0, "16925": 223237120.0, "16930": 223368192.0, "16935": 222843904.0, "16940": 223237120.0, "16945": 222843904.0, "16950": 223368192.0, "16955": 222843904.0, "16960": 222843904.0, "16965": 223237120.0, "16970": 223237120.0, "16975": 223237120.0, "16980": 222843904.0, "16985": 222843904.0, "16990": 222843904.0, "16995": 223237120.0, "17000": 223368192.0, "17005": 222843904.0, "17010": 222843904.0, "17015": 222843904.0, "17020": 223368192.0, "17025": 222843904.0, "17030": 223237120.0, "17035": 223368192.0, "17040": 222843904.0, "17045": 223237120.0, "17050": 223237120.0, "17055": 223237120.0, "17060": 222843904.0, "17065": 223237120.0, "17070": 223237120.0, "17075": 223237120.0, "17080": 223237120.0, "17085": 223368192.0, "17090": 222843904.0, "17095": 223237120.0, "17100": 222843904.0, "17105": 223368192.0, "17110": 222843904.0, "17115": 223237120.0, "17120": 223237120.0, "17125": 222843904.0, "17130": 223237120.0, "17135": 223237120.0, "17140": 223237120.0, "17145": 223237120.0, "17150": 223237120.0, "17155": 223368192.0, "17160": 222843904.0, "17165": 223237120.0, "17170": 223237120.0, "17175": 222843904.0, "17180": 223368192.0, "17185": 222843904.0, "17190": 223237120.0, "17195": 223237120.0, "17200": 223237120.0, "17205": 223237120.0, 
"17210": 222843904.0, "17215": 223237120.0, "17220": 222843904.0, "17225": 223237120.0, "17230": 223237120.0, "17235": 222843904.0, "17240": 223237120.0, "17245": 223237120.0, "17250": 223237120.0, "17255": 223237120.0, "17260": 223237120.0, "17265": 223237120.0, "17270": 223237120.0, "17275": 222843904.0, "17280": 223237120.0, "17285": 223237120.0, "17290": 222843904.0, "17295": 223368192.0, "17300": 223237120.0, "17305": 222843904.0, "17310": 223237120.0, "17315": 223237120.0, "17320": 223237120.0, "17325": 223237120.0, "17330": 222843904.0, "17335": 222843904.0, "17340": 222843904.0, "17345": 222843904.0, "17350": 223237120.0, "17355": 223237120.0, "17360": 223368192.0, "17365": 222843904.0, "17370": 223368192.0, "17375": 223237120.0, "17380": 223237120.0, "17385": 222843904.0, "17390": 222843904.0, "17395": 222843904.0, "17400": 223237120.0, "17405": 222843904.0, "17410": 223237120.0, "17415": 222843904.0, "17420": 223237120.0, "17425": 222843904.0, "17430": 223237120.0, "17435": 222843904.0, "17440": 222843904.0, "17445": 223237120.0, "17450": 222843904.0, "17455": 223237120.0, "17460": 223237120.0, "17465": 223237120.0, "17470": 222843904.0, "17475": 223237120.0, "17480": 222843904.0, "17485": 222843904.0, "17490": 223237120.0, "17495": 223237120.0, "17500": 222843904.0, "17505": 223368192.0, "17510": 222843904.0, "17515": 223237120.0, "17520": 223237120.0, "17525": 223237120.0, "17530": 223237120.0, "17535": 223237120.0, "17540": 223368192.0, "17545": 223237120.0, "17550": 223237120.0, "17555": 223237120.0, "17560": 223237120.0, "17565": 223237120.0, "17570": 222843904.0, "17575": 223237120.0, "17580": 222843904.0, "17585": 222843904.0, "17590": 223237120.0, "17595": 222843904.0, "17600": 223368192.0, "17605": 222843904.0, "17610": 222843904.0, "17615": 223237120.0, "17620": 223237120.0, "17625": 223237120.0, "17630": 222843904.0, "17635": 223237120.0, "17640": 222843904.0, "17645": 223237120.0, "17650": 223237120.0, "17655": 223237120.0, "17660": 223237120.0, "17665": 223237120.0, "17670": 223237120.0, "17675": 223237120.0, "17680": 222843904.0, "17685": 223368192.0, "17690": 223237120.0, "17695": 223237120.0, "17700": 222843904.0, "17705": 222843904.0, "17710": 222843904.0, "17715": 223237120.0, "17720": 222843904.0, "17725": 222843904.0, "17730": 222843904.0, "17735": 223368192.0, "17740": 222843904.0, "17745": 222843904.0, "17750": 223237120.0, "17755": 223237120.0, "17760": 223237120.0, "17765": 223237120.0, "17770": 223237120.0, "17775": 223237120.0, "17780": 223237120.0, "17785": 223237120.0, "17790": 222843904.0, "17795": 223237120.0, "17800": 223237120.0, "17805": 223237120.0, "17810": 222843904.0, "17815": 223237120.0, "17820": 222843904.0, "17825": 223237120.0, "17830": 223368192.0, "17835": 223237120.0, "17840": 223368192.0, "17845": 222843904.0, "17850": 223237120.0, "17855": 223368192.0, "17860": 222843904.0, "17865": 222843904.0, "17870": 223237120.0, "17875": 222843904.0, "17880": 223368192.0, "17885": 223368192.0, "17890": 223237120.0, "17895": 223237120.0, "17900": 223237120.0, "17905": 222843904.0, "17910": 223368192.0, "17915": 222843904.0, "17920": 222843904.0, "17925": 223368192.0, "17930": 222843904.0, "17935": 223237120.0, "17940": 223237120.0, "17945": 223368192.0, "17950": 222843904.0, "17955": 223237120.0, "17960": 223237120.0, "17965": 223237120.0, "17970": 223237120.0, "17975": 223237120.0, "17980": 223237120.0, "17985": 222843904.0, "17990": 223237120.0, "17995": 222843904.0, "18000": 222843904.0, "18005": 223368192.0, "18010": 223237120.0, "18015": 
223237120.0, "18020": 223368192.0, "18025": 223237120.0, "18030": 222843904.0, "18035": 222843904.0, "18040": 222843904.0, "18045": 222843904.0, "18050": 222843904.0, "18055": 223368192.0, "18060": 223368192.0, "18065": 223237120.0, "18070": 223237120.0, "18075": 222843904.0, "18080": 223237120.0, "18085": 223237120.0, "18090": 223237120.0, "18095": 223237120.0, "18100": 223237120.0, "18105": 223368192.0, "18110": 222843904.0, "18115": 223237120.0, "18120": 223237120.0, "18125": 222843904.0, "18130": 223237120.0, "18135": 223237120.0, "18140": 223237120.0, "18145": 222843904.0, "18150": 223368192.0, "18155": 223237120.0, "18160": 223237120.0, "18165": 223237120.0, "18170": 223237120.0, "18175": 223237120.0, "18180": 223237120.0, "18185": 222843904.0, "18190": 223237120.0, "18195": 222843904.0, "18200": 223237120.0, "18205": 223368192.0, "18210": 223237120.0, "18215": 223237120.0, "18220": 223237120.0, "18225": 223237120.0, "18230": 223237120.0, "18235": 223237120.0, "18240": 222843904.0, "18245": 222843904.0, "18250": 223237120.0, "18255": 222843904.0, "18260": 223368192.0, "18265": 223368192.0, "18270": 223368192.0, "18275": 223368192.0, "18280": 223237120.0, "18285": 223368192.0, "18290": 223237120.0, "18295": 223368192.0, "18300": 223237120.0, "18305": 223237120.0, "18310": 222843904.0, "18315": 223237120.0, "18320": 223237120.0, "18325": 223237120.0, "18330": 223368192.0, "18335": 223368192.0, "18340": 222843904.0, "18345": 223237120.0, "18350": 223368192.0, "18355": 223368192.0, "18360": 222843904.0, "18365": 223237120.0, "18370": 223237120.0, "18375": 223237120.0, "18380": 222843904.0, "18385": 223237120.0, "18390": 223237120.0, "18395": 222843904.0, "18400": 223237120.0, "18405": 222843904.0, "18410": 223368192.0, "18415": 223237120.0, "18420": 223368192.0, "18425": 223237120.0, "18430": 223237120.0, "18435": 223368192.0, "18440": 223368192.0, "18445": 223237120.0, "18450": 223237120.0, "18455": 223237120.0, "18460": 223368192.0, "18465": 223237120.0, "18470": 223237120.0, "18475": 223237120.0, "18480": 223237120.0, "18485": 222843904.0, "18490": 223237120.0, "18495": 223237120.0, "18500": 223237120.0, "18505": 222843904.0, "18510": 222843904.0, "18515": 223368192.0, "18520": 223237120.0, "18525": 223237120.0, "18530": 223237120.0, "18535": 223237120.0, "18540": 222843904.0, "18545": 223237120.0, "18550": 222843904.0, "18555": 223237120.0, "18560": 222843904.0, "18565": 223237120.0, "18570": 222843904.0, "18575": 223237120.0, "18580": 223237120.0, "18585": 223237120.0, "18590": 223368192.0, "18595": 223237120.0, "18600": 223237120.0, "18605": 223237120.0, "18610": 222843904.0, "18615": 222843904.0, "18620": 222843904.0, "18625": 223237120.0, "18630": 222843904.0, "18635": 222843904.0, "18640": 222843904.0, "18645": 223237120.0, "18650": 223237120.0, "18655": 223237120.0, "18660": 222843904.0, "18665": 223237120.0, "18670": 223237120.0, "18675": 223237120.0, "18680": 222843904.0, "18685": 223237120.0, "18690": 222843904.0, "18695": 223237120.0, "18700": 223368192.0, "18705": 223237120.0, "18710": 223237120.0, "18715": 222843904.0, "18720": 223237120.0, "18725": 223237120.0, "18730": 222843904.0, "18735": 223237120.0, "18740": 223237120.0, "18745": 222843904.0, "18750": 223237120.0, "18755": 223237120.0, "18760": 222843904.0, "18765": 222843904.0, "18770": 223237120.0, "18775": 222843904.0, "18780": 222843904.0, "18785": 223237120.0, "18790": 223237120.0, "18795": 222843904.0, "18800": 223237120.0, "18805": 222843904.0, "18810": 223368192.0, "18815": 223237120.0, "18820": 223237120.0, 
"18825": 223237120.0, "18830": 223237120.0, "18835": 223368192.0, "18840": 223368192.0, "18845": 223237120.0, "18850": 223237120.0, "18855": 223237120.0, "18860": 223237120.0, "18865": 222843904.0, "18870": 223237120.0, "18875": 222843904.0, "18880": 223237120.0, "18885": 223368192.0, "18890": 223368192.0, "18895": 223237120.0, "18900": 223237120.0, "18905": 223237120.0, "18910": 223237120.0, "18915": 223237120.0, "18920": 222843904.0, "18925": 222843904.0, "18930": 223237120.0, "18935": 223368192.0, "18940": 223368192.0, "18945": 222843904.0, "18950": 223368192.0, "18955": 223237120.0, "18960": 223237120.0, "18965": 223237120.0, "18970": 223237120.0, "18975": 223237120.0, "18980": 222843904.0, "18985": 222843904.0, "18990": 223368192.0, "18995": 223237120.0, "19000": 223237120.0, "19005": 222843904.0, "19010": 223237120.0, "19015": 222843904.0, "19020": 223237120.0, "19025": 222843904.0, "19030": 223237120.0, "19035": 223237120.0, "19040": 223237120.0, "19045": 222843904.0, "19050": 222843904.0, "19055": 223237120.0, "19060": 223237120.0, "19065": 223237120.0, "19070": 223237120.0, "19075": 223237120.0, "19080": 223237120.0, "19085": 223237120.0, "19090": 222843904.0, "19095": 222843904.0, "19100": 222843904.0, "19105": 223237120.0, "19110": 223237120.0, "19115": 223237120.0, "19120": 223237120.0, "19125": 222843904.0, "19130": 223237120.0, "19135": 223237120.0, "19140": 223237120.0, "19145": 223368192.0, "19150": 223368192.0, "19155": 223237120.0, "19160": 223237120.0, "19165": 223237120.0, "19170": 223237120.0, "19175": 223368192.0, "19180": 223237120.0, "19185": 222843904.0, "19190": 222843904.0, "19195": 222843904.0, "19200": 223237120.0, "19205": 223237120.0, "19210": 223368192.0, "19215": 222843904.0, "19220": 223368192.0, "19225": 223368192.0, "19230": 223237120.0, "19235": 223368192.0, "19240": 223237120.0, "19245": 223237120.0, "19250": 223368192.0, "19255": 223237120.0, "19260": 223237120.0, "19265": 223237120.0, "19270": 222843904.0, "19275": 222843904.0, "19280": 223237120.0, "19285": 223237120.0, "19290": 223237120.0, "19295": 222843904.0, "19300": 223237120.0, "19305": 222843904.0, "19310": 223237120.0, "19315": 223368192.0, "19320": 223237120.0, "19325": 222843904.0, "19330": 222843904.0, "19335": 223368192.0, "19340": 222843904.0, "19345": 222843904.0, "19350": 222843904.0, "19355": 223237120.0, "19360": 223237120.0, "19365": 222843904.0, "19370": 222843904.0, "19375": 223237120.0, "19380": 223368192.0, "19385": 223237120.0, "19390": 223237120.0, "19395": 222843904.0, "19400": 223237120.0, "19405": 222843904.0, "19410": 223237120.0, "19415": 223237120.0, "19420": 223237120.0, "19425": 223237120.0, "19430": 223237120.0, "19435": 222843904.0, "19440": 222843904.0, "19445": 223237120.0, "19450": 223237120.0, "19455": 223368192.0, "19460": 222843904.0, "19465": 223237120.0, "19470": 223237120.0, "19475": 223237120.0, "19480": 223237120.0, "19485": 223237120.0, "19490": 223237120.0, "19495": 223237120.0, "19500": 222843904.0, "19505": 223237120.0, "19510": 223237120.0, "19515": 223237120.0, "19520": 223368192.0, "19525": 223237120.0, "19530": 223237120.0, "19535": 223237120.0, "19540": 223237120.0, "19545": 223237120.0, "19550": 222843904.0, "19555": 222843904.0, "19560": 222843904.0, "19565": 223237120.0, "19570": 223237120.0, "19575": 222843904.0, "19580": 223368192.0, "19585": 223237120.0, "19590": 223237120.0, "19595": 223368192.0, "19600": 223237120.0, "19605": 223368192.0, "19610": 223237120.0, "19615": 223237120.0, "19620": 223237120.0, "19625": 222843904.0, "19630": 
222843904.0, "19635": 223368192.0, "19640": 223237120.0, "19645": 223237120.0, "19650": 223237120.0, "19655": 222843904.0, "19660": 223237120.0, "19665": 223237120.0, "19670": 223237120.0, "19675": 222843904.0, "19680": 223368192.0, "19685": 222843904.0, "19690": 223237120.0, "19695": 223237120.0, "19700": 223237120.0, "19705": 223237120.0, "19710": 223237120.0, "19715": 223237120.0, "19720": 223237120.0, "19725": 222843904.0, "19730": 223368192.0, "19735": 222843904.0, "19740": 223237120.0, "19745": 223368192.0, "19750": 223237120.0, "19755": 222843904.0, "19760": 223237120.0, "19765": 223237120.0, "19770": 223237120.0, "19775": 223237120.0, "19780": 222843904.0, "19785": 223368192.0, "19790": 223237120.0, "19795": 223368192.0, "19800": 222843904.0, "19805": 223368192.0, "19810": 223237120.0, "19815": 223237120.0, "19820": 222843904.0, "19825": 223237120.0, "19830": 223237120.0, "19835": 223368192.0, "19840": 223237120.0, "19845": 223368192.0, "19850": 223237120.0, "19855": 222843904.0, "19860": 223237120.0, "19865": 222843904.0, "19870": 222843904.0, "19875": 222843904.0, "19880": 223237120.0, "19885": 223237120.0, "19890": 223237120.0, "19895": 222843904.0, "19900": 222843904.0, "19905": 223237120.0, "19910": 222843904.0, "19915": 223237120.0, "19920": 223237120.0, "19925": 222843904.0, "19930": 223368192.0, "19935": 223237120.0, "19940": 222843904.0, "19945": 223237120.0, "19950": 223237120.0, "19955": 223237120.0, "19960": 223237120.0, "19965": 223237120.0, "19970": 223237120.0, "19975": 222843904.0, "19980": 223237120.0, "19985": 223368192.0, "19990": 222843904.0, "19995": 223368192.0, "20000": 222843904.0}}, "num-zeros": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1398.0, "25": 1453.0, "30": 1525.0, "35": 1484.0, "40": 1567.0, "45": 1637.0, "50": 1613.0, "55": 1689.0, "60": 1587.0, "65": 1635.0, "70": 1667.0, "75": 1619.0, "80": 1591.0, "85": 1608.0, "90": 1435.0, "95": 1509.0, "100": 1488.0, "105": 1475.0, "110": 1490.0, "115": 1442.0, "120": 1527.0, "125": 1499.0, "130": 1458.0, "135": 1491.0, "140": 1506.0, "145": 1543.0, "150": 1472.0, "155": 1511.0, "160": 1480.0, "165": 1569.0, "170": 1430.0, "175": 1449.0, "180": 1511.0, "185": 1528.0, "190": 1509.0, "195": 1569.0, "200": 1441.0, "205": 1583.0, "210": 1447.0, "215": 1494.0, "220": 1509.0, "225": 1473.0, "230": 1473.0, "235": 1590.0, "240": 1505.0, "245": 1470.0, "250": 1531.0, "255": 1512.0, "260": 1580.0, "265": 1491.0, "270": 1524.0, "275": 1578.0, "280": 1504.0, "285": 1555.0, "290": 1629.0, "295": 1605.0, "300": 1568.0, "305": 1458.0, "310": 1518.0, "315": 1488.0, "320": 1596.0, "325": 1503.0, "330": 1502.0, "335": 1477.0, "340": 1540.0, "345": 1534.0, "350": 1526.0, "355": 1533.0, "360": 1492.0, "365": 1562.0, "370": 1532.0, "375": 1518.0, "380": 1589.0, "385": 1543.0, "390": 1438.0, "395": 1572.0, "400": 1554.0, "405": 1585.0, "410": 1563.0, "415": 1553.0, "420": 1471.0, "425": 1629.0, "430": 1491.0, "435": 1482.0, "440": 1548.0, "445": 1546.0, "450": 1513.0, "455": 1529.0, "460": 1549.0, "465": 1602.0, "470": 1468.0, "475": 1635.0, "480": 1580.0, "485": 1505.0, "490": 1560.0, "495": 1494.0, "500": 1437.0, "505": 1490.0, "510": 1557.0, "515": 1528.0, "520": 1530.0, "525": 1463.0, "530": 1528.0, "535": 1583.0, "540": 1582.0, "545": 1514.0, "550": 1634.0, "555": 1572.0, "560": 1554.0, "565": 1658.0, "570": 1692.0, "575": 1548.0, "580": 1551.0, "585": 1584.0, "590": 1513.0, "595": 1595.0, "600": 1600.0, "605": 1514.0, "610": 1507.0, "615": 
1533.0, "620": 1590.0, "625": 1538.0, "630": 1599.0, "635": 1585.0, "640": 1481.0, "645": 1551.0, "650": 1570.0, "655": 1588.0, "660": 1569.0, "665": 1536.0, "670": 1568.0, "675": 1530.0, "680": 1601.0, "685": 1513.0, "690": 1495.0, "695": 1566.0, "700": 1500.0, "705": 1594.0, "710": 1493.0, "715": 1532.0, "720": 1553.0, "725": 1411.0, "730": 1501.0, "735": 1522.0, "740": 1548.0, "745": 1560.0, "750": 1596.0, "755": 1545.0, "760": 1638.0, "765": 1553.0, "770": 1567.0, "775": 1558.0, "780": 1575.0, "785": 1494.0, "790": 1650.0, "795": 1564.0, "800": 1547.0, "805": 1488.0, "810": 1584.0, "815": 1634.0, "820": 1552.0, "825": 1506.0, "830": 1612.0, "835": 1566.0, "840": 1629.0, "845": 1492.0, "850": 1536.0, "855": 1545.0, "860": 1552.0, "865": 1546.0, "870": 1487.0, "875": 1572.0, "880": 1550.0, "885": 1537.0, "890": 1589.0, "895": 1608.0, "900": 1564.0, "905": 1651.0, "910": 1562.0, "915": 1506.0, "920": 1598.0, "925": 1550.0, "930": 1550.0, "935": 1529.0, "940": 1531.0, "945": 1555.0, "950": 1538.0, "955": 1560.0, "960": 1597.0, "965": 1549.0, "970": 1505.0, "975": 1515.0, "980": 1551.0, "985": 1565.0, "990": 1521.0, "995": 1519.0, "1000": 1600.0, "1005": 1571.0, "1010": 1534.0, "1015": 1585.0, "1020": 1530.0, "1025": 1582.0, "1030": 1518.0, "1035": 1526.0, "1040": 1540.0, "1045": 1563.0, "1050": 1532.0, "1055": 1521.0, "1060": 1562.0, "1065": 1480.0, "1070": 1530.0, "1075": 1405.0, "1080": 1625.0, "1085": 1585.0, "1090": 1557.0, "1095": 1487.0, "1100": 1512.0, "1105": 1538.0, "1110": 1583.0, "1115": 1351.0, "1120": 1542.0, "1125": 1566.0, "1130": 1581.0, "1135": 1538.0, "1140": 1518.0, "1145": 1600.0, "1150": 1585.0, "1155": 1603.0, "1160": 1565.0, "1165": 1535.0, "1170": 1708.0, "1175": 1651.0, "1180": 1492.0, "1185": 1594.0, "1190": 1591.0, "1195": 1503.0, "1200": 1649.0, "1205": 1496.0, "1210": 1543.0, "1215": 1565.0, "1220": 1506.0, "1225": 1559.0, "1230": 1587.0, "1235": 1484.0, "1240": 1497.0, "1245": 1617.0, "1250": 1471.0, "1255": 1510.0, "1260": 1589.0, "1265": 1545.0, "1270": 1522.0, "1275": 1565.0, "1280": 1519.0, "1285": 1506.0, "1290": 1532.0, "1295": 1596.0, "1300": 1662.0, "1305": 1572.0, "1310": 1492.0, "1315": 1589.0, "1320": 1568.0, "1325": 1540.0, "1330": 1542.0, "1335": 1541.0, "1340": 1568.0, "1345": 1479.0, "1350": 1622.0, "1355": 1478.0, "1360": 1539.0, "1365": 1580.0, "1370": 1513.0, "1375": 1495.0, "1380": 1561.0, "1385": 1561.0, "1390": 1691.0, "1395": 1578.0, "1400": 1532.0, "1405": 1547.0, "1410": 1586.0, "1415": 1515.0, "1420": 1706.0, "1425": 1669.0, "1430": 1560.0, "1435": 1600.0, "1440": 1565.0, "1445": 1537.0, "1450": 1570.0, "1455": 1481.0, "1460": 1554.0, "1465": 1505.0, "1470": 1599.0, "1475": 1603.0, "1480": 1587.0, "1485": 1577.0, "1490": 1622.0, "1495": 1624.0, "1500": 1607.0, "1505": 1567.0, "1510": 1801.0, "1515": 1549.0, "1520": 1480.0, "1525": 1520.0, "1530": 1566.0, "1535": 1513.0, "1540": 1527.0, "1545": 1575.0, "1550": 1517.0, "1555": 1528.0, "1560": 1523.0, "1565": 1529.0, "1570": 1642.0, "1575": 1502.0, "1580": 1495.0, "1585": 1466.0, "1590": 1552.0, "1595": 1562.0, "1600": 1601.0, "1605": 1527.0, "1610": 1554.0, "1615": 1613.0, "1620": 1564.0, "1625": 1541.0, "1630": 1594.0, "1635": 1573.0, "1640": 1583.0, "1645": 1592.0, "1650": 1534.0, "1655": 1565.0, "1660": 1525.0, "1665": 1532.0, "1670": 1462.0, "1675": 1578.0, "1680": 1550.0, "1685": 1618.0, "1690": 1637.0, "1695": 1495.0, "1700": 1644.0, "1705": 1587.0, "1710": 1948.0, "1715": 1517.0, "1720": 1652.0, "1725": 1626.0, "1730": 1585.0, "1735": 1520.0, "1740": 1572.0, "1745": 1560.0, 
"1750": 1501.0, "1755": 1605.0, "1760": 1643.0, "1765": 1552.0, "1770": 1554.0, "1775": 1362.0, "1780": 1488.0, "1785": 1485.0, "1790": 1517.0, "1795": 1610.0, "1800": 1535.0, "1805": 1505.0, "1810": 1576.0, "1815": 1568.0, "1820": 1495.0, "1825": 1534.0, "1830": 1686.0, "1835": 1540.0, "1840": 1483.0, "1845": 1560.0, "1850": 1532.0, "1855": 1604.0, "1860": 1556.0, "1865": 1555.0, "1870": 1518.0, "1875": 1579.0, "1880": 1700.0, "1885": 1469.0, "1890": 1554.0, "1895": 1548.0, "1900": 1492.0, "1905": 1511.0, "1910": 1670.0, "1915": 1638.0, "1920": 1544.0, "1925": 1573.0, "1930": 1505.0, "1935": 1581.0, "1940": 1587.0, "1945": 1590.0, "1950": 1595.0, "1955": 1572.0, "1960": 1511.0, "1965": 1478.0, "1970": 1611.0, "1975": 1553.0, "1980": 1527.0, "1985": 1578.0, "1990": 1550.0, "1995": 1495.0, "2000": 1520.0, "2005": 1563.0, "2010": 1533.0, "2015": 1523.0, "2020": 1611.0, "2025": 1544.0, "2030": 1595.0, "2035": 1581.0, "2040": 1672.0, "2045": 1547.0, "2050": 1546.0, "2055": 1535.0, "2060": 1547.0, "2065": 1540.0, "2070": 1489.0, "2075": 1649.0, "2080": 1562.0, "2085": 1611.0, "2090": 1514.0, "2095": 1605.0, "2100": 1599.0, "2105": 1548.0, "2110": 1531.0, "2115": 1573.0, "2120": 1563.0, "2125": 1553.0, "2130": 1537.0, "2135": 1639.0, "2140": 1576.0, "2145": 1519.0, "2150": 1462.0, "2155": 1551.0, "2160": 1665.0, "2165": 1535.0, "2170": 1664.0, "2175": 1604.0, "2180": 1621.0, "2185": 1535.0, "2190": 1556.0, "2195": 1490.0, "2200": 1462.0, "2205": 1548.0, "2210": 1495.0, "2215": 1495.0, "2220": 1563.0, "2225": 1605.0, "2230": 1488.0, "2235": 1575.0, "2240": 1628.0, "2245": 1523.0, "2250": 1630.0, "2255": 1548.0, "2260": 1559.0, "2265": 1592.0, "2270": 1518.0, "2275": 1601.0, "2280": 1596.0, "2285": 1595.0, "2290": 1564.0, "2295": 1581.0, "2300": 1496.0, "2305": 1475.0, "2310": 1586.0, "2315": 1530.0, "2320": 1522.0, "2325": 1354.0, "2330": 1557.0, "2335": 1473.0, "2340": 1525.0, "2345": 1466.0, "2350": 1535.0, "2355": 1583.0, "2360": 1548.0, "2365": 1557.0, "2370": 1503.0, "2375": 1589.0, "2380": 1583.0, "2385": 1627.0, "2390": 1517.0, "2395": 1585.0, "2400": 1585.0, "2405": 1627.0, "2410": 1510.0, "2415": 1534.0, "2420": 1546.0, "2425": 1557.0, "2430": 1489.0, "2435": 1616.0, "2440": 1595.0, "2445": 1553.0, "2450": 1552.0, "2455": 1525.0, "2460": 1616.0, "2465": 1552.0, "2470": 1626.0, "2475": 1603.0, "2480": 1516.0, "2485": 1608.0, "2490": 1565.0, "2495": 1565.0, "2500": 1569.0, "2505": 1489.0, "2510": 1539.0, "2515": 1520.0, "2520": 1578.0, "2525": 1574.0, "2530": 1624.0, "2535": 1577.0, "2540": 1570.0, "2545": 1527.0, "2550": 1508.0, "2555": 1653.0, "2560": 1598.0, "2565": 1626.0, "2570": 1651.0, "2575": 1468.0, "2580": 1545.0, "2585": 1540.0, "2590": 1481.0, "2595": 1603.0, "2600": 1570.0, "2605": 1691.0, "2610": 1546.0, "2615": 1514.0, "2620": 1677.0, "2625": 1615.0, "2630": 1613.0, "2635": 1566.0, "2640": 1574.0, "2645": 1552.0, "2650": 1559.0, "2655": 1550.0, "2660": 1549.0, "2665": 1617.0, "2670": 1614.0, "2675": 1502.0, "2680": 1667.0, "2685": 1639.0, "2690": 1579.0, "2695": 1534.0, "2700": 1534.0, "2705": 1633.0, "2710": 1644.0, "2715": 1460.0, "2720": 1692.0, "2725": 1639.0, "2730": 1605.0, "2735": 1581.0, "2740": 1533.0, "2745": 1537.0, "2750": 1548.0, "2755": 1521.0, "2760": 1545.0, "2765": 1536.0, "2770": 1601.0, "2775": 1526.0, "2780": 1582.0, "2785": 1571.0, "2790": 1600.0, "2795": 1576.0, "2800": 1586.0, "2805": 1675.0, "2810": 1578.0, "2815": 1596.0, "2820": 1574.0, "2825": 1541.0, "2830": 1545.0, "2835": 1597.0, "2840": 1641.0, "2845": 1651.0, "2850": 1470.0, "2855": 1516.0, 
"2860": 1451.0, "2865": 1546.0, "2870": 1543.0, "2875": 1510.0, "2880": 1600.0, "2885": 1588.0, "2890": 1695.0, "2895": 1586.0, "2900": 1502.0, "2905": 1646.0, "2910": 1635.0, "2915": 1637.0, "2920": 1583.0, "2925": 1511.0, "2930": 1556.0, "2935": 1518.0, "2940": 1532.0, "2945": 1513.0, "2950": 1557.0, "2955": 1562.0, "2960": 1600.0, "2965": 1521.0, "2970": 1543.0, "2975": 1617.0, "2980": 1630.0, "2985": 1549.0, "2990": 1665.0, "2995": 1605.0, "3000": 1559.0, "3005": 1498.0, "3010": 1576.0, "3015": 1498.0, "3020": 1654.0, "3025": 1521.0, "3030": 1681.0, "3035": 1615.0, "3040": 1611.0, "3045": 1580.0, "3050": 1589.0, "3055": 1515.0, "3060": 1561.0, "3065": 1650.0, "3070": 1585.0, "3075": 1607.0, "3080": 1567.0, "3085": 1456.0, "3090": 1563.0, "3095": 1600.0, "3100": 1562.0, "3105": 1573.0, "3110": 1623.0, "3115": 1661.0, "3120": 1562.0, "3125": 1640.0, "3130": 1519.0, "3135": 1621.0, "3140": 1542.0, "3145": 1654.0, "3150": 1588.0, "3155": 1577.0, "3160": 1583.0, "3165": 1527.0, "3170": 1532.0, "3175": 1715.0, "3180": 1563.0, "3185": 1606.0, "3190": 1508.0, "3195": 1698.0, "3200": 1641.0, "3205": 1568.0, "3210": 1562.0, "3215": 1589.0, "3220": 1582.0, "3225": 1585.0, "3230": 1617.0, "3235": 1569.0, "3240": 1510.0, "3245": 1631.0, "3250": 1656.0, "3255": 1543.0, "3260": 1587.0, "3265": 1578.0, "3270": 1578.0, "3275": 1589.0, "3280": 1549.0, "3285": 1535.0, "3290": 1601.0, "3295": 1497.0, "3300": 1616.0, "3305": 1526.0, "3310": 1539.0, "3315": 1551.0, "3320": 1532.0, "3325": 1591.0, "3330": 1602.0, "3335": 1577.0, "3340": 1494.0, "3345": 1557.0, "3350": 1655.0, "3355": 1716.0, "3360": 1578.0, "3365": 1718.0, "3370": 1605.0, "3375": 1653.0, "3380": 1605.0, "3385": 1587.0, "3390": 1553.0, "3395": 1580.0, "3400": 1503.0, "3405": 1506.0, "3410": 1555.0, "3415": 1489.0, "3420": 1552.0, "3425": 1536.0, "3430": 1619.0, "3435": 1543.0, "3440": 1563.0, "3445": 1602.0, "3450": 1578.0, "3455": 1581.0, "3460": 1579.0, "3465": 1632.0, "3470": 1660.0, "3475": 1567.0, "3480": 1683.0, "3485": 1651.0, "3490": 1509.0, "3495": 1578.0, "3500": 1584.0, "3505": 1603.0, "3510": 1578.0, "3515": 1490.0, "3520": 1535.0, "3525": 1593.0, "3530": 1636.0, "3535": 1637.0, "3540": 1571.0, "3545": 1660.0, "3550": 1533.0, "3555": 1623.0, "3560": 1551.0, "3565": 1547.0, "3570": 1538.0, "3575": 1592.0, "3580": 1549.0, "3585": 1616.0, "3590": 1619.0, "3595": 1604.0, "3600": 1726.0, "3605": 1708.0, "3610": 1621.0, "3615": 1640.0, "3620": 1547.0, "3625": 1598.0, "3630": 1595.0, "3635": 1655.0, "3640": 1612.0, "3645": 1631.0, "3650": 1670.0, "3655": 1491.0, "3660": 1536.0, "3665": 1542.0, "3670": 1605.0, "3675": 1693.0, "3680": 1641.0, "3685": 1551.0, "3690": 1543.0, "3695": 1553.0, "3700": 1562.0, "3705": 1615.0, "3710": 1605.0, "3715": 1629.0, "3720": 1572.0, "3725": 1556.0, "3730": 1574.0, "3735": 1573.0, "3740": 1561.0, "3745": 1569.0, "3750": 1555.0, "3755": 1501.0, "3760": 1555.0, "3765": 1558.0, "3770": 1555.0, "3775": 1558.0, "3780": 1602.0, "3785": 1481.0, "3790": 1547.0, "3795": 1548.0, "3800": 1576.0, "3805": 1643.0, "3810": 1639.0, "3815": 1624.0, "3820": 1583.0, "3825": 1624.0, "3830": 1601.0, "3835": 1506.0, "3840": 1621.0, "3845": 1624.0, "3850": 1658.0, "3855": 1679.0, "3860": 1536.0, "3865": 1733.0, "3870": 1592.0, "3875": 1645.0, "3880": 1566.0, "3885": 1539.0, "3890": 1576.0, "3895": 1577.0, "3900": 1571.0, "3905": 1531.0, "3910": 1590.0, "3915": 1575.0, "3920": 1587.0, "3925": 1567.0, "3930": 1505.0, "3935": 1521.0, "3940": 1652.0, "3945": 1638.0, "3950": 1592.0, "3955": 1512.0, "3960": 1538.0, "3965": 1548.0, 
"3970": 1635.0, "3975": 1633.0, "3980": 1553.0, "3985": 1638.0, "3990": 1549.0, "3995": 1639.0, "4000": 1617.0, "4005": 1599.0, "4010": 1595.0, "4015": 1537.0, "4020": 1546.0, "4025": 1608.0, "4030": 1649.0, "4035": 1629.0, "4040": 1566.0, "4045": 1609.0, "4050": 1529.0, "4055": 1483.0, "4060": 1623.0, "4065": 1596.0, "4070": 1667.0, "4075": 1602.0, "4080": 1494.0, "4085": 1535.0, "4090": 1604.0, "4095": 1572.0, "4100": 1542.0, "4105": 1669.0, "4110": 1694.0, "4115": 1582.0, "4120": 1591.0, "4125": 1615.0, "4130": 1486.0, "4135": 1635.0, "4140": 1593.0, "4145": 1570.0, "4150": 1631.0, "4155": 1579.0, "4160": 1662.0, "4165": 1562.0, "4170": 1578.0, "4175": 1524.0, "4180": 1578.0, "4185": 1586.0, "4190": 1665.0, "4195": 1622.0, "4200": 1641.0, "4205": 1679.0, "4210": 1694.0, "4215": 1698.0, "4220": 1529.0, "4225": 1528.0, "4230": 1552.0, "4235": 1660.0, "4240": 1612.0, "4245": 1609.0, "4250": 1639.0, "4255": 1600.0, "4260": 1602.0, "4265": "nan", "4270": 1584.0, "4275": 1602.0, "4280": 1569.0, "4285": 1626.0, "4290": 1560.0, "4295": 1511.0, "4300": 1595.0, "4305": 1608.0, "4310": 1671.0, "4315": 1715.0, "4320": 1609.0, "4325": 1606.0, "4330": 1704.0, "4335": 1561.0, "4340": 1598.0, "4345": 1608.0, "4350": 1612.0, "4355": 1617.0, "4360": 1627.0, "4365": 1593.0, "4370": 1607.0, "4375": 1610.0, "4380": 1612.0, "4385": 1643.0, "4390": 1598.0, "4395": 1674.0, "4400": 1565.0, "4405": 1523.0, "4410": 1686.0, "4415": 1572.0, "4420": 1599.0, "4425": 1592.0, "4430": 1593.0, "4435": 1585.0, "4440": 1542.0, "4445": 1538.0, "4450": 1550.0, "4455": 1626.0, "4460": 1628.0, "4465": 1563.0, "4470": 1624.0, "4475": 1542.0, "4480": 1556.0, "4485": 1695.0, "4490": 1622.0, "4495": 1636.0, "4500": 1673.0, "4505": 1644.0, "4510": 1749.0, "4515": 1672.0, "4520": 1561.0, "4525": 1560.0, "4530": 1629.0, "4535": 1532.0, "4540": 1680.0, "4545": 1596.0, "4550": 1622.0, "4555": 1697.0, "4560": 1532.0, "4565": 1520.0, "4570": 1541.0, "4575": 1600.0, "4580": 1511.0, "4585": 1633.0, "4590": 1601.0, "4595": 1542.0, "4600": 1581.0, "4605": 1613.0, "4610": 1643.0, "4615": 1607.0, "4620": 1628.0, "4625": 1582.0, "4630": 1617.0, "4635": 1645.0, "4640": 1598.0, "4645": 1664.0, "4650": 1615.0, "4655": 1669.0, "4660": 1626.0, "4665": 1585.0, "4670": 1610.0, "4675": 1528.0, "4680": 1555.0, "4685": 1576.0, "4690": 1757.0, "4695": 1537.0, "4700": 1658.0, "4705": 1643.0, "4710": 1677.0, "4715": 1582.0, "4720": 1563.0, "4725": 1691.0, "4730": 1641.0, "4735": 1628.0, "4740": 1628.0, "4745": 1541.0, "4750": 1563.0, "4755": 1568.0, "4760": 1554.0, "4765": 1584.0, "4770": 1600.0, "4775": 1563.0, "4780": 1655.0, "4785": 1583.0, "4790": 1597.0, "4795": 1767.0, "4800": 1633.0, "4805": 1570.0, "4810": 1554.0, "4815": 1670.0, "4820": 1548.0, "4825": 1527.0, "4830": 1583.0, "4835": 1527.0, "4840": 1598.0, "4845": 1622.0, "4850": 1704.0, "4855": 1655.0, "4860": 1564.0, "4865": 1623.0, "4870": 1546.0, "4875": 1581.0, "4880": 1564.0, "4885": 1584.0, "4890": 1655.0, "4895": 1624.0, "4900": 1618.0, "4905": 1643.0, "4910": 1608.0, "4915": 1619.0, "4920": 1729.0, "4925": 1689.0, "4930": 1576.0, "4935": 1647.0, "4940": 1616.0, "4945": 1668.0, "4950": 1535.0, "4955": 1665.0, "4960": 1832.0, "4965": 1543.0, "4970": 1585.0, "4975": 1602.0, "4980": 1622.0, "4985": 1563.0, "4990": 1976.0, "4995": 1669.0, "5000": 1652.0, "5005": 1652.0, "5010": 1594.0, "5015": 1585.0, "5020": 1643.0, "5025": 1601.0, "5030": 1582.0, "5035": 1616.0, "5040": 1635.0, "5045": 1671.0, "5050": 1661.0, "5055": 1608.0, "5060": 1644.0, "5065": 1601.0, "5070": 1643.0, "5075": 1625.0, 
"5080": 1634.0, "5085": 1656.0, "5090": 1546.0, "5095": 1599.0, "5100": 1582.0, "5105": 1607.0, "5110": 1560.0, "5115": 1587.0, "5120": 1582.0, "5125": 1642.0, "5130": 1626.0, "5135": 1606.0, "5140": 1613.0, "5145": 1521.0, "5150": 1695.0, "5155": 1571.0, "5160": 1678.0, "5165": 1679.0, "5170": 1623.0, "5175": 1671.0, "5180": 1668.0, "5185": 1633.0, "5190": 1696.0, "5195": 1686.0, "5200": 1712.0, "5205": 1525.0, "5210": 1526.0, "5215": 1598.0, "5220": 1521.0, "5225": 1683.0, "5230": 1579.0, "5235": 1652.0, "5240": 1631.0, "5245": 1611.0, "5250": 1669.0, "5255": 1631.0, "5260": 1611.0, "5265": 1670.0, "5270": 1750.0, "5275": 1572.0, "5280": 1507.0, "5285": 1613.0, "5290": 1607.0, "5295": 1592.0, "5300": 1687.0, "5305": 1587.0, "5310": 1704.0, "5315": 1603.0, "5320": 1523.0, "5325": 1642.0, "5330": 1561.0, "5335": 1554.0, "5340": 1591.0, "5345": 1609.0, "5350": 1626.0, "5355": 1619.0, "5360": 1648.0, "5365": 1634.0, "5370": 1615.0, "5375": 1625.0, "5380": 1600.0, "5385": 1653.0, "5390": 1670.0, "5395": 1619.0, "5400": 1652.0, "5405": 1648.0, "5410": 1585.0, "5415": 1627.0, "5420": 1666.0, "5425": 1661.0, "5430": 1566.0, "5435": 1696.0, "5440": 1678.0, "5445": 1606.0, "5450": 1604.0, "5455": 1643.0, "5460": 1709.0, "5465": 1628.0, "5470": 1614.0, "5475": 1669.0, "5480": 1556.0, "5485": 1677.0, "5490": 1664.0, "5495": 1577.0, "5500": 1658.0, "5505": 1556.0, "5510": 1606.0, "5515": 1641.0, "5520": 1577.0, "5525": 1565.0, "5530": 1642.0, "5535": 1623.0, "5540": 1604.0, "5545": 1608.0, "5550": 1535.0, "5555": 1637.0, "5560": 1621.0, "5565": 1525.0, "5570": 1630.0, "5575": 1639.0, "5580": 1610.0, "5585": 1615.0, "5590": 1620.0, "5595": 1565.0, "5600": 1606.0, "5605": 1668.0, "5610": 1657.0, "5615": 1649.0, "5620": 1656.0, "5625": 1574.0, "5630": 1580.0, "5635": 1622.0, "5640": 1679.0, "5645": 1640.0, "5650": 1681.0, "5655": 1620.0, "5660": 1693.0, "5665": 1681.0, "5670": 1619.0, "5675": 1753.0, "5680": 1628.0, "5685": 1623.0, "5690": 1545.0, "5695": 1622.0, "5700": 1622.0, "5705": 1737.0, "5710": 1639.0, "5715": 1605.0, "5720": 1545.0, "5725": 1658.0, "5730": 1703.0, "5735": 1569.0, "5740": 1624.0, "5745": 1635.0, "5750": 1607.0, "5755": 1621.0, "5760": 1628.0, "5765": 1663.0, "5770": 1512.0, "5775": 1604.0, "5780": 1664.0, "5785": 1658.0, "5790": 1773.0, "5795": 1625.0, "5800": 1604.0, "5805": 1767.0, "5810": 1641.0, "5815": 1681.0, "5820": 1620.0, "5825": 1656.0, "5830": 1657.0, "5835": 1617.0, "5840": 1637.0, "5845": 1673.0, "5850": 1639.0, "5855": 1657.0, "5860": 1736.0, "5865": 1620.0, "5870": 1588.0, "5875": 1600.0, "5880": 1664.0, "5885": 1657.0, "5890": 1610.0, "5895": 1665.0, "5900": 1534.0, "5905": 1605.0, "5910": 1678.0, "5915": 1639.0, "5920": 1666.0, "5925": 1567.0, "5930": 1542.0, "5935": 1679.0, "5940": 1544.0, "5945": 1646.0, "5950": 1741.0, "5955": 1623.0, "5960": 1681.0, "5965": 1626.0, "5970": 1671.0, "5975": 1659.0, "5980": 1668.0, "5985": 1639.0, "5990": 1684.0, "5995": 1676.0, "6000": 1559.0, "6005": 1662.0, "6010": 1810.0, "6015": 1586.0, "6020": 1661.0, "6025": 1617.0, "6030": 1613.0, "6035": 1657.0, "6040": 1632.0, "6045": 1671.0, "6050": 1627.0, "6055": 1582.0, "6060": 1552.0, "6065": 1597.0, "6070": 1583.0, "6075": 1674.0, "6080": 1549.0, "6085": 1569.0, "6090": 1554.0, "6095": 1579.0, "6100": 1628.0, "6105": 1609.0, "6110": 1611.0, "6115": 1618.0, "6120": 1624.0, "6125": 1724.0, "6130": 1556.0, "6135": 1623.0, "6140": 1608.0, "6145": 1632.0, "6150": 1741.0, "6155": 1605.0, "6160": 1639.0, "6165": 1618.0, "6170": 1655.0, "6175": 1687.0, "6180": 1592.0, "6185": 1635.0, 
"6190": 1638.0, "6195": 1644.0, "6200": 1657.0, "6205": 1628.0, "6210": 1556.0, "6215": 1581.0, "6220": 1608.0, "6225": 1635.0, "6230": 1620.0, "6235": 1700.0, "6240": 1655.0, "6245": 1608.0, "6250": 1716.0, "6255": 1552.0, "6260": 1650.0, "6265": 1539.0, "6270": 1675.0, "6275": 1613.0, "6280": 1640.0, "6285": 1659.0, "6290": 1578.0, "6295": 1586.0, "6300": "nan", "6305": 1566.0, "6310": 1645.0, "6315": 1508.0, "6320": 1758.0, "6325": 1673.0, "6330": 1594.0, "6335": 1622.0, "6340": 1696.0, "6345": 1604.0, "6350": 1608.0, "6355": 1698.0, "6360": 1619.0, "6365": 1627.0, "6370": 1705.0, "6375": 1660.0, "6380": 1653.0, "6385": 1667.0, "6390": 1675.0, "6395": 1779.0, "6400": 1595.0, "6405": 1702.0, "6410": 1726.0, "6415": 1647.0, "6420": 1634.0, "6425": 1725.0, "6430": 1566.0, "6435": 1614.0, "6440": 1569.0, "6445": 1637.0, "6450": 1642.0, "6455": 1655.0, "6460": 1654.0, "6465": 1695.0, "6470": 1697.0, "6475": 1661.0, "6480": 1676.0, "6485": 1616.0, "6490": 1716.0, "6495": 1542.0, "6500": 1734.0, "6505": 1622.0, "6510": 1635.0, "6515": 1568.0, "6520": 1632.0, "6525": 1599.0, "6530": 1627.0, "6535": 1562.0, "6540": 1651.0, "6545": 1593.0, "6550": 1723.0, "6555": 1683.0, "6560": 1847.0, "6565": 1631.0, "6570": 1713.0, "6575": 1610.0, "6580": 1653.0, "6585": 1668.0, "6590": 1644.0, "6595": 1694.0, "6600": 1613.0, "6605": 1553.0, "6610": 1701.0, "6615": 1516.0, "6620": 1607.0, "6625": 1646.0, "6630": 1591.0, "6635": 1694.0, "6640": 1631.0, "6645": 1703.0, "6650": 1720.0, "6655": 1625.0, "6660": 1650.0, "6665": 1674.0, "6670": 1670.0, "6675": 1646.0, "6680": 1679.0, "6685": 1638.0, "6690": 1667.0, "6695": 1672.0, "6700": 1703.0, "6705": 1656.0, "6710": 1715.0, "6715": 1688.0, "6720": 1695.0, "6725": 1725.0, "6730": 1571.0, "6735": 1720.0, "6740": 1542.0, "6745": 1709.0, "6750": 1688.0, "6755": 1628.0, "6760": 1621.0, "6765": 1672.0, "6770": 1600.0, "6775": 1521.0, "6780": 1583.0, "6785": 1561.0, "6790": 1634.0, "6795": 1615.0, "6800": 1562.0, "6805": 1645.0, "6810": 1663.0, "6815": 1688.0, "6820": 1620.0, "6825": 1655.0, "6830": 1633.0, "6835": 1599.0, "6840": 1558.0, "6845": 1667.0, "6850": 1633.0, "6855": 1643.0, "6860": 1741.0, "6865": 1682.0, "6870": 1787.0, "6875": 1583.0, "6880": 1721.0, "6885": 1672.0, "6890": 1693.0, "6895": 1694.0, "6900": 1671.0, "6905": 1661.0, "6910": 1664.0, "6915": 1587.0, "6920": 1665.0, "6925": 1724.0, "6930": 1736.0, "6935": 1644.0, "6940": 1661.0, "6945": 1685.0, "6950": 1686.0, "6955": 1624.0, "6960": 1667.0, "6965": 1675.0, "6970": 1550.0, "6975": 1748.0, "6980": 1663.0, "6985": 1634.0, "6990": 1623.0, "6995": 1662.0, "7000": 1699.0, "7005": 1692.0, "7010": 1654.0, "7015": 1705.0, "7020": 1659.0, "7025": 1630.0, "7030": 1714.0, "7035": 1596.0, "7040": 1637.0, "7045": 1682.0, "7050": 1631.0, "7055": 1668.0, "7060": 1689.0, "7065": 1640.0, "7070": 1729.0, "7075": 1752.0, "7080": 1720.0, "7085": 1719.0, "7090": 1645.0, "7095": 1588.0, "7100": 1549.0, "7105": 1650.0, "7110": 1717.0, "7115": 1688.0, "7120": 1619.0, "7125": 1570.0, "7130": 1706.0, "7135": 1712.0, "7140": 1690.0, "7145": 1640.0, "7150": 1712.0, "7155": 1619.0, "7160": 1654.0, "7165": 1643.0, "7170": 1545.0, "7175": "nan", "7180": 1637.0, "7185": 1773.0, "7190": 1704.0, "7195": 2029.0, "7200": 1826.0, "7205": 1829.0, "7210": 1612.0, "7215": 1811.0, "7220": 1722.0, "7225": 1824.0, "7230": 1807.0, "7235": 1732.0, "7240": 1695.0, "7245": 1824.0, "7250": 1829.0, "7255": 1803.0, "7260": 1769.0, "7265": 1788.0, "7270": 1844.0, "7275": 1644.0, "7280": 1774.0, "7285": 1668.0, "7290": 1954.0, "7295": 1794.0, 
"7300": 1764.0, "7305": 1851.0, "7310": 1624.0, "7315": 1773.0, "7320": 1792.0, "7325": 1764.0, "7330": 1681.0, "7335": 1710.0, "7340": 1834.0, "7345": 1847.0, "7350": 1818.0, "7355": 1736.0, "7360": 1767.0, "7365": 1829.0, "7370": 1757.0, "7375": 1808.0, "7380": 1950.0, "7385": 1718.0, "7390": 1854.0, "7395": 1933.0, "7400": 1798.0, "7405": 1744.0, "7410": 1758.0, "7415": 1723.0, "7420": 1792.0, "7425": 1894.0, "7430": 1697.0, "7435": 1816.0, "7440": 1864.0, "7445": 1743.0, "7450": 1783.0, "7455": 1934.0, "7460": 1743.0, "7465": 1665.0, "7470": 1809.0, "7475": 1686.0, "7480": 1728.0, "7485": 1787.0, "7490": 1705.0, "7495": 1665.0, "7500": 1803.0, "7505": 1816.0, "7510": 1887.0, "7515": 1697.0, "7520": 1707.0, "7525": 1825.0, "7530": 1772.0, "7535": 1779.0, "7540": 1819.0, "7545": 1716.0, "7550": 1771.0, "7555": 1792.0, "7560": 1658.0, "7565": 1795.0, "7570": 1648.0, "7575": 1801.0, "7580": 1763.0, "7585": 1858.0, "7590": 1846.0, "7595": 1779.0, "7600": 1766.0, "7605": 1496.0, "7610": 1759.0, "7615": 1821.0, "7620": 1801.0, "7625": 1737.0, "7630": 1838.0, "7635": 1746.0, "7640": 1767.0, "7645": 2283.0, "7650": 1754.0, "7655": 1746.0, "7660": 1815.0, "7665": 1825.0, "7670": 1738.0, "7675": 1719.0, "7680": 1806.0, "7685": 1853.0, "7690": 1779.0, "7695": 1733.0, "7700": 1818.0, "7705": 1804.0, "7710": 1860.0, "7715": 1859.0, "7720": 1897.0, "7725": 1764.0, "7730": 1894.0, "7735": 1809.0, "7740": 1859.0, "7745": 1808.0, "7750": 1794.0, "7755": 1873.0, "7760": 1871.0, "7765": 1879.0, "7770": 1697.0, "7775": 1832.0, "7780": 1742.0, "7785": 1779.0, "7790": 1760.0, "7795": 1836.0, "7800": 1941.0, "7805": 1898.0, "7810": 1637.0, "7815": 1783.0, "7820": 1815.0, "7825": 1755.0, "7830": 1733.0, "7835": 1782.0, "7840": 1835.0, "7845": 1767.0, "7850": 1754.0, "7855": 1658.0, "7860": 1868.0, "7865": 1741.0, "7870": 1862.0, "7875": 1747.0, "7880": 1903.0, "7885": 2026.0, "7890": 1873.0, "7895": 1893.0, "7900": 1732.0, "7905": 2023.0, "7910": 1803.0, "7915": 1759.0, "7920": 1772.0, "7925": 1662.0, "7930": 1877.0, "7935": 1771.0, "7940": 1831.0, "7945": 1951.0, "7950": 1821.0, "7955": 1864.0, "7960": 1772.0, "7965": 1767.0, "7970": 1723.0, "7975": 1794.0, "7980": 1820.0, "7985": 1910.0, "7990": 1796.0, "7995": 1705.0, "8000": 1943.0, "8005": 1977.0, "8010": 1732.0, "8015": 1706.0, "8020": 1796.0, "8025": 1981.0, "8030": 1696.0, "8035": 1795.0, "8040": 1850.0, "8045": 1901.0, "8050": 1750.0, "8055": 1933.0, "8060": 1776.0, "8065": 1547.0, "8070": 1710.0, "8075": 1873.0, "8080": 1874.0, "8085": 1922.0, "8090": 1797.0, "8095": 1838.0, "8100": 1813.0, "8105": 1757.0, "8110": 1750.0, "8115": 1900.0, "8120": 1795.0, "8125": 1663.0, "8130": 1871.0, "8135": 1764.0, "8140": 1801.0, "8145": 1975.0, "8150": 1815.0, "8155": 1834.0, "8160": 1684.0, "8165": 1934.0, "8170": 1868.0, "8175": 1844.0, "8180": 1734.0, "8185": 1622.0, "8190": 1702.0, "8195": 1668.0, "8200": 1713.0, "8205": 1715.0, "8210": 1694.0, "8215": 1669.0, "8220": 1757.0, "8225": 1655.0, "8230": 1838.0, "8235": 1716.0, "8240": 1594.0, "8245": 1633.0, "8250": 1707.0, "8255": 1614.0, "8260": 1791.0, "8265": 1680.0, "8270": 1734.0, "8275": 1711.0, "8280": 1642.0, "8285": 1641.0, "8290": 1733.0, "8295": 1686.0, "8300": 1673.0, "8305": 1586.0, "8310": 1742.0, "8315": 1533.0, "8320": 1679.0, "8325": 1642.0, "8330": 1647.0, "8335": 1644.0, "8340": 1738.0, "8345": 1660.0, "8350": 1700.0, "8355": 1665.0, "8360": 1598.0, "8365": 1648.0, "8370": 1747.0, "8375": 1620.0, "8380": 1677.0, "8385": 1654.0, "8390": 1742.0, "8395": 1704.0, "8400": 1710.0, "8405": 1593.0, 
"8410": 1627.0, "8415": 1715.0, "8420": 1676.0, "8425": 1658.0, "8430": 1694.0, "8435": 1641.0, "8440": 1685.0, "8445": 1641.0, "8450": 1718.0, "8455": 1697.0, "8460": 1642.0, "8465": 1660.0, "8470": 1613.0, "8475": 1779.0, "8480": 1671.0, "8485": 1846.0, "8490": 1774.0, "8495": 1619.0, "8500": 1740.0, "8505": 1662.0, "8510": 1652.0, "8515": 1735.0, "8520": 1719.0, "8525": 1636.0, "8530": 1726.0, "8535": 1683.0, "8540": 1663.0, "8545": 1694.0, "8550": 1943.0, "8555": 1692.0, "8560": 1735.0, "8565": 1788.0, "8570": 1697.0, "8575": 1841.0, "8580": 1682.0, "8585": 1765.0, "8590": 1798.0, "8595": 1695.0, "8600": 1806.0, "8605": 1682.0, "8610": 1643.0, "8615": 1680.0, "8620": 1736.0, "8625": 1711.0, "8630": 1728.0, "8635": 1714.0, "8640": 1702.0, "8645": 1613.0, "8650": 1676.0, "8655": 1671.0, "8660": 1599.0, "8665": 1662.0, "8670": 1684.0, "8675": 1752.0, "8680": 1806.0, "8685": 1682.0, "8690": 1732.0, "8695": 1668.0, "8700": 1696.0, "8705": 1745.0, "8710": 1741.0, "8715": 1748.0, "8720": 1606.0, "8725": 1641.0, "8730": 1652.0, "8735": 1732.0, "8740": 1690.0, "8745": 1727.0, "8750": 1686.0, "8755": 1666.0, "8760": 1728.0, "8765": 1798.0, "8770": 1661.0, "8775": 1706.0, "8780": 1699.0, "8785": 1661.0, "8790": 1702.0, "8795": 1636.0, "8800": 1694.0, "8805": 1786.0, "8810": 1792.0, "8815": 1817.0, "8820": 1627.0, "8825": 1699.0, "8830": 1591.0, "8835": 1626.0, "8840": 1826.0, "8845": 1817.0, "8850": 1690.0, "8855": 1559.0, "8860": 1776.0, "8865": 1631.0, "8870": 1687.0, "8875": 1650.0, "8880": 1606.0, "8885": 1711.0, "8890": 1591.0, "8895": 1676.0, "8900": 1644.0, "8905": 1694.0, "8910": 1711.0, "8915": 1612.0, "8920": 1585.0, "8925": 1743.0, "8930": 1731.0, "8935": 1703.0, "8940": 1717.0, "8945": 1872.0, "8950": 1682.0, "8955": 1656.0, "8960": 1524.0, "8965": 1636.0, "8970": 1665.0, "8975": 1715.0, "8980": 1731.0, "8985": 1721.0, "8990": 1645.0, "8995": 1678.0, "9000": 1699.0, "9005": 1724.0, "9010": 1701.0, "9015": 1678.0, "9020": 1660.0, "9025": 1720.0, "9030": 1732.0, "9035": 1777.0, "9040": 1727.0, "9045": 1749.0, "9050": 1652.0, "9055": 1591.0, "9060": 1692.0, "9065": 1653.0, "9070": 1681.0, "9075": 1620.0, "9080": 1652.0, "9085": 1659.0, "9090": 1740.0, "9095": 1691.0, "9100": 1678.0, "9105": 1643.0, "9110": 1683.0, "9115": 1763.0, "9120": 1722.0, "9125": 1743.0, "9130": 1737.0, "9135": 1762.0, "9140": 1694.0, "9145": 1667.0, "9150": 1716.0, "9155": 1729.0, "9160": 1606.0, "9165": 2067.0, "9170": 1611.0, "9175": 1687.0, "9180": 1545.0, "9185": 1661.0, "9190": 1645.0, "9195": 1563.0, "9200": 1629.0, "9205": 1765.0, "9210": 1655.0, "9215": 1786.0, "9220": 1710.0, "9225": 1750.0, "9230": 1684.0, "9235": 1594.0, "9240": 1774.0, "9245": 1738.0, "9250": 1743.0, "9255": 1809.0, "9260": 1703.0, "9265": 1782.0, "9270": 1656.0, "9275": 1723.0, "9280": 1662.0, "9285": 1793.0, "9290": 1661.0, "9295": 1797.0, "9300": 1719.0, "9305": 1732.0, "9310": 1793.0, "9315": 1717.0, "9320": 1652.0, "9325": 1732.0, "9330": 1834.0, "9335": 1684.0, "9340": 1538.0, "9345": 1740.0, "9350": 1642.0, "9355": 1619.0, "9360": 1722.0, "9365": 1699.0, "9370": 1674.0, "9375": 1649.0, "9380": 1762.0, "9385": 1599.0, "9390": 1636.0, "9395": 1650.0, "9400": 1712.0, "9405": 1680.0, "9410": 1715.0, "9415": 1743.0, "9420": 1734.0, "9425": 1685.0, "9430": 1655.0, "9435": 1726.0, "9440": 1675.0, "9445": 1786.0, "9450": 1714.0, "9455": 1635.0, "9460": 1707.0, "9465": 1772.0, "9470": 1725.0, "9475": 1730.0, "9480": 1882.0, "9485": 1772.0, "9490": 1762.0, "9495": 1679.0, "9500": 1701.0, "9505": 1787.0, "9510": 1725.0, "9515": 1626.0, 
"9520": 1668.0, "9525": 1699.0, "9530": 1728.0, "9535": 1676.0, "9540": 1780.0, "9545": 1624.0, "9550": 1557.0, "9555": 1651.0, "9560": 1772.0, "9565": 1626.0, "9570": 1842.0, "9575": 1790.0, "9580": 1693.0, "9585": 1736.0, "9590": 1702.0, "9595": 1642.0, "9600": 1551.0, "9605": 1784.0, "9610": 1792.0, "9615": 1624.0, "9620": 1741.0, "9625": 1659.0, "9630": 1792.0, "9635": 1700.0, "9640": 1704.0, "9645": 1701.0, "9650": 1733.0, "9655": 1757.0, "9660": 1682.0, "9665": 1686.0, "9670": 1803.0, "9675": 1706.0, "9680": 1712.0, "9685": 1607.0, "9690": 1714.0, "9695": 1620.0, "9700": 1715.0, "9705": 1804.0, "9710": 1694.0, "9715": 1634.0, "9720": 1636.0, "9725": 1602.0, "9730": 1644.0, "9735": 1605.0, "9740": 1696.0, "9745": 1625.0, "9750": 1735.0, "9755": 1701.0, "9760": 1723.0, "9765": 1710.0, "9770": 1664.0, "9775": 1774.0, "9780": 1785.0, "9785": 1671.0, "9790": 1723.0, "9795": 1689.0, "9800": 1709.0, "9805": 1631.0, "9810": 1726.0, "9815": 1701.0, "9820": 1630.0, "9825": 1697.0, "9830": 1623.0, "9835": 1735.0, "9840": 1523.0, "9845": 1766.0, "9850": 1712.0, "9855": 1645.0, "9860": 1657.0, "9865": 1681.0, "9870": 1627.0, "9875": 1725.0, "9880": 1559.0, "9885": 1863.0, "9890": 1671.0, "9895": 1775.0, "9900": 1723.0, "9905": 1794.0, "9910": 1690.0, "9915": 1715.0, "9920": 1687.0, "9925": 1676.0, "9930": 1671.0, "9935": 1780.0, "9940": 1824.0, "9945": 1823.0, "9950": 1674.0, "9955": 1700.0, "9960": 1717.0, "9965": 1664.0, "9970": 1700.0, "9975": 1536.0, "9980": 1630.0, "9985": 1710.0, "9990": 1705.0, "9995": 1744.0, "10000": 1677.0, "10005": 1688.0, "10010": 1642.0, "10015": 1618.0, "10020": 1624.0, "10025": 1674.0, "10030": 1716.0, "10035": 1733.0, "10040": 1659.0, "10045": 1717.0, "10050": 1708.0, "10055": 1713.0, "10060": 1634.0, "10065": 1738.0, "10070": 1574.0, "10075": 1650.0, "10080": 1685.0, "10085": 1638.0, "10090": 1695.0, "10095": 1644.0, "10100": 1713.0, "10105": 1671.0, "10110": 1629.0, "10115": 1682.0, "10120": 1709.0, "10125": 1719.0, "10130": 1749.0, "10135": 1716.0, "10140": 1628.0, "10145": 1691.0, "10150": 1683.0, "10155": 1713.0, "10160": 1658.0, "10165": 1698.0, "10170": 1720.0, "10175": 1682.0, "10180": 1710.0, "10185": 1739.0, "10190": 1695.0, "10195": 1695.0, "10200": 1719.0, "10205": 1707.0, "10210": 2053.0, "10215": 1608.0, "10220": 1608.0, "10225": 1576.0, "10230": 1603.0, "10235": 1645.0, "10240": 1581.0, "10245": 1697.0, "10250": 2210.0, "10255": 1553.0, "10260": 1591.0, "10265": 1604.0, "10270": 1597.0, "10275": 1611.0, "10280": 1567.0, "10285": 1608.0, "10290": 1607.0, "10295": 1547.0, "10300": 1650.0, "10305": 1729.0, "10310": 1620.0, "10315": 1587.0, "10320": 1572.0, "10325": 1643.0, "10330": 1612.0, "10335": 1585.0, "10340": 1600.0, "10345": 1649.0, "10350": 1619.0, "10355": 1592.0, "10360": 1630.0, "10365": 1587.0, "10370": 1611.0, "10375": 1626.0, "10380": 1591.0, "10385": 1581.0, "10390": 1593.0, "10395": 1550.0, "10400": 1506.0, "10405": 1711.0, "10410": 1633.0, "10415": 1585.0, "10420": 1611.0, "10425": 1640.0, "10430": 1625.0, "10435": 1599.0, "10440": 1604.0, "10445": 1555.0, "10450": 1601.0, "10455": 1673.0, "10460": 1615.0, "10465": 1604.0, "10470": 1569.0, "10475": 1639.0, "10480": 1540.0, "10485": 1668.0, "10490": 1685.0, "10495": 1591.0, "10500": 1627.0, "10505": 1659.0, "10510": 1590.0, "10515": 1661.0, "10520": 1637.0, "10525": 1554.0, "10530": 1662.0, "10535": 1572.0, "10540": 1676.0, "10545": 1581.0, "10550": 1711.0, "10555": 1658.0, "10560": 1586.0, "10565": 1639.0, "10570": 1620.0, "10575": 1567.0, "10580": 1632.0, "10585": 1573.0, "10590": 
1561.0, "10595": 1595.0, "10600": 1524.0, "10605": 1629.0, "10610": 1659.0, "10615": 1665.0, "10620": 1625.0, "10625": 1602.0, "10630": 1591.0, "10635": 1649.0, "10640": 1615.0, "10645": 1662.0, "10650": 1560.0, "10655": 1575.0, "10660": 1602.0, "10665": 1642.0, "10670": 1668.0, "10675": 1615.0, "10680": 1555.0, "10685": 1636.0, "10690": 1543.0, "10695": 1662.0, "10700": 1648.0, "10705": 1625.0, "10710": 1669.0, "10715": 1668.0, "10720": 1647.0, "10725": 1689.0, "10730": 1614.0, "10735": 1635.0, "10740": 1602.0, "10745": 1678.0, "10750": 1693.0, "10755": 1643.0, "10760": 1545.0, "10765": 1783.0, "10770": 1669.0, "10775": 1744.0, "10780": 1684.0, "10785": 1648.0, "10790": 1691.0, "10795": 1653.0, "10800": 1748.0, "10805": 1671.0, "10810": 1615.0, "10815": 1713.0, "10820": 1711.0, "10825": 1715.0, "10830": 1703.0, "10835": 1669.0, "10840": 1663.0, "10845": 1652.0, "10850": 1715.0, "10855": 1660.0, "10860": 1622.0, "10865": 1697.0, "10870": 1696.0, "10875": 1727.0, "10880": 1682.0, "10885": 1637.0, "10890": 1701.0, "10895": 1704.0, "10900": 1713.0, "10905": 1707.0, "10910": 1668.0, "10915": 1724.0, "10920": 1678.0, "10925": 1689.0, "10930": 1656.0, "10935": 1747.0, "10940": 1595.0, "10945": 1722.0, "10950": 1588.0, "10955": 1682.0, "10960": 1642.0, "10965": 1674.0, "10970": 1651.0, "10975": 1661.0, "10980": 1679.0, "10985": 1659.0, "10990": 1738.0, "10995": 1718.0, "11000": 1733.0, "11005": 1619.0, "11010": 1682.0, "11015": 1710.0, "11020": 1673.0, "11025": 1689.0, "11030": 1760.0, "11035": 1622.0, "11040": 1604.0, "11045": 1858.0, "11050": 1705.0, "11055": 1719.0, "11060": 1739.0, "11065": 1642.0, "11070": 1648.0, "11075": 1732.0, "11080": 1732.0, "11085": 1668.0, "11090": 1769.0, "11095": 1815.0, "11100": 1718.0, "11105": 1616.0, "11110": 1652.0, "11115": 1690.0, "11120": 1683.0, "11125": 1655.0, "11130": 1779.0, "11135": 1619.0, "11140": 1790.0, "11145": 1754.0, "11150": 1679.0, "11155": 1623.0, "11160": 1616.0, "11165": 1761.0, "11170": 1708.0, "11175": 1695.0, "11180": 1599.0, "11185": 1677.0, "11190": 1503.0, "11195": 1644.0, "11200": 1684.0, "11205": 1684.0, "11210": 1740.0, "11215": 1758.0, "11220": 1662.0, "11225": 1631.0, "11230": 2238.0, "11235": 1669.0, "11240": 1625.0, "11245": 1695.0, "11250": 1613.0, "11255": 1748.0, "11260": 1664.0, "11265": 1663.0, "11270": 1640.0, "11275": 1730.0, "11280": 1662.0, "11285": 1694.0, "11290": 1666.0, "11295": 1762.0, "11300": 1723.0, "11305": 1753.0, "11310": 1644.0, "11315": 1761.0, "11320": 1688.0, "11325": 1740.0, "11330": 1776.0, "11335": 1663.0, "11340": 1690.0, "11345": 1592.0, "11350": 1641.0, "11355": 1615.0, "11360": 1617.0, "11365": 1774.0, "11370": 1601.0, "11375": 1605.0, "11380": 1698.0, "11385": 1680.0, "11390": 1676.0, "11395": 1619.0, "11400": 1680.0, "11405": 1613.0, "11410": 1739.0, "11415": 1628.0, "11420": 1640.0, "11425": 1751.0, "11430": 1709.0, "11435": 1675.0, "11440": 1661.0, "11445": 1671.0, "11450": 1632.0, "11455": 1619.0, "11460": 1690.0, "11465": 1623.0, "11470": 1753.0, "11475": 1726.0, "11480": 1732.0, "11485": 1652.0, "11490": 1643.0, "11495": 1721.0, "11500": 1706.0, "11505": 1704.0, "11510": 1692.0, "11515": 1770.0, "11520": 1707.0, "11525": 1634.0, "11530": 1706.0, "11535": 1751.0, "11540": 1773.0, "11545": 1679.0, "11550": 1727.0, "11555": 1767.0, "11560": 1729.0, "11565": 1663.0, "11570": 1711.0, "11575": 1812.0, "11580": 1716.0, "11585": 1703.0, "11590": 1660.0, "11595": 1630.0, "11600": 1670.0, "11605": 1769.0, "11610": 1761.0, "11615": 1667.0, "11620": 1722.0, "11625": 1781.0, "11630": 1803.0, "11635": 
1688.0, "11640": 1601.0, "11645": 1727.0, "11650": 1708.0, "11655": 1673.0, "11660": 1675.0, "11665": 1691.0, "11670": 1831.0, "11675": 1570.0, "11680": 1643.0, "11685": 1672.0, "11690": 1807.0, "11695": 1690.0, "11700": 1707.0, "11705": 1693.0, "11710": 1681.0, "11715": 1667.0, "11720": 1599.0, "11725": 1652.0, "11730": 1525.0, "11735": 1644.0, "11740": 1753.0, "11745": 1638.0, "11750": 1653.0, "11755": 1654.0, "11760": 1610.0, "11765": 1652.0, "11770": 1635.0, "11775": 1611.0, "11780": 1547.0, "11785": 1632.0, "11790": 1701.0, "11795": 1692.0, "11800": 1627.0, "11805": 1667.0, "11810": 1616.0, "11815": 1620.0, "11820": 1641.0, "11825": 1626.0, "11830": 1638.0, "11835": 1666.0, "11840": 1681.0, "11845": 1600.0, "11850": 1591.0, "11855": 1634.0, "11860": 1645.0, "11865": 1615.0, "11870": 1521.0, "11875": 1696.0, "11880": 1612.0, "11885": 1569.0, "11890": 1634.0, "11895": 1647.0, "11900": 1586.0, "11905": 1637.0, "11910": 1721.0, "11915": 1622.0, "11920": 1640.0, "11925": 1666.0, "11930": 1655.0, "11935": 1566.0, "11940": 1661.0, "11945": 1545.0, "11950": 1614.0, "11955": 1607.0, "11960": 1611.0, "11965": 1619.0, "11970": 1589.0, "11975": 1598.0, "11980": 1724.0, "11985": 1644.0, "11990": 1743.0, "11995": 1689.0, "12000": 1692.0, "12005": 1701.0, "12010": 1703.0, "12015": 1735.0, "12020": 1776.0, "12025": 1757.0, "12030": 1592.0, "12035": 1692.0, "12040": 1818.0, "12045": 1730.0, "12050": 1602.0, "12055": 1650.0, "12060": 1737.0, "12065": 1594.0, "12070": 1775.0, "12075": 1773.0, "12080": 1768.0, "12085": 1787.0, "12090": 1806.0, "12095": 1741.0, "12100": 1788.0, "12105": 1788.0, "12110": 1681.0, "12115": 1808.0, "12120": 1910.0, "12125": 1735.0, "12130": 1722.0, "12135": 1818.0, "12140": 1699.0, "12145": 1717.0, "12150": 1657.0, "12155": 1675.0, "12160": 1719.0, "12165": 1649.0, "12170": 1654.0, "12175": 1728.0, "12180": 1802.0, "12185": 1669.0, "12190": 1627.0, "12195": 1694.0, "12200": 1682.0, "12205": 1686.0, "12210": 1799.0, "12215": 1694.0, "12220": 1651.0, "12225": 1721.0, "12230": 1677.0, "12235": 1626.0, "12240": 1663.0, "12245": 1779.0, "12250": 1732.0, "12255": 1646.0, "12260": 1661.0, "12265": 1737.0, "12270": 1737.0, "12275": 1780.0, "12280": 1656.0, "12285": 1673.0, "12290": 1741.0, "12295": 1645.0, "12300": 1613.0, "12305": 1739.0, "12310": 1696.0, "12315": 1699.0, "12320": 1712.0, "12325": 1744.0, "12330": 1680.0, "12335": 1708.0, "12340": 1679.0, "12345": 1778.0, "12350": 1740.0, "12355": 1839.0, "12360": 1636.0, "12365": 1707.0, "12370": 1720.0, "12375": 1729.0, "12380": 1708.0, "12385": 1732.0, "12390": 1701.0, "12395": 1690.0, "12400": 1821.0, "12405": 1751.0, "12410": 1727.0, "12415": 1779.0, "12420": 1740.0, "12425": 1639.0, "12430": 1713.0, "12435": 1740.0, "12440": 1642.0, "12445": 1760.0, "12450": 1905.0, "12455": 1645.0, "12460": 1783.0, "12465": 1747.0, "12470": 1723.0, "12475": 1707.0, "12480": 1668.0, "12485": 1754.0, "12490": 1672.0, "12495": 1692.0, "12500": 1749.0, "12505": 1642.0, "12510": 1651.0, "12515": 1710.0, "12520": 1705.0, "12525": 1693.0, "12530": 1818.0, "12535": 1637.0, "12540": 1746.0, "12545": 1663.0, "12550": 1772.0, "12555": 1747.0, "12560": 1715.0, "12565": 1749.0, "12570": 1727.0, "12575": 1553.0, "12580": 1720.0, "12585": 1630.0, "12590": 1652.0, "12595": 1819.0, "12600": 1705.0, "12605": 1780.0, "12610": 1797.0, "12615": 1689.0, "12620": 1707.0, "12625": 1708.0, "12630": 1693.0, "12635": 1829.0, "12640": 1845.0, "12645": 1708.0, "12650": 1668.0, "12655": 1784.0, "12660": 1700.0, "12665": 1562.0, "12670": 1737.0, "12675": 1675.0, "12680": 
1777.0, "12685": 1793.0, "12690": 1736.0, "12695": 1738.0, "12700": 1713.0, "12705": 1631.0, "12710": 1686.0, "12715": 1662.0, "12720": 1668.0, "12725": 1744.0, "12730": 1665.0, "12735": 1735.0, "12740": 1872.0, "12745": 1664.0, "12750": 1751.0, "12755": 1725.0, "12760": 1781.0, "12765": 1739.0, "12770": 1639.0, "12775": 1619.0, "12780": 1734.0, "12785": 1745.0, "12790": 1673.0, "12795": 1659.0, "12800": 1748.0, "12805": 1746.0, "12810": 1734.0, "12815": 1717.0, "12820": 1626.0, "12825": 1623.0, "12830": 1677.0, "12835": 1700.0, "12840": 1704.0, "12845": 1667.0, "12850": 1705.0, "12855": 1664.0, "12860": 1687.0, "12865": 1732.0, "12870": 1723.0, "12875": 1713.0, "12880": 1733.0, "12885": 1712.0, "12890": 1644.0, "12895": 1693.0, "12900": 1718.0, "12905": 1749.0, "12910": 1757.0, "12915": 1720.0, "12920": 1772.0, "12925": 1753.0, "12930": 1643.0, "12935": 1819.0, "12940": 1734.0, "12945": 1736.0, "12950": 2288.0, "12955": 1646.0, "12960": 1759.0, "12965": 1782.0, "12970": 1624.0, "12975": 1757.0, "12980": 1772.0, "12985": 1742.0, "12990": 1644.0, "12995": 1622.0, "13000": 1664.0, "13005": 1636.0, "13010": 1649.0, "13015": 1657.0, "13020": 1617.0, "13025": 1636.0, "13030": 1657.0, "13035": 1640.0, "13040": 1615.0, "13045": 2056.0, "13050": 1596.0, "13055": 1677.0, "13060": 1588.0, "13065": 1598.0, "13070": 1602.0, "13075": 1593.0, "13080": 1657.0, "13085": 1697.0, "13090": 1594.0, "13095": 1740.0, "13100": 1638.0, "13105": 1601.0, "13110": 1611.0, "13115": 1603.0, "13120": 1556.0, "13125": 1640.0, "13130": 1568.0, "13135": 1695.0, "13140": 1676.0, "13145": 1653.0, "13150": 1619.0, "13155": 1639.0, "13160": 1643.0, "13165": 1626.0, "13170": "nan", "13175": 1686.0, "13180": 1627.0, "13185": 1671.0, "13190": 1669.0, "13195": 1609.0, "13200": 1764.0, "13205": 1518.0, "13210": 1636.0, "13215": 1727.0, "13220": 1592.0, "13225": 1652.0, "13230": 1651.0, "13235": 1584.0, "13240": 1642.0, "13245": 1674.0, "13250": 1709.0, "13255": 1624.0, "13260": 1670.0, "13265": 1598.0, "13270": 1599.0, "13275": 1654.0, "13280": 1735.0, "13285": 1631.0, "13290": 1616.0, "13295": 1742.0, "13300": 1683.0, "13305": 1751.0, "13310": 1628.0, "13315": 2158.0, "13320": 1679.0, "13325": 1623.0, "13330": 1538.0, "13335": 1683.0, "13340": 1711.0, "13345": 1627.0, "13350": 1640.0, "13355": 1703.0, "13360": 1675.0, "13365": 1638.0, "13370": 1593.0, "13375": 1554.0, "13380": 1579.0, "13385": 1747.0, "13390": 1659.0, "13395": 1655.0, "13400": 1658.0, "13405": 1607.0, "13410": 1619.0, "13415": 1640.0, "13420": 1619.0, "13425": 1604.0, "13430": 1688.0, "13435": 1652.0, "13440": 1729.0, "13445": 1643.0, "13450": 1629.0, "13455": 1603.0, "13460": 1602.0, "13465": 1623.0, "13470": 1662.0, "13475": 1658.0, "13480": 1654.0, "13485": 1647.0, "13490": 1654.0, "13495": 1679.0, "13500": 1637.0, "13505": 1613.0, "13510": 1684.0, "13515": 1604.0, "13520": 1578.0, "13525": 1615.0, "13530": 1638.0, "13535": 1675.0, "13540": 1622.0, "13545": 1644.0, "13550": 1589.0, "13555": 1640.0, "13560": 1595.0, "13565": 1570.0, "13570": 1616.0, "13575": 1579.0, "13580": 1603.0, "13585": 1627.0, "13590": 1661.0, "13595": 1636.0, "13600": 1660.0, "13605": 1689.0, "13610": 1589.0, "13615": 1604.0, "13620": 1596.0, "13625": 1588.0, "13630": 1624.0, "13635": 1636.0, "13640": 1607.0, "13645": 1765.0, "13650": 1732.0, "13655": 1616.0, "13660": 1633.0, "13665": 1682.0, "13670": 1647.0, "13675": 1597.0, "13680": 1611.0, "13685": 1593.0, "13690": 1619.0, "13695": 1595.0, "13700": 1740.0, "13705": 1574.0, "13710": 1673.0, "13715": 1589.0, "13720": 1658.0, "13725": 
1613.0, "13730": 1674.0, "13735": 1648.0, "13740": 1619.0, "13745": 1626.0, "13750": 1611.0, "13755": 1629.0, "13760": 1603.0, "13765": 1641.0, "13770": 1561.0, "13775": 1696.0, "13780": 1577.0, "13785": 1668.0, "13790": 1563.0, "13795": 1677.0, "13800": 1622.0, "13805": 1621.0, "13810": 1662.0, "13815": 1577.0, "13820": 1624.0, "13825": 1592.0, "13830": 1684.0, "13835": 1649.0, "13840": 1588.0, "13845": 1628.0, "13850": 1585.0, "13855": 1648.0, "13860": 1693.0, "13865": 1634.0, "13870": 1655.0, "13875": 2121.0, "13880": 1768.0, "13885": 1622.0, "13890": 1727.0, "13895": 1642.0, "13900": 1600.0, "13905": 1625.0, "13910": 1557.0, "13915": 1565.0, "13920": 1655.0, "13925": 1615.0, "13930": 1664.0, "13935": 1551.0, "13940": 1634.0, "13945": 1685.0, "13950": 1626.0, "13955": 1632.0, "13960": 1725.0, "13965": 1581.0, "13970": 1690.0, "13975": 1638.0, "13980": 1569.0, "13985": 1632.0, "13990": 1675.0, "13995": 1721.0, "14000": 1656.0, "14005": 1611.0, "14010": 1595.0, "14015": 1644.0, "14020": 1601.0, "14025": 1647.0, "14030": 1741.0, "14035": 1698.0, "14040": 1550.0, "14045": 1654.0, "14050": 1681.0, "14055": 1687.0, "14060": 1680.0, "14065": 1666.0, "14070": 1559.0, "14075": 1642.0, "14080": 1706.0, "14085": 1659.0, "14090": 1577.0, "14095": 1667.0, "14100": 1693.0, "14105": 1681.0, "14110": 1549.0, "14115": 1612.0, "14120": 1616.0, "14125": 1757.0, "14130": 1683.0, "14135": 1688.0, "14140": 1560.0, "14145": 1523.0, "14150": 1611.0, "14155": 1625.0, "14160": 1700.0, "14165": 1658.0, "14170": 1638.0, "14175": 1590.0, "14180": 1569.0, "14185": 1645.0, "14190": 1589.0, "14195": 1614.0, "14200": 1546.0, "14205": 1629.0, "14210": 1592.0, "14215": 1643.0, "14220": 1638.0, "14225": 1670.0, "14230": 1615.0, "14235": 1722.0, "14240": 1625.0, "14245": 1531.0, "14250": 1672.0, "14255": 1617.0, "14260": 1640.0, "14265": 1638.0, "14270": 1647.0, "14275": 1605.0, "14280": 1673.0, "14285": 1743.0, "14290": 1705.0, "14295": 1622.0, "14300": 1594.0, "14305": 1638.0, "14310": 1687.0, "14315": 1660.0, "14320": 1537.0, "14325": 1577.0, "14330": 1752.0, "14335": 1693.0, "14340": 1640.0, "14345": 1700.0, "14350": 1618.0, "14355": 1630.0, "14360": 1677.0, "14365": 1721.0, "14370": 1721.0, "14375": 1724.0, "14380": 1793.0, "14385": 1779.0, "14390": 1631.0, "14395": 1773.0, "14400": 1716.0, "14405": 1792.0, "14410": 1802.0, "14415": 1748.0, "14420": 1786.0, "14425": 1746.0, "14430": 1719.0, "14435": 1711.0, "14440": 1671.0, "14445": 1748.0, "14450": 1671.0, "14455": 1655.0, "14460": 1766.0, "14465": 1744.0, "14470": 1800.0, "14475": 1672.0, "14480": 1788.0, "14485": 1779.0, "14490": 1765.0, "14495": 1595.0, "14500": 1726.0, "14505": 1724.0, "14510": 1711.0, "14515": 1694.0, "14520": 1642.0, "14525": 1629.0, "14530": 1826.0, "14535": 1687.0, "14540": 1781.0, "14545": 1788.0, "14550": 1869.0, "14555": 1769.0, "14560": 1766.0, "14565": 1760.0, "14570": 1698.0, "14575": 1796.0, "14580": 1756.0, "14585": 1722.0, "14590": 1738.0, "14595": 1837.0, "14600": 1786.0, "14605": 1740.0, "14610": 1818.0, "14615": 1756.0, "14620": 1694.0, "14625": 1852.0, "14630": 1759.0, "14635": 1783.0, "14640": 1826.0, "14645": 1783.0, "14650": 1740.0, "14655": 1794.0, "14660": 1681.0, "14665": 1773.0, "14670": 1931.0, "14675": 1899.0, "14680": 1826.0, "14685": 1864.0, "14690": 1568.0, "14695": 1716.0, "14700": 1797.0, "14705": 1730.0, "14710": 1753.0, "14715": 1772.0, "14720": 1724.0, "14725": 1731.0, "14730": 1783.0, "14735": 1947.0, "14740": 1716.0, "14745": 1650.0, "14750": 1807.0, "14755": 1726.0, "14760": 1697.0, "14765": 1885.0, "14770": 
1800.0, "14775": 1806.0, "14780": 1781.0, "14785": 1790.0, "14790": 1715.0, "14795": 1757.0, "14800": 1747.0, "14805": 1841.0, "14810": 1755.0, "14815": 1727.0, "14820": 1718.0, "14825": 1725.0, "14830": 1796.0, "14835": 1713.0, "14840": 1707.0, "14845": 1664.0, "14850": 1682.0, "14855": 1767.0, "14860": 1771.0, "14865": 1733.0, "14870": 1708.0, "14875": 1841.0, "14880": 1661.0, "14885": 1873.0, "14890": 1673.0, "14895": 1763.0, "14900": 1718.0, "14905": 1732.0, "14910": 1673.0, "14915": 1590.0, "14920": 1817.0, "14925": 1767.0, "14930": 1701.0, "14935": 1900.0, "14940": 1760.0, "14945": 1624.0, "14950": 1628.0, "14955": 1623.0, "14960": 1733.0, "14965": 1690.0, "14970": 1721.0, "14975": 1606.0, "14980": 1805.0, "14985": 1681.0, "14990": 1790.0, "14995": 1895.0, "15000": 1750.0, "15005": 1844.0, "15010": 1761.0, "15015": 1873.0, "15020": 1736.0, "15025": 1610.0, "15030": 1853.0, "15035": 1792.0, "15040": 1630.0, "15045": 1737.0, "15050": 1701.0, "15055": 1771.0, "15060": 1769.0, "15065": 1729.0, "15070": 1836.0, "15075": 1646.0, "15080": 1738.0, "15085": 1748.0, "15090": 1841.0, "15095": 1810.0, "15100": 1767.0, "15105": 1745.0, "15110": 1831.0, "15115": 1790.0, "15120": 1761.0, "15125": 1871.0, "15130": 1737.0, "15135": 1716.0, "15140": 1926.0, "15145": 1734.0, "15150": 1888.0, "15155": 1794.0, "15160": 1712.0, "15165": 1808.0, "15170": 1763.0, "15175": 1787.0, "15180": 1812.0, "15185": 1751.0, "15190": 1760.0, "15195": 1774.0, "15200": 1653.0, "15205": 1770.0, "15210": 1782.0, "15215": 1801.0, "15220": 1822.0, "15225": 1851.0, "15230": 1717.0, "15235": 1701.0, "15240": 1800.0, "15245": 1760.0, "15250": 1653.0, "15255": 1726.0, "15260": 1789.0, "15265": 1810.0, "15270": 1847.0, "15275": 1718.0, "15280": 1748.0, "15285": 1767.0, "15290": 1772.0, "15295": 1664.0, "15300": 1776.0, "15305": 1788.0, "15310": 1862.0, "15315": 1835.0, "15320": 1819.0, "15325": 1770.0, "15330": 1787.0, "15335": 1774.0, "15340": 1840.0, "15345": 1724.0, "15350": 1735.0, "15355": 1861.0, "15360": 1761.0, "15365": 1719.0, "15370": 1628.0, "15375": 1638.0, "15380": 1655.0, "15385": 1582.0, "15390": 1655.0, "15395": 1675.0, "15400": 1605.0, "15405": 1680.0, "15410": 1837.0, "15415": 1660.0, "15420": 1774.0, "15425": 1705.0, "15430": 1728.0, "15435": 1622.0, "15440": 1631.0, "15445": 1664.0, "15450": 1619.0, "15455": 1645.0, "15460": 1594.0, "15465": 1693.0, "15470": 1606.0, "15475": 1640.0, "15480": 1681.0, "15485": 1694.0, "15490": 1678.0, "15495": 1704.0, "15500": 1702.0, "15505": 1692.0, "15510": 1599.0, "15515": 1725.0, "15520": 1666.0, "15525": 1652.0, "15530": 1653.0, "15535": 1607.0, "15540": 1669.0, "15545": 1620.0, "15550": 1754.0, "15555": 1593.0, "15560": 1613.0, "15565": 1646.0, "15570": 1739.0, "15575": 1647.0, "15580": 1664.0, "15585": 1620.0, "15590": 1652.0, "15595": 1673.0, "15600": 1607.0, "15605": 1639.0, "15610": 1524.0, "15615": 1696.0, "15620": 1559.0, "15625": 1702.0, "15630": 1841.0, "15635": 1628.0, "15640": 1647.0, "15645": 1644.0, "15650": 1663.0, "15655": 1633.0, "15660": 1699.0, "15665": 1682.0, "15670": 1597.0, "15675": 1571.0, "15680": 1627.0, "15685": 1651.0, "15690": 1724.0, "15695": 1722.0, "15700": 1625.0, "15705": "nan", "15710": 1578.0, "15715": 1623.0, "15720": 1626.0, "15725": 1608.0, "15730": 1567.0, "15735": 1792.0, "15740": 1672.0, "15745": 1693.0, "15750": 1775.0, "15755": 1608.0, "15760": 1638.0, "15765": 1676.0, "15770": 1689.0, "15775": 1603.0, "15780": 1641.0, "15785": 1600.0, "15790": 1760.0, "15795": 1675.0, "15800": 1559.0, "15805": 1654.0, "15810": 1680.0, "15815": 
1615.0, "15820": 1669.0, "15825": 1649.0, "15830": 1626.0, "15835": 1757.0, "15840": 1591.0, "15845": 1608.0, "15850": 1617.0, "15855": 1616.0, "15860": 1678.0, "15865": 1722.0, "15870": 1629.0, "15875": 1667.0, "15880": 1660.0, "15885": 1661.0, "15890": 1647.0, "15895": 1731.0, "15900": 1726.0, "15905": 1572.0, "15910": 1713.0, "15915": 1613.0, "15920": 1594.0, "15925": 1670.0, "15930": 1606.0, "15935": 1649.0, "15940": 1684.0, "15945": 1657.0, "15950": 1719.0, "15955": 1655.0, "15960": 1587.0, "15965": 1657.0, "15970": 1728.0, "15975": 1573.0, "15980": 1610.0, "15985": 1688.0, "15990": 1729.0, "15995": 1633.0, "16000": 1619.0, "16005": 1689.0, "16010": 1670.0, "16015": 1725.0, "16020": 1734.0, "16025": 1755.0, "16030": 1745.0, "16035": 1652.0, "16040": 1674.0, "16045": 1724.0, "16050": 1703.0, "16055": 1655.0, "16060": 1650.0, "16065": 1598.0, "16070": 1703.0, "16075": 1642.0, "16080": 1694.0, "16085": 1687.0, "16090": 1706.0, "16095": 1645.0, "16100": 1741.0, "16105": 1573.0, "16110": 1641.0, "16115": 1681.0, "16120": 1657.0, "16125": 1673.0, "16130": 1718.0, "16135": 1716.0, "16140": 1756.0, "16145": 1665.0, "16150": 1638.0, "16155": 1649.0, "16160": 1725.0, "16165": 1591.0, "16170": 1669.0, "16175": 1733.0, "16180": 1639.0, "16185": 1707.0, "16190": 1665.0, "16195": 1549.0, "16200": 1669.0, "16205": 1714.0, "16210": 1736.0, "16215": 1636.0, "16220": 1719.0, "16225": 1717.0, "16230": 1552.0, "16235": 1660.0, "16240": 1751.0, "16245": 1690.0, "16250": 1615.0, "16255": 1593.0, "16260": 1726.0, "16265": 1588.0, "16270": 1652.0, "16275": 1608.0, "16280": 1678.0, "16285": 1470.0, "16290": 1643.0, "16295": 1648.0, "16300": 1673.0, "16305": 1658.0, "16310": 1756.0, "16315": 1629.0, "16320": 1654.0, "16325": 1633.0, "16330": 1868.0, "16335": 1636.0, "16340": 1605.0, "16345": 1749.0, "16350": 1673.0, "16355": 1578.0, "16360": 1626.0, "16365": 1668.0, "16370": 2085.0, "16375": 1693.0, "16380": 1800.0, "16385": 1630.0, "16390": 1601.0, "16395": 1819.0, "16400": 1731.0, "16405": 1572.0, "16410": 1690.0, "16415": 1631.0, "16420": 1672.0, "16425": 1677.0, "16430": 1732.0, "16435": 1656.0, "16440": 1686.0, "16445": 1750.0, "16450": 1690.0, "16455": 1732.0, "16460": 1628.0, "16465": 1731.0, "16470": 1652.0, "16475": 1721.0, "16480": 1674.0, "16485": 1768.0, "16490": 1619.0, "16495": 1753.0, "16500": 1750.0, "16505": 1679.0, "16510": 1735.0, "16515": 1650.0, "16520": 1665.0, "16525": 1778.0, "16530": 1772.0, "16535": 1892.0, "16540": 1690.0, "16545": 1613.0, "16550": 1595.0, "16555": 1847.0, "16560": 1703.0, "16565": 1885.0, "16570": 1772.0, "16575": 1818.0, "16580": 1829.0, "16585": 1843.0, "16590": 1957.0, "16595": 1769.0, "16600": 1779.0, "16605": 1982.0, "16610": 1653.0, "16615": 1774.0, "16620": 1837.0, "16625": 1891.0, "16630": 1626.0, "16635": 1736.0, "16640": 1840.0, "16645": 1872.0, "16650": 1669.0, "16655": 1885.0, "16660": 1820.0, "16665": 1745.0, "16670": 1728.0, "16675": 1654.0, "16680": 1750.0, "16685": 1971.0, "16690": 1789.0, "16695": 1848.0, "16700": 1897.0, "16705": 1780.0, "16710": 1906.0, "16715": 1906.0, "16720": 1782.0, "16725": 1784.0, "16730": 1961.0, "16735": 1878.0, "16740": 1769.0, "16745": 1781.0, "16750": 1744.0, "16755": 1834.0, "16760": 1710.0, "16765": 1789.0, "16770": 1758.0, "16775": 1868.0, "16780": 1795.0, "16785": 1737.0, "16790": 1788.0, "16795": 1751.0, "16800": 1767.0, "16805": 1820.0, "16810": 1818.0, "16815": 1861.0, "16820": 1931.0, "16825": 1712.0, "16830": 1885.0, "16835": 1752.0, "16840": 1719.0, "16845": 1728.0, "16850": 1766.0, "16855": 1794.0, "16860": 
1730.0, "16865": 1760.0, "16870": 1838.0, "16875": 1734.0, "16880": 1828.0, "16885": 1847.0, "16890": 1677.0, "16895": 1656.0, "16900": 1865.0, "16905": 1939.0, "16910": 1836.0, "16915": 1743.0, "16920": 1870.0, "16925": 1865.0, "16930": 1812.0, "16935": 1802.0, "16940": 1756.0, "16945": 1694.0, "16950": 1787.0, "16955": 1967.0, "16960": 1752.0, "16965": 1712.0, "16970": 1830.0, "16975": 1850.0, "16980": 1852.0, "16985": 1578.0, "16990": 1677.0, "16995": 1825.0, "17000": 1806.0, "17005": 1947.0, "17010": 1763.0, "17015": 1840.0, "17020": 1861.0, "17025": 1891.0, "17030": 1750.0, "17035": 1851.0, "17040": 1807.0, "17045": 1820.0, "17050": 1935.0, "17055": 1756.0, "17060": 1841.0, "17065": 1604.0, "17070": 1902.0, "17075": 1728.0, "17080": 1601.0, "17085": 1879.0, "17090": 1940.0, "17095": 1833.0, "17100": 1821.0, "17105": 1897.0, "17110": 1710.0, "17115": 1913.0, "17120": 1860.0, "17125": 1780.0, "17130": 1819.0, "17135": 1889.0, "17140": 1889.0, "17145": 1863.0, "17150": 1851.0, "17155": 1728.0, "17160": 1869.0, "17165": 1928.0, "17170": 1910.0, "17175": 1906.0, "17180": 1832.0, "17185": 1881.0, "17190": 1767.0, "17195": 1771.0, "17200": 1776.0, "17205": 1696.0, "17210": 1718.0, "17215": 1814.0, "17220": 1702.0, "17225": 1774.0, "17230": 1861.0, "17235": 1698.0, "17240": 1876.0, "17245": 1880.0, "17250": 1812.0, "17255": 1726.0, "17260": 1537.0, "17265": 1794.0, "17270": 2016.0, "17275": 1845.0, "17280": 1871.0, "17285": 1842.0, "17290": 1797.0, "17295": 1862.0, "17300": 1784.0, "17305": 1898.0, "17310": 1804.0, "17315": 1963.0, "17320": 1906.0, "17325": 1907.0, "17330": 1664.0, "17335": 1886.0, "17340": 1897.0, "17345": 2005.0, "17350": 1765.0, "17355": 1898.0, "17360": 1848.0, "17365": 1783.0, "17370": 1737.0, "17375": 1847.0, "17380": 1701.0, "17385": 1841.0, "17390": 1796.0, "17395": 1923.0, "17400": 1774.0, "17405": 1832.0, "17410": 1887.0, "17415": 1852.0, "17420": 1926.0, "17425": 1876.0, "17430": 1766.0, "17435": 1790.0, "17440": 1733.0, "17445": 1745.0, "17450": 1949.0, "17455": 1646.0, "17460": 1755.0, "17465": 1965.0, "17470": 1932.0, "17475": 1813.0, "17480": 1857.0, "17485": 1873.0, "17490": 1760.0, "17495": 1880.0, "17500": 1834.0, "17505": 1775.0, "17510": 1846.0, "17515": 1853.0, "17520": 1782.0, "17525": 1735.0, "17530": 1810.0, "17535": 1780.0, "17540": 1791.0, "17545": 1697.0, "17550": 1837.0, "17555": 1756.0, "17560": 1763.0, "17565": 1633.0, "17570": 1620.0, "17575": 1669.0, "17580": 1709.0, "17585": 1691.0, "17590": 1701.0, "17595": 1667.0, "17600": 1724.0, "17605": 1615.0, "17610": 1715.0, "17615": 1663.0, "17620": 1636.0, "17625": 1539.0, "17630": 1652.0, "17635": 1624.0, "17640": 1743.0, "17645": 1733.0, "17650": 1670.0, "17655": 1691.0, "17660": 1591.0, "17665": 1689.0, "17670": 1705.0, "17675": 1842.0, "17680": 1588.0, "17685": 1643.0, "17690": 1599.0, "17695": 1766.0, "17700": 1768.0, "17705": 1758.0, "17710": 1748.0, "17715": 1655.0, "17720": 1760.0, "17725": 1706.0, "17730": 1740.0, "17735": 1726.0, "17740": 1564.0, "17745": 1657.0, "17750": 1794.0, "17755": 1628.0, "17760": 1747.0, "17765": 1719.0, "17770": 1730.0, "17775": 1697.0, "17780": 1805.0, "17785": 1645.0, "17790": 1639.0, "17795": 1779.0, "17800": 1631.0, "17805": 1711.0, "17810": 1676.0, "17815": 1710.0, "17820": 1726.0, "17825": 1671.0, "17830": 1767.0, "17835": 1656.0, "17840": 1784.0, "17845": 1663.0, "17850": 1791.0, "17855": 1743.0, "17860": 1711.0, "17865": 1598.0, "17870": 1687.0, "17875": 1650.0, "17880": 1643.0, "17885": 1701.0, "17890": 1711.0, "17895": 1658.0, "17900": 1675.0, "17905": 
1620.0, "17910": 1777.0, "17915": 1681.0, "17920": 1709.0, "17925": 1795.0, "17930": 1535.0, "17935": 1635.0, "17940": 1658.0, "17945": 1558.0, "17950": 1679.0, "17955": 1681.0, "17960": 1698.0, "17965": 1667.0, "17970": 1706.0, "17975": 1552.0, "17980": 1662.0, "17985": 1710.0, "17990": 1629.0, "17995": 1708.0, "18000": 1634.0, "18005": 1724.0, "18010": 1694.0, "18015": 1659.0, "18020": 1679.0, "18025": 1793.0, "18030": 1611.0, "18035": 1743.0, "18040": 1626.0, "18045": 1698.0, "18050": 1641.0, "18055": 1643.0, "18060": 1698.0, "18065": 1629.0, "18070": 1742.0, "18075": 1708.0, "18080": 1711.0, "18085": 1699.0, "18090": 1645.0, "18095": 1682.0, "18100": 1733.0, "18105": 1681.0, "18110": 1534.0, "18115": 1647.0, "18120": 1685.0, "18125": 1681.0, "18130": 1589.0, "18135": 1757.0, "18140": 1746.0, "18145": 1672.0, "18150": 1744.0, "18155": 1703.0, "18160": 1729.0, "18165": 1694.0, "18170": 1656.0, "18175": 1541.0, "18180": 1757.0, "18185": 1814.0, "18190": 1792.0, "18195": 1659.0, "18200": 1736.0, "18205": 1675.0, "18210": 1614.0, "18215": 1758.0, "18220": 1696.0, "18225": 1668.0, "18230": 1634.0, "18235": 1605.0, "18240": 1700.0, "18245": 1656.0, "18250": 1757.0, "18255": 1590.0, "18260": 1745.0, "18265": 1671.0, "18270": 1720.0, "18275": 1669.0, "18280": 1627.0, "18285": 1607.0, "18290": 1733.0, "18295": 1752.0, "18300": 1684.0, "18305": 1741.0, "18310": 1701.0, "18315": 1702.0, "18320": 1701.0, "18325": 1760.0, "18330": 1730.0, "18335": 1664.0, "18340": 1597.0, "18345": 1679.0, "18350": 1582.0, "18355": 1736.0, "18360": 1691.0, "18365": 1606.0, "18370": 1658.0, "18375": 1663.0, "18380": 1669.0, "18385": 1685.0, "18390": 1688.0, "18395": 1745.0, "18400": 1574.0, "18405": 1681.0, "18410": 1562.0, "18415": 1702.0, "18420": 1715.0, "18425": 1686.0, "18430": 1672.0, "18435": 1697.0, "18440": 1847.0, "18445": 1657.0, "18450": 1778.0, "18455": 1658.0, "18460": 1679.0, "18465": 1700.0, "18470": 1702.0, "18475": 1606.0, "18480": 1600.0, "18485": 1721.0, "18490": 1724.0, "18495": 1658.0, "18500": 1681.0, "18505": 1708.0, "18510": 1745.0, "18515": 1659.0, "18520": 1692.0, "18525": 1735.0, "18530": 1728.0, "18535": 1572.0, "18540": 1725.0, "18545": 1615.0, "18550": 1740.0, "18555": 1758.0, "18560": 1750.0, "18565": 1656.0, "18570": 1658.0, "18575": 1613.0, "18580": 1647.0, "18585": 1742.0, "18590": 1640.0, "18595": 1648.0, "18600": 1690.0, "18605": 1632.0, "18610": 1650.0, "18615": 1593.0, "18620": 1643.0, "18625": 1623.0, "18630": 1690.0, "18635": 1798.0, "18640": 1743.0, "18645": 1699.0, "18650": 1695.0, "18655": 1653.0, "18660": 1726.0, "18665": 1711.0, "18670": 1658.0, "18675": 1713.0, "18680": 1773.0, "18685": 1761.0, "18690": 1690.0, "18695": 1727.0, "18700": 1746.0, "18705": 1661.0, "18710": 1654.0, "18715": 1623.0, "18720": "nan", "18725": 1694.0, "18730": 1710.0, "18735": 1601.0, "18740": 1701.0, "18745": 1732.0, "18750": 1712.0, "18755": 1737.0, "18760": 1743.0, "18765": 1689.0, "18770": 1641.0, "18775": 1618.0, "18780": 1806.0, "18785": 1681.0, "18790": 1649.0, "18795": 1690.0, "18800": 1658.0, "18805": 1733.0, "18810": 1650.0, "18815": 1692.0, "18820": 1699.0, "18825": 1726.0, "18830": 1758.0, "18835": 1812.0, "18840": 1631.0, "18845": 1700.0, "18850": 1661.0, "18855": 1669.0, "18860": 1575.0, "18865": 1669.0, "18870": 1728.0, "18875": 1742.0, "18880": 1734.0, "18885": 1948.0, "18890": 1791.0, "18895": 1595.0, "18900": 1803.0, "18905": 1627.0, "18910": 1690.0, "18915": 1667.0, "18920": 1694.0, "18925": 1591.0, "18930": 1650.0, "18935": 1649.0, "18940": 1606.0, "18945": 1619.0, "18950": 
1721.0, "18955": 1702.0, "18960": 1762.0, "18965": 1697.0, "18970": 1787.0, "18975": 1620.0, "18980": 1652.0, "18985": 1660.0, "18990": 1618.0, "18995": 1689.0, "19000": 1705.0, "19005": 1568.0, "19010": 1744.0, "19015": 1658.0, "19020": 1646.0, "19025": 1639.0, "19030": 1670.0, "19035": 1737.0, "19040": 1749.0, "19045": 1621.0, "19050": 1584.0, "19055": 1700.0, "19060": 1711.0, "19065": 1690.0, "19070": 1687.0, "19075": 1708.0, "19080": 1724.0, "19085": 1661.0, "19090": 1704.0, "19095": 1606.0, "19100": 1776.0, "19105": 1751.0, "19110": 1680.0, "19115": 1682.0, "19120": 1598.0, "19125": 1571.0, "19130": 1764.0, "19135": 1706.0, "19140": 1668.0, "19145": 1683.0, "19150": 1656.0, "19155": 1615.0, "19160": 1680.0, "19165": 1689.0, "19170": 1709.0, "19175": 1603.0, "19180": 1828.0, "19185": 1665.0, "19190": 1772.0, "19195": 1681.0, "19200": 1663.0, "19205": 1652.0, "19210": 1796.0, "19215": 1934.0, "19220": 1844.0, "19225": 1728.0, "19230": 1753.0, "19235": 1681.0, "19240": 1692.0, "19245": 1684.0, "19250": 1708.0, "19255": 1729.0, "19260": 1722.0, "19265": 1730.0, "19270": 2319.0, "19275": 1782.0, "19280": 1720.0, "19285": 1690.0, "19290": 1753.0, "19295": 1622.0, "19300": 1787.0, "19305": 1655.0, "19310": 1656.0, "19315": 1597.0, "19320": 1728.0, "19325": 1684.0, "19330": 1693.0, "19335": 1617.0, "19340": 1808.0, "19345": 1723.0, "19350": 1727.0, "19355": 1777.0, "19360": 1748.0, "19365": 1618.0, "19370": 1620.0, "19375": 1802.0, "19380": 1590.0, "19385": 1799.0, "19390": 1677.0, "19395": 1721.0, "19400": 1734.0, "19405": 1636.0, "19410": 1589.0, "19415": 1736.0, "19420": 1675.0, "19425": 1732.0, "19430": 1688.0, "19435": 1730.0, "19440": 1730.0, "19445": 1686.0, "19450": 1696.0, "19455": 1654.0, "19460": 1682.0, "19465": 1665.0, "19470": 1741.0, "19475": 1706.0, "19480": 1695.0, "19485": 1723.0, "19490": 1647.0, "19495": 1618.0, "19500": 1772.0, "19505": 1723.0, "19510": 1682.0, "19515": 1778.0, "19520": 1803.0, "19525": 1714.0, "19530": 1734.0, "19535": 1722.0, "19540": 1625.0, "19545": 1717.0, "19550": 1678.0, "19555": 1766.0, "19560": 1664.0, "19565": 1676.0, "19570": 1702.0, "19575": 1690.0, "19580": 1755.0, "19585": 1562.0, "19590": 1624.0, "19595": 1696.0, "19600": 1725.0, "19605": 1708.0, "19610": 1720.0, "19615": 1671.0, "19620": 1629.0, "19625": 1704.0, "19630": 1642.0, "19635": 1684.0, "19640": 1695.0, "19645": 1733.0, "19650": 1649.0, "19655": 1721.0, "19660": 1704.0, "19665": 1751.0, "19670": 1631.0, "19675": 1628.0, "19680": 1618.0, "19685": 1718.0, "19690": 1697.0, "19695": 1651.0, "19700": 1538.0, "19705": 1748.0, "19710": 1700.0, "19715": 1726.0, "19720": 1712.0, "19725": 1695.0, "19730": 1653.0, "19735": 1728.0, "19740": 1701.0, "19745": 1679.0, "19750": 1722.0, "19755": 1546.0, "19760": 1670.0, "19765": 1715.0, "19770": 1726.0, "19775": 1719.0, "19780": 1811.0, "19785": 1585.0, "19790": 1685.0, "19795": 1781.0, "19800": 1612.0, "19805": 1755.0, "19810": 1712.0, "19815": 1695.0, "19820": 1787.0, "19825": 1724.0, "19830": 1675.0, "19835": 1662.0, "19840": 1626.0, "19845": 1645.0, "19850": 1805.0, "19855": 1614.0, "19860": 1692.0, "19865": 1735.0, "19870": 1663.0, "19875": 1726.0, "19880": 1641.0, "19885": 1608.0, "19890": 1715.0, "19895": 1562.0, "19900": 1608.0, "19905": 1762.0, "19910": 1666.0, "19915": 1682.0, "19920": 1753.0, "19925": 1793.0, "19930": 1686.0, "19935": 1681.0, "19940": 1662.0, "19945": 1710.0, "19950": 1599.0, "19955": 1724.0, "19960": 1841.0, "19965": 1705.0, "19970": 1750.0, "19975": 1729.0, "19980": 1709.0, "19985": 1707.0, "19990": 1544.0, "19995": 
1630.0, "20000": 1782.0}}, "iteration-time": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 0.94985, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 0.39915, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 0.40992, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 0.40713, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 0.4036, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 0.40566, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 0.42915, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 0.40308, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 0.40175, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 0.39937, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 0.41365, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 0.42487, "1205": "nan", "1210": 
"nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 0.40331, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 0.39899, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 0.3966, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 0.40096, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 0.40329, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 0.39614, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 0.39572, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 0.39339, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 0.40493, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 0.42303, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 0.4094, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", 
"2390": "nan", "2395": "nan", "2400": 0.41345, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 0.41133, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 0.40218, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 0.41838, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 0.42431, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 0.41107, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 0.40422, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 0.39665, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 0.41837, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 0.4028, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 0.38774, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 0.39127, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", 
"3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 0.39836, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 0.39697, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 0.38917, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 0.39916, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 0.39698, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 0.40647, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 0.41067, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 0.40894, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 0.41009, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 0.40227, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 0.4248, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 0.41211, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", 
"4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 0.43365, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 0.42909, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 0.41732, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 0.42641, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 0.40863, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 0.41278, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 0.41069, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 0.42652, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 0.42102, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 0.42038, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 0.43627, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 0.42496, "5905": "nan", "5910": "nan", 
"5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 0.4472, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 0.41582, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 0.42424, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 0.43535, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 0.42616, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 0.41387, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 0.42489, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 0.41313, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 0.42286, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 0.41541, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 0.43306, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": 
"nan", "7095": "nan", "7100": 0.43241, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 0.40933, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 0.4217, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 0.41565, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 0.43066, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 0.40777, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 0.41387, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 0.41251, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 0.43528, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 0.41126, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 0.40702, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 0.42912, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", 
"8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 0.40025, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 0.39933, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 0.4116, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 0.40721, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 0.43865, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 0.41131, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 0.40549, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 0.39706, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 0.41703, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 0.39494, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 0.41589, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 0.40272, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", 
"9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 0.40918, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 0.3881, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 0.4061, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 0.41645, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 0.40007, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 0.40686, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 0.40084, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 0.4015, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 0.40517, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 0.40816, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 0.39193, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": 
"nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 0.40278, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 0.38608, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 0.40038, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 0.41654, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 0.4018, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 0.39819, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 0.40654, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 0.40283, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 0.42215, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 0.4053, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 0.40138, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", 
"11685": "nan", "11690": "nan", "11695": "nan", "11700": 0.40984, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 0.40079, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 0.42369, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 0.39174, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 0.39963, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 0.40465, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 0.41284, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 0.40579, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 0.39319, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 0.4131, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 0.41943, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": 
"nan", "12790": "nan", "12795": "nan", "12800": 0.41905, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 0.40002, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 0.39723, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 0.40359, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 0.41146, "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 0.40901, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 0.40375, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": 0.4211, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 0.41665, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 0.40601, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 0.40935, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", 
"13890": "nan", "13895": "nan", "13900": 0.42427, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 0.40557, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 0.40419, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 0.39372, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 0.39899, "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 0.40207, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 0.42758, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": 0.41745, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 0.39061, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 0.40267, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 0.39932, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": 
"nan", "14995": "nan", "15000": 0.42282, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 0.41066, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 0.4072, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 0.40479, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 0.39917, "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 0.42804, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 0.41931, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": 0.40026, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 0.40181, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 0.40088, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 0.43555, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", 
"16095": "nan", "16100": 0.40309, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 0.42391, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 0.39944, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 0.42405, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 0.39562, "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 0.39619, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 0.40585, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": 0.41462, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 0.40859, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 0.41618, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 0.39907, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": 
"nan", "17200": 0.40292, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 0.40323, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 0.40139, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 0.41256, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 0.39279, "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 0.38942, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 0.39907, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 0.39079, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": 0.39824, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 0.40483, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 0.39563, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", 
"18300": 0.39189, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 0.43972, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 0.43098, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 0.3995, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 0.40338, "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 0.39813, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 0.3998, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 0.39506, "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": 0.39686, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 0.39934, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 0.39998, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 
0.39475, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 0.38948, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 0.39502, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 0.39327, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 0.40069, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 0.38783, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 0.40251}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json index 2353210..fdc01a8 100644 --- a/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/bert/bert_release/golden_values_0.9.0.json @@ -1,8063 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 20000, - "step_interval": 5, - "values": [ - 10.51817, - 10.50697, - 10.54245, - 10.50667, - 9.92479, - 9.60301, - 9.27159, - 9.15922, - 9.1102, - 8.9799, - 8.75283, - 8.49649, - 8.52147, - 8.46628, - 8.33981, - 8.126, - 8.11512, - 7.80749, - 7.79653, - 7.8064, - 7.45337, - 7.42126, - 7.37001, - 7.35008, - 7.16051, - 7.14867, - 6.98236, - 7.31865, - 7.08964, - 6.84725, - 6.91697, - 6.82774, - 6.81873, - 6.90941, - 6.94075, - 6.89522, - 6.98502, - 6.59654, - 6.63277, - 6.94323, - 6.6785, - 6.80563, - 6.78144, - 6.95029, - 6.97322, - 6.71342, - 6.75433, - 6.77541, - 6.84547, - 6.80697, - 6.70396, - 6.65091, - 6.7526, - 6.61228, - 6.83516, - 6.80936, - 6.79944, - 6.85291, - 6.91914, - 6.53032, - 6.56537, - 6.62259, - 7.02059, - 6.47323, - 6.35438, - 6.50088, - 6.56089, - 6.59465, - 6.78021, - 6.69531, - 6.56238, - 6.56812, - 6.68091, - 6.59664, - 6.41566, - 6.5857, - 6.54195, - 6.58479, - 6.73615, - 6.4443, - 6.54865, - 6.55916, - 6.59845, - 6.43595, - 6.45401, - 6.18586, - 6.49294, - 6.68185, - 6.60608, - 6.559, - 6.19033, - 6.4009, - 6.40274, - 6.57056, - 6.53271, - 6.49194, - 6.36749, - 6.64527, - 6.49944, - 6.45025, - 6.51408, - 
6.25955, - 6.63222, - 6.18585, - 6.30021, - 6.26754, - 6.42376, - 6.38336, - 6.3996, - 6.20304, - 6.6971, - 6.28159, - 6.19231, - 6.44574, - 6.78283, - 6.57514, - 6.3222, - 6.45288, - 6.43441, - 6.05597, - 6.55394, - 6.51277, - 6.42845, - 6.43754, - 6.41117, - 6.52694, - 6.04904, - 6.43141, - 6.31829, - 6.38719, - 6.48179, - 6.38679, - 6.15156, - 6.43417, - 6.37958, - 6.19399, - 6.3122, - 6.34221, - 6.27933, - 6.4711, - 6.1234, - 6.49485, - 6.71635, - 6.10516, - 6.17404, - 6.37549, - 6.01451, - 6.41138, - 6.31646, - 6.4248, - 6.21942, - 6.47332, - 6.33059, - 6.31427, - 6.18997, - 6.37343, - 6.50451, - 6.01189, - 6.18301, - 5.92232, - 6.4218, - 6.19402, - 6.44301, - 6.45792, - 6.29853, - 6.23516, - 6.09728, - 6.30322, - 6.54659, - 6.38562, - 6.38736, - 6.18747, - 6.31506, - 6.2397, - 6.39278, - 6.34112, - 6.27398, - 6.31134, - 5.96738, - 6.33133, - 6.10347, - 6.35765, - 6.37403, - 6.27959, - 6.36945, - 6.07987, - 6.23722, - 6.23969, - 6.20518, - 6.33283, - 5.91523, - 6.06771, - 5.8396, - 6.30586, - 6.43435, - 6.33055, - 6.23108, - 6.31522, - 6.14368, - 6.35712, - 6.0813, - 6.38602, - 6.19308, - 6.39707, - 6.26784, - 5.95543, - 6.39075, - 6.24059, - 6.15195, - 6.59246, - 6.23993, - 5.98167, - 6.08794, - 6.22457, - 6.24932, - 6.19731, - 6.41025, - 6.16779, - 6.14702, - 6.3142, - 6.1905, - 6.48519, - 6.22603, - 6.1012, - 6.07963, - 6.07777, - 6.09788, - 6.21642, - 6.06703, - 6.0736, - 6.34331, - 6.13042, - 5.97578, - 6.08952, - 6.01427, - 6.19113, - 6.36768, - 5.90277, - 6.26481, - 6.17568, - 6.30063, - 6.36281, - 6.04123, - 6.22493, - 5.89205, - 6.2712, - 6.22852, - 6.20738, - 6.42681, - 6.24806, - 6.34901, - 6.42603, - 6.21449, - 6.05921, - 6.16218, - 6.10802, - 6.17101, - 6.00663, - 6.3087, - 6.21621, - 6.23808, - 6.35984, - 6.10643, - 6.21751, - 6.32045, - 6.17364, - 6.32778, - 6.11195, - 6.24344, - 6.41059, - 6.17918, - 6.20837, - 6.11848, - 5.81564, - 6.31861, - 6.08424, - 6.29686, - 6.16169, - 6.14986, - 6.3447, - 6.05647, - 6.28571, - 6.42451, - 6.12725, - 5.88995, - 5.97151, - 6.13232, - 6.36328, - 6.32436, - 5.83657, - 6.19237, - 6.13804, - 6.17165, - 6.05564, - 6.05336, - 6.3311, - 6.20131, - 6.25644, - 6.26059, - 6.15301, - 6.09441, - 5.96695, - 6.23876, - 6.40664, - 6.16058, - 6.07392, - 6.34433, - 6.14116, - 6.25574, - 5.85199, - 6.21815, - 6.39583, - 5.99999, - 6.14387, - 6.15051, - 6.25526, - 5.85115, - 6.07627, - 6.00124, - 5.96682, - 5.99723, - 6.23724, - 6.24784, - 6.05465, - 5.94052, - 6.0319, - 6.15907, - 6.35365, - 6.23999, - 6.02366, - 6.17868, - 6.27531, - 6.10036, - 5.99662, - 6.19096, - 5.98736, - 6.06427, - 5.85432, - 6.03222, - 6.06351, - 6.27157, - 6.08552, - 6.09093, - 5.99386, - 6.25373, - 6.0298, - 6.18881, - 5.93073, - 5.90092, - 6.22774, - 6.02014, - 6.18113, - 5.87635, - 5.76267, - 6.19385, - 6.0271, - 5.80885, - 6.11822, - 6.41123, - 6.15246, - 6.12562, - 6.11515, - 6.11178, - 6.14833, - 6.13696, - 6.0483, - 5.90552, - 5.821, - 6.26382, - 6.03231, - 6.146, - 6.11886, - 6.10893, - 6.16299, - 6.09743, - 6.12602, - 6.03215, - 6.02295, - 6.25967, - 6.1337, - 6.30705, - 6.45111, - 6.05164, - 5.92855, - 6.07976, - 6.18155, - 6.15608, - 6.1541, - 5.93571, - 6.14067, - 5.7221, - 6.23682, - 5.95431, - 5.82749, - 5.807, - 5.95881, - 6.39691, - 5.91315, - 5.96697, - 6.18937, - 6.20403, - 6.25608, - 5.85749, - 6.0781, - 5.90695, - 6.18268, - 6.02446, - 6.15587, - 6.27412, - 5.99697, - 6.08953, - 6.23896, - 6.22791, - 6.08966, - 6.05174, - 6.03454, - 6.02379, - 6.02549, - 6.12694, - 6.15147, - 6.13949, - 5.96208, - 6.039, - 5.93912, - 5.74178, - 6.00726, - 6.05676, - 
6.07005, - 5.78401, - 6.18148, - 5.99094, - 6.05439, - 6.0011, - 5.94535, - 5.65689, - 5.90724, - 6.01688, - 5.86744, - 5.84958, - 5.83715, - 5.61111, - 5.93448, - 6.15726, - 6.02414, - 5.76973, - 6.29326, - 6.11649, - 5.83082, - 6.14223, - 6.00111, - 5.98988, - 6.43447, - 5.73371, - 5.91641, - 6.36336, - 6.16274, - 6.28, - 6.09012, - 5.8942, - 6.12913, - 6.01726, - 5.95304, - 5.94608, - 6.09611, - 6.04629, - 6.02524, - 6.10135, - 6.25692, - 5.93219, - 6.05535, - 6.08078, - 6.25733, - 6.10818, - 6.03638, - 6.22702, - 5.81009, - 6.10102, - 5.98953, - 5.84714, - 6.18397, - 6.06079, - 6.2054, - 6.05417, - 5.92869, - 5.84022, - 6.15406, - 5.96206, - 6.06074, - 6.07171, - 5.90473, - 6.0514, - 5.96242, - 6.06422, - 6.14824, - 6.09494, - 5.77827, - 6.3064, - 6.00993, - 6.2371, - 6.02496, - 5.84215, - 6.02974, - 6.14715, - 5.93831, - 6.37739, - 6.13046, - 5.94359, - 6.18319, - 5.93852, - 5.95794, - 5.85023, - 6.19997, - 5.99258, - 6.10812, - 5.94916, - 6.18755, - 5.96491, - 5.8899, - 6.17812, - 5.96364, - 6.10578, - 6.11038, - 5.97466, - 6.00693, - 5.98535, - 6.18803, - 5.96577, - 6.0219, - 6.0942, - 6.10419, - 6.13657, - 6.06244, - 5.87461, - 6.19408, - 6.12413, - 5.77577, - 6.08653, - 5.96586, - 6.06471, - 6.07338, - 5.84106, - 5.98622, - 5.97016, - 6.02866, - 6.01132, - 5.88509, - 6.00115, - 6.14698, - 6.02431, - 6.03975, - 6.0098, - 6.01558, - 6.1797, - 6.20138, - 5.95864, - 5.96013, - 6.04125, - 5.87593, - 5.80975, - 6.17579, - 6.17304, - 5.78979, - 6.25387, - 5.93408, - 5.93671, - 6.30197, - 6.12889, - 5.90932, - 6.11098, - 6.04489, - 6.05513, - 5.9135, - 6.06193, - 6.10079, - 6.10188, - 5.85069, - 5.8413, - 5.89402, - 6.26349, - 6.04118, - 6.08565, - 6.065, - 6.13269, - 6.11291, - 5.86254, - 6.10467, - 6.05387, - 5.94895, - 6.1818, - 6.05343, - 6.02384, - 5.9609, - 6.21701, - 6.09864, - 5.79897, - 6.20999, - 6.12097, - 5.83995, - 5.78299, - 6.20008, - 6.16731, - 6.10642, - 6.32568, - 6.13099, - 5.8644, - 6.14147, - 5.7461, - 5.63084, - 5.82654, - 6.26232, - 6.0985, - 5.92978, - 6.10104, - 6.12813, - 6.23907, - 5.88807, - 6.34628, - 6.06435, - 6.05448, - 6.07128, - 5.93676, - 6.03108, - 5.89012, - 6.1816, - 6.09598, - 6.12548, - 5.88057, - 5.87118, - 5.81435, - 6.09769, - 6.01679, - 5.93883, - 6.0273, - 6.0164, - 5.89597, - 6.17274, - 5.73088, - 6.28675, - 5.98412, - 6.21755, - 5.74064, - 6.06264, - 6.2111, - 6.18387, - 5.83547, - 5.99602, - 5.98562, - 5.92462, - 5.90849, - 6.06777, - 5.9088, - 6.0204, - 5.6665, - 5.80911, - 5.96813, - 6.23178, - 5.82357, - 6.05969, - 5.84712, - 6.04017, - 5.96287, - 5.90165, - 5.79747, - 5.91486, - 5.91607, - 6.02435, - 5.98636, - 5.86205, - 6.17819, - 5.63541, - 5.73696, - 6.11451, - 5.97651, - 6.07753, - 6.06145, - 6.08863, - 6.29546, - 6.02292, - 6.03794, - 5.85776, - 5.79737, - 6.06528, - 5.74563, - 6.05699, - 6.12658, - 5.92117, - 6.13579, - 5.54065, - 5.76269, - 5.87993, - 5.91242, - 6.03735, - 5.92272, - 6.09372, - 5.8169, - 5.86553, - 5.86954, - 5.76153, - 6.09647, - 5.73825, - 6.23511, - 6.06764, - 5.71329, - 6.21079, - 5.9418, - 6.12618, - 5.80646, - 6.14399, - 6.17109, - 5.9638, - 6.07147, - 5.87998, - 5.98958, - 6.10486, - 5.94009, - 5.98863, - 6.06121, - 6.25642, - 6.01759, - 5.86526, - 5.74566, - 6.16195, - 6.10693, - 6.05532, - 6.02885, - 5.78566, - 5.87564, - 5.83874, - 5.62324, - 5.81889, - 6.08758, - 5.88765, - 5.81942, - 6.04841, - 5.99598, - 5.95132, - 6.08819, - 6.26621, - 6.02789, - 5.84812, - 5.90048, - 5.7218, - 5.95754, - 6.01512, - 5.79566, - 5.89034, - 5.86056, - 5.9712, - 5.89064, - 5.73494, - 5.98824, - 6.00045, - 6.00537, - 
5.99502, - 6.06507, - 5.84488, - 6.03438, - 5.71394, - 5.86569, - 5.91636, - 5.81769, - 5.67685, - 6.03505, - 5.49676, - 6.02789, - 5.90114, - 5.69273, - 6.04561, - 5.8742, - 6.11631, - 5.70595, - 6.10092, - 6.03107, - 6.12552, - 6.08357, - 5.87592, - 5.95572, - 6.14525, - 5.91104, - 6.02733, - 6.1637, - 6.03623, - 6.00631, - 5.81493, - 5.77306, - 5.90989, - 5.86642, - 5.92262, - 5.83316, - 6.01167, - 5.9438, - 6.0537, - 5.95341, - 6.09256, - 5.74826, - 5.76917, - 6.02621, - 6.03644, - 6.0784, - 5.95486, - 5.87948, - 6.03272, - 5.94087, - 6.08934, - 6.09997, - 5.9177, - 5.77976, - 5.89886, - 5.7164, - 6.01999, - 5.98272, - 5.78219, - 5.80691, - 5.85284, - 5.84277, - 5.95625, - 5.81189, - 6.05099, - 6.06015, - 5.75557, - 5.97108, - 5.81367, - 6.09467, - 5.96639, - 5.76024, - 5.9028, - 5.77803, - 6.05656, - 5.85214, - 6.00212, - 6.04935, - 5.72926, - 5.8153, - 5.91811, - 5.9014, - 5.56556, - 5.83749, - 5.76485, - 5.87879, - 5.93373, - 6.06735, - 6.03101, - 6.09616, - 6.04688, - 5.92916, - 5.86993, - 5.7176, - 5.86549, - 5.95245, - 5.69993, - 5.93455, - 5.69702, - 5.88953, - 5.94726, - 5.88734, - 5.93859, - 5.82601, - 5.9819, - 5.98518, - 5.84135, - 5.82831, - 6.04323, - 5.98497, - 6.02173, - 5.84704, - 5.83521, - 6.01448, - 5.87788, - 6.06302, - 6.01489, - 5.86304, - 6.17774, - 5.78696, - 5.86811, - 5.91998, - 5.71957, - 6.04416, - 6.02449, - 5.8539, - 5.88979, - 5.93267, - 5.87023, - 5.9243, - 5.92837, - 5.68343, - 5.85726, - 5.87625, - 5.99757, - 5.86586, - 6.01434, - 6.05585, - 5.79117, - 5.69103, - 5.76513, - 6.1054, - 5.90205, - 5.71626, - 5.72425, - 5.96747, - 5.78541, - 5.7318, - 5.9825, - 6.06086, - 5.85327, - 6.05739, - 5.90233, - 5.9151, - 5.70958, - 6.20464, - 5.88365, - 5.74122, - 5.77504, - 5.91744, - 6.03886, - 6.01076, - 5.96969, - 5.92302, - 6.06975, - 5.91473, - 5.95218, - 5.83588, - 5.58634, - 5.84976, - 6.1213, - 6.15442, - 5.85942, - 5.94779, - 5.99031, - 6.00633, - 5.95967, - 5.89928, - 6.01925, - 5.88478, - 5.94224, - 5.91401, - 5.82956, - 5.82824, - 5.83868, - 5.83117, - 5.87794, - 6.0331, - 5.89646, - 6.05464, - 5.86751, - 5.77017, - 5.81422, - 5.77389, - 5.86271, - 5.84156, - 6.12881, - 5.7815, - 6.00807, - 6.09046, - 5.9379, - 5.88377, - 5.94251, - 5.91166, - 5.92921, - 5.89292, - 5.96918, - 5.55188, - 5.76032, - 5.67902, - 5.84015, - 5.73224, - 5.94588, - 5.43833, - 5.84906, - 5.84235, - 5.77496, - 6.00021, - 5.77369, - 5.69096, - 6.11037, - 5.8926, - 5.69087, - 5.73564, - 5.9196, - 6.02277, - 6.0821, - 5.73689, - 6.06767, - 5.68134, - 5.88726, - 5.76632, - 5.94122, - 5.85097, - 6.06624, - 5.78789, - 6.12634, - 5.7086, - 5.74157, - 6.00467, - 6.06798, - 6.25098, - 5.84732, - 5.81206, - 5.87449, - 5.93454, - 5.5304, - 6.02019, - 6.01734, - 5.86044, - 5.99006, - 6.12051, - 5.89547, - 6.08783, - 5.98881, - 5.50672, - 5.65035, - 6.05277, - 5.79633, - 5.7667, - 5.80437, - 5.93654, - 6.02751, - 5.76962, - 5.88305, - 5.69771, - 5.90861, - 6.096, - 6.10885, - 6.02175, - 5.87293, - 5.85626, - 5.74448, - 5.88746, - 5.76223, - 5.97301, - 5.95833, - 6.07221, - 5.56389, - 5.74472, - 5.82477, - 5.9365, - 5.73817, - 5.49313, - 5.78058, - 5.9239, - 5.96589, - 6.12467, - 5.89207, - 5.79991, - 5.70344, - 5.95456, - 6.17915, - 6.17869, - 5.74695, - 5.91135, - 6.03182, - 5.90523, - 5.99983, - 5.67873, - 5.68088, - 6.01449, - 5.85001, - 6.18222, - 5.80411, - 5.80382, - 5.84815, - 5.96831, - 5.90235, - 6.03294, - 6.05113, - 6.14595, - 5.80833, - 5.96028, - 5.65118, - 5.85271, - 5.8623, - 6.07333, - 5.6907, - 5.91971, - 6.02173, - 5.96661, - 6.09506, - 5.72175, - 5.96678, - 5.88797, - 
5.92198, - 5.49269, - 5.88569, - 5.96455, - 6.01671, - 5.70527, - 5.75155, - 5.78047, - 5.84001, - 5.86736, - 5.84501, - 5.83254, - 5.93259, - 6.02108, - 5.94471, - 6.12619, - 6.04959, - 5.78407, - 5.66789, - 6.11476, - 5.87561, - 5.91178, - 5.73906, - 5.93146, - 5.98557, - 6.09548, - 5.74059, - 5.98117, - 5.91247, - 5.93101, - 5.84936, - 5.69119, - 5.86238, - 5.89403, - 5.67395, - 5.88732, - 5.84461, - 5.67952, - 5.81781, - 5.80892, - 5.73643, - 5.94271, - 5.99453, - 5.71643, - 5.78788, - 5.97038, - 6.035, - 5.83654, - 5.91245, - 5.82831, - 5.43351, - 6.11724, - 5.63003, - 5.76819, - 5.73018, - 5.82327, - 5.93817, - 5.7622, - 6.00721, - 5.84835, - 5.82843, - 6.06111, - 6.00835, - 5.71861, - 5.86418, - 5.87246, - 5.8283, - 5.84512, - 5.7291, - 5.85626, - 6.00548, - 5.68508, - 5.72271, - 5.95573, - 5.91411, - 5.77567, - 5.97971, - 6.01619, - 5.94789, - 6.04235, - 5.92623, - 5.82736, - 6.03855, - 5.80717, - 5.82134, - 5.86947, - 5.94254, - 6.10217, - 5.87591, - 5.65855, - 5.91821, - 6.13018, - 5.63911, - 5.79941, - 5.77977, - 5.74167, - 5.79741, - 5.80638, - 5.86412, - 5.74558, - 5.8795, - 5.84981, - 5.94432, - 5.55934, - 5.92196, - 5.76573, - 6.16785, - 5.87734, - 5.60914, - 5.82916, - 5.85576, - 5.93431, - 6.04834, - 6.01633, - 5.94011, - 5.93521, - 5.79534, - 5.79225, - 5.68445, - 5.64982, - 5.79235, - 5.98056, - 6.054, - 5.91754, - 6.05105, - 5.73838, - 5.719, - 5.77888, - 5.72269, - 5.9901, - 5.91495, - 5.871, - 6.04414, - 6.01798, - 5.87393, - 6.15308, - 5.89919, - 6.2463, - 5.85094, - 5.99511, - 5.71773, - 5.97943, - 5.92089, - 5.92193, - 6.20199, - 5.87681, - 6.05154, - 5.99758, - 5.89011, - 5.57193, - 6.02664, - 5.99426, - 5.73991, - 5.92144, - 5.58033, - 5.80556, - 5.9772, - 5.80375, - 5.63945, - 5.75142, - 5.55072, - 5.53673, - 5.84958, - 5.61298, - 5.90347, - 5.75528, - 5.93477, - 5.62974, - 5.76581, - 5.81259, - 5.86702, - 6.07998, - 5.80322, - 5.91904, - 5.69643, - 5.91703, - 5.92627, - 5.6317, - 5.94898, - 5.30188, - 5.97203, - 5.75757, - 5.97019, - 5.97553, - 5.75687, - 5.93316, - 5.76571, - 5.73225, - 6.0253, - 5.80417, - 5.707, - 5.93621, - 5.69593, - 5.76353, - 6.03185, - 5.97027, - 5.82503, - 6.04874, - 5.74024, - 5.67189, - 5.91949, - 5.64414, - 5.86914, - 5.83681, - 5.91871, - 5.73788, - 5.85618, - 5.82104, - 5.99048, - 5.85878, - 5.94137, - 5.83757, - 5.91765, - 5.81586, - 5.92403, - 5.87708, - 5.77047, - 5.86524, - 6.15844, - 5.9869, - 5.97434, - 5.92558, - 5.7892, - 5.84703, - 5.88695, - 5.68735, - 5.86599, - 5.75874, - 5.81679, - 5.79944, - 5.73223, - 5.81132, - 5.79908, - 5.8077, - 5.95727, - 5.83627, - 5.91199, - 5.6967, - 6.04695, - 5.94184, - 5.73485, - 5.72855, - 5.81908, - 5.73976, - 5.92564, - 5.77489, - 5.95665, - 5.52984, - 5.70867, - 5.73005, - 5.98513, - 6.05166, - 5.94071, - 5.97337, - 5.86712, - 5.61517, - 5.77487, - 6.05967, - 6.02391, - 5.73958, - 5.7498, - 5.85126, - 6.03855, - 5.92835, - 5.88963, - 5.772, - 5.85759, - 5.60436, - 5.92853, - 5.78997, - 5.59679, - 5.9911, - 5.71415, - 5.93715, - 6.13991, - 5.5862, - 5.8774, - 6.11598, - 5.80606, - 5.62792, - 5.78293, - 5.90434, - 5.94513, - 5.69461, - 5.94406, - 5.8935, - 5.73361, - 5.79636, - 6.03205, - 5.90509, - 5.58558, - 6.01558, - 5.88857, - 5.77436, - 5.94823, - 5.85871, - 6.0355, - 5.75707, - 5.79768, - 5.67636, - 5.7253, - 5.88153, - 5.92901, - 5.39763, - 5.92955, - 5.68024, - 5.92206, - 5.83913, - 5.80502, - 5.76125, - 6.06211, - 5.86988, - 5.93483, - 5.8253, - 5.81727, - 5.95184, - 5.95516, - 5.85508, - 6.00283, - 5.82047, - 5.81943, - 5.86427, - 5.87532, - 5.8348, - 5.8545, - 5.93766, - 
5.378, - 5.73824, - 5.74601, - 5.85273, - 5.82394, - 5.57251, - 5.82922, - 5.69758, - 5.99377, - 5.8443, - 5.91771, - 5.78867, - 5.65071, - 5.8881, - 5.75031, - 5.94389, - 5.89038, - 5.81134, - 5.96824, - 5.61951, - 5.75301, - 5.63601, - 5.72601, - 5.82447, - 6.01421, - 5.79561, - 5.80435, - 5.88217, - 5.88077, - 5.88073, - 5.61679, - 5.54178, - 5.87395, - 5.84007, - 5.82206, - 5.97586, - 5.72593, - 5.89843, - 5.9867, - 5.49935, - 5.68226, - 5.90707, - 5.82196, - 5.80617, - 6.01033, - 5.78375, - 5.69943, - 5.62976, - 5.81089, - 5.73651, - 5.97377, - 6.04683, - 5.70847, - 5.62338, - 5.93473, - 5.68378, - 5.87929, - 6.07437, - 5.58913, - 5.5587, - 5.95788, - 5.80927, - 5.81975, - 5.84129, - 5.93355, - 5.83822, - 5.56277, - 5.80884, - 5.71109, - 6.06421, - 5.53857, - 5.90978, - 5.97326, - 5.77918, - 5.81896, - 5.81587, - 5.50322, - 5.79004, - 5.68049, - 5.50592, - 5.59198, - 5.93173, - 5.59016, - 5.67392, - 5.79619, - 5.87002, - 6.03378, - 6.0934, - 5.5528, - 5.80135, - 5.63105, - 5.938, - 5.82999, - 6.01797, - 5.69501, - 5.61144, - 5.89177, - 6.08708, - 5.82596, - 5.49735, - 5.74006, - 5.99862, - 5.74806, - 6.1095, - 5.66165, - 5.71547, - 5.6484, - 5.78283, - 5.5931, - 5.9062, - 5.67977, - 5.31654, - 5.57789, - 5.78487, - 6.00066, - 5.73366, - 5.61612, - 5.97542, - 5.61031, - 5.81081, - 5.80517, - 6.00054, - 5.92824, - 5.56937, - 5.86793, - 5.64913, - 5.77547, - 5.62121, - 5.79237, - 5.76751, - 5.48263, - 6.12654, - 5.81921, - 5.55478, - 5.67251, - 5.85506, - 5.91582, - 5.85987, - 5.7451, - 5.6288, - 5.9358, - 5.77117, - 5.87969, - 5.68693, - 5.54155, - 5.46948, - 5.92449, - 5.69578, - 5.61774, - 5.91407, - 5.99281, - 5.7242, - 6.02733, - 5.83353, - 5.8941, - 5.90845, - 5.58274, - 5.90239, - 5.73442, - 5.76793, - 5.5455, - 5.80091, - 5.57495, - 5.93329, - 5.32212, - 5.69693, - 6.00364, - 5.84634, - 5.49144, - 5.70317, - 5.96304, - 5.75659, - 5.90796, - 5.46461, - 5.82196, - 5.70382, - 5.89507, - 5.85437, - 5.75404, - 5.7554, - 5.87031, - 5.59845, - 5.84484, - 5.4662, - 5.95048, - 5.6778, - 5.76869, - 5.6736, - 5.72082, - 5.72414, - 5.81206, - 5.56189, - 5.96838, - 5.90296, - 5.55599, - 5.86036, - 5.81815, - 5.87567, - 5.8659, - 5.83868, - 5.8297, - 5.96301, - 5.6167, - 5.71097, - 5.86768, - 5.60405, - 5.73223, - 5.84023, - 5.7564, - 5.8207, - 5.81478, - 5.46125, - 5.76515, - 5.87999, - 5.90936, - 5.83261, - 5.89529, - 5.76316, - 5.7638, - 5.47661, - 5.8634, - 5.61013, - 5.72378, - 5.75599, - 5.81251, - 6.0351, - 5.84867, - 5.87368, - 5.82237, - 5.70847, - 5.71423, - 5.95109, - 5.82724, - 5.78444, - 5.75695, - 5.69541, - 5.98377, - 5.54576, - 5.86877, - 5.81308, - 5.52578, - 5.47295, - 5.29252, - 5.73054, - 5.70435, - 5.89061, - 5.71961, - 6.18811, - 5.64285, - 5.75957, - 5.93835, - 5.52125, - 5.42426, - 5.75271, - 5.73761, - 5.98976, - 5.58229, - 5.7084, - 5.60565, - 5.64709, - 5.85746, - 5.99712, - 5.62785, - 5.70429, - 5.62972, - 5.649, - 5.68113, - 5.75792, - 5.70403, - 5.69472, - 5.66492, - 5.57693, - 5.65648, - 5.56991, - 5.88348, - 5.67161, - 5.73256, - 5.92812, - 5.56846, - 5.46481, - 5.80872, - 5.83126, - 5.7754, - 5.89272, - 5.54325, - 5.57892, - 5.71277, - 5.87338, - 5.70907, - 5.67721, - 5.51086, - 5.85753, - 5.76377, - 5.75087, - 5.90718, - 5.63706, - 5.8155, - 5.83352, - 5.8482, - 5.67357, - 5.63407, - 5.59035, - 5.71877, - 5.47683, - 5.74627, - 5.42606, - 5.73645, - 5.55478, - 5.95138, - 5.48409, - 5.54159, - 5.99212, - 5.52026, - 5.26822, - 5.64829, - 5.9037, - 5.55651, - 5.77397, - 5.64556, - 5.82035, - 5.73169, - 5.44745, - 5.65008, - 5.83118, - 5.82984, - 5.72634, - 
5.64323, - 5.65479, - 5.74833, - 5.60132, - 5.47233, - 5.74113, - 5.63439, - 5.60235, - 5.44416, - 5.48049, - 5.58994, - 5.66653, - 5.66043, - 5.79726, - 5.70997, - 5.78961, - 5.62937, - 5.56678, - 5.80482, - 5.71759, - 5.78356, - 5.743, - 5.84223, - 5.42644, - 5.63196, - 5.80348, - 5.49088, - 5.826, - 5.52771, - 5.48095, - 5.35392, - 5.50077, - 5.3596, - 5.33064, - 5.86532, - 5.84238, - 5.57801, - 5.69746, - 5.74569, - 5.46517, - 5.50377, - 5.65439, - 5.63352, - 5.37607, - 5.5011, - 5.71651, - 5.90336, - 5.66397, - 5.73206, - 5.6508, - 5.52432, - 5.30448, - 5.81099, - 5.76475, - 5.56978, - 5.86827, - 5.51776, - 5.73968, - 5.59452, - 5.66373, - 5.55969, - 5.76577, - 5.91615, - 5.56708, - 5.74735, - 5.60566, - 5.35345, - 5.7854, - 5.76588, - 5.80156, - 5.74362, - 5.65695, - 5.73585, - 5.69036, - 5.57686, - 5.77655, - 5.62383, - 5.81772, - 5.75568, - 5.43952, - 5.6666, - 5.43186, - 5.65536, - 5.47906, - 5.63328, - 5.40467, - 5.66207, - 5.49452, - 5.43046, - 5.37363, - 5.54146, - 5.81395, - 5.52932, - 5.51237, - 5.3286, - 5.78025, - 5.81219, - 5.67441, - 5.64227, - 5.62336, - 5.60404, - 5.58174, - 5.59439, - 5.65366, - 5.39794, - 5.68567, - 5.40278, - 5.58909, - 5.71938, - 5.6502, - 5.617, - 5.77397, - 5.47779, - 5.56019, - 5.38541, - 5.32017, - 5.57065, - 5.85876, - 5.69156, - 5.61595, - 5.66446, - 5.82477, - 5.76422, - 5.74248, - 5.53179, - 5.42022, - 5.49126, - 5.5432, - 5.55075, - 5.6735, - 5.74431, - 5.73108, - 5.53347, - 5.47832, - 5.78369, - 5.63811, - 5.66957, - 5.58212, - 5.61234, - 5.56783, - 5.73898, - 5.17077, - 5.29027, - 5.28486, - 5.42042, - 5.65544, - 5.52742, - 5.69398, - 5.25064, - 5.29141, - 5.60403, - 5.51356, - 5.69282, - 5.60921, - 5.75197, - 5.39797, - 5.54715, - 5.59264, - 5.50544, - 5.74403, - 5.58659, - 5.73969, - 5.42799, - 5.71356, - 5.53956, - 5.2957, - 5.48232, - 5.49809, - 5.67207, - 5.50522, - 5.45096, - 5.39666, - 5.45412, - 5.62721, - 5.55272, - 5.73106, - 5.61996, - 5.36752, - 5.47768, - 5.84356, - 5.50586, - 5.50929, - 5.75589, - 5.81358, - 5.24376, - 5.3289, - 5.35628, - 5.39986, - 5.61486, - 5.6138, - 5.18214, - 5.51438, - 5.60589, - 5.44436, - 5.64708, - 5.50689, - 5.39556, - 5.76281, - 5.41118, - 5.57928, - 5.57219, - 5.49241, - 5.18128, - 5.47572, - 5.4267, - 5.60438, - 5.53136, - 5.57904, - 5.48748, - 5.59556, - 5.62021, - 5.33214, - 5.56346, - 5.31297, - 5.33727, - 5.14609, - 5.47305, - 5.69699, - 5.60172, - 5.52302, - 5.90634, - 5.52441, - 5.44089, - 5.40369, - 5.61849, - 5.30077, - 5.42964, - 5.69667, - 5.48485, - 5.5569, - 5.46049, - 5.452, - 5.45372, - 5.46275, - 5.07789, - 5.34791, - 5.48665, - 5.53812, - 5.26858, - 5.59704, - 5.53699, - 5.53245, - 5.29146, - 5.52025, - 5.42498, - 5.56623, - 5.33484, - 5.38538, - 5.43149, - 5.48089, - 5.45807, - 5.23074, - 5.44418, - 5.49082, - 5.56671, - 5.45221, - 5.83609, - 5.52985, - 5.26792, - 5.27749, - 5.58115, - 5.39591, - 5.63925, - 5.55577, - 5.65961, - 5.18139, - 5.6515, - 5.4231, - 5.33857, - 5.25229, - 5.27869, - 5.27201, - 5.45623, - 5.62906, - 5.29797, - 5.40776, - 5.35209, - 5.31923, - 5.66727, - 5.43877, - 5.33801, - 5.58614, - 5.46001, - 5.22625, - 5.46325, - 5.33833, - 5.40649, - 5.54292, - 5.6152, - 5.68297, - 5.39826, - 5.51364, - 5.49285, - 5.32128, - 5.52947, - 5.42864, - 5.54477, - 5.43745, - 5.29185, - 5.67558, - 5.54092, - 5.51634, - 5.42958, - 5.34685, - 5.34374, - 5.32932, - 5.47149, - 5.4214, - 5.55439, - 5.30149, - 5.43681, - 5.27134, - 5.43216, - 5.48044, - 5.53087, - 5.5032, - 5.55384, - 5.3391, - 5.49206, - 5.41623, - 5.52624, - 5.59869, - 5.22, - 5.3715, - 5.62166, - 5.45451, - 
[Removed golden-values data elided: the remainder of the per-step loss "values" array (entries roughly 4.2–5.8) and its closing braces, followed by the removed "mem-allocated-bytes" block with "start_step": 0, "end_step": 20000, "step_interval": 5, whose per-step values are predominantly 224286208.0–225334784.0 bytes; the listing of removed values continues below.]
224810496.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224810496.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224286208.0, - 225334784.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 225334784.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224286208.0, - 
224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 
224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224679424.0, - 224810496.0, - 224286208.0, - 225334784.0, - 224679424.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 225334784.0, - 224810496.0, - 224810496.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224286208.0, - 224810496.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 200, - "step_interval": 5, - "values": [ - 0.91292, - 0.3432, - 0.34293, - 0.33763, - 0.34388, - 0.3393, - 0.35151, - 0.34797, - 0.34896, - 0.34251, - 0.34037, - 0.34118, - 0.34167, - 0.34039, - 0.34949, - 0.3385, - 0.34197, - 0.34513, - 0.33495, - 0.34333, - 0.33903, - 0.34152, - 0.33892, - 0.33816, - 0.33393, - 0.33258, - 0.33664, - 0.34074, - 0.33756, - 0.33902, - 0.33969, - 0.3437, - 0.33646, - 0.33934, - 
0.33157, - 0.33564, - 0.33895, - 0.3388, - 0.33348, - 0.33456 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": 10.51817, "5": 10.5175, "10": 10.51541, "15": 10.51677, "20": 10.13032, "25": 9.5518, "30": 9.44404, "35": 9.15174, "40": 9.05764, "45": 8.99256, "50": 8.75433, "55": 8.8141, "60": 8.47097, "65": 8.53559, "70": 8.20228, "75": 8.31011, "80": 7.96546, "85": 7.87162, "90": 7.66496, "95": 7.67741, "100": 7.63736, "105": 7.37164, "110": 7.52373, "115": 7.16816, "120": 6.99674, "125": 7.13817, "130": 6.93339, "135": 6.84533, "140": 7.05697, "145": 6.99313, "150": 6.99189, "155": 6.93579, "160": 6.91743, "165": 7.02675, "170": 6.84859, "175": 6.89494, "180": 6.58084, "185": 6.98964, "190": 6.97544, "195": 6.83777, "200": 6.86351, "205": 6.77972, "210": 6.8278, "215": 6.85589, "220": 6.64778, "225": 6.77656, "230": 6.83728, "235": 6.6403, "240": 6.56299, "245": 6.92769, "250": 6.67212, "255": 6.59089, "260": 6.6886, "265": 6.55814, "270": 6.47987, "275": 6.77552, "280": 6.61728, "285": 6.49133, "290": 6.58778, "295": 6.67183, "300": 6.35487, "305": 6.35101, "310": 6.95761, "315": 6.78118, "320": 6.73969, "325": 6.52394, "330": 6.67967, "335": 6.58673, "340": 6.44122, "345": 6.60091, "350": 6.4961, "355": 6.65303, "360": 6.55435, "365": 6.69325, "370": 6.52082, "375": 6.63467, "380": 6.35463, "385": 6.47242, "390": 6.52105, "395": 6.31308, "400": 6.58662, "405": 6.48288, "410": 6.58296, "415": 6.5595, "420": 6.49457, "425": 6.41838, "430": 6.56902, "435": 6.39676, "440": 6.56238, "445": 6.46871, "450": 6.62489, "455": 6.51513, "460": 6.82933, "465": 6.75526, "470": 6.4396, "475": 6.18229, "480": 6.48381, "485": 6.54946, "490": 6.21873, "495": 6.28572, "500": 6.52573, "505": 6.45653, "510": 6.50992, "515": 6.172, "520": 6.55684, "525": 6.50698, "530": 6.31881, "535": 6.13755, "540": 6.58936, "545": 6.42254, "550": 6.42606, "555": 6.39976, "560": 6.42189, "565": 6.30133, "570": 6.38344, "575": 6.587, "580": 6.46785, "585": 6.46954, "590": 6.21249, "595": 6.2525, "600": 6.44785, "605": 6.37774, "610": 6.45558, "615": 6.43486, "620": 6.47888, "625": 6.38285, "630": 6.47552, "635": 6.43607, "640": 6.34837, "645": 6.20656, "650": 6.30271, "655": 6.55191, "660": 6.33696, "665": 6.3171, "670": 6.42225, "675": 6.31522, "680": 6.38141, "685": 6.51535, "690": 6.29353, "695": 6.18153, "700": 6.09304, "705": 6.34929, "710": 6.1791, "715": 6.4177, "720": 6.42039, "725": 6.29226, "730": 6.38785, "735": 6.4945, "740": 6.63836, "745": 6.21078, "750": 6.22274, "755": 6.00822, "760": 6.54247, "765": 6.29186, "770": 6.43211, "775": 6.12405, "780": 6.26062, "785": 6.28638, "790": 6.45124, "795": 6.40875, "800": 6.1263, "805": 6.376, "810": 6.23893, "815": 6.16663, "820": 6.3575, "825": 6.62729, "830": 6.36179, "835": 6.19003, "840": 6.14768, "845": 6.4116, "850": 6.45332, "855": 6.48173, "860": 6.19826, "865": 6.36353, "870": 6.31679, "875": 6.23294, "880": 6.28484, "885": 6.35442, "890": 6.20864, "895": 6.2911, "900": 6.33959, "905": 6.23518, "910": 6.16488, "915": 6.44247, "920": 6.39225, "925": 6.17239, "930": 6.07364, "935": 6.44404, "940": 6.30443, "945": 6.06343, "950": 6.23702, "955": 6.02012, "960": 6.35362, "965": 6.18563, "970": 6.35383, "975": 6.08907, "980": 6.04181, "985": 6.33339, "990": 6.18815, "995": 6.33018, "1000": 6.23352, "1005": 6.4058, "1010": 6.19362, "1015": 6.11686, "1020": 6.08203, "1025": 6.35785, "1030": 6.11803, "1035": 6.03195, "1040": 6.01018, "1045": 6.60093, "1050": 6.34873, "1055": 6.09027, "1060": 
6.16403, "1065": 5.85887, "1070": 6.25593, "1075": 5.94946, "1080": 6.10403, "1085": 6.13551, "1090": 6.02268, "1095": 6.25209, "1100": 6.08705, "1105": 6.29611, "1110": 6.19967, "1115": 6.1011, "1120": 6.16046, "1125": 6.14261, "1130": 5.95602, "1135": 6.32195, "1140": 6.40249, "1145": 6.20263, "1150": 6.09901, "1155": 6.03405, "1160": 6.24143, "1165": 6.22108, "1170": 6.39871, "1175": 6.05912, "1180": 6.13279, "1185": 6.24595, "1190": 6.3738, "1195": 6.2675, "1200": 6.02483, "1205": 6.24352, "1210": 6.24381, "1215": 6.08485, "1220": 6.06558, "1225": 6.41096, "1230": 6.22266, "1235": 6.00361, "1240": 6.57248, "1245": 6.07717, "1250": 5.98109, "1255": 6.08225, "1260": 6.1592, "1265": 6.05607, "1270": 5.94447, "1275": 6.10328, "1280": 5.88732, "1285": 5.91185, "1290": 6.11708, "1295": 6.10313, "1300": 6.24041, "1305": 5.96596, "1310": 6.03315, "1315": 6.02144, "1320": 6.07709, "1325": 6.25622, "1330": 6.06937, "1335": 6.03641, "1340": 6.21518, "1345": 6.06063, "1350": 5.92036, "1355": 6.26228, "1360": 6.32347, "1365": 6.07062, "1370": 6.03043, "1375": 6.2884, "1380": 6.14084, "1385": 5.91489, "1390": 5.98972, "1395": 6.00301, "1400": 6.21583, "1405": 5.99994, "1410": 6.1255, "1415": 6.33421, "1420": 6.14871, "1425": 6.1415, "1430": 5.9813, "1435": 6.35489, "1440": 6.31621, "1445": 6.28693, "1450": 6.07265, "1455": 5.78189, "1460": 6.23124, "1465": 5.9566, "1470": 6.00064, "1475": 6.16614, "1480": 6.20798, "1485": 6.02411, "1490": 6.20953, "1495": 6.17788, "1500": 6.16957, "1505": 6.25605, "1510": 6.35487, "1515": 6.12147, "1520": 5.88284, "1525": 5.97354, "1530": 5.97133, "1535": 6.18283, "1540": 6.42884, "1545": 6.28868, "1550": 6.06395, "1555": 5.97533, "1560": 6.20183, "1565": 6.15308, "1570": 6.08513, "1575": 6.07142, "1580": 6.05003, "1585": 6.03634, "1590": 6.30695, "1595": 6.23795, "1600": 5.93011, "1605": 5.94669, "1610": 6.00829, "1615": 6.17491, "1620": 6.09426, "1625": 6.02182, "1630": 6.05703, "1635": 6.34675, "1640": 6.08607, "1645": 6.16877, "1650": 6.27363, "1655": 6.26811, "1660": 6.27462, "1665": 5.87312, "1670": 6.17929, "1675": 5.91525, "1680": 6.13344, "1685": 6.32443, "1690": 6.2644, "1695": 6.01757, "1700": 6.26122, "1705": 6.12344, "1710": 6.19754, "1715": 5.98887, "1720": 6.29412, "1725": 6.22684, "1730": 6.20743, "1735": 6.15486, "1740": 6.04205, "1745": 5.94038, "1750": 6.35924, "1755": 6.40246, "1760": 5.86366, "1765": 6.07505, "1770": 6.23461, "1775": 5.94154, "1780": 6.26637, "1785": 5.81716, "1790": 6.02314, "1795": 6.26227, "1800": 6.14013, "1805": 5.98584, "1810": 6.63134, "1815": 6.21314, "1820": 6.25555, "1825": 6.14411, "1830": 6.15875, "1835": 5.99699, "1840": 5.99038, "1845": 6.1556, "1850": 6.20895, "1855": 6.12019, "1860": 6.11647, "1865": 5.97461, "1870": 6.24845, "1875": 6.15245, "1880": 5.91023, "1885": 6.19165, "1890": 6.13825, "1895": 6.04321, "1900": 6.00342, "1905": 6.33723, "1910": 6.37854, "1915": 6.26141, "1920": 6.11687, "1925": 5.85322, "1930": 5.98664, "1935": 6.09483, "1940": 6.00604, "1945": 5.875, "1950": 6.09782, "1955": 5.96109, "1960": 6.13107, "1965": 6.07042, "1970": 6.04534, "1975": 6.09995, "1980": 6.39683, "1985": 6.30799, "1990": 5.99772, "1995": 6.07316, "2000": 6.00425, "2005": 6.16907, "2010": 6.27124, "2015": 6.05479, "2020": 6.20438, "2025": 6.249, "2030": 6.10151, "2035": 6.14185, "2040": 5.82018, "2045": 5.99301, "2050": 6.18173, "2055": 6.02603, "2060": 6.33486, "2065": 6.18433, "2070": 5.95901, "2075": 6.05091, "2080": 6.11619, "2085": 6.03563, "2090": 6.0318, "2095": 6.24495, "2100": 6.14167, "2105": 6.11567, "2110": 
6.02763, "2115": 6.15262, "2120": 6.02194, "2125": 6.25424, "2130": 6.20629, "2135": 5.96906, "2140": 6.07103, "2145": 5.75309, "2150": 6.16524, "2155": 6.07608, "2160": 5.98145, "2165": 5.97813, "2170": 6.01644, "2175": 6.01627, "2180": 5.99747, "2185": 5.89013, "2190": 5.86032, "2195": 6.00796, "2200": 6.26774, "2205": 6.1098, "2210": 5.94775, "2215": 6.14702, "2220": 6.37079, "2225": 5.97553, "2230": 6.16571, "2235": 6.05938, "2240": 5.95049, "2245": 6.23155, "2250": 5.96877, "2255": 5.79235, "2260": 6.13931, "2265": 5.93813, "2270": 5.98911, "2275": 6.05952, "2280": 6.1092, "2285": 5.95013, "2290": 6.02738, "2295": 5.97596, "2300": 6.21482, "2305": 5.7741, "2310": 6.07717, "2315": 6.07771, "2320": 6.02548, "2325": 5.79078, "2330": 5.91772, "2335": 5.99312, "2340": 6.05866, "2345": 5.87771, "2350": 5.93828, "2355": 6.16529, "2360": 5.97877, "2365": 6.16995, "2370": 6.11135, "2375": 6.18199, "2380": 6.05936, "2385": 6.1578, "2390": 6.19628, "2395": 6.07546, "2400": 5.78213, "2405": 5.94596, "2410": 6.21064, "2415": 6.03963, "2420": 6.08969, "2425": 5.83476, "2430": 6.3565, "2435": 6.17395, "2440": 5.93125, "2445": 5.83191, "2450": 6.03301, "2455": 5.69234, "2460": 5.98659, "2465": 6.25034, "2470": 5.84446, "2475": 5.93569, "2480": 5.9655, "2485": 6.0591, "2490": 6.0859, "2495": 6.12826, "2500": 6.07619, "2505": 5.95908, "2510": 5.81342, "2515": 6.04185, "2520": 6.2213, "2525": 5.81758, "2530": 5.96947, "2535": 5.96991, "2540": 6.16668, "2545": 5.96674, "2550": 6.00418, "2555": 5.72836, "2560": 6.20934, "2565": 5.85917, "2570": 6.08596, "2575": 5.69813, "2580": 5.91079, "2585": 5.9093, "2590": 6.12244, "2595": 6.13138, "2600": 6.04809, "2605": 6.04506, "2610": 6.30458, "2615": 6.07116, "2620": 5.98128, "2625": 6.11299, "2630": 6.1626, "2635": 6.16181, "2640": 5.92204, "2645": 6.01467, "2650": 6.03531, "2655": 5.86098, "2660": 5.84779, "2665": 6.17657, "2670": 6.00478, "2675": 5.97899, "2680": 6.13153, "2685": 5.98755, "2690": 6.06885, "2695": 5.96494, "2700": 6.31447, "2705": 6.07933, "2710": 6.13432, "2715": 5.84604, "2720": 5.97065, "2725": 6.19304, "2730": 6.05715, "2735": 6.17752, "2740": 5.88714, "2745": 6.11119, "2750": 5.98743, "2755": 5.99078, "2760": 5.87443, "2765": 6.18089, "2770": 6.03467, "2775": 6.23017, "2780": 6.20071, "2785": 6.02831, "2790": 6.03936, "2795": 5.87056, "2800": 6.12169, "2805": 5.99708, "2810": 6.1354, "2815": 6.09528, "2820": 6.0085, "2825": 5.87472, "2830": 6.28475, "2835": 5.95398, "2840": 6.09487, "2845": 5.88595, "2850": 5.85459, "2855": 6.16742, "2860": 5.83636, "2865": 6.09151, "2870": 6.06759, "2875": 6.11959, "2880": 5.91152, "2885": 5.93185, "2890": 6.05905, "2895": 6.07588, "2900": 5.78771, "2905": 6.10237, "2910": 6.00624, "2915": 6.16915, "2920": 5.9086, "2925": 5.77926, "2930": 6.03226, "2935": 6.03469, "2940": 6.19466, "2945": 5.90452, "2950": 6.10891, "2955": 6.00063, "2960": 5.9171, "2965": 5.88471, "2970": 5.88766, "2975": 5.92895, "2980": 6.14647, "2985": 6.1873, "2990": 5.9706, "2995": 6.26542, "3000": 6.1188, "3005": 5.8684, "3010": 6.31907, "3015": 5.86517, "3020": 5.82006, "3025": 5.88243, "3030": 6.00027, "3035": 5.98462, "3040": 6.25786, "3045": 5.8475, "3050": 6.22786, "3055": 5.88956, "3060": 5.9048, "3065": 5.94588, "3070": 6.14207, "3075": 5.84503, "3080": 5.89853, "3085": 6.08275, "3090": 6.17043, "3095": 6.03701, "3100": 6.10486, "3105": 5.99793, "3110": 5.84414, "3115": 6.06988, "3120": 6.28283, "3125": 6.19285, "3130": 5.98218, "3135": 6.12629, "3140": 5.85226, "3145": 6.36395, "3150": 6.01258, "3155": 6.25503, "3160": 
6.04938, "3165": 6.07205, "3170": 6.20961, "3175": 6.02655, "3180": 6.02716, "3185": 6.06282, "3190": 5.99221, "3195": 5.8809, "3200": 6.06466, "3205": 5.9382, "3210": 5.98764, "3215": 5.97029, "3220": 5.89104, "3225": 5.79917, "3230": 6.12085, "3235": 5.8384, "3240": 6.12442, "3245": 6.14385, "3250": 5.97425, "3255": 6.0545, "3260": 5.94933, "3265": 6.07255, "3270": 6.06938, "3275": 6.07655, "3280": 6.0017, "3285": 5.8481, "3290": 6.07231, "3295": 5.77201, "3300": 5.9592, "3305": 5.98806, "3310": 5.84933, "3315": 5.45612, "3320": 5.9375, "3325": 5.86696, "3330": 5.9198, "3335": 6.04057, "3340": 6.01008, "3345": 5.96425, "3350": 5.88267, "3355": 5.92749, "3360": 5.78791, "3365": 6.28219, "3370": 6.08834, "3375": 5.85299, "3380": 6.17452, "3385": 5.93284, "3390": 5.88271, "3395": 5.99314, "3400": 6.03594, "3405": 5.98793, "3410": 6.2627, "3415": 5.96437, "3420": 5.76598, "3425": 6.257, "3430": 5.9041, "3435": 5.93149, "3440": 5.78047, "3445": 5.95834, "3450": 6.04761, "3455": 5.89934, "3460": 5.95712, "3465": 6.02859, "3470": 5.87991, "3475": 5.97779, "3480": 5.83089, "3485": 5.92359, "3490": 5.9974, "3495": 6.074, "3500": 6.00408, "3505": 6.12925, "3510": 6.03277, "3515": 6.05753, "3520": 6.02408, "3525": 6.07911, "3530": 5.91587, "3535": 6.10122, "3540": 6.01707, "3545": 6.23434, "3550": 6.02742, "3555": 6.20687, "3560": 6.03194, "3565": 6.01322, "3570": 6.01305, "3575": 5.75022, "3580": 6.11734, "3585": 5.8682, "3590": 5.97368, "3595": 5.85319, "3600": 5.66878, "3605": 6.05195, "3610": 6.13052, "3615": 5.94939, "3620": 5.97577, "3625": 6.17555, "3630": 5.85973, "3635": 6.06535, "3640": 6.22585, "3645": 5.82602, "3650": 6.05082, "3655": 5.85874, "3660": 5.98228, "3665": 5.82464, "3670": 6.1001, "3675": 5.95103, "3680": 5.99762, "3685": 5.89004, "3690": 5.91176, "3695": 6.05488, "3700": 5.9664, "3705": 6.02399, "3710": 5.93288, "3715": 5.78374, "3720": 6.0928, "3725": 6.20757, "3730": 5.97971, "3735": 5.90662, "3740": 5.87716, "3745": 5.99158, "3750": 6.1208, "3755": 5.81412, "3760": 6.07629, "3765": 5.72966, "3770": 6.11711, "3775": 5.92014, "3780": 6.22326, "3785": 6.01246, "3790": 5.90445, "3795": 6.38234, "3800": 5.71407, "3805": 6.04038, "3810": 5.99639, "3815": 5.97388, "3820": 5.91052, "3825": 6.01581, "3830": 5.69454, "3835": 5.82222, "3840": 5.92453, "3845": 5.86473, "3850": 5.75107, "3855": 6.0997, "3860": 6.0792, "3865": 5.80667, "3870": 5.8698, "3875": 5.91752, "3880": 6.05322, "3885": 6.07776, "3890": 5.93372, "3895": 6.02571, "3900": 5.93583, "3905": 5.77554, "3910": 5.98079, "3915": 5.93836, "3920": 6.0384, "3925": 6.05052, "3930": 5.82468, "3935": 6.00309, "3940": 5.97644, "3945": 5.80364, "3950": 5.71242, "3955": 5.64709, "3960": 5.6928, "3965": 5.8392, "3970": 5.95118, "3975": 6.03905, "3980": 6.01592, "3985": 5.88058, "3990": 5.89601, "3995": 6.06335, "4000": 5.92695, "4005": 5.83767, "4010": 5.90926, "4015": 6.05247, "4020": 5.72979, "4025": 5.9811, "4030": 5.85282, "4035": 5.61899, "4040": 6.1771, "4045": 5.99571, "4050": 5.91783, "4055": 6.17885, "4060": 5.84221, "4065": 5.8833, "4070": 5.96066, "4075": 5.86633, "4080": 5.83322, "4085": 6.04471, "4090": 6.10456, "4095": 5.74723, "4100": 5.75289, "4105": 6.14218, "4110": 5.64252, "4115": 6.01125, "4120": 5.9786, "4125": 5.84296, "4130": 5.94813, "4135": 6.13013, "4140": 5.88369, "4145": 6.04204, "4150": 5.89849, "4155": 5.90384, "4160": 5.70369, "4165": 5.86758, "4170": 5.9657, "4175": 5.57474, "4180": 6.11062, "4185": 5.98628, "4190": 5.84044, "4195": 6.09738, "4200": 5.57131, "4205": 5.80155, "4210": 6.02393, "4215": 
5.88891, "4220": 5.82996, "4225": 5.7451, "4230": 5.8151, "4235": 5.78634, "4240": 5.8465, "4245": 5.84784, "4250": 6.11465, "4255": 6.01487, "4260": 5.89099, "4265": 6.08233, "4270": 6.03876, "4275": 5.78635, "4280": 5.96225, "4285": 5.44958, "4290": 5.85094, "4295": 5.75537, "4300": 6.0828, "4305": 5.79049, "4310": 5.88714, "4315": 6.00792, "4320": 5.61666, "4325": 5.91629, "4330": 5.87369, "4335": 6.09393, "4340": 5.64895, "4345": 5.98922, "4350": 6.18923, "4355": 6.0726, "4360": 5.94713, "4365": 5.9624, "4370": 6.12777, "4375": 5.81442, "4380": 6.2151, "4385": 5.95352, "4390": 5.90547, "4395": 5.85397, "4400": 5.96785, "4405": 6.06758, "4410": 5.69651, "4415": 5.68279, "4420": 5.97437, "4425": 5.99099, "4430": 5.73795, "4435": 5.6077, "4440": 5.94604, "4445": 5.98559, "4450": 5.98021, "4455": 5.98509, "4460": 5.91287, "4465": 5.75576, "4470": 6.01444, "4475": 5.97226, "4480": 5.8727, "4485": 5.83316, "4490": 5.72116, "4495": 6.08876, "4500": 5.99656, "4505": 6.06873, "4510": 5.7272, "4515": 6.04657, "4520": 6.02537, "4525": 5.66054, "4530": 5.84621, "4535": 5.78212, "4540": 5.94376, "4545": 5.86302, "4550": 6.06617, "4555": 5.7309, "4560": 5.987, "4565": 5.96833, "4570": 5.8168, "4575": 6.02794, "4580": 5.7049, "4585": 5.98809, "4590": 5.97186, "4595": 5.7722, "4600": 5.96165, "4605": 5.89799, "4610": 5.82583, "4615": 5.9817, "4620": 6.10493, "4625": 5.71283, "4630": 5.83466, "4635": 5.85632, "4640": 5.76916, "4645": 5.82717, "4650": 5.78336, "4655": 5.95854, "4660": 6.04608, "4665": 5.75261, "4670": 6.06464, "4675": 6.22357, "4680": 5.9045, "4685": 5.673, "4690": 5.86325, "4695": 5.70763, "4700": 5.98168, "4705": 5.85174, "4710": 5.96109, "4715": 6.11571, "4720": 5.99887, "4725": 5.93275, "4730": 6.04045, "4735": 5.80809, "4740": 5.94978, "4745": 5.91324, "4750": 5.85717, "4755": 5.65827, "4760": 5.92101, "4765": 5.93362, "4770": 6.09067, "4775": 5.87135, "4780": 6.08819, "4785": 5.8731, "4790": 5.94972, "4795": 5.84614, "4800": 5.64225, "4805": 5.70128, "4810": 5.89968, "4815": 5.9997, "4820": 5.49148, "4825": 5.98043, "4830": 5.94247, "4835": 5.81849, "4840": 6.12999, "4845": 6.01971, "4850": 5.91519, "4855": 6.16703, "4860": 6.06059, "4865": 5.81255, "4870": 5.94657, "4875": 5.96162, "4880": 5.8084, "4885": 6.05684, "4890": 5.7932, "4895": 5.95185, "4900": 5.96264, "4905": 5.88461, "4910": 5.77821, "4915": 5.92909, "4920": 5.9975, "4925": 6.12044, "4930": 5.94594, "4935": 6.02831, "4940": 5.86724, "4945": 5.86159, "4950": 5.78799, "4955": 5.98601, "4960": 5.66078, "4965": 6.06929, "4970": 5.8495, "4975": 6.06051, "4980": 6.09698, "4985": 5.59213, "4990": 5.81326, "4995": 5.91457, "5000": 6.07364, "5005": 5.94825, "5010": 5.95511, "5015": 5.83173, "5020": 6.07714, "5025": 5.91783, "5030": 6.08646, "5035": 5.86503, "5040": 5.89568, "5045": 6.08986, "5050": 5.78338, "5055": 5.95993, "5060": 6.13316, "5065": 5.74849, "5070": 5.8501, "5075": 5.81988, "5080": 5.89304, "5085": 5.87833, "5090": 5.88168, "5095": 6.03774, "5100": 5.67416, "5105": 5.82695, "5110": 5.87161, "5115": 5.91142, "5120": 5.89405, "5125": 6.02768, "5130": 6.11376, "5135": 5.95289, "5140": 5.84183, "5145": 5.67579, "5150": 5.89657, "5155": 5.82823, "5160": 5.93903, "5165": 5.84558, "5170": 6.02827, "5175": 6.09318, "5180": 6.08378, "5185": 6.10424, "5190": 6.16258, "5195": 5.99145, "5200": 6.05603, "5205": 5.86883, "5210": 5.81461, "5215": 5.96669, "5220": 6.03105, "5225": 5.66095, "5230": 6.12338, "5235": 5.72441, "5240": 5.92626, "5245": 6.08622, "5250": 5.75723, "5255": 6.03679, "5260": 5.72606, "5265": 5.90968, 
"5270": 5.84854, "5275": 5.68685, "5280": 5.75602, "5285": 6.06916, "5290": 6.02692, "5295": 5.94059, "5300": 5.95885, "5305": 6.00005, "5310": 5.67134, "5315": 5.748, "5320": 5.63422, "5325": 5.87397, "5330": 5.66023, "5335": 5.8339, "5340": 5.73971, "5345": 5.78845, "5350": 5.9025, "5355": 5.92206, "5360": 6.07336, "5365": 5.77985, "5370": 5.85376, "5375": 5.6049, "5380": 5.91661, "5385": 6.12394, "5390": 5.82189, "5395": 5.87105, "5400": 5.95236, "5405": 5.68995, "5410": 5.46309, "5415": 6.06397, "5420": 5.9973, "5425": 5.90584, "5430": 5.67683, "5435": 5.977, "5440": 5.87155, "5445": 5.80642, "5450": 6.23539, "5455": 6.06937, "5460": 5.96546, "5465": 5.7541, "5470": 6.10511, "5475": 5.91961, "5480": 5.81743, "5485": 5.80802, "5490": 6.05039, "5495": 5.82617, "5500": 5.93446, "5505": 5.74063, "5510": 5.89965, "5515": 6.04098, "5520": 6.0738, "5525": 5.73201, "5530": 5.97738, "5535": 5.64904, "5540": 5.67747, "5545": 5.69992, "5550": 5.92799, "5555": 5.85669, "5560": 5.83134, "5565": 5.56101, "5570": 5.92158, "5575": 6.0932, "5580": 5.95349, "5585": 6.11276, "5590": 5.98373, "5595": 6.11635, "5600": 6.00274, "5605": 6.05319, "5610": 5.97314, "5615": 5.71298, "5620": 6.24565, "5625": 6.06485, "5630": 6.1334, "5635": 5.88619, "5640": 5.75622, "5645": 6.13513, "5650": 5.876, "5655": 5.69305, "5660": 6.13191, "5665": 5.81365, "5670": 5.73347, "5675": 5.924, "5680": 5.84027, "5685": 5.84329, "5690": 5.75277, "5695": 5.84447, "5700": 5.83425, "5705": 5.87433, "5710": 6.12193, "5715": 5.80536, "5720": 5.68856, "5725": 5.85027, "5730": 5.63776, "5735": 5.81068, "5740": 5.67376, "5745": 5.98577, "5750": 6.02996, "5755": 5.86208, "5760": 6.25156, "5765": 5.71894, "5770": 5.82406, "5775": 5.71575, "5780": 6.12775, "5785": 6.09879, "5790": 6.08178, "5795": 5.93148, "5800": 6.08462, "5805": 5.65564, "5810": 5.55791, "5815": 5.93041, "5820": 5.68461, "5825": 5.84204, "5830": 6.03798, "5835": 6.05667, "5840": 5.86697, "5845": 5.83712, "5850": 5.88625, "5855": 5.96867, "5860": 6.00277, "5865": 5.98086, "5870": 5.63804, "5875": 5.84283, "5880": 5.96442, "5885": 5.97583, "5890": 5.99021, "5895": 5.64368, "5900": 6.02987, "5905": 6.089, "5910": 5.78788, "5915": 5.95501, "5920": 5.71454, "5925": 5.81768, "5930": 5.78305, "5935": 5.9069, "5940": 5.85167, "5945": 5.86709, "5950": 5.90808, "5955": 5.81667, "5960": 5.90225, "5965": 5.79852, "5970": 5.91004, "5975": 5.77538, "5980": 5.99056, "5985": 5.7286, "5990": 5.8351, "5995": 5.97124, "6000": 5.92089, "6005": 6.03179, "6010": 5.88793, "6015": 5.88359, "6020": 6.07171, "6025": 5.89094, "6030": 5.94989, "6035": 5.82931, "6040": 5.82518, "6045": 6.00943, "6050": 5.80604, "6055": 5.95161, "6060": 5.8181, "6065": 6.00115, "6070": 5.75933, "6075": 5.59026, "6080": 5.78149, "6085": 5.87118, "6090": 6.03671, "6095": 5.97082, "6100": 5.57972, "6105": 5.72684, "6110": 6.0764, "6115": 6.0097, "6120": 6.03518, "6125": 5.69707, "6130": 5.95461, "6135": 5.54152, "6140": 5.87637, "6145": 5.75229, "6150": 5.8015, "6155": 5.85735, "6160": 5.78699, "6165": 5.97731, "6170": 5.99949, "6175": 5.98108, "6180": 5.94716, "6185": 5.59701, "6190": 5.87248, "6195": 5.89317, "6200": 5.69626, "6205": 5.45686, "6210": 5.7563, "6215": 5.63617, "6220": 5.88549, "6225": 5.88844, "6230": 5.69624, "6235": 5.86791, "6240": 5.82125, "6245": 5.92873, "6250": 5.96425, "6255": 5.7482, "6260": 5.91595, "6265": 5.79466, "6270": 5.93485, "6275": 5.95373, "6280": 5.7759, "6285": 5.60686, "6290": 5.84971, "6295": 5.71141, "6300": 5.74117, "6305": 5.92574, "6310": 5.61886, "6315": 5.60544, "6320": 
5.86826, "6325": 5.93603, "6330": 5.84852, "6335": 5.91784, "6340": 6.0349, "6345": 5.87701, "6350": 5.83306, "6355": 5.81394, "6360": 5.72538, "6365": 6.01883, "6370": 5.77839, "6375": 5.63804, "6380": 5.91633, "6385": 5.82156, "6390": 5.69607, "6395": 6.04616, "6400": 5.77809, "6405": 5.97584, "6410": 5.80263, "6415": 5.97836, "6420": 5.87948, "6425": 5.98962, "6430": 5.79481, "6435": 5.58722, "6440": 5.90316, "6445": 5.9463, "6450": 5.97537, "6455": 5.91002, "6460": 5.972, "6465": 5.75623, "6470": 5.7302, "6475": 5.79067, "6480": 6.09147, "6485": 5.85549, "6490": 5.7152, "6495": 5.79304, "6500": 6.00552, "6505": 5.77009, "6510": 5.78143, "6515": 5.90694, "6520": 6.09193, "6525": 5.81564, "6530": 5.98241, "6535": 5.79532, "6540": 6.03136, "6545": 5.86131, "6550": 5.94544, "6555": 5.66157, "6560": 5.90597, "6565": 5.87107, "6570": 5.8972, "6575": 5.9105, "6580": 5.83622, "6585": 6.03871, "6590": 5.71132, "6595": 6.00204, "6600": 5.90645, "6605": 6.09848, "6610": 6.04391, "6615": 5.71854, "6620": 5.75878, "6625": 5.9494, "6630": 5.70529, "6635": 5.85265, "6640": 5.72095, "6645": 5.66608, "6650": 5.79589, "6655": 6.06189, "6660": 5.82669, "6665": 5.89547, "6670": 6.07109, "6675": 5.74193, "6680": 5.93431, "6685": 5.89928, "6690": 5.79495, "6695": 5.91007, "6700": 5.65257, "6705": 6.01783, "6710": 5.71358, "6715": 5.82411, "6720": 5.98228, "6725": 5.98447, "6730": 5.54247, "6735": 5.82771, "6740": 5.94344, "6745": 5.72387, "6750": 5.65983, "6755": 5.68804, "6760": 5.8599, "6765": 5.94824, "6770": 5.89592, "6775": 5.71643, "6780": 5.75446, "6785": 5.89789, "6790": 5.60939, "6795": 5.84857, "6800": 5.66142, "6805": 5.72656, "6810": 5.83857, "6815": 5.9619, "6820": 5.58571, "6825": 6.01492, "6830": 6.04983, "6835": 5.84127, "6840": 6.04584, "6845": 5.79189, "6850": 5.62788, "6855": 5.83524, "6860": 6.03239, "6865": 5.67083, "6870": 5.79229, "6875": 5.85901, "6880": 6.10503, "6885": 5.96496, "6890": 5.91272, "6895": 5.77253, "6900": 5.90441, "6905": 6.00789, "6910": 5.78713, "6915": 5.90119, "6920": 6.10898, "6925": 5.82724, "6930": 5.98957, "6935": 5.94344, "6940": 5.82049, "6945": 5.63854, "6950": 5.91236, "6955": 5.63333, "6960": 5.81133, "6965": 5.69925, "6970": 5.85376, "6975": 5.85359, "6980": 5.89083, "6985": 5.91882, "6990": 5.73713, "6995": 5.89793, "7000": 5.82103, "7005": 5.90244, "7010": 5.72638, "7015": 5.95834, "7020": 5.64784, "7025": 5.8768, "7030": 5.90527, "7035": 5.91034, "7040": 5.82568, "7045": 5.90077, "7050": 5.82944, "7055": 6.15177, "7060": 5.72798, "7065": 5.52932, "7070": 5.87274, "7075": 5.82944, "7080": 5.81256, "7085": 6.05009, "7090": 5.91977, "7095": 5.70034, "7100": 5.87272, "7105": 5.82099, "7110": 5.79679, "7115": 5.78455, "7120": 5.80729, "7125": 5.78187, "7130": 5.90432, "7135": 5.65231, "7140": 5.95303, "7145": 5.74481, "7150": 5.84274, "7155": 5.70761, "7160": 5.54263, "7165": 5.78246, "7170": 5.92449, "7175": 5.75226, "7180": 5.83466, "7185": 6.01277, "7190": 5.7504, "7195": 6.00854, "7200": 5.36623, "7205": 5.82657, "7210": 5.75722, "7215": 5.70763, "7220": 5.98623, "7225": 5.87521, "7230": 5.83534, "7235": 5.78719, "7240": 5.81129, "7245": 5.75627, "7250": 5.94062, "7255": 5.74688, "7260": 5.90828, "7265": 5.80926, "7270": 5.65987, "7275": 5.61302, "7280": 5.68157, "7285": 6.09783, "7290": 5.78166, "7295": 5.82733, "7300": 6.03172, "7305": 5.80954, "7310": 5.75934, "7315": 5.67646, "7320": 5.82747, "7325": 5.94145, "7330": 5.89161, "7335": 5.81415, "7340": 6.12372, "7345": 5.89585, "7350": 5.89517, "7355": 5.76747, "7360": 5.86277, "7365": 5.97572, 
"7370": 5.92695, "7375": 5.9998, "7380": 5.62649, "7385": 5.60713, "7390": 5.37832, "7395": 5.84424, "7400": 6.0214, "7405": 5.6846, "7410": 5.53331, "7415": 5.74369, "7420": 5.74541, "7425": 5.71479, "7430": 5.66435, "7435": 5.90399, "7440": 5.78136, "7445": 5.99807, "7450": 5.6349, "7455": 5.73046, "7460": 5.92914, "7465": 5.66417, "7470": 5.90493, "7475": 5.893, "7480": 6.11112, "7485": 6.01242, "7490": 5.78887, "7495": 5.91944, "7500": 5.7222, "7505": 5.46517, "7510": 5.63186, "7515": 5.80544, "7520": 5.56263, "7525": 6.17304, "7530": 5.62589, "7535": 5.87513, "7540": 5.64947, "7545": 5.82008, "7550": 6.10426, "7555": 5.61303, "7560": 5.50947, "7565": 5.72512, "7570": 5.87354, "7575": 5.82948, "7580": 5.85046, "7585": 5.82411, "7590": 5.72754, "7595": 5.91624, "7600": 5.95791, "7605": 5.7625, "7610": 5.86581, "7615": 5.7302, "7620": 5.93153, "7625": 5.5793, "7630": 5.95246, "7635": 5.74829, "7640": 5.63638, "7645": 5.8871, "7650": 5.90615, "7655": 5.87861, "7660": 5.94713, "7665": 5.70738, "7670": 5.96559, "7675": 5.74314, "7680": 5.78888, "7685": 5.58587, "7690": 6.02031, "7695": 5.72888, "7700": 5.88408, "7705": 5.83287, "7710": 5.95595, "7715": 5.99481, "7720": 5.86884, "7725": 5.8895, "7730": 5.81375, "7735": 5.93829, "7740": 5.83309, "7745": 5.77619, "7750": 5.83994, "7755": 5.97219, "7760": 6.1147, "7765": 5.59473, "7770": 5.76657, "7775": 5.74115, "7780": 5.85874, "7785": 5.6001, "7790": 5.884, "7795": 5.94862, "7800": 5.70014, "7805": 5.81943, "7810": 5.41855, "7815": 5.92261, "7820": 5.99822, "7825": 5.73806, "7830": 5.67513, "7835": 5.61757, "7840": 5.78784, "7845": 5.94562, "7850": 5.77884, "7855": 5.80756, "7860": 5.54746, "7865": 5.25609, "7870": 5.94755, "7875": 5.6905, "7880": 5.79617, "7885": 5.68094, "7890": 5.7579, "7895": 5.58516, "7900": 5.95009, "7905": 5.7974, "7910": 5.83403, "7915": 5.62173, "7920": 5.65485, "7925": 5.67705, "7930": 5.90316, "7935": 5.89119, "7940": 5.64913, "7945": 5.76924, "7950": 6.01473, "7955": 5.71644, "7960": 5.77164, "7965": 5.72695, "7970": 5.74712, "7975": 5.90801, "7980": 5.44652, "7985": 5.91901, "7990": 5.69615, "7995": 5.6298, "8000": 5.72284, "8005": 5.86332, "8010": 5.48412, "8015": 5.8511, "8020": 5.80825, "8025": 5.40491, "8030": 5.74312, "8035": 5.64714, "8040": 5.72874, "8045": 5.56588, "8050": 5.79171, "8055": 5.92568, "8060": 5.69134, "8065": 5.86486, "8070": 5.82576, "8075": 5.84657, "8080": 5.64556, "8085": 5.76015, "8090": 5.69718, "8095": 5.57212, "8100": 5.82403, "8105": 5.71188, "8110": 5.59603, "8115": 5.60957, "8120": 5.80626, "8125": 5.516, "8130": 5.93733, "8135": 5.6502, "8140": 5.8428, "8145": 5.80404, "8150": 6.05433, "8155": 5.9796, "8160": 5.73751, "8165": 5.66863, "8170": 5.83506, "8175": 6.02733, "8180": 5.68482, "8185": 5.63858, "8190": 5.71248, "8195": 5.62369, "8200": 5.75792, "8205": 5.5708, "8210": 5.75834, "8215": 5.54481, "8220": 5.67746, "8225": 5.73509, "8230": 5.90221, "8235": 5.63434, "8240": 5.60042, "8245": 5.55364, "8250": 5.87727, "8255": 5.77999, "8260": 5.7375, "8265": 5.69489, "8270": 5.80426, "8275": 5.6306, "8280": 5.81055, "8285": 5.53062, "8290": 5.67358, "8295": 5.81196, "8300": 5.73593, "8305": 5.69522, "8310": 5.84368, "8315": 5.48173, "8320": 5.73258, "8325": 5.9263, "8330": 5.80573, "8335": 5.83229, "8340": 5.67704, "8345": 5.79635, "8350": 5.50639, "8355": 5.82299, "8360": 5.77546, "8365": 5.66248, "8370": 5.76869, "8375": 5.69422, "8380": 5.85131, "8385": 5.78815, "8390": 5.60328, "8395": 5.67423, "8400": 5.81522, "8405": 5.68575, "8410": 5.71193, "8415": 5.84812, "8420": 
5.82082, "8425": 5.81672, "8430": 5.43725, "8435": 5.49064, "8440": 5.73602, "8445": 5.40615, "8450": 5.83461, "8455": 5.6964, "8460": 5.71148, "8465": 5.41237, "8470": 5.91572, "8475": 5.74038, "8480": 5.44003, "8485": 5.86751, "8490": 5.75691, "8495": 5.50735, "8500": 5.85486, "8505": 5.46488, "8510": 5.94808, "8515": 5.609, "8520": 5.76642, "8525": 5.38807, "8530": 5.65796, "8535": 5.92774, "8540": 5.55606, "8545": 5.79748, "8550": 5.61649, "8555": 5.75795, "8560": 5.81342, "8565": 5.83405, "8570": 5.38373, "8575": 5.71012, "8580": 5.66105, "8585": 5.4942, "8590": 5.68779, "8595": 5.56707, "8600": 5.82228, "8605": 5.74586, "8610": 5.70183, "8615": 5.86348, "8620": 5.45154, "8625": 5.6085, "8630": 5.79859, "8635": 5.49512, "8640": 5.64539, "8645": 5.745, "8650": 5.70007, "8655": 5.59801, "8660": 5.6, "8665": 5.74373, "8670": 5.40236, "8675": 5.54804, "8680": 5.79269, "8685": 5.81167, "8690": 5.69646, "8695": 5.84499, "8700": 5.66007, "8705": 5.70066, "8710": 5.70913, "8715": 5.60484, "8720": 5.64372, "8725": 5.75667, "8730": 5.7823, "8735": 5.84955, "8740": 5.74703, "8745": 5.61787, "8750": 5.94007, "8755": 5.61608, "8760": 5.46402, "8765": 5.59276, "8770": 6.02898, "8775": 5.51498, "8780": 5.9845, "8785": 5.70372, "8790": 5.82905, "8795": 5.81061, "8800": 5.71435, "8805": 5.83535, "8810": 5.80174, "8815": 5.61371, "8820": 5.66594, "8825": 5.46885, "8830": 5.75579, "8835": 5.74373, "8840": 5.49764, "8845": 5.60756, "8850": 5.73135, "8855": 5.44014, "8860": 5.51977, "8865": 5.62161, "8870": 5.54434, "8875": 5.71215, "8880": 5.63476, "8885": 5.69293, "8890": 5.7104, "8895": 5.62832, "8900": 5.43094, "8905": 5.70281, "8910": 5.70314, "8915": 5.91589, "8920": 5.16861, "8925": 5.67876, "8930": 5.6559, "8935": 5.07527, "8940": 5.86749, "8945": 5.78208, "8950": 5.65786, "8955": 5.74818, "8960": 5.25266, "8965": 5.9926, "8970": 5.60364, "8975": 5.39638, "8980": 5.721, "8985": 5.67142, "8990": 5.8837, "8995": 5.45353, "9000": 5.50147, "9005": 5.49486, "9010": 5.75017, "9015": 5.63327, "9020": 5.75668, "9025": 5.67242, "9030": 5.4956, "9035": 5.80081, "9040": 5.90551, "9045": 5.70986, "9050": 5.76715, "9055": 5.72592, "9060": 5.73708, "9065": 5.5525, "9070": 5.65479, "9075": 5.66817, "9080": 5.65021, "9085": 5.34984, "9090": 5.66204, "9095": 5.40914, "9100": 5.55722, "9105": 5.76811, "9110": 5.78093, "9115": 5.59328, "9120": 5.66293, "9125": 5.57456, "9130": 5.5401, "9135": 5.73938, "9140": 5.86389, "9145": 5.66871, "9150": 5.82171, "9155": 5.56699, "9160": 5.41264, "9165": 5.57659, "9170": 5.64255, "9175": 5.73597, "9180": 5.43397, "9185": 5.63406, "9190": 5.6363, "9195": 5.6396, "9200": 5.58592, "9205": 5.75986, "9210": 5.72434, "9215": 5.68457, "9220": 5.48039, "9225": 5.67313, "9230": 5.86744, "9235": 5.65768, "9240": 5.51386, "9245": 5.79919, "9250": 5.73313, "9255": 5.55845, "9260": 5.35994, "9265": 5.75772, "9270": 5.66636, "9275": 5.53637, "9280": 5.36932, "9285": 5.79772, "9290": 5.61365, "9295": 5.90657, "9300": 5.70808, "9305": 5.7012, "9310": 5.33009, "9315": 5.62003, "9320": 5.58207, "9325": 5.49885, "9330": 5.54335, "9335": 5.82413, "9340": 5.50917, "9345": 5.77417, "9350": 5.48905, "9355": 5.42847, "9360": 5.51044, "9365": 5.29568, "9370": 5.35347, "9375": 5.65943, "9380": 5.59457, "9385": 5.55737, "9390": 5.63691, "9395": 5.57395, "9400": 5.58915, "9405": 5.29538, "9410": 5.4566, "9415": 5.48807, "9420": 5.56795, "9425": 5.75716, "9430": 5.37538, "9435": 5.09477, "9440": 5.59755, "9445": 5.63519, "9450": 5.5206, "9455": 5.44631, "9460": 5.62906, "9465": 5.48316, "9470": 5.68383, 
"9475": 5.6122, "9480": 5.33545, "9485": 5.65824, "9490": 5.72671, "9495": 5.63357, "9500": 5.63213, "9505": 5.84218, "9510": 5.61837, "9515": 5.42445, "9520": 5.50514, "9525": 5.8456, "9530": 5.70762, "9535": 5.51983, "9540": 5.42291, "9545": 5.48033, "9550": 5.41643, "9555": 5.70902, "9560": 5.55505, "9565": 5.46654, "9570": 5.54315, "9575": 5.64194, "9580": 5.48847, "9585": 5.33709, "9590": 5.60695, "9595": 5.4822, "9600": 5.58624, "9605": 5.37234, "9610": 5.31541, "9615": 5.09623, "9620": 5.54282, "9625": 5.49912, "9630": 5.71081, "9635": 5.52619, "9640": 5.62244, "9645": 5.41501, "9650": 5.75509, "9655": 5.62307, "9660": 5.3647, "9665": 5.30849, "9670": 5.72415, "9675": 5.23017, "9680": 5.5966, "9685": 5.77998, "9690": 5.51237, "9695": 5.48578, "9700": 5.37464, "9705": 5.61373, "9710": 5.68007, "9715": 5.54804, "9720": 5.65807, "9725": 5.3914, "9730": 5.46214, "9735": 5.40781, "9740": 5.72982, "9745": 5.48765, "9750": 5.33607, "9755": 5.42646, "9760": 5.58826, "9765": 5.62524, "9770": 5.19389, "9775": 5.58636, "9780": 5.57133, "9785": 5.33364, "9790": 5.45077, "9795": 5.58703, "9800": 5.50592, "9805": 5.47202, "9810": 5.23286, "9815": 5.49554, "9820": 5.43161, "9825": 5.60899, "9830": 5.36394, "9835": 5.50149, "9840": 5.40274, "9845": 5.74392, "9850": 5.35515, "9855": 5.6459, "9860": 5.44274, "9865": 5.31234, "9870": 5.57249, "9875": 5.5532, "9880": 5.00493, "9885": 5.5462, "9890": 5.65539, "9895": 5.37119, "9900": 5.51365, "9905": 5.33651, "9910": 5.50684, "9915": 5.24271, "9920": 5.66817, "9925": 5.54969, "9930": 5.4118, "9935": 5.54992, "9940": 5.37945, "9945": 5.50179, "9950": 5.5705, "9955": 5.28958, "9960": 5.69402, "9965": 5.57838, "9970": 5.49323, "9975": 5.50238, "9980": 5.62863, "9985": 5.5802, "9990": 5.76479, "9995": 5.37461, "10000": 5.37217, "10005": 5.36176, "10010": 5.54772, "10015": 5.73754, "10020": 5.7272, "10025": 5.60862, "10030": 5.0291, "10035": 5.60087, "10040": 5.441, "10045": 5.77326, "10050": 5.69889, "10055": 5.69791, "10060": 5.36611, "10065": 5.4376, "10070": 5.39917, "10075": 5.34005, "10080": 5.51841, "10085": 5.34771, "10090": 5.6845, "10095": 5.46646, "10100": 5.39041, "10105": 5.40078, "10110": 5.65632, "10115": 5.60608, "10120": 5.02873, "10125": 5.30725, "10130": 5.37288, "10135": 5.5169, "10140": 5.61937, "10145": 5.3742, "10150": 5.34816, "10155": 5.47273, "10160": 5.38916, "10165": 5.58045, "10170": 5.45593, "10175": 5.42438, "10180": 5.2709, "10185": 5.39061, "10190": 5.23709, "10195": 5.39072, "10200": 5.34009, "10205": 5.37003, "10210": 5.57952, "10215": 5.35967, "10220": 5.59501, "10225": 5.41379, "10230": 5.4957, "10235": 5.42919, "10240": 5.52993, "10245": 5.46694, "10250": 5.13013, "10255": 5.08857, "10260": 5.48043, "10265": 5.50391, "10270": 5.4464, "10275": 5.23887, "10280": 5.38853, "10285": 5.2312, "10290": 5.21898, "10295": 5.53407, "10300": 5.41463, "10305": 5.33047, "10310": 5.43449, "10315": 5.21006, "10320": 5.14259, "10325": 5.3647, "10330": 5.41698, "10335": 5.35589, "10340": 5.35679, "10345": 5.26156, "10350": 5.58519, "10355": 5.2316, "10360": 5.39321, "10365": 5.20598, "10370": 5.48835, "10375": 5.55115, "10380": 5.52903, "10385": 5.61277, "10390": 5.38487, "10395": 5.46827, "10400": 5.44774, "10405": 5.4081, "10410": 5.70945, "10415": 5.35008, "10420": 5.4518, "10425": 5.55035, "10430": 5.34628, "10435": 5.25202, "10440": 5.27857, "10445": 5.39387, "10450": 5.59736, "10455": 5.3822, "10460": 5.61593, "10465": 5.52211, "10470": 5.2085, "10475": 5.39076, "10480": 5.43431, "10485": 5.37379, "10490": 5.11247, "10495": 5.17833, 
"10500": 5.44333, "10505": 5.61455, "10510": 5.37368, "10515": 5.4714, "10520": 5.43458, "10525": 5.3523, "10530": 5.48889, "10535": 5.54636, "10540": 5.33585, "10545": 5.60668, "10550": 5.22712, "10555": 5.57587, "10560": 5.4946, "10565": 5.0012, "10570": 5.4399, "10575": 5.3784, "10580": 5.43564, "10585": 5.4542, "10590": 5.57402, "10595": 5.34212, "10600": 5.28003, "10605": 5.31644, "10610": 5.41609, "10615": 5.35182, "10620": 5.25325, "10625": 5.66255, "10630": 5.35798, "10635": 5.43567, "10640": 5.20932, "10645": 5.35234, "10650": 5.52953, "10655": 5.43858, "10660": 5.45034, "10665": 5.39874, "10670": 5.356, "10675": 5.39505, "10680": 5.26005, "10685": 5.30895, "10690": 5.67238, "10695": 5.3382, "10700": 5.61352, "10705": 5.36639, "10710": 5.2546, "10715": 4.7648, "10720": 5.36798, "10725": 5.08581, "10730": 5.38774, "10735": 5.2315, "10740": 5.50009, "10745": 5.31682, "10750": 4.95337, "10755": 5.46252, "10760": 5.3463, "10765": 5.39233, "10770": 5.26877, "10775": 5.36279, "10780": 5.40003, "10785": 5.13531, "10790": 5.09205, "10795": 5.41714, "10800": 5.24182, "10805": 5.37759, "10810": 5.1841, "10815": 5.20587, "10820": 5.45114, "10825": 5.54565, "10830": 5.24614, "10835": 5.36473, "10840": 5.19786, "10845": 5.18242, "10850": 5.5072, "10855": 5.30986, "10860": 5.40324, "10865": 5.52718, "10870": 5.68078, "10875": 5.53167, "10880": 5.12654, "10885": 5.38662, "10890": 5.39428, "10895": 5.24526, "10900": 5.30257, "10905": 5.38606, "10910": 5.46742, "10915": 5.36187, "10920": 5.4652, "10925": 5.31216, "10930": 5.27235, "10935": 5.52601, "10940": 5.4506, "10945": 5.45866, "10950": 5.04867, "10955": 5.18984, "10960": 5.27339, "10965": 5.26806, "10970": 5.1956, "10975": 5.28983, "10980": 4.98698, "10985": 5.25205, "10990": 5.28065, "10995": 5.32792, "11000": 5.06033, "11005": 5.24549, "11010": 5.37188, "11015": 5.19974, "11020": 5.24522, "11025": 5.08564, "11030": 5.3484, "11035": 5.22237, "11040": 5.34708, "11045": 5.07465, "11050": 5.27395, "11055": 5.5143, "11060": 5.3757, "11065": 5.27853, "11070": 5.26735, "11075": 5.35381, "11080": 5.13287, "11085": 5.22699, "11090": 5.28493, "11095": 5.19245, "11100": 5.34048, "11105": 5.28978, "11110": 5.24824, "11115": 5.31027, "11120": 4.82308, "11125": 5.3662, "11130": 5.54704, "11135": 5.19661, "11140": 5.17157, "11145": 5.3315, "11150": 5.59279, "11155": 5.27752, "11160": 5.03615, "11165": 5.30773, "11170": 5.28313, "11175": 5.21376, "11180": 5.01974, "11185": 5.11332, "11190": 5.4275, "11195": 5.08001, "11200": 5.05585, "11205": 5.27463, "11210": 5.24983, "11215": 5.51707, "11220": 5.10348, "11225": 5.34594, "11230": 5.19582, "11235": 5.23772, "11240": 5.1086, "11245": 5.47357, "11250": 5.38475, "11255": 5.17473, "11260": 5.3059, "11265": 4.97653, "11270": 5.26496, "11275": 5.265, "11280": 4.87211, "11285": 5.3036, "11290": 5.06537, "11295": 5.1824, "11300": 5.01714, "11305": 5.4564, "11310": 5.35135, "11315": 5.30915, "11320": 5.43378, "11325": 5.27055, "11330": 5.24777, "11335": 5.2721, "11340": 5.46113, "11345": 5.04206, "11350": 5.37593, "11355": 5.41239, "11360": 5.60221, "11365": 5.38167, "11370": 5.4196, "11375": 5.30141, "11380": 5.37361, "11385": 5.20055, "11390": 5.18606, "11395": 5.28461, "11400": 4.95284, "11405": 5.07589, "11410": 5.26203, "11415": 5.48228, "11420": 5.00538, "11425": 5.14265, "11430": 5.23302, "11435": 5.02339, "11440": 5.24131, "11445": 5.2604, "11450": 5.3917, "11455": 5.3178, "11460": 5.1791, "11465": 5.45179, "11470": 5.23893, "11475": 5.3124, "11480": 5.09538, "11485": 5.27877, "11490": 5.23733, "11495": 
5.5879, "11500": 5.28776, "11505": 5.11349, "11510": 5.17614, "11515": 5.09869, "11520": 5.22095, "11525": 5.2381, "11530": 5.07422, "11535": 5.30826, "11540": 5.13735, "11545": 5.16093, "11550": 5.14439, "11555": 5.36028, "11560": 5.27581, "11565": 5.08411, "11570": 5.09748, "11575": 5.13256, "11580": 5.5915, "11585": 5.27239, "11590": 5.0515, "11595": 5.28831, "11600": 5.22951, "11605": 5.29821, "11610": 5.1794, "11615": 5.27238, "11620": 5.07543, "11625": 5.2532, "11630": 5.16624, "11635": 5.48732, "11640": 4.97976, "11645": 5.09481, "11650": 5.07, "11655": 5.4163, "11660": 5.14729, "11665": 5.22332, "11670": 5.29066, "11675": 5.01149, "11680": 5.24745, "11685": 5.3955, "11690": 5.2877, "11695": 5.31718, "11700": 5.27337, "11705": 5.17844, "11710": 5.0163, "11715": 5.01964, "11720": 5.24626, "11725": 5.18448, "11730": 5.16792, "11735": 5.21466, "11740": 5.22501, "11745": 5.16252, "11750": 5.16248, "11755": 5.16407, "11760": 5.2844, "11765": 5.28437, "11770": 5.04897, "11775": 5.53847, "11780": 5.28538, "11785": 5.53002, "11790": 5.2765, "11795": 5.32103, "11800": 5.13019, "11805": 5.51958, "11810": 5.1034, "11815": 5.38569, "11820": 5.02823, "11825": 4.87016, "11830": 5.12661, "11835": 5.18867, "11840": 5.19562, "11845": 5.17948, "11850": 5.30315, "11855": 5.01859, "11860": 5.42594, "11865": 5.214, "11870": 5.4507, "11875": 5.04925, "11880": 5.28351, "11885": 4.99344, "11890": 5.27355, "11895": 4.94409, "11900": 5.33673, "11905": 5.16021, "11910": 5.36325, "11915": 5.11683, "11920": 5.27474, "11925": 5.14645, "11930": 5.14292, "11935": 5.10027, "11940": 4.93739, "11945": 5.37039, "11950": 5.11549, "11955": 5.03135, "11960": 4.90919, "11965": 5.11583, "11970": 5.20048, "11975": 4.99678, "11980": 5.04864, "11985": 5.16687, "11990": 5.12482, "11995": 5.12138, "12000": 5.08971, "12005": 5.04863, "12010": 5.23243, "12015": 4.9521, "12020": 5.26004, "12025": 5.12761, "12030": 4.93221, "12035": 4.90509, "12040": 5.11885, "12045": 5.35792, "12050": 5.33254, "12055": 4.95158, "12060": 5.24892, "12065": 5.25555, "12070": 5.161, "12075": 5.27294, "12080": 5.24702, "12085": 5.09961, "12090": 5.0043, "12095": 5.48174, "12100": 4.97703, "12105": 5.10722, "12110": 5.18597, "12115": 4.94918, "12120": 4.7691, "12125": 5.01242, "12130": 5.23206, "12135": 5.15475, "12140": 5.28888, "12145": 5.06497, "12150": 4.85407, "12155": 5.08306, "12160": 5.1481, "12165": 5.23872, "12170": 5.19311, "12175": 5.23483, "12180": 5.36383, "12185": 5.20117, "12190": 4.82771, "12195": 5.02846, "12200": 5.1913, "12205": 5.09558, "12210": 5.0232, "12215": 5.07948, "12220": 5.20316, "12225": 5.17347, "12230": 4.99102, "12235": 5.31941, "12240": 5.10751, "12245": 5.1069, "12250": 5.22126, "12255": 4.96663, "12260": 4.85554, "12265": 4.87446, "12270": 4.98725, "12275": 5.14349, "12280": 4.882, "12285": 4.92647, "12290": 5.17059, "12295": 5.25714, "12300": 5.48431, "12305": 5.1721, "12310": 5.27149, "12315": 5.03535, "12320": 5.00285, "12325": 5.28719, "12330": 5.2087, "12335": 5.37795, "12340": 4.96675, "12345": 5.3169, "12350": 4.93664, "12355": 5.1021, "12360": 5.01402, "12365": 4.77975, "12370": 5.10945, "12375": 4.87665, "12380": 5.21343, "12385": 5.22589, "12390": 5.09046, "12395": 5.15768, "12400": 5.13948, "12405": 5.32145, "12410": 5.16139, "12415": 5.30719, "12420": 5.39248, "12425": 5.24358, "12430": 4.92588, "12435": 5.02191, "12440": 4.98595, "12445": 5.24576, "12450": 5.08259, "12455": 5.068, "12460": 4.80591, "12465": 4.96585, "12470": 5.43363, "12475": 5.09508, "12480": 4.94249, "12485": 5.25178, "12490": 5.03435, 
"12495": 5.36914, "12500": 5.47981, "12505": 5.29396, "12510": 4.79058, "12515": 5.0575, "12520": 5.14624, "12525": 5.11066, "12530": 4.89231, "12535": 5.31974, "12540": 4.99776, "12545": 4.93234, "12550": 5.42742, "12555": 5.0198, "12560": 4.95947, "12565": 5.30194, "12570": 5.011, "12575": 5.15742, "12580": 4.9487, "12585": 5.24504, "12590": 4.81696, "12595": 5.20341, "12600": 5.21134, "12605": 5.10257, "12610": 5.21869, "12615": 5.14651, "12620": 5.32386, "12625": 5.02724, "12630": 5.11801, "12635": 5.30002, "12640": 4.92816, "12645": 5.29811, "12650": 4.97494, "12655": 5.0749, "12660": 5.13519, "12665": 5.07599, "12670": 5.01249, "12675": 5.32137, "12680": 4.90018, "12685": 4.95909, "12690": 5.22862, "12695": 4.88605, "12700": 5.00287, "12705": 5.13113, "12710": 4.94577, "12715": 4.98293, "12720": 4.9516, "12725": 5.04837, "12730": 4.94009, "12735": 4.8895, "12740": 5.11633, "12745": 4.78201, "12750": 4.80699, "12755": 5.05351, "12760": 4.73109, "12765": 5.18477, "12770": 5.01334, "12775": 5.05904, "12780": 5.25674, "12785": 5.08532, "12790": 5.03848, "12795": 5.07792, "12800": 5.26545, "12805": 4.7628, "12810": 5.0439, "12815": 4.87974, "12820": 4.89723, "12825": 5.11194, "12830": 4.99866, "12835": 5.2359, "12840": 5.049, "12845": 5.09335, "12850": 4.74596, "12855": 5.05245, "12860": 5.0862, "12865": 5.10011, "12870": 4.93434, "12875": 5.19139, "12880": 5.00966, "12885": 5.10951, "12890": 5.40403, "12895": 5.06375, "12900": 4.86895, "12905": 5.12256, "12910": 4.89725, "12915": 4.9582, "12920": 5.22713, "12925": 5.02956, "12930": 5.04264, "12935": 4.97126, "12940": 5.28037, "12945": 4.75445, "12950": 5.21552, "12955": 4.77142, "12960": 5.04675, "12965": 5.0651, "12970": 4.74546, "12975": 5.05552, "12980": 4.95216, "12985": 4.91902, "12990": 4.87467, "12995": 5.09789, "13000": 5.05835, "13005": 5.07946, "13010": 4.7969, "13015": 4.97847, "13020": 5.01629, "13025": 5.07514, "13030": 5.07913, "13035": 4.91507, "13040": 5.06975, "13045": 5.06224, "13050": 5.03068, "13055": 4.9495, "13060": 5.23862, "13065": 5.28355, "13070": 5.14095, "13075": 5.22139, "13080": 4.97788, "13085": 5.24489, "13090": 5.06377, "13095": 5.12901, "13100": 5.01727, "13105": 5.12282, "13110": 5.02922, "13115": 4.85394, "13120": 5.09122, "13125": 4.96556, "13130": 5.12758, "13135": 4.98896, "13140": 4.86273, "13145": 5.41116, "13150": 5.15503, "13155": 5.26178, "13160": 4.88213, "13165": 4.9631, "13170": 5.2502, "13175": 5.05872, "13180": 4.64755, "13185": 5.27215, "13190": 4.96128, "13195": 5.25958, "13200": 4.85133, "13205": 5.31655, "13210": 4.77255, "13215": 5.14284, "13220": 4.82247, "13225": 5.19207, "13230": 5.00187, "13235": 5.20956, "13240": 5.00193, "13245": 5.26294, "13250": 4.83095, "13255": 4.68528, "13260": 5.00094, "13265": 5.18161, "13270": 5.10141, "13275": 5.07983, "13280": 4.78838, "13285": 5.3192, "13290": 5.12347, "13295": 5.19099, "13300": 5.11485, "13305": 4.8425, "13310": 4.8633, "13315": 5.11963, "13320": 5.12848, "13325": 4.96432, "13330": 4.91523, "13335": 5.05602, "13340": 4.91728, "13345": 4.73624, "13350": 5.09215, "13355": 4.9043, "13360": 4.95717, "13365": 4.93766, "13370": 4.93088, "13375": 5.02886, "13380": 5.07135, "13385": 4.8992, "13390": 5.25579, "13395": 5.11737, "13400": 4.84342, "13405": 4.94273, "13410": 4.77126, "13415": 4.85958, "13420": 4.98369, "13425": 4.9368, "13430": 4.97047, "13435": 4.98269, "13440": 4.8267, "13445": 4.82577, "13450": 5.0193, "13455": 4.81057, "13460": 4.99756, "13465": 4.97396, "13470": 5.0439, "13475": 4.95065, "13480": 5.07169, "13485": 4.96041, 
"13490": 5.24316, "13495": 5.12133, "13500": 4.77398, "13505": 4.95219, "13510": 5.20853, "13515": 4.94971, "13520": 5.11479, "13525": 4.92216, "13530": 4.79978, "13535": 4.82208, "13540": 5.08482, "13545": 4.83079, "13550": 5.0817, "13555": 5.05558, "13560": 5.23019, "13565": 5.10773, "13570": 4.94405, "13575": 4.98796, "13580": 4.67198, "13585": 5.37584, "13590": 5.09156, "13595": 4.98963, "13600": 4.9818, "13605": 4.9783, "13610": 5.12268, "13615": 4.82499, "13620": 4.96206, "13625": 5.17516, "13630": 4.76413, "13635": 4.74155, "13640": 5.00152, "13645": 4.9424, "13650": 4.77218, "13655": 4.72743, "13660": 4.97736, "13665": 5.05784, "13670": 4.84851, "13675": 5.19533, "13680": 5.26388, "13685": 4.96761, "13690": 4.94108, "13695": 5.07516, "13700": 4.92774, "13705": 5.00406, "13710": 4.95611, "13715": 4.85472, "13720": 4.85383, "13725": 4.87021, "13730": 4.84225, "13735": 4.98071, "13740": 5.17429, "13745": 5.05733, "13750": 4.89094, "13755": 4.74427, "13760": 4.68823, "13765": 5.02159, "13770": 5.08746, "13775": 4.83846, "13780": 4.7199, "13785": 5.05337, "13790": 4.78727, "13795": 4.78073, "13800": 4.89405, "13805": 4.84388, "13810": 4.96631, "13815": 4.77642, "13820": 4.79512, "13825": 4.96673, "13830": 5.10604, "13835": 4.92983, "13840": 5.18486, "13845": 4.92708, "13850": 4.81692, "13855": 4.84876, "13860": 5.03127, "13865": 4.80171, "13870": 5.14411, "13875": 4.83777, "13880": 4.9312, "13885": 4.98524, "13890": 4.90116, "13895": 4.81853, "13900": 4.58189, "13905": 4.94366, "13910": 4.9113, "13915": 5.04097, "13920": 4.75519, "13925": 5.17589, "13930": 4.63194, "13935": 5.07544, "13940": 5.21085, "13945": 4.96179, "13950": 5.03088, "13955": 4.68391, "13960": 4.93965, "13965": 5.09069, "13970": 5.08755, "13975": 4.92924, "13980": 4.90532, "13985": 4.91967, "13990": 4.97455, "13995": 4.92017, "14000": 5.06041, "14005": 4.90539, "14010": 4.88075, "14015": 4.79011, "14020": 5.06071, "14025": 4.91145, "14030": 4.96213, "14035": 4.78011, "14040": 4.735, "14045": 4.8533, "14050": 4.88, "14055": 5.07218, "14060": 4.9295, "14065": 5.07337, "14070": 4.80278, "14075": 5.03373, "14080": 4.69606, "14085": 5.08593, "14090": 5.07326, "14095": 4.84672, "14100": 4.95517, "14105": 4.87061, "14110": 4.78297, "14115": 5.07097, "14120": 5.01941, "14125": 4.49577, "14130": 5.03963, "14135": 4.84274, "14140": 5.06925, "14145": 4.66266, "14150": 4.85154, "14155": 4.85915, "14160": 4.77968, "14165": 4.95106, "14170": 4.97778, "14175": 4.84399, "14180": 4.80436, "14185": 4.62534, "14190": 5.05832, "14195": 5.00631, "14200": 4.54448, "14205": 5.09607, "14210": 5.00415, "14215": 4.92459, "14220": 4.71498, "14225": 5.27886, "14230": 4.92925, "14235": 4.74359, "14240": 4.85047, "14245": 4.92229, "14250": 5.19985, "14255": 5.22076, "14260": 4.82777, "14265": 4.96885, "14270": 5.24391, "14275": 5.01143, "14280": 4.96848, "14285": 4.67474, "14290": 5.16947, "14295": 5.00669, "14300": 4.69678, "14305": 4.87495, "14310": 5.19659, "14315": 4.86804, "14320": 5.11845, "14325": 5.10999, "14330": 4.60944, "14335": 5.04777, "14340": 4.68584, "14345": 4.89273, "14350": 4.97276, "14355": 4.68719, "14360": 4.87797, "14365": 4.82528, "14370": 4.89646, "14375": 4.94693, "14380": 4.79239, "14385": 5.1627, "14390": 4.91135, "14395": 4.67264, "14400": 4.85019, "14405": 4.94017, "14410": 4.80152, "14415": 5.07025, "14420": 5.18188, "14425": 4.96092, "14430": 4.96924, "14435": 5.17383, "14440": 4.5748, "14445": 4.61065, "14450": 5.03842, "14455": 4.55462, "14460": 4.84223, "14465": 4.96105, "14470": 4.91723, "14475": 4.82672, 
"14480": 4.89362, "14485": 4.89321, "14490": 4.99969, "14495": 4.79335, "14500": 4.91907, "14505": 5.15574, "14510": 4.59256, "14515": 4.90456, "14520": 4.90639, "14525": 5.0542, "14530": 4.56362, "14535": 5.22447, "14540": 4.95248, "14545": 4.8606, "14550": 4.86175, "14555": 4.74854, "14560": 4.60689, "14565": 4.78215, "14570": 4.96199, "14575": 5.07844, "14580": 4.9694, "14585": 4.75212, "14590": 4.79211, "14595": 4.94406, "14600": 4.76458, "14605": 4.75717, "14610": 4.79664, "14615": 5.04495, "14620": 4.7861, "14625": 4.80258, "14630": 5.0808, "14635": 4.81526, "14640": 4.76814, "14645": 5.06811, "14650": 4.95967, "14655": 4.82524, "14660": 5.02712, "14665": 4.93405, "14670": 4.44961, "14675": 5.00453, "14680": 4.66128, "14685": 4.63629, "14690": 4.65834, "14695": 4.70234, "14700": 4.97001, "14705": 4.84886, "14710": 4.77302, "14715": 5.06561, "14720": 4.73985, "14725": 4.82684, "14730": 4.597, "14735": 4.66732, "14740": 4.95551, "14745": 4.91261, "14750": 4.98104, "14755": 4.91475, "14760": 5.13584, "14765": 4.98991, "14770": 4.94256, "14775": 4.49032, "14780": 4.78737, "14785": 4.83908, "14790": 4.72766, "14795": 4.79291, "14800": 4.69019, "14805": 4.64297, "14810": 4.92113, "14815": 4.58727, "14820": 4.57895, "14825": 4.63825, "14830": 5.11037, "14835": 4.49824, "14840": 5.0666, "14845": 5.14354, "14850": 4.49585, "14855": 4.47509, "14860": 5.02477, "14865": 4.73656, "14870": 5.03937, "14875": 4.78677, "14880": 4.97573, "14885": 4.94523, "14890": 4.7547, "14895": 4.711, "14900": 4.80398, "14905": 4.66544, "14910": 5.11436, "14915": 4.8928, "14920": 5.02061, "14925": 4.69705, "14930": 4.85289, "14935": 5.09179, "14940": 4.66701, "14945": 4.91429, "14950": 4.96155, "14955": 4.88416, "14960": 4.81647, "14965": 4.82959, "14970": 4.72983, "14975": 4.8859, "14980": 4.7443, "14985": 5.01679, "14990": 4.62507, "14995": 5.1501, "15000": 4.61726, "15005": 4.46448, "15010": 4.77228, "15015": 4.78616, "15020": 4.86662, "15025": 4.91126, "15030": 4.82187, "15035": 4.61415, "15040": 4.67773, "15045": 4.7885, "15050": 4.90907, "15055": 4.46094, "15060": 5.03479, "15065": 4.67986, "15070": 4.71613, "15075": 4.80371, "15080": 4.69624, "15085": 4.76377, "15090": 5.10905, "15095": 4.80407, "15100": 4.83957, "15105": 4.78592, "15110": 4.66904, "15115": 4.96919, "15120": 4.75555, "15125": 4.94584, "15130": 4.7302, "15135": 4.81139, "15140": 4.84251, "15145": 4.84664, "15150": 5.04692, "15155": 4.79099, "15160": 4.63616, "15165": 4.67299, "15170": 4.64371, "15175": 4.33728, "15180": 4.93077, "15185": 4.76129, "15190": 4.81275, "15195": 4.732, "15200": 5.00335, "15205": 4.68967, "15210": 5.00343, "15215": 4.99915, "15220": 4.91845, "15225": 4.88304, "15230": 4.56211, "15235": 4.90379, "15240": 4.70754, "15245": 4.79589, "15250": 4.57129, "15255": 5.02666, "15260": 4.63088, "15265": 4.8357, "15270": 4.65458, "15275": 5.09927, "15280": 4.84823, "15285": 4.71025, "15290": 4.88109, "15295": 5.11724, "15300": 4.62948, "15305": 4.87938, "15310": 4.76728, "15315": 4.64962, "15320": 4.91794, "15325": 5.11232, "15330": 4.74389, "15335": 5.06915, "15340": 4.76123, "15345": 4.73613, "15350": 4.57708, "15355": 4.86128, "15360": 4.61814, "15365": 4.91632, "15370": 4.86305, "15375": 4.91495, "15380": 4.56651, "15385": 4.66439, "15390": 4.88144, "15395": 4.53516, "15400": 4.82358, "15405": 4.51013, "15410": 4.59654, "15415": 4.7378, "15420": 4.98025, "15425": 4.78413, "15430": 4.6208, "15435": 4.64564, "15440": 4.88049, "15445": 4.83509, "15450": 4.60014, "15455": 5.06714, "15460": 4.63317, "15465": 4.86895, "15470": 
4.67476, "15475": 4.69375, "15480": 4.44911, "15485": 4.82973, "15490": 4.87481, "15495": 4.73048, "15500": 4.73209, "15505": 4.70153, "15510": 4.80897, "15515": 4.91657, "15520": 4.74986, "15525": 4.86181, "15530": 4.49376, "15535": 4.89904, "15540": 4.71343, "15545": 4.42941, "15550": 4.66939, "15555": 4.98194, "15560": 4.83922, "15565": 4.71441, "15570": 4.87976, "15575": 4.76321, "15580": 4.61043, "15585": 4.85345, "15590": 4.75028, "15595": 4.96181, "15600": 4.82499, "15605": 4.94746, "15610": 5.02294, "15615": 4.90828, "15620": 4.75538, "15625": 4.63064, "15630": 4.35107, "15635": 4.95641, "15640": 4.74162, "15645": 4.72694, "15650": 4.838, "15655": 4.89808, "15660": 4.89726, "15665": 4.86149, "15670": 4.68214, "15675": 4.61003, "15680": 4.91538, "15685": 4.67333, "15690": 4.99196, "15695": 4.94638, "15700": 4.87047, "15705": 4.70504, "15710": 4.95721, "15715": 4.8336, "15720": 4.71177, "15725": 4.66986, "15730": 4.80267, "15735": 4.5262, "15740": 4.89773, "15745": 4.76169, "15750": 4.89063, "15755": 4.80249, "15760": 4.58412, "15765": 4.78709, "15770": 4.73682, "15775": 4.89219, "15780": 4.74371, "15785": 4.80648, "15790": 4.94953, "15795": 4.77252, "15800": 5.00844, "15805": 4.97006, "15810": 4.77161, "15815": 4.52479, "15820": 4.49663, "15825": 5.21024, "15830": 4.70069, "15835": 4.7152, "15840": 4.83244, "15845": 5.05392, "15850": 4.65416, "15855": 4.78907, "15860": 4.87932, "15865": 4.59653, "15870": 4.67887, "15875": 4.82706, "15880": 4.81861, "15885": 4.55304, "15890": 4.83067, "15895": 4.67164, "15900": 4.89679, "15905": 4.73252, "15910": 4.67523, "15915": 5.10474, "15920": 4.71633, "15925": 4.98675, "15930": 4.72251, "15935": 4.76102, "15940": 4.65008, "15945": 4.72186, "15950": 4.59063, "15955": 4.59168, "15960": 4.97179, "15965": 4.43611, "15970": 4.76462, "15975": 4.75878, "15980": 4.51832, "15985": 4.75727, "15990": 4.42976, "15995": 4.90496, "16000": 4.73644, "16005": 4.75611, "16010": 4.77518, "16015": 4.74127, "16020": 4.96216, "16025": 4.52848, "16030": 5.10113, "16035": 4.76771, "16040": 4.9631, "16045": 4.59155, "16050": 4.69177, "16055": 4.21813, "16060": 4.79882, "16065": 5.03091, "16070": 4.26489, "16075": 4.57745, "16080": 4.82988, "16085": 4.51359, "16090": 4.8553, "16095": 4.38598, "16100": 4.76893, "16105": 4.74054, "16110": 4.59574, "16115": 4.82994, "16120": 4.61522, "16125": 4.46617, "16130": 4.65964, "16135": 4.42344, "16140": 4.9383, "16145": 4.75281, "16150": 4.90806, "16155": 4.58389, "16160": 5.01088, "16165": 4.67459, "16170": 5.24962, "16175": 4.71061, "16180": 4.85859, "16185": 4.78086, "16190": 4.72136, "16195": 4.86534, "16200": 4.63469, "16205": 4.90568, "16210": 4.87132, "16215": 4.66195, "16220": 4.73631, "16225": 4.71388, "16230": 5.01414, "16235": 4.69234, "16240": 4.57211, "16245": 4.75379, "16250": 4.88843, "16255": 4.91551, "16260": 4.72797, "16265": 4.78156, "16270": 4.13823, "16275": 4.7422, "16280": 4.75245, "16285": 4.64212, "16290": 4.74042, "16295": 4.61102, "16300": 4.84722, "16305": 4.69841, "16310": 4.52174, "16315": 4.76673, "16320": 4.98793, "16325": 4.46588, "16330": 4.33829, "16335": 4.77882, "16340": 4.69208, "16345": 4.51736, "16350": 4.60563, "16355": 4.69158, "16360": 4.41956, "16365": 4.79952, "16370": 4.64817, "16375": 4.59589, "16380": 4.69301, "16385": 4.81637, "16390": 4.57045, "16395": 4.77635, "16400": 4.61618, "16405": 4.71955, "16410": 4.63093, "16415": 4.83192, "16420": 4.70967, "16425": 4.83068, "16430": 4.71843, "16435": 4.51644, "16440": 4.79729, "16445": 4.78431, "16450": 4.60779, "16455": 4.66745, "16460": 
4.70261, "16465": 4.73449, "16470": 4.67175, "16475": 4.64561, "16480": 4.82412, "16485": 5.03595, "16490": 4.57843, "16495": 4.5867, "16500": 4.89045, "16505": 5.02283, "16510": 4.8717, "16515": 4.76853, "16520": 4.5538, "16525": 4.65332, "16530": 4.79114, "16535": 4.9551, "16540": 4.65485, "16545": 4.73293, "16550": 4.55559, "16555": 4.57983, "16560": 4.6274, "16565": 4.55136, "16570": 4.58413, "16575": 4.8398, "16580": 4.77564, "16585": 4.56099, "16590": 4.81681, "16595": 4.64738, "16600": 4.72925, "16605": 4.46543, "16610": 4.91362, "16615": 4.80961, "16620": 4.99331, "16625": 4.744, "16630": 4.66241, "16635": 4.91067, "16640": 4.95584, "16645": 4.48154, "16650": 4.3473, "16655": 4.76362, "16660": 4.70514, "16665": 4.70789, "16670": 4.66806, "16675": 4.80162, "16680": 4.57499, "16685": 4.61219, "16690": 4.99249, "16695": 4.85615, "16700": 4.60476, "16705": 4.87509, "16710": 4.75305, "16715": 4.50295, "16720": 4.72607, "16725": 4.61441, "16730": 4.53768, "16735": 4.56662, "16740": 4.58874, "16745": 4.88318, "16750": 4.07585, "16755": 4.30116, "16760": 4.63713, "16765": 4.50832, "16770": 4.90641, "16775": 4.63917, "16780": 4.85792, "16785": 4.43596, "16790": 4.58465, "16795": 4.72904, "16800": 4.65113, "16805": 4.53219, "16810": 4.47866, "16815": 4.3967, "16820": 4.59255, "16825": 4.4347, "16830": 4.83586, "16835": 4.71485, "16840": 4.71498, "16845": 4.74316, "16850": 4.79782, "16855": 4.66274, "16860": 4.82589, "16865": 4.67853, "16870": 4.47921, "16875": 4.56143, "16880": 4.71357, "16885": 4.85956, "16890": 4.54368, "16895": 4.57018, "16900": 4.73456, "16905": 4.64634, "16910": 4.5792, "16915": 4.54175, "16920": 4.87765, "16925": 4.7918, "16930": 4.70895, "16935": 4.90053, "16940": 4.56292, "16945": 4.61775, "16950": 4.68105, "16955": 4.35182, "16960": 4.60685, "16965": 4.63398, "16970": 4.53511, "16975": 4.64009, "16980": 4.72335, "16985": 4.47582, "16990": 4.51653, "16995": 4.79228, "17000": 4.81006, "17005": 4.55551, "17010": 4.76457, "17015": 4.57893, "17020": 4.89882, "17025": 4.54143, "17030": 4.6781, "17035": 4.64487, "17040": 4.83278, "17045": 4.62344, "17050": 4.46731, "17055": 4.57201, "17060": 4.79965, "17065": 4.5098, "17070": 4.71259, "17075": 4.68352, "17080": 4.88659, "17085": 4.51092, "17090": 4.3399, "17095": 4.44552, "17100": 4.87553, "17105": 4.76108, "17110": 4.75337, "17115": 4.88472, "17120": 4.70485, "17125": 4.58564, "17130": 4.6705, "17135": 4.51344, "17140": 4.61783, "17145": 4.78291, "17150": 4.69412, "17155": 4.62375, "17160": 4.26359, "17165": 4.73242, "17170": 4.44998, "17175": 4.90387, "17180": 4.48798, "17185": 4.63893, "17190": 4.82657, "17195": 5.01016, "17200": 4.67327, "17205": 4.80176, "17210": 4.57979, "17215": 4.70195, "17220": 4.47825, "17225": 4.56226, "17230": 4.92622, "17235": 4.57845, "17240": 4.64112, "17245": 4.93738, "17250": 4.84863, "17255": 4.69749, "17260": 4.69931, "17265": 4.57799, "17270": 5.10435, "17275": 4.80572, "17280": 4.71771, "17285": 4.57033, "17290": 4.80859, "17295": 4.54175, "17300": 4.63263, "17305": 4.89761, "17310": 4.71247, "17315": 4.63245, "17320": 4.52882, "17325": 4.4364, "17330": 4.51325, "17335": 4.52212, "17340": 4.66336, "17345": 4.82936, "17350": 4.75599, "17355": 4.42479, "17360": 4.71834, "17365": 4.75938, "17370": 4.48265, "17375": 4.66737, "17380": 4.62505, "17385": 4.65761, "17390": 4.66028, "17395": 4.59545, "17400": 4.59905, "17405": 4.90184, "17410": 4.6109, "17415": 5.06896, "17420": 4.46752, "17425": 4.5227, "17430": 4.67853, "17435": 4.75048, "17440": 4.74094, "17445": 4.83501, "17450": 4.62396, 
"17455": 4.5253, "17460": 4.51618, "17465": 4.57263, "17470": 4.57913, "17475": 4.71038, "17480": 4.6467, "17485": 4.74419, "17490": 4.70918, "17495": 4.4527, "17500": 4.42808, "17505": 4.61633, "17510": 4.3621, "17515": 4.67811, "17520": 4.42217, "17525": 4.68344, "17530": 4.79629, "17535": 4.47512, "17540": 4.77905, "17545": 4.79439, "17550": 4.89005, "17555": 4.62477, "17560": 4.37778, "17565": 4.43796, "17570": 4.53317, "17575": 4.79207, "17580": 4.62344, "17585": 4.56632, "17590": 4.78663, "17595": 4.20756, "17600": 4.33947, "17605": 4.77035, "17610": 4.40628, "17615": 4.49643, "17620": 4.69887, "17625": 4.48489, "17630": 4.59626, "17635": 4.85978, "17640": 4.68308, "17645": 4.49584, "17650": 4.58199, "17655": 4.70881, "17660": 4.36156, "17665": 4.54974, "17670": 4.88606, "17675": 4.85542, "17680": 4.83807, "17685": 4.72655, "17690": 4.87652, "17695": 4.55257, "17700": 4.4832, "17705": 4.79245, "17710": 4.66351, "17715": 4.8224, "17720": 4.90719, "17725": 4.3819, "17730": 4.75448, "17735": 4.50075, "17740": 4.6142, "17745": 4.66771, "17750": 4.29231, "17755": 4.71925, "17760": 4.43914, "17765": 4.57809, "17770": 4.58769, "17775": 4.55025, "17780": 4.71631, "17785": 4.49097, "17790": 4.62366, "17795": 4.88061, "17800": 4.56311, "17805": 4.7476, "17810": 4.22324, "17815": 4.88838, "17820": 4.69264, "17825": 4.80549, "17830": 4.7681, "17835": 4.63043, "17840": 4.80906, "17845": 4.63118, "17850": 4.62579, "17855": 4.69438, "17860": 4.77084, "17865": 4.75619, "17870": 4.68389, "17875": 4.76077, "17880": 4.73793, "17885": 4.69999, "17890": 4.63289, "17895": 4.36349, "17900": 4.2935, "17905": 4.62394, "17910": 4.70538, "17915": 4.46943, "17920": 4.57111, "17925": 4.50366, "17930": 4.82982, "17935": 4.62689, "17940": 4.62205, "17945": 4.51045, "17950": 4.75139, "17955": 4.48931, "17960": 4.51317, "17965": 4.53564, "17970": 4.57989, "17975": 4.46925, "17980": 4.41075, "17985": 4.59817, "17990": 4.64773, "17995": 4.9858, "18000": 4.54267, "18005": 4.62839, "18010": 4.75788, "18015": 4.67409, "18020": 4.60056, "18025": 4.78308, "18030": 4.5976, "18035": 4.42175, "18040": 4.71128, "18045": 4.6728, "18050": 4.69279, "18055": 4.31758, "18060": 4.67305, "18065": 4.59785, "18070": 4.78982, "18075": 4.61025, "18080": 4.24851, "18085": 4.67895, "18090": 4.54553, "18095": 4.67201, "18100": 4.85044, "18105": 4.63047, "18110": 4.74025, "18115": 4.62119, "18120": 4.60827, "18125": 4.74019, "18130": 4.67008, "18135": 4.81161, "18140": 4.64753, "18145": 4.369, "18150": 4.62485, "18155": 4.46126, "18160": 4.84904, "18165": 4.51582, "18170": 4.53564, "18175": 4.64274, "18180": 4.55416, "18185": 4.58186, "18190": 4.48279, "18195": 4.59479, "18200": 4.74886, "18205": 4.49389, "18210": 4.41001, "18215": 4.84476, "18220": 4.39071, "18225": 4.71667, "18230": 4.49112, "18235": 4.62935, "18240": 4.79786, "18245": 4.60775, "18250": 4.63901, "18255": 4.6543, "18260": 4.58442, "18265": 4.51652, "18270": 4.80473, "18275": 4.48737, "18280": 4.56113, "18285": 4.89253, "18290": 4.64952, "18295": 4.81316, "18300": 4.60972, "18305": 4.33753, "18310": 4.69129, "18315": 4.18635, "18320": 4.28598, "18325": 4.50003, "18330": 4.46752, "18335": 4.76026, "18340": 5.00781, "18345": 4.58459, "18350": 4.60438, "18355": 4.52475, "18360": 4.49551, "18365": 4.44677, "18370": 4.60457, "18375": 4.50868, "18380": 4.67696, "18385": 4.62345, "18390": 4.74967, "18395": 4.65603, "18400": 4.5379, "18405": 4.65347, "18410": 4.52271, "18415": 4.5702, "18420": 4.63743, "18425": 4.64645, "18430": 4.75553, "18435": 4.24213, "18440": 4.44301, "18445": 
4.7483, "18450": 4.39981, "18455": 4.63998, "18460": 4.31669, "18465": 4.54673, "18470": 4.28561, "18475": 4.87112, "18480": 4.56955, "18485": 4.57785, "18490": 4.76414, "18495": 4.6237, "18500": 4.73452, "18505": 4.7449, "18510": 4.47383, "18515": 4.36314, "18520": 4.60693, "18525": 4.35145, "18530": 4.74158, "18535": 4.74229, "18540": 4.44775, "18545": 4.6846, "18550": 4.90053, "18555": 4.70875, "18560": 4.31332, "18565": 4.59618, "18570": 4.67911, "18575": 4.55279, "18580": 4.82963, "18585": 4.5419, "18590": 4.81632, "18595": 4.64786, "18600": 4.36163, "18605": 4.73325, "18610": 4.51279, "18615": 4.69515, "18620": 4.36364, "18625": 4.42725, "18630": 4.33219, "18635": 4.40085, "18640": 4.856, "18645": 4.64445, "18650": 4.4747, "18655": 4.46273, "18660": 4.71444, "18665": 4.61387, "18670": 4.52895, "18675": 4.54565, "18680": 4.33753, "18685": 4.43054, "18690": 4.60592, "18695": 4.52619, "18700": 4.74638, "18705": 4.42479, "18710": 4.32703, "18715": 4.4658, "18720": 4.5123, "18725": 4.57057, "18730": 4.47144, "18735": 4.36268, "18740": 4.57819, "18745": 4.44156, "18750": 4.58138, "18755": 4.50182, "18760": 4.75272, "18765": 4.63178, "18770": 4.65315, "18775": 4.26451, "18780": 4.47413, "18785": 4.90897, "18790": 4.48637, "18795": 4.80837, "18800": 4.6657, "18805": 4.28007, "18810": 4.7012, "18815": 4.71722, "18820": 4.54938, "18825": 4.79182, "18830": 4.56919, "18835": 4.59487, "18840": 4.44581, "18845": 4.56059, "18850": 4.72333, "18855": 4.44977, "18860": 4.52308, "18865": 4.75003, "18870": 4.60174, "18875": 4.40419, "18880": 4.78091, "18885": 4.62416, "18890": 4.5369, "18895": 4.54541, "18900": 4.5916, "18905": 4.56475, "18910": 4.62544, "18915": 4.74832, "18920": 4.4313, "18925": 4.26308, "18930": 4.64015, "18935": 4.71589, "18940": 4.5718, "18945": 4.67173, "18950": 4.5568, "18955": 4.72252, "18960": 4.83922, "18965": 4.04581, "18970": 4.24623, "18975": 4.75319, "18980": 4.67134, "18985": 4.38808, "18990": 4.64328, "18995": 4.78835, "19000": 4.61253, "19005": 4.47447, "19010": 4.58417, "19015": 4.49477, "19020": 4.60018, "19025": 4.60011, "19030": 4.46917, "19035": 4.51305, "19040": 4.58187, "19045": 4.74315, "19050": 4.43168, "19055": 4.38998, "19060": 4.50199, "19065": 4.80845, "19070": 4.65877, "19075": 4.60243, "19080": 4.73796, "19085": 4.52475, "19090": 4.57921, "19095": 4.5585, "19100": 4.656, "19105": 4.80028, "19110": 4.4344, "19115": 4.43435, "19120": 4.13201, "19125": 4.37658, "19130": 4.70057, "19135": 4.53089, "19140": 4.58025, "19145": 4.59107, "19150": 4.53357, "19155": 4.37327, "19160": 4.65978, "19165": 4.64166, "19170": 4.42109, "19175": 4.36964, "19180": 4.74881, "19185": 4.70015, "19190": 4.52469, "19195": 4.70496, "19200": 4.44202, "19205": 4.43192, "19210": 4.6448, "19215": 4.49806, "19220": 4.58907, "19225": 4.68636, "19230": 4.31844, "19235": 4.6502, "19240": 4.58176, "19245": 4.59193, "19250": 4.26414, "19255": 4.63363, "19260": 4.5978, "19265": 4.44427, "19270": 4.62637, "19275": 4.42743, "19280": 4.58868, "19285": 4.71063, "19290": 4.43136, "19295": 4.64557, "19300": 4.5755, "19305": 4.72014, "19310": 4.2826, "19315": 4.22894, "19320": 4.83209, "19325": 4.46836, "19330": 4.7038, "19335": 4.35192, "19340": 4.64478, "19345": 4.5561, "19350": 4.67583, "19355": 4.62954, "19360": 4.58004, "19365": 4.87397, "19370": 4.773, "19375": 4.6667, "19380": 4.40798, "19385": 4.3882, "19390": 4.5454, "19395": 4.41139, "19400": 4.6563, "19405": 4.74907, "19410": 4.47842, "19415": 4.66354, "19420": 4.64515, "19425": 4.77727, "19430": 4.67154, "19435": 4.67746, "19440": 
4.51631, "19445": 4.34048, "19450": 4.19112, "19455": 4.57009, "19460": 4.52358, "19465": 4.63305, "19470": 4.59146, "19475": 4.25064, "19480": 4.89052, "19485": 4.49461, "19490": 4.35244, "19495": 4.62929, "19500": 4.44557, "19505": 4.61865, "19510": 4.47805, "19515": 4.52399, "19520": 4.64291, "19525": 4.59414, "19530": 4.40508, "19535": 4.53949, "19540": 4.71058, "19545": 4.68448, "19550": 4.52389, "19555": 4.4384, "19560": 4.81034, "19565": 4.91339, "19570": 4.52993, "19575": 4.71901, "19580": 4.77623, "19585": 4.41676, "19590": 4.34956, "19595": 4.68046, "19600": 4.40916, "19605": 4.77361, "19610": 4.7495, "19615": 4.41667, "19620": 4.79909, "19625": 4.19617, "19630": 4.73191, "19635": 4.6578, "19640": 4.60394, "19645": 4.55133, "19650": 4.31026, "19655": 4.59461, "19660": 4.21976, "19665": 4.67517, "19670": 4.5575, "19675": 4.4969, "19680": 4.43156, "19685": 4.64169, "19690": 4.83713, "19695": 4.36072, "19700": 4.65441, "19705": 4.53829, "19710": 4.6117, "19715": 4.54582, "19720": 4.47752, "19725": 4.63546, "19730": 4.57761, "19735": 4.44372, "19740": 4.52101, "19745": 4.55598, "19750": 4.47943, "19755": 4.37928, "19760": 4.38406, "19765": 4.36144, "19770": 4.61172, "19775": 4.42726, "19780": 4.48209, "19785": 4.85932, "19790": 4.5821, "19795": 4.6069, "19800": 4.55334, "19805": 4.457, "19810": 4.72266, "19815": 4.58852, "19820": 4.88717, "19825": 4.54052, "19830": 4.83609, "19835": 4.81761, "19840": 4.37338, "19845": 4.74161, "19850": 4.76964, "19855": 4.46719, "19860": 4.45521, "19865": 4.51713, "19870": 4.5979, "19875": 4.46938, "19880": 4.50257, "19885": 4.69777, "19890": 4.5317, "19895": 4.48675, "19900": 4.44091, "19905": 4.47098, "19910": 4.73032, "19915": 4.30771, "19920": 4.56773, "19925": 4.42538, "19930": 4.33667, "19935": 4.87762, "19940": 4.71203, "19945": 4.5584, "19950": 4.80008, "19955": 4.50588, "19960": 4.43657, "19965": 4.38298, "19970": 4.37515, "19975": 4.50591, "19980": 4.64319, "19985": 4.49542, "19990": 4.64373, "19995": 4.30228, "20000": 4.81103}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": 146450944.0, "5": 146582528.0, "10": 146451456.0, "15": 145796096.0, "20": 224810496.0, "25": 224286208.0, "30": 224286208.0, "35": 224286208.0, "40": 224286208.0, "45": 224286208.0, "50": 224286208.0, "55": 224286208.0, "60": 224286208.0, "65": 224286208.0, "70": 224286208.0, "75": 224810496.0, "80": 224810496.0, "85": 224286208.0, "90": 224286208.0, "95": 224286208.0, "100": 225334784.0, "105": 224286208.0, "110": 224286208.0, "115": 224286208.0, "120": 224286208.0, "125": 224810496.0, "130": 224286208.0, "135": 224286208.0, "140": 224286208.0, "145": 224286208.0, "150": 224286208.0, "155": 224286208.0, "160": 224286208.0, "165": 224286208.0, "170": 224286208.0, "175": 224286208.0, "180": 224810496.0, "185": 224810496.0, "190": 224810496.0, "195": 224286208.0, "200": 224286208.0, "205": 224286208.0, "210": 224286208.0, "215": 224810496.0, "220": 224286208.0, "225": 224286208.0, "230": 224286208.0, "235": 224286208.0, "240": 224286208.0, "245": 224810496.0, "250": 224286208.0, "255": 224286208.0, "260": 224286208.0, "265": 224286208.0, "270": 224286208.0, "275": 224286208.0, "280": 224286208.0, "285": 224286208.0, "290": 224286208.0, "295": 224286208.0, "300": 224286208.0, "305": 224286208.0, "310": 224286208.0, "315": 224810496.0, "320": 224286208.0, "325": 224286208.0, "330": 224286208.0, "335": 224286208.0, "340": 224810496.0, "345": 224286208.0, "350": 224810496.0, "355": 224286208.0, "360": 224286208.0, "365": 
224286208.0, "370": 224286208.0, "375": 224286208.0, "380": 224286208.0, "385": 224286208.0, "390": 224810496.0, "395": 224286208.0, "400": 224286208.0, "405": 224286208.0, "410": 224286208.0, "415": 224286208.0, "420": 224286208.0, "425": 224286208.0, "430": 224286208.0, "435": 224286208.0, "440": 224810496.0, "445": 224810496.0, "450": 224286208.0, "455": 224286208.0, "460": 224286208.0, "465": 224286208.0, "470": 224810496.0, "475": 224810496.0, "480": 224286208.0, "485": 224810496.0, "490": 224286208.0, "495": 224286208.0, "500": 224286208.0, "505": 224286208.0, "510": 224286208.0, "515": 224286208.0, "520": 224286208.0, "525": 224286208.0, "530": 224286208.0, "535": 224286208.0, "540": 224286208.0, "545": 224286208.0, "550": 224286208.0, "555": 224286208.0, "560": 224286208.0, "565": 224286208.0, "570": 224286208.0, "575": 224286208.0, "580": 224286208.0, "585": 224286208.0, "590": 224810496.0, "595": 224286208.0, "600": 224810496.0, "605": 224286208.0, "610": 224286208.0, "615": 224286208.0, "620": 224286208.0, "625": 224286208.0, "630": 224286208.0, "635": 224286208.0, "640": 224286208.0, "645": 224286208.0, "650": 224286208.0, "655": 224286208.0, "660": 224810496.0, "665": 224286208.0, "670": 224810496.0, "675": 224286208.0, "680": 224286208.0, "685": 225334784.0, "690": 224286208.0, "695": 224286208.0, "700": 224286208.0, "705": 224286208.0, "710": 224286208.0, "715": 224286208.0, "720": 224286208.0, "725": 224286208.0, "730": 224286208.0, "735": 224810496.0, "740": 224810496.0, "745": 225334784.0, "750": 224286208.0, "755": 224286208.0, "760": 224286208.0, "765": 224286208.0, "770": 224810496.0, "775": 224810496.0, "780": 224286208.0, "785": 224286208.0, "790": 224286208.0, "795": 224286208.0, "800": 224286208.0, "805": 224286208.0, "810": 224286208.0, "815": 224810496.0, "820": 224286208.0, "825": 224286208.0, "830": 224286208.0, "835": 224286208.0, "840": 224810496.0, "845": 224286208.0, "850": 224286208.0, "855": 224286208.0, "860": 224286208.0, "865": 224286208.0, "870": 224286208.0, "875": 224286208.0, "880": 225334784.0, "885": 224286208.0, "890": 224810496.0, "895": 224286208.0, "900": 224286208.0, "905": 224286208.0, "910": 224286208.0, "915": 224286208.0, "920": 224286208.0, "925": 224286208.0, "930": 224810496.0, "935": 224286208.0, "940": 224286208.0, "945": 224286208.0, "950": 224286208.0, "955": 224286208.0, "960": 224286208.0, "965": 224286208.0, "970": 224286208.0, "975": 224286208.0, "980": 224286208.0, "985": 224286208.0, "990": 224286208.0, "995": 224810496.0, "1000": 224286208.0, "1005": 224810496.0, "1010": 224286208.0, "1015": 224286208.0, "1020": 224286208.0, "1025": 224286208.0, "1030": 224286208.0, "1035": 224286208.0, "1040": 224810496.0, "1045": 224286208.0, "1050": 224810496.0, "1055": 224286208.0, "1060": 225334784.0, "1065": 224286208.0, "1070": 224286208.0, "1075": 224286208.0, "1080": 224810496.0, "1085": 224286208.0, "1090": 224286208.0, "1095": 224810496.0, "1100": 224286208.0, "1105": 224286208.0, "1110": 224286208.0, "1115": 224286208.0, "1120": 225334784.0, "1125": 224286208.0, "1130": 224286208.0, "1135": 224810496.0, "1140": 224810496.0, "1145": 224286208.0, "1150": 224679424.0, "1155": 224286208.0, "1160": 224810496.0, "1165": 224810496.0, "1170": 224286208.0, "1175": 224286208.0, "1180": 224286208.0, "1185": 224286208.0, "1190": 224286208.0, "1195": 224286208.0, "1200": 224286208.0, "1205": 224286208.0, "1210": 224286208.0, "1215": 224286208.0, "1220": 224286208.0, "1225": 224286208.0, "1230": 224810496.0, "1235": 224286208.0, "1240": 
224286208.0, "1245": 224286208.0, "1250": 224286208.0, "1255": 224286208.0, "1260": 224286208.0, "1265": 224810496.0, "1270": 224286208.0, "1275": 224286208.0, "1280": 224286208.0, "1285": 224286208.0, "1290": 224286208.0, "1295": 224286208.0, "1300": 224286208.0, "1305": 224286208.0, "1310": 224810496.0, "1315": 224286208.0, "1320": 224286208.0, "1325": 224286208.0, "1330": 224286208.0, "1335": 224286208.0, "1340": 224286208.0, "1345": 224286208.0, "1350": 224286208.0, "1355": 224286208.0, "1360": 224286208.0, "1365": 224286208.0, "1370": 224286208.0, "1375": 224286208.0, "1380": 224286208.0, "1385": 224286208.0, "1390": 224286208.0, "1395": 224286208.0, "1400": 224286208.0, "1405": 224286208.0, "1410": 224286208.0, "1415": 224286208.0, "1420": 224286208.0, "1425": 224286208.0, "1430": 224286208.0, "1435": 224286208.0, "1440": 224810496.0, "1445": 224286208.0, "1450": 224286208.0, "1455": 224286208.0, "1460": 224286208.0, "1465": 224286208.0, "1470": 224286208.0, "1475": 224286208.0, "1480": 224286208.0, "1485": 224286208.0, "1490": 224286208.0, "1495": 224286208.0, "1500": 224286208.0, "1505": 224286208.0, "1510": 224286208.0, "1515": 224286208.0, "1520": 224286208.0, "1525": 224286208.0, "1530": 224286208.0, "1535": 224286208.0, "1540": 224286208.0, "1545": 224286208.0, "1550": 224286208.0, "1555": 224286208.0, "1560": 224286208.0, "1565": 224286208.0, "1570": 224286208.0, "1575": 224286208.0, "1580": 224286208.0, "1585": 224286208.0, "1590": 224679424.0, "1595": 224286208.0, "1600": 224286208.0, "1605": 224286208.0, "1610": 224286208.0, "1615": 224286208.0, "1620": 224286208.0, "1625": 224286208.0, "1630": 224286208.0, "1635": 224286208.0, "1640": 224286208.0, "1645": 224286208.0, "1650": 224286208.0, "1655": 224286208.0, "1660": 224286208.0, "1665": 224286208.0, "1670": 224286208.0, "1675": 224286208.0, "1680": 224286208.0, "1685": 224286208.0, "1690": 224286208.0, "1695": 224286208.0, "1700": 224286208.0, "1705": 224286208.0, "1710": 224286208.0, "1715": 224286208.0, "1720": 224286208.0, "1725": 224286208.0, "1730": 224286208.0, "1735": 224286208.0, "1740": 224286208.0, "1745": 224286208.0, "1750": 224286208.0, "1755": 224286208.0, "1760": 224286208.0, "1765": 224286208.0, "1770": 224286208.0, "1775": 224286208.0, "1780": 224286208.0, "1785": 224286208.0, "1790": 224286208.0, "1795": 224286208.0, "1800": 224286208.0, "1805": 224286208.0, "1810": 224286208.0, "1815": 224286208.0, "1820": 224286208.0, "1825": 224286208.0, "1830": 224286208.0, "1835": 225334784.0, "1840": 224286208.0, "1845": 224286208.0, "1850": 225334784.0, "1855": 224286208.0, "1860": 224286208.0, "1865": 224810496.0, "1870": 224679424.0, "1875": 224286208.0, "1880": 224286208.0, "1885": 224286208.0, "1890": 224286208.0, "1895": 224286208.0, "1900": 224286208.0, "1905": 224286208.0, "1910": 224286208.0, "1915": 224679424.0, "1920": 224286208.0, "1925": 224286208.0, "1930": 224286208.0, "1935": 225334784.0, "1940": 224286208.0, "1945": 224286208.0, "1950": 224286208.0, "1955": 224286208.0, "1960": 224810496.0, "1965": 224286208.0, "1970": 224286208.0, "1975": 224286208.0, "1980": 224286208.0, "1985": 224286208.0, "1990": 225334784.0, "1995": 224286208.0, "2000": 224810496.0, "2005": 224286208.0, "2010": 224286208.0, "2015": 224286208.0, "2020": 224286208.0, "2025": 224286208.0, "2030": 224286208.0, "2035": 224286208.0, "2040": 224286208.0, "2045": 224286208.0, "2050": 224286208.0, "2055": 224286208.0, "2060": 224286208.0, "2065": 224286208.0, "2070": 224286208.0, "2075": 224286208.0, "2080": 224286208.0, "2085": 
224286208.0, "2090": 224286208.0, "2095": 224286208.0, "2100": 224286208.0, "2105": 224286208.0, "2110": 224286208.0, "2115": 224286208.0, "2120": 224286208.0, "2125": 224286208.0, "2130": 224286208.0, "2135": 224286208.0, "2140": 224286208.0, "2145": 224286208.0, "2150": 224679424.0, "2155": 224286208.0, "2160": 224286208.0, "2165": 224286208.0, "2170": 224286208.0, "2175": 224286208.0, "2180": 224286208.0, "2185": 224286208.0, "2190": 224286208.0, "2195": 224810496.0, "2200": 224286208.0, "2205": 224286208.0, "2210": 224286208.0, "2215": 224286208.0, "2220": 224286208.0, "2225": 224286208.0, "2230": 224286208.0, "2235": 225334784.0, "2240": 224286208.0, "2245": 224286208.0, "2250": 224286208.0, "2255": 224679424.0, "2260": 224286208.0, "2265": 224286208.0, "2270": 224286208.0, "2275": 224286208.0, "2280": 224286208.0, "2285": 224286208.0, "2290": 224286208.0, "2295": 224286208.0, "2300": 224286208.0, "2305": 225334784.0, "2310": 224286208.0, "2315": 225334784.0, "2320": 224286208.0, "2325": 224286208.0, "2330": 224286208.0, "2335": 224810496.0, "2340": 224286208.0, "2345": 224286208.0, "2350": 224286208.0, "2355": 224286208.0, "2360": 224286208.0, "2365": 224679424.0, "2370": 224810496.0, "2375": 224286208.0, "2380": 224286208.0, "2385": 224286208.0, "2390": 224286208.0, "2395": 224286208.0, "2400": 224286208.0, "2405": 224810496.0, "2410": 224286208.0, "2415": 224286208.0, "2420": 224286208.0, "2425": 224810496.0, "2430": 224286208.0, "2435": 225334784.0, "2440": 225334784.0, "2445": 224286208.0, "2450": 224286208.0, "2455": 224286208.0, "2460": 224286208.0, "2465": 224810496.0, "2470": 224286208.0, "2475": 224286208.0, "2480": 224810496.0, "2485": 224286208.0, "2490": 224286208.0, "2495": 224286208.0, "2500": 224286208.0, "2505": 225334784.0, "2510": 224286208.0, "2515": 224286208.0, "2520": 224286208.0, "2525": 224286208.0, "2530": 224286208.0, "2535": 224286208.0, "2540": 224286208.0, "2545": 224286208.0, "2550": 224286208.0, "2555": 224286208.0, "2560": 224286208.0, "2565": 224286208.0, "2570": 224286208.0, "2575": 224286208.0, "2580": 224286208.0, "2585": 224286208.0, "2590": 224286208.0, "2595": 224286208.0, "2600": 224286208.0, "2605": 224810496.0, "2610": 224810496.0, "2615": 224810496.0, "2620": 224286208.0, "2625": 224286208.0, "2630": 224286208.0, "2635": 224286208.0, "2640": 224286208.0, "2645": 224286208.0, "2650": 224286208.0, "2655": 224810496.0, "2660": 224286208.0, "2665": 224286208.0, "2670": 224286208.0, "2675": 224286208.0, "2680": 224810496.0, "2685": 224286208.0, "2690": 224286208.0, "2695": 224286208.0, "2700": 224286208.0, "2705": 224286208.0, "2710": 224286208.0, "2715": 224286208.0, "2720": 224286208.0, "2725": 224286208.0, "2730": 224286208.0, "2735": 224286208.0, "2740": 224679424.0, "2745": 224286208.0, "2750": 224810496.0, "2755": 224286208.0, "2760": 224286208.0, "2765": 224286208.0, "2770": 224810496.0, "2775": 224679424.0, "2780": 224286208.0, "2785": 224286208.0, "2790": 224286208.0, "2795": 224810496.0, "2800": 224286208.0, "2805": 224286208.0, "2810": 224286208.0, "2815": 224286208.0, "2820": 224810496.0, "2825": 224286208.0, "2830": 224286208.0, "2835": 224286208.0, "2840": 224810496.0, "2845": 224286208.0, "2850": 224286208.0, "2855": 224286208.0, "2860": 224679424.0, "2865": 224286208.0, "2870": 224286208.0, "2875": 224286208.0, "2880": 224679424.0, "2885": 224286208.0, "2890": 224286208.0, "2895": 224286208.0, "2900": 224286208.0, "2905": 224286208.0, "2910": 224810496.0, "2915": 224286208.0, "2920": 224286208.0, "2925": 224286208.0, "2930": 
224810496.0, "2935": 224286208.0, "2940": 224286208.0, "2945": 224286208.0, "2950": 224286208.0, "2955": 224286208.0, "2960": 224286208.0, "2965": 224810496.0, "2970": 224810496.0, "2975": 224286208.0, "2980": 224810496.0, "2985": 224286208.0, "2990": 224286208.0, "2995": 224286208.0, "3000": 224286208.0, "3005": 224810496.0, "3010": 224286208.0, "3015": 224810496.0, "3020": 224286208.0, "3025": 224286208.0, "3030": 224286208.0, "3035": 224286208.0, "3040": 224286208.0, "3045": 224286208.0, "3050": 224286208.0, "3055": 224286208.0, "3060": 224286208.0, "3065": 224286208.0, "3070": 224286208.0, "3075": 224286208.0, "3080": 224286208.0, "3085": 224286208.0, "3090": 224286208.0, "3095": 224810496.0, "3100": 224286208.0, "3105": 224286208.0, "3110": 224286208.0, "3115": 224286208.0, "3120": 224286208.0, "3125": 224286208.0, "3130": 224810496.0, "3135": 224286208.0, "3140": 224286208.0, "3145": 224286208.0, "3150": 224810496.0, "3155": 224810496.0, "3160": 224286208.0, "3165": 224810496.0, "3170": 224286208.0, "3175": 224810496.0, "3180": 224286208.0, "3185": 224810496.0, "3190": 224810496.0, "3195": 224286208.0, "3200": 224286208.0, "3205": 224286208.0, "3210": 224286208.0, "3215": 224286208.0, "3220": 224810496.0, "3225": 224286208.0, "3230": 224286208.0, "3235": 224810496.0, "3240": 224286208.0, "3245": 224286208.0, "3250": 224286208.0, "3255": 224286208.0, "3260": 224286208.0, "3265": 224286208.0, "3270": 224286208.0, "3275": 224810496.0, "3280": 224286208.0, "3285": 224286208.0, "3290": 224286208.0, "3295": 224286208.0, "3300": 224286208.0, "3305": 224286208.0, "3310": 224286208.0, "3315": 224286208.0, "3320": 224286208.0, "3325": 224810496.0, "3330": 224810496.0, "3335": 224810496.0, "3340": 224286208.0, "3345": 224286208.0, "3350": 224286208.0, "3355": 224810496.0, "3360": 224286208.0, "3365": 224286208.0, "3370": 224286208.0, "3375": 224286208.0, "3380": 224286208.0, "3385": 224286208.0, "3390": 224286208.0, "3395": 224286208.0, "3400": 224810496.0, "3405": 224286208.0, "3410": 224286208.0, "3415": 224286208.0, "3420": 224286208.0, "3425": 224286208.0, "3430": 225334784.0, "3435": 224286208.0, "3440": 224810496.0, "3445": 224810496.0, "3450": 225334784.0, "3455": 224286208.0, "3460": 225334784.0, "3465": 224810496.0, "3470": 224286208.0, "3475": 224286208.0, "3480": 224810496.0, "3485": 224679424.0, "3490": 224286208.0, "3495": 224286208.0, "3500": 224286208.0, "3505": 224286208.0, "3510": 224286208.0, "3515": 224286208.0, "3520": 224810496.0, "3525": 224286208.0, "3530": 224286208.0, "3535": 224679424.0, "3540": 224286208.0, "3545": 224286208.0, "3550": 224810496.0, "3555": 224286208.0, "3560": 224286208.0, "3565": 224286208.0, "3570": 225334784.0, "3575": 224810496.0, "3580": 224286208.0, "3585": 224286208.0, "3590": 224286208.0, "3595": 225334784.0, "3600": 224286208.0, "3605": 224286208.0, "3610": 224286208.0, "3615": 224286208.0, "3620": 224286208.0, "3625": 224286208.0, "3630": 224286208.0, "3635": 224810496.0, "3640": 224286208.0, "3645": 224286208.0, "3650": 224286208.0, "3655": 224286208.0, "3660": 224286208.0, "3665": 224286208.0, "3670": 224810496.0, "3675": 224286208.0, "3680": 224286208.0, "3685": 224286208.0, "3690": 224286208.0, "3695": 224286208.0, "3700": 224286208.0, "3705": 224286208.0, "3710": 224286208.0, "3715": 224286208.0, "3720": 224286208.0, "3725": 224286208.0, "3730": 224286208.0, "3735": 224286208.0, "3740": 224286208.0, "3745": 224286208.0, "3750": 224286208.0, "3755": 224286208.0, "3760": 224286208.0, "3765": 224286208.0, "3770": 224286208.0, "3775": 
224810496.0, "3780": 224286208.0, "3785": 224286208.0, "3790": 224286208.0, "3795": 224286208.0, "3800": 224286208.0, "3805": 224286208.0, "3810": 224286208.0, "3815": 224286208.0, "3820": 224286208.0, "3825": 224286208.0, "3830": 224286208.0, "3835": 224286208.0, "3840": 224286208.0, "3845": 224286208.0, "3850": 224286208.0, "3855": 224286208.0, "3860": 224286208.0, "3865": 224286208.0, "3870": 224286208.0, "3875": 224286208.0, "3880": 224286208.0, "3885": 224286208.0, "3890": 224810496.0, "3895": 224286208.0, "3900": 224286208.0, "3905": 224286208.0, "3910": 224286208.0, "3915": 224286208.0, "3920": 224286208.0, "3925": 225334784.0, "3930": 224286208.0, "3935": 224286208.0, "3940": 224286208.0, "3945": 224810496.0, "3950": 224810496.0, "3955": 224286208.0, "3960": 224810496.0, "3965": 224286208.0, "3970": 224286208.0, "3975": 224286208.0, "3980": 224286208.0, "3985": 224286208.0, "3990": 224286208.0, "3995": 224286208.0, "4000": 224286208.0, "4005": 224810496.0, "4010": 224286208.0, "4015": 224286208.0, "4020": 224810496.0, "4025": 224286208.0, "4030": 224286208.0, "4035": 224286208.0, "4040": 224286208.0, "4045": 224286208.0, "4050": 224286208.0, "4055": 224286208.0, "4060": 224286208.0, "4065": 224286208.0, "4070": 224286208.0, "4075": 224286208.0, "4080": 224286208.0, "4085": 224810496.0, "4090": 224679424.0, "4095": 225334784.0, "4100": 224810496.0, "4105": 224810496.0, "4110": 224286208.0, "4115": 224286208.0, "4120": 224286208.0, "4125": 224286208.0, "4130": 224286208.0, "4135": 224810496.0, "4140": 224810496.0, "4145": 224286208.0, "4150": 224286208.0, "4155": 224286208.0, "4160": 224286208.0, "4165": 224286208.0, "4170": 224286208.0, "4175": 224286208.0, "4180": 224810496.0, "4185": 224679424.0, "4190": 224286208.0, "4195": 224679424.0, "4200": 224286208.0, "4205": 224810496.0, "4210": 224286208.0, "4215": 224286208.0, "4220": 224286208.0, "4225": 224286208.0, "4230": 224286208.0, "4235": 224286208.0, "4240": 224286208.0, "4245": 224286208.0, "4250": 224286208.0, "4255": 224286208.0, "4260": 224286208.0, "4265": 224286208.0, "4270": 224286208.0, "4275": 224286208.0, "4280": 224286208.0, "4285": 224286208.0, "4290": 224286208.0, "4295": 224286208.0, "4300": 224286208.0, "4305": 224286208.0, "4310": 224286208.0, "4315": 224286208.0, "4320": 225334784.0, "4325": 224810496.0, "4330": 224286208.0, "4335": 224286208.0, "4340": 225334784.0, "4345": 224679424.0, "4350": 224286208.0, "4355": 224286208.0, "4360": 224286208.0, "4365": 224286208.0, "4370": 224286208.0, "4375": 224286208.0, "4380": 224286208.0, "4385": 224286208.0, "4390": 224286208.0, "4395": 224286208.0, "4400": 224286208.0, "4405": 224286208.0, "4410": 224810496.0, "4415": 224286208.0, "4420": 224810496.0, "4425": 224286208.0, "4430": 224810496.0, "4435": 224286208.0, "4440": 224286208.0, "4445": 224286208.0, "4450": 224286208.0, "4455": 224810496.0, "4460": 224286208.0, "4465": 225334784.0, "4470": 225334784.0, "4475": 224286208.0, "4480": 224810496.0, "4485": 224286208.0, "4490": 224286208.0, "4495": 224286208.0, "4500": 224810496.0, "4505": 224679424.0, "4510": 224286208.0, "4515": 224810496.0, "4520": 224286208.0, "4525": 224286208.0, "4530": 224810496.0, "4535": 224810496.0, "4540": 224810496.0, "4545": 224286208.0, "4550": 224286208.0, "4555": 224286208.0, "4560": 224286208.0, "4565": 224286208.0, "4570": 224286208.0, "4575": 224286208.0, "4580": 224286208.0, "4585": 224286208.0, "4590": 224810496.0, "4595": 224286208.0, "4600": 224286208.0, "4605": 224286208.0, "4610": 224286208.0, "4615": 224286208.0, "4620": 
224286208.0, "4625": 224810496.0, "4630": 224286208.0, "4635": 224810496.0, "4640": 224810496.0, "4645": 224286208.0, "4650": 224810496.0, "4655": 224286208.0, "4660": 224286208.0, "4665": 224286208.0, "4670": 224810496.0, "4675": 224286208.0, "4680": 224286208.0, "4685": 224286208.0, "4690": 224810496.0, "4695": 224286208.0, "4700": 225334784.0, "4705": 224286208.0, "4710": 224286208.0, "4715": 224286208.0, "4720": 224286208.0, "4725": 224286208.0, "4730": 224286208.0, "4735": 224286208.0, "4740": 224286208.0, "4745": 224286208.0, "4750": 224286208.0, "4755": 224286208.0, "4760": 224810496.0, "4765": 224286208.0, "4770": 224286208.0, "4775": 224286208.0, "4780": 224286208.0, "4785": 224286208.0, "4790": 224286208.0, "4795": 224286208.0, "4800": 224679424.0, "4805": 224810496.0, "4810": 224810496.0, "4815": 224286208.0, "4820": 224810496.0, "4825": 224286208.0, "4830": 224810496.0, "4835": 224286208.0, "4840": 224286208.0, "4845": 224286208.0, "4850": 224286208.0, "4855": 224286208.0, "4860": 224286208.0, "4865": 224286208.0, "4870": 224286208.0, "4875": 224286208.0, "4880": 224810496.0, "4885": 224286208.0, "4890": 224286208.0, "4895": 224286208.0, "4900": 224810496.0, "4905": 224286208.0, "4910": 224286208.0, "4915": 224286208.0, "4920": 224286208.0, "4925": 224286208.0, "4930": 224286208.0, "4935": 224810496.0, "4940": 224286208.0, "4945": 224286208.0, "4950": 224810496.0, "4955": 224286208.0, "4960": 224286208.0, "4965": 224286208.0, "4970": 224286208.0, "4975": 224679424.0, "4980": 224286208.0, "4985": 224286208.0, "4990": 224286208.0, "4995": 224286208.0, "5000": 224286208.0, "5005": 224286208.0, "5010": 224679424.0, "5015": 224286208.0, "5020": 224286208.0, "5025": 224286208.0, "5030": 224810496.0, "5035": 224286208.0, "5040": 224286208.0, "5045": 224286208.0, "5050": 224286208.0, "5055": 224286208.0, "5060": 224810496.0, "5065": 224810496.0, "5070": 224286208.0, "5075": 224286208.0, "5080": 224286208.0, "5085": 224286208.0, "5090": 224286208.0, "5095": 224679424.0, "5100": 224286208.0, "5105": 224286208.0, "5110": 224286208.0, "5115": 224286208.0, "5120": 224286208.0, "5125": 224286208.0, "5130": 224286208.0, "5135": 224286208.0, "5140": 224286208.0, "5145": 224810496.0, "5150": 224679424.0, "5155": 224286208.0, "5160": 224286208.0, "5165": 224286208.0, "5170": 224286208.0, "5175": 224286208.0, "5180": 224286208.0, "5185": 224286208.0, "5190": 224286208.0, "5195": 224286208.0, "5200": 224679424.0, "5205": 224286208.0, "5210": 224286208.0, "5215": 224286208.0, "5220": 224286208.0, "5225": 224286208.0, "5230": 224286208.0, "5235": 224286208.0, "5240": 224286208.0, "5245": 224286208.0, "5250": 224679424.0, "5255": 224286208.0, "5260": 224286208.0, "5265": 224286208.0, "5270": 224810496.0, "5275": 224286208.0, "5280": 224286208.0, "5285": 224810496.0, "5290": 224286208.0, "5295": 224810496.0, "5300": 224286208.0, "5305": 224286208.0, "5310": 224286208.0, "5315": 224286208.0, "5320": 224286208.0, "5325": 224810496.0, "5330": 224286208.0, "5335": 224286208.0, "5340": 224810496.0, "5345": 224286208.0, "5350": 224286208.0, "5355": 224286208.0, "5360": 224286208.0, "5365": 224286208.0, "5370": 224286208.0, "5375": 224286208.0, "5380": 224286208.0, "5385": 224679424.0, "5390": 224286208.0, "5395": 224286208.0, "5400": 224286208.0, "5405": 224286208.0, "5410": 224286208.0, "5415": 224286208.0, "5420": 224286208.0, "5425": 224286208.0, "5430": 224286208.0, "5435": 224286208.0, "5440": 224286208.0, "5445": 224286208.0, "5450": 224286208.0, "5455": 224286208.0, "5460": 224286208.0, "5465": 
224286208.0, "5470": 224286208.0, "5475": 224286208.0, "5480": 224286208.0, "5485": 224810496.0, "5490": 224286208.0, "5495": 224286208.0, "5500": 224286208.0, "5505": 224810496.0, "5510": 224286208.0, "5515": 224286208.0, "5520": 224286208.0, "5525": 224286208.0, "5530": 224810496.0, "5535": 224810496.0, "5540": 224286208.0, "5545": 224810496.0, "5550": 224286208.0, "5555": 224286208.0, "5560": 224286208.0, "5565": 224286208.0, "5570": 224286208.0, "5575": 224286208.0, "5580": 224286208.0, "5585": 224286208.0, "5590": 224679424.0, "5595": 224810496.0, "5600": 224286208.0, "5605": 224286208.0, "5610": 224286208.0, "5615": 224286208.0, "5620": 224286208.0, "5625": 224286208.0, "5630": 224286208.0, "5635": 224286208.0, "5640": 224286208.0, "5645": 224810496.0, "5650": 224286208.0, "5655": 224286208.0, "5660": 224286208.0, "5665": 224286208.0, "5670": 224286208.0, "5675": 224810496.0, "5680": 224286208.0, "5685": 224286208.0, "5690": 224286208.0, "5695": 224286208.0, "5700": 224286208.0, "5705": 224286208.0, "5710": 225334784.0, "5715": 224286208.0, "5720": 224286208.0, "5725": 224286208.0, "5730": 224286208.0, "5735": 224286208.0, "5740": 224286208.0, "5745": 224810496.0, "5750": 224286208.0, "5755": 224286208.0, "5760": 224286208.0, "5765": 224286208.0, "5770": 224286208.0, "5775": 224286208.0, "5780": 224679424.0, "5785": 224286208.0, "5790": 224286208.0, "5795": 224286208.0, "5800": 224286208.0, "5805": 224286208.0, "5810": 224286208.0, "5815": 224286208.0, "5820": 224286208.0, "5825": 224286208.0, "5830": 224286208.0, "5835": 224286208.0, "5840": 224286208.0, "5845": 224679424.0, "5850": 224286208.0, "5855": 224286208.0, "5860": 224286208.0, "5865": 224286208.0, "5870": 224286208.0, "5875": 224286208.0, "5880": 224286208.0, "5885": 224286208.0, "5890": 224810496.0, "5895": 224286208.0, "5900": 224286208.0, "5905": 224810496.0, "5910": 224286208.0, "5915": 224286208.0, "5920": 224286208.0, "5925": 224810496.0, "5930": 224286208.0, "5935": 224286208.0, "5940": 224286208.0, "5945": 224286208.0, "5950": 225334784.0, "5955": 224810496.0, "5960": 224286208.0, "5965": 224286208.0, "5970": 224286208.0, "5975": 224286208.0, "5980": 224810496.0, "5985": 224286208.0, "5990": 224286208.0, "5995": 224286208.0, "6000": 224286208.0, "6005": 224286208.0, "6010": 224286208.0, "6015": 224286208.0, "6020": 224286208.0, "6025": 225334784.0, "6030": 224286208.0, "6035": 224286208.0, "6040": 224286208.0, "6045": 224286208.0, "6050": 224286208.0, "6055": 224286208.0, "6060": 224286208.0, "6065": 224810496.0, "6070": 225334784.0, "6075": 224286208.0, "6080": 224286208.0, "6085": 224286208.0, "6090": 224810496.0, "6095": 224286208.0, "6100": 224286208.0, "6105": 224286208.0, "6110": 224286208.0, "6115": 224286208.0, "6120": 224286208.0, "6125": 224286208.0, "6130": 224286208.0, "6135": 224810496.0, "6140": 224286208.0, "6145": 225334784.0, "6150": 224286208.0, "6155": 224810496.0, "6160": 224286208.0, "6165": 225334784.0, "6170": 224286208.0, "6175": 224286208.0, "6180": 224810496.0, "6185": 224810496.0, "6190": 224286208.0, "6195": 224286208.0, "6200": 224810496.0, "6205": 224286208.0, "6210": 224286208.0, "6215": 224286208.0, "6220": 224286208.0, "6225": 224286208.0, "6230": 224286208.0, "6235": 224286208.0, "6240": 224810496.0, "6245": 224286208.0, "6250": 224286208.0, "6255": 224286208.0, "6260": 224286208.0, "6265": 224286208.0, "6270": 224286208.0, "6275": 224810496.0, "6280": 224810496.0, "6285": 224810496.0, "6290": 224286208.0, "6295": 224286208.0, "6300": 224286208.0, "6305": 224286208.0, "6310": 
224810496.0, "6315": 224810496.0, "6320": 224286208.0, "6325": 224810496.0, "6330": 224810496.0, "6335": 224810496.0, "6340": 224286208.0, "6345": 224286208.0, "6350": 224286208.0, "6355": 224810496.0, "6360": 225334784.0, "6365": 224286208.0, "6370": 224810496.0, "6375": 224810496.0, "6380": 224810496.0, "6385": 224286208.0, "6390": 224286208.0, "6395": 224286208.0, "6400": 224810496.0, "6405": 224810496.0, "6410": 224810496.0, "6415": 224286208.0, "6420": 224810496.0, "6425": 225334784.0, "6430": 225334784.0, "6435": 224286208.0, "6440": 224810496.0, "6445": 224286208.0, "6450": 224810496.0, "6455": 224286208.0, "6460": 224286208.0, "6465": 224286208.0, "6470": 224810496.0, "6475": 224286208.0, "6480": 224810496.0, "6485": 224286208.0, "6490": 224286208.0, "6495": 224286208.0, "6500": 224286208.0, "6505": 224286208.0, "6510": 224810496.0, "6515": 224679424.0, "6520": 224286208.0, "6525": 225334784.0, "6530": 224286208.0, "6535": 224286208.0, "6540": 224286208.0, "6545": 224286208.0, "6550": 224286208.0, "6555": 224810496.0, "6560": 224286208.0, "6565": 224810496.0, "6570": 224810496.0, "6575": 224286208.0, "6580": 224286208.0, "6585": 224810496.0, "6590": 224810496.0, "6595": 224810496.0, "6600": 224286208.0, "6605": 224810496.0, "6610": 224286208.0, "6615": 224286208.0, "6620": 224286208.0, "6625": 224286208.0, "6630": 224286208.0, "6635": 225334784.0, "6640": 224286208.0, "6645": 224286208.0, "6650": 224810496.0, "6655": 224286208.0, "6660": 225334784.0, "6665": 224286208.0, "6670": 224286208.0, "6675": 224286208.0, "6680": 224810496.0, "6685": 224810496.0, "6690": 224286208.0, "6695": 224286208.0, "6700": 224679424.0, "6705": 224810496.0, "6710": 224286208.0, "6715": 225334784.0, "6720": 224679424.0, "6725": 224810496.0, "6730": 224810496.0, "6735": 225334784.0, "6740": 224286208.0, "6745": 224286208.0, "6750": 224286208.0, "6755": 224286208.0, "6760": 224286208.0, "6765": 224810496.0, "6770": 224286208.0, "6775": 224286208.0, "6780": 224286208.0, "6785": 224286208.0, "6790": 224286208.0, "6795": 225334784.0, "6800": 224286208.0, "6805": 224810496.0, "6810": 224286208.0, "6815": 224286208.0, "6820": 224286208.0, "6825": 224810496.0, "6830": 224286208.0, "6835": 224286208.0, "6840": 224286208.0, "6845": 224810496.0, "6850": 224286208.0, "6855": 224810496.0, "6860": 224810496.0, "6865": 224286208.0, "6870": 224286208.0, "6875": 224810496.0, "6880": 224286208.0, "6885": 224286208.0, "6890": 224286208.0, "6895": 224286208.0, "6900": 224286208.0, "6905": 224286208.0, "6910": 224286208.0, "6915": 224679424.0, "6920": 225334784.0, "6925": 224286208.0, "6930": 224810496.0, "6935": 224810496.0, "6940": 224810496.0, "6945": 224286208.0, "6950": 224286208.0, "6955": 224810496.0, "6960": 225334784.0, "6965": 224286208.0, "6970": 225334784.0, "6975": 224286208.0, "6980": 224286208.0, "6985": 225334784.0, "6990": 224286208.0, "6995": 224810496.0, "7000": 224679424.0, "7005": 225334784.0, "7010": 225334784.0, "7015": 224286208.0, "7020": 224286208.0, "7025": 224810496.0, "7030": 224810496.0, "7035": 224810496.0, "7040": 224810496.0, "7045": 224286208.0, "7050": 224810496.0, "7055": 224286208.0, "7060": 224286208.0, "7065": 224286208.0, "7070": 224286208.0, "7075": 224810496.0, "7080": 224286208.0, "7085": 224286208.0, "7090": 224286208.0, "7095": 224286208.0, "7100": 225334784.0, "7105": 224286208.0, "7110": 224810496.0, "7115": 224286208.0, "7120": 224286208.0, "7125": 224286208.0, "7130": 224810496.0, "7135": 224810496.0, "7140": 225334784.0, "7145": 225334784.0, "7150": 224286208.0, "7155": 
225334784.0, "7160": 224810496.0, "7165": 224810496.0, "7170": 224286208.0, "7175": 224810496.0, "7180": 224286208.0, "7185": 224286208.0, "7190": 224286208.0, "7195": 225334784.0, "7200": 224286208.0, "7205": 224810496.0, "7210": 224286208.0, "7215": 225334784.0, "7220": 224810496.0, "7225": 224810496.0, "7230": 224810496.0, "7235": 224810496.0, "7240": 224286208.0, "7245": 224810496.0, "7250": 224286208.0, "7255": 224810496.0, "7260": 225334784.0, "7265": 224810496.0, "7270": 224810496.0, "7275": 225334784.0, "7280": 224810496.0, "7285": 224286208.0, "7290": 224286208.0, "7295": 224286208.0, "7300": 224810496.0, "7305": 224810496.0, "7310": 224810496.0, "7315": 224286208.0, "7320": 224810496.0, "7325": 224810496.0, "7330": 224810496.0, "7335": 224286208.0, "7340": 224810496.0, "7345": 224286208.0, "7350": 224810496.0, "7355": 224810496.0, "7360": 225334784.0, "7365": 224810496.0, "7370": 224286208.0, "7375": 224810496.0, "7380": 224810496.0, "7385": 224286208.0, "7390": 224286208.0, "7395": 224286208.0, "7400": 224810496.0, "7405": 224286208.0, "7410": 224810496.0, "7415": 224286208.0, "7420": 224286208.0, "7425": 224810496.0, "7430": 224810496.0, "7435": 224286208.0, "7440": 224286208.0, "7445": 224286208.0, "7450": 224286208.0, "7455": 224286208.0, "7460": 224286208.0, "7465": 225334784.0, "7470": 224286208.0, "7475": 224810496.0, "7480": 224286208.0, "7485": 224286208.0, "7490": 224286208.0, "7495": 225334784.0, "7500": 224679424.0, "7505": 224286208.0, "7510": 224286208.0, "7515": 224286208.0, "7520": 224286208.0, "7525": 224810496.0, "7530": 224286208.0, "7535": 224810496.0, "7540": 224810496.0, "7545": 225334784.0, "7550": 224286208.0, "7555": 224810496.0, "7560": 224286208.0, "7565": 224810496.0, "7570": 225334784.0, "7575": 224286208.0, "7580": 224810496.0, "7585": 224286208.0, "7590": 224286208.0, "7595": 224286208.0, "7600": 224810496.0, "7605": 224810496.0, "7610": 224286208.0, "7615": 224286208.0, "7620": 224286208.0, "7625": 224286208.0, "7630": 224810496.0, "7635": 224286208.0, "7640": 225334784.0, "7645": 224810496.0, "7650": 224286208.0, "7655": 224286208.0, "7660": 224810496.0, "7665": 224286208.0, "7670": 224286208.0, "7675": 224286208.0, "7680": 224286208.0, "7685": 224286208.0, "7690": 224286208.0, "7695": 224286208.0, "7700": 225334784.0, "7705": 225334784.0, "7710": 225334784.0, "7715": 224286208.0, "7720": 224286208.0, "7725": 224286208.0, "7730": 224286208.0, "7735": 224810496.0, "7740": 224286208.0, "7745": 224286208.0, "7750": 224810496.0, "7755": 224810496.0, "7760": 224810496.0, "7765": 224286208.0, "7770": 224286208.0, "7775": 224286208.0, "7780": 224810496.0, "7785": 224286208.0, "7790": 225203712.0, "7795": 224286208.0, "7800": 225334784.0, "7805": 224810496.0, "7810": 224810496.0, "7815": 224286208.0, "7820": 225334784.0, "7825": 224286208.0, "7830": 224286208.0, "7835": 224286208.0, "7840": 224286208.0, "7845": 224286208.0, "7850": 224810496.0, "7855": 224286208.0, "7860": 224810496.0, "7865": 224679424.0, "7870": 224286208.0, "7875": 224286208.0, "7880": 224286208.0, "7885": 224286208.0, "7890": 224286208.0, "7895": 224810496.0, "7900": 225334784.0, "7905": 224810496.0, "7910": 224286208.0, "7915": 224286208.0, "7920": 224286208.0, "7925": 224810496.0, "7930": 224810496.0, "7935": 224286208.0, "7940": 224286208.0, "7945": 224286208.0, "7950": 224286208.0, "7955": 224286208.0, "7960": 224286208.0, "7965": 224286208.0, "7970": 224286208.0, "7975": 224286208.0, "7980": 225334784.0, "7985": 224810496.0, "7990": 224810496.0, "7995": 224286208.0, "8000": 
224286208.0, "8005": 224286208.0, "8010": 224286208.0, "8015": 224810496.0, "8020": 224286208.0, "8025": 224286208.0, "8030": 224286208.0, "8035": 224810496.0, "8040": 224286208.0, "8045": 224286208.0, "8050": 224286208.0, "8055": 224810496.0, "8060": 224286208.0, "8065": 224810496.0, "8070": 224286208.0, "8075": 224679424.0, "8080": 224810496.0, "8085": 224286208.0, "8090": 224286208.0, "8095": 224810496.0, "8100": 224286208.0, "8105": 224810496.0, "8110": 224286208.0, "8115": 224810496.0, "8120": 224810496.0, "8125": 224286208.0, "8130": 224286208.0, "8135": 224810496.0, "8140": 224810496.0, "8145": 224810496.0, "8150": 224810496.0, "8155": 224810496.0, "8160": 225334784.0, "8165": 224810496.0, "8170": 224286208.0, "8175": 224286208.0, "8180": 224810496.0, "8185": 224810496.0, "8190": 224810496.0, "8195": 224286208.0, "8200": 224810496.0, "8205": 224810496.0, "8210": 224286208.0, "8215": 225334784.0, "8220": 224810496.0, "8225": 224286208.0, "8230": 224810496.0, "8235": 224810496.0, "8240": 224810496.0, "8245": 224286208.0, "8250": 224286208.0, "8255": 224810496.0, "8260": 224810496.0, "8265": 224286208.0, "8270": 224810496.0, "8275": 224810496.0, "8280": 224286208.0, "8285": 224286208.0, "8290": 224810496.0, "8295": 224286208.0, "8300": 224286208.0, "8305": 224286208.0, "8310": 224810496.0, "8315": 225334784.0, "8320": 224286208.0, "8325": 224286208.0, "8330": 224810496.0, "8335": 224810496.0, "8340": 224810496.0, "8345": 224810496.0, "8350": 224286208.0, "8355": 224810496.0, "8360": 224286208.0, "8365": 224810496.0, "8370": 224810496.0, "8375": 224810496.0, "8380": 224810496.0, "8385": 224286208.0, "8390": 224286208.0, "8395": 224810496.0, "8400": 224286208.0, "8405": 224286208.0, "8410": 224810496.0, "8415": 224810496.0, "8420": 224810496.0, "8425": 224810496.0, "8430": 225334784.0, "8435": 224286208.0, "8440": 224286208.0, "8445": 224286208.0, "8450": 224810496.0, "8455": 224286208.0, "8460": 224810496.0, "8465": 225334784.0, "8470": 224286208.0, "8475": 224286208.0, "8480": 224286208.0, "8485": 224286208.0, "8490": 224810496.0, "8495": 224286208.0, "8500": 224286208.0, "8505": 225334784.0, "8510": 224810496.0, "8515": 224286208.0, "8520": 224810496.0, "8525": 224810496.0, "8530": 224286208.0, "8535": 224810496.0, "8540": 224286208.0, "8545": 224286208.0, "8550": 224810496.0, "8555": 224286208.0, "8560": 224810496.0, "8565": 224810496.0, "8570": 224810496.0, "8575": 225203712.0, "8580": 224810496.0, "8585": 224810496.0, "8590": 224810496.0, "8595": 224286208.0, "8600": 224810496.0, "8605": 224810496.0, "8610": 224810496.0, "8615": 224286208.0, "8620": 224810496.0, "8625": 225334784.0, "8630": 224810496.0, "8635": 224810496.0, "8640": 224810496.0, "8645": 224286208.0, "8650": 224810496.0, "8655": 224810496.0, "8660": 224810496.0, "8665": 225334784.0, "8670": 224810496.0, "8675": 225334784.0, "8680": 224810496.0, "8685": 224286208.0, "8690": 224810496.0, "8695": 224286208.0, "8700": 225334784.0, "8705": 224810496.0, "8710": 224810496.0, "8715": 224810496.0, "8720": 224810496.0, "8725": 224286208.0, "8730": 224810496.0, "8735": 224286208.0, "8740": 224286208.0, "8745": 224286208.0, "8750": 224810496.0, "8755": 224286208.0, "8760": 224286208.0, "8765": 224810496.0, "8770": 224810496.0, "8775": 224679424.0, "8780": 224286208.0, "8785": 224286208.0, "8790": 224810496.0, "8795": 224810496.0, "8800": 224810496.0, "8805": 224810496.0, "8810": 224810496.0, "8815": 224286208.0, "8820": 224286208.0, "8825": 224810496.0, "8830": 224286208.0, "8835": 225334784.0, "8840": 224286208.0, "8845": 
225334784.0, "8850": 224810496.0, "8855": 224810496.0, "8860": 224286208.0, "8865": 224286208.0, "8870": 224286208.0, "8875": 224286208.0, "8880": 224810496.0, "8885": 224286208.0, "8890": 224286208.0, "8895": 224810496.0, "8900": 224286208.0, "8905": 224286208.0, "8910": 224810496.0, "8915": 224810496.0, "8920": 224286208.0, "8925": 224810496.0, "8930": 225334784.0, "8935": 224810496.0, "8940": 224810496.0, "8945": 224810496.0, "8950": 224810496.0, "8955": 224286208.0, "8960": 224810496.0, "8965": 224810496.0, "8970": 224286208.0, "8975": 224810496.0, "8980": 224286208.0, "8985": 224810496.0, "8990": 224286208.0, "8995": 225334784.0, "9000": 224810496.0, "9005": 224286208.0, "9010": 224286208.0, "9015": 225334784.0, "9020": 224286208.0, "9025": 224286208.0, "9030": 224810496.0, "9035": 224810496.0, "9040": 224286208.0, "9045": 224810496.0, "9050": 224810496.0, "9055": 224286208.0, "9060": 224286208.0, "9065": 224286208.0, "9070": 224810496.0, "9075": 224810496.0, "9080": 224286208.0, "9085": 224810496.0, "9090": 224286208.0, "9095": 224286208.0, "9100": 224810496.0, "9105": 224286208.0, "9110": 224286208.0, "9115": 224286208.0, "9120": 224286208.0, "9125": 224286208.0, "9130": 224286208.0, "9135": 224810496.0, "9140": 225334784.0, "9145": 224286208.0, "9150": 224810496.0, "9155": 224286208.0, "9160": 224286208.0, "9165": 224286208.0, "9170": 224810496.0, "9175": 224810496.0, "9180": 224286208.0, "9185": 224286208.0, "9190": 224286208.0, "9195": 225334784.0, "9200": 224286208.0, "9205": 224286208.0, "9210": 224286208.0, "9215": 224286208.0, "9220": 224810496.0, "9225": 224286208.0, "9230": 224286208.0, "9235": 224810496.0, "9240": 224286208.0, "9245": 224286208.0, "9250": 224810496.0, "9255": 224286208.0, "9260": 224286208.0, "9265": 224810496.0, "9270": 224286208.0, "9275": 225334784.0, "9280": 224810496.0, "9285": 224810496.0, "9290": 224810496.0, "9295": 224286208.0, "9300": 225334784.0, "9305": 224679424.0, "9310": 224286208.0, "9315": 224810496.0, "9320": 224286208.0, "9325": 224810496.0, "9330": 224286208.0, "9335": 224286208.0, "9340": 224286208.0, "9345": 224286208.0, "9350": 224286208.0, "9355": 224286208.0, "9360": 224286208.0, "9365": 224810496.0, "9370": 224286208.0, "9375": 224286208.0, "9380": 224286208.0, "9385": 224679424.0, "9390": 224286208.0, "9395": 224810496.0, "9400": 224286208.0, "9405": 224286208.0, "9410": 224286208.0, "9415": 224286208.0, "9420": 224286208.0, "9425": 224286208.0, "9430": 224286208.0, "9435": 224286208.0, "9440": 224286208.0, "9445": 224810496.0, "9450": 224810496.0, "9455": 224286208.0, "9460": 224810496.0, "9465": 224286208.0, "9470": 224286208.0, "9475": 224810496.0, "9480": 224286208.0, "9485": 224286208.0, "9490": 224286208.0, "9495": 224810496.0, "9500": 224286208.0, "9505": 224286208.0, "9510": 224286208.0, "9515": 224810496.0, "9520": 224286208.0, "9525": 224286208.0, "9530": 224286208.0, "9535": 224286208.0, "9540": 224286208.0, "9545": 224810496.0, "9550": 224810496.0, "9555": 224286208.0, "9560": 224286208.0, "9565": 224286208.0, "9570": 224286208.0, "9575": 224810496.0, "9580": 224286208.0, "9585": 224810496.0, "9590": 225334784.0, "9595": 224286208.0, "9600": 224810496.0, "9605": 224286208.0, "9610": 224286208.0, "9615": 224286208.0, "9620": 224286208.0, "9625": 224810496.0, "9630": 224286208.0, "9635": 224286208.0, "9640": 224286208.0, "9645": 224286208.0, "9650": 224286208.0, "9655": 224286208.0, "9660": 224286208.0, "9665": 224286208.0, "9670": 224286208.0, "9675": 224286208.0, "9680": 224286208.0, "9685": 225334784.0, "9690": 
224810496.0, "9695": 224810496.0, "9700": 224286208.0, "9705": 224286208.0, "9710": 224286208.0, "9715": 224286208.0, "9720": 224810496.0, "9725": 224286208.0, "9730": 224810496.0, "9735": 224810496.0, "9740": 224286208.0, "9745": 224286208.0, "9750": 224810496.0, "9755": 224286208.0, "9760": 224286208.0, "9765": 224286208.0, "9770": 225334784.0, "9775": 224810496.0, "9780": 224286208.0, "9785": 224286208.0, "9790": 224810496.0, "9795": 224286208.0, "9800": 224286208.0, "9805": 224810496.0, "9810": 224286208.0, "9815": 224286208.0, "9820": 224810496.0, "9825": 224810496.0, "9830": 224810496.0, "9835": 224286208.0, "9840": 225334784.0, "9845": 225334784.0, "9850": 224286208.0, "9855": 224810496.0, "9860": 224286208.0, "9865": 224810496.0, "9870": 224286208.0, "9875": 224286208.0, "9880": 224286208.0, "9885": 224286208.0, "9890": 224810496.0, "9895": 224286208.0, "9900": 224286208.0, "9905": 224286208.0, "9910": 224810496.0, "9915": 224286208.0, "9920": 224286208.0, "9925": 224286208.0, "9930": 224286208.0, "9935": 224810496.0, "9940": 224286208.0, "9945": 224810496.0, "9950": 224810496.0, "9955": 224810496.0, "9960": 224286208.0, "9965": 224286208.0, "9970": 224810496.0, "9975": 224286208.0, "9980": 224286208.0, "9985": 225334784.0, "9990": 224810496.0, "9995": 224286208.0, "10000": 224810496.0, "10005": 224810496.0, "10010": 224286208.0, "10015": 224286208.0, "10020": 224286208.0, "10025": 224810496.0, "10030": 224810496.0, "10035": 224679424.0, "10040": 224810496.0, "10045": 224810496.0, "10050": 224810496.0, "10055": 225334784.0, "10060": 224286208.0, "10065": 224286208.0, "10070": 225334784.0, "10075": 224286208.0, "10080": 224810496.0, "10085": 224286208.0, "10090": 224810496.0, "10095": 224286208.0, "10100": 224810496.0, "10105": 224810496.0, "10110": 224810496.0, "10115": 224810496.0, "10120": 224810496.0, "10125": 224286208.0, "10130": 224810496.0, "10135": 224286208.0, "10140": 225334784.0, "10145": 224810496.0, "10150": 224286208.0, "10155": 224286208.0, "10160": 224286208.0, "10165": 224810496.0, "10170": 224810496.0, "10175": 224810496.0, "10180": 224810496.0, "10185": 224286208.0, "10190": 224810496.0, "10195": 224286208.0, "10200": 224810496.0, "10205": 224286208.0, "10210": 224286208.0, "10215": 224286208.0, "10220": 224810496.0, "10225": 224810496.0, "10230": 224286208.0, "10235": 224810496.0, "10240": 225334784.0, "10245": 225334784.0, "10250": 224810496.0, "10255": 224286208.0, "10260": 224286208.0, "10265": 224810496.0, "10270": 224286208.0, "10275": 224810496.0, "10280": 224286208.0, "10285": 224810496.0, "10290": 224810496.0, "10295": 224810496.0, "10300": 224286208.0, "10305": 224810496.0, "10310": 224810496.0, "10315": 224286208.0, "10320": 224810496.0, "10325": 224810496.0, "10330": 224286208.0, "10335": 224286208.0, "10340": 224286208.0, "10345": 224810496.0, "10350": 224286208.0, "10355": 224810496.0, "10360": 224286208.0, "10365": 224286208.0, "10370": 224810496.0, "10375": 224810496.0, "10380": 224286208.0, "10385": 224810496.0, "10390": 224810496.0, "10395": 224286208.0, "10400": 224286208.0, "10405": 224810496.0, "10410": 224286208.0, "10415": 224810496.0, "10420": 224810496.0, "10425": 224810496.0, "10430": 224679424.0, "10435": 224810496.0, "10440": 224286208.0, "10445": 224286208.0, "10450": 224810496.0, "10455": 224286208.0, "10460": 224810496.0, "10465": 224810496.0, "10470": 224286208.0, "10475": 224286208.0, "10480": 224810496.0, "10485": 224810496.0, "10490": 225334784.0, "10495": 224810496.0, "10500": 224286208.0, "10505": 224810496.0, "10510": 
224810496.0, "10515": 224810496.0, "10520": 224286208.0, "10525": 224810496.0, "10530": 224810496.0, "10535": 224286208.0, "10540": 224286208.0, "10545": 224286208.0, "10550": 224286208.0, "10555": 224286208.0, "10560": 224286208.0, "10565": 225334784.0, "10570": 225334784.0, "10575": 224286208.0, "10580": 224286208.0, "10585": 225334784.0, "10590": 224286208.0, "10595": 224286208.0, "10600": 224286208.0, "10605": 224286208.0, "10610": 224810496.0, "10615": 224286208.0, "10620": 224286208.0, "10625": 225334784.0, "10630": 225334784.0, "10635": 224810496.0, "10640": 224286208.0, "10645": 224286208.0, "10650": 224810496.0, "10655": 225334784.0, "10660": 224286208.0, "10665": 224286208.0, "10670": 224286208.0, "10675": 224810496.0, "10680": 224810496.0, "10685": 224286208.0, "10690": 224286208.0, "10695": 224810496.0, "10700": 224810496.0, "10705": 224810496.0, "10710": 224810496.0, "10715": 224810496.0, "10720": 224286208.0, "10725": 225334784.0, "10730": 225334784.0, "10735": 224286208.0, "10740": 224286208.0, "10745": 224286208.0, "10750": 224286208.0, "10755": 224286208.0, "10760": 224810496.0, "10765": 224286208.0, "10770": 224286208.0, "10775": 224286208.0, "10780": 224286208.0, "10785": 225334784.0, "10790": 224810496.0, "10795": 225334784.0, "10800": 224286208.0, "10805": 224286208.0, "10810": 224810496.0, "10815": 224810496.0, "10820": 224810496.0, "10825": 224286208.0, "10830": 224286208.0, "10835": 224286208.0, "10840": 224286208.0, "10845": 224286208.0, "10850": 224286208.0, "10855": 224810496.0, "10860": 224286208.0, "10865": 225334784.0, "10870": 224810496.0, "10875": 224286208.0, "10880": 224810496.0, "10885": 224810496.0, "10890": 224286208.0, "10895": 224286208.0, "10900": 225334784.0, "10905": 224810496.0, "10910": 224286208.0, "10915": 224286208.0, "10920": 224286208.0, "10925": 224810496.0, "10930": 224810496.0, "10935": 224810496.0, "10940": 225334784.0, "10945": 224810496.0, "10950": 224286208.0, "10955": 224286208.0, "10960": 224810496.0, "10965": 224286208.0, "10970": 224286208.0, "10975": 224810496.0, "10980": 224286208.0, "10985": 224286208.0, "10990": 224810496.0, "10995": 224286208.0, "11000": 224286208.0, "11005": 224286208.0, "11010": 224810496.0, "11015": 224286208.0, "11020": 224810496.0, "11025": 225334784.0, "11030": 224286208.0, "11035": 224810496.0, "11040": 224810496.0, "11045": 224679424.0, "11050": 224286208.0, "11055": 224286208.0, "11060": 224286208.0, "11065": 224286208.0, "11070": 224286208.0, "11075": 224286208.0, "11080": 224810496.0, "11085": 224286208.0, "11090": 224286208.0, "11095": 224286208.0, "11100": 224286208.0, "11105": 224286208.0, "11110": 224286208.0, "11115": 224286208.0, "11120": 224810496.0, "11125": 224286208.0, "11130": 224286208.0, "11135": 224286208.0, "11140": 224679424.0, "11145": 225334784.0, "11150": 224286208.0, "11155": 224286208.0, "11160": 224286208.0, "11165": 224810496.0, "11170": 224810496.0, "11175": 224286208.0, "11180": 224810496.0, "11185": 224810496.0, "11190": 224810496.0, "11195": 224286208.0, "11200": 224810496.0, "11205": 224810496.0, "11210": 224286208.0, "11215": 224810496.0, "11220": 224810496.0, "11225": 224810496.0, "11230": 224286208.0, "11235": 224810496.0, "11240": 224286208.0, "11245": 224286208.0, "11250": 224286208.0, "11255": 224286208.0, "11260": 225334784.0, "11265": 224286208.0, "11270": 224810496.0, "11275": 224286208.0, "11280": 225334784.0, "11285": 224286208.0, "11290": 224286208.0, "11295": 224810496.0, "11300": 225334784.0, "11305": 224810496.0, "11310": 224810496.0, "11315": 224810496.0, 
"11320": 224810496.0, "11325": 224286208.0, "11330": 224286208.0, "11335": 224286208.0, "11340": 224810496.0, "11345": 225334784.0, "11350": 224810496.0, "11355": 224810496.0, "11360": 224286208.0, "11365": 224286208.0, "11370": 224810496.0, "11375": 224810496.0, "11380": 224286208.0, "11385": 224286208.0, "11390": 224286208.0, "11395": 224286208.0, "11400": 224286208.0, "11405": 224286208.0, "11410": 224810496.0, "11415": 224810496.0, "11420": 224286208.0, "11425": 224286208.0, "11430": 224810496.0, "11435": 224286208.0, "11440": 224810496.0, "11445": 224810496.0, "11450": 224286208.0, "11455": 224286208.0, "11460": 224286208.0, "11465": 224286208.0, "11470": 224286208.0, "11475": 224810496.0, "11480": 224286208.0, "11485": 224286208.0, "11490": 224286208.0, "11495": 224286208.0, "11500": 225334784.0, "11505": 224286208.0, "11510": 224286208.0, "11515": 224286208.0, "11520": 224810496.0, "11525": 224286208.0, "11530": 224810496.0, "11535": 224810496.0, "11540": 224286208.0, "11545": 224810496.0, "11550": 224286208.0, "11555": 224286208.0, "11560": 224286208.0, "11565": 224286208.0, "11570": 224286208.0, "11575": 224286208.0, "11580": 224286208.0, "11585": 224810496.0, "11590": 224810496.0, "11595": 225334784.0, "11600": 224810496.0, "11605": 224286208.0, "11610": 224810496.0, "11615": 224810496.0, "11620": 224810496.0, "11625": 225334784.0, "11630": 224286208.0, "11635": 224286208.0, "11640": 224810496.0, "11645": 225334784.0, "11650": 224286208.0, "11655": 224286208.0, "11660": 224286208.0, "11665": 225334784.0, "11670": 224286208.0, "11675": 224286208.0, "11680": 224286208.0, "11685": 224679424.0, "11690": 224286208.0, "11695": 224286208.0, "11700": 224286208.0, "11705": 224286208.0, "11710": 225334784.0, "11715": 225334784.0, "11720": 224810496.0, "11725": 225334784.0, "11730": 224286208.0, "11735": 224286208.0, "11740": 224286208.0, "11745": 224286208.0, "11750": 224286208.0, "11755": 224286208.0, "11760": 224286208.0, "11765": 224286208.0, "11770": 224286208.0, "11775": 224810496.0, "11780": 224810496.0, "11785": 224286208.0, "11790": 224286208.0, "11795": 224810496.0, "11800": 224810496.0, "11805": 224810496.0, "11810": 224810496.0, "11815": 224286208.0, "11820": 224286208.0, "11825": 224286208.0, "11830": 224286208.0, "11835": 224286208.0, "11840": 224286208.0, "11845": 224810496.0, "11850": 225334784.0, "11855": 224810496.0, "11860": 224286208.0, "11865": 225334784.0, "11870": 224286208.0, "11875": 224810496.0, "11880": 224810496.0, "11885": 224286208.0, "11890": 224286208.0, "11895": 224810496.0, "11900": 224286208.0, "11905": 224286208.0, "11910": 224810496.0, "11915": 225334784.0, "11920": 225334784.0, "11925": 224286208.0, "11930": 224810496.0, "11935": 224679424.0, "11940": 224286208.0, "11945": 225334784.0, "11950": 224286208.0, "11955": 224286208.0, "11960": 225334784.0, "11965": 224810496.0, "11970": 225334784.0, "11975": 224286208.0, "11980": 224286208.0, "11985": 225334784.0, "11990": 224286208.0, "11995": 224810496.0, "12000": 224810496.0, "12005": 224679424.0, "12010": 224286208.0, "12015": 224810496.0, "12020": 224286208.0, "12025": 224810496.0, "12030": 224286208.0, "12035": 224810496.0, "12040": 225334784.0, "12045": 225334784.0, "12050": 224286208.0, "12055": 224286208.0, "12060": 224810496.0, "12065": 224810496.0, "12070": 225334784.0, "12075": 224810496.0, "12080": 224286208.0, "12085": 224810496.0, "12090": 224286208.0, "12095": 224286208.0, "12100": 224286208.0, "12105": 224286208.0, "12110": 224286208.0, "12115": 225334784.0, "12120": 224286208.0, "12125": 
224286208.0, "12130": 224286208.0, "12135": 224286208.0, "12140": 225334784.0, "12145": 224810496.0, "12150": 224286208.0, "12155": 224810496.0, "12160": 224286208.0, "12165": 224286208.0, "12170": 224810496.0, "12175": 225334784.0, "12180": 224810496.0, "12185": 224810496.0, "12190": 224286208.0, "12195": 224286208.0, "12200": 224810496.0, "12205": 224286208.0, "12210": 224810496.0, "12215": 224286208.0, "12220": 224810496.0, "12225": 224286208.0, "12230": 224286208.0, "12235": 224286208.0, "12240": 224286208.0, "12245": 224810496.0, "12250": 224286208.0, "12255": 224810496.0, "12260": 224286208.0, "12265": 224810496.0, "12270": 224286208.0, "12275": 224286208.0, "12280": 224810496.0, "12285": 224286208.0, "12290": 224810496.0, "12295": 224286208.0, "12300": 224286208.0, "12305": 224286208.0, "12310": 224286208.0, "12315": 224810496.0, "12320": 225334784.0, "12325": 224286208.0, "12330": 224810496.0, "12335": 225334784.0, "12340": 224810496.0, "12345": 224810496.0, "12350": 224286208.0, "12355": 224286208.0, "12360": 224286208.0, "12365": 224810496.0, "12370": 224810496.0, "12375": 224286208.0, "12380": 224286208.0, "12385": 224286208.0, "12390": 224810496.0, "12395": 225334784.0, "12400": 224286208.0, "12405": 224286208.0, "12410": 224286208.0, "12415": 224286208.0, "12420": 224810496.0, "12425": 224286208.0, "12430": 224810496.0, "12435": 224286208.0, "12440": 224810496.0, "12445": 225334784.0, "12450": 224286208.0, "12455": 224286208.0, "12460": 224810496.0, "12465": 225334784.0, "12470": 224810496.0, "12475": 224286208.0, "12480": 224810496.0, "12485": 224810496.0, "12490": 225334784.0, "12495": 224286208.0, "12500": 224810496.0, "12505": 224810496.0, "12510": 224286208.0, "12515": 224810496.0, "12520": 224810496.0, "12525": 224810496.0, "12530": 224286208.0, "12535": 224286208.0, "12540": 224810496.0, "12545": 224810496.0, "12550": 224286208.0, "12555": 224286208.0, "12560": 224810496.0, "12565": 224810496.0, "12570": 224286208.0, "12575": 225334784.0, "12580": 224810496.0, "12585": 224286208.0, "12590": 224810496.0, "12595": 224810496.0, "12600": 225334784.0, "12605": 225334784.0, "12610": 224286208.0, "12615": 225334784.0, "12620": 224286208.0, "12625": 225334784.0, "12630": 224810496.0, "12635": 224286208.0, "12640": 224286208.0, "12645": 224810496.0, "12650": 225334784.0, "12655": 225334784.0, "12660": 224286208.0, "12665": 224810496.0, "12670": 224286208.0, "12675": 225334784.0, "12680": 224810496.0, "12685": 224810496.0, "12690": 224810496.0, "12695": 224810496.0, "12700": 224286208.0, "12705": 224286208.0, "12710": 224810496.0, "12715": 224286208.0, "12720": 224286208.0, "12725": 224810496.0, "12730": 224286208.0, "12735": 224286208.0, "12740": 224810496.0, "12745": 224286208.0, "12750": 224286208.0, "12755": 224286208.0, "12760": 224286208.0, "12765": 224286208.0, "12770": 224810496.0, "12775": 224286208.0, "12780": 224286208.0, "12785": 224810496.0, "12790": 224810496.0, "12795": 224286208.0, "12800": 224286208.0, "12805": 224810496.0, "12810": 224286208.0, "12815": 224286208.0, "12820": 224286208.0, "12825": 225334784.0, "12830": 224810496.0, "12835": 224286208.0, "12840": 224810496.0, "12845": 224286208.0, "12850": 224810496.0, "12855": 224810496.0, "12860": 225334784.0, "12865": 224810496.0, "12870": 224286208.0, "12875": 224286208.0, "12880": 224810496.0, "12885": 224286208.0, "12890": 224286208.0, "12895": 224286208.0, "12900": 224286208.0, "12905": 224810496.0, "12910": 224810496.0, "12915": 224286208.0, "12920": 224810496.0, "12925": 224810496.0, "12930": 224286208.0, 
"12935": 224810496.0, "12940": 225334784.0, "12945": 224810496.0, "12950": 224286208.0, "12955": 224286208.0, "12960": 224286208.0, "12965": 224286208.0, "12970": 225334784.0, "12975": 224810496.0, "12980": 224810496.0, "12985": 224810496.0, "12990": 224286208.0, "12995": 224286208.0, "13000": 224286208.0, "13005": 224286208.0, "13010": 224810496.0, "13015": 224286208.0, "13020": 224286208.0, "13025": 224286208.0, "13030": 224810496.0, "13035": 224810496.0, "13040": 224810496.0, "13045": 224810496.0, "13050": 224286208.0, "13055": 224286208.0, "13060": 224810496.0, "13065": 224810496.0, "13070": 224286208.0, "13075": 224810496.0, "13080": 224286208.0, "13085": 224286208.0, "13090": 224810496.0, "13095": 224286208.0, "13100": 224286208.0, "13105": 224810496.0, "13110": 224286208.0, "13115": 224286208.0, "13120": 224810496.0, "13125": 224286208.0, "13130": 224810496.0, "13135": 224286208.0, "13140": 224810496.0, "13145": 225334784.0, "13150": 224286208.0, "13155": 224286208.0, "13160": 224286208.0, "13165": 224286208.0, "13170": 225334784.0, "13175": 224810496.0, "13180": 225334784.0, "13185": 224810496.0, "13190": 224810496.0, "13195": 224286208.0, "13200": 224810496.0, "13205": 224810496.0, "13210": 224286208.0, "13215": 224810496.0, "13220": 224286208.0, "13225": 224286208.0, "13230": 224810496.0, "13235": 224286208.0, "13240": 224286208.0, "13245": 224286208.0, "13250": 224810496.0, "13255": 225334784.0, "13260": 224810496.0, "13265": 225334784.0, "13270": 224810496.0, "13275": 224810496.0, "13280": 224286208.0, "13285": 224810496.0, "13290": 224286208.0, "13295": 224286208.0, "13300": 224286208.0, "13305": 224286208.0, "13310": 224286208.0, "13315": 224286208.0, "13320": 224286208.0, "13325": 224286208.0, "13330": 224286208.0, "13335": 224286208.0, "13340": 224810496.0, "13345": 224810496.0, "13350": 224286208.0, "13355": 224286208.0, "13360": 224810496.0, "13365": 225334784.0, "13370": 224286208.0, "13375": 224810496.0, "13380": 224810496.0, "13385": 224810496.0, "13390": 224286208.0, "13395": 224286208.0, "13400": 224810496.0, "13405": 224286208.0, "13410": 224286208.0, "13415": 224286208.0, "13420": 224286208.0, "13425": 224286208.0, "13430": 224286208.0, "13435": 224286208.0, "13440": 224810496.0, "13445": 224810496.0, "13450": 224286208.0, "13455": 224286208.0, "13460": 224810496.0, "13465": 224286208.0, "13470": 224286208.0, "13475": 224286208.0, "13480": 224286208.0, "13485": 224810496.0, "13490": 225334784.0, "13495": 224810496.0, "13500": 225334784.0, "13505": 224286208.0, "13510": 224810496.0, "13515": 224810496.0, "13520": 224286208.0, "13525": 224810496.0, "13530": 224286208.0, "13535": 224286208.0, "13540": 224286208.0, "13545": 224286208.0, "13550": 225334784.0, "13555": 224810496.0, "13560": 224286208.0, "13565": 224286208.0, "13570": 224286208.0, "13575": 224286208.0, "13580": 224286208.0, "13585": 224810496.0, "13590": 225334784.0, "13595": 224286208.0, "13600": 224810496.0, "13605": 224286208.0, "13610": 225334784.0, "13615": 224286208.0, "13620": 224286208.0, "13625": 224810496.0, "13630": 224810496.0, "13635": 224286208.0, "13640": 225334784.0, "13645": 224286208.0, "13650": 224810496.0, "13655": 225334784.0, "13660": 224810496.0, "13665": 224286208.0, "13670": 225334784.0, "13675": 224810496.0, "13680": 224286208.0, "13685": 224286208.0, "13690": 224286208.0, "13695": 225334784.0, "13700": 224286208.0, "13705": 224286208.0, "13710": 224286208.0, "13715": 224286208.0, "13720": 224286208.0, "13725": 224810496.0, "13730": 224286208.0, "13735": 224810496.0, "13740": 
224286208.0, "13745": 224286208.0, "13750": 224286208.0, "13755": 224286208.0, "13760": 224286208.0, "13765": 224286208.0, "13770": 224679424.0, "13775": 224810496.0, "13780": 224810496.0, "13785": 225334784.0, "13790": 224810496.0, "13795": 224810496.0, "13800": 224286208.0, "13805": 224810496.0, "13810": 224810496.0, "13815": 224286208.0, "13820": 224810496.0, "13825": 224810496.0, "13830": 224810496.0, "13835": 224286208.0, "13840": 224810496.0, "13845": 224286208.0, "13850": 224286208.0, "13855": 224286208.0, "13860": 224286208.0, "13865": 224286208.0, "13870": 224286208.0, "13875": 224286208.0, "13880": 224286208.0, "13885": 224810496.0, "13890": 224286208.0, "13895": 224286208.0, "13900": 225334784.0, "13905": 224286208.0, "13910": 225334784.0, "13915": 224286208.0, "13920": 224810496.0, "13925": 224286208.0, "13930": 224810496.0, "13935": 224810496.0, "13940": 225334784.0, "13945": 225334784.0, "13950": 224810496.0, "13955": 224286208.0, "13960": 224286208.0, "13965": 224810496.0, "13970": 224810496.0, "13975": 224286208.0, "13980": 224810496.0, "13985": 224286208.0, "13990": 224286208.0, "13995": 224286208.0, "14000": 224810496.0, "14005": 225334784.0, "14010": 224810496.0, "14015": 224810496.0, "14020": 225334784.0, "14025": 224810496.0, "14030": 224810496.0, "14035": 224286208.0, "14040": 224810496.0, "14045": 224286208.0, "14050": 224810496.0, "14055": 224286208.0, "14060": 224810496.0, "14065": 224286208.0, "14070": 224286208.0, "14075": 224286208.0, "14080": 224286208.0, "14085": 224810496.0, "14090": 224286208.0, "14095": 224810496.0, "14100": 225334784.0, "14105": 224286208.0, "14110": 225334784.0, "14115": 225334784.0, "14120": 224810496.0, "14125": 224286208.0, "14130": 224286208.0, "14135": 224810496.0, "14140": 224810496.0, "14145": 224810496.0, "14150": 224286208.0, "14155": 224810496.0, "14160": 224286208.0, "14165": 224810496.0, "14170": 224286208.0, "14175": 224286208.0, "14180": 224286208.0, "14185": 225334784.0, "14190": 224810496.0, "14195": 224810496.0, "14200": 225334784.0, "14205": 224286208.0, "14210": 224810496.0, "14215": 224810496.0, "14220": 224286208.0, "14225": 224810496.0, "14230": 224286208.0, "14235": 224286208.0, "14240": 224286208.0, "14245": 224286208.0, "14250": 225334784.0, "14255": 224810496.0, "14260": 224286208.0, "14265": 224286208.0, "14270": 224810496.0, "14275": 224810496.0, "14280": 225334784.0, "14285": 224286208.0, "14290": 224810496.0, "14295": 224810496.0, "14300": 224286208.0, "14305": 224286208.0, "14310": 224286208.0, "14315": 224286208.0, "14320": 225334784.0, "14325": 225334784.0, "14330": 224286208.0, "14335": 224810496.0, "14340": 224810496.0, "14345": 224810496.0, "14350": 224286208.0, "14355": 224810496.0, "14360": 224286208.0, "14365": 225334784.0, "14370": 224810496.0, "14375": 224679424.0, "14380": 224286208.0, "14385": 224286208.0, "14390": 225334784.0, "14395": 224810496.0, "14400": 225334784.0, "14405": 224810496.0, "14410": 224810496.0, "14415": 224810496.0, "14420": 224286208.0, "14425": 224810496.0, "14430": 225334784.0, "14435": 224810496.0, "14440": 224286208.0, "14445": 224286208.0, "14450": 224286208.0, "14455": 224810496.0, "14460": 224286208.0, "14465": 224810496.0, "14470": 224810496.0, "14475": 224286208.0, "14480": 224810496.0, "14485": 224286208.0, "14490": 224810496.0, "14495": 224810496.0, "14500": 224810496.0, "14505": 224810496.0, "14510": 224810496.0, "14515": 224286208.0, "14520": 224810496.0, "14525": 224286208.0, "14530": 224810496.0, "14535": 224286208.0, "14540": 224810496.0, "14545": 224286208.0, 
"14550": 224810496.0, "14555": 224286208.0, "14560": 224286208.0, "14565": 224810496.0, "14570": 224810496.0, "14575": 225334784.0, "14580": 224286208.0, "14585": 224286208.0, "14590": 224286208.0, "14595": 224286208.0, "14600": 224286208.0, "14605": 224286208.0, "14610": 224286208.0, "14615": 224810496.0, "14620": 224810496.0, "14625": 224810496.0, "14630": 224286208.0, "14635": 224679424.0, "14640": 224286208.0, "14645": 224286208.0, "14650": 224810496.0, "14655": 224286208.0, "14660": 224286208.0, "14665": 224286208.0, "14670": 224286208.0, "14675": 224286208.0, "14680": 224286208.0, "14685": 224286208.0, "14690": 224810496.0, "14695": 224286208.0, "14700": 224286208.0, "14705": 225334784.0, "14710": 224286208.0, "14715": 224286208.0, "14720": 224286208.0, "14725": 224810496.0, "14730": 224810496.0, "14735": 224810496.0, "14740": 224286208.0, "14745": 224810496.0, "14750": 224679424.0, "14755": 224286208.0, "14760": 224810496.0, "14765": 224286208.0, "14770": 224810496.0, "14775": 224810496.0, "14780": 224286208.0, "14785": 224286208.0, "14790": 225334784.0, "14795": 224810496.0, "14800": 224286208.0, "14805": 224286208.0, "14810": 224810496.0, "14815": 224810496.0, "14820": 224286208.0, "14825": 224679424.0, "14830": 224810496.0, "14835": 224286208.0, "14840": 224810496.0, "14845": 224286208.0, "14850": 225334784.0, "14855": 224810496.0, "14860": 224810496.0, "14865": 224286208.0, "14870": 224286208.0, "14875": 224286208.0, "14880": 224286208.0, "14885": 224286208.0, "14890": 224286208.0, "14895": 224286208.0, "14900": 224286208.0, "14905": 224810496.0, "14910": 224810496.0, "14915": 224810496.0, "14920": 225334784.0, "14925": 224810496.0, "14930": 224810496.0, "14935": 224810496.0, "14940": 224286208.0, "14945": 224286208.0, "14950": 224810496.0, "14955": 224286208.0, "14960": 224286208.0, "14965": 224810496.0, "14970": 224810496.0, "14975": 225334784.0, "14980": 224810496.0, "14985": 224810496.0, "14990": 224810496.0, "14995": 224810496.0, "15000": 224286208.0, "15005": 225334784.0, "15010": 224286208.0, "15015": 224286208.0, "15020": 224286208.0, "15025": 224286208.0, "15030": 224679424.0, "15035": 224286208.0, "15040": 224810496.0, "15045": 225334784.0, "15050": 224810496.0, "15055": 224286208.0, "15060": 224286208.0, "15065": 224810496.0, "15070": 225334784.0, "15075": 224810496.0, "15080": 224286208.0, "15085": 224810496.0, "15090": 224286208.0, "15095": 224810496.0, "15100": 224810496.0, "15105": 224286208.0, "15110": 224286208.0, "15115": 224810496.0, "15120": 224286208.0, "15125": 224810496.0, "15130": 224810496.0, "15135": 224286208.0, "15140": 224286208.0, "15145": 224286208.0, "15150": 225334784.0, "15155": 224810496.0, "15160": 224810496.0, "15165": 225334784.0, "15170": 224810496.0, "15175": 224810496.0, "15180": 224286208.0, "15185": 224810496.0, "15190": 224286208.0, "15195": 224810496.0, "15200": 224286208.0, "15205": 224810496.0, "15210": 224286208.0, "15215": 224810496.0, "15220": 224810496.0, "15225": 224286208.0, "15230": 224810496.0, "15235": 224810496.0, "15240": 224286208.0, "15245": 224810496.0, "15250": 224286208.0, "15255": 224286208.0, "15260": 224810496.0, "15265": 224810496.0, "15270": 224286208.0, "15275": 224286208.0, "15280": 224810496.0, "15285": 224810496.0, "15290": 224286208.0, "15295": 224810496.0, "15300": 224810496.0, "15305": 224810496.0, "15310": 224286208.0, "15315": 225334784.0, "15320": 225334784.0, "15325": 225334784.0, "15330": 224286208.0, "15335": 225334784.0, "15340": 224810496.0, "15345": 224286208.0, "15350": 224810496.0, "15355": 
224286208.0, "15360": 224286208.0, "15365": 224286208.0, "15370": 225334784.0, "15375": 224286208.0, "15380": 224286208.0, "15385": 224810496.0, "15390": 224810496.0, "15395": 224810496.0, "15400": 225334784.0, "15405": 224286208.0, "15410": 224810496.0, "15415": 224286208.0, "15420": 224286208.0, "15425": 224810496.0, "15430": 225334784.0, "15435": 224810496.0, "15440": 224286208.0, "15445": 224286208.0, "15450": 224286208.0, "15455": 224810496.0, "15460": 224286208.0, "15465": 224286208.0, "15470": 225334784.0, "15475": 224286208.0, "15480": 224286208.0, "15485": 224286208.0, "15490": 225334784.0, "15495": 224679424.0, "15500": 224286208.0, "15505": 224286208.0, "15510": 224810496.0, "15515": 224286208.0, "15520": 224286208.0, "15525": 224810496.0, "15530": 224286208.0, "15535": 224810496.0, "15540": 224286208.0, "15545": 224810496.0, "15550": 225334784.0, "15555": 224810496.0, "15560": 225334784.0, "15565": 224286208.0, "15570": 224286208.0, "15575": 224810496.0, "15580": 224810496.0, "15585": 225334784.0, "15590": 224286208.0, "15595": 224286208.0, "15600": 224286208.0, "15605": 224810496.0, "15610": 224810496.0, "15615": 224286208.0, "15620": 224810496.0, "15625": 224810496.0, "15630": 224810496.0, "15635": 224286208.0, "15640": 224286208.0, "15645": 224810496.0, "15650": 224286208.0, "15655": 224286208.0, "15660": 224286208.0, "15665": 224810496.0, "15670": 224810496.0, "15675": 224286208.0, "15680": 224286208.0, "15685": 224810496.0, "15690": 224286208.0, "15695": 224286208.0, "15700": 224810496.0, "15705": 224810496.0, "15710": 224810496.0, "15715": 224286208.0, "15720": 224810496.0, "15725": 224810496.0, "15730": 224810496.0, "15735": 224810496.0, "15740": 224810496.0, "15745": 224810496.0, "15750": 224810496.0, "15755": 224286208.0, "15760": 224286208.0, "15765": 224286208.0, "15770": 224810496.0, "15775": 225334784.0, "15780": 224286208.0, "15785": 225334784.0, "15790": 224810496.0, "15795": 224810496.0, "15800": 224810496.0, "15805": 224286208.0, "15810": 225334784.0, "15815": 224810496.0, "15820": 224810496.0, "15825": 225334784.0, "15830": 225334784.0, "15835": 224810496.0, "15840": 224810496.0, "15845": 224810496.0, "15850": 225334784.0, "15855": 224286208.0, "15860": 224286208.0, "15865": 224810496.0, "15870": 224286208.0, "15875": 225334784.0, "15880": 224810496.0, "15885": 224810496.0, "15890": 224810496.0, "15895": 224810496.0, "15900": 224810496.0, "15905": 224810496.0, "15910": 224810496.0, "15915": 224286208.0, "15920": 224286208.0, "15925": 224810496.0, "15930": 224810496.0, "15935": 224286208.0, "15940": 224810496.0, "15945": 224286208.0, "15950": 224286208.0, "15955": 225334784.0, "15960": 225334784.0, "15965": 224810496.0, "15970": 225334784.0, "15975": 224810496.0, "15980": 224810496.0, "15985": 224810496.0, "15990": 224810496.0, "15995": 224810496.0, "16000": 224286208.0, "16005": 225334784.0, "16010": 224810496.0, "16015": 224810496.0, "16020": 224286208.0, "16025": 224286208.0, "16030": 225334784.0, "16035": 224679424.0, "16040": 224286208.0, "16045": 224810496.0, "16050": 224286208.0, "16055": 224810496.0, "16060": 224286208.0, "16065": 224286208.0, "16070": 224810496.0, "16075": 224286208.0, "16080": 224286208.0, "16085": 224286208.0, "16090": 224286208.0, "16095": 225334784.0, "16100": 224286208.0, "16105": 224810496.0, "16110": 224286208.0, "16115": 224286208.0, "16120": 224810496.0, "16125": 224286208.0, "16130": 224286208.0, "16135": 224286208.0, "16140": 224810496.0, "16145": 224286208.0, "16150": 224810496.0, "16155": 224810496.0, "16160": 224810496.0, 
"16165": 224810496.0, "16170": 224286208.0, "16175": 224286208.0, "16180": 224286208.0, "16185": 224286208.0, "16190": 224810496.0, "16195": 225334784.0, "16200": 225334784.0, "16205": 224810496.0, "16210": 225334784.0, "16215": 224810496.0, "16220": 225334784.0, "16225": 224810496.0, "16230": 225334784.0, "16235": 224286208.0, "16240": 224286208.0, "16245": 224286208.0, "16250": 224286208.0, "16255": 224286208.0, "16260": 224286208.0, "16265": 225334784.0, "16270": 224810496.0, "16275": 224810496.0, "16280": 224286208.0, "16285": 225334784.0, "16290": 224286208.0, "16295": 224286208.0, "16300": 224286208.0, "16305": 224286208.0, "16310": 224286208.0, "16315": 224286208.0, "16320": 224286208.0, "16325": 224810496.0, "16330": 224286208.0, "16335": 224286208.0, "16340": 224810496.0, "16345": 224286208.0, "16350": 224810496.0, "16355": 224286208.0, "16360": 225334784.0, "16365": 224810496.0, "16370": 225334784.0, "16375": 224286208.0, "16380": 224286208.0, "16385": 224286208.0, "16390": 224286208.0, "16395": 224286208.0, "16400": 224810496.0, "16405": 224810496.0, "16410": 224286208.0, "16415": 224286208.0, "16420": 224810496.0, "16425": 224286208.0, "16430": 224286208.0, "16435": 224286208.0, "16440": 224286208.0, "16445": 224810496.0, "16450": 224810496.0, "16455": 224286208.0, "16460": 224286208.0, "16465": 224286208.0, "16470": 224286208.0, "16475": 224810496.0, "16480": 224286208.0, "16485": 224286208.0, "16490": 224286208.0, "16495": 224810496.0, "16500": 224286208.0, "16505": 225334784.0, "16510": 224286208.0, "16515": 224286208.0, "16520": 225334784.0, "16525": 225334784.0, "16530": 224286208.0, "16535": 224286208.0, "16540": 225334784.0, "16545": 225334784.0, "16550": 224810496.0, "16555": 224810496.0, "16560": 224286208.0, "16565": 224286208.0, "16570": 224286208.0, "16575": 224286208.0, "16580": 224810496.0, "16585": 224286208.0, "16590": 224810496.0, "16595": 225334784.0, "16600": 225334784.0, "16605": 224810496.0, "16610": 224286208.0, "16615": 224286208.0, "16620": 224286208.0, "16625": 224810496.0, "16630": 224810496.0, "16635": 224286208.0, "16640": 224286208.0, "16645": 224286208.0, "16650": 224810496.0, "16655": 225334784.0, "16660": 225203712.0, "16665": 224810496.0, "16670": 224286208.0, "16675": 224810496.0, "16680": 224286208.0, "16685": 224286208.0, "16690": 224286208.0, "16695": 224810496.0, "16700": 224810496.0, "16705": 224810496.0, "16710": 224810496.0, "16715": 225334784.0, "16720": 225334784.0, "16725": 224810496.0, "16730": 224810496.0, "16735": 224810496.0, "16740": 224810496.0, "16745": 224286208.0, "16750": 224286208.0, "16755": 224810496.0, "16760": 225334784.0, "16765": 224810496.0, "16770": 224810496.0, "16775": 224810496.0, "16780": 224286208.0, "16785": 224286208.0, "16790": 224286208.0, "16795": 224810496.0, "16800": 224810496.0, "16805": 225334784.0, "16810": 224286208.0, "16815": 225334784.0, "16820": 224810496.0, "16825": 224810496.0, "16830": 224810496.0, "16835": 224810496.0, "16840": 224286208.0, "16845": 224286208.0, "16850": 224810496.0, "16855": 224810496.0, "16860": 224810496.0, "16865": 224810496.0, "16870": 224810496.0, "16875": 224286208.0, "16880": 224810496.0, "16885": 224810496.0, "16890": 224810496.0, "16895": 224286208.0, "16900": 224810496.0, "16905": 224810496.0, "16910": 224286208.0, "16915": 224810496.0, "16920": 224810496.0, "16925": 225334784.0, "16930": 224286208.0, "16935": 225334784.0, "16940": 224286208.0, "16945": 224810496.0, "16950": 224286208.0, "16955": 224810496.0, "16960": 224286208.0, "16965": 224810496.0, "16970": 
224810496.0, "16975": 224286208.0, "16980": 224286208.0, "16985": 224810496.0, "16990": 225334784.0, "16995": 224810496.0, "17000": 225334784.0, "17005": 224286208.0, "17010": 224286208.0, "17015": 224286208.0, "17020": 224286208.0, "17025": 224286208.0, "17030": 224286208.0, "17035": 224286208.0, "17040": 224810496.0, "17045": 224810496.0, "17050": 224286208.0, "17055": 224286208.0, "17060": 224286208.0, "17065": 224286208.0, "17070": 224286208.0, "17075": 225334784.0, "17080": 225334784.0, "17085": 224286208.0, "17090": 224286208.0, "17095": 224286208.0, "17100": 224286208.0, "17105": 224810496.0, "17110": 224286208.0, "17115": 224286208.0, "17120": 224286208.0, "17125": 224286208.0, "17130": 224286208.0, "17135": 224286208.0, "17140": 224286208.0, "17145": 224286208.0, "17150": 224810496.0, "17155": 224286208.0, "17160": 224810496.0, "17165": 224286208.0, "17170": 224286208.0, "17175": 224286208.0, "17180": 224286208.0, "17185": 224810496.0, "17190": 224286208.0, "17195": 225334784.0, "17200": 224286208.0, "17205": 224286208.0, "17210": 224810496.0, "17215": 224286208.0, "17220": 224810496.0, "17225": 224286208.0, "17230": 225334784.0, "17235": 224286208.0, "17240": 224286208.0, "17245": 224286208.0, "17250": 224286208.0, "17255": 225334784.0, "17260": 224286208.0, "17265": 224286208.0, "17270": 224286208.0, "17275": 224286208.0, "17280": 224286208.0, "17285": 224286208.0, "17290": 224286208.0, "17295": 224286208.0, "17300": 224286208.0, "17305": 224286208.0, "17310": 224286208.0, "17315": 224286208.0, "17320": 224286208.0, "17325": 224286208.0, "17330": 224286208.0, "17335": 225334784.0, "17340": 224810496.0, "17345": 224286208.0, "17350": 225334784.0, "17355": 224286208.0, "17360": 224286208.0, "17365": 224286208.0, "17370": 224286208.0, "17375": 224286208.0, "17380": 224286208.0, "17385": 224286208.0, "17390": 224286208.0, "17395": 224286208.0, "17400": 224810496.0, "17405": 224286208.0, "17410": 224286208.0, "17415": 224810496.0, "17420": 224810496.0, "17425": 224810496.0, "17430": 224286208.0, "17435": 224286208.0, "17440": 224286208.0, "17445": 224810496.0, "17450": 224286208.0, "17455": 224810496.0, "17460": 224810496.0, "17465": 224810496.0, "17470": 224286208.0, "17475": 224810496.0, "17480": 224286208.0, "17485": 224286208.0, "17490": 224810496.0, "17495": 224286208.0, "17500": 224286208.0, "17505": 224286208.0, "17510": 224286208.0, "17515": 224286208.0, "17520": 224810496.0, "17525": 224810496.0, "17530": 224810496.0, "17535": 224810496.0, "17540": 224286208.0, "17545": 224286208.0, "17550": 224286208.0, "17555": 224810496.0, "17560": 224679424.0, "17565": 225334784.0, "17570": 224286208.0, "17575": 224286208.0, "17580": 224286208.0, "17585": 224286208.0, "17590": 224286208.0, "17595": 224810496.0, "17600": 224286208.0, "17605": 224286208.0, "17610": 224286208.0, "17615": 224286208.0, "17620": 224810496.0, "17625": 225334784.0, "17630": 224810496.0, "17635": 224286208.0, "17640": 224286208.0, "17645": 224810496.0, "17650": 224286208.0, "17655": 224286208.0, "17660": 224286208.0, "17665": 224286208.0, "17670": 224810496.0, "17675": 225334784.0, "17680": 224286208.0, "17685": 225334784.0, "17690": 224286208.0, "17695": 225334784.0, "17700": 225334784.0, "17705": 224810496.0, "17710": 224286208.0, "17715": 224286208.0, "17720": 224810496.0, "17725": 224810496.0, "17730": 224286208.0, "17735": 225334784.0, "17740": 224810496.0, "17745": 224810496.0, "17750": 224810496.0, "17755": 224810496.0, "17760": 224286208.0, "17765": 224286208.0, "17770": 224286208.0, "17775": 224810496.0, 
"17780": 224286208.0, "17785": 224810496.0, "17790": 224286208.0, "17795": 224810496.0, "17800": 225334784.0, "17805": 224810496.0, "17810": 224286208.0, "17815": 225334784.0, "17820": 224286208.0, "17825": 224286208.0, "17830": 224286208.0, "17835": 224286208.0, "17840": 224286208.0, "17845": 224286208.0, "17850": 224286208.0, "17855": 224286208.0, "17860": 224286208.0, "17865": 224286208.0, "17870": 224810496.0, "17875": 224286208.0, "17880": 224286208.0, "17885": 224286208.0, "17890": 224286208.0, "17895": 224286208.0, "17900": 224286208.0, "17905": 224286208.0, "17910": 224286208.0, "17915": 224286208.0, "17920": 224286208.0, "17925": 224286208.0, "17930": 224286208.0, "17935": 224810496.0, "17940": 224810496.0, "17945": 224810496.0, "17950": 224810496.0, "17955": 224286208.0, "17960": 224286208.0, "17965": 224286208.0, "17970": 224810496.0, "17975": 224286208.0, "17980": 224286208.0, "17985": 224810496.0, "17990": 224286208.0, "17995": 224286208.0, "18000": 224286208.0, "18005": 224286208.0, "18010": 224286208.0, "18015": 224286208.0, "18020": 224286208.0, "18025": 224286208.0, "18030": 224286208.0, "18035": 224286208.0, "18040": 224286208.0, "18045": 224810496.0, "18050": 224286208.0, "18055": 224286208.0, "18060": 224286208.0, "18065": 224286208.0, "18070": 224286208.0, "18075": 224810496.0, "18080": 224286208.0, "18085": 224286208.0, "18090": 224286208.0, "18095": 224286208.0, "18100": 224286208.0, "18105": 224286208.0, "18110": 224286208.0, "18115": 224286208.0, "18120": 224810496.0, "18125": 224286208.0, "18130": 224286208.0, "18135": 224286208.0, "18140": 224286208.0, "18145": 224286208.0, "18150": 224286208.0, "18155": 224286208.0, "18160": 224286208.0, "18165": 224286208.0, "18170": 224810496.0, "18175": 224286208.0, "18180": 224286208.0, "18185": 224286208.0, "18190": 224286208.0, "18195": 224286208.0, "18200": 224286208.0, "18205": 224810496.0, "18210": 224286208.0, "18215": 224286208.0, "18220": 224286208.0, "18225": 224286208.0, "18230": 224286208.0, "18235": 224286208.0, "18240": 224286208.0, "18245": 224286208.0, "18250": 224286208.0, "18255": 224286208.0, "18260": 224286208.0, "18265": 224286208.0, "18270": 224286208.0, "18275": 224286208.0, "18280": 224286208.0, "18285": 224286208.0, "18290": 224286208.0, "18295": 224286208.0, "18300": 225334784.0, "18305": 224286208.0, "18310": 224286208.0, "18315": 224286208.0, "18320": 224286208.0, "18325": 224286208.0, "18330": 224286208.0, "18335": 224810496.0, "18340": 224286208.0, "18345": 224286208.0, "18350": 224286208.0, "18355": 224286208.0, "18360": 224679424.0, "18365": 224286208.0, "18370": 224286208.0, "18375": 224286208.0, "18380": 224286208.0, "18385": 224810496.0, "18390": 224286208.0, "18395": 225334784.0, "18400": 224286208.0, "18405": 224286208.0, "18410": 224286208.0, "18415": 224286208.0, "18420": 224286208.0, "18425": 224286208.0, "18430": 224286208.0, "18435": 224286208.0, "18440": 224286208.0, "18445": 224286208.0, "18450": 224810496.0, "18455": 225334784.0, "18460": 224286208.0, "18465": 224286208.0, "18470": 224286208.0, "18475": 224286208.0, "18480": 224286208.0, "18485": 224286208.0, "18490": 224286208.0, "18495": 224286208.0, "18500": 224286208.0, "18505": 224679424.0, "18510": 224286208.0, "18515": 225334784.0, "18520": 224286208.0, "18525": 224286208.0, "18530": 224286208.0, "18535": 224286208.0, "18540": 225334784.0, "18545": 224286208.0, "18550": 224810496.0, "18555": 224810496.0, "18560": 224286208.0, "18565": 224810496.0, "18570": 224286208.0, "18575": 224286208.0, "18580": 224286208.0, "18585": 
224286208.0, "18590": 224286208.0, "18595": 224286208.0, "18600": 224810496.0, "18605": 224286208.0, "18610": 224286208.0, "18615": 224286208.0, "18620": 224286208.0, "18625": 224286208.0, "18630": 224286208.0, "18635": 224286208.0, "18640": 224286208.0, "18645": 224810496.0, "18650": 224286208.0, "18655": 224286208.0, "18660": 224810496.0, "18665": 224286208.0, "18670": 224286208.0, "18675": 224286208.0, "18680": 224286208.0, "18685": 224286208.0, "18690": 224286208.0, "18695": 224286208.0, "18700": 224286208.0, "18705": 224286208.0, "18710": 224286208.0, "18715": 224286208.0, "18720": 224286208.0, "18725": 224286208.0, "18730": 224286208.0, "18735": 224286208.0, "18740": 224286208.0, "18745": 224286208.0, "18750": 224286208.0, "18755": 224286208.0, "18760": 224286208.0, "18765": 224286208.0, "18770": 224286208.0, "18775": 224286208.0, "18780": 224286208.0, "18785": 224286208.0, "18790": 224286208.0, "18795": 224286208.0, "18800": 224286208.0, "18805": 225334784.0, "18810": 224286208.0, "18815": 224286208.0, "18820": 224286208.0, "18825": 224810496.0, "18830": 224286208.0, "18835": 224810496.0, "18840": 224286208.0, "18845": 224286208.0, "18850": 224286208.0, "18855": 224286208.0, "18860": 224286208.0, "18865": 224286208.0, "18870": 224286208.0, "18875": 224286208.0, "18880": 224286208.0, "18885": 224286208.0, "18890": 224679424.0, "18895": 224286208.0, "18900": 224286208.0, "18905": 224679424.0, "18910": 224286208.0, "18915": 224286208.0, "18920": 224286208.0, "18925": 224286208.0, "18930": 224286208.0, "18935": 224286208.0, "18940": 224286208.0, "18945": 224286208.0, "18950": 224286208.0, "18955": 224286208.0, "18960": 224286208.0, "18965": 224286208.0, "18970": 224286208.0, "18975": 224286208.0, "18980": 224286208.0, "18985": 224286208.0, "18990": 224286208.0, "18995": 224286208.0, "19000": 224286208.0, "19005": 224286208.0, "19010": 224810496.0, "19015": 224286208.0, "19020": 224810496.0, "19025": 224286208.0, "19030": 224286208.0, "19035": 225334784.0, "19040": 224286208.0, "19045": 224286208.0, "19050": 225334784.0, "19055": 224286208.0, "19060": 224286208.0, "19065": 224286208.0, "19070": 224286208.0, "19075": 225334784.0, "19080": 224810496.0, "19085": 224286208.0, "19090": 224286208.0, "19095": 224286208.0, "19100": 224286208.0, "19105": 224286208.0, "19110": 224286208.0, "19115": 224286208.0, "19120": 224286208.0, "19125": 224810496.0, "19130": 224286208.0, "19135": 224810496.0, "19140": 224286208.0, "19145": 224286208.0, "19150": 224286208.0, "19155": 224286208.0, "19160": 224286208.0, "19165": 224810496.0, "19170": 224286208.0, "19175": 224286208.0, "19180": 224286208.0, "19185": 224286208.0, "19190": 224286208.0, "19195": 224286208.0, "19200": 224810496.0, "19205": 224286208.0, "19210": 224286208.0, "19215": 225334784.0, "19220": 224286208.0, "19225": 224286208.0, "19230": 224286208.0, "19235": 224286208.0, "19240": 224286208.0, "19245": 224286208.0, "19250": 224286208.0, "19255": 224286208.0, "19260": 224286208.0, "19265": 224286208.0, "19270": 224286208.0, "19275": 224286208.0, "19280": 224286208.0, "19285": 224286208.0, "19290": 224286208.0, "19295": 224286208.0, "19300": 224286208.0, "19305": 224286208.0, "19310": 224286208.0, "19315": 224286208.0, "19320": 224286208.0, "19325": 224286208.0, "19330": 224286208.0, "19335": 224286208.0, "19340": 224286208.0, "19345": 224286208.0, "19350": 224286208.0, "19355": 224810496.0, "19360": 224810496.0, "19365": 224286208.0, "19370": 224286208.0, "19375": 224286208.0, "19380": 224286208.0, "19385": 224286208.0, "19390": 224286208.0, 
"19395": 224286208.0, "19400": 224286208.0, "19405": 224286208.0, "19410": 224286208.0, "19415": 224810496.0, "19420": 224286208.0, "19425": 224286208.0, "19430": 224286208.0, "19435": 224286208.0, "19440": 224286208.0, "19445": 224286208.0, "19450": 224286208.0, "19455": 224286208.0, "19460": 224286208.0, "19465": 224810496.0, "19470": 224810496.0, "19475": 224286208.0, "19480": 224286208.0, "19485": 224810496.0, "19490": 224286208.0, "19495": 224286208.0, "19500": 224810496.0, "19505": 224286208.0, "19510": 224286208.0, "19515": 224286208.0, "19520": 224286208.0, "19525": 224679424.0, "19530": 224286208.0, "19535": 224286208.0, "19540": 224286208.0, "19545": 224286208.0, "19550": 224286208.0, "19555": 224286208.0, "19560": 224286208.0, "19565": 225334784.0, "19570": 224810496.0, "19575": 224810496.0, "19580": 224286208.0, "19585": 224286208.0, "19590": 224286208.0, "19595": 224286208.0, "19600": 224810496.0, "19605": 224286208.0, "19610": 224810496.0, "19615": 224286208.0, "19620": 224286208.0, "19625": 224286208.0, "19630": 224286208.0, "19635": 224286208.0, "19640": 224286208.0, "19645": 224810496.0, "19650": 224286208.0, "19655": 224679424.0, "19660": 224286208.0, "19665": 224286208.0, "19670": 225334784.0, "19675": 224286208.0, "19680": 224286208.0, "19685": 224286208.0, "19690": 224286208.0, "19695": 225334784.0, "19700": 225334784.0, "19705": 224286208.0, "19710": 224810496.0, "19715": 224286208.0, "19720": 224286208.0, "19725": 224286208.0, "19730": 224286208.0, "19735": 224810496.0, "19740": 224286208.0, "19745": 224286208.0, "19750": 224286208.0, "19755": 224286208.0, "19760": 224286208.0, "19765": 224286208.0, "19770": 224286208.0, "19775": 224286208.0, "19780": 224286208.0, "19785": 225334784.0, "19790": 224286208.0, "19795": 224810496.0, "19800": 224286208.0, "19805": 224286208.0, "19810": 224286208.0, "19815": 224286208.0, "19820": 224810496.0, "19825": 224286208.0, "19830": 224810496.0, "19835": 224286208.0, "19840": 224286208.0, "19845": 224286208.0, "19850": 224286208.0, "19855": 224810496.0, "19860": 224286208.0, "19865": 224679424.0, "19870": 224286208.0, "19875": 224286208.0, "19880": 224286208.0, "19885": 224286208.0, "19890": 224679424.0, "19895": 224810496.0, "19900": 224286208.0, "19905": 224286208.0, "19910": 224679424.0, "19915": 224286208.0, "19920": 224286208.0, "19925": 224286208.0, "19930": 224286208.0, "19935": 224286208.0, "19940": 224286208.0, "19945": 224810496.0, "19950": 224810496.0, "19955": 224286208.0, "19960": 224286208.0, "19965": 224286208.0, "19970": 224286208.0, "19975": 224286208.0, "19980": 224286208.0, "19985": 224810496.0, "19990": 224679424.0, "19995": 224286208.0, "20000": 224810496.0}}, "num-zeros": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1398.0, "25": 1453.0, "30": 1525.0, "35": 1484.0, "40": 1567.0, "45": 1637.0, "50": 1613.0, "55": 1689.0, "60": 1587.0, "65": 1635.0, "70": 1667.0, "75": 1619.0, "80": 1591.0, "85": 1608.0, "90": 1435.0, "95": 1509.0, "100": 1488.0, "105": 1475.0, "110": 1490.0, "115": 1442.0, "120": 1527.0, "125": 1499.0, "130": 1458.0, "135": 1491.0, "140": 1506.0, "145": 1543.0, "150": 1472.0, "155": 1511.0, "160": 1480.0, "165": 1569.0, "170": 1430.0, "175": 1449.0, "180": 1511.0, "185": 1528.0, "190": 1509.0, "195": 1569.0, "200": 1441.0, "205": 1583.0, "210": 1447.0, "215": 1494.0, "220": 1509.0, "225": 1473.0, "230": 1473.0, "235": 1590.0, "240": 1505.0, "245": 1470.0, "250": 1531.0, "255": 1512.0, "260": 1580.0, "265": 1491.0, 
"270": 1524.0, "275": 1578.0, "280": 1504.0, "285": 1555.0, "290": 1629.0, "295": 1605.0, "300": 1568.0, "305": 1458.0, "310": 1518.0, "315": 1488.0, "320": 1596.0, "325": 1503.0, "330": 1502.0, "335": 1477.0, "340": 1540.0, "345": 1534.0, "350": 1526.0, "355": 1533.0, "360": 1492.0, "365": 1562.0, "370": 1532.0, "375": 1518.0, "380": 1589.0, "385": 1543.0, "390": 1438.0, "395": 1572.0, "400": 1554.0, "405": 1585.0, "410": 1563.0, "415": 1553.0, "420": 1471.0, "425": 1629.0, "430": 1491.0, "435": 1482.0, "440": 1548.0, "445": 1546.0, "450": 1513.0, "455": 1529.0, "460": 1549.0, "465": 1602.0, "470": 1468.0, "475": 1635.0, "480": 1580.0, "485": 1505.0, "490": 1560.0, "495": 1494.0, "500": 1437.0, "505": 1490.0, "510": 1557.0, "515": 1528.0, "520": 1530.0, "525": 1463.0, "530": 1528.0, "535": 1583.0, "540": 1582.0, "545": 1514.0, "550": 1634.0, "555": 1572.0, "560": 1554.0, "565": 1658.0, "570": 1692.0, "575": 1548.0, "580": 1551.0, "585": 1584.0, "590": 1513.0, "595": 1595.0, "600": 1600.0, "605": 1514.0, "610": 1507.0, "615": 1533.0, "620": 1590.0, "625": 1538.0, "630": 1599.0, "635": 1585.0, "640": 1481.0, "645": 1551.0, "650": 1570.0, "655": 1588.0, "660": 1569.0, "665": 1536.0, "670": 1568.0, "675": 1530.0, "680": 1601.0, "685": 1513.0, "690": 1495.0, "695": 1566.0, "700": 1500.0, "705": 1594.0, "710": 1493.0, "715": 1532.0, "720": 1553.0, "725": 1411.0, "730": 1501.0, "735": 1522.0, "740": 1548.0, "745": 1560.0, "750": 1596.0, "755": 1545.0, "760": 1638.0, "765": 1553.0, "770": 1567.0, "775": 1558.0, "780": 1575.0, "785": 1494.0, "790": 1650.0, "795": 1564.0, "800": 1547.0, "805": 1488.0, "810": 1584.0, "815": 1634.0, "820": 1552.0, "825": 1506.0, "830": 1612.0, "835": 1566.0, "840": 1629.0, "845": 1492.0, "850": 1536.0, "855": 1545.0, "860": 1552.0, "865": 1546.0, "870": 1487.0, "875": 1572.0, "880": 1550.0, "885": 1537.0, "890": 1589.0, "895": 1608.0, "900": 1564.0, "905": 1651.0, "910": 1562.0, "915": 1506.0, "920": 1598.0, "925": 1550.0, "930": 1550.0, "935": 1529.0, "940": 1531.0, "945": 1555.0, "950": 1538.0, "955": 1560.0, "960": 1597.0, "965": 1549.0, "970": 1505.0, "975": 1515.0, "980": 1551.0, "985": 1565.0, "990": 1521.0, "995": 1519.0, "1000": 1600.0, "1005": 1571.0, "1010": 1534.0, "1015": 1585.0, "1020": 1530.0, "1025": 1582.0, "1030": 1518.0, "1035": 1526.0, "1040": 1540.0, "1045": 1563.0, "1050": 1532.0, "1055": 1521.0, "1060": 1562.0, "1065": 1480.0, "1070": 1530.0, "1075": 1405.0, "1080": 1625.0, "1085": 1585.0, "1090": 1557.0, "1095": 1487.0, "1100": 1512.0, "1105": 1538.0, "1110": 1583.0, "1115": 1351.0, "1120": 1542.0, "1125": 1566.0, "1130": 1581.0, "1135": 1538.0, "1140": 1518.0, "1145": 1600.0, "1150": 1585.0, "1155": 1603.0, "1160": 1565.0, "1165": 1535.0, "1170": 1708.0, "1175": 1651.0, "1180": 1492.0, "1185": 1594.0, "1190": 1591.0, "1195": 1503.0, "1200": 1649.0, "1205": 1496.0, "1210": 1543.0, "1215": 1565.0, "1220": 1506.0, "1225": 1559.0, "1230": 1587.0, "1235": 1484.0, "1240": 1497.0, "1245": 1617.0, "1250": 1471.0, "1255": 1510.0, "1260": 1589.0, "1265": 1545.0, "1270": 1522.0, "1275": 1565.0, "1280": 1519.0, "1285": 1506.0, "1290": 1532.0, "1295": 1596.0, "1300": 1662.0, "1305": 1572.0, "1310": 1492.0, "1315": 1589.0, "1320": 1568.0, "1325": 1540.0, "1330": 1542.0, "1335": 1541.0, "1340": 1568.0, "1345": 1479.0, "1350": 1622.0, "1355": 1478.0, "1360": 1539.0, "1365": 1580.0, "1370": 1513.0, "1375": 1495.0, "1380": 1561.0, "1385": 1561.0, "1390": 1691.0, "1395": 1578.0, "1400": 1532.0, "1405": 1547.0, "1410": 1586.0, "1415": 1515.0, "1420": 1706.0, 
"1425": 1669.0, "1430": 1560.0, "1435": 1600.0, "1440": 1565.0, "1445": 1537.0, "1450": 1570.0, "1455": 1481.0, "1460": 1554.0, "1465": 1505.0, "1470": 1599.0, "1475": 1603.0, "1480": 1587.0, "1485": 1577.0, "1490": 1622.0, "1495": 1624.0, "1500": 1607.0, "1505": 1567.0, "1510": 1801.0, "1515": 1549.0, "1520": 1480.0, "1525": 1520.0, "1530": 1566.0, "1535": 1513.0, "1540": 1527.0, "1545": 1575.0, "1550": 1517.0, "1555": 1528.0, "1560": 1523.0, "1565": 1529.0, "1570": 1642.0, "1575": 1502.0, "1580": 1495.0, "1585": 1466.0, "1590": 1552.0, "1595": 1562.0, "1600": 1601.0, "1605": 1527.0, "1610": 1554.0, "1615": 1613.0, "1620": 1564.0, "1625": 1541.0, "1630": 1594.0, "1635": 1573.0, "1640": 1583.0, "1645": 1592.0, "1650": 1534.0, "1655": 1565.0, "1660": 1525.0, "1665": 1532.0, "1670": 1462.0, "1675": 1578.0, "1680": 1550.0, "1685": 1618.0, "1690": 1637.0, "1695": 1495.0, "1700": 1644.0, "1705": 1587.0, "1710": 1948.0, "1715": 1517.0, "1720": 1652.0, "1725": 1626.0, "1730": 1585.0, "1735": 1520.0, "1740": 1572.0, "1745": 1560.0, "1750": 1501.0, "1755": 1605.0, "1760": 1643.0, "1765": 1552.0, "1770": 1554.0, "1775": 1362.0, "1780": 1488.0, "1785": 1485.0, "1790": 1517.0, "1795": 1610.0, "1800": 1535.0, "1805": 1505.0, "1810": 1576.0, "1815": 1568.0, "1820": 1495.0, "1825": 1534.0, "1830": 1686.0, "1835": 1540.0, "1840": 1483.0, "1845": 1560.0, "1850": 1532.0, "1855": 1604.0, "1860": 1556.0, "1865": 1555.0, "1870": 1518.0, "1875": 1579.0, "1880": 1700.0, "1885": 1469.0, "1890": 1554.0, "1895": 1548.0, "1900": 1492.0, "1905": 1511.0, "1910": 1670.0, "1915": 1638.0, "1920": 1544.0, "1925": 1573.0, "1930": 1505.0, "1935": 1581.0, "1940": 1587.0, "1945": 1590.0, "1950": 1595.0, "1955": 1572.0, "1960": 1511.0, "1965": 1478.0, "1970": 1611.0, "1975": 1553.0, "1980": 1527.0, "1985": 1578.0, "1990": 1550.0, "1995": 1495.0, "2000": 1520.0, "2005": 1563.0, "2010": 1533.0, "2015": 1523.0, "2020": 1611.0, "2025": 1544.0, "2030": 1595.0, "2035": 1581.0, "2040": 1672.0, "2045": 1547.0, "2050": 1546.0, "2055": 1535.0, "2060": 1547.0, "2065": 1540.0, "2070": 1489.0, "2075": 1649.0, "2080": 1562.0, "2085": 1611.0, "2090": 1514.0, "2095": 1605.0, "2100": 1599.0, "2105": 1548.0, "2110": 1531.0, "2115": 1573.0, "2120": 1563.0, "2125": 1553.0, "2130": 1537.0, "2135": 1639.0, "2140": 1576.0, "2145": 1519.0, "2150": 1462.0, "2155": 1551.0, "2160": 1665.0, "2165": 1535.0, "2170": 1664.0, "2175": 1604.0, "2180": 1621.0, "2185": 1535.0, "2190": 1556.0, "2195": 1490.0, "2200": 1462.0, "2205": 1548.0, "2210": 1495.0, "2215": 1495.0, "2220": 1563.0, "2225": 1605.0, "2230": 1488.0, "2235": 1575.0, "2240": 1628.0, "2245": 1523.0, "2250": 1630.0, "2255": 1548.0, "2260": 1559.0, "2265": 1592.0, "2270": 1518.0, "2275": 1601.0, "2280": 1596.0, "2285": 1595.0, "2290": 1564.0, "2295": 1581.0, "2300": 1496.0, "2305": 1475.0, "2310": 1586.0, "2315": 1530.0, "2320": 1522.0, "2325": 1354.0, "2330": 1557.0, "2335": 1473.0, "2340": 1525.0, "2345": 1466.0, "2350": 1535.0, "2355": 1583.0, "2360": 1548.0, "2365": 1557.0, "2370": 1503.0, "2375": 1589.0, "2380": 1583.0, "2385": 1627.0, "2390": 1517.0, "2395": 1585.0, "2400": 1585.0, "2405": 1627.0, "2410": 1510.0, "2415": 1534.0, "2420": 1546.0, "2425": 1557.0, "2430": 1489.0, "2435": 1616.0, "2440": 1595.0, "2445": 1553.0, "2450": 1552.0, "2455": 1525.0, "2460": 1616.0, "2465": 1552.0, "2470": 1626.0, "2475": 1603.0, "2480": 1516.0, "2485": 1608.0, "2490": 1565.0, "2495": 1565.0, "2500": 1569.0, "2505": 1489.0, "2510": 1539.0, "2515": 1520.0, "2520": 1578.0, "2525": 1574.0, "2530": 1624.0, 
"2535": 1577.0, "2540": 1570.0, "2545": 1527.0, "2550": 1508.0, "2555": 1653.0, "2560": 1598.0, "2565": 1626.0, "2570": 1651.0, "2575": 1468.0, "2580": 1545.0, "2585": 1540.0, "2590": 1481.0, "2595": 1603.0, "2600": 1570.0, "2605": 1691.0, "2610": 1546.0, "2615": 1514.0, "2620": 1677.0, "2625": 1615.0, "2630": 1613.0, "2635": 1566.0, "2640": 1574.0, "2645": 1552.0, "2650": 1559.0, "2655": 1550.0, "2660": 1549.0, "2665": 1617.0, "2670": 1614.0, "2675": 1502.0, "2680": 1667.0, "2685": 1639.0, "2690": 1579.0, "2695": 1534.0, "2700": 1534.0, "2705": 1633.0, "2710": 1644.0, "2715": 1460.0, "2720": 1692.0, "2725": 1639.0, "2730": 1605.0, "2735": 1581.0, "2740": 1533.0, "2745": 1537.0, "2750": 1548.0, "2755": 1521.0, "2760": 1545.0, "2765": 1536.0, "2770": 1601.0, "2775": 1526.0, "2780": 1582.0, "2785": 1571.0, "2790": 1600.0, "2795": 1576.0, "2800": 1586.0, "2805": 1675.0, "2810": 1578.0, "2815": 1596.0, "2820": 1574.0, "2825": 1541.0, "2830": 1545.0, "2835": 1597.0, "2840": 1641.0, "2845": 1651.0, "2850": 1470.0, "2855": 1516.0, "2860": 1451.0, "2865": 1546.0, "2870": 1543.0, "2875": 1510.0, "2880": 1600.0, "2885": 1588.0, "2890": 1695.0, "2895": 1586.0, "2900": 1502.0, "2905": 1646.0, "2910": 1635.0, "2915": 1637.0, "2920": 1583.0, "2925": 1511.0, "2930": 1556.0, "2935": 1518.0, "2940": 1532.0, "2945": 1513.0, "2950": 1557.0, "2955": 1562.0, "2960": 1600.0, "2965": 1521.0, "2970": 1543.0, "2975": 1617.0, "2980": 1630.0, "2985": 1549.0, "2990": 1665.0, "2995": 1605.0, "3000": 1559.0, "3005": 1498.0, "3010": 1576.0, "3015": 1498.0, "3020": 1654.0, "3025": 1521.0, "3030": 1681.0, "3035": 1615.0, "3040": 1611.0, "3045": 1580.0, "3050": 1589.0, "3055": 1515.0, "3060": 1561.0, "3065": 1650.0, "3070": 1585.0, "3075": 1607.0, "3080": 1567.0, "3085": 1456.0, "3090": 1563.0, "3095": 1600.0, "3100": 1562.0, "3105": 1573.0, "3110": 1623.0, "3115": 1661.0, "3120": 1562.0, "3125": 1640.0, "3130": 1519.0, "3135": 1621.0, "3140": 1542.0, "3145": 1654.0, "3150": 1588.0, "3155": 1577.0, "3160": 1583.0, "3165": 1527.0, "3170": 1532.0, "3175": 1715.0, "3180": 1563.0, "3185": 1606.0, "3190": 1508.0, "3195": 1698.0, "3200": 1641.0, "3205": 1568.0, "3210": 1562.0, "3215": 1589.0, "3220": 1582.0, "3225": 1585.0, "3230": 1617.0, "3235": 1569.0, "3240": 1510.0, "3245": 1631.0, "3250": 1656.0, "3255": 1543.0, "3260": 1587.0, "3265": 1578.0, "3270": 1578.0, "3275": 1589.0, "3280": 1549.0, "3285": 1535.0, "3290": 1601.0, "3295": 1497.0, "3300": 1616.0, "3305": 1526.0, "3310": 1539.0, "3315": 1551.0, "3320": 1532.0, "3325": 1591.0, "3330": 1602.0, "3335": 1577.0, "3340": 1494.0, "3345": 1557.0, "3350": 1655.0, "3355": 1716.0, "3360": 1578.0, "3365": 1718.0, "3370": 1605.0, "3375": 1653.0, "3380": 1605.0, "3385": 1587.0, "3390": 1553.0, "3395": 1580.0, "3400": 1503.0, "3405": 1506.0, "3410": 1555.0, "3415": 1489.0, "3420": 1552.0, "3425": 1536.0, "3430": 1619.0, "3435": 1543.0, "3440": 1563.0, "3445": 1602.0, "3450": 1578.0, "3455": 1581.0, "3460": 1579.0, "3465": 1632.0, "3470": 1660.0, "3475": 1567.0, "3480": 1683.0, "3485": 1651.0, "3490": 1509.0, "3495": 1578.0, "3500": 1584.0, "3505": 1603.0, "3510": 1578.0, "3515": 1490.0, "3520": 1535.0, "3525": 1593.0, "3530": 1636.0, "3535": 1637.0, "3540": 1571.0, "3545": 1660.0, "3550": 1533.0, "3555": 1623.0, "3560": 1551.0, "3565": 1547.0, "3570": 1538.0, "3575": 1592.0, "3580": 1549.0, "3585": 1616.0, "3590": 1619.0, "3595": 1604.0, "3600": 1726.0, "3605": 1708.0, "3610": 1621.0, "3615": 1640.0, "3620": 1547.0, "3625": 1598.0, "3630": 1595.0, "3635": 1655.0, "3640": 1612.0, 
"3645": 1631.0, "3650": 1670.0, "3655": 1491.0, "3660": 1536.0, "3665": 1542.0, "3670": 1605.0, "3675": 1693.0, "3680": 1641.0, "3685": 1551.0, "3690": 1543.0, "3695": 1553.0, "3700": 1562.0, "3705": 1615.0, "3710": 1605.0, "3715": 1629.0, "3720": 1572.0, "3725": 1556.0, "3730": 1574.0, "3735": 1573.0, "3740": 1561.0, "3745": 1569.0, "3750": 1555.0, "3755": 1501.0, "3760": 1555.0, "3765": 1558.0, "3770": 1555.0, "3775": 1558.0, "3780": 1602.0, "3785": 1481.0, "3790": 1547.0, "3795": 1548.0, "3800": 1576.0, "3805": 1643.0, "3810": 1639.0, "3815": 1624.0, "3820": 1583.0, "3825": 1624.0, "3830": 1601.0, "3835": 1506.0, "3840": 1621.0, "3845": 1624.0, "3850": 1658.0, "3855": 1679.0, "3860": 1536.0, "3865": 1733.0, "3870": 1592.0, "3875": 1645.0, "3880": 1566.0, "3885": 1539.0, "3890": 1576.0, "3895": 1577.0, "3900": 1571.0, "3905": 1531.0, "3910": 1590.0, "3915": 1575.0, "3920": 1587.0, "3925": 1567.0, "3930": 1505.0, "3935": 1521.0, "3940": 1652.0, "3945": 1638.0, "3950": 1592.0, "3955": 1512.0, "3960": 1538.0, "3965": 1548.0, "3970": 1635.0, "3975": 1633.0, "3980": 1553.0, "3985": 1638.0, "3990": 1549.0, "3995": 1639.0, "4000": 1617.0, "4005": 1599.0, "4010": 1595.0, "4015": 1537.0, "4020": 1546.0, "4025": 1608.0, "4030": 1649.0, "4035": 1629.0, "4040": 1566.0, "4045": 1609.0, "4050": 1529.0, "4055": 1483.0, "4060": 1623.0, "4065": 1596.0, "4070": 1667.0, "4075": 1602.0, "4080": 1494.0, "4085": 1535.0, "4090": 1604.0, "4095": 1572.0, "4100": 1542.0, "4105": 1669.0, "4110": 1694.0, "4115": 1582.0, "4120": 1591.0, "4125": 1615.0, "4130": 1486.0, "4135": 1635.0, "4140": 1593.0, "4145": 1570.0, "4150": 1631.0, "4155": 1579.0, "4160": 1662.0, "4165": 1562.0, "4170": 1578.0, "4175": 1524.0, "4180": 1578.0, "4185": 1586.0, "4190": 1665.0, "4195": 1622.0, "4200": 1641.0, "4205": 1679.0, "4210": 1694.0, "4215": 1698.0, "4220": 1529.0, "4225": 1528.0, "4230": 1552.0, "4235": 1660.0, "4240": 1612.0, "4245": 1609.0, "4250": 1639.0, "4255": 1600.0, "4260": 1602.0, "4265": "nan", "4270": 1584.0, "4275": 1602.0, "4280": 1569.0, "4285": 1626.0, "4290": 1560.0, "4295": 1511.0, "4300": 1595.0, "4305": 1608.0, "4310": 1671.0, "4315": 1715.0, "4320": 1609.0, "4325": 1606.0, "4330": 1704.0, "4335": 1561.0, "4340": 1598.0, "4345": 1608.0, "4350": 1612.0, "4355": 1617.0, "4360": 1627.0, "4365": 1593.0, "4370": 1607.0, "4375": 1610.0, "4380": 1612.0, "4385": 1643.0, "4390": 1598.0, "4395": 1674.0, "4400": 1565.0, "4405": 1523.0, "4410": 1686.0, "4415": 1572.0, "4420": 1599.0, "4425": 1592.0, "4430": 1593.0, "4435": 1585.0, "4440": 1542.0, "4445": 1538.0, "4450": 1550.0, "4455": 1626.0, "4460": 1628.0, "4465": 1563.0, "4470": 1624.0, "4475": 1542.0, "4480": 1556.0, "4485": 1695.0, "4490": 1622.0, "4495": 1636.0, "4500": 1673.0, "4505": 1644.0, "4510": 1749.0, "4515": 1672.0, "4520": 1561.0, "4525": 1560.0, "4530": 1629.0, "4535": 1532.0, "4540": 1680.0, "4545": 1596.0, "4550": 1622.0, "4555": 1697.0, "4560": 1532.0, "4565": 1520.0, "4570": 1541.0, "4575": 1600.0, "4580": 1511.0, "4585": 1633.0, "4590": 1601.0, "4595": 1542.0, "4600": 1581.0, "4605": 1613.0, "4610": 1643.0, "4615": 1607.0, "4620": 1628.0, "4625": 1582.0, "4630": 1617.0, "4635": 1645.0, "4640": 1598.0, "4645": 1664.0, "4650": 1615.0, "4655": 1669.0, "4660": 1626.0, "4665": 1585.0, "4670": 1610.0, "4675": 1528.0, "4680": 1555.0, "4685": 1576.0, "4690": 1757.0, "4695": 1537.0, "4700": 1658.0, "4705": 1643.0, "4710": 1677.0, "4715": 1582.0, "4720": 1563.0, "4725": 1691.0, "4730": 1641.0, "4735": 1628.0, "4740": 1628.0, "4745": 1541.0, "4750": 1563.0, 
"4755": 1568.0, "4760": 1554.0, "4765": 1584.0, "4770": 1600.0, "4775": 1563.0, "4780": 1655.0, "4785": 1583.0, "4790": 1597.0, "4795": 1767.0, "4800": 1633.0, "4805": 1570.0, "4810": 1554.0, "4815": 1670.0, "4820": 1548.0, "4825": 1527.0, "4830": 1583.0, "4835": 1527.0, "4840": 1598.0, "4845": 1622.0, "4850": 1704.0, "4855": 1655.0, "4860": 1564.0, "4865": 1623.0, "4870": 1546.0, "4875": 1581.0, "4880": 1564.0, "4885": 1584.0, "4890": 1655.0, "4895": 1624.0, "4900": 1618.0, "4905": 1643.0, "4910": 1608.0, "4915": 1619.0, "4920": 1729.0, "4925": 1689.0, "4930": 1576.0, "4935": 1647.0, "4940": 1616.0, "4945": 1668.0, "4950": 1535.0, "4955": 1665.0, "4960": 1832.0, "4965": 1543.0, "4970": 1585.0, "4975": 1602.0, "4980": 1622.0, "4985": 1563.0, "4990": 1976.0, "4995": 1669.0, "5000": 1652.0, "5005": 1652.0, "5010": 1594.0, "5015": 1585.0, "5020": 1643.0, "5025": 1601.0, "5030": 1582.0, "5035": 1616.0, "5040": 1635.0, "5045": 1671.0, "5050": 1661.0, "5055": 1608.0, "5060": 1644.0, "5065": 1601.0, "5070": 1643.0, "5075": 1625.0, "5080": 1634.0, "5085": 1656.0, "5090": 1546.0, "5095": 1599.0, "5100": 1582.0, "5105": 1607.0, "5110": 1560.0, "5115": 1587.0, "5120": 1582.0, "5125": 1642.0, "5130": 1626.0, "5135": 1606.0, "5140": 1613.0, "5145": 1521.0, "5150": 1695.0, "5155": 1571.0, "5160": 1678.0, "5165": 1679.0, "5170": 1623.0, "5175": 1671.0, "5180": 1668.0, "5185": 1633.0, "5190": 1696.0, "5195": 1686.0, "5200": 1712.0, "5205": 1525.0, "5210": 1526.0, "5215": 1598.0, "5220": 1521.0, "5225": 1683.0, "5230": 1579.0, "5235": 1652.0, "5240": 1631.0, "5245": 1611.0, "5250": 1669.0, "5255": 1631.0, "5260": 1611.0, "5265": 1670.0, "5270": 1750.0, "5275": 1572.0, "5280": 1507.0, "5285": 1613.0, "5290": 1607.0, "5295": 1592.0, "5300": 1687.0, "5305": 1587.0, "5310": 1704.0, "5315": 1603.0, "5320": 1523.0, "5325": 1642.0, "5330": 1561.0, "5335": 1554.0, "5340": 1591.0, "5345": 1609.0, "5350": 1626.0, "5355": 1619.0, "5360": 1648.0, "5365": 1634.0, "5370": 1615.0, "5375": 1625.0, "5380": 1600.0, "5385": 1653.0, "5390": 1670.0, "5395": 1619.0, "5400": 1652.0, "5405": 1648.0, "5410": 1585.0, "5415": 1627.0, "5420": 1666.0, "5425": 1661.0, "5430": 1566.0, "5435": 1696.0, "5440": 1678.0, "5445": 1606.0, "5450": 1604.0, "5455": 1643.0, "5460": 1709.0, "5465": 1628.0, "5470": 1614.0, "5475": 1669.0, "5480": 1556.0, "5485": 1677.0, "5490": 1664.0, "5495": 1577.0, "5500": 1658.0, "5505": 1556.0, "5510": 1606.0, "5515": 1641.0, "5520": 1577.0, "5525": 1565.0, "5530": 1642.0, "5535": 1623.0, "5540": 1604.0, "5545": 1608.0, "5550": 1535.0, "5555": 1637.0, "5560": 1621.0, "5565": 1525.0, "5570": 1630.0, "5575": 1639.0, "5580": 1610.0, "5585": 1615.0, "5590": 1620.0, "5595": 1565.0, "5600": 1606.0, "5605": 1668.0, "5610": 1657.0, "5615": 1649.0, "5620": 1656.0, "5625": 1574.0, "5630": 1580.0, "5635": 1622.0, "5640": 1679.0, "5645": 1640.0, "5650": 1681.0, "5655": 1620.0, "5660": 1693.0, "5665": 1681.0, "5670": 1619.0, "5675": 1753.0, "5680": 1628.0, "5685": 1623.0, "5690": 1545.0, "5695": 1622.0, "5700": 1622.0, "5705": 1737.0, "5710": 1639.0, "5715": 1605.0, "5720": 1545.0, "5725": 1658.0, "5730": 1703.0, "5735": 1569.0, "5740": 1624.0, "5745": 1635.0, "5750": 1607.0, "5755": 1621.0, "5760": 1628.0, "5765": 1663.0, "5770": 1512.0, "5775": 1604.0, "5780": 1664.0, "5785": 1658.0, "5790": 1773.0, "5795": 1625.0, "5800": 1604.0, "5805": 1767.0, "5810": 1641.0, "5815": 1681.0, "5820": 1620.0, "5825": 1656.0, "5830": 1657.0, "5835": 1617.0, "5840": 1637.0, "5845": 1673.0, "5850": 1639.0, "5855": 1657.0, "5860": 1736.0, 
"5865": 1620.0, "5870": 1588.0, "5875": 1600.0, "5880": 1664.0, "5885": 1657.0, "5890": 1610.0, "5895": 1665.0, "5900": 1534.0, "5905": 1605.0, "5910": 1678.0, "5915": 1639.0, "5920": 1666.0, "5925": 1567.0, "5930": 1542.0, "5935": 1679.0, "5940": 1544.0, "5945": 1646.0, "5950": 1741.0, "5955": 1623.0, "5960": 1681.0, "5965": 1626.0, "5970": 1671.0, "5975": 1659.0, "5980": 1668.0, "5985": 1639.0, "5990": 1684.0, "5995": 1676.0, "6000": 1559.0, "6005": 1662.0, "6010": 1810.0, "6015": 1586.0, "6020": 1661.0, "6025": 1617.0, "6030": 1613.0, "6035": 1657.0, "6040": 1632.0, "6045": 1671.0, "6050": 1627.0, "6055": 1582.0, "6060": 1552.0, "6065": 1597.0, "6070": 1583.0, "6075": 1674.0, "6080": 1549.0, "6085": 1569.0, "6090": 1554.0, "6095": 1579.0, "6100": 1628.0, "6105": 1609.0, "6110": 1611.0, "6115": 1618.0, "6120": 1624.0, "6125": 1724.0, "6130": 1556.0, "6135": 1623.0, "6140": 1608.0, "6145": 1632.0, "6150": 1741.0, "6155": 1605.0, "6160": 1639.0, "6165": 1618.0, "6170": 1655.0, "6175": 1687.0, "6180": 1592.0, "6185": 1635.0, "6190": 1638.0, "6195": 1644.0, "6200": 1657.0, "6205": 1628.0, "6210": 1556.0, "6215": 1581.0, "6220": 1608.0, "6225": 1635.0, "6230": 1620.0, "6235": 1700.0, "6240": 1655.0, "6245": 1608.0, "6250": 1716.0, "6255": 1552.0, "6260": 1650.0, "6265": 1539.0, "6270": 1675.0, "6275": 1613.0, "6280": 1640.0, "6285": 1659.0, "6290": 1578.0, "6295": 1586.0, "6300": "nan", "6305": 1566.0, "6310": 1645.0, "6315": 1508.0, "6320": 1758.0, "6325": 1673.0, "6330": 1594.0, "6335": 1622.0, "6340": 1696.0, "6345": 1604.0, "6350": 1608.0, "6355": 1698.0, "6360": 1619.0, "6365": 1627.0, "6370": 1705.0, "6375": 1660.0, "6380": 1653.0, "6385": 1667.0, "6390": 1675.0, "6395": 1779.0, "6400": 1595.0, "6405": 1702.0, "6410": 1726.0, "6415": 1647.0, "6420": 1634.0, "6425": 1725.0, "6430": 1566.0, "6435": 1614.0, "6440": 1569.0, "6445": 1637.0, "6450": 1642.0, "6455": 1655.0, "6460": 1654.0, "6465": 1695.0, "6470": 1697.0, "6475": 1661.0, "6480": 1676.0, "6485": 1616.0, "6490": 1716.0, "6495": 1542.0, "6500": 1734.0, "6505": 1622.0, "6510": 1635.0, "6515": 1568.0, "6520": 1632.0, "6525": 1599.0, "6530": 1627.0, "6535": 1562.0, "6540": 1651.0, "6545": 1593.0, "6550": 1723.0, "6555": 1683.0, "6560": 1847.0, "6565": 1631.0, "6570": 1713.0, "6575": 1610.0, "6580": 1653.0, "6585": 1668.0, "6590": 1644.0, "6595": 1694.0, "6600": 1613.0, "6605": 1553.0, "6610": 1701.0, "6615": 1516.0, "6620": 1607.0, "6625": 1646.0, "6630": 1591.0, "6635": 1694.0, "6640": 1631.0, "6645": 1703.0, "6650": 1720.0, "6655": 1625.0, "6660": 1650.0, "6665": 1674.0, "6670": 1670.0, "6675": 1646.0, "6680": 1679.0, "6685": 1638.0, "6690": 1667.0, "6695": 1672.0, "6700": 1703.0, "6705": 1656.0, "6710": 1715.0, "6715": 1688.0, "6720": 1695.0, "6725": 1725.0, "6730": 1571.0, "6735": 1720.0, "6740": 1542.0, "6745": 1709.0, "6750": 1688.0, "6755": 1628.0, "6760": 1621.0, "6765": 1672.0, "6770": 1600.0, "6775": 1521.0, "6780": 1583.0, "6785": 1561.0, "6790": 1634.0, "6795": 1615.0, "6800": 1562.0, "6805": 1645.0, "6810": 1663.0, "6815": 1688.0, "6820": 1620.0, "6825": 1655.0, "6830": 1633.0, "6835": 1599.0, "6840": 1558.0, "6845": 1667.0, "6850": 1633.0, "6855": 1643.0, "6860": 1741.0, "6865": 1682.0, "6870": 1787.0, "6875": 1583.0, "6880": 1721.0, "6885": 1672.0, "6890": 1693.0, "6895": 1694.0, "6900": 1671.0, "6905": 1661.0, "6910": 1664.0, "6915": 1587.0, "6920": 1665.0, "6925": 1724.0, "6930": 1736.0, "6935": 1644.0, "6940": 1661.0, "6945": 1685.0, "6950": 1686.0, "6955": 1624.0, "6960": 1667.0, "6965": 1675.0, "6970": 1550.0, 
"6975": 1748.0, "6980": 1663.0, "6985": 1634.0, "6990": 1623.0, "6995": 1662.0, "7000": 1699.0, "7005": 1692.0, "7010": 1654.0, "7015": 1705.0, "7020": 1659.0, "7025": 1630.0, "7030": 1714.0, "7035": 1596.0, "7040": 1637.0, "7045": 1682.0, "7050": 1631.0, "7055": 1668.0, "7060": 1689.0, "7065": 1640.0, "7070": 1729.0, "7075": 1752.0, "7080": 1720.0, "7085": 1719.0, "7090": 1645.0, "7095": 1588.0, "7100": 1549.0, "7105": 1650.0, "7110": 1717.0, "7115": 1688.0, "7120": 1619.0, "7125": 1570.0, "7130": 1706.0, "7135": 1712.0, "7140": 1690.0, "7145": 1640.0, "7150": 1712.0, "7155": 1619.0, "7160": 1654.0, "7165": 1643.0, "7170": 1545.0, "7175": "nan", "7180": 1637.0, "7185": 1773.0, "7190": 1704.0, "7195": 2029.0, "7200": 1826.0, "7205": 1829.0, "7210": 1612.0, "7215": 1811.0, "7220": 1722.0, "7225": 1824.0, "7230": 1807.0, "7235": 1732.0, "7240": 1695.0, "7245": 1824.0, "7250": 1829.0, "7255": 1803.0, "7260": 1769.0, "7265": 1788.0, "7270": 1844.0, "7275": 1644.0, "7280": 1774.0, "7285": 1668.0, "7290": 1954.0, "7295": 1794.0, "7300": 1764.0, "7305": 1851.0, "7310": 1624.0, "7315": 1773.0, "7320": 1792.0, "7325": 1764.0, "7330": 1681.0, "7335": 1710.0, "7340": 1834.0, "7345": 1847.0, "7350": 1818.0, "7355": 1736.0, "7360": 1767.0, "7365": 1829.0, "7370": 1757.0, "7375": 1808.0, "7380": 1950.0, "7385": 1718.0, "7390": 1854.0, "7395": 1933.0, "7400": 1798.0, "7405": 1744.0, "7410": 1758.0, "7415": 1723.0, "7420": 1792.0, "7425": 1894.0, "7430": 1697.0, "7435": 1816.0, "7440": 1864.0, "7445": 1743.0, "7450": 1783.0, "7455": 1934.0, "7460": 1743.0, "7465": 1665.0, "7470": 1809.0, "7475": 1686.0, "7480": 1728.0, "7485": 1787.0, "7490": 1705.0, "7495": 1665.0, "7500": 1803.0, "7505": 1816.0, "7510": 1887.0, "7515": 1697.0, "7520": 1707.0, "7525": 1825.0, "7530": 1772.0, "7535": 1779.0, "7540": 1819.0, "7545": 1716.0, "7550": 1771.0, "7555": 1792.0, "7560": 1658.0, "7565": 1795.0, "7570": 1648.0, "7575": 1801.0, "7580": 1763.0, "7585": 1858.0, "7590": 1846.0, "7595": 1779.0, "7600": 1766.0, "7605": 1496.0, "7610": 1759.0, "7615": 1821.0, "7620": 1801.0, "7625": 1737.0, "7630": 1838.0, "7635": 1746.0, "7640": 1767.0, "7645": 2283.0, "7650": 1754.0, "7655": 1746.0, "7660": 1815.0, "7665": 1825.0, "7670": 1738.0, "7675": 1719.0, "7680": 1806.0, "7685": 1853.0, "7690": 1779.0, "7695": 1733.0, "7700": 1818.0, "7705": 1804.0, "7710": 1860.0, "7715": 1859.0, "7720": 1897.0, "7725": 1764.0, "7730": 1894.0, "7735": 1809.0, "7740": 1859.0, "7745": 1808.0, "7750": 1794.0, "7755": 1873.0, "7760": 1871.0, "7765": 1879.0, "7770": 1697.0, "7775": 1832.0, "7780": 1742.0, "7785": 1779.0, "7790": 1760.0, "7795": 1836.0, "7800": 1941.0, "7805": 1898.0, "7810": 1637.0, "7815": 1783.0, "7820": 1815.0, "7825": 1755.0, "7830": 1733.0, "7835": 1782.0, "7840": 1835.0, "7845": 1767.0, "7850": 1754.0, "7855": 1658.0, "7860": 1868.0, "7865": 1741.0, "7870": 1862.0, "7875": 1747.0, "7880": 1903.0, "7885": 2026.0, "7890": 1873.0, "7895": 1893.0, "7900": 1732.0, "7905": 2023.0, "7910": 1803.0, "7915": 1759.0, "7920": 1772.0, "7925": 1662.0, "7930": 1877.0, "7935": 1771.0, "7940": 1831.0, "7945": 1951.0, "7950": 1821.0, "7955": 1864.0, "7960": 1772.0, "7965": 1767.0, "7970": 1723.0, "7975": 1794.0, "7980": 1820.0, "7985": 1910.0, "7990": 1796.0, "7995": 1705.0, "8000": 1943.0, "8005": 1977.0, "8010": 1732.0, "8015": 1706.0, "8020": 1796.0, "8025": 1981.0, "8030": 1696.0, "8035": 1795.0, "8040": 1850.0, "8045": 1901.0, "8050": 1750.0, "8055": 1933.0, "8060": 1776.0, "8065": 1547.0, "8070": 1710.0, "8075": 1873.0, "8080": 1874.0, 
"8085": 1922.0, "8090": 1797.0, "8095": 1838.0, "8100": 1813.0, "8105": 1757.0, "8110": 1750.0, "8115": 1900.0, "8120": 1795.0, "8125": 1663.0, "8130": 1871.0, "8135": 1764.0, "8140": 1801.0, "8145": 1975.0, "8150": 1815.0, "8155": 1834.0, "8160": 1684.0, "8165": 1934.0, "8170": 1868.0, "8175": 1844.0, "8180": 1734.0, "8185": 1622.0, "8190": 1702.0, "8195": 1668.0, "8200": 1713.0, "8205": 1715.0, "8210": 1694.0, "8215": 1669.0, "8220": 1757.0, "8225": 1655.0, "8230": 1838.0, "8235": 1716.0, "8240": 1594.0, "8245": 1633.0, "8250": 1707.0, "8255": 1614.0, "8260": 1791.0, "8265": 1680.0, "8270": 1734.0, "8275": 1711.0, "8280": 1642.0, "8285": 1641.0, "8290": 1733.0, "8295": 1686.0, "8300": 1673.0, "8305": 1586.0, "8310": 1742.0, "8315": 1533.0, "8320": 1679.0, "8325": 1642.0, "8330": 1647.0, "8335": 1644.0, "8340": 1738.0, "8345": 1660.0, "8350": 1700.0, "8355": 1665.0, "8360": 1598.0, "8365": 1648.0, "8370": 1747.0, "8375": 1620.0, "8380": 1677.0, "8385": 1654.0, "8390": 1742.0, "8395": 1704.0, "8400": 1710.0, "8405": 1593.0, "8410": 1627.0, "8415": 1715.0, "8420": 1676.0, "8425": 1658.0, "8430": 1694.0, "8435": 1641.0, "8440": 1685.0, "8445": 1641.0, "8450": 1718.0, "8455": 1697.0, "8460": 1642.0, "8465": 1660.0, "8470": 1613.0, "8475": 1779.0, "8480": 1671.0, "8485": 1846.0, "8490": 1774.0, "8495": 1619.0, "8500": 1740.0, "8505": 1662.0, "8510": 1652.0, "8515": 1735.0, "8520": 1719.0, "8525": 1636.0, "8530": 1726.0, "8535": 1683.0, "8540": 1663.0, "8545": 1694.0, "8550": 1943.0, "8555": 1692.0, "8560": 1735.0, "8565": 1788.0, "8570": 1697.0, "8575": 1841.0, "8580": 1682.0, "8585": 1765.0, "8590": 1798.0, "8595": 1695.0, "8600": 1806.0, "8605": 1682.0, "8610": 1643.0, "8615": 1680.0, "8620": 1736.0, "8625": 1711.0, "8630": 1728.0, "8635": 1714.0, "8640": 1702.0, "8645": 1613.0, "8650": 1676.0, "8655": 1671.0, "8660": 1599.0, "8665": 1662.0, "8670": 1684.0, "8675": 1752.0, "8680": 1806.0, "8685": 1682.0, "8690": 1732.0, "8695": 1668.0, "8700": 1696.0, "8705": 1745.0, "8710": 1741.0, "8715": 1748.0, "8720": 1606.0, "8725": 1641.0, "8730": 1652.0, "8735": 1732.0, "8740": 1690.0, "8745": 1727.0, "8750": 1686.0, "8755": 1666.0, "8760": 1728.0, "8765": 1798.0, "8770": 1661.0, "8775": 1706.0, "8780": 1699.0, "8785": 1661.0, "8790": 1702.0, "8795": 1636.0, "8800": 1694.0, "8805": 1786.0, "8810": 1792.0, "8815": 1817.0, "8820": 1627.0, "8825": 1699.0, "8830": 1591.0, "8835": 1626.0, "8840": 1826.0, "8845": 1817.0, "8850": 1690.0, "8855": 1559.0, "8860": 1776.0, "8865": 1631.0, "8870": 1687.0, "8875": 1650.0, "8880": 1606.0, "8885": 1711.0, "8890": 1591.0, "8895": 1676.0, "8900": 1644.0, "8905": 1694.0, "8910": 1711.0, "8915": 1612.0, "8920": 1585.0, "8925": 1743.0, "8930": 1731.0, "8935": 1703.0, "8940": 1717.0, "8945": 1872.0, "8950": 1682.0, "8955": 1656.0, "8960": 1524.0, "8965": 1636.0, "8970": 1665.0, "8975": 1715.0, "8980": 1731.0, "8985": 1721.0, "8990": 1645.0, "8995": 1678.0, "9000": 1699.0, "9005": 1724.0, "9010": 1701.0, "9015": 1678.0, "9020": 1660.0, "9025": 1720.0, "9030": 1732.0, "9035": 1777.0, "9040": 1727.0, "9045": 1749.0, "9050": 1652.0, "9055": 1591.0, "9060": 1692.0, "9065": 1653.0, "9070": 1681.0, "9075": 1620.0, "9080": 1652.0, "9085": 1659.0, "9090": 1740.0, "9095": 1691.0, "9100": 1678.0, "9105": 1643.0, "9110": 1683.0, "9115": 1763.0, "9120": 1722.0, "9125": 1743.0, "9130": 1737.0, "9135": 1762.0, "9140": 1694.0, "9145": 1667.0, "9150": 1716.0, "9155": 1729.0, "9160": 1606.0, "9165": 2067.0, "9170": 1611.0, "9175": 1687.0, "9180": 1545.0, "9185": 1661.0, "9190": 1645.0, 
"9195": 1563.0, "9200": 1629.0, "9205": 1765.0, "9210": 1655.0, "9215": 1786.0, "9220": 1710.0, "9225": 1750.0, "9230": 1684.0, "9235": 1594.0, "9240": 1774.0, "9245": 1738.0, "9250": 1743.0, "9255": 1809.0, "9260": 1703.0, "9265": 1782.0, "9270": 1656.0, "9275": 1723.0, "9280": 1662.0, "9285": 1793.0, "9290": 1661.0, "9295": 1797.0, "9300": 1719.0, "9305": 1732.0, "9310": 1793.0, "9315": 1717.0, "9320": 1652.0, "9325": 1732.0, "9330": 1834.0, "9335": 1684.0, "9340": 1538.0, "9345": 1740.0, "9350": 1642.0, "9355": 1619.0, "9360": 1722.0, "9365": 1699.0, "9370": 1674.0, "9375": 1649.0, "9380": 1762.0, "9385": 1599.0, "9390": 1636.0, "9395": 1650.0, "9400": 1712.0, "9405": 1680.0, "9410": 1715.0, "9415": 1743.0, "9420": 1734.0, "9425": 1685.0, "9430": 1655.0, "9435": 1726.0, "9440": 1675.0, "9445": 1786.0, "9450": 1714.0, "9455": 1635.0, "9460": 1707.0, "9465": 1772.0, "9470": 1725.0, "9475": 1730.0, "9480": 1882.0, "9485": 1772.0, "9490": 1762.0, "9495": 1679.0, "9500": 1701.0, "9505": 1787.0, "9510": 1725.0, "9515": 1626.0, "9520": 1668.0, "9525": 1699.0, "9530": 1728.0, "9535": 1676.0, "9540": 1780.0, "9545": 1624.0, "9550": 1557.0, "9555": 1651.0, "9560": 1772.0, "9565": 1626.0, "9570": 1842.0, "9575": 1790.0, "9580": 1693.0, "9585": 1736.0, "9590": 1702.0, "9595": 1642.0, "9600": 1551.0, "9605": 1784.0, "9610": 1792.0, "9615": 1624.0, "9620": 1741.0, "9625": 1659.0, "9630": 1792.0, "9635": 1700.0, "9640": 1704.0, "9645": 1701.0, "9650": 1733.0, "9655": 1757.0, "9660": 1682.0, "9665": 1686.0, "9670": 1803.0, "9675": 1706.0, "9680": 1712.0, "9685": 1607.0, "9690": 1714.0, "9695": 1620.0, "9700": 1715.0, "9705": 1804.0, "9710": 1694.0, "9715": 1634.0, "9720": 1636.0, "9725": 1602.0, "9730": 1644.0, "9735": 1605.0, "9740": 1696.0, "9745": 1625.0, "9750": 1735.0, "9755": 1701.0, "9760": 1723.0, "9765": 1710.0, "9770": 1664.0, "9775": 1774.0, "9780": 1785.0, "9785": 1671.0, "9790": 1723.0, "9795": 1689.0, "9800": 1709.0, "9805": 1631.0, "9810": 1726.0, "9815": 1701.0, "9820": 1630.0, "9825": 1697.0, "9830": 1623.0, "9835": 1735.0, "9840": 1523.0, "9845": 1766.0, "9850": 1712.0, "9855": 1645.0, "9860": 1657.0, "9865": 1681.0, "9870": 1627.0, "9875": 1725.0, "9880": 1559.0, "9885": 1863.0, "9890": 1671.0, "9895": 1775.0, "9900": 1723.0, "9905": 1794.0, "9910": 1690.0, "9915": 1715.0, "9920": 1687.0, "9925": 1676.0, "9930": 1671.0, "9935": 1780.0, "9940": 1824.0, "9945": 1823.0, "9950": 1674.0, "9955": 1700.0, "9960": 1717.0, "9965": 1664.0, "9970": 1700.0, "9975": 1536.0, "9980": 1630.0, "9985": 1710.0, "9990": 1705.0, "9995": 1744.0, "10000": 1677.0, "10005": 1688.0, "10010": 1642.0, "10015": 1618.0, "10020": 1624.0, "10025": 1674.0, "10030": 1716.0, "10035": 1733.0, "10040": 1659.0, "10045": 1717.0, "10050": 1708.0, "10055": 1713.0, "10060": 1634.0, "10065": 1738.0, "10070": 1574.0, "10075": 1650.0, "10080": 1685.0, "10085": 1638.0, "10090": 1695.0, "10095": 1644.0, "10100": 1713.0, "10105": 1671.0, "10110": 1629.0, "10115": 1682.0, "10120": 1709.0, "10125": 1719.0, "10130": 1749.0, "10135": 1716.0, "10140": 1628.0, "10145": 1691.0, "10150": 1683.0, "10155": 1713.0, "10160": 1658.0, "10165": 1698.0, "10170": 1720.0, "10175": 1682.0, "10180": 1710.0, "10185": 1739.0, "10190": 1695.0, "10195": 1695.0, "10200": 1719.0, "10205": 1707.0, "10210": 2053.0, "10215": 1608.0, "10220": 1608.0, "10225": 1576.0, "10230": 1603.0, "10235": 1645.0, "10240": 1581.0, "10245": 1697.0, "10250": 2210.0, "10255": 1553.0, "10260": 1591.0, "10265": 1604.0, "10270": 1597.0, "10275": 1611.0, "10280": 1567.0, "10285": 
1608.0, "10290": 1607.0, "10295": 1547.0, "10300": 1650.0, "10305": 1729.0, "10310": 1620.0, "10315": 1587.0, "10320": 1572.0, "10325": 1643.0, "10330": 1612.0, "10335": 1585.0, "10340": 1600.0, "10345": 1649.0, "10350": 1619.0, "10355": 1592.0, "10360": 1630.0, "10365": 1587.0, "10370": 1611.0, "10375": 1626.0, "10380": 1591.0, "10385": 1581.0, "10390": 1593.0, "10395": 1550.0, "10400": 1506.0, "10405": 1711.0, "10410": 1633.0, "10415": 1585.0, "10420": 1611.0, "10425": 1640.0, "10430": 1625.0, "10435": 1599.0, "10440": 1604.0, "10445": 1555.0, "10450": 1601.0, "10455": 1673.0, "10460": 1615.0, "10465": 1604.0, "10470": 1569.0, "10475": 1639.0, "10480": 1540.0, "10485": 1668.0, "10490": 1685.0, "10495": 1591.0, "10500": 1627.0, "10505": 1659.0, "10510": 1590.0, "10515": 1661.0, "10520": 1637.0, "10525": 1554.0, "10530": 1662.0, "10535": 1572.0, "10540": 1676.0, "10545": 1581.0, "10550": 1711.0, "10555": 1658.0, "10560": 1586.0, "10565": 1639.0, "10570": 1620.0, "10575": 1567.0, "10580": 1632.0, "10585": 1573.0, "10590": 1561.0, "10595": 1595.0, "10600": 1524.0, "10605": 1629.0, "10610": 1659.0, "10615": 1665.0, "10620": 1625.0, "10625": 1602.0, "10630": 1591.0, "10635": 1649.0, "10640": 1615.0, "10645": 1662.0, "10650": 1560.0, "10655": 1575.0, "10660": 1602.0, "10665": 1642.0, "10670": 1668.0, "10675": 1615.0, "10680": 1555.0, "10685": 1636.0, "10690": 1543.0, "10695": 1662.0, "10700": 1648.0, "10705": 1625.0, "10710": 1669.0, "10715": 1668.0, "10720": 1647.0, "10725": 1689.0, "10730": 1614.0, "10735": 1635.0, "10740": 1602.0, "10745": 1678.0, "10750": 1693.0, "10755": 1643.0, "10760": 1545.0, "10765": 1783.0, "10770": 1669.0, "10775": 1744.0, "10780": 1684.0, "10785": 1648.0, "10790": 1691.0, "10795": 1653.0, "10800": 1748.0, "10805": 1671.0, "10810": 1615.0, "10815": 1713.0, "10820": 1711.0, "10825": 1715.0, "10830": 1703.0, "10835": 1669.0, "10840": 1663.0, "10845": 1652.0, "10850": 1715.0, "10855": 1660.0, "10860": 1622.0, "10865": 1697.0, "10870": 1696.0, "10875": 1727.0, "10880": 1682.0, "10885": 1637.0, "10890": 1701.0, "10895": 1704.0, "10900": 1713.0, "10905": 1707.0, "10910": 1668.0, "10915": 1724.0, "10920": 1678.0, "10925": 1689.0, "10930": 1656.0, "10935": 1747.0, "10940": 1595.0, "10945": 1722.0, "10950": 1588.0, "10955": 1682.0, "10960": 1642.0, "10965": 1674.0, "10970": 1651.0, "10975": 1661.0, "10980": 1679.0, "10985": 1659.0, "10990": 1738.0, "10995": 1718.0, "11000": 1733.0, "11005": 1619.0, "11010": 1682.0, "11015": 1710.0, "11020": 1673.0, "11025": 1689.0, "11030": 1760.0, "11035": 1622.0, "11040": 1604.0, "11045": 1858.0, "11050": 1705.0, "11055": 1719.0, "11060": 1739.0, "11065": 1642.0, "11070": 1648.0, "11075": 1732.0, "11080": 1732.0, "11085": 1668.0, "11090": 1769.0, "11095": 1815.0, "11100": 1718.0, "11105": 1616.0, "11110": 1652.0, "11115": 1690.0, "11120": 1683.0, "11125": 1655.0, "11130": 1779.0, "11135": 1619.0, "11140": 1790.0, "11145": 1754.0, "11150": 1679.0, "11155": 1623.0, "11160": 1616.0, "11165": 1761.0, "11170": 1708.0, "11175": 1695.0, "11180": 1599.0, "11185": 1677.0, "11190": 1503.0, "11195": 1644.0, "11200": 1684.0, "11205": 1684.0, "11210": 1740.0, "11215": 1758.0, "11220": 1662.0, "11225": 1631.0, "11230": 2238.0, "11235": 1669.0, "11240": 1625.0, "11245": 1695.0, "11250": 1613.0, "11255": 1748.0, "11260": 1664.0, "11265": 1663.0, "11270": 1640.0, "11275": 1730.0, "11280": 1662.0, "11285": 1694.0, "11290": 1666.0, "11295": 1762.0, "11300": 1723.0, "11305": 1753.0, "11310": 1644.0, "11315": 1761.0, "11320": 1688.0, "11325": 1740.0, "11330": 
1776.0, "11335": 1663.0, "11340": 1690.0, "11345": 1592.0, "11350": 1641.0, "11355": 1615.0, "11360": 1617.0, "11365": 1774.0, "11370": 1601.0, "11375": 1605.0, "11380": 1698.0, "11385": 1680.0, "11390": 1676.0, "11395": 1619.0, "11400": 1680.0, "11405": 1613.0, "11410": 1739.0, "11415": 1628.0, "11420": 1640.0, "11425": 1751.0, "11430": 1709.0, "11435": 1675.0, "11440": 1661.0, "11445": 1671.0, "11450": 1632.0, "11455": 1619.0, "11460": 1690.0, "11465": 1623.0, "11470": 1753.0, "11475": 1726.0, "11480": 1732.0, "11485": 1652.0, "11490": 1643.0, "11495": 1721.0, "11500": 1706.0, "11505": 1704.0, "11510": 1692.0, "11515": 1770.0, "11520": 1707.0, "11525": 1634.0, "11530": 1706.0, "11535": 1751.0, "11540": 1773.0, "11545": 1679.0, "11550": 1727.0, "11555": 1767.0, "11560": 1729.0, "11565": 1663.0, "11570": 1711.0, "11575": 1812.0, "11580": 1716.0, "11585": 1703.0, "11590": 1660.0, "11595": 1630.0, "11600": 1670.0, "11605": 1769.0, "11610": 1761.0, "11615": 1667.0, "11620": 1722.0, "11625": 1781.0, "11630": 1803.0, "11635": 1688.0, "11640": 1601.0, "11645": 1727.0, "11650": 1708.0, "11655": 1673.0, "11660": 1675.0, "11665": 1691.0, "11670": 1831.0, "11675": 1570.0, "11680": 1643.0, "11685": 1672.0, "11690": 1807.0, "11695": 1690.0, "11700": 1707.0, "11705": 1693.0, "11710": 1681.0, "11715": 1667.0, "11720": 1599.0, "11725": 1652.0, "11730": 1525.0, "11735": 1644.0, "11740": 1753.0, "11745": 1638.0, "11750": 1653.0, "11755": 1654.0, "11760": 1610.0, "11765": 1652.0, "11770": 1635.0, "11775": 1611.0, "11780": 1547.0, "11785": 1632.0, "11790": 1701.0, "11795": 1692.0, "11800": 1627.0, "11805": 1667.0, "11810": 1616.0, "11815": 1620.0, "11820": 1641.0, "11825": 1626.0, "11830": 1638.0, "11835": 1666.0, "11840": 1681.0, "11845": 1600.0, "11850": 1591.0, "11855": 1634.0, "11860": 1645.0, "11865": 1615.0, "11870": 1521.0, "11875": 1696.0, "11880": 1612.0, "11885": 1569.0, "11890": 1634.0, "11895": 1647.0, "11900": 1586.0, "11905": 1637.0, "11910": 1721.0, "11915": 1622.0, "11920": 1640.0, "11925": 1666.0, "11930": 1655.0, "11935": 1566.0, "11940": 1661.0, "11945": 1545.0, "11950": 1614.0, "11955": 1607.0, "11960": 1611.0, "11965": 1619.0, "11970": 1589.0, "11975": 1598.0, "11980": 1724.0, "11985": 1644.0, "11990": 1743.0, "11995": 1689.0, "12000": 1692.0, "12005": 1701.0, "12010": 1703.0, "12015": 1735.0, "12020": 1776.0, "12025": 1757.0, "12030": 1592.0, "12035": 1692.0, "12040": 1818.0, "12045": 1730.0, "12050": 1602.0, "12055": 1650.0, "12060": 1737.0, "12065": 1594.0, "12070": 1775.0, "12075": 1773.0, "12080": 1768.0, "12085": 1787.0, "12090": 1806.0, "12095": 1741.0, "12100": 1788.0, "12105": 1788.0, "12110": 1681.0, "12115": 1808.0, "12120": 1910.0, "12125": 1735.0, "12130": 1722.0, "12135": 1818.0, "12140": 1699.0, "12145": 1717.0, "12150": 1657.0, "12155": 1675.0, "12160": 1719.0, "12165": 1649.0, "12170": 1654.0, "12175": 1728.0, "12180": 1802.0, "12185": 1669.0, "12190": 1627.0, "12195": 1694.0, "12200": 1682.0, "12205": 1686.0, "12210": 1799.0, "12215": 1694.0, "12220": 1651.0, "12225": 1721.0, "12230": 1677.0, "12235": 1626.0, "12240": 1663.0, "12245": 1779.0, "12250": 1732.0, "12255": 1646.0, "12260": 1661.0, "12265": 1737.0, "12270": 1737.0, "12275": 1780.0, "12280": 1656.0, "12285": 1673.0, "12290": 1741.0, "12295": 1645.0, "12300": 1613.0, "12305": 1739.0, "12310": 1696.0, "12315": 1699.0, "12320": 1712.0, "12325": 1744.0, "12330": 1680.0, "12335": 1708.0, "12340": 1679.0, "12345": 1778.0, "12350": 1740.0, "12355": 1839.0, "12360": 1636.0, "12365": 1707.0, "12370": 1720.0, "12375": 
1729.0, "12380": 1708.0, "12385": 1732.0, "12390": 1701.0, "12395": 1690.0, "12400": 1821.0, "12405": 1751.0, "12410": 1727.0, "12415": 1779.0, "12420": 1740.0, "12425": 1639.0, "12430": 1713.0, "12435": 1740.0, "12440": 1642.0, "12445": 1760.0, "12450": 1905.0, "12455": 1645.0, "12460": 1783.0, "12465": 1747.0, "12470": 1723.0, "12475": 1707.0, "12480": 1668.0, "12485": 1754.0, "12490": 1672.0, "12495": 1692.0, "12500": 1749.0, "12505": 1642.0, "12510": 1651.0, "12515": 1710.0, "12520": 1705.0, "12525": 1693.0, "12530": 1818.0, "12535": 1637.0, "12540": 1746.0, "12545": 1663.0, "12550": 1772.0, "12555": 1747.0, "12560": 1715.0, "12565": 1749.0, "12570": 1727.0, "12575": 1553.0, "12580": 1720.0, "12585": 1630.0, "12590": 1652.0, "12595": 1819.0, "12600": 1705.0, "12605": 1780.0, "12610": 1797.0, "12615": 1689.0, "12620": 1707.0, "12625": 1708.0, "12630": 1693.0, "12635": 1829.0, "12640": 1845.0, "12645": 1708.0, "12650": 1668.0, "12655": 1784.0, "12660": 1700.0, "12665": 1562.0, "12670": 1737.0, "12675": 1675.0, "12680": 1777.0, "12685": 1793.0, "12690": 1736.0, "12695": 1738.0, "12700": 1713.0, "12705": 1631.0, "12710": 1686.0, "12715": 1662.0, "12720": 1668.0, "12725": 1744.0, "12730": 1665.0, "12735": 1735.0, "12740": 1872.0, "12745": 1664.0, "12750": 1751.0, "12755": 1725.0, "12760": 1781.0, "12765": 1739.0, "12770": 1639.0, "12775": 1619.0, "12780": 1734.0, "12785": 1745.0, "12790": 1673.0, "12795": 1659.0, "12800": 1748.0, "12805": 1746.0, "12810": 1734.0, "12815": 1717.0, "12820": 1626.0, "12825": 1623.0, "12830": 1677.0, "12835": 1700.0, "12840": 1704.0, "12845": 1667.0, "12850": 1705.0, "12855": 1664.0, "12860": 1687.0, "12865": 1732.0, "12870": 1723.0, "12875": 1713.0, "12880": 1733.0, "12885": 1712.0, "12890": 1644.0, "12895": 1693.0, "12900": 1718.0, "12905": 1749.0, "12910": 1757.0, "12915": 1720.0, "12920": 1772.0, "12925": 1753.0, "12930": 1643.0, "12935": 1819.0, "12940": 1734.0, "12945": 1736.0, "12950": 2288.0, "12955": 1646.0, "12960": 1759.0, "12965": 1782.0, "12970": 1624.0, "12975": 1757.0, "12980": 1772.0, "12985": 1742.0, "12990": 1644.0, "12995": 1622.0, "13000": 1664.0, "13005": 1636.0, "13010": 1649.0, "13015": 1657.0, "13020": 1617.0, "13025": 1636.0, "13030": 1657.0, "13035": 1640.0, "13040": 1615.0, "13045": 2056.0, "13050": 1596.0, "13055": 1677.0, "13060": 1588.0, "13065": 1598.0, "13070": 1602.0, "13075": 1593.0, "13080": 1657.0, "13085": 1697.0, "13090": 1594.0, "13095": 1740.0, "13100": 1638.0, "13105": 1601.0, "13110": 1611.0, "13115": 1603.0, "13120": 1556.0, "13125": 1640.0, "13130": 1568.0, "13135": 1695.0, "13140": 1676.0, "13145": 1653.0, "13150": 1619.0, "13155": 1639.0, "13160": 1643.0, "13165": 1626.0, "13170": "nan", "13175": 1686.0, "13180": 1627.0, "13185": 1671.0, "13190": 1669.0, "13195": 1609.0, "13200": 1764.0, "13205": 1518.0, "13210": 1636.0, "13215": 1727.0, "13220": 1592.0, "13225": 1652.0, "13230": 1651.0, "13235": 1584.0, "13240": 1642.0, "13245": 1674.0, "13250": 1709.0, "13255": 1624.0, "13260": 1670.0, "13265": 1598.0, "13270": 1599.0, "13275": 1654.0, "13280": 1735.0, "13285": 1631.0, "13290": 1616.0, "13295": 1742.0, "13300": 1683.0, "13305": 1751.0, "13310": 1628.0, "13315": 2158.0, "13320": 1679.0, "13325": 1623.0, "13330": 1538.0, "13335": 1683.0, "13340": 1711.0, "13345": 1627.0, "13350": 1640.0, "13355": 1703.0, "13360": 1675.0, "13365": 1638.0, "13370": 1593.0, "13375": 1554.0, "13380": 1579.0, "13385": 1747.0, "13390": 1659.0, "13395": 1655.0, "13400": 1658.0, "13405": 1607.0, "13410": 1619.0, "13415": 1640.0, "13420": 
1619.0, "13425": 1604.0, "13430": 1688.0, "13435": 1652.0, "13440": 1729.0, "13445": 1643.0, "13450": 1629.0, "13455": 1603.0, "13460": 1602.0, "13465": 1623.0, "13470": 1662.0, "13475": 1658.0, "13480": 1654.0, "13485": 1647.0, "13490": 1654.0, "13495": 1679.0, "13500": 1637.0, "13505": 1613.0, "13510": 1684.0, "13515": 1604.0, "13520": 1578.0, "13525": 1615.0, "13530": 1638.0, "13535": 1675.0, "13540": 1622.0, "13545": 1644.0, "13550": 1589.0, "13555": 1640.0, "13560": 1595.0, "13565": 1570.0, "13570": 1616.0, "13575": 1579.0, "13580": 1603.0, "13585": 1627.0, "13590": 1661.0, "13595": 1636.0, "13600": 1660.0, "13605": 1689.0, "13610": 1589.0, "13615": 1604.0, "13620": 1596.0, "13625": 1588.0, "13630": 1624.0, "13635": 1636.0, "13640": 1607.0, "13645": 1765.0, "13650": 1732.0, "13655": 1616.0, "13660": 1633.0, "13665": 1682.0, "13670": 1647.0, "13675": 1597.0, "13680": 1611.0, "13685": 1593.0, "13690": 1619.0, "13695": 1595.0, "13700": 1740.0, "13705": 1574.0, "13710": 1673.0, "13715": 1589.0, "13720": 1658.0, "13725": 1613.0, "13730": 1674.0, "13735": 1648.0, "13740": 1619.0, "13745": 1626.0, "13750": 1611.0, "13755": 1629.0, "13760": 1603.0, "13765": 1641.0, "13770": 1561.0, "13775": 1696.0, "13780": 1577.0, "13785": 1668.0, "13790": 1563.0, "13795": 1677.0, "13800": 1622.0, "13805": 1621.0, "13810": 1662.0, "13815": 1577.0, "13820": 1624.0, "13825": 1592.0, "13830": 1684.0, "13835": 1649.0, "13840": 1588.0, "13845": 1628.0, "13850": 1585.0, "13855": 1648.0, "13860": 1693.0, "13865": 1634.0, "13870": 1655.0, "13875": 2121.0, "13880": 1768.0, "13885": 1622.0, "13890": 1727.0, "13895": 1642.0, "13900": 1600.0, "13905": 1625.0, "13910": 1557.0, "13915": 1565.0, "13920": 1655.0, "13925": 1615.0, "13930": 1664.0, "13935": 1551.0, "13940": 1634.0, "13945": 1685.0, "13950": 1626.0, "13955": 1632.0, "13960": 1725.0, "13965": 1581.0, "13970": 1690.0, "13975": 1638.0, "13980": 1569.0, "13985": 1632.0, "13990": 1675.0, "13995": 1721.0, "14000": 1656.0, "14005": 1611.0, "14010": 1595.0, "14015": 1644.0, "14020": 1601.0, "14025": 1647.0, "14030": 1741.0, "14035": 1698.0, "14040": 1550.0, "14045": 1654.0, "14050": 1681.0, "14055": 1687.0, "14060": 1680.0, "14065": 1666.0, "14070": 1559.0, "14075": 1642.0, "14080": 1706.0, "14085": 1659.0, "14090": 1577.0, "14095": 1667.0, "14100": 1693.0, "14105": 1681.0, "14110": 1549.0, "14115": 1612.0, "14120": 1616.0, "14125": 1757.0, "14130": 1683.0, "14135": 1688.0, "14140": 1560.0, "14145": 1523.0, "14150": 1611.0, "14155": 1625.0, "14160": 1700.0, "14165": 1658.0, "14170": 1638.0, "14175": 1590.0, "14180": 1569.0, "14185": 1645.0, "14190": 1589.0, "14195": 1614.0, "14200": 1546.0, "14205": 1629.0, "14210": 1592.0, "14215": 1643.0, "14220": 1638.0, "14225": 1670.0, "14230": 1615.0, "14235": 1722.0, "14240": 1625.0, "14245": 1531.0, "14250": 1672.0, "14255": 1617.0, "14260": 1640.0, "14265": 1638.0, "14270": 1647.0, "14275": 1605.0, "14280": 1673.0, "14285": 1743.0, "14290": 1705.0, "14295": 1622.0, "14300": 1594.0, "14305": 1638.0, "14310": 1687.0, "14315": 1660.0, "14320": 1537.0, "14325": 1577.0, "14330": 1752.0, "14335": 1693.0, "14340": 1640.0, "14345": 1700.0, "14350": 1618.0, "14355": 1630.0, "14360": 1677.0, "14365": 1721.0, "14370": 1721.0, "14375": 1724.0, "14380": 1793.0, "14385": 1779.0, "14390": 1631.0, "14395": 1773.0, "14400": 1716.0, "14405": 1792.0, "14410": 1802.0, "14415": 1748.0, "14420": 1786.0, "14425": 1746.0, "14430": 1719.0, "14435": 1711.0, "14440": 1671.0, "14445": 1748.0, "14450": 1671.0, "14455": 1655.0, "14460": 1766.0, "14465": 
1744.0, "14470": 1800.0, "14475": 1672.0, "14480": 1788.0, "14485": 1779.0, "14490": 1765.0, "14495": 1595.0, "14500": 1726.0, "14505": 1724.0, "14510": 1711.0, "14515": 1694.0, "14520": 1642.0, "14525": 1629.0, "14530": 1826.0, "14535": 1687.0, "14540": 1781.0, "14545": 1788.0, "14550": 1869.0, "14555": 1769.0, "14560": 1766.0, "14565": 1760.0, "14570": 1698.0, "14575": 1796.0, "14580": 1756.0, "14585": 1722.0, "14590": 1738.0, "14595": 1837.0, "14600": 1786.0, "14605": 1740.0, "14610": 1818.0, "14615": 1756.0, "14620": 1694.0, "14625": 1852.0, "14630": 1759.0, "14635": 1783.0, "14640": 1826.0, "14645": 1783.0, "14650": 1740.0, "14655": 1794.0, "14660": 1681.0, "14665": 1773.0, "14670": 1931.0, "14675": 1899.0, "14680": 1826.0, "14685": 1864.0, "14690": 1568.0, "14695": 1716.0, "14700": 1797.0, "14705": 1730.0, "14710": 1753.0, "14715": 1772.0, "14720": 1724.0, "14725": 1731.0, "14730": 1783.0, "14735": 1947.0, "14740": 1716.0, "14745": 1650.0, "14750": 1807.0, "14755": 1726.0, "14760": 1697.0, "14765": 1885.0, "14770": 1800.0, "14775": 1806.0, "14780": 1781.0, "14785": 1790.0, "14790": 1715.0, "14795": 1757.0, "14800": 1747.0, "14805": 1841.0, "14810": 1755.0, "14815": 1727.0, "14820": 1718.0, "14825": 1725.0, "14830": 1796.0, "14835": 1713.0, "14840": 1707.0, "14845": 1664.0, "14850": 1682.0, "14855": 1767.0, "14860": 1771.0, "14865": 1733.0, "14870": 1708.0, "14875": 1841.0, "14880": 1661.0, "14885": 1873.0, "14890": 1673.0, "14895": 1763.0, "14900": 1718.0, "14905": 1732.0, "14910": 1673.0, "14915": 1590.0, "14920": 1817.0, "14925": 1767.0, "14930": 1701.0, "14935": 1900.0, "14940": 1760.0, "14945": 1624.0, "14950": 1628.0, "14955": 1623.0, "14960": 1733.0, "14965": 1690.0, "14970": 1721.0, "14975": 1606.0, "14980": 1805.0, "14985": 1681.0, "14990": 1790.0, "14995": 1895.0, "15000": 1750.0, "15005": 1844.0, "15010": 1761.0, "15015": 1873.0, "15020": 1736.0, "15025": 1610.0, "15030": 1853.0, "15035": 1792.0, "15040": 1630.0, "15045": 1737.0, "15050": 1701.0, "15055": 1771.0, "15060": 1769.0, "15065": 1729.0, "15070": 1836.0, "15075": 1646.0, "15080": 1738.0, "15085": 1748.0, "15090": 1841.0, "15095": 1810.0, "15100": 1767.0, "15105": 1745.0, "15110": 1831.0, "15115": 1790.0, "15120": 1761.0, "15125": 1871.0, "15130": 1737.0, "15135": 1716.0, "15140": 1926.0, "15145": 1734.0, "15150": 1888.0, "15155": 1794.0, "15160": 1712.0, "15165": 1808.0, "15170": 1763.0, "15175": 1787.0, "15180": 1812.0, "15185": 1751.0, "15190": 1760.0, "15195": 1774.0, "15200": 1653.0, "15205": 1770.0, "15210": 1782.0, "15215": 1801.0, "15220": 1822.0, "15225": 1851.0, "15230": 1717.0, "15235": 1701.0, "15240": 1800.0, "15245": 1760.0, "15250": 1653.0, "15255": 1726.0, "15260": 1789.0, "15265": 1810.0, "15270": 1847.0, "15275": 1718.0, "15280": 1748.0, "15285": 1767.0, "15290": 1772.0, "15295": 1664.0, "15300": 1776.0, "15305": 1788.0, "15310": 1862.0, "15315": 1835.0, "15320": 1819.0, "15325": 1770.0, "15330": 1787.0, "15335": 1774.0, "15340": 1840.0, "15345": 1724.0, "15350": 1735.0, "15355": 1861.0, "15360": 1761.0, "15365": 1719.0, "15370": 1628.0, "15375": 1638.0, "15380": 1655.0, "15385": 1582.0, "15390": 1655.0, "15395": 1675.0, "15400": 1605.0, "15405": 1680.0, "15410": 1837.0, "15415": 1660.0, "15420": 1774.0, "15425": 1705.0, "15430": 1728.0, "15435": 1622.0, "15440": 1631.0, "15445": 1664.0, "15450": 1619.0, "15455": 1645.0, "15460": 1594.0, "15465": 1693.0, "15470": 1606.0, "15475": 1640.0, "15480": 1681.0, "15485": 1694.0, "15490": 1678.0, "15495": 1704.0, "15500": 1702.0, "15505": 1692.0, "15510": 
1599.0, "15515": 1725.0, "15520": 1666.0, "15525": 1652.0, "15530": 1653.0, "15535": 1607.0, "15540": 1669.0, "15545": 1620.0, "15550": 1754.0, "15555": 1593.0, "15560": 1613.0, "15565": 1646.0, "15570": 1739.0, "15575": 1647.0, "15580": 1664.0, "15585": 1620.0, "15590": 1652.0, "15595": 1673.0, "15600": 1607.0, "15605": 1639.0, "15610": 1524.0, "15615": 1696.0, "15620": 1559.0, "15625": 1702.0, "15630": 1841.0, "15635": 1628.0, "15640": 1647.0, "15645": 1644.0, "15650": 1663.0, "15655": 1633.0, "15660": 1699.0, "15665": 1682.0, "15670": 1597.0, "15675": 1571.0, "15680": 1627.0, "15685": 1651.0, "15690": 1724.0, "15695": 1722.0, "15700": 1625.0, "15705": "nan", "15710": 1578.0, "15715": 1623.0, "15720": 1626.0, "15725": 1608.0, "15730": 1567.0, "15735": 1792.0, "15740": 1672.0, "15745": 1693.0, "15750": 1775.0, "15755": 1608.0, "15760": 1638.0, "15765": 1676.0, "15770": 1689.0, "15775": 1603.0, "15780": 1641.0, "15785": 1600.0, "15790": 1760.0, "15795": 1675.0, "15800": 1559.0, "15805": 1654.0, "15810": 1680.0, "15815": 1615.0, "15820": 1669.0, "15825": 1649.0, "15830": 1626.0, "15835": 1757.0, "15840": 1591.0, "15845": 1608.0, "15850": 1617.0, "15855": 1616.0, "15860": 1678.0, "15865": 1722.0, "15870": 1629.0, "15875": 1667.0, "15880": 1660.0, "15885": 1661.0, "15890": 1647.0, "15895": 1731.0, "15900": 1726.0, "15905": 1572.0, "15910": 1713.0, "15915": 1613.0, "15920": 1594.0, "15925": 1670.0, "15930": 1606.0, "15935": 1649.0, "15940": 1684.0, "15945": 1657.0, "15950": 1719.0, "15955": 1655.0, "15960": 1587.0, "15965": 1657.0, "15970": 1728.0, "15975": 1573.0, "15980": 1610.0, "15985": 1688.0, "15990": 1729.0, "15995": 1633.0, "16000": 1619.0, "16005": 1689.0, "16010": 1670.0, "16015": 1725.0, "16020": 1734.0, "16025": 1755.0, "16030": 1745.0, "16035": 1652.0, "16040": 1674.0, "16045": 1724.0, "16050": 1703.0, "16055": 1655.0, "16060": 1650.0, "16065": 1598.0, "16070": 1703.0, "16075": 1642.0, "16080": 1694.0, "16085": 1687.0, "16090": 1706.0, "16095": 1645.0, "16100": 1741.0, "16105": 1573.0, "16110": 1641.0, "16115": 1681.0, "16120": 1657.0, "16125": 1673.0, "16130": 1718.0, "16135": 1716.0, "16140": 1756.0, "16145": 1665.0, "16150": 1638.0, "16155": 1649.0, "16160": 1725.0, "16165": 1591.0, "16170": 1669.0, "16175": 1733.0, "16180": 1639.0, "16185": 1707.0, "16190": 1665.0, "16195": 1549.0, "16200": 1669.0, "16205": 1714.0, "16210": 1736.0, "16215": 1636.0, "16220": 1719.0, "16225": 1717.0, "16230": 1552.0, "16235": 1660.0, "16240": 1751.0, "16245": 1690.0, "16250": 1615.0, "16255": 1593.0, "16260": 1726.0, "16265": 1588.0, "16270": 1652.0, "16275": 1608.0, "16280": 1678.0, "16285": 1470.0, "16290": 1643.0, "16295": 1648.0, "16300": 1673.0, "16305": 1658.0, "16310": 1756.0, "16315": 1629.0, "16320": 1654.0, "16325": 1633.0, "16330": 1868.0, "16335": 1636.0, "16340": 1605.0, "16345": 1749.0, "16350": 1673.0, "16355": 1578.0, "16360": 1626.0, "16365": 1668.0, "16370": 2085.0, "16375": 1693.0, "16380": 1800.0, "16385": 1630.0, "16390": 1601.0, "16395": 1819.0, "16400": 1731.0, "16405": 1572.0, "16410": 1690.0, "16415": 1631.0, "16420": 1672.0, "16425": 1677.0, "16430": 1732.0, "16435": 1656.0, "16440": 1686.0, "16445": 1750.0, "16450": 1690.0, "16455": 1732.0, "16460": 1628.0, "16465": 1731.0, "16470": 1652.0, "16475": 1721.0, "16480": 1674.0, "16485": 1768.0, "16490": 1619.0, "16495": 1753.0, "16500": 1750.0, "16505": 1679.0, "16510": 1735.0, "16515": 1650.0, "16520": 1665.0, "16525": 1778.0, "16530": 1772.0, "16535": 1892.0, "16540": 1690.0, "16545": 1613.0, "16550": 1595.0, "16555": 
1847.0, "16560": 1703.0, "16565": 1885.0, "16570": 1772.0, "16575": 1818.0, "16580": 1829.0, "16585": 1843.0, "16590": 1957.0, "16595": 1769.0, "16600": 1779.0, "16605": 1982.0, "16610": 1653.0, "16615": 1774.0, "16620": 1837.0, "16625": 1891.0, "16630": 1626.0, "16635": 1736.0, "16640": 1840.0, "16645": 1872.0, "16650": 1669.0, "16655": 1885.0, "16660": 1820.0, "16665": 1745.0, "16670": 1728.0, "16675": 1654.0, "16680": 1750.0, "16685": 1971.0, "16690": 1789.0, "16695": 1848.0, "16700": 1897.0, "16705": 1780.0, "16710": 1906.0, "16715": 1906.0, "16720": 1782.0, "16725": 1784.0, "16730": 1961.0, "16735": 1878.0, "16740": 1769.0, "16745": 1781.0, "16750": 1744.0, "16755": 1834.0, "16760": 1710.0, "16765": 1789.0, "16770": 1758.0, "16775": 1868.0, "16780": 1795.0, "16785": 1737.0, "16790": 1788.0, "16795": 1751.0, "16800": 1767.0, "16805": 1820.0, "16810": 1818.0, "16815": 1861.0, "16820": 1931.0, "16825": 1712.0, "16830": 1885.0, "16835": 1752.0, "16840": 1719.0, "16845": 1728.0, "16850": 1766.0, "16855": 1794.0, "16860": 1730.0, "16865": 1760.0, "16870": 1838.0, "16875": 1734.0, "16880": 1828.0, "16885": 1847.0, "16890": 1677.0, "16895": 1656.0, "16900": 1865.0, "16905": 1939.0, "16910": 1836.0, "16915": 1743.0, "16920": 1870.0, "16925": 1865.0, "16930": 1812.0, "16935": 1802.0, "16940": 1756.0, "16945": 1694.0, "16950": 1787.0, "16955": 1967.0, "16960": 1752.0, "16965": 1712.0, "16970": 1830.0, "16975": 1850.0, "16980": 1852.0, "16985": 1578.0, "16990": 1677.0, "16995": 1825.0, "17000": 1806.0, "17005": 1947.0, "17010": 1763.0, "17015": 1840.0, "17020": 1861.0, "17025": 1891.0, "17030": 1750.0, "17035": 1851.0, "17040": 1807.0, "17045": 1820.0, "17050": 1935.0, "17055": 1756.0, "17060": 1841.0, "17065": 1604.0, "17070": 1902.0, "17075": 1728.0, "17080": 1601.0, "17085": 1879.0, "17090": 1940.0, "17095": 1833.0, "17100": 1821.0, "17105": 1897.0, "17110": 1710.0, "17115": 1913.0, "17120": 1860.0, "17125": 1780.0, "17130": 1819.0, "17135": 1889.0, "17140": 1889.0, "17145": 1863.0, "17150": 1851.0, "17155": 1728.0, "17160": 1869.0, "17165": 1928.0, "17170": 1910.0, "17175": 1906.0, "17180": 1832.0, "17185": 1881.0, "17190": 1767.0, "17195": 1771.0, "17200": 1776.0, "17205": 1696.0, "17210": 1718.0, "17215": 1814.0, "17220": 1702.0, "17225": 1774.0, "17230": 1861.0, "17235": 1698.0, "17240": 1876.0, "17245": 1880.0, "17250": 1812.0, "17255": 1726.0, "17260": 1537.0, "17265": 1794.0, "17270": 2016.0, "17275": 1845.0, "17280": 1871.0, "17285": 1842.0, "17290": 1797.0, "17295": 1862.0, "17300": 1784.0, "17305": 1898.0, "17310": 1804.0, "17315": 1963.0, "17320": 1906.0, "17325": 1907.0, "17330": 1664.0, "17335": 1886.0, "17340": 1897.0, "17345": 2005.0, "17350": 1765.0, "17355": 1898.0, "17360": 1848.0, "17365": 1783.0, "17370": 1737.0, "17375": 1847.0, "17380": 1701.0, "17385": 1841.0, "17390": 1796.0, "17395": 1923.0, "17400": 1774.0, "17405": 1832.0, "17410": 1887.0, "17415": 1852.0, "17420": 1926.0, "17425": 1876.0, "17430": 1766.0, "17435": 1790.0, "17440": 1733.0, "17445": 1745.0, "17450": 1949.0, "17455": 1646.0, "17460": 1755.0, "17465": 1965.0, "17470": 1932.0, "17475": 1813.0, "17480": 1857.0, "17485": 1873.0, "17490": 1760.0, "17495": 1880.0, "17500": 1834.0, "17505": 1775.0, "17510": 1846.0, "17515": 1853.0, "17520": 1782.0, "17525": 1735.0, "17530": 1810.0, "17535": 1780.0, "17540": 1791.0, "17545": 1697.0, "17550": 1837.0, "17555": 1756.0, "17560": 1763.0, "17565": 1633.0, "17570": 1620.0, "17575": 1669.0, "17580": 1709.0, "17585": 1691.0, "17590": 1701.0, "17595": 1667.0, "17600": 
1724.0, "17605": 1615.0, "17610": 1715.0, "17615": 1663.0, "17620": 1636.0, "17625": 1539.0, "17630": 1652.0, "17635": 1624.0, "17640": 1743.0, "17645": 1733.0, "17650": 1670.0, "17655": 1691.0, "17660": 1591.0, "17665": 1689.0, "17670": 1705.0, "17675": 1842.0, "17680": 1588.0, "17685": 1643.0, "17690": 1599.0, "17695": 1766.0, "17700": 1768.0, "17705": 1758.0, "17710": 1748.0, "17715": 1655.0, "17720": 1760.0, "17725": 1706.0, "17730": 1740.0, "17735": 1726.0, "17740": 1564.0, "17745": 1657.0, "17750": 1794.0, "17755": 1628.0, "17760": 1747.0, "17765": 1719.0, "17770": 1730.0, "17775": 1697.0, "17780": 1805.0, "17785": 1645.0, "17790": 1639.0, "17795": 1779.0, "17800": 1631.0, "17805": 1711.0, "17810": 1676.0, "17815": 1710.0, "17820": 1726.0, "17825": 1671.0, "17830": 1767.0, "17835": 1656.0, "17840": 1784.0, "17845": 1663.0, "17850": 1791.0, "17855": 1743.0, "17860": 1711.0, "17865": 1598.0, "17870": 1687.0, "17875": 1650.0, "17880": 1643.0, "17885": 1701.0, "17890": 1711.0, "17895": 1658.0, "17900": 1675.0, "17905": 1620.0, "17910": 1777.0, "17915": 1681.0, "17920": 1709.0, "17925": 1795.0, "17930": 1535.0, "17935": 1635.0, "17940": 1658.0, "17945": 1558.0, "17950": 1679.0, "17955": 1681.0, "17960": 1698.0, "17965": 1667.0, "17970": 1706.0, "17975": 1552.0, "17980": 1662.0, "17985": 1710.0, "17990": 1629.0, "17995": 1708.0, "18000": 1634.0, "18005": 1724.0, "18010": 1694.0, "18015": 1659.0, "18020": 1679.0, "18025": 1793.0, "18030": 1611.0, "18035": 1743.0, "18040": 1626.0, "18045": 1698.0, "18050": 1641.0, "18055": 1643.0, "18060": 1698.0, "18065": 1629.0, "18070": 1742.0, "18075": 1708.0, "18080": 1711.0, "18085": 1699.0, "18090": 1645.0, "18095": 1682.0, "18100": 1733.0, "18105": 1681.0, "18110": 1534.0, "18115": 1647.0, "18120": 1685.0, "18125": 1681.0, "18130": 1589.0, "18135": 1757.0, "18140": 1746.0, "18145": 1672.0, "18150": 1744.0, "18155": 1703.0, "18160": 1729.0, "18165": 1694.0, "18170": 1656.0, "18175": 1541.0, "18180": 1757.0, "18185": 1814.0, "18190": 1792.0, "18195": 1659.0, "18200": 1736.0, "18205": 1675.0, "18210": 1614.0, "18215": 1758.0, "18220": 1696.0, "18225": 1668.0, "18230": 1634.0, "18235": 1605.0, "18240": 1700.0, "18245": 1656.0, "18250": 1757.0, "18255": 1590.0, "18260": 1745.0, "18265": 1671.0, "18270": 1720.0, "18275": 1669.0, "18280": 1627.0, "18285": 1607.0, "18290": 1733.0, "18295": 1752.0, "18300": 1684.0, "18305": 1741.0, "18310": 1701.0, "18315": 1702.0, "18320": 1701.0, "18325": 1760.0, "18330": 1730.0, "18335": 1664.0, "18340": 1597.0, "18345": 1679.0, "18350": 1582.0, "18355": 1736.0, "18360": 1691.0, "18365": 1606.0, "18370": 1658.0, "18375": 1663.0, "18380": 1669.0, "18385": 1685.0, "18390": 1688.0, "18395": 1745.0, "18400": 1574.0, "18405": 1681.0, "18410": 1562.0, "18415": 1702.0, "18420": 1715.0, "18425": 1686.0, "18430": 1672.0, "18435": 1697.0, "18440": 1847.0, "18445": 1657.0, "18450": 1778.0, "18455": 1658.0, "18460": 1679.0, "18465": 1700.0, "18470": 1702.0, "18475": 1606.0, "18480": 1600.0, "18485": 1721.0, "18490": 1724.0, "18495": 1658.0, "18500": 1681.0, "18505": 1708.0, "18510": 1745.0, "18515": 1659.0, "18520": 1692.0, "18525": 1735.0, "18530": 1728.0, "18535": 1572.0, "18540": 1725.0, "18545": 1615.0, "18550": 1740.0, "18555": 1758.0, "18560": 1750.0, "18565": 1656.0, "18570": 1658.0, "18575": 1613.0, "18580": 1647.0, "18585": 1742.0, "18590": 1640.0, "18595": 1648.0, "18600": 1690.0, "18605": 1632.0, "18610": 1650.0, "18615": 1593.0, "18620": 1643.0, "18625": 1623.0, "18630": 1690.0, "18635": 1798.0, "18640": 1743.0, "18645": 
1699.0, "18650": 1695.0, "18655": 1653.0, "18660": 1726.0, "18665": 1711.0, "18670": 1658.0, "18675": 1713.0, "18680": 1773.0, "18685": 1761.0, "18690": 1690.0, "18695": 1727.0, "18700": 1746.0, "18705": 1661.0, "18710": 1654.0, "18715": 1623.0, "18720": "nan", "18725": 1694.0, "18730": 1710.0, "18735": 1601.0, "18740": 1701.0, "18745": 1732.0, "18750": 1712.0, "18755": 1737.0, "18760": 1743.0, "18765": 1689.0, "18770": 1641.0, "18775": 1618.0, "18780": 1806.0, "18785": 1681.0, "18790": 1649.0, "18795": 1690.0, "18800": 1658.0, "18805": 1733.0, "18810": 1650.0, "18815": 1692.0, "18820": 1699.0, "18825": 1726.0, "18830": 1758.0, "18835": 1812.0, "18840": 1631.0, "18845": 1700.0, "18850": 1661.0, "18855": 1669.0, "18860": 1575.0, "18865": 1669.0, "18870": 1728.0, "18875": 1742.0, "18880": 1734.0, "18885": 1948.0, "18890": 1791.0, "18895": 1595.0, "18900": 1803.0, "18905": 1627.0, "18910": 1690.0, "18915": 1667.0, "18920": 1694.0, "18925": 1591.0, "18930": 1650.0, "18935": 1649.0, "18940": 1606.0, "18945": 1619.0, "18950": 1721.0, "18955": 1702.0, "18960": 1762.0, "18965": 1697.0, "18970": 1787.0, "18975": 1620.0, "18980": 1652.0, "18985": 1660.0, "18990": 1618.0, "18995": 1689.0, "19000": 1705.0, "19005": 1568.0, "19010": 1744.0, "19015": 1658.0, "19020": 1646.0, "19025": 1639.0, "19030": 1670.0, "19035": 1737.0, "19040": 1749.0, "19045": 1621.0, "19050": 1584.0, "19055": 1700.0, "19060": 1711.0, "19065": 1690.0, "19070": 1687.0, "19075": 1708.0, "19080": 1724.0, "19085": 1661.0, "19090": 1704.0, "19095": 1606.0, "19100": 1776.0, "19105": 1751.0, "19110": 1680.0, "19115": 1682.0, "19120": 1598.0, "19125": 1571.0, "19130": 1764.0, "19135": 1706.0, "19140": 1668.0, "19145": 1683.0, "19150": 1656.0, "19155": 1615.0, "19160": 1680.0, "19165": 1689.0, "19170": 1709.0, "19175": 1603.0, "19180": 1828.0, "19185": 1665.0, "19190": 1772.0, "19195": 1681.0, "19200": 1663.0, "19205": 1652.0, "19210": 1796.0, "19215": 1934.0, "19220": 1844.0, "19225": 1728.0, "19230": 1753.0, "19235": 1681.0, "19240": 1692.0, "19245": 1684.0, "19250": 1708.0, "19255": 1729.0, "19260": 1722.0, "19265": 1730.0, "19270": 2319.0, "19275": 1782.0, "19280": 1720.0, "19285": 1690.0, "19290": 1753.0, "19295": 1622.0, "19300": 1787.0, "19305": 1655.0, "19310": 1656.0, "19315": 1597.0, "19320": 1728.0, "19325": 1684.0, "19330": 1693.0, "19335": 1617.0, "19340": 1808.0, "19345": 1723.0, "19350": 1727.0, "19355": 1777.0, "19360": 1748.0, "19365": 1618.0, "19370": 1620.0, "19375": 1802.0, "19380": 1590.0, "19385": 1799.0, "19390": 1677.0, "19395": 1721.0, "19400": 1734.0, "19405": 1636.0, "19410": 1589.0, "19415": 1736.0, "19420": 1675.0, "19425": 1732.0, "19430": 1688.0, "19435": 1730.0, "19440": 1730.0, "19445": 1686.0, "19450": 1696.0, "19455": 1654.0, "19460": 1682.0, "19465": 1665.0, "19470": 1741.0, "19475": 1706.0, "19480": 1695.0, "19485": 1723.0, "19490": 1647.0, "19495": 1618.0, "19500": 1772.0, "19505": 1723.0, "19510": 1682.0, "19515": 1778.0, "19520": 1803.0, "19525": 1714.0, "19530": 1734.0, "19535": 1722.0, "19540": 1625.0, "19545": 1717.0, "19550": 1678.0, "19555": 1766.0, "19560": 1664.0, "19565": 1676.0, "19570": 1702.0, "19575": 1690.0, "19580": 1755.0, "19585": 1562.0, "19590": 1624.0, "19595": 1696.0, "19600": 1725.0, "19605": 1708.0, "19610": 1720.0, "19615": 1671.0, "19620": 1629.0, "19625": 1704.0, "19630": 1642.0, "19635": 1684.0, "19640": 1695.0, "19645": 1733.0, "19650": 1649.0, "19655": 1721.0, "19660": 1704.0, "19665": 1751.0, "19670": 1631.0, "19675": 1628.0, "19680": 1618.0, "19685": 1718.0, "19690": 
1697.0, "19695": 1651.0, "19700": 1538.0, "19705": 1748.0, "19710": 1700.0, "19715": 1726.0, "19720": 1712.0, "19725": 1695.0, "19730": 1653.0, "19735": 1728.0, "19740": 1701.0, "19745": 1679.0, "19750": 1722.0, "19755": 1546.0, "19760": 1670.0, "19765": 1715.0, "19770": 1726.0, "19775": 1719.0, "19780": 1811.0, "19785": 1585.0, "19790": 1685.0, "19795": 1781.0, "19800": 1612.0, "19805": 1755.0, "19810": 1712.0, "19815": 1695.0, "19820": 1787.0, "19825": 1724.0, "19830": 1675.0, "19835": 1662.0, "19840": 1626.0, "19845": 1645.0, "19850": 1805.0, "19855": 1614.0, "19860": 1692.0, "19865": 1735.0, "19870": 1663.0, "19875": 1726.0, "19880": 1641.0, "19885": 1608.0, "19890": 1715.0, "19895": 1562.0, "19900": 1608.0, "19905": 1762.0, "19910": 1666.0, "19915": 1682.0, "19920": 1753.0, "19925": 1793.0, "19930": 1686.0, "19935": 1681.0, "19940": 1662.0, "19945": 1710.0, "19950": 1599.0, "19955": 1724.0, "19960": 1841.0, "19965": 1705.0, "19970": 1750.0, "19975": 1729.0, "19980": 1709.0, "19985": 1707.0, "19990": 1544.0, "19995": 1630.0, "20000": 1782.0}}, "iteration-time": {"start_step": 1, "end_step": 20000, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 0.91292, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 0.32784, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 0.33519, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 0.33112, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 0.33523, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 0.3432, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 0.33386, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 0.33875, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", 
"860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 0.33858, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 0.3302, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 0.34293, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 0.34392, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 0.33512, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 0.33975, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 0.33834, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 0.33763, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 0.33552, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 0.33502, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 0.34001, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 0.33879, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", 
"2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 0.34388, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 0.34835, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 0.34466, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 0.34267, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 0.33866, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 0.3393, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 0.35245, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 0.35786, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 0.34528, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 0.34896, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 0.35151, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 0.34584, "3205": "nan", "3210": "nan", "3215": "nan", 
"3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 0.34582, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 0.34216, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 0.34124, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 0.34797, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 0.35186, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 0.34609, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 0.34393, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 0.34984, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 0.34896, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 0.3506, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 0.45882, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": 
"nan", "4400": 0.37314, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 0.33734, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 0.34251, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 0.34779, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 0.34069, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 0.33704, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 0.34293, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 0.34037, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 0.33298, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 0.34167, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 0.33679, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 0.33318, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": 
"nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 0.34118, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 0.34805, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 0.3416, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 0.35604, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 0.35206, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 0.34167, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 0.34138, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 0.34243, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 0.34614, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 0.33927, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 0.34039, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 0.34165, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", 
"6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 0.34147, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 0.34676, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 0.33592, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 0.34949, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 0.34231, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 0.34033, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 0.33786, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 0.33643, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 0.3385, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 0.33905, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 0.34054, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 0.3486, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": 
"nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 0.34059, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 0.34197, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 0.33395, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 0.34298, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 0.35464, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 0.4285, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 0.34513, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 0.34594, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 0.34342, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 0.35219, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 0.34634, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 0.33495, 
"9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 0.33066, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 0.34279, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 0.34036, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 0.34316, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 0.34333, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 0.34798, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 0.34497, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 0.34155, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 0.3382, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 0.33903, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 0.33528, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", 
"10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 0.3341, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 0.33929, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 0.33563, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 0.34152, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 0.34121, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 0.33513, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 0.33747, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 0.33991, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 0.33892, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 0.34014, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 0.34123, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": 
"nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 0.34434, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 0.33596, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 0.33816, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 0.33474, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 0.34829, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 0.3445, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 0.34043, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 0.33393, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 0.33328, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 0.33257, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 0.3419, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", 
"12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 0.34599, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 0.33258, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 0.33127, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": 0.33811, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 0.33907, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 0.33854, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 0.33664, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 0.33529, "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 0.32939, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 0.34101, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": 0.34178, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": 
"nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 0.34074, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 0.33396, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 0.33507, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": 0.33147, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 0.33986, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 0.33756, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 0.33568, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 0.33396, "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 0.33892, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 0.33588, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": 0.33902, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", 
"14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 0.33652, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 0.34119, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 0.3399, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": 0.338, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 0.33969, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 0.34286, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 0.33579, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 0.33908, "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 0.33736, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 0.3437, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": 0.33639, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", 
"15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 0.34159, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 0.33693, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 0.36098, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": 0.33646, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 0.33584, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 0.33472, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 0.34035, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 0.33561, "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 0.33934, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 0.32974, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": 0.33637, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": 
"nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 0.33598, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 0.33059, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 0.33157, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": 0.3396, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 0.33515, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 0.33962, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 0.34326, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 0.33564, "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 0.34281, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 0.33788, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 0.33819, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", 
"17985": "nan", "17990": "nan", "17995": "nan", "18000": 0.34141, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 0.33895, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 0.34267, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": 0.34104, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 0.33806, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 0.34004, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 0.3388, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 0.33781, "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 0.34113, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 0.33606, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 0.33626, "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": 
"nan", "19090": "nan", "19095": "nan", "19100": 0.33348, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 0.33681, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 0.34319, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 0.34625, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 0.33895, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 0.33456, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 0.33621, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 0.34058, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 0.33516, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 0.33846}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml index 4c8864a..0967ee8 100644 --- a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml @@ -1,48 +1,48 @@ -ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: '1' - NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1' - -TEST_TYPE: 'release' -MODEL_ARGS: - # Bert model args - --num-layers: 24 - --hidden-size: 1024 - --num-attention-heads: 16 - 
--seq-length: 512 - --max-position-embeddings: 512 - # Training args - --micro-batch-size: 4 - --global-batch-size: 32 - --train-iters: 20000 - --weight-decay: 1e-2 - --clip-grad: 1.0 - --fp16: true - --lr: 0.0001 - --lr-decay-style: linear - --min-lr: 1.0e-5 - --lr-warmup-fraction: .01 - --bert-no-binary-head: true - # Model parallel - --tensor-model-parallel-size: 8 - --pipeline-model-parallel-size: 8 - # Data args - --data-path: ${DATA_BLEND} - --vocab-file: ${DATA_PATH}/vocab.txt - --split: 949,50,1 - --data-cache-path: ${DATA_CACHE_PATH} - # EVAL_AND_LOGGING_ARGS - --log-interval: 100 - --save-interval: 2000 - --eval-interval: 1000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} - --eval-iters: 10 - --tensorboard-dir: ${TENSORBOARD_PATH} - --log-timers-to-tensorboard: true - --log-memory-to-tensorboard: true - --log-num-zeros-in-grad: true - --log-params-norm: true - --log-validation-ppl-to-tensorboard: true - --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: '1' + NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1' + +TEST_TYPE: 'release' +MODEL_ARGS: + # Bert model args + --num-layers: 24 + --hidden-size: 1024 + --num-attention-heads: 16 + --seq-length: 512 + --max-position-embeddings: 512 + # Training args + --micro-batch-size: 4 + --global-batch-size: 32 + --train-iters: 20000 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --fp16: true + --lr: 0.0001 + --lr-decay-style: linear + --min-lr: 1.0e-5 + --lr-warmup-fraction: .01 + --bert-no-binary-head: true + # Model parallel + --tensor-model-parallel-size: 8 + --pipeline-model-parallel-size: 8 + # Data args + --data-path: ${DATA_BLEND} + --vocab-file: ${DATA_PATH}/vocab.txt + --split: 949,50,1 + --data-cache-path: ${DATA_CACHE_PATH} + # EVAL_AND_LOGGING_ARGS + --log-interval: 100 + --save-interval: 2000 + --eval-interval: 1000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --eval-iters: 10 + --tensorboard-dir: ${TENSORBOARD_PATH} + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true + --wandb-project: megatron-core-release-runs + --wandb-exp-name: ${WANDB_EXPERIMENT} --attention-backend: unfused \ No newline at end of file diff --git a/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py b/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py index ac5482b..05270d4 100644 --- a/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py +++ b/tests/functional_tests/test_cases/common/ckpt_converter/__main__.py @@ -1,630 +1,630 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -import os -import shutil -import subprocess -import sys -import time -import types -import typing as T -from collections import namedtuple - -import numpy as np -import torch - -from megatron.core import parallel_state -from megatron.core.datasets.gpt_dataset import _get_ltor_masks_and_position_ids -from megatron.core.enums import ModelType -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.pipeline_parallel import get_forward_backward_func -from megatron.core.tensor_parallel.mappings import gather_from_tensor_model_parallel_region -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.utils import get_attr_wrapped_model -from megatron.training import get_args, get_tokenizer -from megatron.training.arguments import parse_args, validate_args -from megatron.training.checkpointing import load_checkpoint as _load_checkpoint -from megatron.training.checkpointing import save_checkpoint as _save_checkpoint -from megatron.training.global_vars import set_global_variables, unset_global_variables -from megatron.training.training import get_model -from pretrain_gpt import model_provider -from tests.unit_tests.test_utilities import Utils - -CHECKPOINTS_DIR = "/tmp/ckpt-converter-tests" -FORWARD_ITERS = 1 # *3 -SKIP_CONVERSION = False - - -class TempSharedDir: - """Context that makes & removes a directory to hold the checkpoints.""" - - def __enter__(self): - """Make checkpoint directory.""" - torch.distributed.barrier() - if torch.distributed.get_rank() == 0: - shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) - os.mkdir(CHECKPOINTS_DIR) - torch.distributed.barrier() - - def __exit__(self, exc_type, exc_value, exc_tb): - """Remove checkpoint directory.""" - torch.distributed.barrier() - if torch.distributed.get_rank() == 0: - shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) - torch.distributed.barrier() - - -_ModelParallelState = namedtuple("_ModelParallelState", "tp pp ep") - - -class ModelParallelState(_ModelParallelState): - """Parallel state struct, that contains TP, PP, and EP.""" - - def __new__(cls, tp=1, pp=1, ep=1): - return super(ModelParallelState, cls).__new__(cls, tp, pp, ep) - - -class ModelMeta: - """Basic information about a model. - - Args: - format (str): 'mcore', 'megatron', 'meta', or 'hf'. - mp (ModelParallelState): Defines TP, PP, EP. - transformer_impl (str): 'transformer_engine' or 'local'. - """ - - def __init__(self, format: str, mp: ModelParallelState, transformer_impl: str = None): - - if isinstance(mp, tuple): - mp = ModelParallelState(*mp) - if transformer_impl is None: - transformer_impl = "transformer_engine" if format == "mcore" else "local" - - assert format in ("mcore", "megatron", "meta", "hf") - assert isinstance(mp, ModelParallelState) - assert transformer_impl in ("transformer_engine", "local") - - self.format = format - self.mp = mp - self.transformer_impl = transformer_impl - - -class Pipeline: - """A pipeline manages a single conversion and validation. - - The pipeline consists of the following steps: - - Initialize model & inference pass. - - Save model. - - Convert model. - - Load model & inference pass. - - Validate before/after output tensors. - - Args: - src (ModelMeta): Model meta for loading. - dst (ModelMeta): Model meta for storing. 
- """ - - def __init__(self, src: ModelMeta, dst: ModelMeta): - """Source & destination metas.""" - assert isinstance(src, ModelMeta) - assert isinstance(dst, ModelMeta) - self.src = src - self.dst = dst - - def get_model_argv(self): - """Get argv list for customizing initialization.""" - raise NotImplementedError(self.__class__.__name__ + ".get_model_argv()") - - def get_converter_model_type(self): - """Get converter type: 'GPT' or 'Bert'.""" - raise NotImplementedError(self.__class__.__name__ + ".get_converter_model_type()") - - def get_meta(self, key): - """Get meta from key, which must be either 'src' or 'dst'.""" - assert key in ("src", "dst") - return getattr(self, f"{key}") - - def init_args_and_model(self, key): - """Initialize Megatron and build model.""" - - meta = self.get_meta(key) - - # Destroy & initialize new parallel state. - unset_global_variables() - Utils.destroy_model_parallel() - Utils.initialize_model_parallel( - tensor_model_parallel_size=meta.mp.tp, - pipeline_model_parallel_size=meta.mp.pp, - expert_model_parallel_size=meta.mp.ep, - ) - - # Environment vars. - os.environ["CUDA_DEVICE_MAX_CONNECTIONS"] = "1" - os.environ["NVTE_ALLOW_NONDETERMINISTIC_ALGO"] = "0" - - # Command line args. - sys.argv = [ - "[script]", - *self.get_model_argv(), - "--tensor-model-parallel-size", - str(meta.mp.tp), - "--pipeline-model-parallel-size", - str(meta.mp.pp), - "--expert-model-parallel-size", - str(meta.mp.ep), - "--save-interval", - "2", - "--save", - os.path.join(CHECKPOINTS_DIR, "src"), - "--load", - os.path.join(CHECKPOINTS_DIR, "dst" if not SKIP_CONVERSION else "src"), - "--ckpt-format", - "torch", - "--use-checkpoint-args", - "--no-save-optim", - "--no-save-rng", - "--no-load-optim", - "--no-load-rng", - "--bf16", - "--use-cpu-initialization", - "--no-one-logger", - "--transformer-impl", - meta.transformer_impl, - ] - - # Fail on missing checkpoint. - if key == "dst": - sys.argv.append("--exit-on-missing-checkpoint") - - # Use legacy. - if meta.format == "megatron": - sys.argv.append("--use-legacy-models") - - # Parse args. - args = parse_args() - validate_args(args) - - # Set global args, build tokenizer. - unset_global_variables() - set_global_variables(args) - - # Random seed. - torch.manual_seed(123) - model_parallel_cuda_manual_seed(123) - - # Model. - models = get_model( - model_provider_func=model_provider, model_type=ModelType.encoder_or_decoder - ) - [m.eval() for m in models] - - return args, models - - @classmethod - def is_model_parallel_rank_0(cls): - return ( - parallel_state.get_tensor_model_parallel_rank() == 0 - and parallel_state.get_pipeline_model_parallel_rank() == 0 - ) - - @classmethod - def get_input_ids(cls): - """Randomly initialize input token IDs.""" - if cls.is_model_parallel_rank_0(): - # Generate different data on each DP rank. 
- args = get_args() - - orig_numpy_seed = np.random.get_state()[1][0] - temp_numpy_seed = orig_numpy_seed + torch.distributed.get_rank() - - np.random.seed(temp_numpy_seed) - numpy_input_ids = np.random.randint( - low=0, high=args.vocab_size, size=(args.seq_length,), dtype=np.int64 - ) - np.random.seed(orig_numpy_seed) - - torch_input_ids = torch.from_numpy(numpy_input_ids).to("cuda") - - return torch_input_ids - else: - return None - - @classmethod - def _broadcast(cls, item): - """Broadcast data from TP rank 0 to other ranks.""" - if item is not None: - torch.distributed.broadcast( - item, - parallel_state.get_tensor_model_parallel_src_rank(), - group=parallel_state.get_tensor_model_parallel_group(), - ) - - @classmethod - def get_batch(cls, input_ids): - """Get batch of data, from input token IDs.""" - - args = get_args() - - # TP rank 0, PP rank 0. - # (Note: mimics megatron/training/utils.py:get_batch_on_this_tp_rank().) - if cls.is_model_parallel_rank_0(): - - tokenizer = get_tokenizer() - - attention_mask, loss_mask, position_ids = _get_ltor_masks_and_position_ids( - data=input_ids, - eod_token=tokenizer.eod, - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - create_attention_mask=args.create_attention_mask_in_dataloader, - ) - input_ids = input_ids.unsqueeze(0) - position_ids = position_ids.unsqueeze(0) - attention_mask = attention_mask.unsqueeze(0) - - # Other TP ranks on PP rank 0. - elif parallel_state.is_pipeline_first_stage(): - input_ids = torch.empty( - (args.micro_batch_size, args.seq_length), - dtype=torch.int64, - device=torch.cuda.current_device(), - ) - position_ids = torch.empty( - (args.micro_batch_size, args.seq_length), - dtype=torch.int64, - device=torch.cuda.current_device(), - ) - if args.create_attention_mask_in_dataloader: - attention_mask = torch.empty( - (args.micro_batch_size, 1, args.seq_length, args.seq_length), - dtype=torch.bool, - device=torch.cuda.current_device(), - ) - else: - attention_mask = None - - # Other PP ranks. - # (Note: mimics pretrain_gpt.py:get_batch().) - else: - input_ids = None - position_ids = None - attention_mask = None - - # Broadcast. - if parallel_state.is_pipeline_first_stage(): - cls._broadcast(input_ids) - cls._broadcast(attention_mask) - cls._broadcast(position_ids) - - return input_ids, position_ids, attention_mask - - @classmethod - def forward_step(cls, orig_input_ids: T.Iterator, model: torch.nn.Module): - """Forward step. - - Args: - orig_input_ids (T.Iterator): Input token IDs. - model (GPTModel): The GPT Model. - """ - - # Unpack input ids. - orig_input_ids = list(orig_input_ids)[0] - - # Get batch. - input_ids, position_ids, attention_mask = cls.get_batch(orig_input_ids) - - # Forward pass test data (multi iters for JIT warm-up). - for _ in range(FORWARD_ITERS): - output_tensor = model(input_ids, position_ids, attention_mask) - - # Aggregate data, for validation. - data = { - "orig_input_ids": orig_input_ids, - "input_ids": input_ids, - "position_ids": position_ids, - "attention_mask": attention_mask, - "output_tensor": output_tensor, - } - - return output_tensor, lambda _, non_loss_data: data - - @classmethod - def forward_model(cls, models, orig_input_ids): - """Forward pass data, and gather parallel output tensors.""" - - args = get_args() - - # Forward pass. 
- forward_backward_func = get_forward_backward_func() - data = forward_backward_func( - forward_step_func=cls.forward_step, - data_iterator=iter([orig_input_ids]), - model=models, - num_microbatches=1, - seq_length=args.seq_length, - micro_batch_size=args.micro_batch_size, - forward_only=True, - collect_non_loss_data=True, - ) - if parallel_state.is_pipeline_last_stage(): - output_tensor = data[0]["output_tensor"] - else: - output_tensor = None - - # All-gather across the partitions. - if parallel_state.is_pipeline_last_stage(): - output_tensor_gathered = gather_from_tensor_model_parallel_region(output_tensor) - else: - output_tensor_gathered = None - - return output_tensor_gathered - - def rand_init_model_params(self, key, models): - """Randomly initialize model params.""" - - meta = self.get_meta(key) - - with torch.no_grad(): - - # Randomly initialize all params. - for m in models: - for p in m.parameters(): - p.normal_(0, 0.1) - - # Synchronize embeddings. - if meta.mp.pp != 1 and parallel_state.is_rank_in_embedding_group(): - if parallel_state.is_pipeline_first_stage(): - emb = models[0].module.module.shared_embedding_or_output_weight() - elif parallel_state.is_pipeline_last_stage(): - emb = models[-1].module.module.shared_embedding_or_output_weight() - else: - raise Exception("should be either first/last pipeline rank.") - torch.distributed.all_reduce(emb, group=parallel_state.get_embedding_group()) - - def save_checkpoint(self): - """Initialize params, forward pass data, and save checkpoint.""" - - args, models = self.init_args_and_model("src") - - # Init params. - self.rand_init_model_params("src", models) - - # Test data. - orig_input_ids = self.get_input_ids() - output_tensor = self.forward_model(models, orig_input_ids) - - # Save checkpoint. - _save_checkpoint( - iteration=2, - model=models, - optimizer=None, - opt_param_scheduler=None, - num_floating_point_operations_so_far=None, - ) - - return output_tensor, orig_input_ids - - def load_checkpoint(self, orig_input_ids): - """Load checkpoint, and forward pass data.""" - - args, models = self.init_args_and_model("dst") - - # Load checkpoint. - args.iteration, args.num_floating_point_operations_so_far = _load_checkpoint( - models, optimizer=None, opt_param_scheduler=None - ) - - # Test data. - output_tensor_real = self.forward_model(models, orig_input_ids) - - # Random output tensor. - # Note: need two random initializations to differ from `save_checkpoint()` above. - self.rand_init_model_params("dst", models) - self.rand_init_model_params("dst", models) - output_tensor_fake = self.forward_model(models, orig_input_ids) - - return output_tensor_real, output_tensor_fake - - def convert_checkpoint(self): - """Convert checkpoint""" - - args = get_args() - - torch.distributed.barrier() - - # Convert. 
- if torch.distributed.get_rank() == 0: - - cmd = [ - "python", - "tools/checkpoint/convert.py", - "--model-type", - self.get_converter_model_type(), - "--loader", - self.src.format, - "--load-dir", - args.save, - "--loader-transformer-impl", - self.src.transformer_impl, - "--saver", - self.dst.format, - "--save-dir", - args.load, - "--saver-transformer-impl", - self.dst.transformer_impl, - "--target-tensor-parallel-size", - str(self.dst.mp.tp), - "--target-pipeline-parallel-size", - str(self.dst.mp.pp), - "--megatron-path", - os.getcwd(), - ] - print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print("convert checkpoint cmd: %s" % " ".join(cmd)) - print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - - result = subprocess.run(cmd) - - assert result.returncode == 0, "checkpoint conversion failed." - - torch.distributed.barrier() - - def run(self): - """Run pipeline. - - Running a pipeline consists of: - - - Save checkpoint (includes initializing params & forward passing data). - - Convert checkpoint. - - Load checkpoint (includes forward passing data). - - Validate before/after output tensors. - """ - - Utils.initialize_model_parallel( - tensor_model_parallel_size=self.src.mp.tp, - pipeline_model_parallel_size=self.src.mp.pp, - expert_model_parallel_size=self.src.mp.ep, - ) - with TempSharedDir(): - - # Save checkpoint. - src_output_tensor, input_ids = self.save_checkpoint() - - # Convert checkpoint. - if not SKIP_CONVERSION: - self.convert_checkpoint() - - # Load checkpoint. - dst_output_tensor_real, dst_output_tensor_fake = self.load_checkpoint(input_ids) - - # Validate output tensor. - torch.distributed.barrier() - rank = torch.distributed.get_rank() - world_size = torch.distributed.get_world_size() - if rank == world_size - 1: - args = get_args() - get_mse = lambda dst_output_tensor: torch.nn.MSELoss()( - src_output_tensor[:, :, : args.vocab_size], - dst_output_tensor[:, :, : args.vocab_size], - ).item() - mse_real = get_mse(dst_output_tensor_real) - mse_fake = get_mse(dst_output_tensor_fake) - assert mse_real < 0.01 * mse_fake, "mse_real (%e) >= 0.01 mse_fake (%e)." % ( - mse_real, - mse_fake, - ) - torch.distributed.barrier() - - # Teardown. - unset_global_variables() - Utils.destroy_model_parallel() - - # Broadcast MSE's. - mses = torch.zeros((2,), dtype=torch.float, device="cuda") - if rank == world_size - 1: - mses[0] = mse_real - mses[1] = mse_fake - torch.distributed.broadcast(mses, world_size - 1) - - return mses.tolist() - - -class GPTPipeline(Pipeline): - """GPT-specific pipeline customizations. - - Args: - src (Union[ModelMeta, Tuple]): Model meta for loading. - dst (Union[ModelMeta, Tuple]): Model meta for storing. - num_moe_experts (Optional[int]): Number of MoE experts. - """ - - def __init__(self, src: ModelMeta, dst: ModelMeta, num_moe_experts: T.Optional[int] = None): - super().__init__(ModelMeta(*src), ModelMeta(*dst)) - assert isinstance(num_moe_experts, (int, types.NoneType)) - self.num_moe_experts = num_moe_experts - - def get_model_argv(self): - """GPT model args.""" - args = [ - "--num-layers", - "8", - "--hidden-size", - "16", - "--num-attention-heads", - "8", - "--seq-length", - "16", - "--max-position-embeddings", - "16", - "--micro-batch-size", - "1", # single sample generated. - "--tokenizer-type", - "NullTokenizer", - "--vocab-size", - "127", # ... NullTokenizer adds +1 EOD token. 
- "--make-vocab-size-divisible-by", - "1", - ] - if self.num_moe_experts is not None and self.num_moe_experts > 1: - args.extend(["--num-experts", str(self.num_moe_experts or 1), "--sequence-parallel"]) - return args - - def get_converter_model_type(self): - return "GPT" - - -def get_gpt_pipelines(): - """Get GPT (non-MoE) pipelines.""" - return [ - GPTPipeline(("mcore", (8, 1)), ("mcore", (1, 8))), - GPTPipeline(("mcore", (4, 2)), ("mcore", (2, 4))), - GPTPipeline(("mcore", (2, 4)), ("mcore", (4, 2))), - GPTPipeline(("mcore", (1, 8)), ("mcore", (8, 1))), - GPTPipeline(("mcore", (4, 2)), ("mcore", (2, 4), "local")), - GPTPipeline(("megatron", (4, 2)), ("mcore", (2, 4))), - GPTPipeline(("mcore", (4, 2), "local"), ("mcore", (2, 4), "local")), - GPTPipeline(("mcore", (4, 2), "local"), ("mcore", (2, 4))), - # [todo] GPTPipeline(("megatron", (4, 2)), ("megatron", (2, 4))), - # [todo] GPTPipeline(("megatron", (4, 2), "te"), ("megatron", (2, 4), "te")), - # [todo] GPTPipeline("meta", "mcore", None, (8, 1)), - # [todo] GPTPipeline("hf", "mcore", None, (8, 1)), - ] - - -def get_moe_pipelines(): - """Get MoE pipelines.""" - return [ - GPTPipeline(("mcore", (2, 1, 2)), ("mcore", (1, 4, 1)), num_moe_experts=8), - GPTPipeline(("mcore", (1, 4, 1)), ("mcore", (2, 1, 2)), num_moe_experts=4), - ] - - -def test_all_pipelines(): - """Run all pipelines.""" - - # Collect pipelines. - pipelines = [ - *get_gpt_pipelines(), - # [todo] *get_moe_pipelines(), # todo: MoE support in loader_mcore.py. - # [todo] *get_bert_pipelines(), - # [todo] *get_t5_pipelines(), - ] - - # Run pipelines. - results = [] - for pipeline in pipelines: - t = time.time() - mses = pipeline.run() - elapsed_time = time.time() - t - results.append((elapsed_time, *mses)) - - # Print results. - if int(os.environ["RANK"]) == 0: - print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print("checkpoint converter results:") - [print(" t %.1f sec ... mse %.1e, %.1e." % (t, r, f)) for t, r, f in results] - print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - - -if __name__ == "__main__": - test_all_pipelines() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +import os +import shutil +import subprocess +import sys +import time +import types +import typing as T +from collections import namedtuple + +import numpy as np +import torch + +from megatron.core import parallel_state +from megatron.core.datasets.gpt_dataset import _get_ltor_masks_and_position_ids +from megatron.core.enums import ModelType +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.pipeline_parallel import get_forward_backward_func +from megatron.core.tensor_parallel.mappings import gather_from_tensor_model_parallel_region +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.utils import get_attr_wrapped_model +from megatron.training import get_args, get_tokenizer +from megatron.training.arguments import parse_args, validate_args +from megatron.training.checkpointing import load_checkpoint as _load_checkpoint +from megatron.training.checkpointing import save_checkpoint as _save_checkpoint +from megatron.training.global_vars import set_global_variables, unset_global_variables +from megatron.training.training import get_model +from pretrain_gpt import model_provider +from tests.unit_tests.test_utilities import Utils + +CHECKPOINTS_DIR = "/tmp/ckpt-converter-tests" +FORWARD_ITERS = 1 # *3 +SKIP_CONVERSION = False + + +class TempSharedDir: + """Context that makes & removes a directory to hold the checkpoints.""" + + def __enter__(self): + """Make checkpoint directory.""" + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) + os.mkdir(CHECKPOINTS_DIR) + torch.distributed.barrier() + + def __exit__(self, exc_type, exc_value, exc_tb): + """Remove checkpoint directory.""" + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + shutil.rmtree(CHECKPOINTS_DIR, ignore_errors=True) + torch.distributed.barrier() + + +_ModelParallelState = namedtuple("_ModelParallelState", "tp pp ep") + + +class ModelParallelState(_ModelParallelState): + """Parallel state struct, that contains TP, PP, and EP.""" + + def __new__(cls, tp=1, pp=1, ep=1): + return super(ModelParallelState, cls).__new__(cls, tp, pp, ep) + + +class ModelMeta: + """Basic information about a model. + + Args: + format (str): 'core', 'legacy', 'meta', or 'hf'. + mp (ModelParallelState): Defines TP, PP, EP. + transformer_impl (str): 'transformer_engine' or 'local'. + """ + + def __init__(self, format: str, mp: ModelParallelState, transformer_impl: str = None): + + if isinstance(mp, tuple): + mp = ModelParallelState(*mp) + if transformer_impl is None: + transformer_impl = "transformer_engine" if format == "core" else "local" + + assert format in ("core", "legacy", "meta", "hf") + assert isinstance(mp, ModelParallelState) + assert transformer_impl in ("transformer_engine", "local") + + self.format = format + self.mp = mp + self.transformer_impl = transformer_impl + + +class Pipeline: + """A pipeline manages a single conversion and validation. + + The pipeline consists of the following steps: + - Initialize model & inference pass. + - Save model. + - Convert model. + - Load model & inference pass. + - Validate before/after output tensors. + + Args: + src (ModelMeta): Model meta for loading. + dst (ModelMeta): Model meta for storing. 
+ """ + + def __init__(self, src: ModelMeta, dst: ModelMeta): + """Source & destination metas.""" + assert isinstance(src, ModelMeta) + assert isinstance(dst, ModelMeta) + self.src = src + self.dst = dst + + def get_model_argv(self): + """Get argv list for customizing initialization.""" + raise NotImplementedError(self.__class__.__name__ + ".get_model_argv()") + + def get_converter_model_type(self): + """Get converter type: 'GPT' or 'Bert'.""" + raise NotImplementedError(self.__class__.__name__ + ".get_converter_model_type()") + + def get_meta(self, key): + """Get meta from key, which must be either 'src' or 'dst'.""" + assert key in ("src", "dst") + return getattr(self, f"{key}") + + def init_args_and_model(self, key): + """Initialize Megatron and build model.""" + + meta = self.get_meta(key) + + # Destroy & initialize new parallel state. + unset_global_variables() + Utils.destroy_model_parallel() + Utils.initialize_model_parallel( + tensor_model_parallel_size=meta.mp.tp, + pipeline_model_parallel_size=meta.mp.pp, + expert_model_parallel_size=meta.mp.ep, + ) + + # Environment vars. + os.environ["CUDA_DEVICE_MAX_CONNECTIONS"] = "1" + os.environ["NVTE_ALLOW_NONDETERMINISTIC_ALGO"] = "0" + + # Command line args. + sys.argv = [ + "[script]", + *self.get_model_argv(), + "--tensor-model-parallel-size", + str(meta.mp.tp), + "--pipeline-model-parallel-size", + str(meta.mp.pp), + "--expert-model-parallel-size", + str(meta.mp.ep), + "--save-interval", + "2", + "--save", + os.path.join(CHECKPOINTS_DIR, "src"), + "--load", + os.path.join(CHECKPOINTS_DIR, "dst" if not SKIP_CONVERSION else "src"), + "--ckpt-format", + "torch", + "--use-checkpoint-args", + "--no-save-optim", + "--no-save-rng", + "--no-load-optim", + "--no-load-rng", + "--bf16", + "--use-cpu-initialization", + "--no-one-logger", + "--transformer-impl", + meta.transformer_impl, + ] + + # Fail on missing checkpoint. + if key == "dst": + sys.argv.append("--exit-on-missing-checkpoint") + + # Use legacy. + if meta.format == "legacy": + sys.argv.append("--use-legacy-models") + + # Parse args. + args = parse_args() + validate_args(args) + + # Set global args, build tokenizer. + unset_global_variables() + set_global_variables(args) + + # Random seed. + torch.manual_seed(123) + model_parallel_cuda_manual_seed(123) + + # Model. + models = get_model( + model_provider_func=model_provider, model_type=ModelType.encoder_or_decoder + ) + [m.eval() for m in models] + + return args, models + + @classmethod + def is_model_parallel_rank_0(cls): + return ( + parallel_state.get_tensor_model_parallel_rank() == 0 + and parallel_state.get_pipeline_model_parallel_rank() == 0 + ) + + @classmethod + def get_input_ids(cls): + """Randomly initialize input token IDs.""" + if cls.is_model_parallel_rank_0(): + # Generate different data on each DP rank. 
+ args = get_args() + + orig_numpy_seed = np.random.get_state()[1][0] + temp_numpy_seed = orig_numpy_seed + torch.distributed.get_rank() + + np.random.seed(temp_numpy_seed) + numpy_input_ids = np.random.randint( + low=0, high=args.vocab_size, size=(args.seq_length,), dtype=np.int64 + ) + np.random.seed(orig_numpy_seed) + + torch_input_ids = torch.from_numpy(numpy_input_ids).to("cuda") + + return torch_input_ids + else: + return None + + @classmethod + def _broadcast(cls, item): + """Broadcast data from TP rank 0 to other ranks.""" + if item is not None: + torch.distributed.broadcast( + item, + parallel_state.get_tensor_model_parallel_src_rank(), + group=parallel_state.get_tensor_model_parallel_group(), + ) + + @classmethod + def get_batch(cls, input_ids): + """Get batch of data, from input token IDs.""" + + args = get_args() + + # TP rank 0, PP rank 0. + # (Note: mimics megatron/training/utils.py:get_batch_on_this_tp_rank().) + if cls.is_model_parallel_rank_0(): + + tokenizer = get_tokenizer() + + attention_mask, loss_mask, position_ids = _get_ltor_masks_and_position_ids( + data=input_ids, + eod_token=tokenizer.eod, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + create_attention_mask=args.create_attention_mask_in_dataloader, + ) + input_ids = input_ids.unsqueeze(0) + position_ids = position_ids.unsqueeze(0) + attention_mask = attention_mask.unsqueeze(0) + + # Other TP ranks on PP rank 0. + elif parallel_state.is_pipeline_first_stage(): + input_ids = torch.empty( + (args.micro_batch_size, args.seq_length), + dtype=torch.int64, + device=torch.cuda.current_device(), + ) + position_ids = torch.empty( + (args.micro_batch_size, args.seq_length), + dtype=torch.int64, + device=torch.cuda.current_device(), + ) + if args.create_attention_mask_in_dataloader: + attention_mask = torch.empty( + (args.micro_batch_size, 1, args.seq_length, args.seq_length), + dtype=torch.bool, + device=torch.cuda.current_device(), + ) + else: + attention_mask = None + + # Other PP ranks. + # (Note: mimics pretrain_gpt.py:get_batch().) + else: + input_ids = None + position_ids = None + attention_mask = None + + # Broadcast. + if parallel_state.is_pipeline_first_stage(): + cls._broadcast(input_ids) + cls._broadcast(attention_mask) + cls._broadcast(position_ids) + + return input_ids, position_ids, attention_mask + + @classmethod + def forward_step(cls, orig_input_ids: T.Iterator, model: torch.nn.Module): + """Forward step. + + Args: + orig_input_ids (T.Iterator): Input token IDs. + model (GPTModel): The GPT Model. + """ + + # Unpack input ids. + orig_input_ids = list(orig_input_ids)[0] + + # Get batch. + input_ids, position_ids, attention_mask = cls.get_batch(orig_input_ids) + + # Forward pass test data (multi iters for JIT warm-up). + for _ in range(FORWARD_ITERS): + output_tensor = model(input_ids, position_ids, attention_mask) + + # Aggregate data, for validation. + data = { + "orig_input_ids": orig_input_ids, + "input_ids": input_ids, + "position_ids": position_ids, + "attention_mask": attention_mask, + "output_tensor": output_tensor, + } + + return output_tensor, lambda _, non_loss_data: data + + @classmethod + def forward_model(cls, models, orig_input_ids): + """Forward pass data, and gather parallel output tensors.""" + + args = get_args() + + # Forward pass. 
+ forward_backward_func = get_forward_backward_func() + data = forward_backward_func( + forward_step_func=cls.forward_step, + data_iterator=iter([orig_input_ids]), + model=models, + num_microbatches=1, + seq_length=args.seq_length, + micro_batch_size=args.micro_batch_size, + forward_only=True, + collect_non_loss_data=True, + ) + if parallel_state.is_pipeline_last_stage(): + output_tensor = data[0]["output_tensor"] + else: + output_tensor = None + + # All-gather across the partitions. + if parallel_state.is_pipeline_last_stage(): + output_tensor_gathered = gather_from_tensor_model_parallel_region(output_tensor) + else: + output_tensor_gathered = None + + return output_tensor_gathered + + def rand_init_model_params(self, key, models): + """Randomly initialize model params.""" + + meta = self.get_meta(key) + + with torch.no_grad(): + + # Randomly initialize all params. + for m in models: + for p in m.parameters(): + p.normal_(0, 0.1) + + # Synchronize embeddings. + if meta.mp.pp != 1 and parallel_state.is_rank_in_embedding_group(): + if parallel_state.is_pipeline_first_stage(): + emb = models[0].module.module.shared_embedding_or_output_weight() + elif parallel_state.is_pipeline_last_stage(): + emb = models[-1].module.module.shared_embedding_or_output_weight() + else: + raise Exception("should be either first/last pipeline rank.") + torch.distributed.all_reduce(emb, group=parallel_state.get_embedding_group()) + + def save_checkpoint(self): + """Initialize params, forward pass data, and save checkpoint.""" + + args, models = self.init_args_and_model("src") + + # Init params. + self.rand_init_model_params("src", models) + + # Test data. + orig_input_ids = self.get_input_ids() + output_tensor = self.forward_model(models, orig_input_ids) + + # Save checkpoint. + _save_checkpoint( + iteration=2, + model=models, + optimizer=None, + opt_param_scheduler=None, + num_floating_point_operations_so_far=None, + ) + + return output_tensor, orig_input_ids + + def load_checkpoint(self, orig_input_ids): + """Load checkpoint, and forward pass data.""" + + args, models = self.init_args_and_model("dst") + + # Load checkpoint. + args.iteration, args.num_floating_point_operations_so_far = _load_checkpoint( + models, optimizer=None, opt_param_scheduler=None + ) + + # Test data. + output_tensor_real = self.forward_model(models, orig_input_ids) + + # Random output tensor. + # Note: need two random initializations to differ from `save_checkpoint()` above. + self.rand_init_model_params("dst", models) + self.rand_init_model_params("dst", models) + output_tensor_fake = self.forward_model(models, orig_input_ids) + + return output_tensor_real, output_tensor_fake + + def convert_checkpoint(self): + """Convert checkpoint""" + + args = get_args() + + torch.distributed.barrier() + + # Convert. 
+ if torch.distributed.get_rank() == 0: + + cmd = [ + "python", + "tools/checkpoint/convert.py", + "--model-type", + self.get_converter_model_type(), + "--loader", + self.src.format, + "--load-dir", + args.save, + "--loader-transformer-impl", + self.src.transformer_impl, + "--saver", + self.dst.format, + "--save-dir", + args.load, + "--saver-transformer-impl", + self.dst.transformer_impl, + "--target-tensor-parallel-size", + str(self.dst.mp.tp), + "--target-pipeline-parallel-size", + str(self.dst.mp.pp), + "--megatron-path", + os.getcwd(), + ] + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("convert checkpoint cmd: %s" % " ".join(cmd)) + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + result = subprocess.run(cmd) + + assert result.returncode == 0, "checkpoint conversion failed." + + torch.distributed.barrier() + + def run(self): + """Run pipeline. + + Running a pipeline consists of: + + - Save checkpoint (includes initializing params & forward passing data). + - Convert checkpoint. + - Load checkpoint (includes forward passing data). + - Validate before/after output tensors. + """ + + Utils.initialize_model_parallel( + tensor_model_parallel_size=self.src.mp.tp, + pipeline_model_parallel_size=self.src.mp.pp, + expert_model_parallel_size=self.src.mp.ep, + ) + with TempSharedDir(): + + # Save checkpoint. + src_output_tensor, input_ids = self.save_checkpoint() + + # Convert checkpoint. + if not SKIP_CONVERSION: + self.convert_checkpoint() + + # Load checkpoint. + dst_output_tensor_real, dst_output_tensor_fake = self.load_checkpoint(input_ids) + + # Validate output tensor. + torch.distributed.barrier() + rank = torch.distributed.get_rank() + world_size = torch.distributed.get_world_size() + if rank == world_size - 1: + args = get_args() + get_mse = lambda dst_output_tensor: torch.nn.MSELoss()( + src_output_tensor[:, :, : args.vocab_size], + dst_output_tensor[:, :, : args.vocab_size], + ).item() + mse_real = get_mse(dst_output_tensor_real) + mse_fake = get_mse(dst_output_tensor_fake) + assert mse_real < 0.01 * mse_fake, "mse_real (%e) >= 0.01 mse_fake (%e)." % ( + mse_real, + mse_fake, + ) + torch.distributed.barrier() + + # Teardown. + unset_global_variables() + Utils.destroy_model_parallel() + + # Broadcast MSE's. + mses = torch.zeros((2,), dtype=torch.float, device="cuda") + if rank == world_size - 1: + mses[0] = mse_real + mses[1] = mse_fake + torch.distributed.broadcast(mses, world_size - 1) + + return mses.tolist() + + +class GPTPipeline(Pipeline): + """GPT-specific pipeline customizations. + + Args: + src (Union[ModelMeta, Tuple]): Model meta for loading. + dst (Union[ModelMeta, Tuple]): Model meta for storing. + num_moe_experts (Optional[int]): Number of MoE experts. + """ + + def __init__(self, src: ModelMeta, dst: ModelMeta, num_moe_experts: T.Optional[int] = None): + super().__init__(ModelMeta(*src), ModelMeta(*dst)) + assert isinstance(num_moe_experts, (int, types.NoneType)) + self.num_moe_experts = num_moe_experts + + def get_model_argv(self): + """GPT model args.""" + args = [ + "--num-layers", + "8", + "--hidden-size", + "16", + "--num-attention-heads", + "8", + "--seq-length", + "16", + "--max-position-embeddings", + "16", + "--micro-batch-size", + "1", # single sample generated. + "--tokenizer-type", + "NullTokenizer", + "--vocab-size", + "127", # ... NullTokenizer adds +1 EOD token. 
+ "--make-vocab-size-divisible-by", + "1", + ] + if self.num_moe_experts is not None and self.num_moe_experts > 1: + args.extend(["--num-experts", str(self.num_moe_experts or 1), "--sequence-parallel"]) + return args + + def get_converter_model_type(self): + return "GPT" + + +def get_gpt_pipelines(): + """Get GPT (non-MoE) pipelines.""" + return [ + GPTPipeline(("core", (8, 1)), ("core", (1, 8))), + GPTPipeline(("core", (4, 2)), ("core", (2, 4))), + GPTPipeline(("core", (2, 4)), ("core", (4, 2))), + GPTPipeline(("core", (1, 8)), ("core", (8, 1))), + GPTPipeline(("core", (4, 2)), ("core", (2, 4), "local")), + GPTPipeline(("legacy", (4, 2)), ("core", (2, 4))), + GPTPipeline(("core", (4, 2), "local"), ("core", (2, 4), "local")), + GPTPipeline(("core", (4, 2), "local"), ("core", (2, 4))), + # [todo] GPTPipeline(("legacy", (4, 2)), ("legacy", (2, 4))), + # [todo] GPTPipeline(("legacy", (4, 2), "te"), ("legacy", (2, 4), "te")), + # [todo] GPTPipeline("meta", "core", None, (8, 1)), + # [todo] GPTPipeline("hf", "core", None, (8, 1)), + ] + + +def get_moe_pipelines(): + """Get MoE pipelines.""" + return [ + GPTPipeline(("core", (2, 1, 2)), ("core", (1, 4, 1)), num_moe_experts=8), + GPTPipeline(("core", (1, 4, 1)), ("core", (2, 1, 2)), num_moe_experts=4), + ] + + +def test_all_pipelines(): + """Run all pipelines.""" + + # Collect pipelines. + pipelines = [ + *get_gpt_pipelines(), + # [todo] *get_moe_pipelines(), # todo: MoE support in loader_core.py. + # [todo] *get_bert_pipelines(), + # [todo] *get_t5_pipelines(), + ] + + # Run pipelines. + results = [] + for pipeline in pipelines: + t = time.time() + mses = pipeline.run() + elapsed_time = time.time() - t + results.append((elapsed_time, *mses)) + + # Print results. + if int(os.environ["RANK"]) == 0: + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("checkpoint converter results:") + [print(" t %.1f sec ... mse %.1e, %.1e." 
% (t, r, f)) for t, r, f in results] + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + + +if __name__ == "__main__": + test_all_pipelines() diff --git a/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_8experts_tp2_ep2_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_8experts_tp2_ep2_pp1_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..20bacd5 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt-nemo/gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_8experts_tp2_ep2_pp1_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,43 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + SKIP_PYTEST: 1 +MODEL_ARGS: + trainer.num_nodes: 1 + trainer.devices: 8 + trainer.max_steps: 50 + trainer.val_check_interval: 50 + trainer.limit_val_batches: 50 + trainer.max_epochs: 'null' + trainer.precision: bf16 + model.num_layers: 12 + model.hidden_size: 768 + model.num_attention_heads: 12 + model.micro_batch_size: 1 + model.global_batch_size: 8 + model.tensor_model_parallel_size: 2 + model.pipeline_model_parallel_size: 1 + model.expert_model_parallel_size: 2 + model.virtual_pipeline_model_parallel_size: 'null' + model.encoder_seq_length: 2048 + model.max_position_embeddings: 2048 + model.ffn_hidden_size: 3072 + model.mcore_gpt: 'True' + model.apply_query_key_layer_scaling: 'True' + model.megatron_amp_O2: 'True' + model.data.data_prefix: '[]' + model.data.data_impl: mock + model.data.splits_string: '[99990,8,2]' + model.optim.name: mcore_distributed_optim + model.optim.weight_decay: 0.1 + exp_manager.create_checkpoint_callback: 'False' + model.sequence_parallel: 'True' + model.overlap_p2p_comm: 'True' + model.batch_p2p_comm: 'False' + model.bias: 'False' + model.bias_activation_fusion: 'False' + ++model.num_moe_experts: 8 + ++model.moe_grouped_gemm: 'True' + ++model.moe_router_load_balancing_type: aux_loss + ++model.moe_router_topk: 2 + ++model.moe_aux_loss_coeff: 1e-2 +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.10.0.json new file mode 100644 index 0000000..bdde50c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12.98419, "5": 12.93854, "10": 12.06408, "15": 11.97892, "20": 10.53587, "25": 10.11956, "30": 9.72863, "35": 9.44175, "40": 9.23732, "45": 9.03759, "50": 8.85191, "55": 8.64107, "60": 8.61561, "65": 8.50651, "70": 8.46202, "75": 8.37008, "80": 8.15465, "85": 8.14293, "90": 7.95917, "95": 7.93919, "100": 7.81846, "105": 7.66559, "110": 7.54375, "115": 7.42185, "120": 7.43258, "125": 7.45249, "130": 7.3096, "135": 7.26082, "140": 7.21765, "145": 7.07741, "150": 7.15183, "155": 7.04132, "160": 6.95122, "165": 6.96528, "170": 6.89588, "175": 6.87439, "180": 6.82744, "185": 6.73844, "190": 6.72094, "195": 6.60441, "200": 6.65289, "205": 6.66843, "210": 6.52039, "215": 6.4892, "220": 6.50387, "225": 6.50635, "230": 6.46843, "235": 6.43952, "240": 6.36827, "245": 6.41136, "250": 6.34245, "255": 6.44068, "260": 6.35852, "265": 6.2911, "270": 6.23195, "275": 6.2332, "280": 6.19499, "285": 6.22925, "290": 6.16682, "295": 6.14298, "300": 6.11519, "305": 6.02148, "310": 6.09654, "315": 6.08591, "320": 5.96517, "325": 5.94871, "330": 5.98181, "335": 6.02179, "340": 5.95273, "345": 5.91441, "350": 5.9025, "355": 5.83575, "360": 
5.85794, "365": 5.84307, "370": 5.81322, "375": 5.8343, "380": 5.83059, "385": 5.8039, "390": 5.79969, "395": 5.69665, "400": 5.64736, "405": 5.65772, "410": 5.66935, "415": 5.72639, "420": 5.66405, "425": 5.67999, "430": 5.6241, "435": 5.5903, "440": 5.61545, "445": 5.53123, "450": 5.56342, "455": 5.52319, "460": 5.52938, "465": 5.58752, "470": 5.58632, "475": 5.49493, "480": 5.48924, "485": 5.50884, "490": 5.49816, "495": 5.47218, "500": 5.44659, "505": 5.38219, "510": 5.43668, "515": 5.41551, "520": 5.44898, "525": 5.29376, "530": 5.352, "535": 5.32867, "540": 5.35929, "545": 5.39092, "550": 5.38016, "555": 5.19751, "560": 5.33912, "565": 5.29306, "570": 5.27761, "575": 5.31695, "580": 5.21144, "585": 5.21672, "590": 5.20767, "595": 5.21835, "600": 5.26425, "605": 5.21789, "610": 5.21397, "615": 5.1857, "620": 5.20834, "625": 5.20938, "630": 5.13639, "635": 5.1128, "640": 5.09192, "645": 5.12528, "650": 5.1631, "655": 5.1407, "660": 5.063, "665": 5.10364, "670": 5.05373, "675": 5.03829, "680": 5.02316, "685": 5.00944, "690": 5.02087, "695": 4.97227, "700": 4.99151, "705": 4.93584, "710": 4.98862, "715": 4.89609, "720": 4.86643, "725": 4.82834, "730": 4.87046, "735": 4.85399, "740": 4.85813, "745": 4.74252, "750": 4.73976, "755": 4.81415, "760": 4.79354, "765": 4.74295, "770": 4.68915, "775": 4.66162, "780": 4.68695, "785": 4.77497, "790": 4.67531, "795": 4.63582, "800": 4.61575, "805": 4.63436, "810": 4.64209, "815": 4.58669, "820": 4.62108, "825": 4.58189, "830": 4.57306, "835": 4.5592, "840": 4.48154, "845": 4.47494, "850": 4.44023, "855": 4.4849, "860": 4.44459, "865": 4.4994, "870": 4.49007, "875": 4.37053, "880": 4.43074, "885": 4.3909, "890": 4.43772, "895": 4.40774, "900": 4.39766, "905": 4.33831, "910": 4.37033, "915": 4.34643, "920": 4.37646, "925": 4.38198, "930": 4.29422, "935": 4.30067, "940": 4.34626, "945": 4.32201, "950": 4.34466, "955": 4.25654, "960": 4.20595, "965": 4.27065, "970": 4.27764, "975": 4.25431, "980": 4.22694, "985": 4.18579, "990": 4.14082, "995": 4.17595, "1000": 4.24671, "1005": 4.20213, "1010": 4.17465, "1015": 4.1295, "1020": 4.1563, "1025": 4.21273, "1030": 4.1218, "1035": 4.09165, "1040": 4.13332, "1045": 4.10698, "1050": 4.15373, "1055": 4.10306, "1060": 4.10569, "1065": 4.05599, "1070": 4.07158, "1075": 4.09064, "1080": 4.07256, "1085": 4.0744, "1090": 4.01623, "1095": 4.0905, "1100": 4.0688, "1105": 4.08359, "1110": 4.02838, "1115": 4.00103, "1120": 3.99865, "1125": 4.00668, "1130": 4.03821, "1135": 4.0186, "1140": 4.01696, "1145": 3.94293, "1150": 4.03031, "1155": 3.99474, "1160": 3.97371, "1165": 3.88015, "1170": 3.93366, "1175": 3.94219, "1180": 3.9521, "1185": 3.96886, "1190": 3.93105, "1195": 3.94711, "1200": 3.89736, "1205": 3.87175, "1210": 4.00691, "1215": 3.8302, "1220": 3.86678, "1225": 3.81337, "1230": 3.9097, "1235": 3.91642, "1240": 3.88192, "1245": 3.80138, "1250": 3.845, "1255": 3.85664, "1260": 3.88502, "1265": 3.78424, "1270": 3.88578, "1275": 3.83649, "1280": 3.83975, "1285": 3.8519, "1290": 3.87941, "1295": 3.84447, "1300": 3.82086, "1305": 3.83591, "1310": 3.82736, "1315": 3.82023, "1320": 3.81581, "1325": 3.71152, "1330": 3.79257, "1335": 3.76263, "1340": 3.7585, "1345": 3.75849, "1350": 3.73475, "1355": 3.77355, "1360": 3.73662, "1365": 3.72985, "1370": 3.75457, "1375": 3.74517, "1380": 3.75758, "1385": 3.75769, "1390": 3.6711, "1395": 3.74462, "1400": 3.72314, "1405": 3.65238, "1410": 3.66211, "1415": 3.67777, "1420": 3.70278, "1425": 3.70904, "1430": 3.66848, "1435": 3.66212, "1440": 3.641, "1445": 3.67553, "1450": 3.67516, 
"1455": 3.63861, "1460": 3.65441, "1465": 3.68208, "1470": 3.63371, "1475": 3.69927, "1480": 3.66998, "1485": 3.66531, "1490": 3.63777, "1495": 3.60688, "1500": 3.64897, "1505": 3.68052, "1510": 3.55088, "1515": 3.60446, "1520": 3.63704, "1525": 3.62597, "1530": 3.60255, "1535": 3.59817, "1540": 3.61522, "1545": 3.61295, "1550": 3.56212, "1555": 3.57097, "1560": 3.60745, "1565": 3.62379, "1570": 3.59202, "1575": 3.55726, "1580": 3.59199, "1585": 3.57601, "1590": 3.46736, "1595": 3.50991, "1600": 3.49773, "1605": 3.55677, "1610": 3.5707, "1615": 3.50881, "1620": 3.52902, "1625": 3.48463, "1630": 3.49796, "1635": 3.55096, "1640": 3.5246, "1645": 3.54001, "1650": 3.48747, "1655": 3.47898, "1660": 3.54158, "1665": 3.46015, "1670": 3.51035, "1675": 3.49726, "1680": 3.4738, "1685": 3.47946, "1690": 3.47944, "1695": 3.49373, "1700": 3.4571, "1705": 3.39504, "1710": 3.50523, "1715": 3.49875, "1720": 3.43518, "1725": 3.42591, "1730": 3.41998, "1735": 3.45435, "1740": 3.45514, "1745": 3.43764, "1750": 3.40661, "1755": 3.43589, "1760": 3.38464, "1765": 3.42991, "1770": 3.44036, "1775": 3.38962, "1780": 3.43426, "1785": 3.41964, "1790": 3.38924, "1795": 3.41547, "1800": 3.34271, "1805": 3.38698, "1810": 3.32311, "1815": 3.41714, "1820": 3.41515, "1825": 3.39789, "1830": 3.32932, "1835": 3.42735, "1840": 3.39868, "1845": 3.43723, "1850": 3.38928, "1855": 3.37932, "1860": 3.34341, "1865": 3.38406, "1870": 3.31068, "1875": 3.43761, "1880": 3.34653, "1885": 3.35382, "1890": 3.34727, "1895": 3.39902, "1900": 3.37851, "1905": 3.30251, "1910": 3.35134, "1915": 3.34269, "1920": 3.36871, "1925": 3.33974, "1930": 3.31057, "1935": 3.30763, "1940": 3.36792, "1945": 3.27779, "1950": 3.41332, "1955": 3.30476, "1960": 3.30933, "1965": 3.26958, "1970": 3.28947, "1975": 3.32983, "1980": 3.33609, "1985": 3.24923, "1990": 3.31462, "1995": 3.27479, "2000": 3.29158, "2005": 3.2662, "2010": 3.26416, "2015": 3.22477, "2020": 3.2846, "2025": 3.28146, "2030": 3.28469, "2035": 3.29732, "2040": 3.24642, "2045": 3.24446, "2050": 3.2745, "2055": 3.32281, "2060": 3.28835, "2065": 3.24243, "2070": 3.28811, "2075": 3.26552, "2080": 3.25521, "2085": 3.39762, "2090": 3.20915, "2095": 3.3192, "2100": 3.23548, "2105": 3.21061, "2110": 3.2168, "2115": 3.22574, "2120": 3.17449, "2125": 3.21759, "2130": 3.21981, "2135": 3.28291, "2140": 3.1949, "2145": 3.20321, "2150": 3.22427, "2155": 3.24736, "2160": 3.21441, "2165": 3.24678, "2170": 3.21743, "2175": 3.16389, "2180": 3.22177, "2185": 3.25637, "2190": 3.24498, "2195": 3.171, "2200": 3.1913, "2205": 3.1799, "2210": 3.13108, "2215": 3.2124, "2220": 3.20441, "2225": 3.19274, "2230": 3.14339, "2235": 3.19257, "2240": 3.22315, "2245": 3.17902, "2250": 3.19389, "2255": 3.13411, "2260": 3.16486, "2265": 3.25029, "2270": 3.18648, "2275": 3.1488, "2280": 3.1761, "2285": 3.16329, "2290": 3.17717, "2295": 3.19692, "2300": 3.14262, "2305": 3.16367, "2310": 3.13201, "2315": 3.06324, "2320": 3.12133, "2325": 3.18302, "2330": 3.12822, "2335": 3.11532, "2340": 3.16932, "2345": 3.11719, "2350": 3.12771, "2355": 3.12814, "2360": 3.16524, "2365": 3.10189, "2370": 3.17137, "2375": 3.14214, "2380": 3.15206, "2385": 3.09812, "2390": 3.09383, "2395": 3.09412, "2400": 3.09488, "2405": 3.09982, "2410": 3.08952, "2415": 3.08613, "2420": 3.07867, "2425": 3.08418, "2430": 3.0823, "2435": 3.06935, "2440": 3.09363, "2445": 3.0619, "2450": 3.12144, "2455": 3.15908, "2460": 3.08318, "2465": 3.08371, "2470": 3.03632, "2475": 3.06386, "2480": 3.09389, "2485": 3.06734, "2490": 3.06593, "2495": 3.08316, "2500": 3.04129, 
"2505": 3.10387, "2510": 3.11135, "2515": 3.06443, "2520": 3.0832, "2525": 3.02581, "2530": 3.05348, "2535": 3.09406, "2540": 3.07766, "2545": 3.05587, "2550": 3.00598, "2555": 3.07536, "2560": 3.04397, "2565": 3.10961, "2570": 3.01063, "2575": 3.05505, "2580": 3.08462, "2585": 3.02454, "2590": 3.07149, "2595": 2.99798, "2600": 3.07586, "2605": 3.05776, "2610": 3.04999, "2615": 3.06137, "2620": 2.98579, "2625": 3.01319, "2630": 3.0387, "2635": 3.05945, "2640": 3.01188, "2645": 3.05169, "2650": 3.02344, "2655": 3.00109, "2660": 3.01352, "2665": 3.04278, "2670": 2.98222, "2675": 2.97174, "2680": 2.98967, "2685": 3.00903, "2690": 3.00129, "2695": 2.98933, "2700": 3.03421, "2705": 2.98405, "2710": 2.97866, "2715": 2.97247, "2720": 3.02924, "2725": 3.00627, "2730": 3.04091, "2735": 2.98775, "2740": 2.99922, "2745": 3.00953, "2750": 3.00514, "2755": 2.97835, "2760": 2.99909, "2765": 3.00875, "2770": 2.97736, "2775": 2.99558, "2780": 3.01117, "2785": 2.95356, "2790": 2.95705, "2795": 2.95444, "2800": 2.95807, "2805": 2.94113, "2810": 2.97782, "2815": 2.9526, "2820": 3.0804, "2825": 3.06637, "2830": 3.01432, "2835": 2.92291, "2840": 2.94512, "2845": 2.97307, "2850": 2.97327, "2855": 2.97972, "2860": 2.95612, "2865": 2.96438, "2870": 3.01121, "2875": 2.92466, "2880": 2.94757, "2885": 2.94066, "2890": 2.98496, "2895": 2.92566, "2900": 2.95474, "2905": 2.99612, "2910": 2.90015, "2915": 2.93666, "2920": 2.95378, "2925": 2.94122, "2930": 2.94854, "2935": 2.94633, "2940": 3.0159, "2945": 2.93331, "2950": 3.00487, "2955": 2.9142, "2960": 2.97142, "2965": 2.86473, "2970": 2.96042, "2975": 2.99238, "2980": 2.93954, "2985": 3.03123, "2990": 2.94077, "2995": 2.87339, "3000": 2.93764, "3005": 2.89063, "3010": 2.94329, "3015": 2.95425, "3020": 2.94521, "3025": 2.94116, "3030": 2.92749, "3035": 2.95728, "3040": 2.92297, "3045": 2.83835, "3050": 2.8942, "3055": 2.89271, "3060": 2.92435, "3065": 2.92577, "3070": 2.88613, "3075": 2.90708, "3080": 2.96095, "3085": 2.90933, "3090": 2.92326, "3095": 2.93741, "3100": 2.86528, "3105": 2.92394, "3110": 2.89856, "3115": 2.94432, "3120": 2.96799, "3125": 2.87086, "3130": 2.93517, "3135": 2.92459, "3140": 2.88027, "3145": 2.92659, "3150": 2.8581, "3155": 2.85371, "3160": 2.84515, "3165": 2.85026, "3170": 2.90155, "3175": 2.91129, "3180": 2.86697, "3185": 2.93369, "3190": 2.93069, "3195": 2.93419, "3200": 2.93035, "3205": 2.86435, "3210": 2.87298, "3215": 2.92037, "3220": 2.87469, "3225": 2.87298, "3230": 2.81914, "3235": 2.87948, "3240": 2.87296, "3245": 2.9085, "3250": 2.86027, "3255": 2.85681, "3260": 2.86851, "3265": 2.87812, "3270": 2.85091, "3275": 2.87621, "3280": 2.80872, "3285": 2.81882, "3290": 2.87081, "3295": 2.90664, "3300": 2.87857, "3305": 2.86932, "3310": 2.86794, "3315": 2.83532, "3320": 2.91304, "3325": 2.8505, "3330": 2.84604, "3335": 2.85324, "3340": 2.82795, "3345": 2.84034, "3350": 2.85927, "3355": 2.88198, "3360": 2.81119, "3365": 2.85636, "3370": 2.84482, "3375": 2.85163, "3380": 2.85748, "3385": 2.88446, "3390": 2.87078, "3395": 2.81824, "3400": 2.79545, "3405": 2.83873, "3410": 2.85435, "3415": 2.86556, "3420": 2.83057, "3425": 2.81522, "3430": 2.84937, "3435": 2.90018, "3440": 2.83304, "3445": 2.87612, "3450": 2.82297, "3455": 2.79733, "3460": 2.82351, "3465": 2.85631, "3470": 2.84922, "3475": 2.78028, "3480": 2.84688, "3485": 2.83002, "3490": 2.89989, "3495": 2.86087, "3500": 2.83926, "3505": 2.8329, "3510": 2.81992, "3515": 2.85135, "3520": 2.78496, "3525": 2.81419, "3530": 2.86531, "3535": 2.79309, "3540": 2.84304, "3545": 2.81793, "3550": 
2.80631, "3555": 2.82472, "3560": 2.8277, "3565": 2.83384, "3570": 2.81016, "3575": 2.81198, "3580": 2.8233, "3585": 2.84287, "3590": 2.84168, "3595": 2.78692, "3600": 2.75783, "3605": 2.79942, "3610": 2.85552, "3615": 2.75669, "3620": 2.81607, "3625": 2.89208, "3630": 2.78175, "3635": 2.79306, "3640": 2.7918, "3645": 2.77412, "3650": 2.81304, "3655": 2.82606, "3660": 2.77343, "3665": 2.78956, "3670": 2.78455, "3675": 2.78401, "3680": 2.81473, "3685": 2.81397, "3690": 2.81241, "3695": 2.81675, "3700": 2.79602, "3705": 2.79154, "3710": 2.75823, "3715": 2.80796, "3720": 2.7997, "3725": 2.79805, "3730": 2.84474, "3735": 2.80524, "3740": 2.75746, "3745": 2.79743, "3750": 2.81671, "3755": 2.80418, "3760": 2.76493, "3765": 2.76064, "3770": 2.7696, "3775": 2.77649, "3780": 2.76598, "3785": 2.78549, "3790": 2.74842, "3795": 2.79563, "3800": 2.80795, "3805": 2.75692, "3810": 2.81273, "3815": 2.77072, "3820": 2.78908, "3825": 2.74155, "3830": 2.75391, "3835": 2.82054, "3840": 2.73052, "3845": 2.72246, "3850": 2.77941, "3855": 2.72394, "3860": 2.80753, "3865": 2.75761, "3870": 2.78391, "3875": 2.76322, "3880": 2.79432, "3885": 2.79077, "3890": 2.7492, "3895": 2.80029, "3900": 2.77101, "3905": 2.72837, "3910": 2.75105, "3915": 2.75519, "3920": 2.80329, "3925": 2.78514, "3930": 2.71635, "3935": 2.7584, "3940": 2.76185, "3945": 2.74823, "3950": 2.73286, "3955": 2.77901, "3960": 2.7711, "3965": 2.74066, "3970": 2.75905, "3975": 2.73201, "3980": 2.7429, "3985": 2.75166, "3990": 2.69587, "3995": 2.78643, "4000": 2.74164, "4005": 2.77526, "4010": 2.71578, "4015": 2.72976, "4020": 2.75497, "4025": 2.73761, "4030": 2.6648, "4035": 2.70288, "4040": 2.75453, "4045": 2.75786, "4050": 2.79589, "4055": 2.72818, "4060": 2.72009, "4065": 2.65861, "4070": 2.81223, "4075": 2.76312, "4080": 2.7255, "4085": 2.79128, "4090": 2.7397, "4095": 2.71751, "4100": 2.728, "4105": 2.74756, "4110": 2.73519, "4115": 2.71319, "4120": 2.73228, "4125": 2.70683, "4130": 2.70332, "4135": 2.69389, "4140": 2.68846, "4145": 2.78919, "4150": 2.71748, "4155": 2.7463, "4160": 2.76551, "4165": 2.72586, "4170": 2.68054, "4175": 2.72265, "4180": 2.73288, "4185": 2.73486, "4190": 2.74517, "4195": 2.70268, "4200": 2.71029, "4205": 2.75862, "4210": 2.68365, "4215": 2.67255, "4220": 2.66479, "4225": 2.71034, "4230": 2.73193, "4235": 2.73823, "4240": 2.71111, "4245": 2.70177, "4250": 2.71741, "4255": 2.66274, "4260": 2.73103, "4265": 2.74176, "4270": 2.72941, "4275": 2.69578, "4280": 2.70662, "4285": 2.73861, "4290": 2.69408, "4295": 2.69876, "4300": 2.70844, "4305": 2.70422, "4310": 2.73614, "4315": 2.71396, "4320": 2.70947, "4325": 2.71224, "4330": 2.71647, "4335": 2.69956, "4340": 2.70775, "4345": 2.73113, "4350": 2.68011, "4355": 2.70126, "4360": 2.7182, "4365": 2.7914, "4370": 2.73825, "4375": 2.74875, "4380": 2.70681, "4385": 2.70266, "4390": 2.70642, "4395": 2.76047, "4400": 2.67044, "4405": 2.67056, "4410": 2.68935, "4415": 2.71212, "4420": 2.7115, "4425": 2.72896, "4430": 2.69643, "4435": 2.68652, "4440": 2.69906, "4445": 2.68758, "4450": 2.66325, "4455": 2.67346, "4460": 2.69178, "4465": 2.7059, "4470": 2.67216, "4475": 2.69037, "4480": 2.66268, "4485": 2.71419, "4490": 2.65793, "4495": 2.71479, "4500": 2.70782, "4505": 2.69773, "4510": 2.6542, "4515": 2.70466, "4520": 2.67252, "4525": 2.67792, "4530": 2.67886, "4535": 2.67674, "4540": 2.71154, "4545": 2.66233, "4550": 2.70664, "4555": 2.68605, "4560": 2.66128, "4565": 2.64611, "4570": 2.64576, "4575": 2.67137, "4580": 2.69219, "4585": 2.68755, "4590": 2.62145, "4595": 2.66712, "4600": 
2.68332, "4605": 2.68385, "4610": 2.66134, "4615": 2.67526, "4620": 2.66394, "4625": 2.69086, "4630": 2.68045, "4635": 2.64892, "4640": 2.69714, "4645": 2.64913, "4650": 2.70468, "4655": 2.70994, "4660": 2.68033, "4665": 2.69112, "4670": 2.67816, "4675": 2.69269, "4680": 2.66718, "4685": 2.66215, "4690": 2.70844, "4695": 2.65925, "4700": 2.67687, "4705": 2.65395, "4710": 2.67912, "4715": 2.65067, "4720": 2.72644, "4725": 2.63151, "4730": 2.65452, "4735": 2.69154, "4740": 2.65076, "4745": 2.6559, "4750": 2.64776, "4755": 2.66095, "4760": 2.67003, "4765": 2.64825, "4770": 2.62782, "4775": 2.66166, "4780": 2.66056, "4785": 2.69279, "4790": 2.65191, "4795": 2.67843, "4800": 2.63452, "4805": 2.64493, "4810": 2.66614, "4815": 2.64834, "4820": 2.67559, "4825": 2.65411, "4830": 2.61885, "4835": 2.65227, "4840": 2.66175, "4845": 2.6386, "4850": 2.62941, "4855": 2.60341, "4860": 2.65656, "4865": 2.62949, "4870": 2.64445, "4875": 2.62204, "4880": 2.62783, "4885": 2.62968, "4890": 2.68357, "4895": 2.66598, "4900": 2.61954, "4905": 2.62398, "4910": 2.64396, "4915": 2.61668, "4920": 2.65681, "4925": 2.65403, "4930": 2.57311, "4935": 2.6553, "4940": 2.63764, "4945": 2.64211, "4950": 2.62962, "4955": 2.62107, "4960": 2.6247, "4965": 2.66683, "4970": 2.60375, "4975": 2.65881, "4980": 2.62307, "4985": 2.63617, "4990": 2.66214, "4995": 2.58785, "5000": 2.66628, "5005": 2.6671, "5010": 2.68856, "5015": 2.6383, "5020": 2.64426, "5025": 2.69186, "5030": 2.64852, "5035": 2.62171, "5040": 2.62372, "5045": 2.60726, "5050": 2.62902, "5055": 2.6518, "5060": 2.64774, "5065": 2.69046, "5070": 2.6068, "5075": 2.61871, "5080": 2.61137, "5085": 2.6065, "5090": 2.59511, "5095": 2.65517, "5100": 2.65344, "5105": 2.61144, "5110": 2.66669, "5115": 2.62448, "5120": 2.67682, "5125": 2.63353, "5130": 2.62187, "5135": 2.61953, "5140": 2.58001, "5145": 2.63186, "5150": 2.64256, "5155": 2.62201, "5160": 2.6759, "5165": 2.58998, "5170": 2.59615, "5175": 2.62349, "5180": 2.61276, "5185": 2.62639, "5190": 2.62589, "5195": 2.67291, "5200": 2.60192, "5205": 2.6089, "5210": 2.61021, "5215": 2.64951, "5220": 2.59025, "5225": 2.55539, "5230": 2.63687, "5235": 2.61805, "5240": 2.70666, "5245": 2.63618, "5250": 2.60204, "5255": 2.62325, "5260": 2.56052, "5265": 2.59972, "5270": 2.60013, "5275": 2.6235, "5280": 2.61392, "5285": 2.60574, "5290": 2.63406, "5295": 2.62527, "5300": 2.58255, "5305": 2.60553, "5310": 2.61145, "5315": 2.5901, "5320": 2.61864, "5325": 2.64727, "5330": 2.61362, "5335": 2.58879, "5340": 2.5682, "5345": 2.66159, "5350": 2.62423, "5355": 2.5824, "5360": 2.60011, "5365": 2.62361, "5370": 2.61629, "5375": 2.63346, "5380": 2.58193, "5385": 2.56588, "5390": 2.5896, "5395": 2.62137, "5400": 2.61037, "5405": 2.54886, "5410": 2.61767, "5415": 2.59889, "5420": 2.61569, "5425": 2.62769, "5430": 2.63231, "5435": 2.58033, "5440": 2.58782, "5445": 2.63266, "5450": 2.65337, "5455": 2.61596, "5460": 2.59479, "5465": 2.60854, "5470": 2.60241, "5475": 2.62957, "5480": 2.59353, "5485": 2.5951, "5490": 2.58061, "5495": 2.57479, "5500": 2.57217, "5505": 2.622, "5510": 2.62958, "5515": 2.58587, "5520": 2.56117, "5525": 2.5892, "5530": 2.66829, "5535": 2.62993, "5540": 2.5778, "5545": 2.60272, "5550": 2.57126, "5555": 2.58866, "5560": 2.57384, "5565": 2.61428, "5570": 2.65531, "5575": 2.63536, "5580": 2.57904, "5585": 2.5996, "5590": 2.56633, "5595": 2.5927, "5600": 2.55869, "5605": 2.60371, "5610": 2.58519, "5615": 2.58545, "5620": 2.58625, "5625": 2.55581, "5630": 2.5753, "5635": 2.63654, "5640": 2.60029, "5645": 2.57553, "5650": 2.58266, 
"5655": 2.55074, "5660": 2.56127, "5665": 2.59074, "5670": 2.57261, "5675": 2.61377, "5680": 2.53336, "5685": 2.5726, "5690": 2.60503, "5695": 2.56306, "5700": 2.60062, "5705": 2.60218, "5710": 2.58566, "5715": 2.58854, "5720": 2.53918, "5725": 2.6068, "5730": 2.57837, "5735": 2.61387, "5740": 2.60134, "5745": 2.56248, "5750": 2.54598, "5755": 2.56382, "5760": 2.61721, "5765": 2.56106, "5770": 2.54402, "5775": 2.59153, "5780": 2.58173, "5785": 2.54437, "5790": 2.56704, "5795": 2.60771, "5800": 2.54939, "5805": 2.54159, "5810": 2.561, "5815": 2.52971, "5820": 2.59967, "5825": 2.51041, "5830": 2.5026, "5835": 2.60364, "5840": 2.54605, "5845": 2.55704, "5850": 2.61737, "5855": 2.51326, "5860": 2.56594, "5865": 2.5206, "5870": 2.57719, "5875": 2.61137, "5880": 2.58989, "5885": 2.5703, "5890": 2.58725, "5895": 2.55994, "5900": 2.62142, "5905": 2.56013, "5910": 2.59959, "5915": 2.61442, "5920": 2.59158, "5925": 2.53216, "5930": 2.57923, "5935": 2.55715, "5940": 2.57408, "5945": 2.52213, "5950": 2.55856, "5955": 2.59399, "5960": 2.56884, "5965": 2.62405, "5970": 2.55561, "5975": 2.58971, "5980": 2.56685, "5985": 2.56814, "5990": 2.56278, "5995": 2.56529, "6000": 2.5614, "6005": 2.52461, "6010": 2.56598, "6015": 2.52876, "6020": 2.53932, "6025": 2.5616, "6030": 2.61115, "6035": 2.54616, "6040": 2.5543, "6045": 2.4947, "6050": 2.59872, "6055": 2.52178, "6060": 2.55, "6065": 2.5295, "6070": 2.53314, "6075": 2.54182, "6080": 2.5384, "6085": 2.60097, "6090": 2.57231, "6095": 2.5391, "6100": 2.54612, "6105": 2.52606, "6110": 2.55978, "6115": 2.5898, "6120": 2.56024, "6125": 2.54233, "6130": 2.47719, "6135": 2.56233, "6140": 2.56053, "6145": 2.56134, "6150": 2.5296, "6155": 2.51341, "6160": 2.54208, "6165": 2.576, "6170": 2.54976, "6175": 2.60618, "6180": 2.51602, "6185": 2.55611, "6190": 2.49712, "6195": 2.58374, "6200": 2.55543, "6205": 2.54252, "6210": 2.52306, "6215": 2.51703, "6220": 2.56737, "6225": 2.51809, "6230": 2.51553, "6235": 2.56389, "6240": 2.55346, "6245": 2.52822, "6250": 2.53622, "6255": 2.58125, "6260": 2.52709, "6265": 2.57622, "6270": 2.52765, "6275": 2.56942, "6280": 2.5299, "6285": 2.52587, "6290": 2.52392, "6295": 2.51011, "6300": 2.5627, "6305": 2.52787, "6310": 2.51602, "6315": 2.53987, "6320": 2.49129, "6325": 2.60244, "6330": 2.56133, "6335": 2.51319, "6340": 2.51589, "6345": 2.55971, "6350": 2.55972, "6355": 2.52746, "6360": 2.52497, "6365": 2.48883, "6370": 2.53865, "6375": 2.50013, "6380": 2.56153, "6385": 2.57977, "6390": 2.51116, "6395": 2.55466, "6400": 2.51479, "6405": 2.53225, "6410": 2.51853, "6415": 2.52919, "6420": 2.54691, "6425": 2.53816, "6430": 2.58393, "6435": 2.54725, "6440": 2.54019, "6445": 2.53341, "6450": 2.53493, "6455": 2.52689, "6460": 2.51939, "6465": 2.56644, "6470": 2.52311, "6475": 2.52726, "6480": 2.49034, "6485": 2.53435, "6490": 2.51246, "6495": 2.50192, "6500": 2.52676, "6505": 2.49827, "6510": 2.54795, "6515": 2.5124, "6520": 2.51282, "6525": 2.49715, "6530": 2.54677, "6535": 2.53718, "6540": 2.53643, "6545": 2.5639, "6550": 2.50392, "6555": 2.56115, "6560": 2.51415, "6565": 2.52381, "6570": 2.58708, "6575": 2.52575, "6580": 2.50066, "6585": 2.5081, "6590": 2.51138, "6595": 2.50607, "6600": 2.49719, "6605": 2.54455, "6610": 2.47886, "6615": 2.5698, "6620": 2.53624, "6625": 2.5159, "6630": 2.51561, "6635": 2.47688, "6640": 2.54115, "6645": 2.59785, "6650": 2.51227, "6655": 2.50014, "6660": 2.57734, "6665": 2.52237, "6670": 2.57045, "6675": 2.47294, "6680": 2.55132, "6685": 2.53922, "6690": 2.51872, "6695": 2.49389, "6700": 2.5298, "6705": 
2.52195, "6710": 2.49389, "6715": 2.51837, "6720": 2.51119, "6725": 2.52201, "6730": 2.52236, "6735": 2.48422, "6740": 2.51431, "6745": 2.49582, "6750": 2.5596, "6755": 2.47622, "6760": 2.54581, "6765": 2.48987, "6770": 2.52057, "6775": 2.50949, "6780": 2.54003, "6785": 2.47408, "6790": 2.54599, "6795": 2.49988, "6800": 2.53024, "6805": 2.51456, "6810": 2.50646, "6815": 2.52401, "6820": 2.48885, "6825": 2.50832, "6830": 2.54226, "6835": 2.50893, "6840": 2.51175, "6845": 2.52754, "6850": 2.47778, "6855": 2.51505, "6860": 2.50619, "6865": 2.49087, "6870": 2.55587, "6875": 2.47675, "6880": 2.55249, "6885": 2.47984, "6890": 2.55085, "6895": 2.50241, "6900": 2.48895, "6905": 2.49947, "6910": 2.51935, "6915": 2.52019, "6920": 2.5347, "6925": 2.54695, "6930": 2.49415, "6935": 2.52189, "6940": 2.50251, "6945": 2.4646, "6950": 2.48606, "6955": 2.52936, "6960": 2.52268, "6965": 2.49704, "6970": 2.47343, "6975": 2.52408, "6980": 2.45511, "6985": 2.52072, "6990": 2.53123, "6995": 2.46397, "7000": 2.4911, "7005": 2.47202, "7010": 2.47663, "7015": 2.52252, "7020": 2.4689, "7025": 2.45587, "7030": 2.48732, "7035": 2.48082, "7040": 2.50989, "7045": 2.52236, "7050": 2.52828, "7055": 2.44314, "7060": 2.47273, "7065": 2.4837, "7070": 2.49246, "7075": 2.49486, "7080": 2.53757, "7085": 2.48759, "7090": 2.47919, "7095": 2.50509, "7100": 2.5179, "7105": 2.48938, "7110": 2.48849, "7115": 2.50652, "7120": 2.47559, "7125": 2.46351, "7130": 2.48672, "7135": 2.51571, "7140": 2.50102, "7145": 2.50009, "7150": 2.51089, "7155": 2.5067, "7160": 2.47555, "7165": 2.45742, "7170": 2.50631, "7175": 2.5054, "7180": 2.50619, "7185": 2.48237, "7190": 2.4618, "7195": 2.46735, "7200": 2.51161, "7205": 2.49142, "7210": 2.44475, "7215": 2.48318, "7220": 2.44641, "7225": 2.51459, "7230": 2.51019, "7235": 2.48413, "7240": 2.47943, "7245": 2.50194, "7250": 2.50973, "7255": 2.49521, "7260": 2.46039, "7265": 2.45216, "7270": 2.47328, "7275": 2.5025, "7280": 2.49527, "7285": 2.42661, "7290": 2.48338, "7295": 2.48914, "7300": 2.42074, "7305": 2.44936, "7310": 2.45207, "7315": 2.49341, "7320": 2.48828, "7325": 2.46164, "7330": 2.49186, "7335": 2.4752, "7340": 2.46638, "7345": 2.49626, "7350": 2.51189, "7355": 2.49823, "7360": 2.47963, "7365": 2.47313, "7370": 2.47391, "7375": 2.45218, "7380": 2.49755, "7385": 2.48779, "7390": 2.47644, "7395": 2.47618, "7400": 2.48296, "7405": 2.44106, "7410": 2.48424, "7415": 2.4729, "7420": 2.49622, "7425": 2.46004, "7430": 2.52681, "7435": 2.49274, "7440": 2.52321, "7445": 2.51062, "7450": 2.47524, "7455": 2.45683, "7460": 2.46583, "7465": 2.47855, "7470": 2.45052, "7475": 2.46012, "7480": 2.51285, "7485": 2.4508, "7490": 2.47546, "7495": 2.48135, "7500": 2.49597, "7505": 2.44405, "7510": 2.43826, "7515": 2.42328, "7520": 2.49549, "7525": 2.49977, "7530": 2.47849, "7535": 2.46248, "7540": 2.47346, "7545": 2.47544, "7550": 2.49213, "7555": 2.45528, "7560": 2.42963, "7565": 2.50996, "7570": 2.48642, "7575": 2.43965, "7580": 2.45914, "7585": 2.48299, "7590": 2.48206, "7595": 2.46609, "7600": 2.46516, "7605": 2.44904, "7610": 2.45129, "7615": 2.42778, "7620": 2.54622, "7625": 2.48143, "7630": 2.42658, "7635": 2.42863, "7640": 2.45643, "7645": 2.4744, "7650": 2.46451, "7655": 2.48551, "7660": 2.45427, "7665": 2.43455, "7670": 2.44251, "7675": 2.4576, "7680": 2.48961, "7685": 2.43401, "7690": 2.48225, "7695": 2.45683, "7700": 2.48334, "7705": 2.50436, "7710": 2.49575, "7715": 2.44462, "7720": 2.47374, "7725": 2.48378, "7730": 2.46106, "7735": 2.47327, "7740": 2.43995, "7745": 2.45188, "7750": 2.44049, "7755": 
2.46954, "7760": 2.45239, "7765": 2.45626, "7770": 2.472, "7775": 2.45496, "7780": 2.41932, "7785": 2.44604, "7790": 2.48505, "7795": 2.44188, "7800": 2.46164, "7805": 2.4901, "7810": 2.50459, "7815": 2.49362, "7820": 2.44966, "7825": 2.51653, "7830": 2.45443, "7835": 2.47021, "7840": 2.48113, "7845": 2.46363, "7850": 2.41775, "7855": 2.47355, "7860": 2.50098, "7865": 2.42599, "7870": 2.47746, "7875": 2.44916, "7880": 2.45774, "7885": 2.46453, "7890": 2.47219, "7895": 2.44676, "7900": 2.43949, "7905": 2.43728, "7910": 2.42647, "7915": 2.48369, "7920": 2.47802, "7925": 2.42301, "7930": 2.4751, "7935": 2.45176, "7940": 2.4281, "7945": 2.47175, "7950": 2.44621, "7955": 2.42034, "7960": 2.49104, "7965": 2.51887, "7970": 2.52571, "7975": 2.45241, "7980": 2.44395, "7985": 2.47069, "7990": 2.43396, "7995": 2.47169, "8000": 2.43762, "8005": 2.41926, "8010": 2.4599, "8015": 2.47159, "8020": 2.48389, "8025": 2.47654, "8030": 2.45332, "8035": 2.47384, "8040": 2.42255, "8045": 2.45399, "8050": 2.45504, "8055": 2.42603, "8060": 2.44549, "8065": 2.46363, "8070": 2.46309, "8075": 2.4643, "8080": 2.44779, "8085": 2.44773, "8090": 2.43022, "8095": 2.42683, "8100": 2.44141, "8105": 2.49824, "8110": 2.43953, "8115": 2.44417, "8120": 2.46895, "8125": 2.47323, "8130": 2.45462, "8135": 2.45343, "8140": 2.44239, "8145": 2.42917, "8150": 2.42453, "8155": 2.48753, "8160": 2.45631, "8165": 2.44487, "8170": 2.43782, "8175": 2.4253, "8180": 2.49869, "8185": 2.42857, "8190": 2.47133, "8195": 2.46132, "8200": 2.44939, "8205": 2.44754, "8210": 2.43437, "8215": 2.44265, "8220": 2.43647, "8225": 2.41212, "8230": 2.44265, "8235": 2.46929, "8240": 2.42939, "8245": 2.45287, "8250": 2.45768, "8255": 2.44523, "8260": 2.43531, "8265": 2.42814, "8270": 2.43199, "8275": 2.44266, "8280": 2.39888, "8285": 2.43962, "8290": 2.48052, "8295": 2.44923, "8300": 2.45902, "8305": 2.40772, "8310": 2.43561, "8315": 2.45759, "8320": 2.39997, "8325": 2.39568, "8330": 2.43712, "8335": 2.44754, "8340": 2.4916, "8345": 2.44881, "8350": 2.45061, "8355": 2.40715, "8360": 2.40275, "8365": 2.45575, "8370": 2.45297, "8375": 2.42589, "8380": 2.41962, "8385": 2.42461, "8390": 2.43777, "8395": 2.44107, "8400": 2.44254, "8405": 2.49209, "8410": 2.44054, "8415": 2.43565, "8420": 2.41915, "8425": 2.44297, "8430": 2.46165, "8435": 2.40681, "8440": 2.45351, "8445": 2.46102, "8450": 2.40877, "8455": 2.46091, "8460": 2.45616, "8465": 2.43782, "8470": 2.41034, "8475": 2.47971, "8480": 2.4061, "8485": 2.41693, "8490": 2.46606, "8495": 2.43807, "8500": 2.446, "8505": 2.40508, "8510": 2.40468, "8515": 2.43033, "8520": 2.42722, "8525": 2.49353, "8530": 2.37485, "8535": 2.40257, "8540": 2.4856, "8545": 2.38223, "8550": 2.44137, "8555": 2.45406, "8560": 2.47112, "8565": 2.42143, "8570": 2.43259, "8575": 2.44967, "8580": 2.44191, "8585": 2.42048, "8590": 2.40368, "8595": 2.42752, "8600": 2.41293, "8605": 2.49368, "8610": 2.42217, "8615": 2.3892, "8620": 2.44872, "8625": 2.42585, "8630": 2.45581, "8635": 2.44958, "8640": 2.43496, "8645": 2.4742, "8650": 2.42234, "8655": 2.45416, "8660": 2.4556, "8665": 2.38685, "8670": 2.41073, "8675": 2.43012, "8680": 2.44989, "8685": 2.43207, "8690": 2.41168, "8695": 2.44457, "8700": 2.43533, "8705": 2.42255, "8710": 2.43075, "8715": 2.45108, "8720": 2.48003, "8725": 2.4115, "8730": 2.39414, "8735": 2.43597, "8740": 2.43134, "8745": 2.40049, "8750": 2.43836, "8755": 2.42532, "8760": 2.40128, "8765": 2.43603, "8770": 2.40579, "8775": 2.43914, "8780": 2.42231, "8785": 2.47241, "8790": 2.42102, "8795": 2.42052, "8800": 2.41616, "8805": 
2.41075, "8810": 2.41145, "8815": 2.47615, "8820": 2.45441, "8825": 2.42671, "8830": 2.38781, "8835": 2.42343, "8840": 2.39605, "8845": 2.42743, "8850": 2.43565, "8855": 2.40513, "8860": 2.42872, "8865": 2.42921, "8870": 2.43481, "8875": 2.44137, "8880": 2.41314, "8885": 2.39487, "8890": 2.44733, "8895": 2.42975, "8900": 2.41236, "8905": 2.40314, "8910": 2.40164, "8915": 2.41799, "8920": 2.43446, "8925": 2.46731, "8930": 2.41682, "8935": 2.41021, "8940": 2.39149, "8945": 2.39565, "8950": 2.41987, "8955": 2.39668, "8960": 2.43612, "8965": 2.4181, "8970": 2.40554, "8975": 2.47699, "8980": 2.44171, "8985": 2.37559, "8990": 2.41076, "8995": 2.4178, "9000": 2.45818, "9005": 2.41536, "9010": 2.37823, "9015": 2.4087, "9020": 2.39861, "9025": 2.37084, "9030": 2.39972, "9035": 2.42543, "9040": 2.42224, "9045": 2.42155, "9050": 2.39588, "9055": 2.41994, "9060": 2.41975, "9065": 2.40599, "9070": 2.44622, "9075": 2.39463, "9080": 2.43731, "9085": 2.41409, "9090": 2.41416, "9095": 2.39736, "9100": 2.40257, "9105": 2.35941, "9110": 2.46703, "9115": 2.41669, "9120": 2.40531, "9125": 2.46068, "9130": 2.39483, "9135": 2.44982, "9140": 2.43542, "9145": 2.42907, "9150": 2.42747, "9155": 2.37672, "9160": 2.41867, "9165": 2.42663, "9170": 2.37623, "9175": 2.41926, "9180": 2.37752, "9185": 2.43983, "9190": 2.41352, "9195": 2.39794, "9200": 2.39385, "9205": 2.45108, "9210": 2.36331, "9215": 2.46658, "9220": 2.44918, "9225": 2.38541, "9230": 2.44785, "9235": 2.39704, "9240": 2.40354, "9245": 2.43884, "9250": 2.43192, "9255": 2.43159, "9260": 2.38789, "9265": 2.44075, "9270": 2.43704, "9275": 2.39454, "9280": 2.39, "9285": 2.42371, "9290": 2.40453, "9295": 2.38581, "9300": 2.42484, "9305": 2.40523, "9310": 2.41622, "9315": 2.41034, "9320": 2.44448, "9325": 2.37178, "9330": 2.40432, "9335": 2.36135, "9340": 2.40895, "9345": 2.4165, "9350": 2.44289, "9355": 2.47883, "9360": 2.43909, "9365": 2.3898, "9370": 2.43679, "9375": 2.43355, "9380": 2.35664, "9385": 2.40395, "9390": 2.38267, "9395": 2.38878, "9400": 2.44512, "9405": 2.41495, "9410": 2.39905, "9415": 2.43839, "9420": 2.44519, "9425": 2.43303, "9430": 2.44767, "9435": 2.41508, "9440": 2.47843, "9445": 2.37671, "9450": 2.39428, "9455": 2.40397, "9460": 2.38671, "9465": 2.3795, "9470": 2.38228, "9475": 2.36792, "9480": 2.4371, "9485": 2.39088, "9490": 2.4222, "9495": 2.38393, "9500": 2.36475, "9505": 2.43172, "9510": 2.39972, "9515": 2.43102, "9520": 2.42003, "9525": 2.39114, "9530": 2.45514, "9535": 2.40146, "9540": 2.41915, "9545": 2.38075, "9550": 2.42301, "9555": 2.38948, "9560": 2.42361, "9565": 2.40794, "9570": 2.37507, "9575": 2.41197, "9580": 2.39803, "9585": 2.42342, "9590": 2.43008, "9595": 2.44897, "9600": 2.3925, "9605": 2.38419, "9610": 2.42308, "9615": 2.41535, "9620": 2.41585, "9625": 2.44769, "9630": 2.39767, "9635": 2.40377, "9640": 2.44707, "9645": 2.4105, "9650": 2.40041, "9655": 2.37299, "9660": 2.42437, "9665": 2.38983, "9670": 2.38278, "9675": 2.35701, "9680": 2.39846, "9685": 2.39752, "9690": 2.46404, "9695": 2.38068, "9700": 2.37751, "9705": 2.38382, "9710": 2.36779, "9715": 2.38923, "9720": 2.43577, "9725": 2.44283, "9730": 2.42833, "9735": 2.38923, "9740": 2.38192, "9745": 2.42826, "9750": 2.39965, "9755": 2.40951, "9760": 2.41155, "9765": 2.36936, "9770": 2.44772, "9775": 2.40531, "9780": 2.36402, "9785": 2.40253, "9790": 2.40868, "9795": 2.36045, "9800": 2.39758, "9805": 2.40631, "9810": 2.40884, "9815": 2.3792, "9820": 2.37973, "9825": 2.40437, "9830": 2.42282, "9835": 2.38493, "9840": 2.41435, "9845": 2.36409, "9850": 2.39956, 
"9855": 2.39496, "9860": 2.38877, "9865": 2.38079, "9870": 2.38607, "9875": 2.38073, "9880": 2.45266, "9885": 2.39259, "9890": 2.35403, "9895": 2.32143, "9900": 2.39601, "9905": 2.42729, "9910": 2.35626, "9915": 2.36477, "9920": 2.41167, "9925": 2.39835, "9930": 2.3842, "9935": 2.35228, "9940": 2.38519, "9945": 2.38916, "9950": 2.40627, "9955": 2.45017, "9960": 2.43272, "9965": 2.35605, "9970": 2.4104, "9975": 2.38685, "9980": 2.33208, "9985": 2.40831, "9990": 2.39643, "9995": 2.39637, "10000": 2.36721, "10005": 2.37368, "10010": 2.38436, "10015": 2.44703, "10020": 2.36507, "10025": 2.38912, "10030": 2.38846, "10035": 2.41126, "10040": 2.40638, "10045": 2.3845, "10050": 2.35263, "10055": 2.36874, "10060": 2.41984, "10065": 2.37463, "10070": 2.42468, "10075": 2.37308, "10080": 2.36203, "10085": 2.3716, "10090": 2.34771, "10095": 2.40452, "10100": 2.31489, "10105": 2.38225, "10110": 2.41162, "10115": 2.38823, "10120": 2.35976, "10125": 2.37194, "10130": 2.36025, "10135": 2.38421, "10140": 2.4128, "10145": 2.40806, "10150": 2.37783, "10155": 2.39663, "10160": 2.3612, "10165": 2.38492, "10170": 2.42814, "10175": 2.32688, "10180": 2.39643, "10185": 2.38375, "10190": 2.4438, "10195": 2.40264, "10200": 2.39095, "10205": 2.38749, "10210": 2.36954, "10215": 2.34522, "10220": 2.41894, "10225": 2.43304, "10230": 2.35499, "10235": 2.38838, "10240": 2.37311, "10245": 2.39231, "10250": 2.38934, "10255": 2.41718, "10260": 2.33542, "10265": 2.34999, "10270": 2.3513, "10275": 2.37106, "10280": 2.45057, "10285": 2.35914, "10290": 2.38942, "10295": 2.37652, "10300": 2.37171, "10305": 2.41999, "10310": 2.39084, "10315": 2.3631, "10320": 2.36861, "10325": 2.36138, "10330": 2.41236, "10335": 2.3631, "10340": 2.42088, "10345": 2.37051, "10350": 2.35736, "10355": 2.39741, "10360": 2.37455, "10365": 2.36297, "10370": 2.34127, "10375": 2.35799, "10380": 2.41915, "10385": 2.40878, "10390": 2.38385, "10395": 2.36065, "10400": 2.37801, "10405": 2.35112, "10410": 2.34176, "10415": 2.41757, "10420": 2.38273, "10425": 2.32718, "10430": 2.3604, "10435": 2.3727, "10440": 2.37431, "10445": 2.36259, "10450": 2.36233, "10455": 2.38173, "10460": 2.38293, "10465": 2.30397, "10470": 2.35738, "10475": 2.38103, "10480": 2.36436, "10485": 2.36258, "10490": 2.41315, "10495": 2.36751, "10500": 2.36468, "10505": 2.37362, "10510": 2.38358, "10515": 2.37505, "10520": 2.40332, "10525": 2.3916, "10530": 2.39341, "10535": 2.35672, "10540": 2.408, "10545": 2.3596, "10550": 2.37942, "10555": 2.359, "10560": 2.34105, "10565": 2.37452, "10570": 2.37553, "10575": 2.35413, "10580": 2.38062, "10585": 2.36967, "10590": 2.38022, "10595": 2.37744, "10600": 2.33154, "10605": 2.37312, "10610": 2.36641, "10615": 2.36478, "10620": 2.34972, "10625": 2.42039, "10630": 2.37283, "10635": 2.32444, "10640": 2.36466, "10645": 2.42282, "10650": 2.36368, "10655": 2.31064, "10660": 2.34854, "10665": 2.40138, "10670": 2.31641, "10675": 2.41762, "10680": 2.36407, "10685": 2.292, "10690": 2.38733, "10695": 2.33281, "10700": 2.3858, "10705": 2.38637, "10710": 2.34337, "10715": 2.38395, "10720": 2.32555, "10725": 2.35334, "10730": 2.3503, "10735": 2.35565, "10740": 2.31867, "10745": 2.34, "10750": 2.33508, "10755": 2.4054, "10760": 2.3663, "10765": 2.33626, "10770": 2.36881, "10775": 2.38587, "10780": 2.36921, "10785": 2.39064, "10790": 2.34667, "10795": 2.38704, "10800": 2.32419, "10805": 2.39755, "10810": 2.37562, "10815": 2.35415, "10820": 2.34345, "10825": 2.37197, "10830": 2.33854, "10835": 2.34907, "10840": 2.33088, "10845": 2.38744, "10850": 2.33281, "10855": 
2.36514, "10860": 2.33353, "10865": 2.32177, "10870": 2.32639, "10875": 2.30612, "10880": 2.39426, "10885": 2.40569, "10890": 2.36449, "10895": 2.37348, "10900": 2.33407, "10905": 2.31344, "10910": 2.40821, "10915": 2.37173, "10920": 2.37496, "10925": 2.36405, "10930": 2.32036, "10935": 2.36177, "10940": 2.35461, "10945": 2.34906, "10950": 2.36345, "10955": 2.36398, "10960": 2.31158, "10965": 2.36546, "10970": 2.35827, "10975": 2.4096, "10980": 2.37473, "10985": 2.34426, "10990": 2.39821, "10995": 2.36431, "11000": 2.33792, "11005": 2.36227, "11010": 2.34327, "11015": 2.32589, "11020": 2.33456, "11025": 2.36748, "11030": 2.34239, "11035": 2.31593, "11040": 2.31941, "11045": 2.31923, "11050": 2.31865, "11055": 2.29136, "11060": 2.34119, "11065": 2.31037, "11070": 2.39464, "11075": 2.32001, "11080": 2.35565, "11085": 2.33793, "11090": 2.3471, "11095": 2.37218, "11100": 2.33013, "11105": 2.31899, "11110": 2.36384, "11115": 2.37442, "11120": 2.38428, "11125": 2.31612, "11130": 2.35151, "11135": 2.33444, "11140": 2.37417, "11145": 2.35014, "11150": 2.39586, "11155": 2.34302, "11160": 2.36615, "11165": 2.36624, "11170": 2.343, "11175": 2.33599, "11180": 2.37573, "11185": 2.31254, "11190": 2.27882, "11195": 2.3293, "11200": 2.34729, "11205": 2.36181, "11210": 2.3324, "11215": 2.32122, "11220": 2.34427, "11225": 2.37153, "11230": 2.36752, "11235": 2.3211, "11240": 2.34313, "11245": 2.35722, "11250": 2.33304, "11255": 2.33705, "11260": 2.35744, "11265": 2.38972, "11270": 2.28873, "11275": 2.31506, "11280": 2.37052, "11285": 2.30987, "11290": 2.34764, "11295": 2.36528, "11300": 2.38188, "11305": 2.33626, "11310": 2.33096, "11315": 2.29853, "11320": 2.30734, "11325": 2.31673, "11330": 2.35488, "11335": 2.33923, "11340": 2.3079, "11345": 2.31413, "11350": 2.29586, "11355": 2.32214, "11360": 2.35247, "11365": 2.29371, "11370": 2.35395, "11375": 2.32881, "11380": 2.34087, "11385": 2.34894, "11390": 2.33502, "11395": 2.2884, "11400": 2.30872, "11405": 2.35544, "11410": 2.3568, "11415": 2.38676, "11420": 2.35256, "11425": 2.30977, "11430": 2.36963, "11435": 2.36241, "11440": 2.34866, "11445": 2.36439, "11450": 2.32465, "11455": 2.30666, "11460": 2.35277, "11465": 2.34464, "11470": 2.37517, "11475": 2.31443, "11480": 2.3257, "11485": 2.31107, "11490": 2.34802, "11495": 2.40884, "11500": 2.34082, "11505": 2.3508, "11510": 2.36454, "11515": 2.32236, "11520": 2.30733, "11525": 2.36193, "11530": 2.31597, "11535": 2.32276, "11540": 2.3477, "11545": 2.3444, "11550": 2.36623, "11555": 2.32655, "11560": 2.35061, "11565": 2.34134, "11570": 2.35106, "11575": 2.29687, "11580": 2.32925, "11585": 2.3547, "11590": 2.363, "11595": 2.3361, "11600": 2.35902, "11605": 2.32358, "11610": 2.36227, "11615": 2.36004, "11620": 2.2981, "11625": 2.27697, "11630": 2.33144, "11635": 2.34278, "11640": 2.30624, "11645": 2.3087, "11650": 2.3294, "11655": 2.3522, "11660": 2.33836, "11665": 2.33129, "11670": 2.30056, "11675": 2.29831, "11680": 2.32594, "11685": 2.33673, "11690": 2.34585, "11695": 2.31934, "11700": 2.32573, "11705": 2.30181, "11710": 2.34591, "11715": 2.31446, "11720": 2.30041, "11725": 2.34026, "11730": 2.30392, "11735": 2.32846, "11740": 2.27309, "11745": 2.31794, "11750": 2.32885, "11755": 2.35239, "11760": 2.31371, "11765": 2.34224, "11770": 2.27583, "11775": 2.32625, "11780": 2.25521, "11785": 2.29984, "11790": 2.3147, "11795": 2.32163, "11800": 2.33468, "11805": 2.30414, "11810": 2.30597, "11815": 2.33169, "11820": 2.31994, "11825": 2.36159, "11830": 2.31811, "11835": 2.33841, "11840": 2.34209, "11845": 2.31799, 
"11850": 2.30481, "11855": 2.31517, "11860": 2.34454, "11865": 2.35974, "11870": 2.38084, "11875": 2.28252, "11880": 2.29271, "11885": 2.33769, "11890": 2.29289, "11895": 2.29417, "11900": 2.33599, "11905": 2.31834, "11910": 2.27955, "11915": 2.3114, "11920": 2.33475, "11925": 2.30537, "11930": 2.30934, "11935": 2.31815, "11940": 2.31966, "11945": 2.34415, "11950": 2.30116, "11955": 2.31467, "11960": 2.33865, "11965": 2.29518, "11970": 2.28255, "11975": 2.33642, "11980": 2.30706, "11985": 2.27991, "11990": 2.30453, "11995": 2.3319, "12000": 2.32528, "12005": 2.32659, "12010": 2.29056, "12015": 2.31078, "12020": 2.33062, "12025": 2.33675, "12030": 2.3128, "12035": 2.33795, "12040": 2.31684, "12045": 2.31526, "12050": 2.30954, "12055": 2.33454, "12060": 2.2986, "12065": 2.33071, "12070": 2.30368, "12075": 2.27763, "12080": 2.35185, "12085": 2.33991, "12090": 2.33265, "12095": 2.28294, "12100": 2.31842, "12105": 2.31076, "12110": 2.33165, "12115": 2.30675, "12120": 2.30709, "12125": 2.29525, "12130": 2.30299, "12135": 2.33064, "12140": 2.29932, "12145": 2.25928, "12150": 2.26376, "12155": 2.34443, "12160": 2.36038, "12165": 2.32019, "12170": 2.33359, "12175": 2.34399, "12180": 2.33199, "12185": 2.34292, "12190": 2.33532, "12195": 2.29881, "12200": 2.30116, "12205": 2.32465, "12210": 2.35786, "12215": 2.30505, "12220": 2.30039, "12225": 2.24617, "12230": 2.33684, "12235": 2.3407, "12240": 2.32566, "12245": 2.2901, "12250": 2.27591, "12255": 2.33759, "12260": 2.31591, "12265": 2.3427, "12270": 2.31351, "12275": 2.31488, "12280": 2.32099, "12285": 2.28632, "12290": 2.31156, "12295": 2.26679, "12300": 2.33088, "12305": 2.26926, "12310": 2.28921, "12315": 2.35495, "12320": 2.29692, "12325": 2.32148, "12330": 2.30084, "12335": 2.32096, "12340": 2.34287, "12345": 2.36948, "12350": 2.34473, "12355": 2.30731, "12360": 2.31518, "12365": 2.33224, "12370": 2.29319, "12375": 2.30332, "12380": 2.29499, "12385": 2.2936, "12390": 2.25171, "12395": 2.3058, "12400": 2.30364, "12405": 2.31348, "12410": 2.30547, "12415": 2.28482, "12420": 2.31903, "12425": 2.30174, "12430": 2.31645, "12435": 2.3002, "12440": 2.33415, "12445": 2.32165, "12450": 2.30792, "12455": 2.241, "12460": 2.33726, "12465": 2.36536, "12470": 2.2779, "12475": 2.27512, "12480": 2.29378, "12485": 2.30769, "12490": 2.33297, "12495": 2.27134, "12500": 2.3226, "12505": 2.33654, "12510": 2.35687, "12515": 2.27184, "12520": 2.32088, "12525": 2.28643, "12530": 2.32262, "12535": 2.27318, "12540": 2.28783, "12545": 2.29208, "12550": 2.31635, "12555": 2.32377, "12560": 2.30149, "12565": 2.33618, "12570": 2.27928, "12575": 2.30201, "12580": 2.31265, "12585": 2.29277, "12590": 2.33649, "12595": 2.32379, "12600": 2.283, "12605": 2.32167, "12610": 2.36518, "12615": 2.30832, "12620": 2.33499, "12625": 2.33235, "12630": 2.29956, "12635": 2.33672, "12640": 2.2974, "12645": 2.28103, "12650": 2.32777, "12655": 2.26732, "12660": 2.3438, "12665": 2.31888, "12670": 2.31111, "12675": 2.32037, "12680": 2.2761, "12685": 2.36864, "12690": 2.30387, "12695": 2.33294, "12700": 2.29361, "12705": 2.30835, "12710": 2.31, "12715": 2.28987, "12720": 2.31495, "12725": 2.27831, "12730": 2.34028, "12735": 2.29831, "12740": 2.33848, "12745": 2.28858, "12750": 2.27472, "12755": 2.28333, "12760": 2.26826, "12765": 2.3383, "12770": 2.32748, "12775": 2.26762, "12780": 2.32164, "12785": 2.30267, "12790": 2.3074, "12795": 2.32294, "12800": 2.29867, "12805": 2.3174, "12810": 2.28177, "12815": 2.30218, "12820": 2.32567, "12825": 2.32585, "12830": 2.2949, "12835": 2.27085, "12840": 
2.27388, "12845": 2.31573, "12850": 2.27941, "12855": 2.27306, "12860": 2.27298, "12865": 2.31757, "12870": 2.2676, "12875": 2.34174, "12880": 2.31826, "12885": 2.28315, "12890": 2.30987, "12895": 2.24603, "12900": 2.32579, "12905": 2.31456, "12910": 2.28661, "12915": 2.28573, "12920": 2.29953, "12925": 2.3007, "12930": 2.2737, "12935": 2.24127, "12940": 2.25926, "12945": 2.31175, "12950": 2.28482, "12955": 2.32753, "12960": 2.31878, "12965": 2.29102, "12970": 2.27397, "12975": 2.2688, "12980": 2.33284, "12985": 2.2797, "12990": 2.28213, "12995": 2.27531, "13000": 2.2523, "13005": 2.3436, "13010": 2.32134, "13015": 2.27707, "13020": 2.28094, "13025": 2.30241, "13030": 2.27858, "13035": 2.28823, "13040": 2.32325, "13045": 2.27864, "13050": 2.28544, "13055": 2.29213, "13060": 2.30066, "13065": 2.30067, "13070": 2.32656, "13075": 2.29877, "13080": 2.27979, "13085": 2.28025, "13090": 2.29724, "13095": 2.30841, "13100": 2.30418, "13105": 2.31231, "13110": 2.34054, "13115": 2.22942, "13120": 2.30746, "13125": 2.24292, "13130": 2.27305, "13135": 2.31556, "13140": 2.30608, "13145": 2.26606, "13150": 2.28574, "13155": 2.26, "13160": 2.24794, "13165": 2.2587, "13170": 2.31255, "13175": 2.21579, "13180": 2.28538, "13185": 2.26612, "13190": 2.27365, "13195": 2.26434, "13200": 2.32018, "13205": 2.311, "13210": 2.28021, "13215": 2.30533, "13220": 2.28517, "13225": 2.29728, "13230": 2.25364, "13235": 2.26825, "13240": 2.27143, "13245": 2.31076, "13250": 2.27192, "13255": 2.29474, "13260": 2.29265, "13265": 2.27829, "13270": 2.27845, "13275": 2.27311, "13280": 2.26893, "13285": 2.28229, "13290": 2.30726, "13295": 2.31097, "13300": 2.24544, "13305": 2.27075, "13310": 2.26286, "13315": 2.22495, "13320": 2.25417, "13325": 2.31159, "13330": 2.27436, "13335": 2.30743, "13340": 2.28383, "13345": 2.29408, "13350": 2.25618, "13355": 2.2824, "13360": 2.23025, "13365": 2.26043, "13370": 2.32245, "13375": 2.31586, "13380": 2.29307, "13385": 2.29609, "13390": 2.32284, "13395": 2.2744, "13400": 2.27482, "13405": 2.3237, "13410": 2.26884, "13415": 2.26776, "13420": 2.29812, "13425": 2.25437, "13430": 2.28812, "13435": 2.30439, "13440": 2.32502, "13445": 2.27292, "13450": 2.29613, "13455": 2.30193, "13460": 2.29757, "13465": 2.29144, "13470": 2.28364, "13475": 2.28439, "13480": 2.27452, "13485": 2.28838, "13490": 2.2618, "13495": 2.25065, "13500": 2.23548, "13505": 2.28902, "13510": 2.26575, "13515": 2.25156, "13520": 2.31606, "13525": 2.26539, "13530": 2.30173, "13535": 2.31329, "13540": 2.27165, "13545": 2.25644, "13550": 2.23937, "13555": 2.29818, "13560": 2.29594, "13565": 2.3057, "13570": 2.23868, "13575": 2.23197, "13580": 2.28408, "13585": 2.30877, "13590": 2.33117, "13595": 2.27659, "13600": 2.20288, "13605": 2.29154, "13610": 2.22464, "13615": 2.3203, "13620": 2.29649, "13625": 2.26263, "13630": 2.27443, "13635": 2.22991, "13640": 2.24665, "13645": 2.27547, "13650": 2.30435, "13655": 2.27494, "13660": 2.24772, "13665": 2.25362, "13670": 2.25248, "13675": 2.28156, "13680": 2.23257, "13685": 2.30215, "13690": 2.26356, "13695": 2.27952, "13700": 2.2749, "13705": 2.26406, "13710": 2.24869, "13715": 2.27091, "13720": 2.28089, "13725": 2.29792, "13730": 2.32536, "13735": 2.34745, "13740": 2.27584, "13745": 2.27384, "13750": 2.32932, "13755": 2.26868, "13760": 2.27586, "13765": 2.28305, "13770": 2.29765, "13775": 2.35036, "13780": 2.21365, "13785": 2.22278, "13790": 2.29118, "13795": 2.26895, "13800": 2.30432, "13805": 2.27598, "13810": 2.25555, "13815": 2.24916, "13820": 2.26981, "13825": 2.25098, "13830": 2.28958, 
"13835": 2.28971, "13840": 2.20692, "13845": 2.26959, "13850": 2.27679, "13855": 2.3096, "13860": 2.28531, "13865": 2.26207, "13870": 2.22029, "13875": 2.25888, "13880": 2.25175, "13885": 2.26261, "13890": 2.23056, "13895": 2.30088, "13900": 2.2296, "13905": 2.2727, "13910": 2.22958, "13915": 2.27909, "13920": 2.26187, "13925": 2.29898, "13930": 2.23614, "13935": 2.287, "13940": 2.31783, "13945": 2.27775, "13950": 2.23702, "13955": 2.2998, "13960": 2.24251, "13965": 2.24384, "13970": 2.25319, "13975": 2.27564, "13980": 2.28154, "13985": 2.28717, "13990": 2.2967, "13995": 2.2665, "14000": 2.23332, "14005": 2.25947, "14010": 2.28496, "14015": 2.30559, "14020": 2.22635, "14025": 2.2392, "14030": 2.28143, "14035": 2.26071, "14040": 2.27062, "14045": 2.24386, "14050": 2.24913, "14055": 2.26201, "14060": 2.21839, "14065": 2.3182, "14070": 2.25283, "14075": 2.24853, "14080": 2.32079, "14085": 2.28186, "14090": 2.25916, "14095": 2.31648, "14100": 2.26906, "14105": 2.27411, "14110": 2.24641, "14115": 2.25479, "14120": 2.30369, "14125": 2.2078, "14130": 2.22595, "14135": 2.22828, "14140": 2.25781, "14145": 2.26038, "14150": 2.30618, "14155": 2.24576, "14160": 2.26327, "14165": 2.20856, "14170": 2.34904, "14175": 2.29791, "14180": 2.2299, "14185": 2.24532, "14190": 2.25459, "14195": 2.26117, "14200": 2.30647, "14205": 2.26398, "14210": 2.25057, "14215": 2.22416, "14220": 2.24004, "14225": 2.28424, "14230": 2.21762, "14235": 2.24052, "14240": 2.28617, "14245": 2.28313, "14250": 2.25769, "14255": 2.2957, "14260": 2.25541, "14265": 2.23205, "14270": 2.20124, "14275": 2.24995, "14280": 2.24241, "14285": 2.28954, "14290": 2.23024, "14295": 2.26785, "14300": 2.28488, "14305": 2.28564, "14310": 2.22303, "14315": 2.26676, "14320": 2.26573, "14325": 2.2936, "14330": 2.29348, "14335": 2.31034, "14340": 2.30331, "14345": 2.25742, "14350": 2.27505, "14355": 2.29251, "14360": 2.27972, "14365": 2.28216, "14370": 2.27289, "14375": 2.27338, "14380": 2.2836, "14385": 2.2232, "14390": 2.26928, "14395": 2.30165, "14400": 2.26059, "14405": 2.23941, "14410": 2.2127, "14415": 2.22316, "14420": 2.27564, "14425": 2.27052, "14430": 2.26076, "14435": 2.26702, "14440": 2.2235, "14445": 2.24434, "14450": 2.25508, "14455": 2.29474, "14460": 2.26795, "14465": 2.28841, "14470": 2.24546, "14475": 2.24546, "14480": 2.2359, "14485": 2.25971, "14490": 2.29106, "14495": 2.26786, "14500": 2.25457, "14505": 2.28502, "14510": 2.20064, "14515": 2.27682, "14520": 2.24273, "14525": 2.24773, "14530": 2.26005, "14535": 2.30289, "14540": 2.23432, "14545": 2.2851, "14550": 2.25773, "14555": 2.25816, "14560": 2.26408, "14565": 2.27301, "14570": 2.27005, "14575": 2.30357, "14580": 2.28136, "14585": 2.22576, "14590": 2.2341, "14595": 2.2733, "14600": 2.26609, "14605": 2.2374, "14610": 2.3111, "14615": 2.26737, "14620": 2.30016, "14625": 2.2488, "14630": 2.26717, "14635": 2.25803, "14640": 2.26389, "14645": 2.28233, "14650": 2.31238, "14655": 2.27032, "14660": 2.23082, "14665": 2.23107, "14670": 2.26201, "14675": 2.23351, "14680": 2.25482, "14685": 2.25831, "14690": 2.27042, "14695": 2.27539, "14700": 2.26212, "14705": 2.24589, "14710": 2.22933, "14715": 2.26315, "14720": 2.24179, "14725": 2.22357, "14730": 2.27007, "14735": 2.24381, "14740": 2.255, "14745": 2.20923, "14750": 2.24153, "14755": 2.32151, "14760": 2.22632, "14765": 2.25278, "14770": 2.24315, "14775": 2.21729, "14780": 2.21828, "14785": 2.26579, "14790": 2.27533, "14795": 2.24123, "14800": 2.28159, "14805": 2.27245, "14810": 2.27825, "14815": 2.26057, "14820": 2.26176, "14825": 2.26713, 
"14830": 2.21604, "14835": 2.2538, "14840": 2.24372, "14845": 2.22765, "14850": 2.30928, "14855": 2.28072, "14860": 2.23967, "14865": 2.24243, "14870": 2.27019, "14875": 2.24592, "14880": 2.23468, "14885": 2.28034, "14890": 2.27176, "14895": 2.25663, "14900": 2.27715, "14905": 2.22045, "14910": 2.29435, "14915": 2.27307, "14920": 2.27263, "14925": 2.2632, "14930": 2.25884, "14935": 2.21264, "14940": 2.29082, "14945": 2.26684, "14950": 2.23039, "14955": 2.23124, "14960": 2.26156, "14965": 2.2547, "14970": 2.25636, "14975": 2.25261, "14980": 2.21485, "14985": 2.19733, "14990": 2.25343, "14995": 2.23127, "15000": 2.18094, "15005": 2.29104, "15010": 2.2269, "15015": 2.24065, "15020": 2.25846, "15025": 2.20528, "15030": 2.24898, "15035": 2.23185, "15040": 2.25641, "15045": 2.25487, "15050": 2.23547, "15055": 2.20232, "15060": 2.24374, "15065": 2.23207, "15070": 2.24899, "15075": 2.22796, "15080": 2.2197, "15085": 2.21334, "15090": 2.2645, "15095": 2.27468, "15100": 2.25827, "15105": 2.24316, "15110": 2.23125, "15115": 2.27457, "15120": 2.27614, "15125": 2.20774, "15130": 2.2542, "15135": 2.25468, "15140": 2.2987, "15145": 2.24145, "15150": 2.24668, "15155": 2.24317, "15160": 2.2495, "15165": 2.24336, "15170": 2.2361, "15175": 2.25675, "15180": 2.28313, "15185": 2.24738, "15190": 2.24613, "15195": 2.23996, "15200": 2.23353, "15205": 2.27281, "15210": 2.23745, "15215": 2.2769, "15220": 2.2421, "15225": 2.28403, "15230": 2.25883, "15235": 2.24635, "15240": 2.22342, "15245": 2.23491, "15250": 2.24769, "15255": 2.24423, "15260": 2.26504, "15265": 2.24446, "15270": 2.25664, "15275": 2.2614, "15280": 2.22644, "15285": 2.25761, "15290": 2.24831, "15295": 2.25768, "15300": 2.27351, "15305": 2.27392, "15310": 2.3144, "15315": 2.22523, "15320": 2.24759, "15325": 2.25224, "15330": 2.27899, "15335": 2.2187, "15340": 2.22356, "15345": 2.24076, "15350": 2.22418, "15355": 2.24606, "15360": 2.24216, "15365": 2.22199, "15370": 2.26786, "15375": 2.21474, "15380": 2.20828, "15385": 2.20648, "15390": 2.28228, "15395": 2.24737, "15400": 2.28367, "15405": 2.23191, "15410": 2.23068, "15415": 2.28443, "15420": 2.24837, "15425": 2.23712, "15430": 2.23626, "15435": 2.26707, "15440": 2.2618, "15445": 2.24766, "15450": 2.25914, "15455": 2.27152, "15460": 2.21625, "15465": 2.24521, "15470": 2.25194, "15475": 2.21938, "15480": 2.22529, "15485": 2.20506, "15490": 2.28134, "15495": 2.24358, "15500": 2.20549, "15505": 2.2084, "15510": 2.20845, "15515": 2.23951, "15520": 2.20552, "15525": 2.269, "15530": 2.25037, "15535": 2.23677, "15540": 2.25274, "15545": 2.23831, "15550": 2.23353, "15555": 2.24129, "15560": 2.27391, "15565": 2.21705, "15570": 2.20772, "15575": 2.26604, "15580": 2.26832, "15585": 2.22978, "15590": 2.26733, "15595": 2.19127, "15600": 2.30378, "15605": 2.25075, "15610": 2.21019, "15615": 2.23339, "15620": 2.23456, "15625": 2.19623, "15630": 2.1944, "15635": 2.20539, "15640": 2.23231, "15645": 2.25533, "15650": 2.20804, "15655": 2.21516, "15660": 2.21827, "15665": 2.24245, "15670": 2.21508, "15675": 2.23151, "15680": 2.21427, "15685": 2.26473, "15690": 2.20568, "15695": 2.19618, "15700": 2.24546, "15705": 2.19512, "15710": 2.19588, "15715": 2.25032, "15720": 2.24312, "15725": 2.24502, "15730": 2.22859, "15735": 2.28592, "15740": 2.19058, "15745": 2.23528, "15750": 2.22464, "15755": 2.26071, "15760": 2.26546, "15765": 2.2551, "15770": 2.25785, "15775": 2.21746, "15780": 2.282, "15785": 2.19888, "15790": 2.24432, "15795": 2.24309, "15800": 2.22154, "15805": 2.23876, "15810": 2.16371, "15815": 2.27422, "15820": 
2.22558, "15825": 2.20394, "15830": 2.20598, "15835": 2.20895, "15840": 2.25453, "15845": 2.22463, "15850": 2.22082, "15855": 2.23743, "15860": 2.23011, "15865": 2.24435, "15870": 2.23625, "15875": 2.26721, "15880": 2.24477, "15885": 2.20521, "15890": 2.21663, "15895": 2.27992, "15900": 2.19737, "15905": 2.293, "15910": 2.25346, "15915": 2.19737, "15920": 2.26707, "15925": 2.22413, "15930": 2.23448, "15935": 2.2333, "15940": 2.19264, "15945": 2.2412, "15950": 2.22831, "15955": 2.22088, "15960": 2.23416, "15965": 2.22268, "15970": 2.22926, "15975": 2.19976, "15980": 2.25344, "15985": 2.21768, "15990": 2.23364, "15995": 2.23965, "16000": 2.20541, "16005": 2.21279, "16010": 2.18377, "16015": 2.21241, "16020": 2.21617, "16025": 2.24123, "16030": 2.24426, "16035": 2.17215, "16040": 2.27508, "16045": 2.21076, "16050": 2.24363, "16055": 2.22471, "16060": 2.19611, "16065": 2.23531, "16070": 2.24419, "16075": 2.18554, "16080": 2.27711, "16085": 2.23132, "16090": 2.21797, "16095": 2.23373, "16100": 2.22187, "16105": 2.27193, "16110": 2.2649, "16115": 2.1992, "16120": 2.19707, "16125": 2.23616, "16130": 2.21328, "16135": 2.25227, "16140": 2.18836, "16145": 2.23756, "16150": 2.21073, "16155": 2.24627, "16160": 2.28108, "16165": 2.2084, "16170": 2.20158, "16175": 2.17473, "16180": 2.24239, "16185": 2.23323, "16190": 2.234, "16195": 2.25624, "16200": 2.18941, "16205": 2.24237, "16210": 2.24453, "16215": 2.22894, "16220": 2.2472, "16225": 2.25126, "16230": 2.22977, "16235": 2.21255, "16240": 2.26592, "16245": 2.21862, "16250": 2.20911, "16255": 2.21591, "16260": 2.22551, "16265": 2.24631, "16270": 2.24364, "16275": 2.23385, "16280": 2.17088, "16285": 2.23989, "16290": 2.24239, "16295": 2.22962, "16300": 2.26909, "16305": 2.21084, "16310": 2.20106, "16315": 2.2076, "16320": 2.29678, "16325": 2.25159, "16330": 2.19068, "16335": 2.1912, "16340": 2.1829, "16345": 2.21764, "16350": 2.26324, "16355": 2.22472, "16360": 2.22582, "16365": 2.22334, "16370": 2.24132, "16375": 2.24017, "16380": 2.22091, "16385": 2.21028, "16390": 2.21027, "16395": 2.22756, "16400": 2.20112, "16405": 2.22024, "16410": 2.24785, "16415": 2.21562, "16420": 2.22806, "16425": 2.20896, "16430": 2.27758, "16435": 2.252, "16440": 2.24315, "16445": 2.20988, "16450": 2.20869, "16455": 2.26134, "16460": 2.24585, "16465": 2.24194, "16470": 2.1841, "16475": 2.2225, "16480": 2.15948, "16485": 2.25695, "16490": 2.20517, "16495": 2.22393, "16500": 2.23193, "16505": 2.20771, "16510": 2.24359, "16515": 2.26501, "16520": 2.2312, "16525": 2.17233, "16530": 2.26275, "16535": 2.18385, "16540": 2.25529, "16545": 2.20208, "16550": 2.19758, "16555": 2.22456, "16560": 2.24874, "16565": 2.24139, "16570": 2.23309, "16575": 2.19831, "16580": 2.18824, "16585": 2.21244, "16590": 2.23058, "16595": 2.18431, "16600": 2.19491, "16605": 2.2163, "16610": 2.23137, "16615": 2.20987, "16620": 2.21349, "16625": 2.21051, "16630": 2.23962, "16635": 2.20547, "16640": 2.23671, "16645": 2.21392, "16650": 2.25346, "16655": 2.25842, "16660": 2.22073, "16665": 2.18901, "16670": 2.21252, "16675": 2.23032, "16680": 2.22058, "16685": 2.22624, "16690": 2.21878, "16695": 2.26686, "16700": 2.18724, "16705": 2.22522, "16710": 2.23859, "16715": 2.27468, "16720": 2.23679, "16725": 2.24036, "16730": 2.18152, "16735": 2.20896, "16740": 2.19345, "16745": 2.21836, "16750": 2.2233, "16755": 2.21127, "16760": 2.19736, "16765": 2.21501, "16770": 2.22512, "16775": 2.20555, "16780": 2.23773, "16785": 2.2241, "16790": 2.21688, "16795": 2.2593, "16800": 2.22356, "16805": 2.24666, "16810": 2.17347, 
"16815": 2.2305, "16820": 2.23061, "16825": 2.22297, "16830": 2.22749, "16835": 2.19782, "16840": 2.20505, "16845": 2.14647, "16850": 2.24974, "16855": 2.22808, "16860": 2.23301, "16865": 2.25614, "16870": 2.19229, "16875": 2.17911, "16880": 2.22201, "16885": 2.21223, "16890": 2.24715, "16895": 2.18383, "16900": 2.24557, "16905": 2.24878, "16910": 2.22445, "16915": 2.23889, "16920": 2.21175, "16925": 2.19468, "16930": 2.2137, "16935": 2.20021, "16940": 2.24294, "16945": 2.19923, "16950": 2.21889, "16955": 2.23681, "16960": 2.22871, "16965": 2.18286, "16970": 2.19194, "16975": 2.21623, "16980": 2.21673, "16985": 2.21159, "16990": 2.22458, "16995": 2.17423, "17000": 2.2649, "17005": 2.24841, "17010": 2.22125, "17015": 2.18887, "17020": 2.23002, "17025": 2.17204, "17030": 2.1971, "17035": 2.20119, "17040": 2.235, "17045": 2.2062, "17050": 2.16387, "17055": 2.21901, "17060": 2.24673, "17065": 2.19056, "17070": 2.18364, "17075": 2.21042, "17080": 2.21339, "17085": 2.22822, "17090": 2.24109, "17095": 2.21855, "17100": 2.255, "17105": 2.25034, "17110": 2.22618, "17115": 2.19378, "17120": 2.17127, "17125": 2.21495, "17130": 2.24931, "17135": 2.19998, "17140": 2.23499, "17145": 2.22519, "17150": 2.18663, "17155": 2.19161, "17160": 2.25816, "17165": 2.21602, "17170": 2.22851, "17175": 2.21473, "17180": 2.2054, "17185": 2.21626, "17190": 2.16695, "17195": 2.22119, "17200": 2.21671, "17205": 2.19925, "17210": 2.23492, "17215": 2.2035, "17220": 2.23021, "17225": 2.21074, "17230": 2.22833, "17235": 2.21429, "17240": 2.20778, "17245": 2.21148, "17250": 2.20345, "17255": 2.19111, "17260": 2.2179, "17265": 2.21051, "17270": 2.21654, "17275": 2.25776, "17280": 2.21727, "17285": 2.22932, "17290": 2.21466, "17295": 2.22748, "17300": 2.208, "17305": 2.22799, "17310": 2.23522, "17315": 2.22613, "17320": 2.20811, "17325": 2.25865, "17330": 2.24401, "17335": 2.23425, "17340": 2.15894, "17345": 2.19232, "17350": 2.18871, "17355": 2.22453, "17360": 2.19163, "17365": 2.19496, "17370": 2.21132, "17375": 2.23987, "17380": 2.21497, "17385": 2.19683, "17390": 2.20376, "17395": 2.18637, "17400": 2.18847, "17405": 2.23875, "17410": 2.19692, "17415": 2.17909, "17420": 2.21793, "17425": 2.20286, "17430": 2.19471, "17435": 2.19354, "17440": 2.22581, "17445": 2.16828, "17450": 2.22069, "17455": 2.19415, "17460": 2.20837, "17465": 2.16979, "17470": 2.21271, "17475": 2.23511, "17480": 2.18021, "17485": 2.22841, "17490": 2.19635, "17495": 2.16785, "17500": 2.21107, "17505": 2.20845, "17510": 2.17143, "17515": 2.21506, "17520": 2.19612, "17525": 2.20754, "17530": 2.19885, "17535": 2.19042, "17540": 2.2128, "17545": 2.16883, "17550": 2.2109, "17555": 2.16026, "17560": 2.18679, "17565": 2.21415, "17570": 2.19332, "17575": 2.193, "17580": 2.18327, "17585": 2.19947, "17590": 2.25836, "17595": 2.22572, "17600": 2.24656, "17605": 2.20572, "17610": 2.20153, "17615": 2.21251, "17620": 2.21637, "17625": 2.1459, "17630": 2.20677, "17635": 2.18762, "17640": 2.2107, "17645": 2.28043, "17650": 2.18334, "17655": 2.20791, "17660": 2.24483, "17665": 2.2195, "17670": 2.22773, "17675": 2.22801, "17680": 2.20169, "17685": 2.20791, "17690": 2.16274, "17695": 2.24331, "17700": 2.2174, "17705": 2.24666, "17710": 2.16428, "17715": 2.15444, "17720": 2.21577, "17725": 2.21636, "17730": 2.1984, "17735": 2.18804, "17740": 2.1821, "17745": 2.21512, "17750": 2.26373, "17755": 2.20586, "17760": 2.20305, "17765": 2.18563, "17770": 2.19786, "17775": 2.21737, "17780": 2.23957, "17785": 2.20262, "17790": 2.18639, "17795": 2.20495, "17800": 2.18418, "17805": 
2.1948, "17810": 2.1974, "17815": 2.20465, "17820": 2.16726, "17825": 2.26025, "17830": 2.24032, "17835": 2.22092, "17840": 2.1855, "17845": 2.1831, "17850": 2.19949, "17855": 2.20855, "17860": 2.18076, "17865": 2.18196, "17870": 2.24089, "17875": 2.2092, "17880": 2.18337, "17885": 2.23248, "17890": 2.22444, "17895": 2.21778, "17900": 2.15859, "17905": 2.20869, "17910": 2.20569, "17915": 2.19719, "17920": 2.24116, "17925": 2.19659, "17930": 2.16801, "17935": 2.20705, "17940": 2.22129, "17945": 2.19374, "17950": 2.23187, "17955": 2.22719, "17960": 2.17312, "17965": 2.19921, "17970": 2.23007, "17975": 2.13705, "17980": 2.1982, "17985": 2.21174, "17990": 2.22239, "17995": 2.20899, "18000": 2.19041, "18005": 2.19458, "18010": 2.21537, "18015": 2.1909, "18020": 2.22439, "18025": 2.21476, "18030": 2.21249, "18035": 2.22106, "18040": 2.21413, "18045": 2.21743, "18050": 2.21611, "18055": 2.20836, "18060": 2.23192, "18065": 2.17785, "18070": 2.23878, "18075": 2.21378, "18080": 2.15854, "18085": 2.22282, "18090": 2.17756, "18095": 2.19054, "18100": 2.15973, "18105": 2.1853, "18110": 2.21127, "18115": 2.24432, "18120": 2.23764, "18125": 2.19213, "18130": 2.20361, "18135": 2.15987, "18140": 2.2047, "18145": 2.18752, "18150": 2.20886, "18155": 2.20367, "18160": 2.23743, "18165": 2.22697, "18170": 2.21256, "18175": 2.19252, "18180": 2.21048, "18185": 2.19305, "18190": 2.18662, "18195": 2.16103, "18200": 2.23254, "18205": 2.2019, "18210": 2.20841, "18215": 2.17089, "18220": 2.17861, "18225": 2.2276, "18230": 2.22891, "18235": 2.19174, "18240": 2.203, "18245": 2.20759, "18250": 2.18334, "18255": 2.28075, "18260": 2.21153, "18265": 2.20778, "18270": 2.1849, "18275": 2.18571, "18280": 2.19998, "18285": 2.22101, "18290": 2.17957, "18295": 2.21917, "18300": 2.22384, "18305": 2.24954, "18310": 2.24189, "18315": 2.2209, "18320": 2.18229, "18325": 2.21535, "18330": 2.20375, "18335": 2.21514, "18340": 2.20206, "18345": 2.18152, "18350": 2.24217, "18355": 2.17136, "18360": 2.22825, "18365": 2.18609, "18370": 2.16149, "18375": 2.21275, "18380": 2.21396, "18385": 2.21117, "18390": 2.18888, "18395": 2.24284, "18400": 2.19621, "18405": 2.24047, "18410": 2.20594, "18415": 2.17447, "18420": 2.17988, "18425": 2.18303, "18430": 2.1544, "18435": 2.19514, "18440": 2.19972, "18445": 2.16732, "18450": 2.21712, "18455": 2.1545, "18460": 2.19549, "18465": 2.21174, "18470": 2.19829, "18475": 2.1544, "18480": 2.19926, "18485": 2.16042, "18490": 2.19909, "18495": 2.24456, "18500": 2.2001, "18505": 2.16822, "18510": 2.21976, "18515": 2.17586, "18520": 2.19182, "18525": 2.17348, "18530": 2.18109, "18535": 2.17367, "18540": 2.21477, "18545": 2.17718, "18550": 2.21837, "18555": 2.17005, "18560": 2.19242, "18565": 2.22523, "18570": 2.21444, "18575": 2.14757, "18580": 2.22305, "18585": 2.21103, "18590": 2.21375, "18595": 2.18154, "18600": 2.1594, "18605": 2.21161, "18610": 2.20955, "18615": 2.19903, "18620": 2.21594, "18625": 2.19789, "18630": 2.1733, "18635": 2.20144, "18640": 2.16966, "18645": 2.17637, "18650": 2.25784, "18655": 2.1669, "18660": 2.23555, "18665": 2.22521, "18670": 2.20987, "18675": 2.19305, "18680": 2.20971, "18685": 2.18046, "18690": 2.21452, "18695": 2.15752, "18700": 2.16337, "18705": 2.18279, "18710": 2.24586, "18715": 2.17867, "18720": 2.21953, "18725": 2.1832, "18730": 2.18264, "18735": 2.14012, "18740": 2.1935, "18745": 2.21784, "18750": 2.21696, "18755": 2.19825, "18760": 2.22602, "18765": 2.16019, "18770": 2.1525, "18775": 2.21494, "18780": 2.1692, "18785": 2.21257, "18790": 2.20286, "18795": 2.20771, 
"18800": 2.18724, "18805": 2.18303, "18810": 2.20787, "18815": 2.16895, "18820": 2.19057, "18825": 2.21174, "18830": 2.20065, "18835": 2.22428, "18840": 2.14595, "18845": 2.19731, "18850": 2.18839, "18855": 2.17783, "18860": 2.15856, "18865": 2.18496, "18870": 2.16587, "18875": 2.16471, "18880": 2.21495, "18885": 2.17664, "18890": 2.18102, "18895": 2.20104, "18900": 2.17226, "18905": 2.18495, "18910": 2.18054, "18915": 2.17664, "18920": 2.19317, "18925": 2.27301, "18930": 2.1997, "18935": 2.20789, "18940": 2.23627, "18945": 2.26411, "18950": 2.19259, "18955": 2.19151, "18960": 2.20775, "18965": 2.14254, "18970": 2.20857, "18975": 2.22179, "18980": 2.20678, "18985": 2.19418, "18990": 2.23417, "18995": 2.19979, "19000": 2.16653, "19005": 2.16863, "19010": 2.19979, "19015": 2.19755, "19020": 2.20501, "19025": 2.17012, "19030": 2.22165, "19035": 2.16724, "19040": 2.15151, "19045": 2.22142, "19050": 2.16535, "19055": 2.1879, "19060": 2.18755, "19065": 2.19165, "19070": 2.17634, "19075": 2.22016, "19080": 2.18811, "19085": 2.18089, "19090": 2.17865, "19095": 2.16816, "19100": 2.2013, "19105": 2.1957, "19110": 2.17577, "19115": 2.20341, "19120": 2.1949, "19125": 2.21619, "19130": 2.14847, "19135": 2.22854, "19140": 2.21376, "19145": 2.19507, "19150": 2.18544, "19155": 2.21331, "19160": 2.1854, "19165": 2.13253, "19170": 2.17731, "19175": 2.18595, "19180": 2.12856, "19185": 2.19414, "19190": 2.18603, "19195": 2.2094, "19200": 2.20402, "19205": 2.18649, "19210": 2.18845, "19215": 2.13484, "19220": 2.17981, "19225": 2.16393, "19230": 2.22146, "19235": 2.21305, "19240": 2.17333, "19245": 2.17506, "19250": 2.18701, "19255": 2.15193, "19260": 2.21963, "19265": 2.15687, "19270": 2.17342, "19275": 2.18819, "19280": 2.17723, "19285": 2.14162, "19290": 2.17099, "19295": 2.19051, "19300": 2.17155, "19305": 2.22532, "19310": 2.22422, "19315": 2.20814, "19320": 2.16189, "19325": 2.19253, "19330": 2.21961, "19335": 2.21801, "19340": 2.21697, "19345": 2.1943, "19350": 2.16753, "19355": 2.13397, "19360": 2.15572, "19365": 2.18245, "19370": 2.18714, "19375": 2.19589, "19380": 2.16467, "19385": 2.1605, "19390": 2.17804, "19395": 2.18269, "19400": 2.18517, "19405": 2.17832, "19410": 2.16271, "19415": 2.18236, "19420": 2.19293, "19425": 2.17882, "19430": 2.24136, "19435": 2.17018, "19440": 2.16688, "19445": 2.18164, "19450": 2.17701, "19455": 2.17083, "19460": 2.20136, "19465": 2.17077, "19470": 2.18198, "19475": 2.22668, "19480": 2.18634, "19485": 2.17011, "19490": 2.18841, "19495": 2.17851, "19500": 2.20419, "19505": 2.15174, "19510": 2.21393, "19515": 2.17386, "19520": 2.19245, "19525": 2.17231, "19530": 2.16575, "19535": 2.13567, "19540": 2.2199, "19545": 2.18637, "19550": 2.14868, "19555": 2.22243, "19560": 2.18661, "19565": 2.15148, "19570": 2.17873, "19575": 2.18085, "19580": 2.19939, "19585": 2.224, "19590": 2.20848, "19595": 2.19785, "19600": 2.20097, "19605": 2.21629, "19610": 2.19289, "19615": 2.17706, "19620": 2.21452, "19625": 2.19044, "19630": 2.19278, "19635": 2.19104, "19640": 2.17125, "19645": 2.1466, "19650": 2.1739, "19655": 2.15307, "19660": 2.17153, "19665": 2.17369, "19670": 2.1798, "19675": 2.20897, "19680": 2.19325, "19685": 2.17916, "19690": 2.18483, "19695": 2.15717, "19700": 2.16826, "19705": 2.18859, "19710": 2.24608, "19715": 2.15858, "19720": 2.15217, "19725": 2.23082, "19730": 2.17239, "19735": 2.18095, "19740": 2.21812, "19745": 2.20418, "19750": 2.18556, "19755": 2.17184, "19760": 2.15714, "19765": 2.21041, "19770": 2.21192, "19775": 2.17262, "19780": 2.15554, "19785": 2.17331, 
"19790": 2.22597, "19795": 2.1678, "19800": 2.16481, "19805": 2.16696, "19810": 2.21633, "19815": 2.18892, "19820": 2.20088, "19825": 2.21285, "19830": 2.17092, "19835": 2.18387, "19840": 2.2068, "19845": 2.16546, "19850": 2.17315, "19855": 2.17659, "19860": 2.18457, "19865": 2.16971, "19870": 2.20936, "19875": 2.1351, "19880": 2.16784, "19885": 2.15455, "19890": 2.16411, "19895": 2.2063, "19900": 2.18571, "19905": 2.15547, "19910": 2.17521, "19915": 2.19649, "19920": 2.1095, "19925": 2.1661, "19930": 2.14798, "19935": 2.17242, "19940": 2.17752, "19945": 2.18386, "19950": 2.16777, "19955": 2.17065, "19960": 2.16614, "19965": 2.16917, "19970": 2.12978, "19975": 2.17828, "19980": 2.21538, "19985": 2.17195, "19990": 2.17211, "19995": 2.19771, "20000": 2.22259, "20005": 2.19135, "20010": 2.17572, "20015": 2.19049, "20020": 2.16423, "20025": 2.11898, "20030": 2.14692, "20035": 2.1852, "20040": 2.18588, "20045": 2.2037, "20050": 2.19553, "20055": 2.19094, "20060": 2.20188, "20065": 2.16647, "20070": 2.187, "20075": 2.149, "20080": 2.16467, "20085": 2.19462, "20090": 2.17948, "20095": 2.1634, "20100": 2.19092, "20105": 2.14333, "20110": 2.17572, "20115": 2.15837, "20120": 2.18989, "20125": 2.164, "20130": 2.17483, "20135": 2.18314, "20140": 2.16382, "20145": 2.17763, "20150": 2.14485, "20155": 2.23131, "20160": 2.18039, "20165": 2.17739, "20170": 2.16458, "20175": 2.14972, "20180": 2.18565, "20185": 2.18524, "20190": 2.19723, "20195": 2.199, "20200": 2.16005, "20205": 2.18246, "20210": 2.1763, "20215": 2.18848, "20220": 2.17917, "20225": 2.21374, "20230": 2.17049, "20235": 2.20942, "20240": 2.18263, "20245": 2.19206, "20250": 2.14342, "20255": 2.14756, "20260": 2.17306, "20265": 2.18119, "20270": 2.18042, "20275": 2.18282, "20280": 2.14995, "20285": 2.17021, "20290": 2.1918, "20295": 2.19785, "20300": 2.15127, "20305": 2.21705, "20310": 2.11353, "20315": 2.17038, "20320": 2.18489, "20325": 2.18736, "20330": 2.15342, "20335": 2.20822, "20340": 2.17509, "20345": 2.16963, "20350": 2.14708, "20355": 2.19253, "20360": 2.19673, "20365": 2.16962, "20370": 2.20143, "20375": 2.1865, "20380": 2.18418, "20385": 2.17873, "20390": 2.1552, "20395": 2.15574, "20400": 2.26465, "20405": 2.15171, "20410": 2.14402, "20415": 2.19674, "20420": 2.15673, "20425": 2.1794, "20430": 2.16649, "20435": 2.14843, "20440": 2.16088, "20445": 2.18199, "20450": 2.17384, "20455": 2.20986, "20460": 2.17916, "20465": 2.16804, "20470": 2.18128, "20475": 2.15945, "20480": 2.19667, "20485": 2.19926, "20490": 2.19577, "20495": 2.17149, "20500": 2.17356, "20505": 2.15875, "20510": 2.19091, "20515": 2.18938, "20520": 2.15876, "20525": 2.15912, "20530": 2.17432, "20535": 2.19152, "20540": 2.15547, "20545": 2.15529, "20550": 2.15115, "20555": 2.1287, "20560": 2.17195, "20565": 2.18551, "20570": 2.1769, "20575": 2.15744, "20580": 2.19949, "20585": 2.16187, "20590": 2.1907, "20595": 2.13173, "20600": 2.14473, "20605": 2.18185, "20610": 2.1974, "20615": 2.15534, "20620": 2.14395, "20625": 2.14755, "20630": 2.12462, "20635": 2.14155, "20640": 2.19576, "20645": 2.19236, "20650": 2.17817, "20655": 2.20922, "20660": 2.18755, "20665": 2.18604, "20670": 2.1704, "20675": 2.11829, "20680": 2.17534, "20685": 2.19006, "20690": 2.19021, "20695": 2.19909, "20700": 2.13839, "20705": 2.19226, "20710": 2.15496, "20715": 2.17574, "20720": 2.22664, "20725": 2.17967, "20730": 2.15373, "20735": 2.14529, "20740": 2.16178, "20745": 2.1888, "20750": 2.17849, "20755": 2.18429, "20760": 2.16062, "20765": 2.15313, "20770": 2.18372, "20775": 2.23269, "20780": 2.18936, 
"20785": 2.17928, "20790": 2.16285, "20795": 2.17024, "20800": 2.2071, "20805": 2.17369, "20810": 2.20334, "20815": 2.16584, "20820": 2.1984, "20825": 2.18368, "20830": 2.14387, "20835": 2.14174, "20840": 2.19087, "20845": 2.21659, "20850": 2.18207, "20855": 2.20835, "20860": 2.18697, "20865": 2.19269, "20870": 2.20996, "20875": 2.20748, "20880": 2.18805, "20885": 2.18734, "20890": 2.1765, "20895": 2.16403, "20900": 2.17899, "20905": 2.18039, "20910": 2.18115, "20915": 2.17031, "20920": 2.17164, "20925": 2.18163, "20930": 2.19929, "20935": 2.20571, "20940": 2.15989, "20945": 2.14718, "20950": 2.17518, "20955": 2.20232, "20960": 2.17409, "20965": 2.14385, "20970": 2.16402, "20975": 2.16946, "20980": 2.15105, "20985": 2.18513, "20990": 2.16891, "20995": 2.12095, "21000": 2.14746, "21005": 2.19661, "21010": 2.20557, "21015": 2.12038, "21020": 2.15589, "21025": 2.1562, "21030": 2.14543, "21035": 2.14618, "21040": 2.14193, "21045": 2.17062, "21050": 2.12996, "21055": 2.17055, "21060": 2.10018, "21065": 2.17207, "21070": 2.13715, "21075": 2.17278, "21080": 2.17547, "21085": 2.19145, "21090": 2.18042, "21095": 2.20056, "21100": 2.16498, "21105": 2.19902, "21110": 2.17429, "21115": 2.19486, "21120": 2.20955, "21125": 2.1654, "21130": 2.19212, "21135": 2.17887, "21140": 2.151, "21145": 2.18896, "21150": 2.16208, "21155": 2.16891, "21160": 2.15758, "21165": 2.16226, "21170": 2.14699, "21175": 2.16116, "21180": 2.16896, "21185": 2.19734, "21190": 2.14399, "21195": 2.15789, "21200": 2.17332, "21205": 2.1895, "21210": 2.1596, "21215": 2.1478, "21220": 2.1795, "21225": 2.1542, "21230": 2.17985, "21235": 2.1559, "21240": 2.18883, "21245": 2.15705, "21250": 2.15111, "21255": 2.14262, "21260": 2.19007, "21265": 2.21836, "21270": 2.22454, "21275": 2.21595, "21280": 2.19438, "21285": 2.18067, "21290": 2.15244, "21295": 2.1467, "21300": 2.21616, "21305": 2.21866, "21310": 2.17079, "21315": 2.17126, "21320": 2.16219, "21325": 2.13341, "21330": 2.15823, "21335": 2.12625, "21340": 2.18025, "21345": 2.16239, "21350": 2.14034, "21355": 2.18611, "21360": 2.17043, "21365": 2.19081, "21370": 2.14539, "21375": 2.14519, "21380": 2.15706, "21385": 2.18094, "21390": 2.18454, "21395": 2.13377, "21400": 2.18494, "21405": 2.20116, "21410": 2.15651, "21415": 2.19081, "21420": 2.21152, "21425": 2.16994, "21430": 2.20526, "21435": 2.159, "21440": 2.20758, "21445": 2.1736, "21450": 2.16651, "21455": 2.18957, "21460": 2.16195, "21465": 2.16566, "21470": 2.1975, "21475": 2.14588, "21480": 2.19495, "21485": 2.21662, "21490": 2.21172, "21495": 2.18121, "21500": 2.14147, "21505": 2.18323, "21510": 2.17078, "21515": 2.17832, "21520": 2.13394, "21525": 2.15878, "21530": 2.12419, "21535": 2.15899, "21540": 2.18587, "21545": 2.17181, "21550": 2.18615, "21555": 2.1563, "21560": 2.15353, "21565": 2.14207, "21570": 2.12581, "21575": 2.19374, "21580": 2.15933, "21585": 2.18073, "21590": 2.19795, "21595": 2.18966, "21600": 2.18371, "21605": 2.181, "21610": 2.16526, "21615": 2.159, "21620": 2.18486, "21625": 2.17953, "21630": 2.20034, "21635": 2.13888, "21640": 2.18541, "21645": 2.17253, "21650": 2.16653, "21655": 2.17596, "21660": 2.15784, "21665": 2.1081, "21670": 2.17795, "21675": 2.13761, "21680": 2.17134, "21685": 2.14102, "21690": 2.12586, "21695": 2.17611, "21700": 2.1608, "21705": 2.18609, "21710": 2.09566, "21715": 2.14135, "21720": 2.18519, "21725": 2.15484, "21730": 2.15963, "21735": 2.13069, "21740": 2.16537, "21745": 2.18427, "21750": 2.16554, "21755": 2.18043, "21760": 2.13996, "21765": 2.12857, "21770": 2.14724, "21775": 
2.16379, "21780": 2.19571, "21785": 2.17598, "21790": 2.16228, "21795": 2.16302, "21800": 2.13412, "21805": 2.1616, "21810": 2.16725, "21815": 2.18667, "21820": 2.15255, "21825": 2.16829, "21830": 2.12736, "21835": 2.14433, "21840": 2.17052, "21845": 2.15069, "21850": 2.183, "21855": 2.14236, "21860": 2.16634, "21865": 2.17032, "21870": 2.14317, "21875": 2.18213, "21880": 2.14929, "21885": 2.18307, "21890": 2.17907, "21895": 2.16198, "21900": 2.19224, "21905": 2.1778, "21910": 2.17374, "21915": 2.19058, "21920": 2.15995, "21925": 2.19132, "21930": 2.1329, "21935": 2.20153, "21940": 2.15033, "21945": 2.17307, "21950": 2.16835, "21955": 2.12519, "21960": 2.11332, "21965": 2.1774, "21970": 2.15691, "21975": 2.14199, "21980": 2.18103, "21985": 2.13465, "21990": 2.16624, "21995": 2.17288, "22000": 2.20924, "22005": 2.14889, "22010": 2.17998, "22015": 2.17485, "22020": 2.19409, "22025": 2.15338, "22030": 2.16625, "22035": 2.12033, "22040": 2.18571, "22045": 2.20553, "22050": 2.12375, "22055": 2.13949, "22060": 2.17508, "22065": 2.14997, "22070": 2.14293, "22075": 2.17141, "22080": 2.2002, "22085": 2.13377, "22090": 2.16674, "22095": 2.15392, "22100": 2.15323, "22105": 2.18547, "22110": 2.16082, "22115": 2.14089, "22120": 2.16963, "22125": 2.14245, "22130": 2.18409, "22135": 2.15972, "22140": 2.18929, "22145": 2.14756, "22150": 2.18703, "22155": 2.1354, "22160": 2.13521, "22165": 2.17721, "22170": 2.14269, "22175": 2.19442, "22180": 2.12066, "22185": 2.15424, "22190": 2.1671, "22195": 2.17625, "22200": 2.18175, "22205": 2.17906, "22210": 2.10357, "22215": 2.14172, "22220": 2.18948, "22225": 2.12434, "22230": 2.17441, "22235": 2.18095, "22240": 2.13866, "22245": 2.18385, "22250": 2.17661, "22255": 2.13077, "22260": 2.19073, "22265": 2.12244, "22270": 2.14956, "22275": 2.14577, "22280": 2.19075, "22285": 2.2014, "22290": 2.17317, "22295": 2.1785, "22300": 2.14297, "22305": 2.12818, "22310": 2.16103, "22315": 2.15592, "22320": 2.15261, "22325": 2.15948, "22330": 2.14712, "22335": 2.19247, "22340": 2.1708, "22345": 2.1641, "22350": 2.19261, "22355": 2.16329, "22360": 2.18074, "22365": 2.17382, "22370": 2.20586, "22375": 2.15437, "22380": 2.15781, "22385": 2.15625, "22390": 2.17179, "22395": 2.1745, "22400": 2.15848, "22405": 2.10407, "22410": 2.18394, "22415": 2.21506, "22420": 2.17348, "22425": 2.18121, "22430": 2.1312, "22435": 2.16687, "22440": 2.19163, "22445": 2.20224, "22450": 2.14203, "22455": 2.14981, "22460": 2.1556, "22465": 2.15811, "22470": 2.199, "22475": 2.19475, "22480": 2.11661, "22485": 2.14235, "22490": 2.17196, "22495": 2.12415, "22500": 2.09691, "22505": 2.17264, "22510": 2.12113, "22515": 2.14938, "22520": 2.18776, "22525": 2.14929, "22530": 2.13932, "22535": 2.16732, "22540": 2.16838, "22545": 2.20558, "22550": 2.21813, "22555": 2.13818, "22560": 2.1488, "22565": 2.16293, "22570": 2.15008, "22575": 2.15435, "22580": 2.09734, "22585": 2.17506, "22590": 2.13534, "22595": 2.1482, "22600": 2.17668, "22605": 2.13459, "22610": 2.15236, "22615": 2.14881, "22620": 2.11937, "22625": 2.20649, "22630": 2.17995, "22635": 2.19636, "22640": 2.13809, "22645": 2.11629, "22650": 2.17103, "22655": 2.13739, "22660": 2.16412, "22665": 2.13027, "22670": 2.15676, "22675": 2.11237, "22680": 2.16546, "22685": 2.20251, "22690": 2.1654, "22695": 2.16075, "22700": 2.14207, "22705": 2.13206, "22710": 2.1481, "22715": 2.14786, "22720": 2.14323, "22725": 2.13688, "22730": 2.16203, "22735": 2.18988, "22740": 2.15269, "22745": 2.15262, "22750": 2.08825, "22755": 2.16756, "22760": 2.19935, "22765": 2.19342, 
"22770": 2.14896, "22775": 2.19199, "22780": 2.15667, "22785": 2.1571, "22790": 2.185, "22795": 2.12675, "22800": 2.1235, "22805": 2.11236, "22810": 2.15025, "22815": 2.08696, "22820": 2.17199, "22825": 2.14865, "22830": 2.15697, "22835": 2.13356, "22840": 2.19377, "22845": 2.18167, "22850": 2.15834, "22855": 2.13541, "22860": 2.13868, "22865": 2.15571, "22870": 2.14806, "22875": 2.13604, "22880": 2.17783, "22885": 2.15904, "22890": 2.13907, "22895": 2.1363, "22900": 2.16305, "22905": 2.14474, "22910": 2.16855, "22915": 2.17569, "22920": 2.13963, "22925": 2.13295, "22930": 2.1734, "22935": 2.14545, "22940": 2.16214, "22945": 2.17913, "22950": 2.11556, "22955": 2.18342, "22960": 2.1383, "22965": 2.09555, "22970": 2.17536, "22975": 2.17044, "22980": 2.1649, "22985": 2.15634, "22990": 2.11344, "22995": 2.19327, "23000": 2.16674, "23005": 2.11705, "23010": 2.18553, "23015": 2.13792, "23020": 2.14554, "23025": 2.14816, "23030": 2.10438, "23035": 2.18288, "23040": 2.16013, "23045": 2.1673, "23050": 2.15242, "23055": 2.16152, "23060": 2.15024, "23065": 2.15034, "23070": 2.14323, "23075": 2.17544, "23080": 2.15385, "23085": 2.14297, "23090": 2.11258, "23095": 2.15592, "23100": 2.11295, "23105": 2.13461, "23110": 2.18547, "23115": 2.17514, "23120": 2.15036, "23125": 2.12783, "23130": 2.1644, "23135": 2.19198, "23140": 2.13589, "23145": 2.12273, "23150": 2.14584, "23155": 2.16076, "23160": 2.1757, "23165": 2.18315, "23170": 2.12202, "23175": 2.14155, "23180": 2.19116, "23185": 2.14181, "23190": 2.1601, "23195": 2.13825, "23200": 2.1327, "23205": 2.13301, "23210": 2.17792, "23215": 2.18054, "23220": 2.1521, "23225": 2.15778, "23230": 2.1843, "23235": 2.13699, "23240": 2.16733, "23245": 2.1811, "23250": 2.18797, "23255": 2.1868, "23260": 2.20716, "23265": 2.14836, "23270": 2.17168, "23275": 2.1712, "23280": 2.14075, "23285": 2.10014, "23290": 2.20478, "23295": 2.15712, "23300": 2.19423, "23305": 2.2035, "23310": 2.1097, "23315": 2.15599, "23320": 2.17716, "23325": 2.18057, "23330": 2.1202, "23335": 2.18639, "23340": 2.18193, "23345": 2.15854, "23350": 2.14433, "23355": 2.14325, "23360": 2.16877, "23365": 2.10448, "23370": 2.18145, "23375": 2.11632, "23380": 2.10616, "23385": 2.1402, "23390": 2.1324, "23395": 2.17158, "23400": 2.13234, "23405": 2.11517, "23410": 2.13131, "23415": 2.12272, "23420": 2.17984, "23425": 2.20413, "23430": 2.17276, "23435": 2.16818, "23440": 2.13318, "23445": 2.10938, "23450": 2.14138, "23455": 2.16726, "23460": 2.18734, "23465": 2.18453, "23470": 2.10007, "23475": 2.1184, "23480": 2.16668, "23485": 2.14425, "23490": 2.16422, "23495": 2.11277, "23500": 2.17137, "23505": 2.11757, "23510": 2.14684, "23515": 2.13292, "23520": 2.16237, "23525": 2.11689, "23530": 2.14004, "23535": 2.13854, "23540": 2.14544, "23545": 2.10219, "23550": 2.1345, "23555": 2.19432, "23560": 2.15048, "23565": 2.15163, "23570": 2.15036, "23575": 2.14576, "23580": 2.14448, "23585": 2.17197, "23590": 2.14039, "23595": 2.12618, "23600": 2.1136, "23605": 2.18858, "23610": 2.16748, "23615": 2.14388, "23620": 2.1391, "23625": 2.16996, "23630": 2.18884, "23635": 2.17492, "23640": 2.18112, "23645": 2.16249, "23650": 2.14976, "23655": 2.15074, "23660": 2.12918, "23665": 2.16051, "23670": 2.17192, "23675": 2.16559, "23680": 2.13397, "23685": 2.16094, "23690": 2.21142, "23695": 2.16108, "23700": 2.11046, "23705": 2.13534, "23710": 2.17895, "23715": 2.10598, "23720": 2.09095, "23725": 2.1574, "23730": 2.15442, "23735": 2.16361, "23740": 2.13347, "23745": 2.19098, "23750": 2.1459, "23755": 2.16568, "23760": 2.16675, 
"23765": 2.19922, "23770": 2.12379, "23775": 2.12939, "23780": 2.14543, "23785": 2.12743, "23790": 2.16367, "23795": 2.16609, "23800": 2.14011, "23805": 2.17249, "23810": 2.12747, "23815": 2.17207, "23820": 2.1168, "23825": 2.17429, "23830": 2.19388, "23835": 2.16927, "23840": 2.09599, "23845": 2.16059, "23850": 2.19355, "23855": 2.10595, "23860": 2.14511, "23865": 2.1161, "23870": 2.15449, "23875": 2.17456, "23880": 2.14246, "23885": 2.13183, "23890": 2.15155, "23895": 2.12443, "23900": 2.15917, "23905": 2.14196, "23910": 2.14708, "23915": 2.0965, "23920": 2.1141, "23925": 2.13266, "23930": 2.16609, "23935": 2.15952, "23940": 2.16817, "23945": 2.13555, "23950": 2.12819, "23955": 2.14635, "23960": 2.12847, "23965": 2.16675, "23970": 2.1515, "23975": 2.16488, "23980": 2.19132, "23985": 2.12017, "23990": 2.13318, "23995": 2.10591, "24000": 2.15401, "24005": 2.167, "24010": 2.1688, "24015": 2.12676, "24020": 2.13402, "24025": 2.1868, "24030": 2.1294, "24035": 2.18977, "24040": 2.13263, "24045": 2.14216, "24050": 2.16988, "24055": 2.19294, "24060": 2.14424, "24065": 2.11734, "24070": 2.10895, "24075": 2.14179, "24080": 2.12081, "24085": 2.19326, "24090": 2.14769, "24095": 2.13794, "24100": 2.15009, "24105": 2.10703, "24110": 2.09762, "24115": 2.12904, "24120": 2.17451, "24125": 2.11072, "24130": 2.17851, "24135": 2.14138, "24140": 2.14529, "24145": 2.17635, "24150": 2.14218, "24155": 2.14989, "24160": 2.10897, "24165": 2.16899, "24170": 2.17043, "24175": 2.16685, "24180": 2.1382, "24185": 2.16081, "24190": 2.14128, "24195": 2.17079, "24200": 2.20096, "24205": 2.10999, "24210": 2.13881, "24215": 2.19183, "24220": 2.16443, "24225": 2.15401, "24230": 2.16541, "24235": 2.17589, "24240": 2.16822, "24245": 2.14328, "24250": 2.15766, "24255": 2.13724, "24260": 2.18169, "24265": 2.17497, "24270": 2.15869, "24275": 2.13738, "24280": 2.15312, "24285": 2.1077, "24290": 2.15575, "24295": 2.11759, "24300": 2.14629, "24305": 2.13454, "24310": 2.14827, "24315": 2.16026, "24320": 2.15744, "24325": 2.1754, "24330": 2.12545, "24335": 2.12799, "24340": 2.13246, "24345": 2.1078, "24350": 2.20597, "24355": 2.12958, "24360": 2.12521, "24365": 2.1356, "24370": 2.1588, "24375": 2.12859, "24380": 2.16743, "24385": 2.14985, "24390": 2.10475, "24395": 2.17659, "24400": 2.18638, "24405": 2.20649, "24410": 2.11989, "24415": 2.15081, "24420": 2.16732, "24425": 2.12137, "24430": 2.13432, "24435": 2.1609, "24440": 2.11555, "24445": 2.11624, "24450": 2.16917, "24455": 2.12641, "24460": 2.15528, "24465": 2.11728, "24470": 2.09698, "24475": 2.13981, "24480": 2.14105, "24485": 2.16208, "24490": 2.15089, "24495": 2.09613, "24500": 2.17612, "24505": 2.1594, "24510": 2.15856, "24515": 2.10177, "24520": 2.1418, "24525": 2.1134, "24530": 2.19374, "24535": 2.11635, "24540": 2.17787, "24545": 2.14238, "24550": 2.14154, "24555": 2.15151, "24560": 2.18622, "24565": 2.18688, "24570": 2.13941, "24575": 2.12416, "24580": 2.17329, "24585": 2.15738, "24590": 2.17787, "24595": 2.1315, "24600": 2.16161, "24605": 2.16359, "24610": 2.17913, "24615": 2.10296, "24620": 2.1233, "24625": 2.12665, "24630": 2.1815, "24635": 2.10469, "24640": 2.16871, "24645": 2.17451, "24650": 2.1716, "24655": 2.10636, "24660": 2.12636, "24665": 2.16528, "24670": 2.15398, "24675": 2.14853, "24680": 2.14053, "24685": 2.14221, "24690": 2.13528, "24695": 2.17777, "24700": 2.12095, "24705": 2.15338, "24710": 2.12509, "24715": 2.16247, "24720": 2.19683, "24725": 2.1901, "24730": 2.18663, "24735": 2.13017, "24740": 2.13681, "24745": 2.1379, "24750": 2.14318, "24755": 
2.16957, "24760": 2.13569, "24765": 2.12917, "24770": 2.14667, "24775": 2.13125, "24780": 2.09865, "24785": 2.14015, "24790": 2.16259, "24795": 2.14298, "24800": 2.12998, "24805": 2.16805, "24810": 2.14582, "24815": 2.13574, "24820": 2.12324, "24825": 2.16276, "24830": 2.12134, "24835": 2.15192, "24840": 2.1608, "24845": 2.1704, "24850": 2.14543, "24855": 2.15574, "24860": 2.12608, "24865": 2.08968, "24870": 2.14962, "24875": 2.20741, "24880": 2.17034, "24885": 2.1446, "24890": 2.13483, "24895": 2.16291, "24900": 2.13881, "24905": 2.15379, "24910": 2.14479, "24915": 2.1485, "24920": 2.09612, "24925": 2.14086, "24930": 2.14478, "24935": 2.19046, "24940": 2.09384, "24945": 2.13843, "24950": 2.12685, "24955": 2.13196, "24960": 2.12324, "24965": 2.15206, "24970": 2.09293, "24975": 2.14857, "24980": 2.1339, "24985": 2.17232, "24990": 2.14987, "24995": 2.12973, "25000": 2.10435, "25005": 2.09357, "25010": 2.14706, "25015": 2.14054, "25020": 2.10522, "25025": 2.16885, "25030": 2.1246, "25035": 2.18464, "25040": 2.11695, "25045": 2.12248, "25050": 2.13641, "25055": 2.15019, "25060": 2.11537, "25065": 2.16165, "25070": 2.13567, "25075": 2.16587, "25080": 2.11922, "25085": 2.17683, "25090": 2.15822, "25095": 2.11303, "25100": 2.10505, "25105": 2.10257, "25110": 2.13699, "25115": 2.12457, "25120": 2.143, "25125": 2.11944, "25130": 2.11434, "25135": 2.13274, "25140": 2.10557, "25145": 2.1976, "25150": 2.14248, "25155": 2.15787, "25160": 2.12121, "25165": 2.10794, "25170": 2.17274, "25175": 2.11577, "25180": 2.12479, "25185": 2.14694, "25190": 2.14898, "25195": 2.15761, "25200": 2.13233, "25205": 2.17749, "25210": 2.18668, "25215": 2.11655, "25220": 2.15084, "25225": 2.13219, "25230": 2.14518, "25235": 2.14394, "25240": 2.15021, "25245": 2.12404, "25250": 2.13144, "25255": 2.15298, "25260": 2.17727, "25265": 2.11692, "25270": 2.13729, "25275": 2.09799, "25280": 2.15461, "25285": 2.13521, "25290": 2.16666, "25295": 2.1348, "25300": 2.12393, "25305": 2.11563, "25310": 2.14305, "25315": 2.09354, "25320": 2.14537, "25325": 2.13976, "25330": 2.19185, "25335": 2.12382, "25340": 2.13029, "25345": 2.12989, "25350": 2.13317, "25355": 2.11897, "25360": 2.10574, "25365": 2.09716, "25370": 2.11235, "25375": 2.14592, "25380": 2.10804, "25385": 2.17211, "25390": 2.11272, "25395": 2.13843, "25400": 2.11016, "25405": 2.1398, "25410": 2.19878, "25415": 2.17371, "25420": 2.12637, "25425": 2.15446, "25430": 2.09271, "25435": 2.15872, "25440": 2.10738, "25445": 2.16097, "25450": 2.12749, "25455": 2.15135, "25460": 2.1673, "25465": 2.17488, "25470": 2.17794, "25475": 2.12428, "25480": 2.14066, "25485": 2.14446, "25490": 2.1538, "25495": 2.1529, "25500": 2.15963, "25505": 2.16533, "25510": 2.11378, "25515": 2.15574, "25520": 2.12895, "25525": 2.09932, "25530": 2.15712, "25535": 2.13036, "25540": 2.13156, "25545": 2.16028, "25550": 2.14883, "25555": 2.16214, "25560": 2.10835, "25565": 2.13334, "25570": 2.1464, "25575": 2.16733, "25580": 2.12176, "25585": 2.17741, "25590": 2.15612, "25595": 2.16231, "25600": 2.18585, "25605": 2.14822, "25610": 2.10442, "25615": 2.13993, "25620": 2.11933, "25625": 2.16741, "25630": 2.09574, "25635": 2.14949, "25640": 2.12643, "25645": 2.13956, "25650": 2.15796, "25655": 2.18843, "25660": 2.07445, "25665": 2.13539, "25670": 2.11134, "25675": 2.11624, "25680": 2.16446, "25685": 2.1511, "25690": 2.14373, "25695": 2.13596, "25700": 2.13218, "25705": 2.12968, "25710": 2.13193, "25715": 2.07001, "25720": 2.11608, "25725": 2.19392, "25730": 2.14674, "25735": 2.15473, "25740": 2.10619, "25745": 
2.12706, "25750": 2.13107, "25755": 2.15555, "25760": 2.16712, "25765": 2.16067, "25770": 2.11774, "25775": 2.10631, "25780": 2.1086, "25785": 2.12943, "25790": 2.17539, "25795": 2.1453, "25800": 2.12913, "25805": 2.08521, "25810": 2.12881, "25815": 2.09334, "25820": 2.16861, "25825": 2.14129, "25830": 2.10464, "25835": 2.15288, "25840": 2.15839, "25845": 2.15008, "25850": 2.1332, "25855": 2.13209, "25860": 2.15894, "25865": 2.16187, "25870": 2.0936, "25875": 2.0826, "25880": 2.10602, "25885": 2.14421, "25890": 2.11661, "25895": 2.12416, "25900": 2.11348, "25905": 2.12238, "25910": 2.16736, "25915": 2.12668, "25920": 2.09531, "25925": 2.15193, "25930": 2.16504, "25935": 2.11431, "25940": 2.14864, "25945": 2.0886, "25950": 2.13603, "25955": 2.09418, "25960": 2.13488, "25965": 2.19688, "25970": 2.10905, "25975": 2.13463, "25980": 2.14021, "25985": 2.17938, "25990": 2.11152, "25995": 2.16795, "26000": 2.15327, "26005": 2.17271, "26010": 2.12261, "26015": 2.137, "26020": 2.15132, "26025": 2.13951, "26030": 2.16404, "26035": 2.14212, "26040": 2.13661, "26045": 2.17595, "26050": 2.15557, "26055": 2.16907, "26060": 2.15976, "26065": 2.13445, "26070": 2.16143, "26075": 2.16207, "26080": 2.19873, "26085": 2.11934, "26090": 2.10516, "26095": 2.08726, "26100": 2.15977, "26105": 2.08825, "26110": 2.09827, "26115": 2.14424, "26120": 2.10284, "26125": 2.11374, "26130": 2.12997, "26135": 2.15075, "26140": 2.13818, "26145": 2.14626, "26150": 2.17883, "26155": 2.167, "26160": 2.13564, "26165": 2.11923, "26170": 2.10311, "26175": 2.13491, "26180": 2.15958, "26185": 2.15939, "26190": 2.1358, "26195": 2.13262, "26200": 2.09241, "26205": 2.16265, "26210": 2.0909, "26215": 2.12675, "26220": 2.11596, "26225": 2.15902, "26230": 2.15843, "26235": 2.15587, "26240": 2.13702, "26245": 2.15058, "26250": 2.19084, "26255": 2.10782, "26260": 2.14644, "26265": 2.15586, "26270": 2.12802, "26275": 2.11553, "26280": 2.11184, "26285": 2.13101, "26290": 2.13074, "26295": 2.14643, "26300": 2.11749, "26305": 2.13711, "26310": 2.15644, "26315": 2.19018, "26320": 2.1162, "26325": 2.14217, "26330": 2.17273, "26335": 2.14612, "26340": 2.16309, "26345": 2.12852, "26350": 2.13661, "26355": 2.10432, "26360": 2.1028, "26365": 2.16308, "26370": 2.09335, "26375": 2.12078, "26380": 2.10229, "26385": 2.11831, "26390": 2.15569, "26395": 2.12459, "26400": 2.12566, "26405": 2.09963, "26410": 2.1117, "26415": 2.12835, "26420": 2.1178, "26425": 2.13083, "26430": 2.11038, "26435": 2.14141, "26440": 2.16792, "26445": 2.13617, "26450": 2.14333, "26455": 2.09645, "26460": 2.1151, "26465": 2.1084, "26470": 2.1422, "26475": 2.14422, "26480": 2.11823, "26485": 2.13505, "26490": 2.1355, "26495": 2.13332, "26500": 2.15511, "26505": 2.10785, "26510": 2.13351, "26515": 2.16101, "26520": 2.12548, "26525": 2.16357, "26530": 2.14593, "26535": 2.11617, "26540": 2.12466, "26545": 2.15364, "26550": 2.10763, "26555": 2.11674, "26560": 2.10384, "26565": 2.07094, "26570": 2.12749, "26575": 2.16554, "26580": 2.07098, "26585": 2.11946, "26590": 2.10544, "26595": 2.11928, "26600": 2.1305, "26605": 2.16534, "26610": 2.11595, "26615": 2.1372, "26620": 2.13975, "26625": 2.15843, "26630": 2.13784, "26635": 2.16047, "26640": 2.07839, "26645": 2.14863, "26650": 2.14115, "26655": 2.16374, "26660": 2.12907, "26665": 2.14454, "26670": 2.12705, "26675": 2.15327, "26680": 2.13919, "26685": 2.10478, "26690": 2.1574, "26695": 2.13985, "26700": 2.10247, "26705": 2.15762, "26710": 2.14709, "26715": 2.11598, "26720": 2.08168, "26725": 2.09241, "26730": 2.12413, "26735": 2.10568, 
"26740": 2.11447, "26745": 2.13993, "26750": 2.18197, "26755": 2.12108, "26760": 2.10659, "26765": 2.14513, "26770": 2.08892, "26775": 2.1363, "26780": 2.16273, "26785": 2.09543, "26790": 2.11846, "26795": 2.16739, "26800": 2.11016, "26805": 2.12787, "26810": 2.09376, "26815": 2.11138, "26820": 2.11764, "26825": 2.13343, "26830": 2.10619, "26835": 2.13167, "26840": 2.13469, "26845": 2.14576, "26850": 2.09716, "26855": 2.09322, "26860": 2.07271, "26865": 2.11291, "26870": 2.15284, "26875": 2.15043, "26880": 2.12665, "26885": 2.12161, "26890": 2.14176, "26895": 2.15648, "26900": 2.12401, "26905": 2.13433, "26910": 2.13981, "26915": 2.09922, "26920": 2.14698, "26925": 2.16828, "26930": 2.137, "26935": 2.10272, "26940": 2.16999, "26945": 2.12903, "26950": 2.13269, "26955": 2.121, "26960": 2.19543, "26965": 2.11308, "26970": 2.11447, "26975": 2.12547, "26980": 2.11987, "26985": 2.09802, "26990": 2.11394, "26995": 2.11469, "27000": 2.14636, "27005": 2.11919, "27010": 2.11354, "27015": 2.09971, "27020": 2.13207, "27025": 2.11651, "27030": 2.17009, "27035": 2.12943, "27040": 2.13719, "27045": 2.13697, "27050": 2.16108, "27055": 2.11106, "27060": 2.09324, "27065": 2.1507, "27070": 2.13129, "27075": 2.15587, "27080": 2.13769, "27085": 2.11644, "27090": 2.11292, "27095": 2.12995, "27100": 2.13397, "27105": 2.15567, "27110": 2.12476, "27115": 2.12352, "27120": 2.13895, "27125": 2.12776, "27130": 2.11203, "27135": 2.12824, "27140": 2.1436, "27145": 2.13402, "27150": 2.14281, "27155": 2.11657, "27160": 2.10713, "27165": 2.1297, "27170": 2.12816, "27175": 2.15628, "27180": 2.11511, "27185": 2.12136, "27190": 2.13777, "27195": 2.13537, "27200": 2.12053, "27205": 2.10167, "27210": 2.17798, "27215": 2.13626, "27220": 2.12744, "27225": 2.12251, "27230": 2.1377, "27235": 2.09047, "27240": 2.13434, "27245": 2.13488, "27250": 2.10094, "27255": 2.11752, "27260": 2.10219, "27265": 2.14452, "27270": 2.16921, "27275": 2.03241, "27280": 2.11391, "27285": 2.14386, "27290": 2.13816, "27295": 2.15056, "27300": 2.11968, "27305": 2.13699, "27310": 2.09656, "27315": 2.15076, "27320": 2.19696, "27325": 2.1256, "27330": 2.10265, "27335": 2.09854, "27340": 2.09556, "27345": 2.13441, "27350": 2.12368, "27355": 2.15512, "27360": 2.09608, "27365": 2.11945, "27370": 2.12943, "27375": 2.15785, "27380": 2.10777, "27385": 2.13047, "27390": 2.15733, "27395": 2.10188, "27400": 2.132, "27405": 2.17286, "27410": 2.13549, "27415": 2.12185, "27420": 2.12334, "27425": 2.121, "27430": 2.10874, "27435": 2.11908, "27440": 2.15855, "27445": 2.10725, "27450": 2.12655, "27455": 2.10331, "27460": 2.10966, "27465": 2.12478, "27470": 2.09446, "27475": 2.1211, "27480": 2.12645, "27485": 2.12301, "27490": 2.12083, "27495": 2.11038, "27500": 2.11749, "27505": 2.13989, "27510": 2.10466, "27515": 2.11629, "27520": 2.10422, "27525": 2.15031, "27530": 2.19603, "27535": 2.13947, "27540": 2.08011, "27545": 2.13992, "27550": 2.16873, "27555": 2.09336, "27560": 2.12543, "27565": 2.14297, "27570": 2.09288, "27575": 2.13505, "27580": 2.12532, "27585": 2.14523, "27590": 2.12212, "27595": 2.17628, "27600": 2.13532, "27605": 2.12216, "27610": 2.15334, "27615": 2.12445, "27620": 2.11075, "27625": 2.13287, "27630": 2.08689, "27635": 2.16337, "27640": 2.13013, "27645": 2.12553, "27650": 2.13627, "27655": 2.04266, "27660": 2.08297, "27665": 2.1602, "27670": 2.10684, "27675": 2.1262, "27680": 2.14715, "27685": 2.11836, "27690": 2.13179, "27695": 2.11253, "27700": 2.12612, "27705": 2.13023, "27710": 2.14596, "27715": 2.09497, "27720": 2.1433, "27725": 2.10055, "27730": 
2.11761, "27735": 2.11883, "27740": 2.15225, "27745": 2.13154, "27750": 2.12747, "27755": 2.11685, "27760": 2.05544, "27765": 2.13837, "27770": 2.10997, "27775": 2.09799, "27780": 2.10844, "27785": 2.08987, "27790": 2.1032, "27795": 2.08741, "27800": 2.10166, "27805": 2.10036, "27810": 2.12854, "27815": 2.17063, "27820": 2.11633, "27825": 2.17701, "27830": 2.11984, "27835": 2.11893, "27840": 2.14233, "27845": 2.08985, "27850": 2.15274, "27855": 2.16187, "27860": 2.08903, "27865": 2.12946, "27870": 2.09435, "27875": 2.1531, "27880": 2.08699, "27885": 2.12997, "27890": 2.10952, "27895": 2.13973, "27900": 2.14778, "27905": 2.10934, "27910": 2.10281, "27915": 2.11071, "27920": 2.17709, "27925": 2.13798, "27930": 2.13666, "27935": 2.12076, "27940": 2.13338, "27945": 2.14056, "27950": 2.10557, "27955": 2.13195, "27960": 2.10225, "27965": 2.09754, "27970": 2.11647, "27975": 2.10746, "27980": 2.12011, "27985": 2.14646, "27990": 2.1413, "27995": 2.17811, "28000": 2.1314, "28005": 2.07749, "28010": 2.12688, "28015": 2.13352, "28020": 2.11501, "28025": 2.12899, "28030": 2.09194, "28035": 2.11394, "28040": 2.13453, "28045": 2.12279, "28050": 2.09087, "28055": 2.12937, "28060": 2.12627, "28065": 2.11835, "28070": 2.10795, "28075": 2.1146, "28080": 2.14124, "28085": 2.11652, "28090": 2.16472, "28095": 2.08861, "28100": 2.14509, "28105": 2.12194, "28110": 2.09718, "28115": 2.11389, "28120": 2.0898, "28125": 2.11828, "28130": 2.14519, "28135": 2.09323, "28140": 2.15553, "28145": 2.11968, "28150": 2.17904, "28155": 2.12521, "28160": 2.17456, "28165": 2.11462, "28170": 2.17663, "28175": 2.10778, "28180": 2.09907, "28185": 2.11155, "28190": 2.16967, "28195": 2.10239, "28200": 2.1509, "28205": 2.14156, "28210": 2.13654, "28215": 2.12707, "28220": 2.12055, "28225": 2.1414, "28230": 2.15335, "28235": 2.12579, "28240": 2.13916, "28245": 2.1625, "28250": 2.10784, "28255": 2.13903, "28260": 2.10238, "28265": 2.13398, "28270": 2.09502, "28275": 2.14819, "28280": 2.12128, "28285": 2.12122, "28290": 2.13676, "28295": 2.14146, "28300": 2.15105, "28305": 2.09811, "28310": 2.13628, "28315": 2.11465, "28320": 2.11924, "28325": 2.14783, "28330": 2.11677, "28335": 2.0916, "28340": 2.11331, "28345": 2.1251, "28350": 2.13702, "28355": 2.15942, "28360": 2.13698, "28365": 2.1184, "28370": 2.12465, "28375": 2.09383, "28380": 2.11774, "28385": 2.09799, "28390": 2.1252, "28395": 2.12602, "28400": 2.09565, "28405": 2.13893, "28410": 2.08489, "28415": 2.10225, "28420": 2.10432, "28425": 2.13807, "28430": 2.13128, "28435": 2.13485, "28440": 2.10211, "28445": 2.08475, "28450": 2.12016, "28455": 2.10442, "28460": 2.12452, "28465": 2.19395, "28470": 2.08759, "28475": 2.12617, "28480": 2.09238, "28485": 2.09679, "28490": 2.13913, "28495": 2.09852, "28500": 2.08603, "28505": 2.11407, "28510": 2.15275, "28515": 2.16003, "28520": 2.11118, "28525": 2.1268, "28530": 2.11028, "28535": 2.10759, "28540": 2.12775, "28545": 2.11551, "28550": 2.08729, "28555": 2.13286, "28560": 2.12139, "28565": 2.15723, "28570": 2.13229, "28575": 2.09793, "28580": 2.11255, "28585": 2.09776, "28590": 2.11228, "28595": 2.15106, "28600": 2.08807, "28605": 2.0994, "28610": 2.09875, "28615": 2.14202, "28620": 2.05851, "28625": 2.11135, "28630": 2.10297, "28635": 2.10058, "28640": 2.10205, "28645": 2.13776, "28650": 2.13811, "28655": 2.10503, "28660": 2.07244, "28665": 2.1232, "28670": 2.16803, "28675": 2.09758, "28680": 2.08476, "28685": 2.16481, "28690": 2.06783, "28695": 2.10871, "28700": 2.16133, "28705": 2.1134, "28710": 2.11137, "28715": 2.09021, "28720": 
2.11258, "28725": 2.11434, "28730": 2.13214, "28735": 2.11816, "28740": 2.10247, "28745": 2.11114, "28750": 2.07468, "28755": 2.13119, "28760": 2.11446, "28765": 2.10687, "28770": 2.16301, "28775": 2.10227, "28780": 2.14726, "28785": 2.10398, "28790": 2.10568, "28795": 2.12523, "28800": 2.09824, "28805": 2.17833, "28810": 2.13893, "28815": 2.17324, "28820": 2.0888, "28825": 2.11543, "28830": 2.1352, "28835": 2.14193, "28840": 2.11085, "28845": 2.11462, "28850": 2.12083, "28855": 2.16483, "28860": 2.15613, "28865": 2.15849, "28870": 2.15109, "28875": 2.13914, "28880": 2.1481, "28885": 2.14408, "28890": 2.15938, "28895": 2.10983, "28900": 2.11366, "28905": 2.09742, "28910": 2.11281, "28915": 2.10628, "28920": 2.14842, "28925": 2.13504, "28930": 2.08634, "28935": 2.08626, "28940": 2.13889, "28945": 2.10072, "28950": 2.11108, "28955": 2.15122, "28960": 2.12901, "28965": 2.13059, "28970": 2.16957, "28975": 2.10548, "28980": 2.15319, "28985": 2.12574, "28990": 2.13542, "28995": 2.09825, "29000": 2.1028, "29005": 2.13747, "29010": 2.09019, "29015": 2.11671, "29020": 2.08481, "29025": 2.09471, "29030": 2.1372, "29035": 2.07282, "29040": 2.12544, "29045": 2.11082, "29050": 2.07591, "29055": 2.11622, "29060": 2.09999, "29065": 2.1208, "29070": 2.0826, "29075": 2.11017, "29080": 2.10068, "29085": 2.09828, "29090": 2.12191, "29095": 2.10219, "29100": 2.13401, "29105": 2.13268, "29110": 2.12001, "29115": 2.0984, "29120": 2.12283, "29125": 2.17245, "29130": 2.13967, "29135": 2.07514, "29140": 2.09053, "29145": 2.11965, "29150": 2.18151, "29155": 2.08069, "29160": 2.1014, "29165": 2.10841, "29170": 2.13114, "29175": 2.09192, "29180": 2.13509, "29185": 2.0989, "29190": 2.13485, "29195": 2.0279, "29200": 2.15281, "29205": 2.1335, "29210": 2.10181, "29215": 2.15356, "29220": 2.1524, "29225": 2.14482, "29230": 2.08607, "29235": 2.12033, "29240": 2.07726, "29245": 2.10405, "29250": 2.088, "29255": 2.12977, "29260": 2.11043, "29265": 2.15283, "29270": 2.13664, "29275": 2.11606, "29280": 2.12847, "29285": 2.10451, "29290": 2.1106, "29295": 2.12585, "29300": 2.07071, "29305": 2.08518, "29310": 2.13791, "29315": 2.0864, "29320": 2.09068, "29325": 2.06856, "29330": 2.13837, "29335": 2.14277, "29340": 2.1157, "29345": 2.14547, "29350": 2.16397, "29355": 2.14966, "29360": 2.15925, "29365": 2.1184, "29370": 2.13645, "29375": 2.09083, "29380": 2.10097, "29385": 2.12093, "29390": 2.13244, "29395": 2.14497, "29400": 2.0927, "29405": 2.11168, "29410": 2.10738, "29415": 2.12163, "29420": 2.10253, "29425": 2.14952, "29430": 2.11527, "29435": 2.11552, "29440": 2.11887, "29445": 2.11431, "29450": 2.08386, "29455": 2.13564, "29460": 2.0635, "29465": 2.16444, "29470": 2.10065, "29475": 2.14802, "29480": 2.13654, "29485": 2.14372, "29490": 2.11142, "29495": 2.14474, "29500": 2.09001, "29505": 2.15821, "29510": 2.1237, "29515": 2.1195, "29520": 2.12647, "29525": 2.13338, "29530": 2.1181, "29535": 2.11749, "29540": 2.10589, "29545": 2.11648, "29550": 2.13089, "29555": 2.10663, "29560": 2.11522, "29565": 2.08592, "29570": 2.11308, "29575": 2.11835, "29580": 2.08022, "29585": 2.10626, "29590": 2.1278, "29595": 2.12271, "29600": 2.12653, "29605": 2.13661, "29610": 2.13492, "29615": 2.13144, "29620": 2.07803, "29625": 2.09013, "29630": 2.10767, "29635": 2.11476, "29640": 2.09466, "29645": 2.1182, "29650": 2.09812, "29655": 2.11872, "29660": 2.13131, "29665": 2.10578, "29670": 2.10928, "29675": 2.12772, "29680": 2.11267, "29685": 2.08849, "29690": 2.14983, "29695": 2.13866, "29700": 2.09576, "29705": 2.16508, "29710": 2.1166, "29715": 
2.11062, "29720": 2.12093, "29725": 2.10792, "29730": 2.10051, "29735": 2.15089, "29740": 2.07788, "29745": 2.14436, "29750": 2.09191, "29755": 2.09881, "29760": 2.13972, "29765": 2.09636, "29770": 2.06003, "29775": 2.11578, "29780": 2.10598, "29785": 2.14967, "29790": 2.12715, "29795": 2.12597, "29800": 2.12218, "29805": 2.11318, "29810": 2.15568, "29815": 2.18731, "29820": 2.11398, "29825": 2.10028, "29830": 2.11914, "29835": 2.12472, "29840": 2.13184, "29845": 2.1003, "29850": 2.11025, "29855": 2.09331, "29860": 2.10095, "29865": 2.14992, "29870": 2.09271, "29875": 2.12025, "29880": 2.09327, "29885": 2.0996, "29890": 2.14039, "29895": 2.06974, "29900": 2.12662, "29905": 2.10303, "29910": 2.10373, "29915": 2.16119, "29920": 2.13933, "29925": 2.07015, "29930": 2.12685, "29935": 2.11515, "29940": 2.10423, "29945": 2.10339, "29950": 2.08901, "29955": 2.09774, "29960": 2.11152, "29965": 2.10223, "29970": 2.14932, "29975": 2.16021, "29980": 2.09042, "29985": 2.10224, "29990": 2.13962, "29995": 2.09258, "30000": 2.15622, "30005": 2.08743, "30010": 2.136, "30015": 2.12546, "30020": 2.11194, "30025": 2.09911, "30030": 2.06765, "30035": 2.10573, "30040": 2.10262, "30045": 2.10604, "30050": 2.16043, "30055": 2.10087, "30060": 2.10017, "30065": 2.10866, "30070": 2.12975, "30075": 2.10015, "30080": 2.11751, "30085": 2.09866, "30090": 2.13519, "30095": 2.11405, "30100": 2.07186, "30105": 2.1297, "30110": 2.12825, "30115": 2.12531, "30120": 2.09187, "30125": 2.08157, "30130": 2.1259, "30135": 2.11873, "30140": 2.14666, "30145": 2.05404, "30150": 2.09441, "30155": 2.12176, "30160": 2.13043, "30165": 2.13817, "30170": 2.13607, "30175": 2.12793, "30180": 2.14558, "30185": 2.10109, "30190": 2.13299, "30195": 2.09784, "30200": 2.14478, "30205": 2.10991, "30210": 2.12946, "30215": 2.09859, "30220": 2.13763, "30225": 2.13608, "30230": 2.12542, "30235": 2.09788, "30240": 2.11231, "30245": 2.09676, "30250": 2.10801, "30255": 2.09269, "30260": 2.05631, "30265": 2.11163, "30270": 2.09856, "30275": 2.12997, "30280": 2.07079, "30285": 2.1129, "30290": 2.10517, "30295": 2.10039, "30300": 2.15963, "30305": 2.12885, "30310": 2.12417, "30315": 2.10157, "30320": 2.09633, "30325": 2.14341, "30330": 2.07341, "30335": 2.12089, "30340": 2.11683, "30345": 2.13955, "30350": 2.13776, "30355": 2.15892, "30360": 2.10096, "30365": 2.15669, "30370": 2.12742, "30375": 2.10866, "30380": 2.15386, "30385": 2.14929, "30390": 2.1045, "30395": 2.12865, "30400": 2.06761, "30405": 2.11003, "30410": 2.09309, "30415": 2.1407, "30420": 2.11299, "30425": 2.10385, "30430": 2.13428, "30435": 2.10821, "30440": 2.06022, "30445": 2.14225, "30450": 2.11867, "30455": 2.14575, "30460": 2.10349, "30465": 2.10739, "30470": 2.0933, "30475": 2.12685, "30480": 2.10347, "30485": 2.13765, "30490": 2.09866, "30495": 2.10165, "30500": 2.10933, "30505": 2.1304, "30510": 2.14001, "30515": 2.13179, "30520": 2.13186, "30525": 2.11392, "30530": 2.14815, "30535": 2.07652, "30540": 2.09681, "30545": 2.1016, "30550": 2.10978, "30555": 2.13695, "30560": 2.12639, "30565": 2.10599, "30570": 2.12878, "30575": 2.07847, "30580": 2.1126, "30585": 2.10063, "30590": 2.11356, "30595": 2.10578, "30600": 2.12579, "30605": 2.08986, "30610": 2.13389, "30615": 2.15494, "30620": 2.11534, "30625": 2.09054, "30630": 2.13466, "30635": 2.12223, "30640": 2.07513, "30645": 2.10549, "30650": 2.09902, "30655": 2.12648, "30660": 2.10485, "30665": 2.09576, "30670": 2.11644, "30675": 2.10134, "30680": 2.07961, "30685": 2.16753, "30690": 2.08157, "30695": 2.11101, "30700": 2.07134, "30705": 
2.11275, "30710": 2.14944, "30715": 2.12385, "30720": 2.13398, "30725": 2.13852, "30730": 2.13192, "30735": 2.10451, "30740": 2.1478, "30745": 2.13092, "30750": 2.11134, "30755": 2.10136, "30760": 2.11109, "30765": 2.16146, "30770": 2.13114, "30775": 2.11994, "30780": 2.11952, "30785": 2.1445, "30790": 2.11833, "30795": 2.11646, "30800": 2.11826, "30805": 2.12268, "30810": 2.12838, "30815": 2.14698, "30820": 2.12335, "30825": 2.07507, "30830": 2.0969, "30835": 2.11466, "30840": 2.10037, "30845": 2.15332, "30850": 2.10899, "30855": 2.09689, "30860": 2.11314, "30865": 2.12645, "30870": 2.11582, "30875": 2.10414, "30880": 2.09924, "30885": 2.07464, "30890": 2.08663, "30895": 2.11744, "30900": 2.15815, "30905": 2.14438, "30910": 2.11867, "30915": 2.09279, "30920": 2.12617, "30925": 2.12969, "30930": 2.09968, "30935": 2.08897, "30940": 2.1385, "30945": 2.12097, "30950": 2.11995, "30955": 2.08562, "30960": 2.11685, "30965": 2.13223, "30970": 2.10531, "30975": 2.10302, "30980": 2.16691, "30985": 2.12822, "30990": 2.14907, "30995": 2.09178, "31000": 2.09215, "31005": 2.08759, "31010": 2.07956, "31015": 2.11329, "31020": 2.12191, "31025": 2.13872, "31030": 2.1449, "31035": 2.08499, "31040": 2.12471, "31045": 2.16904, "31050": 2.12722, "31055": 2.07873, "31060": 2.07744, "31065": 2.10045, "31070": 2.0697, "31075": 2.12633, "31080": 2.07478, "31085": 2.15597, "31090": 2.04364, "31095": 2.15288, "31100": 2.10453, "31105": 2.16244, "31110": 2.16718, "31115": 2.1131, "31120": 2.11364, "31125": 2.14122, "31130": 2.0819, "31135": 2.14884, "31140": 2.10824, "31145": 2.06394, "31150": 2.1245, "31155": 2.07001, "31160": 2.11064, "31165": 2.10279, "31170": 2.13855, "31175": 2.11543, "31180": 2.08434, "31185": 2.09294, "31190": 2.08449, "31195": 2.15985, "31200": 2.1229, "31205": 2.11467, "31210": 2.06886, "31215": 2.10455, "31220": 2.11482, "31225": 2.09591, "31230": 2.16121, "31235": 2.10766, "31240": 2.1261, "31245": 2.1471, "31250": 2.12931, "31255": 2.06268, "31260": 2.15181, "31265": 2.11476, "31270": 2.09243, "31275": 2.09725, "31280": 2.12048, "31285": 2.12096, "31290": 2.09951, "31295": 2.14249, "31300": 2.13633, "31305": 2.07661, "31310": 2.06981, "31315": 2.12012, "31320": 2.15263, "31325": 2.14257, "31330": 2.12174, "31335": 2.10966, "31340": 2.10281, "31345": 2.07796, "31350": 2.14649, "31355": 2.13664, "31360": 2.13767, "31365": 2.05332, "31370": 2.11678, "31375": 2.09762, "31380": 2.14509, "31385": 2.11871, "31390": 2.11465, "31395": 2.14944, "31400": 2.10943, "31405": 2.09727, "31410": 2.08123, "31415": 2.13425, "31420": 2.11943, "31425": 2.14897, "31430": 2.11796, "31435": 2.09941, "31440": 2.10911, "31445": 2.1101, "31450": 2.07692, "31455": 2.13485, "31460": 2.14559, "31465": 2.13644, "31470": 2.12506, "31475": 2.07291, "31480": 2.07348, "31485": 2.11638, "31490": 2.09758, "31495": 2.10007, "31500": 2.1011, "31505": 2.14747, "31510": 2.11243, "31515": 2.13694, "31520": 2.12992, "31525": 2.11132, "31530": 2.12154, "31535": 2.10059, "31540": 2.15418, "31545": 2.10503, "31550": 2.13489, "31555": 2.10987, "31560": 2.12794, "31565": 2.09897, "31570": 2.10444, "31575": 2.08995, "31580": 2.11667, "31585": 2.12651, "31590": 2.1342, "31595": 2.13534, "31600": 2.1169, "31605": 2.137, "31610": 2.10364, "31615": 2.11559, "31620": 2.10911, "31625": 2.10889, "31630": 2.12106, "31635": 2.08235, "31640": 2.10296, "31645": 2.08332, "31650": 2.11431, "31655": 2.0877, "31660": 2.07994, "31665": 2.108, "31670": 2.06924, "31675": 2.10365, "31680": 2.1913, "31685": 2.06705, "31690": 2.13291, "31695": 2.15599, 
"31700": 2.13149, "31705": 2.12986, "31710": 2.12513, "31715": 2.13602, "31720": 2.10009, "31725": 2.08005, "31730": 2.13922, "31735": 2.10876, "31740": 2.12331, "31745": 2.1278, "31750": 2.09261, "31755": 2.11288, "31760": 2.12478, "31765": 2.14883, "31770": 2.13481, "31775": 2.09659, "31780": 2.11666, "31785": 2.10809, "31790": 2.09087, "31795": 2.07158, "31800": 2.11137, "31805": 2.04777, "31810": 2.10797, "31815": 2.07856, "31820": 2.07249, "31825": 2.12522, "31830": 2.12665, "31835": 2.08099, "31840": 2.12296, "31845": 2.03353, "31850": 2.08825, "31855": 2.1382, "31860": 2.13386, "31865": 2.12644, "31870": 2.0723, "31875": 2.08066, "31880": 2.10302, "31885": 2.11867, "31890": 2.11769, "31895": 2.11032, "31900": 2.10693, "31905": 2.09029, "31910": 2.12866, "31915": 2.14429, "31920": 2.11564, "31925": 2.10896, "31930": 2.12581, "31935": 2.12081, "31940": 2.12228, "31945": 2.06116, "31950": 2.11472, "31955": 2.11639, "31960": 2.11401, "31965": 2.12067, "31970": 2.05798, "31975": 2.05031, "31980": 2.11144, "31985": 2.08599, "31990": 2.14186, "31995": 2.14585, "32000": 2.08673, "32005": 2.08439, "32010": 2.11469, "32015": 2.12663, "32020": 2.12693, "32025": 2.04958, "32030": 2.06508, "32035": 2.07828, "32040": 2.09538, "32045": 2.10335, "32050": 2.10162, "32055": 2.08832, "32060": 2.06102, "32065": 2.15942, "32070": 2.1049, "32075": 2.12572, "32080": 2.09201, "32085": 2.10866, "32090": 2.16367, "32095": 2.1512, "32100": 2.14194, "32105": 2.09624, "32110": 2.11969, "32115": 2.08721, "32120": 2.1257, "32125": 2.10136, "32130": 2.1342, "32135": 2.0692, "32140": 2.05177, "32145": 2.14081, "32150": 2.09256, "32155": 2.08888, "32160": 2.08413, "32165": 2.10108, "32170": 2.11472, "32175": 2.10823, "32180": 2.10288, "32185": 2.09969, "32190": 2.08486, "32195": 2.10878, "32200": 2.09339, "32205": 2.05767, "32210": 2.11287, "32215": 2.12353, "32220": 2.10574, "32225": 2.14189, "32230": 2.08995, "32235": 2.09088, "32240": 2.11115, "32245": 2.1013, "32250": 2.1019, "32255": 2.15538, "32260": 2.05927, "32265": 2.09354, "32270": 2.03812, "32275": 2.14482, "32280": 2.12997, "32285": 2.14706, "32290": 2.08201, "32295": 2.12489, "32300": 2.10829, "32305": 2.09597, "32310": 2.12648, "32315": 2.16849, "32320": 2.09178, "32325": 2.1095, "32330": 2.10497, "32335": 2.12556, "32340": 2.10919, "32345": 2.08849, "32350": 2.09691, "32355": 2.09555, "32360": 2.10528, "32365": 2.08776, "32370": 2.09054, "32375": 2.08805, "32380": 2.12988, "32385": 2.08988, "32390": 2.08509, "32395": 2.10599, "32400": 2.08257, "32405": 2.09098, "32410": 2.12844, "32415": 2.10107, "32420": 2.09143, "32425": 2.09083, "32430": 2.10798, "32435": 2.15468, "32440": 2.12537, "32445": 2.09642, "32450": 2.09853, "32455": 2.06757, "32460": 2.10077, "32465": 2.10724, "32470": 2.08296, "32475": 2.08492, "32480": 2.0962, "32485": 2.12672, "32490": 2.09746, "32495": 2.09069, "32500": 2.11395, "32505": 2.09164, "32510": 2.06234, "32515": 2.13661, "32520": 2.08491, "32525": 2.08801, "32530": 2.12616, "32535": 2.09971, "32540": 2.1177, "32545": 2.11763, "32550": 2.10775, "32555": 2.05844, "32560": 2.11202, "32565": 2.11793, "32570": 2.10981, "32575": 2.11885, "32580": 2.12708, "32585": 2.10396, "32590": 2.11479, "32595": 2.12099, "32600": 2.13031, "32605": 2.09063, "32610": 2.10377, "32615": 2.12161, "32620": 2.12494, "32625": 2.07968, "32630": 2.099, "32635": 2.09951, "32640": 2.08395, "32645": 2.11329, "32650": 2.11403, "32655": 2.09588, "32660": 2.10056, "32665": 2.08225, "32670": 2.1465, "32675": 2.08274, "32680": 2.08232, "32685": 2.06812, 
"32690": 2.1187, "32695": 2.07136, "32700": 2.13178, "32705": 2.10398, "32710": 2.08198, "32715": 2.13933, "32720": 2.06813, "32725": 2.08021, "32730": 2.05908, "32735": 2.10242, "32740": 2.0951, "32745": 2.10934, "32750": 2.06232, "32755": 2.09715, "32760": 2.11361, "32765": 2.11679, "32770": 2.09018, "32775": 2.10469, "32780": 2.11222, "32785": 2.10395, "32790": 2.11905, "32795": 2.10008, "32800": 2.12083, "32805": 2.10521, "32810": 2.09706, "32815": 2.0907, "32820": 2.13039, "32825": 2.09288, "32830": 2.11203, "32835": 2.12887, "32840": 2.09384, "32845": 2.10198, "32850": 2.11557, "32855": 2.11043, "32860": 2.09593, "32865": 2.10767, "32870": 2.10765, "32875": 2.09867, "32880": 2.08688, "32885": 2.05329, "32890": 2.08839, "32895": 2.12283, "32900": 2.11493, "32905": 2.12916, "32910": 2.09969, "32915": 2.10464, "32920": 2.08767, "32925": 2.08309, "32930": 2.10455, "32935": 2.12258, "32940": 2.09811, "32945": 2.08907, "32950": 2.04869, "32955": 2.10887, "32960": 2.08477, "32965": 2.16996, "32970": 2.14265, "32975": 2.07237, "32980": 2.0858, "32985": 2.07527, "32990": 2.16235, "32995": 2.14662, "33000": 2.13607, "33005": 2.10049, "33010": 2.09447, "33015": 2.10633, "33020": 2.11978, "33025": 2.07746, "33030": 2.10888, "33035": 2.08099, "33040": 2.0882, "33045": 2.11811, "33050": 2.10154, "33055": 2.12852, "33060": 2.08085, "33065": 2.10609, "33070": 2.09426, "33075": 2.11928, "33080": 2.0952, "33085": 2.10983, "33090": 2.11723, "33095": 2.0915, "33100": 2.13448, "33105": 2.07672, "33110": 2.10062, "33115": 2.09796, "33120": 2.07644, "33125": 2.10605, "33130": 2.0818, "33135": 2.11739, "33140": 2.10032, "33145": 2.09787, "33150": 2.16651, "33155": 2.17511, "33160": 2.11578, "33165": 2.05569, "33170": 2.1193, "33175": 2.08641, "33180": 2.09801, "33185": 2.10295, "33190": 2.08564, "33195": 2.11865, "33200": 2.05345, "33205": 2.097, "33210": 2.1015, "33215": 2.07681, "33220": 2.09503, "33225": 2.10935, "33230": 2.07394, "33235": 2.09097, "33240": 2.1235, "33245": 2.08088, "33250": 2.12777, "33255": 2.09855, "33260": 2.10612, "33265": 2.07857, "33270": 2.09714, "33275": 2.11119, "33280": 2.09978, "33285": 2.10501, "33290": 2.10184, "33295": 2.12056, "33300": 2.06973, "33305": 2.11186, "33310": 2.06857, "33315": 2.09705, "33320": 2.07737, "33325": 2.12073, "33330": 2.11893, "33335": 2.11394, "33340": 2.09353, "33345": 2.1201, "33350": 2.11285, "33355": 2.09438, "33360": 2.08414, "33365": 2.17143, "33370": 2.06503, "33375": 2.06422, "33380": 2.1217, "33385": 2.08435, "33390": 2.09113, "33395": 2.11579, "33400": 2.08943, "33405": 2.13243, "33410": 2.09104, "33415": 2.09203, "33420": 2.11859, "33425": 2.12304, "33430": 2.1266, "33435": 2.1193, "33440": 2.12998, "33445": 2.13917, "33450": 2.11043, "33455": 2.09325, "33460": 2.08287, "33465": 2.10856, "33470": 2.1357, "33475": 2.14768, "33480": 2.12496, "33485": 2.14337, "33490": 2.09738, "33495": 2.10027, "33500": 2.0658, "33505": 2.12791, "33510": 2.10136, "33515": 2.10285, "33520": 2.13179, "33525": 2.11054, "33530": 2.1405, "33535": 2.11978, "33540": 2.12708, "33545": 2.09059, "33550": 2.11589, "33555": 2.09657, "33560": 2.07905, "33565": 2.08873, "33570": 2.08455, "33575": 2.12315, "33580": 2.1043, "33585": 2.1209, "33590": 2.10003, "33595": 2.13017, "33600": 2.09266, "33605": 2.07747, "33610": 2.08171, "33615": 2.11195, "33620": 2.13737, "33625": 2.09086, "33630": 2.13524, "33635": 2.10571, "33640": 2.12528, "33645": 2.06121, "33650": 2.07356, "33655": 2.08306, "33660": 2.05333, "33665": 2.10012, "33670": 2.14132, "33675": 2.09374, "33680": 
2.09882, "33685": 2.086, "33690": 2.07194, "33695": 2.13282, "33700": 2.1075, "33705": 2.09011, "33710": 2.12947, "33715": 2.08642, "33720": 2.11507, "33725": 2.1084, "33730": 2.09474, "33735": 2.16689, "33740": 2.09387, "33745": 2.09891, "33750": 2.13154, "33755": 2.12317, "33760": 2.11646, "33765": 2.10095, "33770": 2.10796, "33775": 2.10647, "33780": 2.05766, "33785": 2.07398, "33790": 2.08768, "33795": 2.08861, "33800": 2.12196, "33805": 2.1115, "33810": 2.10838, "33815": 2.10788, "33820": 2.13115, "33825": 2.07408, "33830": 2.06611, "33835": 2.09433, "33840": 2.08469, "33845": 2.07548, "33850": 2.0856, "33855": 2.06958, "33860": 2.08755, "33865": 2.11367, "33870": 2.07479, "33875": 2.11239, "33880": 2.09967, "33885": 2.08587, "33890": 2.09619, "33895": 2.12735, "33900": 2.10294, "33905": 2.12417, "33910": 2.09238, "33915": 2.09505, "33920": 2.13419, "33925": 2.09939, "33930": 2.06397, "33935": 2.08524, "33940": 2.11121, "33945": 2.10452, "33950": 2.11117, "33955": 2.092, "33960": 2.05752, "33965": 2.08348, "33970": 2.11157, "33975": 2.09545, "33980": 2.11163, "33985": 2.07859, "33990": 2.11232, "33995": 2.04346, "34000": 2.1087, "34005": 2.07541, "34010": 2.08323, "34015": 2.09058, "34020": 2.11972, "34025": 2.11819, "34030": 2.1064, "34035": 2.08026, "34040": 2.07886, "34045": 2.08617, "34050": 2.11627, "34055": 2.09247, "34060": 2.10499, "34065": 2.10096, "34070": 2.05696, "34075": 2.07213, "34080": 2.1341, "34085": 2.12143, "34090": 2.06874, "34095": 2.07694, "34100": 2.0996, "34105": 2.11835, "34110": 2.11676, "34115": 2.12261, "34120": 2.12263, "34125": 2.12253, "34130": 2.12396, "34135": 2.1168, "34140": 2.06602, "34145": 2.09116, "34150": 2.11843, "34155": 2.12056, "34160": 2.10565, "34165": 2.12256, "34170": 2.06978, "34175": 2.11752, "34180": 2.06668, "34185": 2.12969, "34190": 2.09672, "34195": 2.10946, "34200": 2.09411, "34205": 2.08623, "34210": 2.1324, "34215": 2.10282, "34220": 2.12037, "34225": 2.11871, "34230": 2.09198, "34235": 2.07012, "34240": 2.12905, "34245": 2.08206, "34250": 2.07465, "34255": 2.09288, "34260": 2.05305, "34265": 2.10579, "34270": 2.0718, "34275": 2.11111, "34280": 2.1058, "34285": 2.10759, "34290": 2.11062, "34295": 2.12445, "34300": 2.09777, "34305": 2.08659, "34310": 2.03779, "34315": 2.11349, "34320": 2.1196, "34325": 2.14776, "34330": 2.12778, "34335": 2.09555, "34340": 2.12312, "34345": 2.11145, "34350": 2.08955, "34355": 2.14, "34360": 2.07999, "34365": 2.06828, "34370": 2.08355, "34375": 2.13273, "34380": 2.09028, "34385": 2.12473, "34390": 2.09667, "34395": 2.12365, "34400": 2.07623, "34405": 2.07704, "34410": 2.0929, "34415": 2.05464, "34420": 2.12365, "34425": 2.1295, "34430": 2.08027, "34435": 2.06332, "34440": 2.10157, "34445": 2.08425, "34450": 2.1527, "34455": 2.09914, "34460": 2.12703, "34465": 2.15412, "34470": 2.06558, "34475": 2.11262, "34480": 2.09573, "34485": 2.11266, "34490": 2.09284, "34495": 2.12159, "34500": 2.09226, "34505": 2.14406, "34510": 2.1368, "34515": 2.10479, "34520": 2.08076, "34525": 2.11261, "34530": 2.08151, "34535": 2.12243, "34540": 2.07171, "34545": 2.11465, "34550": 2.09696, "34555": 2.15458, "34560": 2.11308, "34565": 2.066, "34570": 2.12865, "34575": 2.09804, "34580": 2.09711, "34585": 2.09755, "34590": 2.07862, "34595": 2.10405, "34600": 2.04104, "34605": 2.08687, "34610": 2.09513, "34615": 2.08183, "34620": 2.10271, "34625": 2.07921, "34630": 2.07534, "34635": 2.07659, "34640": 2.07195, "34645": 2.12367, "34650": 2.08398, "34655": 2.09421, "34660": 2.10204, "34665": 2.15905, "34670": 2.10259, 
"34675": 2.09871, "34680": 2.10551, "34685": 2.09823, "34690": 2.14197, "34695": 2.13781, "34700": 2.10091, "34705": 2.13404, "34710": 2.06944, "34715": 2.07174, "34720": 2.0714, "34725": 2.04568, "34730": 2.12059, "34735": 2.07441, "34740": 2.1023, "34745": 2.10249, "34750": 2.09657, "34755": 2.06484, "34760": 2.11702, "34765": 2.08166, "34770": 2.12261, "34775": 2.07929, "34780": 2.04963, "34785": 2.11484, "34790": 2.0635, "34795": 2.12424, "34800": 2.09113, "34805": 2.05159, "34810": 2.07084, "34815": 2.11687, "34820": 2.12382, "34825": 2.1138, "34830": 2.13609, "34835": 2.12809, "34840": 2.09667, "34845": 2.07942, "34850": 2.09851, "34855": 2.07607, "34860": 2.12533, "34865": 2.0662, "34870": 2.09964, "34875": 2.10712, "34880": 2.08768, "34885": 2.02455, "34890": 2.07667, "34895": 2.10322, "34900": 2.09454, "34905": 2.08153, "34910": 2.07902, "34915": 2.10368, "34920": 2.07549, "34925": 2.09322, "34930": 2.13986, "34935": 2.09157, "34940": 2.12496, "34945": 2.07261, "34950": 2.07197, "34955": 2.13408, "34960": 2.08789, "34965": 2.10016, "34970": 2.07632, "34975": 2.0732, "34980": 2.12055, "34985": 2.06942, "34990": 2.12281, "34995": 2.07402, "35000": 2.08161, "35005": 2.12517, "35010": 2.08707, "35015": 2.06774, "35020": 2.07909, "35025": 2.10334, "35030": 2.11993, "35035": 2.08837, "35040": 2.08456, "35045": 2.12921, "35050": 2.07783, "35055": 2.08802, "35060": 2.11867, "35065": 2.11289, "35070": 2.0843, "35075": 2.09387, "35080": 2.09857, "35085": 2.10549, "35090": 2.10192, "35095": 2.06221, "35100": 2.10005, "35105": 2.09887, "35110": 2.10133, "35115": 2.07701, "35120": 2.05782, "35125": 2.06345, "35130": 2.07778, "35135": 2.10183, "35140": 2.12714, "35145": 2.0822, "35150": 2.07895, "35155": 2.1279, "35160": 2.05896, "35165": 2.11293, "35170": 2.07969, "35175": 2.10026, "35180": 2.0952, "35185": 2.07863, "35190": 2.06139, "35195": 2.13385, "35200": 2.06915, "35205": 2.11464, "35210": 2.08496, "35215": 2.06172, "35220": 2.12575, "35225": 2.111, "35230": 2.09318, "35235": 2.08549, "35240": 2.07524, "35245": 2.10507, "35250": 2.109, "35255": 2.11265, "35260": 2.10857, "35265": 2.07448, "35270": 2.09931, "35275": 2.0825, "35280": 2.07419, "35285": 2.10035, "35290": 2.07455, "35295": 2.06983, "35300": 2.06679, "35305": 2.09938, "35310": 2.10422, "35315": 2.06878, "35320": 2.04953, "35325": 2.11653, "35330": 2.1043, "35335": 2.09803, "35340": 2.07819, "35345": 2.0726, "35350": 2.1092, "35355": 2.11489, "35360": 2.06956, "35365": 2.07383, "35370": 2.11682, "35375": 2.11121, "35380": 2.10843, "35385": 2.11865, "35390": 2.11384, "35395": 2.06266, "35400": 2.12337, "35405": 2.09942, "35410": 2.10608, "35415": 2.10502, "35420": 2.08812, "35425": 2.07251, "35430": 2.12275, "35435": 2.11596, "35440": 2.12422, "35445": 2.12052, "35450": 2.05928, "35455": 2.08715, "35460": 2.11561, "35465": 2.12743, "35470": 2.0863, "35475": 2.12302, "35480": 2.07149, "35485": 2.07785, "35490": 2.0577, "35495": 2.09769, "35500": 2.07077, "35505": 2.09631, "35510": 2.08541, "35515": 2.04984, "35520": 2.09102, "35525": 2.09077, "35530": 2.07265, "35535": 2.07435, "35540": 2.10895, "35545": 2.09994, "35550": 2.08169, "35555": 2.07284, "35560": 2.08935, "35565": 2.11728, "35570": 2.09653, "35575": 2.13294, "35580": 2.13914, "35585": 2.11168, "35590": 2.12786, "35595": 2.06984, "35600": 2.09514, "35605": 2.12021, "35610": 2.1472, "35615": 2.11701, "35620": 2.093, "35625": 2.08497, "35630": 2.0819, "35635": 2.09095, "35640": 2.10003, "35645": 2.08819, "35650": 2.06722, "35655": 2.10623, "35660": 2.10852, "35665": 
2.11164, "35670": 2.12443, "35675": 2.06648, "35680": 2.09194, "35685": 2.07983, "35690": 2.10798, "35695": 2.12507, "35700": 2.07166, "35705": 2.13831, "35710": 2.05337, "35715": 2.08528, "35720": 2.10476, "35725": 2.1152, "35730": 2.06428, "35735": 2.09543, "35740": 2.10969, "35745": 2.1097, "35750": 2.09364, "35755": 2.08592, "35760": 2.09543, "35765": 2.09389, "35770": 2.11084, "35775": 2.07864, "35780": 2.05584, "35785": 2.07021, "35790": 2.03672, "35795": 2.0745, "35800": 2.08022, "35805": 2.08772, "35810": 2.11164, "35815": 2.06148, "35820": 2.08346, "35825": 2.05977, "35830": 2.06014, "35835": 2.09534, "35840": 2.0867, "35845": 2.1053, "35850": 2.07434, "35855": 2.10055, "35860": 2.08781, "35865": 2.10239, "35870": 2.07893, "35875": 2.11417, "35880": 2.09151, "35885": 2.07252, "35890": 2.12576, "35895": 2.1264, "35900": 2.08915, "35905": 2.06494, "35910": 2.0956, "35915": 2.10222, "35920": 2.07609, "35925": 2.08118, "35930": 2.10999, "35935": 2.12863, "35940": 2.09167, "35945": 2.09712, "35950": 2.10193, "35955": 2.07947, "35960": 2.07912, "35965": 2.08539, "35970": 2.11023, "35975": 2.12456, "35980": 2.094, "35985": 2.0622, "35990": 2.09534, "35995": 2.11673, "36000": 2.10287, "36005": 2.07095, "36010": 2.05573, "36015": 2.08938, "36020": 2.07209, "36025": 2.08268, "36030": 2.1284, "36035": 2.08926, "36040": 2.11288, "36045": 2.06081, "36050": 2.09756, "36055": 2.12933, "36060": 2.1546, "36065": 2.09298, "36070": 2.12603, "36075": 2.07977, "36080": 2.12568, "36085": 2.0861, "36090": 2.08483, "36095": 2.10696, "36100": 2.09783, "36105": 2.10306, "36110": 2.08096, "36115": 2.06535, "36120": 2.09232, "36125": 2.08162, "36130": 2.07532, "36135": 2.0851, "36140": 2.07915, "36145": 2.11538, "36150": 2.11569, "36155": 2.12283, "36160": 2.11629, "36165": 2.09303, "36170": 2.09887, "36175": 2.09583, "36180": 2.06026, "36185": 2.03452, "36190": 2.08712, "36195": 2.10959, "36200": 2.12915, "36205": 2.05435, "36210": 2.07935, "36215": 2.08781, "36220": 2.10906, "36225": 2.08141, "36230": 2.10033, "36235": 2.08477, "36240": 2.09477, "36245": 2.10367, "36250": 2.11012, "36255": 2.08366, "36260": 2.08172, "36265": 2.08496, "36270": 2.06732, "36275": 2.10864, "36280": 2.07951, "36285": 2.09521, "36290": 2.12012, "36295": 2.07472, "36300": 2.1099, "36305": 2.10843, "36310": 2.07119, "36315": 2.13332, "36320": 2.1555, "36325": 2.13188, "36330": 2.1011, "36335": 2.11414, "36340": 2.09427, "36345": 2.11057, "36350": 2.067, "36355": 2.05411, "36360": 2.07054, "36365": 2.12935, "36370": 2.13099, "36375": 2.07972, "36380": 2.08615, "36385": 2.16658, "36390": 2.09973, "36395": 2.10157, "36400": 2.09353, "36405": 2.0694, "36410": 2.11682, "36415": 2.1285, "36420": 2.12065, "36425": 2.08426, "36430": 2.05091, "36435": 2.08378, "36440": 2.11646, "36445": 2.1012, "36450": 2.12919, "36455": 2.06038, "36460": 2.09685, "36465": 2.10589, "36470": 2.07632, "36475": 2.06004, "36480": 2.09961, "36485": 2.09897, "36490": 2.09838, "36495": 2.09728, "36500": 2.07376, "36505": 2.10188, "36510": 2.11481, "36515": 2.08639, "36520": 2.13958, "36525": 2.05713, "36530": 2.09225, "36535": 2.08745, "36540": 2.07421, "36545": 2.12114, "36550": 2.09132, "36555": 2.09697, "36560": 2.05352, "36565": 2.07634, "36570": 2.11984, "36575": 2.12404, "36580": 2.12519, "36585": 2.07661, "36590": 2.07137, "36595": 2.11438, "36600": 2.05036, "36605": 2.12171, "36610": 2.10086, "36615": 2.09199, "36620": 2.14665, "36625": 2.11188, "36630": 2.09104, "36635": 2.10828, "36640": 2.11362, "36645": 2.0845, "36650": 2.09656, "36655": 2.08637, 
"36660": 2.14125, "36665": 2.08249, "36670": 2.11807, "36675": 2.08812, "36680": 2.12513, "36685": 2.09254, "36690": 2.07427, "36695": 2.07667, "36700": 2.13006, "36705": 2.10465, "36710": 2.06568, "36715": 2.05312, "36720": 2.08842, "36725": 2.09498, "36730": 2.11602, "36735": 2.08395, "36740": 2.10951, "36745": 2.09695, "36750": 2.09915, "36755": 2.1189, "36760": 2.09951, "36765": 2.06179, "36770": 2.10977, "36775": 2.0491, "36780": 2.12274, "36785": 2.08508, "36790": 2.08725, "36795": 2.12002, "36800": 2.09886, "36805": 2.1083, "36810": 2.10648, "36815": 2.04437, "36820": 2.08107, "36825": 2.06871, "36830": 2.09764, "36835": 2.08244, "36840": 2.0789, "36845": 2.07433, "36850": 2.12181, "36855": 2.05078, "36860": 2.04515, "36865": 2.07848, "36870": 2.12256, "36875": 2.05564, "36880": 2.07636, "36885": 2.10006, "36890": 2.10677, "36895": 2.0727, "36900": 2.10299, "36905": 2.06993, "36910": 2.10052, "36915": 2.09517, "36920": 2.07176, "36925": 2.07221, "36930": 2.09734, "36935": 2.08515, "36940": 2.09335, "36945": 2.09227, "36950": 2.08887, "36955": 2.08559, "36960": 2.05948, "36965": 2.11228, "36970": 2.09752, "36975": 2.10646, "36980": 2.09711, "36985": 2.07109, "36990": 2.07213, "36995": 2.11111, "37000": 2.03515, "37005": 2.091, "37010": 2.0714, "37015": 2.10348, "37020": 2.1156, "37025": 2.09389, "37030": 2.06017, "37035": 2.06417, "37040": 2.08545, "37045": 2.12967, "37050": 2.09895, "37055": 2.0866, "37060": 2.08701, "37065": 2.10067, "37070": 2.11822, "37075": 2.12123, "37080": 2.1168, "37085": 2.08964, "37090": 2.12193, "37095": 2.06875, "37100": 2.12955, "37105": 2.05539, "37110": 2.0567, "37115": 2.04951, "37120": 2.08362, "37125": 2.12013, "37130": 2.08407, "37135": 2.11813, "37140": 2.06097, "37145": 2.10184, "37150": 2.10117, "37155": 2.0977, "37160": 2.12268, "37165": 2.06477, "37170": 2.09207, "37175": 2.08355, "37180": 2.09747, "37185": 2.09655, "37190": 2.05736, "37195": 2.09123, "37200": 2.09616, "37205": 2.09987, "37210": 2.10101, "37215": 2.06116, "37220": 2.05338, "37225": 2.11214, "37230": 2.0764, "37235": 2.09189, "37240": 2.06936, "37245": 2.06156, "37250": 2.07268, "37255": 2.1101, "37260": 2.08321, "37265": 2.06915, "37270": 2.06397, "37275": 2.05236, "37280": 2.09243, "37285": 2.08728, "37290": 2.10311, "37295": 2.08442, "37300": 2.09026, "37305": 2.10924, "37310": 2.07761, "37315": 2.12088, "37320": 2.11545, "37325": 2.1174, "37330": 2.09102, "37335": 2.05188, "37340": 2.04877, "37345": 2.04926, "37350": 2.11829, "37355": 2.05625, "37360": 2.12483, "37365": 2.08676, "37370": 2.12553, "37375": 2.08036, "37380": 2.0706, "37385": 2.06629, "37390": 2.06713, "37395": 2.0933, "37400": 2.12256, "37405": 2.08148, "37410": 2.08868, "37415": 2.08425, "37420": 2.10506, "37425": 2.10233, "37430": 2.10465, "37435": 2.06194, "37440": 2.07281, "37445": 2.09263, "37450": 2.09093, "37455": 2.11317, "37460": 2.12911, "37465": 2.08723, "37470": 2.12481, "37475": 2.08445, "37480": 2.0524, "37485": 2.0395, "37490": 2.12087, "37495": 2.10065, "37500": 2.11227, "37505": 2.10344, "37510": 2.08236, "37515": 2.09036, "37520": 2.09741, "37525": 2.09166, "37530": 2.10023, "37535": 2.02646, "37540": 2.09674, "37545": 2.07175, "37550": 2.05987, "37555": 2.11303, "37560": 2.1048, "37565": 2.10033, "37570": 2.0643, "37575": 2.08877, "37580": 2.12058, "37585": 2.09784, "37590": 2.05001, "37595": 2.09426, "37600": 2.08837, "37605": 2.10709, "37610": 2.07233, "37615": 2.09735, "37620": 2.09784, "37625": 2.10516, "37630": 2.1403, "37635": 2.09918, "37640": 2.06481, "37645": 2.06001, "37650": 
2.08151, "37655": 2.15867, "37660": 2.10095, "37665": 2.07196, "37670": 2.10277, "37675": 2.08127, "37680": 2.08064, "37685": 2.13582, "37690": 2.11232, "37695": 2.06547, "37700": 2.08692, "37705": 2.07526, "37710": 2.11918, "37715": 2.09237, "37720": 2.0777, "37725": 2.0578, "37730": 2.1079, "37735": 2.10837, "37740": 2.13074, "37745": 2.10751, "37750": 2.09386, "37755": 2.10501, "37760": 2.08702, "37765": 2.11501, "37770": 2.06315, "37775": 2.10839, "37780": 2.09001, "37785": 2.09484, "37790": 2.11733, "37795": 2.0951, "37800": 2.10355, "37805": 2.09057, "37810": 2.08782, "37815": 2.07471, "37820": 2.07209, "37825": 2.12111, "37830": 2.125, "37835": 2.09785, "37840": 2.06153, "37845": 2.1538, "37850": 2.13082, "37855": 2.10515, "37860": 2.1301, "37865": 2.12356, "37870": 2.03644, "37875": 2.07947, "37880": 2.11014, "37885": 2.06099, "37890": 2.08276, "37895": 2.10117, "37900": 2.07783, "37905": 2.07138, "37910": 2.05542, "37915": 2.10188, "37920": 2.13261, "37925": 2.09689, "37930": 2.08342, "37935": 2.09514, "37940": 2.08067, "37945": 2.07724, "37950": 2.11399, "37955": 2.09715, "37960": 2.12081, "37965": 2.10519, "37970": 2.02087, "37975": 2.07143, "37980": 2.09507, "37985": 2.09086, "37990": 2.07494, "37995": 2.0698, "38000": 2.11526, "38005": 2.10069, "38010": 2.09747, "38015": 2.06456, "38020": 2.0791, "38025": 2.09097, "38030": 2.0775, "38035": 2.08946, "38040": 2.09452, "38045": 2.07805, "38050": 2.07073, "38055": 2.12047, "38060": 2.10191, "38065": 2.11263, "38070": 2.10616, "38075": 2.07758, "38080": 2.08181, "38085": 2.09739, "38090": 2.06799, "38095": 2.10721, "38100": 2.06092, "38105": 2.1049, "38110": 2.08666, "38115": 2.09741, "38120": 2.08045, "38125": 2.11963, "38130": 2.06172, "38135": 2.1179, "38140": 2.05247, "38145": 2.06822, "38150": 2.10063, "38155": 2.0926, "38160": 2.1066, "38165": 2.05643, "38170": 2.04088, "38175": 2.07027, "38180": 2.08847, "38185": 2.11659, "38190": 2.09597, "38195": 2.08253, "38200": 2.0787, "38205": 2.10063, "38210": 2.08398, "38215": 2.11553, "38220": 2.10862, "38225": 2.12345, "38230": 2.13856, "38235": 2.07246, "38240": 2.10382, "38245": 2.09801, "38250": 2.06829, "38255": 2.06868, "38260": 2.1025, "38265": 2.12998, "38270": 2.10813, "38275": 2.10104, "38280": 2.05721, "38285": 2.08329, "38290": 2.09038, "38295": 2.06365, "38300": 2.04922, "38305": 2.04132, "38310": 2.0633, "38315": 2.09751, "38320": 2.06969, "38325": 2.05418, "38330": 2.10095, "38335": 2.09755, "38340": 2.05631, "38345": 2.08452, "38350": 2.09091, "38355": 2.08376, "38360": 2.047, "38365": 2.07126, "38370": 2.0862, "38375": 2.12438, "38380": 2.09481, "38385": 2.06201, "38390": 2.1096, "38395": 2.09236, "38400": 2.09491, "38405": 2.081, "38410": 2.09129, "38415": 2.089, "38420": 2.09481, "38425": 2.09031, "38430": 2.05468, "38435": 2.1173, "38440": 2.09489, "38445": 2.0551, "38450": 2.13609, "38455": 2.10022, "38460": 2.05721, "38465": 2.06178, "38470": 2.08427, "38475": 2.09523, "38480": 2.09024, "38485": 2.09864, "38490": 2.09665, "38495": 2.1114, "38500": 2.09412, "38505": 2.11441, "38510": 2.10274, "38515": 2.10382, "38520": 2.09445, "38525": 2.09324, "38530": 2.08901, "38535": 2.09282, "38540": 2.09424, "38545": 2.07802, "38550": 2.06646, "38555": 2.04004, "38560": 2.08084, "38565": 2.0733, "38570": 2.07343, "38575": 2.09389, "38580": 2.11636, "38585": 2.10601, "38590": 2.0892, "38595": 2.07065, "38600": 2.10512, "38605": 2.07219, "38610": 2.09342, "38615": 2.09243, "38620": 2.05121, "38625": 2.05637, "38630": 2.05028, "38635": 2.09093, "38640": 2.13394, "38645": 
2.07473, "38650": 2.03355, "38655": 2.10702, "38660": 2.0738, "38665": 2.11829, "38670": 2.13121, "38675": 2.06826, "38680": 2.09579, "38685": 2.06592, "38690": 2.11228, "38695": 2.09828, "38700": 2.0788, "38705": 2.0869, "38710": 2.06087, "38715": 2.07047, "38720": 2.09941, "38725": 2.05932, "38730": 2.07197, "38735": 2.05226, "38740": 2.07018, "38745": 2.11007, "38750": 2.07505, "38755": 2.06293, "38760": 2.10013, "38765": 2.06815, "38770": 2.07612, "38775": 2.07987, "38780": 2.02329, "38785": 2.10148, "38790": 2.04322, "38795": 2.10669, "38800": 2.09446, "38805": 2.08732, "38810": 2.08675, "38815": 2.07885, "38820": 2.07003, "38825": 2.10008, "38830": 2.11335, "38835": 2.08083, "38840": 2.11331, "38845": 2.09393, "38850": 2.07309, "38855": 2.06076, "38860": 2.0745, "38865": 2.1101, "38870": 2.09176, "38875": 2.07503, "38880": 2.13004, "38885": 2.07879, "38890": 2.11098, "38895": 2.08331, "38900": 2.07691, "38905": 2.10133, "38910": 2.08197, "38915": 2.08977, "38920": 2.05565, "38925": 2.06974, "38930": 2.10454, "38935": 2.08248, "38940": 2.0635, "38945": 2.06709, "38950": 2.09118, "38955": 2.09326, "38960": 2.05319, "38965": 2.08328, "38970": 2.09587, "38975": 2.08746, "38980": 2.06697, "38985": 2.07516, "38990": 2.08925, "38995": 2.07102, "39000": 2.11532, "39005": 2.11975, "39010": 2.11864, "39015": 2.07938, "39020": 2.1107, "39025": 2.11589, "39030": 2.07075, "39035": 2.08439, "39040": 2.07998, "39045": 2.06052, "39050": 2.09406, "39055": 2.09145, "39060": 2.08692, "39065": 2.09109, "39070": 2.09965, "39075": 2.08931, "39080": 2.06756, "39085": 2.12225, "39090": 2.09008, "39095": 2.09804, "39100": 2.09997, "39105": 2.06696, "39110": 2.09504, "39115": 2.06862, "39120": 2.11357, "39125": 2.07932, "39130": 2.05519, "39135": 2.0723, "39140": 2.09757, "39145": 2.16143, "39150": 2.08189, "39155": 2.07226, "39160": 2.08235, "39165": 2.12322, "39170": 2.12644, "39175": 2.07006, "39180": 2.10623, "39185": 2.09325, "39190": 2.10731, "39195": 2.12244, "39200": 2.07373, "39205": 2.12699, "39210": 2.11505, "39215": 2.08672, "39220": 2.12673, "39225": 2.09832, "39230": 2.07273, "39235": 2.08043, "39240": 2.1109, "39245": 2.07979, "39250": 2.1209, "39255": 2.08688, "39260": 2.09799, "39265": 2.13127, "39270": 2.10904, "39275": 2.04967, "39280": 2.08588, "39285": 2.0604, "39290": 2.08873, "39295": 2.12105, "39300": 2.05378, "39305": 2.08792, "39310": 2.11547, "39315": 2.08189, "39320": 2.08249, "39325": 2.03186, "39330": 2.09278, "39335": 2.1087, "39340": 2.09487, "39345": 2.09315, "39350": 2.05994, "39355": 2.08787, "39360": 2.03134, "39365": 2.07225, "39370": 2.07748, "39375": 2.09431, "39380": 2.1059, "39385": 2.08915, "39390": 2.06051, "39395": 2.06111, "39400": 2.08015, "39405": 2.05611, "39410": 2.09257, "39415": 2.07185, "39420": 2.05504, "39425": 2.06359, "39430": 2.04216, "39435": 2.09331, "39440": 2.07374, "39445": 2.06499, "39450": 2.11344, "39455": 2.05284, "39460": 2.08093, "39465": 2.08077, "39470": 2.05357, "39475": 2.08881, "39480": 2.11235, "39485": 2.1341, "39490": 2.07102, "39495": 2.10407, "39500": 2.09776, "39505": 2.10826, "39510": 2.0972, "39515": 2.11632, "39520": 2.07757, "39525": 2.05708, "39530": 2.11908, "39535": 2.13409, "39540": 2.07537, "39545": 2.1046, "39550": 2.07489, "39555": 2.08462, "39560": 2.06153, "39565": 2.08629, "39570": 2.05378, "39575": 2.05634, "39580": 2.06492, "39585": 2.07155, "39590": 2.10126, "39595": 2.09193, "39600": 2.1126, "39605": 2.08671, "39610": 2.08724, "39615": 2.04213, "39620": 2.12752, "39625": 2.10117, "39630": 2.10395, "39635": 
2.10812, "39640": 2.05584, "39645": 2.08013, "39650": 2.06216, "39655": 2.07803, "39660": 2.0694, "39665": 2.07545, "39670": 2.08617, "39675": 2.07101, "39680": 2.0575, "39685": 2.06662, "39690": 2.10767, "39695": 2.10106, "39700": 2.10554, "39705": 2.07799, "39710": 2.0935, "39715": 2.02852, "39720": 2.13013, "39725": 2.08205, "39730": 2.08677, "39735": 2.0972, "39740": 2.10851, "39745": 2.07158, "39750": 2.0797, "39755": 2.09532, "39760": 2.07674, "39765": 2.12822, "39770": 2.09747, "39775": 2.07183, "39780": 2.09854, "39785": 2.06293, "39790": 2.07244, "39795": 2.09582, "39800": 2.08286, "39805": 2.13186, "39810": 2.07299, "39815": 2.08717, "39820": 2.03705, "39825": 2.07924, "39830": 2.09752, "39835": 2.08786, "39840": 2.11721, "39845": 2.09108, "39850": 2.0625, "39855": 2.08103, "39860": 2.08345, "39865": 2.07546, "39870": 2.0745, "39875": 2.09082, "39880": 2.04591, "39885": 2.09487, "39890": 2.07699, "39895": 2.06396, "39900": 2.06194, "39905": 2.09747, "39910": 2.08501, "39915": 2.05207, "39920": 2.06579, "39925": 2.10766, "39930": 2.09453, "39935": 2.07067, "39940": 2.11396, "39945": 2.09924, "39950": 2.036, "39955": 2.10321, "39960": 2.10975, "39965": 2.11333, "39970": 2.0489, "39975": 2.07701, "39980": 2.0942, "39985": 2.06465, "39990": 2.08432, "39995": 2.09743, "40000": 2.07521, "40005": 2.06723, "40010": 2.06827, "40015": 2.11177, "40020": 2.07896, "40025": 2.08987, "40030": 2.09365, "40035": 2.09079, "40040": 2.07763, "40045": 2.07025, "40050": 2.10067, "40055": 2.11724, "40060": 2.11732, "40065": 2.11567, "40070": 2.07725, "40075": 2.06583, "40080": 2.09609, "40085": 2.08411, "40090": 2.09762, "40095": 2.0912, "40100": 2.07425, "40105": 2.02803, "40110": 2.08166, "40115": 2.07494, "40120": 2.05972, "40125": 2.07031, "40130": 2.06044, "40135": 2.08817, "40140": 2.0965, "40145": 2.06654, "40150": 2.11978, "40155": 2.0622, "40160": 2.07017, "40165": 2.08786, "40170": 2.06565, "40175": 2.12574, "40180": 2.10771, "40185": 2.06054, "40190": 2.08717, "40195": 2.0478, "40200": 2.05279, "40205": 2.08094, "40210": 2.0757, "40215": 2.08768, "40220": 2.09209, "40225": 2.05369, "40230": 2.12305, "40235": 2.0695, "40240": 2.06672, "40245": 2.08388, "40250": 2.11529, "40255": 2.11957, "40260": 2.08071, "40265": 2.04839, "40270": 2.0584, "40275": 2.09546, "40280": 2.07869, "40285": 2.0816, "40290": 2.04978, "40295": 2.07016, "40300": 2.05997, "40305": 2.10436, "40310": 2.12074, "40315": 2.08431, "40320": 2.11679, "40325": 2.09128, "40330": 2.10073, "40335": 2.10746, "40340": 2.09643, "40345": 2.08674, "40350": 2.0694, "40355": 2.09714, "40360": 2.09128, "40365": 2.06822, "40370": 2.11276, "40375": 2.09067, "40380": 2.10042, "40385": 2.06772, "40390": 2.10016, "40395": 2.08395, "40400": 2.0429, "40405": 2.05927, "40410": 2.07932, "40415": 2.06262, "40420": 2.05532, "40425": 2.0595, "40430": 2.10117, "40435": 2.04806, "40440": 2.0517, "40445": 2.11063, "40450": 2.06043, "40455": 2.02768, "40460": 2.05387, "40465": 2.08359, "40470": 2.07454, "40475": 2.08493, "40480": 2.07298, "40485": 2.06431, "40490": 2.13243, "40495": 2.07099, "40500": 2.05559, "40505": 2.08095, "40510": 2.02298, "40515": 2.08271, "40520": 2.10101, "40525": 2.11518, "40530": 2.08596, "40535": 2.04383, "40540": 2.08347, "40545": 2.1041, "40550": 2.05512, "40555": 2.08778, "40560": 2.11428, "40565": 2.08381, "40570": 2.05078, "40575": 2.05396, "40580": 2.09486, "40585": 2.06032, "40590": 2.05859, "40595": 2.08629, "40600": 2.05516, "40605": 2.04983, "40610": 2.07032, "40615": 2.09149, "40620": 2.04639, "40625": 2.06841, 
"40630": 2.11253, "40635": 2.09902, "40640": 2.07388, "40645": 2.06356, "40650": 2.09492, "40655": 2.11848, "40660": 2.05862, "40665": 2.06817, "40670": 2.09262, "40675": 2.06981, "40680": 2.08884, "40685": 2.08666, "40690": 2.08388, "40695": 2.07889, "40700": 2.08177, "40705": 2.06512, "40710": 2.06301, "40715": 2.13018, "40720": 2.06011, "40725": 2.02463, "40730": 2.06111, "40735": 2.07952, "40740": 2.1109, "40745": 2.07702, "40750": 2.07414, "40755": 2.07617, "40760": 2.10694, "40765": 2.11711, "40770": 2.03648, "40775": 2.06487, "40780": 2.04815, "40785": 2.08822, "40790": 2.09504, "40795": 2.11305, "40800": 2.05237, "40805": 2.04833, "40810": 2.04664, "40815": 2.08003, "40820": 2.10589, "40825": 2.08047, "40830": 2.07267, "40835": 2.07913, "40840": 2.09978, "40845": 2.10117, "40850": 2.06682, "40855": 2.08535, "40860": 2.06867, "40865": 2.09535, "40870": 2.07194, "40875": 2.05161, "40880": 2.06805, "40885": 2.04521, "40890": 2.11065, "40895": 2.09306, "40900": 2.09264, "40905": 2.05996, "40910": 2.1115, "40915": 2.08232, "40920": 2.09622, "40925": 2.10373, "40930": 2.09601, "40935": 2.07969, "40940": 2.05875, "40945": 2.0677, "40950": 2.08027, "40955": 2.11129, "40960": 2.10161, "40965": 2.0664, "40970": 2.07546, "40975": 2.08033, "40980": 2.08052, "40985": 2.09026, "40990": 2.08912, "40995": 2.10369, "41000": 2.07783, "41005": 2.05937, "41010": 2.07469, "41015": 2.11753, "41020": 2.06114, "41025": 2.12391, "41030": 2.0394, "41035": 2.07773, "41040": 2.07654, "41045": 2.06982, "41050": 2.07617, "41055": 2.11197, "41060": 2.09145, "41065": 2.09338, "41070": 2.13423, "41075": 2.10556, "41080": 2.1123, "41085": 2.05971, "41090": 2.08883, "41095": 2.07244, "41100": 2.07024, "41105": 2.09835, "41110": 2.0794, "41115": 2.08519, "41120": 2.097, "41125": 2.08881, "41130": 2.04732, "41135": 2.06663, "41140": 2.0778, "41145": 2.04605, "41150": 2.04883, "41155": 2.07186, "41160": 2.10608, "41165": 2.06848, "41170": 2.09463, "41175": 2.08138, "41180": 2.07306, "41185": 2.0561, "41190": 2.0371, "41195": 2.04474, "41200": 2.01711, "41205": 2.0241, "41210": 2.09308, "41215": 2.12722, "41220": 2.07565, "41225": 2.11099, "41230": 2.0194, "41235": 2.08669, "41240": 2.07741, "41245": 2.0638, "41250": 2.08246, "41255": 2.06152, "41260": 2.07748, "41265": 2.05244, "41270": 2.09682, "41275": 2.08436, "41280": 2.08515, "41285": 2.11965, "41290": 2.12726, "41295": 2.046, "41300": 2.16642, "41305": 2.03578, "41310": 2.08024, "41315": 2.11608, "41320": 2.10183, "41325": 2.05777, "41330": 2.0755, "41335": 2.09458, "41340": 2.12563, "41345": 2.10566, "41350": 2.08166, "41355": 2.07948, "41360": 2.06233, "41365": 2.11578, "41370": 2.08607, "41375": 2.08049, "41380": 2.04037, "41385": 2.05959, "41390": 2.03085, "41395": 2.1291, "41400": 2.11764, "41405": 2.13089, "41410": 2.06535, "41415": 2.06949, "41420": 2.05864, "41425": 2.08581, "41430": 2.08726, "41435": 2.10066, "41440": 2.07936, "41445": 2.08493, "41450": 2.07473, "41455": 2.05502, "41460": 2.08855, "41465": 2.08158, "41470": 2.11166, "41475": 2.06412, "41480": 2.06894, "41485": 2.07878, "41490": 2.08926, "41495": 2.08409, "41500": 2.09268, "41505": 2.04669, "41510": 2.10691, "41515": 2.08375, "41520": 2.0975, "41525": 2.09304, "41530": 2.03584, "41535": 2.0696, "41540": 2.08633, "41545": 2.08394, "41550": 2.05309, "41555": 2.06369, "41560": 2.10232, "41565": 2.04581, "41570": 2.06086, "41575": 2.09465, "41580": 2.09433, "41585": 2.11181, "41590": 2.06886, "41595": 2.07565, "41600": 2.05748, "41605": 2.09627, "41610": 2.04274, "41615": 2.11271, "41620": 
2.08668, "41625": 2.07236, "41630": 2.0849, "41635": 2.1105, "41640": 2.08501, "41645": 2.11168, "41650": 2.05097, "41655": 2.09208, "41660": 2.09189, "41665": 2.07142, "41670": 2.05392, "41675": 2.07093, "41680": 2.04861, "41685": 2.1092, "41690": 2.08352, "41695": 2.04537, "41700": 2.08579, "41705": 2.09079, "41710": 2.12026, "41715": 2.05082, "41720": 2.0613, "41725": 2.05577, "41730": 2.09414, "41735": 2.10061, "41740": 2.09362, "41745": 2.08972, "41750": 2.11447, "41755": 2.07083, "41760": 2.06711, "41765": 2.08241, "41770": 2.09361, "41775": 2.08662, "41780": 2.08556, "41785": 2.06706, "41790": 2.07352, "41795": 2.1264, "41800": 2.09164, "41805": 2.08992, "41810": 2.09332, "41815": 2.12544, "41820": 2.11513, "41825": 2.03718, "41830": 2.11009, "41835": 2.06097, "41840": 2.0515, "41845": 2.10502, "41850": 2.03837, "41855": 2.11185, "41860": 2.06086, "41865": 2.08357, "41870": 2.09407, "41875": 2.11086, "41880": 2.08595, "41885": 2.08959, "41890": 2.10409, "41895": 2.07105, "41900": 2.13094, "41905": 2.07204, "41910": 2.05197, "41915": 2.10694, "41920": 2.09686, "41925": 2.05514, "41930": 2.10266, "41935": 2.06746, "41940": 2.11029, "41945": 2.06782, "41950": 2.07697, "41955": 2.06612, "41960": 2.1316, "41965": 2.04645, "41970": 2.12311, "41975": 2.05483, "41980": 2.09196, "41985": 2.07516, "41990": 2.05896, "41995": 2.08415, "42000": 2.07727, "42005": 2.11631, "42010": 2.08378, "42015": 2.08886, "42020": 2.0848, "42025": 2.07277, "42030": 2.06814, "42035": 2.10776, "42040": 2.11053, "42045": 2.06458, "42050": 2.04814, "42055": 2.09094, "42060": 2.08379, "42065": 2.05132, "42070": 2.09392, "42075": 2.09736, "42080": 2.08584, "42085": 2.04253, "42090": 2.08226, "42095": 2.03139, "42100": 2.04505, "42105": 2.07419, "42110": 2.09684, "42115": 2.08095, "42120": 2.06644, "42125": 2.05253, "42130": 2.09279, "42135": 2.06633, "42140": 2.06256, "42145": 2.06125, "42150": 2.07379, "42155": 2.07241, "42160": 2.09851, "42165": 2.03681, "42170": 2.0915, "42175": 2.06458, "42180": 2.10411, "42185": 2.08809, "42190": 2.09872, "42195": 2.06446, "42200": 2.06505, "42205": 2.08129, "42210": 2.08146, "42215": 2.07362, "42220": 2.03173, "42225": 2.08049, "42230": 2.08221, "42235": 2.0941, "42240": 2.07647, "42245": 2.10737, "42250": 2.1224, "42255": 2.07454, "42260": 2.0982, "42265": 2.08457, "42270": 2.11095, "42275": 2.07558, "42280": 2.08632, "42285": 2.08493, "42290": 2.05495, "42295": 2.04854, "42300": 2.03862, "42305": 2.05849, "42310": 2.08847, "42315": 2.06757, "42320": 2.09717, "42325": 2.07992, "42330": 2.08035, "42335": 2.0729, "42340": 2.11331, "42345": 2.07578, "42350": 2.08667, "42355": 2.02908, "42360": 2.04926, "42365": 2.08648, "42370": 2.04811, "42375": 2.09155, "42380": 2.08256, "42385": 2.05663, "42390": 2.05127, "42395": 2.11737, "42400": 2.1039, "42405": 2.11023, "42410": 2.08195, "42415": 2.05099, "42420": 2.1011, "42425": 2.11709, "42430": 2.04006, "42435": 2.07518, "42440": 2.08495, "42445": 2.09811, "42450": 2.0935, "42455": 2.11444, "42460": 2.10275, "42465": 2.10125, "42470": 2.05863, "42475": 2.05849, "42480": 2.05206, "42485": 2.07121, "42490": 2.09299, "42495": 2.05519, "42500": 2.09485, "42505": 2.09263, "42510": 2.13337, "42515": 2.07676, "42520": 2.03036, "42525": 2.07408, "42530": 2.09218, "42535": 2.10579, "42540": 2.06986, "42545": 2.10574, "42550": 2.08404, "42555": 2.06301, "42560": 2.03621, "42565": 2.09801, "42570": 2.05373, "42575": 2.0323, "42580": 2.0642, "42585": 2.09332, "42590": 2.06702, "42595": 2.06246, "42600": 2.06236, "42605": 2.01698, "42610": 2.06879, 
"42615": 2.0932, "42620": 2.10128, "42625": 2.08431, "42630": 2.06282, "42635": 2.07749, "42640": 2.08662, "42645": 2.05787, "42650": 2.06674, "42655": 2.09272, "42660": 2.05935, "42665": 2.04434, "42670": 2.10879, "42675": 2.02978, "42680": 2.06435, "42685": 2.07307, "42690": 2.05512, "42695": 2.0846, "42700": 2.0828, "42705": 2.04049, "42710": 2.11276, "42715": 2.04628, "42720": 2.08188, "42725": 2.05324, "42730": 2.03638, "42735": 2.06321, "42740": 2.08624, "42745": 2.03857, "42750": 2.08268, "42755": 2.02567, "42760": 2.07493, "42765": 2.07787, "42770": 2.09778, "42775": 2.10433, "42780": 2.05853, "42785": 2.10957, "42790": 2.04146, "42795": 2.06165, "42800": 2.1108, "42805": 2.08526, "42810": 2.07937, "42815": 2.12394, "42820": 2.08239, "42825": 2.07886, "42830": 2.08011, "42835": 2.11639, "42840": 2.03702, "42845": 2.09389, "42850": 2.10381, "42855": 2.10478, "42860": 2.08529, "42865": 2.10542, "42870": 2.05851, "42875": 2.06922, "42880": 2.07146, "42885": 2.05855, "42890": 2.09885, "42895": 2.07905, "42900": 2.07157, "42905": 2.09338, "42910": 2.10129, "42915": 2.04225, "42920": 2.0546, "42925": 2.10591, "42930": 2.07953, "42935": 2.09883, "42940": 2.05746, "42945": 2.0535, "42950": 2.07217, "42955": 2.04812, "42960": 2.09894, "42965": 2.11716, "42970": 2.03822, "42975": 2.05334, "42980": 2.05379, "42985": 2.08811, "42990": 2.02331, "42995": 2.06938, "43000": 2.04541, "43005": 2.09526, "43010": 2.09456, "43015": 2.05431, "43020": 2.05347, "43025": 2.07269, "43030": 2.05014, "43035": 2.0772, "43040": 2.09618, "43045": 2.03495, "43050": 2.05663, "43055": 2.06955, "43060": 2.08834, "43065": 2.04139, "43070": 2.09847, "43075": 2.07793, "43080": 2.0309, "43085": 2.09143, "43090": 2.11987, "43095": 2.03181, "43100": 2.08002, "43105": 2.06995, "43110": 2.10089, "43115": 2.0775, "43120": 2.09625, "43125": 2.09025, "43130": 2.14327, "43135": 2.07721, "43140": 2.0903, "43145": 2.1086, "43150": 2.10071, "43155": 2.06699, "43160": 2.05259, "43165": 2.06628, "43170": 2.12039, "43175": 2.07804, "43180": 2.09049, "43185": 2.08058, "43190": 2.10871, "43195": 2.11614, "43200": 2.09384, "43205": 2.00272, "43210": 2.08447, "43215": 2.09304, "43220": 2.07609, "43225": 2.13565, "43230": 2.04017, "43235": 2.1159, "43240": 2.0825, "43245": 2.06996, "43250": 2.12441, "43255": 2.05486, "43260": 2.11905, "43265": 2.0491, "43270": 2.09649, "43275": 2.11089, "43280": 2.06879, "43285": 2.07396, "43290": 2.09323, "43295": 2.0828, "43300": 2.09891, "43305": 2.14756, "43310": 2.09709, "43315": 2.08226, "43320": 2.08785, "43325": 2.06884, "43330": 2.0784, "43335": 2.06691, "43340": 2.0708, "43345": 2.03353, "43350": 2.10691, "43355": 2.09758, "43360": 2.07513, "43365": 2.05051, "43370": 2.08542, "43375": 2.09993, "43380": 2.0725, "43385": 2.07552, "43390": 2.096, "43395": 2.09396, "43400": 2.06918, "43405": 2.04564, "43410": 2.07909, "43415": 2.04882, "43420": 2.07359, "43425": 2.06504, "43430": 2.07014, "43435": 2.08343, "43440": 2.07578, "43445": 2.0895, "43450": 2.03515, "43455": 2.10709, "43460": 2.11452, "43465": 2.09672, "43470": 2.0852, "43475": 2.10123, "43480": 2.06906, "43485": 2.08111, "43490": 2.09702, "43495": 2.08488, "43500": 2.06197, "43505": 2.06578, "43510": 2.06196, "43515": 2.05691, "43520": 2.0908, "43525": 2.02225, "43530": 2.08141, "43535": 2.08355, "43540": 2.08509, "43545": 2.05648, "43550": 2.07503, "43555": 2.0382, "43560": 2.06253, "43565": 2.09752, "43570": 2.05879, "43575": 2.07565, "43580": 2.08278, "43585": 2.02094, "43590": 2.09164, "43595": 2.05947, "43600": 2.067, "43605": 
2.10191, "43610": 2.09114, "43615": 2.05198, "43620": 2.09574, "43625": 2.10426, "43630": 2.0629, "43635": 2.04797, "43640": 2.03406, "43645": 2.06327, "43650": 2.10107, "43655": 2.09526, "43660": 2.07678, "43665": 2.06029, "43670": 2.04684, "43675": 2.05489, "43680": 2.08786, "43685": 2.03272, "43690": 2.05408, "43695": 2.09747, "43700": 2.07398, "43705": 2.08312, "43710": 2.06707, "43715": 2.05529, "43720": 2.12282, "43725": 2.10094, "43730": 2.05458, "43735": 2.08041, "43740": 2.04255, "43745": 2.04491, "43750": 2.08325, "43755": 2.05301, "43760": 2.09117, "43765": 2.08989, "43770": 2.07125, "43775": 2.04827, "43780": 2.10948, "43785": 2.06109, "43790": 2.09735, "43795": 2.07735, "43800": 2.11835, "43805": 2.09854, "43810": 2.08527, "43815": 2.07521, "43820": 2.04952, "43825": 2.14479, "43830": 2.05289, "43835": 2.0902, "43840": 2.06252, "43845": 2.09173, "43850": 2.07584, "43855": 2.06608, "43860": 2.06211, "43865": 2.12145, "43870": 2.0709, "43875": 2.03778, "43880": 2.06972, "43885": 2.09863, "43890": 2.0255, "43895": 2.06468, "43900": 2.06246, "43905": 2.05051, "43910": 2.13313, "43915": 2.12407, "43920": 2.0392, "43925": 2.04935, "43930": 2.06119, "43935": 2.0438, "43940": 2.07503, "43945": 2.02661, "43950": 2.10809, "43955": 2.03829, "43960": 2.10008, "43965": 2.06529, "43970": 2.10113, "43975": 2.05672, "43980": 2.03975, "43985": 2.07488, "43990": 2.042, "43995": 2.08402, "44000": 2.05218, "44005": 2.04169, "44010": 2.10207, "44015": 2.04773, "44020": 2.08359, "44025": 2.10455, "44030": 2.07772, "44035": 2.05551, "44040": 2.00403, "44045": 2.1147, "44050": 2.08762, "44055": 2.10282, "44060": 2.11173, "44065": 2.0599, "44070": 2.08615, "44075": 2.09469, "44080": 2.01532, "44085": 2.07617, "44090": 2.04474, "44095": 2.06723, "44100": 2.10088, "44105": 2.0959, "44110": 2.06454, "44115": 2.07323, "44120": 2.05324, "44125": 2.07867, "44130": 2.09096, "44135": 2.0643, "44140": 2.05283, "44145": 2.05784, "44150": 2.10642, "44155": 2.09446, "44160": 2.09732, "44165": 2.08858, "44170": 2.05669, "44175": 2.07081, "44180": 2.07672, "44185": 2.05363, "44190": 2.094, "44195": 2.03724, "44200": 2.05254, "44205": 2.08122, "44210": 2.05692, "44215": 2.06747, "44220": 2.07743, "44225": 2.07091, "44230": 2.08501, "44235": 2.06045, "44240": 2.11381, "44245": 2.0713, "44250": 2.07115, "44255": 2.08257, "44260": 2.04233, "44265": 2.05014, "44270": 2.0861, "44275": 2.04373, "44280": 2.07999, "44285": 2.14218, "44290": 2.09634, "44295": 2.11713, "44300": 2.0691, "44305": 2.05551, "44310": 2.05634, "44315": 2.04966, "44320": 2.04792, "44325": 2.07625, "44330": 2.09783, "44335": 2.08623, "44340": 2.08521, "44345": 2.07719, "44350": 2.06428, "44355": 2.09567, "44360": 2.10429, "44365": 2.08493, "44370": 2.08386, "44375": 2.07831, "44380": 2.087, "44385": 2.06612, "44390": 2.08461, "44395": 2.10809, "44400": 2.06979, "44405": 2.05108, "44410": 2.05885, "44415": 2.08393, "44420": 2.07482, "44425": 2.09838, "44430": 2.05916, "44435": 2.08046, "44440": 2.02904, "44445": 2.11718, "44450": 2.05714, "44455": 2.08166, "44460": 2.04622, "44465": 2.06243, "44470": 2.08856, "44475": 2.09212, "44480": 2.06221, "44485": 2.06245, "44490": 2.06678, "44495": 2.09465, "44500": 2.10372, "44505": 2.06677, "44510": 2.06911, "44515": 2.05753, "44520": 2.04368, "44525": 2.03435, "44530": 2.07064, "44535": 2.04518, "44540": 2.10093, "44545": 2.10663, "44550": 2.05976, "44555": 2.10829, "44560": 2.03605, "44565": 2.07965, "44570": 2.05757, "44575": 2.10991, "44580": 2.08188, "44585": 2.03285, "44590": 2.08163, "44595": 2.06666, 
"44600": 2.08337, "44605": 2.06231, "44610": 2.10609, "44615": 2.05843, "44620": 2.03745, "44625": 2.07639, "44630": 2.09286, "44635": 2.06037, "44640": 2.02758, "44645": 2.05385, "44650": 2.08981, "44655": 2.06535, "44660": 2.08979, "44665": 2.09133, "44670": 2.08933, "44675": 2.09717, "44680": 2.1121, "44685": 2.095, "44690": 2.05908, "44695": 2.07263, "44700": 2.06577, "44705": 2.04948, "44710": 2.09823, "44715": 2.03934, "44720": 2.06422, "44725": 2.07339, "44730": 2.10433, "44735": 2.04394, "44740": 2.03965, "44745": 2.05045, "44750": 2.10723, "44755": 2.10457, "44760": 2.11888, "44765": 2.0766, "44770": 2.07647, "44775": 2.05768, "44780": 2.04813, "44785": 2.08314, "44790": 2.09856, "44795": 2.0793, "44800": 2.05974, "44805": 2.08352, "44810": 2.08892, "44815": 2.08973, "44820": 2.08474, "44825": 2.1245, "44830": 2.05113, "44835": 2.1089, "44840": 2.09168, "44845": 2.1038, "44850": 2.09837, "44855": 2.08212, "44860": 2.06104, "44865": 2.12209, "44870": 2.08029, "44875": 2.09777, "44880": 2.08141, "44885": 2.10496, "44890": 2.05218, "44895": 2.10035, "44900": 2.04776, "44905": 2.04694, "44910": 2.06215, "44915": 2.07526, "44920": 2.084, "44925": 2.0456, "44930": 2.07821, "44935": 2.01492, "44940": 2.10077, "44945": 2.0737, "44950": 2.08742, "44955": 2.01432, "44960": 2.13879, "44965": 2.07409, "44970": 2.0646, "44975": 2.0972, "44980": 2.06388, "44985": 2.07042, "44990": 2.10554, "44995": 2.02853, "45000": 2.08801, "45005": 2.0941, "45010": 2.06404, "45015": 2.07209, "45020": 2.06342, "45025": 2.07728, "45030": 2.0819, "45035": 2.07344, "45040": 2.05743, "45045": 2.09138, "45050": 2.10455, "45055": 2.07455, "45060": 2.08022, "45065": 2.08425, "45070": 2.05936, "45075": 2.04802, "45080": 2.05277, "45085": 2.04905, "45090": 2.06275, "45095": 2.08538, "45100": 2.07313, "45105": 2.12673, "45110": 2.07745, "45115": 2.06564, "45120": 2.12772, "45125": 2.08641, "45130": 2.05348, "45135": 2.05293, "45140": 2.0534, "45145": 2.04879, "45150": 2.0561, "45155": 2.06643, "45160": 2.06994, "45165": 2.10246, "45170": 2.08039, "45175": 2.05209, "45180": 2.08747, "45185": 2.07394, "45190": 2.07883, "45195": 2.09282, "45200": 2.08084, "45205": 2.05668, "45210": 2.10572, "45215": 2.07575, "45220": 2.03622, "45225": 2.08558, "45230": 2.09711, "45235": 2.09498, "45240": 2.08688, "45245": 2.10264, "45250": 2.02219, "45255": 2.04907, "45260": 2.08763, "45265": 2.08158, "45270": 2.05903, "45275": 2.06418, "45280": 2.02097, "45285": 2.06929, "45290": 2.06611, "45295": 2.10997, "45300": 2.04421, "45305": 2.06475, "45310": 2.07163, "45315": 2.09744, "45320": 2.1116, "45325": 2.0484, "45330": 2.04759, "45335": 2.08559, "45340": 2.10533, "45345": 2.0715, "45350": 2.0791, "45355": 2.06905, "45360": 2.03906, "45365": 2.08597, "45370": 2.09546, "45375": 2.08666, "45380": 2.09242, "45385": 2.12296, "45390": 2.11658, "45395": 2.08986, "45400": 2.07393, "45405": 2.06235, "45410": 2.07582, "45415": 2.13112, "45420": 2.08383, "45425": 2.06975, "45430": 2.04517, "45435": 2.05123, "45440": 2.06927, "45445": 2.08501, "45450": 2.09722, "45455": 2.06026, "45460": 2.07708, "45465": 2.05165, "45470": 2.04294, "45475": 2.08992, "45480": 2.09105, "45485": 2.07468, "45490": 2.07207, "45495": 2.03907, "45500": 2.03726, "45505": 2.0896, "45510": 2.10253, "45515": 2.06617, "45520": 2.04821, "45525": 2.05899, "45530": 2.11994, "45535": 2.04749, "45540": 2.0447, "45545": 2.04278, "45550": 2.09447, "45555": 2.05341, "45560": 2.061, "45565": 2.10605, "45570": 2.04545, "45575": 2.06816, "45580": 2.06643, "45585": 2.12068, "45590": 
2.00209, "45595": 2.08217, "45600": 2.06253, "45605": 2.10979, "45610": 2.10011, "45615": 2.0409, "45620": 2.07565, "45625": 2.06663, "45630": 2.06741, "45635": 2.09616, "45640": 2.07856, "45645": 2.09992, "45650": 2.0458, "45655": 2.03759, "45660": 2.07478, "45665": 2.10219, "45670": 2.07445, "45675": 2.03825, "45680": 2.06501, "45685": 2.07789, "45690": 2.04259, "45695": 2.11546, "45700": 2.06559, "45705": 2.0098, "45710": 2.09787, "45715": 2.09825, "45720": 2.03922, "45725": 2.10018, "45730": 2.05809, "45735": 2.0402, "45740": 2.12447, "45745": 2.04989, "45750": 2.03823, "45755": 2.10013, "45760": 2.06021, "45765": 2.04716, "45770": 2.08596, "45775": 2.08062, "45780": 2.03772, "45785": 2.05059, "45790": 2.08635, "45795": 2.0758, "45800": 2.09925, "45805": 2.08028, "45810": 2.05035, "45815": 2.0817, "45820": 2.09902, "45825": 2.05644, "45830": 2.02698, "45835": 2.04883, "45840": 2.06604, "45845": 2.03726, "45850": 2.07867, "45855": 2.07259, "45860": 2.1179, "45865": 2.0899, "45870": 2.02853, "45875": 2.12949, "45880": 2.09498, "45885": 2.05655, "45890": 2.05192, "45895": 2.06602, "45900": 2.0546, "45905": 2.09868, "45910": 2.06697, "45915": 2.10057, "45920": 2.10384, "45925": 2.09352, "45930": 2.03916, "45935": 2.10284, "45940": 2.08458, "45945": 2.09837, "45950": 2.05624, "45955": 2.04797, "45960": 2.07534, "45965": 2.09572, "45970": 2.06532, "45975": 2.07902, "45980": 2.11731, "45985": 2.08386, "45990": 2.07204, "45995": 2.0674, "46000": 2.09019, "46005": 2.1049, "46010": 2.12969, "46015": 2.09216, "46020": 2.02462, "46025": 2.0355, "46030": 2.04716, "46035": 2.07704, "46040": 2.09246, "46045": 2.07831, "46050": 2.0981, "46055": 2.09025, "46060": 2.04597, "46065": 2.06153, "46070": 2.05778, "46075": 2.08314, "46080": 2.07508, "46085": 2.08473, "46090": 2.03511, "46095": 2.06076, "46100": 2.04463, "46105": 2.0904, "46110": 2.06652, "46115": 2.04437, "46120": 2.03645, "46125": 2.08314, "46130": 2.05821, "46135": 2.05172, "46140": 2.08217, "46145": 2.0592, "46150": 2.07065, "46155": 2.06714, "46160": 2.07138, "46165": 2.093, "46170": 2.10496, "46175": 2.07588, "46180": 2.05811, "46185": 2.02649, "46190": 2.08438, "46195": 2.07194, "46200": 2.06133, "46205": 2.0922, "46210": 2.02326, "46215": 2.0331, "46220": 2.06931, "46225": 2.08145, "46230": 2.02333, "46235": 2.07518, "46240": 2.0485, "46245": 2.06612, "46250": 2.06185, "46255": 2.07267, "46260": 2.08573, "46265": 2.08296, "46270": 2.06773, "46275": 2.07061, "46280": 2.09019, "46285": 2.05568, "46290": 2.06804, "46295": 2.05348, "46300": 2.07796, "46305": 2.09611, "46310": 2.04994, "46315": 2.09766, "46320": 2.06083, "46325": 2.07716, "46330": 2.02257, "46335": 2.06048, "46340": 2.09435, "46345": 2.08745, "46350": 2.07569, "46355": 2.05351, "46360": 2.07487, "46365": 2.10591, "46370": 2.05287, "46375": 2.06866, "46380": 2.077, "46385": 2.04518, "46390": 2.0663, "46395": 2.08336, "46400": 2.04188, "46405": 2.05816, "46410": 2.0912, "46415": 2.05146, "46420": 2.05313, "46425": 2.0636, "46430": 2.08189, "46435": 2.04089, "46440": 2.11156, "46445": 2.05714, "46450": 2.04025, "46455": 2.0754, "46460": 2.07492, "46465": 2.04732, "46470": 2.00678, "46475": 2.10071, "46480": 2.09961, "46485": 2.08476, "46490": 2.11393, "46495": 2.11053, "46500": 2.08348, "46505": 2.06537, "46510": 2.08225, "46515": 2.07636, "46520": 2.01477, "46525": 2.08197, "46530": 2.05542, "46535": 2.08383, "46540": 2.0842, "46545": 2.05252, "46550": 2.07063, "46555": 2.09015, "46560": 2.06034, "46565": 2.09515, "46570": 2.07041, "46575": 2.08243, "46580": 2.07078, "46585": 
2.06383, "46590": 2.0575, "46595": 2.06707, "46600": 2.09338, "46605": 2.08313, "46610": 2.10074, "46615": 2.06696, "46620": 2.06346, "46625": 2.0742, "46630": 2.08961, "46635": 2.06404, "46640": 2.10882, "46645": 2.09548, "46650": 2.0315, "46655": 2.03294, "46660": 2.05627, "46665": 2.09792, "46670": 2.08018, "46675": 2.05326, "46680": 2.03919, "46685": 2.03416, "46690": 2.0966, "46695": 2.06342, "46700": 2.04194, "46705": 2.06369, "46710": 2.04756, "46715": 2.10259, "46720": 2.08671, "46725": 2.08154, "46730": 2.06774, "46735": 2.05146, "46740": 2.08543, "46745": 2.06755, "46750": 2.09452, "46755": 2.05541, "46760": 2.0748, "46765": 2.06879, "46770": 2.05388, "46775": 2.09097, "46780": 2.1158, "46785": 2.07168, "46790": 2.09137, "46795": 2.08835, "46800": 2.07168, "46805": 2.06831, "46810": 2.07861, "46815": 2.03244, "46820": 2.05942, "46825": 2.07573, "46830": 2.10137, "46835": 2.07075, "46840": 2.07023, "46845": 2.0531, "46850": 2.11512, "46855": 2.08487, "46860": 2.0528, "46865": 2.10441, "46870": 2.07623, "46875": 2.01856, "46880": 2.127, "46885": 2.09606, "46890": 2.08723, "46895": 2.0553, "46900": 2.0155, "46905": 2.06726, "46910": 2.04132, "46915": 2.05583, "46920": 2.04565, "46925": 2.07442, "46930": 2.08206, "46935": 2.06473, "46940": 2.09229, "46945": 2.07386, "46950": 2.045, "46955": 2.05638, "46960": 2.09247, "46965": 2.07506, "46970": 2.0211, "46975": 2.04253, "46980": 2.10299, "46985": 2.09662, "46990": 2.07573, "46995": 2.03368, "47000": 2.07842, "47005": 2.08545, "47010": 2.06213, "47015": 2.07911, "47020": 2.06348, "47025": 2.07188, "47030": 2.04946, "47035": 2.08951, "47040": 2.05431, "47045": 2.05858, "47050": 2.04545, "47055": 2.03181, "47060": 2.05458, "47065": 2.05621, "47070": 2.05956, "47075": 2.05226, "47080": 2.04924, "47085": 2.1041, "47090": 2.03375, "47095": 2.05156, "47100": 2.12269, "47105": 2.05657, "47110": 2.06389, "47115": 2.09797, "47120": 2.05702, "47125": 2.02921, "47130": 2.06196, "47135": 2.08562, "47140": 2.07668, "47145": 2.09076, "47150": 2.05937, "47155": 2.04254, "47160": 2.08894, "47165": 2.05649, "47170": 2.07317, "47175": 2.08651, "47180": 2.0683, "47185": 2.05248, "47190": 2.07111, "47195": 2.09056, "47200": 2.08893, "47205": 2.05034, "47210": 2.08904, "47215": 2.06855, "47220": 2.04484, "47225": 2.10499, "47230": 2.08558, "47235": 2.04133, "47240": 2.1065, "47245": 2.05831, "47250": 2.11202, "47255": 2.10147, "47260": 2.07668, "47265": 2.08509, "47270": 2.07395, "47275": 2.09122, "47280": 2.0752, "47285": 2.08688, "47290": 2.04261, "47295": 2.05616, "47300": 2.03711, "47305": 2.0709, "47310": 2.06384, "47315": 2.03492, "47320": 2.07477, "47325": 2.05545, "47330": 2.10124, "47335": 2.09145, "47340": 2.07982, "47345": 2.09288, "47350": 2.0487, "47355": 2.06805, "47360": 2.0769, "47365": 2.08248, "47370": 2.09342, "47375": 2.05982, "47380": 2.03198, "47385": 2.05693, "47390": 2.10862, "47395": 2.05386, "47400": 2.08467, "47405": 2.06107, "47410": 2.0811, "47415": 2.06804, "47420": 2.02918, "47425": 2.03377, "47430": 2.08981, "47435": 2.0667, "47440": 2.07669, "47445": 2.1075, "47450": 2.08053, "47455": 2.04104, "47460": 2.09157, "47465": 2.05851, "47470": 2.03062, "47475": 2.09072, "47480": 2.06171, "47485": 2.04097, "47490": 2.06122, "47495": 2.09642, "47500": 2.04635, "47505": 2.07209, "47510": 2.05906, "47515": 2.10972, "47520": 2.10551, "47525": 2.02225, "47530": 2.07964, "47535": 2.04273, "47540": 2.09209, "47545": 2.06555, "47550": 2.08857, "47555": 2.08631, "47560": 2.07088, "47565": 2.06634, "47570": 2.02721, "47575": 2.08064, 
"47580": 2.06473, "47585": 2.06914, "47590": 2.058, "47595": 2.08644, "47600": 2.10089, "47605": 2.09101, "47610": 2.00753, "47615": 2.08539, "47620": 2.03658, "47625": 2.12547, "47630": 2.06468, "47635": 2.06556, "47640": 2.07787, "47645": 2.0455, "47650": 2.0221, "47655": 2.0688, "47660": 2.07418, "47665": 2.07716, "47670": 2.02853, "47675": 2.07255, "47680": 2.02811, "47685": 1.98459, "47690": 2.06548, "47695": 2.08959, "47700": 2.06108, "47705": 2.1038, "47710": 2.07185, "47715": 2.07209, "47720": 2.02981, "47725": 2.09873, "47730": 2.09178, "47735": 2.04858, "47740": 2.05076, "47745": 2.04596, "47750": 2.04726, "47755": 2.07934, "47760": 2.07559, "47765": 2.04988, "47770": 2.04191, "47775": 2.05999, "47780": 2.06844, "47785": 2.06402, "47790": 2.04114, "47795": 2.04555, "47800": 2.08736, "47805": 2.05125, "47810": 2.08599, "47815": 2.11929, "47820": 2.06259, "47825": 2.0542, "47830": 2.0775, "47835": 2.06019, "47840": 2.0722, "47845": 2.05067, "47850": 2.09254, "47855": 2.05332, "47860": 2.06692, "47865": 2.06607, "47870": 2.05541, "47875": 2.08267, "47880": 2.0089, "47885": 2.06309, "47890": 2.11578, "47895": 2.05681, "47900": 2.03787, "47905": 2.05533, "47910": 2.08554, "47915": 2.05282, "47920": 2.0922, "47925": 2.07465, "47930": 2.09985, "47935": 2.07386, "47940": 2.06368, "47945": 2.10431, "47950": 2.05604, "47955": 2.02429, "47960": 2.07655, "47965": 2.12878, "47970": 2.08988, "47975": 2.08998, "47980": 2.07596, "47985": 2.02543, "47990": 2.08119, "47995": 2.03798, "48000": 2.09073, "48005": 2.06363, "48010": 2.08355, "48015": 2.06267, "48020": 2.15051, "48025": 2.03175, "48030": 2.05102, "48035": 2.05075, "48040": 2.05736, "48045": 2.0364, "48050": 2.09401, "48055": 1.99891, "48060": 2.02029, "48065": 2.04717, "48070": 2.09214, "48075": 2.07811, "48080": 2.06617, "48085": 2.03945, "48090": 2.08333, "48095": 2.05095, "48100": 2.08507, "48105": 2.05386, "48110": 2.0589, "48115": 2.07731, "48120": 2.07171, "48125": 2.1315, "48130": 2.04865, "48135": 2.08132, "48140": 2.05724, "48145": 2.02411, "48150": 2.09863, "48155": 2.03817, "48160": 2.06783, "48165": 2.06205, "48170": 2.04473, "48175": 2.06555, "48180": 2.07056, "48185": 2.03146, "48190": 2.02947, "48195": 2.0337, "48200": 2.06037, "48205": 2.06673, "48210": 2.02723, "48215": 2.08578, "48220": 2.0538, "48225": 2.0402, "48230": 2.0709, "48235": 2.11064, "48240": 2.05683, "48245": 2.06692, "48250": 2.07768, "48255": 2.05397, "48260": 2.07683, "48265": 2.0741, "48270": 2.07905, "48275": 2.08839, "48280": 2.02212, "48285": 2.09459, "48290": 2.05977, "48295": 2.06699, "48300": 2.05668, "48305": 2.08727, "48310": 2.06981, "48315": 2.07693, "48320": 2.05844, "48325": 2.08851, "48330": 2.06526, "48335": 2.10306, "48340": 2.08858, "48345": 2.1092, "48350": 2.06617, "48355": 2.08788, "48360": 2.05523, "48365": 2.07336, "48370": 2.05724, "48375": 2.05869, "48380": 2.07958, "48385": 2.06937, "48390": 2.09975, "48395": 2.00954, "48400": 2.05135, "48405": 2.07472, "48410": 2.09014, "48415": 2.06665, "48420": 2.0893, "48425": 2.05897, "48430": 2.06364, "48435": 2.02721, "48440": 2.10688, "48445": 2.05173, "48450": 2.10391, "48455": 2.03746, "48460": 2.06229, "48465": 2.08505, "48470": 2.02336, "48475": 2.06105, "48480": 2.11778, "48485": 2.0544, "48490": 2.08443, "48495": 2.10913, "48500": 2.0613, "48505": 2.08615, "48510": 2.08098, "48515": 2.09145, "48520": 2.03067, "48525": 2.06217, "48530": 2.05103, "48535": 2.07705, "48540": 2.02196, "48545": 2.1023, "48550": 2.05393, "48555": 2.09123, "48560": 2.03268, "48565": 2.04971, "48570": 
2.01218, "48575": 2.07047, "48580": 2.10143, "48585": 2.01648, "48590": 2.06013, "48595": 2.02937, "48600": 2.08719, "48605": 2.11298, "48610": 2.04372, "48615": 2.05542, "48620": 2.06155, "48625": 2.04849, "48630": 2.1229, "48635": 2.08235, "48640": 2.07516, "48645": 2.0641, "48650": 2.07715, "48655": 2.05816, "48660": 2.06965, "48665": 2.05886, "48670": 2.03109, "48675": 2.0439, "48680": 2.05425, "48685": 2.06289, "48690": 2.0869, "48695": 2.08887, "48700": 2.08967, "48705": 2.05651, "48710": 2.0712, "48715": 2.10178, "48720": 2.01382, "48725": 2.06186, "48730": 2.05786, "48735": 2.04178, "48740": 2.07259, "48745": 2.01315, "48750": 2.06112, "48755": 2.0641, "48760": 2.03873, "48765": 2.09267, "48770": 2.08865, "48775": 2.03586, "48780": 2.02137, "48785": 2.07636, "48790": 2.06449, "48795": 2.03541, "48800": 2.05398, "48805": 2.05702, "48810": 2.05872, "48815": 2.04904, "48820": 2.11636, "48825": 2.03303, "48830": 2.04346, "48835": 2.03745, "48840": 2.05786, "48845": 2.08434, "48850": 2.04533, "48855": 2.05397, "48860": 2.0676, "48865": 2.07056, "48870": 2.08622, "48875": 2.05762, "48880": 2.03225, "48885": 2.07277, "48890": 2.03997, "48895": 2.05324, "48900": 2.08818, "48905": 2.07929, "48910": 2.05617, "48915": 2.09766, "48920": 2.09702, "48925": 2.06933, "48930": 2.09205, "48935": 2.07257, "48940": 2.03242, "48945": 2.04497, "48950": 2.07471, "48955": 2.09836, "48960": 2.05545, "48965": 2.03611, "48970": 2.0386, "48975": 2.07243, "48980": 2.12562, "48985": 2.08322, "48990": 2.06782, "48995": 2.09402, "49000": 2.08672, "49005": 2.09746, "49010": 2.0998, "49015": 2.04963, "49020": 2.07056, "49025": 2.07095, "49030": 2.09932, "49035": 2.05387, "49040": 2.10163, "49045": 2.07719, "49050": 2.07229, "49055": 2.06972, "49060": 2.03223, "49065": 2.06462, "49070": 2.05032, "49075": 2.05478, "49080": 2.09173, "49085": 2.11314, "49090": 2.05276, "49095": 2.02906, "49100": 2.06254, "49105": 2.08592, "49110": 2.09428, "49115": 2.13329, "49120": 2.10333, "49125": 2.03984, "49130": 2.09259, "49135": 2.08335, "49140": 2.07296, "49145": 2.10267, "49150": 2.0611, "49155": 2.04247, "49160": 2.01727, "49165": 2.08118, "49170": 2.06391, "49175": 2.0425, "49180": 2.01991, "49185": 2.04682, "49190": 2.04966, "49195": 2.05783, "49200": 2.02989, "49205": 2.05546, "49210": 2.10414, "49215": 2.01991, "49220": 2.04333, "49225": 2.06355, "49230": 2.03692, "49235": 2.07122, "49240": 2.08242, "49245": 2.0506, "49250": 2.01649, "49255": 2.01834, "49260": 2.05235, "49265": 2.06911, "49270": 2.05041, "49275": 2.07652, "49280": 2.06235, "49285": 2.04883, "49290": 2.06607, "49295": 2.07886, "49300": 2.03359, "49305": 2.07044, "49310": 2.05328, "49315": 2.04207, "49320": 2.08937, "49325": 2.08625, "49330": 2.07249, "49335": 2.02371, "49340": 2.03636, "49345": 2.07074, "49350": 2.04695, "49355": 2.08067, "49360": 2.0523, "49365": 2.05371, "49370": 2.04518, "49375": 2.11473, "49380": 2.08241, "49385": 2.08627, "49390": 2.07317, "49395": 2.06566, "49400": 2.07894, "49405": 2.05347, "49410": 2.08932, "49415": 2.08469, "49420": 2.04965, "49425": 2.05214, "49430": 2.09225, "49435": 2.12042, "49440": 2.11588, "49445": 2.05941, "49450": 2.02414, "49455": 2.05852, "49460": 2.08532, "49465": 1.99333, "49470": 2.1046, "49475": 2.06667, "49480": 2.07691, "49485": 2.04889, "49490": 2.04583, "49495": 2.06996, "49500": 2.03251, "49505": 2.08813, "49510": 2.05048, "49515": 2.07205, "49520": 2.10324, "49525": 2.06949, "49530": 2.13429, "49535": 2.07099, "49540": 2.04001, "49545": 2.03286, "49550": 2.09417, "49555": 2.04296, "49560": 
2.0934, "49565": 2.10931, "49570": 2.06312, "49575": 2.10002, "49580": 2.11441, "49585": 2.05941, "49590": 2.07021, "49595": 2.07772, "49600": 2.09127, "49605": 2.07442, "49610": 2.08542, "49615": 2.05362, "49620": 2.09164, "49625": 2.08698, "49630": 2.09333, "49635": 2.0642, "49640": 2.06978, "49645": 2.01434, "49650": 2.07195, "49655": 2.11787, "49660": 2.10796, "49665": 2.06508, "49670": 2.0704, "49675": 2.05614, "49680": 2.04016, "49685": 2.04601, "49690": 2.05876, "49695": 2.10628, "49700": 2.01635, "49705": 2.09286, "49710": 2.08524, "49715": 2.09057, "49720": 2.05961, "49725": 2.08612, "49730": 2.08945, "49735": 2.00684, "49740": 2.06896, "49745": 2.06017, "49750": 2.07966, "49755": 2.09462, "49760": 1.98678, "49765": 2.04673, "49770": 2.03504, "49775": 2.05633, "49780": 2.05411, "49785": 2.0309, "49790": 2.03162, "49795": 2.09549, "49800": 2.0576, "49805": 2.00522, "49810": 2.07599, "49815": 2.02073, "49820": 2.04299, "49825": 2.03176, "49830": 2.05753, "49835": 2.048, "49840": 2.06298, "49845": 2.11889, "49850": 2.1194, "49855": 2.06678, "49860": 2.0448, "49865": 2.09231, "49870": 2.0614, "49875": 2.04095, "49880": 2.06546, "49885": 2.08336, "49890": 2.0866, "49895": 2.03543, "49900": 2.07771, "49905": 2.07433, "49910": 2.0373, "49915": 2.06055, "49920": 2.05709, "49925": 2.02624, "49930": 2.10562, "49935": 2.06985, "49940": 2.04436, "49945": 2.08869, "49950": 2.04819, "49955": 2.09477, "49960": 2.05728, "49965": 2.06981, "49970": 2.09713, "49975": 2.078, "49980": 2.12038, "49985": 2.06359, "49990": 2.08591, "49995": 2.06745, "50000": 2.05934, "50005": 2.06226, "50010": 2.09103, "50015": 2.054, "50020": 2.02862, "50025": 2.12516, "50030": 2.02377, "50035": 2.04228, "50040": 2.0824, "50045": 2.07415, "50050": 2.09662, "50055": 2.07282, "50060": 2.05524, "50065": 2.06806, "50070": 2.03066, "50075": 2.05531, "50080": 2.09128, "50085": 2.08069, "50090": 2.04726, "50095": 2.07684, "50100": 2.0323, "50105": 2.08824, "50110": 2.05321, "50115": 2.07365, "50120": 2.06518, "50125": 2.05811, "50130": 2.0173, "50135": 2.04403, "50140": 2.10998, "50145": 2.04857, "50150": 2.08173, "50155": 2.07202, "50160": 2.0334, "50165": 2.061, "50170": 2.06375, "50175": 2.08796, "50180": 2.06559, "50185": 2.06731, "50190": 2.02669, "50195": 2.05701, "50200": 2.04007, "50205": 2.07659, "50210": 2.07182, "50215": 2.08771, "50220": 2.07035, "50225": 2.05351, "50230": 2.0839, "50235": 2.09183, "50240": 2.07951, "50245": 2.06167, "50250": 2.05103, "50255": 2.06807, "50260": 2.02384, "50265": 2.0312, "50270": 1.98117, "50275": 2.0497, "50280": 2.06458, "50285": 2.08158, "50290": 2.06404, "50295": 2.09946, "50300": 2.05794, "50305": 2.08177, "50310": 2.04371, "50315": 2.0701, "50320": 2.07706, "50325": 2.05825, "50330": 2.07634, "50335": 2.08281, "50340": 2.07072, "50345": 2.05896, "50350": 2.04271, "50355": 2.0744, "50360": 2.06672, "50365": 2.05688, "50370": 2.0583, "50375": 2.06237, "50380": 2.0737, "50385": 2.06682, "50390": 2.05265, "50395": 2.07578, "50400": 2.04412, "50405": 2.07481, "50410": 2.05579, "50415": 2.01759, "50420": 2.06907, "50425": 2.05724, "50430": 2.08154, "50435": 2.06762, "50440": 2.01766, "50445": 2.03708, "50450": 2.10034, "50455": 2.06234, "50460": 2.06675, "50465": 2.04735, "50470": 2.07245, "50475": 2.04838, "50480": 2.10192, "50485": 2.09982, "50490": 2.06858, "50495": 2.09674, "50500": 2.06937, "50505": 2.09438, "50510": 2.02985, "50515": 2.06658, "50520": 2.06087, "50525": 2.09083, "50530": 2.0836, "50535": 2.06183, "50540": 2.03757, "50545": 2.02865, "50550": 2.07683, "50555": 
2.05737, "50560": 2.0428, "50565": 2.08951, "50570": 2.01811, "50575": 2.03801, "50580": 2.03279, "50585": 2.07416, "50590": 2.06086, "50595": 2.08897, "50600": 2.05972, "50605": 2.05461, "50610": 2.08588, "50615": 2.09687, "50620": 2.09468, "50625": 2.05332, "50630": 2.03896, "50635": 2.10063, "50640": 2.08757, "50645": 2.09813, "50650": 2.05155, "50655": 2.05682, "50660": 2.03376, "50665": 2.03038, "50670": 2.05001, "50675": 2.08535, "50680": 2.04665, "50685": 2.03371, "50690": 2.04549, "50695": 2.04815, "50700": 2.10548, "50705": 2.05667, "50710": 2.11963, "50715": 2.01061, "50720": 2.0448, "50725": 2.08561, "50730": 2.07361, "50735": 2.08829, "50740": 2.02645, "50745": 2.09152, "50750": 2.07666, "50755": 2.09679, "50760": 2.1109, "50765": 2.06432, "50770": 2.06828, "50775": 2.07109, "50780": 2.06925, "50785": 2.0405, "50790": 2.06101, "50795": 2.06856, "50800": 2.09007, "50805": 2.06285, "50810": 2.06693, "50815": 2.0617, "50820": 2.0355, "50825": 2.06457, "50830": 2.02715, "50835": 2.0421, "50840": 2.08469, "50845": 2.03977, "50850": 2.06984, "50855": 2.06272, "50860": 2.06916}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 956236352.0, "5": 967337984.0, "10": 971388160.0, "15": 946451136.0, "20": 961367552.0, "25": 1083593856.0, "30": 1211427328.0, "35": 1297695104.0, "40": 1271662208.0, "45": 1174972416.0, "50": 1126596224.0, "55": 1083471232.0, "60": 1045093568.0, "65": 1029609472.0, "70": 1002075200.0, "75": 1000129536.0, "80": 1025326144.0, "85": 1013987968.0, "90": 990840768.0, "95": 961652672.0, "100": 971655552.0, "105": 979952128.0, "110": 977933504.0, "115": 976486848.0, "120": 961170560.0, "125": 942466048.0, "130": 975964672.0, "135": 965313344.0, "140": 962883328.0, "145": 976480832.0, "150": 921585856.0, "155": 968481152.0, "160": 956474880.0, "165": 959859968.0, "170": 974345792.0, "175": 949013440.0, "180": 946720128.0, "185": 972006144.0, "190": 969052288.0, "195": 985149056.0, "200": 945770688.0, "205": 958337024.0, "210": 979441600.0, "215": 967488512.0, "220": 956430208.0, "225": 962392960.0, "230": 948177856.0, "235": 965230080.0, "240": 966070208.0, "245": 969162112.0, "250": 974433216.0, "255": 925052544.0, "260": 965637120.0, "265": 970672128.0, "270": 959133696.0, "275": 953996480.0, "280": 963431296.0, "285": 945780608.0, "290": 974126656.0, "295": 966700096.0, "300": 967146624.0, "305": 964506112.0, "310": 940356096.0, "315": 967398400.0, "320": 969005504.0, "325": 980555392.0, "330": 972100224.0, "335": 946867200.0, "340": 966583552.0, "345": 973024384.0, "350": 973909184.0, "355": 963259072.0, "360": 948353536.0, "365": 964822656.0, "370": 962946048.0, "375": 958445056.0, "380": 947158720.0, "385": 955991488.0, "390": 945397056.0, "395": 970419968.0, "400": 979778944.0, "405": 968354560.0, "410": 970065920.0, "415": 953158016.0, "420": 943566976.0, "425": 954773248.0, "430": 962665216.0, "435": 977079360.0, "440": 954805568.0, "445": 971893504.0, "450": 963515264.0, "455": 973134720.0, "460": 983712768.0, "465": 945279744.0, "470": 942054912.0, "475": 967002176.0, "480": 966098880.0, "485": 976412544.0, "490": 962530688.0, "495": 945459712.0, "500": 964441728.0, "505": 986008960.0, "510": 965682304.0, "515": 943407360.0, "520": 945015808.0, "525": 971262272.0, "530": 971882688.0, "535": 979136576.0, "540": 969526656.0, "545": 954125824.0, "550": 951266560.0, "555": 987225216.0, "560": 960428032.0, "565": 966617344.0, "570": 975723520.0, "575": 927218688.0, "580": 970699392.0, "585": 961174400.0, "590": 972969280.0, 
"595": 963680832.0, "600": 937081472.0, "605": 951475072.0, "610": 963364928.0, "615": 970012416.0, "620": 976465920.0, "625": 949584192.0, "630": 954446528.0, "635": 986044032.0, "640": 980977856.0, "645": 955008896.0, "650": 958547456.0, "655": 951651456.0, "660": 961040128.0, "665": 967554560.0, "670": 962516096.0, "675": 968335872.0, "680": 965623040.0, "685": 962865408.0, "690": 961921408.0, "695": 954765952.0, "700": 970339648.0, "705": 945516928.0, "710": 943884288.0, "715": 973356544.0, "720": 968368384.0, "725": 978487552.0, "730": 952192896.0, "735": 948810560.0, "740": 955636480.0, "745": 975868928.0, "750": 981235968.0, "755": 962155264.0, "760": 951965056.0, "765": 967345344.0, "770": 976149888.0, "775": 970547968.0, "780": 977538240.0, "785": 931524096.0, "790": 960441344.0, "795": 964581632.0, "800": 967019712.0, "805": 962317632.0, "810": 940973632.0, "815": 949037440.0, "820": 953182528.0, "825": 954503552.0, "830": 976442496.0, "835": 956071808.0, "840": 948403584.0, "845": 965156992.0, "850": 966029568.0, "855": 960903936.0, "860": 976023936.0, "865": 938159872.0, "870": 966410752.0, "875": 972314112.0, "880": 963115328.0, "885": 967744512.0, "890": 949966720.0, "895": 960019072.0, "900": 974229120.0, "905": 963965184.0, "910": 958431424.0, "915": 956354944.0, "920": 943971072.0, "925": 960832640.0, "930": 978849088.0, "935": 971070720.0, "940": 960906752.0, "945": 945065344.0, "950": 957426304.0, "955": 979036800.0, "960": 983586688.0, "965": 966165696.0, "970": 951228032.0, "975": 961574144.0, "980": 968068736.0, "985": 968989568.0, "990": 984390912.0, "995": 953290368.0, "1000": 934779072.0, "1005": 960143616.0, "1010": 971539968.0, "1015": 985184896.0, "1020": 962781440.0, "1025": 935009536.0, "1030": 974679808.0, "1035": 964992448.0, "1040": 980462784.0, "1045": 960826560.0, "1050": 955200384.0, "1055": 957779520.0, "1060": 967748352.0, "1065": 967117760.0, "1070": 966600448.0, "1075": 950059520.0, "1080": 954509632.0, "1085": 967249984.0, "1090": 977134272.0, "1095": 961236864.0, "1100": 979611904.0, "1105": 953363328.0, "1110": 965955008.0, "1115": 966987264.0, "1120": 970353920.0, "1125": 965710208.0, "1130": 954943232.0, "1135": 965843456.0, "1140": 965174016.0, "1145": 970987456.0, "1150": 955558528.0, "1155": 930579584.0, "1160": 957776128.0, "1165": 978124608.0, "1170": 974302400.0, "1175": 973060352.0, "1180": 973085824.0, "1185": 947344384.0, "1190": 964794752.0, "1195": 953136960.0, "1200": 972843392.0, "1205": 988476800.0, "1210": 931121792.0, "1215": 968646016.0, "1220": 969161536.0, "1225": 975948288.0, "1230": 967334784.0, "1235": 943443456.0, "1240": 955854464.0, "1245": 981503872.0, "1250": 966111616.0, "1255": 973672960.0, "1260": 946494784.0, "1265": 963999168.0, "1270": 960485376.0, "1275": 973617280.0, "1280": 961111040.0, "1285": 957582976.0, "1290": 952527744.0, "1295": 971612928.0, "1300": 968863616.0, "1305": 963737792.0, "1310": 963336960.0, "1315": 943552512.0, "1320": 966306944.0, "1325": 989785728.0, "1330": 969507392.0, "1335": 972302208.0, "1340": 972268928.0, "1345": 960655872.0, "1350": 968637568.0, "1355": 955852416.0, "1360": 971821056.0, "1365": 960385792.0, "1370": 948791680.0, "1375": 973530880.0, "1380": 953468416.0, "1385": 969145344.0, "1390": 975720192.0, "1395": 931675904.0, "1400": 945856448.0, "1405": 976753984.0, "1410": 974512000.0, "1415": 967570560.0, "1420": 966745280.0, "1425": 937380608.0, "1430": 973914496.0, "1435": 978333440.0, "1440": 964180224.0, "1445": 958058368.0, "1450": 946148992.0, "1455": 983923328.0, 
"1460": 968651456.0, "1465": 948747136.0, "1470": 984241472.0, "1475": 943906176.0, "1480": 963975296.0, "1485": 957350528.0, "1490": 961259520.0, "1495": 980539904.0, "1500": 958332544.0, "1505": 942867008.0, "1510": 984182144.0, "1515": 959092864.0, "1520": 959107072.0, "1525": 952784704.0, "1530": 957744128.0, "1535": 949429760.0, "1540": 971085184.0, "1545": 963133952.0, "1550": 978667456.0, "1555": 952320128.0, "1560": 980092288.0, "1565": 967313728.0, "1570": 973843200.0, "1575": 975493760.0, "1580": 941863296.0, "1585": 970029312.0, "1590": 983821056.0, "1595": 948632960.0, "1600": 967445248.0, "1605": 952451136.0, "1610": 969619840.0, "1615": 983147520.0, "1620": 968019328.0, "1625": 970716928.0, "1630": 962887168.0, "1635": 942310400.0, "1640": 981612416.0, "1645": 973979136.0, "1650": 974186880.0, "1655": 967265024.0, "1660": 940687616.0, "1665": 961704576.0, "1670": 962902976.0, "1675": 971278400.0, "1680": 980879808.0, "1685": 944418560.0, "1690": 964688640.0, "1695": 965644032.0, "1700": 966344064.0, "1705": 985200576.0, "1710": 978354752.0, "1715": 943213056.0, "1720": 977088256.0, "1725": 965871872.0, "1730": 968969984.0, "1735": 965088832.0, "1740": 949714432.0, "1745": 970014080.0, "1750": 959683520.0, "1755": 960087296.0, "1760": 966382656.0, "1765": 951817408.0, "1770": 954664832.0, "1775": 973753088.0, "1780": 970534208.0, "1785": 968826880.0, "1790": 950235008.0, "1795": 945132672.0, "1800": 984666624.0, "1805": 987164032.0, "1810": 977769856.0, "1815": 948006464.0, "1820": 949208704.0, "1825": 978853312.0, "1830": 966363520.0, "1835": 964131072.0, "1840": 972319360.0, "1845": 935414528.0, "1850": 952499968.0, "1855": 980052032.0, "1860": 975867968.0, "1865": 958965888.0, "1870": 958947712.0, "1875": 932594304.0, "1880": 973574784.0, "1885": 978845696.0, "1890": 971359104.0, "1895": 959210496.0, "1900": 947392384.0, "1905": 981831296.0, "1910": 969122304.0, "1915": 970039040.0, "1920": 975597952.0, "1925": 960495360.0, "1930": 977923904.0, "1935": 963251584.0, "1940": 952461760.0, "1945": 981336576.0, "1950": 939170304.0, "1955": 960607616.0, "1960": 970032448.0, "1965": 981176384.0, "1970": 962046016.0, "1975": 952825088.0, "1980": 936848512.0, "1985": 975939456.0, "1990": 965965312.0, "1995": 962614272.0, "2000": 960553600.0, "2005": 954498048.0, "2010": 975581056.0, "2015": 991802752.0, "2020": 975435136.0, "2025": 974303872.0, "2030": 952081920.0, "2035": 967850112.0, "2040": 987459904.0, "2045": 976479360.0, "2050": 984701696.0, "2055": 942838976.0, "2060": 942595584.0, "2065": 966210688.0, "2070": 969624448.0, "2075": 980553856.0, "2080": 977596032.0, "2085": 939629248.0, "2090": 969869760.0, "2095": 961273216.0, "2100": 976718592.0, "2105": 972536000.0, "2110": 959901568.0, "2115": 956878720.0, "2120": 977481728.0, "2125": 962566528.0, "2130": 979618688.0, "2135": 950538752.0, "2140": 946996160.0, "2145": 962276736.0, "2150": 973404032.0, "2155": 972688576.0, "2160": 970312896.0, "2165": 948643520.0, "2170": 961545728.0, "2175": 969378048.0, "2180": 969329664.0, "2185": 947448448.0, "2190": 940481664.0, "2195": 986086272.0, "2200": 961863296.0, "2205": 978922880.0, "2210": 964102656.0, "2215": 963499776.0, "2220": 951311424.0, "2225": 969317440.0, "2230": 976329088.0, "2235": 974025920.0, "2240": 975493376.0, "2245": 960233408.0, "2250": 967641600.0, "2255": 969132672.0, "2260": 975064512.0, "2265": 968258816.0, "2270": 951744640.0, "2275": 962768704.0, "2280": 969640064.0, "2285": 971693568.0, "2290": 962889536.0, "2295": 931410240.0, "2300": 959905920.0, 
"2305": 970428160.0, "2310": 967445504.0, "2315": 970906240.0, "2320": 975589376.0, "2325": 938587840.0, "2330": 988437760.0, "2335": 977489152.0, "2340": 964598656.0, "2345": 964167424.0, "2350": 947555008.0, "2355": 977028864.0, "2360": 966899200.0, "2365": 977295744.0, "2370": 965070656.0, "2375": 953966016.0, "2380": 962917504.0, "2385": 967194048.0, "2390": 963076224.0, "2395": 974465536.0, "2400": 958410624.0, "2405": 968120192.0, "2410": 951587712.0, "2415": 965904960.0, "2420": 966517504.0, "2425": 959044736.0, "2430": 956685632.0, "2435": 961389248.0, "2440": 959754304.0, "2445": 970891008.0, "2450": 961997696.0, "2455": 922720512.0, "2460": 951953536.0, "2465": 955730432.0, "2470": 972571008.0, "2475": 973812800.0, "2480": 943895936.0, "2485": 944184832.0, "2490": 972412544.0, "2495": 974451392.0, "2500": 973910912.0, "2505": 958491776.0, "2510": 939510784.0, "2515": 979553024.0, "2520": 970473280.0, "2525": 964390784.0, "2530": 955799424.0, "2535": 936598528.0, "2540": 969028992.0, "2545": 970386176.0, "2550": 969462784.0, "2555": 969441152.0, "2560": 964978688.0, "2565": 959764352.0, "2570": 985176704.0, "2575": 957425664.0, "2580": 967424768.0, "2585": 966023872.0, "2590": 956354752.0, "2595": 981830528.0, "2600": 959532800.0, "2605": 962999616.0, "2610": 965972096.0, "2615": 951926400.0, "2620": 971241984.0, "2625": 976456832.0, "2630": 974409728.0, "2635": 948071936.0, "2640": 948137344.0, "2645": 963037248.0, "2650": 953983360.0, "2655": 977112896.0, "2660": 949623296.0, "2665": 953928320.0, "2670": 959064320.0, "2675": 979276544.0, "2680": 961396992.0, "2685": 970700736.0, "2690": 965221120.0, "2695": 943553536.0, "2700": 969425216.0, "2705": 978961600.0, "2710": 971811200.0, "2715": 990814848.0, "2720": 942650240.0, "2725": 967967616.0, "2730": 955467264.0, "2735": 970686464.0, "2740": 977922368.0, "2745": 932281216.0, "2750": 947870464.0, "2755": 956317440.0, "2760": 981709696.0, "2765": 966124992.0, "2770": 948914688.0, "2775": 935843712.0, "2780": 964791872.0, "2785": 969570048.0, "2790": 974274752.0, "2795": 966886400.0, "2800": 944390656.0, "2805": 964354304.0, "2810": 969611520.0, "2815": 975857920.0, "2820": 963084480.0, "2825": 937629056.0, "2830": 956739584.0, "2835": 986322752.0, "2840": 961770624.0, "2845": 967520256.0, "2850": 951715328.0, "2855": 962092160.0, "2860": 954243712.0, "2865": 955892864.0, "2870": 944663040.0, "2875": 974664512.0, "2880": 968214272.0, "2885": 981082496.0, "2890": 953467904.0, "2895": 957179776.0, "2900": 964990272.0, "2905": 931707776.0, "2910": 955732096.0, "2915": 979477952.0, "2920": 970493312.0, "2925": 964977024.0, "2930": 964049152.0, "2935": 940141696.0, "2940": 964909184.0, "2945": 989161088.0, "2950": 965207424.0, "2955": 965117504.0, "2960": 933161728.0, "2965": 968806784.0, "2970": 973035264.0, "2975": 958093184.0, "2980": 964511232.0, "2985": 937269952.0, "2990": 951254912.0, "2995": 978316032.0, "3000": 969276480.0, "3005": 974687744.0, "3010": 950234624.0, "3015": 943854464.0, "3020": 958454144.0, "3025": 975185792.0, "3030": 965018240.0, "3035": 963454080.0, "3040": 952133248.0, "3045": 989793856.0, "3050": 965558720.0, "3055": 982533632.0, "3060": 971228800.0, "3065": 943916928.0, "3070": 978434304.0, "3075": 975217152.0, "3080": 961002880.0, "3085": 962365056.0, "3090": 945965312.0, "3095": 938115328.0, "3100": 972941248.0, "3105": 962002560.0, "3110": 970668864.0, "3115": 963391232.0, "3120": 947128704.0, "3125": 972732608.0, "3130": 952986496.0, "3135": 966054016.0, "3140": 968502016.0, "3145": 937864320.0, 
"3150": 975022272.0, "3155": 976827264.0, "3160": 969653504.0, "3165": 982220032.0, "3170": 937985280.0, "3175": 953851008.0, "3180": 983835328.0, "3185": 965193344.0, "3190": 968506688.0, "3195": 950957568.0, "3200": 945127296.0, "3205": 959875968.0, "3210": 957511552.0, "3215": 958045696.0, "3220": 968155136.0, "3225": 935627648.0, "3230": 962614016.0, "3235": 975789056.0, "3240": 962634880.0, "3245": 981298304.0, "3250": 943285120.0, "3255": 954623360.0, "3260": 980375872.0, "3265": 963644800.0, "3270": 965189120.0, "3275": 959743232.0, "3280": 967069952.0, "3285": 982501248.0, "3290": 947726336.0, "3295": 966434176.0, "3300": 959202688.0, "3305": 949155584.0, "3310": 979535744.0, "3315": 964306432.0, "3320": 969232128.0, "3325": 956204992.0, "3330": 941167488.0, "3335": 964973440.0, "3340": 956925568.0, "3345": 972513536.0, "3350": 964599936.0, "3355": 943358208.0, "3360": 970049280.0, "3365": 969466880.0, "3370": 954787200.0, "3375": 958689536.0, "3380": 971476160.0, "3385": 947986560.0, "3390": 965794240.0, "3395": 978414592.0, "3400": 978138368.0, "3405": 976723520.0, "3410": 924206080.0, "3415": 955437440.0, "3420": 971832832.0, "3425": 977179776.0, "3430": 973851008.0, "3435": 936096832.0, "3440": 970516224.0, "3445": 957328128.0, "3450": 959864832.0, "3455": 963865856.0, "3460": 967894272.0, "3465": 931343232.0, "3470": 952347840.0, "3475": 973716416.0, "3480": 959762944.0, "3485": 979972352.0, "3490": 944695808.0, "3495": 953917760.0, "3500": 969342464.0, "3505": 964372352.0, "3510": 971246720.0, "3515": 955970944.0, "3520": 958758272.0, "3525": 971927168.0, "3530": 964174208.0, "3535": 983204352.0, "3540": 937501568.0, "3545": 944767936.0, "3550": 984475264.0, "3555": 978070784.0, "3560": 974398080.0, "3565": 968824832.0, "3570": 946707584.0, "3575": 976130816.0, "3580": 977506048.0, "3585": 954592384.0, "3590": 956438144.0, "3595": 951468672.0, "3600": 989021568.0, "3605": 962020224.0, "3610": 965065856.0, "3615": 974660992.0, "3620": 954901056.0, "3625": 939527296.0, "3630": 990161792.0, "3635": 971448320.0, "3640": 976037888.0, "3645": 961501440.0, "3650": 945815040.0, "3655": 965794816.0, "3660": 976206208.0, "3665": 964033024.0, "3670": 977443328.0, "3675": 943488384.0, "3680": 958197248.0, "3685": 964298240.0, "3690": 982105280.0, "3695": 963149440.0, "3700": 950582464.0, "3705": 947357248.0, "3710": 982366528.0, "3715": 972693120.0, "3720": 976150912.0, "3725": 964051008.0, "3730": 948874624.0, "3735": 967091840.0, "3740": 960972288.0, "3745": 969334144.0, "3750": 963948288.0, "3755": 953433216.0, "3760": 976654976.0, "3765": 979853824.0, "3770": 972371968.0, "3775": 972386560.0, "3780": 952585216.0, "3785": 960237568.0, "3790": 985610752.0, "3795": 969208192.0, "3800": 957890112.0, "3805": 972432384.0, "3810": 954529728.0, "3815": 974579392.0, "3820": 963032640.0, "3825": 962083968.0, "3830": 969406592.0, "3835": 934757504.0, "3840": 971269312.0, "3845": 986862592.0, "3850": 968872832.0, "3855": 965259968.0, "3860": 948070464.0, "3865": 975062272.0, "3870": 985110784.0, "3875": 983055104.0, "3880": 963627712.0, "3885": 953024256.0, "3890": 960296704.0, "3895": 960603584.0, "3900": 984925568.0, "3905": 976223104.0, "3910": 987347712.0, "3915": 946017344.0, "3920": 974867328.0, "3925": 961235072.0, "3930": 976777664.0, "3935": 978924928.0, "3940": 950292480.0, "3945": 960272768.0, "3950": 974184320.0, "3955": 972968000.0, "3960": 974078208.0, "3965": 950886720.0, "3970": 980681088.0, "3975": 960737920.0, "3980": 977543488.0, "3985": 962962048.0, "3990": 972755520.0, 
"3995": 953700992.0, "4000": 974987136.0, "4005": 971646336.0, "4010": 978408960.0, "4015": 971517184.0, "4020": 950323200.0, "4025": 968445952.0, "4030": 997948288.0, "4035": 978572800.0, "4040": 959812352.0, "4045": 939663680.0, "4050": 944742400.0, "4055": 980997248.0, "4060": 977709056.0, "4065": 975755968.0, "4070": 942189376.0, "4075": 945777024.0, "4080": 988778496.0, "4085": 962090112.0, "4090": 983355648.0, "4095": 986943552.0, "4100": 957223616.0, "4105": 954109696.0, "4110": 966498816.0, "4115": 976039936.0, "4120": 983543040.0, "4125": 960077504.0, "4130": 967314944.0, "4135": 971491968.0, "4140": 963207680.0, "4145": 956235648.0, "4150": 960318976.0, "4155": 946240896.0, "4160": 968439104.0, "4165": 970351872.0, "4170": 972008448.0, "4175": 955873920.0, "4180": 940984192.0, "4185": 968318016.0, "4190": 968040320.0, "4195": 989259072.0, "4200": 962690560.0, "4205": 960618368.0, "4210": 971862784.0, "4215": 974170624.0, "4220": 981144192.0, "4225": 975245952.0, "4230": 952725888.0, "4235": 958493824.0, "4240": 966811648.0, "4245": 961744512.0, "4250": 965869760.0, "4255": 958291776.0, "4260": 949581952.0, "4265": 964162432.0, "4270": 978419968.0, "4275": 975445504.0, "4280": 962696576.0, "4285": 951463616.0, "4290": 980053696.0, "4295": 968866816.0, "4300": 958299008.0, "4305": 966815040.0, "4310": 939613696.0, "4315": 949440640.0, "4320": 984535040.0, "4325": 982583936.0, "4330": 974770816.0, "4335": 949459712.0, "4340": 959527232.0, "4345": 956656768.0, "4350": 979958784.0, "4355": 968881728.0, "4360": 966351104.0, "4365": 941256960.0, "4370": 969519232.0, "4375": 973007232.0, "4380": 966172416.0, "4385": 972036352.0, "4390": 954201472.0, "4395": 951893632.0, "4400": 973665280.0, "4405": 972490432.0, "4410": 967956992.0, "4415": 958996992.0, "4420": 960778816.0, "4425": 976465216.0, "4430": 965979008.0, "4435": 975926144.0, "4440": 962331328.0, "4445": 954848512.0, "4450": 978330240.0, "4455": 960254464.0, "4460": 968642176.0, "4465": 968855232.0, "4470": 944176256.0, "4475": 952046912.0, "4480": 978873984.0, "4485": 968291136.0, "4490": 957074240.0, "4495": 938974784.0, "4500": 953290240.0, "4505": 977203712.0, "4510": 978749696.0, "4515": 962504832.0, "4520": 958850240.0, "4525": 958151552.0, "4530": 964440320.0, "4535": 976654400.0, "4540": 976884224.0, "4545": 970181056.0, "4550": 953235584.0, "4555": 959591680.0, "4560": 972460672.0, "4565": 973478592.0, "4570": 978933760.0, "4575": 957822336.0, "4580": 963228096.0, "4585": 957419520.0, "4590": 986568256.0, "4595": 960177152.0, "4600": 952255232.0, "4605": 959249728.0, "4610": 963735552.0, "4615": 957986816.0, "4620": 960255360.0, "4625": 973774080.0, "4630": 944507904.0, "4635": 977062016.0, "4640": 960298432.0, "4645": 982030400.0, "4650": 962374656.0, "4655": 939470720.0, "4660": 964025472.0, "4665": 962577344.0, "4670": 976684672.0, "4675": 963455104.0, "4680": 957452032.0, "4685": 949698944.0, "4690": 956973760.0, "4695": 969952128.0, "4700": 961214592.0, "4705": 970768960.0, "4710": 934493440.0, "4715": 970426496.0, "4720": 966350336.0, "4725": 980291712.0, "4730": 965850624.0, "4735": 937870976.0, "4740": 960125312.0, "4745": 976034304.0, "4750": 967959296.0, "4755": 984984960.0, "4760": 959159744.0, "4765": 955212096.0, "4770": 958682240.0, "4775": 991130688.0, "4780": 976794752.0, "4785": 967607808.0, "4790": 943744640.0, "4795": 955873728.0, "4800": 967722112.0, "4805": 976481664.0, "4810": 965136000.0, "4815": 958019712.0, "4820": 974018688.0, "4825": 961542592.0, "4830": 962675200.0, "4835": 972544576.0, 
"4840": 948938112.0, "4845": 965741568.0, "4850": 960329472.0, "4855": 964144256.0, "4860": 963075520.0, "4865": 967495424.0, "4870": 957194624.0, "4875": 983611776.0, "4880": 957108864.0, "4885": 977003904.0, "4890": 959765824.0, "4895": 942188480.0, "4900": 973724288.0, "4905": 975239040.0, "4910": 969220608.0, "4915": 970102912.0, "4920": 941193984.0, "4925": 954760256.0, "4930": 977022848.0, "4935": 963787008.0, "4940": 972660224.0, "4945": 960044096.0, "4950": 940819456.0, "4955": 968049344.0, "4960": 976826240.0, "4965": 961009280.0, "4970": 958579840.0, "4975": 933763072.0, "4980": 960824896.0, "4985": 963012608.0, "4990": 963589632.0, "4995": 986320768.0, "5000": 940764352.0, "5005": 968960768.0, "5010": 970321024.0, "5015": 965233280.0, "5020": 966727808.0, "5025": 949434624.0, "5030": 953503104.0, "5035": 967329152.0, "5040": 955677888.0, "5045": 969129856.0, "5050": 953418112.0, "5055": 954824832.0, "5060": 963079168.0, "5065": 952195840.0, "5070": 973598080.0, "5075": 978618112.0, "5080": 942878848.0, "5085": 965839616.0, "5090": 972870272.0, "5095": 964423424.0, "5100": 958355264.0, "5105": 965354560.0, "5110": 950413952.0, "5115": 972371456.0, "5120": 960462208.0, "5125": 969780544.0, "5130": 938832256.0, "5135": 943648576.0, "5140": 969938560.0, "5145": 968677696.0, "5150": 970601472.0, "5155": 972697088.0, "5160": 926573568.0, "5165": 961592192.0, "5170": 966886144.0, "5175": 966023808.0, "5180": 963679616.0, "5185": 930851328.0, "5190": 949803904.0, "5195": 972422016.0, "5200": 973820800.0, "5205": 968225920.0, "5210": 960528576.0, "5215": 928791936.0, "5220": 979211264.0, "5225": 984841984.0, "5230": 975048576.0, "5235": 975080064.0, "5240": 944387776.0, "5245": 970835072.0, "5250": 972452480.0, "5255": 966921600.0, "5260": 976742272.0, "5265": 942284160.0, "5270": 969225088.0, "5275": 970060032.0, "5280": 962861824.0, "5285": 964048128.0, "5290": 932515456.0, "5295": 951764096.0, "5300": 975576192.0, "5305": 951854848.0, "5310": 968003840.0, "5315": 955756544.0, "5320": 950909632.0, "5325": 973053248.0, "5330": 967782016.0, "5335": 967560512.0, "5340": 966417216.0, "5345": 962948352.0, "5350": 978862656.0, "5355": 972240000.0, "5360": 963830784.0, "5365": 965271488.0, "5370": 947788480.0, "5375": 948772224.0, "5380": 967235392.0, "5385": 980504192.0, "5390": 965251840.0, "5395": 955073792.0, "5400": 948237440.0, "5405": 974293248.0, "5410": 967868416.0, "5415": 976112192.0, "5420": 967395584.0, "5425": 937351680.0, "5430": 963845568.0, "5435": 971849472.0, "5440": 969042560.0, "5445": 957391488.0, "5450": 919321472.0, "5455": 952015360.0, "5460": 962351232.0, "5465": 978930304.0, "5470": 980883392.0, "5475": 941624448.0, "5480": 955706368.0, "5485": 964927488.0, "5490": 975887744.0, "5495": 962694784.0, "5500": 971132288.0, "5505": 956891520.0, "5510": 968585600.0, "5515": 945405568.0, "5520": 963095936.0, "5525": 975858496.0, "5530": 936549440.0, "5535": 970657536.0, "5540": 960245120.0, "5545": 971937152.0, "5550": 967822208.0, "5555": 955894784.0, "5560": 954471296.0, "5565": 968739712.0, "5570": 945100416.0, "5575": 960514816.0, "5580": 960586688.0, "5585": 959471616.0, "5590": 977692864.0, "5595": 975145088.0, "5600": 962947136.0, "5605": 964089984.0, "5610": 943144320.0, "5615": 966490624.0, "5620": 963347840.0, "5625": 982385664.0, "5630": 976111616.0, "5635": 957187200.0, "5640": 951479680.0, "5645": 967780736.0, "5650": 979083520.0, "5655": 983449216.0, "5660": 956395136.0, "5665": 953524992.0, "5670": 966097920.0, "5675": 967665408.0, "5680": 978660160.0, 
"5685": 962019712.0, "5690": 935761472.0, "5695": 963738880.0, "5700": 952450944.0, "5705": 974451584.0, "5710": 971303680.0, "5715": 946085824.0, "5720": 975027840.0, "5725": 967374848.0, "5730": 978561408.0, "5735": 964843776.0, "5740": 943622656.0, "5745": 971167168.0, "5750": 981876096.0, "5755": 956827264.0, "5760": 963708032.0, "5765": 957633664.0, "5770": 955698048.0, "5775": 970749952.0, "5780": 962775040.0, "5785": 970587584.0, "5790": 974763904.0, "5795": 949801792.0, "5800": 965889280.0, "5805": 968712576.0, "5810": 976097536.0, "5815": 970017728.0, "5820": 936353344.0, "5825": 969327488.0, "5830": 977626880.0, "5835": 975017728.0, "5840": 963024512.0, "5845": 968702272.0, "5850": 942884480.0, "5855": 975937792.0, "5860": 979488896.0, "5865": 978517120.0, "5870": 968776704.0, "5875": 942315776.0, "5880": 964570560.0, "5885": 974794432.0, "5890": 972634304.0, "5895": 965583104.0, "5900": 941273280.0, "5905": 961778624.0, "5910": 958753344.0, "5915": 968149504.0, "5920": 977332736.0, "5925": 959518464.0, "5930": 946785664.0, "5935": 952381824.0, "5940": 977674688.0, "5945": 984780928.0, "5950": 980825472.0, "5955": 935058560.0, "5960": 961823488.0, "5965": 965963776.0, "5970": 970599936.0, "5975": 961969344.0, "5980": 958171520.0, "5985": 964575232.0, "5990": 973503616.0, "5995": 955930368.0, "6000": 955607424.0, "6005": 961329152.0, "6010": 952681280.0, "6015": 974563072.0, "6020": 978325888.0, "6025": 972030080.0, "6030": 955445504.0, "6035": 946978816.0, "6040": 962788480.0, "6045": 983754880.0, "6050": 956576640.0, "6055": 963445632.0, "6060": 945742336.0, "6065": 958479744.0, "6070": 978415488.0, "6075": 978000448.0, "6080": 957407488.0, "6085": 947579776.0, "6090": 953572992.0, "6095": 964783488.0, "6100": 979764736.0, "6105": 971097984.0, "6110": 961705600.0, "6115": 943604544.0, "6120": 968500480.0, "6125": 960519360.0, "6130": 983990784.0, "6135": 960981504.0, "6140": 958609856.0, "6145": 971273472.0, "6150": 968443584.0, "6155": 974966784.0, "6160": 977376128.0, "6165": 952620544.0, "6170": 951091840.0, "6175": 963236608.0, "6180": 969477248.0, "6185": 966247104.0, "6190": 963707520.0, "6195": 947019904.0, "6200": 969255168.0, "6205": 967259776.0, "6210": 959241344.0, "6215": 972766720.0, "6220": 936350720.0, "6225": 978568960.0, "6230": 975789568.0, "6235": 971668544.0, "6240": 965920384.0, "6245": 955820096.0, "6250": 956339456.0, "6255": 973288832.0, "6260": 978865024.0, "6265": 974770048.0, "6270": 958830784.0, "6275": 963860864.0, "6280": 973078912.0, "6285": 965999872.0, "6290": 971002496.0, "6295": 987502720.0, "6300": 947429888.0, "6305": 964869184.0, "6310": 978955328.0, "6315": 978393216.0, "6320": 971846848.0, "6325": 923002240.0, "6330": 959132928.0, "6335": 974891008.0, "6340": 984750336.0, "6345": 967008000.0, "6350": 944722624.0, "6355": 958039168.0, "6360": 972821376.0, "6365": 972246016.0, "6370": 959250048.0, "6375": 967031424.0, "6380": 951289344.0, "6385": 973597568.0, "6390": 965465856.0, "6395": 975241408.0, "6400": 984032064.0, "6405": 944028800.0, "6410": 977178112.0, "6415": 971389952.0, "6420": 956443392.0, "6425": 961036288.0, "6430": 957437888.0, "6435": 960039040.0, "6440": 968824320.0, "6445": 973442816.0, "6450": 974305536.0, "6455": 962079488.0, "6460": 941118464.0, "6465": 974421184.0, "6470": 979862784.0, "6475": 960775040.0, "6480": 967656256.0, "6485": 948487296.0, "6490": 970921088.0, "6495": 988565696.0, "6500": 980481792.0, "6505": 972035648.0, "6510": 951550784.0, "6515": 957602432.0, "6520": 979049728.0, "6525": 978817920.0, 
"6530": 973309760.0, "6535": 967829504.0, "6540": 950148416.0, "6545": 966269120.0, "6550": 979384576.0, "6555": 967156352.0, "6560": 975390720.0, "6565": 949573376.0, "6570": 951957248.0, "6575": 962738368.0, "6580": 976073344.0, "6585": 979534080.0, "6590": 949200640.0, "6595": 961606784.0, "6600": 961268032.0, "6605": 961472896.0, "6610": 985249728.0, "6615": 959567616.0, "6620": 944477952.0, "6625": 971114624.0, "6630": 971307136.0, "6635": 964006528.0, "6640": 959857600.0, "6645": 950808960.0, "6650": 978737984.0, "6655": 965741568.0, "6660": 968457344.0, "6665": 968658048.0, "6670": 932740480.0, "6675": 970920448.0, "6680": 968831616.0, "6685": 958514304.0, "6690": 956055360.0, "6695": 955278336.0, "6700": 961803456.0, "6705": 978971648.0, "6710": 970889984.0, "6715": 966752320.0, "6720": 973902976.0, "6725": 941793280.0, "6730": 979300864.0, "6735": 994427392.0, "6740": 976637952.0, "6745": 974762752.0, "6750": 939063872.0, "6755": 977822144.0, "6760": 969892160.0, "6765": 978566144.0, "6770": 975279616.0, "6775": 943708288.0, "6780": 947209600.0, "6785": 975347520.0, "6790": 960409792.0, "6795": 976243072.0, "6800": 973332608.0, "6805": 946757248.0, "6810": 958523904.0, "6815": 970967296.0, "6820": 977876992.0, "6825": 969635840.0, "6830": 950595840.0, "6835": 981820544.0, "6840": 983441024.0, "6845": 949181184.0, "6850": 965920640.0, "6855": 954260992.0, "6860": 979247232.0, "6865": 984087040.0, "6870": 965063680.0, "6875": 979020672.0, "6880": 950556800.0, "6885": 958790528.0, "6890": 960656896.0, "6895": 966181760.0, "6900": 985247872.0, "6905": 968602496.0, "6910": 950478592.0, "6915": 971135232.0, "6920": 967350784.0, "6925": 965193600.0, "6930": 964569984.0, "6935": 952147456.0, "6940": 963081088.0, "6945": 986900864.0, "6950": 973548544.0, "6955": 964986368.0, "6960": 940261760.0, "6965": 974529408.0, "6970": 978542784.0, "6975": 984760384.0, "6980": 982766336.0, "6985": 959741440.0, "6990": 945348736.0, "6995": 987569600.0, "7000": 963293184.0, "7005": 962825280.0, "7010": 985193600.0, "7015": 945632768.0, "7020": 982871808.0, "7025": 968804416.0, "7030": 953502080.0, "7035": 982700544.0, "7040": 950373632.0, "7045": 956152448.0, "7050": 960281792.0, "7055": 964090240.0, "7060": 976655360.0, "7065": 968024192.0, "7070": 953473984.0, "7075": 956880768.0, "7080": 969052160.0, "7085": 965832320.0, "7090": 969178880.0, "7095": 959968128.0, "7100": 973332160.0, "7105": 973344512.0, "7110": 970341504.0, "7115": 958645632.0, "7120": 949027072.0, "7125": 963337600.0, "7130": 971521536.0, "7135": 964689664.0, "7140": 961522496.0, "7145": 930810624.0, "7150": 946563456.0, "7155": 991289600.0, "7160": 968357376.0, "7165": 957036288.0, "7170": 968092160.0, "7175": 955841024.0, "7180": 958466432.0, "7185": 985139008.0, "7190": 978859392.0, "7195": 973995264.0, "7200": 936189760.0, "7205": 957625280.0, "7210": 967188736.0, "7215": 969523648.0, "7220": 982453504.0, "7225": 928765184.0, "7230": 949672896.0, "7235": 967067968.0, "7240": 967079680.0, "7245": 967873664.0, "7250": 949738624.0, "7255": 957094080.0, "7260": 970030080.0, "7265": 974985856.0, "7270": 959862784.0, "7275": 959312512.0, "7280": 957196864.0, "7285": 977540032.0, "7290": 977287680.0, "7295": 963085184.0, "7300": 975412864.0, "7305": 964367104.0, "7310": 977527680.0, "7315": 967289216.0, "7320": 974811392.0, "7325": 967152896.0, "7330": 959604864.0, "7335": 964320384.0, "7340": 977877056.0, "7345": 967631616.0, "7350": 984732224.0, "7355": 959762560.0, "7360": 949224448.0, "7365": 972589632.0, "7370": 982790784.0, 
"7375": 963125248.0, "7380": 964210688.0, "7385": 948628416.0, "7390": 963848960.0, "7395": 958295552.0, "7400": 969886144.0, "7405": 987576704.0, "7410": 952140032.0, "7415": 950408320.0, "7420": 967251456.0, "7425": 982964416.0, "7430": 966266176.0, "7435": 973259392.0, "7440": 936994432.0, "7445": 969197568.0, "7450": 980205312.0, "7455": 971218688.0, "7460": 972565888.0, "7465": 939353344.0, "7470": 971839616.0, "7475": 958328768.0, "7480": 969529344.0, "7485": 961306240.0, "7490": 934344256.0, "7495": 957334400.0, "7500": 969057408.0, "7505": 970198144.0, "7510": 972406016.0, "7515": 979533760.0, "7520": 951895808.0, "7525": 970555776.0, "7530": 954140160.0, "7535": 971506816.0, "7540": 979671296.0, "7545": 959248384.0, "7550": 960464640.0, "7555": 960416640.0, "7560": 970320512.0, "7565": 955033472.0, "7570": 942509568.0, "7575": 965753408.0, "7580": 982519936.0, "7585": 979277440.0, "7590": 970204864.0, "7595": 950115456.0, "7600": 946116864.0, "7605": 982538816.0, "7610": 969303680.0, "7615": 988613632.0, "7620": 957172288.0, "7625": 941111168.0, "7630": 971538560.0, "7635": 984375424.0, "7640": 983532800.0, "7645": 967889920.0, "7650": 959037568.0, "7655": 962326272.0, "7660": 969273344.0, "7665": 978319488.0, "7670": 975525440.0, "7675": 975400576.0, "7680": 942639104.0, "7685": 959947264.0, "7690": 975401600.0, "7695": 982283520.0, "7700": 979161216.0, "7705": 940902912.0, "7710": 974394880.0, "7715": 979148416.0, "7720": 967713344.0, "7725": 960320064.0, "7730": 943300224.0, "7735": 968340608.0, "7740": 980487808.0, "7745": 964003456.0, "7750": 963670208.0, "7755": 959874560.0, "7760": 970605888.0, "7765": 970851904.0, "7770": 962410752.0, "7775": 982068736.0, "7780": 964789440.0, "7785": 959789056.0, "7790": 968222016.0, "7795": 969105600.0, "7800": 971931008.0, "7805": 968405632.0, "7810": 946008640.0, "7815": 963504128.0, "7820": 974345152.0, "7825": 963439808.0, "7830": 957420800.0, "7835": 949586048.0, "7840": 957404032.0, "7845": 954041536.0, "7850": 979968384.0, "7855": 987200384.0, "7860": 947276544.0, "7865": 949032832.0, "7870": 965347712.0, "7875": 975579840.0, "7880": 968688256.0, "7885": 969384704.0, "7890": 952273920.0, "7895": 974635136.0, "7900": 963756672.0, "7905": 964088704.0, "7910": 965722624.0, "7915": 943452352.0, "7920": 951116544.0, "7925": 969820032.0, "7930": 964686336.0, "7935": 984669632.0, "7940": 964989952.0, "7945": 950673664.0, "7950": 962331776.0, "7955": 980229888.0, "7960": 963505664.0, "7965": 953008896.0, "7970": 951825600.0, "7975": 969771136.0, "7980": 965414912.0, "7985": 959238656.0, "7990": 967920512.0, "7995": 946718464.0, "8000": 962571520.0, "8005": 980372032.0, "8010": 965605248.0, "8015": 983177024.0, "8020": 960918144.0, "8025": 965237568.0, "8030": 958064960.0, "8035": 975691136.0, "8040": 960631424.0, "8045": 948235776.0, "8050": 959672768.0, "8055": 979224064.0, "8060": 969186560.0, "8065": 958050304.0, "8070": 964066496.0, "8075": 942216704.0, "8080": 965850624.0, "8085": 967061120.0, "8090": 983858944.0, "8095": 988563712.0, "8100": 966371200.0, "8105": 944536576.0, "8110": 968859904.0, "8115": 985905664.0, "8120": 974825280.0, "8125": 964165760.0, "8130": 966679488.0, "8135": 968129536.0, "8140": 963933376.0, "8145": 995174272.0, "8150": 973411840.0, "8155": 938599552.0, "8160": 964511872.0, "8165": 973467136.0, "8170": 968632256.0, "8175": 961771904.0, "8180": 936680256.0, "8185": 963042816.0, "8190": 968045056.0, "8195": 977618752.0, "8200": 956600320.0, "8205": 960566528.0, "8210": 946790080.0, "8215": 982558336.0, 
"8220": 988307456.0, "8225": 966132416.0, "8230": 962442112.0, "8235": 933935360.0, "8240": 980268288.0, "8245": 976668416.0, "8250": 964216192.0, "8255": 977382528.0, "8260": 956896384.0, "8265": 983116416.0, "8270": 952872448.0, "8275": 974295616.0, "8280": 974488576.0, "8285": 953298624.0, "8290": 940108416.0, "8295": 981273216.0, "8300": 973138496.0, "8305": 978146688.0, "8310": 951686528.0, "8315": 938155840.0, "8320": 977559424.0, "8325": 968093504.0, "8330": 990327552.0, "8335": 976103680.0, "8340": 947906560.0, "8345": 971071616.0, "8350": 970488448.0, "8355": 975284992.0, "8360": 979664768.0, "8365": 933100992.0, "8370": 965537664.0, "8375": 980003712.0, "8380": 965371520.0, "8385": 972999424.0, "8390": 962895936.0, "8395": 951212160.0, "8400": 972778304.0, "8405": 951661120.0, "8410": 960372352.0, "8415": 965660160.0, "8420": 941913600.0, "8425": 967699456.0, "8430": 961061376.0, "8435": 965894272.0, "8440": 969707712.0, "8445": 952627776.0, "8450": 984216512.0, "8455": 990507008.0, "8460": 968927104.0, "8465": 967552768.0, "8470": 963095168.0, "8475": 942761344.0, "8480": 987323264.0, "8485": 980181504.0, "8490": 992317440.0, "8495": 971668352.0, "8500": 950987520.0, "8505": 983228160.0, "8510": 973978048.0, "8515": 968825536.0, "8520": 961681152.0, "8525": 945119488.0, "8530": 984156608.0, "8535": 978215680.0, "8540": 967913344.0, "8545": 968695808.0, "8550": 942225024.0, "8555": 971609984.0, "8560": 958554368.0, "8565": 975517184.0, "8570": 975146560.0, "8575": 971780096.0, "8580": 932129792.0, "8585": 966004480.0, "8590": 978872768.0, "8595": 979081472.0, "8600": 983769408.0, "8605": 958025536.0, "8610": 984119488.0, "8615": 977699776.0, "8620": 963338752.0, "8625": 979528640.0, "8630": 943249024.0, "8635": 961331200.0, "8640": 972983936.0, "8645": 970305280.0, "8650": 969622336.0, "8655": 970640896.0, "8660": 944533376.0, "8665": 986646528.0, "8670": 960551296.0, "8675": 974415296.0, "8680": 962694528.0, "8685": 955705472.0, "8690": 978539008.0, "8695": 968475200.0, "8700": 972889728.0, "8705": 973595008.0, "8710": 946969280.0, "8715": 973700032.0, "8720": 958279680.0, "8725": 978412672.0, "8730": 985304768.0, "8735": 952473152.0, "8740": 940705600.0, "8745": 987150080.0, "8750": 972023808.0, "8755": 971467776.0, "8760": 965360256.0, "8765": 934838912.0, "8770": 986349376.0, "8775": 969617344.0, "8780": 967245568.0, "8785": 962393408.0, "8790": 947601792.0, "8795": 969492608.0, "8800": 970827328.0, "8805": 973509632.0, "8810": 983271872.0, "8815": 951668480.0, "8820": 939894848.0, "8825": 964286016.0, "8830": 981164032.0, "8835": 971348544.0, "8840": 979357184.0, "8845": 950866368.0, "8850": 986601152.0, "8855": 970758656.0, "8860": 961398912.0, "8865": 956941248.0, "8870": 945431424.0, "8875": 968260032.0, "8880": 983657344.0, "8885": 971406080.0, "8890": 969772288.0, "8895": 952356864.0, "8900": 961890304.0, "8905": 976532736.0, "8910": 981779264.0, "8915": 980364352.0, "8920": 967681024.0, "8925": 939588032.0, "8930": 970270400.0, "8935": 963905856.0, "8940": 977681984.0, "8945": 981839872.0, "8950": 945697024.0, "8955": 972682048.0, "8960": 973701056.0, "8965": 973372096.0, "8970": 966422336.0, "8975": 936720064.0, "8980": 952583232.0, "8985": 977497856.0, "8990": 967277632.0, "8995": 980504896.0, "9000": 952249536.0, "9005": 950465344.0, "9010": 975169984.0, "9015": 982484160.0, "9020": 959217984.0, "9025": 979239552.0, "9030": 953494656.0, "9035": 968763456.0, "9040": 977913792.0, "9045": 968511424.0, "9050": 982791616.0, "9055": 947861184.0, "9060": 956158336.0, 
"9065": 969743360.0, "9070": 967955136.0, "9075": 980747648.0, "9080": 952676800.0, "9085": 971307776.0, "9090": 963802368.0, "9095": 968215552.0, "9100": 974436736.0, "9105": 959419392.0, "9110": 947998720.0, "9115": 956393024.0, "9120": 985544512.0, "9125": 963100096.0, "9130": 957928704.0, "9135": 951522624.0, "9140": 966902656.0, "9145": 977140032.0, "9150": 987036032.0, "9155": 976920192.0, "9160": 957756032.0, "9165": 950269312.0, "9170": 988345344.0, "9175": 971300288.0, "9180": 967142208.0, "9185": 955042240.0, "9190": 956698944.0, "9195": 965931520.0, "9200": 968869184.0, "9205": 967243776.0, "9210": 984511424.0, "9215": 931449856.0, "9220": 949772032.0, "9225": 970961472.0, "9230": 971012992.0, "9235": 971600768.0, "9240": 959438720.0, "9245": 963620992.0, "9250": 961270144.0, "9255": 982786944.0, "9260": 979161216.0, "9265": 952474496.0, "9270": 949032064.0, "9275": 978478464.0, "9280": 977619968.0, "9285": 962361984.0, "9290": 978900224.0, "9295": 958352448.0, "9300": 965249280.0, "9305": 968862464.0, "9310": 972891392.0, "9315": 975888000.0, "9320": 948022400.0, "9325": 979026560.0, "9330": 977632640.0, "9335": 975525504.0, "9340": 960212608.0, "9345": 943145152.0, "9350": 952664192.0, "9355": 963316032.0, "9360": 960270336.0, "9365": 983387008.0, "9370": 982545920.0, "9375": 941885440.0, "9380": 983044736.0, "9385": 985333632.0, "9390": 972964864.0, "9395": 978098240.0, "9400": 937744256.0, "9405": 968268096.0, "9410": 981607040.0, "9415": 991687168.0, "9420": 960073344.0, "9425": 956695552.0, "9430": 938904704.0, "9435": 974252864.0, "9440": 959076608.0, "9445": 973426048.0, "9450": 961472832.0, "9455": 945739840.0, "9460": 978074240.0, "9465": 988114560.0, "9470": 963124992.0, "9475": 983687552.0, "9480": 930915456.0, "9485": 987221504.0, "9490": 963028544.0, "9495": 972355840.0, "9500": 981974720.0, "9505": 969868160.0, "9510": 964420096.0, "9515": 956742272.0, "9520": 948298624.0, "9525": 965079168.0, "9530": 958088704.0, "9535": 950988288.0, "9540": 953899648.0, "9545": 979678976.0, "9550": 955765504.0, "9555": 952951808.0, "9560": 957695488.0, "9565": 969868800.0, "9570": 977371008.0, "9575": 958838144.0, "9580": 962876672.0, "9585": 945903104.0, "9590": 947914624.0, "9595": 966554368.0, "9600": 984307200.0, "9605": 984614848.0, "9610": 943161792.0, "9615": 952162560.0, "9620": 980650048.0, "9625": 978181248.0, "9630": 969598080.0, "9635": 974476288.0, "9640": 940171520.0, "9645": 962094592.0, "9650": 970907392.0, "9655": 987226560.0, "9660": 963026304.0, "9665": 949702272.0, "9670": 966212096.0, "9675": 962867904.0, "9680": 964697536.0, "9685": 985690112.0, "9690": 939957952.0, "9695": 950362432.0, "9700": 974878912.0, "9705": 972294656.0, "9710": 966957312.0, "9715": 971047936.0, "9720": 940196736.0, "9725": 966024448.0, "9730": 973694784.0, "9735": 973903424.0, "9740": 970952960.0, "9745": 950458688.0, "9750": 979445120.0, "9755": 970096320.0, "9760": 967782272.0, "9765": 963553664.0, "9770": 952369216.0, "9775": 956645632.0, "9780": 969713920.0, "9785": 958135552.0, "9790": 960809088.0, "9795": 958018048.0, "9800": 949095552.0, "9805": 961926784.0, "9810": 978234496.0, "9815": 977028096.0, "9820": 982127360.0, "9825": 938895744.0, "9830": 969220928.0, "9835": 972314688.0, "9840": 970913920.0, "9845": 966631296.0, "9850": 946297856.0, "9855": 956810624.0, "9860": 986807616.0, "9865": 969745856.0, "9870": 989450112.0, "9875": 956458880.0, "9880": 930834944.0, "9885": 963064832.0, "9890": 971889152.0, "9895": 983133696.0, "9900": 956423680.0, "9905": 938678592.0, 
"9910": 978087296.0, "9915": 973062784.0, "9920": 944328960.0, "9925": 962068992.0, "9930": 947049088.0, "9935": 959996416.0, "9940": 965390592.0, "9945": 958604800.0, "9950": 963969920.0, "9955": 943413376.0, "9960": 966365056.0, "9965": 983165952.0, "9970": 966288256.0, "9975": 963217920.0, "9980": 980231168.0, "9985": 942242304.0, "9990": 976269312.0, "9995": 982374144.0, "10000": 971446848.0, "10005": 969493632.0, "10010": 943680640.0, "10015": 983056256.0, "10020": 978001152.0, "10025": 979242624.0, "10030": 971048704.0, "10035": 946250880.0, "10040": 949859456.0, "10045": 977742912.0, "10050": 985611712.0, "10055": 989928320.0, "10060": 958640768.0, "10065": 947012864.0, "10070": 966809344.0, "10075": 979290368.0, "10080": 971437056.0, "10085": 974278528.0, "10090": 943726400.0, "10095": 962498816.0, "10100": 971766592.0, "10105": 975544128.0, "10110": 971773248.0, "10115": 948759680.0, "10120": 962189568.0, "10125": 973888704.0, "10130": 980270336.0, "10135": 972186368.0, "10140": 957769472.0, "10145": 933653824.0, "10150": 973131008.0, "10155": 969532480.0, "10160": 961803520.0, "10165": 974641344.0, "10170": 943936384.0, "10175": 978967296.0, "10180": 983492480.0, "10185": 978513408.0, "10190": 955250048.0, "10195": 936769792.0, "10200": 987749760.0, "10205": 972407232.0, "10210": 966299392.0, "10215": 975753664.0, "10220": 948136896.0, "10225": 950239872.0, "10230": 975432768.0, "10235": 954077376.0, "10240": 968964992.0, "10245": 961698880.0, "10250": 936067904.0, "10255": 979012480.0, "10260": 964484992.0, "10265": 967133824.0, "10270": 968155712.0, "10275": 935292416.0, "10280": 968862272.0, "10285": 995875328.0, "10290": 979148416.0, "10295": 981429440.0, "10300": 951299200.0, "10305": 971679616.0, "10310": 959265152.0, "10315": 970708736.0, "10320": 984886528.0, "10325": 983019776.0, "10330": 934619904.0, "10335": 976073088.0, "10340": 957234624.0, "10345": 973596928.0, "10350": 984584192.0, "10355": 941988224.0, "10360": 961664320.0, "10365": 974047360.0, "10370": 980272640.0, "10375": 969556352.0, "10380": 961552128.0, "10385": 955280640.0, "10390": 989922560.0, "10395": 964722176.0, "10400": 960642176.0, "10405": 949758976.0, "10410": 955219072.0, "10415": 975789376.0, "10420": 966706816.0, "10425": 969506432.0, "10430": 964364544.0, "10435": 962828160.0, "10440": 971433984.0, "10445": 972012544.0, "10450": 974875072.0, "10455": 966060544.0, "10460": 948185472.0, "10465": 971546560.0, "10470": 972375680.0, "10475": 978998208.0, "10480": 996972928.0, "10485": 949471104.0, "10490": 934453952.0, "10495": 969008576.0, "10500": 978184704.0, "10505": 959157696.0, "10510": 951094016.0, "10515": 953571712.0, "10520": 971595840.0, "10525": 969474624.0, "10530": 969958016.0, "10535": 986132736.0, "10540": 946485504.0, "10545": 969931392.0, "10550": 969186816.0, "10555": 959153344.0, "10560": 975991744.0, "10565": 961068288.0, "10570": 968811456.0, "10575": 972891904.0, "10580": 960513856.0, "10585": 972902976.0, "10590": 951167360.0, "10595": 956110336.0, "10600": 967357440.0, "10605": 986330240.0, "10610": 965987520.0, "10615": 976514624.0, "10620": 940982656.0, "10625": 964900480.0, "10630": 967394944.0, "10635": 972584704.0, "10640": 974320640.0, "10645": 948286464.0, "10650": 965393152.0, "10655": 985059264.0, "10660": 975846016.0, "10665": 966806144.0, "10670": 954869760.0, "10675": 934049536.0, "10680": 985966080.0, "10685": 990623360.0, "10690": 963504896.0, "10695": 971687680.0, "10700": 949674112.0, "10705": 977884928.0, "10710": 968058816.0, "10715": 966984256.0, "10720": 
965952512.0, "10725": 943974144.0, "10730": 980023680.0, "10735": 960526976.0, "10740": 970852736.0, "10745": 984089408.0, "10750": 981454336.0, "10755": 944649984.0, "10760": 969563136.0, "10765": 972395520.0, "10770": 973569216.0, "10775": 958170496.0, "10780": 949262336.0, "10785": 953277312.0, "10790": 969936768.0, "10795": 960225472.0, "10800": 971829504.0, "10805": 951006784.0, "10810": 973830400.0, "10815": 959338368.0, "10820": 970908544.0, "10825": 967064064.0, "10830": 956364416.0, "10835": 962610432.0, "10840": 970236480.0, "10845": 963591424.0, "10850": 957523968.0, "10855": 967265408.0, "10860": 950283840.0, "10865": 964061184.0, "10870": 982951616.0, "10875": 981893056.0, "10880": 958156480.0, "10885": 954196032.0, "10890": 972668416.0, "10895": 973139584.0, "10900": 970239104.0, "10905": 964631040.0, "10910": 938386176.0, "10915": 960210432.0, "10920": 983137344.0, "10925": 969845760.0, "10930": 968366144.0, "10935": 962712320.0, "10940": 953820992.0, "10945": 964741952.0, "10950": 972209344.0, "10955": 966191104.0, "10960": 971610176.0, "10965": 966271232.0, "10970": 983117632.0, "10975": 965514368.0, "10980": 974202496.0, "10985": 986192000.0, "10990": 950847488.0, "10995": 963132416.0, "11000": 985138432.0, "11005": 977884416.0, "11010": 971098880.0, "11015": 969396352.0, "11020": 947445888.0, "11025": 959134144.0, "11030": 977714816.0, "11035": 975574912.0, "11040": 985872256.0, "11045": 956500352.0, "11050": 973088256.0, "11055": 974199168.0, "11060": 961831296.0, "11065": 984878592.0, "11070": 949311104.0, "11075": 976296192.0, "11080": 971749888.0, "11085": 966763456.0, "11090": 975845760.0, "11095": 945799552.0, "11100": 965375616.0, "11105": 973745152.0, "11110": 980670848.0, "11115": 967554944.0, "11120": 957039424.0, "11125": 956413632.0, "11130": 975295104.0, "11135": 979046912.0, "11140": 964354944.0, "11145": 965983104.0, "11150": 935703488.0, "11155": 976050560.0, "11160": 983912832.0, "11165": 982118016.0, "11170": 977449984.0, "11175": 957556224.0, "11180": 961974976.0, "11185": 971671680.0, "11190": 979433472.0, "11195": 985048768.0, "11200": 982644864.0, "11205": 941689088.0, "11210": 984237248.0, "11215": 967003136.0, "11220": 982997824.0, "11225": 961458176.0, "11230": 952721984.0, "11235": 981202048.0, "11240": 977080704.0, "11245": 965840960.0, "11250": 969133952.0, "11255": 959970560.0, "11260": 979531392.0, "11265": 963362432.0, "11270": 981020864.0, "11275": 967887616.0, "11280": 955683520.0, "11285": 953368576.0, "11290": 956169664.0, "11295": 967988096.0, "11300": 962328960.0, "11305": 958305216.0, "11310": 946119552.0, "11315": 982582336.0, "11320": 964385920.0, "11325": 980698112.0, "11330": 975096832.0, "11335": 951633856.0, "11340": 970191744.0, "11345": 969570368.0, "11350": 981222528.0, "11355": 982003968.0, "11360": 940734400.0, "11365": 970357376.0, "11370": 978743936.0, "11375": 975166976.0, "11380": 968146816.0, "11385": 958415872.0, "11390": 937727424.0, "11395": 977063680.0, "11400": 973220224.0, "11405": 960905088.0, "11410": 965713152.0, "11415": 928845504.0, "11420": 964215872.0, "11425": 980753664.0, "11430": 978255040.0, "11435": 969490688.0, "11440": 944765568.0, "11445": 974651712.0, "11450": 984207488.0, "11455": 970910528.0, "11460": 964396416.0, "11465": 959561536.0, "11470": 954503104.0, "11475": 972236544.0, "11480": 956272128.0, "11485": 976696640.0, "11490": 986294016.0, "11495": 958580608.0, "11500": 969890304.0, "11505": 963643136.0, "11510": 976468416.0, "11515": 977670784.0, "11520": 953717760.0, "11525": 975694592.0, 
"11530": 976366784.0, "11535": 979539840.0, "11540": 974213888.0, "11545": 953237760.0, "11550": 952876608.0, "11555": 981523968.0, "11560": 984397568.0, "11565": 964885632.0, "11570": 966179712.0, "11575": 951326144.0, "11580": 975963392.0, "11585": 977094976.0, "11590": 969142528.0, "11595": 976288256.0, "11600": 945702080.0, "11605": 972990592.0, "11610": 981990784.0, "11615": 971826048.0, "11620": 968871552.0, "11625": 949134656.0, "11630": 937098624.0, "11635": 973015936.0, "11640": 981051136.0, "11645": 979761088.0, "11650": 971523904.0, "11655": 955871616.0, "11660": 980551360.0, "11665": 957970112.0, "11670": 982200704.0, "11675": 972144256.0, "11680": 955991552.0, "11685": 982352768.0, "11690": 967901888.0, "11695": 967324672.0, "11700": 973474048.0, "11705": 956002304.0, "11710": 964387520.0, "11715": 982791040.0, "11720": 983414464.0, "11725": 964946432.0, "11730": 955116992.0, "11735": 942535616.0, "11740": 973473024.0, "11745": 970953344.0, "11750": 961329024.0, "11755": 962932032.0, "11760": 949762880.0, "11765": 983359872.0, "11770": 983926208.0, "11775": 975106496.0, "11780": 985147648.0, "11785": 946671680.0, "11790": 971940736.0, "11795": 970055936.0, "11800": 972920320.0, "11805": 986485504.0, "11810": 967198016.0, "11815": 955468800.0, "11820": 973065216.0, "11825": 970436608.0, "11830": 974723840.0, "11835": 961580288.0, "11840": 944012608.0, "11845": 980460608.0, "11850": 974036992.0, "11855": 977587584.0, "11860": 970838272.0, "11865": 938374016.0, "11870": 939584896.0, "11875": 989765824.0, "11880": 972060096.0, "11885": 962533504.0, "11890": 970036416.0, "11895": 964790784.0, "11900": 978644480.0, "11905": 961382400.0, "11910": 983285376.0, "11915": 989673280.0, "11920": 944389504.0, "11925": 993831552.0, "11930": 964324608.0, "11935": 963034240.0, "11940": 976559616.0, "11945": 944295168.0, "11950": 977183616.0, "11955": 978734592.0, "11960": 971857216.0, "11965": 976015360.0, "11970": 962975104.0, "11975": 962956160.0, "11980": 977118464.0, "11985": 952901504.0, "11990": 968065856.0, "11995": 964763648.0, "12000": 958159360.0, "12005": 974104448.0, "12010": 979022592.0, "12015": 971991680.0, "12020": 972997440.0, "12025": 934381568.0, "12030": 968816192.0, "12035": 983794432.0, "12040": 977346560.0, "12045": 981534656.0, "12050": 931237888.0, "12055": 938422208.0, "12060": 974225920.0, "12065": 965440192.0, "12070": 967822080.0, "12075": 949560064.0, "12080": 953071232.0, "12085": 972268032.0, "12090": 963464960.0, "12095": 963160192.0, "12100": 976455488.0, "12105": 950087936.0, "12110": 971641664.0, "12115": 967795072.0, "12120": 986064000.0, "12125": 980441216.0, "12130": 941392832.0, "12135": 955393408.0, "12140": 975621056.0, "12145": 979231104.0, "12150": 979447552.0, "12155": 961509760.0, "12160": 946346688.0, "12165": 968194176.0, "12170": 963957632.0, "12175": 967433344.0, "12180": 974487424.0, "12185": 952700800.0, "12190": 988139264.0, "12195": 970419968.0, "12200": 964451712.0, "12205": 968250624.0, "12210": 939045440.0, "12215": 996551936.0, "12220": 969730496.0, "12225": 979199488.0, "12230": 980110976.0, "12235": 950040064.0, "12240": 963539072.0, "12245": 965432576.0, "12250": 976486144.0, "12255": 967696512.0, "12260": 983259840.0, "12265": 931599104.0, "12270": 966248448.0, "12275": 979259904.0, "12280": 977310080.0, "12285": 969977664.0, "12290": 929106816.0, "12295": 976601920.0, "12300": 985780736.0, "12305": 969646976.0, "12310": 986092416.0, "12315": 935956096.0, "12320": 957300480.0, "12325": 966253760.0, "12330": 968036032.0, "12335": 
963579776.0, "12340": 957187136.0, "12345": 944112512.0, "12350": 966529216.0, "12355": 975463936.0, "12360": 978808512.0, "12365": 964314688.0, "12370": 948817536.0, "12375": 963568832.0, "12380": 964456704.0, "12385": 972856512.0, "12390": 961045248.0, "12395": 961321920.0, "12400": 975050368.0, "12405": 976178368.0, "12410": 953460224.0, "12415": 962906112.0, "12420": 944284032.0, "12425": 949358464.0, "12430": 972415808.0, "12435": 969262976.0, "12440": 961868672.0, "12445": 952009728.0, "12450": 947488960.0, "12455": 981286144.0, "12460": 973915776.0, "12465": 954449728.0, "12470": 981056896.0, "12475": 958359808.0, "12480": 966654464.0, "12485": 978353152.0, "12490": 973860608.0, "12495": 969484288.0, "12500": 961315776.0, "12505": 943362560.0, "12510": 960915456.0, "12515": 969443264.0, "12520": 973813504.0, "12525": 972206272.0, "12530": 944362368.0, "12535": 976327744.0, "12540": 965339392.0, "12545": 971542272.0, "12550": 969622528.0, "12555": 940870016.0, "12560": 964210496.0, "12565": 947289856.0, "12570": 974049664.0, "12575": 962758528.0, "12580": 957698432.0, "12585": 963981312.0, "12590": 965775872.0, "12595": 978633600.0, "12600": 981896896.0, "12605": 948892160.0, "12610": 937658752.0, "12615": 962336512.0, "12620": 960967168.0, "12625": 966064896.0, "12630": 970541312.0, "12635": 962186112.0, "12640": 977926912.0, "12645": 969185280.0, "12650": 969952256.0, "12655": 963883840.0, "12660": 932034624.0, "12665": 956643456.0, "12670": 985860800.0, "12675": 965296512.0, "12680": 960792960.0, "12685": 950972288.0, "12690": 945096832.0, "12695": 977981952.0, "12700": 984909568.0, "12705": 958576000.0, "12710": 968232896.0, "12715": 956169216.0, "12720": 976242752.0, "12725": 965009664.0, "12730": 969062912.0, "12735": 986433664.0, "12740": 937000064.0, "12745": 970493376.0, "12750": 974142720.0, "12755": 979585792.0, "12760": 969744320.0, "12765": 941692672.0, "12770": 951692032.0, "12775": 950967424.0, "12780": 968932864.0, "12785": 955875712.0, "12790": 962636480.0, "12795": 952586624.0, "12800": 962483264.0, "12805": 972246336.0, "12810": 973436032.0, "12815": 952158848.0, "12820": 940514688.0, "12825": 966155520.0, "12830": 998413184.0, "12835": 976364288.0, "12840": 962113600.0, "12845": 941478656.0, "12850": 958607616.0, "12855": 961030400.0, "12860": 971819904.0, "12865": 977734592.0, "12870": 974092160.0, "12875": 955347712.0, "12880": 967355392.0, "12885": 980923008.0, "12890": 959482048.0, "12895": 969087360.0, "12900": 937029440.0, "12905": 967658240.0, "12910": 980892800.0, "12915": 975173760.0, "12920": 955353920.0, "12925": 948786560.0, "12930": 959860096.0, "12935": 990709760.0, "12940": 967429120.0, "12945": 975184832.0, "12950": 971174784.0, "12955": 955414208.0, "12960": 977010816.0, "12965": 960637312.0, "12970": 961588096.0, "12975": 959651840.0, "12980": 937535360.0, "12985": 962478464.0, "12990": 967415296.0, "12995": 975045120.0, "13000": 979641856.0, "13005": 954120256.0, "13010": 951023488.0, "13015": 964293952.0, "13020": 960099328.0, "13025": 979529088.0, "13030": 971672192.0, "13035": 954662912.0, "13040": 953572352.0, "13045": 972095616.0, "13050": 967288448.0, "13055": 967195072.0, "13060": 951119616.0, "13065": 969747136.0, "13070": 974785536.0, "13075": 961115520.0, "13080": 960720128.0, "13085": 943947136.0, "13090": 973382144.0, "13095": 980402304.0, "13100": 964829632.0, "13105": 973786688.0, "13110": 924433600.0, "13115": 973397888.0, "13120": 980726656.0, "13125": 974874112.0, "13130": 951140352.0, "13135": 944402368.0, "13140": 925472512.0, 
"13145": 989946624.0, "13150": 983128384.0, "13155": 976112256.0, "13160": 969493376.0, "13165": 952687936.0, "13170": 976828416.0, "13175": 982051968.0, "13180": 976617152.0, "13185": 974823424.0, "13190": 948948224.0, "13195": 964233472.0, "13200": 969500032.0, "13205": 982801600.0, "13210": 971484480.0, "13215": 947706304.0, "13220": 962348288.0, "13225": 963725056.0, "13230": 975788416.0, "13235": 976924352.0, "13240": 965733504.0, "13245": 936130432.0, "13250": 977690752.0, "13255": 960274944.0, "13260": 966328768.0, "13265": 972906304.0, "13270": 954610816.0, "13275": 959105792.0, "13280": 976593536.0, "13285": 974588800.0, "13290": 966582528.0, "13295": 941927680.0, "13300": 969092608.0, "13305": 981875136.0, "13310": 971520064.0, "13315": 967657344.0, "13320": 976186560.0, "13325": 943584000.0, "13330": 972305088.0, "13335": 980015232.0, "13340": 960096128.0, "13345": 978147968.0, "13350": 954834112.0, "13355": 968692672.0, "13360": 982357632.0, "13365": 957037248.0, "13370": 966670208.0, "13375": 937474176.0, "13380": 953019776.0, "13385": 980252544.0, "13390": 978729472.0, "13395": 982379136.0, "13400": 951355840.0, "13405": 934069824.0, "13410": 975715840.0, "13415": 972746816.0, "13420": 984842304.0, "13425": 965315968.0, "13430": 954044160.0, "13435": 980554688.0, "13440": 966386688.0, "13445": 966260736.0, "13450": 969370880.0, "13455": 951342464.0, "13460": 964361920.0, "13465": 974124160.0, "13470": 971772032.0, "13475": 969524096.0, "13480": 966082944.0, "13485": 966661248.0, "13490": 977907904.0, "13495": 959929344.0, "13500": 971247744.0, "13505": 964952960.0, "13510": 963014016.0, "13515": 990387200.0, "13520": 958634496.0, "13525": 976411584.0, "13530": 983425152.0, "13535": 931233664.0, "13540": 946424192.0, "13545": 972178560.0, "13550": 970752512.0, "13555": 980824192.0, "13560": 959283200.0, "13565": 964057856.0, "13570": 972234496.0, "13575": 977301376.0, "13580": 976126016.0, "13585": 963726080.0, "13590": 940146432.0, "13595": 983421824.0, "13600": 990338112.0, "13605": 969167872.0, "13610": 973680896.0, "13615": 945169344.0, "13620": 972426880.0, "13625": 971570176.0, "13630": 959058880.0, "13635": 985242432.0, "13640": 980444160.0, "13645": 960904832.0, "13650": 953180800.0, "13655": 965462464.0, "13660": 978077120.0, "13665": 969974528.0, "13670": 956391808.0, "13675": 977613696.0, "13680": 976548800.0, "13685": 967873280.0, "13690": 971494336.0, "13695": 944145664.0, "13700": 974259776.0, "13705": 973368768.0, "13710": 975971520.0, "13715": 950612416.0, "13720": 951507712.0, "13725": 972559424.0, "13730": 969219776.0, "13735": 961791424.0, "13740": 972933184.0, "13745": 974013632.0, "13750": 958267072.0, "13755": 977652608.0, "13760": 964775872.0, "13765": 967891520.0, "13770": 978885440.0, "13775": 928849536.0, "13780": 973995648.0, "13785": 981754816.0, "13790": 963664832.0, "13795": 979678528.0, "13800": 955925248.0, "13805": 967662976.0, "13810": 963849216.0, "13815": 981097536.0, "13820": 976126720.0, "13825": 958134784.0, "13830": 945729792.0, "13835": 974588608.0, "13840": 996426560.0, "13845": 967754624.0, "13850": 974985344.0, "13855": 933642304.0, "13860": 971550912.0, "13865": 976804864.0, "13870": 986909376.0, "13875": 978891328.0, "13880": 951238144.0, "13885": 975656192.0, "13890": 968130624.0, "13895": 945049920.0, "13900": 979336000.0, "13905": 966581056.0, "13910": 965624512.0, "13915": 977527936.0, "13920": 975723328.0, "13925": 980898560.0, "13930": 962939008.0, "13935": 956708736.0, "13940": 965476864.0, "13945": 965272512.0, "13950": 
977735872.0, "13955": 977185856.0, "13960": 974487168.0, "13965": 974733952.0, "13970": 972502592.0, "13975": 971343360.0, "13980": 983221440.0, "13985": 968884224.0, "13990": 962307264.0, "13995": 980396544.0, "14000": 972012288.0, "14005": 973001280.0, "14010": 965791552.0, "14015": 942087488.0, "14020": 980372160.0, "14025": 977030912.0, "14030": 972029760.0, "14035": 971317248.0, "14040": 936423488.0, "14045": 968543168.0, "14050": 975151424.0, "14055": 978947392.0, "14060": 976588096.0, "14065": 928615424.0, "14070": 978142400.0, "14075": 979737856.0, "14080": 964897856.0, "14085": 969085440.0, "14090": 965731392.0, "14095": 950655552.0, "14100": 972197120.0, "14105": 970998912.0, "14110": 987596672.0, "14115": 967720768.0, "14120": 944377536.0, "14125": 973113664.0, "14130": 970942976.0, "14135": 978799488.0, "14140": 972459584.0, "14145": 947562880.0, "14150": 974518272.0, "14155": 979781248.0, "14160": 973137344.0, "14165": 972925632.0, "14170": 947136512.0, "14175": 949512896.0, "14180": 976771776.0, "14185": 971901824.0, "14190": 979880192.0, "14195": 973749632.0, "14200": 945151040.0, "14205": 980824320.0, "14210": 981374528.0, "14215": 970179840.0, "14220": 965415680.0, "14225": 956436096.0, "14230": 983963904.0, "14235": 984069888.0, "14240": 980598400.0, "14245": 969838144.0, "14250": 954012352.0, "14255": 941338240.0, "14260": 963530368.0, "14265": 988763584.0, "14270": 987012800.0, "14275": 968936000.0, "14280": 954836480.0, "14285": 962355904.0, "14290": 966005696.0, "14295": 966073024.0, "14300": 983166144.0, "14305": 936971200.0, "14310": 969462784.0, "14315": 977215168.0, "14320": 975826880.0, "14325": 980184256.0, "14330": 957130752.0, "14335": 963026496.0, "14340": 956103616.0, "14345": 968131328.0, "14350": 963530496.0, "14355": 946436736.0, "14360": 949594112.0, "14365": 972277120.0, "14370": 953597824.0, "14375": 956086528.0, "14380": 957659072.0, "14385": 951449088.0, "14390": 982023232.0, "14395": 970940288.0, "14400": 963401536.0, "14405": 969278144.0, "14410": 965452416.0, "14415": 963647360.0, "14420": 960365824.0, "14425": 976716096.0, "14430": 979015680.0, "14435": 970515840.0, "14440": 973365184.0, "14445": 980543488.0, "14450": 973429312.0, "14455": 961209856.0, "14460": 955830336.0, "14465": 934785152.0, "14470": 960442880.0, "14475": 966037120.0, "14480": 967167168.0, "14485": 975945536.0, "14490": 947079744.0, "14495": 959660992.0, "14500": 971302848.0, "14505": 966386560.0, "14510": 977723904.0, "14515": 942421120.0, "14520": 953452992.0, "14525": 971592576.0, "14530": 957228160.0, "14535": 979033152.0, "14540": 954435072.0, "14545": 940022720.0, "14550": 988529792.0, "14555": 971993920.0, "14560": 973580032.0, "14565": 969273344.0, "14570": 948413504.0, "14575": 966245056.0, "14580": 965976320.0, "14585": 976792896.0, "14590": 975140480.0, "14595": 945539008.0, "14600": 958305792.0, "14605": 968487104.0, "14610": 953784320.0, "14615": 967453824.0, "14620": 953019840.0, "14625": 963292608.0, "14630": 971277248.0, "14635": 976349632.0, "14640": 978192320.0, "14645": 974341952.0, "14650": 946824320.0, "14655": 977048128.0, "14660": 975139776.0, "14665": 953938304.0, "14670": 970499648.0, "14675": 952308352.0, "14680": 970371072.0, "14685": 971631168.0, "14690": 967186432.0, "14695": 949231744.0, "14700": 934527744.0, "14705": 959035648.0, "14710": 978488192.0, "14715": 981811200.0, "14720": 967425216.0, "14725": 962273216.0, "14730": 938071552.0, "14735": 954949760.0, "14740": 967682944.0, "14745": 987363968.0, "14750": 973757632.0, "14755": 944139776.0, 
"14760": 974070464.0, "14765": 970028736.0, "14770": 989020416.0, "14775": 982251968.0, "14780": 945227968.0, "14785": 958184832.0, "14790": 971355904.0, "14795": 974832128.0, "14800": 969346560.0, "14805": 949752000.0, "14810": 951611264.0, "14815": 966461568.0, "14820": 968131712.0, "14825": 964095040.0, "14830": 956590976.0, "14835": 946108800.0, "14840": 967462528.0, "14845": 980569344.0, "14850": 953620992.0, "14855": 960824192.0, "14860": 967263808.0, "14865": 977179648.0, "14870": 967118272.0, "14875": 979900224.0, "14880": 957215680.0, "14885": 962314368.0, "14890": 950065536.0, "14895": 976325504.0, "14900": 971491520.0, "14905": 980176000.0, "14910": 972257408.0, "14915": 945729728.0, "14920": 952683584.0, "14925": 977302272.0, "14930": 976040640.0, "14935": 973716864.0, "14940": 939925504.0, "14945": 975551360.0, "14950": 977207488.0, "14955": 983293376.0, "14960": 955337664.0, "14965": 956364160.0, "14970": 943336064.0, "14975": 960502976.0, "14980": 977400448.0, "14985": 984364160.0, "14990": 966785216.0, "14995": 952542720.0, "15000": 976701184.0, "15005": 965286784.0, "15010": 966896512.0, "15015": 971431424.0, "15020": 944496768.0, "15025": 974002880.0, "15030": 970299200.0, "15035": 969918400.0, "15040": 971444416.0, "15045": 949840896.0, "15050": 948290880.0, "15055": 967996544.0, "15060": 970649856.0, "15065": 972621376.0, "15070": 953729728.0, "15075": 948886656.0, "15080": 970400832.0, "15085": 973499712.0, "15090": 971479424.0, "15095": 949907904.0, "15100": 957973440.0, "15105": 987142528.0, "15110": 980630784.0, "15115": 956878080.0, "15120": 959515072.0, "15125": 962171200.0, "15130": 968246912.0, "15135": 982215360.0, "15140": 956310016.0, "15145": 976362432.0, "15150": 938072512.0, "15155": 938482560.0, "15160": 967764800.0, "15165": 975011008.0, "15170": 959489216.0, "15175": 974076096.0, "15180": 945108864.0, "15185": 964669312.0, "15190": 963029312.0, "15195": 970861376.0, "15200": 985236480.0, "15205": 941382400.0, "15210": 963389760.0, "15215": 965045440.0, "15220": 983217088.0, "15225": 966254784.0, "15230": 949398976.0, "15235": 933362240.0, "15240": 979732672.0, "15245": 958018176.0, "15250": 964651072.0, "15255": 971958208.0, "15260": 955123200.0, "15265": 981443072.0, "15270": 958343808.0, "15275": 973505664.0, "15280": 967743680.0, "15285": 942624320.0, "15290": 980272576.0, "15295": 967661504.0, "15300": 956577472.0, "15305": 960713024.0, "15310": 932036096.0, "15315": 939240448.0, "15320": 969011584.0, "15325": 969967552.0, "15330": 955103808.0, "15335": 966999552.0, "15340": 951423168.0, "15345": 987533888.0, "15350": 973249152.0, "15355": 972087424.0, "15360": 968278784.0, "15365": 940677056.0, "15370": 955596864.0, "15375": 972153728.0, "15380": 972382720.0, "15385": 977270016.0, "15390": 946240256.0, "15395": 950081728.0, "15400": 961571200.0, "15405": 973218752.0, "15410": 964240256.0, "15415": 964005632.0, "15420": 961338624.0, "15425": 969642176.0, "15430": 973194112.0, "15435": 959152640.0, "15440": 956156352.0, "15445": 951726400.0, "15450": 979291520.0, "15455": 955560640.0, "15460": 971117248.0, "15465": 979960192.0, "15470": 966361536.0, "15475": 958637568.0, "15480": 967886784.0, "15485": 956233536.0, "15490": 956001152.0, "15495": 954442624.0, "15500": 968334144.0, "15505": 967731968.0, "15510": 966450496.0, "15515": 971994240.0, "15520": 963718080.0, "15525": 928754304.0, "15530": 977234752.0, "15535": 976172672.0, "15540": 974639040.0, "15545": 967611328.0, "15550": 950307008.0, "15555": 963235968.0, "15560": 982952448.0, "15565": 
962352832.0, "15570": 968922048.0, "15575": 954520320.0, "15580": 959606528.0, "15585": 977561664.0, "15590": 986996224.0, "15595": 974488064.0, "15600": 951860992.0, "15605": 943877248.0, "15610": 973470848.0, "15615": 970583040.0, "15620": 953505600.0, "15625": 979770688.0, "15630": 950325504.0, "15635": 976742976.0, "15640": 962887424.0, "15645": 961754752.0, "15650": 983836608.0, "15655": 939388992.0, "15660": 979476416.0, "15665": 965754432.0, "15670": 971486976.0, "15675": 978668288.0, "15680": 952215488.0, "15685": 945604416.0, "15690": 967702656.0, "15695": 965316992.0, "15700": 975053376.0, "15705": 967230848.0, "15710": 949558464.0, "15715": 979138496.0, "15720": 960112128.0, "15725": 971013824.0, "15730": 959742656.0, "15735": 943066624.0, "15740": 985974400.0, "15745": 978520256.0, "15750": 981951616.0, "15755": 941255104.0, "15760": 958231040.0, "15765": 955155136.0, "15770": 979571072.0, "15775": 964156608.0, "15780": 952286720.0, "15785": 962304128.0, "15790": 934782208.0, "15795": 968425472.0, "15800": 974214016.0, "15805": 971295936.0, "15810": 974817280.0, "15815": 939217728.0, "15820": 973671360.0, "15825": 987712384.0, "15830": 968005056.0, "15835": 980922816.0, "15840": 932451648.0, "15845": 969832192.0, "15850": 964188928.0, "15855": 984884800.0, "15860": 957691520.0, "15865": 963402496.0, "15870": 940917952.0, "15875": 972015040.0, "15880": 964860096.0, "15885": 982036736.0, "15890": 968766656.0, "15895": 955700224.0, "15900": 991766144.0, "15905": 965953280.0, "15910": 980477696.0, "15915": 971952896.0, "15920": 948512960.0, "15925": 968065600.0, "15930": 983285504.0, "15935": 968218688.0, "15940": 971039168.0, "15945": 980944704.0, "15950": 963955648.0, "15955": 974919744.0, "15960": 974027392.0, "15965": 971123776.0, "15970": 968797056.0, "15975": 954375936.0, "15980": 975755328.0, "15985": 975761536.0, "15990": 985048384.0, "15995": 974007040.0, "16000": 968893504.0, "16005": 961021824.0, "16010": 973023936.0, "16015": 980725056.0, "16020": 964699520.0, "16025": 960988864.0, "16030": 951469504.0, "16035": 984177984.0, "16040": 960567168.0, "16045": 970837376.0, "16050": 955387520.0, "16055": 965798848.0, "16060": 964951424.0, "16065": 972075328.0, "16070": 968096512.0, "16075": 978183680.0, "16080": 950462976.0, "16085": 970515648.0, "16090": 973137728.0, "16095": 967084160.0, "16100": 981180928.0, "16105": 929836096.0, "16110": 961697088.0, "16115": 967708800.0, "16120": 970840768.0, "16125": 975689472.0, "16130": 960785472.0, "16135": 950516416.0, "16140": 977223808.0, "16145": 977198592.0, "16150": 982183808.0, "16155": 977113920.0, "16160": 942220672.0, "16165": 970271808.0, "16170": 962959744.0, "16175": 978328128.0, "16180": 976568320.0, "16185": 953091968.0, "16190": 951505024.0, "16195": 977869632.0, "16200": 975379840.0, "16205": 946390144.0, "16210": 961931264.0, "16215": 950583808.0, "16220": 978548544.0, "16225": 967937984.0, "16230": 972024256.0, "16235": 973089472.0, "16240": 940666880.0, "16245": 974809728.0, "16250": 979361600.0, "16255": 989790720.0, "16260": 969772864.0, "16265": 954990016.0, "16270": 963717376.0, "16275": 969010944.0, "16280": 991873152.0, "16285": 976681216.0, "16290": 940481024.0, "16295": 944142976.0, "16300": 957550464.0, "16305": 980615360.0, "16310": 975318400.0, "16315": 972553856.0, "16320": 942250880.0, "16325": 974972928.0, "16330": 978329856.0, "16335": 975290560.0, "16340": 988127424.0, "16345": 947279744.0, "16350": 962223296.0, "16355": 967594368.0, "16360": 957610496.0, "16365": 972186624.0, "16370": 934185792.0, 
"16375": 962348800.0, "16380": 984477824.0, "16385": 974656128.0, "16390": 976886976.0, "16395": 962924736.0, "16400": 951315072.0, "16405": 961755392.0, "16410": 963828928.0, "16415": 979821120.0, "16420": 967534144.0, "16425": 955781056.0, "16430": 959290048.0, "16435": 971726976.0, "16440": 970332544.0, "16445": 969030208.0, "16450": 946113920.0, "16455": 933989376.0, "16460": 981124992.0, "16465": 964632064.0, "16470": 979162688.0, "16475": 950150784.0, "16480": 962357824.0, "16485": 971289152.0, "16490": 965963968.0, "16495": 983838272.0, "16500": 990342016.0, "16505": 957155264.0, "16510": 967255424.0, "16515": 954166016.0, "16520": 971568384.0, "16525": 977283776.0, "16530": 931377088.0, "16535": 973975424.0, "16540": 958078144.0, "16545": 970957312.0, "16550": 974243456.0, "16555": 951244224.0, "16560": 959233408.0, "16565": 964797568.0, "16570": 970842368.0, "16575": 975001472.0, "16580": 971691840.0, "16585": 946407936.0, "16590": 964558720.0, "16595": 974832576.0, "16600": 980680256.0, "16605": 972615936.0, "16610": 946420096.0, "16615": 985336896.0, "16620": 959355456.0, "16625": 978286976.0, "16630": 973369728.0, "16635": 956651840.0, "16640": 967439616.0, "16645": 969840576.0, "16650": 967476288.0, "16655": 964821888.0, "16660": 975388544.0, "16665": 951522624.0, "16670": 972260160.0, "16675": 966166528.0, "16680": 960696768.0, "16685": 968686272.0, "16690": 945726272.0, "16695": 968601024.0, "16700": 970315968.0, "16705": 962944832.0, "16710": 972351424.0, "16715": 946892992.0, "16720": 963673536.0, "16725": 966691136.0, "16730": 974686336.0, "16735": 988790144.0, "16740": 951990976.0, "16745": 949748480.0, "16750": 949361792.0, "16755": 965034880.0, "16760": 968675712.0, "16765": 967437888.0, "16770": 925727360.0, "16775": 972290560.0, "16780": 972330368.0, "16785": 956975104.0, "16790": 960513024.0, "16795": 941955520.0, "16800": 950376896.0, "16805": 965461184.0, "16810": 960173440.0, "16815": 964331712.0, "16820": 952401856.0, "16825": 965658560.0, "16830": 972284992.0, "16835": 976195456.0, "16840": 965222080.0, "16845": 972768576.0, "16850": 962422784.0, "16855": 984159040.0, "16860": 975896384.0, "16865": 960584896.0, "16870": 952908992.0, "16875": 960424128.0, "16880": 976028480.0, "16885": 975222784.0, "16890": 965333952.0, "16895": 965906176.0, "16900": 951617792.0, "16905": 960878208.0, "16910": 965266240.0, "16915": 973538368.0, "16920": 957390272.0, "16925": 977255744.0, "16930": 939947840.0, "16935": 977075264.0, "16940": 968981504.0, "16945": 977769408.0, "16950": 961893760.0, "16955": 945175488.0, "16960": 970314560.0, "16965": 971461568.0, "16970": 987751360.0, "16975": 975135424.0, "16980": 947243328.0, "16985": 960180544.0, "16990": 972977664.0, "16995": 960401280.0, "17000": 958489536.0, "17005": 966241536.0, "17010": 938411648.0, "17015": 979168064.0, "17020": 964805376.0, "17025": 968058176.0, "17030": 971427968.0, "17035": 951260352.0, "17040": 965111680.0, "17045": 973343040.0, "17050": 974798528.0, "17055": 976194560.0, "17060": 943417856.0, "17065": 978102656.0, "17070": 978867904.0, "17075": 971241600.0, "17080": 971332224.0, "17085": 956242944.0, "17090": 963562752.0, "17095": 982920896.0, "17100": 960358464.0, "17105": 960677248.0, "17110": 962164480.0, "17115": 940308864.0, "17120": 970400128.0, "17125": 968193536.0, "17130": 962444672.0, "17135": 969163968.0, "17140": 951687104.0, "17145": 969805184.0, "17150": 962226816.0, "17155": 966809152.0, "17160": 976322112.0, "17165": 963705472.0, "17170": 963353536.0, "17175": 977543552.0, "17180": 
958577216.0, "17185": 983415616.0, "17190": 974238848.0, "17195": 936899712.0, "17200": 962513024.0, "17205": 970889728.0, "17210": 982771648.0, "17215": 979158336.0, "17220": 937253888.0, "17225": 961064768.0, "17230": 949822272.0, "17235": 969595328.0, "17240": 981936192.0, "17245": 951998976.0, "17250": 967198720.0, "17255": 977273984.0, "17260": 962347840.0, "17265": 964929408.0, "17270": 956084096.0, "17275": 944942208.0, "17280": 975411456.0, "17285": 968698560.0, "17290": 954031168.0, "17295": 957250112.0, "17300": 935599040.0, "17305": 978723072.0, "17310": 964720896.0, "17315": 971155072.0, "17320": 956952832.0, "17325": 944716352.0, "17330": 954740224.0, "17335": 965640832.0, "17340": 965853504.0, "17345": 970905920.0, "17350": 952532864.0, "17355": 948969216.0, "17360": 980897152.0, "17365": 963183424.0, "17370": 967446400.0, "17375": 961690432.0, "17380": 944445504.0, "17385": 967809728.0, "17390": 966690944.0, "17395": 968709760.0, "17400": 977065856.0, "17405": 946112000.0, "17410": 962593856.0, "17415": 979435328.0, "17420": 963282240.0, "17425": 966960512.0, "17430": 952351040.0, "17435": 951644544.0, "17440": 963726720.0, "17445": 967403456.0, "17450": 962674432.0, "17455": 984241152.0, "17460": 943773376.0, "17465": 968692288.0, "17470": 975308672.0, "17475": 958256192.0, "17480": 973974144.0, "17485": 949638336.0, "17490": 970660608.0, "17495": 969462848.0, "17500": 962754048.0, "17505": 963251712.0, "17510": 953793088.0, "17515": 960236608.0, "17520": 982376064.0, "17525": 969747904.0, "17530": 965922304.0, "17535": 960848128.0, "17540": 952698112.0, "17545": 971907904.0, "17550": 986961664.0, "17555": 974110080.0, "17560": 955917696.0, "17565": 945362880.0, "17570": 965742592.0, "17575": 987562496.0, "17580": 977576896.0, "17585": 959675840.0, "17590": 941248640.0, "17595": 943275904.0, "17600": 969671872.0, "17605": 968406528.0, "17610": 977230656.0, "17615": 954118464.0, "17620": 956100096.0, "17625": 983353344.0, "17630": 967845312.0, "17635": 981018304.0, "17640": 973547072.0, "17645": 945836160.0, "17650": 966224640.0, "17655": 975406528.0, "17660": 965978432.0, "17665": 971208448.0, "17670": 958744320.0, "17675": 950538304.0, "17680": 957124544.0, "17685": 964157696.0, "17690": 980965376.0, "17695": 953211392.0, "17700": 948592320.0, "17705": 969111040.0, "17710": 977671360.0, "17715": 974264000.0, "17720": 960263936.0, "17725": 933741184.0, "17730": 960575168.0, "17735": 974578624.0, "17740": 972193088.0, "17745": 964344128.0, "17750": 955554432.0, "17755": 973237056.0, "17760": 977684480.0, "17765": 978858752.0, "17770": 959344064.0, "17775": 958976128.0, "17780": 954037120.0, "17785": 974960768.0, "17790": 975178176.0, "17795": 958393600.0, "17800": 950286848.0, "17805": 954339328.0, "17810": 979006784.0, "17815": 963527936.0, "17820": 972991744.0, "17825": 967177152.0, "17830": 934930240.0, "17835": 961285888.0, "17840": 974165952.0, "17845": 987790272.0, "17850": 984524416.0, "17855": 940426944.0, "17860": 963084288.0, "17865": 970860544.0, "17870": 958984384.0, "17875": 973319232.0, "17880": 958163584.0, "17885": 943018304.0, "17890": 959682752.0, "17895": 975314496.0, "17900": 952184064.0, "17905": 982646848.0, "17910": 948269952.0, "17915": 962348480.0, "17920": 956734592.0, "17925": 959282496.0, "17930": 983047872.0, "17935": 962428160.0, "17940": 959424960.0, "17945": 963589184.0, "17950": 949808896.0, "17955": 959130944.0, "17960": 956891584.0, "17965": 962914560.0, "17970": 968700672.0, "17975": 965996928.0, "17980": 986726208.0, "17985": 965843264.0, 
"17990": 941355712.0, "17995": 973860160.0, "18000": 977828352.0, "18005": 976420352.0, "18010": 960996736.0, "18015": 959012480.0, "18020": 967538496.0, "18025": 982076736.0, "18030": 973972544.0, "18035": 958748160.0, "18040": 942179072.0, "18045": 959097984.0, "18050": 966525760.0, "18055": 966071872.0, "18060": 971985280.0, "18065": 960965888.0, "18070": 943918400.0, "18075": 971335936.0, "18080": 962708224.0, "18085": 966710336.0, "18090": 987123008.0, "18095": 946611840.0, "18100": 984333056.0, "18105": 970649216.0, "18110": 969867200.0, "18115": 980873472.0, "18120": 948304192.0, "18125": 968164288.0, "18130": 950702464.0, "18135": 971857600.0, "18140": 956186944.0, "18145": 956816384.0, "18150": 940174528.0, "18155": 976168320.0, "18160": 961176704.0, "18165": 973669440.0, "18170": 969447936.0, "18175": 958004928.0, "18180": 967492032.0, "18185": 971452928.0, "18190": 990824128.0, "18195": 973513984.0, "18200": 934250368.0, "18205": 961805184.0, "18210": 968073024.0, "18215": 972178368.0, "18220": 975529792.0, "18225": 958585792.0, "18230": 949288896.0, "18235": 979645440.0, "18240": 957908736.0, "18245": 960569792.0, "18250": 961457664.0, "18255": 936037120.0, "18260": 973394048.0, "18265": 958449280.0, "18270": 978110272.0, "18275": 979202688.0, "18280": 929103296.0, "18285": 953485696.0, "18290": 969633728.0, "18295": 979497600.0, "18300": 956621504.0, "18305": 941945664.0, "18310": 943587712.0, "18315": 959676352.0, "18320": 968497728.0, "18325": 970299264.0, "18330": 958773760.0, "18335": 945692288.0, "18340": 970467264.0, "18345": 969019584.0, "18350": 951612736.0, "18355": 974088064.0, "18360": 952257024.0, "18365": 980198592.0, "18370": 968313536.0, "18375": 972265728.0, "18380": 958061056.0, "18385": 954107520.0, "18390": 962499008.0, "18395": 967513152.0, "18400": 967515136.0, "18405": 955126976.0, "18410": 960415424.0, "18415": 965041856.0, "18420": 966219136.0, "18425": 966133696.0, "18430": 997019136.0, "18435": 974711232.0, "18440": 957926080.0, "18445": 978245888.0, "18450": 965581504.0, "18455": 986973696.0, "18460": 982029184.0, "18465": 951684800.0, "18470": 957134912.0, "18475": 973012544.0, "18480": 994751296.0, "18485": 973953280.0, "18490": 955629760.0, "18495": 935115136.0, "18500": 979000064.0, "18505": 971841152.0, "18510": 967508608.0, "18515": 976642496.0, "18520": 933745920.0, "18525": 991654144.0, "18530": 979063616.0, "18535": 985749760.0, "18540": 969267072.0, "18545": 964715328.0, "18550": 971362176.0, "18555": 962632000.0, "18560": 967496384.0, "18565": 975532864.0, "18570": 965695936.0, "18575": 976086464.0, "18580": 974320960.0, "18585": 968904000.0, "18590": 964220608.0, "18595": 967276480.0, "18600": 953793152.0, "18605": 978363200.0, "18610": 976844928.0, "18615": 966797312.0, "18620": 963192576.0, "18625": 950932608.0, "18630": 974093248.0, "18635": 971887936.0, "18640": 963371200.0, "18645": 967594816.0, "18650": 942159040.0, "18655": 980451392.0, "18660": 972659392.0, "18665": 968656128.0, "18670": 971851584.0, "18675": 950324288.0, "18680": 943713472.0, "18685": 979972288.0, "18690": 962768128.0, "18695": 981290240.0, "18700": 978597760.0, "18705": 950008256.0, "18710": 960953664.0, "18715": 983939200.0, "18720": 971435008.0, "18725": 978313280.0, "18730": 944193088.0, "18735": 966865408.0, "18740": 963254592.0, "18745": 973560896.0, "18750": 969842880.0, "18755": 957241024.0, "18760": 939609920.0, "18765": 977096832.0, "18770": 966931520.0, "18775": 973736704.0, "18780": 978030848.0, "18785": 957705536.0, "18790": 970038528.0, "18795": 
982177664.0, "18800": 967871424.0, "18805": 976630784.0, "18810": 947540928.0, "18815": 988292928.0, "18820": 966303040.0, "18825": 968836480.0, "18830": 964583744.0, "18835": 953181056.0, "18840": 962561280.0, "18845": 958572672.0, "18850": 969400576.0, "18855": 980487040.0, "18860": 971408256.0, "18865": 944428416.0, "18870": 965974400.0, "18875": 971998656.0, "18880": 961894912.0, "18885": 959254528.0, "18890": 957683136.0, "18895": 991787840.0, "18900": 975676864.0, "18905": 969733440.0, "18910": 979492096.0, "18915": 954321152.0, "18920": 945814720.0, "18925": 978234112.0, "18930": 973338432.0, "18935": 981887168.0, "18940": 962591232.0, "18945": 933676736.0, "18950": 964666432.0, "18955": 974493824.0, "18960": 986200000.0, "18965": 972651712.0, "18970": 942370624.0, "18975": 960520192.0, "18980": 970663744.0, "18985": 963885312.0, "18990": 962373184.0, "18995": 952128960.0, "19000": 960340608.0, "19005": 975449280.0, "19010": 972227200.0, "19015": 986173888.0, "19020": 954223872.0, "19025": 953141568.0, "19030": 970180736.0, "19035": 970378752.0, "19040": 962121600.0, "19045": 961324736.0, "19050": 946540672.0, "19055": 969895360.0, "19060": 964830464.0, "19065": 980621504.0, "19070": 962633408.0, "19075": 932727040.0, "19080": 965149696.0, "19085": 981382848.0, "19090": 956473152.0, "19095": 974062144.0, "19100": 933960192.0, "19105": 955332160.0, "19110": 967945792.0, "19115": 972632832.0, "19120": 958798528.0, "19125": 953611328.0, "19130": 949898880.0, "19135": 964230976.0, "19140": 955212736.0, "19145": 975604800.0, "19150": 977208064.0, "19155": 933893248.0, "19160": 952756096.0, "19165": 977575744.0, "19170": 963529664.0, "19175": 969370880.0, "19180": 950603392.0, "19185": 964848384.0, "19190": 971661312.0, "19195": 956215488.0, "19200": 969428160.0, "19205": 976884544.0, "19210": 947380608.0, "19215": 972648256.0, "19220": 959315264.0, "19225": 977761024.0, "19230": 976715328.0, "19235": 952051072.0, "19240": 978068096.0, "19245": 981299584.0, "19250": 984282752.0, "19255": 966788544.0, "19260": 935486080.0, "19265": 964541248.0, "19270": 972778112.0, "19275": 966816128.0, "19280": 961968320.0, "19285": 971908032.0, "19290": 946398528.0, "19295": 973472768.0, "19300": 983437632.0, "19305": 958045056.0, "19310": 954958976.0, "19315": 950102528.0, "19320": 979548928.0, "19325": 968216576.0, "19330": 963356480.0, "19335": 973655424.0, "19340": 939865472.0, "19345": 965736832.0, "19350": 973130816.0, "19355": 981005120.0, "19360": 976416192.0, "19365": 957146176.0, "19370": 947808832.0, "19375": 962607680.0, "19380": 981442816.0, "19385": 988441216.0, "19390": 979042432.0, "19395": 927609024.0, "19400": 971243520.0, "19405": 978583488.0, "19410": 968769664.0, "19415": 975598336.0, "19420": 938733312.0, "19425": 962072640.0, "19430": 949507200.0, "19435": 964221696.0, "19440": 984078720.0, "19445": 948813696.0, "19450": 945858048.0, "19455": 965394944.0, "19460": 972468928.0, "19465": 974178240.0, "19470": 965199936.0, "19475": 946861632.0, "19480": 975192896.0, "19485": 965302848.0, "19490": 962970368.0, "19495": 969554368.0, "19500": 947688832.0, "19505": 968248192.0, "19510": 950829056.0, "19515": 961460352.0, "19520": 965446528.0, "19525": 946190528.0, "19530": 977091456.0, "19535": 985431808.0, "19540": 956980288.0, "19545": 964561024.0, "19550": 946649792.0, "19555": 951073792.0, "19560": 989542016.0, "19565": 978792448.0, "19570": 967723392.0, "19575": 959080512.0, "19580": 944989248.0, "19585": 963974912.0, "19590": 967305856.0, "19595": 971684096.0, "19600": 969394560.0, 
"19605": 946605504.0, "19610": 973773632.0, "19615": 971585856.0, "19620": 952292096.0, "19625": 971446464.0, "19630": 975844416.0, "19635": 940342464.0, "19640": 971490752.0, "19645": 967646848.0, "19650": 971201728.0, "19655": 981033792.0, "19660": 946127936.0, "19665": 956998272.0, "19670": 964879424.0, "19675": 964918464.0, "19680": 971638720.0, "19685": 942019264.0, "19690": 970845184.0, "19695": 964999232.0, "19700": 967442368.0, "19705": 977280000.0, "19710": 946090176.0, "19715": 966561600.0, "19720": 970476864.0, "19725": 981582080.0, "19730": 966071616.0, "19735": 970683520.0, "19740": 939906752.0, "19745": 966141504.0, "19750": 966616576.0, "19755": 978492224.0, "19760": 942953600.0, "19765": 935134016.0, "19770": 968346624.0, "19775": 963405632.0, "19780": 965244992.0, "19785": 964309696.0, "19790": 939642240.0, "19795": 969006016.0, "19800": 981397888.0, "19805": 971323584.0, "19810": 966980480.0, "19815": 946106112.0, "19820": 937542016.0, "19825": 970677632.0, "19830": 974193856.0, "19835": 981512960.0, "19840": 976939392.0, "19845": 938495424.0, "19850": 990720192.0, "19855": 970156288.0, "19860": 955210496.0, "19865": 972591296.0, "19870": 949400256.0, "19875": 979505536.0, "19880": 975419776.0, "19885": 967884096.0, "19890": 976373824.0, "19895": 946447552.0, "19900": 955145152.0, "19905": 975599168.0, "19910": 959631424.0, "19915": 976674048.0, "19920": 984658816.0, "19925": 964588992.0, "19930": 979252160.0, "19935": 978613568.0, "19940": 969658176.0, "19945": 959323648.0, "19950": 956080128.0, "19955": 975562944.0, "19960": 960158400.0, "19965": 973129664.0, "19970": 977989632.0, "19975": 944341696.0, "19980": 959600512.0, "19985": 964040128.0, "19990": 968673728.0, "19995": 957332736.0, "20000": 960562688.0, "20005": 939034176.0, "20010": 974680000.0, "20015": 967056960.0, "20020": 994523648.0, "20025": 960802496.0, "20030": 942043072.0, "20035": 966649600.0, "20040": 972471616.0, "20045": 979315584.0, "20050": 967969280.0, "20055": 946177792.0, "20060": 958031936.0, "20065": 984213824.0, "20070": 959942400.0, "20075": 972289152.0, "20080": 969370816.0, "20085": 947952640.0, "20090": 960814464.0, "20095": 962763072.0, "20100": 958651328.0, "20105": 971482240.0, "20110": 956726976.0, "20115": 967553856.0, "20120": 961083200.0, "20125": 969247872.0, "20130": 965813312.0, "20135": 946547776.0, "20140": 959285440.0, "20145": 968867904.0, "20150": 965384192.0, "20155": 977176704.0, "20160": 962811520.0, "20165": 966532096.0, "20170": 971833536.0, "20175": 974494080.0, "20180": 964997376.0, "20185": 957078784.0, "20190": 942552896.0, "20195": 961394240.0, "20200": 969004096.0, "20205": 976699392.0, "20210": 959044608.0, "20215": 937824384.0, "20220": 957844608.0, "20225": 970948032.0, "20230": 967135168.0, "20235": 970408320.0, "20240": 943574912.0, "20245": 943224128.0, "20250": 979426176.0, "20255": 976200256.0, "20260": 970918976.0, "20265": 952732224.0, "20270": 942750656.0, "20275": 971427264.0, "20280": 962874752.0, "20285": 970503040.0, "20290": 968812032.0, "20295": 939576512.0, "20300": 969248832.0, "20305": 961747328.0, "20310": 972793536.0, "20315": 991660160.0, "20320": 953121792.0, "20325": 948385344.0, "20330": 969882944.0, "20335": 962098176.0, "20340": 959559168.0, "20345": 961394560.0, "20350": 952381568.0, "20355": 974719040.0, "20360": 963801024.0, "20365": 971472640.0, "20370": 975279744.0, "20375": 943243328.0, "20380": 968764736.0, "20385": 965214656.0, "20390": 969124096.0, "20395": 986057600.0, "20400": 956952896.0, "20405": 964715136.0, "20410": 
980622656.0, "20415": 966292736.0, "20420": 964690560.0, "20425": 947603136.0, "20430": 932482560.0, "20435": 981047232.0, "20440": 968548992.0, "20445": 980160000.0, "20450": 959336768.0, "20455": 939003968.0, "20460": 955138944.0, "20465": 979918272.0, "20470": 977633344.0, "20475": 968681792.0, "20480": 945871232.0, "20485": 957332992.0, "20490": 979114944.0, "20495": 963089152.0, "20500": 973989376.0, "20505": 961219776.0, "20510": 944204928.0, "20515": 981296768.0, "20520": 989534912.0, "20525": 979293824.0, "20530": 961794176.0, "20535": 945386624.0, "20540": 969870272.0, "20545": 957345088.0, "20550": 965808576.0, "20555": 985260544.0, "20560": 943665920.0, "20565": 964955392.0, "20570": 968494592.0, "20575": 961595712.0, "20580": 953131904.0, "20585": 953912512.0, "20590": 986070784.0, "20595": 964241728.0, "20600": 960626944.0, "20605": 964691520.0, "20610": 944502720.0, "20615": 935571392.0, "20620": 975021504.0, "20625": 974277184.0, "20630": 970029760.0, "20635": 984198464.0, "20640": 953678848.0, "20645": 970072704.0, "20650": 972462144.0, "20655": 956584448.0, "20660": 965764352.0, "20665": 942878784.0, "20670": 958603456.0, "20675": 981554560.0, "20680": 970594496.0, "20685": 977962880.0, "20690": 964804288.0, "20695": 935216640.0, "20700": 971300160.0, "20705": 964175168.0, "20710": 975900672.0, "20715": 975672576.0, "20720": 954119360.0, "20725": 975587648.0, "20730": 976900864.0, "20735": 961191616.0, "20740": 991193984.0, "20745": 956019968.0, "20750": 956508736.0, "20755": 975196992.0, "20760": 985211264.0, "20765": 984450752.0, "20770": 950227776.0, "20775": 928476032.0, "20780": 967439680.0, "20785": 976643072.0, "20790": 961012608.0, "20795": 973923264.0, "20800": 949590848.0, "20805": 987098688.0, "20810": 979476288.0, "20815": 969789888.0, "20820": 965531840.0, "20825": 948632128.0, "20830": 976269696.0, "20835": 962609472.0, "20840": 974972864.0, "20845": 971251968.0, "20850": 939398976.0, "20855": 964144896.0, "20860": 974740032.0, "20865": 967897344.0, "20870": 963375936.0, "20875": 963690816.0, "20880": 954213440.0, "20885": 971862016.0, "20890": 976804352.0, "20895": 964501632.0, "20900": 982757376.0, "20905": 935432448.0, "20910": 963560704.0, "20915": 966189824.0, "20920": 983300864.0, "20925": 969912256.0, "20930": 951131776.0, "20935": 943711872.0, "20940": 987816128.0, "20945": 965687168.0, "20950": 951731968.0, "20955": 957197568.0, "20960": 938415104.0, "20965": 966627712.0, "20970": 968924864.0, "20975": 965343424.0, "20980": 973284096.0, "20985": 940462656.0, "20990": 969376896.0, "20995": 986837696.0, "21000": 979264192.0, "21005": 957248896.0, "21010": 951196544.0, "21015": 978585152.0, "21020": 974932992.0, "21025": 974863360.0, "21030": 967923200.0, "21035": 968265088.0, "21040": 977972288.0, "21045": 980069824.0, "21050": 963618688.0, "21055": 972207360.0, "21060": 970440064.0, "21065": 944699648.0, "21070": 984955072.0, "21075": 977393152.0, "21080": 968066688.0, "21085": 966753472.0, "21090": 948642816.0, "21095": 987264960.0, "21100": 980950208.0, "21105": 960612480.0, "21110": 963249408.0, "21115": 963382400.0, "21120": 964501312.0, "21125": 976685376.0, "21130": 972645504.0, "21135": 971508480.0, "21140": 960882560.0, "21145": 935827776.0, "21150": 965748224.0, "21155": 1000169920.0, "21160": 973080064.0, "21165": 988480896.0, "21170": 955718528.0, "21175": 969962048.0, "21180": 967514112.0, "21185": 967959808.0, "21190": 977920320.0, "21195": 966284480.0, "21200": 954853184.0, "21205": 980604800.0, "21210": 964111808.0, "21215": 
985312000.0, "21220": 967702336.0, "21225": 959033856.0, "21230": 961240000.0, "21235": 971068160.0, "21240": 980913856.0, "21245": 974318592.0, "21250": 946148480.0, "21255": 982591424.0, "21260": 971786112.0, "21265": 968297600.0, "21270": 952804416.0, "21275": 945669824.0, "21280": 965251136.0, "21285": 981029120.0, "21290": 974427136.0, "21295": 970156928.0, "21300": 925880000.0, "21305": 955716672.0, "21310": 987626304.0, "21315": 988411584.0, "21320": 966193664.0, "21325": 970377920.0, "21330": 957299200.0, "21335": 978978816.0, "21340": 976354816.0, "21345": 978800576.0, "21350": 979573248.0, "21355": 946413248.0, "21360": 964261888.0, "21365": 967274944.0, "21370": 992132992.0, "21375": 971969216.0, "21380": 957420672.0, "21385": 942579712.0, "21390": 958260864.0, "21395": 983607936.0, "21400": 970522944.0, "21405": 961423488.0, "21410": 963020480.0, "21415": 964058304.0, "21420": 963297664.0, "21425": 980662784.0, "21430": 962278144.0, "21435": 949563712.0, "21440": 963734656.0, "21445": 969390656.0, "21450": 977290304.0, "21455": 972627392.0, "21460": 961595328.0, "21465": 968649088.0, "21470": 966719232.0, "21475": 964205952.0, "21480": 964728704.0, "21485": 938030592.0, "21490": 942322176.0, "21495": 968796864.0, "21500": 968819456.0, "21505": 976810176.0, "21510": 985299968.0, "21515": 942838272.0, "21520": 976251776.0, "21525": 981460160.0, "21530": 978083136.0, "21535": 970773120.0, "21540": 939803392.0, "21545": 958255936.0, "21550": 974987136.0, "21555": 977842432.0, "21560": 955508608.0, "21565": 970814336.0, "21570": 934286720.0, "21575": 960744512.0, "21580": 972217408.0, "21585": 963731328.0, "21590": 966498688.0, "21595": 949600576.0, "21600": 973992256.0, "21605": 966577472.0, "21610": 952891840.0, "21615": 969354368.0, "21620": 944428800.0, "21625": 956485312.0, "21630": 965958336.0, "21635": 984236160.0, "21640": 963857472.0, "21645": 944385984.0, "21650": 948381952.0, "21655": 962983616.0, "21660": 965564736.0, "21665": 992317376.0, "21670": 955626752.0, "21675": 950256832.0, "21680": 975084096.0, "21685": 978769216.0, "21690": 974673792.0, "21695": 952400256.0, "21700": 953062656.0, "21705": 967101504.0, "21710": 976793664.0, "21715": 973023680.0, "21720": 954528000.0, "21725": 936862144.0, "21730": 960503872.0, "21735": 978824832.0, "21740": 970141568.0, "21745": 955762880.0, "21750": 944177536.0, "21755": 961006976.0, "21760": 973556096.0, "21765": 966989376.0, "21770": 978617472.0, "21775": 980907008.0, "21780": 946228800.0, "21785": 979587776.0, "21790": 976502016.0, "21795": 974880768.0, "21800": 971023616.0, "21805": 949132800.0, "21810": 969125760.0, "21815": 953176192.0, "21820": 977020736.0, "21825": 963833472.0, "21830": 963173696.0, "21835": 958308800.0, "21840": 968808256.0, "21845": 964391424.0, "21850": 965331392.0, "21855": 981674560.0, "21860": 950552768.0, "21865": 954796544.0, "21870": 968364480.0, "21875": 971393664.0, "21880": 964238656.0, "21885": 960687808.0, "21890": 968512448.0, "21895": 970782656.0, "21900": 971584256.0, "21905": 959170624.0, "21910": 982350848.0, "21915": 950056256.0, "21920": 969883520.0, "21925": 969979136.0, "21930": 962885760.0, "21935": 943611200.0, "21940": 940044352.0, "21945": 976307776.0, "21950": 967883840.0, "21955": 967400832.0, "21960": 978160512.0, "21965": 938004480.0, "21970": 975537920.0, "21975": 964753792.0, "21980": 958238272.0, "21985": 974274560.0, "21990": 952905216.0, "21995": 939577344.0, "22000": 965698560.0, "22005": 966689024.0, "22010": 969306048.0, "22015": 966231872.0, "22020": 950937408.0, 
"22025": 975287168.0, "22030": 978726400.0, "22035": 985795712.0, "22040": 964225216.0, "22045": 938866560.0, "22050": 976262848.0, "22055": 971835968.0, "22060": 969924032.0, "22065": 969855104.0, "22070": 954933120.0, "22075": 944798720.0, "22080": 966763520.0, "22085": 966430528.0, "22090": 978837824.0, "22095": 971682240.0, "22100": 949120640.0, "22105": 963363968.0, "22110": 975473728.0, "22115": 973480704.0, "22120": 984111680.0, "22125": 950621120.0, "22130": 969477056.0, "22135": 955483264.0, "22140": 974274304.0, "22145": 968685056.0, "22150": 943963584.0, "22155": 960327936.0, "22160": 981618688.0, "22165": 967940864.0, "22170": 956289024.0, "22175": 928404480.0, "22180": 966047040.0, "22185": 972112896.0, "22190": 957207936.0, "22195": 956568064.0, "22200": 949413440.0, "22205": 941479488.0, "22210": 985795904.0, "22215": 978654848.0, "22220": 964125568.0, "22225": 975521856.0, "22230": 936849472.0, "22235": 957010688.0, "22240": 974077632.0, "22245": 973807744.0, "22250": 962067072.0, "22255": 965533632.0, "22260": 942807744.0, "22265": 974041344.0, "22270": 983413952.0, "22275": 964589440.0, "22280": 959129024.0, "22285": 952855360.0, "22290": 953747584.0, "22295": 959797504.0, "22300": 982573760.0, "22305": 971981568.0, "22310": 936204992.0, "22315": 968107840.0, "22320": 956044032.0, "22325": 957369984.0, "22330": 972927488.0, "22335": 946429120.0, "22340": 951450176.0, "22345": 969605376.0, "22350": 969830528.0, "22355": 966197248.0, "22360": 954576832.0, "22365": 968660736.0, "22370": 964677248.0, "22375": 975126336.0, "22380": 975408192.0, "22385": 962854144.0, "22390": 951457344.0, "22395": 970975808.0, "22400": 966761856.0, "22405": 961340352.0, "22410": 969312192.0, "22415": 936652480.0, "22420": 972247552.0, "22425": 964891328.0, "22430": 978068992.0, "22435": 973353024.0, "22440": 931514816.0, "22445": 951294848.0, "22450": 975083840.0, "22455": 959938304.0, "22460": 967231616.0, "22465": 987603328.0, "22470": 943072384.0, "22475": 975162560.0, "22480": 978209344.0, "22485": 976682752.0, "22490": 966248192.0, "22495": 941855296.0, "22500": 980415616.0, "22505": 957027968.0, "22510": 984402304.0, "22515": 966091904.0, "22520": 951610304.0, "22525": 965417280.0, "22530": 985393600.0, "22535": 973832064.0, "22540": 982952896.0, "22545": 952101568.0, "22550": 946726720.0, "22555": 971816960.0, "22560": 972267072.0, "22565": 966367744.0, "22570": 979838016.0, "22575": 947757120.0, "22580": 963678144.0, "22585": 947537216.0, "22590": 975939328.0, "22595": 982779712.0, "22600": 950139648.0, "22605": 977909696.0, "22610": 968795904.0, "22615": 950602240.0, "22620": 984727488.0, "22625": 958839744.0, "22630": 943521984.0, "22635": 954546944.0, "22640": 970861184.0, "22645": 988521408.0, "22650": 956505984.0, "22655": 945199616.0, "22660": 962573120.0, "22665": 971412992.0, "22670": 969188864.0, "22675": 978654976.0, "22680": 940017728.0, "22685": 958696448.0, "22690": 977900224.0, "22695": 958616512.0, "22700": 978331264.0, "22705": 954546304.0, "22710": 958464960.0, "22715": 972492864.0, "22720": 983105152.0, "22725": 961836096.0, "22730": 959184192.0, "22735": 948864896.0, "22740": 968518336.0, "22745": 965528000.0, "22750": 998209088.0, "22755": 974201088.0, "22760": 940148928.0, "22765": 967020288.0, "22770": 968957056.0, "22775": 949860544.0, "22780": 977846336.0, "22785": 955552128.0, "22790": 955252032.0, "22795": 976298816.0, "22800": 970372480.0, "22805": 962554304.0, "22810": 958878080.0, "22815": 945407040.0, "22820": 978493440.0, "22825": 966078464.0, "22830": 
966468544.0, "22835": 969287744.0, "22840": 940558976.0, "22845": 973264192.0, "22850": 974956544.0, "22855": 951862464.0, "22860": 963868096.0, "22865": 948976320.0, "22870": 977542080.0, "22875": 974453824.0, "22880": 969630592.0, "22885": 977298176.0, "22890": 946818880.0, "22895": 943960000.0, "22900": 961830656.0, "22905": 971571968.0, "22910": 979859136.0, "22915": 971337472.0, "22920": 946279040.0, "22925": 970773504.0, "22930": 975784320.0, "22935": 969125824.0, "22940": 972894272.0, "22945": 940829952.0, "22950": 975601152.0, "22955": 972365824.0, "22960": 975978816.0, "22965": 964947008.0, "22970": 956950336.0, "22975": 931194560.0, "22980": 979275712.0, "22985": 959927296.0, "22990": 965051904.0, "22995": 958379328.0, "23000": 966075520.0, "23005": 972935168.0, "23010": 964718848.0, "23015": 968555264.0, "23020": 981636288.0, "23025": 955738880.0, "23030": 990790656.0, "23035": 968518272.0, "23040": 973757888.0, "23045": 959228096.0, "23050": 947603200.0, "23055": 949354688.0, "23060": 966707008.0, "23065": 980850240.0, "23070": 960460416.0, "23075": 957654144.0, "23080": 941289600.0, "23085": 975680320.0, "23090": 979732096.0, "23095": 965878592.0, "23100": 986540352.0, "23105": 952341120.0, "23110": 970745408.0, "23115": 970233536.0, "23120": 974839872.0, "23125": 971695872.0, "23130": 941211776.0, "23135": 948191296.0, "23140": 980410176.0, "23145": 979882112.0, "23150": 982463232.0, "23155": 970059264.0, "23160": 936512640.0, "23165": 969642880.0, "23170": 980994560.0, "23175": 991932544.0, "23180": 958328896.0, "23185": 950357056.0, "23190": 956506432.0, "23195": 981847808.0, "23200": 977806400.0, "23205": 967934848.0, "23210": 951791296.0, "23215": 931141504.0, "23220": 966418496.0, "23225": 958714176.0, "23230": 977340096.0, "23235": 964555072.0, "23240": 958177728.0, "23245": 973783296.0, "23250": 950651520.0, "23255": 973895232.0, "23260": 963734656.0, "23265": 954120896.0, "23270": 981161472.0, "23275": 974311296.0, "23280": 966737536.0, "23285": 971435008.0, "23290": 939954880.0, "23295": 964873216.0, "23300": 959969920.0, "23305": 952251200.0, "23310": 966539840.0, "23315": 958183808.0, "23320": 959583552.0, "23325": 971985856.0, "23330": 985693376.0, "23335": 958301440.0, "23340": 935876992.0, "23345": 945205376.0, "23350": 976780800.0, "23355": 988432896.0, "23360": 966635136.0, "23365": 985208704.0, "23370": 938461312.0, "23375": 962710592.0, "23380": 961463808.0, "23385": 989279424.0, "23390": 986546752.0, "23395": 959306240.0, "23400": 961850560.0, "23405": 958831808.0, "23410": 965791360.0, "23415": 981510272.0, "23420": 951880832.0, "23425": 937918464.0, "23430": 970872192.0, "23435": 969406144.0, "23440": 976173312.0, "23445": 971583488.0, "23450": 955344896.0, "23455": 972291776.0, "23460": 964705024.0, "23465": 950715008.0, "23470": 978408064.0, "23475": 957777024.0, "23480": 961065536.0, "23485": 985299712.0, "23490": 961682432.0, "23495": 974327040.0, "23500": 956480512.0, "23505": 948681920.0, "23510": 975485952.0, "23515": 967998080.0, "23520": 961555200.0, "23525": 977536128.0, "23530": 945912704.0, "23535": 975878528.0, "23540": 978981184.0, "23545": 979367104.0, "23550": 975257472.0, "23555": 940167680.0, "23560": 963433600.0, "23565": 964788864.0, "23570": 978304576.0, "23575": 945345344.0, "23580": 941391104.0, "23585": 958700288.0, "23590": 972312896.0, "23595": 970148608.0, "23600": 966029376.0, "23605": 961732224.0, "23610": 937483520.0, "23615": 973182848.0, "23620": 973008512.0, "23625": 981265024.0, "23630": 969756352.0, "23635": 949467136.0, 
"23640": 961561088.0, "23645": 982382976.0, "23650": 967761280.0, "23655": 969263872.0, "23660": 953630336.0, "23665": 951572736.0, "23670": 984907200.0, "23675": 972507136.0, "23680": 976002048.0, "23685": 959343232.0, "23690": 932246336.0, "23695": 963546112.0, "23700": 966611008.0, "23705": 977389632.0, "23710": 961413760.0, "23715": 954635136.0, "23720": 988272000.0, "23725": 968384128.0, "23730": 970223168.0, "23735": 959080832.0, "23740": 952010816.0, "23745": 948393344.0, "23750": 979114176.0, "23755": 967298496.0, "23760": 971994752.0, "23765": 947754240.0, "23770": 951422272.0, "23775": 968151808.0, "23780": 967144832.0, "23785": 962634816.0, "23790": 979972352.0, "23795": 944001600.0, "23800": 974773632.0, "23805": 974336832.0, "23810": 973229568.0, "23815": 971254848.0, "23820": 958532096.0, "23825": 959926656.0, "23830": 965335872.0, "23835": 970032256.0, "23840": 980305280.0, "23845": 943840576.0, "23850": 945184832.0, "23855": 970284608.0, "23860": 970380800.0, "23865": 970603840.0, "23870": 986279040.0, "23875": 947598720.0, "23880": 955135808.0, "23885": 971963648.0, "23890": 972070400.0, "23895": 984766784.0, "23900": 951248384.0, "23905": 954572096.0, "23910": 971606208.0, "23915": 962015168.0, "23920": 960344576.0, "23925": 943385472.0, "23930": 948460160.0, "23935": 981302016.0, "23940": 970939456.0, "23945": 969981312.0, "23950": 971921344.0, "23955": 943969472.0, "23960": 973728576.0, "23965": 976586560.0, "23970": 963328768.0, "23975": 960203264.0, "23980": 944769152.0, "23985": 969269440.0, "23990": 997622336.0, "23995": 975498432.0, "24000": 962847168.0, "24005": 936794624.0, "24010": 949510784.0, "24015": 967728512.0, "24020": 985829504.0, "24025": 967086080.0, "24030": 966437248.0, "24035": 937324608.0, "24040": 963145216.0, "24045": 984551296.0, "24050": 962353664.0, "24055": 968090240.0, "24060": 946768512.0, "24065": 974977216.0, "24070": 974961216.0, "24075": 975349760.0, "24080": 979691392.0, "24085": 956604096.0, "24090": 970240768.0, "24095": 963397376.0, "24100": 977176384.0, "24105": 981147648.0, "24110": 968069056.0, "24115": 953010816.0, "24120": 962814912.0, "24125": 963894528.0, "24130": 965056960.0, "24135": 959222528.0, "24140": 965466112.0, "24145": 966108288.0, "24150": 968501824.0, "24155": 954999360.0, "24160": 971681536.0, "24165": 939325824.0, "24170": 959748608.0, "24175": 980937152.0, "24180": 968150080.0, "24185": 957033088.0, "24190": 950801344.0, "24195": 951915136.0, "24200": 969369216.0, "24205": 971786112.0, "24210": 962214528.0, "24215": 931697664.0, "24220": 956739328.0, "24225": 980953088.0, "24230": 958847744.0, "24235": 947926912.0, "24240": 968998784.0, "24245": 949446976.0, "24250": 955899648.0, "24255": 973155840.0, "24260": 968563328.0, "24265": 968183360.0, "24270": 953416448.0, "24275": 961337920.0, "24280": 958098560.0, "24285": 986872128.0, "24290": 969697664.0, "24295": 964398144.0, "24300": 945965696.0, "24305": 980257536.0, "24310": 971565056.0, "24315": 974245760.0, "24320": 964613888.0, "24325": 938953216.0, "24330": 955295296.0, "24335": 966789568.0, "24340": 991398528.0, "24345": 970805760.0, "24350": 924112256.0, "24355": 968918784.0, "24360": 967010624.0, "24365": 963821440.0, "24370": 968152192.0, "24375": 973988544.0, "24380": 953318848.0, "24385": 977722112.0, "24390": 974397696.0, "24395": 956194432.0, "24400": 968548992.0, "24405": 942919936.0, "24410": 979544384.0, "24415": 981867008.0, "24420": 958516736.0, "24425": 972800384.0, "24430": 944589312.0, "24435": 958417664.0, "24440": 978685952.0, "24445": 
972395520.0, "24450": 950838336.0, "24455": 941275904.0, "24460": 947183232.0, "24465": 974901504.0, "24470": 987872704.0, "24475": 973839744.0, "24480": 962371456.0, "24485": 920846080.0, "24490": 969871104.0, "24495": 982266112.0, "24500": 976131968.0, "24505": 966754304.0, "24510": 958208640.0, "24515": 999758016.0, "24520": 954413184.0, "24525": 965645632.0, "24530": 958498304.0, "24535": 957788224.0, "24540": 956292736.0, "24545": 968508160.0, "24550": 962466048.0, "24555": 958650240.0, "24560": 954257344.0, "24565": 947379968.0, "24570": 966579968.0, "24575": 973496704.0, "24580": 965420416.0, "24585": 957859904.0, "24590": 941814720.0, "24595": 973349440.0, "24600": 970694656.0, "24605": 971199488.0, "24610": 953282624.0, "24615": 942852800.0, "24620": 958596032.0, "24625": 979644416.0, "24630": 979286528.0, "24635": 976113664.0, "24640": 941664960.0, "24645": 956248960.0, "24650": 954056448.0, "24655": 980874816.0, "24660": 956045312.0, "24665": 957669312.0, "24670": 952500992.0, "24675": 960595648.0, "24680": 970160704.0, "24685": 964357440.0, "24690": 958786112.0, "24695": 938072448.0, "24700": 980141184.0, "24705": 970359296.0, "24710": 959167872.0, "24715": 972224768.0, "24720": 947229696.0, "24725": 962021632.0, "24730": 961270784.0, "24735": 951486464.0, "24740": 981649280.0, "24745": 964204992.0, "24750": 949127168.0, "24755": 971075776.0, "24760": 963116416.0, "24765": 987514688.0, "24770": 969078528.0, "24775": 941893568.0, "24780": 967861120.0, "24785": 970429248.0, "24790": 960457600.0, "24795": 961468864.0, "24800": 945424768.0, "24805": 933597120.0, "24810": 964990528.0, "24815": 963243392.0, "24820": 971936256.0, "24825": 960338688.0, "24830": 962858560.0, "24835": 978914368.0, "24840": 956512896.0, "24845": 963794944.0, "24850": 973558400.0, "24855": 948854592.0, "24860": 989109248.0, "24865": 994069504.0, "24870": 970849152.0, "24875": 963810240.0, "24880": 948079360.0, "24885": 964405312.0, "24890": 970037504.0, "24895": 973466496.0, "24900": 966186560.0, "24905": 950149440.0, "24910": 965868992.0, "24915": 957393408.0, "24920": 985404992.0, "24925": 975975552.0, "24930": 964958592.0, "24935": 914698432.0, "24940": 962899200.0, "24945": 982426560.0, "24950": 982457408.0, "24955": 961721664.0, "24960": 948442560.0, "24965": 972658560.0, "24970": 964550656.0, "24975": 967685120.0, "24980": 964792448.0, "24985": 956315648.0, "24990": 971149888.0, "24995": 967119680.0, "25000": 954080768.0, "25005": 969399808.0, "25010": 973009728.0, "25015": 956309184.0, "25020": 980872768.0, "25025": 959431552.0, "25030": 957495360.0, "25035": 965154944.0, "25040": 949568256.0, "25045": 977577856.0, "25050": 967697344.0, "25055": 962127744.0, "25060": 969326592.0, "25065": 950217600.0, "25070": 951864064.0, "25075": 969944512.0, "25080": 962772928.0, "25085": 966560640.0, "25090": 959305728.0, "25095": 935475776.0, "25100": 990004992.0, "25105": 977206080.0, "25110": 977368640.0, "25115": 962933696.0, "25120": 950338304.0, "25125": 960523392.0, "25130": 972870336.0, "25135": 976427328.0, "25140": 971894656.0, "25145": 938038464.0, "25150": 961382784.0, "25155": 964786816.0, "25160": 964186240.0, "25165": 977746240.0, "25170": 937736768.0, "25175": 971641856.0, "25180": 976106496.0, "25185": 984085440.0, "25190": 974994688.0, "25195": 958551424.0, "25200": 951493824.0, "25205": 951079232.0, "25210": 963490496.0, "25215": 975320064.0, "25220": 972800256.0, "25225": 943545600.0, "25230": 966581760.0, "25235": 968557312.0, "25240": 963754432.0, "25245": 963741184.0, "25250": 955222080.0, 
"25255": 961956480.0, "25260": 972636864.0, "25265": 965007488.0, "25270": 975591552.0, "25275": 965314304.0, "25280": 941014912.0, "25285": 970567296.0, "25290": 957494784.0, "25295": 964450176.0, "25300": 969964928.0, "25305": 943767936.0, "25310": 964259200.0, "25315": 974872192.0, "25320": 962957952.0, "25325": 956266368.0, "25330": 949654656.0, "25335": 977618048.0, "25340": 965818368.0, "25345": 975242432.0, "25350": 974256256.0, "25355": 955358784.0, "25360": 954509824.0, "25365": 975380736.0, "25370": 987234176.0, "25375": 967822208.0, "25380": 980406144.0, "25385": 935433792.0, "25390": 967913408.0, "25395": 968896640.0, "25400": 974963584.0, "25405": 984409856.0, "25410": 940501504.0, "25415": 973925376.0, "25420": 963845888.0, "25425": 980763392.0, "25430": 970816896.0, "25435": 943082368.0, "25440": 946441088.0, "25445": 972141056.0, "25450": 979704768.0, "25455": 976864064.0, "25460": 951417600.0, "25465": 954745856.0, "25470": 980611456.0, "25475": 961603200.0, "25480": 957404416.0, "25485": 968206848.0, "25490": 956649088.0, "25495": 985861376.0, "25500": 973995200.0, "25505": 993161792.0, "25510": 963331328.0, "25515": 961273344.0, "25520": 960512512.0, "25525": 984582528.0, "25530": 970604096.0, "25535": 970738496.0, "25540": 955567616.0, "25545": 950916416.0, "25550": 972630528.0, "25555": 973145408.0, "25560": 977209408.0, "25565": 964124224.0, "25570": 930519360.0, "25575": 973696512.0, "25580": 976778176.0, "25585": 978024128.0, "25590": 981333632.0, "25595": 940470656.0, "25600": 948486912.0, "25605": 969474048.0, "25610": 974701824.0, "25615": 967777664.0, "25620": 970576704.0, "25625": 948124672.0, "25630": 980215424.0, "25635": 966150848.0, "25640": 976537408.0, "25645": 975038720.0, "25650": 953847232.0, "25655": 975390976.0, "25660": 968496128.0, "25665": 963761664.0, "25670": 976144000.0, "25675": 957611456.0, "25680": 943086528.0, "25685": 973415424.0, "25690": 970220480.0, "25695": 981037504.0, "25700": 957644096.0, "25705": 948803648.0, "25710": 978558144.0, "25715": 978546304.0, "25720": 966795840.0, "25725": 952043712.0, "25730": 957621632.0, "25735": 976562176.0, "25740": 977250688.0, "25745": 972416128.0, "25750": 958215744.0, "25755": 958117056.0, "25760": 960507584.0, "25765": 987781568.0, "25770": 974429440.0, "25775": 969331904.0, "25780": 967121536.0, "25785": 954044096.0, "25790": 964868288.0, "25795": 970134784.0, "25800": 971413760.0, "25805": 980639296.0, "25810": 938455552.0, "25815": 973230272.0, "25820": 964516096.0, "25825": 973012608.0, "25830": 986001664.0, "25835": 939614656.0, "25840": 974578240.0, "25845": 967699328.0, "25850": 967394304.0, "25855": 988041600.0, "25860": 940857472.0, "25865": 942944896.0, "25870": 969614656.0, "25875": 968072896.0, "25880": 965038464.0, "25885": 946476096.0, "25890": 951648960.0, "25895": 975737536.0, "25900": 968059392.0, "25905": 974907840.0, "25910": 963354816.0, "25915": 944341440.0, "25920": 963992960.0, "25925": 970795136.0, "25930": 962323968.0, "25935": 973865664.0, "25940": 952766208.0, "25945": 981782144.0, "25950": 981770368.0, "25955": 965321152.0, "25960": 961028544.0, "25965": 944197376.0, "25970": 974663424.0, "25975": 991712192.0, "25980": 957018240.0, "25985": 981217280.0, "25990": 967274560.0, "25995": 930797696.0, "26000": 972679552.0, "26005": 968224576.0, "26010": 978993344.0, "26015": 973286656.0, "26020": 953484800.0, "26025": 969624448.0, "26030": 968053120.0, "26035": 984631744.0, "26040": 966838080.0, "26045": 957513216.0, "26050": 961126272.0, "26055": 959375232.0, "26060": 
966795328.0, "26065": 981155072.0, "26070": 976371840.0, "26075": 937146816.0, "26080": 966576064.0, "26085": 964584896.0, "26090": 979194688.0, "26095": 980763904.0, "26100": 958870912.0, "26105": 972846656.0, "26110": 967348992.0, "26115": 970089024.0, "26120": 968740928.0, "26125": 949646976.0, "26130": 967124544.0, "26135": 978759040.0, "26140": 971740544.0, "26145": 964520704.0, "26150": 950889088.0, "26155": 954271616.0, "26160": 967672768.0, "26165": 976947328.0, "26170": 976093824.0, "26175": 966960512.0, "26180": 949854080.0, "26185": 969961280.0, "26190": 991223040.0, "26195": 964007168.0, "26200": 987149568.0, "26205": 947890624.0, "26210": 971635200.0, "26215": 971358784.0, "26220": 969620800.0, "26225": 973780288.0, "26230": 955772224.0, "26235": 943394432.0, "26240": 973885376.0, "26245": 960636416.0, "26250": 969565824.0, "26255": 968108800.0, "26260": 952364864.0, "26265": 984428928.0, "26270": 963890816.0, "26275": 976599104.0, "26280": 982115072.0, "26285": 958774784.0, "26290": 982685568.0, "26295": 966107840.0, "26300": 957673088.0, "26305": 980650880.0, "26310": 955244864.0, "26315": 941630528.0, "26320": 963884480.0, "26325": 965185280.0, "26330": 960747008.0, "26335": 955071232.0, "26340": 939118208.0, "26345": 961524800.0, "26350": 967117184.0, "26355": 965977472.0, "26360": 971928512.0, "26365": 949990912.0, "26370": 969319744.0, "26375": 974436352.0, "26380": 967760704.0, "26385": 975619968.0, "26390": 951715904.0, "26395": 966771520.0, "26400": 975409024.0, "26405": 964201408.0, "26410": 957264000.0, "26415": 968349184.0, "26420": 947354048.0, "26425": 972643648.0, "26430": 967009856.0, "26435": 962671232.0, "26440": 964096960.0, "26445": 943161664.0, "26450": 966124608.0, "26455": 979025344.0, "26460": 964645760.0, "26465": 968366272.0, "26470": 945838080.0, "26475": 971825024.0, "26480": 978755328.0, "26485": 979486016.0, "26490": 975100032.0, "26495": 965076480.0, "26500": 963442368.0, "26505": 970278720.0, "26510": 959523840.0, "26515": 966033536.0, "26520": 972705792.0, "26525": 935439424.0, "26530": 970630016.0, "26535": 958419904.0, "26540": 961931008.0, "26545": 968006016.0, "26550": 956029696.0, "26555": 965972352.0, "26560": 964443776.0, "26565": 965187008.0, "26570": 962502016.0, "26575": 949013504.0, "26580": 963459456.0, "26585": 963287936.0, "26590": 964803904.0, "26595": 964786048.0, "26600": 970055808.0, "26605": 937822848.0, "26610": 970938944.0, "26615": 969609472.0, "26620": 979090752.0, "26625": 959928512.0, "26630": 943996352.0, "26635": 959365184.0, "26640": 968866752.0, "26645": 965427584.0, "26650": 967108480.0, "26655": 940351744.0, "26660": 945864384.0, "26665": 972569984.0, "26670": 969246976.0, "26675": 967253312.0, "26680": 951859200.0, "26685": 945894592.0, "26690": 967405568.0, "26695": 971174464.0, "26700": 975633984.0, "26705": 982790720.0, "26710": 952809216.0, "26715": 963761728.0, "26720": 986492416.0, "26725": 951851904.0, "26730": 969869632.0, "26735": 960044416.0, "26740": 960934592.0, "26745": 964911488.0, "26750": 967072384.0, "26755": 977416000.0, "26760": 975224256.0, "26765": 943019200.0, "26770": 972361536.0, "26775": 974331520.0, "26780": 962815552.0, "26785": 969571136.0, "26790": 942996416.0, "26795": 948413184.0, "26800": 965738688.0, "26805": 963888704.0, "26810": 967285632.0, "26815": 951753472.0, "26820": 969811264.0, "26825": 963227136.0, "26830": 979923008.0, "26835": 971315328.0, "26840": 957845824.0, "26845": 946323072.0, "26850": 983192064.0, "26855": 970554368.0, "26860": 973146880.0, "26865": 963067264.0, 
"26870": 954451712.0, "26875": 972630400.0, "26880": 972803264.0, "26885": 968504128.0, "26890": 961372672.0, "26895": 938150144.0, "26900": 967210496.0, "26905": 967005312.0, "26910": 957234112.0, "26915": 974473408.0, "26920": 943547840.0, "26925": 951072320.0, "26930": 961625408.0, "26935": 976093504.0, "26940": 966963456.0, "26945": 973604096.0, "26950": 949504960.0, "26955": 971588096.0, "26960": 972674112.0, "26965": 972906496.0, "26970": 971115456.0, "26975": 948966400.0, "26980": 981652352.0, "26985": 975959744.0, "26990": 974827840.0, "26995": 961544832.0, "27000": 952467712.0, "27005": 956631360.0, "27010": 968570496.0, "27015": 962540864.0, "27020": 956251520.0, "27025": 990404544.0, "27030": 935779648.0, "27035": 954094208.0, "27040": 959352320.0, "27045": 978243776.0, "27050": 951637248.0, "27055": 951805184.0, "27060": 991235072.0, "27065": 964321280.0, "27070": 975281088.0, "27075": 965535680.0, "27080": 943239744.0, "27085": 962779328.0, "27090": 958584192.0, "27095": 980289792.0, "27100": 958460224.0, "27105": 938996096.0, "27110": 948745408.0, "27115": 973902400.0, "27120": 974057856.0, "27125": 971490688.0, "27130": 960008128.0, "27135": 948376832.0, "27140": 968197248.0, "27145": 973570688.0, "27150": 967643520.0, "27155": 975359616.0, "27160": 943786560.0, "27165": 974013120.0, "27170": 961017728.0, "27175": 967765504.0, "27180": 968672448.0, "27185": 975190720.0, "27190": 944610432.0, "27195": 961345856.0, "27200": 969252352.0, "27205": 975302656.0, "27210": 955605248.0, "27215": 950086144.0, "27220": 974830720.0, "27225": 962817088.0, "27230": 962851840.0, "27235": 952914752.0, "27240": 956396352.0, "27245": 963125568.0, "27250": 965653952.0, "27255": 969496960.0, "27260": 952712896.0, "27265": 961908416.0, "27270": 963374784.0, "27275": 992522944.0, "27280": 957677696.0, "27285": 964388608.0, "27290": 969107712.0, "27295": 932602368.0, "27300": 975777280.0, "27305": 962390592.0, "27310": 975131968.0, "27315": 968332736.0, "27320": 951820800.0, "27325": 965315968.0, "27330": 960303744.0, "27335": 983907008.0, "27340": 980105408.0, "27345": 941323648.0, "27350": 958112960.0, "27355": 957626112.0, "27360": 975419072.0, "27365": 968287040.0, "27370": 958998336.0, "27375": 939242496.0, "27380": 966090112.0, "27385": 958749312.0, "27390": 957120576.0, "27395": 984854528.0, "27400": 952594816.0, "27405": 977279040.0, "27410": 982856576.0, "27415": 960544704.0, "27420": 972410048.0, "27425": 938566848.0, "27430": 960377664.0, "27435": 963303488.0, "27440": 963253184.0, "27445": 970679680.0, "27450": 935183936.0, "27455": 976077248.0, "27460": 961275328.0, "27465": 976991424.0, "27470": 971560576.0, "27475": 964684160.0, "27480": 948978112.0, "27485": 963958400.0, "27490": 968890944.0, "27495": 971021312.0, "27500": 967371776.0, "27505": 960542848.0, "27510": 966169664.0, "27515": 967859008.0, "27520": 979798976.0, "27525": 966613696.0, "27530": 952408384.0, "27535": 951607872.0, "27540": 975408640.0, "27545": 970529792.0, "27550": 965778496.0, "27555": 957227008.0, "27560": 941177856.0, "27565": 966519936.0, "27570": 963731712.0, "27575": 978911104.0, "27580": 952332736.0, "27585": 935629696.0, "27590": 963116608.0, "27595": 969190272.0, "27600": 976198144.0, "27605": 963810688.0, "27610": 945433856.0, "27615": 962441280.0, "27620": 972753664.0, "27625": 973282816.0, "27630": 963055296.0, "27635": 957810496.0, "27640": 949697472.0, "27645": 981408128.0, "27650": 972385792.0, "27655": 976805760.0, "27660": 973448256.0, "27665": 953817152.0, "27670": 956240640.0, "27675": 
957759744.0, "27680": 958636544.0, "27685": 977945728.0, "27690": 932645248.0, "27695": 986426688.0, "27700": 967520384.0, "27705": 963095232.0, "27710": 953349056.0, "27715": 955932160.0, "27720": 949915584.0, "27725": 987385600.0, "27730": 969086592.0, "27735": 972920320.0, "27740": 962490112.0, "27745": 945571072.0, "27750": 972926848.0, "27755": 974947584.0, "27760": 971764032.0, "27765": 972109888.0, "27770": 946379456.0, "27775": 970653056.0, "27780": 979196288.0, "27785": 968788096.0, "27790": 975469824.0, "27795": 941391616.0, "27800": 959801088.0, "27805": 975802048.0, "27810": 955280640.0, "27815": 973766784.0, "27820": 985075392.0, "27825": 940170880.0, "27830": 968030592.0, "27835": 979490112.0, "27840": 959708800.0, "27845": 974209536.0, "27850": 949340288.0, "27855": 979644800.0, "27860": 985248704.0, "27865": 978704640.0, "27870": 983705792.0, "27875": 939616832.0, "27880": 970944320.0, "27885": 980316800.0, "27890": 970298176.0, "27895": 970082432.0, "27900": 937976448.0, "27905": 952406528.0, "27910": 979567872.0, "27915": 979674816.0, "27920": 953395392.0, "27925": 970713664.0, "27930": 948078848.0, "27935": 978702464.0, "27940": 990996800.0, "27945": 964243264.0, "27950": 968982336.0, "27955": 939330048.0, "27960": 976208512.0, "27965": 969597504.0, "27970": 961189056.0, "27975": 966533760.0, "27980": 942892032.0, "27985": 963206848.0, "27990": 976072128.0, "27995": 975443392.0, "28000": 976752512.0, "28005": 960265856.0, "28010": 940195776.0, "28015": 977534464.0, "28020": 964605760.0, "28025": 973665024.0, "28030": 966720576.0, "28035": 942400704.0, "28040": 960900736.0, "28045": 971341504.0, "28050": 974835712.0, "28055": 970726528.0, "28060": 949647360.0, "28065": 953190848.0, "28070": 972086272.0, "28075": 964511168.0, "28080": 973008512.0, "28085": 987537792.0, "28090": 941515136.0, "28095": 972785344.0, "28100": 971342144.0, "28105": 973086912.0, "28110": 985155840.0, "28115": 949609472.0, "28120": 992292224.0, "28125": 961860032.0, "28130": 968264960.0, "28135": 951556352.0, "28140": 936266368.0, "28145": 951152896.0, "28150": 962707328.0, "28155": 969413888.0, "28160": 965000832.0, "28165": 944792832.0, "28170": 944085568.0, "28175": 976279680.0, "28180": 972749120.0, "28185": 976082560.0, "28190": 952198080.0, "28195": 950143104.0, "28200": 973824896.0, "28205": 975863104.0, "28210": 978514944.0, "28215": 979241024.0, "28220": 936871936.0, "28225": 968556608.0, "28230": 973724544.0, "28235": 959084416.0, "28240": 967811520.0, "28245": 952921664.0, "28250": 959284608.0, "28255": 956145152.0, "28260": 959410496.0, "28265": 973320640.0, "28270": 973921984.0, "28275": 951017536.0, "28280": 964327616.0, "28285": 964745600.0, "28290": 969910144.0, "28295": 965207936.0, "28300": 954346240.0, "28305": 964709632.0, "28310": 963688768.0, "28315": 964629184.0, "28320": 961992576.0, "28325": 948754112.0, "28330": 970208064.0, "28335": 966410112.0, "28340": 977307392.0, "28345": 965076352.0, "28350": 959746176.0, "28355": 951884544.0, "28360": 966142336.0, "28365": 988320000.0, "28370": 971865600.0, "28375": 961691584.0, "28380": 949064768.0, "28385": 965207296.0, "28390": 972159744.0, "28395": 964807040.0, "28400": 983724544.0, "28405": 955702592.0, "28410": 980419840.0, "28415": 975910592.0, "28420": 969996928.0, "28425": 974260096.0, "28430": 939918592.0, "28435": 955961408.0, "28440": 976675584.0, "28445": 974061440.0, "28450": 981361472.0, "28455": 975205888.0, "28460": 955359808.0, "28465": 966669504.0, "28470": 956449664.0, "28475": 969991552.0, "28480": 976666304.0, 
"28485": 947547904.0, "28490": 949919104.0, "28495": 962916736.0, "28500": 990037760.0, "28505": 968769088.0, "28510": 948918592.0, "28515": 946671680.0, "28520": 970384256.0, "28525": 973234304.0, "28530": 972089920.0, "28535": 966766272.0, "28540": 958321472.0, "28545": 967208704.0, "28550": 966256768.0, "28555": 973978880.0, "28560": 969641728.0, "28565": 955980096.0, "28570": 980278528.0, "28575": 958565632.0, "28580": 972559360.0, "28585": 970762880.0, "28590": 948155072.0, "28595": 950134848.0, "28600": 970550976.0, "28605": 972302528.0, "28610": 977373504.0, "28615": 949494272.0, "28620": 948248960.0, "28625": 967496320.0, "28630": 969916864.0, "28635": 966224576.0, "28640": 968745536.0, "28645": 954387776.0, "28650": 974314048.0, "28655": 982811264.0, "28660": 983433728.0, "28665": 967015680.0, "28670": 944635840.0, "28675": 964690688.0, "28680": 985577920.0, "28685": 969894336.0, "28690": 989171904.0, "28695": 946386880.0, "28700": 931143872.0, "28705": 965814208.0, "28710": 978079488.0, "28715": 978767808.0, "28720": 985298368.0, "28725": 955545856.0, "28730": 966301696.0, "28735": 960744768.0, "28740": 976495488.0, "28745": 965933632.0, "28750": 948217088.0, "28755": 951335680.0, "28760": 969050432.0, "28765": 975730176.0, "28770": 955747392.0, "28775": 971378240.0, "28780": 945925952.0, "28785": 964585152.0, "28790": 970008384.0, "28795": 975626432.0, "28800": 957262464.0, "28805": 953939712.0, "28810": 971378560.0, "28815": 964776960.0, "28820": 972994176.0, "28825": 948364928.0, "28830": 933097088.0, "28835": 966293120.0, "28840": 978055424.0, "28845": 967105280.0, "28850": 975903872.0, "28855": 956687488.0, "28860": 939568832.0, "28865": 973526400.0, "28870": 963015360.0, "28875": 966237824.0, "28880": 962208512.0, "28885": 961450560.0, "28890": 966613056.0, "28895": 965838144.0, "28900": 971022784.0, "28905": 984977152.0, "28910": 930725440.0, "28915": 962945344.0, "28920": 967670080.0, "28925": 970741440.0, "28930": 989806656.0, "28935": 949943488.0, "28940": 951104640.0, "28945": 985535040.0, "28950": 962376448.0, "28955": 959815040.0, "28960": 961621120.0, "28965": 974125056.0, "28970": 958482304.0, "28975": 968680768.0, "28980": 957976000.0, "28985": 966164224.0, "28990": 937217536.0, "28995": 959932224.0, "29000": 980228608.0, "29005": 970824128.0, "29010": 976980160.0, "29015": 946442560.0, "29020": 963828416.0, "29025": 948678592.0, "29030": 976253440.0, "29035": 983318016.0, "29040": 943161216.0, "29045": 964180480.0, "29050": 986948608.0, "29055": 968289472.0, "29060": 952757824.0, "29065": 959690560.0, "29070": 944988672.0, "29075": 977093248.0, "29080": 968473536.0, "29085": 973952064.0, "29090": 973834688.0, "29095": 921396224.0, "29100": 960445184.0, "29105": 983841728.0, "29110": 974461120.0, "29115": 967523072.0, "29120": 946859968.0, "29125": 945067520.0, "29130": 979771264.0, "29135": 968774848.0, "29140": 970232896.0, "29145": 960986176.0, "29150": 947247232.0, "29155": 986993344.0, "29160": 970711744.0, "29165": 983919488.0, "29170": 969930304.0, "29175": 952837312.0, "29180": 969450816.0, "29185": 970690880.0, "29190": 948162240.0, "29195": 978612672.0, "29200": 958959872.0, "29205": 966108800.0, "29210": 974191424.0, "29215": 965992320.0, "29220": 953539328.0, "29225": 962325376.0, "29230": 948081152.0, "29235": 973651392.0, "29240": 975887104.0, "29245": 970484352.0, "29250": 977064576.0, "29255": 957154944.0, "29260": 972965504.0, "29265": 982692352.0, "29270": 966151552.0, "29275": 952024512.0, "29280": 937270400.0, "29285": 975353216.0, "29290": 
980248320.0, "29295": 958669056.0, "29300": 965530048.0, "29305": 961048128.0, "29310": 951157888.0, "29315": 982252672.0, "29320": 965487168.0, "29325": 985051456.0, "29330": 962147072.0, "29335": 937603840.0, "29340": 967781568.0, "29345": 963425216.0, "29350": 969223104.0, "29355": 985192000.0, "29360": 941504384.0, "29365": 961257152.0, "29370": 970851456.0, "29375": 970211392.0, "29380": 976657344.0, "29385": 954600320.0, "29390": 954167488.0, "29395": 961613504.0, "29400": 979903168.0, "29405": 963365568.0, "29410": 961196800.0, "29415": 947075136.0, "29420": 983085440.0, "29425": 965526208.0, "29430": 971982528.0, "29435": 969259264.0, "29440": 933876032.0, "29445": 985468032.0, "29450": 961447488.0, "29455": 976335168.0, "29460": 977973824.0, "29465": 941552704.0, "29470": 956093248.0, "29475": 974269056.0, "29480": 957613824.0, "29485": 972091456.0, "29490": 956369728.0, "29495": 951393792.0, "29500": 973453888.0, "29505": 961908608.0, "29510": 969463680.0, "29515": 973634624.0, "29520": 950390464.0, "29525": 961607040.0, "29530": 968962880.0, "29535": 970692544.0, "29540": 965157760.0, "29545": 928189248.0, "29550": 934282880.0, "29555": 978056192.0, "29560": 963271296.0, "29565": 972325824.0, "29570": 962927616.0, "29575": 947892480.0, "29580": 968812480.0, "29585": 974675008.0, "29590": 971440000.0, "29595": 958709056.0, "29600": 949851392.0, "29605": 958805824.0, "29610": 963776832.0, "29615": 960098496.0, "29620": 972888832.0, "29625": 965814592.0, "29630": 964218816.0, "29635": 965798016.0, "29640": 973512320.0, "29645": 970999488.0, "29650": 970758528.0, "29655": 936529216.0, "29660": 957037120.0, "29665": 964537280.0, "29670": 956800640.0, "29675": 972706368.0, "29680": 946119168.0, "29685": 978707840.0, "29690": 963688512.0, "29695": 964628544.0, "29700": 975769984.0, "29705": 960925312.0, "29710": 965558400.0, "29715": 971409472.0, "29720": 970574272.0, "29725": 967712704.0, "29730": 960099136.0, "29735": 953983872.0, "29740": 986679296.0, "29745": 958953408.0, "29750": 971832896.0, "29755": 953837184.0, "29760": 941725376.0, "29765": 964894400.0, "29770": 974236544.0, "29775": 971119808.0, "29780": 966121920.0, "29785": 939390272.0, "29790": 969112256.0, "29795": 974628992.0, "29800": 973646464.0, "29805": 981454912.0, "29810": 935646336.0, "29815": 949839168.0, "29820": 969321088.0, "29825": 969766080.0, "29830": 969493760.0, "29835": 957372992.0, "29840": 935129536.0, "29845": 954016320.0, "29850": 972375488.0, "29855": 961650432.0, "29860": 980680064.0, "29865": 938124224.0, "29870": 958579200.0, "29875": 978983232.0, "29880": 965788864.0, "29885": 962042880.0, "29890": 920033152.0, "29895": 993785408.0, "29900": 955232128.0, "29905": 959373696.0, "29910": 954784896.0, "29915": 965417600.0, "29920": 961983488.0, "29925": 985822528.0, "29930": 956836864.0, "29935": 970175808.0, "29940": 970076288.0, "29945": 969049920.0, "29950": 964699200.0, "29955": 977132224.0, "29960": 974769856.0, "29965": 972494912.0, "29970": 923700480.0, "29975": 958788416.0, "29980": 972608896.0, "29985": 963842944.0, "29990": 967492544.0, "29995": 967804480.0, "30000": 942448640.0, "30005": 967436544.0, "30010": 956657216.0, "30015": 966091648.0, "30020": 957805696.0, "30025": 944630080.0, "30030": 978113216.0, "30035": 978287936.0, "30040": 983191424.0, "30045": 965880320.0, "30050": 937747008.0, "30055": 972858304.0, "30060": 959818624.0, "30065": 976387200.0, "30070": 962515008.0, "30075": 956983168.0, "30080": 953997184.0, "30085": 965471296.0, "30090": 972951808.0, "30095": 950037504.0, 
"30100": 962467392.0, "30105": 952909440.0, "30110": 963440832.0, "30115": 963125440.0, "30120": 971999616.0, "30125": 965305920.0, "30130": 933864256.0, "30135": 965381568.0, "30140": 953509824.0, "30145": 980694592.0, "30150": 975223360.0, "30155": 930969728.0, "30160": 967351360.0, "30165": 966456000.0, "30170": 970383616.0, "30175": 955987968.0, "30180": 932021696.0, "30185": 944385280.0, "30190": 970231296.0, "30195": 972792832.0, "30200": 958169600.0, "30205": 950687168.0, "30210": 940319488.0, "30215": 969618240.0, "30220": 963178432.0, "30225": 954653632.0, "30230": 964991488.0, "30235": 953188288.0, "30240": 958942016.0, "30245": 974002752.0, "30250": 968799744.0, "30255": 985673792.0, "30260": 978178944.0, "30265": 969045824.0, "30270": 966674944.0, "30275": 974882112.0, "30280": 984075008.0, "30285": 966965312.0, "30290": 959447296.0, "30295": 960374016.0, "30300": 953371712.0, "30305": 980335552.0, "30310": 967057088.0, "30315": 962025344.0, "30320": 966497856.0, "30325": 962208064.0, "30330": 960817472.0, "30335": 960704832.0, "30340": 959661376.0, "30345": 957701504.0, "30350": 972157568.0, "30355": 974310784.0, "30360": 966645888.0, "30365": 968500224.0, "30370": 948321792.0, "30375": 975948544.0, "30380": 963781632.0, "30385": 962959808.0, "30390": 986291008.0, "30395": 930711104.0, "30400": 976159552.0, "30405": 990262912.0, "30410": 977453632.0, "30415": 962547904.0, "30420": 945733632.0, "30425": 969599744.0, "30430": 977189056.0, "30435": 972263296.0, "30440": 986630848.0, "30445": 936608640.0, "30450": 945173056.0, "30455": 955737856.0, "30460": 973546176.0, "30465": 980985600.0, "30470": 981684672.0, "30475": 937056192.0, "30480": 964671232.0, "30485": 960089984.0, "30490": 986712000.0, "30495": 975638912.0, "30500": 958462848.0, "30505": 975417216.0, "30510": 963550080.0, "30515": 970568576.0, "30520": 974887168.0, "30525": 944153728.0, "30530": 965103552.0, "30535": 967252352.0, "30540": 975666880.0, "30545": 967190656.0, "30550": 982132608.0, "30555": 955349760.0, "30560": 974788288.0, "30565": 970001856.0, "30570": 965262144.0, "30575": 966005568.0, "30580": 940338752.0, "30585": 973529152.0, "30590": 960669888.0, "30595": 972460672.0, "30600": 978787904.0, "30605": 940122048.0, "30610": 968739456.0, "30615": 964440128.0, "30620": 968303168.0, "30625": 969660288.0, "30630": 965788608.0, "30635": 946234048.0, "30640": 975644160.0, "30645": 972996480.0, "30650": 967276672.0, "30655": 959600768.0, "30660": 943390976.0, "30665": 983281280.0, "30670": 967574272.0, "30675": 971194944.0, "30680": 975322624.0, "30685": 934133760.0, "30690": 961430592.0, "30695": 967122432.0, "30700": 972448832.0, "30705": 961399616.0, "30710": 964025536.0, "30715": 946517824.0, "30720": 969348224.0, "30725": 972823616.0, "30730": 976544128.0, "30735": 967368448.0, "30740": 947648320.0, "30745": 966907328.0, "30750": 964617600.0, "30755": 994897664.0, "30760": 988510592.0, "30765": 942254528.0, "30770": 961843072.0, "30775": 970981888.0, "30780": 977561408.0, "30785": 970534144.0, "30790": 952535168.0, "30795": 961914112.0, "30800": 961965568.0, "30805": 959181248.0, "30810": 951588416.0, "30815": 956277504.0, "30820": 949078656.0, "30825": 989038592.0, "30830": 974412736.0, "30835": 966563968.0, "30840": 975451520.0, "30845": 937138176.0, "30850": 968381824.0, "30855": 987510016.0, "30860": 970575040.0, "30865": 968443712.0, "30870": 956749312.0, "30875": 948581632.0, "30880": 985861248.0, "30885": 981196352.0, "30890": 974865664.0, "30895": 946534400.0, "30900": 941988480.0, "30905": 
952489600.0, "30910": 977369920.0, "30915": 980531328.0, "30920": 969126528.0, "30925": 949498880.0, "30930": 974929344.0, "30935": 967307776.0, "30940": 958783168.0, "30945": 974580608.0, "30950": 950657088.0, "30955": 973094720.0, "30960": 971528000.0, "30965": 965237824.0, "30970": 973869696.0, "30975": 975145024.0, "30980": 947162560.0, "30985": 971461696.0, "30990": 966183488.0, "30995": 967847488.0, "31000": 968354624.0, "31005": 952668224.0, "31010": 983512896.0, "31015": 957312192.0, "31020": 961344960.0, "31025": 964440896.0, "31030": 943715520.0, "31035": 977596864.0, "31040": 981754496.0, "31045": 963555456.0, "31050": 967235840.0, "31055": 953169664.0, "31060": 978775232.0, "31065": 980876992.0, "31070": 972601728.0, "31075": 970133504.0, "31080": 972701056.0, "31085": 940161664.0, "31090": 987269632.0, "31095": 970010944.0, "31100": 975881344.0, "31105": 961542272.0, "31110": 953108736.0, "31115": 968279808.0, "31120": 978259904.0, "31125": 971540224.0, "31130": 969961216.0, "31135": 936814336.0, "31140": 952604480.0, "31145": 986778368.0, "31150": 967018624.0, "31155": 967387648.0, "31160": 954586304.0, "31165": 958339200.0, "31170": 969697856.0, "31175": 968535616.0, "31180": 968363136.0, "31185": 981120640.0, "31190": 956290176.0, "31195": 963144576.0, "31200": 957083072.0, "31205": 958487488.0, "31210": 985518016.0, "31215": 941528704.0, "31220": 960939712.0, "31225": 967506624.0, "31230": 962259520.0, "31235": 968261184.0, "31240": 959158976.0, "31245": 942961536.0, "31250": 956540864.0, "31255": 959840384.0, "31260": 966013696.0, "31265": 949642688.0, "31270": 948660544.0, "31275": 973702656.0, "31280": 977633152.0, "31285": 963409088.0, "31290": 973162304.0, "31295": 935314176.0, "31300": 977258880.0, "31305": 988349312.0, "31310": 969832384.0, "31315": 958052224.0, "31320": 945996160.0, "31325": 940496960.0, "31330": 969844736.0, "31335": 980711744.0, "31340": 956112384.0, "31345": 960418688.0, "31350": 936284928.0, "31355": 963323584.0, "31360": 961871168.0, "31365": 974175744.0, "31370": 968769920.0, "31375": 936332096.0, "31380": 957267584.0, "31385": 956818048.0, "31390": 959697792.0, "31395": 981900352.0, "31400": 966012608.0, "31405": 955767232.0, "31410": 960373184.0, "31415": 972652928.0, "31420": 969280000.0, "31425": 943433472.0, "31430": 960644736.0, "31435": 974440640.0, "31440": 972851776.0, "31445": 970690560.0, "31450": 980435200.0, "31455": 937053952.0, "31460": 988860992.0, "31465": 963209728.0, "31470": 952684160.0, "31475": 962695104.0, "31480": 952996032.0, "31485": 960386688.0, "31490": 974686592.0, "31495": 957289472.0, "31500": 975034112.0, "31505": 944752128.0, "31510": 958677504.0, "31515": 959463552.0, "31520": 975071360.0, "31525": 963918400.0, "31530": 965372992.0, "31535": 933519488.0, "31540": 961075072.0, "31545": 976195648.0, "31550": 973298176.0, "31555": 967153728.0, "31560": 946720896.0, "31565": 940712960.0, "31570": 971296128.0, "31575": 979432960.0, "31580": 961614400.0, "31585": 943893952.0, "31590": 929412672.0, "31595": 967426880.0, "31600": 964055936.0, "31605": 959454336.0, "31610": 966398720.0, "31615": 946258944.0, "31620": 968688640.0, "31625": 943415616.0, "31630": 960873344.0, "31635": 971761472.0, "31640": 947709248.0, "31645": 958425088.0, "31650": 963086976.0, "31655": 984242432.0, "31660": 961026688.0, "31665": 968154560.0, "31670": 974740544.0, "31675": 960533248.0, "31680": 957063936.0, "31685": 981642240.0, "31690": 957328000.0, "31695": 933540224.0, "31700": 957119360.0, "31705": 951684224.0, "31710": 963834880.0, 
"31715": 959712000.0, "31720": 941892224.0, "31725": 965262848.0, "31730": 965478464.0, "31735": 965993472.0, "31740": 959447232.0, "31745": 943628352.0, "31750": 970962304.0, "31755": 966499776.0, "31760": 970221440.0, "31765": 971490560.0, "31770": 944362368.0, "31775": 953849600.0, "31780": 968954880.0, "31785": 985254016.0, "31790": 986539264.0, "31795": 966539648.0, "31800": 943594368.0, "31805": 967847680.0, "31810": 974146496.0, "31815": 967862400.0, "31820": 956254528.0, "31825": 938510976.0, "31830": 962939520.0, "31835": 976763840.0, "31840": 964182272.0, "31845": 991838656.0, "31850": 967020160.0, "31855": 936244416.0, "31860": 968409344.0, "31865": 961176448.0, "31870": 989085440.0, "31875": 984444608.0, "31880": 952020096.0, "31885": 965804480.0, "31890": 942843904.0, "31895": 963789184.0, "31900": 966950208.0, "31905": 946639552.0, "31910": 975571520.0, "31915": 964691904.0, "31920": 950262464.0, "31925": 961823872.0, "31930": 947230336.0, "31935": 955944832.0, "31940": 975060288.0, "31945": 975828736.0, "31950": 962089536.0, "31955": 955958400.0, "31960": 942162752.0, "31965": 954070016.0, "31970": 984493632.0, "31975": 971099968.0, "31980": 971864960.0, "31985": 949000320.0, "31990": 964642368.0, "31995": 976245312.0, "32000": 974593408.0, "32005": 989688448.0, "32010": 948250176.0, "32015": 952431616.0, "32020": 964036480.0, "32025": 983497728.0, "32030": 966819136.0, "32035": 973499520.0, "32040": 944926208.0, "32045": 972316288.0, "32050": 951463040.0, "32055": 980950784.0, "32060": 965834240.0, "32065": 958193664.0, "32070": 961842560.0, "32075": 968083392.0, "32080": 976157248.0, "32085": 969540928.0, "32090": 946253376.0, "32095": 984198784.0, "32100": 971681920.0, "32105": 976907136.0, "32110": 971424256.0, "32115": 963489152.0, "32120": 970891008.0, "32125": 966502016.0, "32130": 958058560.0, "32135": 964848640.0, "32140": 971859584.0, "32145": 923951872.0, "32150": 975268992.0, "32155": 979115776.0, "32160": 957665920.0, "32165": 962769472.0, "32170": 938578112.0, "32175": 950657408.0, "32180": 971852544.0, "32185": 984948096.0, "32190": 958623104.0, "32195": 954022912.0, "32200": 959440384.0, "32205": 975617664.0, "32210": 979959232.0, "32215": 961966464.0, "32220": 957316672.0, "32225": 946375296.0, "32230": 979895552.0, "32235": 973254656.0, "32240": 965207872.0, "32245": 968401024.0, "32250": 965048384.0, "32255": 961156032.0, "32260": 975306368.0, "32265": 971258432.0, "32270": 968750848.0, "32275": 952101312.0, "32280": 949263744.0, "32285": 961410432.0, "32290": 976582336.0, "32295": 969950720.0, "32300": 948390656.0, "32305": 953867968.0, "32310": 966453184.0, "32315": 956462464.0, "32320": 970581696.0, "32325": 963726400.0, "32330": 950009280.0, "32335": 982398528.0, "32340": 960523200.0, "32345": 964850176.0, "32350": 966730240.0, "32355": 952377280.0, "32360": 964399168.0, "32365": 976797440.0, "32370": 965341248.0, "32375": 967493120.0, "32380": 931237824.0, "32385": 964728640.0, "32390": 980009024.0, "32395": 962216640.0, "32400": 961296000.0, "32405": 969201472.0, "32410": 952516800.0, "32415": 941789440.0, "32420": 973763840.0, "32425": 976012096.0, "32430": 983917760.0, "32435": 946690176.0, "32440": 962197312.0, "32445": 979963392.0, "32450": 975163456.0, "32455": 971771584.0, "32460": 968892352.0, "32465": 955394368.0, "32470": 962921856.0, "32475": 968249600.0, "32480": 975912256.0, "32485": 949872448.0, "32490": 949931840.0, "32495": 957419904.0, "32500": 971698688.0, "32505": 974306112.0, "32510": 989506496.0, "32515": 951660224.0, "32520": 
947193152.0, "32525": 967443968.0, "32530": 961216256.0, "32535": 982349696.0, "32540": 957880832.0, "32545": 958643264.0, "32550": 979684736.0, "32555": 972895680.0, "32560": 961244864.0, "32565": 958899072.0, "32570": 946289792.0, "32575": 964855616.0, "32580": 962639808.0, "32585": 973174656.0, "32590": 973867968.0, "32595": 952897088.0, "32600": 981668288.0, "32605": 975660992.0, "32610": 954393920.0, "32615": 953494976.0, "32620": 952615616.0, "32625": 979063040.0, "32630": 969213952.0, "32635": 980610176.0, "32640": 960853504.0, "32645": 940947968.0, "32650": 966515776.0, "32655": 962413184.0, "32660": 974503232.0, "32665": 988573056.0, "32670": 967768128.0, "32675": 952425856.0, "32680": 970906688.0, "32685": 972536128.0, "32690": 973733824.0, "32695": 976208768.0, "32700": 937220480.0, "32705": 964993088.0, "32710": 978187264.0, "32715": 960160064.0, "32720": 979303552.0, "32725": 956983936.0, "32730": 969396672.0, "32735": 959085824.0, "32740": 954339904.0, "32745": 980632000.0, "32750": 964324160.0, "32755": 958834048.0, "32760": 970854016.0, "32765": 962756800.0, "32770": 969212160.0, "32775": 974027648.0, "32780": 949460288.0, "32785": 964450944.0, "32790": 984432192.0, "32795": 969337088.0, "32800": 973107840.0, "32805": 950466048.0, "32810": 952426048.0, "32815": 976029056.0, "32820": 978213248.0, "32825": 979450560.0, "32830": 951903936.0, "32835": 949483840.0, "32840": 975239424.0, "32845": 977959808.0, "32850": 962946176.0, "32855": 975250432.0, "32860": 953176576.0, "32865": 963107328.0, "32870": 962697792.0, "32875": 975680704.0, "32880": 965786880.0, "32885": 970654080.0, "32890": 970496512.0, "32895": 957617024.0, "32900": 974175744.0, "32905": 975448000.0, "32910": 959944640.0, "32915": 957223680.0, "32920": 957727680.0, "32925": 970810240.0, "32930": 959497152.0, "32935": 977767424.0, "32940": 957727232.0, "32945": 968894080.0, "32950": 987731648.0, "32955": 980611456.0, "32960": 967142912.0, "32965": 924789184.0, "32970": 945657600.0, "32975": 970200000.0, "32980": 975140224.0, "32985": 978887040.0, "32990": 934970688.0, "32995": 967856960.0, "33000": 963434880.0, "33005": 960576704.0, "33010": 976103232.0, "33015": 961592320.0, "33020": 956718848.0, "33025": 984340032.0, "33030": 976541248.0, "33035": 976882432.0, "33040": 982810496.0, "33045": 941224000.0, "33050": 971293760.0, "33055": 965650176.0, "33060": 973715072.0, "33065": 970474240.0, "33070": 946998592.0, "33075": 969883392.0, "33080": 976098752.0, "33085": 986110272.0, "33090": 958830144.0, "33095": 961337408.0, "33100": 946083072.0, "33105": 971910016.0, "33110": 988396032.0, "33115": 968549504.0, "33120": 966061312.0, "33125": 940884864.0, "33130": 963820544.0, "33135": 966508736.0, "33140": 983039104.0, "33145": 973328832.0, "33150": 949112512.0, "33155": 955268096.0, "33160": 959858240.0, "33165": 966080832.0, "33170": 968667392.0, "33175": 967398400.0, "33180": 938036800.0, "33185": 964408832.0, "33190": 961160832.0, "33195": 975111296.0, "33200": 964637568.0, "33205": 942838656.0, "33210": 976503744.0, "33215": 965760512.0, "33220": 983579392.0, "33225": 981939072.0, "33230": 961019712.0, "33235": 967653696.0, "33240": 973039552.0, "33245": 972363456.0, "33250": 964600000.0, "33255": 947115136.0, "33260": 944817920.0, "33265": 979642112.0, "33270": 978667072.0, "33275": 971676992.0, "33280": 967924224.0, "33285": 960663296.0, "33290": 971576960.0, "33295": 961096256.0, "33300": 975250496.0, "33305": 981974528.0, "33310": 970974720.0, "33315": 974166528.0, "33320": 966439040.0, "33325": 973046720.0, 
"33330": 957779520.0, "33335": 950964608.0, "33340": 958330432.0, "33345": 974074688.0, "33350": 975242880.0, "33355": 987818048.0, "33360": 966285760.0, "33365": 946237056.0, "33370": 986993664.0, "33375": 961307136.0, "33380": 972420096.0, "33385": 993076288.0, "33390": 956652288.0, "33395": 964115648.0, "33400": 972208576.0, "33405": 974863232.0, "33410": 981346560.0, "33415": 947766912.0, "33420": 969141888.0, "33425": 991029120.0, "33430": 960174080.0, "33435": 971884736.0, "33440": 952102848.0, "33445": 924716032.0, "33450": 967832064.0, "33455": 967474560.0, "33460": 974715200.0, "33465": 982198528.0, "33470": 958873856.0, "33475": 982453632.0, "33480": 971258112.0, "33485": 961154496.0, "33490": 974949184.0, "33495": 953801344.0, "33500": 984868480.0, "33505": 983474880.0, "33510": 975556736.0, "33515": 964608704.0, "33520": 955696448.0, "33525": 955982592.0, "33530": 965324736.0, "33535": 971712320.0, "33540": 981901888.0, "33545": 960074880.0, "33550": 934044736.0, "33555": 967652352.0, "33560": 986707968.0, "33565": 975005824.0, "33570": 973264704.0, "33575": 938404032.0, "33580": 964890624.0, "33585": 973395648.0, "33590": 986239680.0, "33595": 979863232.0, "33600": 957885696.0, "33605": 951438144.0, "33610": 969251072.0, "33615": 957701568.0, "33620": 972063808.0, "33625": 975233408.0, "33630": 952987648.0, "33635": 978599680.0, "33640": 951810624.0, "33645": 968035520.0, "33650": 980978048.0, "33655": 957658624.0, "33660": 981520960.0, "33665": 966582656.0, "33670": 961639616.0, "33675": 984052992.0, "33680": 938140416.0, "33685": 950659264.0, "33690": 971604224.0, "33695": 978308992.0, "33700": 963981120.0, "33705": 966730240.0, "33710": 966211200.0, "33715": 977709568.0, "33720": 964971008.0, "33725": 981484480.0, "33730": 966596992.0, "33735": 932890176.0, "33740": 967025152.0, "33745": 971835072.0, "33750": 969634560.0, "33755": 957267136.0, "33760": 935517184.0, "33765": 961136256.0, "33770": 971217472.0, "33775": 975534144.0, "33780": 977043392.0, "33785": 951198784.0, "33790": 950170752.0, "33795": 960178112.0, "33800": 971826304.0, "33805": 975739520.0, "33810": 959705408.0, "33815": 946807296.0, "33820": 969912768.0, "33825": 976549248.0, "33830": 973081152.0, "33835": 965410688.0, "33840": 953250496.0, "33845": 961050304.0, "33850": 973628416.0, "33855": 955977088.0, "33860": 971926848.0, "33865": 949394176.0, "33870": 959008576.0, "33875": 970362368.0, "33880": 960216832.0, "33885": 969502592.0, "33890": 949846080.0, "33895": 943530176.0, "33900": 962894464.0, "33905": 965258752.0, "33910": 972357120.0, "33915": 954449152.0, "33920": 937287232.0, "33925": 972362688.0, "33930": 974899136.0, "33935": 954042752.0, "33940": 979673792.0, "33945": 953163712.0, "33950": 968766208.0, "33955": 967675712.0, "33960": 972132480.0, "33965": 973401664.0, "33970": 953838272.0, "33975": 959630976.0, "33980": 958617856.0, "33985": 975546240.0, "33990": 983455872.0, "33995": 953478016.0, "34000": 932476864.0, "34005": 965104384.0, "34010": 977793728.0, "34015": 962488960.0, "34020": 977129792.0, "34025": 930401024.0, "34030": 959662784.0, "34035": 973882176.0, "34040": 968648832.0, "34045": 968379520.0, "34050": 943201408.0, "34055": 968363328.0, "34060": 973287872.0, "34065": 975902720.0, "34070": 959115008.0, "34075": 968874496.0, "34080": 944502976.0, "34085": 965215360.0, "34090": 971136384.0, "34095": 964243008.0, "34100": 960386624.0, "34105": 956093952.0, "34110": 976306048.0, "34115": 956933568.0, "34120": 962326848.0, "34125": 956622976.0, "34130": 947144768.0, "34135": 
956372864.0, "34140": 972252928.0, "34145": 969100160.0, "34150": 953749376.0, "34155": 951487104.0, "34160": 950516864.0, "34165": 963347584.0, "34170": 987289088.0, "34175": 971976960.0, "34180": 975296448.0, "34185": 932145856.0, "34190": 956327616.0, "34195": 973205312.0, "34200": 971247360.0, "34205": 950610048.0, "34210": 917405056.0, "34215": 948699136.0, "34220": 967690752.0, "34225": 979059840.0, "34230": 968064896.0, "34235": 950767104.0, "34240": 948072704.0, "34245": 971644608.0, "34250": 993855360.0, "34255": 961885248.0, "34260": 976400320.0, "34265": 941585984.0, "34270": 968359104.0, "34275": 965209728.0, "34280": 968726784.0, "34285": 972443328.0, "34290": 951339072.0, "34295": 950891904.0, "34300": 978678656.0, "34305": 972530944.0, "34310": 972425472.0, "34315": 953732736.0, "34320": 948030656.0, "34325": 966497792.0, "34330": 976104000.0, "34335": 971626176.0, "34340": 961837568.0, "34345": 950556736.0, "34350": 954730688.0, "34355": 965671680.0, "34360": 954944640.0, "34365": 957280064.0, "34370": 941052928.0, "34375": 972683648.0, "34380": 979168832.0, "34385": 959093248.0, "34390": 966982784.0, "34395": 958685696.0, "34400": 953571072.0, "34405": 968047232.0, "34410": 974426240.0, "34415": 977128896.0, "34420": 966144192.0, "34425": 954029440.0, "34430": 972148224.0, "34435": 968742528.0, "34440": 972051456.0, "34445": 979943360.0, "34450": 934468416.0, "34455": 959182720.0, "34460": 968696832.0, "34465": 973101760.0, "34470": 971394560.0, "34475": 939166464.0, "34480": 972125376.0, "34485": 973176128.0, "34490": 965241344.0, "34495": 971597184.0, "34500": 958169728.0, "34505": 954557632.0, "34510": 974457408.0, "34515": 967216704.0, "34520": 975439936.0, "34525": 967398144.0, "34530": 947118464.0, "34535": 966291008.0, "34540": 975466816.0, "34545": 957591424.0, "34550": 960587712.0, "34555": 932359360.0, "34560": 961653312.0, "34565": 977905664.0, "34570": 956353600.0, "34575": 969207552.0, "34580": 953254528.0, "34585": 966936448.0, "34590": 965916352.0, "34595": 958432768.0, "34600": 968217088.0, "34605": 977867008.0, "34610": 950118528.0, "34615": 968021888.0, "34620": 984974976.0, "34625": 971862592.0, "34630": 970862272.0, "34635": 942973440.0, "34640": 961153920.0, "34645": 964699584.0, "34650": 978248320.0, "34655": 971615744.0, "34660": 948219520.0, "34665": 943976768.0, "34670": 972862016.0, "34675": 969113152.0, "34680": 974822144.0, "34685": 968511936.0, "34690": 928328640.0, "34695": 968796608.0, "34700": 970417792.0, "34705": 961654080.0, "34710": 976579584.0, "34715": 953938240.0, "34720": 967972672.0, "34725": 969422144.0, "34730": 963684608.0, "34735": 987855744.0, "34740": 962108288.0, "34745": 966170816.0, "34750": 984611520.0, "34755": 964324480.0, "34760": 972273856.0, "34765": 973647360.0, "34770": 947097216.0, "34775": 991479104.0, "34780": 972556480.0, "34785": 975509952.0, "34790": 957396224.0, "34795": 947490432.0, "34800": 954792064.0, "34805": 983999744.0, "34810": 968914752.0, "34815": 958999552.0, "34820": 935640256.0, "34825": 974712576.0, "34830": 975282880.0, "34835": 965895168.0, "34840": 974837504.0, "34845": 954945088.0, "34850": 941508096.0, "34855": 980504832.0, "34860": 964307904.0, "34865": 966893056.0, "34870": 976631872.0, "34875": 941530368.0, "34880": 959406400.0, "34885": 969457408.0, "34890": 975189696.0, "34895": 980315968.0, "34900": 948729664.0, "34905": 960989440.0, "34910": 956407552.0, "34915": 956477952.0, "34920": 975102080.0, "34925": 949344000.0, "34930": 939503296.0, "34935": 966504128.0, "34940": 971538816.0, 
"34945": 972977344.0, "34950": 979004928.0, "34955": 940798976.0, "34960": 959248128.0, "34965": 985275200.0, "34970": 974991168.0, "34975": 975661760.0, "34980": 946809024.0, "34985": 973199872.0, "34990": 975596160.0, "34995": 969967808.0, "35000": 972839808.0, "35005": 938339008.0, "35010": 951919744.0, "35015": 983107584.0, "35020": 973718080.0, "35025": 971420672.0, "35030": 954527488.0, "35035": 961307776.0, "35040": 968749184.0, "35045": 969405184.0, "35050": 982345088.0, "35055": 969662080.0, "35060": 930544448.0, "35065": 975260416.0, "35070": 972060032.0, "35075": 970498432.0, "35080": 973426368.0, "35085": 944559488.0, "35090": 966418944.0, "35095": 997920064.0, "35100": 970925312.0, "35105": 978484288.0, "35110": 941471040.0, "35115": 959650304.0, "35120": 976191104.0, "35125": 959704960.0, "35130": 979821568.0, "35135": 972536384.0, "35140": 936372672.0, "35145": 967088832.0, "35150": 970363456.0, "35155": 974490368.0, "35160": 977787200.0, "35165": 959240000.0, "35170": 963995008.0, "35175": 974306816.0, "35180": 970508992.0, "35185": 982566016.0, "35190": 961266240.0, "35195": 962474112.0, "35200": 957938240.0, "35205": 975098112.0, "35210": 980804480.0, "35215": 979557376.0, "35220": 945309312.0, "35225": 967826304.0, "35230": 979101632.0, "35235": 974500992.0, "35240": 962302208.0, "35245": 951100992.0, "35250": 953959232.0, "35255": 977717888.0, "35260": 975246080.0, "35265": 963408704.0, "35270": 942900480.0, "35275": 956176000.0, "35280": 983524864.0, "35285": 973108160.0, "35290": 971751680.0, "35295": 961315840.0, "35300": 957484736.0, "35305": 976259968.0, "35310": 975516800.0, "35315": 987564928.0, "35320": 974182080.0, "35325": 958232256.0, "35330": 974344192.0, "35335": 969459648.0, "35340": 976392832.0, "35345": 973788928.0, "35350": 946052224.0, "35355": 954769536.0, "35360": 967664640.0, "35365": 956836480.0, "35370": 973328832.0, "35375": 951791488.0, "35380": 965236224.0, "35385": 977277760.0, "35390": 967929920.0, "35395": 966518400.0, "35400": 977089664.0, "35405": 931027776.0, "35410": 964419264.0, "35415": 966272064.0, "35420": 963087552.0, "35425": 970843200.0, "35430": 950849216.0, "35435": 953332992.0, "35440": 958073536.0, "35445": 978144960.0, "35450": 975562880.0, "35455": 940813440.0, "35460": 949616384.0, "35465": 973016000.0, "35470": 975816000.0, "35475": 958488192.0, "35480": 953987968.0, "35485": 960012416.0, "35490": 961234880.0, "35495": 980656640.0, "35500": 967987328.0, "35505": 964700544.0, "35510": 954518272.0, "35515": 971380608.0, "35520": 975804160.0, "35525": 964479232.0, "35530": 971659520.0, "35535": 959842432.0, "35540": 946438976.0, "35545": 967926720.0, "35550": 974786304.0, "35555": 981369344.0, "35560": 959247168.0, "35565": 946236160.0, "35570": 968127744.0, "35575": 977685248.0, "35580": 982813312.0, "35585": 961156608.0, "35590": 927098112.0, "35595": 985011712.0, "35600": 971069376.0, "35605": 977094464.0, "35610": 956642368.0, "35615": 959838336.0, "35620": 965005312.0, "35625": 968024448.0, "35630": 971626496.0, "35635": 972468608.0, "35640": 965596928.0, "35645": 938788288.0, "35650": 956579712.0, "35655": 973649216.0, "35660": 966585344.0, "35665": 984104320.0, "35670": 938189056.0, "35675": 981418240.0, "35680": 967830976.0, "35685": 981208448.0, "35690": 969890688.0, "35695": 941605504.0, "35700": 958390016.0, "35705": 969516416.0, "35710": 977464704.0, "35715": 972505920.0, "35720": 967431680.0, "35725": 939301888.0, "35730": 964984256.0, "35735": 989212736.0, "35740": 968146304.0, "35745": 962702464.0, "35750": 
947933696.0, "35755": 967101568.0, "35760": 965510080.0, "35765": 970123008.0, "35770": 957165184.0, "35775": 947408832.0, "35780": 970872384.0, "35785": 970387200.0, "35790": 969225088.0, "35795": 971395136.0, "35800": 948439616.0, "35805": 945371648.0, "35810": 968456896.0, "35815": 961695872.0, "35820": 985945024.0, "35825": 979004672.0, "35830": 957955712.0, "35835": 969323136.0, "35840": 966684160.0, "35845": 967302912.0, "35850": 986589696.0, "35855": 956191296.0, "35860": 958288384.0, "35865": 968092544.0, "35870": 974692992.0, "35875": 968711936.0, "35880": 950621376.0, "35885": 968519424.0, "35890": 986274880.0, "35895": 961342400.0, "35900": 970422528.0, "35905": 954571264.0, "35910": 954995840.0, "35915": 979061120.0, "35920": 973147200.0, "35925": 981588928.0, "35930": 957354816.0, "35935": 958920256.0, "35940": 969561984.0, "35945": 969754496.0, "35950": 978931456.0, "35955": 967483840.0, "35960": 939780608.0, "35965": 944776064.0, "35970": 957825856.0, "35975": 978085440.0, "35980": 973973696.0, "35985": 961806400.0, "35990": 944081536.0, "35995": 958577792.0, "36000": 970410496.0, "36005": 969367168.0, "36010": 981430400.0, "36015": 953720832.0, "36020": 983169152.0, "36025": 981576704.0, "36030": 971002112.0, "36035": 965338752.0, "36040": 944578624.0, "36045": 974030080.0, "36050": 969976384.0, "36055": 966689984.0, "36060": 973698496.0, "36065": 946852416.0, "36070": 956801024.0, "36075": 976208768.0, "36080": 962386304.0, "36085": 971898752.0, "36090": 956918720.0, "36095": 951253248.0, "36100": 960727168.0, "36105": 963952320.0, "36110": 974277248.0, "36115": 975067520.0, "36120": 962242752.0, "36125": 965160768.0, "36130": 969851520.0, "36135": 975522048.0, "36140": 970520192.0, "36145": 934600384.0, "36150": 968832448.0, "36155": 960369344.0, "36160": 963034240.0, "36165": 956963712.0, "36170": 940665024.0, "36175": 953298176.0, "36180": 975909184.0, "36185": 966554176.0, "36190": 973097216.0, "36195": 941121344.0, "36200": 923495488.0, "36205": 974668864.0, "36210": 972653696.0, "36215": 971659264.0, "36220": 966390016.0, "36225": 951482752.0, "36230": 960008640.0, "36235": 977069312.0, "36240": 973647168.0, "36245": 966735296.0, "36250": 966720768.0, "36255": 962966848.0, "36260": 966823296.0, "36265": 968883712.0, "36270": 973337728.0, "36275": 972775040.0, "36280": 947551104.0, "36285": 952820224.0, "36290": 960683136.0, "36295": 974673920.0, "36300": 975642368.0, "36305": 944506432.0, "36310": 967524480.0, "36315": 953215872.0, "36320": 953332352.0, "36325": 960743872.0, "36330": 943370048.0, "36335": 963532480.0, "36340": 975804608.0, "36345": 966239616.0, "36350": 978526400.0, "36355": 962765376.0, "36360": 955826816.0, "36365": 973598464.0, "36370": 961046656.0, "36375": 982063104.0, "36380": 948563840.0, "36385": 941500864.0, "36390": 962041600.0, "36395": 967943552.0, "36400": 970940160.0, "36405": 960394816.0, "36410": 939105408.0, "36415": 947969664.0, "36420": 965758720.0, "36425": 963238208.0, "36430": 973715392.0, "36435": 966297856.0, "36440": 945837248.0, "36445": 952790592.0, "36450": 965528000.0, "36455": 986104576.0, "36460": 979295360.0, "36465": 939270464.0, "36470": 980109056.0, "36475": 974950784.0, "36480": 967459392.0, "36485": 985614592.0, "36490": 945137152.0, "36495": 964122688.0, "36500": 968211136.0, "36505": 967544768.0, "36510": 961572288.0, "36515": 960845312.0, "36520": 935663424.0, "36525": 978274240.0, "36530": 963804288.0, "36535": 967051776.0, "36540": 975561088.0, "36545": 945664448.0, "36550": 967580032.0, "36555": 973661888.0, 
"36560": 962206976.0, "36565": 972798656.0, "36570": 963648128.0, "36575": 976713344.0, "36580": 954972544.0, "36585": 972108160.0, "36590": 984735808.0, "36595": 968451584.0, "36600": 965824576.0, "36605": 971457728.0, "36610": 972862528.0, "36615": 981505920.0, "36620": 952193536.0, "36625": 940168192.0, "36630": 971014720.0, "36635": 972309504.0, "36640": 964382144.0, "36645": 976825984.0, "36650": 935446784.0, "36655": 980293952.0, "36660": 971971840.0, "36665": 962360192.0, "36670": 966537792.0, "36675": 951944320.0, "36680": 946389568.0, "36685": 978312064.0, "36690": 984729984.0, "36695": 991441280.0, "36700": 969890112.0, "36705": 927008640.0, "36710": 966805440.0, "36715": 970158848.0, "36720": 975185792.0, "36725": 969909312.0, "36730": 948982336.0, "36735": 968078528.0, "36740": 968728448.0, "36745": 951151360.0, "36750": 962539264.0, "36755": 935838016.0, "36760": 964522496.0, "36765": 978634752.0, "36770": 963251008.0, "36775": 965596416.0, "36780": 949559552.0, "36785": 946286720.0, "36790": 978427904.0, "36795": 965129024.0, "36800": 960696384.0, "36805": 954110144.0, "36810": 936903232.0, "36815": 986576256.0, "36820": 972390400.0, "36825": 978259392.0, "36830": 969079296.0, "36835": 931158592.0, "36840": 971713600.0, "36845": 979582400.0, "36850": 961938240.0, "36855": 949430400.0, "36860": 961972032.0, "36865": 945192640.0, "36870": 972642432.0, "36875": 973576512.0, "36880": 971799616.0, "36885": 978101824.0, "36890": 933909248.0, "36895": 979558144.0, "36900": 957625792.0, "36905": 967274496.0, "36910": 980462336.0, "36915": 937863424.0, "36920": 960940480.0, "36925": 979555840.0, "36930": 973425664.0, "36935": 978154752.0, "36940": 960222848.0, "36945": 955185920.0, "36950": 970835392.0, "36955": 974903488.0, "36960": 969365248.0, "36965": 946410048.0, "36970": 944610944.0, "36975": 972889984.0, "36980": 974725568.0, "36985": 953538176.0, "36990": 956370048.0, "36995": 943326400.0, "37000": 975246912.0, "37005": 974802560.0, "37010": 963016896.0, "37015": 966818368.0, "37020": 946513792.0, "37025": 958033024.0, "37030": 964816768.0, "37035": 956218240.0, "37040": 978749312.0, "37045": 943196864.0, "37050": 955268096.0, "37055": 980977216.0, "37060": 966916544.0, "37065": 967602560.0, "37070": 967579328.0, "37075": 952697152.0, "37080": 959647424.0, "37085": 973063936.0, "37090": 960907776.0, "37095": 975325184.0, "37100": 942527360.0, "37105": 955667392.0, "37110": 956760960.0, "37115": 971636992.0, "37120": 962735296.0, "37125": 950661824.0, "37130": 939049280.0, "37135": 981537024.0, "37140": 976233600.0, "37145": 971613952.0, "37150": 970683456.0, "37155": 938251456.0, "37160": 961397760.0, "37165": 964479936.0, "37170": 958189184.0, "37175": 986037632.0, "37180": 942475072.0, "37185": 957716096.0, "37190": 969980544.0, "37195": 963613248.0, "37200": 976270592.0, "37205": 944329600.0, "37210": 966561600.0, "37215": 974913984.0, "37220": 957137600.0, "37225": 975381760.0, "37230": 970944704.0, "37235": 953010432.0, "37240": 976492096.0, "37245": 985534848.0, "37250": 955758720.0, "37255": 955020672.0, "37260": 949307712.0, "37265": 989999424.0, "37270": 962746752.0, "37275": 973807616.0, "37280": 958767552.0, "37285": 946238720.0, "37290": 948259200.0, "37295": 980429952.0, "37300": 947336064.0, "37305": 958987136.0, "37310": 953060480.0, "37315": 942043520.0, "37320": 962017280.0, "37325": 977894272.0, "37330": 976127296.0, "37335": 967409408.0, "37340": 942015360.0, "37345": 950581632.0, "37350": 958300928.0, "37355": 971524672.0, "37360": 974380160.0, "37365": 
961451584.0, "37370": 951343360.0, "37375": 967212736.0, "37380": 976351616.0, "37385": 965701632.0, "37390": 964652096.0, "37395": 959482496.0, "37400": 963133952.0, "37405": 983448832.0, "37410": 966315136.0, "37415": 964849408.0, "37420": 939265856.0, "37425": 964330880.0, "37430": 963114624.0, "37435": 955893888.0, "37440": 963655552.0, "37445": 947181760.0, "37450": 993347456.0, "37455": 965138432.0, "37460": 961243200.0, "37465": 962334848.0, "37470": 921122752.0, "37475": 951747456.0, "37480": 981540224.0, "37485": 978999424.0, "37490": 968895104.0, "37495": 949972544.0, "37500": 944257408.0, "37505": 958867968.0, "37510": 973384448.0, "37515": 983766400.0, "37520": 962293312.0, "37525": 951603840.0, "37530": 963517632.0, "37535": 980388160.0, "37540": 952596992.0, "37545": 994210880.0, "37550": 951797760.0, "37555": 966647808.0, "37560": 978170496.0, "37565": 972287808.0, "37570": 967735168.0, "37575": 972962688.0, "37580": 943583616.0, "37585": 960156928.0, "37590": 979285824.0, "37595": 977083776.0, "37600": 980639488.0, "37605": 956973184.0, "37610": 975485568.0, "37615": 973671808.0, "37620": 968490176.0, "37625": 982092544.0, "37630": 948407104.0, "37635": 977611456.0, "37640": 959633600.0, "37645": 977740416.0, "37650": 963453632.0, "37655": 925694528.0, "37660": 947484288.0, "37665": 967151552.0, "37670": 966099840.0, "37675": 975131136.0, "37680": 976512832.0, "37685": 949230016.0, "37690": 971181248.0, "37695": 968649216.0, "37700": 967508096.0, "37705": 978539008.0, "37710": 934186368.0, "37715": 977499648.0, "37720": 972852928.0, "37725": 990570752.0, "37730": 985123840.0, "37735": 934559232.0, "37740": 941717120.0, "37745": 967480256.0, "37750": 962063936.0, "37755": 970397440.0, "37760": 958520320.0, "37765": 932006912.0, "37770": 969701248.0, "37775": 963515584.0, "37780": 962274496.0, "37785": 964380864.0, "37790": 940243392.0, "37795": 979927040.0, "37800": 963135552.0, "37805": 973237184.0, "37810": 983024768.0, "37815": 959928704.0, "37820": 938083136.0, "37825": 955056192.0, "37830": 959987264.0, "37835": 974688512.0, "37840": 984813504.0, "37845": 931019712.0, "37850": 952565184.0, "37855": 979240576.0, "37860": 960584768.0, "37865": 964896512.0, "37870": 960131456.0, "37875": 986847680.0, "37880": 951599488.0, "37885": 976983296.0, "37890": 966058432.0, "37895": 934491456.0, "37900": 976506048.0, "37905": 969387520.0, "37910": 979813248.0, "37915": 962801728.0, "37920": 947159744.0, "37925": 945532416.0, "37930": 985873472.0, "37935": 965411072.0, "37940": 960475392.0, "37945": 972431808.0, "37950": 941478720.0, "37955": 965827456.0, "37960": 981325184.0, "37965": 969149952.0, "37970": 992237056.0, "37975": 944923520.0, "37980": 949351296.0, "37985": 976762368.0, "37990": 973587520.0, "37995": 970553408.0, "38000": 940967104.0, "38005": 945535872.0, "38010": 980724096.0, "38015": 969425024.0, "38020": 977205440.0, "38025": 964204608.0, "38030": 950300416.0, "38035": 979662464.0, "38040": 960444928.0, "38045": 968626816.0, "38050": 982679296.0, "38055": 951398016.0, "38060": 971060480.0, "38065": 985642304.0, "38070": 967143552.0, "38075": 970050368.0, "38080": 947436096.0, "38085": 972992512.0, "38090": 977905024.0, "38095": 965013184.0, "38100": 955935104.0, "38105": 963044800.0, "38110": 946746688.0, "38115": 959670656.0, "38120": 972353536.0, "38125": 971352576.0, "38130": 974140480.0, "38135": 950309760.0, "38140": 966293248.0, "38145": 971990784.0, "38150": 974091904.0, "38155": 966542400.0, "38160": 949106176.0, "38165": 965260416.0, "38170": 980376640.0, 
"38175": 967901120.0, "38180": 965471104.0, "38185": 949929280.0, "38190": 933614592.0, "38195": 983364352.0, "38200": 996344064.0, "38205": 975395392.0, "38210": 964001280.0, "38215": 946446848.0, "38220": 973024128.0, "38225": 963890944.0, "38230": 955554688.0, "38235": 969576832.0, "38240": 934333568.0, "38245": 974097984.0, "38250": 992968640.0, "38255": 967679616.0, "38260": 968393472.0, "38265": 953296704.0, "38270": 948983488.0, "38275": 967092032.0, "38280": 980006912.0, "38285": 967417408.0, "38290": 962292928.0, "38295": 958099456.0, "38300": 971984576.0, "38305": 981111616.0, "38310": 959642240.0, "38315": 961929152.0, "38320": 954095744.0, "38325": 982015552.0, "38330": 974483328.0, "38335": 968341248.0, "38340": 979704704.0, "38345": 941783424.0, "38350": 956649216.0, "38355": 974398144.0, "38360": 963108864.0, "38365": 979025664.0, "38370": 942111424.0, "38375": 937817984.0, "38380": 978072640.0, "38385": 966629504.0, "38390": 953050752.0, "38395": 959333632.0, "38400": 942164608.0, "38405": 969607936.0, "38410": 975312768.0, "38415": 975880000.0, "38420": 964209472.0, "38425": 944977024.0, "38430": 953386240.0, "38435": 960517504.0, "38440": 981580096.0, "38445": 968625600.0, "38450": 946115968.0, "38455": 941428416.0, "38460": 961624384.0, "38465": 961974976.0, "38470": 960352640.0, "38475": 993750016.0, "38480": 952690752.0, "38485": 971591744.0, "38490": 971972544.0, "38495": 951835392.0, "38500": 968818944.0, "38505": 945009024.0, "38510": 971053376.0, "38515": 967544384.0, "38520": 958994112.0, "38525": 967383616.0, "38530": 944916160.0, "38535": 953751168.0, "38540": 966223744.0, "38545": 959030528.0, "38550": 961701440.0, "38555": 971063232.0, "38560": 969191424.0, "38565": 975217536.0, "38570": 964160640.0, "38575": 960145216.0, "38580": 969614592.0, "38585": 946973568.0, "38590": 962385984.0, "38595": 974479296.0, "38600": 968123072.0, "38605": 982576000.0, "38610": 931920256.0, "38615": 946881216.0, "38620": 971235648.0, "38625": 969436288.0, "38630": 981485056.0, "38635": 966314624.0, "38640": 948737536.0, "38645": 966248064.0, "38650": 980148608.0, "38655": 965185472.0, "38660": 955793920.0, "38665": 936202240.0, "38670": 945132224.0, "38675": 954608576.0, "38680": 974388544.0, "38685": 988131200.0, "38690": 938426304.0, "38695": 960915968.0, "38700": 971310336.0, "38705": 982327360.0, "38710": 961279616.0, "38715": 992806656.0, "38720": 954720064.0, "38725": 967645632.0, "38730": 961867904.0, "38735": 957029248.0, "38740": 967464128.0, "38745": 938458752.0, "38750": 988852480.0, "38755": 966856448.0, "38760": 966967488.0, "38765": 976260864.0, "38770": 950568768.0, "38775": 962489728.0, "38780": 961777152.0, "38785": 978471296.0, "38790": 984287744.0, "38795": 941833600.0, "38800": 970468352.0, "38805": 980394496.0, "38810": 949854016.0, "38815": 973502976.0, "38820": 954183936.0, "38825": 949762880.0, "38830": 975676224.0, "38835": 967111232.0, "38840": 969829312.0, "38845": 963459520.0, "38850": 944856832.0, "38855": 962926976.0, "38860": 978614016.0, "38865": 966568064.0, "38870": 961084672.0, "38875": 956789184.0, "38880": 963692672.0, "38885": 965640448.0, "38890": 962386816.0, "38895": 977201856.0, "38900": 950781696.0, "38905": 946761344.0, "38910": 958368768.0, "38915": 977799168.0, "38920": 980220928.0, "38925": 975340864.0, "38930": 933800768.0, "38935": 960524928.0, "38940": 968237568.0, "38945": 965493376.0, "38950": 967300096.0, "38955": 958263360.0, "38960": 955642112.0, "38965": 954068992.0, "38970": 967308352.0, "38975": 957828864.0, "38980": 
947872256.0, "38985": 947920896.0, "38990": 964305536.0, "38995": 961785920.0, "39000": 966382528.0, "39005": 961697920.0, "39010": 941280512.0, "39015": 984034944.0, "39020": 976793344.0, "39025": 956076736.0, "39030": 958678080.0, "39035": 944968704.0, "39040": 984577600.0, "39045": 968275712.0, "39050": 954905152.0, "39055": 973225472.0, "39060": 950174464.0, "39065": 949730560.0, "39070": 968829184.0, "39075": 979228864.0, "39080": 975709632.0, "39085": 963109632.0, "39090": 946900800.0, "39095": 943298560.0, "39100": 973140224.0, "39105": 980332416.0, "39110": 969429760.0, "39115": 949839424.0, "39120": 961923264.0, "39125": 975007360.0, "39130": 989157760.0, "39135": 965049152.0, "39140": 944911104.0, "39145": 950328640.0, "39150": 962908480.0, "39155": 958533888.0, "39160": 966233216.0, "39165": 939151616.0, "39170": 937333632.0, "39175": 984613312.0, "39180": 961888704.0, "39185": 964670464.0, "39190": 974147520.0, "39195": 940601984.0, "39200": 963831488.0, "39205": 953449408.0, "39210": 977162944.0, "39215": 969006208.0, "39220": 944616640.0, "39225": 962608256.0, "39230": 961648704.0, "39235": 967807808.0, "39240": 971316736.0, "39245": 951820672.0, "39250": 940368192.0, "39255": 975572032.0, "39260": 958197248.0, "39265": 972188736.0, "39270": 964877824.0, "39275": 954467264.0, "39280": 968963904.0, "39285": 975187904.0, "39290": 973120896.0, "39295": 961516416.0, "39300": 936562816.0, "39305": 959278592.0, "39310": 975008000.0, "39315": 971426752.0, "39320": 963578560.0, "39325": 975214720.0, "39330": 942909056.0, "39335": 976137088.0, "39340": 963201536.0, "39345": 968695616.0, "39350": 983429120.0, "39355": 957718848.0, "39360": 972707968.0, "39365": 972390784.0, "39370": 982259648.0, "39375": 963491136.0, "39380": 932854080.0, "39385": 958879616.0, "39390": 967721536.0, "39395": 965273600.0, "39400": 963326656.0, "39405": 956958912.0, "39410": 937001024.0, "39415": 954106432.0, "39420": 978404608.0, "39425": 961489024.0, "39430": 960907520.0, "39435": 951777728.0, "39440": 971482048.0, "39445": 966581312.0, "39450": 955912000.0, "39455": 964234048.0, "39460": 937301568.0, "39465": 994779456.0, "39470": 973522880.0, "39475": 960260736.0, "39480": 956871552.0, "39485": 964036864.0, "39490": 963591616.0, "39495": 968258048.0, "39500": 961373376.0, "39505": 973203520.0, "39510": 941129408.0, "39515": 934766912.0, "39520": 970806912.0, "39525": 965843456.0, "39530": 981100928.0, "39535": 955010176.0, "39540": 954794880.0, "39545": 951354944.0, "39550": 970555520.0, "39555": 973939456.0, "39560": 990356096.0, "39565": 953615872.0, "39570": 957673024.0, "39575": 971757568.0, "39580": 960212160.0, "39585": 971636096.0, "39590": 976956032.0, "39595": 948935296.0, "39600": 954426176.0, "39605": 957988992.0, "39610": 968361216.0, "39615": 979217088.0, "39620": 934239104.0, "39625": 978403456.0, "39630": 948422912.0, "39635": 961435200.0, "39640": 972594304.0, "39645": 951618048.0, "39650": 975728064.0, "39655": 970089088.0, "39660": 973228544.0, "39665": 959915200.0, "39670": 964038208.0, "39675": 959270336.0, "39680": 968382016.0, "39685": 971572800.0, "39690": 967692544.0, "39695": 958480192.0, "39700": 933209664.0, "39705": 974614208.0, "39710": 984822016.0, "39715": 990558144.0, "39720": 964847360.0, "39725": 943790528.0, "39730": 976605376.0, "39735": 970807296.0, "39740": 965438592.0, "39745": 973749568.0, "39750": 929914176.0, "39755": 961508736.0, "39760": 970544512.0, "39765": 962489408.0, "39770": 967463616.0, "39775": 965234496.0, "39780": 948055552.0, "39785": 970364416.0, 
"39790": 972989632.0, "39795": 973859712.0, "39800": 968553088.0, "39805": 951158336.0, "39810": 978587584.0, "39815": 971938176.0, "39820": 973649152.0, "39825": 975493440.0, "39830": 951575424.0, "39835": 950276032.0, "39840": 972925952.0, "39845": 972495360.0, "39850": 976710656.0, "39855": 956163328.0, "39860": 941698688.0, "39865": 963866944.0, "39870": 968320448.0, "39875": 975504000.0, "39880": 975270528.0, "39885": 937093504.0, "39890": 964125824.0, "39895": 975142528.0, "39900": 969817792.0, "39905": 967699072.0, "39910": 938762112.0, "39915": 982383872.0, "39920": 978331072.0, "39925": 960393600.0, "39930": 966108672.0, "39935": 954752128.0, "39940": 948909120.0, "39945": 977790784.0, "39950": 974051072.0, "39955": 966383680.0, "39960": 967379584.0, "39965": 940361600.0, "39970": 972876928.0, "39975": 977953856.0, "39980": 980831744.0, "39985": 970654336.0, "39990": 947739008.0, "39995": 962989632.0, "40000": 969021824.0, "40005": 970376000.0, "40010": 967608896.0, "40015": 959371776.0, "40020": 952061760.0, "40025": 967912320.0, "40030": 970044992.0, "40035": 988662656.0, "40040": 958302720.0, "40045": 959867776.0, "40050": 966877056.0, "40055": 965660608.0, "40060": 966306688.0, "40065": 966490048.0, "40070": 955794048.0, "40075": 981960512.0, "40080": 975522880.0, "40085": 954721344.0, "40090": 970603392.0, "40095": 943403328.0, "40100": 951367872.0, "40105": 964042368.0, "40110": 956750144.0, "40115": 978977024.0, "40120": 973510336.0, "40125": 946294848.0, "40130": 986634432.0, "40135": 975586496.0, "40140": 971509952.0, "40145": 978742976.0, "40150": 929698240.0, "40155": 989607104.0, "40160": 984180480.0, "40165": 971556032.0, "40170": 983318016.0, "40175": 931024704.0, "40180": 965038464.0, "40185": 981531072.0, "40190": 975426432.0, "40195": 964015680.0, "40200": 941332032.0, "40205": 961004352.0, "40210": 984769472.0, "40215": 963128768.0, "40220": 966066112.0, "40225": 961779072.0, "40230": 944936896.0, "40235": 972311744.0, "40240": 968449728.0, "40245": 975767744.0, "40250": 976985280.0, "40255": 957452992.0, "40260": 974569536.0, "40265": 963793344.0, "40270": 961845568.0, "40275": 978608320.0, "40280": 962940672.0, "40285": 973356096.0, "40290": 971697472.0, "40295": 969221696.0, "40300": 971184448.0, "40305": 960207424.0, "40310": 941185792.0, "40315": 973474304.0, "40320": 964757120.0, "40325": 961275200.0, "40330": 975820096.0, "40335": 944318336.0, "40340": 973018304.0, "40345": 963837248.0, "40350": 967336384.0, "40355": 971888576.0, "40360": 945917504.0, "40365": 965069440.0, "40370": 969131968.0, "40375": 979577216.0, "40380": 982705792.0, "40385": 970290240.0, "40390": 926442240.0, "40395": 962351104.0, "40400": 979504640.0, "40405": 973787712.0, "40410": 964172736.0, "40415": 943654016.0, "40420": 965942528.0, "40425": 968775552.0, "40430": 962541760.0, "40435": 971507840.0, "40440": 942566848.0, "40445": 962853952.0, "40450": 973855872.0, "40455": 986138816.0, "40460": 972107392.0, "40465": 940883264.0, "40470": 947382528.0, "40475": 981752512.0, "40480": 972787712.0, "40485": 988139968.0, "40490": 940699968.0, "40495": 949882368.0, "40500": 969289664.0, "40505": 966704000.0, "40510": 974172032.0, "40515": 984688576.0, "40520": 941117248.0, "40525": 962208320.0, "40530": 973646656.0, "40535": 959696256.0, "40540": 955968000.0, "40545": 959763712.0, "40550": 965323456.0, "40555": 975714880.0, "40560": 973257536.0, "40565": 969013760.0, "40570": 976251072.0, "40575": 943296000.0, "40580": 975468672.0, "40585": 977633728.0, "40590": 974584576.0, "40595": 
975941184.0, "40600": 960218752.0, "40605": 972368704.0, "40610": 984826688.0, "40615": 968130048.0, "40620": 984312640.0, "40625": 946495232.0, "40630": 948628480.0, "40635": 974680256.0, "40640": 983024768.0, "40645": 977361024.0, "40650": 952743488.0, "40655": 951656448.0, "40660": 964659136.0, "40665": 977502208.0, "40670": 991880320.0, "40675": 971328768.0, "40680": 947394048.0, "40685": 959555072.0, "40690": 966378688.0, "40695": 973005568.0, "40700": 966975296.0, "40705": 969103680.0, "40710": 987322048.0, "40715": 967460928.0, "40720": 961356160.0, "40725": 986990912.0, "40730": 963940096.0, "40735": 955581376.0, "40740": 966458048.0, "40745": 959867328.0, "40750": 965336320.0, "40755": 982306112.0, "40760": 945558400.0, "40765": 972588992.0, "40770": 972838208.0, "40775": 974596032.0, "40780": 975499456.0, "40785": 949616576.0, "40790": 972931648.0, "40795": 977416640.0, "40800": 964798016.0, "40805": 960722752.0, "40810": 949143552.0, "40815": 962343936.0, "40820": 970938304.0, "40825": 968523840.0, "40830": 967238400.0, "40835": 971303104.0, "40840": 943672576.0, "40845": 962073664.0, "40850": 980007040.0, "40855": 968437760.0, "40860": 968835456.0, "40865": 944322944.0, "40870": 978734592.0, "40875": 968343488.0, "40880": 977606144.0, "40885": 965693056.0, "40890": 947260416.0, "40895": 952406784.0, "40900": 953246464.0, "40905": 982109440.0, "40910": 971385280.0, "40915": 959575872.0, "40920": 945206336.0, "40925": 966857408.0, "40930": 971307840.0, "40935": 980839296.0, "40940": 974400064.0, "40945": 946231872.0, "40950": 969387200.0, "40955": 967877248.0, "40960": 969184320.0, "40965": 974551872.0, "40970": 945470016.0, "40975": 951381312.0, "40980": 970714624.0, "40985": 971697792.0, "40990": 962601088.0, "40995": 942002688.0, "41000": 972827648.0, "41005": 963594304.0, "41010": 966119936.0, "41015": 953812224.0, "41020": 961092096.0, "41025": 934871616.0, "41030": 974087872.0, "41035": 973343232.0, "41040": 954408768.0, "41045": 973164864.0, "41050": 946330624.0, "41055": 963240192.0, "41060": 985522880.0, "41065": 973822720.0, "41070": 969135104.0, "41075": 942708224.0, "41080": 959086400.0, "41085": 968619712.0, "41090": 960640384.0, "41095": 975396480.0, "41100": 943240768.0, "41105": 947485504.0, "41110": 962134784.0, "41115": 967271168.0, "41120": 981489728.0, "41125": 956132736.0, "41130": 946856832.0, "41135": 959898944.0, "41140": 966377920.0, "41145": 963879168.0, "41150": 981067904.0, "41155": 952376448.0, "41160": 940127296.0, "41165": 972078592.0, "41170": 969195840.0, "41175": 956553792.0, "41180": 958283968.0, "41185": 958054144.0, "41190": 972397696.0, "41195": 965880192.0, "41200": 977951616.0, "41205": 965345088.0, "41210": 939625216.0, "41215": 982839040.0, "41220": 975069376.0, "41225": 961699200.0, "41230": 993684224.0, "41235": 944556032.0, "41240": 970446272.0, "41245": 954894720.0, "41250": 980129856.0, "41255": 960868288.0, "41260": 965373248.0, "41265": 951604800.0, "41270": 964678656.0, "41275": 975004096.0, "41280": 975949312.0, "41285": 957650624.0, "41290": 933161600.0, "41295": 968417344.0, "41300": 972277248.0, "41305": 962877952.0, "41310": 984435776.0, "41315": 936208640.0, "41320": 944427648.0, "41325": 970604736.0, "41330": 969239040.0, "41335": 973669568.0, "41340": 946706688.0, "41345": 935995456.0, "41350": 969198656.0, "41355": 973466432.0, "41360": 982157440.0, "41365": 944285056.0, "41370": 951540800.0, "41375": 964334464.0, "41380": 958289408.0, "41385": 965440192.0, "41390": 977886848.0, "41395": 939144704.0, "41400": 947244288.0, 
"41405": 964315648.0, "41410": 977268928.0, "41415": 962848128.0, "41420": 946784128.0, "41425": 950973952.0, "41430": 981937088.0, "41435": 975444928.0, "41440": 965870784.0, "41445": 957940096.0, "41450": 942184704.0, "41455": 988353856.0, "41460": 955993472.0, "41465": 955482688.0, "41470": 972994304.0, "41475": 955489856.0, "41480": 979852992.0, "41485": 966807616.0, "41490": 965001856.0, "41495": 964360448.0, "41500": 939353536.0, "41505": 966605824.0, "41510": 957805440.0, "41515": 982984576.0, "41520": 965348672.0, "41525": 953915968.0, "41530": 970464640.0, "41535": 959662144.0, "41540": 972238592.0, "41545": 987967616.0, "41550": 968788864.0, "41555": 949242432.0, "41560": 951718592.0, "41565": 977219392.0, "41570": 975035200.0, "41575": 957819136.0, "41580": 945721664.0, "41585": 966673152.0, "41590": 975714368.0, "41595": 959752256.0, "41600": 980553088.0, "41605": 945716288.0, "41610": 948941760.0, "41615": 965448192.0, "41620": 971033664.0, "41625": 978325888.0, "41630": 960496128.0, "41635": 950429504.0, "41640": 977375168.0, "41645": 958240384.0, "41650": 973122944.0, "41655": 968069888.0, "41660": 942578112.0, "41665": 975777856.0, "41670": 984350272.0, "41675": 958173440.0, "41680": 972319104.0, "41685": 940372416.0, "41690": 964953792.0, "41695": 960878208.0, "41700": 976427328.0, "41705": 955458112.0, "41710": 956816320.0, "41715": 952061824.0, "41720": 973270656.0, "41725": 971924608.0, "41730": 952399616.0, "41735": 959701120.0, "41740": 946467200.0, "41745": 964310656.0, "41750": 973001728.0, "41755": 974688896.0, "41760": 973849664.0, "41765": 948935232.0, "41770": 964763840.0, "41775": 964001344.0, "41780": 973319232.0, "41785": 977046976.0, "41790": 953279808.0, "41795": 954629120.0, "41800": 960514560.0, "41805": 958300928.0, "41810": 971288704.0, "41815": 960789888.0, "41820": 954748160.0, "41825": 962292928.0, "41830": 957440704.0, "41835": 970635136.0, "41840": 974531008.0, "41845": 934311744.0, "41850": 973337088.0, "41855": 970229376.0, "41860": 976249792.0, "41865": 979868288.0, "41870": 967328320.0, "41875": 963111680.0, "41880": 966201536.0, "41885": 960902272.0, "41890": 966115456.0, "41895": 952478592.0, "41900": 931217024.0, "41905": 982815872.0, "41910": 975249792.0, "41915": 967488960.0, "41920": 965291712.0, "41925": 936770304.0, "41930": 968270592.0, "41935": 978141888.0, "41940": 987723776.0, "41945": 957014080.0, "41950": 965895168.0, "41955": 980094464.0, "41960": 959930112.0, "41965": 964660224.0, "41970": 968112896.0, "41975": 954946816.0, "41980": 955445696.0, "41985": 969517184.0, "41990": 979111488.0, "41995": 970323840.0, "42000": 977482496.0, "42005": 937248960.0, "42010": 966792832.0, "42015": 976192704.0, "42020": 966507328.0, "42025": 963191424.0, "42030": 955226688.0, "42035": 948248768.0, "42040": 958646080.0, "42045": 969160512.0, "42050": 979204224.0, "42055": 927586048.0, "42060": 935319296.0, "42065": 977330432.0, "42070": 964322560.0, "42075": 978712576.0, "42080": 951883584.0, "42085": 947489920.0, "42090": 966873344.0, "42095": 966668032.0, "42100": 959424832.0, "42105": 968013184.0, "42110": 965849280.0, "42115": 977759424.0, "42120": 967759872.0, "42125": 971722944.0, "42130": 963808256.0, "42135": 946873408.0, "42140": 969155392.0, "42145": 974009408.0, "42150": 970974464.0, "42155": 963411648.0, "42160": 949017344.0, "42165": 942336768.0, "42170": 964087872.0, "42175": 987594304.0, "42180": 956466624.0, "42185": 951718080.0, "42190": 942186944.0, "42195": 971477696.0, "42200": 976338560.0, "42205": 959161216.0, "42210": 
966747200.0, "42215": 945564096.0, "42220": 956285888.0, "42225": 984682368.0, "42230": 959069760.0, "42235": 974978048.0, "42240": 954776192.0, "42245": 943844544.0, "42250": 962522560.0, "42255": 962677120.0, "42260": 967222016.0, "42265": 964788352.0, "42270": 955276800.0, "42275": 975162176.0, "42280": 970071808.0, "42285": 963229824.0, "42290": 978049600.0, "42295": 951693696.0, "42300": 966213440.0, "42305": 978854976.0, "42310": 952822720.0, "42315": 971367104.0, "42320": 960586560.0, "42325": 957060672.0, "42330": 974365568.0, "42335": 971585792.0, "42340": 960521280.0, "42345": 968793024.0, "42350": 947178048.0, "42355": 985523456.0, "42360": 972847616.0, "42365": 969481600.0, "42370": 975051584.0, "42375": 946239872.0, "42380": 960723008.0, "42385": 975564672.0, "42390": 980635328.0, "42395": 948235200.0, "42400": 935453696.0, "42405": 932114496.0, "42410": 969430848.0, "42415": 975776064.0, "42420": 974336000.0, "42425": 953198144.0, "42430": 938764160.0, "42435": 974695360.0, "42440": 967803904.0, "42445": 984104448.0, "42450": 970254208.0, "42455": 949984768.0, "42460": 974123200.0, "42465": 967497344.0, "42470": 972520960.0, "42475": 971380416.0, "42480": 945411904.0, "42485": 941598336.0, "42490": 960496128.0, "42495": 972736064.0, "42500": 958254400.0, "42505": 962091392.0, "42510": 945023872.0, "42515": 965986560.0, "42520": 969783232.0, "42525": 976723840.0, "42530": 972019264.0, "42535": 958988672.0, "42540": 970758784.0, "42545": 970887104.0, "42550": 963732224.0, "42555": 961570048.0, "42560": 947412288.0, "42565": 951031808.0, "42570": 970237760.0, "42575": 971765888.0, "42580": 969418944.0, "42585": 953307648.0, "42590": 967441664.0, "42595": 956988608.0, "42600": 981727744.0, "42605": 970985984.0, "42610": 978399872.0, "42615": 952467968.0, "42620": 969464704.0, "42625": 979549824.0, "42630": 983032000.0, "42635": 966509120.0, "42640": 935047680.0, "42645": 961632384.0, "42650": 968881472.0, "42655": 985407488.0, "42660": 976913152.0, "42665": 948166720.0, "42670": 955765824.0, "42675": 982116992.0, "42680": 966600000.0, "42685": 971059968.0, "42690": 977233344.0, "42695": 943384832.0, "42700": 972099008.0, "42705": 972105024.0, "42710": 968401408.0, "42715": 974290176.0, "42720": 958960512.0, "42725": 972841024.0, "42730": 974690752.0, "42735": 959666944.0, "42740": 983137280.0, "42745": 970004352.0, "42750": 956012800.0, "42755": 967602560.0, "42760": 967993728.0, "42765": 972682368.0, "42770": 963225728.0, "42775": 944276096.0, "42780": 984903232.0, "42785": 967987200.0, "42790": 975397440.0, "42795": 983655168.0, "42800": 941137088.0, "42805": 981973760.0, "42810": 970353920.0, "42815": 962275776.0, "42820": 968690112.0, "42825": 936755968.0, "42830": 962480512.0, "42835": 961823488.0, "42840": 988175168.0, "42845": 972595648.0, "42850": 950303232.0, "42855": 948342080.0, "42860": 978000128.0, "42865": 977589632.0, "42870": 976962496.0, "42875": 971895360.0, "42880": 948851136.0, "42885": 981082240.0, "42890": 963893248.0, "42895": 972190272.0, "42900": 970974464.0, "42905": 945993344.0, "42910": 946805760.0, "42915": 968825664.0, "42920": 966055168.0, "42925": 981217024.0, "42930": 957976512.0, "42935": 952713664.0, "42940": 977494464.0, "42945": 956614016.0, "42950": 968825088.0, "42955": 966631552.0, "42960": 949181440.0, "42965": 980182208.0, "42970": 976196352.0, "42975": 984878656.0, "42980": 957547200.0, "42985": 938264000.0, "42990": 976939520.0, "42995": 968787136.0, "43000": 970371584.0, "43005": 976163136.0, "43010": 954746240.0, "43015": 974615488.0, 
"43020": 981621056.0, "43025": 973264192.0, "43030": 966718528.0, "43035": 979063168.0, "43040": 953158656.0, "43045": 960519360.0, "43050": 973296896.0, "43055": 975724224.0, "43060": 970126208.0, "43065": 947370368.0, "43070": 960872320.0, "43075": 969029248.0, "43080": 985720256.0, "43085": 980765632.0, "43090": 945269184.0, "43095": 961976896.0, "43100": 972742272.0, "43105": 981445824.0, "43110": 968885952.0, "43115": 960776896.0, "43120": 950066240.0, "43125": 970941952.0, "43130": 956013440.0, "43135": 970040704.0, "43140": 971948480.0, "43145": 961568192.0, "43150": 966626368.0, "43155": 970820416.0, "43160": 968353600.0, "43165": 973385280.0, "43170": 955109504.0, "43175": 966885248.0, "43180": 968155648.0, "43185": 970765120.0, "43190": 961981824.0, "43195": 947980032.0, "43200": 944409856.0, "43205": 971987328.0, "43210": 952756224.0, "43215": 971730688.0, "43220": 964881024.0, "43225": 950116224.0, "43230": 979181568.0, "43235": 965827648.0, "43240": 962227968.0, "43245": 977587968.0, "43250": 952332288.0, "43255": 973475008.0, "43260": 965545152.0, "43265": 970508352.0, "43270": 956994432.0, "43275": 944754176.0, "43280": 956735360.0, "43285": 957387584.0, "43290": 965445312.0, "43295": 985105920.0, "43300": 961251840.0, "43305": 931792128.0, "43310": 969082432.0, "43315": 971630592.0, "43320": 973284672.0, "43325": 974247104.0, "43330": 951441472.0, "43335": 975585408.0, "43340": 973304960.0, "43345": 976533632.0, "43350": 968802048.0, "43355": 946370304.0, "43360": 962428608.0, "43365": 970684288.0, "43370": 961201728.0, "43375": 991777536.0, "43380": 973462912.0, "43385": 934592704.0, "43390": 974119168.0, "43395": 966534080.0, "43400": 953955584.0, "43405": 966754752.0, "43410": 942893312.0, "43415": 984287040.0, "43420": 970583808.0, "43425": 959385984.0, "43430": 966834240.0, "43435": 947965312.0, "43440": 946149056.0, "43445": 968726656.0, "43450": 968067776.0, "43455": 959095872.0, "43460": 962977920.0, "43465": 947057856.0, "43470": 978773824.0, "43475": 959753472.0, "43480": 962924800.0, "43485": 959851072.0, "43490": 939091008.0, "43495": 967049728.0, "43500": 989693120.0, "43505": 973353280.0, "43510": 969467520.0, "43515": 948436416.0, "43520": 951233152.0, "43525": 968001664.0, "43530": 976631232.0, "43535": 985642432.0, "43540": 939956544.0, "43545": 942707200.0, "43550": 951503744.0, "43555": 967766976.0, "43560": 986303744.0, "43565": 954562560.0, "43570": 938848768.0, "43575": 970266944.0, "43580": 976741696.0, "43585": 975122432.0, "43590": 969725888.0, "43595": 942977920.0, "43600": 957485760.0, "43605": 977508416.0, "43610": 966901248.0, "43615": 986149248.0, "43620": 943458176.0, "43625": 932236928.0, "43630": 975555264.0, "43635": 944336384.0, "43640": 975547200.0, "43645": 950691904.0, "43650": 944795712.0, "43655": 968316352.0, "43660": 971408256.0, "43665": 965041920.0, "43670": 965140032.0, "43675": 950600768.0, "43680": 964298496.0, "43685": 970795776.0, "43690": 969704320.0, "43695": 963867136.0, "43700": 930895424.0, "43705": 965109760.0, "43710": 967865408.0, "43715": 968863232.0, "43720": 960592128.0, "43725": 953604224.0, "43730": 952742208.0, "43735": 972693632.0, "43740": 994006208.0, "43745": 962932096.0, "43750": 965301952.0, "43755": 940594816.0, "43760": 968334592.0, "43765": 965799808.0, "43770": 952752576.0, "43775": 969686528.0, "43780": 937083328.0, "43785": 965231616.0, "43790": 956977920.0, "43795": 948764096.0, "43800": 970730304.0, "43805": 932106112.0, "43810": 957006720.0, "43815": 961369600.0, "43820": 970100864.0, "43825": 
961505088.0, "43830": 974370368.0, "43835": 934577280.0, "43840": 962812864.0, "43845": 972694080.0, "43850": 961349568.0, "43855": 962664576.0, "43860": 953415808.0, "43865": 959956032.0, "43870": 975303488.0, "43875": 972901184.0, "43880": 960966272.0, "43885": 941376576.0, "43890": 970776768.0, "43895": 982448512.0, "43900": 974074368.0, "43905": 969640512.0, "43910": 950882240.0, "43915": 938747520.0, "43920": 967791360.0, "43925": 969752896.0, "43930": 958644672.0, "43935": 977869824.0, "43940": 957360448.0, "43945": 996229248.0, "43950": 968510336.0, "43955": 983184000.0, "43960": 969876416.0, "43965": 947775488.0, "43970": 964536704.0, "43975": 968621888.0, "43980": 973051648.0, "43985": 960221824.0, "43990": 955643136.0, "43995": 943782912.0, "44000": 960919936.0, "44005": 950889856.0, "44010": 970340672.0, "44015": 984421504.0, "44020": 939949632.0, "44025": 955563904.0, "44030": 975962944.0, "44035": 977304704.0, "44040": 978641792.0, "44045": 933424576.0, "44050": 954967808.0, "44055": 967247360.0, "44060": 981729408.0, "44065": 973338432.0, "44070": 939927872.0, "44075": 932714624.0, "44080": 975579392.0, "44085": 963402880.0, "44090": 969052928.0, "44095": 961932480.0, "44100": 951656000.0, "44105": 963825472.0, "44110": 966036736.0, "44115": 964542400.0, "44120": 961270400.0, "44125": 951344448.0, "44130": 965187904.0, "44135": 970538432.0, "44140": 964585024.0, "44145": 973834880.0, "44150": 945326080.0, "44155": 950625280.0, "44160": 965488192.0, "44165": 987526592.0, "44170": 981993408.0, "44175": 958082752.0, "44180": 938501376.0, "44185": 974978816.0, "44190": 966534784.0, "44195": 966657088.0, "44200": 963719616.0, "44205": 937498880.0, "44210": 971164864.0, "44215": 968959552.0, "44220": 974455936.0, "44225": 973511488.0, "44230": 956290112.0, "44235": 962669504.0, "44240": 957179520.0, "44245": 966794816.0, "44250": 972888768.0, "44255": 961609856.0, "44260": 962859904.0, "44265": 962945280.0, "44270": 972318272.0, "44275": 960784256.0, "44280": 975391744.0, "44285": 938630528.0, "44290": 956551232.0, "44295": 972128896.0, "44300": 979216640.0, "44305": 968628800.0, "44310": 954186816.0, "44315": 955858304.0, "44320": 986755456.0, "44325": 972362368.0, "44330": 950185024.0, "44335": 950416704.0, "44340": 943913728.0, "44345": 983963008.0, "44350": 965304064.0, "44355": 959348352.0, "44360": 950520512.0, "44365": 934408960.0, "44370": 966024512.0, "44375": 974051008.0, "44380": 973278016.0, "44385": 959800960.0, "44390": 947916288.0, "44395": 963836736.0, "44400": 978805120.0, "44405": 980862336.0, "44410": 965539456.0, "44415": 961515328.0, "44420": 955804160.0, "44425": 967926464.0, "44430": 970052416.0, "44435": 971139840.0, "44440": 953856384.0, "44445": 936788352.0, "44450": 966982656.0, "44455": 956737792.0, "44460": 971584640.0, "44465": 989310656.0, "44470": 958000512.0, "44475": 944188416.0, "44480": 955967808.0, "44485": 963101888.0, "44490": 969739904.0, "44495": 954017600.0, "44500": 965851840.0, "44505": 976015488.0, "44510": 968905024.0, "44515": 967869632.0, "44520": 965653888.0, "44525": 957117632.0, "44530": 966730880.0, "44535": 985996864.0, "44540": 970588224.0, "44545": 970797312.0, "44550": 959972736.0, "44555": 950932032.0, "44560": 975619200.0, "44565": 971049088.0, "44570": 958544192.0, "44575": 945515456.0, "44580": 967583296.0, "44585": 960195712.0, "44590": 972954240.0, "44595": 975832576.0, "44600": 942376704.0, "44605": 956388480.0, "44610": 962400896.0, "44615": 981636736.0, "44620": 986167424.0, "44625": 969935104.0, "44630": 950595968.0, 
"44635": 968810624.0, "44640": 982097536.0, "44645": 967762240.0, "44650": 965784256.0, "44655": 952761024.0, "44660": 959680256.0, "44665": 960956288.0, "44670": 960244096.0, "44675": 963319104.0, "44680": 945331456.0, "44685": 943680256.0, "44690": 960368448.0, "44695": 963911360.0, "44700": 979801664.0, "44705": 975754816.0, "44710": 961921600.0, "44715": 963869184.0, "44720": 970512832.0, "44725": 951362176.0, "44730": 958677440.0, "44735": 965547904.0, "44740": 970401792.0, "44745": 968878592.0, "44750": 964082688.0, "44755": 961558528.0, "44760": 941426624.0, "44765": 955205120.0, "44770": 975784960.0, "44775": 969568640.0, "44780": 975943808.0, "44785": 953271552.0, "44790": 952478848.0, "44795": 953870144.0, "44800": 981413440.0, "44805": 988277504.0, "44810": 965416320.0, "44815": 947654656.0, "44820": 974340096.0, "44825": 954366976.0, "44830": 978328448.0, "44835": 968400512.0, "44840": 926107200.0, "44845": 970994048.0, "44850": 973927744.0, "44855": 979991040.0, "44860": 961804416.0, "44865": 946540672.0, "44870": 959297536.0, "44875": 963635520.0, "44880": 975561600.0, "44885": 958494080.0, "44890": 978608448.0, "44895": 939516928.0, "44900": 973124416.0, "44905": 958437504.0, "44910": 970817024.0, "44915": 963696000.0, "44920": 947157440.0, "44925": 959370688.0, "44930": 964189248.0, "44935": 977035264.0, "44940": 966474240.0, "44945": 946208576.0, "44950": 961330944.0, "44955": 978743232.0, "44960": 961125376.0, "44965": 960612928.0, "44970": 971206720.0, "44975": 954745664.0, "44980": 979904320.0, "44985": 961171648.0, "44990": 967404352.0, "44995": 973151680.0, "45000": 940168000.0, "45005": 957014080.0, "45010": 970390528.0, "45015": 978064512.0, "45020": 967781824.0, "45025": 947920000.0, "45030": 971001856.0, "45035": 963615808.0, "45040": 972460800.0, "45045": 973044800.0, "45050": 957288128.0, "45055": 941474688.0, "45060": 962410112.0, "45065": 957900864.0, "45070": 993864960.0, "45075": 981819072.0, "45080": 948050816.0, "45085": 969686912.0, "45090": 979942720.0, "45095": 993521280.0, "45100": 969776512.0, "45105": 937850048.0, "45110": 951086144.0, "45115": 985064704.0, "45120": 958470464.0, "45125": 977331392.0, "45130": 959019072.0, "45135": 964845056.0, "45140": 985562944.0, "45145": 969985664.0, "45150": 968418816.0, "45155": 967677824.0, "45160": 957436864.0, "45165": 967670592.0, "45170": 978073920.0, "45175": 967631168.0, "45180": 955939264.0, "45185": 957879168.0, "45190": 973762688.0, "45195": 961865152.0, "45200": 975366656.0, "45205": 982372416.0, "45210": 938574848.0, "45215": 955436864.0, "45220": 992137728.0, "45225": 972501696.0, "45230": 967455232.0, "45235": 969762176.0, "45240": 945801728.0, "45245": 975027584.0, "45250": 977823680.0, "45255": 979279232.0, "45260": 967725248.0, "45265": 939039296.0, "45270": 983221952.0, "45275": 966673216.0, "45280": 978516864.0, "45285": 978855936.0, "45290": 945115328.0, "45295": 954477440.0, "45300": 974916416.0, "45305": 979646656.0, "45310": 968586496.0, "45315": 965691904.0, "45320": 943702208.0, "45325": 979308352.0, "45330": 983081216.0, "45335": 977041728.0, "45340": 959894784.0, "45345": 946731072.0, "45350": 967337472.0, "45355": 967043968.0, "45360": 969026048.0, "45365": 977309632.0, "45370": 946584256.0, "45375": 948225600.0, "45380": 966369984.0, "45385": 958025088.0, "45390": 963045696.0, "45395": 960637056.0, "45400": 945162112.0, "45405": 976600256.0, "45410": 962602816.0, "45415": 965116864.0, "45420": 962811712.0, "45425": 952776640.0, "45430": 972206144.0, "45435": 976601088.0, "45440": 
972762432.0, "45445": 959094976.0, "45450": 947169024.0, "45455": 970520128.0, "45460": 976349568.0, "45465": 958408448.0, "45470": 977169024.0, "45475": 953952576.0, "45480": 938983168.0, "45485": 976601408.0, "45490": 966926272.0, "45495": 982473088.0, "45500": 980944128.0, "45505": 927488640.0, "45510": 972740096.0, "45515": 961385984.0, "45520": 978215232.0, "45525": 988910656.0, "45530": 941554880.0, "45535": 971071552.0, "45540": 960853056.0, "45545": 965667264.0, "45550": 975986944.0, "45555": 961300544.0, "45560": 963836736.0, "45565": 952654720.0, "45570": 980970176.0, "45575": 971257408.0, "45580": 951607168.0, "45585": 943998592.0, "45590": 989178752.0, "45595": 975053056.0, "45600": 968921024.0, "45605": 974642304.0, "45610": 959542656.0, "45615": 967861632.0, "45620": 966220288.0, "45625": 977750528.0, "45630": 955908224.0, "45635": 948204160.0, "45640": 949778752.0, "45645": 976683904.0, "45650": 963298496.0, "45655": 970997248.0, "45660": 962639872.0, "45665": 956807936.0, "45670": 973676608.0, "45675": 962213120.0, "45680": 972165120.0, "45685": 968862528.0, "45690": 951399168.0, "45695": 972634112.0, "45700": 968617408.0, "45705": 971821440.0, "45710": 961240704.0, "45715": 935568640.0, "45720": 949580288.0, "45725": 966540608.0, "45730": 965896576.0, "45735": 988251072.0, "45740": 945657472.0, "45745": 945104128.0, "45750": 969709312.0, "45755": 958434688.0, "45760": 965094720.0, "45765": 979274176.0, "45770": 940793792.0, "45775": 966068160.0, "45780": 968717632.0, "45785": 958055168.0, "45790": 968697216.0, "45795": 953332864.0, "45800": 940840128.0, "45805": 971445760.0, "45810": 977074176.0, "45815": 963887296.0, "45820": 941671232.0, "45825": 963633664.0, "45830": 959582656.0, "45835": 965012736.0, "45840": 972263296.0, "45845": 974697472.0, "45850": 941002240.0, "45855": 963155136.0, "45860": 972852160.0, "45865": 952720000.0, "45870": 968381952.0, "45875": 948496512.0, "45880": 973911936.0, "45885": 985677312.0, "45890": 972212032.0, "45895": 963646976.0, "45900": 943845760.0, "45905": 964808192.0, "45910": 975801088.0, "45915": 952524864.0, "45920": 956879872.0, "45925": 949279040.0, "45930": 937618432.0, "45935": 985950016.0, "45940": 958738560.0, "45945": 972485504.0, "45950": 976348160.0, "45955": 939950208.0, "45960": 975740096.0, "45965": 961711616.0, "45970": 978583296.0, "45975": 974655616.0, "45980": 917688704.0, "45985": 962768448.0, "45990": 958769664.0, "45995": 968394432.0, "46000": 971484736.0, "46005": 966207936.0, "46010": 950624320.0, "46015": 964619008.0, "46020": 978579136.0, "46025": 968334528.0, "46030": 969458560.0, "46035": 950938112.0, "46040": 952819072.0, "46045": 973228800.0, "46050": 961921216.0, "46055": 972196608.0, "46060": 953621888.0, "46065": 967898112.0, "46070": 952026048.0, "46075": 966380032.0, "46080": 960729984.0, "46085": 939319104.0, "46090": 970022528.0, "46095": 984566400.0, "46100": 970423360.0, "46105": 963641344.0, "46110": 941877696.0, "46115": 952875008.0, "46120": 977926272.0, "46125": 968700928.0, "46130": 979691840.0, "46135": 966840192.0, "46140": 952228160.0, "46145": 956413184.0, "46150": 963488576.0, "46155": 968289984.0, "46160": 963284736.0, "46165": 939988992.0, "46170": 971331968.0, "46175": 977492032.0, "46180": 966884736.0, "46185": 967612992.0, "46190": 950148032.0, "46195": 951343744.0, "46200": 953882432.0, "46205": 974807488.0, "46210": 964930944.0, "46215": 975919616.0, "46220": 952718912.0, "46225": 970232448.0, "46230": 961724800.0, "46235": 969119488.0, "46240": 975599424.0, "46245": 962185216.0, 
"46250": 984489152.0, "46255": 972250624.0, "46260": 973983808.0, "46265": 956773632.0, "46270": 945479616.0, "46275": 959865280.0, "46280": 960022976.0, "46285": 979867072.0, "46290": 974137664.0, "46295": 972303168.0, "46300": 933028672.0, "46305": 961030976.0, "46310": 972822336.0, "46315": 963101312.0, "46320": 947019200.0, "46325": 951466176.0, "46330": 975422784.0, "46335": 978690944.0, "46340": 971865024.0, "46345": 978208192.0, "46350": 939428288.0, "46355": 954918016.0, "46360": 976608512.0, "46365": 967551104.0, "46370": 969414848.0, "46375": 952289408.0, "46380": 939156928.0, "46385": 986790464.0, "46390": 970691072.0, "46395": 965496384.0, "46400": 956889664.0, "46405": 937333824.0, "46410": 978908160.0, "46415": 975650688.0, "46420": 971943936.0, "46425": 957941952.0, "46430": 944118080.0, "46435": 952905344.0, "46440": 949993920.0, "46445": 980898944.0, "46450": 969587328.0, "46455": 972706368.0, "46460": 946622656.0, "46465": 968179072.0, "46470": 988270720.0, "46475": 961179648.0, "46480": 974867328.0, "46485": 947393152.0, "46490": 963953728.0, "46495": 959316416.0, "46500": 966975488.0, "46505": 950017856.0, "46510": 954721408.0, "46515": 978224896.0, "46520": 963710208.0, "46525": 959523648.0, "46530": 973594112.0, "46535": 949310592.0, "46540": 951856448.0, "46545": 975106944.0, "46550": 965268928.0, "46555": 950404864.0, "46560": 960865344.0, "46565": 943569984.0, "46570": 979560704.0, "46575": 967596096.0, "46580": 976010944.0, "46585": 956089408.0, "46590": 945472000.0, "46595": 957332864.0, "46600": 973477312.0, "46605": 966509632.0, "46610": 971433536.0, "46615": 958241152.0, "46620": 948445888.0, "46625": 968561664.0, "46630": 964660864.0, "46635": 966717440.0, "46640": 956325312.0, "46645": 941167232.0, "46650": 962715072.0, "46655": 962764416.0, "46660": 957296128.0, "46665": 959288320.0, "46670": 942943616.0, "46675": 971255616.0, "46680": 964661632.0, "46685": 970945344.0, "46690": 974267584.0, "46695": 962688128.0, "46700": 971261696.0, "46705": 976274048.0, "46710": 965551744.0, "46715": 962302464.0, "46720": 966841920.0, "46725": 960927104.0, "46730": 955472640.0, "46735": 970734976.0, "46740": 967265344.0, "46745": 966049408.0, "46750": 948496512.0, "46755": 972339456.0, "46760": 967719616.0, "46765": 968689280.0, "46770": 984418944.0, "46775": 940804800.0, "46780": 938453248.0, "46785": 961711808.0, "46790": 955361152.0, "46795": 971391872.0, "46800": 942401856.0, "46805": 948863360.0, "46810": 972807872.0, "46815": 980114880.0, "46820": 966362560.0, "46825": 963322432.0, "46830": 944661696.0, "46835": 973495296.0, "46840": 978805376.0, "46845": 970390528.0, "46850": 955622528.0, "46855": 940186496.0, "46860": 963611840.0, "46865": 966044032.0, "46870": 976895360.0, "46875": 961858624.0, "46880": 942592640.0, "46885": 951940032.0, "46890": 982889280.0, "46895": 958383360.0, "46900": 968320448.0, "46905": 953280384.0, "46910": 949808256.0, "46915": 975108672.0, "46920": 959931520.0, "46925": 979066240.0, "46930": 983111744.0, "46935": 951411840.0, "46940": 958236864.0, "46945": 949225216.0, "46950": 964370816.0, "46955": 973567168.0, "46960": 948963456.0, "46965": 965416704.0, "46970": 981222848.0, "46975": 969776320.0, "46980": 957880384.0, "46985": 929929152.0, "46990": 932323648.0, "46995": 977873600.0, "47000": 973341248.0, "47005": 969183104.0, "47010": 971933440.0, "47015": 942033088.0, "47020": 979372672.0, "47025": 975047808.0, "47030": 972822528.0, "47035": 960320640.0, "47040": 946025792.0, "47045": 962206272.0, "47050": 972382848.0, "47055": 
976368000.0, "47060": 976854080.0, "47065": 970606528.0, "47070": 948239360.0, "47075": 974692544.0, "47080": 982699776.0, "47085": 962161856.0, "47090": 987117760.0, "47095": 934849344.0, "47100": 956782080.0, "47105": 968918592.0, "47110": 982945536.0, "47115": 977457984.0, "47120": 939129920.0, "47125": 982732032.0, "47130": 983787456.0, "47135": 969387584.0, "47140": 973493568.0, "47145": 964158720.0, "47150": 956670080.0, "47155": 982962368.0, "47160": 968342400.0, "47165": 975607104.0, "47170": 970312576.0, "47175": 964359616.0, "47180": 979176192.0, "47185": 962105216.0, "47190": 973843520.0, "47195": 977736448.0, "47200": 963077376.0, "47205": 970858176.0, "47210": 971049856.0, "47215": 976299904.0, "47220": 976415168.0, "47225": 948238464.0, "47230": 949927104.0, "47235": 975428672.0, "47240": 977425088.0, "47245": 978246784.0, "47250": 953372224.0, "47255": 934882816.0, "47260": 985667520.0, "47265": 982650496.0, "47270": 965649600.0, "47275": 963425792.0, "47280": 934753024.0, "47285": 964164416.0, "47290": 981644096.0, "47295": 971882816.0, "47300": 987008000.0, "47305": 949418752.0, "47310": 965467520.0, "47315": 987509184.0, "47320": 974422400.0, "47325": 972271424.0, "47330": 963194688.0, "47335": 940503168.0, "47340": 967055744.0, "47345": 982474752.0, "47350": 976749440.0, "47355": 973557312.0, "47360": 951448960.0, "47365": 974875392.0, "47370": 957188608.0, "47375": 953324544.0, "47380": 983355968.0, "47385": 960166144.0, "47390": 956874560.0, "47395": 966216768.0, "47400": 967756224.0, "47405": 975470656.0, "47410": 932269184.0, "47415": 949644800.0, "47420": 979722624.0, "47425": 969592576.0, "47430": 965072448.0, "47435": 970324672.0, "47440": 948589504.0, "47445": 972131648.0, "47450": 967163136.0, "47455": 968157312.0, "47460": 975753664.0, "47465": 950214016.0, "47470": 980936384.0, "47475": 971383488.0, "47480": 971970688.0, "47485": 976245760.0, "47490": 958322880.0, "47495": 950848896.0, "47500": 974717440.0, "47505": 979845312.0, "47510": 985900928.0, "47515": 961245376.0, "47520": 942061120.0, "47525": 975348288.0, "47530": 976394048.0, "47535": 974373248.0, "47540": 968502656.0, "47545": 942728640.0, "47550": 966787840.0, "47555": 970750336.0, "47560": 979838720.0, "47565": 975712960.0, "47570": 946056960.0, "47575": 960878784.0, "47580": 969767488.0, "47585": 974570176.0, "47590": 961307520.0, "47595": 966639680.0, "47600": 959436480.0, "47605": 968617536.0, "47610": 979366656.0, "47615": 963794176.0, "47620": 973533568.0, "47625": 936869824.0, "47630": 964262656.0, "47635": 968493312.0, "47640": 964510016.0, "47645": 958495424.0, "47650": 951823808.0, "47655": 976054528.0, "47660": 980993984.0, "47665": 970589824.0, "47670": 974113344.0, "47675": 955652032.0, "47680": 968445696.0, "47685": 985255872.0, "47690": 962402368.0, "47695": 971350464.0, "47700": 979148736.0, "47705": 958175168.0, "47710": 973031616.0, "47715": 983924224.0, "47720": 983364672.0, "47725": 973248960.0, "47730": 939597056.0, "47735": 966869696.0, "47740": 969568192.0, "47745": 983001792.0, "47750": 983983680.0, "47755": 937710720.0, "47760": 954253440.0, "47765": 963344896.0, "47770": 965197632.0, "47775": 956958656.0, "47780": 975096704.0, "47785": 959862848.0, "47790": 967099072.0, "47795": 974169472.0, "47800": 954011328.0, "47805": 981628288.0, "47810": 943087488.0, "47815": 969680704.0, "47820": 962984320.0, "47825": 974047040.0, "47830": 976345600.0, "47835": 945349632.0, "47840": 961939968.0, "47845": 975261440.0, "47850": 969496128.0, "47855": 979506176.0, "47860": 962652480.0, 
"47865": 955982784.0, "47870": 961818240.0, "47875": 961519104.0, "47880": 974801984.0, "47885": 964434240.0, "47890": 940112000.0, "47895": 970345664.0, "47900": 987471424.0, "47905": 963457088.0, "47910": 962038976.0, "47915": 942868992.0, "47920": 963579456.0, "47925": 979863936.0, "47930": 971521152.0, "47935": 955150848.0, "47940": 963789056.0, "47945": 936014272.0, "47950": 972024576.0, "47955": 975927168.0, "47960": 980818048.0, "47965": 958628928.0, "47970": 945375040.0, "47975": 966954368.0, "47980": 956867136.0, "47985": 992600192.0, "47990": 978117632.0, "47995": 952557376.0, "48000": 961743680.0, "48005": 967046080.0, "48010": 974715584.0, "48015": 963114944.0, "48020": 939996224.0, "48025": 961860672.0, "48030": 959899648.0, "48035": 978998784.0, "48040": 978966464.0, "48045": 961847744.0, "48050": 954613312.0, "48055": 981840128.0, "48060": 970558464.0, "48065": 980283584.0, "48070": 958112896.0, "48075": 944681408.0, "48080": 982554240.0, "48085": 974084032.0, "48090": 966036608.0, "48095": 979734272.0, "48100": 944052992.0, "48105": 975563840.0, "48110": 966202496.0, "48115": 962235200.0, "48120": 969478912.0, "48125": 940408128.0, "48130": 948976448.0, "48135": 960926336.0, "48140": 972586112.0, "48145": 970518784.0, "48150": 956210368.0, "48155": 938532480.0, "48160": 961485888.0, "48165": 966352768.0, "48170": 980660096.0, "48175": 975200640.0, "48180": 936179520.0, "48185": 949631040.0, "48190": 982040896.0, "48195": 968715520.0, "48200": 970083072.0, "48205": 965838336.0, "48210": 956935680.0, "48215": 952832064.0, "48220": 971529216.0, "48225": 980600832.0, "48230": 980145536.0, "48235": 935547968.0, "48240": 963929344.0, "48245": 981029824.0, "48250": 957748096.0, "48255": 976858816.0, "48260": 935727296.0, "48265": 976153536.0, "48270": 962489920.0, "48275": 971990144.0, "48280": 959649984.0, "48285": 949984384.0, "48290": 957217472.0, "48295": 974269888.0, "48300": 975575552.0, "48305": 968842368.0, "48310": 948925632.0, "48315": 952272320.0, "48320": 972715584.0, "48325": 967884608.0, "48330": 972861440.0, "48335": 958924096.0, "48340": 934282560.0, "48345": 953232192.0, "48350": 970263552.0, "48355": 973009280.0, "48360": 958998464.0, "48365": 930149632.0, "48370": 958107328.0, "48375": 973121472.0, "48380": 974918592.0, "48385": 960110784.0, "48390": 936377344.0, "48395": 976851072.0, "48400": 972062848.0, "48405": 973038912.0, "48410": 966418880.0, "48415": 968267136.0, "48420": 940007360.0, "48425": 978480768.0, "48430": 965036864.0, "48435": 974519872.0, "48440": 970165248.0, "48445": 957913664.0, "48450": 961351808.0, "48455": 959932352.0, "48460": 967995968.0, "48465": 970792448.0, "48470": 951954368.0, "48475": 936329408.0, "48480": 958874688.0, "48485": 973972480.0, "48490": 958758464.0, "48495": 952642816.0, "48500": 935708160.0, "48505": 969655744.0, "48510": 957194240.0, "48515": 974712000.0, "48520": 960629184.0, "48525": 933944960.0, "48530": 961417152.0, "48535": 976031104.0, "48540": 976268928.0, "48545": 969844352.0, "48550": 949860096.0, "48555": 951716288.0, "48560": 968010048.0, "48565": 972916544.0, "48570": 975350016.0, "48575": 961690048.0, "48580": 932656192.0, "48585": 979143488.0, "48590": 983097216.0, "48595": 966828352.0, "48600": 957146240.0, "48605": 938613376.0, "48610": 957971328.0, "48615": 971350784.0, "48620": 975165184.0, "48625": 981115712.0, "48630": 940266112.0, "48635": 957197056.0, "48640": 978480960.0, "48645": 967568000.0, "48650": 970858752.0, "48655": 963518336.0, "48660": 945633088.0, "48665": 968764224.0, "48670": 
972692736.0, "48675": 984537280.0, "48680": 960340416.0, "48685": 950051264.0, "48690": 965331264.0, "48695": 971964544.0, "48700": 966969984.0, "48705": 967444608.0, "48710": 947385984.0, "48715": 960866112.0, "48720": 965712448.0, "48725": 956937664.0, "48730": 972454016.0, "48735": 962562368.0, "48740": 959341376.0, "48745": 972186688.0, "48750": 966509440.0, "48755": 981553600.0, "48760": 968357824.0, "48765": 949049280.0, "48770": 957267328.0, "48775": 988713920.0, "48780": 965875456.0, "48785": 965732544.0, "48790": 942292864.0, "48795": 952303104.0, "48800": 978598720.0, "48805": 982464064.0, "48810": 957431680.0, "48815": 955454464.0, "48820": 928526976.0, "48825": 977949184.0, "48830": 971401088.0, "48835": 969889024.0, "48840": 969961344.0, "48845": 954680704.0, "48850": 964250112.0, "48855": 971371968.0, "48860": 974703872.0, "48865": 971659328.0, "48870": 949932480.0, "48875": 972136640.0, "48880": 974488832.0, "48885": 961769536.0, "48890": 979928704.0, "48895": 956713344.0, "48900": 947777664.0, "48905": 962567744.0, "48910": 964113728.0, "48915": 960493632.0, "48920": 961644096.0, "48925": 939025216.0, "48930": 965333312.0, "48935": 965993984.0, "48940": 951039232.0, "48945": 986962496.0, "48950": 940148736.0, "48955": 973211840.0, "48960": 970084352.0, "48965": 962953920.0, "48970": 969328384.0, "48975": 929830080.0, "48980": 967007936.0, "48985": 968826624.0, "48990": 975759872.0, "48995": 974109184.0, "49000": 961723712.0, "49005": 942207424.0, "49010": 973492224.0, "49015": 971781824.0, "49020": 959949888.0, "49025": 945086976.0, "49030": 935014400.0, "49035": 978991552.0, "49040": 972824896.0, "49045": 963228736.0, "49050": 961240896.0, "49055": 941800128.0, "49060": 955798272.0, "49065": 965798720.0, "49070": 978499008.0, "49075": 975300352.0, "49080": 939172288.0, "49085": 949110656.0, "49090": 962904064.0, "49095": 983327552.0, "49100": 967801664.0, "49105": 970072448.0, "49110": 944485056.0, "49115": 978898048.0, "49120": 977825280.0, "49125": 981766912.0, "49130": 947046656.0, "49135": 954232192.0, "49140": 952681536.0, "49145": 970318336.0, "49150": 951309248.0, "49155": 967648640.0, "49160": 947753024.0, "49165": 973752640.0, "49170": 978751552.0, "49175": 971594240.0, "49180": 979795840.0, "49185": 974797120.0, "49190": 961738048.0, "49195": 988504320.0, "49200": 969613952.0, "49205": 962868352.0, "49210": 978068992.0, "49215": 940386752.0, "49220": 978949824.0, "49225": 963979072.0, "49230": 976312512.0, "49235": 975157248.0, "49240": 944926144.0, "49245": 961597696.0, "49250": 974681024.0, "49255": 993682176.0, "49260": 972223104.0, "49265": 950339648.0, "49270": 940577920.0, "49275": 961946560.0, "49280": 987787904.0, "49285": 983879424.0, "49290": 961746432.0, "49295": 939462464.0, "49300": 975796736.0, "49305": 980066688.0, "49310": 961297600.0, "49315": 967952704.0, "49320": 942759296.0, "49325": 965700544.0, "49330": 962863488.0, "49335": 959005248.0, "49340": 977288320.0, "49345": 964675136.0, "49350": 955602240.0, "49355": 971202816.0, "49360": 966100672.0, "49365": 958533376.0, "49370": 951133312.0, "49375": 930009984.0, "49380": 968721984.0, "49385": 958612544.0, "49390": 948805376.0, "49395": 974999808.0, "49400": 930168128.0, "49405": 959036544.0, "49410": 966564096.0, "49415": 968778496.0, "49420": 968932800.0, "49425": 947046016.0, "49430": 961393344.0, "49435": 970026048.0, "49440": 964550336.0, "49445": 967516544.0, "49450": 963108800.0, "49455": 937740992.0, "49460": 975078912.0, "49465": 969218880.0, "49470": 960130688.0, "49475": 973315904.0, 
"49480": 961283328.0, "49485": 960639744.0, "49490": 976196416.0, "49495": 978451904.0, "49500": 953587776.0, "49505": 952747840.0, "49510": 972061568.0, "49515": 960308032.0, "49520": 968107968.0, "49525": 973856448.0, "49530": 943381440.0, "49535": 952719488.0, "49540": 962297280.0, "49545": 982048896.0, "49550": 982774912.0, "49555": 974464192.0, "49560": 938374016.0, "49565": 965731200.0, "49570": 965301952.0, "49575": 973930304.0, "49580": 979244288.0, "49585": 961990848.0, "49590": 977039424.0, "49595": 973143936.0, "49600": 978317376.0, "49605": 960693184.0, "49610": 945694976.0, "49615": 955761792.0, "49620": 958054208.0, "49625": 947353920.0, "49630": 968881024.0, "49635": 963090944.0, "49640": 957960320.0, "49645": 989257088.0, "49650": 970437824.0, "49655": 952088512.0, "49660": 959594752.0, "49665": 948814208.0, "49670": 970332096.0, "49675": 979315776.0, "49680": 968268544.0, "49685": 968364992.0, "49690": 958864192.0, "49695": 941231552.0, "49700": 967904320.0, "49705": 981073088.0, "49710": 970496448.0, "49715": 968764032.0, "49720": 938932992.0, "49725": 955444288.0, "49730": 978241344.0, "49735": 983580672.0, "49740": 959196160.0, "49745": 927074880.0, "49750": 977413504.0, "49755": 960486528.0, "49760": 987032448.0, "49765": 963772928.0, "49770": 964151296.0, "49775": 951069184.0, "49780": 979808896.0, "49785": 967395648.0, "49790": 969747392.0, "49795": 948483648.0, "49800": 961920000.0, "49805": 978069568.0, "49810": 975349120.0, "49815": 971902528.0, "49820": 958447616.0, "49825": 966592064.0, "49830": 973864576.0, "49835": 966124800.0, "49840": 965215232.0, "49845": 966580032.0, "49850": 931450304.0, "49855": 972064384.0, "49860": 962524544.0, "49865": 958436928.0, "49870": 980114944.0, "49875": 959767616.0, "49880": 967862912.0, "49885": 968202752.0, "49890": 967672896.0, "49895": 975525312.0, "49900": 965483904.0, "49905": 952600704.0, "49910": 994496192.0, "49915": 977486464.0, "49920": 963134656.0, "49925": 978927808.0, "49930": 959061824.0, "49935": 954875584.0, "49940": 981462016.0, "49945": 977204800.0, "49950": 969540544.0, "49955": 945948160.0, "49960": 955538816.0, "49965": 963660992.0, "49970": 973242752.0, "49975": 964917504.0, "49980": 976136576.0, "49985": 960742592.0, "49990": 973642624.0, "49995": 967475584.0, "50000": 978038848.0, "50005": 978818816.0, "50010": 943239424.0, "50015": 958696192.0, "50020": 970593600.0, "50025": 980789312.0, "50030": 977965504.0, "50035": 955637632.0, "50040": 952390912.0, "50045": 967309312.0, "50050": 965896576.0, "50055": 943295936.0, "50060": 970245632.0, "50065": 956672448.0, "50070": 980771904.0, "50075": 959484544.0, "50080": 964158784.0, "50085": 966069888.0, "50090": 949761664.0, "50095": 983667520.0, "50100": 969110272.0, "50105": 957530176.0, "50110": 973869760.0, "50115": 954124352.0, "50120": 967746688.0, "50125": 959667904.0, "50130": 978935552.0, "50135": 974311552.0, "50140": 942379264.0, "50145": 974979648.0, "50150": 988489728.0, "50155": 983099584.0, "50160": 983043008.0, "50165": 969913728.0, "50170": 939351616.0, "50175": 948146496.0, "50180": 996213056.0, "50185": 969592960.0, "50190": 976018432.0, "50195": 952197888.0, "50200": 971142976.0, "50205": 958121280.0, "50210": 973823744.0, "50215": 971831424.0, "50220": 953540352.0, "50225": 949312384.0, "50230": 973358400.0, "50235": 963851328.0, "50240": 972806208.0, "50245": 963949760.0, "50250": 959015680.0, "50255": 970222976.0, "50260": 985579200.0, "50265": 975934016.0, "50270": 980796544.0, "50275": 950010816.0, "50280": 970871296.0, "50285": 
967511296.0, "50290": 957106688.0, "50295": 964673408.0, "50300": 942895104.0, "50305": 963728448.0, "50310": 977690176.0, "50315": 966002176.0, "50320": 961937664.0, "50325": 955148672.0, "50330": 954359680.0, "50335": 974010688.0, "50340": 971826688.0, "50345": 961806336.0, "50350": 970753024.0, "50355": 945904576.0, "50360": 980011136.0, "50365": 966234176.0, "50370": 974667904.0, "50375": 974451776.0, "50380": 947818624.0, "50385": 971829632.0, "50390": 979310016.0, "50395": 971487168.0, "50400": 977923712.0, "50405": 953827200.0, "50410": 941562240.0, "50415": 967615360.0, "50420": 972634752.0, "50425": 987383616.0, "50430": 964386688.0, "50435": 951861312.0, "50440": 970854912.0, "50445": 971264768.0, "50450": 963225600.0, "50455": 975122048.0, "50460": 948923072.0, "50465": 977497664.0, "50470": 979975488.0, "50475": 968470592.0, "50480": 968705792.0, "50485": 955761600.0, "50490": 954811136.0, "50495": 962419904.0, "50500": 963187136.0, "50505": 966836672.0, "50510": 967708480.0, "50515": 942099456.0, "50520": 971528960.0, "50525": 955082304.0, "50530": 954778944.0, "50535": 977470016.0, "50540": 938317888.0, "50545": 972400640.0, "50550": 987696256.0, "50555": 971172544.0, "50560": 973532032.0, "50565": 946490304.0, "50570": 960904448.0, "50575": 970357120.0, "50580": 966119424.0, "50585": 969004032.0, "50590": 958072384.0, "50595": 949295680.0, "50600": 972168128.0, "50605": 965281728.0, "50610": 977583808.0, "50615": 963145856.0, "50620": 933870400.0, "50625": 976933568.0, "50630": 983203456.0, "50635": 974287104.0, "50640": 959073664.0, "50645": 943429376.0, "50650": 957692160.0, "50655": 963346304.0, "50660": 963119744.0, "50665": 976909824.0, "50670": 948475328.0, "50675": 949217344.0, "50680": 967611840.0, "50685": 969005056.0, "50690": 971911872.0, "50695": 971301376.0, "50700": 953852032.0, "50705": 973868864.0, "50710": 965521472.0, "50715": 978424064.0, "50720": 961573568.0, "50725": 944731520.0, "50730": 965368512.0, "50735": 962572032.0, "50740": 958361408.0, "50745": 966647296.0, "50750": 962403328.0, "50755": 952904896.0, "50760": 968821120.0, "50765": 956468544.0, "50770": 960821376.0, "50775": 954990208.0, "50780": 935063296.0, "50785": 986337984.0, "50790": 973493824.0, "50795": 972677504.0, "50800": 956641408.0, "50805": 939339904.0, "50810": 956077760.0, "50815": 985099840.0, "50820": 974201664.0, "50825": 958875328.0, "50830": 955902144.0, "50835": 951524160.0, "50840": 970755072.0, "50845": 977003712.0, "50850": 974765056.0, "50855": 949768000.0, "50860": 947870080.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12698293248.0, "5": 12698293248.0, "10": 12698293248.0, "15": 12698293248.0, "20": 12698293248.0, "25": 12698293248.0, "30": 12698293248.0, "35": 12698293248.0, "40": 12698293248.0, "45": 12698293248.0, "50": 12698293248.0, "55": 12698293248.0, "60": 12698293248.0, "65": 12698293248.0, "70": 12698293248.0, "75": 12698293248.0, "80": 12698293248.0, "85": 12698293248.0, "90": 12698293248.0, "95": 12698293248.0, "100": 12698293248.0, "105": 12698293248.0, "110": 12698293248.0, "115": 12698293248.0, "120": 12698293248.0, "125": 12698293248.0, "130": 12698293248.0, "135": 12698293248.0, "140": 12698293248.0, "145": 12698293248.0, "150": 12698293248.0, "155": 12698293248.0, "160": 12698293248.0, "165": 12698293248.0, "170": 12698293248.0, "175": 12698293248.0, "180": 12698293248.0, "185": 12698293248.0, "190": 12698293248.0, "195": 12698293248.0, "200": 12698293248.0, "205": 12698293248.0, "210": 
12698293248.0, "215": 12698293248.0, "220": 12698293248.0, "225": 12698293248.0, "230": 12698293248.0, "235": 12698293248.0, "240": 12698293248.0, "245": 12698293248.0, "250": 12698293248.0, "255": 12698293248.0, "260": 12698293248.0, "265": 12698293248.0, "270": 12698293248.0, "275": 12698293248.0, "280": 12698293248.0, "285": 12698293248.0, "290": 12698293248.0, "295": 12698293248.0, "300": 12698293248.0, "305": 12698293248.0, "310": 12698293248.0, "315": 12698293248.0, "320": 12698293248.0, "325": 12698293248.0, "330": 12698293248.0, "335": 12698293248.0, "340": 12698293248.0, "345": 12698293248.0, "350": 12698293248.0, "355": 12698293248.0, "360": 12698293248.0, "365": 12698293248.0, "370": 12698293248.0, "375": 12698293248.0, "380": 12698293248.0, "385": 12698293248.0, "390": 12698293248.0, "395": 12698293248.0, "400": 12698293248.0, "405": 12698293248.0, "410": 12698293248.0, "415": 12698293248.0, "420": 12698293248.0, "425": 12698293248.0, "430": 12698293248.0, "435": 12698293248.0, "440": 12698293248.0, "445": 12698293248.0, "450": 12698293248.0, "455": 12698293248.0, "460": 12698293248.0, "465": 12698293248.0, "470": 12698293248.0, "475": 12698293248.0, "480": 12698293248.0, "485": 12698293248.0, "490": 12698293248.0, "495": 12698293248.0, "500": 12698293248.0, "505": 12698293248.0, "510": 12698293248.0, "515": 12698293248.0, "520": 12698293248.0, "525": 12698293248.0, "530": 12698293248.0, "535": 12698293248.0, "540": 12698293248.0, "545": 12698293248.0, "550": 12698293248.0, "555": 12698293248.0, "560": 12698293248.0, "565": 12698293248.0, "570": 12698293248.0, "575": 12698293248.0, "580": 12698293248.0, "585": 12698293248.0, "590": 12698293248.0, "595": 12698293248.0, "600": 12698293248.0, "605": 12698293248.0, "610": 12698293248.0, "615": 12698293248.0, "620": 12698293248.0, "625": 12698293248.0, "630": 12698293248.0, "635": 12698293248.0, "640": 12698293248.0, "645": 12698293248.0, "650": 12698293248.0, "655": 12698293248.0, "660": 12698293248.0, "665": 12698293248.0, "670": 12698293248.0, "675": 12698293248.0, "680": 12698293248.0, "685": 12698293248.0, "690": 12698293248.0, "695": 12698293248.0, "700": 12698293248.0, "705": 12698293248.0, "710": 12698293248.0, "715": 12698293248.0, "720": 12698293248.0, "725": 12698293248.0, "730": 12698293248.0, "735": 12698293248.0, "740": 12698293248.0, "745": 12698293248.0, "750": 12698293248.0, "755": 12698293248.0, "760": 12698293248.0, "765": 12698293248.0, "770": 12698293248.0, "775": 12698293248.0, "780": 12698293248.0, "785": 12698293248.0, "790": 12698293248.0, "795": 12698293248.0, "800": 12698293248.0, "805": 12698293248.0, "810": 12698293248.0, "815": 12698293248.0, "820": 12698293248.0, "825": 12698293248.0, "830": 12698293248.0, "835": 12698293248.0, "840": 12698293248.0, "845": 12698293248.0, "850": 12698293248.0, "855": 12698293248.0, "860": 12698293248.0, "865": 12698293248.0, "870": 12698293248.0, "875": 12698293248.0, "880": 12698293248.0, "885": 12698293248.0, "890": 12698293248.0, "895": 12698293248.0, "900": 12698293248.0, "905": 12698293248.0, "910": 12698293248.0, "915": 12698293248.0, "920": 12698293248.0, "925": 12698293248.0, "930": 12698293248.0, "935": 12698293248.0, "940": 12698293248.0, "945": 12698293248.0, "950": 12698293248.0, "955": 12698293248.0, "960": 12698293248.0, "965": 12698293248.0, "970": 12698293248.0, "975": 12698293248.0, "980": 12698293248.0, "985": 12698293248.0, "990": 12698293248.0, "995": 12698293248.0, "1000": 12698293248.0, "1005": 12698293248.0, "1010": 12698293248.0, "1015": 
12698293248.0, "1020": 12698293248.0, "1025": 12698293248.0, "1030": 12698293248.0, "1035": 12698293248.0, "1040": 12698293248.0, "1045": 12698293248.0, "1050": 12698293248.0, "1055": 12698293248.0, "1060": 12698293248.0, "1065": 12698293248.0, "1070": 12698293248.0, "1075": 12698293248.0, "1080": 12698293248.0, "1085": 12698293248.0, "1090": 12698293248.0, "1095": 12698293248.0, "1100": 12698293248.0, "1105": 12698293248.0, "1110": 12698293248.0, "1115": 12698293248.0, "1120": 12698293248.0, "1125": 12698293248.0, "1130": 12698293248.0, "1135": 12698293248.0, "1140": 12698293248.0, "1145": 12698293248.0, "1150": 12698293248.0, "1155": 12698293248.0, "1160": 12698293248.0, "1165": 12698293248.0, "1170": 12698293248.0, "1175": 12698293248.0, "1180": 12698293248.0, "1185": 12698293248.0, "1190": 12698293248.0, "1195": 12698293248.0, "1200": 12698293248.0, "1205": 12698293248.0, "1210": 12698293248.0, "1215": 12698293248.0, "1220": 12698293248.0, "1225": 12698293248.0, "1230": 12698293248.0, "1235": 12698293248.0, "1240": 12698293248.0, "1245": 12698293248.0, "1250": 12698293248.0, "1255": 12698293248.0, "1260": 12698293248.0, "1265": 12698293248.0, "1270": 12698293248.0, "1275": 12698293248.0, "1280": 12698293248.0, "1285": 12698293248.0, "1290": 12698293248.0, "1295": 12698293248.0, "1300": 12698293248.0, "1305": 12698293248.0, "1310": 12698293248.0, "1315": 12698293248.0, "1320": 12698293248.0, "1325": 12698293248.0, "1330": 12698293248.0, "1335": 12698293248.0, "1340": 12698293248.0, "1345": 12698293248.0, "1350": 12698293248.0, "1355": 12698293248.0, "1360": 12698293248.0, "1365": 12698293248.0, "1370": 12698293248.0, "1375": 12698293248.0, "1380": 12698293248.0, "1385": 12698293248.0, "1390": 12698293248.0, "1395": 12698293248.0, "1400": 12698293248.0, "1405": 12698293248.0, "1410": 12698293248.0, "1415": 12698293248.0, "1420": 12698293248.0, "1425": 12698293248.0, "1430": 12698293248.0, "1435": 12698293248.0, "1440": 12698293248.0, "1445": 12698293248.0, "1450": 12698293248.0, "1455": 12698293248.0, "1460": 12698293248.0, "1465": 12698293248.0, "1470": 12698293248.0, "1475": 12698293248.0, "1480": 12698293248.0, "1485": 12698293248.0, "1490": 12698293248.0, "1495": 12698293248.0, "1500": 12698293248.0, "1505": 12698293248.0, "1510": 12698293248.0, "1515": 12698293248.0, "1520": 12698293248.0, "1525": 12698293248.0, "1530": 12698293248.0, "1535": 12698293248.0, "1540": 12698293248.0, "1545": 12698293248.0, "1550": 12698293248.0, "1555": 12698293248.0, "1560": 12698293248.0, "1565": 12698293248.0, "1570": 12698293248.0, "1575": 12698293248.0, "1580": 12698293248.0, "1585": 12698293248.0, "1590": 12698293248.0, "1595": 12698293248.0, "1600": 12698293248.0, "1605": 12698293248.0, "1610": 12698293248.0, "1615": 12698293248.0, "1620": 12698293248.0, "1625": 12698293248.0, "1630": 12698293248.0, "1635": 12698293248.0, "1640": 12698293248.0, "1645": 12698293248.0, "1650": 12698293248.0, "1655": 12698293248.0, "1660": 12698293248.0, "1665": 12698293248.0, "1670": 12698293248.0, "1675": 12698293248.0, "1680": 12698293248.0, "1685": 12698293248.0, "1690": 12698293248.0, "1695": 12698293248.0, "1700": 12698293248.0, "1705": 12698293248.0, "1710": 12698293248.0, "1715": 12698293248.0, "1720": 12698293248.0, "1725": 12698293248.0, "1730": 12698293248.0, "1735": 12698293248.0, "1740": 12698293248.0, "1745": 12698293248.0, "1750": 12698293248.0, "1755": 12698293248.0, "1760": 12698293248.0, "1765": 12698293248.0, "1770": 12698293248.0, "1775": 12698293248.0, "1780": 12698293248.0, "1785": 
12698293248.0, "1790": 12698293248.0, "1795": 12698293248.0, "1800": 12698293248.0, "1805": 12698293248.0, "1810": 12698293248.0, "1815": 12698293248.0, "1820": 12698293248.0, "1825": 12698293248.0, "1830": 12698293248.0, "1835": 12698293248.0, "1840": 12698293248.0, "1845": 12698293248.0, "1850": 12698293248.0, "1855": 12698293248.0, "1860": 12698293248.0, "1865": 12698293248.0, "1870": 12698293248.0, "1875": 12698293248.0, "1880": 12698293248.0, "1885": 12698293248.0, "1890": 12698293248.0, "1895": 12698293248.0, "1900": 12698293248.0, "1905": 12698293248.0, "1910": 12698293248.0, "1915": 12698293248.0, "1920": 12698293248.0, "1925": 12698293248.0, "1930": 12698293248.0, "1935": 12698293248.0, "1940": 12698293248.0, "1945": 12698293248.0, "1950": 12698293248.0, "1955": 12698293248.0, "1960": 12698293248.0, "1965": 12698293248.0, "1970": 12698293248.0, "1975": 12698293248.0, "1980": 12698293248.0, "1985": 12698293248.0, "1990": 12698293248.0, "1995": 12698293248.0, "2000": 12698293248.0, "2005": 12698293248.0, "2010": 12698293248.0, "2015": 12698293248.0, "2020": 12698293248.0, "2025": 12698293248.0, "2030": 12698293248.0, "2035": 12698293248.0, "2040": 12698293248.0, "2045": 12698293248.0, "2050": 12698293248.0, "2055": 12698293248.0, "2060": 12698293248.0, "2065": 12698293248.0, "2070": 12698293248.0, "2075": 12698293248.0, "2080": 12698293248.0, "2085": 12698293248.0, "2090": 12698293248.0, "2095": 12698293248.0, "2100": 12698293248.0, "2105": 12698293248.0, "2110": 12698293248.0, "2115": 12698293248.0, "2120": 12698293248.0, "2125": 12698293248.0, "2130": 12698293248.0, "2135": 12698293248.0, "2140": 12698293248.0, "2145": 12698293248.0, "2150": 12698293248.0, "2155": 12698293248.0, "2160": 12698293248.0, "2165": 12698293248.0, "2170": 12698293248.0, "2175": 12698293248.0, "2180": 12698293248.0, "2185": 12698293248.0, "2190": 12698293248.0, "2195": 12698293248.0, "2200": 12698293248.0, "2205": 12698293248.0, "2210": 12698293248.0, "2215": 12698293248.0, "2220": 12698293248.0, "2225": 12698293248.0, "2230": 12698293248.0, "2235": 12698293248.0, "2240": 12698293248.0, "2245": 12698293248.0, "2250": 12698293248.0, "2255": 12698293248.0, "2260": 12698293248.0, "2265": 12698293248.0, "2270": 12698293248.0, "2275": 12698293248.0, "2280": 12698293248.0, "2285": 12698293248.0, "2290": 12698293248.0, "2295": 12698293248.0, "2300": 12698293248.0, "2305": 12698293248.0, "2310": 12698293248.0, "2315": 12698293248.0, "2320": 12698293248.0, "2325": 12698293248.0, "2330": 12698293248.0, "2335": 12698293248.0, "2340": 12698293248.0, "2345": 12698293248.0, "2350": 12698293248.0, "2355": 12698293248.0, "2360": 12698293248.0, "2365": 12698293248.0, "2370": 12698293248.0, "2375": 12698293248.0, "2380": 12698293248.0, "2385": 12698293248.0, "2390": 12698293248.0, "2395": 12698293248.0, "2400": 12698293248.0, "2405": 12698293248.0, "2410": 12698293248.0, "2415": 12698293248.0, "2420": 12698293248.0, "2425": 12698293248.0, "2430": 12698293248.0, "2435": 12698293248.0, "2440": 12698293248.0, "2445": 12698293248.0, "2450": 12698293248.0, "2455": 12698293248.0, "2460": 12698293248.0, "2465": 12698293248.0, "2470": 12698293248.0, "2475": 12698293248.0, "2480": 12698293248.0, "2485": 12698293248.0, "2490": 12698293248.0, "2495": 12698293248.0, "2500": 12698293248.0, "2505": 12698293248.0, "2510": 12698293248.0, "2515": 12698293248.0, "2520": 12698293248.0, "2525": 12698293248.0, "2530": 12698293248.0, "2535": 12698293248.0, "2540": 12698293248.0, "2545": 12698293248.0, "2550": 12698293248.0, "2555": 
12698293248.0, "2560": 12698293248.0, "2565": 12698293248.0, "2570": 12698293248.0, "2575": 12698293248.0, "2580": 12698293248.0, "2585": 12698293248.0, "2590": 12698293248.0, "2595": 12698293248.0, "2600": 12698293248.0, "2605": 12698293248.0, "2610": 12698293248.0, "2615": 12698293248.0, "2620": 12698293248.0, "2625": 12698293248.0, "2630": 12698293248.0, "2635": 12698293248.0, "2640": 12698293248.0, "2645": 12698293248.0, "2650": 12698293248.0, "2655": 12698293248.0, "2660": 12698293248.0, "2665": 12698293248.0, "2670": 12698293248.0, "2675": 12698293248.0, "2680": 12698293248.0, "2685": 12698293248.0, "2690": 12698293248.0, "2695": 12698293248.0, "2700": 12698293248.0, "2705": 12698293248.0, "2710": 12698293248.0, "2715": 12698293248.0, "2720": 12698293248.0, "2725": 12698293248.0, "2730": 12698293248.0, "2735": 12698293248.0, "2740": 12698293248.0, "2745": 12698293248.0, "2750": 12698293248.0, "2755": 12698293248.0, "2760": 12698293248.0, "2765": 12698293248.0, "2770": 12698293248.0, "2775": 12698293248.0, "2780": 12698293248.0, "2785": 12698293248.0, "2790": 12698293248.0, "2795": 12698293248.0, "2800": 12698293248.0, "2805": 12698293248.0, "2810": 12698293248.0, "2815": 12698293248.0, "2820": 12698293248.0, "2825": 12698293248.0, "2830": 12698293248.0, "2835": 12698293248.0, "2840": 12698293248.0, "2845": 12698293248.0, "2850": 12698293248.0, "2855": 12698293248.0, "2860": 12698293248.0, "2865": 12698293248.0, "2870": 12698293248.0, "2875": 12698293248.0, "2880": 12698293248.0, "2885": 12698293248.0, "2890": 12698293248.0, "2895": 12698293248.0, "2900": 12698293248.0, "2905": 12698293248.0, "2910": 12698293248.0, "2915": 12698293248.0, "2920": 12698293248.0, "2925": 12698293248.0, "2930": 12698293248.0, "2935": 12698293248.0, "2940": 12698293248.0, "2945": 12698293248.0, "2950": 12698293248.0, "2955": 12698293248.0, "2960": 12698293248.0, "2965": 12698293248.0, "2970": 12698293248.0, "2975": 12698293248.0, "2980": 12698293248.0, "2985": 12698293248.0, "2990": 12698293248.0, "2995": 12698293248.0, "3000": 12698293248.0, "3005": 12698293248.0, "3010": 12698293248.0, "3015": 12698293248.0, "3020": 12698293248.0, "3025": 12698293248.0, "3030": 12698293248.0, "3035": 12698293248.0, "3040": 12698293248.0, "3045": 12698293248.0, "3050": 12698293248.0, "3055": 12698293248.0, "3060": 12698293248.0, "3065": 12698293248.0, "3070": 12698293248.0, "3075": 12698293248.0, "3080": 12698293248.0, "3085": 12698293248.0, "3090": 12698293248.0, "3095": 12698293248.0, "3100": 12698293248.0, "3105": 12698293248.0, "3110": 12698293248.0, "3115": 12698293248.0, "3120": 12698293248.0, "3125": 12698293248.0, "3130": 12698293248.0, "3135": 12698293248.0, "3140": 12698293248.0, "3145": 12698293248.0, "3150": 12698293248.0, "3155": 12698293248.0, "3160": 12698293248.0, "3165": 12698293248.0, "3170": 12698293248.0, "3175": 12698293248.0, "3180": 12698492928.0, "3185": 12698492928.0, "3190": 12698492928.0, "3195": 12698492928.0, "3200": 12698492928.0, "3205": 12698492928.0, "3210": 12698492928.0, "3215": 12698492928.0, "3220": 12698492928.0, "3225": 12698492928.0, "3230": 12698492928.0, "3235": 12698492928.0, "3240": 12698492928.0, "3245": 12698492928.0, "3250": 12698492928.0, "3255": 12698492928.0, "3260": 12698492928.0, "3265": 12698492928.0, "3270": 12698492928.0, "3275": 12698492928.0, "3280": 12698492928.0, "3285": 12698492928.0, "3290": 12698492928.0, "3295": 12698492928.0, "3300": 12698492928.0, "3305": 12698492928.0, "3310": 12698492928.0, "3315": 12698492928.0, "3320": 12698492928.0, "3325": 
12698492928.0, "3330": 12698492928.0, "3335": 12698492928.0, "3340": 12698492928.0, "3345": 12698492928.0, "3350": 12698492928.0, "3355": 12698492928.0, "3360": 12698492928.0, "3365": 12698492928.0, "3370": 12698492928.0, "3375": 12698492928.0, "3380": 12698492928.0, "3385": 12698492928.0, "3390": 12698492928.0, "3395": 12698492928.0, "3400": 12698492928.0, "3405": 12698492928.0, "3410": 12698492928.0, "3415": 12698492928.0, "3420": 12698492928.0, "3425": 12698492928.0, "3430": 12698492928.0, "3435": 12698492928.0, "3440": 12698492928.0, "3445": 12698492928.0, "3450": 12698492928.0, "3455": 12698492928.0, "3460": 12698492928.0, "3465": 12698492928.0, "3470": 12698492928.0, "3475": 12698492928.0, "3480": 12698492928.0, "3485": 12698492928.0, "3490": 12698492928.0, "3495": 12698492928.0, "3500": 12698492928.0, "3505": 12698492928.0, "3510": 12698492928.0, "3515": 12698492928.0, "3520": 12698492928.0, "3525": 12698492928.0, "3530": 12698492928.0, "3535": 12698492928.0, "3540": 12698492928.0, "3545": 12698492928.0, "3550": 12698492928.0, "3555": 12698492928.0, "3560": 12698492928.0, "3565": 12698492928.0, "3570": 12698492928.0, "3575": 12698492928.0, "3580": 12698492928.0, "3585": 12698492928.0, "3590": 12698492928.0, "3595": 12698492928.0, "3600": 12698492928.0, "3605": 12698492928.0, "3610": 12698492928.0, "3615": 12698492928.0, "3620": 12698492928.0, "3625": 12698492928.0, "3630": 12698492928.0, "3635": 12698492928.0, "3640": 12698492928.0, "3645": 12698492928.0, "3650": 12698492928.0, "3655": 12698492928.0, "3660": 12698492928.0, "3665": 12698492928.0, "3670": 12698492928.0, "3675": 12698492928.0, "3680": 12698492928.0, "3685": 12698492928.0, "3690": 12698492928.0, "3695": 12698492928.0, "3700": 12698492928.0, "3705": 12698492928.0, "3710": 12698492928.0, "3715": 12698492928.0, "3720": 12698492928.0, "3725": 12698492928.0, "3730": 12698492928.0, "3735": 12698492928.0, "3740": 12698492928.0, "3745": 12698492928.0, "3750": 12698492928.0, "3755": 12698492928.0, "3760": 12698492928.0, "3765": 12698492928.0, "3770": 12698492928.0, "3775": 12698492928.0, "3780": 12698492928.0, "3785": 12698492928.0, "3790": 12698492928.0, "3795": 12698492928.0, "3800": 12698492928.0, "3805": 12698492928.0, "3810": 12698492928.0, "3815": 12698492928.0, "3820": 12698492928.0, "3825": 12698492928.0, "3830": 12698492928.0, "3835": 12698492928.0, "3840": 12698492928.0, "3845": 12698492928.0, "3850": 12698492928.0, "3855": 12698492928.0, "3860": 12698492928.0, "3865": 12698492928.0, "3870": 12698492928.0, "3875": 12698492928.0, "3880": 12698492928.0, "3885": 12698492928.0, "3890": 12698492928.0, "3895": 12698492928.0, "3900": 12698492928.0, "3905": 12698492928.0, "3910": 12698492928.0, "3915": 12698492928.0, "3920": 12698492928.0, "3925": 12698492928.0, "3930": 12698492928.0, "3935": 12698492928.0, "3940": 12698492928.0, "3945": 12698492928.0, "3950": 12698492928.0, "3955": 12698492928.0, "3960": 12698492928.0, "3965": 12698492928.0, "3970": 12698492928.0, "3975": 12698492928.0, "3980": 12698492928.0, "3985": 12698492928.0, "3990": 12698492928.0, "3995": 12698492928.0, "4000": 12698492928.0, "4005": 12698492928.0, "4010": 12698492928.0, "4015": 12698492928.0, "4020": 12698492928.0, "4025": 12698492928.0, "4030": 12698492928.0, "4035": 12698492928.0, "4040": 12698492928.0, "4045": 12698492928.0, "4050": 12698492928.0, "4055": 12698492928.0, "4060": 12698492928.0, "4065": 12698492928.0, "4070": 12698492928.0, "4075": 12698492928.0, "4080": 12698492928.0, "4085": 12698492928.0, "4090": 12698492928.0, "4095": 
12698492928.0, "4100": 12698492928.0, "4105": 12698492928.0, "4110": 12698492928.0, "4115": 12698492928.0, "4120": 12698492928.0, "4125": 12698492928.0, "4130": 12698492928.0, "4135": 12698492928.0, "4140": 12698492928.0, "4145": 12698492928.0, "4150": 12698492928.0, "4155": 12698492928.0, "4160": 12698492928.0, "4165": 12698492928.0, "4170": 12698492928.0, "4175": 12698492928.0, "4180": 12698492928.0, "4185": 12698492928.0, "4190": 12698492928.0, "4195": 12698492928.0, "4200": 12698492928.0, "4205": 12698492928.0, "4210": 12698492928.0, "4215": 12698492928.0, "4220": 12698492928.0, "4225": 12698492928.0, "4230": 12698492928.0, "4235": 12698492928.0, "4240": 12698492928.0, "4245": 12698492928.0, "4250": 12698492928.0, "4255": 12698492928.0, "4260": 12698492928.0, "4265": 12698492928.0, "4270": 12698492928.0, "4275": 12698492928.0, "4280": 12698492928.0, "4285": 12698492928.0, "4290": 12698492928.0, "4295": 12698492928.0, "4300": 12698492928.0, "4305": 12698492928.0, "4310": 12698492928.0, "4315": 12698492928.0, "4320": 12698492928.0, "4325": 12698492928.0, "4330": 12698492928.0, "4335": 12698492928.0, "4340": 12698492928.0, "4345": 12698492928.0, "4350": 12698492928.0, "4355": 12698492928.0, "4360": 12698492928.0, "4365": 12698492928.0, "4370": 12698492928.0, "4375": 12698492928.0, "4380": 12698492928.0, "4385": 12698492928.0, "4390": 12698492928.0, "4395": 12698492928.0, "4400": 12698492928.0, "4405": 12698492928.0, "4410": 12698492928.0, "4415": 12698492928.0, "4420": 12698492928.0, "4425": 12698492928.0, "4430": 12698492928.0, "4435": 12698492928.0, "4440": 12698492928.0, "4445": 12698492928.0, "4450": 12698492928.0, "4455": 12698492928.0, "4460": 12698492928.0, "4465": 12698492928.0, "4470": 12698492928.0, "4475": 12698492928.0, "4480": 12698492928.0, "4485": 12698492928.0, "4490": 12698492928.0, "4495": 12698492928.0, "4500": 12698492928.0, "4505": 12698492928.0, "4510": 12698492928.0, "4515": 12698492928.0, "4520": 12698492928.0, "4525": 12698492928.0, "4530": 12698492928.0, "4535": 12698492928.0, "4540": 12698492928.0, "4545": 12698492928.0, "4550": 12698492928.0, "4555": 12698492928.0, "4560": 12698492928.0, "4565": 12698492928.0, "4570": 12698492928.0, "4575": 12698492928.0, "4580": 12698492928.0, "4585": 12698492928.0, "4590": 12698492928.0, "4595": 12698492928.0, "4600": 12698492928.0, "4605": 12698492928.0, "4610": 12698492928.0, "4615": 12698492928.0, "4620": 12698492928.0, "4625": 12698492928.0, "4630": 12698492928.0, "4635": 12698492928.0, "4640": 12698492928.0, "4645": 12698492928.0, "4650": 12698492928.0, "4655": 12698492928.0, "4660": 12698492928.0, "4665": 12698492928.0, "4670": 12698492928.0, "4675": 12698492928.0, "4680": 12698492928.0, "4685": 12698492928.0, "4690": 12698492928.0, "4695": 12698492928.0, "4700": 12698492928.0, "4705": 12698492928.0, "4710": 12698492928.0, "4715": 12698492928.0, "4720": 12698492928.0, "4725": 12698492928.0, "4730": 12698492928.0, "4735": 12698492928.0, "4740": 12698492928.0, "4745": 12698492928.0, "4750": 12698492928.0, "4755": 12698492928.0, "4760": 12698492928.0, "4765": 12698492928.0, "4770": 12698492928.0, "4775": 12698492928.0, "4780": 12698492928.0, "4785": 12698492928.0, "4790": 12698492928.0, "4795": 12698492928.0, "4800": 12698492928.0, "4805": 12698492928.0, "4810": 12698492928.0, "4815": 12698492928.0, "4820": 12698492928.0, "4825": 12698492928.0, "4830": 12698492928.0, "4835": 12698492928.0, "4840": 12698492928.0, "4845": 12698492928.0, "4850": 12698492928.0, "4855": 12698492928.0, "4860": 12698492928.0, "4865": 
12698492928.0, "4870": 12698492928.0, "4875": 12698492928.0, "4880": 12698492928.0, "4885": 12698492928.0, "4890": 12698492928.0, "4895": 12698492928.0, "4900": 12698492928.0, "4905": 12698492928.0, "4910": 12698492928.0, "4915": 12698492928.0, "4920": 12698492928.0, "4925": 12698492928.0, "4930": 12698492928.0, "4935": 12698492928.0, "4940": 12698492928.0, "4945": 12698492928.0, "4950": 12698492928.0, "4955": 12698492928.0, "4960": 12698492928.0, "4965": 12698492928.0, "4970": 12698492928.0, "4975": 12698492928.0, "4980": 12698492928.0, "4985": 12698492928.0, "4990": 12698492928.0, "4995": 12698492928.0, "5000": 12698492928.0, "5005": 12698492928.0, "5010": 12698492928.0, "5015": 12698492928.0, "5020": 12698492928.0, "5025": 12698492928.0, "5030": 12698492928.0, "5035": 12698492928.0, "5040": 12698492928.0, "5045": 12698492928.0, "5050": 12698492928.0, "5055": 12698492928.0, "5060": 12698492928.0, "5065": 12698492928.0, "5070": 12698492928.0, "5075": 12698492928.0, "5080": 12698492928.0, "5085": 12698492928.0, "5090": 12698492928.0, "5095": 12698492928.0, "5100": 12698492928.0, "5105": 12698492928.0, "5110": 12698492928.0, "5115": 12698492928.0, "5120": 12698492928.0, "5125": 12698492928.0, "5130": 12698492928.0, "5135": 12698492928.0, "5140": 12698492928.0, "5145": 12698492928.0, "5150": 12698492928.0, "5155": 12698492928.0, "5160": 12698492928.0, "5165": 12698492928.0, "5170": 12698492928.0, "5175": 12698492928.0, "5180": 12698492928.0, "5185": 12698492928.0, "5190": 12698492928.0, "5195": 12698492928.0, "5200": 12698492928.0, "5205": 12698492928.0, "5210": 12698492928.0, "5215": 12698492928.0, "5220": 12698492928.0, "5225": 12698492928.0, "5230": 12698492928.0, "5235": 12698492928.0, "5240": 12698492928.0, "5245": 12698492928.0, "5250": 12698492928.0, "5255": 12698492928.0, "5260": 12698492928.0, "5265": 12698492928.0, "5270": 12698492928.0, "5275": 12698492928.0, "5280": 12698492928.0, "5285": 12698492928.0, "5290": 12698492928.0, "5295": 12698492928.0, "5300": 12698492928.0, "5305": 12698492928.0, "5310": 12698492928.0, "5315": 12698492928.0, "5320": 12698492928.0, "5325": 12698492928.0, "5330": 12698492928.0, "5335": 12698492928.0, "5340": 12698492928.0, "5345": 12698492928.0, "5350": 12698492928.0, "5355": 12698492928.0, "5360": 12698492928.0, "5365": 12698492928.0, "5370": 12698492928.0, "5375": 12698492928.0, "5380": 12698492928.0, "5385": 12698492928.0, "5390": 12698492928.0, "5395": 12698492928.0, "5400": 12698492928.0, "5405": 12698492928.0, "5410": 12698492928.0, "5415": 12698492928.0, "5420": 12698492928.0, "5425": 12698492928.0, "5430": 12698492928.0, "5435": 12698492928.0, "5440": 12698492928.0, "5445": 12698492928.0, "5450": 12698492928.0, "5455": 12698492928.0, "5460": 12698492928.0, "5465": 12698492928.0, "5470": 12698492928.0, "5475": 12698492928.0, "5480": 12698492928.0, "5485": 12698492928.0, "5490": 12698492928.0, "5495": 12698492928.0, "5500": 12698492928.0, "5505": 12698492928.0, "5510": 12698492928.0, "5515": 12698492928.0, "5520": 12698492928.0, "5525": 12698492928.0, "5530": 12698492928.0, "5535": 12698492928.0, "5540": 12698492928.0, "5545": 12698492928.0, "5550": 12698492928.0, "5555": 12698492928.0, "5560": 12698492928.0, "5565": 12698492928.0, "5570": 12698492928.0, "5575": 12698492928.0, "5580": 12698492928.0, "5585": 12698492928.0, "5590": 12698492928.0, "5595": 12698492928.0, "5600": 12698492928.0, "5605": 12698492928.0, "5610": 12698492928.0, "5615": 12698492928.0, "5620": 12698492928.0, "5625": 12698492928.0, "5630": 12698492928.0, "5635": 
12698492928.0, "5640": 12698492928.0, "5645": 12698492928.0, "5650": 12698492928.0, "5655": 12698492928.0, "5660": 12698492928.0, "5665": 12698492928.0, "5670": 12698492928.0, "5675": 12698492928.0, "5680": 12698492928.0, "5685": 12698492928.0, "5690": 12698492928.0, "5695": 12698492928.0, "5700": 12698492928.0, "5705": 12698492928.0, "5710": 12698492928.0, "5715": 12698492928.0, "5720": 12698492928.0, "5725": 12698492928.0, "5730": 12698492928.0, "5735": 12698492928.0, "5740": 12698492928.0, "5745": 12698492928.0, "5750": 12698492928.0, "5755": 12698492928.0, "5760": 12698492928.0, "5765": 12698492928.0, "5770": 12698492928.0, "5775": 12698492928.0, "5780": 12698492928.0, "5785": 12698492928.0, "5790": 12698492928.0, "5795": 12698492928.0, "5800": 12698492928.0, "5805": 12698492928.0, "5810": 12698492928.0, "5815": 12698492928.0, "5820": 12698492928.0, "5825": 12698492928.0, "5830": 12698492928.0, "5835": 12698492928.0, "5840": 12698492928.0, "5845": 12698492928.0, "5850": 12698492928.0, "5855": 12698492928.0, "5860": 12698492928.0, "5865": 12698492928.0, "5870": 12698492928.0, "5875": 12698492928.0, "5880": 12698492928.0, "5885": 12698492928.0, "5890": 12698492928.0, "5895": 12698492928.0, "5900": 12698492928.0, "5905": 12698492928.0, "5910": 12698492928.0, "5915": 12698492928.0, "5920": 12698492928.0, "5925": 12698492928.0, "5930": 12698492928.0, "5935": 12698492928.0, "5940": 12698492928.0, "5945": 12698492928.0, "5950": 12698492928.0, "5955": 12698492928.0, "5960": 12698492928.0, "5965": 12698492928.0, "5970": 12698492928.0, "5975": 12698492928.0, "5980": 12698492928.0, "5985": 12698492928.0, "5990": 12698492928.0, "5995": 12698492928.0, "6000": 12698492928.0, "6005": 12698492928.0, "6010": 12698492928.0, "6015": 12698492928.0, "6020": 12698492928.0, "6025": 12698492928.0, "6030": 12698492928.0, "6035": 12698492928.0, "6040": 12698492928.0, "6045": 12698492928.0, "6050": 12698492928.0, "6055": 12698492928.0, "6060": 12698492928.0, "6065": 12698492928.0, "6070": 12698492928.0, "6075": 12698492928.0, "6080": 12698492928.0, "6085": 12698492928.0, "6090": 12698492928.0, "6095": 12698492928.0, "6100": 12698492928.0, "6105": 12698492928.0, "6110": 12698492928.0, "6115": 12698492928.0, "6120": 12698492928.0, "6125": 12698492928.0, "6130": 12698492928.0, "6135": 12698492928.0, "6140": 12698492928.0, "6145": 12698492928.0, "6150": 12698492928.0, "6155": 12698492928.0, "6160": 12698492928.0, "6165": 12698492928.0, "6170": 12698492928.0, "6175": 12698492928.0, "6180": 12698492928.0, "6185": 12698492928.0, "6190": 12698492928.0, "6195": 12698492928.0, "6200": 12698492928.0, "6205": 12698492928.0, "6210": 12698492928.0, "6215": 12698492928.0, "6220": 12698492928.0, "6225": 12698492928.0, "6230": 12698492928.0, "6235": 12698492928.0, "6240": 12698492928.0, "6245": 12698492928.0, "6250": 12698492928.0, "6255": 12698492928.0, "6260": 12698492928.0, "6265": 12698492928.0, "6270": 12698492928.0, "6275": 12698492928.0, "6280": 12698492928.0, "6285": 12698492928.0, "6290": 12698492928.0, "6295": 12698492928.0, "6300": 12698492928.0, "6305": 12698492928.0, "6310": 12698492928.0, "6315": 12698492928.0, "6320": 12698492928.0, "6325": 12698492928.0, "6330": 12698492928.0, "6335": 12698492928.0, "6340": 12698492928.0, "6345": 12698492928.0, "6350": 12698492928.0, "6355": 12698492928.0, "6360": 12698492928.0, "6365": 12698492928.0, "6370": 12698492928.0, "6375": 12698492928.0, "6380": 12698492928.0, "6385": 12698492928.0, "6390": 12698492928.0, "6395": 12698492928.0, "6400": 12698492928.0, "6405": 
12698492928.0, "6410": 12698492928.0, "6415": 12698492928.0, "6420": 12698492928.0, "6425": 12698492928.0, "6430": 12698492928.0, "6435": 12698492928.0, "6440": 12698492928.0, "6445": 12698492928.0, "6450": 12698492928.0, "6455": 12698492928.0, "6460": 12698492928.0, "6465": 12698492928.0, "6470": 12698492928.0, "6475": 12698492928.0, "6480": 12698492928.0, "6485": 12698492928.0, "6490": 12698492928.0, "6495": 12698492928.0, "6500": 12698492928.0, "6505": 12698492928.0, "6510": 12698492928.0, "6515": 12698492928.0, "6520": 12698492928.0, "6525": 12698492928.0, "6530": 12698492928.0, "6535": 12698492928.0, "6540": 12698492928.0, "6545": 12698492928.0, "6550": 12698492928.0, "6555": 12698492928.0, "6560": 12698492928.0, "6565": 12698492928.0, "6570": 12698492928.0, "6575": 12698492928.0, "6580": 12698492928.0, "6585": 12698492928.0, "6590": 12698492928.0, "6595": 12698492928.0, "6600": 12698492928.0, "6605": 12698492928.0, "6610": 12698492928.0, "6615": 12698492928.0, "6620": 12698492928.0, "6625": 12698492928.0, "6630": 12698492928.0, "6635": 12698492928.0, "6640": 12698492928.0, "6645": 12698492928.0, "6650": 12698492928.0, "6655": 12698492928.0, "6660": 12698492928.0, "6665": 12698492928.0, "6670": 12698492928.0, "6675": 12698492928.0, "6680": 12698492928.0, "6685": 12698492928.0, "6690": 12698492928.0, "6695": 12698492928.0, "6700": 12698492928.0, "6705": 12698492928.0, "6710": 12698492928.0, "6715": 12698492928.0, "6720": 12698492928.0, "6725": 12698492928.0, "6730": 12698492928.0, "6735": 12698492928.0, "6740": 12698492928.0, "6745": 12698492928.0, "6750": 12698492928.0, "6755": 12698492928.0, "6760": 12698492928.0, "6765": 12698492928.0, "6770": 12698492928.0, "6775": 12698492928.0, "6780": 12698492928.0, "6785": 12698492928.0, "6790": 12698492928.0, "6795": 12698492928.0, "6800": 12698492928.0, "6805": 12698492928.0, "6810": 12698492928.0, "6815": 12698492928.0, "6820": 12698492928.0, "6825": 12698492928.0, "6830": 12698492928.0, "6835": 12698492928.0, "6840": 12698492928.0, "6845": 12698492928.0, "6850": 12698492928.0, "6855": 12698492928.0, "6860": 12698492928.0, "6865": 12698492928.0, "6870": 12698492928.0, "6875": 12698492928.0, "6880": 12698492928.0, "6885": 12698492928.0, "6890": 12698492928.0, "6895": 12698492928.0, "6900": 12698492928.0, "6905": 12698492928.0, "6910": 12698492928.0, "6915": 12698492928.0, "6920": 12698492928.0, "6925": 12698492928.0, "6930": 12698492928.0, "6935": 12698492928.0, "6940": 12698492928.0, "6945": 12698492928.0, "6950": 12698492928.0, "6955": 12698492928.0, "6960": 12698492928.0, "6965": 12698492928.0, "6970": 12698492928.0, "6975": 12698492928.0, "6980": 12698492928.0, "6985": 12698492928.0, "6990": 12698492928.0, "6995": 12698492928.0, "7000": 12698492928.0, "7005": 12698492928.0, "7010": 12698492928.0, "7015": 12698492928.0, "7020": 12698492928.0, "7025": 12698492928.0, "7030": 12698492928.0, "7035": 12698492928.0, "7040": 12698492928.0, "7045": 12698492928.0, "7050": 12698492928.0, "7055": 12698492928.0, "7060": 12698492928.0, "7065": 12698492928.0, "7070": 12698492928.0, "7075": 12698492928.0, "7080": 12698492928.0, "7085": 12698492928.0, "7090": 12698492928.0, "7095": 12698492928.0, "7100": 12698492928.0, "7105": 12698492928.0, "7110": 12698492928.0, "7115": 12698492928.0, "7120": 12698492928.0, "7125": 12698492928.0, "7130": 12698492928.0, "7135": 12698492928.0, "7140": 12698492928.0, "7145": 12698492928.0, "7150": 12698492928.0, "7155": 12698492928.0, "7160": 12698492928.0, "7165": 12698492928.0, "7170": 12698492928.0, "7175": 
12698492928.0, "7180": 12698492928.0, "7185": 12698492928.0, "7190": 12698492928.0, "7195": 12698492928.0, "7200": 12698492928.0, "7205": 12698492928.0, "7210": 12698492928.0, "7215": 12698492928.0, "7220": 12698492928.0, "7225": 12698492928.0, "7230": 12698492928.0, "7235": 12698492928.0, "7240": 12698492928.0, "7245": 12698492928.0, "7250": 12698492928.0, "7255": 12698492928.0, "7260": 12698492928.0, "7265": 12698492928.0, "7270": 12698492928.0, "7275": 12698492928.0, "7280": 12698492928.0, "7285": 12698492928.0, "7290": 12698492928.0, "7295": 12698492928.0, "7300": 12698492928.0, "7305": 12698492928.0, "7310": 12698492928.0, "7315": 12698492928.0, "7320": 12698492928.0, "7325": 12698492928.0, "7330": 12698492928.0, "7335": 12698492928.0, "7340": 12698492928.0, "7345": 12698492928.0, "7350": 12698492928.0, "7355": 12698492928.0, "7360": 12698492928.0, "7365": 12698492928.0, "7370": 12698492928.0, "7375": 12698492928.0, "7380": 12698492928.0, "7385": 12698492928.0, "7390": 12698492928.0, "7395": 12698492928.0, "7400": 12698492928.0, "7405": 12698492928.0, "7410": 12698492928.0, "7415": 12698492928.0, "7420": 12698492928.0, "7425": 12698492928.0, "7430": 12698492928.0, "7435": 12698492928.0, "7440": 12698492928.0, "7445": 12698492928.0, "7450": 12698492928.0, "7455": 12698492928.0, "7460": 12698492928.0, "7465": 12698492928.0, "7470": 12698492928.0, "7475": 12698492928.0, "7480": 12698492928.0, "7485": 12698492928.0, "7490": 12698492928.0, "7495": 12698492928.0, "7500": 12698492928.0, "7505": 12698492928.0, "7510": 12698492928.0, "7515": 12698492928.0, "7520": 12698492928.0, "7525": 12698492928.0, "7530": 12698492928.0, "7535": 12698492928.0, "7540": 12698492928.0, "7545": 12698492928.0, "7550": 12698492928.0, "7555": 12698492928.0, "7560": 12698492928.0, "7565": 12698492928.0, "7570": 12698492928.0, "7575": 12698492928.0, "7580": 12698492928.0, "7585": 12698492928.0, "7590": 12698492928.0, "7595": 12698492928.0, "7600": 12698492928.0, "7605": 12698492928.0, "7610": 12698492928.0, "7615": 12698492928.0, "7620": 12698492928.0, "7625": 12698492928.0, "7630": 12698492928.0, "7635": 12698492928.0, "7640": 12698492928.0, "7645": 12698492928.0, "7650": 12698492928.0, "7655": 12698492928.0, "7660": 12698492928.0, "7665": 12698492928.0, "7670": 12698492928.0, "7675": 12698492928.0, "7680": 12698492928.0, "7685": 12698492928.0, "7690": 12698492928.0, "7695": 12698492928.0, "7700": 12698492928.0, "7705": 12698492928.0, "7710": 12698492928.0, "7715": 12698492928.0, "7720": 12698492928.0, "7725": 12698492928.0, "7730": 12698492928.0, "7735": 12698492928.0, "7740": 12698492928.0, "7745": 12698492928.0, "7750": 12698492928.0, "7755": 12698492928.0, "7760": 12698492928.0, "7765": 12698492928.0, "7770": 12698492928.0, "7775": 12698492928.0, "7780": 12698492928.0, "7785": 12698492928.0, "7790": 12698492928.0, "7795": 12698492928.0, "7800": 12698492928.0, "7805": 12698492928.0, "7810": 12698492928.0, "7815": 12698492928.0, "7820": 12698492928.0, "7825": 12698492928.0, "7830": 12698492928.0, "7835": 12698492928.0, "7840": 12698492928.0, "7845": 12698492928.0, "7850": 12698492928.0, "7855": 12698492928.0, "7860": 12698492928.0, "7865": 12698492928.0, "7870": 12698492928.0, "7875": 12698492928.0, "7880": 12698492928.0, "7885": 12698492928.0, "7890": 12698492928.0, "7895": 12698492928.0, "7900": 12698492928.0, "7905": 12698492928.0, "7910": 12698492928.0, "7915": 12698492928.0, "7920": 12698492928.0, "7925": 12698492928.0, "7930": 12698492928.0, "7935": 12698492928.0, "7940": 12698492928.0, "7945": 
12698492928.0, "7950": 12698492928.0, "7955": 12698492928.0, "7960": 12698492928.0, "7965": 12698492928.0, "7970": 12698492928.0, "7975": 12698492928.0, "7980": 12698492928.0, "7985": 12698492928.0, "7990": 12698492928.0, "7995": 12698492928.0, "8000": 12698492928.0, "8005": 12698492928.0, "8010": 12698492928.0, "8015": 12698492928.0, "8020": 12698492928.0, "8025": 12698492928.0, "8030": 12698492928.0, "8035": 12698492928.0, "8040": 12698492928.0, "8045": 12698492928.0, "8050": 12698492928.0, "8055": 12698492928.0, "8060": 12698492928.0, "8065": 12698492928.0, "8070": 12698492928.0, "8075": 12698492928.0, "8080": 12698492928.0, "8085": 12698492928.0, "8090": 12698492928.0, "8095": 12698492928.0, "8100": 12698492928.0, "8105": 12698492928.0, "8110": 12698492928.0, "8115": 12698492928.0, "8120": 12698492928.0, "8125": 12698492928.0, "8130": 12698492928.0, "8135": 12698492928.0, "8140": 12698492928.0, "8145": 12698492928.0, "8150": 12698492928.0, "8155": 12698492928.0, "8160": 12698492928.0, "8165": 12698492928.0, "8170": 12698492928.0, "8175": 12698492928.0, "8180": 12698492928.0, "8185": 12698492928.0, "8190": 12698492928.0, "8195": 12698492928.0, "8200": 12698492928.0, "8205": 12698492928.0, "8210": 12698492928.0, "8215": 12698492928.0, "8220": 12698492928.0, "8225": 12698492928.0, "8230": 12698492928.0, "8235": 12698492928.0, "8240": 12698492928.0, "8245": 12698492928.0, "8250": 12698492928.0, "8255": 12698492928.0, "8260": 12698492928.0, "8265": 12698492928.0, "8270": 12698492928.0, "8275": 12698492928.0, "8280": 12698492928.0, "8285": 12698492928.0, "8290": 12698492928.0, "8295": 12698492928.0, "8300": 12698492928.0, "8305": 12698492928.0, "8310": 12698492928.0, "8315": 12698492928.0, "8320": 12698492928.0, "8325": 12698492928.0, "8330": 12698492928.0, "8335": 12698492928.0, "8340": 12698492928.0, "8345": 12698492928.0, "8350": 12698492928.0, "8355": 12698492928.0, "8360": 12698492928.0, "8365": 12698492928.0, "8370": 12698492928.0, "8375": 12698492928.0, "8380": 12698492928.0, "8385": 12698492928.0, "8390": 12698492928.0, "8395": 12698492928.0, "8400": 12698492928.0, "8405": 12698492928.0, "8410": 12698492928.0, "8415": 12698492928.0, "8420": 12698492928.0, "8425": 12698492928.0, "8430": 12698492928.0, "8435": 12698492928.0, "8440": 12698492928.0, "8445": 12698492928.0, "8450": 12698492928.0, "8455": 12698492928.0, "8460": 12698492928.0, "8465": 12698492928.0, "8470": 12698492928.0, "8475": 12698492928.0, "8480": 12698492928.0, "8485": 12698492928.0, "8490": 12698492928.0, "8495": 12698492928.0, "8500": 12698492928.0, "8505": 12698492928.0, "8510": 12698492928.0, "8515": 12698492928.0, "8520": 12698492928.0, "8525": 12698492928.0, "8530": 12698492928.0, "8535": 12698492928.0, "8540": 12698492928.0, "8545": 12698492928.0, "8550": 12698492928.0, "8555": 12698492928.0, "8560": 12698492928.0, "8565": 12698492928.0, "8570": 12698492928.0, "8575": 12698492928.0, "8580": 12698492928.0, "8585": 12698492928.0, "8590": 12698492928.0, "8595": 12698492928.0, "8600": 12698492928.0, "8605": 12698492928.0, "8610": 12698492928.0, "8615": 12698492928.0, "8620": 12698492928.0, "8625": 12698492928.0, "8630": 12698492928.0, "8635": 12698492928.0, "8640": 12698492928.0, "8645": 12698492928.0, "8650": 12698492928.0, "8655": 12698492928.0, "8660": 12698492928.0, "8665": 12698492928.0, "8670": 12698492928.0, "8675": 12698492928.0, "8680": 12698492928.0, "8685": 12698492928.0, "8690": 12698492928.0, "8695": 12698492928.0, "8700": 12698492928.0, "8705": 12698492928.0, "8710": 12698492928.0, "8715": 
12698492928.0, "8720": 12698492928.0, "8725": 12698492928.0, "8730": 12698492928.0, "8735": 12698492928.0, "8740": 12698492928.0, "8745": 12698492928.0, "8750": 12698492928.0, "8755": 12698492928.0, "8760": 12698492928.0, "8765": 12698492928.0, "8770": 12698492928.0, "8775": 12698492928.0, "8780": 12698492928.0, "8785": 12698492928.0, "8790": 12698492928.0, "8795": 12698492928.0, "8800": 12698492928.0, "8805": 12698492928.0, "8810": 12698492928.0, "8815": 12698492928.0, "8820": 12698492928.0, "8825": 12698492928.0, "8830": 12698492928.0, "8835": 12698492928.0, "8840": 12698492928.0, "8845": 12698492928.0, "8850": 12698492928.0, "8855": 12698492928.0, "8860": 12698492928.0, "8865": 12698492928.0, "8870": 12698492928.0, "8875": 12698492928.0, "8880": 12698492928.0, "8885": 12698492928.0, "8890": 12698492928.0, "8895": 12698492928.0, "8900": 12698492928.0, "8905": 12698492928.0, "8910": 12698492928.0, "8915": 12698492928.0, "8920": 12698492928.0, "8925": 12698492928.0, "8930": 12698492928.0, "8935": 12698492928.0, "8940": 12698492928.0, "8945": 12698492928.0, "8950": 12698492928.0, "8955": 12698492928.0, "8960": 12698492928.0, "8965": 12698492928.0, "8970": 12698492928.0, "8975": 12698492928.0, "8980": 12698492928.0, "8985": 12698492928.0, "8990": 12698492928.0, "8995": 12698492928.0, "9000": 12698492928.0, "9005": 12698492928.0, "9010": 12698492928.0, "9015": 12698492928.0, "9020": 12698492928.0, "9025": 12698492928.0, "9030": 12698492928.0, "9035": 12698492928.0, "9040": 12698492928.0, "9045": 12698492928.0, "9050": 12698492928.0, "9055": 12698492928.0, "9060": 12698492928.0, "9065": 12698492928.0, "9070": 12698492928.0, "9075": 12698492928.0, "9080": 12698492928.0, "9085": 12698492928.0, "9090": 12698492928.0, "9095": 12698492928.0, "9100": 12698492928.0, "9105": 12698492928.0, "9110": 12698492928.0, "9115": 12698492928.0, "9120": 12698492928.0, "9125": 12698492928.0, "9130": 12698492928.0, "9135": 12698492928.0, "9140": 12698492928.0, "9145": 12698492928.0, "9150": 12698492928.0, "9155": 12698492928.0, "9160": 12698492928.0, "9165": 12698492928.0, "9170": 12698492928.0, "9175": 12698492928.0, "9180": 12698492928.0, "9185": 12698492928.0, "9190": 12698492928.0, "9195": 12698492928.0, "9200": 12698492928.0, "9205": 12698492928.0, "9210": 12698492928.0, "9215": 12698492928.0, "9220": 12698492928.0, "9225": 12698492928.0, "9230": 12698492928.0, "9235": 12698492928.0, "9240": 12698492928.0, "9245": 12698492928.0, "9250": 12698492928.0, "9255": 12698492928.0, "9260": 12698492928.0, "9265": 12698492928.0, "9270": 12698492928.0, "9275": 12698492928.0, "9280": 12698492928.0, "9285": 12698492928.0, "9290": 12698492928.0, "9295": 12698492928.0, "9300": 12698492928.0, "9305": 12698492928.0, "9310": 12698492928.0, "9315": 12698492928.0, "9320": 12698492928.0, "9325": 12698492928.0, "9330": 12698492928.0, "9335": 12698492928.0, "9340": 12698492928.0, "9345": 12698492928.0, "9350": 12698492928.0, "9355": 12698492928.0, "9360": 12698492928.0, "9365": 12698492928.0, "9370": 12698492928.0, "9375": 12698492928.0, "9380": 12698492928.0, "9385": 12698492928.0, "9390": 12698492928.0, "9395": 12698492928.0, "9400": 12698492928.0, "9405": 12698492928.0, "9410": 12698492928.0, "9415": 12698492928.0, "9420": 12698492928.0, "9425": 12698492928.0, "9430": 12698492928.0, "9435": 12698492928.0, "9440": 12698492928.0, "9445": 12698492928.0, "9450": 12698492928.0, "9455": 12698492928.0, "9460": 12698492928.0, "9465": 12698492928.0, "9470": 12698492928.0, "9475": 12698492928.0, "9480": 12698492928.0, "9485": 
12698492928.0, "9490": 12698492928.0, "9495": 12698492928.0, "9500": 12698492928.0, "9505": 12698492928.0, "9510": 12698492928.0, "9515": 12698492928.0, "9520": 12698492928.0, "9525": 12698492928.0, "9530": 12698492928.0, "9535": 12698492928.0, "9540": 12698492928.0, "9545": 12698492928.0, "9550": 12698492928.0, "9555": 12698492928.0, "9560": 12698492928.0, "9565": 12698492928.0, "9570": 12698492928.0, "9575": 12698492928.0, "9580": 12698492928.0, "9585": 12698492928.0, "9590": 12698492928.0, "9595": 12698492928.0, "9600": 12698492928.0, "9605": 12698492928.0, "9610": 12698492928.0, "9615": 12698492928.0, "9620": 12698492928.0, "9625": 12698492928.0, "9630": 12698492928.0, "9635": 12698492928.0, "9640": 12698492928.0, "9645": 12698492928.0, "9650": 12698492928.0, "9655": 12698492928.0, "9660": 12698492928.0, "9665": 12698492928.0, "9670": 12698492928.0, "9675": 12698492928.0, "9680": 12698492928.0, "9685": 12698492928.0, "9690": 12698492928.0, "9695": 12698492928.0, "9700": 12698492928.0, "9705": 12698492928.0, "9710": 12698492928.0, "9715": 12698492928.0, "9720": 12698492928.0, "9725": 12698492928.0, "9730": 12698492928.0, "9735": 12698492928.0, "9740": 12698492928.0, "9745": 12698492928.0, "9750": 12698492928.0, "9755": 12698492928.0, "9760": 12698492928.0, "9765": 12698492928.0, "9770": 12698492928.0, "9775": 12698492928.0, "9780": 12698492928.0, "9785": 12698492928.0, "9790": 12698492928.0, "9795": 12698492928.0, "9800": 12698492928.0, "9805": 12698492928.0, "9810": 12698492928.0, "9815": 12698492928.0, "9820": 12698492928.0, "9825": 12698492928.0, "9830": 12698492928.0, "9835": 12698492928.0, "9840": 12698492928.0, "9845": 12698492928.0, "9850": 12698492928.0, "9855": 12698492928.0, "9860": 12698492928.0, "9865": 12698492928.0, "9870": 12698492928.0, "9875": 12698492928.0, "9880": 12698492928.0, "9885": 12698492928.0, "9890": 12698492928.0, "9895": 12698492928.0, "9900": 12698492928.0, "9905": 12698492928.0, "9910": 12698492928.0, "9915": 12698492928.0, "9920": 12698492928.0, "9925": 12698492928.0, "9930": 12698492928.0, "9935": 12698492928.0, "9940": 12698492928.0, "9945": 12698492928.0, "9950": 12698492928.0, "9955": 12698492928.0, "9960": 12698492928.0, "9965": 12698492928.0, "9970": 12698492928.0, "9975": 12698492928.0, "9980": 12698492928.0, "9985": 12698492928.0, "9990": 12698492928.0, "9995": 12698492928.0, "10000": 12698492928.0, "10005": 12698492928.0, "10010": 12698492928.0, "10015": 12698492928.0, "10020": 12698492928.0, "10025": 12698492928.0, "10030": 12698492928.0, "10035": 12698492928.0, "10040": 12698492928.0, "10045": 12698492928.0, "10050": 12698492928.0, "10055": 12698492928.0, "10060": 12698492928.0, "10065": 12698492928.0, "10070": 12698492928.0, "10075": 12698492928.0, "10080": 12698492928.0, "10085": 12698492928.0, "10090": 12698492928.0, "10095": 12698492928.0, "10100": 12698492928.0, "10105": 12698492928.0, "10110": 12698492928.0, "10115": 12698492928.0, "10120": 12698492928.0, "10125": 12698492928.0, "10130": 12698492928.0, "10135": 12698492928.0, "10140": 12698492928.0, "10145": 12698492928.0, "10150": 12698492928.0, "10155": 12698492928.0, "10160": 12698492928.0, "10165": 12698492928.0, "10170": 12698492928.0, "10175": 12698492928.0, "10180": 12698492928.0, "10185": 12698492928.0, "10190": 12698492928.0, "10195": 12698492928.0, "10200": 12698492928.0, "10205": 12698492928.0, "10210": 12698492928.0, "10215": 12698492928.0, "10220": 12698492928.0, "10225": 12698492928.0, "10230": 12698492928.0, "10235": 12698492928.0, "10240": 12698492928.0, "10245": 
12698492928.0, "10250": 12698492928.0, "10255": 12698492928.0, "10260": 12698492928.0, "10265": 12698492928.0, "10270": 12698492928.0, "10275": 12698492928.0, "10280": 12698492928.0, "10285": 12698492928.0, "10290": 12698492928.0, "10295": 12698492928.0, "10300": 12698492928.0, "10305": 12698492928.0, "10310": 12698492928.0, "10315": 12698492928.0, "10320": 12698492928.0, "10325": 12698492928.0, "10330": 12698492928.0, "10335": 12698492928.0, "10340": 12698492928.0, "10345": 12698492928.0, "10350": 12698492928.0, "10355": 12698492928.0, "10360": 12698492928.0, "10365": 12698492928.0, "10370": 12698492928.0, "10375": 12698492928.0, "10380": 12698492928.0, "10385": 12698492928.0, "10390": 12698492928.0, "10395": 12698492928.0, "10400": 12698492928.0, "10405": 12698492928.0, "10410": 12698492928.0, "10415": 12698492928.0, "10420": 12698492928.0, "10425": 12698492928.0, "10430": 12698492928.0, "10435": 12698492928.0, "10440": 12698492928.0, "10445": 12698492928.0, "10450": 12698492928.0, "10455": 12698492928.0, "10460": 12698492928.0, "10465": 12698492928.0, "10470": 12698492928.0, "10475": 12698492928.0, "10480": 12698492928.0, "10485": 12698492928.0, "10490": 12698492928.0, "10495": 12698492928.0, "10500": 12698492928.0, "10505": 12698492928.0, "10510": 12698492928.0, "10515": 12698492928.0, "10520": 12698492928.0, "10525": 12698492928.0, "10530": 12698492928.0, "10535": 12698492928.0, "10540": 12698492928.0, "10545": 12698492928.0, "10550": 12698492928.0, "10555": 12698492928.0, "10560": 12698492928.0, "10565": 12698492928.0, "10570": 12698492928.0, "10575": 12698492928.0, "10580": 12698492928.0, "10585": 12698492928.0, "10590": 12698492928.0, "10595": 12698492928.0, "10600": 12698492928.0, "10605": 12698492928.0, "10610": 12698492928.0, "10615": 12698492928.0, "10620": 12698492928.0, "10625": 12698492928.0, "10630": 12698492928.0, "10635": 12698492928.0, "10640": 12698492928.0, "10645": 12698492928.0, "10650": 12698492928.0, "10655": 12698492928.0, "10660": 12698492928.0, "10665": 12698492928.0, "10670": 12698492928.0, "10675": 12698492928.0, "10680": 12698492928.0, "10685": 12698492928.0, "10690": 12698492928.0, "10695": 12698492928.0, "10700": 12698492928.0, "10705": 12698492928.0, "10710": 12698492928.0, "10715": 12698492928.0, "10720": 12698492928.0, "10725": 12698492928.0, "10730": 12698492928.0, "10735": 12698492928.0, "10740": 12698492928.0, "10745": 12698492928.0, "10750": 12698492928.0, "10755": 12698492928.0, "10760": 12698492928.0, "10765": 12698492928.0, "10770": 12698492928.0, "10775": 12698492928.0, "10780": 12698492928.0, "10785": 12698492928.0, "10790": 12698492928.0, "10795": 12698492928.0, "10800": 12698492928.0, "10805": 12698492928.0, "10810": 12698492928.0, "10815": 12698492928.0, "10820": 12698492928.0, "10825": 12698492928.0, "10830": 12698492928.0, "10835": 12698492928.0, "10840": 12698492928.0, "10845": 12698492928.0, "10850": 12698492928.0, "10855": 12698492928.0, "10860": 12698492928.0, "10865": 12698492928.0, "10870": 12698492928.0, "10875": 12698492928.0, "10880": 12698492928.0, "10885": 12698492928.0, "10890": 12698492928.0, "10895": 12698492928.0, "10900": 12698492928.0, "10905": 12698492928.0, "10910": 12698492928.0, "10915": 12698492928.0, "10920": 12698492928.0, "10925": 12698492928.0, "10930": 12698492928.0, "10935": 12698492928.0, "10940": 12698492928.0, "10945": 12698492928.0, "10950": 12698492928.0, "10955": 12698492928.0, "10960": 12698492928.0, "10965": 12698492928.0, "10970": 12698492928.0, "10975": 12698492928.0, "10980": 12698492928.0, "10985": 
12698492928.0, "10990": 12698492928.0, "10995": 12698492928.0, "11000": 12698492928.0, "11005": 12698492928.0, "11010": 12698492928.0, "11015": 12698492928.0, "11020": 12698492928.0, "11025": 12698492928.0, "11030": 12698492928.0, "11035": 12698492928.0, "11040": 12698492928.0, "11045": 12698492928.0, "11050": 12698492928.0, "11055": 12698492928.0, "11060": 12698492928.0, "11065": 12698492928.0, "11070": 12698492928.0, "11075": 12698492928.0, "11080": 12698492928.0, "11085": 12698492928.0, "11090": 12698492928.0, "11095": 12698492928.0, "11100": 12698492928.0, "11105": 12698492928.0, "11110": 12698492928.0, "11115": 12698492928.0, "11120": 12698492928.0, "11125": 12698492928.0, "11130": 12698492928.0, "11135": 12698492928.0, "11140": 12698492928.0, "11145": 12698492928.0, "11150": 12698492928.0, "11155": 12698492928.0, "11160": 12698492928.0, "11165": 12698492928.0, "11170": 12698492928.0, "11175": 12698492928.0, "11180": 12698492928.0, "11185": 12698492928.0, "11190": 12698492928.0, "11195": 12698492928.0, "11200": 12698492928.0, "11205": 12698492928.0, "11210": 12698492928.0, "11215": 12698492928.0, "11220": 12698492928.0, "11225": 12698492928.0, "11230": 12698492928.0, "11235": 12698492928.0, "11240": 12698492928.0, "11245": 12698492928.0, "11250": 12698492928.0, "11255": 12698492928.0, "11260": 12698492928.0, "11265": 12698492928.0, "11270": 12698492928.0, "11275": 12698492928.0, "11280": 12698492928.0, "11285": 12698492928.0, "11290": 12698492928.0, "11295": 12698492928.0, "11300": 12698492928.0, "11305": 12698492928.0, "11310": 12698492928.0, "11315": 12698492928.0, "11320": 12698492928.0, "11325": 12698492928.0, "11330": 12698492928.0, "11335": 12698492928.0, "11340": 12698492928.0, "11345": 12698492928.0, "11350": 12698492928.0, "11355": 12698492928.0, "11360": 12698492928.0, "11365": 12698492928.0, "11370": 12698492928.0, "11375": 12698492928.0, "11380": 12698492928.0, "11385": 12698492928.0, "11390": 12698492928.0, "11395": 12698492928.0, "11400": 12698492928.0, "11405": 12698492928.0, "11410": 12698492928.0, "11415": 12698492928.0, "11420": 12698492928.0, "11425": 12698492928.0, "11430": 12698492928.0, "11435": 12698492928.0, "11440": 12698492928.0, "11445": 12698492928.0, "11450": 12698492928.0, "11455": 12698492928.0, "11460": 12698492928.0, "11465": 12698492928.0, "11470": 12698492928.0, "11475": 12698492928.0, "11480": 12698492928.0, "11485": 12698492928.0, "11490": 12698492928.0, "11495": 12698492928.0, "11500": 12698492928.0, "11505": 12698492928.0, "11510": 12698492928.0, "11515": 12698492928.0, "11520": 12698492928.0, "11525": 12698492928.0, "11530": 12698492928.0, "11535": 12698492928.0, "11540": 12698492928.0, "11545": 12698492928.0, "11550": 12698492928.0, "11555": 12698492928.0, "11560": 12698492928.0, "11565": 12698492928.0, "11570": 12698492928.0, "11575": 12698492928.0, "11580": 12698492928.0, "11585": 12698492928.0, "11590": 12698492928.0, "11595": 12698492928.0, "11600": 12698492928.0, "11605": 12698492928.0, "11610": 12698492928.0, "11615": 12698492928.0, "11620": 12698492928.0, "11625": 12698492928.0, "11630": 12698492928.0, "11635": 12698492928.0, "11640": 12698492928.0, "11645": 12698492928.0, "11650": 12698492928.0, "11655": 12698492928.0, "11660": 12698492928.0, "11665": 12698492928.0, "11670": 12698492928.0, "11675": 12698492928.0, "11680": 12698492928.0, "11685": 12698492928.0, "11690": 12698492928.0, "11695": 12698492928.0, "11700": 12698492928.0, "11705": 12698492928.0, "11710": 12698492928.0, "11715": 12698492928.0, "11720": 12698492928.0, "11725": 
12698492928.0, "11730": 12698492928.0, "11735": 12698492928.0, "11740": 12698492928.0, "11745": 12698492928.0, "11750": 12698492928.0, "11755": 12698492928.0, "11760": 12698492928.0, "11765": 12698492928.0, "11770": 12698492928.0, "11775": 12698492928.0, "11780": 12698492928.0, "11785": 12698492928.0, "11790": 12698492928.0, "11795": 12698492928.0, "11800": 12698492928.0, "11805": 12698492928.0, "11810": 12698492928.0, "11815": 12698492928.0, "11820": 12698492928.0, "11825": 12698492928.0, "11830": 12698492928.0, "11835": 12698492928.0, "11840": 12698492928.0, "11845": 12698492928.0, "11850": 12698492928.0, "11855": 12698492928.0, "11860": 12698492928.0, "11865": 12698492928.0, "11870": 12698492928.0, "11875": 12698492928.0, "11880": 12698492928.0, "11885": 12698492928.0, "11890": 12698492928.0, "11895": 12698492928.0, "11900": 12698492928.0, "11905": 12698492928.0, "11910": 12698492928.0, "11915": 12698492928.0, "11920": 12698492928.0, "11925": 12698492928.0, "11930": 12698492928.0, "11935": 12698492928.0, "11940": 12698492928.0, "11945": 12698492928.0, "11950": 12698492928.0, "11955": 12698492928.0, "11960": 12698492928.0, "11965": 12698492928.0, "11970": 12698492928.0, "11975": 12698492928.0, "11980": 12698492928.0, "11985": 12698492928.0, "11990": 12698492928.0, "11995": 12698492928.0, "12000": 12698492928.0, "12005": 12698492928.0, "12010": 12698492928.0, "12015": 12698492928.0, "12020": 12698492928.0, "12025": 12698492928.0, "12030": 12698492928.0, "12035": 12698492928.0, "12040": 12698492928.0, "12045": 12698492928.0, "12050": 12698492928.0, "12055": 12698492928.0, "12060": 12698492928.0, "12065": 12698492928.0, "12070": 12698492928.0, "12075": 12698492928.0, "12080": 12698492928.0, "12085": 12698492928.0, "12090": 12698492928.0, "12095": 12698492928.0, "12100": 12698492928.0, "12105": 12698492928.0, "12110": 12698492928.0, "12115": 12698492928.0, "12120": 12698492928.0, "12125": 12698492928.0, "12130": 12698492928.0, "12135": 12698492928.0, "12140": 12698492928.0, "12145": 12698492928.0, "12150": 12698492928.0, "12155": 12698492928.0, "12160": 12698492928.0, "12165": 12698492928.0, "12170": 12698492928.0, "12175": 12698492928.0, "12180": 12698492928.0, "12185": 12698492928.0, "12190": 12698492928.0, "12195": 12698492928.0, "12200": 12698492928.0, "12205": 12698492928.0, "12210": 12698492928.0, "12215": 12698492928.0, "12220": 12698492928.0, "12225": 12698492928.0, "12230": 12698492928.0, "12235": 12698492928.0, "12240": 12698492928.0, "12245": 12698492928.0, "12250": 12698492928.0, "12255": 12698492928.0, "12260": 12698492928.0, "12265": 12698492928.0, "12270": 12698492928.0, "12275": 12698492928.0, "12280": 12698492928.0, "12285": 12698492928.0, "12290": 12698492928.0, "12295": 12698492928.0, "12300": 12698492928.0, "12305": 12698492928.0, "12310": 12698492928.0, "12315": 12698492928.0, "12320": 12698492928.0, "12325": 12698492928.0, "12330": 12698492928.0, "12335": 12698492928.0, "12340": 12698492928.0, "12345": 12698492928.0, "12350": 12698492928.0, "12355": 12698492928.0, "12360": 12698492928.0, "12365": 12698492928.0, "12370": 12698492928.0, "12375": 12698492928.0, "12380": 12698492928.0, "12385": 12698492928.0, "12390": 12698492928.0, "12395": 12698492928.0, "12400": 12698492928.0, "12405": 12698492928.0, "12410": 12698492928.0, "12415": 12698492928.0, "12420": 12698492928.0, "12425": 12698492928.0, "12430": 12698492928.0, "12435": 12698492928.0, "12440": 12698492928.0, "12445": 12698492928.0, "12450": 12698492928.0, "12455": 12698492928.0, "12460": 12698492928.0, "12465": 
12698492928.0, "12470": 12698492928.0, "12475": 12698492928.0, "12480": 12698492928.0, "12485": 12698492928.0, "12490": 12698492928.0, "12495": 12698492928.0, "12500": 12698492928.0, "12505": 12698492928.0, "12510": 12698492928.0, "12515": 12698492928.0, "12520": 12698492928.0, "12525": 12698492928.0, "12530": 12698492928.0, "12535": 12698492928.0, "12540": 12698492928.0, "12545": 12698492928.0, "12550": 12698492928.0, "12555": 12698492928.0, "12560": 12698492928.0, "12565": 12698492928.0, "12570": 12698492928.0, "12575": 12698492928.0, "12580": 12698492928.0, "12585": 12698492928.0, "12590": 12698492928.0, "12595": 12698492928.0, "12600": 12698492928.0, "12605": 12698492928.0, "12610": 12698492928.0, "12615": 12698492928.0, "12620": 12698492928.0, "12625": 12698492928.0, "12630": 12698492928.0, "12635": 12698492928.0, "12640": 12698492928.0, "12645": 12698492928.0, "12650": 12698492928.0, "12655": 12698492928.0, "12660": 12698492928.0, "12665": 12698492928.0, "12670": 12698492928.0, "12675": 12698492928.0, "12680": 12698492928.0, "12685": 12698492928.0, "12690": 12698492928.0, "12695": 12698492928.0, "12700": 12698492928.0, "12705": 12698492928.0, "12710": 12698492928.0, "12715": 12698492928.0, "12720": 12698492928.0, "12725": 12698492928.0, "12730": 12698492928.0, "12735": 12698492928.0, "12740": 12698492928.0, "12745": 12698492928.0, "12750": 12698492928.0, "12755": 12698492928.0, "12760": 12698492928.0, "12765": 12698492928.0, "12770": 12698492928.0, "12775": 12698492928.0, "12780": 12698492928.0, "12785": 12698492928.0, "12790": 12698492928.0, "12795": 12698492928.0, "12800": 12698492928.0, "12805": 12698492928.0, "12810": 12698492928.0, "12815": 12698492928.0, "12820": 12698492928.0, "12825": 12698492928.0, "12830": 12698492928.0, "12835": 12698492928.0, "12840": 12698492928.0, "12845": 12698492928.0, "12850": 12698492928.0, "12855": 12698492928.0, "12860": 12698492928.0, "12865": 12698492928.0, "12870": 12698492928.0, "12875": 12698492928.0, "12880": 12698492928.0, "12885": 12698492928.0, "12890": 12698492928.0, "12895": 12698492928.0, "12900": 12698492928.0, "12905": 12698492928.0, "12910": 12698492928.0, "12915": 12698492928.0, "12920": 12698492928.0, "12925": 12698492928.0, "12930": 12698492928.0, "12935": 12698492928.0, "12940": 12698492928.0, "12945": 12698492928.0, "12950": 12698492928.0, "12955": 12698492928.0, "12960": 12698492928.0, "12965": 12698492928.0, "12970": 12698492928.0, "12975": 12698492928.0, "12980": 12698492928.0, "12985": 12698492928.0, "12990": 12698492928.0, "12995": 12698492928.0, "13000": 12698492928.0, "13005": 12698492928.0, "13010": 12698492928.0, "13015": 12698492928.0, "13020": 12698492928.0, "13025": 12698492928.0, "13030": 12698492928.0, "13035": 12698492928.0, "13040": 12698492928.0, "13045": 12698492928.0, "13050": 12698492928.0, "13055": 12698492928.0, "13060": 12698492928.0, "13065": 12698492928.0, "13070": 12698492928.0, "13075": 12698492928.0, "13080": 12698492928.0, "13085": 12698492928.0, "13090": 12698492928.0, "13095": 12698492928.0, "13100": 12698492928.0, "13105": 12698492928.0, "13110": 12698492928.0, "13115": 12698492928.0, "13120": 12698492928.0, "13125": 12698492928.0, "13130": 12698492928.0, "13135": 12698492928.0, "13140": 12698492928.0, "13145": 12698492928.0, "13150": 12698492928.0, "13155": 12698492928.0, "13160": 12698492928.0, "13165": 12698492928.0, "13170": 12698492928.0, "13175": 12698492928.0, "13180": 12698492928.0, "13185": 12698492928.0, "13190": 12698492928.0, "13195": 12698492928.0, "13200": 12698492928.0, "13205": 
12698492928.0, "13210": 12698492928.0, "13215": 12698492928.0, "13220": 12698492928.0, "13225": 12698492928.0, "13230": 12698492928.0, "13235": 12698492928.0, "13240": 12698492928.0, "13245": 12698492928.0, "13250": 12698492928.0, "13255": 12698492928.0, "13260": 12698492928.0, "13265": 12698492928.0, "13270": 12698492928.0, "13275": 12698492928.0, "13280": 12698492928.0, "13285": 12698492928.0, "13290": 12698492928.0, "13295": 12698492928.0, "13300": 12698492928.0, "13305": 12698492928.0, "13310": 12698492928.0, "13315": 12698492928.0, "13320": 12698492928.0, "13325": 12698492928.0, "13330": 12698492928.0, "13335": 12698492928.0, "13340": 12698492928.0, "13345": 12698492928.0, "13350": 12698492928.0, "13355": 12698492928.0, "13360": 12698492928.0, "13365": 12698492928.0, "13370": 12698492928.0, "13375": 12698492928.0, "13380": 12698492928.0, "13385": 12698492928.0, "13390": 12698492928.0, "13395": 12698492928.0, "13400": 12698492928.0, "13405": 12698492928.0, "13410": 12698492928.0, "13415": 12698492928.0, "13420": 12698492928.0, "13425": 12698492928.0, "13430": 12698492928.0, "13435": 12698492928.0, "13440": 12698492928.0, "13445": 12698492928.0, "13450": 12698492928.0, "13455": 12698492928.0, "13460": 12698492928.0, "13465": 12698492928.0, "13470": 12698492928.0, "13475": 12698492928.0, "13480": 12698492928.0, "13485": 12698492928.0, "13490": 12698492928.0, "13495": 12698492928.0, "13500": 12698492928.0, "13505": 12698492928.0, "13510": 12698492928.0, "13515": 12698492928.0, "13520": 12698492928.0, "13525": 12698492928.0, "13530": 12698492928.0, "13535": 12698492928.0, "13540": 12698492928.0, "13545": 12698492928.0, "13550": 12698492928.0, "13555": 12698492928.0, "13560": 12698492928.0, "13565": 12698492928.0, "13570": 12698492928.0, "13575": 12698492928.0, "13580": 12698492928.0, "13585": 12698492928.0, "13590": 12698492928.0, "13595": 12698492928.0, "13600": 12698492928.0, "13605": 12698492928.0, "13610": 12698492928.0, "13615": 12698492928.0, "13620": 12698492928.0, "13625": 12698492928.0, "13630": 12698492928.0, "13635": 12698492928.0, "13640": 12698492928.0, "13645": 12698492928.0, "13650": 12698492928.0, "13655": 12698492928.0, "13660": 12698492928.0, "13665": 12698492928.0, "13670": 12698492928.0, "13675": 12698492928.0, "13680": 12698492928.0, "13685": 12698492928.0, "13690": 12698492928.0, "13695": 12698492928.0, "13700": 12698492928.0, "13705": 12698492928.0, "13710": 12698492928.0, "13715": 12698492928.0, "13720": 12698492928.0, "13725": 12698492928.0, "13730": 12698492928.0, "13735": 12698492928.0, "13740": 12698492928.0, "13745": 12698492928.0, "13750": 12698492928.0, "13755": 12698492928.0, "13760": 12698492928.0, "13765": 12698492928.0, "13770": 12698492928.0, "13775": 12698492928.0, "13780": 12698492928.0, "13785": 12698492928.0, "13790": 12698492928.0, "13795": 12698492928.0, "13800": 12698492928.0, "13805": 12698492928.0, "13810": 12698492928.0, "13815": 12698492928.0, "13820": 12698492928.0, "13825": 12698492928.0, "13830": 12698492928.0, "13835": 12698492928.0, "13840": 12698492928.0, "13845": 12698492928.0, "13850": 12698492928.0, "13855": 12698492928.0, "13860": 12698492928.0, "13865": 12698492928.0, "13870": 12698492928.0, "13875": 12698492928.0, "13880": 12698492928.0, "13885": 12698492928.0, "13890": 12698492928.0, "13895": 12698492928.0, "13900": 12698492928.0, "13905": 12698492928.0, "13910": 12698492928.0, "13915": 12698492928.0, "13920": 12698492928.0, "13925": 12698492928.0, "13930": 12698492928.0, "13935": 12698492928.0, "13940": 12698492928.0, "13945": 
12698492928.0, "13950": 12698492928.0, "13955": 12698492928.0, "13960": 12698492928.0, "13965": 12698492928.0, "13970": 12698492928.0, "13975": 12698492928.0, "13980": 12698492928.0, "13985": 12698492928.0, "13990": 12698492928.0, "13995": 12698492928.0, "14000": 12698492928.0, "14005": 12698492928.0, "14010": 12698492928.0, "14015": 12698492928.0, "14020": 12698492928.0, "14025": 12698492928.0, "14030": 12698492928.0, "14035": 12698492928.0, "14040": 12698492928.0, "14045": 12698492928.0, "14050": 12698492928.0, "14055": 12698492928.0, "14060": 12698492928.0, "14065": 12698492928.0, "14070": 12698492928.0, "14075": 12698492928.0, "14080": 12698492928.0, "14085": 12698492928.0, "14090": 12698492928.0, "14095": 12698492928.0, "14100": 12698492928.0, "14105": 12698492928.0, "14110": 12698492928.0, "14115": 12698492928.0, "14120": 12698492928.0, "14125": 12698492928.0, "14130": 12698492928.0, "14135": 12698492928.0, "14140": 12698492928.0, "14145": 12698492928.0, "14150": 12698492928.0, "14155": 12698492928.0, "14160": 12698492928.0, "14165": 12698492928.0, "14170": 12698492928.0, "14175": 12698492928.0, "14180": 12698492928.0, "14185": 12698492928.0, "14190": 12698492928.0, "14195": 12698492928.0, "14200": 12698492928.0, "14205": 12698492928.0, "14210": 12698492928.0, "14215": 12698492928.0, "14220": 12698492928.0, "14225": 12698492928.0, "14230": 12698492928.0, "14235": 12698492928.0, "14240": 12698492928.0, "14245": 12698492928.0, "14250": 12698492928.0, "14255": 12698492928.0, "14260": 12698492928.0, "14265": 12698492928.0, "14270": 12698492928.0, "14275": 12698492928.0, "14280": 12698492928.0, "14285": 12698492928.0, "14290": 12698492928.0, "14295": 12698492928.0, "14300": 12698492928.0, "14305": 12698492928.0, "14310": 12698492928.0, "14315": 12698492928.0, "14320": 12698492928.0, "14325": 12698492928.0, "14330": 12698492928.0, "14335": 12698492928.0, "14340": 12698492928.0, "14345": 12698492928.0, "14350": 12698492928.0, "14355": 12698492928.0, "14360": 12698492928.0, "14365": 12698492928.0, "14370": 12698492928.0, "14375": 12698492928.0, "14380": 12698492928.0, "14385": 12698492928.0, "14390": 12698492928.0, "14395": 12698492928.0, "14400": 12698492928.0, "14405": 12698492928.0, "14410": 12698492928.0, "14415": 12698492928.0, "14420": 12698492928.0, "14425": 12698492928.0, "14430": 12698492928.0, "14435": 12698492928.0, "14440": 12698492928.0, "14445": 12698492928.0, "14450": 12698492928.0, "14455": 12698492928.0, "14460": 12698492928.0, "14465": 12698492928.0, "14470": 12698492928.0, "14475": 12698492928.0, "14480": 12698492928.0, "14485": 12698492928.0, "14490": 12698492928.0, "14495": 12698492928.0, "14500": 12698492928.0, "14505": 12698492928.0, "14510": 12698492928.0, "14515": 12698492928.0, "14520": 12698492928.0, "14525": 12698492928.0, "14530": 12698492928.0, "14535": 12698492928.0, "14540": 12698492928.0, "14545": 12698492928.0, "14550": 12698492928.0, "14555": 12698492928.0, "14560": 12698492928.0, "14565": 12698492928.0, "14570": 12698492928.0, "14575": 12698492928.0, "14580": 12698492928.0, "14585": 12698492928.0, "14590": 12698492928.0, "14595": 12698492928.0, "14600": 12698492928.0, "14605": 12698492928.0, "14610": 12698492928.0, "14615": 12698492928.0, "14620": 12698492928.0, "14625": 12698492928.0, "14630": 12698492928.0, "14635": 12698492928.0, "14640": 12698492928.0, "14645": 12698492928.0, "14650": 12698492928.0, "14655": 12698492928.0, "14660": 12698492928.0, "14665": 12698492928.0, "14670": 12698492928.0, "14675": 12698492928.0, "14680": 12698492928.0, "14685": 
12698492928.0, "14690": 12698492928.0, "14695": 12698492928.0, "14700": 12698492928.0, "14705": 12698492928.0, "14710": 12698492928.0, "14715": 12698492928.0, "14720": 12698492928.0, "14725": 12698492928.0, "14730": 12698492928.0, "14735": 12698492928.0, "14740": 12698492928.0, "14745": 12698492928.0, "14750": 12698492928.0, "14755": 12698492928.0, "14760": 12698492928.0, "14765": 12698492928.0, "14770": 12698492928.0, "14775": 12698492928.0, "14780": 12698492928.0, "14785": 12698492928.0, "14790": 12698492928.0, "14795": 12698492928.0, "14800": 12698492928.0, "14805": 12698492928.0, "14810": 12698492928.0, "14815": 12698492928.0, "14820": 12698492928.0, "14825": 12698492928.0, "14830": 12698492928.0, "14835": 12698492928.0, "14840": 12698492928.0, "14845": 12698492928.0, "14850": 12698492928.0, "14855": 12698492928.0, "14860": 12698492928.0, "14865": 12698492928.0, "14870": 12698492928.0, "14875": 12698492928.0, "14880": 12698492928.0, "14885": 12698492928.0, "14890": 12698492928.0, "14895": 12698492928.0, "14900": 12698492928.0, "14905": 12698492928.0, "14910": 12698492928.0, "14915": 12698492928.0, "14920": 12698492928.0, "14925": 12698492928.0, "14930": 12698492928.0, "14935": 12698492928.0, "14940": 12698492928.0, "14945": 12698492928.0, "14950": 12698492928.0, "14955": 12698492928.0, "14960": 12698492928.0, "14965": 12698492928.0, "14970": 12698492928.0, "14975": 12698492928.0, "14980": 12698492928.0, "14985": 12698492928.0, "14990": 12698492928.0, "14995": 12698492928.0, "15000": 12698492928.0, "15005": 12698492928.0, "15010": 12698492928.0, "15015": 12698492928.0, "15020": 12698492928.0, "15025": 12698492928.0, "15030": 12698492928.0, "15035": 12698492928.0, "15040": 12698492928.0, "15045": 12698492928.0, "15050": 12698492928.0, "15055": 12698492928.0, "15060": 12698492928.0, "15065": 12698492928.0, "15070": 12698492928.0, "15075": 12698492928.0, "15080": 12698492928.0, "15085": 12698492928.0, "15090": 12698492928.0, "15095": 12698492928.0, "15100": 12698492928.0, "15105": 12698492928.0, "15110": 12698492928.0, "15115": 12698492928.0, "15120": 12698492928.0, "15125": 12698492928.0, "15130": 12698492928.0, "15135": 12698492928.0, "15140": 12698492928.0, "15145": 12698492928.0, "15150": 12698492928.0, "15155": 12698492928.0, "15160": 12698492928.0, "15165": 12698492928.0, "15170": 12698492928.0, "15175": 12698492928.0, "15180": 12698492928.0, "15185": 12698492928.0, "15190": 12698492928.0, "15195": 12698492928.0, "15200": 12698492928.0, "15205": 12698492928.0, "15210": 12698492928.0, "15215": 12698492928.0, "15220": 12698492928.0, "15225": 12698492928.0, "15230": 12698492928.0, "15235": 12698492928.0, "15240": 12698492928.0, "15245": 12698492928.0, "15250": 12698492928.0, "15255": 12698492928.0, "15260": 12698492928.0, "15265": 12698492928.0, "15270": 12698492928.0, "15275": 12698492928.0, "15280": 12698492928.0, "15285": 12698492928.0, "15290": 12698492928.0, "15295": 12698492928.0, "15300": 12698492928.0, "15305": 12698492928.0, "15310": 12698492928.0, "15315": 12698492928.0, "15320": 12698492928.0, "15325": 12698492928.0, "15330": 12698492928.0, "15335": 12698492928.0, "15340": 12698492928.0, "15345": 12698492928.0, "15350": 12698492928.0, "15355": 12698492928.0, "15360": 12698492928.0, "15365": 12698492928.0, "15370": 12698492928.0, "15375": 12698492928.0, "15380": 12698492928.0, "15385": 12698492928.0, "15390": 12698492928.0, "15395": 12698492928.0, "15400": 12698492928.0, "15405": 12698492928.0, "15410": 12698492928.0, "15415": 12698492928.0, "15420": 12698492928.0, "15425": 
12698492928.0, "15430": 12698492928.0, "15435": 12698492928.0, "15440": 12698492928.0, "15445": 12698492928.0, "15450": 12698492928.0, "15455": 12698492928.0, "15460": 12698492928.0, "15465": 12698492928.0, "15470": 12698492928.0, "15475": 12698492928.0, "15480": 12698492928.0, "15485": 12698492928.0, "15490": 12698492928.0, "15495": 12698492928.0, "15500": 12698492928.0, "15505": 12698492928.0, "15510": 12698492928.0, "15515": 12698492928.0, "15520": 12698492928.0, "15525": 12698492928.0, "15530": 12698492928.0, "15535": 12698492928.0, "15540": 12698492928.0, "15545": 12698492928.0, "15550": 12698492928.0, "15555": 12698492928.0, "15560": 12698492928.0, "15565": 12698492928.0, "15570": 12698492928.0, "15575": 12698492928.0, "15580": 12698492928.0, "15585": 12698492928.0, "15590": 12698492928.0, "15595": 12698492928.0, "15600": 12698492928.0, "15605": 12698492928.0, "15610": 12698492928.0, "15615": 12698492928.0, "15620": 12698492928.0, "15625": 12698492928.0, "15630": 12698492928.0, "15635": 12698492928.0, "15640": 12698492928.0, "15645": 12698492928.0, "15650": 12698492928.0, "15655": 12698492928.0, "15660": 12698492928.0, "15665": 12698492928.0, "15670": 12698492928.0, "15675": 12698492928.0, "15680": 12698492928.0, "15685": 12698492928.0, "15690": 12698492928.0, "15695": 12698492928.0, "15700": 12698492928.0, "15705": 12698492928.0, "15710": 12698492928.0, "15715": 12698492928.0, "15720": 12698492928.0, "15725": 12698492928.0, "15730": 12698492928.0, "15735": 12698492928.0, "15740": 12698492928.0, "15745": 12698492928.0, "15750": 12698492928.0, "15755": 12698492928.0, "15760": 12698492928.0, "15765": 12698492928.0, "15770": 12698492928.0, "15775": 12698492928.0, "15780": 12698492928.0, "15785": 12698492928.0, "15790": 12698492928.0, "15795": 12698492928.0, "15800": 12698492928.0, "15805": 12698492928.0, "15810": 12698492928.0, "15815": 12698492928.0, "15820": 12698492928.0, "15825": 12698492928.0, "15830": 12698492928.0, "15835": 12698492928.0, "15840": 12698492928.0, "15845": 12698492928.0, "15850": 12698492928.0, "15855": 12698492928.0, "15860": 12698492928.0, "15865": 12698492928.0, "15870": 12698492928.0, "15875": 12698492928.0, "15880": 12698492928.0, "15885": 12698492928.0, "15890": 12698492928.0, "15895": 12698492928.0, "15900": 12698492928.0, "15905": 12698492928.0, "15910": 12698492928.0, "15915": 12698492928.0, "15920": 12698492928.0, "15925": 12698492928.0, "15930": 12698492928.0, "15935": 12698492928.0, "15940": 12698492928.0, "15945": 12698492928.0, "15950": 12698492928.0, "15955": 12698492928.0, "15960": 12698492928.0, "15965": 12698492928.0, "15970": 12698492928.0, "15975": 12698492928.0, "15980": 12698492928.0, "15985": 12698492928.0, "15990": 12698492928.0, "15995": 12698492928.0, "16000": 12698492928.0, "16005": 12698492928.0, "16010": 12698492928.0, "16015": 12698492928.0, "16020": 12698492928.0, "16025": 12698492928.0, "16030": 12698492928.0, "16035": 12698492928.0, "16040": 12698492928.0, "16045": 12698492928.0, "16050": 12698492928.0, "16055": 12698492928.0, "16060": 12698492928.0, "16065": 12698492928.0, "16070": 12698492928.0, "16075": 12698492928.0, "16080": 12698492928.0, "16085": 12698492928.0, "16090": 12698492928.0, "16095": 12698492928.0, "16100": 12698492928.0, "16105": 12698492928.0, "16110": 12698492928.0, "16115": 12698492928.0, "16120": 12698492928.0, "16125": 12698492928.0, "16130": 12698492928.0, "16135": 12698492928.0, "16140": 12698492928.0, "16145": 12698492928.0, "16150": 12698492928.0, "16155": 12698492928.0, "16160": 12698492928.0, "16165": 
12698492928.0, "16170": 12698492928.0, "16175": 12698492928.0, "16180": 12698492928.0, "16185": 12698492928.0, "16190": 12698492928.0, "16195": 12698492928.0, "16200": 12698492928.0, "16205": 12698492928.0, "16210": 12698492928.0, "16215": 12698492928.0, "16220": 12698492928.0, "16225": 12698492928.0, "16230": 12698492928.0, "16235": 12698492928.0, "16240": 12698492928.0, "16245": 12698492928.0, "16250": 12698492928.0, "16255": 12698492928.0, "16260": 12698492928.0, "16265": 12698492928.0, "16270": 12698492928.0, "16275": 12698492928.0, "16280": 12698492928.0, "16285": 12698492928.0, "16290": 12698492928.0, "16295": 12698492928.0, "16300": 12698492928.0, "16305": 12698492928.0, "16310": 12698492928.0, "16315": 12698492928.0, "16320": 12698492928.0, "16325": 12698492928.0, "16330": 12698492928.0, "16335": 12698492928.0, "16340": 12698492928.0, "16345": 12698492928.0, "16350": 12698492928.0, "16355": 12698492928.0, "16360": 12698492928.0, "16365": 12698492928.0, "16370": 12698492928.0, "16375": 12698492928.0, "16380": 12698492928.0, "16385": 12698492928.0, "16390": 12698492928.0, "16395": 12698492928.0, "16400": 12698492928.0, "16405": 12698492928.0, "16410": 12698492928.0, "16415": 12698492928.0, "16420": 12698492928.0, "16425": 12698492928.0, "16430": 12698492928.0, "16435": 12698492928.0, "16440": 12698492928.0, "16445": 12698492928.0, "16450": 12698492928.0, "16455": 12698492928.0, "16460": 12698492928.0, "16465": 12698492928.0, "16470": 12698492928.0, "16475": 12698492928.0, "16480": 12698492928.0, "16485": 12698492928.0, "16490": 12698492928.0, "16495": 12698492928.0, "16500": 12698492928.0, "16505": 12698492928.0, "16510": 12698492928.0, "16515": 12698492928.0, "16520": 12698492928.0, "16525": 12698492928.0, "16530": 12698492928.0, "16535": 12698492928.0, "16540": 12698492928.0, "16545": 12698492928.0, "16550": 12698492928.0, "16555": 12698492928.0, "16560": 12698492928.0, "16565": 12698492928.0, "16570": 12698492928.0, "16575": 12698492928.0, "16580": 12698492928.0, "16585": 12698492928.0, "16590": 12698492928.0, "16595": 12698492928.0, "16600": 12698492928.0, "16605": 12698492928.0, "16610": 12698492928.0, "16615": 12698492928.0, "16620": 12698492928.0, "16625": 12698492928.0, "16630": 12698492928.0, "16635": 12698492928.0, "16640": 12698492928.0, "16645": 12698492928.0, "16650": 12698492928.0, "16655": 12698492928.0, "16660": 12698492928.0, "16665": 12698492928.0, "16670": 12698492928.0, "16675": 12698492928.0, "16680": 12698492928.0, "16685": 12698492928.0, "16690": 12698492928.0, "16695": 12698492928.0, "16700": 12698492928.0, "16705": 12698492928.0, "16710": 12698492928.0, "16715": 12698492928.0, "16720": 12698492928.0, "16725": 12698492928.0, "16730": 12698492928.0, "16735": 12698492928.0, "16740": 12698492928.0, "16745": 12698492928.0, "16750": 12698492928.0, "16755": 12698492928.0, "16760": 12698492928.0, "16765": 12698492928.0, "16770": 12698492928.0, "16775": 12698492928.0, "16780": 12698492928.0, "16785": 12698492928.0, "16790": 12698492928.0, "16795": 12698492928.0, "16800": 12698492928.0, "16805": 12698492928.0, "16810": 12698492928.0, "16815": 12698492928.0, "16820": 12698492928.0, "16825": 12698492928.0, "16830": 12698492928.0, "16835": 12698492928.0, "16840": 12698492928.0, "16845": 12698492928.0, "16850": 12698492928.0, "16855": 12698492928.0, "16860": 12698492928.0, "16865": 12698492928.0, "16870": 12698492928.0, "16875": 12698492928.0, "16880": 12698492928.0, "16885": 12698492928.0, "16890": 12698492928.0, "16895": 12698492928.0, "16900": 12698492928.0, "16905": 
12698492928.0, "16910": 12698492928.0, "16915": 12698492928.0, "16920": 12698492928.0, "16925": 12698492928.0, "16930": 12698492928.0, "16935": 12698492928.0, "16940": 12698492928.0, "16945": 12698492928.0, "16950": 12698492928.0, "16955": 12698492928.0, "16960": 12698492928.0, "16965": 12698492928.0, "16970": 12698492928.0, "16975": 12698492928.0, "16980": 12698492928.0, "16985": 12698492928.0, "16990": 12698492928.0, "16995": 12698492928.0, "17000": 12698492928.0, "17005": 12698492928.0, "17010": 12698492928.0, "17015": 12698492928.0, "17020": 12698492928.0, "17025": 12698492928.0, "17030": 12698492928.0, "17035": 12698492928.0, "17040": 12698492928.0, "17045": 12698492928.0, "17050": 12698492928.0, "17055": 12698492928.0, "17060": 12698492928.0, "17065": 12698492928.0, "17070": 12698492928.0, "17075": 12698492928.0, "17080": 12698492928.0, "17085": 12698492928.0, "17090": 12698492928.0, "17095": 12698492928.0, "17100": 12698492928.0, "17105": 12698492928.0, "17110": 12698492928.0, "17115": 12698492928.0, "17120": 12698492928.0, "17125": 12698492928.0, "17130": 12698492928.0, "17135": 12698492928.0, "17140": 12698492928.0, "17145": 12698492928.0, "17150": 12698492928.0, "17155": 12698492928.0, "17160": 12698492928.0, "17165": 12698492928.0, "17170": 12698492928.0, "17175": 12698492928.0, "17180": 12698492928.0, "17185": 12698492928.0, "17190": 12698492928.0, "17195": 12698492928.0, "17200": 12698492928.0, "17205": 12698492928.0, "17210": 12698492928.0, "17215": 12698492928.0, "17220": 12698492928.0, "17225": 12698492928.0, "17230": 12698492928.0, "17235": 12698492928.0, "17240": 12698492928.0, "17245": 12698492928.0, "17250": 12698492928.0, "17255": 12698492928.0, "17260": 12698492928.0, "17265": 12698492928.0, "17270": 12698492928.0, "17275": 12698492928.0, "17280": 12698492928.0, "17285": 12698492928.0, "17290": 12698492928.0, "17295": 12698492928.0, "17300": 12698492928.0, "17305": 12698492928.0, "17310": 12698492928.0, "17315": 12698492928.0, "17320": 12698492928.0, "17325": 12698492928.0, "17330": 12698492928.0, "17335": 12698492928.0, "17340": 12698492928.0, "17345": 12698492928.0, "17350": 12698492928.0, "17355": 12698492928.0, "17360": 12698492928.0, "17365": 12698492928.0, "17370": 12698492928.0, "17375": 12698492928.0, "17380": 12698492928.0, "17385": 12698492928.0, "17390": 12698492928.0, "17395": 12698492928.0, "17400": 12698492928.0, "17405": 12698492928.0, "17410": 12698492928.0, "17415": 12698492928.0, "17420": 12698492928.0, "17425": 12698492928.0, "17430": 12698492928.0, "17435": 12698492928.0, "17440": 12698492928.0, "17445": 12698492928.0, "17450": 12698492928.0, "17455": 12698492928.0, "17460": 12698492928.0, "17465": 12698492928.0, "17470": 12698492928.0, "17475": 12698492928.0, "17480": 12698492928.0, "17485": 12698492928.0, "17490": 12698492928.0, "17495": 12698492928.0, "17500": 12698492928.0, "17505": 12698492928.0, "17510": 12698492928.0, "17515": 12698492928.0, "17520": 12698492928.0, "17525": 12698492928.0, "17530": 12698492928.0, "17535": 12698492928.0, "17540": 12698492928.0, "17545": 12698492928.0, "17550": 12698492928.0, "17555": 12698492928.0, "17560": 12698492928.0, "17565": 12698492928.0, "17570": 12698492928.0, "17575": 12698492928.0, "17580": 12698492928.0, "17585": 12698492928.0, "17590": 12698492928.0, "17595": 12698492928.0, "17600": 12698492928.0, "17605": 12698492928.0, "17610": 12698492928.0, "17615": 12698492928.0, "17620": 12698492928.0, "17625": 12698492928.0, "17630": 12698492928.0, "17635": 12698492928.0, "17640": 12698492928.0, "17645": 
12698492928.0, "17650": 12698492928.0, "17655": 12698492928.0, "17660": 12698492928.0, "17665": 12698492928.0, "17670": 12698492928.0, "17675": 12698492928.0, "17680": 12698492928.0, "17685": 12698492928.0, "17690": 12698492928.0, "17695": 12698492928.0, "17700": 12698492928.0, "17705": 12698492928.0, "17710": 12698492928.0, "17715": 12698492928.0, "17720": 12698492928.0, "17725": 12698492928.0, "17730": 12698492928.0, "17735": 12698492928.0, "17740": 12698492928.0, "17745": 12698492928.0, "17750": 12698492928.0, "17755": 12698492928.0, "17760": 12698492928.0, "17765": 12698492928.0, "17770": 12698492928.0, "17775": 12698492928.0, "17780": 12698492928.0, "17785": 12698492928.0, "17790": 12698492928.0, "17795": 12698492928.0, "17800": 12698492928.0, "17805": 12698492928.0, "17810": 12698492928.0, "17815": 12698492928.0, "17820": 12698492928.0, "17825": 12698492928.0, "17830": 12698492928.0, "17835": 12698492928.0, "17840": 12698492928.0, "17845": 12698492928.0, "17850": 12698492928.0, "17855": 12698492928.0, "17860": 12698492928.0, "17865": 12698492928.0, "17870": 12698492928.0, "17875": 12698492928.0, "17880": 12698492928.0, "17885": 12698492928.0, "17890": 12698492928.0, "17895": 12698492928.0, "17900": 12698492928.0, "17905": 12698492928.0, "17910": 12698492928.0, "17915": 12698492928.0, "17920": 12698492928.0, "17925": 12698492928.0, "17930": 12698492928.0, "17935": 12698492928.0, "17940": 12698492928.0, "17945": 12698492928.0, "17950": 12698492928.0, "17955": 12698492928.0, "17960": 12698492928.0, "17965": 12698492928.0, "17970": 12698492928.0, "17975": 12698492928.0, "17980": 12698492928.0, "17985": 12698492928.0, "17990": 12698492928.0, "17995": 12698492928.0, "18000": 12698492928.0, "18005": 12698492928.0, "18010": 12698492928.0, "18015": 12698492928.0, "18020": 12698492928.0, "18025": 12698492928.0, "18030": 12698492928.0, "18035": 12698492928.0, "18040": 12698492928.0, "18045": 12698492928.0, "18050": 12698492928.0, "18055": 12698492928.0, "18060": 12698492928.0, "18065": 12698492928.0, "18070": 12698492928.0, "18075": 12698492928.0, "18080": 12698492928.0, "18085": 12698492928.0, "18090": 12698492928.0, "18095": 12698492928.0, "18100": 12698492928.0, "18105": 12698492928.0, "18110": 12698492928.0, "18115": 12698492928.0, "18120": 12698492928.0, "18125": 12698492928.0, "18130": 12698492928.0, "18135": 12698492928.0, "18140": 12698492928.0, "18145": 12698492928.0, "18150": 12698492928.0, "18155": 12698492928.0, "18160": 12698492928.0, "18165": 12698492928.0, "18170": 12698492928.0, "18175": 12698492928.0, "18180": 12698492928.0, "18185": 12698492928.0, "18190": 12698492928.0, "18195": 12698492928.0, "18200": 12698492928.0, "18205": 12698492928.0, "18210": 12698492928.0, "18215": 12698492928.0, "18220": 12698492928.0, "18225": 12698492928.0, "18230": 12698492928.0, "18235": 12698492928.0, "18240": 12698492928.0, "18245": 12698492928.0, "18250": 12698492928.0, "18255": 12698492928.0, "18260": 12698492928.0, "18265": 12698492928.0, "18270": 12698492928.0, "18275": 12698492928.0, "18280": 12698492928.0, "18285": 12698492928.0, "18290": 12698492928.0, "18295": 12698492928.0, "18300": 12698492928.0, "18305": 12698492928.0, "18310": 12698492928.0, "18315": 12698492928.0, "18320": 12698492928.0, "18325": 12698492928.0, "18330": 12698492928.0, "18335": 12698492928.0, "18340": 12698492928.0, "18345": 12698492928.0, "18350": 12698492928.0, "18355": 12698492928.0, "18360": 12698492928.0, "18365": 12698492928.0, "18370": 12698492928.0, "18375": 12698492928.0, "18380": 12698492928.0, "18385": 
12698492928.0, "18390": 12698492928.0, "18395": 12698492928.0, "18400": 12698492928.0, "18405": 12698492928.0, "18410": 12698492928.0, "18415": 12698492928.0, "18420": 12698492928.0, "18425": 12698492928.0, "18430": 12698492928.0, "18435": 12698492928.0, "18440": 12698492928.0, "18445": 12698492928.0, "18450": 12698492928.0, "18455": 12698492928.0, "18460": 12698492928.0, "18465": 12698492928.0, "18470": 12698492928.0, "18475": 12698492928.0, "18480": 12698492928.0, "18485": 12698492928.0, "18490": 12698492928.0, "18495": 12698492928.0, "18500": 12698492928.0, "18505": 12698492928.0, "18510": 12698492928.0, "18515": 12698492928.0, "18520": 12698492928.0, "18525": 12698492928.0, "18530": 12698492928.0, "18535": 12698492928.0, "18540": 12698492928.0, "18545": 12698492928.0, "18550": 12698492928.0, "18555": 12698492928.0, "18560": 12698492928.0, "18565": 12698492928.0, "18570": 12698492928.0, "18575": 12698492928.0, "18580": 12698492928.0, "18585": 12698492928.0, "18590": 12698492928.0, "18595": 12698492928.0, "18600": 12698492928.0, "18605": 12698492928.0, "18610": 12698492928.0, "18615": 12698492928.0, "18620": 12698492928.0, "18625": 12698492928.0, "18630": 12698492928.0, "18635": 12698492928.0, "18640": 12698492928.0, "18645": 12698492928.0, "18650": 12698492928.0, "18655": 12698492928.0, "18660": 12698492928.0, "18665": 12698492928.0, "18670": 12698492928.0, "18675": 12698492928.0, "18680": 12698492928.0, "18685": 12698492928.0, "18690": 12698492928.0, "18695": 12698492928.0, "18700": 12698492928.0, "18705": 12698492928.0, "18710": 12698492928.0, "18715": 12698492928.0, "18720": 12698492928.0, "18725": 12698492928.0, "18730": 12698492928.0, "18735": 12698492928.0, "18740": 12698492928.0, "18745": 12698492928.0, "18750": 12698492928.0, "18755": 12698492928.0, "18760": 12698492928.0, "18765": 12698492928.0, "18770": 12698492928.0, "18775": 12698492928.0, "18780": 12698492928.0, "18785": 12698492928.0, "18790": 12698492928.0, "18795": 12698492928.0, "18800": 12698492928.0, "18805": 12698492928.0, "18810": 12698492928.0, "18815": 12698492928.0, "18820": 12698492928.0, "18825": 12698492928.0, "18830": 12698492928.0, "18835": 12698492928.0, "18840": 12698492928.0, "18845": 12698492928.0, "18850": 12698492928.0, "18855": 12698492928.0, "18860": 12698492928.0, "18865": 12698492928.0, "18870": 12698492928.0, "18875": 12698492928.0, "18880": 12698492928.0, "18885": 12698492928.0, "18890": 12698492928.0, "18895": 12698492928.0, "18900": 12698492928.0, "18905": 12698492928.0, "18910": 12698492928.0, "18915": 12698492928.0, "18920": 12698492928.0, "18925": 12698492928.0, "18930": 12698492928.0, "18935": 12698492928.0, "18940": 12698492928.0, "18945": 12698492928.0, "18950": 12698492928.0, "18955": 12698492928.0, "18960": 12698492928.0, "18965": 12698492928.0, "18970": 12698492928.0, "18975": 12698492928.0, "18980": 12698492928.0, "18985": 12698492928.0, "18990": 12698492928.0, "18995": 12698492928.0, "19000": 12698492928.0, "19005": 12698492928.0, "19010": 12698492928.0, "19015": 12698492928.0, "19020": 12698492928.0, "19025": 12698492928.0, "19030": 12698492928.0, "19035": 12698492928.0, "19040": 12698492928.0, "19045": 12698492928.0, "19050": 12698492928.0, "19055": 12698492928.0, "19060": 12698492928.0, "19065": 12698492928.0, "19070": 12698492928.0, "19075": 12698492928.0, "19080": 12698492928.0, "19085": 12698492928.0, "19090": 12698492928.0, "19095": 12698492928.0, "19100": 12698492928.0, "19105": 12698492928.0, "19110": 12698492928.0, "19115": 12698492928.0, "19120": 12698492928.0, "19125": 
12698492928.0, "19130": 12698492928.0, "19135": 12698492928.0, "19140": 12698492928.0, "19145": 12698492928.0, "19150": 12698492928.0, "19155": 12698492928.0, "19160": 12698492928.0, "19165": 12698492928.0, "19170": 12698492928.0, "19175": 12698492928.0, "19180": 12698492928.0, "19185": 12698492928.0, "19190": 12698492928.0, "19195": 12698492928.0, "19200": 12698492928.0, "19205": 12698492928.0, "19210": 12698492928.0, "19215": 12698492928.0, "19220": 12698492928.0, "19225": 12698492928.0, "19230": 12698492928.0, "19235": 12698492928.0, "19240": 12698492928.0, "19245": 12698492928.0, "19250": 12698492928.0, "19255": 12698492928.0, "19260": 12698492928.0, "19265": 12698492928.0, "19270": 12698492928.0, "19275": 12698492928.0, "19280": 12698492928.0, "19285": 12698492928.0, "19290": 12698492928.0, "19295": 12698492928.0, "19300": 12698492928.0, "19305": 12698492928.0, "19310": 12698492928.0, "19315": 12698492928.0, "19320": 12698492928.0, "19325": 12698492928.0, "19330": 12698492928.0, "19335": 12698492928.0, "19340": 12698492928.0, "19345": 12698492928.0, "19350": 12698492928.0, "19355": 12698492928.0, "19360": 12698492928.0, "19365": 12698492928.0, "19370": 12698492928.0, "19375": 12698492928.0, "19380": 12698492928.0, "19385": 12698492928.0, "19390": 12698492928.0, "19395": 12698492928.0, "19400": 12698492928.0, "19405": 12698492928.0, "19410": 12698492928.0, "19415": 12698492928.0, "19420": 12698492928.0, "19425": 12698492928.0, "19430": 12698492928.0, "19435": 12698492928.0, "19440": 12698492928.0, "19445": 12698492928.0, "19450": 12698492928.0, "19455": 12698492928.0, "19460": 12698492928.0, "19465": 12698492928.0, "19470": 12698492928.0, "19475": 12698492928.0, "19480": 12698492928.0, "19485": 12698492928.0, "19490": 12698492928.0, "19495": 12698492928.0, "19500": 12698492928.0, "19505": 12698492928.0, "19510": 12698492928.0, "19515": 12698492928.0, "19520": 12698492928.0, "19525": 12698492928.0, "19530": 12698492928.0, "19535": 12698492928.0, "19540": 12698492928.0, "19545": 12698492928.0, "19550": 12698492928.0, "19555": 12698492928.0, "19560": 12698492928.0, "19565": 12698492928.0, "19570": 12698492928.0, "19575": 12698492928.0, "19580": 12698492928.0, "19585": 12698492928.0, "19590": 12698492928.0, "19595": 12698492928.0, "19600": 12698492928.0, "19605": 12698492928.0, "19610": 12698492928.0, "19615": 12698492928.0, "19620": 12698492928.0, "19625": 12698492928.0, "19630": 12698492928.0, "19635": 12698492928.0, "19640": 12698492928.0, "19645": 12698492928.0, "19650": 12698492928.0, "19655": 12698492928.0, "19660": 12698492928.0, "19665": 12698492928.0, "19670": 12698492928.0, "19675": 12698492928.0, "19680": 12698492928.0, "19685": 12698492928.0, "19690": 12698492928.0, "19695": 12698492928.0, "19700": 12698492928.0, "19705": 12698492928.0, "19710": 12698492928.0, "19715": 12698492928.0, "19720": 12698492928.0, "19725": 12698492928.0, "19730": 12698492928.0, "19735": 12698492928.0, "19740": 12698492928.0, "19745": 12698492928.0, "19750": 12698492928.0, "19755": 12698492928.0, "19760": 12698492928.0, "19765": 12698492928.0, "19770": 12698492928.0, "19775": 12698492928.0, "19780": 12698492928.0, "19785": 12698492928.0, "19790": 12698492928.0, "19795": 12698492928.0, "19800": 12698492928.0, "19805": 12698492928.0, "19810": 12698492928.0, "19815": 12698492928.0, "19820": 12698492928.0, "19825": 12698492928.0, "19830": 12698492928.0, "19835": 12698492928.0, "19840": 12698492928.0, "19845": 12698492928.0, "19850": 12698492928.0, "19855": 12698492928.0, "19860": 12698492928.0, "19865": 
12698492928.0, "19870": 12698492928.0, "19875": 12698492928.0, "19880": 12698492928.0, "19885": 12698492928.0, "19890": 12698492928.0, "19895": 12698492928.0, "19900": 12698492928.0, "19905": 12698492928.0, "19910": 12698492928.0, "19915": 12698492928.0, "19920": 12698492928.0, "19925": 12698492928.0, "19930": 12698492928.0, "19935": 12698492928.0, "19940": 12698492928.0, "19945": 12698492928.0, "19950": 12698492928.0, "19955": 12698492928.0, "19960": 12698492928.0, "19965": 12698492928.0, "19970": 12698492928.0, "19975": 12698492928.0, "19980": 12698492928.0, "19985": 12698492928.0, "19990": 12698492928.0, "19995": 12698492928.0, "20000": 12698492928.0, "20005": 12698492928.0, "20010": 12698492928.0, "20015": 12698492928.0, "20020": 12698492928.0, "20025": 12698492928.0, "20030": 12698492928.0, "20035": 12698492928.0, "20040": 12698492928.0, "20045": 12698492928.0, "20050": 12698492928.0, "20055": 12698492928.0, "20060": 12698492928.0, "20065": 12698492928.0, "20070": 12698492928.0, "20075": 12698492928.0, "20080": 12698492928.0, "20085": 12698492928.0, "20090": 12698492928.0, "20095": 12698492928.0, "20100": 12698492928.0, "20105": 12698492928.0, "20110": 12698492928.0, "20115": 12698492928.0, "20120": 12698492928.0, "20125": 12698492928.0, "20130": 12698492928.0, "20135": 12698492928.0, "20140": 12698492928.0, "20145": 12698492928.0, "20150": 12698492928.0, "20155": 12698492928.0, "20160": 12698492928.0, "20165": 12698492928.0, "20170": 12698492928.0, "20175": 12698492928.0, "20180": 12698492928.0, "20185": 12698492928.0, "20190": 12698492928.0, "20195": 12698492928.0, "20200": 12698492928.0, "20205": 12698492928.0, "20210": 12698492928.0, "20215": 12698492928.0, "20220": 12698492928.0, "20225": 12698492928.0, "20230": 12698492928.0, "20235": 12698492928.0, "20240": 12698492928.0, "20245": 12698492928.0, "20250": 12698492928.0, "20255": 12698492928.0, "20260": 12698492928.0, "20265": 12698492928.0, "20270": 12698492928.0, "20275": 12698492928.0, "20280": 12698492928.0, "20285": 12698492928.0, "20290": 12698492928.0, "20295": 12698492928.0, "20300": 12698492928.0, "20305": 12698492928.0, "20310": 12698492928.0, "20315": 12698492928.0, "20320": 12698492928.0, "20325": 12698492928.0, "20330": 12698492928.0, "20335": 12698492928.0, "20340": 12698492928.0, "20345": 12698492928.0, "20350": 12698492928.0, "20355": 12698492928.0, "20360": 12698492928.0, "20365": 12698492928.0, "20370": 12698492928.0, "20375": 12698492928.0, "20380": 12698492928.0, "20385": 12698492928.0, "20390": 12698492928.0, "20395": 12698492928.0, "20400": 12698492928.0, "20405": 12698492928.0, "20410": 12698492928.0, "20415": 12698492928.0, "20420": 12698492928.0, "20425": 12698492928.0, "20430": 12698492928.0, "20435": 12698492928.0, "20440": 12698492928.0, "20445": 12698492928.0, "20450": 12698492928.0, "20455": 12698492928.0, "20460": 12698492928.0, "20465": 12698492928.0, "20470": 12698492928.0, "20475": 12698492928.0, "20480": 12698492928.0, "20485": 12698492928.0, "20490": 12698492928.0, "20495": 12698492928.0, "20500": 12698492928.0, "20505": 12698492928.0, "20510": 12698492928.0, "20515": 12698492928.0, "20520": 12698492928.0, "20525": 12698492928.0, "20530": 12698492928.0, "20535": 12698492928.0, "20540": 12698492928.0, "20545": 12698492928.0, "20550": 12698492928.0, "20555": 12698492928.0, "20560": 12698492928.0, "20565": 12698492928.0, "20570": 12698492928.0, "20575": 12698492928.0, "20580": 12698492928.0, "20585": 12698492928.0, "20590": 12698492928.0, "20595": 12698492928.0, "20600": 12698492928.0, "20605": 
12698492928.0, "20610": 12698492928.0, "20615": 12698492928.0, "20620": 12698492928.0, "20625": 12698492928.0, "20630": 12698492928.0, "20635": 12698492928.0, "20640": 12698492928.0, "20645": 12698492928.0, "20650": 12698492928.0, "20655": 12698492928.0, "20660": 12698492928.0, "20665": 12698492928.0, "20670": 12698492928.0, "20675": 12698492928.0, "20680": 12698492928.0, "20685": 12698492928.0, "20690": 12698492928.0, "20695": 12698492928.0, "20700": 12698492928.0, "20705": 12698492928.0, "20710": 12698492928.0, "20715": 12698492928.0, "20720": 12698492928.0, "20725": 12698492928.0, "20730": 12698492928.0, "20735": 12698492928.0, "20740": 12698492928.0, "20745": 12698492928.0, "20750": 12698492928.0, "20755": 12698492928.0, "20760": 12698492928.0, "20765": 12698492928.0, "20770": 12698492928.0, "20775": 12698492928.0, "20780": 12698492928.0, "20785": 12698492928.0, "20790": 12698492928.0, "20795": 12698492928.0, "20800": 12698492928.0, "20805": 12698492928.0, "20810": 12698492928.0, "20815": 12698492928.0, "20820": 12698492928.0, "20825": 12698492928.0, "20830": 12698492928.0, "20835": 12698492928.0, "20840": 12698492928.0, "20845": 12698492928.0, "20850": 12698492928.0, "20855": 12698492928.0, "20860": 12698492928.0, "20865": 12698492928.0, "20870": 12698492928.0, "20875": 12698492928.0, "20880": 12698492928.0, "20885": 12698492928.0, "20890": 12698492928.0, "20895": 12698492928.0, "20900": 12698492928.0, "20905": 12698492928.0, "20910": 12698492928.0, "20915": 12698492928.0, "20920": 12698492928.0, "20925": 12698492928.0, "20930": 12698492928.0, "20935": 12698492928.0, "20940": 12698492928.0, "20945": 12698492928.0, "20950": 12698492928.0, "20955": 12698492928.0, "20960": 12698492928.0, "20965": 12698492928.0, "20970": 12698492928.0, "20975": 12698492928.0, "20980": 12698492928.0, "20985": 12698492928.0, "20990": 12698492928.0, "20995": 12698492928.0, "21000": 12698492928.0, "21005": 12698492928.0, "21010": 12698492928.0, "21015": 12698492928.0, "21020": 12698492928.0, "21025": 12698492928.0, "21030": 12698492928.0, "21035": 12698492928.0, "21040": 12698492928.0, "21045": 12698492928.0, "21050": 12698492928.0, "21055": 12698492928.0, "21060": 12698492928.0, "21065": 12698492928.0, "21070": 12698492928.0, "21075": 12698492928.0, "21080": 12698492928.0, "21085": 12698492928.0, "21090": 12698492928.0, "21095": 12698492928.0, "21100": 12698492928.0, "21105": 12698492928.0, "21110": 12698492928.0, "21115": 12698492928.0, "21120": 12698492928.0, "21125": 12698492928.0, "21130": 12698492928.0, "21135": 12698492928.0, "21140": 12698492928.0, "21145": 12698492928.0, "21150": 12698492928.0, "21155": 12698492928.0, "21160": 12698492928.0, "21165": 12698492928.0, "21170": 12698492928.0, "21175": 12698492928.0, "21180": 12698492928.0, "21185": 12698492928.0, "21190": 12698492928.0, "21195": 12698492928.0, "21200": 12698492928.0, "21205": 12698492928.0, "21210": 12698492928.0, "21215": 12698492928.0, "21220": 12698492928.0, "21225": 12698492928.0, "21230": 12698492928.0, "21235": 12698492928.0, "21240": 12698492928.0, "21245": 12698492928.0, "21250": 12698492928.0, "21255": 12698492928.0, "21260": 12698492928.0, "21265": 12698492928.0, "21270": 12698492928.0, "21275": 12698492928.0, "21280": 12698492928.0, "21285": 12698492928.0, "21290": 12698492928.0, "21295": 12698492928.0, "21300": 12698492928.0, "21305": 12698492928.0, "21310": 12698492928.0, "21315": 12698492928.0, "21320": 12698492928.0, "21325": 12698492928.0, "21330": 12698492928.0, "21335": 12698492928.0, "21340": 12698492928.0, "21345": 
12698492928.0, "21350": 12698492928.0, "21355": 12698492928.0, "21360": 12698492928.0, "21365": 12698492928.0, "21370": 12698492928.0, "21375": 12698492928.0, "21380": 12698492928.0, "21385": 12698492928.0, "21390": 12698492928.0, "21395": 12698492928.0, "21400": 12698492928.0, "21405": 12698492928.0, "21410": 12698492928.0, "21415": 12698492928.0, "21420": 12698492928.0, "21425": 12698492928.0, "21430": 12698492928.0, "21435": 12698492928.0, "21440": 12698492928.0, "21445": 12698492928.0, "21450": 12698492928.0, "21455": 12698492928.0, "21460": 12698492928.0, "21465": 12698492928.0, "21470": 12698492928.0, "21475": 12698492928.0, "21480": 12698492928.0, "21485": 12698492928.0, "21490": 12698492928.0, "21495": 12698492928.0, "21500": 12698492928.0, "21505": 12698492928.0, "21510": 12698492928.0, "21515": 12698492928.0, "21520": 12698492928.0, "21525": 12698492928.0, "21530": 12698492928.0, "21535": 12698492928.0, "21540": 12698492928.0, "21545": 12698492928.0, "21550": 12698492928.0, "21555": 12698492928.0, "21560": 12698492928.0, "21565": 12698492928.0, "21570": 12698492928.0, "21575": 12698492928.0, "21580": 12698492928.0, "21585": 12698492928.0, "21590": 12698492928.0, "21595": 12698492928.0, "21600": 12698492928.0, "21605": 12698492928.0, "21610": 12698492928.0, "21615": 12698492928.0, "21620": 12698492928.0, "21625": 12698492928.0, "21630": 12698492928.0, "21635": 12698492928.0, "21640": 12698492928.0, "21645": 12698492928.0, "21650": 12698492928.0, "21655": 12698492928.0, "21660": 12698492928.0, "21665": 12698492928.0, "21670": 12698492928.0, "21675": 12698492928.0, "21680": 12698492928.0, "21685": 12698492928.0, "21690": 12698492928.0, "21695": 12698492928.0, "21700": 12698492928.0, "21705": 12698492928.0, "21710": 12698492928.0, "21715": 12698492928.0, "21720": 12698492928.0, "21725": 12698492928.0, "21730": 12698492928.0, "21735": 12698492928.0, "21740": 12698492928.0, "21745": 12698492928.0, "21750": 12698492928.0, "21755": 12698492928.0, "21760": 12698492928.0, "21765": 12698492928.0, "21770": 12698492928.0, "21775": 12698492928.0, "21780": 12698492928.0, "21785": 12698492928.0, "21790": 12698492928.0, "21795": 12698492928.0, "21800": 12698492928.0, "21805": 12698492928.0, "21810": 12698492928.0, "21815": 12698492928.0, "21820": 12698492928.0, "21825": 12698492928.0, "21830": 12698492928.0, "21835": 12698492928.0, "21840": 12698492928.0, "21845": 12698492928.0, "21850": 12698492928.0, "21855": 12698492928.0, "21860": 12698492928.0, "21865": 12698492928.0, "21870": 12698492928.0, "21875": 12698492928.0, "21880": 12698492928.0, "21885": 12698492928.0, "21890": 12698492928.0, "21895": 12698492928.0, "21900": 12698492928.0, "21905": 12698492928.0, "21910": 12698492928.0, "21915": 12698492928.0, "21920": 12698492928.0, "21925": 12698492928.0, "21930": 12698492928.0, "21935": 12698492928.0, "21940": 12698492928.0, "21945": 12698492928.0, "21950": 12698492928.0, "21955": 12698492928.0, "21960": 12698492928.0, "21965": 12698492928.0, "21970": 12698492928.0, "21975": 12698492928.0, "21980": 12698492928.0, "21985": 12698492928.0, "21990": 12698492928.0, "21995": 12698492928.0, "22000": 12698492928.0, "22005": 12698492928.0, "22010": 12698492928.0, "22015": 12698492928.0, "22020": 12698492928.0, "22025": 12698492928.0, "22030": 12698492928.0, "22035": 12698492928.0, "22040": 12698492928.0, "22045": 12698492928.0, "22050": 12698492928.0, "22055": 12698492928.0, "22060": 12698492928.0, "22065": 12698492928.0, "22070": 12698492928.0, "22075": 12698492928.0, "22080": 12698492928.0, "22085": 
12698492928.0, "22090": 12698492928.0, "22095": 12698492928.0, "22100": 12698492928.0, "22105": 12698492928.0, "22110": 12698492928.0, "22115": 12698492928.0, "22120": 12698492928.0, "22125": 12698492928.0, "22130": 12698492928.0, "22135": 12698492928.0, "22140": 12698492928.0, "22145": 12698492928.0, "22150": 12698492928.0, "22155": 12698492928.0, "22160": 12698492928.0, "22165": 12698492928.0, "22170": 12698492928.0, "22175": 12698492928.0, "22180": 12698492928.0, "22185": 12698492928.0, "22190": 12698492928.0, "22195": 12698492928.0, "22200": 12698492928.0, "22205": 12698492928.0, "22210": 12698492928.0, "22215": 12698492928.0, "22220": 12698492928.0, "22225": 12698492928.0, "22230": 12698492928.0, "22235": 12698492928.0, "22240": 12698492928.0, "22245": 12698492928.0, "22250": 12698492928.0, "22255": 12698492928.0, "22260": 12698492928.0, "22265": 12698492928.0, "22270": 12698492928.0, "22275": 12698492928.0, "22280": 12698492928.0, "22285": 12698492928.0, "22290": 12698492928.0, "22295": 12698492928.0, "22300": 12698492928.0, "22305": 12698492928.0, "22310": 12698492928.0, "22315": 12698492928.0, "22320": 12698492928.0, "22325": 12698492928.0, "22330": 12698492928.0, "22335": 12698492928.0, "22340": 12698492928.0, "22345": 12698492928.0, "22350": 12698492928.0, "22355": 12698492928.0, "22360": 12698492928.0, "22365": 12698492928.0, "22370": 12698492928.0, "22375": 12698492928.0, "22380": 12698492928.0, "22385": 12698492928.0, "22390": 12698492928.0, "22395": 12698492928.0, "22400": 12698492928.0, "22405": 12698492928.0, "22410": 12698492928.0, "22415": 12698492928.0, "22420": 12698492928.0, "22425": 12698492928.0, "22430": 12698492928.0, "22435": 12698492928.0, "22440": 12698492928.0, "22445": 12698492928.0, "22450": 12698492928.0, "22455": 12698492928.0, "22460": 12698492928.0, "22465": 12698492928.0, "22470": 12698492928.0, "22475": 12698492928.0, "22480": 12698492928.0, "22485": 12698492928.0, "22490": 12698492928.0, "22495": 12698492928.0, "22500": 12698492928.0, "22505": 12698492928.0, "22510": 12698492928.0, "22515": 12698492928.0, "22520": 12698492928.0, "22525": 12698492928.0, "22530": 12698492928.0, "22535": 12698492928.0, "22540": 12698492928.0, "22545": 12698492928.0, "22550": 12698492928.0, "22555": 12698492928.0, "22560": 12698492928.0, "22565": 12698492928.0, "22570": 12698492928.0, "22575": 12698492928.0, "22580": 12698492928.0, "22585": 12698492928.0, "22590": 12698492928.0, "22595": 12698492928.0, "22600": 12698492928.0, "22605": 12698492928.0, "22610": 12698492928.0, "22615": 12698492928.0, "22620": 12698492928.0, "22625": 12698492928.0, "22630": 12698492928.0, "22635": 12698492928.0, "22640": 12698492928.0, "22645": 12698492928.0, "22650": 12698492928.0, "22655": 12698492928.0, "22660": 12698492928.0, "22665": 12698492928.0, "22670": 12698492928.0, "22675": 12698492928.0, "22680": 12698492928.0, "22685": 12698492928.0, "22690": 12698492928.0, "22695": 12698492928.0, "22700": 12698492928.0, "22705": 12698492928.0, "22710": 12698492928.0, "22715": 12698492928.0, "22720": 12698492928.0, "22725": 12698492928.0, "22730": 12698492928.0, "22735": 12698492928.0, "22740": 12698492928.0, "22745": 12698492928.0, "22750": 12698492928.0, "22755": 12698492928.0, "22760": 12698492928.0, "22765": 12698492928.0, "22770": 12698492928.0, "22775": 12698492928.0, "22780": 12698492928.0, "22785": 12698492928.0, "22790": 12698492928.0, "22795": 12698492928.0, "22800": 12698492928.0, "22805": 12698492928.0, "22810": 12698492928.0, "22815": 12698492928.0, "22820": 12698492928.0, "22825": 
12698492928.0, "22830": 12698492928.0, "22835": 12698492928.0, "22840": 12698492928.0, "22845": 12698492928.0, "22850": 12698492928.0, "22855": 12698492928.0, "22860": 12698492928.0, "22865": 12698492928.0, "22870": 12698492928.0, "22875": 12698492928.0, "22880": 12698492928.0, "22885": 12698492928.0, "22890": 12698492928.0, "22895": 12698492928.0, "22900": 12698492928.0, "22905": 12698492928.0, "22910": 12698492928.0, "22915": 12698492928.0, "22920": 12698492928.0, "22925": 12698492928.0, "22930": 12698492928.0, "22935": 12698492928.0, "22940": 12698492928.0, "22945": 12698492928.0, "22950": 12698492928.0, "22955": 12698492928.0, "22960": 12698492928.0, "22965": 12698492928.0, "22970": 12698492928.0, "22975": 12698492928.0, "22980": 12698492928.0, "22985": 12698492928.0, "22990": 12698492928.0, "22995": 12698492928.0, "23000": 12698492928.0, "23005": 12698492928.0, "23010": 12698492928.0, "23015": 12698492928.0, "23020": 12698492928.0, "23025": 12698492928.0, "23030": 12698492928.0, "23035": 12698492928.0, "23040": 12698492928.0, "23045": 12698492928.0, "23050": 12698492928.0, "23055": 12698492928.0, "23060": 12698492928.0, "23065": 12698492928.0, "23070": 12698492928.0, "23075": 12698492928.0, "23080": 12698492928.0, "23085": 12698492928.0, "23090": 12698492928.0, "23095": 12698492928.0, "23100": 12698492928.0, "23105": 12698492928.0, "23110": 12698492928.0, "23115": 12698492928.0, "23120": 12698492928.0, "23125": 12698492928.0, "23130": 12698492928.0, "23135": 12698492928.0, "23140": 12698492928.0, "23145": 12698492928.0, "23150": 12698492928.0, "23155": 12698492928.0, "23160": 12698492928.0, "23165": 12698492928.0, "23170": 12698492928.0, "23175": 12698492928.0, "23180": 12698492928.0, "23185": 12698492928.0, "23190": 12698492928.0, "23195": 12698492928.0, "23200": 12698492928.0, "23205": 12698492928.0, "23210": 12698492928.0, "23215": 12698492928.0, "23220": 12698492928.0, "23225": 12698492928.0, "23230": 12698492928.0, "23235": 12698492928.0, "23240": 12698492928.0, "23245": 12698492928.0, "23250": 12698492928.0, "23255": 12698492928.0, "23260": 12698492928.0, "23265": 12698492928.0, "23270": 12698492928.0, "23275": 12698492928.0, "23280": 12698492928.0, "23285": 12698492928.0, "23290": 12698492928.0, "23295": 12698492928.0, "23300": 12698492928.0, "23305": 12698492928.0, "23310": 12698492928.0, "23315": 12698492928.0, "23320": 12698492928.0, "23325": 12698492928.0, "23330": 12698492928.0, "23335": 12698492928.0, "23340": 12698492928.0, "23345": 12698492928.0, "23350": 12698492928.0, "23355": 12698492928.0, "23360": 12698492928.0, "23365": 12698492928.0, "23370": 12698492928.0, "23375": 12698492928.0, "23380": 12698492928.0, "23385": 12698492928.0, "23390": 12698492928.0, "23395": 12698492928.0, "23400": 12698492928.0, "23405": 12698492928.0, "23410": 12698492928.0, "23415": 12698492928.0, "23420": 12698492928.0, "23425": 12698492928.0, "23430": 12698492928.0, "23435": 12698492928.0, "23440": 12698492928.0, "23445": 12698492928.0, "23450": 12698492928.0, "23455": 12698492928.0, "23460": 12698492928.0, "23465": 12698492928.0, "23470": 12698492928.0, "23475": 12698492928.0, "23480": 12698492928.0, "23485": 12698492928.0, "23490": 12698492928.0, "23495": 12698492928.0, "23500": 12698492928.0, "23505": 12698492928.0, "23510": 12698492928.0, "23515": 12698492928.0, "23520": 12698492928.0, "23525": 12698492928.0, "23530": 12698492928.0, "23535": 12698492928.0, "23540": 12698492928.0, "23545": 12698492928.0, "23550": 12698492928.0, "23555": 12698492928.0, "23560": 12698492928.0, "23565": 
12698492928.0, "23570": 12698492928.0, "23575": 12698492928.0, "23580": 12698492928.0, "23585": 12698492928.0, "23590": 12698492928.0, "23595": 12698492928.0, "23600": 12698492928.0, "23605": 12698492928.0, "23610": 12698492928.0, "23615": 12698492928.0, "23620": 12698492928.0, "23625": 12698492928.0, "23630": 12698492928.0, "23635": 12698492928.0, "23640": 12698492928.0, "23645": 12698492928.0, "23650": 12698492928.0, "23655": 12698492928.0, "23660": 12698492928.0, "23665": 12698492928.0, "23670": 12698492928.0, "23675": 12698492928.0, "23680": 12698492928.0, "23685": 12698492928.0, "23690": 12698492928.0, "23695": 12698492928.0, "23700": 12698492928.0, "23705": 12698492928.0, "23710": 12698492928.0, "23715": 12698492928.0, "23720": 12698492928.0, "23725": 12698492928.0, "23730": 12698492928.0, "23735": 12698492928.0, "23740": 12698492928.0, "23745": 12698492928.0, "23750": 12698492928.0, "23755": 12698492928.0, "23760": 12698492928.0, "23765": 12698492928.0, "23770": 12698492928.0, "23775": 12698492928.0, "23780": 12698492928.0, "23785": 12698492928.0, "23790": 12698492928.0, "23795": 12698492928.0, "23800": 12698492928.0, "23805": 12698492928.0, "23810": 12698492928.0, "23815": 12698492928.0, "23820": 12698492928.0, "23825": 12698492928.0, "23830": 12698492928.0, "23835": 12698492928.0, "23840": 12698492928.0, "23845": 12698492928.0, "23850": 12698492928.0, "23855": 12698492928.0, "23860": 12698492928.0, "23865": 12698492928.0, "23870": 12698492928.0, "23875": 12698492928.0, "23880": 12698492928.0, "23885": 12698492928.0, "23890": 12698492928.0, "23895": 12698492928.0, "23900": 12698492928.0, "23905": 12698492928.0, "23910": 12698492928.0, "23915": 12698492928.0, "23920": 12698492928.0, "23925": 12698492928.0, "23930": 12698492928.0, "23935": 12698492928.0, "23940": 12698492928.0, "23945": 12698492928.0, "23950": 12698492928.0, "23955": 12698492928.0, "23960": 12698492928.0, "23965": 12698492928.0, "23970": 12698492928.0, "23975": 12698492928.0, "23980": 12698492928.0, "23985": 12698492928.0, "23990": 12698492928.0, "23995": 12698492928.0, "24000": 12698492928.0, "24005": 12698492928.0, "24010": 12698492928.0, "24015": 12698492928.0, "24020": 12698492928.0, "24025": 12698492928.0, "24030": 12698492928.0, "24035": 12698492928.0, "24040": 12698492928.0, "24045": 12698492928.0, "24050": 12698492928.0, "24055": 12698492928.0, "24060": 12698492928.0, "24065": 12698492928.0, "24070": 12698492928.0, "24075": 12698492928.0, "24080": 12698492928.0, "24085": 12698492928.0, "24090": 12698492928.0, "24095": 12698492928.0, "24100": 12698492928.0, "24105": 12698492928.0, "24110": 12698492928.0, "24115": 12698492928.0, "24120": 12698492928.0, "24125": 12698492928.0, "24130": 12698492928.0, "24135": 12698492928.0, "24140": 12698492928.0, "24145": 12698492928.0, "24150": 12698492928.0, "24155": 12698492928.0, "24160": 12698492928.0, "24165": 12698492928.0, "24170": 12698492928.0, "24175": 12698492928.0, "24180": 12698492928.0, "24185": 12698492928.0, "24190": 12698492928.0, "24195": 12698492928.0, "24200": 12698492928.0, "24205": 12698492928.0, "24210": 12698492928.0, "24215": 12698492928.0, "24220": 12698492928.0, "24225": 12698492928.0, "24230": 12698492928.0, "24235": 12698492928.0, "24240": 12698492928.0, "24245": 12698492928.0, "24250": 12698492928.0, "24255": 12698492928.0, "24260": 12698492928.0, "24265": 12698492928.0, "24270": 12698492928.0, "24275": 12698492928.0, "24280": 12698492928.0, "24285": 12698492928.0, "24290": 12698492928.0, "24295": 12698492928.0, "24300": 12698492928.0, "24305": 
12698492928.0, "24310": 12698492928.0, "24315": 12698492928.0, "24320": 12698492928.0, "24325": 12698492928.0, "24330": 12698492928.0, "24335": 12698492928.0, "24340": 12698492928.0, "24345": 12698492928.0, "24350": 12698492928.0, "24355": 12698492928.0, "24360": 12698492928.0, "24365": 12698492928.0, "24370": 12698492928.0, "24375": 12698492928.0, "24380": 12698492928.0, "24385": 12698492928.0, "24390": 12698492928.0, "24395": 12698492928.0, "24400": 12698492928.0, "24405": 12698492928.0, "24410": 12698492928.0, "24415": 12698492928.0, "24420": 12698492928.0, "24425": 12698492928.0, "24430": 12698492928.0, "24435": 12698492928.0, "24440": 12698492928.0, "24445": 12698492928.0, "24450": 12698492928.0, "24455": 12698492928.0, "24460": 12698492928.0, "24465": 12698492928.0, "24470": 12698492928.0, "24475": 12698492928.0, "24480": 12698492928.0, "24485": 12698492928.0, "24490": 12698492928.0, "24495": 12698492928.0, "24500": 12698492928.0, "24505": 12698492928.0, "24510": 12698492928.0, "24515": 12698492928.0, "24520": 12698492928.0, "24525": 12698492928.0, "24530": 12698492928.0, "24535": 12698492928.0, "24540": 12698492928.0, "24545": 12698492928.0, "24550": 12698492928.0, "24555": 12698492928.0, "24560": 12698492928.0, "24565": 12698492928.0, "24570": 12698492928.0, "24575": 12698492928.0, "24580": 12698492928.0, "24585": 12698492928.0, "24590": 12698492928.0, "24595": 12698492928.0, "24600": 12698492928.0, "24605": 12698492928.0, "24610": 12698492928.0, "24615": 12698492928.0, "24620": 12698492928.0, "24625": 12698492928.0, "24630": 12698492928.0, "24635": 12698492928.0, "24640": 12698492928.0, "24645": 12698492928.0, "24650": 12698492928.0, "24655": 12698492928.0, "24660": 12698492928.0, "24665": 12698492928.0, "24670": 12698492928.0, "24675": 12698492928.0, "24680": 12698492928.0, "24685": 12698492928.0, "24690": 12698492928.0, "24695": 12698492928.0, "24700": 12698492928.0, "24705": 12698492928.0, "24710": 12698492928.0, "24715": 12698492928.0, "24720": 12698492928.0, "24725": 12698492928.0, "24730": 12698492928.0, "24735": 12698492928.0, "24740": 12698492928.0, "24745": 12698492928.0, "24750": 12698492928.0, "24755": 12698492928.0, "24760": 12698492928.0, "24765": 12698492928.0, "24770": 12698492928.0, "24775": 12698492928.0, "24780": 12698492928.0, "24785": 12698492928.0, "24790": 12698492928.0, "24795": 12698492928.0, "24800": 12698492928.0, "24805": 12698492928.0, "24810": 12698492928.0, "24815": 12698492928.0, "24820": 12698492928.0, "24825": 12698492928.0, "24830": 12698492928.0, "24835": 12698492928.0, "24840": 12698492928.0, "24845": 12698492928.0, "24850": 12698492928.0, "24855": 12698492928.0, "24860": 12698492928.0, "24865": 12698492928.0, "24870": 12698492928.0, "24875": 12698492928.0, "24880": 12698492928.0, "24885": 12698492928.0, "24890": 12698492928.0, "24895": 12698492928.0, "24900": 12698492928.0, "24905": 12698492928.0, "24910": 12698492928.0, "24915": 12698492928.0, "24920": 12698492928.0, "24925": 12698492928.0, "24930": 12698492928.0, "24935": 12698492928.0, "24940": 12698492928.0, "24945": 12698492928.0, "24950": 12698492928.0, "24955": 12698492928.0, "24960": 12698492928.0, "24965": 12698492928.0, "24970": 12698492928.0, "24975": 12698492928.0, "24980": 12698492928.0, "24985": 12698492928.0, "24990": 12698492928.0, "24995": 12698492928.0, "25000": 12698492928.0, "25005": 12698492928.0, "25010": 12698492928.0, "25015": 12698492928.0, "25020": 12698492928.0, "25025": 12698492928.0, "25030": 12698492928.0, "25035": 12698492928.0, "25040": 12698492928.0, "25045": 
12698492928.0, "25050": 12698492928.0, "25055": 12698492928.0, "25060": 12698492928.0, "25065": 12698492928.0, "25070": 12698492928.0, "25075": 12698492928.0, "25080": 12698492928.0, "25085": 12698492928.0, "25090": 12698492928.0, "25095": 12698492928.0, "25100": 12698492928.0, "25105": 12698492928.0, "25110": 12698492928.0, "25115": 12698492928.0, "25120": 12698492928.0, "25125": 12698492928.0, "25130": 12698492928.0, "25135": 12698492928.0, "25140": 12698492928.0, "25145": 12698492928.0, "25150": 12698492928.0, "25155": 12698492928.0, "25160": 12698492928.0, "25165": 12698492928.0, "25170": 12698492928.0, "25175": 12698492928.0, "25180": 12698492928.0, "25185": 12698492928.0, "25190": 12698492928.0, "25195": 12698492928.0, "25200": 12698492928.0, "25205": 12698492928.0, "25210": 12698492928.0, "25215": 12698492928.0, "25220": 12698492928.0, "25225": 12698492928.0, "25230": 12698492928.0, "25235": 12698492928.0, "25240": 12698492928.0, "25245": 12698492928.0, "25250": 12698492928.0, "25255": 12698492928.0, "25260": 12698492928.0, "25265": 12698492928.0, "25270": 12698492928.0, "25275": 12698492928.0, "25280": 12698492928.0, "25285": 12698492928.0, "25290": 12698492928.0, "25295": 12698492928.0, "25300": 12698492928.0, "25305": 12698492928.0, "25310": 12698492928.0, "25315": 12698492928.0, "25320": 12698492928.0, "25325": 12698492928.0, "25330": 12698492928.0, "25335": 12698492928.0, "25340": 12698492928.0, "25345": 12698492928.0, "25350": 12698492928.0, "25355": 12698492928.0, "25360": 12698492928.0, "25365": 12698492928.0, "25370": 12698492928.0, "25375": 12698492928.0, "25380": 12698492928.0, "25385": 12698492928.0, "25390": 12698492928.0, "25395": 12698492928.0, "25400": 12698492928.0, "25405": 12698492928.0, "25410": 12698492928.0, "25415": 12698492928.0, "25420": 12698492928.0, "25425": 12698492928.0, "25430": 12698492928.0, "25435": 12698492928.0, "25440": 12698492928.0, "25445": 12698492928.0, "25450": 12698492928.0, "25455": 12698492928.0, "25460": 12698492928.0, "25465": 12698492928.0, "25470": 12698492928.0, "25475": 12698492928.0, "25480": 12698492928.0, "25485": 12698492928.0, "25490": 12698492928.0, "25495": 12698492928.0, "25500": 12698492928.0, "25505": 12698492928.0, "25510": 12698492928.0, "25515": 12698492928.0, "25520": 12698492928.0, "25525": 12698492928.0, "25530": 12698492928.0, "25535": 12698492928.0, "25540": 12698492928.0, "25545": 12698492928.0, "25550": 12698492928.0, "25555": 12698492928.0, "25560": 12698492928.0, "25565": 12698492928.0, "25570": 12698492928.0, "25575": 12698492928.0, "25580": 12698492928.0, "25585": 12698492928.0, "25590": 12698492928.0, "25595": 12698492928.0, "25600": 12698492928.0, "25605": 12698492928.0, "25610": 12698492928.0, "25615": 12698492928.0, "25620": 12698492928.0, "25625": 12698492928.0, "25630": 12698492928.0, "25635": 12698492928.0, "25640": 12698492928.0, "25645": 12698492928.0, "25650": 12698492928.0, "25655": 12698492928.0, "25660": 12698492928.0, "25665": 12698492928.0, "25670": 12698492928.0, "25675": 12698492928.0, "25680": 12698492928.0, "25685": 12698492928.0, "25690": 12698492928.0, "25695": 12698492928.0, "25700": 12698492928.0, "25705": 12698492928.0, "25710": 12698492928.0, "25715": 12698492928.0, "25720": 12698492928.0, "25725": 12698492928.0, "25730": 12698492928.0, "25735": 12698492928.0, "25740": 12698492928.0, "25745": 12698492928.0, "25750": 12698492928.0, "25755": 12698492928.0, "25760": 12698492928.0, "25765": 12698492928.0, "25770": 12698492928.0, "25775": 12698492928.0, "25780": 12698492928.0, "25785": 
12698492928.0, "25790": 12698492928.0, "25795": 12698492928.0, "25800": 12698492928.0, "25805": 12698492928.0, "25810": 12698492928.0, "25815": 12698492928.0, "25820": 12698492928.0, "25825": 12698492928.0, "25830": 12698492928.0, "25835": 12698492928.0, "25840": 12698492928.0, "25845": 12698492928.0, "25850": 12698492928.0, "25855": 12698492928.0, "25860": 12698492928.0, "25865": 12698492928.0, "25870": 12698492928.0, "25875": 12698492928.0, "25880": 12698492928.0, "25885": 12698492928.0, "25890": 12698492928.0, "25895": 12698492928.0, "25900": 12698492928.0, "25905": 12698492928.0, "25910": 12698492928.0, "25915": 12698492928.0, "25920": 12698492928.0, "25925": 12698492928.0, "25930": 12698492928.0, "25935": 12698492928.0, "25940": 12698492928.0, "25945": 12698492928.0, "25950": 12698492928.0, "25955": 12698492928.0, "25960": 12698492928.0, "25965": 12698492928.0, "25970": 12698492928.0, "25975": 12698492928.0, "25980": 12698492928.0, "25985": 12698492928.0, "25990": 12698492928.0, "25995": 12698492928.0, "26000": 12698492928.0, "26005": 12698492928.0, "26010": 12698492928.0, "26015": 12698492928.0, "26020": 12698492928.0, "26025": 12698492928.0, "26030": 12698492928.0, "26035": 12698492928.0, "26040": 12698492928.0, "26045": 12698492928.0, "26050": 12698492928.0, "26055": 12698492928.0, "26060": 12698492928.0, "26065": 12698492928.0, "26070": 12698492928.0, "26075": 12698492928.0, "26080": 12698492928.0, "26085": 12698492928.0, "26090": 12698492928.0, "26095": 12698492928.0, "26100": 12698492928.0, "26105": 12698492928.0, "26110": 12698492928.0, "26115": 12698492928.0, "26120": 12698492928.0, "26125": 12698492928.0, "26130": 12698492928.0, "26135": 12698492928.0, "26140": 12698492928.0, "26145": 12698492928.0, "26150": 12698492928.0, "26155": 12698492928.0, "26160": 12698492928.0, "26165": 12698492928.0, "26170": 12698492928.0, "26175": 12698492928.0, "26180": 12698492928.0, "26185": 12698492928.0, "26190": 12698492928.0, "26195": 12698492928.0, "26200": 12698492928.0, "26205": 12698492928.0, "26210": 12698492928.0, "26215": 12698492928.0, "26220": 12698492928.0, "26225": 12698492928.0, "26230": 12698492928.0, "26235": 12698492928.0, "26240": 12698492928.0, "26245": 12698492928.0, "26250": 12698492928.0, "26255": 12698492928.0, "26260": 12698492928.0, "26265": 12698492928.0, "26270": 12698492928.0, "26275": 12698492928.0, "26280": 12698492928.0, "26285": 12698492928.0, "26290": 12698492928.0, "26295": 12698492928.0, "26300": 12698492928.0, "26305": 12698492928.0, "26310": 12698492928.0, "26315": 12698492928.0, "26320": 12698492928.0, "26325": 12698492928.0, "26330": 12698492928.0, "26335": 12698492928.0, "26340": 12698492928.0, "26345": 12698492928.0, "26350": 12698492928.0, "26355": 12698492928.0, "26360": 12698492928.0, "26365": 12698492928.0, "26370": 12698492928.0, "26375": 12698492928.0, "26380": 12698492928.0, "26385": 12698492928.0, "26390": 12698492928.0, "26395": 12698492928.0, "26400": 12698492928.0, "26405": 12698492928.0, "26410": 12698492928.0, "26415": 12698492928.0, "26420": 12698492928.0, "26425": 12698492928.0, "26430": 12698492928.0, "26435": 12698492928.0, "26440": 12698492928.0, "26445": 12698492928.0, "26450": 12698492928.0, "26455": 12698492928.0, "26460": 12698492928.0, "26465": 12698492928.0, "26470": 12698492928.0, "26475": 12698492928.0, "26480": 12698492928.0, "26485": 12698492928.0, "26490": 12698492928.0, "26495": 12698492928.0, "26500": 12698492928.0, "26505": 12698492928.0, "26510": 12698492928.0, "26515": 12698492928.0, "26520": 12698492928.0, "26525": 
12698492928.0, "26530": 12698492928.0, "26535": 12698492928.0, "26540": 12698492928.0, "26545": 12698492928.0, "26550": 12698492928.0, "26555": 12698492928.0, "26560": 12698492928.0, "26565": 12698492928.0, "26570": 12698492928.0, "26575": 12698492928.0, "26580": 12698492928.0, "26585": 12698492928.0, "26590": 12698492928.0, "26595": 12698492928.0, "26600": 12698492928.0, "26605": 12698492928.0, "26610": 12698492928.0, "26615": 12698492928.0, "26620": 12698492928.0, "26625": 12698492928.0, "26630": 12698492928.0, "26635": 12698492928.0, "26640": 12698492928.0, "26645": 12698492928.0, "26650": 12698492928.0, "26655": 12698492928.0, "26660": 12698492928.0, "26665": 12698492928.0, "26670": 12698492928.0, "26675": 12698492928.0, "26680": 12698492928.0, "26685": 12698492928.0, "26690": 12698492928.0, "26695": 12698492928.0, "26700": 12698492928.0, "26705": 12698492928.0, "26710": 12698492928.0, "26715": 12698492928.0, "26720": 12698492928.0, "26725": 12698492928.0, "26730": 12698492928.0, "26735": 12698492928.0, "26740": 12698492928.0, "26745": 12698492928.0, "26750": 12698492928.0, "26755": 12698492928.0, "26760": 12698492928.0, "26765": 12698492928.0, "26770": 12698492928.0, "26775": 12698492928.0, "26780": 12698492928.0, "26785": 12698492928.0, "26790": 12698492928.0, "26795": 12698492928.0, "26800": 12698492928.0, "26805": 12698492928.0, "26810": 12698492928.0, "26815": 12698492928.0, "26820": 12698492928.0, "26825": 12698492928.0, "26830": 12698492928.0, "26835": 12698492928.0, "26840": 12698492928.0, "26845": 12698492928.0, "26850": 12698492928.0, "26855": 12698492928.0, "26860": 12698492928.0, "26865": 12698492928.0, "26870": 12698492928.0, "26875": 12698492928.0, "26880": 12698492928.0, "26885": 12698492928.0, "26890": 12698492928.0, "26895": 12698492928.0, "26900": 12698492928.0, "26905": 12698492928.0, "26910": 12698492928.0, "26915": 12698492928.0, "26920": 12698492928.0, "26925": 12698492928.0, "26930": 12698492928.0, "26935": 12698492928.0, "26940": 12698492928.0, "26945": 12698492928.0, "26950": 12698492928.0, "26955": 12698492928.0, "26960": 12698492928.0, "26965": 12698492928.0, "26970": 12698492928.0, "26975": 12698492928.0, "26980": 12698492928.0, "26985": 12698492928.0, "26990": 12698492928.0, "26995": 12698492928.0, "27000": 12698492928.0, "27005": 12698492928.0, "27010": 12698492928.0, "27015": 12698492928.0, "27020": 12698492928.0, "27025": 12698492928.0, "27030": 12698492928.0, "27035": 12698492928.0, "27040": 12698492928.0, "27045": 12698492928.0, "27050": 12698492928.0, "27055": 12698492928.0, "27060": 12698492928.0, "27065": 12698492928.0, "27070": 12698492928.0, "27075": 12698492928.0, "27080": 12698492928.0, "27085": 12698492928.0, "27090": 12698492928.0, "27095": 12698492928.0, "27100": 12698492928.0, "27105": 12698492928.0, "27110": 12698492928.0, "27115": 12698492928.0, "27120": 12698492928.0, "27125": 12698492928.0, "27130": 12698492928.0, "27135": 12698492928.0, "27140": 12698492928.0, "27145": 12698492928.0, "27150": 12698492928.0, "27155": 12698492928.0, "27160": 12698492928.0, "27165": 12698492928.0, "27170": 12698492928.0, "27175": 12698492928.0, "27180": 12698492928.0, "27185": 12698492928.0, "27190": 12698492928.0, "27195": 12698492928.0, "27200": 12698492928.0, "27205": 12698492928.0, "27210": 12698492928.0, "27215": 12698492928.0, "27220": 12698492928.0, "27225": 12698492928.0, "27230": 12698492928.0, "27235": 12698492928.0, "27240": 12698492928.0, "27245": 12698492928.0, "27250": 12698492928.0, "27255": 12698492928.0, "27260": 12698492928.0, "27265": 
12698492928.0, "27270": 12698492928.0, "27275": 12698492928.0, "27280": 12698492928.0, "27285": 12698492928.0, "27290": 12698492928.0, "27295": 12698492928.0, "27300": 12698492928.0, "27305": 12698492928.0, "27310": 12698492928.0, "27315": 12698492928.0, "27320": 12698492928.0, "27325": 12698492928.0, "27330": 12698492928.0, "27335": 12698492928.0, "27340": 12698492928.0, "27345": 12698492928.0, "27350": 12698492928.0, "27355": 12698492928.0, "27360": 12698492928.0, "27365": 12698492928.0, "27370": 12698492928.0, "27375": 12698492928.0, "27380": 12698492928.0, "27385": 12698492928.0, "27390": 12698492928.0, "27395": 12698492928.0, "27400": 12698492928.0, "27405": 12698492928.0, "27410": 12698492928.0, "27415": 12698492928.0, "27420": 12698492928.0, "27425": 12698492928.0, "27430": 12698492928.0, "27435": 12698492928.0, "27440": 12698492928.0, "27445": 12698492928.0, "27450": 12698492928.0, "27455": 12698492928.0, "27460": 12698492928.0, "27465": 12698492928.0, "27470": 12698492928.0, "27475": 12698492928.0, "27480": 12698492928.0, "27485": 12698492928.0, "27490": 12698492928.0, "27495": 12698492928.0, "27500": 12698492928.0, "27505": 12698492928.0, "27510": 12698492928.0, "27515": 12698492928.0, "27520": 12698492928.0, "27525": 12698492928.0, "27530": 12698492928.0, "27535": 12698492928.0, "27540": 12698492928.0, "27545": 12698492928.0, "27550": 12698492928.0, "27555": 12698492928.0, "27560": 12698492928.0, "27565": 12698492928.0, "27570": 12698492928.0, "27575": 12698492928.0, "27580": 12698492928.0, "27585": 12698492928.0, "27590": 12698492928.0, "27595": 12698492928.0, "27600": 12698492928.0, "27605": 12698492928.0, "27610": 12698492928.0, "27615": 12698492928.0, "27620": 12698492928.0, "27625": 12698492928.0, "27630": 12698492928.0, "27635": 12698492928.0, "27640": 12698492928.0, "27645": 12698492928.0, "27650": 12698492928.0, "27655": 12698492928.0, "27660": 12698492928.0, "27665": 12698492928.0, "27670": 12698492928.0, "27675": 12698492928.0, "27680": 12698492928.0, "27685": 12698492928.0, "27690": 12698492928.0, "27695": 12698492928.0, "27700": 12698492928.0, "27705": 12698492928.0, "27710": 12698492928.0, "27715": 12698492928.0, "27720": 12698492928.0, "27725": 12698492928.0, "27730": 12698492928.0, "27735": 12698492928.0, "27740": 12698492928.0, "27745": 12698492928.0, "27750": 12698492928.0, "27755": 12698492928.0, "27760": 12698492928.0, "27765": 12698492928.0, "27770": 12698492928.0, "27775": 12698492928.0, "27780": 12698492928.0, "27785": 12698492928.0, "27790": 12698492928.0, "27795": 12698492928.0, "27800": 12698492928.0, "27805": 12698492928.0, "27810": 12698492928.0, "27815": 12698492928.0, "27820": 12698492928.0, "27825": 12698492928.0, "27830": 12698492928.0, "27835": 12698492928.0, "27840": 12698492928.0, "27845": 12698492928.0, "27850": 12698492928.0, "27855": 12698492928.0, "27860": 12698492928.0, "27865": 12698492928.0, "27870": 12698492928.0, "27875": 12698492928.0, "27880": 12698492928.0, "27885": 12698492928.0, "27890": 12698492928.0, "27895": 12698492928.0, "27900": 12698492928.0, "27905": 12698492928.0, "27910": 12698492928.0, "27915": 12698492928.0, "27920": 12698492928.0, "27925": 12698492928.0, "27930": 12698492928.0, "27935": 12698492928.0, "27940": 12698492928.0, "27945": 12698492928.0, "27950": 12698492928.0, "27955": 12698492928.0, "27960": 12698492928.0, "27965": 12698492928.0, "27970": 12698492928.0, "27975": 12698492928.0, "27980": 12698492928.0, "27985": 12698492928.0, "27990": 12698492928.0, "27995": 12698492928.0, "28000": 12698492928.0, "28005": 
12698492928.0, "28010": 12698492928.0, "28015": 12698492928.0, "28020": 12698492928.0, "28025": 12698492928.0, "28030": 12698492928.0, "28035": 12698492928.0, "28040": 12698492928.0, "28045": 12698492928.0, "28050": 12698492928.0, "28055": 12698492928.0, "28060": 12698492928.0, "28065": 12698492928.0, "28070": 12698492928.0, "28075": 12698492928.0, "28080": 12698492928.0, "28085": 12698492928.0, "28090": 12698492928.0, "28095": 12698492928.0, "28100": 12698492928.0, "28105": 12698492928.0, "28110": 12698492928.0, "28115": 12698492928.0, "28120": 12698492928.0, "28125": 12698492928.0, "28130": 12698492928.0, "28135": 12698492928.0, "28140": 12698492928.0, "28145": 12698492928.0, "28150": 12698492928.0, "28155": 12698492928.0, "28160": 12698492928.0, "28165": 12698492928.0, "28170": 12698492928.0, "28175": 12698492928.0, "28180": 12698492928.0, "28185": 12698492928.0, "28190": 12698492928.0, "28195": 12698492928.0, "28200": 12698492928.0, "28205": 12698492928.0, "28210": 12698492928.0, "28215": 12698492928.0, "28220": 12698492928.0, "28225": 12698492928.0, "28230": 12698492928.0, "28235": 12698492928.0, "28240": 12698492928.0, "28245": 12698492928.0, "28250": 12698492928.0, "28255": 12698492928.0, "28260": 12698492928.0, "28265": 12698492928.0, "28270": 12698492928.0, "28275": 12698492928.0, "28280": 12698492928.0, "28285": 12698492928.0, "28290": 12698492928.0, "28295": 12698492928.0, "28300": 12698492928.0, "28305": 12698492928.0, "28310": 12698492928.0, "28315": 12698492928.0, "28320": 12698492928.0, "28325": 12698492928.0, "28330": 12698492928.0, "28335": 12698492928.0, "28340": 12698492928.0, "28345": 12698492928.0, "28350": 12698492928.0, "28355": 12698492928.0, "28360": 12698492928.0, "28365": 12698492928.0, "28370": 12698492928.0, "28375": 12698492928.0, "28380": 12698492928.0, "28385": 12698492928.0, "28390": 12698492928.0, "28395": 12698492928.0, "28400": 12698492928.0, "28405": 12698492928.0, "28410": 12698492928.0, "28415": 12698492928.0, "28420": 12698492928.0, "28425": 12698492928.0, "28430": 12698492928.0, "28435": 12698492928.0, "28440": 12698492928.0, "28445": 12698492928.0, "28450": 12698492928.0, "28455": 12698492928.0, "28460": 12698492928.0, "28465": 12698492928.0, "28470": 12698492928.0, "28475": 12698492928.0, "28480": 12698492928.0, "28485": 12698492928.0, "28490": 12698492928.0, "28495": 12698492928.0, "28500": 12698492928.0, "28505": 12698492928.0, "28510": 12698492928.0, "28515": 12698492928.0, "28520": 12698492928.0, "28525": 12698492928.0, "28530": 12698492928.0, "28535": 12698492928.0, "28540": 12698492928.0, "28545": 12698492928.0, "28550": 12698492928.0, "28555": 12698492928.0, "28560": 12698492928.0, "28565": 12698492928.0, "28570": 12698492928.0, "28575": 12698492928.0, "28580": 12698492928.0, "28585": 12698492928.0, "28590": 12698492928.0, "28595": 12698492928.0, "28600": 12698492928.0, "28605": 12698492928.0, "28610": 12698492928.0, "28615": 12698492928.0, "28620": 12698492928.0, "28625": 12698492928.0, "28630": 12698492928.0, "28635": 12698492928.0, "28640": 12698492928.0, "28645": 12698492928.0, "28650": 12698492928.0, "28655": 12698492928.0, "28660": 12698492928.0, "28665": 12698492928.0, "28670": 12698492928.0, "28675": 12698492928.0, "28680": 12698492928.0, "28685": 12698492928.0, "28690": 12698492928.0, "28695": 12698492928.0, "28700": 12698492928.0, "28705": 12698492928.0, "28710": 12698492928.0, "28715": 12698492928.0, "28720": 12698492928.0, "28725": 12698492928.0, "28730": 12698492928.0, "28735": 12698492928.0, "28740": 12698492928.0, "28745": 
12698492928.0, "28750": 12698492928.0, "28755": 12698492928.0, "28760": 12698492928.0, "28765": 12698492928.0, "28770": 12698492928.0, "28775": 12698492928.0, "28780": 12698492928.0, "28785": 12698492928.0, "28790": 12698492928.0, "28795": 12698492928.0, "28800": 12698492928.0, "28805": 12698492928.0, "28810": 12698492928.0, "28815": 12698492928.0, "28820": 12698492928.0, "28825": 12698492928.0, "28830": 12698492928.0, "28835": 12698492928.0, "28840": 12698492928.0, "28845": 12698492928.0, "28850": 12698492928.0, "28855": 12698492928.0, "28860": 12698492928.0, "28865": 12698492928.0, "28870": 12698492928.0, "28875": 12698492928.0, "28880": 12698492928.0, "28885": 12698492928.0, "28890": 12698492928.0, "28895": 12698492928.0, "28900": 12698492928.0, "28905": 12698492928.0, "28910": 12698492928.0, "28915": 12698492928.0, "28920": 12698492928.0, "28925": 12698492928.0, "28930": 12698492928.0, "28935": 12698492928.0, "28940": 12698492928.0, "28945": 12698492928.0, "28950": 12698492928.0, "28955": 12698492928.0, "28960": 12698492928.0, "28965": 12698492928.0, "28970": 12698492928.0, "28975": 12698492928.0, "28980": 12698492928.0, "28985": 12698492928.0, "28990": 12698492928.0, "28995": 12698492928.0, "29000": 12698492928.0, "29005": 12698492928.0, "29010": 12698492928.0, "29015": 12698492928.0, "29020": 12698492928.0, "29025": 12698492928.0, "29030": 12698492928.0, "29035": 12698492928.0, "29040": 12698492928.0, "29045": 12698492928.0, "29050": 12698492928.0, "29055": 12698492928.0, "29060": 12698492928.0, "29065": 12698492928.0, "29070": 12698492928.0, "29075": 12698492928.0, "29080": 12698492928.0, "29085": 12698492928.0, "29090": 12698492928.0, "29095": 12698492928.0, "29100": 12698492928.0, "29105": 12698492928.0, "29110": 12698492928.0, "29115": 12698492928.0, "29120": 12698492928.0, "29125": 12698492928.0, "29130": 12698492928.0, "29135": 12698492928.0, "29140": 12698492928.0, "29145": 12698492928.0, "29150": 12698492928.0, "29155": 12698492928.0, "29160": 12698492928.0, "29165": 12698492928.0, "29170": 12698492928.0, "29175": 12698492928.0, "29180": 12698492928.0, "29185": 12698492928.0, "29190": 12698492928.0, "29195": 12698492928.0, "29200": 12698492928.0, "29205": 12698492928.0, "29210": 12698492928.0, "29215": 12698492928.0, "29220": 12698492928.0, "29225": 12698492928.0, "29230": 12698492928.0, "29235": 12698492928.0, "29240": 12698492928.0, "29245": 12698492928.0, "29250": 12698492928.0, "29255": 12698492928.0, "29260": 12698492928.0, "29265": 12698492928.0, "29270": 12698492928.0, "29275": 12698492928.0, "29280": 12698492928.0, "29285": 12698492928.0, "29290": 12698492928.0, "29295": 12698492928.0, "29300": 12698492928.0, "29305": 12698492928.0, "29310": 12698492928.0, "29315": 12698492928.0, "29320": 12698492928.0, "29325": 12698492928.0, "29330": 12698492928.0, "29335": 12698492928.0, "29340": 12698492928.0, "29345": 12698492928.0, "29350": 12698492928.0, "29355": 12698492928.0, "29360": 12698492928.0, "29365": 12698492928.0, "29370": 12698492928.0, "29375": 12698492928.0, "29380": 12698492928.0, "29385": 12698492928.0, "29390": 12698492928.0, "29395": 12698492928.0, "29400": 12698492928.0, "29405": 12698492928.0, "29410": 12698492928.0, "29415": 12698492928.0, "29420": 12698492928.0, "29425": 12698492928.0, "29430": 12698492928.0, "29435": 12698492928.0, "29440": 12698492928.0, "29445": 12698492928.0, "29450": 12698492928.0, "29455": 12698492928.0, "29460": 12698492928.0, "29465": 12698492928.0, "29470": 12698492928.0, "29475": 12698492928.0, "29480": 12698492928.0, "29485": 
12698492928.0, "29490": 12698492928.0, "29495": 12698492928.0, "29500": 12698492928.0, "29505": 12698492928.0, "29510": 12698492928.0, "29515": 12698492928.0, "29520": 12698492928.0, "29525": 12698492928.0, "29530": 12698492928.0, "29535": 12698492928.0, "29540": 12698492928.0, "29545": 12698492928.0, "29550": 12698492928.0, "29555": 12698492928.0, "29560": 12698492928.0, "29565": 12698492928.0, "29570": 12698492928.0, "29575": 12698492928.0, "29580": 12698492928.0, "29585": 12698492928.0, "29590": 12698492928.0, "29595": 12698492928.0, "29600": 12698492928.0, "29605": 12698492928.0, "29610": 12698492928.0, "29615": 12698492928.0, "29620": 12698492928.0, "29625": 12698492928.0, "29630": 12698492928.0, "29635": 12698492928.0, "29640": 12698492928.0, "29645": 12698492928.0, "29650": 12698492928.0, "29655": 12698492928.0, "29660": 12698492928.0, "29665": 12698492928.0, "29670": 12698492928.0, "29675": 12698492928.0, "29680": 12698492928.0, "29685": 12698492928.0, "29690": 12698492928.0, "29695": 12698492928.0, "29700": 12698492928.0, "29705": 12698492928.0, "29710": 12698492928.0, "29715": 12698492928.0, "29720": 12698492928.0, "29725": 12698492928.0, "29730": 12698492928.0, "29735": 12698492928.0, "29740": 12698492928.0, "29745": 12698492928.0, "29750": 12698492928.0, "29755": 12698492928.0, "29760": 12698492928.0, "29765": 12698492928.0, "29770": 12698492928.0, "29775": 12698492928.0, "29780": 12698492928.0, "29785": 12698492928.0, "29790": 12698492928.0, "29795": 12698492928.0, "29800": 12698492928.0, "29805": 12698492928.0, "29810": 12698492928.0, "29815": 12698492928.0, "29820": 12698492928.0, "29825": 12698492928.0, "29830": 12698492928.0, "29835": 12698492928.0, "29840": 12698492928.0, "29845": 12698492928.0, "29850": 12698492928.0, "29855": 12698492928.0, "29860": 12698492928.0, "29865": 12698492928.0, "29870": 12698492928.0, "29875": 12698492928.0, "29880": 12698492928.0, "29885": 12698492928.0, "29890": 12698492928.0, "29895": 12698492928.0, "29900": 12698492928.0, "29905": 12698492928.0, "29910": 12698492928.0, "29915": 12698492928.0, "29920": 12698492928.0, "29925": 12698492928.0, "29930": 12698492928.0, "29935": 12698492928.0, "29940": 12698492928.0, "29945": 12698492928.0, "29950": 12698492928.0, "29955": 12698492928.0, "29960": 12698492928.0, "29965": 12698492928.0, "29970": 12698492928.0, "29975": 12698492928.0, "29980": 12698492928.0, "29985": 12698492928.0, "29990": 12698492928.0, "29995": 12698492928.0, "30000": 12698492928.0, "30005": 12698492928.0, "30010": 12698492928.0, "30015": 12698492928.0, "30020": 12698492928.0, "30025": 12698492928.0, "30030": 12698492928.0, "30035": 12698492928.0, "30040": 12698492928.0, "30045": 12698492928.0, "30050": 12698492928.0, "30055": 12698492928.0, "30060": 12698492928.0, "30065": 12698492928.0, "30070": 12698492928.0, "30075": 12698492928.0, "30080": 12698492928.0, "30085": 12698492928.0, "30090": 12698492928.0, "30095": 12698492928.0, "30100": 12698492928.0, "30105": 12698492928.0, "30110": 12698492928.0, "30115": 12698492928.0, "30120": 12698492928.0, "30125": 12698492928.0, "30130": 12698492928.0, "30135": 12698492928.0, "30140": 12698492928.0, "30145": 12698492928.0, "30150": 12698492928.0, "30155": 12698492928.0, "30160": 12698492928.0, "30165": 12698492928.0, "30170": 12698492928.0, "30175": 12698492928.0, "30180": 12698492928.0, "30185": 12698492928.0, "30190": 12698492928.0, "30195": 12698492928.0, "30200": 12698492928.0, "30205": 12698492928.0, "30210": 12698492928.0, "30215": 12698492928.0, "30220": 12698492928.0, "30225": 
12698492928.0, "30230": 12698492928.0, "30235": 12698492928.0, "30240": 12698492928.0, "30245": 12698492928.0, "30250": 12698492928.0, "30255": 12698492928.0, "30260": 12698492928.0, "30265": 12698492928.0, "30270": 12698492928.0, "30275": 12698492928.0, "30280": 12698492928.0, "30285": 12698492928.0, "30290": 12698492928.0, "30295": 12698492928.0, "30300": 12698492928.0, "30305": 12698492928.0, "30310": 12698492928.0, "30315": 12698492928.0, "30320": 12698492928.0, "30325": 12698492928.0, "30330": 12698492928.0, "30335": 12698492928.0, "30340": 12698492928.0, "30345": 12698492928.0, "30350": 12698492928.0, "30355": 12698492928.0, "30360": 12698492928.0, "30365": 12698492928.0, "30370": 12698492928.0, "30375": 12698492928.0, "30380": 12698492928.0, "30385": 12698492928.0, "30390": 12698492928.0, "30395": 12698492928.0, "30400": 12698492928.0, "30405": 12698492928.0, "30410": 12698492928.0, "30415": 12698492928.0, "30420": 12698492928.0, "30425": 12698492928.0, "30430": 12698492928.0, "30435": 12698492928.0, "30440": 12698492928.0, "30445": 12698492928.0, "30450": 12698492928.0, "30455": 12698492928.0, "30460": 12698492928.0, "30465": 12698492928.0, "30470": 12698492928.0, "30475": 12698492928.0, "30480": 12698492928.0, "30485": 12698492928.0, "30490": 12698492928.0, "30495": 12698492928.0, "30500": 12698492928.0, "30505": 12698492928.0, "30510": 12698492928.0, "30515": 12698492928.0, "30520": 12698492928.0, "30525": 12698492928.0, "30530": 12698492928.0, "30535": 12698492928.0, "30540": 12698492928.0, "30545": 12698492928.0, "30550": 12698492928.0, "30555": 12698492928.0, "30560": 12698492928.0, "30565": 12698492928.0, "30570": 12698492928.0, "30575": 12698492928.0, "30580": 12698492928.0, "30585": 12698492928.0, "30590": 12698492928.0, "30595": 12698492928.0, "30600": 12698492928.0, "30605": 12698492928.0, "30610": 12698492928.0, "30615": 12698492928.0, "30620": 12698492928.0, "30625": 12698492928.0, "30630": 12698492928.0, "30635": 12698492928.0, "30640": 12698492928.0, "30645": 12698492928.0, "30650": 12698492928.0, "30655": 12698492928.0, "30660": 12698492928.0, "30665": 12698492928.0, "30670": 12698492928.0, "30675": 12698492928.0, "30680": 12698492928.0, "30685": 12698492928.0, "30690": 12698492928.0, "30695": 12698492928.0, "30700": 12698492928.0, "30705": 12698492928.0, "30710": 12698492928.0, "30715": 12698492928.0, "30720": 12698492928.0, "30725": 12698492928.0, "30730": 12698492928.0, "30735": 12698492928.0, "30740": 12698492928.0, "30745": 12698492928.0, "30750": 12698492928.0, "30755": 12698492928.0, "30760": 12698492928.0, "30765": 12698492928.0, "30770": 12698492928.0, "30775": 12698492928.0, "30780": 12698492928.0, "30785": 12698492928.0, "30790": 12698492928.0, "30795": 12698492928.0, "30800": 12698492928.0, "30805": 12698492928.0, "30810": 12698492928.0, "30815": 12698492928.0, "30820": 12698492928.0, "30825": 12698492928.0, "30830": 12698492928.0, "30835": 12698492928.0, "30840": 12698492928.0, "30845": 12698492928.0, "30850": 12698492928.0, "30855": 12698492928.0, "30860": 12698492928.0, "30865": 12698492928.0, "30870": 12698492928.0, "30875": 12698492928.0, "30880": 12698492928.0, "30885": 12698492928.0, "30890": 12698492928.0, "30895": 12698492928.0, "30900": 12698492928.0, "30905": 12698492928.0, "30910": 12698492928.0, "30915": 12698492928.0, "30920": 12698492928.0, "30925": 12698492928.0, "30930": 12698492928.0, "30935": 12698492928.0, "30940": 12698492928.0, "30945": 12698492928.0, "30950": 12698492928.0, "30955": 12698492928.0, "30960": 12698492928.0, "30965": 
12698492928.0, "30970": 12698492928.0, "30975": 12698492928.0, "30980": 12698492928.0, "30985": 12698492928.0, "30990": 12698492928.0, "30995": 12698492928.0, "31000": 12698492928.0, "31005": 12698492928.0, "31010": 12698492928.0, "31015": 12698492928.0, "31020": 12698492928.0, "31025": 12698492928.0, "31030": 12698492928.0, "31035": 12698492928.0, "31040": 12698492928.0, "31045": 12698492928.0, "31050": 12698492928.0, "31055": 12698492928.0, "31060": 12698492928.0, "31065": 12698492928.0, "31070": 12698492928.0, "31075": 12698492928.0, "31080": 12698492928.0, "31085": 12698492928.0, "31090": 12698492928.0, "31095": 12698492928.0, "31100": 12698492928.0, "31105": 12698492928.0, "31110": 12698492928.0, "31115": 12698492928.0, "31120": 12698492928.0, "31125": 12698492928.0, "31130": 12698492928.0, "31135": 12698492928.0, "31140": 12698492928.0, "31145": 12698492928.0, "31150": 12698492928.0, "31155": 12698492928.0, "31160": 12698492928.0, "31165": 12698492928.0, "31170": 12698492928.0, "31175": 12698492928.0, "31180": 12698492928.0, "31185": 12698492928.0, "31190": 12698492928.0, "31195": 12698492928.0, "31200": 12698492928.0, "31205": 12698492928.0, "31210": 12698492928.0, "31215": 12698492928.0, "31220": 12698492928.0, "31225": 12698492928.0, "31230": 12698492928.0, "31235": 12698492928.0, "31240": 12698492928.0, "31245": 12698492928.0, "31250": 12698492928.0, "31255": 12698492928.0, "31260": 12698492928.0, "31265": 12698492928.0, "31270": 12698492928.0, "31275": 12698492928.0, "31280": 12698492928.0, "31285": 12698492928.0, "31290": 12698492928.0, "31295": 12698492928.0, "31300": 12698492928.0, "31305": 12698492928.0, "31310": 12698492928.0, "31315": 12698492928.0, "31320": 12698492928.0, "31325": 12698492928.0, "31330": 12698492928.0, "31335": 12698492928.0, "31340": 12698492928.0, "31345": 12698492928.0, "31350": 12698492928.0, "31355": 12698492928.0, "31360": 12698492928.0, "31365": 12698492928.0, "31370": 12698492928.0, "31375": 12698492928.0, "31380": 12698492928.0, "31385": 12698492928.0, "31390": 12698492928.0, "31395": 12698492928.0, "31400": 12698492928.0, "31405": 12698492928.0, "31410": 12698492928.0, "31415": 12698492928.0, "31420": 12698492928.0, "31425": 12698492928.0, "31430": 12698492928.0, "31435": 12698492928.0, "31440": 12698492928.0, "31445": 12698492928.0, "31450": 12698492928.0, "31455": 12698492928.0, "31460": 12698492928.0, "31465": 12698492928.0, "31470": 12698492928.0, "31475": 12698492928.0, "31480": 12698492928.0, "31485": 12698492928.0, "31490": 12698492928.0, "31495": 12698492928.0, "31500": 12698492928.0, "31505": 12698492928.0, "31510": 12698492928.0, "31515": 12698492928.0, "31520": 12698492928.0, "31525": 12698492928.0, "31530": 12698492928.0, "31535": 12698492928.0, "31540": 12698492928.0, "31545": 12698492928.0, "31550": 12698492928.0, "31555": 12698492928.0, "31560": 12698492928.0, "31565": 12698492928.0, "31570": 12698492928.0, "31575": 12698492928.0, "31580": 12698492928.0, "31585": 12698492928.0, "31590": 12698492928.0, "31595": 12698492928.0, "31600": 12698492928.0, "31605": 12698492928.0, "31610": 12698492928.0, "31615": 12698492928.0, "31620": 12698492928.0, "31625": 12698492928.0, "31630": 12698492928.0, "31635": 12698492928.0, "31640": 12698492928.0, "31645": 12698492928.0, "31650": 12698492928.0, "31655": 12698492928.0, "31660": 12698492928.0, "31665": 12698492928.0, "31670": 12698492928.0, "31675": 12698492928.0, "31680": 12698492928.0, "31685": 12698492928.0, "31690": 12698492928.0, "31695": 12698492928.0, "31700": 12698492928.0, "31705": 
12698492928.0, "31710": 12698492928.0, "31715": 12698492928.0, "31720": 12698492928.0, "31725": 12698492928.0, "31730": 12698492928.0, "31735": 12698492928.0, "31740": 12698492928.0, "31745": 12698492928.0, "31750": 12698492928.0, "31755": 12698492928.0, "31760": 12698492928.0, "31765": 12698492928.0, "31770": 12698492928.0, "31775": 12698492928.0, "31780": 12698492928.0, "31785": 12698492928.0, "31790": 12698492928.0, "31795": 12698492928.0, "31800": 12698492928.0, "31805": 12698492928.0, "31810": 12698492928.0, "31815": 12698492928.0, "31820": 12698492928.0, "31825": 12698492928.0, "31830": 12698492928.0, "31835": 12698492928.0, "31840": 12698492928.0, "31845": 12698492928.0, "31850": 12698492928.0, "31855": 12698492928.0, "31860": 12698492928.0, "31865": 12698492928.0, "31870": 12698492928.0, "31875": 12698492928.0, "31880": 12698492928.0, "31885": 12698492928.0, "31890": 12698492928.0, "31895": 12698492928.0, "31900": 12698492928.0, "31905": 12698492928.0, "31910": 12698492928.0, "31915": 12698492928.0, "31920": 12698492928.0, "31925": 12698492928.0, "31930": 12698492928.0, "31935": 12698492928.0, "31940": 12698492928.0, "31945": 12698492928.0, "31950": 12698492928.0, "31955": 12698492928.0, "31960": 12698492928.0, "31965": 12698492928.0, "31970": 12698492928.0, "31975": 12698492928.0, "31980": 12698492928.0, "31985": 12698492928.0, "31990": 12698492928.0, "31995": 12698492928.0, "32000": 12698492928.0, "32005": 12698492928.0, "32010": 12698492928.0, "32015": 12698492928.0, "32020": 12698492928.0, "32025": 12698492928.0, "32030": 12698492928.0, "32035": 12698492928.0, "32040": 12698492928.0, "32045": 12698492928.0, "32050": 12698492928.0, "32055": 12698492928.0, "32060": 12698492928.0, "32065": 12698492928.0, "32070": 12698492928.0, "32075": 12698492928.0, "32080": 12698492928.0, "32085": 12698492928.0, "32090": 12698492928.0, "32095": 12698492928.0, "32100": 12698492928.0, "32105": 12698492928.0, "32110": 12698492928.0, "32115": 12698492928.0, "32120": 12698492928.0, "32125": 12698492928.0, "32130": 12698492928.0, "32135": 12698492928.0, "32140": 12698492928.0, "32145": 12698492928.0, "32150": 12698492928.0, "32155": 12698492928.0, "32160": 12698492928.0, "32165": 12698492928.0, "32170": 12698492928.0, "32175": 12698492928.0, "32180": 12698492928.0, "32185": 12698492928.0, "32190": 12698492928.0, "32195": 12698492928.0, "32200": 12698492928.0, "32205": 12698492928.0, "32210": 12698492928.0, "32215": 12698492928.0, "32220": 12698492928.0, "32225": 12698492928.0, "32230": 12698492928.0, "32235": 12698492928.0, "32240": 12698492928.0, "32245": 12698492928.0, "32250": 12698492928.0, "32255": 12698492928.0, "32260": 12698492928.0, "32265": 12698492928.0, "32270": 12698492928.0, "32275": 12698492928.0, "32280": 12698492928.0, "32285": 12698492928.0, "32290": 12698492928.0, "32295": 12698492928.0, "32300": 12698492928.0, "32305": 12698492928.0, "32310": 12698492928.0, "32315": 12698492928.0, "32320": 12698492928.0, "32325": 12698492928.0, "32330": 12698492928.0, "32335": 12698492928.0, "32340": 12698492928.0, "32345": 12698492928.0, "32350": 12698492928.0, "32355": 12698492928.0, "32360": 12698492928.0, "32365": 12698492928.0, "32370": 12698492928.0, "32375": 12698492928.0, "32380": 12698492928.0, "32385": 12698492928.0, "32390": 12698492928.0, "32395": 12698492928.0, "32400": 12698492928.0, "32405": 12698492928.0, "32410": 12698492928.0, "32415": 12698492928.0, "32420": 12698492928.0, "32425": 12698492928.0, "32430": 12698492928.0, "32435": 12698492928.0, "32440": 12698492928.0, "32445": 
12698492928.0, "32450": 12698492928.0, "32455": 12698492928.0, "32460": 12698492928.0, "32465": 12698492928.0, "32470": 12698492928.0, "32475": 12698492928.0, "32480": 12698492928.0, "32485": 12698492928.0, "32490": 12698492928.0, "32495": 12698492928.0, "32500": 12698492928.0, "32505": 12698492928.0, "32510": 12698492928.0, "32515": 12698492928.0, "32520": 12698492928.0, "32525": 12698492928.0, "32530": 12698492928.0, "32535": 12698492928.0, "32540": 12698492928.0, "32545": 12698492928.0, "32550": 12698492928.0, "32555": 12698492928.0, "32560": 12698492928.0, "32565": 12698492928.0, "32570": 12698492928.0, "32575": 12698492928.0, "32580": 12698492928.0, "32585": 12698492928.0, "32590": 12698492928.0, "32595": 12698492928.0, "32600": 12698492928.0, "32605": 12698492928.0, "32610": 12698492928.0, "32615": 12698492928.0, "32620": 12698492928.0, "32625": 12698492928.0, "32630": 12698492928.0, "32635": 12698492928.0, "32640": 12698492928.0, "32645": 12698492928.0, "32650": 12698492928.0, "32655": 12698492928.0, "32660": 12698492928.0, "32665": 12698492928.0, "32670": 12698492928.0, "32675": 12698492928.0, "32680": 12698492928.0, "32685": 12698492928.0, "32690": 12698492928.0, "32695": 12698492928.0, "32700": 12698492928.0, "32705": 12698492928.0, "32710": 12698492928.0, "32715": 12698492928.0, "32720": 12698492928.0, "32725": 12698492928.0, "32730": 12698492928.0, "32735": 12698492928.0, "32740": 12698492928.0, "32745": 12698492928.0, "32750": 12698492928.0, "32755": 12698492928.0, "32760": 12698492928.0, "32765": 12698492928.0, "32770": 12698492928.0, "32775": 12698492928.0, "32780": 12698492928.0, "32785": 12698492928.0, "32790": 12698492928.0, "32795": 12698492928.0, "32800": 12698492928.0, "32805": 12698492928.0, "32810": 12698492928.0, "32815": 12698492928.0, "32820": 12698492928.0, "32825": 12698492928.0, "32830": 12698492928.0, "32835": 12698492928.0, "32840": 12698492928.0, "32845": 12698492928.0, "32850": 12698492928.0, "32855": 12698492928.0, "32860": 12698492928.0, "32865": 12698492928.0, "32870": 12698492928.0, "32875": 12698492928.0, "32880": 12698492928.0, "32885": 12698492928.0, "32890": 12698492928.0, "32895": 12698492928.0, "32900": 12698492928.0, "32905": 12698492928.0, "32910": 12698492928.0, "32915": 12698492928.0, "32920": 12698492928.0, "32925": 12698492928.0, "32930": 12698492928.0, "32935": 12698492928.0, "32940": 12698492928.0, "32945": 12698492928.0, "32950": 12698492928.0, "32955": 12698492928.0, "32960": 12698492928.0, "32965": 12698492928.0, "32970": 12698492928.0, "32975": 12698492928.0, "32980": 12698492928.0, "32985": 12698492928.0, "32990": 12698492928.0, "32995": 12698492928.0, "33000": 12698492928.0, "33005": 12698492928.0, "33010": 12698492928.0, "33015": 12698492928.0, "33020": 12698492928.0, "33025": 12698492928.0, "33030": 12698492928.0, "33035": 12698492928.0, "33040": 12698492928.0, "33045": 12698492928.0, "33050": 12698492928.0, "33055": 12698492928.0, "33060": 12698492928.0, "33065": 12698492928.0, "33070": 12698492928.0, "33075": 12698492928.0, "33080": 12698492928.0, "33085": 12698492928.0, "33090": 12698492928.0, "33095": 12698492928.0, "33100": 12698492928.0, "33105": 12698492928.0, "33110": 12698492928.0, "33115": 12698492928.0, "33120": 12698492928.0, "33125": 12698492928.0, "33130": 12698492928.0, "33135": 12698492928.0, "33140": 12698492928.0, "33145": 12698492928.0, "33150": 12698492928.0, "33155": 12698492928.0, "33160": 12698492928.0, "33165": 12698492928.0, "33170": 12698492928.0, "33175": 12698492928.0, "33180": 12698492928.0, "33185": 
12698492928.0, "33190": 12698492928.0, "33195": 12698492928.0, "33200": 12698492928.0, "33205": 12698492928.0, "33210": 12698492928.0, "33215": 12698492928.0, "33220": 12698492928.0, "33225": 12698492928.0, "33230": 12698492928.0, "33235": 12698492928.0, "33240": 12698492928.0, "33245": 12698492928.0, "33250": 12698492928.0, "33255": 12698492928.0, "33260": 12698492928.0, "33265": 12698492928.0, "33270": 12698492928.0, "33275": 12698492928.0, "33280": 12698492928.0, "33285": 12698492928.0, "33290": 12698492928.0, "33295": 12698492928.0, "33300": 12698492928.0, "33305": 12698492928.0, "33310": 12698492928.0, "33315": 12698492928.0, "33320": 12698492928.0, "33325": 12698492928.0, "33330": 12698492928.0, "33335": 12698492928.0, "33340": 12698492928.0, "33345": 12698492928.0, "33350": 12698492928.0, "33355": 12698492928.0, "33360": 12698492928.0, "33365": 12698492928.0, "33370": 12698492928.0, "33375": 12698492928.0, "33380": 12698492928.0, "33385": 12698492928.0, "33390": 12698492928.0, "33395": 12698492928.0, "33400": 12698492928.0, "33405": 12698492928.0, "33410": 12698492928.0, "33415": 12698492928.0, "33420": 12698492928.0, "33425": 12698492928.0, "33430": 12698492928.0, "33435": 12698492928.0, "33440": 12698492928.0, "33445": 12698492928.0, "33450": 12698492928.0, "33455": 12698492928.0, "33460": 12698492928.0, "33465": 12698492928.0, "33470": 12698492928.0, "33475": 12698492928.0, "33480": 12698492928.0, "33485": 12698492928.0, "33490": 12698492928.0, "33495": 12698492928.0, "33500": 12698492928.0, "33505": 12698492928.0, "33510": 12698492928.0, "33515": 12698492928.0, "33520": 12698492928.0, "33525": 12698492928.0, "33530": 12698492928.0, "33535": 12698492928.0, "33540": 12698492928.0, "33545": 12698492928.0, "33550": 12698492928.0, "33555": 12698492928.0, "33560": 12698492928.0, "33565": 12698492928.0, "33570": 12698492928.0, "33575": 12698492928.0, "33580": 12698492928.0, "33585": 12698492928.0, "33590": 12698492928.0, "33595": 12698492928.0, "33600": 12698492928.0, "33605": 12698492928.0, "33610": 12698492928.0, "33615": 12698492928.0, "33620": 12698492928.0, "33625": 12698492928.0, "33630": 12698492928.0, "33635": 12698492928.0, "33640": 12698492928.0, "33645": 12698492928.0, "33650": 12698492928.0, "33655": 12698492928.0, "33660": 12698492928.0, "33665": 12698492928.0, "33670": 12698492928.0, "33675": 12698492928.0, "33680": 12698492928.0, "33685": 12698492928.0, "33690": 12698492928.0, "33695": 12698492928.0, "33700": 12698492928.0, "33705": 12698492928.0, "33710": 12698492928.0, "33715": 12698492928.0, "33720": 12698492928.0, "33725": 12698492928.0, "33730": 12698492928.0, "33735": 12698492928.0, "33740": 12698492928.0, "33745": 12698492928.0, "33750": 12698492928.0, "33755": 12698492928.0, "33760": 12698492928.0, "33765": 12698492928.0, "33770": 12698492928.0, "33775": 12698492928.0, "33780": 12698492928.0, "33785": 12698492928.0, "33790": 12698492928.0, "33795": 12698492928.0, "33800": 12698492928.0, "33805": 12698492928.0, "33810": 12698492928.0, "33815": 12698492928.0, "33820": 12698492928.0, "33825": 12698492928.0, "33830": 12698492928.0, "33835": 12698492928.0, "33840": 12698492928.0, "33845": 12698492928.0, "33850": 12698492928.0, "33855": 12698492928.0, "33860": 12698492928.0, "33865": 12698492928.0, "33870": 12698492928.0, "33875": 12698492928.0, "33880": 12698492928.0, "33885": 12698492928.0, "33890": 12698492928.0, "33895": 12698492928.0, "33900": 12698492928.0, "33905": 12698492928.0, "33910": 12698492928.0, "33915": 12698492928.0, "33920": 12698492928.0, "33925": 
12698492928.0, "33930": 12698492928.0, "33935": 12698492928.0, "33940": 12698492928.0, "33945": 12698492928.0, "33950": 12698492928.0, "33955": 12698492928.0, "33960": 12698492928.0, "33965": 12698492928.0, "33970": 12698492928.0, "33975": 12698492928.0, "33980": 12698492928.0, "33985": 12698492928.0, "33990": 12698492928.0, "33995": 12698492928.0, "34000": 12698492928.0, "34005": 12698492928.0, "34010": 12698492928.0, "34015": 12698492928.0, "34020": 12698492928.0, "34025": 12698492928.0, "34030": 12698492928.0, "34035": 12698492928.0, "34040": 12698492928.0, "34045": 12698492928.0, "34050": 12698492928.0, "34055": 12698492928.0, "34060": 12698492928.0, "34065": 12698492928.0, "34070": 12698492928.0, "34075": 12698492928.0, "34080": 12698492928.0, "34085": 12698492928.0, "34090": 12698492928.0, "34095": 12698492928.0, "34100": 12698492928.0, "34105": 12698492928.0, "34110": 12698492928.0, "34115": 12698492928.0, "34120": 12698492928.0, "34125": 12698492928.0, "34130": 12698492928.0, "34135": 12698492928.0, "34140": 12698492928.0, "34145": 12698492928.0, "34150": 12698492928.0, "34155": 12698492928.0, "34160": 12698492928.0, "34165": 12698492928.0, "34170": 12698492928.0, "34175": 12698492928.0, "34180": 12698492928.0, "34185": 12698492928.0, "34190": 12698492928.0, "34195": 12698492928.0, "34200": 12698492928.0, "34205": 12698492928.0, "34210": 12698492928.0, "34215": 12698492928.0, "34220": 12698492928.0, "34225": 12698492928.0, "34230": 12698492928.0, "34235": 12698492928.0, "34240": 12698492928.0, "34245": 12698492928.0, "34250": 12698492928.0, "34255": 12698492928.0, "34260": 12698492928.0, "34265": 12698492928.0, "34270": 12698492928.0, "34275": 12698492928.0, "34280": 12698492928.0, "34285": 12698492928.0, "34290": 12698492928.0, "34295": 12698492928.0, "34300": 12698492928.0, "34305": 12698492928.0, "34310": 12698492928.0, "34315": 12698492928.0, "34320": 12698492928.0, "34325": 12698492928.0, "34330": 12698492928.0, "34335": 12698492928.0, "34340": 12698492928.0, "34345": 12698492928.0, "34350": 12698492928.0, "34355": 12698492928.0, "34360": 12698492928.0, "34365": 12698492928.0, "34370": 12698492928.0, "34375": 12698492928.0, "34380": 12698492928.0, "34385": 12698492928.0, "34390": 12698492928.0, "34395": 12698492928.0, "34400": 12698492928.0, "34405": 12698492928.0, "34410": 12698492928.0, "34415": 12698492928.0, "34420": 12698492928.0, "34425": 12698492928.0, "34430": 12698492928.0, "34435": 12698492928.0, "34440": 12698492928.0, "34445": 12698492928.0, "34450": 12698492928.0, "34455": 12698492928.0, "34460": 12698492928.0, "34465": 12698492928.0, "34470": 12698492928.0, "34475": 12698492928.0, "34480": 12698492928.0, "34485": 12698492928.0, "34490": 12698492928.0, "34495": 12698492928.0, "34500": 12698492928.0, "34505": 12698492928.0, "34510": 12698492928.0, "34515": 12698492928.0, "34520": 12698492928.0, "34525": 12698492928.0, "34530": 12698492928.0, "34535": 12698492928.0, "34540": 12698492928.0, "34545": 12698492928.0, "34550": 12698492928.0, "34555": 12698492928.0, "34560": 12698492928.0, "34565": 12698492928.0, "34570": 12698492928.0, "34575": 12698492928.0, "34580": 12698492928.0, "34585": 12698492928.0, "34590": 12698492928.0, "34595": 12698492928.0, "34600": 12698492928.0, "34605": 12698492928.0, "34610": 12698492928.0, "34615": 12698492928.0, "34620": 12698492928.0, "34625": 12698492928.0, "34630": 12698492928.0, "34635": 12698492928.0, "34640": 12698492928.0, "34645": 12698492928.0, "34650": 12698492928.0, "34655": 12698492928.0, "34660": 12698492928.0, "34665": 
12698492928.0, "34670": 12698492928.0, "34675": 12698492928.0, "34680": 12698492928.0, "34685": 12698492928.0, "34690": 12698492928.0, "34695": 12698492928.0, "34700": 12698492928.0, "34705": 12698492928.0, "34710": 12698492928.0, "34715": 12698492928.0, "34720": 12698492928.0, "34725": 12698492928.0, "34730": 12698492928.0, "34735": 12698492928.0, "34740": 12698492928.0, "34745": 12698492928.0, "34750": 12698492928.0, "34755": 12698492928.0, "34760": 12698492928.0, "34765": 12698492928.0, "34770": 12698492928.0, "34775": 12698492928.0, "34780": 12698492928.0, "34785": 12698492928.0, "34790": 12698492928.0, "34795": 12698492928.0, "34800": 12698492928.0, "34805": 12698492928.0, "34810": 12698492928.0, "34815": 12698492928.0, "34820": 12698492928.0, "34825": 12698492928.0, "34830": 12698492928.0, "34835": 12698492928.0, "34840": 12698492928.0, "34845": 12698492928.0, "34850": 12698492928.0, "34855": 12698492928.0, "34860": 12698492928.0, "34865": 12698492928.0, "34870": 12698492928.0, "34875": 12698492928.0, "34880": 12698492928.0, "34885": 12698492928.0, "34890": 12698492928.0, "34895": 12698492928.0, "34900": 12698492928.0, "34905": 12698492928.0, "34910": 12698492928.0, "34915": 12698492928.0, "34920": 12698492928.0, "34925": 12698492928.0, "34930": 12698492928.0, "34935": 12698492928.0, "34940": 12698492928.0, "34945": 12698492928.0, "34950": 12698492928.0, "34955": 12698492928.0, "34960": 12698492928.0, "34965": 12698492928.0, "34970": 12698492928.0, "34975": 12698492928.0, "34980": 12698492928.0, "34985": 12698492928.0, "34990": 12698492928.0, "34995": 12698492928.0, "35000": 12698492928.0, "35005": 12698492928.0, "35010": 12698492928.0, "35015": 12698492928.0, "35020": 12698492928.0, "35025": 12698492928.0, "35030": 12698492928.0, "35035": 12698492928.0, "35040": 12698492928.0, "35045": 12698492928.0, "35050": 12698492928.0, "35055": 12698492928.0, "35060": 12698492928.0, "35065": 12698492928.0, "35070": 12698492928.0, "35075": 12698492928.0, "35080": 12698492928.0, "35085": 12698492928.0, "35090": 12698492928.0, "35095": 12698492928.0, "35100": 12698492928.0, "35105": 12698492928.0, "35110": 12698492928.0, "35115": 12698492928.0, "35120": 12698492928.0, "35125": 12698492928.0, "35130": 12698492928.0, "35135": 12698492928.0, "35140": 12698492928.0, "35145": 12698492928.0, "35150": 12698492928.0, "35155": 12698492928.0, "35160": 12698492928.0, "35165": 12698492928.0, "35170": 12698492928.0, "35175": 12698492928.0, "35180": 12698492928.0, "35185": 12698492928.0, "35190": 12698492928.0, "35195": 12698492928.0, "35200": 12698492928.0, "35205": 12698492928.0, "35210": 12698492928.0, "35215": 12698492928.0, "35220": 12698492928.0, "35225": 12698492928.0, "35230": 12698492928.0, "35235": 12698492928.0, "35240": 12698492928.0, "35245": 12698492928.0, "35250": 12698492928.0, "35255": 12698492928.0, "35260": 12698492928.0, "35265": 12698492928.0, "35270": 12698492928.0, "35275": 12698492928.0, "35280": 12698492928.0, "35285": 12698492928.0, "35290": 12698492928.0, "35295": 12698492928.0, "35300": 12698492928.0, "35305": 12698492928.0, "35310": 12698492928.0, "35315": 12698492928.0, "35320": 12698492928.0, "35325": 12698492928.0, "35330": 12698492928.0, "35335": 12698492928.0, "35340": 12698492928.0, "35345": 12698492928.0, "35350": 12698492928.0, "35355": 12698492928.0, "35360": 12698492928.0, "35365": 12698492928.0, "35370": 12698492928.0, "35375": 12698492928.0, "35380": 12698492928.0, "35385": 12698492928.0, "35390": 12698492928.0, "35395": 12698492928.0, "35400": 12698492928.0, "35405": 
12698492928.0, "35410": 12698492928.0, "35415": 12698492928.0, "35420": 12698492928.0, "35425": 12698492928.0, "35430": 12698492928.0, "35435": 12698492928.0, "35440": 12698492928.0, "35445": 12698492928.0, "35450": 12698492928.0, "35455": 12698492928.0, "35460": 12698492928.0, "35465": 12698492928.0, "35470": 12698492928.0, "35475": 12698492928.0, "35480": 12698492928.0, "35485": 12698492928.0, "35490": 12698492928.0, "35495": 12698492928.0, "35500": 12698492928.0, "35505": 12698492928.0, "35510": 12698492928.0, "35515": 12698492928.0, "35520": 12698492928.0, "35525": 12698492928.0, "35530": 12698492928.0, "35535": 12698492928.0, "35540": 12698492928.0, "35545": 12698492928.0, "35550": 12698492928.0, "35555": 12698492928.0, "35560": 12698492928.0, "35565": 12698492928.0, "35570": 12698492928.0, "35575": 12698492928.0, "35580": 12698492928.0, "35585": 12698492928.0, "35590": 12698492928.0, "35595": 12698492928.0, "35600": 12698492928.0, "35605": 12698492928.0, "35610": 12698492928.0, "35615": 12698492928.0, "35620": 12698492928.0, "35625": 12698492928.0, "35630": 12698492928.0, "35635": 12698492928.0, "35640": 12698492928.0, "35645": 12698492928.0, "35650": 12698492928.0, "35655": 12698492928.0, "35660": 12698492928.0, "35665": 12698492928.0, "35670": 12698492928.0, "35675": 12698492928.0, "35680": 12698492928.0, "35685": 12698492928.0, "35690": 12698492928.0, "35695": 12698492928.0, "35700": 12698492928.0, "35705": 12698492928.0, "35710": 12698492928.0, "35715": 12698492928.0, "35720": 12698492928.0, "35725": 12698492928.0, "35730": 12698492928.0, "35735": 12698492928.0, "35740": 12698492928.0, "35745": 12698492928.0, "35750": 12698492928.0, "35755": 12698492928.0, "35760": 12698492928.0, "35765": 12698492928.0, "35770": 12698492928.0, "35775": 12698492928.0, "35780": 12698492928.0, "35785": 12698492928.0, "35790": 12698492928.0, "35795": 12698492928.0, "35800": 12698492928.0, "35805": 12698492928.0, "35810": 12698492928.0, "35815": 12698492928.0, "35820": 12698492928.0, "35825": 12698492928.0, "35830": 12698492928.0, "35835": 12698492928.0, "35840": 12698492928.0, "35845": 12698492928.0, "35850": 12698492928.0, "35855": 12698492928.0, "35860": 12698492928.0, "35865": 12698492928.0, "35870": 12698492928.0, "35875": 12698492928.0, "35880": 12698492928.0, "35885": 12698492928.0, "35890": 12698492928.0, "35895": 12698492928.0, "35900": 12698492928.0, "35905": 12698492928.0, "35910": 12698492928.0, "35915": 12698492928.0, "35920": 12698492928.0, "35925": 12698492928.0, "35930": 12698492928.0, "35935": 12698492928.0, "35940": 12698492928.0, "35945": 12698492928.0, "35950": 12698492928.0, "35955": 12698492928.0, "35960": 12698492928.0, "35965": 12698492928.0, "35970": 12698492928.0, "35975": 12698492928.0, "35980": 12698492928.0, "35985": 12698492928.0, "35990": 12698492928.0, "35995": 12698492928.0, "36000": 12698492928.0, "36005": 12698492928.0, "36010": 12698492928.0, "36015": 12698492928.0, "36020": 12698492928.0, "36025": 12698492928.0, "36030": 12698492928.0, "36035": 12698492928.0, "36040": 12698492928.0, "36045": 12698492928.0, "36050": 12698492928.0, "36055": 12698492928.0, "36060": 12698492928.0, "36065": 12698492928.0, "36070": 12698492928.0, "36075": 12698492928.0, "36080": 12698492928.0, "36085": 12698492928.0, "36090": 12698492928.0, "36095": 12698492928.0, "36100": 12698492928.0, "36105": 12698492928.0, "36110": 12698492928.0, "36115": 12698492928.0, "36120": 12698492928.0, "36125": 12698492928.0, "36130": 12698492928.0, "36135": 12698492928.0, "36140": 12698492928.0, "36145": 
12698492928.0, "36150": 12698492928.0, "36155": 12698492928.0, "36160": 12698492928.0, "36165": 12698492928.0, "36170": 12698492928.0, "36175": 12698492928.0, "36180": 12698492928.0, "36185": 12698492928.0, "36190": 12698492928.0, "36195": 12698492928.0, "36200": 12698492928.0, "36205": 12698492928.0, "36210": 12698492928.0, "36215": 12698492928.0, "36220": 12698492928.0, "36225": 12698492928.0, "36230": 12698492928.0, "36235": 12698492928.0, "36240": 12698492928.0, "36245": 12698492928.0, "36250": 12698492928.0, "36255": 12698492928.0, "36260": 12698492928.0, "36265": 12698492928.0, "36270": 12698492928.0, "36275": 12698492928.0, "36280": 12698492928.0, "36285": 12698492928.0, "36290": 12698492928.0, "36295": 12698492928.0, "36300": 12698492928.0, "36305": 12698492928.0, "36310": 12698492928.0, "36315": 12698492928.0, "36320": 12698492928.0, "36325": 12698492928.0, "36330": 12698492928.0, "36335": 12698492928.0, "36340": 12698492928.0, "36345": 12698492928.0, "36350": 12698492928.0, "36355": 12698492928.0, "36360": 12698492928.0, "36365": 12698492928.0, "36370": 12698492928.0, "36375": 12698492928.0, "36380": 12698492928.0, "36385": 12698492928.0, "36390": 12698492928.0, "36395": 12698492928.0, "36400": 12698492928.0, "36405": 12698492928.0, "36410": 12698492928.0, "36415": 12698492928.0, "36420": 12698492928.0, "36425": 12698492928.0, "36430": 12698492928.0, "36435": 12698492928.0, "36440": 12698492928.0, "36445": 12698492928.0, "36450": 12698492928.0, "36455": 12698492928.0, "36460": 12698492928.0, "36465": 12698492928.0, "36470": 12698492928.0, "36475": 12698492928.0, "36480": 12698492928.0, "36485": 12698492928.0, "36490": 12698492928.0, "36495": 12698492928.0, "36500": 12698492928.0, "36505": 12698492928.0, "36510": 12698492928.0, "36515": 12698492928.0, "36520": 12698492928.0, "36525": 12698492928.0, "36530": 12698492928.0, "36535": 12698492928.0, "36540": 12698492928.0, "36545": 12698492928.0, "36550": 12698492928.0, "36555": 12698492928.0, "36560": 12698492928.0, "36565": 12698492928.0, "36570": 12698492928.0, "36575": 12698492928.0, "36580": 12698492928.0, "36585": 12698492928.0, "36590": 12698492928.0, "36595": 12698492928.0, "36600": 12698492928.0, "36605": 12698492928.0, "36610": 12698492928.0, "36615": 12698492928.0, "36620": 12698492928.0, "36625": 12698492928.0, "36630": 12698492928.0, "36635": 12698492928.0, "36640": 12698492928.0, "36645": 12698492928.0, "36650": 12698492928.0, "36655": 12698492928.0, "36660": 12698492928.0, "36665": 12698492928.0, "36670": 12698492928.0, "36675": 12698492928.0, "36680": 12698492928.0, "36685": 12698492928.0, "36690": 12698492928.0, "36695": 12698492928.0, "36700": 12698492928.0, "36705": 12698492928.0, "36710": 12698492928.0, "36715": 12698492928.0, "36720": 12698492928.0, "36725": 12698492928.0, "36730": 12698492928.0, "36735": 12698492928.0, "36740": 12698492928.0, "36745": 12698492928.0, "36750": 12698492928.0, "36755": 12698492928.0, "36760": 12698492928.0, "36765": 12698492928.0, "36770": 12698492928.0, "36775": 12698492928.0, "36780": 12698492928.0, "36785": 12698492928.0, "36790": 12698492928.0, "36795": 12698492928.0, "36800": 12698492928.0, "36805": 12698492928.0, "36810": 12698492928.0, "36815": 12698492928.0, "36820": 12698492928.0, "36825": 12698492928.0, "36830": 12698492928.0, "36835": 12698492928.0, "36840": 12698492928.0, "36845": 12698492928.0, "36850": 12698492928.0, "36855": 12698492928.0, "36860": 12698492928.0, "36865": 12698492928.0, "36870": 12698492928.0, "36875": 12698492928.0, "36880": 12698492928.0, "36885": 
12698492928.0, "36890": 12698492928.0, "36895": 12698492928.0, "36900": 12698492928.0, "36905": 12698492928.0, "36910": 12698492928.0, "36915": 12698492928.0, "36920": 12698492928.0, "36925": 12698492928.0, "36930": 12698492928.0, "36935": 12698492928.0, "36940": 12698492928.0, "36945": 12698492928.0, "36950": 12698492928.0, "36955": 12698492928.0, "36960": 12698492928.0, "36965": 12698492928.0, "36970": 12698492928.0, "36975": 12698492928.0, "36980": 12698492928.0, "36985": 12698492928.0, "36990": 12698492928.0, "36995": 12698492928.0, "37000": 12698492928.0, "37005": 12698492928.0, "37010": 12698492928.0, "37015": 12698492928.0, "37020": 12698492928.0, "37025": 12698492928.0, "37030": 12698492928.0, "37035": 12698492928.0, "37040": 12698492928.0, "37045": 12698492928.0, "37050": 12698492928.0, "37055": 12698492928.0, "37060": 12698492928.0, "37065": 12698492928.0, "37070": 12698492928.0, "37075": 12698492928.0, "37080": 12698492928.0, "37085": 12698492928.0, "37090": 12698492928.0, "37095": 12698492928.0, "37100": 12698492928.0, "37105": 12698492928.0, "37110": 12698492928.0, "37115": 12698492928.0, "37120": 12698492928.0, "37125": 12698492928.0, "37130": 12698492928.0, "37135": 12698492928.0, "37140": 12698492928.0, "37145": 12698492928.0, "37150": 12698492928.0, "37155": 12698492928.0, "37160": 12698492928.0, "37165": 12698492928.0, "37170": 12698492928.0, "37175": 12698492928.0, "37180": 12698492928.0, "37185": 12698492928.0, "37190": 12698492928.0, "37195": 12698492928.0, "37200": 12698492928.0, "37205": 12698492928.0, "37210": 12698492928.0, "37215": 12698492928.0, "37220": 12698492928.0, "37225": 12698492928.0, "37230": 12698492928.0, "37235": 12698492928.0, "37240": 12698492928.0, "37245": 12698492928.0, "37250": 12698492928.0, "37255": 12698492928.0, "37260": 12698492928.0, "37265": 12698492928.0, "37270": 12698492928.0, "37275": 12698492928.0, "37280": 12698492928.0, "37285": 12698492928.0, "37290": 12698492928.0, "37295": 12698492928.0, "37300": 12698492928.0, "37305": 12698492928.0, "37310": 12698492928.0, "37315": 12698492928.0, "37320": 12698492928.0, "37325": 12698492928.0, "37330": 12698492928.0, "37335": 12698492928.0, "37340": 12698492928.0, "37345": 12698492928.0, "37350": 12698492928.0, "37355": 12698492928.0, "37360": 12698492928.0, "37365": 12698492928.0, "37370": 12698492928.0, "37375": 12698492928.0, "37380": 12698492928.0, "37385": 12698492928.0, "37390": 12698492928.0, "37395": 12698492928.0, "37400": 12698492928.0, "37405": 12698492928.0, "37410": 12698492928.0, "37415": 12698492928.0, "37420": 12698492928.0, "37425": 12698492928.0, "37430": 12698492928.0, "37435": 12698492928.0, "37440": 12698492928.0, "37445": 12698492928.0, "37450": 12698492928.0, "37455": 12698492928.0, "37460": 12698492928.0, "37465": 12698492928.0, "37470": 12698492928.0, "37475": 12698492928.0, "37480": 12698492928.0, "37485": 12698492928.0, "37490": 12698492928.0, "37495": 12698492928.0, "37500": 12698492928.0, "37505": 12698492928.0, "37510": 12698492928.0, "37515": 12698492928.0, "37520": 12698492928.0, "37525": 12698492928.0, "37530": 12698492928.0, "37535": 12698492928.0, "37540": 12698492928.0, "37545": 12698492928.0, "37550": 12698492928.0, "37555": 12698492928.0, "37560": 12698492928.0, "37565": 12698492928.0, "37570": 12698492928.0, "37575": 12698492928.0, "37580": 12698492928.0, "37585": 12698492928.0, "37590": 12698492928.0, "37595": 12698492928.0, "37600": 12698492928.0, "37605": 12698492928.0, "37610": 12698492928.0, "37615": 12698492928.0, "37620": 12698492928.0, "37625": 
12698492928.0, "37630": 12698492928.0, "37635": 12698492928.0, "37640": 12698492928.0, "37645": 12698492928.0, "37650": 12698492928.0, "37655": 12698492928.0, "37660": 12698492928.0, "37665": 12698492928.0, "37670": 12698492928.0, "37675": 12698492928.0, "37680": 12698492928.0, "37685": 12698492928.0, "37690": 12698492928.0, "37695": 12698492928.0, "37700": 12698492928.0, "37705": 12698492928.0, "37710": 12698492928.0, "37715": 12698492928.0, "37720": 12698492928.0, "37725": 12698492928.0, "37730": 12698492928.0, "37735": 12698492928.0, "37740": 12698492928.0, "37745": 12698492928.0, "37750": 12698492928.0, "37755": 12698492928.0, "37760": 12698492928.0, "37765": 12698492928.0, "37770": 12698492928.0, "37775": 12698492928.0, "37780": 12698492928.0, "37785": 12698492928.0, "37790": 12698492928.0, "37795": 12698492928.0, "37800": 12698492928.0, "37805": 12698492928.0, "37810": 12698492928.0, "37815": 12698492928.0, "37820": 12698492928.0, "37825": 12698492928.0, "37830": 12698492928.0, "37835": 12698492928.0, "37840": 12698492928.0, "37845": 12698492928.0, "37850": 12698492928.0, "37855": 12698492928.0, "37860": 12698492928.0, "37865": 12698492928.0, "37870": 12698492928.0, "37875": 12698492928.0, "37880": 12698492928.0, "37885": 12698492928.0, "37890": 12698492928.0, "37895": 12698492928.0, "37900": 12698492928.0, "37905": 12698492928.0, "37910": 12698492928.0, "37915": 12698492928.0, "37920": 12698492928.0, "37925": 12698492928.0, "37930": 12698492928.0, "37935": 12698492928.0, "37940": 12698492928.0, "37945": 12698492928.0, "37950": 12698492928.0, "37955": 12698492928.0, "37960": 12698492928.0, "37965": 12698492928.0, "37970": 12698492928.0, "37975": 12698492928.0, "37980": 12698492928.0, "37985": 12698492928.0, "37990": 12698492928.0, "37995": 12698492928.0, "38000": 12698492928.0, "38005": 12698492928.0, "38010": 12698492928.0, "38015": 12698492928.0, "38020": 12698492928.0, "38025": 12698492928.0, "38030": 12698492928.0, "38035": 12698492928.0, "38040": 12698492928.0, "38045": 12698492928.0, "38050": 12698492928.0, "38055": 12698492928.0, "38060": 12698492928.0, "38065": 12698492928.0, "38070": 12698492928.0, "38075": 12698492928.0, "38080": 12698492928.0, "38085": 12698492928.0, "38090": 12698492928.0, "38095": 12698492928.0, "38100": 12698492928.0, "38105": 12698492928.0, "38110": 12698492928.0, "38115": 12698492928.0, "38120": 12698492928.0, "38125": 12698492928.0, "38130": 12698492928.0, "38135": 12698492928.0, "38140": 12698492928.0, "38145": 12698492928.0, "38150": 12698492928.0, "38155": 12698492928.0, "38160": 12698492928.0, "38165": 12698492928.0, "38170": 12698492928.0, "38175": 12698492928.0, "38180": 12698492928.0, "38185": 12698492928.0, "38190": 12698492928.0, "38195": 12698492928.0, "38200": 12698492928.0, "38205": 12698492928.0, "38210": 12698492928.0, "38215": 12698492928.0, "38220": 12698492928.0, "38225": 12698492928.0, "38230": 12698492928.0, "38235": 12698492928.0, "38240": 12698492928.0, "38245": 12698492928.0, "38250": 12698492928.0, "38255": 12698492928.0, "38260": 12698492928.0, "38265": 12698492928.0, "38270": 12698492928.0, "38275": 12698492928.0, "38280": 12698492928.0, "38285": 12698492928.0, "38290": 12698492928.0, "38295": 12698492928.0, "38300": 12698492928.0, "38305": 12698492928.0, "38310": 12698492928.0, "38315": 12698492928.0, "38320": 12698492928.0, "38325": 12698492928.0, "38330": 12698492928.0, "38335": 12698492928.0, "38340": 12698492928.0, "38345": 12698492928.0, "38350": 12698492928.0, "38355": 12698492928.0, "38360": 12698492928.0, "38365": 
12698492928.0, "38370": 12698492928.0, "38375": 12698492928.0, "38380": 12698492928.0, "38385": 12698492928.0, "38390": 12698492928.0, "38395": 12698492928.0, "38400": 12698492928.0, "38405": 12698492928.0, "38410": 12698492928.0, "38415": 12698492928.0, "38420": 12698492928.0, "38425": 12698492928.0, "38430": 12698492928.0, "38435": 12698492928.0, "38440": 12698492928.0, "38445": 12698492928.0, "38450": 12698492928.0, "38455": 12698492928.0, "38460": 12698492928.0, "38465": 12698492928.0, "38470": 12698492928.0, "38475": 12698492928.0, "38480": 12698492928.0, "38485": 12698492928.0, "38490": 12698492928.0, "38495": 12698492928.0, "38500": 12698492928.0, "38505": 12698492928.0, "38510": 12698492928.0, "38515": 12698492928.0, "38520": 12698492928.0, "38525": 12698492928.0, "38530": 12698492928.0, "38535": 12698492928.0, "38540": 12698492928.0, "38545": 12698492928.0, "38550": 12698492928.0, "38555": 12698492928.0, "38560": 12698492928.0, "38565": 12698492928.0, "38570": 12698492928.0, "38575": 12698492928.0, "38580": 12698492928.0, "38585": 12698492928.0, "38590": 12698492928.0, "38595": 12698492928.0, "38600": 12698492928.0, "38605": 12698492928.0, "38610": 12698492928.0, "38615": 12698492928.0, "38620": 12698492928.0, "38625": 12698492928.0, "38630": 12698492928.0, "38635": 12698492928.0, "38640": 12698492928.0, "38645": 12698492928.0, "38650": 12698492928.0, "38655": 12698492928.0, "38660": 12698492928.0, "38665": 12698492928.0, "38670": 12698492928.0, "38675": 12698492928.0, "38680": 12698492928.0, "38685": 12698492928.0, "38690": 12698492928.0, "38695": 12698492928.0, "38700": 12698492928.0, "38705": 12698492928.0, "38710": 12698492928.0, "38715": 12698492928.0, "38720": 12698492928.0, "38725": 12698492928.0, "38730": 12698492928.0, "38735": 12698492928.0, "38740": 12698492928.0, "38745": 12698492928.0, "38750": 12698492928.0, "38755": 12698492928.0, "38760": 12698492928.0, "38765": 12698492928.0, "38770": 12698492928.0, "38775": 12698492928.0, "38780": 12698492928.0, "38785": 12698492928.0, "38790": 12698492928.0, "38795": 12698492928.0, "38800": 12698492928.0, "38805": 12698492928.0, "38810": 12698492928.0, "38815": 12698492928.0, "38820": 12698492928.0, "38825": 12698492928.0, "38830": 12698492928.0, "38835": 12698492928.0, "38840": 12698492928.0, "38845": 12698492928.0, "38850": 12698492928.0, "38855": 12698492928.0, "38860": 12698492928.0, "38865": 12698492928.0, "38870": 12698492928.0, "38875": 12698492928.0, "38880": 12698492928.0, "38885": 12698492928.0, "38890": 12698492928.0, "38895": 12698492928.0, "38900": 12698492928.0, "38905": 12698492928.0, "38910": 12698492928.0, "38915": 12698492928.0, "38920": 12698492928.0, "38925": 12698492928.0, "38930": 12698492928.0, "38935": 12698492928.0, "38940": 12698492928.0, "38945": 12698492928.0, "38950": 12698492928.0, "38955": 12698492928.0, "38960": 12698492928.0, "38965": 12698492928.0, "38970": 12698492928.0, "38975": 12698492928.0, "38980": 12698492928.0, "38985": 12698492928.0, "38990": 12698492928.0, "38995": 12698492928.0, "39000": 12698492928.0, "39005": 12698492928.0, "39010": 12698492928.0, "39015": 12698492928.0, "39020": 12698492928.0, "39025": 12698492928.0, "39030": 12698492928.0, "39035": 12698492928.0, "39040": 12698492928.0, "39045": 12698492928.0, "39050": 12698492928.0, "39055": 12698492928.0, "39060": 12698492928.0, "39065": 12698492928.0, "39070": 12698492928.0, "39075": 12698492928.0, "39080": 12698492928.0, "39085": 12698492928.0, "39090": 12698492928.0, "39095": 12698492928.0, "39100": 12698492928.0, "39105": 
12698492928.0, "39110": 12698492928.0, "39115": 12698492928.0, "39120": 12698492928.0, "39125": 12698492928.0, "39130": 12698492928.0, "39135": 12698492928.0, "39140": 12698492928.0, "39145": 12698492928.0, "39150": 12698492928.0, "39155": 12698492928.0, "39160": 12698492928.0, "39165": 12698492928.0, "39170": 12698492928.0, "39175": 12698492928.0, "39180": 12698492928.0, "39185": 12698492928.0, "39190": 12698492928.0, "39195": 12698492928.0, "39200": 12698492928.0, "39205": 12698492928.0, "39210": 12698492928.0, "39215": 12698492928.0, "39220": 12698492928.0, "39225": 12698492928.0, "39230": 12698492928.0, "39235": 12698492928.0, "39240": 12698492928.0, "39245": 12698492928.0, "39250": 12698492928.0, "39255": 12698492928.0, "39260": 12698492928.0, "39265": 12698492928.0, "39270": 12698492928.0, "39275": 12698492928.0, "39280": 12698492928.0, "39285": 12698492928.0, "39290": 12698492928.0, "39295": 12698492928.0, "39300": 12698492928.0, "39305": 12698492928.0, "39310": 12698492928.0, "39315": 12698492928.0, "39320": 12698492928.0, "39325": 12698492928.0, "39330": 12698492928.0, "39335": 12698492928.0, "39340": 12698492928.0, "39345": 12698492928.0, "39350": 12698492928.0, "39355": 12698492928.0, "39360": 12698492928.0, "39365": 12698492928.0, "39370": 12698492928.0, "39375": 12698492928.0, "39380": 12698492928.0, "39385": 12698492928.0, "39390": 12698492928.0, "39395": 12698492928.0, "39400": 12698492928.0, "39405": 12698492928.0, "39410": 12698492928.0, "39415": 12698492928.0, "39420": 12698492928.0, "39425": 12698492928.0, "39430": 12698492928.0, "39435": 12698492928.0, "39440": 12698492928.0, "39445": 12698492928.0, "39450": 12698492928.0, "39455": 12698492928.0, "39460": 12698492928.0, "39465": 12698492928.0, "39470": 12698492928.0, "39475": 12698492928.0, "39480": 12698492928.0, "39485": 12698492928.0, "39490": 12698492928.0, "39495": 12698492928.0, "39500": 12698492928.0, "39505": 12698492928.0, "39510": 12698492928.0, "39515": 12698492928.0, "39520": 12698492928.0, "39525": 12698492928.0, "39530": 12698492928.0, "39535": 12698492928.0, "39540": 12698492928.0, "39545": 12698492928.0, "39550": 12698492928.0, "39555": 12698492928.0, "39560": 12698492928.0, "39565": 12698492928.0, "39570": 12698492928.0, "39575": 12698492928.0, "39580": 12698492928.0, "39585": 12698492928.0, "39590": 12698492928.0, "39595": 12698492928.0, "39600": 12698492928.0, "39605": 12698492928.0, "39610": 12698492928.0, "39615": 12698492928.0, "39620": 12698492928.0, "39625": 12698492928.0, "39630": 12698492928.0, "39635": 12698492928.0, "39640": 12698492928.0, "39645": 12698492928.0, "39650": 12698492928.0, "39655": 12698492928.0, "39660": 12698492928.0, "39665": 12698492928.0, "39670": 12698492928.0, "39675": 12698492928.0, "39680": 12698492928.0, "39685": 12698492928.0, "39690": 12698492928.0, "39695": 12698492928.0, "39700": 12698492928.0, "39705": 12698492928.0, "39710": 12698492928.0, "39715": 12698492928.0, "39720": 12698492928.0, "39725": 12698492928.0, "39730": 12698492928.0, "39735": 12698492928.0, "39740": 12698492928.0, "39745": 12698492928.0, "39750": 12698492928.0, "39755": 12698492928.0, "39760": 12698492928.0, "39765": 12698492928.0, "39770": 12698492928.0, "39775": 12698492928.0, "39780": 12698492928.0, "39785": 12698492928.0, "39790": 12698492928.0, "39795": 12698492928.0, "39800": 12698492928.0, "39805": 12698492928.0, "39810": 12698492928.0, "39815": 12698492928.0, "39820": 12698492928.0, "39825": 12698492928.0, "39830": 12698492928.0, "39835": 12698492928.0, "39840": 12698492928.0, "39845": 
12698492928.0, "39850": 12698492928.0, "39855": 12698492928.0, "39860": 12698492928.0, "39865": 12698492928.0, "39870": 12698492928.0, "39875": 12698492928.0, "39880": 12698492928.0, "39885": 12698492928.0, "39890": 12698492928.0, "39895": 12698492928.0, "39900": 12698492928.0, "39905": 12698492928.0, "39910": 12698492928.0, "39915": 12698492928.0, "39920": 12698492928.0, "39925": 12698492928.0, "39930": 12698492928.0, "39935": 12698492928.0, "39940": 12698492928.0, "39945": 12698492928.0, "39950": 12698492928.0, "39955": 12698492928.0, "39960": 12698492928.0, "39965": 12698492928.0, "39970": 12698492928.0, "39975": 12698492928.0, "39980": 12698492928.0, "39985": 12698492928.0, "39990": 12698492928.0, "39995": 12698492928.0, "40000": 12698492928.0, "40005": 12698492928.0, "40010": 12698492928.0, "40015": 12698492928.0, "40020": 12698492928.0, "40025": 12698492928.0, "40030": 12698492928.0, "40035": 12698492928.0, "40040": 12698492928.0, "40045": 12698492928.0, "40050": 12698492928.0, "40055": 12698492928.0, "40060": 12698492928.0, "40065": 12698492928.0, "40070": 12698492928.0, "40075": 12698492928.0, "40080": 12698492928.0, "40085": 12698492928.0, "40090": 12698492928.0, "40095": 12698492928.0, "40100": 12698492928.0, "40105": 12698492928.0, "40110": 12698492928.0, "40115": 12698492928.0, "40120": 12698492928.0, "40125": 12698492928.0, "40130": 12698492928.0, "40135": 12698492928.0, "40140": 12698492928.0, "40145": 12698492928.0, "40150": 12698492928.0, "40155": 12698492928.0, "40160": 12698492928.0, "40165": 12698492928.0, "40170": 12698492928.0, "40175": 12698492928.0, "40180": 12698492928.0, "40185": 12698492928.0, "40190": 12698492928.0, "40195": 12698492928.0, "40200": 12698492928.0, "40205": 12698492928.0, "40210": 12698492928.0, "40215": 12698492928.0, "40220": 12698492928.0, "40225": 12698492928.0, "40230": 12698492928.0, "40235": 12698492928.0, "40240": 12698492928.0, "40245": 12698492928.0, "40250": 12698492928.0, "40255": 12698492928.0, "40260": 12698492928.0, "40265": 12698492928.0, "40270": 12698492928.0, "40275": 12698492928.0, "40280": 12698492928.0, "40285": 12698492928.0, "40290": 12698492928.0, "40295": 12698492928.0, "40300": 12698492928.0, "40305": 12698492928.0, "40310": 12698492928.0, "40315": 12698492928.0, "40320": 12698492928.0, "40325": 12698492928.0, "40330": 12698492928.0, "40335": 12698492928.0, "40340": 12698492928.0, "40345": 12698492928.0, "40350": 12698492928.0, "40355": 12698492928.0, "40360": 12698492928.0, "40365": 12698492928.0, "40370": 12698492928.0, "40375": 12698492928.0, "40380": 12698492928.0, "40385": 12698492928.0, "40390": 12698492928.0, "40395": 12698492928.0, "40400": 12698492928.0, "40405": 12698492928.0, "40410": 12698492928.0, "40415": 12698492928.0, "40420": 12698492928.0, "40425": 12698492928.0, "40430": 12698492928.0, "40435": 12698492928.0, "40440": 12698492928.0, "40445": 12698492928.0, "40450": 12698492928.0, "40455": 12698492928.0, "40460": 12698492928.0, "40465": 12698492928.0, "40470": 12698492928.0, "40475": 12698492928.0, "40480": 12698492928.0, "40485": 12698492928.0, "40490": 12698492928.0, "40495": 12698492928.0, "40500": 12698492928.0, "40505": 12698492928.0, "40510": 12698492928.0, "40515": 12698492928.0, "40520": 12698492928.0, "40525": 12698492928.0, "40530": 12698492928.0, "40535": 12698492928.0, "40540": 12698492928.0, "40545": 12698492928.0, "40550": 12698492928.0, "40555": 12698492928.0, "40560": 12698492928.0, "40565": 12698492928.0, "40570": 12698492928.0, "40575": 12698492928.0, "40580": 12698492928.0, "40585": 
12698492928.0, "40590": 12698492928.0, "40595": 12698492928.0, "40600": 12698492928.0, "40605": 12698492928.0, "40610": 12698492928.0, "40615": 12698492928.0, "40620": 12698492928.0, "40625": 12698492928.0, "40630": 12698492928.0, "40635": 12698492928.0, "40640": 12698492928.0, "40645": 12698492928.0, "40650": 12698492928.0, "40655": 12698492928.0, "40660": 12698492928.0, "40665": 12698492928.0, "40670": 12698492928.0, "40675": 12698492928.0, "40680": 12698492928.0, "40685": 12698492928.0, "40690": 12698492928.0, "40695": 12698492928.0, "40700": 12698492928.0, "40705": 12698492928.0, "40710": 12698492928.0, "40715": 12698492928.0, "40720": 12698492928.0, "40725": 12698492928.0, "40730": 12698492928.0, "40735": 12698492928.0, "40740": 12698492928.0, "40745": 12698492928.0, "40750": 12698492928.0, "40755": 12698492928.0, "40760": 12698492928.0, "40765": 12698492928.0, "40770": 12698492928.0, "40775": 12698492928.0, "40780": 12698492928.0, "40785": 12698492928.0, "40790": 12698492928.0, "40795": 12698492928.0, "40800": 12698492928.0, "40805": 12698492928.0, "40810": 12698492928.0, "40815": 12698492928.0, "40820": 12698492928.0, "40825": 12698492928.0, "40830": 12698492928.0, "40835": 12698492928.0, "40840": 12698492928.0, "40845": 12698492928.0, "40850": 12698492928.0, "40855": 12698492928.0, "40860": 12698492928.0, "40865": 12698492928.0, "40870": 12698492928.0, "40875": 12698492928.0, "40880": 12698492928.0, "40885": 12698492928.0, "40890": 12698492928.0, "40895": 12698492928.0, "40900": 12698492928.0, "40905": 12698492928.0, "40910": 12698492928.0, "40915": 12698492928.0, "40920": 12698492928.0, "40925": 12698492928.0, "40930": 12698492928.0, "40935": 12698492928.0, "40940": 12698492928.0, "40945": 12698492928.0, "40950": 12698492928.0, "40955": 12698492928.0, "40960": 12698492928.0, "40965": 12698492928.0, "40970": 12698492928.0, "40975": 12698492928.0, "40980": 12698492928.0, "40985": 12698492928.0, "40990": 12698492928.0, "40995": 12698492928.0, "41000": 12698492928.0, "41005": 12698492928.0, "41010": 12698492928.0, "41015": 12698492928.0, "41020": 12698492928.0, "41025": 12698492928.0, "41030": 12698492928.0, "41035": 12698492928.0, "41040": 12698492928.0, "41045": 12698492928.0, "41050": 12698492928.0, "41055": 12698492928.0, "41060": 12698492928.0, "41065": 12698492928.0, "41070": 12698492928.0, "41075": 12698492928.0, "41080": 12698492928.0, "41085": 12698492928.0, "41090": 12698492928.0, "41095": 12698492928.0, "41100": 12698492928.0, "41105": 12698492928.0, "41110": 12698492928.0, "41115": 12698492928.0, "41120": 12698492928.0, "41125": 12698492928.0, "41130": 12698492928.0, "41135": 12698492928.0, "41140": 12698492928.0, "41145": 12698492928.0, "41150": 12698492928.0, "41155": 12698492928.0, "41160": 12698492928.0, "41165": 12698492928.0, "41170": 12698492928.0, "41175": 12698492928.0, "41180": 12698492928.0, "41185": 12698492928.0, "41190": 12698492928.0, "41195": 12698492928.0, "41200": 12698492928.0, "41205": 12698492928.0, "41210": 12698492928.0, "41215": 12698492928.0, "41220": 12698492928.0, "41225": 12698492928.0, "41230": 12698492928.0, "41235": 12698492928.0, "41240": 12698492928.0, "41245": 12698492928.0, "41250": 12698492928.0, "41255": 12698492928.0, "41260": 12698492928.0, "41265": 12698492928.0, "41270": 12698492928.0, "41275": 12698492928.0, "41280": 12698492928.0, "41285": 12698492928.0, "41290": 12698492928.0, "41295": 12698492928.0, "41300": 12698492928.0, "41305": 12698492928.0, "41310": 12698492928.0, "41315": 12698492928.0, "41320": 12698492928.0, "41325": 
12698492928.0, "41330": 12698492928.0, "41335": 12698492928.0, "41340": 12698492928.0, "41345": 12698492928.0, "41350": 12698492928.0, "41355": 12698492928.0, "41360": 12698492928.0, "41365": 12698492928.0, "41370": 12698492928.0, "41375": 12698492928.0, "41380": 12698492928.0, "41385": 12698492928.0, "41390": 12698492928.0, "41395": 12698492928.0, "41400": 12698492928.0, "41405": 12698492928.0, "41410": 12698492928.0, "41415": 12698492928.0, "41420": 12698492928.0, "41425": 12698492928.0, "41430": 12698492928.0, "41435": 12698492928.0, "41440": 12698492928.0, "41445": 12698492928.0, "41450": 12698492928.0, "41455": 12698492928.0, "41460": 12698492928.0, "41465": 12698492928.0, "41470": 12698492928.0, "41475": 12698492928.0, "41480": 12698492928.0, "41485": 12698492928.0, "41490": 12698492928.0, "41495": 12698492928.0, "41500": 12698492928.0, "41505": 12698492928.0, "41510": 12698492928.0, "41515": 12698492928.0, "41520": 12698492928.0, "41525": 12698492928.0, "41530": 12698492928.0, "41535": 12698492928.0, "41540": 12698492928.0, "41545": 12698492928.0, "41550": 12698492928.0, "41555": 12698492928.0, "41560": 12698492928.0, "41565": 12698492928.0, "41570": 12698492928.0, "41575": 12698492928.0, "41580": 12698492928.0, "41585": 12698492928.0, "41590": 12698492928.0, "41595": 12698492928.0, "41600": 12698492928.0, "41605": 12698492928.0, "41610": 12698492928.0, "41615": 12698492928.0, "41620": 12698492928.0, "41625": 12698492928.0, "41630": 12698492928.0, "41635": 12698492928.0, "41640": 12698492928.0, "41645": 12698492928.0, "41650": 12698492928.0, "41655": 12698492928.0, "41660": 12698492928.0, "41665": 12698492928.0, "41670": 12698492928.0, "41675": 12698492928.0, "41680": 12698492928.0, "41685": 12698492928.0, "41690": 12698492928.0, "41695": 12698492928.0, "41700": 12698492928.0, "41705": 12698492928.0, "41710": 12698492928.0, "41715": 12698492928.0, "41720": 12698492928.0, "41725": 12698492928.0, "41730": 12698492928.0, "41735": 12698492928.0, "41740": 12698492928.0, "41745": 12698492928.0, "41750": 12698492928.0, "41755": 12698492928.0, "41760": 12698492928.0, "41765": 12698492928.0, "41770": 12698492928.0, "41775": 12698492928.0, "41780": 12698492928.0, "41785": 12698492928.0, "41790": 12698492928.0, "41795": 12698492928.0, "41800": 12698492928.0, "41805": 12698492928.0, "41810": 12698492928.0, "41815": 12698492928.0, "41820": 12698492928.0, "41825": 12698492928.0, "41830": 12698492928.0, "41835": 12698492928.0, "41840": 12698492928.0, "41845": 12698492928.0, "41850": 12698492928.0, "41855": 12698492928.0, "41860": 12698492928.0, "41865": 12698492928.0, "41870": 12698492928.0, "41875": 12698492928.0, "41880": 12698492928.0, "41885": 12698492928.0, "41890": 12698492928.0, "41895": 12698492928.0, "41900": 12698492928.0, "41905": 12698492928.0, "41910": 12698492928.0, "41915": 12698492928.0, "41920": 12698492928.0, "41925": 12698492928.0, "41930": 12698492928.0, "41935": 12698492928.0, "41940": 12698492928.0, "41945": 12698492928.0, "41950": 12698492928.0, "41955": 12698492928.0, "41960": 12698492928.0, "41965": 12698492928.0, "41970": 12698492928.0, "41975": 12698492928.0, "41980": 12698492928.0, "41985": 12698492928.0, "41990": 12698492928.0, "41995": 12698492928.0, "42000": 12698492928.0, "42005": 12698492928.0, "42010": 12698492928.0, "42015": 12698492928.0, "42020": 12698492928.0, "42025": 12698492928.0, "42030": 12698492928.0, "42035": 12698492928.0, "42040": 12698492928.0, "42045": 12698492928.0, "42050": 12698492928.0, "42055": 12698492928.0, "42060": 12698492928.0, "42065": 
12698492928.0, "42070": 12698492928.0, "42075": 12698492928.0, "42080": 12698492928.0, "42085": 12698492928.0, "42090": 12698492928.0, "42095": 12698492928.0, "42100": 12698492928.0, "42105": 12698492928.0, "42110": 12698492928.0, "42115": 12698492928.0, "42120": 12698492928.0, "42125": 12698492928.0, "42130": 12698492928.0, "42135": 12698492928.0, "42140": 12698492928.0, "42145": 12698492928.0, "42150": 12698492928.0, "42155": 12698492928.0, "42160": 12698492928.0, "42165": 12698492928.0, "42170": 12698492928.0, "42175": 12698492928.0, "42180": 12698492928.0, "42185": 12698492928.0, "42190": 12698492928.0, "42195": 12698492928.0, "42200": 12698492928.0, "42205": 12698492928.0, "42210": 12698492928.0, "42215": 12698492928.0, "42220": 12698492928.0, "42225": 12698492928.0, "42230": 12698492928.0, "42235": 12698492928.0, "42240": 12698492928.0, "42245": 12698492928.0, "42250": 12698492928.0, "42255": 12698492928.0, "42260": 12698492928.0, "42265": 12698492928.0, "42270": 12698492928.0, "42275": 12698492928.0, "42280": 12698492928.0, "42285": 12698492928.0, "42290": 12698492928.0, "42295": 12698492928.0, "42300": 12698492928.0, "42305": 12698492928.0, "42310": 12698492928.0, "42315": 12698492928.0, "42320": 12698492928.0, "42325": 12698492928.0, "42330": 12698492928.0, "42335": 12698492928.0, "42340": 12698492928.0, "42345": 12698492928.0, "42350": 12698492928.0, "42355": 12698492928.0, "42360": 12698492928.0, "42365": 12698492928.0, "42370": 12698492928.0, "42375": 12698492928.0, "42380": 12698492928.0, "42385": 12698492928.0, "42390": 12698492928.0, "42395": 12698492928.0, "42400": 12698492928.0, "42405": 12698492928.0, "42410": 12698492928.0, "42415": 12698492928.0, "42420": 12698492928.0, "42425": 12698492928.0, "42430": 12698492928.0, "42435": 12698492928.0, "42440": 12698492928.0, "42445": 12698492928.0, "42450": 12698492928.0, "42455": 12698492928.0, "42460": 12698492928.0, "42465": 12698492928.0, "42470": 12698492928.0, "42475": 12698492928.0, "42480": 12698492928.0, "42485": 12698492928.0, "42490": 12698492928.0, "42495": 12698492928.0, "42500": 12698492928.0, "42505": 12698492928.0, "42510": 12698492928.0, "42515": 12698492928.0, "42520": 12698492928.0, "42525": 12698492928.0, "42530": 12698492928.0, "42535": 12698492928.0, "42540": 12698492928.0, "42545": 12698492928.0, "42550": 12698492928.0, "42555": 12698492928.0, "42560": 12698492928.0, "42565": 12698492928.0, "42570": 12698492928.0, "42575": 12698492928.0, "42580": 12698492928.0, "42585": 12698492928.0, "42590": 12698492928.0, "42595": 12698492928.0, "42600": 12698492928.0, "42605": 12698492928.0, "42610": 12698492928.0, "42615": 12698492928.0, "42620": 12698492928.0, "42625": 12698492928.0, "42630": 12698492928.0, "42635": 12698492928.0, "42640": 12698492928.0, "42645": 12698492928.0, "42650": 12698492928.0, "42655": 12698492928.0, "42660": 12698492928.0, "42665": 12698492928.0, "42670": 12698492928.0, "42675": 12698492928.0, "42680": 12698492928.0, "42685": 12698492928.0, "42690": 12698492928.0, "42695": 12698492928.0, "42700": 12698492928.0, "42705": 12698492928.0, "42710": 12698492928.0, "42715": 12698492928.0, "42720": 12698492928.0, "42725": 12698492928.0, "42730": 12698492928.0, "42735": 12698492928.0, "42740": 12698492928.0, "42745": 12698492928.0, "42750": 12698492928.0, "42755": 12698492928.0, "42760": 12698492928.0, "42765": 12698492928.0, "42770": 12698492928.0, "42775": 12698492928.0, "42780": 12698492928.0, "42785": 12698492928.0, "42790": 12698492928.0, "42795": 12698492928.0, "42800": 12698492928.0, "42805": 
12698492928.0, "42810": 12698492928.0, "42815": 12698492928.0, "42820": 12698492928.0, "42825": 12698492928.0, "42830": 12698492928.0, "42835": 12698492928.0, "42840": 12698492928.0, "42845": 12698492928.0, "42850": 12698492928.0, "42855": 12698492928.0, "42860": 12698492928.0, "42865": 12698492928.0, "42870": 12698492928.0, "42875": 12698492928.0, "42880": 12698492928.0, "42885": 12698492928.0, "42890": 12698492928.0, "42895": 12698492928.0, "42900": 12698492928.0, "42905": 12698492928.0, "42910": 12698492928.0, "42915": 12698492928.0, "42920": 12698492928.0, "42925": 12698492928.0, "42930": 12698492928.0, "42935": 12698492928.0, "42940": 12698492928.0, "42945": 12698492928.0, "42950": 12698492928.0, "42955": 12698492928.0, "42960": 12698492928.0, "42965": 12698492928.0, "42970": 12698492928.0, "42975": 12698492928.0, "42980": 12698492928.0, "42985": 12698492928.0, "42990": 12698492928.0, "42995": 12698492928.0, "43000": 12698492928.0, "43005": 12698492928.0, "43010": 12698492928.0, "43015": 12698492928.0, "43020": 12698492928.0, "43025": 12698492928.0, "43030": 12698492928.0, "43035": 12698492928.0, "43040": 12698492928.0, "43045": 12698492928.0, "43050": 12698492928.0, "43055": 12698492928.0, "43060": 12698492928.0, "43065": 12698492928.0, "43070": 12698492928.0, "43075": 12698492928.0, "43080": 12698492928.0, "43085": 12698492928.0, "43090": 12698492928.0, "43095": 12698492928.0, "43100": 12698492928.0, "43105": 12698492928.0, "43110": 12698492928.0, "43115": 12698492928.0, "43120": 12698492928.0, "43125": 12698492928.0, "43130": 12698492928.0, "43135": 12698492928.0, "43140": 12698492928.0, "43145": 12698492928.0, "43150": 12698492928.0, "43155": 12698492928.0, "43160": 12698492928.0, "43165": 12698492928.0, "43170": 12698492928.0, "43175": 12698492928.0, "43180": 12698492928.0, "43185": 12698492928.0, "43190": 12698492928.0, "43195": 12698492928.0, "43200": 12698492928.0, "43205": 12698492928.0, "43210": 12698492928.0, "43215": 12698492928.0, "43220": 12698492928.0, "43225": 12698492928.0, "43230": 12698492928.0, "43235": 12698492928.0, "43240": 12698492928.0, "43245": 12698492928.0, "43250": 12698492928.0, "43255": 12698492928.0, "43260": 12698492928.0, "43265": 12698492928.0, "43270": 12698492928.0, "43275": 12698492928.0, "43280": 12698492928.0, "43285": 12698492928.0, "43290": 12698492928.0, "43295": 12698492928.0, "43300": 12698492928.0, "43305": 12698492928.0, "43310": 12698492928.0, "43315": 12698492928.0, "43320": 12698492928.0, "43325": 12698492928.0, "43330": 12698492928.0, "43335": 12698492928.0, "43340": 12698492928.0, "43345": 12698492928.0, "43350": 12698492928.0, "43355": 12698492928.0, "43360": 12698492928.0, "43365": 12698492928.0, "43370": 12698492928.0, "43375": 12698492928.0, "43380": 12698492928.0, "43385": 12698492928.0, "43390": 12698492928.0, "43395": 12698492928.0, "43400": 12698492928.0, "43405": 12698492928.0, "43410": 12698492928.0, "43415": 12698492928.0, "43420": 12698492928.0, "43425": 12698492928.0, "43430": 12698492928.0, "43435": 12698492928.0, "43440": 12698492928.0, "43445": 12698492928.0, "43450": 12698492928.0, "43455": 12698492928.0, "43460": 12698492928.0, "43465": 12698492928.0, "43470": 12698492928.0, "43475": 12698492928.0, "43480": 12698492928.0, "43485": 12698492928.0, "43490": 12698492928.0, "43495": 12698492928.0, "43500": 12698492928.0, "43505": 12698492928.0, "43510": 12698492928.0, "43515": 12698492928.0, "43520": 12698492928.0, "43525": 12698492928.0, "43530": 12698492928.0, "43535": 12698492928.0, "43540": 12698492928.0, "43545": 
12698492928.0, "43550": 12698492928.0, "43555": 12698492928.0, "43560": 12698492928.0, "43565": 12698492928.0, "43570": 12698492928.0, "43575": 12698492928.0, "43580": 12698492928.0, "43585": 12698492928.0, "43590": 12698492928.0, "43595": 12698492928.0, "43600": 12698492928.0, "43605": 12698492928.0, "43610": 12698492928.0, "43615": 12698492928.0, "43620": 12698492928.0, "43625": 12698492928.0, "43630": 12698492928.0, "43635": 12698492928.0, "43640": 12698492928.0, "43645": 12698492928.0, "43650": 12698492928.0, "43655": 12698492928.0, "43660": 12698492928.0, "43665": 12698492928.0, "43670": 12698492928.0, "43675": 12698492928.0, "43680": 12698492928.0, "43685": 12698492928.0, "43690": 12698492928.0, "43695": 12698492928.0, "43700": 12698492928.0, "43705": 12698492928.0, "43710": 12698492928.0, "43715": 12698492928.0, "43720": 12698492928.0, "43725": 12698492928.0, "43730": 12698492928.0, "43735": 12698492928.0, "43740": 12698492928.0, "43745": 12698492928.0, "43750": 12698492928.0, "43755": 12698492928.0, "43760": 12698492928.0, "43765": 12698492928.0, "43770": 12698492928.0, "43775": 12698492928.0, "43780": 12698492928.0, "43785": 12698492928.0, "43790": 12698492928.0, "43795": 12698492928.0, "43800": 12698492928.0, "43805": 12698492928.0, "43810": 12698492928.0, "43815": 12698492928.0, "43820": 12698492928.0, "43825": 12698492928.0, "43830": 12698492928.0, "43835": 12698492928.0, "43840": 12698492928.0, "43845": 12698492928.0, "43850": 12698492928.0, "43855": 12698492928.0, "43860": 12698492928.0, "43865": 12698492928.0, "43870": 12698492928.0, "43875": 12698492928.0, "43880": 12698492928.0, "43885": 12698492928.0, "43890": 12698492928.0, "43895": 12698492928.0, "43900": 12698492928.0, "43905": 12698492928.0, "43910": 12698492928.0, "43915": 12698492928.0, "43920": 12698492928.0, "43925": 12698492928.0, "43930": 12698492928.0, "43935": 12698492928.0, "43940": 12698492928.0, "43945": 12698492928.0, "43950": 12698492928.0, "43955": 12698492928.0, "43960": 12698492928.0, "43965": 12698492928.0, "43970": 12698492928.0, "43975": 12698492928.0, "43980": 12698492928.0, "43985": 12698492928.0, "43990": 12698492928.0, "43995": 12698492928.0, "44000": 12698492928.0, "44005": 12698492928.0, "44010": 12698492928.0, "44015": 12698492928.0, "44020": 12698492928.0, "44025": 12698492928.0, "44030": 12698492928.0, "44035": 12698492928.0, "44040": 12698492928.0, "44045": 12698492928.0, "44050": 12698492928.0, "44055": 12698492928.0, "44060": 12698492928.0, "44065": 12698492928.0, "44070": 12698492928.0, "44075": 12698492928.0, "44080": 12698492928.0, "44085": 12698492928.0, "44090": 12698492928.0, "44095": 12698492928.0, "44100": 12698492928.0, "44105": 12698492928.0, "44110": 12698492928.0, "44115": 12698492928.0, "44120": 12698492928.0, "44125": 12698492928.0, "44130": 12698492928.0, "44135": 12698492928.0, "44140": 12698492928.0, "44145": 12698492928.0, "44150": 12698492928.0, "44155": 12698492928.0, "44160": 12698492928.0, "44165": 12698492928.0, "44170": 12698492928.0, "44175": 12698492928.0, "44180": 12698492928.0, "44185": 12698492928.0, "44190": 12698492928.0, "44195": 12698492928.0, "44200": 12698492928.0, "44205": 12698492928.0, "44210": 12698492928.0, "44215": 12698492928.0, "44220": 12698492928.0, "44225": 12698492928.0, "44230": 12698492928.0, "44235": 12698492928.0, "44240": 12698492928.0, "44245": 12698492928.0, "44250": 12698492928.0, "44255": 12698492928.0, "44260": 12698492928.0, "44265": 12698492928.0, "44270": 12698492928.0, "44275": 12698492928.0, "44280": 12698492928.0, "44285": 
12698492928.0, "44290": 12698492928.0, "44295": 12698492928.0, "44300": 12698492928.0, "44305": 12698492928.0, "44310": 12698492928.0, "44315": 12698492928.0, "44320": 12698492928.0, "44325": 12698492928.0, "44330": 12698492928.0, "44335": 12698492928.0, "44340": 12698492928.0, "44345": 12698492928.0, "44350": 12698492928.0, "44355": 12698492928.0, "44360": 12698492928.0, "44365": 12698492928.0, "44370": 12698492928.0, "44375": 12698492928.0, "44380": 12698492928.0, "44385": 12698492928.0, "44390": 12698492928.0, "44395": 12698492928.0, "44400": 12698492928.0, "44405": 12698492928.0, "44410": 12698492928.0, "44415": 12698492928.0, "44420": 12698492928.0, "44425": 12698492928.0, "44430": 12698492928.0, "44435": 12698492928.0, "44440": 12698492928.0, "44445": 12698492928.0, "44450": 12698492928.0, "44455": 12698492928.0, "44460": 12698492928.0, "44465": 12698492928.0, "44470": 12698492928.0, "44475": 12698492928.0, "44480": 12698492928.0, "44485": 12698492928.0, "44490": 12698492928.0, "44495": 12698492928.0, "44500": 12698492928.0, "44505": 12698492928.0, "44510": 12698492928.0, "44515": 12698492928.0, "44520": 12698492928.0, "44525": 12698492928.0, "44530": 12698492928.0, "44535": 12698492928.0, "44540": 12698492928.0, "44545": 12698492928.0, "44550": 12698492928.0, "44555": 12698492928.0, "44560": 12698492928.0, "44565": 12698492928.0, "44570": 12698492928.0, "44575": 12698492928.0, "44580": 12698492928.0, "44585": 12698492928.0, "44590": 12698492928.0, "44595": 12698492928.0, "44600": 12698492928.0, "44605": 12698492928.0, "44610": 12698492928.0, "44615": 12698492928.0, "44620": 12698492928.0, "44625": 12698492928.0, "44630": 12698492928.0, "44635": 12698492928.0, "44640": 12698492928.0, "44645": 12698492928.0, "44650": 12698492928.0, "44655": 12698492928.0, "44660": 12698492928.0, "44665": 12698492928.0, "44670": 12698492928.0, "44675": 12698492928.0, "44680": 12698492928.0, "44685": 12698492928.0, "44690": 12698492928.0, "44695": 12698492928.0, "44700": 12698492928.0, "44705": 12698492928.0, "44710": 12698492928.0, "44715": 12698492928.0, "44720": 12698492928.0, "44725": 12698492928.0, "44730": 12698492928.0, "44735": 12698492928.0, "44740": 12698492928.0, "44745": 12698492928.0, "44750": 12698492928.0, "44755": 12698492928.0, "44760": 12698492928.0, "44765": 12698492928.0, "44770": 12698492928.0, "44775": 12698492928.0, "44780": 12698492928.0, "44785": 12698492928.0, "44790": 12698492928.0, "44795": 12698492928.0, "44800": 12698492928.0, "44805": 12698492928.0, "44810": 12698492928.0, "44815": 12698492928.0, "44820": 12698492928.0, "44825": 12698492928.0, "44830": 12698492928.0, "44835": 12698492928.0, "44840": 12698492928.0, "44845": 12698492928.0, "44850": 12698492928.0, "44855": 12698492928.0, "44860": 12698492928.0, "44865": 12698492928.0, "44870": 12698492928.0, "44875": 12698492928.0, "44880": 12698492928.0, "44885": 12698492928.0, "44890": 12698492928.0, "44895": 12698492928.0, "44900": 12698492928.0, "44905": 12698492928.0, "44910": 12698492928.0, "44915": 12698492928.0, "44920": 12698492928.0, "44925": 12698492928.0, "44930": 12698492928.0, "44935": 12698492928.0, "44940": 12698492928.0, "44945": 12698492928.0, "44950": 12698492928.0, "44955": 12698492928.0, "44960": 12698492928.0, "44965": 12698492928.0, "44970": 12698492928.0, "44975": 12698492928.0, "44980": 12698492928.0, "44985": 12698492928.0, "44990": 12698492928.0, "44995": 12698492928.0, "45000": 12698492928.0, "45005": 12698492928.0, "45010": 12698492928.0, "45015": 12698492928.0, "45020": 12698492928.0, "45025": 
12698492928.0, "45030": 12698492928.0, "45035": 12698492928.0, "45040": 12698492928.0, "45045": 12698492928.0, "45050": 12698492928.0, "45055": 12698492928.0, "45060": 12698492928.0, "45065": 12698492928.0, "45070": 12698492928.0, "45075": 12698492928.0, "45080": 12698492928.0, "45085": 12698492928.0, "45090": 12698492928.0, "45095": 12698492928.0, "45100": 12698492928.0, "45105": 12698492928.0, "45110": 12698492928.0, "45115": 12698492928.0, "45120": 12698492928.0, "45125": 12698492928.0, "45130": 12698492928.0, "45135": 12698492928.0, "45140": 12698492928.0, "45145": 12698492928.0, "45150": 12698492928.0, "45155": 12698492928.0, "45160": 12698492928.0, "45165": 12698492928.0, "45170": 12698492928.0, "45175": 12698492928.0, "45180": 12698492928.0, "45185": 12698492928.0, "45190": 12698492928.0, "45195": 12698492928.0, "45200": 12698492928.0, "45205": 12698492928.0, "45210": 12698492928.0, "45215": 12698492928.0, "45220": 12698492928.0, "45225": 12698492928.0, "45230": 12698492928.0, "45235": 12698492928.0, "45240": 12698492928.0, "45245": 12698492928.0, "45250": 12698492928.0, "45255": 12698492928.0, "45260": 12698492928.0, "45265": 12698492928.0, "45270": 12698492928.0, "45275": 12698492928.0, "45280": 12698492928.0, "45285": 12698492928.0, "45290": 12698492928.0, "45295": 12698492928.0, "45300": 12698492928.0, "45305": 12698492928.0, "45310": 12698492928.0, "45315": 12698492928.0, "45320": 12698492928.0, "45325": 12698492928.0, "45330": 12698492928.0, "45335": 12698492928.0, "45340": 12698492928.0, "45345": 12698492928.0, "45350": 12698492928.0, "45355": 12698492928.0, "45360": 12698492928.0, "45365": 12698492928.0, "45370": 12698492928.0, "45375": 12698492928.0, "45380": 12698492928.0, "45385": 12698492928.0, "45390": 12698492928.0, "45395": 12698492928.0, "45400": 12698492928.0, "45405": 12698492928.0, "45410": 12698492928.0, "45415": 12698492928.0, "45420": 12698492928.0, "45425": 12698492928.0, "45430": 12698492928.0, "45435": 12698492928.0, "45440": 12698492928.0, "45445": 12698492928.0, "45450": 12698492928.0, "45455": 12698492928.0, "45460": 12698492928.0, "45465": 12698492928.0, "45470": 12698492928.0, "45475": 12698492928.0, "45480": 12698492928.0, "45485": 12698492928.0, "45490": 12698492928.0, "45495": 12698492928.0, "45500": 12698492928.0, "45505": 12698492928.0, "45510": 12698492928.0, "45515": 12698492928.0, "45520": 12698492928.0, "45525": 12698492928.0, "45530": 12698492928.0, "45535": 12698492928.0, "45540": 12698492928.0, "45545": 12698492928.0, "45550": 12698492928.0, "45555": 12698492928.0, "45560": 12698492928.0, "45565": 12698492928.0, "45570": 12698492928.0, "45575": 12698492928.0, "45580": 12698492928.0, "45585": 12698492928.0, "45590": 12698492928.0, "45595": 12698492928.0, "45600": 12698492928.0, "45605": 12698492928.0, "45610": 12698492928.0, "45615": 12698492928.0, "45620": 12698492928.0, "45625": 12698492928.0, "45630": 12698492928.0, "45635": 12698492928.0, "45640": 12698492928.0, "45645": 12698492928.0, "45650": 12698492928.0, "45655": 12698492928.0, "45660": 12698492928.0, "45665": 12698492928.0, "45670": 12698492928.0, "45675": 12698492928.0, "45680": 12698492928.0, "45685": 12698492928.0, "45690": 12698492928.0, "45695": 12698492928.0, "45700": 12698492928.0, "45705": 12698492928.0, "45710": 12698492928.0, "45715": 12698492928.0, "45720": 12698492928.0, "45725": 12698492928.0, "45730": 12698492928.0, "45735": 12698492928.0, "45740": 12698492928.0, "45745": 12698492928.0, "45750": 12698492928.0, "45755": 12698492928.0, "45760": 12698492928.0, "45765": 
12698492928.0, "45770": 12698492928.0, "45775": 12698492928.0, "45780": 12698492928.0, "45785": 12698492928.0, "45790": 12698492928.0, "45795": 12698492928.0, "45800": 12698492928.0, "45805": 12698492928.0, "45810": 12698492928.0, "45815": 12698492928.0, "45820": 12698492928.0, "45825": 12698492928.0, "45830": 12698492928.0, "45835": 12698492928.0, "45840": 12698492928.0, "45845": 12698492928.0, "45850": 12698492928.0, "45855": 12698492928.0, "45860": 12698492928.0, "45865": 12698492928.0, "45870": 12698492928.0, "45875": 12698492928.0, "45880": 12698492928.0, "45885": 12698492928.0, "45890": 12698492928.0, "45895": 12698492928.0, "45900": 12698492928.0, "45905": 12698492928.0, "45910": 12698492928.0, "45915": 12698492928.0, "45920": 12698492928.0, "45925": 12698492928.0, "45930": 12698492928.0, "45935": 12698492928.0, "45940": 12698492928.0, "45945": 12698492928.0, "45950": 12698492928.0, "45955": 12698492928.0, "45960": 12698492928.0, "45965": 12698492928.0, "45970": 12698492928.0, "45975": 12698492928.0, "45980": 12698492928.0, "45985": 12698492928.0, "45990": 12698492928.0, "45995": 12698492928.0, "46000": 12698492928.0, "46005": 12698492928.0, "46010": 12698492928.0, "46015": 12698492928.0, "46020": 12698492928.0, "46025": 12698492928.0, "46030": 12698492928.0, "46035": 12698492928.0, "46040": 12698492928.0, "46045": 12698492928.0, "46050": 12698492928.0, "46055": 12698492928.0, "46060": 12698492928.0, "46065": 12698492928.0, "46070": 12698492928.0, "46075": 12698492928.0, "46080": 12698492928.0, "46085": 12698492928.0, "46090": 12698492928.0, "46095": 12698492928.0, "46100": 12698492928.0, "46105": 12698492928.0, "46110": 12698492928.0, "46115": 12698492928.0, "46120": 12698492928.0, "46125": 12698492928.0, "46130": 12698492928.0, "46135": 12698492928.0, "46140": 12698492928.0, "46145": 12698492928.0, "46150": 12698492928.0, "46155": 12698492928.0, "46160": 12698492928.0, "46165": 12698492928.0, "46170": 12698492928.0, "46175": 12698492928.0, "46180": 12698492928.0, "46185": 12698492928.0, "46190": 12698492928.0, "46195": 12698492928.0, "46200": 12698492928.0, "46205": 12698492928.0, "46210": 12698492928.0, "46215": 12698492928.0, "46220": 12698492928.0, "46225": 12698492928.0, "46230": 12698492928.0, "46235": 12698492928.0, "46240": 12698492928.0, "46245": 12698492928.0, "46250": 12698492928.0, "46255": 12698492928.0, "46260": 12698492928.0, "46265": 12698492928.0, "46270": 12698492928.0, "46275": 12698492928.0, "46280": 12698492928.0, "46285": 12698492928.0, "46290": 12698492928.0, "46295": 12698492928.0, "46300": 12698492928.0, "46305": 12698492928.0, "46310": 12698492928.0, "46315": 12698492928.0, "46320": 12698492928.0, "46325": 12698492928.0, "46330": 12698492928.0, "46335": 12698492928.0, "46340": 12698492928.0, "46345": 12698492928.0, "46350": 12698492928.0, "46355": 12698492928.0, "46360": 12698492928.0, "46365": 12698492928.0, "46370": 12698492928.0, "46375": 12698492928.0, "46380": 12698492928.0, "46385": 12698492928.0, "46390": 12698492928.0, "46395": 12698492928.0, "46400": 12698492928.0, "46405": 12698492928.0, "46410": 12698492928.0, "46415": 12698492928.0, "46420": 12698492928.0, "46425": 12698492928.0, "46430": 12698492928.0, "46435": 12698492928.0, "46440": 12698492928.0, "46445": 12698492928.0, "46450": 12698492928.0, "46455": 12698492928.0, "46460": 12698492928.0, "46465": 12698492928.0, "46470": 12698492928.0, "46475": 12698492928.0, "46480": 12698492928.0, "46485": 12698492928.0, "46490": 12698492928.0, "46495": 12698492928.0, "46500": 12698492928.0, "46505": 
12698492928.0, "46510": 12698492928.0, "46515": 12698492928.0, "46520": 12698492928.0, "46525": 12698492928.0, "46530": 12698492928.0, "46535": 12698492928.0, "46540": 12698492928.0, "46545": 12698492928.0, "46550": 12698492928.0, "46555": 12698492928.0, "46560": 12698492928.0, "46565": 12698492928.0, "46570": 12698492928.0, "46575": 12698492928.0, "46580": 12698492928.0, "46585": 12698492928.0, "46590": 12698492928.0, "46595": 12698492928.0, "46600": 12698492928.0, "46605": 12698492928.0, "46610": 12698492928.0, "46615": 12698492928.0, "46620": 12698492928.0, "46625": 12698492928.0, "46630": 12698492928.0, "46635": 12698492928.0, "46640": 12698492928.0, "46645": 12698492928.0, "46650": 12698492928.0, "46655": 12698492928.0, "46660": 12698492928.0, "46665": 12698492928.0, "46670": 12698492928.0, "46675": 12698492928.0, "46680": 12698492928.0, "46685": 12698492928.0, "46690": 12698492928.0, "46695": 12698492928.0, "46700": 12698492928.0, "46705": 12698492928.0, "46710": 12698492928.0, "46715": 12698492928.0, "46720": 12698492928.0, "46725": 12698492928.0, "46730": 12698492928.0, "46735": 12698492928.0, "46740": 12698492928.0, "46745": 12698492928.0, "46750": 12698492928.0, "46755": 12698492928.0, "46760": 12698492928.0, "46765": 12698492928.0, "46770": 12698492928.0, "46775": 12698492928.0, "46780": 12698492928.0, "46785": 12698492928.0, "46790": 12698492928.0, "46795": 12698492928.0, "46800": 12698492928.0, "46805": 12698492928.0, "46810": 12698492928.0, "46815": 12698492928.0, "46820": 12698492928.0, "46825": 12698492928.0, "46830": 12698492928.0, "46835": 12698492928.0, "46840": 12698492928.0, "46845": 12698492928.0, "46850": 12698492928.0, "46855": 12698492928.0, "46860": 12698492928.0, "46865": 12698492928.0, "46870": 12698492928.0, "46875": 12698492928.0, "46880": 12698492928.0, "46885": 12698492928.0, "46890": 12698492928.0, "46895": 12698492928.0, "46900": 12698492928.0, "46905": 12698492928.0, "46910": 12698492928.0, "46915": 12698492928.0, "46920": 12698492928.0, "46925": 12698492928.0, "46930": 12698492928.0, "46935": 12698492928.0, "46940": 12698492928.0, "46945": 12698492928.0, "46950": 12698492928.0, "46955": 12698492928.0, "46960": 12698492928.0, "46965": 12698492928.0, "46970": 12698492928.0, "46975": 12698492928.0, "46980": 12698492928.0, "46985": 12698492928.0, "46990": 12698492928.0, "46995": 12698492928.0, "47000": 12698492928.0, "47005": 12698492928.0, "47010": 12698492928.0, "47015": 12698492928.0, "47020": 12698492928.0, "47025": 12698492928.0, "47030": 12698492928.0, "47035": 12698492928.0, "47040": 12698492928.0, "47045": 12698492928.0, "47050": 12698492928.0, "47055": 12698492928.0, "47060": 12698492928.0, "47065": 12698492928.0, "47070": 12698492928.0, "47075": 12698492928.0, "47080": 12698492928.0, "47085": 12698492928.0, "47090": 12698492928.0, "47095": 12698492928.0, "47100": 12698492928.0, "47105": 12698492928.0, "47110": 12698492928.0, "47115": 12698492928.0, "47120": 12698492928.0, "47125": 12698492928.0, "47130": 12698492928.0, "47135": 12698492928.0, "47140": 12698492928.0, "47145": 12698492928.0, "47150": 12698492928.0, "47155": 12698492928.0, "47160": 12698492928.0, "47165": 12698492928.0, "47170": 12698492928.0, "47175": 12698492928.0, "47180": 12698492928.0, "47185": 12698492928.0, "47190": 12698492928.0, "47195": 12698492928.0, "47200": 12698492928.0, "47205": 12698492928.0, "47210": 12698492928.0, "47215": 12698492928.0, "47220": 12698492928.0, "47225": 12698492928.0, "47230": 12698492928.0, "47235": 12698492928.0, "47240": 12698492928.0, "47245": 
12698492928.0, "47250": 12698492928.0, "47255": 12698492928.0, "47260": 12698492928.0, "47265": 12698492928.0, "47270": 12698492928.0, "47275": 12698492928.0, "47280": 12698492928.0, "47285": 12698492928.0, "47290": 12698492928.0, "47295": 12698492928.0, "47300": 12698492928.0, "47305": 12698492928.0, "47310": 12698492928.0, "47315": 12698492928.0, "47320": 12698492928.0, "47325": 12698492928.0, "47330": 12698492928.0, "47335": 12698492928.0, "47340": 12698492928.0, "47345": 12698492928.0, "47350": 12698492928.0, "47355": 12698492928.0, "47360": 12698492928.0, "47365": 12698492928.0, "47370": 12698492928.0, "47375": 12698492928.0, "47380": 12698492928.0, "47385": 12698492928.0, "47390": 12698492928.0, "47395": 12698492928.0, "47400": 12698492928.0, "47405": 12698492928.0, "47410": 12698492928.0, "47415": 12698492928.0, "47420": 12698492928.0, "47425": 12698492928.0, "47430": 12698492928.0, "47435": 12698492928.0, "47440": 12698492928.0, "47445": 12698492928.0, "47450": 12698492928.0, "47455": 12698492928.0, "47460": 12698492928.0, "47465": 12698492928.0, "47470": 12698492928.0, "47475": 12698492928.0, "47480": 12698492928.0, "47485": 12698492928.0, "47490": 12698492928.0, "47495": 12698492928.0, "47500": 12698492928.0, "47505": 12698492928.0, "47510": 12698492928.0, "47515": 12698492928.0, "47520": 12698492928.0, "47525": 12698492928.0, "47530": 12698492928.0, "47535": 12698492928.0, "47540": 12698492928.0, "47545": 12698492928.0, "47550": 12698492928.0, "47555": 12698492928.0, "47560": 12698492928.0, "47565": 12698492928.0, "47570": 12698492928.0, "47575": 12698492928.0, "47580": 12698492928.0, "47585": 12698492928.0, "47590": 12698492928.0, "47595": 12698492928.0, "47600": 12698492928.0, "47605": 12698492928.0, "47610": 12698492928.0, "47615": 12698492928.0, "47620": 12698492928.0, "47625": 12698492928.0, "47630": 12698492928.0, "47635": 12698492928.0, "47640": 12698492928.0, "47645": 12698492928.0, "47650": 12698492928.0, "47655": 12698492928.0, "47660": 12698492928.0, "47665": 12698492928.0, "47670": 12698492928.0, "47675": 12698492928.0, "47680": 12698492928.0, "47685": 12698492928.0, "47690": 12698492928.0, "47695": 12698492928.0, "47700": 12698492928.0, "47705": 12698492928.0, "47710": 12698492928.0, "47715": 12698492928.0, "47720": 12698492928.0, "47725": 12698492928.0, "47730": 12698492928.0, "47735": 12698492928.0, "47740": 12698492928.0, "47745": 12698492928.0, "47750": 12698492928.0, "47755": 12698492928.0, "47760": 12698492928.0, "47765": 12698492928.0, "47770": 12698492928.0, "47775": 12698492928.0, "47780": 12698492928.0, "47785": 12698492928.0, "47790": 12698492928.0, "47795": 12698492928.0, "47800": 12698492928.0, "47805": 12698492928.0, "47810": 12698492928.0, "47815": 12698492928.0, "47820": 12698492928.0, "47825": 12698492928.0, "47830": 12698492928.0, "47835": 12698492928.0, "47840": 12698492928.0, "47845": 12698492928.0, "47850": 12698492928.0, "47855": 12698492928.0, "47860": 12698492928.0, "47865": 12698492928.0, "47870": 12698492928.0, "47875": 12698492928.0, "47880": 12698492928.0, "47885": 12698492928.0, "47890": 12698492928.0, "47895": 12698492928.0, "47900": 12698492928.0, "47905": 12698492928.0, "47910": 12698492928.0, "47915": 12698492928.0, "47920": 12698492928.0, "47925": 12698492928.0, "47930": 12698492928.0, "47935": 12698492928.0, "47940": 12698492928.0, "47945": 12698492928.0, "47950": 12698492928.0, "47955": 12698492928.0, "47960": 12698492928.0, "47965": 12698492928.0, "47970": 12698492928.0, "47975": 12698492928.0, "47980": 12698492928.0, "47985": 
12698492928.0, "47990": 12698492928.0, "47995": 12698492928.0, "48000": 12698492928.0, "48005": 12698492928.0, "48010": 12698492928.0, "48015": 12698492928.0, "48020": 12698492928.0, "48025": 12698492928.0, "48030": 12698492928.0, "48035": 12698492928.0, "48040": 12698492928.0, "48045": 12698492928.0, "48050": 12698492928.0, "48055": 12698492928.0, "48060": 12698492928.0, "48065": 12698492928.0, "48070": 12698492928.0, "48075": 12698492928.0, "48080": 12698492928.0, "48085": 12698492928.0, "48090": 12698492928.0, "48095": 12698492928.0, "48100": 12698492928.0, "48105": 12698492928.0, "48110": 12698492928.0, "48115": 12698492928.0, "48120": 12698492928.0, "48125": 12698492928.0, "48130": 12698492928.0, "48135": 12698492928.0, "48140": 12698492928.0, "48145": 12698492928.0, "48150": 12698492928.0, "48155": 12698492928.0, "48160": 12698492928.0, "48165": 12698492928.0, "48170": 12698492928.0, "48175": 12698492928.0, "48180": 12698492928.0, "48185": 12698492928.0, "48190": 12698492928.0, "48195": 12698492928.0, "48200": 12698492928.0, "48205": 12698492928.0, "48210": 12698492928.0, "48215": 12698492928.0, "48220": 12698492928.0, "48225": 12698492928.0, "48230": 12698492928.0, "48235": 12698492928.0, "48240": 12698492928.0, "48245": 12698492928.0, "48250": 12698492928.0, "48255": 12698492928.0, "48260": 12698492928.0, "48265": 12698492928.0, "48270": 12698492928.0, "48275": 12698492928.0, "48280": 12698492928.0, "48285": 12698492928.0, "48290": 12698492928.0, "48295": 12698492928.0, "48300": 12698492928.0, "48305": 12698492928.0, "48310": 12698492928.0, "48315": 12698492928.0, "48320": 12698492928.0, "48325": 12698492928.0, "48330": 12698492928.0, "48335": 12698492928.0, "48340": 12698492928.0, "48345": 12698492928.0, "48350": 12698492928.0, "48355": 12698492928.0, "48360": 12698492928.0, "48365": 12698492928.0, "48370": 12698492928.0, "48375": 12698492928.0, "48380": 12698492928.0, "48385": 12698492928.0, "48390": 12698492928.0, "48395": 12698492928.0, "48400": 12698492928.0, "48405": 12698492928.0, "48410": 12698492928.0, "48415": 12698492928.0, "48420": 12698492928.0, "48425": 12698492928.0, "48430": 12698492928.0, "48435": 12698492928.0, "48440": 12698492928.0, "48445": 12698492928.0, "48450": 12698492928.0, "48455": 12698492928.0, "48460": 12698492928.0, "48465": 12698492928.0, "48470": 12698492928.0, "48475": 12698492928.0, "48480": 12698492928.0, "48485": 12698492928.0, "48490": 12698492928.0, "48495": 12698492928.0, "48500": 12698492928.0, "48505": 12698492928.0, "48510": 12698492928.0, "48515": 12698492928.0, "48520": 12698492928.0, "48525": 12698492928.0, "48530": 12698492928.0, "48535": 12698492928.0, "48540": 12698492928.0, "48545": 12698492928.0, "48550": 12698492928.0, "48555": 12698492928.0, "48560": 12698492928.0, "48565": 12698492928.0, "48570": 12698492928.0, "48575": 12698492928.0, "48580": 12698492928.0, "48585": 12698492928.0, "48590": 12698492928.0, "48595": 12698492928.0, "48600": 12698492928.0, "48605": 12698492928.0, "48610": 12698492928.0, "48615": 12698492928.0, "48620": 12698492928.0, "48625": 12698492928.0, "48630": 12698492928.0, "48635": 12698492928.0, "48640": 12698492928.0, "48645": 12698492928.0, "48650": 12698492928.0, "48655": 12698492928.0, "48660": 12698492928.0, "48665": 12698492928.0, "48670": 12698492928.0, "48675": 12698492928.0, "48680": 12698492928.0, "48685": 12698492928.0, "48690": 12698492928.0, "48695": 12698492928.0, "48700": 12698492928.0, "48705": 12698492928.0, "48710": 12698492928.0, "48715": 12698492928.0, "48720": 12698492928.0, "48725": 
12698492928.0, "48730": 12698492928.0, "48735": 12698492928.0, "48740": 12698492928.0, "48745": 12698492928.0, "48750": 12698492928.0, "48755": 12698492928.0, "48760": 12698492928.0, "48765": 12698492928.0, "48770": 12698492928.0, "48775": 12698492928.0, "48780": 12698492928.0, "48785": 12698492928.0, "48790": 12698492928.0, "48795": 12698492928.0, "48800": 12698492928.0, "48805": 12698492928.0, "48810": 12698492928.0, "48815": 12698492928.0, "48820": 12698492928.0, "48825": 12698492928.0, "48830": 12698492928.0, "48835": 12698492928.0, "48840": 12698492928.0, "48845": 12698492928.0, "48850": 12698492928.0, "48855": 12698492928.0, "48860": 12698492928.0, "48865": 12698492928.0, "48870": 12698492928.0, "48875": 12698492928.0, "48880": 12698492928.0, "48885": 12698492928.0, "48890": 12698492928.0, "48895": 12698492928.0, "48900": 12698492928.0, "48905": 12698492928.0, "48910": 12698492928.0, "48915": 12698492928.0, "48920": 12698492928.0, "48925": 12698492928.0, "48930": 12698492928.0, "48935": 12698492928.0, "48940": 12698492928.0, "48945": 12698492928.0, "48950": 12698492928.0, "48955": 12698492928.0, "48960": 12698492928.0, "48965": 12698492928.0, "48970": 12698492928.0, "48975": 12698492928.0, "48980": 12698492928.0, "48985": 12698492928.0, "48990": 12698492928.0, "48995": 12698492928.0, "49000": 12698492928.0, "49005": 12698492928.0, "49010": 12698492928.0, "49015": 12698492928.0, "49020": 12698492928.0, "49025": 12698492928.0, "49030": 12698492928.0, "49035": 12698492928.0, "49040": 12698492928.0, "49045": 12698492928.0, "49050": 12698492928.0, "49055": 12698492928.0, "49060": 12698492928.0, "49065": 12698492928.0, "49070": 12698492928.0, "49075": 12698492928.0, "49080": 12698492928.0, "49085": 12698492928.0, "49090": 12698492928.0, "49095": 12698492928.0, "49100": 12698492928.0, "49105": 12698492928.0, "49110": 12698492928.0, "49115": 12698492928.0, "49120": 12698492928.0, "49125": 12698492928.0, "49130": 12698492928.0, "49135": 12698492928.0, "49140": 12698492928.0, "49145": 12698492928.0, "49150": 12698492928.0, "49155": 12698492928.0, "49160": 12698492928.0, "49165": 12698492928.0, "49170": 12698492928.0, "49175": 12698492928.0, "49180": 12698492928.0, "49185": 12698492928.0, "49190": 12698492928.0, "49195": 12698492928.0, "49200": 12698492928.0, "49205": 12698492928.0, "49210": 12698492928.0, "49215": 12698492928.0, "49220": 12698492928.0, "49225": 12698492928.0, "49230": 12698492928.0, "49235": 12698492928.0, "49240": 12698492928.0, "49245": 12698492928.0, "49250": 12698492928.0, "49255": 12698492928.0, "49260": 12698492928.0, "49265": 12698492928.0, "49270": 12698492928.0, "49275": 12698492928.0, "49280": 12698492928.0, "49285": 12698492928.0, "49290": 12698492928.0, "49295": 12698492928.0, "49300": 12698492928.0, "49305": 12698492928.0, "49310": 12698492928.0, "49315": 12698492928.0, "49320": 12698492928.0, "49325": 12698492928.0, "49330": 12698492928.0, "49335": 12698492928.0, "49340": 12698492928.0, "49345": 12698492928.0, "49350": 12698492928.0, "49355": 12698492928.0, "49360": 12698492928.0, "49365": 12698492928.0, "49370": 12698492928.0, "49375": 12698492928.0, "49380": 12698492928.0, "49385": 12698492928.0, "49390": 12698492928.0, "49395": 12698492928.0, "49400": 12698492928.0, "49405": 12698492928.0, "49410": 12698492928.0, "49415": 12698492928.0, "49420": 12698492928.0, "49425": 12698492928.0, "49430": 12698492928.0, "49435": 12698492928.0, "49440": 12698492928.0, "49445": 12698492928.0, "49450": 12698492928.0, "49455": 12698492928.0, "49460": 12698492928.0, "49465": 
12698492928.0, "49470": 12698492928.0, "49475": 12698492928.0, "49480": 12698492928.0, "49485": 12698492928.0, "49490": 12698492928.0, "49495": 12698492928.0, "49500": 12698492928.0, "49505": 12698492928.0, "49510": 12698492928.0, "49515": 12698492928.0, "49520": 12698492928.0, "49525": 12698492928.0, "49530": 12698492928.0, "49535": 12698492928.0, "49540": 12698492928.0, "49545": 12698492928.0, "49550": 12698492928.0, "49555": 12698492928.0, "49560": 12698492928.0, "49565": 12698492928.0, "49570": 12698492928.0, "49575": 12698492928.0, "49580": 12698492928.0, "49585": 12698492928.0, "49590": 12698492928.0, "49595": 12698492928.0, "49600": 12698492928.0, "49605": 12698492928.0, "49610": 12698492928.0, "49615": 12698492928.0, "49620": 12698492928.0, "49625": 12698492928.0, "49630": 12698492928.0, "49635": 12698492928.0, "49640": 12698492928.0, "49645": 12698492928.0, "49650": 12698492928.0, "49655": 12698492928.0, "49660": 12698492928.0, "49665": 12698492928.0, "49670": 12698492928.0, "49675": 12698492928.0, "49680": 12698492928.0, "49685": 12698492928.0, "49690": 12698492928.0, "49695": 12698492928.0, "49700": 12698492928.0, "49705": 12698492928.0, "49710": 12698492928.0, "49715": 12698492928.0, "49720": 12698492928.0, "49725": 12698492928.0, "49730": 12698492928.0, "49735": 12698492928.0, "49740": 12698492928.0, "49745": 12698492928.0, "49750": 12698492928.0, "49755": 12698492928.0, "49760": 12698492928.0, "49765": 12698492928.0, "49770": 12698492928.0, "49775": 12698492928.0, "49780": 12698492928.0, "49785": 12698492928.0, "49790": 12698492928.0, "49795": 12698492928.0, "49800": 12698492928.0, "49805": 12698492928.0, "49810": 12698492928.0, "49815": 12698492928.0, "49820": 12698492928.0, "49825": 12698492928.0, "49830": 12698492928.0, "49835": 12698492928.0, "49840": 12698492928.0, "49845": 12698492928.0, "49850": 12698492928.0, "49855": 12698492928.0, "49860": 12698492928.0, "49865": 12698492928.0, "49870": 12698492928.0, "49875": 12698492928.0, "49880": 12698492928.0, "49885": 12698492928.0, "49890": 12698492928.0, "49895": 12698492928.0, "49900": 12698492928.0, "49905": 12698492928.0, "49910": 12698492928.0, "49915": 12698492928.0, "49920": 12698492928.0, "49925": 12698492928.0, "49930": 12698492928.0, "49935": 12698492928.0, "49940": 12698492928.0, "49945": 12698492928.0, "49950": 12698492928.0, "49955": 12698492928.0, "49960": 12698492928.0, "49965": 12698492928.0, "49970": 12698492928.0, "49975": 12698492928.0, "49980": 12698492928.0, "49985": 12698492928.0, "49990": 12698492928.0, "49995": 12698492928.0, "50000": 12698492928.0, "50005": 12698492928.0, "50010": 12698492928.0, "50015": 12698492928.0, "50020": 12698492928.0, "50025": 12698492928.0, "50030": 12698492928.0, "50035": 12698492928.0, "50040": 12698492928.0, "50045": 12698492928.0, "50050": 12698492928.0, "50055": 12698492928.0, "50060": 12698492928.0, "50065": 12698492928.0, "50070": 12698492928.0, "50075": 12698492928.0, "50080": 12698492928.0, "50085": 12698492928.0, "50090": 12698492928.0, "50095": 12698492928.0, "50100": 12698492928.0, "50105": 12698492928.0, "50110": 12698492928.0, "50115": 12698492928.0, "50120": 12698492928.0, "50125": 12698492928.0, "50130": 12698492928.0, "50135": 12698492928.0, "50140": 12698492928.0, "50145": 12698492928.0, "50150": 12698492928.0, "50155": 12698492928.0, "50160": 12698492928.0, "50165": 12698492928.0, "50170": 12698492928.0, "50175": 12698492928.0, "50180": 12698492928.0, "50185": 12698492928.0, "50190": 12698492928.0, "50195": 12698492928.0, "50200": 12698492928.0, "50205": 
12698492928.0, "50210": 12698492928.0, "50215": 12698492928.0, "50220": 12698492928.0, "50225": 12698492928.0, "50230": 12698492928.0, "50235": 12698492928.0, "50240": 12698492928.0, "50245": 12698492928.0, "50250": 12698492928.0, "50255": 12698492928.0, "50260": 12698492928.0, "50265": 12698492928.0, "50270": 12698492928.0, "50275": 12698492928.0, "50280": 12698492928.0, "50285": 12698492928.0, "50290": 12698492928.0, "50295": 12698492928.0, "50300": 12698492928.0, "50305": 12698492928.0, "50310": 12698492928.0, "50315": 12698492928.0, "50320": 12698492928.0, "50325": 12698492928.0, "50330": 12698492928.0, "50335": 12698492928.0, "50340": 12698492928.0, "50345": 12698492928.0, "50350": 12698492928.0, "50355": 12698492928.0, "50360": 12698492928.0, "50365": 12698492928.0, "50370": 12698492928.0, "50375": 12698492928.0, "50380": 12698492928.0, "50385": 12698492928.0, "50390": 12698492928.0, "50395": 12698492928.0, "50400": 12698492928.0, "50405": 12698492928.0, "50410": 12698492928.0, "50415": 12698492928.0, "50420": 12698492928.0, "50425": 12698492928.0, "50430": 12698492928.0, "50435": 12698492928.0, "50440": 12698492928.0, "50445": 12698492928.0, "50450": 12698492928.0, "50455": 12698492928.0, "50460": 12698492928.0, "50465": 12698492928.0, "50470": 12698492928.0, "50475": 12698492928.0, "50480": 12698492928.0, "50485": 12698492928.0, "50490": 12698492928.0, "50495": 12698492928.0, "50500": 12698492928.0, "50505": 12698492928.0, "50510": 12698492928.0, "50515": 12698492928.0, "50520": 12698492928.0, "50525": 12698492928.0, "50530": 12698492928.0, "50535": 12698492928.0, "50540": 12698492928.0, "50545": 12698492928.0, "50550": 12698492928.0, "50555": 12698492928.0, "50560": 12698492928.0, "50565": 12698492928.0, "50570": 12698492928.0, "50575": 12698492928.0, "50580": 12698492928.0, "50585": 12698492928.0, "50590": 12698492928.0, "50595": 12698492928.0, "50600": 12698492928.0, "50605": 12698492928.0, "50610": 12698492928.0, "50615": 12698492928.0, "50620": 12698492928.0, "50625": 12698492928.0, "50630": 12698492928.0, "50635": 12698492928.0, "50640": 12698492928.0, "50645": 12698492928.0, "50650": 12698492928.0, "50655": 12698492928.0, "50660": 12698492928.0, "50665": 12698492928.0, "50670": 12698492928.0, "50675": 12698492928.0, "50680": 12698492928.0, "50685": 12698492928.0, "50690": 12698492928.0, "50695": 12698492928.0, "50700": 12698492928.0, "50705": 12698492928.0, "50710": 12698492928.0, "50715": 12698492928.0, "50720": 12698492928.0, "50725": 12698492928.0, "50730": 12698492928.0, "50735": 12698492928.0, "50740": 12698492928.0, "50745": 12698492928.0, "50750": 12698492928.0, "50755": 12698492928.0, "50760": 12698492928.0, "50765": 12698492928.0, "50770": 12698492928.0, "50775": 12698492928.0, "50780": 12698492928.0, "50785": 12698492928.0, "50790": 12698492928.0, "50795": 12698492928.0, "50800": 12698492928.0, "50805": 12698492928.0, "50810": 12698492928.0, "50815": 12698492928.0, "50820": 12698492928.0, "50825": 12698492928.0, "50830": 12698492928.0, "50835": 12698492928.0, "50840": 12698492928.0, "50845": 12698492928.0, "50850": 12698492928.0, "50855": 12698492928.0, "50860": 12698492928.0}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 5.85644, "105": "nan", "110": "nan", 
"115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 3.4691, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 3.47424, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 3.47478, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 3.47047, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 3.46947, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 3.46702, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 3.45553, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 3.45275, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 3.44663, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 3.4399, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 3.43441, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 3.42785, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", 
"1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 3.42504, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 3.42222, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 3.41907, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 3.417, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 3.4147, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 3.41218, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 4.27717, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 3.41032, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 3.40919, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 3.40833, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 3.40609, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 3.40489, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": 
"nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 3.40409, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 3.40395, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 3.40372, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 3.40371, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 3.40238, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 3.4021, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 4.02898, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 3.4094, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 3.40894, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 3.40865, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 3.40861, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 3.40792, 
"3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 3.40786, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 3.4073, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 3.40645, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 3.40633, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 3.40654, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 3.4073, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 3.40734, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 3.40718, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 3.40641, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 3.40562, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 3.40642, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": 
"nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 3.40616, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 3.40622, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 3.40468, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 3.40563, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 3.40655, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 3.40549, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 3.40619, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 3.40652, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 3.40596, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 3.40656, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 3.40614, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 3.4049, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", 
"6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 3.40544, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 3.40573, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 3.40602, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 3.40552, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 3.40592, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 3.60606, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 3.39688, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 4.24999, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 4.04737, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 4.98787, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 3.39702, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 3.397, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": 
"nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 3.39705, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 3.39697, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 6.72499, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 3.39515, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 3.39484, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 3.39486, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 3.39471, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 3.39551, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 3.39589, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 3.39811, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 3.3956, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 3.39719, "8405": "nan", "8410": "nan", 
"8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 3.39629, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 3.39667, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 3.39583, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 3.39649, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 3.39619, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 3.39737, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 3.39609, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 3.39752, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 3.39668, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 3.3952, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 3.39587, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": 
"nan", "9595": "nan", "9600": 3.39493, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 3.3963, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 3.39586, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 3.70604, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 3.40885, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 3.40479, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 3.40838, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 3.40877, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 3.41109, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 3.40964, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 3.40927, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 3.4088, "10705": "nan", "10710": "nan", "10715": "nan", 
"10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 3.40768, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 3.40745, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 3.40661, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 3.40752, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 3.40704, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 3.40664, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 3.40682, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 3.4063, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 3.40609, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 3.40649, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 3.40712, "11805": "nan", "11810": "nan", "11815": "nan", "11820": 
"nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 3.40613, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 3.40716, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 3.40595, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 3.40732, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 3.40687, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 3.40687, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 3.40746, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 3.40678, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 3.407, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": 3.40833, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 3.40823, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", 
"12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 3.40849, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 3.40748, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 3.40817, "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 3.40745, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 3.40721, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": 3.40862, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 3.51748, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 3.40124, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 3.39982, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": 3.39761, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 3.397, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": 
"nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 3.39596, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 3.39831, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 3.39737, "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 3.39668, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 3.39876, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": 3.39764, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 3.39762, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 3.39675, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 3.3965, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": 3.3969, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 3.395, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": 
"nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 3.39861, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 3.39888, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 3.39885, "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 3.39744, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 3.39856, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": 3.39779, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 3.39822, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 3.39793, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 3.39796, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": 3.39763, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 3.39823, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", 
"16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 3.39726, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 3.39768, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 3.39774, "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 3.39774, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 3.39834, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": 3.39782, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 3.39745, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 3.56716, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 3.40908, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": 3.40646, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 3.97787, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": 
"nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 3.40753, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 3.40909, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 3.4062, "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 3.40541, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 3.40443, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 3.40159, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": 3.40382, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 3.40406, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 3.40383, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": 3.40453, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 3.40355, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", 
"18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 3.40633, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 3.40499, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 3.40461, "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 3.40549, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 3.40617, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 3.40379, "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": 3.4048, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 3.40521, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 3.40363, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 3.40374, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 3.40151, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": 
"nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 3.40372, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 3.40429, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 3.40533, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 3.4037, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 3.40703, "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": 3.40326, "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": 3.40431, "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": 3.40349, "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": 3.40305, "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": 3.40294, "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": 3.5372, "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", 
"20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": 3.40267, "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": 3.40483, "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": 3.4119, "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": 3.41156, "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": 3.41332, "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": 3.41221, "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": 3.41147, "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": 3.41344, "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": 3.4137, "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": 3.41488, "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": 3.41231, "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": 
"nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": 3.41201, "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": 3.4118, "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 3.412, "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": 3.4006, "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": 3.40649, "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": 3.41145, "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": 3.40998, "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": 3.41196, "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": 3.40922, "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": 3.4117, "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": 3.4122, "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": 
"nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": 3.41179, "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": 3.41276, "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": 3.41285, "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": 3.41474, "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": 3.4149, "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": 3.41027, "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": 3.40294, "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": 3.40581, "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": 3.41129, "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": 3.4128, "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": 3.41179, "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", 
"23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": 3.41169, "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": 3.40521, "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": 3.40455, "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": 3.53741, "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": 3.40162, "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": 3.40328, "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": 3.40232, "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": 3.403, "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": 3.40335, "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": 3.40358, "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": 4.31736, "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": 
"nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": 4.77911, "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": 5.91011, "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": 4.39594, "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": 4.57265, "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": 5.22342, "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": 3.40343, "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": 3.40239, "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": 3.40274, "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": 3.40141, "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": 3.40159, "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": 3.40275, "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", 
"26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": 3.40031, "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": 3.40039, "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": 3.40101, "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": 3.40098, "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": 3.39979, "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": 3.4012, "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": 3.39993, "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": 3.39999, "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": 3.4005, "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": 3.40104, "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": 3.40011, "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": 
"nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": 3.40068, "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": 3.402, "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": 3.57882, "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": 3.39922, "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": 3.39935, "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": 3.39848, "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": 3.39883, "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": 3.39874, "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": 3.39922, "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": 3.39808, "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": 3.39884, "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", 
"28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": 3.39786, "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": 3.39805, "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": 3.39849, "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": 3.39919, "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": 3.40223, "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": 3.40794, "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": 3.40743, "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": 3.40474, "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": 3.40439, "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": 3.403, "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": 3.4052, "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", 
"29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": 3.40703, "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": 3.40579, "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": 3.4032, "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": 3.40333, "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": 3.40297, "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": 3.40361, "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": 3.40246, "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": 3.40424, "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": 3.40289, "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 3.4034, "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": 3.40194, "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": 
"nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": 3.40262, "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": 3.40122, "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": 3.40217, "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": 3.40369, "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": 3.40165, "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": 3.4002, "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": 3.98485, "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": 3.48883, "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": 3.48802, "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": 3.48992, "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": 3.48785, "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", 
"31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": 3.48904, "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": 3.48886, "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": 3.48877, "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": 3.48476, "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": 3.48487, "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": 3.48426, "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": 3.4846, "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": 3.4844, "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": 3.48555, "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": 3.48457, "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": 3.48464, "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": 
"nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": 3.48484, "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": 3.48546, "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": 3.48433, "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 3.48501, "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": 3.48473, "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": 3.48508, "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": 3.48496, "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": 3.4846, "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": 3.48517, "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": 3.48526, "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": 3.48494, "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", 
"33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": 3.48498, "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": 3.48522, "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": 3.48514, "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": 3.48537, "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": 3.48557, "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": 3.48658, "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": 3.48604, "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": 3.48554, "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": 3.48563, "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": 3.48588, "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": 3.5598, "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": 
"nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": 3.40263, "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": 3.40231, "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": 3.40228, "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": 3.40264, "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": 3.40219, "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": 3.40336, "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": 3.40306, "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": 3.40313, "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": 3.40344, "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": 3.40323, "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": 3.40245, "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", 
"36085": "nan", "36090": "nan", "36095": "nan", "36100": 3.40414, "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": 3.40451, "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": 3.40361, "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": 3.40508, "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": 3.4037, "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": 3.40458, "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": 3.40313, "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": 3.40367, "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": 3.40284, "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": 3.40398, "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": 3.40274, "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": 
"nan", "37190": "nan", "37195": "nan", "37200": 3.40362, "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": 3.40309, "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": 3.40347, "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": 3.40383, "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": 3.40316, "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": 3.40359, "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": 3.40166, "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": 3.40302, "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": 3.40272, "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": 3.40213, "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": 3.40274, "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", 
"38290": "nan", "38295": "nan", "38300": 3.4025, "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": 3.61562, "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": 3.39968, "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": 3.39881, "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": 3.39927, "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": 3.39889, "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": 3.39875, "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": 3.39953, "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": 3.3992, "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": 3.39942, "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": 3.39957, "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": 
"nan", "39395": "nan", "39400": 3.39935, "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": 3.39988, "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": 3.39914, "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": 3.39902, "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": 3.39842, "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": 3.39751, "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": 3.39811, "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": 3.39547, "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": 3.39727, "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": 3.39754, "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": 3.39772, "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", 
"40495": "nan", "40500": 3.39736, "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": 3.39684, "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": 3.39835, "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": 3.39831, "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": 3.39691, "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": 3.39684, "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": 3.39731, "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": 3.39755, "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": 3.39755, "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": 3.39568, "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 3.39615, "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": 
"nan", "41600": 3.39682, "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": 3.39575, "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": 3.39729, "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": 3.39707, "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": 3.39732, "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": 3.56917, "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": 3.40864, "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": 3.40745, "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": 3.40887, "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": 3.40842, "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": 3.40911, "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", 
"42700": 3.40879, "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": 3.40966, "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": 3.41005, "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": 3.4089, "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": 3.40927, "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": 3.40843, "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": 3.41078, "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": 3.40978, "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": 3.40965, "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": 3.41006, "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": 3.41118, "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": 
3.41104, "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": 3.4097, "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": 3.41078, "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": 3.40987, "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 3.4105, "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": 3.40991, "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": 3.40968, "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": 3.41015, "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": 3.41026, "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": 3.41007, "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": 3.40983, "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": 3.40956, 
"44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": 3.41008, "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": 3.40811, "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": 3.41047, "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": 3.41004, "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": 3.41084, "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": 3.40954, "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": 3.41012, "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": 3.98578, "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": 3.48367, "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": 3.48549, "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": 3.4854, "46005": 
"nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": 3.48511, "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": 3.48569, "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": 3.48441, "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": 3.48555, "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": 3.48541, "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": 3.48521, "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": 3.48541, "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": 3.48591, "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": 3.48741, "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": 3.4862, "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": 3.48581, "47105": "nan", 
"47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": 3.48614, "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": 3.48551, "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": 3.48617, "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": 3.48633, "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": 3.48532, "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": 3.48571, "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": 3.48599, "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": 3.48626, "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": 3.48553, "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": 3.48475, "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": 3.48611, "48205": "nan", "48210": 
"nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": 3.48545, "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": 3.48482, "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": 3.48575, "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": 3.48643, "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": 3.48583, "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": 3.48562, "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": 3.48579, "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": 3.48565, "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": 3.4852, "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": 4.18918, "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": 3.40446, "49305": "nan", "49310": "nan", 
"49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": 3.44159, "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": 3.40557, "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": 3.40489, "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": 3.40495, "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": 3.40541, "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": 3.40533, "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": 3.40537, "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": 3.40443, "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": 3.40498, "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": 3.40479, "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": 3.40586, "50405": "nan", "50410": "nan", "50415": 
"nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": 3.40508, "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": 3.40499, "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": 3.40502, "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": 3.40443, "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json index 8980f68..8339618 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/golden_values_0.9.0.json @@ -1,32049 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 53183, - "step_interval": 5, - "values": [ - 12.98419, - 12.91908, - 12.86794, - 11.80404, - 10.36067, - 10.02501, - 9.62196, - 9.49541, - 9.14868, - 8.94843, - 8.84285, - 8.739, - 8.65228, - 8.4803, - 8.27706, - 8.30883, - 8.20317, - 8.06388, - 8.01718, - 7.89727, - 7.77931, - 7.69837, - 7.57899, - 7.62154, - 7.50171, - 7.32272, - 7.32606, - 7.22861, - 7.15479, - 7.15683, - 7.03266, - 6.99443, - 6.88133, - 6.8455, - 6.90151, - 6.79922, - 6.7058, - 6.68805, - 6.67142, - 6.65646, - 6.64242, - 6.57541, - 6.53691, - 6.51028, - 6.53759, - 6.49952, - 6.40743, - 6.43299, - 6.36578, - 6.36631, - 6.3464, - 6.22929, - 6.26552, - 6.22281, - 6.24165, - 6.26106, - 6.20117, - 6.16901, - 6.08495, - 6.14694, - 6.11357, - 6.14213, - 6.03523, - 6.03786, - 6.00835, - 5.94486, - 6.04637, - 5.89847, - 5.9588, - 5.92718, - 5.88896, - 5.87864, - 5.84874, - 5.78918, - 5.82016, - 5.72101, - 5.77954, - 5.7496, - 5.74263, - 5.74162, - 5.67057, - 5.7516, - 5.69378, - 5.62135, - 5.58512, - 5.59513, - 5.62787, - 5.65226, - 5.56646, - 5.5468, - 5.55958, - 5.57677, - 5.59785, - 5.48969, - 5.45037, - 5.44465, - 5.4802, - 5.46002, - 5.43968, - 5.41462, - 5.43837, - 5.41611, - 5.4328, - 5.42789, - 5.35512, - 5.3339, - 5.36373, - 5.35987, - 5.37546, - 5.32334, - 5.34594, - 5.35304, - 5.27175, - 5.31666, - 5.3014, - 5.24568, - 5.3172, - 5.22113, - 5.17969, - 5.2957, - 5.18428, - 5.14478, - 5.17169, - 5.18525, - 5.19099, - 5.19711, - 5.14148, - 5.12108, - 5.11314, - 5.14493, - 5.12742, - 5.14362, - 5.05985, - 5.03878, - 5.07784, - 5.08032, - 5.04553, - 4.99105, - 5.0338, - 4.96559, - 5.01587, - 4.89967, - 4.89247, - 4.92978, - 4.87118, 
- 4.9224, - 4.91386, - 4.81396, - 4.81013, - 4.78872, - 4.85803, - 4.81016, - 4.75921, - 4.75526, - 4.75735, - 4.73742, - 4.74295, - 4.63332, - 4.64861, - 4.65814, - 4.64983, - 4.62055, - 4.64685, - 4.60608, - 4.60148, - 4.53416, - 4.57535, - 4.5439, - 4.51442, - 4.51116, - 4.4958, - 4.4381, - 4.54965, - 4.42558, - 4.44803, - 4.41747, - 4.41138, - 4.42972, - 4.43969, - 4.34347, - 4.45788, - 4.36819, - 4.39574, - 4.35585, - 4.32917, - 4.3533, - 4.32413, - 4.30382, - 4.36074, - 4.25067, - 4.30811, - 4.23739, - 4.21233, - 4.26024, - 4.23104, - 4.19611, - 4.23352, - 4.23584, - 4.18101, - 4.22907, - 4.1586, - 4.17231, - 4.20159, - 4.18734, - 4.15726, - 4.13587, - 4.10493, - 4.11823, - 4.07787, - 4.1653, - 4.10161, - 4.11814, - 4.10383, - 4.05246, - 4.10388, - 4.01047, - 4.06683, - 4.04952, - 4.04421, - 4.04533, - 4.0388, - 4.02576, - 3.96637, - 4.01096, - 4.03711, - 4.07673, - 4.02488, - 4.00188, - 3.98159, - 4.01223, - 3.97921, - 3.96743, - 3.97293, - 3.97897, - 3.85555, - 3.92234, - 3.94774, - 3.91426, - 3.94461, - 3.91534, - 3.87929, - 3.9411, - 3.88143, - 3.86679, - 3.8553, - 3.88821, - 3.83123, - 3.85266, - 3.84551, - 3.88909, - 3.84973, - 3.85953, - 3.82762, - 3.82071, - 3.84309, - 3.80714, - 3.83137, - 3.81531, - 3.78891, - 3.7809, - 3.75503, - 3.78689, - 3.7963, - 3.78109, - 3.70658, - 3.76395, - 3.80263, - 3.80963, - 3.73183, - 3.86115, - 3.73697, - 3.72256, - 3.73822, - 3.79105, - 3.73342, - 3.68097, - 3.73596, - 3.70602, - 3.75098, - 3.68107, - 3.66367, - 3.71469, - 3.69341, - 3.69057, - 3.66595, - 3.66825, - 3.64835, - 3.686, - 3.68602, - 3.65497, - 3.68047, - 3.66293, - 3.61094, - 3.62359, - 3.65903, - 3.59935, - 3.63558, - 3.5599, - 3.6547, - 3.63513, - 3.61388, - 3.58081, - 3.65811, - 3.61744, - 3.61355, - 3.62284, - 3.61707, - 3.55356, - 3.6029, - 3.56837, - 3.54483, - 3.56704, - 3.611, - 3.59329, - 3.58814, - 3.59871, - 3.51559, - 3.52262, - 3.56131, - 3.50849, - 3.60802, - 3.5961, - 3.48829, - 3.47554, - 3.48074, - 3.56141, - 3.4539, - 3.51638, - 3.51675, - 3.45733, - 3.51842, - 3.50406, - 3.49069, - 3.44249, - 3.47773, - 3.46363, - 3.55154, - 3.48545, - 3.46725, - 3.48369, - 3.43862, - 3.51175, - 3.47131, - 3.46854, - 3.45139, - 3.42636, - 3.4575, - 3.48506, - 3.42788, - 3.4359, - 3.4285, - 3.45492, - 3.45567, - 3.37167, - 3.38145, - 3.38504, - 3.41001, - 3.44639, - 3.4458, - 3.37718, - 3.43357, - 3.41693, - 3.40982, - 3.38623, - 3.42285, - 3.3654, - 3.3697, - 3.35109, - 3.46915, - 3.3605, - 3.42528, - 3.34254, - 3.31809, - 3.37538, - 3.3352, - 3.34618, - 3.37505, - 3.36954, - 3.34879, - 3.33113, - 3.29592, - 3.35797, - 3.28196, - 3.31722, - 3.36562, - 3.33716, - 3.35187, - 3.28997, - 3.31062, - 3.37159, - 3.27541, - 3.30545, - 3.33852, - 3.32558, - 3.27672, - 3.28821, - 3.25892, - 3.29762, - 3.29732, - 3.25202, - 3.31146, - 3.29029, - 3.30011, - 3.29203, - 3.23834, - 3.26237, - 3.3225, - 3.23396, - 3.27615, - 3.2507, - 3.26527, - 3.21649, - 3.25948, - 3.26662, - 3.24859, - 3.28338, - 3.30685, - 3.24206, - 3.2265, - 3.24162, - 3.22024, - 3.2434, - 3.17623, - 3.26649, - 3.18358, - 3.16895, - 3.186, - 3.24542, - 3.20835, - 3.17379, - 3.20578, - 3.23138, - 3.28144, - 3.29039, - 3.23571, - 3.23105, - 3.18598, - 3.20142, - 3.15922, - 3.21054, - 3.1879, - 3.18374, - 3.22548, - 3.18672, - 3.18695, - 3.22257, - 3.20346, - 3.22214, - 3.21936, - 3.14212, - 3.13831, - 3.16945, - 3.12089, - 3.22079, - 3.1756, - 3.19436, - 3.14402, - 3.14306, - 3.21999, - 3.17097, - 3.13181, - 3.09422, - 3.11322, - 3.13357, - 3.13941, - 3.11551, - 3.07559, - 3.15389, - 3.14509, - 3.14922, - 3.14026, - 
3.13487, - 3.15091, - 3.11567, - 3.09468, - 3.11667, - 3.09644, - 3.08766, - 3.07902, - 3.16316, - 3.12037, - 3.13054, - 3.10603, - 3.13903, - 3.12847, - 3.11667, - 3.08897, - 3.04173, - 3.10995, - 3.0873, - 3.13949, - 3.08735, - 3.14988, - 3.09382, - 3.0723, - 3.05878, - 3.05924, - 3.05126, - 3.06549, - 3.07887, - 3.13286, - 3.19623, - 3.08624, - 3.0392, - 3.04488, - 3.01615, - 3.08774, - 2.99622, - 3.02914, - 3.02947, - 3.09067, - 3.11401, - 3.08468, - 3.05285, - 3.02889, - 2.9696, - 3.07302, - 2.99563, - 3.03485, - 3.01352, - 3.02108, - 3.06754, - 3.02656, - 2.99796, - 3.03663, - 3.00679, - 2.98737, - 3.01097, - 3.05347, - 3.02116, - 3.01341, - 3.02204, - 3.06755, - 3.02376, - 3.0096, - 3.02609, - 2.99124, - 2.99161, - 3.01815, - 2.97387, - 3.01255, - 2.99293, - 3.04182, - 3.03241, - 3.00223, - 3.04234, - 3.07248, - 3.09676, - 3.10294, - 3.19843, - 3.06778, - 2.99661, - 3.02581, - 2.97053, - 2.98138, - 2.9383, - 2.93503, - 2.95344, - 2.96671, - 2.95751, - 2.96192, - 2.96042, - 2.96135, - 3.01044, - 2.97769, - 2.9561, - 3.09305, - 3.02437, - 2.97395, - 3.02485, - 2.981, - 2.948, - 2.9446, - 2.92086, - 2.94248, - 3.01167, - 2.91831, - 2.93553, - 2.98174, - 2.89493, - 2.973, - 2.96363, - 2.99416, - 2.96201, - 2.94617, - 2.98645, - 2.97847, - 2.94128, - 2.93834, - 2.93446, - 2.96779, - 2.95177, - 2.8867, - 2.96466, - 2.97525, - 2.93456, - 2.93265, - 2.85252, - 2.9222, - 2.97286, - 2.90604, - 2.98789, - 2.91011, - 2.9286, - 2.88644, - 2.89074, - 2.94705, - 2.9526, - 2.94425, - 2.94716, - 2.9229, - 2.90919, - 2.87595, - 2.97207, - 2.8887, - 2.91916, - 2.85855, - 2.92068, - 2.89862, - 2.91754, - 2.94756, - 2.85766, - 2.90518, - 2.91967, - 2.92002, - 2.89104, - 2.91582, - 2.89176, - 2.91633, - 2.87038, - 2.82494, - 2.85775, - 2.87309, - 2.93097, - 2.89861, - 2.84242, - 2.90866, - 2.83677, - 2.91942, - 2.94944, - 2.84783, - 2.85024, - 2.80212, - 2.89931, - 2.87082, - 2.85774, - 2.85876, - 2.93155, - 2.87041, - 2.87513, - 2.82293, - 2.85404, - 2.84661, - 2.846, - 2.88063, - 2.85407, - 2.84886, - 2.86981, - 2.79641, - 2.88895, - 2.89171, - 2.80083, - 2.85598, - 2.82243, - 2.91043, - 2.89791, - 2.82592, - 2.92519, - 2.88935, - 2.93367, - 2.93402, - 2.82809, - 2.87602, - 2.83651, - 2.84219, - 2.84956, - 2.84504, - 2.83968, - 2.82287, - 2.86714, - 2.85398, - 2.8445, - 2.821, - 2.80801, - 2.85356, - 2.86331, - 2.88855, - 2.84713, - 2.82335, - 2.83445, - 2.83796, - 2.86726, - 2.85303, - 2.8329, - 2.783, - 2.75861, - 2.87956, - 2.81064, - 2.84658, - 2.85592, - 2.80521, - 2.77466, - 2.82725, - 2.80499, - 2.81019, - 2.79605, - 2.80587, - 2.85307, - 2.85023, - 2.77447, - 2.77115, - 2.79416, - 2.83456, - 2.82582, - 2.79226, - 2.79049, - 2.78918, - 2.82485, - 2.86423, - 2.77456, - 2.81596, - 2.8141, - 2.85011, - 2.83399, - 2.83108, - 2.78418, - 2.76324, - 2.78822, - 2.84092, - 2.82659, - 2.83108, - 2.84488, - 2.82732, - 2.78741, - 2.86013, - 2.79839, - 2.83151, - 2.74863, - 2.73853, - 2.83164, - 2.74581, - 2.78201, - 2.76296, - 2.73349, - 2.81648, - 2.80169, - 2.78341, - 2.77496, - 2.76252, - 2.79892, - 2.77346, - 2.73542, - 2.78466, - 2.76123, - 2.80823, - 2.78521, - 2.76411, - 2.78331, - 2.74127, - 2.75627, - 2.82989, - 2.83589, - 2.81394, - 2.75656, - 2.79305, - 2.73452, - 2.80567, - 2.74423, - 2.77838, - 2.77774, - 2.79062, - 2.74438, - 2.76191, - 2.736, - 2.75827, - 2.83205, - 2.73078, - 2.77335, - 2.75757, - 2.74508, - 2.73489, - 2.77663, - 2.79235, - 2.77173, - 2.76863, - 2.69548, - 2.72459, - 2.71633, - 2.79954, - 2.74726, - 2.68926, - 2.74916, - 2.73581, - 2.76657, - 2.70092, - 2.75065, - 2.76108, - 
2.73907, - 2.74262, - 2.73596, - 2.80021, - 2.72376, - 2.73266, - 2.75955, - 2.74406, - 2.7226, - 2.75581, - 2.76734, - 2.7851, - 2.75595, - 2.6995, - 2.69929, - 2.71547, - 2.74243, - 2.70713, - 2.77846, - 2.72904, - 2.71435, - 2.70781, - 2.7877, - 2.7351, - 2.72156, - 2.77158, - 2.79335, - 2.74251, - 2.77298, - 2.73439, - 2.72965, - 2.74746, - 2.7702, - 2.74092, - 2.71081, - 2.69085, - 2.64368, - 2.69356, - 2.74094, - 2.70176, - 2.69215, - 2.67547, - 2.69488, - 2.77212, - 2.75865, - 2.66891, - 2.73618, - 2.73656, - 2.7385, - 2.75532, - 2.69934, - 2.67207, - 2.65692, - 2.69801, - 2.72377, - 2.71155, - 2.70355, - 2.70758, - 2.67797, - 2.71973, - 2.6857, - 2.69295, - 2.70358, - 2.68169, - 2.73862, - 2.67394, - 2.68954, - 2.73816, - 2.66373, - 2.68648, - 2.66598, - 2.7194, - 2.67951, - 2.70225, - 2.70741, - 2.72767, - 2.69146, - 2.68471, - 2.68885, - 2.70103, - 2.75286, - 2.70084, - 2.69385, - 2.67393, - 2.66134, - 2.73428, - 2.74802, - 2.66833, - 2.73713, - 2.68683, - 2.68042, - 2.6732, - 2.681, - 2.71559, - 2.68703, - 2.69938, - 2.68443, - 2.68584, - 2.6813, - 2.66379, - 2.61926, - 2.65717, - 2.68524, - 2.67082, - 2.64322, - 2.66691, - 2.71284, - 2.63993, - 2.64571, - 2.64294, - 2.62535, - 2.64654, - 2.69179, - 2.67462, - 2.69557, - 2.68745, - 2.66002, - 2.70778, - 2.68837, - 2.67251, - 2.67251, - 2.69555, - 2.70804, - 2.7017, - 2.63079, - 2.68191, - 2.68339, - 2.71709, - 2.65548, - 2.66565, - 2.62854, - 2.63167, - 2.6936, - 2.69876, - 2.65896, - 2.6522, - 2.6606, - 2.63048, - 2.67646, - 2.70366, - 2.65661, - 2.69764, - 2.65852, - 2.66819, - 2.67769, - 2.68095, - 2.67396, - 2.69301, - 2.67953, - 2.6367, - 2.59549, - 2.66537, - 2.6787, - 2.67001, - 2.7172, - 2.6412, - 2.6181, - 2.67814, - 2.65454, - 2.67921, - 2.69037, - 2.63561, - 2.66344, - 2.61298, - 2.69973, - 2.63666, - 2.65655, - 2.63696, - 2.68234, - 2.61719, - 2.65599, - 2.66065, - 2.64616, - 2.67095, - 2.59275, - 2.64435, - 2.65471, - 2.69924, - 2.64539, - 2.60645, - 2.66212, - 2.71533, - 2.68817, - 2.66263, - 2.64011, - 2.6414, - 2.66992, - 2.61474, - 2.64712, - 2.64041, - 2.6534, - 2.62336, - 2.66051, - 2.67468, - 2.60067, - 2.61385, - 2.61745, - 2.64008, - 2.57779, - 2.58634, - 2.64649, - 2.62782, - 2.61556, - 2.63198, - 2.67001, - 2.65, - 2.65546, - 2.62416, - 2.66066, - 2.65857, - 2.60059, - 2.60206, - 2.63312, - 2.61806, - 2.63129, - 2.62377, - 2.59056, - 2.66388, - 2.6675, - 2.62269, - 2.63428, - 2.62533, - 2.64793, - 2.65119, - 2.63294, - 2.59744, - 2.62581, - 2.64768, - 2.63606, - 2.61877, - 2.60563, - 2.65874, - 2.64996, - 2.65706, - 2.60299, - 2.63145, - 2.61945, - 2.63531, - 2.64766, - 2.63675, - 2.6322, - 2.62394, - 2.59152, - 2.60842, - 2.65137, - 2.60099, - 2.58619, - 2.622, - 2.60498, - 2.62332, - 2.67063, - 2.63481, - 2.55966, - 2.59884, - 2.57809, - 2.56345, - 2.61952, - 2.57435, - 2.57911, - 2.61293, - 2.56825, - 2.62418, - 2.57672, - 2.5657, - 2.55569, - 2.6583, - 2.59679, - 2.57316, - 2.52258, - 2.56856, - 2.56653, - 2.60895, - 2.60955, - 2.60742, - 2.60524, - 2.58511, - 2.61865, - 2.54429, - 2.57955, - 2.60742, - 2.60812, - 2.58147, - 2.61105, - 2.57176, - 2.58242, - 2.55882, - 2.5998, - 2.60262, - 2.54016, - 2.62618, - 2.6191, - 2.58602, - 2.63077, - 2.57095, - 2.60009, - 2.56923, - 2.56645, - 2.58642, - 2.59774, - 2.60899, - 2.56033, - 2.64222, - 2.59506, - 2.62285, - 2.59309, - 2.59015, - 2.56993, - 2.58954, - 2.61676, - 2.55554, - 2.57971, - 2.60456, - 2.55721, - 2.57422, - 2.57879, - 2.60781, - 2.51687, - 2.56004, - 2.50109, - 2.6096, - 2.57868, - 2.58675, - 2.60828, - 2.57062, - 2.58576, - 2.59196, - 
2.60063, - 2.55805, - 2.61719, - 2.62474, - 2.5756, - 2.52894, - 2.61512, - 2.57136, - 2.59832, - 2.57085, - 2.5437, - 2.54518, - 2.57654, - 2.61867, - 2.5582, - 2.57172, - 2.55028, - 2.53879, - 2.54825, - 2.58383, - 2.55716, - 2.55585, - 2.59319, - 2.58946, - 2.52414, - 2.54023, - 2.60288, - 2.59264, - 2.55414, - 2.56634, - 2.59225, - 2.56708, - 2.59247, - 2.58039, - 2.60525, - 2.55538, - 2.59248, - 2.59206, - 2.57052, - 2.55799, - 2.61974, - 2.54098, - 2.57906, - 2.56644, - 2.55381, - 2.5323, - 2.5873, - 2.55185, - 2.59869, - 2.53981, - 2.5837, - 2.57577, - 2.54476, - 2.5592, - 2.53242, - 2.52013, - 2.61405, - 2.53815, - 2.5568, - 2.54179, - 2.53228, - 2.57172, - 2.5355, - 2.53033, - 2.54588, - 2.56312, - 2.55533, - 2.54647, - 2.52223, - 2.54247, - 2.56063, - 2.55561, - 2.57172, - 2.54352, - 2.54393, - 2.50013, - 2.53398, - 2.55553, - 2.59468, - 2.52424, - 2.5382, - 2.57504, - 2.54588, - 2.57543, - 2.51161, - 2.55126, - 2.51887, - 2.53646, - 2.55676, - 2.5304, - 2.59277, - 2.54044, - 2.57123, - 2.6003, - 2.49646, - 2.53898, - 2.52565, - 2.56482, - 2.60363, - 2.57907, - 2.48965, - 2.50199, - 2.55087, - 2.55861, - 2.56767, - 2.55119, - 2.56728, - 2.56228, - 2.5453, - 2.57644, - 2.52451, - 2.5021, - 2.59152, - 2.54781, - 2.5724, - 2.51337, - 2.52616, - 2.53721, - 2.52757, - 2.52641, - 2.55016, - 2.54188, - 2.54979, - 2.56938, - 2.54981, - 2.52435, - 2.5921, - 2.5229, - 2.55128, - 2.55864, - 2.56234, - 2.52253, - 2.52182, - 2.55833, - 2.50951, - 2.56224, - 2.55813, - 2.56019, - 2.53151, - 2.52623, - 2.55852, - 2.54794, - 2.49912, - 2.54606, - 2.53852, - 2.54865, - 2.53166, - 2.53923, - 2.51674, - 2.50393, - 2.48558, - 2.52789, - 2.55185, - 2.54107, - 2.53168, - 2.5522, - 2.54562, - 2.54469, - 2.57939, - 2.4972, - 2.54304, - 2.51904, - 2.53839, - 2.52036, - 2.52717, - 2.52244, - 2.53731, - 2.54459, - 2.5515, - 2.56656, - 2.53226, - 2.44153, - 2.48606, - 2.49793, - 2.52143, - 2.51475, - 2.5032, - 2.53246, - 2.55709, - 2.52275, - 2.50349, - 2.53142, - 2.52539, - 2.56627, - 2.50335, - 2.49016, - 2.50717, - 2.45547, - 2.53239, - 2.54252, - 2.4854, - 2.47096, - 2.49029, - 2.5684, - 2.51388, - 2.52363, - 2.51274, - 2.53134, - 2.57428, - 2.51913, - 2.49343, - 2.52374, - 2.46945, - 2.51212, - 2.51176, - 2.53629, - 2.54166, - 2.48024, - 2.49983, - 2.50244, - 2.46708, - 2.50453, - 2.52617, - 2.52839, - 2.47474, - 2.54907, - 2.51612, - 2.50456, - 2.51193, - 2.53536, - 2.52447, - 2.57062, - 2.49637, - 2.53967, - 2.52325, - 2.49184, - 2.54194, - 2.46873, - 2.5236, - 2.49495, - 2.51795, - 2.4885, - 2.50693, - 2.50458, - 2.51677, - 2.46832, - 2.51039, - 2.48969, - 2.5417, - 2.51261, - 2.50471, - 2.50959, - 2.53441, - 2.47371, - 2.47498, - 2.47009, - 2.49353, - 2.51926, - 2.49677, - 2.48562, - 2.5401, - 2.48562, - 2.54572, - 2.47338, - 2.51237, - 2.50847, - 2.51632, - 2.50885, - 2.49845, - 2.46106, - 2.48298, - 2.49227, - 2.50196, - 2.49089, - 2.49019, - 2.49425, - 2.51916, - 2.4712, - 2.51248, - 2.52114, - 2.46329, - 2.47717, - 2.49578, - 2.53218, - 2.47959, - 2.4718, - 2.50834, - 2.48089, - 2.52138, - 2.54444, - 2.47143, - 2.50868, - 2.47049, - 2.49498, - 2.54311, - 2.51507, - 2.5268, - 2.50941, - 2.50588, - 2.47824, - 2.51134, - 2.54083, - 2.51842, - 2.49119, - 2.49874, - 2.48358, - 2.46988, - 2.49678, - 2.5227, - 2.52353, - 2.46098, - 2.4835, - 2.50653, - 2.52461, - 2.49873, - 2.51227, - 2.44116, - 2.43741, - 2.45375, - 2.48973, - 2.51768, - 2.5229, - 2.48912, - 2.46431, - 2.47457, - 2.47566, - 2.49241, - 2.46526, - 2.43836, - 2.48552, - 2.46722, - 2.50475, - 2.49552, - 2.49723, - 2.48812, - 2.4622, - 
2.52397, - 2.47532, - 2.49661, - 2.53455, - 2.45947, - 2.48932, - 2.50029, - 2.46941, - 2.52551, - 2.50054, - 2.43772, - 2.52083, - 2.47606, - 2.46856, - 2.47513, - 2.52144, - 2.46683, - 2.45432, - 2.48696, - 2.48036, - 2.50704, - 2.52042, - 2.5283, - 2.44247, - 2.47057, - 2.49015, - 2.48899, - 2.49301, - 2.5368, - 2.48499, - 2.477, - 2.50119, - 2.51599, - 2.48781, - 2.48645, - 2.50422, - 2.47308, - 2.46711, - 2.48569, - 2.51404, - 2.49852, - 2.49996, - 2.51047, - 2.50389, - 2.47199, - 2.45675, - 2.50458, - 2.50673, - 2.50761, - 2.48005, - 2.46156, - 2.46481, - 2.51002, - 2.48861, - 2.44232, - 2.47867, - 2.44272, - 2.51273, - 2.50682, - 2.48148, - 2.47751, - 2.49822, - 2.50632, - 2.49264, - 2.45902, - 2.44918, - 2.47203, - 2.50082, - 2.4936, - 2.42406, - 2.48076, - 2.48853, - 2.41644, - 2.44562, - 2.44746, - 2.48856, - 2.48456, - 2.45951, - 2.48788, - 2.47264, - 2.46361, - 2.49379, - 2.51188, - 2.49719, - 2.47921, - 2.47002, - 2.47636, - 2.45043, - 2.49448, - 2.48338, - 2.4714, - 2.47708, - 2.48189, - 2.43904, - 2.48078, - 2.46934, - 2.49312, - 2.45741, - 2.52217, - 2.49114, - 2.52001, - 2.50908, - 2.47191, - 2.45726, - 2.46327, - 2.51216, - 2.46282, - 2.46216, - 2.51233, - 2.45002, - 2.47264, - 2.47781, - 2.49215, - 2.43742, - 2.43408, - 2.41878, - 2.49157, - 2.49674, - 2.47366, - 2.461, - 2.47251, - 2.47477, - 2.48874, - 2.45467, - 2.42854, - 2.5089, - 2.4855, - 2.43789, - 2.45628, - 2.48046, - 2.4811, - 2.46436, - 2.46119, - 2.44883, - 2.44836, - 2.42589, - 2.54467, - 2.48679, - 2.42558, - 2.42779, - 2.45567, - 2.47442, - 2.46326, - 2.48475, - 2.45112, - 2.43099, - 2.44148, - 2.45381, - 2.48534, - 2.43155, - 2.4798, - 2.45362, - 2.48073, - 2.53277, - 2.4947, - 2.44257, - 2.47023, - 2.48024, - 2.45757, - 2.47364, - 2.43789, - 2.45069, - 2.43908, - 2.46809, - 2.44938, - 2.45398, - 2.46977, - 2.4516, - 2.41585, - 2.44424, - 2.48174, - 2.4399, - 2.46276, - 2.48028, - 2.50232, - 2.48649, - 2.44632, - 2.51331, - 2.45198, - 2.46772, - 2.47924, - 2.46174, - 2.41598, - 2.47149, - 2.50108, - 2.42365, - 2.4672, - 2.44726, - 2.45445, - 2.46386, - 2.47119, - 2.44565, - 2.43915, - 2.43623, - 2.42684, - 2.48212, - 2.47656, - 2.42247, - 2.47218, - 2.45116, - 2.4212, - 2.46954, - 2.44465, - 2.41909, - 2.48952, - 2.51748, - 2.52221, - 2.44872, - 2.44206, - 2.46907, - 2.43174, - 2.47023, - 2.43705, - 2.4185, - 2.4569, - 2.46952, - 2.48206, - 2.47408, - 2.4539, - 2.47445, - 2.42394, - 2.45395, - 2.44834, - 2.42642, - 2.44206, - 2.46098, - 2.45543, - 2.45796, - 2.44468, - 2.44098, - 2.42427, - 2.4239, - 2.43791, - 2.49488, - 2.43737, - 2.44396, - 2.46736, - 2.4683, - 2.45407, - 2.4542, - 2.44154, - 2.42637, - 2.42361, - 2.48675, - 2.45458, - 2.4439, - 2.43621, - 2.42222, - 2.49616, - 2.42608, - 2.46972, - 2.45859, - 2.44728, - 2.44741, - 2.43318, - 2.44258, - 2.43579, - 2.41052, - 2.44061, - 2.46347, - 2.42659, - 2.44777, - 2.44381, - 2.43926, - 2.4344, - 2.42818, - 2.43351, - 2.44399, - 2.39769, - 2.43949, - 2.48018, - 2.44648, - 2.45692, - 2.40909, - 2.43483, - 2.45647, - 2.39934, - 2.39287, - 2.43614, - 2.44456, - 2.48993, - 2.44823, - 2.44936, - 2.40574, - 2.40074, - 2.45376, - 2.45123, - 2.42492, - 2.41836, - 2.42335, - 2.43323, - 2.43933, - 2.43792, - 2.48867, - 2.43787, - 2.43378, - 2.41573, - 2.43863, - 2.46001, - 2.40407, - 2.44993, - 2.45847, - 2.40583, - 2.45827, - 2.45425, - 2.43504, - 2.41136, - 2.47834, - 2.40462, - 2.41501, - 2.46588, - 2.43642, - 2.44544, - 2.40237, - 2.40361, - 2.42828, - 2.42495, - 2.49418, - 2.37629, - 2.40121, - 2.48734, - 2.38038, - 2.43845, - 2.4517, - 2.4699, - 
2.41947, - 2.43187, - 2.44657, - 2.44123, - 2.41938, - 2.40222, - 2.42545, - 2.41268, - 2.49022, - 2.42048, - 2.38719, - 2.4488, - 2.42704, - 2.45788, - 2.44896, - 2.43458, - 2.47298, - 2.41989, - 2.45365, - 2.4551, - 2.38841, - 2.40977, - 2.42921, - 2.44837, - 2.43066, - 2.4104, - 2.44185, - 2.43418, - 2.42102, - 2.42816, - 2.4481, - 2.47833, - 2.41271, - 2.39075, - 2.43393, - 2.4301, - 2.39789, - 2.43808, - 2.42409, - 2.3998, - 2.4348, - 2.40504, - 2.43412, - 2.41964, - 2.47073, - 2.42032, - 2.4182, - 2.41686, - 2.4091, - 2.41202, - 2.4744, - 2.45341, - 2.42216, - 2.38629, - 2.42227, - 2.3949, - 2.42597, - 2.43345, - 2.4033, - 2.42782, - 2.42795, - 2.43672, - 2.43901, - 2.41077, - 2.3959, - 2.44701, - 2.4326, - 2.41483, - 2.40245, - 2.40167, - 2.41886, - 2.43415, - 2.46731, - 2.41425, - 2.40864, - 2.38945, - 2.39272, - 2.41816, - 2.39451, - 2.43208, - 2.41808, - 2.40419, - 2.47542, - 2.44037, - 2.37254, - 2.40797, - 2.4161, - 2.4555, - 2.41324, - 2.37544, - 2.40916, - 2.39928, - 2.36893, - 2.39834, - 2.42514, - 2.42034, - 2.41952, - 2.39531, - 2.41875, - 2.41904, - 2.40517, - 2.4455, - 2.39346, - 2.43404, - 2.41116, - 2.4104, - 2.39527, - 2.40085, - 2.35791, - 2.46814, - 2.41736, - 2.40424, - 2.4578, - 2.39449, - 2.44911, - 2.43566, - 2.43022, - 2.48053, - 2.39956, - 2.42973, - 2.43203, - 2.37597, - 2.41757, - 2.37497, - 2.43604, - 2.40956, - 2.38516, - 2.38833, - 2.44666, - 2.36002, - 2.46161, - 2.44621, - 2.38175, - 2.44658, - 2.39635, - 2.40173, - 2.4385, - 2.42944, - 2.4297, - 2.38568, - 2.43804, - 2.43503, - 2.39494, - 2.38995, - 2.42145, - 2.40455, - 2.38452, - 2.42348, - 2.40443, - 2.41578, - 2.41045, - 2.44383, - 2.37083, - 2.40343, - 2.36111, - 2.40886, - 2.41537, - 2.43849, - 2.47706, - 2.43722, - 2.38781, - 2.43626, - 2.43463, - 2.35431, - 2.40143, - 2.3807, - 2.3874, - 2.44311, - 2.41326, - 2.39779, - 2.4384, - 2.44513, - 2.43208, - 2.44734, - 2.41476, - 2.47766, - 2.37664, - 2.39589, - 2.40416, - 2.38793, - 2.37903, - 2.38143, - 2.36649, - 2.4344, - 2.38476, - 2.42088, - 2.38202, - 2.36308, - 2.43007, - 2.3996, - 2.43126, - 2.42001, - 2.38902, - 2.45338, - 2.40084, - 2.4181, - 2.37636, - 2.42268, - 2.38875, - 2.42246, - 2.40696, - 2.37248, - 2.41147, - 2.3964, - 2.42269, - 2.42928, - 2.44764, - 2.38972, - 2.38337, - 2.42218, - 2.41398, - 2.4144, - 2.44582, - 2.39876, - 2.40281, - 2.4479, - 2.40925, - 2.39995, - 2.37399, - 2.42343, - 2.39007, - 2.38361, - 2.35764, - 2.39641, - 2.39661, - 2.462, - 2.38067, - 2.3763, - 2.38298, - 2.36606, - 2.38746, - 2.43554, - 2.44202, - 2.42766, - 2.38651, - 2.38103, - 2.42624, - 2.39899, - 2.40719, - 2.41077, - 2.36751, - 2.45914, - 2.40187, - 2.3622, - 2.39932, - 2.40727, - 2.35981, - 2.39686, - 2.40559, - 2.40829, - 2.37755, - 2.37567, - 2.40269, - 2.41889, - 2.38588, - 2.41283, - 2.36274, - 2.39852, - 2.39475, - 2.38881, - 2.37977, - 2.38436, - 2.38116, - 2.45097, - 2.39336, - 2.35309, - 2.3193, - 2.39562, - 2.42489, - 2.35553, - 2.36392, - 2.41132, - 2.39906, - 2.38236, - 2.34957, - 2.38655, - 2.37886, - 2.4032, - 2.44724, - 2.42583, - 2.35575, - 2.40803, - 2.38587, - 2.32984, - 2.40585, - 2.39817, - 2.39539, - 2.36618, - 2.37288, - 2.38173, - 2.44428, - 2.36327, - 2.38855, - 2.38821, - 2.40833, - 2.40302, - 2.38264, - 2.34846, - 2.3694, - 2.41922, - 2.37434, - 2.42192, - 2.37205, - 2.3617, - 2.37145, - 2.34717, - 2.40241, - 2.31411, - 2.38114, - 2.4103, - 2.38677, - 2.35757, - 2.37079, - 2.35967, - 2.38387, - 2.41274, - 2.40819, - 2.37717, - 2.39562, - 2.36174, - 2.38422, - 2.42365, - 2.32535, - 2.39445, - 2.3837, - 2.44464, - 2.40211, 
- 2.39042, - 2.38827, - 2.36975, - 2.34269, - 2.41897, - 2.42899, - 2.35431, - 2.38611, - 2.37312, - 2.3915, - 2.38932, - 2.4127, - 2.33445, - 2.34791, - 2.34999, - 2.37074, - 2.44889, - 2.35828, - 2.38525, - 2.37374, - 2.36779, - 2.41399, - 2.38956, - 2.36053, - 2.36688, - 2.36029, - 2.41255, - 2.36126, - 2.42017, - 2.37035, - 2.3579, - 2.39731, - 2.37274, - 2.36164, - 2.3406, - 2.35618, - 2.41837, - 2.40452, - 2.38041, - 2.35802, - 2.3776, - 2.35, - 2.34043, - 2.41691, - 2.37895, - 2.32466, - 2.35918, - 2.36973, - 2.37125, - 2.36101, - 2.35971, - 2.37979, - 2.37985, - 2.30211, - 2.35671, - 2.37984, - 2.36267, - 2.36033, - 2.41398, - 2.36709, - 2.3638, - 2.37147, - 2.38241, - 2.37443, - 2.40214, - 2.38842, - 2.3924, - 2.35504, - 2.40521, - 2.35751, - 2.3778, - 2.35868, - 2.34116, - 2.37323, - 2.37569, - 2.35289, - 2.37776, - 2.36834, - 2.37741, - 2.37573, - 2.33007, - 2.37332, - 2.36447, - 2.36356, - 2.34745, - 2.41894, - 2.3699, - 2.32165, - 2.3626, - 2.42148, - 2.36015, - 2.30794, - 2.34737, - 2.39952, - 2.31543, - 2.41693, - 2.35574, - 2.28794, - 2.38521, - 2.33121, - 2.38382, - 2.38452, - 2.34225, - 2.38258, - 2.32508, - 2.35264, - 2.34782, - 2.35467, - 2.31892, - 2.33791, - 2.33464, - 2.40442, - 2.36503, - 2.33589, - 2.36791, - 2.38653, - 2.37104, - 2.39368, - 2.34645, - 2.38549, - 2.32241, - 2.3949, - 2.37387, - 2.35282, - 2.34102, - 2.37072, - 2.33689, - 2.34766, - 2.32982, - 2.38524, - 2.33179, - 2.36397, - 2.33285, - 2.32107, - 2.32406, - 2.30448, - 2.39387, - 2.40308, - 2.36095, - 2.3717, - 2.33301, - 2.31196, - 2.40569, - 2.37152, - 2.37446, - 2.36441, - 2.31796, - 2.36133, - 2.35281, - 2.34712, - 2.36205, - 2.36266, - 2.30883, - 2.36213, - 2.35561, - 2.40853, - 2.37288, - 2.34161, - 2.3968, - 2.36399, - 2.33852, - 2.36198, - 2.34423, - 2.32484, - 2.33432, - 2.36546, - 2.33976, - 2.31307, - 2.3184, - 2.31741, - 2.31843, - 2.28965, - 2.34009, - 2.30929, - 2.39347, - 2.31745, - 2.35377, - 2.33591, - 2.34666, - 2.37045, - 2.32797, - 2.31528, - 2.36211, - 2.37247, - 2.38143, - 2.31443, - 2.34936, - 2.33315, - 2.37157, - 2.34943, - 2.39519, - 2.34092, - 2.36524, - 2.36448, - 2.34077, - 2.33426, - 2.37359, - 2.31207, - 2.27711, - 2.32888, - 2.34586, - 2.36063, - 2.3318, - 2.31964, - 2.34302, - 2.37103, - 2.36492, - 2.31915, - 2.34072, - 2.35957, - 2.3319, - 2.33556, - 2.3562, - 2.38816, - 2.2878, - 2.31349, - 2.36829, - 2.28982, - 2.34635, - 2.36405, - 2.38149, - 2.33435, - 2.33024, - 2.29923, - 2.30443, - 2.31556, - 2.35307, - 2.33861, - 2.30846, - 2.31353, - 2.29566, - 2.32083, - 2.35146, - 2.29441, - 2.35297, - 2.32767, - 2.34018, - 2.34667, - 2.33407, - 2.28717, - 2.30826, - 2.3541, - 2.35607, - 2.38586, - 2.35185, - 2.30789, - 2.36756, - 2.36125, - 2.34786, - 2.36249, - 2.32214, - 2.30432, - 2.35128, - 2.34236, - 2.37517, - 2.31364, - 2.32562, - 2.31039, - 2.34544, - 2.40571, - 2.33947, - 2.34913, - 2.36287, - 2.3212, - 2.30485, - 2.36056, - 2.31541, - 2.32215, - 2.34605, - 2.34271, - 2.36568, - 2.32517, - 2.34936, - 2.34077, - 2.34932, - 2.29629, - 2.32931, - 2.35075, - 2.362, - 2.33497, - 2.35549, - 2.32194, - 2.36096, - 2.36015, - 2.29582, - 2.27681, - 2.32794, - 2.34127, - 2.30457, - 2.3071, - 2.32661, - 2.35084, - 2.33485, - 2.32981, - 2.29971, - 2.29722, - 2.32502, - 2.33562, - 2.34413, - 2.31711, - 2.32385, - 2.3013, - 2.34517, - 2.31441, - 2.29988, - 2.33875, - 2.30426, - 2.32811, - 2.27243, - 2.31843, - 2.32735, - 2.35129, - 2.31243, - 2.33749, - 2.27449, - 2.3257, - 2.25419, - 2.29672, - 2.3124, - 2.31962, - 2.33483, - 2.30304, - 2.30413, - 2.33105, - 2.31994, - 
2.35972, - 2.31645, - 2.33765, - 2.33977, - 2.31776, - 2.30349, - 2.31356, - 2.34195, - 2.35769, - 2.37973, - 2.28063, - 2.29228, - 2.33746, - 2.29104, - 2.29211, - 2.33338, - 2.31777, - 2.27725, - 2.307, - 2.33335, - 2.30224, - 2.30553, - 2.31524, - 2.31688, - 2.34076, - 2.29786, - 2.31358, - 2.33641, - 2.29565, - 2.28182, - 2.33547, - 2.30591, - 2.27764, - 2.30327, - 2.33003, - 2.32329, - 2.32525, - 2.28749, - 2.31093, - 2.32738, - 2.33409, - 2.31175, - 2.33567, - 2.31535, - 2.311, - 2.30972, - 2.33276, - 2.29739, - 2.32964, - 2.30207, - 2.27677, - 2.3503, - 2.33818, - 2.33365, - 2.28167, - 2.31607, - 2.30898, - 2.32936, - 2.3051, - 2.30535, - 2.29316, - 2.30575, - 2.32814, - 2.29362, - 2.25537, - 2.25836, - 2.34003, - 2.35558, - 2.31729, - 2.32946, - 2.33906, - 2.32978, - 2.33966, - 2.33326, - 2.29669, - 2.29924, - 2.32072, - 2.35547, - 2.3035, - 2.29738, - 2.24206, - 2.33233, - 2.33684, - 2.32312, - 2.28649, - 2.27303, - 2.33374, - 2.3125, - 2.34015, - 2.3112, - 2.3141, - 2.31768, - 2.28583, - 2.31022, - 2.26557, - 2.32764, - 2.26705, - 2.28732, - 2.35371, - 2.2953, - 2.31997, - 2.30031, - 2.31895, - 2.33904, - 2.36762, - 2.34275, - 2.30489, - 2.31493, - 2.32912, - 2.291, - 2.29867, - 2.29168, - 2.29001, - 2.24825, - 2.30495, - 2.29858, - 2.31002, - 2.3044, - 2.28227, - 2.31635, - 2.30022, - 2.31452, - 2.29895, - 2.3311, - 2.31911, - 2.30548, - 2.23997, - 2.3353, - 2.36311, - 2.27473, - 2.2722, - 2.29061, - 2.3044, - 2.32973, - 2.26708, - 2.31933, - 2.33451, - 2.3549, - 2.26994, - 2.32027, - 2.28571, - 2.3195, - 2.27086, - 2.28465, - 2.29026, - 2.31531, - 2.32206, - 2.30039, - 2.33538, - 2.27727, - 2.30024, - 2.31034, - 2.2913, - 2.33377, - 2.3245, - 2.28124, - 2.3192, - 2.36317, - 2.30549, - 2.33118, - 2.32956, - 2.29643, - 2.33456, - 2.29492, - 2.27967, - 2.32514, - 2.26525, - 2.34146, - 2.31721, - 2.3095, - 2.31842, - 2.27477, - 2.36543, - 2.30209, - 2.33102, - 2.29281, - 2.30537, - 2.30877, - 2.28741, - 2.31256, - 2.27592, - 2.33802, - 2.29691, - 2.33722, - 2.28763, - 2.27307, - 2.28154, - 2.26603, - 2.33762, - 2.32565, - 2.26349, - 2.31934, - 2.30015, - 2.30581, - 2.32179, - 2.29746, - 2.31545, - 2.27709, - 2.29831, - 2.32369, - 2.32282, - 2.29007, - 2.26772, - 2.27034, - 2.31313, - 2.27646, - 2.27135, - 2.2711, - 2.31532, - 2.26508, - 2.33919, - 2.31847, - 2.28195, - 2.30779, - 2.24485, - 2.32588, - 2.31598, - 2.28815, - 2.28607, - 2.30007, - 2.30106, - 2.2734, - 2.24112, - 2.2586, - 2.31028, - 2.28471, - 2.32799, - 2.31743, - 2.2891, - 2.2722, - 2.26724, - 2.33275, - 2.27824, - 2.28047, - 2.27328, - 2.25161, - 2.34134, - 2.31941, - 2.27379, - 2.278, - 2.30143, - 2.27707, - 2.28433, - 2.31914, - 2.27659, - 2.28272, - 2.29019, - 2.29962, - 2.29996, - 2.32479, - 2.2974, - 2.27877, - 2.27834, - 2.29428, - 2.30593, - 2.30184, - 2.31135, - 2.33953, - 2.22678, - 2.30668, - 2.24082, - 2.27051, - 2.31478, - 2.30401, - 2.26316, - 2.28387, - 2.25895, - 2.24659, - 2.25712, - 2.31148, - 2.21367, - 2.28321, - 2.26488, - 2.26945, - 2.26141, - 2.3179, - 2.309, - 2.27742, - 2.30301, - 2.28325, - 2.29617, - 2.25262, - 2.26874, - 2.27095, - 2.30893, - 2.27123, - 2.29399, - 2.29153, - 2.27741, - 2.27633, - 2.27156, - 2.26737, - 2.28168, - 2.30604, - 2.30977, - 2.24271, - 2.26894, - 2.26102, - 2.22229, - 2.25247, - 2.30878, - 2.27168, - 2.30424, - 2.28097, - 2.29077, - 2.25369, - 2.27975, - 2.22882, - 2.25941, - 2.32174, - 2.31329, - 2.29222, - 2.29252, - 2.31835, - 2.27207, - 2.27184, - 2.32122, - 2.26802, - 2.26493, - 2.29336, - 2.25048, - 2.28585, - 2.30154, - 2.32283, - 2.27142, - 2.2949, - 
2.30116, - 2.29588, - 2.28977, - 2.28252, - 2.28442, - 2.27311, - 2.28592, - 2.25947, - 2.24684, - 2.23176, - 2.286, - 2.26311, - 2.24889, - 2.31326, - 2.26237, - 2.29902, - 2.31138, - 2.26962, - 2.25494, - 2.23909, - 2.29693, - 2.29296, - 2.30222, - 2.23661, - 2.23045, - 2.28157, - 2.30548, - 2.32873, - 2.27367, - 2.19852, - 2.28908, - 2.22143, - 2.31705, - 2.29283, - 2.26405, - 2.27247, - 2.22796, - 2.24569, - 2.27137, - 2.30207, - 2.27222, - 2.24397, - 2.25135, - 2.25066, - 2.2795, - 2.23164, - 2.30015, - 2.263, - 2.27733, - 2.27297, - 2.26413, - 2.24749, - 2.26877, - 2.27833, - 2.29671, - 2.32373, - 2.34461, - 2.27396, - 2.27066, - 2.32654, - 2.26566, - 2.27202, - 2.28009, - 2.29428, - 2.34702, - 2.21399, - 2.22244, - 2.28987, - 2.2678, - 2.30161, - 2.27397, - 2.25324, - 2.24715, - 2.26753, - 2.24871, - 2.28586, - 2.28708, - 2.20494, - 2.26623, - 2.2741, - 2.30765, - 2.28199, - 2.26124, - 2.21894, - 2.25519, - 2.24896, - 2.26031, - 2.22856, - 2.29874, - 2.2271, - 2.27081, - 2.22766, - 2.27599, - 2.25844, - 2.29885, - 2.2347, - 2.28497, - 2.31597, - 2.27505, - 2.23547, - 2.29681, - 2.24009, - 2.24159, - 2.25183, - 2.27174, - 2.27964, - 2.2845, - 2.2952, - 2.26439, - 2.23067, - 2.25705, - 2.2831, - 2.30329, - 2.22301, - 2.23729, - 2.27918, - 2.25807, - 2.26794, - 2.2421, - 2.2466, - 2.26048, - 2.21555, - 2.3154, - 2.25099, - 2.24706, - 2.31945, - 2.2796, - 2.25629, - 2.31402, - 2.26547, - 2.27183, - 2.24525, - 2.25277, - 2.30176, - 2.20707, - 2.22433, - 2.22723, - 2.25621, - 2.25819, - 2.30353, - 2.2426, - 2.26048, - 2.20818, - 2.34739, - 2.29828, - 2.2285, - 2.24406, - 2.25237, - 2.25692, - 2.30262, - 2.26141, - 2.24704, - 2.22083, - 2.23604, - 2.2809, - 2.21527, - 2.23686, - 2.28301, - 2.28014, - 2.25412, - 2.29256, - 2.25096, - 2.22856, - 2.19706, - 2.24572, - 2.23912, - 2.28371, - 2.22828, - 2.26356, - 2.28211, - 2.28233, - 2.22137, - 2.26463, - 2.26212, - 2.2908, - 2.29192, - 2.31109, - 2.3013, - 2.25506, - 2.27361, - 2.28979, - 2.27712, - 2.28039, - 2.27155, - 2.27079, - 2.28127, - 2.22103, - 2.26647, - 2.30047, - 2.25897, - 2.23723, - 2.20951, - 2.22234, - 2.27251, - 2.26997, - 2.25904, - 2.26619, - 2.22155, - 2.24171, - 2.2541, - 2.29241, - 2.26703, - 2.28625, - 2.24318, - 2.24285, - 2.23389, - 2.25815, - 2.28947, - 2.26555, - 2.25154, - 2.2828, - 2.19781, - 2.2746, - 2.24191, - 2.24755, - 2.26066, - 2.30043, - 2.23375, - 2.28005, - 2.25571, - 2.25661, - 2.26161, - 2.2714, - 2.26885, - 2.30167, - 2.27867, - 2.22438, - 2.2331, - 2.27016, - 2.26315, - 2.23641, - 2.30983, - 2.2661, - 2.2989, - 2.24743, - 2.2647, - 2.25619, - 2.2609, - 2.28082, - 2.30966, - 2.26783, - 2.22843, - 2.23044, - 2.25996, - 2.23219, - 2.25266, - 2.25615, - 2.26885, - 2.273, - 2.26008, - 2.24419, - 2.22667, - 2.26038, - 2.24018, - 2.22072, - 2.2686, - 2.24281, - 2.25009, - 2.20681, - 2.23877, - 2.32055, - 2.22457, - 2.25065, - 2.24086, - 2.2145, - 2.21653, - 2.26435, - 2.27299, - 2.23922, - 2.28132, - 2.2703, - 2.277, - 2.25949, - 2.26024, - 2.26521, - 2.21293, - 2.25174, - 2.24268, - 2.22512, - 2.30825, - 2.27955, - 2.23685, - 2.24023, - 2.26787, - 2.24209, - 2.23372, - 2.27888, - 2.27049, - 2.25464, - 2.27517, - 2.21792, - 2.29258, - 2.27042, - 2.27142, - 2.26137, - 2.25661, - 2.21069, - 2.29061, - 2.26525, - 2.22938, - 2.23041, - 2.25913, - 2.25231, - 2.25351, - 2.25021, - 2.21251, - 2.19543, - 2.25193, - 2.22868, - 2.17977, - 2.28988, - 2.2263, - 2.23866, - 2.25927, - 2.20465, - 2.24969, - 2.2294, - 2.25592, - 2.25309, - 2.23502, - 2.20113, - 2.2426, - 2.23169, - 2.24738, - 2.22658, - 2.21879, - 2.21201, 
- 2.2637, - 2.27222, - 2.25559, - 2.24115, - 2.2294, - 2.27283, - 2.27579, - 2.20695, - 2.25348, - 2.25106, - 2.29619, - 2.24014, - 2.24642, - 2.24057, - 2.24666, - 2.23374, - 2.23241, - 2.25486, - 2.28059, - 2.24519, - 2.2445, - 2.23902, - 2.23049, - 2.26964, - 2.23568, - 2.27511, - 2.23997, - 2.28266, - 2.25762, - 2.24458, - 2.2207, - 2.23317, - 2.24448, - 2.24122, - 2.26386, - 2.24813, - 2.25642, - 2.26275, - 2.22676, - 2.25657, - 2.24688, - 2.2559, - 2.27123, - 2.27252, - 2.3105, - 2.22187, - 2.24516, - 2.2509, - 2.27687, - 2.21641, - 2.22104, - 2.23885, - 2.22289, - 2.24141, - 2.24335, - 2.22094, - 2.26742, - 2.21861, - 2.20891, - 2.2061, - 2.28183, - 2.24503, - 2.28091, - 2.22907, - 2.22878, - 2.28197, - 2.24617, - 2.23746, - 2.26137, - 2.26632, - 2.26075, - 2.24664, - 2.25997, - 2.27046, - 2.21454, - 2.24372, - 2.24965, - 2.21759, - 2.22405, - 2.20312, - 2.28102, - 2.2421, - 2.20396, - 2.20726, - 2.20819, - 2.23877, - 2.20466, - 2.26779, - 2.24921, - 2.23536, - 2.25159, - 2.23653, - 2.23253, - 2.24051, - 2.27492, - 2.21496, - 2.20726, - 2.26435, - 2.26531, - 2.22791, - 2.26591, - 2.18891, - 2.30193, - 2.24878, - 2.20736, - 2.23167, - 2.23327, - 2.19672, - 2.1943, - 2.20467, - 2.23222, - 2.25391, - 2.20702, - 2.21312, - 2.21716, - 2.24114, - 2.21358, - 2.23025, - 2.21369, - 2.26312, - 2.20486, - 2.19672, - 2.24469, - 2.19429, - 2.19666, - 2.24965, - 2.24365, - 2.26443, - 2.23697, - 2.28952, - 2.19175, - 2.23533, - 2.22425, - 2.26002, - 2.26293, - 2.25339, - 2.25575, - 2.21611, - 2.28037, - 2.19663, - 2.24342, - 2.24181, - 2.22055, - 2.23641, - 2.16185, - 2.27231, - 2.22533, - 2.20262, - 2.2042, - 2.2072, - 2.25298, - 2.22359, - 2.21866, - 2.23734, - 2.22935, - 2.24302, - 2.23509, - 2.26453, - 2.24443, - 2.20471, - 2.21579, - 2.27924, - 2.19698, - 2.29148, - 2.25224, - 2.1962, - 2.2656, - 2.22161, - 2.23362, - 2.23203, - 2.19204, - 2.24016, - 2.22655, - 2.22054, - 2.23323, - 2.22276, - 2.22851, - 2.19944, - 2.2511, - 2.2176, - 2.23201, - 2.23884, - 2.20434, - 2.21057, - 2.18305, - 2.21192, - 2.21541, - 2.24033, - 2.24525, - 2.17242, - 2.27383, - 2.20978, - 2.24201, - 2.22347, - 2.19631, - 2.23404, - 2.24319, - 2.18459, - 2.27573, - 2.22857, - 2.2158, - 2.23134, - 2.22049, - 2.26988, - 2.26421, - 2.19765, - 2.19646, - 2.23463, - 2.2113, - 2.2507, - 2.1872, - 2.23676, - 2.20931, - 2.24544, - 2.27864, - 2.20702, - 2.20036, - 2.17364, - 2.24238, - 2.23131, - 2.23186, - 2.25269, - 2.18756, - 2.23956, - 2.24208, - 2.22705, - 2.2445, - 2.24644, - 2.22745, - 2.21172, - 2.26562, - 2.21675, - 2.20704, - 2.21538, - 2.22449, - 2.24353, - 2.24164, - 2.23281, - 2.16963, - 2.23757, - 2.24092, - 2.22678, - 2.26761, - 2.20965, - 2.19952, - 2.20648, - 2.2957, - 2.24925, - 2.18888, - 2.19019, - 2.18239, - 2.21649, - 2.26061, - 2.22504, - 2.22334, - 2.22078, - 2.23979, - 2.23915, - 2.21966, - 2.20811, - 2.20911, - 2.2271, - 2.20099, - 2.21655, - 2.24889, - 2.21637, - 2.23056, - 2.20812, - 2.2769, - 2.25091, - 2.24396, - 2.20858, - 2.2084, - 2.25965, - 2.24494, - 2.24198, - 2.18277, - 2.22092, - 2.15779, - 2.25506, - 2.20356, - 2.22225, - 2.23111, - 2.20607, - 2.24196, - 2.26393, - 2.22827, - 2.172, - 2.2621, - 2.18329, - 2.25431, - 2.20124, - 2.19573, - 2.22409, - 2.24819, - 2.24108, - 2.23197, - 2.19632, - 2.18857, - 2.21233, - 2.23028, - 2.18295, - 2.19351, - 2.21518, - 2.22952, - 2.20828, - 2.21205, - 2.20824, - 2.2387, - 2.20393, - 2.23443, - 2.21199, - 2.25188, - 2.2562, - 2.2203, - 2.18899, - 2.21131, - 2.22809, - 2.22014, - 2.22407, - 2.21843, - 2.26856, - 2.18797, - 2.22494, - 2.23875, - 2.27295, 
- 2.23967, - 2.23981, - 2.18051, - 2.20797, - 2.19298, - 2.21851, - 2.22431, - 2.21201, - 2.19524, - 2.21444, - 2.22351, - 2.20566, - 2.23687, - 2.22342, - 2.21503, - 2.25832, - 2.22103, - 2.24585, - 2.17213, - 2.2287, - 2.22911, - 2.22208, - 2.22572, - 2.19645, - 2.2042, - 2.14498, - 2.2471, - 2.22748, - 2.23159, - 2.25433, - 2.19095, - 2.17744, - 2.22185, - 2.20914, - 2.24606, - 2.1812, - 2.24469, - 2.24636, - 2.2235, - 2.2379, - 2.21194, - 2.19506, - 2.21344, - 2.19904, - 2.24134, - 2.19789, - 2.21885, - 2.23527, - 2.2274, - 2.18237, - 2.19056, - 2.21468, - 2.21474, - 2.20981, - 2.22273, - 2.173, - 2.26311, - 2.24765, - 2.22107, - 2.18842, - 2.22802, - 2.17172, - 2.19625, - 2.20099, - 2.23226, - 2.205, - 2.16246, - 2.21725, - 2.24505, - 2.18956, - 2.18247, - 2.20926, - 2.21139, - 2.22716, - 2.23963, - 2.21784, - 2.25488, - 2.25087, - 2.22603, - 2.19324, - 2.17134, - 2.21469, - 2.24885, - 2.19814, - 2.23438, - 2.22379, - 2.18645, - 2.19048, - 2.26294, - 2.21659, - 2.2291, - 2.21383, - 2.20328, - 2.21457, - 2.16515, - 2.22091, - 2.21627, - 2.19729, - 2.23379, - 2.20164, - 2.22897, - 2.20838, - 2.22746, - 2.21223, - 2.20605, - 2.21004, - 2.20278, - 2.18889, - 2.21508, - 2.21088, - 2.21543, - 2.25657, - 2.21637, - 2.22832, - 2.21336, - 2.22711, - 2.2061, - 2.22568, - 2.23374, - 2.22531, - 2.20687, - 2.25749, - 2.24376, - 2.23437, - 2.15815, - 2.1908, - 2.18676, - 2.22369, - 2.19005, - 2.19435, - 2.2098, - 2.23888, - 2.21464, - 2.19578, - 2.20222, - 2.18432, - 2.18878, - 2.23715, - 2.19603, - 2.1787, - 2.21657, - 2.20199, - 2.19578, - 2.19258, - 2.22656, - 2.16703, - 2.22065, - 2.19388, - 2.20789, - 2.17001, - 2.21117, - 2.23408, - 2.18041, - 2.22712, - 2.19562, - 2.16716, - 2.21055, - 2.20713, - 2.1713, - 2.21497, - 2.19658, - 2.20757, - 2.20027, - 2.18994, - 2.21117, - 2.16733, - 2.2107, - 2.16034, - 2.18521, - 2.21242, - 2.19298, - 2.19285, - 2.18318, - 2.19937, - 2.25748, - 2.2242, - 2.24497, - 2.20767, - 2.2005, - 2.21122, - 2.21584, - 2.14569, - 2.20592, - 2.1879, - 2.21068, - 2.27923, - 2.18232, - 2.20699, - 2.24365, - 2.22019, - 2.22732, - 2.22696, - 2.19996, - 2.2076, - 2.1618, - 2.24236, - 2.21538, - 2.24597, - 2.1647, - 2.15413, - 2.2151, - 2.21547, - 2.19728, - 2.18719, - 2.18188, - 2.2145, - 2.26362, - 2.20403, - 2.20246, - 2.18506, - 2.19727, - 2.2175, - 2.24009, - 2.20184, - 2.18475, - 2.20479, - 2.18445, - 2.19447, - 2.19756, - 2.20463, - 2.16656, - 2.259, - 2.24037, - 2.21995, - 2.18527, - 2.18214, - 2.19891, - 2.20758, - 2.17869, - 2.18176, - 2.24069, - 2.20986, - 2.18334, - 2.23201, - 2.2231, - 2.21626, - 2.15789, - 2.20736, - 2.20452, - 2.1969, - 2.24178, - 2.19462, - 2.16635, - 2.20613, - 2.21965, - 2.19277, - 2.23078, - 2.22622, - 2.17316, - 2.19892, - 2.22889, - 2.13626, - 2.19802, - 2.21082, - 2.2211, - 2.20861, - 2.19092, - 2.19321, - 2.21281, - 2.19061, - 2.22331, - 2.21377, - 2.21097, - 2.22023, - 2.21364, - 2.21695, - 2.21525, - 2.20792, - 2.23189, - 2.17622, - 2.23871, - 2.21325, - 2.15775, - 2.22191, - 2.17794, - 2.19138, - 2.15929, - 2.1846, - 2.20952, - 2.24375, - 2.2376, - 2.19207, - 2.20191, - 2.15854, - 2.20346, - 2.18676, - 2.20789, - 2.20248, - 2.23652, - 2.22614, - 2.21133, - 2.1916, - 2.21076, - 2.19274, - 2.18646, - 2.16035, - 2.23142, - 2.20169, - 2.20634, - 2.16964, - 2.17719, - 2.22733, - 2.22773, - 2.1917, - 2.20324, - 2.20843, - 2.18351, - 2.28204, - 2.21039, - 2.20862, - 2.18473, - 2.18581, - 2.20056, - 2.21968, - 2.17868, - 2.21771, - 2.22493, - 2.24893, - 2.24074, - 2.22117, - 2.1812, - 2.21478, - 2.20271, - 2.21441, - 2.20156, - 2.18085, - 
2.24194, - 2.17072, - 2.22654, - 2.18459, - 2.16064, - 2.2127, - 2.21268, - 2.2075, - 2.18771, - 2.2412, - 2.19567, - 2.23818, - 2.20639, - 2.17262, - 2.17941, - 2.18159, - 2.1532, - 2.19474, - 2.19922, - 2.16617, - 2.21663, - 2.15394, - 2.19594, - 2.20902, - 2.19627, - 2.15241, - 2.19928, - 2.16016, - 2.19956, - 2.24343, - 2.19729, - 2.15239, - 2.19926, - 2.16015, - 2.19952, - 2.24334, - 2.19734, - 2.16842, - 2.22048, - 2.17577, - 2.19094, - 2.17378, - 2.18015, - 2.17338, - 2.21369, - 2.17643, - 2.2176, - 2.16992, - 2.19244, - 2.22764, - 2.21336, - 2.14604, - 2.2221, - 2.2102, - 2.21349, - 2.18116, - 2.15912, - 2.21113, - 2.20936, - 2.19783, - 2.21537, - 2.19813, - 2.17213, - 2.19955, - 2.16916, - 2.17469, - 2.25863, - 2.16602, - 2.23827, - 2.22504, - 2.20831, - 2.19234, - 2.2084, - 2.18026, - 2.21383, - 2.15706, - 2.16266, - 2.18302, - 2.24512, - 2.1781, - 2.21879, - 2.1834, - 2.18299, - 2.14026, - 2.19335, - 2.21695, - 2.21689, - 2.19752, - 2.22457, - 2.15914, - 2.15213, - 2.21437, - 2.16924, - 2.21181, - 2.2019, - 2.20662, - 2.18745, - 2.18372, - 2.20772, - 2.16942, - 2.18976, - 2.21133, - 2.20043, - 2.22123, - 2.14495, - 2.19675, - 2.18768, - 2.17767, - 2.15831, - 2.18366, - 2.16631, - 2.1641, - 2.2107, - 2.17591, - 2.18002, - 2.19929, - 2.17186, - 2.18516, - 2.1805, - 2.1761, - 2.19196, - 2.27241, - 2.20002, - 2.2073, - 2.23544, - 2.26259, - 2.19286, - 2.19042, - 2.20764, - 2.14257, - 2.20939, - 2.22146, - 2.20637, - 2.19244, - 2.23398, - 2.19825, - 2.16565, - 2.16901, - 2.20003, - 2.19801, - 2.20519, - 2.16926, - 2.21995, - 2.16604, - 2.14999, - 2.22083, - 2.16442, - 2.18866, - 2.187, - 2.19109, - 2.17532, - 2.21806, - 2.18666, - 2.17899, - 2.17863, - 2.16642, - 2.20048, - 2.19494, - 2.17443, - 2.20327, - 2.19404, - 2.21443, - 2.14888, - 2.22845, - 2.21441, - 2.19559, - 2.18534, - 2.21377, - 2.1852, - 2.1314, - 2.17638, - 2.18514, - 2.12761, - 2.1935, - 2.18724, - 2.20804, - 2.20378, - 2.1871, - 2.18737, - 2.13451, - 2.17889, - 2.16364, - 2.22186, - 2.2131, - 2.17384, - 2.17538, - 2.18701, - 2.15132, - 2.21864, - 2.15574, - 2.17345, - 2.18948, - 2.17734, - 2.14107, - 2.16922, - 2.18955, - 2.17062, - 2.22445, - 2.22347, - 2.20846, - 2.16172, - 2.19281, - 2.22074, - 2.21853, - 2.2179, - 2.19498, - 2.16798, - 2.13389, - 2.15565, - 2.18191, - 2.18506, - 2.19379, - 2.1651, - 2.1597, - 2.17774, - 2.18309, - 2.18548, - 2.17875, - 2.1647, - 2.18344, - 2.1937, - 2.18061, - 2.24236, - 2.17225, - 2.16795, - 2.18216, - 2.17772, - 2.17197, - 2.20252, - 2.17159, - 2.18217, - 2.22712, - 2.18749, - 2.17006, - 2.18883, - 2.17821, - 2.20445, - 2.1517, - 2.21262, - 2.17422, - 2.19338, - 2.17166, - 2.16346, - 2.13421, - 2.21842, - 2.18567, - 2.1472, - 2.22321, - 2.18658, - 2.15171, - 2.1778, - 2.17479, - 2.18861, - 2.21819, - 2.20546, - 2.19571, - 2.20015, - 2.21495, - 2.19301, - 2.17685, - 2.21443, - 2.19095, - 2.19199, - 2.19132, - 2.17147, - 2.1467, - 2.1735, - 2.1527, - 2.17177, - 2.1733, - 2.17979, - 2.20872, - 2.19373, - 2.17966, - 2.18571, - 2.15685, - 2.16672, - 2.18822, - 2.24412, - 2.15758, - 2.15271, - 2.23147, - 2.17206, - 2.181, - 2.21899, - 2.20409, - 2.18629, - 2.17353, - 2.15818, - 2.21138, - 2.21197, - 2.17169, - 2.15749, - 2.17335, - 2.22805, - 2.16633, - 2.16424, - 2.16652, - 2.21848, - 2.19068, - 2.20309, - 2.21376, - 2.16991, - 2.1835, - 2.20526, - 2.166, - 2.17374, - 2.177, - 2.18478, - 2.16993, - 2.20882, - 2.13416, - 2.16707, - 2.15516, - 2.16373, - 2.20626, - 2.18509, - 2.15541, - 2.17454, - 2.19609, - 2.10769, - 2.16538, - 2.14836, - 2.17317, - 2.17682, - 2.18426, - 2.16881, - 
2.17014, - 2.16452, - 2.16755, - 2.12889, - 2.17789, - 2.21524, - 2.17162, - 2.17213, - 2.19698, - 2.22117, - 2.19178, - 2.17581, - 2.19096, - 2.16373, - 2.11816, - 2.14627, - 2.18512, - 2.19521, - 2.19665, - 2.19628, - 2.18991, - 2.20444, - 2.16578, - 2.18633, - 2.15008, - 2.1641, - 2.19327, - 2.17938, - 2.16376, - 2.18979, - 2.14261, - 2.17485, - 2.15901, - 2.18961, - 2.16367, - 2.17294, - 2.18237, - 2.16375, - 2.17763, - 2.14412, - 2.23155, - 2.18071, - 2.17755, - 2.16625, - 2.14994, - 2.18536, - 2.1851, - 2.19508, - 2.19961, - 2.15979, - 2.18119, - 2.17653, - 2.18864, - 2.17955, - 2.21378, - 2.17088, - 2.20922, - 2.18446, - 2.19155, - 2.14343, - 2.14728, - 2.17404, - 2.17996, - 2.18006, - 2.1816, - 2.14984, - 2.16943, - 2.1921, - 2.19744, - 2.1525, - 2.21724, - 2.11438, - 2.17021, - 2.18621, - 2.18711, - 2.15281, - 2.20832, - 2.17414, - 2.16847, - 2.14683, - 2.19263, - 2.19615, - 2.16999, - 2.20088, - 2.18569, - 2.18355, - 2.17963, - 2.15445, - 2.15536, - 2.26344, - 2.15138, - 2.14383, - 2.19653, - 2.15733, - 2.17847, - 2.16653, - 2.14876, - 2.16023, - 2.18213, - 2.17377, - 2.20933, - 2.1799, - 2.16824, - 2.18085, - 2.15923, - 2.19493, - 2.19784, - 2.19531, - 2.17005, - 2.17337, - 2.15707, - 2.19014, - 2.18798, - 2.15813, - 2.15847, - 2.17383, - 2.18981, - 2.15524, - 2.15583, - 2.15085, - 2.12696, - 2.17162, - 2.18542, - 2.17662, - 2.15636, - 2.19926, - 2.16174, - 2.19083, - 2.13156, - 2.14885, - 2.18351, - 2.19694, - 2.15617, - 2.14488, - 2.14642, - 2.12363, - 2.14041, - 2.19571, - 2.19216, - 2.17894, - 2.20783, - 2.18743, - 2.18487, - 2.16926, - 2.11756, - 2.17457, - 2.18933, - 2.18984, - 2.19816, - 2.13683, - 2.19122, - 2.15497, - 2.1748, - 2.22715, - 2.18044, - 2.1534, - 2.14391, - 2.16126, - 2.18936, - 2.17912, - 2.18483, - 2.16115, - 2.15323, - 2.18309, - 2.23305, - 2.18876, - 2.17963, - 2.16238, - 2.17015, - 2.20679, - 2.17327, - 2.20301, - 2.16498, - 2.19734, - 2.1824, - 2.14627, - 2.14243, - 2.19251, - 2.21814, - 2.18329, - 2.20867, - 2.18759, - 2.19187, - 2.20729, - 2.2057, - 2.18725, - 2.1847, - 2.17537, - 2.16339, - 2.1786, - 2.17951, - 2.17996, - 2.16891, - 2.17069, - 2.18127, - 2.19872, - 2.20472, - 2.15939, - 2.14811, - 2.17522, - 2.20313, - 2.17461, - 2.14452, - 2.16394, - 2.16964, - 2.15049, - 2.18439, - 2.16792, - 2.11975, - 2.14771, - 2.19557, - 2.20576, - 2.12044, - 2.1549, - 2.15546, - 2.14708, - 2.14473, - 2.14109, - 2.171, - 2.12942, - 2.17106, - 2.10015, - 2.27051, - 2.17798, - 2.19201, - 2.18754, - 2.19809, - 2.18437, - 2.20419, - 2.16753, - 2.19971, - 2.17484, - 2.19263, - 2.20859, - 2.16484, - 2.19198, - 2.1779, - 2.15021, - 2.18804, - 2.16078, - 2.16841, - 2.15725, - 2.1613, - 2.14764, - 2.16085, - 2.16933, - 2.1966, - 2.14398, - 2.15847, - 2.17247, - 2.18909, - 2.15898, - 2.1478, - 2.17818, - 2.15456, - 2.17928, - 2.15588, - 2.18713, - 2.15734, - 2.1517, - 2.14255, - 2.18992, - 2.21926, - 2.22612, - 2.21743, - 2.19475, - 2.1801, - 2.15852, - 2.14612, - 2.21622, - 2.21616, - 2.16975, - 2.17048, - 2.16175, - 2.13239, - 2.15726, - 2.12556, - 2.17941, - 2.16216, - 2.14035, - 2.18469, - 2.1696, - 2.19059, - 2.14463, - 2.14517, - 2.15618, - 2.18068, - 2.18458, - 2.13348, - 2.18515, - 2.2014, - 2.15721, - 2.18946, - 2.21125, - 2.17046, - 2.20573, - 2.15866, - 2.20669, - 2.17205, - 2.16632, - 2.18938, - 2.16222, - 2.16632, - 2.19873, - 2.14604, - 2.19569, - 2.21645, - 2.21248, - 2.18156, - 2.14153, - 2.18355, - 2.17111, - 2.17867, - 2.13356, - 2.15927, - 2.12408, - 2.15861, - 2.18723, - 2.17267, - 2.18654, - 2.15728, - 2.15302, - 2.14231, - 2.12637, - 2.19394, - 
2.15926, - 2.18104, - 2.19901, - 2.1902, - 2.18474, - 2.18173, - 2.16629, - 2.15979, - 2.18367, - 2.18037, - 2.20064, - 2.13752, - 2.18504, - 2.17159, - 2.1661, - 2.17655, - 2.15915, - 2.10873, - 2.17854, - 2.13846, - 2.17051, - 2.14174, - 2.12537, - 2.17608, - 2.16135, - 2.18615, - 2.09541, - 2.14057, - 2.18523, - 2.15555, - 2.15936, - 2.1318, - 2.16706, - 2.18395, - 2.16847, - 2.18098, - 2.14105, - 2.12816, - 2.14824, - 2.16294, - 2.19564, - 2.17697, - 2.1621, - 2.16185, - 2.13345, - 2.16218, - 2.16696, - 2.18757, - 2.153, - 2.16848, - 2.12694, - 2.1439, - 2.16917, - 2.14999, - 2.18294, - 2.1425, - 2.16657, - 2.16947, - 2.1431, - 2.18161, - 2.14911, - 2.18262, - 2.1797, - 2.16234, - 2.19183, - 2.1784, - 2.17465, - 2.19013, - 2.16067, - 2.19193, - 2.13367, - 2.20197, - 2.15076, - 2.17321, - 2.16784, - 2.12477, - 2.11399, - 2.17824, - 2.156, - 2.14096, - 2.18114, - 2.13447, - 2.16557, - 2.17357, - 2.20938, - 2.14777, - 2.18127, - 2.1744, - 2.19442, - 2.15363, - 2.16685, - 2.12111, - 2.18725, - 2.20475, - 2.12231, - 2.13934, - 2.17479, - 2.14848, - 2.14109, - 2.17038, - 2.19984, - 2.13387, - 2.167, - 2.15354, - 2.15302, - 2.18602, - 2.16062, - 2.14146, - 2.17027, - 2.14351, - 2.18497, - 2.16019, - 2.19006, - 2.1479, - 2.18671, - 2.13551, - 2.135, - 2.17669, - 2.14165, - 2.19581, - 2.12177, - 2.15406, - 2.16763, - 2.17618, - 2.181, - 2.17901, - 2.10328, - 2.14171, - 2.19008, - 2.12351, - 2.17358, - 2.17955, - 2.13902, - 2.18343, - 2.1763, - 2.13078, - 2.19134, - 2.12578, - 2.14905, - 2.14637, - 2.19027, - 2.25382, - 2.17345, - 2.17834, - 2.14327, - 2.12737, - 2.1608, - 2.1556, - 2.15124, - 2.15839, - 2.14512, - 2.19067, - 2.16934, - 2.16245, - 2.19191, - 2.16126, - 2.17952, - 2.17233, - 2.20475, - 2.15288, - 2.15615, - 2.15589, - 2.17093, - 2.17351, - 2.15767, - 2.1031, - 2.18355, - 2.21361, - 2.17387, - 2.18068, - 2.13022, - 2.16683, - 2.19119, - 2.2019, - 2.1415, - 2.14956, - 2.15678, - 2.1577, - 2.19968, - 2.19445, - 2.11721, - 2.14302, - 2.17216, - 2.1248, - 2.09752, - 2.17449, - 2.12292, - 2.14993, - 2.18809, - 2.14888, - 2.14015, - 2.16722, - 2.16813, - 2.20578, - 2.21819, - 2.13705, - 2.14802, - 2.16233, - 2.14961, - 2.15414, - 2.09723, - 2.18731, - 2.1363, - 2.14775, - 2.17624, - 2.1336, - 2.15152, - 2.14756, - 2.11907, - 2.20711, - 2.17921, - 2.19652, - 2.13845, - 2.11612, - 2.17092, - 2.13699, - 2.16441, - 2.1313, - 2.15736, - 2.11473, - 2.16612, - 2.2035, - 2.16649, - 2.16057, - 2.141, - 2.13255, - 2.14794, - 2.14774, - 2.14235, - 2.13635, - 2.16235, - 2.19152, - 2.15345, - 2.1511, - 2.08878, - 2.16734, - 2.20028, - 2.19222, - 2.14872, - 2.19182, - 2.15673, - 2.1572, - 2.18504, - 2.127, - 2.12302, - 2.11176, - 2.14987, - 2.08642, - 2.17168, - 2.14896, - 2.15704, - 2.13415, - 2.19367, - 2.18156, - 2.15787, - 2.13577, - 2.13732, - 2.15458, - 2.14696, - 2.13656, - 2.17765, - 2.15875, - 2.13939, - 2.13572, - 2.16372, - 2.14554, - 2.16876, - 2.1763, - 2.14148, - 2.13363, - 2.17448, - 2.14582, - 2.16399, - 2.17864, - 2.11704, - 2.18451, - 2.13791, - 2.09483, - 2.17485, - 2.171, - 2.16585, - 2.15641, - 2.11398, - 2.1933, - 2.16659, - 2.11705, - 2.18533, - 2.1376, - 2.14452, - 2.14798, - 2.10416, - 2.18204, - 2.15977, - 2.16837, - 2.15676, - 2.16268, - 2.15171, - 2.14989, - 2.14358, - 2.17646, - 2.15323, - 2.1435, - 2.11332, - 2.15491, - 2.11292, - 2.13509, - 2.18815, - 2.17583, - 2.15105, - 2.12616, - 2.16429, - 2.19165, - 2.13445, - 2.12668, - 2.14715, - 2.16051, - 2.17577, - 2.18437, - 2.12147, - 2.14173, - 2.19119, - 2.14259, - 2.16069, - 2.13931, - 2.13257, - 2.13368, - 2.17843, - 
2.18003, - 2.15228, - 2.15841, - 2.18479, - 2.13727, - 2.16872, - 2.18235, - 2.18741, - 2.18707, - 2.20625, - 2.14712, - 2.17132, - 2.17173, - 2.14073, - 2.10116, - 2.20496, - 2.15772, - 2.19509, - 2.20366, - 2.11044, - 2.156, - 2.17841, - 2.1801, - 2.12048, - 2.18712, - 2.18221, - 2.15968, - 2.1459, - 2.1443, - 2.16884, - 2.107, - 2.18104, - 2.1166, - 2.10592, - 2.1412, - 2.13225, - 2.17143, - 2.13275, - 2.11507, - 2.13192, - 2.12221, - 2.17945, - 2.20474, - 2.17471, - 2.16931, - 2.13238, - 2.10923, - 2.14124, - 2.16795, - 2.18898, - 2.18312, - 2.09957, - 2.11802, - 2.16699, - 2.14606, - 2.16508, - 2.11333, - 2.17366, - 2.11857, - 2.14846, - 2.13323, - 2.16219, - 2.11718, - 2.13992, - 2.13892, - 2.1457, - 2.10234, - 2.13532, - 2.19414, - 2.15058, - 2.15193, - 2.15096, - 2.14659, - 2.14549, - 2.17342, - 2.14192, - 2.12625, - 2.11478, - 2.18829, - 2.16783, - 2.14319, - 2.13884, - 2.17131, - 2.18925, - 2.17489, - 2.18202, - 2.16298, - 2.1508, - 2.15014, - 2.12937, - 2.16168, - 2.1714, - 2.1668, - 2.13418, - 2.16065, - 2.21061, - 2.16126, - 2.11185, - 2.14461, - 2.17969, - 2.10698, - 2.09044, - 2.15758, - 2.15375, - 2.16383, - 2.13245, - 2.19047, - 2.1472, - 2.16643, - 2.16811, - 2.19967, - 2.1244, - 2.13006, - 2.14583, - 2.12804, - 2.16276, - 2.16689, - 2.14063, - 2.17279, - 2.12726, - 2.17034, - 2.11752, - 2.17501, - 2.1926, - 2.16911, - 2.09497, - 2.16066, - 2.19386, - 2.10672, - 2.147, - 2.11698, - 2.15454, - 2.17636, - 2.14325, - 2.13193, - 2.15237, - 2.12483, - 2.15946, - 2.14216, - 2.14877, - 2.09697, - 2.11371, - 2.13351, - 2.16581, - 2.16066, - 2.16743, - 2.13634, - 2.12924, - 2.14702, - 2.12892, - 2.1668, - 2.1522, - 2.16604, - 2.19061, - 2.11983, - 2.13366, - 2.10699, - 2.15441, - 2.1676, - 2.1694, - 2.12743, - 2.13471, - 2.18747, - 2.13023, - 2.19107, - 2.1321, - 2.14259, - 2.16956, - 2.19361, - 2.14398, - 2.11797, - 2.10863, - 2.14346, - 2.12159, - 2.19451, - 2.14807, - 2.13874, - 2.1516, - 2.10797, - 2.09939, - 2.12946, - 2.17435, - 2.11143, - 2.17784, - 2.14156, - 2.14533, - 2.17696, - 2.14203, - 2.15071, - 2.11011, - 2.16908, - 2.1706, - 2.16703, - 2.13855, - 2.16176, - 2.14157, - 2.17087, - 2.20186, - 2.10983, - 2.13922, - 2.19236, - 2.16432, - 2.1754, - 2.1656, - 2.17702, - 2.17027, - 2.14538, - 2.15832, - 2.13773, - 2.18334, - 2.17546, - 2.15989, - 2.13713, - 2.15447, - 2.10695, - 2.15466, - 2.11713, - 2.14668, - 2.13398, - 2.14844, - 2.16052, - 2.15726, - 2.17533, - 2.12558, - 2.12761, - 2.13157, - 2.10692, - 2.20562, - 2.12857, - 2.12588, - 2.1346, - 2.15945, - 2.1288, - 2.16761, - 2.14991, - 2.10526, - 2.17739, - 2.18675, - 2.20731, - 2.12029, - 2.1523, - 2.16777, - 2.12095, - 2.13545, - 2.16134, - 2.11709, - 2.11789, - 2.16944, - 2.12856, - 2.15495, - 2.1182, - 2.09788, - 2.14004, - 2.14291, - 2.16266, - 2.15156, - 2.0972, - 2.17693, - 2.15852, - 2.15903, - 2.10183, - 2.1416, - 2.11404, - 2.19407, - 2.11699, - 2.17899, - 2.14283, - 2.14344, - 2.15259, - 2.18662, - 2.18779, - 2.13915, - 2.12533, - 2.17327, - 2.15896, - 2.17776, - 2.13174, - 2.16252, - 2.1644, - 2.1793, - 2.10426, - 2.12368, - 2.12738, - 2.18203, - 2.10629, - 2.1689, - 2.17597, - 2.17203, - 2.10734, - 2.12659, - 2.16685, - 2.15431, - 2.14967, - 2.14079, - 2.1438, - 2.13513, - 2.18143, - 2.12313, - 2.15419, - 2.12765, - 2.164, - 2.16244, - 2.15503, - 2.16961, - 2.11907, - 2.13193, - 2.13485, - 2.14159, - 2.16923, - 2.13656, - 2.1314, - 2.14872, - 2.13233, - 2.10057, - 2.14367, - 2.16474, - 2.14571, - 2.13129, - 2.17073, - 2.14878, - 2.13761, - 2.12414, - 2.16312, - 2.12182, - 2.15251, - 2.16149, - 2.17208, 
- 2.14538, - 2.15571, - 2.12569, - 2.08976, - 2.14935, - 2.20761, - 2.17022, - 2.14493, - 2.13671, - 2.16371, - 2.13993, - 2.15544, - 2.14585, - 2.14978, - 2.0978, - 2.14243, - 2.14532, - 2.19018, - 2.09518, - 2.13939, - 2.12702, - 2.13127, - 2.12441, - 2.15245, - 2.09389, - 2.14901, - 2.13478, - 2.17157, - 2.15137, - 2.12996, - 2.10468, - 2.09343, - 2.14596, - 2.14001, - 2.1059, - 2.17019, - 2.12371, - 2.18654, - 2.11822, - 2.12322, - 2.13852, - 2.14918, - 2.11615, - 2.16195, - 2.13596, - 2.16663, - 2.11985, - 2.17567, - 2.15815, - 2.11397, - 2.10551, - 2.10105, - 2.13678, - 2.12597, - 2.143, - 2.11903, - 2.11374, - 2.13401, - 2.10533, - 2.19884, - 2.14265, - 2.15892, - 2.12189, - 2.1075, - 2.17377, - 2.11619, - 2.12564, - 2.14689, - 2.14838, - 2.15968, - 2.13385, - 2.17871, - 2.18743, - 2.11674, - 2.15358, - 2.13287, - 2.14467, - 2.14385, - 2.15097, - 2.12389, - 2.13063, - 2.15403, - 2.17818, - 2.1176, - 2.13839, - 2.09886, - 2.15505, - 2.13632, - 2.16768, - 2.13509, - 2.12509, - 2.11603, - 2.14385, - 2.09451, - 2.1456, - 2.1422, - 2.19208, - 2.12414, - 2.13025, - 2.12967, - 2.13282, - 2.11999, - 2.10608, - 2.09721, - 2.11294, - 2.14824, - 2.1077, - 2.17249, - 2.11254, - 2.13875, - 2.10992, - 2.14203, - 2.19748, - 2.17373, - 2.12571, - 2.15508, - 2.09296, - 2.15969, - 2.10727, - 2.16069, - 2.1281, - 2.15192, - 2.16759, - 2.17505, - 2.17871, - 2.12461, - 2.14144, - 2.14497, - 2.15439, - 2.15332, - 2.1599, - 2.16703, - 2.11559, - 2.15726, - 2.13004, - 2.09935, - 2.15864, - 2.13041, - 2.13299, - 2.16125, - 2.14967, - 2.16318, - 2.10817, - 2.133, - 2.14493, - 2.16514, - 2.12097, - 2.17644, - 2.15639, - 2.16246, - 2.18479, - 2.14845, - 2.10433, - 2.1395, - 2.11984, - 2.1692, - 2.09604, - 2.14929, - 2.12645, - 2.1407, - 2.15826, - 2.18878, - 2.07415, - 2.13586, - 2.11267, - 2.11688, - 2.16593, - 2.15135, - 2.14363, - 2.1358, - 2.13361, - 2.12986, - 2.13311, - 2.07136, - 2.11647, - 2.19506, - 2.14691, - 2.15606, - 2.10683, - 2.12736, - 2.13159, - 2.15623, - 2.16743, - 2.16151, - 2.11969, - 2.10611, - 2.10962, - 2.13044, - 2.17478, - 2.1448, - 2.12965, - 2.08623, - 2.13043, - 2.09283, - 2.16873, - 2.14139, - 2.1043, - 2.15255, - 2.15873, - 2.15032, - 2.13322, - 2.13143, - 2.16012, - 2.16421, - 2.09401, - 2.08427, - 2.10674, - 2.14381, - 2.11744, - 2.12551, - 2.11385, - 2.12282, - 2.1678, - 2.1262, - 2.0947, - 2.15236, - 2.16461, - 2.11428, - 2.14919, - 2.08848, - 2.13702, - 2.09586, - 2.1369, - 2.19728, - 2.11058, - 2.13479, - 2.14056, - 2.17871, - 2.11145, - 2.16839, - 2.15406, - 2.1731, - 2.12341, - 2.13816, - 2.15165, - 2.14093, - 2.16582, - 2.14207, - 2.13801, - 2.17713, - 2.15638, - 2.17091, - 2.16117, - 2.13487, - 2.16257, - 2.16206, - 2.19882, - 2.11888, - 2.10646, - 2.08643, - 2.16012, - 2.08846, - 2.09914, - 2.14465, - 2.10321, - 2.10914, - 2.12985, - 2.15083, - 2.13683, - 2.14648, - 2.17932, - 2.16821, - 2.13741, - 2.1201, - 2.10379, - 2.13683, - 2.16058, - 2.15999, - 2.13644, - 2.13412, - 2.09325, - 2.16394, - 2.09119, - 2.12577, - 2.11695, - 2.15944, - 2.15893, - 2.15669, - 2.13675, - 2.14947, - 2.19116, - 2.10843, - 2.14734, - 2.15731, - 2.12981, - 2.11599, - 2.11285, - 2.1318, - 2.132, - 2.14687, - 2.11874, - 2.1381, - 2.15827, - 2.19088, - 2.1165, - 2.14317, - 2.17349, - 2.14614, - 2.16461, - 2.12818, - 2.13753, - 2.10454, - 2.10475, - 2.16402, - 2.09478, - 2.1212, - 2.10195, - 2.1199, - 2.15636, - 2.12659, - 2.12693, - 2.09993, - 2.11189, - 2.1289, - 2.11812, - 2.13287, - 2.11231, - 2.14206, - 2.16843, - 2.13639, - 2.14425, - 2.09665, - 2.11477, - 2.10752, - 2.14236, - 2.14631, - 
-            2.12025,
-            ... (remaining golden values in this array, ranging roughly 1.97–2.20, omitted for brevity) ...
-            2.07241
-        ]
-    },
-    "num-zeros": {
-        "start_step": 0,
-        "end_step": 53183,
-        "step_interval": 5,
-        "values": [
-            956236928.0,
-            ... (intervening "num-zeros" golden values, ranging roughly 9.2e8–1.27e9, omitted for brevity) ...
-            934772480.0,
- 971566080.0, - 971026688.0, - 977299328.0, - 951372928.0, - 956004544.0, - 975343616.0, - 958989632.0, - 956213120.0, - 981110976.0, - 937820544.0, - 969835008.0, - 956856832.0, - 965621504.0, - 972665344.0, - 957806976.0, - 949370112.0, - 972162304.0, - 972793984.0, - 955829632.0, - 964673536.0, - 953344768.0, - 991925888.0, - 973686848.0, - 952864832.0, - 961605248.0, - 944941952.0, - 979913216.0, - 980744064.0, - 980410752.0, - 954187008.0, - 947690432.0, - 947004672.0, - 975350528.0, - 962248064.0, - 988725632.0, - 944005376.0, - 950973824.0, - 966515200.0, - 975706240.0, - 978185536.0, - 976357120.0, - 943320192.0, - 966277376.0, - 962358080.0, - 976203264.0, - 971541952.0, - 937391616.0, - 965716352.0, - 978746752.0, - 972062144.0, - 977814912.0, - 958274176.0, - 938146816.0, - 972887808.0, - 973872064.0, - 958181952.0, - 971533504.0, - 956207232.0, - 971964800.0, - 975739136.0, - 983632960.0, - 959550976.0, - 922478528.0, - 967331584.0, - 958768576.0, - 959299584.0, - 977023232.0, - 949655168.0, - 944128000.0, - 955172480.0, - 971687616.0, - 977042176.0, - 952715584.0, - 934506944.0, - 966462016.0, - 965424256.0, - 981044864.0, - 969115392.0, - 949028864.0, - 978318464.0, - 977286016.0, - 967010496.0, - 969966848.0, - 938616576.0, - 953810880.0, - 962589248.0, - 981771840.0, - 978158144.0, - 968694144.0, - 956072960.0, - 968669184.0, - 959074688.0, - 990117056.0, - 984952192.0, - 945928000.0, - 955999360.0, - 961347264.0, - 967386496.0, - 970175936.0, - 938555008.0, - 951180480.0, - 960621952.0, - 972563584.0, - 969886080.0, - 965413760.0, - 955745920.0, - 972470912.0, - 961199232.0, - 954917504.0, - 974695168.0, - 953781504.0, - 974168192.0, - 965886848.0, - 979201152.0, - 970595712.0, - 944832256.0, - 970407680.0, - 978049024.0, - 978761024.0, - 958308160.0, - 943358528.0, - 959222656.0, - 960499008.0, - 965978496.0, - 981567232.0, - 975720448.0, - 947471488.0, - 969540288.0, - 974729984.0, - 977585856.0, - 961660480.0, - 947232128.0, - 972027776.0, - 972947776.0, - 973900288.0, - 963578624.0, - 947418880.0, - 956223872.0, - 973477952.0, - 942272768.0, - 973858496.0, - 975669632.0, - 937300480.0, - 964836224.0, - 979479424.0, - 965719040.0, - 950291648.0, - 943686400.0, - 985054720.0, - 971481088.0, - 972492928.0, - 972867264.0, - 948047616.0, - 969571840.0, - 967249280.0, - 971339072.0, - 964827840.0, - 973121536.0, - 932679680.0, - 964294528.0, - 985944064.0, - 962825856.0, - 947888064.0, - 936149888.0, - 953951488.0, - 970412160.0, - 966899712.0, - 975869632.0, - 931199296.0, - 962632192.0, - 966259968.0, - 976717696.0, - 984519040.0, - 952739712.0, - 951672448.0, - 975127808.0, - 967755392.0, - 988302016.0, - 965631104.0, - 944607360.0, - 963863424.0, - 973068800.0, - 960641408.0, - 966871232.0, - 959102208.0, - 963087616.0, - 966583488.0, - 974475136.0, - 964317504.0, - 961807360.0, - 944256000.0, - 978687872.0, - 972219392.0, - 966101184.0, - 982098944.0, - 958169216.0, - 969383552.0, - 976667776.0, - 972001216.0, - 967387264.0, - 929629824.0, - 972970432.0, - 966004736.0, - 957420864.0, - 978226816.0, - 936304896.0, - 973770304.0, - 962480384.0, - 981225344.0, - 961436992.0, - 945802624.0, - 947120000.0, - 962646272.0, - 960313728.0, - 975292672.0, - 957344832.0, - 931126336.0, - 971525248.0, - 965347264.0, - 973184512.0, - 985979456.0, - 943119616.0, - 950755712.0, - 973222016.0, - 943791104.0, - 972633216.0, - 960040064.0, - 943144704.0, - 967239168.0, - 984837952.0, - 975966464.0, - 954906304.0, - 932064960.0, - 971269952.0, - 964653312.0, - 952385408.0, - 968069440.0, 
- 967820032.0, - 975079040.0, - 974181632.0, - 965506816.0, - 969878848.0, - 972414080.0, - 965286784.0, - 969768256.0, - 975729024.0, - 965469824.0, - 976016000.0, - 927634304.0, - 969923968.0, - 972692480.0, - 966305280.0, - 979099520.0, - 933469376.0, - 970328704.0, - 975082880.0, - 968108608.0, - 971076480.0, - 921772928.0, - 954107712.0, - 982986112.0, - 976599936.0, - 969982976.0, - 952207488.0, - 948687360.0, - 970931392.0, - 965315328.0, - 980079872.0, - 963099136.0, - 956383936.0, - 973570048.0, - 969001216.0, - 958367616.0, - 967154048.0, - 944004096.0, - 944353152.0, - 977154560.0, - 971526016.0, - 968135552.0, - 970517504.0, - 961082880.0, - 968432128.0, - 971897472.0, - 941140224.0, - 953927552.0, - 954830848.0, - 969211648.0, - 976125504.0, - 967907200.0, - 951694336.0, - 933555968.0, - 958688896.0, - 974772992.0, - 990033152.0, - 957152000.0, - 941381952.0, - 933954048.0, - 967968512.0, - 976938368.0, - 965889088.0, - 964921408.0, - 951561856.0, - 963441152.0, - 957167360.0, - 969800576.0, - 970812928.0, - 933750336.0, - 987980160.0, - 963943680.0, - 968096512.0, - 968938112.0, - 941729024.0, - 948668672.0, - 960978304.0, - 967097536.0, - 975592448.0, - 960261056.0, - 927577600.0, - 952773440.0, - 955839296.0, - 956968000.0, - 966235648.0, - 940525440.0, - 968861312.0, - 966428864.0, - 972941952.0, - 973784064.0, - 942931712.0, - 957293184.0, - 976446464.0, - 977009216.0, - 960880448.0, - 975425344.0, - 955295872.0, - 984794944.0, - 977519360.0, - 962804352.0, - 956125184.0, - 940138112.0, - 974768512.0, - 956950336.0, - 964995456.0, - 964968448.0, - 958196736.0, - 957048704.0, - 974119168.0, - 975092160.0, - 978090112.0, - 950592192.0, - 947219712.0, - 961843328.0, - 957277568.0, - 980805184.0, - 936176640.0, - 952659392.0, - 974612032.0, - 969829376.0, - 962165888.0, - 966396032.0, - 953853952.0, - 958404352.0, - 976985088.0, - 955728000.0, - 975196416.0, - 960412800.0, - 973993728.0, - 963404480.0, - 967338368.0, - 962311552.0, - 950462848.0, - 954982784.0, - 979908096.0, - 968403392.0, - 981193984.0, - 967248448.0, - 941855872.0, - 973427136.0, - 955793024.0, - 971974784.0, - 971067264.0, - 953390080.0, - 955315200.0, - 976971392.0, - 967621184.0, - 962955392.0, - 940864128.0, - 950788096.0, - 968097536.0, - 975609728.0, - 979082368.0, - 981442048.0, - 939197312.0, - 967601152.0, - 955614144.0, - 965604544.0, - 976276864.0, - 958159232.0, - 969673728.0, - 964368896.0, - 976473920.0, - 984933120.0, - 945408512.0, - 955131008.0, - 968269696.0, - 989501120.0, - 973395072.0, - 974450432.0, - 945549888.0, - 959462208.0, - 957757568.0, - 963945600.0, - 971289984.0, - 948245888.0, - 970380032.0, - 969388160.0, - 978407296.0, - 965915264.0, - 942466624.0, - 969376192.0, - 989745664.0, - 976958592.0, - 973684800.0, - 970581760.0, - 944723968.0, - 992036992.0, - 969085120.0, - 965606144.0, - 954714368.0, - 949960320.0, - 990495488.0, - 959941760.0, - 977775616.0, - 974907520.0, - 940307968.0, - 954688896.0, - 969823872.0, - 977357056.0, - 969442816.0, - 968550784.0, - 944871936.0, - 960301312.0, - 955657408.0, - 966825408.0, - 972898816.0, - 947804032.0, - 971944832.0, - 965897344.0, - 966991360.0, - 985332608.0, - 946609792.0, - 966702208.0, - 984187840.0, - 989248512.0, - 976693120.0, - 956147264.0, - 958625152.0, - 956838208.0, - 965746112.0, - 968585984.0, - 970818496.0, - 963311168.0, - 979459328.0, - 962145152.0, - 962750336.0, - 954498688.0, - 927377280.0, - 971597440.0, - 985275776.0, - 982057984.0, - 967315584.0, - 949563264.0, - 960774528.0, - 982319936.0, - 983654656.0, 
- 976209408.0, - 960582592.0, - 946093312.0, - 975270848.0, - 984077312.0, - 978947072.0, - 978699136.0, - 934841984.0, - 982260352.0, - 982412224.0, - 967934720.0, - 979692096.0, - 969859392.0, - 965724928.0, - 967185600.0, - 951217664.0, - 973305216.0, - 959712512.0, - 972240512.0, - 959816576.0, - 949676672.0, - 982215040.0, - 978217216.0, - 956105216.0, - 963003392.0, - 962008064.0, - 972696448.0, - 952320768.0, - 938416768.0, - 969812352.0, - 973631104.0, - 962018880.0, - 972861632.0, - 956590720.0, - 952745216.0, - 978028672.0, - 972173440.0, - 964957568.0, - 957725952.0, - 946529792.0, - 971824128.0, - 973380544.0, - 973034048.0, - 969466752.0, - 942162304.0, - 965866240.0, - 972854016.0, - 973553600.0, - 978981504.0, - 938434304.0, - 963183040.0, - 978777216.0, - 963204224.0, - 968651008.0, - 939730496.0, - 945842176.0, - 982510976.0, - 969312896.0, - 984278464.0, - 980115712.0, - 946382912.0, - 955306752.0, - 971466432.0, - 974870400.0, - 976486656.0, - 959631168.0, - 959441984.0, - 974943104.0, - 984933952.0, - 970557440.0, - 953767936.0, - 952936704.0, - 980647808.0, - 976730240.0, - 981763584.0, - 974525568.0, - 951145984.0, - 972715520.0, - 953703616.0, - 972640832.0, - 965368832.0, - 929201408.0, - 974378368.0, - 972664256.0, - 975873216.0, - 977676160.0, - 941912448.0, - 945939584.0, - 982339328.0, - 989044736.0, - 975330560.0, - 964403456.0, - 953013504.0, - 964140032.0, - 960992640.0, - 983076736.0, - 971134848.0, - 932200192.0, - 964982656.0, - 970636416.0, - 966597376.0, - 971914176.0, - 958890880.0, - 965859904.0, - 961412224.0, - 968295296.0, - 965042688.0, - 976074112.0, - 955784128.0, - 967541632.0, - 955408064.0, - 960772544.0, - 953401856.0, - 951111680.0, - 956564480.0, - 963308928.0, - 966602112.0, - 957272832.0, - 944127616.0, - 954476160.0, - 977947904.0, - 972748800.0, - 967345792.0, - 950356736.0, - 926433344.0, - 959305920.0, - 983548032.0, - 976030592.0, - 965808512.0, - 942812800.0, - 992129536.0, - 963470656.0, - 984910528.0, - 963058368.0, - 944563712.0, - 968320768.0, - 966872768.0, - 974587712.0, - 961067776.0, - 952780992.0, - 941043456.0, - 957669824.0, - 968178496.0, - 957092992.0, - 956137216.0, - 935319680.0, - 961558528.0, - 969268288.0, - 945601344.0, - 977856000.0, - 956514816.0, - 964333184.0, - 980359680.0, - 981116160.0, - 981550464.0, - 965524160.0, - 960060992.0, - 965492096.0, - 966940608.0, - 964796160.0, - 961017216.0, - 961000064.0, - 966589888.0, - 971398656.0, - 958346624.0, - 956560512.0, - 945636864.0, - 969575424.0, - 963311616.0, - 969463936.0, - 964146816.0, - 945761536.0, - 950282496.0, - 974740224.0, - 972449152.0, - 970820224.0, - 965580928.0, - 941215616.0, - 964771712.0, - 985743744.0, - 981028352.0, - 960709888.0, - 937586048.0, - 972650368.0, - 981054592.0, - 982141632.0, - 961028736.0, - 942443776.0, - 962297216.0, - 966968448.0, - 974794496.0, - 971104640.0, - 960944384.0, - 947720192.0, - 955030720.0, - 970907968.0, - 962854336.0, - 969850880.0, - 954673280.0, - 977656320.0, - 965586816.0, - 964284736.0, - 977895808.0, - 950171904.0, - 958758272.0, - 975057792.0, - 981652736.0, - 964278528.0, - 953100224.0, - 936073088.0, - 976656384.0, - 955601536.0, - 967410880.0, - 964629632.0, - 946551872.0, - 979427584.0, - 980291968.0, - 976661760.0, - 959077312.0, - 937599104.0, - 964687232.0, - 964531456.0, - 968297344.0, - 977308288.0, - 951500544.0, - 952577536.0, - 961679424.0, - 977802880.0, - 957297280.0, - 961520896.0, - 941937920.0, - 990111936.0, - 971157824.0, - 969659008.0, - 982089280.0, - 942284928.0, - 961127104.0, 
- 967933056.0, - 960637696.0, - 969640128.0, - 944865472.0, - 976667776.0, - 969624064.0, - 968694848.0, - 954255616.0, - 958824448.0, - 963376640.0, - 975696256.0, - 956984832.0, - 979015936.0, - 948632768.0, - 957725952.0, - 972760832.0, - 962197632.0, - 972281024.0, - 971318528.0, - 953186432.0, - 973235584.0, - 967958464.0, - 958712832.0, - 972651520.0, - 960120960.0, - 945822592.0, - 979486784.0, - 961022720.0, - 981902464.0, - 968142784.0, - 936793984.0, - 975751552.0, - 968800512.0, - 982655104.0, - 981753856.0, - 942031040.0, - 972898688.0, - 961089792.0, - 977049728.0, - 976967296.0, - 952619264.0, - 937529024.0, - 960402688.0, - 974264192.0, - 983761792.0, - 952518528.0, - 946042752.0, - 969584256.0, - 972417408.0, - 965862464.0, - 967098368.0, - 952154816.0, - 970673088.0, - 973226880.0, - 961164352.0, - 951871488.0, - 931615232.0, - 985304000.0, - 973270784.0, - 972243392.0, - 967320256.0, - 943751424.0, - 946028416.0, - 969689216.0, - 961680640.0, - 968185472.0, - 963840576.0, - 954674944.0, - 968198080.0, - 969529280.0, - 965901760.0, - 972870464.0, - 943331968.0, - 963033984.0, - 962295552.0, - 973162176.0, - 981048320.0, - 960194752.0, - 945251840.0, - 964505728.0, - 972163456.0, - 974918016.0, - 976994048.0, - 951481216.0, - 976355456.0, - 949967680.0, - 972655232.0, - 978462464.0, - 941959424.0, - 973197568.0, - 962812288.0, - 984604032.0, - 945226112.0, - 982712320.0, - 968570816.0, - 953119488.0, - 982344384.0, - 950385152.0, - 955500032.0, - 959667072.0, - 963720576.0, - 976224640.0, - 968207104.0, - 953179648.0, - 956425088.0, - 968585088.0, - 965475968.0, - 969178048.0, - 959304704.0, - 973148288.0, - 972890816.0, - 969935360.0, - 958288896.0, - 948720256.0, - 962796544.0, - 971312512.0, - 964073728.0, - 960969344.0, - 930392960.0, - 945751936.0, - 990380160.0, - 968074240.0, - 956704896.0, - 967846272.0, - 955607808.0, - 957716736.0, - 984708288.0, - 978233600.0, - 973357184.0, - 935562624.0, - 957242880.0, - 966722688.0, - 969499136.0, - 981839616.0, - 928371776.0, - 949352320.0, - 966846336.0, - 966686272.0, - 967394816.0, - 949245952.0, - 957081920.0, - 969882368.0, - 974471168.0, - 959456768.0, - 958894592.0, - 956754176.0, - 977281856.0, - 976832960.0, - 962951552.0, - 975217408.0, - 963593152.0, - 977185472.0, - 966663296.0, - 974025280.0, - 966772800.0, - 959408640.0, - 963792128.0, - 977484160.0, - 967485056.0, - 984401536.0, - 959565824.0, - 948574720.0, - 972245120.0, - 982372736.0, - 962659264.0, - 963855360.0, - 948211008.0, - 963775616.0, - 958247808.0, - 969518400.0, - 987503104.0, - 951810432.0, - 950395968.0, - 966734976.0, - 982498816.0, - 965418368.0, - 972902080.0, - 936970880.0, - 968694784.0, - 979824128.0, - 971072256.0, - 971791488.0, - 939068672.0, - 971568768.0, - 957750400.0, - 968755456.0, - 961146240.0, - 933924608.0, - 957175040.0, - 968922112.0, - 969693952.0, - 971914560.0, - 979325824.0, - 951648768.0, - 970210816.0, - 953955136.0, - 971113344.0, - 979363200.0, - 959137856.0, - 959911936.0, - 960096896.0, - 969816896.0, - 954936512.0, - 942594624.0, - 965458880.0, - 982078592.0, - 978908864.0, - 970193024.0, - 949624704.0, - 945822272.0, - 981924352.0, - 968849280.0, - 988431104.0, - 956877376.0, - 940951552.0, - 971059584.0, - 983921152.0, - 983396544.0, - 967999936.0, - 958767360.0, - 961822592.0, - 968904704.0, - 978123648.0, - 975329024.0, - 974969664.0, - 942589696.0, - 959713280.0, - 975106688.0, - 982049536.0, - 979469632.0, - 940608000.0, - 974395456.0, - 979087360.0, - 967504192.0, - 960086016.0, - 943152896.0, - 967960064.0, 
- 980230144.0, - 963831680.0, - 963265536.0, - 959640512.0, - 970199872.0, - 970729344.0, - 962030848.0, - 981835392.0, - 964593024.0, - 959666688.0, - 968112000.0, - 968565504.0, - 971795712.0, - 968122624.0, - 945530176.0, - 963123328.0, - 974173440.0, - 963489664.0, - 957261888.0, - 949538240.0, - 957148416.0, - 953684864.0, - 979784768.0, - 986819200.0, - 947400704.0, - 948909952.0, - 965028992.0, - 975494144.0, - 968528896.0, - 968991296.0, - 952064896.0, - 974659712.0, - 963534848.0, - 964100864.0, - 965353408.0, - 943095936.0, - 950772096.0, - 969513216.0, - 964380160.0, - 984301824.0, - 964561216.0, - 950735296.0, - 961816320.0, - 980082432.0, - 963702016.0, - 953082944.0, - 951740416.0, - 969242368.0, - 964727616.0, - 959251456.0, - 967797632.0, - 946596032.0, - 962079680.0, - 980372224.0, - 965237248.0, - 982809344.0, - 960378240.0, - 965200768.0, - 958090560.0, - 975113728.0, - 960176256.0, - 947768128.0, - 959303680.0, - 978732672.0, - 969075968.0, - 957632512.0, - 963698432.0, - 942094784.0, - 966145984.0, - 966619776.0, - 983282432.0, - 988539712.0, - 966372736.0, - 944180480.0, - 968811008.0, - 985685120.0, - 974531072.0, - 964031680.0, - 966544512.0, - 967491264.0, - 963823360.0, - 995027200.0, - 973191680.0, - 938402944.0, - 964524032.0, - 972792320.0, - 968313600.0, - 961465728.0, - 936090880.0, - 962700288.0, - 967591488.0, - 977029248.0, - 956073344.0, - 960740096.0, - 946767104.0, - 982017344.0, - 988210944.0, - 966330112.0, - 962442752.0, - 934132800.0, - 980256512.0, - 976386816.0, - 963885696.0, - 977186560.0, - 956614016.0, - 982651008.0, - 952333696.0, - 973792960.0, - 974501760.0, - 953039936.0, - 939703872.0, - 981249280.0, - 972881280.0, - 977926912.0, - 951061184.0, - 937516672.0, - 977339328.0, - 967702208.0, - 990167296.0, - 975674240.0, - 947367680.0, - 970703232.0, - 970009216.0, - 974930176.0, - 979701696.0, - 932856192.0, - 965022208.0, - 979660160.0, - 965323648.0, - 972670144.0, - 962995968.0, - 950673344.0, - 972606720.0, - 951478016.0, - 960643968.0, - 965316736.0, - 941754304.0, - 967909760.0, - 960803776.0, - 965674240.0, - 969266176.0, - 952763264.0, - 984044736.0, - 990052288.0, - 968375936.0, - 967405824.0, - 962972544.0, - 942650752.0, - 987261056.0, - 979284480.0, - 992133376.0, - 971017280.0, - 951307264.0, - 982885760.0, - 974063488.0, - 968568576.0, - 961594688.0, - 944972864.0, - 983837568.0, - 978412032.0, - 967581888.0, - 968756096.0, - 941574400.0, - 971292224.0, - 958283264.0, - 975812608.0, - 974360256.0, - 971620480.0, - 931969664.0, - 965538688.0, - 978798464.0, - 979266048.0, - 983707520.0, - 957975808.0, - 983873536.0, - 977417472.0, - 963129984.0, - 979024896.0, - 943335168.0, - 961540352.0, - 973266752.0, - 970047040.0, - 969316288.0, - 970616832.0, - 944042240.0, - 986351616.0, - 960342016.0, - 973579136.0, - 962190208.0, - 955545856.0, - 978440448.0, - 968560640.0, - 972779072.0, - 973495808.0, - 946637888.0, - 973024192.0, - 958180736.0, - 978572608.0, - 985661952.0, - 951968960.0, - 940693504.0, - 987063552.0, - 971913600.0, - 970914496.0, - 964771456.0, - 934606336.0, - 986079744.0, - 969507584.0, - 967233024.0, - 962025600.0, - 947726336.0, - 969480256.0, - 970779648.0, - 973080448.0, - 983468032.0, - 951103744.0, - 939465920.0, - 963918016.0, - 980930432.0, - 971177856.0, - 979467008.0, - 950412288.0, - 985938304.0, - 970857536.0, - 961497856.0, - 956633920.0, - 945690496.0, - 968481280.0, - 983780480.0, - 971184256.0, - 969637056.0, - 952246400.0, - 961509248.0, - 976643136.0, - 981730048.0, - 980609664.0, - 967668608.0, 
- 939772032.0, - 970320000.0, - 963732736.0, - 977485760.0, - 981631424.0, - 945746816.0, - 972116480.0, - 973540736.0, - 973175360.0, - 966066944.0, - 936670080.0, - 952732032.0, - 977313024.0, - 967006464.0, - 980247552.0, - 951831808.0, - 949984896.0, - 975022912.0, - 981808256.0, - 958861568.0, - 978811136.0, - 953703360.0, - 968368960.0, - 977667712.0, - 968228864.0, - 982963456.0, - 947629248.0, - 955507584.0, - 969670016.0, - 967550272.0, - 980648576.0, - 952615680.0, - 970705408.0, - 963557760.0, - 968057344.0, - 974339968.0, - 959936256.0, - 947985728.0, - 956355712.0, - 985459328.0, - 963088064.0, - 957991360.0, - 951522432.0, - 966915328.0, - 977176064.0, - 986378240.0, - 976842752.0, - 957545856.0, - 949887552.0, - 987582720.0, - 970992768.0, - 966588672.0, - 954783296.0, - 956379072.0, - 965881472.0, - 968599424.0, - 967134720.0, - 984683136.0, - 931338688.0, - 949491008.0, - 970887104.0, - 970963776.0, - 971379136.0, - 959562368.0, - 963597376.0, - 961184192.0, - 982921664.0, - 979050624.0, - 952621440.0, - 949265920.0, - 978269056.0, - 977521408.0, - 962387072.0, - 979011264.0, - 958561792.0, - 965200640.0, - 968900224.0, - 972240384.0, - 975677952.0, - 947801216.0, - 979185920.0, - 977730688.0, - 974997440.0, - 959979648.0, - 942900096.0, - 952712960.0, - 962836864.0, - 959496512.0, - 983437696.0, - 982361984.0, - 941725248.0, - 982578304.0, - 984915520.0, - 972806016.0, - 978331776.0, - 937670272.0, - 967641536.0, - 981484288.0, - 990962048.0, - 959851968.0, - 956485760.0, - 938229376.0, - 974449088.0, - 959002944.0, - 973131392.0, - 961139840.0, - 945260032.0, - 977570624.0, - 987683968.0, - 962928000.0, - 983368832.0, - 930780800.0, - 986718720.0, - 963263104.0, - 971655168.0, - 982111040.0, - 969881216.0, - 964076160.0, - 956213568.0, - 948041472.0, - 964980992.0, - 957953920.0, - 950926336.0, - 953789952.0, - 979125696.0, - 955324928.0, - 952301312.0, - 957732800.0, - 969389568.0, - 977259648.0, - 958580352.0, - 962569984.0, - 945890432.0, - 948026944.0, - 966418304.0, - 984258368.0, - 984983872.0, - 943260544.0, - 952384512.0, - 980540800.0, - 978144896.0, - 969622528.0, - 973972608.0, - 940000064.0, - 962032896.0, - 970968704.0, - 987005312.0, - 962866880.0, - 949542912.0, - 966065024.0, - 962585856.0, - 964585856.0, - 985850368.0, - 940117760.0, - 949747392.0, - 975297600.0, - 972442624.0, - 966982272.0, - 970937472.0, - 939975552.0, - 965705152.0, - 973486592.0, - 973362944.0, - 970977728.0, - 950963904.0, - 979199616.0, - 970035456.0, - 967635264.0, - 963358080.0, - 952247168.0, - 956216064.0, - 969788800.0, - 958001088.0, - 960883584.0, - 957624960.0, - 948788480.0, - 961669184.0, - 978087296.0, - 977028224.0, - 981930816.0, - 938700288.0, - 969013760.0, - 972265600.0, - 971086528.0, - 966399488.0, - 946396800.0, - 956897920.0, - 986979712.0, - 969291456.0, - 989720960.0, - 956655360.0, - 930761152.0, - 963077312.0, - 972295232.0, - 983035520.0, - 956374720.0, - 938088960.0, - 978049664.0, - 973334016.0, - 944131456.0, - 962438848.0, - 946681536.0, - 960536576.0, - 965082880.0, - 958125376.0, - 963724352.0, - 943107264.0, - 966611200.0, - 982909056.0, - 966287872.0, - 963279872.0, - 980414848.0, - 941665152.0, - 976234496.0, - 982362496.0, - 971164032.0, - 969297600.0, - 943890688.0, - 982564992.0, - 977436288.0, - 978886912.0, - 970827392.0, - 945931520.0, - 950228480.0, - 977412352.0, - 985059072.0, - 989978176.0, - 958051072.0, - 946830720.0, - 966662784.0, - 978381952.0, - 971252736.0, - 973885952.0, - 943174080.0, - 962659136.0, - 971300352.0, - 975618176.0, 
- 971404480.0, - 948232576.0, - 961759488.0, - 973642880.0, - 980135424.0, - 971769344.0, - 957572864.0, - 933775872.0, - 973487424.0, - 969372992.0, - 961126848.0, - 974677632.0, - 944122112.0, - 978242816.0, - 983408128.0, - 978427968.0, - 954968192.0, - 936573312.0, - 987430400.0, - 972124544.0, - 965832960.0, - 975606784.0, - 947903616.0, - 950006656.0, - 975150912.0, - 953439360.0, - 968940608.0, - 961036352.0, - 935909312.0, - 979123456.0, - 963945152.0, - 966544512.0, - 968057920.0, - 935623808.0, - 969181952.0, - 995754240.0, - 978976256.0, - 980901376.0, - 951608320.0, - 971471744.0, - 959721152.0, - 970636416.0, - 984667520.0, - 982811264.0, - 934178112.0, - 975963648.0, - 956830080.0, - 972798720.0, - 984363712.0, - 941791872.0, - 961542656.0, - 973753216.0, - 980186880.0, - 969692416.0, - 961281792.0, - 954728768.0, - 989910400.0, - 964453120.0, - 960015744.0, - 949367808.0, - 954594752.0, - 975065280.0, - 967038848.0, - 969236096.0, - 964217472.0, - 962300096.0, - 971509184.0, - 971435008.0, - 974802816.0, - 965583296.0, - 947338048.0, - 970809984.0, - 971921856.0, - 978742016.0, - 996777728.0, - 949276288.0, - 933999744.0, - 968274304.0, - 977914944.0, - 958532288.0, - 950861056.0, - 952761856.0, - 971412864.0, - 969254656.0, - 969823808.0, - 985973760.0, - 946511232.0, - 969796480.0, - 968647104.0, - 958945216.0, - 975352448.0, - 960958528.0, - 968443648.0, - 972584896.0, - 960072640.0, - 972977664.0, - 951475712.0, - 955927232.0, - 967173440.0, - 986208128.0, - 965668032.0, - 976196928.0, - 940602752.0, - 964360512.0, - 966548096.0, - 972474880.0, - 974100224.0, - 947771840.0, - 965123264.0, - 985146112.0, - 975958592.0, - 966414976.0, - 954538112.0, - 933791744.0, - 985552512.0, - 990465536.0, - 963272320.0, - 971467712.0, - 949330112.0, - 977442304.0, - 967678912.0, - 966750528.0, - 965843520.0, - 943925824.0, - 979668096.0, - 960466368.0, - 970657152.0, - 983659968.0, - 980694080.0, - 944319104.0, - 969219456.0, - 972360000.0, - 973532480.0, - 957519936.0, - 948992768.0, - 953068672.0, - 969274624.0, - 959968000.0, - 971228224.0, - 950749376.0, - 973302208.0, - 959227840.0, - 970578944.0, - 966622400.0, - 956279104.0, - 962315520.0, - 970164032.0, - 963272064.0, - 957413888.0, - 966982464.0, - 950112960.0, - 963435840.0, - 982521920.0, - 981439424.0, - 957886400.0, - 953618880.0, - 972140800.0, - 972574528.0, - 969552192.0, - 963967168.0, - 937931840.0, - 959792320.0, - 982695360.0, - 969096832.0, - 967604480.0, - 962319296.0, - 953353728.0, - 964435776.0, - 971693760.0, - 966006912.0, - 971449792.0, - 965964608.0, - 983068992.0, - 965355328.0, - 973981632.0, - 985763264.0, - 950380544.0, - 962849856.0, - 984696640.0, - 978032448.0, - 970939136.0, - 969445056.0, - 947336320.0, - 959564608.0, - 977603968.0, - 975451264.0, - 985860032.0, - 956168704.0, - 972917696.0, - 973708928.0, - 961488832.0, - 985186048.0, - 949030336.0, - 975965760.0, - 971664960.0, - 966653440.0, - 976054528.0, - 945996928.0, - 965548416.0, - 973599680.0, - 980302656.0, - 967617664.0, - 956744832.0, - 956168704.0, - 974829056.0, - 978900416.0, - 963803456.0, - 965899456.0, - 935298240.0, - 975768832.0, - 983533120.0, - 981822784.0, - 977400960.0, - 957507904.0, - 961753600.0, - 971365312.0, - 979127104.0, - 984951168.0, - 982093312.0, - 941529472.0, - 983868928.0, - 966979840.0, - 982691456.0, - 961335424.0, - 952575552.0, - 980760384.0, - 976750016.0, - 965706752.0, - 969000832.0, - 959332160.0, - 979323392.0, - 963239808.0, - 981069568.0, - 967778048.0, - 955402048.0, - 952766464.0, - 956145024.0, 
- 967793408.0, - 962232448.0, - 958466176.0, - 946095744.0, - 982546496.0, - 964325952.0, - 980637248.0, - 974888256.0, - 951892608.0, - 970130944.0, - 969289472.0, - 980805888.0, - 982004480.0, - 940931840.0, - 970395136.0, - 978573056.0, - 975142976.0, - 968097984.0, - 958159040.0, - 937506624.0, - 976905280.0, - 973024256.0, - 960868608.0, - 965629312.0, - 928453504.0, - 964290176.0, - 980607360.0, - 977911680.0, - 969675648.0, - 944643072.0, - 974050688.0, - 984023808.0, - 970787136.0, - 964618560.0, - 959463872.0, - 954479488.0, - 972360256.0, - 956101120.0, - 976733952.0, - 985840576.0, - 958384128.0, - 969573056.0, - 963288576.0, - 976199104.0, - 977610560.0, - 953632128.0, - 975708160.0, - 976330944.0, - 979344704.0, - 973920896.0, - 953017600.0, - 952767040.0, - 981303360.0, - 984029120.0, - 964543168.0, - 965946624.0, - 951044608.0, - 975743616.0, - 976876416.0, - 968810112.0, - 976216000.0, - 946182144.0, - 972659456.0, - 981967040.0, - 971432320.0, - 968908800.0, - 948963648.0, - 936902784.0, - 973200320.0, - 980805248.0, - 979578176.0, - 971279552.0, - 955651840.0, - 980159488.0, - 957699264.0, - 982226176.0, - 971690368.0, - 955794304.0, - 982354240.0, - 967976896.0, - 967325696.0, - 973205504.0, - 955916928.0, - 964352000.0, - 982668672.0, - 983293952.0, - 964787264.0, - 955178944.0, - 942254784.0, - 973436608.0, - 970794112.0, - 961046720.0, - 962908160.0, - 949851456.0, - 983325376.0, - 984209856.0, - 974678528.0, - 984976128.0, - 946474496.0, - 972187328.0, - 970179840.0, - 972786432.0, - 986351808.0, - 966793920.0, - 955481920.0, - 973164544.0, - 970475200.0, - 974539520.0, - 961372672.0, - 944087808.0, - 980474368.0, - 974160064.0, - 977514496.0, - 971245376.0, - 938116672.0, - 939856000.0, - 989607104.0, - 971937984.0, - 962472256.0, - 969840768.0, - 964964544.0, - 979000512.0, - 960978048.0, - 983261120.0, - 989539008.0, - 944341952.0, - 993746880.0, - 964276480.0, - 963232512.0, - 976610624.0, - 944407488.0, - 977418368.0, - 978834624.0, - 971871104.0, - 975734464.0, - 962815872.0, - 962920512.0, - 977155456.0, - 952620800.0, - 968188736.0, - 964801856.0, - 958062656.0, - 974032384.0, - 978925888.0, - 971758976.0, - 972924800.0, - 934113408.0, - 969001344.0, - 983635776.0, - 977360000.0, - 981351744.0, - 930858368.0, - 938177408.0, - 973956800.0, - 965073088.0, - 967858304.0, - 949253376.0, - 953109632.0, - 971789376.0, - 963601728.0, - 963075008.0, - 976382208.0, - 950176512.0, - 971641536.0, - 967857792.0, - 986224768.0, - 980344640.0, - 941307904.0, - 955159872.0, - 975757440.0, - 979380672.0, - 979350720.0, - 961437568.0, - 946262592.0, - 968123456.0, - 963922944.0, - 966870272.0, - 974525824.0, - 952431168.0, - 987822272.0, - 970064896.0, - 964392832.0, - 968238784.0, - 938703168.0, - 996356672.0, - 969584320.0, - 978894144.0, - 979707904.0, - 949733824.0, - 963307456.0, - 964943424.0, - 976390528.0, - 967674688.0, - 983212992.0, - 931121728.0, - 966041216.0, - 979260992.0, - 977151808.0, - 970127168.0, - 928813632.0, - 976481216.0, - 985536896.0, - 969624064.0, - 986035072.0, - 935797824.0, - 957608896.0, - 966046400.0, - 968013504.0, - 963445248.0, - 957385472.0, - 943979200.0, - 966506624.0, - 975255552.0, - 978663168.0, - 964205312.0, - 948695552.0, - 963496896.0, - 964567808.0, - 972784960.0, - 961207232.0, - 961298752.0, - 974965504.0, - 976105728.0, - 952883968.0, - 962219136.0, - 943610496.0, - 948535232.0, - 971740352.0, - 968575616.0, - 961145408.0, - 951484032.0, - 946801792.0, - 980573632.0, - 973289856.0, - 954094720.0, - 980628608.0, - 958189568.0, 
- 966422080.0, - 977641984.0, - 973641152.0, - 968993472.0, - 960825344.0, - 943203776.0, - 960585408.0, - 969358272.0, - 973605696.0, - 971886848.0, - 944143104.0, - 975812544.0, - 965290496.0, - 971470080.0, - 969047168.0, - 940294400.0, - 963904832.0, - 947056960.0, - 974076544.0, - 962073216.0, - 957711360.0, - 963994624.0, - 965937536.0, - 978425344.0, - 981726848.0, - 948685504.0, - 937389824.0, - 962448832.0, - 960662528.0, - 966016960.0, - 970505728.0, - 961904768.0, - 978014784.0, - 968929536.0, - 969781696.0, - 963823872.0, - 932158976.0, - 956682368.0, - 985824960.0, - 965333824.0, - 960746048.0, - 950900160.0, - 945037440.0, - 978180096.0, - 984947904.0, - 958612096.0, - 968185408.0, - 956194880.0, - 976281216.0, - 964788992.0, - 968903936.0, - 986458624.0, - 937148928.0, - 970235712.0, - 974094272.0, - 979672512.0, - 969672256.0, - 941497536.0, - 951448832.0, - 951018560.0, - 968859584.0, - 955667456.0, - 962440384.0, - 952574912.0, - 962459456.0, - 972357632.0, - 973204672.0, - 952295168.0, - 941006208.0, - 966426880.0, - 998354240.0, - 976476416.0, - 962262592.0, - 941357248.0, - 958793280.0, - 961055552.0, - 972029440.0, - 977576704.0, - 974241152.0, - 955667904.0, - 967431104.0, - 980837184.0, - 958991040.0, - 968756352.0, - 936932416.0, - 967534720.0, - 980463488.0, - 974646016.0, - 954913280.0, - 948394048.0, - 959638976.0, - 990254336.0, - 967258560.0, - 974963584.0, - 970684224.0, - 955156928.0, - 976667840.0, - 960294784.0, - 961231936.0, - 959308800.0, - 937475264.0, - 962245248.0, - 967650176.0, - 975082560.0, - 979618752.0, - 953874944.0, - 950754368.0, - 963804416.0, - 960271936.0, - 979702016.0, - 971587648.0, - 954566080.0, - 953463936.0, - 972294016.0, - 967461952.0, - 967282240.0, - 950986496.0, - 969834816.0, - 974811072.0, - 961141952.0, - 960868480.0, - 944243968.0, - 973321344.0, - 980513472.0, - 965077824.0, - 973763456.0, - 924311168.0, - 973399680.0, - 980765056.0, - 974949632.0, - 951117312.0, - 944539456.0, - 925608448.0, - 989776576.0, - 983093056.0, - 976174528.0, - 969236352.0, - 952627648.0, - 977000832.0, - 982029312.0, - 976495616.0, - 974812224.0, - 949060416.0, - 964321344.0, - 969488320.0, - 982912896.0, - 971767744.0, - 947757376.0, - 962411136.0, - 963763712.0, - 975741376.0, - 977233664.0, - 965918784.0, - 936192896.0, - 977779072.0, - 960361728.0, - 966538688.0, - 973043584.0, - 954648000.0, - 959451776.0, - 976656576.0, - 974861056.0, - 966620032.0, - 942063168.0, - 969118272.0, - 982134784.0, - 971667840.0, - 967658560.0, - 976212480.0, - 943523648.0, - 972270272.0, - 980114624.0, - 960195840.0, - 978223936.0, - 954960128.0, - 968459648.0, - 982481472.0, - 957186432.0, - 966880256.0, - 937487552.0, - 952872960.0, - 979948096.0, - 978890624.0, - 982442304.0, - 951320256.0, - 934107776.0, - 975766592.0, - 972871616.0, - 984904960.0, - 965993728.0, - 954231424.0, - 980875968.0, - 966290368.0, - 966201280.0, - 969668224.0, - 951651712.0, - 964609792.0, - 974064640.0, - 971761280.0, - 969500032.0, - 966415680.0, - 966637632.0, - 977847104.0, - 960212096.0, - 971532480.0, - 965213184.0, - 963248896.0, - 990388288.0, - 958538880.0, - 976756864.0, - 983425024.0, - 931321344.0, - 946745408.0, - 972389376.0, - 970839680.0, - 980935616.0, - 959234944.0, - 963986496.0, - 972310144.0, - 976823744.0, - 975771712.0, - 963359296.0, - 939804224.0, - 983545472.0, - 990107008.0, - 969120832.0, - 973733120.0, - 945268800.0, - 972478592.0, - 971448576.0, - 958999168.0, - 985219392.0, - 980530880.0, - 960931008.0, - 953292608.0, - 965451648.0, - 978077120.0, 
- 969804544.0, - 956380352.0, - 977689280.0, - 976501440.0, - 967911232.0, - 971495936.0, - 944195136.0, - 974261376.0, - 973308672.0, - 975996864.0, - 950649984.0, - 951448192.0, - 972720128.0, - 969294272.0, - 961792384.0, - 973032576.0, - 973866496.0, - 958256256.0, - 977567168.0, - 964839680.0, - 967831232.0, - 978984896.0, - 928985984.0, - 973935488.0, - 981719744.0, - 963765568.0, - 979261120.0, - 955877952.0, - 967651520.0, - 963543552.0, - 981258176.0, - 976177216.0, - 958088000.0, - 945731328.0, - 974651520.0, - 996439424.0, - 967843456.0, - 975134272.0, - 933767232.0, - 971477952.0, - 976842560.0, - 987009536.0, - 978941376.0, - 951325632.0, - 975767296.0, - 968266304.0, - 944866624.0, - 979275904.0, - 966534080.0, - 965749504.0, - 977553216.0, - 975725184.0, - 980912256.0, - 963014208.0, - 956772672.0, - 965539456.0, - 965396736.0, - 977848640.0, - 977259328.0, - 974586368.0, - 974931648.0, - 972626752.0, - 971565696.0, - 983223424.0, - 968934592.0, - 962259904.0, - 980496960.0, - 972112256.0, - 973174080.0, - 965890816.0, - 941965760.0, - 980546688.0, - 977131008.0, - 972129920.0, - 971405248.0, - 936352000.0, - 968445888.0, - 975153344.0, - 979059008.0, - 976662976.0, - 928849856.0, - 978131328.0, - 979579904.0, - 964862272.0, - 969209408.0, - 965940416.0, - 950791616.0, - 972296896.0, - 970938816.0, - 987498560.0, - 967758592.0, - 944513792.0, - 973016064.0, - 970758656.0, - 978738624.0, - 972522752.0, - 947268032.0, - 974494336.0, - 979807680.0, - 972941952.0, - 972914688.0, - 947223040.0, - 949709632.0, - 976846592.0, - 971902272.0, - 979733056.0, - 973786752.0, - 944968192.0, - 980787648.0, - 981227456.0, - 969726080.0, - 965378240.0, - 956140992.0, - 983781056.0, - 983824000.0, - 980612032.0, - 969728704.0, - 953852800.0, - 941328320.0, - 963630016.0, - 988763456.0, - 987013184.0, - 968937088.0, - 955058368.0, - 962529024.0, - 966191232.0, - 966160128.0, - 983290624.0, - 936971200.0, - 969623360.0, - 977266048.0, - 976023872.0, - 980393920.0, - 957279232.0, - 963027968.0, - 956338176.0, - 968107584.0, - 963630016.0, - 946412992.0, - 949717888.0, - 972425792.0, - 953770624.0, - 956161728.0, - 957709952.0, - 951672064.0, - 982406272.0, - 971004096.0, - 963427136.0, - 969586176.0, - 965564544.0, - 963809280.0, - 960527616.0, - 976778688.0, - 979100224.0, - 970700672.0, - 973844736.0, - 980557184.0, - 973676864.0, - 961148928.0, - 955967552.0, - 934774656.0, - 960542400.0, - 966358144.0, - 967413504.0, - 975995840.0, - 947116800.0, - 959785088.0, - 971377152.0, - 966559168.0, - 977737920.0, - 942668736.0, - 953736576.0, - 971814400.0, - 957328192.0, - 979194368.0, - 954583360.0, - 940405952.0, - 988628608.0, - 972020096.0, - 973802688.0, - 969470848.0, - 948660992.0, - 966444352.0, - 966197696.0, - 976904704.0, - 975301888.0, - 945847872.0, - 958453248.0, - 968476032.0, - 953920512.0, - 967651392.0, - 953145280.0, - 963428480.0, - 971401216.0, - 976572160.0, - 978156544.0, - 974490880.0, - 946837632.0, - 977234944.0, - 975239232.0, - 954075072.0, - 970649472.0, - 952555840.0, - 970667520.0, - 971792512.0, - 967248640.0, - 949294336.0, - 934664832.0, - 959160576.0, - 978588288.0, - 982095872.0, - 967414592.0, - 962372608.0, - 938147008.0, - 954839040.0, - 967599104.0, - 987279104.0, - 973881408.0, - 944140736.0, - 974096064.0, - 970029824.0, - 988972928.0, - 982314752.0, - 945278016.0, - 958064320.0, - 971393856.0, - 974845568.0, - 969471424.0, - 949740864.0, - 951452288.0, - 966450880.0, - 968281408.0, - 964171008.0, - 956763072.0, - 945851264.0, - 967526272.0, - 980497408.0, 
- 953512768.0, - 960849664.0, - 967291264.0, - 977291584.0, - 967267520.0, - 979975552.0, - 957254144.0, - 962218048.0, - 950189888.0, - 976278400.0, - 971407488.0, - 980312704.0, - 972296576.0, - 945828928.0, - 952708992.0, - 977351872.0, - 976028864.0, - 973840448.0, - 939853376.0, - 975404544.0, - 977270144.0, - 983293440.0, - 955462208.0, - 956524288.0, - 943288000.0, - 960540736.0, - 977475264.0, - 984475968.0, - 966799168.0, - 952593280.0, - 976813440.0, - 965177728.0, - 966935488.0, - 971482048.0, - 944571904.0, - 974077632.0, - 970348416.0, - 969883968.0, - 971506368.0, - 949940096.0, - 948415936.0, - 967998144.0, - 970786048.0, - 972610304.0, - 953778816.0, - 949085120.0, - 970402240.0, - 973548480.0, - 971664192.0, - 950142400.0, - 957999680.0, - 987353024.0, - 980863680.0, - 956866048.0, - 959761984.0, - 962540928.0, - 968469760.0, - 982511232.0, - 956334912.0, - 976498368.0, - 938281856.0, - 938656896.0, - 968072128.0, - 975133888.0, - 959514048.0, - 974384832.0, - 945356096.0, - 964806016.0, - 963140800.0, - 971082752.0, - 985360768.0, - 941469248.0, - 963634880.0, - 965207552.0, - 983131328.0, - 966267136.0, - 949436992.0, - 933252992.0, - 979782208.0, - 958031232.0, - 964578560.0, - 972007936.0, - 955061440.0, - 981651712.0, - 958466368.0, - 973604544.0, - 967792768.0, - 942698176.0, - 980495424.0, - 967711296.0, - 956541376.0, - 960934976.0, - 932012480.0, - 939512000.0, - 969221824.0, - 970176896.0, - 955228736.0, - 967148224.0, - 951535232.0, - 987683072.0, - 973311488.0, - 972248704.0, - 968304320.0, - 940715328.0, - 955683840.0, - 972289984.0, - 972432192.0, - 977282432.0, - 946449536.0, - 950327744.0, - 961743552.0, - 973305600.0, - 964289792.0, - 964008192.0, - 961436672.0, - 969741056.0, - 972801088.0, - 959189952.0, - 956217856.0, - 951800576.0, - 979267200.0, - 955622144.0, - 971251648.0, - 980316736.0, - 966459712.0, - 958822336.0, - 968083840.0, - 955938368.0, - 956038336.0, - 954539968.0, - 968531456.0, - 967929024.0, - 966696704.0, - 972142400.0, - 963902656.0, - 928926464.0, - 977321024.0, - 976504960.0, - 974799360.0, - 967733888.0, - 950444032.0, - 963469440.0, - 983125440.0, - 962636224.0, - 969218176.0, - 954742016.0, - 959397952.0, - 977733248.0, - 987229824.0, - 974280192.0, - 952094528.0, - 944122304.0, - 973594176.0, - 970815232.0, - 953764736.0, - 979919040.0, - 950571520.0, - 976964992.0, - 962998336.0, - 961976768.0, - 983838208.0, - 939549120.0, - 979587200.0, - 965891456.0, - 971683584.0, - 978816960.0, - 952414016.0, - 945802560.0, - 967777728.0, - 965661952.0, - 975286912.0, - 967464128.0, - 949828992.0, - 979188096.0, - 960283392.0, - 971307904.0, - 959975040.0, - 943335104.0, - 986146048.0, - 978715968.0, - 982196032.0, - 941391104.0, - 958416704.0, - 955412480.0, - 979742592.0, - 964329536.0, - 952458688.0, - 962585920.0, - 935138752.0, - 968731776.0, - 974533888.0, - 971529472.0, - 975038464.0, - 939388992.0, - 973917632.0, - 987897024.0, - 968189888.0, - 981193024.0, - 932611456.0, - 969980352.0, - 964373248.0, - 985266048.0, - 957972608.0, - 963796288.0, - 941077376.0, - 972322432.0, - 965118656.0, - 982258624.0, - 969098816.0, - 955848128.0, - 992000832.0, - 966236096.0, - 980576256.0, - 972248384.0, - 948820608.0, - 968422912.0, - 983495296.0, - 968379520.0, - 971286528.0, - 981129728.0, - 964410432.0, - 975215232.0, - 974163712.0, - 971359040.0, - 968993984.0, - 954499904.0, - 975915456.0, - 975861056.0, - 985295616.0, - 974192320.0, - 969102784.0, - 961317824.0, - 973245696.0, - 980958336.0, - 964872768.0, - 961061888.0, - 951701440.0, 
- 984447808.0, - 960826624.0, - 971121856.0, - 955659072.0, - 966056384.0, - 965210496.0, - 972345408.0, - 968244032.0, - 978429632.0, - 950635584.0, - 970614656.0, - 973470272.0, - 967378048.0, - 981500928.0, - 930009728.0, - 961955712.0, - 967930176.0, - 971063360.0, - 975972608.0, - 960872064.0, - 950836544.0, - 977347328.0, - 977384128.0, - 982418304.0, - 977347712.0, - 942442752.0, - 970529984.0, - 963182080.0, - 978538368.0, - 976776768.0, - 953436544.0, - 951689728.0, - 978092608.0, - 975700416.0, - 946662208.0, - 962189952.0, - 950867392.0, - 978599616.0, - 968208704.0, - 972271808.0, - 973348800.0, - 940888960.0, - 974958976.0, - 979534592.0, - 989962496.0, - 970006336.0, - 955223872.0, - 963987328.0, - 969159104.0, - 992095360.0, - 976756288.0, - 940654656.0, - 944364672.0, - 957784896.0, - 980825536.0, - 975541120.0, - 972887168.0, - 942410432.0, - 975195200.0, - 978565056.0, - 975548672.0, - 988348736.0, - 947441664.0, - 962531264.0, - 967766528.0, - 957954048.0, - 972555840.0, - 934506112.0, - 962717952.0, - 984748224.0, - 975013184.0, - 976998208.0, - 963122688.0, - 951635712.0, - 962124672.0, - 964161088.0, - 980128704.0, - 967977472.0, - 956174720.0, - 959794368.0, - 972108608.0, - 970626880.0, - 969361088.0, - 946458816.0, - 934309888.0, - 981432768.0, - 964879104.0, - 979482496.0, - 950446464.0, - 962714560.0, - 971536512.0, - 966210368.0, - 984085760.0, - 990649600.0, - 957426496.0, - 967576320.0, - 954460672.0, - 971948992.0, - 977640640.0, - 931561536.0, - 974222016.0, - 958423488.0, - 971424896.0, - 974600896.0, - 951440768.0, - 959566144.0, - 965252544.0, - 971064704.0, - 975333056.0, - 972011520.0, - 946616384.0, - 964608896.0, - 975104128.0, - 980903360.0, - 972813568.0, - 946703360.0, - 985879552.0, - 959701696.0, - 978619712.0, - 973641664.0, - 956983936.0, - 967820224.0, - 970038336.0, - 967709952.0, - 965205760.0, - 975709504.0, - 951745536.0, - 972494784.0, - 966351552.0, - 960954432.0, - 969165440.0, - 945948224.0, - 968908864.0, - 970833856.0, - 963325568.0, - 972647552.0, - 947188864.0, - 964141120.0, - 966924736.0, - 974957440.0, - 988913600.0, - 952238016.0, - 950326784.0, - 949767040.0, - 965159104.0, - 968921216.0, - 967732480.0, - 925482752.0, - 972807488.0, - 972638080.0, - 957369664.0, - 960858688.0, - 942446336.0, - 950831616.0, - 965830144.0, - 960531648.0, - 964774784.0, - 952980288.0, - 966027456.0, - 972790400.0, - 976626304.0, - 965603840.0, - 973089920.0, - 962951424.0, - 984466560.0, - 976216576.0, - 960892864.0, - 953216576.0, - 960806272.0, - 976360704.0, - 975529728.0, - 965753536.0, - 966348096.0, - 952085760.0, - 961088768.0, - 965697792.0, - 973895168.0, - 957637248.0, - 977637696.0, - 940232064.0, - 977431936.0, - 969338432.0, - 978101120.0, - 962238848.0, - 945607296.0, - 970621376.0, - 971733888.0, - 988034880.0, - 975479360.0, - 947674176.0, - 960562112.0, - 973360000.0, - 960894528.0, - 958956928.0, - 966526144.0, - 938854848.0, - 979477120.0, - 965198720.0, - 968328576.0, - 971859008.0, - 951716480.0, - 965420736.0, - 973760704.0, - 975044480.0, - 976613568.0, - 943884992.0, - 978484224.0, - 979261824.0, - 971783424.0, - 971739072.0, - 956646528.0, - 963846336.0, - 983289344.0, - 960728704.0, - 961292672.0, - 962509696.0, - 940788736.0, - 970893056.0, - 968734912.0, - 962900992.0, - 969508352.0, - 952155712.0, - 970346432.0, - 962669120.0, - 967300288.0, - 976827264.0, - 964134784.0, - 963821312.0, - 977887680.0, - 958922816.0, - 983797504.0, - 974620288.0, - 937600960.0, - 963017408.0, - 971395200.0, - 983263872.0, - 979736128.0, 
- 937672000.0, - 961483456.0, - 950204544.0, - 970087040.0, - 982427968.0, - 952478720.0, - 967691200.0, - 977851776.0, - 962691968.0, - 965434752.0, - 956612928.0, - 945445184.0, - 975929152.0, - 969228544.0, - 954448128.0, - 957755456.0, - 936189888.0, - 979276544.0, - 965163648.0, - 971635520.0, - 957348096.0, - 945257728.0, - 955305408.0, - 966231616.0, - 966333696.0, - 971360832.0, - 953111744.0, - 949290624.0, - 981340800.0, - 963663616.0, - 967803456.0, - 962046656.0, - 944950208.0, - 968349696.0, - 967084928.0, - 969202624.0, - 977582784.0, - 946554432.0, - 963036608.0, - 980124992.0, - 963762368.0, - 967440064.0, - 953014016.0, - 952111744.0, - 964207552.0, - 968005824.0, - 963228224.0, - 984584128.0, - 944364160.0, - 969063552.0, - 975689664.0, - 958785408.0, - 974479168.0, - 950242240.0, - 971004416.0, - 970004224.0, - 963171136.0, - 963596160.0, - 954199296.0, - 960654592.0, - 982819584.0, - 970337088.0, - 966501056.0, - 961341696.0, - 953177664.0, - 972313728.0, - 987355072.0, - 974503680.0, - 956472384.0, - 945806016.0, - 966235136.0, - 988140288.0, - 978116608.0, - 960206208.0, - 941950784.0, - 943693696.0, - 970237824.0, - 968935040.0, - 977637120.0, - 954881408.0, - 956555840.0, - 983993536.0, - 968422400.0, - 981401408.0, - 974248256.0, - 946328704.0, - 966728768.0, - 975775616.0, - 966496256.0, - 971699840.0, - 959260800.0, - 951018304.0, - 957813632.0, - 964649472.0, - 981483776.0, - 953678976.0, - 948986176.0, - 969763264.0, - 978162752.0, - 974768192.0, - 960720896.0, - 934270528.0, - 961092672.0, - 975365376.0, - 972710208.0, - 964899072.0, - 956035200.0, - 973742336.0, - 978201344.0, - 979485888.0, - 959934976.0, - 959615616.0, - 954542592.0, - 975416256.0, - 975719936.0, - 958922432.0, - 950817024.0, - 954942912.0, - 979512064.0, - 964267584.0, - 973486016.0, - 967681792.0, - 935557696.0, - 961839872.0, - 974424960.0, - 988294464.0, - 985091328.0, - 941165504.0, - 963614208.0, - 971402368.0, - 959588096.0, - 973921856.0, - 958716800.0, - 943572800.0, - 960335872.0, - 975819648.0, - 952713152.0, - 983175360.0, - 948491392.0, - 962829632.0, - 957288128.0, - 959541888.0, - 983565056.0, - 962983296.0, - 960064960.0, - 964155456.0, - 950264576.0, - 959635456.0, - 957470656.0, - 963542016.0, - 969230272.0, - 966453312.0, - 987144640.0, - 966569920.0, - 941984064.0, - 974474752.0, - 978442624.0, - 976999616.0, - 961451648.0, - 959529344.0, - 967994752.0, - 982728832.0, - 974488832.0, - 959375936.0, - 942917504.0, - 959750272.0, - 966918976.0, - 966538624.0, - 972441472.0, - 961642816.0, - 944569152.0, - 971878272.0, - 963299840.0, - 967215552.0, - 987664640.0, - 947288896.0, - 984886080.0, - 971314304.0, - 970495680.0, - 981465088.0, - 948857600.0, - 968643968.0, - 951244352.0, - 972461184.0, - 956593216.0, - 957309312.0, - 940704512.0, - 976784256.0, - 961705728.0, - 974186112.0, - 970002880.0, - 958595904.0, - 967958720.0, - 972104896.0, - 991389248.0, - 974030464.0, - 934730496.0, - 962359552.0, - 968602944.0, - 972818048.0, - 976059392.0, - 959127936.0, - 949671424.0, - 980125120.0, - 958315584.0, - 961110272.0, - 962059840.0, - 936578176.0, - 973996992.0, - 958719936.0, - 978700672.0, - 979829760.0, - 929410240.0, - 953891392.0, - 969671360.0, - 979375808.0, - 956561088.0, - 942290176.0, - 944030528.0, - 960044864.0, - 968718016.0, - 970754880.0, - 959313856.0, - 946086912.0, - 970983680.0, - 969499392.0, - 952019328.0, - 974469888.0, - 952712448.0, - 980567808.0, - 968682176.0, - 972784192.0, - 958615040.0, - 954550272.0, - 962916608.0, - 967968960.0, - 967909824.0, 
- 955607360.0, - 960908096.0, - 965459968.0, - 966661632.0, - 966662528.0, - 997560448.0, - 975216256.0, - 958295936.0, - 978651136.0, - 966134208.0, - 987465536.0, - 982706432.0, - 952116224.0, - 957602688.0, - 973381376.0, - 995193792.0, - 974494976.0, - 956035840.0, - 935559168.0, - 979505408.0, - 973369600.0, - 995180928.0, - 974482048.0, - 956048512.0, - 935546880.0, - 979492224.0, - 972321152.0, - 967976704.0, - 977072960.0, - 934262272.0, - 992121216.0, - 979542144.0, - 986180608.0, - 969832320.0, - 965121792.0, - 971854272.0, - 963149312.0, - 968050112.0, - 975986368.0, - 966238784.0, - 976454784.0, - 974676672.0, - 969408768.0, - 964701056.0, - 967743616.0, - 954235712.0, - 978781120.0, - 977436224.0, - 967240192.0, - 963770752.0, - 951522240.0, - 974659008.0, - 972527424.0, - 963813952.0, - 967693888.0, - 942688192.0, - 981055488.0, - 973114368.0, - 969197696.0, - 972257856.0, - 950853760.0, - 944255296.0, - 980598336.0, - 963370176.0, - 981818624.0, - 979003648.0, - 950562944.0, - 961397504.0, - 984405760.0, - 971902144.0, - 978915904.0, - 944721152.0, - 967431168.0, - 963746112.0, - 974065536.0, - 970237504.0, - 957807680.0, - 940188672.0, - 977651200.0, - 967448128.0, - 974191424.0, - 978437248.0, - 958137024.0, - 970507136.0, - 982706688.0, - 968413312.0, - 977110784.0, - 947945920.0, - 988735936.0, - 966843776.0, - 969401920.0, - 965002496.0, - 953648576.0, - 963052672.0, - 959078208.0, - 969904576.0, - 980597120.0, - 971864256.0, - 944882944.0, - 966466688.0, - 972540352.0, - 962410816.0, - 959906560.0, - 958274560.0, - 992011520.0, - 976084672.0, - 970262272.0, - 979911168.0, - 954764800.0, - 946356928.0, - 978702592.0, - 973806400.0, - 982366720.0, - 963095104.0, - 934132928.0, - 965146880.0, - 974949312.0, - 986778688.0, - 973217536.0, - 942887936.0, - 961124416.0, - 971254400.0, - 964488000.0, - 962902912.0, - 952610176.0, - 960660928.0, - 976027968.0, - 972744448.0, - 986592704.0, - 954754048.0, - 953670592.0, - 970586496.0, - 970882688.0, - 962601280.0, - 961794176.0, - 946996416.0, - 970363840.0, - 965310976.0, - 981188416.0, - 963151808.0, - 933158272.0, - 965520448.0, - 981912576.0, - 957014080.0, - 974480704.0, - 934489280.0, - 955800512.0, - 968537024.0, - 973002432.0, - 959339904.0, - 954163968.0, - 950501952.0, - 964747776.0, - 955618048.0, - 976023936.0, - 977687424.0, - 934446400.0, - 953234432.0, - 977944704.0, - 964133248.0, - 969924800.0, - 951144640.0, - 965340992.0, - 972165504.0, - 956645888.0, - 969969472.0, - 977290560.0, - 947898752.0, - 973202816.0, - 959819712.0, - 978168000.0, - 977121728.0, - 952616832.0, - 978487488.0, - 981730624.0, - 984701952.0, - 967378880.0, - 935953280.0, - 964983872.0, - 973220928.0, - 967259520.0, - 962472576.0, - 972423360.0, - 947037568.0, - 974026624.0, - 983978048.0, - 958513472.0, - 955427008.0, - 950644288.0, - 980127488.0, - 968634944.0, - 963911104.0, - 974233536.0, - 940209280.0, - 966117440.0, - 973585024.0, - 981495424.0, - 976896640.0, - 957589248.0, - 948326848.0, - 963149376.0, - 982156864.0, - 989143744.0, - 979645376.0, - 928395904.0, - 971871296.0, - 979172864.0, - 969396544.0, - 976201472.0, - 939298304.0, - 962638848.0, - 949949568.0, - 964836864.0, - 984534144.0, - 949341696.0, - 946375040.0, - 965998336.0, - 973132416.0, - 974720064.0, - 965766400.0, - 947390528.0, - 975673024.0, - 965857088.0, - 963191488.0, - 970292096.0, - 948316352.0, - 968948224.0, - 951689792.0, - 962271040.0, - 966257728.0, - 946903936.0, - 977928768.0, - 986181952.0, - 957792704.0, - 965299904.0, - 947424576.0, - 951874240.0, 
- 990291136.0, - 979603456.0, - 968499648.0, - 960028416.0, - 945666880.0, - 964715136.0, - 968058752.0, - 972375168.0, - 969973504.0, - 947430464.0, - 974598144.0, - 972250624.0, - 953018752.0, - 972244608.0, - 976545920.0, - 941104768.0, - 972265728.0, - 968262208.0, - 971828288.0, - 981783744.0, - 946866944.0, - 957577280.0, - 965776384.0, - 965607232.0, - 972388160.0, - 942611776.0, - 971584256.0, - 965639360.0, - 968205440.0, - 977930752.0, - 946756096.0, - 967349440.0, - 971102976.0, - 982247104.0, - 966552256.0, - 971236864.0, - 940521152.0, - 966707328.0, - 967366336.0, - 979107328.0, - 943544448.0, - 935810240.0, - 968936448.0, - 963945920.0, - 965944384.0, - 964949312.0, - 940316992.0, - 969596224.0, - 982049984.0, - 972036160.0, - 967644608.0, - 946474944.0, - 938193728.0, - 971120384.0, - 974599296.0, - 982041024.0, - 977332608.0, - 939135424.0, - 991187200.0, - 970708800.0, - 955801536.0, - 973083136.0, - 950052736.0, - 980071168.0, - 976010624.0, - 968413696.0, - 976950336.0, - 947037312.0, - 955699008.0, - 976213056.0, - 960257536.0, - 977301248.0, - 985250624.0, - 965203584.0, - 979916032.0, - 979227712.0, - 970150016.0, - 959938688.0, - 956621248.0, - 976153344.0, - 960736512.0, - 973707776.0, - 978420800.0, - 944955648.0, - 960080064.0, - 964519104.0, - 969141440.0, - 957836544.0, - 961142080.0, - 939710912.0, - 975219392.0, - 967561280.0, - 994904640.0, - 961430080.0, - 942571200.0, - 967128832.0, - 973088000.0, - 979930176.0, - 968572416.0, - 946731264.0, - 958634176.0, - 984853568.0, - 960618752.0, - 972831040.0, - 970021824.0, - 948553088.0, - 961491776.0, - 963327232.0, - 959266240.0, - 971938496.0, - 957255488.0, - 968034176.0, - 961661120.0, - 969765376.0, - 966452096.0, - 947101504.0, - 959729536.0, - 969458304.0, - 965900672.0, - 977718144.0, - 963340864.0, - 966987072.0, - 972251008.0, - 974875328.0, - 965427648.0, - 957522048.0, - 942958592.0, - 961911360.0, - 969458368.0, - 977289536.0, - 959535552.0, - 938390848.0, - 958323072.0, - 971501440.0, - 967787136.0, - 970875136.0, - 944067264.0, - 943765568.0, - 980054080.0, - 976730368.0, - 971471872.0, - 953346048.0, - 943427712.0, - 971981120.0, - 963550016.0, - 971155072.0, - 969415488.0, - 939969408.0, - 969691712.0, - 962313216.0, - 973469312.0, - 992090816.0, - 953564992.0, - 948975232.0, - 970424896.0, - 962479360.0, - 960027264.0, - 961837568.0, - 952972416.0, - 975235136.0, - 964317248.0, - 972064640.0, - 975809728.0, - 943748032.0, - 969219904.0, - 965645632.0, - 969604864.0, - 986414080.0, - 957371328.0, - 965120896.0, - 981114048.0, - 966760640.0, - 965194688.0, - 948058880.0, - 932876032.0, - 981514496.0, - 969076928.0, - 980687424.0, - 959755520.0, - 939557376.0, - 955594752.0, - 980484992.0, - 978223040.0, - 969002304.0, - 946351936.0, - 957885632.0, - 979544512.0, - 963545600.0, - 974468032.0, - 961651136.0, - 944623808.0, - 981752960.0, - 989928896.0, - 979737536.0, - 962284864.0, - 945915392.0, - 970411712.0, - 957714944.0, - 966153408.0, - 985703744.0, - 944171200.0, - 965398848.0, - 968913792.0, - 962138112.0, - 953674752.0, - 954368640.0, - 986489088.0, - 964599232.0, - 961119296.0, - 965256832.0, - 945031616.0, - 936002880.0, - 975415296.0, - 974744512.0, - 970484352.0, - 984617088.0, - 954195008.0, - 970331456.0, - 972916992.0, - 956965952.0, - 966292928.0, - 943359680.0, - 959033856.0, - 982058240.0, - 971036480.0, - 978443584.0, - 965332352.0, - 935597504.0, - 971644672.0, - 964545344.0, - 976257856.0, - 976116032.0, - 954636096.0, - 976165376.0, - 977419264.0, - 961709056.0, - 991612800.0, 
- 956523904.0, - 956840896.0, - 975737472.0, - 985580608.0, - 984906112.0, - 950670720.0, - 929029888.0, - 967870912.0, - 977184128.0, - 961444032.0, - 974476544.0, - 950107200.0, - 987578688.0, - 980018304.0, - 970295040.0, - 966061120.0, - 949025600.0, - 976736448.0, - 963015680.0, - 975354816.0, - 971719040.0, - 939841344.0, - 964463872.0, - 975060864.0, - 968426112.0, - 963818816.0, - 964171328.0, - 954704512.0, - 972341952.0, - 977223040.0, - 964833344.0, - 983089600.0, - 935789568.0, - 963881024.0, - 966608320.0, - 983804992.0, - 970478848.0, - 951524416.0, - 944129984.0, - 988247616.0, - 965969920.0, - 952212288.0, - 957567808.0, - 938833984.0, - 967033344.0, - 969380224.0, - 965773440.0, - 973727296.0, - 940893760.0, - 969796416.0, - 987207744.0, - 979695616.0, - 957643008.0, - 951528768.0, - 979017472.0, - 975387520.0, - 975281408.0, - 968427840.0, - 968806592.0, - 978402368.0, - 980427264.0, - 964074688.0, - 972711808.0, - 970944832.0, - 945103616.0, - 985440256.0, - 978079488.0, - 968653760.0, - 967265792.0, - 949218112.0, - 987740736.0, - 981401856.0, - 961260928.0, - 963837440.0, - 963823872.0, - 964992832.0, - 977090944.0, - 973198528.0, - 971912960.0, - 961262656.0, - 936331456.0, - 966092544.0, - 1000624256.0, - 973620544.0, - 989009600.0, - 956136704.0, - 970453248.0, - 968043584.0, - 968487744.0, - 978424512.0, - 966799872.0, - 955270336.0, - 981133312.0, - 964456192.0, - 985901632.0, - 968218752.0, - 959426880.0, - 961755200.0, - 971472384.0, - 981381120.0, - 974785856.0, - 946603648.0, - 983058880.0, - 972203712.0, - 968703936.0, - 953199040.0, - 946063168.0, - 965680832.0, - 981508864.0, - 974784832.0, - 970561856.0, - 926223488.0, - 956196224.0, - 987872704.0, - 988890496.0, - 966574464.0, - 970932608.0, - 957729024.0, - 979138432.0, - 976908736.0, - 979244032.0, - 979929728.0, - 946818816.0, - 964716864.0, - 967669440.0, - 992563840.0, - 972361984.0, - 957813632.0, - 943059840.0, - 958729216.0, - 984136384.0, - 970941120.0, - 961854144.0, - 963400896.0, - 964438016.0, - 963765824.0, - 981154176.0, - 962837504.0, - 949981184.0, - 964162944.0, - 969636352.0, - 977646976.0, - 973118144.0, - 962051200.0, - 969115712.0, - 967173888.0, - 964661184.0, - 965281792.0, - 938461568.0, - 942789120.0, - 969238976.0, - 969396736.0, - 977326272.0, - 985693440.0, - 943355136.0, - 976669440.0, - 981866048.0, - 978464768.0, - 971240320.0, - 940368192.0, - 958882496.0, - 975565824.0, - 978469248.0, - 956037696.0, - 971318016.0, - 934791808.0, - 961199104.0, - 972597248.0, - 964259392.0, - 966891584.0, - 949945024.0, - 974521152.0, - 966959296.0, - 953346688.0, - 969797376.0, - 944896512.0, - 957087872.0, - 966351232.0, - 984740032.0, - 964399872.0, - 944806080.0, - 948838272.0, - 963391296.0, - 966104512.0, - 992895168.0, - 956093952.0, - 950773824.0, - 975550720.0, - 979162944.0, - 975228032.0, - 952794304.0, - 953541952.0, - 967666560.0, - 977321344.0, - 973576448.0, - 955081024.0, - 937280448.0, - 960970944.0, - 979243840.0, - 970645824.0, - 956387520.0, - 944582272.0, - 961511488.0, - 974060864.0, - 967481408.0, - 979095488.0, - 981448192.0, - 946732864.0, - 979993856.0, - 977129472.0, - 975372224.0, - 971553024.0, - 949612288.0, - 969716864.0, - 953815808.0, - 977586176.0, - 964361088.0, - 963590720.0, - 958937408.0, - 969643456.0, - 965128768.0, - 966118016.0, - 982338752.0, - 951279104.0, - 955521664.0, - 968892672.0, - 972106112.0, - 964865536.0, - 961278720.0, - 968992064.0, - 971422464.0, - 972100480.0, - 959760704.0, - 982879424.0, - 950610880.0, - 970486528.0, - 
970533824.0, - 963341312.0, - 944189376.0, - 940487680.0, - 976971456.0, - 968511808.0, - 967965824.0, - 978763776.0, - 938520832.0, - 976066176.0, - 965320000.0, - 958779136.0, - 974729408.0, - 953506240.0, - 940081920.0, - 966190592.0, - 967302784.0, - 969921024.0, - 966736512.0, - 951392832.0, - 975828992.0, - 979206592.0, - 986264128.0, - 964680448.0, - 939334400.0, - 976793024.0, - 972326912.0, - 970404672.0, - 970494336.0, - 955573440.0, - 945401216.0, - 967255680.0, - 967032384.0, - 979673216.0, - 972223872.0, - 949601344.0, - 963855616.0, - 976013056.0, - 973998656.0, - 984590912.0, - 951088256.0, - 970067328.0, - 956061184.0, - 974937472.0, - 969055040.0, - 944543104.0, - 961078912.0, - 982184000.0, - 968457984.0, - 956830912.0, - 928821760.0, - 966601344.0, - 972727104.0, - 957699712.0, - 956924928.0, - 949783616.0, - 942032512.0, - 986361984.0, - 979171584.0, - 964691328.0, - 976037568.0, - 937390720.0, - 957477952.0, - 974595456.0, - 974311104.0, - 962558336.0, - 966012480.0, - 943301248.0, - 974594048.0, - 983782784.0, - 964934656.0, - 959768384.0, - 952992064.0, - 953711872.0, - 959589312.0, - 982365312.0, - 971797824.0, - 936081664.0, - 967763712.0, - 955761536.0, - 957234944.0, - 972708096.0, - 946432064.0, - 951500736.0, - 969433664.0, - 969855296.0, - 966247488.0, - 954553664.0, - 968611072.0, - 964777024.0, - 975212608.0, - 975459008.0, - 962989568.0, - 951605632.0, - 971357632.0, - 967008960.0, - 961796288.0, - 969693440.0, - 936850176.0, - 972468608.0, - 965346112.0, - 978498688.0, - 973979776.0, - 932054144.0, - 951860608.0, - 975564032.0, - 960246144.0, - 967539584.0, - 988022144.0, - 943540288.0, - 975703936.0, - 978688704.0, - 977150080.0, - 966899904.0, - 942507712.0, - 981041280.0, - 957581568.0, - 984980608.0, - 966805504.0, - 952115136.0, - 965811776.0, - 985910272.0, - 974078272.0, - 983529920.0, - 952556608.0, - 947170176.0, - 972406656.0, - 972955264.0, - 966760768.0, - 980416832.0, - 948150784.0, - 964207616.0, - 947524736.0, - 976332160.0, - 982941376.0, - 950301376.0, - 978155712.0, - 968844992.0, - 950886144.0, - 985023104.0, - 959110144.0, - 943816256.0, - 955002112.0, - 971378176.0, - 988853184.0, - 956716096.0, - 945667648.0, - 962857408.0, - 971720640.0, - 969484480.0, - 978926400.0, - 939906240.0, - 958570688.0, - 977714752.0, - 958491520.0, - 978134272.0, - 954262208.0, - 958182784.0, - 972283648.0, - 982909824.0, - 961628352.0, - 958913984.0, - 948644032.0, - 968260544.0, - 965479232.0, - 997951488.0, - 973870208.0, - 939940352.0, - 966812096.0, - 968759872.0, - 949725248.0, - 977650688.0, - 955403968.0, - 955030080.0, - 976225792.0, - 970213760.0, - 962492416.0, - 958755776.0, - 945295936.0, - 978518528.0, - 965980608.0, - 966358656.0, - 969410496.0, - 940608704.0, - 973240320.0, - 975068800.0, - 951888256.0, - 964066880.0, - 949149888.0, - 977665728.0, - 974602496.0, - 969645312.0, - 977445888.0, - 946845056.0, - 944096192.0, - 961941184.0, - 971781760.0, - 980031744.0, - 971509696.0, - 946439488.0, - 970772800.0, - 975968896.0, - 969260160.0, - 973054144.0, - 941027456.0, - 975760192.0, - 972611840.0, - 976027328.0, - 965119616.0, - 957061056.0, - 931256448.0, - 979264192.0, - 960038336.0, - 965137344.0, - 958527360.0, - 966014400.0, - 973020352.0, - 964743296.0, - 968654272.0, - 981821632.0, - 955935872.0, - 991025024.0, - 968775744.0, - 973782272.0, - 959377344.0, - 947800384.0, - 949367232.0, - 966707200.0, - 980937088.0, - 960609088.0, - 957851904.0, - 941302592.0, - 975655424.0, - 979904256.0, - 965988800.0, - 986714112.0, - 952366272.0, - 
970783104.0, - 970343616.0, - 974974528.0, - 971842752.0, - 941395648.0, - 948387520.0, - 980668736.0, - 980053760.0, - 982500096.0, - 970084736.0, - 936919040.0, - 969876352.0, - 981326784.0, - 992018560.0, - 958539648.0, - 950516480.0, - 956740608.0, - 982094144.0, - 977917248.0, - 968119744.0, - 952073984.0, - 931399680.0, - 966554112.0, - 958850880.0, - 977573952.0, - 964592192.0, - 958312704.0, - 974005888.0, - 950970624.0, - 974338496.0, - 963808896.0, - 954280000.0, - 981481088.0, - 974654976.0, - 966983488.0, - 971694144.0, - 940360576.0, - 965095104.0, - 960203840.0, - 952547008.0, - 966836608.0, - 958368576.0, - 959804416.0, - 972355200.0, - 985891200.0, - 958696128.0, - 936294912.0, - 945463296.0, - 977076032.0, - 988789248.0, - 966621568.0, - 985454784.0, - 938732992.0, - 963043200.0, - 961942912.0, - 989489600.0, - 987013312.0, - 959490944.0, - 961899648.0, - 958968000.0, - 966210816.0, - 981719936.0, - 952090944.0, - 938251968.0, - 971376576.0, - 969824576.0, - 976530240.0, - 971830336.0, - 955762752.0, - 972647168.0, - 965210240.0, - 950826048.0, - 978837824.0, - 958071680.0, - 961483136.0, - 985632192.0, - 962112576.0, - 974645824.0, - 956923328.0, - 948963840.0, - 975927616.0, - 968292352.0, - 962047872.0, - 977941696.0, - 946268288.0, - 976358528.0, - 979349632.0, - 979796608.0, - 975724736.0, - 940562432.0, - 963765888.0, - 965244032.0, - 978698112.0, - 945850816.0, - 941845440.0, - 959131072.0, - 972693952.0, - 970566336.0, - 966508544.0, - 962100224.0, - 937939136.0, - 973749696.0, - 973512704.0, - 981707456.0, - 970136768.0, - 949885696.0, - 962003328.0, - 982789568.0, - 968080960.0, - 969705536.0, - 954171072.0, - 952187136.0, - 985361856.0, - 972913600.0, - 976518272.0, - 959725056.0, - 932516480.0, - 964037696.0, - 967028736.0, - 977857216.0, - 961843648.0, - 955102400.0, - 988763328.0, - 968715968.0, - 970518336.0, - 959374656.0, - 952353472.0, - 948822592.0, - 979556224.0, - 967519488.0, - 972424768.0, - 947987136.0, - 951507968.0, - 968237504.0, - 967390336.0, - 962856448.0, - 980083776.0, - 944050368.0, - 975006848.0, - 974312256.0, - 973574208.0, - 971708544.0, - 958864384.0, - 960295360.0, - 965778560.0, - 970290752.0, - 980613376.0, - 944283776.0, - 945492480.0, - 970518528.0, - 970185088.0, - 970997184.0, - 986612032.0, - 948066816.0, - 955517312.0, - 972393344.0, - 972488640.0, - 985050304.0, - 951690944.0, - 954792960.0, - 972011136.0, - 962667904.0, - 960713792.0, - 943963072.0, - 948743936.0, - 981819456.0, - 971381696.0, - 970545984.0, - 972548288.0, - 944227328.0, - 974196096.0, - 977102336.0, - 963895680.0, - 960720192.0, - 945273216.0, - 969737216.0, - 998076864.0, - 975855808.0, - 963338816.0, - 937053696.0, - 949942336.0, - 968207552.0, - 986284160.0, - 967589184.0, - 966929408.0, - 937815552.0, - 963158336.0, - 985092928.0, - 962796480.0, - 968078016.0, - 947321280.0, - 975432384.0, - 975331264.0, - 975791424.0, - 980182720.0, - 956997120.0, - 970744192.0, - 963803712.0, - 977681152.0, - 981516800.0, - 968661248.0, - 953440768.0, - 963244800.0, - 964300416.0, - 965498240.0, - 959689600.0, - 965970560.0, - 966674048.0, - 968969728.0, - 955515712.0, - 972235712.0, - 939842752.0, - 960277312.0, - 981393088.0, - 968752512.0, - 957488448.0, - 951367040.0, - 952284032.0, - 969787776.0, - 972364160.0, - 962866624.0, - 932202944.0, - 957318592.0, - 981520000.0, - 959303104.0, - 948332288.0, - 969478848.0, - 949914944.0, - 956194496.0, - 973598976.0, - 969103872.0, - 968650560.0, - 953933632.0, - 961696640.0, - 958652864.0, - 987229120.0, - 969981056.0, - 
964830208.0, - 946335104.0, - 980762048.0, - 971935168.0, - 974750656.0, - 965119616.0, - 939432640.0, - 955701888.0, - 967307264.0, - 991853696.0, - 971335552.0, - 924678016.0, - 969423360.0, - 967502400.0, - 963945280.0, - 968606656.0, - 974431104.0, - 953848576.0, - 978189824.0, - 975013440.0, - 956637632.0, - 968968256.0, - 943301056.0, - 979950784.0, - 982469120.0, - 959020224.0, - 973195264.0, - 944738560.0, - 958959424.0, - 979202240.0, - 972960320.0, - 951416512.0, - 941890432.0, - 947712832.0, - 975258496.0, - 988450240.0, - 974012928.0, - 962630592.0, - 921264064.0, - 970375040.0, - 982832832.0, - 976660288.0, - 967197056.0, - 958454976.0, - 1000298880.0, - 954954880.0, - 966162624.0, - 958965696.0, - 958415872.0, - 956820224.0, - 968914240.0, - 962834880.0, - 959190720.0, - 954761728.0, - 947724480.0, - 967121152.0, - 973963712.0, - 966010944.0, - 958401152.0, - 942305472.0, - 973938816.0, - 971260032.0, - 971667776.0, - 953775232.0, - 943344320.0, - 959075200.0, - 980050304.0, - 979433984.0, - 976606272.0, - 942133504.0, - 956704768.0, - 954276992.0, - 981502208.0, - 956537472.0, - 958234560.0, - 952944192.0, - 961186560.0, - 970739008.0, - 964836224.0, - 959376320.0, - 938600640.0, - 980743936.0, - 970924416.0, - 959672512.0, - 972666496.0, - 947624960.0, - 962766400.0, - 961655232.0, - 951784640.0, - 982070592.0, - 964440960.0, - 949561280.0, - 971409536.0, - 963268096.0, - 987873600.0, - 969461504.0, - 942434624.0, - 968452608.0, - 971008256.0, - 960998336.0, - 961911680.0, - 945805824.0, - 934175296.0, - 965408832.0, - 963442240.0, - 972466048.0, - 960792896.0, - 963350976.0, - 979455616.0, - 956931904.0, - 964225856.0, - 974062144.0, - 949420480.0, - 989551488.0, - 994637760.0, - 971231232.0, - 963970560.0, - 948497792.0, - 964947456.0, - 970541440.0, - 974020160.0, - 966556992.0, - 950605248.0, - 966238784.0, - 957847872.0, - 985896640.0, - 976405952.0, - 965389184.0, - 915178176.0, - 963368000.0, - 982967744.0, - 983009664.0, - 962054528.0, - 948861504.0, - 973102208.0, - 965067840.0, - 968189696.0, - 965075712.0, - 956771584.0, - 971409024.0, - 967623680.0, - 954622208.0, - 970002432.0, - 973575488.0, - 956789184.0, - 981327488.0, - 959849216.0, - 957913152.0, - 965475840.0, - 950060352.0, - 978034432.0, - 968152768.0, - 962766656.0, - 969904448.0, - 950783424.0, - 952283264.0, - 970217024.0, - 963142464.0, - 967003904.0, - 959724096.0, - 936029056.0, - 990472512.0, - 977625088.0, - 977825024.0, - 963217600.0, - 950928512.0, - 960868864.0, - 973374272.0, - 976636416.0, - 972201408.0, - 938382144.0, - 961862144.0, - 965315392.0, - 964543424.0, - 978128576.0, - 938131584.0, - 972171200.0, - 976696704.0, - 984454976.0, - 975423936.0, - 958847232.0, - 952034240.0, - 951423680.0, - 963932608.0, - 975787904.0, - 973280000.0, - 944062208.0, - 966852608.0, - 969012800.0, - 964098432.0, - 964232384.0, - 955763712.0, - 962337344.0, - 973103872.0, - 965437632.0, - 976107584.0, - 965253824.0, - 941408832.0, - 971009344.0, - 958048704.0, - 964609664.0, - 970383424.0, - 944223680.0, - 964641088.0, - 975353024.0, - 963216000.0, - 956843584.0, - 949851264.0, - 977999744.0, - 966273856.0, - 975746624.0, - 974540032.0, - 955812736.0, - 954867392.0, - 975837184.0, - 987603008.0, - 968191872.0, - 980909888.0, - 935765056.0, - 968295104.0, - 969191680.0, - 975296576.0, - 984730560.0, - 940931008.0, - 974232704.0, - 964276672.0, - 981304640.0, - 971199104.0, - 943623168.0, - 946810048.0, - 972410880.0, - 980049280.0, - 977307904.0, - 951884608.0, - 955077888.0, - 981102784.0, - 962022400.0, 
- 957946688.0, - 968624064.0, - 956834432.0, - 986279808.0, - 974450304.0, - 993519808.0, - 963823872.0, - 961741632.0, - 961028608.0, - 984976512.0, - 971022464.0, - 970911040.0, - 956058816.0, - 951333760.0, - 973084160.0, - 973563712.0, - 977726272.0, - 964567424.0, - 930937984.0, - 973832576.0, - 976974720.0, - 978134848.0, - 981408192.0, - 940666496.0, - 948610688.0, - 969757824.0, - 974738176.0, - 968011904.0, - 970674944.0, - 948271616.0, - 980399424.0, - 966324544.0, - 976598784.0, - 975199616.0, - 954019392.0, - 975527360.0, - 968828608.0, - 964216064.0, - 976426624.0, - 957919936.0, - 943160704.0, - 973871488.0, - 970541312.0, - 981382272.0, - 957914240.0, - 948976576.0, - 978926784.0, - 978817344.0, - 967202176.0, - 952486400.0, - 957904256.0, - 977004160.0, - 977582144.0, - 972821696.0, - 958474368.0, - 958560768.0, - 960841408.0, - 988150848.0, - 974835648.0, - 969553728.0, - 967503872.0, - 954326528.0, - 965065792.0, - 970564736.0, - 971599424.0, - 981106752.0, - 938739904.0, - 973612928.0, - 964725120.0, - 973270464.0, - 986174528.0, - 939959168.0, - 974849408.0, - 968068352.0, - 967849984.0, - 988423360.0, - 941041728.0, - 943350400.0, - 969911616.0, - 968380864.0, - 965419712.0, - 946773056.0, - 951969152.0, - 976044864.0, - 968452224.0, - 975423040.0, - 963698944.0, - 944575616.0, - 964251968.0, - 971090752.0, - 962767488.0, - 974284352.0, - 953160128.0, - 982262336.0, - 982164096.0, - 965801280.0, - 961299328.0, - 944725696.0, - 974897088.0, - 991996096.0, - 957498688.0, - 981525632.0, - 967816448.0, - 931301568.0, - 973258304.0, - 968595136.0, - 979546240.0, - 973765888.0, - 954125568.0, - 970055616.0, - 968299968.0, - 985013440.0, - 967392768.0, - 958041664.0, - 961655232.0, - 960015168.0, - 967324736.0, - 981402176.0, - 976802688.0, - 937603328.0, - 967031296.0, - 965052224.0, - 979589888.0, - 981316352.0, - 959215040.0, - 973302080.0, - 967704192.0, - 970518976.0, - 969294208.0, - 949856448.0, - 967728384.0, - 979262848.0, - 972170240.0, - 965048576.0, - 951281984.0, - 954714560.0, - 968276224.0, - 977366592.0, - 976548800.0, - 967489472.0, - 950112960.0, - 970514688.0, - 991727168.0, - 964326656.0, - 987481472.0, - 948382848.0, - 972066240.0, - 971654208.0, - 970150208.0, - 974186688.0, - 956165824.0, - 943899264.0, - 974364352.0, - 960993216.0, - 970082432.0, - 968749184.0, - 952795264.0, - 985093632.0, - 964221248.0, - 976967744.0, - 982484416.0, - 959192768.0, - 983127936.0, - 966610944.0, - 958042240.0, - 980946496.0, - 955983424.0, - 942097024.0, - 964350720.0, - 965628736.0, - 961262528.0, - 955575360.0, - 939499072.0, - 961869440.0, - 967743616.0, - 966309504.0, - 972100800.0, - 950385600.0, - 969897280.0, - 974915520.0, - 968265216.0, - 975927552.0, - 952268096.0, - 966918720.0, - 975372096.0, - 964387328.0, - 957277824.0, - 968436416.0, - 947367360.0, - 972595264.0, - 966886208.0, - 962633024.0, - 964268544.0, - 943111040.0, - 966199104.0, - 979073600.0, - 964781568.0, - 968280000.0, - 945923904.0, - 971825088.0, - 978705536.0, - 979387968.0, - 975173760.0, - 965174336.0, - 963589376.0, - 970487232.0, - 959623360.0, - 966108288.0, - 972779456.0, - 935563840.0, - 970765184.0, - 958642112.0, - 962041536.0, - 968177344.0, - 956190144.0, - 966033792.0, - 964530048.0, - 965372480.0, - 962724864.0, - 949198912.0, - 963558528.0, - 963447680.0, - 964988736.0, - 964933696.0, - 970143552.0, - 937960064.0, - 971137600.0, - 969831808.0, - 979275520.0, - 960125760.0, - 944070784.0, - 959488576.0, - 969027136.0, - 965588352.0, - 967232640.0, - 940622272.0, - 945988096.0, 
- 972705856.0, - 969395584.0, - 967463616.0, - 951970368.0, - 945919040.0, - 967662336.0, - 971383552.0, - 975806528.0, - 982927680.0, - 952994112.0, - 963969856.0, - 986701120.0, - 952023552.0, - 970077312.0, - 960094208.0, - 961081728.0, - 965083840.0, - 967231424.0, - 977440576.0, - 975297600.0, - 942971648.0, - 972595072.0, - 974553216.0, - 962913024.0, - 969718336.0, - 943192512.0, - 948647040.0, - 965911552.0, - 964147584.0, - 967384384.0, - 951766720.0, - 969970752.0, - 963362752.0, - 980107200.0, - 971437760.0, - 957932608.0, - 946457920.0, - 983375936.0, - 970740672.0, - 973367296.0, - 963288448.0, - 954637760.0, - 972827968.0, - 972902336.0, - 968836224.0, - 961336192.0, - 938383360.0, - 967467904.0, - 967238528.0, - 957343744.0, - 974524160.0, - 943794432.0, - 951146944.0, - 961809664.0, - 976303040.0, - 967136064.0, - 973762688.0, - 949713600.0, - 971735872.0, - 972907328.0, - 972992384.0, - 971164800.0, - 949211648.0, - 981886080.0, - 976059776.0, - 975098944.0, - 961717568.0, - 952480704.0, - 956693376.0, - 968644864.0, - 962700352.0, - 956191232.0, - 990552000.0, - 935804032.0, - 954107200.0, - 959364800.0, - 978269312.0, - 951698240.0, - 951989248.0, - 991284864.0, - 964332736.0, - 975417536.0, - 965645888.0, - 943253184.0, - 962853632.0, - 958807296.0, - 980278400.0, - 958644992.0, - 939119488.0, - 948831360.0, - 974136960.0, - 974169408.0, - 971564800.0, - 959983936.0, - 948426496.0, - 968406144.0, - 973707584.0, - 967865920.0, - 975432704.0, - 943908736.0, - 974013376.0, - 961091712.0, - 967949888.0, - 968758272.0, - 975363392.0, - 944782848.0, - 961383360.0, - 969374464.0, - 975388928.0, - 955702848.0, - 950196032.0, - 974744512.0, - 962855232.0, - 962962368.0, - 953050368.0, - 956594240.0, - 963186624.0, - 965790080.0, - 969557952.0, - 952897600.0, - 961956992.0, - 963387712.0, - 992559680.0, - 957787264.0, - 964560576.0, - 969303808.0, - 932638848.0, - 976011648.0, - 962513856.0, - 975204992.0, - 968566592.0, - 951994240.0, - 965452480.0, - 960548864.0, - 984055104.0, - 980254784.0, - 941545600.0, - 958248192.0, - 957811776.0, - 975603712.0, - 968386944.0, - 959279744.0, - 939403072.0, - 966078144.0, - 959020864.0, - 957134144.0, - 984928448.0, - 952804736.0, - 977573696.0, - 983040384.0, - 960741184.0, - 972496256.0, - 938911552.0, - 960537536.0, - 963278208.0, - 963289664.0, - 970740672.0, - 935307392.0, - 976323200.0, - 961312192.0, - 977152064.0, - 971782592.0, - 964880768.0, - 949039488.0, - 964129600.0, - 969086784.0, - 971316416.0, - 967508544.0, - 960702208.0, - 966329152.0, - 968020160.0, - 979848256.0, - 966748352.0, - 952717504.0, - 951754816.0, - 975666688.0, - 970677696.0, - 965876672.0, - 957349632.0, - 941275392.0, - 966852288.0, - 963880000.0, - 978972352.0, - 952381312.0, - 935715584.0, - 963361664.0, - 969399424.0, - 976406528.0, - 963896832.0, - 945520512.0, - 962600256.0, - 972852608.0, - 973184576.0, - 963019072.0, - 957626880.0, - 949598912.0, - 981199808.0, - 972227392.0, - 976719488.0, - 973338368.0, - 953693504.0, - 956079744.0, - 957734912.0, - 958488512.0, - 977933376.0, - 932571712.0, - 986439296.0, - 967509120.0, - 963144576.0, - 953336448.0, - 956104768.0, - 949976896.0, - 987421504.0, - 969001088.0, - 972957504.0, - 962489664.0, - 945620160.0, - 973000896.0, - 975045696.0, - 971812864.0, - 972073408.0, - 946393280.0, - 970606016.0, - 979429376.0, - 968875072.0, - 975618944.0, - 941368128.0, - 959739200.0, - 975790208.0, - 955453696.0, - 973890816.0, - 985247296.0, - 940293760.0, - 968178432.0, - 979540096.0, - 959783040.0, - 974319488.0, 
- 949450240.0, - 979878464.0, - 985235968.0, - 978790720.0, - 983719424.0, - 939677952.0, - 970797056.0, - 980414400.0, - 970359040.0, - 970081600.0, - 937915520.0, - 952333376.0, - 979505856.0, - 979478592.0, - 953235200.0, - 970615040.0, - 948029440.0, - 978493888.0, - 990812224.0, - 964144000.0, - 968921664.0, - 939206528.0, - 976269952.0, - 969561536.0, - 961115904.0, - 966461120.0, - 942768384.0, - 963134336.0, - 976011008.0, - 975344768.0, - 976678592.0, - 960278976.0, - 940133760.0, - 977436672.0, - 964483200.0, - 973764352.0, - 966671488.0, - 942376704.0, - 960924672.0, - 971255552.0, - 974823744.0, - 970653376.0, - 949611520.0, - 953117248.0, - 972012736.0, - 964462592.0, - 973082304.0, - 987549376.0, - 941428800.0, - 972785472.0, - 971244032.0, - 973160704.0, - 985143424.0, - 949573376.0, - 992390400.0, - 961834176.0, - 968338432.0, - 951679488.0, - 936266240.0, - 951091648.0, - 962658496.0, - 969425152.0, - 965073664.0, - 944978560.0, - 944183680.0, - 976292096.0, - 972761728.0, - 976144384.0, - 952296832.0, - 950193024.0, - 973788544.0, - 975900224.0, - 978513792.0, - 979278144.0, - 936786432.0, - 968568320.0, - 973700544.0, - 959145664.0, - 967774400.0, - 953044672.0, - 959332352.0, - 956206592.0, - 959445696.0, - 973294592.0, - 973872000.0, - 950893440.0, - 964301440.0, - 964745536.0, - 969885632.0, - 965207296.0, - 954259904.0, - 964745216.0, - 963812352.0, - 964617344.0, - 962164352.0, - 948716864.0, - 970232704.0, - 966398016.0, - 977294784.0, - 965150272.0, - 959745984.0, - 951908544.0, - 966104768.0, - 988442048.0, - 971915456.0, - 961666944.0, - 949015360.0, - 965207296.0, - 972221504.0, - 964808832.0, - 983736640.0, - 955788608.0, - 980358592.0, - 975898368.0, - 969959680.0, - 974199104.0, - 939894784.0, - 955800832.0, - 976698816.0, - 973913600.0, - 981422080.0, - 975105920.0, - 955285696.0, - 966522048.0, - 956449536.0, - 969893760.0, - 976778496.0, - 947510080.0, - 949980224.0, - 962904128.0, - 990148544.0, - 968781760.0, - 948956032.0, - 946621056.0, - 970508672.0, - 973233600.0, - 972127360.0, - 966778752.0, - 958284736.0, - 967196480.0, - 966231552.0, - 973855296.0, - 969750336.0, - 955944256.0, - 980303360.0, - 958554944.0, - 972545728.0, - 970652160.0, - 948179968.0, - 949998720.0, - 970587712.0, - 972276864.0, - 977373632.0, - 949628992.0, - 948334976.0, - 967682368.0, - 970113344.0, - 966447616.0, - 968831360.0, - 954559296.0, - 974449792.0, - 982847808.0, - 983556736.0, - 967126912.0, - 944710848.0, - 964678592.0, - 985468160.0, - 969857344.0, - 989257920.0, - 946398528.0, - 931107136.0, - 965849728.0, - 978189568.0, - 978718144.0, - 985299136.0, - 955497280.0, - 966239808.0, - 960832512.0, - 976461376.0, - 965884544.0, - 948155520.0, - 951423488.0, - 968965184.0, - 975668416.0, - 955821568.0, - 971427904.0, - 945778816.0, - 964547328.0, - 969923200.0, - 975564928.0, - 957127296.0, - 953939712.0, - 971291712.0, - 964763328.0, - 972956608.0, - 948315968.0, - 933072832.0, - 966281088.0, - 978116480.0, - 967044224.0, - 975879104.0, - 956724544.0, - 939582208.0, - 973464128.0, - 963027840.0, - 966226816.0, - 962247488.0, - 961438464.0, - 966564160.0, - 965973440.0, - 971071872.0, - 985012736.0, - 930724672.0, - 962994496.0, - 967571264.0, - 970828480.0, - 989781824.0, - 949894848.0, - 951055552.0, - 985401088.0, - 962364032.0, - 959778368.0, - 961597056.0, - 974075776.0, - 958506752.0, - 968643776.0, - 958013696.0, - 966115648.0, - 937143104.0, - 959942656.0, - 980228864.0, - 970700736.0, - 976956672.0, - 946456576.0, - 963817088.0, - 948654720.0, - 976193536.0, 
- 983209344.0, - 943088832.0, - 964205696.0, - 986925376.0, - 968215936.0, - 952683840.0, - 959629696.0, - 944938880.0, - 977094208.0, - 968412480.0, - 973843072.0, - 973784768.0, - 921360192.0, - 960347008.0, - 983767360.0, - 974511232.0, - 967499328.0, - 946859392.0, - 945055232.0, - 979709568.0, - 968872960.0, - 970305536.0, - 960998848.0, - 947197440.0, - 987041984.0, - 970712000.0, - 983894784.0, - 969881216.0, - 952739072.0, - 969241920.0, - 970751872.0, - 948162432.0, - 978588288.0, - 958849088.0, - 966012096.0, - 974179648.0, - 965955328.0, - 953478144.0, - 962338816.0, - 948082240.0, - 973504512.0, - 975912512.0, - 970496128.0, - 977114688.0, - 957253440.0, - 972977984.0, - 982692224.0, - 966226368.0, - 952172416.0, - 937258496.0, - 975366272.0, - 980247680.0, - 958719744.0, - 965531712.0, - 961147840.0, - 951220288.0, - 982266176.0, - 965548736.0, - 984989184.0, - 962283520.0, - 937615168.0, - 967855744.0, - 963401728.0, - 969174720.0, - 985252992.0, - 941517568.0, - 961269888.0, - 970950720.0, - 970138304.0, - 976718976.0, - 954686784.0, - 954291712.0, - 961638592.0, - 979856064.0, - 963379200.0, - 961332416.0, - 947062272.0, - 983171648.0, - 965416128.0, - 972068480.0, - 969358208.0, - 933961792.0, - 985517952.0, - 961558464.0, - 976432576.0, - 978010944.0, - 941443072.0, - 956131072.0, - 974381504.0, - 957675776.0, - 972152256.0, - 956615168.0, - 951517888.0, - 973441792.0, - 961947008.0, - 969538432.0, - 973597888.0, - 950416320.0, - 961668992.0, - 969023808.0, - 970656128.0, - 965169472.0, - 928397440.0, - 934467264.0, - 978082048.0, - 963382784.0, - 972485504.0, - 963051008.0, - 948003072.0, - 968812032.0, - 974810816.0, - 971538816.0, - 958721792.0, - 949776512.0, - 958928384.0, - 963862976.0, - 960073280.0, - 972865408.0, - 965913280.0, - 964293248.0, - 965932800.0, - 973660288.0, - 971048640.0, - 970819264.0, - 936653376.0, - 957160256.0, - 964599168.0, - 956811456.0, - 972767936.0, - 946143680.0, - 978819456.0, - 963762816.0, - 964653376.0, - 975832576.0, - 961012736.0, - 965595776.0, - 971409984.0, - 970710464.0, - 967910336.0, - 960150272.0, - 953985728.0, - 986790400.0, - 959003712.0, - 972030336.0, - 953911680.0, - 941837120.0, - 965127936.0, - 974224384.0, - 971219200.0, - 966096960.0, - 939365312.0, - 969099840.0, - 974691008.0, - 973880064.0, - 981528640.0, - 935658304.0, - 950010112.0, - 969443904.0, - 969827200.0, - 969579904.0, - 957485056.0, - 935227840.0, - 954078464.0, - 972510784.0, - 961786688.0, - 980644480.0, - 938357824.0, - 958728256.0, - 979267072.0, - 965789824.0, - 962056320.0, - 920034368.0, - 993872448.0, - 955232768.0, - 959374080.0, - 954846720.0, - 965491328.0, - 962094976.0, - 985822848.0, - 957046912.0, - 970249088.0, - 970162688.0, - 969172928.0, - 964821888.0, - 977317696.0, - 974905728.0, - 972570048.0, - 923725440.0, - 958935488.0, - 972595584.0, - 963867904.0, - 967702208.0, - 967891520.0, - 942485312.0, - 967524736.0, - 956522816.0, - 966104384.0, - 957793920.0, - 944605184.0, - 978150080.0, - 978178240.0, - 983204736.0, - 965906176.0, - 937687552.0, - 972870336.0, - 959842944.0, - 976423680.0, - 962552576.0, - 956921728.0, - 954046784.0, - 965483968.0, - 972903744.0, - 950048384.0, - 962516800.0, - 952921856.0, - 963355712.0, - 963076864.0, - 972000640.0, - 965294272.0, - 933841728.0, - 965369344.0, - 953449152.0, - 980671104.0, - 975236480.0, - 930823104.0, - 967363264.0, - 966518528.0, - 970347328.0, - 956038144.0, - 931960320.0, - 944360448.0, - 970181824.0, - 972669376.0, - 958170624.0, - 950540352.0, - 940246080.0, - 969580416.0, 
- 963093440.0, - 954739904.0, - 964955392.0, - 953176256.0, - 958955136.0, - 973978368.0, - 968812608.0, - 985562944.0, - 978141504.0, - 969058304.0, - 966713216.0, - 974808064.0, - 984063360.0, - 966990784.0, - 959373376.0, - 960349440.0, - 953334784.0, - 980396672.0, - 967019904.0, - 961928192.0, - 966572032.0, - 962305536.0, - 960780928.0, - 960643776.0, - 959538368.0, - 957590016.0, - 972084736.0, - 974102720.0, - 966522880.0, - 968475584.0, - 948236160.0, - 975949824.0, - 963794688.0, - 963009216.0, - 986218368.0, - 930699264.0, - 976172544.0, - 990139072.0, - 977453248.0, - 962462080.0, - 945796288.0, - 969537856.0, - 977129664.0, - 972228544.0, - 986717696.0, - 936598464.0, - 944995904.0, - 955667328.0, - 973499520.0, - 980912896.0, - 981662400.0, - 936935104.0, - 964624384.0, - 959895936.0, - 986651456.0, - 975640192.0, - 958426624.0, - 975357312.0, - 963550336.0, - 970582272.0, - 974691392.0, - 944117696.0, - 965030272.0, - 967080704.0, - 975631616.0, - 967179392.0, - 982170944.0, - 955264704.0, - 974654400.0, - 969905152.0, - 965275264.0, - 965981440.0, - 940290752.0, - 973531136.0, - 960609792.0, - 972436864.0, - 978824704.0, - 940060864.0, - 968653184.0, - 964429056.0, - 968082752.0, - 969574400.0, - 965763776.0, - 946198080.0, - 975619648.0, - 973022592.0, - 967326272.0, - 959540160.0, - 943441024.0, - 983268864.0, - 967697600.0, - 971282368.0, - 975432256.0, - 934134656.0, - 961381120.0, - 967247296.0, - 972474944.0, - 961314432.0, - 964050496.0, - 946494016.0, - 969322432.0, - 972848896.0, - 976618944.0, - 967442624.0, - 947587456.0, - 966871488.0, - 964618368.0, - 994787968.0, - 988412288.0, - 942145152.0, - 961744832.0, - 970945984.0, - 977523904.0, - 970607616.0, - 952486848.0, - 961963136.0, - 961892416.0, - 959132416.0, - 951528128.0, - 956242368.0, - 948932288.0, - 989051328.0, - 974463232.0, - 966427200.0, - 975378112.0, - 937088704.0, - 968369984.0, - 987338176.0, - 970588864.0, - 968531456.0, - 956638208.0, - 948582400.0, - 985812480.0, - 981196416.0, - 974768896.0, - 946486016.0, - 941977088.0, - 952414848.0, - 977420160.0, - 980446144.0, - 969054144.0, - 949351488.0, - 974843648.0, - 967210176.0, - 958707904.0, - 974507328.0, - 950730368.0, - 973157504.0, - 971576448.0, - 965261056.0, - 973908224.0, - 975159040.0, - 947273024.0, - 971511680.0, - 966220480.0, - 967885504.0, - 968404352.0, - 952753856.0, - 983745600.0, - 957472256.0, - 961332416.0, - 964501824.0, - 943728896.0, - 977682368.0, - 981852992.0, - 963727936.0, - 967334720.0, - 953132480.0, - 978972160.0, - 981037376.0, - 972663104.0, - 970084928.0, - 972737472.0, - 940333888.0, - 987577472.0, - 970059840.0, - 975905920.0, - 961738816.0, - 953195072.0, - 968280128.0, - 978371008.0, - 971405696.0, - 969986112.0, - 936838720.0, - 952641344.0, - 986705088.0, - 966993856.0, - 967387712.0, - 954611840.0, - 958291136.0, - 969723456.0, - 968560064.0, - 968486720.0, - 981047808.0, - 956524800.0, - 963046848.0, - 957060544.0, - 958426624.0, - 985285312.0, - 941419456.0, - 960780032.0, - 967297152.0, - 962075008.0, - 968262272.0, - 959072128.0, - 942765632.0, - 956369216.0, - 959791808.0, - 965952448.0, - 949544320.0, - 948598912.0, - 973556288.0, - 977461312.0, - 963175808.0, - 973002816.0, - 935190592.0, - 977148288.0, - 988324800.0, - 969807616.0, - 957966784.0, - 945861952.0, - 940448192.0, - 969709952.0, - 980650816.0, - 955865408.0, - 960284864.0, - 936297664.0, - 963262272.0, - 961871552.0, - 973965504.0, - 968831552.0, - 936296320.0, - 957131968.0, - 956695488.0, - 959624064.0, - 981766016.0, - 965865792.0, 
- 955595520.0, - 960115520.0, - 972505408.0, - 969194048.0, - 943397248.0, - 960521088.0, - 974330368.0, - 972667904.0, - 970653632.0, - 980275200.0, - 936819648.0, - 988542400.0, - 963037568.0, - 952548928.0, - 962609600.0, - 952786944.0, - 960264896.0, - 974453312.0, - 957190592.0, - 974812992.0, - 944580416.0, - 958541888.0, - 959315520.0, - 975046336.0, - 963746368.0, - 965262784.0, - 933421888.0, - 960867840.0, - 976219904.0, - 973223488.0, - 967116608.0, - 946622336.0, - 940638784.0, - 971085056.0, - 979334016.0, - 961466304.0, - 943832256.0, - 929239872.0, - 967302144.0, - 964007168.0, - 959526592.0, - 966386176.0, - 946160192.0, - 968565632.0, - 943378688.0, - 960701504.0, - 971588416.0, - 947586240.0, - 958351744.0, - 962951744.0, - 984119232.0, - 961026176.0, - 968020096.0, - 974852416.0, - 960410368.0, - 957064320.0, - 981581120.0, - 957182336.0, - 933307392.0, - 957020608.0, - 951573696.0, - 963787136.0, - 959650688.0, - 941719552.0, - 965030208.0, - 965331392.0, - 965931328.0, - 959375040.0, - 943457600.0, - 970753728.0, - 966362944.0, - 970086592.0, - 971502656.0, - 944190528.0, - 953849664.0, - 968904704.0, - 985095488.0, - 986465472.0, - 966331200.0, - 943385536.0, - 967688000.0, - 973926400.0, - 967664640.0, - 956166784.0, - 938424320.0, - 962817984.0, - 976629888.0, - 963985536.0, - 991779584.0, - 966873152.0, - 936206784.0, - 968224192.0, - 961028928.0, - 989036544.0, - 984309504.0, - 951836032.0, - 965718656.0, - 942684800.0, - 963555584.0, - 966728960.0, - 946504000.0, - 975448000.0, - 964568704.0, - 950212928.0, - 961762880.0, - 947056832.0, - 955735552.0, - 974926080.0, - 975692992.0, - 961978624.0, - 955910016.0, - 941977920.0, - 953971968.0, - 984272128.0, - 970927744.0, - 971754688.0, - 948815744.0, - 964617472.0, - 976331072.0, - 974519808.0, - 989516480.0, - 948103040.0, - 952357952.0, - 963913600.0, - 983301056.0, - 966695616.0, - 973377024.0, - 944717312.0, - 972132480.0, - 951375936.0, - 980717760.0, - 965723392.0, - 958243776.0, - 961657344.0, - 967972480.0, - 975973248.0, - 969575552.0, - 946204480.0, - 984173248.0, - 971643776.0, - 976845696.0, - 971362944.0, - 963377856.0, - 970694720.0, - 966316992.0, - 957935296.0, - 964638912.0, - 971663232.0, - 923816832.0, - 975231680.0, - 978931648.0, - 957507264.0, - 962622976.0, - 938568704.0, - 950523328.0, - 971668352.0, - 984826112.0, - 958353920.0, - 953863168.0, - 959330048.0, - 975530560.0, - 979873536.0, - 961880320.0, - 957192960.0, - 946302208.0, - 979920448.0, - 973217728.0, - 965171520.0, - 968205376.0, - 964973248.0, - 961020608.0, - 975194560.0, - 971147776.0, - 968591104.0, - 952052800.0, - 949152704.0, - 961409664.0, - 976582656.0, - 969878144.0, - 948465600.0, - 953818688.0, - 966417152.0, - 956377024.0, - 970556864.0, - 963652736.0, - 950035008.0, - 982264768.0, - 960511168.0, - 964802112.0, - 966632384.0, - 952291904.0, - 964435200.0, - 976723328.0, - 965133056.0, - 967481408.0, - 931151360.0, - 964692608.0, - 980070784.0, - 962143680.0, - 961062848.0, - 969127744.0, - 952444608.0, - 941764544.0, - 973702464.0, - 975889088.0, - 983844224.0, - 946641472.0, - 961976384.0, - 979876608.0, - 975089792.0, - 971760000.0, - 968782720.0, - 955185856.0, - 962823616.0, - 968077888.0, - 975827072.0, - 949773632.0, - 949869760.0, - 957432640.0, - 971686080.0, - 974256128.0, - 989518400.0, - 951585664.0, - 947193216.0, - 967357760.0, - 961240320.0, - 982374016.0, - 958028160.0, - 958692352.0, - 979599680.0, - 972809408.0, - 961097152.0, - 958948224.0, - 946387072.0, - 964978944.0, - 962835584.0, - 973247488.0, 
- 974088704.0, - 952958784.0, - 981753984.0, - 975820480.0, - 954763136.0, - 953689664.0, - 952849792.0, - 979234560.0, - 969361472.0, - 980917888.0, - 960989120.0, - 941193984.0, - 966687232.0, - 962609856.0, - 974734976.0, - 988843008.0, - 968050752.0, - 952659328.0, - 971140288.0, - 972805568.0, - 974052864.0, - 976467456.0, - 937378624.0, - 965140672.0, - 978458048.0, - 960431040.0, - 979525888.0, - 957217856.0, - 969703808.0, - 959344192.0, - 954610432.0, - 980951488.0, - 964569024.0, - 959116992.0, - 971099456.0, - 962928704.0, - 969459136.0, - 974187200.0, - 949669440.0, - 964647488.0, - 984592320.0, - 969596352.0, - 973169472.0, - 950625536.0, - 952672192.0, - 976262400.0, - 978434368.0, - 979720896.0, - 952211904.0, - 949790784.0, - 975559040.0, - 978205824.0, - 963229568.0, - 975362240.0, - 953066752.0, - 962996864.0, - 962671872.0, - 975643456.0, - 965725760.0, - 970741312.0, - 970435520.0, - 957665984.0, - 974212352.0, - 975483840.0, - 960092160.0, - 957273088.0, - 957728448.0, - 970785664.0, - 959509248.0, - 977901888.0, - 957934528.0, - 969029248.0, - 987927424.0, - 980561536.0, - 967277376.0, - 925047552.0, - 945694592.0, - 970188608.0, - 975151680.0, - 979169728.0, - 935205440.0, - 968040960.0, - 963594816.0, - 960712256.0, - 976533312.0, - 961861504.0, - 956841984.0, - 984648192.0, - 976726016.0, - 977018112.0, - 983204288.0, - 941420864.0, - 971602048.0, - 965834816.0, - 973837568.0, - 970657984.0, - 947158976.0, - 970141952.0, - 976233792.0, - 986233088.0, - 959099968.0, - 961520640.0, - 946280448.0, - 971910336.0, - 988432832.0, - 968733632.0, - 966379712.0, - 941240512.0, - 964067264.0, - 967122880.0, - 983430720.0, - 973709632.0, - 949628352.0, - 955711552.0, - 960252608.0, - 966449856.0, - 969061120.0, - 967693312.0, - 938528768.0, - 964876608.0, - 961517888.0, - 975615744.0, - 965115968.0, - 943306624.0, - 976712576.0, - 966227968.0, - 984008576.0, - 982578688.0, - 961487104.0, - 968181632.0, - 973395776.0, - 972841984.0, - 965128064.0, - 947619840.0, - 945285760.0, - 980244736.0, - 979232320.0, - 972242496.0, - 968526528.0, - 961141760.0, - 971945344.0, - 961514112.0, - 975681408.0, - 982502848.0, - 971588160.0, - 974645888.0, - 966941120.0, - 973488128.0, - 958184640.0, - 951346176.0, - 958820736.0, - 974529664.0, - 975623424.0, - 988260224.0, - 966642368.0, - 946606656.0, - 987313856.0, - 961603200.0, - 972827072.0, - 993334784.0, - 956973568.0, - 964533632.0, - 972627392.0, - 974839744.0, - 981789248.0, - 948171328.0, - 969399488.0, - 991472064.0, - 960616256.0, - 972474496.0, - 952595456.0, - 925109120.0, - 968372544.0, - 968064832.0, - 975109248.0, - 982653952.0, - 959342464.0, - 983058560.0, - 971739520.0, - 961757056.0, - 975478656.0, - 954294528.0, - 985396096.0, - 984114496.0, - 976023552.0, - 965210560.0, - 956236096.0, - 956499264.0, - 965890816.0, - 972277760.0, - 982332288.0, - 960553856.0, - 934424896.0, - 968267392.0, - 987247808.0, - 975718784.0, - 973757568.0, - 938969664.0, - 965516032.0, - 974022848.0, - 986853888.0, - 980466112.0, - 958550720.0, - 952015936.0, - 969878656.0, - 958279296.0, - 972604992.0, - 975836096.0, - 953564992.0, - 979066496.0, - 952399936.0, - 968564544.0, - 981480448.0, - 958236032.0, - 982074816.0, - 967049856.0, - 962132224.0, - 984581056.0, - 938472320.0, - 951162496.0, - 972205504.0, - 978641408.0, - 964497472.0, - 967210176.0, - 966715200.0, - 978138752.0, - 965499456.0, - 982062464.0, - 967014080.0, - 933283840.0, - 967528448.0, - 972387904.0, - 970224832.0, - 957721792.0, - 936020288.0, - 961665088.0, - 971708928.0, 
- 976050688.0, - 977412608.0, - 951679104.0, - 950734848.0, - 960669504.0, - 972341184.0, - 976244288.0, - 960160000.0, - 947311552.0, - 970428608.0, - 977004032.0, - 973598336.0, - 965952128.0, - 953631104.0, - 961531072.0, - 974096064.0, - 956493632.0, - 972419008.0, - 949921408.0, - 959389568.0, - 970915840.0, - 960707328.0, - 969883072.0, - 950362496.0, - 944046976.0, - 963459712.0, - 965798208.0, - 972922624.0, - 954916544.0, - 937705152.0, - 972928704.0, - 975403712.0, - 954521728.0, - 980202560.0, - 953754048.0, - 969368832.0, - 968118784.0, - 972525696.0, - 973869248.0, - 954355200.0, - 960220928.0, - 958779008.0, - 976038272.0, - 983960448.0, - 953832512.0, - 932845952.0, - 970799552.0, - 959723712.0, - 948840896.0, - 964909248.0, - 971099904.0, - 984398912.0, - 967586496.0, - 957644096.0, - 975620480.0, - 959153472.0, - 965121344.0, - 960652800.0, - 954977216.0, - 965387968.0, - 975576000.0, - 964114304.0, - 967188480.0, - 964494912.0, - 956100608.0, - 980596480.0, - 934784704.0, - 967844672.0, - 960337792.0, - 984000384.0, - 978473344.0, - 941712128.0, - 940852544.0, - 985462272.0, - 969591488.0, - 954145344.0, - 945101440.0, - 942309120.0, - 967699648.0, - 976427584.0, - 966555648.0, - 971402176.0, - 933462400.0, - 972825600.0, - 967395584.0, - 979034688.0, - 977268480.0, - 974676928.0, - 969167360.0, - 965481088.0, - 951445376.0, - 966911040.0, - 973285312.0, - 976264576.0, - 981931840.0, - 947635904.0, - 976055296.0, - 966089152.0, - 972691712.0, - 963840320.0, - 941326208.0, - 957706688.0, - 969287104.0, - 976068224.0, - 974985856.0, - 950714816.0, - 952715328.0, - 984747904.0, - 948829312.0, - 957722496.0, - 973929664.0, - 975078016.0, - 981288960.0, - 970005376.0, - 938006720.0, - 953810176.0, - 979296768.0, - 983487808.0, - 971574336.0, - 951608896.0, - 959221376.0, - 971856768.0, - 959724992.0, - 976883008.0, - 954026496.0, - 953275904.0, - 978716032.0, - 944000576.0, - 963460480.0, - 965799040.0, - 972922752.0, - 954891840.0, - 937705664.0, - 972915136.0, - 975415296.0, - 954533888.0, - 980177536.0, - 953753536.0, - 969381248.0, - 968142976.0, - 972537600.0, - 973831808.0, - 954343424.0, - 960220224.0, - 958790080.0, - 976038016.0, - 983972096.0, - 953807232.0, - 932920064.0, - 970799232.0, - 959723200.0, - 948864896.0, - 964884480.0, - 971112576.0, - 984398720.0, - 967574912.0, - 957656512.0, - 975620352.0, - 959166272.0, - 965121472.0, - 960652416.0, - 954976960.0, - 965413056.0, - 975551552.0, - 964127872.0, - 967163776.0, - 964531584.0, - 956100864.0, - 980619648.0, - 934836864.0, - 967758784.0, - 960241536.0, - 983953408.0, - 978476288.0, - 941728128.0, - 940770368.0, - 985442304.0, - 969497024.0, - 953989952.0, - 945055552.0, - 942263104.0, - 967777280.0, - 976334592.0, - 966571840.0, - 971235520.0, - 933357312.0, - 972671744.0, - 967401792.0, - 979026880.0, - 977224000.0, - 959510016.0, - 950832384.0, - 967844480.0, - 980699008.0, - 987294720.0, - 965245376.0, - 965616000.0, - 974624832.0, - 967250176.0, - 975755136.0, - 962569152.0, - 948885888.0, - 967623488.0, - 972936064.0, - 971688000.0, - 969572288.0, - 965403712.0, - 976450048.0, - 989531008.0, - 970944256.0, - 972181888.0, - 936046528.0, - 966370304.0, - 972200768.0, - 969336704.0, - 970431936.0, - 965288192.0, - 933166272.0, - 971364160.0, - 978929344.0, - 976331584.0, - 987327296.0, - 946889280.0, - 958214528.0, - 977118720.0, - 965547392.0, - 984612736.0, - 935114432.0, - 955475520.0, - 972495680.0, - 975402432.0, - 988739072.0, - 968162624.0, - 943416128.0, - 970848256.0, - 967395264.0, - 965988672.0, 
- 962378304.0, - 963967360.0, - 974648768.0, - 966361280.0, - 969268864.0, - 971763968.0, - 950869760.0, - 974286400.0, - 963961600.0, - 968563968.0, - 985711744.0, - 954805696.0, - 939713024.0, - 968065728.0, - 974649024.0, - 971763008.0, - 976928640.0, - 955428352.0, - 972459392.0, - 973543424.0, - 976338688.0, - 987718144.0, - 951488512.0, - 983185536.0, - 990421184.0, - 975663808.0, - 988157376.0, - 934474752.0, - 951868096.0, - 973459264.0, - 986276992.0, - 962858752.0, - 955511168.0, - 954075456.0, - 985214144.0, - 982672000.0, - 961882304.0, - 967703552.0, - 946924160.0, - 959210112.0, - 978227776.0, - 990556352.0, - 984685696.0, - 953637440.0, - 958717504.0, - 969459200.0, - 978187648.0, - 976566016.0, - 957046720.0, - 944725248.0, - 962267136.0, - 953223680.0, - 978039936.0, - 963942272.0, - 948576896.0, - 958290880.0, - 958961728.0, - 972314368.0, - 966842496.0, - 945102400.0, - 977229952.0, - 979577792.0, - 965203776.0, - 968379712.0, - 943055680.0, - 962542976.0, - 975157952.0, - 975780416.0, - 969534784.0, - 953857728.0, - 938889856.0, - 963288384.0, - 974510976.0, - 965829248.0, - 968128256.0, - 935808384.0, - 960364928.0, - 968863872.0, - 980017088.0, - 966261760.0, - 962851008.0, - 950767872.0, - 967333888.0, - 965313216.0, - 972419840.0, - 962101504.0, - 943887040.0, - 969182656.0, - 987024576.0, - 965374016.0, - 959152896.0, - 935401664.0, - 957712768.0, - 962500096.0, - 965676736.0, - 952943680.0, - 947405440.0, - 978652480.0, - 976018432.0, - 976036544.0, - 953411200.0, - 956691520.0, - 950750080.0, - 974362624.0, - 963357504.0, - 973498368.0, - 982044224.0, - 935211136.0, - 980957504.0, - 957125504.0, - 990201792.0, - 983980224.0, - 954271168.0, - 964047040.0, - 972504448.0, - 972838976.0, - 969950016.0, - 936930688.0, - 948201088.0, - 965381120.0, - 973615040.0, - 969120832.0, - 959927104.0, - 932712768.0, - 940376960.0, - 971462336.0, - 958116544.0, - 960537024.0, - 942766848.0, - 971615104.0, - 974988928.0, - 965281088.0, - 968122240.0, - 955333376.0, - 977009664.0, - 975159168.0, - 971173824.0, - 975966208.0, - 968325312.0, - 954160704.0, - 965048576.0, - 970862976.0, - 973944320.0, - 967710656.0, - 960264576.0, - 974721024.0, - 960673984.0, - 967960576.0, - 966799232.0, - 945622080.0, - 961106112.0, - 972691520.0, - 975854912.0, - 967126144.0, - 973556224.0, - 941441152.0, - 969625728.0, - 974827008.0, - 966048192.0, - 961113920.0, - 940970176.0, - 977645248.0, - 957299008.0, - 982105536.0, - 951640832.0, - 944700992.0, - 942565184.0, - 963256512.0, - 982825856.0, - 968952384.0, - 951149504.0, - 945334912.0, - 952500224.0, - 975310848.0, - 978003264.0, - 956148416.0, - 943718720.0, - 982953792.0, - 963869312.0, - 948198016.0, - 967967552.0, - 947703872.0, - 955879552.0, - 972025792.0, - 969435008.0, - 968857984.0, - 923345024.0, - 952490304.0, - 970841408.0, - 964098560.0, - 961509760.0, - 964125952.0, - 948952000.0, - 978255488.0, - 947187904.0, - 951233920.0, - 968771136.0, - 938644800.0, - 964122816.0, - 977037632.0, - 974097152.0, - 973905280.0, - 955970624.0, - 962578944.0, - 962210048.0, - 986755008.0, - 966615552.0, - 975372800.0, - 937327680.0, - 973089984.0, - 980755456.0, - 966728320.0, - 977002112.0, - 941108672.0, - 962906176.0, - 972684928.0, - 987922688.0, - 966439424.0, - 934246528.0, - 952499136.0, - 960191168.0, - 965288896.0, - 960432640.0, - 958084736.0, - 959877888.0, - 988470784.0, - 959201856.0, - 971490432.0, - 954907456.0, - 939653568.0, - 971027136.0, - 966186880.0, - 973723328.0, - 969431488.0, - 948177984.0, - 976689152.0, - 970478656.0, 
- 971602432.0, - 980754432.0, - 934142080.0, - 952305088.0, - 971836928.0, - 955838016.0, - 974878592.0, - 962123904.0, - 936078720.0, - 980041792.0, - 986885440.0, - 976687104.0, - 967537792.0, - 943618880.0, - 965750144.0, - 958588672.0, - 966946880.0, - 973014656.0, - 947739072.0, - 936636736.0, - 956736576.0, - 961392640.0, - 972654528.0, - 965664384.0, - 947980160.0, - 960048064.0, - 978128128.0, - 978347584.0, - 973416384.0, - 952793536.0, - 967907200.0, - 960362496.0, - 985349440.0, - 975228096.0, - 940725888.0, - 975590848.0, - 970927360.0, - 977941568.0, - 967524352.0, - 947491392.0, - 956494912.0, - 976898176.0, - 968660288.0, - 967410176.0, - 956779456.0, - 947900800.0, - 968135104.0, - 962090944.0, - 953876800.0, - 974625536.0, - 942612928.0, - 965259392.0, - 975841856.0, - 962702400.0, - 970134016.0, - 961016192.0, - 948337728.0, - 970443648.0, - 964928704.0, - 963995136.0, - 951901632.0, - 947745408.0, - 970828800.0, - 964468288.0, - 974607680.0, - 973945920.0, - 935287040.0, - 967770304.0, - 965945280.0, - 968131840.0, - 983505536.0, - 946878976.0, - 957194688.0, - 968776704.0, - 962673536.0, - 979175616.0, - 957675008.0, - 948040256.0, - 974693824.0, - 961418944.0, - 961560896.0, - 975510336.0, - 922718464.0, - 976531200.0, - 970026176.0, - 968694208.0, - 959724480.0, - 953878592.0, - 959786176.0, - 957711360.0, - 970779072.0, - 961660480.0, - 939856128.0, - 927031872.0, - 973463552.0, - 972118144.0, - 961870784.0, - 963176832.0, - 943803904.0, - 964440768.0, - 961923136.0, - 980821056.0, - 956335424.0, - 952625664.0, - 981711872.0, - 961568320.0, - 964838464.0, - 968888448.0, - 965878528.0, - 951641984.0, - 966636160.0, - 977746048.0, - 973747712.0, - 973595008.0, - 941317888.0, - 963372032.0, - 973433216.0, - 967439680.0, - 971444480.0, - 954296192.0, - 964910336.0, - 957482816.0, - 972145536.0, - 981093632.0, - 938479488.0, - 943498048.0, - 970730752.0, - 970503296.0, - 969970048.0, - 958199808.0, - 941458752.0, - 974803712.0, - 969307904.0, - 959485248.0, - 972349696.0, - 937029248.0, - 974571712.0, - 971851840.0, - 963073088.0, - 965308800.0, - 947386304.0, - 950749504.0, - 973501440.0, - 966506496.0, - 965451008.0, - 946476800.0, - 954803776.0, - 960677888.0, - 975553280.0, - 985697472.0, - 967333056.0, - 963199360.0, - 976725312.0, - 967323200.0, - 992654080.0, - 986509056.0, - 943164288.0, - 953127744.0, - 977015040.0, - 965123136.0, - 969399616.0, - 961335488.0, - 945745024.0, - 974935296.0, - 968672640.0, - 973500288.0, - 963175616.0, - 950083776.0, - 988846400.0, - 962997248.0, - 972118976.0, - 983144960.0, - 952239232.0, - 972193728.0, - 965854016.0, - 965680256.0, - 974366720.0, - 953477760.0, - 939308352.0, - 972329984.0, - 981523200.0, - 969490048.0, - 978188160.0, - 938694016.0, - 968842816.0, - 971686016.0, - 970983040.0, - 971740736.0, - 930656576.0, - 984600832.0, - 975402176.0, - 988961728.0, - 965132288.0, - 947274624.0, - 955488128.0, - 973385280.0, - 977471808.0, - 963681600.0, - 960927552.0, - 953023424.0, - 987824832.0, - 965590976.0, - 963980096.0, - 960733760.0, - 949200448.0, - 967588352.0, - 966703104.0, - 969066240.0, - 976370560.0, - 941354688.0, - 966642816.0, - 966047488.0, - 969355904.0, - 990853056.0, - 949508736.0, - 950438720.0, - 981411520.0, - 968631616.0, - 970798592.0, - 964391936.0, - 941445376.0, - 976633344.0, - 976118848.0, - 969960576.0, - 976733184.0, - 945326720.0, - 948413952.0, - 976370624.0, - 979899776.0, - 960155776.0, - 938561792.0, - 950783168.0, - 985888768.0, - 971780864.0, - 949372096.0, - 963471488.0, - 931262656.0, 
- 958901056.0, - 967674688.0, - 979127360.0, - 986270144.0, - 953936576.0, - 969436672.0, - 974246272.0, - 977548736.0, - 965172032.0, - 936836864.0, - 957858240.0, - 971553152.0, - 959909888.0, - 968876608.0, - 961975744.0, - 946142976.0, - 965700224.0, - 963430208.0, - 968827456.0, - 979429888.0, - 957229312.0, - 978127168.0, - 957243392.0, - 971896576.0, - 960394560.0, - 946881600.0, - 978391808.0, - 977875008.0, - 968016256.0, - 980692352.0, - 957090624.0, - 948157504.0, - 981413248.0, - 986890368.0, - 971801600.0, - 970665024.0, - 943533568.0, - 965002304.0, - 978120960.0, - 968204416.0, - 976235008.0, - 945316992.0, - 958159104.0, - 965148736.0, - 983245248.0, - 973754624.0, - 941476224.0, - 969210304.0, - 970490048.0, - 993738944.0, - 962900480.0, - 960056640.0, - 958914688.0, - 969335552.0, - 970590464.0, - 970878528.0, - 961918208.0, - 953020416.0, - 976659328.0, - 966341248.0, - 951200576.0, - 989766400.0, - 960174400.0, - 951518144.0, - 970880192.0, - 965541056.0, - 972397696.0, - 959394624.0, - 945449536.0, - 978595136.0, - 968110784.0, - 978378688.0, - 970272128.0, - 948860160.0, - 960002688.0, - 977512064.0, - 973280832.0, - 969158464.0, - 948459584.0, - 948905408.0, - 962331392.0, - 971808064.0, - 945258816.0, - 955597504.0, - 949455360.0, - 978680576.0, - 948608704.0, - 967549120.0, - 951794112.0, - 942391424.0, - 972997888.0, - 968783232.0, - 984962624.0, - 969719360.0, - 949466304.0, - 950536576.0, - 962350208.0, - 983682880.0, - 971230080.0, - 954590464.0, - 943698688.0, - 969232832.0, - 983961856.0, - 979398080.0, - 972813440.0, - 958214464.0, - 957435136.0, - 966599616.0, - 976473088.0, - 970263232.0, - 962195200.0, - 972318912.0, - 968081344.0, - 955904960.0, - 967072896.0, - 969866688.0, - 952741824.0, - 966310720.0, - 957955392.0, - 969470720.0, - 944493824.0, - 940694784.0, - 965002752.0, - 970607616.0, - 981817728.0, - 968428480.0, - 953521984.0, - 965388416.0, - 978347328.0, - 966850624.0, - 960573952.0, - 933111680.0, - 964919808.0, - 964675648.0, - 977765696.0, - 981308736.0, - 949135872.0, - 922461440.0, - 975057792.0, - 991951936.0, - 992210432.0, - 967401600.0, - 941656640.0, - 960320832.0, - 960569920.0, - 965271424.0, - 961143872.0, - 956793408.0, - 954331776.0, - 967170240.0, - 966033472.0, - 966194624.0, - 935685696.0, - 958637312.0, - 964073920.0, - 974766976.0, - 960167808.0, - 962641856.0, - 944531328.0, - 956464576.0, - 969817216.0, - 967020800.0, - 964316928.0, - 950040640.0, - 965108288.0, - 970707520.0, - 980185728.0, - 954227968.0, - 936634944.0, - 957711040.0, - 970087360.0, - 962822208.0, - 965236480.0, - 979863808.0, - 942509696.0, - 962691712.0, - 949963776.0, - 968699840.0, - 965042816.0, - 948904512.0, - 962331520.0, - 971807808.0, - 945271168.0, - 955597760.0, - 949480384.0, - 978692800.0, - 948633472.0, - 967562112.0, - 951806016.0, - 942356352.0, - 972985728.0, - 968772288.0, - 984974592.0, - 969719616.0, - 949453056.0, - 950524416.0, - 962338048.0, - 983694528.0, - 971254464.0, - 954589504.0, - 943710464.0, - 969159104.0, - 983937152.0, - 979348992.0, - 972752576.0, - 958227328.0, - 957435456.0, - 966537856.0, - 976399232.0, - 970201984.0, - 962133248.0, - 972320256.0, - 968094912.0, - 955942080.0, - 967048960.0, - 969793600.0, - 952729216.0, - 966249792.0, - 957856000.0, - 969446016.0, - 944469504.0, - 940671232.0, - 965039744.0, - 970583872.0, - 981854400.0, - 968454016.0, - 953595776.0, - 965265216.0, - 978310272.0, - 966863360.0, - 960635648.0, - 933161216.0, - 964981120.0, - 964736704.0, - 977729216.0, - 981369856.0, - 949234688.0, 
[... remainder of the preceding metric's removed "values" array elided: per-step samples fluctuating between roughly 9.2e8 and 1.0e9 ...]
-        ]
-    },
-    "mem-allocated-bytes": {
-        "start_step": 0,
-        "end_step": 53183,
-        "step_interval": 5,
-        "values": [
[... removed "values" array elided: one sample every 5 steps from step 0 through 53183, constant at 12697245696.0 and later at 12697444352.0; the deleted array continues below ...]
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
-        12697444352.0,
         ... (the identical value 12697444352.0 repeated for every remaining entry of the deleted golden-values list) ...
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 
12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0, - 12697444352.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 538, - "step_interval": 5, - "values": [ - 3.57882, - 3.46053, - 3.44071, - 3.42167, - 3.41557, - 3.41167, - 3.40639, - 3.70469, - 3.40614, - 3.40461, - 3.40418, - 3.40457, - 3.4058, - 3.40552, - 3.40432, - 3.40132, - 3.39974, - 3.3997, - 3.39899, - 3.39892, - 3.40303, - 3.40219, - 3.4023, - 3.40694, - 3.40754, - 3.40621, - 3.40622, - 3.4068, - 3.40662, - 3.40558, - 3.40207, - 3.40601, - 3.40247, - 3.40246, - 3.40214, - 3.39978, - 3.40364, - 3.4028, - 3.41529, - 3.41488, - 3.41506, - 3.41612, - 3.4147, - 3.41362, - 3.41415, - 3.41328, - 3.40772, - 3.40883, - 3.40722, - 3.40638, - 3.40584, - 3.40696, - 3.40764, - 3.40703, - 3.40757, - 3.40934, - 3.40798, - 3.41966, - 3.40136, - 3.4013, - 3.40199, - 3.39865, - 3.39971, - 3.3997, - 3.39925, - 3.3985, - 3.3998, - 3.39822, - 3.39886, - 3.39721, - 7.76452, - 3.40286, - 3.3966, - 3.39748, - 3.39707, - 3.3953, - 3.39593, - 3.39593, - 3.39676, - 3.40901, - 3.40664, - 3.40628, - 3.40597, - 3.40474, - 3.40642, - 3.40886, - 3.47945, - 3.48178, - 3.48155, - 3.48108, - 3.48205, - 3.48135, - 3.48201, - 3.59385, - 3.48346, - 3.48397, - 3.48308, - 3.48148, - 3.48175, - 3.48116, - 3.48024, - 3.4036, - 3.40301, - 3.40493, - 3.40385, - 3.40345, - 3.40351, - 3.40362 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12.98419, "5": 12.93856, "10": 12.06407, "15": 11.97888, "20": 10.53585, "25": 10.11953, "30": 9.72857, "35": 9.44172, "40": 9.23732, "45": 9.03755, "50": 8.85132, "55": 8.64055, "60": 8.63611, "65": 8.50639, "70": 8.42355, "75": 8.27794, "80": 8.16267, "85": 8.1237, "90": 7.97039, "95": 7.96196, "100": 7.84189, "105": 7.67937, "110": 7.56625, "115": 7.43953, "120": 7.47014, "125": 7.45871, "130": 7.3052, "135": 7.23768, "140": 7.22597, "145": 7.02852, "150": 7.19054, "155": 7.03556, "160": 6.91689, "165": 6.93288, "170": 6.82585, "175": 6.85521, "180": 6.80448, "185": 6.7433, "190": 6.70302, "195": 6.61405, "200": 6.65374, "205": 6.62876, "210": 6.52774, "215": 6.50252, "220": 6.50352, "225": 6.49067, "230": 6.47394, "235": 6.44832, "240": 6.37016, "245": 6.34927, "250": 6.31882, "255": 6.43959, "260": 6.32963, "265": 6.25986, "270": 6.22305, "275": 6.23477, "280": 6.18246, "285": 6.20551, "290": 6.1608, "295": 6.12841, "300": 6.10826, "305": 5.99979, "310": 6.08162, "315": 6.054, "320": 5.95168, "325": 5.92201, "330": 5.97286, "335": 6.02573, "340": 5.94258, "345": 5.90606, "350": 5.8842, "355": 5.85179, "360": 5.85533, "365": 5.84563, "370": 5.78632, "375": 5.82361, "380": 5.85025, "385": 5.78473, "390": 5.7766, "395": 5.67967, "400": 5.6472, "405": 5.64829, "410": 5.65495, "415": 5.70942, "420": 5.63962, "425": 5.65218, "430": 5.62076, "435": 5.58685, "440": 5.62115, "445": 5.50697, "450": 5.58389, "455": 5.51133, "460": 5.50823, "465": 5.57401, "470": 5.5815, "475": 5.5052, "480": 5.48183, "485": 5.49436, "490": 5.47916, "495": 5.4698, "500": 5.42985, "505": 5.3858, "510": 5.45905, "515": 5.4159, "520": 5.46309, "525": 5.30338, "530": 5.35748, "535": 5.33903, "540": 5.3171, "545": 5.37569, "550": 5.33942, "555": 5.21702, "560": 5.32415, "565": 5.31466, "570": 5.26404, "575": 5.30875, "580": 5.23889, "585": 5.19696, "590": 5.22853, "595": 5.2209, "600": 5.29182, "605": 5.19683, "610": 5.23658, "615": 5.17642, "620": 5.22538, "625": 5.22253, "630": 5.16876, "635": 5.14449, "640": 5.09279, "645": 5.1396, "650": 
5.14203, "655": 5.1327, "660": 5.069, "665": 5.11523, "670": 5.07364, "675": 5.03109, "680": 5.02862, "685": 5.0379, "690": 5.02517, "695": 4.98288, "700": 4.97596, "705": 4.93259, "710": 4.98288, "715": 4.92544, "720": 4.88684, "725": 4.83237, "730": 4.85585, "735": 4.85771, "740": 4.86671, "745": 4.73527, "750": 4.73715, "755": 4.79771, "760": 4.81159, "765": 4.73746, "770": 4.6861, "775": 4.68238, "780": 4.70214, "785": 4.76512, "790": 4.67159, "795": 4.66419, "800": 4.60929, "805": 4.62921, "810": 4.65511, "815": 4.60814, "820": 4.62742, "825": 4.63166, "830": 4.56902, "835": 4.55875, "840": 4.49023, "845": 4.4834, "850": 4.45078, "855": 4.49927, "860": 4.42892, "865": 4.52525, "870": 4.47631, "875": 4.39075, "880": 4.41681, "885": 4.43645, "890": 4.48306, "895": 4.43651, "900": 4.40237, "905": 4.34175, "910": 4.36791, "915": 4.33894, "920": 4.37915, "925": 4.39728, "930": 4.31304, "935": 4.30474, "940": 4.34415, "945": 4.32028, "950": 4.35142, "955": 4.27226, "960": 4.20482, "965": 4.27733, "970": 4.27449, "975": 4.23635, "980": 4.22582, "985": 4.1643, "990": 4.15107, "995": 4.17374, "1000": 4.22372, "1005": 4.18549, "1010": 4.17291, "1015": 4.1279, "1020": 4.16006, "1025": 4.21349, "1030": 4.11468, "1035": 4.09825, "1040": 4.12099, "1045": 4.10025, "1050": 4.14186, "1055": 4.11333, "1060": 4.11123, "1065": 4.06606, "1070": 4.05536, "1075": 4.06145, "1080": 4.06278, "1085": 4.05379, "1090": 4.02624, "1095": 4.09639, "1100": 4.05199, "1105": 4.07302, "1110": 4.04754, "1115": 4.00737, "1120": 3.99935, "1125": 4.01414, "1130": 4.04951, "1135": 4.02532, "1140": 3.99464, "1145": 3.93733, "1150": 4.0371, "1155": 3.99851, "1160": 3.97508, "1165": 3.8668, "1170": 3.94637, "1175": 3.93388, "1180": 3.96628, "1185": 3.97143, "1190": 3.93572, "1195": 3.9376, "1200": 3.88869, "1205": 3.86639, "1210": 3.97744, "1215": 3.84185, "1220": 3.87311, "1225": 3.80935, "1230": 3.90177, "1235": 3.89744, "1240": 3.87807, "1245": 3.78759, "1250": 3.82383, "1255": 3.85646, "1260": 3.88683, "1265": 3.80232, "1270": 3.88677, "1275": 3.8363, "1280": 3.81943, "1285": 3.84572, "1290": 3.87706, "1295": 3.84061, "1300": 3.81858, "1305": 3.835, "1310": 3.81366, "1315": 3.81902, "1320": 3.82866, "1325": 3.72387, "1330": 3.78867, "1335": 3.75061, "1340": 3.74782, "1345": 3.76452, "1350": 3.7258, "1355": 3.78047, "1360": 3.72529, "1365": 3.72088, "1370": 3.75015, "1375": 3.75083, "1380": 3.74286, "1385": 3.74068, "1390": 3.66851, "1395": 3.74097, "1400": 3.73068, "1405": 3.66761, "1410": 3.67274, "1415": 3.65256, "1420": 3.69582, "1425": 3.71071, "1430": 3.66879, "1435": 3.65324, "1440": 3.64027, "1445": 3.68445, "1450": 3.68277, "1455": 3.6429, "1460": 3.64672, "1465": 3.68169, "1470": 3.62397, "1475": 3.69284, "1480": 3.66225, "1485": 3.66006, "1490": 3.62923, "1495": 3.61203, "1500": 3.64712, "1505": 3.68336, "1510": 3.56078, "1515": 3.60311, "1520": 3.62746, "1525": 3.5983, "1530": 3.61507, "1535": 3.59461, "1540": 3.60481, "1545": 3.60964, "1550": 3.56745, "1555": 3.56118, "1560": 3.6084, "1565": 3.61486, "1570": 3.57918, "1575": 3.55537, "1580": 3.59845, "1585": 3.57331, "1590": 3.4685, "1595": 3.51963, "1600": 3.50097, "1605": 3.55929, "1610": 3.57406, "1615": 3.4939, "1620": 3.52567, "1625": 3.46197, "1630": 3.48981, "1635": 3.53578, "1640": 3.53569, "1645": 3.53535, "1650": 3.49914, "1655": 3.47666, "1660": 3.54107, "1665": 3.46135, "1670": 3.51128, "1675": 3.48828, "1680": 3.46564, "1685": 3.48342, "1690": 3.48637, "1695": 3.49109, "1700": 3.47161, "1705": 3.40831, "1710": 3.49736, "1715": 3.483, "1720": 
3.42824, "1725": 3.42617, "1730": 3.42006, "1735": 3.46659, "1740": 3.47356, "1745": 3.43893, "1750": 3.41016, "1755": 3.42439, "1760": 3.3874, "1765": 3.43063, "1770": 3.43956, "1775": 3.37548, "1780": 3.42421, "1785": 3.40796, "1790": 3.38509, "1795": 3.41119, "1800": 3.35023, "1805": 3.40505, "1810": 3.33757, "1815": 3.41574, "1820": 3.40843, "1825": 3.40426, "1830": 3.32002, "1835": 3.44265, "1840": 3.41218, "1845": 3.4254, "1850": 3.38946, "1855": 3.37336, "1860": 3.32964, "1865": 3.37423, "1870": 3.30055, "1875": 3.43683, "1880": 3.34138, "1885": 3.36386, "1890": 3.34167, "1895": 3.3913, "1900": 3.37861, "1905": 3.29999, "1910": 3.32579, "1915": 3.30584, "1920": 3.3585, "1925": 3.33771, "1930": 3.31443, "1935": 3.31851, "1940": 3.36038, "1945": 3.273, "1950": 3.41489, "1955": 3.28646, "1960": 3.3045, "1965": 3.27362, "1970": 3.30977, "1975": 3.35176, "1980": 3.33524, "1985": 3.23363, "1990": 3.29075, "1995": 3.273, "2000": 3.28919, "2005": 3.29275, "2010": 3.26651, "2015": 3.2371, "2020": 3.2595, "2025": 3.28011, "2030": 3.30953, "2035": 3.29527, "2040": 3.25203, "2045": 3.24543, "2050": 3.28488, "2055": 3.31675, "2060": 3.30355, "2065": 3.22736, "2070": 3.29022, "2075": 3.2411, "2080": 3.21718, "2085": 3.27426, "2090": 3.13947, "2095": 3.26813, "2100": 3.23611, "2105": 3.20012, "2110": 3.20151, "2115": 3.24697, "2120": 3.18551, "2125": 3.21274, "2130": 3.22018, "2135": 3.28124, "2140": 3.20596, "2145": 3.21157, "2150": 3.21779, "2155": 3.24967, "2160": 3.22156, "2165": 3.24188, "2170": 3.20294, "2175": 3.16531, "2180": 3.22092, "2185": 3.25748, "2190": 3.23037, "2195": 3.1558, "2200": 3.2021, "2205": 3.18481, "2210": 3.12243, "2215": 3.19293, "2220": 3.20346, "2225": 3.19439, "2230": 3.12798, "2235": 3.18314, "2240": 3.21747, "2245": 3.1703, "2250": 3.19647, "2255": 3.13469, "2260": 3.13639, "2265": 3.22942, "2270": 3.19498, "2275": 3.14951, "2280": 3.19802, "2285": 3.16901, "2290": 3.19549, "2295": 3.20592, "2300": 3.14223, "2305": 3.16312, "2310": 3.12235, "2315": 3.06132, "2320": 3.11093, "2325": 3.17328, "2330": 3.11801, "2335": 3.10969, "2340": 3.15639, "2345": 3.11936, "2350": 3.1257, "2355": 3.12881, "2360": 3.16094, "2365": 3.09985, "2370": 3.13248, "2375": 3.13487, "2380": 3.13392, "2385": 3.08581, "2390": 3.1003, "2395": 3.09615, "2400": 3.10213, "2405": 3.09983, "2410": 3.0924, "2415": 3.09139, "2420": 3.08584, "2425": 3.0834, "2430": 3.07832, "2435": 3.07401, "2440": 3.07886, "2445": 3.06504, "2450": 3.14332, "2455": 3.15721, "2460": 3.0898, "2465": 3.08953, "2470": 3.03773, "2475": 3.06013, "2480": 3.08458, "2485": 3.04992, "2490": 3.07188, "2495": 3.07063, "2500": 3.08502, "2505": 3.13574, "2510": 3.13824, "2515": 3.05633, "2520": 3.07382, "2525": 3.02855, "2530": 3.04672, "2535": 3.08793, "2540": 3.07355, "2545": 3.04682, "2550": 3.00237, "2555": 3.10682, "2560": 3.08752, "2565": 3.11187, "2570": 3.00475, "2575": 3.05831, "2580": 3.09156, "2585": 3.01892, "2590": 3.06307, "2595": 2.99978, "2600": 3.07142, "2605": 3.05079, "2610": 3.04784, "2615": 3.05592, "2620": 2.98258, "2625": 3.0058, "2630": 3.03419, "2635": 3.06039, "2640": 3.01595, "2645": 3.04799, "2650": 3.0189, "2655": 2.99768, "2660": 3.01597, "2665": 3.04174, "2670": 2.98634, "2675": 2.96639, "2680": 2.99241, "2685": 2.99774, "2690": 3.00515, "2695": 2.99249, "2700": 3.03516, "2705": 2.98228, "2710": 2.97803, "2715": 2.96615, "2720": 3.02726, "2725": 2.99649, "2730": 3.1557, "2735": 3.07017, "2740": 3.07245, "2745": 3.18562, "2750": 3.10679, "2755": 3.01816, "2760": 3.0161, "2765": 3.01682, "2770": 2.98123, 
"2775": 2.99323, "2780": 3.01519, "2785": 2.95101, "2790": 2.95977, "2795": 2.954, "2800": 2.95842, "2805": 2.93535, "2810": 2.98243, "2815": 2.96433, "2820": 2.99237, "2825": 3.02039, "2830": 3.10136, "2835": 2.98903, "2840": 2.95957, "2845": 3.01776, "2850": 3.00083, "2855": 2.9759, "2860": 2.9543, "2865": 2.91867, "2870": 2.98854, "2875": 2.92523, "2880": 2.95831, "2885": 2.92304, "2890": 2.98182, "2895": 2.93289, "2900": 2.96666, "2905": 3.01503, "2910": 2.92161, "2915": 2.94379, "2920": 2.95718, "2925": 2.94484, "2930": 2.95382, "2935": 2.94482, "2940": 2.945, "2945": 2.91905, "2950": 2.98289, "2955": 2.92398, "2960": 2.97676, "2965": 2.87409, "2970": 2.96461, "2975": 3.00225, "2980": 2.94932, "2985": 3.04082, "2990": 2.95239, "2995": 2.87687, "3000": 2.93735, "3005": 2.8988, "3010": 2.94014, "3015": 2.92084, "3020": 2.9178, "3025": 2.92668, "3030": 2.92389, "3035": 2.96118, "3040": 2.93897, "3045": 2.84358, "3050": 2.90264, "3055": 2.89986, "3060": 2.92858, "3065": 2.92788, "3070": 2.88443, "3075": 2.87427, "3080": 2.92817, "3085": 2.90375, "3090": 2.9239, "3095": 2.92752, "3100": 2.87323, "3105": 2.93435, "3110": 2.90813, "3115": 2.95166, "3120": 2.96012, "3125": 2.86065, "3130": 2.93565, "3135": 2.93199, "3140": 2.87581, "3145": 2.92429, "3150": 2.85637, "3155": 2.85418, "3160": 2.84218, "3165": 2.85043, "3170": 2.89735, "3175": 2.91401, "3180": 2.90638, "3185": 2.91874, "3190": 2.97698, "3195": 2.942, "3200": 2.93848, "3205": 2.86035, "3210": 2.87161, "3215": 2.91727, "3220": 2.87782, "3225": 2.88186, "3230": 2.8196, "3235": 2.87824, "3240": 2.87295, "3245": 2.90228, "3250": 2.86232, "3255": 2.8579, "3260": 2.87189, "3265": 2.87676, "3270": 2.8451, "3275": 2.87249, "3280": 2.80187, "3285": 2.81806, "3290": 2.87006, "3295": 2.90435, "3300": 2.87688, "3305": 2.86832, "3310": 2.86521, "3315": 2.81525, "3320": 2.82768, "3325": 2.83163, "3330": 2.83452, "3335": 2.85354, "3340": 2.82905, "3345": 2.84574, "3350": 2.90452, "3355": 2.88106, "3360": 2.80888, "3365": 2.86223, "3370": 2.85066, "3375": 2.84761, "3380": 2.85447, "3385": 2.88448, "3390": 2.87604, "3395": 2.8166, "3400": 2.79049, "3405": 2.83307, "3410": 2.84963, "3415": 2.86105, "3420": 2.82593, "3425": 2.81546, "3430": 2.83227, "3435": 2.89405, "3440": 2.82411, "3445": 2.87974, "3450": 2.82242, "3455": 2.79334, "3460": 2.82066, "3465": 2.85489, "3470": 2.84102, "3475": 2.77836, "3480": 2.84634, "3485": 2.82897, "3490": 2.89763, "3495": 2.86066, "3500": 2.84036, "3505": 2.82889, "3510": 2.82526, "3515": 2.83852, "3520": 2.7826, "3525": 2.8107, "3530": 2.85775, "3535": 2.78738, "3540": 2.84281, "3545": 2.81379, "3550": 2.8004, "3555": 2.83453, "3560": 2.83227, "3565": 2.8353, "3570": 2.80774, "3575": 2.80908, "3580": 2.82289, "3585": 2.84004, "3590": 2.83499, "3595": 2.78266, "3600": 2.75596, "3605": 2.79188, "3610": 2.85568, "3615": 2.75392, "3620": 2.80332, "3625": 2.88963, "3630": 2.78825, "3635": 2.79472, "3640": 2.79042, "3645": 2.7721, "3650": 2.81021, "3655": 2.8205, "3660": 2.77152, "3665": 2.78577, "3670": 2.77623, "3675": 2.78014, "3680": 2.81232, "3685": 2.8096, "3690": 2.80979, "3695": 2.8137, "3700": 2.79417, "3705": 2.79087, "3710": 2.76207, "3715": 2.81091, "3720": 2.79916, "3725": 2.79892, "3730": 2.84254, "3735": 2.7991, "3740": 2.75813, "3745": 2.79579, "3750": 2.80713, "3755": 2.79977, "3760": 2.76084, "3765": 2.76215, "3770": 2.76817, "3775": 2.77744, "3780": 2.7657, "3785": 2.78455, "3790": 2.74533, "3795": 2.79869, "3800": 2.80647, "3805": 2.75476, "3810": 2.80519, "3815": 2.76623, "3820": 2.789, "3825": 
2.73814, "3830": 2.7503, "3835": 2.82441, "3840": 2.73152, "3845": 2.71923, "3850": 2.77827, "3855": 2.7218, "3860": 2.80588, "3865": 2.75798, "3870": 2.78232, "3875": 2.75857, "3880": 2.79235, "3885": 2.78695, "3890": 2.74809, "3895": 2.80046, "3900": 2.76731, "3905": 2.73059, "3910": 2.74932, "3915": 2.75745, "3920": 2.80221, "3925": 2.78154, "3930": 2.71145, "3935": 2.74448, "3940": 2.75438, "3945": 2.74928, "3950": 2.73113, "3955": 2.78718, "3960": 2.76678, "3965": 2.74075, "3970": 2.76185, "3975": 2.72973, "3980": 2.74114, "3985": 2.74764, "3990": 2.69118, "3995": 2.78316, "4000": 2.74187, "4005": 2.7721, "4010": 2.71388, "4015": 2.7254, "4020": 2.75385, "4025": 2.73593, "4030": 2.66418, "4035": 2.7001, "4040": 2.75491, "4045": 2.7528, "4050": 2.79665, "4055": 2.72816, "4060": 2.71888, "4065": 2.65622, "4070": 2.80994, "4075": 2.76135, "4080": 2.7187, "4085": 2.7507, "4090": 2.68375, "4095": 2.69178, "4100": 2.71212, "4105": 2.74157, "4110": 2.73134, "4115": 2.70885, "4120": 2.72986, "4125": 2.70715, "4130": 2.70197, "4135": 2.69096, "4140": 2.6847, "4145": 2.78467, "4150": 2.71364, "4155": 2.74561, "4160": 2.76188, "4165": 2.72216, "4170": 2.68098, "4175": 2.72401, "4180": 2.72802, "4185": 2.73281, "4190": 2.74343, "4195": 2.70207, "4200": 2.71606, "4205": 2.75884, "4210": 2.67952, "4215": 2.66838, "4220": 2.66722, "4225": 2.71062, "4230": 2.71734, "4235": 2.73462, "4240": 2.71132, "4245": 2.70062, "4250": 2.71591, "4255": 2.65584, "4260": 2.72698, "4265": 2.74152, "4270": 2.7261, "4275": 2.69626, "4280": 2.70777, "4285": 2.73709, "4290": 2.69242, "4295": 2.69709, "4300": 2.70305, "4305": 2.70401, "4310": 2.7353, "4315": 2.71552, "4320": 2.70246, "4325": 2.70726, "4330": 2.70989, "4335": 2.69493, "4340": 2.70795, "4345": 2.72946, "4350": 2.67736, "4355": 2.69654, "4360": 2.7133, "4365": 2.78795, "4370": 2.73502, "4375": 2.74353, "4380": 2.70241, "4385": 2.70239, "4390": 2.70636, "4395": 2.75623, "4400": 2.66706, "4405": 2.67178, "4410": 2.68975, "4415": 2.70998, "4420": 2.7065, "4425": 2.7244, "4430": 2.69639, "4435": 2.68419, "4440": 2.6982, "4445": 2.68105, "4450": 2.65509, "4455": 2.67228, "4460": 2.68984, "4465": 2.70369, "4470": 2.66982, "4475": 2.68732, "4480": 2.658, "4485": 2.70326, "4490": 2.65692, "4495": 2.71238, "4500": 2.70556, "4505": 2.69756, "4510": 2.65047, "4515": 2.70288, "4520": 2.6699, "4525": 2.67024, "4530": 2.67571, "4535": 2.67406, "4540": 2.71091, "4545": 2.65762, "4550": 2.7016, "4555": 2.68455, "4560": 2.66113, "4565": 2.64248, "4570": 2.64227, "4575": 2.66956, "4580": 2.69137, "4585": 2.68625, "4590": 2.62069, "4595": 2.66604, "4600": 2.67826, "4605": 2.68224, "4610": 2.65633, "4615": 2.66317, "4620": 2.65718, "4625": 2.68673, "4630": 2.67386, "4635": 2.64587, "4640": 2.69645, "4645": 2.64639, "4650": 2.70385, "4655": 2.70976, "4660": 2.67963, "4665": 2.69261, "4670": 2.68026, "4675": 2.6938, "4680": 2.6683, "4685": 2.66281, "4690": 2.70436, "4695": 2.65718, "4700": 2.67345, "4705": 2.65036, "4710": 2.67696, "4715": 2.64908, "4720": 2.72426, "4725": 2.63008, "4730": 2.6537, "4735": 2.68583, "4740": 2.64437, "4745": 2.65588, "4750": 2.64213, "4755": 2.65554, "4760": 2.66695, "4765": 2.64472, "4770": 2.62643, "4775": 2.65695, "4780": 2.65937, "4785": 2.69166, "4790": 2.65142, "4795": 2.67951, "4800": 2.63063, "4805": 2.64441, "4810": 2.66475, "4815": 2.64716, "4820": 2.67105, "4825": 2.65172, "4830": 2.61569, "4835": 2.6511, "4840": 2.65627, "4845": 2.63208, "4850": 2.62977, "4855": 2.60411, "4860": 2.65114, "4865": 2.62723, "4870": 2.63932, "4875": 
2.62078, "4880": 2.62831, "4885": 2.62715, "4890": 2.68087, "4895": 2.66154, "4900": 2.61673, "4905": 2.62088, "4910": 2.63826, "4915": 2.61735, "4920": 2.65306, "4925": 2.6497, "4930": 2.57101, "4935": 2.65008, "4940": 2.63507, "4945": 2.63901, "4950": 2.62607, "4955": 2.61805, "4960": 2.62076, "4965": 2.65903, "4970": 2.60357, "4975": 2.65598, "4980": 2.62096, "4985": 2.63187, "4990": 2.65844, "4995": 2.58198, "5000": 2.66346, "5005": 2.66348, "5010": 2.68614, "5015": 2.63637, "5020": 2.64499, "5025": 2.68893, "5030": 2.6484, "5035": 2.63213, "5040": 2.62962, "5045": 2.60742, "5050": 2.62732, "5055": 2.65148, "5060": 2.64612, "5065": 2.69002, "5070": 2.60995, "5075": 2.61256, "5080": 2.60874, "5085": 2.60348, "5090": 2.59199, "5095": 2.65074, "5100": 2.64799, "5105": 2.61145, "5110": 2.66419, "5115": 2.6186, "5120": 2.66944, "5125": 2.63071, "5130": 2.61584, "5135": 2.61395, "5140": 2.57642, "5145": 2.63368, "5150": 2.63634, "5155": 2.61743, "5160": 2.66262, "5165": 2.58154, "5170": 2.59124, "5175": 2.6172, "5180": 2.60766, "5185": 2.62098, "5190": 2.62395, "5195": 2.67166, "5200": 2.59926, "5205": 2.60484, "5210": 2.60785, "5215": 2.64677, "5220": 2.5877, "5225": 2.55112, "5230": 2.63323, "5235": 2.61515, "5240": 2.64199, "5245": 2.63218, "5250": 2.60038, "5255": 2.62106, "5260": 2.55854, "5265": 2.59596, "5270": 2.58848, "5275": 2.61795, "5280": 2.6107, "5285": 2.60588, "5290": 2.6338, "5295": 2.62279, "5300": 2.57991, "5305": 2.59812, "5310": 2.61223, "5315": 2.58855, "5320": 2.61644, "5325": 2.64645, "5330": 2.60471, "5335": 2.58385, "5340": 2.56594, "5345": 2.65822, "5350": 2.62172, "5355": 2.58212, "5360": 2.59777, "5365": 2.61989, "5370": 2.61333, "5375": 2.62839, "5380": 2.58095, "5385": 2.56523, "5390": 2.58674, "5395": 2.62025, "5400": 2.60807, "5405": 2.5488, "5410": 2.61261, "5415": 2.59641, "5420": 2.61289, "5425": 2.62537, "5430": 2.62734, "5435": 2.57664, "5440": 2.5862, "5445": 2.63072, "5450": 2.64857, "5455": 2.61159, "5460": 2.59157, "5465": 2.60515, "5470": 2.6011, "5475": 2.62617, "5480": 2.59073, "5485": 2.59332, "5490": 2.58005, "5495": 2.57236, "5500": 2.57155, "5505": 2.61863, "5510": 2.627, "5515": 2.58335, "5520": 2.55848, "5525": 2.58612, "5530": 2.66396, "5535": 2.62153, "5540": 2.57018, "5545": 2.5966, "5550": 2.54918, "5555": 2.57253, "5560": 2.56413, "5565": 2.6073, "5570": 2.65535, "5575": 2.63128, "5580": 2.57399, "5585": 2.59568, "5590": 2.56212, "5595": 2.58353, "5600": 2.55263, "5605": 2.59812, "5610": 2.58251, "5615": 2.58157, "5620": 2.58026, "5625": 2.55098, "5630": 2.57032, "5635": 2.63171, "5640": 2.59389, "5645": 2.57166, "5650": 2.57754, "5655": 2.54718, "5660": 2.56055, "5665": 2.58622, "5670": 2.56682, "5675": 2.60832, "5680": 2.52901, "5685": 2.56862, "5690": 2.60273, "5695": 2.56079, "5700": 2.59915, "5705": 2.5964, "5710": 2.57839, "5715": 2.58675, "5720": 2.53755, "5725": 2.60243, "5730": 2.5726, "5735": 2.61053, "5740": 2.59556, "5745": 2.55977, "5750": 2.54167, "5755": 2.56089, "5760": 2.61503, "5765": 2.55935, "5770": 2.54197, "5775": 2.58502, "5780": 2.57862, "5785": 2.54458, "5790": 2.56421, "5795": 2.60217, "5800": 2.54399, "5805": 2.53543, "5810": 2.55738, "5815": 2.52444, "5820": 2.59644, "5825": 2.50598, "5830": 2.49753, "5835": 2.59812, "5840": 2.53814, "5845": 2.55328, "5850": 2.61267, "5855": 2.50809, "5860": 2.55925, "5865": 2.51828, "5870": 2.57487, "5875": 2.60908, "5880": 2.58702, "5885": 2.56659, "5890": 2.58317, "5895": 2.55562, "5900": 2.61248, "5905": 2.55702, "5910": 2.59933, "5915": 2.61203, "5920": 2.58792, "5925": 
2.52878, "5930": 2.57537, "5935": 2.55128, "5940": 2.57133, "5945": 2.5203, "5950": 2.55581, "5955": 2.58709, "5960": 2.56857, "5965": 2.62016, "5970": 2.5536, "5975": 2.58172, "5980": 2.55904, "5985": 2.56056, "5990": 2.55861, "5995": 2.5593, "6000": 2.55707, "6005": 2.52175, "6010": 2.56256, "6015": 2.52587, "6020": 2.53485, "6025": 2.55688, "6030": 2.60411, "6035": 2.54202, "6040": 2.55035, "6045": 2.48988, "6050": 2.59653, "6055": 2.52247, "6060": 2.55023, "6065": 2.52597, "6070": 2.52862, "6075": 2.53428, "6080": 2.53469, "6085": 2.59513, "6090": 2.56765, "6095": 2.53369, "6100": 2.54464, "6105": 2.52066, "6110": 2.555, "6115": 2.58465, "6120": 2.55452, "6125": 2.53734, "6130": 2.47377, "6135": 2.55579, "6140": 2.55381, "6145": 2.5551, "6150": 2.5244, "6155": 2.50851, "6160": 2.5395, "6165": 2.57285, "6170": 2.54504, "6175": 2.59966, "6180": 2.51042, "6185": 2.5498, "6190": 2.49112, "6195": 2.57736, "6200": 2.55201, "6205": 2.53767, "6210": 2.51991, "6215": 2.51418, "6220": 2.56482, "6225": 2.51309, "6230": 2.51298, "6235": 2.56523, "6240": 2.55143, "6245": 2.52589, "6250": 2.53511, "6255": 2.57273, "6260": 2.522, "6265": 2.57259, "6270": 2.52342, "6275": 2.56323, "6280": 2.52282, "6285": 2.51853, "6290": 2.51831, "6295": 2.50373, "6300": 2.55304, "6305": 2.52226, "6310": 2.51111, "6315": 2.53768, "6320": 2.488, "6325": 2.59596, "6330": 2.5558, "6335": 2.51165, "6340": 2.5104, "6345": 2.5551, "6350": 2.55472, "6355": 2.52298, "6360": 2.52017, "6365": 2.48306, "6370": 2.53556, "6375": 2.49329, "6380": 2.55818, "6385": 2.57564, "6390": 2.50181, "6395": 2.5512, "6400": 2.50888, "6405": 2.52669, "6410": 2.5154, "6415": 2.52425, "6420": 2.54062, "6425": 2.53395, "6430": 2.57629, "6435": 2.543, "6440": 2.53665, "6445": 2.52678, "6450": 2.53226, "6455": 2.51994, "6460": 2.51598, "6465": 2.56057, "6470": 2.51651, "6475": 2.52349, "6480": 2.48601, "6485": 2.52705, "6490": 2.50802, "6495": 2.49753, "6500": 2.52472, "6505": 2.49355, "6510": 2.54327, "6515": 2.51057, "6520": 2.50867, "6525": 2.49327, "6530": 2.54194, "6535": 2.53162, "6540": 2.53012, "6545": 2.56085, "6550": 2.49887, "6555": 2.55479, "6560": 2.5112, "6565": 2.52041, "6570": 2.5839, "6575": 2.52087, "6580": 2.49697, "6585": 2.50629, "6590": 2.51237, "6595": 2.54022, "6600": 2.49807, "6605": 2.54105, "6610": 2.47715, "6615": 2.56557, "6620": 2.53161, "6625": 2.50848, "6630": 2.50865, "6635": 2.46944, "6640": 2.53819, "6645": 2.59464, "6650": 2.50754, "6655": 2.49548, "6660": 2.57388, "6665": 2.52206, "6670": 2.56868, "6675": 2.46616, "6680": 2.54606, "6685": 2.53475, "6690": 2.51446, "6695": 2.48593, "6700": 2.52181, "6705": 2.51828, "6710": 2.4932, "6715": 2.51442, "6720": 2.50949, "6725": 2.5188, "6730": 2.51941, "6735": 2.48408, "6740": 2.51247, "6745": 2.49444, "6750": 2.55718, "6755": 2.47484, "6760": 2.54103, "6765": 2.48715, "6770": 2.51838, "6775": 2.50886, "6780": 2.53938, "6785": 2.472, "6790": 2.54488, "6795": 2.49981, "6800": 2.52791, "6805": 2.50978, "6810": 2.50277, "6815": 2.52164, "6820": 2.48777, "6825": 2.50719, "6830": 2.5405, "6835": 2.50984, "6840": 2.50928, "6845": 2.52524, "6850": 2.47228, "6855": 2.5117, "6860": 2.50466, "6865": 2.49008, "6870": 2.55301, "6875": 2.47207, "6880": 2.55159, "6885": 2.47768, "6890": 2.54617, "6895": 2.501, "6900": 2.48567, "6905": 2.49838, "6910": 2.51834, "6915": 2.51661, "6920": 2.53089, "6925": 2.54325, "6930": 2.49049, "6935": 2.52109, "6940": 2.49944, "6945": 2.46052, "6950": 2.48402, "6955": 2.529, "6960": 2.5208, "6965": 2.49256, "6970": 2.46997, "6975": 2.52281, "6980": 
2.45208, "6985": 2.51741, "6990": 2.53017, "6995": 2.46345, "7000": 2.48853, "7005": 2.46858, "7010": 2.47514, "7015": 2.52143, "7020": 2.46682, "7025": 2.45423, "7030": 2.48695, "7035": 2.48041, "7040": 2.50706, "7045": 2.5203, "7050": 2.52859, "7055": 2.44251, "7060": 2.47078, "7065": 2.4908, "7070": 2.48908, "7075": 2.49272, "7080": 2.53634, "7085": 2.4856, "7090": 2.47714, "7095": 2.5013, "7100": 2.51602, "7105": 2.48811, "7110": 2.48788, "7115": 2.50501, "7120": 2.47197, "7125": 2.46297, "7130": 2.48514, "7135": 2.51263, "7140": 2.49911, "7145": 2.50737, "7150": 2.51319, "7155": 2.50487, "7160": 2.47487, "7165": 2.45877, "7170": 2.5068, "7175": 2.50948, "7180": 2.50977, "7185": 2.48205, "7190": 2.46386, "7195": 2.46486, "7200": 2.5103, "7205": 2.48888, "7210": 2.44196, "7215": 2.4776, "7220": 2.44201, "7225": 2.51293, "7230": 2.50847, "7235": 2.48147, "7240": 2.47822, "7245": 2.49848, "7250": 2.50692, "7255": 2.49302, "7260": 2.45801, "7265": 2.45076, "7270": 2.47377, "7275": 2.51022, "7280": 2.49411, "7285": 2.43384, "7290": 2.48205, "7295": 2.4883, "7300": 2.41726, "7305": 2.44572, "7310": 2.44746, "7315": 2.48856, "7320": 2.48456, "7325": 2.45951, "7330": 2.48788, "7335": 2.47264, "7340": 2.46361, "7345": 2.49379, "7350": 2.51188, "7355": 2.49719, "7360": 2.47921, "7365": 2.47002, "7370": 2.47636, "7375": 2.45043, "7380": 2.49448, "7385": 2.48338, "7390": 2.4714, "7395": 2.47708, "7400": 2.48189, "7405": 2.43904, "7410": 2.48078, "7415": 2.46934, "7420": 2.49312, "7425": 2.45741, "7430": 2.52217, "7435": 2.49114, "7440": 2.52001, "7445": 2.50908, "7450": 2.47191, "7455": 2.45726, "7460": 2.46327, "7465": 2.51216, "7470": 2.46282, "7475": 2.46216, "7480": 2.51233, "7485": 2.45002, "7490": 2.47264, "7495": 2.47781, "7500": 2.49215, "7505": 2.43742, "7510": 2.43408, "7515": 2.41878, "7520": 2.49157, "7525": 2.49674, "7530": 2.47366, "7535": 2.461, "7540": 2.47251, "7545": 2.47477, "7550": 2.48874, "7555": 2.45467, "7560": 2.42854, "7565": 2.5089, "7570": 2.4855, "7575": 2.43789, "7580": 2.45628, "7585": 2.48046, "7590": 2.4811, "7595": 2.46436, "7600": 2.46119, "7605": 2.44883, "7610": 2.44836, "7615": 2.42589, "7620": 2.54467, "7625": 2.48679, "7630": 2.42558, "7635": 2.42779, "7640": 2.45567, "7645": 2.47442, "7650": 2.46326, "7655": 2.48475, "7660": 2.45112, "7665": 2.43099, "7670": 2.44148, "7675": 2.45381, "7680": 2.48534, "7685": 2.43155, "7690": 2.4798, "7695": 2.45362, "7700": 2.48073, "7705": 2.53277, "7710": 2.4947, "7715": 2.44257, "7720": 2.47023, "7725": 2.48024, "7730": 2.45757, "7735": 2.47364, "7740": 2.43789, "7745": 2.45069, "7750": 2.43908, "7755": 2.46809, "7760": 2.44938, "7765": 2.45398, "7770": 2.46977, "7775": 2.4516, "7780": 2.41585, "7785": 2.44424, "7790": 2.48174, "7795": 2.4399, "7800": 2.46276, "7805": 2.48028, "7810": 2.50232, "7815": 2.48649, "7820": 2.44632, "7825": 2.51331, "7830": 2.45198, "7835": 2.46772, "7840": 2.47924, "7845": 2.46174, "7850": 2.41598, "7855": 2.47149, "7860": 2.50108, "7865": 2.42365, "7870": 2.4672, "7875": 2.44726, "7880": 2.45445, "7885": 2.46386, "7890": 2.47119, "7895": 2.44565, "7900": 2.43915, "7905": 2.43623, "7910": 2.42684, "7915": 2.48212, "7920": 2.47656, "7925": 2.42247, "7930": 2.47218, "7935": 2.45116, "7940": 2.4212, "7945": 2.46954, "7950": 2.44465, "7955": 2.41909, "7960": 2.48952, "7965": 2.51748, "7970": 2.52221, "7975": 2.44872, "7980": 2.44206, "7985": 2.46907, "7990": 2.43174, "7995": 2.47023, "8000": 2.43705, "8005": 2.4185, "8010": 2.4569, "8015": 2.46952, "8020": 2.48206, "8025": 2.47408, "8030": 
2.4539, "8035": 2.47445, "8040": 2.42394, "8045": 2.45395, "8050": 2.44834, "8055": 2.42642, "8060": 2.44206, "8065": 2.46098, "8070": 2.45543, "8075": 2.45796, "8080": 2.44468, "8085": 2.44098, "8090": 2.42427, "8095": 2.4239, "8100": 2.43791, "8105": 2.49488, "8110": 2.43737, "8115": 2.44396, "8120": 2.46736, "8125": 2.4683, "8130": 2.45407, "8135": 2.4542, "8140": 2.44154, "8145": 2.42637, "8150": 2.42361, "8155": 2.48675, "8160": 2.45458, "8165": 2.4439, "8170": 2.43621, "8175": 2.42222, "8180": 2.49616, "8185": 2.42608, "8190": 2.46972, "8195": 2.45859, "8200": 2.44728, "8205": 2.44741, "8210": 2.43318, "8215": 2.44258, "8220": 2.43579, "8225": 2.41052, "8230": 2.44061, "8235": 2.46347, "8240": 2.42659, "8245": 2.44777, "8250": 2.44381, "8255": 2.43926, "8260": 2.4344, "8265": 2.42818, "8270": 2.43351, "8275": 2.44399, "8280": 2.39769, "8285": 2.43949, "8290": 2.48018, "8295": 2.44648, "8300": 2.45692, "8305": 2.40909, "8310": 2.43483, "8315": 2.45647, "8320": 2.39934, "8325": 2.39287, "8330": 2.43614, "8335": 2.44456, "8340": 2.48993, "8345": 2.44823, "8350": 2.44936, "8355": 2.40574, "8360": 2.40074, "8365": 2.45376, "8370": 2.45123, "8375": 2.42492, "8380": 2.41836, "8385": 2.42335, "8390": 2.43323, "8395": 2.43933, "8400": 2.43792, "8405": 2.48867, "8410": 2.43787, "8415": 2.43378, "8420": 2.41573, "8425": 2.43863, "8430": 2.46001, "8435": 2.40407, "8440": 2.44993, "8445": 2.45847, "8450": 2.40583, "8455": 2.45827, "8460": 2.45425, "8465": 2.43504, "8470": 2.41136, "8475": 2.47834, "8480": 2.40462, "8485": 2.41501, "8490": 2.46588, "8495": 2.43642, "8500": 2.44544, "8505": 2.40237, "8510": 2.40361, "8515": 2.42828, "8520": 2.42495, "8525": 2.49418, "8530": 2.37629, "8535": 2.40121, "8540": 2.48734, "8545": 2.38038, "8550": 2.43845, "8555": 2.4517, "8560": 2.4699, "8565": 2.41947, "8570": 2.43187, "8575": 2.44657, "8580": 2.44123, "8585": 2.41938, "8590": 2.40222, "8595": 2.42545, "8600": 2.41268, "8605": 2.49022, "8610": 2.42048, "8615": 2.38719, "8620": 2.4488, "8625": 2.42704, "8630": 2.45788, "8635": 2.44896, "8640": 2.43458, "8645": 2.47298, "8650": 2.41989, "8655": 2.45365, "8660": 2.4551, "8665": 2.38841, "8670": 2.40977, "8675": 2.42921, "8680": 2.44837, "8685": 2.43066, "8690": 2.4104, "8695": 2.44185, "8700": 2.43418, "8705": 2.42102, "8710": 2.42816, "8715": 2.4481, "8720": 2.47833, "8725": 2.41271, "8730": 2.39075, "8735": 2.43393, "8740": 2.4301, "8745": 2.39789, "8750": 2.43808, "8755": 2.42409, "8760": 2.3998, "8765": 2.4348, "8770": 2.40504, "8775": 2.43412, "8780": 2.41964, "8785": 2.47073, "8790": 2.42032, "8795": 2.4182, "8800": 2.41686, "8805": 2.4091, "8810": 2.41202, "8815": 2.4744, "8820": 2.45341, "8825": 2.42216, "8830": 2.38629, "8835": 2.42227, "8840": 2.3949, "8845": 2.42597, "8850": 2.43345, "8855": 2.4033, "8860": 2.42782, "8865": 2.42795, "8870": 2.43672, "8875": 2.43901, "8880": 2.41077, "8885": 2.3959, "8890": 2.44701, "8895": 2.4326, "8900": 2.41483, "8905": 2.40245, "8910": 2.40167, "8915": 2.41886, "8920": 2.43415, "8925": 2.46731, "8930": 2.41425, "8935": 2.40864, "8940": 2.38945, "8945": 2.39272, "8950": 2.41816, "8955": 2.39451, "8960": 2.43208, "8965": 2.41808, "8970": 2.40419, "8975": 2.47542, "8980": 2.44037, "8985": 2.37254, "8990": 2.40797, "8995": 2.4161, "9000": 2.4555, "9005": 2.41324, "9010": 2.37544, "9015": 2.40916, "9020": 2.39928, "9025": 2.36893, "9030": 2.39834, "9035": 2.42514, "9040": 2.42034, "9045": 2.41952, "9050": 2.39531, "9055": 2.41875, "9060": 2.41904, "9065": 2.40517, "9070": 2.4455, "9075": 2.39346, "9080": 2.43404, 
"9085": 2.41116, "9090": 2.4104, "9095": 2.39527, "9100": 2.40085, "9105": 2.35791, "9110": 2.46814, "9115": 2.41736, "9120": 2.40424, "9125": 2.4578, "9130": 2.39449, "9135": 2.44911, "9140": 2.43566, "9145": 2.43022, "9150": 2.48053, "9155": 2.39956, "9160": 2.42973, "9165": 2.43203, "9170": 2.37597, "9175": 2.41757, "9180": 2.37497, "9185": 2.43604, "9190": 2.40956, "9195": 2.38516, "9200": 2.38833, "9205": 2.44666, "9210": 2.36002, "9215": 2.46161, "9220": 2.44621, "9225": 2.38175, "9230": 2.44658, "9235": 2.39635, "9240": 2.40173, "9245": 2.4385, "9250": 2.42944, "9255": 2.4297, "9260": 2.38568, "9265": 2.43804, "9270": 2.43503, "9275": 2.39494, "9280": 2.38995, "9285": 2.42145, "9290": 2.40455, "9295": 2.38452, "9300": 2.42348, "9305": 2.40443, "9310": 2.41578, "9315": 2.41045, "9320": 2.44383, "9325": 2.37083, "9330": 2.40343, "9335": 2.36111, "9340": 2.40886, "9345": 2.41537, "9350": 2.43849, "9355": 2.47706, "9360": 2.43722, "9365": 2.38781, "9370": 2.43626, "9375": 2.43463, "9380": 2.35431, "9385": 2.40143, "9390": 2.3807, "9395": 2.3874, "9400": 2.44311, "9405": 2.41326, "9410": 2.39779, "9415": 2.4384, "9420": 2.44513, "9425": 2.43208, "9430": 2.44734, "9435": 2.41476, "9440": 2.47766, "9445": 2.37664, "9450": 2.39589, "9455": 2.40416, "9460": 2.38793, "9465": 2.37903, "9470": 2.38143, "9475": 2.36649, "9480": 2.4344, "9485": 2.38476, "9490": 2.42088, "9495": 2.38202, "9500": 2.36308, "9505": 2.43007, "9510": 2.3996, "9515": 2.43126, "9520": 2.42001, "9525": 2.38902, "9530": 2.45338, "9535": 2.40084, "9540": 2.4181, "9545": 2.37636, "9550": 2.42268, "9555": 2.38875, "9560": 2.42246, "9565": 2.40696, "9570": 2.37248, "9575": 2.41147, "9580": 2.3964, "9585": 2.42269, "9590": 2.42928, "9595": 2.44764, "9600": 2.38972, "9605": 2.38337, "9610": 2.42218, "9615": 2.41398, "9620": 2.4144, "9625": 2.44582, "9630": 2.39876, "9635": 2.40281, "9640": 2.4479, "9645": 2.40925, "9650": 2.39995, "9655": 2.37399, "9660": 2.42343, "9665": 2.39007, "9670": 2.38361, "9675": 2.35764, "9680": 2.39641, "9685": 2.39661, "9690": 2.462, "9695": 2.38067, "9700": 2.3763, "9705": 2.38298, "9710": 2.36606, "9715": 2.38746, "9720": 2.43554, "9725": 2.44202, "9730": 2.42766, "9735": 2.38651, "9740": 2.38103, "9745": 2.42624, "9750": 2.39899, "9755": 2.40719, "9760": 2.41077, "9765": 2.36751, "9770": 2.45914, "9775": 2.40187, "9780": 2.3622, "9785": 2.39932, "9790": 2.40727, "9795": 2.35981, "9800": 2.39686, "9805": 2.40559, "9810": 2.40829, "9815": 2.37755, "9820": 2.37567, "9825": 2.40269, "9830": 2.41889, "9835": 2.38588, "9840": 2.41283, "9845": 2.36274, "9850": 2.39852, "9855": 2.39475, "9860": 2.38881, "9865": 2.37977, "9870": 2.38436, "9875": 2.38116, "9880": 2.45097, "9885": 2.39336, "9890": 2.35309, "9895": 2.3193, "9900": 2.39562, "9905": 2.42489, "9910": 2.35553, "9915": 2.36392, "9920": 2.41132, "9925": 2.39906, "9930": 2.38236, "9935": 2.34957, "9940": 2.38655, "9945": 2.37886, "9950": 2.4032, "9955": 2.44724, "9960": 2.42583, "9965": 2.35575, "9970": 2.40803, "9975": 2.38587, "9980": 2.32984, "9985": 2.40585, "9990": 2.39817, "9995": 2.39539, "10000": 2.36618, "10005": 2.37288, "10010": 2.38173, "10015": 2.44428, "10020": 2.36327, "10025": 2.38855, "10030": 2.38821, "10035": 2.40833, "10040": 2.40302, "10045": 2.38264, "10050": 2.34846, "10055": 2.3694, "10060": 2.41922, "10065": 2.37434, "10070": 2.42192, "10075": 2.37205, "10080": 2.3617, "10085": 2.37145, "10090": 2.34717, "10095": 2.40241, "10100": 2.31411, "10105": 2.38114, "10110": 2.4103, "10115": 2.38677, "10120": 2.35757, "10125": 
2.37079, "10130": 2.35967, "10135": 2.38387, "10140": 2.41274, "10145": 2.40819, "10150": 2.37717, "10155": 2.39562, "10160": 2.36174, "10165": 2.38422, "10170": 2.42365, "10175": 2.32535, "10180": 2.39445, "10185": 2.3837, "10190": 2.44464, "10195": 2.40211, "10200": 2.39042, "10205": 2.38827, "10210": 2.36975, "10215": 2.34269, "10220": 2.41897, "10225": 2.42899, "10230": 2.35431, "10235": 2.38611, "10240": 2.37312, "10245": 2.3915, "10250": 2.38932, "10255": 2.4127, "10260": 2.33445, "10265": 2.34791, "10270": 2.34999, "10275": 2.37074, "10280": 2.44889, "10285": 2.35828, "10290": 2.38525, "10295": 2.37374, "10300": 2.36779, "10305": 2.41399, "10310": 2.38956, "10315": 2.36053, "10320": 2.36688, "10325": 2.36029, "10330": 2.41255, "10335": 2.36126, "10340": 2.42017, "10345": 2.37035, "10350": 2.3579, "10355": 2.39731, "10360": 2.37274, "10365": 2.36164, "10370": 2.3406, "10375": 2.35618, "10380": 2.41837, "10385": 2.40452, "10390": 2.38041, "10395": 2.35802, "10400": 2.3776, "10405": 2.35, "10410": 2.34043, "10415": 2.41691, "10420": 2.37895, "10425": 2.32466, "10430": 2.35918, "10435": 2.36973, "10440": 2.37125, "10445": 2.36101, "10450": 2.35971, "10455": 2.37979, "10460": 2.37985, "10465": 2.30211, "10470": 2.35671, "10475": 2.37984, "10480": 2.36267, "10485": 2.36033, "10490": 2.41398, "10495": 2.36709, "10500": 2.3638, "10505": 2.37147, "10510": 2.38241, "10515": 2.37443, "10520": 2.40214, "10525": 2.38842, "10530": 2.3924, "10535": 2.35504, "10540": 2.40521, "10545": 2.35751, "10550": 2.3778, "10555": 2.35868, "10560": 2.34116, "10565": 2.37323, "10570": 2.37569, "10575": 2.35289, "10580": 2.37776, "10585": 2.36834, "10590": 2.37741, "10595": 2.37573, "10600": 2.33007, "10605": 2.37332, "10610": 2.36447, "10615": 2.36356, "10620": 2.34745, "10625": 2.41894, "10630": 2.3699, "10635": 2.32165, "10640": 2.3626, "10645": 2.42148, "10650": 2.36015, "10655": 2.30794, "10660": 2.34737, "10665": 2.39952, "10670": 2.31543, "10675": 2.41693, "10680": 2.35574, "10685": 2.28794, "10690": 2.38521, "10695": 2.33121, "10700": 2.38382, "10705": 2.38452, "10710": 2.34225, "10715": 2.38258, "10720": 2.32508, "10725": 2.35264, "10730": 2.34782, "10735": 2.35467, "10740": 2.31892, "10745": 2.33791, "10750": 2.33464, "10755": 2.40442, "10760": 2.36503, "10765": 2.33589, "10770": 2.36791, "10775": 2.38653, "10780": 2.37104, "10785": 2.39368, "10790": 2.34645, "10795": 2.38549, "10800": 2.32241, "10805": 2.3949, "10810": 2.37387, "10815": 2.35282, "10820": 2.34102, "10825": 2.37072, "10830": 2.33689, "10835": 2.34766, "10840": 2.32982, "10845": 2.38524, "10850": 2.33179, "10855": 2.36397, "10860": 2.33285, "10865": 2.32107, "10870": 2.32406, "10875": 2.30448, "10880": 2.39387, "10885": 2.40308, "10890": 2.36095, "10895": 2.3717, "10900": 2.33301, "10905": 2.31196, "10910": 2.40569, "10915": 2.37152, "10920": 2.37446, "10925": 2.36441, "10930": 2.31796, "10935": 2.36133, "10940": 2.35281, "10945": 2.34712, "10950": 2.36205, "10955": 2.36266, "10960": 2.30883, "10965": 2.36213, "10970": 2.35561, "10975": 2.40853, "10980": 2.37288, "10985": 2.34161, "10990": 2.3968, "10995": 2.36399, "11000": 2.33852, "11005": 2.36198, "11010": 2.34423, "11015": 2.32484, "11020": 2.33432, "11025": 2.36546, "11030": 2.33976, "11035": 2.31307, "11040": 2.3184, "11045": 2.31741, "11050": 2.31843, "11055": 2.28965, "11060": 2.34009, "11065": 2.30929, "11070": 2.39347, "11075": 2.31745, "11080": 2.35377, "11085": 2.33591, "11090": 2.34666, "11095": 2.37045, "11100": 2.32797, "11105": 2.31528, "11110": 2.36211, "11115": 2.37247, 
"11120": 2.38143, "11125": 2.31443, "11130": 2.34936, "11135": 2.33315, "11140": 2.37157, "11145": 2.34943, "11150": 2.39519, "11155": 2.34092, "11160": 2.36524, "11165": 2.36448, "11170": 2.34077, "11175": 2.33426, "11180": 2.37359, "11185": 2.31207, "11190": 2.27711, "11195": 2.32888, "11200": 2.34586, "11205": 2.36063, "11210": 2.3318, "11215": 2.31964, "11220": 2.34302, "11225": 2.37103, "11230": 2.36492, "11235": 2.31915, "11240": 2.34072, "11245": 2.35957, "11250": 2.3319, "11255": 2.33556, "11260": 2.3562, "11265": 2.38816, "11270": 2.2878, "11275": 2.31349, "11280": 2.36829, "11285": 2.28982, "11290": 2.34635, "11295": 2.36405, "11300": 2.38149, "11305": 2.33435, "11310": 2.33024, "11315": 2.29923, "11320": 2.30443, "11325": 2.31556, "11330": 2.35307, "11335": 2.33861, "11340": 2.30846, "11345": 2.31353, "11350": 2.29566, "11355": 2.32083, "11360": 2.35146, "11365": 2.29441, "11370": 2.35297, "11375": 2.32767, "11380": 2.34018, "11385": 2.34667, "11390": 2.33407, "11395": 2.28717, "11400": 2.30826, "11405": 2.3541, "11410": 2.35607, "11415": 2.38586, "11420": 2.35185, "11425": 2.30789, "11430": 2.36756, "11435": 2.36125, "11440": 2.34786, "11445": 2.36249, "11450": 2.32214, "11455": 2.30432, "11460": 2.35128, "11465": 2.34236, "11470": 2.37517, "11475": 2.31364, "11480": 2.32562, "11485": 2.31039, "11490": 2.34544, "11495": 2.40571, "11500": 2.33947, "11505": 2.34913, "11510": 2.36287, "11515": 2.3212, "11520": 2.30485, "11525": 2.36056, "11530": 2.31541, "11535": 2.32215, "11540": 2.34605, "11545": 2.34271, "11550": 2.36568, "11555": 2.32517, "11560": 2.34936, "11565": 2.34077, "11570": 2.34932, "11575": 2.29629, "11580": 2.32931, "11585": 2.35075, "11590": 2.362, "11595": 2.33497, "11600": 2.35549, "11605": 2.32194, "11610": 2.36096, "11615": 2.36015, "11620": 2.29582, "11625": 2.27681, "11630": 2.32794, "11635": 2.34127, "11640": 2.30457, "11645": 2.3071, "11650": 2.32661, "11655": 2.35084, "11660": 2.33485, "11665": 2.32981, "11670": 2.29971, "11675": 2.29722, "11680": 2.32502, "11685": 2.33562, "11690": 2.34413, "11695": 2.31711, "11700": 2.32385, "11705": 2.3013, "11710": 2.34517, "11715": 2.31441, "11720": 2.29988, "11725": 2.33875, "11730": 2.30426, "11735": 2.32811, "11740": 2.27243, "11745": 2.31843, "11750": 2.32735, "11755": 2.35129, "11760": 2.31243, "11765": 2.33749, "11770": 2.27449, "11775": 2.3257, "11780": 2.25419, "11785": 2.29672, "11790": 2.3124, "11795": 2.31962, "11800": 2.33483, "11805": 2.30304, "11810": 2.30413, "11815": 2.33105, "11820": 2.31994, "11825": 2.35972, "11830": 2.31645, "11835": 2.33765, "11840": 2.33977, "11845": 2.31776, "11850": 2.30349, "11855": 2.31356, "11860": 2.34195, "11865": 2.35769, "11870": 2.37973, "11875": 2.28063, "11880": 2.29228, "11885": 2.33746, "11890": 2.29104, "11895": 2.29211, "11900": 2.33338, "11905": 2.31777, "11910": 2.27725, "11915": 2.307, "11920": 2.33335, "11925": 2.30224, "11930": 2.30553, "11935": 2.31524, "11940": 2.31688, "11945": 2.34076, "11950": 2.29786, "11955": 2.31358, "11960": 2.33641, "11965": 2.29565, "11970": 2.28182, "11975": 2.33547, "11980": 2.30591, "11985": 2.27764, "11990": 2.30327, "11995": 2.33003, "12000": 2.32329, "12005": 2.32525, "12010": 2.28749, "12015": 2.31093, "12020": 2.32738, "12025": 2.33409, "12030": 2.31175, "12035": 2.33567, "12040": 2.31535, "12045": 2.311, "12050": 2.30972, "12055": 2.33276, "12060": 2.29739, "12065": 2.32964, "12070": 2.30207, "12075": 2.27677, "12080": 2.3503, "12085": 2.33818, "12090": 2.33365, "12095": 2.28167, "12100": 2.31607, "12105": 2.30898, 
"12110": 2.32936, "12115": 2.3051, "12120": 2.30535, "12125": 2.29316, "12130": 2.30575, "12135": 2.32814, "12140": 2.29362, "12145": 2.25537, "12150": 2.25836, "12155": 2.34003, "12160": 2.35558, "12165": 2.31729, "12170": 2.32946, "12175": 2.33906, "12180": 2.32978, "12185": 2.33966, "12190": 2.33326, "12195": 2.29669, "12200": 2.29924, "12205": 2.32072, "12210": 2.35547, "12215": 2.3035, "12220": 2.29738, "12225": 2.24206, "12230": 2.33233, "12235": 2.33684, "12240": 2.32312, "12245": 2.28649, "12250": 2.27303, "12255": 2.33374, "12260": 2.3125, "12265": 2.34015, "12270": 2.3112, "12275": 2.3141, "12280": 2.31768, "12285": 2.28583, "12290": 2.31022, "12295": 2.26557, "12300": 2.32764, "12305": 2.26705, "12310": 2.28732, "12315": 2.35371, "12320": 2.2953, "12325": 2.31997, "12330": 2.30031, "12335": 2.31895, "12340": 2.33904, "12345": 2.36762, "12350": 2.34275, "12355": 2.30489, "12360": 2.31493, "12365": 2.32912, "12370": 2.291, "12375": 2.29867, "12380": 2.29168, "12385": 2.29001, "12390": 2.24825, "12395": 2.30495, "12400": 2.29858, "12405": 2.31002, "12410": 2.3044, "12415": 2.28227, "12420": 2.31635, "12425": 2.30022, "12430": 2.31452, "12435": 2.29895, "12440": 2.3311, "12445": 2.31911, "12450": 2.30548, "12455": 2.23997, "12460": 2.3353, "12465": 2.36311, "12470": 2.27473, "12475": 2.2722, "12480": 2.29061, "12485": 2.3044, "12490": 2.32973, "12495": 2.26708, "12500": 2.31933, "12505": 2.33451, "12510": 2.3549, "12515": 2.26994, "12520": 2.32027, "12525": 2.28571, "12530": 2.3195, "12535": 2.27086, "12540": 2.28465, "12545": 2.29026, "12550": 2.31531, "12555": 2.32206, "12560": 2.30039, "12565": 2.33538, "12570": 2.27727, "12575": 2.30024, "12580": 2.31034, "12585": 2.2913, "12590": 2.33377, "12595": 2.3245, "12600": 2.28124, "12605": 2.3192, "12610": 2.36317, "12615": 2.30549, "12620": 2.33118, "12625": 2.32956, "12630": 2.29643, "12635": 2.33456, "12640": 2.29492, "12645": 2.27967, "12650": 2.32514, "12655": 2.26525, "12660": 2.34146, "12665": 2.31721, "12670": 2.3095, "12675": 2.31842, "12680": 2.27477, "12685": 2.36543, "12690": 2.30209, "12695": 2.33102, "12700": 2.29281, "12705": 2.30537, "12710": 2.30877, "12715": 2.28741, "12720": 2.31256, "12725": 2.27592, "12730": 2.33802, "12735": 2.29691, "12740": 2.33722, "12745": 2.28763, "12750": 2.27307, "12755": 2.28154, "12760": 2.26603, "12765": 2.33762, "12770": 2.32565, "12775": 2.26349, "12780": 2.31934, "12785": 2.30015, "12790": 2.30581, "12795": 2.32179, "12800": 2.29746, "12805": 2.31545, "12810": 2.27709, "12815": 2.29831, "12820": 2.32369, "12825": 2.32282, "12830": 2.29007, "12835": 2.26772, "12840": 2.27034, "12845": 2.31313, "12850": 2.27646, "12855": 2.27135, "12860": 2.2711, "12865": 2.31532, "12870": 2.26508, "12875": 2.33919, "12880": 2.31847, "12885": 2.28195, "12890": 2.30779, "12895": 2.24485, "12900": 2.32588, "12905": 2.31598, "12910": 2.28815, "12915": 2.28607, "12920": 2.30007, "12925": 2.30106, "12930": 2.2734, "12935": 2.24112, "12940": 2.2586, "12945": 2.31028, "12950": 2.28471, "12955": 2.32799, "12960": 2.31743, "12965": 2.2891, "12970": 2.2722, "12975": 2.26724, "12980": 2.33275, "12985": 2.27824, "12990": 2.28047, "12995": 2.27328, "13000": 2.25161, "13005": 2.34134, "13010": 2.31941, "13015": 2.27379, "13020": 2.278, "13025": 2.30143, "13030": 2.27707, "13035": 2.28433, "13040": 2.31914, "13045": 2.27659, "13050": 2.28272, "13055": 2.29019, "13060": 2.29962, "13065": 2.29996, "13070": 2.32479, "13075": 2.2974, "13080": 2.27877, "13085": 2.27834, "13090": 2.29428, "13095": 2.30593, "13100": 2.30184, 
"13105": 2.31135, "13110": 2.33953, "13115": 2.22678, "13120": 2.30668, "13125": 2.24082, "13130": 2.27051, "13135": 2.31478, "13140": 2.30401, "13145": 2.26316, "13150": 2.28387, "13155": 2.25895, "13160": 2.24659, "13165": 2.25712, "13170": 2.31148, "13175": 2.21367, "13180": 2.28321, "13185": 2.26488, "13190": 2.26945, "13195": 2.26141, "13200": 2.3179, "13205": 2.309, "13210": 2.27742, "13215": 2.30301, "13220": 2.28325, "13225": 2.29617, "13230": 2.25262, "13235": 2.26874, "13240": 2.27095, "13245": 2.30893, "13250": 2.27123, "13255": 2.29399, "13260": 2.29153, "13265": 2.27741, "13270": 2.27633, "13275": 2.27156, "13280": 2.26737, "13285": 2.28168, "13290": 2.30604, "13295": 2.30977, "13300": 2.24271, "13305": 2.26894, "13310": 2.26102, "13315": 2.22229, "13320": 2.25247, "13325": 2.30878, "13330": 2.27168, "13335": 2.30424, "13340": 2.28097, "13345": 2.29077, "13350": 2.25369, "13355": 2.27975, "13360": 2.22882, "13365": 2.25941, "13370": 2.32174, "13375": 2.31329, "13380": 2.29222, "13385": 2.29252, "13390": 2.31835, "13395": 2.27207, "13400": 2.27184, "13405": 2.32122, "13410": 2.26802, "13415": 2.26493, "13420": 2.29336, "13425": 2.25048, "13430": 2.28585, "13435": 2.30154, "13440": 2.32283, "13445": 2.27142, "13450": 2.2949, "13455": 2.30116, "13460": 2.29588, "13465": 2.28977, "13470": 2.28252, "13475": 2.28442, "13480": 2.27311, "13485": 2.28592, "13490": 2.25947, "13495": 2.24684, "13500": 2.23176, "13505": 2.286, "13510": 2.26311, "13515": 2.24889, "13520": 2.31326, "13525": 2.26237, "13530": 2.29902, "13535": 2.31138, "13540": 2.26962, "13545": 2.25494, "13550": 2.23909, "13555": 2.29693, "13560": 2.29296, "13565": 2.30222, "13570": 2.23661, "13575": 2.23045, "13580": 2.28157, "13585": 2.30548, "13590": 2.32873, "13595": 2.27367, "13600": 2.19852, "13605": 2.28908, "13610": 2.22143, "13615": 2.31705, "13620": 2.29283, "13625": 2.26405, "13630": 2.27247, "13635": 2.22796, "13640": 2.24569, "13645": 2.27137, "13650": 2.30207, "13655": 2.27222, "13660": 2.24397, "13665": 2.25135, "13670": 2.25066, "13675": 2.2795, "13680": 2.23164, "13685": 2.30015, "13690": 2.263, "13695": 2.27733, "13700": 2.27297, "13705": 2.26413, "13710": 2.24749, "13715": 2.26877, "13720": 2.27833, "13725": 2.29671, "13730": 2.32373, "13735": 2.34461, "13740": 2.27396, "13745": 2.27066, "13750": 2.32654, "13755": 2.26566, "13760": 2.27202, "13765": 2.28009, "13770": 2.29428, "13775": 2.34702, "13780": 2.21399, "13785": 2.22244, "13790": 2.28987, "13795": 2.2678, "13800": 2.30161, "13805": 2.27397, "13810": 2.25324, "13815": 2.24715, "13820": 2.26753, "13825": 2.24871, "13830": 2.28586, "13835": 2.28708, "13840": 2.20494, "13845": 2.26623, "13850": 2.2741, "13855": 2.30765, "13860": 2.28199, "13865": 2.26124, "13870": 2.21894, "13875": 2.25519, "13880": 2.24896, "13885": 2.26031, "13890": 2.22856, "13895": 2.29874, "13900": 2.2271, "13905": 2.27081, "13910": 2.22766, "13915": 2.27599, "13920": 2.25844, "13925": 2.29885, "13930": 2.2347, "13935": 2.28497, "13940": 2.31597, "13945": 2.27505, "13950": 2.23547, "13955": 2.29681, "13960": 2.24009, "13965": 2.24159, "13970": 2.25183, "13975": 2.27174, "13980": 2.27964, "13985": 2.2845, "13990": 2.2952, "13995": 2.26439, "14000": 2.23067, "14005": 2.25705, "14010": 2.2831, "14015": 2.30329, "14020": 2.22301, "14025": 2.23729, "14030": 2.27918, "14035": 2.25807, "14040": 2.26794, "14045": 2.2421, "14050": 2.2466, "14055": 2.26048, "14060": 2.21555, "14065": 2.3154, "14070": 2.25099, "14075": 2.24706, "14080": 2.31945, "14085": 2.2796, "14090": 2.25629, "14095": 
2.31402, "14100": 2.26547, "14105": 2.27183, "14110": 2.24525, "14115": 2.25277, "14120": 2.30176, "14125": 2.20707, "14130": 2.22433, "14135": 2.22723, "14140": 2.25621, "14145": 2.25819, "14150": 2.30353, "14155": 2.2426, "14160": 2.26048, "14165": 2.20818, "14170": 2.34739, "14175": 2.29828, "14180": 2.2285, "14185": 2.24406, "14190": 2.25237, "14195": 2.25692, "14200": 2.30262, "14205": 2.26141, "14210": 2.24704, "14215": 2.22083, "14220": 2.23604, "14225": 2.2809, "14230": 2.21527, "14235": 2.23686, "14240": 2.28301, "14245": 2.28014, "14250": 2.25412, "14255": 2.29256, "14260": 2.25096, "14265": 2.22856, "14270": 2.19706, "14275": 2.24572, "14280": 2.23912, "14285": 2.28371, "14290": 2.22828, "14295": 2.26356, "14300": 2.28211, "14305": 2.28233, "14310": 2.22137, "14315": 2.26463, "14320": 2.26212, "14325": 2.2908, "14330": 2.29192, "14335": 2.31109, "14340": 2.3013, "14345": 2.25506, "14350": 2.27361, "14355": 2.28979, "14360": 2.27712, "14365": 2.28039, "14370": 2.27155, "14375": 2.27079, "14380": 2.28127, "14385": 2.22103, "14390": 2.26647, "14395": 2.30047, "14400": 2.25897, "14405": 2.23723, "14410": 2.20951, "14415": 2.22234, "14420": 2.27251, "14425": 2.26997, "14430": 2.25904, "14435": 2.26619, "14440": 2.22155, "14445": 2.24171, "14450": 2.2541, "14455": 2.29241, "14460": 2.26703, "14465": 2.28625, "14470": 2.24318, "14475": 2.24285, "14480": 2.23389, "14485": 2.25815, "14490": 2.28947, "14495": 2.26555, "14500": 2.25154, "14505": 2.2828, "14510": 2.19781, "14515": 2.2746, "14520": 2.24191, "14525": 2.24755, "14530": 2.26066, "14535": 2.30043, "14540": 2.23375, "14545": 2.28005, "14550": 2.25571, "14555": 2.25661, "14560": 2.26161, "14565": 2.2714, "14570": 2.26885, "14575": 2.30167, "14580": 2.27867, "14585": 2.22438, "14590": 2.2331, "14595": 2.27016, "14600": 2.26315, "14605": 2.23641, "14610": 2.30983, "14615": 2.2661, "14620": 2.2989, "14625": 2.24743, "14630": 2.2647, "14635": 2.25619, "14640": 2.2609, "14645": 2.28082, "14650": 2.30966, "14655": 2.26783, "14660": 2.22843, "14665": 2.23044, "14670": 2.25996, "14675": 2.23219, "14680": 2.25266, "14685": 2.25615, "14690": 2.26885, "14695": 2.273, "14700": 2.26008, "14705": 2.24419, "14710": 2.22667, "14715": 2.26038, "14720": 2.24018, "14725": 2.22072, "14730": 2.2686, "14735": 2.24281, "14740": 2.25009, "14745": 2.20681, "14750": 2.23877, "14755": 2.32055, "14760": 2.22457, "14765": 2.25065, "14770": 2.24086, "14775": 2.2145, "14780": 2.21653, "14785": 2.26435, "14790": 2.27299, "14795": 2.23922, "14800": 2.28132, "14805": 2.2703, "14810": 2.277, "14815": 2.25949, "14820": 2.26024, "14825": 2.26521, "14830": 2.21293, "14835": 2.25174, "14840": 2.24268, "14845": 2.22512, "14850": 2.30825, "14855": 2.27955, "14860": 2.23685, "14865": 2.24023, "14870": 2.26787, "14875": 2.24209, "14880": 2.23372, "14885": 2.27888, "14890": 2.27049, "14895": 2.25464, "14900": 2.27517, "14905": 2.21792, "14910": 2.29258, "14915": 2.27042, "14920": 2.27142, "14925": 2.26137, "14930": 2.25661, "14935": 2.21069, "14940": 2.29061, "14945": 2.26525, "14950": 2.22938, "14955": 2.23041, "14960": 2.25913, "14965": 2.25231, "14970": 2.25351, "14975": 2.25021, "14980": 2.21251, "14985": 2.19543, "14990": 2.25193, "14995": 2.22868, "15000": 2.17977, "15005": 2.28988, "15010": 2.2263, "15015": 2.23866, "15020": 2.25927, "15025": 2.20465, "15030": 2.24969, "15035": 2.2294, "15040": 2.25592, "15045": 2.25309, "15050": 2.23502, "15055": 2.20113, "15060": 2.2426, "15065": 2.23169, "15070": 2.24738, "15075": 2.22658, "15080": 2.21879, "15085": 2.21201, 
"15090": 2.2637, "15095": 2.27222, "15100": 2.25559, "15105": 2.24115, "15110": 2.2294, "15115": 2.27283, "15120": 2.27579, "15125": 2.20695, "15130": 2.25348, "15135": 2.25106, "15140": 2.29619, "15145": 2.24014, "15150": 2.24642, "15155": 2.24057, "15160": 2.24666, "15165": 2.23374, "15170": 2.23241, "15175": 2.25486, "15180": 2.28059, "15185": 2.24519, "15190": 2.2445, "15195": 2.23902, "15200": 2.23049, "15205": 2.26964, "15210": 2.23568, "15215": 2.27511, "15220": 2.23997, "15225": 2.28266, "15230": 2.25762, "15235": 2.24458, "15240": 2.2207, "15245": 2.23317, "15250": 2.24448, "15255": 2.24122, "15260": 2.26386, "15265": 2.24813, "15270": 2.25642, "15275": 2.26275, "15280": 2.22676, "15285": 2.25657, "15290": 2.24688, "15295": 2.2559, "15300": 2.27123, "15305": 2.27252, "15310": 2.3105, "15315": 2.22187, "15320": 2.24516, "15325": 2.2509, "15330": 2.27687, "15335": 2.21641, "15340": 2.22104, "15345": 2.23885, "15350": 2.22289, "15355": 2.24141, "15360": 2.24335, "15365": 2.22094, "15370": 2.26742, "15375": 2.21861, "15380": 2.20891, "15385": 2.2061, "15390": 2.28183, "15395": 2.24503, "15400": 2.28091, "15405": 2.22907, "15410": 2.22878, "15415": 2.28197, "15420": 2.24617, "15425": 2.23746, "15430": 2.26137, "15435": 2.26632, "15440": 2.26075, "15445": 2.24664, "15450": 2.25997, "15455": 2.27046, "15460": 2.21454, "15465": 2.24372, "15470": 2.24965, "15475": 2.21759, "15480": 2.22405, "15485": 2.20312, "15490": 2.28102, "15495": 2.2421, "15500": 2.20396, "15505": 2.20726, "15510": 2.20819, "15515": 2.23877, "15520": 2.20466, "15525": 2.26779, "15530": 2.24921, "15535": 2.23536, "15540": 2.25159, "15545": 2.23653, "15550": 2.23253, "15555": 2.24051, "15560": 2.27492, "15565": 2.21496, "15570": 2.20726, "15575": 2.26435, "15580": 2.26531, "15585": 2.22791, "15590": 2.26591, "15595": 2.18891, "15600": 2.30193, "15605": 2.24878, "15610": 2.20736, "15615": 2.23167, "15620": 2.23327, "15625": 2.19672, "15630": 2.1943, "15635": 2.20467, "15640": 2.23222, "15645": 2.25391, "15650": 2.20702, "15655": 2.21312, "15660": 2.21716, "15665": 2.24114, "15670": 2.21358, "15675": 2.23025, "15680": 2.21369, "15685": 2.26312, "15690": 2.20486, "15695": 2.19672, "15700": 2.24469, "15705": 2.19429, "15710": 2.19666, "15715": 2.24965, "15720": 2.24365, "15725": 2.26443, "15730": 2.23697, "15735": 2.28952, "15740": 2.19175, "15745": 2.23533, "15750": 2.22425, "15755": 2.26002, "15760": 2.26293, "15765": 2.25339, "15770": 2.25575, "15775": 2.21611, "15780": 2.28037, "15785": 2.19663, "15790": 2.24342, "15795": 2.24181, "15800": 2.22055, "15805": 2.23641, "15810": 2.16185, "15815": 2.27231, "15820": 2.22533, "15825": 2.20262, "15830": 2.2042, "15835": 2.2072, "15840": 2.25298, "15845": 2.22359, "15850": 2.21866, "15855": 2.23734, "15860": 2.22935, "15865": 2.24302, "15870": 2.23509, "15875": 2.26453, "15880": 2.24443, "15885": 2.20471, "15890": 2.21579, "15895": 2.27924, "15900": 2.19698, "15905": 2.29148, "15910": 2.25224, "15915": 2.1962, "15920": 2.2656, "15925": 2.22161, "15930": 2.23362, "15935": 2.23203, "15940": 2.19204, "15945": 2.24016, "15950": 2.22655, "15955": 2.22054, "15960": 2.23323, "15965": 2.22276, "15970": 2.22851, "15975": 2.19944, "15980": 2.2511, "15985": 2.2176, "15990": 2.23201, "15995": 2.23884, "16000": 2.20434, "16005": 2.21057, "16010": 2.18305, "16015": 2.21192, "16020": 2.21541, "16025": 2.24033, "16030": 2.24525, "16035": 2.17242, "16040": 2.27383, "16045": 2.20978, "16050": 2.24201, "16055": 2.22347, "16060": 2.19631, "16065": 2.23404, "16070": 2.24319, "16075": 2.18459, 
"16080": 2.27573, "16085": 2.22857, "16090": 2.2158, "16095": 2.23134, "16100": 2.22049, "16105": 2.26988, "16110": 2.26421, "16115": 2.19765, "16120": 2.19646, "16125": 2.23463, "16130": 2.2113, "16135": 2.2507, "16140": 2.1872, "16145": 2.23676, "16150": 2.20931, "16155": 2.24544, "16160": 2.27864, "16165": 2.20702, "16170": 2.20036, "16175": 2.17364, "16180": 2.24238, "16185": 2.23131, "16190": 2.23186, "16195": 2.25269, "16200": 2.18756, "16205": 2.23956, "16210": 2.24208, "16215": 2.22705, "16220": 2.2445, "16225": 2.24644, "16230": 2.22745, "16235": 2.21172, "16240": 2.26562, "16245": 2.21675, "16250": 2.20704, "16255": 2.21538, "16260": 2.22449, "16265": 2.24353, "16270": 2.24164, "16275": 2.23281, "16280": 2.16963, "16285": 2.23757, "16290": 2.24092, "16295": 2.22678, "16300": 2.26761, "16305": 2.20965, "16310": 2.19952, "16315": 2.20648, "16320": 2.2957, "16325": 2.24925, "16330": 2.18888, "16335": 2.19019, "16340": 2.18239, "16345": 2.21649, "16350": 2.26061, "16355": 2.22504, "16360": 2.22334, "16365": 2.22078, "16370": 2.23979, "16375": 2.23915, "16380": 2.21966, "16385": 2.20811, "16390": 2.20911, "16395": 2.2271, "16400": 2.20099, "16405": 2.21655, "16410": 2.24889, "16415": 2.21637, "16420": 2.23056, "16425": 2.20812, "16430": 2.2769, "16435": 2.25091, "16440": 2.24396, "16445": 2.20858, "16450": 2.2084, "16455": 2.25965, "16460": 2.24494, "16465": 2.24198, "16470": 2.18277, "16475": 2.22092, "16480": 2.15779, "16485": 2.25506, "16490": 2.20356, "16495": 2.22225, "16500": 2.23111, "16505": 2.20607, "16510": 2.24196, "16515": 2.26393, "16520": 2.22827, "16525": 2.172, "16530": 2.2621, "16535": 2.18329, "16540": 2.25431, "16545": 2.20124, "16550": 2.19573, "16555": 2.22409, "16560": 2.24819, "16565": 2.24108, "16570": 2.23197, "16575": 2.19632, "16580": 2.18857, "16585": 2.21233, "16590": 2.23028, "16595": 2.18295, "16600": 2.19351, "16605": 2.21518, "16610": 2.22952, "16615": 2.20828, "16620": 2.21205, "16625": 2.20824, "16630": 2.2387, "16635": 2.20393, "16640": 2.23443, "16645": 2.21199, "16650": 2.25188, "16655": 2.2562, "16660": 2.2203, "16665": 2.18899, "16670": 2.21131, "16675": 2.22809, "16680": 2.22014, "16685": 2.22407, "16690": 2.21843, "16695": 2.26856, "16700": 2.18797, "16705": 2.22494, "16710": 2.23875, "16715": 2.27295, "16720": 2.23967, "16725": 2.23981, "16730": 2.18051, "16735": 2.20797, "16740": 2.19298, "16745": 2.21851, "16750": 2.22431, "16755": 2.21201, "16760": 2.19524, "16765": 2.21444, "16770": 2.22351, "16775": 2.20566, "16780": 2.23687, "16785": 2.22342, "16790": 2.21503, "16795": 2.25832, "16800": 2.22103, "16805": 2.24585, "16810": 2.17213, "16815": 2.2287, "16820": 2.22911, "16825": 2.22208, "16830": 2.22572, "16835": 2.19645, "16840": 2.2042, "16845": 2.14498, "16850": 2.2471, "16855": 2.22748, "16860": 2.23159, "16865": 2.25433, "16870": 2.19095, "16875": 2.17744, "16880": 2.22185, "16885": 2.20914, "16890": 2.24606, "16895": 2.1812, "16900": 2.24469, "16905": 2.24636, "16910": 2.2235, "16915": 2.2379, "16920": 2.21194, "16925": 2.19506, "16930": 2.21344, "16935": 2.19904, "16940": 2.24134, "16945": 2.19789, "16950": 2.21885, "16955": 2.23527, "16960": 2.2274, "16965": 2.18237, "16970": 2.19056, "16975": 2.21468, "16980": 2.21474, "16985": 2.20981, "16990": 2.22273, "16995": 2.173, "17000": 2.26311, "17005": 2.24765, "17010": 2.22107, "17015": 2.18842, "17020": 2.22802, "17025": 2.17172, "17030": 2.19625, "17035": 2.20099, "17040": 2.23226, "17045": 2.205, "17050": 2.16246, "17055": 2.21725, "17060": 2.24505, "17065": 2.18956, "17070": 
2.18247, "17075": 2.20926, "17080": 2.21139, "17085": 2.22716, "17090": 2.23963, "17095": 2.21784, "17100": 2.25488, "17105": 2.25087, "17110": 2.22603, "17115": 2.19324, "17120": 2.17134, "17125": 2.21469, "17130": 2.24885, "17135": 2.19814, "17140": 2.23438, "17145": 2.22379, "17150": 2.18645, "17155": 2.19048, "17160": 2.26294, "17165": 2.21659, "17170": 2.2291, "17175": 2.21383, "17180": 2.20328, "17185": 2.21457, "17190": 2.16515, "17195": 2.22091, "17200": 2.21627, "17205": 2.19729, "17210": 2.23379, "17215": 2.20164, "17220": 2.22897, "17225": 2.20838, "17230": 2.22746, "17235": 2.21223, "17240": 2.20605, "17245": 2.21004, "17250": 2.20278, "17255": 2.18889, "17260": 2.21508, "17265": 2.21088, "17270": 2.21543, "17275": 2.25657, "17280": 2.21637, "17285": 2.22832, "17290": 2.21336, "17295": 2.22711, "17300": 2.2061, "17305": 2.22568, "17310": 2.23374, "17315": 2.22531, "17320": 2.20687, "17325": 2.25749, "17330": 2.24376, "17335": 2.23437, "17340": 2.15815, "17345": 2.1908, "17350": 2.18676, "17355": 2.22369, "17360": 2.19005, "17365": 2.19435, "17370": 2.2098, "17375": 2.23888, "17380": 2.21464, "17385": 2.19578, "17390": 2.20222, "17395": 2.18432, "17400": 2.18878, "17405": 2.23715, "17410": 2.19603, "17415": 2.1787, "17420": 2.21657, "17425": 2.20199, "17430": 2.19578, "17435": 2.19258, "17440": 2.22656, "17445": 2.16703, "17450": 2.22065, "17455": 2.19388, "17460": 2.20789, "17465": 2.17001, "17470": 2.21117, "17475": 2.23408, "17480": 2.18041, "17485": 2.22712, "17490": 2.19562, "17495": 2.16716, "17500": 2.21055, "17505": 2.20713, "17510": 2.1713, "17515": 2.21497, "17520": 2.19658, "17525": 2.20757, "17530": 2.20027, "17535": 2.18994, "17540": 2.21117, "17545": 2.16733, "17550": 2.2107, "17555": 2.16034, "17560": 2.18521, "17565": 2.21242, "17570": 2.19298, "17575": 2.19285, "17580": 2.18318, "17585": 2.19937, "17590": 2.25748, "17595": 2.2242, "17600": 2.24497, "17605": 2.20767, "17610": 2.2005, "17615": 2.21122, "17620": 2.21584, "17625": 2.14569, "17630": 2.20592, "17635": 2.1879, "17640": 2.21068, "17645": 2.27923, "17650": 2.18232, "17655": 2.20699, "17660": 2.24365, "17665": 2.22019, "17670": 2.22732, "17675": 2.22696, "17680": 2.19996, "17685": 2.2076, "17690": 2.1618, "17695": 2.24236, "17700": 2.21538, "17705": 2.24597, "17710": 2.1647, "17715": 2.15413, "17720": 2.2151, "17725": 2.21547, "17730": 2.19728, "17735": 2.18719, "17740": 2.18188, "17745": 2.2145, "17750": 2.26362, "17755": 2.20403, "17760": 2.20246, "17765": 2.18506, "17770": 2.19727, "17775": 2.2175, "17780": 2.24009, "17785": 2.20184, "17790": 2.18475, "17795": 2.20479, "17800": 2.18445, "17805": 2.19447, "17810": 2.19756, "17815": 2.20463, "17820": 2.16656, "17825": 2.259, "17830": 2.24037, "17835": 2.21995, "17840": 2.18527, "17845": 2.18214, "17850": 2.19891, "17855": 2.20758, "17860": 2.17869, "17865": 2.18176, "17870": 2.24069, "17875": 2.20986, "17880": 2.18334, "17885": 2.23201, "17890": 2.2231, "17895": 2.21626, "17900": 2.15789, "17905": 2.20736, "17910": 2.20452, "17915": 2.1969, "17920": 2.24178, "17925": 2.19462, "17930": 2.16635, "17935": 2.20613, "17940": 2.21965, "17945": 2.19277, "17950": 2.23078, "17955": 2.22622, "17960": 2.17316, "17965": 2.19892, "17970": 2.22889, "17975": 2.13626, "17980": 2.19802, "17985": 2.21082, "17990": 2.2211, "17995": 2.20861, "18000": 2.19092, "18005": 2.19321, "18010": 2.21281, "18015": 2.19061, "18020": 2.22331, "18025": 2.21377, "18030": 2.21097, "18035": 2.22023, "18040": 2.21364, "18045": 2.21695, "18050": 2.21525, "18055": 2.20792, "18060": 2.23189, 
"18065": 2.17622, "18070": 2.23871, "18075": 2.21325, "18080": 2.15775, "18085": 2.22191, "18090": 2.17794, "18095": 2.19138, "18100": 2.15929, "18105": 2.1846, "18110": 2.20952, "18115": 2.24375, "18120": 2.2376, "18125": 2.19207, "18130": 2.20191, "18135": 2.15854, "18140": 2.20346, "18145": 2.18676, "18150": 2.20789, "18155": 2.20248, "18160": 2.23652, "18165": 2.22614, "18170": 2.21133, "18175": 2.1916, "18180": 2.21076, "18185": 2.19274, "18190": 2.18646, "18195": 2.16035, "18200": 2.23142, "18205": 2.20169, "18210": 2.20634, "18215": 2.16964, "18220": 2.17719, "18225": 2.22733, "18230": 2.22773, "18235": 2.1917, "18240": 2.20324, "18245": 2.20843, "18250": 2.18351, "18255": 2.28204, "18260": 2.21039, "18265": 2.20862, "18270": 2.18473, "18275": 2.18581, "18280": 2.20056, "18285": 2.21968, "18290": 2.17868, "18295": 2.21771, "18300": 2.22493, "18305": 2.24893, "18310": 2.24074, "18315": 2.22117, "18320": 2.1812, "18325": 2.21478, "18330": 2.20271, "18335": 2.21441, "18340": 2.20156, "18345": 2.18085, "18350": 2.24194, "18355": 2.17072, "18360": 2.22654, "18365": 2.18459, "18370": 2.16064, "18375": 2.2127, "18380": 2.21268, "18385": 2.2075, "18390": 2.18771, "18395": 2.2412, "18400": 2.19567, "18405": 2.23818, "18410": 2.20639, "18415": 2.17262, "18420": 2.17941, "18425": 2.18159, "18430": 2.1532, "18435": 2.19474, "18440": 2.19922, "18445": 2.16617, "18450": 2.21663, "18455": 2.15394, "18460": 2.19594, "18465": 2.20902, "18470": 2.19627, "18475": 2.15241, "18480": 2.19928, "18485": 2.16016, "18490": 2.19956, "18495": 2.24343, "18500": 2.19729, "18505": 2.16842, "18510": 2.22048, "18515": 2.17577, "18520": 2.19094, "18525": 2.17378, "18530": 2.18015, "18535": 2.17338, "18540": 2.21369, "18545": 2.17643, "18550": 2.2176, "18555": 2.16992, "18560": 2.19244, "18565": 2.22764, "18570": 2.21336, "18575": 2.14604, "18580": 2.2221, "18585": 2.2102, "18590": 2.21349, "18595": 2.18116, "18600": 2.15912, "18605": 2.21113, "18610": 2.20936, "18615": 2.19783, "18620": 2.21537, "18625": 2.19813, "18630": 2.17213, "18635": 2.19955, "18640": 2.16916, "18645": 2.17469, "18650": 2.25863, "18655": 2.16602, "18660": 2.23827, "18665": 2.22504, "18670": 2.20831, "18675": 2.19234, "18680": 2.2084, "18685": 2.18026, "18690": 2.21383, "18695": 2.15706, "18700": 2.16266, "18705": 2.18302, "18710": 2.24512, "18715": 2.1781, "18720": 2.21879, "18725": 2.1834, "18730": 2.18299, "18735": 2.14026, "18740": 2.19335, "18745": 2.21695, "18750": 2.21689, "18755": 2.19752, "18760": 2.22457, "18765": 2.15914, "18770": 2.15213, "18775": 2.21437, "18780": 2.16924, "18785": 2.21181, "18790": 2.2019, "18795": 2.20662, "18800": 2.18745, "18805": 2.18372, "18810": 2.20772, "18815": 2.16942, "18820": 2.18976, "18825": 2.21133, "18830": 2.20043, "18835": 2.22123, "18840": 2.14495, "18845": 2.19675, "18850": 2.18768, "18855": 2.17767, "18860": 2.15831, "18865": 2.18366, "18870": 2.16631, "18875": 2.1641, "18880": 2.2107, "18885": 2.17591, "18890": 2.18002, "18895": 2.19929, "18900": 2.17186, "18905": 2.18516, "18910": 2.1805, "18915": 2.1761, "18920": 2.19196, "18925": 2.27241, "18930": 2.20002, "18935": 2.2073, "18940": 2.23544, "18945": 2.26259, "18950": 2.19286, "18955": 2.19042, "18960": 2.20764, "18965": 2.14257, "18970": 2.20939, "18975": 2.22146, "18980": 2.20637, "18985": 2.19244, "18990": 2.23398, "18995": 2.19825, "19000": 2.16565, "19005": 2.16901, "19010": 2.20003, "19015": 2.19801, "19020": 2.20519, "19025": 2.16926, "19030": 2.21995, "19035": 2.16604, "19040": 2.14999, "19045": 2.22083, "19050": 2.16442, "19055": 
2.18866, "19060": 2.187, "19065": 2.19109, "19070": 2.17532, "19075": 2.21806, "19080": 2.18666, "19085": 2.17899, "19090": 2.17863, "19095": 2.16642, "19100": 2.20048, "19105": 2.19494, "19110": 2.17443, "19115": 2.20327, "19120": 2.19404, "19125": 2.21443, "19130": 2.14888, "19135": 2.22845, "19140": 2.21441, "19145": 2.19559, "19150": 2.18534, "19155": 2.21377, "19160": 2.1852, "19165": 2.1314, "19170": 2.17638, "19175": 2.18514, "19180": 2.12761, "19185": 2.1935, "19190": 2.18724, "19195": 2.20804, "19200": 2.20378, "19205": 2.1871, "19210": 2.18737, "19215": 2.13451, "19220": 2.17889, "19225": 2.16364, "19230": 2.22186, "19235": 2.2131, "19240": 2.17384, "19245": 2.17538, "19250": 2.18701, "19255": 2.15132, "19260": 2.21864, "19265": 2.15574, "19270": 2.17345, "19275": 2.18948, "19280": 2.17734, "19285": 2.14107, "19290": 2.16922, "19295": 2.18955, "19300": 2.17062, "19305": 2.22445, "19310": 2.22347, "19315": 2.20846, "19320": 2.16172, "19325": 2.19281, "19330": 2.22074, "19335": 2.21853, "19340": 2.2179, "19345": 2.19498, "19350": 2.16798, "19355": 2.13389, "19360": 2.15565, "19365": 2.18191, "19370": 2.18506, "19375": 2.19379, "19380": 2.1651, "19385": 2.1597, "19390": 2.17774, "19395": 2.18309, "19400": 2.18548, "19405": 2.17875, "19410": 2.1647, "19415": 2.18344, "19420": 2.1937, "19425": 2.18061, "19430": 2.24236, "19435": 2.17225, "19440": 2.16795, "19445": 2.18216, "19450": 2.17772, "19455": 2.17197, "19460": 2.20252, "19465": 2.17159, "19470": 2.18217, "19475": 2.22712, "19480": 2.18749, "19485": 2.17006, "19490": 2.18883, "19495": 2.17821, "19500": 2.20445, "19505": 2.1517, "19510": 2.21262, "19515": 2.17422, "19520": 2.19338, "19525": 2.17166, "19530": 2.16346, "19535": 2.13421, "19540": 2.21842, "19545": 2.18567, "19550": 2.1472, "19555": 2.22321, "19560": 2.18658, "19565": 2.15171, "19570": 2.1778, "19575": 2.17479, "19580": 2.18861, "19585": 2.21819, "19590": 2.20546, "19595": 2.19571, "19600": 2.20015, "19605": 2.21495, "19610": 2.19301, "19615": 2.17685, "19620": 2.21443, "19625": 2.19095, "19630": 2.19199, "19635": 2.19132, "19640": 2.17147, "19645": 2.1467, "19650": 2.1735, "19655": 2.1527, "19660": 2.17177, "19665": 2.1733, "19670": 2.17979, "19675": 2.20872, "19680": 2.19373, "19685": 2.17966, "19690": 2.18571, "19695": 2.15685, "19700": 2.16672, "19705": 2.18822, "19710": 2.24412, "19715": 2.15758, "19720": 2.15271, "19725": 2.23147, "19730": 2.17206, "19735": 2.181, "19740": 2.21899, "19745": 2.20409, "19750": 2.18629, "19755": 2.17353, "19760": 2.15818, "19765": 2.21138, "19770": 2.21197, "19775": 2.17169, "19780": 2.15749, "19785": 2.17335, "19790": 2.22805, "19795": 2.16633, "19800": 2.16424, "19805": 2.16652, "19810": 2.21848, "19815": 2.19068, "19820": 2.20309, "19825": 2.21376, "19830": 2.16991, "19835": 2.1835, "19840": 2.20526, "19845": 2.166, "19850": 2.17374, "19855": 2.177, "19860": 2.18478, "19865": 2.16993, "19870": 2.20882, "19875": 2.13416, "19880": 2.16707, "19885": 2.15516, "19890": 2.16373, "19895": 2.20626, "19900": 2.18509, "19905": 2.15541, "19910": 2.17454, "19915": 2.19609, "19920": 2.10769, "19925": 2.16538, "19930": 2.14836, "19935": 2.17317, "19940": 2.17682, "19945": 2.18426, "19950": 2.16881, "19955": 2.17014, "19960": 2.16452, "19965": 2.16755, "19970": 2.12889, "19975": 2.17789, "19980": 2.21524, "19985": 2.17162, "19990": 2.17213, "19995": 2.19698, "20000": 2.22117, "20005": 2.19178, "20010": 2.17581, "20015": 2.19096, "20020": 2.16373, "20025": 2.11816, "20030": 2.14627, "20035": 2.18512, "20040": 2.19521, "20045": 2.19665, 
"20050": 2.19628, "20055": 2.18991, "20060": 2.20444, "20065": 2.16578, "20070": 2.18633, "20075": 2.15008, "20080": 2.1641, "20085": 2.19327, "20090": 2.17938, "20095": 2.16376, "20100": 2.18979, "20105": 2.14261, "20110": 2.17485, "20115": 2.15901, "20120": 2.18961, "20125": 2.16367, "20130": 2.17294, "20135": 2.18237, "20140": 2.16375, "20145": 2.17763, "20150": 2.14412, "20155": 2.23155, "20160": 2.18071, "20165": 2.17755, "20170": 2.16625, "20175": 2.14994, "20180": 2.18536, "20185": 2.1851, "20190": 2.19508, "20195": 2.19961, "20200": 2.15979, "20205": 2.18119, "20210": 2.17653, "20215": 2.18864, "20220": 2.17955, "20225": 2.21378, "20230": 2.17088, "20235": 2.20922, "20240": 2.18446, "20245": 2.19155, "20250": 2.14343, "20255": 2.14728, "20260": 2.17404, "20265": 2.17996, "20270": 2.18006, "20275": 2.1816, "20280": 2.14984, "20285": 2.16943, "20290": 2.1921, "20295": 2.19744, "20300": 2.1525, "20305": 2.21724, "20310": 2.11438, "20315": 2.17021, "20320": 2.18621, "20325": 2.18711, "20330": 2.15281, "20335": 2.20832, "20340": 2.17414, "20345": 2.16847, "20350": 2.14683, "20355": 2.19263, "20360": 2.19615, "20365": 2.16999, "20370": 2.20088, "20375": 2.18569, "20380": 2.18355, "20385": 2.17963, "20390": 2.15445, "20395": 2.15536, "20400": 2.26344, "20405": 2.15138, "20410": 2.14383, "20415": 2.19653, "20420": 2.15733, "20425": 2.17847, "20430": 2.16653, "20435": 2.14876, "20440": 2.16023, "20445": 2.18213, "20450": 2.17377, "20455": 2.20933, "20460": 2.1799, "20465": 2.16824, "20470": 2.18085, "20475": 2.15923, "20480": 2.19493, "20485": 2.19784, "20490": 2.19531, "20495": 2.17005, "20500": 2.17337, "20505": 2.15707, "20510": 2.19014, "20515": 2.18798, "20520": 2.15813, "20525": 2.15847, "20530": 2.17383, "20535": 2.18981, "20540": 2.15524, "20545": 2.15583, "20550": 2.15085, "20555": 2.12696, "20560": 2.17162, "20565": 2.18542, "20570": 2.17662, "20575": 2.15636, "20580": 2.19926, "20585": 2.16174, "20590": 2.19083, "20595": 2.13156, "20600": 2.14885, "20605": 2.18351, "20610": 2.19694, "20615": 2.15617, "20620": 2.14488, "20625": 2.14642, "20630": 2.12363, "20635": 2.14041, "20640": 2.19571, "20645": 2.19216, "20650": 2.17894, "20655": 2.20783, "20660": 2.18743, "20665": 2.18487, "20670": 2.16926, "20675": 2.11756, "20680": 2.17457, "20685": 2.18933, "20690": 2.18984, "20695": 2.19816, "20700": 2.13683, "20705": 2.19122, "20710": 2.15497, "20715": 2.1748, "20720": 2.22715, "20725": 2.18044, "20730": 2.1534, "20735": 2.14391, "20740": 2.16126, "20745": 2.18936, "20750": 2.17912, "20755": 2.18483, "20760": 2.16115, "20765": 2.15323, "20770": 2.18309, "20775": 2.23305, "20780": 2.18876, "20785": 2.17963, "20790": 2.16238, "20795": 2.17015, "20800": 2.20679, "20805": 2.17327, "20810": 2.20301, "20815": 2.16498, "20820": 2.19734, "20825": 2.1824, "20830": 2.14627, "20835": 2.14243, "20840": 2.19251, "20845": 2.21814, "20850": 2.18329, "20855": 2.20867, "20860": 2.18759, "20865": 2.19187, "20870": 2.20729, "20875": 2.2057, "20880": 2.18725, "20885": 2.1847, "20890": 2.17537, "20895": 2.16339, "20900": 2.1786, "20905": 2.17951, "20910": 2.17996, "20915": 2.16891, "20920": 2.17069, "20925": 2.18127, "20930": 2.19872, "20935": 2.20472, "20940": 2.15939, "20945": 2.14811, "20950": 2.17522, "20955": 2.20313, "20960": 2.17461, "20965": 2.14452, "20970": 2.16394, "20975": 2.16964, "20980": 2.15049, "20985": 2.18439, "20990": 2.16792, "20995": 2.11975, "21000": 2.14771, "21005": 2.19557, "21010": 2.20576, "21015": 2.12044, "21020": 2.1549, "21025": 2.15546, "21030": 2.14708, "21035": 2.14473, 
"21040": 2.14109, "21045": 2.171, "21050": 2.12942, "21055": 2.17106, "21060": 2.10015, "21065": 2.27051, "21070": 2.17798, "21075": 2.19201, "21080": 2.18754, "21085": 2.19809, "21090": 2.18437, "21095": 2.20419, "21100": 2.16753, "21105": 2.19971, "21110": 2.17484, "21115": 2.19263, "21120": 2.20859, "21125": 2.16484, "21130": 2.19198, "21135": 2.1779, "21140": 2.15021, "21145": 2.18804, "21150": 2.16078, "21155": 2.16841, "21160": 2.15725, "21165": 2.1613, "21170": 2.14764, "21175": 2.16085, "21180": 2.16933, "21185": 2.1966, "21190": 2.14398, "21195": 2.15847, "21200": 2.17247, "21205": 2.18909, "21210": 2.15898, "21215": 2.1478, "21220": 2.17818, "21225": 2.15456, "21230": 2.17928, "21235": 2.15588, "21240": 2.18713, "21245": 2.15734, "21250": 2.1517, "21255": 2.14255, "21260": 2.18992, "21265": 2.21926, "21270": 2.22612, "21275": 2.21743, "21280": 2.19475, "21285": 2.1801, "21290": 2.15852, "21295": 2.14612, "21300": 2.21622, "21305": 2.21616, "21310": 2.16975, "21315": 2.17048, "21320": 2.16175, "21325": 2.13239, "21330": 2.15726, "21335": 2.12556, "21340": 2.17941, "21345": 2.16216, "21350": 2.14035, "21355": 2.18469, "21360": 2.1696, "21365": 2.19059, "21370": 2.14463, "21375": 2.14517, "21380": 2.15618, "21385": 2.18068, "21390": 2.18458, "21395": 2.13348, "21400": 2.18515, "21405": 2.2014, "21410": 2.15721, "21415": 2.18946, "21420": 2.21125, "21425": 2.17046, "21430": 2.20573, "21435": 2.15866, "21440": 2.20669, "21445": 2.17205, "21450": 2.16632, "21455": 2.18938, "21460": 2.16222, "21465": 2.16632, "21470": 2.19873, "21475": 2.14604, "21480": 2.19569, "21485": 2.21645, "21490": 2.21248, "21495": 2.18156, "21500": 2.14153, "21505": 2.18355, "21510": 2.17111, "21515": 2.17867, "21520": 2.13356, "21525": 2.15927, "21530": 2.12408, "21535": 2.15861, "21540": 2.18723, "21545": 2.17267, "21550": 2.18654, "21555": 2.15728, "21560": 2.15302, "21565": 2.14231, "21570": 2.12637, "21575": 2.19394, "21580": 2.15926, "21585": 2.18104, "21590": 2.19901, "21595": 2.1902, "21600": 2.18474, "21605": 2.18173, "21610": 2.16629, "21615": 2.15979, "21620": 2.18367, "21625": 2.18037, "21630": 2.20064, "21635": 2.13752, "21640": 2.18504, "21645": 2.17159, "21650": 2.1661, "21655": 2.17655, "21660": 2.15915, "21665": 2.10873, "21670": 2.17854, "21675": 2.13846, "21680": 2.17051, "21685": 2.14174, "21690": 2.12537, "21695": 2.17608, "21700": 2.16135, "21705": 2.18615, "21710": 2.09541, "21715": 2.14057, "21720": 2.18523, "21725": 2.15555, "21730": 2.15936, "21735": 2.1318, "21740": 2.16706, "21745": 2.18395, "21750": 2.16847, "21755": 2.18098, "21760": 2.14105, "21765": 2.12816, "21770": 2.14824, "21775": 2.16294, "21780": 2.19564, "21785": 2.17697, "21790": 2.1621, "21795": 2.16185, "21800": 2.13345, "21805": 2.16218, "21810": 2.16696, "21815": 2.18757, "21820": 2.153, "21825": 2.16848, "21830": 2.12694, "21835": 2.1439, "21840": 2.16917, "21845": 2.14999, "21850": 2.18294, "21855": 2.1425, "21860": 2.16657, "21865": 2.16947, "21870": 2.1431, "21875": 2.18161, "21880": 2.14911, "21885": 2.18262, "21890": 2.1797, "21895": 2.16234, "21900": 2.19183, "21905": 2.1784, "21910": 2.17465, "21915": 2.19013, "21920": 2.16067, "21925": 2.19193, "21930": 2.13367, "21935": 2.20197, "21940": 2.15076, "21945": 2.17321, "21950": 2.16784, "21955": 2.12477, "21960": 2.11399, "21965": 2.17824, "21970": 2.156, "21975": 2.14096, "21980": 2.18114, "21985": 2.13447, "21990": 2.16557, "21995": 2.17357, "22000": 2.20938, "22005": 2.14777, "22010": 2.18127, "22015": 2.1744, "22020": 2.19442, "22025": 2.15363, "22030": 
2.16685, "22035": 2.12111, "22040": 2.18725, "22045": 2.20475, "22050": 2.12231, "22055": 2.13934, "22060": 2.17479, "22065": 2.14848, "22070": 2.14109, "22075": 2.17038, "22080": 2.19984, "22085": 2.13387, "22090": 2.167, "22095": 2.15354, "22100": 2.15302, "22105": 2.18602, "22110": 2.16062, "22115": 2.14146, "22120": 2.17027, "22125": 2.14351, "22130": 2.18497, "22135": 2.16019, "22140": 2.19006, "22145": 2.1479, "22150": 2.18671, "22155": 2.13551, "22160": 2.135, "22165": 2.17669, "22170": 2.14165, "22175": 2.19581, "22180": 2.12177, "22185": 2.15406, "22190": 2.16763, "22195": 2.17618, "22200": 2.181, "22205": 2.17901, "22210": 2.10328, "22215": 2.14171, "22220": 2.19008, "22225": 2.12351, "22230": 2.17358, "22235": 2.17955, "22240": 2.13902, "22245": 2.18343, "22250": 2.1763, "22255": 2.13078, "22260": 2.19134, "22265": 2.12578, "22270": 2.14905, "22275": 2.14637, "22280": 2.19027, "22285": 2.25382, "22290": 2.17345, "22295": 2.17834, "22300": 2.14327, "22305": 2.12737, "22310": 2.1608, "22315": 2.1556, "22320": 2.15124, "22325": 2.15839, "22330": 2.14512, "22335": 2.19067, "22340": 2.16934, "22345": 2.16245, "22350": 2.19191, "22355": 2.16126, "22360": 2.17952, "22365": 2.17233, "22370": 2.20475, "22375": 2.15288, "22380": 2.15615, "22385": 2.15589, "22390": 2.17093, "22395": 2.17351, "22400": 2.15767, "22405": 2.1031, "22410": 2.18355, "22415": 2.21361, "22420": 2.17387, "22425": 2.18068, "22430": 2.13022, "22435": 2.16683, "22440": 2.19119, "22445": 2.2019, "22450": 2.1415, "22455": 2.14956, "22460": 2.15678, "22465": 2.1577, "22470": 2.19968, "22475": 2.19445, "22480": 2.11721, "22485": 2.14302, "22490": 2.17216, "22495": 2.1248, "22500": 2.09752, "22505": 2.17449, "22510": 2.12292, "22515": 2.14993, "22520": 2.18809, "22525": 2.14888, "22530": 2.14015, "22535": 2.16722, "22540": 2.16813, "22545": 2.20578, "22550": 2.21819, "22555": 2.13705, "22560": 2.14802, "22565": 2.16233, "22570": 2.14961, "22575": 2.15414, "22580": 2.09723, "22585": 2.18731, "22590": 2.1363, "22595": 2.14775, "22600": 2.17624, "22605": 2.1336, "22610": 2.15152, "22615": 2.14756, "22620": 2.11907, "22625": 2.20711, "22630": 2.17921, "22635": 2.19652, "22640": 2.13845, "22645": 2.11612, "22650": 2.17092, "22655": 2.13699, "22660": 2.16441, "22665": 2.1313, "22670": 2.15736, "22675": 2.11473, "22680": 2.16612, "22685": 2.2035, "22690": 2.16649, "22695": 2.16057, "22700": 2.141, "22705": 2.13255, "22710": 2.14794, "22715": 2.14774, "22720": 2.14235, "22725": 2.13635, "22730": 2.16235, "22735": 2.19152, "22740": 2.15345, "22745": 2.1511, "22750": 2.08878, "22755": 2.16734, "22760": 2.20028, "22765": 2.19222, "22770": 2.14872, "22775": 2.19182, "22780": 2.15673, "22785": 2.1572, "22790": 2.18504, "22795": 2.127, "22800": 2.12302, "22805": 2.11176, "22810": 2.14987, "22815": 2.08642, "22820": 2.17168, "22825": 2.14896, "22830": 2.15704, "22835": 2.13415, "22840": 2.19367, "22845": 2.18156, "22850": 2.15787, "22855": 2.13577, "22860": 2.13732, "22865": 2.15458, "22870": 2.14696, "22875": 2.13656, "22880": 2.17765, "22885": 2.15875, "22890": 2.13939, "22895": 2.13572, "22900": 2.16372, "22905": 2.14554, "22910": 2.16876, "22915": 2.1763, "22920": 2.14148, "22925": 2.13363, "22930": 2.17448, "22935": 2.14582, "22940": 2.16399, "22945": 2.17864, "22950": 2.11704, "22955": 2.18451, "22960": 2.13791, "22965": 2.09483, "22970": 2.17485, "22975": 2.171, "22980": 2.16585, "22985": 2.15641, "22990": 2.11398, "22995": 2.1933, "23000": 2.16659, "23005": 2.11705, "23010": 2.18533, "23015": 2.1376, "23020": 2.14452, "23025": 
2.14798, "23030": 2.10416, "23035": 2.18204, "23040": 2.15977, "23045": 2.16837, "23050": 2.15676, "23055": 2.16268, "23060": 2.15171, "23065": 2.14989, "23070": 2.14358, "23075": 2.17646, "23080": 2.15323, "23085": 2.1435, "23090": 2.11332, "23095": 2.15491, "23100": 2.11292, "23105": 2.13509, "23110": 2.18815, "23115": 2.17583, "23120": 2.15105, "23125": 2.12616, "23130": 2.16429, "23135": 2.19165, "23140": 2.13445, "23145": 2.12668, "23150": 2.14715, "23155": 2.16051, "23160": 2.17577, "23165": 2.18437, "23170": 2.12147, "23175": 2.14173, "23180": 2.19119, "23185": 2.14259, "23190": 2.16069, "23195": 2.13931, "23200": 2.13257, "23205": 2.13368, "23210": 2.17843, "23215": 2.18003, "23220": 2.15228, "23225": 2.15841, "23230": 2.18479, "23235": 2.13727, "23240": 2.16872, "23245": 2.18235, "23250": 2.18741, "23255": 2.18707, "23260": 2.20625, "23265": 2.14712, "23270": 2.17132, "23275": 2.17173, "23280": 2.14073, "23285": 2.10116, "23290": 2.20496, "23295": 2.15772, "23300": 2.19509, "23305": 2.20366, "23310": 2.11044, "23315": 2.156, "23320": 2.17841, "23325": 2.1801, "23330": 2.12048, "23335": 2.18712, "23340": 2.18221, "23345": 2.15968, "23350": 2.1459, "23355": 2.1443, "23360": 2.16884, "23365": 2.107, "23370": 2.18104, "23375": 2.1166, "23380": 2.10592, "23385": 2.1412, "23390": 2.13225, "23395": 2.17143, "23400": 2.13275, "23405": 2.11507, "23410": 2.13192, "23415": 2.12221, "23420": 2.17945, "23425": 2.20474, "23430": 2.17471, "23435": 2.16931, "23440": 2.13238, "23445": 2.10923, "23450": 2.14124, "23455": 2.16795, "23460": 2.18898, "23465": 2.18312, "23470": 2.09957, "23475": 2.11802, "23480": 2.16699, "23485": 2.14606, "23490": 2.16508, "23495": 2.11333, "23500": 2.17366, "23505": 2.11857, "23510": 2.14846, "23515": 2.13323, "23520": 2.16219, "23525": 2.11718, "23530": 2.13992, "23535": 2.13892, "23540": 2.1457, "23545": 2.10234, "23550": 2.13532, "23555": 2.19414, "23560": 2.15058, "23565": 2.15193, "23570": 2.15096, "23575": 2.14659, "23580": 2.14549, "23585": 2.17342, "23590": 2.14192, "23595": 2.12625, "23600": 2.11478, "23605": 2.18829, "23610": 2.16783, "23615": 2.14319, "23620": 2.13884, "23625": 2.17131, "23630": 2.18925, "23635": 2.17489, "23640": 2.18202, "23645": 2.16298, "23650": 2.1508, "23655": 2.15014, "23660": 2.12937, "23665": 2.16168, "23670": 2.1714, "23675": 2.1668, "23680": 2.13418, "23685": 2.16065, "23690": 2.21061, "23695": 2.16126, "23700": 2.11185, "23705": 2.14461, "23710": 2.17969, "23715": 2.10698, "23720": 2.09044, "23725": 2.15758, "23730": 2.15375, "23735": 2.16383, "23740": 2.13245, "23745": 2.19047, "23750": 2.1472, "23755": 2.16643, "23760": 2.16811, "23765": 2.19967, "23770": 2.1244, "23775": 2.13006, "23780": 2.14583, "23785": 2.12804, "23790": 2.16276, "23795": 2.16689, "23800": 2.14063, "23805": 2.17279, "23810": 2.12726, "23815": 2.17034, "23820": 2.11752, "23825": 2.17501, "23830": 2.1926, "23835": 2.16911, "23840": 2.09497, "23845": 2.16066, "23850": 2.19386, "23855": 2.10672, "23860": 2.147, "23865": 2.11698, "23870": 2.15454, "23875": 2.17636, "23880": 2.14325, "23885": 2.13193, "23890": 2.15237, "23895": 2.12483, "23900": 2.15946, "23905": 2.14216, "23910": 2.14877, "23915": 2.09697, "23920": 2.11371, "23925": 2.13351, "23930": 2.16581, "23935": 2.16066, "23940": 2.16743, "23945": 2.13634, "23950": 2.12924, "23955": 2.14702, "23960": 2.12892, "23965": 2.1668, "23970": 2.1522, "23975": 2.16604, "23980": 2.19061, "23985": 2.11983, "23990": 2.13366, "23995": 2.10699, "24000": 2.15441, "24005": 2.1676, "24010": 2.1694, "24015": 2.12743, 
"24020": 2.13471, "24025": 2.18747, "24030": 2.13023, "24035": 2.19107, "24040": 2.1321, "24045": 2.14259, "24050": 2.16956, "24055": 2.19361, "24060": 2.14398, "24065": 2.11797, "24070": 2.10863, "24075": 2.14346, "24080": 2.12159, "24085": 2.19451, "24090": 2.14807, "24095": 2.13874, "24100": 2.1516, "24105": 2.10797, "24110": 2.09939, "24115": 2.12946, "24120": 2.17435, "24125": 2.11143, "24130": 2.17784, "24135": 2.14156, "24140": 2.14533, "24145": 2.17696, "24150": 2.14203, "24155": 2.15071, "24160": 2.11011, "24165": 2.16908, "24170": 2.1706, "24175": 2.16703, "24180": 2.13855, "24185": 2.16176, "24190": 2.14157, "24195": 2.17087, "24200": 2.20186, "24205": 2.10983, "24210": 2.13922, "24215": 2.19236, "24220": 2.16432, "24225": 2.1754, "24230": 2.1656, "24235": 2.17702, "24240": 2.17027, "24245": 2.14538, "24250": 2.15832, "24255": 2.13773, "24260": 2.18334, "24265": 2.17546, "24270": 2.15989, "24275": 2.13713, "24280": 2.15447, "24285": 2.10695, "24290": 2.15466, "24295": 2.11713, "24300": 2.14668, "24305": 2.13398, "24310": 2.14844, "24315": 2.16052, "24320": 2.15726, "24325": 2.17533, "24330": 2.12558, "24335": 2.12761, "24340": 2.13157, "24345": 2.10692, "24350": 2.20562, "24355": 2.12857, "24360": 2.12588, "24365": 2.1346, "24370": 2.15945, "24375": 2.1288, "24380": 2.16761, "24385": 2.14991, "24390": 2.10526, "24395": 2.17739, "24400": 2.18675, "24405": 2.20731, "24410": 2.12029, "24415": 2.1523, "24420": 2.16777, "24425": 2.12095, "24430": 2.13545, "24435": 2.16134, "24440": 2.11709, "24445": 2.11789, "24450": 2.16944, "24455": 2.12856, "24460": 2.15495, "24465": 2.1182, "24470": 2.09788, "24475": 2.14004, "24480": 2.14291, "24485": 2.16266, "24490": 2.15156, "24495": 2.0972, "24500": 2.17693, "24505": 2.15852, "24510": 2.15903, "24515": 2.10183, "24520": 2.1416, "24525": 2.11404, "24530": 2.19407, "24535": 2.11699, "24540": 2.17899, "24545": 2.14283, "24550": 2.14344, "24555": 2.15259, "24560": 2.18662, "24565": 2.18779, "24570": 2.13915, "24575": 2.12533, "24580": 2.17327, "24585": 2.15896, "24590": 2.17776, "24595": 2.13174, "24600": 2.16252, "24605": 2.1644, "24610": 2.1793, "24615": 2.10426, "24620": 2.12368, "24625": 2.12738, "24630": 2.18203, "24635": 2.10629, "24640": 2.1689, "24645": 2.17597, "24650": 2.17203, "24655": 2.10734, "24660": 2.12659, "24665": 2.16685, "24670": 2.15431, "24675": 2.14967, "24680": 2.14079, "24685": 2.1438, "24690": 2.13513, "24695": 2.18143, "24700": 2.12313, "24705": 2.15419, "24710": 2.12765, "24715": 2.164, "24720": 2.16244, "24725": 2.15503, "24730": 2.16961, "24735": 2.11907, "24740": 2.13193, "24745": 2.13485, "24750": 2.14159, "24755": 2.16923, "24760": 2.13656, "24765": 2.1314, "24770": 2.14872, "24775": 2.13233, "24780": 2.10057, "24785": 2.14367, "24790": 2.16474, "24795": 2.14571, "24800": 2.13129, "24805": 2.17073, "24810": 2.14878, "24815": 2.13761, "24820": 2.12414, "24825": 2.16312, "24830": 2.12182, "24835": 2.15251, "24840": 2.16149, "24845": 2.17208, "24850": 2.14538, "24855": 2.15571, "24860": 2.12569, "24865": 2.08976, "24870": 2.14935, "24875": 2.20761, "24880": 2.17022, "24885": 2.14493, "24890": 2.13671, "24895": 2.16371, "24900": 2.13993, "24905": 2.15544, "24910": 2.14585, "24915": 2.14978, "24920": 2.0978, "24925": 2.14243, "24930": 2.14532, "24935": 2.19018, "24940": 2.09518, "24945": 2.13939, "24950": 2.12702, "24955": 2.13127, "24960": 2.12441, "24965": 2.15245, "24970": 2.09389, "24975": 2.14901, "24980": 2.13478, "24985": 2.17157, "24990": 2.15137, "24995": 2.12996, "25000": 2.10468, "25005": 2.09343, "25010": 
2.14596, "25015": 2.14001, "25020": 2.1059, "25025": 2.17019, "25030": 2.12371, "25035": 2.18654, "25040": 2.11822, "25045": 2.12322, "25050": 2.13852, "25055": 2.14918, "25060": 2.11615, "25065": 2.16195, "25070": 2.13596, "25075": 2.16663, "25080": 2.11985, "25085": 2.17567, "25090": 2.15815, "25095": 2.11397, "25100": 2.10551, "25105": 2.10105, "25110": 2.13678, "25115": 2.12597, "25120": 2.143, "25125": 2.11903, "25130": 2.11374, "25135": 2.13401, "25140": 2.10533, "25145": 2.19884, "25150": 2.14265, "25155": 2.15892, "25160": 2.12189, "25165": 2.1075, "25170": 2.17377, "25175": 2.11619, "25180": 2.12564, "25185": 2.14689, "25190": 2.14838, "25195": 2.15968, "25200": 2.13385, "25205": 2.17871, "25210": 2.18743, "25215": 2.11674, "25220": 2.15358, "25225": 2.13287, "25230": 2.14467, "25235": 2.14385, "25240": 2.15097, "25245": 2.12389, "25250": 2.13063, "25255": 2.15403, "25260": 2.17818, "25265": 2.1176, "25270": 2.13839, "25275": 2.09886, "25280": 2.15505, "25285": 2.13632, "25290": 2.16768, "25295": 2.13509, "25300": 2.12509, "25305": 2.11603, "25310": 2.14385, "25315": 2.09451, "25320": 2.1456, "25325": 2.1422, "25330": 2.19208, "25335": 2.12414, "25340": 2.13025, "25345": 2.12967, "25350": 2.13282, "25355": 2.11999, "25360": 2.10608, "25365": 2.09721, "25370": 2.11294, "25375": 2.14824, "25380": 2.1077, "25385": 2.17249, "25390": 2.11254, "25395": 2.13875, "25400": 2.10992, "25405": 2.14203, "25410": 2.19748, "25415": 2.17373, "25420": 2.12571, "25425": 2.15508, "25430": 2.09296, "25435": 2.15969, "25440": 2.10727, "25445": 2.16069, "25450": 2.1281, "25455": 2.15192, "25460": 2.16759, "25465": 2.17505, "25470": 2.17871, "25475": 2.12461, "25480": 2.14144, "25485": 2.14497, "25490": 2.15439, "25495": 2.15332, "25500": 2.1599, "25505": 2.16703, "25510": 2.11559, "25515": 2.15726, "25520": 2.13004, "25525": 2.09935, "25530": 2.15864, "25535": 2.13041, "25540": 2.13299, "25545": 2.16125, "25550": 2.14967, "25555": 2.16318, "25560": 2.10817, "25565": 2.133, "25570": 2.14493, "25575": 2.16514, "25580": 2.12097, "25585": 2.17644, "25590": 2.15639, "25595": 2.16246, "25600": 2.18479, "25605": 2.14845, "25610": 2.10433, "25615": 2.1395, "25620": 2.11984, "25625": 2.1692, "25630": 2.09604, "25635": 2.14929, "25640": 2.12645, "25645": 2.1407, "25650": 2.15826, "25655": 2.18878, "25660": 2.07415, "25665": 2.13586, "25670": 2.11267, "25675": 2.11688, "25680": 2.16593, "25685": 2.15135, "25690": 2.14363, "25695": 2.1358, "25700": 2.13361, "25705": 2.12986, "25710": 2.13311, "25715": 2.07136, "25720": 2.11647, "25725": 2.19506, "25730": 2.14691, "25735": 2.15606, "25740": 2.10683, "25745": 2.12736, "25750": 2.13159, "25755": 2.15623, "25760": 2.16743, "25765": 2.16151, "25770": 2.11969, "25775": 2.10611, "25780": 2.10962, "25785": 2.13044, "25790": 2.17478, "25795": 2.1448, "25800": 2.12965, "25805": 2.08623, "25810": 2.13043, "25815": 2.09283, "25820": 2.16873, "25825": 2.14139, "25830": 2.1043, "25835": 2.15255, "25840": 2.15873, "25845": 2.15032, "25850": 2.13322, "25855": 2.13143, "25860": 2.16012, "25865": 2.16421, "25870": 2.09401, "25875": 2.08427, "25880": 2.10674, "25885": 2.14381, "25890": 2.11744, "25895": 2.12551, "25900": 2.11385, "25905": 2.12282, "25910": 2.1678, "25915": 2.1262, "25920": 2.0947, "25925": 2.15236, "25930": 2.16461, "25935": 2.11428, "25940": 2.14919, "25945": 2.08848, "25950": 2.13702, "25955": 2.09586, "25960": 2.1369, "25965": 2.19728, "25970": 2.11058, "25975": 2.13479, "25980": 2.14056, "25985": 2.17871, "25990": 2.11145, "25995": 2.16839, "26000": 2.15406, 
"26005": 2.1731, "26010": 2.12341, "26015": 2.13816, "26020": 2.15165, "26025": 2.14093, "26030": 2.16582, "26035": 2.14207, "26040": 2.13801, "26045": 2.17713, "26050": 2.15638, "26055": 2.17091, "26060": 2.16117, "26065": 2.13487, "26070": 2.16257, "26075": 2.16206, "26080": 2.19882, "26085": 2.11888, "26090": 2.10646, "26095": 2.08643, "26100": 2.16012, "26105": 2.08846, "26110": 2.09914, "26115": 2.14465, "26120": 2.10321, "26125": 2.10914, "26130": 2.12985, "26135": 2.15083, "26140": 2.13683, "26145": 2.14648, "26150": 2.17932, "26155": 2.16821, "26160": 2.13741, "26165": 2.1201, "26170": 2.10379, "26175": 2.13683, "26180": 2.16058, "26185": 2.15999, "26190": 2.13644, "26195": 2.13412, "26200": 2.09325, "26205": 2.16394, "26210": 2.09119, "26215": 2.12577, "26220": 2.11695, "26225": 2.15944, "26230": 2.15893, "26235": 2.15669, "26240": 2.13675, "26245": 2.14947, "26250": 2.19116, "26255": 2.10843, "26260": 2.14734, "26265": 2.15731, "26270": 2.12981, "26275": 2.11599, "26280": 2.11285, "26285": 2.1318, "26290": 2.132, "26295": 2.14687, "26300": 2.11874, "26305": 2.1381, "26310": 2.15827, "26315": 2.19088, "26320": 2.1165, "26325": 2.14317, "26330": 2.17349, "26335": 2.14614, "26340": 2.16461, "26345": 2.12818, "26350": 2.13753, "26355": 2.10454, "26360": 2.10475, "26365": 2.16402, "26370": 2.09478, "26375": 2.1212, "26380": 2.10195, "26385": 2.1199, "26390": 2.15636, "26395": 2.12659, "26400": 2.12693, "26405": 2.09993, "26410": 2.11189, "26415": 2.1289, "26420": 2.11812, "26425": 2.13287, "26430": 2.11231, "26435": 2.14206, "26440": 2.16843, "26445": 2.13639, "26450": 2.14425, "26455": 2.09665, "26460": 2.11477, "26465": 2.10752, "26470": 2.14236, "26475": 2.14631, "26480": 2.12025, "26485": 2.13563, "26490": 2.13685, "26495": 2.13369, "26500": 2.15586, "26505": 2.10845, "26510": 2.13446, "26515": 2.16196, "26520": 2.12616, "26525": 2.16333, "26530": 2.14753, "26535": 2.11648, "26540": 2.12531, "26545": 2.15338, "26550": 2.10907, "26555": 2.11759, "26560": 2.10461, "26565": 2.07099, "26570": 2.1288, "26575": 2.16598, "26580": 2.07058, "26585": 2.11899, "26590": 2.10584, "26595": 2.11741, "26600": 2.13033, "26605": 2.1663, "26610": 2.11573, "26615": 2.1372, "26620": 2.14031, "26625": 2.15917, "26630": 2.13693, "26635": 2.16147, "26640": 2.07929, "26645": 2.14901, "26650": 2.1409, "26655": 2.16247, "26660": 2.12957, "26665": 2.14447, "26670": 2.12736, "26675": 2.15479, "26680": 2.13856, "26685": 2.10616, "26690": 2.15782, "26695": 2.14136, "26700": 2.10211, "26705": 2.15777, "26710": 2.14765, "26715": 2.11804, "26720": 2.0819, "26725": 2.092, "26730": 2.12426, "26735": 2.10807, "26740": 2.1149, "26745": 2.14078, "26750": 2.18298, "26755": 2.1223, "26760": 2.10649, "26765": 2.14487, "26770": 2.08981, "26775": 2.13699, "26780": 2.16398, "26785": 2.09739, "26790": 2.11924, "26795": 2.16895, "26800": 2.11007, "26805": 2.12884, "26810": 2.09463, "26815": 2.11184, "26820": 2.11767, "26825": 2.13542, "26830": 2.10656, "26835": 2.13339, "26840": 2.1366, "26845": 2.14579, "26850": 2.09656, "26855": 2.09435, "26860": 2.07356, "26865": 2.11332, "26870": 2.15238, "26875": 2.15207, "26880": 2.12598, "26885": 2.12335, "26890": 2.1421, "26895": 2.15679, "26900": 2.12453, "26905": 2.13526, "26910": 2.14133, "26915": 2.10196, "26920": 2.14753, "26925": 2.16914, "26930": 2.13765, "26935": 2.10407, "26940": 2.1711, "26945": 2.1303, "26950": 2.13426, "26955": 2.12031, "26960": 2.1961, "26965": 2.11324, "26970": 2.11445, "26975": 2.12486, "26980": 2.1204, "26985": 2.09879, "26990": 2.11375, "26995": 
2.11677, "27000": 2.14572, "27005": 2.11955, "27010": 2.11567, "27015": 2.1003, "27020": 2.13393, "27025": 2.11633, "27030": 2.17204, "27035": 2.13136, "27040": 2.13734, "27045": 2.13796, "27050": 2.16168, "27055": 2.11231, "27060": 2.09353, "27065": 2.15149, "27070": 2.13124, "27075": 2.15622, "27080": 2.13868, "27085": 2.11608, "27090": 2.11149, "27095": 2.13024, "27100": 2.13585, "27105": 2.15504, "27110": 2.12449, "27115": 2.12367, "27120": 2.1399, "27125": 2.12866, "27130": 2.11289, "27135": 2.12934, "27140": 2.14393, "27145": 2.13566, "27150": 2.14373, "27155": 2.11753, "27160": 2.10841, "27165": 2.13074, "27170": 2.12789, "27175": 2.15526, "27180": 2.11489, "27185": 2.12104, "27190": 2.13843, "27195": 2.13777, "27200": 2.12097, "27205": 2.10244, "27210": 2.17778, "27215": 2.13605, "27220": 2.12675, "27225": 2.12159, "27230": 2.13815, "27235": 2.08907, "27240": 2.13444, "27245": 2.13577, "27250": 2.10076, "27255": 2.11821, "27260": 2.10232, "27265": 2.14453, "27270": 2.17023, "27275": 2.0337, "27280": 2.11439, "27285": 2.14401, "27290": 2.13903, "27295": 2.1518, "27300": 2.12047, "27305": 2.13882, "27310": 2.099, "27315": 2.15143, "27320": 2.19799, "27325": 2.12641, "27330": 2.1025, "27335": 2.09817, "27340": 2.09579, "27345": 2.13479, "27350": 2.12495, "27355": 2.15583, "27360": 2.09657, "27365": 2.12034, "27370": 2.12975, "27375": 2.15929, "27380": 2.10809, "27385": 2.13027, "27390": 2.15783, "27395": 2.10149, "27400": 2.1334, "27405": 2.17382, "27410": 2.14305, "27415": 2.12402, "27420": 2.12527, "27425": 2.12312, "27430": 2.11042, "27435": 2.12055, "27440": 2.15865, "27445": 2.10883, "27450": 2.12948, "27455": 2.10529, "27460": 2.11077, "27465": 2.1249, "27470": 2.09475, "27475": 2.12472, "27480": 2.12687, "27485": 2.12713, "27490": 2.12256, "27495": 2.11256, "27500": 2.11841, "27505": 2.14053, "27510": 2.1064, "27515": 2.11714, "27520": 2.10714, "27525": 2.15293, "27530": 2.19692, "27535": 2.14055, "27540": 2.08169, "27545": 2.13974, "27550": 2.16855, "27555": 2.09478, "27560": 2.12631, "27565": 2.14383, "27570": 2.09277, "27575": 2.13721, "27580": 2.13032, "27585": 2.14967, "27590": 2.12394, "27595": 2.17736, "27600": 2.13786, "27605": 2.12334, "27610": 2.1533, "27615": 2.12572, "27620": 2.11051, "27625": 2.17335, "27630": 2.08796, "27635": 2.16495, "27640": 2.13117, "27645": 2.12382, "27650": 2.13507, "27655": 2.04445, "27660": 2.08573, "27665": 2.16131, "27670": 2.10625, "27675": 2.12618, "27680": 2.14758, "27685": 2.11864, "27690": 2.13185, "27695": 2.11287, "27700": 2.12533, "27705": 2.13137, "27710": 2.14742, "27715": 2.09504, "27720": 2.14279, "27725": 2.10047, "27730": 2.11993, "27735": 2.11881, "27740": 2.15383, "27745": 2.13342, "27750": 2.12715, "27755": 2.11787, "27760": 2.05652, "27765": 2.13874, "27770": 2.11141, "27775": 2.09975, "27780": 2.10952, "27785": 2.09028, "27790": 2.10495, "27795": 2.08814, "27800": 2.10335, "27805": 2.09943, "27810": 2.13021, "27815": 2.17148, "27820": 2.11765, "27825": 2.17736, "27830": 2.12111, "27835": 2.11913, "27840": 2.14293, "27845": 2.09066, "27850": 2.15396, "27855": 2.16153, "27860": 2.08881, "27865": 2.13141, "27870": 2.09804, "27875": 2.15381, "27880": 2.08805, "27885": 2.13143, "27890": 2.11033, "27895": 2.14109, "27900": 2.14728, "27905": 2.1091, "27910": 2.10329, "27915": 2.11108, "27920": 2.17749, "27925": 2.13786, "27930": 2.13742, "27935": 2.12179, "27940": 2.13358, "27945": 2.14135, "27950": 2.10708, "27955": 2.13164, "27960": 2.10376, "27965": 2.09768, "27970": 2.11786, "27975": 2.10825, "27980": 2.1197, "27985": 
2.14667, "27990": 2.14201, "27995": 2.18491, "28000": 2.13168, "28005": 2.07802, "28010": 2.12686, "28015": 2.13434, "28020": 2.11713, "28025": 2.13025, "28030": 2.09278, "28035": 2.11446, "28040": 2.13802, "28045": 2.12397, "28050": 2.09113, "28055": 2.13059, "28060": 2.1282, "28065": 2.11799, "28070": 2.10972, "28075": 2.11513, "28080": 2.14225, "28085": 2.11859, "28090": 2.16514, "28095": 2.08961, "28100": 2.14516, "28105": 2.12416, "28110": 2.09814, "28115": 2.11396, "28120": 2.08971, "28125": 2.11929, "28130": 2.14696, "28135": 2.09441, "28140": 2.15763, "28145": 2.12072, "28150": 2.18128, "28155": 2.12681, "28160": 2.17585, "28165": 2.11701, "28170": 2.17835, "28175": 2.10973, "28180": 2.10133, "28185": 2.11217, "28190": 2.1711, "28195": 2.10351, "28200": 2.15197, "28205": 2.14303, "28210": 2.13709, "28215": 2.12931, "28220": 2.12122, "28225": 2.14236, "28230": 2.15559, "28235": 2.12635, "28240": 2.14091, "28245": 2.16287, "28250": 2.10875, "28255": 2.14038, "28260": 2.10369, "28265": 2.13428, "28270": 2.09718, "28275": 2.1489, "28280": 2.1227, "28285": 2.12243, "28290": 2.13812, "28295": 2.14285, "28300": 2.15294, "28305": 2.09895, "28310": 2.13794, "28315": 2.11598, "28320": 2.12054, "28325": 2.14944, "28330": 2.11722, "28335": 2.09128, "28340": 2.11423, "28345": 2.12521, "28350": 2.13723, "28355": 2.16048, "28360": 2.13869, "28365": 2.11923, "28370": 2.12547, "28375": 2.09441, "28380": 2.1185, "28385": 2.09894, "28390": 2.12675, "28395": 2.12524, "28400": 2.09801, "28405": 2.14031, "28410": 2.08554, "28415": 2.10324, "28420": 2.10534, "28425": 2.14002, "28430": 2.1316, "28435": 2.13571, "28440": 2.10256, "28445": 2.08533, "28450": 2.12025, "28455": 2.10473, "28460": 2.12501, "28465": 2.1933, "28470": 2.08989, "28475": 2.12629, "28480": 2.09351, "28485": 2.09922, "28490": 2.1404, "28495": 2.09956, "28500": 2.08689, "28505": 2.11506, "28510": 2.15424, "28515": 2.16101, "28520": 2.11189, "28525": 2.12862, "28530": 2.11177, "28535": 2.10821, "28540": 2.12846, "28545": 2.11742, "28550": 2.08781, "28555": 2.13473, "28560": 2.12221, "28565": 2.15802, "28570": 2.13391, "28575": 2.09907, "28580": 2.11351, "28585": 2.09979, "28590": 2.11353, "28595": 2.15312, "28600": 2.08958, "28605": 2.10074, "28610": 2.09865, "28615": 2.14159, "28620": 2.05822, "28625": 2.11044, "28630": 2.10347, "28635": 2.10134, "28640": 2.10349, "28645": 2.13831, "28650": 2.13878, "28655": 2.10616, "28660": 2.07396, "28665": 2.12464, "28670": 2.16997, "28675": 2.09815, "28680": 2.08547, "28685": 2.16503, "28690": 2.06907, "28695": 2.10988, "28700": 2.16151, "28705": 2.1141, "28710": 2.11294, "28715": 2.09218, "28720": 2.11275, "28725": 2.11515, "28730": 2.13305, "28735": 2.11775, "28740": 2.10267, "28745": 2.1121, "28750": 2.07591, "28755": 2.1332, "28760": 2.11559, "28765": 2.10773, "28770": 2.16294, "28775": 2.10317, "28780": 2.14781, "28785": 2.1044, "28790": 2.10788, "28795": 2.12625, "28800": 2.09901, "28805": 2.17952, "28810": 2.13967, "28815": 2.17455, "28820": 2.09002, "28825": 2.11658, "28830": 2.13498, "28835": 2.14351, "28840": 2.11181, "28845": 2.11601, "28850": 2.12249, "28855": 2.16597, "28860": 2.15764, "28865": 2.1597, "28870": 2.15078, "28875": 2.13907, "28880": 2.14725, "28885": 2.14415, "28890": 2.16097, "28895": 2.10853, "28900": 2.11451, "28905": 2.09799, "28910": 2.11377, "28915": 2.10592, "28920": 2.14911, "28925": 2.1337, "28930": 2.08712, "28935": 2.08662, "28940": 2.14033, "28945": 2.10219, "28950": 2.11061, "28955": 2.15216, "28960": 2.12996, "28965": 2.13128, "28970": 2.17102, "28975": 
2.10687, "28980": 2.15353, "28985": 2.12543, "28990": 2.13553, "28995": 2.10056, "29000": 2.10464, "29005": 2.13733, "29010": 2.0902, "29015": 2.11825, "29020": 2.08609, "29025": 2.09566, "29030": 2.13765, "29035": 2.07274, "29040": 2.12641, "29045": 2.11197, "29050": 2.07709, "29055": 2.118, "29060": 2.10084, "29065": 2.12198, "29070": 2.08523, "29075": 2.11117, "29080": 2.1018, "29085": 2.09848, "29090": 2.12199, "29095": 2.10204, "29100": 2.13525, "29105": 2.13304, "29110": 2.12105, "29115": 2.09973, "29120": 2.12237, "29125": 2.17302, "29130": 2.1398, "29135": 2.07602, "29140": 2.09201, "29145": 2.12109, "29150": 2.18325, "29155": 2.08152, "29160": 2.10198, "29165": 2.10918, "29170": 2.13383, "29175": 2.09263, "29180": 2.13685, "29185": 2.09968, "29190": 2.13612, "29195": 2.03047, "29200": 2.15391, "29205": 2.13358, "29210": 2.10222, "29215": 2.15451, "29220": 2.15211, "29225": 2.14633, "29230": 2.08741, "29235": 2.12117, "29240": 2.07721, "29245": 2.10413, "29250": 2.08823, "29255": 2.12938, "29260": 2.11048, "29265": 2.15263, "29270": 2.13725, "29275": 2.11799, "29280": 2.13048, "29285": 2.1067, "29290": 2.11096, "29295": 2.12536, "29300": 2.07133, "29305": 2.08747, "29310": 2.13986, "29315": 2.08873, "29320": 2.09246, "29325": 2.07017, "29330": 2.14036, "29335": 2.14424, "29340": 2.11736, "29345": 2.14807, "29350": 2.16531, "29355": 2.15071, "29360": 2.16051, "29365": 2.12, "29370": 2.13679, "29375": 2.09274, "29380": 2.10173, "29385": 2.12141, "29390": 2.13333, "29395": 2.14599, "29400": 2.09426, "29405": 2.11227, "29410": 2.10872, "29415": 2.12231, "29420": 2.10324, "29425": 2.15173, "29430": 2.11666, "29435": 2.11765, "29440": 2.11968, "29445": 2.11489, "29450": 2.08386, "29455": 2.13578, "29460": 2.06377, "29465": 2.16615, "29470": 2.10211, "29475": 2.14858, "29480": 2.13675, "29485": 2.14573, "29490": 2.11208, "29495": 2.14561, "29500": 2.09079, "29505": 2.15821, "29510": 2.1238, "29515": 2.12045, "29520": 2.12735, "29525": 2.13403, "29530": 2.11798, "29535": 2.11864, "29540": 2.10731, "29545": 2.1176, "29550": 2.13106, "29555": 2.1066, "29560": 2.11646, "29565": 2.08695, "29570": 2.11385, "29575": 2.11768, "29580": 2.08169, "29585": 2.10635, "29590": 2.12933, "29595": 2.12261, "29600": 2.12714, "29605": 2.13656, "29610": 2.13486, "29615": 2.13317, "29620": 2.0787, "29625": 2.09095, "29630": 2.10864, "29635": 2.11584, "29640": 2.09483, "29645": 2.11854, "29650": 2.09834, "29655": 2.1198, "29660": 2.13201, "29665": 2.10561, "29670": 2.10857, "29675": 2.12778, "29680": 2.11358, "29685": 2.08942, "29690": 2.15128, "29695": 2.13853, "29700": 2.09613, "29705": 2.16559, "29710": 2.11753, "29715": 2.11102, "29720": 2.12098, "29725": 2.10367, "29730": 2.0972, "29735": 2.1504, "29740": 2.07743, "29745": 2.14421, "29750": 2.09319, "29755": 2.09999, "29760": 2.14038, "29765": 2.09829, "29770": 2.06088, "29775": 2.11746, "29780": 2.10754, "29785": 2.15191, "29790": 2.12793, "29795": 2.12689, "29800": 2.12444, "29805": 2.1136, "29810": 2.15682, "29815": 2.18835, "29820": 2.11507, "29825": 2.10239, "29830": 2.12042, "29835": 2.12467, "29840": 2.13243, "29845": 2.10058, "29850": 2.11116, "29855": 2.09426, "29860": 2.10201, "29865": 2.14905, "29870": 2.09256, "29875": 2.12082, "29880": 2.09389, "29885": 2.10008, "29890": 2.14122, "29895": 2.06972, "29900": 2.12729, "29905": 2.10368, "29910": 2.10274, "29915": 2.16134, "29920": 2.14008, "29925": 2.07028, "29930": 2.12761, "29935": 2.11435, "29940": 2.10445, "29945": 2.10342, "29950": 2.08907, "29955": 2.09885, "29960": 2.11214, "29965": 
2.10246, "29970": 2.15113, "29975": 2.16171, "29980": 2.09088, "29985": 2.10272, "29990": 2.14088, "29995": 2.09274, "30000": 2.15749, "30005": 2.0888, "30010": 2.13651, "30015": 2.12688, "30020": 2.11257, "30025": 2.099, "30030": 2.06837, "30035": 2.1057, "30040": 2.10333, "30045": 2.10685, "30050": 2.1596, "30055": 2.10119, "30060": 2.10185, "30065": 2.10856, "30070": 2.12995, "30075": 2.09983, "30080": 2.11709, "30085": 2.09944, "30090": 2.1366, "30095": 2.11599, "30100": 2.07312, "30105": 2.13018, "30110": 2.12862, "30115": 2.12638, "30120": 2.0916, "30125": 2.08332, "30130": 2.12767, "30135": 2.11948, "30140": 2.14687, "30145": 2.05501, "30150": 2.09528, "30155": 2.122, "30160": 2.13165, "30165": 2.13842, "30170": 2.136, "30175": 2.12782, "30180": 2.14612, "30185": 2.10212, "30190": 2.13352, "30195": 2.09932, "30200": 2.14526, "30205": 2.11047, "30210": 2.12999, "30215": 2.09918, "30220": 2.13857, "30225": 2.13681, "30230": 2.12591, "30235": 2.09873, "30240": 2.11258, "30245": 2.09789, "30250": 2.10837, "30255": 2.09302, "30260": 2.05611, "30265": 2.11237, "30270": 2.09868, "30275": 2.13083, "30280": 2.07146, "30285": 2.11314, "30290": 2.10693, "30295": 2.10226, "30300": 2.16095, "30305": 2.12994, "30310": 2.12499, "30315": 2.10417, "30320": 2.09787, "30325": 2.14465, "30330": 2.07466, "30335": 2.12115, "30340": 2.11671, "30345": 2.14006, "30350": 2.13841, "30355": 2.15919, "30360": 2.10292, "30365": 2.15698, "30370": 2.12656, "30375": 2.10877, "30380": 2.1537, "30385": 2.15074, "30390": 2.10501, "30395": 2.12851, "30400": 2.06822, "30405": 2.11096, "30410": 2.09334, "30415": 2.14231, "30420": 2.1149, "30425": 2.10343, "30430": 2.13568, "30435": 2.10919, "30440": 2.06212, "30445": 2.14188, "30450": 2.10983, "30455": 2.14342, "30460": 2.10149, "30465": 2.10594, "30470": 2.09393, "30475": 2.12907, "30480": 2.10547, "30485": 2.14079, "30490": 2.10112, "30495": 2.1024, "30500": 2.11135, "30505": 2.13122, "30510": 2.14234, "30515": 2.13394, "30520": 2.1343, "30525": 2.11667, "30530": 2.15002, "30535": 2.07717, "30540": 2.09863, "30545": 2.10294, "30550": 2.11124, "30555": 2.13817, "30560": 2.12715, "30565": 2.10742, "30570": 2.12945, "30575": 2.07979, "30580": 2.11329, "30585": 2.10245, "30590": 2.11476, "30595": 2.10666, "30600": 2.12662, "30605": 2.09066, "30610": 2.13525, "30615": 2.15508, "30620": 2.11572, "30625": 2.09151, "30630": 2.13588, "30635": 2.12427, "30640": 2.07667, "30645": 2.10647, "30650": 2.09852, "30655": 2.12708, "30660": 2.10559, "30665": 2.09543, "30670": 2.11798, "30675": 2.10156, "30680": 2.08074, "30685": 2.16775, "30690": 2.0821, "30695": 2.11155, "30700": 2.07267, "30705": 2.11383, "30710": 2.15074, "30715": 2.12435, "30720": 2.13439, "30725": 2.13878, "30730": 2.13466, "30735": 2.10563, "30740": 2.14833, "30745": 2.13105, "30750": 2.11144, "30755": 2.10283, "30760": 2.11132, "30765": 2.16253, "30770": 2.13083, "30775": 2.12205, "30780": 2.11975, "30785": 2.14621, "30790": 2.1179, "30795": 2.11658, "30800": 2.11814, "30805": 2.12209, "30810": 2.12992, "30815": 2.14866, "30820": 2.12431, "30825": 2.07592, "30830": 2.09754, "30835": 2.11437, "30840": 2.10174, "30845": 2.1532, "30850": 2.1097, "30855": 2.09777, "30860": 2.1132, "30865": 2.12782, "30870": 2.11668, "30875": 2.10415, "30880": 2.10071, "30885": 2.07662, "30890": 2.08775, "30895": 2.11871, "30900": 2.15896, "30905": 2.14489, "30910": 2.11918, "30915": 2.09371, "30920": 2.12675, "30925": 2.13066, "30930": 2.10031, "30935": 2.08973, "30940": 2.13965, "30945": 2.12181, "30950": 2.12068, "30955": 2.0862, 
"30960": 2.11716, "30965": 2.13296, "30970": 2.10429, "30975": 2.10337, "30980": 2.1663, "30985": 2.12839, "30990": 2.14981, "30995": 2.09164, "31000": 2.09305, "31005": 2.08868, "31010": 2.0809, "31015": 2.11478, "31020": 2.12271, "31025": 2.14028, "31030": 2.1456, "31035": 2.08634, "31040": 2.12598, "31045": 2.16927, "31050": 2.12709, "31055": 2.07928, "31060": 2.07875, "31065": 2.10032, "31070": 2.07097, "31075": 2.12703, "31080": 2.0748, "31085": 2.15601, "31090": 2.04427, "31095": 2.15366, "31100": 2.10555, "31105": 2.16358, "31110": 2.16841, "31115": 2.11347, "31120": 2.11532, "31125": 2.14135, "31130": 2.08267, "31135": 2.14937, "31140": 2.10843, "31145": 2.06433, "31150": 2.12438, "31155": 2.06865, "31160": 2.11036, "31165": 2.10042, "31170": 2.14013, "31175": 2.1162, "31180": 2.08568, "31185": 2.09292, "31190": 2.0854, "31195": 2.16585, "31200": 2.12376, "31205": 2.11553, "31210": 2.06899, "31215": 2.10559, "31220": 2.1145, "31225": 2.09611, "31230": 2.1624, "31235": 2.1083, "31240": 2.12812, "31245": 2.14808, "31250": 2.13212, "31255": 2.06439, "31260": 2.15418, "31265": 2.11621, "31270": 2.0956, "31275": 2.10022, "31280": 2.12325, "31285": 2.12367, "31290": 2.10142, "31295": 2.14421, "31300": 2.13841, "31305": 2.07838, "31310": 2.07186, "31315": 2.12188, "31320": 2.15406, "31325": 2.14266, "31330": 2.1229, "31335": 2.11076, "31340": 2.10514, "31345": 2.0762, "31350": 2.14684, "31355": 2.13763, "31360": 2.13527, "31365": 2.05441, "31370": 2.11823, "31375": 2.09946, "31380": 2.1464, "31385": 2.11881, "31390": 2.11644, "31395": 2.15045, "31400": 2.11092, "31405": 2.09864, "31410": 2.08114, "31415": 2.13503, "31420": 2.12081, "31425": 2.15014, "31430": 2.11874, "31435": 2.10068, "31440": 2.11017, "31445": 2.1104, "31450": 2.07771, "31455": 2.13573, "31460": 2.14541, "31465": 2.13773, "31470": 2.12585, "31475": 2.07406, "31480": 2.07394, "31485": 2.11684, "31490": 2.09787, "31495": 2.10144, "31500": 2.10216, "31505": 2.14838, "31510": 2.11385, "31515": 2.13748, "31520": 2.13107, "31525": 2.11188, "31530": 2.12136, "31535": 2.10122, "31540": 2.15393, "31545": 2.10399, "31550": 2.1372, "31555": 2.11311, "31560": 2.1312, "31565": 2.09991, "31570": 2.10515, "31575": 2.09197, "31580": 2.11815, "31585": 2.12686, "31590": 2.13439, "31595": 2.13564, "31600": 2.11732, "31605": 2.13738, "31610": 2.1037, "31615": 2.1166, "31620": 2.10967, "31625": 2.11031, "31630": 2.12079, "31635": 2.08297, "31640": 2.1031, "31645": 2.08526, "31650": 2.11682, "31655": 2.09061, "31660": 2.0816, "31665": 2.10823, "31670": 2.06917, "31675": 2.10493, "31680": 2.19266, "31685": 2.06893, "31690": 2.1334, "31695": 2.15658, "31700": 2.13214, "31705": 2.13136, "31710": 2.1256, "31715": 2.13736, "31720": 2.10044, "31725": 2.08031, "31730": 2.14049, "31735": 2.10938, "31740": 2.12393, "31745": 2.13127, "31750": 2.09463, "31755": 2.11427, "31760": 2.12542, "31765": 2.14941, "31770": 2.13633, "31775": 2.0972, "31780": 2.11632, "31785": 2.10902, "31790": 2.09105, "31795": 2.07251, "31800": 2.11304, "31805": 2.04841, "31810": 2.10883, "31815": 2.07946, "31820": 2.07144, "31825": 2.12564, "31830": 2.12779, "31835": 2.08207, "31840": 2.12264, "31845": 2.03334, "31850": 2.08839, "31855": 2.13933, "31860": 2.13504, "31865": 2.12715, "31870": 2.07327, "31875": 2.08083, "31880": 2.10245, "31885": 2.11919, "31890": 2.1179, "31895": 2.11169, "31900": 2.10775, "31905": 2.09161, "31910": 2.12922, "31915": 2.14466, "31920": 2.1176, "31925": 2.10895, "31930": 2.12638, "31935": 2.1217, "31940": 2.1236, "31945": 2.062, "31950": 2.11499, 
"31955": 2.11532, "31960": 2.11533, "31965": 2.12165, "31970": 2.05903, "31975": 2.05048, "31980": 2.11155, "31985": 2.08588, "31990": 2.14275, "31995": 2.14686, "32000": 2.08855, "32005": 2.08491, "32010": 2.11618, "32015": 2.12594, "32020": 2.12694, "32025": 2.0507, "32030": 2.06586, "32035": 2.07829, "32040": 2.0957, "32045": 2.10548, "32050": 2.10286, "32055": 2.08992, "32060": 2.06176, "32065": 2.16347, "32070": 2.10563, "32075": 2.12687, "32080": 2.09314, "32085": 2.10999, "32090": 2.16416, "32095": 2.1525, "32100": 2.14271, "32105": 2.09874, "32110": 2.11999, "32115": 2.08824, "32120": 2.12786, "32125": 2.10107, "32130": 2.13507, "32135": 2.0694, "32140": 2.05255, "32145": 2.1406, "32150": 2.0938, "32155": 2.08902, "32160": 2.08339, "32165": 2.09782, "32170": 2.1093, "32175": 2.1057, "32180": 2.1015, "32185": 2.09923, "32190": 2.08497, "32195": 2.10736, "32200": 2.09418, "32205": 2.05813, "32210": 2.1128, "32215": 2.12381, "32220": 2.10771, "32225": 2.14169, "32230": 2.08912, "32235": 2.09353, "32240": 2.11167, "32245": 2.10226, "32250": 2.10304, "32255": 2.15715, "32260": 2.06084, "32265": 2.09316, "32270": 2.04001, "32275": 2.14578, "32280": 2.13184, "32285": 2.14647, "32290": 2.08318, "32295": 2.1242, "32300": 2.10819, "32305": 2.09615, "32310": 2.12652, "32315": 2.1688, "32320": 2.09062, "32325": 2.10937, "32330": 2.1056, "32335": 2.12596, "32340": 2.10903, "32345": 2.08865, "32350": 2.09684, "32355": 2.0953, "32360": 2.10568, "32365": 2.08781, "32370": 2.09239, "32375": 2.0882, "32380": 2.13025, "32385": 2.08914, "32390": 2.0843, "32395": 2.10737, "32400": 2.08174, "32405": 2.09075, "32410": 2.12883, "32415": 2.10422, "32420": 2.09078, "32425": 2.09076, "32430": 2.10793, "32435": 2.15559, "32440": 2.12571, "32445": 2.0969, "32450": 2.10006, "32455": 2.06794, "32460": 2.10081, "32465": 2.10797, "32470": 2.08278, "32475": 2.08529, "32480": 2.09632, "32485": 2.12571, "32490": 2.10009, "32495": 2.09381, "32500": 2.11587, "32505": 2.0916, "32510": 2.06305, "32515": 2.13881, "32520": 2.08573, "32525": 2.08954, "32530": 2.12742, "32535": 2.10051, "32540": 2.11899, "32545": 2.119, "32550": 2.10857, "32555": 2.0609, "32560": 2.1132, "32565": 2.1187, "32570": 2.11131, "32575": 2.11885, "32580": 2.12773, "32585": 2.10396, "32590": 2.11555, "32595": 2.12243, "32600": 2.13098, "32605": 2.09087, "32610": 2.1037, "32615": 2.12126, "32620": 2.1262, "32625": 2.08191, "32630": 2.10034, "32635": 2.10169, "32640": 2.08573, "32645": 2.11542, "32650": 2.11536, "32655": 2.09658, "32660": 2.10137, "32665": 2.0822, "32670": 2.1477, "32675": 2.08404, "32680": 2.08256, "32685": 2.07026, "32690": 2.11902, "32695": 2.07066, "32700": 2.13347, "32705": 2.10546, "32710": 2.08366, "32715": 2.1391, "32720": 2.06905, "32725": 2.0822, "32730": 2.06181, "32735": 2.10263, "32740": 2.09687, "32745": 2.11236, "32750": 2.06395, "32755": 2.0989, "32760": 2.11544, "32765": 2.11754, "32770": 2.09087, "32775": 2.10556, "32780": 2.11526, "32785": 2.10532, "32790": 2.11946, "32795": 2.1017, "32800": 2.12131, "32805": 2.10685, "32810": 2.09847, "32815": 2.09136, "32820": 2.13061, "32825": 2.0925, "32830": 2.11353, "32835": 2.13076, "32840": 2.09426, "32845": 2.10268, "32850": 2.11683, "32855": 2.11117, "32860": 2.09733, "32865": 2.10809, "32870": 2.10898, "32875": 2.10014, "32880": 2.08859, "32885": 2.05355, "32890": 2.08973, "32895": 2.12353, "32900": 2.11629, "32905": 2.1302, "32910": 2.10023, "32915": 2.10594, "32920": 2.08855, "32925": 2.0856, "32930": 2.1062, "32935": 2.12423, "32940": 2.09963, "32945": 2.09202, 
"32950": 2.05013, "32955": 2.11092, "32960": 2.08575, "32965": 2.17081, "32970": 2.14317, "32975": 2.07335, "32980": 2.08635, "32985": 2.07546, "32990": 2.16259, "32995": 2.148, "33000": 2.1365, "33005": 2.10186, "33010": 2.09534, "33015": 2.10661, "33020": 2.12105, "33025": 2.07725, "33030": 2.10682, "33035": 2.08054, "33040": 2.08816, "33045": 2.11856, "33050": 2.10141, "33055": 2.12913, "33060": 2.08397, "33065": 2.10721, "33070": 2.09556, "33075": 2.12001, "33080": 2.09538, "33085": 2.11098, "33090": 2.11675, "33095": 2.09161, "33100": 2.13679, "33105": 2.07696, "33110": 2.10134, "33115": 2.10029, "33120": 2.07851, "33125": 2.10683, "33130": 2.08231, "33135": 2.11878, "33140": 2.10359, "33145": 2.09802, "33150": 2.1655, "33155": 2.17459, "33160": 2.11559, "33165": 2.05537, "33170": 2.11955, "33175": 2.08611, "33180": 2.0985, "33185": 2.10376, "33190": 2.08761, "33195": 2.12019, "33200": 2.05312, "33205": 2.09649, "33210": 2.10215, "33215": 2.07715, "33220": 2.09539, "33225": 2.11081, "33230": 2.07505, "33235": 2.09207, "33240": 2.12478, "33245": 2.0814, "33250": 2.12825, "33255": 2.09797, "33260": 2.10614, "33265": 2.0788, "33270": 2.09873, "33275": 2.11141, "33280": 2.10013, "33285": 2.10456, "33290": 2.10275, "33295": 2.12107, "33300": 2.07007, "33305": 2.11339, "33310": 2.06818, "33315": 2.09674, "33320": 2.07993, "33325": 2.1209, "33330": 2.12027, "33335": 2.11478, "33340": 2.0946, "33345": 2.12106, "33350": 2.11344, "33355": 2.0964, "33360": 2.08432, "33365": 2.17123, "33370": 2.06489, "33375": 2.06496, "33380": 2.12209, "33385": 2.08492, "33390": 2.09291, "33395": 2.11554, "33400": 2.09089, "33405": 2.13346, "33410": 2.09253, "33415": 2.09334, "33420": 2.12004, "33425": 2.12385, "33430": 2.12791, "33435": 2.12034, "33440": 2.13092, "33445": 2.14082, "33450": 2.11062, "33455": 2.09416, "33460": 2.08322, "33465": 2.10757, "33470": 2.13516, "33475": 2.1486, "33480": 2.12679, "33485": 2.14402, "33490": 2.10016, "33495": 2.10142, "33500": 2.06724, "33505": 2.12923, "33510": 2.10272, "33515": 2.10503, "33520": 2.13334, "33525": 2.11112, "33530": 2.14127, "33535": 2.12135, "33540": 2.12854, "33545": 2.09047, "33550": 2.11605, "33555": 2.09861, "33560": 2.08075, "33565": 2.09016, "33570": 2.0851, "33575": 2.12463, "33580": 2.10433, "33585": 2.12242, "33590": 2.10118, "33595": 2.13192, "33600": 2.09297, "33605": 2.07851, "33610": 2.08258, "33615": 2.11345, "33620": 2.13759, "33625": 2.09233, "33630": 2.13678, "33635": 2.10654, "33640": 2.12496, "33645": 2.06254, "33650": 2.07418, "33655": 2.08389, "33660": 2.05478, "33665": 2.1006, "33670": 2.14225, "33675": 2.09367, "33680": 2.09963, "33685": 2.08671, "33690": 2.07201, "33695": 2.13346, "33700": 2.10889, "33705": 2.08936, "33710": 2.13049, "33715": 2.08738, "33720": 2.11575, "33725": 2.10834, "33730": 2.09693, "33735": 2.16835, "33740": 2.09483, "33745": 2.09864, "33750": 2.13117, "33755": 2.12231, "33760": 2.11713, "33765": 2.10095, "33770": 2.10958, "33775": 2.1074, "33780": 2.05837, "33785": 2.07441, "33790": 2.08849, "33795": 2.08541, "33800": 2.12236, "33805": 2.11222, "33810": 2.10835, "33815": 2.1094, "33820": 2.13227, "33825": 2.07565, "33830": 2.06678, "33835": 2.09589, "33840": 2.08653, "33845": 2.07551, "33850": 2.08663, "33855": 2.06998, "33860": 2.08961, "33865": 2.11457, "33870": 2.07528, "33875": 2.11256, "33880": 2.09992, "33885": 2.08741, "33890": 2.09757, "33895": 2.12835, "33900": 2.10383, "33905": 2.12511, "33910": 2.09195, "33915": 2.09593, "33920": 2.13512, "33925": 2.09902, "33930": 2.06434, "33935": 2.08625, 
"33940": 2.11179, "33945": 2.10545, "33950": 2.11185, "33955": 2.09286, "33960": 2.05862, "33965": 2.0833, "33970": 2.11229, "33975": 2.09577, "33980": 2.11248, "33985": 2.07811, "33990": 2.11289, "33995": 2.04395, "34000": 2.10967, "34005": 2.07529, "34010": 2.08264, "34015": 2.09097, "34020": 2.11954, "34025": 2.11956, "34030": 2.10653, "34035": 2.08078, "34040": 2.07771, "34045": 2.08531, "34050": 2.11729, "34055": 2.09208, "34060": 2.10708, "34065": 2.101, "34070": 2.05667, "34075": 2.07369, "34080": 2.13381, "34085": 2.12125, "34090": 2.0701, "34095": 2.0788, "34100": 2.1, "34105": 2.11957, "34110": 2.11757, "34115": 2.12318, "34120": 2.1234, "34125": 2.12211, "34130": 2.12401, "34135": 2.11647, "34140": 2.06634, "34145": 2.09176, "34150": 2.11955, "34155": 2.12148, "34160": 2.1066, "34165": 2.1233, "34170": 2.07117, "34175": 2.11737, "34180": 2.06635, "34185": 2.13101, "34190": 2.09783, "34195": 2.11015, "34200": 2.09452, "34205": 2.08691, "34210": 2.13576, "34215": 2.10333, "34220": 2.12149, "34225": 2.11935, "34230": 2.09279, "34235": 2.07131, "34240": 2.13026, "34245": 2.08238, "34250": 2.07551, "34255": 2.09284, "34260": 2.05316, "34265": 2.10608, "34270": 2.07198, "34275": 2.11207, "34280": 2.10644, "34285": 2.10856, "34290": 2.11025, "34295": 2.12482, "34300": 2.09813, "34305": 2.08602, "34310": 2.03822, "34315": 2.11434, "34320": 2.11867, "34325": 2.14775, "34330": 2.12765, "34335": 2.09534, "34340": 2.12247, "34345": 2.11392, "34350": 2.09011, "34355": 2.14156, "34360": 2.08092, "34365": 2.06993, "34370": 2.08454, "34375": 2.13383, "34380": 2.09016, "34385": 2.12526, "34390": 2.09686, "34395": 2.1242, "34400": 2.07613, "34405": 2.07766, "34410": 2.09363, "34415": 2.05511, "34420": 2.12468, "34425": 2.12982, "34430": 2.08264, "34435": 2.06456, "34440": 2.1018, "34445": 2.08292, "34450": 2.15304, "34455": 2.09931, "34460": 2.12809, "34465": 2.15448, "34470": 2.06563, "34475": 2.11328, "34480": 2.09684, "34485": 2.11377, "34490": 2.09452, "34495": 2.1227, "34500": 2.09189, "34505": 2.14253, "34510": 2.13651, "34515": 2.10486, "34520": 2.08092, "34525": 2.11204, "34530": 2.08368, "34535": 2.12302, "34540": 2.07279, "34545": 2.11576, "34550": 2.09761, "34555": 2.15436, "34560": 2.1124, "34565": 2.06749, "34570": 2.12986, "34575": 2.09911, "34580": 2.09922, "34585": 2.09841, "34590": 2.07866, "34595": 2.10573, "34600": 2.04209, "34605": 2.08764, "34610": 2.09538, "34615": 2.08166, "34620": 2.10239, "34625": 2.08001, "34630": 2.07573, "34635": 2.07822, "34640": 2.07383, "34645": 2.12398, "34650": 2.08383, "34655": 2.09555, "34660": 2.10345, "34665": 2.15837, "34670": 2.1038, "34675": 2.09905, "34680": 2.10533, "34685": 2.09891, "34690": 2.14216, "34695": 2.13834, "34700": 2.10059, "34705": 2.13431, "34710": 2.07009, "34715": 2.07179, "34720": 2.0726, "34725": 2.04505, "34730": 2.11969, "34735": 2.07556, "34740": 2.1023, "34745": 2.10311, "34750": 2.09824, "34755": 2.0652, "34760": 2.11892, "34765": 2.08322, "34770": 2.12349, "34775": 2.08031, "34780": 2.04932, "34785": 2.11602, "34790": 2.066, "34795": 2.12487, "34800": 2.09192, "34805": 2.05055, "34810": 2.07058, "34815": 2.11683, "34820": 2.1242, "34825": 2.11401, "34830": 2.13695, "34835": 2.12829, "34840": 2.09794, "34845": 2.08055, "34850": 2.09895, "34855": 2.0775, "34860": 2.12475, "34865": 2.06703, "34870": 2.10067, "34875": 2.10638, "34880": 2.08687, "34885": 2.02419, "34890": 2.07676, "34895": 2.10279, "34900": 2.09455, "34905": 2.08248, "34910": 2.07987, "34915": 2.1037, "34920": 2.07672, "34925": 2.09435, "34930": 
2.14043, "34935": 2.09266, "34940": 2.125, "34945": 2.07401, "34950": 2.07456, "34955": 2.13524, "34960": 2.08926, "34965": 2.10202, "34970": 2.07611, "34975": 2.07138, "34980": 2.1202, "34985": 2.06936, "34990": 2.12258, "34995": 2.07303, "35000": 2.08126, "35005": 2.12636, "35010": 2.08731, "35015": 2.068, "35020": 2.07997, "35025": 2.10294, "35030": 2.11938, "35035": 2.08874, "35040": 2.08429, "35045": 2.12973, "35050": 2.07872, "35055": 2.08831, "35060": 2.11847, "35065": 2.11318, "35070": 2.08583, "35075": 2.09386, "35080": 2.09811, "35085": 2.10459, "35090": 2.10332, "35095": 2.06373, "35100": 2.10048, "35105": 2.09873, "35110": 2.10171, "35115": 2.07778, "35120": 2.05755, "35125": 2.06395, "35130": 2.07801, "35135": 2.10069, "35140": 2.12791, "35145": 2.08407, "35150": 2.07872, "35155": 2.12721, "35160": 2.05925, "35165": 2.11365, "35170": 2.07997, "35175": 2.10044, "35180": 2.09536, "35185": 2.08049, "35190": 2.06163, "35195": 2.13479, "35200": 2.06971, "35205": 2.116, "35210": 2.086, "35215": 2.0623, "35220": 2.12698, "35225": 2.11156, "35230": 2.0937, "35235": 2.08514, "35240": 2.07567, "35245": 2.10525, "35250": 2.10969, "35255": 2.11333, "35260": 2.10847, "35265": 2.07467, "35270": 2.09961, "35275": 2.08394, "35280": 2.07567, "35285": 2.1001, "35290": 2.07628, "35295": 2.07192, "35300": 2.06483, "35305": 2.0994, "35310": 2.1048, "35315": 2.06968, "35320": 2.05032, "35325": 2.11659, "35330": 2.10495, "35335": 2.09914, "35340": 2.07926, "35345": 2.07316, "35350": 2.10964, "35355": 2.11538, "35360": 2.06886, "35365": 2.07347, "35370": 2.11772, "35375": 2.1109, "35380": 2.10893, "35385": 2.12025, "35390": 2.11463, "35395": 2.06281, "35400": 2.12585, "35405": 2.09941, "35410": 2.10728, "35415": 2.10512, "35420": 2.08827, "35425": 2.07167, "35430": 2.12209, "35435": 2.11576, "35440": 2.12495, "35445": 2.12212, "35450": 2.06062, "35455": 2.08837, "35460": 2.11712, "35465": 2.12699, "35470": 2.087, "35475": 2.12363, "35480": 2.07341, "35485": 2.07838, "35490": 2.05914, "35495": 2.09843, "35500": 2.07023, "35505": 2.09731, "35510": 2.0859, "35515": 2.051, "35520": 2.09336, "35525": 2.09135, "35530": 2.07375, "35535": 2.07518, "35540": 2.10839, "35545": 2.10018, "35550": 2.081, "35555": 2.07235, "35560": 2.08942, "35565": 2.11827, "35570": 2.09683, "35575": 2.13362, "35580": 2.13878, "35585": 2.11219, "35590": 2.12793, "35595": 2.06974, "35600": 2.09539, "35605": 2.12257, "35610": 2.14782, "35615": 2.11812, "35620": 2.09294, "35625": 2.08513, "35630": 2.08132, "35635": 2.09409, "35640": 2.10049, "35645": 2.08824, "35650": 2.06851, "35655": 2.10549, "35660": 2.1087, "35665": 2.11283, "35670": 2.12642, "35675": 2.06694, "35680": 2.09169, "35685": 2.07929, "35690": 2.10864, "35695": 2.12499, "35700": 2.07186, "35705": 2.13959, "35710": 2.05529, "35715": 2.08474, "35720": 2.10611, "35725": 2.11548, "35730": 2.06449, "35735": 2.0965, "35740": 2.11075, "35745": 2.10919, "35750": 2.0938, "35755": 2.09483, "35760": 2.09523, "35765": 2.09318, "35770": 2.11324, "35775": 2.08076, "35780": 2.057, "35785": 2.0707, "35790": 2.03598, "35795": 2.0741, "35800": 2.08043, "35805": 2.08794, "35810": 2.11127, "35815": 2.05887, "35820": 2.08357, "35825": 2.05879, "35830": 2.06088, "35835": 2.09471, "35840": 2.08714, "35845": 2.10543, "35850": 2.07556, "35855": 2.10073, "35860": 2.08901, "35865": 2.10211, "35870": 2.07911, "35875": 2.11393, "35880": 2.09251, "35885": 2.07309, "35890": 2.1263, "35895": 2.12724, "35900": 2.08941, "35905": 2.06612, "35910": 2.09768, "35915": 2.10428, "35920": 2.07722, "35925": 
2.08064, "35930": 2.11054, "35935": 2.12964, "35940": 2.09142, "35945": 2.09883, "35950": 2.10243, "35955": 2.07979, "35960": 2.08001, "35965": 2.08621, "35970": 2.11036, "35975": 2.1246, "35980": 2.09475, "35985": 2.06367, "35990": 2.09657, "35995": 2.11713, "36000": 2.10417, "36005": 2.07119, "36010": 2.05653, "36015": 2.08962, "36020": 2.07237, "36025": 2.08295, "36030": 2.12717, "36035": 2.08911, "36040": 2.11382, "36045": 2.06084, "36050": 2.09787, "36055": 2.13003, "36060": 2.15495, "36065": 2.09335, "36070": 2.1263, "36075": 2.08089, "36080": 2.12653, "36085": 2.08678, "36090": 2.08529, "36095": 2.10788, "36100": 2.09858, "36105": 2.10414, "36110": 2.08193, "36115": 2.0657, "36120": 2.0903, "36125": 2.08149, "36130": 2.07625, "36135": 2.08636, "36140": 2.0803, "36145": 2.11598, "36150": 2.11699, "36155": 2.1228, "36160": 2.11698, "36165": 2.09268, "36170": 2.09822, "36175": 2.09578, "36180": 2.06048, "36185": 2.03535, "36190": 2.0875, "36195": 2.11083, "36200": 2.13014, "36205": 2.05521, "36210": 2.08025, "36215": 2.08727, "36220": 2.10856, "36225": 2.08195, "36230": 2.09921, "36235": 2.08451, "36240": 2.09627, "36245": 2.10487, "36250": 2.1103, "36255": 2.08509, "36260": 2.08135, "36265": 2.08539, "36270": 2.06765, "36275": 2.10856, "36280": 2.07981, "36285": 2.09704, "36290": 2.12015, "36295": 2.07534, "36300": 2.10916, "36305": 2.1085, "36310": 2.07053, "36315": 2.13458, "36320": 2.15528, "36325": 2.13264, "36330": 2.10123, "36335": 2.11465, "36340": 2.09424, "36345": 2.11129, "36350": 2.06526, "36355": 2.05399, "36360": 2.07025, "36365": 2.13049, "36370": 2.13195, "36375": 2.08126, "36380": 2.08702, "36385": 2.16771, "36390": 2.10029, "36395": 2.1019, "36400": 2.09411, "36405": 2.07072, "36410": 2.11808, "36415": 2.12845, "36420": 2.12112, "36425": 2.08454, "36430": 2.05143, "36435": 2.0844, "36440": 2.11634, "36445": 2.10148, "36450": 2.12866, "36455": 2.06037, "36460": 2.09718, "36465": 2.10494, "36470": 2.07674, "36475": 2.06023, "36480": 2.09988, "36485": 2.09934, "36490": 2.09847, "36495": 2.09797, "36500": 2.07401, "36505": 2.10144, "36510": 2.11528, "36515": 2.08727, "36520": 2.13992, "36525": 2.05721, "36530": 2.09257, "36535": 2.08834, "36540": 2.07465, "36545": 2.12257, "36550": 2.09132, "36555": 2.098, "36560": 2.05397, "36565": 2.07603, "36570": 2.11999, "36575": 2.12484, "36580": 2.12441, "36585": 2.07575, "36590": 2.07135, "36595": 2.1146, "36600": 2.05161, "36605": 2.12148, "36610": 2.10056, "36615": 2.09119, "36620": 2.14796, "36625": 2.11267, "36630": 2.09239, "36635": 2.10757, "36640": 2.11326, "36645": 2.08473, "36650": 2.09632, "36655": 2.08782, "36660": 2.1407, "36665": 2.08266, "36670": 2.12069, "36675": 2.08855, "36680": 2.12608, "36685": 2.09427, "36690": 2.07558, "36695": 2.07614, "36700": 2.13186, "36705": 2.10679, "36710": 2.06629, "36715": 2.05275, "36720": 2.08844, "36725": 2.09545, "36730": 2.11677, "36735": 2.08389, "36740": 2.11098, "36745": 2.09781, "36750": 2.09885, "36755": 2.11923, "36760": 2.10056, "36765": 2.06118, "36770": 2.11003, "36775": 2.04976, "36780": 2.12232, "36785": 2.08608, "36790": 2.08826, "36795": 2.12134, "36800": 2.09949, "36805": 2.1093, "36810": 2.1057, "36815": 2.04456, "36820": 2.08082, "36825": 2.06936, "36830": 2.09814, "36835": 2.08364, "36840": 2.07868, "36845": 2.07469, "36850": 2.12133, "36855": 2.05145, "36860": 2.04661, "36865": 2.07885, "36870": 2.12333, "36875": 2.05633, "36880": 2.07664, "36885": 2.10049, "36890": 2.10034, "36895": 2.07113, "36900": 2.10154, "36905": 2.07051, "36910": 2.10073, "36915": 
2.09515, "36920": 2.07117, "36925": 2.0711, "36930": 2.09879, "36935": 2.08416, "36940": 2.09362, "36945": 2.09277, "36950": 2.08802, "36955": 2.08528, "36960": 2.06007, "36965": 2.11274, "36970": 2.09851, "36975": 2.10629, "36980": 2.09769, "36985": 2.07256, "36990": 2.07243, "36995": 2.11047, "37000": 2.03652, "37005": 2.09075, "37010": 2.07177, "37015": 2.10373, "37020": 2.11621, "37025": 2.09583, "37030": 2.0626, "37035": 2.06562, "37040": 2.08676, "37045": 2.12988, "37050": 2.10001, "37055": 2.08955, "37060": 2.08654, "37065": 2.1017, "37070": 2.11951, "37075": 2.1215, "37080": 2.1161, "37085": 2.09012, "37090": 2.1224, "37095": 2.0703, "37100": 2.12869, "37105": 2.05564, "37110": 2.05705, "37115": 2.04962, "37120": 2.08504, "37125": 2.121, "37130": 2.0866, "37135": 2.11843, "37140": 2.06122, "37145": 2.10125, "37150": 2.10201, "37155": 2.09947, "37160": 2.12281, "37165": 2.06519, "37170": 2.0924, "37175": 2.08459, "37180": 2.09881, "37185": 2.09877, "37190": 2.05794, "37195": 2.09186, "37200": 2.09561, "37205": 2.10064, "37210": 2.10171, "37215": 2.06297, "37220": 2.05355, "37225": 2.11289, "37230": 2.07558, "37235": 2.09135, "37240": 2.07012, "37245": 2.06256, "37250": 2.07333, "37255": 2.11043, "37260": 2.08388, "37265": 2.06947, "37270": 2.06333, "37275": 2.05366, "37280": 2.09331, "37285": 2.08668, "37290": 2.10372, "37295": 2.08518, "37300": 2.09155, "37305": 2.10993, "37310": 2.07872, "37315": 2.12197, "37320": 2.11534, "37325": 2.11694, "37330": 2.09055, "37335": 2.05347, "37340": 2.05064, "37345": 2.04968, "37350": 2.11936, "37355": 2.05795, "37360": 2.12494, "37365": 2.08772, "37370": 2.12644, "37375": 2.08014, "37380": 2.07157, "37385": 2.06747, "37390": 2.06869, "37395": 2.09457, "37400": 2.1224, "37405": 2.08199, "37410": 2.08922, "37415": 2.08501, "37420": 2.10612, "37425": 2.10187, "37430": 2.1042, "37435": 2.06127, "37440": 2.07422, "37445": 2.09317, "37450": 2.10163, "37455": 2.11536, "37460": 2.13022, "37465": 2.088, "37470": 2.12426, "37475": 2.08448, "37480": 2.0535, "37485": 2.04031, "37490": 2.12117, "37495": 2.10144, "37500": 2.11295, "37505": 2.1032, "37510": 2.08333, "37515": 2.09072, "37520": 2.09889, "37525": 2.09433, "37530": 2.09926, "37535": 2.02783, "37540": 2.09748, "37545": 2.07203, "37550": 2.06047, "37555": 2.11245, "37560": 2.10709, "37565": 2.10013, "37570": 2.06554, "37575": 2.08935, "37580": 2.12186, "37585": 2.09757, "37590": 2.05106, "37595": 2.09452, "37600": 2.089, "37605": 2.1072, "37610": 2.07361, "37615": 2.09908, "37620": 2.0989, "37625": 2.10761, "37630": 2.14052, "37635": 2.09946, "37640": 2.06513, "37645": 2.06071, "37650": 2.0826, "37655": 2.15977, "37660": 2.10301, "37665": 2.07078, "37670": 2.10275, "37675": 2.08094, "37680": 2.08076, "37685": 2.13624, "37690": 2.11233, "37695": 2.06693, "37700": 2.08705, "37705": 2.07504, "37710": 2.12065, "37715": 2.09381, "37720": 2.0782, "37725": 2.05894, "37730": 2.10825, "37735": 2.10932, "37740": 2.13107, "37745": 2.10692, "37750": 2.09404, "37755": 2.10485, "37760": 2.08843, "37765": 2.11645, "37770": 2.06497, "37775": 2.11186, "37780": 2.09282, "37785": 2.0968, "37790": 2.11727, "37795": 2.09559, "37800": 2.103, "37805": 2.09093, "37810": 2.08806, "37815": 2.07377, "37820": 2.07275, "37825": 2.12128, "37830": 2.12582, "37835": 2.0977, "37840": 2.06227, "37845": 2.15315, "37850": 2.13029, "37855": 2.10568, "37860": 2.12973, "37865": 2.12275, "37870": 2.0372, "37875": 2.08006, "37880": 2.10999, "37885": 2.06049, "37890": 2.08522, "37895": 2.10607, "37900": 2.0802, "37905": 2.07287, "37910": 
2.05639, "37915": 2.10369, "37920": 2.13276, "37925": 2.09754, "37930": 2.08461, "37935": 2.09624, "37940": 2.0816, "37945": 2.07644, "37950": 2.11437, "37955": 2.09828, "37960": 2.1209, "37965": 2.1059, "37970": 2.02127, "37975": 2.07109, "37980": 2.09532, "37985": 2.08984, "37990": 2.07576, "37995": 2.07044, "38000": 2.11599, "38005": 2.10186, "38010": 2.09892, "38015": 2.06492, "38020": 2.0805, "38025": 2.09187, "38030": 2.07906, "38035": 2.08926, "38040": 2.09523, "38045": 2.07844, "38050": 2.07081, "38055": 2.12085, "38060": 2.10338, "38065": 2.1124, "38070": 2.10729, "38075": 2.07827, "38080": 2.08295, "38085": 2.09746, "38090": 2.06887, "38095": 2.10825, "38100": 2.06133, "38105": 2.10468, "38110": 2.08811, "38115": 2.09733, "38120": 2.07968, "38125": 2.12103, "38130": 2.06155, "38135": 2.1186, "38140": 2.05336, "38145": 2.07074, "38150": 2.1014, "38155": 2.0931, "38160": 2.10774, "38165": 2.05631, "38170": 2.04128, "38175": 2.07135, "38180": 2.08934, "38185": 2.11537, "38190": 2.09548, "38195": 2.0833, "38200": 2.07975, "38205": 2.10119, "38210": 2.08467, "38215": 2.11636, "38220": 2.10927, "38225": 2.12394, "38230": 2.13934, "38235": 2.0734, "38240": 2.1041, "38245": 2.09873, "38250": 2.06843, "38255": 2.06944, "38260": 2.1044, "38265": 2.13173, "38270": 2.10806, "38275": 2.10224, "38280": 2.05742, "38285": 2.08434, "38290": 2.09102, "38295": 2.06381, "38300": 2.04903, "38305": 2.04176, "38310": 2.06599, "38315": 2.10026, "38320": 2.07091, "38325": 2.05485, "38330": 2.10244, "38335": 2.09892, "38340": 2.05703, "38345": 2.08717, "38350": 2.09235, "38355": 2.08446, "38360": 2.0477, "38365": 2.07097, "38370": 2.08699, "38375": 2.12477, "38380": 2.09508, "38385": 2.06158, "38390": 2.11219, "38395": 2.09213, "38400": 2.0956, "38405": 2.08125, "38410": 2.09206, "38415": 2.08756, "38420": 2.09496, "38425": 2.09134, "38430": 2.05512, "38435": 2.1179, "38440": 2.0956, "38445": 2.05566, "38450": 2.13721, "38455": 2.10053, "38460": 2.05899, "38465": 2.06171, "38470": 2.08353, "38475": 2.09633, "38480": 2.09063, "38485": 2.09879, "38490": 2.09744, "38495": 2.11215, "38500": 2.09392, "38505": 2.11561, "38510": 2.10364, "38515": 2.10452, "38520": 2.09513, "38525": 2.09362, "38530": 2.09039, "38535": 2.09367, "38540": 2.09489, "38545": 2.07963, "38550": 2.06705, "38555": 2.04109, "38560": 2.08087, "38565": 2.07303, "38570": 2.07384, "38575": 2.09606, "38580": 2.11792, "38585": 2.10772, "38590": 2.0902, "38595": 2.07152, "38600": 2.10671, "38605": 2.07455, "38610": 2.09486, "38615": 2.09204, "38620": 2.05456, "38625": 2.05848, "38630": 2.05338, "38635": 2.09489, "38640": 2.13795, "38645": 2.07624, "38650": 2.03446, "38655": 2.10931, "38660": 2.07371, "38665": 2.12025, "38670": 2.13017, "38675": 2.06854, "38680": 2.0969, "38685": 2.06545, "38690": 2.11154, "38695": 2.09932, "38700": 2.07908, "38705": 2.08718, "38710": 2.06154, "38715": 2.07127, "38720": 2.10023, "38725": 2.0593, "38730": 2.07295, "38735": 2.05339, "38740": 2.07016, "38745": 2.11149, "38750": 2.0753, "38755": 2.06359, "38760": 2.10073, "38765": 2.06975, "38770": 2.07727, "38775": 2.08105, "38780": 2.02377, "38785": 2.10098, "38790": 2.04398, "38795": 2.10862, "38800": 2.09435, "38805": 2.08776, "38810": 2.08601, "38815": 2.0807, "38820": 2.07122, "38825": 2.10169, "38830": 2.11445, "38835": 2.08071, "38840": 2.11398, "38845": 2.09518, "38850": 2.07354, "38855": 2.06137, "38860": 2.07551, "38865": 2.11014, "38870": 2.09282, "38875": 2.07635, "38880": 2.12923, "38885": 2.07879, "38890": 2.1118, "38895": 2.08418, "38900": 2.07717, 
"38905": 2.10209, "38910": 2.08272, "38915": 2.08974, "38920": 2.0564, "38925": 2.07057, "38930": 2.10603, "38935": 2.08372, "38940": 2.06502, "38945": 2.06929, "38950": 2.09222, "38955": 2.09509, "38960": 2.05295, "38965": 2.08435, "38970": 2.09769, "38975": 2.08833, "38980": 2.06768, "38985": 2.07665, "38990": 2.0914, "38995": 2.07171, "39000": 2.1162, "39005": 2.12022, "39010": 2.12031, "39015": 2.08028, "39020": 2.11041, "39025": 2.11564, "39030": 2.07136, "39035": 2.0862, "39040": 2.0807, "39045": 2.06153, "39050": 2.0946, "39055": 2.09159, "39060": 2.08813, "39065": 2.09255, "39070": 2.10044, "39075": 2.09049, "39080": 2.06924, "39085": 2.12261, "39090": 2.09032, "39095": 2.09906, "39100": 2.10165, "39105": 2.06879, "39110": 2.09568, "39115": 2.06839, "39120": 2.11475, "39125": 2.07884, "39130": 2.05602, "39135": 2.07242, "39140": 2.09796, "39145": 2.16103, "39150": 2.0824, "39155": 2.07289, "39160": 2.08288, "39165": 2.12334, "39170": 2.1277, "39175": 2.07197, "39180": 2.10793, "39185": 2.09579, "39190": 2.10861, "39195": 2.12408, "39200": 2.07425, "39205": 2.12762, "39210": 2.11554, "39215": 2.08841, "39220": 2.12713, "39225": 2.09778, "39230": 2.07257, "39235": 2.08117, "39240": 2.11235, "39245": 2.08054, "39250": 2.1222, "39255": 2.08711, "39260": 2.09814, "39265": 2.13251, "39270": 2.11065, "39275": 2.05111, "39280": 2.08538, "39285": 2.06057, "39290": 2.09019, "39295": 2.12196, "39300": 2.05462, "39305": 2.08904, "39310": 2.11564, "39315": 2.08275, "39320": 2.0842, "39325": 2.03136, "39330": 2.09224, "39335": 2.10937, "39340": 2.09558, "39345": 2.09295, "39350": 2.06029, "39355": 2.08848, "39360": 2.03144, "39365": 2.07249, "39370": 2.0787, "39375": 2.09503, "39380": 2.10639, "39385": 2.08991, "39390": 2.06143, "39395": 2.06174, "39400": 2.0809, "39405": 2.05601, "39410": 2.09356, "39415": 2.07378, "39420": 2.05557, "39425": 2.06351, "39430": 2.0432, "39435": 2.09602, "39440": 2.07372, "39445": 2.06594, "39450": 2.11469, "39455": 2.05211, "39460": 2.08049, "39465": 2.08161, "39470": 2.05431, "39475": 2.08896, "39480": 2.11473, "39485": 2.13417, "39490": 2.07046, "39495": 2.10523, "39500": 2.09872, "39505": 2.10838, "39510": 2.09773, "39515": 2.11764, "39520": 2.07706, "39525": 2.05721, "39530": 2.12028, "39535": 2.13358, "39540": 2.07738, "39545": 2.10609, "39550": 2.07532, "39555": 2.08566, "39560": 2.06165, "39565": 2.08684, "39570": 2.05586, "39575": 2.05705, "39580": 2.09418, "39585": 2.07387, "39590": 2.10318, "39595": 2.09246, "39600": 2.11305, "39605": 2.08801, "39610": 2.08911, "39615": 2.04209, "39620": 2.12843, "39625": 2.10041, "39630": 2.10382, "39635": 2.10729, "39640": 2.0561, "39645": 2.08055, "39650": 2.06283, "39655": 2.07752, "39660": 2.06905, "39665": 2.07471, "39670": 2.08403, "39675": 2.06954, "39680": 2.05556, "39685": 2.06499, "39690": 2.10487, "39695": 2.09916, "39700": 2.10261, "39705": 2.07751, "39710": 2.09246, "39715": 2.02716, "39720": 2.12904, "39725": 2.07896, "39730": 2.08499, "39735": 2.09681, "39740": 2.10773, "39745": 2.07214, "39750": 2.07886, "39755": 2.0948, "39760": 2.07746, "39765": 2.12859, "39770": 2.09706, "39775": 2.07145, "39780": 2.09975, "39785": 2.06322, "39790": 2.0728, "39795": 2.0958, "39800": 2.08167, "39805": 2.13203, "39810": 2.07299, "39815": 2.08737, "39820": 2.03736, "39825": 2.08012, "39830": 2.09906, "39835": 2.0883, "39840": 2.11777, "39845": 2.09045, "39850": 2.06224, "39855": 2.08159, "39860": 2.08307, "39865": 2.07518, "39870": 2.07462, "39875": 2.08994, "39880": 2.04715, "39885": 2.09604, "39890": 2.07714, "39895": 
2.06626, "39900": 2.06319, "39905": 2.09798, "39910": 2.08586, "39915": 2.05189, "39920": 2.0662, "39925": 2.10849, "39930": 2.09627, "39935": 2.07153, "39940": 2.11444, "39945": 2.09987, "39950": 2.03717, "39955": 2.10351, "39960": 2.11028, "39965": 2.11558, "39970": 2.05011, "39975": 2.07909, "39980": 2.0958, "39985": 2.06576, "39990": 2.0867, "39995": 2.09905, "40000": 2.07774, "40005": 2.06747, "40010": 2.06951, "40015": 2.11305, "40020": 2.07977, "40025": 2.09076, "40030": 2.09392, "40035": 2.09209, "40040": 2.07827, "40045": 2.07016, "40050": 2.10075, "40055": 2.11835, "40060": 2.11781, "40065": 2.11509, "40070": 2.07586, "40075": 2.0659, "40080": 2.09611, "40085": 2.08248, "40090": 2.09719, "40095": 2.09123, "40100": 2.07432, "40105": 2.03026, "40110": 2.08251, "40115": 2.07607, "40120": 2.06002, "40125": 2.07049, "40130": 2.06122, "40135": 2.08914, "40140": 2.09727, "40145": 2.06677, "40150": 2.121, "40155": 2.06414, "40160": 2.07109, "40165": 2.08946, "40170": 2.06656, "40175": 2.12517, "40180": 2.10863, "40185": 2.06046, "40190": 2.08868, "40195": 2.04949, "40200": 2.05513, "40205": 2.08302, "40210": 2.07846, "40215": 2.0904, "40220": 2.09545, "40225": 2.05543, "40230": 2.12514, "40235": 2.07173, "40240": 2.06885, "40245": 2.08719, "40250": 2.11797, "40255": 2.12188, "40260": 2.08176, "40265": 2.05078, "40270": 2.06068, "40275": 2.09753, "40280": 2.08053, "40285": 2.08436, "40290": 2.05217, "40295": 2.07151, "40300": 2.06079, "40305": 2.10695, "40310": 2.12351, "40315": 2.08588, "40320": 2.11818, "40325": 2.09165, "40330": 2.10146, "40335": 2.10884, "40340": 2.09817, "40345": 2.08901, "40350": 2.07, "40355": 2.09783, "40360": 2.09264, "40365": 2.06935, "40370": 2.11498, "40375": 2.09303, "40380": 2.10154, "40385": 2.06928, "40390": 2.10108, "40395": 2.08427, "40400": 2.04339, "40405": 2.05997, "40410": 2.07999, "40415": 2.06451, "40420": 2.05736, "40425": 2.06063, "40430": 2.1023, "40435": 2.04846, "40440": 2.05267, "40445": 2.11232, "40450": 2.06125, "40455": 2.02872, "40460": 2.05613, "40465": 2.08462, "40470": 2.07581, "40475": 2.08702, "40480": 2.07269, "40485": 2.06488, "40490": 2.13352, "40495": 2.07257, "40500": 2.05586, "40505": 2.08124, "40510": 2.02424, "40515": 2.08178, "40520": 2.10069, "40525": 2.1168, "40530": 2.08691, "40535": 2.04656, "40540": 2.08473, "40545": 2.10449, "40550": 2.05516, "40555": 2.0886, "40560": 2.11571, "40565": 2.08371, "40570": 2.05184, "40575": 2.05546, "40580": 2.09398, "40585": 2.06005, "40590": 2.05994, "40595": 2.08789, "40600": 2.05714, "40605": 2.05149, "40610": 2.07119, "40615": 2.0933, "40620": 2.04746, "40625": 2.06948, "40630": 2.11364, "40635": 2.10062, "40640": 2.07506, "40645": 2.06453, "40650": 2.09545, "40655": 2.11831, "40660": 2.06021, "40665": 2.06886, "40670": 2.09063, "40675": 2.06963, "40680": 2.08847, "40685": 2.08776, "40690": 2.08394, "40695": 2.07968, "40700": 2.08101, "40705": 2.06582, "40710": 2.0629, "40715": 2.13049, "40720": 2.0606, "40725": 2.02527, "40730": 2.0627, "40735": 2.08174, "40740": 2.11165, "40745": 2.07779, "40750": 2.07559, "40755": 2.07749, "40760": 2.10744, "40765": 2.11659, "40770": 2.03764, "40775": 2.06654, "40780": 2.04777, "40785": 2.08806, "40790": 2.09541, "40795": 2.11391, "40800": 2.05321, "40805": 2.04855, "40810": 2.04716, "40815": 2.08086, "40820": 2.10625, "40825": 2.08055, "40830": 2.0744, "40835": 2.07958, "40840": 2.10013, "40845": 2.10224, "40850": 2.06705, "40855": 2.08634, "40860": 2.06817, "40865": 2.09546, "40870": 2.0738, "40875": 2.05223, "40880": 2.06793, "40885": 2.04799, 
"40890": 2.11047, "40895": 2.0934, "40900": 2.09396, "40905": 2.06036, "40910": 2.09984, "40915": 2.0764, "40920": 2.09166, "40925": 2.09994, "40930": 2.09466, "40935": 2.07982, "40940": 2.0594, "40945": 2.06963, "40950": 2.08082, "40955": 2.11358, "40960": 2.10195, "40965": 2.06689, "40970": 2.07739, "40975": 2.08042, "40980": 2.07998, "40985": 2.08816, "40990": 2.08934, "40995": 2.10379, "41000": 2.07885, "41005": 2.06052, "41010": 2.07563, "41015": 2.11776, "41020": 2.06276, "41025": 2.12492, "41030": 2.0409, "41035": 2.07854, "41040": 2.07805, "41045": 2.06963, "41050": 2.07771, "41055": 2.11324, "41060": 2.09194, "41065": 2.09411, "41070": 2.13461, "41075": 2.10691, "41080": 2.1138, "41085": 2.06098, "41090": 2.08822, "41095": 2.07273, "41100": 2.07106, "41105": 2.09864, "41110": 2.07988, "41115": 2.0863, "41120": 2.09664, "41125": 2.08957, "41130": 2.04821, "41135": 2.06738, "41140": 2.07919, "41145": 2.04749, "41150": 2.05127, "41155": 2.07259, "41160": 2.10617, "41165": 2.06894, "41170": 2.09626, "41175": 2.08267, "41180": 2.07437, "41185": 2.05643, "41190": 2.03771, "41195": 2.04575, "41200": 2.01795, "41205": 2.02462, "41210": 2.09448, "41215": 2.12721, "41220": 2.07645, "41225": 2.11208, "41230": 2.02153, "41235": 2.08676, "41240": 2.07963, "41245": 2.06408, "41250": 2.08035, "41255": 2.06207, "41260": 2.07731, "41265": 2.0537, "41270": 2.09768, "41275": 2.08454, "41280": 2.08631, "41285": 2.1205, "41290": 2.12618, "41295": 2.04575, "41300": 2.16859, "41305": 2.03649, "41310": 2.08054, "41315": 2.11659, "41320": 2.10314, "41325": 2.05903, "41330": 2.0775, "41335": 2.09535, "41340": 2.12656, "41345": 2.10579, "41350": 2.0827, "41355": 2.08076, "41360": 2.06375, "41365": 2.11681, "41370": 2.08744, "41375": 2.08189, "41380": 2.04146, "41385": 2.05898, "41390": 2.03237, "41395": 2.12993, "41400": 2.11819, "41405": 2.13198, "41410": 2.06577, "41415": 2.07072, "41420": 2.05841, "41425": 2.08628, "41430": 2.08833, "41435": 2.10196, "41440": 2.08059, "41445": 2.08607, "41450": 2.07486, "41455": 2.05615, "41460": 2.08853, "41465": 2.08243, "41470": 2.11227, "41475": 2.06543, "41480": 2.06977, "41485": 2.07936, "41490": 2.08899, "41495": 2.08466, "41500": 2.09351, "41505": 2.04754, "41510": 2.10742, "41515": 2.08514, "41520": 2.09822, "41525": 2.09196, "41530": 2.03548, "41535": 2.07024, "41540": 2.08563, "41545": 2.08511, "41550": 2.0545, "41555": 2.06483, "41560": 2.10296, "41565": 2.04609, "41570": 2.06267, "41575": 2.09533, "41580": 2.09424, "41585": 2.11288, "41590": 2.06966, "41595": 2.07616, "41600": 2.05878, "41605": 2.09684, "41610": 2.04378, "41615": 2.11278, "41620": 2.0876, "41625": 2.07422, "41630": 2.08479, "41635": 2.11175, "41640": 2.08529, "41645": 2.11095, "41650": 2.05154, "41655": 2.0935, "41660": 2.09095, "41665": 2.07164, "41670": 2.05508, "41675": 2.07141, "41680": 2.0495, "41685": 2.11008, "41690": 2.08589, "41695": 2.04702, "41700": 2.08669, "41705": 2.09101, "41710": 2.1211, "41715": 2.0522, "41720": 2.06178, "41725": 2.05705, "41730": 2.09515, "41735": 2.10029, "41740": 2.09491, "41745": 2.09067, "41750": 2.11526, "41755": 2.07145, "41760": 2.06777, "41765": 2.08464, "41770": 2.09418, "41775": 2.08788, "41780": 2.08589, "41785": 2.06775, "41790": 2.07306, "41795": 2.126, "41800": 2.09313, "41805": 2.08993, "41810": 2.09321, "41815": 2.12676, "41820": 2.11462, "41825": 2.03665, "41830": 2.11056, "41835": 2.06368, "41840": 2.05288, "41845": 2.10644, "41850": 2.03981, "41855": 2.11403, "41860": 2.06178, "41865": 2.08457, "41870": 2.09622, "41875": 2.11226, "41880": 
2.08796, "41885": 2.09186, "41890": 2.10489, "41895": 2.07208, "41900": 2.13255, "41905": 2.07308, "41910": 2.05217, "41915": 2.1084, "41920": 2.09767, "41925": 2.05658, "41930": 2.10402, "41935": 2.06865, "41940": 2.11272, "41945": 2.06875, "41950": 2.07726, "41955": 2.06752, "41960": 2.13152, "41965": 2.04725, "41970": 2.12339, "41975": 2.0554, "41980": 2.09351, "41985": 2.07551, "41990": 2.05861, "41995": 2.08573, "42000": 2.07777, "42005": 2.11609, "42010": 2.08353, "42015": 2.08862, "42020": 2.08622, "42025": 2.0726, "42030": 2.06881, "42035": 2.10894, "42040": 2.11287, "42045": 2.06521, "42050": 2.04777, "42055": 2.0917, "42060": 2.08556, "42065": 2.05143, "42070": 2.09362, "42075": 2.09846, "42080": 2.08723, "42085": 2.04472, "42090": 2.08326, "42095": 2.03178, "42100": 2.0455, "42105": 2.07493, "42110": 2.09741, "42115": 2.08139, "42120": 2.06566, "42125": 2.0536, "42130": 2.09436, "42135": 2.06706, "42140": 2.0629, "42145": 2.06136, "42150": 2.07519, "42155": 2.07234, "42160": 2.0983, "42165": 2.03815, "42170": 2.09192, "42175": 2.06511, "42180": 2.10505, "42185": 2.08923, "42190": 2.09944, "42195": 2.06484, "42200": 2.0657, "42205": 2.08233, "42210": 2.0822, "42215": 2.07344, "42220": 2.03231, "42225": 2.08163, "42230": 2.08254, "42235": 2.09377, "42240": 2.07907, "42245": 2.10764, "42250": 2.12239, "42255": 2.07571, "42260": 2.09981, "42265": 2.08839, "42270": 2.111, "42275": 2.07698, "42280": 2.08633, "42285": 2.08687, "42290": 2.05526, "42295": 2.049, "42300": 2.03977, "42305": 2.05923, "42310": 2.09116, "42315": 2.06839, "42320": 2.09761, "42325": 2.07986, "42330": 2.08046, "42335": 2.0738, "42340": 2.11355, "42345": 2.07559, "42350": 2.08648, "42355": 2.02872, "42360": 2.0509, "42365": 2.0883, "42370": 2.04962, "42375": 2.09211, "42380": 2.08384, "42385": 2.05714, "42390": 2.05293, "42395": 2.11893, "42400": 2.10396, "42405": 2.11038, "42410": 2.08197, "42415": 2.05094, "42420": 2.10072, "42425": 2.11656, "42430": 2.03633, "42435": 2.07627, "42440": 2.08573, "42445": 2.10022, "42450": 2.09537, "42455": 2.11648, "42460": 2.10503, "42465": 2.105, "42470": 2.05946, "42475": 2.05906, "42480": 2.05223, "42485": 2.07294, "42490": 2.09454, "42495": 2.05551, "42500": 2.09675, "42505": 2.09792, "42510": 2.13401, "42515": 2.07687, "42520": 2.03125, "42525": 2.07626, "42530": 2.09297, "42535": 2.10588, "42540": 2.07016, "42545": 2.1064, "42550": 2.08506, "42555": 2.0631, "42560": 2.03696, "42565": 2.09783, "42570": 2.05487, "42575": 2.03304, "42580": 2.06506, "42585": 2.09463, "42590": 2.06696, "42595": 2.06223, "42600": 2.06267, "42605": 2.01827, "42610": 2.06947, "42615": 2.09564, "42620": 2.10399, "42625": 2.08653, "42630": 2.06623, "42635": 2.07953, "42640": 2.08898, "42645": 2.06067, "42650": 2.06799, "42655": 2.09362, "42660": 2.06145, "42665": 2.04679, "42670": 2.10983, "42675": 2.03118, "42680": 2.06561, "42685": 2.07369, "42690": 2.05634, "42695": 2.08638, "42700": 2.08358, "42705": 2.04125, "42710": 2.11309, "42715": 2.04703, "42720": 2.08307, "42725": 2.05468, "42730": 2.03716, "42735": 2.06333, "42740": 2.08591, "42745": 2.03939, "42750": 2.08386, "42755": 2.0263, "42760": 2.07654, "42765": 2.07802, "42770": 2.09866, "42775": 2.10532, "42780": 2.05851, "42785": 2.11115, "42790": 2.04301, "42795": 2.06323, "42800": 2.11151, "42805": 2.0882, "42810": 2.07987, "42815": 2.12385, "42820": 2.08356, "42825": 2.07908, "42830": 2.08092, "42835": 2.11498, "42840": 2.03728, "42845": 2.09405, "42850": 2.10307, "42855": 2.10513, "42860": 2.08659, "42865": 2.10612, "42870": 2.05987, 
"42875": 2.07084, "42880": 2.07502, "42885": 2.07354, "42890": 2.10571, "42895": 2.08462, "42900": 2.07515, "42905": 2.0949, "42910": 2.102, "42915": 2.04305, "42920": 2.05519, "42925": 2.10615, "42930": 2.0802, "42935": 2.09873, "42940": 2.058, "42945": 2.05407, "42950": 2.07349, "42955": 2.04869, "42960": 2.09916, "42965": 2.11872, "42970": 2.03981, "42975": 2.05457, "42980": 2.05466, "42985": 2.08653, "42990": 2.02359, "42995": 2.06888, "43000": 2.04624, "43005": 2.09453, "43010": 2.0959, "43015": 2.05569, "43020": 2.05371, "43025": 2.07423, "43030": 2.05116, "43035": 2.07798, "43040": 2.09751, "43045": 2.0357, "43050": 2.05781, "43055": 2.07035, "43060": 2.08848, "43065": 2.04942, "43070": 2.12099, "43075": 2.09022, "43080": 2.03864, "43085": 2.09761, "43090": 2.12483, "43095": 2.03597, "43100": 2.08358, "43105": 2.07313, "43110": 2.10244, "43115": 2.07812, "43120": 2.09791, "43125": 2.09108, "43130": 2.14423, "43135": 2.07771, "43140": 2.0922, "43145": 2.11304, "43150": 2.10174, "43155": 2.06753, "43160": 2.05341, "43165": 2.06835, "43170": 2.12108, "43175": 2.07961, "43180": 2.09268, "43185": 2.08119, "43190": 2.11017, "43195": 2.11744, "43200": 2.09395, "43205": 2.00348, "43210": 2.08561, "43215": 2.09472, "43220": 2.07761, "43225": 2.1365, "43230": 2.04111, "43235": 2.11743, "43240": 2.08236, "43245": 2.07073, "43250": 2.125, "43255": 2.05464, "43260": 2.1205, "43265": 2.04995, "43270": 2.09808, "43275": 2.11106, "43280": 2.06979, "43285": 2.07455, "43290": 2.09433, "43295": 2.083, "43300": 2.09904, "43305": 2.14927, "43310": 2.09832, "43315": 2.08465, "43320": 2.08986, "43325": 2.07195, "43330": 2.08233, "43335": 2.06983, "43340": 2.07392, "43345": 2.03706, "43350": 2.11094, "43355": 2.10122, "43360": 2.07828, "43365": 2.05312, "43370": 2.08897, "43375": 2.10205, "43380": 2.07513, "43385": 2.07722, "43390": 2.09842, "43395": 2.09612, "43400": 2.0717, "43405": 2.04834, "43410": 2.08102, "43415": 2.05047, "43420": 2.0753, "43425": 2.0677, "43430": 2.07259, "43435": 2.08581, "43440": 2.07935, "43445": 2.09366, "43450": 2.03801, "43455": 2.10856, "43460": 2.11664, "43465": 2.09783, "43470": 2.0872, "43475": 2.10261, "43480": 2.07119, "43485": 2.08221, "43490": 2.09884, "43495": 2.08688, "43500": 2.06225, "43505": 2.06733, "43510": 2.06369, "43515": 2.05728, "43520": 2.09161, "43525": 2.02245, "43530": 2.08247, "43535": 2.08312, "43540": 2.08717, "43545": 2.05751, "43550": 2.07613, "43555": 2.03826, "43560": 2.06431, "43565": 2.09832, "43570": 2.06092, "43575": 2.07613, "43580": 2.08465, "43585": 2.02167, "43590": 2.09279, "43595": 2.06046, "43600": 2.06706, "43605": 2.10225, "43610": 2.09245, "43615": 2.05194, "43620": 2.09592, "43625": 2.10348, "43630": 2.06391, "43635": 2.04836, "43640": 2.035, "43645": 2.06351, "43650": 2.1025, "43655": 2.09677, "43660": 2.07682, "43665": 2.06047, "43670": 2.04694, "43675": 2.05654, "43680": 2.08765, "43685": 2.03298, "43690": 2.055, "43695": 2.09882, "43700": 2.0736, "43705": 2.08353, "43710": 2.06763, "43715": 2.05616, "43720": 2.12317, "43725": 2.10234, "43730": 2.0545, "43735": 2.08047, "43740": 2.04324, "43745": 2.04617, "43750": 2.08426, "43755": 2.05411, "43760": 2.09261, "43765": 2.09077, "43770": 2.07127, "43775": 2.04827, "43780": 2.1098, "43785": 2.06187, "43790": 2.09891, "43795": 2.07816, "43800": 2.11879, "43805": 2.10038, "43810": 2.08541, "43815": 2.07622, "43820": 2.05033, "43825": 2.14569, "43830": 2.05438, "43835": 2.09096, "43840": 2.06368, "43845": 2.09312, "43850": 2.07709, "43855": 2.0675, "43860": 2.06315, "43865": 2.12168, 
"43870": 2.07119, "43875": 2.03831, "43880": 2.07018, "43885": 2.09958, "43890": 2.02656, "43895": 2.06581, "43900": 2.06375, "43905": 2.05223, "43910": 2.13475, "43915": 2.12534, "43920": 2.04062, "43925": 2.04967, "43930": 2.06188, "43935": 2.0446, "43940": 2.07534, "43945": 2.02627, "43950": 2.10785, "43955": 2.03801, "43960": 2.10098, "43965": 2.06516, "43970": 2.101, "43975": 2.0576, "43980": 2.04055, "43985": 2.07684, "43990": 2.04209, "43995": 2.08486, "44000": 2.0524, "44005": 2.0425, "44010": 2.10117, "44015": 2.04812, "44020": 2.08501, "44025": 2.10585, "44030": 2.07711, "44035": 2.05674, "44040": 2.00635, "44045": 2.11499, "44050": 2.0895, "44055": 2.1037, "44060": 2.11188, "44065": 2.06176, "44070": 2.08513, "44075": 2.0955, "44080": 2.01579, "44085": 2.07659, "44090": 2.04614, "44095": 2.06727, "44100": 2.10059, "44105": 2.0971, "44110": 2.06581, "44115": 2.07382, "44120": 2.05375, "44125": 2.08001, "44130": 2.09121, "44135": 2.0659, "44140": 2.05435, "44145": 2.05895, "44150": 2.10588, "44155": 2.09464, "44160": 2.0983, "44165": 2.08935, "44170": 2.05697, "44175": 2.07268, "44180": 2.07903, "44185": 2.05445, "44190": 2.09567, "44195": 2.03759, "44200": 2.05333, "44205": 2.08226, "44210": 2.05643, "44215": 2.06648, "44220": 2.07906, "44225": 2.07202, "44230": 2.08633, "44235": 2.06042, "44240": 2.11487, "44245": 2.07185, "44250": 2.07282, "44255": 2.08332, "44260": 2.04341, "44265": 2.05149, "44270": 2.08694, "44275": 2.045, "44280": 2.0804, "44285": 2.14317, "44290": 2.09798, "44295": 2.11738, "44300": 2.06795, "44305": 2.05623, "44310": 2.05517, "44315": 2.05062, "44320": 2.04858, "44325": 2.07518, "44330": 2.09992, "44335": 2.08648, "44340": 2.08618, "44345": 2.07768, "44350": 2.06476, "44355": 2.09561, "44360": 2.10484, "44365": 2.08599, "44370": 2.085, "44375": 2.07897, "44380": 2.08758, "44385": 2.06664, "44390": 2.08478, "44395": 2.10874, "44400": 2.0711, "44405": 2.05143, "44410": 2.05873, "44415": 2.08491, "44420": 2.07522, "44425": 2.09854, "44430": 2.05948, "44435": 2.08115, "44440": 2.03058, "44445": 2.11775, "44450": 2.05834, "44455": 2.08218, "44460": 2.04664, "44465": 2.06269, "44470": 2.08872, "44475": 2.09349, "44480": 2.06507, "44485": 2.06349, "44490": 2.06759, "44495": 2.09603, "44500": 2.10457, "44505": 2.06651, "44510": 2.07032, "44515": 2.0576, "44520": 2.04453, "44525": 2.03562, "44530": 2.07104, "44535": 2.04607, "44540": 2.10241, "44545": 2.10772, "44550": 2.06104, "44555": 2.10925, "44560": 2.03641, "44565": 2.08041, "44570": 2.05852, "44575": 2.11104, "44580": 2.08174, "44585": 2.03389, "44590": 2.08376, "44595": 2.06728, "44600": 2.08345, "44605": 2.06194, "44610": 2.10668, "44615": 2.05872, "44620": 2.03894, "44625": 2.07775, "44630": 2.09312, "44635": 2.06048, "44640": 2.02905, "44645": 2.05445, "44650": 2.09026, "44655": 2.06558, "44660": 2.0902, "44665": 2.09309, "44670": 2.08866, "44675": 2.09727, "44680": 2.11234, "44685": 2.0961, "44690": 2.05999, "44695": 2.07325, "44700": 2.06624, "44705": 2.05009, "44710": 2.1001, "44715": 2.0399, "44720": 2.06423, "44725": 2.07441, "44730": 2.10368, "44735": 2.04533, "44740": 2.03997, "44745": 2.04629, "44750": 2.10593, "44755": 2.10404, "44760": 2.11863, "44765": 2.07555, "44770": 2.07534, "44775": 2.05804, "44780": 2.04805, "44785": 2.08487, "44790": 2.10028, "44795": 2.08128, "44800": 2.06022, "44805": 2.08436, "44810": 2.09004, "44815": 2.09164, "44820": 2.08624, "44825": 2.12429, "44830": 2.05199, "44835": 2.10916, "44840": 2.09287, "44845": 2.10427, "44850": 2.09759, "44855": 2.08344, "44860": 
2.06149, "44865": 2.12219, "44870": 2.08005, "44875": 2.0975, "44880": 2.08179, "44885": 2.10516, "44890": 2.05539, "44895": 2.10155, "44900": 2.04811, "44905": 2.04864, "44910": 2.06252, "44915": 2.07521, "44920": 2.08264, "44925": 2.04588, "44930": 2.07939, "44935": 2.01596, "44940": 2.10286, "44945": 2.07524, "44950": 2.08929, "44955": 2.01702, "44960": 2.14035, "44965": 2.07547, "44970": 2.0677, "44975": 2.09776, "44980": 2.06291, "44985": 2.07098, "44990": 2.10492, "44995": 2.0293, "45000": 2.08816, "45005": 2.09484, "45010": 2.06521, "45015": 2.07308, "45020": 2.06276, "45025": 2.07714, "45030": 2.08023, "45035": 2.07351, "45040": 2.05858, "45045": 2.09181, "45050": 2.10515, "45055": 2.07558, "45060": 2.0793, "45065": 2.08465, "45070": 2.06062, "45075": 2.04888, "45080": 2.05359, "45085": 2.0489, "45090": 2.06352, "45095": 2.08538, "45100": 2.07418, "45105": 2.12817, "45110": 2.07882, "45115": 2.06504, "45120": 2.12793, "45125": 2.08643, "45130": 2.0532, "45135": 2.05348, "45140": 2.05285, "45145": 2.0494, "45150": 2.05579, "45155": 2.06805, "45160": 2.07002, "45165": 2.10317, "45170": 2.08008, "45175": 2.05273, "45180": 2.08725, "45185": 2.07514, "45190": 2.07956, "45195": 2.09403, "45200": 2.08068, "45205": 2.05681, "45210": 2.10265, "45215": 2.07401, "45220": 2.03295, "45225": 2.08387, "45230": 2.09452, "45235": 2.09351, "45240": 2.08544, "45245": 2.10247, "45250": 2.02006, "45255": 2.0486, "45260": 2.08727, "45265": 2.08116, "45270": 2.05726, "45275": 2.06394, "45280": 2.02169, "45285": 2.06962, "45290": 2.06616, "45295": 2.1092, "45300": 2.0448, "45305": 2.06576, "45310": 2.0715, "45315": 2.09757, "45320": 2.11127, "45325": 2.04869, "45330": 2.04769, "45335": 2.08722, "45340": 2.10572, "45345": 2.07277, "45350": 2.07974, "45355": 2.06945, "45360": 2.03929, "45365": 2.08586, "45370": 2.09495, "45375": 2.08478, "45380": 2.09282, "45385": 2.12176, "45390": 2.11724, "45395": 2.08978, "45400": 2.07422, "45405": 2.06236, "45410": 2.07634, "45415": 2.13137, "45420": 2.08444, "45425": 2.07083, "45430": 2.04473, "45435": 2.05328, "45440": 2.06859, "45445": 2.08549, "45450": 2.09893, "45455": 2.06136, "45460": 2.07484, "45465": 2.05141, "45470": 2.0438, "45475": 2.08902, "45480": 2.09218, "45485": 2.07407, "45490": 2.07094, "45495": 2.04019, "45500": 2.0388, "45505": 2.09077, "45510": 2.10264, "45515": 2.06617, "45520": 2.04904, "45525": 2.05917, "45530": 2.1204, "45535": 2.04881, "45540": 2.04605, "45545": 2.04391, "45550": 2.09508, "45555": 2.05291, "45560": 2.06029, "45565": 2.10655, "45570": 2.04498, "45575": 2.06865, "45580": 2.06731, "45585": 2.12216, "45590": 2.00205, "45595": 2.0831, "45600": 2.06477, "45605": 2.11154, "45610": 2.10037, "45615": 2.04114, "45620": 2.07519, "45625": 2.06766, "45630": 2.06923, "45635": 2.09788, "45640": 2.07967, "45645": 2.10045, "45650": 2.046, "45655": 2.03866, "45660": 2.07519, "45665": 2.10256, "45670": 2.07668, "45675": 2.03846, "45680": 2.06562, "45685": 2.07758, "45690": 2.04282, "45695": 2.11615, "45700": 2.06559, "45705": 2.01092, "45710": 2.09812, "45715": 2.09913, "45720": 2.03888, "45725": 2.10011, "45730": 2.05784, "45735": 2.04125, "45740": 2.12498, "45745": 2.05134, "45750": 2.03925, "45755": 2.10156, "45760": 2.06005, "45765": 2.04934, "45770": 2.08643, "45775": 2.08164, "45780": 2.03911, "45785": 2.04899, "45790": 2.08574, "45795": 2.07547, "45800": 2.09973, "45805": 2.08043, "45810": 2.05083, "45815": 2.08326, "45820": 2.09965, "45825": 2.05647, "45830": 2.02828, "45835": 2.0486, "45840": 2.0678, "45845": 2.03703, "45850": 2.07962, 
"45855": 2.07386, "45860": 2.11809, "45865": 2.09053, "45870": 2.02965, "45875": 2.12998, "45880": 2.09541, "45885": 2.05756, "45890": 2.05223, "45895": 2.0665, "45900": 2.05502, "45905": 2.09886, "45910": 2.06755, "45915": 2.10038, "45920": 2.10521, "45925": 2.09439, "45930": 2.03924, "45935": 2.10385, "45940": 2.08461, "45945": 2.09951, "45950": 2.05645, "45955": 2.04856, "45960": 2.0755, "45965": 2.09545, "45970": 2.0653, "45975": 2.07922, "45980": 2.11856, "45985": 2.08459, "45990": 2.07212, "45995": 2.06865, "46000": 2.09077, "46005": 2.10428, "46010": 2.12967, "46015": 2.09169, "46020": 2.0259, "46025": 2.03477, "46030": 2.0479, "46035": 2.07823, "46040": 2.09268, "46045": 2.07771, "46050": 2.099, "46055": 2.09088, "46060": 2.04644, "46065": 2.06287, "46070": 2.05845, "46075": 2.08332, "46080": 2.07525, "46085": 2.08587, "46090": 2.03497, "46095": 2.06251, "46100": 2.04578, "46105": 2.09067, "46110": 2.06658, "46115": 2.04478, "46120": 2.03738, "46125": 2.08431, "46130": 2.05985, "46135": 2.05203, "46140": 2.08247, "46145": 2.05928, "46150": 2.07043, "46155": 2.06763, "46160": 2.07297, "46165": 2.09289, "46170": 2.10558, "46175": 2.07666, "46180": 2.0587, "46185": 2.0273, "46190": 2.08471, "46195": 2.07105, "46200": 2.06231, "46205": 2.09336, "46210": 2.02372, "46215": 2.03402, "46220": 2.07079, "46225": 2.08243, "46230": 2.02345, "46235": 2.07592, "46240": 2.04985, "46245": 2.06603, "46250": 2.06207, "46255": 2.07296, "46260": 2.08557, "46265": 2.08264, "46270": 2.06673, "46275": 2.07119, "46280": 2.09106, "46285": 2.05611, "46290": 2.06881, "46295": 2.05452, "46300": 2.07764, "46305": 2.09566, "46310": 2.05068, "46315": 2.09829, "46320": 2.06178, "46325": 2.07886, "46330": 2.02255, "46335": 2.06242, "46340": 2.09478, "46345": 2.0869, "46350": 2.07567, "46355": 2.05452, "46360": 2.07665, "46365": 2.10791, "46370": 2.05171, "46375": 2.06997, "46380": 2.07845, "46385": 2.04695, "46390": 2.0664, "46395": 2.08379, "46400": 2.04191, "46405": 2.05803, "46410": 2.0913, "46415": 2.05292, "46420": 2.05295, "46425": 2.06381, "46430": 2.08385, "46435": 2.04121, "46440": 2.11345, "46445": 2.0582, "46450": 2.04095, "46455": 2.07625, "46460": 2.0756, "46465": 2.04828, "46470": 2.00784, "46475": 2.10199, "46480": 2.1019, "46485": 2.08707, "46490": 2.11457, "46495": 2.11167, "46500": 2.08424, "46505": 2.06479, "46510": 2.08324, "46515": 2.07591, "46520": 2.01535, "46525": 2.08288, "46530": 2.05562, "46535": 2.08371, "46540": 2.08499, "46545": 2.05293, "46550": 2.07034, "46555": 2.09115, "46560": 2.06091, "46565": 2.09622, "46570": 2.07035, "46575": 2.08384, "46580": 2.07227, "46585": 2.06422, "46590": 2.05804, "46595": 2.06818, "46600": 2.09237, "46605": 2.08449, "46610": 2.10173, "46615": 2.06634, "46620": 2.06547, "46625": 2.07444, "46630": 2.09061, "46635": 2.06485, "46640": 2.11094, "46645": 2.09748, "46650": 2.03266, "46655": 2.03421, "46660": 2.05526, "46665": 2.1, "46670": 2.08106, "46675": 2.05279, "46680": 2.04088, "46685": 2.03686, "46690": 2.09805, "46695": 2.06357, "46700": 2.04305, "46705": 2.06448, "46710": 2.04862, "46715": 2.10256, "46720": 2.0869, "46725": 2.08144, "46730": 2.06923, "46735": 2.05084, "46740": 2.08516, "46745": 2.06862, "46750": 2.09495, "46755": 2.05635, "46760": 2.0747, "46765": 2.07003, "46770": 2.05399, "46775": 2.09179, "46780": 2.11602, "46785": 2.07284, "46790": 2.09308, "46795": 2.08876, "46800": 2.07215, "46805": 2.06859, "46810": 2.07961, "46815": 2.03332, "46820": 2.05926, "46825": 2.07808, "46830": 2.10042, "46835": 2.07063, "46840": 2.068, "46845": 
2.05304, "46850": 2.11527, "46855": 2.08567, "46860": 2.05474, "46865": 2.10613, "46870": 2.07679, "46875": 2.01886, "46880": 2.12756, "46885": 2.09725, "46890": 2.08552, "46895": 2.05633, "46900": 2.01686, "46905": 2.06769, "46910": 2.04179, "46915": 2.05584, "46920": 2.04583, "46925": 2.07509, "46930": 2.08207, "46935": 2.06534, "46940": 2.09303, "46945": 2.07252, "46950": 2.04677, "46955": 2.05665, "46960": 2.09452, "46965": 2.0742, "46970": 2.02271, "46975": 2.04259, "46980": 2.10343, "46985": 2.0972, "46990": 2.07563, "46995": 2.03344, "47000": 2.07891, "47005": 2.08623, "47010": 2.06299, "47015": 2.07992, "47020": 2.06387, "47025": 2.07331, "47030": 2.05118, "47035": 2.09066, "47040": 2.05555, "47045": 2.05944, "47050": 2.04615, "47055": 2.03333, "47060": 2.05585, "47065": 2.05712, "47070": 2.06093, "47075": 2.05374, "47080": 2.04886, "47085": 2.10429, "47090": 2.03547, "47095": 2.05022, "47100": 2.12415, "47105": 2.05784, "47110": 2.06547, "47115": 2.09759, "47120": 2.05819, "47125": 2.03048, "47130": 2.06126, "47135": 2.08719, "47140": 2.07801, "47145": 2.09129, "47150": 2.05985, "47155": 2.04291, "47160": 2.08977, "47165": 2.05763, "47170": 2.07385, "47175": 2.08702, "47180": 2.06909, "47185": 2.05188, "47190": 2.0723, "47195": 2.09174, "47200": 2.08922, "47205": 2.05165, "47210": 2.08891, "47215": 2.0685, "47220": 2.04546, "47225": 2.10469, "47230": 2.08503, "47235": 2.04132, "47240": 2.1063, "47245": 2.05878, "47250": 2.11248, "47255": 2.10321, "47260": 2.07822, "47265": 2.08556, "47270": 2.07454, "47275": 2.09125, "47280": 2.07576, "47285": 2.08713, "47290": 2.04335, "47295": 2.05607, "47300": 2.03726, "47305": 2.07172, "47310": 2.06456, "47315": 2.03476, "47320": 2.07633, "47325": 2.05645, "47330": 2.10262, "47335": 2.09194, "47340": 2.07981, "47345": 2.09325, "47350": 2.04899, "47355": 2.06896, "47360": 2.07746, "47365": 2.08215, "47370": 2.09361, "47375": 2.06071, "47380": 2.03291, "47385": 2.05885, "47390": 2.10792, "47395": 2.05476, "47400": 2.08556, "47405": 2.06157, "47410": 2.08264, "47415": 2.06895, "47420": 2.03062, "47425": 2.03532, "47430": 2.09022, "47435": 2.06731, "47440": 2.07615, "47445": 2.10818, "47450": 2.08151, "47455": 2.04148, "47460": 2.09208, "47465": 2.05921, "47470": 2.03103, "47475": 2.09031, "47480": 2.06208, "47485": 2.04012, "47490": 2.06185, "47495": 2.09636, "47500": 2.04678, "47505": 2.07127, "47510": 2.0592, "47515": 2.10947, "47520": 2.10676, "47525": 2.02364, "47530": 2.08077, "47535": 2.04363, "47540": 2.09284, "47545": 2.06592, "47550": 2.0898, "47555": 2.08677, "47560": 2.0719, "47565": 2.06776, "47570": 2.02818, "47575": 2.07981, "47580": 2.06527, "47585": 2.06949, "47590": 2.05902, "47595": 2.0884, "47600": 2.10238, "47605": 2.09214, "47610": 2.00822, "47615": 2.08704, "47620": 2.03714, "47625": 2.12603, "47630": 2.06632, "47635": 2.06549, "47640": 2.07805, "47645": 2.04556, "47650": 2.02181, "47655": 2.06916, "47660": 2.07463, "47665": 2.07699, "47670": 2.02977, "47675": 2.07421, "47680": 2.02881, "47685": 1.98531, "47690": 2.06636, "47695": 2.09139, "47700": 2.06102, "47705": 2.10628, "47710": 2.07086, "47715": 2.07448, "47720": 2.03105, "47725": 2.10169, "47730": 2.09344, "47735": 2.04964, "47740": 2.0522, "47745": 2.04738, "47750": 2.04882, "47755": 2.07742, "47760": 2.07604, "47765": 2.05105, "47770": 2.04307, "47775": 2.06143, "47780": 2.06952, "47785": 2.06469, "47790": 2.04232, "47795": 2.0471, "47800": 2.08947, "47805": 2.05223, "47810": 2.08715, "47815": 2.12092, "47820": 2.06325, "47825": 2.05473, "47830": 2.07774, "47835": 
2.05914, "47840": 2.07169, "47845": 2.05114, "47850": 2.09238, "47855": 2.05355, "47860": 2.06764, "47865": 2.06691, "47870": 2.05597, "47875": 2.08229, "47880": 2.01038, "47885": 2.06397, "47890": 2.11614, "47895": 2.05798, "47900": 2.03916, "47905": 2.05639, "47910": 2.08558, "47915": 2.05391, "47920": 2.09305, "47925": 2.07559, "47930": 2.10064, "47935": 2.0741, "47940": 2.06465, "47945": 2.10485, "47950": 2.0583, "47955": 2.02519, "47960": 2.07603, "47965": 2.13031, "47970": 2.09145, "47975": 2.09073, "47980": 2.07772, "47985": 2.0264, "47990": 2.08333, "47995": 2.03901, "48000": 2.09159, "48005": 2.0638, "48010": 2.08421, "48015": 2.06304, "48020": 2.1506, "48025": 2.03016, "48030": 2.05132, "48035": 2.05014, "48040": 2.05798, "48045": 2.03699, "48050": 2.09448, "48055": 1.99966, "48060": 2.02109, "48065": 2.04893, "48070": 2.09192, "48075": 2.07929, "48080": 2.06694, "48085": 2.03916, "48090": 2.08397, "48095": 2.05137, "48100": 2.0879, "48105": 2.05624, "48110": 2.06068, "48115": 2.07805, "48120": 2.07359, "48125": 2.13394, "48130": 2.04965, "48135": 2.08158, "48140": 2.05866, "48145": 2.02379, "48150": 2.09887, "48155": 2.03755, "48160": 2.06686, "48165": 2.06409, "48170": 2.04534, "48175": 2.06635, "48180": 2.07212, "48185": 2.03094, "48190": 2.02897, "48195": 2.03575, "48200": 2.06188, "48205": 2.06738, "48210": 2.02559, "48215": 2.08616, "48220": 2.05561, "48225": 2.04052, "48230": 2.07196, "48235": 2.11136, "48240": 2.05794, "48245": 2.06737, "48250": 2.07837, "48255": 2.05463, "48260": 2.07716, "48265": 2.07379, "48270": 2.07926, "48275": 2.08993, "48280": 2.02321, "48285": 2.09526, "48290": 2.06084, "48295": 2.06617, "48300": 2.05813, "48305": 2.08867, "48310": 2.06935, "48315": 2.07798, "48320": 2.05924, "48325": 2.08882, "48330": 2.06483, "48335": 2.10358, "48340": 2.09128, "48345": 2.11045, "48350": 2.06652, "48355": 2.08884, "48360": 2.05665, "48365": 2.07388, "48370": 2.05893, "48375": 2.05986, "48380": 2.08106, "48385": 2.06996, "48390": 2.10095, "48395": 2.01121, "48400": 2.05127, "48405": 2.07467, "48410": 2.09037, "48415": 2.06865, "48420": 2.09007, "48425": 2.06079, "48430": 2.06521, "48435": 2.02739, "48440": 2.10717, "48445": 2.05223, "48450": 2.10446, "48455": 2.0384, "48460": 2.06292, "48465": 2.08655, "48470": 2.02516, "48475": 2.0624, "48480": 2.11971, "48485": 2.05395, "48490": 2.084, "48495": 2.10958, "48500": 2.06141, "48505": 2.08756, "48510": 2.08186, "48515": 2.09279, "48520": 2.03065, "48525": 2.0628, "48530": 2.05093, "48535": 2.07876, "48540": 2.02232, "48545": 2.10341, "48550": 2.05488, "48555": 2.09233, "48560": 2.03357, "48565": 2.04982, "48570": 2.0136, "48575": 2.07041, "48580": 2.1033, "48585": 2.01674, "48590": 2.06235, "48595": 2.02984, "48600": 2.08884, "48605": 2.11419, "48610": 2.04438, "48615": 2.05652, "48620": 2.06158, "48625": 2.04961, "48630": 2.1241, "48635": 2.08271, "48640": 2.07697, "48645": 2.06425, "48650": 2.07697, "48655": 2.05863, "48660": 2.07201, "48665": 2.06089, "48670": 2.03116, "48675": 2.04447, "48680": 2.0546, "48685": 2.06197, "48690": 2.08681, "48695": 2.09098, "48700": 2.0912, "48705": 2.05688, "48710": 2.07248, "48715": 2.10305, "48720": 2.01454, "48725": 2.06191, "48730": 2.05862, "48735": 2.04324, "48740": 2.07337, "48745": 2.01396, "48750": 2.0615, "48755": 2.06354, "48760": 2.0398, "48765": 2.09339, "48770": 2.08698, "48775": 2.03754, "48780": 2.022, "48785": 2.07808, "48790": 2.0646, "48795": 2.03636, "48800": 2.054, "48805": 2.0582, "48810": 2.05922, "48815": 2.05103, "48820": 2.11689, "48825": 2.03383, 
"48830": 2.04439, "48835": 2.03775, "48840": 2.05918, "48845": 2.08381, "48850": 2.04677, "48855": 2.05422, "48860": 2.06852, "48865": 2.07067, "48870": 2.08754, "48875": 2.05861, "48880": 2.03372, "48885": 2.07385, "48890": 2.04034, "48895": 2.05532, "48900": 2.08891, "48905": 2.08005, "48910": 2.05703, "48915": 2.09895, "48920": 2.09719, "48925": 2.07091, "48930": 2.09345, "48935": 2.07371, "48940": 2.03242, "48945": 2.04559, "48950": 2.07501, "48955": 2.09815, "48960": 2.05712, "48965": 2.03633, "48970": 2.03871, "48975": 2.07356, "48980": 2.12629, "48985": 2.08467, "48990": 2.06851, "48995": 2.09471, "49000": 2.08692, "49005": 2.09785, "49010": 2.10091, "49015": 2.05038, "49020": 2.07043, "49025": 2.07125, "49030": 2.10069, "49035": 2.05366, "49040": 2.10207, "49045": 2.07703, "49050": 2.0736, "49055": 2.06995, "49060": 2.03276, "49065": 2.06475, "49070": 2.05069, "49075": 2.05594, "49080": 2.0914, "49085": 2.11351, "49090": 2.05301, "49095": 2.02943, "49100": 2.06276, "49105": 2.08691, "49110": 2.09454, "49115": 2.13349, "49120": 2.10402, "49125": 2.04014, "49130": 2.09307, "49135": 2.08429, "49140": 2.07361, "49145": 2.1035, "49150": 2.0616, "49155": 2.04294, "49160": 2.0187, "49165": 2.0823, "49170": 2.06435, "49175": 2.04259, "49180": 2.02003, "49185": 2.04637, "49190": 2.04918, "49195": 2.05845, "49200": 2.02989, "49205": 2.05558, "49210": 2.10436, "49215": 2.02075, "49220": 2.04222, "49225": 2.06247, "49230": 2.03759, "49235": 2.07176, "49240": 2.0819, "49245": 2.05008, "49250": 2.01669, "49255": 2.01894, "49260": 2.05473, "49265": 2.06938, "49270": 2.05168, "49275": 2.07829, "49280": 2.06229, "49285": 2.04878, "49290": 2.06645, "49295": 2.08, "49300": 2.03372, "49305": 2.07099, "49310": 2.05391, "49315": 2.04374, "49320": 2.09011, "49325": 2.08769, "49330": 2.07254, "49335": 2.02426, "49340": 2.03667, "49345": 2.07125, "49350": 2.0465, "49355": 2.08068, "49360": 2.05367, "49365": 2.05487, "49370": 2.0462, "49375": 2.11636, "49380": 2.08227, "49385": 2.08592, "49390": 2.07334, "49395": 2.06716, "49400": 2.08071, "49405": 2.05514, "49410": 2.09084, "49415": 2.08543, "49420": 2.05031, "49425": 2.0525, "49430": 2.09317, "49435": 2.12083, "49440": 2.11773, "49445": 2.06021, "49450": 2.02671, "49455": 2.05801, "49460": 2.08528, "49465": 1.99455, "49470": 2.10493, "49475": 2.06831, "49480": 2.07821, "49485": 2.05022, "49490": 2.04662, "49495": 2.07058, "49500": 2.03186, "49505": 2.08863, "49510": 2.04883, "49515": 2.0727, "49520": 2.10407, "49525": 2.07269, "49530": 2.13484, "49535": 2.07129, "49540": 2.0427, "49545": 2.03376, "49550": 2.0948, "49555": 2.04278, "49560": 2.09404, "49565": 2.1096, "49570": 2.06409, "49575": 2.10211, "49580": 2.11518, "49585": 2.0608, "49590": 2.07138, "49595": 2.07828, "49600": 2.09229, "49605": 2.07553, "49610": 2.08524, "49615": 2.05425, "49620": 2.09243, "49625": 2.08729, "49630": 2.09375, "49635": 2.06376, "49640": 2.07146, "49645": 2.0138, "49650": 2.07328, "49655": 2.11781, "49660": 2.10946, "49665": 2.06677, "49670": 2.07143, "49675": 2.0579, "49680": 2.04182, "49685": 2.04716, "49690": 2.05934, "49695": 2.1062, "49700": 2.01727, "49705": 2.09304, "49710": 2.08862, "49715": 2.0932, "49720": 2.06126, "49725": 2.08856, "49730": 2.09174, "49735": 2.00682, "49740": 2.07186, "49745": 2.06038, "49750": 2.08104, "49755": 2.09732, "49760": 1.98812, "49765": 2.04844, "49770": 2.03609, "49775": 2.05737, "49780": 2.05342, "49785": 2.03055, "49790": 2.03272, "49795": 2.09654, "49800": 2.05742, "49805": 2.00764, "49810": 2.07693, "49815": 2.02133, "49820": 
2.0436, "49825": 2.03253, "49830": 2.0599, "49835": 2.05013, "49840": 2.06548, "49845": 2.12199, "49850": 2.11845, "49855": 2.0707, "49860": 2.04665, "49865": 2.09406, "49870": 2.06321, "49875": 2.04214, "49880": 2.06779, "49885": 2.08386, "49890": 2.08688, "49895": 2.03537, "49900": 2.07812, "49905": 2.07544, "49910": 2.03745, "49915": 2.06001, "49920": 2.05695, "49925": 2.02566, "49930": 2.10613, "49935": 2.07111, "49940": 2.04474, "49945": 2.08828, "49950": 2.04827, "49955": 2.09656, "49960": 2.05813, "49965": 2.07295, "49970": 2.09724, "49975": 2.0789, "49980": 2.12127, "49985": 2.06448, "49990": 2.08629, "49995": 2.06751, "50000": 2.06101, "50005": 2.06222, "50010": 2.09129, "50015": 2.05239, "50020": 2.03168, "50025": 2.12851, "50030": 2.02674, "50035": 2.04357, "50040": 2.08809, "50045": 2.08094, "50050": 2.10196, "50055": 2.07649, "50060": 2.05904, "50065": 2.06887, "50070": 2.03247, "50075": 2.05546, "50080": 2.09326, "50085": 2.08009, "50090": 2.04731, "50095": 2.07661, "50100": 2.03236, "50105": 2.08938, "50110": 2.0539, "50115": 2.07409, "50120": 2.06565, "50125": 2.06032, "50130": 2.01788, "50135": 2.04456, "50140": 2.11141, "50145": 2.04809, "50150": 2.07425, "50155": 2.06935, "50160": 2.03177, "50165": 2.06114, "50170": 2.06159, "50175": 2.08831, "50180": 2.0662, "50185": 2.06791, "50190": 2.02753, "50195": 2.05852, "50200": 2.04064, "50205": 2.07874, "50210": 2.07266, "50215": 2.08816, "50220": 2.07227, "50225": 2.05426, "50230": 2.08458, "50235": 2.09265, "50240": 2.08024, "50245": 2.06222, "50250": 2.0521, "50255": 2.06963, "50260": 2.02538, "50265": 2.03456, "50270": 1.99057, "50275": 2.05283, "50280": 2.06738, "50285": 2.08213, "50290": 2.06501, "50295": 2.09978, "50300": 2.05785, "50305": 2.08796, "50310": 2.04447, "50315": 2.0694, "50320": 2.07614, "50325": 2.0574, "50330": 2.07601, "50335": 2.08422, "50340": 2.07279, "50345": 2.05891, "50350": 2.0569, "50355": 2.07347, "50360": 2.06542, "50365": 2.05589, "50370": 2.05463, "50375": 2.06155, "50380": 2.07304, "50385": 2.06704, "50390": 2.05155, "50395": 2.08518, "50400": 2.04379, "50405": 2.07479, "50410": 2.05525, "50415": 2.01749, "50420": 2.06907, "50425": 2.05848, "50430": 2.08108, "50435": 2.06966, "50440": 2.01787, "50445": 2.03674, "50450": 2.09932, "50455": 2.06312, "50460": 2.06693, "50465": 2.04834, "50470": 2.07289, "50475": 2.04915, "50480": 2.10214, "50485": 2.10083, "50490": 2.06911, "50495": 2.09727, "50500": 2.06961, "50505": 2.09485, "50510": 2.03031, "50515": 2.06733, "50520": 2.06097, "50525": 2.09114, "50530": 2.08451, "50535": 2.0624, "50540": 2.03751, "50545": 2.03008, "50550": 2.07873, "50555": 2.05822, "50560": 2.04461, "50565": 2.09435, "50570": 2.01983, "50575": 2.0403, "50580": 2.0339, "50585": 2.07587, "50590": 2.06135, "50595": 2.08952, "50600": 2.05977, "50605": 2.05378, "50610": 2.08699, "50615": 2.09747, "50620": 2.09527, "50625": 2.05416, "50630": 2.04, "50635": 2.10136, "50640": 2.08818, "50645": 2.09948, "50650": 2.05318, "50655": 2.05188, "50660": 2.03457, "50665": 2.02946, "50670": 2.04924, "50675": 2.08535, "50680": 2.04751, "50685": 2.03453, "50690": 2.04694, "50695": 2.04856, "50700": 2.10647, "50705": 2.05531, "50710": 2.12054, "50715": 2.01025, "50720": 2.04476, "50725": 2.08743, "50730": 2.07428, "50735": 2.08882, "50740": 2.02705, "50745": 2.09111, "50750": 2.07687, "50755": 2.09712, "50760": 2.11246, "50765": 2.06464, "50770": 2.07015, "50775": 2.07245, "50780": 2.07073, "50785": 2.04104, "50790": 2.06147, "50795": 2.06839, "50800": 2.09148, "50805": 2.06424, "50810": 
2.06386, "50815": 2.06304, "50820": 2.03577, "50825": 2.0661, "50830": 2.02841, "50835": 2.04344, "50840": 2.08649, "50845": 2.04099, "50850": 2.07127, "50855": 2.06397, "50860": 2.07038}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 956236928.0, "5": 967337728.0, "10": 971387136.0, "15": 946463744.0, "20": 961330176.0, "25": 1083544448.0, "30": 1211120384.0, "35": 1297793792.0, "40": 1271957632.0, "45": 1175083776.0, "50": 1127233920.0, "55": 1082967936.0, "60": 1045344128.0, "65": 1033579264.0, "70": 1003119040.0, "75": 992980928.0, "80": 1013412160.0, "85": 1014307264.0, "90": 987263168.0, "95": 959646848.0, "100": 971491392.0, "105": 981338624.0, "110": 977498048.0, "115": 976790720.0, "120": 962167232.0, "125": 942458880.0, "130": 975917184.0, "135": 965006400.0, "140": 963484352.0, "145": 976401856.0, "150": 921441152.0, "155": 968141760.0, "160": 956385152.0, "165": 959895360.0, "170": 974386688.0, "175": 949088384.0, "180": 946685440.0, "185": 972011328.0, "190": 969052736.0, "195": 985118016.0, "200": 945771008.0, "205": 958353984.0, "210": 979431616.0, "215": 967489088.0, "220": 956439168.0, "225": 962396992.0, "230": 948176640.0, "235": 965219904.0, "240": 966069120.0, "245": 969165568.0, "250": 974439680.0, "255": 925060480.0, "260": 965638592.0, "265": 970678400.0, "270": 959137664.0, "275": 953996416.0, "280": 963432128.0, "285": 945779264.0, "290": 974122496.0, "295": 966697856.0, "300": 967147904.0, "305": 964515328.0, "310": 940352640.0, "315": 967396480.0, "320": 969005952.0, "325": 980554624.0, "330": 972099904.0, "335": 946863360.0, "340": 966590592.0, "345": 973024512.0, "350": 973921664.0, "355": 963257856.0, "360": 948352064.0, "365": 964823936.0, "370": 962951296.0, "375": 958450560.0, "380": 947152896.0, "385": 955989760.0, "390": 945400064.0, "395": 970423040.0, "400": 979769600.0, "405": 968347392.0, "410": 970068736.0, "415": 953152768.0, "420": 943570240.0, "425": 954779520.0, "430": 962661120.0, "435": 977081792.0, "440": 954805120.0, "445": 971891712.0, "450": 963511744.0, "455": 973130112.0, "460": 983711104.0, "465": 945276800.0, "470": 942048640.0, "475": 967002368.0, "480": 966097280.0, "485": 976411392.0, "490": 962539392.0, "495": 945464896.0, "500": 964450048.0, "505": 986009280.0, "510": 965683456.0, "515": 943411200.0, "520": 945020160.0, "525": 971259392.0, "530": 971887104.0, "535": 979139008.0, "540": 969536832.0, "545": 954128256.0, "550": 951269248.0, "555": 987226368.0, "560": 960432896.0, "565": 966611840.0, "570": 975730496.0, "575": 927218816.0, "580": 970693376.0, "585": 961179264.0, "590": 972965120.0, "595": 963682496.0, "600": 937080192.0, "605": 951479680.0, "610": 963361280.0, "615": 970007808.0, "620": 976465920.0, "625": 949577344.0, "630": 954440256.0, "635": 986043136.0, "640": 980981376.0, "645": 955011456.0, "650": 958550080.0, "655": 951652992.0, "660": 961041792.0, "665": 967555776.0, "670": 962514432.0, "675": 968335104.0, "680": 965622080.0, "685": 962859904.0, "690": 961922048.0, "695": 954771328.0, "700": 970338112.0, "705": 945518464.0, "710": 943886976.0, "715": 973355264.0, "720": 968365696.0, "725": 978490496.0, "730": 952197632.0, "735": 948807808.0, "740": 955636096.0, "745": 975868672.0, "750": 981237696.0, "755": 962161792.0, "760": 951963520.0, "765": 967348800.0, "770": 976151680.0, "775": 970546880.0, "780": 977539904.0, "785": 931532672.0, "790": 960438528.0, "795": 964581120.0, "800": 967022080.0, "805": 962320000.0, "810": 940974080.0, "815": 949039040.0, "820": 
953184512.0, "825": 954503360.0, "830": 976444992.0, "835": 956072192.0, "840": 948404160.0, "845": 965153536.0, "850": 966028160.0, "855": 960905280.0, "860": 976027008.0, "865": 938159232.0, "870": 966415424.0, "875": 972310656.0, "880": 963122240.0, "885": 967741568.0, "890": 949966208.0, "895": 960019456.0, "900": 974230336.0, "905": 963965312.0, "910": 958434304.0, "915": 956352768.0, "920": 943975808.0, "925": 960831616.0, "930": 978849024.0, "935": 971073344.0, "940": 960907456.0, "945": 945062976.0, "950": 957425920.0, "955": 979037440.0, "960": 983589248.0, "965": 966166400.0, "970": 951231360.0, "975": 961578496.0, "980": 968067584.0, "985": 968992512.0, "990": 984389248.0, "995": 953290496.0, "1000": 934783680.0, "1005": 960146944.0, "1010": 971537792.0, "1015": 985183488.0, "1020": 962781312.0, "1025": 935009664.0, "1030": 974681856.0, "1035": 964994304.0, "1040": 980465280.0, "1045": 960829184.0, "1050": 955201408.0, "1055": 957780736.0, "1060": 967748864.0, "1065": 967117504.0, "1070": 966603264.0, "1075": 950062080.0, "1080": 954510720.0, "1085": 967252544.0, "1090": 977133696.0, "1095": 961239360.0, "1100": 979611904.0, "1105": 953366592.0, "1110": 965953920.0, "1115": 966987456.0, "1120": 970353152.0, "1125": 965707776.0, "1130": 954942784.0, "1135": 965838784.0, "1140": 965175936.0, "1145": 970990784.0, "1150": 955558784.0, "1155": 930578688.0, "1160": 957775488.0, "1165": 978124416.0, "1170": 974300544.0, "1175": 973059200.0, "1180": 973083072.0, "1185": 947344576.0, "1190": 964792320.0, "1195": 953137280.0, "1200": 972842112.0, "1205": 988479104.0, "1210": 931125760.0, "1215": 968645504.0, "1220": 969162240.0, "1225": 975949824.0, "1230": 967335296.0, "1235": 943447296.0, "1240": 955856128.0, "1245": 981505408.0, "1250": 966112128.0, "1255": 973673472.0, "1260": 946494656.0, "1265": 963998592.0, "1270": 960487168.0, "1275": 973617088.0, "1280": 961114112.0, "1285": 957585024.0, "1290": 952528000.0, "1295": 971612800.0, "1300": 968864832.0, "1305": 963739840.0, "1310": 963338112.0, "1315": 943553664.0, "1320": 966308224.0, "1325": 989786624.0, "1330": 969509760.0, "1335": 972302720.0, "1340": 972270656.0, "1345": 960658048.0, "1350": 968641408.0, "1355": 955852864.0, "1360": 971820928.0, "1365": 960387712.0, "1370": 948787328.0, "1375": 973533312.0, "1380": 953468672.0, "1385": 969148672.0, "1390": 975718272.0, "1395": 931675840.0, "1400": 945854656.0, "1405": 976754688.0, "1410": 974512256.0, "1415": 967572224.0, "1420": 966746112.0, "1425": 937378944.0, "1430": 973916288.0, "1435": 978333504.0, "1440": 964179712.0, "1445": 958059008.0, "1450": 946144640.0, "1455": 983923712.0, "1460": 968651392.0, "1465": 948745984.0, "1470": 984245504.0, "1475": 943907072.0, "1480": 963975552.0, "1485": 957350144.0, "1490": 961261632.0, "1495": 980538816.0, "1500": 958331136.0, "1505": 942868736.0, "1510": 984180864.0, "1515": 959095168.0, "1520": 959106432.0, "1525": 952784192.0, "1530": 957739968.0, "1535": 949431488.0, "1540": 971086656.0, "1545": 963134144.0, "1550": 978664704.0, "1555": 952320512.0, "1560": 980089728.0, "1565": 967315008.0, "1570": 973846144.0, "1575": 975494336.0, "1580": 941862208.0, "1585": 970030976.0, "1590": 983822976.0, "1595": 948634624.0, "1600": 967444352.0, "1605": 952451456.0, "1610": 969618816.0, "1615": 983148672.0, "1620": 968020672.0, "1625": 970716928.0, "1630": 962888512.0, "1635": 942311680.0, "1640": 981611904.0, "1645": 973978624.0, "1650": 974187456.0, "1655": 967266624.0, "1660": 940684800.0, "1665": 961703680.0, "1670": 962902784.0, 
"1675": 971279616.0, "1680": 980878912.0, "1685": 944417024.0, "1690": 964686976.0, "1695": 965643904.0, "1700": 966341504.0, "1705": 985198656.0, "1710": 978354688.0, "1715": 943213696.0, "1720": 977090944.0, "1725": 965873984.0, "1730": 968970368.0, "1735": 965089536.0, "1740": 949711104.0, "1745": 970012416.0, "1750": 959685056.0, "1755": 960086080.0, "1760": 966380544.0, "1765": 951815872.0, "1770": 954665344.0, "1775": 973752192.0, "1780": 970536064.0, "1785": 968827008.0, "1790": 950236032.0, "1795": 945131264.0, "1800": 984665728.0, "1805": 987162880.0, "1810": 977769600.0, "1815": 948005760.0, "1820": 949211136.0, "1825": 978852480.0, "1830": 966365440.0, "1835": 964132224.0, "1840": 972319936.0, "1845": 935413824.0, "1850": 952499328.0, "1855": 980052096.0, "1860": 975868992.0, "1865": 958968256.0, "1870": 958949504.0, "1875": 932595584.0, "1880": 973575744.0, "1885": 978845824.0, "1890": 971360512.0, "1895": 959212800.0, "1900": 947395200.0, "1905": 981831296.0, "1910": 969125504.0, "1915": 970042560.0, "1920": 975597824.0, "1925": 960497472.0, "1930": 977924608.0, "1935": 963251456.0, "1940": 952461312.0, "1945": 981337984.0, "1950": 939170176.0, "1955": 960606784.0, "1960": 970032000.0, "1965": 981179008.0, "1970": 962045568.0, "1975": 952821696.0, "1980": 936848896.0, "1985": 975940608.0, "1990": 965967872.0, "1995": 962612224.0, "2000": 960553088.0, "2005": 954497856.0, "2010": 975579840.0, "2015": 991803264.0, "2020": 975435520.0, "2025": 974304384.0, "2030": 952081536.0, "2035": 967849856.0, "2040": 987458048.0, "2045": 976481280.0, "2050": 984703360.0, "2055": 942838144.0, "2060": 942593216.0, "2065": 966209664.0, "2070": 969623808.0, "2075": 980554496.0, "2080": 977600512.0, "2085": 939640192.0, "2090": 969874816.0, "2095": 961276800.0, "2100": 976719168.0, "2105": 972536704.0, "2110": 959904064.0, "2115": 956878656.0, "2120": 977480320.0, "2125": 962566656.0, "2130": 979620224.0, "2135": 950538240.0, "2140": 946997056.0, "2145": 962276224.0, "2150": 973403264.0, "2155": 972690048.0, "2160": 970313088.0, "2165": 948643584.0, "2170": 961544640.0, "2175": 969380416.0, "2180": 969331456.0, "2185": 947448192.0, "2190": 940481920.0, "2195": 986087424.0, "2200": 961862272.0, "2205": 978922496.0, "2210": 964102720.0, "2215": 963502848.0, "2220": 951310080.0, "2225": 969316096.0, "2230": 976331456.0, "2235": 974026496.0, "2240": 975495040.0, "2245": 960233536.0, "2250": 967640704.0, "2255": 969131776.0, "2260": 975066496.0, "2265": 968259072.0, "2270": 951745024.0, "2275": 962768768.0, "2280": 969640576.0, "2285": 971693184.0, "2290": 962889920.0, "2295": 931409920.0, "2300": 959905344.0, "2305": 970427392.0, "2310": 967446400.0, "2315": 970906752.0, "2320": 975591168.0, "2325": 938586240.0, "2330": 988439040.0, "2335": 977490560.0, "2340": 964598528.0, "2345": 964167808.0, "2350": 947556352.0, "2355": 977028352.0, "2360": 966898560.0, "2365": 977298176.0, "2370": 965075200.0, "2375": 953966912.0, "2380": 962918720.0, "2385": 967195648.0, "2390": 963075840.0, "2395": 974466624.0, "2400": 958412416.0, "2405": 968121088.0, "2410": 951586496.0, "2415": 965904640.0, "2420": 966515968.0, "2425": 959044480.0, "2430": 956686848.0, "2435": 961389120.0, "2440": 959757824.0, "2445": 970890112.0, "2450": 961997184.0, "2455": 922721920.0, "2460": 951953152.0, "2465": 955729408.0, "2470": 972570752.0, "2475": 973813376.0, "2480": 943896192.0, "2485": 944186368.0, "2490": 972411264.0, "2495": 974451904.0, "2500": 973909888.0, "2505": 958490240.0, "2510": 939508864.0, "2515": 979554432.0, 
"2520": 970474240.0, "2525": 964389568.0, "2530": 955798400.0, "2535": 936599680.0, "2540": 969028672.0, "2545": 970384768.0, "2550": 969462912.0, "2555": 969436160.0, "2560": 964976384.0, "2565": 959764864.0, "2570": 985176896.0, "2575": 957425728.0, "2580": 967424512.0, "2585": 966023808.0, "2590": 956355840.0, "2595": 981828928.0, "2600": 959531648.0, "2605": 963000896.0, "2610": 965973056.0, "2615": 951925696.0, "2620": 971241600.0, "2625": 976457728.0, "2630": 974412544.0, "2635": 948070400.0, "2640": 948139072.0, "2645": 963039488.0, "2650": 953983616.0, "2655": 977112576.0, "2660": 949622656.0, "2665": 953929088.0, "2670": 959063552.0, "2675": 979276608.0, "2680": 961396608.0, "2685": 970701824.0, "2690": 965221376.0, "2695": 943555072.0, "2700": 969425280.0, "2705": 978962816.0, "2710": 971810432.0, "2715": 990814848.0, "2720": 942649472.0, "2725": 967955200.0, "2730": 955459648.0, "2735": 970671168.0, "2740": 977919808.0, "2745": 932273920.0, "2750": 947854336.0, "2755": 956314880.0, "2760": 981695808.0, "2765": 966111232.0, "2770": 948913728.0, "2775": 935829760.0, "2780": 964777088.0, "2785": 969568128.0, "2790": 974272576.0, "2795": 966885184.0, "2800": 944390080.0, "2805": 964353408.0, "2810": 969610624.0, "2815": 975845760.0, "2820": 963086208.0, "2825": 937630848.0, "2830": 956739712.0, "2835": 986321280.0, "2840": 961758464.0, "2845": 967504576.0, "2850": 951714688.0, "2855": 962092480.0, "2860": 954243648.0, "2865": 955882624.0, "2870": 944663936.0, "2875": 974663680.0, "2880": 968201856.0, "2885": 981082240.0, "2890": 953455232.0, "2895": 957178880.0, "2900": 964989312.0, "2905": 931708032.0, "2910": 955731584.0, "2915": 979476608.0, "2920": 970492800.0, "2925": 964978752.0, "2930": 964048832.0, "2935": 940141568.0, "2940": 964913792.0, "2945": 989150144.0, "2950": 965209856.0, "2955": 965106432.0, "2960": 933160960.0, "2965": 968793728.0, "2970": 973035648.0, "2975": 958092992.0, "2980": 964496640.0, "2985": 937269504.0, "2990": 951253888.0, "2995": 978315264.0, "3000": 969277696.0, "3005": 974686400.0, "3010": 950236032.0, "3015": 943842304.0, "3020": 958441920.0, "3025": 975186688.0, "3030": 965019392.0, "3035": 963454528.0, "3040": 952133440.0, "3045": 989795200.0, "3050": 965545216.0, "3055": 982522816.0, "3060": 971228544.0, "3065": 943916864.0, "3070": 978410240.0, "3075": 975205760.0, "3080": 960994176.0, "3085": 962353920.0, "3090": 945953792.0, "3095": 938116736.0, "3100": 972929024.0, "3105": 961990272.0, "3110": 970657152.0, "3115": 963402624.0, "3120": 947129856.0, "3125": 972720000.0, "3130": 952986368.0, "3135": 966055680.0, "3140": 968490368.0, "3145": 937864576.0, "3150": 975023744.0, "3155": 976815744.0, "3160": 969641600.0, "3165": 982207232.0, "3170": 937973696.0, "3175": 953837696.0, "3180": 983810944.0, "3185": 965169536.0, "3190": 968480128.0, "3195": 950932608.0, "3200": 945101312.0, "3205": 959863168.0, "3210": 957487744.0, "3215": 958019968.0, "3220": 968129792.0, "3225": 935613376.0, "3230": 962589440.0, "3235": 975775616.0, "3240": 962621056.0, "3245": 981286848.0, "3250": 943271488.0, "3255": 954622592.0, "3260": 980386816.0, "3265": 963632384.0, "3270": 965175488.0, "3275": 959743488.0, "3280": 967070080.0, "3285": 982502592.0, "3290": 947713664.0, "3295": 966446208.0, "3300": 959178752.0, "3305": 949155072.0, "3310": 979534208.0, "3315": 964307136.0, "3320": 969232256.0, "3325": 956217600.0, "3330": 941191680.0, "3335": 964972864.0, "3340": 956926272.0, "3345": 972513408.0, "3350": 964586368.0, "3355": 943358272.0, "3360": 970061824.0, 
"3365": 969453888.0, "3370": 954773824.0, "3375": 958700224.0, "3380": 971474368.0, "3385": 947984896.0, "3390": 965792768.0, "3395": 978413632.0, "3400": 978150912.0, "3405": 976747648.0, "3410": 924217600.0, "3415": 955436416.0, "3420": 971832192.0, "3425": 977167680.0, "3430": 973838080.0, "3435": 936083904.0, "3440": 970515968.0, "3445": 957315712.0, "3450": 959852608.0, "3455": 963864320.0, "3460": 967894272.0, "3465": 931331136.0, "3470": 952348160.0, "3475": 973716480.0, "3480": 959751296.0, "3485": 979961024.0, "3490": 944681856.0, "3495": 953917440.0, "3500": 969330496.0, "3505": 964359808.0, "3510": 971234496.0, "3515": 955957504.0, "3520": 958745088.0, "3525": 971926144.0, "3530": 964149376.0, "3535": 983216256.0, "3540": 937501056.0, "3545": 944742784.0, "3550": 984488512.0, "3555": 978069376.0, "3560": 974386432.0, "3565": 968825216.0, "3570": 946719616.0, "3575": 976117696.0, "3580": 977506240.0, "3585": 954593024.0, "3590": 956451392.0, "3595": 951456000.0, "3600": 989009664.0, "3605": 962020096.0, "3610": 965078528.0, "3615": 974648192.0, "3620": 954900480.0, "3625": 939540288.0, "3630": 990149632.0, "3635": 971447808.0, "3640": 976036800.0, "3645": 961525632.0, "3650": 945815168.0, "3655": 965807552.0, "3660": 976207232.0, "3665": 964033408.0, "3670": 977443328.0, "3675": 943486912.0, "3680": 958210240.0, "3685": 964309568.0, "3690": 982129536.0, "3695": 963149056.0, "3700": 950581376.0, "3705": 947356928.0, "3710": 982379392.0, "3715": 972705408.0, "3720": 976150144.0, "3725": 964062976.0, "3730": 948875904.0, "3735": 967091008.0, "3740": 960984320.0, "3745": 969346496.0, "3750": 963959168.0, "3755": 953444736.0, "3760": 976653568.0, "3765": 979865728.0, "3770": 972383744.0, "3775": 972397312.0, "3780": 952597312.0, "3785": 960249856.0, "3790": 985597568.0, "3795": 969220800.0, "3800": 957888768.0, "3805": 972432960.0, "3810": 954542720.0, "3815": 974579264.0, "3820": 963031680.0, "3825": 962108096.0, "3830": 969395072.0, "3835": 934757248.0, "3840": 971267072.0, "3845": 986875520.0, "3850": 968885184.0, "3855": 965272448.0, "3860": 948069888.0, "3865": 975060992.0, "3870": 985123200.0, "3875": 983054848.0, "3880": 963628032.0, "3885": 953023936.0, "3890": 960308352.0, "3895": 960627200.0, "3900": 984950656.0, "3905": 976247680.0, "3910": 987372416.0, "3915": 946042240.0, "3920": 974878848.0, "3925": 961234816.0, "3930": 976790272.0, "3935": 978913664.0, "3940": 950305344.0, "3945": 960285696.0, "3950": 974198400.0, "3955": 972966912.0, "3960": 974078976.0, "3965": 950899328.0, "3970": 980694144.0, "3975": 960725888.0, "3980": 977568768.0, "3985": 962961280.0, "3990": 972755200.0, "3995": 953701568.0, "4000": 975001408.0, "4005": 971670656.0, "4010": 978420736.0, "4015": 971504576.0, "4020": 950347776.0, "4025": 968457344.0, "4030": 997985664.0, "4035": 978586112.0, "4040": 959850560.0, "4045": 939701120.0, "4050": 944742720.0, "4055": 980985472.0, "4060": 977720832.0, "4065": 975767168.0, "4070": 942201216.0, "4075": 945801856.0, "4080": 988790144.0, "4085": 962116864.0, "4090": 983381952.0, "4095": 986970176.0, "4100": 957237504.0, "4105": 954135360.0, "4110": 966499328.0, "4115": 976054656.0, "4120": 983568640.0, "4125": 960078848.0, "4130": 967315008.0, "4135": 971467520.0, "4140": 963220352.0, "4145": 956212736.0, "4150": 960344704.0, "4155": 946253952.0, "4160": 968463104.0, "4165": 970363776.0, "4170": 972007936.0, "4175": 955911296.0, "4180": 941009664.0, "4185": 968355200.0, "4190": 968040512.0, "4195": 989234432.0, "4200": 962702592.0, "4205": 960619008.0, 
"4210": 971849472.0, "4215": 974206336.0, "4220": 981156480.0, "4225": 975257536.0, "4230": 952726528.0, "4235": 958530688.0, "4240": 966835776.0, "4245": 961769728.0, "4250": 965868032.0, "4255": 958304384.0, "4260": 949582592.0, "4265": 964162368.0, "4270": 978420992.0, "4275": 975458048.0, "4280": 962708032.0, "4285": 951437952.0, "4290": 980053504.0, "4295": 968878720.0, "4300": 958322176.0, "4305": 966802816.0, "4310": 939625216.0, "4315": 949428096.0, "4320": 984535296.0, "4325": 982572160.0, "4330": 974771456.0, "4335": 949458304.0, "4340": 959502656.0, "4345": 956656768.0, "4350": 979923200.0, "4355": 968881088.0, "4360": 966364416.0, "4365": 941220480.0, "4370": 969519232.0, "4375": 973007680.0, "4380": 966147456.0, "4385": 972048576.0, "4390": 954152960.0, "4395": 951880448.0, "4400": 973676992.0, "4405": 972490752.0, "4410": 967944128.0, "4415": 958984448.0, "4420": 960814656.0, "4425": 976452352.0, "4430": 965979264.0, "4435": 975974272.0, "4440": 962332416.0, "4445": 954861056.0, "4450": 978319232.0, "4455": 960254976.0, "4460": 968579968.0, "4465": 968867712.0, "4470": 944164224.0, "4475": 952022208.0, "4480": 978837120.0, "4485": 968303296.0, "4490": 957049408.0, "4495": 938951040.0, "4500": 953239680.0, "4505": 977178048.0, "4510": 978737792.0, "4515": 962504448.0, "4520": 958836992.0, "4525": 958064128.0, "4530": 964427904.0, "4535": 976591616.0, "4540": 976809664.0, "4545": 970181504.0, "4550": 953210624.0, "4555": 959565440.0, "4560": 972484864.0, "4565": 973479360.0, "4570": 978921856.0, "4575": 957810816.0, "4580": 963166208.0, "4585": 957393664.0, "4590": 986579584.0, "4595": 960189760.0, "4600": 952241920.0, "4605": 959297728.0, "4610": 963735936.0, "4615": 957975808.0, "4620": 960244160.0, "4625": 973835264.0, "4630": 944483328.0, "4635": 977000448.0, "4640": 960261888.0, "4645": 981980096.0, "4650": 962361536.0, "4655": 939420352.0, "4660": 964000256.0, "4665": 962540992.0, "4670": 976659200.0, "4675": 963452416.0, "4680": 957462848.0, "4685": 949612160.0, "4690": 956962432.0, "4695": 969938944.0, "4700": 961189504.0, "4705": 970731840.0, "4710": 934480576.0, "4715": 970512704.0, "4720": 966325760.0, "4725": 980266112.0, "4730": 965850560.0, "4735": 937846784.0, "4740": 960088128.0, "4745": 975997376.0, "4750": 967909888.0, "4755": 984897408.0, "4760": 959123840.0, "4765": 955213312.0, "4770": 958560384.0, "4775": 991068800.0, "4780": 976745728.0, "4785": 967571072.0, "4790": 943658240.0, "4795": 955800192.0, "4800": 967673024.0, "4805": 976480704.0, "4810": 965148544.0, "4815": 957945344.0, "4820": 973955072.0, "4825": 961432256.0, "4830": 962601664.0, "4835": 972495232.0, "4840": 948852864.0, "4845": 965691200.0, "4850": 960280448.0, "4855": 964069120.0, "4860": 963088640.0, "4865": 967519872.0, "4870": 957182784.0, "4875": 983526272.0, "4880": 957023744.0, "4885": 976991616.0, "4890": 959704256.0, "4895": 942140032.0, "4900": 973614592.0, "4905": 975152896.0, "4910": 969159552.0, "4915": 970029312.0, "4920": 941133056.0, "4925": 954699264.0, "4930": 976961664.0, "4935": 963688704.0, "4940": 972525120.0, "4945": 959995392.0, "4950": 940744960.0, "4955": 968000384.0, "4960": 976777472.0, "4965": 960923328.0, "4970": 958617088.0, "4975": 933690752.0, "4980": 960751488.0, "4985": 962913984.0, "4990": 963516928.0, "4995": 986296960.0, "5000": 940679168.0, "5005": 968888448.0, "5010": 970247168.0, "5015": 965234048.0, "5020": 966728064.0, "5025": 949362048.0, "5030": 953490816.0, "5035": 967313920.0, "5040": 955627136.0, "5045": 969189888.0, "5050": 953331456.0, 
"5055": 954873216.0, "5060": 962980992.0, "5065": 952146176.0, "5070": 973621632.0, "5075": 978556864.0, "5080": 942817408.0, "5085": 965851328.0, "5090": 972783936.0, "5095": 964460416.0, "5100": 958331392.0, "5105": 965354752.0, "5110": 950341504.0, "5115": 972323264.0, "5120": 960329152.0, "5125": 969695616.0, "5130": 938771136.0, "5135": 943661056.0, "5140": 969852672.0, "5145": 968590592.0, "5150": 970699264.0, "5155": 972721152.0, "5160": 926563456.0, "5165": 961543872.0, "5170": 966800768.0, "5175": 966049216.0, "5180": 963618880.0, "5185": 930852736.0, "5190": 949780160.0, "5195": 972447488.0, "5200": 973697728.0, "5205": 968226048.0, "5210": 960577600.0, "5215": 928816960.0, "5220": 979149440.0, "5225": 984816896.0, "5230": 975048960.0, "5235": 975080128.0, "5240": 944326784.0, "5245": 970774400.0, "5250": 972415040.0, "5255": 966883968.0, "5260": 976754880.0, "5265": 942234688.0, "5270": 969191232.0, "5275": 970074496.0, "5280": 962814272.0, "5285": 964122624.0, "5290": 932478592.0, "5295": 951763840.0, "5300": 975563392.0, "5305": 951867008.0, "5310": 967992576.0, "5315": 955756928.0, "5320": 950873088.0, "5325": 973016064.0, "5330": 967770624.0, "5335": 967562112.0, "5340": 966491968.0, "5345": 962949696.0, "5350": 978888704.0, "5355": 972179136.0, "5360": 963819904.0, "5365": 965222656.0, "5370": 947850112.0, "5375": 948833280.0, "5380": 967236864.0, "5385": 980504448.0, "5390": 965264384.0, "5395": 955136064.0, "5400": 948237184.0, "5405": 974306048.0, "5410": 967868800.0, "5415": 976026304.0, "5420": 967334336.0, "5425": 937302272.0, "5430": 963797312.0, "5435": 971863040.0, "5440": 969018304.0, "5445": 957489920.0, "5450": 919395072.0, "5455": 952103040.0, "5460": 962314880.0, "5465": 978808064.0, "5470": 980920640.0, "5475": 941539008.0, "5480": 955768320.0, "5485": 964928320.0, "5490": 975925504.0, "5495": 962720256.0, "5500": 971181632.0, "5505": 956915712.0, "5510": 968537856.0, "5515": 945393408.0, "5520": 963097152.0, "5525": 975834688.0, "5530": 936599552.0, "5535": 970646464.0, "5540": 960271360.0, "5545": 971937664.0, "5550": 967811840.0, "5555": 955933312.0, "5560": 954459712.0, "5565": 968801600.0, "5570": 945150336.0, "5575": 960553024.0, "5580": 960574976.0, "5585": 959421632.0, "5590": 977681664.0, "5595": 975184192.0, "5600": 963059328.0, "5605": 964128384.0, "5610": 943158912.0, "5615": 966516480.0, "5620": 963312064.0, "5625": 982349824.0, "5630": 976013312.0, "5635": 957225856.0, "5640": 951481408.0, "5645": 967880320.0, "5650": 979084480.0, "5655": 983487616.0, "5660": 956493568.0, "5665": 953660672.0, "5670": 965988224.0, "5675": 967640896.0, "5680": 978513152.0, "5685": 961910656.0, "5690": 935823168.0, "5695": 963677696.0, "5700": 952439616.0, "5705": 974500928.0, "5710": 971157888.0, "5715": 946037376.0, "5720": 975004288.0, "5725": 967389056.0, "5730": 978574656.0, "5735": 964770560.0, "5740": 943537536.0, "5745": 971082880.0, "5750": 981977088.0, "5755": 956645504.0, "5760": 963586944.0, "5765": 957621760.0, "5770": 955637120.0, "5775": 970749760.0, "5780": 962825344.0, "5785": 970736256.0, "5790": 974678592.0, "5795": 949802624.0, "5800": 965877952.0, "5805": 968774528.0, "5810": 975975040.0, "5815": 969992320.0, "5820": 936278656.0, "5825": 969278976.0, "5830": 977517184.0, "5835": 974895488.0, "5840": 963050176.0, "5845": 968776192.0, "5850": 942921856.0, "5855": 975814912.0, "5860": 979427456.0, "5865": 978444224.0, "5870": 968727488.0, "5875": 942131840.0, "5880": 964374272.0, "5885": 974660160.0, "5890": 972524992.0, "5895": 965633152.0, 
"5900": 941262080.0, "5905": 961681920.0, "5910": 958372288.0, "5915": 968051456.0, "5920": 977272384.0, "5925": 959407744.0, "5930": 946516992.0, "5935": 952224192.0, "5940": 977687104.0, "5945": 984782208.0, "5950": 980911552.0, "5955": 935020288.0, "5960": 961737472.0, "5965": 965852800.0, "5970": 970539520.0, "5975": 961933696.0, "5980": 958149376.0, "5985": 964638144.0, "5990": 973394944.0, "5995": 955834496.0, "6000": 955498880.0, "6005": 961294592.0, "6010": 952635008.0, "6015": 974478592.0, "6020": 978155776.0, "6025": 971958016.0, "6030": 955386944.0, "6035": 946856768.0, "6040": 962593408.0, "6045": 983669952.0, "6050": 956601344.0, "6055": 963359872.0, "6060": 945645184.0, "6065": 958517888.0, "6070": 978317824.0, "6075": 977977856.0, "6080": 957494016.0, "6085": 947543936.0, "6090": 953622272.0, "6095": 964574912.0, "6100": 979776640.0, "6105": 971075200.0, "6110": 961791744.0, "6115": 943643264.0, "6120": 968599680.0, "6125": 960840192.0, "6130": 984065920.0, "6135": 961068160.0, "6140": 958659968.0, "6145": 971508608.0, "6150": 968481600.0, "6155": 975116416.0, "6160": 977253952.0, "6165": 952768960.0, "6170": 951155456.0, "6175": 963458304.0, "6180": 969612672.0, "6185": 966358144.0, "6190": 963879616.0, "6195": 947070208.0, "6200": 969355072.0, "6205": 967076480.0, "6210": 959094656.0, "6215": 972964352.0, "6220": 936142336.0, "6225": 978802048.0, "6230": 975937280.0, "6235": 971680128.0, "6240": 965774208.0, "6245": 955881984.0, "6250": 956387968.0, "6255": 973019520.0, "6260": 978779008.0, "6265": 974795392.0, "6270": 958806784.0, "6275": 963886912.0, "6280": 973067264.0, "6285": 966012544.0, "6290": 970955264.0, "6295": 987295168.0, "6300": 947456384.0, "6305": 964785216.0, "6310": 979005376.0, "6315": 978345600.0, "6320": 971688064.0, "6325": 922782656.0, "6330": 959097984.0, "6335": 974793728.0, "6340": 984752704.0, "6345": 967033344.0, "6350": 944489792.0, "6355": 958186880.0, "6360": 972835200.0, "6365": 972122560.0, "6370": 959238336.0, "6375": 967093568.0, "6380": 951217024.0, "6385": 973561664.0, "6390": 965479424.0, "6395": 975094080.0, "6400": 983982464.0, "6405": 943796160.0, "6410": 977166656.0, "6415": 971316544.0, "6420": 956309248.0, "6425": 960951424.0, "6430": 957206400.0, "6435": 959989952.0, "6440": 968739008.0, "6445": 973393984.0, "6450": 974194944.0, "6455": 961934784.0, "6460": 941033728.0, "6465": 974361856.0, "6470": 979765696.0, "6475": 960628608.0, "6480": 967559232.0, "6485": 948685312.0, "6490": 970724544.0, "6495": 988297088.0, "6500": 980064128.0, "6505": 971889408.0, "6510": 951612864.0, "6515": 957357888.0, "6520": 978878080.0, "6525": 978719744.0, "6530": 973285632.0, "6535": 967694720.0, "6540": 950136704.0, "6545": 966270464.0, "6550": 979298816.0, "6555": 967132800.0, "6560": 975158848.0, "6565": 949107520.0, "6570": 952007808.0, "6575": 962296768.0, "6580": 975669120.0, "6585": 979398784.0, "6590": 948845184.0, "6595": 961225600.0, "6600": 960849856.0, "6605": 961154048.0, "6610": 985052480.0, "6615": 959272960.0, "6620": 944342528.0, "6625": 970807168.0, "6630": 971062144.0, "6635": 963835264.0, "6640": 959451904.0, "6645": 950662336.0, "6650": 978590144.0, "6655": 965680320.0, "6660": 968348032.0, "6665": 968474560.0, "6670": 932446336.0, "6675": 970478976.0, "6680": 968672448.0, "6685": 958416832.0, "6690": 956031168.0, "6695": 955119872.0, "6700": 961681664.0, "6705": 978910912.0, "6710": 970655488.0, "6715": 966544704.0, "6720": 973744896.0, "6725": 941573248.0, "6730": 978957696.0, "6735": 994353536.0, "6740": 976368960.0, 
"6745": 974223296.0, "6750": 938744192.0, "6755": 977441088.0, "6760": 969463296.0, "6765": 978160896.0, "6770": 975034176.0, "6775": 943265088.0, "6780": 946964416.0, "6785": 975199232.0, "6790": 960238208.0, "6795": 975751552.0, "6800": 972988544.0, "6805": 946499200.0, "6810": 958131264.0, "6815": 970291328.0, "6820": 977730048.0, "6825": 968861824.0, "6830": 950103296.0, "6835": 981145024.0, "6840": 982949312.0, "6845": 948469120.0, "6850": 965170688.0, "6855": 953805696.0, "6860": 979037952.0, "6865": 983645184.0, "6870": 964696000.0, "6875": 978675968.0, "6880": 950248896.0, "6885": 958385792.0, "6890": 960165376.0, "6895": 965789696.0, "6900": 985162752.0, "6905": 968295168.0, "6910": 950048192.0, "6915": 970853696.0, "6920": 967093632.0, "6925": 964985152.0, "6930": 964581504.0, "6935": 951570176.0, "6940": 962737920.0, "6945": 986396672.0, "6950": 973289856.0, "6955": 964457024.0, "6960": 940163712.0, "6965": 974160640.0, "6970": 978211520.0, "6975": 984735872.0, "6980": 982336128.0, "6985": 959519872.0, "6990": 945299904.0, "6995": 987151488.0, "7000": 962937088.0, "7005": 962812160.0, "7010": 984616512.0, "7015": 945250944.0, "7020": 982712448.0, "7025": 968571136.0, "7030": 953106688.0, "7035": 982319872.0, "7040": 950386304.0, "7045": 955475840.0, "7050": 959690880.0, "7055": 963684224.0, "7060": 976237440.0, "7065": 968183040.0, "7070": 953179264.0, "7075": 956511232.0, "7080": 968659264.0, "7085": 965611264.0, "7090": 969203200.0, "7095": 959304768.0, "7100": 973197184.0, "7105": 972828288.0, "7110": 969886592.0, "7115": 958289920.0, "7120": 948634752.0, "7125": 962820736.0, "7130": 971324672.0, "7135": 964124160.0, "7140": 961152896.0, "7145": 930404608.0, "7150": 945850368.0, "7155": 990540928.0, "7160": 968123648.0, "7165": 956593536.0, "7170": 968152768.0, "7175": 955557760.0, "7180": 957938176.0, "7185": 984671552.0, "7190": 978293248.0, "7195": 973356736.0, "7200": 935808512.0, "7205": 957439744.0, "7210": 966906240.0, "7215": 969486848.0, "7220": 981961728.0, "7225": 928383744.0, "7230": 949364096.0, "7235": 966981120.0, "7240": 966710144.0, "7245": 967430912.0, "7250": 949282816.0, "7255": 957290624.0, "7260": 969943040.0, "7265": 974752960.0, "7270": 959492224.0, "7275": 958930688.0, "7280": 957011968.0, "7285": 977207040.0, "7290": 976795200.0, "7295": 963061248.0, "7300": 975093376.0, "7305": 963630080.0, "7310": 977185472.0, "7315": 966663296.0, "7320": 974025280.0, "7325": 966772800.0, "7330": 959408640.0, "7335": 963792128.0, "7340": 977484160.0, "7345": 967485056.0, "7350": 984401536.0, "7355": 959565824.0, "7360": 948574720.0, "7365": 972245120.0, "7370": 982372736.0, "7375": 962659264.0, "7380": 963855360.0, "7385": 948211008.0, "7390": 963775616.0, "7395": 958247808.0, "7400": 969518400.0, "7405": 987503104.0, "7410": 951810432.0, "7415": 950395968.0, "7420": 966734976.0, "7425": 982498816.0, "7430": 965418368.0, "7435": 972902080.0, "7440": 936970880.0, "7445": 968694784.0, "7450": 979824128.0, "7455": 971072256.0, "7460": 971791488.0, "7465": 939068672.0, "7470": 971568768.0, "7475": 957750400.0, "7480": 968755456.0, "7485": 961146240.0, "7490": 933924608.0, "7495": 957175040.0, "7500": 968922112.0, "7505": 969693952.0, "7510": 971914560.0, "7515": 979325824.0, "7520": 951648768.0, "7525": 970210816.0, "7530": 953955136.0, "7535": 971113344.0, "7540": 979363200.0, "7545": 959137856.0, "7550": 959911936.0, "7555": 960096896.0, "7560": 969816896.0, "7565": 954936512.0, "7570": 942594624.0, "7575": 965458880.0, "7580": 982078592.0, "7585": 978908864.0, 
"7590": 970193024.0, "7595": 949624704.0, "7600": 945822272.0, "7605": 981924352.0, "7610": 968849280.0, "7615": 988431104.0, "7620": 956877376.0, "7625": 940951552.0, "7630": 971059584.0, "7635": 983921152.0, "7640": 983396544.0, "7645": 967999936.0, "7650": 958767360.0, "7655": 961822592.0, "7660": 968904704.0, "7665": 978123648.0, "7670": 975329024.0, "7675": 974969664.0, "7680": 942589696.0, "7685": 959713280.0, "7690": 975106688.0, "7695": 982049536.0, "7700": 979469632.0, "7705": 940608000.0, "7710": 974395456.0, "7715": 979087360.0, "7720": 967504192.0, "7725": 960086016.0, "7730": 943152896.0, "7735": 967960064.0, "7740": 980230144.0, "7745": 963831680.0, "7750": 963265536.0, "7755": 959640512.0, "7760": 970199872.0, "7765": 970729344.0, "7770": 962030848.0, "7775": 981835392.0, "7780": 964593024.0, "7785": 959666688.0, "7790": 968112000.0, "7795": 968565504.0, "7800": 971795712.0, "7805": 968122624.0, "7810": 945530176.0, "7815": 963123328.0, "7820": 974173440.0, "7825": 963489664.0, "7830": 957261888.0, "7835": 949538240.0, "7840": 957148416.0, "7845": 953684864.0, "7850": 979784768.0, "7855": 986819200.0, "7860": 947400704.0, "7865": 948909952.0, "7870": 965028992.0, "7875": 975494144.0, "7880": 968528896.0, "7885": 968991296.0, "7890": 952064896.0, "7895": 974659712.0, "7900": 963534848.0, "7905": 964100864.0, "7910": 965353408.0, "7915": 943095936.0, "7920": 950772096.0, "7925": 969513216.0, "7930": 964380160.0, "7935": 984301824.0, "7940": 964561216.0, "7945": 950735296.0, "7950": 961816320.0, "7955": 980082432.0, "7960": 963702016.0, "7965": 953082944.0, "7970": 951740416.0, "7975": 969242368.0, "7980": 964727616.0, "7985": 959251456.0, "7990": 967797632.0, "7995": 946596032.0, "8000": 962079680.0, "8005": 980372224.0, "8010": 965237248.0, "8015": 982809344.0, "8020": 960378240.0, "8025": 965200768.0, "8030": 958090560.0, "8035": 975113728.0, "8040": 960176256.0, "8045": 947768128.0, "8050": 959303680.0, "8055": 978732672.0, "8060": 969075968.0, "8065": 957632512.0, "8070": 963698432.0, "8075": 942094784.0, "8080": 966145984.0, "8085": 966619776.0, "8090": 983282432.0, "8095": 988539712.0, "8100": 966372736.0, "8105": 944180480.0, "8110": 968811008.0, "8115": 985685120.0, "8120": 974531072.0, "8125": 964031680.0, "8130": 966544512.0, "8135": 967491264.0, "8140": 963823360.0, "8145": 995027200.0, "8150": 973191680.0, "8155": 938402944.0, "8160": 964524032.0, "8165": 972792320.0, "8170": 968313600.0, "8175": 961465728.0, "8180": 936090880.0, "8185": 962700288.0, "8190": 967591488.0, "8195": 977029248.0, "8200": 956073344.0, "8205": 960740096.0, "8210": 946767104.0, "8215": 982017344.0, "8220": 988210944.0, "8225": 966330112.0, "8230": 962442752.0, "8235": 934132800.0, "8240": 980256512.0, "8245": 976386816.0, "8250": 963885696.0, "8255": 977186560.0, "8260": 956614016.0, "8265": 982651008.0, "8270": 952333696.0, "8275": 973792960.0, "8280": 974501760.0, "8285": 953039936.0, "8290": 939703872.0, "8295": 981249280.0, "8300": 972881280.0, "8305": 977926912.0, "8310": 951061184.0, "8315": 937516672.0, "8320": 977339328.0, "8325": 967702208.0, "8330": 990167296.0, "8335": 975674240.0, "8340": 947367680.0, "8345": 970703232.0, "8350": 970009216.0, "8355": 974930176.0, "8360": 979701696.0, "8365": 932856192.0, "8370": 965022208.0, "8375": 979660160.0, "8380": 965323648.0, "8385": 972670144.0, "8390": 962995968.0, "8395": 950673344.0, "8400": 972606720.0, "8405": 951478016.0, "8410": 960643968.0, "8415": 965316736.0, "8420": 941754304.0, "8425": 967909760.0, "8430": 960803776.0, 
"8435": 965674240.0, "8440": 969266176.0, "8445": 952763264.0, "8450": 984044736.0, "8455": 990052288.0, "8460": 968375936.0, "8465": 967405824.0, "8470": 962972544.0, "8475": 942650752.0, "8480": 987261056.0, "8485": 979284480.0, "8490": 992133376.0, "8495": 971017280.0, "8500": 951307264.0, "8505": 982885760.0, "8510": 974063488.0, "8515": 968568576.0, "8520": 961594688.0, "8525": 944972864.0, "8530": 983837568.0, "8535": 978412032.0, "8540": 967581888.0, "8545": 968756096.0, "8550": 941574400.0, "8555": 971292224.0, "8560": 958283264.0, "8565": 975812608.0, "8570": 974360256.0, "8575": 971620480.0, "8580": 931969664.0, "8585": 965538688.0, "8590": 978798464.0, "8595": 979266048.0, "8600": 983707520.0, "8605": 957975808.0, "8610": 983873536.0, "8615": 977417472.0, "8620": 963129984.0, "8625": 979024896.0, "8630": 943335168.0, "8635": 961540352.0, "8640": 973266752.0, "8645": 970047040.0, "8650": 969316288.0, "8655": 970616832.0, "8660": 944042240.0, "8665": 986351616.0, "8670": 960342016.0, "8675": 973579136.0, "8680": 962190208.0, "8685": 955545856.0, "8690": 978440448.0, "8695": 968560640.0, "8700": 972779072.0, "8705": 973495808.0, "8710": 946637888.0, "8715": 973024192.0, "8720": 958180736.0, "8725": 978572608.0, "8730": 985661952.0, "8735": 951968960.0, "8740": 940693504.0, "8745": 987063552.0, "8750": 971913600.0, "8755": 970914496.0, "8760": 964771456.0, "8765": 934606336.0, "8770": 986079744.0, "8775": 969507584.0, "8780": 967233024.0, "8785": 962025600.0, "8790": 947726336.0, "8795": 969480256.0, "8800": 970779648.0, "8805": 973080448.0, "8810": 983468032.0, "8815": 951103744.0, "8820": 939465920.0, "8825": 963918016.0, "8830": 980930432.0, "8835": 971177856.0, "8840": 979467008.0, "8845": 950412288.0, "8850": 985938304.0, "8855": 970857536.0, "8860": 961497856.0, "8865": 956633920.0, "8870": 945690496.0, "8875": 968481280.0, "8880": 983780480.0, "8885": 971184256.0, "8890": 969637056.0, "8895": 952246400.0, "8900": 961509248.0, "8905": 976643136.0, "8910": 981730048.0, "8915": 980609664.0, "8920": 967668608.0, "8925": 939772032.0, "8930": 970320000.0, "8935": 963732736.0, "8940": 977485760.0, "8945": 981631424.0, "8950": 945746816.0, "8955": 972116480.0, "8960": 973540736.0, "8965": 973175360.0, "8970": 966066944.0, "8975": 936670080.0, "8980": 952732032.0, "8985": 977313024.0, "8990": 967006464.0, "8995": 980247552.0, "9000": 951831808.0, "9005": 949984896.0, "9010": 975022912.0, "9015": 981808256.0, "9020": 958861568.0, "9025": 978811136.0, "9030": 953703360.0, "9035": 968368960.0, "9040": 977667712.0, "9045": 968228864.0, "9050": 982963456.0, "9055": 947629248.0, "9060": 955507584.0, "9065": 969670016.0, "9070": 967550272.0, "9075": 980648576.0, "9080": 952615680.0, "9085": 970705408.0, "9090": 963557760.0, "9095": 968057344.0, "9100": 974339968.0, "9105": 959936256.0, "9110": 947985728.0, "9115": 956355712.0, "9120": 985459328.0, "9125": 963088064.0, "9130": 957991360.0, "9135": 951522432.0, "9140": 966915328.0, "9145": 977176064.0, "9150": 986378240.0, "9155": 976842752.0, "9160": 957545856.0, "9165": 949887552.0, "9170": 987582720.0, "9175": 970992768.0, "9180": 966588672.0, "9185": 954783296.0, "9190": 956379072.0, "9195": 965881472.0, "9200": 968599424.0, "9205": 967134720.0, "9210": 984683136.0, "9215": 931338688.0, "9220": 949491008.0, "9225": 970887104.0, "9230": 970963776.0, "9235": 971379136.0, "9240": 959562368.0, "9245": 963597376.0, "9250": 961184192.0, "9255": 982921664.0, "9260": 979050624.0, "9265": 952621440.0, "9270": 949265920.0, "9275": 978269056.0, 
"9280": 977521408.0, "9285": 962387072.0, "9290": 979011264.0, "9295": 958561792.0, "9300": 965200640.0, "9305": 968900224.0, "9310": 972240384.0, "9315": 975677952.0, "9320": 947801216.0, "9325": 979185920.0, "9330": 977730688.0, "9335": 974997440.0, "9340": 959979648.0, "9345": 942900096.0, "9350": 952712960.0, "9355": 962836864.0, "9360": 959496512.0, "9365": 983437696.0, "9370": 982361984.0, "9375": 941725248.0, "9380": 982578304.0, "9385": 984915520.0, "9390": 972806016.0, "9395": 978331776.0, "9400": 937670272.0, "9405": 967641536.0, "9410": 981484288.0, "9415": 990962048.0, "9420": 959851968.0, "9425": 956485760.0, "9430": 938229376.0, "9435": 974449088.0, "9440": 959002944.0, "9445": 973131392.0, "9450": 961139840.0, "9455": 945260032.0, "9460": 977570624.0, "9465": 987683968.0, "9470": 962928000.0, "9475": 983368832.0, "9480": 930780800.0, "9485": 986718720.0, "9490": 963263104.0, "9495": 971655168.0, "9500": 982111040.0, "9505": 969881216.0, "9510": 964076160.0, "9515": 956213568.0, "9520": 948041472.0, "9525": 964980992.0, "9530": 957953920.0, "9535": 950926336.0, "9540": 953789952.0, "9545": 979125696.0, "9550": 955324928.0, "9555": 952301312.0, "9560": 957732800.0, "9565": 969389568.0, "9570": 977259648.0, "9575": 958580352.0, "9580": 962569984.0, "9585": 945890432.0, "9590": 948026944.0, "9595": 966418304.0, "9600": 984258368.0, "9605": 984983872.0, "9610": 943260544.0, "9615": 952384512.0, "9620": 980540800.0, "9625": 978144896.0, "9630": 969622528.0, "9635": 973972608.0, "9640": 940000064.0, "9645": 962032896.0, "9650": 970968704.0, "9655": 987005312.0, "9660": 962866880.0, "9665": 949542912.0, "9670": 966065024.0, "9675": 962585856.0, "9680": 964585856.0, "9685": 985850368.0, "9690": 940117760.0, "9695": 949747392.0, "9700": 975297600.0, "9705": 972442624.0, "9710": 966982272.0, "9715": 970937472.0, "9720": 939975552.0, "9725": 965705152.0, "9730": 973486592.0, "9735": 973362944.0, "9740": 970977728.0, "9745": 950963904.0, "9750": 979199616.0, "9755": 970035456.0, "9760": 967635264.0, "9765": 963358080.0, "9770": 952247168.0, "9775": 956216064.0, "9780": 969788800.0, "9785": 958001088.0, "9790": 960883584.0, "9795": 957624960.0, "9800": 948788480.0, "9805": 961669184.0, "9810": 978087296.0, "9815": 977028224.0, "9820": 981930816.0, "9825": 938700288.0, "9830": 969013760.0, "9835": 972265600.0, "9840": 971086528.0, "9845": 966399488.0, "9850": 946396800.0, "9855": 956897920.0, "9860": 986979712.0, "9865": 969291456.0, "9870": 989720960.0, "9875": 956655360.0, "9880": 930761152.0, "9885": 963077312.0, "9890": 972295232.0, "9895": 983035520.0, "9900": 956374720.0, "9905": 938088960.0, "9910": 978049664.0, "9915": 973334016.0, "9920": 944131456.0, "9925": 962438848.0, "9930": 946681536.0, "9935": 960536576.0, "9940": 965082880.0, "9945": 958125376.0, "9950": 963724352.0, "9955": 943107264.0, "9960": 966611200.0, "9965": 982909056.0, "9970": 966287872.0, "9975": 963279872.0, "9980": 980414848.0, "9985": 941665152.0, "9990": 976234496.0, "9995": 982362496.0, "10000": 971164032.0, "10005": 969297600.0, "10010": 943890688.0, "10015": 982564992.0, "10020": 977436288.0, "10025": 978886912.0, "10030": 970827392.0, "10035": 945931520.0, "10040": 950228480.0, "10045": 977412352.0, "10050": 985059072.0, "10055": 989978176.0, "10060": 958051072.0, "10065": 946830720.0, "10070": 966662784.0, "10075": 978381952.0, "10080": 971252736.0, "10085": 973885952.0, "10090": 943174080.0, "10095": 962659136.0, "10100": 971300352.0, "10105": 975618176.0, "10110": 971404480.0, "10115": 948232576.0, 
"10120": 961759488.0, "10125": 973642880.0, "10130": 980135424.0, "10135": 971769344.0, "10140": 957572864.0, "10145": 933775872.0, "10150": 973487424.0, "10155": 969372992.0, "10160": 961126848.0, "10165": 974677632.0, "10170": 944122112.0, "10175": 978242816.0, "10180": 983408128.0, "10185": 978427968.0, "10190": 954968192.0, "10195": 936573312.0, "10200": 987430400.0, "10205": 972124544.0, "10210": 965832960.0, "10215": 975606784.0, "10220": 947903616.0, "10225": 950006656.0, "10230": 975150912.0, "10235": 953439360.0, "10240": 968940608.0, "10245": 961036352.0, "10250": 935909312.0, "10255": 979123456.0, "10260": 963945152.0, "10265": 966544512.0, "10270": 968057920.0, "10275": 935623808.0, "10280": 969181952.0, "10285": 995754240.0, "10290": 978976256.0, "10295": 980901376.0, "10300": 951608320.0, "10305": 971471744.0, "10310": 959721152.0, "10315": 970636416.0, "10320": 984667520.0, "10325": 982811264.0, "10330": 934178112.0, "10335": 975963648.0, "10340": 956830080.0, "10345": 972798720.0, "10350": 984363712.0, "10355": 941791872.0, "10360": 961542656.0, "10365": 973753216.0, "10370": 980186880.0, "10375": 969692416.0, "10380": 961281792.0, "10385": 954728768.0, "10390": 989910400.0, "10395": 964453120.0, "10400": 960015744.0, "10405": 949367808.0, "10410": 954594752.0, "10415": 975065280.0, "10420": 967038848.0, "10425": 969236096.0, "10430": 964217472.0, "10435": 962300096.0, "10440": 971509184.0, "10445": 971435008.0, "10450": 974802816.0, "10455": 965583296.0, "10460": 947338048.0, "10465": 970809984.0, "10470": 971921856.0, "10475": 978742016.0, "10480": 996777728.0, "10485": 949276288.0, "10490": 933999744.0, "10495": 968274304.0, "10500": 977914944.0, "10505": 958532288.0, "10510": 950861056.0, "10515": 952761856.0, "10520": 971412864.0, "10525": 969254656.0, "10530": 969823808.0, "10535": 985973760.0, "10540": 946511232.0, "10545": 969796480.0, "10550": 968647104.0, "10555": 958945216.0, "10560": 975352448.0, "10565": 960958528.0, "10570": 968443648.0, "10575": 972584896.0, "10580": 960072640.0, "10585": 972977664.0, "10590": 951475712.0, "10595": 955927232.0, "10600": 967173440.0, "10605": 986208128.0, "10610": 965668032.0, "10615": 976196928.0, "10620": 940602752.0, "10625": 964360512.0, "10630": 966548096.0, "10635": 972474880.0, "10640": 974100224.0, "10645": 947771840.0, "10650": 965123264.0, "10655": 985146112.0, "10660": 975958592.0, "10665": 966414976.0, "10670": 954538112.0, "10675": 933791744.0, "10680": 985552512.0, "10685": 990465536.0, "10690": 963272320.0, "10695": 971467712.0, "10700": 949330112.0, "10705": 977442304.0, "10710": 967678912.0, "10715": 966750528.0, "10720": 965843520.0, "10725": 943925824.0, "10730": 979668096.0, "10735": 960466368.0, "10740": 970657152.0, "10745": 983659968.0, "10750": 980694080.0, "10755": 944319104.0, "10760": 969219456.0, "10765": 972360000.0, "10770": 973532480.0, "10775": 957519936.0, "10780": 948992768.0, "10785": 953068672.0, "10790": 969274624.0, "10795": 959968000.0, "10800": 971228224.0, "10805": 950749376.0, "10810": 973302208.0, "10815": 959227840.0, "10820": 970578944.0, "10825": 966622400.0, "10830": 956279104.0, "10835": 962315520.0, "10840": 970164032.0, "10845": 963272064.0, "10850": 957413888.0, "10855": 966982464.0, "10860": 950112960.0, "10865": 963435840.0, "10870": 982521920.0, "10875": 981439424.0, "10880": 957886400.0, "10885": 953618880.0, "10890": 972140800.0, "10895": 972574528.0, "10900": 969552192.0, "10905": 963967168.0, "10910": 937931840.0, "10915": 959792320.0, "10920": 982695360.0, "10925": 
969096832.0, "10930": 967604480.0, "10935": 962319296.0, "10940": 953353728.0, "10945": 964435776.0, "10950": 971693760.0, "10955": 966006912.0, "10960": 971449792.0, "10965": 965964608.0, "10970": 983068992.0, "10975": 965355328.0, "10980": 973981632.0, "10985": 985763264.0, "10990": 950380544.0, "10995": 962849856.0, "11000": 984696640.0, "11005": 978032448.0, "11010": 970939136.0, "11015": 969445056.0, "11020": 947336320.0, "11025": 959564608.0, "11030": 977603968.0, "11035": 975451264.0, "11040": 985860032.0, "11045": 956168704.0, "11050": 972917696.0, "11055": 973708928.0, "11060": 961488832.0, "11065": 985186048.0, "11070": 949030336.0, "11075": 975965760.0, "11080": 971664960.0, "11085": 966653440.0, "11090": 976054528.0, "11095": 945996928.0, "11100": 965548416.0, "11105": 973599680.0, "11110": 980302656.0, "11115": 967617664.0, "11120": 956744832.0, "11125": 956168704.0, "11130": 974829056.0, "11135": 978900416.0, "11140": 963803456.0, "11145": 965899456.0, "11150": 935298240.0, "11155": 975768832.0, "11160": 983533120.0, "11165": 981822784.0, "11170": 977400960.0, "11175": 957507904.0, "11180": 961753600.0, "11185": 971365312.0, "11190": 979127104.0, "11195": 984951168.0, "11200": 982093312.0, "11205": 941529472.0, "11210": 983868928.0, "11215": 966979840.0, "11220": 982691456.0, "11225": 961335424.0, "11230": 952575552.0, "11235": 980760384.0, "11240": 976750016.0, "11245": 965706752.0, "11250": 969000832.0, "11255": 959332160.0, "11260": 979323392.0, "11265": 963239808.0, "11270": 981069568.0, "11275": 967778048.0, "11280": 955402048.0, "11285": 952766464.0, "11290": 956145024.0, "11295": 967793408.0, "11300": 962232448.0, "11305": 958466176.0, "11310": 946095744.0, "11315": 982546496.0, "11320": 964325952.0, "11325": 980637248.0, "11330": 974888256.0, "11335": 951892608.0, "11340": 970130944.0, "11345": 969289472.0, "11350": 980805888.0, "11355": 982004480.0, "11360": 940931840.0, "11365": 970395136.0, "11370": 978573056.0, "11375": 975142976.0, "11380": 968097984.0, "11385": 958159040.0, "11390": 937506624.0, "11395": 976905280.0, "11400": 973024256.0, "11405": 960868608.0, "11410": 965629312.0, "11415": 928453504.0, "11420": 964290176.0, "11425": 980607360.0, "11430": 977911680.0, "11435": 969675648.0, "11440": 944643072.0, "11445": 974050688.0, "11450": 984023808.0, "11455": 970787136.0, "11460": 964618560.0, "11465": 959463872.0, "11470": 954479488.0, "11475": 972360256.0, "11480": 956101120.0, "11485": 976733952.0, "11490": 985840576.0, "11495": 958384128.0, "11500": 969573056.0, "11505": 963288576.0, "11510": 976199104.0, "11515": 977610560.0, "11520": 953632128.0, "11525": 975708160.0, "11530": 976330944.0, "11535": 979344704.0, "11540": 973920896.0, "11545": 953017600.0, "11550": 952767040.0, "11555": 981303360.0, "11560": 984029120.0, "11565": 964543168.0, "11570": 965946624.0, "11575": 951044608.0, "11580": 975743616.0, "11585": 976876416.0, "11590": 968810112.0, "11595": 976216000.0, "11600": 946182144.0, "11605": 972659456.0, "11610": 981967040.0, "11615": 971432320.0, "11620": 968908800.0, "11625": 948963648.0, "11630": 936902784.0, "11635": 973200320.0, "11640": 980805248.0, "11645": 979578176.0, "11650": 971279552.0, "11655": 955651840.0, "11660": 980159488.0, "11665": 957699264.0, "11670": 982226176.0, "11675": 971690368.0, "11680": 955794304.0, "11685": 982354240.0, "11690": 967976896.0, "11695": 967325696.0, "11700": 973205504.0, "11705": 955916928.0, "11710": 964352000.0, "11715": 982668672.0, "11720": 983293952.0, "11725": 964787264.0, "11730": 955178944.0, 
"11735": 942254784.0, "11740": 973436608.0, "11745": 970794112.0, "11750": 961046720.0, "11755": 962908160.0, "11760": 949851456.0, "11765": 983325376.0, "11770": 984209856.0, "11775": 974678528.0, "11780": 984976128.0, "11785": 946474496.0, "11790": 972187328.0, "11795": 970179840.0, "11800": 972786432.0, "11805": 986351808.0, "11810": 966793920.0, "11815": 955481920.0, "11820": 973164544.0, "11825": 970475200.0, "11830": 974539520.0, "11835": 961372672.0, "11840": 944087808.0, "11845": 980474368.0, "11850": 974160064.0, "11855": 977514496.0, "11860": 971245376.0, "11865": 938116672.0, "11870": 939856000.0, "11875": 989607104.0, "11880": 971937984.0, "11885": 962472256.0, "11890": 969840768.0, "11895": 964964544.0, "11900": 979000512.0, "11905": 960978048.0, "11910": 983261120.0, "11915": 989539008.0, "11920": 944341952.0, "11925": 993746880.0, "11930": 964276480.0, "11935": 963232512.0, "11940": 976610624.0, "11945": 944407488.0, "11950": 977418368.0, "11955": 978834624.0, "11960": 971871104.0, "11965": 975734464.0, "11970": 962815872.0, "11975": 962920512.0, "11980": 977155456.0, "11985": 952620800.0, "11990": 968188736.0, "11995": 964801856.0, "12000": 958062656.0, "12005": 974032384.0, "12010": 978925888.0, "12015": 971758976.0, "12020": 972924800.0, "12025": 934113408.0, "12030": 969001344.0, "12035": 983635776.0, "12040": 977360000.0, "12045": 981351744.0, "12050": 930858368.0, "12055": 938177408.0, "12060": 973956800.0, "12065": 965073088.0, "12070": 967858304.0, "12075": 949253376.0, "12080": 953109632.0, "12085": 971789376.0, "12090": 963601728.0, "12095": 963075008.0, "12100": 976382208.0, "12105": 950176512.0, "12110": 971641536.0, "12115": 967857792.0, "12120": 986224768.0, "12125": 980344640.0, "12130": 941307904.0, "12135": 955159872.0, "12140": 975757440.0, "12145": 979380672.0, "12150": 979350720.0, "12155": 961437568.0, "12160": 946262592.0, "12165": 968123456.0, "12170": 963922944.0, "12175": 966870272.0, "12180": 974525824.0, "12185": 952431168.0, "12190": 987822272.0, "12195": 970064896.0, "12200": 964392832.0, "12205": 968238784.0, "12210": 938703168.0, "12215": 996356672.0, "12220": 969584320.0, "12225": 978894144.0, "12230": 979707904.0, "12235": 949733824.0, "12240": 963307456.0, "12245": 964943424.0, "12250": 976390528.0, "12255": 967674688.0, "12260": 983212992.0, "12265": 931121728.0, "12270": 966041216.0, "12275": 979260992.0, "12280": 977151808.0, "12285": 970127168.0, "12290": 928813632.0, "12295": 976481216.0, "12300": 985536896.0, "12305": 969624064.0, "12310": 986035072.0, "12315": 935797824.0, "12320": 957608896.0, "12325": 966046400.0, "12330": 968013504.0, "12335": 963445248.0, "12340": 957385472.0, "12345": 943979200.0, "12350": 966506624.0, "12355": 975255552.0, "12360": 978663168.0, "12365": 964205312.0, "12370": 948695552.0, "12375": 963496896.0, "12380": 964567808.0, "12385": 972784960.0, "12390": 961207232.0, "12395": 961298752.0, "12400": 974965504.0, "12405": 976105728.0, "12410": 952883968.0, "12415": 962219136.0, "12420": 943610496.0, "12425": 948535232.0, "12430": 971740352.0, "12435": 968575616.0, "12440": 961145408.0, "12445": 951484032.0, "12450": 946801792.0, "12455": 980573632.0, "12460": 973289856.0, "12465": 954094720.0, "12470": 980628608.0, "12475": 958189568.0, "12480": 966422080.0, "12485": 977641984.0, "12490": 973641152.0, "12495": 968993472.0, "12500": 960825344.0, "12505": 943203776.0, "12510": 960585408.0, "12515": 969358272.0, "12520": 973605696.0, "12525": 971886848.0, "12530": 944143104.0, "12535": 975812544.0, "12540": 
965290496.0, "12545": 971470080.0, "12550": 969047168.0, "12555": 940294400.0, "12560": 963904832.0, "12565": 947056960.0, "12570": 974076544.0, "12575": 962073216.0, "12580": 957711360.0, "12585": 963994624.0, "12590": 965937536.0, "12595": 978425344.0, "12600": 981726848.0, "12605": 948685504.0, "12610": 937389824.0, "12615": 962448832.0, "12620": 960662528.0, "12625": 966016960.0, "12630": 970505728.0, "12635": 961904768.0, "12640": 978014784.0, "12645": 968929536.0, "12650": 969781696.0, "12655": 963823872.0, "12660": 932158976.0, "12665": 956682368.0, "12670": 985824960.0, "12675": 965333824.0, "12680": 960746048.0, "12685": 950900160.0, "12690": 945037440.0, "12695": 978180096.0, "12700": 984947904.0, "12705": 958612096.0, "12710": 968185408.0, "12715": 956194880.0, "12720": 976281216.0, "12725": 964788992.0, "12730": 968903936.0, "12735": 986458624.0, "12740": 937148928.0, "12745": 970235712.0, "12750": 974094272.0, "12755": 979672512.0, "12760": 969672256.0, "12765": 941497536.0, "12770": 951448832.0, "12775": 951018560.0, "12780": 968859584.0, "12785": 955667456.0, "12790": 962440384.0, "12795": 952574912.0, "12800": 962459456.0, "12805": 972357632.0, "12810": 973204672.0, "12815": 952295168.0, "12820": 941006208.0, "12825": 966426880.0, "12830": 998354240.0, "12835": 976476416.0, "12840": 962262592.0, "12845": 941357248.0, "12850": 958793280.0, "12855": 961055552.0, "12860": 972029440.0, "12865": 977576704.0, "12870": 974241152.0, "12875": 955667904.0, "12880": 967431104.0, "12885": 980837184.0, "12890": 958991040.0, "12895": 968756352.0, "12900": 936932416.0, "12905": 967534720.0, "12910": 980463488.0, "12915": 974646016.0, "12920": 954913280.0, "12925": 948394048.0, "12930": 959638976.0, "12935": 990254336.0, "12940": 967258560.0, "12945": 974963584.0, "12950": 970684224.0, "12955": 955156928.0, "12960": 976667840.0, "12965": 960294784.0, "12970": 961231936.0, "12975": 959308800.0, "12980": 937475264.0, "12985": 962245248.0, "12990": 967650176.0, "12995": 975082560.0, "13000": 979618752.0, "13005": 953874944.0, "13010": 950754368.0, "13015": 963804416.0, "13020": 960271936.0, "13025": 979702016.0, "13030": 971587648.0, "13035": 954566080.0, "13040": 953463936.0, "13045": 972294016.0, "13050": 967461952.0, "13055": 967282240.0, "13060": 950986496.0, "13065": 969834816.0, "13070": 974811072.0, "13075": 961141952.0, "13080": 960868480.0, "13085": 944243968.0, "13090": 973321344.0, "13095": 980513472.0, "13100": 965077824.0, "13105": 973763456.0, "13110": 924311168.0, "13115": 973399680.0, "13120": 980765056.0, "13125": 974949632.0, "13130": 951117312.0, "13135": 944539456.0, "13140": 925608448.0, "13145": 989776576.0, "13150": 983093056.0, "13155": 976174528.0, "13160": 969236352.0, "13165": 952627648.0, "13170": 977000832.0, "13175": 982029312.0, "13180": 976495616.0, "13185": 974812224.0, "13190": 949060416.0, "13195": 964321344.0, "13200": 969488320.0, "13205": 982912896.0, "13210": 971767744.0, "13215": 947757376.0, "13220": 962411136.0, "13225": 963763712.0, "13230": 975741376.0, "13235": 977233664.0, "13240": 965918784.0, "13245": 936192896.0, "13250": 977779072.0, "13255": 960361728.0, "13260": 966538688.0, "13265": 973043584.0, "13270": 954648000.0, "13275": 959451776.0, "13280": 976656576.0, "13285": 974861056.0, "13290": 966620032.0, "13295": 942063168.0, "13300": 969118272.0, "13305": 982134784.0, "13310": 971667840.0, "13315": 967658560.0, "13320": 976212480.0, "13325": 943523648.0, "13330": 972270272.0, "13335": 980114624.0, "13340": 960195840.0, "13345": 978223936.0, 
"13350": 954960128.0, "13355": 968459648.0, "13360": 982481472.0, "13365": 957186432.0, "13370": 966880256.0, "13375": 937487552.0, "13380": 952872960.0, "13385": 979948096.0, "13390": 978890624.0, "13395": 982442304.0, "13400": 951320256.0, "13405": 934107776.0, "13410": 975766592.0, "13415": 972871616.0, "13420": 984904960.0, "13425": 965993728.0, "13430": 954231424.0, "13435": 980875968.0, "13440": 966290368.0, "13445": 966201280.0, "13450": 969668224.0, "13455": 951651712.0, "13460": 964609792.0, "13465": 974064640.0, "13470": 971761280.0, "13475": 969500032.0, "13480": 966415680.0, "13485": 966637632.0, "13490": 977847104.0, "13495": 960212096.0, "13500": 971532480.0, "13505": 965213184.0, "13510": 963248896.0, "13515": 990388288.0, "13520": 958538880.0, "13525": 976756864.0, "13530": 983425024.0, "13535": 931321344.0, "13540": 946745408.0, "13545": 972389376.0, "13550": 970839680.0, "13555": 980935616.0, "13560": 959234944.0, "13565": 963986496.0, "13570": 972310144.0, "13575": 976823744.0, "13580": 975771712.0, "13585": 963359296.0, "13590": 939804224.0, "13595": 983545472.0, "13600": 990107008.0, "13605": 969120832.0, "13610": 973733120.0, "13615": 945268800.0, "13620": 972478592.0, "13625": 971448576.0, "13630": 958999168.0, "13635": 985219392.0, "13640": 980530880.0, "13645": 960931008.0, "13650": 953292608.0, "13655": 965451648.0, "13660": 978077120.0, "13665": 969804544.0, "13670": 956380352.0, "13675": 977689280.0, "13680": 976501440.0, "13685": 967911232.0, "13690": 971495936.0, "13695": 944195136.0, "13700": 974261376.0, "13705": 973308672.0, "13710": 975996864.0, "13715": 950649984.0, "13720": 951448192.0, "13725": 972720128.0, "13730": 969294272.0, "13735": 961792384.0, "13740": 973032576.0, "13745": 973866496.0, "13750": 958256256.0, "13755": 977567168.0, "13760": 964839680.0, "13765": 967831232.0, "13770": 978984896.0, "13775": 928985984.0, "13780": 973935488.0, "13785": 981719744.0, "13790": 963765568.0, "13795": 979261120.0, "13800": 955877952.0, "13805": 967651520.0, "13810": 963543552.0, "13815": 981258176.0, "13820": 976177216.0, "13825": 958088000.0, "13830": 945731328.0, "13835": 974651520.0, "13840": 996439424.0, "13845": 967843456.0, "13850": 975134272.0, "13855": 933767232.0, "13860": 971477952.0, "13865": 976842560.0, "13870": 987009536.0, "13875": 978941376.0, "13880": 951325632.0, "13885": 975767296.0, "13890": 968266304.0, "13895": 944866624.0, "13900": 979275904.0, "13905": 966534080.0, "13910": 965749504.0, "13915": 977553216.0, "13920": 975725184.0, "13925": 980912256.0, "13930": 963014208.0, "13935": 956772672.0, "13940": 965539456.0, "13945": 965396736.0, "13950": 977848640.0, "13955": 977259328.0, "13960": 974586368.0, "13965": 974931648.0, "13970": 972626752.0, "13975": 971565696.0, "13980": 983223424.0, "13985": 968934592.0, "13990": 962259904.0, "13995": 980496960.0, "14000": 972112256.0, "14005": 973174080.0, "14010": 965890816.0, "14015": 941965760.0, "14020": 980546688.0, "14025": 977131008.0, "14030": 972129920.0, "14035": 971405248.0, "14040": 936352000.0, "14045": 968445888.0, "14050": 975153344.0, "14055": 979059008.0, "14060": 976662976.0, "14065": 928849856.0, "14070": 978131328.0, "14075": 979579904.0, "14080": 964862272.0, "14085": 969209408.0, "14090": 965940416.0, "14095": 950791616.0, "14100": 972296896.0, "14105": 970938816.0, "14110": 987498560.0, "14115": 967758592.0, "14120": 944513792.0, "14125": 973016064.0, "14130": 970758656.0, "14135": 978738624.0, "14140": 972522752.0, "14145": 947268032.0, "14150": 974494336.0, "14155": 
979807680.0, "14160": 972941952.0, "14165": 972914688.0, "14170": 947223040.0, "14175": 949709632.0, "14180": 976846592.0, "14185": 971902272.0, "14190": 979733056.0, "14195": 973786752.0, "14200": 944968192.0, "14205": 980787648.0, "14210": 981227456.0, "14215": 969726080.0, "14220": 965378240.0, "14225": 956140992.0, "14230": 983781056.0, "14235": 983824000.0, "14240": 980612032.0, "14245": 969728704.0, "14250": 953852800.0, "14255": 941328320.0, "14260": 963630016.0, "14265": 988763456.0, "14270": 987013184.0, "14275": 968937088.0, "14280": 955058368.0, "14285": 962529024.0, "14290": 966191232.0, "14295": 966160128.0, "14300": 983290624.0, "14305": 936971200.0, "14310": 969623360.0, "14315": 977266048.0, "14320": 976023872.0, "14325": 980393920.0, "14330": 957279232.0, "14335": 963027968.0, "14340": 956338176.0, "14345": 968107584.0, "14350": 963630016.0, "14355": 946412992.0, "14360": 949717888.0, "14365": 972425792.0, "14370": 953770624.0, "14375": 956161728.0, "14380": 957709952.0, "14385": 951672064.0, "14390": 982406272.0, "14395": 971004096.0, "14400": 963427136.0, "14405": 969586176.0, "14410": 965564544.0, "14415": 963809280.0, "14420": 960527616.0, "14425": 976778688.0, "14430": 979100224.0, "14435": 970700672.0, "14440": 973844736.0, "14445": 980557184.0, "14450": 973676864.0, "14455": 961148928.0, "14460": 955967552.0, "14465": 934774656.0, "14470": 960542400.0, "14475": 966358144.0, "14480": 967413504.0, "14485": 975995840.0, "14490": 947116800.0, "14495": 959785088.0, "14500": 971377152.0, "14505": 966559168.0, "14510": 977737920.0, "14515": 942668736.0, "14520": 953736576.0, "14525": 971814400.0, "14530": 957328192.0, "14535": 979194368.0, "14540": 954583360.0, "14545": 940405952.0, "14550": 988628608.0, "14555": 972020096.0, "14560": 973802688.0, "14565": 969470848.0, "14570": 948660992.0, "14575": 966444352.0, "14580": 966197696.0, "14585": 976904704.0, "14590": 975301888.0, "14595": 945847872.0, "14600": 958453248.0, "14605": 968476032.0, "14610": 953920512.0, "14615": 967651392.0, "14620": 953145280.0, "14625": 963428480.0, "14630": 971401216.0, "14635": 976572160.0, "14640": 978156544.0, "14645": 974490880.0, "14650": 946837632.0, "14655": 977234944.0, "14660": 975239232.0, "14665": 954075072.0, "14670": 970649472.0, "14675": 952555840.0, "14680": 970667520.0, "14685": 971792512.0, "14690": 967248640.0, "14695": 949294336.0, "14700": 934664832.0, "14705": 959160576.0, "14710": 978588288.0, "14715": 982095872.0, "14720": 967414592.0, "14725": 962372608.0, "14730": 938147008.0, "14735": 954839040.0, "14740": 967599104.0, "14745": 987279104.0, "14750": 973881408.0, "14755": 944140736.0, "14760": 974096064.0, "14765": 970029824.0, "14770": 988972928.0, "14775": 982314752.0, "14780": 945278016.0, "14785": 958064320.0, "14790": 971393856.0, "14795": 974845568.0, "14800": 969471424.0, "14805": 949740864.0, "14810": 951452288.0, "14815": 966450880.0, "14820": 968281408.0, "14825": 964171008.0, "14830": 956763072.0, "14835": 945851264.0, "14840": 967526272.0, "14845": 980497408.0, "14850": 953512768.0, "14855": 960849664.0, "14860": 967291264.0, "14865": 977291584.0, "14870": 967267520.0, "14875": 979975552.0, "14880": 957254144.0, "14885": 962218048.0, "14890": 950189888.0, "14895": 976278400.0, "14900": 971407488.0, "14905": 980312704.0, "14910": 972296576.0, "14915": 945828928.0, "14920": 952708992.0, "14925": 977351872.0, "14930": 976028864.0, "14935": 973840448.0, "14940": 939853376.0, "14945": 975404544.0, "14950": 977270144.0, "14955": 983293440.0, "14960": 955462208.0, 
"14965": 956524288.0, "14970": 943288000.0, "14975": 960540736.0, "14980": 977475264.0, "14985": 984475968.0, "14990": 966799168.0, "14995": 952593280.0, "15000": 976813440.0, "15005": 965177728.0, "15010": 966935488.0, "15015": 971482048.0, "15020": 944571904.0, "15025": 974077632.0, "15030": 970348416.0, "15035": 969883968.0, "15040": 971506368.0, "15045": 949940096.0, "15050": 948415936.0, "15055": 967998144.0, "15060": 970786048.0, "15065": 972610304.0, "15070": 953778816.0, "15075": 949085120.0, "15080": 970402240.0, "15085": 973548480.0, "15090": 971664192.0, "15095": 950142400.0, "15100": 957999680.0, "15105": 987353024.0, "15110": 980863680.0, "15115": 956866048.0, "15120": 959761984.0, "15125": 962540928.0, "15130": 968469760.0, "15135": 982511232.0, "15140": 956334912.0, "15145": 976498368.0, "15150": 938281856.0, "15155": 938656896.0, "15160": 968072128.0, "15165": 975133888.0, "15170": 959514048.0, "15175": 974384832.0, "15180": 945356096.0, "15185": 964806016.0, "15190": 963140800.0, "15195": 971082752.0, "15200": 985360768.0, "15205": 941469248.0, "15210": 963634880.0, "15215": 965207552.0, "15220": 983131328.0, "15225": 966267136.0, "15230": 949436992.0, "15235": 933252992.0, "15240": 979782208.0, "15245": 958031232.0, "15250": 964578560.0, "15255": 972007936.0, "15260": 955061440.0, "15265": 981651712.0, "15270": 958466368.0, "15275": 973604544.0, "15280": 967792768.0, "15285": 942698176.0, "15290": 980495424.0, "15295": 967711296.0, "15300": 956541376.0, "15305": 960934976.0, "15310": 932012480.0, "15315": 939512000.0, "15320": 969221824.0, "15325": 970176896.0, "15330": 955228736.0, "15335": 967148224.0, "15340": 951535232.0, "15345": 987683072.0, "15350": 973311488.0, "15355": 972248704.0, "15360": 968304320.0, "15365": 940715328.0, "15370": 955683840.0, "15375": 972289984.0, "15380": 972432192.0, "15385": 977282432.0, "15390": 946449536.0, "15395": 950327744.0, "15400": 961743552.0, "15405": 973305600.0, "15410": 964289792.0, "15415": 964008192.0, "15420": 961436672.0, "15425": 969741056.0, "15430": 972801088.0, "15435": 959189952.0, "15440": 956217856.0, "15445": 951800576.0, "15450": 979267200.0, "15455": 955622144.0, "15460": 971251648.0, "15465": 980316736.0, "15470": 966459712.0, "15475": 958822336.0, "15480": 968083840.0, "15485": 955938368.0, "15490": 956038336.0, "15495": 954539968.0, "15500": 968531456.0, "15505": 967929024.0, "15510": 966696704.0, "15515": 972142400.0, "15520": 963902656.0, "15525": 928926464.0, "15530": 977321024.0, "15535": 976504960.0, "15540": 974799360.0, "15545": 967733888.0, "15550": 950444032.0, "15555": 963469440.0, "15560": 983125440.0, "15565": 962636224.0, "15570": 969218176.0, "15575": 954742016.0, "15580": 959397952.0, "15585": 977733248.0, "15590": 987229824.0, "15595": 974280192.0, "15600": 952094528.0, "15605": 944122304.0, "15610": 973594176.0, "15615": 970815232.0, "15620": 953764736.0, "15625": 979919040.0, "15630": 950571520.0, "15635": 976964992.0, "15640": 962998336.0, "15645": 961976768.0, "15650": 983838208.0, "15655": 939549120.0, "15660": 979587200.0, "15665": 965891456.0, "15670": 971683584.0, "15675": 978816960.0, "15680": 952414016.0, "15685": 945802560.0, "15690": 967777728.0, "15695": 965661952.0, "15700": 975286912.0, "15705": 967464128.0, "15710": 949828992.0, "15715": 979188096.0, "15720": 960283392.0, "15725": 971307904.0, "15730": 959975040.0, "15735": 943335104.0, "15740": 986146048.0, "15745": 978715968.0, "15750": 982196032.0, "15755": 941391104.0, "15760": 958416704.0, "15765": 955412480.0, "15770": 
979742592.0, "15775": 964329536.0, "15780": 952458688.0, "15785": 962585920.0, "15790": 935138752.0, "15795": 968731776.0, "15800": 974533888.0, "15805": 971529472.0, "15810": 975038464.0, "15815": 939388992.0, "15820": 973917632.0, "15825": 987897024.0, "15830": 968189888.0, "15835": 981193024.0, "15840": 932611456.0, "15845": 969980352.0, "15850": 964373248.0, "15855": 985266048.0, "15860": 957972608.0, "15865": 963796288.0, "15870": 941077376.0, "15875": 972322432.0, "15880": 965118656.0, "15885": 982258624.0, "15890": 969098816.0, "15895": 955848128.0, "15900": 992000832.0, "15905": 966236096.0, "15910": 980576256.0, "15915": 972248384.0, "15920": 948820608.0, "15925": 968422912.0, "15930": 983495296.0, "15935": 968379520.0, "15940": 971286528.0, "15945": 981129728.0, "15950": 964410432.0, "15955": 975215232.0, "15960": 974163712.0, "15965": 971359040.0, "15970": 968993984.0, "15975": 954499904.0, "15980": 975915456.0, "15985": 975861056.0, "15990": 985295616.0, "15995": 974192320.0, "16000": 969102784.0, "16005": 961317824.0, "16010": 973245696.0, "16015": 980958336.0, "16020": 964872768.0, "16025": 961061888.0, "16030": 951701440.0, "16035": 984447808.0, "16040": 960826624.0, "16045": 971121856.0, "16050": 955659072.0, "16055": 966056384.0, "16060": 965210496.0, "16065": 972345408.0, "16070": 968244032.0, "16075": 978429632.0, "16080": 950635584.0, "16085": 970614656.0, "16090": 973470272.0, "16095": 967378048.0, "16100": 981500928.0, "16105": 930009728.0, "16110": 961955712.0, "16115": 967930176.0, "16120": 971063360.0, "16125": 975972608.0, "16130": 960872064.0, "16135": 950836544.0, "16140": 977347328.0, "16145": 977384128.0, "16150": 982418304.0, "16155": 977347712.0, "16160": 942442752.0, "16165": 970529984.0, "16170": 963182080.0, "16175": 978538368.0, "16180": 976776768.0, "16185": 953436544.0, "16190": 951689728.0, "16195": 978092608.0, "16200": 975700416.0, "16205": 946662208.0, "16210": 962189952.0, "16215": 950867392.0, "16220": 978599616.0, "16225": 968208704.0, "16230": 972271808.0, "16235": 973348800.0, "16240": 940888960.0, "16245": 974958976.0, "16250": 979534592.0, "16255": 989962496.0, "16260": 970006336.0, "16265": 955223872.0, "16270": 963987328.0, "16275": 969159104.0, "16280": 992095360.0, "16285": 976756288.0, "16290": 940654656.0, "16295": 944364672.0, "16300": 957784896.0, "16305": 980825536.0, "16310": 975541120.0, "16315": 972887168.0, "16320": 942410432.0, "16325": 975195200.0, "16330": 978565056.0, "16335": 975548672.0, "16340": 988348736.0, "16345": 947441664.0, "16350": 962531264.0, "16355": 967766528.0, "16360": 957954048.0, "16365": 972555840.0, "16370": 934506112.0, "16375": 962717952.0, "16380": 984748224.0, "16385": 975013184.0, "16390": 976998208.0, "16395": 963122688.0, "16400": 951635712.0, "16405": 962124672.0, "16410": 964161088.0, "16415": 980128704.0, "16420": 967977472.0, "16425": 956174720.0, "16430": 959794368.0, "16435": 972108608.0, "16440": 970626880.0, "16445": 969361088.0, "16450": 946458816.0, "16455": 934309888.0, "16460": 981432768.0, "16465": 964879104.0, "16470": 979482496.0, "16475": 950446464.0, "16480": 962714560.0, "16485": 971536512.0, "16490": 966210368.0, "16495": 984085760.0, "16500": 990649600.0, "16505": 957426496.0, "16510": 967576320.0, "16515": 954460672.0, "16520": 971948992.0, "16525": 977640640.0, "16530": 931561536.0, "16535": 974222016.0, "16540": 958423488.0, "16545": 971424896.0, "16550": 974600896.0, "16555": 951440768.0, "16560": 959566144.0, "16565": 965252544.0, "16570": 971064704.0, "16575": 975333056.0, 
"16580": 972011520.0, "16585": 946616384.0, "16590": 964608896.0, "16595": 975104128.0, "16600": 980903360.0, "16605": 972813568.0, "16610": 946703360.0, "16615": 985879552.0, "16620": 959701696.0, "16625": 978619712.0, "16630": 973641664.0, "16635": 956983936.0, "16640": 967820224.0, "16645": 970038336.0, "16650": 967709952.0, "16655": 965205760.0, "16660": 975709504.0, "16665": 951745536.0, "16670": 972494784.0, "16675": 966351552.0, "16680": 960954432.0, "16685": 969165440.0, "16690": 945948224.0, "16695": 968908864.0, "16700": 970833856.0, "16705": 963325568.0, "16710": 972647552.0, "16715": 947188864.0, "16720": 964141120.0, "16725": 966924736.0, "16730": 974957440.0, "16735": 988913600.0, "16740": 952238016.0, "16745": 950326784.0, "16750": 949767040.0, "16755": 965159104.0, "16760": 968921216.0, "16765": 967732480.0, "16770": 925482752.0, "16775": 972807488.0, "16780": 972638080.0, "16785": 957369664.0, "16790": 960858688.0, "16795": 942446336.0, "16800": 950831616.0, "16805": 965830144.0, "16810": 960531648.0, "16815": 964774784.0, "16820": 952980288.0, "16825": 966027456.0, "16830": 972790400.0, "16835": 976626304.0, "16840": 965603840.0, "16845": 973089920.0, "16850": 962951424.0, "16855": 984466560.0, "16860": 976216576.0, "16865": 960892864.0, "16870": 953216576.0, "16875": 960806272.0, "16880": 976360704.0, "16885": 975529728.0, "16890": 965753536.0, "16895": 966348096.0, "16900": 952085760.0, "16905": 961088768.0, "16910": 965697792.0, "16915": 973895168.0, "16920": 957637248.0, "16925": 977637696.0, "16930": 940232064.0, "16935": 977431936.0, "16940": 969338432.0, "16945": 978101120.0, "16950": 962238848.0, "16955": 945607296.0, "16960": 970621376.0, "16965": 971733888.0, "16970": 988034880.0, "16975": 975479360.0, "16980": 947674176.0, "16985": 960562112.0, "16990": 973360000.0, "16995": 960894528.0, "17000": 958956928.0, "17005": 966526144.0, "17010": 938854848.0, "17015": 979477120.0, "17020": 965198720.0, "17025": 968328576.0, "17030": 971859008.0, "17035": 951716480.0, "17040": 965420736.0, "17045": 973760704.0, "17050": 975044480.0, "17055": 976613568.0, "17060": 943884992.0, "17065": 978484224.0, "17070": 979261824.0, "17075": 971783424.0, "17080": 971739072.0, "17085": 956646528.0, "17090": 963846336.0, "17095": 983289344.0, "17100": 960728704.0, "17105": 961292672.0, "17110": 962509696.0, "17115": 940788736.0, "17120": 970893056.0, "17125": 968734912.0, "17130": 962900992.0, "17135": 969508352.0, "17140": 952155712.0, "17145": 970346432.0, "17150": 962669120.0, "17155": 967300288.0, "17160": 976827264.0, "17165": 964134784.0, "17170": 963821312.0, "17175": 977887680.0, "17180": 958922816.0, "17185": 983797504.0, "17190": 974620288.0, "17195": 937600960.0, "17200": 963017408.0, "17205": 971395200.0, "17210": 983263872.0, "17215": 979736128.0, "17220": 937672000.0, "17225": 961483456.0, "17230": 950204544.0, "17235": 970087040.0, "17240": 982427968.0, "17245": 952478720.0, "17250": 967691200.0, "17255": 977851776.0, "17260": 962691968.0, "17265": 965434752.0, "17270": 956612928.0, "17275": 945445184.0, "17280": 975929152.0, "17285": 969228544.0, "17290": 954448128.0, "17295": 957755456.0, "17300": 936189888.0, "17305": 979276544.0, "17310": 965163648.0, "17315": 971635520.0, "17320": 957348096.0, "17325": 945257728.0, "17330": 955305408.0, "17335": 966231616.0, "17340": 966333696.0, "17345": 971360832.0, "17350": 953111744.0, "17355": 949290624.0, "17360": 981340800.0, "17365": 963663616.0, "17370": 967803456.0, "17375": 962046656.0, "17380": 944950208.0, "17385": 
968349696.0, "17390": 967084928.0, "17395": 969202624.0, "17400": 977582784.0, "17405": 946554432.0, "17410": 963036608.0, "17415": 980124992.0, "17420": 963762368.0, "17425": 967440064.0, "17430": 953014016.0, "17435": 952111744.0, "17440": 964207552.0, "17445": 968005824.0, "17450": 963228224.0, "17455": 984584128.0, "17460": 944364160.0, "17465": 969063552.0, "17470": 975689664.0, "17475": 958785408.0, "17480": 974479168.0, "17485": 950242240.0, "17490": 971004416.0, "17495": 970004224.0, "17500": 963171136.0, "17505": 963596160.0, "17510": 954199296.0, "17515": 960654592.0, "17520": 982819584.0, "17525": 970337088.0, "17530": 966501056.0, "17535": 961341696.0, "17540": 953177664.0, "17545": 972313728.0, "17550": 987355072.0, "17555": 974503680.0, "17560": 956472384.0, "17565": 945806016.0, "17570": 966235136.0, "17575": 988140288.0, "17580": 978116608.0, "17585": 960206208.0, "17590": 941950784.0, "17595": 943693696.0, "17600": 970237824.0, "17605": 968935040.0, "17610": 977637120.0, "17615": 954881408.0, "17620": 956555840.0, "17625": 983993536.0, "17630": 968422400.0, "17635": 981401408.0, "17640": 974248256.0, "17645": 946328704.0, "17650": 966728768.0, "17655": 975775616.0, "17660": 966496256.0, "17665": 971699840.0, "17670": 959260800.0, "17675": 951018304.0, "17680": 957813632.0, "17685": 964649472.0, "17690": 981483776.0, "17695": 953678976.0, "17700": 948986176.0, "17705": 969763264.0, "17710": 978162752.0, "17715": 974768192.0, "17720": 960720896.0, "17725": 934270528.0, "17730": 961092672.0, "17735": 975365376.0, "17740": 972710208.0, "17745": 964899072.0, "17750": 956035200.0, "17755": 973742336.0, "17760": 978201344.0, "17765": 979485888.0, "17770": 959934976.0, "17775": 959615616.0, "17780": 954542592.0, "17785": 975416256.0, "17790": 975719936.0, "17795": 958922432.0, "17800": 950817024.0, "17805": 954942912.0, "17810": 979512064.0, "17815": 964267584.0, "17820": 973486016.0, "17825": 967681792.0, "17830": 935557696.0, "17835": 961839872.0, "17840": 974424960.0, "17845": 988294464.0, "17850": 985091328.0, "17855": 941165504.0, "17860": 963614208.0, "17865": 971402368.0, "17870": 959588096.0, "17875": 973921856.0, "17880": 958716800.0, "17885": 943572800.0, "17890": 960335872.0, "17895": 975819648.0, "17900": 952713152.0, "17905": 983175360.0, "17910": 948491392.0, "17915": 962829632.0, "17920": 957288128.0, "17925": 959541888.0, "17930": 983565056.0, "17935": 962983296.0, "17940": 960064960.0, "17945": 964155456.0, "17950": 950264576.0, "17955": 959635456.0, "17960": 957470656.0, "17965": 963542016.0, "17970": 969230272.0, "17975": 966453312.0, "17980": 987144640.0, "17985": 966569920.0, "17990": 941984064.0, "17995": 974474752.0, "18000": 978442624.0, "18005": 976999616.0, "18010": 961451648.0, "18015": 959529344.0, "18020": 967994752.0, "18025": 982728832.0, "18030": 974488832.0, "18035": 959375936.0, "18040": 942917504.0, "18045": 959750272.0, "18050": 966918976.0, "18055": 966538624.0, "18060": 972441472.0, "18065": 961642816.0, "18070": 944569152.0, "18075": 971878272.0, "18080": 963299840.0, "18085": 967215552.0, "18090": 987664640.0, "18095": 947288896.0, "18100": 984886080.0, "18105": 971314304.0, "18110": 970495680.0, "18115": 981465088.0, "18120": 948857600.0, "18125": 968643968.0, "18130": 951244352.0, "18135": 972461184.0, "18140": 956593216.0, "18145": 957309312.0, "18150": 940704512.0, "18155": 976784256.0, "18160": 961705728.0, "18165": 974186112.0, "18170": 970002880.0, "18175": 958595904.0, "18180": 967958720.0, "18185": 972104896.0, "18190": 991389248.0, 
"18195": 974030464.0, "18200": 934730496.0, "18205": 962359552.0, "18210": 968602944.0, "18215": 972818048.0, "18220": 976059392.0, "18225": 959127936.0, "18230": 949671424.0, "18235": 980125120.0, "18240": 958315584.0, "18245": 961110272.0, "18250": 962059840.0, "18255": 936578176.0, "18260": 973996992.0, "18265": 958719936.0, "18270": 978700672.0, "18275": 979829760.0, "18280": 929410240.0, "18285": 953891392.0, "18290": 969671360.0, "18295": 979375808.0, "18300": 956561088.0, "18305": 942290176.0, "18310": 944030528.0, "18315": 960044864.0, "18320": 968718016.0, "18325": 970754880.0, "18330": 959313856.0, "18335": 946086912.0, "18340": 970983680.0, "18345": 969499392.0, "18350": 952019328.0, "18355": 974469888.0, "18360": 952712448.0, "18365": 980567808.0, "18370": 968682176.0, "18375": 972784192.0, "18380": 958615040.0, "18385": 954550272.0, "18390": 962916608.0, "18395": 967968960.0, "18400": 967909824.0, "18405": 955607360.0, "18410": 960908096.0, "18415": 965459968.0, "18420": 966661632.0, "18425": 966662528.0, "18430": 997560448.0, "18435": 975216256.0, "18440": 958295936.0, "18445": 978651136.0, "18450": 966134208.0, "18455": 987465536.0, "18460": 982706432.0, "18465": 952116224.0, "18470": 957602688.0, "18475": 973381376.0, "18480": 995193792.0, "18485": 974494976.0, "18490": 956035840.0, "18495": 935559168.0, "18500": 979505408.0, "18505": 972321152.0, "18510": 967976704.0, "18515": 977072960.0, "18520": 934262272.0, "18525": 992121216.0, "18530": 979542144.0, "18535": 986180608.0, "18540": 969832320.0, "18545": 965121792.0, "18550": 971854272.0, "18555": 963149312.0, "18560": 968050112.0, "18565": 975986368.0, "18570": 966238784.0, "18575": 976454784.0, "18580": 974676672.0, "18585": 969408768.0, "18590": 964701056.0, "18595": 967743616.0, "18600": 954235712.0, "18605": 978781120.0, "18610": 977436224.0, "18615": 967240192.0, "18620": 963770752.0, "18625": 951522240.0, "18630": 974659008.0, "18635": 972527424.0, "18640": 963813952.0, "18645": 967693888.0, "18650": 942688192.0, "18655": 981055488.0, "18660": 973114368.0, "18665": 969197696.0, "18670": 972257856.0, "18675": 950853760.0, "18680": 944255296.0, "18685": 980598336.0, "18690": 963370176.0, "18695": 981818624.0, "18700": 979003648.0, "18705": 950562944.0, "18710": 961397504.0, "18715": 984405760.0, "18720": 971902144.0, "18725": 978915904.0, "18730": 944721152.0, "18735": 967431168.0, "18740": 963746112.0, "18745": 974065536.0, "18750": 970237504.0, "18755": 957807680.0, "18760": 940188672.0, "18765": 977651200.0, "18770": 967448128.0, "18775": 974191424.0, "18780": 978437248.0, "18785": 958137024.0, "18790": 970507136.0, "18795": 982706688.0, "18800": 968413312.0, "18805": 977110784.0, "18810": 947945920.0, "18815": 988735936.0, "18820": 966843776.0, "18825": 969401920.0, "18830": 965002496.0, "18835": 953648576.0, "18840": 963052672.0, "18845": 959078208.0, "18850": 969904576.0, "18855": 980597120.0, "18860": 971864256.0, "18865": 944882944.0, "18870": 966466688.0, "18875": 972540352.0, "18880": 962410816.0, "18885": 959906560.0, "18890": 958274560.0, "18895": 992011520.0, "18900": 976084672.0, "18905": 970262272.0, "18910": 979911168.0, "18915": 954764800.0, "18920": 946356928.0, "18925": 978702592.0, "18930": 973806400.0, "18935": 982366720.0, "18940": 963095104.0, "18945": 934132928.0, "18950": 965146880.0, "18955": 974949312.0, "18960": 986778688.0, "18965": 973217536.0, "18970": 942887936.0, "18975": 961124416.0, "18980": 971254400.0, "18985": 964488000.0, "18990": 962902912.0, "18995": 952610176.0, "19000": 
960660928.0, "19005": 976027968.0, "19010": 972744448.0, "19015": 986592704.0, "19020": 954754048.0, "19025": 953670592.0, "19030": 970586496.0, "19035": 970882688.0, "19040": 962601280.0, "19045": 961794176.0, "19050": 946996416.0, "19055": 970363840.0, "19060": 965310976.0, "19065": 981188416.0, "19070": 963151808.0, "19075": 933158272.0, "19080": 965520448.0, "19085": 981912576.0, "19090": 957014080.0, "19095": 974480704.0, "19100": 934489280.0, "19105": 955800512.0, "19110": 968537024.0, "19115": 973002432.0, "19120": 959339904.0, "19125": 954163968.0, "19130": 950501952.0, "19135": 964747776.0, "19140": 955618048.0, "19145": 976023936.0, "19150": 977687424.0, "19155": 934446400.0, "19160": 953234432.0, "19165": 977944704.0, "19170": 964133248.0, "19175": 969924800.0, "19180": 951144640.0, "19185": 965340992.0, "19190": 972165504.0, "19195": 956645888.0, "19200": 969969472.0, "19205": 977290560.0, "19210": 947898752.0, "19215": 973202816.0, "19220": 959819712.0, "19225": 978168000.0, "19230": 977121728.0, "19235": 952616832.0, "19240": 978487488.0, "19245": 981730624.0, "19250": 984701952.0, "19255": 967378880.0, "19260": 935953280.0, "19265": 964983872.0, "19270": 973220928.0, "19275": 967259520.0, "19280": 962472576.0, "19285": 972423360.0, "19290": 947037568.0, "19295": 974026624.0, "19300": 983978048.0, "19305": 958513472.0, "19310": 955427008.0, "19315": 950644288.0, "19320": 980127488.0, "19325": 968634944.0, "19330": 963911104.0, "19335": 974233536.0, "19340": 940209280.0, "19345": 966117440.0, "19350": 973585024.0, "19355": 981495424.0, "19360": 976896640.0, "19365": 957589248.0, "19370": 948326848.0, "19375": 963149376.0, "19380": 982156864.0, "19385": 989143744.0, "19390": 979645376.0, "19395": 928395904.0, "19400": 971871296.0, "19405": 979172864.0, "19410": 969396544.0, "19415": 976201472.0, "19420": 939298304.0, "19425": 962638848.0, "19430": 949949568.0, "19435": 964836864.0, "19440": 984534144.0, "19445": 949341696.0, "19450": 946375040.0, "19455": 965998336.0, "19460": 973132416.0, "19465": 974720064.0, "19470": 965766400.0, "19475": 947390528.0, "19480": 975673024.0, "19485": 965857088.0, "19490": 963191488.0, "19495": 970292096.0, "19500": 948316352.0, "19505": 968948224.0, "19510": 951689792.0, "19515": 962271040.0, "19520": 966257728.0, "19525": 946903936.0, "19530": 977928768.0, "19535": 986181952.0, "19540": 957792704.0, "19545": 965299904.0, "19550": 947424576.0, "19555": 951874240.0, "19560": 990291136.0, "19565": 979603456.0, "19570": 968499648.0, "19575": 960028416.0, "19580": 945666880.0, "19585": 964715136.0, "19590": 968058752.0, "19595": 972375168.0, "19600": 969973504.0, "19605": 947430464.0, "19610": 974598144.0, "19615": 972250624.0, "19620": 953018752.0, "19625": 972244608.0, "19630": 976545920.0, "19635": 941104768.0, "19640": 972265728.0, "19645": 968262208.0, "19650": 971828288.0, "19655": 981783744.0, "19660": 946866944.0, "19665": 957577280.0, "19670": 965776384.0, "19675": 965607232.0, "19680": 972388160.0, "19685": 942611776.0, "19690": 971584256.0, "19695": 965639360.0, "19700": 968205440.0, "19705": 977930752.0, "19710": 946756096.0, "19715": 967349440.0, "19720": 971102976.0, "19725": 982247104.0, "19730": 966552256.0, "19735": 971236864.0, "19740": 940521152.0, "19745": 966707328.0, "19750": 967366336.0, "19755": 979107328.0, "19760": 943544448.0, "19765": 935810240.0, "19770": 968936448.0, "19775": 963945920.0, "19780": 965944384.0, "19785": 964949312.0, "19790": 940316992.0, "19795": 969596224.0, "19800": 982049984.0, "19805": 972036160.0, 
"19810": 967644608.0, "19815": 946474944.0, "19820": 938193728.0, "19825": 971120384.0, "19830": 974599296.0, "19835": 982041024.0, "19840": 977332608.0, "19845": 939135424.0, "19850": 991187200.0, "19855": 970708800.0, "19860": 955801536.0, "19865": 973083136.0, "19870": 950052736.0, "19875": 980071168.0, "19880": 976010624.0, "19885": 968413696.0, "19890": 976950336.0, "19895": 947037312.0, "19900": 955699008.0, "19905": 976213056.0, "19910": 960257536.0, "19915": 977301248.0, "19920": 985250624.0, "19925": 965203584.0, "19930": 979916032.0, "19935": 979227712.0, "19940": 970150016.0, "19945": 959938688.0, "19950": 956621248.0, "19955": 976153344.0, "19960": 960736512.0, "19965": 973707776.0, "19970": 978420800.0, "19975": 944955648.0, "19980": 960080064.0, "19985": 964519104.0, "19990": 969141440.0, "19995": 957836544.0, "20000": 961142080.0, "20005": 939710912.0, "20010": 975219392.0, "20015": 967561280.0, "20020": 994904640.0, "20025": 961430080.0, "20030": 942571200.0, "20035": 967128832.0, "20040": 973088000.0, "20045": 979930176.0, "20050": 968572416.0, "20055": 946731264.0, "20060": 958634176.0, "20065": 984853568.0, "20070": 960618752.0, "20075": 972831040.0, "20080": 970021824.0, "20085": 948553088.0, "20090": 961491776.0, "20095": 963327232.0, "20100": 959266240.0, "20105": 971938496.0, "20110": 957255488.0, "20115": 968034176.0, "20120": 961661120.0, "20125": 969765376.0, "20130": 966452096.0, "20135": 947101504.0, "20140": 959729536.0, "20145": 969458304.0, "20150": 965900672.0, "20155": 977718144.0, "20160": 963340864.0, "20165": 966987072.0, "20170": 972251008.0, "20175": 974875328.0, "20180": 965427648.0, "20185": 957522048.0, "20190": 942958592.0, "20195": 961911360.0, "20200": 969458368.0, "20205": 977289536.0, "20210": 959535552.0, "20215": 938390848.0, "20220": 958323072.0, "20225": 971501440.0, "20230": 967787136.0, "20235": 970875136.0, "20240": 944067264.0, "20245": 943765568.0, "20250": 980054080.0, "20255": 976730368.0, "20260": 971471872.0, "20265": 953346048.0, "20270": 943427712.0, "20275": 971981120.0, "20280": 963550016.0, "20285": 971155072.0, "20290": 969415488.0, "20295": 939969408.0, "20300": 969691712.0, "20305": 962313216.0, "20310": 973469312.0, "20315": 992090816.0, "20320": 953564992.0, "20325": 948975232.0, "20330": 970424896.0, "20335": 962479360.0, "20340": 960027264.0, "20345": 961837568.0, "20350": 952972416.0, "20355": 975235136.0, "20360": 964317248.0, "20365": 972064640.0, "20370": 975809728.0, "20375": 943748032.0, "20380": 969219904.0, "20385": 965645632.0, "20390": 969604864.0, "20395": 986414080.0, "20400": 957371328.0, "20405": 965120896.0, "20410": 981114048.0, "20415": 966760640.0, "20420": 965194688.0, "20425": 948058880.0, "20430": 932876032.0, "20435": 981514496.0, "20440": 969076928.0, "20445": 980687424.0, "20450": 959755520.0, "20455": 939557376.0, "20460": 955594752.0, "20465": 980484992.0, "20470": 978223040.0, "20475": 969002304.0, "20480": 946351936.0, "20485": 957885632.0, "20490": 979544512.0, "20495": 963545600.0, "20500": 974468032.0, "20505": 961651136.0, "20510": 944623808.0, "20515": 981752960.0, "20520": 989928896.0, "20525": 979737536.0, "20530": 962284864.0, "20535": 945915392.0, "20540": 970411712.0, "20545": 957714944.0, "20550": 966153408.0, "20555": 985703744.0, "20560": 944171200.0, "20565": 965398848.0, "20570": 968913792.0, "20575": 962138112.0, "20580": 953674752.0, "20585": 954368640.0, "20590": 986489088.0, "20595": 964599232.0, "20600": 961119296.0, "20605": 965256832.0, "20610": 945031616.0, "20615": 
936002880.0, "20620": 975415296.0, "20625": 974744512.0, "20630": 970484352.0, "20635": 984617088.0, "20640": 954195008.0, "20645": 970331456.0, "20650": 972916992.0, "20655": 956965952.0, "20660": 966292928.0, "20665": 943359680.0, "20670": 959033856.0, "20675": 982058240.0, "20680": 971036480.0, "20685": 978443584.0, "20690": 965332352.0, "20695": 935597504.0, "20700": 971644672.0, "20705": 964545344.0, "20710": 976257856.0, "20715": 976116032.0, "20720": 954636096.0, "20725": 976165376.0, "20730": 977419264.0, "20735": 961709056.0, "20740": 991612800.0, "20745": 956523904.0, "20750": 956840896.0, "20755": 975737472.0, "20760": 985580608.0, "20765": 984906112.0, "20770": 950670720.0, "20775": 929029888.0, "20780": 967870912.0, "20785": 977184128.0, "20790": 961444032.0, "20795": 974476544.0, "20800": 950107200.0, "20805": 987578688.0, "20810": 980018304.0, "20815": 970295040.0, "20820": 966061120.0, "20825": 949025600.0, "20830": 976736448.0, "20835": 963015680.0, "20840": 975354816.0, "20845": 971719040.0, "20850": 939841344.0, "20855": 964463872.0, "20860": 975060864.0, "20865": 968426112.0, "20870": 963818816.0, "20875": 964171328.0, "20880": 954704512.0, "20885": 972341952.0, "20890": 977223040.0, "20895": 964833344.0, "20900": 983089600.0, "20905": 935789568.0, "20910": 963881024.0, "20915": 966608320.0, "20920": 983804992.0, "20925": 970478848.0, "20930": 951524416.0, "20935": 944129984.0, "20940": 988247616.0, "20945": 965969920.0, "20950": 952212288.0, "20955": 957567808.0, "20960": 938833984.0, "20965": 967033344.0, "20970": 969380224.0, "20975": 965773440.0, "20980": 973727296.0, "20985": 940893760.0, "20990": 969796416.0, "20995": 987207744.0, "21000": 979695616.0, "21005": 957643008.0, "21010": 951528768.0, "21015": 979017472.0, "21020": 975387520.0, "21025": 975281408.0, "21030": 968427840.0, "21035": 968806592.0, "21040": 978402368.0, "21045": 980427264.0, "21050": 964074688.0, "21055": 972711808.0, "21060": 970944832.0, "21065": 945103616.0, "21070": 985440256.0, "21075": 978079488.0, "21080": 968653760.0, "21085": 967265792.0, "21090": 949218112.0, "21095": 987740736.0, "21100": 981401856.0, "21105": 961260928.0, "21110": 963837440.0, "21115": 963823872.0, "21120": 964992832.0, "21125": 977090944.0, "21130": 973198528.0, "21135": 971912960.0, "21140": 961262656.0, "21145": 936331456.0, "21150": 966092544.0, "21155": 1000624256.0, "21160": 973620544.0, "21165": 989009600.0, "21170": 956136704.0, "21175": 970453248.0, "21180": 968043584.0, "21185": 968487744.0, "21190": 978424512.0, "21195": 966799872.0, "21200": 955270336.0, "21205": 981133312.0, "21210": 964456192.0, "21215": 985901632.0, "21220": 968218752.0, "21225": 959426880.0, "21230": 961755200.0, "21235": 971472384.0, "21240": 981381120.0, "21245": 974785856.0, "21250": 946603648.0, "21255": 983058880.0, "21260": 972203712.0, "21265": 968703936.0, "21270": 953199040.0, "21275": 946063168.0, "21280": 965680832.0, "21285": 981508864.0, "21290": 974784832.0, "21295": 970561856.0, "21300": 926223488.0, "21305": 956196224.0, "21310": 987872704.0, "21315": 988890496.0, "21320": 966574464.0, "21325": 970932608.0, "21330": 957729024.0, "21335": 979138432.0, "21340": 976908736.0, "21345": 979244032.0, "21350": 979929728.0, "21355": 946818816.0, "21360": 964716864.0, "21365": 967669440.0, "21370": 992563840.0, "21375": 972361984.0, "21380": 957813632.0, "21385": 943059840.0, "21390": 958729216.0, "21395": 984136384.0, "21400": 970941120.0, "21405": 961854144.0, "21410": 963400896.0, "21415": 964438016.0, "21420": 
963765824.0, "21425": 981154176.0, "21430": 962837504.0, "21435": 949981184.0, "21440": 964162944.0, "21445": 969636352.0, "21450": 977646976.0, "21455": 973118144.0, "21460": 962051200.0, "21465": 969115712.0, "21470": 967173888.0, "21475": 964661184.0, "21480": 965281792.0, "21485": 938461568.0, "21490": 942789120.0, "21495": 969238976.0, "21500": 969396736.0, "21505": 977326272.0, "21510": 985693440.0, "21515": 943355136.0, "21520": 976669440.0, "21525": 981866048.0, "21530": 978464768.0, "21535": 971240320.0, "21540": 940368192.0, "21545": 958882496.0, "21550": 975565824.0, "21555": 978469248.0, "21560": 956037696.0, "21565": 971318016.0, "21570": 934791808.0, "21575": 961199104.0, "21580": 972597248.0, "21585": 964259392.0, "21590": 966891584.0, "21595": 949945024.0, "21600": 974521152.0, "21605": 966959296.0, "21610": 953346688.0, "21615": 969797376.0, "21620": 944896512.0, "21625": 957087872.0, "21630": 966351232.0, "21635": 984740032.0, "21640": 964399872.0, "21645": 944806080.0, "21650": 948838272.0, "21655": 963391296.0, "21660": 966104512.0, "21665": 992895168.0, "21670": 956093952.0, "21675": 950773824.0, "21680": 975550720.0, "21685": 979162944.0, "21690": 975228032.0, "21695": 952794304.0, "21700": 953541952.0, "21705": 967666560.0, "21710": 977321344.0, "21715": 973576448.0, "21720": 955081024.0, "21725": 937280448.0, "21730": 960970944.0, "21735": 979243840.0, "21740": 970645824.0, "21745": 956387520.0, "21750": 944582272.0, "21755": 961511488.0, "21760": 974060864.0, "21765": 967481408.0, "21770": 979095488.0, "21775": 981448192.0, "21780": 946732864.0, "21785": 979993856.0, "21790": 977129472.0, "21795": 975372224.0, "21800": 971553024.0, "21805": 949612288.0, "21810": 969716864.0, "21815": 953815808.0, "21820": 977586176.0, "21825": 964361088.0, "21830": 963590720.0, "21835": 958937408.0, "21840": 969643456.0, "21845": 965128768.0, "21850": 966118016.0, "21855": 982338752.0, "21860": 951279104.0, "21865": 955521664.0, "21870": 968892672.0, "21875": 972106112.0, "21880": 964865536.0, "21885": 961278720.0, "21890": 968992064.0, "21895": 971422464.0, "21900": 972100480.0, "21905": 959760704.0, "21910": 982879424.0, "21915": 950610880.0, "21920": 970486528.0, "21925": 970533824.0, "21930": 963341312.0, "21935": 944189376.0, "21940": 940487680.0, "21945": 976971456.0, "21950": 968511808.0, "21955": 967965824.0, "21960": 978763776.0, "21965": 938520832.0, "21970": 976066176.0, "21975": 965320000.0, "21980": 958779136.0, "21985": 974729408.0, "21990": 953506240.0, "21995": 940081920.0, "22000": 966190592.0, "22005": 967302784.0, "22010": 969921024.0, "22015": 966736512.0, "22020": 951392832.0, "22025": 975828992.0, "22030": 979206592.0, "22035": 986264128.0, "22040": 964680448.0, "22045": 939334400.0, "22050": 976793024.0, "22055": 972326912.0, "22060": 970404672.0, "22065": 970494336.0, "22070": 955573440.0, "22075": 945401216.0, "22080": 967255680.0, "22085": 967032384.0, "22090": 979673216.0, "22095": 972223872.0, "22100": 949601344.0, "22105": 963855616.0, "22110": 976013056.0, "22115": 973998656.0, "22120": 984590912.0, "22125": 951088256.0, "22130": 970067328.0, "22135": 956061184.0, "22140": 974937472.0, "22145": 969055040.0, "22150": 944543104.0, "22155": 961078912.0, "22160": 982184000.0, "22165": 968457984.0, "22170": 956830912.0, "22175": 928821760.0, "22180": 966601344.0, "22185": 972727104.0, "22190": 957699712.0, "22195": 956924928.0, "22200": 949783616.0, "22205": 942032512.0, "22210": 986361984.0, "22215": 979171584.0, "22220": 964691328.0, "22225": 976037568.0, 
"22230": 937390720.0, "22235": 957477952.0, "22240": 974595456.0, "22245": 974311104.0, "22250": 962558336.0, "22255": 966012480.0, "22260": 943301248.0, "22265": 974594048.0, "22270": 983782784.0, "22275": 964934656.0, "22280": 959768384.0, "22285": 952992064.0, "22290": 953711872.0, "22295": 959589312.0, "22300": 982365312.0, "22305": 971797824.0, "22310": 936081664.0, "22315": 967763712.0, "22320": 955761536.0, "22325": 957234944.0, "22330": 972708096.0, "22335": 946432064.0, "22340": 951500736.0, "22345": 969433664.0, "22350": 969855296.0, "22355": 966247488.0, "22360": 954553664.0, "22365": 968611072.0, "22370": 964777024.0, "22375": 975212608.0, "22380": 975459008.0, "22385": 962989568.0, "22390": 951605632.0, "22395": 971357632.0, "22400": 967008960.0, "22405": 961796288.0, "22410": 969693440.0, "22415": 936850176.0, "22420": 972468608.0, "22425": 965346112.0, "22430": 978498688.0, "22435": 973979776.0, "22440": 932054144.0, "22445": 951860608.0, "22450": 975564032.0, "22455": 960246144.0, "22460": 967539584.0, "22465": 988022144.0, "22470": 943540288.0, "22475": 975703936.0, "22480": 978688704.0, "22485": 977150080.0, "22490": 966899904.0, "22495": 942507712.0, "22500": 981041280.0, "22505": 957581568.0, "22510": 984980608.0, "22515": 966805504.0, "22520": 952115136.0, "22525": 965811776.0, "22530": 985910272.0, "22535": 974078272.0, "22540": 983529920.0, "22545": 952556608.0, "22550": 947170176.0, "22555": 972406656.0, "22560": 972955264.0, "22565": 966760768.0, "22570": 980416832.0, "22575": 948150784.0, "22580": 964207616.0, "22585": 947524736.0, "22590": 976332160.0, "22595": 982941376.0, "22600": 950301376.0, "22605": 978155712.0, "22610": 968844992.0, "22615": 950886144.0, "22620": 985023104.0, "22625": 959110144.0, "22630": 943816256.0, "22635": 955002112.0, "22640": 971378176.0, "22645": 988853184.0, "22650": 956716096.0, "22655": 945667648.0, "22660": 962857408.0, "22665": 971720640.0, "22670": 969484480.0, "22675": 978926400.0, "22680": 939906240.0, "22685": 958570688.0, "22690": 977714752.0, "22695": 958491520.0, "22700": 978134272.0, "22705": 954262208.0, "22710": 958182784.0, "22715": 972283648.0, "22720": 982909824.0, "22725": 961628352.0, "22730": 958913984.0, "22735": 948644032.0, "22740": 968260544.0, "22745": 965479232.0, "22750": 997951488.0, "22755": 973870208.0, "22760": 939940352.0, "22765": 966812096.0, "22770": 968759872.0, "22775": 949725248.0, "22780": 977650688.0, "22785": 955403968.0, "22790": 955030080.0, "22795": 976225792.0, "22800": 970213760.0, "22805": 962492416.0, "22810": 958755776.0, "22815": 945295936.0, "22820": 978518528.0, "22825": 965980608.0, "22830": 966358656.0, "22835": 969410496.0, "22840": 940608704.0, "22845": 973240320.0, "22850": 975068800.0, "22855": 951888256.0, "22860": 964066880.0, "22865": 949149888.0, "22870": 977665728.0, "22875": 974602496.0, "22880": 969645312.0, "22885": 977445888.0, "22890": 946845056.0, "22895": 944096192.0, "22900": 961941184.0, "22905": 971781760.0, "22910": 980031744.0, "22915": 971509696.0, "22920": 946439488.0, "22925": 970772800.0, "22930": 975968896.0, "22935": 969260160.0, "22940": 973054144.0, "22945": 941027456.0, "22950": 975760192.0, "22955": 972611840.0, "22960": 976027328.0, "22965": 965119616.0, "22970": 957061056.0, "22975": 931256448.0, "22980": 979264192.0, "22985": 960038336.0, "22990": 965137344.0, "22995": 958527360.0, "23000": 966014400.0, "23005": 973020352.0, "23010": 964743296.0, "23015": 968654272.0, "23020": 981821632.0, "23025": 955935872.0, "23030": 991025024.0, "23035": 
968775744.0, "23040": 973782272.0, "23045": 959377344.0, "23050": 947800384.0, "23055": 949367232.0, "23060": 966707200.0, "23065": 980937088.0, "23070": 960609088.0, "23075": 957851904.0, "23080": 941302592.0, "23085": 975655424.0, "23090": 979904256.0, "23095": 965988800.0, "23100": 986714112.0, "23105": 952366272.0, "23110": 970783104.0, "23115": 970343616.0, "23120": 974974528.0, "23125": 971842752.0, "23130": 941395648.0, "23135": 948387520.0, "23140": 980668736.0, "23145": 980053760.0, "23150": 982500096.0, "23155": 970084736.0, "23160": 936919040.0, "23165": 969876352.0, "23170": 981326784.0, "23175": 992018560.0, "23180": 958539648.0, "23185": 950516480.0, "23190": 956740608.0, "23195": 982094144.0, "23200": 977917248.0, "23205": 968119744.0, "23210": 952073984.0, "23215": 931399680.0, "23220": 966554112.0, "23225": 958850880.0, "23230": 977573952.0, "23235": 964592192.0, "23240": 958312704.0, "23245": 974005888.0, "23250": 950970624.0, "23255": 974338496.0, "23260": 963808896.0, "23265": 954280000.0, "23270": 981481088.0, "23275": 974654976.0, "23280": 966983488.0, "23285": 971694144.0, "23290": 940360576.0, "23295": 965095104.0, "23300": 960203840.0, "23305": 952547008.0, "23310": 966836608.0, "23315": 958368576.0, "23320": 959804416.0, "23325": 972355200.0, "23330": 985891200.0, "23335": 958696128.0, "23340": 936294912.0, "23345": 945463296.0, "23350": 977076032.0, "23355": 988789248.0, "23360": 966621568.0, "23365": 985454784.0, "23370": 938732992.0, "23375": 963043200.0, "23380": 961942912.0, "23385": 989489600.0, "23390": 987013312.0, "23395": 959490944.0, "23400": 961899648.0, "23405": 958968000.0, "23410": 966210816.0, "23415": 981719936.0, "23420": 952090944.0, "23425": 938251968.0, "23430": 971376576.0, "23435": 969824576.0, "23440": 976530240.0, "23445": 971830336.0, "23450": 955762752.0, "23455": 972647168.0, "23460": 965210240.0, "23465": 950826048.0, "23470": 978837824.0, "23475": 958071680.0, "23480": 961483136.0, "23485": 985632192.0, "23490": 962112576.0, "23495": 974645824.0, "23500": 956923328.0, "23505": 948963840.0, "23510": 975927616.0, "23515": 968292352.0, "23520": 962047872.0, "23525": 977941696.0, "23530": 946268288.0, "23535": 976358528.0, "23540": 979349632.0, "23545": 979796608.0, "23550": 975724736.0, "23555": 940562432.0, "23560": 963765888.0, "23565": 965244032.0, "23570": 978698112.0, "23575": 945850816.0, "23580": 941845440.0, "23585": 959131072.0, "23590": 972693952.0, "23595": 970566336.0, "23600": 966508544.0, "23605": 962100224.0, "23610": 937939136.0, "23615": 973749696.0, "23620": 973512704.0, "23625": 981707456.0, "23630": 970136768.0, "23635": 949885696.0, "23640": 962003328.0, "23645": 982789568.0, "23650": 968080960.0, "23655": 969705536.0, "23660": 954171072.0, "23665": 952187136.0, "23670": 985361856.0, "23675": 972913600.0, "23680": 976518272.0, "23685": 959725056.0, "23690": 932516480.0, "23695": 964037696.0, "23700": 967028736.0, "23705": 977857216.0, "23710": 961843648.0, "23715": 955102400.0, "23720": 988763328.0, "23725": 968715968.0, "23730": 970518336.0, "23735": 959374656.0, "23740": 952353472.0, "23745": 948822592.0, "23750": 979556224.0, "23755": 967519488.0, "23760": 972424768.0, "23765": 947987136.0, "23770": 951507968.0, "23775": 968237504.0, "23780": 967390336.0, "23785": 962856448.0, "23790": 980083776.0, "23795": 944050368.0, "23800": 975006848.0, "23805": 974312256.0, "23810": 973574208.0, "23815": 971708544.0, "23820": 958864384.0, "23825": 960295360.0, "23830": 965778560.0, "23835": 970290752.0, "23840": 980613376.0, 
"23845": 944283776.0, "23850": 945492480.0, "23855": 970518528.0, "23860": 970185088.0, "23865": 970997184.0, "23870": 986612032.0, "23875": 948066816.0, "23880": 955517312.0, "23885": 972393344.0, "23890": 972488640.0, "23895": 985050304.0, "23900": 951690944.0, "23905": 954792960.0, "23910": 972011136.0, "23915": 962667904.0, "23920": 960713792.0, "23925": 943963072.0, "23930": 948743936.0, "23935": 981819456.0, "23940": 971381696.0, "23945": 970545984.0, "23950": 972548288.0, "23955": 944227328.0, "23960": 974196096.0, "23965": 977102336.0, "23970": 963895680.0, "23975": 960720192.0, "23980": 945273216.0, "23985": 969737216.0, "23990": 998076864.0, "23995": 975855808.0, "24000": 963338816.0, "24005": 937053696.0, "24010": 949942336.0, "24015": 968207552.0, "24020": 986284160.0, "24025": 967589184.0, "24030": 966929408.0, "24035": 937815552.0, "24040": 963158336.0, "24045": 985092928.0, "24050": 962796480.0, "24055": 968078016.0, "24060": 947321280.0, "24065": 975432384.0, "24070": 975331264.0, "24075": 975791424.0, "24080": 980182720.0, "24085": 956997120.0, "24090": 970744192.0, "24095": 963803712.0, "24100": 977681152.0, "24105": 981516800.0, "24110": 968661248.0, "24115": 953440768.0, "24120": 963244800.0, "24125": 964300416.0, "24130": 965498240.0, "24135": 959689600.0, "24140": 965970560.0, "24145": 966674048.0, "24150": 968969728.0, "24155": 955515712.0, "24160": 972235712.0, "24165": 939842752.0, "24170": 960277312.0, "24175": 981393088.0, "24180": 968752512.0, "24185": 957488448.0, "24190": 951367040.0, "24195": 952284032.0, "24200": 969787776.0, "24205": 972364160.0, "24210": 962866624.0, "24215": 932202944.0, "24220": 957318592.0, "24225": 981520000.0, "24230": 959303104.0, "24235": 948332288.0, "24240": 969478848.0, "24245": 949914944.0, "24250": 956194496.0, "24255": 973598976.0, "24260": 969103872.0, "24265": 968650560.0, "24270": 953933632.0, "24275": 961696640.0, "24280": 958652864.0, "24285": 987229120.0, "24290": 969981056.0, "24295": 964830208.0, "24300": 946335104.0, "24305": 980762048.0, "24310": 971935168.0, "24315": 974750656.0, "24320": 965119616.0, "24325": 939432640.0, "24330": 955701888.0, "24335": 967307264.0, "24340": 991853696.0, "24345": 971335552.0, "24350": 924678016.0, "24355": 969423360.0, "24360": 967502400.0, "24365": 963945280.0, "24370": 968606656.0, "24375": 974431104.0, "24380": 953848576.0, "24385": 978189824.0, "24390": 975013440.0, "24395": 956637632.0, "24400": 968968256.0, "24405": 943301056.0, "24410": 979950784.0, "24415": 982469120.0, "24420": 959020224.0, "24425": 973195264.0, "24430": 944738560.0, "24435": 958959424.0, "24440": 979202240.0, "24445": 972960320.0, "24450": 951416512.0, "24455": 941890432.0, "24460": 947712832.0, "24465": 975258496.0, "24470": 988450240.0, "24475": 974012928.0, "24480": 962630592.0, "24485": 921264064.0, "24490": 970375040.0, "24495": 982832832.0, "24500": 976660288.0, "24505": 967197056.0, "24510": 958454976.0, "24515": 1000298880.0, "24520": 954954880.0, "24525": 966162624.0, "24530": 958965696.0, "24535": 958415872.0, "24540": 956820224.0, "24545": 968914240.0, "24550": 962834880.0, "24555": 959190720.0, "24560": 954761728.0, "24565": 947724480.0, "24570": 967121152.0, "24575": 973963712.0, "24580": 966010944.0, "24585": 958401152.0, "24590": 942305472.0, "24595": 973938816.0, "24600": 971260032.0, "24605": 971667776.0, "24610": 953775232.0, "24615": 943344320.0, "24620": 959075200.0, "24625": 980050304.0, "24630": 979433984.0, "24635": 976606272.0, "24640": 942133504.0, "24645": 956704768.0, "24650": 
954276992.0, "24655": 981502208.0, "24660": 956537472.0, "24665": 958234560.0, "24670": 952944192.0, "24675": 961186560.0, "24680": 970739008.0, "24685": 964836224.0, "24690": 959376320.0, "24695": 938600640.0, "24700": 980743936.0, "24705": 970924416.0, "24710": 959672512.0, "24715": 972666496.0, "24720": 947624960.0, "24725": 962766400.0, "24730": 961655232.0, "24735": 951784640.0, "24740": 982070592.0, "24745": 964440960.0, "24750": 949561280.0, "24755": 971409536.0, "24760": 963268096.0, "24765": 987873600.0, "24770": 969461504.0, "24775": 942434624.0, "24780": 968452608.0, "24785": 971008256.0, "24790": 960998336.0, "24795": 961911680.0, "24800": 945805824.0, "24805": 934175296.0, "24810": 965408832.0, "24815": 963442240.0, "24820": 972466048.0, "24825": 960792896.0, "24830": 963350976.0, "24835": 979455616.0, "24840": 956931904.0, "24845": 964225856.0, "24850": 974062144.0, "24855": 949420480.0, "24860": 989551488.0, "24865": 994637760.0, "24870": 971231232.0, "24875": 963970560.0, "24880": 948497792.0, "24885": 964947456.0, "24890": 970541440.0, "24895": 974020160.0, "24900": 966556992.0, "24905": 950605248.0, "24910": 966238784.0, "24915": 957847872.0, "24920": 985896640.0, "24925": 976405952.0, "24930": 965389184.0, "24935": 915178176.0, "24940": 963368000.0, "24945": 982967744.0, "24950": 983009664.0, "24955": 962054528.0, "24960": 948861504.0, "24965": 973102208.0, "24970": 965067840.0, "24975": 968189696.0, "24980": 965075712.0, "24985": 956771584.0, "24990": 971409024.0, "24995": 967623680.0, "25000": 954622208.0, "25005": 970002432.0, "25010": 973575488.0, "25015": 956789184.0, "25020": 981327488.0, "25025": 959849216.0, "25030": 957913152.0, "25035": 965475840.0, "25040": 950060352.0, "25045": 978034432.0, "25050": 968152768.0, "25055": 962766656.0, "25060": 969904448.0, "25065": 950783424.0, "25070": 952283264.0, "25075": 970217024.0, "25080": 963142464.0, "25085": 967003904.0, "25090": 959724096.0, "25095": 936029056.0, "25100": 990472512.0, "25105": 977625088.0, "25110": 977825024.0, "25115": 963217600.0, "25120": 950928512.0, "25125": 960868864.0, "25130": 973374272.0, "25135": 976636416.0, "25140": 972201408.0, "25145": 938382144.0, "25150": 961862144.0, "25155": 965315392.0, "25160": 964543424.0, "25165": 978128576.0, "25170": 938131584.0, "25175": 972171200.0, "25180": 976696704.0, "25185": 984454976.0, "25190": 975423936.0, "25195": 958847232.0, "25200": 952034240.0, "25205": 951423680.0, "25210": 963932608.0, "25215": 975787904.0, "25220": 973280000.0, "25225": 944062208.0, "25230": 966852608.0, "25235": 969012800.0, "25240": 964098432.0, "25245": 964232384.0, "25250": 955763712.0, "25255": 962337344.0, "25260": 973103872.0, "25265": 965437632.0, "25270": 976107584.0, "25275": 965253824.0, "25280": 941408832.0, "25285": 971009344.0, "25290": 958048704.0, "25295": 964609664.0, "25300": 970383424.0, "25305": 944223680.0, "25310": 964641088.0, "25315": 975353024.0, "25320": 963216000.0, "25325": 956843584.0, "25330": 949851264.0, "25335": 977999744.0, "25340": 966273856.0, "25345": 975746624.0, "25350": 974540032.0, "25355": 955812736.0, "25360": 954867392.0, "25365": 975837184.0, "25370": 987603008.0, "25375": 968191872.0, "25380": 980909888.0, "25385": 935765056.0, "25390": 968295104.0, "25395": 969191680.0, "25400": 975296576.0, "25405": 984730560.0, "25410": 940931008.0, "25415": 974232704.0, "25420": 964276672.0, "25425": 981304640.0, "25430": 971199104.0, "25435": 943623168.0, "25440": 946810048.0, "25445": 972410880.0, "25450": 980049280.0, "25455": 977307904.0, 
"25460": 951884608.0, "25465": 955077888.0, "25470": 981102784.0, "25475": 962022400.0, "25480": 957946688.0, "25485": 968624064.0, "25490": 956834432.0, "25495": 986279808.0, "25500": 974450304.0, "25505": 993519808.0, "25510": 963823872.0, "25515": 961741632.0, "25520": 961028608.0, "25525": 984976512.0, "25530": 971022464.0, "25535": 970911040.0, "25540": 956058816.0, "25545": 951333760.0, "25550": 973084160.0, "25555": 973563712.0, "25560": 977726272.0, "25565": 964567424.0, "25570": 930937984.0, "25575": 973832576.0, "25580": 976974720.0, "25585": 978134848.0, "25590": 981408192.0, "25595": 940666496.0, "25600": 948610688.0, "25605": 969757824.0, "25610": 974738176.0, "25615": 968011904.0, "25620": 970674944.0, "25625": 948271616.0, "25630": 980399424.0, "25635": 966324544.0, "25640": 976598784.0, "25645": 975199616.0, "25650": 954019392.0, "25655": 975527360.0, "25660": 968828608.0, "25665": 964216064.0, "25670": 976426624.0, "25675": 957919936.0, "25680": 943160704.0, "25685": 973871488.0, "25690": 970541312.0, "25695": 981382272.0, "25700": 957914240.0, "25705": 948976576.0, "25710": 978926784.0, "25715": 978817344.0, "25720": 967202176.0, "25725": 952486400.0, "25730": 957904256.0, "25735": 977004160.0, "25740": 977582144.0, "25745": 972821696.0, "25750": 958474368.0, "25755": 958560768.0, "25760": 960841408.0, "25765": 988150848.0, "25770": 974835648.0, "25775": 969553728.0, "25780": 967503872.0, "25785": 954326528.0, "25790": 965065792.0, "25795": 970564736.0, "25800": 971599424.0, "25805": 981106752.0, "25810": 938739904.0, "25815": 973612928.0, "25820": 964725120.0, "25825": 973270464.0, "25830": 986174528.0, "25835": 939959168.0, "25840": 974849408.0, "25845": 968068352.0, "25850": 967849984.0, "25855": 988423360.0, "25860": 941041728.0, "25865": 943350400.0, "25870": 969911616.0, "25875": 968380864.0, "25880": 965419712.0, "25885": 946773056.0, "25890": 951969152.0, "25895": 976044864.0, "25900": 968452224.0, "25905": 975423040.0, "25910": 963698944.0, "25915": 944575616.0, "25920": 964251968.0, "25925": 971090752.0, "25930": 962767488.0, "25935": 974284352.0, "25940": 953160128.0, "25945": 982262336.0, "25950": 982164096.0, "25955": 965801280.0, "25960": 961299328.0, "25965": 944725696.0, "25970": 974897088.0, "25975": 991996096.0, "25980": 957498688.0, "25985": 981525632.0, "25990": 967816448.0, "25995": 931301568.0, "26000": 973258304.0, "26005": 968595136.0, "26010": 979546240.0, "26015": 973765888.0, "26020": 954125568.0, "26025": 970055616.0, "26030": 968299968.0, "26035": 985013440.0, "26040": 967392768.0, "26045": 958041664.0, "26050": 961655232.0, "26055": 960015168.0, "26060": 967324736.0, "26065": 981402176.0, "26070": 976802688.0, "26075": 937603328.0, "26080": 967031296.0, "26085": 965052224.0, "26090": 979589888.0, "26095": 981316352.0, "26100": 959215040.0, "26105": 973302080.0, "26110": 967704192.0, "26115": 970518976.0, "26120": 969294208.0, "26125": 949856448.0, "26130": 967728384.0, "26135": 979262848.0, "26140": 972170240.0, "26145": 965048576.0, "26150": 951281984.0, "26155": 954714560.0, "26160": 968276224.0, "26165": 977366592.0, "26170": 976548800.0, "26175": 967489472.0, "26180": 950112960.0, "26185": 970514688.0, "26190": 991727168.0, "26195": 964326656.0, "26200": 987481472.0, "26205": 948382848.0, "26210": 972066240.0, "26215": 971654208.0, "26220": 970150208.0, "26225": 974186688.0, "26230": 956165824.0, "26235": 943899264.0, "26240": 974364352.0, "26245": 960993216.0, "26250": 970082432.0, "26255": 968749184.0, "26260": 952795264.0, "26265": 
985093632.0, "26270": 964221248.0, "26275": 976967744.0, "26280": 982484416.0, "26285": 959192768.0, "26290": 983127936.0, "26295": 966610944.0, "26300": 958042240.0, "26305": 980946496.0, "26310": 955983424.0, "26315": 942097024.0, "26320": 964350720.0, "26325": 965628736.0, "26330": 961262528.0, "26335": 955575360.0, "26340": 939499072.0, "26345": 961869440.0, "26350": 967743616.0, "26355": 966309504.0, "26360": 972100800.0, "26365": 950385600.0, "26370": 969897280.0, "26375": 974915520.0, "26380": 968265216.0, "26385": 975927552.0, "26390": 952268096.0, "26395": 966918720.0, "26400": 975372096.0, "26405": 964387328.0, "26410": 957277824.0, "26415": 968436416.0, "26420": 947367360.0, "26425": 972595264.0, "26430": 966886208.0, "26435": 962633024.0, "26440": 964268544.0, "26445": 943111040.0, "26450": 966199104.0, "26455": 979073600.0, "26460": 964781568.0, "26465": 968280000.0, "26470": 945923904.0, "26475": 971825088.0, "26480": 978705536.0, "26485": 979387968.0, "26490": 975173760.0, "26495": 965174336.0, "26500": 963589376.0, "26505": 970487232.0, "26510": 959623360.0, "26515": 966108288.0, "26520": 972779456.0, "26525": 935563840.0, "26530": 970765184.0, "26535": 958642112.0, "26540": 962041536.0, "26545": 968177344.0, "26550": 956190144.0, "26555": 966033792.0, "26560": 964530048.0, "26565": 965372480.0, "26570": 962724864.0, "26575": 949198912.0, "26580": 963558528.0, "26585": 963447680.0, "26590": 964988736.0, "26595": 964933696.0, "26600": 970143552.0, "26605": 937960064.0, "26610": 971137600.0, "26615": 969831808.0, "26620": 979275520.0, "26625": 960125760.0, "26630": 944070784.0, "26635": 959488576.0, "26640": 969027136.0, "26645": 965588352.0, "26650": 967232640.0, "26655": 940622272.0, "26660": 945988096.0, "26665": 972705856.0, "26670": 969395584.0, "26675": 967463616.0, "26680": 951970368.0, "26685": 945919040.0, "26690": 967662336.0, "26695": 971383552.0, "26700": 975806528.0, "26705": 982927680.0, "26710": 952994112.0, "26715": 963969856.0, "26720": 986701120.0, "26725": 952023552.0, "26730": 970077312.0, "26735": 960094208.0, "26740": 961081728.0, "26745": 965083840.0, "26750": 967231424.0, "26755": 977440576.0, "26760": 975297600.0, "26765": 942971648.0, "26770": 972595072.0, "26775": 974553216.0, "26780": 962913024.0, "26785": 969718336.0, "26790": 943192512.0, "26795": 948647040.0, "26800": 965911552.0, "26805": 964147584.0, "26810": 967384384.0, "26815": 951766720.0, "26820": 969970752.0, "26825": 963362752.0, "26830": 980107200.0, "26835": 971437760.0, "26840": 957932608.0, "26845": 946457920.0, "26850": 983375936.0, "26855": 970740672.0, "26860": 973367296.0, "26865": 963288448.0, "26870": 954637760.0, "26875": 972827968.0, "26880": 972902336.0, "26885": 968836224.0, "26890": 961336192.0, "26895": 938383360.0, "26900": 967467904.0, "26905": 967238528.0, "26910": 957343744.0, "26915": 974524160.0, "26920": 943794432.0, "26925": 951146944.0, "26930": 961809664.0, "26935": 976303040.0, "26940": 967136064.0, "26945": 973762688.0, "26950": 949713600.0, "26955": 971735872.0, "26960": 972907328.0, "26965": 972992384.0, "26970": 971164800.0, "26975": 949211648.0, "26980": 981886080.0, "26985": 976059776.0, "26990": 975098944.0, "26995": 961717568.0, "27000": 952480704.0, "27005": 956693376.0, "27010": 968644864.0, "27015": 962700352.0, "27020": 956191232.0, "27025": 990552000.0, "27030": 935804032.0, "27035": 954107200.0, "27040": 959364800.0, "27045": 978269312.0, "27050": 951698240.0, "27055": 951989248.0, "27060": 991284864.0, "27065": 964332736.0, "27070": 975417536.0, 
"27075": 965645888.0, "27080": 943253184.0, "27085": 962853632.0, "27090": 958807296.0, "27095": 980278400.0, "27100": 958644992.0, "27105": 939119488.0, "27110": 948831360.0, "27115": 974136960.0, "27120": 974169408.0, "27125": 971564800.0, "27130": 959983936.0, "27135": 948426496.0, "27140": 968406144.0, "27145": 973707584.0, "27150": 967865920.0, "27155": 975432704.0, "27160": 943908736.0, "27165": 974013376.0, "27170": 961091712.0, "27175": 967949888.0, "27180": 968758272.0, "27185": 975363392.0, "27190": 944782848.0, "27195": 961383360.0, "27200": 969374464.0, "27205": 975388928.0, "27210": 955702848.0, "27215": 950196032.0, "27220": 974744512.0, "27225": 962855232.0, "27230": 962962368.0, "27235": 953050368.0, "27240": 956594240.0, "27245": 963186624.0, "27250": 965790080.0, "27255": 969557952.0, "27260": 952897600.0, "27265": 961956992.0, "27270": 963387712.0, "27275": 992559680.0, "27280": 957787264.0, "27285": 964560576.0, "27290": 969303808.0, "27295": 932638848.0, "27300": 976011648.0, "27305": 962513856.0, "27310": 975204992.0, "27315": 968566592.0, "27320": 951994240.0, "27325": 965452480.0, "27330": 960548864.0, "27335": 984055104.0, "27340": 980254784.0, "27345": 941545600.0, "27350": 958248192.0, "27355": 957811776.0, "27360": 975603712.0, "27365": 968386944.0, "27370": 959279744.0, "27375": 939403072.0, "27380": 966078144.0, "27385": 959020864.0, "27390": 957134144.0, "27395": 984928448.0, "27400": 952804736.0, "27405": 977573696.0, "27410": 983040384.0, "27415": 960741184.0, "27420": 972496256.0, "27425": 938911552.0, "27430": 960537536.0, "27435": 963278208.0, "27440": 963289664.0, "27445": 970740672.0, "27450": 935307392.0, "27455": 976323200.0, "27460": 961312192.0, "27465": 977152064.0, "27470": 971782592.0, "27475": 964880768.0, "27480": 949039488.0, "27485": 964129600.0, "27490": 969086784.0, "27495": 971316416.0, "27500": 967508544.0, "27505": 960702208.0, "27510": 966329152.0, "27515": 968020160.0, "27520": 979848256.0, "27525": 966748352.0, "27530": 952717504.0, "27535": 951754816.0, "27540": 975666688.0, "27545": 970677696.0, "27550": 965876672.0, "27555": 957349632.0, "27560": 941275392.0, "27565": 966852288.0, "27570": 963880000.0, "27575": 978972352.0, "27580": 952381312.0, "27585": 935715584.0, "27590": 963361664.0, "27595": 969399424.0, "27600": 976406528.0, "27605": 963896832.0, "27610": 945520512.0, "27615": 962600256.0, "27620": 972852608.0, "27625": 973184576.0, "27630": 963019072.0, "27635": 957626880.0, "27640": 949598912.0, "27645": 981199808.0, "27650": 972227392.0, "27655": 976719488.0, "27660": 973338368.0, "27665": 953693504.0, "27670": 956079744.0, "27675": 957734912.0, "27680": 958488512.0, "27685": 977933376.0, "27690": 932571712.0, "27695": 986439296.0, "27700": 967509120.0, "27705": 963144576.0, "27710": 953336448.0, "27715": 956104768.0, "27720": 949976896.0, "27725": 987421504.0, "27730": 969001088.0, "27735": 972957504.0, "27740": 962489664.0, "27745": 945620160.0, "27750": 973000896.0, "27755": 975045696.0, "27760": 971812864.0, "27765": 972073408.0, "27770": 946393280.0, "27775": 970606016.0, "27780": 979429376.0, "27785": 968875072.0, "27790": 975618944.0, "27795": 941368128.0, "27800": 959739200.0, "27805": 975790208.0, "27810": 955453696.0, "27815": 973890816.0, "27820": 985247296.0, "27825": 940293760.0, "27830": 968178432.0, "27835": 979540096.0, "27840": 959783040.0, "27845": 974319488.0, "27850": 949450240.0, "27855": 979878464.0, "27860": 985235968.0, "27865": 978790720.0, "27870": 983719424.0, "27875": 939677952.0, "27880": 
970797056.0, "27885": 980414400.0, "27890": 970359040.0, "27895": 970081600.0, "27900": 937915520.0, "27905": 952333376.0, "27910": 979505856.0, "27915": 979478592.0, "27920": 953235200.0, "27925": 970615040.0, "27930": 948029440.0, "27935": 978493888.0, "27940": 990812224.0, "27945": 964144000.0, "27950": 968921664.0, "27955": 939206528.0, "27960": 976269952.0, "27965": 969561536.0, "27970": 961115904.0, "27975": 966461120.0, "27980": 942768384.0, "27985": 963134336.0, "27990": 976011008.0, "27995": 975344768.0, "28000": 976678592.0, "28005": 960278976.0, "28010": 940133760.0, "28015": 977436672.0, "28020": 964483200.0, "28025": 973764352.0, "28030": 966671488.0, "28035": 942376704.0, "28040": 960924672.0, "28045": 971255552.0, "28050": 974823744.0, "28055": 970653376.0, "28060": 949611520.0, "28065": 953117248.0, "28070": 972012736.0, "28075": 964462592.0, "28080": 973082304.0, "28085": 987549376.0, "28090": 941428800.0, "28095": 972785472.0, "28100": 971244032.0, "28105": 973160704.0, "28110": 985143424.0, "28115": 949573376.0, "28120": 992390400.0, "28125": 961834176.0, "28130": 968338432.0, "28135": 951679488.0, "28140": 936266240.0, "28145": 951091648.0, "28150": 962658496.0, "28155": 969425152.0, "28160": 965073664.0, "28165": 944978560.0, "28170": 944183680.0, "28175": 976292096.0, "28180": 972761728.0, "28185": 976144384.0, "28190": 952296832.0, "28195": 950193024.0, "28200": 973788544.0, "28205": 975900224.0, "28210": 978513792.0, "28215": 979278144.0, "28220": 936786432.0, "28225": 968568320.0, "28230": 973700544.0, "28235": 959145664.0, "28240": 967774400.0, "28245": 953044672.0, "28250": 959332352.0, "28255": 956206592.0, "28260": 959445696.0, "28265": 973294592.0, "28270": 973872000.0, "28275": 950893440.0, "28280": 964301440.0, "28285": 964745536.0, "28290": 969885632.0, "28295": 965207296.0, "28300": 954259904.0, "28305": 964745216.0, "28310": 963812352.0, "28315": 964617344.0, "28320": 962164352.0, "28325": 948716864.0, "28330": 970232704.0, "28335": 966398016.0, "28340": 977294784.0, "28345": 965150272.0, "28350": 959745984.0, "28355": 951908544.0, "28360": 966104768.0, "28365": 988442048.0, "28370": 971915456.0, "28375": 961666944.0, "28380": 949015360.0, "28385": 965207296.0, "28390": 972221504.0, "28395": 964808832.0, "28400": 983736640.0, "28405": 955788608.0, "28410": 980358592.0, "28415": 975898368.0, "28420": 969959680.0, "28425": 974199104.0, "28430": 939894784.0, "28435": 955800832.0, "28440": 976698816.0, "28445": 973913600.0, "28450": 981422080.0, "28455": 975105920.0, "28460": 955285696.0, "28465": 966522048.0, "28470": 956449536.0, "28475": 969893760.0, "28480": 976778496.0, "28485": 947510080.0, "28490": 949980224.0, "28495": 962904128.0, "28500": 990148544.0, "28505": 968781760.0, "28510": 948956032.0, "28515": 946621056.0, "28520": 970508672.0, "28525": 973233600.0, "28530": 972127360.0, "28535": 966778752.0, "28540": 958284736.0, "28545": 967196480.0, "28550": 966231552.0, "28555": 973855296.0, "28560": 969750336.0, "28565": 955944256.0, "28570": 980303360.0, "28575": 958554944.0, "28580": 972545728.0, "28585": 970652160.0, "28590": 948179968.0, "28595": 949998720.0, "28600": 970587712.0, "28605": 972276864.0, "28610": 977373632.0, "28615": 949628992.0, "28620": 948334976.0, "28625": 967682368.0, "28630": 970113344.0, "28635": 966447616.0, "28640": 968831360.0, "28645": 954559296.0, "28650": 974449792.0, "28655": 982847808.0, "28660": 983556736.0, "28665": 967126912.0, "28670": 944710848.0, "28675": 964678592.0, "28680": 985468160.0, "28685": 969857344.0, 
"28690": 989257920.0, "28695": 946398528.0, "28700": 931107136.0, "28705": 965849728.0, "28710": 978189568.0, "28715": 978718144.0, "28720": 985299136.0, "28725": 955497280.0, "28730": 966239808.0, "28735": 960832512.0, "28740": 976461376.0, "28745": 965884544.0, "28750": 948155520.0, "28755": 951423488.0, "28760": 968965184.0, "28765": 975668416.0, "28770": 955821568.0, "28775": 971427904.0, "28780": 945778816.0, "28785": 964547328.0, "28790": 969923200.0, "28795": 975564928.0, "28800": 957127296.0, "28805": 953939712.0, "28810": 971291712.0, "28815": 964763328.0, "28820": 972956608.0, "28825": 948315968.0, "28830": 933072832.0, "28835": 966281088.0, "28840": 978116480.0, "28845": 967044224.0, "28850": 975879104.0, "28855": 956724544.0, "28860": 939582208.0, "28865": 973464128.0, "28870": 963027840.0, "28875": 966226816.0, "28880": 962247488.0, "28885": 961438464.0, "28890": 966564160.0, "28895": 965973440.0, "28900": 971071872.0, "28905": 985012736.0, "28910": 930724672.0, "28915": 962994496.0, "28920": 967571264.0, "28925": 970828480.0, "28930": 989781824.0, "28935": 949894848.0, "28940": 951055552.0, "28945": 985401088.0, "28950": 962364032.0, "28955": 959778368.0, "28960": 961597056.0, "28965": 974075776.0, "28970": 958506752.0, "28975": 968643776.0, "28980": 958013696.0, "28985": 966115648.0, "28990": 937143104.0, "28995": 959942656.0, "29000": 980228864.0, "29005": 970700736.0, "29010": 976956672.0, "29015": 946456576.0, "29020": 963817088.0, "29025": 948654720.0, "29030": 976193536.0, "29035": 983209344.0, "29040": 943088832.0, "29045": 964205696.0, "29050": 986925376.0, "29055": 968215936.0, "29060": 952683840.0, "29065": 959629696.0, "29070": 944938880.0, "29075": 977094208.0, "29080": 968412480.0, "29085": 973843072.0, "29090": 973784768.0, "29095": 921360192.0, "29100": 960347008.0, "29105": 983767360.0, "29110": 974511232.0, "29115": 967499328.0, "29120": 946859392.0, "29125": 945055232.0, "29130": 979709568.0, "29135": 968872960.0, "29140": 970305536.0, "29145": 960998848.0, "29150": 947197440.0, "29155": 987041984.0, "29160": 970712000.0, "29165": 983894784.0, "29170": 969881216.0, "29175": 952739072.0, "29180": 969241920.0, "29185": 970751872.0, "29190": 948162432.0, "29195": 978588288.0, "29200": 958849088.0, "29205": 966012096.0, "29210": 974179648.0, "29215": 965955328.0, "29220": 953478144.0, "29225": 962338816.0, "29230": 948082240.0, "29235": 973504512.0, "29240": 975912512.0, "29245": 970496128.0, "29250": 977114688.0, "29255": 957253440.0, "29260": 972977984.0, "29265": 982692224.0, "29270": 966226368.0, "29275": 952172416.0, "29280": 937258496.0, "29285": 975366272.0, "29290": 980247680.0, "29295": 958719744.0, "29300": 965531712.0, "29305": 961147840.0, "29310": 951220288.0, "29315": 982266176.0, "29320": 965548736.0, "29325": 984989184.0, "29330": 962283520.0, "29335": 937615168.0, "29340": 967855744.0, "29345": 963401728.0, "29350": 969174720.0, "29355": 985252992.0, "29360": 941517568.0, "29365": 961269888.0, "29370": 970950720.0, "29375": 970138304.0, "29380": 976718976.0, "29385": 954686784.0, "29390": 954291712.0, "29395": 961638592.0, "29400": 979856064.0, "29405": 963379200.0, "29410": 961332416.0, "29415": 947062272.0, "29420": 983171648.0, "29425": 965416128.0, "29430": 972068480.0, "29435": 969358208.0, "29440": 933961792.0, "29445": 985517952.0, "29450": 961558464.0, "29455": 976432576.0, "29460": 978010944.0, "29465": 941443072.0, "29470": 956131072.0, "29475": 974381504.0, "29480": 957675776.0, "29485": 972152256.0, "29490": 956615168.0, "29495": 
951517888.0, "29500": 973441792.0, "29505": 961947008.0, "29510": 969538432.0, "29515": 973597888.0, "29520": 950416320.0, "29525": 961668992.0, "29530": 969023808.0, "29535": 970656128.0, "29540": 965169472.0, "29545": 928397440.0, "29550": 934467264.0, "29555": 978082048.0, "29560": 963382784.0, "29565": 972485504.0, "29570": 963051008.0, "29575": 948003072.0, "29580": 968812032.0, "29585": 974810816.0, "29590": 971538816.0, "29595": 958721792.0, "29600": 949776512.0, "29605": 958928384.0, "29610": 963862976.0, "29615": 960073280.0, "29620": 972865408.0, "29625": 965913280.0, "29630": 964293248.0, "29635": 965932800.0, "29640": 973660288.0, "29645": 971048640.0, "29650": 970819264.0, "29655": 936653376.0, "29660": 957160256.0, "29665": 964599168.0, "29670": 956811456.0, "29675": 972767936.0, "29680": 946143680.0, "29685": 978819456.0, "29690": 963762816.0, "29695": 964653376.0, "29700": 975832576.0, "29705": 961012736.0, "29710": 965595776.0, "29715": 971409984.0, "29720": 970710464.0, "29725": 967910336.0, "29730": 960150272.0, "29735": 953985728.0, "29740": 986790400.0, "29745": 959003712.0, "29750": 972030336.0, "29755": 953911680.0, "29760": 941837120.0, "29765": 965127936.0, "29770": 974224384.0, "29775": 971219200.0, "29780": 966096960.0, "29785": 939365312.0, "29790": 969099840.0, "29795": 974691008.0, "29800": 973880064.0, "29805": 981528640.0, "29810": 935658304.0, "29815": 950010112.0, "29820": 969443904.0, "29825": 969827200.0, "29830": 969579904.0, "29835": 957485056.0, "29840": 935227840.0, "29845": 954078464.0, "29850": 972510784.0, "29855": 961786688.0, "29860": 980644480.0, "29865": 938357824.0, "29870": 958728256.0, "29875": 979267072.0, "29880": 965789824.0, "29885": 962056320.0, "29890": 920034368.0, "29895": 993872448.0, "29900": 955232768.0, "29905": 959374080.0, "29910": 954846720.0, "29915": 965491328.0, "29920": 962094976.0, "29925": 985822848.0, "29930": 957046912.0, "29935": 970249088.0, "29940": 970162688.0, "29945": 969172928.0, "29950": 964821888.0, "29955": 977317696.0, "29960": 974905728.0, "29965": 972570048.0, "29970": 923725440.0, "29975": 958935488.0, "29980": 972595584.0, "29985": 963867904.0, "29990": 967702208.0, "29995": 967891520.0, "30000": 942485312.0, "30005": 967524736.0, "30010": 956522816.0, "30015": 966104384.0, "30020": 957793920.0, "30025": 944605184.0, "30030": 978150080.0, "30035": 978178240.0, "30040": 983204736.0, "30045": 965906176.0, "30050": 937687552.0, "30055": 972870336.0, "30060": 959842944.0, "30065": 976423680.0, "30070": 962552576.0, "30075": 956921728.0, "30080": 954046784.0, "30085": 965483968.0, "30090": 972903744.0, "30095": 950048384.0, "30100": 962516800.0, "30105": 952921856.0, "30110": 963355712.0, "30115": 963076864.0, "30120": 972000640.0, "30125": 965294272.0, "30130": 933841728.0, "30135": 965369344.0, "30140": 953449152.0, "30145": 980671104.0, "30150": 975236480.0, "30155": 930823104.0, "30160": 967363264.0, "30165": 966518528.0, "30170": 970347328.0, "30175": 956038144.0, "30180": 931960320.0, "30185": 944360448.0, "30190": 970181824.0, "30195": 972669376.0, "30200": 958170624.0, "30205": 950540352.0, "30210": 940246080.0, "30215": 969580416.0, "30220": 963093440.0, "30225": 954739904.0, "30230": 964955392.0, "30235": 953176256.0, "30240": 958955136.0, "30245": 973978368.0, "30250": 968812608.0, "30255": 985562944.0, "30260": 978141504.0, "30265": 969058304.0, "30270": 966713216.0, "30275": 974808064.0, "30280": 984063360.0, "30285": 966990784.0, "30290": 959373376.0, "30295": 960349440.0, "30300": 953334784.0, 
"30305": 980396672.0, "30310": 967019904.0, "30315": 961928192.0, "30320": 966572032.0, "30325": 962305536.0, "30330": 960780928.0, "30335": 960643776.0, "30340": 959538368.0, "30345": 957590016.0, "30350": 972084736.0, "30355": 974102720.0, "30360": 966522880.0, "30365": 968475584.0, "30370": 948236160.0, "30375": 975949824.0, "30380": 963794688.0, "30385": 963009216.0, "30390": 986218368.0, "30395": 930699264.0, "30400": 976172544.0, "30405": 990139072.0, "30410": 977453248.0, "30415": 962462080.0, "30420": 945796288.0, "30425": 969537856.0, "30430": 977129664.0, "30435": 972228544.0, "30440": 986717696.0, "30445": 936598464.0, "30450": 944995904.0, "30455": 955667328.0, "30460": 973499520.0, "30465": 980912896.0, "30470": 981662400.0, "30475": 936935104.0, "30480": 964624384.0, "30485": 959895936.0, "30490": 986651456.0, "30495": 975640192.0, "30500": 958426624.0, "30505": 975357312.0, "30510": 963550336.0, "30515": 970582272.0, "30520": 974691392.0, "30525": 944117696.0, "30530": 965030272.0, "30535": 967080704.0, "30540": 975631616.0, "30545": 967179392.0, "30550": 982170944.0, "30555": 955264704.0, "30560": 974654400.0, "30565": 969905152.0, "30570": 965275264.0, "30575": 965981440.0, "30580": 940290752.0, "30585": 973531136.0, "30590": 960609792.0, "30595": 972436864.0, "30600": 978824704.0, "30605": 940060864.0, "30610": 968653184.0, "30615": 964429056.0, "30620": 968082752.0, "30625": 969574400.0, "30630": 965763776.0, "30635": 946198080.0, "30640": 975619648.0, "30645": 973022592.0, "30650": 967326272.0, "30655": 959540160.0, "30660": 943441024.0, "30665": 983268864.0, "30670": 967697600.0, "30675": 971282368.0, "30680": 975432256.0, "30685": 934134656.0, "30690": 961381120.0, "30695": 967247296.0, "30700": 972474944.0, "30705": 961314432.0, "30710": 964050496.0, "30715": 946494016.0, "30720": 969322432.0, "30725": 972848896.0, "30730": 976618944.0, "30735": 967442624.0, "30740": 947587456.0, "30745": 966871488.0, "30750": 964618368.0, "30755": 994787968.0, "30760": 988412288.0, "30765": 942145152.0, "30770": 961744832.0, "30775": 970945984.0, "30780": 977523904.0, "30785": 970607616.0, "30790": 952486848.0, "30795": 961963136.0, "30800": 961892416.0, "30805": 959132416.0, "30810": 951528128.0, "30815": 956242368.0, "30820": 948932288.0, "30825": 989051328.0, "30830": 974463232.0, "30835": 966427200.0, "30840": 975378112.0, "30845": 937088704.0, "30850": 968369984.0, "30855": 987338176.0, "30860": 970588864.0, "30865": 968531456.0, "30870": 956638208.0, "30875": 948582400.0, "30880": 985812480.0, "30885": 981196416.0, "30890": 974768896.0, "30895": 946486016.0, "30900": 941977088.0, "30905": 952414848.0, "30910": 977420160.0, "30915": 980446144.0, "30920": 969054144.0, "30925": 949351488.0, "30930": 974843648.0, "30935": 967210176.0, "30940": 958707904.0, "30945": 974507328.0, "30950": 950730368.0, "30955": 973157504.0, "30960": 971576448.0, "30965": 965261056.0, "30970": 973908224.0, "30975": 975159040.0, "30980": 947273024.0, "30985": 971511680.0, "30990": 966220480.0, "30995": 967885504.0, "31000": 968404352.0, "31005": 952753856.0, "31010": 983745600.0, "31015": 957472256.0, "31020": 961332416.0, "31025": 964501824.0, "31030": 943728896.0, "31035": 977682368.0, "31040": 981852992.0, "31045": 963727936.0, "31050": 967334720.0, "31055": 953132480.0, "31060": 978972160.0, "31065": 981037376.0, "31070": 972663104.0, "31075": 970084928.0, "31080": 972737472.0, "31085": 940333888.0, "31090": 987577472.0, "31095": 970059840.0, "31100": 975905920.0, "31105": 961738816.0, "31110": 
953195072.0, "31115": 968280128.0, "31120": 978371008.0, "31125": 971405696.0, "31130": 969986112.0, "31135": 936838720.0, "31140": 952641344.0, "31145": 986705088.0, "31150": 966993856.0, "31155": 967387712.0, "31160": 954611840.0, "31165": 958291136.0, "31170": 969723456.0, "31175": 968560064.0, "31180": 968486720.0, "31185": 981047808.0, "31190": 956524800.0, "31195": 963046848.0, "31200": 957060544.0, "31205": 958426624.0, "31210": 985285312.0, "31215": 941419456.0, "31220": 960780032.0, "31225": 967297152.0, "31230": 962075008.0, "31235": 968262272.0, "31240": 959072128.0, "31245": 942765632.0, "31250": 956369216.0, "31255": 959791808.0, "31260": 965952448.0, "31265": 949544320.0, "31270": 948598912.0, "31275": 973556288.0, "31280": 977461312.0, "31285": 963175808.0, "31290": 973002816.0, "31295": 935190592.0, "31300": 977148288.0, "31305": 988324800.0, "31310": 969807616.0, "31315": 957966784.0, "31320": 945861952.0, "31325": 940448192.0, "31330": 969709952.0, "31335": 980650816.0, "31340": 955865408.0, "31345": 960284864.0, "31350": 936297664.0, "31355": 963262272.0, "31360": 961871552.0, "31365": 973965504.0, "31370": 968831552.0, "31375": 936296320.0, "31380": 957131968.0, "31385": 956695488.0, "31390": 959624064.0, "31395": 981766016.0, "31400": 965865792.0, "31405": 955595520.0, "31410": 960115520.0, "31415": 972505408.0, "31420": 969194048.0, "31425": 943397248.0, "31430": 960521088.0, "31435": 974330368.0, "31440": 972667904.0, "31445": 970653632.0, "31450": 980275200.0, "31455": 936819648.0, "31460": 988542400.0, "31465": 963037568.0, "31470": 952548928.0, "31475": 962609600.0, "31480": 952786944.0, "31485": 960264896.0, "31490": 974453312.0, "31495": 957190592.0, "31500": 974812992.0, "31505": 944580416.0, "31510": 958541888.0, "31515": 959315520.0, "31520": 975046336.0, "31525": 963746368.0, "31530": 965262784.0, "31535": 933421888.0, "31540": 960867840.0, "31545": 976219904.0, "31550": 973223488.0, "31555": 967116608.0, "31560": 946622336.0, "31565": 940638784.0, "31570": 971085056.0, "31575": 979334016.0, "31580": 961466304.0, "31585": 943832256.0, "31590": 929239872.0, "31595": 967302144.0, "31600": 964007168.0, "31605": 959526592.0, "31610": 966386176.0, "31615": 946160192.0, "31620": 968565632.0, "31625": 943378688.0, "31630": 960701504.0, "31635": 971588416.0, "31640": 947586240.0, "31645": 958351744.0, "31650": 962951744.0, "31655": 984119232.0, "31660": 961026176.0, "31665": 968020096.0, "31670": 974852416.0, "31675": 960410368.0, "31680": 957064320.0, "31685": 981581120.0, "31690": 957182336.0, "31695": 933307392.0, "31700": 957020608.0, "31705": 951573696.0, "31710": 963787136.0, "31715": 959650688.0, "31720": 941719552.0, "31725": 965030208.0, "31730": 965331392.0, "31735": 965931328.0, "31740": 959375040.0, "31745": 943457600.0, "31750": 970753728.0, "31755": 966362944.0, "31760": 970086592.0, "31765": 971502656.0, "31770": 944190528.0, "31775": 953849664.0, "31780": 968904704.0, "31785": 985095488.0, "31790": 986465472.0, "31795": 966331200.0, "31800": 943385536.0, "31805": 967688000.0, "31810": 973926400.0, "31815": 967664640.0, "31820": 956166784.0, "31825": 938424320.0, "31830": 962817984.0, "31835": 976629888.0, "31840": 963985536.0, "31845": 991779584.0, "31850": 966873152.0, "31855": 936206784.0, "31860": 968224192.0, "31865": 961028928.0, "31870": 989036544.0, "31875": 984309504.0, "31880": 951836032.0, "31885": 965718656.0, "31890": 942684800.0, "31895": 963555584.0, "31900": 966728960.0, "31905": 946504000.0, "31910": 975448000.0, "31915": 964568704.0, 
"31920": 950212928.0, "31925": 961762880.0, "31930": 947056832.0, "31935": 955735552.0, "31940": 974926080.0, "31945": 975692992.0, "31950": 961978624.0, "31955": 955910016.0, "31960": 941977920.0, "31965": 953971968.0, "31970": 984272128.0, "31975": 970927744.0, "31980": 971754688.0, "31985": 948815744.0, "31990": 964617472.0, "31995": 976331072.0, "32000": 974519808.0, "32005": 989516480.0, "32010": 948103040.0, "32015": 952357952.0, "32020": 963913600.0, "32025": 983301056.0, "32030": 966695616.0, "32035": 973377024.0, "32040": 944717312.0, "32045": 972132480.0, "32050": 951375936.0, "32055": 980717760.0, "32060": 965723392.0, "32065": 958243776.0, "32070": 961657344.0, "32075": 967972480.0, "32080": 975973248.0, "32085": 969575552.0, "32090": 946204480.0, "32095": 984173248.0, "32100": 971643776.0, "32105": 976845696.0, "32110": 971362944.0, "32115": 963377856.0, "32120": 970694720.0, "32125": 966316992.0, "32130": 957935296.0, "32135": 964638912.0, "32140": 971663232.0, "32145": 923816832.0, "32150": 975231680.0, "32155": 978931648.0, "32160": 957507264.0, "32165": 962622976.0, "32170": 938568704.0, "32175": 950523328.0, "32180": 971668352.0, "32185": 984826112.0, "32190": 958353920.0, "32195": 953863168.0, "32200": 959330048.0, "32205": 975530560.0, "32210": 979873536.0, "32215": 961880320.0, "32220": 957192960.0, "32225": 946302208.0, "32230": 979920448.0, "32235": 973217728.0, "32240": 965171520.0, "32245": 968205376.0, "32250": 964973248.0, "32255": 961020608.0, "32260": 975194560.0, "32265": 971147776.0, "32270": 968591104.0, "32275": 952052800.0, "32280": 949152704.0, "32285": 961409664.0, "32290": 976582656.0, "32295": 969878144.0, "32300": 948465600.0, "32305": 953818688.0, "32310": 966417152.0, "32315": 956377024.0, "32320": 970556864.0, "32325": 963652736.0, "32330": 950035008.0, "32335": 982264768.0, "32340": 960511168.0, "32345": 964802112.0, "32350": 966632384.0, "32355": 952291904.0, "32360": 964435200.0, "32365": 976723328.0, "32370": 965133056.0, "32375": 967481408.0, "32380": 931151360.0, "32385": 964692608.0, "32390": 980070784.0, "32395": 962143680.0, "32400": 961062848.0, "32405": 969127744.0, "32410": 952444608.0, "32415": 941764544.0, "32420": 973702464.0, "32425": 975889088.0, "32430": 983844224.0, "32435": 946641472.0, "32440": 961976384.0, "32445": 979876608.0, "32450": 975089792.0, "32455": 971760000.0, "32460": 968782720.0, "32465": 955185856.0, "32470": 962823616.0, "32475": 968077888.0, "32480": 975827072.0, "32485": 949773632.0, "32490": 949869760.0, "32495": 957432640.0, "32500": 971686080.0, "32505": 974256128.0, "32510": 989518400.0, "32515": 951585664.0, "32520": 947193216.0, "32525": 967357760.0, "32530": 961240320.0, "32535": 982374016.0, "32540": 958028160.0, "32545": 958692352.0, "32550": 979599680.0, "32555": 972809408.0, "32560": 961097152.0, "32565": 958948224.0, "32570": 946387072.0, "32575": 964978944.0, "32580": 962835584.0, "32585": 973247488.0, "32590": 974088704.0, "32595": 952958784.0, "32600": 981753984.0, "32605": 975820480.0, "32610": 954763136.0, "32615": 953689664.0, "32620": 952849792.0, "32625": 979234560.0, "32630": 969361472.0, "32635": 980917888.0, "32640": 960989120.0, "32645": 941193984.0, "32650": 966687232.0, "32655": 962609856.0, "32660": 974734976.0, "32665": 988843008.0, "32670": 968050752.0, "32675": 952659328.0, "32680": 971140288.0, "32685": 972805568.0, "32690": 974052864.0, "32695": 976467456.0, "32700": 937378624.0, "32705": 965140672.0, "32710": 978458048.0, "32715": 960431040.0, "32720": 979525888.0, "32725": 
957217856.0, "32730": 969703808.0, "32735": 959344192.0, "32740": 954610432.0, "32745": 980951488.0, "32750": 964569024.0, "32755": 959116992.0, "32760": 971099456.0, "32765": 962928704.0, "32770": 969459136.0, "32775": 974187200.0, "32780": 949669440.0, "32785": 964647488.0, "32790": 984592320.0, "32795": 969596352.0, "32800": 973169472.0, "32805": 950625536.0, "32810": 952672192.0, "32815": 976262400.0, "32820": 978434368.0, "32825": 979720896.0, "32830": 952211904.0, "32835": 949790784.0, "32840": 975559040.0, "32845": 978205824.0, "32850": 963229568.0, "32855": 975362240.0, "32860": 953066752.0, "32865": 962996864.0, "32870": 962671872.0, "32875": 975643456.0, "32880": 965725760.0, "32885": 970741312.0, "32890": 970435520.0, "32895": 957665984.0, "32900": 974212352.0, "32905": 975483840.0, "32910": 960092160.0, "32915": 957273088.0, "32920": 957728448.0, "32925": 970785664.0, "32930": 959509248.0, "32935": 977901888.0, "32940": 957934528.0, "32945": 969029248.0, "32950": 987927424.0, "32955": 980561536.0, "32960": 967277376.0, "32965": 925047552.0, "32970": 945694592.0, "32975": 970188608.0, "32980": 975151680.0, "32985": 979169728.0, "32990": 935205440.0, "32995": 968040960.0, "33000": 963594816.0, "33005": 960712256.0, "33010": 976533312.0, "33015": 961861504.0, "33020": 956841984.0, "33025": 984648192.0, "33030": 976726016.0, "33035": 977018112.0, "33040": 983204288.0, "33045": 941420864.0, "33050": 971602048.0, "33055": 965834816.0, "33060": 973837568.0, "33065": 970657984.0, "33070": 947158976.0, "33075": 970141952.0, "33080": 976233792.0, "33085": 986233088.0, "33090": 959099968.0, "33095": 961520640.0, "33100": 946280448.0, "33105": 971910336.0, "33110": 988432832.0, "33115": 968733632.0, "33120": 966379712.0, "33125": 941240512.0, "33130": 964067264.0, "33135": 967122880.0, "33140": 983430720.0, "33145": 973709632.0, "33150": 949628352.0, "33155": 955711552.0, "33160": 960252608.0, "33165": 966449856.0, "33170": 969061120.0, "33175": 967693312.0, "33180": 938528768.0, "33185": 964876608.0, "33190": 961517888.0, "33195": 975615744.0, "33200": 965115968.0, "33205": 943306624.0, "33210": 976712576.0, "33215": 966227968.0, "33220": 984008576.0, "33225": 982578688.0, "33230": 961487104.0, "33235": 968181632.0, "33240": 973395776.0, "33245": 972841984.0, "33250": 965128064.0, "33255": 947619840.0, "33260": 945285760.0, "33265": 980244736.0, "33270": 979232320.0, "33275": 972242496.0, "33280": 968526528.0, "33285": 961141760.0, "33290": 971945344.0, "33295": 961514112.0, "33300": 975681408.0, "33305": 982502848.0, "33310": 971588160.0, "33315": 974645888.0, "33320": 966941120.0, "33325": 973488128.0, "33330": 958184640.0, "33335": 951346176.0, "33340": 958820736.0, "33345": 974529664.0, "33350": 975623424.0, "33355": 988260224.0, "33360": 966642368.0, "33365": 946606656.0, "33370": 987313856.0, "33375": 961603200.0, "33380": 972827072.0, "33385": 993334784.0, "33390": 956973568.0, "33395": 964533632.0, "33400": 972627392.0, "33405": 974839744.0, "33410": 981789248.0, "33415": 948171328.0, "33420": 969399488.0, "33425": 991472064.0, "33430": 960616256.0, "33435": 972474496.0, "33440": 952595456.0, "33445": 925109120.0, "33450": 968372544.0, "33455": 968064832.0, "33460": 975109248.0, "33465": 982653952.0, "33470": 959342464.0, "33475": 983058560.0, "33480": 971739520.0, "33485": 961757056.0, "33490": 975478656.0, "33495": 954294528.0, "33500": 985396096.0, "33505": 984114496.0, "33510": 976023552.0, "33515": 965210560.0, "33520": 956236096.0, "33525": 956499264.0, "33530": 965890816.0, 
"33535": 972277760.0, "33540": 982332288.0, "33545": 960553856.0, "33550": 934424896.0, "33555": 968267392.0, "33560": 987247808.0, "33565": 975718784.0, "33570": 973757568.0, "33575": 938969664.0, "33580": 965516032.0, "33585": 974022848.0, "33590": 986853888.0, "33595": 980466112.0, "33600": 958550720.0, "33605": 952015936.0, "33610": 969878656.0, "33615": 958279296.0, "33620": 972604992.0, "33625": 975836096.0, "33630": 953564992.0, "33635": 979066496.0, "33640": 952399936.0, "33645": 968564544.0, "33650": 981480448.0, "33655": 958236032.0, "33660": 982074816.0, "33665": 967049856.0, "33670": 962132224.0, "33675": 984581056.0, "33680": 938472320.0, "33685": 951162496.0, "33690": 972205504.0, "33695": 978641408.0, "33700": 964497472.0, "33705": 967210176.0, "33710": 966715200.0, "33715": 978138752.0, "33720": 965499456.0, "33725": 982062464.0, "33730": 967014080.0, "33735": 933283840.0, "33740": 967528448.0, "33745": 972387904.0, "33750": 970224832.0, "33755": 957721792.0, "33760": 936020288.0, "33765": 961665088.0, "33770": 971708928.0, "33775": 976050688.0, "33780": 977412608.0, "33785": 951679104.0, "33790": 950734848.0, "33795": 960669504.0, "33800": 972341184.0, "33805": 976244288.0, "33810": 960160000.0, "33815": 947311552.0, "33820": 970428608.0, "33825": 977004032.0, "33830": 973598336.0, "33835": 965952128.0, "33840": 953631104.0, "33845": 961531072.0, "33850": 974096064.0, "33855": 956493632.0, "33860": 972419008.0, "33865": 949921408.0, "33870": 959389568.0, "33875": 970915840.0, "33880": 960707328.0, "33885": 969883072.0, "33890": 950362496.0, "33895": 944046976.0, "33900": 963459712.0, "33905": 965798208.0, "33910": 972922624.0, "33915": 954916544.0, "33920": 937705152.0, "33925": 972928704.0, "33930": 975403712.0, "33935": 954521728.0, "33940": 980202560.0, "33945": 953754048.0, "33950": 969368832.0, "33955": 968118784.0, "33960": 972525696.0, "33965": 973869248.0, "33970": 954355200.0, "33975": 960220928.0, "33980": 958779008.0, "33985": 976038272.0, "33990": 983960448.0, "33995": 953832512.0, "34000": 932845952.0, "34005": 965571584.0, "34010": 978271808.0, "34015": 963041472.0, "34020": 977535616.0, "34025": 930892544.0, "34030": 960215168.0, "34035": 974447488.0, "34040": 969189248.0, "34045": 968834432.0, "34050": 943740992.0, "34055": 968868736.0, "34060": 973766528.0, "34065": 976221888.0, "34070": 959459712.0, "34075": 969231808.0, "34080": 944947136.0, "34085": 965757056.0, "34090": 971566016.0, "34095": 964820480.0, "34100": 960817280.0, "34105": 956587200.0, "34110": 976735680.0, "34115": 957388288.0, "34120": 962757440.0, "34125": 957089856.0, "34130": 947685952.0, "34135": 956838464.0, "34140": 972731584.0, "34145": 969591808.0, "34150": 954252992.0, "34155": 951881280.0, "34160": 951008448.0, "34165": 963754624.0, "34170": 987853632.0, "34175": 972542528.0, "34180": 975775104.0, "34185": 932576512.0, "34190": 956868480.0, "34195": 973710464.0, "34200": 971663808.0, "34205": 951016064.0, "34210": 917883456.0, "34215": 949289216.0, "34220": 968219456.0, "34225": 979501824.0, "34230": 968125568.0, "34235": 951321408.0, "34240": 948638144.0, "34245": 972258304.0, "34250": 994444544.0, "34255": 962463232.0, "34260": 977027712.0, "34265": 942151168.0, "34270": 968862528.0, "34275": 965848640.0, "34280": 969316864.0, "34285": 972972992.0, "34290": 951890688.0, "34295": 951346432.0, "34300": 979108416.0, "34305": 973070528.0, "34310": 973002880.0, "34315": 954285824.0, "34320": 948496576.0, "34325": 966890816.0, "34330": 976607808.0, "34335": 972191040.0, "34340": 
962281856.0, "34345": 951145792.0, "34350": 955074048.0, "34355": 966237696.0, "34360": 955449344.0, "34365": 957648256.0, "34370": 941519808.0, "34375": 973249152.0, "34380": 979734656.0, "34385": 959571456.0, "34390": 967609920.0, "34395": 959190976.0, "34400": 954198592.0, "34405": 968612480.0, "34410": 974856256.0, "34415": 977719680.0, "34420": 966512512.0, "34425": 954472512.0, "34430": 972603648.0, "34435": 969306496.0, "34440": 972703360.0, "34445": 980520576.0, "34450": 935069568.0, "34455": 959612608.0, "34460": 969261120.0, "34465": 973483136.0, "34470": 971862400.0, "34475": 939570496.0, "34480": 972579648.0, "34485": 973593792.0, "34490": 965341888.0, "34495": 971475392.0, "34500": 958146560.0, "34505": 954495552.0, "34510": 974420736.0, "34515": 967118912.0, "34520": 975354368.0, "34525": 967287360.0, "34530": 947093952.0, "34535": 966229504.0, "34540": 975343360.0, "34545": 957554560.0, "34550": 960723456.0, "34555": 932237312.0, "34560": 961628480.0, "34565": 977930432.0, "34570": 956477504.0, "34575": 969072832.0, "34580": 953341184.0, "34585": 966665664.0, "34590": 965867200.0, "34595": 958543232.0, "34600": 967959488.0, "34605": 977867072.0, "34610": 950143360.0, "34615": 968144256.0, "34620": 984999808.0, "34625": 971950528.0, "34630": 970825920.0, "34635": 943218368.0, "34640": 961288832.0, "34645": 964772416.0, "34650": 978323648.0, "34655": 971739520.0, "34660": 948366912.0, "34665": 943964928.0, "34670": 972923904.0, "34675": 969077312.0, "34680": 974847616.0, "34685": 968586048.0, "34690": 928599040.0, "34695": 968993600.0, "34700": 970530176.0, "34705": 961630144.0, "34710": 976530048.0, "34715": 953925568.0, "34720": 968084288.0, "34725": 969495552.0, "34730": 963869760.0, "34735": 988027520.0, "34740": 962182144.0, "34745": 966331712.0, "34750": 984783616.0, "34755": 964521728.0, "34760": 972409472.0, "34765": 973843648.0, "34770": 947330176.0, "34775": 991638912.0, "34780": 972592640.0, "34785": 975618752.0, "34790": 957432768.0, "34795": 947478336.0, "34800": 954706816.0, "34805": 984074240.0, "34810": 969196736.0, "34815": 959109312.0, "34820": 935688704.0, "34825": 974995328.0, "34830": 975456448.0, "34835": 965994624.0, "34840": 974984384.0, "34845": 954834176.0, "34850": 941679168.0, "34855": 980627456.0, "34860": 964614848.0, "34865": 967113280.0, "34870": 976766336.0, "34875": 941690368.0, "34880": 959725120.0, "34885": 969593472.0, "34890": 975399680.0, "34895": 980389568.0, "34900": 948754176.0, "34905": 961259200.0, "34910": 956627072.0, "34915": 956870976.0, "34920": 975360768.0, "34925": 949479808.0, "34930": 939602176.0, "34935": 966296448.0, "34940": 971500544.0, "34945": 972892224.0, "34950": 979275456.0, "34955": 941057856.0, "34960": 959211264.0, "34965": 985251072.0, "34970": 975078400.0, "34975": 975428800.0, "34980": 947054848.0, "34985": 973299136.0, "34990": 975669952.0, "34995": 970213248.0, "35000": 973074432.0, "35005": 938671040.0, "35010": 952116736.0, "35015": 983022144.0, "35020": 973643648.0, "35025": 971396544.0, "35030": 954429440.0, "35035": 961308224.0, "35040": 968614336.0, "35045": 969501888.0, "35050": 982270976.0, "35055": 969466240.0, "35060": 930667200.0, "35065": 975321472.0, "35070": 971986560.0, "35075": 970720832.0, "35080": 973365184.0, "35085": 944387904.0, "35090": 966554432.0, "35095": 998031744.0, "35100": 971134528.0, "35105": 978816832.0, "35110": 941617600.0, "35115": 959576128.0, "35120": 976485824.0, "35125": 960098112.0, "35130": 980104384.0, "35135": 972769408.0, "35140": 936667712.0, "35145": 967432896.0, 
"35150": 970499328.0, "35155": 974589568.0, "35160": 978058048.0, "35165": 959571712.0, "35170": 964289344.0, "35175": 974638912.0, "35180": 970778368.0, "35185": 982737792.0, "35190": 961561088.0, "35195": 962731968.0, "35200": 958232640.0, "35205": 975145920.0, "35210": 981099776.0, "35215": 979531776.0, "35220": 945554688.0, "35225": 967948352.0, "35230": 979198976.0, "35235": 975078080.0, "35240": 962683520.0, "35245": 951567808.0, "35250": 954365696.0, "35255": 977876416.0, "35260": 975393408.0, "35265": 963703488.0, "35270": 943097472.0, "35275": 956371328.0, "35280": 983746624.0, "35285": 973329792.0, "35290": 972033536.0, "35295": 961265408.0, "35300": 957435712.0, "35305": 976567424.0, "35310": 975849920.0, "35315": 987749888.0, "35320": 974230272.0, "35325": 958391744.0, "35330": 974552704.0, "35335": 969644032.0, "35340": 976761216.0, "35345": 974170176.0, "35350": 946445056.0, "35355": 955052160.0, "35360": 967947136.0, "35365": 957106752.0, "35370": 973438528.0, "35375": 952062272.0, "35380": 965235520.0, "35385": 977695168.0, "35390": 968199616.0, "35395": 966838400.0, "35400": 977396928.0, "35405": 931236672.0, "35410": 964529024.0, "35415": 966517952.0, "35420": 963284928.0, "35425": 971444864.0, "35430": 951193472.0, "35435": 953764288.0, "35440": 958194816.0, "35445": 978168832.0, "35450": 975968192.0, "35455": 941045952.0, "35460": 949887040.0, "35465": 973359936.0, "35470": 976183616.0, "35475": 958769664.0, "35480": 954256320.0, "35485": 960380992.0, "35490": 961640512.0, "35495": 980853632.0, "35500": 968381632.0, "35505": 964946048.0, "35510": 954740032.0, "35515": 971453504.0, "35520": 976124928.0, "35525": 964785728.0, "35530": 972088576.0, "35535": 960235328.0, "35540": 946673280.0, "35545": 968356032.0, "35550": 974945600.0, "35555": 981762176.0, "35560": 959553472.0, "35565": 946396544.0, "35570": 968324800.0, "35575": 978152448.0, "35580": 982972928.0, "35585": 961562560.0, "35590": 927098240.0, "35595": 985343296.0, "35600": 971475136.0, "35605": 977326912.0, "35610": 956581376.0, "35615": 960194496.0, "35620": 965250560.0, "35625": 968441728.0, "35630": 971541248.0, "35635": 972764544.0, "35640": 965584704.0, "35645": 939070656.0, "35650": 956763648.0, "35655": 973783424.0, "35660": 966942656.0, "35665": 984375168.0, "35670": 938349952.0, "35675": 981554368.0, "35680": 968114944.0, "35685": 981332352.0, "35690": 970222784.0, "35695": 941972800.0, "35700": 958808576.0, "35705": 969786816.0, "35710": 977537088.0, "35715": 972863232.0, "35720": 967677952.0, "35725": 939571712.0, "35730": 965231424.0, "35735": 989398720.0, "35740": 968356288.0, "35745": 963034880.0, "35750": 948117312.0, "35755": 967358720.0, "35760": 965793664.0, "35765": 970381952.0, "35770": 957349760.0, "35775": 947680832.0, "35780": 970984192.0, "35785": 970375872.0, "35790": 969274560.0, "35795": 971458048.0, "35800": 948475776.0, "35805": 945580800.0, "35810": 968543360.0, "35815": 961929600.0, "35820": 986215744.0, "35825": 979311936.0, "35830": 958102784.0, "35835": 969446528.0, "35840": 966979008.0, "35845": 967672576.0, "35850": 986713344.0, "35855": 956535040.0, "35860": 958608384.0, "35865": 968410944.0, "35870": 974964480.0, "35875": 968931520.0, "35880": 951002752.0, "35885": 968889088.0, "35890": 986680064.0, "35895": 961809216.0, "35900": 970606912.0, "35905": 954927296.0, "35910": 955388416.0, "35915": 979440768.0, "35920": 973306752.0, "35925": 981688320.0, "35930": 957711360.0, "35935": 958981504.0, "35940": 969857536.0, "35945": 969989056.0, "35950": 979190848.0, "35955": 
967754240.0, "35960": 940125312.0, "35965": 945182272.0, "35970": 958145792.0, "35975": 978626496.0, "35980": 974318272.0, "35985": 962087936.0, "35990": 944172800.0, "35995": 958736960.0, "36000": 970570688.0, "36005": 969539392.0, "36010": 981738368.0, "36015": 954052416.0, "36020": 983365888.0, "36025": 981894912.0, "36030": 971026880.0, "36035": 965695040.0, "36040": 944836224.0, "36045": 974288640.0, "36050": 970063296.0, "36055": 966912128.0, "36060": 973882816.0, "36065": 947221312.0, "36070": 956996736.0, "36075": 976540928.0, "36080": 962632384.0, "36085": 972022080.0, "36090": 957213888.0, "36095": 951276992.0, "36100": 960947968.0, "36105": 964185024.0, "36110": 974547200.0, "36115": 975374336.0, "36120": 962256000.0, "36125": 965454848.0, "36130": 970120704.0, "36135": 975619712.0, "36140": 970471744.0, "36145": 934551168.0, "36150": 968746304.0, "36155": 960431232.0, "36160": 962935424.0, "36165": 956950848.0, "36170": 940665152.0, "36175": 953162624.0, "36180": 975823744.0, "36185": 966567168.0, "36190": 973083776.0, "36195": 941083328.0, "36200": 923505792.0, "36205": 974805120.0, "36210": 972579200.0, "36215": 971683136.0, "36220": 966449856.0, "36225": 951642304.0, "36230": 959985024.0, "36235": 977278400.0, "36240": 973660032.0, "36245": 966857472.0, "36250": 966647808.0, "36255": 962978688.0, "36260": 966849024.0, "36265": 968822464.0, "36270": 973276992.0, "36275": 972701184.0, "36280": 947637376.0, "36285": 952746496.0, "36290": 960659392.0, "36295": 974625664.0, "36300": 975838016.0, "36305": 944629888.0, "36310": 967562368.0, "36315": 953302400.0, "36320": 953331584.0, "36325": 960781568.0, "36330": 943272000.0, "36335": 963532992.0, "36340": 975976064.0, "36345": 966214912.0, "36350": 978587712.0, "36355": 962741312.0, "36360": 955961472.0, "36365": 973684736.0, "36370": 961084416.0, "36375": 982405632.0, "36380": 948736576.0, "36385": 941501952.0, "36390": 961991296.0, "36395": 967858304.0, "36400": 970928768.0, "36405": 960567104.0, "36410": 939104960.0, "36415": 947932672.0, "36420": 965856768.0, "36425": 963360320.0, "36430": 973862528.0, "36435": 966456896.0, "36440": 946003264.0, "36445": 952852480.0, "36450": 965565120.0, "36455": 986067840.0, "36460": 979540864.0, "36465": 939355264.0, "36470": 980097344.0, "36475": 974902400.0, "36480": 967618112.0, "36485": 985615168.0, "36490": 945186304.0, "36495": 964270208.0, "36500": 968469568.0, "36505": 967594368.0, "36510": 961635008.0, "36515": 960820352.0, "36520": 935676224.0, "36525": 978162304.0, "36530": 963828096.0, "36535": 966978240.0, "36540": 975671168.0, "36545": 945467072.0, "36550": 967444288.0, "36555": 973747392.0, "36560": 962415680.0, "36565": 973131200.0, "36570": 963758848.0, "36575": 976701312.0, "36580": 955303744.0, "36585": 972121280.0, "36590": 984993600.0, "36595": 968549312.0, "36600": 965934592.0, "36605": 971346944.0, "36610": 972850944.0, "36615": 981467968.0, "36620": 952255744.0, "36625": 940216512.0, "36630": 970940672.0, "36635": 972334528.0, "36640": 964346048.0, "36645": 977035264.0, "36650": 935507648.0, "36655": 980282880.0, "36660": 971702976.0, "36665": 962287040.0, "36670": 966486976.0, "36675": 951920192.0, "36680": 946376896.0, "36685": 978262656.0, "36690": 984544576.0, "36695": 991342464.0, "36700": 969792064.0, "36705": 926871488.0, "36710": 966706432.0, "36715": 970060352.0, "36720": 975112640.0, "36725": 969920832.0, "36730": 948995584.0, "36735": 968016896.0, "36740": 968606080.0, "36745": 951116032.0, "36750": 962403776.0, "36755": 935740096.0, "36760": 964350592.0, 
"36765": 978586752.0, "36770": 963129472.0, "36775": 965398400.0, "36780": 949449024.0, "36785": 946262336.0, "36790": 978341120.0, "36795": 964895616.0, "36800": 960635008.0, "36805": 953864000.0, "36810": 936730112.0, "36815": 986489728.0, "36820": 972255616.0, "36825": 978149568.0, "36830": 968907328.0, "36835": 931171264.0, "36840": 971541248.0, "36845": 979618944.0, "36850": 962036736.0, "36855": 949454656.0, "36860": 961848064.0, "36865": 944972032.0, "36870": 972396224.0, "36875": 973430528.0, "36880": 971616000.0, "36885": 977918784.0, "36890": 933651712.0, "36895": 979434624.0, "36900": 957551872.0, "36905": 967200960.0, "36910": 980511680.0, "36915": 937765248.0, "36920": 960916032.0, "36925": 979618112.0, "36930": 973413888.0, "36935": 978081792.0, "36940": 960222528.0, "36945": 955137152.0, "36950": 970737472.0, "36955": 974841728.0, "36960": 969192960.0, "36965": 946311232.0, "36970": 944634944.0, "36975": 972742720.0, "36980": 974626688.0, "36985": 953501120.0, "36990": 956321408.0, "36995": 943215872.0, "37000": 975136448.0, "37005": 974877440.0, "37010": 962980160.0, "37015": 966830720.0, "37020": 946426944.0, "37025": 958130752.0, "37030": 964606144.0, "37035": 956193152.0, "37040": 978675520.0, "37045": 943098112.0, "37050": 955316864.0, "37055": 980963968.0, "37060": 966866688.0, "37065": 967528000.0, "37070": 967553664.0, "37075": 952758592.0, "37080": 959830592.0, "37085": 972867520.0, "37090": 961043648.0, "37095": 975570688.0, "37100": 942477056.0, "37105": 955789120.0, "37110": 956846272.0, "37115": 971697984.0, "37120": 962832640.0, "37125": 951274816.0, "37130": 939269504.0, "37135": 981793600.0, "37140": 976257856.0, "37145": 971700288.0, "37150": 970844352.0, "37155": 938621312.0, "37160": 961532352.0, "37165": 964478848.0, "37170": 958532224.0, "37175": 986050176.0, "37180": 942635776.0, "37185": 957814336.0, "37190": 970175616.0, "37195": 964079744.0, "37200": 976824192.0, "37205": 944599168.0, "37210": 966893888.0, "37215": 975306944.0, "37220": 957212224.0, "37225": 975527872.0, "37230": 970968448.0, "37235": 953207040.0, "37240": 976614208.0, "37245": 985571392.0, "37250": 955992256.0, "37255": 955119616.0, "37260": 949468096.0, "37265": 990084672.0, "37270": 963055296.0, "37275": 973770432.0, "37280": 959173504.0, "37285": 946226176.0, "37290": 948454912.0, "37295": 980478784.0, "37300": 947311232.0, "37305": 959098368.0, "37310": 953465344.0, "37315": 942042176.0, "37320": 962201856.0, "37325": 978065152.0, "37330": 976507328.0, "37335": 967740672.0, "37340": 941990656.0, "37345": 950507072.0, "37350": 958436352.0, "37355": 971672448.0, "37360": 974689152.0, "37365": 961464896.0, "37370": 951650688.0, "37375": 967470016.0, "37380": 976500160.0, "37385": 965811968.0, "37390": 964774912.0, "37395": 959568320.0, "37400": 963256128.0, "37405": 983609216.0, "37410": 966388096.0, "37415": 964800768.0, "37420": 939462336.0, "37425": 964330176.0, "37430": 963225536.0, "37435": 955943360.0, "37440": 963715072.0, "37445": 947476416.0, "37450": 993309760.0, "37455": 965310912.0, "37460": 961107200.0, "37465": 962187008.0, "37470": 921135424.0, "37475": 951772800.0, "37480": 981587840.0, "37485": 979121408.0, "37490": 968895104.0, "37495": 949873088.0, "37500": 944268864.0, "37505": 958941952.0, "37510": 973509248.0, "37515": 983888576.0, "37520": 962354496.0, "37525": 951603904.0, "37530": 963725632.0, "37535": 980437376.0, "37540": 952743872.0, "37545": 994284480.0, "37550": 951957696.0, "37555": 966806912.0, "37560": 978318592.0, "37565": 972557376.0, "37570": 
967956544.0, "37575": 972988288.0, "37580": 943717888.0, "37585": 960268480.0, "37590": 979396992.0, "37595": 977230720.0, "37600": 980811456.0, "37605": 957147072.0, "37610": 975743616.0, "37615": 973806784.0, "37620": 968663360.0, "37625": 982116672.0, "37630": 948468096.0, "37635": 977745600.0, "37640": 959583424.0, "37645": 977923840.0, "37650": 963699136.0, "37655": 925853888.0, "37660": 947669952.0, "37665": 967101440.0, "37670": 966259200.0, "37675": 975142656.0, "37680": 976696704.0, "37685": 949426432.0, "37690": 971229440.0, "37695": 968722496.0, "37700": 967557440.0, "37705": 978661504.0, "37710": 934346112.0, "37715": 977646720.0, "37720": 972888960.0, "37725": 990964800.0, "37730": 985259200.0, "37735": 934890688.0, "37740": 941839040.0, "37745": 967503744.0, "37750": 962274496.0, "37755": 970446144.0, "37760": 958544128.0, "37765": 932228032.0, "37770": 969859392.0, "37775": 963649728.0, "37780": 962311296.0, "37785": 964491584.0, "37790": 940192960.0, "37795": 980037120.0, "37800": 963232960.0, "37805": 973482112.0, "37810": 983342848.0, "37815": 960039616.0, "37820": 938106432.0, "37825": 955092032.0, "37830": 960109440.0, "37835": 974724160.0, "37840": 985157824.0, "37845": 931191296.0, "37850": 952663168.0, "37855": 979448640.0, "37860": 960658240.0, "37865": 965093120.0, "37870": 960279360.0, "37875": 986934976.0, "37880": 951869184.0, "37885": 977129984.0, "37890": 966206208.0, "37895": 934303424.0, "37900": 976616256.0, "37905": 969311872.0, "37910": 979948480.0, "37915": 962924544.0, "37920": 947293952.0, "37925": 945667392.0, "37930": 985933440.0, "37935": 965593088.0, "37940": 960498688.0, "37945": 972530240.0, "37950": 941773312.0, "37955": 966035584.0, "37960": 981374784.0, "37965": 969432192.0, "37970": 992188096.0, "37975": 944971904.0, "37980": 949362176.0, "37985": 976921344.0, "37990": 973650176.0, "37995": 970675904.0, "38000": 941150336.0, "38005": 945524224.0, "38010": 980944448.0, "38015": 969499840.0, "38020": 977328896.0, "38025": 964228672.0, "38030": 950276032.0, "38035": 979748160.0, "38040": 960690304.0, "38045": 968809536.0, "38050": 982642688.0, "38055": 951348928.0, "38060": 971182400.0, "38065": 985827328.0, "38070": 967327616.0, "38075": 970087552.0, "38080": 947596672.0, "38085": 973090304.0, "38090": 978065344.0, "38095": 965247104.0, "38100": 956007616.0, "38105": 963057216.0, "38110": 946906624.0, "38115": 959720000.0, "38120": 972524416.0, "38125": 971328768.0, "38130": 974300160.0, "38135": 950519232.0, "38140": 966403584.0, "38145": 972076672.0, "38150": 974104128.0, "38155": 966541440.0, "38160": 949180160.0, "38165": 965308416.0, "38170": 980436608.0, "38175": 967937408.0, "38180": 965493952.0, "38185": 949953216.0, "38190": 933823232.0, "38195": 983376000.0, "38200": 996417856.0, "38205": 975653760.0, "38210": 964148352.0, "38215": 946508160.0, "38220": 972998400.0, "38225": 963988992.0, "38230": 955812928.0, "38235": 969626368.0, "38240": 934383424.0, "38245": 974257472.0, "38250": 993041984.0, "38255": 967887872.0, "38260": 968355712.0, "38265": 953283136.0, "38270": 949279104.0, "38275": 967386176.0, "38280": 980178944.0, "38285": 967391680.0, "38290": 962427072.0, "38295": 958296256.0, "38300": 972180160.0, "38305": 981172672.0, "38310": 959900032.0, "38315": 961867648.0, "38320": 953972160.0, "38325": 982076608.0, "38330": 974594944.0, "38335": 968550016.0, "38340": 979692480.0, "38345": 941797056.0, "38350": 956637248.0, "38355": 974631424.0, "38360": 962986304.0, "38365": 979136256.0, "38370": 942196416.0, "38375": 937878528.0, 
"38380": 978157376.0, "38385": 966555264.0, "38390": 953037632.0, "38395": 959333312.0, "38400": 942115648.0, "38405": 969681216.0, "38410": 975484800.0, "38415": 976063360.0, "38420": 964381504.0, "38425": 944892288.0, "38430": 953374592.0, "38435": 960578304.0, "38440": 981814016.0, "38445": 968750144.0, "38450": 946078080.0, "38455": 941595264.0, "38460": 961747200.0, "38465": 962195712.0, "38470": 960352512.0, "38475": 993712704.0, "38480": 952923584.0, "38485": 971677568.0, "38490": 972107520.0, "38495": 952019136.0, "38500": 968881536.0, "38505": 945327616.0, "38510": 970966272.0, "38515": 967741312.0, "38520": 959005760.0, "38525": 967458048.0, "38530": 945077376.0, "38535": 953995968.0, "38540": 966248512.0, "38545": 959214848.0, "38550": 961835968.0, "38555": 971088256.0, "38560": 969325120.0, "38565": 975230464.0, "38570": 964258880.0, "38575": 960415232.0, "38580": 969872128.0, "38585": 947094976.0, "38590": 962459072.0, "38595": 974700736.0, "38600": 968195904.0, "38605": 982685696.0, "38610": 931993856.0, "38615": 946819328.0, "38620": 971284096.0, "38625": 969605632.0, "38630": 981607488.0, "38635": 966499328.0, "38640": 948772288.0, "38645": 966431168.0, "38650": 980490816.0, "38655": 965318848.0, "38660": 955927488.0, "38665": 936152128.0, "38670": 945499456.0, "38675": 954730112.0, "38680": 974535360.0, "38685": 988141888.0, "38690": 938633792.0, "38695": 961063680.0, "38700": 971407232.0, "38705": 982585344.0, "38710": 961536896.0, "38715": 992955072.0, "38720": 954657216.0, "38725": 967939648.0, "38730": 962163264.0, "38735": 957212416.0, "38740": 967735680.0, "38745": 938605568.0, "38750": 989158912.0, "38755": 967273408.0, "38760": 967372544.0, "38765": 976443392.0, "38770": 950542976.0, "38775": 962772480.0, "38780": 961641536.0, "38785": 978839680.0, "38790": 984507968.0, "38795": 942028800.0, "38800": 970601664.0, "38805": 980405120.0, "38810": 950223040.0, "38815": 973723776.0, "38820": 954405184.0, "38825": 950192960.0, "38830": 975909504.0, "38835": 967270272.0, "38840": 969877760.0, "38845": 963557888.0, "38850": 945262336.0, "38855": 963271168.0, "38860": 978786240.0, "38865": 966874880.0, "38870": 961010432.0, "38875": 957009472.0, "38880": 963925696.0, "38885": 965836480.0, "38890": 962852864.0, "38895": 977411200.0, "38900": 950941056.0, "38905": 946994944.0, "38910": 958453504.0, "38915": 978070016.0, "38920": 980331776.0, "38925": 975561472.0, "38930": 933824960.0, "38935": 960794880.0, "38940": 968692032.0, "38945": 965725952.0, "38950": 967545280.0, "38955": 958397952.0, "38960": 955824576.0, "38965": 954265152.0, "38970": 967221312.0, "38975": 958099072.0, "38980": 948091776.0, "38985": 948055104.0, "38990": 964598016.0, "38995": 961759104.0, "39000": 966455296.0, "39005": 961966464.0, "39010": 941218048.0, "39015": 984108224.0, "39020": 976965760.0, "39025": 956248512.0, "39030": 958873984.0, "39035": 945226496.0, "39040": 984847232.0, "39045": 968557312.0, "39050": 955038848.0, "39055": 973459264.0, "39060": 950369728.0, "39065": 949939456.0, "39070": 969002496.0, "39075": 979486976.0, "39080": 975844864.0, "39085": 963231808.0, "39090": 947034880.0, "39095": 943518400.0, "39100": 973324032.0, "39105": 980367936.0, "39110": 969613504.0, "39115": 950122048.0, "39120": 962009344.0, "39125": 975043392.0, "39130": 989403328.0, "39135": 965208512.0, "39140": 945120064.0, "39145": 950511360.0, "39150": 963079936.0, "39155": 958644288.0, "39160": 966404288.0, "39165": 939459008.0, "39170": 937565696.0, "39175": 984625024.0, "39180": 961974144.0, "39185": 
964594880.0, "39190": 974306304.0, "39195": 940822080.0, "39200": 963904384.0, "39205": 953559424.0, "39210": 977173440.0, "39215": 969152576.0, "39220": 944702400.0, "39225": 962877824.0, "39230": 961808704.0, "39235": 968040192.0, "39240": 971462976.0, "39245": 952041856.0, "39250": 940564224.0, "39255": 975780352.0, "39260": 958270208.0, "39265": 972372736.0, "39270": 965013312.0, "39275": 954577792.0, "39280": 969135936.0, "39285": 975261952.0, "39290": 973316864.0, "39295": 961602240.0, "39300": 936783360.0, "39305": 959474368.0, "39310": 975241088.0, "39315": 971561792.0, "39320": 963726016.0, "39325": 975399424.0, "39330": 942982400.0, "39335": 976308224.0, "39340": 963460032.0, "39345": 968940416.0, "39350": 983587648.0, "39355": 957816576.0, "39360": 972917184.0, "39365": 972574336.0, "39370": 982395392.0, "39375": 963553408.0, "39380": 932927488.0, "39385": 959125248.0, "39390": 967820928.0, "39395": 965273472.0, "39400": 963487552.0, "39405": 957057280.0, "39410": 937061760.0, "39415": 954228416.0, "39420": 978577280.0, "39425": 961648448.0, "39430": 960869824.0, "39435": 951949504.0, "39440": 971579520.0, "39445": 966863808.0, "39450": 956144896.0, "39455": 964504192.0, "39460": 937522112.0, "39465": 995012224.0, "39470": 973707520.0, "39475": 960457280.0, "39480": 956920704.0, "39485": 964245888.0, "39490": 963738816.0, "39495": 968380608.0, "39500": 961557888.0, "39505": 973325760.0, "39510": 941349568.0, "39515": 934741696.0, "39520": 970942464.0, "39525": 966015936.0, "39530": 981211904.0, "39535": 955181504.0, "39540": 954880640.0, "39545": 951464576.0, "39550": 970789696.0, "39555": 974147392.0, "39560": 990589504.0, "39565": 953910400.0, "39570": 957858240.0, "39575": 971954880.0, "39580": 960408768.0, "39585": 971942528.0, "39590": 977335232.0, "39595": 948947328.0, "39600": 954583488.0, "39605": 958246848.0, "39610": 968544448.0, "39615": 979217600.0, "39620": 934322880.0, "39625": 978476288.0, "39630": 948359744.0, "39635": 961545856.0, "39640": 972581824.0, "39645": 951543808.0, "39650": 975678080.0, "39655": 969878400.0, "39660": 973141696.0, "39665": 959840768.0, "39670": 963805056.0, "39675": 959148416.0, "39680": 968430784.0, "39685": 971400320.0, "39690": 967704512.0, "39695": 958455040.0, "39700": 933136448.0, "39705": 974528448.0, "39710": 984944384.0, "39715": 990571712.0, "39720": 964947456.0, "39725": 943827456.0, "39730": 976776256.0, "39735": 970904960.0, "39740": 965561728.0, "39745": 973811712.0, "39750": 929902144.0, "39755": 961568896.0, "39760": 970567744.0, "39765": 962588288.0, "39770": 967611456.0, "39775": 965309248.0, "39780": 948188864.0, "39785": 970473600.0, "39790": 973011648.0, "39795": 973993472.0, "39800": 968528128.0, "39805": 951084160.0, "39810": 978661120.0, "39815": 972035648.0, "39820": 973624064.0, "39825": 975419968.0, "39830": 951439296.0, "39835": 950276032.0, "39840": 972925120.0, "39845": 972482688.0, "39850": 976709952.0, "39855": 956125696.0, "39860": 941697216.0, "39865": 963890944.0, "39870": 968368768.0, "39875": 975539904.0, "39880": 975307328.0, "39885": 937044992.0, "39890": 964126080.0, "39895": 975043840.0, "39900": 969878912.0, "39905": 967758976.0, "39910": 938798848.0, "39915": 982347072.0, "39920": 978355456.0, "39925": 960430016.0, "39930": 966096320.0, "39935": 954775168.0, "39940": 948809792.0, "39945": 977827968.0, "39950": 974037760.0, "39955": 966421632.0, "39960": 967367168.0, "39965": 940422720.0, "39970": 972987968.0, "39975": 977953792.0, "39980": 980917376.0, "39985": 970641792.0, "39990": 947638720.0, 
"39995": 963002304.0, "40000": 969107776.0, "40005": 970351872.0, "40010": 967632640.0, "40015": 959505280.0, "40020": 952099392.0, "40025": 967999360.0, "40030": 970130368.0, "40035": 988723840.0, "40040": 958412928.0, "40045": 959917440.0, "40050": 967036800.0, "40055": 965549504.0, "40060": 966219136.0, "40065": 966489920.0, "40070": 955807552.0, "40075": 981972608.0, "40080": 975610048.0, "40085": 954733440.0, "40090": 970628928.0, "40095": 943427072.0, "40100": 951515712.0, "40105": 964005056.0, "40110": 956798208.0, "40115": 979111424.0, "40120": 973559488.0, "40125": 946293440.0, "40130": 986684928.0, "40135": 975600064.0, "40140": 971532736.0, "40145": 978754752.0, "40150": 929733440.0, "40155": 989656128.0, "40160": 984129664.0, "40165": 971542912.0, "40170": 983441152.0, "40175": 931024512.0, "40180": 964952320.0, "40185": 981568128.0, "40190": 975353472.0, "40195": 964040960.0, "40200": 941454848.0, "40205": 961065088.0, "40210": 984768832.0, "40215": 963164864.0, "40220": 966040384.0, "40225": 961741376.0, "40230": 944985536.0, "40235": 972397760.0, "40240": 968561600.0, "40245": 975816000.0, "40250": 977132352.0, "40255": 957624960.0, "40260": 974643264.0, "40265": 963754688.0, "40270": 961906240.0, "40275": 978582656.0, "40280": 962939520.0, "40285": 973405952.0, "40290": 971709184.0, "40295": 969282432.0, "40300": 971171136.0, "40305": 960145536.0, "40310": 941110592.0, "40315": 973484800.0, "40320": 964781184.0, "40325": 961189056.0, "40330": 975819520.0, "40335": 944303296.0, "40340": 973030208.0, "40345": 963873152.0, "40350": 967360576.0, "40355": 971863296.0, "40360": 945917504.0, "40365": 965216768.0, "40370": 969168192.0, "40375": 979479040.0, "40380": 982655616.0, "40385": 970265600.0, "40390": 926479040.0, "40395": 962498688.0, "40400": 979455040.0, "40405": 973590208.0, "40410": 964001024.0, "40415": 943677632.0, "40420": 965696896.0, "40425": 968711680.0, "40430": 962466944.0, "40435": 971407744.0, "40440": 942456064.0, "40445": 962742208.0, "40450": 973707968.0, "40455": 985927936.0, "40460": 971984704.0, "40465": 940845760.0, "40470": 947248128.0, "40475": 981567616.0, "40480": 972714880.0, "40485": 987942080.0, "40490": 940502528.0, "40495": 949733120.0, "40500": 969215040.0, "40505": 966446592.0, "40510": 973988160.0, "40515": 984469056.0, "40520": 940919424.0, "40525": 962035840.0, "40530": 973583680.0, "40535": 959548352.0, "40540": 955771840.0, "40545": 959628800.0, "40550": 965152320.0, "40555": 975518592.0, "40560": 973047616.0, "40565": 968852928.0, "40570": 976079168.0, "40575": 943113088.0, "40580": 975420416.0, "40585": 977558912.0, "40590": 974374656.0, "40595": 975780736.0, "40600": 960045824.0, "40605": 972195392.0, "40610": 984766400.0, "40615": 967982144.0, "40620": 984178368.0, "40625": 946373120.0, "40630": 948505600.0, "40635": 974556288.0, "40640": 982789888.0, "40645": 977237952.0, "40650": 952657024.0, "40655": 951509184.0, "40660": 964535360.0, "40665": 977401600.0, "40670": 991757888.0, "40675": 971095104.0, "40680": 947320512.0, "40685": 959445504.0, "40690": 966269312.0, "40695": 972882880.0, "40700": 966838784.0, "40705": 969017472.0, "40710": 987224512.0, "40715": 967275840.0, "40720": 961195584.0, "40725": 986880064.0, "40730": 963914560.0, "40735": 955420800.0, "40740": 966348992.0, "40745": 959720192.0, "40750": 965224768.0, "40755": 982269312.0, "40760": 945459392.0, "40765": 972491328.0, "40770": 972740224.0, "40775": 974497536.0, "40780": 975327488.0, "40785": 949504576.0, "40790": 972734464.0, "40795": 977220160.0, "40800": 
964614976.0, "40805": 960550272.0, "40810": 949056896.0, "40815": 962158336.0, "40820": 970814272.0, "40825": 968474688.0, "40830": 967152064.0, "40835": 971167104.0, "40840": 943622144.0, "40845": 961962432.0, "40850": 979834752.0, "40855": 968399104.0, "40860": 968689088.0, "40865": 944186240.0, "40870": 978538624.0, "40875": 968183936.0, "40880": 977506048.0, "40885": 965606912.0, "40890": 947075328.0, "40895": 952320448.0, "40900": 953037248.0, "40905": 981998912.0, "40910": 971225920.0, "40915": 959553088.0, "40920": 945047104.0, "40925": 966663168.0, "40930": 971112704.0, "40935": 980730176.0, "40940": 974216896.0, "40945": 946182848.0, "40950": 969265344.0, "40955": 967755008.0, "40960": 969074432.0, "40965": 974440768.0, "40970": 945382720.0, "40975": 951332224.0, "40980": 970408448.0, "40985": 971575104.0, "40990": 962379392.0, "40995": 941966016.0, "41000": 972667968.0, "41005": 963497152.0, "41010": 965997504.0, "41015": 953813696.0, "41020": 961067456.0, "41025": 934735680.0, "41030": 974162368.0, "41035": 973195008.0, "41040": 954322880.0, "41045": 973031552.0, "41050": 946379392.0, "41055": 963166336.0, "41060": 985472960.0, "41065": 973761600.0, "41070": 968989120.0, "41075": 942634496.0, "41080": 958889728.0, "41085": 968472384.0, "41090": 960577024.0, "41095": 975321920.0, "41100": 943116672.0, "41105": 947472896.0, "41110": 962073472.0, "41115": 967184448.0, "41120": 981538176.0, "41125": 956108672.0, "41130": 946709056.0, "41135": 959862208.0, "41140": 966340864.0, "41145": 963706880.0, "41150": 980958592.0, "41155": 952350016.0, "41160": 940004480.0, "41165": 971992320.0, "41170": 969182912.0, "41175": 956552896.0, "41180": 958185664.0, "41185": 957967680.0, "41190": 972335680.0, "41195": 965843200.0, "41200": 977877312.0, "41205": 965270656.0, "41210": 939551104.0, "41215": 982827008.0, "41220": 975044288.0, "41225": 961563008.0, "41230": 993708096.0, "41235": 944469760.0, "41240": 970348032.0, "41245": 954858048.0, "41250": 980227968.0, "41255": 960769792.0, "41260": 965287168.0, "41265": 951494400.0, "41270": 964617984.0, "41275": 974990400.0, "41280": 975923904.0, "41285": 957565056.0, "41290": 933162752.0, "41295": 968245184.0, "41300": 972042944.0, "41305": 962926528.0, "41310": 984337600.0, "41315": 936073664.0, "41320": 944340928.0, "41325": 970542912.0, "41330": 969250944.0, "41335": 973595520.0, "41340": 946657024.0, "41345": 936081088.0, "41350": 969087616.0, "41355": 973478016.0, "41360": 982193216.0, "41365": 944248192.0, "41370": 951453760.0, "41375": 964236928.0, "41380": 958251584.0, "41385": 965293056.0, "41390": 977789120.0, "41395": 939107904.0, "41400": 947048256.0, "41405": 964180032.0, "41410": 977183168.0, "41415": 962823744.0, "41420": 946759488.0, "41425": 950986176.0, "41430": 981924544.0, "41435": 975371072.0, "41440": 965870720.0, "41445": 957892032.0, "41450": 942013440.0, "41455": 988269184.0, "41460": 955846272.0, "41465": 955408960.0, "41470": 972858496.0, "41475": 955390656.0, "41480": 979791232.0, "41485": 966696832.0, "41490": 965014336.0, "41495": 964274368.0, "41500": 939279488.0, "41505": 966471360.0, "41510": 957596992.0, "41515": 982873088.0, "41520": 965323968.0, "41525": 953939968.0, "41530": 970414592.0, "41535": 959565056.0, "41540": 972078144.0, "41545": 987770304.0, "41550": 968616768.0, "41555": 949082048.0, "41560": 951570944.0, "41565": 977293952.0, "41570": 975181568.0, "41575": 957781824.0, "41580": 945636096.0, "41585": 966487680.0, "41590": 975703360.0, "41595": 959628672.0, "41600": 980417792.0, "41605": 945692416.0, 
"41610": 948917120.0, "41615": 965312448.0, "41620": 971033984.0, "41625": 978300288.0, "41630": 960300032.0, "41635": 950306432.0, "41640": 977337664.0, "41645": 958264832.0, "41650": 973037632.0, "41655": 968044224.0, "41660": 942578688.0, "41665": 975692480.0, "41670": 984337216.0, "41675": 958063104.0, "41680": 972196736.0, "41685": 940262464.0, "41690": 964918080.0, "41695": 960755776.0, "41700": 976279936.0, "41705": 955346304.0, "41710": 956717568.0, "41715": 951926144.0, "41720": 973122368.0, "41725": 971924736.0, "41730": 952300928.0, "41735": 959516416.0, "41740": 946382208.0, "41745": 964285120.0, "41750": 972853504.0, "41755": 974590528.0, "41760": 973627904.0, "41765": 948896320.0, "41770": 964750976.0, "41775": 963977152.0, "41780": 973196928.0, "41785": 976985664.0, "41790": 953241152.0, "41795": 954456128.0, "41800": 960488128.0, "41805": 958201728.0, "41810": 971203264.0, "41815": 960715264.0, "41820": 954649728.0, "41825": 962269568.0, "41830": 957366272.0, "41835": 970696000.0, "41840": 974383680.0, "41845": 934373056.0, "41850": 973373248.0, "41855": 970180544.0, "41860": 976138624.0, "41865": 979694848.0, "41870": 967180992.0, "41875": 963088064.0, "41880": 965942656.0, "41885": 960840000.0, "41890": 966004928.0, "41895": 952466240.0, "41900": 931045056.0, "41905": 982779008.0, "41910": 975201152.0, "41915": 967205184.0, "41920": 965254784.0, "41925": 936584384.0, "41930": 968134080.0, "41935": 978104640.0, "41940": 987540224.0, "41945": 957125184.0, "41950": 965834816.0, "41955": 979946752.0, "41960": 959794432.0, "41965": 964622848.0, "41970": 968052160.0, "41975": 954800512.0, "41980": 955396608.0, "41985": 969430912.0, "41990": 978902080.0, "41995": 970248704.0, "42000": 977409216.0, "42005": 937322304.0, "42010": 966743872.0, "42015": 976142720.0, "42020": 966445632.0, "42025": 962982656.0, "42030": 955078144.0, "42035": 948139136.0, "42040": 958486144.0, "42045": 969147456.0, "42050": 979116864.0, "42055": 927548480.0, "42060": 935233536.0, "42065": 977244672.0, "42070": 964237824.0, "42075": 978612992.0, "42080": 951809152.0, "42085": 947378944.0, "42090": 966785920.0, "42095": 966593600.0, "42100": 959313600.0, "42105": 967815488.0, "42110": 965738560.0, "42115": 977662336.0, "42120": 967612288.0, "42125": 971551424.0, "42130": 963709888.0, "42135": 946725504.0, "42140": 969068864.0, "42145": 973935232.0, "42150": 970901376.0, "42155": 963288384.0, "42160": 948747776.0, "42165": 942361664.0, "42170": 963954048.0, "42175": 987471552.0, "42180": 956379456.0, "42185": 951693120.0, "42190": 942025856.0, "42195": 971341568.0, "42200": 976203072.0, "42205": 959172224.0, "42210": 966612288.0, "42215": 945392192.0, "42220": 956248960.0, "42225": 984583936.0, "42230": 958947200.0, "42235": 974941056.0, "42240": 954751936.0, "42245": 943672128.0, "42250": 962375424.0, "42255": 962541184.0, "42260": 967085760.0, "42265": 964727552.0, "42270": 955153984.0, "42275": 974928256.0, "42280": 969752768.0, "42285": 963118656.0, "42290": 977864704.0, "42295": 951521856.0, "42300": 966090304.0, "42305": 978683520.0, "42310": 952736704.0, "42315": 971379712.0, "42320": 960561664.0, "42325": 956949888.0, "42330": 974217856.0, "42335": 971486656.0, "42340": 960374336.0, "42345": 968683264.0, "42350": 947203392.0, "42355": 985376128.0, "42360": 972699200.0, "42365": 969297472.0, "42370": 974878656.0, "42375": 946018048.0, "42380": 960586624.0, "42385": 975403584.0, "42390": 980585088.0, "42395": 948099008.0, "42400": 935281792.0, "42405": 932014400.0, "42410": 969221248.0, "42415": 
975518336.0, "42420": 974150464.0, "42425": 953308992.0, "42430": 938518144.0, "42435": 974497664.0, "42440": 967680384.0, "42445": 983994560.0, "42450": 970142400.0, "42455": 949947200.0, "42460": 973926912.0, "42465": 967718208.0, "42470": 972803200.0, "42475": 971404672.0, "42480": 945398912.0, "42485": 941536256.0, "42490": 960274368.0, "42495": 972747968.0, "42500": 958241536.0, "42505": 962053632.0, "42510": 944950528.0, "42515": 966048448.0, "42520": 969966720.0, "42525": 976895744.0, "42530": 972080448.0, "42535": 958903424.0, "42540": 970844224.0, "42545": 971132032.0, "42550": 964039552.0, "42555": 961755136.0, "42560": 947288640.0, "42565": 951079488.0, "42570": 970115008.0, "42575": 971949952.0, "42580": 969615616.0, "42585": 953491840.0, "42590": 967540288.0, "42595": 957161280.0, "42600": 981998976.0, "42605": 971380224.0, "42610": 978890816.0, "42615": 952898048.0, "42620": 969907008.0, "42625": 979906752.0, "42630": 983254016.0, "42635": 966792128.0, "42640": 935195200.0, "42645": 961816832.0, "42650": 969052992.0, "42655": 985825536.0, "42660": 977342464.0, "42665": 948485440.0, "42670": 956096384.0, "42675": 982533824.0, "42680": 966993152.0, "42685": 971342912.0, "42690": 977651328.0, "42695": 943740352.0, "42700": 972308480.0, "42705": 972275584.0, "42710": 968659392.0, "42715": 974559488.0, "42720": 959267136.0, "42725": 973049600.0, "42730": 974936896.0, "42735": 959765440.0, "42740": 983480448.0, "42745": 970175552.0, "42750": 956356352.0, "42755": 967847296.0, "42760": 968361472.0, "42765": 973061824.0, "42770": 963533504.0, "42775": 944643840.0, "42780": 985430976.0, "42785": 968146240.0, "42790": 975678080.0, "42795": 983740096.0, "42800": 941467968.0, "42805": 982427840.0, "42810": 970783680.0, "42815": 962742208.0, "42820": 968774912.0, "42825": 937160512.0, "42830": 962712896.0, "42835": 962117504.0, "42840": 988480384.0, "42845": 972876992.0, "42850": 950659072.0, "42855": 948636992.0, "42860": 978319360.0, "42865": 978042560.0, "42870": 977122496.0, "42875": 972312896.0, "42880": 949044480.0, "42885": 981323648.0, "42890": 964098560.0, "42895": 972431424.0, "42900": 971266176.0, "42905": 946334528.0, "42910": 947036864.0, "42915": 968982400.0, "42920": 966334336.0, "42925": 981523648.0, "42930": 958171968.0, "42935": 953094080.0, "42940": 977886592.0, "42945": 956969024.0, "42950": 969080960.0, "42955": 966728384.0, "42960": 949536576.0, "42965": 980536896.0, "42970": 976440832.0, "42975": 985024512.0, "42980": 957840448.0, "42985": 938619136.0, "42990": 977331904.0, "42995": 968921984.0, "43000": 970752512.0, "43005": 976519872.0, "43010": 954953216.0, "43015": 974922176.0, "43020": 981805824.0, "43025": 973495744.0, "43030": 966974400.0, "43035": 979308800.0, "43040": 953488576.0, "43045": 960826368.0, "43050": 973653376.0, "43055": 976042816.0, "43060": 970568832.0, "43065": 947819328.0, "43070": 961382656.0, "43075": 969439680.0, "43080": 986354688.0, "43085": 981375744.0, "43090": 945805824.0, "43095": 962488064.0, "43100": 973168256.0, "43105": 981934208.0, "43110": 969166272.0, "43115": 961167424.0, "43120": 950210240.0, "43125": 971394176.0, "43130": 956354432.0, "43135": 970519296.0, "43140": 972168832.0, "43145": 962108352.0, "43150": 967141504.0, "43155": 970892096.0, "43160": 968353472.0, "43165": 973310784.0, "43170": 955022208.0, "43175": 966906752.0, "43180": 968191296.0, "43185": 970886976.0, "43190": 961980416.0, "43195": 947955136.0, "43200": 944457408.0, "43205": 971925824.0, "43210": 952694720.0, "43215": 971974144.0, "43220": 964941696.0, 
"43225": 950126592.0, "43230": 979130752.0, "43235": 966047488.0, "43240": 962486144.0, "43245": 977735360.0, "43250": 952416704.0, "43255": 973670528.0, "43260": 965900800.0, "43265": 970814528.0, "43270": 957312448.0, "43275": 944913280.0, "43280": 956919936.0, "43285": 957547456.0, "43290": 965776320.0, "43295": 985276416.0, "43300": 961312640.0, "43305": 931964928.0, "43310": 969032576.0, "43315": 971775872.0, "43320": 973553792.0, "43325": 974517120.0, "43330": 951686400.0, "43335": 975795200.0, "43340": 973500544.0, "43345": 976593664.0, "43350": 968972736.0, "43355": 946405248.0, "43360": 962722752.0, "43365": 970780352.0, "43370": 961212544.0, "43375": 991935488.0, "43380": 973657984.0, "43385": 934701760.0, "43390": 974412800.0, "43395": 966875840.0, "43400": 954187136.0, "43405": 966778816.0, "43410": 943114048.0, "43415": 984593728.0, "43420": 970704704.0, "43425": 959753408.0, "43430": 966980352.0, "43435": 948198528.0, "43440": 946197632.0, "43445": 968651776.0, "43450": 968115840.0, "43455": 959142528.0, "43460": 962927296.0, "43465": 947130688.0, "43470": 978895680.0, "43475": 959886720.0, "43480": 963243776.0, "43485": 960060032.0, "43490": 939225024.0, "43495": 967221184.0, "43500": 990096896.0, "43505": 973585216.0, "43510": 969614016.0, "43515": 948472960.0, "43520": 951380032.0, "43525": 968171904.0, "43530": 976864832.0, "43535": 985814080.0, "43540": 940224832.0, "43545": 943014144.0, "43550": 951762112.0, "43555": 968036608.0, "43560": 986658880.0, "43565": 954868928.0, "43570": 938886208.0, "43575": 970611200.0, "43580": 977024320.0, "43585": 975085184.0, "43590": 970119424.0, "43595": 943333696.0, "43600": 957694080.0, "43605": 977715904.0, "43610": 967134208.0, "43615": 986159808.0, "43620": 943507776.0, "43625": 932151040.0, "43630": 975665088.0, "43635": 944703488.0, "43640": 975693760.0, "43645": 950912832.0, "43650": 945223808.0, "43655": 968511104.0, "43660": 971641216.0, "43665": 965213632.0, "43670": 965337728.0, "43675": 950944064.0, "43680": 964369792.0, "43685": 971016704.0, "43690": 969888384.0, "43695": 964174080.0, "43700": 931103936.0, "43705": 965133120.0, "43710": 967975168.0, "43715": 968949312.0, "43720": 960665728.0, "43725": 953701376.0, "43730": 952765760.0, "43735": 972790656.0, "43740": 993968512.0, "43745": 963067520.0, "43750": 965375232.0, "43755": 940632256.0, "43760": 968382272.0, "43765": 965788928.0, "43770": 952849408.0, "43775": 969610816.0, "43780": 937168960.0, "43785": 965267648.0, "43790": 957211008.0, "43795": 949010816.0, "43800": 971073152.0, "43805": 932240256.0, "43810": 957091456.0, "43815": 961454528.0, "43820": 970284480.0, "43825": 961677376.0, "43830": 974492352.0, "43835": 934600256.0, "43840": 962994944.0, "43845": 972914368.0, "43850": 961484032.0, "43855": 962760512.0, "43860": 953684800.0, "43865": 960152000.0, "43870": 975413888.0, "43875": 972973568.0, "43880": 961051328.0, "43885": 941447744.0, "43890": 970997440.0, "43895": 982644160.0, "43900": 974233600.0, "43905": 969761984.0, "43910": 951126848.0, "43915": 938794816.0, "43920": 967778624.0, "43925": 969726144.0, "43930": 958656384.0, "43935": 978102720.0, "43940": 957176000.0, "43945": 996387776.0, "43950": 968411584.0, "43955": 983367424.0, "43960": 969972992.0, "43965": 947860416.0, "43970": 964706752.0, "43975": 968817664.0, "43980": 973159424.0, "43985": 960430400.0, "43990": 955790208.0, "43995": 943966464.0, "44000": 961202304.0, "44005": 951257856.0, "44010": 970610496.0, "44015": 984519424.0, "44020": 940207360.0, "44025": 955723520.0, "44030": 
975801984.0, "44035": 977599424.0, "44040": 978491840.0, "44045": 933385536.0, "44050": 955212544.0, "44055": 967517248.0, "44060": 981545536.0, "44065": 973558784.0, "44070": 940197824.0, "44075": 932873984.0, "44080": 975824512.0, "44085": 963403072.0, "44090": 969175680.0, "44095": 962225984.0, "44100": 951865024.0, "44105": 963823552.0, "44110": 966330688.0, "44115": 964860416.0, "44120": 961490368.0, "44125": 951516032.0, "44130": 965149632.0, "44135": 970462912.0, "44140": 964659200.0, "44145": 973885056.0, "44150": 945411776.0, "44155": 950796544.0, "44160": 965696576.0, "44165": 987673088.0, "44170": 982005056.0, "44175": 958019840.0, "44180": 938759232.0, "44185": 975051968.0, "44190": 966459456.0, "44195": 966705024.0, "44200": 963657664.0, "44205": 937560256.0, "44210": 971152064.0, "44215": 968897152.0, "44220": 974516160.0, "44225": 973436992.0, "44230": 956252032.0, "44235": 962730048.0, "44240": 957044416.0, "44245": 966647360.0, "44250": 972839488.0, "44255": 961571712.0, "44260": 962748672.0, "44265": 962847616.0, "44270": 972233024.0, "44275": 960831744.0, "44280": 975438656.0, "44285": 938555648.0, "44290": 956401792.0, "44295": 972078528.0, "44300": 979239936.0, "44305": 968823680.0, "44310": 954199680.0, "44315": 955698048.0, "44320": 986631744.0, "44325": 972533248.0, "44330": 950170496.0, "44335": 950342848.0, "44340": 944060544.0, "44345": 983924992.0, "44350": 965304000.0, "44355": 959298304.0, "44360": 950508480.0, "44365": 934421312.0, "44370": 965974528.0, "44375": 973952640.0, "44380": 973314112.0, "44385": 959898368.0, "44390": 947953216.0, "44395": 963799680.0, "44400": 978693056.0, "44405": 980923072.0, "44410": 965563712.0, "44415": 961452416.0, "44420": 955791232.0, "44425": 967840832.0, "44430": 969929088.0, "44435": 971176000.0, "44440": 953794048.0, "44445": 936763264.0, "44450": 966993344.0, "44455": 956821184.0, "44460": 971793408.0, "44465": 989248000.0, "44470": 958233792.0, "44475": 944298112.0, "44480": 956017088.0, "44485": 963260608.0, "44490": 969898368.0, "44495": 954041280.0, "44500": 965925312.0, "44505": 976199232.0, "44510": 969211776.0, "44515": 968017088.0, "44520": 965665472.0, "44525": 957104064.0, "44530": 966680832.0, "44535": 986094016.0, "44540": 970845056.0, "44545": 970894784.0, "44550": 960119680.0, "44555": 950918976.0, "44560": 975630208.0, "44565": 971171392.0, "44570": 958727104.0, "44575": 945674176.0, "44580": 967939904.0, "44585": 960440192.0, "44590": 973211200.0, "44595": 975953920.0, "44600": 942547136.0, "44605": 956694848.0, "44610": 962657344.0, "44615": 981943104.0, "44620": 986203968.0, "44625": 970069248.0, "44630": 950619584.0, "44635": 968773888.0, "44640": 982219840.0, "44645": 967958208.0, "44650": 966042496.0, "44655": 952883200.0, "44660": 959814784.0, "44665": 960893440.0, "44670": 960415424.0, "44675": 963442560.0, "44680": 945479424.0, "44685": 943926080.0, "44690": 960613184.0, "44695": 964094912.0, "44700": 980009152.0, "44705": 975901504.0, "44710": 962130176.0, "44715": 964126336.0, "44720": 970845120.0, "44725": 951705216.0, "44730": 959019520.0, "44735": 965890112.0, "44740": 970830592.0, "44745": 969075648.0, "44750": 964242624.0, "44755": 961692032.0, "44760": 941720832.0, "44765": 955487360.0, "44770": 975907520.0, "44775": 969568320.0, "44780": 976028800.0, "44785": 953580544.0, "44790": 952698752.0, "44795": 953993024.0, "44800": 981672384.0, "44805": 988485120.0, "44810": 965574080.0, "44815": 947679616.0, "44820": 974413120.0, "44825": 954403904.0, "44830": 978327680.0, "44835": 968423872.0, 
"44840": 926351808.0, "44845": 971128512.0, "44850": 974088448.0, "44855": 980126720.0, "44860": 961977536.0, "44865": 946724928.0, "44870": 959383616.0, "44875": 963893632.0, "44880": 975770496.0, "44885": 958667264.0, "44890": 978644992.0, "44895": 939724608.0, "44900": 973270912.0, "44905": 958585024.0, "44910": 971002112.0, "44915": 963966464.0, "44920": 947340032.0, "44925": 959591936.0, "44930": 964261440.0, "44935": 976900416.0, "44940": 966671744.0, "44945": 946405376.0, "44950": 961490304.0, "44955": 978854336.0, "44960": 961296000.0, "44965": 960822400.0, "44970": 971415104.0, "44975": 954696512.0, "44980": 979976320.0, "44985": 961182656.0, "44990": 967416704.0, "44995": 973310720.0, "45000": 940178560.0, "45005": 957050112.0, "45010": 970586816.0, "45015": 978225472.0, "45020": 967780928.0, "45025": 948128640.0, "45030": 971199488.0, "45035": 963738368.0, "45040": 972669312.0, "45045": 973376448.0, "45050": 957399296.0, "45055": 941658624.0, "45060": 962593728.0, "45065": 958084416.0, "45070": 994061888.0, "45075": 981943040.0, "45080": 948223232.0, "45085": 970043008.0, "45090": 980077120.0, "45095": 993656448.0, "45100": 969985024.0, "45105": 937997120.0, "45110": 951331200.0, "45115": 985260992.0, "45120": 958665664.0, "45125": 977502784.0, "45130": 959215232.0, "45135": 965115648.0, "45140": 985758720.0, "45145": 970121344.0, "45150": 968530304.0, "45155": 967787200.0, "45160": 957658752.0, "45165": 967854400.0, "45170": 978319680.0, "45175": 967717632.0, "45180": 956105024.0, "45185": 957964736.0, "45190": 973836480.0, "45195": 961938176.0, "45200": 975331136.0, "45205": 982348672.0, "45210": 938429056.0, "45215": 955315392.0, "45220": 992003328.0, "45225": 972342144.0, "45230": 967419840.0, "45235": 969737856.0, "45240": 945716288.0, "45245": 974943744.0, "45250": 977726016.0, "45255": 979119552.0, "45260": 967480064.0, "45265": 938954624.0, "45270": 983235008.0, "45275": 966500160.0, "45280": 978430848.0, "45285": 978794432.0, "45290": 945139584.0, "45295": 954329024.0, "45300": 974831744.0, "45305": 979658944.0, "45310": 968560896.0, "45315": 965605888.0, "45320": 943703360.0, "45325": 979308736.0, "45330": 983056704.0, "45335": 976942528.0, "45340": 959722048.0, "45345": 946682688.0, "45350": 967325568.0, "45355": 967007360.0, "45360": 969098624.0, "45365": 977173568.0, "45370": 946559168.0, "45375": 948139968.0, "45380": 966332736.0, "45385": 957951616.0, "45390": 963083008.0, "45395": 960513280.0, "45400": 945137408.0, "45405": 976513984.0, "45410": 962578816.0, "45415": 965056192.0, "45420": 962676864.0, "45425": 952752000.0, "45430": 972058752.0, "45435": 976489856.0, "45440": 972665088.0, "45445": 958983872.0, "45450": 947071040.0, "45455": 970434432.0, "45460": 976177984.0, "45465": 958309568.0, "45470": 977108608.0, "45475": 953891264.0, "45480": 939019776.0, "45485": 976502784.0, "45490": 967000704.0, "45495": 982325376.0, "45500": 980905856.0, "45505": 927365056.0, "45510": 972925696.0, "45515": 961582400.0, "45520": 978336960.0, "45525": 989044096.0, "45530": 941688832.0, "45535": 971255040.0, "45540": 960926272.0, "45545": 965642688.0, "45550": 976120832.0, "45555": 961386112.0, "45560": 963970944.0, "45565": 952642176.0, "45570": 980919616.0, "45575": 971305728.0, "45580": 951668352.0, "45585": 944095680.0, "45590": 989239488.0, "45595": 975040896.0, "45600": 968945152.0, "45605": 974668032.0, "45610": 959554304.0, "45615": 967934272.0, "45620": 966269632.0, "45625": 978008960.0, "45630": 955994496.0, "45635": 948278272.0, "45640": 949851264.0, "45645": 
976720768.0, "45650": 963482624.0, "45655": 970972800.0, "45660": 962701632.0, "45665": 956954304.0, "45670": 973811648.0, "45675": 962348736.0, "45680": 972238720.0, "45685": 968972352.0, "45690": 951484672.0, "45695": 972745536.0, "45700": 968702336.0, "45705": 971772800.0, "45710": 961325696.0, "45715": 935692480.0, "45720": 949738240.0, "45725": 966528704.0, "45730": 966068480.0, "45735": 988337472.0, "45740": 945743936.0, "45745": 945068160.0, "45750": 969744896.0, "45755": 958495680.0, "45760": 965044992.0, "45765": 979358208.0, "45770": 940792704.0, "45775": 966055168.0, "45780": 968741248.0, "45785": 958093504.0, "45790": 968807808.0, "45795": 953420288.0, "45800": 940988480.0, "45805": 971373888.0, "45810": 977037440.0, "45815": 963874176.0, "45820": 941806336.0, "45825": 963597184.0, "45830": 959816704.0, "45835": 965048768.0, "45840": 972363072.0, "45845": 974833600.0, "45850": 941027776.0, "45855": 963228992.0, "45860": 972840640.0, "45865": 952659136.0, "45870": 968602112.0, "45875": 948630464.0, "45880": 974069824.0, "45885": 985702272.0, "45890": 972262080.0, "45895": 963782464.0, "45900": 943919424.0, "45905": 965005888.0, "45910": 975924352.0, "45915": 952673024.0, "45920": 956952832.0, "45925": 949499712.0, "45930": 937704000.0, "45935": 985925760.0, "45940": 958775616.0, "45945": 972546624.0, "45950": 976471936.0, "45955": 940159808.0, "45960": 975764992.0, "45965": 961649856.0, "45970": 978434688.0, "45975": 974593856.0, "45980": 917638656.0, "45985": 962645312.0, "45990": 958647040.0, "45995": 968405568.0, "46000": 971656320.0, "46005": 966293760.0, "46010": 950698496.0, "46015": 964520704.0, "46020": 978567360.0, "46025": 968371264.0, "46030": 969261056.0, "46035": 950974400.0, "46040": 952855488.0, "46045": 973167040.0, "46050": 962080704.0, "46055": 972257728.0, "46060": 953720704.0, "46065": 967812544.0, "46070": 951755968.0, "46075": 965999424.0, "46080": 960298880.0, "46085": 939060032.0, "46090": 969691264.0, "46095": 984466688.0, "46100": 970003904.0, "46105": 963369344.0, "46110": 941742016.0, "46115": 952629376.0, "46120": 977717632.0, "46125": 968467584.0, "46130": 979419840.0, "46135": 966556864.0, "46140": 951995200.0, "46145": 956252096.0, "46150": 963365632.0, "46155": 968093184.0, "46160": 963099968.0, "46165": 939804032.0, "46170": 971085376.0, "46175": 977258368.0, "46180": 966725376.0, "46185": 967343488.0, "46190": 949925952.0, "46195": 951049920.0, "46200": 953697088.0, "46205": 974512512.0, "46210": 964673472.0, "46215": 975773568.0, "46220": 952607040.0, "46225": 970219328.0, "46230": 961675392.0, "46235": 969193728.0, "46240": 975635520.0, "46245": 962270976.0, "46250": 984438976.0, "46255": 972077248.0, "46260": 973921856.0, "46265": 956797696.0, "46270": 945504640.0, "46275": 959902784.0, "46280": 960034880.0, "46285": 979829760.0, "46290": 974197888.0, "46295": 972413056.0, "46300": 932979712.0, "46305": 960956800.0, "46310": 972797248.0, "46315": 963138304.0, "46320": 946983360.0, "46325": 951479040.0, "46330": 975570880.0, "46335": 978764608.0, "46340": 971705536.0, "46345": 978367552.0, "46350": 939390784.0, "46355": 955077568.0, "46360": 976571200.0, "46365": 967600000.0, "46370": 969476224.0, "46375": 952154752.0, "46380": 939427648.0, "46385": 986703680.0, "46390": 970801536.0, "46395": 965643904.0, "46400": 956804736.0, "46405": 937468736.0, "46410": 978944640.0, "46415": 975736960.0, "46420": 972141504.0, "46425": 957904320.0, "46430": 944203776.0, "46435": 952806912.0, "46440": 950116672.0, "46445": 980884992.0, "46450": 969512448.0, 
"46455": 972878016.0, "46460": 946707840.0, "46465": 968289792.0, "46470": 988220992.0, "46475": 961154112.0, "46480": 974978304.0, "46485": 947380096.0, "46490": 964003520.0, "46495": 959229952.0, "46500": 966989184.0, "46505": 950190016.0, "46510": 954696832.0, "46515": 978284736.0, "46520": 963513216.0, "46525": 959412480.0, "46530": 973579968.0, "46535": 949531456.0, "46540": 951831808.0, "46545": 974971712.0, "46550": 965145536.0, "46555": 950268288.0, "46560": 960803776.0, "46565": 943644096.0, "46570": 979462528.0, "46575": 967521728.0, "46580": 976059136.0, "46585": 956224064.0, "46590": 945435136.0, "46595": 957332608.0, "46600": 973341376.0, "46605": 966484736.0, "46610": 971532544.0, "46615": 958363776.0, "46620": 948371648.0, "46625": 968648256.0, "46630": 964612096.0, "46635": 966939392.0, "46640": 956411008.0, "46645": 941093952.0, "46650": 962652864.0, "46655": 963009152.0, "46660": 957553344.0, "46665": 959140928.0, "46670": 942894528.0, "46675": 971304448.0, "46680": 964746560.0, "46685": 971005056.0, "46690": 974108032.0, "46695": 962762048.0, "46700": 971199616.0, "46705": 976358528.0, "46710": 965390464.0, "46715": 962337344.0, "46720": 967049472.0, "46725": 961098176.0, "46730": 955546688.0, "46735": 970832960.0, "46740": 967363136.0, "46745": 966244992.0, "46750": 948815616.0, "46755": 972215424.0, "46760": 967694656.0, "46765": 968725376.0, "46770": 984443520.0, "46775": 940815872.0, "46780": 938402176.0, "46785": 961613440.0, "46790": 955398016.0, "46795": 971330432.0, "46800": 942328064.0, "46805": 948777984.0, "46810": 972771392.0, "46815": 979929408.0, "46820": 966338240.0, "46825": 963443712.0, "46830": 944613632.0, "46835": 973372544.0, "46840": 978719232.0, "46845": 970303872.0, "46850": 955560512.0, "46855": 940235712.0, "46860": 963611200.0, "46865": 965895872.0, "46870": 976820672.0, "46875": 961622848.0, "46880": 942373184.0, "46885": 951768832.0, "46890": 982851136.0, "46895": 958541760.0, "46900": 968552896.0, "46905": 953206080.0, "46910": 949844480.0, "46915": 975083648.0, "46920": 959943488.0, "46925": 978992384.0, "46930": 983184640.0, "46935": 951570752.0, "46940": 958236096.0, "46945": 949325120.0, "46950": 964443968.0, "46955": 973567872.0, "46960": 948924736.0, "46965": 965281664.0, "46970": 980964160.0, "46975": 969876096.0, "46980": 958101056.0, "46985": 929657792.0, "46990": 932409792.0, "46995": 977884608.0, "47000": 973598976.0, "47005": 969305856.0, "47010": 971846528.0, "47015": 942093120.0, "47020": 979260480.0, "47025": 975183168.0, "47030": 972600832.0, "47035": 960431424.0, "47040": 945988160.0, "47045": 962230912.0, "47050": 972467840.0, "47055": 976712448.0, "47060": 977148800.0, "47065": 970741248.0, "47070": 948189312.0, "47075": 974642624.0, "47080": 982687552.0, "47085": 962272384.0, "47090": 987276928.0, "47095": 934984512.0, "47100": 956756672.0, "47105": 968857344.0, "47110": 982944704.0, "47115": 977582912.0, "47120": 939287680.0, "47125": 982632064.0, "47130": 983786688.0, "47135": 969498688.0, "47140": 973480256.0, "47145": 964316992.0, "47150": 956890240.0, "47155": 983048960.0, "47160": 968611648.0, "47165": 975458112.0, "47170": 970239296.0, "47175": 964617344.0, "47180": 979286144.0, "47185": 962118976.0, "47190": 973794432.0, "47195": 977662848.0, "47200": 963089024.0, "47205": 970957248.0, "47210": 970963904.0, "47215": 976337408.0, "47220": 976539200.0, "47225": 948250432.0, "47230": 950062208.0, "47235": 975404416.0, "47240": 977474432.0, "47245": 978135488.0, "47250": 953421056.0, "47255": 934821568.0, "47260": 
985654208.0, "47265": 982711680.0, "47270": 965760832.0, "47275": 963389696.0, "47280": 934925440.0, "47285": 964189504.0, "47290": 981669184.0, "47295": 971920768.0, "47300": 987105472.0, "47305": 949627392.0, "47310": 965405440.0, "47315": 987630720.0, "47320": 974422784.0, "47325": 972282624.0, "47330": 963281216.0, "47335": 940601664.0, "47340": 967141504.0, "47345": 982634432.0, "47350": 976798080.0, "47355": 973729984.0, "47360": 951620736.0, "47365": 974986816.0, "47370": 957446208.0, "47375": 953398080.0, "47380": 983428288.0, "47385": 960203072.0, "47390": 957082496.0, "47395": 966352128.0, "47400": 967695424.0, "47405": 975360320.0, "47410": 932281856.0, "47415": 949730112.0, "47420": 979771904.0, "47425": 969886528.0, "47430": 965158912.0, "47435": 970361280.0, "47440": 948600320.0, "47445": 972020480.0, "47450": 967236672.0, "47455": 968353344.0, "47460": 975728256.0, "47465": 950349120.0, "47470": 981096000.0, "47475": 971506048.0, "47480": 972252864.0, "47485": 976367936.0, "47490": 958433280.0, "47495": 951155392.0, "47500": 974863808.0, "47505": 979992000.0, "47510": 986195328.0, "47515": 961380672.0, "47520": 942306240.0, "47525": 975569472.0, "47530": 976567104.0, "47535": 974594624.0, "47540": 968586816.0, "47545": 943035136.0, "47550": 966848256.0, "47555": 970712896.0, "47560": 979998528.0, "47565": 975920320.0, "47570": 945908864.0, "47575": 960817216.0, "47580": 969692672.0, "47585": 974691648.0, "47590": 961404736.0, "47595": 966626176.0, "47600": 959608128.0, "47605": 968850432.0, "47610": 979302848.0, "47615": 963705728.0, "47620": 973594880.0, "47625": 936807808.0, "47630": 964152000.0, "47635": 968468416.0, "47640": 964656448.0, "47645": 958569856.0, "47650": 951909312.0, "47655": 976127552.0, "47660": 980906624.0, "47665": 970626944.0, "47670": 974086976.0, "47675": 955823232.0, "47680": 968531776.0, "47685": 985316736.0, "47690": 962611136.0, "47695": 971620608.0, "47700": 979395072.0, "47705": 958236224.0, "47710": 973105856.0, "47715": 983886080.0, "47720": 983561472.0, "47725": 973260800.0, "47730": 939694784.0, "47735": 967041792.0, "47740": 969603776.0, "47745": 983160576.0, "47750": 984168064.0, "47755": 938041280.0, "47760": 954423616.0, "47765": 963613184.0, "47770": 965528000.0, "47775": 957242112.0, "47780": 975365632.0, "47785": 959862912.0, "47790": 967332928.0, "47795": 974402816.0, "47800": 954415616.0, "47805": 981835904.0, "47810": 943159872.0, "47815": 969741824.0, "47820": 962886016.0, "47825": 974156864.0, "47830": 976333184.0, "47835": 945350080.0, "47840": 961978240.0, "47845": 975225408.0, "47850": 969349440.0, "47855": 979592384.0, "47860": 962737920.0, "47865": 956129088.0, "47870": 961866688.0, "47875": 961420672.0, "47880": 974911360.0, "47885": 964334912.0, "47890": 940173696.0, "47895": 970234240.0, "47900": 987519808.0, "47905": 963541824.0, "47910": 962074752.0, "47915": 942832832.0, "47920": 963714944.0, "47925": 979851904.0, "47930": 971435712.0, "47935": 955234688.0, "47940": 963752448.0, "47945": 935989248.0, "47950": 972160064.0, "47955": 975901120.0, "47960": 980816128.0, "47965": 958812224.0, "47970": 945620736.0, "47975": 967175168.0, "47980": 956927616.0, "47985": 992747904.0, "47990": 978240960.0, "47995": 952409664.0, "48000": 961535872.0, "48005": 967020480.0, "48010": 974580288.0, "48015": 963016128.0, "48020": 939934336.0, "48025": 961725248.0, "48030": 959923904.0, "48035": 978875008.0, "48040": 978843520.0, "48045": 961994432.0, "48050": 954600384.0, "48055": 981766272.0, "48060": 970508864.0, "48065": 980123136.0, 
"48070": 958100928.0, "48075": 944644608.0, "48080": 982468352.0, "48085": 973997184.0, "48090": 965888512.0, "48095": 979770816.0, "48100": 944028288.0, "48105": 975526784.0, "48110": 966152896.0, "48115": 962137216.0, "48120": 969638784.0, "48125": 940504384.0, "48130": 948926400.0, "48135": 960902016.0, "48140": 972489216.0, "48145": 970616896.0, "48150": 956086144.0, "48155": 938556608.0, "48160": 961398720.0, "48165": 966179456.0, "48170": 980673280.0, "48175": 975372288.0, "48180": 936068608.0, "48185": 949605760.0, "48190": 982066112.0, "48195": 968653568.0, "48200": 970083328.0, "48205": 965826688.0, "48210": 956824320.0, "48215": 952770496.0, "48220": 971553216.0, "48225": 980635712.0, "48230": 980057984.0, "48235": 935486144.0, "48240": 963865664.0, "48245": 980943424.0, "48250": 957735232.0, "48255": 976772864.0, "48260": 935665792.0, "48265": 975908480.0, "48270": 962513856.0, "48275": 971904256.0, "48280": 959588352.0, "48285": 950008576.0, "48290": 957267392.0, "48295": 974023616.0, "48300": 975378048.0, "48305": 968879744.0, "48310": 948642624.0, "48315": 952259008.0, "48320": 972690048.0, "48325": 967982336.0, "48330": 972749568.0, "48335": 959059072.0, "48340": 934134528.0, "48345": 953047616.0, "48350": 970324160.0, "48355": 973083008.0, "48360": 959046912.0, "48365": 930099904.0, "48370": 957958976.0, "48375": 973146112.0, "48380": 974831232.0, "48385": 960024192.0, "48390": 936193216.0, "48395": 976739648.0, "48400": 972075008.0, "48405": 973184896.0, "48410": 966527552.0, "48415": 968425920.0, "48420": 939698944.0, "48425": 978480256.0, "48430": 964900096.0, "48435": 974630208.0, "48440": 970251136.0, "48445": 958121792.0, "48450": 961093568.0, "48455": 959930304.0, "48460": 967799296.0, "48465": 970791744.0, "48470": 951917248.0, "48475": 936463552.0, "48480": 958813120.0, "48485": 974033536.0, "48490": 958769728.0, "48495": 952567744.0, "48500": 935682496.0, "48505": 969531648.0, "48510": 957317120.0, "48515": 974736128.0, "48520": 960690048.0, "48525": 934066240.0, "48530": 961527616.0, "48535": 976141248.0, "48540": 976158016.0, "48545": 969917440.0, "48550": 949858944.0, "48555": 951862592.0, "48560": 968120768.0, "48565": 972890560.0, "48570": 975201024.0, "48575": 961555264.0, "48580": 932718592.0, "48585": 979192576.0, "48590": 983343552.0, "48595": 966816000.0, "48600": 957182848.0, "48605": 938637376.0, "48610": 957935424.0, "48615": 971337920.0, "48620": 975287936.0, "48625": 981189568.0, "48630": 940339776.0, "48635": 957259712.0, "48640": 978602176.0, "48645": 967468608.0, "48650": 970945216.0, "48655": 963492544.0, "48660": 945608832.0, "48665": 968751552.0, "48670": 972680960.0, "48675": 984623104.0, "48680": 960438144.0, "48685": 950259456.0, "48690": 965096896.0, "48695": 972024768.0, "48700": 966932608.0, "48705": 967346816.0, "48710": 947164672.0, "48715": 960755392.0, "48720": 965601280.0, "48725": 956776512.0, "48730": 972466688.0, "48735": 962672960.0, "48740": 959353088.0, "48745": 972088064.0, "48750": 966607360.0, "48755": 981356800.0, "48760": 968455040.0, "48765": 949013440.0, "48770": 957033152.0, "48775": 988565760.0, "48780": 965911296.0, "48785": 965572416.0, "48790": 942219328.0, "48795": 952303936.0, "48800": 978610240.0, "48805": 982499200.0, "48810": 957491328.0, "48815": 955417984.0, "48820": 928638400.0, "48825": 977849792.0, "48830": 971486848.0, "48835": 969913600.0, "48840": 969998336.0, "48845": 954642752.0, "48850": 964323008.0, "48855": 971248256.0, "48860": 974591296.0, "48865": 971596544.0, "48870": 949894848.0, "48875": 
972000128.0, "48880": 974561728.0, "48885": 961768320.0, "48890": 979940224.0, "48895": 956638336.0, "48900": 947838912.0, "48905": 962591552.0, "48910": 964064768.0, "48915": 960627968.0, "48920": 961446784.0, "48925": 938803712.0, "48930": 965442304.0, "48935": 966079296.0, "48940": 951074624.0, "48945": 986901632.0, "48950": 940087680.0, "48955": 973370752.0, "48960": 970072640.0, "48965": 963002048.0, "48970": 969389056.0, "48975": 929952128.0, "48980": 966895232.0, "48985": 968776960.0, "48990": 975869440.0, "48995": 974122112.0, "49000": 961563008.0, "49005": 942365632.0, "49010": 973467200.0, "49015": 971733312.0, "49020": 960036352.0, "49025": 945074560.0, "49030": 935221888.0, "49035": 978917568.0, "49040": 972775104.0, "49045": 963178688.0, "49050": 961131456.0, "49055": 941751616.0, "49060": 955785472.0, "49065": 965909888.0, "49070": 978486528.0, "49075": 975264064.0, "49080": 939000192.0, "49085": 948974912.0, "49090": 962879232.0, "49095": 983339840.0, "49100": 967863680.0, "49105": 970060352.0, "49110": 944559808.0, "49115": 978897536.0, "49120": 977837056.0, "49125": 981889600.0, "49130": 946885824.0, "49135": 954121728.0, "49140": 952740672.0, "49145": 970293888.0, "49150": 951296640.0, "49155": 967487872.0, "49160": 947777792.0, "49165": 973764736.0, "49170": 978800576.0, "49175": 971716352.0, "49180": 979758592.0, "49185": 975067200.0, "49190": 961812032.0, "49195": 988884416.0, "49200": 969688576.0, "49205": 963040064.0, "49210": 978203776.0, "49215": 940496704.0, "49220": 979072832.0, "49225": 964114368.0, "49230": 976423424.0, "49235": 975367552.0, "49240": 944938688.0, "49245": 961621888.0, "49250": 974594432.0, "49255": 993729664.0, "49260": 972345024.0, "49265": 950474240.0, "49270": 940760896.0, "49275": 962204032.0, "49280": 987996544.0, "49285": 984063104.0, "49290": 961462400.0, "49295": 939511680.0, "49300": 975748480.0, "49305": 980054464.0, "49310": 961113664.0, "49315": 968186688.0, "49320": 942870336.0, "49325": 965896320.0, "49330": 963047424.0, "49335": 959029056.0, "49340": 977546560.0, "49345": 964686912.0, "49350": 955687040.0, "49355": 971227456.0, "49360": 966099072.0, "49365": 958729024.0, "49370": 951278400.0, "49375": 930070336.0, "49380": 968904192.0, "49385": 958722624.0, "49390": 949012608.0, "49395": 975281344.0, "49400": 930253952.0, "49405": 959109248.0, "49410": 966538368.0, "49415": 968962944.0, "49420": 968919936.0, "49425": 947093504.0, "49430": 961554048.0, "49435": 970148224.0, "49440": 964537536.0, "49445": 967847360.0, "49450": 963390528.0, "49455": 937962560.0, "49460": 975348928.0, "49465": 969439296.0, "49470": 960375808.0, "49475": 973548608.0, "49480": 961503872.0, "49485": 960724928.0, "49490": 976392640.0, "49495": 978377152.0, "49500": 953698048.0, "49505": 953053568.0, "49510": 972246528.0, "49515": 960553984.0, "49520": 968131968.0, "49525": 973891968.0, "49530": 943552384.0, "49535": 952890496.0, "49540": 962481280.0, "49545": 982219520.0, "49550": 982872832.0, "49555": 974696704.0, "49560": 938286848.0, "49565": 965829056.0, "49570": 965288896.0, "49575": 973979008.0, "49580": 979513984.0, "49585": 962260736.0, "49590": 977309312.0, "49595": 973364672.0, "49600": 978599232.0, "49605": 961086592.0, "49610": 945939968.0, "49615": 956056000.0, "49620": 958397632.0, "49625": 947549248.0, "49630": 969065344.0, "49635": 963433600.0, "49640": 958130624.0, "49645": 989367616.0, "49650": 970571904.0, "49655": 952198528.0, "49660": 959558144.0, "49665": 948788992.0, "49670": 970466560.0, "49675": 979191104.0, "49680": 968342336.0, 
"49685": 968683968.0, "49690": 959034496.0, "49695": 941549376.0, "49700": 968014976.0, "49705": 981059520.0, "49710": 970778176.0, "49715": 968740288.0, "49720": 939017472.0, "49725": 955492928.0, "49730": 978401344.0, "49735": 983726912.0, "49740": 959341312.0, "49745": 927294784.0, "49750": 977769152.0, "49755": 960461760.0, "49760": 987093248.0, "49765": 963747776.0, "49770": 964127232.0, "49775": 951044288.0, "49780": 979882432.0, "49785": 967554688.0, "49790": 969869312.0, "49795": 948668352.0, "49800": 962092224.0, "49805": 978278784.0, "49810": 975472512.0, "49815": 972062656.0, "49820": 958717632.0, "49825": 966898688.0, "49830": 974024128.0, "49835": 966297856.0, "49840": 965423488.0, "49845": 966714496.0, "49850": 931339392.0, "49855": 972234944.0, "49860": 962377408.0, "49865": 958435328.0, "49870": 980077184.0, "49875": 959877696.0, "49880": 967725824.0, "49885": 968374016.0, "49890": 967905088.0, "49895": 975843712.0, "49900": 965679488.0, "49905": 952660992.0, "49910": 994654976.0, "49915": 977645312.0, "49920": 963317696.0, "49925": 979271424.0, "49930": 959221632.0, "49935": 955072256.0, "49940": 981647104.0, "49945": 977524480.0, "49950": 969662976.0, "49955": 946193536.0, "49960": 955734272.0, "49965": 963831552.0, "49970": 973254272.0, "49975": 965051904.0, "49980": 976060800.0, "49985": 960765120.0, "49990": 973557312.0, "49995": 967746432.0, "50000": 978123904.0, "50005": 979101952.0, "50010": 943472768.0, "50015": 958930368.0, "50020": 970704512.0, "50025": 981108672.0, "50030": 978309632.0, "50035": 955858240.0, "50040": 952451712.0, "50045": 967343680.0, "50050": 965956736.0, "50055": 943454080.0, "50060": 970205760.0, "50065": 956756800.0, "50070": 980966912.0, "50075": 959692928.0, "50080": 964317248.0, "50085": 966192256.0, "50090": 950020096.0, "50095": 983923008.0, "50100": 969305344.0, "50105": 957530048.0, "50110": 973992000.0, "50115": 954504448.0, "50120": 968175744.0, "50125": 959913920.0, "50130": 979106944.0, "50135": 974531968.0, "50140": 942635904.0, "50145": 975224768.0, "50150": 988810496.0, "50155": 983247936.0, "50160": 983301440.0, "50165": 969839552.0, "50170": 939462912.0, "50175": 948440768.0, "50180": 996434496.0, "50185": 969703680.0, "50190": 976227712.0, "50195": 952480384.0, "50200": 971387712.0, "50205": 958070592.0, "50210": 974019584.0, "50215": 972015616.0, "50220": 953699776.0, "50225": 949533824.0, "50230": 973444544.0, "50235": 963875200.0, "50240": 972609280.0, "50245": 964121536.0, "50250": 959051264.0, "50255": 970271424.0, "50260": 985799616.0, "50265": 976105408.0, "50270": 980954368.0, "50275": 949725120.0, "50280": 970993856.0, "50285": 967621376.0, "50290": 957043648.0, "50295": 964758080.0, "50300": 942758976.0, "50305": 963666816.0, "50310": 977565376.0, "50315": 965877952.0, "50320": 961812992.0, "50325": 954853248.0, "50330": 954322112.0, "50335": 973738368.0, "50340": 971467648.0, "50345": 961462464.0, "50350": 970545088.0, "50355": 945657920.0, "50360": 979763776.0, "50365": 966208576.0, "50370": 974519872.0, "50375": 974230272.0, "50380": 947696576.0, "50385": 971769152.0, "50390": 979113344.0, "50395": 971375616.0, "50400": 977713664.0, "50405": 953494272.0, "50410": 941574336.0, "50415": 967577216.0, "50420": 972610560.0, "50425": 987358208.0, "50430": 964299136.0, "50435": 951639232.0, "50440": 970904256.0, "50445": 971214976.0, "50450": 963151104.0, "50455": 975170048.0, "50460": 948836096.0, "50465": 977620608.0, "50470": 980072768.0, "50475": 968605440.0, "50480": 968729472.0, "50485": 955712704.0, "50490": 
954896448.0, "50495": 962308928.0, "50500": 963186368.0, "50505": 966873600.0, "50510": 967535936.0, "50515": 941963584.0, "50520": 971380672.0, "50525": 955105536.0, "50530": 954779712.0, "50535": 977445760.0, "50540": 938304448.0, "50545": 972338240.0, "50550": 987782528.0, "50555": 971246464.0, "50560": 973592768.0, "50565": 946462848.0, "50570": 960743040.0, "50575": 970355392.0, "50580": 965944640.0, "50585": 969039616.0, "50590": 957910528.0, "50595": 949367872.0, "50600": 972154304.0, "50605": 965195200.0, "50610": 977435072.0, "50615": 963267072.0, "50620": 933819328.0, "50625": 976920320.0, "50630": 983275904.0, "50635": 974263168.0, "50640": 959110400.0, "50645": 943477504.0, "50650": 957839424.0, "50655": 963345344.0, "50660": 962910400.0, "50665": 976897280.0, "50670": 948512320.0, "50675": 949153920.0, "50680": 967622464.0, "50685": 968943424.0, "50690": 971799104.0, "50695": 971153216.0, "50700": 953777344.0, "50705": 973855232.0, "50710": 965324416.0, "50715": 978521984.0, "50720": 961634688.0, "50725": 944656000.0, "50730": 965183616.0, "50735": 962646528.0, "50740": 958225920.0, "50745": 966621632.0, "50750": 962378240.0, "50755": 952916608.0, "50760": 968795200.0, "50765": 956432064.0, "50770": 960846400.0, "50775": 955088256.0, "50780": 935136256.0, "50785": 986460288.0, "50790": 973518976.0, "50795": 972652032.0, "50800": 956786112.0, "50805": 939472448.0, "50810": 956014848.0, "50815": 985098176.0, "50820": 974397952.0, "50825": 958923584.0, "50830": 955926080.0, "50835": 951475072.0, "50840": 970949952.0, "50845": 976990720.0, "50850": 974825344.0, "50855": 949853632.0, "50860": 947931904.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12697244672.0, "5": 12697245696.0, "10": 12697245696.0, "15": 12697245696.0, "20": 12697245696.0, "25": 12697245696.0, "30": 12697245696.0, "35": 12697245696.0, "40": 12697245696.0, "45": 12697245696.0, "50": 12697245696.0, "55": 12697245696.0, "60": 12697245696.0, "65": 12697245696.0, "70": 12697245696.0, "75": 12697245696.0, "80": 12697245696.0, "85": 12697245696.0, "90": 12697245696.0, "95": 12697245696.0, "100": 12697245696.0, "105": 12697245696.0, "110": 12697245696.0, "115": 12697245696.0, "120": 12697245696.0, "125": 12697245696.0, "130": 12697245696.0, "135": 12697245696.0, "140": 12697245696.0, "145": 12697245696.0, "150": 12697245696.0, "155": 12697245696.0, "160": 12697245696.0, "165": 12697245696.0, "170": 12697245696.0, "175": 12697245696.0, "180": 12697245696.0, "185": 12697245696.0, "190": 12697245696.0, "195": 12697245696.0, "200": 12697245696.0, "205": 12697245696.0, "210": 12697245696.0, "215": 12697245696.0, "220": 12697245696.0, "225": 12697245696.0, "230": 12697245696.0, "235": 12697245696.0, "240": 12697245696.0, "245": 12697245696.0, "250": 12697245696.0, "255": 12697245696.0, "260": 12697245696.0, "265": 12697245696.0, "270": 12697245696.0, "275": 12697245696.0, "280": 12697245696.0, "285": 12697245696.0, "290": 12697245696.0, "295": 12697245696.0, "300": 12697245696.0, "305": 12697245696.0, "310": 12697245696.0, "315": 12697245696.0, "320": 12697245696.0, "325": 12697245696.0, "330": 12697245696.0, "335": 12697245696.0, "340": 12697245696.0, "345": 12697245696.0, "350": 12697245696.0, "355": 12697245696.0, "360": 12697245696.0, "365": 12697245696.0, "370": 12697245696.0, "375": 12697245696.0, "380": 12697245696.0, "385": 12697245696.0, "390": 12697245696.0, "395": 12697245696.0, "400": 12697245696.0, "405": 12697245696.0, "410": 12697245696.0, "415": 
12697245696.0, "420": 12697245696.0, "425": 12697245696.0, "430": 12697245696.0, "435": 12697245696.0, "440": 12697245696.0, "445": 12697245696.0, "450": 12697245696.0, "455": 12697245696.0, "460": 12697245696.0, "465": 12697245696.0, "470": 12697245696.0, "475": 12697245696.0, "480": 12697245696.0, "485": 12697245696.0, "490": 12697245696.0, "495": 12697245696.0, "500": 12697245696.0, "505": 12697245696.0, "510": 12697245696.0, "515": 12697245696.0, "520": 12697245696.0, "525": 12697245696.0, "530": 12697245696.0, "535": 12697245696.0, "540": 12697245696.0, "545": 12697245696.0, "550": 12697245696.0, "555": 12697245696.0, "560": 12697245696.0, "565": 12697245696.0, "570": 12697245696.0, "575": 12697245696.0, "580": 12697245696.0, "585": 12697245696.0, "590": 12697245696.0, "595": 12697245696.0, "600": 12697245696.0, "605": 12697245696.0, "610": 12697245696.0, "615": 12697245696.0, "620": 12697245696.0, "625": 12697245696.0, "630": 12697245696.0, "635": 12697245696.0, "640": 12697245696.0, "645": 12697245696.0, "650": 12697245696.0, "655": 12697245696.0, "660": 12697245696.0, "665": 12697245696.0, "670": 12697245696.0, "675": 12697245696.0, "680": 12697245696.0, "685": 12697245696.0, "690": 12697245696.0, "695": 12697245696.0, "700": 12697245696.0, "705": 12697245696.0, "710": 12697245696.0, "715": 12697245696.0, "720": 12697245696.0, "725": 12697245696.0, "730": 12697245696.0, "735": 12697245696.0, "740": 12697245696.0, "745": 12697245696.0, "750": 12697245696.0, "755": 12697245696.0, "760": 12697245696.0, "765": 12697245696.0, "770": 12697245696.0, "775": 12697245696.0, "780": 12697245696.0, "785": 12697245696.0, "790": 12697245696.0, "795": 12697245696.0, "800": 12697245696.0, "805": 12697245696.0, "810": 12697245696.0, "815": 12697245696.0, "820": 12697245696.0, "825": 12697245696.0, "830": 12697245696.0, "835": 12697245696.0, "840": 12697245696.0, "845": 12697245696.0, "850": 12697245696.0, "855": 12697245696.0, "860": 12697245696.0, "865": 12697245696.0, "870": 12697245696.0, "875": 12697245696.0, "880": 12697245696.0, "885": 12697245696.0, "890": 12697245696.0, "895": 12697245696.0, "900": 12697245696.0, "905": 12697245696.0, "910": 12697245696.0, "915": 12697245696.0, "920": 12697245696.0, "925": 12697245696.0, "930": 12697245696.0, "935": 12697245696.0, "940": 12697245696.0, "945": 12697245696.0, "950": 12697245696.0, "955": 12697245696.0, "960": 12697245696.0, "965": 12697245696.0, "970": 12697245696.0, "975": 12697245696.0, "980": 12697245696.0, "985": 12697245696.0, "990": 12697245696.0, "995": 12697245696.0, "1000": 12697245696.0, "1005": 12697245696.0, "1010": 12697245696.0, "1015": 12697245696.0, "1020": 12697245696.0, "1025": 12697245696.0, "1030": 12697245696.0, "1035": 12697245696.0, "1040": 12697245696.0, "1045": 12697245696.0, "1050": 12697245696.0, "1055": 12697245696.0, "1060": 12697245696.0, "1065": 12697245696.0, "1070": 12697245696.0, "1075": 12697245696.0, "1080": 12697245696.0, "1085": 12697245696.0, "1090": 12697245696.0, "1095": 12697245696.0, "1100": 12697245696.0, "1105": 12697245696.0, "1110": 12697245696.0, "1115": 12697245696.0, "1120": 12697245696.0, "1125": 12697245696.0, "1130": 12697245696.0, "1135": 12697245696.0, "1140": 12697245696.0, "1145": 12697245696.0, "1150": 12697245696.0, "1155": 12697245696.0, "1160": 12697245696.0, "1165": 12697245696.0, "1170": 12697245696.0, "1175": 12697245696.0, "1180": 12697245696.0, "1185": 12697245696.0, "1190": 12697245696.0, "1195": 12697245696.0, "1200": 12697245696.0, "1205": 12697245696.0, "1210": 
12697245696.0, "1215": 12697245696.0, "1220": 12697245696.0, "1225": 12697245696.0, "1230": 12697245696.0, "1235": 12697245696.0, "1240": 12697245696.0, "1245": 12697245696.0, "1250": 12697245696.0, "1255": 12697245696.0, "1260": 12697245696.0, "1265": 12697245696.0, "1270": 12697245696.0, "1275": 12697245696.0, "1280": 12697245696.0, "1285": 12697245696.0, "1290": 12697245696.0, "1295": 12697245696.0, "1300": 12697245696.0, "1305": 12697245696.0, "1310": 12697245696.0, "1315": 12697245696.0, "1320": 12697245696.0, "1325": 12697245696.0, "1330": 12697245696.0, "1335": 12697245696.0, "1340": 12697245696.0, "1345": 12697245696.0, "1350": 12697245696.0, "1355": 12697245696.0, "1360": 12697245696.0, "1365": 12697245696.0, "1370": 12697245696.0, "1375": 12697245696.0, "1380": 12697245696.0, "1385": 12697245696.0, "1390": 12697245696.0, "1395": 12697245696.0, "1400": 12697245696.0, "1405": 12697245696.0, "1410": 12697245696.0, "1415": 12697245696.0, "1420": 12697245696.0, "1425": 12697245696.0, "1430": 12697245696.0, "1435": 12697245696.0, "1440": 12697245696.0, "1445": 12697245696.0, "1450": 12697245696.0, "1455": 12697245696.0, "1460": 12697245696.0, "1465": 12697245696.0, "1470": 12697245696.0, "1475": 12697245696.0, "1480": 12697245696.0, "1485": 12697245696.0, "1490": 12697245696.0, "1495": 12697245696.0, "1500": 12697245696.0, "1505": 12697245696.0, "1510": 12697245696.0, "1515": 12697245696.0, "1520": 12697245696.0, "1525": 12697245696.0, "1530": 12697245696.0, "1535": 12697245696.0, "1540": 12697245696.0, "1545": 12697245696.0, "1550": 12697245696.0, "1555": 12697245696.0, "1560": 12697245696.0, "1565": 12697245696.0, "1570": 12697245696.0, "1575": 12697245696.0, "1580": 12697245696.0, "1585": 12697245696.0, "1590": 12697245696.0, "1595": 12697245696.0, "1600": 12697245696.0, "1605": 12697245696.0, "1610": 12697245696.0, "1615": 12697245696.0, "1620": 12697245696.0, "1625": 12697245696.0, "1630": 12697245696.0, "1635": 12697245696.0, "1640": 12697245696.0, "1645": 12697245696.0, "1650": 12697245696.0, "1655": 12697245696.0, "1660": 12697245696.0, "1665": 12697245696.0, "1670": 12697245696.0, "1675": 12697245696.0, "1680": 12697245696.0, "1685": 12697245696.0, "1690": 12697245696.0, "1695": 12697245696.0, "1700": 12697245696.0, "1705": 12697245696.0, "1710": 12697245696.0, "1715": 12697245696.0, "1720": 12697245696.0, "1725": 12697245696.0, "1730": 12697245696.0, "1735": 12697245696.0, "1740": 12697245696.0, "1745": 12697245696.0, "1750": 12697245696.0, "1755": 12697245696.0, "1760": 12697245696.0, "1765": 12697245696.0, "1770": 12697245696.0, "1775": 12697245696.0, "1780": 12697245696.0, "1785": 12697245696.0, "1790": 12697245696.0, "1795": 12697245696.0, "1800": 12697245696.0, "1805": 12697245696.0, "1810": 12697245696.0, "1815": 12697245696.0, "1820": 12697245696.0, "1825": 12697245696.0, "1830": 12697245696.0, "1835": 12697245696.0, "1840": 12697245696.0, "1845": 12697245696.0, "1850": 12697245696.0, "1855": 12697245696.0, "1860": 12697245696.0, "1865": 12697245696.0, "1870": 12697245696.0, "1875": 12697245696.0, "1880": 12697245696.0, "1885": 12697245696.0, "1890": 12697245696.0, "1895": 12697245696.0, "1900": 12697245696.0, "1905": 12697245696.0, "1910": 12697245696.0, "1915": 12697245696.0, "1920": 12697245696.0, "1925": 12697245696.0, "1930": 12697245696.0, "1935": 12697245696.0, "1940": 12697245696.0, "1945": 12697245696.0, "1950": 12697245696.0, "1955": 12697245696.0, "1960": 12697245696.0, "1965": 12697245696.0, "1970": 12697245696.0, "1975": 12697245696.0, "1980": 
12697245696.0, "1985": 12697245696.0, "1990": 12697245696.0, "1995": 12697245696.0, "2000": 12697245696.0, "2005": 12697245696.0, "2010": 12697245696.0, "2015": 12697245696.0, "2020": 12697245696.0, "2025": 12697245696.0, "2030": 12697245696.0, "2035": 12697245696.0, "2040": 12697245696.0, "2045": 12697245696.0, "2050": 12697245696.0, "2055": 12697245696.0, "2060": 12697245696.0, "2065": 12697245696.0, "2070": 12697245696.0, "2075": 12697245696.0, "2080": 12697245696.0, "2085": 12697245696.0, "2090": 12697245696.0, "2095": 12697245696.0, "2100": 12697245696.0, "2105": 12697245696.0, "2110": 12697245696.0, "2115": 12697245696.0, "2120": 12697245696.0, "2125": 12697245696.0, "2130": 12697245696.0, "2135": 12697245696.0, "2140": 12697245696.0, "2145": 12697245696.0, "2150": 12697245696.0, "2155": 12697245696.0, "2160": 12697245696.0, "2165": 12697245696.0, "2170": 12697245696.0, "2175": 12697245696.0, "2180": 12697245696.0, "2185": 12697245696.0, "2190": 12697245696.0, "2195": 12697245696.0, "2200": 12697245696.0, "2205": 12697245696.0, "2210": 12697245696.0, "2215": 12697245696.0, "2220": 12697245696.0, "2225": 12697245696.0, "2230": 12697245696.0, "2235": 12697245696.0, "2240": 12697245696.0, "2245": 12697245696.0, "2250": 12697245696.0, "2255": 12697245696.0, "2260": 12697245696.0, "2265": 12697245696.0, "2270": 12697245696.0, "2275": 12697245696.0, "2280": 12697245696.0, "2285": 12697245696.0, "2290": 12697245696.0, "2295": 12697245696.0, "2300": 12697245696.0, "2305": 12697245696.0, "2310": 12697245696.0, "2315": 12697245696.0, "2320": 12697245696.0, "2325": 12697245696.0, "2330": 12697245696.0, "2335": 12697245696.0, "2340": 12697245696.0, "2345": 12697245696.0, "2350": 12697245696.0, "2355": 12697245696.0, "2360": 12697245696.0, "2365": 12697245696.0, "2370": 12697245696.0, "2375": 12697245696.0, "2380": 12697245696.0, "2385": 12697245696.0, "2390": 12697245696.0, "2395": 12697245696.0, "2400": 12697245696.0, "2405": 12697245696.0, "2410": 12697245696.0, "2415": 12697245696.0, "2420": 12697245696.0, "2425": 12697245696.0, "2430": 12697245696.0, "2435": 12697245696.0, "2440": 12697245696.0, "2445": 12697245696.0, "2450": 12697245696.0, "2455": 12697245696.0, "2460": 12697245696.0, "2465": 12697245696.0, "2470": 12697245696.0, "2475": 12697245696.0, "2480": 12697245696.0, "2485": 12697245696.0, "2490": 12697245696.0, "2495": 12697245696.0, "2500": 12697245696.0, "2505": 12697245696.0, "2510": 12697245696.0, "2515": 12697245696.0, "2520": 12697245696.0, "2525": 12697245696.0, "2530": 12697245696.0, "2535": 12697245696.0, "2540": 12697245696.0, "2545": 12697245696.0, "2550": 12697245696.0, "2555": 12697245696.0, "2560": 12697245696.0, "2565": 12697245696.0, "2570": 12697245696.0, "2575": 12697245696.0, "2580": 12697245696.0, "2585": 12697245696.0, "2590": 12697245696.0, "2595": 12697245696.0, "2600": 12697245696.0, "2605": 12697245696.0, "2610": 12697245696.0, "2615": 12697245696.0, "2620": 12697245696.0, "2625": 12697245696.0, "2630": 12697245696.0, "2635": 12697245696.0, "2640": 12697245696.0, "2645": 12697245696.0, "2650": 12697245696.0, "2655": 12697245696.0, "2660": 12697245696.0, "2665": 12697245696.0, "2670": 12697245696.0, "2675": 12697245696.0, "2680": 12697245696.0, "2685": 12697245696.0, "2690": 12697245696.0, "2695": 12697245696.0, "2700": 12697245696.0, "2705": 12697245696.0, "2710": 12697245696.0, "2715": 12697245696.0, "2720": 12697245696.0, "2725": 12697245696.0, "2730": 12697245696.0, "2735": 12697245696.0, "2740": 12697245696.0, "2745": 12697245696.0, "2750": 
12697245696.0, "2755": 12697245696.0, "2760": 12697245696.0, "2765": 12697245696.0, "2770": 12697245696.0, "2775": 12697245696.0, "2780": 12697245696.0, "2785": 12697245696.0, "2790": 12697245696.0, "2795": 12697245696.0, "2800": 12697245696.0, "2805": 12697245696.0, "2810": 12697245696.0, "2815": 12697245696.0, "2820": 12697245696.0, "2825": 12697245696.0, "2830": 12697245696.0, "2835": 12697245696.0, "2840": 12697245696.0, "2845": 12697245696.0, "2850": 12697245696.0, "2855": 12697245696.0, "2860": 12697245696.0, "2865": 12697245696.0, "2870": 12697245696.0, "2875": 12697245696.0, "2880": 12697245696.0, "2885": 12697245696.0, "2890": 12697245696.0, "2895": 12697245696.0, "2900": 12697245696.0, "2905": 12697245696.0, "2910": 12697245696.0, "2915": 12697245696.0, "2920": 12697245696.0, "2925": 12697245696.0, "2930": 12697245696.0, "2935": 12697245696.0, "2940": 12697245696.0, "2945": 12697245696.0, "2950": 12697245696.0, "2955": 12697245696.0, "2960": 12697245696.0, "2965": 12697245696.0, "2970": 12697245696.0, "2975": 12697245696.0, "2980": 12697245696.0, "2985": 12697245696.0, "2990": 12697245696.0, "2995": 12697245696.0, "3000": 12697245696.0, "3005": 12697245696.0, "3010": 12697245696.0, "3015": 12697245696.0, "3020": 12697245696.0, "3025": 12697245696.0, "3030": 12697245696.0, "3035": 12697245696.0, "3040": 12697245696.0, "3045": 12697245696.0, "3050": 12697245696.0, "3055": 12697245696.0, "3060": 12697245696.0, "3065": 12697245696.0, "3070": 12697245696.0, "3075": 12697245696.0, "3080": 12697245696.0, "3085": 12697245696.0, "3090": 12697245696.0, "3095": 12697245696.0, "3100": 12697245696.0, "3105": 12697245696.0, "3110": 12697245696.0, "3115": 12697245696.0, "3120": 12697245696.0, "3125": 12697245696.0, "3130": 12697245696.0, "3135": 12697245696.0, "3140": 12697245696.0, "3145": 12697245696.0, "3150": 12697245696.0, "3155": 12697245696.0, "3160": 12697245696.0, "3165": 12697245696.0, "3170": 12697245696.0, "3175": 12697245696.0, "3180": 12697245696.0, "3185": 12697245696.0, "3190": 12697245696.0, "3195": 12697245696.0, "3200": 12697245696.0, "3205": 12697245696.0, "3210": 12697245696.0, "3215": 12697245696.0, "3220": 12697245696.0, "3225": 12697245696.0, "3230": 12697245696.0, "3235": 12697245696.0, "3240": 12697245696.0, "3245": 12697245696.0, "3250": 12697245696.0, "3255": 12697245696.0, "3260": 12697245696.0, "3265": 12697245696.0, "3270": 12697245696.0, "3275": 12697245696.0, "3280": 12697245696.0, "3285": 12697245696.0, "3290": 12697245696.0, "3295": 12697245696.0, "3300": 12697245696.0, "3305": 12697245696.0, "3310": 12697245696.0, "3315": 12697245696.0, "3320": 12697245696.0, "3325": 12697245696.0, "3330": 12697245696.0, "3335": 12697245696.0, "3340": 12697245696.0, "3345": 12697245696.0, "3350": 12697245696.0, "3355": 12697245696.0, "3360": 12697245696.0, "3365": 12697245696.0, "3370": 12697245696.0, "3375": 12697245696.0, "3380": 12697245696.0, "3385": 12697245696.0, "3390": 12697245696.0, "3395": 12697245696.0, "3400": 12697245696.0, "3405": 12697245696.0, "3410": 12697245696.0, "3415": 12697245696.0, "3420": 12697245696.0, "3425": 12697245696.0, "3430": 12697245696.0, "3435": 12697245696.0, "3440": 12697245696.0, "3445": 12697245696.0, "3450": 12697245696.0, "3455": 12697245696.0, "3460": 12697245696.0, "3465": 12697245696.0, "3470": 12697245696.0, "3475": 12697245696.0, "3480": 12697245696.0, "3485": 12697245696.0, "3490": 12697245696.0, "3495": 12697245696.0, "3500": 12697245696.0, "3505": 12697245696.0, "3510": 12697245696.0, "3515": 12697245696.0, "3520": 
12697245696.0, "3525": 12697245696.0, "3530": 12697245696.0, "3535": 12697245696.0, "3540": 12697245696.0, "3545": 12697245696.0, "3550": 12697245696.0, "3555": 12697245696.0, "3560": 12697245696.0, "3565": 12697444352.0, "3570": 12697444352.0, "3575": 12697444352.0, "3580": 12697444352.0, "3585": 12697444352.0, "3590": 12697444352.0, "3595": 12697444352.0, "3600": 12697444352.0, "3605": 12697444352.0, "3610": 12697444352.0, "3615": 12697444352.0, "3620": 12697444352.0, "3625": 12697444352.0, "3630": 12697444352.0, "3635": 12697444352.0, "3640": 12697444352.0, "3645": 12697444352.0, "3650": 12697444352.0, "3655": 12697444352.0, "3660": 12697444352.0, "3665": 12697444352.0, "3670": 12697444352.0, "3675": 12697444352.0, "3680": 12697444352.0, "3685": 12697444352.0, "3690": 12697444352.0, "3695": 12697444352.0, "3700": 12697444352.0, "3705": 12697444352.0, "3710": 12697444352.0, "3715": 12697444352.0, "3720": 12697444352.0, "3725": 12697444352.0, "3730": 12697444352.0, "3735": 12697444352.0, "3740": 12697444352.0, "3745": 12697444352.0, "3750": 12697444352.0, "3755": 12697444352.0, "3760": 12697444352.0, "3765": 12697444352.0, "3770": 12697444352.0, "3775": 12697444352.0, "3780": 12697444352.0, "3785": 12697444352.0, "3790": 12697444352.0, "3795": 12697444352.0, "3800": 12697444352.0, "3805": 12697444352.0, "3810": 12697444352.0, "3815": 12697444352.0, "3820": 12697444352.0, "3825": 12697444352.0, "3830": 12697444352.0, "3835": 12697444352.0, "3840": 12697444352.0, "3845": 12697444352.0, "3850": 12697444352.0, "3855": 12697444352.0, "3860": 12697444352.0, "3865": 12697444352.0, "3870": 12697444352.0, "3875": 12697444352.0, "3880": 12697444352.0, "3885": 12697444352.0, "3890": 12697444352.0, "3895": 12697444352.0, "3900": 12697444352.0, "3905": 12697444352.0, "3910": 12697444352.0, "3915": 12697444352.0, "3920": 12697444352.0, "3925": 12697444352.0, "3930": 12697444352.0, "3935": 12697444352.0, "3940": 12697444352.0, "3945": 12697444352.0, "3950": 12697444352.0, "3955": 12697444352.0, "3960": 12697444352.0, "3965": 12697444352.0, "3970": 12697444352.0, "3975": 12697444352.0, "3980": 12697444352.0, "3985": 12697444352.0, "3990": 12697444352.0, "3995": 12697444352.0, "4000": 12697444352.0, "4005": 12697444352.0, "4010": 12697444352.0, "4015": 12697444352.0, "4020": 12697444352.0, "4025": 12697444352.0, "4030": 12697444352.0, "4035": 12697444352.0, "4040": 12697444352.0, "4045": 12697444352.0, "4050": 12697444352.0, "4055": 12697444352.0, "4060": 12697444352.0, "4065": 12697444352.0, "4070": 12697444352.0, "4075": 12697444352.0, "4080": 12697444352.0, "4085": 12697444352.0, "4090": 12697444352.0, "4095": 12697444352.0, "4100": 12697444352.0, "4105": 12697444352.0, "4110": 12697444352.0, "4115": 12697444352.0, "4120": 12697444352.0, "4125": 12697444352.0, "4130": 12697444352.0, "4135": 12697444352.0, "4140": 12697444352.0, "4145": 12697444352.0, "4150": 12697444352.0, "4155": 12697444352.0, "4160": 12697444352.0, "4165": 12697444352.0, "4170": 12697444352.0, "4175": 12697444352.0, "4180": 12697444352.0, "4185": 12697444352.0, "4190": 12697444352.0, "4195": 12697444352.0, "4200": 12697444352.0, "4205": 12697444352.0, "4210": 12697444352.0, "4215": 12697444352.0, "4220": 12697444352.0, "4225": 12697444352.0, "4230": 12697444352.0, "4235": 12697444352.0, "4240": 12697444352.0, "4245": 12697444352.0, "4250": 12697444352.0, "4255": 12697444352.0, "4260": 12697444352.0, "4265": 12697444352.0, "4270": 12697444352.0, "4275": 12697444352.0, "4280": 12697444352.0, "4285": 12697444352.0, "4290": 
12697444352.0, "4295": 12697444352.0, "4300": 12697444352.0, "4305": 12697444352.0, "4310": 12697444352.0, "4315": 12697444352.0, "4320": 12697444352.0, "4325": 12697444352.0, "4330": 12697444352.0, "4335": 12697444352.0, "4340": 12697444352.0, "4345": 12697444352.0, "4350": 12697444352.0, "4355": 12697444352.0, "4360": 12697444352.0, "4365": 12697444352.0, "4370": 12697444352.0, "4375": 12697444352.0, "4380": 12697444352.0, "4385": 12697444352.0, "4390": 12697444352.0, "4395": 12697444352.0, "4400": 12697444352.0, "4405": 12697444352.0, "4410": 12697444352.0, "4415": 12697444352.0, "4420": 12697444352.0, "4425": 12697444352.0, "4430": 12697444352.0, "4435": 12697444352.0, "4440": 12697444352.0, "4445": 12697444352.0, "4450": 12697444352.0, "4455": 12697444352.0, "4460": 12697444352.0, "4465": 12697444352.0, "4470": 12697444352.0, "4475": 12697444352.0, "4480": 12697444352.0, "4485": 12697444352.0, "4490": 12697444352.0, "4495": 12697444352.0, "4500": 12697444352.0, "4505": 12697444352.0, "4510": 12697444352.0, "4515": 12697444352.0, "4520": 12697444352.0, "4525": 12697444352.0, "4530": 12697444352.0, "4535": 12697444352.0, "4540": 12697444352.0, "4545": 12697444352.0, "4550": 12697444352.0, "4555": 12697444352.0, "4560": 12697444352.0, "4565": 12697444352.0, "4570": 12697444352.0, "4575": 12697444352.0, "4580": 12697444352.0, "4585": 12697444352.0, "4590": 12697444352.0, "4595": 12697444352.0, "4600": 12697444352.0, "4605": 12697444352.0, "4610": 12697444352.0, "4615": 12697444352.0, "4620": 12697444352.0, "4625": 12697444352.0, "4630": 12697444352.0, "4635": 12697444352.0, "4640": 12697444352.0, "4645": 12697444352.0, "4650": 12697444352.0, "4655": 12697444352.0, "4660": 12697444352.0, "4665": 12697444352.0, "4670": 12697444352.0, "4675": 12697444352.0, "4680": 12697444352.0, "4685": 12697444352.0, "4690": 12697444352.0, "4695": 12697444352.0, "4700": 12697444352.0, "4705": 12697444352.0, "4710": 12697444352.0, "4715": 12697444352.0, "4720": 12697444352.0, "4725": 12697444352.0, "4730": 12697444352.0, "4735": 12697444352.0, "4740": 12697444352.0, "4745": 12697444352.0, "4750": 12697444352.0, "4755": 12697444352.0, "4760": 12697444352.0, "4765": 12697444352.0, "4770": 12697444352.0, "4775": 12697444352.0, "4780": 12697444352.0, "4785": 12697444352.0, "4790": 12697444352.0, "4795": 12697444352.0, "4800": 12697444352.0, "4805": 12697444352.0, "4810": 12697444352.0, "4815": 12697444352.0, "4820": 12697444352.0, "4825": 12697444352.0, "4830": 12697444352.0, "4835": 12697444352.0, "4840": 12697444352.0, "4845": 12697444352.0, "4850": 12697444352.0, "4855": 12697444352.0, "4860": 12697444352.0, "4865": 12697444352.0, "4870": 12697444352.0, "4875": 12697444352.0, "4880": 12697444352.0, "4885": 12697444352.0, "4890": 12697444352.0, "4895": 12697444352.0, "4900": 12697444352.0, "4905": 12697444352.0, "4910": 12697444352.0, "4915": 12697444352.0, "4920": 12697444352.0, "4925": 12697444352.0, "4930": 12697444352.0, "4935": 12697444352.0, "4940": 12697444352.0, "4945": 12697444352.0, "4950": 12697444352.0, "4955": 12697444352.0, "4960": 12697444352.0, "4965": 12697444352.0, "4970": 12697444352.0, "4975": 12697444352.0, "4980": 12697444352.0, "4985": 12697444352.0, "4990": 12697444352.0, "4995": 12697444352.0, "5000": 12697444352.0, "5005": 12697444352.0, "5010": 12697444352.0, "5015": 12697444352.0, "5020": 12697444352.0, "5025": 12697444352.0, "5030": 12697444352.0, "5035": 12697444352.0, "5040": 12697444352.0, "5045": 12697444352.0, "5050": 12697444352.0, "5055": 12697444352.0, "5060": 
12697444352.0, "5065": 12697444352.0, "5070": 12697444352.0, "5075": 12697444352.0, "5080": 12697444352.0, "5085": 12697444352.0, "5090": 12697444352.0, "5095": 12697444352.0, "5100": 12697444352.0, "5105": 12697444352.0, "5110": 12697444352.0, "5115": 12697444352.0, "5120": 12697444352.0, "5125": 12697444352.0, "5130": 12697444352.0, "5135": 12697444352.0, "5140": 12697444352.0, "5145": 12697444352.0, "5150": 12697444352.0, "5155": 12697444352.0, "5160": 12697444352.0, "5165": 12697444352.0, "5170": 12697444352.0, "5175": 12697444352.0, "5180": 12697444352.0, "5185": 12697444352.0, "5190": 12697444352.0, "5195": 12697444352.0, "5200": 12697444352.0, "5205": 12697444352.0, "5210": 12697444352.0, "5215": 12697444352.0, "5220": 12697444352.0, "5225": 12697444352.0, "5230": 12697444352.0, "5235": 12697444352.0, "5240": 12697444352.0, "5245": 12697444352.0, "5250": 12697444352.0, "5255": 12697444352.0, "5260": 12697444352.0, "5265": 12697444352.0, "5270": 12697444352.0, "5275": 12697444352.0, "5280": 12697444352.0, "5285": 12697444352.0, "5290": 12697444352.0, "5295": 12697444352.0, "5300": 12697444352.0, "5305": 12697444352.0, "5310": 12697444352.0, "5315": 12697444352.0, "5320": 12697444352.0, "5325": 12697444352.0, "5330": 12697444352.0, "5335": 12697444352.0, "5340": 12697444352.0, "5345": 12697444352.0, "5350": 12697444352.0, "5355": 12697444352.0, "5360": 12697444352.0, "5365": 12697444352.0, "5370": 12697444352.0, "5375": 12697444352.0, "5380": 12697444352.0, "5385": 12697444352.0, "5390": 12697444352.0, "5395": 12697444352.0, "5400": 12697444352.0, "5405": 12697444352.0, "5410": 12697444352.0, "5415": 12697444352.0, "5420": 12697444352.0, "5425": 12697444352.0, "5430": 12697444352.0, "5435": 12697444352.0, "5440": 12697444352.0, "5445": 12697444352.0, "5450": 12697444352.0, "5455": 12697444352.0, "5460": 12697444352.0, "5465": 12697444352.0, "5470": 12697444352.0, "5475": 12697444352.0, "5480": 12697444352.0, "5485": 12697444352.0, "5490": 12697444352.0, "5495": 12697444352.0, "5500": 12697444352.0, "5505": 12697444352.0, "5510": 12697444352.0, "5515": 12697444352.0, "5520": 12697444352.0, "5525": 12697444352.0, "5530": 12697444352.0, "5535": 12697444352.0, "5540": 12697444352.0, "5545": 12697444352.0, "5550": 12697444352.0, "5555": 12697444352.0, "5560": 12697444352.0, "5565": 12697444352.0, "5570": 12697444352.0, "5575": 12697444352.0, "5580": 12697444352.0, "5585": 12697444352.0, "5590": 12697444352.0, "5595": 12697444352.0, "5600": 12697444352.0, "5605": 12697444352.0, "5610": 12697444352.0, "5615": 12697444352.0, "5620": 12697444352.0, "5625": 12697444352.0, "5630": 12697444352.0, "5635": 12697444352.0, "5640": 12697444352.0, "5645": 12697444352.0, "5650": 12697444352.0, "5655": 12697444352.0, "5660": 12697444352.0, "5665": 12697444352.0, "5670": 12697444352.0, "5675": 12697444352.0, "5680": 12697444352.0, "5685": 12697444352.0, "5690": 12697444352.0, "5695": 12697444352.0, "5700": 12697444352.0, "5705": 12697444352.0, "5710": 12697444352.0, "5715": 12697444352.0, "5720": 12697444352.0, "5725": 12697444352.0, "5730": 12697444352.0, "5735": 12697444352.0, "5740": 12697444352.0, "5745": 12697444352.0, "5750": 12697444352.0, "5755": 12697444352.0, "5760": 12697444352.0, "5765": 12697444352.0, "5770": 12697444352.0, "5775": 12697444352.0, "5780": 12697444352.0, "5785": 12697444352.0, "5790": 12697444352.0, "5795": 12697444352.0, "5800": 12697444352.0, "5805": 12697444352.0, "5810": 12697444352.0, "5815": 12697444352.0, "5820": 12697444352.0, "5825": 12697444352.0, "5830": 
12697444352.0, "5835": 12697444352.0, "5840": 12697444352.0, "5845": 12697444352.0, "5850": 12697444352.0, "5855": 12697444352.0, "5860": 12697444352.0, "5865": 12697444352.0, "5870": 12697444352.0, "5875": 12697444352.0, "5880": 12697444352.0, "5885": 12697444352.0, "5890": 12697444352.0, "5895": 12697444352.0, "5900": 12697444352.0, "5905": 12697444352.0, "5910": 12697444352.0, "5915": 12697444352.0, "5920": 12697444352.0, "5925": 12697444352.0, "5930": 12697444352.0, "5935": 12697444352.0, "5940": 12697444352.0, "5945": 12697444352.0, "5950": 12697444352.0, "5955": 12697444352.0, "5960": 12697444352.0, "5965": 12697444352.0, "5970": 12697444352.0, "5975": 12697444352.0, "5980": 12697444352.0, "5985": 12697444352.0, "5990": 12697444352.0, "5995": 12697444352.0, "6000": 12697444352.0, "6005": 12697444352.0, "6010": 12697444352.0, "6015": 12697444352.0, "6020": 12697444352.0, "6025": 12697444352.0, "6030": 12697444352.0, "6035": 12697444352.0, "6040": 12697444352.0, "6045": 12697444352.0, "6050": 12697444352.0, "6055": 12697444352.0, "6060": 12697444352.0, "6065": 12697444352.0, "6070": 12697444352.0, "6075": 12697444352.0, "6080": 12697444352.0, "6085": 12697444352.0, "6090": 12697444352.0, "6095": 12697444352.0, "6100": 12697444352.0, "6105": 12697444352.0, "6110": 12697444352.0, "6115": 12697444352.0, "6120": 12697444352.0, "6125": 12697444352.0, "6130": 12697444352.0, "6135": 12697444352.0, "6140": 12697444352.0, "6145": 12697444352.0, "6150": 12697444352.0, "6155": 12697444352.0, "6160": 12697444352.0, "6165": 12697444352.0, "6170": 12697444352.0, "6175": 12697444352.0, "6180": 12697444352.0, "6185": 12697444352.0, "6190": 12697444352.0, "6195": 12697444352.0, "6200": 12697444352.0, "6205": 12697444352.0, "6210": 12697444352.0, "6215": 12697444352.0, "6220": 12697444352.0, "6225": 12697444352.0, "6230": 12697444352.0, "6235": 12697444352.0, "6240": 12697444352.0, "6245": 12697444352.0, "6250": 12697444352.0, "6255": 12697444352.0, "6260": 12697444352.0, "6265": 12697444352.0, "6270": 12697444352.0, "6275": 12697444352.0, "6280": 12697444352.0, "6285": 12697444352.0, "6290": 12697444352.0, "6295": 12697444352.0, "6300": 12697444352.0, "6305": 12697444352.0, "6310": 12697444352.0, "6315": 12697444352.0, "6320": 12697444352.0, "6325": 12697444352.0, "6330": 12697444352.0, "6335": 12697444352.0, "6340": 12697444352.0, "6345": 12697444352.0, "6350": 12697444352.0, "6355": 12697444352.0, "6360": 12697444352.0, "6365": 12697444352.0, "6370": 12697444352.0, "6375": 12697444352.0, "6380": 12697444352.0, "6385": 12697444352.0, "6390": 12697444352.0, "6395": 12697444352.0, "6400": 12697444352.0, "6405": 12697444352.0, "6410": 12697444352.0, "6415": 12697444352.0, "6420": 12697444352.0, "6425": 12697444352.0, "6430": 12697444352.0, "6435": 12697444352.0, "6440": 12697444352.0, "6445": 12697444352.0, "6450": 12697444352.0, "6455": 12697444352.0, "6460": 12697444352.0, "6465": 12697444352.0, "6470": 12697444352.0, "6475": 12697444352.0, "6480": 12697444352.0, "6485": 12697444352.0, "6490": 12697444352.0, "6495": 12697444352.0, "6500": 12697444352.0, "6505": 12697444352.0, "6510": 12697444352.0, "6515": 12697444352.0, "6520": 12697444352.0, "6525": 12697444352.0, "6530": 12697444352.0, "6535": 12697444352.0, "6540": 12697444352.0, "6545": 12697444352.0, "6550": 12697444352.0, "6555": 12697444352.0, "6560": 12697444352.0, "6565": 12697444352.0, "6570": 12697444352.0, "6575": 12697444352.0, "6580": 12697444352.0, "6585": 12697444352.0, "6590": 12697444352.0, "6595": 12697444352.0, "6600": 
12697444352.0, "6605": 12697444352.0, "6610": 12697444352.0, "6615": 12697444352.0, "6620": 12697444352.0, "6625": 12697444352.0, "6630": 12697444352.0, "6635": 12697444352.0, "6640": 12697444352.0, "6645": 12697444352.0, "6650": 12697444352.0, "6655": 12697444352.0, "6660": 12697444352.0, "6665": 12697444352.0, "6670": 12697444352.0, "6675": 12697444352.0, "6680": 12697444352.0, "6685": 12697444352.0, "6690": 12697444352.0, "6695": 12697444352.0, "6700": 12697444352.0, "6705": 12697444352.0, "6710": 12697444352.0, "6715": 12697444352.0, "6720": 12697444352.0, "6725": 12697444352.0, "6730": 12697444352.0, "6735": 12697444352.0, "6740": 12697444352.0, "6745": 12697444352.0, "6750": 12697444352.0, "6755": 12697444352.0, "6760": 12697444352.0, "6765": 12697444352.0, "6770": 12697444352.0, "6775": 12697444352.0, "6780": 12697444352.0, "6785": 12697444352.0, "6790": 12697444352.0, "6795": 12697444352.0, "6800": 12697444352.0, "6805": 12697444352.0, "6810": 12697444352.0, "6815": 12697444352.0, "6820": 12697444352.0, "6825": 12697444352.0, "6830": 12697444352.0, "6835": 12697444352.0, "6840": 12697444352.0, "6845": 12697444352.0, "6850": 12697444352.0, "6855": 12697444352.0, "6860": 12697444352.0, "6865": 12697444352.0, "6870": 12697444352.0, "6875": 12697444352.0, "6880": 12697444352.0, "6885": 12697444352.0, "6890": 12697444352.0, "6895": 12697444352.0, "6900": 12697444352.0, "6905": 12697444352.0, "6910": 12697444352.0, "6915": 12697444352.0, "6920": 12697444352.0, "6925": 12697444352.0, "6930": 12697444352.0, "6935": 12697444352.0, "6940": 12697444352.0, "6945": 12697444352.0, "6950": 12697444352.0, "6955": 12697444352.0, "6960": 12697444352.0, "6965": 12697444352.0, "6970": 12697444352.0, "6975": 12697444352.0, "6980": 12697444352.0, "6985": 12697444352.0, "6990": 12697444352.0, "6995": 12697444352.0, "7000": 12697444352.0, "7005": 12697444352.0, "7010": 12697444352.0, "7015": 12697444352.0, "7020": 12697444352.0, "7025": 12697444352.0, "7030": 12697444352.0, "7035": 12697444352.0, "7040": 12697444352.0, "7045": 12697444352.0, "7050": 12697444352.0, "7055": 12697444352.0, "7060": 12697444352.0, "7065": 12697444352.0, "7070": 12697444352.0, "7075": 12697444352.0, "7080": 12697444352.0, "7085": 12697444352.0, "7090": 12697444352.0, "7095": 12697444352.0, "7100": 12697444352.0, "7105": 12697444352.0, "7110": 12697444352.0, "7115": 12697444352.0, "7120": 12697444352.0, "7125": 12697444352.0, "7130": 12697444352.0, "7135": 12697444352.0, "7140": 12697444352.0, "7145": 12697444352.0, "7150": 12697444352.0, "7155": 12697444352.0, "7160": 12697444352.0, "7165": 12697444352.0, "7170": 12697444352.0, "7175": 12697444352.0, "7180": 12697444352.0, "7185": 12697444352.0, "7190": 12697444352.0, "7195": 12697444352.0, "7200": 12697444352.0, "7205": 12697444352.0, "7210": 12697444352.0, "7215": 12697444352.0, "7220": 12697444352.0, "7225": 12697444352.0, "7230": 12697444352.0, "7235": 12697444352.0, "7240": 12697444352.0, "7245": 12697444352.0, "7250": 12697444352.0, "7255": 12697444352.0, "7260": 12697444352.0, "7265": 12697444352.0, "7270": 12697444352.0, "7275": 12697444352.0, "7280": 12697444352.0, "7285": 12697444352.0, "7290": 12697444352.0, "7295": 12697444352.0, "7300": 12697444352.0, "7305": 12697444352.0, "7310": 12697444352.0, "7315": 12697444352.0, "7320": 12697444352.0, "7325": 12697444352.0, "7330": 12697444352.0, "7335": 12697444352.0, "7340": 12697444352.0, "7345": 12697444352.0, "7350": 12697444352.0, "7355": 12697444352.0, "7360": 12697444352.0, "7365": 12697444352.0, "7370": 
12697444352.0, "7375": 12697444352.0, "7380": 12697444352.0, "7385": 12697444352.0, "7390": 12697444352.0, "7395": 12697444352.0, "7400": 12697444352.0, "7405": 12697444352.0, "7410": 12697444352.0, "7415": 12697444352.0, "7420": 12697444352.0, "7425": 12697444352.0, "7430": 12697444352.0, "7435": 12697444352.0, "7440": 12697444352.0, "7445": 12697444352.0, "7450": 12697444352.0, "7455": 12697444352.0, "7460": 12697444352.0, "7465": 12697444352.0, "7470": 12697444352.0, "7475": 12697444352.0, "7480": 12697444352.0, "7485": 12697444352.0, "7490": 12697444352.0, "7495": 12697444352.0, "7500": 12697444352.0, "7505": 12697444352.0, "7510": 12697444352.0, "7515": 12697444352.0, "7520": 12697444352.0, "7525": 12697444352.0, "7530": 12697444352.0, "7535": 12697444352.0, "7540": 12697444352.0, "7545": 12697444352.0, "7550": 12697444352.0, "7555": 12697444352.0, "7560": 12697444352.0, "7565": 12697444352.0, "7570": 12697444352.0, "7575": 12697444352.0, "7580": 12697444352.0, "7585": 12697444352.0, "7590": 12697444352.0, "7595": 12697444352.0, "7600": 12697444352.0, "7605": 12697444352.0, "7610": 12697444352.0, "7615": 12697444352.0, "7620": 12697444352.0, "7625": 12697444352.0, "7630": 12697444352.0, "7635": 12697444352.0, "7640": 12697444352.0, "7645": 12697444352.0, "7650": 12697444352.0, "7655": 12697444352.0, "7660": 12697444352.0, "7665": 12697444352.0, "7670": 12697444352.0, "7675": 12697444352.0, "7680": 12697444352.0, "7685": 12697444352.0, "7690": 12697444352.0, "7695": 12697444352.0, "7700": 12697444352.0, "7705": 12697444352.0, "7710": 12697444352.0, "7715": 12697444352.0, "7720": 12697444352.0, "7725": 12697444352.0, "7730": 12697444352.0, "7735": 12697444352.0, "7740": 12697444352.0, "7745": 12697444352.0, "7750": 12697444352.0, "7755": 12697444352.0, "7760": 12697444352.0, "7765": 12697444352.0, "7770": 12697444352.0, "7775": 12697444352.0, "7780": 12697444352.0, "7785": 12697444352.0, "7790": 12697444352.0, "7795": 12697444352.0, "7800": 12697444352.0, "7805": 12697444352.0, "7810": 12697444352.0, "7815": 12697444352.0, "7820": 12697444352.0, "7825": 12697444352.0, "7830": 12697444352.0, "7835": 12697444352.0, "7840": 12697444352.0, "7845": 12697444352.0, "7850": 12697444352.0, "7855": 12697444352.0, "7860": 12697444352.0, "7865": 12697444352.0, "7870": 12697444352.0, "7875": 12697444352.0, "7880": 12697444352.0, "7885": 12697444352.0, "7890": 12697444352.0, "7895": 12697444352.0, "7900": 12697444352.0, "7905": 12697444352.0, "7910": 12697444352.0, "7915": 12697444352.0, "7920": 12697444352.0, "7925": 12697444352.0, "7930": 12697444352.0, "7935": 12697444352.0, "7940": 12697444352.0, "7945": 12697444352.0, "7950": 12697444352.0, "7955": 12697444352.0, "7960": 12697444352.0, "7965": 12697444352.0, "7970": 12697444352.0, "7975": 12697444352.0, "7980": 12697444352.0, "7985": 12697444352.0, "7990": 12697444352.0, "7995": 12697444352.0, "8000": 12697444352.0, "8005": 12697444352.0, "8010": 12697444352.0, "8015": 12697444352.0, "8020": 12697444352.0, "8025": 12697444352.0, "8030": 12697444352.0, "8035": 12697444352.0, "8040": 12697444352.0, "8045": 12697444352.0, "8050": 12697444352.0, "8055": 12697444352.0, "8060": 12697444352.0, "8065": 12697444352.0, "8070": 12697444352.0, "8075": 12697444352.0, "8080": 12697444352.0, "8085": 12697444352.0, "8090": 12697444352.0, "8095": 12697444352.0, "8100": 12697444352.0, "8105": 12697444352.0, "8110": 12697444352.0, "8115": 12697444352.0, "8120": 12697444352.0, "8125": 12697444352.0, "8130": 12697444352.0, "8135": 12697444352.0, "8140": 
12697444352.0, "8145": 12697444352.0, "8150": 12697444352.0, "8155": 12697444352.0, "8160": 12697444352.0, "8165": 12697444352.0, "8170": 12697444352.0, "8175": 12697444352.0, "8180": 12697444352.0, "8185": 12697444352.0, "8190": 12697444352.0, "8195": 12697444352.0, "8200": 12697444352.0, "8205": 12697444352.0, "8210": 12697444352.0, "8215": 12697444352.0, "8220": 12697444352.0, "8225": 12697444352.0, "8230": 12697444352.0, "8235": 12697444352.0, "8240": 12697444352.0, "8245": 12697444352.0, "8250": 12697444352.0, "8255": 12697444352.0, "8260": 12697444352.0, "8265": 12697444352.0, "8270": 12697444352.0, "8275": 12697444352.0, "8280": 12697444352.0, "8285": 12697444352.0, "8290": 12697444352.0, "8295": 12697444352.0, "8300": 12697444352.0, "8305": 12697444352.0, "8310": 12697444352.0, "8315": 12697444352.0, "8320": 12697444352.0, "8325": 12697444352.0, "8330": 12697444352.0, "8335": 12697444352.0, "8340": 12697444352.0, "8345": 12697444352.0, "8350": 12697444352.0, "8355": 12697444352.0, "8360": 12697444352.0, "8365": 12697444352.0, "8370": 12697444352.0, "8375": 12697444352.0, "8380": 12697444352.0, "8385": 12697444352.0, "8390": 12697444352.0, "8395": 12697444352.0, "8400": 12697444352.0, "8405": 12697444352.0, "8410": 12697444352.0, "8415": 12697444352.0, "8420": 12697444352.0, "8425": 12697444352.0, "8430": 12697444352.0, "8435": 12697444352.0, "8440": 12697444352.0, "8445": 12697444352.0, "8450": 12697444352.0, "8455": 12697444352.0, "8460": 12697444352.0, "8465": 12697444352.0, "8470": 12697444352.0, "8475": 12697444352.0, "8480": 12697444352.0, "8485": 12697444352.0, "8490": 12697444352.0, "8495": 12697444352.0, "8500": 12697444352.0, "8505": 12697444352.0, "8510": 12697444352.0, "8515": 12697444352.0, "8520": 12697444352.0, "8525": 12697444352.0, "8530": 12697444352.0, "8535": 12697444352.0, "8540": 12697444352.0, "8545": 12697444352.0, "8550": 12697444352.0, "8555": 12697444352.0, "8560": 12697444352.0, "8565": 12697444352.0, "8570": 12697444352.0, "8575": 12697444352.0, "8580": 12697444352.0, "8585": 12697444352.0, "8590": 12697444352.0, "8595": 12697444352.0, "8600": 12697444352.0, "8605": 12697444352.0, "8610": 12697444352.0, "8615": 12697444352.0, "8620": 12697444352.0, "8625": 12697444352.0, "8630": 12697444352.0, "8635": 12697444352.0, "8640": 12697444352.0, "8645": 12697444352.0, "8650": 12697444352.0, "8655": 12697444352.0, "8660": 12697444352.0, "8665": 12697444352.0, "8670": 12697444352.0, "8675": 12697444352.0, "8680": 12697444352.0, "8685": 12697444352.0, "8690": 12697444352.0, "8695": 12697444352.0, "8700": 12697444352.0, "8705": 12697444352.0, "8710": 12697444352.0, "8715": 12697444352.0, "8720": 12697444352.0, "8725": 12697444352.0, "8730": 12697444352.0, "8735": 12697444352.0, "8740": 12697444352.0, "8745": 12697444352.0, "8750": 12697444352.0, "8755": 12697444352.0, "8760": 12697444352.0, "8765": 12697444352.0, "8770": 12697444352.0, "8775": 12697444352.0, "8780": 12697444352.0, "8785": 12697444352.0, "8790": 12697444352.0, "8795": 12697444352.0, "8800": 12697444352.0, "8805": 12697444352.0, "8810": 12697444352.0, "8815": 12697444352.0, "8820": 12697444352.0, "8825": 12697444352.0, "8830": 12697444352.0, "8835": 12697444352.0, "8840": 12697444352.0, "8845": 12697444352.0, "8850": 12697444352.0, "8855": 12697444352.0, "8860": 12697444352.0, "8865": 12697444352.0, "8870": 12697444352.0, "8875": 12697444352.0, "8880": 12697444352.0, "8885": 12697444352.0, "8890": 12697444352.0, "8895": 12697444352.0, "8900": 12697444352.0, "8905": 12697444352.0, "8910": 
12697444352.0, "8915": 12697444352.0, "8920": 12697444352.0, "8925": 12697444352.0, "8930": 12697444352.0, "8935": 12697444352.0, "8940": 12697444352.0, "8945": 12697444352.0, "8950": 12697444352.0, "8955": 12697444352.0, "8960": 12697444352.0, "8965": 12697444352.0, "8970": 12697444352.0, "8975": 12697444352.0, "8980": 12697444352.0, "8985": 12697444352.0, "8990": 12697444352.0, "8995": 12697444352.0, "9000": 12697444352.0, "9005": 12697444352.0, "9010": 12697444352.0, "9015": 12697444352.0, "9020": 12697444352.0, "9025": 12697444352.0, "9030": 12697444352.0, "9035": 12697444352.0, "9040": 12697444352.0, "9045": 12697444352.0, "9050": 12697444352.0, "9055": 12697444352.0, "9060": 12697444352.0, "9065": 12697444352.0, "9070": 12697444352.0, "9075": 12697444352.0, "9080": 12697444352.0, "9085": 12697444352.0, "9090": 12697444352.0, "9095": 12697444352.0, "9100": 12697444352.0, "9105": 12697444352.0, "9110": 12697444352.0, "9115": 12697444352.0, "9120": 12697444352.0, "9125": 12697444352.0, "9130": 12697444352.0, "9135": 12697444352.0, "9140": 12697444352.0, "9145": 12697444352.0, "9150": 12697444352.0, "9155": 12697444352.0, "9160": 12697444352.0, "9165": 12697444352.0, "9170": 12697444352.0, "9175": 12697444352.0, "9180": 12697444352.0, "9185": 12697444352.0, "9190": 12697444352.0, "9195": 12697444352.0, "9200": 12697444352.0, "9205": 12697444352.0, "9210": 12697444352.0, "9215": 12697444352.0, "9220": 12697444352.0, "9225": 12697444352.0, "9230": 12697444352.0, "9235": 12697444352.0, "9240": 12697444352.0, "9245": 12697444352.0, "9250": 12697444352.0, "9255": 12697444352.0, "9260": 12697444352.0, "9265": 12697444352.0, "9270": 12697444352.0, "9275": 12697444352.0, "9280": 12697444352.0, "9285": 12697444352.0, "9290": 12697444352.0, "9295": 12697444352.0, "9300": 12697444352.0, "9305": 12697444352.0, "9310": 12697444352.0, "9315": 12697444352.0, "9320": 12697444352.0, "9325": 12697444352.0, "9330": 12697444352.0, "9335": 12697444352.0, "9340": 12697444352.0, "9345": 12697444352.0, "9350": 12697444352.0, "9355": 12697444352.0, "9360": 12697444352.0, "9365": 12697444352.0, "9370": 12697444352.0, "9375": 12697444352.0, "9380": 12697444352.0, "9385": 12697444352.0, "9390": 12697444352.0, "9395": 12697444352.0, "9400": 12697444352.0, "9405": 12697444352.0, "9410": 12697444352.0, "9415": 12697444352.0, "9420": 12697444352.0, "9425": 12697444352.0, "9430": 12697444352.0, "9435": 12697444352.0, "9440": 12697444352.0, "9445": 12697444352.0, "9450": 12697444352.0, "9455": 12697444352.0, "9460": 12697444352.0, "9465": 12697444352.0, "9470": 12697444352.0, "9475": 12697444352.0, "9480": 12697444352.0, "9485": 12697444352.0, "9490": 12697444352.0, "9495": 12697444352.0, "9500": 12697444352.0, "9505": 12697444352.0, "9510": 12697444352.0, "9515": 12697444352.0, "9520": 12697444352.0, "9525": 12697444352.0, "9530": 12697444352.0, "9535": 12697444352.0, "9540": 12697444352.0, "9545": 12697444352.0, "9550": 12697444352.0, "9555": 12697444352.0, "9560": 12697444352.0, "9565": 12697444352.0, "9570": 12697444352.0, "9575": 12697444352.0, "9580": 12697444352.0, "9585": 12697444352.0, "9590": 12697444352.0, "9595": 12697444352.0, "9600": 12697444352.0, "9605": 12697444352.0, "9610": 12697444352.0, "9615": 12697444352.0, "9620": 12697444352.0, "9625": 12697444352.0, "9630": 12697444352.0, "9635": 12697444352.0, "9640": 12697444352.0, "9645": 12697444352.0, "9650": 12697444352.0, "9655": 12697444352.0, "9660": 12697444352.0, "9665": 12697444352.0, "9670": 12697444352.0, "9675": 12697444352.0, "9680": 
12697444352.0, "9685": 12697444352.0, "9690": 12697444352.0, "9695": 12697444352.0, "9700": 12697444352.0, "9705": 12697444352.0, "9710": 12697444352.0, "9715": 12697444352.0, "9720": 12697444352.0, "9725": 12697444352.0, "9730": 12697444352.0, "9735": 12697444352.0, "9740": 12697444352.0, "9745": 12697444352.0, "9750": 12697444352.0, "9755": 12697444352.0, "9760": 12697444352.0, "9765": 12697444352.0, "9770": 12697444352.0, "9775": 12697444352.0, "9780": 12697444352.0, "9785": 12697444352.0, "9790": 12697444352.0, "9795": 12697444352.0, "9800": 12697444352.0, "9805": 12697444352.0, "9810": 12697444352.0, "9815": 12697444352.0, "9820": 12697444352.0, "9825": 12697444352.0, "9830": 12697444352.0, "9835": 12697444352.0, "9840": 12697444352.0, "9845": 12697444352.0, "9850": 12697444352.0, "9855": 12697444352.0, "9860": 12697444352.0, "9865": 12697444352.0, "9870": 12697444352.0, "9875": 12697444352.0, "9880": 12697444352.0, "9885": 12697444352.0, "9890": 12697444352.0, "9895": 12697444352.0, "9900": 12697444352.0, "9905": 12697444352.0, "9910": 12697444352.0, "9915": 12697444352.0, "9920": 12697444352.0, "9925": 12697444352.0, "9930": 12697444352.0, "9935": 12697444352.0, "9940": 12697444352.0, "9945": 12697444352.0, "9950": 12697444352.0, "9955": 12697444352.0, "9960": 12697444352.0, "9965": 12697444352.0, "9970": 12697444352.0, "9975": 12697444352.0, "9980": 12697444352.0, "9985": 12697444352.0, "9990": 12697444352.0, "9995": 12697444352.0, "10000": 12697444352.0, "10005": 12697444352.0, "10010": 12697444352.0, "10015": 12697444352.0, "10020": 12697444352.0, "10025": 12697444352.0, "10030": 12697444352.0, "10035": 12697444352.0, "10040": 12697444352.0, "10045": 12697444352.0, "10050": 12697444352.0, "10055": 12697444352.0, "10060": 12697444352.0, "10065": 12697444352.0, "10070": 12697444352.0, "10075": 12697444352.0, "10080": 12697444352.0, "10085": 12697444352.0, "10090": 12697444352.0, "10095": 12697444352.0, "10100": 12697444352.0, "10105": 12697444352.0, "10110": 12697444352.0, "10115": 12697444352.0, "10120": 12697444352.0, "10125": 12697444352.0, "10130": 12697444352.0, "10135": 12697444352.0, "10140": 12697444352.0, "10145": 12697444352.0, "10150": 12697444352.0, "10155": 12697444352.0, "10160": 12697444352.0, "10165": 12697444352.0, "10170": 12697444352.0, "10175": 12697444352.0, "10180": 12697444352.0, "10185": 12697444352.0, "10190": 12697444352.0, "10195": 12697444352.0, "10200": 12697444352.0, "10205": 12697444352.0, "10210": 12697444352.0, "10215": 12697444352.0, "10220": 12697444352.0, "10225": 12697444352.0, "10230": 12697444352.0, "10235": 12697444352.0, "10240": 12697444352.0, "10245": 12697444352.0, "10250": 12697444352.0, "10255": 12697444352.0, "10260": 12697444352.0, "10265": 12697444352.0, "10270": 12697444352.0, "10275": 12697444352.0, "10280": 12697444352.0, "10285": 12697444352.0, "10290": 12697444352.0, "10295": 12697444352.0, "10300": 12697444352.0, "10305": 12697444352.0, "10310": 12697444352.0, "10315": 12697444352.0, "10320": 12697444352.0, "10325": 12697444352.0, "10330": 12697444352.0, "10335": 12697444352.0, "10340": 12697444352.0, "10345": 12697444352.0, "10350": 12697444352.0, "10355": 12697444352.0, "10360": 12697444352.0, "10365": 12697444352.0, "10370": 12697444352.0, "10375": 12697444352.0, "10380": 12697444352.0, "10385": 12697444352.0, "10390": 12697444352.0, "10395": 12697444352.0, "10400": 12697444352.0, "10405": 12697444352.0, "10410": 12697444352.0, "10415": 12697444352.0, "10420": 12697444352.0, "10425": 12697444352.0, "10430": 12697444352.0, 
"10435": 12697444352.0, "10440": 12697444352.0, "10445": 12697444352.0, "10450": 12697444352.0, "10455": 12697444352.0, "10460": 12697444352.0, "10465": 12697444352.0, "10470": 12697444352.0, "10475": 12697444352.0, "10480": 12697444352.0, "10485": 12697444352.0, "10490": 12697444352.0, "10495": 12697444352.0, "10500": 12697444352.0, "10505": 12697444352.0, "10510": 12697444352.0, "10515": 12697444352.0, "10520": 12697444352.0, "10525": 12697444352.0, "10530": 12697444352.0, "10535": 12697444352.0, "10540": 12697444352.0, "10545": 12697444352.0, "10550": 12697444352.0, "10555": 12697444352.0, "10560": 12697444352.0, "10565": 12697444352.0, "10570": 12697444352.0, "10575": 12697444352.0, "10580": 12697444352.0, "10585": 12697444352.0, "10590": 12697444352.0, "10595": 12697444352.0, "10600": 12697444352.0, "10605": 12697444352.0, "10610": 12697444352.0, "10615": 12697444352.0, "10620": 12697444352.0, "10625": 12697444352.0, "10630": 12697444352.0, "10635": 12697444352.0, "10640": 12697444352.0, "10645": 12697444352.0, "10650": 12697444352.0, "10655": 12697444352.0, "10660": 12697444352.0, "10665": 12697444352.0, "10670": 12697444352.0, "10675": 12697444352.0, "10680": 12697444352.0, "10685": 12697444352.0, "10690": 12697444352.0, "10695": 12697444352.0, "10700": 12697444352.0, "10705": 12697444352.0, "10710": 12697444352.0, "10715": 12697444352.0, "10720": 12697444352.0, "10725": 12697444352.0, "10730": 12697444352.0, "10735": 12697444352.0, "10740": 12697444352.0, "10745": 12697444352.0, "10750": 12697444352.0, "10755": 12697444352.0, "10760": 12697444352.0, "10765": 12697444352.0, "10770": 12697444352.0, "10775": 12697444352.0, "10780": 12697444352.0, "10785": 12697444352.0, "10790": 12697444352.0, "10795": 12697444352.0, "10800": 12697444352.0, "10805": 12697444352.0, "10810": 12697444352.0, "10815": 12697444352.0, "10820": 12697444352.0, "10825": 12697444352.0, "10830": 12697444352.0, "10835": 12697444352.0, "10840": 12697444352.0, "10845": 12697444352.0, "10850": 12697444352.0, "10855": 12697444352.0, "10860": 12697444352.0, "10865": 12697444352.0, "10870": 12697444352.0, "10875": 12697444352.0, "10880": 12697444352.0, "10885": 12697444352.0, "10890": 12697444352.0, "10895": 12697444352.0, "10900": 12697444352.0, "10905": 12697444352.0, "10910": 12697444352.0, "10915": 12697444352.0, "10920": 12697444352.0, "10925": 12697444352.0, "10930": 12697444352.0, "10935": 12697444352.0, "10940": 12697444352.0, "10945": 12697444352.0, "10950": 12697444352.0, "10955": 12697444352.0, "10960": 12697444352.0, "10965": 12697444352.0, "10970": 12697444352.0, "10975": 12697444352.0, "10980": 12697444352.0, "10985": 12697444352.0, "10990": 12697444352.0, "10995": 12697444352.0, "11000": 12697444352.0, "11005": 12697444352.0, "11010": 12697444352.0, "11015": 12697444352.0, "11020": 12697444352.0, "11025": 12697444352.0, "11030": 12697444352.0, "11035": 12697444352.0, "11040": 12697444352.0, "11045": 12697444352.0, "11050": 12697444352.0, "11055": 12697444352.0, "11060": 12697444352.0, "11065": 12697444352.0, "11070": 12697444352.0, "11075": 12697444352.0, "11080": 12697444352.0, "11085": 12697444352.0, "11090": 12697444352.0, "11095": 12697444352.0, "11100": 12697444352.0, "11105": 12697444352.0, "11110": 12697444352.0, "11115": 12697444352.0, "11120": 12697444352.0, "11125": 12697444352.0, "11130": 12697444352.0, "11135": 12697444352.0, "11140": 12697444352.0, "11145": 12697444352.0, "11150": 12697444352.0, "11155": 12697444352.0, "11160": 12697444352.0, "11165": 12697444352.0, "11170": 12697444352.0, 
"11175": 12697444352.0, "11180": 12697444352.0, "11185": 12697444352.0, "11190": 12697444352.0, "11195": 12697444352.0, "11200": 12697444352.0, "11205": 12697444352.0, "11210": 12697444352.0, "11215": 12697444352.0, "11220": 12697444352.0, "11225": 12697444352.0, "11230": 12697444352.0, "11235": 12697444352.0, "11240": 12697444352.0, "11245": 12697444352.0, "11250": 12697444352.0, "11255": 12697444352.0, "11260": 12697444352.0, "11265": 12697444352.0, "11270": 12697444352.0, "11275": 12697444352.0, "11280": 12697444352.0, "11285": 12697444352.0, "11290": 12697444352.0, "11295": 12697444352.0, "11300": 12697444352.0, "11305": 12697444352.0, "11310": 12697444352.0, "11315": 12697444352.0, "11320": 12697444352.0, "11325": 12697444352.0, "11330": 12697444352.0, "11335": 12697444352.0, "11340": 12697444352.0, "11345": 12697444352.0, "11350": 12697444352.0, "11355": 12697444352.0, "11360": 12697444352.0, "11365": 12697444352.0, "11370": 12697444352.0, "11375": 12697444352.0, "11380": 12697444352.0, "11385": 12697444352.0, "11390": 12697444352.0, "11395": 12697444352.0, "11400": 12697444352.0, "11405": 12697444352.0, "11410": 12697444352.0, "11415": 12697444352.0, "11420": 12697444352.0, "11425": 12697444352.0, "11430": 12697444352.0, "11435": 12697444352.0, "11440": 12697444352.0, "11445": 12697444352.0, "11450": 12697444352.0, "11455": 12697444352.0, "11460": 12697444352.0, "11465": 12697444352.0, "11470": 12697444352.0, "11475": 12697444352.0, "11480": 12697444352.0, "11485": 12697444352.0, "11490": 12697444352.0, "11495": 12697444352.0, "11500": 12697444352.0, "11505": 12697444352.0, "11510": 12697444352.0, "11515": 12697444352.0, "11520": 12697444352.0, "11525": 12697444352.0, "11530": 12697444352.0, "11535": 12697444352.0, "11540": 12697444352.0, "11545": 12697444352.0, "11550": 12697444352.0, "11555": 12697444352.0, "11560": 12697444352.0, "11565": 12697444352.0, "11570": 12697444352.0, "11575": 12697444352.0, "11580": 12697444352.0, "11585": 12697444352.0, "11590": 12697444352.0, "11595": 12697444352.0, "11600": 12697444352.0, "11605": 12697444352.0, "11610": 12697444352.0, "11615": 12697444352.0, "11620": 12697444352.0, "11625": 12697444352.0, "11630": 12697444352.0, "11635": 12697444352.0, "11640": 12697444352.0, "11645": 12697444352.0, "11650": 12697444352.0, "11655": 12697444352.0, "11660": 12697444352.0, "11665": 12697444352.0, "11670": 12697444352.0, "11675": 12697444352.0, "11680": 12697444352.0, "11685": 12697444352.0, "11690": 12697444352.0, "11695": 12697444352.0, "11700": 12697444352.0, "11705": 12697444352.0, "11710": 12697444352.0, "11715": 12697444352.0, "11720": 12697444352.0, "11725": 12697444352.0, "11730": 12697444352.0, "11735": 12697444352.0, "11740": 12697444352.0, "11745": 12697444352.0, "11750": 12697444352.0, "11755": 12697444352.0, "11760": 12697444352.0, "11765": 12697444352.0, "11770": 12697444352.0, "11775": 12697444352.0, "11780": 12697444352.0, "11785": 12697444352.0, "11790": 12697444352.0, "11795": 12697444352.0, "11800": 12697444352.0, "11805": 12697444352.0, "11810": 12697444352.0, "11815": 12697444352.0, "11820": 12697444352.0, "11825": 12697444352.0, "11830": 12697444352.0, "11835": 12697444352.0, "11840": 12697444352.0, "11845": 12697444352.0, "11850": 12697444352.0, "11855": 12697444352.0, "11860": 12697444352.0, "11865": 12697444352.0, "11870": 12697444352.0, "11875": 12697444352.0, "11880": 12697444352.0, "11885": 12697444352.0, "11890": 12697444352.0, "11895": 12697444352.0, "11900": 12697444352.0, "11905": 12697444352.0, "11910": 12697444352.0, 
"11915": 12697444352.0, "11920": 12697444352.0, "11925": 12697444352.0, "11930": 12697444352.0, "11935": 12697444352.0, "11940": 12697444352.0, "11945": 12697444352.0, "11950": 12697444352.0, "11955": 12697444352.0, "11960": 12697444352.0, "11965": 12697444352.0, "11970": 12697444352.0, "11975": 12697444352.0, "11980": 12697444352.0, "11985": 12697444352.0, "11990": 12697444352.0, "11995": 12697444352.0, "12000": 12697444352.0, "12005": 12697444352.0, "12010": 12697444352.0, "12015": 12697444352.0, "12020": 12697444352.0, "12025": 12697444352.0, "12030": 12697444352.0, "12035": 12697444352.0, "12040": 12697444352.0, "12045": 12697444352.0, "12050": 12697444352.0, "12055": 12697444352.0, "12060": 12697444352.0, "12065": 12697444352.0, "12070": 12697444352.0, "12075": 12697444352.0, "12080": 12697444352.0, "12085": 12697444352.0, "12090": 12697444352.0, "12095": 12697444352.0, "12100": 12697444352.0, "12105": 12697444352.0, "12110": 12697444352.0, "12115": 12697444352.0, "12120": 12697444352.0, "12125": 12697444352.0, "12130": 12697444352.0, "12135": 12697444352.0, "12140": 12697444352.0, "12145": 12697444352.0, "12150": 12697444352.0, "12155": 12697444352.0, "12160": 12697444352.0, "12165": 12697444352.0, "12170": 12697444352.0, "12175": 12697444352.0, "12180": 12697444352.0, "12185": 12697444352.0, "12190": 12697444352.0, "12195": 12697444352.0, "12200": 12697444352.0, "12205": 12697444352.0, "12210": 12697444352.0, "12215": 12697444352.0, "12220": 12697444352.0, "12225": 12697444352.0, "12230": 12697444352.0, "12235": 12697444352.0, "12240": 12697444352.0, "12245": 12697444352.0, "12250": 12697444352.0, "12255": 12697444352.0, "12260": 12697444352.0, "12265": 12697444352.0, "12270": 12697444352.0, "12275": 12697444352.0, "12280": 12697444352.0, "12285": 12697444352.0, "12290": 12697444352.0, "12295": 12697444352.0, "12300": 12697444352.0, "12305": 12697444352.0, "12310": 12697444352.0, "12315": 12697444352.0, "12320": 12697444352.0, "12325": 12697444352.0, "12330": 12697444352.0, "12335": 12697444352.0, "12340": 12697444352.0, "12345": 12697444352.0, "12350": 12697444352.0, "12355": 12697444352.0, "12360": 12697444352.0, "12365": 12697444352.0, "12370": 12697444352.0, "12375": 12697444352.0, "12380": 12697444352.0, "12385": 12697444352.0, "12390": 12697444352.0, "12395": 12697444352.0, "12400": 12697444352.0, "12405": 12697444352.0, "12410": 12697444352.0, "12415": 12697444352.0, "12420": 12697444352.0, "12425": 12697444352.0, "12430": 12697444352.0, "12435": 12697444352.0, "12440": 12697444352.0, "12445": 12697444352.0, "12450": 12697444352.0, "12455": 12697444352.0, "12460": 12697444352.0, "12465": 12697444352.0, "12470": 12697444352.0, "12475": 12697444352.0, "12480": 12697444352.0, "12485": 12697444352.0, "12490": 12697444352.0, "12495": 12697444352.0, "12500": 12697444352.0, "12505": 12697444352.0, "12510": 12697444352.0, "12515": 12697444352.0, "12520": 12697444352.0, "12525": 12697444352.0, "12530": 12697444352.0, "12535": 12697444352.0, "12540": 12697444352.0, "12545": 12697444352.0, "12550": 12697444352.0, "12555": 12697444352.0, "12560": 12697444352.0, "12565": 12697444352.0, "12570": 12697444352.0, "12575": 12697444352.0, "12580": 12697444352.0, "12585": 12697444352.0, "12590": 12697444352.0, "12595": 12697444352.0, "12600": 12697444352.0, "12605": 12697444352.0, "12610": 12697444352.0, "12615": 12697444352.0, "12620": 12697444352.0, "12625": 12697444352.0, "12630": 12697444352.0, "12635": 12697444352.0, "12640": 12697444352.0, "12645": 12697444352.0, "12650": 12697444352.0, 
"12655": 12697444352.0, "12660": 12697444352.0, "12665": 12697444352.0, "12670": 12697444352.0, "12675": 12697444352.0, "12680": 12697444352.0, "12685": 12697444352.0, "12690": 12697444352.0, "12695": 12697444352.0, "12700": 12697444352.0, "12705": 12697444352.0, "12710": 12697444352.0, "12715": 12697444352.0, "12720": 12697444352.0, "12725": 12697444352.0, "12730": 12697444352.0, "12735": 12697444352.0, "12740": 12697444352.0, "12745": 12697444352.0, "12750": 12697444352.0, "12755": 12697444352.0, "12760": 12697444352.0, "12765": 12697444352.0, "12770": 12697444352.0, "12775": 12697444352.0, "12780": 12697444352.0, "12785": 12697444352.0, "12790": 12697444352.0, "12795": 12697444352.0, "12800": 12697444352.0, "12805": 12697444352.0, "12810": 12697444352.0, "12815": 12697444352.0, "12820": 12697444352.0, "12825": 12697444352.0, "12830": 12697444352.0, "12835": 12697444352.0, "12840": 12697444352.0, "12845": 12697444352.0, "12850": 12697444352.0, "12855": 12697444352.0, "12860": 12697444352.0, "12865": 12697444352.0, "12870": 12697444352.0, "12875": 12697444352.0, "12880": 12697444352.0, "12885": 12697444352.0, "12890": 12697444352.0, "12895": 12697444352.0, "12900": 12697444352.0, "12905": 12697444352.0, "12910": 12697444352.0, "12915": 12697444352.0, "12920": 12697444352.0, "12925": 12697444352.0, "12930": 12697444352.0, "12935": 12697444352.0, "12940": 12697444352.0, "12945": 12697444352.0, "12950": 12697444352.0, "12955": 12697444352.0, "12960": 12697444352.0, "12965": 12697444352.0, "12970": 12697444352.0, "12975": 12697444352.0, "12980": 12697444352.0, "12985": 12697444352.0, "12990": 12697444352.0, "12995": 12697444352.0, "13000": 12697444352.0, "13005": 12697444352.0, "13010": 12697444352.0, "13015": 12697444352.0, "13020": 12697444352.0, "13025": 12697444352.0, "13030": 12697444352.0, "13035": 12697444352.0, "13040": 12697444352.0, "13045": 12697444352.0, "13050": 12697444352.0, "13055": 12697444352.0, "13060": 12697444352.0, "13065": 12697444352.0, "13070": 12697444352.0, "13075": 12697444352.0, "13080": 12697444352.0, "13085": 12697444352.0, "13090": 12697444352.0, "13095": 12697444352.0, "13100": 12697444352.0, "13105": 12697444352.0, "13110": 12697444352.0, "13115": 12697444352.0, "13120": 12697444352.0, "13125": 12697444352.0, "13130": 12697444352.0, "13135": 12697444352.0, "13140": 12697444352.0, "13145": 12697444352.0, "13150": 12697444352.0, "13155": 12697444352.0, "13160": 12697444352.0, "13165": 12697444352.0, "13170": 12697444352.0, "13175": 12697444352.0, "13180": 12697444352.0, "13185": 12697444352.0, "13190": 12697444352.0, "13195": 12697444352.0, "13200": 12697444352.0, "13205": 12697444352.0, "13210": 12697444352.0, "13215": 12697444352.0, "13220": 12697444352.0, "13225": 12697444352.0, "13230": 12697444352.0, "13235": 12697444352.0, "13240": 12697444352.0, "13245": 12697444352.0, "13250": 12697444352.0, "13255": 12697444352.0, "13260": 12697444352.0, "13265": 12697444352.0, "13270": 12697444352.0, "13275": 12697444352.0, "13280": 12697444352.0, "13285": 12697444352.0, "13290": 12697444352.0, "13295": 12697444352.0, "13300": 12697444352.0, "13305": 12697444352.0, "13310": 12697444352.0, "13315": 12697444352.0, "13320": 12697444352.0, "13325": 12697444352.0, "13330": 12697444352.0, "13335": 12697444352.0, "13340": 12697444352.0, "13345": 12697444352.0, "13350": 12697444352.0, "13355": 12697444352.0, "13360": 12697444352.0, "13365": 12697444352.0, "13370": 12697444352.0, "13375": 12697444352.0, "13380": 12697444352.0, "13385": 12697444352.0, "13390": 12697444352.0, 
"13395": 12697444352.0, "13400": 12697444352.0, "13405": 12697444352.0, "13410": 12697444352.0, "13415": 12697444352.0, "13420": 12697444352.0, "13425": 12697444352.0, "13430": 12697444352.0, "13435": 12697444352.0, "13440": 12697444352.0, "13445": 12697444352.0, "13450": 12697444352.0, "13455": 12697444352.0, "13460": 12697444352.0, "13465": 12697444352.0, "13470": 12697444352.0, "13475": 12697444352.0, "13480": 12697444352.0, "13485": 12697444352.0, "13490": 12697444352.0, "13495": 12697444352.0, "13500": 12697444352.0, "13505": 12697444352.0, "13510": 12697444352.0, "13515": 12697444352.0, "13520": 12697444352.0, "13525": 12697444352.0, "13530": 12697444352.0, "13535": 12697444352.0, "13540": 12697444352.0, "13545": 12697444352.0, "13550": 12697444352.0, "13555": 12697444352.0, "13560": 12697444352.0, "13565": 12697444352.0, "13570": 12697444352.0, "13575": 12697444352.0, "13580": 12697444352.0, "13585": 12697444352.0, "13590": 12697444352.0, "13595": 12697444352.0, "13600": 12697444352.0, "13605": 12697444352.0, "13610": 12697444352.0, "13615": 12697444352.0, "13620": 12697444352.0, "13625": 12697444352.0, "13630": 12697444352.0, "13635": 12697444352.0, "13640": 12697444352.0, "13645": 12697444352.0, "13650": 12697444352.0, "13655": 12697444352.0, "13660": 12697444352.0, "13665": 12697444352.0, "13670": 12697444352.0, "13675": 12697444352.0, "13680": 12697444352.0, "13685": 12697444352.0, "13690": 12697444352.0, "13695": 12697444352.0, "13700": 12697444352.0, "13705": 12697444352.0, "13710": 12697444352.0, "13715": 12697444352.0, "13720": 12697444352.0, "13725": 12697444352.0, "13730": 12697444352.0, "13735": 12697444352.0, "13740": 12697444352.0, "13745": 12697444352.0, "13750": 12697444352.0, "13755": 12697444352.0, "13760": 12697444352.0, "13765": 12697444352.0, "13770": 12697444352.0, "13775": 12697444352.0, "13780": 12697444352.0, "13785": 12697444352.0, "13790": 12697444352.0, "13795": 12697444352.0, "13800": 12697444352.0, "13805": 12697444352.0, "13810": 12697444352.0, "13815": 12697444352.0, "13820": 12697444352.0, "13825": 12697444352.0, "13830": 12697444352.0, "13835": 12697444352.0, "13840": 12697444352.0, "13845": 12697444352.0, "13850": 12697444352.0, "13855": 12697444352.0, "13860": 12697444352.0, "13865": 12697444352.0, "13870": 12697444352.0, "13875": 12697444352.0, "13880": 12697444352.0, "13885": 12697444352.0, "13890": 12697444352.0, "13895": 12697444352.0, "13900": 12697444352.0, "13905": 12697444352.0, "13910": 12697444352.0, "13915": 12697444352.0, "13920": 12697444352.0, "13925": 12697444352.0, "13930": 12697444352.0, "13935": 12697444352.0, "13940": 12697444352.0, "13945": 12697444352.0, "13950": 12697444352.0, "13955": 12697444352.0, "13960": 12697444352.0, "13965": 12697444352.0, "13970": 12697444352.0, "13975": 12697444352.0, "13980": 12697444352.0, "13985": 12697444352.0, "13990": 12697444352.0, "13995": 12697444352.0, "14000": 12697444352.0, "14005": 12697444352.0, "14010": 12697444352.0, "14015": 12697444352.0, "14020": 12697444352.0, "14025": 12697444352.0, "14030": 12697444352.0, "14035": 12697444352.0, "14040": 12697444352.0, "14045": 12697444352.0, "14050": 12697444352.0, "14055": 12697444352.0, "14060": 12697444352.0, "14065": 12697444352.0, "14070": 12697444352.0, "14075": 12697444352.0, "14080": 12697444352.0, "14085": 12697444352.0, "14090": 12697444352.0, "14095": 12697444352.0, "14100": 12697444352.0, "14105": 12697444352.0, "14110": 12697444352.0, "14115": 12697444352.0, "14120": 12697444352.0, "14125": 12697444352.0, "14130": 12697444352.0, 
"14135": 12697444352.0, "14140": 12697444352.0, "14145": 12697444352.0, "14150": 12697444352.0, "14155": 12697444352.0, "14160": 12697444352.0, "14165": 12697444352.0, "14170": 12697444352.0, "14175": 12697444352.0, "14180": 12697444352.0, "14185": 12697444352.0, "14190": 12697444352.0, "14195": 12697444352.0, "14200": 12697444352.0, "14205": 12697444352.0, "14210": 12697444352.0, "14215": 12697444352.0, "14220": 12697444352.0, "14225": 12697444352.0, "14230": 12697444352.0, "14235": 12697444352.0, "14240": 12697444352.0, "14245": 12697444352.0, "14250": 12697444352.0, "14255": 12697444352.0, "14260": 12697444352.0, "14265": 12697444352.0, "14270": 12697444352.0, "14275": 12697444352.0, "14280": 12697444352.0, "14285": 12697444352.0, "14290": 12697444352.0, "14295": 12697444352.0, "14300": 12697444352.0, "14305": 12697444352.0, "14310": 12697444352.0, "14315": 12697444352.0, "14320": 12697444352.0, "14325": 12697444352.0, "14330": 12697444352.0, "14335": 12697444352.0, "14340": 12697444352.0, "14345": 12697444352.0, "14350": 12697444352.0, "14355": 12697444352.0, "14360": 12697444352.0, "14365": 12697444352.0, "14370": 12697444352.0, "14375": 12697444352.0, "14380": 12697444352.0, "14385": 12697444352.0, "14390": 12697444352.0, "14395": 12697444352.0, "14400": 12697444352.0, "14405": 12697444352.0, "14410": 12697444352.0, "14415": 12697444352.0, "14420": 12697444352.0, "14425": 12697444352.0, "14430": 12697444352.0, "14435": 12697444352.0, "14440": 12697444352.0, "14445": 12697444352.0, "14450": 12697444352.0, "14455": 12697444352.0, "14460": 12697444352.0, "14465": 12697444352.0, "14470": 12697444352.0, "14475": 12697444352.0, "14480": 12697444352.0, "14485": 12697444352.0, "14490": 12697444352.0, "14495": 12697444352.0, "14500": 12697444352.0, "14505": 12697444352.0, "14510": 12697444352.0, "14515": 12697444352.0, "14520": 12697444352.0, "14525": 12697444352.0, "14530": 12697444352.0, "14535": 12697444352.0, "14540": 12697444352.0, "14545": 12697444352.0, "14550": 12697444352.0, "14555": 12697444352.0, "14560": 12697444352.0, "14565": 12697444352.0, "14570": 12697444352.0, "14575": 12697444352.0, "14580": 12697444352.0, "14585": 12697444352.0, "14590": 12697444352.0, "14595": 12697444352.0, "14600": 12697444352.0, "14605": 12697444352.0, "14610": 12697444352.0, "14615": 12697444352.0, "14620": 12697444352.0, "14625": 12697444352.0, "14630": 12697444352.0, "14635": 12697444352.0, "14640": 12697444352.0, "14645": 12697444352.0, "14650": 12697444352.0, "14655": 12697444352.0, "14660": 12697444352.0, "14665": 12697444352.0, "14670": 12697444352.0, "14675": 12697444352.0, "14680": 12697444352.0, "14685": 12697444352.0, "14690": 12697444352.0, "14695": 12697444352.0, "14700": 12697444352.0, "14705": 12697444352.0, "14710": 12697444352.0, "14715": 12697444352.0, "14720": 12697444352.0, "14725": 12697444352.0, "14730": 12697444352.0, "14735": 12697444352.0, "14740": 12697444352.0, "14745": 12697444352.0, "14750": 12697444352.0, "14755": 12697444352.0, "14760": 12697444352.0, "14765": 12697444352.0, "14770": 12697444352.0, "14775": 12697444352.0, "14780": 12697444352.0, "14785": 12697444352.0, "14790": 12697444352.0, "14795": 12697444352.0, "14800": 12697444352.0, "14805": 12697444352.0, "14810": 12697444352.0, "14815": 12697444352.0, "14820": 12697444352.0, "14825": 12697444352.0, "14830": 12697444352.0, "14835": 12697444352.0, "14840": 12697444352.0, "14845": 12697444352.0, "14850": 12697444352.0, "14855": 12697444352.0, "14860": 12697444352.0, "14865": 12697444352.0, "14870": 12697444352.0, 
"14875": 12697444352.0, "14880": 12697444352.0, "14885": 12697444352.0, "14890": 12697444352.0, "14895": 12697444352.0, "14900": 12697444352.0, "14905": 12697444352.0, "14910": 12697444352.0, "14915": 12697444352.0, "14920": 12697444352.0, "14925": 12697444352.0, "14930": 12697444352.0, "14935": 12697444352.0, "14940": 12697444352.0, "14945": 12697444352.0, "14950": 12697444352.0, "14955": 12697444352.0, "14960": 12697444352.0, "14965": 12697444352.0, "14970": 12697444352.0, "14975": 12697444352.0, "14980": 12697444352.0, "14985": 12697444352.0, "14990": 12697444352.0, "14995": 12697444352.0, "15000": 12697444352.0, "15005": 12697444352.0, "15010": 12697444352.0, "15015": 12697444352.0, "15020": 12697444352.0, "15025": 12697444352.0, "15030": 12697444352.0, "15035": 12697444352.0, "15040": 12697444352.0, "15045": 12697444352.0, "15050": 12697444352.0, "15055": 12697444352.0, "15060": 12697444352.0, "15065": 12697444352.0, "15070": 12697444352.0, "15075": 12697444352.0, "15080": 12697444352.0, "15085": 12697444352.0, "15090": 12697444352.0, "15095": 12697444352.0, "15100": 12697444352.0, "15105": 12697444352.0, "15110": 12697444352.0, "15115": 12697444352.0, "15120": 12697444352.0, "15125": 12697444352.0, "15130": 12697444352.0, "15135": 12697444352.0, "15140": 12697444352.0, "15145": 12697444352.0, "15150": 12697444352.0, "15155": 12697444352.0, "15160": 12697444352.0, "15165": 12697444352.0, "15170": 12697444352.0, "15175": 12697444352.0, "15180": 12697444352.0, "15185": 12697444352.0, "15190": 12697444352.0, "15195": 12697444352.0, "15200": 12697444352.0, "15205": 12697444352.0, "15210": 12697444352.0, "15215": 12697444352.0, "15220": 12697444352.0, "15225": 12697444352.0, "15230": 12697444352.0, "15235": 12697444352.0, "15240": 12697444352.0, "15245": 12697444352.0, "15250": 12697444352.0, "15255": 12697444352.0, "15260": 12697444352.0, "15265": 12697444352.0, "15270": 12697444352.0, "15275": 12697444352.0, "15280": 12697444352.0, "15285": 12697444352.0, "15290": 12697444352.0, "15295": 12697444352.0, "15300": 12697444352.0, "15305": 12697444352.0, "15310": 12697444352.0, "15315": 12697444352.0, "15320": 12697444352.0, "15325": 12697444352.0, "15330": 12697444352.0, "15335": 12697444352.0, "15340": 12697444352.0, "15345": 12697444352.0, "15350": 12697444352.0, "15355": 12697444352.0, "15360": 12697444352.0, "15365": 12697444352.0, "15370": 12697444352.0, "15375": 12697444352.0, "15380": 12697444352.0, "15385": 12697444352.0, "15390": 12697444352.0, "15395": 12697444352.0, "15400": 12697444352.0, "15405": 12697444352.0, "15410": 12697444352.0, "15415": 12697444352.0, "15420": 12697444352.0, "15425": 12697444352.0, "15430": 12697444352.0, "15435": 12697444352.0, "15440": 12697444352.0, "15445": 12697444352.0, "15450": 12697444352.0, "15455": 12697444352.0, "15460": 12697444352.0, "15465": 12697444352.0, "15470": 12697444352.0, "15475": 12697444352.0, "15480": 12697444352.0, "15485": 12697444352.0, "15490": 12697444352.0, "15495": 12697444352.0, "15500": 12697444352.0, "15505": 12697444352.0, "15510": 12697444352.0, "15515": 12697444352.0, "15520": 12697444352.0, "15525": 12697444352.0, "15530": 12697444352.0, "15535": 12697444352.0, "15540": 12697444352.0, "15545": 12697444352.0, "15550": 12697444352.0, "15555": 12697444352.0, "15560": 12697444352.0, "15565": 12697444352.0, "15570": 12697444352.0, "15575": 12697444352.0, "15580": 12697444352.0, "15585": 12697444352.0, "15590": 12697444352.0, "15595": 12697444352.0, "15600": 12697444352.0, "15605": 12697444352.0, "15610": 12697444352.0, 
"15615": 12697444352.0, "15620": 12697444352.0, "15625": 12697444352.0, "15630": 12697444352.0, "15635": 12697444352.0, "15640": 12697444352.0, "15645": 12697444352.0, "15650": 12697444352.0, "15655": 12697444352.0, "15660": 12697444352.0, "15665": 12697444352.0, "15670": 12697444352.0, "15675": 12697444352.0, "15680": 12697444352.0, "15685": 12697444352.0, "15690": 12697444352.0, "15695": 12697444352.0, "15700": 12697444352.0, "15705": 12697444352.0, "15710": 12697444352.0, "15715": 12697444352.0, "15720": 12697444352.0, "15725": 12697444352.0, "15730": 12697444352.0, "15735": 12697444352.0, "15740": 12697444352.0, "15745": 12697444352.0, "15750": 12697444352.0, "15755": 12697444352.0, "15760": 12697444352.0, "15765": 12697444352.0, "15770": 12697444352.0, "15775": 12697444352.0, "15780": 12697444352.0, "15785": 12697444352.0, "15790": 12697444352.0, "15795": 12697444352.0, "15800": 12697444352.0, "15805": 12697444352.0, "15810": 12697444352.0, "15815": 12697444352.0, "15820": 12697444352.0, "15825": 12697444352.0, "15830": 12697444352.0, "15835": 12697444352.0, "15840": 12697444352.0, "15845": 12697444352.0, "15850": 12697444352.0, "15855": 12697444352.0, "15860": 12697444352.0, "15865": 12697444352.0, "15870": 12697444352.0, "15875": 12697444352.0, "15880": 12697444352.0, "15885": 12697444352.0, "15890": 12697444352.0, "15895": 12697444352.0, "15900": 12697444352.0, "15905": 12697444352.0, "15910": 12697444352.0, "15915": 12697444352.0, "15920": 12697444352.0, "15925": 12697444352.0, "15930": 12697444352.0, "15935": 12697444352.0, "15940": 12697444352.0, "15945": 12697444352.0, "15950": 12697444352.0, "15955": 12697444352.0, "15960": 12697444352.0, "15965": 12697444352.0, "15970": 12697444352.0, "15975": 12697444352.0, "15980": 12697444352.0, "15985": 12697444352.0, "15990": 12697444352.0, "15995": 12697444352.0, "16000": 12697444352.0, "16005": 12697444352.0, "16010": 12697444352.0, "16015": 12697444352.0, "16020": 12697444352.0, "16025": 12697444352.0, "16030": 12697444352.0, "16035": 12697444352.0, "16040": 12697444352.0, "16045": 12697444352.0, "16050": 12697444352.0, "16055": 12697444352.0, "16060": 12697444352.0, "16065": 12697444352.0, "16070": 12697444352.0, "16075": 12697444352.0, "16080": 12697444352.0, "16085": 12697444352.0, "16090": 12697444352.0, "16095": 12697444352.0, "16100": 12697444352.0, "16105": 12697444352.0, "16110": 12697444352.0, "16115": 12697444352.0, "16120": 12697444352.0, "16125": 12697444352.0, "16130": 12697444352.0, "16135": 12697444352.0, "16140": 12697444352.0, "16145": 12697444352.0, "16150": 12697444352.0, "16155": 12697444352.0, "16160": 12697444352.0, "16165": 12697444352.0, "16170": 12697444352.0, "16175": 12697444352.0, "16180": 12697444352.0, "16185": 12697444352.0, "16190": 12697444352.0, "16195": 12697444352.0, "16200": 12697444352.0, "16205": 12697444352.0, "16210": 12697444352.0, "16215": 12697444352.0, "16220": 12697444352.0, "16225": 12697444352.0, "16230": 12697444352.0, "16235": 12697444352.0, "16240": 12697444352.0, "16245": 12697444352.0, "16250": 12697444352.0, "16255": 12697444352.0, "16260": 12697444352.0, "16265": 12697444352.0, "16270": 12697444352.0, "16275": 12697444352.0, "16280": 12697444352.0, "16285": 12697444352.0, "16290": 12697444352.0, "16295": 12697444352.0, "16300": 12697444352.0, "16305": 12697444352.0, "16310": 12697444352.0, "16315": 12697444352.0, "16320": 12697444352.0, "16325": 12697444352.0, "16330": 12697444352.0, "16335": 12697444352.0, "16340": 12697444352.0, "16345": 12697444352.0, "16350": 12697444352.0, 
"16355": 12697444352.0, "16360": 12697444352.0, "16365": 12697444352.0, "16370": 12697444352.0, "16375": 12697444352.0, "16380": 12697444352.0, "16385": 12697444352.0, "16390": 12697444352.0, "16395": 12697444352.0, "16400": 12697444352.0, "16405": 12697444352.0, "16410": 12697444352.0, "16415": 12697444352.0, "16420": 12697444352.0, "16425": 12697444352.0, "16430": 12697444352.0, "16435": 12697444352.0, "16440": 12697444352.0, "16445": 12697444352.0, "16450": 12697444352.0, "16455": 12697444352.0, "16460": 12697444352.0, "16465": 12697444352.0, "16470": 12697444352.0, "16475": 12697444352.0, "16480": 12697444352.0, "16485": 12697444352.0, "16490": 12697444352.0, "16495": 12697444352.0, "16500": 12697444352.0, "16505": 12697444352.0, "16510": 12697444352.0, "16515": 12697444352.0, "16520": 12697444352.0, "16525": 12697444352.0, "16530": 12697444352.0, "16535": 12697444352.0, "16540": 12697444352.0, "16545": 12697444352.0, "16550": 12697444352.0, "16555": 12697444352.0, "16560": 12697444352.0, "16565": 12697444352.0, "16570": 12697444352.0, "16575": 12697444352.0, "16580": 12697444352.0, "16585": 12697444352.0, "16590": 12697444352.0, "16595": 12697444352.0, "16600": 12697444352.0, "16605": 12697444352.0, "16610": 12697444352.0, "16615": 12697444352.0, "16620": 12697444352.0, "16625": 12697444352.0, "16630": 12697444352.0, "16635": 12697444352.0, "16640": 12697444352.0, "16645": 12697444352.0, "16650": 12697444352.0, "16655": 12697444352.0, "16660": 12697444352.0, "16665": 12697444352.0, "16670": 12697444352.0, "16675": 12697444352.0, "16680": 12697444352.0, "16685": 12697444352.0, "16690": 12697444352.0, "16695": 12697444352.0, "16700": 12697444352.0, "16705": 12697444352.0, "16710": 12697444352.0, "16715": 12697444352.0, "16720": 12697444352.0, "16725": 12697444352.0, "16730": 12697444352.0, "16735": 12697444352.0, "16740": 12697444352.0, "16745": 12697444352.0, "16750": 12697444352.0, "16755": 12697444352.0, "16760": 12697444352.0, "16765": 12697444352.0, "16770": 12697444352.0, "16775": 12697444352.0, "16780": 12697444352.0, "16785": 12697444352.0, "16790": 12697444352.0, "16795": 12697444352.0, "16800": 12697444352.0, "16805": 12697444352.0, "16810": 12697444352.0, "16815": 12697444352.0, "16820": 12697444352.0, "16825": 12697444352.0, "16830": 12697444352.0, "16835": 12697444352.0, "16840": 12697444352.0, "16845": 12697444352.0, "16850": 12697444352.0, "16855": 12697444352.0, "16860": 12697444352.0, "16865": 12697444352.0, "16870": 12697444352.0, "16875": 12697444352.0, "16880": 12697444352.0, "16885": 12697444352.0, "16890": 12697444352.0, "16895": 12697444352.0, "16900": 12697444352.0, "16905": 12697444352.0, "16910": 12697444352.0, "16915": 12697444352.0, "16920": 12697444352.0, "16925": 12697444352.0, "16930": 12697444352.0, "16935": 12697444352.0, "16940": 12697444352.0, "16945": 12697444352.0, "16950": 12697444352.0, "16955": 12697444352.0, "16960": 12697444352.0, "16965": 12697444352.0, "16970": 12697444352.0, "16975": 12697444352.0, "16980": 12697444352.0, "16985": 12697444352.0, "16990": 12697444352.0, "16995": 12697444352.0, "17000": 12697444352.0, "17005": 12697444352.0, "17010": 12697444352.0, "17015": 12697444352.0, "17020": 12697444352.0, "17025": 12697444352.0, "17030": 12697444352.0, "17035": 12697444352.0, "17040": 12697444352.0, "17045": 12697444352.0, "17050": 12697444352.0, "17055": 12697444352.0, "17060": 12697444352.0, "17065": 12697444352.0, "17070": 12697444352.0, "17075": 12697444352.0, "17080": 12697444352.0, "17085": 12697444352.0, "17090": 12697444352.0, 
"17095": 12697444352.0, "17100": 12697444352.0, "17105": 12697444352.0, "17110": 12697444352.0, "17115": 12697444352.0, "17120": 12697444352.0, "17125": 12697444352.0, "17130": 12697444352.0, "17135": 12697444352.0, "17140": 12697444352.0, "17145": 12697444352.0, "17150": 12697444352.0, "17155": 12697444352.0, "17160": 12697444352.0, "17165": 12697444352.0, "17170": 12697444352.0, "17175": 12697444352.0, "17180": 12697444352.0, "17185": 12697444352.0, "17190": 12697444352.0, "17195": 12697444352.0, "17200": 12697444352.0, "17205": 12697444352.0, "17210": 12697444352.0, "17215": 12697444352.0, "17220": 12697444352.0, "17225": 12697444352.0, "17230": 12697444352.0, "17235": 12697444352.0, "17240": 12697444352.0, "17245": 12697444352.0, "17250": 12697444352.0, "17255": 12697444352.0, "17260": 12697444352.0, "17265": 12697444352.0, "17270": 12697444352.0, "17275": 12697444352.0, "17280": 12697444352.0, "17285": 12697444352.0, "17290": 12697444352.0, "17295": 12697444352.0, "17300": 12697444352.0, "17305": 12697444352.0, "17310": 12697444352.0, "17315": 12697444352.0, "17320": 12697444352.0, "17325": 12697444352.0, "17330": 12697444352.0, "17335": 12697444352.0, "17340": 12697444352.0, "17345": 12697444352.0, "17350": 12697444352.0, "17355": 12697444352.0, "17360": 12697444352.0, "17365": 12697444352.0, "17370": 12697444352.0, "17375": 12697444352.0, "17380": 12697444352.0, "17385": 12697444352.0, "17390": 12697444352.0, "17395": 12697444352.0, "17400": 12697444352.0, "17405": 12697444352.0, "17410": 12697444352.0, "17415": 12697444352.0, "17420": 12697444352.0, "17425": 12697444352.0, "17430": 12697444352.0, "17435": 12697444352.0, "17440": 12697444352.0, "17445": 12697444352.0, "17450": 12697444352.0, "17455": 12697444352.0, "17460": 12697444352.0, "17465": 12697444352.0, "17470": 12697444352.0, "17475": 12697444352.0, "17480": 12697444352.0, "17485": 12697444352.0, "17490": 12697444352.0, "17495": 12697444352.0, "17500": 12697444352.0, "17505": 12697444352.0, "17510": 12697444352.0, "17515": 12697444352.0, "17520": 12697444352.0, "17525": 12697444352.0, "17530": 12697444352.0, "17535": 12697444352.0, "17540": 12697444352.0, "17545": 12697444352.0, "17550": 12697444352.0, "17555": 12697444352.0, "17560": 12697444352.0, "17565": 12697444352.0, "17570": 12697444352.0, "17575": 12697444352.0, "17580": 12697444352.0, "17585": 12697444352.0, "17590": 12697444352.0, "17595": 12697444352.0, "17600": 12697444352.0, "17605": 12697444352.0, "17610": 12697444352.0, "17615": 12697444352.0, "17620": 12697444352.0, "17625": 12697444352.0, "17630": 12697444352.0, "17635": 12697444352.0, "17640": 12697444352.0, "17645": 12697444352.0, "17650": 12697444352.0, "17655": 12697444352.0, "17660": 12697444352.0, "17665": 12697444352.0, "17670": 12697444352.0, "17675": 12697444352.0, "17680": 12697444352.0, "17685": 12697444352.0, "17690": 12697444352.0, "17695": 12697444352.0, "17700": 12697444352.0, "17705": 12697444352.0, "17710": 12697444352.0, "17715": 12697444352.0, "17720": 12697444352.0, "17725": 12697444352.0, "17730": 12697444352.0, "17735": 12697444352.0, "17740": 12697444352.0, "17745": 12697444352.0, "17750": 12697444352.0, "17755": 12697444352.0, "17760": 12697444352.0, "17765": 12697444352.0, "17770": 12697444352.0, "17775": 12697444352.0, "17780": 12697444352.0, "17785": 12697444352.0, "17790": 12697444352.0, "17795": 12697444352.0, "17800": 12697444352.0, "17805": 12697444352.0, "17810": 12697444352.0, "17815": 12697444352.0, "17820": 12697444352.0, "17825": 12697444352.0, "17830": 12697444352.0, 
"17835": 12697444352.0, "17840": 12697444352.0, "17845": 12697444352.0, "17850": 12697444352.0, "17855": 12697444352.0, "17860": 12697444352.0, "17865": 12697444352.0, "17870": 12697444352.0, "17875": 12697444352.0, "17880": 12697444352.0, "17885": 12697444352.0, "17890": 12697444352.0, "17895": 12697444352.0, "17900": 12697444352.0, "17905": 12697444352.0, "17910": 12697444352.0, "17915": 12697444352.0, "17920": 12697444352.0, "17925": 12697444352.0, "17930": 12697444352.0, "17935": 12697444352.0, "17940": 12697444352.0, "17945": 12697444352.0, "17950": 12697444352.0, "17955": 12697444352.0, "17960": 12697444352.0, "17965": 12697444352.0, "17970": 12697444352.0, "17975": 12697444352.0, "17980": 12697444352.0, "17985": 12697444352.0, "17990": 12697444352.0, "17995": 12697444352.0, "18000": 12697444352.0, "18005": 12697444352.0, "18010": 12697444352.0, "18015": 12697444352.0, "18020": 12697444352.0, "18025": 12697444352.0, "18030": 12697444352.0, "18035": 12697444352.0, "18040": 12697444352.0, "18045": 12697444352.0, "18050": 12697444352.0, "18055": 12697444352.0, "18060": 12697444352.0, "18065": 12697444352.0, "18070": 12697444352.0, "18075": 12697444352.0, "18080": 12697444352.0, "18085": 12697444352.0, "18090": 12697444352.0, "18095": 12697444352.0, "18100": 12697444352.0, "18105": 12697444352.0, "18110": 12697444352.0, "18115": 12697444352.0, "18120": 12697444352.0, "18125": 12697444352.0, "18130": 12697444352.0, "18135": 12697444352.0, "18140": 12697444352.0, "18145": 12697444352.0, "18150": 12697444352.0, "18155": 12697444352.0, "18160": 12697444352.0, "18165": 12697444352.0, "18170": 12697444352.0, "18175": 12697444352.0, "18180": 12697444352.0, "18185": 12697444352.0, "18190": 12697444352.0, "18195": 12697444352.0, "18200": 12697444352.0, "18205": 12697444352.0, "18210": 12697444352.0, "18215": 12697444352.0, "18220": 12697444352.0, "18225": 12697444352.0, "18230": 12697444352.0, "18235": 12697444352.0, "18240": 12697444352.0, "18245": 12697444352.0, "18250": 12697444352.0, "18255": 12697444352.0, "18260": 12697444352.0, "18265": 12697444352.0, "18270": 12697444352.0, "18275": 12697444352.0, "18280": 12697444352.0, "18285": 12697444352.0, "18290": 12697444352.0, "18295": 12697444352.0, "18300": 12697444352.0, "18305": 12697444352.0, "18310": 12697444352.0, "18315": 12697444352.0, "18320": 12697444352.0, "18325": 12697444352.0, "18330": 12697444352.0, "18335": 12697444352.0, "18340": 12697444352.0, "18345": 12697444352.0, "18350": 12697444352.0, "18355": 12697444352.0, "18360": 12697444352.0, "18365": 12697444352.0, "18370": 12697444352.0, "18375": 12697444352.0, "18380": 12697444352.0, "18385": 12697444352.0, "18390": 12697444352.0, "18395": 12697444352.0, "18400": 12697444352.0, "18405": 12697444352.0, "18410": 12697444352.0, "18415": 12697444352.0, "18420": 12697444352.0, "18425": 12697444352.0, "18430": 12697444352.0, "18435": 12697444352.0, "18440": 12697444352.0, "18445": 12697444352.0, "18450": 12697444352.0, "18455": 12697444352.0, "18460": 12697444352.0, "18465": 12697444352.0, "18470": 12697444352.0, "18475": 12697444352.0, "18480": 12697444352.0, "18485": 12697444352.0, "18490": 12697444352.0, "18495": 12697444352.0, "18500": 12697444352.0, "18505": 12697444352.0, "18510": 12697444352.0, "18515": 12697444352.0, "18520": 12697444352.0, "18525": 12697444352.0, "18530": 12697444352.0, "18535": 12697444352.0, "18540": 12697444352.0, "18545": 12697444352.0, "18550": 12697444352.0, "18555": 12697444352.0, "18560": 12697444352.0, "18565": 12697444352.0, "18570": 12697444352.0, 
"18575": 12697444352.0, "18580": 12697444352.0, "18585": 12697444352.0, "18590": 12697444352.0, "18595": 12697444352.0, "18600": 12697444352.0, "18605": 12697444352.0, "18610": 12697444352.0, "18615": 12697444352.0, "18620": 12697444352.0, "18625": 12697444352.0, "18630": 12697444352.0, "18635": 12697444352.0, "18640": 12697444352.0, "18645": 12697444352.0, "18650": 12697444352.0, "18655": 12697444352.0, "18660": 12697444352.0, "18665": 12697444352.0, "18670": 12697444352.0, "18675": 12697444352.0, "18680": 12697444352.0, "18685": 12697444352.0, "18690": 12697444352.0, "18695": 12697444352.0, "18700": 12697444352.0, "18705": 12697444352.0, "18710": 12697444352.0, "18715": 12697444352.0, "18720": 12697444352.0, "18725": 12697444352.0, "18730": 12697444352.0, "18735": 12697444352.0, "18740": 12697444352.0, "18745": 12697444352.0, "18750": 12697444352.0, "18755": 12697444352.0, "18760": 12697444352.0, "18765": 12697444352.0, "18770": 12697444352.0, "18775": 12697444352.0, "18780": 12697444352.0, "18785": 12697444352.0, "18790": 12697444352.0, "18795": 12697444352.0, "18800": 12697444352.0, "18805": 12697444352.0, "18810": 12697444352.0, "18815": 12697444352.0, "18820": 12697444352.0, "18825": 12697444352.0, "18830": 12697444352.0, "18835": 12697444352.0, "18840": 12697444352.0, "18845": 12697444352.0, "18850": 12697444352.0, "18855": 12697444352.0, "18860": 12697444352.0, "18865": 12697444352.0, "18870": 12697444352.0, "18875": 12697444352.0, "18880": 12697444352.0, "18885": 12697444352.0, "18890": 12697444352.0, "18895": 12697444352.0, "18900": 12697444352.0, "18905": 12697444352.0, "18910": 12697444352.0, "18915": 12697444352.0, "18920": 12697444352.0, "18925": 12697444352.0, "18930": 12697444352.0, "18935": 12697444352.0, "18940": 12697444352.0, "18945": 12697444352.0, "18950": 12697444352.0, "18955": 12697444352.0, "18960": 12697444352.0, "18965": 12697444352.0, "18970": 12697444352.0, "18975": 12697444352.0, "18980": 12697444352.0, "18985": 12697444352.0, "18990": 12697444352.0, "18995": 12697444352.0, "19000": 12697444352.0, "19005": 12697444352.0, "19010": 12697444352.0, "19015": 12697444352.0, "19020": 12697444352.0, "19025": 12697444352.0, "19030": 12697444352.0, "19035": 12697444352.0, "19040": 12697444352.0, "19045": 12697444352.0, "19050": 12697444352.0, "19055": 12697444352.0, "19060": 12697444352.0, "19065": 12697444352.0, "19070": 12697444352.0, "19075": 12697444352.0, "19080": 12697444352.0, "19085": 12697444352.0, "19090": 12697444352.0, "19095": 12697444352.0, "19100": 12697444352.0, "19105": 12697444352.0, "19110": 12697444352.0, "19115": 12697444352.0, "19120": 12697444352.0, "19125": 12697444352.0, "19130": 12697444352.0, "19135": 12697444352.0, "19140": 12697444352.0, "19145": 12697444352.0, "19150": 12697444352.0, "19155": 12697444352.0, "19160": 12697444352.0, "19165": 12697444352.0, "19170": 12697444352.0, "19175": 12697444352.0, "19180": 12697444352.0, "19185": 12697444352.0, "19190": 12697444352.0, "19195": 12697444352.0, "19200": 12697444352.0, "19205": 12697444352.0, "19210": 12697444352.0, "19215": 12697444352.0, "19220": 12697444352.0, "19225": 12697444352.0, "19230": 12697444352.0, "19235": 12697444352.0, "19240": 12697444352.0, "19245": 12697444352.0, "19250": 12697444352.0, "19255": 12697444352.0, "19260": 12697444352.0, "19265": 12697444352.0, "19270": 12697444352.0, "19275": 12697444352.0, "19280": 12697444352.0, "19285": 12697444352.0, "19290": 12697444352.0, "19295": 12697444352.0, "19300": 12697444352.0, "19305": 12697444352.0, "19310": 12697444352.0, 
"19315": 12697444352.0, "19320": 12697444352.0, "19325": 12697444352.0, "19330": 12697444352.0, "19335": 12697444352.0, "19340": 12697444352.0, "19345": 12697444352.0, "19350": 12697444352.0, "19355": 12697444352.0, "19360": 12697444352.0, "19365": 12697444352.0, "19370": 12697444352.0, "19375": 12697444352.0, "19380": 12697444352.0, "19385": 12697444352.0, "19390": 12697444352.0, "19395": 12697444352.0, "19400": 12697444352.0, "19405": 12697444352.0, "19410": 12697444352.0, "19415": 12697444352.0, "19420": 12697444352.0, "19425": 12697444352.0, "19430": 12697444352.0, "19435": 12697444352.0, "19440": 12697444352.0, "19445": 12697444352.0, "19450": 12697444352.0, "19455": 12697444352.0, "19460": 12697444352.0, "19465": 12697444352.0, "19470": 12697444352.0, "19475": 12697444352.0, "19480": 12697444352.0, "19485": 12697444352.0, "19490": 12697444352.0, "19495": 12697444352.0, "19500": 12697444352.0, "19505": 12697444352.0, "19510": 12697444352.0, "19515": 12697444352.0, "19520": 12697444352.0, "19525": 12697444352.0, "19530": 12697444352.0, "19535": 12697444352.0, "19540": 12697444352.0, "19545": 12697444352.0, "19550": 12697444352.0, "19555": 12697444352.0, "19560": 12697444352.0, "19565": 12697444352.0, "19570": 12697444352.0, "19575": 12697444352.0, "19580": 12697444352.0, "19585": 12697444352.0, "19590": 12697444352.0, "19595": 12697444352.0, "19600": 12697444352.0, "19605": 12697444352.0, "19610": 12697444352.0, "19615": 12697444352.0, "19620": 12697444352.0, "19625": 12697444352.0, "19630": 12697444352.0, "19635": 12697444352.0, "19640": 12697444352.0, "19645": 12697444352.0, "19650": 12697444352.0, "19655": 12697444352.0, "19660": 12697444352.0, "19665": 12697444352.0, "19670": 12697444352.0, "19675": 12697444352.0, "19680": 12697444352.0, "19685": 12697444352.0, "19690": 12697444352.0, "19695": 12697444352.0, "19700": 12697444352.0, "19705": 12697444352.0, "19710": 12697444352.0, "19715": 12697444352.0, "19720": 12697444352.0, "19725": 12697444352.0, "19730": 12697444352.0, "19735": 12697444352.0, "19740": 12697444352.0, "19745": 12697444352.0, "19750": 12697444352.0, "19755": 12697444352.0, "19760": 12697444352.0, "19765": 12697444352.0, "19770": 12697444352.0, "19775": 12697444352.0, "19780": 12697444352.0, "19785": 12697444352.0, "19790": 12697444352.0, "19795": 12697444352.0, "19800": 12697444352.0, "19805": 12697444352.0, "19810": 12697444352.0, "19815": 12697444352.0, "19820": 12697444352.0, "19825": 12697444352.0, "19830": 12697444352.0, "19835": 12697444352.0, "19840": 12697444352.0, "19845": 12697444352.0, "19850": 12697444352.0, "19855": 12697444352.0, "19860": 12697444352.0, "19865": 12697444352.0, "19870": 12697444352.0, "19875": 12697444352.0, "19880": 12697444352.0, "19885": 12697444352.0, "19890": 12697444352.0, "19895": 12697444352.0, "19900": 12697444352.0, "19905": 12697444352.0, "19910": 12697444352.0, "19915": 12697444352.0, "19920": 12697444352.0, "19925": 12697444352.0, "19930": 12697444352.0, "19935": 12697444352.0, "19940": 12697444352.0, "19945": 12697444352.0, "19950": 12697444352.0, "19955": 12697444352.0, "19960": 12697444352.0, "19965": 12697444352.0, "19970": 12697444352.0, "19975": 12697444352.0, "19980": 12697444352.0, "19985": 12697444352.0, "19990": 12697444352.0, "19995": 12697444352.0, "20000": 12697444352.0, "20005": 12697444352.0, "20010": 12697444352.0, "20015": 12697444352.0, "20020": 12697444352.0, "20025": 12697444352.0, "20030": 12697444352.0, "20035": 12697444352.0, "20040": 12697444352.0, "20045": 12697444352.0, "20050": 12697444352.0, 
"20055": 12697444352.0, "20060": 12697444352.0, "20065": 12697444352.0, "20070": 12697444352.0, "20075": 12697444352.0, "20080": 12697444352.0, "20085": 12697444352.0, "20090": 12697444352.0, "20095": 12697444352.0, "20100": 12697444352.0, "20105": 12697444352.0, "20110": 12697444352.0, "20115": 12697444352.0, "20120": 12697444352.0, "20125": 12697444352.0, "20130": 12697444352.0, "20135": 12697444352.0, "20140": 12697444352.0, "20145": 12697444352.0, "20150": 12697444352.0, "20155": 12697444352.0, "20160": 12697444352.0, "20165": 12697444352.0, "20170": 12697444352.0, "20175": 12697444352.0, "20180": 12697444352.0, "20185": 12697444352.0, "20190": 12697444352.0, "20195": 12697444352.0, "20200": 12697444352.0, "20205": 12697444352.0, "20210": 12697444352.0, "20215": 12697444352.0, "20220": 12697444352.0, "20225": 12697444352.0, "20230": 12697444352.0, "20235": 12697444352.0, "20240": 12697444352.0, "20245": 12697444352.0, "20250": 12697444352.0, "20255": 12697444352.0, "20260": 12697444352.0, "20265": 12697444352.0, "20270": 12697444352.0, "20275": 12697444352.0, "20280": 12697444352.0, "20285": 12697444352.0, "20290": 12697444352.0, "20295": 12697444352.0, "20300": 12697444352.0, "20305": 12697444352.0, "20310": 12697444352.0, "20315": 12697444352.0, "20320": 12697444352.0, "20325": 12697444352.0, "20330": 12697444352.0, "20335": 12697444352.0, "20340": 12697444352.0, "20345": 12697444352.0, "20350": 12697444352.0, "20355": 12697444352.0, "20360": 12697444352.0, "20365": 12697444352.0, "20370": 12697444352.0, "20375": 12697444352.0, "20380": 12697444352.0, "20385": 12697444352.0, "20390": 12697444352.0, "20395": 12697444352.0, "20400": 12697444352.0, "20405": 12697444352.0, "20410": 12697444352.0, "20415": 12697444352.0, "20420": 12697444352.0, "20425": 12697444352.0, "20430": 12697444352.0, "20435": 12697444352.0, "20440": 12697444352.0, "20445": 12697444352.0, "20450": 12697444352.0, "20455": 12697444352.0, "20460": 12697444352.0, "20465": 12697444352.0, "20470": 12697444352.0, "20475": 12697444352.0, "20480": 12697444352.0, "20485": 12697444352.0, "20490": 12697444352.0, "20495": 12697444352.0, "20500": 12697444352.0, "20505": 12697444352.0, "20510": 12697444352.0, "20515": 12697444352.0, "20520": 12697444352.0, "20525": 12697444352.0, "20530": 12697444352.0, "20535": 12697444352.0, "20540": 12697444352.0, "20545": 12697444352.0, "20550": 12697444352.0, "20555": 12697444352.0, "20560": 12697444352.0, "20565": 12697444352.0, "20570": 12697444352.0, "20575": 12697444352.0, "20580": 12697444352.0, "20585": 12697444352.0, "20590": 12697444352.0, "20595": 12697444352.0, "20600": 12697444352.0, "20605": 12697444352.0, "20610": 12697444352.0, "20615": 12697444352.0, "20620": 12697444352.0, "20625": 12697444352.0, "20630": 12697444352.0, "20635": 12697444352.0, "20640": 12697444352.0, "20645": 12697444352.0, "20650": 12697444352.0, "20655": 12697444352.0, "20660": 12697444352.0, "20665": 12697444352.0, "20670": 12697444352.0, "20675": 12697444352.0, "20680": 12697444352.0, "20685": 12697444352.0, "20690": 12697444352.0, "20695": 12697444352.0, "20700": 12697444352.0, "20705": 12697444352.0, "20710": 12697444352.0, "20715": 12697444352.0, "20720": 12697444352.0, "20725": 12697444352.0, "20730": 12697444352.0, "20735": 12697444352.0, "20740": 12697444352.0, "20745": 12697444352.0, "20750": 12697444352.0, "20755": 12697444352.0, "20760": 12697444352.0, "20765": 12697444352.0, "20770": 12697444352.0, "20775": 12697444352.0, "20780": 12697444352.0, "20785": 12697444352.0, "20790": 12697444352.0, 
"20795": 12697444352.0, "20800": 12697444352.0, "20805": 12697444352.0, "20810": 12697444352.0, "20815": 12697444352.0, "20820": 12697444352.0, "20825": 12697444352.0, "20830": 12697444352.0, "20835": 12697444352.0, "20840": 12697444352.0, "20845": 12697444352.0, "20850": 12697444352.0, "20855": 12697444352.0, "20860": 12697444352.0, "20865": 12697444352.0, "20870": 12697444352.0, "20875": 12697444352.0, "20880": 12697444352.0, "20885": 12697444352.0, "20890": 12697444352.0, "20895": 12697444352.0, "20900": 12697444352.0, "20905": 12697444352.0, "20910": 12697444352.0, "20915": 12697444352.0, "20920": 12697444352.0, "20925": 12697444352.0, "20930": 12697444352.0, "20935": 12697444352.0, "20940": 12697444352.0, "20945": 12697444352.0, "20950": 12697444352.0, "20955": 12697444352.0, "20960": 12697444352.0, "20965": 12697444352.0, "20970": 12697444352.0, "20975": 12697444352.0, "20980": 12697444352.0, "20985": 12697444352.0, "20990": 12697444352.0, "20995": 12697444352.0, "21000": 12697444352.0, "21005": 12697444352.0, "21010": 12697444352.0, "21015": 12697444352.0, "21020": 12697444352.0, "21025": 12697444352.0, "21030": 12697444352.0, "21035": 12697444352.0, "21040": 12697444352.0, "21045": 12697444352.0, "21050": 12697444352.0, "21055": 12697444352.0, "21060": 12697444352.0, "21065": 12697444352.0, "21070": 12697444352.0, "21075": 12697444352.0, "21080": 12697444352.0, "21085": 12697444352.0, "21090": 12697444352.0, "21095": 12697444352.0, "21100": 12697444352.0, "21105": 12697444352.0, "21110": 12697444352.0, "21115": 12697444352.0, "21120": 12697444352.0, "21125": 12697444352.0, "21130": 12697444352.0, "21135": 12697444352.0, "21140": 12697444352.0, "21145": 12697444352.0, "21150": 12697444352.0, "21155": 12697444352.0, "21160": 12697444352.0, "21165": 12697444352.0, "21170": 12697444352.0, "21175": 12697444352.0, "21180": 12697444352.0, "21185": 12697444352.0, "21190": 12697444352.0, "21195": 12697444352.0, "21200": 12697444352.0, "21205": 12697444352.0, "21210": 12697444352.0, "21215": 12697444352.0, "21220": 12697444352.0, "21225": 12697444352.0, "21230": 12697444352.0, "21235": 12697444352.0, "21240": 12697444352.0, "21245": 12697444352.0, "21250": 12697444352.0, "21255": 12697444352.0, "21260": 12697444352.0, "21265": 12697444352.0, "21270": 12697444352.0, "21275": 12697444352.0, "21280": 12697444352.0, "21285": 12697444352.0, "21290": 12697444352.0, "21295": 12697444352.0, "21300": 12697444352.0, "21305": 12697444352.0, "21310": 12697444352.0, "21315": 12697444352.0, "21320": 12697444352.0, "21325": 12697444352.0, "21330": 12697444352.0, "21335": 12697444352.0, "21340": 12697444352.0, "21345": 12697444352.0, "21350": 12697444352.0, "21355": 12697444352.0, "21360": 12697444352.0, "21365": 12697444352.0, "21370": 12697444352.0, "21375": 12697444352.0, "21380": 12697444352.0, "21385": 12697444352.0, "21390": 12697444352.0, "21395": 12697444352.0, "21400": 12697444352.0, "21405": 12697444352.0, "21410": 12697444352.0, "21415": 12697444352.0, "21420": 12697444352.0, "21425": 12697444352.0, "21430": 12697444352.0, "21435": 12697444352.0, "21440": 12697444352.0, "21445": 12697444352.0, "21450": 12697444352.0, "21455": 12697444352.0, "21460": 12697444352.0, "21465": 12697444352.0, "21470": 12697444352.0, "21475": 12697444352.0, "21480": 12697444352.0, "21485": 12697444352.0, "21490": 12697444352.0, "21495": 12697444352.0, "21500": 12697444352.0, "21505": 12697444352.0, "21510": 12697444352.0, "21515": 12697444352.0, "21520": 12697444352.0, "21525": 12697444352.0, "21530": 12697444352.0, 
"21535": 12697444352.0, "21540": 12697444352.0, "21545": 12697444352.0, "21550": 12697444352.0, "21555": 12697444352.0, "21560": 12697444352.0, "21565": 12697444352.0, "21570": 12697444352.0, "21575": 12697444352.0, "21580": 12697444352.0, "21585": 12697444352.0, "21590": 12697444352.0, "21595": 12697444352.0, "21600": 12697444352.0, "21605": 12697444352.0, "21610": 12697444352.0, "21615": 12697444352.0, "21620": 12697444352.0, "21625": 12697444352.0, "21630": 12697444352.0, "21635": 12697444352.0, "21640": 12697444352.0, "21645": 12697444352.0, "21650": 12697444352.0, "21655": 12697444352.0, "21660": 12697444352.0, "21665": 12697444352.0, "21670": 12697444352.0, "21675": 12697444352.0, "21680": 12697444352.0, "21685": 12697444352.0, "21690": 12697444352.0, "21695": 12697444352.0, "21700": 12697444352.0, "21705": 12697444352.0, "21710": 12697444352.0, "21715": 12697444352.0, "21720": 12697444352.0, "21725": 12697444352.0, "21730": 12697444352.0, "21735": 12697444352.0, "21740": 12697444352.0, "21745": 12697444352.0, "21750": 12697444352.0, "21755": 12697444352.0, "21760": 12697444352.0, "21765": 12697444352.0, "21770": 12697444352.0, "21775": 12697444352.0, "21780": 12697444352.0, "21785": 12697444352.0, "21790": 12697444352.0, "21795": 12697444352.0, "21800": 12697444352.0, "21805": 12697444352.0, "21810": 12697444352.0, "21815": 12697444352.0, "21820": 12697444352.0, "21825": 12697444352.0, "21830": 12697444352.0, "21835": 12697444352.0, "21840": 12697444352.0, "21845": 12697444352.0, "21850": 12697444352.0, "21855": 12697444352.0, "21860": 12697444352.0, "21865": 12697444352.0, "21870": 12697444352.0, "21875": 12697444352.0, "21880": 12697444352.0, "21885": 12697444352.0, "21890": 12697444352.0, "21895": 12697444352.0, "21900": 12697444352.0, "21905": 12697444352.0, "21910": 12697444352.0, "21915": 12697444352.0, "21920": 12697444352.0, "21925": 12697444352.0, "21930": 12697444352.0, "21935": 12697444352.0, "21940": 12697444352.0, "21945": 12697444352.0, "21950": 12697444352.0, "21955": 12697444352.0, "21960": 12697444352.0, "21965": 12697444352.0, "21970": 12697444352.0, "21975": 12697444352.0, "21980": 12697444352.0, "21985": 12697444352.0, "21990": 12697444352.0, "21995": 12697444352.0, "22000": 12697444352.0, "22005": 12697444352.0, "22010": 12697444352.0, "22015": 12697444352.0, "22020": 12697444352.0, "22025": 12697444352.0, "22030": 12697444352.0, "22035": 12697444352.0, "22040": 12697444352.0, "22045": 12697444352.0, "22050": 12697444352.0, "22055": 12697444352.0, "22060": 12697444352.0, "22065": 12697444352.0, "22070": 12697444352.0, "22075": 12697444352.0, "22080": 12697444352.0, "22085": 12697444352.0, "22090": 12697444352.0, "22095": 12697444352.0, "22100": 12697444352.0, "22105": 12697444352.0, "22110": 12697444352.0, "22115": 12697444352.0, "22120": 12697444352.0, "22125": 12697444352.0, "22130": 12697444352.0, "22135": 12697444352.0, "22140": 12697444352.0, "22145": 12697444352.0, "22150": 12697444352.0, "22155": 12697444352.0, "22160": 12697444352.0, "22165": 12697444352.0, "22170": 12697444352.0, "22175": 12697444352.0, "22180": 12697444352.0, "22185": 12697444352.0, "22190": 12697444352.0, "22195": 12697444352.0, "22200": 12697444352.0, "22205": 12697444352.0, "22210": 12697444352.0, "22215": 12697444352.0, "22220": 12697444352.0, "22225": 12697444352.0, "22230": 12697444352.0, "22235": 12697444352.0, "22240": 12697444352.0, "22245": 12697444352.0, "22250": 12697444352.0, "22255": 12697444352.0, "22260": 12697444352.0, "22265": 12697444352.0, "22270": 12697444352.0, 
"22275": 12697444352.0, "22280": 12697444352.0, "22285": 12697444352.0, "22290": 12697444352.0, "22295": 12697444352.0, "22300": 12697444352.0, "22305": 12697444352.0, "22310": 12697444352.0, "22315": 12697444352.0, "22320": 12697444352.0, "22325": 12697444352.0, "22330": 12697444352.0, "22335": 12697444352.0, "22340": 12697444352.0, "22345": 12697444352.0, "22350": 12697444352.0, "22355": 12697444352.0, "22360": 12697444352.0, "22365": 12697444352.0, "22370": 12697444352.0, "22375": 12697444352.0, "22380": 12697444352.0, "22385": 12697444352.0, "22390": 12697444352.0, "22395": 12697444352.0, "22400": 12697444352.0, "22405": 12697444352.0, "22410": 12697444352.0, "22415": 12697444352.0, "22420": 12697444352.0, "22425": 12697444352.0, "22430": 12697444352.0, "22435": 12697444352.0, "22440": 12697444352.0, "22445": 12697444352.0, "22450": 12697444352.0, "22455": 12697444352.0, "22460": 12697444352.0, "22465": 12697444352.0, "22470": 12697444352.0, "22475": 12697444352.0, "22480": 12697444352.0, "22485": 12697444352.0, "22490": 12697444352.0, "22495": 12697444352.0, "22500": 12697444352.0, "22505": 12697444352.0, "22510": 12697444352.0, "22515": 12697444352.0, "22520": 12697444352.0, "22525": 12697444352.0, "22530": 12697444352.0, "22535": 12697444352.0, "22540": 12697444352.0, "22545": 12697444352.0, "22550": 12697444352.0, "22555": 12697444352.0, "22560": 12697444352.0, "22565": 12697444352.0, "22570": 12697444352.0, "22575": 12697444352.0, "22580": 12697444352.0, "22585": 12697444352.0, "22590": 12697444352.0, "22595": 12697444352.0, "22600": 12697444352.0, "22605": 12697444352.0, "22610": 12697444352.0, "22615": 12697444352.0, "22620": 12697444352.0, "22625": 12697444352.0, "22630": 12697444352.0, "22635": 12697444352.0, "22640": 12697444352.0, "22645": 12697444352.0, "22650": 12697444352.0, "22655": 12697444352.0, "22660": 12697444352.0, "22665": 12697444352.0, "22670": 12697444352.0, "22675": 12697444352.0, "22680": 12697444352.0, "22685": 12697444352.0, "22690": 12697444352.0, "22695": 12697444352.0, "22700": 12697444352.0, "22705": 12697444352.0, "22710": 12697444352.0, "22715": 12697444352.0, "22720": 12697444352.0, "22725": 12697444352.0, "22730": 12697444352.0, "22735": 12697444352.0, "22740": 12697444352.0, "22745": 12697444352.0, "22750": 12697444352.0, "22755": 12697444352.0, "22760": 12697444352.0, "22765": 12697444352.0, "22770": 12697444352.0, "22775": 12697444352.0, "22780": 12697444352.0, "22785": 12697444352.0, "22790": 12697444352.0, "22795": 12697444352.0, "22800": 12697444352.0, "22805": 12697444352.0, "22810": 12697444352.0, "22815": 12697444352.0, "22820": 12697444352.0, "22825": 12697444352.0, "22830": 12697444352.0, "22835": 12697444352.0, "22840": 12697444352.0, "22845": 12697444352.0, "22850": 12697444352.0, "22855": 12697444352.0, "22860": 12697444352.0, "22865": 12697444352.0, "22870": 12697444352.0, "22875": 12697444352.0, "22880": 12697444352.0, "22885": 12697444352.0, "22890": 12697444352.0, "22895": 12697444352.0, "22900": 12697444352.0, "22905": 12697444352.0, "22910": 12697444352.0, "22915": 12697444352.0, "22920": 12697444352.0, "22925": 12697444352.0, "22930": 12697444352.0, "22935": 12697444352.0, "22940": 12697444352.0, "22945": 12697444352.0, "22950": 12697444352.0, "22955": 12697444352.0, "22960": 12697444352.0, "22965": 12697444352.0, "22970": 12697444352.0, "22975": 12697444352.0, "22980": 12697444352.0, "22985": 12697444352.0, "22990": 12697444352.0, "22995": 12697444352.0, "23000": 12697444352.0, "23005": 12697444352.0, "23010": 12697444352.0, 
"23015": 12697444352.0, "23020": 12697444352.0, "23025": 12697444352.0, "23030": 12697444352.0, "23035": 12697444352.0, "23040": 12697444352.0, "23045": 12697444352.0, "23050": 12697444352.0, "23055": 12697444352.0, "23060": 12697444352.0, "23065": 12697444352.0, "23070": 12697444352.0, "23075": 12697444352.0, "23080": 12697444352.0, "23085": 12697444352.0, "23090": 12697444352.0, "23095": 12697444352.0, "23100": 12697444352.0, "23105": 12697444352.0, "23110": 12697444352.0, "23115": 12697444352.0, "23120": 12697444352.0, "23125": 12697444352.0, "23130": 12697444352.0, "23135": 12697444352.0, "23140": 12697444352.0, "23145": 12697444352.0, "23150": 12697444352.0, "23155": 12697444352.0, "23160": 12697444352.0, "23165": 12697444352.0, "23170": 12697444352.0, "23175": 12697444352.0, "23180": 12697444352.0, "23185": 12697444352.0, "23190": 12697444352.0, "23195": 12697444352.0, "23200": 12697444352.0, "23205": 12697444352.0, "23210": 12697444352.0, "23215": 12697444352.0, "23220": 12697444352.0, "23225": 12697444352.0, "23230": 12697444352.0, "23235": 12697444352.0, "23240": 12697444352.0, "23245": 12697444352.0, "23250": 12697444352.0, "23255": 12697444352.0, "23260": 12697444352.0, "23265": 12697444352.0, "23270": 12697444352.0, "23275": 12697444352.0, "23280": 12697444352.0, "23285": 12697444352.0, "23290": 12697444352.0, "23295": 12697444352.0, "23300": 12697444352.0, "23305": 12697444352.0, "23310": 12697444352.0, "23315": 12697444352.0, "23320": 12697444352.0, "23325": 12697444352.0, "23330": 12697444352.0, "23335": 12697444352.0, "23340": 12697444352.0, "23345": 12697444352.0, "23350": 12697444352.0, "23355": 12697444352.0, "23360": 12697444352.0, "23365": 12697444352.0, "23370": 12697444352.0, "23375": 12697444352.0, "23380": 12697444352.0, "23385": 12697444352.0, "23390": 12697444352.0, "23395": 12697444352.0, "23400": 12697444352.0, "23405": 12697444352.0, "23410": 12697444352.0, "23415": 12697444352.0, "23420": 12697444352.0, "23425": 12697444352.0, "23430": 12697444352.0, "23435": 12697444352.0, "23440": 12697444352.0, "23445": 12697444352.0, "23450": 12697444352.0, "23455": 12697444352.0, "23460": 12697444352.0, "23465": 12697444352.0, "23470": 12697444352.0, "23475": 12697444352.0, "23480": 12697444352.0, "23485": 12697444352.0, "23490": 12697444352.0, "23495": 12697444352.0, "23500": 12697444352.0, "23505": 12697444352.0, "23510": 12697444352.0, "23515": 12697444352.0, "23520": 12697444352.0, "23525": 12697444352.0, "23530": 12697444352.0, "23535": 12697444352.0, "23540": 12697444352.0, "23545": 12697444352.0, "23550": 12697444352.0, "23555": 12697444352.0, "23560": 12697444352.0, "23565": 12697444352.0, "23570": 12697444352.0, "23575": 12697444352.0, "23580": 12697444352.0, "23585": 12697444352.0, "23590": 12697444352.0, "23595": 12697444352.0, "23600": 12697444352.0, "23605": 12697444352.0, "23610": 12697444352.0, "23615": 12697444352.0, "23620": 12697444352.0, "23625": 12697444352.0, "23630": 12697444352.0, "23635": 12697444352.0, "23640": 12697444352.0, "23645": 12697444352.0, "23650": 12697444352.0, "23655": 12697444352.0, "23660": 12697444352.0, "23665": 12697444352.0, "23670": 12697444352.0, "23675": 12697444352.0, "23680": 12697444352.0, "23685": 12697444352.0, "23690": 12697444352.0, "23695": 12697444352.0, "23700": 12697444352.0, "23705": 12697444352.0, "23710": 12697444352.0, "23715": 12697444352.0, "23720": 12697444352.0, "23725": 12697444352.0, "23730": 12697444352.0, "23735": 12697444352.0, "23740": 12697444352.0, "23745": 12697444352.0, "23750": 12697444352.0, 
"23755": 12697444352.0, "23760": 12697444352.0, "23765": 12697444352.0, "23770": 12697444352.0, "23775": 12697444352.0, "23780": 12697444352.0, "23785": 12697444352.0, "23790": 12697444352.0, "23795": 12697444352.0, "23800": 12697444352.0, "23805": 12697444352.0, "23810": 12697444352.0, "23815": 12697444352.0, "23820": 12697444352.0, "23825": 12697444352.0, "23830": 12697444352.0, "23835": 12697444352.0, "23840": 12697444352.0, "23845": 12697444352.0, "23850": 12697444352.0, "23855": 12697444352.0, "23860": 12697444352.0, "23865": 12697444352.0, "23870": 12697444352.0, "23875": 12697444352.0, "23880": 12697444352.0, "23885": 12697444352.0, "23890": 12697444352.0, "23895": 12697444352.0, "23900": 12697444352.0, "23905": 12697444352.0, "23910": 12697444352.0, "23915": 12697444352.0, "23920": 12697444352.0, "23925": 12697444352.0, "23930": 12697444352.0, "23935": 12697444352.0, "23940": 12697444352.0, "23945": 12697444352.0, "23950": 12697444352.0, "23955": 12697444352.0, "23960": 12697444352.0, "23965": 12697444352.0, "23970": 12697444352.0, "23975": 12697444352.0, "23980": 12697444352.0, "23985": 12697444352.0, "23990": 12697444352.0, "23995": 12697444352.0, "24000": 12697444352.0, "24005": 12697444352.0, "24010": 12697444352.0, "24015": 12697444352.0, "24020": 12697444352.0, "24025": 12697444352.0, "24030": 12697444352.0, "24035": 12697444352.0, "24040": 12697444352.0, "24045": 12697444352.0, "24050": 12697444352.0, "24055": 12697444352.0, "24060": 12697444352.0, "24065": 12697444352.0, "24070": 12697444352.0, "24075": 12697444352.0, "24080": 12697444352.0, "24085": 12697444352.0, "24090": 12697444352.0, "24095": 12697444352.0, "24100": 12697444352.0, "24105": 12697444352.0, "24110": 12697444352.0, "24115": 12697444352.0, "24120": 12697444352.0, "24125": 12697444352.0, "24130": 12697444352.0, "24135": 12697444352.0, "24140": 12697444352.0, "24145": 12697444352.0, "24150": 12697444352.0, "24155": 12697444352.0, "24160": 12697444352.0, "24165": 12697444352.0, "24170": 12697444352.0, "24175": 12697444352.0, "24180": 12697444352.0, "24185": 12697444352.0, "24190": 12697444352.0, "24195": 12697444352.0, "24200": 12697444352.0, "24205": 12697444352.0, "24210": 12697444352.0, "24215": 12697444352.0, "24220": 12697444352.0, "24225": 12697444352.0, "24230": 12697444352.0, "24235": 12697444352.0, "24240": 12697444352.0, "24245": 12697444352.0, "24250": 12697444352.0, "24255": 12697444352.0, "24260": 12697444352.0, "24265": 12697444352.0, "24270": 12697444352.0, "24275": 12697444352.0, "24280": 12697444352.0, "24285": 12697444352.0, "24290": 12697444352.0, "24295": 12697444352.0, "24300": 12697444352.0, "24305": 12697444352.0, "24310": 12697444352.0, "24315": 12697444352.0, "24320": 12697444352.0, "24325": 12697444352.0, "24330": 12697444352.0, "24335": 12697444352.0, "24340": 12697444352.0, "24345": 12697444352.0, "24350": 12697444352.0, "24355": 12697444352.0, "24360": 12697444352.0, "24365": 12697444352.0, "24370": 12697444352.0, "24375": 12697444352.0, "24380": 12697444352.0, "24385": 12697444352.0, "24390": 12697444352.0, "24395": 12697444352.0, "24400": 12697444352.0, "24405": 12697444352.0, "24410": 12697444352.0, "24415": 12697444352.0, "24420": 12697444352.0, "24425": 12697444352.0, "24430": 12697444352.0, "24435": 12697444352.0, "24440": 12697444352.0, "24445": 12697444352.0, "24450": 12697444352.0, "24455": 12697444352.0, "24460": 12697444352.0, "24465": 12697444352.0, "24470": 12697444352.0, "24475": 12697444352.0, "24480": 12697444352.0, "24485": 12697444352.0, "24490": 12697444352.0, 
"24495": 12697444352.0, "24500": 12697444352.0, "24505": 12697444352.0, "24510": 12697444352.0, "24515": 12697444352.0, "24520": 12697444352.0, "24525": 12697444352.0, "24530": 12697444352.0, "24535": 12697444352.0, "24540": 12697444352.0, "24545": 12697444352.0, "24550": 12697444352.0, "24555": 12697444352.0, "24560": 12697444352.0, "24565": 12697444352.0, "24570": 12697444352.0, "24575": 12697444352.0, "24580": 12697444352.0, "24585": 12697444352.0, "24590": 12697444352.0, "24595": 12697444352.0, "24600": 12697444352.0, "24605": 12697444352.0, "24610": 12697444352.0, "24615": 12697444352.0, "24620": 12697444352.0, "24625": 12697444352.0, "24630": 12697444352.0, "24635": 12697444352.0, "24640": 12697444352.0, "24645": 12697444352.0, "24650": 12697444352.0, "24655": 12697444352.0, "24660": 12697444352.0, "24665": 12697444352.0, "24670": 12697444352.0, "24675": 12697444352.0, "24680": 12697444352.0, "24685": 12697444352.0, "24690": 12697444352.0, "24695": 12697444352.0, "24700": 12697444352.0, "24705": 12697444352.0, "24710": 12697444352.0, "24715": 12697444352.0, "24720": 12697444352.0, "24725": 12697444352.0, "24730": 12697444352.0, "24735": 12697444352.0, "24740": 12697444352.0, "24745": 12697444352.0, "24750": 12697444352.0, "24755": 12697444352.0, "24760": 12697444352.0, "24765": 12697444352.0, "24770": 12697444352.0, "24775": 12697444352.0, "24780": 12697444352.0, "24785": 12697444352.0, "24790": 12697444352.0, "24795": 12697444352.0, "24800": 12697444352.0, "24805": 12697444352.0, "24810": 12697444352.0, "24815": 12697444352.0, "24820": 12697444352.0, "24825": 12697444352.0, "24830": 12697444352.0, "24835": 12697444352.0, "24840": 12697444352.0, "24845": 12697444352.0, "24850": 12697444352.0, "24855": 12697444352.0, "24860": 12697444352.0, "24865": 12697444352.0, "24870": 12697444352.0, "24875": 12697444352.0, "24880": 12697444352.0, "24885": 12697444352.0, "24890": 12697444352.0, "24895": 12697444352.0, "24900": 12697444352.0, "24905": 12697444352.0, "24910": 12697444352.0, "24915": 12697444352.0, "24920": 12697444352.0, "24925": 12697444352.0, "24930": 12697444352.0, "24935": 12697444352.0, "24940": 12697444352.0, "24945": 12697444352.0, "24950": 12697444352.0, "24955": 12697444352.0, "24960": 12697444352.0, "24965": 12697444352.0, "24970": 12697444352.0, "24975": 12697444352.0, "24980": 12697444352.0, "24985": 12697444352.0, "24990": 12697444352.0, "24995": 12697444352.0, "25000": 12697444352.0, "25005": 12697444352.0, "25010": 12697444352.0, "25015": 12697444352.0, "25020": 12697444352.0, "25025": 12697444352.0, "25030": 12697444352.0, "25035": 12697444352.0, "25040": 12697444352.0, "25045": 12697444352.0, "25050": 12697444352.0, "25055": 12697444352.0, "25060": 12697444352.0, "25065": 12697444352.0, "25070": 12697444352.0, "25075": 12697444352.0, "25080": 12697444352.0, "25085": 12697444352.0, "25090": 12697444352.0, "25095": 12697444352.0, "25100": 12697444352.0, "25105": 12697444352.0, "25110": 12697444352.0, "25115": 12697444352.0, "25120": 12697444352.0, "25125": 12697444352.0, "25130": 12697444352.0, "25135": 12697444352.0, "25140": 12697444352.0, "25145": 12697444352.0, "25150": 12697444352.0, "25155": 12697444352.0, "25160": 12697444352.0, "25165": 12697444352.0, "25170": 12697444352.0, "25175": 12697444352.0, "25180": 12697444352.0, "25185": 12697444352.0, "25190": 12697444352.0, "25195": 12697444352.0, "25200": 12697444352.0, "25205": 12697444352.0, "25210": 12697444352.0, "25215": 12697444352.0, "25220": 12697444352.0, "25225": 12697444352.0, "25230": 12697444352.0, 
"25235": 12697444352.0, "25240": 12697444352.0, "25245": 12697444352.0, "25250": 12697444352.0, "25255": 12697444352.0, "25260": 12697444352.0, "25265": 12697444352.0, "25270": 12697444352.0, "25275": 12697444352.0, "25280": 12697444352.0, "25285": 12697444352.0, "25290": 12697444352.0, "25295": 12697444352.0, "25300": 12697444352.0, "25305": 12697444352.0, "25310": 12697444352.0, "25315": 12697444352.0, "25320": 12697444352.0, "25325": 12697444352.0, "25330": 12697444352.0, "25335": 12697444352.0, "25340": 12697444352.0, "25345": 12697444352.0, "25350": 12697444352.0, "25355": 12697444352.0, "25360": 12697444352.0, "25365": 12697444352.0, "25370": 12697444352.0, "25375": 12697444352.0, "25380": 12697444352.0, "25385": 12697444352.0, "25390": 12697444352.0, "25395": 12697444352.0, "25400": 12697444352.0, "25405": 12697444352.0, "25410": 12697444352.0, "25415": 12697444352.0, "25420": 12697444352.0, "25425": 12697444352.0, "25430": 12697444352.0, "25435": 12697444352.0, "25440": 12697444352.0, "25445": 12697444352.0, "25450": 12697444352.0, "25455": 12697444352.0, "25460": 12697444352.0, "25465": 12697444352.0, "25470": 12697444352.0, "25475": 12697444352.0, "25480": 12697444352.0, "25485": 12697444352.0, "25490": 12697444352.0, "25495": 12697444352.0, "25500": 12697444352.0, "25505": 12697444352.0, "25510": 12697444352.0, "25515": 12697444352.0, "25520": 12697444352.0, "25525": 12697444352.0, "25530": 12697444352.0, "25535": 12697444352.0, "25540": 12697444352.0, "25545": 12697444352.0, "25550": 12697444352.0, "25555": 12697444352.0, "25560": 12697444352.0, "25565": 12697444352.0, "25570": 12697444352.0, "25575": 12697444352.0, "25580": 12697444352.0, "25585": 12697444352.0, "25590": 12697444352.0, "25595": 12697444352.0, "25600": 12697444352.0, "25605": 12697444352.0, "25610": 12697444352.0, "25615": 12697444352.0, "25620": 12697444352.0, "25625": 12697444352.0, "25630": 12697444352.0, "25635": 12697444352.0, "25640": 12697444352.0, "25645": 12697444352.0, "25650": 12697444352.0, "25655": 12697444352.0, "25660": 12697444352.0, "25665": 12697444352.0, "25670": 12697444352.0, "25675": 12697444352.0, "25680": 12697444352.0, "25685": 12697444352.0, "25690": 12697444352.0, "25695": 12697444352.0, "25700": 12697444352.0, "25705": 12697444352.0, "25710": 12697444352.0, "25715": 12697444352.0, "25720": 12697444352.0, "25725": 12697444352.0, "25730": 12697444352.0, "25735": 12697444352.0, "25740": 12697444352.0, "25745": 12697444352.0, "25750": 12697444352.0, "25755": 12697444352.0, "25760": 12697444352.0, "25765": 12697444352.0, "25770": 12697444352.0, "25775": 12697444352.0, "25780": 12697444352.0, "25785": 12697444352.0, "25790": 12697444352.0, "25795": 12697444352.0, "25800": 12697444352.0, "25805": 12697444352.0, "25810": 12697444352.0, "25815": 12697444352.0, "25820": 12697444352.0, "25825": 12697444352.0, "25830": 12697444352.0, "25835": 12697444352.0, "25840": 12697444352.0, "25845": 12697444352.0, "25850": 12697444352.0, "25855": 12697444352.0, "25860": 12697444352.0, "25865": 12697444352.0, "25870": 12697444352.0, "25875": 12697444352.0, "25880": 12697444352.0, "25885": 12697444352.0, "25890": 12697444352.0, "25895": 12697444352.0, "25900": 12697444352.0, "25905": 12697444352.0, "25910": 12697444352.0, "25915": 12697444352.0, "25920": 12697444352.0, "25925": 12697444352.0, "25930": 12697444352.0, "25935": 12697444352.0, "25940": 12697444352.0, "25945": 12697444352.0, "25950": 12697444352.0, "25955": 12697444352.0, "25960": 12697444352.0, "25965": 12697444352.0, "25970": 12697444352.0, 
"25975": 12697444352.0, "25980": 12697444352.0, "25985": 12697444352.0, "25990": 12697444352.0, "25995": 12697444352.0, "26000": 12697444352.0, "26005": 12697444352.0, "26010": 12697444352.0, "26015": 12697444352.0, "26020": 12697444352.0, "26025": 12697444352.0, "26030": 12697444352.0, "26035": 12697444352.0, "26040": 12697444352.0, "26045": 12697444352.0, "26050": 12697444352.0, "26055": 12697444352.0, "26060": 12697444352.0, "26065": 12697444352.0, "26070": 12697444352.0, "26075": 12697444352.0, "26080": 12697444352.0, "26085": 12697444352.0, "26090": 12697444352.0, "26095": 12697444352.0, "26100": 12697444352.0, "26105": 12697444352.0, "26110": 12697444352.0, "26115": 12697444352.0, "26120": 12697444352.0, "26125": 12697444352.0, "26130": 12697444352.0, "26135": 12697444352.0, "26140": 12697444352.0, "26145": 12697444352.0, "26150": 12697444352.0, "26155": 12697444352.0, "26160": 12697444352.0, "26165": 12697444352.0, "26170": 12697444352.0, "26175": 12697444352.0, "26180": 12697444352.0, "26185": 12697444352.0, "26190": 12697444352.0, "26195": 12697444352.0, "26200": 12697444352.0, "26205": 12697444352.0, "26210": 12697444352.0, "26215": 12697444352.0, "26220": 12697444352.0, "26225": 12697444352.0, "26230": 12697444352.0, "26235": 12697444352.0, "26240": 12697444352.0, "26245": 12697444352.0, "26250": 12697444352.0, "26255": 12697444352.0, "26260": 12697444352.0, "26265": 12697444352.0, "26270": 12697444352.0, "26275": 12697444352.0, "26280": 12697444352.0, "26285": 12697444352.0, "26290": 12697444352.0, "26295": 12697444352.0, "26300": 12697444352.0, "26305": 12697444352.0, "26310": 12697444352.0, "26315": 12697444352.0, "26320": 12697444352.0, "26325": 12697444352.0, "26330": 12697444352.0, "26335": 12697444352.0, "26340": 12697444352.0, "26345": 12697444352.0, "26350": 12697444352.0, "26355": 12697444352.0, "26360": 12697444352.0, "26365": 12697444352.0, "26370": 12697444352.0, "26375": 12697444352.0, "26380": 12697444352.0, "26385": 12697444352.0, "26390": 12697444352.0, "26395": 12697444352.0, "26400": 12697444352.0, "26405": 12697444352.0, "26410": 12697444352.0, "26415": 12697444352.0, "26420": 12697444352.0, "26425": 12697444352.0, "26430": 12697444352.0, "26435": 12697444352.0, "26440": 12697444352.0, "26445": 12697444352.0, "26450": 12697444352.0, "26455": 12697444352.0, "26460": 12697444352.0, "26465": 12697444352.0, "26470": 12697444352.0, "26475": 12697444352.0, "26480": 12697444352.0, "26485": 12697444352.0, "26490": 12697444352.0, "26495": 12697444352.0, "26500": 12697444352.0, "26505": 12697444352.0, "26510": 12697444352.0, "26515": 12697444352.0, "26520": 12697444352.0, "26525": 12697444352.0, "26530": 12697444352.0, "26535": 12697444352.0, "26540": 12697444352.0, "26545": 12697444352.0, "26550": 12697444352.0, "26555": 12697444352.0, "26560": 12697444352.0, "26565": 12697444352.0, "26570": 12697444352.0, "26575": 12697444352.0, "26580": 12697444352.0, "26585": 12697444352.0, "26590": 12697444352.0, "26595": 12697444352.0, "26600": 12697444352.0, "26605": 12697444352.0, "26610": 12697444352.0, "26615": 12697444352.0, "26620": 12697444352.0, "26625": 12697444352.0, "26630": 12697444352.0, "26635": 12697444352.0, "26640": 12697444352.0, "26645": 12697444352.0, "26650": 12697444352.0, "26655": 12697444352.0, "26660": 12697444352.0, "26665": 12697444352.0, "26670": 12697444352.0, "26675": 12697444352.0, "26680": 12697444352.0, "26685": 12697444352.0, "26690": 12697444352.0, "26695": 12697444352.0, "26700": 12697444352.0, "26705": 12697444352.0, "26710": 12697444352.0, 
"26715": 12697444352.0, "26720": 12697444352.0, "26725": 12697444352.0, "26730": 12697444352.0, "26735": 12697444352.0, "26740": 12697444352.0, "26745": 12697444352.0, "26750": 12697444352.0, "26755": 12697444352.0, "26760": 12697444352.0, "26765": 12697444352.0, "26770": 12697444352.0, "26775": 12697444352.0, "26780": 12697444352.0, "26785": 12697444352.0, "26790": 12697444352.0, "26795": 12697444352.0, "26800": 12697444352.0, "26805": 12697444352.0, "26810": 12697444352.0, "26815": 12697444352.0, "26820": 12697444352.0, "26825": 12697444352.0, "26830": 12697444352.0, "26835": 12697444352.0, "26840": 12697444352.0, "26845": 12697444352.0, "26850": 12697444352.0, "26855": 12697444352.0, "26860": 12697444352.0, "26865": 12697444352.0, "26870": 12697444352.0, "26875": 12697444352.0, "26880": 12697444352.0, "26885": 12697444352.0, "26890": 12697444352.0, "26895": 12697444352.0, "26900": 12697444352.0, "26905": 12697444352.0, "26910": 12697444352.0, "26915": 12697444352.0, "26920": 12697444352.0, "26925": 12697444352.0, "26930": 12697444352.0, "26935": 12697444352.0, "26940": 12697444352.0, "26945": 12697444352.0, "26950": 12697444352.0, "26955": 12697444352.0, "26960": 12697444352.0, "26965": 12697444352.0, "26970": 12697444352.0, "26975": 12697444352.0, "26980": 12697444352.0, "26985": 12697444352.0, "26990": 12697444352.0, "26995": 12697444352.0, "27000": 12697444352.0, "27005": 12697444352.0, "27010": 12697444352.0, "27015": 12697444352.0, "27020": 12697444352.0, "27025": 12697444352.0, "27030": 12697444352.0, "27035": 12697444352.0, "27040": 12697444352.0, "27045": 12697444352.0, "27050": 12697444352.0, "27055": 12697444352.0, "27060": 12697444352.0, "27065": 12697444352.0, "27070": 12697444352.0, "27075": 12697444352.0, "27080": 12697444352.0, "27085": 12697444352.0, "27090": 12697444352.0, "27095": 12697444352.0, "27100": 12697444352.0, "27105": 12697444352.0, "27110": 12697444352.0, "27115": 12697444352.0, "27120": 12697444352.0, "27125": 12697444352.0, "27130": 12697444352.0, "27135": 12697444352.0, "27140": 12697444352.0, "27145": 12697444352.0, "27150": 12697444352.0, "27155": 12697444352.0, "27160": 12697444352.0, "27165": 12697444352.0, "27170": 12697444352.0, "27175": 12697444352.0, "27180": 12697444352.0, "27185": 12697444352.0, "27190": 12697444352.0, "27195": 12697444352.0, "27200": 12697444352.0, "27205": 12697444352.0, "27210": 12697444352.0, "27215": 12697444352.0, "27220": 12697444352.0, "27225": 12697444352.0, "27230": 12697444352.0, "27235": 12697444352.0, "27240": 12697444352.0, "27245": 12697444352.0, "27250": 12697444352.0, "27255": 12697444352.0, "27260": 12697444352.0, "27265": 12697444352.0, "27270": 12697444352.0, "27275": 12697444352.0, "27280": 12697444352.0, "27285": 12697444352.0, "27290": 12697444352.0, "27295": 12697444352.0, "27300": 12697444352.0, "27305": 12697444352.0, "27310": 12697444352.0, "27315": 12697444352.0, "27320": 12697444352.0, "27325": 12697444352.0, "27330": 12697444352.0, "27335": 12697444352.0, "27340": 12697444352.0, "27345": 12697444352.0, "27350": 12697444352.0, "27355": 12697444352.0, "27360": 12697444352.0, "27365": 12697444352.0, "27370": 12697444352.0, "27375": 12697444352.0, "27380": 12697444352.0, "27385": 12697444352.0, "27390": 12697444352.0, "27395": 12697444352.0, "27400": 12697444352.0, "27405": 12697444352.0, "27410": 12697444352.0, "27415": 12697444352.0, "27420": 12697444352.0, "27425": 12697444352.0, "27430": 12697444352.0, "27435": 12697444352.0, "27440": 12697444352.0, "27445": 12697444352.0, "27450": 12697444352.0, 
"27455": 12697444352.0, "27460": 12697444352.0, "27465": 12697444352.0, "27470": 12697444352.0, "27475": 12697444352.0, "27480": 12697444352.0, "27485": 12697444352.0, "27490": 12697444352.0, "27495": 12697444352.0, "27500": 12697444352.0, "27505": 12697444352.0, "27510": 12697444352.0, "27515": 12697444352.0, "27520": 12697444352.0, "27525": 12697444352.0, "27530": 12697444352.0, "27535": 12697444352.0, "27540": 12697444352.0, "27545": 12697444352.0, "27550": 12697444352.0, "27555": 12697444352.0, "27560": 12697444352.0, "27565": 12697444352.0, "27570": 12697444352.0, "27575": 12697444352.0, "27580": 12697444352.0, "27585": 12697444352.0, "27590": 12697444352.0, "27595": 12697444352.0, "27600": 12697444352.0, "27605": 12697444352.0, "27610": 12697444352.0, "27615": 12697444352.0, "27620": 12697444352.0, "27625": 12697444352.0, "27630": 12697444352.0, "27635": 12697444352.0, "27640": 12697444352.0, "27645": 12697444352.0, "27650": 12697444352.0, "27655": 12697444352.0, "27660": 12697444352.0, "27665": 12697444352.0, "27670": 12697444352.0, "27675": 12697444352.0, "27680": 12697444352.0, "27685": 12697444352.0, "27690": 12697444352.0, "27695": 12697444352.0, "27700": 12697444352.0, "27705": 12697444352.0, "27710": 12697444352.0, "27715": 12697444352.0, "27720": 12697444352.0, "27725": 12697444352.0, "27730": 12697444352.0, "27735": 12697444352.0, "27740": 12697444352.0, "27745": 12697444352.0, "27750": 12697444352.0, "27755": 12697444352.0, "27760": 12697444352.0, "27765": 12697444352.0, "27770": 12697444352.0, "27775": 12697444352.0, "27780": 12697444352.0, "27785": 12697444352.0, "27790": 12697444352.0, "27795": 12697444352.0, "27800": 12697444352.0, "27805": 12697444352.0, "27810": 12697444352.0, "27815": 12697444352.0, "27820": 12697444352.0, "27825": 12697444352.0, "27830": 12697444352.0, "27835": 12697444352.0, "27840": 12697444352.0, "27845": 12697444352.0, "27850": 12697444352.0, "27855": 12697444352.0, "27860": 12697444352.0, "27865": 12697444352.0, "27870": 12697444352.0, "27875": 12697444352.0, "27880": 12697444352.0, "27885": 12697444352.0, "27890": 12697444352.0, "27895": 12697444352.0, "27900": 12697444352.0, "27905": 12697444352.0, "27910": 12697444352.0, "27915": 12697444352.0, "27920": 12697444352.0, "27925": 12697444352.0, "27930": 12697444352.0, "27935": 12697444352.0, "27940": 12697444352.0, "27945": 12697444352.0, "27950": 12697444352.0, "27955": 12697444352.0, "27960": 12697444352.0, "27965": 12697444352.0, "27970": 12697444352.0, "27975": 12697444352.0, "27980": 12697444352.0, "27985": 12697444352.0, "27990": 12697444352.0, "27995": 12697444352.0, "28000": 12697444352.0, "28005": 12697444352.0, "28010": 12697444352.0, "28015": 12697444352.0, "28020": 12697444352.0, "28025": 12697444352.0, "28030": 12697444352.0, "28035": 12697444352.0, "28040": 12697444352.0, "28045": 12697444352.0, "28050": 12697444352.0, "28055": 12697444352.0, "28060": 12697444352.0, "28065": 12697444352.0, "28070": 12697444352.0, "28075": 12697444352.0, "28080": 12697444352.0, "28085": 12697444352.0, "28090": 12697444352.0, "28095": 12697444352.0, "28100": 12697444352.0, "28105": 12697444352.0, "28110": 12697444352.0, "28115": 12697444352.0, "28120": 12697444352.0, "28125": 12697444352.0, "28130": 12697444352.0, "28135": 12697444352.0, "28140": 12697444352.0, "28145": 12697444352.0, "28150": 12697444352.0, "28155": 12697444352.0, "28160": 12697444352.0, "28165": 12697444352.0, "28170": 12697444352.0, "28175": 12697444352.0, "28180": 12697444352.0, "28185": 12697444352.0, "28190": 12697444352.0, 
"28195": 12697444352.0, "28200": 12697444352.0, "28205": 12697444352.0, "28210": 12697444352.0, "28215": 12697444352.0, "28220": 12697444352.0, "28225": 12697444352.0, "28230": 12697444352.0, "28235": 12697444352.0, "28240": 12697444352.0, "28245": 12697444352.0, "28250": 12697444352.0, "28255": 12697444352.0, "28260": 12697444352.0, "28265": 12697444352.0, "28270": 12697444352.0, "28275": 12697444352.0, "28280": 12697444352.0, "28285": 12697444352.0, "28290": 12697444352.0, "28295": 12697444352.0, "28300": 12697444352.0, "28305": 12697444352.0, "28310": 12697444352.0, "28315": 12697444352.0, "28320": 12697444352.0, "28325": 12697444352.0, "28330": 12697444352.0, "28335": 12697444352.0, "28340": 12697444352.0, "28345": 12697444352.0, "28350": 12697444352.0, "28355": 12697444352.0, "28360": 12697444352.0, "28365": 12697444352.0, "28370": 12697444352.0, "28375": 12697444352.0, "28380": 12697444352.0, "28385": 12697444352.0, "28390": 12697444352.0, "28395": 12697444352.0, "28400": 12697444352.0, "28405": 12697444352.0, "28410": 12697444352.0, "28415": 12697444352.0, "28420": 12697444352.0, "28425": 12697444352.0, "28430": 12697444352.0, "28435": 12697444352.0, "28440": 12697444352.0, "28445": 12697444352.0, "28450": 12697444352.0, "28455": 12697444352.0, "28460": 12697444352.0, "28465": 12697444352.0, "28470": 12697444352.0, "28475": 12697444352.0, "28480": 12697444352.0, "28485": 12697444352.0, "28490": 12697444352.0, "28495": 12697444352.0, "28500": 12697444352.0, "28505": 12697444352.0, "28510": 12697444352.0, "28515": 12697444352.0, "28520": 12697444352.0, "28525": 12697444352.0, "28530": 12697444352.0, "28535": 12697444352.0, "28540": 12697444352.0, "28545": 12697444352.0, "28550": 12697444352.0, "28555": 12697444352.0, "28560": 12697444352.0, "28565": 12697444352.0, "28570": 12697444352.0, "28575": 12697444352.0, "28580": 12697444352.0, "28585": 12697444352.0, "28590": 12697444352.0, "28595": 12697444352.0, "28600": 12697444352.0, "28605": 12697444352.0, "28610": 12697444352.0, "28615": 12697444352.0, "28620": 12697444352.0, "28625": 12697444352.0, "28630": 12697444352.0, "28635": 12697444352.0, "28640": 12697444352.0, "28645": 12697444352.0, "28650": 12697444352.0, "28655": 12697444352.0, "28660": 12697444352.0, "28665": 12697444352.0, "28670": 12697444352.0, "28675": 12697444352.0, "28680": 12697444352.0, "28685": 12697444352.0, "28690": 12697444352.0, "28695": 12697444352.0, "28700": 12697444352.0, "28705": 12697444352.0, "28710": 12697444352.0, "28715": 12697444352.0, "28720": 12697444352.0, "28725": 12697444352.0, "28730": 12697444352.0, "28735": 12697444352.0, "28740": 12697444352.0, "28745": 12697444352.0, "28750": 12697444352.0, "28755": 12697444352.0, "28760": 12697444352.0, "28765": 12697444352.0, "28770": 12697444352.0, "28775": 12697444352.0, "28780": 12697444352.0, "28785": 12697444352.0, "28790": 12697444352.0, "28795": 12697444352.0, "28800": 12697444352.0, "28805": 12697444352.0, "28810": 12697444352.0, "28815": 12697444352.0, "28820": 12697444352.0, "28825": 12697444352.0, "28830": 12697444352.0, "28835": 12697444352.0, "28840": 12697444352.0, "28845": 12697444352.0, "28850": 12697444352.0, "28855": 12697444352.0, "28860": 12697444352.0, "28865": 12697444352.0, "28870": 12697444352.0, "28875": 12697444352.0, "28880": 12697444352.0, "28885": 12697444352.0, "28890": 12697444352.0, "28895": 12697444352.0, "28900": 12697444352.0, "28905": 12697444352.0, "28910": 12697444352.0, "28915": 12697444352.0, "28920": 12697444352.0, "28925": 12697444352.0, "28930": 12697444352.0, 
"28935": 12697444352.0, "28940": 12697444352.0, "28945": 12697444352.0, "28950": 12697444352.0, "28955": 12697444352.0, "28960": 12697444352.0, "28965": 12697444352.0, "28970": 12697444352.0, "28975": 12697444352.0, "28980": 12697444352.0, "28985": 12697444352.0, "28990": 12697444352.0, "28995": 12697444352.0, "29000": 12697444352.0, "29005": 12697444352.0, "29010": 12697444352.0, "29015": 12697444352.0, "29020": 12697444352.0, "29025": 12697444352.0, "29030": 12697444352.0, "29035": 12697444352.0, "29040": 12697444352.0, "29045": 12697444352.0, "29050": 12697444352.0, "29055": 12697444352.0, "29060": 12697444352.0, "29065": 12697444352.0, "29070": 12697444352.0, "29075": 12697444352.0, "29080": 12697444352.0, "29085": 12697444352.0, "29090": 12697444352.0, "29095": 12697444352.0, "29100": 12697444352.0, "29105": 12697444352.0, "29110": 12697444352.0, "29115": 12697444352.0, "29120": 12697444352.0, "29125": 12697444352.0, "29130": 12697444352.0, "29135": 12697444352.0, "29140": 12697444352.0, "29145": 12697444352.0, "29150": 12697444352.0, "29155": 12697444352.0, "29160": 12697444352.0, "29165": 12697444352.0, "29170": 12697444352.0, "29175": 12697444352.0, "29180": 12697444352.0, "29185": 12697444352.0, "29190": 12697444352.0, "29195": 12697444352.0, "29200": 12697444352.0, "29205": 12697444352.0, "29210": 12697444352.0, "29215": 12697444352.0, "29220": 12697444352.0, "29225": 12697444352.0, "29230": 12697444352.0, "29235": 12697444352.0, "29240": 12697444352.0, "29245": 12697444352.0, "29250": 12697444352.0, "29255": 12697444352.0, "29260": 12697444352.0, "29265": 12697444352.0, "29270": 12697444352.0, "29275": 12697444352.0, "29280": 12697444352.0, "29285": 12697444352.0, "29290": 12697444352.0, "29295": 12697444352.0, "29300": 12697444352.0, "29305": 12697444352.0, "29310": 12697444352.0, "29315": 12697444352.0, "29320": 12697444352.0, "29325": 12697444352.0, "29330": 12697444352.0, "29335": 12697444352.0, "29340": 12697444352.0, "29345": 12697444352.0, "29350": 12697444352.0, "29355": 12697444352.0, "29360": 12697444352.0, "29365": 12697444352.0, "29370": 12697444352.0, "29375": 12697444352.0, "29380": 12697444352.0, "29385": 12697444352.0, "29390": 12697444352.0, "29395": 12697444352.0, "29400": 12697444352.0, "29405": 12697444352.0, "29410": 12697444352.0, "29415": 12697444352.0, "29420": 12697444352.0, "29425": 12697444352.0, "29430": 12697444352.0, "29435": 12697444352.0, "29440": 12697444352.0, "29445": 12697444352.0, "29450": 12697444352.0, "29455": 12697444352.0, "29460": 12697444352.0, "29465": 12697444352.0, "29470": 12697444352.0, "29475": 12697444352.0, "29480": 12697444352.0, "29485": 12697444352.0, "29490": 12697444352.0, "29495": 12697444352.0, "29500": 12697444352.0, "29505": 12697444352.0, "29510": 12697444352.0, "29515": 12697444352.0, "29520": 12697444352.0, "29525": 12697444352.0, "29530": 12697444352.0, "29535": 12697444352.0, "29540": 12697444352.0, "29545": 12697444352.0, "29550": 12697444352.0, "29555": 12697444352.0, "29560": 12697444352.0, "29565": 12697444352.0, "29570": 12697444352.0, "29575": 12697444352.0, "29580": 12697444352.0, "29585": 12697444352.0, "29590": 12697444352.0, "29595": 12697444352.0, "29600": 12697444352.0, "29605": 12697444352.0, "29610": 12697444352.0, "29615": 12697444352.0, "29620": 12697444352.0, "29625": 12697444352.0, "29630": 12697444352.0, "29635": 12697444352.0, "29640": 12697444352.0, "29645": 12697444352.0, "29650": 12697444352.0, "29655": 12697444352.0, "29660": 12697444352.0, "29665": 12697444352.0, "29670": 12697444352.0, 
"29675": 12697444352.0, "29680": 12697444352.0, "29685": 12697444352.0, "29690": 12697444352.0, "29695": 12697444352.0, "29700": 12697444352.0, "29705": 12697444352.0, "29710": 12697444352.0, "29715": 12697444352.0, "29720": 12697444352.0, "29725": 12697444352.0, "29730": 12697444352.0, "29735": 12697444352.0, "29740": 12697444352.0, "29745": 12697444352.0, "29750": 12697444352.0, "29755": 12697444352.0, "29760": 12697444352.0, "29765": 12697444352.0, "29770": 12697444352.0, "29775": 12697444352.0, "29780": 12697444352.0, "29785": 12697444352.0, "29790": 12697444352.0, "29795": 12697444352.0, "29800": 12697444352.0, "29805": 12697444352.0, "29810": 12697444352.0, "29815": 12697444352.0, "29820": 12697444352.0, "29825": 12697444352.0, "29830": 12697444352.0, "29835": 12697444352.0, "29840": 12697444352.0, "29845": 12697444352.0, "29850": 12697444352.0, "29855": 12697444352.0, "29860": 12697444352.0, "29865": 12697444352.0, "29870": 12697444352.0, "29875": 12697444352.0, "29880": 12697444352.0, "29885": 12697444352.0, "29890": 12697444352.0, "29895": 12697444352.0, "29900": 12697444352.0, "29905": 12697444352.0, "29910": 12697444352.0, "29915": 12697444352.0, "29920": 12697444352.0, "29925": 12697444352.0, "29930": 12697444352.0, "29935": 12697444352.0, "29940": 12697444352.0, "29945": 12697444352.0, "29950": 12697444352.0, "29955": 12697444352.0, "29960": 12697444352.0, "29965": 12697444352.0, "29970": 12697444352.0, "29975": 12697444352.0, "29980": 12697444352.0, "29985": 12697444352.0, "29990": 12697444352.0, "29995": 12697444352.0, "30000": 12697444352.0, "30005": 12697444352.0, "30010": 12697444352.0, "30015": 12697444352.0, "30020": 12697444352.0, "30025": 12697444352.0, "30030": 12697444352.0, "30035": 12697444352.0, "30040": 12697444352.0, "30045": 12697444352.0, "30050": 12697444352.0, "30055": 12697444352.0, "30060": 12697444352.0, "30065": 12697444352.0, "30070": 12697444352.0, "30075": 12697444352.0, "30080": 12697444352.0, "30085": 12697444352.0, "30090": 12697444352.0, "30095": 12697444352.0, "30100": 12697444352.0, "30105": 12697444352.0, "30110": 12697444352.0, "30115": 12697444352.0, "30120": 12697444352.0, "30125": 12697444352.0, "30130": 12697444352.0, "30135": 12697444352.0, "30140": 12697444352.0, "30145": 12697444352.0, "30150": 12697444352.0, "30155": 12697444352.0, "30160": 12697444352.0, "30165": 12697444352.0, "30170": 12697444352.0, "30175": 12697444352.0, "30180": 12697444352.0, "30185": 12697444352.0, "30190": 12697444352.0, "30195": 12697444352.0, "30200": 12697444352.0, "30205": 12697444352.0, "30210": 12697444352.0, "30215": 12697444352.0, "30220": 12697444352.0, "30225": 12697444352.0, "30230": 12697444352.0, "30235": 12697444352.0, "30240": 12697444352.0, "30245": 12697444352.0, "30250": 12697444352.0, "30255": 12697444352.0, "30260": 12697444352.0, "30265": 12697444352.0, "30270": 12697444352.0, "30275": 12697444352.0, "30280": 12697444352.0, "30285": 12697444352.0, "30290": 12697444352.0, "30295": 12697444352.0, "30300": 12697444352.0, "30305": 12697444352.0, "30310": 12697444352.0, "30315": 12697444352.0, "30320": 12697444352.0, "30325": 12697444352.0, "30330": 12697444352.0, "30335": 12697444352.0, "30340": 12697444352.0, "30345": 12697444352.0, "30350": 12697444352.0, "30355": 12697444352.0, "30360": 12697444352.0, "30365": 12697444352.0, "30370": 12697444352.0, "30375": 12697444352.0, "30380": 12697444352.0, "30385": 12697444352.0, "30390": 12697444352.0, "30395": 12697444352.0, "30400": 12697444352.0, "30405": 12697444352.0, "30410": 12697444352.0, 
"30415": 12697444352.0, "30420": 12697444352.0, "30425": 12697444352.0, "30430": 12697444352.0, "30435": 12697444352.0, "30440": 12697444352.0, "30445": 12697444352.0, "30450": 12697444352.0, "30455": 12697444352.0, "30460": 12697444352.0, "30465": 12697444352.0, "30470": 12697444352.0, "30475": 12697444352.0, "30480": 12697444352.0, "30485": 12697444352.0, "30490": 12697444352.0, "30495": 12697444352.0, "30500": 12697444352.0, "30505": 12697444352.0, "30510": 12697444352.0, "30515": 12697444352.0, "30520": 12697444352.0, "30525": 12697444352.0, "30530": 12697444352.0, "30535": 12697444352.0, "30540": 12697444352.0, "30545": 12697444352.0, "30550": 12697444352.0, "30555": 12697444352.0, "30560": 12697444352.0, "30565": 12697444352.0, "30570": 12697444352.0, "30575": 12697444352.0, "30580": 12697444352.0, "30585": 12697444352.0, "30590": 12697444352.0, "30595": 12697444352.0, "30600": 12697444352.0, "30605": 12697444352.0, "30610": 12697444352.0, "30615": 12697444352.0, "30620": 12697444352.0, "30625": 12697444352.0, "30630": 12697444352.0, "30635": 12697444352.0, "30640": 12697444352.0, "30645": 12697444352.0, "30650": 12697444352.0, "30655": 12697444352.0, "30660": 12697444352.0, "30665": 12697444352.0, "30670": 12697444352.0, "30675": 12697444352.0, "30680": 12697444352.0, "30685": 12697444352.0, "30690": 12697444352.0, "30695": 12697444352.0, "30700": 12697444352.0, "30705": 12697444352.0, "30710": 12697444352.0, "30715": 12697444352.0, "30720": 12697444352.0, "30725": 12697444352.0, "30730": 12697444352.0, "30735": 12697444352.0, "30740": 12697444352.0, "30745": 12697444352.0, "30750": 12697444352.0, "30755": 12697444352.0, "30760": 12697444352.0, "30765": 12697444352.0, "30770": 12697444352.0, "30775": 12697444352.0, "30780": 12697444352.0, "30785": 12697444352.0, "30790": 12697444352.0, "30795": 12697444352.0, "30800": 12697444352.0, "30805": 12697444352.0, "30810": 12697444352.0, "30815": 12697444352.0, "30820": 12697444352.0, "30825": 12697444352.0, "30830": 12697444352.0, "30835": 12697444352.0, "30840": 12697444352.0, "30845": 12697444352.0, "30850": 12697444352.0, "30855": 12697444352.0, "30860": 12697444352.0, "30865": 12697444352.0, "30870": 12697444352.0, "30875": 12697444352.0, "30880": 12697444352.0, "30885": 12697444352.0, "30890": 12697444352.0, "30895": 12697444352.0, "30900": 12697444352.0, "30905": 12697444352.0, "30910": 12697444352.0, "30915": 12697444352.0, "30920": 12697444352.0, "30925": 12697444352.0, "30930": 12697444352.0, "30935": 12697444352.0, "30940": 12697444352.0, "30945": 12697444352.0, "30950": 12697444352.0, "30955": 12697444352.0, "30960": 12697444352.0, "30965": 12697444352.0, "30970": 12697444352.0, "30975": 12697444352.0, "30980": 12697444352.0, "30985": 12697444352.0, "30990": 12697444352.0, "30995": 12697444352.0, "31000": 12697444352.0, "31005": 12697444352.0, "31010": 12697444352.0, "31015": 12697444352.0, "31020": 12697444352.0, "31025": 12697444352.0, "31030": 12697444352.0, "31035": 12697444352.0, "31040": 12697444352.0, "31045": 12697444352.0, "31050": 12697444352.0, "31055": 12697444352.0, "31060": 12697444352.0, "31065": 12697444352.0, "31070": 12697444352.0, "31075": 12697444352.0, "31080": 12697444352.0, "31085": 12697444352.0, "31090": 12697444352.0, "31095": 12697444352.0, "31100": 12697444352.0, "31105": 12697444352.0, "31110": 12697444352.0, "31115": 12697444352.0, "31120": 12697444352.0, "31125": 12697444352.0, "31130": 12697444352.0, "31135": 12697444352.0, "31140": 12697444352.0, "31145": 12697444352.0, "31150": 12697444352.0, 
"31155": 12697444352.0, "31160": 12697444352.0, "31165": 12697444352.0, "31170": 12697444352.0, "31175": 12697444352.0, "31180": 12697444352.0, "31185": 12697444352.0, "31190": 12697444352.0, "31195": 12697444352.0, "31200": 12697444352.0, "31205": 12697444352.0, "31210": 12697444352.0, "31215": 12697444352.0, "31220": 12697444352.0, "31225": 12697444352.0, "31230": 12697444352.0, "31235": 12697444352.0, "31240": 12697444352.0, "31245": 12697444352.0, "31250": 12697444352.0, "31255": 12697444352.0, "31260": 12697444352.0, "31265": 12697444352.0, "31270": 12697444352.0, "31275": 12697444352.0, "31280": 12697444352.0, "31285": 12697444352.0, "31290": 12697444352.0, "31295": 12697444352.0, "31300": 12697444352.0, "31305": 12697444352.0, "31310": 12697444352.0, "31315": 12697444352.0, "31320": 12697444352.0, "31325": 12697444352.0, "31330": 12697444352.0, "31335": 12697444352.0, "31340": 12697444352.0, "31345": 12697444352.0, "31350": 12697444352.0, "31355": 12697444352.0, "31360": 12697444352.0, "31365": 12697444352.0, "31370": 12697444352.0, "31375": 12697444352.0, "31380": 12697444352.0, "31385": 12697444352.0, "31390": 12697444352.0, "31395": 12697444352.0, "31400": 12697444352.0, "31405": 12697444352.0, "31410": 12697444352.0, "31415": 12697444352.0, "31420": 12697444352.0, "31425": 12697444352.0, "31430": 12697444352.0, "31435": 12697444352.0, "31440": 12697444352.0, "31445": 12697444352.0, "31450": 12697444352.0, "31455": 12697444352.0, "31460": 12697444352.0, "31465": 12697444352.0, "31470": 12697444352.0, "31475": 12697444352.0, "31480": 12697444352.0, "31485": 12697444352.0, "31490": 12697444352.0, "31495": 12697444352.0, "31500": 12697444352.0, "31505": 12697444352.0, "31510": 12697444352.0, "31515": 12697444352.0, "31520": 12697444352.0, "31525": 12697444352.0, "31530": 12697444352.0, "31535": 12697444352.0, "31540": 12697444352.0, "31545": 12697444352.0, "31550": 12697444352.0, "31555": 12697444352.0, "31560": 12697444352.0, "31565": 12697444352.0, "31570": 12697444352.0, "31575": 12697444352.0, "31580": 12697444352.0, "31585": 12697444352.0, "31590": 12697444352.0, "31595": 12697444352.0, "31600": 12697444352.0, "31605": 12697444352.0, "31610": 12697444352.0, "31615": 12697444352.0, "31620": 12697444352.0, "31625": 12697444352.0, "31630": 12697444352.0, "31635": 12697444352.0, "31640": 12697444352.0, "31645": 12697444352.0, "31650": 12697444352.0, "31655": 12697444352.0, "31660": 12697444352.0, "31665": 12697444352.0, "31670": 12697444352.0, "31675": 12697444352.0, "31680": 12697444352.0, "31685": 12697444352.0, "31690": 12697444352.0, "31695": 12697444352.0, "31700": 12697444352.0, "31705": 12697444352.0, "31710": 12697444352.0, "31715": 12697444352.0, "31720": 12697444352.0, "31725": 12697444352.0, "31730": 12697444352.0, "31735": 12697444352.0, "31740": 12697444352.0, "31745": 12697444352.0, "31750": 12697444352.0, "31755": 12697444352.0, "31760": 12697444352.0, "31765": 12697444352.0, "31770": 12697444352.0, "31775": 12697444352.0, "31780": 12697444352.0, "31785": 12697444352.0, "31790": 12697444352.0, "31795": 12697444352.0, "31800": 12697444352.0, "31805": 12697444352.0, "31810": 12697444352.0, "31815": 12697444352.0, "31820": 12697444352.0, "31825": 12697444352.0, "31830": 12697444352.0, "31835": 12697444352.0, "31840": 12697444352.0, "31845": 12697444352.0, "31850": 12697444352.0, "31855": 12697444352.0, "31860": 12697444352.0, "31865": 12697444352.0, "31870": 12697444352.0, "31875": 12697444352.0, "31880": 12697444352.0, "31885": 12697444352.0, "31890": 12697444352.0, 
"31895": 12697444352.0, "31900": 12697444352.0, "31905": 12697444352.0, "31910": 12697444352.0, "31915": 12697444352.0, "31920": 12697444352.0, "31925": 12697444352.0, "31930": 12697444352.0, "31935": 12697444352.0, "31940": 12697444352.0, "31945": 12697444352.0, "31950": 12697444352.0, "31955": 12697444352.0, "31960": 12697444352.0, "31965": 12697444352.0, "31970": 12697444352.0, "31975": 12697444352.0, "31980": 12697444352.0, "31985": 12697444352.0, "31990": 12697444352.0, "31995": 12697444352.0, "32000": 12697444352.0, "32005": 12697444352.0, "32010": 12697444352.0, "32015": 12697444352.0, "32020": 12697444352.0, "32025": 12697444352.0, "32030": 12697444352.0, "32035": 12697444352.0, "32040": 12697444352.0, "32045": 12697444352.0, "32050": 12697444352.0, "32055": 12697444352.0, "32060": 12697444352.0, "32065": 12697444352.0, "32070": 12697444352.0, "32075": 12697444352.0, "32080": 12697444352.0, "32085": 12697444352.0, "32090": 12697444352.0, "32095": 12697444352.0, "32100": 12697444352.0, "32105": 12697444352.0, "32110": 12697444352.0, "32115": 12697444352.0, "32120": 12697444352.0, "32125": 12697444352.0, "32130": 12697444352.0, "32135": 12697444352.0, "32140": 12697444352.0, "32145": 12697444352.0, "32150": 12697444352.0, "32155": 12697444352.0, "32160": 12697444352.0, "32165": 12697444352.0, "32170": 12697444352.0, "32175": 12697444352.0, "32180": 12697444352.0, "32185": 12697444352.0, "32190": 12697444352.0, "32195": 12697444352.0, "32200": 12697444352.0, "32205": 12697444352.0, "32210": 12697444352.0, "32215": 12697444352.0, "32220": 12697444352.0, "32225": 12697444352.0, "32230": 12697444352.0, "32235": 12697444352.0, "32240": 12697444352.0, "32245": 12697444352.0, "32250": 12697444352.0, "32255": 12697444352.0, "32260": 12697444352.0, "32265": 12697444352.0, "32270": 12697444352.0, "32275": 12697444352.0, "32280": 12697444352.0, "32285": 12697444352.0, "32290": 12697444352.0, "32295": 12697444352.0, "32300": 12697444352.0, "32305": 12697444352.0, "32310": 12697444352.0, "32315": 12697444352.0, "32320": 12697444352.0, "32325": 12697444352.0, "32330": 12697444352.0, "32335": 12697444352.0, "32340": 12697444352.0, "32345": 12697444352.0, "32350": 12697444352.0, "32355": 12697444352.0, "32360": 12697444352.0, "32365": 12697444352.0, "32370": 12697444352.0, "32375": 12697444352.0, "32380": 12697444352.0, "32385": 12697444352.0, "32390": 12697444352.0, "32395": 12697444352.0, "32400": 12697444352.0, "32405": 12697444352.0, "32410": 12697444352.0, "32415": 12697444352.0, "32420": 12697444352.0, "32425": 12697444352.0, "32430": 12697444352.0, "32435": 12697444352.0, "32440": 12697444352.0, "32445": 12697444352.0, "32450": 12697444352.0, "32455": 12697444352.0, "32460": 12697444352.0, "32465": 12697444352.0, "32470": 12697444352.0, "32475": 12697444352.0, "32480": 12697444352.0, "32485": 12697444352.0, "32490": 12697444352.0, "32495": 12697444352.0, "32500": 12697444352.0, "32505": 12697444352.0, "32510": 12697444352.0, "32515": 12697444352.0, "32520": 12697444352.0, "32525": 12697444352.0, "32530": 12697444352.0, "32535": 12697444352.0, "32540": 12697444352.0, "32545": 12697444352.0, "32550": 12697444352.0, "32555": 12697444352.0, "32560": 12697444352.0, "32565": 12697444352.0, "32570": 12697444352.0, "32575": 12697444352.0, "32580": 12697444352.0, "32585": 12697444352.0, "32590": 12697444352.0, "32595": 12697444352.0, "32600": 12697444352.0, "32605": 12697444352.0, "32610": 12697444352.0, "32615": 12697444352.0, "32620": 12697444352.0, "32625": 12697444352.0, "32630": 12697444352.0, 
"32635": 12697444352.0, "32640": 12697444352.0, "32645": 12697444352.0, "32650": 12697444352.0, "32655": 12697444352.0, "32660": 12697444352.0, "32665": 12697444352.0, "32670": 12697444352.0, "32675": 12697444352.0, "32680": 12697444352.0, "32685": 12697444352.0, "32690": 12697444352.0, "32695": 12697444352.0, "32700": 12697444352.0, "32705": 12697444352.0, "32710": 12697444352.0, "32715": 12697444352.0, "32720": 12697444352.0, "32725": 12697444352.0, "32730": 12697444352.0, "32735": 12697444352.0, "32740": 12697444352.0, "32745": 12697444352.0, "32750": 12697444352.0, "32755": 12697444352.0, "32760": 12697444352.0, "32765": 12697444352.0, "32770": 12697444352.0, "32775": 12697444352.0, "32780": 12697444352.0, "32785": 12697444352.0, "32790": 12697444352.0, "32795": 12697444352.0, "32800": 12697444352.0, "32805": 12697444352.0, "32810": 12697444352.0, "32815": 12697444352.0, "32820": 12697444352.0, "32825": 12697444352.0, "32830": 12697444352.0, "32835": 12697444352.0, "32840": 12697444352.0, "32845": 12697444352.0, "32850": 12697444352.0, "32855": 12697444352.0, "32860": 12697444352.0, "32865": 12697444352.0, "32870": 12697444352.0, "32875": 12697444352.0, "32880": 12697444352.0, "32885": 12697444352.0, "32890": 12697444352.0, "32895": 12697444352.0, "32900": 12697444352.0, "32905": 12697444352.0, "32910": 12697444352.0, "32915": 12697444352.0, "32920": 12697444352.0, "32925": 12697444352.0, "32930": 12697444352.0, "32935": 12697444352.0, "32940": 12697444352.0, "32945": 12697444352.0, "32950": 12697444352.0, "32955": 12697444352.0, "32960": 12697444352.0, "32965": 12697444352.0, "32970": 12697444352.0, "32975": 12697444352.0, "32980": 12697444352.0, "32985": 12697444352.0, "32990": 12697444352.0, "32995": 12697444352.0, "33000": 12697444352.0, "33005": 12697444352.0, "33010": 12697444352.0, "33015": 12697444352.0, "33020": 12697444352.0, "33025": 12697444352.0, "33030": 12697444352.0, "33035": 12697444352.0, "33040": 12697444352.0, "33045": 12697444352.0, "33050": 12697444352.0, "33055": 12697444352.0, "33060": 12697444352.0, "33065": 12697444352.0, "33070": 12697444352.0, "33075": 12697444352.0, "33080": 12697444352.0, "33085": 12697444352.0, "33090": 12697444352.0, "33095": 12697444352.0, "33100": 12697444352.0, "33105": 12697444352.0, "33110": 12697444352.0, "33115": 12697444352.0, "33120": 12697444352.0, "33125": 12697444352.0, "33130": 12697444352.0, "33135": 12697444352.0, "33140": 12697444352.0, "33145": 12697444352.0, "33150": 12697444352.0, "33155": 12697444352.0, "33160": 12697444352.0, "33165": 12697444352.0, "33170": 12697444352.0, "33175": 12697444352.0, "33180": 12697444352.0, "33185": 12697444352.0, "33190": 12697444352.0, "33195": 12697444352.0, "33200": 12697444352.0, "33205": 12697444352.0, "33210": 12697444352.0, "33215": 12697444352.0, "33220": 12697444352.0, "33225": 12697444352.0, "33230": 12697444352.0, "33235": 12697444352.0, "33240": 12697444352.0, "33245": 12697444352.0, "33250": 12697444352.0, "33255": 12697444352.0, "33260": 12697444352.0, "33265": 12697444352.0, "33270": 12697444352.0, "33275": 12697444352.0, "33280": 12697444352.0, "33285": 12697444352.0, "33290": 12697444352.0, "33295": 12697444352.0, "33300": 12697444352.0, "33305": 12697444352.0, "33310": 12697444352.0, "33315": 12697444352.0, "33320": 12697444352.0, "33325": 12697444352.0, "33330": 12697444352.0, "33335": 12697444352.0, "33340": 12697444352.0, "33345": 12697444352.0, "33350": 12697444352.0, "33355": 12697444352.0, "33360": 12697444352.0, "33365": 12697444352.0, "33370": 12697444352.0, 
"33375": 12697444352.0, "33380": 12697444352.0, "33385": 12697444352.0, "33390": 12697444352.0, "33395": 12697444352.0, "33400": 12697444352.0, "33405": 12697444352.0, "33410": 12697444352.0, "33415": 12697444352.0, "33420": 12697444352.0, "33425": 12697444352.0, "33430": 12697444352.0, "33435": 12697444352.0, "33440": 12697444352.0, "33445": 12697444352.0, "33450": 12697444352.0, "33455": 12697444352.0, "33460": 12697444352.0, "33465": 12697444352.0, "33470": 12697444352.0, "33475": 12697444352.0, "33480": 12697444352.0, "33485": 12697444352.0, "33490": 12697444352.0, "33495": 12697444352.0, "33500": 12697444352.0, "33505": 12697444352.0, "33510": 12697444352.0, "33515": 12697444352.0, "33520": 12697444352.0, "33525": 12697444352.0, "33530": 12697444352.0, "33535": 12697444352.0, "33540": 12697444352.0, "33545": 12697444352.0, "33550": 12697444352.0, "33555": 12697444352.0, "33560": 12697444352.0, "33565": 12697444352.0, "33570": 12697444352.0, "33575": 12697444352.0, "33580": 12697444352.0, "33585": 12697444352.0, "33590": 12697444352.0, "33595": 12697444352.0, "33600": 12697444352.0, "33605": 12697444352.0, "33610": 12697444352.0, "33615": 12697444352.0, "33620": 12697444352.0, "33625": 12697444352.0, "33630": 12697444352.0, "33635": 12697444352.0, "33640": 12697444352.0, "33645": 12697444352.0, "33650": 12697444352.0, "33655": 12697444352.0, "33660": 12697444352.0, "33665": 12697444352.0, "33670": 12697444352.0, "33675": 12697444352.0, "33680": 12697444352.0, "33685": 12697444352.0, "33690": 12697444352.0, "33695": 12697444352.0, "33700": 12697444352.0, "33705": 12697444352.0, "33710": 12697444352.0, "33715": 12697444352.0, "33720": 12697444352.0, "33725": 12697444352.0, "33730": 12697444352.0, "33735": 12697444352.0, "33740": 12697444352.0, "33745": 12697444352.0, "33750": 12697444352.0, "33755": 12697444352.0, "33760": 12697444352.0, "33765": 12697444352.0, "33770": 12697444352.0, "33775": 12697444352.0, "33780": 12697444352.0, "33785": 12697444352.0, "33790": 12697444352.0, "33795": 12697444352.0, "33800": 12697444352.0, "33805": 12697444352.0, "33810": 12697444352.0, "33815": 12697444352.0, "33820": 12697444352.0, "33825": 12697444352.0, "33830": 12697444352.0, "33835": 12697444352.0, "33840": 12697444352.0, "33845": 12697444352.0, "33850": 12697444352.0, "33855": 12697444352.0, "33860": 12697444352.0, "33865": 12697444352.0, "33870": 12697444352.0, "33875": 12697444352.0, "33880": 12697444352.0, "33885": 12697444352.0, "33890": 12697444352.0, "33895": 12697444352.0, "33900": 12697444352.0, "33905": 12697444352.0, "33910": 12697444352.0, "33915": 12697444352.0, "33920": 12697444352.0, "33925": 12697444352.0, "33930": 12697444352.0, "33935": 12697444352.0, "33940": 12697444352.0, "33945": 12697444352.0, "33950": 12697444352.0, "33955": 12697444352.0, "33960": 12697444352.0, "33965": 12697444352.0, "33970": 12697444352.0, "33975": 12697444352.0, "33980": 12697444352.0, "33985": 12697444352.0, "33990": 12697444352.0, "33995": 12697444352.0, "34000": 12697444352.0, "34005": 12697444352.0, "34010": 12697444352.0, "34015": 12697444352.0, "34020": 12697444352.0, "34025": 12697444352.0, "34030": 12697444352.0, "34035": 12697444352.0, "34040": 12697444352.0, "34045": 12697444352.0, "34050": 12697444352.0, "34055": 12697444352.0, "34060": 12697444352.0, "34065": 12697444352.0, "34070": 12697444352.0, "34075": 12697444352.0, "34080": 12697444352.0, "34085": 12697444352.0, "34090": 12697444352.0, "34095": 12697444352.0, "34100": 12697444352.0, "34105": 12697444352.0, "34110": 12697444352.0, 
"34115": 12697444352.0, "34120": 12697444352.0, "34125": 12697444352.0, "34130": 12697444352.0, "34135": 12697444352.0, "34140": 12697444352.0, "34145": 12697444352.0, "34150": 12697444352.0, "34155": 12697444352.0, "34160": 12697444352.0, "34165": 12697444352.0, "34170": 12697444352.0, "34175": 12697444352.0, "34180": 12697444352.0, "34185": 12697444352.0, "34190": 12697444352.0, "34195": 12697444352.0, "34200": 12697444352.0, "34205": 12697444352.0, "34210": 12697444352.0, "34215": 12697444352.0, "34220": 12697444352.0, "34225": 12697444352.0, "34230": 12697444352.0, "34235": 12697444352.0, "34240": 12697444352.0, "34245": 12697444352.0, "34250": 12697444352.0, "34255": 12697444352.0, "34260": 12697444352.0, "34265": 12697444352.0, "34270": 12697444352.0, "34275": 12697444352.0, "34280": 12697444352.0, "34285": 12697444352.0, "34290": 12697444352.0, "34295": 12697444352.0, "34300": 12697444352.0, "34305": 12697444352.0, "34310": 12697444352.0, "34315": 12697444352.0, "34320": 12697444352.0, "34325": 12697444352.0, "34330": 12697444352.0, "34335": 12697444352.0, "34340": 12697444352.0, "34345": 12697444352.0, "34350": 12697444352.0, "34355": 12697444352.0, "34360": 12697444352.0, "34365": 12697444352.0, "34370": 12697444352.0, "34375": 12697444352.0, "34380": 12697444352.0, "34385": 12697444352.0, "34390": 12697444352.0, "34395": 12697444352.0, "34400": 12697444352.0, "34405": 12697444352.0, "34410": 12697444352.0, "34415": 12697444352.0, "34420": 12697444352.0, "34425": 12697444352.0, "34430": 12697444352.0, "34435": 12697444352.0, "34440": 12697444352.0, "34445": 12697444352.0, "34450": 12697444352.0, "34455": 12697444352.0, "34460": 12697444352.0, "34465": 12697444352.0, "34470": 12697444352.0, "34475": 12697444352.0, "34480": 12697444352.0, "34485": 12697444352.0, "34490": 12697444352.0, "34495": 12697444352.0, "34500": 12697444352.0, "34505": 12697444352.0, "34510": 12697444352.0, "34515": 12697444352.0, "34520": 12697444352.0, "34525": 12697444352.0, "34530": 12697444352.0, "34535": 12697444352.0, "34540": 12697444352.0, "34545": 12697444352.0, "34550": 12697444352.0, "34555": 12697444352.0, "34560": 12697444352.0, "34565": 12697444352.0, "34570": 12697444352.0, "34575": 12697444352.0, "34580": 12697444352.0, "34585": 12697444352.0, "34590": 12697444352.0, "34595": 12697444352.0, "34600": 12697444352.0, "34605": 12697444352.0, "34610": 12697444352.0, "34615": 12697444352.0, "34620": 12697444352.0, "34625": 12697444352.0, "34630": 12697444352.0, "34635": 12697444352.0, "34640": 12697444352.0, "34645": 12697444352.0, "34650": 12697444352.0, "34655": 12697444352.0, "34660": 12697444352.0, "34665": 12697444352.0, "34670": 12697444352.0, "34675": 12697444352.0, "34680": 12697444352.0, "34685": 12697444352.0, "34690": 12697444352.0, "34695": 12697444352.0, "34700": 12697444352.0, "34705": 12697444352.0, "34710": 12697444352.0, "34715": 12697444352.0, "34720": 12697444352.0, "34725": 12697444352.0, "34730": 12697444352.0, "34735": 12697444352.0, "34740": 12697444352.0, "34745": 12697444352.0, "34750": 12697444352.0, "34755": 12697444352.0, "34760": 12697444352.0, "34765": 12697444352.0, "34770": 12697444352.0, "34775": 12697444352.0, "34780": 12697444352.0, "34785": 12697444352.0, "34790": 12697444352.0, "34795": 12697444352.0, "34800": 12697444352.0, "34805": 12697444352.0, "34810": 12697444352.0, "34815": 12697444352.0, "34820": 12697444352.0, "34825": 12697444352.0, "34830": 12697444352.0, "34835": 12697444352.0, "34840": 12697444352.0, "34845": 12697444352.0, "34850": 12697444352.0, 
"34855": 12697444352.0, "34860": 12697444352.0, "34865": 12697444352.0, "34870": 12697444352.0, "34875": 12697444352.0, "34880": 12697444352.0, "34885": 12697444352.0, "34890": 12697444352.0, "34895": 12697444352.0, "34900": 12697444352.0, "34905": 12697444352.0, "34910": 12697444352.0, "34915": 12697444352.0, "34920": 12697444352.0, "34925": 12697444352.0, "34930": 12697444352.0, "34935": 12697444352.0, "34940": 12697444352.0, "34945": 12697444352.0, "34950": 12697444352.0, "34955": 12697444352.0, "34960": 12697444352.0, "34965": 12697444352.0, "34970": 12697444352.0, "34975": 12697444352.0, "34980": 12697444352.0, "34985": 12697444352.0, "34990": 12697444352.0, "34995": 12697444352.0, "35000": 12697444352.0, "35005": 12697444352.0, "35010": 12697444352.0, "35015": 12697444352.0, "35020": 12697444352.0, "35025": 12697444352.0, "35030": 12697444352.0, "35035": 12697444352.0, "35040": 12697444352.0, "35045": 12697444352.0, "35050": 12697444352.0, "35055": 12697444352.0, "35060": 12697444352.0, "35065": 12697444352.0, "35070": 12697444352.0, "35075": 12697444352.0, "35080": 12697444352.0, "35085": 12697444352.0, "35090": 12697444352.0, "35095": 12697444352.0, "35100": 12697444352.0, "35105": 12697444352.0, "35110": 12697444352.0, "35115": 12697444352.0, "35120": 12697444352.0, "35125": 12697444352.0, "35130": 12697444352.0, "35135": 12697444352.0, "35140": 12697444352.0, "35145": 12697444352.0, "35150": 12697444352.0, "35155": 12697444352.0, "35160": 12697444352.0, "35165": 12697444352.0, "35170": 12697444352.0, "35175": 12697444352.0, "35180": 12697444352.0, "35185": 12697444352.0, "35190": 12697444352.0, "35195": 12697444352.0, "35200": 12697444352.0, "35205": 12697444352.0, "35210": 12697444352.0, "35215": 12697444352.0, "35220": 12697444352.0, "35225": 12697444352.0, "35230": 12697444352.0, "35235": 12697444352.0, "35240": 12697444352.0, "35245": 12697444352.0, "35250": 12697444352.0, "35255": 12697444352.0, "35260": 12697444352.0, "35265": 12697444352.0, "35270": 12697444352.0, "35275": 12697444352.0, "35280": 12697444352.0, "35285": 12697444352.0, "35290": 12697444352.0, "35295": 12697444352.0, "35300": 12697444352.0, "35305": 12697444352.0, "35310": 12697444352.0, "35315": 12697444352.0, "35320": 12697444352.0, "35325": 12697444352.0, "35330": 12697444352.0, "35335": 12697444352.0, "35340": 12697444352.0, "35345": 12697444352.0, "35350": 12697444352.0, "35355": 12697444352.0, "35360": 12697444352.0, "35365": 12697444352.0, "35370": 12697444352.0, "35375": 12697444352.0, "35380": 12697444352.0, "35385": 12697444352.0, "35390": 12697444352.0, "35395": 12697444352.0, "35400": 12697444352.0, "35405": 12697444352.0, "35410": 12697444352.0, "35415": 12697444352.0, "35420": 12697444352.0, "35425": 12697444352.0, "35430": 12697444352.0, "35435": 12697444352.0, "35440": 12697444352.0, "35445": 12697444352.0, "35450": 12697444352.0, "35455": 12697444352.0, "35460": 12697444352.0, "35465": 12697444352.0, "35470": 12697444352.0, "35475": 12697444352.0, "35480": 12697444352.0, "35485": 12697444352.0, "35490": 12697444352.0, "35495": 12697444352.0, "35500": 12697444352.0, "35505": 12697444352.0, "35510": 12697444352.0, "35515": 12697444352.0, "35520": 12697444352.0, "35525": 12697444352.0, "35530": 12697444352.0, "35535": 12697444352.0, "35540": 12697444352.0, "35545": 12697444352.0, "35550": 12697444352.0, "35555": 12697444352.0, "35560": 12697444352.0, "35565": 12697444352.0, "35570": 12697444352.0, "35575": 12697444352.0, "35580": 12697444352.0, "35585": 12697444352.0, "35590": 12697444352.0, 
"35595": 12697444352.0, "35600": 12697444352.0, "35605": 12697444352.0, "35610": 12697444352.0, "35615": 12697444352.0, "35620": 12697444352.0, "35625": 12697444352.0, "35630": 12697444352.0, "35635": 12697444352.0, "35640": 12697444352.0, "35645": 12697444352.0, "35650": 12697444352.0, "35655": 12697444352.0, "35660": 12697444352.0, "35665": 12697444352.0, "35670": 12697444352.0, "35675": 12697444352.0, "35680": 12697444352.0, "35685": 12697444352.0, "35690": 12697444352.0, "35695": 12697444352.0, "35700": 12697444352.0, "35705": 12697444352.0, "35710": 12697444352.0, "35715": 12697444352.0, "35720": 12697444352.0, "35725": 12697444352.0, "35730": 12697444352.0, "35735": 12697444352.0, "35740": 12697444352.0, "35745": 12697444352.0, "35750": 12697444352.0, "35755": 12697444352.0, "35760": 12697444352.0, "35765": 12697444352.0, "35770": 12697444352.0, "35775": 12697444352.0, "35780": 12697444352.0, "35785": 12697444352.0, "35790": 12697444352.0, "35795": 12697444352.0, "35800": 12697444352.0, "35805": 12697444352.0, "35810": 12697444352.0, "35815": 12697444352.0, "35820": 12697444352.0, "35825": 12697444352.0, "35830": 12697444352.0, "35835": 12697444352.0, "35840": 12697444352.0, "35845": 12697444352.0, "35850": 12697444352.0, "35855": 12697444352.0, "35860": 12697444352.0, "35865": 12697444352.0, "35870": 12697444352.0, "35875": 12697444352.0, "35880": 12697444352.0, "35885": 12697444352.0, "35890": 12697444352.0, "35895": 12697444352.0, "35900": 12697444352.0, "35905": 12697444352.0, "35910": 12697444352.0, "35915": 12697444352.0, "35920": 12697444352.0, "35925": 12697444352.0, "35930": 12697444352.0, "35935": 12697444352.0, "35940": 12697444352.0, "35945": 12697444352.0, "35950": 12697444352.0, "35955": 12697444352.0, "35960": 12697444352.0, "35965": 12697444352.0, "35970": 12697444352.0, "35975": 12697444352.0, "35980": 12697444352.0, "35985": 12697444352.0, "35990": 12697444352.0, "35995": 12697444352.0, "36000": 12697444352.0, "36005": 12697444352.0, "36010": 12697444352.0, "36015": 12697444352.0, "36020": 12697444352.0, "36025": 12697444352.0, "36030": 12697444352.0, "36035": 12697444352.0, "36040": 12697444352.0, "36045": 12697444352.0, "36050": 12697444352.0, "36055": 12697444352.0, "36060": 12697444352.0, "36065": 12697444352.0, "36070": 12697444352.0, "36075": 12697444352.0, "36080": 12697444352.0, "36085": 12697444352.0, "36090": 12697444352.0, "36095": 12697444352.0, "36100": 12697444352.0, "36105": 12697444352.0, "36110": 12697444352.0, "36115": 12697444352.0, "36120": 12697444352.0, "36125": 12697444352.0, "36130": 12697444352.0, "36135": 12697444352.0, "36140": 12697444352.0, "36145": 12697444352.0, "36150": 12697444352.0, "36155": 12697444352.0, "36160": 12697444352.0, "36165": 12697444352.0, "36170": 12697444352.0, "36175": 12697444352.0, "36180": 12697444352.0, "36185": 12697444352.0, "36190": 12697444352.0, "36195": 12697444352.0, "36200": 12697444352.0, "36205": 12697444352.0, "36210": 12697444352.0, "36215": 12697444352.0, "36220": 12697444352.0, "36225": 12697444352.0, "36230": 12697444352.0, "36235": 12697444352.0, "36240": 12697444352.0, "36245": 12697444352.0, "36250": 12697444352.0, "36255": 12697444352.0, "36260": 12697444352.0, "36265": 12697444352.0, "36270": 12697444352.0, "36275": 12697444352.0, "36280": 12697444352.0, "36285": 12697444352.0, "36290": 12697444352.0, "36295": 12697444352.0, "36300": 12697444352.0, "36305": 12697444352.0, "36310": 12697444352.0, "36315": 12697444352.0, "36320": 12697444352.0, "36325": 12697444352.0, "36330": 12697444352.0, 
"36335": 12697444352.0, "36340": 12697444352.0, "36345": 12697444352.0, "36350": 12697444352.0, "36355": 12697444352.0, "36360": 12697444352.0, "36365": 12697444352.0, "36370": 12697444352.0, "36375": 12697444352.0, "36380": 12697444352.0, "36385": 12697444352.0, "36390": 12697444352.0, "36395": 12697444352.0, "36400": 12697444352.0, "36405": 12697444352.0, "36410": 12697444352.0, "36415": 12697444352.0, "36420": 12697444352.0, "36425": 12697444352.0, "36430": 12697444352.0, "36435": 12697444352.0, "36440": 12697444352.0, "36445": 12697444352.0, "36450": 12697444352.0, "36455": 12697444352.0, "36460": 12697444352.0, "36465": 12697444352.0, "36470": 12697444352.0, "36475": 12697444352.0, "36480": 12697444352.0, "36485": 12697444352.0, "36490": 12697444352.0, "36495": 12697444352.0, "36500": 12697444352.0, "36505": 12697444352.0, "36510": 12697444352.0, "36515": 12697444352.0, "36520": 12697444352.0, "36525": 12697444352.0, "36530": 12697444352.0, "36535": 12697444352.0, "36540": 12697444352.0, "36545": 12697444352.0, "36550": 12697444352.0, "36555": 12697444352.0, "36560": 12697444352.0, "36565": 12697444352.0, "36570": 12697444352.0, "36575": 12697444352.0, "36580": 12697444352.0, "36585": 12697444352.0, "36590": 12697444352.0, "36595": 12697444352.0, "36600": 12697444352.0, "36605": 12697444352.0, "36610": 12697444352.0, "36615": 12697444352.0, "36620": 12697444352.0, "36625": 12697444352.0, "36630": 12697444352.0, "36635": 12697444352.0, "36640": 12697444352.0, "36645": 12697444352.0, "36650": 12697444352.0, "36655": 12697444352.0, "36660": 12697444352.0, "36665": 12697444352.0, "36670": 12697444352.0, "36675": 12697444352.0, "36680": 12697444352.0, "36685": 12697444352.0, "36690": 12697444352.0, "36695": 12697444352.0, "36700": 12697444352.0, "36705": 12697444352.0, "36710": 12697444352.0, "36715": 12697444352.0, "36720": 12697444352.0, "36725": 12697444352.0, "36730": 12697444352.0, "36735": 12697444352.0, "36740": 12697444352.0, "36745": 12697444352.0, "36750": 12697444352.0, "36755": 12697444352.0, "36760": 12697444352.0, "36765": 12697444352.0, "36770": 12697444352.0, "36775": 12697444352.0, "36780": 12697444352.0, "36785": 12697444352.0, "36790": 12697444352.0, "36795": 12697444352.0, "36800": 12697444352.0, "36805": 12697444352.0, "36810": 12697444352.0, "36815": 12697444352.0, "36820": 12697444352.0, "36825": 12697444352.0, "36830": 12697444352.0, "36835": 12697444352.0, "36840": 12697444352.0, "36845": 12697444352.0, "36850": 12697444352.0, "36855": 12697444352.0, "36860": 12697444352.0, "36865": 12697444352.0, "36870": 12697444352.0, "36875": 12697444352.0, "36880": 12697444352.0, "36885": 12697444352.0, "36890": 12697444352.0, "36895": 12697444352.0, "36900": 12697444352.0, "36905": 12697444352.0, "36910": 12697444352.0, "36915": 12697444352.0, "36920": 12697444352.0, "36925": 12697444352.0, "36930": 12697444352.0, "36935": 12697444352.0, "36940": 12697444352.0, "36945": 12697444352.0, "36950": 12697444352.0, "36955": 12697444352.0, "36960": 12697444352.0, "36965": 12697444352.0, "36970": 12697444352.0, "36975": 12697444352.0, "36980": 12697444352.0, "36985": 12697444352.0, "36990": 12697444352.0, "36995": 12697444352.0, "37000": 12697444352.0, "37005": 12697444352.0, "37010": 12697444352.0, "37015": 12697444352.0, "37020": 12697444352.0, "37025": 12697444352.0, "37030": 12697444352.0, "37035": 12697444352.0, "37040": 12697444352.0, "37045": 12697444352.0, "37050": 12697444352.0, "37055": 12697444352.0, "37060": 12697444352.0, "37065": 12697444352.0, "37070": 12697444352.0, 
"37075": 12697444352.0, "37080": 12697444352.0, "37085": 12697444352.0, "37090": 12697444352.0, "37095": 12697444352.0, "37100": 12697444352.0, "37105": 12697444352.0, "37110": 12697444352.0, "37115": 12697444352.0, "37120": 12697444352.0, "37125": 12697444352.0, "37130": 12697444352.0, "37135": 12697444352.0, "37140": 12697444352.0, "37145": 12697444352.0, "37150": 12697444352.0, "37155": 12697444352.0, "37160": 12697444352.0, "37165": 12697444352.0, "37170": 12697444352.0, "37175": 12697444352.0, "37180": 12697444352.0, "37185": 12697444352.0, "37190": 12697444352.0, "37195": 12697444352.0, "37200": 12697444352.0, "37205": 12697444352.0, "37210": 12697444352.0, "37215": 12697444352.0, "37220": 12697444352.0, "37225": 12697444352.0, "37230": 12697444352.0, "37235": 12697444352.0, "37240": 12697444352.0, "37245": 12697444352.0, "37250": 12697444352.0, "37255": 12697444352.0, "37260": 12697444352.0, "37265": 12697444352.0, "37270": 12697444352.0, "37275": 12697444352.0, "37280": 12697444352.0, "37285": 12697444352.0, "37290": 12697444352.0, "37295": 12697444352.0, "37300": 12697444352.0, "37305": 12697444352.0, "37310": 12697444352.0, "37315": 12697444352.0, "37320": 12697444352.0, "37325": 12697444352.0, "37330": 12697444352.0, "37335": 12697444352.0, "37340": 12697444352.0, "37345": 12697444352.0, "37350": 12697444352.0, "37355": 12697444352.0, "37360": 12697444352.0, "37365": 12697444352.0, "37370": 12697444352.0, "37375": 12697444352.0, "37380": 12697444352.0, "37385": 12697444352.0, "37390": 12697444352.0, "37395": 12697444352.0, "37400": 12697444352.0, "37405": 12697444352.0, "37410": 12697444352.0, "37415": 12697444352.0, "37420": 12697444352.0, "37425": 12697444352.0, "37430": 12697444352.0, "37435": 12697444352.0, "37440": 12697444352.0, "37445": 12697444352.0, "37450": 12697444352.0, "37455": 12697444352.0, "37460": 12697444352.0, "37465": 12697444352.0, "37470": 12697444352.0, "37475": 12697444352.0, "37480": 12697444352.0, "37485": 12697444352.0, "37490": 12697444352.0, "37495": 12697444352.0, "37500": 12697444352.0, "37505": 12697444352.0, "37510": 12697444352.0, "37515": 12697444352.0, "37520": 12697444352.0, "37525": 12697444352.0, "37530": 12697444352.0, "37535": 12697444352.0, "37540": 12697444352.0, "37545": 12697444352.0, "37550": 12697444352.0, "37555": 12697444352.0, "37560": 12697444352.0, "37565": 12697444352.0, "37570": 12697444352.0, "37575": 12697444352.0, "37580": 12697444352.0, "37585": 12697444352.0, "37590": 12697444352.0, "37595": 12697444352.0, "37600": 12697444352.0, "37605": 12697444352.0, "37610": 12697444352.0, "37615": 12697444352.0, "37620": 12697444352.0, "37625": 12697444352.0, "37630": 12697444352.0, "37635": 12697444352.0, "37640": 12697444352.0, "37645": 12697444352.0, "37650": 12697444352.0, "37655": 12697444352.0, "37660": 12697444352.0, "37665": 12697444352.0, "37670": 12697444352.0, "37675": 12697444352.0, "37680": 12697444352.0, "37685": 12697444352.0, "37690": 12697444352.0, "37695": 12697444352.0, "37700": 12697444352.0, "37705": 12697444352.0, "37710": 12697444352.0, "37715": 12697444352.0, "37720": 12697444352.0, "37725": 12697444352.0, "37730": 12697444352.0, "37735": 12697444352.0, "37740": 12697444352.0, "37745": 12697444352.0, "37750": 12697444352.0, "37755": 12697444352.0, "37760": 12697444352.0, "37765": 12697444352.0, "37770": 12697444352.0, "37775": 12697444352.0, "37780": 12697444352.0, "37785": 12697444352.0, "37790": 12697444352.0, "37795": 12697444352.0, "37800": 12697444352.0, "37805": 12697444352.0, "37810": 12697444352.0, 
"37815": 12697444352.0, "37820": 12697444352.0, "37825": 12697444352.0, "37830": 12697444352.0, "37835": 12697444352.0, "37840": 12697444352.0, "37845": 12697444352.0, "37850": 12697444352.0, "37855": 12697444352.0, "37860": 12697444352.0, "37865": 12697444352.0, "37870": 12697444352.0, "37875": 12697444352.0, "37880": 12697444352.0, "37885": 12697444352.0, "37890": 12697444352.0, "37895": 12697444352.0, "37900": 12697444352.0, "37905": 12697444352.0, "37910": 12697444352.0, "37915": 12697444352.0, "37920": 12697444352.0, "37925": 12697444352.0, "37930": 12697444352.0, "37935": 12697444352.0, "37940": 12697444352.0, "37945": 12697444352.0, "37950": 12697444352.0, "37955": 12697444352.0, "37960": 12697444352.0, "37965": 12697444352.0, "37970": 12697444352.0, "37975": 12697444352.0, "37980": 12697444352.0, "37985": 12697444352.0, "37990": 12697444352.0, "37995": 12697444352.0, "38000": 12697444352.0, "38005": 12697444352.0, "38010": 12697444352.0, "38015": 12697444352.0, "38020": 12697444352.0, "38025": 12697444352.0, "38030": 12697444352.0, "38035": 12697444352.0, "38040": 12697444352.0, "38045": 12697444352.0, "38050": 12697444352.0, "38055": 12697444352.0, "38060": 12697444352.0, "38065": 12697444352.0, "38070": 12697444352.0, "38075": 12697444352.0, "38080": 12697444352.0, "38085": 12697444352.0, "38090": 12697444352.0, "38095": 12697444352.0, "38100": 12697444352.0, "38105": 12697444352.0, "38110": 12697444352.0, "38115": 12697444352.0, "38120": 12697444352.0, "38125": 12697444352.0, "38130": 12697444352.0, "38135": 12697444352.0, "38140": 12697444352.0, "38145": 12697444352.0, "38150": 12697444352.0, "38155": 12697444352.0, "38160": 12697444352.0, "38165": 12697444352.0, "38170": 12697444352.0, "38175": 12697444352.0, "38180": 12697444352.0, "38185": 12697444352.0, "38190": 12697444352.0, "38195": 12697444352.0, "38200": 12697444352.0, "38205": 12697444352.0, "38210": 12697444352.0, "38215": 12697444352.0, "38220": 12697444352.0, "38225": 12697444352.0, "38230": 12697444352.0, "38235": 12697444352.0, "38240": 12697444352.0, "38245": 12697444352.0, "38250": 12697444352.0, "38255": 12697444352.0, "38260": 12697444352.0, "38265": 12697444352.0, "38270": 12697444352.0, "38275": 12697444352.0, "38280": 12697444352.0, "38285": 12697444352.0, "38290": 12697444352.0, "38295": 12697444352.0, "38300": 12697444352.0, "38305": 12697444352.0, "38310": 12697444352.0, "38315": 12697444352.0, "38320": 12697444352.0, "38325": 12697444352.0, "38330": 12697444352.0, "38335": 12697444352.0, "38340": 12697444352.0, "38345": 12697444352.0, "38350": 12697444352.0, "38355": 12697444352.0, "38360": 12697444352.0, "38365": 12697444352.0, "38370": 12697444352.0, "38375": 12697444352.0, "38380": 12697444352.0, "38385": 12697444352.0, "38390": 12697444352.0, "38395": 12697444352.0, "38400": 12697444352.0, "38405": 12697444352.0, "38410": 12697444352.0, "38415": 12697444352.0, "38420": 12697444352.0, "38425": 12697444352.0, "38430": 12697444352.0, "38435": 12697444352.0, "38440": 12697444352.0, "38445": 12697444352.0, "38450": 12697444352.0, "38455": 12697444352.0, "38460": 12697444352.0, "38465": 12697444352.0, "38470": 12697444352.0, "38475": 12697444352.0, "38480": 12697444352.0, "38485": 12697444352.0, "38490": 12697444352.0, "38495": 12697444352.0, "38500": 12697444352.0, "38505": 12697444352.0, "38510": 12697444352.0, "38515": 12697444352.0, "38520": 12697444352.0, "38525": 12697444352.0, "38530": 12697444352.0, "38535": 12697444352.0, "38540": 12697444352.0, "38545": 12697444352.0, "38550": 12697444352.0, 
"38555": 12697444352.0, "38560": 12697444352.0, "38565": 12697444352.0, "38570": 12697444352.0, "38575": 12697444352.0, "38580": 12697444352.0, "38585": 12697444352.0, "38590": 12697444352.0, "38595": 12697444352.0, "38600": 12697444352.0, "38605": 12697444352.0, "38610": 12697444352.0, "38615": 12697444352.0, "38620": 12697444352.0, "38625": 12697444352.0, "38630": 12697444352.0, "38635": 12697444352.0, "38640": 12697444352.0, "38645": 12697444352.0, "38650": 12697444352.0, "38655": 12697444352.0, "38660": 12697444352.0, "38665": 12697444352.0, "38670": 12697444352.0, "38675": 12697444352.0, "38680": 12697444352.0, "38685": 12697444352.0, "38690": 12697444352.0, "38695": 12697444352.0, "38700": 12697444352.0, "38705": 12697444352.0, "38710": 12697444352.0, "38715": 12697444352.0, "38720": 12697444352.0, "38725": 12697444352.0, "38730": 12697444352.0, "38735": 12697444352.0, "38740": 12697444352.0, "38745": 12697444352.0, "38750": 12697444352.0, "38755": 12697444352.0, "38760": 12697444352.0, "38765": 12697444352.0, "38770": 12697444352.0, "38775": 12697444352.0, "38780": 12697444352.0, "38785": 12697444352.0, "38790": 12697444352.0, "38795": 12697444352.0, "38800": 12697444352.0, "38805": 12697444352.0, "38810": 12697444352.0, "38815": 12697444352.0, "38820": 12697444352.0, "38825": 12697444352.0, "38830": 12697444352.0, "38835": 12697444352.0, "38840": 12697444352.0, "38845": 12697444352.0, "38850": 12697444352.0, "38855": 12697444352.0, "38860": 12697444352.0, "38865": 12697444352.0, "38870": 12697444352.0, "38875": 12697444352.0, "38880": 12697444352.0, "38885": 12697444352.0, "38890": 12697444352.0, "38895": 12697444352.0, "38900": 12697444352.0, "38905": 12697444352.0, "38910": 12697444352.0, "38915": 12697444352.0, "38920": 12697444352.0, "38925": 12697444352.0, "38930": 12697444352.0, "38935": 12697444352.0, "38940": 12697444352.0, "38945": 12697444352.0, "38950": 12697444352.0, "38955": 12697444352.0, "38960": 12697444352.0, "38965": 12697444352.0, "38970": 12697444352.0, "38975": 12697444352.0, "38980": 12697444352.0, "38985": 12697444352.0, "38990": 12697444352.0, "38995": 12697444352.0, "39000": 12697444352.0, "39005": 12697444352.0, "39010": 12697444352.0, "39015": 12697444352.0, "39020": 12697444352.0, "39025": 12697444352.0, "39030": 12697444352.0, "39035": 12697444352.0, "39040": 12697444352.0, "39045": 12697444352.0, "39050": 12697444352.0, "39055": 12697444352.0, "39060": 12697444352.0, "39065": 12697444352.0, "39070": 12697444352.0, "39075": 12697444352.0, "39080": 12697444352.0, "39085": 12697444352.0, "39090": 12697444352.0, "39095": 12697444352.0, "39100": 12697444352.0, "39105": 12697444352.0, "39110": 12697444352.0, "39115": 12697444352.0, "39120": 12697444352.0, "39125": 12697444352.0, "39130": 12697444352.0, "39135": 12697444352.0, "39140": 12697444352.0, "39145": 12697444352.0, "39150": 12697444352.0, "39155": 12697444352.0, "39160": 12697444352.0, "39165": 12697444352.0, "39170": 12697444352.0, "39175": 12697444352.0, "39180": 12697444352.0, "39185": 12697444352.0, "39190": 12697444352.0, "39195": 12697444352.0, "39200": 12697444352.0, "39205": 12697444352.0, "39210": 12697444352.0, "39215": 12697444352.0, "39220": 12697444352.0, "39225": 12697444352.0, "39230": 12697444352.0, "39235": 12697444352.0, "39240": 12697444352.0, "39245": 12697444352.0, "39250": 12697444352.0, "39255": 12697444352.0, "39260": 12697444352.0, "39265": 12697444352.0, "39270": 12697444352.0, "39275": 12697444352.0, "39280": 12697444352.0, "39285": 12697444352.0, "39290": 12697444352.0, 
"39295": 12697444352.0, "39300": 12697444352.0, "39305": 12697444352.0, "39310": 12697444352.0, "39315": 12697444352.0, "39320": 12697444352.0, "39325": 12697444352.0, "39330": 12697444352.0, "39335": 12697444352.0, "39340": 12697444352.0, "39345": 12697444352.0, "39350": 12697444352.0, "39355": 12697444352.0, "39360": 12697444352.0, "39365": 12697444352.0, "39370": 12697444352.0, "39375": 12697444352.0, "39380": 12697444352.0, "39385": 12697444352.0, "39390": 12697444352.0, "39395": 12697444352.0, "39400": 12697444352.0, "39405": 12697444352.0, "39410": 12697444352.0, "39415": 12697444352.0, "39420": 12697444352.0, "39425": 12697444352.0, "39430": 12697444352.0, "39435": 12697444352.0, "39440": 12697444352.0, "39445": 12697444352.0, "39450": 12697444352.0, "39455": 12697444352.0, "39460": 12697444352.0, "39465": 12697444352.0, "39470": 12697444352.0, "39475": 12697444352.0, "39480": 12697444352.0, "39485": 12697444352.0, "39490": 12697444352.0, "39495": 12697444352.0, "39500": 12697444352.0, "39505": 12697444352.0, "39510": 12697444352.0, "39515": 12697444352.0, "39520": 12697444352.0, "39525": 12697444352.0, "39530": 12697444352.0, "39535": 12697444352.0, "39540": 12697444352.0, "39545": 12697444352.0, "39550": 12697444352.0, "39555": 12697444352.0, "39560": 12697444352.0, "39565": 12697444352.0, "39570": 12697444352.0, "39575": 12697444352.0, "39580": 12697444352.0, "39585": 12697444352.0, "39590": 12697444352.0, "39595": 12697444352.0, "39600": 12697444352.0, "39605": 12697444352.0, "39610": 12697444352.0, "39615": 12697444352.0, "39620": 12697444352.0, "39625": 12697444352.0, "39630": 12697444352.0, "39635": 12697444352.0, "39640": 12697444352.0, "39645": 12697444352.0, "39650": 12697444352.0, "39655": 12697444352.0, "39660": 12697444352.0, "39665": 12697444352.0, "39670": 12697444352.0, "39675": 12697444352.0, "39680": 12697444352.0, "39685": 12697444352.0, "39690": 12697444352.0, "39695": 12697444352.0, "39700": 12697444352.0, "39705": 12697444352.0, "39710": 12697444352.0, "39715": 12697444352.0, "39720": 12697444352.0, "39725": 12697444352.0, "39730": 12697444352.0, "39735": 12697444352.0, "39740": 12697444352.0, "39745": 12697444352.0, "39750": 12697444352.0, "39755": 12697444352.0, "39760": 12697444352.0, "39765": 12697444352.0, "39770": 12697444352.0, "39775": 12697444352.0, "39780": 12697444352.0, "39785": 12697444352.0, "39790": 12697444352.0, "39795": 12697444352.0, "39800": 12697444352.0, "39805": 12697444352.0, "39810": 12697444352.0, "39815": 12697444352.0, "39820": 12697444352.0, "39825": 12697444352.0, "39830": 12697444352.0, "39835": 12697444352.0, "39840": 12697444352.0, "39845": 12697444352.0, "39850": 12697444352.0, "39855": 12697444352.0, "39860": 12697444352.0, "39865": 12697444352.0, "39870": 12697444352.0, "39875": 12697444352.0, "39880": 12697444352.0, "39885": 12697444352.0, "39890": 12697444352.0, "39895": 12697444352.0, "39900": 12697444352.0, "39905": 12697444352.0, "39910": 12697444352.0, "39915": 12697444352.0, "39920": 12697444352.0, "39925": 12697444352.0, "39930": 12697444352.0, "39935": 12697444352.0, "39940": 12697444352.0, "39945": 12697444352.0, "39950": 12697444352.0, "39955": 12697444352.0, "39960": 12697444352.0, "39965": 12697444352.0, "39970": 12697444352.0, "39975": 12697444352.0, "39980": 12697444352.0, "39985": 12697444352.0, "39990": 12697444352.0, "39995": 12697444352.0, "40000": 12697444352.0, "40005": 12697444352.0, "40010": 12697444352.0, "40015": 12697444352.0, "40020": 12697444352.0, "40025": 12697444352.0, "40030": 12697444352.0, 
"40035": 12697444352.0, "40040": 12697444352.0, "40045": 12697444352.0, "40050": 12697444352.0, "40055": 12697444352.0, "40060": 12697444352.0, "40065": 12697444352.0, "40070": 12697444352.0, "40075": 12697444352.0, "40080": 12697444352.0, "40085": 12697444352.0, "40090": 12697444352.0, "40095": 12697444352.0, "40100": 12697444352.0, "40105": 12697444352.0, "40110": 12697444352.0, "40115": 12697444352.0, "40120": 12697444352.0, "40125": 12697444352.0, "40130": 12697444352.0, "40135": 12697444352.0, "40140": 12697444352.0, "40145": 12697444352.0, "40150": 12697444352.0, "40155": 12697444352.0, "40160": 12697444352.0, "40165": 12697444352.0, "40170": 12697444352.0, "40175": 12697444352.0, "40180": 12697444352.0, "40185": 12697444352.0, "40190": 12697444352.0, "40195": 12697444352.0, "40200": 12697444352.0, "40205": 12697444352.0, "40210": 12697444352.0, "40215": 12697444352.0, "40220": 12697444352.0, "40225": 12697444352.0, "40230": 12697444352.0, "40235": 12697444352.0, "40240": 12697444352.0, "40245": 12697444352.0, "40250": 12697444352.0, "40255": 12697444352.0, "40260": 12697444352.0, "40265": 12697444352.0, "40270": 12697444352.0, "40275": 12697444352.0, "40280": 12697444352.0, "40285": 12697444352.0, "40290": 12697444352.0, "40295": 12697444352.0, "40300": 12697444352.0, "40305": 12697444352.0, "40310": 12697444352.0, "40315": 12697444352.0, "40320": 12697444352.0, "40325": 12697444352.0, "40330": 12697444352.0, "40335": 12697444352.0, "40340": 12697444352.0, "40345": 12697444352.0, "40350": 12697444352.0, "40355": 12697444352.0, "40360": 12697444352.0, "40365": 12697444352.0, "40370": 12697444352.0, "40375": 12697444352.0, "40380": 12697444352.0, "40385": 12697444352.0, "40390": 12697444352.0, "40395": 12697444352.0, "40400": 12697444352.0, "40405": 12697444352.0, "40410": 12697444352.0, "40415": 12697444352.0, "40420": 12697444352.0, "40425": 12697444352.0, "40430": 12697444352.0, "40435": 12697444352.0, "40440": 12697444352.0, "40445": 12697444352.0, "40450": 12697444352.0, "40455": 12697444352.0, "40460": 12697444352.0, "40465": 12697444352.0, "40470": 12697444352.0, "40475": 12697444352.0, "40480": 12697444352.0, "40485": 12697444352.0, "40490": 12697444352.0, "40495": 12697444352.0, "40500": 12697444352.0, "40505": 12697444352.0, "40510": 12697444352.0, "40515": 12697444352.0, "40520": 12697444352.0, "40525": 12697444352.0, "40530": 12697444352.0, "40535": 12697444352.0, "40540": 12697444352.0, "40545": 12697444352.0, "40550": 12697444352.0, "40555": 12697444352.0, "40560": 12697444352.0, "40565": 12697444352.0, "40570": 12697444352.0, "40575": 12697444352.0, "40580": 12697444352.0, "40585": 12697444352.0, "40590": 12697444352.0, "40595": 12697444352.0, "40600": 12697444352.0, "40605": 12697444352.0, "40610": 12697444352.0, "40615": 12697444352.0, "40620": 12697444352.0, "40625": 12697444352.0, "40630": 12697444352.0, "40635": 12697444352.0, "40640": 12697444352.0, "40645": 12697444352.0, "40650": 12697444352.0, "40655": 12697444352.0, "40660": 12697444352.0, "40665": 12697444352.0, "40670": 12697444352.0, "40675": 12697444352.0, "40680": 12697444352.0, "40685": 12697444352.0, "40690": 12697444352.0, "40695": 12697444352.0, "40700": 12697444352.0, "40705": 12697444352.0, "40710": 12697444352.0, "40715": 12697444352.0, "40720": 12697444352.0, "40725": 12697444352.0, "40730": 12697444352.0, "40735": 12697444352.0, "40740": 12697444352.0, "40745": 12697444352.0, "40750": 12697444352.0, "40755": 12697444352.0, "40760": 12697444352.0, "40765": 12697444352.0, "40770": 12697444352.0, 
"40775": 12697444352.0, "40780": 12697444352.0, "40785": 12697444352.0, "40790": 12697444352.0, "40795": 12697444352.0, "40800": 12697444352.0, "40805": 12697444352.0, "40810": 12697444352.0, "40815": 12697444352.0, "40820": 12697444352.0, "40825": 12697444352.0, "40830": 12697444352.0, "40835": 12697444352.0, "40840": 12697444352.0, "40845": 12697444352.0, "40850": 12697444352.0, "40855": 12697444352.0, "40860": 12697444352.0, "40865": 12697444352.0, "40870": 12697444352.0, "40875": 12697444352.0, "40880": 12697444352.0, "40885": 12697444352.0, "40890": 12697444352.0, "40895": 12697444352.0, "40900": 12697444352.0, "40905": 12697444352.0, "40910": 12697444352.0, "40915": 12697444352.0, "40920": 12697444352.0, "40925": 12697444352.0, "40930": 12697444352.0, "40935": 12697444352.0, "40940": 12697444352.0, "40945": 12697444352.0, "40950": 12697444352.0, "40955": 12697444352.0, "40960": 12697444352.0, "40965": 12697444352.0, "40970": 12697444352.0, "40975": 12697444352.0, "40980": 12697444352.0, "40985": 12697444352.0, "40990": 12697444352.0, "40995": 12697444352.0, "41000": 12697444352.0, "41005": 12697444352.0, "41010": 12697444352.0, "41015": 12697444352.0, "41020": 12697444352.0, "41025": 12697444352.0, "41030": 12697444352.0, "41035": 12697444352.0, "41040": 12697444352.0, "41045": 12697444352.0, "41050": 12697444352.0, "41055": 12697444352.0, "41060": 12697444352.0, "41065": 12697444352.0, "41070": 12697444352.0, "41075": 12697444352.0, "41080": 12697444352.0, "41085": 12697444352.0, "41090": 12697444352.0, "41095": 12697444352.0, "41100": 12697444352.0, "41105": 12697444352.0, "41110": 12697444352.0, "41115": 12697444352.0, "41120": 12697444352.0, "41125": 12697444352.0, "41130": 12697444352.0, "41135": 12697444352.0, "41140": 12697444352.0, "41145": 12697444352.0, "41150": 12697444352.0, "41155": 12697444352.0, "41160": 12697444352.0, "41165": 12697444352.0, "41170": 12697444352.0, "41175": 12697444352.0, "41180": 12697444352.0, "41185": 12697444352.0, "41190": 12697444352.0, "41195": 12697444352.0, "41200": 12697444352.0, "41205": 12697444352.0, "41210": 12697444352.0, "41215": 12697444352.0, "41220": 12697444352.0, "41225": 12697444352.0, "41230": 12697444352.0, "41235": 12697444352.0, "41240": 12697444352.0, "41245": 12697444352.0, "41250": 12697444352.0, "41255": 12697444352.0, "41260": 12697444352.0, "41265": 12697444352.0, "41270": 12697444352.0, "41275": 12697444352.0, "41280": 12697444352.0, "41285": 12697444352.0, "41290": 12697444352.0, "41295": 12697444352.0, "41300": 12697444352.0, "41305": 12697444352.0, "41310": 12697444352.0, "41315": 12697444352.0, "41320": 12697444352.0, "41325": 12697444352.0, "41330": 12697444352.0, "41335": 12697444352.0, "41340": 12697444352.0, "41345": 12697444352.0, "41350": 12697444352.0, "41355": 12697444352.0, "41360": 12697444352.0, "41365": 12697444352.0, "41370": 12697444352.0, "41375": 12697444352.0, "41380": 12697444352.0, "41385": 12697444352.0, "41390": 12697444352.0, "41395": 12697444352.0, "41400": 12697444352.0, "41405": 12697444352.0, "41410": 12697444352.0, "41415": 12697444352.0, "41420": 12697444352.0, "41425": 12697444352.0, "41430": 12697444352.0, "41435": 12697444352.0, "41440": 12697444352.0, "41445": 12697444352.0, "41450": 12697444352.0, "41455": 12697444352.0, "41460": 12697444352.0, "41465": 12697444352.0, "41470": 12697444352.0, "41475": 12697444352.0, "41480": 12697444352.0, "41485": 12697444352.0, "41490": 12697444352.0, "41495": 12697444352.0, "41500": 12697444352.0, "41505": 12697444352.0, "41510": 12697444352.0, 
"41515": 12697444352.0, "41520": 12697444352.0, "41525": 12697444352.0, "41530": 12697444352.0, "41535": 12697444352.0, "41540": 12697444352.0, "41545": 12697444352.0, "41550": 12697444352.0, "41555": 12697444352.0, "41560": 12697444352.0, "41565": 12697444352.0, "41570": 12697444352.0, "41575": 12697444352.0, "41580": 12697444352.0, "41585": 12697444352.0, "41590": 12697444352.0, "41595": 12697444352.0, "41600": 12697444352.0, "41605": 12697444352.0, "41610": 12697444352.0, "41615": 12697444352.0, "41620": 12697444352.0, "41625": 12697444352.0, "41630": 12697444352.0, "41635": 12697444352.0, "41640": 12697444352.0, "41645": 12697444352.0, "41650": 12697444352.0, "41655": 12697444352.0, "41660": 12697444352.0, "41665": 12697444352.0, "41670": 12697444352.0, "41675": 12697444352.0, "41680": 12697444352.0, "41685": 12697444352.0, "41690": 12697444352.0, "41695": 12697444352.0, "41700": 12697444352.0, "41705": 12697444352.0, "41710": 12697444352.0, "41715": 12697444352.0, "41720": 12697444352.0, "41725": 12697444352.0, "41730": 12697444352.0, "41735": 12697444352.0, "41740": 12697444352.0, "41745": 12697444352.0, "41750": 12697444352.0, "41755": 12697444352.0, "41760": 12697444352.0, "41765": 12697444352.0, "41770": 12697444352.0, "41775": 12697444352.0, "41780": 12697444352.0, "41785": 12697444352.0, "41790": 12697444352.0, "41795": 12697444352.0, "41800": 12697444352.0, "41805": 12697444352.0, "41810": 12697444352.0, "41815": 12697444352.0, "41820": 12697444352.0, "41825": 12697444352.0, "41830": 12697444352.0, "41835": 12697444352.0, "41840": 12697444352.0, "41845": 12697444352.0, "41850": 12697444352.0, "41855": 12697444352.0, "41860": 12697444352.0, "41865": 12697444352.0, "41870": 12697444352.0, "41875": 12697444352.0, "41880": 12697444352.0, "41885": 12697444352.0, "41890": 12697444352.0, "41895": 12697444352.0, "41900": 12697444352.0, "41905": 12697444352.0, "41910": 12697444352.0, "41915": 12697444352.0, "41920": 12697444352.0, "41925": 12697444352.0, "41930": 12697444352.0, "41935": 12697444352.0, "41940": 12697444352.0, "41945": 12697444352.0, "41950": 12697444352.0, "41955": 12697444352.0, "41960": 12697444352.0, "41965": 12697444352.0, "41970": 12697444352.0, "41975": 12697444352.0, "41980": 12697444352.0, "41985": 12697444352.0, "41990": 12697444352.0, "41995": 12697444352.0, "42000": 12697444352.0, "42005": 12697444352.0, "42010": 12697444352.0, "42015": 12697444352.0, "42020": 12697444352.0, "42025": 12697444352.0, "42030": 12697444352.0, "42035": 12697444352.0, "42040": 12697444352.0, "42045": 12697444352.0, "42050": 12697444352.0, "42055": 12697444352.0, "42060": 12697444352.0, "42065": 12697444352.0, "42070": 12697444352.0, "42075": 12697444352.0, "42080": 12697444352.0, "42085": 12697444352.0, "42090": 12697444352.0, "42095": 12697444352.0, "42100": 12697444352.0, "42105": 12697444352.0, "42110": 12697444352.0, "42115": 12697444352.0, "42120": 12697444352.0, "42125": 12697444352.0, "42130": 12697444352.0, "42135": 12697444352.0, "42140": 12697444352.0, "42145": 12697444352.0, "42150": 12697444352.0, "42155": 12697444352.0, "42160": 12697444352.0, "42165": 12697444352.0, "42170": 12697444352.0, "42175": 12697444352.0, "42180": 12697444352.0, "42185": 12697444352.0, "42190": 12697444352.0, "42195": 12697444352.0, "42200": 12697444352.0, "42205": 12697444352.0, "42210": 12697444352.0, "42215": 12697444352.0, "42220": 12697444352.0, "42225": 12697444352.0, "42230": 12697444352.0, "42235": 12697444352.0, "42240": 12697444352.0, "42245": 12697444352.0, "42250": 12697444352.0, 
"42255": 12697444352.0, "42260": 12697444352.0, "42265": 12697444352.0, "42270": 12697444352.0, "42275": 12697444352.0, "42280": 12697444352.0, "42285": 12697444352.0, "42290": 12697444352.0, "42295": 12697444352.0, "42300": 12697444352.0, "42305": 12697444352.0, "42310": 12697444352.0, "42315": 12697444352.0, "42320": 12697444352.0, "42325": 12697444352.0, "42330": 12697444352.0, "42335": 12697444352.0, "42340": 12697444352.0, "42345": 12697444352.0, "42350": 12697444352.0, "42355": 12697444352.0, "42360": 12697444352.0, "42365": 12697444352.0, "42370": 12697444352.0, "42375": 12697444352.0, "42380": 12697444352.0, "42385": 12697444352.0, "42390": 12697444352.0, "42395": 12697444352.0, "42400": 12697444352.0, "42405": 12697444352.0, "42410": 12697444352.0, "42415": 12697444352.0, "42420": 12697444352.0, "42425": 12697444352.0, "42430": 12697444352.0, "42435": 12697444352.0, "42440": 12697444352.0, "42445": 12697444352.0, "42450": 12697444352.0, "42455": 12697444352.0, "42460": 12697444352.0, "42465": 12697444352.0, "42470": 12697444352.0, "42475": 12697444352.0, "42480": 12697444352.0, "42485": 12697444352.0, "42490": 12697444352.0, "42495": 12697444352.0, "42500": 12697444352.0, "42505": 12697444352.0, "42510": 12697444352.0, "42515": 12697444352.0, "42520": 12697444352.0, "42525": 12697444352.0, "42530": 12697444352.0, "42535": 12697444352.0, "42540": 12697444352.0, "42545": 12697444352.0, "42550": 12697444352.0, "42555": 12697444352.0, "42560": 12697444352.0, "42565": 12697444352.0, "42570": 12697444352.0, "42575": 12697444352.0, "42580": 12697444352.0, "42585": 12697444352.0, "42590": 12697444352.0, "42595": 12697444352.0, "42600": 12697444352.0, "42605": 12697444352.0, "42610": 12697444352.0, "42615": 12697444352.0, "42620": 12697444352.0, "42625": 12697444352.0, "42630": 12697444352.0, "42635": 12697444352.0, "42640": 12697444352.0, "42645": 12697444352.0, "42650": 12697444352.0, "42655": 12697444352.0, "42660": 12697444352.0, "42665": 12697444352.0, "42670": 12697444352.0, "42675": 12697444352.0, "42680": 12697444352.0, "42685": 12697444352.0, "42690": 12697444352.0, "42695": 12697444352.0, "42700": 12697444352.0, "42705": 12697444352.0, "42710": 12697444352.0, "42715": 12697444352.0, "42720": 12697444352.0, "42725": 12697444352.0, "42730": 12697444352.0, "42735": 12697444352.0, "42740": 12697444352.0, "42745": 12697444352.0, "42750": 12697444352.0, "42755": 12697444352.0, "42760": 12697444352.0, "42765": 12697444352.0, "42770": 12697444352.0, "42775": 12697444352.0, "42780": 12697444352.0, "42785": 12697444352.0, "42790": 12697444352.0, "42795": 12697444352.0, "42800": 12697444352.0, "42805": 12697444352.0, "42810": 12697444352.0, "42815": 12697444352.0, "42820": 12697444352.0, "42825": 12697444352.0, "42830": 12697444352.0, "42835": 12697444352.0, "42840": 12697444352.0, "42845": 12697444352.0, "42850": 12697444352.0, "42855": 12697444352.0, "42860": 12697444352.0, "42865": 12697444352.0, "42870": 12697444352.0, "42875": 12697444352.0, "42880": 12697444352.0, "42885": 12697444352.0, "42890": 12697444352.0, "42895": 12697444352.0, "42900": 12697444352.0, "42905": 12697444352.0, "42910": 12697444352.0, "42915": 12697444352.0, "42920": 12697444352.0, "42925": 12697444352.0, "42930": 12697444352.0, "42935": 12697444352.0, "42940": 12697444352.0, "42945": 12697444352.0, "42950": 12697444352.0, "42955": 12697444352.0, "42960": 12697444352.0, "42965": 12697444352.0, "42970": 12697444352.0, "42975": 12697444352.0, "42980": 12697444352.0, "42985": 12697444352.0, "42990": 12697444352.0, 
"42995": 12697444352.0, "43000": 12697444352.0, "43005": 12697444352.0, "43010": 12697444352.0, "43015": 12697444352.0, "43020": 12697444352.0, "43025": 12697444352.0, "43030": 12697444352.0, "43035": 12697444352.0, "43040": 12697444352.0, "43045": 12697444352.0, "43050": 12697444352.0, "43055": 12697444352.0, "43060": 12697444352.0, "43065": 12697444352.0, "43070": 12697444352.0, "43075": 12697444352.0, "43080": 12697444352.0, "43085": 12697444352.0, "43090": 12697444352.0, "43095": 12697444352.0, "43100": 12697444352.0, "43105": 12697444352.0, "43110": 12697444352.0, "43115": 12697444352.0, "43120": 12697444352.0, "43125": 12697444352.0, "43130": 12697444352.0, "43135": 12697444352.0, "43140": 12697444352.0, "43145": 12697444352.0, "43150": 12697444352.0, "43155": 12697444352.0, "43160": 12697444352.0, "43165": 12697444352.0, "43170": 12697444352.0, "43175": 12697444352.0, "43180": 12697444352.0, "43185": 12697444352.0, "43190": 12697444352.0, "43195": 12697444352.0, "43200": 12697444352.0, "43205": 12697444352.0, "43210": 12697444352.0, "43215": 12697444352.0, "43220": 12697444352.0, "43225": 12697444352.0, "43230": 12697444352.0, "43235": 12697444352.0, "43240": 12697444352.0, "43245": 12697444352.0, "43250": 12697444352.0, "43255": 12697444352.0, "43260": 12697444352.0, "43265": 12697444352.0, "43270": 12697444352.0, "43275": 12697444352.0, "43280": 12697444352.0, "43285": 12697444352.0, "43290": 12697444352.0, "43295": 12697444352.0, "43300": 12697444352.0, "43305": 12697444352.0, "43310": 12697444352.0, "43315": 12697444352.0, "43320": 12697444352.0, "43325": 12697444352.0, "43330": 12697444352.0, "43335": 12697444352.0, "43340": 12697444352.0, "43345": 12697444352.0, "43350": 12697444352.0, "43355": 12697444352.0, "43360": 12697444352.0, "43365": 12697444352.0, "43370": 12697444352.0, "43375": 12697444352.0, "43380": 12697444352.0, "43385": 12697444352.0, "43390": 12697444352.0, "43395": 12697444352.0, "43400": 12697444352.0, "43405": 12697444352.0, "43410": 12697444352.0, "43415": 12697444352.0, "43420": 12697444352.0, "43425": 12697444352.0, "43430": 12697444352.0, "43435": 12697444352.0, "43440": 12697444352.0, "43445": 12697444352.0, "43450": 12697444352.0, "43455": 12697444352.0, "43460": 12697444352.0, "43465": 12697444352.0, "43470": 12697444352.0, "43475": 12697444352.0, "43480": 12697444352.0, "43485": 12697444352.0, "43490": 12697444352.0, "43495": 12697444352.0, "43500": 12697444352.0, "43505": 12697444352.0, "43510": 12697444352.0, "43515": 12697444352.0, "43520": 12697444352.0, "43525": 12697444352.0, "43530": 12697444352.0, "43535": 12697444352.0, "43540": 12697444352.0, "43545": 12697444352.0, "43550": 12697444352.0, "43555": 12697444352.0, "43560": 12697444352.0, "43565": 12697444352.0, "43570": 12697444352.0, "43575": 12697444352.0, "43580": 12697444352.0, "43585": 12697444352.0, "43590": 12697444352.0, "43595": 12697444352.0, "43600": 12697444352.0, "43605": 12697444352.0, "43610": 12697444352.0, "43615": 12697444352.0, "43620": 12697444352.0, "43625": 12697444352.0, "43630": 12697444352.0, "43635": 12697444352.0, "43640": 12697444352.0, "43645": 12697444352.0, "43650": 12697444352.0, "43655": 12697444352.0, "43660": 12697444352.0, "43665": 12697444352.0, "43670": 12697444352.0, "43675": 12697444352.0, "43680": 12697444352.0, "43685": 12697444352.0, "43690": 12697444352.0, "43695": 12697444352.0, "43700": 12697444352.0, "43705": 12697444352.0, "43710": 12697444352.0, "43715": 12697444352.0, "43720": 12697444352.0, "43725": 12697444352.0, "43730": 12697444352.0, 
"43735": 12697444352.0, "43740": 12697444352.0, "43745": 12697444352.0, "43750": 12697444352.0, "43755": 12697444352.0, "43760": 12697444352.0, "43765": 12697444352.0, "43770": 12697444352.0, "43775": 12697444352.0, "43780": 12697444352.0, "43785": 12697444352.0, "43790": 12697444352.0, "43795": 12697444352.0, "43800": 12697444352.0, "43805": 12697444352.0, "43810": 12697444352.0, "43815": 12697444352.0, "43820": 12697444352.0, "43825": 12697444352.0, "43830": 12697444352.0, "43835": 12697444352.0, "43840": 12697444352.0, "43845": 12697444352.0, "43850": 12697444352.0, "43855": 12697444352.0, "43860": 12697444352.0, "43865": 12697444352.0, "43870": 12697444352.0, "43875": 12697444352.0, "43880": 12697444352.0, "43885": 12697444352.0, "43890": 12697444352.0, "43895": 12697444352.0, "43900": 12697444352.0, "43905": 12697444352.0, "43910": 12697444352.0, "43915": 12697444352.0, "43920": 12697444352.0, "43925": 12697444352.0, "43930": 12697444352.0, "43935": 12697444352.0, "43940": 12697444352.0, "43945": 12697444352.0, "43950": 12697444352.0, "43955": 12697444352.0, "43960": 12697444352.0, "43965": 12697444352.0, "43970": 12697444352.0, "43975": 12697444352.0, "43980": 12697444352.0, "43985": 12697444352.0, "43990": 12697444352.0, "43995": 12697444352.0, "44000": 12697444352.0, "44005": 12697444352.0, "44010": 12697444352.0, "44015": 12697444352.0, "44020": 12697444352.0, "44025": 12697444352.0, "44030": 12697444352.0, "44035": 12697444352.0, "44040": 12697444352.0, "44045": 12697444352.0, "44050": 12697444352.0, "44055": 12697444352.0, "44060": 12697444352.0, "44065": 12697444352.0, "44070": 12697444352.0, "44075": 12697444352.0, "44080": 12697444352.0, "44085": 12697444352.0, "44090": 12697444352.0, "44095": 12697444352.0, "44100": 12697444352.0, "44105": 12697444352.0, "44110": 12697444352.0, "44115": 12697444352.0, "44120": 12697444352.0, "44125": 12697444352.0, "44130": 12697444352.0, "44135": 12697444352.0, "44140": 12697444352.0, "44145": 12697444352.0, "44150": 12697444352.0, "44155": 12697444352.0, "44160": 12697444352.0, "44165": 12697444352.0, "44170": 12697444352.0, "44175": 12697444352.0, "44180": 12697444352.0, "44185": 12697444352.0, "44190": 12697444352.0, "44195": 12697444352.0, "44200": 12697444352.0, "44205": 12697444352.0, "44210": 12697444352.0, "44215": 12697444352.0, "44220": 12697444352.0, "44225": 12697444352.0, "44230": 12697444352.0, "44235": 12697444352.0, "44240": 12697444352.0, "44245": 12697444352.0, "44250": 12697444352.0, "44255": 12697444352.0, "44260": 12697444352.0, "44265": 12697444352.0, "44270": 12697444352.0, "44275": 12697444352.0, "44280": 12697444352.0, "44285": 12697444352.0, "44290": 12697444352.0, "44295": 12697444352.0, "44300": 12697444352.0, "44305": 12697444352.0, "44310": 12697444352.0, "44315": 12697444352.0, "44320": 12697444352.0, "44325": 12697444352.0, "44330": 12697444352.0, "44335": 12697444352.0, "44340": 12697444352.0, "44345": 12697444352.0, "44350": 12697444352.0, "44355": 12697444352.0, "44360": 12697444352.0, "44365": 12697444352.0, "44370": 12697444352.0, "44375": 12697444352.0, "44380": 12697444352.0, "44385": 12697444352.0, "44390": 12697444352.0, "44395": 12697444352.0, "44400": 12697444352.0, "44405": 12697444352.0, "44410": 12697444352.0, "44415": 12697444352.0, "44420": 12697444352.0, "44425": 12697444352.0, "44430": 12697444352.0, "44435": 12697444352.0, "44440": 12697444352.0, "44445": 12697444352.0, "44450": 12697444352.0, "44455": 12697444352.0, "44460": 12697444352.0, "44465": 12697444352.0, "44470": 12697444352.0, 
"44475": 12697444352.0, "44480": 12697444352.0, "44485": 12697444352.0, "44490": 12697444352.0, "44495": 12697444352.0, "44500": 12697444352.0, "44505": 12697444352.0, "44510": 12697444352.0, "44515": 12697444352.0, "44520": 12697444352.0, "44525": 12697444352.0, "44530": 12697444352.0, "44535": 12697444352.0, "44540": 12697444352.0, "44545": 12697444352.0, "44550": 12697444352.0, "44555": 12697444352.0, "44560": 12697444352.0, "44565": 12697444352.0, "44570": 12697444352.0, "44575": 12697444352.0, "44580": 12697444352.0, "44585": 12697444352.0, "44590": 12697444352.0, "44595": 12697444352.0, "44600": 12697444352.0, "44605": 12697444352.0, "44610": 12697444352.0, "44615": 12697444352.0, "44620": 12697444352.0, "44625": 12697444352.0, "44630": 12697444352.0, "44635": 12697444352.0, "44640": 12697444352.0, "44645": 12697444352.0, "44650": 12697444352.0, "44655": 12697444352.0, "44660": 12697444352.0, "44665": 12697444352.0, "44670": 12697444352.0, "44675": 12697444352.0, "44680": 12697444352.0, "44685": 12697444352.0, "44690": 12697444352.0, "44695": 12697444352.0, "44700": 12697444352.0, "44705": 12697444352.0, "44710": 12697444352.0, "44715": 12697444352.0, "44720": 12697444352.0, "44725": 12697444352.0, "44730": 12697444352.0, "44735": 12697444352.0, "44740": 12697444352.0, "44745": 12697444352.0, "44750": 12697444352.0, "44755": 12697444352.0, "44760": 12697444352.0, "44765": 12697444352.0, "44770": 12697444352.0, "44775": 12697444352.0, "44780": 12697444352.0, "44785": 12697444352.0, "44790": 12697444352.0, "44795": 12697444352.0, "44800": 12697444352.0, "44805": 12697444352.0, "44810": 12697444352.0, "44815": 12697444352.0, "44820": 12697444352.0, "44825": 12697444352.0, "44830": 12697444352.0, "44835": 12697444352.0, "44840": 12697444352.0, "44845": 12697444352.0, "44850": 12697444352.0, "44855": 12697444352.0, "44860": 12697444352.0, "44865": 12697444352.0, "44870": 12697444352.0, "44875": 12697444352.0, "44880": 12697444352.0, "44885": 12697444352.0, "44890": 12697444352.0, "44895": 12697444352.0, "44900": 12697444352.0, "44905": 12697444352.0, "44910": 12697444352.0, "44915": 12697444352.0, "44920": 12697444352.0, "44925": 12697444352.0, "44930": 12697444352.0, "44935": 12697444352.0, "44940": 12697444352.0, "44945": 12697444352.0, "44950": 12697444352.0, "44955": 12697444352.0, "44960": 12697444352.0, "44965": 12697444352.0, "44970": 12697444352.0, "44975": 12697444352.0, "44980": 12697444352.0, "44985": 12697444352.0, "44990": 12697444352.0, "44995": 12697444352.0, "45000": 12697444352.0, "45005": 12697444352.0, "45010": 12697444352.0, "45015": 12697444352.0, "45020": 12697444352.0, "45025": 12697444352.0, "45030": 12697444352.0, "45035": 12697444352.0, "45040": 12697444352.0, "45045": 12697444352.0, "45050": 12697444352.0, "45055": 12697444352.0, "45060": 12697444352.0, "45065": 12697444352.0, "45070": 12697444352.0, "45075": 12697444352.0, "45080": 12697444352.0, "45085": 12697444352.0, "45090": 12697444352.0, "45095": 12697444352.0, "45100": 12697444352.0, "45105": 12697444352.0, "45110": 12697444352.0, "45115": 12697444352.0, "45120": 12697444352.0, "45125": 12697444352.0, "45130": 12697444352.0, "45135": 12697444352.0, "45140": 12697444352.0, "45145": 12697444352.0, "45150": 12697444352.0, "45155": 12697444352.0, "45160": 12697444352.0, "45165": 12697444352.0, "45170": 12697444352.0, "45175": 12697444352.0, "45180": 12697444352.0, "45185": 12697444352.0, "45190": 12697444352.0, "45195": 12697444352.0, "45200": 12697444352.0, "45205": 12697444352.0, "45210": 12697444352.0, 
"45215": 12697444352.0, "45220": 12697444352.0, "45225": 12697444352.0, "45230": 12697444352.0, "45235": 12697444352.0, "45240": 12697444352.0, "45245": 12697444352.0, "45250": 12697444352.0, "45255": 12697444352.0, "45260": 12697444352.0, "45265": 12697444352.0, "45270": 12697444352.0, "45275": 12697444352.0, "45280": 12697444352.0, "45285": 12697444352.0, "45290": 12697444352.0, "45295": 12697444352.0, "45300": 12697444352.0, "45305": 12697444352.0, "45310": 12697444352.0, "45315": 12697444352.0, "45320": 12697444352.0, "45325": 12697444352.0, "45330": 12697444352.0, "45335": 12697444352.0, "45340": 12697444352.0, "45345": 12697444352.0, "45350": 12697444352.0, "45355": 12697444352.0, "45360": 12697444352.0, "45365": 12697444352.0, "45370": 12697444352.0, "45375": 12697444352.0, "45380": 12697444352.0, "45385": 12697444352.0, "45390": 12697444352.0, "45395": 12697444352.0, "45400": 12697444352.0, "45405": 12697444352.0, "45410": 12697444352.0, "45415": 12697444352.0, "45420": 12697444352.0, "45425": 12697444352.0, "45430": 12697444352.0, "45435": 12697444352.0, "45440": 12697444352.0, "45445": 12697444352.0, "45450": 12697444352.0, "45455": 12697444352.0, "45460": 12697444352.0, "45465": 12697444352.0, "45470": 12697444352.0, "45475": 12697444352.0, "45480": 12697444352.0, "45485": 12697444352.0, "45490": 12697444352.0, "45495": 12697444352.0, "45500": 12697444352.0, "45505": 12697444352.0, "45510": 12697444352.0, "45515": 12697444352.0, "45520": 12697444352.0, "45525": 12697444352.0, "45530": 12697444352.0, "45535": 12697444352.0, "45540": 12697444352.0, "45545": 12697444352.0, "45550": 12697444352.0, "45555": 12697444352.0, "45560": 12697444352.0, "45565": 12697444352.0, "45570": 12697444352.0, "45575": 12697444352.0, "45580": 12697444352.0, "45585": 12697444352.0, "45590": 12697444352.0, "45595": 12697444352.0, "45600": 12697444352.0, "45605": 12697444352.0, "45610": 12697444352.0, "45615": 12697444352.0, "45620": 12697444352.0, "45625": 12697444352.0, "45630": 12697444352.0, "45635": 12697444352.0, "45640": 12697444352.0, "45645": 12697444352.0, "45650": 12697444352.0, "45655": 12697444352.0, "45660": 12697444352.0, "45665": 12697444352.0, "45670": 12697444352.0, "45675": 12697444352.0, "45680": 12697444352.0, "45685": 12697444352.0, "45690": 12697444352.0, "45695": 12697444352.0, "45700": 12697444352.0, "45705": 12697444352.0, "45710": 12697444352.0, "45715": 12697444352.0, "45720": 12697444352.0, "45725": 12697444352.0, "45730": 12697444352.0, "45735": 12697444352.0, "45740": 12697444352.0, "45745": 12697444352.0, "45750": 12697444352.0, "45755": 12697444352.0, "45760": 12697444352.0, "45765": 12697444352.0, "45770": 12697444352.0, "45775": 12697444352.0, "45780": 12697444352.0, "45785": 12697444352.0, "45790": 12697444352.0, "45795": 12697444352.0, "45800": 12697444352.0, "45805": 12697444352.0, "45810": 12697444352.0, "45815": 12697444352.0, "45820": 12697444352.0, "45825": 12697444352.0, "45830": 12697444352.0, "45835": 12697444352.0, "45840": 12697444352.0, "45845": 12697444352.0, "45850": 12697444352.0, "45855": 12697444352.0, "45860": 12697444352.0, "45865": 12697444352.0, "45870": 12697444352.0, "45875": 12697444352.0, "45880": 12697444352.0, "45885": 12697444352.0, "45890": 12697444352.0, "45895": 12697444352.0, "45900": 12697444352.0, "45905": 12697444352.0, "45910": 12697444352.0, "45915": 12697444352.0, "45920": 12697444352.0, "45925": 12697444352.0, "45930": 12697444352.0, "45935": 12697444352.0, "45940": 12697444352.0, "45945": 12697444352.0, "45950": 12697444352.0, 
"45955": 12697444352.0, "45960": 12697444352.0, "45965": 12697444352.0, "45970": 12697444352.0, "45975": 12697444352.0, "45980": 12697444352.0, "45985": 12697444352.0, "45990": 12697444352.0, "45995": 12697444352.0, "46000": 12697444352.0, "46005": 12697444352.0, "46010": 12697444352.0, "46015": 12697444352.0, "46020": 12697444352.0, "46025": 12697444352.0, "46030": 12697444352.0, "46035": 12697444352.0, "46040": 12697444352.0, "46045": 12697444352.0, "46050": 12697444352.0, "46055": 12697444352.0, "46060": 12697444352.0, "46065": 12697444352.0, "46070": 12697444352.0, "46075": 12697444352.0, "46080": 12697444352.0, "46085": 12697444352.0, "46090": 12697444352.0, "46095": 12697444352.0, "46100": 12697444352.0, "46105": 12697444352.0, "46110": 12697444352.0, "46115": 12697444352.0, "46120": 12697444352.0, "46125": 12697444352.0, "46130": 12697444352.0, "46135": 12697444352.0, "46140": 12697444352.0, "46145": 12697444352.0, "46150": 12697444352.0, "46155": 12697444352.0, "46160": 12697444352.0, "46165": 12697444352.0, "46170": 12697444352.0, "46175": 12697444352.0, "46180": 12697444352.0, "46185": 12697444352.0, "46190": 12697444352.0, "46195": 12697444352.0, "46200": 12697444352.0, "46205": 12697444352.0, "46210": 12697444352.0, "46215": 12697444352.0, "46220": 12697444352.0, "46225": 12697444352.0, "46230": 12697444352.0, "46235": 12697444352.0, "46240": 12697444352.0, "46245": 12697444352.0, "46250": 12697444352.0, "46255": 12697444352.0, "46260": 12697444352.0, "46265": 12697444352.0, "46270": 12697444352.0, "46275": 12697444352.0, "46280": 12697444352.0, "46285": 12697444352.0, "46290": 12697444352.0, "46295": 12697444352.0, "46300": 12697444352.0, "46305": 12697444352.0, "46310": 12697444352.0, "46315": 12697444352.0, "46320": 12697444352.0, "46325": 12697444352.0, "46330": 12697444352.0, "46335": 12697444352.0, "46340": 12697444352.0, "46345": 12697444352.0, "46350": 12697444352.0, "46355": 12697444352.0, "46360": 12697444352.0, "46365": 12697444352.0, "46370": 12697444352.0, "46375": 12697444352.0, "46380": 12697444352.0, "46385": 12697444352.0, "46390": 12697444352.0, "46395": 12697444352.0, "46400": 12697444352.0, "46405": 12697444352.0, "46410": 12697444352.0, "46415": 12697444352.0, "46420": 12697444352.0, "46425": 12697444352.0, "46430": 12697444352.0, "46435": 12697444352.0, "46440": 12697444352.0, "46445": 12697444352.0, "46450": 12697444352.0, "46455": 12697444352.0, "46460": 12697444352.0, "46465": 12697444352.0, "46470": 12697444352.0, "46475": 12697444352.0, "46480": 12697444352.0, "46485": 12697444352.0, "46490": 12697444352.0, "46495": 12697444352.0, "46500": 12697444352.0, "46505": 12697444352.0, "46510": 12697444352.0, "46515": 12697444352.0, "46520": 12697444352.0, "46525": 12697444352.0, "46530": 12697444352.0, "46535": 12697444352.0, "46540": 12697444352.0, "46545": 12697444352.0, "46550": 12697444352.0, "46555": 12697444352.0, "46560": 12697444352.0, "46565": 12697444352.0, "46570": 12697444352.0, "46575": 12697444352.0, "46580": 12697444352.0, "46585": 12697444352.0, "46590": 12697444352.0, "46595": 12697444352.0, "46600": 12697444352.0, "46605": 12697444352.0, "46610": 12697444352.0, "46615": 12697444352.0, "46620": 12697444352.0, "46625": 12697444352.0, "46630": 12697444352.0, "46635": 12697444352.0, "46640": 12697444352.0, "46645": 12697444352.0, "46650": 12697444352.0, "46655": 12697444352.0, "46660": 12697444352.0, "46665": 12697444352.0, "46670": 12697444352.0, "46675": 12697444352.0, "46680": 12697444352.0, "46685": 12697444352.0, "46690": 12697444352.0, 
"46695": 12697444352.0, "46700": 12697444352.0, "46705": 12697444352.0, "46710": 12697444352.0, "46715": 12697444352.0, "46720": 12697444352.0, "46725": 12697444352.0, "46730": 12697444352.0, "46735": 12697444352.0, "46740": 12697444352.0, "46745": 12697444352.0, "46750": 12697444352.0, "46755": 12697444352.0, "46760": 12697444352.0, "46765": 12697444352.0, "46770": 12697444352.0, "46775": 12697444352.0, "46780": 12697444352.0, "46785": 12697444352.0, "46790": 12697444352.0, "46795": 12697444352.0, "46800": 12697444352.0, "46805": 12697444352.0, "46810": 12697444352.0, "46815": 12697444352.0, "46820": 12697444352.0, "46825": 12697444352.0, "46830": 12697444352.0, "46835": 12697444352.0, "46840": 12697444352.0, "46845": 12697444352.0, "46850": 12697444352.0, "46855": 12697444352.0, "46860": 12697444352.0, "46865": 12697444352.0, "46870": 12697444352.0, "46875": 12697444352.0, "46880": 12697444352.0, "46885": 12697444352.0, "46890": 12697444352.0, "46895": 12697444352.0, "46900": 12697444352.0, "46905": 12697444352.0, "46910": 12697444352.0, "46915": 12697444352.0, "46920": 12697444352.0, "46925": 12697444352.0, "46930": 12697444352.0, "46935": 12697444352.0, "46940": 12697444352.0, "46945": 12697444352.0, "46950": 12697444352.0, "46955": 12697444352.0, "46960": 12697444352.0, "46965": 12697444352.0, "46970": 12697444352.0, "46975": 12697444352.0, "46980": 12697444352.0, "46985": 12697444352.0, "46990": 12697444352.0, "46995": 12697444352.0, "47000": 12697444352.0, "47005": 12697444352.0, "47010": 12697444352.0, "47015": 12697444352.0, "47020": 12697444352.0, "47025": 12697444352.0, "47030": 12697444352.0, "47035": 12697444352.0, "47040": 12697444352.0, "47045": 12697444352.0, "47050": 12697444352.0, "47055": 12697444352.0, "47060": 12697444352.0, "47065": 12697444352.0, "47070": 12697444352.0, "47075": 12697444352.0, "47080": 12697444352.0, "47085": 12697444352.0, "47090": 12697444352.0, "47095": 12697444352.0, "47100": 12697444352.0, "47105": 12697444352.0, "47110": 12697444352.0, "47115": 12697444352.0, "47120": 12697444352.0, "47125": 12697444352.0, "47130": 12697444352.0, "47135": 12697444352.0, "47140": 12697444352.0, "47145": 12697444352.0, "47150": 12697444352.0, "47155": 12697444352.0, "47160": 12697444352.0, "47165": 12697444352.0, "47170": 12697444352.0, "47175": 12697444352.0, "47180": 12697444352.0, "47185": 12697444352.0, "47190": 12697444352.0, "47195": 12697444352.0, "47200": 12697444352.0, "47205": 12697444352.0, "47210": 12697444352.0, "47215": 12697444352.0, "47220": 12697444352.0, "47225": 12697444352.0, "47230": 12697444352.0, "47235": 12697444352.0, "47240": 12697444352.0, "47245": 12697444352.0, "47250": 12697444352.0, "47255": 12697444352.0, "47260": 12697444352.0, "47265": 12697444352.0, "47270": 12697444352.0, "47275": 12697444352.0, "47280": 12697444352.0, "47285": 12697444352.0, "47290": 12697444352.0, "47295": 12697444352.0, "47300": 12697444352.0, "47305": 12697444352.0, "47310": 12697444352.0, "47315": 12697444352.0, "47320": 12697444352.0, "47325": 12697444352.0, "47330": 12697444352.0, "47335": 12697444352.0, "47340": 12697444352.0, "47345": 12697444352.0, "47350": 12697444352.0, "47355": 12697444352.0, "47360": 12697444352.0, "47365": 12697444352.0, "47370": 12697444352.0, "47375": 12697444352.0, "47380": 12697444352.0, "47385": 12697444352.0, "47390": 12697444352.0, "47395": 12697444352.0, "47400": 12697444352.0, "47405": 12697444352.0, "47410": 12697444352.0, "47415": 12697444352.0, "47420": 12697444352.0, "47425": 12697444352.0, "47430": 12697444352.0, 
"47435": 12697444352.0, "47440": 12697444352.0, "47445": 12697444352.0, "47450": 12697444352.0, "47455": 12697444352.0, "47460": 12697444352.0, "47465": 12697444352.0, "47470": 12697444352.0, "47475": 12697444352.0, "47480": 12697444352.0, "47485": 12697444352.0, "47490": 12697444352.0, "47495": 12697444352.0, "47500": 12697444352.0, "47505": 12697444352.0, "47510": 12697444352.0, "47515": 12697444352.0, "47520": 12697444352.0, "47525": 12697444352.0, "47530": 12697444352.0, "47535": 12697444352.0, "47540": 12697444352.0, "47545": 12697444352.0, "47550": 12697444352.0, "47555": 12697444352.0, "47560": 12697444352.0, "47565": 12697444352.0, "47570": 12697444352.0, "47575": 12697444352.0, "47580": 12697444352.0, "47585": 12697444352.0, "47590": 12697444352.0, "47595": 12697444352.0, "47600": 12697444352.0, "47605": 12697444352.0, "47610": 12697444352.0, "47615": 12697444352.0, "47620": 12697444352.0, "47625": 12697444352.0, "47630": 12697444352.0, "47635": 12697444352.0, "47640": 12697444352.0, "47645": 12697444352.0, "47650": 12697444352.0, "47655": 12697444352.0, "47660": 12697444352.0, "47665": 12697444352.0, "47670": 12697444352.0, "47675": 12697444352.0, "47680": 12697444352.0, "47685": 12697444352.0, "47690": 12697444352.0, "47695": 12697444352.0, "47700": 12697444352.0, "47705": 12697444352.0, "47710": 12697444352.0, "47715": 12697444352.0, "47720": 12697444352.0, "47725": 12697444352.0, "47730": 12697444352.0, "47735": 12697444352.0, "47740": 12697444352.0, "47745": 12697444352.0, "47750": 12697444352.0, "47755": 12697444352.0, "47760": 12697444352.0, "47765": 12697444352.0, "47770": 12697444352.0, "47775": 12697444352.0, "47780": 12697444352.0, "47785": 12697444352.0, "47790": 12697444352.0, "47795": 12697444352.0, "47800": 12697444352.0, "47805": 12697444352.0, "47810": 12697444352.0, "47815": 12697444352.0, "47820": 12697444352.0, "47825": 12697444352.0, "47830": 12697444352.0, "47835": 12697444352.0, "47840": 12697444352.0, "47845": 12697444352.0, "47850": 12697444352.0, "47855": 12697444352.0, "47860": 12697444352.0, "47865": 12697444352.0, "47870": 12697444352.0, "47875": 12697444352.0, "47880": 12697444352.0, "47885": 12697444352.0, "47890": 12697444352.0, "47895": 12697444352.0, "47900": 12697444352.0, "47905": 12697444352.0, "47910": 12697444352.0, "47915": 12697444352.0, "47920": 12697444352.0, "47925": 12697444352.0, "47930": 12697444352.0, "47935": 12697444352.0, "47940": 12697444352.0, "47945": 12697444352.0, "47950": 12697444352.0, "47955": 12697444352.0, "47960": 12697444352.0, "47965": 12697444352.0, "47970": 12697444352.0, "47975": 12697444352.0, "47980": 12697444352.0, "47985": 12697444352.0, "47990": 12697444352.0, "47995": 12697444352.0, "48000": 12697444352.0, "48005": 12697444352.0, "48010": 12697444352.0, "48015": 12697444352.0, "48020": 12697444352.0, "48025": 12697444352.0, "48030": 12697444352.0, "48035": 12697444352.0, "48040": 12697444352.0, "48045": 12697444352.0, "48050": 12697444352.0, "48055": 12697444352.0, "48060": 12697444352.0, "48065": 12697444352.0, "48070": 12697444352.0, "48075": 12697444352.0, "48080": 12697444352.0, "48085": 12697444352.0, "48090": 12697444352.0, "48095": 12697444352.0, "48100": 12697444352.0, "48105": 12697444352.0, "48110": 12697444352.0, "48115": 12697444352.0, "48120": 12697444352.0, "48125": 12697444352.0, "48130": 12697444352.0, "48135": 12697444352.0, "48140": 12697444352.0, "48145": 12697444352.0, "48150": 12697444352.0, "48155": 12697444352.0, "48160": 12697444352.0, "48165": 12697444352.0, "48170": 12697444352.0, 
"48175": 12697444352.0, "48180": 12697444352.0, "48185": 12697444352.0, "48190": 12697444352.0, "48195": 12697444352.0, "48200": 12697444352.0, "48205": 12697444352.0, "48210": 12697444352.0, "48215": 12697444352.0, "48220": 12697444352.0, "48225": 12697444352.0, "48230": 12697444352.0, "48235": 12697444352.0, "48240": 12697444352.0, "48245": 12697444352.0, "48250": 12697444352.0, "48255": 12697444352.0, "48260": 12697444352.0, "48265": 12697444352.0, "48270": 12697444352.0, "48275": 12697444352.0, "48280": 12697444352.0, "48285": 12697444352.0, "48290": 12697444352.0, "48295": 12697444352.0, "48300": 12697444352.0, "48305": 12697444352.0, "48310": 12697444352.0, "48315": 12697444352.0, "48320": 12697444352.0, "48325": 12697444352.0, "48330": 12697444352.0, "48335": 12697444352.0, "48340": 12697444352.0, "48345": 12697444352.0, "48350": 12697444352.0, "48355": 12697444352.0, "48360": 12697444352.0, "48365": 12697444352.0, "48370": 12697444352.0, "48375": 12697444352.0, "48380": 12697444352.0, "48385": 12697444352.0, "48390": 12697444352.0, "48395": 12697444352.0, "48400": 12697444352.0, "48405": 12697444352.0, "48410": 12697444352.0, "48415": 12697444352.0, "48420": 12697444352.0, "48425": 12697444352.0, "48430": 12697444352.0, "48435": 12697444352.0, "48440": 12697444352.0, "48445": 12697444352.0, "48450": 12697444352.0, "48455": 12697444352.0, "48460": 12697444352.0, "48465": 12697444352.0, "48470": 12697444352.0, "48475": 12697444352.0, "48480": 12697444352.0, "48485": 12697444352.0, "48490": 12697444352.0, "48495": 12697444352.0, "48500": 12697444352.0, "48505": 12697444352.0, "48510": 12697444352.0, "48515": 12697444352.0, "48520": 12697444352.0, "48525": 12697444352.0, "48530": 12697444352.0, "48535": 12697444352.0, "48540": 12697444352.0, "48545": 12697444352.0, "48550": 12697444352.0, "48555": 12697444352.0, "48560": 12697444352.0, "48565": 12697444352.0, "48570": 12697444352.0, "48575": 12697444352.0, "48580": 12697444352.0, "48585": 12697444352.0, "48590": 12697444352.0, "48595": 12697444352.0, "48600": 12697444352.0, "48605": 12697444352.0, "48610": 12697444352.0, "48615": 12697444352.0, "48620": 12697444352.0, "48625": 12697444352.0, "48630": 12697444352.0, "48635": 12697444352.0, "48640": 12697444352.0, "48645": 12697444352.0, "48650": 12697444352.0, "48655": 12697444352.0, "48660": 12697444352.0, "48665": 12697444352.0, "48670": 12697444352.0, "48675": 12697444352.0, "48680": 12697444352.0, "48685": 12697444352.0, "48690": 12697444352.0, "48695": 12697444352.0, "48700": 12697444352.0, "48705": 12697444352.0, "48710": 12697444352.0, "48715": 12697444352.0, "48720": 12697444352.0, "48725": 12697444352.0, "48730": 12697444352.0, "48735": 12697444352.0, "48740": 12697444352.0, "48745": 12697444352.0, "48750": 12697444352.0, "48755": 12697444352.0, "48760": 12697444352.0, "48765": 12697444352.0, "48770": 12697444352.0, "48775": 12697444352.0, "48780": 12697444352.0, "48785": 12697444352.0, "48790": 12697444352.0, "48795": 12697444352.0, "48800": 12697444352.0, "48805": 12697444352.0, "48810": 12697444352.0, "48815": 12697444352.0, "48820": 12697444352.0, "48825": 12697444352.0, "48830": 12697444352.0, "48835": 12697444352.0, "48840": 12697444352.0, "48845": 12697444352.0, "48850": 12697444352.0, "48855": 12697444352.0, "48860": 12697444352.0, "48865": 12697444352.0, "48870": 12697444352.0, "48875": 12697444352.0, "48880": 12697444352.0, "48885": 12697444352.0, "48890": 12697444352.0, "48895": 12697444352.0, "48900": 12697444352.0, "48905": 12697444352.0, "48910": 12697444352.0, 
"48915": 12697444352.0, "48920": 12697444352.0, "48925": 12697444352.0, "48930": 12697444352.0, "48935": 12697444352.0, "48940": 12697444352.0, "48945": 12697444352.0, "48950": 12697444352.0, "48955": 12697444352.0, "48960": 12697444352.0, "48965": 12697444352.0, "48970": 12697444352.0, "48975": 12697444352.0, "48980": 12697444352.0, "48985": 12697444352.0, "48990": 12697444352.0, "48995": 12697444352.0, "49000": 12697444352.0, "49005": 12697444352.0, "49010": 12697444352.0, "49015": 12697444352.0, "49020": 12697444352.0, "49025": 12697444352.0, "49030": 12697444352.0, "49035": 12697444352.0, "49040": 12697444352.0, "49045": 12697444352.0, "49050": 12697444352.0, "49055": 12697444352.0, "49060": 12697444352.0, "49065": 12697444352.0, "49070": 12697444352.0, "49075": 12697444352.0, "49080": 12697444352.0, "49085": 12697444352.0, "49090": 12697444352.0, "49095": 12697444352.0, "49100": 12697444352.0, "49105": 12697444352.0, "49110": 12697444352.0, "49115": 12697444352.0, "49120": 12697444352.0, "49125": 12697444352.0, "49130": 12697444352.0, "49135": 12697444352.0, "49140": 12697444352.0, "49145": 12697444352.0, "49150": 12697444352.0, "49155": 12697444352.0, "49160": 12697444352.0, "49165": 12697444352.0, "49170": 12697444352.0, "49175": 12697444352.0, "49180": 12697444352.0, "49185": 12697444352.0, "49190": 12697444352.0, "49195": 12697444352.0, "49200": 12697444352.0, "49205": 12697444352.0, "49210": 12697444352.0, "49215": 12697444352.0, "49220": 12697444352.0, "49225": 12697444352.0, "49230": 12697444352.0, "49235": 12697444352.0, "49240": 12697444352.0, "49245": 12697444352.0, "49250": 12697444352.0, "49255": 12697444352.0, "49260": 12697444352.0, "49265": 12697444352.0, "49270": 12697444352.0, "49275": 12697444352.0, "49280": 12697444352.0, "49285": 12697444352.0, "49290": 12697444352.0, "49295": 12697444352.0, "49300": 12697444352.0, "49305": 12697444352.0, "49310": 12697444352.0, "49315": 12697444352.0, "49320": 12697444352.0, "49325": 12697444352.0, "49330": 12697444352.0, "49335": 12697444352.0, "49340": 12697444352.0, "49345": 12697444352.0, "49350": 12697444352.0, "49355": 12697444352.0, "49360": 12697444352.0, "49365": 12697444352.0, "49370": 12697444352.0, "49375": 12697444352.0, "49380": 12697444352.0, "49385": 12697444352.0, "49390": 12697444352.0, "49395": 12697444352.0, "49400": 12697444352.0, "49405": 12697444352.0, "49410": 12697444352.0, "49415": 12697444352.0, "49420": 12697444352.0, "49425": 12697444352.0, "49430": 12697444352.0, "49435": 12697444352.0, "49440": 12697444352.0, "49445": 12697444352.0, "49450": 12697444352.0, "49455": 12697444352.0, "49460": 12697444352.0, "49465": 12697444352.0, "49470": 12697444352.0, "49475": 12697444352.0, "49480": 12697444352.0, "49485": 12697444352.0, "49490": 12697444352.0, "49495": 12697444352.0, "49500": 12697444352.0, "49505": 12697444352.0, "49510": 12697444352.0, "49515": 12697444352.0, "49520": 12697444352.0, "49525": 12697444352.0, "49530": 12697444352.0, "49535": 12697444352.0, "49540": 12697444352.0, "49545": 12697444352.0, "49550": 12697444352.0, "49555": 12697444352.0, "49560": 12697444352.0, "49565": 12697444352.0, "49570": 12697444352.0, "49575": 12697444352.0, "49580": 12697444352.0, "49585": 12697444352.0, "49590": 12697444352.0, "49595": 12697444352.0, "49600": 12697444352.0, "49605": 12697444352.0, "49610": 12697444352.0, "49615": 12697444352.0, "49620": 12697444352.0, "49625": 12697444352.0, "49630": 12697444352.0, "49635": 12697444352.0, "49640": 12697444352.0, "49645": 12697444352.0, "49650": 12697444352.0, 
"49655": 12697444352.0, "49660": 12697444352.0, "49665": 12697444352.0, "49670": 12697444352.0, "49675": 12697444352.0, "49680": 12697444352.0, "49685": 12697444352.0, "49690": 12697444352.0, "49695": 12697444352.0, "49700": 12697444352.0, "49705": 12697444352.0, "49710": 12697444352.0, "49715": 12697444352.0, "49720": 12697444352.0, "49725": 12697444352.0, "49730": 12697444352.0, "49735": 12697444352.0, "49740": 12697444352.0, "49745": 12697444352.0, "49750": 12697444352.0, "49755": 12697444352.0, "49760": 12697444352.0, "49765": 12697444352.0, "49770": 12697444352.0, "49775": 12697444352.0, "49780": 12697444352.0, "49785": 12697444352.0, "49790": 12697444352.0, "49795": 12697444352.0, "49800": 12697444352.0, "49805": 12697444352.0, "49810": 12697444352.0, "49815": 12697444352.0, "49820": 12697444352.0, "49825": 12697444352.0, "49830": 12697444352.0, "49835": 12697444352.0, "49840": 12697444352.0, "49845": 12697444352.0, "49850": 12697444352.0, "49855": 12697444352.0, "49860": 12697444352.0, "49865": 12697444352.0, "49870": 12697444352.0, "49875": 12697444352.0, "49880": 12697444352.0, "49885": 12697444352.0, "49890": 12697444352.0, "49895": 12697444352.0, "49900": 12697444352.0, "49905": 12697444352.0, "49910": 12697444352.0, "49915": 12697444352.0, "49920": 12697444352.0, "49925": 12697444352.0, "49930": 12697444352.0, "49935": 12697444352.0, "49940": 12697444352.0, "49945": 12697444352.0, "49950": 12697444352.0, "49955": 12697444352.0, "49960": 12697444352.0, "49965": 12697444352.0, "49970": 12697444352.0, "49975": 12697444352.0, "49980": 12697444352.0, "49985": 12697444352.0, "49990": 12697444352.0, "49995": 12697444352.0, "50000": 12697444352.0, "50005": 12697444352.0, "50010": 12697444352.0, "50015": 12697444352.0, "50020": 12697444352.0, "50025": 12697444352.0, "50030": 12697444352.0, "50035": 12697444352.0, "50040": 12697444352.0, "50045": 12697444352.0, "50050": 12697444352.0, "50055": 12697444352.0, "50060": 12697444352.0, "50065": 12697444352.0, "50070": 12697444352.0, "50075": 12697444352.0, "50080": 12697444352.0, "50085": 12697444352.0, "50090": 12697444352.0, "50095": 12697444352.0, "50100": 12697444352.0, "50105": 12697444352.0, "50110": 12697444352.0, "50115": 12697444352.0, "50120": 12697444352.0, "50125": 12697444352.0, "50130": 12697444352.0, "50135": 12697444352.0, "50140": 12697444352.0, "50145": 12697444352.0, "50150": 12697444352.0, "50155": 12697444352.0, "50160": 12697444352.0, "50165": 12697444352.0, "50170": 12697444352.0, "50175": 12697444352.0, "50180": 12697444352.0, "50185": 12697444352.0, "50190": 12697444352.0, "50195": 12697444352.0, "50200": 12697444352.0, "50205": 12697444352.0, "50210": 12697444352.0, "50215": 12697444352.0, "50220": 12697444352.0, "50225": 12697444352.0, "50230": 12697444352.0, "50235": 12697444352.0, "50240": 12697444352.0, "50245": 12697444352.0, "50250": 12697444352.0, "50255": 12697444352.0, "50260": 12697444352.0, "50265": 12697444352.0, "50270": 12697444352.0, "50275": 12697444352.0, "50280": 12697444352.0, "50285": 12697444352.0, "50290": 12697444352.0, "50295": 12697444352.0, "50300": 12697444352.0, "50305": 12697444352.0, "50310": 12697444352.0, "50315": 12697444352.0, "50320": 12697444352.0, "50325": 12697444352.0, "50330": 12697444352.0, "50335": 12697444352.0, "50340": 12697444352.0, "50345": 12697444352.0, "50350": 12697444352.0, "50355": 12697444352.0, "50360": 12697444352.0, "50365": 12697444352.0, "50370": 12697444352.0, "50375": 12697444352.0, "50380": 12697444352.0, "50385": 12697444352.0, "50390": 12697444352.0, 
"50395": 12697444352.0, "50400": 12697444352.0, "50405": 12697444352.0, "50410": 12697444352.0, "50415": 12697444352.0, "50420": 12697444352.0, "50425": 12697444352.0, "50430": 12697444352.0, "50435": 12697444352.0, "50440": 12697444352.0, "50445": 12697444352.0, "50450": 12697444352.0, "50455": 12697444352.0, "50460": 12697444352.0, "50465": 12697444352.0, "50470": 12697444352.0, "50475": 12697444352.0, "50480": 12697444352.0, "50485": 12697444352.0, "50490": 12697444352.0, "50495": 12697444352.0, "50500": 12697444352.0, "50505": 12697444352.0, "50510": 12697444352.0, "50515": 12697444352.0, "50520": 12697444352.0, "50525": 12697444352.0, "50530": 12697444352.0, "50535": 12697444352.0, "50540": 12697444352.0, "50545": 12697444352.0, "50550": 12697444352.0, "50555": 12697444352.0, "50560": 12697444352.0, "50565": 12697444352.0, "50570": 12697444352.0, "50575": 12697444352.0, "50580": 12697444352.0, "50585": 12697444352.0, "50590": 12697444352.0, "50595": 12697444352.0, "50600": 12697444352.0, "50605": 12697444352.0, "50610": 12697444352.0, "50615": 12697444352.0, "50620": 12697444352.0, "50625": 12697444352.0, "50630": 12697444352.0, "50635": 12697444352.0, "50640": 12697444352.0, "50645": 12697444352.0, "50650": 12697444352.0, "50655": 12697444352.0, "50660": 12697444352.0, "50665": 12697444352.0, "50670": 12697444352.0, "50675": 12697444352.0, "50680": 12697444352.0, "50685": 12697444352.0, "50690": 12697444352.0, "50695": 12697444352.0, "50700": 12697444352.0, "50705": 12697444352.0, "50710": 12697444352.0, "50715": 12697444352.0, "50720": 12697444352.0, "50725": 12697444352.0, "50730": 12697444352.0, "50735": 12697444352.0, "50740": 12697444352.0, "50745": 12697444352.0, "50750": 12697444352.0, "50755": 12697444352.0, "50760": 12697444352.0, "50765": 12697444352.0, "50770": 12697444352.0, "50775": 12697444352.0, "50780": 12697444352.0, "50785": 12697444352.0, "50790": 12697444352.0, "50795": 12697444352.0, "50800": 12697444352.0, "50805": 12697444352.0, "50810": 12697444352.0, "50815": 12697444352.0, "50820": 12697444352.0, "50825": 12697444352.0, "50830": 12697444352.0, "50835": 12697444352.0, "50840": 12697444352.0, "50845": 12697444352.0, "50850": 12697444352.0, "50855": 12697444352.0, "50860": 12697444352.0}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 3.57882, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 3.45399, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 3.45639, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 3.45787, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": 
"nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 3.45745, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 3.46053, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 3.46195, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 3.45764, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 3.45366, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 3.44791, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 3.44071, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 3.43721, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 3.43243, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 3.42856, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 3.42615, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 3.42167, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", 
"1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 3.42017, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 3.41877, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 3.4179, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 3.41685, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 3.41557, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 3.4138, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 3.41345, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 3.41388, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 3.41147, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 3.41167, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 3.41202, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 3.41098, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": 
"nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 3.40974, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 3.40831, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 3.40639, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 3.40714, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 3.40809, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 3.41167, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 3.41108, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 3.70469, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 3.40703, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 3.40889, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 3.4081, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 3.40775, 
"4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 3.40614, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 3.40753, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 3.40822, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 3.40668, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 3.40671, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 3.40461, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 3.40749, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 3.40565, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 3.40816, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 3.41793, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 3.40418, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": 
"nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 3.40562, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 3.40567, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 3.40585, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 3.40531, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 3.40457, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 3.40634, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 3.40623, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 3.40547, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 3.40555, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 3.4058, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 3.40602, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 3.40595, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", 
"6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 3.40618, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 3.40649, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 3.40552, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 3.40568, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 3.40629, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 3.40496, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 3.4057, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 3.40432, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 3.40608, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 3.40582, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 3.4012, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 3.40193, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": 
"nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 3.3998, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 3.39987, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 3.39974, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 3.40071, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 3.40004, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 3.39873, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 3.40065, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 3.3997, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 3.39951, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 3.39913, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 3.39882, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 3.39943, "8705": "nan", "8710": "nan", 
"8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 3.39899, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 3.39922, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 3.39851, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 3.39732, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 3.39956, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 3.39892, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 3.39915, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 3.39924, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 3.3992, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 3.40413, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 3.40303, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": 
"nan", "9895": "nan", "9900": 3.40214, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 3.40233, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 3.40178, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 3.40335, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 3.40219, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 3.40245, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 3.40044, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 3.40198, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 3.40235, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 3.4023, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 3.57166, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 
3.40881, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 3.40536, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 3.40682, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 3.40694, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 3.40862, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 3.40891, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 3.40675, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 3.40723, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 3.40754, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 3.40712, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 3.40718, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 3.40644, 
"12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 3.40946, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 3.40621, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 3.40595, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 3.40806, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 3.40588, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 3.40618, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": 3.40622, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 3.40699, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 3.40666, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 3.40745, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 3.40634, "13205": 
"nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 3.4068, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 3.40849, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": 3.40645, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 3.40465, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 3.4069, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 3.40662, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": 3.40602, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 3.406, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 3.40549, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 3.40604, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 3.40558, "14305": "nan", "14310": 
"nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 3.40585, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 3.40472, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": 3.40197, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 3.60515, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 3.40207, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 3.40414, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": 3.40489, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 3.40381, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 3.40674, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 3.40601, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 3.40535, "15405": "nan", "15410": "nan", 
"15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 3.40328, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 3.40215, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": 3.40278, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 3.40247, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 3.40355, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 3.40289, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": 3.40026, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 3.40281, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 3.40246, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 3.40285, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 3.40195, "16505": "nan", "16510": "nan", "16515": 
"nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 3.40179, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 3.40269, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": 3.40214, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 3.40285, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 3.40203, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 3.39829, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": 3.39879, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 3.39978, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 3.39972, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 3.39888, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 3.40202, "17605": "nan", "17610": "nan", "17615": "nan", 
"17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 3.40275, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 3.40364, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 3.40363, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": 3.40251, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 3.40134, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 3.40223, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": 3.4028, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 3.40249, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 3.84753, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 3.41447, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 3.41529, "18705": "nan", "18710": "nan", "18715": "nan", "18720": 
"nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 3.41535, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 3.41538, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 3.41487, "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": 3.41443, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 3.41488, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 3.41407, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 3.41455, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 3.4144, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 3.41386, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 3.41506, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 3.41498, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", 
"19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 3.41454, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 3.41471, "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": 3.41392, "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": 3.41612, "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": 3.41488, "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": 3.41519, "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": 3.41466, "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": 3.41328, "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": 3.4147, "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": 3.41488, "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": 3.4131, "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": 
"nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": 3.41453, "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": 3.41273, "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": 3.41362, "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": 3.41429, "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": 3.41362, "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": 3.41427, "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": 3.41331, "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": 3.41415, "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": 3.41438, "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": 3.41429, "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 3.41436, "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", 
"22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": 3.414, "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": 3.41328, "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": 12.00895, "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": 61.10891, "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": 3.57915, "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": 3.40562, "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": 3.40772, "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": 3.40858, "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": 3.40807, "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": 3.40964, "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": 3.40835, "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": 
"nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": 3.40883, "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": 3.40864, "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": 3.4075, "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": 3.40832, "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": 3.40674, "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": 3.40722, "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": 3.40746, "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": 3.40661, "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": 3.40648, "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": 3.4062, "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": 3.40638, "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", 
"24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": 3.40767, "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": 3.40711, "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": 3.40688, "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": 3.40489, "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": 3.40584, "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": 3.40644, "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": 3.40639, "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": 3.40661, "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": 3.40519, "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": 3.40696, "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": 3.40637, "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": 
"nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": 3.4069, "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": 3.40651, "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": 3.4049, "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": 3.40764, "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": 3.40753, "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": 3.40751, "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": 3.40842, "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": 3.40699, "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": 3.40703, "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": 3.63448, "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": 3.40862, "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", 
"26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": 3.40771, "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": 3.40758, "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": 3.40757, "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": 3.40794, "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": 3.40842, "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": 3.40865, "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": 3.40541, "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": 3.40934, "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": 3.40656, "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": 3.40617, "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": 3.40781, "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": 
"nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": 3.40687, "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": 3.40798, "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": 3.40761, "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": 3.40805, "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": 3.40645, "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": 3.40666, "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": 3.41966, "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": 3.40725, "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": 3.40834, "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": 3.4088, "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": 3.40896, "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", 
"28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": 3.40136, "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": 3.40264, "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": 3.40165, "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": 3.40189, "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": 3.40016, "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": 3.4013, "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": 3.40295, "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": 3.40146, "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": 3.40089, "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": 3.40084, "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": 3.40199, "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": 
"nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": 3.40286, "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": 3.40219, "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": 3.40326, "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": 3.65163, "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": 3.39865, "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": 3.39756, "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 3.39844, "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": 3.39702, "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": 3.39845, "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": 3.39971, "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": 3.39821, "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", 
"30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": 3.39952, "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": 3.40066, "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": 3.39862, "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": 3.3997, "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": 3.39891, "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": 3.3989, "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": 3.3991, "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": 3.39818, "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": 3.39925, "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": 3.39947, "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": 3.39902, "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", 
"31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": 3.39893, "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": 3.39819, "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": 3.3985, "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": 3.39961, "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": 3.39901, "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": 3.39796, "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": 3.40035, "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": 3.3998, "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": 3.40046, "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": 3.39997, "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": 3.39946, "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": 
"nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 3.39763, "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": 3.39822, "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": 3.39786, "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": 3.39773, "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": 3.39886, "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": 3.39724, "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": 3.39886, "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": 3.39793, "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": 8.22537, "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": 3.39881, "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": 3.39522, "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", 
"34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": 3.39607, "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": 3.3966, "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": 3.39642, "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": 3.39663, "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": 3.39605, "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": 3.39674, "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": 3.39748, "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": 3.39649, "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": 3.39567, "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": 3.39479, "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": 3.39648, "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": 
"nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": 3.39707, "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": 3.39708, "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": 3.39672, "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": 3.39448, "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": 3.39501, "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": 3.3953, "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": 3.39487, "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": 3.39608, "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": 3.3959, "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": 3.39652, "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": 3.39593, "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", 
"36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": 3.3964, "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": 3.39676, "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": 3.3959, "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": 3.39651, "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": 3.39593, "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": 3.39902, "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": 3.39556, "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": 3.39507, "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": 3.39605, "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": 3.39676, "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": 3.39612, "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": 
"nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": 3.39629, "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": 3.40616, "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": 3.40621, "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": 3.40664, "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": 3.40579, "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": 3.40854, "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": 3.40794, "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": 3.407, "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": 3.40628, "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": 3.40662, "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": 3.40614, "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", 
"38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": 3.4055, "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": 3.40677, "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": 3.40597, "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": 3.40773, "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": 3.41559, "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": 3.44368, "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": 3.4075, "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": 3.40474, "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": 3.40674, "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": 3.40688, "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": 3.40698, "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": 
"nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": 3.40569, "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": 3.40642, "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": 3.40702, "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": 3.40635, "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": 3.40564, "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": 3.40588, "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": 3.40886, "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": 3.40535, "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": 3.40625, "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": 3.48043, "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": 3.4818, "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", 
"40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": 3.48178, "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": 3.4816, "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": 3.48218, "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": 3.48078, "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": 3.48218, "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": 3.48155, "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": 3.48176, "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 3.48168, "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": 3.48012, "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": 3.4818, "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": 3.48108, "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": 
"nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": 3.48115, "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": 3.48059, "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": 3.47992, "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": 3.48102, "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": 3.48205, "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": 3.4815, "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": 3.48134, "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": 3.48066, "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": 3.48128, "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": 3.48135, "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": 3.48131, "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", 
"42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": 3.48153, "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": 3.48109, "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": 3.48185, "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": 3.48201, "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": 3.48209, "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": 3.48176, "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": 3.4801, "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": 3.48216, "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": 3.48276, "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": 3.48444, "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": 3.48375, "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": 
"nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": 3.48346, "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 3.48524, "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": 3.48499, "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": 3.48328, "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": 3.48353, "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": 3.48397, "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": 3.4839, "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": 3.4829, "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": 3.48384, "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": 3.4836, "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": 3.48308, "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": 
"nan", "45190": "nan", "45195": "nan", "45200": 3.48296, "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": 3.48248, "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": 3.48267, "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": 3.48291, "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": 3.48148, "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": 3.48235, "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": 3.4828, "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": 3.48266, "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": 3.48317, "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": 3.48175, "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": 3.48225, "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", 
"46290": "nan", "46295": "nan", "46300": 3.48232, "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": 3.48206, "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": 3.48319, "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": 3.48116, "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": 3.48123, "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": 3.48144, "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": 3.48214, "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": 3.48297, "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": 3.48024, "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": 3.48232, "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": 3.55626, "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": 
"nan", "47395": "nan", "47400": 3.4041, "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": 3.40416, "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": 3.4036, "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": 3.40505, "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": 3.40468, "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": 3.40478, "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": 3.40482, "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": 3.40301, "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": 3.40444, "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": 3.40431, "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": 3.40469, "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", 
"48495": "nan", "48500": 3.40593, "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": 3.40493, "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": 3.40576, "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": 3.40555, "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": 3.40519, "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": 3.40503, "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": 3.40385, "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": 3.40444, "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": 3.40494, "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": 3.40493, "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": 3.40584, "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": 
"nan", "49600": 3.40345, "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": 3.40626, "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": 3.40524, "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": 3.40541, "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": 3.40473, "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": 3.40351, "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": 3.40603, "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": 3.40501, "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": 3.40514, "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": 3.40462, "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": 3.40362, "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", 
"50700": 3.40554, "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": 3.40455, "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml index b600345..dbd376a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml @@ -1,87 +1,87 @@ -ENV_VARS: - NCCL_IB_SL: 1 - NCCL_IB_TIMEOUT: 19 - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_FWD_LAYERNORM_SM_MARGIN: 16 - NVTE_BWD_LAYERNORM_SM_MARGIN: 16 - NCCL_P2P_NET_CHUNKSIZE: 2097152 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 -TEST_TYPE: "release" -MODEL_ARGS: - # Distributed args - --distributed-timeout-minutes: 60 - --tensor-model-parallel-size: 8 - --pipeline-model-parallel-size: 1 - --use-distributed-optimizer: true - --overlap-grad-reduce: true - --overlap-param-gather: true - # Training args - --use-mcore-models: true - --sequence-parallel: true - --disable-bias-linear: true - --micro-batch-size: 4 - --rampup-batch-size: "384 384 97656250" - --global-batch-size: 1152 - --train-samples: 19531250 - --manual-gc: true - # Transformer Engine args - --transformer-impl: transformer_engine - # Data args - --data-cache-path: ${DATA_CACHE_PATH} - --tokenizer-type: GPTSentencePieceTokenizer - --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model - --data-path: $DATA_BLEND - --split: 99,1,0 - --no-mmap-bin-files: true - --num-workers: 6 - # Add network size args - --apply-layernorm-1p: true - --untie-embeddings-and-output-weights: true - --position-embedding-type: rope - --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container - --rotary-percent: 0.5 - --squared-relu: true - --num-layers: 32 - --hidden-size: 6144 - --num-attention-heads: 48 - --group-query-attention: true - --num-query-groups: 8 - --seq-length: 4096 - --max-position-embeddings: 4096 - # Add regularization args - --attention-dropout: 0.0 - --hidden-dropout: 0.0 - --clip-grad: 1.0 - --weight-decay: 0.1 - # Add learning rate args - --lr-decay-samples: 1949218748 - --lr-warmup-samples: 3906252 - --lr: 4.5e-4 - --min-lr: 4.5e-5 - --decoupled-lr: 5.0e-4 - --decoupled-min-lr: 4.5e-5 - --lr-decay-style: cosine - --adam-beta1: 0.9 - --adam-beta2: 0.95 - # Add validation args - --eval-iters: 32 - --eval-interval: 2000 - # Add checkpointing args - --load: ${OUTPUT_PATH}/checkpoints - --save: ${OUTPUT_PATH}/checkpoints - --save-interval: 5000 - # Add initialization args - --init-method-std: 0.0134 - # Add logging args - --log-timers-to-tensorboard: true - --log-memory-to-tensorboard: true - --log-num-zeros-in-grad: true - --log-params-norm: true - --log-validation-ppl-to-tensorboard: true - --log-throughput: true - --log-interval: 100 - --tensorboard-dir: ${OUTPUT_PATH}/tensorboard - --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args - --bf16: true +ENV_VARS: + NCCL_IB_SL: 1 + NCCL_IB_TIMEOUT: 19 + 
CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_FWD_LAYERNORM_SM_MARGIN: 16 + NVTE_BWD_LAYERNORM_SM_MARGIN: 16 + NCCL_P2P_NET_CHUNKSIZE: 2097152 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 +TEST_TYPE: "release" +MODEL_ARGS: + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 8 + --pipeline-model-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + # Training args + --use-mcore-models: true + --sequence-parallel: true + --disable-bias-linear: true + --micro-batch-size: 4 + --rampup-batch-size: "384 384 97656250" + --global-batch-size: 1152 + --train-samples: 19531250 + --manual-gc: true + # Transformer Engine args + --transformer-impl: transformer_engine + # Data args + --data-cache-path: ${DATA_CACHE_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --data-path: $DATA_BLEND + --split: 99,1,0 + --no-mmap-bin-files: true + --num-workers: 6 + # Add network size args + --apply-layernorm-1p: true + --untie-embeddings-and-output-weights: true + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container + --rotary-percent: 0.5 + --squared-relu: true + --num-layers: 32 + --hidden-size: 6144 + --num-attention-heads: 48 + --group-query-attention: true + --num-query-groups: 8 + --seq-length: 4096 + --max-position-embeddings: 4096 + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + # Add learning rate args + --lr-decay-samples: 1949218748 + --lr-warmup-samples: 3906252 + --lr: 4.5e-4 + --min-lr: 4.5e-5 + --decoupled-lr: 5.0e-4 + --decoupled-min-lr: 4.5e-5 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + # Add validation args + --eval-iters: 32 + --eval-interval: 2000 + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 5000 + # Add initialization args + --init-method-std: 0.0134 + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true + --log-throughput: true + --log-interval: 100 + --tensorboard-dir: ${OUTPUT_PATH}/tensorboard + --wandb-project: megatron-core-release-runs + --wandb-exp-name: ${WANDB_EXPERIMENT} + # Add mixed precision args + --bf16: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_dev.json new file mode 100644 index 0000000..a337e70 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82792, "5": 10.84781, "10": 10.78012, "15": 10.81698, "20": 10.71288, "25": 10.53426, "30": 10.34872, "35": 10.26352, "40": 10.08926, "45": 9.83354, "50": 9.91813}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4678.0, "5": 5335.0, "10": 4344.0, "15": 5230.0, "20": 4921.0, "25": 4920.0, "30": 5481.0, "35": 5657.0, "40": 6030.0, "45": 5883.0, "50": 6771.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 
50, "step_interval": 5, "values": {"1": 1115076608.0, "5": 1115074048.0, "10": 1115075584.0, "15": 1115076096.0, "20": 1115074560.0, "25": 1115074560.0, "30": 1115074048.0, "35": 1115077120.0, "40": 1115076096.0, "45": 1115076608.0, "50": 1115076096.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1537535488.0, "5": 1996728320.0, "10": 1999877120.0, "15": 2002960384.0, "20": 2002960384.0, "25": 2002960384.0, "30": 2002960384.0, "35": 2003681280.0, "40": 2003681280.0, "45": 2003681280.0, "50": 2003681280.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 23.82563, "5": 0.37855, "10": 0.37452, "15": 0.37394, "20": 0.37392, "25": 0.37387, "30": 0.37168, "35": 0.37314, "40": 0.37306, "45": 0.37207, "50": 0.37144}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_lts.json new file mode 100644 index 0000000..3e47480 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82774, "5": 10.84827, "10": 10.78011, "15": 10.81678, "20": 10.71279, "25": 10.53365, "30": 10.34926, "35": 10.26352, "40": 10.08942, "45": 9.83354, "50": 9.91824}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4729.0, "5": 5488.0, "10": 4431.0, "15": 5132.0, "20": 4879.0, "25": 4945.0, "30": 5550.0, "35": 5723.0, "40": 6001.0, "45": 5773.0, "50": 6825.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1116130816.0, "5": 1116128768.0, "10": 1116129792.0, "15": 1116129792.0, "20": 1116128768.0, "25": 1116129280.0, "30": 1116128768.0, "35": 1116131328.0, "40": 1116129792.0, "45": 1116129280.0, "50": 1116131328.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1537398784.0, "5": 1998091776.0, "10": 1999355904.0, "15": 2002624512.0, "20": 2002624512.0, "25": 2002624512.0, "30": 2002624512.0, "35": 2006002176.0, "40": 2006002176.0, "45": 2006002176.0, "50": 2006002176.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 20.55189, "5": 0.35363, "10": 0.41186, "15": 0.34834, "20": 0.34735, "25": 0.34825, "30": 0.34579, "35": 0.34453, "40": 0.34407, "45": 0.34523, "50": 0.34418}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/model_config.yaml new file mode 100644 index 0000000..eb822ab --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic/model_config.yaml @@ -0,0 +1,56 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true 
+ --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --context-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_dev.json new file mode 100644 index 0000000..4665993 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82792, "5": 10.84769, "10": 10.78018, "15": 10.81724, "20": 10.71298, "25": 10.5334, "30": 10.3493, "35": 10.26338, "40": 10.08914, "45": 9.83363, "50": 9.91831}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4711.0, "5": 5348.0, "10": 4444.0, "15": 5314.0, "20": 4828.0, "25": 4941.0, "30": 5365.0, "35": 5747.0, "40": 5873.0, "45": 5803.0, "50": 6687.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1115076608.0, "5": 1115074048.0, "10": 1115726848.0, "15": 1115076096.0, "20": 1115074560.0, "25": 1115074560.0, "30": 1115074560.0, "35": 1115077120.0, "40": 1115075584.0, "45": 1115075584.0, "50": 1115076608.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1537535488.0, "5": 1996728320.0, "10": 1999867904.0, "15": 2001162752.0, "20": 2001162752.0, "25": 2001162752.0, "30": 2001162752.0, "35": 2005172224.0, "40": 2005172224.0, "45": 2005172224.0, "50": 2005172224.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.79547, "5": 0.37185, "10": 0.37271, "15": 0.37137, "20": 0.36427, "25": 0.36881, "30": 0.35953, "35": 0.35936, "40": 0.35904, "45": 0.35963, "50": 0.36552}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_lts.json new file mode 100644 index 
0000000..38ad5a7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82774, "5": 10.84793, "10": 10.77998, "15": 10.81681, "20": 10.71261, "25": 10.53377, "30": 10.3493, "35": 10.26313, "40": 10.08882, "45": 9.83346, "50": 9.91821}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4721.0, "5": 5480.0, "10": 4409.0, "15": 5347.0, "20": 4986.0, "25": 4967.0, "30": 5474.0, "35": 5679.0, "40": 5936.0, "45": 5926.0, "50": 6715.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1117179392.0, "5": 1117177344.0, "10": 1117178880.0, "15": 1117178880.0, "20": 1117177856.0, "25": 1117177344.0, "30": 1117177344.0, "35": 1117179904.0, "40": 1117178368.0, "45": 1117178368.0, "50": 1117178880.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1537373184.0, "5": 1998192128.0, "10": 2001248256.0, "15": 2002296832.0, "20": 2002296832.0, "25": 2002296832.0, "30": 2002296832.0, "35": 2003603968.0, "40": 2003603968.0, "45": 2003603968.0, "50": 2003603968.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.78085, "5": 0.35009, "10": 0.36696, "15": 0.34452, "20": 0.3477, "25": 0.34319, "30": 0.34248, "35": 0.34296, "40": 0.3444, "45": 0.33948, "50": 0.33778}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/model_config.yaml new file mode 100644 index 0000000..83918f1 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last/model_config.yaml @@ -0,0 +1,57 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --context-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: 
torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true + --use-tp-pp-dp-mapping: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json index ce02aad..20e94b3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.84013, - 10.8726, - 10.85028, - 10.7965, - 10.68165, - 10.60635, - 10.12791, - 10.22204, - 10.13807, - 9.82329 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1715.0, - 1828.0, - 1929.0, - 2000.0, - 1947.0, - 1769.0, - 1649.0, - 2052.0, - 2353.0, - 2301.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 5.42717, - 0.09122, - 0.08825, - 0.08981, - 0.08828, - 0.08996, - 0.08919, - 0.0901, - 0.08957, - 0.08977 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84013, "5": 10.84032, "10": 10.81346, "15": 10.80282, "20": 10.70508, "25": 10.53868, "30": 10.3554, "35": 10.27166, "40": 10.08065, "45": 9.82309, "50": 9.90133}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1715.0, "5": 1885.0, "10": 1398.0, "15": 1795.0, "20": 1692.0, "25": 1622.0, "30": 2013.0, "35": 2079.0, "40": 2270.0, "45": 2200.0, "50": 2259.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 615042560.0, "5": 615042560.0, "10": 615042560.0, "15": 615042560.0, "20": 615042560.0, "25": 615042560.0, "30": 615042560.0, "35": 615042560.0, "40": 615042560.0, "45": 615042560.0, "50": 615042560.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4639367168.0, "5": 4735983616.0, "10": 4735983616.0, "15": 4735983616.0, "20": 4735983616.0, "25": 4735983616.0, "30": 4735983616.0, "35": 4735983616.0, "40": 4735983616.0, "45": 4735983616.0, "50": 4735983616.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2.77411, "5": 0.08693, "10": 0.08692, "15": 0.08726, "20": 0.0863, "25": 0.08638, "30": 0.08595, "35": 0.0867, "40": 0.08646, "45": 0.08601, "50": 0.08591}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json index b5847f7..7b80d80 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.84013, - 10.8726, - 10.85028, - 10.79652, - 10.68163, - 10.60637, - 10.12795, - 10.22205, - 10.13809, - 9.82324 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1715.0, - 1828.0, - 1915.0, - 1898.0, - 1954.0, - 1773.0, - 1701.0, - 2089.0, - 2262.0, - 2284.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12.57806, - 0.09197, - 0.09095, - 0.09076, - 0.09095, - 0.09051, - 0.09095, - 0.09036, - 0.09029, - 0.09061 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84013, "5": 10.84032, "10": 10.81346, "15": 10.80282, "20": 10.70508, "25": 10.5386, "30": 10.3554, "35": 10.27167, "40": 10.08066, "45": 9.82312, "50": 9.90133}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1715.0, "5": 1885.0, "10": 1398.0, "15": 1921.0, "20": 1743.0, "25": 1660.0, "30": 1932.0, "35": 2005.0, "40": 2321.0, "45": 2051.0, "50": 2365.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 615042560.0, "5": 615042560.0, "10": 615042560.0, "15": 615042560.0, "20": 615042560.0, "25": 615042560.0, "30": 615042560.0, "35": 615042560.0, "40": 615042560.0, "45": 615042560.0, "50": 615042560.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4639367168.0, "5": 4735983616.0, "10": 4735983616.0, "15": 4735983616.0, "20": 4735983616.0, "25": 4735983616.0, "30": 4735983616.0, "35": 4735983616.0, "40": 4735983616.0, "45": 4735983616.0, "50": 4735983616.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.44702, "5": 0.09126, "10": 0.09132, "15": 0.09219, "20": 0.09184, "25": 0.0915, "30": 0.09308, "35": 0.09159, "40": 0.09271, "45": 0.09171, "50": 0.09183}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml index 69ad59f..6647240 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml index da4f2c1..0733405 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true -TEST_TYPE: ckpt-resume \ No newline at end of file + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json new file mode 100644 index 0000000..e48ea9a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84012, "5": 10.84147, "10": 10.82107, "15": 10.85423, "20": 10.8511, "25": 10.82229, "30": 10.75583, "35": 10.68389, "40": 10.52756, "45": 10.3119, "50": 10.29392, "55": 10.21881, "60": 9.8477, "65": 9.30027, "70": 9.92473, "75": 9.60615, "80": 9.55867, "85": 9.7331, "90": 9.91024, "95": 9.60586, "100": 9.49453}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 454770688.0, "5": 454770688.0, "10": 454770688.0, "15": 454770688.0, "20": 518880768.0, "25": 518880768.0, "30": 518880768.0, "35": 518880768.0, "40": 518880768.0, "45": 518880768.0, "50": 518880768.0, "55": 518880768.0, "60": 518880768.0, "65": 518880768.0, "70": 518880768.0, "75": 518880768.0, "80": 518880768.0, "85": 518880768.0, "90": 518880768.0, "95": 518880768.0, "100": 518880768.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4511150592.0, "5": 4544705536.0, "10": 4544705536.0, "15": 4544705536.0, "20": 4607767040.0, "25": 4607767040.0, "30": 4607767040.0, "35": 4607767040.0, "40": 4607767040.0, "45": 4607767040.0, "50": 4607767040.0, "55": 4607767040.0, "60": 4607767040.0, "65": 4607767040.0, "70": 4607767040.0, "75": 4607767040.0, "80": 4607767040.0, "85": 4607767040.0, "90": 4607767040.0, "95": 4607767040.0, "100": 4607767040.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2.90411, "5": 0.08103, "10": 0.08282, "15": 0.07736, "20": 0.08804, "25": 0.08766, "30": 0.08841, "35": 0.08667, "40": 0.08757, "45": 0.08624, "50": 0.08735, "55": 0.0901, "60": 0.09086, "65": 0.09018, "70": 0.08945, "75": 0.08827, "80": 0.08921, "85": 0.08802, "90": 0.0892, "95": 0.08808, "100": 0.08872}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", 
"15": "nan", "20": 1146.0, "25": 1145.0, "30": 1357.0, "35": 1283.0, "40": 1410.0, "45": 1430.0, "50": 1484.0, "55": 1563.0, "60": 1530.0, "65": 1429.0, "70": 2005.0, "75": 1951.0, "80": 2143.0, "85": 2240.0, "90": 2432.0, "95": 2349.0, "100": 2292.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json new file mode 100644 index 0000000..2168cee --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84012, "5": 10.84147, "10": 10.82107, "15": 10.85423, "20": 10.85111, "25": 10.82229, "30": 10.75584, "35": 10.68389, "40": 10.52756, "45": 10.3119, "50": 10.29392, "55": 10.21882, "60": 9.84769, "65": 9.30027, "70": 9.92473, "75": 9.60615, "80": 9.55866, "85": 9.7331, "90": 9.91024, "95": 9.60586, "100": 9.49454}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 454770688.0, "5": 454770688.0, "10": 454770688.0, "15": 454770688.0, "20": 518880768.0, "25": 518880768.0, "30": 518880768.0, "35": 518880768.0, "40": 518880768.0, "45": 518880768.0, "50": 518880768.0, "55": 518880768.0, "60": 518880768.0, "65": 518880768.0, "70": 518880768.0, "75": 518880768.0, "80": 518880768.0, "85": 518880768.0, "90": 518880768.0, "95": 518880768.0, "100": 518880768.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4511150592.0, "5": 4544705536.0, "10": 4544705536.0, "15": 4544705536.0, "20": 4607767040.0, "25": 4607767040.0, "30": 4607767040.0, "35": 4607767040.0, "40": 4607767040.0, "45": 4607767040.0, "50": 4607767040.0, "55": 4607767040.0, "60": 4607767040.0, "65": 4607767040.0, "70": 4607767040.0, "75": 4607767040.0, "80": 4607767040.0, "85": 4607767040.0, "90": 4607767040.0, "95": 4607767040.0, "100": 4607767040.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.06931, "5": 0.08348, "10": 0.08464, "15": 0.08384, "20": 0.09458, "25": 0.09207, "30": 0.09343, "35": 0.0916, "40": 0.0946, "45": 0.09217, "50": 0.09448, "55": 0.0925, "60": 0.09271, "65": 0.09196, "70": 0.09204, "75": 0.09195, "80": 0.09306, "85": 0.09148, "90": 0.09145, "95": 0.09814, "100": 0.09227}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1143.0, "25": 1185.0, "30": 1375.0, "35": 1263.0, "40": 1458.0, "45": 1307.0, "50": 1523.0, "55": 1597.0, "60": 1594.0, "65": 1496.0, "70": 1951.0, "75": 1910.0, "80": 2170.0, "85": 2204.0, "90": 2432.0, "95": 2324.0, "100": 2265.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml index fd1e725..2e1239e 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json index 9895a35..33216a7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.83373, - 10.86683, - 10.89023, - 10.81051, - 10.68459, - 10.60979, - 10.08992, - 10.21481, - 10.14018, - 9.80603 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1488.0, - 1854.0, - 1854.0, - 1884.0, - 1794.0, - 1784.0, - 1569.0, - 1942.0, - 2263.0, - 2147.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.39475, - 0.14158, - 0.14256, - 0.14166, - 0.14243, - 0.14232, - 0.143, - 0.14113, - 0.14164, - 0.14069 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.83373, "5": 10.85679, "10": 10.80851, "15": 10.79855, "20": 10.71089, "25": 10.54356, "30": 10.35071, "35": 10.26074, "40": 10.07158, "45": 9.81505, "50": 9.88949}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1488.0, "5": 1773.0, "10": 1404.0, "15": 1815.0, "20": 1611.0, "25": 1615.0, "30": 1860.0, "35": 1954.0, "40": 2277.0, "45": 2045.0, "50": 2358.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 948653056.0, "5": 948653056.0, "10": 948653056.0, "15": 948653056.0, "20": 948653056.0, "25": 948653056.0, "30": 948653056.0, "35": 948653056.0, "40": 948653056.0, "45": 948653056.0, "50": 948653056.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3275284480.0, "5": 3632653312.0, "10": 3632653312.0, "15": 3632653312.0, "20": 3632653312.0, "25": 3632653312.0, "30": 3632653312.0, "35": 3632653312.0, "40": 3632653312.0, "45": 3632653312.0, "50": 3632653312.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.71933, "5": 0.13324, "10": 0.13435, "15": 0.13348, "20": 0.13415, "25": 0.13297, "30": 0.13145, "35": 0.13124, "40": 0.13173, "45": 0.13207, "50": 0.13209}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json index 9895a35..e5fb0e4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.83373, - 10.86683, - 10.89023, - 10.81051, - 10.68459, - 10.60979, - 10.08992, - 10.21481, - 10.14018, - 9.80603 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1488.0, - 1854.0, - 1854.0, - 1884.0, - 1794.0, - 1784.0, - 1569.0, - 1942.0, - 2263.0, - 2147.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.39475, - 0.14158, - 0.14256, - 0.14166, - 0.14243, - 0.14232, - 0.143, - 0.14113, - 0.14164, - 0.14069 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.83373, "5": 10.85679, "10": 10.80851, "15": 10.79855, "20": 10.71089, "25": 10.54356, "30": 10.35071, "35": 10.26074, "40": 10.07158, "45": 9.81505, "50": 9.88949}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1488.0, "5": 1773.0, "10": 1404.0, "15": 1815.0, "20": 1611.0, "25": 1615.0, "30": 1860.0, "35": 1954.0, "40": 2277.0, "45": 2045.0, "50": 2332.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 948653056.0, "5": 948653056.0, "10": 948653056.0, "15": 948653056.0, "20": 948653056.0, "25": 948653056.0, "30": 948653056.0, "35": 948653056.0, "40": 948653056.0, "45": 948653056.0, "50": 948653056.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3275284480.0, "5": 3632653312.0, "10": 3632653312.0, "15": 3632653312.0, "20": 3632653312.0, "25": 3632653312.0, "30": 3632653312.0, "35": 3632653312.0, "40": 3632653312.0, "45": 3632653312.0, "50": 3632653312.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.94948, "5": 0.1367, "10": 0.13659, "15": 0.13541, "20": 0.13747, "25": 0.13584, "30": 0.13503, "35": 0.13535, "40": 0.13372, "45": 0.13346, "50": 0.1329}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml index 2b94108..35bc27d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml index d9ed9c7..f262b4a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_fp16/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json index fa1ca53..9e23273 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.83377, 10.86686, 10.89018, 10.81039, 10.68443, 10.60957, 10.08966, 10.21453, 10.13998, 9.80584, 9.83013, 9.60653, 9.67621, 9.68788, 9.59862, 9.07653, 9.47156, 9.06787, 9.32985, 9.51568]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1566.0, 1800.0, 1833.0, 1834.0, 1824.0, 1641.0, 1539.0, 1880.0, 2289.0, 2267.0, 2472.0, 2970.0, 3076.0, 3074.0, 3018.0, 2972.0, 3783.0, 2794.0, 2743.0, 3289.0]}, "iteration_timing_avg": 0.12010238805970147} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.83369, "5": 10.85789, "10": 10.81578, "15": 10.84971, "20": 10.85686, "25": 10.81326, "30": 10.74662, "35": 10.65647, "40": 10.5035, "45": 10.27258, "50": 10.25444, "55": 10.18699, "60": 9.8098, "65": 9.24409, "70": 9.91024, "75": 9.58173, "80": 9.54167, "85": 9.7262, "90": 9.90242, "95": 9.60208, "100": 9.49424}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 684610560.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0, "55": 1043027456.0, "60": 1043027456.0, "65": 1043027456.0, "70": 1043027456.0, "75": 1043027456.0, "80": 1043027456.0, "85": 1043027456.0, "90": 1043027456.0, "95": 1043027456.0, "100": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0, "55": 3544935936.0, "60": 3544935936.0, "65": 3544935936.0, "70": 3544935936.0, "75": 3544935936.0, "80": 3544935936.0, "85": 3544935936.0, "90": 3544935936.0, "95": 3544935936.0, "100": 3544935936.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": 
{"1": 6.28987, "5": 0.1226, "10": 0.12076, "15": 0.12204, "20": 0.13401, "25": 0.13151, "30": 0.13299, "35": 0.12924, "40": 0.13517, "45": 0.13012, "50": 0.12972, "55": 0.13223, "60": 0.13267, "65": 0.13269, "70": 0.13313, "75": 0.13241, "80": 0.13269, "85": 0.13179, "90": 0.13269, "95": 0.13229, "100": 0.13212}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2157.0, "25": 1926.0, "30": 2841.0, "35": 1993.0, "40": 2001.0, "45": 2196.0, "50": 2216.0, "55": 2936.0, "60": 2092.0, "65": 2350.0, "70": 2536.0, "75": 2452.0, "80": 3520.0, "85": 3999.0, "90": 3310.0, "95": 3457.0, "100": 2915.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json index fa1ca53..9e19887 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.83377, 10.86686, 10.89018, 10.81039, 10.68443, 10.60957, 10.08966, 10.21453, 10.13998, 9.80584, 9.83013, 9.60653, 9.67621, 9.68788, 9.59862, 9.07653, 9.47156, 9.06787, 9.32985, 9.51568]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1566.0, 1800.0, 1833.0, 1834.0, 1824.0, 1641.0, 1539.0, 1880.0, 2289.0, 2267.0, 2472.0, 2970.0, 3076.0, 3074.0, 3018.0, 2972.0, 3783.0, 2794.0, 2743.0, 3289.0]}, "iteration_timing_avg": 0.12010238805970147} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.83369, "5": 10.85789, "10": 10.81578, "15": 10.84971, "20": 10.85686, "25": 10.81326, "30": 10.74662, "35": 10.65647, "40": 10.5035, "45": 10.27258, "50": 10.25444, "55": 10.18699, "60": 9.8098, "65": 9.24409, "70": 9.91023, "75": 9.58173, "80": 9.54167, "85": 9.7262, "90": 9.90242, "95": 9.60207, "100": 9.49424}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 684610560.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0, "55": 1043027456.0, "60": 1043027456.0, "65": 1043027456.0, "70": 1043027456.0, "75": 1043027456.0, "80": 1043027456.0, "85": 1043027456.0, "90": 1043027456.0, "95": 1043027456.0, "100": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0, "55": 3544935936.0, "60": 3544935936.0, "65": 3544935936.0, "70": 3544935936.0, "75": 3544935936.0, "80": 3544935936.0, "85": 3544935936.0, "90": 3544935936.0, "95": 3544935936.0, "100": 3544935936.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.48871, "5": 0.13221, "10": 0.13085, "15": 0.12655, "20": 0.13735, "25": 0.13495, "30": 0.13543, "35": 0.1352, "40": 0.13503, "45": 0.13502, "50": 0.13631, "55": 0.1428, 
"60": 0.13999, "65": 0.14047, "70": 0.13954, "75": 0.13895, "80": 0.13941, "85": 0.13825, "90": 0.13808, "95": 0.13975, "100": 0.13865}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2157.0, "25": 1926.0, "30": 2841.0, "35": 1993.0, "40": 2001.0, "45": 2196.0, "50": 2216.0, "55": 2936.0, "60": 2092.0, "65": 2350.0, "70": 2558.0, "75": 2379.0, "80": 3530.0, "85": 4012.0, "90": 3340.0, "95": 3459.0, "100": 2871.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml index abb85ba..54be873 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json index 4924720..f059f5e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79206, - 10.86691, - 10.89065, - 10.78186, - 10.65978, - 10.58022, - 10.08207, - 10.19156, - 10.13495, - 9.81167 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1626.0, - 1866.0, - 1959.0, - 1816.0, - 1890.0, - 1654.0, - 1537.0, - 1965.0, - 2436.0, - 2405.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 21.9348, - 0.1633, - 0.16334, - 0.16269, - 0.16133, - 0.16064, - 0.16007, - 0.15926, - 0.1592, - 0.15982 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79206, "5": 10.84592, "10": 10.76954, "15": 10.78975, "20": 10.67887, "25": 10.50432, "30": 10.33089, "35": 10.25276, "40": 10.05236, "45": 9.80262, "50": 9.88808}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1626.0, "5": 1909.0, "10": 1312.0, "15": 1911.0, "20": 1601.0, "25": 1600.0, "30": 1886.0, "35": 2056.0, "40": 2241.0, "45": 2090.0, "50": 2433.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 777900032.0, "5": 777900032.0, "10": 777900032.0, "15": 777900032.0, "20": 777900032.0, "25": 777900032.0, "30": 777900032.0, "35": 777900032.0, "40": 777900032.0, "45": 777900032.0, "50": 777900032.0}}, 
"mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2463815680.0, "5": 2744478720.0, "10": 2744478720.0, "15": 2744478720.0, "20": 2744478720.0, "25": 2744478720.0, "30": 2744478720.0, "35": 2744478720.0, "40": 2744478720.0, "45": 2744478720.0, "50": 2744478720.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.17793, "5": 0.16541, "10": 0.16804, "15": 0.1616, "20": 0.16211, "25": 0.16186, "30": 0.1614, "35": 0.16111, "40": 0.16014, "45": 0.15818, "50": 0.15678}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json index 4924720..4d091a8 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79206, - 10.86691, - 10.89065, - 10.78186, - 10.65978, - 10.58022, - 10.08207, - 10.19156, - 10.13495, - 9.81167 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1626.0, - 1866.0, - 1959.0, - 1816.0, - 1890.0, - 1654.0, - 1537.0, - 1965.0, - 2436.0, - 2405.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 21.9348, - 0.1633, - 0.16334, - 0.16269, - 0.16133, - 0.16064, - 0.16007, - 0.15926, - 0.1592, - 0.15982 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79206, "5": 10.84592, "10": 10.76954, "15": 10.78975, "20": 10.67887, "25": 10.50432, "30": 10.33089, "35": 10.25276, "40": 10.05236, "45": 9.80262, "50": 9.88808}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1626.0, "5": 1909.0, "10": 1312.0, "15": 1911.0, "20": 1601.0, "25": 1600.0, "30": 1886.0, "35": 2056.0, "40": 2241.0, "45": 2090.0, "50": 2433.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 782094336.0, "5": 782094336.0, "10": 782094336.0, "15": 782094336.0, "20": 782094336.0, "25": 782094336.0, "30": 782094336.0, "35": 782094336.0, "40": 782094336.0, "45": 782094336.0, "50": 782094336.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2462767104.0, "5": 2748673024.0, "10": 2748673024.0, "15": 2748673024.0, "20": 2748673024.0, "25": 2748673024.0, "30": 2748673024.0, "35": 2748673024.0, "40": 2748673024.0, "45": 2748673024.0, "50": 2748673024.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.13285, "5": 0.16867, "10": 0.16853, "15": 0.16651, "20": 0.16645, "25": 0.16655, "30": 0.37162, "35": 0.16698, "40": 0.1662, "45": 0.16431, "50": 0.1635}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml index e40b6f6..859669a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_dev.json new file mode 100644 index 0000000..2661e0c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79219, "5": 10.84727, "10": 10.77729, "15": 10.84106, "20": 10.82889, "25": 10.7666, "30": 10.69961, "35": 10.61845, "40": 10.44051, "45": 10.2086, "50": 10.21168, "55": 10.15676, "60": 9.77265, "65": 9.22128, "70": 9.89371, "75": 9.56098, "80": 9.5311, "85": 9.71911, "90": 9.89982, "95": 9.59785, "100": 9.49008}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 569591808.0, "5": 569591808.0, "10": 569591808.0, "15": 569591808.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0, "55": 852351488.0, "60": 852351488.0, "65": 852351488.0, "70": 852351488.0, "75": 852351488.0, "80": 852351488.0, "85": 852351488.0, "90": 852351488.0, "95": 852351488.0, "100": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2394266112.0, "5": 2394266624.0, "10": 2394266624.0, "15": 2394266624.0, "20": 2677288448.0, "25": 2677288448.0, "30": 2677288448.0, "35": 2677288448.0, "40": 2677288448.0, "45": 2677288448.0, "50": 2677288448.0, "55": 2677288448.0, "60": 2677288448.0, "65": 2677288448.0, "70": 2677288448.0, "75": 2677288448.0, "80": 2677288448.0, "85": 2677288448.0, "90": 2677288448.0, "95": 2677288448.0, "100": 2677288448.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.20637, "5": 0.15525, "10": 0.15614, "15": 0.15359, "20": 0.16136, "25": 0.16083, "30": 0.16019, "35": 0.16169, "40": 0.16106, "45": 0.16081, "50": 0.16073, "55": 0.15707, "60": 0.1561, "65": 0.16078, "70": 0.15943, "75": 0.15999, "80": 0.15947, "85": 0.15903, "90": 0.15903, "95": 0.15832, "100": 0.15962}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1881.0, "25": 2342.0, "30": 2479.0, "35": 2015.0, "40": 2210.0, "45": 2480.0, "50": 2916.0, "55": 2451.0, "60": 2926.0, "65": 2270.0, "70": 3615.0, "75": 2951.0, "80": 3569.0, "85": 3977.0, "90": 3808.0, "95": 4246.0, "100": 3731.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_lts.json 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_lts.json new file mode 100644 index 0000000..8ff4bfa --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79219, "5": 10.84727, "10": 10.77729, "15": 10.84106, "20": 10.82889, "25": 10.7666, "30": 10.69961, "35": 10.61845, "40": 10.44051, "45": 10.2086, "50": 10.21168, "55": 10.15676, "60": 9.77265, "65": 9.22128, "70": 9.89371, "75": 9.56099, "80": 9.5311, "85": 9.71912, "90": 9.89983, "95": 9.59785, "100": 9.49009}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0, "55": 852351488.0, "60": 852351488.0, "65": 852351488.0, "70": 852351488.0, "75": 852351488.0, "80": 852351488.0, "85": 852351488.0, "90": 852351488.0, "95": 852351488.0, "100": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0, "55": 2675191296.0, "60": 2675191296.0, "65": 2675191296.0, "70": 2675191296.0, "75": 2675191296.0, "80": 2675191296.0, "85": 2675191296.0, "90": 2675191296.0, "95": 2675191296.0, "100": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.58626, "5": 0.16569, "10": 0.16316, "15": 0.16279, "20": 0.17703, "25": 0.17217, "30": 0.17358, "35": 0.17246, "40": 0.17158, "45": 0.17224, "50": 0.1705, "55": 0.16674, "60": 0.16732, "65": 0.16787, "70": 0.16785, "75": 0.16687, "80": 0.16672, "85": 0.16784, "90": 0.16602, "95": 0.17069, "100": 0.16936}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1881.0, "25": 2342.0, "30": 2479.0, "35": 2015.0, "40": 2210.0, "45": 2480.0, "50": 2916.0, "55": 2451.0, "60": 2990.0, "65": 2327.0, "70": 3731.0, "75": 3015.0, "80": 3623.0, "85": 4045.0, "90": 3909.0, "95": 4212.0, "100": 3673.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml index a2960f3..3348c08 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + 
--log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json new file mode 100644 index 0000000..dcee8b3 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82027, "5": 10.84932, "10": 10.78695, "15": 10.82723, "20": 10.728, "25": 10.57817, "30": 10.40703, "35": 10.31124, "40": 10.13951, "45": 9.91072, "50": 9.9683}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5111.0, "5": 5732.0, "10": 4774.0, "15": 5398.0, "20": 5259.0, "25": 5163.0, "30": 5567.0, "35": 5831.0, "40": 6144.0, "45": 5834.0, "50": 6815.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 596698624.0, "5": 596698112.0, "10": 596696576.0, "15": 596697600.0, "20": 596698112.0, "25": 596698624.0, "30": 596699136.0, "35": 596696576.0, "40": 596698624.0, "45": 596698624.0, "50": 596699136.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 840132608.0, "5": 1070596096.0, "10": 1071991296.0, "15": 1071991296.0, "20": 1071991296.0, "25": 1071991296.0, "30": 1071991296.0, "35": 1071991296.0, "40": 1071991296.0, "45": 1071991296.0, "50": 1071991296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.08967, "5": 0.79969, "10": 1.06452, "15": 0.78727, "20": 0.79043, "25": 0.99776, "30": 0.78847, "35": 0.79196, "40": 0.78657, "45": 0.79613, "50": 0.78348}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/model_config.yaml new file mode 100644 index 0000000..d78078d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel/model_config.yaml @@ -0,0 +1,56 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + 
--context-parallel-size: 2 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/golden_values_dev.json new file mode 100644 index 0000000..1097ed4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82027, "5": 10.84932, "10": 10.78695, "15": 10.82723, "20": 10.728, "25": 10.57817, "30": 10.40703, "35": 10.31124, "40": 10.13951, "45": 9.91072, "50": 9.9683}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5111.0, "5": 5732.0, "10": 4774.0, "15": 5398.0, "20": 5259.0, "25": 5163.0, "30": 5567.0, "35": 5831.0, "40": 6144.0, "45": 5834.0, "50": 6815.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 596698624.0, "5": 596698112.0, "10": 596696576.0, "15": 596697600.0, "20": 596698112.0, "25": 596698624.0, "30": 596699136.0, "35": 596696576.0, "40": 596698624.0, "45": 596698624.0, "50": 596699136.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 840132608.0, "5": 1070596096.0, "10": 1071991296.0, "15": 1071991296.0, "20": 1071991296.0, "25": 1071991296.0, "30": 1071991296.0, "35": 1071991296.0, "40": 1071991296.0, "45": 1071991296.0, "50": 1071991296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84481, "5": 1.02931, "10": 0.99191, "15": 0.97148, "20": 0.77381, "25": 1.02379, "30": 0.77383, "35": 0.77556, "40": 0.77762, "45": 0.77638, "50": 0.77403}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/model_config.yaml new file mode 100644 index 0000000..82d28bb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last/model_config.yaml @@ -0,0 +1,57 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + 
--merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 2 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true + --use-tp-pp-dp-mapping: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_dev.json new file mode 100644 index 0000000..90beb44 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.7934, "5": 10.82731, "10": 10.76672, "15": 10.78912, "20": 10.70399, "25": 10.53774, "30": 10.39119, "35": 10.30163, "40": 10.12628, "45": 9.89627, "50": 9.97376}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5576.0, "5": 6583.0, "10": 5280.0, "15": 6361.0, "20": 5720.0, "25": 5806.0, "30": 6223.0, "35": 6684.0, "40": 6987.0, "45": 6837.0, "50": 7602.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 458715136.0, "5": 458715136.0, "10": 458718208.0, "15": 458717184.0, "20": 458715136.0, "25": 458715648.0, "30": 458714624.0, "35": 458716160.0, "40": 458716672.0, "45": 458715136.0, "50": 458715648.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1017808384.0, "5": 1183341568.0, "10": 1184804864.0, "15": 1184804864.0, "20": 1184804864.0, "25": 1184804864.0, "30": 1184804864.0, "35": 1184804864.0, "40": 1184804864.0, "45": 1184804864.0, "50": 1184804864.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.4596, "5": 0.66406, "10": 0.66034, "15": 0.66098, "20": 0.65918, "25": 0.65845, "30": 0.65966, "35": 0.6573, "40": 0.65347, "45": 0.65812, "50": 0.65616}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_lts.json new file mode 100644 index 0000000..6ebd0fd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.7934, "5": 10.82731, "10": 10.76672, "15": 10.78933, "20": 10.70416, "25": 10.53748, "30": 10.39181, "35": 10.30095, "40": 10.12594, "45": 9.89677, "50": 9.97407}}, "num-zeros": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 5576.0, "5": 6583.0, "10": 5262.0, "15": 6198.0, "20": 5805.0, "25": 5815.0, "30": 6199.0, "35": 6566.0, "40": 7076.0, "45": 6876.0, "50": 7591.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 458715136.0, "5": 458715136.0, "10": 458718208.0, "15": 458717184.0, "20": 458715136.0, "25": 458715648.0, "30": 458715136.0, "35": 458716672.0, "40": 458716672.0, "45": 458714624.0, "50": 458716160.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1017808384.0, "5": 1183341568.0, "10": 1184804864.0, "15": 1184804864.0, "20": 1184804864.0, "25": 1184804864.0, "30": 1184804864.0, "35": 1184804864.0, "40": 1184804864.0, "45": 1185401344.0, "50": 1185401344.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22.12737, "5": 0.70643, "10": 0.72506, "15": 0.70455, "20": 0.70271, "25": 0.70404, "30": 0.70446, "35": 0.70004, "40": 0.89832, "45": 0.70671, "50": 0.70503}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/model_config.yaml new file mode 100644 index 0000000..e48c3c4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel/model_config.yaml @@ -0,0 +1,56 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: local + --tensor-model-parallel-size: 4 + --pipeline-model-parallel-size: 2 + --expert-model-parallel-size: 2 + --expert-tensor-parallel-size: 2 + --sequence-parallel: true + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_dev.json new file mode 100644 index 0000000..846a78a --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.7934, "5": 10.82731, "10": 10.76672, "15": 10.78912, "20": 10.70399, "25": 10.53774, "30": 10.39119, "35": 10.30163, "40": 10.12628, "45": 9.89627, "50": 9.97376}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5576.0, "5": 6583.0, "10": 5280.0, "15": 6361.0, "20": 5720.0, "25": 5806.0, "30": 6223.0, "35": 6684.0, "40": 6987.0, "45": 6837.0, "50": 7602.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 458715136.0, "5": 458715136.0, "10": 458718208.0, "15": 458717184.0, "20": 458715136.0, "25": 458715648.0, "30": 458714624.0, "35": 458716160.0, "40": 458716672.0, "45": 458715136.0, "50": 458715648.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1017808384.0, "5": 1183341568.0, "10": 1184804864.0, "15": 1184804864.0, "20": 1184804864.0, "25": 1184804864.0, "30": 1184804864.0, "35": 1184804864.0, "40": 1184804864.0, "45": 1184804864.0, "50": 1184804864.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.96135, "5": 0.66582, "10": 0.66672, "15": 0.66703, "20": 0.67005, "25": 0.66892, "30": 0.66766, "35": 0.66539, "40": 0.66319, "45": 0.66894, "50": 0.66499}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_lts.json new file mode 100644 index 0000000..43bd35a --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.7934, "5": 10.82731, "10": 10.76672, "15": 10.78933, "20": 10.70416, "25": 10.53748, "30": 10.39181, "35": 10.30095, "40": 10.12594, "45": 9.89677, "50": 9.97407}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5576.0, "5": 6583.0, "10": 5262.0, "15": 6198.0, "20": 5805.0, "25": 5815.0, "30": 6199.0, "35": 6566.0, "40": 7076.0, "45": 6876.0, "50": 7591.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 458715136.0, "5": 458715136.0, "10": 458718208.0, "15": 458717184.0, "20": 458715136.0, "25": 458715648.0, "30": 458715136.0, "35": 458716672.0, "40": 458716672.0, "45": 458714624.0, "50": 458716160.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1017808384.0, "5": 1183341568.0, "10": 1184804864.0, "15": 1184804864.0, "20": 1184804864.0, "25": 1184804864.0, "30": 1184804864.0, "35": 1184804864.0, "40": 1184804864.0, "45": 1185401344.0, "50": 1185401344.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 26.12525, "5": 0.6932, "10": 0.69116, "15": 0.69227, "20": 0.69207, "25": 0.69063, "30": 0.69144, "35": 0.68372, "40": 0.89596, "45": 0.68537, "50": 0.69374}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/model_config.yaml new file mode 100644 index 0000000..524cccd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last/model_config.yaml @@ -0,0 +1,57 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: local + --tensor-model-parallel-size: 4 + --pipeline-model-parallel-size: 2 + --expert-model-parallel-size: 2 + --expert-tensor-parallel-size: 2 + --sequence-parallel: true + --num-experts: 4 + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: unfused + --log-memory-to-tensorboard: true + --use-tp-pp-dp-mapping: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/golden_values_lts.json new file mode 100644 index 0000000..b5fc4f4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82445, "5": 10.83944, "10": 10.7889, "15": 10.82831, "20": 10.72949, "25": 10.57667, "30": 10.40631, "35": 10.3135, "40": 10.13964, "45": 9.90704, "50": 9.96951, "55": 9.92826, "60": 9.56897, "65": 8.99498, "70": 9.76136, "75": 9.4768, "80": 9.44907, "85": 9.65155, "90": 9.84117, "95": 9.54761, "100": 9.43675}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12856.0, "5": 15993.0, "10": 12573.0, "15": 14651.0, "20": 13663.0, "25": 13137.0, "30": 14643.0, "35": 15376.0, "40": 16684.0, "45": 16099.0, "50": 18966.0, "55": 16976.0, "60": 18925.0, "65": 19522.0, "70": 22271.0, "75": 18752.0, "80": 23211.0, "85": 24769.0, "90": 24231.0, "95": 23303.0, "100": 21066.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 626182656.0, "5": 626185728.0, "10": 626182656.0, "15": 626185216.0, "20": 626186240.0, "25": 626183168.0, "30": 626183680.0, 
"35": 626184704.0, "40": 626185728.0, "45": 626475008.0, "50": 626184704.0, "55": 626181632.0, "60": 626180096.0, "65": 626168832.0, "70": 626182656.0, "75": 626186752.0, "80": 626180608.0, "85": 626176000.0, "90": 626186752.0, "95": 627019776.0, "100": 626182656.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1819317248.0, "5": 2050089472.0, "10": 2050089472.0, "15": 2050322944.0, "20": 2050322944.0, "25": 2050322944.0, "30": 2050322944.0, "35": 2050341376.0, "40": 2050341376.0, "45": 2050341376.0, "50": 2050341376.0, "55": 2050341376.0, "60": 2050341376.0, "65": 2050715136.0, "70": 2052688896.0, "75": 2052688896.0, "80": 2052688896.0, "85": 2054681600.0, "90": 2054681600.0, "95": 2057086464.0, "100": 2057086464.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 24.60883, "5": 0.44856, "10": 0.45126, "15": 0.44816, "20": 0.44504, "25": 0.44156, "30": 0.44184, "35": 0.45033, "40": 0.45005, "45": 0.44616, "50": 0.44366, "55": 0.43822, "60": 0.43979, "65": 0.4557, "70": 0.4497, "75": 0.44309, "80": 0.44931, "85": 0.44544, "90": 0.45177, "95": 0.44859, "100": 0.44472}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml index 6beae45..6786cac 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json index 3dddf6c..b4ae3a5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82445, - 10.86393, - 10.85733, - 10.80809, - 10.70951, - 10.63738, - 10.16425, - 10.28201, - 10.19003, - 9.88697 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12678.0, - 16220.0, - 16626.0, - 16055.0, - 13829.0, - 14904.0, - 12931.0, - 15765.0, - 16771.0, - 17621.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 16.34149, - 0.66962, - 0.66905, - 0.66791, - 0.67695, - 
0.66977, - 0.67438, - 0.67368, - 0.6714, - 0.67874 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82445, "5": 10.83944, "10": 10.7889, "15": 10.82895, "20": 10.72911, "25": 10.57606, "30": 10.40656, "35": 10.31389, "40": 10.13997, "45": 9.90738, "50": 9.96993}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12856.0, "5": 15993.0, "10": 12573.0, "15": 14881.0, "20": 13775.0, "25": 13046.0, "30": 14831.0, "35": 15239.0, "40": 16747.0, "45": 16125.0, "50": 19024.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 626182656.0, "5": 626185728.0, "10": 626182656.0, "15": 626184192.0, "20": 626186240.0, "25": 626183168.0, "30": 626183680.0, "35": 626183680.0, "40": 626186240.0, "45": 626184192.0, "50": 626185216.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1819317248.0, "5": 2050089472.0, "10": 2050089472.0, "15": 2050438656.0, "20": 2050438656.0, "25": 2050438656.0, "30": 2050438656.0, "35": 2050946560.0, "40": 2050946560.0, "45": 2050946560.0, "50": 2050946560.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.97445, "5": 0.43504, "10": 0.43232, "15": 0.43326, "20": 0.43474, "25": 0.43463, "30": 0.43979, "35": 0.44199, "40": 0.44565, "45": 0.44017, "50": 0.43092}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json index 8db9f81..3510a95 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82445, - 10.86393, - 10.85733, - 10.80809, - 10.70951, - 10.63738, - 10.16425, - 10.28201, - 10.19003, - 9.88697 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12678.0, - 16220.0, - 16626.0, - 16055.0, - 13829.0, - 14904.0, - 12931.0, - 15765.0, - 16771.0, - 17621.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 25.19848, - 0.70611, - 0.70356, - 0.70548, - 0.70285, - 0.70488, - 0.70589, - 0.70459, - 0.70261, - 0.71213 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82445, "5": 10.83944, "10": 10.7889, "15": 10.82831, "20": 10.72949, "25": 10.57667, "30": 10.40631, "35": 10.3135, "40": 10.13964, "45": 9.90704, "50": 9.96951}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12856.0, "5": 15993.0, "10": 12573.0, "15": 14651.0, "20": 13663.0, "25": 13137.0, "30": 14643.0, "35": 15376.0, "40": 16684.0, "45": 16099.0, "50": 18966.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 626182656.0, "5": 626185728.0, "10": 626182656.0, "15": 626185216.0, "20": 626186240.0, "25": 626183168.0, "30": 626183680.0, "35": 626184704.0, "40": 626185728.0, "45": 626475008.0, "50": 626184704.0}}, 
"mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1819317248.0, "5": 2050089472.0, "10": 2050089472.0, "15": 2050322944.0, "20": 2050322944.0, "25": 2050322944.0, "30": 2050322944.0, "35": 2050341376.0, "40": 2050341376.0, "45": 2050341376.0, "50": 2050341376.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.15563, "5": 0.44565, "10": 0.43891, "15": 0.44112, "20": 0.44197, "25": 0.44184, "30": 0.43708, "35": 0.43675, "40": 0.43865, "45": 0.44326, "50": 0.44012}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml index 150d96a..5c1d546 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml index 2b01cfa..79704b2 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json index 4172a17..14b923e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.86122, - 10.88647, - 10.87773, - 10.83111, - 10.7165, - 10.60619, - 10.13147, - 10.22767, - 10.15929, - 9.83482 - ] - }, - "num-zeros": { - "start_step": 0, - 
"end_step": 50, - "step_interval": 5, - "values": [ - 1694.0, - 2148.0, - 2169.0, - 2103.0, - 1991.0, - 1900.0, - 1707.0, - 2189.0, - 2557.0, - 2606.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.61991, - 0.29135, - 0.28852, - 0.28971, - 0.29221, - 0.28994, - 0.28976, - 0.28887, - 0.28975, - 0.2869 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86122, "5": 10.88248, "10": 10.83515, "15": 10.82747, "20": 10.72762, "25": 10.55769, "30": 10.37915, "35": 10.28345, "40": 10.08809, "45": 9.82642, "50": 9.91341}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1694.0, "5": 2127.0, "10": 1548.0, "15": 1997.0, "20": 1846.0, "25": 1802.0, "30": 2112.0, "35": 2172.0, "40": 2560.0, "45": 2397.0, "50": 2761.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 382956544.0, "5": 382956544.0, "10": 382956544.0, "15": 382956544.0, "20": 382956544.0, "25": 382956544.0, "30": 382956544.0, "35": 382956544.0, "40": 382956544.0, "45": 382956544.0, "50": 382956544.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1496754688.0, "5": 1628741632.0, "10": 1628741632.0, "15": 1628741632.0, "20": 1628741632.0, "25": 1628741632.0, "30": 1628741632.0, "35": 1628741632.0, "40": 1628741632.0, "45": 1628741632.0, "50": 1628741632.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4.5526, "5": 0.28707, "10": 0.28966, "15": 0.28958, "20": 0.28862, "25": 0.28956, "30": 0.28644, "35": 0.28887, "40": 0.28562, "45": 0.28552, "50": 0.28692}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json index dc8076a..6ab9513 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.86122, - 10.88647, - 10.87773, - 10.83111, - 10.7165, - 10.60623, - 10.13146, - 10.2277, - 10.15933, - 9.8348 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1694.0, - 2148.0, - 2169.0, - 2103.0, - 1991.0, - 1869.0, - 1760.0, - 2214.0, - 2529.0, - 2587.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 11.72537, - 0.29824, - 0.29549, - 0.29574, - 0.29514, - 0.29533, - 0.29415, - 0.30722, - 0.29731, - 0.29867 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86122, "5": 10.88248, "10": 10.83515, "15": 10.82747, "20": 10.72762, "25": 10.55769, "30": 10.37919, "35": 10.28344, "40": 10.08807, "45": 9.82644, "50": 9.9134}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1694.0, "5": 2127.0, "10": 1548.0, "15": 1997.0, "20": 1846.0, "25": 1700.0, "30": 2165.0, "35": 2194.0, "40": 2540.0, "45": 2414.0, "50": 2586.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 382956544.0, "5": 382956544.0, "10": 382956544.0, 
"15": 382956544.0, "20": 382956544.0, "25": 382956544.0, "30": 382956544.0, "35": 382956544.0, "40": 382956544.0, "45": 382956544.0, "50": 382956544.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1497803776.0, "5": 1628741632.0, "10": 1628741632.0, "15": 1628741632.0, "20": 1628741632.0, "25": 1628741632.0, "30": 1628741632.0, "35": 1628741632.0, "40": 1628741632.0, "45": 1628741632.0, "50": 1628741632.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.04015, "5": 0.30609, "10": 0.30611, "15": 0.30476, "20": 0.30451, "25": 0.3037, "30": 0.30473, "35": 0.30527, "40": 0.30608, "45": 0.30141, "50": 0.30553}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml index 267a290..b9c85ee 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_dev.json new file mode 100644 index 0000000..5429919 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86104, "5": 10.88371, "10": 10.84263, "15": 10.87936, "20": 10.87404, "25": 10.82866, "30": 10.77191, "35": 10.67622, "40": 10.517, "45": 10.28436, "50": 10.27862, "55": 10.20113, "60": 9.83306, "65": 9.26979, "70": 9.92663, "75": 9.61385, "80": 9.56419, "85": 9.74319, "90": 9.92148, "95": 9.6163, "100": 9.5087}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1465367552.0, "5": 1465367552.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1597092352.0, "25": 1597092352.0, "30": 1597092352.0, "35": 1597092352.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0, "55": 1597092352.0, "60": 1597092352.0, "65": 1597092352.0, "70": 1597092352.0, "75": 1597092352.0, "80": 1597092352.0, "85": 1597092352.0, "90": 1597092352.0, 
"95": 1597092352.0, "100": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3.88381, "5": 0.28491, "10": 0.28089, "15": 0.28096, "20": 0.2941, "25": 0.29217, "30": 0.29189, "35": 0.29014, "40": 0.29008, "45": 0.28992, "50": 0.29002, "55": 0.29062, "60": 0.29185, "65": 0.28998, "70": 0.28985, "75": 0.29115, "80": 0.29089, "85": 0.29148, "90": 0.2908, "95": 0.29004, "100": 0.29109}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1620.0, "25": 2028.0, "30": 2272.0, "35": 1848.0, "40": 1954.0, "45": 2388.0, "50": 2548.0, "55": 2269.0, "60": 2744.0, "65": 2295.0, "70": 3777.0, "75": 3002.0, "80": 3528.0, "85": 3660.0, "90": 3705.0, "95": 4147.0, "100": 3569.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_lts.json new file mode 100644 index 0000000..d2b539f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86104, "5": 10.88371, "10": 10.84263, "15": 10.87936, "20": 10.87404, "25": 10.82866, "30": 10.77191, "35": 10.67622, "40": 10.517, "45": 10.28436, "50": 10.27862, "55": 10.20112, "60": 9.83306, "65": 9.26979, "70": 9.92662, "75": 9.61385, "80": 9.56419, "85": 9.74319, "90": 9.92149, "95": 9.6163, "100": 9.5087}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1465368064.0, "5": 1465368064.0, "10": 1465368064.0, "15": 1465368064.0, "20": 1596303360.0, "25": 1596303360.0, "30": 1596303360.0, "35": 1596303360.0, "40": 1596303360.0, "45": 1596303360.0, "50": 1596303360.0, "55": 1596303360.0, "60": 1596303360.0, "65": 1596303360.0, "70": 1596303360.0, "75": 1596304896.0, "80": 1596305408.0, "85": 1596305408.0, "90": 1596305408.0, "95": 1596305408.0, "100": 1596305920.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.25031, "5": 0.29691, "10": 0.29639, "15": 0.29502, "20": 0.3291, "25": 0.30842, "30": 0.31824, "35": 0.31378, "40": 0.31056, "45": 0.30902, "50": 0.30807, "55": 0.30895, "60": 0.31556, "65": 0.308, "70": 0.31154, "75": 0.30756, "80": 0.314, "85": 0.3103, "90": 0.3142, "95": 0.30701, "100": 0.30658}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1620.0, "25": 2028.0, "30": 2272.0, "35": 1848.0, "40": 1954.0, "45": 2388.0, "50": 2605.0, "55": 2341.0, "60": 2883.0, "65": 2307.0, "70": 3652.0, "75": 2877.0, "80": 3537.0, "85": 3698.0, "90": 3545.0, "95": 4040.0, "100": 3472.0}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml index 77c55fa..9938a9f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_dev.json new file mode 100644 index 0000000..cf0209f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86104, "5": 10.88371, "10": 10.84263, "15": 10.87936, "20": 10.87404, "25": 10.82866, "30": 10.77191, "35": 10.67622, "40": 10.517, "45": 10.28436, "50": 10.27862, "55": 10.20113, "60": 9.83306, "65": 9.26979, "70": 9.92663, "75": 9.61385, "80": 9.56419, "85": 9.74319, "90": 9.92148, "95": 9.6163, "100": 9.5087}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1465368064.0, "5": 1465368064.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1597092352.0, "25": 1597092352.0, "30": 1597092352.0, "35": 1597092352.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0, "55": 1597092352.0, "60": 1597092352.0, "65": 1597092352.0, "70": 1597092352.0, "75": 1597092352.0, "80": 1597092352.0, "85": 1597092352.0, "90": 1597092352.0, "95": 1597092352.0, "100": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.45696, "5": 0.28792, "10": 0.28811, "15": 0.28636, "20": 0.30153, "25": 0.29748, "30": 0.29505, "35": 0.29452, "40": 0.29464, "45": 0.29589, "50": 0.29474, "55": 0.29138, "60": 0.29052, "65": 0.28928, "70": 0.29165, "75": 0.29065, "80": 0.29154, "85": 0.29123, "90": 0.29106, "95": 0.29151, "100": 0.29157}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1620.0, "25": 2028.0, "30": 2272.0, "35": 1848.0, "40": 1954.0, "45": 2388.0, "50": 2548.0, "55": 
2269.0, "60": 2744.0, "65": 2295.0, "70": 3777.0, "75": 3002.0, "80": 3528.0, "85": 3660.0, "90": 3705.0, "95": 4147.0, "100": 3569.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_lts.json new file mode 100644 index 0000000..62ad247 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86104, "5": 10.88371, "10": 10.84263, "15": 10.87936, "20": 10.87404, "25": 10.82866, "30": 10.77191, "35": 10.67622, "40": 10.517, "45": 10.28436, "50": 10.27862, "55": 10.20112, "60": 9.83306, "65": 9.26979, "70": 9.92662, "75": 9.61385, "80": 9.56419, "85": 9.74319, "90": 9.92149, "95": 9.6163, "100": 9.5087}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1463270912.0, "5": 1465368576.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1597092352.0, "25": 1597092352.0, "30": 1597092352.0, "35": 1597092352.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0, "55": 1597092352.0, "60": 1597092352.0, "65": 1597092352.0, "70": 1597092352.0, "75": 1597092352.0, "80": 1597092352.0, "85": 1597092352.0, "90": 1597092352.0, "95": 1597092352.0, "100": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 7.29813, "5": 0.29344, "10": 0.29293, "15": 0.29093, "20": 0.30728, "25": 0.31074, "30": 0.30695, "35": 0.30576, "40": 0.30871, "45": 0.31067, "50": 0.30611, "55": 0.3052, "60": 0.30899, "65": 0.30587, "70": 0.30945, "75": 0.30233, "80": 0.30465, "85": 0.30549, "90": 0.30363, "95": 0.30609, "100": 0.3023}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1620.0, "25": 2028.0, "30": 2272.0, "35": 1848.0, "40": 1954.0, "45": 2388.0, "50": 2605.0, "55": 2341.0, "60": 2883.0, "65": 2307.0, "70": 3652.0, "75": 2877.0, "80": 3537.0, "85": 3698.0, "90": 3545.0, "95": 4040.0, "100": 3472.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml index d5d4413..c64a535 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: 
${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json index 9fe4f01..0f6af58 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_dev.json @@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.87346, - 10.89625, - 10.88939, - 10.88681, - 10.8893, - 10.84863, - 10.6962, - 10.63919, - 10.53931, - 10.31119 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 4.95266, - 0.07818, - 0.07961, - 0.07716, - 0.08368, - 0.08327, - 0.08409, - 0.08371, - 0.08372, - 0.08387 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 32, - "step_interval": 5, - "values": [ - 1300.0, - 1287.0, - 1565.0, - 1441.0, - 1419.0, - 1295.0, - 1177.0 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.87346, "5": 10.86718, "10": 10.85561, "15": 10.88831, "20": 10.87704, "25": 10.84986, "30": 10.76439, "35": 10.68583, "40": 10.52311, "45": 10.32331, "50": 10.29634}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 454770688.0, "5": 454770688.0, "10": 454770688.0, "15": 454770688.0, "20": 518880768.0, "25": 518880768.0, "30": 518880768.0, "35": 518880768.0, "40": 518880768.0, "45": 518880768.0, "50": 518880768.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4511150592.0, "5": 4544705536.0, "10": 4544705536.0, "15": 4544705536.0, "20": 4607767040.0, "25": 4607767040.0, "30": 4607767040.0, "35": 4607767040.0, "40": 4607767040.0, "45": 4607767040.0, "50": 4607767040.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.60068, "5": 0.07688, "10": 0.07554, "15": 0.07566, "20": 0.33206, "25": 0.08271, "30": 0.08222, "35": 0.08267, "40": 0.08317, "45": 0.08236, "50": 0.08327}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1221.0, "25": 1129.0, "30": 1441.0, "35": 1322.0, "40": 1381.0, "45": 1282.0, "50": 1426.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json index 69ca350..0a71e62 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.87346, 10.89625, 10.88939, 10.88681, 10.8893, 10.84864, 10.6962, 10.63918, 10.5393, 10.31119]}, "num-zeros": {"start_step": 0, "end_step": 32, "step_interval": 5, "values": [1298.0, 1352.0, 1590.0, 1403.0, 1435.0, 1266.0, 1195.0]}, "iteration_timing_avg": 0.07655911764705883} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.87346, "5": 10.86718, "10": 10.85561, "15": 10.88831, "20": 10.87704, "25": 10.84986, "30": 10.7644, "35": 10.68583, "40": 10.5231, "45": 10.32331, "50": 10.29634}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 454770688.0, "5": 454770688.0, "10": 454770688.0, "15": 454770688.0, "20": 518880768.0, "25": 518880768.0, "30": 518880768.0, "35": 518880768.0, "40": 518880768.0, "45": 518880768.0, "50": 518880768.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4511150592.0, "5": 4544705536.0, "10": 4544705536.0, "15": 4544705536.0, "20": 4607767040.0, "25": 4607767040.0, "30": 4607767040.0, "35": 4607767040.0, "40": 4607767040.0, "45": 4607767040.0, "50": 4607767040.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.97955, "5": 0.09865, "10": 0.09755, "15": 0.08417, "20": 0.09136, "25": 0.09055, "30": 0.09084, "35": 0.09134, "40": 0.09058, "45": 0.09138, "50": 0.09003}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1279.0, "25": 1219.0, "30": 1421.0, "35": 1249.0, "40": 1452.0, "45": 1336.0, "50": 1455.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml index 7fac131..7c79dd3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json index bad3432..11f9f31 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_dev.json 
@@ -1,50 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.87346, - 10.89625, - 10.88939, - 10.88681, - 10.88931, - 10.84864, - 10.6962, - 10.63918, - 10.5393, - 10.31119 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 5.32064, - 0.08204, - 0.08233, - 0.08176, - 0.09748, - 0.0966, - 0.09648, - 0.09617, - 0.09604, - 0.09646 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 32, - "step_interval": 5, - "values": [ - 1112.0, - 1124.0, - 1229.0, - 1665.0, - 1269.0, - 1219.0, - 1572.0 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.87346, "5": 10.86718, "10": 10.85561, "15": 10.88831, "20": 10.87704, "25": 10.84986, "30": 10.7644, "35": 10.68582, "40": 10.5231, "45": 10.32331, "50": 10.29634}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 911795200.0, "5": 911795200.0, "10": 911795200.0, "15": 911795200.0, "20": 1426769408.0, "25": 1426769408.0, "30": 1426769408.0, "35": 1426769408.0, "40": 1426769408.0, "45": 1426769408.0, "50": 1426769408.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4738548736.0, "5": 4772103680.0, "10": 4772103680.0, "15": 4772103680.0, "20": 5286291456.0, "25": 5286291456.0, "30": 5286291456.0, "35": 5286291456.0, "40": 5286291456.0, "45": 5286291456.0, "50": 5286291456.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3.51674, "5": 0.08141, "10": 0.08052, "15": 0.07992, "20": 0.09632, "25": 0.09637, "30": 0.09667, "35": 0.09681, "40": 0.09734, "45": 0.09652, "50": 0.09765}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1974.0, "25": 1113.0, "30": 994.0, "35": 1045.0, "40": 1324.0, "45": 1573.0, "50": 1267.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json index 96b8036..fc05cd9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.87346, 10.89625, 10.88939, 10.88681, 10.88931, 10.84864, 10.6962, 10.63918, 10.53931, 10.31119]}, "num-zeros": {"start_step": 0, "end_step": 32, "step_interval": 5, "values": [1131.0, 1173.0, 1218.0, 1783.0, 1278.0, 1244.0, 1555.0]}, "iteration_timing_avg": 0.07975499999999999} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.87346, "5": 10.86718, "10": 10.85561, "15": 10.88831, "20": 10.87703, "25": 10.84986, "30": 10.76439, "35": 10.68583, "40": 10.5231, "45": 10.32331, "50": 10.29634}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 911795200.0, "5": 911795200.0, "10": 911795200.0, "15": 911795200.0, "20": 1426769408.0, "25": 1426769408.0, "30": 1426769408.0, "35": 1426769408.0, "40": 1426769408.0, "45": 1426769408.0, "50": 1426769408.0}}, "mem-max-allocated-bytes": {"start_step": 
1, "end_step": 50, "step_interval": 5, "values": {"1": 4738548736.0, "5": 4772103680.0, "10": 4772103680.0, "15": 4772103680.0, "20": 5286291456.0, "25": 5286291456.0, "30": 5286291456.0, "35": 5286291456.0, "40": 5286291456.0, "45": 5286291456.0, "50": 5286291456.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.38932, "5": 0.08356, "10": 0.08398, "15": 0.09924, "20": 0.09907, "25": 0.09964, "30": 0.09945, "35": 0.10076, "40": 0.09872, "45": 0.09961, "50": 0.09911}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2075.0, "25": 1126.0, "30": 1049.0, "35": 1033.0, "40": 1364.0, "45": 1599.0, "50": 1249.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml index 2c05343..0d8b40a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json index 6c6d8e7..387f454 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0]}, "iteration_timing_avg": 0.10581941176470588} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84009, "5": 10.87275, "10": 10.8333, "15": 10.87115, "20": 10.85956, "25": 10.8165, "30": 10.7379, "35": 10.66607, "40": 10.50091, "45": 10.26832, "50": 10.25759}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 685659136.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0}}, 
"iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4.83724, "5": 0.1196, "10": 0.11844, "15": 0.11713, "20": 0.12863, "25": 0.12877, "30": 0.13001, "35": 0.12746, "40": 0.127, "45": 0.12743, "50": 0.12672}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2206.0, "25": 1990.0, "30": 2857.0, "35": 2070.0, "40": 2038.0, "45": 2212.0, "50": 2256.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json index 6c6d8e7..54aeea6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0]}, "iteration_timing_avg": 0.10581941176470588} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84009, "5": 10.87275, "10": 10.8333, "15": 10.87115, "20": 10.85956, "25": 10.8165, "30": 10.7379, "35": 10.66607, "40": 10.50091, "45": 10.26832, "50": 10.25759}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 685659136.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.4701, "5": 0.1208, "10": 0.1223, "15": 0.11887, "20": 0.12942, "25": 0.12991, "30": 0.12979, "35": 0.12982, "40": 0.12913, "45": 0.12942, "50": 0.1287}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2206.0, "25": 1990.0, "30": 2857.0, "35": 2070.0, "40": 2038.0, "45": 2212.0, "50": 2256.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml index 2d4f4d2..8565367 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ 
MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json index d4a5cfb..ecc474a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831, 10.20828, 9.96658, 9.97022, 9.92437, 9.79137, 9.26612, 9.61914, 9.19057, 9.46177, 9.62185]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0, 2732.0, 2678.0, 2452.0, 2879.0, 2572.0, 3456.0, 3237.0, 2990.0, 3067.0, 3173.0]}, "iteration_timing_avg": 0.10533134328358208} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84009, "5": 10.87275, "10": 10.8333, "15": 10.87115, "20": 10.85956, "25": 10.8165, "30": 10.7379, "35": 10.66607, "40": 10.50091, "45": 10.26832, "50": 10.25759, "55": 10.18894, "60": 9.82312, "65": 9.25909, "70": 9.91228, "75": 9.5866, "80": 9.5452, "85": 9.72691, "90": 9.9014, "95": 9.60295, "100": 9.49461}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 685659136.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0, "55": 1043027456.0, "60": 1043027456.0, "65": 1043027456.0, "70": 1043027456.0, "75": 1043027456.0, "80": 1043027456.0, "85": 1043027456.0, "90": 1043027456.0, "95": 1043027456.0, "100": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0, "55": 3544935936.0, "60": 3544935936.0, "65": 3544935936.0, "70": 3544935936.0, "75": 3544935936.0, "80": 3544935936.0, "85": 3544935936.0, "90": 3544935936.0, "95": 3544935936.0, "100": 3544935936.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.72009, "5": 0.11671, "10": 0.11704, "15": 0.11683, "20": 0.1282, "25": 0.12625, "30": 0.12532, "35": 0.12531, "40": 0.12681, "45": 0.13244, "50": 0.12675, "55": 0.12709, "60": 0.12556, "65": 0.12619, "70": 0.12735, "75": 0.12687, "80": 0.12618, "85": 0.12677, "90": 0.1261, "95": 0.12672, "100": 0.12579}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2206.0, "25": 1990.0, "30": 2857.0, "35": 2070.0, "40": 2038.0, "45": 2212.0, "50": 2256.0, "55": 2702.0, "60": 2114.0, "65": 2315.0, "70": 2595.0, "75": 2552.0, "80": 3615.0, "85": 3769.0, "90": 3322.0, "95": 3540.0, "100": 2839.0}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json index d4a5cfb..312b54d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.84009, 10.89314, 10.908, 10.87524, 10.86367, 10.83848, 10.64647, 10.62126, 10.53743, 10.24831, 10.20828, 9.96658, 9.97022, 9.92437, 9.79137, 9.26612, 9.61914, 9.19057, 9.46177, 9.62185]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2044.0, 2242.0, 2368.0, 2598.0, 2188.0, 1850.0, 2436.0, 2732.0, 2678.0, 2452.0, 2879.0, 2572.0, 3456.0, 3237.0, 2990.0, 3067.0, 3173.0]}, "iteration_timing_avg": 0.10533134328358208} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84009, "5": 10.87275, "10": 10.8333, "15": 10.87115, "20": 10.85956, "25": 10.8165, "30": 10.7379, "35": 10.66607, "40": 10.50091, "45": 10.26832, "50": 10.25759, "55": 10.18894, "60": 9.82312, "65": 9.25909, "70": 9.91228, "75": 9.5866, "80": 9.5452, "85": 9.72691, "90": 9.9014, "95": 9.60295, "100": 9.49461}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 685659136.0, "5": 685659136.0, "10": 685659136.0, "15": 685659136.0, "20": 1043027456.0, "25": 1043027456.0, "30": 1043027456.0, "35": 1043027456.0, "40": 1043027456.0, "45": 1043027456.0, "50": 1043027456.0, "55": 1043027456.0, "60": 1043027456.0, "65": 1043027456.0, "70": 1043027456.0, "75": 1043027456.0, "80": 1043027456.0, "85": 1043027456.0, "90": 1043027456.0, "95": 1043027456.0, "100": 1043027456.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3187304960.0, "5": 3187305472.0, "10": 3187305472.0, "15": 3187305472.0, "20": 3544935936.0, "25": 3544935936.0, "30": 3544935936.0, "35": 3544935936.0, "40": 3544935936.0, "45": 3544935936.0, "50": 3544935936.0, "55": 3544935936.0, "60": 3544935936.0, "65": 3544935936.0, "70": 3544935936.0, "75": 3544935936.0, "80": 3544935936.0, "85": 3544935936.0, "90": 3544935936.0, "95": 3544935936.0, "100": 3544935936.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.50882, "5": 0.12106, "10": 0.12106, "15": 0.12159, "20": 0.13115, "25": 0.13121, "30": 0.13033, "35": 0.13061, "40": 0.12963, "45": 0.13165, "50": 0.12995, "55": 0.13044, "60": 0.1305, "65": 0.13067, "70": 0.13152, "75": 0.12992, "80": 0.13009, "85": 0.13, "90": 0.13047, "95": 0.13007, "100": 0.13022}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 2206.0, "25": 1990.0, "30": 2857.0, "35": 2070.0, "40": 2038.0, "45": 2212.0, "50": 2256.0, "55": 2702.0, "60": 2114.0, "65": 2315.0, "70": 2595.0, "75": 2552.0, "80": 3615.0, "85": 3769.0, "90": 3322.0, "95": 3540.0, "100": 2839.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml index 05eb509..57921b1 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json index 0f5ad40..728b0dc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.1367805882352941} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4.97308, "5": 0.15265, "10": 0.15193, "15": 0.14981, "20": 0.15765, "25": 0.15573, "30": 0.15624, "35": 0.15616, "40": 0.15612, "45": 0.1554, "50": 0.15431}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json index 0f5ad40..5191c48 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 
10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.1367805882352941} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.9568, "5": 0.15667, "10": 0.15427, "15": 0.15435, "20": 0.16076, "25": 0.16147, "30": 0.16004, "35": 0.16115, "40": 0.16061, "45": 0.16137, "50": 0.16037}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml index 4b1288d..4d003cf 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json index b9816fb..5238062 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.13371323529411766} \ 
No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.41308, "5": 0.15141, "10": 0.15204, "15": 0.15184, "20": 0.15857, "25": 0.1585, "30": 0.15798, "35": 0.15815, "40": 0.15805, "45": 0.15802, "50": 0.1584}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json index b9816fb..6a80f23 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0]}, "iteration_timing_avg": 0.13371323529411766} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.70884, "5": 0.15531, "10": 0.15864, "15": 0.15813, "20": 0.16349, "25": 0.16119, "30": 0.15926, "35": 0.1604, "40": 0.16225, "45": 0.15963, "50": 0.15904}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": 
"nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml index d55fb75..e240f25 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json index 4cf16ef..d34aa0e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087, 10.19557, 9.94382, 9.95175, 9.90538, 9.79357, 9.25904, 9.61568, 9.19187, 9.46047, 9.6229]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0, 3566.0, 3139.0, 3236.0, 3208.0, 3413.0, 3913.0, 3194.0, 3581.0, 3625.0, 4695.0]}, "iteration_timing_avg": 0.1320626865671642} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013, "55": 10.18176, "60": 9.79743, "65": 9.23775, "70": 9.90668, "75": 9.57555, "80": 9.53721, "85": 9.7244, "90": 9.90337, "95": 9.60378, "100": 9.49515}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0, "55": 852351488.0, "60": 852351488.0, "65": 852351488.0, "70": 852351488.0, "75": 852351488.0, "80": 852351488.0, "85": 852351488.0, "90": 852351488.0, "95": 852351488.0, "100": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0, "55": 2675191296.0, "60": 2675191296.0, "65": 2675191296.0, "70": 
2675191296.0, "75": 2675191296.0, "80": 2675191296.0, "85": 2675191296.0, "90": 2675191296.0, "95": 2675191296.0, "100": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.29223, "5": 0.148, "10": 0.15262, "15": 0.14775, "20": 0.15758, "25": 0.15747, "30": 0.15755, "35": 0.1573, "40": 0.15708, "45": 0.15857, "50": 0.15592, "55": 0.15661, "60": 0.15681, "65": 0.15746, "70": 0.15643, "75": 0.1569, "80": 0.15717, "85": 0.15594, "90": 0.15698, "95": 0.15634, "100": 0.15557}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0, "55": 2429.0, "60": 2997.0, "65": 2339.0, "70": 3596.0, "75": 3042.0, "80": 3664.0, "85": 3880.0, "90": 3675.0, "95": 3958.0, "100": 3516.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json index 4cf16ef..25ee18f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.81248, 10.87098, 10.90003, 10.85021, 10.84909, 10.81546, 10.61697, 10.61018, 10.52451, 10.23087, 10.19557, 9.94382, 9.95175, 9.90538, 9.79357, 9.25904, 9.61568, 9.19187, 9.46047, 9.6229]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2427.0, 2538.0, 2652.0, 2303.0, 2378.0, 2744.0, 2530.0, 3566.0, 3139.0, 3236.0, 3208.0, 3413.0, 3913.0, 3194.0, 3581.0, 3625.0, 4695.0]}, "iteration_timing_avg": 0.1320626865671642} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81248, "5": 10.85075, "10": 10.78942, "15": 10.8429, "20": 10.83716, "25": 10.79056, "30": 10.72218, "35": 10.65819, "40": 10.48109, "45": 10.24478, "50": 10.24013, "55": 10.18176, "60": 9.79743, "65": 9.23775, "70": 9.90668, "75": 9.57555, "80": 9.53721, "85": 9.7244, "90": 9.90337, "95": 9.60378, "100": 9.49515}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 570640384.0, "5": 570640384.0, "10": 570640384.0, "15": 570640384.0, "20": 852351488.0, "25": 852351488.0, "30": 852351488.0, "35": 852351488.0, "40": 852351488.0, "45": 852351488.0, "50": 852351488.0, "55": 852351488.0, "60": 852351488.0, "65": 852351488.0, "70": 852351488.0, "75": 852351488.0, "80": 852351488.0, "85": 852351488.0, "90": 852351488.0, "95": 852351488.0, "100": 852351488.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2393217536.0, "5": 2393218048.0, "10": 2393218048.0, "15": 2393218048.0, "20": 2675191296.0, "25": 2675191296.0, "30": 2675191296.0, "35": 2675191296.0, "40": 2675191296.0, "45": 2675191296.0, "50": 2675191296.0, "55": 2675191296.0, "60": 2675191296.0, "65": 2675191296.0, "70": 2675191296.0, "75": 2675191296.0, "80": 2675191296.0, "85": 2675191296.0, "90": 2675191296.0, "95": 2675191296.0, "100": 2675191296.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.8025, "5": 0.16107, "10": 0.1593, "15": 
0.15882, "20": 0.16606, "25": 0.16546, "30": 0.1652, "35": 0.16461, "40": 0.16459, "45": 0.16465, "50": 0.16613, "55": 0.16539, "60": 0.16495, "65": 0.16796, "70": 0.16535, "75": 0.16378, "80": 0.16544, "85": 0.16397, "90": 0.16595, "95": 0.16528, "100": 0.16424}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1747.0, "25": 2378.0, "30": 2490.0, "35": 2036.0, "40": 2118.0, "45": 2571.0, "50": 2738.0, "55": 2429.0, "60": 2997.0, "65": 2339.0, "70": 3596.0, "75": 3042.0, "80": 3664.0, "85": 3880.0, "90": 3675.0, "95": 3958.0, "100": 3516.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml index c0aceac..8719b06 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json index 302a152..9b0accc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1333435294117647} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 867031552.0, "30": 867031552.0, "35": 867031552.0, "40": 867031552.0, "45": 867031552.0, "50": 869128704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 4114758144.0, "45": 4114758144.0, "50": 4114758144.0}}, "iteration-time": {"start_step": 1, 
"end_step": 50, "step_interval": 5, "values": {"1": 5.24257, "5": 0.15422, "10": 0.15487, "15": 0.15313, "20": 0.16008, "25": 0.16051, "30": 0.15922, "35": 0.15956, "40": 0.15077, "45": 0.15006, "50": 0.15178}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json index 302a152..49b18e5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1333435294117647} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 869128704.0, "30": 869128704.0, "35": 869128704.0, "40": 869128704.0, "45": 869128704.0, "50": 869128704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 4114758144.0, "45": 4114758144.0, "50": 4114758144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.48378, "5": 0.14932, "10": 0.14983, "15": 0.14921, "20": 0.15688, "25": 0.1558, "30": 0.15625, "35": 0.156, "40": 0.15513, "45": 0.15522, "50": 0.15472}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml index c2439f9..5cb0ff6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} 
--data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json index b807a2e..1daeca0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.1660379411764706} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 378903552.0, "5": 378379264.0, "10": 380476416.0, "15": 380476416.0, "20": 559500800.0, "25": 559500800.0, "30": 560549376.0, "35": 559500800.0, "40": 560549376.0, "45": 560549376.0, "50": 560549376.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1702977024.0, "5": 1702977536.0, "10": 1702977536.0, "15": 1702977536.0, "20": 1884099072.0, "25": 1884360704.0, "30": 1884360704.0, "35": 1884360704.0, "40": 1884360704.0, "45": 1884360704.0, "50": 1884360704.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4.45783, "5": 0.20185, "10": 0.18595, "15": 0.18557, "20": 0.1954, "25": 0.1953, "30": 0.19498, "35": 0.19456, "40": 0.19508, "45": 0.19498, "50": 0.19438}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json index b807a2e..5d4df37 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.1660379411764706} \ No newline at end of 
file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 379952128.0, "5": 379427840.0, "10": 378903552.0, "15": 378379264.0, "20": 561597952.0, "25": 561597952.0, "30": 561597952.0, "35": 561597952.0, "40": 561597952.0, "45": 561597952.0, "50": 561073664.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1704025600.0, "5": 1704026112.0, "10": 1704026112.0, "15": 1704026112.0, "20": 1886196224.0, "25": 1886196224.0, "30": 1886196224.0, "35": 1886196224.0, "40": 1886196224.0, "45": 1886196224.0, "50": 1886196224.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.2993, "5": 0.19899, "10": 0.20014, "15": 0.19897, "20": 0.20978, "25": 0.20792, "30": 0.20828, "35": 0.20882, "40": 0.20801, "45": 0.20866, "50": 0.20769}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml index 69dc9ed..ed0ab6a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json index 546ccfc..6a5c558 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.80264, 10.85778, 10.86259, 10.83903, 10.82934, 10.81016, 10.60251, 10.61471, 10.54092, 10.27186, 10.24338, 10.02058, 10.03017, 9.99471, 9.84885, 9.34867, 9.67263, 9.2457, 9.53365, 9.67548]}, "num-zeros": {"start_step": 0, "end_step": 84, "step_interval": 5, "values": [8571.0, 7897.0, 7748.0, 9008.0, 9165.0, 8986.0, 9155.0, 7960.0, 7684.0, 9743.0, 8727.0, 9382.0, 10992.0, 11177.0, 11270.0, 13404.0, 11533.0]}, "iteration_timing_avg": 0.3735462686567164} \ No newline at end of file 
+{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.76813, "5": 10.82426, "10": 10.7488, "15": 10.82093, "20": 10.79407, "25": 10.74528, "30": 10.68463, "35": 10.62109, "40": 10.47053, "45": 10.24915, "50": 10.27379, "55": 10.20448, "60": 9.84999, "65": 9.28499, "70": 9.94476, "75": 9.62753, "80": 9.57725, "85": 9.76823, "90": 9.93273, "95": 9.64547, "100": 9.53769}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 607448576.0, "5": 607448576.0, "10": 607448576.0, "15": 607448576.0, "20": 944340992.0, "25": 944037888.0, "30": 944954368.0, "35": 944078848.0, "40": 944078848.0, "45": 944078848.0, "50": 944992256.0, "55": 944078848.0, "60": 944078848.0, "65": 943674368.0, "70": 945127424.0, "75": 944078848.0, "80": 944322560.0, "85": 944078848.0, "90": 944078848.0, "95": 944078848.0, "100": 944993280.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1843249152.0, "5": 1843369472.0, "10": 1844654592.0, "15": 1844654592.0, "20": 2181567488.0, "25": 2181567488.0, "30": 2181567488.0, "35": 2181567488.0, "40": 2181567488.0, "45": 2181567488.0, "50": 2181567488.0, "55": 2181567488.0, "60": 2181567488.0, "65": 2181567488.0, "70": 2181567488.0, "75": 2181567488.0, "80": 2181567488.0, "85": 2181567488.0, "90": 2181567488.0, "95": 2181635584.0, "100": 2181635584.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88614, "5": 0.48959, "10": 0.49989, "15": 0.49414, "20": 0.50941, "25": 0.50908, "30": 0.50805, "35": 0.51133, "40": 0.50893, "45": 0.51427, "50": 0.50848, "55": 0.50598, "60": 0.50695, "65": 0.77726, "70": 0.50306, "75": 0.71349, "80": 0.50736, "85": 0.50779, "90": 0.50494, "95": 0.50557, "100": 0.50572}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 7024.0, "25": 7902.0, "30": 8336.0, "35": 7346.0, "40": 7522.0, "45": 8100.0, "50": 8998.0, "55": 8207.0, "60": 9031.0, "65": 7785.0, "70": 10580.0, "75": 9533.0, "80": 11195.0, "85": 11864.0, "90": 12414.0, "95": 13058.0, "100": 10097.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json index 546ccfc..5bf0616 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.80264, 10.85778, 10.86259, 10.83903, 10.82934, 10.81016, 10.60251, 10.61471, 10.54092, 10.27186, 10.24338, 10.02058, 10.03017, 9.99471, 9.84885, 9.34867, 9.67263, 9.2457, 9.53365, 9.67548]}, "num-zeros": {"start_step": 0, "end_step": 84, "step_interval": 5, "values": [8571.0, 7897.0, 7748.0, 9008.0, 9165.0, 8986.0, 9155.0, 7960.0, 7684.0, 9743.0, 8727.0, 9382.0, 10992.0, 11177.0, 11270.0, 13404.0, 11533.0]}, "iteration_timing_avg": 0.3735462686567164} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.76813, "5": 10.82426, "10": 10.7488, "15": 10.82093, "20": 10.79407, "25": 10.74528, "30": 10.68463, "35": 10.62109, "40": 
10.47053, "45": 10.24915, "50": 10.27379, "55": 10.20448, "60": 9.84999, "65": 9.28499, "70": 9.94476, "75": 9.62753, "80": 9.57725, "85": 9.76823, "90": 9.93273, "95": 9.64547, "100": 9.53769}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 607448576.0, "5": 607448576.0, "10": 607448576.0, "15": 607448576.0, "20": 944340992.0, "25": 944037888.0, "30": 944954368.0, "35": 944078848.0, "40": 944078848.0, "45": 944078848.0, "50": 944992256.0, "55": 944078848.0, "60": 944078848.0, "65": 943674368.0, "70": 945127424.0, "75": 944078848.0, "80": 944322560.0, "85": 944078848.0, "90": 944078848.0, "95": 944078848.0, "100": 944993280.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1843249152.0, "5": 1843369472.0, "10": 1844654592.0, "15": 1844654592.0, "20": 2181567488.0, "25": 2181567488.0, "30": 2181567488.0, "35": 2181567488.0, "40": 2181567488.0, "45": 2181567488.0, "50": 2181567488.0, "55": 2181567488.0, "60": 2181567488.0, "65": 2181567488.0, "70": 2181567488.0, "75": 2181567488.0, "80": 2181567488.0, "85": 2181567488.0, "90": 2181567488.0, "95": 2181635584.0, "100": 2181635584.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 18.6534, "5": 0.52336, "10": 0.51659, "15": 0.52097, "20": 0.5413, "25": 0.56055, "30": 0.53271, "35": 0.54237, "40": 0.5352, "45": 0.53408, "50": 0.53304, "55": 0.53075, "60": 0.53399, "65": 0.53294, "70": 0.53179, "75": 0.69389, "80": 0.531, "85": 0.52842, "90": 0.53117, "95": 0.53133, "100": 0.72087}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 7024.0, "25": 7902.0, "30": 8336.0, "35": 7346.0, "40": 7522.0, "45": 8100.0, "50": 8998.0, "55": 8207.0, "60": 9031.0, "65": 7785.0, "70": 10580.0, "75": 9533.0, "80": 11195.0, "85": 11864.0, "90": 12414.0, "95": 13058.0, "100": 10097.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml index bd324b8..8526488 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json index c0a53bd..5f64197 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708, 10.19741, 9.9562, 9.96369, 9.91398, 9.79604, 9.2686, 9.61975, 9.19501, 9.47332, 9.62216]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0, 3656.0, 3275.0, 3203.0, 3297.0, 3364.0, 3789.0, 3277.0, 3660.0, 3733.0, 4815.0]}, "iteration_timing_avg": 0.1628459701492537} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621, "55": 10.18314, "60": 9.79897, "65": 9.24752, "70": 9.91362, "75": 9.58564, "80": 9.54312, "85": 9.72736, "90": 9.90472, "95": 9.6077, "100": 9.49935}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 380476416.0, "5": 380476416.0, "10": 380476416.0, "15": 380476416.0, "20": 560287232.0, "25": 560287232.0, "30": 560287232.0, "35": 561073664.0, "40": 560287232.0, "45": 561597952.0, "50": 561597952.0, "55": 561073664.0, "60": 561073664.0, "65": 561597952.0, "70": 560287232.0, "75": 560287232.0, "80": 560287232.0, "85": 560287232.0, "90": 560287232.0, "95": 561597952.0, "100": 560287232.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1702977024.0, "5": 1702977536.0, "10": 1702977536.0, "15": 1702977536.0, "20": 1884361216.0, "25": 1884361216.0, "30": 1884361216.0, "35": 1884361216.0, "40": 1884361216.0, "45": 1884361216.0, "50": 1884361216.0, "55": 1884361216.0, "60": 1884361216.0, "65": 1884361216.0, "70": 1884361216.0, "75": 1884361216.0, "80": 1884361216.0, "85": 1884361216.0, "90": 1884361216.0, "95": 1884361216.0, "100": 1884361216.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.70051, "5": 0.18489, "10": 0.1885, "15": 0.18516, "20": 0.19623, "25": 0.19562, "30": 0.19558, "35": 0.19543, "40": 0.19414, "45": 0.19546, "50": 0.1943, "55": 0.19481, "60": 0.19412, "65": 0.19731, "70": 0.19502, "75": 0.1953, "80": 0.19592, "85": 0.19662, "90": 0.19524, "95": 0.19564, "100": 0.19497}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0, "55": 2580.0, "60": 2853.0, "65": 2346.0, "70": 3572.0, "75": 2886.0, "80": 3459.0, "85": 4068.0, "90": 3747.0, "95": 4088.0, "100": 3436.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json index c0a53bd..f5a12ad 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, 
"end_step": 100, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708, 10.19741, 9.9562, 9.96369, 9.91398, 9.79604, 9.2686, 9.61975, 9.19501, 9.47332, 9.62216]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0, 3656.0, 3275.0, 3203.0, 3297.0, 3364.0, 3789.0, 3277.0, 3660.0, 3733.0, 4815.0]}, "iteration_timing_avg": 0.1628459701492537} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621, "55": 10.18314, "60": 9.79897, "65": 9.24752, "70": 9.91362, "75": 9.58564, "80": 9.54312, "85": 9.72736, "90": 9.90472, "95": 9.6077, "100": 9.49935}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 378903552.0, "5": 379952128.0, "10": 379952128.0, "15": 379952128.0, "20": 560549376.0, "25": 560549376.0, "30": 560549376.0, "35": 560549376.0, "40": 560549376.0, "45": 560549376.0, "50": 560549376.0, "55": 561073664.0, "60": 561073664.0, "65": 560549376.0, "70": 560549376.0, "75": 560549376.0, "80": 560549376.0, "85": 560549376.0, "90": 560549376.0, "95": 560549376.0, "100": 560549376.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1704025600.0, "5": 1704026112.0, "10": 1704026112.0, "15": 1704026112.0, "20": 1886196224.0, "25": 1886196224.0, "30": 1886196224.0, "35": 1886196224.0, "40": 1886196224.0, "45": 1886196224.0, "50": 1886196224.0, "55": 1886196224.0, "60": 1886196224.0, "65": 1886196224.0, "70": 1886196224.0, "75": 1886196224.0, "80": 1886196224.0, "85": 1886196224.0, "90": 1886196224.0, "95": 1886196224.0, "100": 1886196224.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.26991, "5": 0.1958, "10": 0.19444, "15": 0.194, "20": 0.20361, "25": 0.20332, "30": 0.20368, "35": 0.20417, "40": 0.20368, "45": 0.20398, "50": 0.2037, "55": 0.20453, "60": 0.20433, "65": 0.20387, "70": 0.20373, "75": 0.20399, "80": 0.20347, "85": 0.20432, "90": 0.2036, "95": 0.20374, "100": 0.20437}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0, "55": 2580.0, "60": 2853.0, "65": 2346.0, "70": 3572.0, "75": 2886.0, "80": 3459.0, "85": 4068.0, "90": 3747.0, "95": 4088.0, "100": 3436.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml index e872304..76fe028 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: 
${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json index 18457f2..cd38550 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23144205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1464318464.0, "5": 1464319488.0, "10": 1464320000.0, "15": 1464320000.0, "20": 1597091840.0, "25": 1597091840.0, "30": 1597091840.0, "35": 1597091840.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.08228, "5": 0.27074, "10": 0.26257, "15": 0.26176, "20": 0.27712, "25": 0.27706, "30": 0.27709, "35": 0.28021, "40": 0.28046, "45": 0.27903, "50": 0.27978}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json index 18457f2..92b4906 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23144205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 
10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1465368064.0, "5": 1465368064.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1596304896.0, "25": 1596304896.0, "30": 1596304896.0, "35": 1596304896.0, "40": 1596304896.0, "45": 1596305408.0, "50": 1596305408.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.797, "5": 0.28708, "10": 0.286, "15": 0.28021, "20": 0.30007, "25": 0.29697, "30": 0.29501, "35": 0.29587, "40": 0.29259, "45": 0.2983, "50": 0.29365}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml index 226809a..ee16180 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json index 7b39f86..2e01ed5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23131970588235293} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 
416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1464318464.0, "5": 1464320000.0, "10": 1464320000.0, "15": 1464320000.0, "20": 1597089792.0, "25": 1597091328.0, "30": 1597092352.0, "35": 1597092352.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4.11891, "5": 0.27161, "10": 0.26629, "15": 0.2637, "20": 0.2814, "25": 0.28361, "30": 0.28297, "35": 0.28276, "40": 0.28313, "45": 0.2873, "50": 0.28552}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json index 7b39f86..4bdf1d9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0]}, "iteration_timing_avg": 0.23131970588235293} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1464319488.0, "5": 1464320000.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1596305408.0, "25": 1596305408.0, "30": 1596305408.0, "35": 1596305408.0, "40": 1596305408.0, "45": 1596305408.0, "50": 1596305920.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.22206, "5": 0.28793, "10": 0.2833, "15": 0.28906, "20": 0.29969, "25": 0.30075, "30": 0.29561, "35": 0.30149, "40": 0.29547, "45": 0.30118, "50": 0.29352}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml index 8746c03..708d1df 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json index 47198f9..e85d6ee 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525, 10.21403, 9.9801, 9.96977, 9.93973, 9.81158, 9.28667, 9.63194, 9.19732, 9.48341, 9.62985]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0, 3451.0, 3205.0, 2940.0, 3143.0, 3310.0, 3884.0, 3232.0, 3491.0, 3751.0, 5022.0]}, "iteration_timing_avg": 0.22914074626865674} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239, "55": 10.20076, "60": 9.84045, "65": 9.27781, "70": 9.92981, "75": 9.61573, "80": 9.56042, "85": 9.74259, "90": 9.91759, "95": 9.61376, "100": 9.50538}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1464319488.0, "5": 1464319488.0, "10": 1464320000.0, "15": 1464320000.0, "20": 1594994688.0, "25": 1597091840.0, "30": 1597091840.0, "35": 1597091840.0, "40": 1597092352.0, "45": 1597092352.0, "50": 1597092352.0, "55": 1597092352.0, "60": 1597092352.0, "65": 1597092352.0, "70": 1597092352.0, "75": 1597092352.0, "80": 1597092352.0, "85": 1597092352.0, "90": 1597092352.0, "95": 1597092352.0, "100": 1597092352.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3.9389, "5": 0.26761, "10": 0.26783, "15": 0.26387, "20": 0.27882, "25": 0.27734, "30": 0.2767, "35": 0.277, "40": 0.27635, "45": 0.27694, "50": 0.28016, "55": 0.27883, "60": 0.28002, "65": 0.27862, 
"70": 0.27887, "75": 0.27972, "80": 0.27714, "85": 0.27759, "90": 0.27766, "95": 0.27789, "100": 0.27817}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0, "55": 2309.0, "60": 2740.0, "65": 2151.0, "70": 3646.0, "75": 2891.0, "80": 3546.0, "85": 3681.0, "90": 3861.0, "95": 4152.0, "100": 3405.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json index 47198f9..711dbc6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.86312, 10.87712, 10.87347, 10.88278, 10.89457, 10.84427, 10.69023, 10.62687, 10.53974, 10.26525, 10.21403, 9.9801, 9.96977, 9.93973, 9.81158, 9.28667, 9.63194, 9.19732, 9.48341, 9.62985]}, "num-zeros": {"start_step": 0, "end_step": 83, "step_interval": 5, "values": [2244.0, 2273.0, 2447.0, 2031.0, 2134.0, 2491.0, 2380.0, 3451.0, 3205.0, 2940.0, 3143.0, 3310.0, 3884.0, 3232.0, 3491.0, 3751.0, 5022.0]}, "iteration_timing_avg": 0.22914074626865674} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86312, "5": 10.86984, "10": 10.84273, "15": 10.88712, "20": 10.87623, "25": 10.83465, "30": 10.75356, "35": 10.67297, "40": 10.50224, "45": 10.28079, "50": 10.27239, "55": 10.20076, "60": 9.84045, "65": 9.27781, "70": 9.92981, "75": 9.61573, "80": 9.56042, "85": 9.74259, "90": 9.91759, "95": 9.61376, "100": 9.50538}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 284527616.0, "5": 284527616.0, "10": 284527616.0, "15": 284527616.0, "20": 416513536.0, "25": 416513536.0, "30": 416513536.0, "35": 416513536.0, "40": 416513536.0, "45": 416513536.0, "50": 416513536.0, "55": 416513536.0, "60": 416513536.0, "65": 416513536.0, "70": 416513536.0, "75": 416513536.0, "80": 416513536.0, "85": 416513536.0, "90": 416513536.0, "95": 416513536.0, "100": 416513536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1465368064.0, "5": 1465368064.0, "10": 1465368576.0, "15": 1465368576.0, "20": 1596305920.0, "25": 1596305920.0, "30": 1596305920.0, "35": 1596305920.0, "40": 1596305920.0, "45": 1596305920.0, "50": 1596305920.0, "55": 1596305920.0, "60": 1596305920.0, "65": 1596305920.0, "70": 1596305920.0, "75": 1596305920.0, "80": 1596305920.0, "85": 1596305920.0, "90": 1596305920.0, "95": 1596305920.0, "100": 1596305920.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.41683, "5": 0.29649, "10": 0.2936, "15": 0.29369, "20": 0.30302, "25": 0.29665, "30": 0.30347, "35": 0.29671, "40": 0.29818, "45": 0.29562, "50": 0.30562, "55": 0.29659, "60": 0.29349, "65": 0.29455, "70": 0.30009, "75": 0.29572, "80": 0.29482, "85": 0.29505, "90": 0.29548, "95": 0.29481, "100": 0.30221}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1645.0, "25": 
2124.0, "30": 2345.0, "35": 1780.0, "40": 1936.0, "45": 2289.0, "50": 2738.0, "55": 2309.0, "60": 2740.0, "65": 2151.0, "70": 3646.0, "75": 2891.0, "80": 3546.0, "85": 3681.0, "90": 3861.0, "95": 4152.0, "100": 3405.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml index 7d0be91..b5e627c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/model_config.yaml index 4349bc0..aafc059 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -45,4 +45,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/model_config.yaml index e28cc2b..4a78ba2 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -45,4 +45,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/model_config.yaml index 399dbd1..de34935 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/model_config.yaml index 48acb1e..6b4be59 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/model_config.yaml index 743064e..70f66f3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/model_config.yaml index 61edc36..805f59d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/model_config.yaml index de27041..33c251b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -52,4 +52,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/model_config.yaml index aa529c3..8217fca 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 2000 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/golden_values_dev.json new file mode 100644 index 0000000..7d30705 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 12.57679, "5": 12.5818, "10": 12.47354, "15": 11.80609, "20": 11.49702, "25": 10.98467}}, "num-zeros": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 521041248.0, "5": 520997440.0, "10": 521179808.0, "15": 521592416.0, "20": 521133664.0, "25": 523544832.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 25, 
"step_interval": 5, "values": {"1": 24510808064.0, "5": 24510808064.0, "10": 24510808064.0, "15": 24510808064.0, "20": 24510808064.0, "25": 24510808064.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 52700401664.0, "5": 60489064448.0, "10": 60489064448.0, "15": 60489064448.0, "20": 60489064448.0, "25": 60489064448.0}}, "iteration-time": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": 2.87864, "15": "nan", "20": 2.89414, "25": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/model_config.yaml new file mode 100644 index 0000000..2e044d1 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed/model_config.yaml @@ -0,0 +1,59 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 + NVTE_FWD_LAYERNORM_SM_MARGIN: 16 + NVTE_BWD_LAYERNORM_SM_MARGIN: 16 +MODEL_ARGS: + --num-layers: 32 + --hidden-size: 4096 + --num-attention-heads: 32 + --group-query-attention: true + --num-query-groups: 8 + --untie-embeddings-and-output-weights: true + --log-throughput: true + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 1 + --global-batch-size: 8 + --seq-length: 8192 + --max-position-embeddings: 8192 + --train-iters: 25 + --timing-log-level: 0 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --tokenizer-type: NullTokenizer + --vocab-size: 131072 + --mock-data: true + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 2 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 5 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-layers-per-virtual-pipeline-stage: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_dev.json new file mode 100644 index 0000000..a8361b8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 12.61262, "5": 12.60238, "10": 12.49879, "15": 11.82067, "20": 11.50566, "25": 10.99243}}, "num-zeros": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 523040896.0, "5": 523012096.0, "10": 523190944.0, "15": 523625088.0, "20": 523224032.0, "25": 525635776.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 20634214400.0, "5": 20634214400.0, 
"10": 20634214400.0, "15": 20634214400.0, "20": 20634214400.0, "25": 20634214400.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": 51333926912.0, "5": 58188226560.0, "10": 58188226560.0, "15": 58188226560.0, "20": 58188226560.0, "25": 58188226560.0}}, "iteration-time": {"start_step": 1, "end_step": 25, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": 2.72059, "15": "nan", "20": 2.72404, "25": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_lts.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/golden_values_lts.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/model_config.yaml new file mode 100644 index 0000000..cfcb7a6 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed/model_config.yaml @@ -0,0 +1,58 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 + NVTE_FWD_LAYERNORM_SM_MARGIN: 16 + NVTE_BWD_LAYERNORM_SM_MARGIN: 16 +MODEL_ARGS: + --num-layers: 32 + --hidden-size: 4096 + --num-attention-heads: 32 + --group-query-attention: true + --num-query-groups: 8 + --untie-embeddings-and-output-weights: true + --log-throughput: true + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 2 + --global-batch-size: 8 + --seq-length: 8192 + --max-position-embeddings: 8192 + --train-iters: 25 + --timing-log-level: 0 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --tokenizer-type: NullTokenizer + --vocab-size: 131072 + --mock-data: true + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 2 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 5 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 4 + --pipeline-model-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json index 34dfa4f..144c65e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8401, - 10.87259, - 10.85024, - 10.79646, - 10.68156, - 10.60618, - 10.12768, - 10.22185, - 10.13788, - 9.82309 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1698.0, - 1855.0, - 1949.0, - 1968.0, - 1881.0, - 1783.0, - 1653.0, - 2037.0, - 2313.0, - 2300.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 5.37706, - 0.09618, - 0.09432, - 0.09666, - 0.09442, - 0.09619, - 0.09453, - 0.0975, - 0.09517, - 0.09727 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84034, "10": 10.8134, "15": 10.80277, "20": 10.70494, "25": 10.53848, "30": 10.3552, "35": 10.27145, "40": 10.08048, "45": 9.82288, "50": 9.90119}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1698.0, "5": 1900.0, "10": 1421.0, "15": 1946.0, "20": 1765.0, "25": 1726.0, "30": 2022.0, "35": 1962.0, "40": 2274.0, "45": 2172.0, "50": 2369.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 552128000.0, "5": 552128000.0, "10": 552128000.0, "15": 552128000.0, "20": 552128000.0, "25": 552128000.0, "30": 552128000.0, "35": 552128000.0, "40": 552128000.0, "45": 552128000.0, "50": 552128000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4576452608.0, "5": 4673069056.0, "10": 4673069056.0, "15": 4673069056.0, "20": 4673069056.0, "25": 4673069056.0, "30": 4673069056.0, "35": 4673069056.0, "40": 4673069056.0, "45": 4673069056.0, "50": 4673069056.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.82685, "5": 0.09636, "10": 0.09453, "15": 0.0951, "20": 0.09324, "25": 0.09311, "30": 0.09279, "35": 0.0934, "40": 0.09774, "45": 0.09122, "50": 0.08864}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json index 87e9341..f9f2722 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.8401, 10.87262, 10.85025, 10.79646, 10.68152, 10.60614, 10.12765, 10.22184, 10.13787, 9.82312]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1670.0, 1901.0, 1954.0, 1932.0, 1998.0, 1768.0, 1651.0, 2063.0, 2348.0, 2324.0]}, "iteration_timing_avg": 0.06904588235294119} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84032, "10": 10.8134, "15": 10.80276, "20": 10.70493, "25": 10.53847, "30": 10.35518, "35": 10.27143, "40": 10.08046, "45": 9.82288, "50": 9.90114}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1670.0, "5": 1970.0, "10": 1397.0, 
"15": 1886.0, "20": 1785.0, "25": 1695.0, "30": 2086.0, "35": 1976.0, "40": 2349.0, "45": 2240.0, "50": 2338.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 552238592.0, "5": 552238592.0, "10": 552238592.0, "15": 552238592.0, "20": 552238592.0, "25": 552238592.0, "30": 552238592.0, "35": 552238592.0, "40": 552238592.0, "45": 552238592.0, "50": 552238592.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4576563200.0, "5": 4673179648.0, "10": 4673179648.0, "15": 4673179648.0, "20": 4673179648.0, "25": 4673179648.0, "30": 4673179648.0, "35": 4673179648.0, "40": 4673179648.0, "45": 4673179648.0, "50": 4673179648.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.79296, "5": 0.08936, "10": 0.08747, "15": 0.09067, "20": 0.08679, "25": 0.08868, "30": 0.08685, "35": 0.08887, "40": 0.08682, "45": 0.08792, "50": 0.08604}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index 581b097..7d11869 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..58b730c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84023, "5": 10.84074, "10": 10.81392, "15": 10.80238, "20": 10.70474, "25": 10.53876, "30": 10.35537, "35": 10.2716, "40": 10.08036, "45": 9.8231, "50": 9.90117, "55": 9.86414, "60": 9.48062, "65": 8.93763, "70": 9.7102, "75": 9.40888, "80": 9.39066, "85": 9.59766, "90": 9.80366, "95": 9.50574, "100": 9.38807}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1659.0, "5": 1886.0, "10": 1388.0, "15": 1827.0, "20": 1686.0, "25": 1696.0, "30": 1877.0, "35": 1967.0, "40": 2300.0, "45": 2176.0, "50": 2249.0, "55": 2468.0, "60": 2471.0, "65": 2688.0, "70": 3271.0, "75": 2633.0, "80": 3351.0, "85": 3332.0, "90": 2984.0, "95": 3459.0, "100": 3555.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 552128512.0, "5": 552128512.0, "10": 552128512.0, "15": 552128512.0, "20": 552128512.0, "25": 
552128512.0, "30": 552128512.0, "35": 552128512.0, "40": 552128512.0, "45": 552128512.0, "50": 552128512.0, "55": 552128512.0, "60": 552128512.0, "65": 552128512.0, "70": 552128512.0, "75": 552128512.0, "80": 552128512.0, "85": 552128512.0, "90": 552128512.0, "95": 552128512.0, "100": 552128512.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2615097856.0, "5": 2711714304.0, "10": 2711714304.0, "15": 2711714304.0, "20": 2711714304.0, "25": 2711714304.0, "30": 2711714304.0, "35": 2711714304.0, "40": 2711714304.0, "45": 2711714304.0, "50": 2711714304.0, "55": 2711714304.0, "60": 2711714304.0, "65": 2711714304.0, "70": 2711714304.0, "75": 2711714304.0, "80": 2711714304.0, "85": 2711714304.0, "90": 2711714304.0, "95": 2711714304.0, "100": 2711714304.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 5.51223, "5": 0.08691, "10": 0.085, "15": 0.0859, "20": 0.08404, "25": 0.08464, "30": 0.08355, "35": 0.08189, "40": 0.08107, "45": 0.08112, "50": 0.08147, "55": 0.08204, "60": 0.08108, "65": 0.08132, "70": 0.0801, "75": 0.0805, "80": 0.08087, "85": 0.08073, "90": 0.08118, "95": 0.0798, "100": 0.0816}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..3fda502 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84032, "10": 10.8134, "15": 10.80276, "20": 10.70493, "25": 10.53847, "30": 10.35518, "35": 10.27143, "40": 10.08046, "45": 9.82288, "50": 9.90114, "55": 9.86426, "60": 9.48028, "65": 8.93744, "70": 9.71023, "75": 9.40882, "80": 9.39078, "85": 9.59744, "90": 9.8039, "95": 9.50564, "100": 9.38814}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1670.0, "5": 1970.0, "10": 1397.0, "15": 1886.0, "20": 1785.0, "25": 1695.0, "30": 2086.0, "35": 1976.0, "40": 2349.0, "45": 2240.0, "50": 2338.0, "55": 2364.0, "60": 2474.0, "65": 2762.0, "70": 3207.0, "75": 2625.0, "80": 3502.0, "85": 3356.0, "90": 3142.0, "95": 3385.0, "100": 3449.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 552238592.0, "5": 552238592.0, "10": 552238592.0, "15": 552238592.0, "20": 552238592.0, "25": 552238592.0, "30": 552238592.0, "35": 552238592.0, "40": 552238592.0, "45": 552238592.0, "50": 552238592.0, "55": 552238592.0, "60": 552238592.0, "65": 552238592.0, "70": 552238592.0, "75": 552238592.0, "80": 552238592.0, "85": 552238592.0, "90": 552238592.0, "95": 552238592.0, "100": 552238592.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4576563200.0, "5": 4673179648.0, "10": 4673179648.0, "15": 4673179648.0, "20": 4673179648.0, "25": 4673179648.0, "30": 4673179648.0, "35": 4673179648.0, "40": 4673179648.0, "45": 4673179648.0, "50": 4673179648.0, "55": 4673179648.0, "60": 4673179648.0, "65": 4673179648.0, "70": 4673179648.0, "75": 4673179648.0, "80": 4673179648.0, "85": 4673179648.0, "90": 4673179648.0, "95": 4673179648.0, "100": 4673179648.0}}, "iteration-time": {"start_step": 1, "end_step": 
100, "step_interval": 5, "values": {"1": 9.8249, "5": 0.09011, "10": 0.09012, "15": 0.09032, "20": 0.08958, "25": 0.0911, "30": 0.0899, "35": 0.09078, "40": 0.08965, "45": 0.09255, "50": 0.0906, "55": 0.08977, "60": 0.0869, "65": 0.08684, "70": 0.08704, "75": 0.08628, "80": 0.08639, "85": 0.08662, "90": 0.08701, "95": 0.08613, "100": 0.0859}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml index 90c2570..7682370 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..112ea34 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84023, "5": 10.84074, "10": 10.81392, "15": 10.80238, "20": 10.70474, "25": 10.53876, "30": 10.35537, "35": 10.2716, "40": 10.08036, "45": 9.8231, "50": 9.90117, "55": 9.86414, "60": 9.48062, "65": 8.93763, "70": 9.7102, "75": 9.40888, "80": 9.39066, "85": 9.59766, "90": 9.80366, "95": 9.50574, "100": 9.38807}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1659.0, "5": 1886.0, "10": 1388.0, "15": 1827.0, "20": 1686.0, "25": 1696.0, "30": 1877.0, "35": 1967.0, "40": 2300.0, "45": 2176.0, "50": 2249.0, "55": 2468.0, "60": 2471.0, "65": 2688.0, "70": 3271.0, "75": 2633.0, "80": 3351.0, "85": 3332.0, "90": 2984.0, "95": 3459.0, "100": 3555.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 552128512.0, "5": 552128512.0, "10": 552128512.0, "15": 552128512.0, "20": 552128512.0, "25": 552128512.0, "30": 552128512.0, "35": 552128512.0, "40": 552128512.0, "45": 552128512.0, "50": 552128512.0, "55": 552128512.0, "60": 552128512.0, "65": 552128512.0, "70": 552128512.0, "75": 552128512.0, "80": 552128512.0, "85": 552128512.0, "90": 552128512.0, "95": 552128512.0, "100": 552128512.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2615097856.0, "5": 2711714304.0, "10": 2711714304.0, "15": 2711714304.0, "20": 2711714304.0, "25": 2711714304.0, "30": 2711714304.0, "35": 2711714304.0, "40": 2711714304.0, "45": 
2711714304.0, "50": 2711714304.0, "55": 2711714304.0, "60": 2711714304.0, "65": 2711714304.0, "70": 2711714304.0, "75": 2711714304.0, "80": 2711714304.0, "85": 2711714304.0, "90": 2711714304.0, "95": 2711714304.0, "100": 2711714304.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 5.45269, "5": 0.08237, "10": 0.08305, "15": 0.08328, "20": 0.08344, "25": 0.08281, "30": 0.08195, "35": 0.08111, "40": 0.08016, "45": 0.07836, "50": 0.07936, "55": 0.07906, "60": 0.08023, "65": 0.07916, "70": 0.08026, "75": 0.07938, "80": 0.07948, "85": 0.07874, "90": 0.07885, "95": 0.0779, "100": 0.08116}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..570a49b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84032, "10": 10.8134, "15": 10.80276, "20": 10.70493, "25": 10.53847, "30": 10.35518, "35": 10.27143, "40": 10.08046, "45": 9.82288, "50": 9.90114, "55": 9.86426, "60": 9.48028, "65": 8.93744, "70": 9.71023, "75": 9.40882, "80": 9.39078, "85": 9.59744, "90": 9.8039, "95": 9.50564, "100": 9.38814}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1670.0, "5": 1970.0, "10": 1397.0, "15": 1886.0, "20": 1785.0, "25": 1695.0, "30": 2086.0, "35": 1976.0, "40": 2349.0, "45": 2240.0, "50": 2338.0, "55": 2364.0, "60": 2474.0, "65": 2762.0, "70": 3207.0, "75": 2625.0, "80": 3502.0, "85": 3356.0, "90": 3142.0, "95": 3385.0, "100": 3449.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 552238592.0, "5": 552238592.0, "10": 552238592.0, "15": 552238592.0, "20": 552238592.0, "25": 552238592.0, "30": 552238592.0, "35": 552238592.0, "40": 552238592.0, "45": 552238592.0, "50": 552238592.0, "55": 552238592.0, "60": 552238592.0, "65": 552238592.0, "70": 552238592.0, "75": 552238592.0, "80": 552238592.0, "85": 552238592.0, "90": 552238592.0, "95": 552238592.0, "100": 552238592.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4576563200.0, "5": 4673179648.0, "10": 4673179648.0, "15": 4673179648.0, "20": 4673179648.0, "25": 4673179648.0, "30": 4673179648.0, "35": 4673179648.0, "40": 4673179648.0, "45": 4673179648.0, "50": 4673179648.0, "55": 4673179648.0, "60": 4673179648.0, "65": 4673179648.0, "70": 4673179648.0, "75": 4673179648.0, "80": 4673179648.0, "85": 4673179648.0, "90": 4673179648.0, "95": 4673179648.0, "100": 4673179648.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41235, "5": 0.08775, "10": 0.08849, "15": 0.08737, "20": 0.08713, "25": 0.08696, "30": 0.08757, "35": 0.08803, "40": 0.08782, "45": 0.08739, "50": 0.08653, "55": 0.08734, "60": 0.08891, "65": 0.1011, "70": 0.08925, "75": 0.08826, "80": 0.08863, "85": 0.08797, "90": 0.08896, "95": 0.08827, "100": 0.08947}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index fcaad99..509996b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..6766395 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84023, "5": 10.84074, "10": 10.81392, "15": 10.80242, "20": 10.70474, "25": 10.53872, "30": 10.35534, "35": 10.27156, "40": 10.08035, "45": 9.82307, "50": 9.90117, "55": 9.86415, "60": 9.48061, "65": 8.9376, "70": 9.71013, "75": 9.40885, "80": 9.39066, "85": 9.59761, "90": 9.80368, "95": 9.50575, "100": 9.38809}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1659.0, "5": 1886.0, "10": 1413.0, "15": 1912.0, "20": 1710.0, "25": 1666.0, "30": 2033.0, "35": 2032.0, "40": 2271.0, "45": 2171.0, "50": 2321.0, "55": 2330.0, "60": 2399.0, "65": 2573.0, "70": 3346.0, "75": 2588.0, "80": 3342.0, "85": 3296.0, "90": 3157.0, "95": 3269.0, "100": 3445.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1232487936.0, "5": 1232487936.0, "10": 1232487936.0, "15": 1232487936.0, "20": 1232487936.0, "25": 1232487936.0, "30": 1232487936.0, "35": 1232487936.0, "40": 1232487936.0, "45": 1232487936.0, "50": 1232487936.0, "55": 1232487936.0, "60": 1232487936.0, "65": 1232487936.0, "70": 1232487936.0, "75": 1232487936.0, "80": 1232487936.0, "85": 1232487936.0, "90": 1232487936.0, "95": 1232487936.0, "100": 1232487936.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1984492544.0, "5": 2534070272.0, "10": 2534070272.0, "15": 2534070272.0, "20": 2534070272.0, "25": 2534070272.0, "30": 2534070272.0, "35": 2534070272.0, "40": 2534070272.0, "45": 2534070272.0, "50": 2534070272.0, "55": 2534070272.0, "60": 2534070272.0, "65": 2534070272.0, "70": 2534070272.0, "75": 2534070272.0, "80": 2534070272.0, "85": 2534070272.0, "90": 2534070272.0, "95": 2534070272.0, "100": 2534070272.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": 
{"1": 6.57779, "5": 0.12157, "10": 0.11891, "15": 0.1176, "20": 0.11702, "25": 0.11688, "30": 0.11766, "35": 0.11769, "40": 0.11717, "45": 0.11722, "50": 0.11804, "55": 0.11618, "60": 0.11829, "65": 0.11649, "70": 0.11804, "75": 0.11577, "80": 0.11793, "85": 0.11663, "90": 0.1178, "95": 0.11648, "100": 0.11531}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..a522ad2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84032, "10": 10.81341, "15": 10.80278, "20": 10.70496, "25": 10.53846, "30": 10.35517, "35": 10.27147, "40": 10.08045, "45": 9.82292, "50": 9.90114, "55": 9.86422, "60": 9.48029, "65": 8.93749, "70": 9.71025, "75": 9.40879, "80": 9.39077, "85": 9.59743, "90": 9.80386, "95": 9.50565, "100": 9.38812}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1670.0, "5": 1970.0, "10": 1436.0, "15": 1918.0, "20": 1786.0, "25": 1610.0, "30": 2039.0, "35": 2001.0, "40": 2321.0, "45": 2205.0, "50": 2365.0, "55": 2489.0, "60": 2508.0, "65": 2719.0, "70": 3241.0, "75": 2643.0, "80": 3368.0, "85": 3336.0, "90": 2961.0, "95": 3533.0, "100": 3432.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1230390272.0, "5": 1230390272.0, "10": 1230390272.0, "15": 1230390272.0, "20": 1230390272.0, "25": 1230390272.0, "30": 1230390272.0, "35": 1230390272.0, "40": 1230390272.0, "45": 1230390272.0, "50": 1230390272.0, "55": 1230390272.0, "60": 1230390272.0, "65": 1230390272.0, "70": 1230390272.0, "75": 1230390272.0, "80": 1230390272.0, "85": 1230390272.0, "90": 1230390272.0, "95": 1230390272.0, "100": 1230390272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1984492032.0, "5": 2531972608.0, "10": 2531972608.0, "15": 2531972608.0, "20": 2531972608.0, "25": 2531972608.0, "30": 2531972608.0, "35": 2531972608.0, "40": 2531972608.0, "45": 2531972608.0, "50": 2531972608.0, "55": 2531972608.0, "60": 2531972608.0, "65": 2531972608.0, "70": 2531972608.0, "75": 2531972608.0, "80": 2531972608.0, "85": 2531972608.0, "90": 2531972608.0, "95": 2531972608.0, "100": 2531972608.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.60398, "5": 0.12229, "10": 0.12251, "15": 0.12206, "20": 0.1226, "25": 0.12185, "30": 0.12287, "35": 0.12365, "40": 0.12186, "45": 0.12198, "50": 0.1223, "55": 0.12246, "60": 0.12181, "65": 0.12238, "70": 0.12276, "75": 0.12137, "80": 0.12307, "85": 0.1219, "90": 0.1217, "95": 0.12183, "100": 0.12252}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml index 1741647..100956c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json index 75bf20e..91631bf 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8401, - 10.87259, - 10.85023, - 10.79646, - 10.68153, - 10.60619, - 10.12767, - 10.22185, - 10.13787, - 9.82307 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1698.0, - 1855.0, - 1896.0, - 1866.0, - 2032.0, - 1814.0, - 1664.0, - 1961.0, - 2306.0, - 2403.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 8.00253, - 0.13176, - 0.13026, - 0.13184, - 0.13023, - 0.13135, - 0.13014, - 0.13143, - 0.1305, - 0.13191 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84034, "10": 10.81341, "15": 10.80277, "20": 10.70495, "25": 10.53848, "30": 10.35523, "35": 10.27145, "40": 10.08043, "45": 9.82293, "50": 9.90114}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1698.0, "5": 1900.0, "10": 1454.0, "15": 1969.0, "20": 1774.0, "25": 1736.0, "30": 1970.0, "35": 1941.0, "40": 2237.0, "45": 2180.0, "50": 2328.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1230390272.0, "5": 1230390272.0, "10": 1230390272.0, "15": 1230390272.0, "20": 1230390272.0, "25": 1230390272.0, "30": 1230390272.0, "35": 1230390272.0, "40": 1230390272.0, "45": 1230390272.0, "50": 1230390272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1984492032.0, "5": 2531972608.0, "10": 2531972608.0, "15": 2531972608.0, "20": 2531972608.0, "25": 2531972608.0, "30": 2531972608.0, "35": 2531972608.0, "40": 2531972608.0, "45": 2531972608.0, "50": 2531972608.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.57733, "5": 0.12925, "10": 0.12965, "15": 0.12911, "20": 0.12836, "25": 0.12886, "30": 0.12957, "35": 0.12947, "40": 0.12911, "45": 0.12814, "50": 0.12753}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json index 94554bb..69576a1 100644 
--- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.8401, 10.87262, 10.85023, 10.79645, 10.68149, 10.60617, 10.1277, 10.22183, 10.13794, 9.8231]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1670.0, 1901.0, 1923.0, 1922.0, 2020.0, 1815.0, 1713.0, 1963.0, 2266.0, 2324.0]}, "iteration_timing_avg": 0.09164500000000002} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8401, "5": 10.84032, "10": 10.81341, "15": 10.80278, "20": 10.70496, "25": 10.53846, "30": 10.35517, "35": 10.27147, "40": 10.08045, "45": 9.82292, "50": 9.90114}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1670.0, "5": 1970.0, "10": 1436.0, "15": 1918.0, "20": 1786.0, "25": 1610.0, "30": 2039.0, "35": 2001.0, "40": 2321.0, "45": 2205.0, "50": 2365.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1230390272.0, "5": 1230390272.0, "10": 1230390272.0, "15": 1230390272.0, "20": 1230390272.0, "25": 1230390272.0, "30": 1230390272.0, "35": 1230390272.0, "40": 1230390272.0, "45": 1230390272.0, "50": 1230390272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1984492032.0, "5": 2531972608.0, "10": 2531972608.0, "15": 2531972608.0, "20": 2531972608.0, "25": 2531972608.0, "30": 2531972608.0, "35": 2531972608.0, "40": 2531972608.0, "45": 2531972608.0, "50": 2531972608.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.0418, "5": 0.12998, "10": 0.12656, "15": 0.12621, "20": 0.13103, "25": 0.12628, "30": 0.12409, "35": 0.12632, "40": 0.13313, "45": 0.12545, "50": 0.12421}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml index 7f0d52a..b817ae1 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json index 206d789..d9bef72 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82974, 10.85934, 10.88536, 10.78981, 10.64534, 10.56415, 9.99534, 10.13972, 10.06259, 9.71481]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [261.0, 256.0, 258.0, 250.0, 243.0, 265.0, 254.0, 299.0, 299.0, 294.0]}, "iteration_timing_avg": 0.3993126470588235} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82974, "5": 10.84387, "10": 10.79336, "15": 10.77992, "20": 10.67707, "25": 10.48581, "30": 10.28464, "35": 10.18863, "40": 9.99275, "45": 9.72154, "50": 9.82122}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 214.0, "5": 270.0, "10": 224.0, "15": 235.0, "20": 242.0, "25": 260.0, "30": 280.0, "35": 300.0, "40": 334.0, "45": 324.0, "50": 298.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 829378048.0, "5": 829378048.0, "10": 829378048.0, "15": 829378048.0, "20": 829378048.0, "25": 829378048.0, "30": 829378048.0, "35": 829378048.0, "40": 829378048.0, "45": 829378048.0, "50": 829378048.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 891564544.0, "5": 1248933376.0, "10": 1250505728.0, "15": 1250505728.0, "20": 1250505728.0, "25": 1250505728.0, "30": 1250505728.0, "35": 1250505728.0, "40": 1250505728.0, "45": 1250505728.0, "50": 1250505728.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 20.58657, "5": 0.44565, "10": 0.45716, "15": 0.50953, "20": 0.44872, "25": 0.44791, "30": 0.44871, "35": 0.44188, "40": 0.44233, "45": 0.44161, "50": 0.44069}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json index c0c3ead..2f6e74a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85803, 10.88122, 10.85832, 10.80987, 10.66115, 10.55375, 10.01843, 10.14234, 10.05958, 9.71149]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [244.0, 231.0, 243.0, 257.0, 247.0, 267.0, 256.0, 299.0, 318.0, 325.0]}, "iteration_timing_avg": 0.3993126470588235} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82975, "5": 10.8439, "10": 10.79337, "15": 10.77994, "20": 10.67712, "25": 10.48584, "30": 10.28468, "35": 10.18859, "40": 9.99279, "45": 9.72153, "50": 9.82127}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 226.0, "5": 275.0, "10": 181.0, "15": 253.0, "20": 248.0, "25": 207.0, "30": 265.0, "35": 281.0, "40": 315.0, "45": 282.0, "50": 336.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 831212544.0, "5": 831212544.0, "10": 831212544.0, "15": 831212544.0, "20": 831212544.0, "25": 831212544.0, "30": 831212544.0, "35": 831212544.0, "40": 
831212544.0, "45": 831212544.0, "50": 831212544.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 891582464.0, "5": 1250786304.0, "10": 1250786304.0, "15": 1250786304.0, "20": 1250786304.0, "25": 1251832320.0, "30": 1251832320.0, "35": 1251832320.0, "40": 1251832320.0, "45": 1251832320.0, "50": 1251832320.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.63617, "5": 0.42436, "10": 0.41552, "15": 0.4158, "20": 0.41223, "25": 0.40643, "30": 0.40417, "35": 0.40442, "40": 0.40546, "45": 0.40627, "50": 0.40596}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml index 425f3b9..983a4e7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,5 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash - + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..32970e9 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82974, "5": 10.84387, "10": 10.79336, "15": 10.77992, "20": 10.67707, "25": 10.48581, "30": 10.28464, "35": 10.18863, "40": 9.99275, "45": 9.72154, "50": 9.82122, "55": 9.79605, "60": 9.41615, "65": 8.85917, "70": 9.67001, "75": 9.3564, "80": 9.34748, "85": 9.55946, "90": 9.77362, "95": 9.47863, "100": 9.35146}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 214.0, "5": 270.0, "10": 224.0, "15": 235.0, "20": 242.0, "25": 260.0, "30": 280.0, "35": 300.0, "40": 334.0, "45": 324.0, "50": 298.0, "55": 390.0, "60": 342.0, "65": 394.0, "70": 411.0, "75": 319.0, "80": 414.0, "85": 441.0, "90": 381.0, "95": 398.0, "100": 431.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 829378048.0, "5": 829378048.0, "10": 829378048.0, "15": 829378048.0, "20": 829378048.0, "25": 829378048.0, "30": 829378048.0, "35": 829378048.0, "40": 829378048.0, "45": 829378048.0, "50": 829378048.0, "55": 829378048.0, "60": 829378048.0, "65": 829378048.0, "70": 829378048.0, "75": 829378048.0, "80": 829378048.0, "85": 829378048.0, "90": 829378048.0, "95": 829378048.0, "100": 829378048.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, 
"step_interval": 5, "values": {"1": 892610560.0, "5": 1248933376.0, "10": 1248933376.0, "15": 1248933376.0, "20": 1248933376.0, "25": 1248933376.0, "30": 1248933376.0, "35": 1249456128.0, "40": 1249456128.0, "45": 1249456128.0, "50": 1249980928.0, "55": 1249980928.0, "60": 1249980928.0, "65": 1249980928.0, "70": 1249980928.0, "75": 1250504192.0, "80": 1250504192.0, "85": 1250504192.0, "90": 1250505728.0, "95": 1250505728.0, "100": 1250505728.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 24.66296, "5": 0.45069, "10": 0.44192, "15": 0.44436, "20": 0.442, "25": 0.44288, "30": 0.44618, "35": 0.44139, "40": 0.44072, "45": 0.44429, "50": 0.43893, "55": 0.43569, "60": 0.43551, "65": 0.43912, "70": 0.44568, "75": 0.44023, "80": 0.43745, "85": 0.43617, "90": 0.43925, "95": 0.43653, "100": 0.43561}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..378ad0d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82975, "5": 10.8439, "10": 10.79337, "15": 10.77994, "20": 10.67712, "25": 10.48584, "30": 10.28468, "35": 10.18859, "40": 9.99279, "45": 9.72153, "50": 9.82127, "55": 9.79611, "60": 9.41616, "65": 8.85917, "70": 9.67001, "75": 9.35641, "80": 9.34751, "85": 9.55947, "90": 9.77366, "95": 9.47865, "100": 9.35145}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 226.0, "5": 275.0, "10": 181.0, "15": 253.0, "20": 248.0, "25": 207.0, "30": 265.0, "35": 281.0, "40": 315.0, "45": 282.0, "50": 336.0, "55": 373.0, "60": 343.0, "65": 389.0, "70": 436.0, "75": 337.0, "80": 395.0, "85": 419.0, "90": 412.0, "95": 405.0, "100": 394.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 831212544.0, "5": 831212544.0, "10": 831212544.0, "15": 831212544.0, "20": 831212544.0, "25": 831212544.0, "30": 831212544.0, "35": 831212544.0, "40": 831212544.0, "45": 831212544.0, "50": 831212544.0, "55": 831212544.0, "60": 831212544.0, "65": 831212544.0, "70": 831212544.0, "75": 831212544.0, "80": 831212544.0, "85": 831212544.0, "90": 831212544.0, "95": 831212544.0, "100": 831212544.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 891582464.0, "5": 1250786304.0, "10": 1250786304.0, "15": 1250786304.0, "20": 1250786304.0, "25": 1250786304.0, "30": 1250786304.0, "35": 1250786304.0, "40": 1251834880.0, "45": 1251834880.0, "50": 1251834880.0, "55": 1251834880.0, "60": 1251834880.0, "65": 1251834880.0, "70": 1251834880.0, "75": 1251834880.0, "80": 1251834880.0, "85": 1251834880.0, "90": 1251834880.0, "95": 1251834880.0, "100": 1251834880.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.7102, "5": 0.46307, "10": 0.41777, "15": 0.41661, "20": 0.41769, "25": 0.42698, "30": 0.41765, "35": 0.42804, "40": 0.42081, "45": 0.42234, "50": 0.41276, "55": 0.43287, "60": 0.43055, "65": 0.43352, "70": 0.42189, "75": 0.42153, "80": 0.41723, "85": 0.40522, "90": 0.40231, "95": 0.4016, 
"100": 0.40172}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml index 9e04bf4..d23dd03 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..6e80790 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82949, "5": 10.84768, "10": 10.79952, "15": 10.83278, "20": 10.75815, "25": 10.59944, "30": 10.44255, "35": 10.35518, "40": 10.17871, "45": 9.93731, "50": 9.99597, "55": 9.96506, "60": 9.59206, "65": 9.01654, "70": 9.78255, "75": 9.48023, "80": 9.4506, "85": 9.65781, "90": 9.84565, "95": 9.54832, "100": 9.43863}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30437.0, "5": 35925.0, "10": 29186.0, "15": 34264.0, "20": 32053.0, "25": 30879.0, "30": 33163.0, "35": 34561.0, "40": 35765.0, "45": 35584.0, "50": 39786.0, "55": 37204.0, "60": 40266.0, "65": 41421.0, "70": 45637.0, "75": 40348.0, "80": 46876.0, "85": 49638.0, "90": 49468.0, "95": 47017.0, "100": 45528.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 936543232.0, "5": 936543744.0, "10": 936542720.0, "15": 936543232.0, "20": 936544768.0, "25": 936543232.0, "30": 936543232.0, "35": 936541184.0, "40": 936542720.0, "45": 936543232.0, "50": 936544256.0, "55": 936546816.0, "60": 936547328.0, "65": 936556032.0, "70": 936546816.0, "75": 936544256.0, "80": 936556544.0, "85": 936553984.0, "90": 936546304.0, "95": 936548352.0, "100": 936551936.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2451792384.0, "5": 2720901120.0, "10": 2720901120.0, "15": 2720901120.0, "20": 2720901120.0, "25": 2720901120.0, "30": 2720901120.0, "35": 2721362432.0, "40": 2721362432.0, "45": 2721362432.0, "50": 2724393984.0, "55": 2724393984.0, "60": 2728018432.0, "65": 2738826240.0, "70": 2738826240.0, "75": 2740684288.0, "80": 2740684288.0, "85": 2740684288.0, "90": 2741338624.0, 
"95": 2741338624.0, "100": 2741338624.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.42632, "5": 0.24404, "10": 0.242, "15": 0.23944, "20": 0.23931, "25": 0.23806, "30": 0.23357, "35": 0.23421, "40": 0.23628, "45": 0.23522, "50": 0.23575, "55": 0.24699, "60": 0.24808, "65": 0.25066, "70": 0.23754, "75": 0.23814, "80": 0.23925, "85": 0.23699, "90": 0.23541, "95": 0.23763, "100": 0.23866}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b07c2c2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82949, "5": 10.84751, "10": 10.79994, "15": 10.83348, "20": 10.75739, "25": 10.59863, "30": 10.44207, "35": 10.35534, "40": 10.17846, "45": 9.93775, "50": 9.99583, "55": 9.96526, "60": 9.59209, "65": 9.01675, "70": 9.78268, "75": 9.4802, "80": 9.45051, "85": 9.65787, "90": 9.84587, "95": 9.54779, "100": 9.43905}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30304.0, "5": 35542.0, "10": 29062.0, "15": 34559.0, "20": 31981.0, "25": 30845.0, "30": 32894.0, "35": 34952.0, "40": 36358.0, "45": 35638.0, "50": 40119.0, "55": 36895.0, "60": 39710.0, "65": 41463.0, "70": 45566.0, "75": 40307.0, "80": 46882.0, "85": 50049.0, "90": 49238.0, "95": 47300.0, "100": 45898.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 936567296.0, "5": 936566784.0, "10": 936566784.0, "15": 936567808.0, "20": 936568832.0, "25": 936565760.0, "30": 936568320.0, "35": 936564736.0, "40": 936566784.0, "45": 936566784.0, "50": 936567808.0, "55": 936570880.0, "60": 936570880.0, "65": 936580608.0, "70": 936571392.0, "75": 936568320.0, "80": 936580608.0, "85": 936578560.0, "90": 936569856.0, "95": 936572416.0, "100": 936576512.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 24.80877, "5": 0.2156, "10": 0.23039, "15": 0.21152, "20": 0.21327, "25": 0.2116, "30": 0.20846, "35": 0.2099, "40": 0.20891, "45": 0.20828, "50": 0.20799, "55": 0.20851, "60": 0.20961, "65": 0.21172, "70": 0.20966, "75": 0.20994, "80": 0.21009, "85": 0.20683, "90": 0.20599, "95": 0.20814, "100": 0.20924}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index 2d2c1ce..bcfb190 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 
--timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -52,4 +52,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..de386e4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84764, "5": 10.86567, "10": 10.82469, "15": 10.81348, "20": 10.72058, "25": 10.53162, "30": 10.33683, "35": 10.24089, "40": 10.05113, "45": 9.76815, "50": 9.85503, "55": 9.82458, "60": 9.44286, "65": 8.89124, "70": 9.67905, "75": 9.36822, "80": 9.35789, "85": 9.56054, "90": 9.77055, "95": 9.48111, "100": 9.34966}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1736.0, "5": 1989.0, "10": 1643.0, "15": 1984.0, "20": 1713.0, "25": 1775.0, "30": 2005.0, "35": 2093.0, "40": 2238.0, "45": 2229.0, "50": 2348.0, "55": 2407.0, "60": 2545.0, "65": 2732.0, "70": 3041.0, "75": 2930.0, "80": 3261.0, "85": 3370.0, "90": 3188.0, "95": 3193.0, "100": 3397.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 894390272.0, "5": 894390272.0, "10": 894390272.0, "15": 894390272.0, "20": 894390272.0, "25": 894390272.0, "30": 894390272.0, "35": 894390272.0, "40": 894390272.0, "45": 894390272.0, "50": 894390272.0, "55": 894390272.0, "60": 894390272.0, "65": 894390272.0, "70": 894390272.0, "75": 894390272.0, "80": 894390272.0, "85": 894390272.0, "90": 894390272.0, "95": 894390272.0, "100": 894390272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2233004032.0, "5": 2597712896.0, "10": 2597712896.0, "15": 2597712896.0, "20": 2597712896.0, "25": 2597712896.0, "30": 2597712896.0, "35": 2597712896.0, "40": 2597712896.0, "45": 2597712896.0, "50": 2597712896.0, "55": 2597712896.0, "60": 2597712896.0, "65": 2597712896.0, "70": 2597712896.0, "75": 2597712896.0, "80": 2597712896.0, "85": 2597712896.0, "90": 2597712896.0, "95": 2597712896.0, "100": 2597712896.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.04286, "5": 0.12525, "10": 0.12905, "15": 0.12687, "20": 0.12848, "25": 0.12854, "30": 0.12621, "35": 0.1283, "40": 0.12782, "45": 0.12535, "50": 0.12584, "55": 0.12504, "60": 0.1249, "65": 0.36941, "70": 0.12553, "75": 0.12455, "80": 0.12658, "85": 0.12479, "90": 0.12521, "95": 0.12546, "100": 0.1255}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..43d09ed --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8468, "5": 10.8657, "10": 10.82411, "15": 10.8128, "20": 10.72008, "25": 10.53151, "30": 10.33655, "35": 10.24133, "40": 10.05096, "45": 9.76804, "50": 9.85531, "55": 9.82458, "60": 9.4433, "65": 8.89103, "70": 9.67922, "75": 9.36864, "80": 9.35829, "85": 9.56053, "90": 9.77063, "95": 9.48104, "100": 9.34984}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1707.0, "5": 2121.0, "10": 1606.0, "15": 1959.0, "20": 1756.0, "25": 1848.0, "30": 2091.0, "35": 2089.0, "40": 2156.0, "45": 2137.0, "50": 2317.0, "55": 2485.0, "60": 2487.0, "65": 2748.0, "70": 3067.0, "75": 2801.0, "80": 3131.0, "85": 3343.0, "90": 3084.0, "95": 3062.0, "100": 3270.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0, "55": 888098304.0, "60": 888098304.0, "65": 888098304.0, "70": 888098304.0, "75": 888098304.0, "80": 888098304.0, "85": 888098304.0, "90": 888098304.0, "95": 888098304.0, "100": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3212632576.0, "5": 3572098560.0, "10": 3572098560.0, "15": 3572098560.0, "20": 3572098560.0, "25": 3572098560.0, "30": 3572098560.0, "35": 3572098560.0, "40": 3572098560.0, "45": 3572098560.0, "50": 3572098560.0, "55": 3572098560.0, "60": 3572098560.0, "65": 3572098560.0, "70": 3572098560.0, "75": 3572098560.0, "80": 3572098560.0, "85": 3572098560.0, "90": 3572098560.0, "95": 3572098560.0, "100": 3572098560.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.16354, "5": 0.14461, "10": 0.14503, "15": 0.14287, "20": 0.14648, "25": 0.14267, "30": 0.14304, "35": 0.14471, "40": 0.14334, "45": 0.14299, "50": 0.14181, "55": 0.14263, "60": 0.14235, "65": 0.14203, "70": 0.14227, "75": 0.14188, "80": 0.14258, "85": 0.14302, "90": 0.14176, "95": 0.14354, "100": 0.14267}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml index 7689c48..b6608bc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..2506309 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84554, "5": 10.86415, "10": 10.82215, "15": 10.81274, "20": 10.71915, "25": 10.53056, "30": 10.33604, "35": 10.24047, "40": 10.05025, "45": 9.76775, "50": 9.85479, "55": 9.82458, "60": 9.44264, "65": 8.89112, "70": 9.6789, "75": 9.36801, "80": 9.3576, "85": 9.56029, "90": 9.77049, "95": 9.48101, "100": 9.34984}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1700.0, "5": 2064.0, "10": 1561.0, "15": 1975.0, "20": 1696.0, "25": 1796.0, "30": 2014.0, "35": 2041.0, "40": 2189.0, "45": 2150.0, "50": 2403.0, "55": 2453.0, "60": 2540.0, "65": 2707.0, "70": 3080.0, "75": 2725.0, "80": 3156.0, "85": 3362.0, "90": 3032.0, "95": 3108.0, "100": 3352.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 890195968.0, "5": 890195968.0, "10": 890195968.0, "15": 890195968.0, "20": 890195968.0, "25": 890195968.0, "30": 890195968.0, "35": 890195968.0, "40": 890195968.0, "45": 890195968.0, "50": 890195968.0, "55": 890195968.0, "60": 890195968.0, "65": 890195968.0, "70": 890195968.0, "75": 890195968.0, "80": 890195968.0, "85": 890195968.0, "90": 890195968.0, "95": 890195968.0, "100": 890195968.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2236149760.0, "5": 2596664320.0, "10": 2596664320.0, "15": 2596664320.0, "20": 2596664320.0, "25": 2596664320.0, "30": 2596664320.0, "35": 2596664320.0, "40": 2596664320.0, "45": 2596664320.0, "50": 2596664320.0, "55": 2596664320.0, "60": 2596664320.0, "65": 2596664320.0, "70": 2596664320.0, "75": 2596664320.0, "80": 2596664320.0, "85": 2596664320.0, "90": 2596664320.0, "95": 2596664320.0, "100": 2596664320.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.95666, "5": 0.15388, "10": 0.15258, "15": 0.15019, "20": 0.14968, "25": 0.14923, "30": 0.14924, "35": 0.14855, "40": 0.14992, "45": 0.14894, "50": 0.14897, "55": 0.15057, "60": 0.14854, "65": 0.14894, "70": 0.15078, "75": 0.14842, "80": 0.1482, "85": 0.14764, "90": 0.14679, "95": 0.14761, "100": 0.1488}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..713f0a3 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.84474, "5": 10.86418, "10": 10.82155, "15": 10.81195, "20": 10.71872, "25": 10.53036, "30": 10.3358, "35": 10.24082, "40": 10.05008, "45": 9.76762, 
"50": 9.85505, "55": 9.82465, "60": 9.44305, "65": 8.89104, "70": 9.67902, "75": 9.36836, "80": 9.35799, "85": 9.56032, "90": 9.77055, "95": 9.48101, "100": 9.34997}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1776.0, "5": 2128.0, "10": 1615.0, "15": 2021.0, "20": 1775.0, "25": 1916.0, "30": 2029.0, "35": 2107.0, "40": 2174.0, "45": 2110.0, "50": 2363.0, "55": 2460.0, "60": 2462.0, "65": 2724.0, "70": 2952.0, "75": 2823.0, "80": 3222.0, "85": 3314.0, "90": 3087.0, "95": 3146.0, "100": 3331.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0, "55": 888098304.0, "60": 888098304.0, "65": 888098304.0, "70": 888098304.0, "75": 888098304.0, "80": 888098304.0, "85": 888098304.0, "90": 888098304.0, "95": 888098304.0, "100": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3215778304.0, "5": 3575244288.0, "10": 3575244288.0, "15": 3575244288.0, "20": 3575244288.0, "25": 3575244288.0, "30": 3575244288.0, "35": 3575244288.0, "40": 3575244288.0, "45": 3575244288.0, "50": 3575244288.0, "55": 3575244288.0, "60": 3575244288.0, "65": 3575244288.0, "70": 3575244288.0, "75": 3575244288.0, "80": 3575244288.0, "85": 3575244288.0, "90": 3575244288.0, "95": 3575244288.0, "100": 3575244288.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.33569, "5": 0.16136, "10": 0.15782, "15": 0.15802, "20": 0.15824, "25": 0.16808, "30": 0.16851, "35": 0.1675, "40": 0.16865, "45": 0.16815, "50": 0.16766, "55": 0.1655, "60": 0.16617, "65": 0.16519, "70": 0.16575, "75": 0.16497, "80": 0.16524, "85": 0.16595, "90": 0.16421, "95": 0.16539, "100": 0.16546}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml index 40f4368..ea6eab0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json index 3020fb5..b1b9cbc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json 
+++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8468, - 10.87769, - 10.90302, - 10.82026, - 10.67979, - 10.60157, - 10.06449, - 10.19316, - 10.11411, - 9.76007 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1692.0, - 2044.0, - 2005.0, - 2007.0, - 1945.0, - 1868.0, - 1701.0, - 2085.0, - 2389.0, - 2377.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.20538, - 0.14353, - 0.14213, - 0.14213, - 0.14068, - 0.14104, - 0.14078, - 0.14149, - 0.14065, - 0.14118 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8468, "5": 10.86571, "10": 10.82412, "15": 10.8128, "20": 10.7201, "25": 10.53149, "30": 10.33653, "35": 10.24134, "40": 10.05092, "45": 9.76805, "50": 9.85531}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1692.0, "5": 2135.0, "10": 1681.0, "15": 2053.0, "20": 1708.0, "25": 1835.0, "30": 2038.0, "35": 2087.0, "40": 2276.0, "45": 2125.0, "50": 2363.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3212632576.0, "5": 3572098560.0, "10": 3572098560.0, "15": 3572098560.0, "20": 3572098560.0, "25": 3572098560.0, "30": 3572098560.0, "35": 3572098560.0, "40": 3572098560.0, "45": 3572098560.0, "50": 3572098560.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.88958, "5": 0.14651, "10": 0.14518, "15": 0.14433, "20": 0.14484, "25": 0.14428, "30": 0.14459, "35": 0.1448, "40": 0.14541, "45": 0.14409, "50": 0.14459}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json index 2778958..0eb1873 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.8468, 10.87772, 10.90302, 10.82024, 10.67979, 10.60157, 10.06448, 10.19311, 10.1141, 9.76008]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1707.0, 2086.0, 2030.0, 2000.0, 1910.0, 1894.0, 1744.0, 2071.0, 2344.0, 2377.0]}, "iteration_timing_avg": 0.11051617647058823} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8468, "5": 10.8657, "10": 10.82411, "15": 10.8128, "20": 10.72008, "25": 10.53151, "30": 10.33655, "35": 10.24133, "40": 10.05096, "45": 9.76804, "50": 9.85531}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1707.0, "5": 2121.0, "10": 1606.0, "15": 1959.0, "20": 1756.0, "25": 1848.0, "30": 2091.0, "35": 2089.0, "40": 2156.0, 
"45": 2137.0, "50": 2317.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3212632576.0, "5": 3572098560.0, "10": 3572098560.0, "15": 3572098560.0, "20": 3572098560.0, "25": 3572098560.0, "30": 3572098560.0, "35": 3572098560.0, "40": 3572098560.0, "45": 3572098560.0, "50": 3572098560.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.69368, "5": 0.1479, "10": 0.14574, "15": 0.14499, "20": 0.14659, "25": 0.14524, "30": 0.14507, "35": 0.14609, "40": 0.1467, "45": 0.14341, "50": 0.14274}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml index 922b5eb..4a02f01 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json index 50486e0..8e109a9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.84474, - 10.87688, - 10.90253, - 10.81872, - 10.67849, - 10.60076, - 10.06361, - 10.19267, - 10.11344, - 9.75987 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1769.0, - 2129.0, - 1987.0, - 1961.0, - 1961.0, - 1886.0, - 1655.0, - 2130.0, - 2315.0, - 2362.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 8.72642, - 0.16194, - 0.15926, - 0.15956, - 0.15972, - 0.1623, - 0.16029, - 0.15863, - 0.15947, - 0.15935 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84474, "5": 10.8642, "10": 10.82152, "15": 10.81201, "20": 10.71869, "25": 10.53034, "30": 10.33576, "35": 10.24082, "40": 10.05009, "45": 9.76761, "50": 9.85505}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 
1769.0, "5": 2061.0, "10": 1636.0, "15": 2011.0, "20": 1779.0, "25": 1875.0, "30": 2074.0, "35": 2069.0, "40": 2190.0, "45": 2153.0, "50": 2508.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3215778304.0, "5": 3575244288.0, "10": 3575244288.0, "15": 3575244288.0, "20": 3575244288.0, "25": 3575244288.0, "30": 3575244288.0, "35": 3575244288.0, "40": 3575244288.0, "45": 3575244288.0, "50": 3575244288.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.69754, "5": 0.16083, "10": 0.16079, "15": 0.16126, "20": 0.16129, "25": 0.16055, "30": 0.1609, "35": 0.16119, "40": 0.16222, "45": 0.16081, "50": 0.15983}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json index 33a65cc..688680f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.84474, 10.87687, 10.90254, 10.81872, 10.67848, 10.60075, 10.06363, 10.19268, 10.11342, 9.75986]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1776.0, 2161.0, 2052.0, 1892.0, 1971.0, 1946.0, 1701.0, 1985.0, 2295.0, 2293.0]}, "iteration_timing_avg": 0.11052176470588236} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84474, "5": 10.86418, "10": 10.82155, "15": 10.81195, "20": 10.71872, "25": 10.53036, "30": 10.3358, "35": 10.24082, "40": 10.05008, "45": 9.76762, "50": 9.85505}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1776.0, "5": 2128.0, "10": 1615.0, "15": 2021.0, "20": 1775.0, "25": 1916.0, "30": 2029.0, "35": 2107.0, "40": 2174.0, "45": 2110.0, "50": 2363.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 888098304.0, "5": 888098304.0, "10": 888098304.0, "15": 888098304.0, "20": 888098304.0, "25": 888098304.0, "30": 888098304.0, "35": 888098304.0, "40": 888098304.0, "45": 888098304.0, "50": 888098304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3215778304.0, "5": 3575244288.0, "10": 3575244288.0, "15": 3575244288.0, "20": 3575244288.0, "25": 3575244288.0, "30": 3575244288.0, "35": 3575244288.0, "40": 3575244288.0, "45": 3575244288.0, "50": 3575244288.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.1728, "5": 0.15783, "10": 0.15696, "15": 0.15564, "20": 0.15887, "25": 0.15731, "30": 0.15635, "35": 0.1571, "40": 0.15637, "45": 0.15705, "50": 0.15413}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml index 4220658..aa3798e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json index cd1e766..e1b4c50 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79205, - 10.86789, - 10.89149, - 10.78328, - 10.66126, - 10.58275, - 10.08467, - 10.19448, - 10.13785, - 9.81454 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1580.0, - 1778.0, - 1849.0, - 1841.0, - 1884.0, - 1679.0, - 1544.0, - 1953.0, - 2449.0, - 2335.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79458, - 0.16744, - 0.16286, - 0.16276, - 0.16292, - 0.16346, - 0.16288, - 0.16273, - 0.16282, - 0.16245 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 716833792.0, "5": 716833792.0, "10": 716833792.0, "15": 716833792.0, "20": 716833792.0, "25": 716833792.0, "30": 716833792.0, "35": 716833792.0, "40": 716833792.0, "45": 716833792.0, "50": 716833792.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2683412480.0, "10": 2683412480.0, "15": 2683412480.0, "20": 2683412480.0, "25": 2683412480.0, "30": 2683412480.0, "35": 2683412480.0, "40": 2683412480.0, "45": 2683412480.0, "50": 2683412480.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.70564, "5": 0.16109, "10": 0.15745, "15": 0.15861, "20": 
0.15886, "25": 0.15817, "30": 0.15999, "35": 0.16113, "40": 0.15887, "45": 0.16006, "50": 0.1597}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json index cdabc8e..9eee80a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79205, 10.86789, 10.89149, 10.78328, 10.66126, 10.58275, 10.08467, 10.19448, 10.13785, 9.81454]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1580.0, 1778.0, 1849.0, 1841.0, 1884.0, 1679.0, 1544.0, 1953.0, 2449.0, 2335.0]}, "iteration_timing_avg": 0.12243558823529416} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 714736640.0, "5": 714736640.0, "10": 714736640.0, "15": 714736640.0, "20": 714736640.0, "25": 714736640.0, "30": 714736640.0, "35": 714736640.0, "40": 714736640.0, "45": 714736640.0, "50": 714736640.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2681315328.0, "10": 2681315328.0, "15": 2681315328.0, "20": 2681315328.0, "25": 2681315328.0, "30": 2681315328.0, "35": 2681315328.0, "40": 2681315328.0, "45": 2681315328.0, "50": 2681315328.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.48918, "5": 0.16426, "10": 0.16419, "15": 0.15777, "20": 0.15716, "25": 0.15773, "30": 0.15842, "35": 0.15959, "40": 0.15581, "45": 0.15603, "50": 0.15595}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml index dcf2920..9cc8edc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..e1b4c50 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 716833792.0, "5": 716833792.0, "10": 716833792.0, "15": 716833792.0, "20": 716833792.0, "25": 716833792.0, "30": 716833792.0, "35": 716833792.0, "40": 716833792.0, "45": 716833792.0, "50": 716833792.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2683412480.0, "10": 2683412480.0, "15": 2683412480.0, "20": 2683412480.0, "25": 2683412480.0, "30": 2683412480.0, "35": 2683412480.0, "40": 2683412480.0, "45": 2683412480.0, "50": 2683412480.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.70564, "5": 0.16109, "10": 0.15745, "15": 0.15861, "20": 0.15886, "25": 0.15817, "30": 0.15999, "35": 0.16113, "40": 0.15887, "45": 0.16006, "50": 0.1597}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..334b7cb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 714736640.0, "5": 714736640.0, "10": 714736640.0, "15": 714736640.0, "20": 714736640.0, "25": 714736640.0, "30": 714736640.0, "35": 714736640.0, "40": 714736640.0, "45": 714736640.0, "50": 714736640.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2681315328.0, "10": 2681315328.0, "15": 2681315328.0, "20": 2681315328.0, "25": 2681315328.0, "30": 2681315328.0, "35": 2681315328.0, "40": 2681315328.0, "45": 2681315328.0, "50": 2681315328.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.48837, "5": 
0.15716, "10": 0.1577, "15": 0.1575, "20": 0.15694, "25": 0.15689, "30": 0.16393, "35": 0.15702, "40": 0.15586, "45": 0.1552, "50": 0.15598}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..730f5b4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --disable-bias-linear: true + --async-save: true + --use-persistent-ckpt-worker: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..96e2364 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79196, "5": 10.84767, "10": 10.76997, "15": 10.79032, "20": 10.68032, "25": 10.5078, "30": 10.3335, "35": 10.25557, "40": 10.05566, "45": 9.80602, "50": 9.89125, "55": 9.87089, "60": 9.4846, "65": 8.94044, "70": 9.7223, "75": 9.40865, "80": 9.39753, "85": 9.60719, "90": 9.81041, "95": 9.51159, "100": 9.39705}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1605.0, "5": 1978.0, "10": 1442.0, "15": 1952.0, "20": 1667.0, "25": 1734.0, "30": 1952.0, "35": 2043.0, "40": 2231.0, "45": 2197.0, "50": 2405.0, "55": 2212.0, "60": 2367.0, "65": 2639.0, "70": 3196.0, "75": 2592.0, "80": 3222.0, "85": 3406.0, "90": 3002.0, "95": 3368.0, "100": 3152.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, 
"step_interval": 5, "values": {"1": 716834304.0, "5": 716834304.0, "10": 716834304.0, "15": 716834304.0, "20": 716834304.0, "25": 716834304.0, "30": 716834304.0, "35": 716834304.0, "40": 716834304.0, "45": 716834304.0, "50": 716834304.0, "55": 716834304.0, "60": 716834304.0, "65": 716834304.0, "70": 716834304.0, "75": 716834304.0, "80": 716834304.0, "85": 716834304.0, "90": 716834304.0, "95": 716834304.0, "100": 716834304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1910424576.0, "5": 2193074176.0, "10": 2193074176.0, "15": 2193074176.0, "20": 2193074176.0, "25": 2193074176.0, "30": 2193074176.0, "35": 2193074176.0, "40": 2193074176.0, "45": 2193074176.0, "50": 2193074176.0, "55": 2193074176.0, "60": 2193074176.0, "65": 2193074176.0, "70": 2193074176.0, "75": 2193074176.0, "80": 2193074176.0, "85": 2193074176.0, "90": 2193074176.0, "95": 2193074176.0, "100": 2193074176.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.46967, "5": 0.1483, "10": 0.14544, "15": 0.14588, "20": 0.14639, "25": 0.14549, "30": 0.14597, "35": 0.14608, "40": 0.14578, "45": 0.14542, "50": 0.14492, "55": 0.14474, "60": 0.14635, "65": 0.14621, "70": 0.14453, "75": 0.14374, "80": 0.14465, "85": 0.14456, "90": 0.14413, "95": 0.14445, "100": 0.14399}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..4e4a644 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082, "55": 9.87063, "60": 9.48478, "65": 8.94022, "70": 9.72243, "75": 9.40907, "80": 9.3976, "85": 9.60746, "90": 9.81041, "95": 9.5116, "100": 9.39722}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0, "55": 2291.0, "60": 2404.0, "65": 2474.0, "70": 3102.0, "75": 2603.0, "80": 3420.0, "85": 3388.0, "90": 2904.0, "95": 3333.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 714736640.0, "5": 714736640.0, "10": 714736640.0, "15": 714736640.0, "20": 714736640.0, "25": 714736640.0, "30": 714736640.0, "35": 714736640.0, "40": 714736640.0, "45": 714736640.0, "50": 714736640.0, "55": 714736640.0, "60": 714736640.0, "65": 714736640.0, "70": 714736640.0, "75": 714736640.0, "80": 714736640.0, "85": 714736640.0, "90": 714736640.0, "95": 714736640.0, "100": 714736640.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2681315328.0, "10": 2681315328.0, "15": 2681315328.0, "20": 2681315328.0, "25": 2681315328.0, "30": 2681315328.0, "35": 2681315328.0, "40": 2681315328.0, "45": 2681315328.0, "50": 2681315328.0, "55": 2681315328.0, "60": 2681315328.0, "65": 2681315328.0, "70": 2681315328.0, "75": 2681315328.0, 
"80": 2681315328.0, "85": 2681315328.0, "90": 2681315328.0, "95": 2681315328.0, "100": 2681315328.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.27792, "5": 0.15646, "10": 0.15784, "15": 0.15721, "20": 0.15673, "25": 0.15668, "30": 0.15634, "35": 0.1575, "40": 0.1572, "45": 0.15552, "50": 0.15469, "55": 0.16595, "60": 0.16703, "65": 0.16692, "70": 0.15969, "75": 0.15799, "80": 0.15892, "85": 0.15874, "90": 0.159, "95": 0.16041, "100": 0.15753}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml index 440638b..eacb176 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..96e2364 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79196, "5": 10.84767, "10": 10.76997, "15": 10.79032, "20": 10.68032, "25": 10.5078, "30": 10.3335, "35": 10.25557, "40": 10.05566, "45": 9.80602, "50": 9.89125, "55": 9.87089, "60": 9.4846, "65": 8.94044, "70": 9.7223, "75": 9.40865, "80": 9.39753, "85": 9.60719, "90": 9.81041, "95": 9.51159, "100": 9.39705}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1605.0, "5": 1978.0, "10": 1442.0, "15": 1952.0, "20": 1667.0, "25": 1734.0, "30": 1952.0, "35": 2043.0, "40": 2231.0, "45": 2197.0, "50": 2405.0, "55": 2212.0, "60": 2367.0, "65": 2639.0, "70": 3196.0, "75": 2592.0, "80": 3222.0, "85": 3406.0, "90": 3002.0, "95": 3368.0, "100": 3152.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 716834304.0, "5": 716834304.0, "10": 716834304.0, "15": 716834304.0, "20": 716834304.0, "25": 716834304.0, "30": 716834304.0, "35": 716834304.0, "40": 716834304.0, "45": 716834304.0, "50": 716834304.0, "55": 716834304.0, "60": 716834304.0, "65": 716834304.0, "70": 716834304.0, "75": 716834304.0, "80": 716834304.0, "85": 716834304.0, "90": 716834304.0, "95": 716834304.0, "100": 716834304.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1910424576.0, 
"5": 2193074176.0, "10": 2193074176.0, "15": 2193074176.0, "20": 2193074176.0, "25": 2193074176.0, "30": 2193074176.0, "35": 2193074176.0, "40": 2193074176.0, "45": 2193074176.0, "50": 2193074176.0, "55": 2193074176.0, "60": 2193074176.0, "65": 2193074176.0, "70": 2193074176.0, "75": 2193074176.0, "80": 2193074176.0, "85": 2193074176.0, "90": 2193074176.0, "95": 2193074176.0, "100": 2193074176.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.46967, "5": 0.1483, "10": 0.14544, "15": 0.14588, "20": 0.14639, "25": 0.14549, "30": 0.14597, "35": 0.14608, "40": 0.14578, "45": 0.14542, "50": 0.14492, "55": 0.14474, "60": 0.14635, "65": 0.14621, "70": 0.14453, "75": 0.14374, "80": 0.14465, "85": 0.14456, "90": 0.14413, "95": 0.14445, "100": 0.14399}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..981f289 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79205, "5": 10.84695, "10": 10.77106, "15": 10.79093, "20": 10.68042, "25": 10.50715, "30": 10.33325, "35": 10.25545, "40": 10.05544, "45": 9.80575, "50": 9.89082, "55": 9.87063, "60": 9.48478, "65": 8.94022, "70": 9.72243, "75": 9.40907, "80": 9.3976, "85": 9.60746, "90": 9.81041, "95": 9.5116, "100": 9.39722}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1580.0, "5": 1901.0, "10": 1346.0, "15": 1926.0, "20": 1643.0, "25": 1683.0, "30": 1867.0, "35": 2020.0, "40": 2252.0, "45": 2243.0, "50": 2459.0, "55": 2291.0, "60": 2404.0, "65": 2474.0, "70": 3102.0, "75": 2603.0, "80": 3420.0, "85": 3388.0, "90": 2904.0, "95": 3333.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 714736640.0, "5": 714736640.0, "10": 714736640.0, "15": 714736640.0, "20": 714736640.0, "25": 714736640.0, "30": 714736640.0, "35": 714736640.0, "40": 714736640.0, "45": 714736640.0, "50": 714736640.0, "55": 714736640.0, "60": 714736640.0, "65": 714736640.0, "70": 714736640.0, "75": 714736640.0, "80": 714736640.0, "85": 714736640.0, "90": 714736640.0, "95": 714736640.0, "100": 714736640.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2399714304.0, "5": 2681315328.0, "10": 2681315328.0, "15": 2681315328.0, "20": 2681315328.0, "25": 2681315328.0, "30": 2681315328.0, "35": 2681315328.0, "40": 2681315328.0, "45": 2681315328.0, "50": 2681315328.0, "55": 2681315328.0, "60": 2681315328.0, "65": 2681315328.0, "70": 2681315328.0, "75": 2681315328.0, "80": 2681315328.0, "85": 2681315328.0, "90": 2681315328.0, "95": 2681315328.0, "100": 2681315328.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.50484, "5": 0.15616, "10": 0.15661, "15": 0.15542, "20": 0.15597, "25": 0.15623, "30": 0.15732, "35": 0.15649, "40": 0.15774, "45": 0.15673, "50": 0.15646, "55": 0.1599, "60": 0.16087, "65": 0.16049, "70": 0.15987, "75": 0.15957, "80": 0.16064, "85": 0.16045, "90": 0.15984, "95": 0.15992, "100": 0.15958}}} \ No 
newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..c1d7558 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --disable-bias-linear: true + --async-save: true + --use-persistent-ckpt-worker: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..5218e9f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79196, "5": 10.84662, "10": 10.76844, "15": 10.78913, "20": 10.67859, "25": 10.50479, "30": 10.33089, "35": 10.25263, "40": 10.05242, "45": 9.80271, "50": 9.8884, "55": 9.86828, "60": 9.48223, "65": 8.93813, "70": 9.72081, "75": 9.40746, "80": 9.39636, "85": 9.60619, "90": 9.80953, "95": 9.51078, "100": 9.39612}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1613.0, "5": 1926.0, "10": 1432.0, "15": 1941.0, "20": 1592.0, "25": 1650.0, "30": 1891.0, "35": 1963.0, "40": 2255.0, "45": 2132.0, "50": 2411.0, "55": 2240.0, "60": 2443.0, "65": 2672.0, "70": 3168.0, "75": 2545.0, "80": 3353.0, "85": 3257.0, "90": 3171.0, "95": 3247.0, "100": 3375.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 717083136.0, "5": 717083136.0, "10": 717083136.0, "15": 
717083136.0, "20": 717083136.0, "25": 717083136.0, "30": 717083136.0, "35": 717083136.0, "40": 717083136.0, "45": 717083136.0, "50": 717083136.0, "55": 717083136.0, "60": 717083136.0, "65": 717083136.0, "70": 717083136.0, "75": 717083136.0, "80": 717083136.0, "85": 717083136.0, "90": 717083136.0, "95": 717083136.0, "100": 717083136.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1910562816.0, "5": 2193323008.0, "10": 2193323008.0, "15": 2193323008.0, "20": 2193323008.0, "25": 2193323008.0, "30": 2193323008.0, "35": 2193323008.0, "40": 2193323008.0, "45": 2193323008.0, "50": 2193323008.0, "55": 2193323008.0, "60": 2193323008.0, "65": 2193323008.0, "70": 2193323008.0, "75": 2193323008.0, "80": 2193323008.0, "85": 2193323008.0, "90": 2193323008.0, "95": 2193323008.0, "100": 2193323008.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.72057, "5": 0.15784, "10": 0.15852, "15": 0.1575, "20": 0.15713, "25": 0.15769, "30": 0.15681, "35": 0.15447, "40": 0.15299, "45": 0.15347, "50": 0.15277, "55": 0.15216, "60": 0.15166, "65": 0.1519, "70": 0.15205, "75": 0.15222, "80": 0.15253, "85": 0.15199, "90": 0.15133, "95": 0.15154, "100": 0.15192}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..c83dddb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79208, "5": 10.8459, "10": 10.76945, "15": 10.78965, "20": 10.67868, "25": 10.50409, "30": 10.33064, "35": 10.25257, "40": 10.0522, "45": 9.80243, "50": 9.88792, "55": 9.86799, "60": 9.48248, "65": 8.93796, "70": 9.72094, "75": 9.40786, "80": 9.39646, "85": 9.60638, "90": 9.8096, "95": 9.51078, "100": 9.39625}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1633.0, "5": 1952.0, "10": 1432.0, "15": 1852.0, "20": 1592.0, "25": 1743.0, "30": 1953.0, "35": 1986.0, "40": 2180.0, "45": 2177.0, "50": 2468.0, "55": 2268.0, "60": 2427.0, "65": 2640.0, "70": 3158.0, "75": 2618.0, "80": 3274.0, "85": 3266.0, "90": 3078.0, "95": 3342.0, "100": 3345.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 714985472.0, "5": 714985472.0, "10": 714985472.0, "15": 714985472.0, "20": 714985472.0, "25": 714985472.0, "30": 714985472.0, "35": 714985472.0, "40": 714985472.0, "45": 714985472.0, "50": 714985472.0, "55": 714985472.0, "60": 714985472.0, "65": 714985472.0, "70": 714985472.0, "75": 714985472.0, "80": 714985472.0, "85": 714985472.0, "90": 714985472.0, "95": 714985472.0, "100": 714985472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2681564160.0, "10": 2681564160.0, "15": 2681564160.0, "20": 2681564160.0, "25": 2681564160.0, "30": 2681564160.0, "35": 2681564160.0, "40": 2681564160.0, "45": 2681564160.0, "50": 2681564160.0, "55": 2681564160.0, "60": 2681564160.0, "65": 2681564160.0, "70": 2681564160.0, "75": 2681564160.0, "80": 2681564160.0, "85": 2681564160.0, "90": 2681564160.0, "95": 2681564160.0, "100": 
2681564160.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.13387, "5": 0.16765, "10": 0.16782, "15": 0.16572, "20": 0.16589, "25": 0.16624, "30": 0.16596, "35": 0.16694, "40": 0.16658, "45": 0.1656, "50": 0.16593, "55": 0.16847, "60": 0.16671, "65": 0.16618, "70": 0.16477, "75": 0.1663, "80": 0.16601, "85": 0.16704, "90": 0.16563, "95": 0.16515, "100": 0.16582}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml index 059716a..7c07a50 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..6fddea9 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.74036, "5": 10.79133, "10": 10.71217, "15": 10.75916, "20": 10.68909, "25": 10.5421, "30": 10.45456, "35": 10.38155, "40": 10.24241, "45": 9.9827, "50": 10.06896, "55": 9.98885, "60": 9.66601, "65": 9.07115, "70": 9.81824, "75": 9.55308, "80": 9.51136, "85": 9.70682, "90": 9.87981, "95": 9.60074, "100": 9.49208}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2496.0, "5": 2768.0, "10": 2420.0, "15": 2572.0, "20": 2580.0, "25": 2521.0, "30": 2632.0, "35": 2626.0, "40": 2628.0, "45": 2362.0, "50": 2543.0, "55": 2498.0, "60": 2239.0, "65": 2652.0, "70": 3100.0, "75": 2597.0, "80": 3019.0, "85": 3171.0, "90": 3464.0, "95": 3134.0, "100": 2555.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 717157888.0, "5": 717157888.0, "10": 717157888.0, "15": 717157888.0, "20": 717157888.0, "25": 717157888.0, "30": 717157888.0, "35": 717157888.0, "40": 717157888.0, "45": 717157888.0, "50": 717157888.0, "55": 717157888.0, "60": 717157888.0, "65": 717157888.0, "70": 717157888.0, "75": 717157888.0, "80": 717157888.0, "85": 717157888.0, "90": 717157888.0, "95": 717157888.0, "100": 717157888.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1910556672.0, "5": 2194053120.0, "10": 2194053120.0, "15": 2194053120.0, "20": 2194053120.0, "25": 2194053120.0, "30": 2194053120.0, "35": 2194053120.0, "40": 2194053120.0, 
"45": 2194053120.0, "50": 2194053120.0, "55": 2194053120.0, "60": 2194053120.0, "65": 2194053120.0, "70": 2194053120.0, "75": 2194053120.0, "80": 2194053120.0, "85": 2194053120.0, "90": 2194053120.0, "95": 2194053120.0, "100": 2194053120.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.70264, "5": 0.15996, "10": 0.15856, "15": 0.15761, "20": 0.15818, "25": 0.15823, "30": 0.15624, "35": 0.1572, "40": 0.15555, "45": 0.15747, "50": 0.15543, "55": 0.15768, "60": 0.15761, "65": 0.1577, "70": 0.41222, "75": 0.15706, "80": 0.15755, "85": 0.15717, "90": 0.15749, "95": 0.15708, "100": 0.15789}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..1de4fa5 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.74049, "5": 10.79201, "10": 10.71088, "15": 10.76031, "20": 10.68908, "25": 10.54336, "30": 10.45425, "35": 10.38323, "40": 10.24297, "45": 9.98344, "50": 10.06864, "55": 9.9892, "60": 9.66702, "65": 9.07244, "70": 9.81879, "75": 9.55278, "80": 9.51061, "85": 9.70753, "90": 9.87996, "95": 9.60069, "100": 9.49261}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2527.0, "5": 2875.0, "10": 2475.0, "15": 2508.0, "20": 2634.0, "25": 2391.0, "30": 2505.0, "35": 2580.0, "40": 2568.0, "45": 2375.0, "50": 2618.0, "55": 2379.0, "60": 2183.0, "65": 2639.0, "70": 3090.0, "75": 2496.0, "80": 3076.0, "85": 3189.0, "90": 3454.0, "95": 3150.0, "100": 2593.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 715322368.0, "5": 715322368.0, "10": 715322368.0, "15": 715322368.0, "20": 715322368.0, "25": 715322368.0, "30": 715322368.0, "35": 715322368.0, "40": 715322368.0, "45": 715322368.0, "50": 715322368.0, "55": 715322368.0, "60": 715322368.0, "65": 715322368.0, "70": 715322368.0, "75": 715322368.0, "80": 715322368.0, "85": 715322368.0, "90": 715322368.0, "95": 715322368.0, "100": 715322368.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2402991104.0, "5": 2683341824.0, "10": 2683341824.0, "15": 2683341824.0, "20": 2683341824.0, "25": 2683341824.0, "30": 2683341824.0, "35": 2683341824.0, "40": 2683341824.0, "45": 2683341824.0, "50": 2683341824.0, "55": 2683341824.0, "60": 2683341824.0, "65": 2683341824.0, "70": 2683341824.0, "75": 2683341824.0, "80": 2683341824.0, "85": 2683341824.0, "90": 2683341824.0, "95": 2683341824.0, "100": 2683341824.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.64292, "5": 0.17363, "10": 0.17156, "15": 0.17206, "20": 0.1701, "25": 0.17207, "30": 0.16951, "35": 0.17005, "40": 0.17036, "45": 0.17005, "50": 0.16935, "55": 0.16909, "60": 0.16956, "65": 0.16911, "70": 0.16772, "75": 0.16805, "80": 0.16819, "85": 0.16813, "90": 0.30023, "95": 0.16879, "100": 0.16784}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml index f82a51e..f8d0651 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..9036f85 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.90084, "5": 10.89721, "10": 10.88248, "15": 10.83942, "20": 10.74023, "25": 10.57891, "30": 10.37715, "35": 10.29016, "40": 10.11583, "45": 9.85669, "50": 9.93406, "55": 9.87603, "60": 9.52783, "65": 8.95047, "70": 9.76628, "75": 9.43012, "80": 9.40888, "85": 9.63342, "90": 9.85157, "95": 9.51855, "100": 9.43239}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22726994.0, "5": 22714776.0, "10": 22918756.0, "15": 22821356.0, "20": 22693986.0, "25": 22818816.0, "30": 22631356.0, "35": 22788100.0, "40": 22658194.0, "45": 22675192.0, "50": 22905040.0, "55": 22518724.0, "60": 22743286.0, "65": 23060364.0, "70": 22829802.0, "75": 23054078.0, "80": 22706314.0, "85": 22712008.0, "90": 22972120.0, "95": 23048220.0, "100": 23015920.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 719180288.0, "5": 719180288.0, "10": 719180288.0, "15": 719180288.0, "20": 719180288.0, "25": 719180288.0, "30": 719180288.0, "35": 719180288.0, "40": 719180288.0, "45": 719180288.0, "50": 719180288.0, "55": 719180288.0, "60": 719180288.0, "65": 719180288.0, "70": 719180288.0, "75": 719180288.0, "80": 719180288.0, "85": 719180288.0, "90": 719180288.0, "95": 719180288.0, "100": 719180288.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1910562816.0, "5": 2195420160.0, "10": 2195420160.0, "15": 2195420160.0, "20": 2195420160.0, "25": 2195420160.0, "30": 2195420160.0, "35": 2195420160.0, "40": 2195420160.0, "45": 2195420160.0, "50": 2195420160.0, "55": 2195420160.0, "60": 2195420160.0, "65": 2195420160.0, "70": 2195420160.0, "75": 2195420160.0, "80": 2195420160.0, "85": 2195420160.0, "90": 2195420160.0, "95": 2195420160.0, "100": 2195420160.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.0469, "5": 0.16696, "10": 0.15966, "15": 0.15842, "20": 0.15919, "25": 0.15919, "30": 0.1584, "35": 0.15808, "40": 0.15754, "45": 
0.1578, "50": 0.15747, "55": 0.15722, "60": 0.15708, "65": 0.16058, "70": 0.15826, "75": 0.15724, "80": 0.15839, "85": 0.15805, "90": 0.15684, "95": 0.15771, "100": 0.15729}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..d4c9a7e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.90105, "5": 10.89686, "10": 10.88269, "15": 10.83979, "20": 10.74036, "25": 10.57932, "30": 10.37739, "35": 10.29032, "40": 10.11557, "45": 9.8564, "50": 9.93379, "55": 9.87551, "60": 9.52806, "65": 8.95044, "70": 9.76632, "75": 9.43039, "80": 9.40915, "85": 9.63369, "90": 9.85157, "95": 9.51871, "100": 9.43255}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22727086.0, "5": 22714736.0, "10": 22918896.0, "15": 22821328.0, "20": 22694052.0, "25": 22818836.0, "30": 22631284.0, "35": 22787960.0, "40": 22658224.0, "45": 22675178.0, "50": 22904938.0, "55": 22518824.0, "60": 22743396.0, "65": 23060486.0, "70": 22829784.0, "75": 23053926.0, "80": 22706340.0, "85": 22712066.0, "90": 22972248.0, "95": 23048252.0, "100": 23015708.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 717082624.0, "5": 717082624.0, "10": 717082624.0, "15": 717082624.0, "20": 717082624.0, "25": 717082624.0, "30": 717082624.0, "35": 717082624.0, "40": 717082624.0, "45": 717082624.0, "50": 717082624.0, "55": 717082624.0, "60": 717082624.0, "65": 717082624.0, "70": 717082624.0, "75": 717082624.0, "80": 717082624.0, "85": 717082624.0, "90": 717082624.0, "95": 717082624.0, "100": 717082624.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2683661312.0, "10": 2683661312.0, "15": 2683661312.0, "20": 2683661312.0, "25": 2683661312.0, "30": 2683661312.0, "35": 2683661312.0, "40": 2683661312.0, "45": 2683661312.0, "50": 2683661312.0, "55": 2683661312.0, "60": 2683661312.0, "65": 2683661312.0, "70": 2683661312.0, "75": 2683661312.0, "80": 2683661312.0, "85": 2683661312.0, "90": 2683661312.0, "95": 2683661312.0, "100": 2683661312.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.63961, "5": 0.16461, "10": 0.16595, "15": 0.16376, "20": 0.16409, "25": 0.16696, "30": 0.16386, "35": 0.16712, "40": 0.16592, "45": 0.16468, "50": 0.16455, "55": 0.16662, "60": 0.16721, "65": 0.16822, "70": 0.16726, "75": 0.16746, "80": 0.16563, "85": 0.16414, "90": 0.16627, "95": 0.16928, "100": 0.16745}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml index 3d4dc22..ddaee6c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json index e8a2053..b52a6ac 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79208, - 10.86688, - 10.89063, - 10.7818, - 10.65964, - 10.58005, - 10.0819, - 10.19136, - 10.13478, - 9.81149 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1602.0, - 1792.0, - 1751.0, - 1885.0, - 1872.0, - 1716.0, - 1561.0, - 1867.0, - 2355.0, - 2329.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.82777, - 0.17397, - 0.17253, - 0.17285, - 0.17221, - 0.17204, - 0.17139, - 0.17105, - 0.17258, - 0.17185 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79208, "5": 10.84589, "10": 10.76947, "15": 10.78965, "20": 10.6787, "25": 10.50407, "30": 10.33068, "35": 10.25256, "40": 10.05216, "45": 9.8024, "50": 9.88789}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1602.0, "5": 1906.0, "10": 1337.0, "15": 1845.0, "20": 1657.0, "25": 1717.0, "30": 1845.0, "35": 2078.0, "40": 2250.0, "45": 2194.0, "50": 2387.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 717082624.0, "5": 717082624.0, "10": 717082624.0, "15": 717082624.0, "20": 717082624.0, "25": 717082624.0, "30": 717082624.0, "35": 717082624.0, "40": 717082624.0, "45": 717082624.0, "50": 717082624.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2683661312.0, "10": 2683661312.0, "15": 2683661312.0, "20": 2683661312.0, "25": 2683661312.0, "30": 2683661312.0, "35": 2683661312.0, "40": 2683661312.0, "45": 2683661312.0, "50": 2683661312.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 16.00629, "5": 0.16887, "10": 0.16829, "15": 0.16707, "20": 0.16853, "25": 0.16779, "30": 0.16587, "35": 0.16742, "40": 0.16704, "45": 0.16674, "50": 0.16574}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json index 6123f3c..8b7c247 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79208, 10.86687, 10.89062, 10.78178, 10.65967, 10.58006, 10.08189, 10.19133, 10.13481, 9.81153]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1633.0, 1860.0, 1755.0, 1886.0, 1874.0, 1796.0, 1586.0, 1926.0, 2330.0, 2361.0]}, "iteration_timing_avg": 0.12348235294117646} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79208, "5": 10.8459, "10": 10.76945, "15": 10.78965, "20": 10.67868, "25": 10.50409, "30": 10.33064, "35": 10.25257, "40": 10.0522, "45": 9.80243, "50": 9.88792}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1633.0, "5": 1952.0, "10": 1432.0, "15": 1852.0, "20": 1592.0, "25": 1743.0, "30": 1953.0, "35": 1986.0, "40": 2180.0, "45": 2177.0, "50": 2468.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 714985472.0, "5": 714985472.0, "10": 714985472.0, "15": 714985472.0, "20": 714985472.0, "25": 714985472.0, "30": 714985472.0, "35": 714985472.0, "40": 714985472.0, "45": 714985472.0, "50": 714985472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2681564160.0, "10": 2681564160.0, "15": 2681564160.0, "20": 2681564160.0, "25": 2681564160.0, "30": 2681564160.0, "35": 2681564160.0, "40": 2681564160.0, "45": 2681564160.0, "50": 2681564160.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.3494, "5": 0.18, "10": 0.16805, "15": 0.16724, "20": 0.16843, "25": 0.17065, "30": 0.16585, "35": 0.16544, "40": 0.16534, "45": 0.16445, "50": 0.16426}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml index e89edc9..f352165 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json index 6a5671c..e1effa2 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_dev.json @@ 
-1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.74049, - 10.81937, - 10.84178, - 10.75558, - 10.69821, - 10.63096, - 10.2026, - 10.36288, - 10.25634, - 9.94255 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2529.0, - 2845.0, - 2909.0, - 2683.0, - 2631.0, - 2573.0, - 2281.0, - 2559.0, - 2484.0, - 2360.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 14.80986, - 0.17896, - 0.17664, - 0.17758, - 0.17762, - 0.17676, - 0.17638, - 0.1761, - 0.17725, - 0.1755 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.74049, "5": 10.792, "10": 10.71084, "15": 10.76031, "20": 10.68907, "25": 10.54329, "30": 10.45422, "35": 10.38326, "40": 10.24295, "45": 9.98343, "50": 10.06864}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2529.0, "5": 2819.0, "10": 2489.0, "15": 2566.0, "20": 2706.0, "25": 2484.0, "30": 2558.0, "35": 2606.0, "40": 2597.0, "45": 2424.0, "50": 2604.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 715453440.0, "5": 715453440.0, "10": 715453440.0, "15": 715453440.0, "20": 715453440.0, "25": 715453440.0, "30": 715453440.0, "35": 715453440.0, "40": 715453440.0, "45": 715453440.0, "50": 715453440.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2402991104.0, "5": 2682818560.0, "10": 2682818560.0, "15": 2682818560.0, "20": 2682818560.0, "25": 2682818560.0, "30": 2682818560.0, "35": 2682818560.0, "40": 2682818560.0, "45": 2682818560.0, "50": 2682818560.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 16.28903, "5": 0.16814, "10": 0.16897, "15": 0.16781, "20": 0.16914, "25": 0.16852, "30": 0.16707, "35": 0.1637, "40": 0.16516, "45": 0.16471, "50": 0.16413}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json index 0252095..1b703ef 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.74049, 10.81937, 10.84178, 10.75551, 10.69818, 10.63091, 10.20265, 10.36288, 10.25632, 9.94256]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2527.0, 2937.0, 2975.0, 2749.0, 2580.0, 2593.0, 2320.0, 2616.0, 2541.0, 2393.0]}, "iteration_timing_avg": 0.12725500000000006} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.74049, "5": 10.79201, "10": 10.71088, "15": 10.76031, "20": 10.68908, "25": 10.54336, "30": 10.45425, "35": 10.38323, "40": 10.24297, "45": 9.98344, "50": 10.06864}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2527.0, "5": 2875.0, "10": 2475.0, "15": 2508.0, "20": 2634.0, "25": 2391.0, "30": 2505.0, "35": 2580.0, "40": 2568.0, "45": 2375.0, "50": 2618.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 715322368.0, "5": 715322368.0, 
"10": 715322368.0, "15": 715322368.0, "20": 715322368.0, "25": 715322368.0, "30": 715322368.0, "35": 715322368.0, "40": 715322368.0, "45": 715322368.0, "50": 715322368.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2402991104.0, "5": 2683341824.0, "10": 2683341824.0, "15": 2683341824.0, "20": 2683341824.0, "25": 2683341824.0, "30": 2683341824.0, "35": 2683341824.0, "40": 2683341824.0, "45": 2683341824.0, "50": 2683341824.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.05971, "5": 0.1673, "10": 0.16775, "15": 0.16798, "20": 0.16737, "25": 0.16621, "30": 0.1672, "35": 0.16793, "40": 0.16638, "45": 0.16732, "50": 0.16685}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml index c6e8c36..c5cd812 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json index e7ae5fe..6d7ab3f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.90105, - 10.91105, - 10.91632, - 10.84823, - 10.70727, - 10.63015, - 10.15241, - 10.26049, - 10.15995, - 9.83163 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 22727080.0, - 23021764.0, - 22500984.0, - 22830798.0, - 22739428.0, - 22547260.0, - 22955476.0, - 22590172.0, - 22659570.0, - 22884676.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 17.09091, - 0.17551, - 0.17095, - 0.1714, - 0.17144, - 0.1711, - 0.17223, - 0.17069, - 0.17123, - 0.17064 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.90105, "5": 10.89687, "10": 10.88265, "15": 10.83975, "20": 10.74036, "25": 10.57931, "30": 10.37738, "35": 10.2903, "40": 10.11557, "45": 9.85643, "50": 9.93377}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727080.0, "5": 22714794.0, "10": 22918886.0, "15": 22821344.0, "20": 22694044.0, "25": 22818724.0, "30": 22631208.0, "35": 22788060.0, "40": 22658080.0, "45": 22675284.0, "50": 
22904912.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 717082624.0, "5": 717082624.0, "10": 717082624.0, "15": 717082624.0, "20": 717082624.0, "25": 717082624.0, "30": 717082624.0, "35": 717082624.0, "40": 717082624.0, "45": 717082624.0, "50": 717082624.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2683661312.0, "10": 2683661312.0, "15": 2683661312.0, "20": 2683661312.0, "25": 2683661312.0, "30": 2683661312.0, "35": 2683661312.0, "40": 2683661312.0, "45": 2683661312.0, "50": 2683661312.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.28859, "5": 0.17536, "10": 0.16974, "15": 0.1669, "20": 0.1688, "25": 0.16926, "30": 0.17067, "35": 0.17146, "40": 0.17105, "45": 0.16839, "50": 0.16823}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json index 2039e2f..c3a9944 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.90105, 10.91104, 10.91635, 10.84822, 10.70727, 10.63018, 10.15241, 10.26052, 10.15994, 9.83162]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [22727086.0, 23021732.0, 22500940.0, 22830674.0, 22739332.0, 22547236.0, 22955516.0, 22590012.0, 22659588.0, 22884630.0]}, "iteration_timing_avg": 0.1246464705882353} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.90105, "5": 10.89686, "10": 10.88269, "15": 10.83979, "20": 10.74036, "25": 10.57932, "30": 10.37739, "35": 10.29032, "40": 10.11557, "45": 9.8564, "50": 9.93379}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727086.0, "5": 22714736.0, "10": 22918896.0, "15": 22821328.0, "20": 22694052.0, "25": 22818836.0, "30": 22631284.0, "35": 22787960.0, "40": 22658224.0, "45": 22675178.0, "50": 22904938.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 717082624.0, "5": 717082624.0, "10": 717082624.0, "15": 717082624.0, "20": 717082624.0, "25": 717082624.0, "30": 717082624.0, "35": 717082624.0, "40": 717082624.0, "45": 717082624.0, "50": 717082624.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2399852544.0, "5": 2683661312.0, "10": 2683661312.0, "15": 2683661312.0, "20": 2683661312.0, "25": 2683661312.0, "30": 2683661312.0, "35": 2683661312.0, "40": 2683661312.0, "45": 2683661312.0, "50": 2683661312.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.95959, "5": 0.16749, "10": 0.16624, "15": 0.16491, "20": 0.16804, "25": 0.16878, "30": 0.1674, "35": 0.16705, "40": 0.16678, "45": 0.16602, "50": 0.16554}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml index 0b73dc4..f962441 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json index 1c4e36d..1fb0cd9 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87447, - 10.87793, - 10.79509, - 10.68164, - 10.59514, - 10.10045, - 10.21239, - 10.13862, - 9.80879 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1754.0, - 1879.0, - 1778.0, - 1877.0, - 1733.0, - 1578.0, - 1924.0, - 2299.0, - 2292.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 18.71949, - 0.16575, - 0.16508, - 0.16465, - 0.16475, - 0.16222, - 0.16473, - 0.16461, - 0.16489, - 0.16518 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85286, "10": 10.78456, "15": 10.79229, "20": 10.69209, "25": 10.52412, "30": 10.34553, "35": 10.26239, "40": 10.07236, "45": 9.81098, "50": 9.88419}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1861.0, "10": 1374.0, "15": 1892.0, "20": 1700.0, "25": 1653.0, "30": 1857.0, "35": 1888.0, "40": 2067.0, "45": 2092.0, "50": 2415.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.00945, "5": 0.16379, "10": 0.15924, "15": 0.15758, "20": 0.15765, "25": 0.15621, "30": 0.15685, "35": 0.15689, "40": 0.15615, "45": 0.1563, "50": 0.15589}}} \ No newline at end of file 
diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json index 939863d..18abdb1 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 10.87449, 10.87798, 10.79509, 10.68164, 10.59517, 10.10046, 10.21236, 10.13863, 9.80877]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1719.0, 1856.0, 1791.0, 1900.0, 1709.0, 1627.0, 1831.0, 2272.0, 2312.0]}, "iteration_timing_avg": 0.12502588235294115} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.79229, "20": 10.69211, "25": 10.52412, "30": 10.34552, "35": 10.26242, "40": 10.07239, "45": 9.811, "50": 9.88415}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1848.0, "20": 1601.0, "25": 1635.0, "30": 1908.0, "35": 1925.0, "40": 2126.0, "45": 2086.0, "50": 2298.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 20.28567, "5": 0.15641, "10": 0.15619, "15": 0.15522, "20": 0.15449, "25": 0.15839, "30": 0.1595, "35": 0.15731, "40": 0.15551, "45": 0.15588, "50": 0.15662}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml index 106d3ba..8434151 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json index e614c53..60254da 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87448, - 10.87796, - 10.79506, - 10.68153, - 10.59413, - 10.09983, - 10.20957, - 10.13642, - 9.80012 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1687.0, - 1848.0, - 1736.0, - 1955.0, - 1764.0, - 1580.0, - 1886.0, - 2252.0, - 2259.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 16.16694, - 0.16354, - 0.16237, - 0.16232, - 0.16088, - 0.15891, - 0.15894, - 0.15865, - 0.16009, - 0.1576 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85281, "10": 10.78449, "15": 10.79227, "20": 10.69197, "25": 10.52315, "30": 10.34503, "35": 10.25891, "40": 10.0703, "45": 9.80301, "50": 9.87675}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1892.0, "10": 1374.0, "15": 1864.0, "20": 1701.0, "25": 1660.0, "30": 1897.0, "35": 1919.0, "40": 2146.0, "45": 2065.0, "50": 2364.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.74636, "5": 0.1602, "10": 0.15939, "15": 0.15841, "20": 0.15788, "25": 0.15758, "30": 0.16054, "35": 0.15916, "40": 0.15845, "45": 0.16415, "50": 0.16006}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json index 460f463..730a1c0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 10.87448, 10.87794, 10.79507, 10.68154, 10.59412, 10.09987, 10.20952, 10.13639, 9.80012]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1734.0, 1884.0, 1684.0, 1815.0, 1766.0, 1601.0, 1904.0, 2361.0, 2347.0]}, "iteration_timing_avg": 0.12273676470588235} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 10.82005, "5": 10.85285, "10": 10.78449, "15": 10.79226, "20": 10.69196, "25": 10.52317, "30": 10.34507, "35": 10.25889, "40": 10.07027, "45": 9.80301, "50": 9.87673}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1915.0, "10": 1361.0, "15": 1831.0, "20": 1695.0, "25": 1596.0, "30": 1821.0, "35": 1872.0, "40": 2121.0, "45": 2090.0, "50": 2395.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.79862, "5": 0.15862, "10": 0.15871, "15": 0.15689, "20": 0.15914, "25": 0.16132, "30": 0.15706, "35": 0.15914, "40": 0.15674, "45": 0.15634, "50": 0.15584}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml index 24bbf3a..da5b9d4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json index ccb8518..dd2c4c7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87447, - 10.87793, - 10.79509, - 10.68164, - 10.59514, - 10.10045, - 10.21239, - 10.13862, - 9.80879 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1754.0, - 1879.0, - 1778.0, - 1877.0, - 1733.0, - 1578.0, - 1924.0, - 2299.0, - 2292.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 18.68941, - 0.16498, - 0.16403, - 0.16281, - 0.16302, - 0.16352, - 0.16473, - 0.16207, - 0.16362, - 0.16219 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 10.82005, "5": 10.85286, "10": 10.78456, "15": 10.79229, "20": 10.69209, "25": 10.52412, "30": 10.34553, "35": 10.26239, "40": 10.07236, "45": 9.81098, "50": 9.88419}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1861.0, "10": 1374.0, "15": 1892.0, "20": 1700.0, "25": 1653.0, "30": 1857.0, "35": 1888.0, "40": 2067.0, "45": 2092.0, "50": 2415.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.95964, "5": 0.16286, "10": 0.15862, "15": 0.15985, "20": 0.15943, "25": 0.15712, "30": 0.15869, "35": 0.15816, "40": 0.16209, "45": 0.16243, "50": 0.15969}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json index 939863d..340fdf1 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 10.87449, 10.87798, 10.79509, 10.68164, 10.59517, 10.10046, 10.21236, 10.13863, 9.80877]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1719.0, 1856.0, 1791.0, 1900.0, 1709.0, 1627.0, 1831.0, 2272.0, 2312.0]}, "iteration_timing_avg": 0.12502588235294115} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.79229, "20": 10.69211, "25": 10.52412, "30": 10.34552, "35": 10.26242, "40": 10.07239, "45": 9.811, "50": 9.88415}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1848.0, "20": 1601.0, "25": 1635.0, "30": 1908.0, "35": 1925.0, "40": 2126.0, "45": 2086.0, "50": 2298.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.71283, "5": 0.16448, "10": 0.16446, "15": 0.16389, "20": 0.16438, "25": 0.15866, "30": 0.15768, "35": 0.15941, "40": 0.15987, "45": 0.16075, "50": 
0.16301}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml index 6b416f6..22fc235 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json index 1ebd78a..d7c90c3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87447, - 10.87799, - 10.79507, - 10.68165, - 10.59511, - 10.10047, - 10.2124, - 10.13861, - 9.80876 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1738.0, - 1852.0, - 1802.0, - 1917.0, - 1765.0, - 1570.0, - 1949.0, - 2251.0, - 2270.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 14.96968, - 0.16347, - 0.16403, - 0.16317, - 0.162, - 0.16129, - 0.16268, - 0.16156, - 0.16212, - 0.16407 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85286, "10": 10.7845, "15": 10.79231, "20": 10.6921, "25": 10.52408, "30": 10.34555, "35": 10.26239, "40": 10.07241, "45": 9.81101, "50": 9.88416}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1861.0, "10": 1339.0, "15": 1948.0, "20": 1698.0, "25": 1687.0, "30": 1930.0, "35": 1927.0, "40": 2061.0, "45": 2060.0, "50": 2330.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 522976256.0, "5": 522976256.0, "10": 522976256.0, "15": 522976256.0, "20": 522976256.0, "25": 522976256.0, "30": 522976256.0, "35": 522976256.0, "40": 522976256.0, "45": 522976256.0, "50": 522976256.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768845312.0, "5": 3912737792.0, "10": 3912737792.0, "15": 3912737792.0, "20": 3912737792.0, "25": 3912737792.0, "30": 3912737792.0, "35": 3912737792.0, "40": 3912737792.0, "45": 3912737792.0, "50": 3912737792.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 19.16738, "5": 0.16325, "10": 0.16427, "15": 0.16183, "20": 0.16039, "25": 
0.16182, "30": 0.16047, "35": 0.16389, "40": 0.15815, "45": 0.15745, "50": 0.15915}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json index 2d807f5..74af4bc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 10.87449, 10.87798, 10.79511, 10.68164, 10.59513, 10.10043, 10.21239, 10.13865, 9.80879]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1719.0, 1856.0, 1735.0, 1873.0, 1765.0, 1535.0, 1910.0, 2278.0, 2247.0]}, "iteration_timing_avg": 0.12168999999999999} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.7923, "20": 10.69211, "25": 10.52414, "30": 10.34555, "35": 10.2624, "40": 10.07237, "45": 9.81103, "50": 9.88417}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1850.0, "20": 1668.0, "25": 1607.0, "30": 1945.0, "35": 1860.0, "40": 2022.0, "45": 2042.0, "50": 2292.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 523003904.0, "5": 523003904.0, "10": 523003904.0, "15": 523003904.0, "20": 523003904.0, "25": 523003904.0, "30": 523003904.0, "35": 523003904.0, "40": 523003904.0, "45": 523003904.0, "50": 523003904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768872960.0, "5": 3912765440.0, "10": 3912765440.0, "15": 3912765440.0, "20": 3912765440.0, "25": 3912765440.0, "30": 3912765440.0, "35": 3912765440.0, "40": 3912765440.0, "45": 3912765440.0, "50": 3912765440.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.09905, "5": 0.16241, "10": 0.16341, "15": 0.15828, "20": 0.15929, "25": 0.15899, "30": 0.16171, "35": 0.15966, "40": 0.15804, "45": 0.15972, "50": 0.15901}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index 898b249..02918ff 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true 
--attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json index badf672..fa0ffab 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87447, - 10.87799, - 10.79507, - 10.68165, - 10.59511, - 10.10047, - 10.2124, - 10.13861, - 9.80876 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1738.0, - 1852.0, - 1802.0, - 1917.0, - 1765.0, - 1570.0, - 1949.0, - 2251.0, - 2270.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 17.23575, - 0.17553, - 0.34737, - 0.17165, - 0.32526, - 0.17081, - 0.32706, - 0.17037, - 0.3321, - 0.16992 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85286, "10": 10.7845, "15": 10.79231, "20": 10.6921, "25": 10.52408, "30": 10.34555, "35": 10.26239, "40": 10.07241, "45": 9.81101, "50": 9.88416}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1861.0, "10": 1339.0, "15": 1948.0, "20": 1698.0, "25": 1687.0, "30": 1930.0, "35": 1927.0, "40": 2061.0, "45": 2060.0, "50": 2330.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 522976256.0, "5": 522976256.0, "10": 522976256.0, "15": 522976256.0, "20": 522976256.0, "25": 522976256.0, "30": 522976256.0, "35": 522976256.0, "40": 522976256.0, "45": 522976256.0, "50": 522976256.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768845312.0, "5": 3912737792.0, "10": 3912737792.0, "15": 3912737792.0, "20": 3912737792.0, "25": 3912737792.0, "30": 3912737792.0, "35": 3912737792.0, "40": 3912737792.0, "45": 3912737792.0, "50": 3912737792.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.07048, "5": 0.17182, "10": 0.16227, "15": 0.16202, "20": 0.16214, "25": 0.16227, "30": 0.16231, "35": 0.16221, "40": 0.16257, "45": 0.16117, "50": 0.16119}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json index f23c85a..2c4fb7d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 
10.87449, 10.87798, 10.79511, 10.68164, 10.59513, 10.10043, 10.21239, 10.13865, 9.80879]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1719.0, 1856.0, 1735.0, 1873.0, 1765.0, 1535.0, 1910.0, 2278.0, 2247.0]}, "iteration_timing_avg": 0.12873676470588236} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.7923, "20": 10.69211, "25": 10.52414, "30": 10.34555, "35": 10.2624, "40": 10.07237, "45": 9.81103, "50": 9.88417}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1850.0, "20": 1668.0, "25": 1607.0, "30": 1945.0, "35": 1860.0, "40": 2022.0, "45": 2042.0, "50": 2292.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 523003904.0, "5": 523003904.0, "10": 523003904.0, "15": 523003904.0, "20": 523003904.0, "25": 523003904.0, "30": 523003904.0, "35": 523003904.0, "40": 523003904.0, "45": 523003904.0, "50": 523003904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768872960.0, "5": 3912765440.0, "10": 3912765440.0, "15": 3912765440.0, "20": 3912765440.0, "25": 3912765440.0, "30": 3912765440.0, "35": 3912765440.0, "40": 3912765440.0, "45": 3912765440.0, "50": 3912765440.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.80767, "5": 0.16165, "10": 0.16385, "15": 0.16229, "20": 0.16237, "25": 0.1618, "30": 0.1643, "35": 0.16116, "40": 0.16294, "45": 0.16266, "50": 0.16228}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 818960e..70c44aa 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -1,56 +1,58 @@ -ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 - NCCL_ALGO: Tree - CUBLAS_WORKSPACE_CONFIG: :4096:8 -MODEL_ARGS: - --num-layers: 12 - --hidden-size: 512 - --num-attention-heads: 8 - --log-params-norm: true - --log-num-zeros-in-grad: true - --log-validation-ppl-to-tensorboard: true - --log-timers-to-tensorboard: true - --tensorboard-dir: ${TENSORBOARD_PATH} - --micro-batch-size: 4 - --global-batch-size: 32 - --seq-length: 1024 - --max-position-embeddings: 1024 - --train-iters: 50 - --timing-log-level: 2 - --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} - --data-path: ${DATA_PATH}/my-gpt3_00_text_document - --vocab-file: ${DATA_PATH}/bpe/vocab.json - --merge-file: ${DATA_PATH}/bpe/merges.txt - --split: 949,50,1 - --distributed-backend: nccl - --lr: 0.00015 - --lr-decay-style: cosine - --min-lr: 1.0e-5 - --weight-decay: 1e-2 - --clip-grad: 1.0 - --lr-warmup-fraction: .01 - --log-interval: 1 - --save-interval: 10000 - --eval-interval: 1000 - --eval-iters: 10 - --transformer-impl: transformer_engine - --tensor-model-parallel-size: 1 - --pipeline-model-parallel-size: 4 - --num-layers-per-virtual-pipeline-stage: 1 - 
--use-distributed-optimizer: true - --overlap-grad-reduce: true - --overlap-param-gather: true - --check-weight-hash-across-dp-replicas-interval: 10 - --ckpt-fully-parallel-load: true - --deterministic-mode: true - --no-gradient-accumulation-fusion: true - --attention-softmax-in-fp32: true - --use-mcore-models: true - --ckpt-format: torch_dist - --data-cache-path: ${DATA_CACHE_PATH} - --bf16: true - --attention-backend: unfused -TEST_TYPE: regular +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-layers-per-virtual-pipeline-stage: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + --check-weight-hash-across-dp-replicas-interval: 10 + --disable-gloo-process-groups: true + --ckpt-fully-parallel-load: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json index 5d79a14..f0fe3cc 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.82005, - 10.87447, - 10.87799, - 10.79508, - 10.68163, - 10.59514, - 10.10047, - 10.21237, - 10.13864, - 9.80877 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1562.0, - 1738.0, - 1852.0, - 1796.0, - 1869.0, - 1788.0, - 1517.0, - 1941.0, - 2226.0, - 2214.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 17.43169, - 0.16677, - 0.33581, - 0.16498, - 0.33103, - 0.16418, - 0.33146, - 0.16539, - 0.33075, - 
0.1651 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85286, "10": 10.7845, "15": 10.79231, "20": 10.69208, "25": 10.52411, "30": 10.34557, "35": 10.2624, "40": 10.07239, "45": 9.811, "50": 9.8842}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1562.0, "5": 1861.0, "10": 1339.0, "15": 1964.0, "20": 1696.0, "25": 1558.0, "30": 1887.0, "35": 1887.0, "40": 2113.0, "45": 2114.0, "50": 2342.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 522977280.0, "5": 522977280.0, "10": 522977280.0, "15": 522977280.0, "20": 522977280.0, "25": 522977280.0, "30": 522977280.0, "35": 522977280.0, "40": 522977280.0, "45": 522977280.0, "50": 522977280.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768846336.0, "5": 3912738816.0, "10": 3912738816.0, "15": 3912738816.0, "20": 3912738816.0, "25": 3912738816.0, "30": 3912738816.0, "35": 3912738816.0, "40": 3912738816.0, "45": 3912738816.0, "50": 3912738816.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 19.10362, "5": 0.16434, "10": 0.1658, "15": 0.16354, "20": 0.16555, "25": 0.16274, "30": 0.16422, "35": 0.16143, "40": 0.16856, "45": 0.16893, "50": 0.16867}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json index 549ceb7..ee448f2 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.82005, 10.87449, 10.87799, 10.79508, 10.68166, 10.59514, 10.10042, 10.21238, 10.13865, 9.80879]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1559.0, 1719.0, 1857.0, 1746.0, 1883.0, 1738.0, 1475.0, 1851.0, 2303.0, 2258.0]}, "iteration_timing_avg": 0.12873676470588236} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78451, "15": 10.79227, "20": 10.69215, "25": 10.52412, "30": 10.34553, "35": 10.26239, "40": 10.07239, "45": 9.81101, "50": 9.8842}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1336.0, "15": 1910.0, "20": 1640.0, "25": 1694.0, "30": 1894.0, "35": 1955.0, "40": 2147.0, "45": 2157.0, "50": 2389.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 523004928.0, "5": 523004928.0, "10": 523004928.0, "15": 523004928.0, "20": 523004928.0, "25": 523004928.0, "30": 523004928.0, "35": 523004928.0, "40": 523004928.0, "45": 523004928.0, "50": 523004928.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3768873984.0, "5": 3912766464.0, "10": 3912766464.0, "15": 3912766464.0, "20": 3912766464.0, "25": 
3912766464.0, "30": 3912766464.0, "35": 3912766464.0, "40": 3912766464.0, "45": 3912766464.0, "50": 3912766464.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 21.58641, "5": 0.16306, "10": 0.16416, "15": 0.16288, "20": 0.16323, "25": 0.1694, "30": 0.16231, "35": 0.16648, "40": 0.16317, "45": 0.16593, "50": 0.16425}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml index 1238b4a..5c7a4b7 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -54,4 +54,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json index 99b20e2..5a826e6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.9359, - 10.93551, - 10.9424, - 10.88073, - 10.75652, - 10.66333, - 10.16716, - 10.27244, - 10.19575, - 9.86005 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 22727668.0, - 23021008.0, - 22501280.0, - 22830020.0, - 22739656.0, - 22548262.0, - 22955680.0, - 22589964.0, - 22660156.0, - 22884572.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 16.12696, - 0.16574, - 0.16735, - 0.16507, - 0.1657, - 0.16626, - 0.16614, - 0.16517, - 0.16625, - 0.16568 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.9359, "5": 10.9322, "10": 10.91082, "15": 10.85725, "20": 10.7709, "25": 10.60557, "30": 10.40545, "35": 10.31363, "40": 10.12334, "45": 9.87564, "50": 9.94453}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727668.0, "5": 22715306.0, "10": 22919026.0, "15": 22821242.0, "20": 22693800.0, "25": 22819536.0, "30": 22631092.0, "35": 22787886.0, "40": 22658198.0, "45": 22674644.0, "50": 22904428.0}}, "mem-allocated-bytes": {"start_step": 1, 
"end_step": 50, "step_interval": 5, "values": {"1": 521936896.0, "5": 521936896.0, "10": 521936896.0, "15": 521936896.0, "20": 521936896.0, "25": 521936896.0, "30": 521936896.0, "35": 521936896.0, "40": 521936896.0, "45": 521936896.0, "50": 521936896.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3769915392.0, "5": 3914746880.0, "10": 3914746880.0, "15": 3914746880.0, "20": 3914746880.0, "25": 3914746880.0, "30": 3914746880.0, "35": 3914746880.0, "40": 3914746880.0, "45": 3914746880.0, "50": 3914746880.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.90437, "5": 0.15853, "10": 0.15748, "15": 0.15817, "20": 0.15827, "25": 0.1568, "30": 0.1606, "35": 0.16038, "40": 0.15929, "45": 0.16015, "50": 0.17077}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json index 64f030d..b0379dd 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.9359, 10.93547, 10.94238, 10.88073, 10.75653, 10.66332, 10.1672, 10.27241, 10.19577, 9.86006]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [22727686.0, 23020980.0, 22501260.0, 22830024.0, 22739772.0, 22548148.0, 22955712.0, 22589816.0, 22660000.0, 22884332.0]}, "iteration_timing_avg": 0.12799705882352944} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.9359, "5": 10.93225, "10": 10.91081, "15": 10.85723, "20": 10.77091, "25": 10.60558, "30": 10.40544, "35": 10.31364, "40": 10.12333, "45": 9.8756, "50": 9.94451}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727686.0, "5": 22715312.0, "10": 22919004.0, "15": 22821282.0, "20": 22693812.0, "25": 22819580.0, "30": 22631132.0, "35": 22787906.0, "40": 22658304.0, "45": 22674764.0, "50": 22904438.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 523016192.0, "5": 523016192.0, "10": 523016192.0, "15": 523016192.0, "20": 523016192.0, "25": 523016192.0, "30": 523016192.0, "35": 523016192.0, "40": 523016192.0, "45": 523016192.0, "50": 523016192.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3769943040.0, "5": 3914774528.0, "10": 3914774528.0, "15": 3914774528.0, "20": 3914774528.0, "25": 3914774528.0, "30": 3914774528.0, "35": 3914774528.0, "40": 3914774528.0, "45": 3914774528.0, "50": 3914774528.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 19.24942, "5": 0.158, "10": 0.15909, "15": 0.15799, "20": 0.15892, "25": 0.15911, "30": 0.15833, "35": 0.15767, "40": 0.15693, "45": 0.16146, "50": 0.15756}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml index eb01273..8b3da96 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..5286661 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81873, "5": 10.85264, "10": 10.78415, "15": 10.7931, "20": 10.6921, "25": 10.52359, "30": 10.34496, "35": 10.25889, "40": 10.07079, "45": 9.80318, "50": 9.87688, "55": 9.85528, "60": 9.46661, "65": 8.91692, "70": 9.69269, "75": 9.37788, "80": 9.36796, "85": 9.576, "90": 9.77252, "95": 9.46897, "100": 9.34559}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1541.0, "5": 1835.0, "10": 1425.0, "15": 1935.0, "20": 1728.0, "25": 1634.0, "30": 1899.0, "35": 1945.0, "40": 2144.0, "45": 2092.0, "50": 2322.0, "55": 2333.0, "60": 2386.0, "65": 2636.0, "70": 3071.0, "75": 2522.0, "80": 3165.0, "85": 3334.0, "90": 2941.0, "95": 3321.0, "100": 3378.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 731763200.0, "5": 731763200.0, "10": 731763200.0, "15": 731763200.0, "20": 731763200.0, "25": 731763200.0, "30": 731763200.0, "35": 731763200.0, "40": 731763200.0, "45": 731763200.0, "50": 731763200.0, "55": 731763200.0, "60": 731763200.0, "65": 731763200.0, "70": 731763200.0, "75": 731763200.0, "80": 731763200.0, "85": 731763200.0, "90": 731763200.0, "95": 731763200.0, "100": 731763200.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2368927744.0, "5": 2649590784.0, "10": 2649590784.0, "15": 2649590784.0, "20": 2649590784.0, "25": 2649590784.0, "30": 2649590784.0, "35": 2649590784.0, "40": 2649590784.0, "45": 2649590784.0, "50": 2649590784.0, "55": 2649590784.0, "60": 2649590784.0, "65": 2649590784.0, "70": 2649590784.0, "75": 2649590784.0, "80": 2649590784.0, "85": 2649590784.0, "90": 2649590784.0, "95": 2649590784.0, "100": 2649590784.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.75021, "5": 0.15575, "10": 0.15818, "15": 0.15592, "20": 0.15584, "25": 0.15211, "30": 0.15253, "35": 0.15336, "40": 0.15465, "45": 0.1517, "50": 0.16501, "55": 0.16299, "60": 0.1657, "65": 0.16693, "70": 0.15946, 
"75": 0.15155, "80": 0.15175, "85": 0.15073, "90": 0.14954, "95": 0.14899, "100": 0.14722}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..0e82d88 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85285, "10": 10.78449, "15": 10.79226, "20": 10.69196, "25": 10.52317, "30": 10.34507, "35": 10.25889, "40": 10.07027, "45": 9.80301, "50": 9.87673, "55": 9.85527, "60": 9.46636, "65": 8.9166, "70": 9.69277, "75": 9.37814, "80": 9.368, "85": 9.57597, "90": 9.77245, "95": 9.46913, "100": 9.34575}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1559.0, "5": 1915.0, "10": 1361.0, "15": 1831.0, "20": 1695.0, "25": 1596.0, "30": 1821.0, "35": 1872.0, "40": 2121.0, "45": 2090.0, "50": 2395.0, "55": 2324.0, "60": 2357.0, "65": 2606.0, "70": 3130.0, "75": 2556.0, "80": 3224.0, "85": 3412.0, "90": 2988.0, "95": 3347.0, "100": 3383.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0, "55": 733859840.0, "60": 733859840.0, "65": 733859840.0, "70": 733859840.0, "75": 733859840.0, "80": 733859840.0, "85": 733859840.0, "90": 733859840.0, "95": 733859840.0, "100": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0, "55": 4122703872.0, "60": 4122703872.0, "65": 4122703872.0, "70": 4122703872.0, "75": 4122703872.0, "80": 4122703872.0, "85": 4122703872.0, "90": 4122703872.0, "95": 4122703872.0, "100": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 18.81818, "5": 0.15605, "10": 0.15612, "15": 0.15668, "20": 0.15734, "25": 0.15595, "30": 0.15634, "35": 0.15597, "40": 0.15654, "45": 0.15538, "50": 0.15456, "55": 0.15493, "60": 0.15593, "65": 0.15527, "70": 0.15564, "75": 0.15555, "80": 0.15422, "85": 0.1551, "90": 0.1533, "95": 0.15475, "100": 0.15459}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml index c31e5b6..48c87b5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} 
+ --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --ckpt-format: torch --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..e9992df --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81873, "5": 10.85262, "10": 10.78413, "15": 10.79311, "20": 10.69219, "25": 10.52454, "30": 10.34542, "35": 10.26245, "40": 10.07286, "45": 9.8112, "50": 9.88428, "55": 9.86376, "60": 9.47981, "65": 8.93093, "70": 9.71205, "75": 9.4002, "80": 9.39074, "85": 9.60143, "90": 9.8051, "95": 9.5081, "100": 9.39221}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1541.0, "5": 1912.0, "10": 1317.0, "15": 1921.0, "20": 1595.0, "25": 1666.0, "30": 1933.0, "35": 1920.0, "40": 2094.0, "45": 2101.0, "50": 2362.0, "55": 2269.0, "60": 2379.0, "65": 2624.0, "70": 3128.0, "75": 2551.0, "80": 3192.0, "85": 3503.0, "90": 2966.0, "95": 3326.0, "100": 3383.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 731763200.0, "5": 731763200.0, "10": 731763200.0, "15": 731763200.0, "20": 731763200.0, "25": 731763200.0, "30": 731763200.0, "35": 731763200.0, "40": 731763200.0, "45": 731763200.0, "50": 731763200.0, "55": 731763200.0, "60": 731763200.0, "65": 731763200.0, "70": 731763200.0, "75": 731763200.0, "80": 731763200.0, "85": 731763200.0, "90": 731763200.0, "95": 731763200.0, "100": 731763200.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2368927744.0, "5": 2649590784.0, "10": 2649590784.0, "15": 2649590784.0, "20": 2649590784.0, "25": 2649590784.0, "30": 2649590784.0, "35": 2649590784.0, "40": 2649590784.0, "45": 2649590784.0, "50": 2649590784.0, "55": 2649590784.0, "60": 2649590784.0, "65": 2649590784.0, "70": 2649590784.0, "75": 2649590784.0, "80": 2649590784.0, "85": 2649590784.0, "90": 2649590784.0, "95": 2649590784.0, "100": 2649590784.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.11545, "5": 0.1692, "10": 0.16514, "15": 0.16462, "20": 0.15963, "25": 0.16538, "30": 0.1555, "35": 0.15427, "40": 0.15505, "45": 0.15352, "50": 0.15187, "55": 0.15496, "60": 0.15039, "65": 0.15056, "70": 0.14765, "75": 0.15137, "80": 0.15663, "85": 0.16052, "90": 0.15557, "95": 0.15598, "100": 0.15273}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..0399a97 --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.79229, "20": 10.69211, "25": 10.52412, "30": 10.34552, "35": 10.26242, "40": 10.07239, "45": 9.811, "50": 9.88415, "55": 9.86374, "60": 9.47965, "65": 8.93065, "70": 9.71216, "75": 9.40049, "80": 9.39075, "85": 9.6014, "90": 9.80503, "95": 9.50817, "100": 9.39236}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1848.0, "20": 1601.0, "25": 1635.0, "30": 1908.0, "35": 1925.0, "40": 2126.0, "45": 2086.0, "50": 2298.0, "55": 2284.0, "60": 2337.0, "65": 2636.0, "70": 3136.0, "75": 2539.0, "80": 3253.0, "85": 3363.0, "90": 3004.0, "95": 3333.0, "100": 3447.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0, "55": 733859840.0, "60": 733859840.0, "65": 733859840.0, "70": 733859840.0, "75": 733859840.0, "80": 733859840.0, "85": 733859840.0, "90": 733859840.0, "95": 733859840.0, "100": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0, "55": 4122703872.0, "60": 4122703872.0, "65": 4122703872.0, "70": 4122703872.0, "75": 4122703872.0, "80": 4122703872.0, "85": 4122703872.0, "90": 4122703872.0, "95": 4122703872.0, "100": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22.44598, "5": 0.17072, "10": 0.16018, "15": 0.16147, "20": 0.15588, "25": 0.15643, "30": 0.15744, "35": 0.15702, "40": 0.15705, "45": 0.15718, "50": 0.15547, "55": 0.1569, "60": 0.1592, "65": 0.1591, "70": 0.15725, "75": 0.1566, "80": 0.15569, "85": 0.15565, "90": 0.15537, "95": 0.15899, "100": 0.15823}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml index 9b02b47..3438c77 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..8be7a44 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81873, "5": 10.85262, "10": 10.78413, "15": 10.79311, "20": 10.69219, "25": 10.52454, "30": 10.34542, "35": 10.26245, "40": 10.07286, "45": 9.8112, "50": 9.88428, "55": 9.86376, "60": 9.47981, "65": 8.93093, "70": 9.71205, "75": 9.4002, "80": 9.39074, "85": 9.60143, "90": 9.8051, "95": 9.5081, "100": 9.39221}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1541.0, "5": 1912.0, "10": 1317.0, "15": 1921.0, "20": 1595.0, "25": 1666.0, "30": 1933.0, "35": 1920.0, "40": 2094.0, "45": 2101.0, "50": 2362.0, "55": 2269.0, "60": 2379.0, "65": 2624.0, "70": 3128.0, "75": 2551.0, "80": 3192.0, "85": 3503.0, "90": 2966.0, "95": 3326.0, "100": 3383.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 731763200.0, "5": 731763200.0, "10": 731763200.0, "15": 731763200.0, "20": 731763200.0, "25": 731763200.0, "30": 731763200.0, "35": 731763200.0, "40": 731763200.0, "45": 731763200.0, "50": 731763200.0, "55": 731763200.0, "60": 731763200.0, "65": 731763200.0, "70": 731763200.0, "75": 731763200.0, "80": 731763200.0, "85": 731763200.0, "90": 731763200.0, "95": 731763200.0, "100": 731763200.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2368927744.0, "5": 2649590784.0, "10": 2649590784.0, "15": 2649590784.0, "20": 2649590784.0, "25": 2649590784.0, "30": 2649590784.0, "35": 2649590784.0, "40": 2649590784.0, "45": 2649590784.0, "50": 2649590784.0, "55": 2649590784.0, "60": 2649590784.0, "65": 2649590784.0, "70": 2649590784.0, "75": 2649590784.0, "80": 2649590784.0, "85": 2649590784.0, "90": 2649590784.0, "95": 2649590784.0, "100": 2649590784.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 20.40385, "5": 0.1579, "10": 0.15096, "15": 0.15266, "20": 0.15158, "25": 0.15033, "30": 0.15058, "35": 0.14902, "40": 0.14939, "45": 0.14907, "50": 0.14846, "55": 0.1513, "60": 0.15238, "65": 0.1517, "70": 0.15268, "75": 0.15317, "80": 0.15218, "85": 0.14985, "90": 0.15084, "95": 0.14835, "100": 0.14852}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b675cfd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.79229, "20": 10.69211, "25": 10.52412, "30": 10.34552, "35": 10.26242, "40": 10.07239, "45": 9.811, "50": 9.88415, "55": 9.86374, "60": 9.47965, "65": 8.93065, "70": 9.71216, "75": 9.40049, "80": 9.39075, "85": 9.6014, "90": 9.80503, "95": 9.50817, "100": 9.39236}}, "num-zeros": {"start_step": 1, "end_step": 
100, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1848.0, "20": 1601.0, "25": 1635.0, "30": 1908.0, "35": 1925.0, "40": 2126.0, "45": 2086.0, "50": 2298.0, "55": 2284.0, "60": 2337.0, "65": 2636.0, "70": 3136.0, "75": 2539.0, "80": 3253.0, "85": 3363.0, "90": 3004.0, "95": 3333.0, "100": 3447.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0, "55": 733859840.0, "60": 733859840.0, "65": 733859840.0, "70": 733859840.0, "75": 733859840.0, "80": 733859840.0, "85": 733859840.0, "90": 733859840.0, "95": 733859840.0, "100": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3838895104.0, "5": 4122703872.0, "10": 4122703872.0, "15": 4122703872.0, "20": 4122703872.0, "25": 4122703872.0, "30": 4122703872.0, "35": 4122703872.0, "40": 4122703872.0, "45": 4122703872.0, "50": 4122703872.0, "55": 4122703872.0, "60": 4122703872.0, "65": 4122703872.0, "70": 4122703872.0, "75": 4122703872.0, "80": 4122703872.0, "85": 4122703872.0, "90": 4122703872.0, "95": 4122703872.0, "100": 4122703872.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.54005, "5": 0.15749, "10": 0.15969, "15": 0.15843, "20": 0.15895, "25": 0.1586, "30": 0.15871, "35": 0.15794, "40": 0.1604, "45": 0.15841, "50": 0.15961, "55": 0.16456, "60": 0.16138, "65": 0.16027, "70": 0.16206, "75": 0.15997, "80": 0.16097, "85": 0.16718, "90": 0.16652, "95": 0.1684, "100": 0.16791}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index d98716a..06ff36e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -1,51 +1,53 @@ -ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 - NCCL_ALGO: Tree - CUBLAS_WORKSPACE_CONFIG: :4096:8 -MODEL_ARGS: - --num-layers: 12 - --hidden-size: 512 - --num-attention-heads: 8 - --log-params-norm: true - --log-num-zeros-in-grad: true - --log-validation-ppl-to-tensorboard: true - --log-timers-to-tensorboard: true - --tensorboard-dir: ${TENSORBOARD_PATH} - --micro-batch-size: 4 - --global-batch-size: 32 - --seq-length: 1024 - --max-position-embeddings: 1024 - --train-iters: 100 - --timing-log-level: 2 - --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} - --data-path: ${DATA_PATH}/my-gpt3_00_text_document - --vocab-file: ${DATA_PATH}/bpe/vocab.json - --merge-file: ${DATA_PATH}/bpe/merges.txt - --split: 949,50,1 - --distributed-backend: nccl - --lr: 0.00015 - --lr-decay-style: cosine - --min-lr: 1.0e-5 - --weight-decay: 1e-2 - --clip-grad: 1.0 - --lr-warmup-fraction: .01 - --log-interval: 1 - --save-interval: 50 - --eval-interval: 1000 - --eval-iters: 10 - --transformer-impl: transformer_engine - --tensor-model-parallel-size: 1 - --pipeline-model-parallel-size: 4 - --num-layers-per-virtual-pipeline-stage: 1 - --deterministic-mode: true - 
--no-gradient-accumulation-fusion: true - --attention-softmax-in-fp32: true - --use-checkpoint-opt_param-scheduler: true - --use-mcore-models: true - --ckpt-format: torch_dist - --data-cache-path: ${DATA_CACHE_PATH} - --bf16: true -TEST_TYPE: ckpt-resume +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-layers-per-virtual-pipeline-stage: 1 + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --ckpt-assume-constant-structure: true + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..5d54264 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81873, "5": 10.85262, "10": 10.78415, "15": 10.79311, "20": 10.69222, "25": 10.52454, "30": 10.34542, "35": 10.26242, "40": 10.07283, "45": 9.81123, "50": 9.88433, "55": 9.86374, "60": 9.47985, "65": 8.93093, "70": 9.71206, "75": 9.4002, "80": 9.39071, "85": 9.60143, "90": 9.80506, "95": 9.50809, "100": 9.39219}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1541.0, "5": 1912.0, "10": 1353.0, "15": 1917.0, "20": 1675.0, "25": 1730.0, "30": 1899.0, "35": 1951.0, "40": 2020.0, "45": 2040.0, "50": 2385.0, "55": 2263.0, "60": 2327.0, "65": 2612.0, "70": 3254.0, "75": 2613.0, "80": 3186.0, "85": 3386.0, "90": 3037.0, "95": 3302.0, "100": 3280.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 521926144.0, "5": 521926144.0, "10": 521926144.0, "15": 521926144.0, "20": 521926144.0, "25": 521926144.0, "30": 521926144.0, "35": 521926144.0, "40": 521926144.0, "45": 521926144.0, "50": 521926144.0, "55": 521926144.0, "60": 521926144.0, "65": 521926144.0, "70": 521926144.0, "75": 521926144.0, 
"80": 521926144.0, "85": 521926144.0, "90": 521926144.0, "95": 521926144.0, "100": 521926144.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2298877952.0, "5": 2440802304.0, "10": 2440802304.0, "15": 2440802304.0, "20": 2440802304.0, "25": 2440802304.0, "30": 2440802304.0, "35": 2440802304.0, "40": 2440802304.0, "45": 2440802304.0, "50": 2440802304.0, "55": 2440802304.0, "60": 2440802304.0, "65": 2440802304.0, "70": 2440802304.0, "75": 2440802304.0, "80": 2440802304.0, "85": 2440802304.0, "90": 2440802304.0, "95": 2440802304.0, "100": 2440802304.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.42431, "5": 0.15421, "10": 0.15404, "15": 0.15679, "20": 0.15514, "25": 0.1535, "30": 0.1545, "35": 0.15342, "40": 0.15339, "45": 0.15224, "50": 0.15191, "55": 0.14871, "60": 0.14706, "65": 0.14745, "70": 0.14606, "75": 0.1482, "80": 0.14783, "85": 0.15003, "90": 0.14935, "95": 0.15271, "100": 0.16034}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..a68fea4 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.7923, "20": 10.69211, "25": 10.52414, "30": 10.34555, "35": 10.2624, "40": 10.07237, "45": 9.81103, "50": 9.88417, "55": 9.86375, "60": 9.47966, "65": 8.93063, "70": 9.71218, "75": 9.40046, "80": 9.39077, "85": 9.60141, "90": 9.80504, "95": 9.50823, "100": 9.39237}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1850.0, "20": 1668.0, "25": 1607.0, "30": 1945.0, "35": 1860.0, "40": 2022.0, "45": 2042.0, "50": 2292.0, "55": 2273.0, "60": 2355.0, "65": 2674.0, "70": 3184.0, "75": 2582.0, "80": 3237.0, "85": 3377.0, "90": 2972.0, "95": 3318.0, "100": 3514.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 523003904.0, "5": 523003904.0, "10": 523003904.0, "15": 523003904.0, "20": 523003904.0, "25": 523003904.0, "30": 523003904.0, "35": 523003904.0, "40": 523003904.0, "45": 523003904.0, "50": 523003904.0, "55": 523003904.0, "60": 523003904.0, "65": 523003904.0, "70": 523003904.0, "75": 523003904.0, "80": 523003904.0, "85": 523003904.0, "90": 523003904.0, "95": 523003904.0, "100": 523003904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3768872960.0, "5": 3912765440.0, "10": 3912765440.0, "15": 3912765440.0, "20": 3912765440.0, "25": 3912765440.0, "30": 3912765440.0, "35": 3912765440.0, "40": 3912765440.0, "45": 3912765440.0, "50": 3912765440.0, "55": 3912765440.0, "60": 3912765440.0, "65": 3912765440.0, "70": 3912765440.0, "75": 3912765440.0, "80": 3912765440.0, "85": 3912765440.0, "90": 3912765440.0, "95": 3912765440.0, "100": 3912765440.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.02969, "5": 0.15735, "10": 0.16072, "15": 0.15723, "20": 0.15745, "25": 0.15826, 
"30": 0.15964, "35": 0.16023, "40": 0.15616, "45": 0.15487, "50": 0.15469, "55": 0.1613, "60": 0.16121, "65": 0.1622, "70": 0.1599, "75": 0.15976, "80": 0.16152, "85": 0.16061, "90": 0.15993, "95": 0.15988, "100": 0.1599}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index 92b2e35..a52efdf 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..4c84e32 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81873, "5": 10.85262, "10": 10.78415, "15": 10.79311, "20": 10.69222, "25": 10.52454, "30": 10.34542, "35": 10.26242, "40": 10.07283, "45": 9.81123, "50": 9.88433, "55": 9.86374, "60": 9.47985, "65": 8.93093, "70": 9.71206, "75": 9.4002, "80": 9.39071, "85": 9.60143, "90": 9.80506, "95": 9.50809, "100": 9.39219}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1541.0, "5": 1912.0, "10": 1353.0, "15": 1917.0, "20": 1675.0, "25": 1730.0, "30": 1899.0, "35": 1951.0, "40": 2020.0, "45": 2040.0, "50": 2385.0, "55": 2263.0, "60": 2327.0, "65": 2612.0, "70": 3254.0, "75": 2613.0, "80": 3186.0, "85": 3386.0, "90": 3037.0, "95": 3302.0, "100": 3280.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 521926144.0, "5": 522974720.0, "10": 522974720.0, "15": 522974720.0, "20": 522974720.0, "25": 522974720.0, "30": 522974720.0, "35": 522974720.0, "40": 522974720.0, "45": 522974720.0, "50": 522974720.0, "55": 522974720.0, "60": 522974720.0, "65": 522974720.0, "70": 522974720.0, "75": 522974720.0, "80": 522974720.0, "85": 522974720.0, "90": 522974720.0, "95": 522974720.0, "100": 522974720.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2298877952.0, "5": 2440802304.0, "10": 2440802304.0, "15": 2440802304.0, "20": 2440802304.0, "25": 2440802304.0, "30": 2440802304.0, "35": 
2440802304.0, "40": 2440802304.0, "45": 2440802304.0, "50": 2440802304.0, "55": 2440802304.0, "60": 2440802304.0, "65": 2440802304.0, "70": 2440802304.0, "75": 2440802304.0, "80": 2440802304.0, "85": 2440802304.0, "90": 2440802304.0, "95": 2440802304.0, "100": 2440802304.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.79264, "5": 0.15187, "10": 0.15534, "15": 0.15767, "20": 0.15499, "25": 0.1556, "30": 0.15402, "35": 0.15483, "40": 0.15282, "45": 0.15494, "50": 0.15426, "55": 0.1549, "60": 0.15835, "65": 0.15515, "70": 0.15423, "75": 0.15522, "80": 0.15525, "85": 0.15444, "90": 0.15344, "95": 0.15532, "100": 0.15381}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..26aaebb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.82005, "5": 10.85284, "10": 10.78455, "15": 10.7923, "20": 10.69211, "25": 10.52414, "30": 10.34555, "35": 10.2624, "40": 10.07237, "45": 9.81103, "50": 9.88417, "55": 9.86375, "60": 9.47966, "65": 8.93063, "70": 9.71218, "75": 9.40046, "80": 9.39077, "85": 9.60141, "90": 9.80504, "95": 9.50823, "100": 9.39237}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1559.0, "5": 1840.0, "10": 1380.0, "15": 1850.0, "20": 1668.0, "25": 1607.0, "30": 1945.0, "35": 1860.0, "40": 2022.0, "45": 2042.0, "50": 2292.0, "55": 2273.0, "60": 2355.0, "65": 2674.0, "70": 3184.0, "75": 2582.0, "80": 3237.0, "85": 3377.0, "90": 2972.0, "95": 3318.0, "100": 3514.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 523003904.0, "5": 523003904.0, "10": 523003904.0, "15": 523003904.0, "20": 523003904.0, "25": 523003904.0, "30": 523003904.0, "35": 523003904.0, "40": 523003904.0, "45": 523003904.0, "50": 523003904.0, "55": 523003904.0, "60": 523003904.0, "65": 523003904.0, "70": 523003904.0, "75": 523003904.0, "80": 523003904.0, "85": 523003904.0, "90": 523003904.0, "95": 523003904.0, "100": 523003904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3768872960.0, "5": 3912765440.0, "10": 3912765440.0, "15": 3912765440.0, "20": 3912765440.0, "25": 3912765440.0, "30": 3912765440.0, "35": 3912765440.0, "40": 3912765440.0, "45": 3912765440.0, "50": 3912765440.0, "55": 3912765440.0, "60": 3912765440.0, "65": 3912765440.0, "70": 3912765440.0, "75": 3912765440.0, "80": 3912765440.0, "85": 3912765440.0, "90": 3912765440.0, "95": 3912765440.0, "100": 3912765440.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.5918, "5": 0.16302, "10": 0.16601, "15": 0.16426, "20": 0.16354, "25": 0.16043, "30": 0.16113, "35": 0.16179, "40": 0.15952, "45": 0.15947, "50": 0.15972, "55": 0.1647, "60": 0.16566, "65": 0.16575, "70": 0.16359, "75": 0.16555, "80": 0.16349, "85": 0.16459, "90": 0.1641, "95": 0.16549, "100": 0.16374}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 1f2fa9e..50bf62a 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -1,56 +1,59 @@ -ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 - NCCL_ALGO: Tree - CUBLAS_WORKSPACE_CONFIG: :4096:8 -MODEL_ARGS: - --num-layers: 12 - --hidden-size: 512 - --num-attention-heads: 8 - --log-params-norm: true - --log-num-zeros-in-grad: true - --log-validation-ppl-to-tensorboard: true - --log-timers-to-tensorboard: true - --tensorboard-dir: ${TENSORBOARD_PATH} - --micro-batch-size: 4 - --global-batch-size: 32 - --seq-length: 1024 - --max-position-embeddings: 1024 - --train-iters: 100 - --timing-log-level: 2 - --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} - --data-path: ${DATA_PATH}/my-gpt3_00_text_document - --vocab-file: ${DATA_PATH}/bpe/vocab.json - --merge-file: ${DATA_PATH}/bpe/merges.txt - --split: 949,50,1 - --distributed-backend: nccl - --lr: 0.00015 - --lr-decay-style: cosine - --min-lr: 1.0e-5 - --weight-decay: 1e-2 - --clip-grad: 1.0 - --lr-warmup-fraction: .01 - --log-interval: 1 - --save-interval: 50 - --eval-interval: 1000 - --eval-iters: 10 - --transformer-impl: transformer_engine - --tensor-model-parallel-size: 1 - --pipeline-model-parallel-size: 4 - --num-layers-per-virtual-pipeline-stage: 1 - --use-distributed-optimizer: true - --overlap-grad-reduce: true - --overlap-param-gather: true - --check-weight-hash-across-dp-replicas-interval: 10 - --ckpt-fully-parallel-load: true - --deterministic-mode: true - --no-gradient-accumulation-fusion: true - --attention-softmax-in-fp32: true - --use-checkpoint-opt_param-scheduler: true - --use-mcore-models: true - --ckpt-format: torch_dist - --data-cache-path: ${DATA_CACHE_PATH} - --bf16: true -TEST_TYPE: ckpt-resume +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + 
--num-layers-per-virtual-pipeline-stage: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + --check-weight-hash-across-dp-replicas-interval: 10 + --disable-gloo-process-groups: true + --ckpt-fully-parallel-load: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --ckpt-assume-constant-structure: true + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..10e88cf --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.93652, "5": 10.93219, "10": 10.91159, "15": 10.85668, "20": 10.77161, "25": 10.60544, "30": 10.40595, "35": 10.31396, "40": 10.12361, "45": 9.87606, "50": 9.94483, "55": 9.90094, "60": 9.5526, "65": 8.96804, "70": 9.77858, "75": 9.44577, "80": 9.4199, "85": 9.64322, "90": 9.85834, "95": 9.52082, "100": 9.43404}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22727554.0, "5": 22715260.0, "10": 22919068.0, "15": 22821164.0, "20": 22693678.0, "25": 22819604.0, "30": 22631168.0, "35": 22787934.0, "40": 22658232.0, "45": 22674504.0, "50": 22904460.0, "55": 22519162.0, "60": 22743128.0, "65": 23060980.0, "70": 22829344.0, "75": 23053962.0, "80": 22707280.0, "85": 22712296.0, "90": 22971840.0, "95": 23047794.0, "100": 23015940.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 520891904.0, "5": 520891904.0, "10": 520891904.0, "15": 520891904.0, "20": 520891904.0, "25": 520891904.0, "30": 520891904.0, "35": 520891904.0, "40": 520891904.0, "45": 520891904.0, "50": 520891904.0, "55": 520891904.0, "60": 520891904.0, "65": 520891904.0, "70": 520891904.0, "75": 520891904.0, "80": 520891904.0, "85": 520891904.0, "90": 520891904.0, "95": 520891904.0, "100": 520891904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2299948032.0, "5": 2439768064.0, "10": 2439768064.0, "15": 2439768064.0, "20": 2439768064.0, "25": 2439768064.0, "30": 2439768064.0, "35": 2439768064.0, "40": 2439768064.0, "45": 2439768064.0, "50": 2439768064.0, "55": 2439768064.0, "60": 2439768064.0, "65": 2439768064.0, "70": 2439768064.0, "75": 2439768064.0, "80": 2439768064.0, "85": 2439768064.0, "90": 2439768064.0, "95": 2439768064.0, "100": 2439768064.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.00303, "5": 0.15968, "10": 0.16109, "15": 0.15942, "20": 0.15611, "25": 0.15603, "30": 0.15632, "35": 0.15548, "40": 0.15633, "45": 0.15576, "50": 0.15591, "55": 0.15317, "60": 0.1529, "65": 0.15213, "70": 0.15024, "75": 0.15269, "80": 0.15331, "85": 0.15295, "90": 0.15136, "95": 0.15353, "100": 0.15201}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..3f684fa --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.9359, "5": 10.93225, "10": 10.91081, "15": 10.85723, "20": 10.77091, "25": 10.60558, "30": 10.40544, "35": 10.31364, "40": 10.12333, "45": 9.8756, "50": 9.94451, "55": 9.90089, "60": 9.55236, "65": 8.96792, "70": 9.77832, "75": 9.44604, "80": 9.4201, "85": 9.64321, "90": 9.85827, "95": 9.52085, "100": 9.43416}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22727686.0, "5": 22715312.0, "10": 22919004.0, "15": 22821282.0, "20": 22693812.0, "25": 22819580.0, "30": 22631132.0, "35": 22787906.0, "40": 22658304.0, "45": 22674764.0, "50": 22904438.0, "55": 22519056.0, "60": 22743204.0, "65": 23060980.0, "70": 22829348.0, "75": 23054184.0, "80": 22707228.0, "85": 22712172.0, "90": 22971870.0, "95": 23047656.0, "100": 23016066.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 523016192.0, "5": 523016192.0, "10": 523016192.0, "15": 523016192.0, "20": 523016192.0, "25": 523016192.0, "30": 523016192.0, "35": 523016192.0, "40": 523016192.0, "45": 523016192.0, "50": 523016192.0, "55": 523016192.0, "60": 523016192.0, "65": 523016192.0, "70": 523016192.0, "75": 523016192.0, "80": 523016192.0, "85": 523016192.0, "90": 523016192.0, "95": 523016192.0, "100": 523016192.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3769943040.0, "5": 3914774528.0, "10": 3914774528.0, "15": 3914774528.0, "20": 3914774528.0, "25": 3914774528.0, "30": 3914774528.0, "35": 3914774528.0, "40": 3914774528.0, "45": 3914774528.0, "50": 3914774528.0, "55": 3914774528.0, "60": 3914774528.0, "65": 3914774528.0, "70": 3914774528.0, "75": 3914774528.0, "80": 3914774528.0, "85": 3914774528.0, "90": 3914774528.0, "95": 3914774528.0, "100": 3914774528.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.01453, "5": 0.166, "10": 0.16623, "15": 0.16232, "20": 0.16292, "25": 0.16015, "30": 0.15928, "35": 0.15947, "40": 0.15937, "45": 0.16129, "50": 0.15757, "55": 0.16029, "60": 0.15889, "65": 0.15795, "70": 0.15758, "75": 0.15718, "80": 0.15858, "85": 0.15639, "90": 0.15635, "95": 0.15626, "100": 0.1578}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml index 49865dd..d97a959 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G/model_config.yaml 
@@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..b26dc38 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81112, "5": 10.85474, "10": 10.88269, "15": 10.80707, "20": 10.63254, "25": 10.47352, "30": 10.39308, "35": 9.96795, "40": 10.10006, "45": 9.6834, "50": 9.92476, "55": 9.98142, "60": 9.3523, "65": 9.70184, "70": 9.73813, "75": 8.95596, "80": 9.31468, "85": 8.97866, "90": 9.55803, "95": 9.13519, "100": 9.21084}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1011.0, "5": 1122.0, "10": 1338.0, "15": 1172.0, "20": 1143.0, "25": 1122.0, "30": 1527.0, "35": 1239.0, "40": 1436.0, "45": 1565.0, "50": 1754.0, "55": 1787.0, "60": 1768.0, "65": 2460.0, "70": 2516.0, "75": 2011.0, "80": 1970.0, "85": 2220.0, "90": 2181.0, "95": 2486.0, "100": 1974.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 731763200.0, "5": 731763200.0, "10": 731763200.0, "15": 731763200.0, "20": 731763200.0, "25": 731763200.0, "30": 731763200.0, "35": 731763200.0, "40": 731763200.0, "45": 731763200.0, "50": 731763200.0, "55": 731763200.0, "60": 731763200.0, "65": 731763200.0, "70": 731763200.0, "75": 731763200.0, "80": 731763200.0, "85": 731763200.0, "90": 731763200.0, "95": 731763200.0, "100": 731763200.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2516124160.0, "5": 2796787200.0, "10": 2796787200.0, "15": 2796787200.0, "20": 2796787200.0, "25": 2796787200.0, "30": 2796787200.0, "35": 2796787200.0, "40": 2796787200.0, "45": 2796787200.0, "50": 2796787200.0, "55": 2796787200.0, "60": 2796787200.0, "65": 2796787200.0, "70": 2796787200.0, "75": 2796787200.0, "80": 2796787200.0, "85": 2796787200.0, "90": 2796787200.0, "95": 2796787200.0, "100": 2796787200.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.34358, "5": 0.33658, "10": 0.32993, "15": 0.32841, "20": 0.32798, "25": 0.33006, "30": 0.3274, "35": 0.32586, "40": 0.32487, "45": 0.32557, "50": 0.32425, "55": 0.33341, "60": 0.33044, "65": 0.32759, "70": 0.32883, "75": 0.33041, "80": 0.33042, "85": 0.33049, "90": 0.32813, "95": 0.33174, "100": 0.32817}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 
0000000..47627a5 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81184, "5": 10.85464, "10": 10.88256, "15": 10.80679, "20": 10.63196, "25": 10.47374, "30": 10.39285, "35": 9.96791, "40": 10.1, "45": 9.68346, "50": 9.92463, "55": 9.98132, "60": 9.3523, "65": 9.7021, "70": 9.73808, "75": 8.95617, "80": 9.31499, "85": 8.97886, "90": 9.5581, "95": 9.13527, "100": 9.21091}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1125.0, "5": 1255.0, "10": 1367.0, "15": 1127.0, "20": 1082.0, "25": 1114.0, "30": 1558.0, "35": 1292.0, "40": 1433.0, "45": 1637.0, "50": 1779.0, "55": 1819.0, "60": 1851.0, "65": 2490.0, "70": 2549.0, "75": 1996.0, "80": 1939.0, "85": 2175.0, "90": 2179.0, "95": 2519.0, "100": 2013.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0, "55": 733859840.0, "60": 733859840.0, "65": 733859840.0, "70": 733859840.0, "75": 733859840.0, "80": 733859840.0, "85": 733859840.0, "90": 733859840.0, "95": 733859840.0, "100": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4312984064.0, "5": 4596792832.0, "10": 4596792832.0, "15": 4596792832.0, "20": 4596792832.0, "25": 4596792832.0, "30": 4596792832.0, "35": 4596792832.0, "40": 4596792832.0, "45": 4596792832.0, "50": 4596792832.0, "55": 4596792832.0, "60": 4596792832.0, "65": 4596792832.0, "70": 4596792832.0, "75": 4596792832.0, "80": 4596792832.0, "85": 4596792832.0, "90": 4596792832.0, "95": 4596792832.0, "100": 4596792832.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.08432, "5": 0.34938, "10": 0.3472, "15": 0.34809, "20": 0.35058, "25": 0.34829, "30": 0.34775, "35": 0.34968, "40": 0.34886, "45": 0.34665, "50": 0.34852, "55": 0.35034, "60": 0.34955, "65": 0.34833, "70": 0.35115, "75": 0.35006, "80": 0.35114, "85": 0.3487, "90": 0.35045, "95": 0.3489, "100": 0.35007}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml index 49bd5f9..17c0094 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json index a03d56c..452933c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81184, 10.84052, 10.8763, 10.79906, 10.68214, 10.59702, 10.49258, 10.11236, 10.12393, 9.98165]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1118.0, 1331.0, 1230.0, 1085.0, 1180.0, 1245.0, 1454.0, 1330.0, 1752.0, 1851.0]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [17.24286, 0.35341, 0.35187, 0.35028, 0.34941, 0.35093, 0.3488, 0.35179, 0.34905, 0.34684]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81184, "5": 10.85467, "10": 10.88256, "15": 10.80682, "20": 10.63195, "25": 10.47372, "30": 10.39284, "35": 9.96785, "40": 10.09999, "45": 9.68342, "50": 9.92465}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1118.0, "5": 1229.0, "10": 1289.0, "15": 1125.0, "20": 1090.0, "25": 1110.0, "30": 1431.0, "35": 1132.0, "40": 1472.0, "45": 1544.0, "50": 1737.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4312984064.0, "5": 4596792832.0, "10": 4596792832.0, "15": 4596792832.0, "20": 4596792832.0, "25": 4596792832.0, "30": 4596792832.0, "35": 4596792832.0, "40": 4596792832.0, "45": 4596792832.0, "50": 4596792832.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.07311, "5": 0.3501, "10": 0.3498, "15": 0.34818, "20": 0.34849, "25": 0.34922, "30": 0.35192, "35": 0.35181, "40": 0.34504, "45": 0.34967, "50": 0.3384}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json index 91c3ae6..99728e4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.81184, 10.84052, 10.87624, 10.79904, 10.68212, 10.59698, 10.49257, 10.11232, 10.12396, 9.98163]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1125.0, 1304.0, 1252.0, 1102.0, 1201.0, 1200.0, 1489.0, 1395.0, 1677.0, 1867.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1125.0, 1304.0, 1252.0, 1102.0, 1201.0, 1200.0, 1489.0, 1395.0, 1677.0, 1867.0]}, "iteration-time": {"start_step": 0, "end_step": 
50, "step_interval": 5, "values": [22.22011, 0.36082, 0.35927, 0.35627, 0.35901, 0.35008, 0.34828, 0.34774, 0.35145, 0.35141]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81184, "5": 10.85464, "10": 10.88256, "15": 10.80679, "20": 10.63196, "25": 10.47374, "30": 10.39285, "35": 9.96791, "40": 10.1, "45": 9.68346, "50": 9.92463}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1125.0, "5": 1255.0, "10": 1367.0, "15": 1127.0, "20": 1082.0, "25": 1114.0, "30": 1558.0, "35": 1292.0, "40": 1433.0, "45": 1637.0, "50": 1779.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 733859840.0, "5": 733859840.0, "10": 733859840.0, "15": 733859840.0, "20": 733859840.0, "25": 733859840.0, "30": 733859840.0, "35": 733859840.0, "40": 733859840.0, "45": 733859840.0, "50": 733859840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 4312984064.0, "5": 4596792832.0, "10": 4596792832.0, "15": 4596792832.0, "20": 4596792832.0, "25": 4596792832.0, "30": 4596792832.0, "35": 4596792832.0, "40": 4596792832.0, "45": 4596792832.0, "50": 4596792832.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.10993, "5": 0.35468, "10": 0.57928, "15": 0.34914, "20": 0.34779, "25": 0.34849, "30": 0.3476, "35": 0.34849, "40": 0.34839, "45": 0.34576, "50": 0.34689}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml index 3e896f0..db584a3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json index 551870d..963a2ee 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.9735, - 10.96043, - 10.95577, - 10.91036, - 10.78792, - 10.71198, - 10.22428, - 10.28927, - 10.19052, - 9.86378 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 22727056.0, - 23021982.0, - 22501104.0, - 22831164.0, - 22740086.0, - 22547896.0, - 22955344.0, - 22589272.0, - 22658866.0, - 
22885040.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 13.92799, - 0.16275, - 0.16118, - 0.16212, - 0.16165, - 0.16181, - 0.16104, - 0.16149, - 0.16151, - 0.16055 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.9735, "5": 10.95597, "10": 10.94991, "15": 10.91152, "20": 10.80976, "25": 10.6662, "30": 10.45503, "35": 10.33419, "40": 10.1465, "45": 9.89112, "50": 9.95256}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727056.0, "5": 22714202.0, "10": 22918312.0, "15": 22821034.0, "20": 22694248.0, "25": 22819602.0, "30": 22631112.0, "35": 22787556.0, "40": 22658080.0, "45": 22674612.0, "50": 22905288.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 658766848.0, "5": 658766848.0, "10": 657718272.0, "15": 658766848.0, "20": 657718272.0, "25": 658766848.0, "30": 657718272.0, "35": 658766848.0, "40": 657718272.0, "45": 658766848.0, "50": 657718272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2129712128.0, "5": 2386204672.0, "10": 2386204672.0, "15": 2386204672.0, "20": 2386204672.0, "25": 2386204672.0, "30": 2386204672.0, "35": 2386204672.0, "40": 2386204672.0, "45": 2386204672.0, "50": 2386204672.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.12421, "5": 0.15186, "10": 0.15206, "15": 0.15102, "20": 0.14853, "25": 0.14759, "30": 0.14812, "35": 0.14831, "40": 0.14675, "45": 0.14841, "50": 0.14702}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json index 48bbcc3..a994c22 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.9735, 10.96043, 10.95576, 10.91038, 10.78791, 10.71201, 10.22424, 10.28926, 10.19049, 9.86378]},"num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [22727052.0, 23021930.0, 22501022.0, 22831208.0, 22740024.0, 22547916.0, 22955210.0, 22589344.0, 22658940.0, 22884970.0]},"iteration_timing_avg": 0.1367805882352941} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.9735, "5": 10.95594, "10": 10.94989, "15": 10.9115, "20": 10.80975, "25": 10.66619, "30": 10.45505, "35": 10.3342, "40": 10.14647, "45": 9.8911, "50": 9.95258}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22727052.0, "5": 22714228.0, "10": 22918376.0, "15": 22820932.0, "20": 22694228.0, "25": 22819504.0, "30": 22631112.0, "35": 22787612.0, "40": 22658002.0, "45": 22674598.0, "50": 22905310.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 658766848.0, "5": 658766848.0, "10": 657718272.0, "15": 658766848.0, "20": 657718272.0, "25": 658766848.0, "30": 657718272.0, "35": 658766848.0, "40": 657718272.0, "45": 658766848.0, "50": 657718272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 2129712128.0, "5": 2386204672.0, "10": 2386204672.0, "15": 2386204672.0, "20": 2386204672.0, "25": 2386204672.0, "30": 2386204672.0, "35": 2386204672.0, "40": 2386204672.0, "45": 2386204672.0, "50": 2386204672.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 16.33012, "5": 0.15907, "10": 0.15749, "15": 0.15792, "20": 0.1558, "25": 0.15554, "30": 0.15634, "35": 0.15618, "40": 0.15564, "45": 0.15575, "50": 0.15527}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml index f17824f..a3fe6c4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --decoder-first-pipeline-num-layers: 2 --decoder-last-pipeline-num-layers: 2 --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..d79cde2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84528, "5": 10.87389, "10": 10.83534, "15": 10.84228, "20": 10.80169, "25": 10.71147, "30": 10.58469, "35": 10.53285, "40": 10.35655, "45": 10.13836, "50": 10.20006}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22726240.0, "5": 22713916.0, "10": 22918018.0, "15": 22821640.0, "20": 22693640.0, "25": 22819132.0, "30": 22630774.0, "35": 22787542.0, "40": 22657576.0, "45": 22674502.0, "50": 22904024.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 612510208.0, "5": 612510208.0, "10": 612510208.0, "15": 612510208.0, "20": 612510208.0, "25": 612510208.0, "30": 612510208.0, "35": 612510208.0, "40": 612510208.0, "45": 612510208.0, "50": 612510208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2582212096.0, "5": 2814534144.0, "10": 2814534144.0, "15": 2814534144.0, "20": 2814534144.0, "25": 2814534144.0, "30": 2814534144.0, "35": 2814534144.0, "40": 2814534144.0, "45": 2814534144.0, "50": 2814534144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.96965, "5": 0.1128, "10": 0.11014, "15": 0.10922, "20": 0.10959, "25": 0.10952, "30": 0.10989, "35": 0.1097, "40": 0.10959, "45": 0.10943, "50": 0.10775}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..e47c00b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.84528, "5": 10.87387, "10": 10.83535, "15": 10.84228, "20": 10.80165, "25": 10.71151, "30": 10.58471, "35": 10.53289, "40": 10.35653, "45": 10.13837, "50": 10.20009}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 22726236.0, "5": 22714030.0, "10": 22918036.0, "15": 22821524.0, "20": 22693660.0, "25": 22819084.0, "30": 22630708.0, "35": 22787594.0, "40": 22657484.0, "45": 22674484.0, "50": 22904176.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 612509696.0, "5": 612509696.0, "10": 612509696.0, "15": 612509696.0, "20": 612509696.0, "25": 612509696.0, "30": 612509696.0, "35": 612509696.0, "40": 612509696.0, "45": 612509696.0, "50": 612509696.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2582211584.0, "5": 2814533632.0, "10": 2814533632.0, "15": 2814533632.0, "20": 2814533632.0, "25": 2814533632.0, "30": 2814533632.0, "35": 2814533632.0, "40": 2814533632.0, "45": 2814533632.0, "50": 2814533632.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.55226, "5": 0.1078, "10": 0.10806, "15": 0.10761, "20": 0.1068, "25": 0.10715, "30": 0.10806, "35": 0.10926, "40": 0.10666, "45": 0.10734, "50": 0.10631}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..9d3aabb --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,55 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 6 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: 
transformer_engine + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 4 + --num-virtual-stages-per-pipeline-rank: 2 + --untie-embeddings-and-output-weights: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --account-for-embedding-in-pipeline-split: true + --account-for-loss-in-pipeline-split: true + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json index b87c0bc..260ef2d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.88759, 10.90846, 10.88099, 10.84518, 10.69285, 10.6019, 10.09544, 10.18239, 10.08764, 9.76749]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [578.0, 659.0, 683.0, 700.0, 697.0, 620.0, 572.0, 774.0, 807.0, 837.0]}, "iteration_timing_avg": 0.3462723529411765} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.88763, "5": 10.90192, "10": 10.86851, "15": 10.84829, "20": 10.71772, "25": 10.54273, "30": 10.3364, "35": 10.23974, "40": 10.03266, "45": 9.76819, "50": 9.85322}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 577.0, "5": 644.0, "10": 518.0, "15": 676.0, "20": 606.0, "25": 622.0, "30": 689.0, "35": 699.0, "40": 805.0, "45": 804.0, "50": 913.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 609664512.0, "5": 609664512.0, "10": 609664512.0, "15": 609664512.0, "20": 609664512.0, "25": 609664512.0, "30": 609664512.0, "35": 609664512.0, "40": 609664512.0, "45": 609664512.0, "50": 609664512.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 879175168.0, "5": 1140484608.0, "10": 1140484608.0, "15": 1140484608.0, "20": 1140484608.0, "25": 1140484608.0, "30": 1140484608.0, "35": 1140484608.0, "40": 1140484608.0, "45": 1140484608.0, "50": 1140484608.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.90893, "5": 0.35294, "10": 0.35602, "15": 0.3519, "20": 0.35157, "25": 0.3699, "30": 0.35336, "35": 0.34883, "40": 0.3478, "45": 0.34674, "50": 0.34963}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json index b87c0bc..2d3bc8f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.88759, 10.90846, 10.88099, 10.84518, 10.69285, 10.6019, 
10.09544, 10.18239, 10.08764, 9.76749]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [578.0, 659.0, 683.0, 700.0, 697.0, 620.0, 572.0, 774.0, 807.0, 837.0]}, "iteration_timing_avg": 0.3462723529411765} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.88759, "5": 10.90193, "10": 10.86849, "15": 10.84832, "20": 10.71768, "25": 10.54266, "30": 10.33646, "35": 10.23975, "40": 10.03266, "45": 9.7682, "50": 9.85323}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 553.0, "5": 688.0, "10": 557.0, "15": 645.0, "20": 574.0, "25": 618.0, "30": 663.0, "35": 707.0, "40": 809.0, "45": 784.0, "50": 870.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 610712576.0, "5": 610712576.0, "10": 610712576.0, "15": 610712576.0, "20": 610712576.0, "25": 610712576.0, "30": 610712576.0, "35": 610712576.0, "40": 610712576.0, "45": 610712576.0, "50": 610712576.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 880246784.0, "5": 1141557248.0, "10": 1141557248.0, "15": 1141557248.0, "20": 1141557248.0, "25": 1142590976.0, "30": 1142590976.0, "35": 1142590976.0, "40": 1142604800.0, "45": 1142604800.0, "50": 1143653888.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.27199, "5": 0.32916, "10": 0.3329, "15": 0.34423, "20": 0.32819, "25": 0.33221, "30": 0.32676, "35": 0.3233, "40": 0.34223, "45": 0.32454, "50": 0.32425}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index 97b7669..7aebe4d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..957acf3 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.80181, "5": 10.84284, "10": 10.78173, "15": 10.80418, "20": 10.7314, "25": 10.57558, "30": 10.43631, "35": 10.34347, "40": 10.17318, "45": 9.94245, "50": 10.00163, "55": 9.94872, "60": 9.59802, "65": 9.02299, "70": 9.78149, "75": 9.4886, "80": 9.45936, "85": 
9.6529, "90": 9.84596, "95": 9.55834, "100": 9.43841}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30687.0, "5": 35697.0, "10": 30112.0, "15": 35251.0, "20": 32966.0, "25": 31233.0, "30": 33087.0, "35": 34941.0, "40": 36233.0, "45": 35628.0, "50": 39783.0, "55": 37089.0, "60": 40650.0, "65": 41057.0, "70": 45337.0, "75": 39742.0, "80": 47699.0, "85": 49328.0, "90": 49103.0, "95": 48497.0, "100": 45560.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 865126912.0, "5": 865125376.0, "10": 865125376.0, "15": 865125888.0, "20": 865122816.0, "25": 865126400.0, "30": 865127424.0, "35": 865127936.0, "40": 865125376.0, "45": 865123328.0, "50": 865125888.0, "55": 865128448.0, "60": 865129472.0, "65": 865143808.0, "70": 865128960.0, "75": 865125888.0, "80": 865140736.0, "85": 865142272.0, "90": 865127936.0, "95": 865128448.0, "100": 865138176.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1867287040.0, "5": 2107624448.0, "10": 2107624448.0, "15": 2107624448.0, "20": 2112163840.0, "25": 2112163840.0, "30": 2112163840.0, "35": 2114877952.0, "40": 2114877952.0, "45": 2114877952.0, "50": 2116962304.0, "55": 2116962304.0, "60": 2127979520.0, "65": 2132691456.0, "70": 2132691456.0, "75": 2132691456.0, "80": 2133346816.0, "85": 2133346816.0, "90": 2135661568.0, "95": 2135661568.0, "100": 2135661568.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.44965, "5": 0.41295, "10": 0.40802, "15": 0.412, "20": 0.40527, "25": 0.40355, "30": 0.40301, "35": 0.40486, "40": 0.61232, "45": 0.40736, "50": 0.40454, "55": 0.40626, "60": 0.40914, "65": 0.41527, "70": 0.40692, "75": 0.41524, "80": 0.41678, "85": 0.41236, "90": 0.41594, "95": 0.4113, "100": 0.41746}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..dedf905 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79987, "5": 10.85021, "10": 10.78437, "15": 10.80402, "20": 10.74018, "25": 10.57365, "30": 10.43064, "35": 10.34542, "40": 10.17702, "45": 9.94116, "50": 10.00138, "55": 9.94734, "60": 9.5942, "65": 9.02239, "70": 9.781, "75": 9.48705, "80": 9.4551, "85": 9.65724, "90": 9.84458, "95": 9.55632, "100": 9.44025}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30784.0, "5": 35580.0, "10": 30083.0, "15": 35706.0, "20": 32807.0, "25": 30763.0, "30": 32985.0, "35": 34748.0, "40": 36348.0, "45": 36297.0, "50": 39908.0, "55": 37140.0, "60": 40211.0, "65": 40766.0, "70": 45683.0, "75": 40504.0, "80": 47991.0, "85": 48935.0, "90": 49292.0, "95": 48929.0, "100": 46758.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 865156096.0, "5": 865154560.0, "10": 865157632.0, "15": 865156608.0, "20": 865153024.0, "25": 865157120.0, "30": 865158656.0, "35": 865157632.0, "40": 865155072.0, "45": 865155584.0, "50": 865156608.0, "55": 
865158144.0, "60": 865160704.0, "65": 865175552.0, "70": 865159680.0, "75": 865157632.0, "80": 865171456.0, "85": 865173504.0, "90": 865158144.0, "95": 865159168.0, "100": 865167360.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.56641, "5": 0.427, "10": 0.43169, "15": 0.68627, "20": 0.43474, "25": 0.43243, "30": 0.41868, "35": 0.42, "40": 0.42431, "45": 0.42216, "50": 0.42294, "55": 0.4248, "60": 0.42586, "65": 0.43207, "70": 0.42951, "75": 0.42294, "80": 0.42325, "85": 0.42285, "90": 0.41983, "95": 0.42117, "100": 0.41908}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..3adc626 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,58 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 1 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --num-experts: 8 + --use-distributed-optimizer: true + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --ckpt-fully-parallel-load: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: frozen-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 912b9bb..34576fd 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: 
${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true -TEST_TYPE: ckpt-resume \ No newline at end of file + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json index 0386ad6..f47b788 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.88734, - 10.91614, - 10.89061, - 10.86173, - 10.72753, - 10.64491, - 10.18012, - 10.2562, - 10.1611, - 9.8539 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3268.0, - 4040.0, - 4142.0, - 3766.0, - 4028.0, - 3648.0, - 3306.0, - 4028.0, - 4648.0, - 4546.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 7.0561, - 0.32588, - 0.32628, - 0.32385, - 0.32419, - 0.32364, - 0.32337, - 0.32334, - 0.32358, - 0.32395 - ] - } -} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.88734, "5": 10.9095, "10": 10.87762, "15": 10.86373, "20": 10.75091, "25": 10.59916, "30": 10.40104, "35": 10.30798, "40": 10.10903, "45": 9.85833, "50": 9.92113}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3268.0, "5": 4052.0, "10": 2754.0, "15": 3718.0, "20": 3418.0, "25": 3296.0, "30": 3834.0, "35": 4152.0, "40": 4500.0, "45": 4248.0, "50": 5174.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 462388224.0, "5": 462388224.0, "10": 462388224.0, "15": 462388224.0, "20": 462388224.0, "25": 462388224.0, "30": 462388224.0, "35": 462388224.0, "40": 462388224.0, "45": 462388224.0, "50": 462388224.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2529755136.0, "5": 2658182144.0, "10": 2658182144.0, "15": 2658182144.0, "20": 2658182144.0, "25": 2658182144.0, "30": 2658182144.0, "35": 2658182144.0, "40": 2658182144.0, "45": 2658182144.0, "50": 2658182144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.51725, "5": 0.17534, "10": 0.17556, "15": 0.458, "20": 0.17526, "25": 0.17442, "30": 0.17452, "35": 0.17516, "40": 0.17397, "45": 0.17459, "50": 0.17426}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json index 15a93d0..967985d 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.88734, 10.91612, 10.8906, 10.86171, 10.72752, 10.64491, 10.18015, 10.25622, 10.16111, 9.85394]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3228.0, 3820.0, 3890.0, 3848.0, 3902.0, 3486.0, 3310.0, 3982.0, 4472.0, 4532.0]}, "iteration_timing_avg": 0.22043823529411763} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.88734, "5": 10.90947, "10": 10.87761, "15": 10.86375, "20": 10.75093, "25": 10.59921, "30": 10.40105, "35": 10.30798, "40": 10.10902, "45": 9.85833, "50": 9.92118}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3228.0, "5": 4014.0, "10": 2862.0, "15": 3900.0, "20": 3426.0, "25": 3640.0, "30": 3692.0, "35": 3986.0, "40": 4502.0, "45": 4220.0, "50": 5028.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 462455808.0, "5": 462455808.0, "10": 462455808.0, "15": 462455808.0, "20": 462455808.0, "25": 462455808.0, "30": 462455808.0, "35": 462455808.0, "40": 462455808.0, "45": 462455808.0, "50": 462455808.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2529822720.0, "5": 2658249728.0, "10": 2658249728.0, "15": 2658249728.0, "20": 2658249728.0, "25": 2658249728.0, "30": 2658249728.0, "35": 2658249728.0, "40": 2658249728.0, "45": 2658249728.0, "50": 2658249728.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.10081, "5": 0.18214, "10": 0.17385, "15": 0.17946, "20": 0.17288, "25": 0.17466, "30": 0.18117, "35": 0.17967, "40": 0.17931, "45": 0.17881, "50": 0.17865}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml index 3b4a2d6..179678b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c2a4e43 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 
5, "values": {"1": 10.88763, "5": 10.90195, "10": 10.86849, "15": 10.84832, "20": 10.7177, "25": 10.54272, "30": 10.33644, "35": 10.23978, "40": 10.03268, "45": 9.7682, "50": 9.85323, "55": 9.8227, "60": 9.43756, "65": 8.87846, "70": 9.68163, "75": 9.37197, "80": 9.35654, "85": 9.57146, "90": 9.77728, "95": 9.48563, "100": 9.35905}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 586.0, "5": 597.0, "10": 527.0, "15": 644.0, "20": 584.0, "25": 631.0, "30": 670.0, "35": 675.0, "40": 793.0, "45": 753.0, "50": 864.0, "55": 849.0, "60": 821.0, "65": 974.0, "70": 1083.0, "75": 898.0, "80": 1055.0, "85": 1098.0, "90": 1043.0, "95": 1117.0, "100": 1177.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 609664512.0, "5": 609664512.0, "10": 609664512.0, "15": 609664512.0, "20": 609664512.0, "25": 609664512.0, "30": 609664512.0, "35": 609664512.0, "40": 609664512.0, "45": 609664512.0, "50": 609664512.0, "55": 609664512.0, "60": 609664512.0, "65": 609664512.0, "70": 609664512.0, "75": 609664512.0, "80": 609664512.0, "85": 609664512.0, "90": 609664512.0, "95": 609664512.0, "100": 609664512.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 879175168.0, "5": 1141530624.0, "10": 1141530624.0, "15": 1141530624.0, "20": 1141530624.0, "25": 1141530624.0, "30": 1141530624.0, "35": 1141530624.0, "40": 1141530624.0, "45": 1141530624.0, "50": 1141530624.0, "55": 1141530624.0, "60": 1141530624.0, "65": 1141530624.0, "70": 1141530624.0, "75": 1141530624.0, "80": 1141530624.0, "85": 1141530624.0, "90": 1141530624.0, "95": 1141530624.0, "100": 1141530624.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.53831, "5": 0.35469, "10": 0.35518, "15": 0.37444, "20": 0.38352, "25": 0.34972, "30": 0.35075, "35": 0.35232, "40": 0.35148, "45": 0.34803, "50": 0.34792, "55": 0.345, "60": 0.34373, "65": 0.34654, "70": 0.3425, "75": 0.34476, "80": 0.34414, "85": 0.35203, "90": 0.34116, "95": 0.34589, "100": 0.36469}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..21bb1de --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88759, "5": 10.90191, "10": 10.86854, "15": 10.84831, "20": 10.71775, "25": 10.54269, "30": 10.33645, "35": 10.23971, "40": 10.03266, "45": 9.76819, "50": 9.85322, "55": 9.82269, "60": 9.43751, "65": 8.87843, "70": 9.68163, "75": 9.37195, "80": 9.35656, "85": 9.57142, "90": 9.77729, "95": 9.48561, "100": 9.35907}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 618.0, "5": 662.0, "10": 588.0, "15": 636.0, "20": 610.0, "25": 608.0, "30": 692.0, "35": 701.0, "40": 742.0, "45": 819.0, "50": 840.0, "55": 861.0, "60": 903.0, "65": 880.0, "70": 1061.0, "75": 890.0, "80": 1054.0, "85": 1095.0, "90": 1112.0, "95": 1202.0, "100": 1199.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 611892224.0, "5": 611892224.0, "10": 611892224.0, "15": 
611892224.0, "20": 611892224.0, "25": 611892224.0, "30": 611892224.0, "35": 611892224.0, "40": 611892224.0, "45": 611892224.0, "50": 611892224.0, "55": 611892224.0, "60": 611892224.0, "65": 611892224.0, "70": 611892224.0, "75": 611892224.0, "80": 611892224.0, "85": 611892224.0, "90": 611892224.0, "95": 611892224.0, "100": 611892224.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 882343936.0, "5": 1155319296.0, "10": 1155319296.0, "15": 1155319296.0, "20": 1155319296.0, "25": 1155319296.0, "30": 1155319296.0, "35": 1155319296.0, "40": 1155319296.0, "45": 1155319296.0, "50": 1155319296.0, "55": 1155319296.0, "60": 1155319296.0, "65": 1155319296.0, "70": 1155319296.0, "75": 1155319296.0, "80": 1155319296.0, "85": 1155319296.0, "90": 1155319296.0, "95": 1155319296.0, "100": 1155319296.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 7.94154, "5": 0.33609, "10": 0.33973, "15": 0.34455, "20": 0.33527, "25": 0.33425, "30": 0.34539, "35": 0.33186, "40": 0.33039, "45": 0.33968, "50": 0.3329, "55": 0.32374, "60": 0.33146, "65": 0.32297, "70": 0.32375, "75": 0.32557, "80": 0.32269, "85": 0.33389, "90": 0.32203, "95": 0.32262, "100": 0.33397}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index 0e2795a..7515a04 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..419203c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88789, "5": 10.90961, "10": 10.87795, "15": 10.86385, "20": 10.75085, "25": 10.59879, "30": 10.40096, "35": 10.30788, "40": 10.10957, "45": 9.85869, "50": 9.92089, "55": 9.88536, "60": 9.50765, "65": 8.95828, "70": 9.72745, "75": 9.42585, "80": 9.40548, "85": 9.61545, "90": 9.8127, "95": 9.52143, "100": 9.40111}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3484.0, "5": 4142.0, "10": 2870.0, "15": 3906.0, "20": 3312.0, "25": 3304.0, "30": 3930.0, "35": 4134.0, "40": 4428.0, "45": 
4218.0, "50": 4882.0, "55": 5074.0, "60": 4638.0, "65": 5528.0, "70": 6682.0, "75": 5280.0, "80": 6588.0, "85": 6914.0, "90": 6014.0, "95": 6762.0, "100": 6874.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 462388736.0, "5": 462388736.0, "10": 462388736.0, "15": 462388736.0, "20": 462388736.0, "25": 462388736.0, "30": 462388736.0, "35": 462388736.0, "40": 462388736.0, "45": 462388736.0, "50": 462388736.0, "55": 462388736.0, "60": 462388736.0, "65": 462388736.0, "70": 462388736.0, "75": 462388736.0, "80": 462388736.0, "85": 462388736.0, "90": 462388736.0, "95": 462388736.0, "100": 462388736.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1549081088.0, "5": 1677508096.0, "10": 1677508096.0, "15": 1677508096.0, "20": 1677508096.0, "25": 1677508096.0, "30": 1677508096.0, "35": 1677508096.0, "40": 1677508096.0, "45": 1677508096.0, "50": 1677508096.0, "55": 1677508096.0, "60": 1677508096.0, "65": 1677508096.0, "70": 1677508096.0, "75": 1677508096.0, "80": 1677508096.0, "85": 1677508096.0, "90": 1677508096.0, "95": 1677508096.0, "100": 1677508096.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.80848, "5": 0.16899, "10": 0.16541, "15": 0.16405, "20": 0.16523, "25": 0.16288, "30": 0.16319, "35": 0.16446, "40": 0.16379, "45": 0.16319, "50": 0.16381, "55": 0.39524, "60": 0.17112, "65": 0.1693, "70": 0.16984, "75": 0.16564, "80": 0.16541, "85": 0.167, "90": 0.16655, "95": 0.165, "100": 0.17011}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..fcef42e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88734, "5": 10.90947, "10": 10.87761, "15": 10.86375, "20": 10.75093, "25": 10.59921, "30": 10.40105, "35": 10.30798, "40": 10.10902, "45": 9.85833, "50": 9.92118, "55": 9.88534, "60": 9.50741, "65": 8.95835, "70": 9.72738, "75": 9.42576, "80": 9.40566, "85": 9.61579, "90": 9.81287, "95": 9.52129, "100": 9.40119}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3228.0, "5": 4014.0, "10": 2862.0, "15": 3900.0, "20": 3426.0, "25": 3640.0, "30": 3692.0, "35": 3986.0, "40": 4502.0, "45": 4220.0, "50": 5028.0, "55": 4876.0, "60": 4868.0, "65": 5402.0, "70": 6504.0, "75": 5282.0, "80": 6794.0, "85": 6806.0, "90": 5996.0, "95": 6836.0, "100": 6636.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 462455808.0, "5": 462455808.0, "10": 462455808.0, "15": 462455808.0, "20": 462455808.0, "25": 462455808.0, "30": 462455808.0, "35": 462455808.0, "40": 462455808.0, "45": 462455808.0, "50": 462455808.0, "55": 462455808.0, "60": 462455808.0, "65": 462455808.0, "70": 462455808.0, "75": 462455808.0, "80": 462455808.0, "85": 462455808.0, "90": 462455808.0, "95": 462455808.0, "100": 462455808.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2529822720.0, "5": 2658249728.0, "10": 2658249728.0, "15": 
2658249728.0, "20": 2658249728.0, "25": 2658249728.0, "30": 2658249728.0, "35": 2658249728.0, "40": 2658249728.0, "45": 2658249728.0, "50": 2658249728.0, "55": 2658249728.0, "60": 2658249728.0, "65": 2658249728.0, "70": 2658249728.0, "75": 2658249728.0, "80": 2658249728.0, "85": 2658249728.0, "90": 2658249728.0, "95": 2658249728.0, "100": 2658249728.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.80322, "5": 0.1869, "10": 0.18901, "15": 0.18805, "20": 0.18401, "25": 0.18156, "30": 0.18146, "35": 0.18277, "40": 0.18137, "45": 0.18129, "50": 0.18157, "55": 0.17851, "60": 0.17799, "65": 0.17661, "70": 0.17706, "75": 0.17825, "80": 0.18209, "85": 0.18389, "90": 0.18364, "95": 0.18051, "100": 0.17684}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml index 359f483..f0507bb 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -51,4 +51,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..5aa476f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.80181, "5": 10.84284, "10": 10.78173, "15": 10.80418, "20": 10.7314, "25": 10.57558, "30": 10.43631, "35": 10.34347, "40": 10.17318, "45": 9.94245, "50": 10.00163, "55": 9.94872, "60": 9.59802, "65": 9.02299, "70": 9.78149, "75": 9.4886, "80": 9.45936, "85": 9.6529, "90": 9.84596, "95": 9.55834, "100": 9.43841}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30687.0, "5": 35697.0, "10": 30112.0, "15": 35251.0, "20": 32966.0, "25": 31233.0, "30": 33087.0, "35": 34941.0, "40": 36233.0, "45": 35628.0, "50": 39783.0, "55": 37089.0, "60": 40650.0, "65": 41057.0, "70": 45337.0, "75": 39742.0, "80": 47699.0, "85": 49328.0, "90": 49103.0, "95": 48497.0, "100": 45560.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 865126912.0, "5": 865125376.0, "10": 865125376.0, "15": 865125888.0, "20": 865122816.0, "25": 865126400.0, "30": 865127424.0, "35": 865127936.0, "40": 865125376.0, 
"45": 865123328.0, "50": 865125888.0, "55": 865128448.0, "60": 865129472.0, "65": 865143808.0, "70": 865128960.0, "75": 865125888.0, "80": 865140736.0, "85": 865142272.0, "90": 865127936.0, "95": 865128448.0, "100": 865138176.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1867287040.0, "5": 2107624448.0, "10": 2107624448.0, "15": 2107624448.0, "20": 2112163840.0, "25": 2112163840.0, "30": 2112163840.0, "35": 2114877952.0, "40": 2114877952.0, "45": 2114877952.0, "50": 2116962304.0, "55": 2116962304.0, "60": 2127979520.0, "65": 2132691456.0, "70": 2132691456.0, "75": 2132691456.0, "80": 2133346816.0, "85": 2133346816.0, "90": 2135661568.0, "95": 2135661568.0, "100": 2135661568.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.18829, "5": 0.41672, "10": 0.40953, "15": 0.42036, "20": 0.40968, "25": 0.40781, "30": 0.40912, "35": 0.68237, "40": 0.40935, "45": 0.40902, "50": 0.409, "55": 0.40627, "60": 0.41037, "65": 0.41568, "70": 0.41037, "75": 0.41007, "80": 0.41381, "85": 0.40865, "90": 0.40453, "95": 0.40952, "100": 0.41186}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..be9ece6 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79987, "5": 10.85021, "10": 10.78437, "15": 10.80402, "20": 10.74018, "25": 10.57365, "30": 10.43064, "35": 10.34542, "40": 10.17702, "45": 9.94116, "50": 10.00138, "55": 9.94734, "60": 9.5942, "65": 9.02239, "70": 9.781, "75": 9.48705, "80": 9.4551, "85": 9.65724, "90": 9.84458, "95": 9.55632, "100": 9.44025}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30784.0, "5": 35580.0, "10": 30083.0, "15": 35706.0, "20": 32807.0, "25": 30763.0, "30": 32985.0, "35": 34748.0, "40": 36348.0, "45": 36297.0, "50": 39908.0, "55": 37140.0, "60": 40211.0, "65": 40766.0, "70": 45683.0, "75": 40504.0, "80": 47991.0, "85": 48935.0, "90": 49292.0, "95": 48929.0, "100": 46758.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 865156096.0, "5": 865154560.0, "10": 865157632.0, "15": 865156608.0, "20": 865153024.0, "25": 865157120.0, "30": 865158656.0, "35": 865157632.0, "40": 865155072.0, "45": 865155584.0, "50": 865156608.0, "55": 865158144.0, "60": 865160704.0, "65": 865175552.0, "70": 865159680.0, "75": 865157632.0, "80": 865171456.0, "85": 865173504.0, "90": 865158144.0, "95": 865159168.0, "100": 865167360.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2845674496.0, "5": 3089817088.0, "10": 3089817088.0, "15": 3089817088.0, "20": 3090944000.0, "25": 3090944000.0, "30": 3090944000.0, "35": 3096425472.0, "40": 3096425472.0, "45": 3096425472.0, "50": 3098263040.0, "55": 3098263040.0, "60": 3106846720.0, "65": 3111392768.0, "70": 3111392768.0, "75": 3112845824.0, "80": 3112845824.0, "85": 3113016320.0, "90": 3118998528.0, "95": 3118998528.0, "100": 3118998528.0}}, "iteration-time": 
{"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.85588, "5": 0.63247, "10": 0.42764, "15": 0.42156, "20": 0.41825, "25": 0.41965, "30": 0.41813, "35": 0.4219, "40": 0.42305, "45": 0.42873, "50": 0.42716, "55": 0.41875, "60": 0.43473, "65": 0.42855, "70": 0.42285, "75": 0.42556, "80": 0.42276, "85": 0.42862, "90": 0.41965, "95": 0.42303, "100": 0.42037}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index edc9eed..df52482 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -1,57 +1,59 @@ -ENV_VARS: - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 - NCCL_ALGO: Tree - CUBLAS_WORKSPACE_CONFIG: :4096:8 -MODEL_ARGS: - --num-layers: 12 - --hidden-size: 512 - --num-attention-heads: 8 - --log-params-norm: true - --log-num-zeros-in-grad: true - --log-validation-ppl-to-tensorboard: true - --log-timers-to-tensorboard: true - --tensorboard-dir: ${TENSORBOARD_PATH} - --micro-batch-size: 4 - --global-batch-size: 32 - --seq-length: 1024 - --max-position-embeddings: 1024 - --train-iters: 100 - --timing-log-level: 2 - --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} - --data-path: ${DATA_PATH}/my-gpt3_00_text_document - --vocab-file: ${DATA_PATH}/bpe/vocab.json - --merge-file: ${DATA_PATH}/bpe/merges.txt - --split: 949,50,1 - --distributed-backend: nccl - --lr: 0.00015 - --lr-decay-style: cosine - --min-lr: 1.0e-5 - --weight-decay: 1e-2 - --clip-grad: 1.0 - --lr-warmup-fraction: .01 - --log-interval: 1 - --save-interval: 50 - --eval-interval: 1000 - --eval-iters: 10 - --transformer-impl: transformer_engine - --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 1 - --expert-model-parallel-size: 2 - --sequence-parallel: true - --num-experts: 8 - --use-distributed-optimizer: true - --moe-router-load-balancing-type: sinkhorn - --moe-router-topk: 1 - --ckpt-fully-parallel-load: true - --deterministic-mode: true - --no-gradient-accumulation-fusion: true - --attention-softmax-in-fp32: true - --use-checkpoint-opt_param-scheduler: true - --use-mcore-models: true - --ckpt-format: torch_dist - --data-cache-path: ${DATA_CACHE_PATH} - --bf16: true -TEST_TYPE: ckpt-resume +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + 
--distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 1 + --expert-model-parallel-size: 2 + --sequence-parallel: true + --num-experts: 8 + --use-distributed-optimizer: true + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --ckpt-fully-parallel-load: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --ckpt-assume-constant-structure: true + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..9dd296d --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81129, "5": 10.84788, "10": 10.78374, "15": 10.82081, "20": 10.74489, "25": 10.59876, "30": 10.44157, "35": 10.35352, "40": 10.19022, "45": 9.95761, "50": 10.02046, "55": 9.95893, "60": 9.61299, "65": 9.03726, "70": 9.78763, "75": 9.49707, "80": 9.46333, "85": 9.66739, "90": 9.84886, "95": 9.56601, "100": 9.44758}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 30980.0, "5": 36191.0, "10": 30226.0, "15": 35268.0, "20": 33094.0, "25": 31163.0, "30": 33078.0, "35": 34831.0, "40": 36203.0, "45": 35991.0, "50": 39594.0, "55": 37028.0, "60": 39890.0, "65": 40600.0, "70": 45489.0, "75": 40046.0, "80": 47478.0, "85": 49240.0, "90": 49279.0, "95": 48714.0, "100": 44720.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1363810304.0, "5": 1363807744.0, "10": 1363810816.0, "15": 1363809792.0, "20": 1363809280.0, "25": 1363811840.0, "30": 1363812352.0, "35": 1363810304.0, "40": 1363808768.0, "45": 1363806720.0, "50": 1363808768.0, "55": 1363806208.0, "60": 1363809792.0, "65": 1363812864.0, "70": 1363806208.0, "75": 1363807744.0, "80": 1363818496.0, "85": 1363813888.0, "90": 1363805696.0, "95": 1363805696.0, "100": 1363811840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1991354368.0, "5": 2555771392.0, "10": 2555771392.0, "15": 2555771392.0, "20": 2556176384.0, "25": 2556176384.0, "30": 2556176384.0, "35": 2560073728.0, "40": 2560073728.0, "45": 2560073728.0, "50": 2560073728.0, "55": 2560073728.0, "60": 2560073728.0, "65": 2560073728.0, "70": 2560073728.0, "75": 2562988032.0, "80": 2564945408.0, "85": 2564945408.0, "90": 2564945408.0, "95": 2564945408.0, "100": 2564945408.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.49146, "5": 0.27476, "10": 0.27706, "15": 0.29059, "20": 0.2707, "25": 0.27278, "30": 0.27509, "35": 0.27674, "40": 0.27814, "45": 0.27651, "50": 0.27399, 
"55": 0.27084, "60": 0.29133, "65": 0.27567, "70": 0.27284, "75": 0.27196, "80": 0.27431, "85": 0.27061, "90": 0.27032, "95": 0.27382, "100": 0.27476}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..91596e1 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8029, "5": 10.85339, "10": 10.79198, "15": 10.81769, "20": 10.74357, "25": 10.58789, "30": 10.43346, "35": 10.35014, "40": 10.18622, "45": 9.95965, "50": 10.01907, "55": 9.95967, "60": 9.61901, "65": 9.0438, "70": 9.78907, "75": 9.50146, "80": 9.4689, "85": 9.66944, "90": 9.85084, "95": 9.562, "100": 9.44806}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 31473.0, "5": 36628.0, "10": 30674.0, "15": 35124.0, "20": 33128.0, "25": 30566.0, "30": 32881.0, "35": 34525.0, "40": 35704.0, "45": 35586.0, "50": 39709.0, "55": 36628.0, "60": 38989.0, "65": 40858.0, "70": 45481.0, "75": 39330.0, "80": 47453.0, "85": 49471.0, "90": 49228.0, "95": 47973.0, "100": 45474.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1355420160.0, "5": 1355417600.0, "10": 1355420672.0, "15": 1355418624.0, "20": 1355419136.0, "25": 1355422208.0, "30": 1355422208.0, "35": 1355423232.0, "40": 1355422208.0, "45": 1355418112.0, "50": 1355420160.0, "55": 1355424768.0, "60": 1355427840.0, "65": 1355438080.0, "70": 1355420672.0, "75": 1355420672.0, "80": 1355432448.0, "85": 1355430400.0, "90": 1355421184.0, "95": 1355420672.0, "100": 1355427840.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2953737728.0, "5": 3526075392.0, "10": 3526075392.0, "15": 3526075392.0, "20": 3527276544.0, "25": 3527276544.0, "30": 3527276544.0, "35": 3530373120.0, "40": 3530373120.0, "45": 3530373120.0, "50": 3534990848.0, "55": 3534990848.0, "60": 3542290944.0, "65": 3542290944.0, "70": 3542290944.0, "75": 3542290944.0, "80": 3542290944.0, "85": 3542290944.0, "90": 3542290944.0, "95": 3542290944.0, "100": 3542290944.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.55678, "5": 0.25735, "10": 0.25385, "15": 0.25994, "20": 0.26237, "25": 0.25011, "30": 0.25064, "35": 0.26042, "40": 0.25642, "45": 0.2505, "50": 0.25006, "55": 0.26056, "60": 0.25247, "65": 0.25735, "70": 0.26178, "75": 0.25377, "80": 0.25556, "85": 0.25939, "90": 0.26064, "95": 0.25687, "100": 0.25841}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml index b12ef70..25d4934 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -54,4 +54,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..6ed6a04 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.81978, "5": 10.85197, "10": 10.79064, "15": 10.81272, "20": 10.71609, "25": 10.52476, "30": 10.33545, "35": 10.22995, "40": 10.04741, "45": 9.77656, "50": 9.87115, "55": 9.83385, "60": 9.461, "65": 8.90362, "70": 9.70126, "75": 9.39148, "80": 9.37893, "85": 9.59197, "90": 9.7945, "95": 9.50434, "100": 9.37913}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 27060.0, "5": 31921.0, "10": 26272.0, "15": 31301.0, "20": 28838.0, "25": 28531.0, "30": 30657.0, "35": 33054.0, "40": 35298.0, "45": 35155.0, "50": 39066.0, "55": 38205.0, "60": 55119.0, "65": 2137811.0, "70": 2140949.0, "75": 41844.0, "80": 157090.0, "85": 53159.0, "90": 160405.0, "95": 46932.0, "100": 45379.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1366404096.0, "5": 1366391296.0, "10": 1366398976.0, "15": 1366411264.0, "20": 1366395392.0, "25": 1366414848.0, "30": 1366408192.0, "35": 1366416896.0, "40": 1366381056.0, "45": 1366368768.0, "50": 1366344192.0, "55": 1366319616.0, "60": 1366306816.0, "65": 1366317568.0, "70": 1366287360.0, "75": 1366282240.0, "80": 1366307840.0, "85": 1366326784.0, "90": 1366323712.0, "95": 1366303232.0, "100": 1366301184.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2330338304.0, "5": 2902626816.0, "10": 2902626816.0, "15": 2902758912.0, "20": 2902758912.0, "25": 2909535744.0, "30": 2909535744.0, "35": 2909906432.0, "40": 2909906432.0, "45": 2909906432.0, "50": 2909906432.0, "55": 2909906432.0, "60": 2909906432.0, "65": 2909906432.0, "70": 2909906432.0, "75": 2909906432.0, "80": 2909906432.0, "85": 2909906432.0, "90": 2909906432.0, "95": 2909906432.0, "100": 2909906432.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.82131, "5": 0.33381, "10": 0.33099, "15": 0.33324, "20": 0.33223, "25": 0.33172, "30": 0.33447, "35": 0.33688, "40": 0.33819, "45": 0.34723, "50": 0.34463, "55": 0.33883, "60": 0.33949, "65": 0.33894, "70": 0.33639, "75": 0.33664, "80": 0.33471, "85": 0.33448, "90": 0.33392, "95": 0.33001, "100": 0.34356}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..95ccb90 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.83445, "5": 10.87409, "10": 10.82337, "15": 10.83072, "20": 10.73228, "25": 10.53817, "30": 10.34469, "35": 10.24798, "40": 10.05498, "45": 9.79536, "50": 9.88842, "55": 9.84583, "60": 9.47252, "65": 8.91336, "70": 9.70548, "75": 9.39495, "80": 9.38269, "85": 9.58876, "90": 9.79604, "95": 9.50297, "100": 9.37731}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 26648.0, "5": 31622.0, "10": 25722.0, "15": 30485.0, "20": 28303.0, "25": 27282.0, "30": 29586.0, "35": 32578.0, "40": 35072.0, "45": 35298.0, "50": 38377.0, "55": 36128.0, "60": 39347.0, "65": 39897.0, "70": 44013.0, "75": 41039.0, "80": 46916.0, "85": 48793.0, "90": 46771.0, "95": 45617.0, "100": 46434.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1356090368.0, "5": 1356061184.0, "10": 1356089856.0, "15": 1356084736.0, "20": 1356063744.0, "25": 1356076544.0, "30": 1356070912.0, "35": 1356048896.0, "40": 1356026880.0, "45": 1356000768.0, "50": 1355968000.0, "55": 1355970048.0, "60": 1355980288.0, "65": 1356027904.0, "70": 1355976192.0, "75": 1355971584.0, "80": 1356020736.0, "85": 1356049920.0, "90": 1356049920.0, "95": 1356047360.0, "100": 1356033536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3273181696.0, "5": 3853371904.0, "10": 3853371904.0, "15": 3853371904.0, "20": 3853371904.0, "25": 3853371904.0, "30": 3853371904.0, "35": 3853371904.0, "40": 3853371904.0, "45": 3853371904.0, "50": 3853371904.0, "55": 3853371904.0, "60": 3853371904.0, "65": 3853371904.0, "70": 3853371904.0, "75": 3853371904.0, "80": 3853371904.0, "85": 3853371904.0, "90": 3853371904.0, "95": 3853371904.0, "100": 3853371904.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.68307, "5": 0.32305, "10": 0.32316, "15": 0.32294, "20": 0.31895, "25": 0.32174, "30": 0.33171, "35": 0.32776, "40": 0.34887, "45": 0.34206, "50": 0.33607, "55": 0.33463, "60": 0.32995, "65": 0.32515, "70": 0.33288, "75": 0.32963, "80": 0.32161, "85": 0.32851, "90": 0.31854, "95": 0.31863, "100": 0.32233}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml index 46a56c1..e451390 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - 
--load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -55,4 +55,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c531fcd --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,53 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8029, + 10.86149, + 10.86819, + 10.80829, + 10.72062, + 10.64588, + 10.21132, + 10.32324, + 10.2265, + 9.92918 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 31473.0, + 37753.0, + 38332.0, + 36348.0, + 33270.0, + 34310.0, + 30284.0, + 35432.0, + 36356.0, + 37109.0 + ] + }, + "iteration-time": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 5.94452, + 0.40526, + 0.40286, + 0.40289, + 0.40215, + 0.40351, + 0.40373, + 0.40354, + 0.40382, + 0.41286 + ] + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..8f4c470 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,37 @@ +{ + "lm loss": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 10.8029, + 10.86149, + 10.86819, + 10.80829, + 10.72062, + 10.64588, + 10.21132, + 10.32324, + 10.2265, + 9.92918 + ] + }, + "num-zeros": { + "start_step": 0, + "end_step": 50, + "step_interval": 5, + "values": [ + 31473.0, + 37753.0, + 38332.0, + 36348.0, + 33270.0, + 34310.0, + 30284.0, + 35432.0, + 36356.0, + 37109.0 + ] + }, + "iteration_timing_avg": 0.21900323529411767 +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..dcd7c60 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,62 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} 
+ --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_PATH} + --load: ${CHECKPOINT_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 1 + --expert-model-parallel-size: 2 + --no-ckpt-fully-parallel-save: true + --moe-grouped-gemm: true + --disable-bias-linear: true + --sequence-parallel: true + --num-experts: 8 + --use-distributed-optimizer: true + --moe-router-load-balancing-type: sinkhorn + --moe-router-topk: 1 + --overlap-grad-reduce: true + --overlap-param-gather: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json index 6ba3300..eb397a5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79987, - 10.85907, - 10.86575, - 10.79932, - 10.70961, - 10.63871, - 10.19492, - 10.31016, - 10.22301, - 9.91473 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 30795.0, - 37447.0, - 37837.0, - 35948.0, - 33382.0, - 34774.0, - 30403.0, - 35340.0, - 36357.0, - 37792.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.77572, - 0.42536, - 0.42839, - 0.42977, - 0.42283, - 0.42333, - 0.43199, - 0.42998, - 0.43124, - 0.43207 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79987, "5": 10.85011, "10": 10.78474, "15": 10.80469, "20": 10.74013, "25": 10.57368, "30": 10.43164, "35": 10.34482, "40": 10.17678, "45": 9.94099, "50": 10.00158}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 30795.0, "5": 36202.0, "10": 29805.0, "15": 35047.0, "20": 32996.0, "25": 31111.0, "30": 33355.0, "35": 34758.0, "40": 36390.0, "45": 36272.0, "50": 40012.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 865125376.0, "5": 865124864.0, "10": 865126400.0, "15": 865125376.0, "20": 865122816.0, "25": 865125888.0, "30": 865126912.0, "35": 865126912.0, "40": 865124352.0, "45": 865124864.0, "50": 865125376.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 2845643776.0, "5": 3089300480.0, "10": 3089300480.0, "15": 3089300480.0, "20": 3090866176.0, "25": 3090866176.0, "30": 3090866176.0, "35": 3095958016.0, "40": 3095958016.0, "45": 3095958016.0, "50": 3098294272.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.74384, "5": 0.43164, "10": 0.43057, "15": 0.42884, "20": 0.4299, "25": 0.43109, "30": 0.67218, "35": 0.42782, "40": 0.42537, "45": 0.42596, "50": 0.77316}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json index 6afdc07..03b77f2 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/golden_values_lts.json @@ -1,37 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.79987, - 10.85983, - 10.865, - 10.799, - 10.70987, - 10.63782, - 10.1965, - 10.3099, - 10.22262, - 9.91423 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 30784.0, - 37528.0, - 37616.0, - 36105.0, - 33464.0, - 34923.0, - 30806.0, - 35663.0, - 36661.0, - 37641.0 - ] - }, - "iteration_timing_avg": 0.3566726470588235 -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79987, "5": 10.85021, "10": 10.78437, "15": 10.80402, "20": 10.74018, "25": 10.57365, "30": 10.43064, "35": 10.34542, "40": 10.17702, "45": 9.94116, "50": 10.00138}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 30784.0, "5": 35580.0, "10": 30083.0, "15": 35706.0, "20": 32807.0, "25": 30763.0, "30": 32985.0, "35": 34748.0, "40": 36348.0, "45": 36297.0, "50": 39908.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 865156096.0, "5": 865154560.0, "10": 865157632.0, "15": 865156608.0, "20": 865153024.0, "25": 865157120.0, "30": 865158656.0, "35": 865157632.0, "40": 865155072.0, "45": 865155584.0, "50": 865156608.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2845674496.0, "5": 3089817088.0, "10": 3089817088.0, "15": 3089817088.0, "20": 3090944000.0, "25": 3090944000.0, "30": 3090944000.0, "35": 3096425472.0, "40": 3096425472.0, "45": 3096425472.0, "50": 3098263040.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.48573, "5": 0.68181, "10": 0.42126, "15": 0.42388, "20": 0.41898, "25": 0.41998, "30": 0.41505, "35": 0.41625, "40": 0.41814, "45": 0.41734, "50": 0.42354}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml index b07473d..e54e720 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -54,4 +54,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json index c531fcd..e730001 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8029, - 10.86149, - 10.86819, - 10.80829, - 10.72062, - 10.64588, - 10.21132, - 10.32324, - 10.2265, - 9.92918 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 31473.0, - 37753.0, - 38332.0, - 36348.0, - 33270.0, - 34310.0, - 30284.0, - 35432.0, - 36356.0, - 37109.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 5.94452, - 0.40526, - 0.40286, - 0.40289, - 0.40215, - 0.40351, - 0.40373, - 0.40354, - 0.40382, - 0.41286 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.80959, "5": 10.85303, "10": 10.78796, "15": 10.81868, "20": 10.74722, "25": 10.5991, "30": 10.44004, "35": 10.3515, "40": 10.19634, "45": 9.95907, "50": 10.01749}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 31050.0, "5": 35937.0, "10": 30117.0, "15": 35239.0, "20": 32813.0, "25": 31429.0, "30": 33133.0, "35": 34855.0, "40": 36161.0, "45": 36187.0, "50": 38778.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 881113600.0, "5": 881112576.0, "10": 881114112.0, "15": 881116160.0, "20": 881113088.0, "25": 881115136.0, "30": 881115648.0, "35": 881117696.0, "40": 881113600.0, "45": 881113088.0, "50": 881115136.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2809510912.0, "5": 3055304704.0, "10": 3055304704.0, "15": 3055304704.0, "20": 3055304704.0, "25": 3055304704.0, "30": 3055304704.0, "35": 3056838144.0, "40": 3056838144.0, "45": 3056838144.0, "50": 3056838144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.94436, "5": 0.29888, "10": 0.2892, "15": 0.28962, "20": 0.29155, "25": 0.2887, "30": 0.28714, "35": 0.28764, "40": 0.28693, "45": 0.2945, "50": 0.29357}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json index 8f4c470..9ad809f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/golden_values_lts.json @@ -1,37 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.8029, - 10.86149, - 10.86819, - 10.80829, - 10.72062, - 10.64588, - 10.21132, - 10.32324, - 10.2265, - 9.92918 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 31473.0, - 37753.0, - 38332.0, - 36348.0, - 33270.0, - 34310.0, - 30284.0, - 35432.0, - 36356.0, - 37109.0 - ] - }, - "iteration_timing_avg": 0.21900323529411767 -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.8029, "5": 10.85339, "10": 10.79202, "15": 10.81788, "20": 10.74371, "25": 10.58737, "30": 10.43384, "35": 10.35041, "40": 10.18639, "45": 9.95903, "50": 10.01914}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 31473.0, "5": 36628.0, "10": 30874.0, "15": 35127.0, "20": 32995.0, "25": 30607.0, "30": 32534.0, "35": 34542.0, "40": 35881.0, "45": 35814.0, "50": 39646.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 864192000.0, "5": 864189440.0, "10": 864192512.0, "15": 864189952.0, "20": 864189952.0, "25": 864192000.0, "30": 864194048.0, "35": 864194560.0, "40": 864194560.0, "45": 864189952.0, "50": 864192000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2792702464.0, "5": 3036425216.0, "10": 3036425216.0, "15": 3036425216.0, "20": 3037602304.0, "25": 3037602304.0, "30": 3037602304.0, "35": 3040573440.0, "40": 3040573440.0, "45": 3040573440.0, "50": 3043728384.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.3358, "5": 0.26072, "10": 0.25802, "15": 0.25889, "20": 0.27225, "25": 0.25765, "30": 0.258, "35": 0.27009, "40": 0.26047, "45": 0.25566, "50": 0.26576}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml index 0b25e16..7fb3411 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: 
${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -58,4 +58,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json index 91e6f5e..054ff53 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.83445, - 10.87978, - 10.87924, - 10.81567, - 10.69374, - 10.60333, - 10.08824, - 10.21471, - 10.10778, - 9.78309 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 26648.0, - 32884.0, - 33611.0, - 31683.0, - 28744.0, - 30671.0, - 28602.0, - 33538.0, - 34560.0, - 35099.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.03575, - 0.59809, - 0.59808, - 0.60171, - 0.60477, - 0.611, - 0.62441, - 0.63554, - 0.64372, - 0.64983 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.82006, "5": 10.85405, "10": 10.79175, "15": 10.80877, "20": 10.71387, "25": 10.52487, "30": 10.33469, "35": 10.23358, "40": 10.04961, "45": 9.77656, "50": 9.87044}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 27146.0, "5": 32189.0, "10": 26378.0, "15": 31015.0, "20": 28863.0, "25": 28323.0, "30": 30844.0, "35": 32780.0, "40": 35120.0, "45": 35338.0, "50": 40557.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1365952512.0, "5": 1365933056.0, "10": 1365943808.0, "15": 1365943296.0, "20": 1365935616.0, "25": 1365958656.0, "30": 1365950976.0, "35": 1365954048.0, "40": 1365923840.0, "45": 1365914112.0, "50": 1365878784.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3320324096.0, "5": 3889025536.0, "10": 3889025536.0, "15": 3889025536.0, "20": 3889025536.0, "25": 3889859072.0, "30": 3895426048.0, "35": 3895426048.0, "40": 3895426048.0, "45": 3895426048.0, "50": 3895426048.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.64353, "5": 0.35348, "10": 0.35586, "15": 0.35242, "20": 0.34871, "25": 0.35025, "30": 0.36767, "35": 0.35732, "40": 0.3578, "45": 0.3675, "50": 0.35703}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json index d47ee5a..fc4c8a5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json @@ -1,37 +1 @@ -{ - "lm 
loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.83445, - 10.87978, - 10.87924, - 10.81567, - 10.69374, - 10.60333, - 10.08824, - 10.21471, - 10.10778, - 9.78309 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 26648.0, - 32884.0, - 33611.0, - 31683.0, - 28744.0, - 30671.0, - 28602.0, - 33538.0, - 34560.0, - 35099.0 - ] - }, - "iteration_timing_avg": 0.28211852941176474 -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.83445, "5": 10.87409, "10": 10.82337, "15": 10.83072, "20": 10.73228, "25": 10.53817, "30": 10.34469, "35": 10.24798, "40": 10.05498, "45": 9.79536, "50": 9.88842}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 26648.0, "5": 31622.0, "10": 25722.0, "15": 30485.0, "20": 28303.0, "25": 27282.0, "30": 29586.0, "35": 32578.0, "40": 35072.0, "45": 35298.0, "50": 38377.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1356090368.0, "5": 1356061184.0, "10": 1356089856.0, "15": 1356084736.0, "20": 1356063744.0, "25": 1356076544.0, "30": 1356070912.0, "35": 1356048896.0, "40": 1356026880.0, "45": 1356000768.0, "50": 1355968000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3273181696.0, "5": 3853371904.0, "10": 3853371904.0, "15": 3853371904.0, "20": 3853371904.0, "25": 3853371904.0, "30": 3853371904.0, "35": 3853371904.0, "40": 3853371904.0, "45": 3853371904.0, "50": 3853371904.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.04298, "5": 0.32129, "10": 0.31692, "15": 0.31486, "20": 0.3219, "25": 0.31787, "30": 0.33397, "35": 0.32395, "40": 0.34235, "45": 0.34383, "50": 0.33389}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml index 57d90af..d993ef6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -55,4 +55,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json index af87531..c7e8e8f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - 
"start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.81823, - 10.86998, - 10.8727, - 10.80014, - 10.67571, - 10.57944, - 10.06572, - 10.19342, - 10.08575, - 9.75236 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 26801.0, - 32734.0, - 32925.0, - 31593.0, - 28610.0, - 30362.0, - 28464.0, - 33486.0, - 33403.0, - 35162.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 8.63293, - 0.29454, - 0.28102, - 0.28297, - 0.28369, - 0.2848, - 0.30008, - 0.29214, - 0.31041, - 0.295 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.78523, "5": 10.83054, "10": 10.76094, "15": 10.78693, "20": 10.68675, "25": 10.49371, "30": 10.31819, "35": 10.22644, "40": 10.04223, "45": 9.79314, "50": 9.87634}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 26792.0, "5": 31724.0, "10": 25850.0, "15": 30721.0, "20": 28214.0, "25": 27631.0, "30": 29636.0, "35": 32017.0, "40": 34744.0, "45": 35060.0, "50": 38936.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1365851648.0, "5": 1365850624.0, "10": 1365853696.0, "15": 1365849600.0, "20": 1365842944.0, "25": 1365835264.0, "30": 1365835776.0, "35": 1365832704.0, "40": 1365822976.0, "45": 1365822464.0, "50": 1365807616.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3194399744.0, "5": 3767866368.0, "10": 3767866368.0, "15": 3767866368.0, "20": 3767866368.0, "25": 3767866368.0, "30": 3767866368.0, "35": 3767866368.0, "40": 3767866368.0, "45": 3767866368.0, "50": 3767866368.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.84075, "5": 0.29919, "10": 0.30964, "15": 0.29943, "20": 0.29742, "25": 0.29871, "30": 0.30484, "35": 0.30368, "40": 0.30591, "45": 0.30832, "50": 0.31021}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json index af7288c..bdcb370 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/golden_values_lts.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.81823, - 10.86998, - 10.8727, - 10.80014, - 10.67571, - 10.57944, - 10.06572, - 10.19342, - 10.08575, - 9.75236 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 26801.0, - 32734.0, - 32925.0, - 31593.0, - 28610.0, - 30362.0, - 28464.0, - 33486.0, - 33403.0, - 35162.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 11.94141, - 0.28425, - 0.28413, - 0.29449, - 0.28534, - 0.29977, - 0.30061, - 0.30321, - 0.30986, - 0.30404 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.81823, "5": 10.86095, "10": 10.80526, "15": 10.79462, "20": 10.71165, "25": 10.51976, "30": 10.3329, "35": 10.2268, "40": 10.04174, "45": 9.77208, "50": 9.86857}}, "num-zeros": {"start_step": 1, "end_step": 
50, "step_interval": 5, "values": {"1": 26801.0, "5": 31616.0, "10": 25891.0, "15": 30664.0, "20": 28366.0, "25": 27044.0, "30": 29851.0, "35": 32076.0, "40": 34659.0, "45": 35029.0, "50": 39449.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1356450816.0, "5": 1356449280.0, "10": 1356442112.0, "15": 1356458496.0, "20": 1356450816.0, "25": 1356460032.0, "30": 1356473856.0, "35": 1356460544.0, "40": 1356441088.0, "45": 1356427776.0, "50": 1356393472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3224656384.0, "5": 3809699328.0, "10": 3821031424.0, "15": 3821031424.0, "20": 3821031424.0, "25": 3821031424.0, "30": 3827638784.0, "35": 3827638784.0, "40": 3827638784.0, "45": 3827638784.0, "50": 3827638784.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.73421, "5": 0.28493, "10": 0.29456, "15": 0.28139, "20": 0.28344, "25": 0.28129, "30": 0.287, "35": 0.29344, "40": 0.28965, "45": 0.29918, "50": 0.30393}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml index 30b51f4..d07788f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G/model_config.yaml @@ -20,8 +20,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -57,4 +57,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..faf1ad9 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92969, "10": 10.90474, "15": 10.87123, "20": 10.74999, "25": 10.53753, "30": 10.32549, "35": 10.22893, "40": 10.01971, "45": 9.75548, "50": 9.8407}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 606.0, "5": 660.0, "10": 607.0, "15": 629.0, "20": 544.0, "25": 637.0, "30": 694.0, "35": 721.0, "40": 747.0, "45": 772.0, "50": 818.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 430735872.0, "5": 430735872.0, "10": 430735872.0, "15": 430735872.0, "20": 430735872.0, "25": 430735872.0, "30": 430735872.0, "35": 430735872.0, "40": 430735872.0, "45": 430735872.0, "50": 430735872.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 677341696.0, "5": 859509760.0, "10": 859509760.0, "15": 859509760.0, "20": 859509760.0, "25": 859509760.0, "30": 859509760.0, "35": 859509760.0, 
"40": 859509760.0, "45": 859509760.0, "50": 859509760.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.40423, "5": 0.41246, "10": 0.41102, "15": 0.40853, "20": 0.40333, "25": 0.40316, "30": 0.40384, "35": 0.40428, "40": 0.40498, "45": 0.40793, "50": 0.3968}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..a8ce751 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,52 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..faf1ad9 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92969, "10": 10.90474, "15": 10.87123, "20": 10.74999, "25": 10.53753, "30": 10.32549, "35": 10.22893, "40": 10.01971, "45": 9.75548, "50": 9.8407}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 606.0, "5": 660.0, "10": 607.0, "15": 629.0, "20": 544.0, "25": 637.0, "30": 694.0, "35": 721.0, "40": 747.0, "45": 772.0, "50": 818.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 430735872.0, "5": 430735872.0, "10": 430735872.0, "15": 430735872.0, "20": 430735872.0, "25": 430735872.0, "30": 430735872.0, "35": 430735872.0, "40": 430735872.0, "45": 430735872.0, "50": 430735872.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 677341696.0, "5": 
859509760.0, "10": 859509760.0, "15": 859509760.0, "20": 859509760.0, "25": 859509760.0, "30": 859509760.0, "35": 859509760.0, "40": 859509760.0, "45": 859509760.0, "50": 859509760.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.40423, "5": 0.41246, "10": 0.41102, "15": 0.40853, "20": 0.40333, "25": 0.40316, "30": 0.40384, "35": 0.40428, "40": 0.40498, "45": 0.40793, "50": 0.3968}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..04cee73 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true + --use-tp-pp-dp-mapping: true + --expert-tensor-parallel-size: 4 +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..cf646af --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.9297, "10": 10.90475, "15": 10.87123, "20": 10.75, "25": 10.53755, "30": 10.32553, "35": 10.22897, "40": 10.01971, "45": 9.7555, "50": 9.84071}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 624.0, "5": 685.0, "10": 513.0, "15": 641.0, "20": 585.0, "25": 581.0, "30": 707.0, "35": 718.0, "40": 845.0, "45": 777.0, "50": 861.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 430735872.0, "5": 430735872.0, "10": 430735872.0, 
"15": 430735872.0, "20": 430735872.0, "25": 430735872.0, "30": 430735872.0, "35": 430735872.0, "40": 430735872.0, "45": 430735872.0, "50": 430735872.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 676293120.0, "5": 859512320.0, "10": 859512320.0, "15": 859512320.0, "20": 859512320.0, "25": 859512320.0, "30": 859512320.0, "35": 859512320.0, "40": 859512320.0, "45": 859512320.0, "50": 859512320.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.98416, "5": 0.41865, "10": 0.41865, "15": 0.4155, "20": 0.45771, "25": 0.41373, "30": 0.41607, "35": 0.41643, "40": 0.41869, "45": 0.41712, "50": 0.41947}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..6482796 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92972, "10": 10.90473, "15": 10.8712, "20": 10.74997, "25": 10.53747, "30": 10.32549, "35": 10.22894, "40": 10.01969, "45": 9.75543, "50": 9.84069}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 597.0, "5": 673.0, "10": 541.0, "15": 596.0, "20": 559.0, "25": 621.0, "30": 722.0, "35": 677.0, "40": 764.0, "45": 766.0, "50": 834.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 432177152.0, "5": 432177152.0, "10": 432177152.0, "15": 432177152.0, "20": 432177152.0, "25": 432177152.0, "30": 432177152.0, "35": 432177152.0, "40": 432177152.0, "45": 432177152.0, "50": 432177152.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 676286464.0, "5": 856227840.0, "10": 856228864.0, "15": 856228864.0, "20": 857274880.0, "25": 857274880.0, "30": 857274880.0, "35": 857274880.0, "40": 857274880.0, "45": 857274880.0, "50": 857274880.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 15.34231, "5": 0.38106, "10": 0.38358, "15": 0.38108, "20": 0.38826, "25": 0.38764, "30": 0.39542, "35": 0.40726, "40": 0.4445, "45": 0.45466, "50": 0.46041}}} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..8b826ef --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 50 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: 
${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 10000 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --context-parallel-size: 2 + --sequence-parallel: true + --hidden-dropout: 0.0 + --attention-dropout: 0.0 + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --attention-backend: flash + --log-memory-to-tensorboard: true + --use-tp-pp-dp-mapping: true + --expert-tensor-parallel-size: 4 +TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json index 4c8008e..6d3b178 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.93292, 10.93657, 10.88788, 10.86131, 10.71505, 10.61066, 10.06697, 10.17616, 10.07539, 9.74965]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [607.0, 638.0, 643.0, 649.0, 648.0, 590.0, 548.0, 772.0, 834.0, 836.0]}, "iteration_timing_avg": 0.3993126470588235} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92968, "10": 10.90474, "15": 10.87119, "20": 10.75002, "25": 10.53753, "30": 10.32551, "35": 10.22889, "40": 10.01977, "45": 9.75546, "50": 9.84068}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 600.0, "5": 636.0, "10": 530.0, "15": 603.0, "20": 587.0, "25": 616.0, "30": 678.0, "35": 686.0, "40": 754.0, "45": 825.0, "50": 834.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 430735872.0, "5": 430735872.0, "10": 430735872.0, "15": 430735872.0, "20": 430735872.0, "25": 430735872.0, "30": 430735872.0, "35": 430735872.0, "40": 430735872.0, "45": 430735872.0, "50": 430735872.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 677322752.0, "5": 859492352.0, "10": 859492352.0, "15": 859492352.0, "20": 859492352.0, "25": 859492352.0, "30": 859492352.0, "35": 859492352.0, "40": 859492352.0, "45": 859492352.0, "50": 859492352.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 18.40423, "5": 0.41246, "10": 0.41102, "15": 0.40853, "20": 0.40333, "25": 0.40316, "30": 0.40384, "35": 0.40428, "40": 0.40498, "45": 0.40793, "50": 0.3968}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json index 4c8008e..68c4e59 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.93292, 10.93657, 10.88788, 10.86131, 10.71505, 10.61066, 10.06697, 10.17616, 10.07539, 9.74965]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [607.0, 638.0, 643.0, 649.0, 648.0, 590.0, 548.0, 772.0, 834.0, 836.0]}, "iteration_timing_avg": 0.3993126470588235} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92968, "10": 10.90473, "15": 10.87124, "20": 10.74999, "25": 10.53753, "30": 10.32551, "35": 10.22896, "40": 10.01976, "45": 9.75546, "50": 9.84068}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 577.0, "5": 694.0, "10": 549.0, "15": 604.0, "20": 582.0, "25": 611.0, "30": 715.0, "35": 705.0, "40": 812.0, "45": 723.0, "50": 886.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 432177152.0, "5": 432177152.0, "10": 432177152.0, "15": 432177152.0, "20": 432177152.0, "25": 432177152.0, "30": 432177152.0, "35": 432177152.0, "40": 432177152.0, "45": 432177152.0, "50": 432177152.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 675237888.0, "5": 856228352.0, "10": 856228352.0, "15": 856228352.0, "20": 856228352.0, "25": 856228352.0, "30": 856228864.0, "35": 856228864.0, "40": 856228864.0, "45": 856228864.0, "50": 856228864.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.3532, "5": 0.38052, "10": 0.38643, "15": 0.3931, "20": 0.39615, "25": 0.3942, "30": 0.39277, "35": 0.39439, "40": 0.38701, "45": 0.38136, "50": 0.38105}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index c6ca306..61e0a18 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json index 98ff45e..e175b2b 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93627, 10.89332, 10.87322, 10.74871, 10.65375, 10.15756, 10.24634, 10.15177, 9.83799]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1707.0, 1885.0, 1986.0, 1760.0, 1773.0, 1859.0, 1598.0, 1965.0, 2199.0, 2316.0]}, "iteration_timing_avg": 0.20321264705882353} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92721, "10": 10.90788, "15": 10.88289, "20": 10.77592, "25": 10.59264, "30": 10.39176, "35": 10.297, "40": 10.09664, "45": 9.84472, "50": 9.90945}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1578.0, "5": 2015.0, "10": 1470.0, "15": 1873.0, "20": 1641.0, "25": 1658.0, "30": 1855.0, "35": 1936.0, "40": 2171.0, "45": 2101.0, "50": 2466.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 437944832.0, "5": 437944832.0, "10": 437944832.0, "15": 437944832.0, "20": 437944832.0, "25": 437944832.0, "30": 437944832.0, "35": 437944832.0, "40": 437944832.0, "45": 437944832.0, "50": 437944832.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2245028864.0, "5": 2245028864.0, "10": 2245028864.0, "15": 2245028864.0, "20": 2245028864.0, "25": 2245028864.0, "30": 2245028864.0, "35": 2245028864.0, "40": 2245028864.0, "45": 2245028864.0, "50": 2245028864.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.02236, "5": 0.16926, "10": 0.16731, "15": 0.16738, "20": 0.1667, "25": 0.16613, "30": 0.16647, "35": 0.16593, "40": 0.1672, "45": 0.16523, "50": 0.16977}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json index 98ff45e..465fef4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93627, 10.89332, 10.87322, 10.74871, 10.65375, 10.15756, 10.24634, 10.15177, 9.83799]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1707.0, 1885.0, 1986.0, 1760.0, 1773.0, 1859.0, 1598.0, 1965.0, 2199.0, 2316.0]}, "iteration_timing_avg": 0.20321264705882353} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92722, "10": 10.9079, "15": 10.88296, "20": 10.77594, "25": 10.59266, "30": 10.39175, "35": 10.29701, "40": 10.09666, "45": 9.8447, "50": 9.90944}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1675.0, "5": 2035.0, "10": 1469.0, "15": 1853.0, "20": 1641.0, "25": 1685.0, "30": 1947.0, "35": 1941.0, "40": 2148.0, "45": 2122.0, "50": 2483.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 
435191808.0, "5": 435191808.0, "10": 435191808.0, "15": 435191808.0, "20": 435191808.0, "25": 435191808.0, "30": 435191808.0, "35": 435191808.0, "40": 435191808.0, "45": 435191808.0, "50": 435191808.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 16.35385, "5": 0.17431, "10": 0.16906, "15": 0.16815, "20": 0.17162, "25": 0.17427, "30": 0.16998, "35": 0.172, "40": 0.17758, "45": 0.16824, "50": 0.16924}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml index 8f0bf33..a331f86 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -44,4 +44,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json index a1c3bc0..d775bcb 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.92705, - 10.93624, - 10.89333, - 10.87317, - 10.74871, - 10.65379, - 10.15753, - 10.24638, - 10.15178, - 9.83806 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1653.0, - 1874.0, - 1994.0, - 1828.0, - 1769.0, - 1845.0, - 1674.0, - 1957.0, - 2364.0, - 2345.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 11.33146, - 0.22344, - 0.21997, - 0.21977, - 0.21792, - 0.21685, - 0.22555, - 0.21755, - 0.21796, - 0.21694 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92794, "10": 10.90787, "15": 10.88313, "20": 10.77627, "25": 10.5914, "30": 10.39192, "35": 10.29687, "40": 10.0964, "45": 9.84467, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1653.0, "5": 1990.0, "10": 1417.0, "15": 1950.0, "20": 1611.0, "25": 1660.0, "30": 2007.0, "35": 2024.0, "40": 2229.0, "45": 2150.0, "50": 2473.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, 
"mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.00779, "5": 0.22728, "10": 0.22598, "15": 0.22514, "20": 0.22614, "25": 0.21574, "30": 0.2128, "35": 0.21504, "40": 0.22376, "45": 0.22447, "50": 0.22399}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json index 265ad7c..cccd3a4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93626, 10.89335, 10.87325, 10.74869, 10.65372, 10.15755, 10.24642, 10.15177, 9.83802]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1627.0, 1889.0, 1973.0, 1785.0, 1797.0, 1836.0, 1602.0, 2034.0, 2316.0, 2307.0]}, "iteration_timing_avg": 0.15396205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 489193472.0, "5": 489193472.0, "10": 489193472.0, "15": 489193472.0, "20": 489193472.0, "25": 489193472.0, "30": 489193472.0, "35": 489193472.0, "40": 489193472.0, "45": 489193472.0, "50": 489193472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1902255104.0, "10": 1902255104.0, "15": 1902255104.0, "20": 1902255104.0, "25": 1902255104.0, "30": 1902255104.0, "35": 1902255104.0, "40": 1902255104.0, "45": 1902255104.0, "50": 1902255104.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.57006, "5": 0.21756, "10": 0.21834, "15": 0.22103, "20": 0.22048, "25": 0.2186, "30": 0.21545, "35": 0.21478, "40": 0.22168, "45": 0.2141, "50": 0.21369}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml index c7190d5..191cf39 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 
--lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json index edb6a17..a900ccb 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.92705, - 10.93628, - 10.89334, - 10.87322, - 10.74869, - 10.65374, - 10.15755, - 10.24638, - 10.15177, - 9.83799 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 68.0, - 64.0, - 61.0, - 70.0, - 66.0, - 55.0, - 76.0, - 72.0, - 64.0, - 85.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.68102, - 0.22487, - 0.22503, - 0.22418, - 0.22445, - 0.22504, - 0.22333, - 0.22333, - 0.22458, - 0.22367 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92799, "10": 10.90787, "15": 10.88314, "20": 10.77633, "25": 10.59142, "30": 10.39191, "35": 10.29686, "40": 10.09641, "45": 9.84468, "50": 9.90923}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 68.0, "5": 64.0, "10": 61.0, "15": 65.0, "20": 60.0, "25": 58.0, "30": 75.0, "35": 70.0, "40": 84.0, "45": 95.0, "50": 76.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2158389248.0, "5": 2338462720.0, "10": 2338462720.0, "15": 2338462720.0, "20": 2338462720.0, "25": 2338462720.0, "30": 2338462720.0, "35": 2338462720.0, "40": 2338462720.0, "45": 2338462720.0, "50": 2338462720.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.20849, "5": 0.21341, "10": 0.21229, "15": 0.21253, "20": 0.21156, "25": 0.20948, "30": 0.20977, "35": 0.21023, "40": 0.20885, "45": 0.20897, "50": 0.20968}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json index 517c935..7bccac3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93628, 10.89335, 10.87322, 10.7487, 10.65379, 10.15754, 10.2464, 10.15175, 9.83801]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [68.0, 64.0, 61.0, 58.0, 55.0, 85.0, 77.0, 68.0, 78.0, 63.0]}} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92799, "10": 10.90789, "15": 10.88313, "20": 10.77626, "25": 10.59138, "30": 10.39195, "35": 10.29687, "40": 10.0964, "45": 9.84466, "50": 9.90919}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 68.0, "5": 64.0, "10": 61.0, "15": 58.0, "20": 64.0, "25": 58.0, "30": 85.0, "35": 66.0, "40": 85.0, "45": 82.0, "50": 68.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2158389248.0, "5": 2338462720.0, "10": 2338462720.0, "15": 2338462720.0, "20": 2338462720.0, "25": 2338462720.0, "30": 2338462720.0, "35": 2338462720.0, "40": 2338462720.0, "45": 2338462720.0, "50": 2338462720.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.28087, "5": 0.21793, "10": 0.21831, "15": 0.21612, "20": 0.21234, "25": 0.21207, "30": 0.217, "35": 0.21022, "40": 0.21151, "45": 0.20721, "50": 0.20653}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml index 7351e98..3c09214 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 7a8ec5b..b18f1c4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.92705, - 10.93624, - 10.89333, - 10.87317, - 10.74871, - 10.65379, - 10.15753, - 10.24638, - 10.15178, 
- 9.83806 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1653.0, - 1874.0, - 1994.0, - 1828.0, - 1769.0, - 1845.0, - 1674.0, - 1957.0, - 2364.0, - 2345.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 11.05896, - 0.21941, - 0.22052, - 0.22086, - 0.22118, - 0.22063, - 0.22075, - 0.22064, - 0.22956, - 0.23548 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92794, "10": 10.90787, "15": 10.88313, "20": 10.77627, "25": 10.5914, "30": 10.39192, "35": 10.29687, "40": 10.0964, "45": 9.84467, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1653.0, "5": 1990.0, "10": 1417.0, "15": 1950.0, "20": 1611.0, "25": 1660.0, "30": 2007.0, "35": 2024.0, "40": 2229.0, "45": 2150.0, "50": 2473.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 484999168.0, "5": 484999168.0, "10": 484999168.0, "15": 484999168.0, "20": 484999168.0, "25": 484999168.0, "30": 484999168.0, "35": 484999168.0, "40": 484999168.0, "45": 484999168.0, "50": 484999168.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1898060800.0, "10": 1898060800.0, "15": 1898060800.0, "20": 1898060800.0, "25": 1898060800.0, "30": 1898060800.0, "35": 1898060800.0, "40": 1898060800.0, "45": 1898060800.0, "50": 1898060800.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.82173, "5": 0.21481, "10": 0.21483, "15": 0.23, "20": 0.23045, "25": 0.20751, "30": 0.20833, "35": 0.22821, "40": 0.22678, "45": 0.21061, "50": 0.21097}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 265ad7c..4c970b1 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93626, 10.89335, 10.87325, 10.74869, 10.65372, 10.15755, 10.24642, 10.15177, 9.83802]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1627.0, 1889.0, 1973.0, 1785.0, 1797.0, 1836.0, 1602.0, 2034.0, 2316.0, 2307.0]}, "iteration_timing_avg": 0.15396205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 489193472.0, "5": 489193472.0, "10": 489193472.0, "15": 489193472.0, "20": 489193472.0, "25": 489193472.0, "30": 489193472.0, "35": 489193472.0, "40": 489193472.0, "45": 489193472.0, "50": 489193472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 1720084480.0, "5": 1902255104.0, "10": 1902255104.0, "15": 1902255104.0, "20": 1902255104.0, "25": 1902255104.0, "30": 1902255104.0, "35": 1902255104.0, "40": 1902255104.0, "45": 1902255104.0, "50": 1902255104.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.69434, "5": 0.20973, "10": 0.21081, "15": 0.21451, "20": 0.21302, "25": 0.21379, "30": 0.21504, "35": 0.20953, "40": 0.2084, "45": 0.20756, "50": 0.20954}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 503531d..4b3071c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json index e2ce2f1..7feb6ff 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.92705, - 10.93624, - 10.89333, - 10.87317, - 10.74871, - 10.65379, - 10.15753, - 10.24638, - 10.15178, - 9.83806 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1653.0, - 1874.0, - 1994.0, - 1828.0, - 1769.0, - 1845.0, - 1674.0, - 1957.0, - 2364.0, - 2345.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.20057, - 0.21739, - 0.21735, - 0.21626, - 0.2165, - 0.21447, - 0.21821, - 0.21559, - 0.21472, - 0.21558 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92794, "10": 10.90787, "15": 10.88313, "20": 10.77627, "25": 10.5914, "30": 10.39192, "35": 10.29687, "40": 10.0964, "45": 9.84467, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1653.0, "5": 1990.0, "10": 1417.0, "15": 1950.0, "20": 1611.0, "25": 1660.0, "30": 2007.0, "35": 2024.0, "40": 2229.0, "45": 2150.0, "50": 2473.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, 
"mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1714841600.0, "5": 1895963648.0, "10": 1895963648.0, "15": 1895963648.0, "20": 1895963648.0, "25": 1895963648.0, "30": 1895963648.0, "35": 1895963648.0, "40": 1895963648.0, "45": 1895963648.0, "50": 1895963648.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.21586, "5": 0.20927, "10": 0.21435, "15": 0.2285, "20": 0.23508, "25": 0.21704, "30": 0.20827, "35": 0.20577, "40": 0.20509, "45": 0.20489, "50": 0.20479}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json index 265ad7c..62c52c3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93626, 10.89335, 10.87325, 10.74869, 10.65372, 10.15755, 10.24642, 10.15177, 9.83802]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1627.0, 1889.0, 1973.0, 1785.0, 1797.0, 1836.0, 1602.0, 2034.0, 2316.0, 2307.0]}, "iteration_timing_avg": 0.15396205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1715890176.0, "5": 1895963648.0, "10": 1895963648.0, "15": 1895963648.0, "20": 1895963648.0, "25": 1895963648.0, "30": 1895963648.0, "35": 1895963648.0, "40": 1895963648.0, "45": 1895963648.0, "50": 1895963648.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.15208, "5": 0.20628, "10": 0.20558, "15": 0.20362, "20": 0.20249, "25": 0.2031, "30": 0.20314, "35": 0.20265, "40": 0.20375, "45": 0.20367, "50": 0.20562}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml index d5ea7ea..a5565c4 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json index 08406d2..0b7c6e5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.92705, - 10.93624, - 10.89333, - 10.87317, - 10.74871, - 10.65379, - 10.15753, - 10.24638, - 10.15178, - 9.83806 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1653.0, - 1874.0, - 1994.0, - 1828.0, - 1769.0, - 1845.0, - 1674.0, - 1957.0, - 2364.0, - 2345.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.47055, - 0.34439, - 0.22313, - 0.22277, - 0.22175, - 0.21936, - 0.23348, - 0.22009, - 0.22043, - 0.21934 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92794, "10": 10.90787, "15": 10.88313, "20": 10.77627, "25": 10.5914, "30": 10.39192, "35": 10.29687, "40": 10.0964, "45": 9.84467, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1653.0, "5": 1990.0, "10": 1417.0, "15": 1950.0, "20": 1611.0, "25": 1660.0, "30": 2007.0, "35": 2024.0, "40": 2229.0, "45": 2150.0, "50": 2473.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 489193472.0, "5": 489193472.0, "10": 489193472.0, "15": 489193472.0, "20": 489193472.0, "25": 489193472.0, "30": 489193472.0, "35": 489193472.0, "40": 489193472.0, "45": 489193472.0, "50": 489193472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1902255104.0, "10": 1902255104.0, "15": 1902255104.0, "20": 1902255104.0, "25": 1902255104.0, "30": 1902255104.0, "35": 1902255104.0, "40": 1902255104.0, "45": 1902255104.0, "50": 1902255104.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.51293, "5": 0.21814, "10": 0.21945, "15": 0.21385, "20": 0.21265, "25": 0.20878, "30": 0.2079, "35": 0.20805, "40": 0.2085, "45": 0.20774, "50": 0.20826}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json index 265ad7c..2f1b7f5 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.92705, 10.93626, 10.89335, 10.87325, 10.74869, 10.65372, 10.15755, 10.24642, 10.15177, 9.83802]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1627.0, 1889.0, 1973.0, 1785.0, 1797.0, 1836.0, 1602.0, 2034.0, 2316.0, 2307.0]}, "iteration_timing_avg": 0.15396205882352942} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.52844, "5": 0.21524, "10": 0.2168, "15": 0.21669, "20": 0.21232, "25": 0.21118, "30": 0.21093, "35": 0.21472, "40": 0.21279, "45": 0.21532, "50": 0.2125}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index f1d58db..5d208f6 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..246df0b --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92968, "10": 10.90475, "15": 10.87123, "20": 10.75001, "25": 10.53756, "30": 10.32551, "35": 10.22891, "40": 10.01975, "45": 9.75547, "50": 9.84069, "55": 9.81451, "60": 9.42443, "65": 8.8671, "70": 9.67898, "75": 9.36666, "80": 9.35304, "85": 9.56705, "90": 9.77582, "95": 9.48327, "100": 9.35875}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 623.0, "5": 670.0, "10": 559.0, "15": 641.0, "20": 535.0, "25": 629.0, "30": 647.0, "35": 702.0, "40": 787.0, "45": 768.0, "50": 895.0, "55": 924.0, "60": 868.0, "65": 929.0, "70": 1102.0, "75": 918.0, "80": 1152.0, "85": 1143.0, "90": 1086.0, "95": 1096.0, "100": 1134.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 430735872.0, "5": 430735872.0, "10": 430735872.0, "15": 430735872.0, "20": 430735872.0, "25": 430735872.0, "30": 430735872.0, "35": 430735872.0, "40": 430735872.0, "45": 430735872.0, "50": 430735872.0, "55": 430735872.0, "60": 430735872.0, "65": 430735872.0, "70": 430735872.0, "75": 430735872.0, "80": 430735872.0, "85": 430735872.0, "90": 430735872.0, "95": 430735872.0, "100": 430735872.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 677323264.0, "5": 859493376.0, "10": 859493376.0, "15": 859493376.0, "20": 859493376.0, "25": 859493376.0, "30": 859493376.0, "35": 859493376.0, "40": 859493376.0, "45": 859493376.0, "50": 859493888.0, "55": 859493888.0, "60": 859493888.0, "65": 859493888.0, "70": 859493888.0, "75": 859493888.0, "80": 859493888.0, "85": 859493888.0, "90": 859493888.0, "95": 859493888.0, "100": 859493888.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 20.18762, "5": 0.40538, "10": 0.40541, "15": 0.40396, "20": 0.4026, "25": 0.40272, "30": 0.40173, "35": 0.40215, "40": 0.40291, "45": 0.40115, "50": 0.4029, "55": 0.39683, "60": 0.40123, "65": 0.40176, "70": 0.40407, "75": 0.4038, "80": 0.40337, "85": 0.4013, "90": 0.43321, "95": 0.40039, "100": 0.39449}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..d51aa6c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.93292, "5": 10.92969, "10": 10.90473, "15": 10.87121, "20": 10.74997, "25": 10.53751, "30": 10.32549, "35": 10.22894, "40": 10.01974, "45": 9.75549, "50": 9.84069, "55": 9.81451, "60": 9.42443, "65": 8.86707, "70": 9.67897, "75": 9.36665, "80": 9.35303, "85": 9.56706, "90": 9.77585, "95": 9.48329, "100": 9.3588}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 582.0, "5": 618.0, "10": 496.0, "15": 672.0, "20": 600.0, "25": 619.0, "30": 678.0, "35": 697.0, "40": 775.0, "45": 770.0, "50": 894.0, "55": 906.0, "60": 932.0, "65": 960.0, "70": 1106.0, "75": 889.0, "80": 1186.0, "85": 1068.0, 
"90": 1077.0, "95": 1054.0, "100": 1160.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 431783936.0, "5": 431783936.0, "10": 431783936.0, "15": 431783936.0, "20": 431783936.0, "25": 431783936.0, "30": 431783936.0, "35": 431783936.0, "40": 431783936.0, "45": 431783936.0, "50": 431783936.0, "55": 431783936.0, "60": 431783936.0, "65": 431783936.0, "70": 431783936.0, "75": 431783936.0, "80": 431783936.0, "85": 431783936.0, "90": 431783936.0, "95": 431783936.0, "100": 431783936.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.90186, "5": 0.37688, "10": 0.37024, "15": 0.381, "20": 0.38683, "25": 0.39543, "30": 0.38049, "35": 0.36959, "40": 0.36509, "45": 0.364, "50": 0.36469, "55": 0.37647, "60": 0.37716, "65": 0.39072, "70": 0.39183, "75": 0.55129, "80": 0.39335, "85": 0.40289, "90": 0.41031, "95": 0.39498, "100": 0.3918}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml index 8942950..7ea4a4c 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: flash + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..41ffc8c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92717, "10": 10.9079, "15": 10.88295, "20": 10.77596, "25": 10.59261, "30": 10.39174, "35": 10.29701, "40": 10.09659, "45": 9.84469, "50": 9.90945, "55": 9.87774, "60": 9.49121, "65": 8.94257, "70": 9.72279, "75": 9.41891, "80": 9.40055, "85": 9.61186, "90": 9.81026, "95": 9.51723, "100": 9.40133}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1627.0, "5": 1890.0, "10": 1457.0, "15": 1906.0, "20": 1583.0, "25": 1689.0, "30": 1964.0, "35": 1959.0, "40": 2139.0, "45": 2097.0, "50": 2565.0, "55": 2346.0, "60": 2411.0, "65": 2866.0, "70": 3124.0, "75": 2691.0, "80": 3633.0, "85": 3354.0, "90": 3175.0, "95": 3373.0, "100": 3314.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 435847680.0, "5": 435847680.0, "10": 435847680.0, "15": 435847680.0, "20": 
436896256.0, "25": 435847680.0, "30": 436896256.0, "35": 435847680.0, "40": 435847680.0, "45": 435847680.0, "50": 435847680.0, "55": 435847680.0, "60": 435847680.0, "65": 435847680.0, "70": 435847680.0, "75": 435847680.0, "80": 435847680.0, "85": 435847680.0, "90": 436896256.0, "95": 435847680.0, "100": 435847680.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2245028864.0, "5": 2245028864.0, "10": 2245028864.0, "15": 2245028864.0, "20": 2245028864.0, "25": 2245028864.0, "30": 2245028864.0, "35": 2245028864.0, "40": 2245028864.0, "45": 2245028864.0, "50": 2245028864.0, "55": 2245028864.0, "60": 2245028864.0, "65": 2245028864.0, "70": 2245028864.0, "75": 2245028864.0, "80": 2245028864.0, "85": 2245028864.0, "90": 2245028864.0, "95": 2245028864.0, "100": 2245028864.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.11576, "5": 0.17876, "10": 0.17866, "15": 0.17484, "20": 0.17599, "25": 0.17157, "30": 0.17226, "35": 0.17334, "40": 0.17406, "45": 0.17188, "50": 0.17332, "55": 0.16781, "60": 0.17082, "65": 0.17111, "70": 0.16958, "75": 0.16914, "80": 0.16993, "85": 0.17084, "90": 0.16954, "95": 0.17091, "100": 0.16875}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b04c987 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92719, "10": 10.90793, "15": 10.88296, "20": 10.77597, "25": 10.59265, "30": 10.39174, "35": 10.29699, "40": 10.09664, "45": 9.84472, "50": 9.90947, "55": 9.87769, "60": 9.49123, "65": 8.94258, "70": 9.72278, "75": 9.41891, "80": 9.40054, "85": 9.61183, "90": 9.81027, "95": 9.51724, "100": 9.40132}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1676.0, "5": 1966.0, "10": 1436.0, "15": 1942.0, "20": 1633.0, "25": 1680.0, "30": 1907.0, "35": 1926.0, "40": 2160.0, "45": 2121.0, "50": 2515.0, "55": 2420.0, "60": 2301.0, "65": 2731.0, "70": 3205.0, "75": 2629.0, "80": 3565.0, "85": 3231.0, "90": 3144.0, "95": 3361.0, "100": 3313.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 437944320.0, "5": 437944320.0, "10": 437944320.0, "15": 437944320.0, "20": 437944320.0, "25": 437944320.0, "30": 437944320.0, "35": 437944320.0, "40": 437944320.0, "45": 437944320.0, "50": 437944320.0, "55": 437944320.0, "60": 437944320.0, "65": 437944320.0, "70": 437944320.0, "75": 437944320.0, "80": 437944320.0, "85": 437944320.0, "90": 437944320.0, "95": 437944320.0, "100": 437944320.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.72113, "5": 0.17463, "10": 0.17559, "15": 0.17017, "20": 0.17113, "25": 0.17408, "30": 0.16993, "35": 0.1678, "40": 0.16868, "45": 0.17028, "50": 0.16848, "55": 0.16705, "60": 0.16681, "65": 0.16659, "70": 0.16544, "75": 0.16674, "80": 0.16617, "85": 0.17285, "90": 0.16517, "95": 0.16707, "100": 0.16549}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml index 95f706d..d9d60ba 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -45,4 +45,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c74d094 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.9272, "10": 10.90786, "15": 10.88292, "20": 10.77603, "25": 10.59271, "30": 10.39175, "35": 10.297, "40": 10.09664, "45": 9.84468, "50": 9.9094, "55": 9.87765, "60": 9.49117, "65": 8.94241, "70": 9.72269, "75": 9.41888, "80": 9.40055, "85": 9.61184, "90": 9.81022, "95": 9.51724, "100": 9.4013}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1637.0, "5": 1988.0, "10": 1422.0, "15": 1936.0, "20": 1566.0, "25": 1705.0, "30": 1974.0, "35": 2043.0, "40": 2249.0, "45": 2145.0, "50": 2454.0, "55": 2388.0, "60": 2479.0, "65": 2674.0, "70": 3241.0, "75": 2687.0, "80": 3465.0, "85": 3382.0, "90": 3023.0, "95": 3415.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 484999680.0, "5": 484999680.0, "10": 484999680.0, "15": 484999680.0, "20": 484999680.0, "25": 484999680.0, "30": 484999680.0, "35": 484999680.0, "40": 484999680.0, "45": 484999680.0, "50": 484999680.0, "55": 484999680.0, "60": 484999680.0, "65": 484999680.0, "70": 484999680.0, "75": 484999680.0, "80": 484999680.0, "85": 484999680.0, "90": 484999680.0, "95": 484999680.0, "100": 484999680.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1229747712.0, "5": 1407724032.0, "10": 1407724032.0, "15": 1407724032.0, "20": 1407724032.0, "25": 1407724032.0, "30": 1407724032.0, "35": 1407724032.0, "40": 1407724032.0, "45": 1407724032.0, "50": 1407724032.0, "55": 1407724032.0, "60": 1407724032.0, "65": 1407724032.0, "70": 1407724032.0, "75": 1407724032.0, "80": 1407724032.0, "85": 1407724032.0, "90": 1407724032.0, "95": 1407724032.0, "100": 1407724032.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.03168, "5": 0.19551, "10": 0.19611, 
"15": 0.19461, "20": 0.1953, "25": 0.19512, "30": 0.19427, "35": 0.19489, "40": 0.19376, "45": 0.19316, "50": 0.19289, "55": 0.19391, "60": 0.19362, "65": 0.19446, "70": 0.19495, "75": 0.19344, "80": 0.19443, "85": 0.19556, "90": 0.19447, "95": 0.19353, "100": 0.19477}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b553064 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918, "55": 9.87766, "60": 9.49126, "65": 8.94236, "70": 9.72266, "75": 9.41909, "80": 9.40076, "85": 9.61209, "90": 9.81018, "95": 9.51718, "100": 9.40151}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0, "55": 2485.0, "60": 2351.0, "65": 2777.0, "70": 3197.0, "75": 2615.0, "80": 3395.0, "85": 3340.0, "90": 3060.0, "95": 3408.0, "100": 3242.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0, "55": 487096320.0, "60": 487096320.0, "65": 487096320.0, "70": 487096320.0, "75": 487096320.0, "80": 487096320.0, "85": 487096320.0, "90": 487096320.0, "95": 487096320.0, "100": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0, "55": 1900157952.0, "60": 1900157952.0, "65": 1900157952.0, "70": 1900157952.0, "75": 1900157952.0, "80": 1900157952.0, "85": 1900157952.0, "90": 1900157952.0, "95": 1900157952.0, "100": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.62709, "5": 0.20874, "10": 0.20997, "15": 0.20605, "20": 0.20488, "25": 0.20446, "30": 0.20451, "35": 0.20517, "40": 0.20621, "45": 0.2029, "50": 0.20256, "55": 0.20798, "60": 0.2075, "65": 0.2092, "70": 0.21034, "75": 0.20842, "80": 0.20955, "85": 0.20982, "90": 0.20951, "95": 0.20881, "100": 0.20686}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml index e74a0cc..6e01b08 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..cb57241 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92715, "10": 10.90788, "15": 10.88296, "20": 10.77598, "25": 10.59263, "30": 10.39177, "35": 10.297, "40": 10.09664, "45": 9.84468, "50": 9.90938, "55": 9.87767, "60": 9.4912, "65": 8.94239, "70": 9.72271, "75": 9.41883, "80": 9.40054, "85": 9.61183, "90": 9.81021, "95": 9.51721, "100": 9.40125}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 61.0, "5": 67.0, "10": 45.0, "15": 63.0, "20": 62.0, "25": 59.0, "30": 62.0, "35": 73.0, "40": 68.0, "45": 80.0, "50": 96.0, "55": 51.0, "60": 83.0, "65": 93.0, "70": 91.0, "75": 76.0, "80": 78.0, "85": 78.0, "90": 88.0, "95": 82.0, "100": 90.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096832.0, "5": 487096832.0, "10": 487096832.0, "15": 487096832.0, "20": 487096832.0, "25": 487096832.0, "30": 487096832.0, "35": 487096832.0, "40": 487096832.0, "45": 487096832.0, "50": 487096832.0, "55": 487096832.0, "60": 487096832.0, "65": 487096832.0, "70": 487096832.0, "75": 487096832.0, "80": 487096832.0, "85": 487096832.0, "90": 487096832.0, "95": 487096832.0, "100": 487096832.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1668052480.0, "5": 1848125952.0, "10": 1848125952.0, "15": 1848125952.0, "20": 1848125952.0, "25": 1848125952.0, "30": 1848125952.0, "35": 1848125952.0, "40": 1848125952.0, "45": 1848125952.0, "50": 1848125952.0, "55": 1848125952.0, "60": 1848125952.0, "65": 1848125952.0, "70": 1848125952.0, "75": 1848125952.0, "80": 1848125952.0, "85": 1848125952.0, "90": 1848125952.0, "95": 1848125952.0, "100": 1848125952.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8138, "5": 0.19926, "10": 0.19439, "15": 0.19389, "20": 0.19552, "25": 0.19186, "30": 0.19341, "35": 0.19268, "40": 0.19289, "45": 0.19218, "50": 0.19214, "55": 0.19236, "60": 0.19561, "65": 0.19299, "70": 0.19296, "75": 0.19308, "80": 0.19336, "85": 0.19452, "90": 0.19164, "95": 0.19304, "100": 0.19217}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..9d58200 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92799, "10": 10.90789, "15": 10.88313, "20": 10.77626, "25": 10.59138, "30": 10.39195, "35": 10.29687, "40": 10.0964, "45": 9.84466, "50": 9.90919, "55": 9.87765, "60": 9.49125, "65": 8.94236, "70": 9.72262, "75": 9.4191, "80": 9.40075, "85": 9.61211, "90": 9.81017, "95": 9.51717, "100": 9.40147}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 68.0, "5": 64.0, "10": 61.0, "15": 58.0, "20": 64.0, "25": 58.0, "30": 85.0, "35": 66.0, "40": 85.0, "45": 82.0, "50": 68.0, "55": 84.0, "60": 71.0, "65": 85.0, "70": 92.0, "75": 62.0, "80": 87.0, "85": 74.0, "90": 71.0, "95": 79.0, "100": 72.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0, "55": 487096320.0, "60": 487096320.0, "65": 487096320.0, "70": 487096320.0, "75": 487096320.0, "80": 487096320.0, "85": 487096320.0, "90": 487096320.0, "95": 487096320.0, "100": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2158389248.0, "5": 2338462720.0, "10": 2338462720.0, "15": 2338462720.0, "20": 2338462720.0, "25": 2338462720.0, "30": 2338462720.0, "35": 2338462720.0, "40": 2338462720.0, "45": 2338462720.0, "50": 2338462720.0, "55": 2338462720.0, "60": 2338462720.0, "65": 2338462720.0, "70": 2338462720.0, "75": 2338462720.0, "80": 2338462720.0, "85": 2338462720.0, "90": 2338462720.0, "95": 2338462720.0, "100": 2338462720.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88233, "5": 0.22608, "10": 0.21553, "15": 0.21336, "20": 0.21247, "25": 0.21243, "30": 0.23729, "35": 0.2257, "40": 0.21253, "45": 0.21718, "50": 0.21345, "55": 0.21376, "60": 0.21327, "65": 0.21242, "70": 0.21194, "75": 0.21274, "80": 0.21252, "85": 0.21061, "90": 0.21024, "95": 0.21239, "100": 0.21117}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml index f041fd4..36f9e6e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ 
MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..178deca --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.9272, "10": 10.90786, "15": 10.88292, "20": 10.77603, "25": 10.59271, "30": 10.39175, "35": 10.297, "40": 10.09664, "45": 9.84468, "50": 9.9094, "55": 9.87765, "60": 9.49117, "65": 8.94241, "70": 9.72269, "75": 9.41888, "80": 9.40055, "85": 9.61184, "90": 9.81022, "95": 9.51724, "100": 9.4013}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1637.0, "5": 1988.0, "10": 1422.0, "15": 1936.0, "20": 1566.0, "25": 1705.0, "30": 1974.0, "35": 2043.0, "40": 2249.0, "45": 2145.0, "50": 2454.0, "55": 2388.0, "60": 2479.0, "65": 2674.0, "70": 3241.0, "75": 2687.0, "80": 3465.0, "85": 3382.0, "90": 3023.0, "95": 3415.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096832.0, "5": 487096832.0, "10": 487096832.0, "15": 487096832.0, "20": 487096832.0, "25": 487096832.0, "30": 487096832.0, "35": 487096832.0, "40": 487096832.0, "45": 487096832.0, "50": 487096832.0, "55": 487096832.0, "60": 487096832.0, "65": 487096832.0, "70": 487096832.0, "75": 487096832.0, "80": 487096832.0, "85": 487096832.0, "90": 487096832.0, "95": 487096832.0, "100": 487096832.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1229747712.0, "5": 1409821184.0, "10": 1409821184.0, "15": 1409821184.0, "20": 1409821184.0, "25": 1409821184.0, "30": 1409821184.0, "35": 1409821184.0, "40": 1409821184.0, "45": 1409821184.0, "50": 1409821184.0, "55": 1409821184.0, "60": 1409821184.0, "65": 1409821184.0, "70": 1409821184.0, "75": 1409821184.0, "80": 1409821184.0, "85": 1409821184.0, "90": 1409821184.0, "95": 1409821184.0, "100": 1409821184.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.17732, "5": 0.20296, "10": 0.20325, "15": 0.20174, "20": 0.20216, "25": 0.20151, "30": 0.20223, "35": 0.20172, "40": 0.20152, "45": 0.20108, "50": 0.20046, "55": 0.1934, "60": 0.19326, "65": 0.19362, "70": 0.19278, "75": 0.19295, "80": 0.19307, "85": 0.19325, "90": 0.19304, "95": 0.19317, "100": 0.19328}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..f329d7e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918, "55": 9.87766, "60": 9.49126, "65": 8.94236, "70": 
9.72266, "75": 9.41909, "80": 9.40076, "85": 9.61209, "90": 9.81018, "95": 9.51718, "100": 9.40151}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0, "55": 2485.0, "60": 2351.0, "65": 2777.0, "70": 3197.0, "75": 2615.0, "80": 3395.0, "85": 3340.0, "90": 3060.0, "95": 3408.0, "100": 3242.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0, "55": 487096320.0, "60": 487096320.0, "65": 487096320.0, "70": 487096320.0, "75": 487096320.0, "80": 487096320.0, "85": 487096320.0, "90": 487096320.0, "95": 487096320.0, "100": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0, "55": 1900157952.0, "60": 1900157952.0, "65": 1900157952.0, "70": 1900157952.0, "75": 1900157952.0, "80": 1900157952.0, "85": 1900157952.0, "90": 1900157952.0, "95": 1900157952.0, "100": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.97156, "5": 0.2168, "10": 0.21367, "15": 0.22327, "20": 0.20978, "25": 0.20953, "30": 0.21033, "35": 0.20882, "40": 0.21062, "45": 0.20902, "50": 0.20932, "55": 0.21153, "60": 0.20966, "65": 0.20901, "70": 0.20892, "75": 0.21183, "80": 0.21189, "85": 0.21367, "90": 0.21386, "95": 0.21529, "100": 0.21247}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index e683475..bc815ea 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..32f5f37 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, 
"step_interval": 5, "values": {"1": 10.92655, "5": 10.9272, "10": 10.90786, "15": 10.88292, "20": 10.77603, "25": 10.59271, "30": 10.39175, "35": 10.297, "40": 10.09664, "45": 9.84468, "50": 9.9094, "55": 9.87765, "60": 9.49117, "65": 8.94241, "70": 9.72269, "75": 9.41888, "80": 9.40055, "85": 9.61184, "90": 9.81022, "95": 9.51724, "100": 9.4013}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1637.0, "5": 1988.0, "10": 1422.0, "15": 1936.0, "20": 1566.0, "25": 1705.0, "30": 1974.0, "35": 2043.0, "40": 2249.0, "45": 2145.0, "50": 2454.0, "55": 2388.0, "60": 2479.0, "65": 2674.0, "70": 3241.0, "75": 2687.0, "80": 3465.0, "85": 3382.0, "90": 3023.0, "95": 3415.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096832.0, "5": 487096832.0, "10": 487096832.0, "15": 487096832.0, "20": 487096832.0, "25": 487096832.0, "30": 487096832.0, "35": 487096832.0, "40": 487096832.0, "45": 487096832.0, "50": 487096832.0, "55": 487096832.0, "60": 487096832.0, "65": 487096832.0, "70": 487096832.0, "75": 487096832.0, "80": 487096832.0, "85": 487096832.0, "90": 487096832.0, "95": 487096832.0, "100": 487096832.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1225553408.0, "5": 1405626880.0, "10": 1405626880.0, "15": 1405626880.0, "20": 1405626880.0, "25": 1405626880.0, "30": 1405626880.0, "35": 1405626880.0, "40": 1405626880.0, "45": 1405626880.0, "50": 1405626880.0, "55": 1405626880.0, "60": 1405626880.0, "65": 1405626880.0, "70": 1405626880.0, "75": 1405626880.0, "80": 1405626880.0, "85": 1405626880.0, "90": 1405626880.0, "95": 1405626880.0, "100": 1405626880.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.43309, "5": 0.19435, "10": 0.19438, "15": 0.19481, "20": 0.19447, "25": 0.19273, "30": 0.19383, "35": 0.19374, "40": 0.19351, "45": 0.19317, "50": 0.19324, "55": 0.19031, "60": 0.19029, "65": 0.1911, "70": 0.19168, "75": 0.19169, "80": 0.1923, "85": 0.19181, "90": 0.19164, "95": 0.19197, "100": 0.19113}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b8ff8d2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918, "55": 9.87766, "60": 9.49126, "65": 8.94236, "70": 9.72266, "75": 9.41909, "80": 9.40076, "85": 9.61209, "90": 9.81018, "95": 9.51718, "100": 9.40151}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0, "55": 2485.0, "60": 2351.0, "65": 2777.0, "70": 3197.0, "75": 2615.0, "80": 3395.0, "85": 3340.0, "90": 3060.0, "95": 3408.0, "100": 3242.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, 
"step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0, "55": 487096320.0, "60": 487096320.0, "65": 487096320.0, "70": 487096320.0, "75": 487096320.0, "80": 487096320.0, "85": 487096320.0, "90": 487096320.0, "95": 487096320.0, "100": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1715890176.0, "5": 1895963648.0, "10": 1895963648.0, "15": 1895963648.0, "20": 1895963648.0, "25": 1895963648.0, "30": 1895963648.0, "35": 1895963648.0, "40": 1895963648.0, "45": 1895963648.0, "50": 1895963648.0, "55": 1895963648.0, "60": 1895963648.0, "65": 1895963648.0, "70": 1895963648.0, "75": 1895963648.0, "80": 1895963648.0, "85": 1895963648.0, "90": 1895963648.0, "95": 1895963648.0, "100": 1895963648.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.12901, "5": 0.21538, "10": 0.21548, "15": 0.2128, "20": 0.21291, "25": 0.21127, "30": 0.21513, "35": 0.21158, "40": 0.213, "45": 0.21093, "50": 0.2091, "55": 0.20696, "60": 0.21221, "65": 0.20519, "70": 0.2076, "75": 0.20862, "80": 0.20653, "85": 0.20713, "90": 0.20604, "95": 0.21111, "100": 0.20922}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml index 1b416d0..5205899 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..a4da329 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.9272, "10": 10.90786, "15": 10.88292, "20": 10.77603, "25": 10.59271, "30": 10.39175, "35": 10.297, "40": 10.09664, "45": 9.84468, "50": 9.9094, "55": 9.87765, "60": 9.49117, "65": 8.94241, "70": 9.72269, "75": 9.41888, "80": 9.40055, "85": 9.61184, "90": 9.81022, "95": 9.51724, "100": 9.4013}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1637.0, "5": 
1988.0, "10": 1422.0, "15": 1936.0, "20": 1566.0, "25": 1705.0, "30": 1974.0, "35": 2043.0, "40": 2249.0, "45": 2145.0, "50": 2454.0, "55": 2388.0, "60": 2479.0, "65": 2674.0, "70": 3241.0, "75": 2687.0, "80": 3465.0, "85": 3382.0, "90": 3023.0, "95": 3415.0, "100": 3347.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 490242560.0, "5": 490242560.0, "10": 490242560.0, "15": 490242560.0, "20": 490242560.0, "25": 490242560.0, "30": 490242560.0, "35": 490242560.0, "40": 490242560.0, "45": 490242560.0, "50": 490242560.0, "55": 490242560.0, "60": 490242560.0, "65": 490242560.0, "70": 490242560.0, "75": 490242560.0, "80": 490242560.0, "85": 490242560.0, "90": 490242560.0, "95": 490242560.0, "100": 490242560.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1228699136.0, "5": 1414015488.0, "10": 1414015488.0, "15": 1414015488.0, "20": 1414015488.0, "25": 1414015488.0, "30": 1414015488.0, "35": 1414015488.0, "40": 1414015488.0, "45": 1414015488.0, "50": 1414015488.0, "55": 1414015488.0, "60": 1414015488.0, "65": 1414015488.0, "70": 1414015488.0, "75": 1414015488.0, "80": 1414015488.0, "85": 1414015488.0, "90": 1414015488.0, "95": 1414015488.0, "100": 1414015488.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.55848, "5": 0.19452, "10": 0.1941, "15": 0.19706, "20": 0.19456, "25": 0.19225, "30": 0.19466, "35": 0.19187, "40": 0.19248, "45": 0.1906, "50": 0.19117, "55": 0.20393, "60": 0.20447, "65": 0.20474, "70": 0.20347, "75": 0.20347, "80": 0.20417, "85": 0.2045, "90": 0.20333, "95": 0.20388, "100": 0.20321}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..aa12a79 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92705, "5": 10.92795, "10": 10.90786, "15": 10.88314, "20": 10.77629, "25": 10.5914, "30": 10.39194, "35": 10.29685, "40": 10.09639, "45": 9.84463, "50": 9.90918, "55": 9.87766, "60": 9.49126, "65": 8.94236, "70": 9.72266, "75": 9.41909, "80": 9.40076, "85": 9.61209, "90": 9.81018, "95": 9.51718, "100": 9.40151}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1627.0, "5": 2010.0, "10": 1368.0, "15": 1897.0, "20": 1626.0, "25": 1769.0, "30": 1899.0, "35": 1988.0, "40": 2199.0, "45": 2158.0, "50": 2494.0, "55": 2485.0, "60": 2351.0, "65": 2777.0, "70": 3197.0, "75": 2615.0, "80": 3395.0, "85": 3340.0, "90": 3060.0, "95": 3408.0, "100": 3242.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 487096320.0, "5": 487096320.0, "10": 487096320.0, "15": 487096320.0, "20": 487096320.0, "25": 487096320.0, "30": 487096320.0, "35": 487096320.0, "40": 487096320.0, "45": 487096320.0, "50": 487096320.0, "55": 487096320.0, "60": 487096320.0, "65": 487096320.0, "70": 487096320.0, "75": 487096320.0, "80": 487096320.0, "85": 487096320.0, "90": 487096320.0, "95": 487096320.0, "100": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, 
"values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0, "55": 1900157952.0, "60": 1900157952.0, "65": 1900157952.0, "70": 1900157952.0, "75": 1900157952.0, "80": 1900157952.0, "85": 1900157952.0, "90": 1900157952.0, "95": 1900157952.0, "100": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.22421, "5": 0.2135, "10": 0.21228, "15": 0.21124, "20": 0.21112, "25": 0.21341, "30": 0.21004, "35": 0.21039, "40": 0.21245, "45": 0.21157, "50": 0.21206, "55": 0.21309, "60": 0.21493, "65": 0.2203, "70": 0.21919, "75": 0.2139, "80": 0.21624, "85": 0.21803, "90": 0.21757, "95": 0.21527, "100": 0.21237}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml index 4f92283..5d6f146 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..3ce62ed --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92718, "10": 10.90795, "15": 10.88296, "20": 10.77593, "25": 10.59272, "30": 10.39174, "35": 10.29697, "40": 10.09661, "45": 9.84472, "50": 9.90947, "55": 9.87772, "60": 9.49122, "65": 8.94261, "70": 9.72277, "75": 9.41891, "80": 9.40056, "85": 9.61186, "90": 9.81027, "95": 9.51723, "100": 9.40137}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1611.0, "5": 1973.0, "10": 1470.0, "15": 1891.0, "20": 1584.0, "25": 1645.0, "30": 1962.0, "35": 1981.0, "40": 2112.0, "45": 2100.0, "50": 2531.0, "55": 2378.0, "60": 2386.0, "65": 2711.0, "70": 3230.0, "75": 2725.0, "80": 3457.0, "85": 3332.0, "90": 3085.0, "95": 3461.0, "100": 3332.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 438469120.0, "5": 438469120.0, "10": 438469120.0, "15": 438469120.0, "20": 438469120.0, "25": 438469120.0, "30": 438469120.0, "35": 438469120.0, "40": 438469120.0, "45": 438469120.0, "50": 438469120.0, "55": 438469120.0, 
"60": 438469120.0, "65": 438469120.0, "70": 438469120.0, "75": 438469120.0, "80": 438469120.0, "85": 438469120.0, "90": 438469120.0, "95": 438469120.0, "100": 438469120.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1179678208.0, "5": 1361717760.0, "10": 1361717760.0, "15": 1361717760.0, "20": 1361717760.0, "25": 1361717760.0, "30": 1361717760.0, "35": 1361717760.0, "40": 1361717760.0, "45": 1361717760.0, "50": 1361717760.0, "55": 1361717760.0, "60": 1361717760.0, "65": 1361717760.0, "70": 1361717760.0, "75": 1361717760.0, "80": 1361717760.0, "85": 1361717760.0, "90": 1361717760.0, "95": 1361717760.0, "100": 1361717760.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.67908, "5": 0.18507, "10": 0.18222, "15": 0.18185, "20": 0.181, "25": 0.18035, "30": 0.18093, "35": 0.18016, "40": 0.17965, "45": 0.17953, "50": 0.17971, "55": 0.17583, "60": 0.1751, "65": 0.17527, "70": 0.17444, "75": 0.17517, "80": 0.17438, "85": 0.17443, "90": 0.17435, "95": 0.17419, "100": 0.17558}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..b3621a0 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92655, "5": 10.92717, "10": 10.90792, "15": 10.88291, "20": 10.77595, "25": 10.59266, "30": 10.39176, "35": 10.29699, "40": 10.09666, "45": 9.84474, "50": 9.90944, "55": 9.87774, "60": 9.49116, "65": 8.94259, "70": 9.72275, "75": 9.4189, "80": 9.40056, "85": 9.61183, "90": 9.81023, "95": 9.51721, "100": 9.4013}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1587.0, "5": 1991.0, "10": 1408.0, "15": 1899.0, "20": 1647.0, "25": 1674.0, "30": 1912.0, "35": 1972.0, "40": 2247.0, "45": 2075.0, "50": 2469.0, "55": 2421.0, "60": 2487.0, "65": 2765.0, "70": 3291.0, "75": 2709.0, "80": 3493.0, "85": 3365.0, "90": 3095.0, "95": 3435.0, "100": 3327.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 435847168.0, "5": 435847168.0, "10": 435847168.0, "15": 435847168.0, "20": 435847168.0, "25": 436895744.0, "30": 435847168.0, "35": 435847168.0, "40": 435847168.0, "45": 435847168.0, "50": 435847168.0, "55": 435847168.0, "60": 435847168.0, "65": 435847168.0, "70": 435847168.0, "75": 435847168.0, "80": 435847168.0, "85": 435847168.0, "90": 435847168.0, "95": 435847168.0, "100": 435847168.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1179682816.0, "5": 1359626240.0, "10": 1359626240.0, "15": 1359626240.0, "20": 1359626240.0, "25": 1359626240.0, "30": 1359626240.0, "35": 1359626240.0, "40": 1359626240.0, "45": 1359626240.0, "50": 1359626240.0, "55": 1359626240.0, "60": 1359626240.0, "65": 1359626240.0, "70": 1359626240.0, "75": 1359626240.0, "80": 1359626240.0, "85": 1359626240.0, "90": 1359626240.0, "95": 1359626240.0, "100": 1359626240.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.04316, "5": 0.1807, "10": 0.17867, "15": 0.17689, "20": 0.17644, "25": 
0.17764, "30": 0.17742, "35": 0.1794, "40": 0.17805, "45": 0.17812, "50": 0.18362, "55": 0.17265, "60": 0.17303, "65": 0.17109, "70": 0.17167, "75": 0.17216, "80": 0.17147, "85": 0.17705, "90": 0.17916, "95": 0.17291, "100": 0.17146}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml index bdb039f..d23188f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -44,4 +44,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..84dc899 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,142 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 10.88789, + "5": 10.90966, + "10": 10.87793, + "15": 10.86382, + "20": 10.75082, + "25": 10.5988, + "30": 10.40099, + "35": 10.30785, + "40": 10.10955, + "45": 9.85867, + "50": 9.92084, + "55": 9.88535, + "60": 9.50758, + "65": 8.95821, + "70": 9.72738, + "75": 9.42579, + "80": 9.40535, + "85": 9.61537, + "90": 9.81263, + "95": 9.52135, + "100": 9.40103 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 1742.0, + "5": 2115.0, + "10": 1468.0, + "15": 1877.0, + "20": 1665.0, + "25": 1643.0, + "30": 1900.0, + "35": 2086.0, + "40": 2185.0, + "45": 2254.0, + "50": 2496.0, + "55": 2418.0, + "60": 2489.0, + "65": 2697.0, + "70": 3267.0, + "75": 2631.0, + "80": 3442.0, + "85": 3440.0, + "90": 3075.0, + "95": 3348.0, + "100": 3389.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 246437376.0, + "5": 246437376.0, + "10": 246437376.0, + "15": 246437376.0, + "20": 246437376.0, + "25": 246437376.0, + "30": 246437376.0, + "35": 246437376.0, + "40": 246437376.0, + "45": 246437376.0, + "50": 246437376.0, + "55": 246437376.0, + "60": 246437376.0, + "65": 246437376.0, + "70": 246437376.0, + "75": 246437376.0, + "80": 246437376.0, + "85": 246437376.0, + "90": 246437376.0, + "95": 246437376.0, + "100": 246437376.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 1570924032.0, + "5": 1634534400.0, + "10": 1634534400.0, + "15": 1634534400.0, + "20": 1634534400.0, + "25": 1634534400.0, + "30": 1634593280.0, + "35": 1634593280.0, + "40": 
1634593280.0, + "45": 1634593280.0, + "50": 1634593280.0, + "55": 1634593280.0, + "60": 1634593280.0, + "65": 1634593280.0, + "70": 1634593280.0, + "75": 1634593280.0, + "80": 1634593280.0, + "85": 1634593280.0, + "90": 1634593280.0, + "95": 1634593280.0, + "100": 1634593280.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 6.22721, + "5": 0.27333, + "10": 0.27017, + "15": 0.26846, + "20": 0.26818, + "25": 0.26614, + "30": 0.26524, + "35": 0.30697, + "40": 0.2925, + "45": 0.26534, + "50": 0.26504, + "55": 0.26684, + "60": 0.26501, + "65": 0.26543, + "70": 0.26612, + "75": 0.26476, + "80": 0.26501, + "85": 0.26505, + "90": 0.26596, + "95": 0.26599, + "100": 0.2641 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..84dc899 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,142 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 10.88789, + "5": 10.90966, + "10": 10.87793, + "15": 10.86382, + "20": 10.75082, + "25": 10.5988, + "30": 10.40099, + "35": 10.30785, + "40": 10.10955, + "45": 9.85867, + "50": 9.92084, + "55": 9.88535, + "60": 9.50758, + "65": 8.95821, + "70": 9.72738, + "75": 9.42579, + "80": 9.40535, + "85": 9.61537, + "90": 9.81263, + "95": 9.52135, + "100": 9.40103 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 1742.0, + "5": 2115.0, + "10": 1468.0, + "15": 1877.0, + "20": 1665.0, + "25": 1643.0, + "30": 1900.0, + "35": 2086.0, + "40": 2185.0, + "45": 2254.0, + "50": 2496.0, + "55": 2418.0, + "60": 2489.0, + "65": 2697.0, + "70": 3267.0, + "75": 2631.0, + "80": 3442.0, + "85": 3440.0, + "90": 3075.0, + "95": 3348.0, + "100": 3389.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 246437376.0, + "5": 246437376.0, + "10": 246437376.0, + "15": 246437376.0, + "20": 246437376.0, + "25": 246437376.0, + "30": 246437376.0, + "35": 246437376.0, + "40": 246437376.0, + "45": 246437376.0, + "50": 246437376.0, + "55": 246437376.0, + "60": 246437376.0, + "65": 246437376.0, + "70": 246437376.0, + "75": 246437376.0, + "80": 246437376.0, + "85": 246437376.0, + "90": 246437376.0, + "95": 246437376.0, + "100": 246437376.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 1570924032.0, + "5": 1634534400.0, + "10": 1634534400.0, + "15": 1634534400.0, + "20": 1634534400.0, + "25": 1634534400.0, + "30": 1634593280.0, + "35": 1634593280.0, + "40": 1634593280.0, + "45": 1634593280.0, + "50": 1634593280.0, + "55": 1634593280.0, + "60": 1634593280.0, + "65": 1634593280.0, + "70": 1634593280.0, + "75": 1634593280.0, + "80": 1634593280.0, + "85": 1634593280.0, + "90": 1634593280.0, + "95": 1634593280.0, + "100": 1634593280.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 6.22721, + "5": 0.27333, + "10": 0.27017, + "15": 0.26846, + "20": 0.26818, + "25": 0.26614, + "30": 0.26524, + "35": 0.30697, + "40": 0.2925, + "45": 0.26534, + "50": 0.26504, + "55": 0.26684, + 
"60": 0.26501, + "65": 0.26543, + "70": 0.26612, + "75": 0.26476, + "80": 0.26501, + "85": 0.26505, + "90": 0.26596, + "95": 0.26599, + "100": 0.2641 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..2c2eb5b --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,54 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --use-custom-fsdp: true + --calculate-per-token-loss: true + --data-parallel-sharding-strategy: optim_grads_params + --use-distributed-optimizer: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..ee354ee --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1,142 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 10.81978, + "5": 10.85277, + "10": 10.79054, + "15": 10.81259, + "20": 10.71561, + "25": 10.52391, + "30": 10.33354, + "35": 10.22869, + "40": 10.04307, + "45": 9.77101, + "50": 9.86315, + "55": 9.82489, + "60": 9.45369, + "65": 8.89336, + "70": 9.69013, + "75": 9.38429, + "80": 9.37031, + "85": 9.58022, + "90": 9.78525, + "95": 9.49638, + "100": 9.36739 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 27138.0, + "5": 32036.0, + "10": 26255.0, + "15": 31309.0, + "20": 28869.0, + "25": 28605.0, + "30": 30817.0, + "35": 32882.0, + "40": 35373.0, + "45": 35484.0, + "50": 2136527.0, + "55": 2135084.0, + "60": 
2137981.0, + "65": 2138995.0, + "70": 2142528.0, + "75": 2215276.0, + "80": 2144227.0, + "85": 2146040.0, + "90": 2146440.0, + "95": 2144187.0, + "100": 2144354.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 668320768.0, + "5": 668306944.0, + "10": 668313600.0, + "15": 668326912.0, + "20": 668314112.0, + "25": 668332544.0, + "30": 668326912.0, + "35": 668337664.0, + "40": 668306432.0, + "45": 668297728.0, + "50": 668282880.0, + "55": 668265984.0, + "60": 668249088.0, + "65": 668242944.0, + "70": 668224512.0, + "75": 668213248.0, + "80": 668222464.0, + "85": 668234752.0, + "90": 668237312.0, + "95": 668223488.0, + "100": 668209664.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 2355231744.0, + "5": 2605464064.0, + "10": 2605464064.0, + "15": 2605464064.0, + "20": 2605464064.0, + "25": 2615321600.0, + "30": 2615321600.0, + "35": 2618603520.0, + "40": 2618603520.0, + "45": 2618603520.0, + "50": 2618603520.0, + "55": 2618603520.0, + "60": 2618603520.0, + "65": 2618603520.0, + "70": 2618603520.0, + "75": 2618603520.0, + "80": 2618603520.0, + "85": 2618603520.0, + "90": 2618603520.0, + "95": 2618603520.0, + "100": 2618603520.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 6.84429, + "5": 0.49894, + "10": 0.4932, + "15": 0.48106, + "20": 0.48362, + "25": 0.48615, + "30": 0.49038, + "35": 0.49011, + "40": 0.50012, + "45": 0.49982, + "50": 0.49286, + "55": 0.92115, + "60": 0.49142, + "65": 0.49128, + "70": 0.49444, + "75": 0.49725, + "80": 0.4978, + "85": 0.49747, + "90": 0.497, + "95": 0.49687, + "100": 0.49788 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..ee354ee --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1,142 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 10.81978, + "5": 10.85277, + "10": 10.79054, + "15": 10.81259, + "20": 10.71561, + "25": 10.52391, + "30": 10.33354, + "35": 10.22869, + "40": 10.04307, + "45": 9.77101, + "50": 9.86315, + "55": 9.82489, + "60": 9.45369, + "65": 8.89336, + "70": 9.69013, + "75": 9.38429, + "80": 9.37031, + "85": 9.58022, + "90": 9.78525, + "95": 9.49638, + "100": 9.36739 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 27138.0, + "5": 32036.0, + "10": 26255.0, + "15": 31309.0, + "20": 28869.0, + "25": 28605.0, + "30": 30817.0, + "35": 32882.0, + "40": 35373.0, + "45": 35484.0, + "50": 2136527.0, + "55": 2135084.0, + "60": 2137981.0, + "65": 2138995.0, + "70": 2142528.0, + "75": 2215276.0, + "80": 2144227.0, + "85": 2146040.0, + "90": 2146440.0, + "95": 2144187.0, + "100": 2144354.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 668320768.0, + "5": 668306944.0, + "10": 668313600.0, + "15": 668326912.0, + "20": 668314112.0, + "25": 668332544.0, + "30": 668326912.0, + "35": 668337664.0, + "40": 
668306432.0, + "45": 668297728.0, + "50": 668282880.0, + "55": 668265984.0, + "60": 668249088.0, + "65": 668242944.0, + "70": 668224512.0, + "75": 668213248.0, + "80": 668222464.0, + "85": 668234752.0, + "90": 668237312.0, + "95": 668223488.0, + "100": 668209664.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 2355231744.0, + "5": 2605464064.0, + "10": 2605464064.0, + "15": 2605464064.0, + "20": 2605464064.0, + "25": 2615321600.0, + "30": 2615321600.0, + "35": 2618603520.0, + "40": 2618603520.0, + "45": 2618603520.0, + "50": 2618603520.0, + "55": 2618603520.0, + "60": 2618603520.0, + "65": 2618603520.0, + "70": 2618603520.0, + "75": 2618603520.0, + "80": 2618603520.0, + "85": 2618603520.0, + "90": 2618603520.0, + "95": 2618603520.0, + "100": 2618603520.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 5, + "values": { + "1": 6.84429, + "5": 0.49894, + "10": 0.4932, + "15": 0.48106, + "20": 0.48362, + "25": 0.48615, + "30": 0.49038, + "35": 0.49011, + "40": 0.50012, + "45": 0.49982, + "50": 0.49286, + "55": 0.92115, + "60": 0.49142, + "65": 0.49128, + "70": 0.49444, + "75": 0.49725, + "80": 0.4978, + "85": 0.49747, + "90": 0.497, + "95": 0.49687, + "100": 0.49788 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..92530cf --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,63 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: Tree + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 1 + --expert-model-parallel-size: 2 + --disable-bias-linear: true + --sequence-parallel: true + --num-experts: 8 + --moe-router-load-balancing-type: aux_loss + --moe-router-topk: 2 + --moe-aux-loss-coeff: 1e-2 + --use-custom-fsdp: true + --calculate-per-token-loss: true + --data-parallel-sharding-strategy: optim_grads_params + --use-distributed-optimizer: true + --deterministic-mode: true + --no-gradient-accumulation-fusion: true + --moe-grouped-gemm: true + --attention-softmax-in-fp32: 
true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true +TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json index c194271..7de1c88 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.86126, - 10.88645, - 10.87768, - 10.83106, - 10.71636, - 10.60597, - 10.13124, - 10.22753, - 10.1591, - 9.83464 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1755.0, - 2147.0, - 2147.0, - 2042.0, - 2108.0, - 1931.0, - 1762.0, - 2184.0, - 2529.0, - 2615.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 6.25178, - 0.35642, - 0.31793, - 0.31783, - 0.31708, - 0.31607, - 0.31789, - 0.31477, - 0.31433, - 0.31727 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88248, "10": 10.83507, "15": 10.82743, "20": 10.72743, "25": 10.5575, "30": 10.37893, "35": 10.28325, "40": 10.08786, "45": 9.82625, "50": 9.91321}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1755.0, "5": 2185.0, "10": 1522.0, "15": 2063.0, "20": 1801.0, "25": 1775.0, "30": 2044.0, "35": 2294.0, "40": 2587.0, "45": 2425.0, "50": 2628.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 269842432.0, "5": 269842432.0, "10": 269842432.0, "15": 269842432.0, "20": 269842432.0, "25": 269842432.0, "30": 269842432.0, "35": 269842432.0, "40": 269842432.0, "45": 269842432.0, "50": 269842432.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1449633280.0, "5": 1515627520.0, "10": 1515627520.0, "15": 1515627520.0, "20": 1515627520.0, "25": 1515627520.0, "30": 1515627520.0, "35": 1515627520.0, "40": 1515627520.0, "45": 1515627520.0, "50": 1515627520.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.40817, "5": 0.30338, "10": 0.30031, "15": 0.59063, "20": 0.30088, "25": 0.30345, "30": 0.30256, "35": 0.30195, "40": 0.3015, "45": 0.30212, "50": 0.30102}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json index 196e4b2..35fdd4f 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86126, 10.88643, 10.87768, 10.83108, 
10.71635, 10.60599, 10.13124, 10.2275, 10.15914, 9.83465]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1752.0, 2067.0, 2123.0, 2072.0, 1999.0, 1941.0, 1784.0, 2229.0, 2546.0, 2567.0]}, "iteration_timing_avg": 0.2256223529411765} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88253, "10": 10.83509, "15": 10.82739, "20": 10.72744, "25": 10.55749, "30": 10.37894, "35": 10.28322, "40": 10.08784, "45": 9.82625, "50": 9.91326}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1752.0, "5": 2204.0, "10": 1567.0, "15": 2068.0, "20": 1783.0, "25": 1795.0, "30": 2124.0, "35": 2186.0, "40": 2575.0, "45": 2278.0, "50": 2683.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 269891584.0, "5": 269891584.0, "10": 269891584.0, "15": 269891584.0, "20": 269891584.0, "25": 269891584.0, "30": 269891584.0, "35": 269891584.0, "40": 269891584.0, "45": 269891584.0, "50": 269891584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1450730496.0, "5": 1513579520.0, "10": 1513579520.0, "15": 1513579520.0, "20": 1515676672.0, "25": 1515676672.0, "30": 1515676672.0, "35": 1515676672.0, "40": 1515676672.0, "45": 1515676672.0, "50": 1515676672.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.60475, "5": 0.30064, "10": 0.2968, "15": 0.29543, "20": 0.29652, "25": 0.29285, "30": 0.29834, "35": 0.2921, "40": 0.29827, "45": 0.29122, "50": 0.30005}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index a86568b..9533978 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json index 9fe1964..9a00642 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.86126, - 10.88645, - 10.87768, - 
10.83106, - 10.71636, - 10.60597, - 10.13124, - 10.22753, - 10.1591, - 9.83464 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1755.0, - 2147.0, - 2147.0, - 2042.0, - 2108.0, - 1931.0, - 1762.0, - 2184.0, - 2529.0, - 2615.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 7.0561, - 0.32588, - 0.32628, - 0.32385, - 0.32419, - 0.32364, - 0.32337, - 0.32334, - 0.32358, - 0.32395 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88248, "10": 10.83507, "15": 10.82743, "20": 10.72743, "25": 10.5575, "30": 10.37893, "35": 10.28325, "40": 10.08786, "45": 9.82625, "50": 9.91321}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1755.0, "5": 2185.0, "10": 1522.0, "15": 2063.0, "20": 1801.0, "25": 1775.0, "30": 2044.0, "35": 2294.0, "40": 2587.0, "45": 2425.0, "50": 2628.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 269842432.0, "5": 269842432.0, "10": 269842432.0, "15": 269842432.0, "20": 269842432.0, "25": 269842432.0, "30": 269842432.0, "35": 269842432.0, "40": 269842432.0, "45": 269842432.0, "50": 269842432.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1450682368.0, "5": 1515627520.0, "10": 1515627520.0, "15": 1515627520.0, "20": 1515627520.0, "25": 1515627520.0, "30": 1515627520.0, "35": 1515627520.0, "40": 1515627520.0, "45": 1515627520.0, "50": 1515627520.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 6.08264, "5": 0.50294, "10": 0.31361, "15": 0.31749, "20": 0.30552, "25": 0.31296, "30": 0.31703, "35": 0.30458, "40": 0.30685, "45": 0.31528, "50": 0.30493}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json index 49917fe..852b04b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86126, 10.88643, 10.87768, 10.83108, 10.71635, 10.60599, 10.13124, 10.2275, 10.15914, 9.83465]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1752.0, 2067.0, 2123.0, 2072.0, 1999.0, 1941.0, 1784.0, 2229.0, 2546.0, 2567.0]}, "iteration_timing_avg": 0.22043823529411763} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88253, "10": 10.83509, "15": 10.82739, "20": 10.72744, "25": 10.55749, "30": 10.37894, "35": 10.28322, "40": 10.08784, "45": 9.82625, "50": 9.91326}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1752.0, "5": 2204.0, "10": 1567.0, "15": 2068.0, "20": 1783.0, "25": 1795.0, "30": 2124.0, "35": 2186.0, "40": 2575.0, "45": 2278.0, "50": 2683.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 
269891584.0, "5": 269891584.0, "10": 269891584.0, "15": 269891584.0, "20": 269891584.0, "25": 269891584.0, "30": 269891584.0, "35": 269891584.0, "40": 269891584.0, "45": 269891584.0, "50": 269891584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1449682432.0, "5": 1515676672.0, "10": 1515676672.0, "15": 1515676672.0, "20": 1515676672.0, "25": 1515676672.0, "30": 1515676672.0, "35": 1515676672.0, "40": 1515676672.0, "45": 1515676672.0, "50": 1515676672.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.33148, "5": 0.3095, "10": 0.30881, "15": 0.30285, "20": 0.305, "25": 0.30028, "30": 0.30512, "35": 0.30125, "40": 0.30469, "45": 0.29938, "50": 0.30327}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 2c9c760..036defa 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json index 977545a..f59f69e 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.86217, - 10.88646, - 10.87861, - 10.83295, - 10.7203, - 10.61089, - 10.14181, - 10.23434, - 10.16609, - 9.84444 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1769.0, - 2056.0, - 2198.0, - 2079.0, - 2181.0, - 1912.0, - 1825.0, - 2115.0, - 2621.0, - 2598.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 6.42448, - 0.42854, - 0.42836, - 0.42582, - 0.42274, - 0.42187, - 0.42561, - 0.42178, - 0.44234, - 0.42304 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86217, "5": 10.88249, "10": 10.83646, "15": 10.82906, "20": 10.73236, "25": 10.56397, "30": 10.38482, "35": 10.28955, "40": 10.09137, "45": 9.83491, "50": 9.91602}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1722.0, "5": 2259.0, "10": 1596.0, "15": 2099.0, "20": 
1919.0, "25": 1785.0, "30": 2048.0, "35": 2290.0, "40": 2558.0, "45": 2447.0, "50": 2676.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 368387584.0, "5": 368387584.0, "10": 368387584.0, "15": 368387584.0, "20": 368387584.0, "25": 368387584.0, "30": 368387584.0, "35": 368387584.0, "40": 368387584.0, "45": 368387584.0, "50": 368387584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1522507264.0, "5": 1653494272.0, "10": 1653494272.0, "15": 1653494272.0, "20": 1653494272.0, "25": 1653494272.0, "30": 1653494272.0, "35": 1653494272.0, "40": 1653494272.0, "45": 1653494272.0, "50": 1653494272.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 5.95291, "5": 0.41956, "10": 0.41644, "15": 0.41504, "20": 0.41541, "25": 0.41645, "30": 0.41452, "35": 0.41456, "40": 0.41402, "45": 0.41451, "50": 0.41368}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json index 8718207..5df5497 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86217, 10.88641, 10.8786, 10.83291, 10.72031, 10.6109, 10.1418, 10.23434, 10.16605, 9.84445]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1769.0, 2019.0, 2145.0, 2058.0, 2166.0, 2060.0, 1776.0, 2174.0, 2524.0, 2645.0]}, "iteration_timing_avg": 0.2256223529411765} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86217, "5": 10.8825, "10": 10.83647, "15": 10.8291, "20": 10.73231, "25": 10.56391, "30": 10.38476, "35": 10.28957, "40": 10.09136, "45": 9.83492, "50": 9.91604}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1769.0, "5": 2216.0, "10": 1573.0, "15": 2132.0, "20": 1815.0, "25": 1849.0, "30": 2009.0, "35": 2182.0, "40": 2489.0, "45": 2381.0, "50": 2727.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 368387072.0, "5": 368387072.0, "10": 368387072.0, "15": 368387072.0, "20": 368387072.0, "25": 368387072.0, "30": 368387072.0, "35": 368387072.0, "40": 368387072.0, "45": 368387072.0, "50": 368387072.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1512020480.0, "5": 1647202304.0, "10": 1647202304.0, "15": 1647202304.0, "20": 1647202304.0, "25": 1647202304.0, "30": 1647202304.0, "35": 1647202304.0, "40": 1647202304.0, "45": 1647202304.0, "50": 1647202304.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.51772, "5": 0.40946, "10": 0.40614, "15": 0.40997, "20": 0.41426, "25": 0.41365, "30": 0.41686, "35": 0.40937, "40": 0.41695, "45": 0.4147, "50": 0.42032}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml index 00946d2..826628b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..86d6fd7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86065, "5": 10.88253, "10": 10.8352, "15": 10.82848, "20": 10.72795, "25": 10.55737, "30": 10.37935, "35": 10.28345, "40": 10.0878, "45": 9.82662, "50": 9.91321, "55": 9.87799, "60": 9.50877, "65": 8.95112, "70": 9.73131, "75": 9.43668, "80": 9.41164, "85": 9.61594, "90": 9.8216, "95": 9.51907, "100": 9.40583}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1716.0, "5": 2123.0, "10": 1539.0, "15": 2026.0, "20": 1842.0, "25": 1767.0, "30": 2099.0, "35": 2213.0, "40": 2387.0, "45": 2378.0, "50": 2771.0, "55": 2649.0, "60": 2734.0, "65": 2982.0, "70": 3716.0, "75": 2729.0, "80": 3758.0, "85": 3562.0, "90": 3313.0, "95": 3458.0, "100": 3432.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 269842944.0, "5": 269842944.0, "10": 269842944.0, "15": 269842944.0, "20": 269842944.0, "25": 269842944.0, "30": 269842944.0, "35": 269842944.0, "40": 269842944.0, "45": 269842944.0, "50": 269842944.0, "55": 269842944.0, "60": 269842944.0, "65": 269842944.0, "70": 269842944.0, "75": 269842944.0, "80": 269842944.0, "85": 269842944.0, "90": 269842944.0, "95": 269842944.0, "100": 269842944.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 956153344.0, "5": 1035779584.0, "10": 1035779584.0, "15": 1035779584.0, "20": 1035779584.0, "25": 1035779584.0, "30": 1035779584.0, "35": 1035779584.0, "40": 1035779584.0, "45": 1035779584.0, "50": 1035779584.0, "55": 1035779584.0, "60": 1035779584.0, "65": 1035779584.0, "70": 1035779584.0, "75": 1035779584.0, "80": 1035779584.0, "85": 1035779584.0, "90": 1035779584.0, "95": 1035779584.0, "100": 1035779584.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.03589, "5": 0.28552, "10": 0.28504, "15": 0.29587, "20": 0.28309, "25": 0.27926, "30": 0.27852, "35": 0.27751, "40": 0.27651, "45": 0.27785, "50": 0.27743, "55": 0.27487, "60": 0.27351, "65": 0.27319, "70": 0.27565, 
"75": 0.50898, "80": 0.27289, "85": 0.27348, "90": 0.27316, "95": 0.27294, "100": 0.27277}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..a51e609 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88253, "10": 10.83509, "15": 10.82739, "20": 10.72744, "25": 10.55749, "30": 10.37894, "35": 10.28322, "40": 10.08784, "45": 9.82625, "50": 9.91326, "55": 9.8779, "60": 9.50869, "65": 8.95102, "70": 9.73166, "75": 9.43677, "80": 9.41158, "85": 9.61615, "90": 9.82168, "95": 9.51915, "100": 9.40594}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1752.0, "5": 2204.0, "10": 1567.0, "15": 2068.0, "20": 1783.0, "25": 1795.0, "30": 2124.0, "35": 2186.0, "40": 2575.0, "45": 2278.0, "50": 2683.0, "55": 2676.0, "60": 2773.0, "65": 3019.0, "70": 3591.0, "75": 2870.0, "80": 3765.0, "85": 3549.0, "90": 3490.0, "95": 3544.0, "100": 3617.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 269891584.0, "5": 269891584.0, "10": 269891584.0, "15": 269891584.0, "20": 269891584.0, "25": 269891584.0, "30": 269891584.0, "35": 269891584.0, "40": 269891584.0, "45": 269891584.0, "50": 269891584.0, "55": 269891584.0, "60": 269891584.0, "65": 269891584.0, "70": 269891584.0, "75": 269891584.0, "80": 269891584.0, "85": 269891584.0, "90": 269891584.0, "95": 269891584.0, "100": 269891584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1450730496.0, "5": 1515676672.0, "10": 1515676672.0, "15": 1515676672.0, "20": 1515676672.0, "25": 1515676672.0, "30": 1515676672.0, "35": 1515676672.0, "40": 1515676672.0, "45": 1515676672.0, "50": 1515676672.0, "55": 1515676672.0, "60": 1515676672.0, "65": 1515676672.0, "70": 1515676672.0, "75": 1515676672.0, "80": 1515676672.0, "85": 1515676672.0, "90": 1515676672.0, "95": 1515676672.0, "100": 1515676672.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.22865, "5": 0.31237, "10": 0.31889, "15": 0.30979, "20": 0.31592, "25": 0.31368, "30": 0.31292, "35": 0.31001, "40": 0.31087, "45": 0.30787, "50": 0.3067, "55": 0.30223, "60": 0.29974, "65": 0.29841, "70": 0.29787, "75": 0.30072, "80": 0.29729, "85": 0.29753, "90": 0.29692, "95": 0.29937, "100": 0.29618}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml index dda321f..1ffc1d0 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ 
MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..fdc28a7 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86065, "5": 10.88253, "10": 10.8352, "15": 10.82848, "20": 10.72795, "25": 10.55737, "30": 10.37935, "35": 10.28345, "40": 10.0878, "45": 9.82662, "50": 9.91321, "55": 9.87799, "60": 9.50877, "65": 8.95112, "70": 9.73131, "75": 9.43668, "80": 9.41164, "85": 9.61594, "90": 9.8216, "95": 9.51907, "100": 9.40583}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1716.0, "5": 2123.0, "10": 1539.0, "15": 2026.0, "20": 1842.0, "25": 1767.0, "30": 2099.0, "35": 2213.0, "40": 2387.0, "45": 2378.0, "50": 2771.0, "55": 2649.0, "60": 2734.0, "65": 2982.0, "70": 3716.0, "75": 2729.0, "80": 3758.0, "85": 3562.0, "90": 3313.0, "95": 3458.0, "100": 3432.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 269842944.0, "5": 269842944.0, "10": 269842944.0, "15": 269842944.0, "20": 269842944.0, "25": 269842944.0, "30": 269842944.0, "35": 269842944.0, "40": 269842944.0, "45": 269842944.0, "50": 269842944.0, "55": 269842944.0, "60": 269842944.0, "65": 269842944.0, "70": 269842944.0, "75": 269842944.0, "80": 269842944.0, "85": 269842944.0, "90": 269842944.0, "95": 269842944.0, "100": 269842944.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 964541952.0, "5": 1035779584.0, "10": 1035779584.0, "15": 1035779584.0, "20": 1035779584.0, "25": 1035779584.0, "30": 1035779584.0, "35": 1035779584.0, "40": 1035779584.0, "45": 1035779584.0, "50": 1035779584.0, "55": 1035779584.0, "60": 1035779584.0, "65": 1035779584.0, "70": 1035779584.0, "75": 1035779584.0, "80": 1035779584.0, "85": 1035779584.0, "90": 1035779584.0, "95": 1035779584.0, "100": 1035779584.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.5187, "5": 0.28962, "10": 0.29031, "15": 0.28267, "20": 0.28213, "25": 0.28217, "30": 0.28177, "35": 0.28205, "40": 0.28416, "45": 0.28076, "50": 0.28157, "55": 0.28142, "60": 0.28019, "65": 0.28032, "70": 0.28088, "75": 0.27982, "80": 0.27983, "85": 0.27971, "90": 0.28117, "95": 0.28008, "100": 0.27953}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..452de08 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86126, "5": 10.88253, "10": 10.83509, "15": 10.82739, "20": 10.72744, "25": 10.55749, "30": 10.37894, "35": 10.28322, "40": 10.08784, "45": 9.82625, "50": 9.91326, "55": 9.8779, "60": 9.50869, "65": 8.95102, "70": 9.73166, "75": 9.43677, "80": 9.41158, "85": 9.61615, "90": 9.82168, "95": 9.51915, "100": 9.40594}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1752.0, "5": 2204.0, "10": 1567.0, "15": 2068.0, "20": 1783.0, "25": 1795.0, "30": 2124.0, "35": 2186.0, "40": 2575.0, "45": 2278.0, "50": 2683.0, "55": 2676.0, "60": 2773.0, "65": 3019.0, "70": 3591.0, "75": 2870.0, "80": 3765.0, "85": 3549.0, "90": 3490.0, "95": 3544.0, "100": 3617.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 269891584.0, "5": 269891584.0, "10": 269891584.0, "15": 269891584.0, "20": 269891584.0, "25": 269891584.0, "30": 269891584.0, "35": 269891584.0, "40": 269891584.0, "45": 269891584.0, "50": 269891584.0, "55": 269891584.0, "60": 269891584.0, "65": 269891584.0, "70": 269891584.0, "75": 269891584.0, "80": 269891584.0, "85": 269891584.0, "90": 269891584.0, "95": 269891584.0, "100": 269891584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1450731008.0, "5": 1515675648.0, "10": 1515676672.0, "15": 1515676672.0, "20": 1515676672.0, "25": 1515676672.0, "30": 1515676672.0, "35": 1515676672.0, "40": 1515676672.0, "45": 1515676672.0, "50": 1515676672.0, "55": 1515676672.0, "60": 1515676672.0, "65": 1515676672.0, "70": 1515676672.0, "75": 1515676672.0, "80": 1515676672.0, "85": 1515676672.0, "90": 1515676672.0, "95": 1515676672.0, "100": 1515676672.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.38789, "5": 0.322, "10": 0.31419, "15": 0.31533, "20": 0.30974, "25": 0.30867, "30": 0.30191, "35": 0.30301, "40": 0.30266, "45": 0.30177, "50": 0.30441, "55": 0.33472, "60": 0.31376, "65": 0.32009, "70": 0.31308, "75": 0.31965, "80": 0.31251, "85": 0.31098, "90": 0.30726, "95": 0.30595, "100": 0.30772}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml index 93e1ce6..4b72068 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: 
${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -50,4 +50,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c6b3bd2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86073, "5": 10.8823, "10": 10.83564, "15": 10.83051, "20": 10.73302, "25": 10.56317, "30": 10.38508, "35": 10.28979, "40": 10.09131, "45": 9.83512, "50": 9.91593, "55": 9.88231, "60": 9.51403, "65": 8.95406, "70": 9.7307, "75": 9.43134, "80": 9.40601, "85": 9.61116, "90": 9.8175, "95": 9.51556, "100": 9.40417}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1793.0, "5": 2158.0, "10": 1547.0, "15": 2089.0, "20": 1858.0, "25": 1753.0, "30": 2091.0, "35": 2200.0, "40": 2602.0, "45": 2415.0, "50": 2741.0, "55": 2688.0, "60": 2698.0, "65": 2813.0, "70": 3731.0, "75": 2787.0, "80": 3822.0, "85": 3525.0, "90": 3430.0, "95": 3582.0, "100": 3723.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 368387584.0, "5": 368387584.0, "10": 368387584.0, "15": 368387584.0, "20": 368387584.0, "25": 368387584.0, "30": 368387584.0, "35": 368387584.0, "40": 368387584.0, "45": 368387584.0, "50": 368387584.0, "55": 368387584.0, "60": 368387584.0, "65": 368387584.0, "70": 368387584.0, "75": 368387584.0, "80": 368387584.0, "85": 368387584.0, "90": 368387584.0, "95": 368387584.0, "100": 368387584.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1032173056.0, "5": 1163160064.0, "10": 1163160064.0, "15": 1163160064.0, "20": 1163160064.0, "25": 1163160064.0, "30": 1163160064.0, "35": 1163160064.0, "40": 1163160064.0, "45": 1163160064.0, "50": 1163160064.0, "55": 1163160064.0, "60": 1163160064.0, "65": 1163160064.0, "70": 1163160064.0, "75": 1163160064.0, "80": 1163160064.0, "85": 1163160064.0, "90": 1163160064.0, "95": 1163160064.0, "100": 1163160064.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 8.70935, "5": 0.38509, "10": 0.39398, "15": 0.39049, "20": 0.39065, "25": 0.38942, "30": 0.38888, "35": 0.39041, "40": 0.39256, "45": 0.39188, "50": 0.39096, "55": 0.38207, "60": 0.38257, "65": 0.38138, "70": 0.3975, "75": 0.38155, "80": 0.38011, "85": 0.38775, "90": 0.38412, "95": 0.3829, "100": 0.38287}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..7fd4b8c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, 
"end_step": 100, "step_interval": 5, "values": {"1": 10.86217, "5": 10.8825, "10": 10.83647, "15": 10.8291, "20": 10.73231, "25": 10.56391, "30": 10.38476, "35": 10.28957, "40": 10.09136, "45": 9.83492, "50": 9.91604, "55": 9.88229, "60": 9.51379, "65": 8.95396, "70": 9.731, "75": 9.43126, "80": 9.40596, "85": 9.61136, "90": 9.81744, "95": 9.51567, "100": 9.4043}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1769.0, "5": 2216.0, "10": 1573.0, "15": 2132.0, "20": 1815.0, "25": 1849.0, "30": 2009.0, "35": 2182.0, "40": 2489.0, "45": 2381.0, "50": 2727.0, "55": 2667.0, "60": 2723.0, "65": 2907.0, "70": 3734.0, "75": 2746.0, "80": 3726.0, "85": 3599.0, "90": 3323.0, "95": 3615.0, "100": 3524.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 368387072.0, "5": 368387072.0, "10": 368387072.0, "15": 368387072.0, "20": 368387072.0, "25": 368387072.0, "30": 368387072.0, "35": 368387072.0, "40": 368387072.0, "45": 368387072.0, "50": 368387072.0, "55": 368387072.0, "60": 368387072.0, "65": 368387072.0, "70": 368387072.0, "75": 368387072.0, "80": 368387072.0, "85": 368387072.0, "90": 368387072.0, "95": 368387072.0, "100": 368387072.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1513069568.0, "5": 1647199744.0, "10": 1647199744.0, "15": 1647201792.0, "20": 1647201792.0, "25": 1647201792.0, "30": 1647201792.0, "35": 1647201792.0, "40": 1647201792.0, "45": 1647201792.0, "50": 1647201792.0, "55": 1647201792.0, "60": 1649298944.0, "65": 1649298944.0, "70": 1649298944.0, "75": 1649298944.0, "80": 1649298944.0, "85": 1649298944.0, "90": 1649298944.0, "95": 1649298944.0, "100": 1649298944.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.88726, "5": 0.40682, "10": 0.43529, "15": 0.47149, "20": 0.41093, "25": 0.40566, "30": 0.42086, "35": 0.40692, "40": 0.4028, "45": 0.40374, "50": 0.404, "55": 0.41679, "60": 0.42436, "65": 0.427, "70": 0.42395, "75": 0.4485, "80": 0.45249, "85": 0.41989, "90": 0.41911, "95": 0.42649, "100": 0.42528}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml index 6418b0c..ba451e3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..7ee9188 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.85831, "5": 10.87279, "10": 10.83267, "15": 10.82104, "20": 10.71376, "25": 10.54763, "30": 10.36782, "35": 10.2846, "40": 10.08923, "45": 9.84556, "50": 9.91944, "55": 9.89194, "60": 9.5082, "65": 8.9595, "70": 9.73443, "75": 9.43114, "80": 9.41103, "85": 9.61515, "90": 9.82371, "95": 9.5226, "100": 9.40801}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1758.0, "5": 2093.0, "10": 1539.0, "15": 2026.0, "20": 1800.0, "25": 1786.0, "30": 2071.0, "35": 2219.0, "40": 2402.0, "45": 2268.0, "50": 2714.0, "55": 2588.0, "60": 2760.0, "65": 2831.0, "70": 3489.0, "75": 2724.0, "80": 3683.0, "85": 3637.0, "90": 3411.0, "95": 3592.0, "100": 3642.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 232398336.0, "5": 232398336.0, "10": 232398336.0, "15": 232398336.0, "20": 232398336.0, "25": 232398336.0, "30": 232398336.0, "35": 232398336.0, "40": 232398336.0, "45": 232398336.0, "50": 232398336.0, "55": 232398336.0, "60": 232398336.0, "65": 232398336.0, "70": 232398336.0, "75": 232398336.0, "80": 232398336.0, "85": 232398336.0, "90": 232398336.0, "95": 232398336.0, "100": 232398336.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 682342912.0, "5": 773245440.0, "10": 773245440.0, "15": 773245440.0, "20": 773245440.0, "25": 773246464.0, "30": 773246464.0, "35": 773246464.0, "40": 773246464.0, "45": 773246464.0, "50": 773246464.0, "55": 773246464.0, "60": 773246464.0, "65": 773246464.0, "70": 773246464.0, "75": 773246464.0, "80": 773246464.0, "85": 773246464.0, "90": 775342080.0, "95": 775342080.0, "100": 775342080.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.25721, "5": 0.297, "10": 0.2962, "15": 0.29314, "20": 0.29254, "25": 0.29368, "30": 0.29285, "35": 0.2939, "40": 0.29424, "45": 0.29981, "50": 0.29991, "55": 0.28268, "60": 0.2813, "65": 0.28183, "70": 0.28205, "75": 0.28103, "80": 0.28125, "85": 0.28141, "90": 0.28129, "95": 0.28133, "100": 0.28055}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..6fdeb46 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8583, "5": 10.87284, "10": 10.83264, "15": 10.82102, "20": 10.71379, "25": 10.54766, "30": 10.3679, "35": 10.28457, "40": 10.08925, "45": 9.84556, "50": 9.91943, "55": 9.89191, "60": 9.50823, "65": 8.95947, "70": 9.73446, "75": 9.43115, "80": 9.411, "85": 9.61516, "90": 9.82374, "95": 9.52257, "100": 9.408}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1693.0, 
"5": 2113.0, "10": 1534.0, "15": 2023.0, "20": 1755.0, "25": 1764.0, "30": 2036.0, "35": 2228.0, "40": 2447.0, "45": 2332.0, "50": 2745.0, "55": 2594.0, "60": 2725.0, "65": 2901.0, "70": 3493.0, "75": 2725.0, "80": 3691.0, "85": 3596.0, "90": 3410.0, "95": 3607.0, "100": 3719.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 232422400.0, "5": 232422400.0, "10": 232422400.0, "15": 232422400.0, "20": 232422400.0, "25": 232422400.0, "30": 232422400.0, "35": 232422400.0, "40": 232422400.0, "45": 232422400.0, "50": 232422400.0, "55": 232422400.0, "60": 232422400.0, "65": 232422400.0, "70": 232422400.0, "75": 232422400.0, "80": 232422400.0, "85": 232422400.0, "90": 232422400.0, "95": 232422400.0, "100": 232422400.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.16523, "5": 0.31605, "10": 0.28733, "15": 0.28667, "20": 0.28015, "25": 0.31509, "30": 0.28969, "35": 0.28728, "40": 0.29047, "45": 0.28331, "50": 0.28547, "55": 0.2768, "60": 0.27873, "65": 0.2789, "70": 0.27983, "75": 0.27902, "80": 0.27972, "85": 0.28215, "90": 0.27786, "95": 0.28072, "100": 0.28294}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..44f25d8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,51 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 +MODEL_ARGS: + --num-layers: 12 + --hidden-size: 512 + --num-attention-heads: 8 + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + --tensorboard-dir: ${TENSORBOARD_PATH} + --micro-batch-size: 4 + --global-batch-size: 32 + --seq-length: 1024 + --max-position-embeddings: 1024 + --train-iters: 100 + --timing-log-level: 2 + --lr-decay-iters: 320000 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --data-path: ${DATA_PATH}/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/bpe/vocab.json + --merge-file: ${DATA_PATH}/bpe/merges.txt + --split: 949,50,1 + --distributed-backend: nccl + --lr: 0.00015 + --lr-decay-style: cosine + --min-lr: 1.0e-5 + --weight-decay: 1e-2 + --clip-grad: 1.0 + --lr-warmup-fraction: .01 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --transformer-impl: transformer_engine + --tensor-model-parallel-size: 4 + --pipeline-model-parallel-size: 2 + --use-distributed-optimizer: true + --async-save: true + --ckpt-fully-parallel-save: true + --no-gradient-accumulation-fusion: true + --attention-softmax-in-fp32: true + --use-checkpoint-opt_param-scheduler: true + --use-mcore-models: true + --ckpt-format: torch_dist + --data-cache-path: ${DATA_CACHE_PATH} + --bf16: true + --log-memory-to-tensorboard: true +TEST_TYPE: frozen-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..616b08e --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.85831, "5": 10.87284, "10": 10.83268, "15": 10.82102, "20": 10.71377, "25": 10.54763, "30": 10.36785, "35": 10.28461, "40": 10.08928, "45": 9.84557, "50": 9.9194, "55": 9.89197, "60": 9.50823, "65": 8.9595, "70": 9.73441, "75": 9.43113, "80": 9.411, "85": 9.61514, "90": 9.82373, "95": 9.52255, "100": 9.40799}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1775.0, "5": 2048.0, "10": 1559.0, "15": 2026.0, "20": 1790.0, "25": 1815.0, "30": 2056.0, "35": 2157.0, "40": 2311.0, "45": 2242.0, "50": 2756.0, "55": 2589.0, "60": 2651.0, "65": 2874.0, "70": 3534.0, "75": 2840.0, "80": 3634.0, "85": 3505.0, "90": 3377.0, "95": 3729.0, "100": 3572.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 232398336.0, "5": 232398336.0, "10": 232398336.0, "15": 232398336.0, "20": 232398336.0, "25": 232398336.0, "30": 232398336.0, "35": 232398336.0, "40": 233446912.0, "45": 232398336.0, "50": 232398336.0, "55": 232398336.0, "60": 232398336.0, "65": 232398336.0, "70": 232398336.0, "75": 232398336.0, "80": 232398336.0, "85": 232398336.0, "90": 232398336.0, "95": 232398336.0, "100": 232398336.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 686536192.0, "5": 775341056.0, "10": 775341056.0, "15": 775341056.0, "20": 775342080.0, "25": 775343616.0, "30": 775343616.0, "35": 775343616.0, "40": 775343616.0, "45": 775343616.0, "50": 775343616.0, "55": 775343616.0, "60": 775343616.0, "65": 775343616.0, "70": 775343616.0, "75": 775343616.0, "80": 775343616.0, "85": 775343616.0, "90": 775343616.0, "95": 775343616.0, "100": 775343616.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.96401, "5": 0.29061, "10": 0.28498, "15": 0.28362, "20": 0.28222, "25": 0.28294, "30": 0.28438, "35": 0.28301, "40": 0.28255, "45": 0.28337, "50": 0.28254, "55": 0.29177, "60": 0.29121, "65": 0.2911, "70": 0.29076, "75": 0.29215, "80": 0.29191, "85": 0.28992, "90": 0.29114, "95": 0.29025, "100": 0.28959}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..3c071a2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8583, "5": 10.87279, "10": 10.83264, "15": 10.82099, "20": 10.71379, "25": 10.54767, "30": 10.36789, "35": 10.2846, "40": 10.08927, "45": 9.84554, "50": 9.9194, "55": 9.89196, "60": 9.5082, "65": 8.95952, "70": 9.7344, "75": 9.4311, "80": 9.411, "85": 9.61517, "90": 9.82372, "95": 9.52256, "100": 9.408}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1763.0, "5": 2118.0, "10": 1540.0, "15": 2065.0, "20": 1836.0, "25": 1790.0, "30": 2030.0, "35": 2200.0, "40": 2389.0, "45": 2250.0, "50": 2793.0, "55": 2708.0, "60": 2777.0, "65": 2829.0, "70": 3443.0, "75": 2863.0, "80": 3676.0, "85": 
3495.0, "90": 3282.0, "95": 3687.0, "100": 3655.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 232422400.0, "5": 232422400.0, "10": 232422400.0, "15": 232422400.0, "20": 232422400.0, "25": 232422400.0, "30": 232422400.0, "35": 232422400.0, "40": 232422400.0, "45": 232422400.0, "50": 232422400.0, "55": 232422400.0, "60": 232422400.0, "65": 232422400.0, "70": 232422400.0, "75": 232422400.0, "80": 232422400.0, "85": 232422400.0, "90": 232422400.0, "95": 232422400.0, "100": 232422400.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 686566400.0, "5": 775371776.0, "10": 775371776.0, "15": 775372288.0, "20": 775372288.0, "25": 775372288.0, "30": 775372288.0, "35": 775372288.0, "40": 775372288.0, "45": 775372288.0, "50": 775372288.0, "55": 775372288.0, "60": 775372288.0, "65": 775372288.0, "70": 775372288.0, "75": 775372288.0, "80": 775372288.0, "85": 775372288.0, "90": 775372288.0, "95": 775372288.0, "100": 775372288.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.98947, "5": 0.28276, "10": 0.29522, "15": 0.28583, "20": 0.29135, "25": 0.28791, "30": 0.28029, "35": 0.27945, "40": 0.27988, "45": 0.29308, "50": 0.28374, "55": 0.2909, "60": 0.29746, "65": 0.28807, "70": 0.29826, "75": 0.28803, "80": 0.29862, "85": 0.28869, "90": 0.28952, "95": 0.28889, "100": 0.28882}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml index a5de201..9f0651b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G/model_config.yaml @@ -17,8 +17,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --ckpt-format: torch_dist --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..5dcb5b2 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92717, "5": 10.92928, "10": 10.91617, "15": 10.93901, "20": 10.93406, "25": 10.8858, "30": 10.81297, "35": 10.72203, "40": 10.55145, "45": 10.32854, "50": 10.28775, "55": 10.21253, "60": 9.833, "65": 9.27297, "70": 9.92539, "75": 9.59673, "80": 9.55132, "85": 9.73428, "90": 9.9073, "95": 9.60983, "100": 9.50131}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 379952128.0, "5": 
378379264.0, "10": 379427840.0, "15": 378379264.0, "20": 559762944.0, "25": 561860096.0, "30": 561073664.0, "35": 561073664.0, "40": 560287232.0, "45": 559762944.0, "50": 560287232.0, "55": 561073664.0, "60": 559762944.0, "65": 559762944.0, "70": 559762944.0, "75": 559762944.0, "80": 559762944.0, "85": 559762944.0, "90": 561860096.0, "95": 560549376.0, "100": 560549376.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.22195, "5": 0.20217, "10": 0.20177, "15": 0.20429, "20": 0.21411, "25": 0.21219, "30": 0.21117, "35": 0.21259, "40": 0.21302, "45": 0.21291, "50": 0.21122, "55": 0.22967, "60": 0.2322, "65": 0.23206, "70": 0.23201, "75": 0.23017, "80": 0.22985, "85": 0.23239, "90": 0.231, "95": 0.23146, "100": 0.23157}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1799.0, "25": 2506.0, "30": 2471.0, "35": 2010.0, "40": 2153.0, "45": 2427.0, "50": 2914.0, "55": 2337.0, "60": 2978.0, "65": 2225.0, "70": 3612.0, "75": 3018.0, "80": 3488.0, "85": 3875.0, "90": 3770.0, "95": 3946.0, "100": 3446.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..999bb5c --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.92717, "5": 10.92928, "10": 10.91617, "15": 10.93901, "20": 10.93406, "25": 10.8858, "30": 10.81297, "35": 10.72203, "40": 10.55145, "45": 10.32854, "50": 10.28775, "55": 10.21253, "60": 9.833, "65": 9.27297, "70": 9.92539, "75": 9.59673, "80": 9.55132, "85": 9.73428, "90": 9.9073, "95": 9.60983, "100": 9.5013}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 378379264.0, "5": 378379264.0, "10": 378379264.0, "15": 378379264.0, "20": 561073664.0, "25": 561860096.0, "30": 561073664.0, "35": 561860096.0, "40": 561860096.0, "45": 560811520.0, "50": 561073664.0, "55": 561073664.0, "60": 561073664.0, "65": 561860096.0, "70": 561860096.0, "75": 561073664.0, "80": 561860096.0, "85": 561335808.0, "90": 561073664.0, "95": 561073664.0, "100": 561860096.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.81109, "5": 0.21194, "10": 0.21151, "15": 0.21057, "20": 0.22167, "25": 0.2212, "30": 0.22059, "35": 0.22295, "40": 0.22292, "45": 0.22399, "50": 0.22321, "55": 0.21669, "60": 0.21726, "65": 0.21668, "70": 0.22074, "75": 0.21923, "80": 0.21775, "85": 0.21706, "90": 0.21701, "95": 0.21697, "100": 0.2163}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1799.0, "25": 2506.0, "30": 2471.0, "35": 2010.0, "40": 2153.0, "45": 2427.0, "50": 2914.0, "55": 2409.0, "60": 2939.0, "65": 2178.0, "70": 3539.0, "75": 3029.0, "80": 3531.0, "85": 3892.0, "90": 3772.0, "95": 4015.0, "100": 3520.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml index 226dfbc..2842fe3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -49,4 +49,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..d39d503 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92717, "5": 10.92928, "10": 10.91616, "15": 10.93902, "20": 10.93405, "25": 10.88579, "30": 10.81295, "35": 10.72198, "40": 10.55137, "45": 10.32844, "50": 10.28765}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 378378752.0, "5": 378903040.0, "10": 378378752.0, "15": 378903040.0, "20": 560548864.0, "25": 560548864.0, "30": 560548864.0, "35": 559238144.0, "40": 560548864.0, "45": 560548864.0, "50": 560548864.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1905351680.0, "5": 1905352192.0, "10": 1905352192.0, "15": 1905352192.0, "20": 2087784448.0, "25": 2087784448.0, "30": 2087784448.0, "35": 2087784448.0, "40": 2087784448.0, "45": 2087784448.0, "50": 2087784448.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.36878, "5": 0.2008, "10": 0.19913, "15": 0.19916, "20": 0.21528, "25": 0.21446, "30": 0.2138, "35": 0.21509, "40": 0.2138, "45": 0.21394, "50": 0.21354}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1751.0, "25": 2490.0, "30": 2497.0, "35": 2017.0, "40": 2091.0, "45": 2389.0, "50": 2925.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..204b13e --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.92717, "5": 10.92928, "10": 10.91616, "15": 10.93902, "20": 10.93405, "25": 10.88579, "30": 10.81295, "35": 10.72198, "40": 10.55137, "45": 10.32844, "50": 10.28766}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 378903040.0, "5": 378378752.0, "10": 378903040.0, 
"15": 378378752.0, "20": 560811008.0, "25": 560548864.0, "30": 561073152.0, "35": 562646016.0, "40": 560548864.0, "45": 562646016.0, "50": 560548864.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1905351680.0, "5": 1905352192.0, "10": 1905352192.0, "15": 1905352192.0, "20": 2087784448.0, "25": 2087784448.0, "30": 2087784448.0, "35": 2087784448.0, "40": 2087784448.0, "45": 2087784448.0, "50": 2087784448.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.5872, "5": 0.20393, "10": 0.20412, "15": 0.20193, "20": 0.22109, "25": 0.21826, "30": 0.21476, "35": 0.21348, "40": 0.21255, "45": 0.21142, "50": 0.21064}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1751.0, "25": 2491.0, "30": 2428.0, "35": 1827.0, "40": 2072.0, "45": 2361.0, "50": 2998.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml index f2934a3..8726510 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G/model_config.yaml @@ -23,8 +23,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -52,4 +52,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 3d753bc..a8a6dee 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1,612 +1 @@ -{ - "forward-backward-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 7.99255, - 0.1699, - 0.16797, - 0.16814, - 0.16792, - 0.1675, - 0.16973, - 0.16925, - 0.16932, - 0.16655 - ] - }, - "forward-compute-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1.99201, - 0.07269, - 0.07105, - 0.07144, - 0.07113, - 0.07113, - 0.07269, - 0.07292, - 0.07231, - 0.07028 - ] - }, - "backward-compute-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1.74189, - 0.07561, - 0.07559, - 0.07617, - 0.07601, - 0.07555, - 0.07573, - 0.07602, - 0.07589, - 0.07554 - ] - }, - "batch-generator-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.33623, - 0.00263, - 0.00278, - 0.00281, - 0.0029, - 0.00309, - 0.00249, - 0.00293, - 0.00275, - 0.00267 - ] - }, - "forward-recv-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 2.03589, - 0.01468, - 0.01445, - 0.01439, - 0.01441, - 0.01438, - 0.01445, - 0.01443, - 0.01439, - 0.01458 - ] - }, - "forward-send-time": { - 
"start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.56239, - 0.00016, - 0.00014, - 0.00015, - 0.00015, - 0.00015, - 0.00017, - 0.00015, - 0.00015, - 0.00014 - ] - }, - "backward-recv-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.01891, - 0.01827, - 0.01862, - 0.01906, - 0.01881, - 0.01843, - 0.01836, - 0.01816, - 0.01928, - 0.01844 - ] - }, - "backward-send-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.00022, - 0.00019, - 0.00026, - 0.00025, - 0.00025, - 0.00026, - 0.00019, - 0.00026, - 0.00024, - 0.00025 - ] - }, - "forward-send-backward-recv-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3.65009, - 0.02665, - 0.02419, - 0.02471, - 0.02401, - 0.02444, - 0.02648, - 0.02644, - 0.02615, - 0.02382 - ] - }, - "backward-send-forward-recv-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1.79597, - 0.00095, - 0.00098, - 0.00098, - 0.00099, - 0.00104, - 0.00099, - 0.00107, - 0.00111, - 0.00095 - ] - }, - "layernorm-grads-all-reduce-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3e-05, - 2e-05, - 3e-05, - 2e-05, - 2e-05, - 2e-05, - 2e-05, - 2e-05, - 2e-05, - 2e-05 - ] - }, - "embedding-grads-all-reduce-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.00069, - 0.00052, - 0.00052, - 0.00053, - 0.00053, - 0.00053, - 0.00053, - 0.00052, - 0.00053, - 0.00052 - ] - }, - "all-grads-sync-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.59902, - 0.00084, - 0.00085, - 0.00083, - 0.00084, - 0.00083, - 0.00084, - 0.00087, - 0.00084, - 0.00084 - ] - }, - "optimizer-copy-to-main-grad-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.00026, - 0.00019, - 0.00019, - 0.00019, - 0.00019, - 0.00019, - 0.0002, - 0.00019, - 0.00019, - 0.00019 - ] - }, - "optimizer-clip-main-grad-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.85985, - 0.0011, - 0.00109, - 0.00115, - 0.0012, - 0.00108, - 0.0011, - 0.00108, - 0.0011, - 0.00109 - ] - }, - "optimizer-count-zeros-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.0167, - 0.00528, - 0.00524, - 0.00528, - 0.00523, - 0.00525, - 0.00524, - 0.00525, - 0.00525, - 0.00527 - ] - }, - "optimizer-inner-step-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.01141, - 0.00081, - 0.00081, - 0.00083, - 0.00081, - 0.00084, - 0.00084, - 0.00084, - 0.00082, - 0.00083 - ] - }, - "optimizer-copy-main-to-model-params-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.00088, - 0.0006, - 0.0006, - 0.0006, - 0.0006, - 0.00082, - 0.0006, - 0.00059, - 0.0006, - 0.0006 - ] - }, - "optimizer-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.89007, - 0.00859, - 0.00853, - 0.00862, - 0.00862, - 0.00885, - 0.00857, - 0.00857, - 0.00854, - 0.00858 - ] - }, - "learning-rate": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0 - ] - }, - "learning-rate vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0 - ] - }, - "batch-size": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 
32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "batch-size vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.85926, - 10.89117, - 10.86647, - 10.81416, - 10.70027, - 10.60761, - 10.10644, - 10.21377, - 10.12972, - 9.8041 - ] - }, - "lm loss vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 10.85926, - 10.89117, - 10.86647, - 10.81416, - 10.70027, - 10.60761, - 10.10644, - 10.21377, - 10.12972, - 9.8041 - ] - }, - "loss-scale": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "loss-scale vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "grad-norm": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 14.36883, - 10.19308, - 9.38217, - 11.67025, - 11.2611, - 10.52068, - 12.43181, - 7.21395, - 6.03602, - 5.80161 - ] - }, - "grad-norm vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 14.36883, - 10.19308, - 9.38217, - 11.67025, - 11.2611, - 10.52068, - 12.43181, - 7.21395, - 6.03602, - 5.80161 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1726.0, - 1922.0, - 2043.0, - 1879.0, - 1882.0, - 1821.0, - 1648.0, - 2039.0, - 2379.0, - 2451.0 - ] - }, - "num-zeros vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 1726.0, - 1922.0, - 2043.0, - 1879.0, - 1882.0, - 1821.0, - 1648.0, - 2039.0, - 2379.0, - 2451.0 - ] - }, - "params-norm": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 180.01265, - 180.01265, - 180.01265, - 180.01265, - 180.01265, - 180.01263, - 180.0126, - 180.01251, - 180.01237, - 180.01218 - ] - }, - "params-norm vs samples": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 180.01265, - 180.01265, - 180.01265, - 180.01265, - 180.01265, - 180.01263, - 180.0126, - 180.01251, - 180.01237, - 180.01218 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 8.9047, - 0.19058, - 0.18857, - 0.18884, - 0.18868, - 0.18839, - 0.19045, - 0.1901, - 0.18993, - 0.18735 - ] - }, - "lm loss validation": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 9.81192 - ] - }, - "lm loss validation vs samples": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 9.81192 - ] - }, - "lm loss validation ppl": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 18250.01367 - ] - }, - "lm loss validation ppl vs samples": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 18250.01367 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.85926, "5": 10.878, "10": 10.84086, "15": 10.81702, "20": 10.72418, "25": 10.55518, "30": 10.35548, "35": 10.2597, "40": 10.06425, "45": 9.81279, "50": 9.89265}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1726.0, "5": 1899.0, "10": 1437.0, "15": 1923.0, "20": 1700.0, "25": 1640.0, "30": 1993.0, "35": 2075.0, "40": 
2268.0, "45": 2144.0, "50": 2461.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 487096832.0, "5": 487096832.0, "10": 487096832.0, "15": 487096832.0, "20": 487096832.0, "25": 487096832.0, "30": 487096832.0, "35": 487096832.0, "40": 487096832.0, "45": 487096832.0, "50": 487096832.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1229747712.0, "5": 1409821184.0, "10": 1409821184.0, "15": 1409821184.0, "20": 1409821184.0, "25": 1409821184.0, "30": 1409821184.0, "35": 1409821184.0, "40": 1409821184.0, "45": 1409821184.0, "50": 1409821184.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.04346, "5": 0.19365, "10": 0.19279, "15": 0.19212, "20": 0.1915, "25": 0.19182, "30": 0.192, "35": 0.19258, "40": 0.19179, "45": 0.19135, "50": 0.19151}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 5c516f0..f168921 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.86208, 10.89137, 10.86731, 10.81652, 10.70126, 10.60816, 10.11007, 10.21889, 10.1294, 9.80326]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1659.0, 1944.0, 1974.0, 1920.0, 1918.0, 1855.0, 1621.0, 2018.0, 2436.0, 2304.0]}, "iteration_timing_avg": 0.14203264705882354} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.86208, "5": 10.87869, "10": 10.84148, "15": 10.81526, "20": 10.72356, "25": 10.55942, "30": 10.35833, "35": 10.26014, "40": 10.06485, "45": 9.81413, "50": 9.89077}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1659.0, "5": 1904.0, "10": 1453.0, "15": 2011.0, "20": 1695.0, "25": 1617.0, "30": 1893.0, "35": 2080.0, "40": 2232.0, "45": 2224.0, "50": 2454.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 486047744.0, "5": 487096320.0, "10": 487096320.0, "15": 486047744.0, "20": 487096320.0, "25": 487096320.0, "30": 486047744.0, "35": 487096320.0, "40": 487096320.0, "45": 486047744.0, "50": 487096320.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1900157952.0, "10": 1900157952.0, "15": 1900157952.0, "20": 1900157952.0, "25": 1900157952.0, "30": 1900157952.0, "35": 1900157952.0, "40": 1900157952.0, "45": 1900157952.0, "50": 1900157952.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.46191, "5": 0.19848, "10": 0.2013, "15": 0.20084, "20": 0.20142, "25": 0.20039, "30": 0.20371, "35": 0.20255, "40": 0.2022, "45": 0.20294, "50": 0.20066}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 287a9f4..3a26be3 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..aa09c82 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.85926, "5": 10.878, "10": 10.84086, "15": 10.81702, "20": 10.72418, "25": 10.55518, "30": 10.35548, "35": 10.2597, "40": 10.06425, "45": 9.81279, "50": 9.89265, "55": 9.86713, "60": 9.4818, "65": 8.93492, "70": 9.71847, "75": 9.41307, "80": 9.3968, "85": 9.60641, "90": 9.80599, "95": 9.51409, "100": 9.39833}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1726.0, "5": 1899.0, "10": 1437.0, "15": 1923.0, "20": 1700.0, "25": 1640.0, "30": 1993.0, "35": 2075.0, "40": 2268.0, "45": 2144.0, "50": 2461.0, "55": 2419.0, "60": 2540.0, "65": 2748.0, "70": 3339.0, "75": 2600.0, "80": 3404.0, "85": 3412.0, "90": 3049.0, "95": 3491.0, "100": 3350.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 488669696.0, "5": 488669696.0, "10": 488669696.0, "15": 488669696.0, "20": 488669696.0, "25": 488669696.0, "30": 488669696.0, "35": 488669696.0, "40": 488669696.0, "45": 488669696.0, "50": 488669696.0, "55": 488669696.0, "60": 488669696.0, "65": 488669696.0, "70": 488669696.0, "75": 488669696.0, "80": 488669696.0, "85": 488669696.0, "90": 488669696.0, "95": 488669696.0, "100": 488669696.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1229747712.0, "5": 1411918336.0, "10": 1411918336.0, "15": 1411918336.0, "20": 1411918336.0, "25": 1411918336.0, "30": 1411918336.0, "35": 1411918336.0, "40": 1411918336.0, "45": 1411918336.0, "50": 1411918336.0, "55": 1411918336.0, "60": 1411918336.0, "65": 1411918336.0, "70": 1411918336.0, "75": 1411918336.0, "80": 1411918336.0, "85": 1411918336.0, "90": 1411918336.0, "95": 1411918336.0, "100": 1411918336.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.2816, "5": 0.19252, "10": 0.19307, "15": 0.19178, "20": 0.19278, "25": 0.19268, "30": 0.19244, "35": 0.19333, "40": 0.19291, "45": 0.19374, "50": 0.19199, "55": 0.19307, "60": 0.19049, "65": 0.19061, "70": 0.19137, "75": 0.19057, "80": 0.1903, "85": 0.19047, "90": 0.19357, "95": 0.19059, "100": 0.1907}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..0fae879 --- /dev/null +++ 
b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.86208, "5": 10.87869, "10": 10.84148, "15": 10.81526, "20": 10.72356, "25": 10.55942, "30": 10.35833, "35": 10.26014, "40": 10.06485, "45": 9.81413, "50": 9.89077, "55": 9.8674, "60": 9.48218, "65": 8.93482, "70": 9.7177, "75": 9.4111, "80": 9.39614, "85": 9.60606, "90": 9.80663, "95": 9.51629, "100": 9.39917}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1659.0, "5": 1904.0, "10": 1453.0, "15": 2011.0, "20": 1695.0, "25": 1617.0, "30": 1893.0, "35": 2080.0, "40": 2232.0, "45": 2224.0, "50": 2454.0, "55": 2461.0, "60": 2555.0, "65": 2883.0, "70": 3255.0, "75": 2586.0, "80": 3445.0, "85": 3442.0, "90": 3067.0, "95": 3500.0, "100": 3328.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 488144896.0, "5": 489193472.0, "10": 489193472.0, "15": 489193472.0, "20": 489193472.0, "25": 489193472.0, "30": 489193472.0, "35": 489193472.0, "40": 489193472.0, "45": 489193472.0, "50": 489193472.0, "55": 489193472.0, "60": 489193472.0, "65": 489193472.0, "70": 489193472.0, "75": 489193472.0, "80": 489193472.0, "85": 489193472.0, "90": 489193472.0, "95": 489193472.0, "100": 489193472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1720084480.0, "5": 1902255104.0, "10": 1902255104.0, "15": 1902255104.0, "20": 1902255104.0, "25": 1902255104.0, "30": 1902255104.0, "35": 1902255104.0, "40": 1902255104.0, "45": 1902255104.0, "50": 1902255104.0, "55": 1902255104.0, "60": 1902255104.0, "65": 1902255104.0, "70": 1902255104.0, "75": 1902255104.0, "80": 1902255104.0, "85": 1902255104.0, "90": 1902255104.0, "95": 1902255104.0, "100": 1902255104.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.77466, "5": 0.19369, "10": 0.19406, "15": 0.19154, "20": 0.19362, "25": 0.19633, "30": 0.19002, "35": 0.19146, "40": 0.19099, "45": 0.19061, "50": 0.19124, "55": 0.19463, "60": 0.1903, "65": 0.19035, "70": 0.19049, "75": 0.18947, "80": 0.19086, "85": 0.1921, "90": 0.19047, "95": 0.1932, "100": 0.19029}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 52b0887..fe5f0ab 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --use-legacy-models: true --data-cache-path: ${DATA_CACHE_PATH} --bf16: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json index 68d9fe8..ff25448 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1211408823529412} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 867031552.0, "30": 867031552.0, "35": 867031552.0, "40": 867031552.0, "45": 867031552.0, "50": 869128704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 4114758144.0, "45": 4114758144.0, "50": 4114758144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.89966, "5": 0.15568, "10": 0.15311, "15": 0.15336, "20": 0.15735, "25": 0.15804, "30": 0.15672, "35": 0.1548, "40": 0.15515, "45": 0.15584, "50": 0.15477}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json index 68d9fe8..1bc7133 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.79311, 10.85248, 10.87281, 10.83016, 10.82949, 10.78726, 10.565, 10.57088, 10.4836, 10.19521]}, "num-zeros": {"start_step": 0, "end_step": 34, "step_interval": 5, "values": [2450.0, 2765.0, 2163.0, 2585.0, 2634.0, 2585.0, 2987.0]}, "iteration_timing_avg": 0.1211408823529412} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 869128704.0, "30": 869128704.0, "35": 869128704.0, "40": 869128704.0, "45": 869128704.0, "50": 869128704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 
4114758144.0, "45": 4114758144.0, "50": 4114758144.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 17.9574, "5": 0.15166, "10": 0.15201, "15": 0.1496, "20": 0.15614, "25": 0.15477, "30": 0.15483, "35": 0.15409, "40": 0.1546, "45": 0.15501, "50": 0.15639}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml index 0923fd4..8f75708 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..0de85b8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715, "55": 10.14491, "60": 9.76806, "65": 9.20573, "70": 9.87752, "75": 9.55094, "80": 9.52283, "85": 9.7106, "90": 9.89179, "95": 9.59202, "100": 9.48543}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 867031552.0, "30": 867031552.0, "35": 867031552.0, "40": 867031552.0, "45": 867031552.0, "50": 869128704.0, "55": 867031552.0, "60": 867031552.0, "65": 867031552.0, "70": 867031552.0, "75": 867031552.0, "80": 869128704.0, "85": 867031552.0, "90": 867031552.0, "95": 867031552.0, "100": 867031552.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 4114758144.0, "45": 4114758144.0, "50": 4114758144.0, "55": 4114758144.0, "60": 4114758144.0, "65": 4114758144.0, "70": 4114758144.0, "75": 4114758144.0, "80": 4114758144.0, "85": 4114758144.0, "90": 4114758144.0, "95": 4114758144.0, "100": 4114758144.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.36019, "5": 0.14748, "10": 0.14569, "15": 0.14722, "20": 0.15678, "25": 0.15572, "30": 0.15085, "35": 0.15125, "40": 0.15141, "45": 
0.15202, "50": 0.14925, "55": 0.14768, "60": 0.14952, "65": 0.15001, "70": 0.15024, "75": 0.14973, "80": 0.14933, "85": 0.1492, "90": 0.14942, "95": 0.14927, "100": 0.14832}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0, "55": 2481.0, "60": 2945.0, "65": 2329.0, "70": 3673.0, "75": 3016.0, "80": 3642.0, "85": 4122.0, "90": 3744.0, "95": 4035.0, "100": 3447.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..a6c746f --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.79311, "5": 10.83074, "10": 10.76725, "15": 10.82664, "20": 10.81793, "25": 10.76529, "30": 10.69182, "35": 10.61672, "40": 10.44907, "45": 10.21488, "50": 10.21715, "55": 10.14491, "60": 9.76806, "65": 9.20573, "70": 9.87752, "75": 9.55094, "80": 9.52283, "85": 9.7106, "90": 9.89179, "95": 9.59202, "100": 9.48543}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 586369024.0, "5": 587417600.0, "10": 587417600.0, "15": 587417600.0, "20": 869128704.0, "25": 869128704.0, "30": 869128704.0, "35": 869128704.0, "40": 869128704.0, "45": 869128704.0, "50": 869128704.0, "55": 869128704.0, "60": 869128704.0, "65": 869128704.0, "70": 869128704.0, "75": 869128704.0, "80": 869128704.0, "85": 869128704.0, "90": 869128704.0, "95": 869128704.0, "100": 869128704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3832784384.0, "5": 3832784896.0, "10": 3832784896.0, "15": 3832784896.0, "20": 4114758144.0, "25": 4114758144.0, "30": 4114758144.0, "35": 4114758144.0, "40": 4114758144.0, "45": 4114758144.0, "50": 4114758144.0, "55": 4114758144.0, "60": 4114758144.0, "65": 4114758144.0, "70": 4114758144.0, "75": 4114758144.0, "80": 4114758144.0, "85": 4114758144.0, "90": 4114758144.0, "95": 4114758144.0, "100": 4114758144.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.6616, "5": 0.15013, "10": 0.15207, "15": 0.15146, "20": 0.15882, "25": 0.15823, "30": 0.15777, "35": 0.15885, "40": 0.15922, "45": 0.15588, "50": 0.15635, "55": 0.15588, "60": 0.15681, "65": 0.15688, "70": 0.15648, "75": 0.15793, "80": 0.15889, "85": 0.15769, "90": 0.15693, "95": 0.15611, "100": 0.15689}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1846.0, "25": 2348.0, "30": 2490.0, "35": 2010.0, "40": 2016.0, "45": 2642.0, "50": 2810.0, "55": 2481.0, "60": 2945.0, "65": 2329.0, "70": 3673.0, "75": 3016.0, "80": 3642.0, "85": 4122.0, "90": 3744.0, "95": 4035.0, "100": 3447.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml index 9ea57cb..e5e4a22 100644 --- 
a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -48,4 +48,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 87df9ed..20dd385 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, "end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.14292588235294112} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 379427840.0, "5": 378379264.0, "10": 378903552.0, "15": 378379264.0, "20": 561597952.0, "25": 561073664.0, "30": 561597952.0, "35": 561597952.0, "40": 561597952.0, "45": 561597952.0, "50": 561597952.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1704025600.0, "5": 1704026112.0, "10": 1704026112.0, "15": 1704026112.0, "20": 1886196224.0, "25": 1886196224.0, "30": 1886196224.0, "35": 1886196224.0, "40": 1886196224.0, "45": 1886196224.0, "50": 1886196224.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.22765, "5": 0.19439, "10": 0.19327, "15": 0.19227, "20": 0.20227, "25": 0.20323, "30": 0.2014, "35": 0.20216, "40": 0.20166, "45": 0.20072, "50": 0.19941}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 87df9ed..744e902 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.85929, 10.89211, 10.87639, 10.86988, 10.88179, 10.83898, 10.66589, 10.62691, 10.52461, 10.25708]}, "num-zeros": {"start_step": 0, 
"end_step": 33, "step_interval": 5, "values": [2458.0, 2527.0, 2467.0, 2148.0, 2250.0, 2467.0, 2528.0]}, "iteration_timing_avg": 0.14292588235294112} +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 10.85929, "5": 10.87929, "10": 10.84772, "15": 10.86867, "20": 10.87317, "25": 10.83338, "30": 10.75624, "35": 10.66844, "40": 10.50171, "45": 10.28002, "50": 10.25621}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 380476416.0, "5": 380476416.0, "10": 378903552.0, "15": 380476416.0, "20": 560549376.0, "25": 560549376.0, "30": 560549376.0, "35": 560549376.0, "40": 560287232.0, "45": 560549376.0, "50": 560549376.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 1704025600.0, "5": 1704026112.0, "10": 1704026112.0, "15": 1704026112.0, "20": 1884099072.0, "25": 1884099072.0, "30": 1884099072.0, "35": 1884099072.0, "40": 1884361216.0, "45": 1884361216.0, "50": 1884361216.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.59076, "5": 0.20078, "10": 0.20046, "15": 0.19967, "20": 0.20892, "25": 0.20876, "30": 0.2082, "35": 0.2082, "40": 0.21131, "45": 0.21272, "50": 0.21012}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": 1808.0, "25": 2385.0, "30": 2591.0, "35": 1997.0, "40": 1959.0, "45": 2368.0, "50": 3073.0}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml index ea96682..82e3f7b 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -46,4 +46,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index beaaa98..dd1c416 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_mr_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -19,8 +19,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -47,4 +47,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --fp16: true --apply-query-key-layer-scaling: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git 
a/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_dev.json b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_dev.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_dev.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_lts.json b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_lts.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/golden_values_lts.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml index fc75e1c..cec5322 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume/model_config.yaml @@ -26,7 +26,7 @@ MODEL_ARGS: --seq-length: 1024 --max-position-embeddings: 1024 --position-embedding-type: rope - --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container --rotary-percent: 0.5 --swiglu: true --untie-embeddings-and-output-weights: true @@ -34,8 +34,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --data-path: ${DATA_PATH}/my-gpt3_00_text_document --vocab-file: ${DATA_PATH}/bpe/vocab.json --merge-file: ${DATA_PATH}/bpe/merges.txt @@ -63,4 +63,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --bf16: true --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.10.0.json new file mode 100644 index 0000000..30307d6 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 1.73911, "5": 1.76223, "10": 1.71041, "15": 1.7172, "20": 1.70966, "25": 1.65753, "30": 1.70118, "35": 1.59945, "40": 1.61193, "45": 1.61034, "50": 1.57326, "55": 1.58571, "60": 1.58051, "65": 1.57034, "70": 1.55089, "75": 1.57853, "80": 1.56953, "85": 1.55978, "90": 1.57521, "95": 1.5621, "100": 1.53458, "105": 1.52625, "110": 1.54252, "115": 1.52566, "120": 1.53266, "125": 1.53908, "130": 1.57143, "135": 1.52432, "140": 1.53179, "145": 1.50745, "150": 1.53378, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": 
"nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", 
"1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", 
"2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", 
"3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", 
"5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", 
"6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", 
"7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", 
"8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", 
"9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 
"nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": 
"nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": 
"nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": 
"nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": 
"nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": 
"nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": 
"nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": 
"nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": 
"nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": 
"nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 
"nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": 
"nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": 
"nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": 
"nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": 
"nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": 
"nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": 
"nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": 
"nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": 
"nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": 
"nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 
"nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": 
"nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": 
"nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": 
"nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": 
"nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": 
"nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": 
"nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": 
"nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": 
"nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": 
"nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 
"nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": 
"nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": 
"nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": 
"nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": 
"nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": 
"nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": 
"nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 46663888.0, "5": 49735444.0, "10": 47259480.0, "15": 50284904.0, "20": 51089700.0, "25": 51115972.0, "30": 47996280.0, "35": 48620284.0, "40": 48539768.0, "45": 48952664.0, "50": 47751376.0, "55": 51826928.0, "60": 48649168.0, "65": 50755648.0, "70": 46942848.0, "75": 47935228.0, "80": 50120008.0, "85": 47913568.0, "90": 49908096.0, "95": 49084468.0, "100": 49566232.0, "105": 51326736.0, "110": 51309060.0, "115": 44750864.0, "120": 46628396.0, "125": 49108904.0, "130": 49624924.0, "135": 50022120.0, "140": 49617536.0, "145": 47190592.0, "150": 50675220.0, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", 
"1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", 
"2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", 
"3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", 
"4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", 
"5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", 
"7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", 
"8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", 
"9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": 
"nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": 
"nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": 
"nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": 
"nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": 
"nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": 
"nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": 
"nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": 
"nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": 
"nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": 
"nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": 
"nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": 
"nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": 
"nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": 
"nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": 
"nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": 
"nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": 
"nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": 
"nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": 
"nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": 
"nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": 
"nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": 
"nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": 
"nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": 
"nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": 
"nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": 
"nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": 
"nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": 
"nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": 
"nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": 
"nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": 
"nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": 
"nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": 
"nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": 
"nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": 
"nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": 
"nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": 
"nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 22284783616.0, "5": 22284785664.0, "10": 22284785664.0, "15": 22284785664.0, "20": 22284785664.0, "25": 22284785664.0, "30": 22284785664.0, "35": 22284785664.0, "40": 22284785664.0, "45": 22284785664.0, "50": 22284785664.0, "55": 22284785664.0, "60": 22284785664.0, "65": 22284785664.0, "70": 22284785664.0, "75": 22284785664.0, "80": 22284785664.0, "85": 22284785664.0, "90": 22284785664.0, "95": 22284785664.0, "100": 22284785664.0, "105": 22284785664.0, "110": 22284785664.0, "115": 22284785664.0, "120": 22284785664.0, "125": 22284785664.0, "130": 22284785664.0, "135": 22284785664.0, "140": 22284785664.0, "145": 22284785664.0, "150": 22284785664.0, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", 
"820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", 
"2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", 
"3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", 
"4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", 
"5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", 
"6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", 
"7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", 
"9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", 
"10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", 
"11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", 
"12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", 
"13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", 
"14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", 
"15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", 
"16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", 
"18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", 
"19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", 
"20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", 
"21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", 
"22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", 
"23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", 
"24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", 
"25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", 
"26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", 
"28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", 
"29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", 
"30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", 
"31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", 
"32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", 
"33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", 
"34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", 
"35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", 
"36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", 
"38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", 
"39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", 
"40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", 
"41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", 
"42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", 
"43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", 
"44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", 
"45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", 
"46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", 
"48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", 
"49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", 
"50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 303.40753, "5": 7.7569, "10": 6.95585, "15": 7.34858, "20": 7.05739, "25": 6.89858, "30": 7.08568, "35": 6.81945, "40": 6.82484, "45": 6.90277, "50": 6.89609, "55": 7.1478, "60": 6.88183, "65": 6.96186, "70": 7.51916, "75": 7.02392, "80": 7.09158, "85": 6.61364, "90": 6.98915, "95": 6.62862, "100": 6.60561, "105": 6.63553, "110": 6.64044, "115": 7.55855, "120": 6.58757, "125": 6.57183, "130": 7.06609, "135": 6.98321, "140": 6.939, "145": 6.71321, "150": 6.63196, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", 
"520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", 
"1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", 
"2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", 
"4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", 
"5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", 
"6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", 
"7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", 
"8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": 
"nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": 
"nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": 
"nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": 
"nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": 
"nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": 
"nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": 
"nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": 
"nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": 
"nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": 
"nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": 
"nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": 
"nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": 
"nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": 
"nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": 
"nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": 
"nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": 
"nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": 
"nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": 
"nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": 
"nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": 
"nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": 
"nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": 
"nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": 
"nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": 
"nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": 
"nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": 
"nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": 
"nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": 
"nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": 
"nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": 
"nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": 
"nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": 
"nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": 
"nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": 
"nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": 
"nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": 
"nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json index 23735ec..ce22b87 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/golden_values_0.9.0.json @@ -1,203 +1 @@ -{ - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 300, - "step_interval": 5, - "values": [ - 22282596352.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 
22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282596352.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0, - 22282598400.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 300, - "step_interval": 5, - "values": [ - 309.57425, - 7.41416, - 7.25837, - 6.98896, - 7.14761, - 7.186, - 6.86385, - 6.9839, - 6.74659, - 6.91703, - 6.8232, - 6.77252, - 6.76381, - 6.76271, - 6.87235, - 6.71758, - 7.26112, - 6.68114, - 6.82257, - 6.56624, - 6.79547, - 6.71246, - 6.87595, - 6.7641, - 6.78867, - 6.94615, - 7.25241, - 7.1788, - 6.76322, - 6.62512, - 310.03296, - 7.59717, - 7.25297, - 6.86048, - 7.14724, - 7.01021, - 6.78072, - 7.35111, - 6.63961, - 6.78637, - 6.65223, - 6.66674, - 6.65987, - 6.64773, - 6.91043, - 6.54743, - 7.16854, - 6.47425, - 6.72084, - 6.90341, - 6.43778, - 6.59634, - 6.79432, - 6.64271, - 6.77244, - 6.59696, - 7.38602, - 6.98229, - 6.5725, - 6.57179 - ] - }, - "throughput": { - "start_step": 0, - "end_step": 300, - "step_interval": 5, - "values": [ - 6.63203, - 276.91702, - 282.86053, - 293.76428, - 287.24368, - 285.70932, - 299.1185, - 293.97681, - 304.31775, - 296.819, - 300.90082, - 303.15247, - 303.54291, - 303.59225, - 298.74869, - 305.63171, - 282.75345, - 307.29898, - 300.92853, - 312.67621, - 302.12869, - 305.86478, - 298.59213, - 303.52991, - 302.43121, - 295.57489, - 283.09302, - 285.99564, - 303.56918, - 309.89725, - 6.62222, - 270.246, - 283.07117, - 299.26562, - 287.2587, - 292.87387, - 302.78604, - 279.2919, - 309.22092, - 302.5336, - 308.63412, - 307.96243, - 308.28, - 308.84332, - 297.10269, - 313.57434, - 286.40494, - 317.11862, - 305.48352, - 297.40475, - 318.91516, - 311.24905, - 302.17957, - 309.07645, - 303.15582, - 311.22006, - 277.97174, - 294.0448, - 312.3783, - 312.41217 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 1.73911, "5": 1.76236, "10": 1.7101, "15": 1.71685, "20": 1.70952, "25": 1.6571, "30": 1.70057, "35": 1.59908, "40": 1.61246, "45": 1.61281, "50": 1.57423, "55": 1.58641, "60": 1.58107, "65": 1.5709, "70": 1.55138, "75": 1.5791, "80": 1.57036, "85": 1.56059, "90": 1.57623, "95": 1.56288, "100": 1.53538, "105": 1.52668, "110": 1.54296, "115": 1.52608, "120": 1.53299, "125": 1.53927, "130": 1.57152, "135": 1.52436, "140": 1.53198, "145": 1.50743, "150": 1.53376, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": 
"nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": 
"nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": 
"nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": 
"nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": 
"nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": 
"nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": 
"nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": 
"nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": 
"nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": 
"nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": 
"nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": 
"nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": 
"nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": 
"nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": 
"nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": 
"nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": 
"nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": 
"nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": 
"nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": 
"nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": 
"nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": 
"nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": 
"nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": 
"nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": 
"nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": 
"nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": 
"nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": 
"nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": 
"nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": 
"nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": 
"nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": 
"nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": 
"nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": 
"nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": 
"nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": 
"nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": 
"nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": 
"nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": 
"nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": 
"nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": 
"nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": 
"nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": 
"nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": 
"nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": 
"nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 46664812.0, "5": 49749100.0, "10": 47333128.0, "15": 50185804.0, "20": 51079300.0, 
"25": 51140832.0, "30": 47975536.0, "35": 48597620.0, "40": 48522604.0, "45": 48955876.0, "50": 47593240.0, "55": 51796536.0, "60": 48655760.0, "65": 50600604.0, "70": 46944320.0, "75": 47766776.0, "80": 50069400.0, "85": 48080532.0, "90": 49895944.0, "95": 49080084.0, "100": 49553636.0, "105": 51209220.0, "110": 51308940.0, "115": 44780688.0, "120": 46617540.0, "125": 49088720.0, "130": 49772656.0, "135": 50026940.0, "140": 49623616.0, "145": 47166168.0, "150": 50672284.0, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", 
"1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", 
"2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", 
"3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", 
"4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", 
"5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", 
"7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", 
"8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", 
"9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", 
"10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", 
"11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", 
"12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", 
"14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", 
"15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", 
"16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", 
"17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", 
"18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", 
"19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", 
"20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", 
"21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", 
"22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", 
"23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", 
"25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", 
"26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", 
"27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", 
"28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", 
"29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", 
"30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", 
"31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", 
"32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", 
"33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", 
"35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", 
"36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", 
"37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", 
"38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", 
"39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", 
"40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", 
"41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", 
"42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", 
"43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", 
"45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", 
"46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", 
"47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", 
"48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", 
"49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", 
"50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 22282596352.0, "5": 22282598400.0, "10": 22282598400.0, "15": 22282598400.0, "20": 22282598400.0, "25": 22282598400.0, "30": 22282598400.0, "35": 22282598400.0, "40": 22282598400.0, "45": 22282598400.0, "50": 22282598400.0, "55": 22282598400.0, "60": 22282598400.0, "65": 22282598400.0, "70": 22282598400.0, "75": 22282598400.0, "80": 22282598400.0, "85": 22282598400.0, "90": 22282598400.0, "95": 22282598400.0, "100": 22282598400.0, "105": 22282598400.0, "110": 22282598400.0, "115": 22282598400.0, "120": 22282598400.0, "125": 22282598400.0, "130": 22282598400.0, "135": 22282598400.0, "140": 22282598400.0, "145": 22282598400.0, "150": 22282598400.0, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", 
"885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": 
"nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": 
"nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": 
"nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": 
"nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": 
"nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 
"nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": 
"nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": 
"nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": 
"nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": 
"nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": 
"nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": 
"nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": 
"nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": 
"nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": 
"nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": 
"nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": 
"nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": 
"nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": 
"nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": 
"nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": 
"nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": 
"nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": 
"nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": 
"nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": 
"nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": 
"nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": 
"nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": 
"nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": 
"nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": 
"nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": 
"nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": 
"nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": 
"nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": 
"nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": 
"nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": 
"nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": 
"nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": 
"nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": 
"nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": 
"nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": 
"nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": 
"nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": 
"nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": 
"nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 309.57425, "5": 7.51219, "10": 7.07752, "15": 6.99837, "20": 7.26199, "25": 6.94916, "30": 7.20243, "35": 7.63424, "40": 6.7547, "45": 6.86129, "50": 7.09675, "55": 6.9145, "60": 7.02844, "65": 6.99827, "70": 6.94228, "75": 6.84159, "80": 6.80518, "85": 6.89844, "90": 6.74273, "95": 6.6918, "100": 6.98212, "105": 6.82696, "110": 6.93624, "115": 6.92443, "120": 6.62233, "125": 6.93696, "130": 6.7543, "135": 7.0154, "140": 6.95064, "145": 6.51639, "150": 6.83764, "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": "nan", "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": 
"nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", 
"1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", 
"2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", 
"4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", 
"5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", 
"6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", 
"7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", 
"8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": 
"nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": 
"nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": 
"nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": 
"nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": 
"nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": 
"nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": 
"nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": 
"nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": 
"nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": 
"nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": 
"nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": 
"nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": 
"nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": 
"nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": 
"nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": 
"nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": 
"nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": 
"nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": 
"nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": 
"nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": 
"nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": 
"nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": 
"nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": 
"nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": 
"nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": 
"nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": 
"nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": 
"nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": 
"nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": 
"nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": 
"nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": 
"nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": 
"nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": 
"nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": 
"nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": 
"nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": 
"nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.10.0.json new file mode 100644 index 0000000..f636644 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12.66411, "5": 12.64021, "10": 11.72967, "15": 10.70862, "20": 10.24203, "25": 9.93824, "30": 9.71076, "35": 9.45727, "40": 9.26048, "45": 9.07344, "50": 8.89423, "55": 8.6923, "60": 8.58237, "65": 8.48642, "70": 8.32183, "75": 8.205, "80": 8.07005, "85": 7.94543, "90": 7.76842, "95": 7.67199, "100": 7.55879, "105": 7.40687, "110": 7.31682, "115": 7.22402, "120": 7.11335, "125": 7.03668, "130": 6.95005, 
"135": 6.83787, "140": 6.79853, "145": 6.69175, "150": 6.63124, "155": 6.58796, "160": 6.51691, "165": 6.4272, "170": 6.35668, "175": 6.29808, "180": 6.2271, "185": 6.17956, "190": 6.06939, "195": 6.0735, "200": 5.97899, "205": 5.91114, "210": 5.89436, "215": 5.80963, "220": 5.75594, "225": 5.69727, "230": 5.63917, "235": 5.62452, "240": 5.50151, "245": 5.52279, "250": 5.45464, "255": 5.40305, "260": 5.36864, "265": 5.32579, "270": 5.29051, "275": 5.26992, "280": 5.22288, "285": 5.1915, "290": 5.15098, "295": 5.09602, "300": 5.06251, "305": 5.0513, "310": 5.00088, "315": 4.9487, "320": 4.90873, "325": 4.91854, "330": 4.87973, "335": 4.8585, "340": 4.82067, "345": 4.80275, "350": 4.75113, "355": 4.73333, "360": 4.65133, "365": 4.65726, "370": 4.6339, "375": 4.60424, "380": 4.56862, "385": 4.56123, "390": 4.50994, "395": 4.48958, "400": 4.45487, "405": 4.38689, "410": 4.39428, "415": 4.38106, "420": 4.33072, "425": 4.31182, "430": 4.27673, "435": 4.26004, "440": 4.21204, "445": 4.18492, "450": 4.17411, "455": 4.1255, "460": 4.08403, "465": 4.08559, "470": 4.01426, "475": 4.02584, "480": 3.99202, "485": 3.97648, "490": 3.96804, "495": 3.9449, "500": 3.88388, "505": 3.88432, "510": 3.87354, "515": 3.81908, "520": 3.84696, "525": 3.82369, "530": 3.76936, "535": 3.81696, "540": 3.75341, "545": 3.75902, "550": 3.73525, "555": 3.73336, "560": 3.66985, "565": 3.68799, "570": 3.67631, "575": 3.64012, "580": 3.64492, "585": 3.63874, "590": 3.61762, "595": 3.58955, "600": 3.58478, "605": 3.5419, "610": 3.55013, "615": 3.5433, "620": 3.52591, "625": 3.49236, "630": 3.46517, "635": 3.45645, "640": 3.47463, "645": 3.46271, "650": 3.51295, "655": 3.45879, "660": 3.42714, "665": 3.44917, "670": 3.48394, "675": 3.41579, "680": 3.44163, "685": 3.41953, "690": 3.42692, "695": 3.43042, "700": 3.41435, "705": 3.3966, "710": 3.38641, "715": 3.4005, "720": 3.39129, "725": 3.34858, "730": 3.32811, "735": 3.31154, "740": 3.31138, "745": 3.29193, "750": 3.31111, "755": 3.2913, "760": 3.28316, "765": 3.28005, "770": 3.26522, "775": 3.29524, "780": 3.28471, "785": 3.29572, "790": 3.25797, "795": 3.27045, "800": 3.30289, "805": 3.26267, "810": 3.25152, "815": 3.25028, "820": 3.25738, "825": 3.21794, "830": 3.20773, "835": 3.21657, "840": 3.16237, "845": 3.14996, "850": 3.20721, "855": 3.19036, "860": 3.18175, "865": 3.16828, "870": 3.1756, "875": 3.19605, "880": 3.1822, "885": 3.1925, "890": 3.17776, "895": 3.18019, "900": 3.16288, "905": 3.17557, "910": 3.15958, "915": 3.15544, "920": 3.16174, "925": 3.13915, "930": 3.11805, "935": 3.1196, "940": 3.14142, "945": 3.12627, "950": 3.15225, "955": 3.1787, "960": 3.14345, "965": 3.12266, "970": 3.12309, "975": 3.14058, "980": 3.11212, "985": 3.07994, "990": 3.04522, "995": 3.08938, "1000": 3.0612, "1005": 3.06441, "1010": 3.05546, "1015": 3.04356, "1020": 3.03641, "1025": 3.06042, "1030": 3.11504, "1035": 3.10692, "1040": 3.10535, "1045": 3.08109, "1050": 3.04324, "1055": 3.02036, "1060": 3.03252, "1065": 3.00259, "1070": 2.9925, "1075": 3.0824, "1080": 3.10113, "1085": 3.47463, "1090": 3.22499, "1095": 3.14723, "1100": 3.20275, "1105": 3.20017, "1110": 3.163, "1115": 3.1615, "1120": 3.14395, "1125": 3.06302, "1130": 3.02545, "1135": 3.03165, "1140": 3.023, "1145": 3.01737, "1150": 3.02397, "1155": 3.00982, "1160": 2.96166, "1165": 2.97888, "1170": 2.99597, "1175": 2.97064, "1180": 2.96169, "1185": 2.95062, "1190": 2.96937, "1195": 2.93003, "1200": 2.97725, "1205": 3.09008, "1210": 3.05456, "1215": 3.0597, "1220": 3.00305, "1225": 2.96613, "1230": 2.92125, "1235": 2.95312, 
"1240": 2.90178, "1245": 2.92123, "1250": 2.92953, "1255": 2.95259, "1260": 2.93912, "1265": 2.94744, "1270": 2.95724, "1275": 2.89654, "1280": 2.92352, "1285": 2.90764, "1290": 2.9243, "1295": 2.90377, "1300": 2.91127, "1305": 2.93182, "1310": 2.90085, "1315": 2.88315, "1320": 2.89093, "1325": 2.91221, "1330": 2.85479, "1335": 2.91221, "1340": 2.87509, "1345": 2.90753, "1350": 2.86178, "1355": 2.9035, "1360": 2.91186, "1365": 2.87805, "1370": 2.89725, "1375": 2.85552, "1380": 2.87105, "1385": 2.8838, "1390": 2.85868, "1395": 2.88183, "1400": 2.84709, "1405": 2.874, "1410": 2.86354, "1415": 2.88473, "1420": 3.21894, "1425": 3.20749, "1430": 3.12994, "1435": 2.97167, "1440": 2.95771, "1445": 2.97494, "1450": 2.99965, "1455": 3.00674, "1460": 3.02534, "1465": 3.03449, "1470": 3.14129, "1475": 2.99818, "1480": 2.98014, "1485": 2.95448, "1490": 2.89205, "1495": 2.88231, "1500": 2.87446, "1505": 2.87563, "1510": 2.84003, "1515": 2.86386, "1520": 2.83074, "1525": 2.80624, "1530": 2.81905, "1535": 2.84727, "1540": 2.84126, "1545": 2.86441, "1550": 2.85033, "1555": 2.9076, "1560": 2.88048, "1565": 2.83225, "1570": 2.8249, "1575": 2.81691, "1580": 2.81059, "1585": 2.82449, "1590": 2.8109, "1595": 2.83497, "1600": 2.82, "1605": 2.80914, "1610": 2.77793, "1615": 2.80428, "1620": 2.80545, "1625": 2.78198, "1630": 2.80942, "1635": 2.82658, "1640": 2.8153, "1645": 2.81373, "1650": 2.81769, "1655": 2.82217, "1660": 2.8157, "1665": 2.78062, "1670": 2.77122, "1675": 2.80339, "1680": 2.77575, "1685": 2.77471, "1690": 2.76522, "1695": 2.75819, "1700": 2.78858, "1705": 2.75372, "1710": 2.77768, "1715": 2.78854, "1720": 2.78723, "1725": 2.77635, "1730": 2.79633, "1735": 2.78281, "1740": 2.76449, "1745": 2.75742, "1750": 2.77351, "1755": 2.74352, "1760": 2.7994, "1765": 2.78856, "1770": 2.97788, "1775": 2.96775, "1780": 2.9797, "1785": 2.92033, "1790": 2.88284, "1795": 2.82568, "1800": 2.77547, "1805": 2.74333, "1810": 2.78676, "1815": 2.75899, "1820": 2.74069, "1825": 2.73606, "1830": 2.73044, "1835": 2.73207, "1840": 2.75608, "1845": 2.75227, "1850": 2.7441, "1855": 2.74282, "1860": 2.73862, "1865": 2.71656, "1870": 2.74365, "1875": 2.72768, "1880": 2.73369, "1885": 2.75114, "1890": 2.71775, "1895": 2.70634, "1900": 2.75082, "1905": 2.70035, "1910": 2.71518, "1915": 2.71953, "1920": 2.73172, "1925": 2.72762, "1930": 2.73479, "1935": 2.748, "1940": 2.70829, "1945": 2.71292, "1950": 2.71668, "1955": 2.69803, "1960": 2.67747, "1965": 2.71387, "1970": 2.72776, "1975": 2.75851, "1980": 2.80969, "1985": 2.74548, "1990": 2.7075, "1995": 2.72523, "2000": 2.70484, "2005": 2.72309, "2010": 2.68085, "2015": 2.70398, "2020": 2.68473, "2025": 2.69465, "2030": 2.70438, "2035": 2.68921, "2040": 2.70339, "2045": 2.6765, "2050": 2.70228, "2055": 2.68072, "2060": 2.67404, "2065": 2.69153, "2070": 2.67792, "2075": 2.67626, "2080": 2.68663, "2085": 2.67923, "2090": 2.66735, "2095": 2.67558, "2100": 2.66598, "2105": 2.68188, "2110": 2.67422, "2115": 2.68235, "2120": 2.65224, "2125": 2.66326, "2130": 2.66402, "2135": 2.69025, "2140": 2.66544, "2145": 2.67202, "2150": 2.6875, "2155": 2.6518, "2160": 2.67156, "2165": 2.66078, "2170": 2.67321, "2175": 2.66725, "2180": 2.66972, "2185": 2.64345, "2190": 2.63094, "2195": 2.66603, "2200": 2.64951, "2205": 2.66514, "2210": 2.64515, "2215": 2.65066, "2220": 2.67595, "2225": 2.61606, "2230": 2.63122, "2235": 2.64157, "2240": 2.67315, "2245": 3.14695, "2250": 2.79358, "2255": 2.725, "2260": 2.78767, "2265": 2.80376, "2270": 2.83432, "2275": 2.74668, "2280": 2.84211, "2285": 2.7226, "2290": 
2.69875, "2295": 2.65277, "2300": 2.65868, "2305": 2.66184, "2310": 2.65568, "2315": 2.63606, "2320": 2.65066, "2325": 2.63113, "2330": 2.61396, "2335": 2.63418, "2340": 2.6643, "2345": 2.63884, "2350": 2.64845, "2355": 2.63251, "2360": 2.6527, "2365": 2.63709, "2370": 2.60195, "2375": 2.60332, "2380": 2.63315, "2385": 2.60418, "2390": 2.62045, "2395": 2.65441, "2400": 2.61152, "2405": 2.61896, "2410": 2.61218, "2415": 2.65643, "2420": 2.64165, "2425": 2.65181, "2430": 2.60436, "2435": 2.61374, "2440": 2.62972, "2445": 2.61125, "2450": 2.5982, "2455": 2.71885, "2460": 2.87174, "2465": 2.75049, "2470": 2.67614, "2475": 2.66549, "2480": 2.64287, "2485": 2.62898, "2490": 2.62084, "2495": 2.62637, "2500": 2.59354, "2505": 2.6004, "2510": 2.57697, "2515": 2.63831, "2520": 2.6203, "2525": 2.59631, "2530": 2.59964, "2535": 2.6047, "2540": 2.59062, "2545": 2.6102, "2550": 2.61281, "2555": 2.62133, "2560": 2.61901, "2565": 2.59515, "2570": 2.61455, "2575": 2.61538, "2580": 2.60782, "2585": 2.54964, "2590": 2.6181, "2595": 2.64551, "2600": 2.61777, "2605": 2.60063, "2610": 2.59565, "2615": 2.61901, "2620": 2.58506, "2625": 2.60609, "2630": 2.58903, "2635": 2.57047, "2640": 2.58481, "2645": 2.58706, "2650": 2.5725, "2655": 2.61836, "2660": 2.595, "2665": 2.60243, "2670": 2.63927, "2675": 2.64828, "2680": 2.57844, "2685": 2.59008, "2690": 2.5665, "2695": 2.58091, "2700": 2.61132, "2705": 2.56529, "2710": 2.58101, "2715": 2.55867, "2720": 2.53892, "2725": 2.56049, "2730": 2.58031, "2735": 2.59842, "2740": 2.55506, "2745": 2.57609, "2750": 2.55828, "2755": 2.5961, "2760": 2.56946, "2765": 2.54763, "2770": 2.61463, "2775": 2.59091, "2780": 2.58216, "2785": 2.5715, "2790": 2.56667, "2795": 2.57319, "2800": 2.56496, "2805": 2.57907, "2810": 2.57731, "2815": 2.60782, "2820": 2.68315, "2825": 2.60117, "2830": 2.59736, "2835": 2.58159, "2840": 2.55498, "2845": 2.54782, "2850": 2.55288, "2855": 2.52876, "2860": 2.57079, "2865": 2.55753, "2870": 2.53176, "2875": 2.5448, "2880": 2.56709, "2885": 2.56212, "2890": 2.56304, "2895": 2.56235, "2900": 2.69819, "2905": 2.5963, "2910": 2.54668, "2915": 2.55229, "2920": 2.55897, "2925": 2.54826, "2930": 2.53099, "2935": 2.54305, "2940": 2.55606, "2945": 2.54458, "2950": 2.52643, "2955": 2.56906, "2960": 2.53007, "2965": 2.53171, "2970": 2.54401, "2975": 2.54509, "2980": 2.56309, "2985": 2.55632, "2990": 2.57635, "2995": 2.59533, "3000": 2.59336, "3005": 2.54449, "3010": 2.56623, "3015": 2.547, "3020": 2.5338, "3025": 2.54991, "3030": 2.5536, "3035": 2.52995, "3040": 2.56396, "3045": 2.55273, "3050": 2.51547, "3055": 2.53602, "3060": 2.54631, "3065": 2.54816, "3070": 2.55476, "3075": 2.52907, "3080": 2.52066, "3085": 2.5139, "3090": 2.54193, "3095": 2.54409, "3100": 2.52686, "3105": 2.50155, "3110": 2.5319, "3115": 2.48543, "3120": 2.50811, "3125": 2.52726, "3130": 2.51306, "3135": 2.49108, "3140": 2.5067, "3145": 2.51711, "3150": 2.55145, "3155": 2.52169, "3160": 2.52727, "3165": 2.52805, "3170": 2.5585, "3175": 2.59588, "3180": 2.52487, "3185": 2.55146, "3190": 2.53618, "3195": 2.52578, "3200": 2.50818, "3205": 2.51214, "3210": 2.52649, "3215": 2.55967, "3220": 2.53059, "3225": 2.46238, "3230": 2.5196, "3235": 2.5318, "3240": 2.50875, "3245": 2.5008, "3250": 2.49186, "3255": 2.50601, "3260": 2.51161, "3265": 2.51011, "3270": 2.52991, "3275": 2.50769, "3280": 2.47649, "3285": 2.48515, "3290": 2.53647, "3295": 2.51833, "3300": 2.48406, "3305": 2.47938, "3310": 2.49381, "3315": 2.50401, "3320": 2.4881, "3325": 2.51379, "3330": 2.513, "3335": 2.72115, "3340": 2.55277, 
"3345": 2.52194, "3350": 2.50603, "3355": 2.50448, "3360": 2.51374, "3365": 2.51045, "3370": 2.49054, "3375": 2.50187, "3380": 2.49071, "3385": 2.47194, "3390": 2.49247, "3395": 2.50053, "3400": 2.49519, "3405": 2.48221, "3410": 2.47137, "3415": 2.47378, "3420": 2.49411, "3425": 2.50396, "3430": 2.48795, "3435": 2.51066, "3440": 2.49156, "3445": 2.49395, "3450": 2.47155, "3455": 2.49527, "3460": 2.48385, "3465": 2.50045, "3470": 2.47211, "3475": 2.4949, "3480": 2.46512, "3485": 2.4812, "3490": 2.4873, "3495": 2.53603, "3500": 2.48442, "3505": 2.51094, "3510": 2.50189, "3515": 2.49758, "3520": 2.46594, "3525": 2.50325, "3530": 2.50079, "3535": 2.4764, "3540": 2.50567, "3545": 2.4948, "3550": 2.47861, "3555": 2.47003, "3560": 2.48285, "3565": 2.47105, "3570": 2.46819, "3575": 2.46885, "3580": 2.44055, "3585": 2.47978, "3590": 2.47179, "3595": 2.48159, "3600": 2.4634, "3605": 2.50042, "3610": 2.49364, "3615": 2.47395, "3620": 2.46537, "3625": 2.46647, "3630": 2.47605, "3635": 2.46999, "3640": 2.45973, "3645": 2.48725, "3650": 2.45339, "3655": 2.47476, "3660": 2.47275, "3665": 2.46641, "3670": 2.47832, "3675": 2.47939, "3680": 2.46951, "3685": 3.58644, "3690": 2.56452, "3695": 2.5182, "3700": 2.50439, "3705": 2.48699, "3710": 2.46726, "3715": 2.46483, "3720": 2.46012, "3725": 2.46964, "3730": 2.45968, "3735": 2.4654, "3740": 2.45369, "3745": 2.46263, "3750": 2.44784, "3755": 2.42568, "3760": 2.45802, "3765": 2.44357, "3770": 2.46785, "3775": 2.46502, "3780": 2.42173, "3785": 2.48838, "3790": 2.45781, "3795": 2.51946, "3800": 2.47175, "3805": 2.46973, "3810": 2.44991, "3815": 2.46168, "3820": 2.46394, "3825": 2.45933, "3830": 2.4567, "3835": 2.47215, "3840": 2.76979, "3845": 2.49244, "3850": 2.51281, "3855": 2.52109, "3860": 2.47887, "3865": 2.45776, "3870": 2.43292, "3875": 2.43899, "3880": 2.44681, "3885": 2.46273, "3890": 2.43723, "3895": 2.43812, "3900": 2.43922, "3905": 2.44362, "3910": 2.41924, "3915": 2.45732, "3920": 2.4483, "3925": 2.38156, "3930": 2.43115, "3935": 2.4158, "3940": 2.44047, "3945": 2.45295, "3950": 2.46974, "3955": 2.42834, "3960": 2.4427, "3965": 2.482, "3970": 2.45689, "3975": 2.44363, "3980": 2.43164, "3985": 2.45282, "3990": 2.451, "3995": 2.42974, "4000": 2.45725, "4005": 2.40409, "4010": 2.42514, "4015": 2.44895, "4020": 2.4205, "4025": 2.44528, "4030": 2.42316, "4035": 2.42798, "4040": 2.42342, "4045": 2.43652, "4050": 2.3969, "4055": 2.41693, "4060": 2.42636, "4065": 2.41737, "4070": 2.40525, "4075": 2.4121, "4080": 2.43041, "4085": 2.57504, "4090": 2.52349, "4095": 2.48515, "4100": 2.44095, "4105": 2.42848, "4110": 2.41239, "4115": 2.4215, "4120": 2.41273, "4125": 2.42761, "4130": 2.41368, "4135": 2.42279, "4140": 2.38646, "4145": 2.41937, "4150": 2.39229, "4155": 2.40007, "4160": 2.40395, "4165": 2.37566, "4170": 2.40075, "4175": 2.40694, "4180": 2.40715, "4185": 2.40281, "4190": 2.41371, "4195": 2.38572, "4200": 2.4098, "4205": 2.43009, "4210": 2.4063, "4215": 2.40674, "4220": 2.42712, "4225": 2.4209, "4230": 2.40424, "4235": 2.42643, "4240": 2.39807, "4245": 2.54614, "4250": 2.43425, "4255": 2.43454, "4260": 2.4068, "4265": 2.41175, "4270": 2.40657, "4275": 2.37656, "4280": 2.42127, "4285": 2.38571, "4290": 2.39144, "4295": 2.39523, "4300": 2.38411, "4305": 2.40365, "4310": 2.38804, "4315": 2.40618, "4320": 2.37993, "4325": 2.36999, "4330": 2.40761, "4335": 2.39334, "4340": 2.39545, "4345": 2.41334, "4350": 2.39944, "4355": 2.41502, "4360": 2.39102, "4365": 2.39655, "4370": 2.39007, "4375": 2.39949, "4380": 2.38305, "4385": 2.40815, "4390": 2.37583, "4395": 
2.37357, "4400": 2.38697, "4405": 2.38613, "4410": 2.36547, "4415": 2.38515, "4420": 2.34876, "4425": 2.40378, "4430": 2.42279, "4435": 2.37756, "4440": 2.38948, "4445": 2.37636, "4450": 2.38053, "4455": 2.37174, "4460": 2.38089, "4465": 2.40765, "4470": 2.43183, "4475": 2.39599, "4480": 2.37041, "4485": 2.39663, "4490": 2.37745, "4495": 2.36881, "4500": 2.36465, "4505": 2.4035, "4510": 2.38033, "4515": 2.39478, "4520": 2.40109, "4525": 2.40167, "4530": 2.407, "4535": 2.38201, "4540": 2.38076, "4545": 2.35981, "4550": 2.36865, "4555": 2.35622, "4560": 2.37539, "4565": 2.37123, "4570": 2.38376, "4575": 2.36202, "4580": 2.37877, "4585": 2.36329, "4590": 2.44118, "4595": 2.39269, "4600": 2.38925, "4605": 2.37953, "4610": 2.34314, "4615": 2.32357, "4620": 2.36018, "4625": 2.33664, "4630": 2.36782, "4635": 2.35302, "4640": 2.37309, "4645": 2.36524, "4650": 2.33813, "4655": 2.36169, "4660": 2.33649, "4665": 2.35225, "4670": 2.36191, "4675": 2.37744, "4680": 2.32789, "4685": 2.37243, "4690": 2.35114, "4695": 2.35417, "4700": 2.34543, "4705": 2.36619, "4710": 2.36316, "4715": 2.34158, "4720": 2.36033, "4725": 2.36852, "4730": 2.36755, "4735": 2.36414, "4740": 2.35501, "4745": 2.37131, "4750": 2.36773, "4755": 2.36931, "4760": 2.34729, "4765": 2.34324, "4770": 2.33487, "4775": 2.35954, "4780": 2.3573, "4785": 2.33024, "4790": 2.35553, "4795": 2.33463, "4800": 2.34448, "4805": 2.3531, "4810": 2.34434, "4815": 2.3342, "4820": 2.33629, "4825": 2.36673, "4830": 2.35695, "4835": 2.33807, "4840": 2.3324, "4845": 2.37687, "4850": 2.47113, "4855": 2.37098, "4860": 2.38564, "4865": 2.36155, "4870": 2.33063, "4875": 2.33, "4880": 2.34205, "4885": 2.34319, "4890": 2.34429, "4895": 2.30188, "4900": 2.35099, "4905": 2.31593, "4910": 2.32956, "4915": 2.33669, "4920": 2.34528, "4925": 2.34086, "4930": 2.31569, "4935": 2.32984, "4940": 2.34722, "4945": 2.31341, "4950": 2.3511, "4955": 2.32904, "4960": 2.34205, "4965": 2.34863, "4970": 2.31851, "4975": 2.32708, "4980": 2.32611, "4985": 2.34807, "4990": 2.35614, "4995": 2.29958, "5000": 2.3494, "5005": 2.36665, "5010": 2.33106, "5015": 2.31524, "5020": 2.35247, "5025": 2.32066, "5030": 2.34243, "5035": 2.33532, "5040": 2.33429, "5045": 2.31664, "5050": 2.33178, "5055": 2.33583, "5060": 2.32298, "5065": 2.34478, "5070": 2.34675, "5075": 2.32746, "5080": 2.32638, "5085": 2.31743, "5090": 2.32701, "5095": 2.31162, "5100": 2.31099, "5105": 2.32506, "5110": 2.33076, "5115": 2.31325, "5120": 2.31236, "5125": 2.29617, "5130": 2.32194, "5135": 2.32871, "5140": 2.32755, "5145": 2.33558, "5150": 2.33267, "5155": 2.32578, "5160": 2.33833, "5165": 2.31925, "5170": 2.32974, "5175": 2.33886, "5180": 2.32643, "5185": 2.29954, "5190": 2.28983, "5195": 2.33698, "5200": 2.31707, "5205": 2.29698, "5210": 2.33552, "5215": 2.29725, "5220": 2.31687, "5225": 2.28484, "5230": 2.31243, "5235": 2.30313, "5240": 2.32314, "5245": 2.30932, "5250": 2.27832, "5255": 2.3198, "5260": 2.29169, "5265": 2.30736, "5270": 2.31134, "5275": 2.33656, "5280": 2.33912, "5285": 2.33056, "5290": 2.32258, "5295": 2.30996, "5300": 2.33769, "5305": 2.28726, "5310": 2.31957, "5315": 2.29467, "5320": 2.2825, "5325": 2.32647, "5330": 2.30378, "5335": 2.31731, "5340": 2.32632, "5345": 2.31554, "5350": 2.29011, "5355": 2.31683, "5360": 2.34246, "5365": 2.32818, "5370": 2.32056, "5375": 2.33798, "5380": 2.30657, "5385": 2.30188, "5390": 2.28558, "5395": 2.26912, "5400": 2.30808, "5405": 2.29659, "5410": 2.30352, "5415": 2.32669, "5420": 2.32762, "5425": 2.31661, "5430": 2.30654, "5435": 2.32796, "5440": 2.28788, 
"5445": 2.27831, "5450": 2.30365, "5455": 2.3249, "5460": 2.28506, "5465": 2.30611, "5470": 2.30907, "5475": 2.31132, "5480": 2.31571, "5485": 2.29783, "5490": 2.30086, "5495": 2.30151, "5500": 2.28543, "5505": 2.30447, "5510": 2.28614, "5515": 2.26858, "5520": 2.27301, "5525": 2.28397, "5530": 2.29245, "5535": 2.26799, "5540": 2.27698, "5545": 2.30652, "5550": 2.30465, "5555": 2.31809, "5560": 2.28064, "5565": 2.31569, "5570": 2.26793, "5575": 2.29187, "5580": 2.29222, "5585": 2.29497, "5590": 2.28572, "5595": 2.29328, "5600": 2.29479, "5605": 2.30471, "5610": 2.27892, "5615": 2.27061, "5620": 2.28851, "5625": 2.26132, "5630": 2.28181, "5635": 2.27653, "5640": 2.29394, "5645": 2.30866, "5650": 2.29025, "5655": 2.27119, "5660": 2.27782, "5665": 2.28121, "5670": 2.30182, "5675": 2.28351, "5680": 2.27939, "5685": 2.28232, "5690": 2.27636, "5695": 2.30134, "5700": 2.27407, "5705": 2.26147, "5710": 2.30535, "5715": 2.26282, "5720": 2.24519, "5725": 2.2995, "5730": 2.30247, "5735": 2.26412, "5740": 2.27711, "5745": 2.2933, "5750": 2.31138, "5755": 2.29438, "5760": 2.27389, "5765": 2.29325, "5770": 2.25362, "5775": 2.32216, "5780": 2.31367, "5785": 2.27806, "5790": 2.28351, "5795": 2.24554, "5800": 2.25843, "5805": 2.27789, "5810": 2.26358, "5815": 2.27575, "5820": 2.258, "5825": 2.27156, "5830": 2.29126, "5835": 2.29389, "5840": 2.29862, "5845": 2.26452, "5850": 2.29126, "5855": 2.29225, "5860": 2.26799, "5865": 2.28263, "5870": 2.26838, "5875": 2.28031, "5880": 2.26178, "5885": 2.29941, "5890": 2.25298, "5895": 2.26631, "5900": 2.29345, "5905": 2.26439, "5910": 2.30736, "5915": 2.29454, "5920": 2.27312, "5925": 2.28435, "5930": 2.28442, "5935": 2.2656, "5940": 2.26144, "5945": 2.27925, "5950": 2.26351, "5955": 2.26326, "5960": 2.27128, "5965": 2.29771, "5970": 2.27274, "5975": 2.28768, "5980": 2.27417, "5985": 2.27545, "5990": 2.2548, "5995": 2.27391, "6000": 2.2437, "6005": 2.2681, "6010": 2.27697, "6015": 2.3258, "6020": 2.6241, "6025": 2.31035, "6030": 2.27631, "6035": 2.28272, "6040": 2.30454, "6045": 2.26487, "6050": 2.27772, "6055": 2.26719, "6060": 2.28813, "6065": 2.28218, "6070": 2.25104, "6075": 2.22959, "6080": 2.2617, "6085": 2.26559, "6090": 2.30175, "6095": 2.21999, "6100": 2.25961, "6105": 2.41709, "6110": 2.38986, "6115": 2.28848, "6120": 2.30748, "6125": 2.28573, "6130": 2.25318, "6135": 2.28614, "6140": 2.24999, "6145": 2.25546, "6150": 2.24328, "6155": 2.25051, "6160": 2.26508, "6165": 2.27562, "6170": 2.21233, "6175": 2.25786, "6180": 2.23255, "6185": 2.25666, "6190": 2.25785, "6195": 2.27255, "6200": 2.24737, "6205": 2.24066, "6210": 2.28158, "6215": 2.2804, "6220": 2.26483, "6225": 2.25444, "6230": 2.21837, "6235": 2.26093, "6240": 2.26943, "6245": 2.27659, "6250": 2.25916, "6255": 2.24063, "6260": 2.25094, "6265": 2.2483, "6270": 2.25476, "6275": 2.26516, "6280": 2.25288, "6285": 2.26645, "6290": 2.25069, "6295": 2.25705, "6300": 2.26454, "6305": 2.22608, "6310": 2.26753, "6315": 2.25491, "6320": 2.22957, "6325": 2.26222, "6330": 2.24343, "6335": 2.25696, "6340": 2.26491, "6345": 2.24856, "6350": 2.25357, "6355": 2.24734, "6360": 2.26832, "6365": 2.2602, "6370": 2.25286, "6375": 2.27375, "6380": 2.23451, "6385": 2.23538, "6390": 2.24256, "6395": 2.23339, "6400": 2.21727, "6405": 2.25234, "6410": 2.24885, "6415": 2.2421, "6420": 2.23297, "6425": 2.24534, "6430": 2.24517, "6435": 2.30054, "6440": 2.24383, "6445": 2.25215, "6450": 2.27084, "6455": 2.23646, "6460": 2.26431, "6465": 2.24563, "6470": 2.25761, "6475": 2.23718, "6480": 2.22584, "6485": 2.25267, "6490": 2.25276, 
"6495": 2.23876, "6500": 2.24155, "6505": 2.2428, "6510": 2.2226, "6515": 2.24988, "6520": 2.23769, "6525": 2.23349, "6530": 2.25048, "6535": 2.263, "6540": 2.27108, "6545": 2.24126, "6550": 2.22541, "6555": 2.2414, "6560": 2.27005, "6565": 2.23249, "6570": 2.22252, "6575": 2.26304, "6580": 2.24263, "6585": 2.22999, "6590": 2.23113, "6595": 2.241, "6600": 2.25603, "6605": 2.24189, "6610": 2.2314, "6615": 2.22519, "6620": 2.25329, "6625": 2.24382, "6630": 2.22881, "6635": 2.23702, "6640": 2.25661, "6645": 2.23341, "6650": 2.23641, "6655": 2.22865, "6660": 2.24057, "6665": 2.2262, "6670": 2.23958, "6675": 2.22464, "6680": 2.24176, "6685": 2.24025, "6690": 2.23726, "6695": 2.24546, "6700": 2.23616, "6705": 2.21178, "6710": 2.24777, "6715": 2.2501, "6720": 2.24699, "6725": 2.23167, "6730": 2.23238, "6735": 2.20693, "6740": 2.23238, "6745": 2.23711, "6750": 2.23429, "6755": 2.20999, "6760": 2.22664, "6765": 2.2558, "6770": 2.23676, "6775": 2.21913, "6780": 2.2062, "6785": 2.2176, "6790": 2.23369, "6795": 2.24648, "6800": 2.25043, "6805": 2.23602, "6810": 2.25955, "6815": 2.22271, "6820": 2.22363, "6825": 2.22967, "6830": 2.19783, "6835": 2.25692, "6840": 2.22582, "6845": 2.22369, "6850": 2.24309, "6855": 2.21603, "6860": 2.22874, "6865": 2.25268, "6870": 2.23762, "6875": 2.24843, "6880": 2.21873, "6885": 2.23397, "6890": 2.24175, "6895": 2.21393, "6900": 2.21878, "6905": 2.21074, "6910": 2.22165, "6915": 2.20171, "6920": 2.25659, "6925": 2.20928, "6930": 2.20465, "6935": 2.23847, "6940": 2.20659, "6945": 2.22571, "6950": 2.23876, "6955": 2.1999, "6960": 2.22662, "6965": 2.21606, "6970": 2.22699, "6975": 2.19538, "6980": 2.19785, "6985": 2.21336, "6990": 2.21652, "6995": 2.23961, "7000": 2.2277, "7005": 2.21715, "7010": 2.20022, "7015": 2.20114, "7020": 2.24693, "7025": 2.20812, "7030": 2.21688, "7035": 2.20463, "7040": 2.21337, "7045": 2.2154, "7050": 2.22007, "7055": 2.22028, "7060": 2.21128, "7065": 2.19833, "7070": 2.20785, "7075": 2.19523, "7080": 2.22051, "7085": 2.2278, "7090": 2.22281, "7095": 2.21874, "7100": 2.23263, "7105": 2.22335, "7110": 2.22914, "7115": 2.20665, "7120": 2.21868, "7125": 2.20124, "7130": 2.22044, "7135": 2.17174, "7140": 2.19495, "7145": 2.21391, "7150": 2.23669, "7155": 2.2081, "7160": 2.18878, "7165": 2.21141, "7170": 2.19383, "7175": 2.19285, "7180": 2.24094, "7185": 2.18673, "7190": 2.22771, "7195": 2.21393, "7200": 2.2152, "7205": 2.20711, "7210": 2.20619, "7215": 2.19006, "7220": 2.21369, "7225": 2.18911, "7230": 2.21646, "7235": 2.17825, "7240": 2.2319, "7245": 2.2249, "7250": 2.23961, "7255": 2.19694, "7260": 2.17526, "7265": 2.21935, "7270": 2.18611, "7275": 2.20338, "7280": 2.19467, "7285": 2.19589, "7290": 2.19984, "7295": 2.1933, "7300": 2.20736, "7305": 2.20806, "7310": 2.20164, "7315": 2.19528, "7320": 2.2021, "7325": 2.19249, "7330": 2.19996, "7335": 2.21837, "7340": 2.19265, "7345": 2.20523, "7350": 2.20332, "7355": 2.18962, "7360": 2.1919, "7365": 2.19509, "7370": 2.19751, "7375": 2.18448, "7380": 2.21498, "7385": 2.19651, "7390": 2.22719, "7395": 2.20495, "7400": 2.20601, "7405": 2.19929, "7410": 2.1574, "7415": 2.19655, "7420": 2.20461, "7425": 2.1973, "7430": 2.20618, "7435": 2.22215, "7440": 2.22347, "7445": 2.18619, "7450": 2.18619, "7455": 2.19119, "7460": 2.17067, "7465": 2.1892, "7470": 2.17173, "7475": 2.17336, "7480": 2.19569, "7485": 2.19057, "7490": 2.19506, "7495": 2.20741, "7500": 2.20794, "7505": 2.2102, "7510": 2.16186, "7515": 2.19384, "7520": 2.19948, "7525": 2.20424, "7530": 2.20212, "7535": 2.1978, "7540": 2.18394, "7545": 
2.1868, "7550": 2.21229, "7555": 2.19505, "7560": 2.20923, "7565": 2.19274, "7570": 2.18073, "7575": 2.20315, "7580": 2.17739, "7585": 2.23064, "7590": 2.19024, "7595": 2.17983, "7600": 2.18314, "7605": 2.17138, "7610": 2.21964, "7615": 2.20015, "7620": 2.20725, "7625": 2.20365, "7630": 2.1775, "7635": 2.20554, "7640": 2.1931, "7645": 2.20135, "7650": 2.19806, "7655": 2.21668, "7660": 2.20123, "7665": 2.17901, "7670": 2.19053, "7675": 2.19736, "7680": 2.19665, "7685": 2.18109, "7690": 2.20132, "7695": 2.18287, "7700": 2.16217, "7705": 2.18556, "7710": 2.17642, "7715": 2.18648, "7720": 2.16163, "7725": 2.17407, "7730": 2.21326, "7735": 2.17993, "7740": 2.20234, "7745": 2.19294, "7750": 2.18483, "7755": 2.19484, "7760": 2.17577, "7765": 2.21193, "7770": 2.21311, "7775": 2.1994, "7780": 2.19826, "7785": 2.19261, "7790": 2.16189, "7795": 2.20288, "7800": 2.215, "7805": 2.19937, "7810": 2.18822, "7815": 2.20138, "7820": 2.18814, "7825": 2.20619, "7830": 2.17682, "7835": 2.20089, "7840": 2.19337, "7845": 2.20156, "7850": 2.212, "7855": 2.16689, "7860": 2.18648, "7865": 2.17972, "7870": 2.18556, "7875": 2.17072, "7880": 2.19734, "7885": 2.17249, "7890": 2.17835, "7895": 2.15976, "7900": 2.18977, "7905": 2.17686, "7910": 2.16509, "7915": 2.18212, "7920": 2.19872, "7925": 2.17817, "7930": 2.173, "7935": 2.18407, "7940": 2.15383, "7945": 2.17447, "7950": 2.17878, "7955": 2.18342, "7960": 2.1746, "7965": 2.17137, "7970": 2.18406, "7975": 2.21243, "7980": 2.20994, "7985": 2.15927, "7990": 2.18291, "7995": 2.1821, "8000": 2.18917, "8005": 2.18573, "8010": 2.16405, "8015": 2.17058, "8020": 2.15182, "8025": 2.17286, "8030": 2.19313, "8035": 2.20004, "8040": 2.18558, "8045": 2.19334, "8050": 2.17157, "8055": 2.18897, "8060": 2.18471, "8065": 2.17921, "8070": 2.18861, "8075": 2.17931, "8080": 2.16902, "8085": 2.1834, "8090": 2.21611, "8095": 2.17756, "8100": 2.17758, "8105": 2.19215, "8110": 2.1699, "8115": 2.19011, "8120": 2.16833, "8125": 2.14331, "8130": 2.17403, "8135": 2.18831, "8140": 2.19315, "8145": 2.19581, "8150": 2.15908, "8155": 2.18093, "8160": 2.17738, "8165": 2.17157, "8170": 2.15637, "8175": 2.18764, "8180": 2.1679, "8185": 2.19651, "8190": 2.18212, "8195": 2.17767, "8200": 2.17687, "8205": 2.18373, "8210": 2.16142, "8215": 2.17694, "8220": 2.15793, "8225": 2.14942, "8230": 2.16844, "8235": 2.16249, "8240": 2.15078, "8245": 2.15655, "8250": 2.18415, "8255": 2.20012, "8260": 2.15467, "8265": 2.18499, "8270": 2.17698, "8275": 2.17336, "8280": 2.19089, "8285": 2.16546, "8290": 2.17586, "8295": 2.1913, "8300": 2.20005, "8305": 2.1607, "8310": 2.15588, "8315": 2.18674, "8320": 2.16662, "8325": 2.17757, "8330": 2.16886, "8335": 2.16215, "8340": 2.15119, "8345": 2.13722, "8350": 2.14813, "8355": 2.18385, "8360": 2.18956, "8365": 2.18062, "8370": 2.17247, "8375": 2.16883, "8380": 2.17827, "8385": 2.17784, "8390": 2.16926, "8395": 2.15705, "8400": 2.18096, "8405": 2.16932, "8410": 2.18237, "8415": 2.19198, "8420": 2.16492, "8425": 2.15392, "8430": 2.158, "8435": 2.13968, "8440": 2.17951, "8445": 2.17897, "8450": 2.17079, "8455": 2.18617, "8460": 2.17909, "8465": 2.17084, "8470": 2.16528, "8475": 2.1557, "8480": 2.14769, "8485": 2.1851, "8490": 2.16244, "8495": 2.17082, "8500": 2.14286, "8505": 2.15121, "8510": 2.17081, "8515": 2.14688, "8520": 2.15165, "8525": 2.15826, "8530": 2.15505, "8535": 2.18648, "8540": 2.16968, "8545": 2.14318, "8550": 2.15783, "8555": 2.16384, "8560": 2.15314, "8565": 2.17754, "8570": 2.16404, "8575": 2.16224, "8580": 2.15998, "8585": 2.1739, "8590": 2.16774, "8595": 
2.14729, "8600": 2.1531, "8605": 2.14557, "8610": 2.16993, "8615": 2.17828, "8620": 2.17115, "8625": 2.14865, "8630": 2.15858, "8635": 2.14794, "8640": 2.16004, "8645": 2.15517, "8650": 2.14334, "8655": 2.15554, "8660": 2.15071, "8665": 2.13248, "8670": 2.16723, "8675": 2.15387, "8680": 2.15077, "8685": 2.15301, "8690": 2.19936, "8695": 2.15736, "8700": 2.14062, "8705": 2.16788, "8710": 2.14405, "8715": 2.15867, "8720": 2.15165, "8725": 2.15822, "8730": 2.15233, "8735": 2.15669, "8740": 2.17398, "8745": 2.15104, "8750": 2.15142, "8755": 2.13854, "8760": 2.15421, "8765": 2.1795, "8770": 2.16226, "8775": 2.14205, "8780": 2.16756, "8785": 2.16823, "8790": 2.14291, "8795": 2.13417, "8800": 2.15032, "8805": 2.15284, "8810": 2.15251, "8815": 2.13595, "8820": 2.16753, "8825": 2.16226, "8830": 2.14972, "8835": 2.14551, "8840": 2.16564, "8845": 2.16729, "8850": 2.12832, "8855": 2.14464, "8860": 2.18013, "8865": 2.17079, "8870": 2.17666, "8875": 2.17525, "8880": 2.17507, "8885": 2.16236, "8890": 2.13601, "8895": 2.13907, "8900": 2.13833, "8905": 2.16712, "8910": 2.16168, "8915": 2.13184, "8920": 2.15296, "8925": 2.13778, "8930": 2.16139, "8935": 2.16544, "8940": 2.14707, "8945": 2.14512, "8950": 2.14034, "8955": 2.15891, "8960": 2.15821, "8965": 2.1327, "8970": 2.15258, "8975": 2.14771, "8980": 2.13, "8985": 2.14154, "8990": 2.17502, "8995": 2.14972, "9000": 2.16733, "9005": 2.17592, "9010": 2.15225, "9015": 2.14757, "9020": 2.13937, "9025": 2.13666, "9030": 2.13157, "9035": 2.15277, "9040": 2.14326, "9045": 2.15397, "9050": 2.16327, "9055": 2.15605, "9060": 2.13042, "9065": 2.12675, "9070": 2.14198, "9075": 2.17203, "9080": 2.14356, "9085": 2.16446, "9090": 2.15813, "9095": 2.16114, "9100": 2.15518, "9105": 2.16816, "9110": 2.14067, "9115": 2.15476, "9120": 2.12378, "9125": 2.17285, "9130": 2.14771, "9135": 2.1445, "9140": 2.14321, "9145": 2.14215, "9150": 2.15619, "9155": 2.15358, "9160": 2.15232, "9165": 2.12977, "9170": 2.13194, "9175": 2.13486, "9180": 2.14156, "9185": 2.1554, "9190": 2.14132, "9195": 2.14653, "9200": 2.15886, "9205": 2.14083, "9210": 2.17215, "9215": 2.14438, "9220": 2.14895, "9225": 2.15907, "9230": 2.16026, "9235": 2.11131, "9240": 2.1519, "9245": 2.13941, "9250": 2.12687, "9255": 2.15565, "9260": 2.16576, "9265": 2.14248, "9270": 2.13084, "9275": 2.15662, "9280": 2.14795, "9285": 2.14632, "9290": 2.17305, "9295": 2.13931, "9300": 2.13624, "9305": 2.1364, "9310": 2.14059, "9315": 2.15417, "9320": 2.13906, "9325": 2.12871, "9330": 2.14036, "9335": 2.11349, "9340": 2.15583, "9345": 2.1089, "9350": 2.14919, "9355": 2.13117, "9360": 2.12949, "9365": 2.15932, "9370": 2.13468, "9375": 2.10691, "9380": 2.15179, "9385": 2.12572, "9390": 2.11129, "9395": 2.13722, "9400": 2.13927, "9405": 2.13594, "9410": 2.132, "9415": 2.12998, "9420": 2.12926, "9425": 2.12412, "9430": 2.15928, "9435": 2.13013, "9440": 2.13103, "9445": 2.13567, "9450": 2.12739, "9455": 2.15329, "9460": 2.12584, "9465": 2.12059, "9470": 2.14922, "9475": 2.14654, "9480": 2.13989, "9485": 2.15056, "9490": 2.12491, "9495": 2.12954, "9500": 2.15806, "9505": 2.147, "9510": 2.11182, "9515": 2.11759, "9520": 2.15317, "9525": 2.0863, "9530": 2.13519, "9535": 2.14733, "9540": 2.10551, "9545": 2.12854, "9550": 2.14, "9555": 2.11507, "9560": 2.14094, "9565": 2.14143, "9570": 2.13488, "9575": 2.14818, "9580": 2.13325, "9585": 2.12238, "9590": 2.16557, "9595": 2.15085, "9600": 2.14937, "9605": 2.15287, "9610": 2.14426, "9615": 2.14428, "9620": 2.14513, "9625": 2.13694, "9630": 2.13783, "9635": 2.14403, "9640": 2.12751, "9645": 
2.12519, "9650": 2.11951, "9655": 2.11889, "9660": 2.13694, "9665": 2.13379, "9670": 2.13895, "9675": 2.12806, "9680": 2.12604, "9685": 2.13656, "9690": 2.15231, "9695": 2.15885, "9700": 2.1423, "9705": 2.12303, "9710": 2.12451, "9715": 2.09948, "9720": 2.11236, "9725": 2.12597, "9730": 2.09269, "9735": 2.14604, "9740": 2.16787, "9745": 2.14305, "9750": 2.1524, "9755": 2.13463, "9760": 2.12539, "9765": 2.12651, "9770": 2.11488, "9775": 2.14079, "9780": 2.14892, "9785": 2.1158, "9790": 2.12745, "9795": 2.10658, "9800": 2.13193, "9805": 2.1292, "9810": 2.11037, "9815": 2.12545, "9820": 2.11872, "9825": 2.10131, "9830": 2.13355, "9835": 2.14712, "9840": 2.13458, "9845": 2.14798, "9850": 2.16348, "9855": 2.11187, "9860": 2.10807, "9865": 2.13104, "9870": 2.12532, "9875": 2.13795, "9880": 2.1425, "9885": 2.11924, "9890": 2.12813, "9895": 2.13592, "9900": 2.11747, "9905": 2.11195, "9910": 2.11564, "9915": 2.14542, "9920": 2.14239, "9925": 2.11126, "9930": 2.12119, "9935": 2.13359, "9940": 2.11387, "9945": 2.10109, "9950": 2.10382, "9955": 2.12807, "9960": 2.1016, "9965": 2.11735, "9970": 2.11984, "9975": 2.1246, "9980": 2.13747, "9985": 2.11896, "9990": 2.09136, "9995": 2.11237, "10000": 2.13705, "10005": 2.12129, "10010": 2.10327, "10015": 2.1298, "10020": 2.10494, "10025": 2.12941, "10030": 2.12144, "10035": 2.10937, "10040": 2.11898, "10045": 2.09117, "10050": 2.10015, "10055": 2.1486, "10060": 2.13373, "10065": 2.11644, "10070": 2.14373, "10075": 2.13081, "10080": 2.1304, "10085": 2.11903, "10090": 2.11447, "10095": 2.12129, "10100": 2.09991, "10105": 2.11122, "10110": 2.13783, "10115": 2.12369, "10120": 2.13004, "10125": 2.13396, "10130": 2.1081, "10135": 2.12878, "10140": 2.12831, "10145": 2.12809, "10150": 2.11292, "10155": 2.10951, "10160": 2.13252, "10165": 2.13553, "10170": 2.12738, "10175": 2.11223, "10180": 2.11597, "10185": 2.104, "10190": 2.12851, "10195": 2.11483, "10200": 2.11495, "10205": 2.09824, "10210": 2.10863, "10215": 2.11675, "10220": 2.11277, "10225": 2.13265, "10230": 2.11919, "10235": 2.12413, "10240": 2.11831, "10245": 2.15294, "10250": 2.11882, "10255": 2.1141, "10260": 2.14095, "10265": 2.12372, "10270": 2.11829, "10275": 2.11383, "10280": 2.11079, "10285": 2.1228, "10290": 2.12509, "10295": 2.10471, "10300": 2.12311, "10305": 2.11377, "10310": 2.14663, "10315": 2.12137, "10320": 2.1056, "10325": 2.10872, "10330": 2.13855, "10335": 2.10612, "10340": 2.10362, "10345": 2.11045, "10350": 2.12611, "10355": 2.13476, "10360": 2.11279, "10365": 2.11719, "10370": 2.12031, "10375": 2.12926, "10380": 2.12719, "10385": 2.12816, "10390": 2.12566, "10395": 2.11655, "10400": 2.11594, "10405": 2.11751, "10410": 2.10011, "10415": 2.10532, "10420": 2.11305, "10425": 2.10909, "10430": 2.13552, "10435": 2.13935, "10440": 2.12763, "10445": 2.12356, "10450": 2.12115, "10455": 2.10193, "10460": 2.12403, "10465": 2.09714, "10470": 2.12861, "10475": 2.11386, "10480": 2.11885, "10485": 2.10377, "10490": 2.1137, "10495": 2.12688, "10500": 2.11529, "10505": 2.11967, "10510": 2.08943, "10515": 2.14046, "10520": 2.09176, "10525": 2.11186, "10530": 2.13703, "10535": 2.11123, "10540": 2.10276, "10545": 2.0948, "10550": 2.07201, "10555": 2.11213, "10560": 2.12967, "10565": 2.12661, "10570": 2.10095, "10575": 2.12267, "10580": 2.1046, "10585": 2.12764, "10590": 2.10753, "10595": 2.1328, "10600": 2.13161, "10605": 2.12433, "10610": 2.10971, "10615": 2.09825, "10620": 2.11554, "10625": 2.11231, "10630": 2.11633, "10635": 2.12566, "10640": 2.09289, "10645": 2.11789, "10650": 2.09549, "10655": 2.11788, 
"10660": 2.10934, "10665": 2.09515, "10670": 2.08755, "10675": 2.15723, "10680": 2.11196, "10685": 2.12102, "10690": 2.10483, "10695": 2.10868, "10700": 2.12869, "10705": 2.10281, "10710": 2.12207, "10715": 2.10315, "10720": 2.08745, "10725": 2.09969, "10730": 2.09192, "10735": 2.08018, "10740": 2.11978, "10745": 2.11036, "10750": 2.09085, "10755": 2.10234, "10760": 2.09228, "10765": 2.10821, "10770": 2.09742, "10775": 2.11671, "10780": 2.0997, "10785": 2.11465, "10790": 2.08584, "10795": 2.12919, "10800": 2.10384, "10805": 2.10872, "10810": 2.08773, "10815": 2.1204, "10820": 2.08514, "10825": 2.12457, "10830": 2.12382, "10835": 2.09607, "10840": 2.09272, "10845": 2.10662, "10850": 2.09772, "10855": 2.0816, "10860": 2.10617, "10865": 2.10599, "10870": 2.11352, "10875": 2.11387, "10880": 2.11163, "10885": 2.10391, "10890": 2.12438, "10895": 2.09892, "10900": 2.09767, "10905": 2.11921, "10910": 2.1069, "10915": 2.09812, "10920": 2.06512, "10925": 2.10993, "10930": 2.09908, "10935": 2.0952, "10940": 2.1181, "10945": 2.11874, "10950": 2.11192, "10955": 2.12485, "10960": 2.1234, "10965": 2.07656, "10970": 2.10668, "10975": 2.11784, "10980": 2.09581, "10985": 2.06169, "10990": 2.08577, "10995": 2.08326, "11000": 2.08643, "11005": 2.13488, "11010": 2.12316, "11015": 2.09988, "11020": 2.09934, "11025": 2.08519, "11030": 2.08618, "11035": 2.11979, "11040": 2.07459, "11045": 2.07742, "11050": 2.11801, "11055": 2.12409, "11060": 2.08903, "11065": 2.11903, "11070": 2.11794, "11075": 2.07696, "11080": 2.10103, "11085": 2.11285, "11090": 2.09399, "11095": 2.09761, "11100": 2.12517, "11105": 2.12017, "11110": 2.10488, "11115": 2.0916, "11120": 2.11088, "11125": 2.09075, "11130": 2.10061, "11135": 2.12238, "11140": 2.1351, "11145": 2.1116, "11150": 2.10815, "11155": 2.11832, "11160": 2.09846, "11165": 2.10788, "11170": 2.11066, "11175": 2.09878, "11180": 2.07232, "11185": 2.10078, "11190": 2.10953, "11195": 2.11846, "11200": 2.13852, "11205": 2.10046, "11210": 2.09719, "11215": 2.09373, "11220": 2.11585, "11225": 2.09579, "11230": 2.0957, "11235": 2.12366, "11240": 2.06684, "11245": 2.11186, "11250": 2.12221, "11255": 2.11884, "11260": 2.11176, "11265": 2.10502, "11270": 2.12279, "11275": 2.09392, "11280": 2.08965, "11285": 2.11865, "11290": 2.10199, "11295": 2.09848, "11300": 2.11056, "11305": 2.07467, "11310": 2.10661, "11315": 2.09604, "11320": 2.10094, "11325": 2.1102, "11330": 2.09827, "11335": 2.10754, "11340": 2.08711, "11345": 2.09683, "11350": 2.09593, "11355": 2.10604, "11360": 2.13764, "11365": 2.11769, "11370": 2.11599, "11375": 2.07806, "11380": 2.09378, "11385": 2.08197, "11390": 2.11832, "11395": 2.12686, "11400": 2.0797, "11405": 2.09322, "11410": 2.10133, "11415": 2.07801, "11420": 2.09766, "11425": 2.10988, "11430": 2.11555, "11435": 2.09363, "11440": 2.10796, "11445": 2.11606, "11450": 2.10656, "11455": 2.09498, "11460": 2.08499, "11465": 2.0864, "11470": 2.10548, "11475": 2.12078, "11480": 2.07755, "11485": 2.08593, "11490": 2.07679, "11495": 2.09171, "11500": 2.10717, "11505": 2.0677, "11510": 2.11086, "11515": 2.10166, "11520": 2.09201, "11525": 2.10203, "11530": 2.09958, "11535": 2.0982, "11540": 2.10121, "11545": 2.11205, "11550": 2.10037, "11555": 2.09233, "11560": 2.0736, "11565": 2.09349, "11570": 2.08111, "11575": 2.09715, "11580": 2.08764, "11585": 2.07586, "11590": 2.11504, "11595": 2.08885, "11600": 2.09868, "11605": 2.09268, "11610": 2.08423, "11615": 2.0953, "11620": 2.10883, "11625": 2.07596, "11630": 2.08523, "11635": 2.10509, "11640": 2.09546, "11645": 2.09385, "11650": 
2.09493, "11655": 2.06252, "11660": 2.08387, "11665": 2.11379, "11670": 2.10022, "11675": 2.09005, "11680": 2.08317, "11685": 2.0658, "11690": 2.11245, "11695": 2.09584, "11700": 2.09415, "11705": 2.07909, "11710": 2.08744, "11715": 2.08908, "11720": 2.06363, "11725": 2.09979, "11730": 2.0973, "11735": 2.07969, "11740": 2.08741, "11745": 2.10324, "11750": 2.08704, "11755": 2.08798, "11760": 2.10917, "11765": 2.10018, "11770": 2.08632, "11775": 2.08767, "11780": 2.11639, "11785": 2.09391, "11790": 2.1125, "11795": 2.10103, "11800": 2.07602, "11805": 2.0642, "11810": 2.1016, "11815": 2.0975, "11820": 2.11272, "11825": 2.10795, "11830": 2.09372, "11835": 2.09609, "11840": 2.0981, "11845": 2.10431, "11850": 2.1057, "11855": 2.07646, "11860": 2.10388, "11865": 2.07681, "11870": 2.08561, "11875": 2.08176, "11880": 2.11467, "11885": 2.08464, "11890": 2.08592, "11895": 2.07737, "11900": 2.07239, "11905": 2.08317, "11910": 2.10087, "11915": 2.09811, "11920": 2.08913, "11925": 2.08832, "11930": 2.06649, "11935": 2.09206, "11940": 2.12043, "11945": 2.0856, "11950": 2.09982, "11955": 2.08148, "11960": 2.08737, "11965": 2.0642, "11970": 2.0665, "11975": 2.04919, "11980": 2.06914, "11985": 2.08926, "11990": 2.0807, "11995": 2.07151, "12000": 2.09255, "12005": 2.0715, "12010": 2.08105, "12015": 2.09307, "12020": 2.11088, "12025": 2.09311, "12030": 2.08721, "12035": 2.09251, "12040": 2.07953, "12045": 2.11445, "12050": 2.10014, "12055": 2.08075, "12060": 2.07396, "12065": 2.07624, "12070": 2.08174, "12075": 2.07876, "12080": 2.08878, "12085": 2.08827, "12090": 2.0852, "12095": 2.10189, "12100": 2.07426, "12105": 2.09122, "12110": 2.11063, "12115": 2.07563, "12120": 2.10346, "12125": 2.09532, "12130": 2.08464, "12135": 2.0632, "12140": 2.08556, "12145": 2.07717, "12150": 2.08419, "12155": 2.08142, "12160": 2.08365, "12165": 2.11068, "12170": 2.07687, "12175": 2.08189, "12180": 2.06704, "12185": 2.08055, "12190": 2.08854, "12195": 2.07006, "12200": 2.07641, "12205": 2.09789, "12210": 2.07083, "12215": 2.06909, "12220": 2.07083, "12225": 2.08854, "12230": 2.08462, "12235": 2.08542, "12240": 2.0727, "12245": 2.08665, "12250": 2.07384, "12255": 2.07794, "12260": 2.0812, "12265": 2.08476, "12270": 2.06024, "12275": 2.09773, "12280": 2.0843, "12285": 2.09258, "12290": 2.09485, "12295": 2.08777, "12300": 2.09444, "12305": 2.06754, "12310": 2.09876, "12315": 2.06645, "12320": 2.06555, "12325": 2.08677, "12330": 2.06657, "12335": 2.08346, "12340": 2.0892, "12345": 2.0838, "12350": 2.09027, "12355": 2.08387, "12360": 2.07634, "12365": 2.10038, "12370": 2.0793, "12375": 2.10046, "12380": 2.04073, "12385": 2.0606, "12390": 2.07037, "12395": 2.08431, "12400": 2.08155, "12405": 2.07703, "12410": 2.07781, "12415": 2.06817, "12420": 2.07003, "12425": 2.08866, "12430": 2.07059, "12435": 2.07588, "12440": 2.05763, "12445": 2.08343, "12450": 2.07371, "12455": 2.08404, "12460": 2.08417, "12465": 2.09864, "12470": 2.09219, "12475": 2.08007, "12480": 2.07961, "12485": 2.08571, "12490": 2.10495, "12495": 2.04795, "12500": 2.0817, "12505": 2.08591, "12510": 2.08478, "12515": 2.081, "12520": 2.06408, "12525": 2.09281, "12530": 2.07566, "12535": 2.08868, "12540": 2.09168, "12545": 2.09567, "12550": 2.06258, "12555": 2.07827, "12560": 2.06494, "12565": 2.09309, "12570": 2.08164, "12575": 2.07179, "12580": 2.06722, "12585": 2.06575, "12590": 2.10382, "12595": 2.0778, "12600": 2.09111, "12605": 2.06705, "12610": 2.07005, "12615": 2.06128, "12620": 2.07767, "12625": 2.07608, "12630": 2.08175, "12635": 2.08033, "12640": 2.07565, 
"12645": 2.06678, "12650": 2.08384, "12655": 2.05178, "12660": 2.07225, "12665": 2.08882, "12670": 2.07281, "12675": 2.10298, "12680": 2.09698, "12685": 2.07017, "12690": 2.05973, "12695": 2.08172, "12700": 2.07379, "12705": 2.08501, "12710": 2.07498, "12715": 2.08857, "12720": 2.07753, "12725": 2.0886, "12730": 2.08569, "12735": 2.06083, "12740": 2.0804, "12745": 2.06221, "12750": 2.07042, "12755": 2.06586, "12760": 2.07481, "12765": 2.04833, "12770": 2.06097, "12775": 2.06575, "12780": 2.07621, "12785": 2.05487, "12790": 2.07826, "12795": 2.09252, "12800": 2.07185, "12805": 2.07482, "12810": 2.06449, "12815": 2.07517, "12820": 2.08208, "12825": 2.07676, "12830": 2.07449, "12835": 2.0705, "12840": 2.08172, "12845": 2.0687, "12850": 2.06607, "12855": 2.0739, "12860": 2.07418, "12865": 2.06356, "12870": 2.07764, "12875": 2.08088, "12880": 2.0878, "12885": 2.07356, "12890": 2.06559, "12895": 2.0772, "12900": 2.06678, "12905": 2.07459, "12910": 2.10668, "12915": 2.06317, "12920": 2.09818, "12925": 2.07493, "12930": 2.07782, "12935": 2.04551, "12940": 2.08037, "12945": 2.04753, "12950": 2.06498, "12955": 2.09422, "12960": 2.07738, "12965": 2.0356, "12970": 2.07662, "12975": 2.0585, "12980": 2.06968, "12985": 2.07817, "12990": 2.04981, "12995": 2.09275, "13000": 2.09038, "13005": 2.06588, "13010": 2.10129, "13015": 2.09242, "13020": 2.06095, "13025": 2.06721, "13030": 2.06583, "13035": 2.09245, "13040": 2.07292, "13045": 2.07773, "13050": 2.05746, "13055": 2.07828, "13060": 2.08084, "13065": 2.06402, "13070": 2.06791, "13075": 2.05463, "13080": 2.05996, "13085": 2.06028, "13090": 2.06761, "13095": 2.06138, "13100": 2.07176, "13105": 2.06527, "13110": 2.04824, "13115": 2.03376, "13120": 2.0466, "13125": 2.0579, "13130": 2.02928, "13135": 2.07207, "13140": 2.05062, "13145": 2.0647, "13150": 2.07787, "13155": 2.07347, "13160": 2.08311, "13165": 2.07516, "13170": 2.04651, "13175": 2.06603, "13180": 2.05924, "13185": 2.05516, "13190": 2.06412, "13195": 2.04919, "13200": 2.07115, "13205": 2.04995, "13210": 2.04964, "13215": 2.06687, "13220": 2.06859, "13225": 2.06653, "13230": 2.06296, "13235": 2.05638, "13240": 2.06421, "13245": 2.04029, "13250": 2.06719, "13255": 2.05625, "13260": 2.07211, "13265": 2.05605, "13270": 2.07196, "13275": 2.05846, "13280": 2.07206, "13285": 2.03939, "13290": 2.11172, "13295": 2.06134, "13300": 2.08392, "13305": 2.07407, "13310": 2.06975, "13315": 2.04867, "13320": 2.0668, "13325": 2.07354, "13330": 2.06022, "13335": 2.06176, "13340": 2.099, "13345": 2.06948, "13350": 2.06125, "13355": 2.06961, "13360": 2.06484, "13365": 2.07297, "13370": 2.06173, "13375": 2.07617, "13380": 2.07992, "13385": 2.06793, "13390": 2.1005, "13395": 2.07221, "13400": 2.05787, "13405": 2.08884, "13410": 2.0784, "13415": 2.04112, "13420": 2.04088, "13425": 2.0674, "13430": 2.06764, "13435": 2.0613, "13440": 2.05595, "13445": 2.06798, "13450": 2.05842, "13455": 2.07667, "13460": 2.06599, "13465": 2.05884, "13470": 2.08042, "13475": 2.08347, "13480": 2.05958, "13485": 2.05325, "13490": 2.05049, "13495": 2.04936, "13500": 2.05809, "13505": 2.07853, "13510": 2.04555, "13515": 2.06056, "13520": 2.07421, "13525": 2.05108, "13530": 2.08905, "13535": 2.05309, "13540": 2.0653, "13545": 2.05606, "13550": 2.06133, "13555": 2.09008, "13560": 2.06516, "13565": 2.06628, "13570": 2.04586, "13575": 2.07193, "13580": 2.05473, "13585": 2.07649, "13590": 2.07259, "13595": 2.05449, "13600": 2.08329, "13605": 2.04809, "13610": 2.02673, "13615": 2.07524, "13620": 2.08718, "13625": 2.05992, "13630": 2.06414, "13635": 
2.04933, "13640": 2.06854, "13645": 2.06893, "13650": 2.04666, "13655": 2.07434, "13660": 2.04695, "13665": 2.0701, "13670": 2.10116, "13675": 2.07124, "13680": 2.05032, "13685": 2.07732, "13690": 2.05636, "13695": 2.08313, "13700": 2.05123, "13705": 2.06351, "13710": 2.0442, "13715": 2.02417, "13720": 2.05922, "13725": 2.03773, "13730": 2.05263, "13735": 2.08002, "13740": 2.08567, "13745": 2.06507, "13750": 2.07234, "13755": 2.05688, "13760": 2.0554, "13765": 2.04275, "13770": 2.06712, "13775": 2.03714, "13780": 2.0554, "13785": 2.05634, "13790": 2.06758, "13795": 2.04483, "13800": 2.05725, "13805": 2.07959, "13810": 2.05632, "13815": 2.04382, "13820": 2.04355, "13825": 2.02041, "13830": 2.04144, "13835": 2.06191, "13840": 2.04955, "13845": 2.04676, "13850": 2.06337, "13855": 2.05038, "13860": 2.04948, "13865": 2.08488, "13870": 2.05339, "13875": 2.0265, "13880": 2.06078, "13885": 2.0693, "13890": 2.07097, "13895": 2.06694, "13900": 2.07001, "13905": 2.07203, "13910": 2.0713, "13915": 2.04037, "13920": 2.06362, "13925": 2.06286, "13930": 2.06865, "13935": 2.06548, "13940": 2.06064, "13945": 2.07132, "13950": 2.06138, "13955": 2.06009, "13960": 2.09573, "13965": 2.02468, "13970": 2.05937, "13975": 2.04909, "13980": 2.07129, "13985": 2.05488, "13990": 2.06543, "13995": 2.07007, "14000": 2.03328, "14005": 2.06447, "14010": 2.08177, "14015": 2.06895, "14020": 2.04102, "14025": 2.07857, "14030": 2.04025, "14035": 2.06863, "14040": 2.02559, "14045": 2.04406, "14050": 2.04919, "14055": 2.03838, "14060": 2.05754, "14065": 2.08003, "14070": 2.0721, "14075": 2.0404, "14080": 2.06515, "14085": 2.06575, "14090": 2.03437, "14095": 2.06101, "14100": 2.05416, "14105": 2.04957, "14110": 2.03989, "14115": 2.04386, "14120": 2.05784, "14125": 2.05917, "14130": 2.03759, "14135": 2.05284, "14140": 2.0454, "14145": 2.05085, "14150": 2.04164, "14155": 2.07772, "14160": 2.03186, "14165": 2.05661, "14170": 2.04199, "14175": 2.06038, "14180": 2.06228, "14185": 2.05647, "14190": 2.04884, "14195": 2.04964, "14200": 2.03814, "14205": 2.07076, "14210": 2.03127, "14215": 2.05855, "14220": 2.07697, "14225": 2.05051, "14230": 2.04774, "14235": 2.06902, "14240": 2.05025, "14245": 2.08478, "14250": 2.05808, "14255": 2.07577, "14260": 2.06765, "14265": 2.05585, "14270": 2.04616, "14275": 2.06479, "14280": 2.02956, "14285": 2.0475, "14290": 2.07625, "14295": 2.05479, "14300": 2.03371, "14305": 2.0658, "14310": 2.0518, "14315": 2.0508, "14320": 2.0439, "14325": 2.06617, "14330": 2.05503, "14335": 2.06062, "14340": 2.05607, "14345": 2.07949, "14350": 2.07967, "14355": 2.04723, "14360": 2.03505, "14365": 2.04103, "14370": 2.01731, "14375": 2.04965, "14380": 2.0458, "14385": 2.02758, "14390": 2.06024, "14395": 2.04216, "14400": 2.05481, "14405": 2.02223, "14410": 2.04766, "14415": 2.06105, "14420": 2.03854, "14425": 2.02769, "14430": 2.04751, "14435": 2.06291, "14440": 2.0461, "14445": 2.05784, "14450": 2.05197, "14455": 2.06049, "14460": 2.02842, "14465": 2.01911, "14470": 2.06584, "14475": 2.05385, "14480": 2.08214, "14485": 2.02456, "14490": 2.04987, "14495": 2.06268, "14500": 2.05372, "14505": 2.05888, "14510": 2.04459, "14515": 2.0697, "14520": 2.06719, "14525": 2.05988, "14530": 2.05521, "14535": 2.068, "14540": 2.03825, "14545": 2.05217, "14550": 2.05738, "14555": 2.04986, "14560": 2.04165, "14565": 2.05759, "14570": 2.06302, "14575": 2.06759, "14580": 2.08043, "14585": 2.04089, "14590": 2.04702, "14595": 2.04988, "14600": 2.03446, "14605": 2.03122, "14610": 2.02787, "14615": 2.05649, "14620": 2.04071, "14625": 2.04883, 
"14630": 2.03453, "14635": 2.06355, "14640": 2.06055, "14645": 2.04396, "14650": 2.03193, "14655": 2.04933, "14660": 2.052, "14665": 2.04027, "14670": 2.06339, "14675": 2.06619, "14680": 2.04584, "14685": 2.05124, "14690": 2.05037, "14695": 2.04355, "14700": 2.05978, "14705": 2.03022, "14710": 2.03807, "14715": 2.05596, "14720": 2.04234, "14725": 2.07452, "14730": 2.07858, "14735": 2.02605, "14740": 2.04823, "14745": 2.07963, "14750": 2.04326, "14755": 2.05238, "14760": 2.03427, "14765": 2.04627, "14770": 2.06055, "14775": 2.05011, "14780": 2.05703, "14785": 2.03165, "14790": 2.0427, "14795": 2.04476, "14800": 2.04977, "14805": 2.08181, "14810": 2.03509, "14815": 2.05922, "14820": 2.04415, "14825": 2.05453, "14830": 2.05218, "14835": 2.05331, "14840": 2.02614, "14845": 2.03126, "14850": 2.04753, "14855": 2.05234, "14860": 2.05718, "14865": 2.03351, "14870": 2.05954, "14875": 2.03539, "14880": 2.03116, "14885": 2.04809, "14890": 2.03178, "14895": 2.08008, "14900": 2.05878, "14905": 2.03346, "14910": 2.05213, "14915": 2.04854, "14920": 2.03643, "14925": 2.05215, "14930": 2.01743, "14935": 2.05017, "14940": 2.0755, "14945": 2.03158, "14950": 2.0453, "14955": 2.03246, "14960": 2.05315, "14965": 2.05166, "14970": 2.04138, "14975": 2.04115, "14980": 2.04656, "14985": 2.04797, "14990": 2.0702, "14995": 2.0433, "15000": 2.04904, "15005": 2.04393, "15010": 2.034, "15015": 2.01362, "15020": 2.03905, "15025": 2.04691, "15030": 2.06146, "15035": 2.02632, "15040": 2.02271, "15045": 2.02536, "15050": 2.04823, "15055": 2.02737, "15060": 2.03006, "15065": 2.04018, "15070": 2.0422, "15075": 2.02296, "15080": 2.06802, "15085": 2.0559, "15090": 2.02935, "15095": 2.06334, "15100": 2.05483, "15105": 2.0339, "15110": 2.04143, "15115": 2.05722, "15120": 2.07494, "15125": 2.03693, "15130": 2.05631, "15135": 2.05016, "15140": 2.06699, "15145": 2.02625, "15150": 2.03134, "15155": 2.03768, "15160": 2.0403, "15165": 2.02467, "15170": 2.05159, "15175": 2.04845, "15180": 2.0534, "15185": 2.04725, "15190": 2.05491, "15195": 2.0423, "15200": 2.06904, "15205": 2.02926, "15210": 2.06016, "15215": 2.04204, "15220": 2.04194, "15225": 2.02986, "15230": 2.05211, "15235": 2.01635, "15240": 2.04608, "15245": 2.06029, "15250": 2.00499, "15255": 2.02864, "15260": 2.02557, "15265": 2.02199, "15270": 2.02395, "15275": 2.03093, "15280": 2.06618, "15285": 2.05203, "15290": 2.04355, "15295": 2.02785, "15300": 2.03241, "15305": 2.0502, "15310": 2.03142, "15315": 2.02737, "15320": 2.04029, "15325": 2.06126, "15330": 2.01759, "15335": 2.04456, "15340": 2.03182, "15345": 2.02093, "15350": 2.02011, "15355": 2.02314, "15360": 2.05124, "15365": 2.03191, "15370": 2.04084, "15375": 2.02542, "15380": 2.05861, "15385": 2.06444, "15390": 2.04532, "15395": 2.07027, "15400": 2.0579, "15405": 2.06829, "15410": 2.05209, "15415": 2.04386, "15420": 2.06307, "15425": 2.06817, "15430": 2.03603, "15435": 2.04501, "15440": 2.05189, "15445": 2.02654, "15450": 2.01535, "15455": 2.03559, "15460": 2.0381, "15465": 2.04605, "15470": 2.01797, "15475": 2.01805, "15480": 2.05625, "15485": 2.03695, "15490": 2.02846, "15495": 2.03985, "15500": 2.02563, "15505": 2.03617, "15510": 2.06448, "15515": 2.05473, "15520": 2.02719, "15525": 2.03176, "15530": 2.01789, "15535": 2.03123, "15540": 2.05053, "15545": 2.03026, "15550": 2.04792, "15555": 2.03246, "15560": 2.05853, "15565": 2.0427, "15570": 2.03759, "15575": 2.03235, "15580": 2.04563, "15585": 2.04794, "15590": 2.02666, "15595": 2.05088, "15600": 2.02322, "15605": 2.04825, "15610": 2.02788, "15615": 2.04032, "15620": 
2.02773, "15625": 2.0383, "15630": 2.01791, "15635": 2.02699, "15640": 2.04191, "15645": 2.0116, "15650": 2.04678, "15655": 2.04004, "15660": 2.06222, "15665": 2.04367, "15670": 2.04316, "15675": 2.04754, "15680": 2.03463, "15685": 2.02359, "15690": 2.05432, "15695": 2.03424, "15700": 2.00935, "15705": 2.04098, "15710": 2.06023, "15715": 2.03618, "15720": 2.03226, "15725": 2.05896, "15730": 2.03342, "15735": 2.04185, "15740": 2.02388, "15745": 2.05228, "15750": 2.04107, "15755": 2.0367, "15760": 2.03643, "15765": 2.06688, "15770": 2.04597, "15775": 2.03221, "15780": 2.04492, "15785": 2.02563, "15790": 2.03587, "15795": 2.04493, "15800": 2.05509, "15805": 2.04274, "15810": 2.04748, "15815": 2.05149, "15820": 2.02794, "15825": 2.03827, "15830": 2.04399, "15835": 2.04407, "15840": 2.05975, "15845": 2.04808, "15850": 2.01726, "15855": 2.06468, "15860": 2.0457, "15865": 2.01515, "15870": 2.02528, "15875": 2.02122, "15880": 2.04379, "15885": 2.00369, "15890": 2.05608, "15895": 2.01336, "15900": 2.04444, "15905": 2.01666, "15910": 2.06143, "15915": 2.00975, "15920": 2.02261, "15925": 2.0435, "15930": 1.9966, "15935": 2.03575, "15940": 2.0395, "15945": 2.03901, "15950": 2.03122, "15955": 2.03619, "15960": 2.00581, "15965": 2.02787, "15970": 2.03566, "15975": 2.04771, "15980": 2.04014, "15985": 2.02882, "15990": 2.02894, "15995": 2.02153, "16000": 2.01724, "16005": 2.03856, "16010": 2.03769, "16015": 2.01435, "16020": 2.02999, "16025": 2.0208, "16030": 2.02567, "16035": 2.01807, "16040": 2.00901, "16045": 2.05659, "16050": 2.05465, "16055": 2.04156, "16060": 2.0234, "16065": 2.01026, "16070": 2.02302, "16075": 2.03604, "16080": 2.02588, "16085": 2.04435, "16090": 2.04568, "16095": 2.02793, "16100": 2.03929, "16105": 2.00814, "16110": 2.03022, "16115": 2.0504, "16120": 2.04743, "16125": 2.01886, "16130": 2.01585, "16135": 2.03841, "16140": 2.02541, "16145": 2.02494, "16150": 2.00836, "16155": 2.04398, "16160": 2.02566, "16165": 2.04018, "16170": 2.02328, "16175": 2.0416, "16180": 2.0507, "16185": 2.01997, "16190": 2.01997, "16195": 2.04384, "16200": 2.03918, "16205": 2.00429, "16210": 2.04853, "16215": 2.02478, "16220": 2.04274, "16225": 2.03698, "16230": 2.01134, "16235": 2.00056, "16240": 2.06099, "16245": 2.03579, "16250": 2.04657, "16255": 2.03827, "16260": 2.01259, "16265": 2.03326, "16270": 2.03355, "16275": 2.02808, "16280": 2.03168, "16285": 2.02839, "16290": 2.05609, "16295": 2.02072, "16300": 2.02746, "16305": 2.04045, "16310": 2.05274, "16315": 2.03117, "16320": 2.03305, "16325": 2.05444, "16330": 2.01019, "16335": 2.02626, "16340": 2.04405, "16345": 2.03335, "16350": 2.01763, "16355": 2.05007, "16360": 2.01907, "16365": 2.01417, "16370": 2.01135, "16375": 2.0023, "16380": 2.02731, "16385": 2.03623, "16390": 2.03883, "16395": 2.0411, "16400": 2.03461, "16405": 2.02017, "16410": 2.02881, "16415": 2.0545, "16420": 2.00403, "16425": 2.02653, "16430": 2.0428, "16435": 2.02895, "16440": 2.01559, "16445": 2.0292, "16450": 2.05376, "16455": 2.0228, "16460": 2.01064, "16465": 2.01946, "16470": 2.02112, "16475": 2.02818, "16480": 2.01522, "16485": 2.02842, "16490": 2.04919, "16495": 2.02481, "16500": 2.02657, "16505": 2.04192, "16510": 2.04909, "16515": 2.02203, "16520": 2.02778, "16525": 2.01106, "16530": 2.022, "16535": 2.03643, "16540": 2.03718, "16545": 2.03054, "16550": 2.02167, "16555": 2.0301, "16560": 2.04336, "16565": 2.01624, "16570": 2.02516, "16575": 2.01098, "16580": 2.03644, "16585": 2.03921, "16590": 2.03131, "16595": 2.01616, "16600": 2.0479, "16605": 2.02198, "16610": 2.04486, 
"16615": 2.01919, "16620": 2.01545, "16625": 2.0319, "16630": 2.04056, "16635": 2.02582, "16640": 2.04281, "16645": 2.02556, "16650": 2.03602, "16655": 2.02398, "16660": 2.04064, "16665": 2.01866, "16670": 2.02635, "16675": 1.99382, "16680": 2.03134, "16685": 2.03557, "16690": 2.00989, "16695": 2.00744, "16700": 2.01767, "16705": 2.04434, "16710": 2.02254, "16715": 2.04013, "16720": 2.01526, "16725": 2.04269, "16730": 2.01636, "16735": 2.03832, "16740": 2.00558, "16745": 2.02391, "16750": 2.02637, "16755": 2.03204, "16760": 2.02067, "16765": 2.02296, "16770": 2.02176, "16775": 2.0422, "16780": 2.00829, "16785": 2.01799, "16790": 2.0513, "16795": 2.02469, "16800": 2.03958, "16805": 2.03931, "16810": 2.04091, "16815": 2.03991, "16820": 2.05484, "16825": 2.04312, "16830": 2.03407, "16835": 2.01666, "16840": 2.02004, "16845": 2.02164, "16850": 2.02311, "16855": 2.00486, "16860": 2.04304, "16865": 2.03944, "16870": 2.01814, "16875": 2.00721, "16880": 2.02294, "16885": 2.00405, "16890": 2.02393, "16895": 2.01425, "16900": 2.03529, "16905": 2.00415, "16910": 2.03675, "16915": 2.04701, "16920": 2.03141, "16925": 2.02636, "16930": 2.02096, "16935": 2.00158, "16940": 2.00315, "16945": 2.01861, "16950": 2.03803, "16955": 2.02645, "16960": 2.05001, "16965": 2.03357, "16970": 2.05461, "16975": 2.03201, "16980": 2.0028, "16985": 2.01769, "16990": 2.02059, "16995": 2.01989, "17000": 2.03482, "17005": 2.02535, "17010": 2.00198, "17015": 2.00755, "17020": 2.02567, "17025": 2.01613, "17030": 2.04396, "17035": 2.00615, "17040": 2.0187, "17045": 2.0189, "17050": 2.04146, "17055": 2.03119, "17060": 2.01722, "17065": 2.03286, "17070": 2.03542, "17075": 2.02635, "17080": 2.01215, "17085": 2.01699, "17090": 2.04225, "17095": 2.02562, "17100": 2.02519, "17105": 2.02757, "17110": 2.00018, "17115": 2.02658, "17120": 2.03795, "17125": 2.02028, "17130": 2.01416, "17135": 2.01925, "17140": 2.02759, "17145": 1.98021, "17150": 2.02949, "17155": 2.00168, "17160": 2.02039, "17165": 2.0357, "17170": 1.99151, "17175": 2.03296, "17180": 2.01148, "17185": 2.01013, "17190": 2.03688, "17195": 2.04334, "17200": 2.02461, "17205": 2.03318, "17210": 2.04789, "17215": 2.03526, "17220": 2.03717, "17225": 2.02235, "17230": 2.0453, "17235": 2.02388, "17240": 2.03247, "17245": 2.02827, "17250": 2.04683, "17255": 2.00878, "17260": 2.0149, "17265": 2.06265, "17270": 2.00957, "17275": 2.02246, "17280": 2.022, "17285": 2.03886, "17290": 2.02212, "17295": 2.00954, "17300": 2.01133, "17305": 1.99817, "17310": 2.03345, "17315": 2.02497, "17320": 2.00913, "17325": 2.03655, "17330": 2.00665, "17335": 2.01441, "17340": 2.02171, "17345": 2.02886, "17350": 2.01761, "17355": 2.03286, "17360": 2.04102, "17365": 2.02023, "17370": 2.01159, "17375": 2.02132, "17380": 2.03354, "17385": 2.0442, "17390": 2.02327, "17395": 2.01114, "17400": 2.01029, "17405": 2.01865, "17410": 2.01086, "17415": 2.04391, "17420": 2.03306, "17425": 2.02403, "17430": 2.03084, "17435": 2.00703, "17440": 2.01112, "17445": 2.00417, "17450": 2.01204, "17455": 1.99247, "17460": 2.00494, "17465": 2.05459, "17470": 2.03514, "17475": 2.0313, "17480": 2.01111, "17485": 2.03341, "17490": 2.04589, "17495": 2.01857, "17500": 2.02759, "17505": 2.007, "17510": 2.03688, "17515": 2.03586, "17520": 2.02306, "17525": 2.02157, "17530": 2.01289, "17535": 2.05455, "17540": 2.02022, "17545": 2.02665, "17550": 2.01225, "17555": 2.01698, "17560": 2.03627, "17565": 2.02896, "17570": 2.02385, "17575": 2.0053, "17580": 2.02108, "17585": 2.02204, "17590": 2.01286, "17595": 2.01817, "17600": 2.01072, 
"17605": 2.00663, "17610": 2.01719, "17615": 2.03, "17620": 2.03549, "17625": 2.03502, "17630": 1.99702, "17635": 2.03715, "17640": 1.9752, "17645": 2.02902, "17650": 2.02147, "17655": 2.00896, "17660": 2.01302, "17665": 2.02888, "17670": 2.01199, "17675": 2.01791, "17680": 2.02206, "17685": 2.02401, "17690": 1.99492, "17695": 2.01467, "17700": 2.02487, "17705": 2.01726, "17710": 2.03721, "17715": 2.03726, "17720": 2.03921, "17725": 2.02949, "17730": 1.99687, "17735": 2.0215, "17740": 2.01219, "17745": 2.03369, "17750": 2.0487, "17755": 2.03767, "17760": 2.02179, "17765": 2.02141, "17770": 2.02298, "17775": 2.02133, "17780": 2.02849, "17785": 2.00976, "17790": 2.00389, "17795": 2.00567, "17800": 2.00253, "17805": 2.02471, "17810": 2.01474, "17815": 2.02384, "17820": 2.03357, "17825": 2.01726, "17830": 2.00406, "17835": 2.01594, "17840": 2.00931, "17845": 2.00426, "17850": 2.03897, "17855": 2.01932, "17860": 2.03316, "17865": 2.04338, "17870": 2.00294, "17875": 2.03649, "17880": 1.98591, "17885": 1.99818, "17890": 2.02625, "17895": 2.00011, "17900": 2.02656, "17905": 2.01914, "17910": 2.02125, "17915": 2.0065, "17920": 2.02777, "17925": 2.02603, "17930": 2.04015, "17935": 2.01176, "17940": 2.02729, "17945": 2.01832, "17950": 1.99111, "17955": 2.01754, "17960": 2.02372, "17965": 2.02763, "17970": 2.02529, "17975": 2.01918, "17980": 2.01799, "17985": 2.04036, "17990": 1.99322, "17995": 1.99162, "18000": 2.02884, "18005": 2.00536, "18010": 2.00918, "18015": 2.02433, "18020": 2.00973, "18025": 2.02601, "18030": 2.01164, "18035": 2.02041, "18040": 2.00733, "18045": 2.01577, "18050": 1.98961, "18055": 1.97819, "18060": 1.98179, "18065": 1.99455, "18070": 2.00544, "18075": 2.01406, "18080": 2.0329, "18085": 1.98717, "18090": 2.01296, "18095": 2.00516, "18100": 2.01857, "18105": 1.98866, "18110": 2.03028, "18115": 2.02158, "18120": 2.01257, "18125": 2.02606, "18130": 2.01984, "18135": 2.01842, "18140": 2.01476, "18145": 2.03507, "18150": 2.01796, "18155": 2.0142, "18160": 2.00618, "18165": 2.02533, "18170": 2.02669, "18175": 2.01105, "18180": 2.01592, "18185": 2.00883, "18190": 2.00751, "18195": 1.99354, "18200": 2.0216, "18205": 2.03053, "18210": 1.99706, "18215": 2.03643, "18220": 2.00052, "18225": 2.0263, "18230": 1.99138, "18235": 2.00464, "18240": 2.02047, "18245": 2.00076, "18250": 2.02486, "18255": 2.01322, "18260": 1.99216, "18265": 2.02441, "18270": 1.97741, "18275": 2.00681, "18280": 1.99585, "18285": 1.99434, "18290": 2.02408, "18295": 2.02041, "18300": 2.0148, "18305": 1.99074, "18310": 1.99026, "18315": 1.9962, "18320": 1.9912, "18325": 2.01433, "18330": 2.00365, "18335": 2.00634, "18340": 2.02672, "18345": 2.02152, "18350": 2.0007, "18355": 2.02413, "18360": 2.00822, "18365": 2.00761, "18370": 2.01925, "18375": 2.01237, "18380": 2.01289, "18385": 2.00326, "18390": 2.03134, "18395": 2.0066, "18400": 2.02819, "18405": 2.02535, "18410": 2.02754, "18415": 2.02769, "18420": 2.04974, "18425": 2.012, "18430": 2.01813, "18435": 1.97711, "18440": 2.02316, "18445": 2.01416, "18450": 1.99094, "18455": 1.99866, "18460": 2.01942, "18465": 2.00047, "18470": 2.0129, "18475": 1.99282, "18480": 2.01299, "18485": 1.99977, "18490": 2.00588, "18495": 2.04534, "18500": 1.99, "18505": 2.01199, "18510": 2.00331, "18515": 2.01517, "18520": 1.99274, "18525": 2.01607, "18530": 1.98333, "18535": 1.98679, "18540": 2.01212, "18545": 1.98514, "18550": 1.99722, "18555": 2.00625, "18560": 2.02672, "18565": 1.99987, "18570": 2.02741, "18575": 2.02255, "18580": 1.99443, "18585": 1.99511, "18590": 2.00633, "18595": 
1.99848, "18600": 2.00604, "18605": 2.02936, "18610": 2.01951, "18615": 2.00557, "18620": 1.99868, "18625": 2.01382, "18630": 1.98935, "18635": 1.99882, "18640": 1.99814, "18645": 2.00755, "18650": 2.00189, "18655": 2.00269, "18660": 1.98739, "18665": 1.99017, "18670": 2.00351, "18675": 2.00498, "18680": 1.99572, "18685": 1.98883, "18690": 2.01416, "18695": 2.02385, "18700": 2.03153, "18705": 2.02034, "18710": 1.9967, "18715": 2.0234, "18720": 2.01745, "18725": 2.01536, "18730": 2.02429, "18735": 2.01854, "18740": 2.02214, "18745": 1.99571, "18750": 2.04142, "18755": 1.99855, "18760": 2.01775, "18765": 2.00334, "18770": 2.00817, "18775": 1.99636, "18780": 2.0376, "18785": 2.03304, "18790": 2.01656, "18795": 2.00307, "18800": 2.01753, "18805": 1.99742, "18810": 1.9989, "18815": 2.0284, "18820": 2.01164, "18825": 2.01625, "18830": 2.00393, "18835": 1.99581, "18840": 2.01462, "18845": 2.00914, "18850": 2.00078, "18855": 1.99689, "18860": 1.99716, "18865": 2.02216, "18870": 2.00397, "18875": 1.99723, "18880": 1.98663, "18885": 2.02399, "18890": 2.01254, "18895": 2.02186, "18900": 1.99208, "18905": 1.99153, "18910": 1.99502, "18915": 1.98386, "18920": 2.01965, "18925": 2.01253, "18930": 2.02693, "18935": 2.01047, "18940": 2.00761, "18945": 1.9992, "18950": 2.0104, "18955": 2.00152, "18960": 2.00598, "18965": 2.02205, "18970": 2.02645, "18975": 2.00879, "18980": 2.00172, "18985": 1.99396, "18990": 2.0197, "18995": 1.99488, "19000": 1.99188, "19005": 2.0131, "19010": 2.00665, "19015": 2.00529, "19020": 1.99353, "19025": 2.00592, "19030": 2.00477, "19035": 2.02612, "19040": 1.98998, "19045": 2.01334, "19050": 2.00227, "19055": 2.00481, "19060": 2.01532, "19065": 1.99562, "19070": 2.01846, "19075": 1.99248, "19080": 2.0027, "19085": 1.99659, "19090": 1.98473, "19095": 2.02347, "19100": 2.01618, "19105": 2.00439, "19110": 2.01477, "19115": 2.03557, "19120": 2.0305, "19125": 2.01333, "19130": 1.9982, "19135": 2.01446, "19140": 2.00422, "19145": 2.00813, "19150": 1.97719, "19155": 2.01351, "19160": 1.99843, "19165": 2.00347, "19170": 1.9833, "19175": 2.00959, "19180": 2.00781, "19185": 2.004, "19190": 1.9835, "19195": 1.99669, "19200": 1.99813, "19205": 1.99395, "19210": 2.01124, "19215": 1.99293, "19220": 2.02541, "19225": 2.00801, "19230": 2.00808, "19235": 1.98659, "19240": 1.993, "19245": 2.01041, "19250": 1.98837, "19255": 2.00527, "19260": 1.98112, "19265": 1.99245, "19270": 2.01374, "19275": 2.01052, "19280": 2.01488, "19285": 2.01181, "19290": 2.03395, "19295": 1.99929, "19300": 2.02075, "19305": 1.9929, "19310": 1.9998, "19315": 1.98845, "19320": 2.01068, "19325": 2.01913, "19330": 2.00087, "19335": 2.01875, "19340": 2.01531, "19345": 1.97635, "19350": 2.02462, "19355": 1.99515, "19360": 1.98551, "19365": 1.99479, "19370": 2.02488, "19375": 1.9987, "19380": 2.0028, "19385": 2.02648, "19390": 2.00637, "19395": 2.02054, "19400": 2.00436, "19405": 2.00961, "19410": 2.00216, "19415": 1.97489, "19420": 2.0112, "19425": 1.9874, "19430": 1.98862, "19435": 2.02134, "19440": 2.01365, "19445": 1.99511, "19450": 1.99009, "19455": 1.99585, "19460": 1.98639, "19465": 1.99595, "19470": 1.99878, "19475": 2.00387, "19480": 1.98558, "19485": 1.96833, "19490": 1.99889, "19495": 1.9922, "19500": 2.00495, "19505": 2.00215, "19510": 1.99805, "19515": 2.02595, "19520": 2.01537, "19525": 1.99306, "19530": 1.99208, "19535": 2.02515, "19540": 1.98971, "19545": 2.00253, "19550": 2.02186, "19555": 2.00448, "19560": 2.00741, "19565": 2.0089, "19570": 1.99414, "19575": 2.02012, "19580": 2.00381, "19585": 1.99649, 
"19590": 1.99078, "19595": 2.01475, "19600": 2.0083, "19605": 2.00865, "19610": 2.0133, "19615": 2.00378, "19620": 1.99727, "19625": 2.02659, "19630": 2.00896, "19635": 1.99332, "19640": 2.02243, "19645": 2.01463, "19650": 2.00628, "19655": 2.01405, "19660": 2.00124, "19665": 2.02997, "19670": 2.002, "19675": 1.99591, "19680": 1.98229, "19685": 2.01123, "19690": 2.00741, "19695": 2.00287, "19700": 1.98636, "19705": 2.01833, "19710": 2.03996, "19715": 2.01496, "19720": 1.99948, "19725": 2.02365, "19730": 2.02878, "19735": 2.01868, "19740": 2.01123, "19745": 1.99178, "19750": 1.98705, "19755": 1.99349, "19760": 1.98631, "19765": 2.01326, "19770": 2.0135, "19775": 1.99596, "19780": 2.00195, "19785": 1.98635, "19790": 1.99737, "19795": 2.00164, "19800": 1.99385, "19805": 1.98159, "19810": 1.9839, "19815": 2.00628, "19820": 2.01114, "19825": 1.99846, "19830": 2.01389, "19835": 2.01465, "19840": 2.01737, "19845": 1.95848, "19850": 2.00905, "19855": 2.00124, "19860": 1.98288, "19865": 2.01458, "19870": 2.00154, "19875": 2.01701, "19880": 2.00892, "19885": 1.97942, "19890": 1.99972, "19895": 2.00433, "19900": 1.99519, "19905": 2.0074, "19910": 2.02297, "19915": 1.99869, "19920": 1.98982, "19925": 1.9789, "19930": 1.98408, "19935": 2.00043, "19940": 1.97541, "19945": 2.02072, "19950": 2.01557, "19955": 2.01491, "19960": 2.02167, "19965": 1.99189, "19970": 1.97756, "19975": 2.00186, "19980": 2.00859, "19985": 2.00538, "19990": 1.99584, "19995": 1.9999, "20000": 2.00712, "20005": 1.99067, "20010": 1.99987, "20015": 1.99572, "20020": 2.00672, "20025": 2.00381, "20030": 2.00958, "20035": 1.9947, "20040": 1.98782, "20045": 1.97055, "20050": 2.01164, "20055": 1.99445, "20060": 1.98744, "20065": 2.00188, "20070": 2.00613, "20075": 1.98385, "20080": 1.99023, "20085": 1.97763, "20090": 1.98925, "20095": 1.98484, "20100": 1.99465, "20105": 1.9861, "20110": 2.01977, "20115": 2.00114, "20120": 1.9948, "20125": 1.98642, "20130": 1.99538, "20135": 1.98544, "20140": 1.988, "20145": 1.99367, "20150": 1.99213, "20155": 2.00786, "20160": 2.00959, "20165": 1.99543, "20170": 2.01812, "20175": 1.99674, "20180": 1.99187, "20185": 1.99811, "20190": 1.97762, "20195": 1.98035, "20200": 1.97242, "20205": 2.01399, "20210": 1.98824, "20215": 2.00314, "20220": 2.02342, "20225": 1.98975, "20230": 1.99002, "20235": 2.00323, "20240": 2.00865, "20245": 1.99225, "20250": 1.99486, "20255": 2.0071, "20260": 1.98455, "20265": 2.00619, "20270": 1.98494, "20275": 1.97967, "20280": 2.00645, "20285": 1.99427, "20290": 1.98117, "20295": 1.99883, "20300": 1.99901, "20305": 1.99768, "20310": 2.01837, "20315": 1.99649, "20320": 1.98881, "20325": 1.9985, "20330": 2.00741, "20335": 1.95935, "20340": 2.00605, "20345": 2.00596, "20350": 1.98556, "20355": 1.98664, "20360": 2.00428, "20365": 1.96603, "20370": 1.99465, "20375": 1.98669, "20380": 1.99507, "20385": 1.99234, "20390": 1.98202, "20395": 1.99881, "20400": 1.98099, "20405": 1.99423, "20410": 1.9792, "20415": 2.01655, "20420": 2.00922, "20425": 1.98395, "20430": 1.99439, "20435": 1.98661, "20440": 1.99133, "20445": 2.0252, "20450": 1.97602, "20455": 2.00434, "20460": 1.98591, "20465": 2.00238, "20470": 1.99449, "20475": 1.99295, "20480": 1.97341, "20485": 1.98947, "20490": 1.99347, "20495": 2.00595, "20500": 1.99069, "20505": 1.98322, "20510": 1.98498, "20515": 2.02051, "20520": 1.98923, "20525": 1.99004, "20530": 1.96509, "20535": 1.98198, "20540": 1.97833, "20545": 1.98415, "20550": 1.99839, "20555": 1.99909, "20560": 1.95435, "20565": 2.01176, "20570": 1.98848, "20575": 1.97153, "20580": 
1.9941, "20585": 1.96783, "20590": 1.99192, "20595": 1.99666, "20600": 2.01744, "20605": 1.99701, "20610": 2.00496, "20615": 1.98816, "20620": 1.98345, "20625": 1.99658, "20630": 2.0043, "20635": 1.99346, "20640": 1.98673, "20645": 2.00038, "20650": 1.96909, "20655": 1.9914, "20660": 1.98944, "20665": 2.00014, "20670": 2.00135, "20675": 1.99193, "20680": 1.99175, "20685": 1.98389, "20690": 2.02648, "20695": 1.95698, "20700": 1.99891, "20705": 2.00693, "20710": 1.98631, "20715": 1.98838, "20720": 2.00511, "20725": 1.98705, "20730": 2.00091, "20735": 2.01789, "20740": 1.98972, "20745": 2.01729, "20750": 1.99392, "20755": 1.95946, "20760": 2.01092, "20765": 1.98911, "20770": 1.99108, "20775": 1.99471, "20780": 1.99374, "20785": 1.97831, "20790": 1.98252, "20795": 2.00069, "20800": 1.97227, "20805": 1.98279, "20810": 1.97518, "20815": 1.98075, "20820": 2.01033, "20825": 1.99741, "20830": 2.00368, "20835": 1.97623, "20840": 1.98294, "20845": 2.01737, "20850": 1.97828, "20855": 2.00525, "20860": 1.99843, "20865": 1.97456, "20870": 1.98751, "20875": 1.98596, "20880": 1.9914, "20885": 2.00536, "20890": 1.98325, "20895": 1.99007, "20900": 1.97671, "20905": 2.00126, "20910": 2.02618, "20915": 1.99143, "20920": 2.00618, "20925": 1.99791, "20930": 1.97773, "20935": 2.00148, "20940": 1.97961, "20945": 2.00815, "20950": 1.98613, "20955": 1.9892, "20960": 2.00953, "20965": 1.9766, "20970": 1.97351, "20975": 1.97861, "20980": 2.0046, "20985": 1.96033, "20990": 1.99285, "20995": 1.99382, "21000": 1.99827, "21005": 1.98578, "21010": 2.0108, "21015": 1.97053, "21020": 1.96625, "21025": 2.00088, "21030": 1.98132, "21035": 2.009, "21040": 2.01065, "21045": 2.00125, "21050": 1.98783, "21055": 1.99057, "21060": 1.98849, "21065": 2.00044, "21070": 1.99152, "21075": 1.98431, "21080": 1.96605, "21085": 1.98721, "21090": 1.96803, "21095": 1.98764, "21100": 1.96594, "21105": 1.99446, "21110": 1.97771, "21115": 1.98713, "21120": 1.98888, "21125": 1.98405, "21130": 1.98871, "21135": 1.98213, "21140": 2.0129, "21145": 2.00328, "21150": 1.99038, "21155": 1.98145, "21160": 1.97718, "21165": 1.97532, "21170": 1.99749, "21175": 2.01106, "21180": 1.97, "21185": 2.0249, "21190": 1.9947, "21195": 1.97408, "21200": 1.98088, "21205": 1.98623, "21210": 2.00205, "21215": 1.97127, "21220": 1.98231, "21225": 1.98011, "21230": 2.00493, "21235": 1.99927, "21240": 1.98464, "21245": 1.97567, "21250": 2.01125, "21255": 1.96019, "21260": 1.96518, "21265": 1.96598, "21270": 1.99037, "21275": 1.9731, "21280": 1.98599, "21285": 1.9746, "21290": 1.98806, "21295": 2.00143, "21300": 1.99191, "21305": 1.98901, "21310": 1.97724, "21315": 1.9927, "21320": 1.96986, "21325": 1.97125, "21330": 1.98636, "21335": 2.00272, "21340": 1.98532, "21345": 2.00998, "21350": 1.98611, "21355": 1.97294, "21360": 1.98547, "21365": 1.97997, "21370": 1.99942, "21375": 1.98583, "21380": 1.97978, "21385": 1.98378, "21390": 1.98792, "21395": 1.97396, "21400": 1.97086, "21405": 1.99509, "21410": 2.00534, "21415": 1.98487, "21420": 2.00801, "21425": 1.99104, "21430": 1.99971, "21435": 1.98579, "21440": 1.98292, "21445": 2.01198, "21450": 2.01114, "21455": 1.99691, "21460": 1.98373, "21465": 1.99006, "21470": 2.00335, "21475": 1.98792, "21480": 1.98911, "21485": 1.97144, "21490": 1.993, "21495": 1.98418, "21500": 1.97369, "21505": 1.99253, "21510": 1.96997, "21515": 1.99845, "21520": 1.98849, "21525": 1.99298, "21530": 1.99975, "21535": 1.98491, "21540": 1.98993, "21545": 1.97922, "21550": 1.97247, "21555": 1.97395, "21560": 1.99034, "21565": 1.99459, "21570": 1.9854, 
"21575": 2.00343, "21580": 1.97861, "21585": 1.99427, "21590": 1.98103, "21595": 1.98438, "21600": 1.96257, "21605": 2.00566, "21610": 2.00415, "21615": 1.96466, "21620": 2.00891, "21625": 1.97116, "21630": 1.99269, "21635": 1.97205, "21640": 1.99583, "21645": 2.02493, "21650": 1.97197, "21655": 1.96686, "21660": 1.98741, "21665": 1.99657, "21670": 1.99362, "21675": 1.99529, "21680": 1.9765, "21685": 1.97294, "21690": 1.98599, "21695": 1.979, "21700": 2.00565, "21705": 2.01058, "21710": 2.00705, "21715": 1.99927, "21720": 2.00411, "21725": 1.99936, "21730": 1.98352, "21735": 1.96621, "21740": 2.00295, "21745": 1.97515, "21750": 1.97988, "21755": 1.99309, "21760": 2.00716, "21765": 2.01802, "21770": 1.97218, "21775": 2.01057, "21780": 1.99289, "21785": 2.00139, "21790": 2.0072, "21795": 1.97358, "21800": 1.97454, "21805": 1.97514, "21810": 1.97515, "21815": 1.95868, "21820": 1.99741, "21825": 1.97065, "21830": 2.00852, "21835": 2.00574, "21840": 1.98366, "21845": 1.99724, "21850": 1.99327, "21855": 1.97865, "21860": 1.98429, "21865": 1.97587, "21870": 1.9898, "21875": 2.00492, "21880": 1.97698, "21885": 1.99353, "21890": 1.99517, "21895": 1.97338, "21900": 1.98528, "21905": 1.9693, "21910": 1.98249, "21915": 1.98084, "21920": 1.96622, "21925": 1.99088, "21930": 1.99385, "21935": 2.00399, "21940": 1.97297, "21945": 1.96225, "21950": 1.97693, "21955": 1.99614, "21960": 1.97973, "21965": 2.01477, "21970": 2.00278, "21975": 1.97466, "21980": 1.96289, "21985": 1.98342, "21990": 1.97358, "21995": 1.97085, "22000": 1.9968, "22005": 1.97684, "22010": 1.95484, "22015": 1.96378, "22020": 1.96599, "22025": 1.98983, "22030": 1.97331, "22035": 1.96758, "22040": 1.9722, "22045": 1.98287, "22050": 1.98562, "22055": 2.0043, "22060": 1.98791, "22065": 2.00392, "22070": 1.9875, "22075": 2.00532, "22080": 1.97416, "22085": 1.98897, "22090": 1.96584, "22095": 1.97756, "22100": 1.9912, "22105": 1.98489, "22110": 1.98723, "22115": 1.96068, "22120": 2.00077, "22125": 1.98785, "22130": 1.98301, "22135": 1.96152, "22140": 2.00339, "22145": 1.97337, "22150": 1.97491, "22155": 1.96584, "22160": 1.97934, "22165": 1.97069, "22170": 1.98789, "22175": 2.00241, "22180": 1.97242, "22185": 1.9851, "22190": 1.96633, "22195": 1.98626, "22200": 1.97249, "22205": 1.9957, "22210": 1.99301, "22215": 1.97396, "22220": 1.98995, "22225": 1.99048, "22230": 1.97738, "22235": 2.0069, "22240": 1.96995, "22245": 1.98279, "22250": 1.98861, "22255": 1.9585, "22260": 1.98545, "22265": 1.98582, "22270": 2.00519, "22275": 1.97231, "22280": 1.98427, "22285": 1.99765, "22290": 1.97428, "22295": 1.98902, "22300": 1.95796, "22305": 1.98206, "22310": 1.98152, "22315": 1.97486, "22320": 1.98396, "22325": 1.98544, "22330": 1.98088, "22335": 1.98102, "22340": 1.97851, "22345": 1.98377, "22350": 2.00797, "22355": 1.98243, "22360": 1.98963, "22365": 1.99678, "22370": 1.9808, "22375": 1.97907, "22380": 1.9842, "22385": 1.9907, "22390": 1.97698, "22395": 1.98303, "22400": 2.00021, "22405": 1.98659, "22410": 1.98538, "22415": 1.97241, "22420": 1.97238, "22425": 1.99157, "22430": 1.98852, "22435": 1.96845, "22440": 1.97338, "22445": 1.98175, "22450": 1.97367, "22455": 1.98031, "22460": 2.00441, "22465": 1.99942, "22470": 1.98392, "22475": 1.99866, "22480": 1.96353, "22485": 1.97088, "22490": 1.98445, "22495": 1.975, "22500": 1.96941, "22505": 1.97659, "22510": 1.96688, "22515": 1.97093, "22520": 1.95924, "22525": 1.97231, "22530": 1.96507, "22535": 1.99133, "22540": 1.99264, "22545": 1.97654, "22550": 1.99319, "22555": 1.98192, "22560": 1.98778, "22565": 
1.97318, "22570": 1.99346, "22575": 1.99171, "22580": 1.98767, "22585": 1.9734, "22590": 2.00917, "22595": 1.9859, "22600": 1.99269, "22605": 1.94927, "22610": 1.99107, "22615": 1.97547, "22620": 1.99885, "22625": 1.95504, "22630": 1.96516, "22635": 1.99438, "22640": 1.96057, "22645": 1.98079, "22650": 1.98437, "22655": 1.97458, "22660": 1.96864, "22665": 2.00773, "22670": 1.9827, "22675": 1.96389, "22680": 1.96849, "22685": 1.97679, "22690": 1.96942, "22695": 1.98334, "22700": 1.9858, "22705": 1.9621, "22710": 1.95523, "22715": 2.01197, "22720": 1.98382, "22725": 1.9612, "22730": 1.97728, "22735": 1.96004, "22740": 1.96149, "22745": 1.97704, "22750": 1.96435, "22755": 2.00227, "22760": 1.98005, "22765": 1.96919, "22770": 1.98062, "22775": 1.95467, "22780": 1.96725, "22785": 1.95782, "22790": 1.99137, "22795": 2.00435, "22800": 1.99349, "22805": 1.96592, "22810": 1.97492, "22815": 1.95706, "22820": 1.96192, "22825": 1.95576, "22830": 1.98618, "22835": 1.97539, "22840": 1.96774, "22845": 1.99671, "22850": 1.98357, "22855": 1.975, "22860": 1.96556, "22865": 1.9852, "22870": 2.00271, "22875": 1.98172, "22880": 1.97039, "22885": 1.97082, "22890": 1.96772, "22895": 1.95815, "22900": 1.97058, "22905": 1.95627, "22910": 1.98621, "22915": 1.9744, "22920": 2.00146, "22925": 1.95323, "22930": 1.97273, "22935": 1.98214, "22940": 1.99532, "22945": 1.9974, "22950": 1.96989, "22955": 1.99273, "22960": 1.96774, "22965": 1.98075, "22970": 1.97418, "22975": 1.96131, "22980": 1.99632, "22985": 1.95583, "22990": 1.97416, "22995": 1.95942, "23000": 1.98815, "23005": 1.99433, "23010": 1.99907, "23015": 1.96742, "23020": 1.98026, "23025": 1.9787, "23030": 1.97268, "23035": 1.99878, "23040": 1.97638, "23045": 1.97432, "23050": 1.96029, "23055": 1.96883, "23060": 1.96908, "23065": 1.98591, "23070": 1.97531, "23075": 1.97799, "23080": 1.99657, "23085": 1.97257, "23090": 1.95802, "23095": 1.98817, "23100": 1.97482, "23105": 1.97988, "23110": 1.98116, "23115": 1.96496, "23120": 1.94252, "23125": 1.99758, "23130": 1.98043, "23135": 1.97682, "23140": 1.98273, "23145": 1.96361, "23150": 1.98163, "23155": 1.9872, "23160": 2.01488, "23165": 1.97581, "23170": 1.98858, "23175": 1.98259, "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", 
"23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", 
"24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", 
"25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", 
"26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", 
"28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", 
"29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", 
"30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", 
"31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", 
"32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", 
"33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", 
"34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", 
"35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", 
"36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", 
"38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", 
"39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", 
"40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", 
"41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", 
"42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", 
"43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", 
"44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", 
"45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", 
"46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", 
"48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", 
"49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", 
"50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 406787520.0, "5": 406592256.0, "10": 414795872.0, "15": 419864992.0, "20": 411724288.0, "25": 406622304.0, "30": 413034304.0, "35": 409552224.0, "40": 414714176.0, "45": 411202656.0, "50": 413174400.0, "55": 414168992.0, "60": 407747264.0, "65": 411425984.0, "70": 410919968.0, "75": 409331872.0, "80": 406662272.0, "85": 406728864.0, "90": 412170048.0, "95": 414241152.0, "100": 414834144.0, "105": 421842240.0, "110": 413672096.0, "115": 408993760.0, "120": 411564832.0, "125": 406562240.0, "130": 411451296.0, "135": 417566688.0, "140": 403018272.0, "145": 410664128.0, "150": 417475712.0, "155": 405320096.0, "160": 402154784.0, "165": 408992960.0, "170": 400900544.0, "175": 407148704.0, "180": 409550752.0, "185": 402997312.0, "190": 414755680.0, "195": 403489600.0, "200": 414941024.0, "205": 414930624.0, "210": 404129536.0, "215": 413629696.0, "220": 406036288.0, "225": 405373472.0, "230": 418522976.0, "235": 410754880.0, "240": 415721856.0, "245": 412100800.0, "250": 411167968.0, "255": 422414880.0, "260": 405246304.0, "265": 410729408.0, "270": 409320448.0, "275": 407086080.0, "280": 407993408.0, "285": 413274656.0, "290": 405427968.0, "295": 412640768.0, "300": 412000768.0, "305": 413316096.0, "310": 411113600.0, "315": 415751552.0, "320": 414909952.0, "325": 406883296.0, "330": 413362560.0, "335": 414361408.0, "340": 404911008.0, "345": 409995712.0, "350": 416836320.0, "355": 414217408.0, "360": 415051072.0, "365": 405025024.0, "370": 416624832.0, "375": 
402492032.0, "380": 412621568.0, "385": 406798112.0, "390": 413799360.0, "395": 412016864.0, "400": 413232736.0, "405": 417363648.0, "410": 410792480.0, "415": 413900864.0, "420": 415209760.0, "425": 416627232.0, "430": 410907072.0, "435": 413996160.0, "440": 406105600.0, "445": 410403808.0, "450": 407894304.0, "455": 406206560.0, "460": 414314400.0, "465": 411987744.0, "470": 425869888.0, "475": 412910528.0, "480": 407134592.0, "485": 415675104.0, "490": 408964288.0, "495": 414695136.0, "500": 413684672.0, "505": 409272096.0, "510": 417079040.0, "515": 420419776.0, "520": 406484544.0, "525": 398837440.0, "530": 413281344.0, "535": 404577184.0, "540": 403469216.0, "545": 410432960.0, "550": 407044064.0, "555": 409851296.0, "560": 427455456.0, "565": 406162656.0, "570": 404859392.0, "575": 412398656.0, "580": 407224288.0, "585": 414315136.0, "590": 412621728.0, "595": 419184352.0, "600": 412166304.0, "605": 418650272.0, "610": 406106272.0, "615": 411868352.0, "620": 414483072.0, "625": 413545120.0, "630": 414249440.0, "635": 415762592.0, "640": 418910304.0, "645": 419574688.0, "650": 414617760.0, "655": 425489792.0, "660": 409908096.0, "665": 415278400.0, "670": 404457376.0, "675": 416198784.0, "680": 416534976.0, "685": 415695744.0, "690": 412359296.0, "695": 417980768.0, "700": 410633216.0, "705": 418420672.0, "710": 414203712.0, "715": 411008032.0, "720": 410516768.0, "725": 410396096.0, "730": 417933760.0, "735": 411969856.0, "740": 415870112.0, "745": 411143392.0, "750": 413324992.0, "755": 413921184.0, "760": 410644128.0, "765": 413238848.0, "770": 414562176.0, "775": 407341280.0, "780": 421722528.0, "785": 409849664.0, "790": 410567968.0, "795": 420400352.0, "800": 418182048.0, "805": 414133312.0, "810": 414460352.0, "815": 412908864.0, "820": 408241248.0, "825": 407965472.0, "830": 415386880.0, "835": 414043744.0, "840": 415204288.0, "845": 411016352.0, "850": 404442656.0, "855": 411951744.0, "860": 412365440.0, "865": 406100640.0, "870": 419939168.0, "875": 409721376.0, "880": 407168800.0, "885": 413694304.0, "890": 410882144.0, "895": 412955360.0, "900": 413041376.0, "905": 406262912.0, "910": 409207872.0, "915": 406260832.0, "920": 415109760.0, "925": 400830976.0, "930": 410062464.0, "935": 411185344.0, "940": 403089856.0, "945": 417411424.0, "950": 412864192.0, "955": 409713344.0, "960": 420149344.0, "965": 407941024.0, "970": 414691392.0, "975": 403905440.0, "980": 406756064.0, "985": 412577856.0, "990": 407738816.0, "995": 410225344.0, "1000": 414083104.0, "1005": 405787168.0, "1010": 419717376.0, "1015": 413893152.0, "1020": 413903648.0, "1025": 410997376.0, "1030": 403684256.0, "1035": 420490336.0, "1040": 408927616.0, "1045": 411334272.0, "1050": 414040256.0, "1055": 415264576.0, "1060": 409940800.0, "1065": 412784768.0, "1070": 407128992.0, "1075": 409007520.0, "1080": 418018176.0, "1085": 412431872.0, "1090": 414587616.0, "1095": 418743360.0, "1100": 411082944.0, "1105": 413015584.0, "1110": 407320320.0, "1115": 415981664.0, "1120": 407398144.0, "1125": 417683200.0, "1130": 417919136.0, "1135": 407434912.0, "1140": 415216736.0, "1145": 413779104.0, "1150": 412058592.0, "1155": 406349920.0, "1160": 415341792.0, "1165": 412420384.0, "1170": 410876544.0, "1175": 412858048.0, "1180": 412681856.0, "1185": 411931328.0, "1190": 417111264.0, "1195": 406445024.0, "1200": 410971200.0, "1205": 424615200.0, "1210": 409679648.0, "1215": 408693280.0, "1220": 407540000.0, "1225": 410069280.0, "1230": 418426720.0, "1235": 412551104.0, "1240": 410228448.0, "1245": 412509920.0, "1250": 
406525536.0, "1255": 415956960.0, "1260": 408756608.0, "1265": 409598368.0, "1270": 406935936.0, "1275": 413292992.0, "1280": 409161120.0, "1285": 410126400.0, "1290": 409282560.0, "1295": 414241440.0, "1300": 409152768.0, "1305": 407936320.0, "1310": 413586976.0, "1315": 409717792.0, "1320": 407101728.0, "1325": 413080192.0, "1330": 417865408.0, "1335": 408950176.0, "1340": 417995424.0, "1345": 412731968.0, "1350": 419566656.0, "1355": 412066880.0, "1360": 402345408.0, "1365": 415139104.0, "1370": 417915040.0, "1375": 400933760.0, "1380": 413297440.0, "1385": 414364288.0, "1390": 412117856.0, "1395": 413715776.0, "1400": 404598624.0, "1405": 412338784.0, "1410": 419105152.0, "1415": 407760960.0, "1420": 411863968.0, "1425": 408363584.0, "1430": 411980768.0, "1435": 415373056.0, "1440": 408487232.0, "1445": 416851648.0, "1450": 412756512.0, "1455": 410345504.0, "1460": 414049184.0, "1465": 413358848.0, "1470": 413860288.0, "1475": 414416480.0, "1480": 413742656.0, "1485": 413893952.0, "1490": 418780896.0, "1495": 409671040.0, "1500": 418493472.0, "1505": 403666432.0, "1510": 414433152.0, "1515": 404495904.0, "1520": 413764608.0, "1525": 413698560.0, "1530": 419425536.0, "1535": 404638560.0, "1540": 411437856.0, "1545": 409020576.0, "1550": 411313248.0, "1555": 406652672.0, "1560": 406774752.0, "1565": 413993920.0, "1570": 418594368.0, "1575": 412430400.0, "1580": 411316832.0, "1585": 417946112.0, "1590": 409018400.0, "1595": 404168000.0, "1600": 409885536.0, "1605": 407624608.0, "1610": 420202720.0, "1615": 408830816.0, "1620": 415820160.0, "1625": 411788288.0, "1630": 408953664.0, "1635": 413073568.0, "1640": 411044288.0, "1645": 412709216.0, "1650": 412685312.0, "1655": 417731552.0, "1660": 416714784.0, "1665": 421184160.0, "1670": 417573664.0, "1675": 405742720.0, "1680": 416780448.0, "1685": 416323616.0, "1690": 412525280.0, "1695": 413663584.0, "1700": 407048768.0, "1705": 408813952.0, "1710": 416922624.0, "1715": 410371840.0, "1720": 404128864.0, "1725": 410475840.0, "1730": 418653248.0, "1735": 417132448.0, "1740": 411040736.0, "1745": 411075584.0, "1750": 410848576.0, "1755": 417424352.0, "1760": 406798976.0, "1765": 416371680.0, "1770": 404446464.0, "1775": 417529536.0, "1780": 413472640.0, "1785": 412876032.0, "1790": 407667360.0, "1795": 415060960.0, "1800": 409086816.0, "1805": 426672768.0, "1810": 407206560.0, "1815": 414004800.0, "1820": 415249344.0, "1825": 417358016.0, "1830": 407460896.0, "1835": 420258272.0, "1840": 407121792.0, "1845": 412629760.0, "1850": 421216576.0, "1855": 412247872.0, "1860": 417111232.0, "1865": 407811776.0, "1870": 406378432.0, "1875": 414691392.0, "1880": 410791296.0, "1885": 412041216.0, "1890": 414643360.0, "1895": 406860352.0, "1900": 410934816.0, "1905": 413752672.0, "1910": 417159072.0, "1915": 416150784.0, "1920": 406428416.0, "1925": 415854528.0, "1930": 410061920.0, "1935": 404454304.0, "1940": 418421952.0, "1945": 405733632.0, "1950": 413198080.0, "1955": 416175360.0, "1960": 415721568.0, "1965": 408029568.0, "1970": 417079584.0, "1975": 411135072.0, "1980": 405913184.0, "1985": 407787808.0, "1990": 409385600.0, "1995": 406986016.0, "2000": 418204480.0, "2005": 412561920.0, "2010": 407928352.0, "2015": 404347392.0, "2020": 414580160.0, "2025": 411200000.0, "2030": 411001344.0, "2035": 410663584.0, "2040": 413206944.0, "2045": 418035872.0, "2050": 416185952.0, "2055": 410044384.0, "2060": 414145536.0, "2065": 416037824.0, "2070": 408417184.0, "2075": 408083872.0, "2080": 415029536.0, "2085": 408797344.0, "2090": 422520064.0, "2095": 
412102208.0, "2100": 406873760.0, "2105": 409858048.0, "2110": 412461216.0, "2115": 415997568.0, "2120": 417544448.0, "2125": 412495040.0, "2130": 415160800.0, "2135": 411726144.0, "2140": 416610976.0, "2145": 407848896.0, "2150": 412195328.0, "2155": 421871840.0, "2160": 414866144.0, "2165": 417311328.0, "2170": 412893952.0, "2175": 412061760.0, "2180": 401523136.0, "2185": 419654944.0, "2190": 407856128.0, "2195": 411337504.0, "2200": 416489920.0, "2205": 416241184.0, "2210": 406788672.0, "2215": 414889856.0, "2220": 409236960.0, "2225": 416552864.0, "2230": 411525312.0, "2235": 414073280.0, "2240": 410427488.0, "2245": 422848448.0, "2250": 407621088.0, "2255": 411854112.0, "2260": 415275520.0, "2265": 401920448.0, "2270": 411835264.0, "2275": 403329984.0, "2280": 412758592.0, "2285": 409604000.0, "2290": 405386528.0, "2295": 412128512.0, "2300": 414687840.0, "2305": 412055424.0, "2310": 413963296.0, "2315": 406132096.0, "2320": 414428992.0, "2325": 407839904.0, "2330": 407154112.0, "2335": 414211872.0, "2340": 396976192.0, "2345": 410634560.0, "2350": 417683936.0, "2355": 414628512.0, "2360": 413533536.0, "2365": 415815712.0, "2370": 408937088.0, "2375": 416051392.0, "2380": 414398272.0, "2385": 416522432.0, "2390": 410235168.0, "2395": 407589056.0, "2400": 418743488.0, "2405": 413565504.0, "2410": 412589120.0, "2415": 408912448.0, "2420": 410419616.0, "2425": 418355584.0, "2430": 411174176.0, "2435": 417270272.0, "2440": 415482176.0, "2445": 409323104.0, "2450": 415643008.0, "2455": 408209504.0, "2460": 412592096.0, "2465": 412157408.0, "2470": 417450208.0, "2475": 409709600.0, "2480": 412322656.0, "2485": 405371936.0, "2490": 408215136.0, "2495": 409813184.0, "2500": 414518784.0, "2505": 409078880.0, "2510": 408933888.0, "2515": 419013440.0, "2520": 423234592.0, "2525": 412786208.0, "2530": 415999104.0, "2535": 411781792.0, "2540": 414291264.0, "2545": 416074496.0, "2550": 426333408.0, "2555": 417115744.0, "2560": 408551552.0, "2565": 410186784.0, "2570": 405202176.0, "2575": 409909728.0, "2580": 407697440.0, "2585": 415065312.0, "2590": 418711136.0, "2595": 406943552.0, "2600": 404464000.0, "2605": 422354080.0, "2610": 407391744.0, "2615": 415175168.0, "2620": 410334400.0, "2625": 413413056.0, "2630": 418692096.0, "2635": 417749216.0, "2640": 414684256.0, "2645": 409656320.0, "2650": 408143648.0, "2655": 414075648.0, "2660": 408575584.0, "2665": 409609792.0, "2670": 413855104.0, "2675": 400291232.0, "2680": 415580960.0, "2685": 410059840.0, "2690": 402831424.0, "2695": 420051296.0, "2700": 413591424.0, "2705": 409028448.0, "2710": 407144384.0, "2715": 410079872.0, "2720": 410696032.0, "2725": 413175456.0, "2730": 412582176.0, "2735": 413075040.0, "2740": 412841472.0, "2745": 417259840.0, "2750": 413592416.0, "2755": 410169632.0, "2760": 416739936.0, "2765": 421538304.0, "2770": 410973632.0, "2775": 416086528.0, "2780": 410037792.0, "2785": 416428032.0, "2790": 409088192.0, "2795": 414727872.0, "2800": 406928160.0, "2805": 414285440.0, "2810": 414379008.0, "2815": 404253728.0, "2820": 414130720.0, "2825": 417722464.0, "2830": 422092640.0, "2835": 416378944.0, "2840": 415201440.0, "2845": 412390080.0, "2850": 411374816.0, "2855": 423506048.0, "2860": 412030720.0, "2865": 411372064.0, "2870": 412284384.0, "2875": 420631136.0, "2880": 405571040.0, "2885": 412421824.0, "2890": 411896416.0, "2895": 407861664.0, "2900": 423057088.0, "2905": 404228992.0, "2910": 420959296.0, "2915": 407537440.0, "2920": 415787328.0, "2925": 415946976.0, "2930": 414582624.0, "2935": 413672352.0, "2940": 
409946912.0, "2945": 410839328.0, "2950": 419678240.0, "2955": 402757792.0, "2960": 411655840.0, "2965": 409881728.0, "2970": 409734816.0, "2975": 418148672.0, "2980": 410409344.0, "2985": 407735776.0, "2990": 412354208.0, "2995": 401123936.0, "3000": 409469792.0, "3005": 410477824.0, "3010": 419295232.0, "3015": 409509504.0, "3020": 403281760.0, "3025": 408281248.0, "3030": 409509536.0, "3035": 411892672.0, "3040": 411899008.0, "3045": 402997152.0, "3050": 420551360.0, "3055": 422616320.0, "3060": 409083648.0, "3065": 408839168.0, "3070": 419810752.0, "3075": 409168992.0, "3080": 424927552.0, "3085": 407314016.0, "3090": 416151104.0, "3095": 413042368.0, "3100": 421673760.0, "3105": 416499104.0, "3110": 413201504.0, "3115": 413487616.0, "3120": 422879264.0, "3125": 409815936.0, "3130": 421933952.0, "3135": 415587104.0, "3140": 408546240.0, "3145": 421576736.0, "3150": 412129312.0, "3155": 411846080.0, "3160": 414725088.0, "3165": 412613440.0, "3170": 414412032.0, "3175": 414954304.0, "3180": 411850816.0, "3185": 416236832.0, "3190": 413642336.0, "3195": 410492768.0, "3200": 419161568.0, "3205": 409026112.0, "3210": 408189088.0, "3215": 413918272.0, "3220": 410782912.0, "3225": 416897600.0, "3230": 417532384.0, "3235": 404055232.0, "3240": 414929408.0, "3245": 410574848.0, "3250": 412317728.0, "3255": 404854016.0, "3260": 425139392.0, "3265": 413815552.0, "3270": 408450784.0, "3275": 405114720.0, "3280": 424218016.0, "3285": 418747520.0, "3290": 410523488.0, "3295": 413457248.0, "3300": 416127136.0, "3305": 410644736.0, "3310": 415429984.0, "3315": 413004544.0, "3320": 412383776.0, "3325": 410719840.0, "3330": 408925120.0, "3335": 414299232.0, "3340": 408202688.0, "3345": 409512064.0, "3350": 413869440.0, "3355": 420501280.0, "3360": 409112704.0, "3365": 409075008.0, "3370": 414151904.0, "3375": 409609184.0, "3380": 417743328.0, "3385": 411416352.0, "3390": 421710656.0, "3395": 415478592.0, "3400": 413850912.0, "3405": 417918400.0, "3410": 412496320.0, "3415": 409164448.0, "3420": 414291168.0, "3425": 410992800.0, "3430": 413641984.0, "3435": 409449216.0, "3440": 416484608.0, "3445": 411892448.0, "3450": 418135552.0, "3455": 406587328.0, "3460": 409702272.0, "3465": 416326112.0, "3470": 414243936.0, "3475": 415523744.0, "3480": 417282304.0, "3485": 413667200.0, "3490": 414845344.0, "3495": 408664576.0, "3500": 424679360.0, "3505": 409311200.0, "3510": 410960864.0, "3515": 412008832.0, "3520": 421340480.0, "3525": 404119328.0, "3530": 421469856.0, "3535": 418741728.0, "3540": 411960480.0, "3545": 413171808.0, "3550": 418678848.0, "3555": 409486496.0, "3560": 406306048.0, "3565": 409476672.0, "3570": 407459712.0, "3575": 413757440.0, "3580": 414220640.0, "3585": 416856704.0, "3590": 414352448.0, "3595": 409133696.0, "3600": 414492000.0, "3605": 406107296.0, "3610": 414006272.0, "3615": 411591488.0, "3620": 413615968.0, "3625": 412596384.0, "3630": 411904352.0, "3635": 416913408.0, "3640": 413176544.0, "3645": 411115296.0, "3650": 424183904.0, "3655": 413256416.0, "3660": 415784384.0, "3665": 410494016.0, "3670": 411589920.0, "3675": 412823552.0, "3680": 417861856.0, "3685": 407721024.0, "3690": 403680096.0, "3695": 409792640.0, "3700": 418492224.0, "3705": 407275264.0, "3710": 409974720.0, "3715": 413826144.0, "3720": 402374048.0, "3725": 413972576.0, "3730": 398623328.0, "3735": 414606528.0, "3740": 415424032.0, "3745": 413134336.0, "3750": 414617952.0, "3755": 416330208.0, "3760": 409055328.0, "3765": 414408640.0, "3770": 413945344.0, "3775": 412199360.0, "3780": 411479040.0, "3785": 
413934304.0, "3790": 411305856.0, "3795": 403618848.0, "3800": 415125152.0, "3805": 409917824.0, "3810": 413207296.0, "3815": 410972832.0, "3820": 409141536.0, "3825": 414579584.0, "3830": 415767136.0, "3835": 410013664.0, "3840": 418733376.0, "3845": 418061632.0, "3850": 408669472.0, "3855": 407323584.0, "3860": 412530848.0, "3865": 422753024.0, "3870": 417796416.0, "3875": 416397280.0, "3880": 417645088.0, "3885": 408435104.0, "3890": 421987264.0, "3895": 417238688.0, "3900": 406877856.0, "3905": 408381920.0, "3910": 410459424.0, "3915": 411175712.0, "3920": 409233952.0, "3925": 420930752.0, "3930": 421750016.0, "3935": 407358880.0, "3940": 407514624.0, "3945": 411217088.0, "3950": 408190272.0, "3955": 409803136.0, "3960": 412020640.0, "3965": 407735200.0, "3970": 410371776.0, "3975": 409705152.0, "3980": 422112512.0, "3985": 407128736.0, "3990": 417178592.0, "3995": 413297920.0, "4000": 409698176.0, "4005": 420258848.0, "4010": 420244864.0, "4015": 402531680.0, "4020": 410112672.0, "4025": 407380288.0, "4030": 413356480.0, "4035": 410748192.0, "4040": 411807744.0, "4045": 394843776.0, "4050": 422393984.0, "4055": 410409280.0, "4060": 414334656.0, "4065": 403559904.0, "4070": 411254304.0, "4075": 411364352.0, "4080": 409919552.0, "4085": 409736384.0, "4090": 407932288.0, "4095": 418265792.0, "4100": 415963616.0, "4105": 408086784.0, "4110": 410136544.0, "4115": 408845216.0, "4120": 409618816.0, "4125": 416037536.0, "4130": 409624960.0, "4135": 412100640.0, "4140": 417439584.0, "4145": 411498816.0, "4150": 417292544.0, "4155": 414913824.0, "4160": 414637152.0, "4165": 411904096.0, "4170": 416484480.0, "4175": 416332768.0, "4180": 412021344.0, "4185": 420826016.0, "4190": 414835840.0, "4195": 405726400.0, "4200": 422428256.0, "4205": 405694880.0, "4210": 414648960.0, "4215": 412796832.0, "4220": 409197408.0, "4225": 408443936.0, "4230": 411746336.0, "4235": 409837440.0, "4240": 410583488.0, "4245": 414693632.0, "4250": 412086176.0, "4255": 407968608.0, "4260": 414269056.0, "4265": 411968640.0, "4270": 406356800.0, "4275": 416421920.0, "4280": 409266624.0, "4285": 412446112.0, "4290": 412147872.0, "4295": 410671232.0, "4300": 406260288.0, "4305": 410496384.0, "4310": 415793792.0, "4315": 411732672.0, "4320": 413727264.0, "4325": 410913504.0, "4330": 407227264.0, "4335": 413551808.0, "4340": 413431392.0, "4345": 414332800.0, "4350": 411802176.0, "4355": 419960704.0, "4360": 407151296.0, "4365": 415364512.0, "4370": 413754816.0, "4375": 407905152.0, "4380": 417097568.0, "4385": 400661792.0, "4390": 412819616.0, "4395": 411881952.0, "4400": 413938848.0, "4405": 417654400.0, "4410": 416624480.0, "4415": 409992096.0, "4420": 415535200.0, "4425": 407119328.0, "4430": 405722560.0, "4435": 403976640.0, "4440": 405504704.0, "4445": 417671136.0, "4450": 410327776.0, "4455": 412817216.0, "4460": 415225664.0, "4465": 407161824.0, "4470": 414280800.0, "4475": 418563584.0, "4480": 413903488.0, "4485": 413127872.0, "4490": 413457664.0, "4495": 406281664.0, "4500": 417955712.0, "4505": 412241728.0, "4510": 404721024.0, "4515": 415187456.0, "4520": 406925216.0, "4525": 401550080.0, "4530": 413591296.0, "4535": 416092480.0, "4540": 412014560.0, "4545": 418894944.0, "4550": 406141920.0, "4555": 415870336.0, "4560": 411942368.0, "4565": 415976224.0, "4570": 415518112.0, "4575": 410449248.0, "4580": 415252224.0, "4585": 419184736.0, "4590": 415910368.0, "4595": 411055616.0, "4600": 411264832.0, "4605": 410518304.0, "4610": 409325280.0, "4615": 411109728.0, "4620": 408890688.0, "4625": 418087232.0, "4630": 
413940352.0, "4635": 412645088.0, "4640": 406234368.0, "4645": 414096480.0, "4650": 413141376.0, "4655": 408596512.0, "4660": 415387776.0, "4665": 411063232.0, "4670": 406029664.0, "4675": 407430880.0, "4680": 412055680.0, "4685": 401745728.0, "4690": 412044832.0, "4695": 413025344.0, "4700": 411681312.0, "4705": 414058816.0, "4710": 406651808.0, "4715": 412724064.0, "4720": 418113920.0, "4725": 414824128.0, "4730": 400922080.0, "4735": 406288608.0, "4740": 411914368.0, "4745": 405708096.0, "4750": 409217664.0, "4755": 405916224.0, "4760": 412484608.0, "4765": 411705376.0, "4770": 414673632.0, "4775": 407479136.0, "4780": 414025120.0, "4785": 416548192.0, "4790": 415748288.0, "4795": 403483456.0, "4800": 410502208.0, "4805": 412299616.0, "4810": 405208352.0, "4815": 413816128.0, "4820": 415489792.0, "4825": 409353504.0, "4830": 410960128.0, "4835": 411584032.0, "4840": 419490784.0, "4845": 404203680.0, "4850": 404549888.0, "4855": 411618208.0, "4860": 415919584.0, "4865": 415302048.0, "4870": 402873856.0, "4875": 416305152.0, "4880": 415537376.0, "4885": 419962912.0, "4890": 409056320.0, "4895": 410478528.0, "4900": 410586272.0, "4905": 409008608.0, "4910": 405707456.0, "4915": 412932736.0, "4920": 409827872.0, "4925": 405078656.0, "4930": 418716736.0, "4935": 414275552.0, "4940": 413338656.0, "4945": 421138688.0, "4950": 418281824.0, "4955": 416253888.0, "4960": 410127296.0, "4965": 409405984.0, "4970": 414358784.0, "4975": 411737280.0, "4980": 415942784.0, "4985": 411343200.0, "4990": 411474368.0, "4995": 423269824.0, "5000": 407563648.0, "5005": 418379392.0, "5010": 414690784.0, "5015": 409344768.0, "5020": 417524576.0, "5025": 413819264.0, "5030": 414376032.0, "5035": 406852320.0, "5040": 415648864.0, "5045": 415759520.0, "5050": 411830656.0, "5055": 415553952.0, "5060": 412482400.0, "5065": 412398688.0, "5070": 405816576.0, "5075": 402048224.0, "5080": 414842752.0, "5085": 418147168.0, "5090": 408623872.0, "5095": 417380032.0, "5100": 415809952.0, "5105": 418508416.0, "5110": 410621600.0, "5115": 422605856.0, "5120": 414143872.0, "5125": 416544384.0, "5130": 409353824.0, "5135": 417951360.0, "5140": 413618976.0, "5145": 411649120.0, "5150": 413689760.0, "5155": 415771200.0, "5160": 411748288.0, "5165": 408962816.0, "5170": 422838784.0, "5175": 415303456.0, "5180": 408430912.0, "5185": 415903136.0, "5190": 409364832.0, "5195": 402516128.0, "5200": 416642688.0, "5205": 413341856.0, "5210": 414537408.0, "5215": 418592000.0, "5220": 407284704.0, "5225": 418565504.0, "5230": 415479520.0, "5235": 410750272.0, "5240": 410033984.0, "5245": 422290816.0, "5250": 410491776.0, "5255": 412067840.0, "5260": 417271232.0, "5265": 410531680.0, "5270": 418451936.0, "5275": 407225600.0, "5280": 408365440.0, "5285": 410938240.0, "5290": 412958592.0, "5295": 410426592.0, "5300": 405680160.0, "5305": 416860768.0, "5310": 406253984.0, "5315": 413700576.0, "5320": 410473824.0, "5325": 410751040.0, "5330": 407536896.0, "5335": 414707712.0, "5340": 418531392.0, "5345": 405538752.0, "5350": 419328256.0, "5355": 410057536.0, "5360": 410364000.0, "5365": 409321472.0, "5370": 414751040.0, "5375": 404871360.0, "5380": 415536864.0, "5385": 407600352.0, "5390": 415230496.0, "5395": 415805632.0, "5400": 400676640.0, "5405": 416262016.0, "5410": 415449632.0, "5415": 405927520.0, "5420": 409388800.0, "5425": 416509984.0, "5430": 409756992.0, "5435": 410588448.0, "5440": 408689792.0, "5445": 405268960.0, "5450": 410723680.0, "5455": 407342560.0, "5460": 414340224.0, "5465": 408173472.0, "5470": 408912160.0, "5475": 
401477760.0, "5480": 405450464.0, "5485": 422295616.0, "5490": 410424448.0, "5495": 413924320.0, "5500": 406822464.0, "5505": 409163520.0, "5510": 411805920.0, "5515": 417173344.0, "5520": 411198176.0, "5525": 407156512.0, "5530": 412776256.0, "5535": 408211936.0, "5540": 414024128.0, "5545": 412604960.0, "5550": 411528672.0, "5555": 410828320.0, "5560": 414422336.0, "5565": 402186688.0, "5570": 416007168.0, "5575": 421389376.0, "5580": 404493408.0, "5585": 415418656.0, "5590": 412116896.0, "5595": 409707168.0, "5600": 416096544.0, "5605": 416291840.0, "5610": 410628832.0, "5615": 410274944.0, "5620": 411877760.0, "5625": 413396064.0, "5630": 414317376.0, "5635": 409598144.0, "5640": 409744544.0, "5645": 409829152.0, "5650": 411830976.0, "5655": 403955456.0, "5660": 412690272.0, "5665": 422409760.0, "5670": 410467232.0, "5675": 409453376.0, "5680": 414754336.0, "5685": 404178976.0, "5690": 412773888.0, "5695": 416490624.0, "5700": 413477888.0, "5705": 409529504.0, "5710": 405643296.0, "5715": 417145984.0, "5720": 424193600.0, "5725": 408262624.0, "5730": 409810432.0, "5735": 410582464.0, "5740": 412896896.0, "5745": 407158624.0, "5750": 408692928.0, "5755": 418964320.0, "5760": 405334144.0, "5765": 414495968.0, "5770": 416003744.0, "5775": 408651360.0, "5780": 412729120.0, "5785": 407661248.0, "5790": 405484512.0, "5795": 418275968.0, "5800": 417263712.0, "5805": 412918400.0, "5810": 416687040.0, "5815": 412139232.0, "5820": 410384384.0, "5825": 414542112.0, "5830": 405836000.0, "5835": 413131488.0, "5840": 406463392.0, "5845": 417842400.0, "5850": 410096256.0, "5855": 406545248.0, "5860": 414617824.0, "5865": 408112800.0, "5870": 412289824.0, "5875": 414215392.0, "5880": 418810144.0, "5885": 409215744.0, "5890": 412968896.0, "5895": 419949088.0, "5900": 415930624.0, "5905": 409976032.0, "5910": 414271200.0, "5915": 414415392.0, "5920": 412545472.0, "5925": 409269952.0, "5930": 407360512.0, "5935": 424579648.0, "5940": 412652128.0, "5945": 414457632.0, "5950": 421293504.0, "5955": 408359616.0, "5960": 412468640.0, "5965": 408258240.0, "5970": 415215360.0, "5975": 409514880.0, "5980": 417034368.0, "5985": 418633184.0, "5990": 410178176.0, "5995": 413919168.0, "6000": 419520352.0, "6005": 415239104.0, "6010": 419231232.0, "6015": 402347328.0, "6020": 399863168.0, "6025": 414671424.0, "6030": 411501472.0, "6035": 411903488.0, "6040": 408689120.0, "6045": 414185664.0, "6050": 410457376.0, "6055": 408779040.0, "6060": 413989888.0, "6065": 409872224.0, "6070": 406638784.0, "6075": 417546112.0, "6080": 418662400.0, "6085": 412227936.0, "6090": 409829504.0, "6095": 420235872.0, "6100": 420687904.0, "6105": 419752768.0, "6110": 414240128.0, "6115": 416979616.0, "6120": 410813088.0, "6125": 420775296.0, "6130": 414392512.0, "6135": 402908832.0, "6140": 415079136.0, "6145": 412475168.0, "6150": 413650496.0, "6155": 415987264.0, "6160": 410109792.0, "6165": 410914848.0, "6170": 419718144.0, "6175": 404852000.0, "6180": 412647680.0, "6185": 419911008.0, "6190": 424141280.0, "6195": 404354848.0, "6200": 411179936.0, "6205": 416988864.0, "6210": 412141216.0, "6215": 408968608.0, "6220": 410462240.0, "6225": 418114272.0, "6230": 415512544.0, "6235": 405726848.0, "6240": 407823296.0, "6245": 411398592.0, "6250": 408122656.0, "6255": 409049184.0, "6260": 402491360.0, "6265": 417888256.0, "6270": 413504256.0, "6275": 415064608.0, "6280": 418293312.0, "6285": 414474912.0, "6290": 412037696.0, "6295": 408849376.0, "6300": 406872640.0, "6305": 418181408.0, "6310": 413462720.0, "6315": 411011232.0, "6320": 
415919264.0, "6325": 402057216.0, "6330": 416937216.0, "6335": 413977664.0, "6340": 415472064.0, "6345": 411637696.0, "6350": 413891776.0, "6355": 416075808.0, "6360": 407340192.0, "6365": 410253568.0, "6370": 420419808.0, "6375": 406305536.0, "6380": 410392352.0, "6385": 409390784.0, "6390": 409201696.0, "6395": 415783488.0, "6400": 422215680.0, "6405": 418600768.0, "6410": 416305056.0, "6415": 407342720.0, "6420": 409539520.0, "6425": 418037312.0, "6430": 415412480.0, "6435": 419614464.0, "6440": 408266368.0, "6445": 416459008.0, "6450": 408738336.0, "6455": 412933152.0, "6460": 413721408.0, "6465": 412628544.0, "6470": 409547008.0, "6475": 409976800.0, "6480": 408545984.0, "6485": 411313024.0, "6490": 405337536.0, "6495": 406970368.0, "6500": 415919264.0, "6505": 405728864.0, "6510": 413457984.0, "6515": 403535072.0, "6520": 411318528.0, "6525": 411361984.0, "6530": 412324128.0, "6535": 409042560.0, "6540": 410574464.0, "6545": 406107616.0, "6550": 412579136.0, "6555": 408207136.0, "6560": 411922816.0, "6565": 411164704.0, "6570": 418533088.0, "6575": 407066656.0, "6580": 405066848.0, "6585": 416190048.0, "6590": 416191168.0, "6595": 410655328.0, "6600": 411244224.0, "6605": 413200768.0, "6610": 417191680.0, "6615": 420748960.0, "6620": 405779840.0, "6625": 416103200.0, "6630": 407017408.0, "6635": 414521792.0, "6640": 405278240.0, "6645": 406523200.0, "6650": 406931648.0, "6655": 413613376.0, "6660": 414563008.0, "6665": 412135584.0, "6670": 410724992.0, "6675": 417232192.0, "6680": 419401024.0, "6685": 412502752.0, "6690": 413545568.0, "6695": 407073920.0, "6700": 408843424.0, "6705": 417056448.0, "6710": 408931456.0, "6715": 414520768.0, "6720": 401651040.0, "6725": 409633920.0, "6730": 411201312.0, "6735": 416888192.0, "6740": 408538112.0, "6745": 413812032.0, "6750": 418813824.0, "6755": 420703840.0, "6760": 410733792.0, "6765": 413956352.0, "6770": 413415712.0, "6775": 415320128.0, "6780": 409670048.0, "6785": 407195840.0, "6790": 410960320.0, "6795": 410349696.0, "6800": 406878432.0, "6805": 412728704.0, "6810": 401731040.0, "6815": 418275232.0, "6820": 409352672.0, "6825": 408759488.0, "6830": 415232960.0, "6835": 407831040.0, "6840": 408660608.0, "6845": 415775424.0, "6850": 402559680.0, "6855": 413455392.0, "6860": 416469760.0, "6865": 407666464.0, "6870": 411248256.0, "6875": 414474688.0, "6880": 407927008.0, "6885": 415798336.0, "6890": 407465472.0, "6895": 414825984.0, "6900": 410038912.0, "6905": 408221536.0, "6910": 412163008.0, "6915": 421952224.0, "6920": 420001280.0, "6925": 408516160.0, "6930": 413124640.0, "6935": 419486304.0, "6940": 410064416.0, "6945": 403108288.0, "6950": 413670560.0, "6955": 418639840.0, "6960": 410886432.0, "6965": 413994400.0, "6970": 408210848.0, "6975": 420139488.0, "6980": 422410624.0, "6985": 413416192.0, "6990": 416343264.0, "6995": 418594016.0, "7000": 404222880.0, "7005": 408002848.0, "7010": 411176288.0, "7015": 414187840.0, "7020": 411182816.0, "7025": 413032288.0, "7030": 410072000.0, "7035": 410427616.0, "7040": 411604832.0, "7045": 411177024.0, "7050": 410123104.0, "7055": 408955808.0, "7060": 416490784.0, "7065": 418334016.0, "7070": 408956928.0, "7075": 410037024.0, "7080": 415860480.0, "7085": 414028128.0, "7090": 404953440.0, "7095": 403129440.0, "7100": 412246848.0, "7105": 410494848.0, "7110": 418449344.0, "7115": 418523840.0, "7120": 415547328.0, "7125": 405309376.0, "7130": 413242464.0, "7135": 413060448.0, "7140": 414061376.0, "7145": 411340256.0, "7150": 411983424.0, "7155": 419354016.0, "7160": 422701856.0, "7165": 
418116224.0, "7170": 413168384.0, "7175": 408594944.0, "7180": 411186048.0, "7185": 411898176.0, "7190": 412556896.0, "7195": 412622432.0, "7200": 407742528.0, "7205": 413136320.0, "7210": 414105376.0, "7215": 420888576.0, "7220": 407711680.0, "7225": 417766400.0, "7230": 407574368.0, "7235": 414970400.0, "7240": 409377024.0, "7245": 411056640.0, "7250": 406508480.0, "7255": 416823328.0, "7260": 412148192.0, "7265": 410191520.0, "7270": 416624896.0, "7275": 406961184.0, "7280": 413014880.0, "7285": 420289120.0, "7290": 413619392.0, "7295": 417693664.0, "7300": 413333472.0, "7305": 414994976.0, "7310": 420674560.0, "7315": 410930400.0, "7320": 419948800.0, "7325": 412490176.0, "7330": 408953376.0, "7335": 418182016.0, "7340": 406592928.0, "7345": 407002656.0, "7350": 408902528.0, "7355": 409314912.0, "7360": 412783936.0, "7365": 414183488.0, "7370": 413134080.0, "7375": 411669408.0, "7380": 412257024.0, "7385": 416493664.0, "7390": 409422560.0, "7395": 409351904.0, "7400": 414836896.0, "7405": 410537472.0, "7410": 402139008.0, "7415": 416332672.0, "7420": 411339040.0, "7425": 415920256.0, "7430": 413435744.0, "7435": 413010400.0, "7440": 420834816.0, "7445": 414140192.0, "7450": 422984416.0, "7455": 411846272.0, "7460": 417251872.0, "7465": 414833408.0, "7470": 418128768.0, "7475": 401456480.0, "7480": 410454560.0, "7485": 407158720.0, "7490": 407271424.0, "7495": 406760160.0, "7500": 418712096.0, "7505": 410179840.0, "7510": 414208224.0, "7515": 416275680.0, "7520": 407319520.0, "7525": 409797472.0, "7530": 413474048.0, "7535": 408676352.0, "7540": 413293120.0, "7545": 407205824.0, "7550": 406476576.0, "7555": 408163264.0, "7560": 420129184.0, "7565": 406882048.0, "7570": 407344928.0, "7575": 409943520.0, "7580": 411737920.0, "7585": 410520928.0, "7590": 409222048.0, "7595": 410075520.0, "7600": 397652640.0, "7605": 414515648.0, "7610": 404504480.0, "7615": 409364128.0, "7620": 417444192.0, "7625": 419103296.0, "7630": 410806880.0, "7635": 407835168.0, "7640": 411260768.0, "7645": 410432992.0, "7650": 420688960.0, "7655": 412450304.0, "7660": 410455040.0, "7665": 411401664.0, "7670": 408576416.0, "7675": 415501440.0, "7680": 416188608.0, "7685": 419033216.0, "7690": 411282624.0, "7695": 417976576.0, "7700": 414076256.0, "7705": 405264320.0, "7710": 410157728.0, "7715": 407155648.0, "7720": 416976384.0, "7725": 408501248.0, "7730": 416364800.0, "7735": 413589728.0, "7740": 410914976.0, "7745": 420447136.0, "7750": 411967808.0, "7755": 407266432.0, "7760": 406105536.0, "7765": 406774208.0, "7770": 413397440.0, "7775": 413940928.0, "7780": 410368704.0, "7785": 414090144.0, "7790": 413296480.0, "7795": 412613568.0, "7800": 409219264.0, "7805": 416098400.0, "7810": 416416032.0, "7815": 407279328.0, "7820": 401837728.0, "7825": 410666048.0, "7830": 412957216.0, "7835": 412029536.0, "7840": 407136672.0, "7845": 414754848.0, "7850": 417605856.0, "7855": 411267616.0, "7860": 410886848.0, "7865": 413619328.0, "7870": 409684384.0, "7875": 418857312.0, "7880": 416502912.0, "7885": 414877088.0, "7890": 410579488.0, "7895": 404974144.0, "7900": 417941952.0, "7905": 404370944.0, "7910": 412630080.0, "7915": 409939008.0, "7920": 410927680.0, "7925": 419168960.0, "7930": 403948352.0, "7935": 407401632.0, "7940": 413863648.0, "7945": 407751424.0, "7950": 411886336.0, "7955": 414418112.0, "7960": 412805856.0, "7965": 415668800.0, "7970": 411159424.0, "7975": 410108160.0, "7980": 408999808.0, "7985": 406482656.0, "7990": 421238112.0, "7995": 412914720.0, "8000": 403148832.0, "8005": 415348096.0, "8010": 
413520128.0, "8015": 420566816.0, "8020": 417048960.0, "8025": 411959360.0, "8030": 412768704.0, "8035": 410022048.0, "8040": 410921024.0, "8045": 415959456.0, "8050": 416052736.0, "8055": 410067168.0, "8060": 405645536.0, "8065": 422264288.0, "8070": 411569984.0, "8075": 406331968.0, "8080": 420963840.0, "8085": 406949056.0, "8090": 413934080.0, "8095": 413813536.0, "8100": 413212320.0, "8105": 409859520.0, "8110": 411375392.0, "8115": 414319904.0, "8120": 414148896.0, "8125": 413546080.0, "8130": 411237280.0, "8135": 414400736.0, "8140": 408621664.0, "8145": 408976000.0, "8150": 411160864.0, "8155": 410147808.0, "8160": 413472544.0, "8165": 408491680.0, "8170": 418536064.0, "8175": 406545344.0, "8180": 411404704.0, "8185": 409659264.0, "8190": 409559616.0, "8195": 426112416.0, "8200": 407031168.0, "8205": 407949312.0, "8210": 420351360.0, "8215": 408576224.0, "8220": 410751904.0, "8225": 411471744.0, "8230": 421275488.0, "8235": 420523456.0, "8240": 411313408.0, "8245": 414484960.0, "8250": 407650336.0, "8255": 407370240.0, "8260": 414557824.0, "8265": 415232128.0, "8270": 405578048.0, "8275": 413208384.0, "8280": 412582176.0, "8285": 416926240.0, "8290": 411026048.0, "8295": 414802784.0, "8300": 417152256.0, "8305": 412022912.0, "8310": 416720320.0, "8315": 406089856.0, "8320": 416743104.0, "8325": 413310560.0, "8330": 411676736.0, "8335": 413853792.0, "8340": 412155776.0, "8345": 413108832.0, "8350": 424779520.0, "8355": 411029792.0, "8360": 407738208.0, "8365": 416022144.0, "8370": 405123648.0, "8375": 413804896.0, "8380": 408006560.0, "8385": 423808576.0, "8390": 407714272.0, "8395": 405706176.0, "8400": 414459072.0, "8405": 410676672.0, "8410": 415040000.0, "8415": 404157312.0, "8420": 409021952.0, "8425": 421387616.0, "8430": 407226848.0, "8435": 417420800.0, "8440": 407571552.0, "8445": 411870304.0, "8450": 412792256.0, "8455": 406514880.0, "8460": 415874592.0, "8465": 414739104.0, "8470": 406856096.0, "8475": 416833952.0, "8480": 418750688.0, "8485": 412597088.0, "8490": 417197664.0, "8495": 412568000.0, "8500": 417243968.0, "8505": 409116224.0, "8510": 411809728.0, "8515": 416817472.0, "8520": 409968448.0, "8525": 412785536.0, "8530": 411504032.0, "8535": 405125088.0, "8540": 416134784.0, "8545": 418816480.0, "8550": 406889088.0, "8555": 419189024.0, "8560": 412590976.0, "8565": 407753216.0, "8570": 407849664.0, "8575": 406468576.0, "8580": 406308736.0, "8585": 404395296.0, "8590": 406092256.0, "8595": 412735200.0, "8600": 419818400.0, "8605": 414261632.0, "8610": 409613472.0, "8615": 407544224.0, "8620": 410802496.0, "8625": 413307520.0, "8630": 409204992.0, "8635": 413171872.0, "8640": 410261216.0, "8645": 411353664.0, "8650": 416697152.0, "8655": 409004096.0, "8660": 413107776.0, "8665": 415116032.0, "8670": 411578816.0, "8675": 408032736.0, "8680": 413872320.0, "8685": 405209696.0, "8690": 409211136.0, "8695": 412897376.0, "8700": 410777248.0, "8705": 414163168.0, "8710": 406856064.0, "8715": 417048704.0, "8720": 408712864.0, "8725": 414099584.0, "8730": 403707936.0, "8735": 422542304.0, "8740": 409303936.0, "8745": 413730944.0, "8750": 414152512.0, "8755": 410934688.0, "8760": 413575200.0, "8765": 401472928.0, "8770": 416217472.0, "8775": 414733824.0, "8780": 413209600.0, "8785": 410492480.0, "8790": 412890944.0, "8795": 412617120.0, "8800": 416109920.0, "8805": 409996192.0, "8810": 412817376.0, "8815": 404952448.0, "8820": 407072032.0, "8825": 410587648.0, "8830": 404508064.0, "8835": 406684672.0, "8840": 416230048.0, "8845": 414742080.0, "8850": 416130720.0, "8855": 
411390208.0, "8860": 416832992.0, "8865": 409744608.0, "8870": 411435744.0, "8875": 412866496.0, "8880": 415099904.0, "8885": 412171520.0, "8890": 412308256.0, "8895": 414633504.0, "8900": 415377056.0, "8905": 405739648.0, "8910": 413369216.0, "8915": 411755680.0, "8920": 415236096.0, "8925": 414453600.0, "8930": 403219584.0, "8935": 409606272.0, "8940": 414365632.0, "8945": 407541600.0, "8950": 415771616.0, "8955": 407967808.0, "8960": 414441792.0, "8965": 410897760.0, "8970": 409142272.0, "8975": 415192992.0, "8980": 408475008.0, "8985": 411167680.0, "8990": 412006016.0, "8995": 412865216.0, "9000": 401304384.0, "9005": 400419328.0, "9010": 414002112.0, "9015": 402873824.0, "9020": 418202752.0, "9025": 417285504.0, "9030": 413407776.0, "9035": 405182624.0, "9040": 419542976.0, "9045": 416871776.0, "9050": 413578784.0, "9055": 406230464.0, "9060": 412801152.0, "9065": 417140672.0, "9070": 407559456.0, "9075": 406870816.0, "9080": 413543840.0, "9085": 410924448.0, "9090": 412875104.0, "9095": 412833184.0, "9100": 414381056.0, "9105": 407347296.0, "9110": 406486624.0, "9115": 414560384.0, "9120": 412069984.0, "9125": 408426592.0, "9130": 410067040.0, "9135": 411938976.0, "9140": 415269024.0, "9145": 411111456.0, "9150": 416717376.0, "9155": 407653024.0, "9160": 410239200.0, "9165": 418049632.0, "9170": 411452032.0, "9175": 409892544.0, "9180": 420411904.0, "9185": 411007040.0, "9190": 414603648.0, "9195": 408889088.0, "9200": 409896736.0, "9205": 407005312.0, "9210": 417979360.0, "9215": 413843584.0, "9220": 409853216.0, "9225": 417698688.0, "9230": 406272000.0, "9235": 410517664.0, "9240": 412901600.0, "9245": 409604992.0, "9250": 417096960.0, "9255": 408220512.0, "9260": 411130368.0, "9265": 417643520.0, "9270": 416375328.0, "9275": 408731008.0, "9280": 410640864.0, "9285": 410339808.0, "9290": 401487808.0, "9295": 407186272.0, "9300": 410763808.0, "9305": 407818752.0, "9310": 413769728.0, "9315": 408964608.0, "9320": 410909024.0, "9325": 415346208.0, "9330": 408267936.0, "9335": 413888544.0, "9340": 404055840.0, "9345": 413376992.0, "9350": 403697600.0, "9355": 415597440.0, "9360": 415162144.0, "9365": 413489536.0, "9370": 417059616.0, "9375": 408546304.0, "9380": 408203456.0, "9385": 408861376.0, "9390": 416811520.0, "9395": 415061792.0, "9400": 411105920.0, "9405": 413291072.0, "9410": 413338176.0, "9415": 416755424.0, "9420": 413716448.0, "9425": 409065600.0, "9430": 413523264.0, "9435": 413639712.0, "9440": 408699168.0, "9445": 419652640.0, "9450": 414472064.0, "9455": 408244416.0, "9460": 414673728.0, "9465": 416351072.0, "9470": 418433408.0, "9475": 407980256.0, "9480": 416227200.0, "9485": 408080480.0, "9490": 410566848.0, "9495": 413251744.0, "9500": 409858944.0, "9505": 414259936.0, "9510": 418457568.0, "9515": 414517216.0, "9520": 405098816.0, "9525": 424286272.0, "9530": 406830464.0, "9535": 412372544.0, "9540": 412981248.0, "9545": 413239520.0, "9550": 412368512.0, "9555": 413964320.0, "9560": 413154880.0, "9565": 423675136.0, "9570": 424538496.0, "9575": 404446464.0, "9580": 414057952.0, "9585": 420364448.0, "9590": 412200480.0, "9595": 415085632.0, "9600": 411042016.0, "9605": 405953376.0, "9610": 422740288.0, "9615": 415438848.0, "9620": 414154240.0, "9625": 405530784.0, "9630": 406533888.0, "9635": 411131008.0, "9640": 413932352.0, "9645": 409117504.0, "9650": 409042304.0, "9655": 408254592.0, "9660": 418753696.0, "9665": 416892576.0, "9670": 408330560.0, "9675": 418751840.0, "9680": 409090080.0, "9685": 412928704.0, "9690": 408639776.0, "9695": 406004736.0, "9700": 
408253024.0, "9705": 406170016.0, "9710": 407588928.0, "9715": 417776992.0, "9720": 411612672.0, "9725": 413713152.0, "9730": 426722848.0, "9735": 417437248.0, "9740": 410141120.0, "9745": 411516064.0, "9750": 410208352.0, "9755": 411149024.0, "9760": 414812000.0, "9765": 413852512.0, "9770": 407052288.0, "9775": 417422880.0, "9780": 409129312.0, "9785": 409848960.0, "9790": 420097504.0, "9795": 411559328.0, "9800": 413744864.0, "9805": 411370592.0, "9810": 412127488.0, "9815": 411120256.0, "9820": 415185440.0, "9825": 419961696.0, "9830": 412715424.0, "9835": 408663424.0, "9840": 410441504.0, "9845": 413534528.0, "9850": 408503488.0, "9855": 420038240.0, "9860": 407947168.0, "9865": 415907424.0, "9870": 411165184.0, "9875": 417988832.0, "9880": 411939360.0, "9885": 418495488.0, "9890": 402734848.0, "9895": 410954528.0, "9900": 410775872.0, "9905": 416969408.0, "9910": 411849440.0, "9915": 409483776.0, "9920": 407867872.0, "9925": 408473376.0, "9930": 409840352.0, "9935": 412729696.0, "9940": 411596960.0, "9945": 420695296.0, "9950": 411323040.0, "9955": 417103520.0, "9960": 412130688.0, "9965": 421842624.0, "9970": 411712256.0, "9975": 416296832.0, "9980": 414924640.0, "9985": 405896640.0, "9990": 417420128.0, "9995": 409667232.0, "10000": 408546368.0, "10005": 414422560.0, "10010": 411809344.0, "10015": 408426720.0, "10020": 412929728.0, "10025": 411050368.0, "10030": 404597248.0, "10035": 419796544.0, "10040": 404044736.0, "10045": 419312320.0, "10050": 414831616.0, "10055": 402750688.0, "10060": 408559488.0, "10065": 411439808.0, "10070": 414239392.0, "10075": 405740160.0, "10080": 414244416.0, "10085": 407204224.0, "10090": 412761632.0, "10095": 410258816.0, "10100": 419288672.0, "10105": 410064512.0, "10110": 408369440.0, "10115": 409325248.0, "10120": 410993152.0, "10125": 414836224.0, "10130": 403206624.0, "10135": 420892224.0, "10140": 407284032.0, "10145": 411036832.0, "10150": 410959968.0, "10155": 418004800.0, "10160": 410854016.0, "10165": 399429344.0, "10170": 413005056.0, "10175": 416504800.0, "10180": 409687744.0, "10185": 404901888.0, "10190": 413437120.0, "10195": 415316928.0, "10200": 417973600.0, "10205": 408190048.0, "10210": 403579488.0, "10215": 413030656.0, "10220": 415774784.0, "10225": 413510752.0, "10230": 406398080.0, "10235": 399496320.0, "10240": 412154976.0, "10245": 412717184.0, "10250": 415781664.0, "10255": 405600768.0, "10260": 412603456.0, "10265": 419725536.0, "10270": 410327136.0, "10275": 410872640.0, "10280": 422142240.0, "10285": 411367008.0, "10290": 412404960.0, "10295": 408069888.0, "10300": 413136032.0, "10305": 412755424.0, "10310": 410516544.0, "10315": 411768224.0, "10320": 414626432.0, "10325": 407476736.0, "10330": 414777504.0, "10335": 412302304.0, "10340": 415860608.0, "10345": 404312800.0, "10350": 412132320.0, "10355": 409012448.0, "10360": 417747712.0, "10365": 407714528.0, "10370": 409917824.0, "10375": 411830976.0, "10380": 415290016.0, "10385": 407590208.0, "10390": 409379104.0, "10395": 401845696.0, "10400": 415579136.0, "10405": 406720832.0, "10410": 415027552.0, "10415": 423592160.0, "10420": 414203872.0, "10425": 408743168.0, "10430": 418728960.0, "10435": 405940128.0, "10440": 408961600.0, "10445": 418898592.0, "10450": 415710976.0, "10455": 407853824.0, "10460": 409096864.0, "10465": 409708512.0, "10470": 410563968.0, "10475": 411725536.0, "10480": 406110112.0, "10485": 411865024.0, "10490": 408792832.0, "10495": 422467584.0, "10500": 412526656.0, "10505": 418026784.0, "10510": 418583712.0, "10515": 407682432.0, "10520": 
418193472.0, "10525": 409466144.0, "10530": 414850144.0, "10535": 417215008.0, "10540": 412196192.0, "10545": 419763136.0, "10550": 417567008.0, "10555": 411761056.0, "10560": 412128608.0, "10565": 415278848.0, "10570": 412259104.0, "10575": 409392224.0, "10580": 414499808.0, "10585": 413844768.0, "10590": 419263328.0, "10595": 405077184.0, "10600": 418756576.0, "10605": 406381344.0, "10610": 421521792.0, "10615": 412273056.0, "10620": 403744928.0, "10625": 416457152.0, "10630": 408952064.0, "10635": 407388224.0, "10640": 406198368.0, "10645": 409089632.0, "10650": 415350496.0, "10655": 414625888.0, "10660": 415597664.0, "10665": 417030464.0, "10670": 410609632.0, "10675": 410426272.0, "10680": 413295936.0, "10685": 415730080.0, "10690": 414618816.0, "10695": 411701856.0, "10700": 414164064.0, "10705": 405438528.0, "10710": 412640512.0, "10715": 418950208.0, "10720": 415138816.0, "10725": 402323232.0, "10730": 409926688.0, "10735": 412191968.0, "10740": 411048160.0, "10745": 419079744.0, "10750": 415550688.0, "10755": 415333888.0, "10760": 414848896.0, "10765": 403140064.0, "10770": 417096064.0, "10775": 410852512.0, "10780": 412427296.0, "10785": 407605440.0, "10790": 417905536.0, "10795": 405633824.0, "10800": 413240672.0, "10805": 408137152.0, "10810": 417027712.0, "10815": 416096960.0, "10820": 405817056.0, "10825": 415029120.0, "10830": 414977312.0, "10835": 407951296.0, "10840": 418338560.0, "10845": 415426912.0, "10850": 414051040.0, "10855": 415582912.0, "10860": 413299328.0, "10865": 412765344.0, "10870": 411410336.0, "10875": 405817792.0, "10880": 410960128.0, "10885": 404020992.0, "10890": 403172960.0, "10895": 416512096.0, "10900": 407624096.0, "10905": 415894944.0, "10910": 411088480.0, "10915": 414825056.0, "10920": 412478560.0, "10925": 410916768.0, "10930": 412603392.0, "10935": 413458400.0, "10940": 412722144.0, "10945": 405761728.0, "10950": 411560320.0, "10955": 412372288.0, "10960": 413540608.0, "10965": 409147616.0, "10970": 414295968.0, "10975": 416842080.0, "10980": 408909216.0, "10985": 413429600.0, "10990": 417227232.0, "10995": 411200000.0, "11000": 415601632.0, "11005": 410343168.0, "11010": 410161568.0, "11015": 418019616.0, "11020": 409704544.0, "11025": 417635168.0, "11030": 418413248.0, "11035": 408634112.0, "11040": 407234624.0, "11045": 421668992.0, "11050": 415139680.0, "11055": 408374080.0, "11060": 423486464.0, "11065": 406636480.0, "11070": 415289600.0, "11075": 427618848.0, "11080": 408135360.0, "11085": 406595584.0, "11090": 412016480.0, "11095": 410720160.0, "11100": 414174112.0, "11105": 415821280.0, "11110": 412278080.0, "11115": 412713728.0, "11120": 407254240.0, "11125": 408671552.0, "11130": 413290496.0, "11135": 407937760.0, "11140": 420625792.0, "11145": 411428160.0, "11150": 416215680.0, "11155": 413802752.0, "11160": 418419040.0, "11165": 409293312.0, "11170": 407100832.0, "11175": 413822368.0, "11180": 411715744.0, "11185": 410193088.0, "11190": 407475040.0, "11195": 407254592.0, "11200": 406563072.0, "11205": 406279104.0, "11210": 417148768.0, "11215": 414135808.0, "11220": 411097760.0, "11225": 411025792.0, "11230": 418707488.0, "11235": 410264224.0, "11240": 404302784.0, "11245": 408345344.0, "11250": 408167840.0, "11255": 414848960.0, "11260": 404115648.0, "11265": 413668960.0, "11270": 411132128.0, "11275": 411943072.0, "11280": 419106656.0, "11285": 409942432.0, "11290": 412118208.0, "11295": 416910848.0, "11300": 415490944.0, "11305": 421972384.0, "11310": 406474368.0, "11315": 417059936.0, "11320": 409635680.0, "11325": 411009984.0, 
"11330": 408042016.0, "11335": 405550272.0, "11340": 404200416.0, "11345": 425513408.0, "11350": 417452096.0, "11355": 407166464.0, "11360": 408816768.0, "11365": 410824736.0, "11370": 413809504.0, "11375": 415665344.0, "11380": 418128896.0, "11385": 408463904.0, "11390": 412911136.0, "11395": 413824352.0, "11400": 409451776.0, "11405": 406553760.0, "11410": 419921984.0, "11415": 408794208.0, "11420": 411120896.0, "11425": 412439360.0, "11430": 404860608.0, "11435": 413861344.0, "11440": 405865184.0, "11445": 415111680.0, "11450": 413078176.0, "11455": 406271584.0, "11460": 411439456.0, "11465": 414858528.0, "11470": 407806272.0, "11475": 416459136.0, "11480": 418457248.0, "11485": 413355200.0, "11490": 415274784.0, "11495": 406024576.0, "11500": 414152320.0, "11505": 411005120.0, "11510": 412118624.0, "11515": 404804192.0, "11520": 410701216.0, "11525": 415470944.0, "11530": 407971360.0, "11535": 417393504.0, "11540": 415992288.0, "11545": 406661440.0, "11550": 410028640.0, "11555": 415374272.0, "11560": 419562080.0, "11565": 412970624.0, "11570": 418135168.0, "11575": 416772192.0, "11580": 418963904.0, "11585": 414261056.0, "11590": 407014016.0, "11595": 409681856.0, "11600": 416364128.0, "11605": 420550336.0, "11610": 413533120.0, "11615": 416219008.0, "11620": 413565408.0, "11625": 418886336.0, "11630": 408693376.0, "11635": 405187744.0, "11640": 415264640.0, "11645": 409664224.0, "11650": 411536864.0, "11655": 420440864.0, "11660": 411645664.0, "11665": 418992832.0, "11670": 412020864.0, "11675": 410979296.0, "11680": 418270272.0, "11685": 411550368.0, "11690": 408138944.0, "11695": 411546304.0, "11700": 410798496.0, "11705": 417733792.0, "11710": 408334976.0, "11715": 420163552.0, "11720": 419259840.0, "11725": 406272800.0, "11730": 413031488.0, "11735": 408866080.0, "11740": 420040288.0, "11745": 408937056.0, "11750": 411252160.0, "11755": 408878400.0, "11760": 411591136.0, "11765": 411266464.0, "11770": 409228192.0, "11775": 409231936.0, "11780": 411919712.0, "11785": 408894368.0, "11790": 401975968.0, "11795": 422692160.0, "11800": 402394176.0, "11805": 412524064.0, "11810": 413109152.0, "11815": 412114944.0, "11820": 411876704.0, "11825": 410690720.0, "11830": 409952704.0, "11835": 408326560.0, "11840": 413930112.0, "11845": 412722720.0, "11850": 406969888.0, "11855": 418399744.0, "11860": 408881056.0, "11865": 411774400.0, "11870": 413210752.0, "11875": 422621856.0, "11880": 411305184.0, "11885": 416833056.0, "11890": 402993920.0, "11895": 408376928.0, "11900": 420744704.0, "11905": 412085312.0, "11910": 410836480.0, "11915": 411683296.0, "11920": 414540928.0, "11925": 408622816.0, "11930": 419215616.0, "11935": 412868576.0, "11940": 410205600.0, "11945": 407758688.0, "11950": 409012032.0, "11955": 406615904.0, "11960": 408368192.0, "11965": 409624992.0, "11970": 412846240.0, "11975": 415597856.0, "11980": 414852160.0, "11985": 407889376.0, "11990": 412461248.0, "11995": 418140800.0, "12000": 410746816.0, "12005": 409569280.0, "12010": 419411712.0, "12015": 417327040.0, "12020": 407118368.0, "12025": 416855232.0, "12030": 413153280.0, "12035": 404798848.0, "12040": 415409536.0, "12045": 410132832.0, "12050": 402671936.0, "12055": 407687488.0, "12060": 410433888.0, "12065": 410123872.0, "12070": 413427520.0, "12075": 416451488.0, "12080": 405156672.0, "12085": 410431456.0, "12090": 414276864.0, "12095": 404107936.0, "12100": 413848096.0, "12105": 404293280.0, "12110": 403661408.0, "12115": 419016288.0, "12120": 407211968.0, "12125": 410902272.0, "12130": 417281216.0, "12135": 
408161312.0, "12140": 411862208.0, "12145": 415915296.0, "12150": 412607584.0, "12155": 411900192.0, "12160": 414870912.0, "12165": 405738400.0, "12170": 407013952.0, "12175": 413703808.0, "12180": 418950464.0, "12185": 412473824.0, "12190": 412017504.0, "12195": 411160000.0, "12200": 406638880.0, "12205": 415155840.0, "12210": 405992096.0, "12215": 408423648.0, "12220": 411270176.0, "12225": 415799232.0, "12230": 411274688.0, "12235": 422778368.0, "12240": 410755712.0, "12245": 405793056.0, "12250": 419530144.0, "12255": 413614368.0, "12260": 405176640.0, "12265": 417504640.0, "12270": 417308576.0, "12275": 408280128.0, "12280": 418902112.0, "12285": 409246656.0, "12290": 410370656.0, "12295": 416016448.0, "12300": 421222560.0, "12305": 408819584.0, "12310": 409157824.0, "12315": 410070112.0, "12320": 415686688.0, "12325": 415186336.0, "12330": 416669824.0, "12335": 406768128.0, "12340": 411128672.0, "12345": 413732064.0, "12350": 405150144.0, "12355": 411681728.0, "12360": 413339072.0, "12365": 409117248.0, "12370": 418479232.0, "12375": 408352000.0, "12380": 414502400.0, "12385": 418008064.0, "12390": 413079456.0, "12395": 412165984.0, "12400": 413485728.0, "12405": 416033696.0, "12410": 412691424.0, "12415": 413615936.0, "12420": 414259008.0, "12425": 414500000.0, "12430": 411059744.0, "12435": 407387904.0, "12440": 413258848.0, "12445": 412662432.0, "12450": 414722752.0, "12455": 410525248.0, "12460": 417394848.0, "12465": 413967776.0, "12470": 408419328.0, "12475": 411140320.0, "12480": 415167456.0, "12485": 404822816.0, "12490": 413831680.0, "12495": 414624704.0, "12500": 410444672.0, "12505": 414527232.0, "12510": 420614784.0, "12515": 406614048.0, "12520": 421921792.0, "12525": 412366176.0, "12530": 413131456.0, "12535": 412542624.0, "12540": 416288768.0, "12545": 408971616.0, "12550": 420787968.0, "12555": 409925920.0, "12560": 406613600.0, "12565": 422669056.0, "12570": 419796544.0, "12575": 416046272.0, "12580": 414697920.0, "12585": 415686816.0, "12590": 412502432.0, "12595": 414022912.0, "12600": 414394656.0, "12605": 404905344.0, "12610": 420848448.0, "12615": 405013152.0, "12620": 407337344.0, "12625": 411300640.0, "12630": 411431168.0, "12635": 407171808.0, "12640": 414158592.0, "12645": 414524416.0, "12650": 410151840.0, "12655": 411493920.0, "12660": 412069248.0, "12665": 408891872.0, "12670": 411470944.0, "12675": 406736288.0, "12680": 404449024.0, "12685": 408532416.0, "12690": 420236896.0, "12695": 407348448.0, "12700": 418227712.0, "12705": 413313696.0, "12710": 410616640.0, "12715": 414380992.0, "12720": 413138880.0, "12725": 405271168.0, "12730": 419750944.0, "12735": 411663552.0, "12740": 406919072.0, "12745": 418517536.0, "12750": 404245856.0, "12755": 420271552.0, "12760": 408251360.0, "12765": 413217568.0, "12770": 409243744.0, "12775": 425068352.0, "12780": 409274272.0, "12785": 412750688.0, "12790": 413930912.0, "12795": 410439040.0, "12800": 412567872.0, "12805": 410813440.0, "12810": 416920608.0, "12815": 410995200.0, "12820": 400904960.0, "12825": 410594688.0, "12830": 408662368.0, "12835": 411080800.0, "12840": 408251488.0, "12845": 417899296.0, "12850": 408899552.0, "12855": 408979168.0, "12860": 403446112.0, "12865": 416702944.0, "12870": 411819008.0, "12875": 405528096.0, "12880": 413816928.0, "12885": 405726912.0, "12890": 418475456.0, "12895": 410197600.0, "12900": 410879232.0, "12905": 409214080.0, "12910": 409689248.0, "12915": 407435680.0, "12920": 418262784.0, "12925": 416432096.0, "12930": 411551680.0, "12935": 415365120.0, "12940": 416879424.0, 
"12945": 412531488.0, "12950": 413980704.0, "12955": 409763456.0, "12960": 406190880.0, "12965": 417407776.0, "12970": 417784128.0, "12975": 408084064.0, "12980": 406986304.0, "12985": 413431072.0, "12990": 413062496.0, "12995": 403686560.0, "13000": 409830240.0, "13005": 413120544.0, "13010": 410637376.0, "13015": 414519232.0, "13020": 407713856.0, "13025": 409718944.0, "13030": 414333920.0, "13035": 407620256.0, "13040": 409713984.0, "13045": 416574080.0, "13050": 408604736.0, "13055": 410952704.0, "13060": 421799648.0, "13065": 405425856.0, "13070": 416728384.0, "13075": 403621952.0, "13080": 407640896.0, "13085": 416645120.0, "13090": 407433280.0, "13095": 411587040.0, "13100": 410043456.0, "13105": 410902720.0, "13110": 415525152.0, "13115": 421501056.0, "13120": 418126304.0, "13125": 408681792.0, "13130": 418451616.0, "13135": 413828192.0, "13140": 410624928.0, "13145": 413532448.0, "13150": 409461920.0, "13155": 413518720.0, "13160": 408442528.0, "13165": 405225952.0, "13170": 414841568.0, "13175": 421366752.0, "13180": 397430720.0, "13185": 411445632.0, "13190": 405336960.0, "13195": 416476160.0, "13200": 408484512.0, "13205": 408177888.0, "13210": 413869536.0, "13215": 409628832.0, "13220": 417283712.0, "13225": 409061952.0, "13230": 414705184.0, "13235": 417927968.0, "13240": 414298752.0, "13245": 415580576.0, "13250": 413407424.0, "13255": 412974976.0, "13260": 413565184.0, "13265": 414463168.0, "13270": 409329696.0, "13275": 418510016.0, "13280": 417415904.0, "13285": 411215776.0, "13290": 406803008.0, "13295": 411913248.0, "13300": 407031072.0, "13305": 403564672.0, "13310": 417877440.0, "13315": 409358336.0, "13320": 413899136.0, "13325": 410265376.0, "13330": 410489664.0, "13335": 418160480.0, "13340": 409089472.0, "13345": 409335008.0, "13350": 417286752.0, "13355": 421810848.0, "13360": 408425280.0, "13365": 409436064.0, "13370": 409453216.0, "13375": 418280960.0, "13380": 406735488.0, "13385": 417722976.0, "13390": 409961952.0, "13395": 413549824.0, "13400": 418855904.0, "13405": 413443520.0, "13410": 407536960.0, "13415": 412546848.0, "13420": 411450080.0, "13425": 419369376.0, "13430": 421891936.0, "13435": 413805984.0, "13440": 411668864.0, "13445": 415440192.0, "13450": 406840352.0, "13455": 408240928.0, "13460": 406282400.0, "13465": 411025376.0, "13470": 413889728.0, "13475": 408914912.0, "13480": 414800384.0, "13485": 405272288.0, "13490": 415775648.0, "13495": 411256480.0, "13500": 415882048.0, "13505": 412087104.0, "13510": 406816576.0, "13515": 412712704.0, "13520": 412722464.0, "13525": 407291584.0, "13530": 405826752.0, "13535": 414407104.0, "13540": 408288608.0, "13545": 414361696.0, "13550": 415654496.0, "13555": 404187744.0, "13560": 402826624.0, "13565": 410329408.0, "13570": 404881312.0, "13575": 407904224.0, "13580": 415397536.0, "13585": 411594432.0, "13590": 419413312.0, "13595": 411412288.0, "13600": 404394688.0, "13605": 410450880.0, "13610": 414975200.0, "13615": 406833760.0, "13620": 408087968.0, "13625": 410133184.0, "13630": 412668736.0, "13635": 416457792.0, "13640": 416559872.0, "13645": 410011584.0, "13650": 414298112.0, "13655": 413672704.0, "13660": 415541952.0, "13665": 404737952.0, "13670": 412807296.0, "13675": 403850944.0, "13680": 410867808.0, "13685": 413006944.0, "13690": 402459296.0, "13695": 410499968.0, "13700": 415350656.0, "13705": 415519808.0, "13710": 411063296.0, "13715": 412784704.0, "13720": 414678176.0, "13725": 413326496.0, "13730": 414718720.0, "13735": 411526560.0, "13740": 410191872.0, "13745": 415553120.0, "13750": 
408176736.0, "13755": 411188896.0, "13760": 418642752.0, "13765": 403943488.0, "13770": 410706208.0, "13775": 421784000.0, "13780": 401848704.0, "13785": 413083840.0, "13790": 408701344.0, "13795": 412159712.0, "13800": 414576288.0, "13805": 415399776.0, "13810": 415291232.0, "13815": 408548992.0, "13820": 419688608.0, "13825": 422072512.0, "13830": 407593632.0, "13835": 414592640.0, "13840": 413097248.0, "13845": 406286016.0, "13850": 419206528.0, "13855": 416199456.0, "13860": 411497120.0, "13865": 411257760.0, "13870": 399909728.0, "13875": 415954976.0, "13880": 411727296.0, "13885": 414691808.0, "13890": 412712672.0, "13895": 414798688.0, "13900": 408718976.0, "13905": 406569824.0, "13910": 408801952.0, "13915": 419521696.0, "13920": 410160832.0, "13925": 411017792.0, "13930": 415245504.0, "13935": 414783328.0, "13940": 412461600.0, "13945": 406525600.0, "13950": 414846656.0, "13955": 411463968.0, "13960": 408687616.0, "13965": 424239808.0, "13970": 410129152.0, "13975": 421840288.0, "13980": 410722464.0, "13985": 418414048.0, "13990": 409817248.0, "13995": 409174112.0, "14000": 418791424.0, "14005": 408044160.0, "14010": 413448160.0, "14015": 410661376.0, "14020": 412764416.0, "14025": 407725024.0, "14030": 421845408.0, "14035": 408548640.0, "14040": 422276512.0, "14045": 416018080.0, "14050": 410348576.0, "14055": 417619488.0, "14060": 413714976.0, "14065": 411696128.0, "14070": 407409280.0, "14075": 416009696.0, "14080": 410871488.0, "14085": 410335840.0, "14090": 418868800.0, "14095": 412314112.0, "14100": 416181696.0, "14105": 410995232.0, "14110": 416013664.0, "14115": 414352224.0, "14120": 416279872.0, "14125": 414578144.0, "14130": 423094080.0, "14135": 416709280.0, "14140": 415364160.0, "14145": 409716864.0, "14150": 402872672.0, "14155": 410204736.0, "14160": 417188928.0, "14165": 413691296.0, "14170": 404048256.0, "14175": 402111072.0, "14180": 415891872.0, "14185": 415879200.0, "14190": 414757088.0, "14195": 415861120.0, "14200": 407519744.0, "14205": 406147136.0, "14210": 423777120.0, "14215": 406042624.0, "14220": 415267648.0, "14225": 414275008.0, "14230": 406277696.0, "14235": 415252800.0, "14240": 409540192.0, "14245": 409021408.0, "14250": 406884896.0, "14255": 410244928.0, "14260": 407702176.0, "14265": 410933216.0, "14270": 411787360.0, "14275": 416134400.0, "14280": 416729760.0, "14285": 414818880.0, "14290": 413000064.0, "14295": 416314496.0, "14300": 413598656.0, "14305": 411383136.0, "14310": 416492608.0, "14315": 421526944.0, "14320": 407868544.0, "14325": 414247168.0, "14330": 410984096.0, "14335": 413162560.0, "14340": 408633536.0, "14345": 416434432.0, "14350": 415300224.0, "14355": 418249536.0, "14360": 409774816.0, "14365": 416090912.0, "14370": 416409600.0, "14375": 414104128.0, "14380": 410687104.0, "14385": 414415168.0, "14390": 414879328.0, "14395": 415693600.0, "14400": 409904960.0, "14405": 419112576.0, "14410": 407639648.0, "14415": 412166144.0, "14420": 413321248.0, "14425": 418076544.0, "14430": 409916736.0, "14435": 413159456.0, "14440": 416749088.0, "14445": 406774688.0, "14450": 420130720.0, "14455": 407202688.0, "14460": 417543840.0, "14465": 420561856.0, "14470": 415703328.0, "14475": 412444576.0, "14480": 404467520.0, "14485": 417221984.0, "14490": 407680608.0, "14495": 423042816.0, "14500": 409576896.0, "14505": 415304160.0, "14510": 418562784.0, "14515": 416727584.0, "14520": 410239456.0, "14525": 415299296.0, "14530": 407114560.0, "14535": 413976704.0, "14540": 408982144.0, "14545": 416324352.0, "14550": 413738336.0, "14555": 419268800.0, 
"14560": 413567072.0, "14565": 406441056.0, "14570": 416082528.0, "14575": 408263936.0, "14580": 406788384.0, "14585": 412245248.0, "14590": 411627584.0, "14595": 417970880.0, "14600": 410206912.0, "14605": 404276864.0, "14610": 408275936.0, "14615": 406303552.0, "14620": 406699520.0, "14625": 402493216.0, "14630": 417191360.0, "14635": 409951712.0, "14640": 413971296.0, "14645": 410732608.0, "14650": 419665952.0, "14655": 410892128.0, "14660": 417510592.0, "14665": 416208256.0, "14670": 407930912.0, "14675": 416659168.0, "14680": 414453152.0, "14685": 412396576.0, "14690": 412540960.0, "14695": 406871296.0, "14700": 412381856.0, "14705": 409182464.0, "14710": 416658240.0, "14715": 409526464.0, "14720": 415364288.0, "14725": 419898400.0, "14730": 407706464.0, "14735": 409696896.0, "14740": 407569568.0, "14745": 409038432.0, "14750": 406059168.0, "14755": 413585824.0, "14760": 408727136.0, "14765": 410991744.0, "14770": 415528032.0, "14775": 409001888.0, "14780": 415028192.0, "14785": 405303776.0, "14790": 416580192.0, "14795": 412760192.0, "14800": 415346112.0, "14805": 405160800.0, "14810": 412222400.0, "14815": 414623520.0, "14820": 415404672.0, "14825": 409574272.0, "14830": 409529952.0, "14835": 414853568.0, "14840": 413147456.0, "14845": 415736256.0, "14850": 408705472.0, "14855": 417778592.0, "14860": 418513152.0, "14865": 416533184.0, "14870": 408146368.0, "14875": 412090848.0, "14880": 414093888.0, "14885": 405867040.0, "14890": 411877440.0, "14895": 407555072.0, "14900": 415250848.0, "14905": 423733696.0, "14910": 407903520.0, "14915": 407109920.0, "14920": 409711200.0, "14925": 409307936.0, "14930": 408806240.0, "14935": 408255680.0, "14940": 410288768.0, "14945": 421324960.0, "14950": 409863488.0, "14955": 426891872.0, "14960": 408511168.0, "14965": 407903040.0, "14970": 416530528.0, "14975": 413523744.0, "14980": 416370368.0, "14985": 419032448.0, "14990": 417035744.0, "14995": 419578912.0, "15000": 409854208.0, "15005": 412337984.0, "15010": 409421888.0, "15015": 417957920.0, "15020": 415072832.0, "15025": 413163776.0, "15030": 408815968.0, "15035": 414432736.0, "15040": 412784000.0, "15045": 423253376.0, "15050": 413874624.0, "15055": 409401152.0, "15060": 421933408.0, "15065": 416799904.0, "15070": 414962656.0, "15075": 411044736.0, "15080": 416055072.0, "15085": 412309376.0, "15090": 406389824.0, "15095": 410270816.0, "15100": 414598624.0, "15105": 411615264.0, "15110": 409754688.0, "15115": 414265120.0, "15120": 404839712.0, "15125": 411061696.0, "15130": 404832736.0, "15135": 410470336.0, "15140": 409518976.0, "15145": 412262208.0, "15150": 415050912.0, "15155": 408247072.0, "15160": 415959008.0, "15165": 412947040.0, "15170": 410110176.0, "15175": 412553120.0, "15180": 410077024.0, "15185": 406095744.0, "15190": 412136384.0, "15195": 408065952.0, "15200": 412062208.0, "15205": 420192576.0, "15210": 410823904.0, "15215": 413144608.0, "15220": 415382016.0, "15225": 417372800.0, "15230": 416038752.0, "15235": 406142752.0, "15240": 415812416.0, "15245": 413040672.0, "15250": 415098656.0, "15255": 408788960.0, "15260": 411995680.0, "15265": 419607104.0, "15270": 407993504.0, "15275": 407719872.0, "15280": 406518720.0, "15285": 410664640.0, "15290": 413924224.0, "15295": 410627040.0, "15300": 412336128.0, "15305": 407287904.0, "15310": 412857760.0, "15315": 412955904.0, "15320": 416188992.0, "15325": 408672864.0, "15330": 410820960.0, "15335": 410834400.0, "15340": 416285760.0, "15345": 414151360.0, "15350": 415670720.0, "15355": 416403520.0, "15360": 412893632.0, "15365": 
410460480.0, "15370": 417863840.0, "15375": 408740736.0, "15380": 414764000.0, "15385": 406151968.0, "15390": 408435040.0, "15395": 404361568.0, "15400": 412108384.0, "15405": 416664640.0, "15410": 403639584.0, "15415": 410581600.0, "15420": 408688384.0, "15425": 407743648.0, "15430": 415006336.0, "15435": 411452672.0, "15440": 411309504.0, "15445": 415068576.0, "15450": 407302400.0, "15455": 416666912.0, "15460": 411426720.0, "15465": 414243296.0, "15470": 410563744.0, "15475": 413199392.0, "15480": 408377088.0, "15485": 414441376.0, "15490": 402914688.0, "15495": 406727200.0, "15500": 412219424.0, "15505": 409593312.0, "15510": 416310592.0, "15515": 412990240.0, "15520": 415406304.0, "15525": 412404736.0, "15530": 405131904.0, "15535": 413649152.0, "15540": 410180448.0, "15545": 411101312.0, "15550": 417093280.0, "15555": 415319200.0, "15560": 414882752.0, "15565": 413146560.0, "15570": 411031136.0, "15575": 410587712.0, "15580": 415831776.0, "15585": 407161472.0, "15590": 408318784.0, "15595": 413395072.0, "15600": 422923040.0, "15605": 407846912.0, "15610": 414782176.0, "15615": 406407616.0, "15620": 411322592.0, "15625": 417929856.0, "15630": 411131328.0, "15635": 410442368.0, "15640": 405841344.0, "15645": 416878112.0, "15650": 408601376.0, "15655": 420728608.0, "15660": 410564768.0, "15665": 406776800.0, "15670": 411317568.0, "15675": 410319744.0, "15680": 411803136.0, "15685": 414977664.0, "15690": 410910336.0, "15695": 402848928.0, "15700": 415281888.0, "15705": 411143424.0, "15710": 411257184.0, "15715": 419271424.0, "15720": 404420320.0, "15725": 414472384.0, "15730": 406743552.0, "15735": 413415104.0, "15740": 409985984.0, "15745": 409416160.0, "15750": 414228832.0, "15755": 410854176.0, "15760": 411808416.0, "15765": 410689600.0, "15770": 415544736.0, "15775": 413528768.0, "15780": 412614208.0, "15785": 419238848.0, "15790": 415279776.0, "15795": 402530144.0, "15800": 400187456.0, "15805": 419200288.0, "15810": 402608000.0, "15815": 413333216.0, "15820": 405927616.0, "15825": 419995104.0, "15830": 412335104.0, "15835": 411689440.0, "15840": 406825280.0, "15845": 400994144.0, "15850": 412722880.0, "15855": 413202496.0, "15860": 405936704.0, "15865": 417395648.0, "15870": 418256800.0, "15875": 410102880.0, "15880": 414144512.0, "15885": 418020896.0, "15890": 399555936.0, "15895": 408643904.0, "15900": 400248672.0, "15905": 414156160.0, "15910": 408552352.0, "15915": 419763840.0, "15920": 417301632.0, "15925": 412372000.0, "15930": 417099968.0, "15935": 409354976.0, "15940": 412597152.0, "15945": 411395744.0, "15950": 414579232.0, "15955": 414588288.0, "15960": 414784000.0, "15965": 409058784.0, "15970": 415115776.0, "15975": 411202112.0, "15980": 412655264.0, "15985": 412630880.0, "15990": 407841344.0, "15995": 407433568.0, "16000": 406330080.0, "16005": 413597760.0, "16010": 410475584.0, "16015": 413428096.0, "16020": 411761056.0, "16025": 415310816.0, "16030": 418308480.0, "16035": 410456576.0, "16040": 414281760.0, "16045": 408104768.0, "16050": 409537536.0, "16055": 410442432.0, "16060": 413546080.0, "16065": 420094752.0, "16070": 415805856.0, "16075": 418105024.0, "16080": 414016640.0, "16085": 411060032.0, "16090": 409966336.0, "16095": 407192032.0, "16100": 410071200.0, "16105": 407796960.0, "16110": 418954720.0, "16115": 402412608.0, "16120": 410179200.0, "16125": 420190784.0, "16130": 411948800.0, "16135": 413352992.0, "16140": 410060832.0, "16145": 415431808.0, "16150": 412078784.0, "16155": 417236352.0, "16160": 418583072.0, "16165": 409410432.0, "16170": 413004000.0, 
"16175": 418524448.0, "16180": 413674624.0, "16185": 409374560.0, "16190": 415922240.0, "16195": 420739712.0, "16200": 408675296.0, "16205": 417168768.0, "16210": 415788384.0, "16215": 411862336.0, "16220": 418702848.0, "16225": 405015136.0, "16230": 414699712.0, "16235": 420401888.0, "16240": 405553856.0, "16245": 415039168.0, "16250": 405476736.0, "16255": 405968992.0, "16260": 412313600.0, "16265": 411656032.0, "16270": 397777216.0, "16275": 411084736.0, "16280": 418679072.0, "16285": 408191072.0, "16290": 409756064.0, "16295": 405374432.0, "16300": 416271488.0, "16305": 410740224.0, "16310": 408023744.0, "16315": 416846688.0, "16320": 414391232.0, "16325": 406383104.0, "16330": 416990112.0, "16335": 419768672.0, "16340": 405962240.0, "16345": 409994944.0, "16350": 409851328.0, "16355": 410252576.0, "16360": 397448352.0, "16365": 410630784.0, "16370": 416448576.0, "16375": 410467072.0, "16380": 412298720.0, "16385": 404117696.0, "16390": 415204416.0, "16395": 410437408.0, "16400": 408767488.0, "16405": 409017472.0, "16410": 419975360.0, "16415": 408901696.0, "16420": 417251520.0, "16425": 417590944.0, "16430": 407039552.0, "16435": 410673344.0, "16440": 411449088.0, "16445": 405280288.0, "16450": 408103584.0, "16455": 410093504.0, "16460": 412898752.0, "16465": 409915264.0, "16470": 412757184.0, "16475": 409369856.0, "16480": 421086976.0, "16485": 413041728.0, "16490": 407729856.0, "16495": 406525504.0, "16500": 408861920.0, "16505": 411517632.0, "16510": 400307744.0, "16515": 412775904.0, "16520": 413983168.0, "16525": 413946560.0, "16530": 415362176.0, "16535": 411289088.0, "16540": 406579104.0, "16545": 409507200.0, "16550": 416984768.0, "16555": 411712128.0, "16560": 411108160.0, "16565": 417144224.0, "16570": 411755520.0, "16575": 416767360.0, "16580": 409511072.0, "16585": 403773792.0, "16590": 410465920.0, "16595": 418275040.0, "16600": 404110400.0, "16605": 415543776.0, "16610": 409333344.0, "16615": 413393696.0, "16620": 415796544.0, "16625": 418101984.0, "16630": 411935808.0, "16635": 411524416.0, "16640": 411596480.0, "16645": 411391456.0, "16650": 407070752.0, "16655": 408097472.0, "16660": 403870752.0, "16665": 409262080.0, "16670": 408783424.0, "16675": 411940576.0, "16680": 410668128.0, "16685": 417450688.0, "16690": 412334304.0, "16695": 413868608.0, "16700": 413275840.0, "16705": 411869728.0, "16710": 411813760.0, "16715": 415566496.0, "16720": 409464064.0, "16725": 411162272.0, "16730": 416419744.0, "16735": 406519040.0, "16740": 416270688.0, "16745": 408093696.0, "16750": 401768000.0, "16755": 419641248.0, "16760": 410720192.0, "16765": 408930592.0, "16770": 417169280.0, "16775": 412318240.0, "16780": 411441152.0, "16785": 410339520.0, "16790": 406785408.0, "16795": 413269408.0, "16800": 412652736.0, "16805": 413761824.0, "16810": 407143904.0, "16815": 398762144.0, "16820": 412300000.0, "16825": 410142528.0, "16830": 411137248.0, "16835": 413753088.0, "16840": 406025440.0, "16845": 416578624.0, "16850": 414128640.0, "16855": 408140096.0, "16860": 410130080.0, "16865": 418445792.0, "16870": 412142688.0, "16875": 414610208.0, "16880": 414089216.0, "16885": 414203488.0, "16890": 410220512.0, "16895": 412136576.0, "16900": 404245728.0, "16905": 411883744.0, "16910": 417924832.0, "16915": 412160736.0, "16920": 409933312.0, "16925": 411636800.0, "16930": 411711104.0, "16935": 419502752.0, "16940": 420369728.0, "16945": 410804960.0, "16950": 412839840.0, "16955": 413335328.0, "16960": 410441952.0, "16965": 412673440.0, "16970": 411890752.0, "16975": 411077472.0, "16980": 
412868384.0, "16985": 419945728.0, "16990": 420021024.0, "16995": 414498240.0, "17000": 424271904.0, "17005": 408959616.0, "17010": 414588704.0, "17015": 407926848.0, "17020": 405089152.0, "17025": 412011840.0, "17030": 410480896.0, "17035": 412896992.0, "17040": 413310624.0, "17045": 414115712.0, "17050": 403227776.0, "17055": 405561696.0, "17060": 415161664.0, "17065": 410759744.0, "17070": 411852864.0, "17075": 415567904.0, "17080": 417505920.0, "17085": 413173312.0, "17090": 419198304.0, "17095": 409453824.0, "17100": 417565824.0, "17105": 405874816.0, "17110": 416146208.0, "17115": 410682912.0, "17120": 413850304.0, "17125": 411942176.0, "17130": 417302016.0, "17135": 406648544.0, "17140": 414401120.0, "17145": 412664800.0, "17150": 414236704.0, "17155": 414040576.0, "17160": 405515712.0, "17165": 417028928.0, "17170": 407496704.0, "17175": 418039904.0, "17180": 419358784.0, "17185": 416814464.0, "17190": 414662080.0, "17195": 414272544.0, "17200": 409461312.0, "17205": 415089824.0, "17210": 404084128.0, "17215": 406718944.0, "17220": 408407392.0, "17225": 406881440.0, "17230": 412890368.0, "17235": 410715072.0, "17240": 414374336.0, "17245": 422363008.0, "17250": 404962144.0, "17255": 413649792.0, "17260": 420219104.0, "17265": 409226944.0, "17270": 417985696.0, "17275": 413836224.0, "17280": 407474432.0, "17285": 414575424.0, "17290": 411422464.0, "17295": 416559808.0, "17300": 405022816.0, "17305": 417162624.0, "17310": 407990336.0, "17315": 410011296.0, "17320": 415825312.0, "17325": 397266080.0, "17330": 416363104.0, "17335": 415021536.0, "17340": 410166240.0, "17345": 419658752.0, "17350": 416481696.0, "17355": 409198272.0, "17360": 409173696.0, "17365": 408720000.0, "17370": 402698560.0, "17375": 413788256.0, "17380": 415103744.0, "17385": 410347008.0, "17390": 416943456.0, "17395": 419531776.0, "17400": 409057376.0, "17405": 413921536.0, "17410": 414353920.0, "17415": 403810496.0, "17420": 404103360.0, "17425": 414293408.0, "17430": 406022208.0, "17435": 413980992.0, "17440": 404514592.0, "17445": 408378912.0, "17450": 413234336.0, "17455": 413222080.0, "17460": 413233728.0, "17465": 407130304.0, "17470": 407574880.0, "17475": 409252768.0, "17480": 406045856.0, "17485": 411345280.0, "17490": 409124896.0, "17495": 415279776.0, "17500": 417515456.0, "17505": 406856512.0, "17510": 416963008.0, "17515": 411771392.0, "17520": 411993728.0, "17525": 409277344.0, "17530": 411868896.0, "17535": 411795200.0, "17540": 412456256.0, "17545": 403057152.0, "17550": 416962976.0, "17555": 409887648.0, "17560": 408516864.0, "17565": 416728480.0, "17570": 405319008.0, "17575": 416102656.0, "17580": 411471296.0, "17585": 405907776.0, "17590": 417705184.0, "17595": 404117792.0, "17600": 409687424.0, "17605": 409258272.0, "17610": 409286080.0, "17615": 402023136.0, "17620": 402077664.0, "17625": 412052000.0, "17630": 412263680.0, "17635": 411722880.0, "17640": 413396000.0, "17645": 409792800.0, "17650": 414702816.0, "17655": 419367616.0, "17660": 411057344.0, "17665": 409318368.0, "17670": 405890528.0, "17675": 414989728.0, "17680": 413223008.0, "17685": 409427712.0, "17690": 421260512.0, "17695": 407057152.0, "17700": 415944576.0, "17705": 411937344.0, "17710": 406977056.0, "17715": 408781920.0, "17720": 410384768.0, "17725": 416174240.0, "17730": 411157408.0, "17735": 413663424.0, "17740": 415021184.0, "17745": 407883200.0, "17750": 409956032.0, "17755": 413235840.0, "17760": 406562624.0, "17765": 415111232.0, "17770": 420774336.0, "17775": 403822656.0, "17780": 411096768.0, "17785": 408572512.0, 
"17790": 411816000.0, "17795": 428176384.0, "17800": 412835552.0, "17805": 409153216.0, "17810": 412776160.0, "17815": 414328320.0, "17820": 408719328.0, "17825": 409909792.0, "17830": 414951104.0, "17835": 412529024.0, "17840": 419483840.0, "17845": 417010336.0, "17850": 409320064.0, "17855": 412081824.0, "17860": 417818432.0, "17865": 408932288.0, "17870": 411620960.0, "17875": 409838432.0, "17880": 420436032.0, "17885": 416236096.0, "17890": 415966208.0, "17895": 411588224.0, "17900": 412288384.0, "17905": 411527552.0, "17910": 411718176.0, "17915": 411054112.0, "17920": 408448736.0, "17925": 404238784.0, "17930": 415561184.0, "17935": 415511552.0, "17940": 406671424.0, "17945": 414050112.0, "17950": 420104288.0, "17955": 417602208.0, "17960": 422768192.0, "17965": 411753600.0, "17970": 410146464.0, "17975": 412731520.0, "17980": 410108448.0, "17985": 411090208.0, "17990": 412004032.0, "17995": 411395424.0, "18000": 408586336.0, "18005": 415413312.0, "18010": 418689120.0, "18015": 413002016.0, "18020": 407416448.0, "18025": 409518112.0, "18030": 411924544.0, "18035": 410168640.0, "18040": 418182496.0, "18045": 409345856.0, "18050": 416765472.0, "18055": 414939616.0, "18060": 412937408.0, "18065": 410702080.0, "18070": 409539744.0, "18075": 410191520.0, "18080": 414005888.0, "18085": 418112864.0, "18090": 402174592.0, "18095": 412341792.0, "18100": 418668480.0, "18105": 403326048.0, "18110": 410705120.0, "18115": 413746688.0, "18120": 418262720.0, "18125": 415185216.0, "18130": 408004352.0, "18135": 407257536.0, "18140": 418694272.0, "18145": 409612000.0, "18150": 409126784.0, "18155": 421613152.0, "18160": 409022464.0, "18165": 407235968.0, "18170": 408768608.0, "18175": 413341984.0, "18180": 410959200.0, "18185": 416813184.0, "18190": 411686944.0, "18195": 408024096.0, "18200": 413994080.0, "18205": 407721120.0, "18210": 418081088.0, "18215": 409212064.0, "18220": 408617280.0, "18225": 408477792.0, "18230": 414022432.0, "18235": 415237600.0, "18240": 411962944.0, "18245": 408796288.0, "18250": 416218496.0, "18255": 409488736.0, "18260": 419775904.0, "18265": 408268032.0, "18270": 408884320.0, "18275": 418253952.0, "18280": 414117184.0, "18285": 422164096.0, "18290": 415272320.0, "18295": 403429792.0, "18300": 420778048.0, "18305": 406472032.0, "18310": 421078752.0, "18315": 428271776.0, "18320": 412469792.0, "18325": 413505792.0, "18330": 413551840.0, "18335": 417399232.0, "18340": 415308672.0, "18345": 413452416.0, "18350": 415159936.0, "18355": 423988000.0, "18360": 413326976.0, "18365": 415819136.0, "18370": 416953632.0, "18375": 416352832.0, "18380": 406020288.0, "18385": 402692288.0, "18390": 412281408.0, "18395": 411488064.0, "18400": 416908032.0, "18405": 405128896.0, "18410": 404136896.0, "18415": 420793728.0, "18420": 413251168.0, "18425": 411588992.0, "18430": 411438656.0, "18435": 410584352.0, "18440": 408571712.0, "18445": 410723328.0, "18450": 413053888.0, "18455": 411314944.0, "18460": 406733504.0, "18465": 417485664.0, "18470": 412578240.0, "18475": 410450176.0, "18480": 419532800.0, "18485": 405182720.0, "18490": 424110240.0, "18495": 411417440.0, "18500": 413733792.0, "18505": 414077024.0, "18510": 416772192.0, "18515": 414103616.0, "18520": 413532096.0, "18525": 404788160.0, "18530": 409182944.0, "18535": 413908480.0, "18540": 408661216.0, "18545": 414731872.0, "18550": 408556128.0, "18555": 419916992.0, "18560": 414636448.0, "18565": 411830432.0, "18570": 405697248.0, "18575": 413559936.0, "18580": 418528832.0, "18585": 415098112.0, "18590": 424294912.0, "18595": 
417735616.0, "18600": 418607488.0, "18605": 411444832.0, "18610": 411269376.0, "18615": 412029568.0, "18620": 426012800.0, "18625": 408537920.0, "18630": 409526528.0, "18635": 412522528.0, "18640": 421155616.0, "18645": 413042880.0, "18650": 411304160.0, "18655": 407288288.0, "18660": 410924480.0, "18665": 410820448.0, "18670": 404554272.0, "18675": 410937760.0, "18680": 416849344.0, "18685": 419801088.0, "18690": 415872640.0, "18695": 404941760.0, "18700": 403837856.0, "18705": 413735936.0, "18710": 407225088.0, "18715": 415830592.0, "18720": 408649152.0, "18725": 411329952.0, "18730": 415407648.0, "18735": 419437088.0, "18740": 411228736.0, "18745": 417875072.0, "18750": 408765888.0, "18755": 415057216.0, "18760": 409725056.0, "18765": 410317312.0, "18770": 407080032.0, "18775": 412854080.0, "18780": 404218048.0, "18785": 409463776.0, "18790": 412215136.0, "18795": 407076736.0, "18800": 409819648.0, "18805": 419282240.0, "18810": 417554944.0, "18815": 408210752.0, "18820": 405972992.0, "18825": 416962464.0, "18830": 411566368.0, "18835": 413866176.0, "18840": 417086752.0, "18845": 407669056.0, "18850": 413386720.0, "18855": 407325760.0, "18860": 419151136.0, "18865": 418247168.0, "18870": 408904288.0, "18875": 409252128.0, "18880": 413337856.0, "18885": 408367840.0, "18890": 409471712.0, "18895": 415450688.0, "18900": 415241184.0, "18905": 413697184.0, "18910": 414744096.0, "18915": 414078720.0, "18920": 411159328.0, "18925": 420997408.0, "18930": 410636256.0, "18935": 411546912.0, "18940": 410695264.0, "18945": 420488864.0, "18950": 403756128.0, "18955": 417054720.0, "18960": 406674272.0, "18965": 412250976.0, "18970": 410864704.0, "18975": 416123136.0, "18980": 406221184.0, "18985": 414703648.0, "18990": 411617984.0, "18995": 412069152.0, "19000": 412452672.0, "19005": 411371904.0, "19010": 415940832.0, "19015": 415278400.0, "19020": 406676000.0, "19025": 406818592.0, "19030": 410924256.0, "19035": 404423520.0, "19040": 411263008.0, "19045": 413210048.0, "19050": 404284864.0, "19055": 413089216.0, "19060": 404966624.0, "19065": 420967456.0, "19070": 407560480.0, "19075": 407007008.0, "19080": 419041120.0, "19085": 407396800.0, "19090": 418151104.0, "19095": 411159744.0, "19100": 409445248.0, "19105": 408964320.0, "19110": 413994880.0, "19115": 403312672.0, "19120": 413587296.0, "19125": 403684896.0, "19130": 409341152.0, "19135": 419390880.0, "19140": 408339264.0, "19145": 415914720.0, "19150": 409693408.0, "19155": 412443776.0, "19160": 418487424.0, "19165": 401085440.0, "19170": 409712672.0, "19175": 404634656.0, "19180": 408693984.0, "19185": 413793216.0, "19190": 407558176.0, "19195": 414569088.0, "19200": 415310976.0, "19205": 414576896.0, "19210": 418405152.0, "19215": 407718368.0, "19220": 407674688.0, "19225": 407575072.0, "19230": 414898880.0, "19235": 406002912.0, "19240": 411461056.0, "19245": 408502528.0, "19250": 414922720.0, "19255": 419514144.0, "19260": 420330240.0, "19265": 409926304.0, "19270": 415170048.0, "19275": 413597728.0, "19280": 412718688.0, "19285": 414459232.0, "19290": 412367584.0, "19295": 409343264.0, "19300": 415081728.0, "19305": 418536768.0, "19310": 410024960.0, "19315": 420472128.0, "19320": 413504608.0, "19325": 419598208.0, "19330": 411151648.0, "19335": 408001760.0, "19340": 413904576.0, "19345": 415040000.0, "19350": 410595968.0, "19355": 415972832.0, "19360": 415639904.0, "19365": 415854624.0, "19370": 399086656.0, "19375": 401404960.0, "19380": 412635520.0, "19385": 405407968.0, "19390": 410645024.0, "19395": 411676864.0, "19400": 409174304.0, 
"19405": 411354720.0, "19410": 409340160.0, "19415": 422188096.0, "19420": 408540736.0, "19425": 412625088.0, "19430": 417311776.0, "19435": 409935136.0, "19440": 416479424.0, "19445": 421676800.0, "19450": 420131232.0, "19455": 415628384.0, "19460": 413899424.0, "19465": 417552448.0, "19470": 411887488.0, "19475": 415798464.0, "19480": 414589312.0, "19485": 416054400.0, "19490": 407583488.0, "19495": 413069312.0, "19500": 404278400.0, "19505": 415902848.0, "19510": 411389792.0, "19515": 415101920.0, "19520": 415152352.0, "19525": 407610816.0, "19530": 418696448.0, "19535": 404405568.0, "19540": 412501664.0, "19545": 423199904.0, "19550": 408898016.0, "19555": 416668000.0, "19560": 408852768.0, "19565": 416508608.0, "19570": 411215520.0, "19575": 414675008.0, "19580": 407011776.0, "19585": 415513600.0, "19590": 418818848.0, "19595": 412436800.0, "19600": 411322208.0, "19605": 413668032.0, "19610": 412980352.0, "19615": 412896416.0, "19620": 420611456.0, "19625": 409754496.0, "19630": 416618272.0, "19635": 407550272.0, "19640": 403947008.0, "19645": 405376416.0, "19650": 407347968.0, "19655": 409150976.0, "19660": 418987904.0, "19665": 412974656.0, "19670": 409403328.0, "19675": 407671616.0, "19680": 412717120.0, "19685": 415751456.0, "19690": 411047136.0, "19695": 409972896.0, "19700": 411169568.0, "19705": 409873632.0, "19710": 418028608.0, "19715": 408124320.0, "19720": 409305888.0, "19725": 409811296.0, "19730": 410352480.0, "19735": 408409728.0, "19740": 403330208.0, "19745": 406564128.0, "19750": 412861472.0, "19755": 417863712.0, "19760": 411193536.0, "19765": 409535520.0, "19770": 413668512.0, "19775": 412737760.0, "19780": 412347872.0, "19785": 408029952.0, "19790": 417492864.0, "19795": 410694656.0, "19800": 418248544.0, "19805": 412188704.0, "19810": 416297216.0, "19815": 407156352.0, "19820": 410343680.0, "19825": 410768064.0, "19830": 411478208.0, "19835": 408449600.0, "19840": 414658528.0, "19845": 419571712.0, "19850": 406370656.0, "19855": 412317152.0, "19860": 415860064.0, "19865": 412072288.0, "19870": 408675712.0, "19875": 414940416.0, "19880": 413205792.0, "19885": 409925088.0, "19890": 412050112.0, "19895": 410022208.0, "19900": 410076864.0, "19905": 412941632.0, "19910": 414265248.0, "19915": 412677952.0, "19920": 407747008.0, "19925": 420249664.0, "19930": 411714624.0, "19935": 415622752.0, "19940": 414423648.0, "19945": 410104608.0, "19950": 408990688.0, "19955": 416258816.0, "19960": 402491968.0, "19965": 408749312.0, "19970": 422251424.0, "19975": 408897920.0, "19980": 413090208.0, "19985": 414572864.0, "19990": 411537792.0, "19995": 413512448.0, "20000": 402571584.0, "20005": 408892352.0, "20010": 418078176.0, "20015": 410051232.0, "20020": 416125440.0, "20025": 405888928.0, "20030": 413850528.0, "20035": 407411840.0, "20040": 411193984.0, "20045": 417179488.0, "20050": 416623552.0, "20055": 413748704.0, "20060": 417663520.0, "20065": 412246112.0, "20070": 416390880.0, "20075": 411067008.0, "20080": 418700000.0, "20085": 424833312.0, "20090": 413295328.0, "20095": 413817408.0, "20100": 406728384.0, "20105": 419158464.0, "20110": 404201728.0, "20115": 412811712.0, "20120": 413002912.0, "20125": 411460096.0, "20130": 410464576.0, "20135": 410848032.0, "20140": 408535424.0, "20145": 404062624.0, "20150": 417030016.0, "20155": 414561088.0, "20160": 407075168.0, "20165": 412737056.0, "20170": 408379584.0, "20175": 419108288.0, "20180": 412537792.0, "20185": 405931328.0, "20190": 414433984.0, "20195": 413327232.0, "20200": 405767232.0, "20205": 409940480.0, "20210": 
416880032.0, "20215": 407885696.0, "20220": 413687008.0, "20225": 406863872.0, "20230": 416223264.0, "20235": 408208608.0, "20240": 423156224.0, "20245": 406586784.0, "20250": 409259904.0, "20255": 411870496.0, "20260": 412083808.0, "20265": 414865376.0, "20270": 414594368.0, "20275": 405084896.0, "20280": 415450528.0, "20285": 414019200.0, "20290": 413306048.0, "20295": 406318016.0, "20300": 410506656.0, "20305": 416361856.0, "20310": 414045184.0, "20315": 410525440.0, "20320": 410581760.0, "20325": 410115040.0, "20330": 420069472.0, "20335": 414462624.0, "20340": 408935296.0, "20345": 404071936.0, "20350": 403949376.0, "20355": 417248352.0, "20360": 406388160.0, "20365": 411886336.0, "20370": 412226528.0, "20375": 409747072.0, "20380": 416061440.0, "20385": 408754752.0, "20390": 413153472.0, "20395": 414884480.0, "20400": 409421984.0, "20405": 415203104.0, "20410": 421825888.0, "20415": 406807520.0, "20420": 411161408.0, "20425": 411039712.0, "20430": 411374080.0, "20435": 414512736.0, "20440": 409685856.0, "20445": 411539232.0, "20450": 411296512.0, "20455": 418509824.0, "20460": 407071584.0, "20465": 418167392.0, "20470": 413498240.0, "20475": 414127264.0, "20480": 412798080.0, "20485": 416335136.0, "20490": 409875008.0, "20495": 416315776.0, "20500": 413285408.0, "20505": 409784288.0, "20510": 419169952.0, "20515": 411711616.0, "20520": 414719776.0, "20525": 409345856.0, "20530": 409859936.0, "20535": 413857536.0, "20540": 408942464.0, "20545": 427381248.0, "20550": 405748448.0, "20555": 412881440.0, "20560": 415044064.0, "20565": 415025792.0, "20570": 415263072.0, "20575": 416403904.0, "20580": 403941504.0, "20585": 414418144.0, "20590": 408416864.0, "20595": 404913984.0, "20600": 408422240.0, "20605": 407511680.0, "20610": 408925568.0, "20615": 415699360.0, "20620": 408729632.0, "20625": 411372800.0, "20630": 415455040.0, "20635": 418445216.0, "20640": 415483168.0, "20645": 421597280.0, "20650": 416413280.0, "20655": 408119360.0, "20660": 411921472.0, "20665": 413589792.0, "20670": 413263776.0, "20675": 413051648.0, "20680": 412058560.0, "20685": 412828576.0, "20690": 409405440.0, "20695": 415803328.0, "20700": 409569792.0, "20705": 409886496.0, "20710": 411813120.0, "20715": 416499168.0, "20720": 413159648.0, "20725": 411166080.0, "20730": 411401184.0, "20735": 415447488.0, "20740": 419460384.0, "20745": 405167168.0, "20750": 417202528.0, "20755": 413087296.0, "20760": 422487296.0, "20765": 417031680.0, "20770": 408712128.0, "20775": 407251232.0, "20780": 410170592.0, "20785": 418906464.0, "20790": 423060064.0, "20795": 410388768.0, "20800": 408249504.0, "20805": 416964480.0, "20810": 413729056.0, "20815": 412908672.0, "20820": 416139584.0, "20825": 404774272.0, "20830": 417082656.0, "20835": 403044608.0, "20840": 410037248.0, "20845": 413325632.0, "20850": 416869600.0, "20855": 413176480.0, "20860": 405335232.0, "20865": 412475040.0, "20870": 415199168.0, "20875": 409440288.0, "20880": 413352352.0, "20885": 410202464.0, "20890": 409085024.0, "20895": 412557760.0, "20900": 412191328.0, "20905": 412261248.0, "20910": 408012576.0, "20915": 403738656.0, "20920": 416720480.0, "20925": 404658848.0, "20930": 415321728.0, "20935": 414536064.0, "20940": 423146272.0, "20945": 416461312.0, "20950": 408549696.0, "20955": 407842016.0, "20960": 418406176.0, "20965": 405089152.0, "20970": 412677056.0, "20975": 415456256.0, "20980": 404423008.0, "20985": 413516288.0, "20990": 410625024.0, "20995": 403742976.0, "21000": 409874784.0, "21005": 410437856.0, "21010": 418834112.0, "21015": 407944160.0, 
"21020": 413340736.0, "21025": 412254912.0, "21030": 415043616.0, "21035": 414168576.0, "21040": 408257280.0, "21045": 412314304.0, "21050": 417628288.0, "21055": 406232864.0, "21060": 409572544.0, "21065": 415954336.0, "21070": 416284512.0, "21075": 412728512.0, "21080": 417129408.0, "21085": 416684192.0, "21090": 412443360.0, "21095": 417915904.0, "21100": 410898624.0, "21105": 410442688.0, "21110": 412098080.0, "21115": 410354720.0, "21120": 417514560.0, "21125": 418096544.0, "21130": 410561984.0, "21135": 414796928.0, "21140": 418342368.0, "21145": 409465632.0, "21150": 407963392.0, "21155": 417529824.0, "21160": 409946816.0, "21165": 413743776.0, "21170": 407055520.0, "21175": 411074208.0, "21180": 413536800.0, "21185": 413027680.0, "21190": 426698368.0, "21195": 405758656.0, "21200": 409806752.0, "21205": 402704384.0, "21210": 409384352.0, "21215": 420298688.0, "21220": 408149888.0, "21225": 407528160.0, "21230": 416754720.0, "21235": 412317536.0, "21240": 407185728.0, "21245": 415491360.0, "21250": 405626208.0, "21255": 416096096.0, "21260": 409883520.0, "21265": 411611520.0, "21270": 405794080.0, "21275": 413783456.0, "21280": 415666080.0, "21285": 409328800.0, "21290": 410385824.0, "21295": 412152768.0, "21300": 411119104.0, "21305": 410586528.0, "21310": 410646592.0, "21315": 411711488.0, "21320": 418373408.0, "21325": 413620480.0, "21330": 424026048.0, "21335": 421001344.0, "21340": 419022496.0, "21345": 408753280.0, "21350": 412613536.0, "21355": 414642880.0, "21360": 411046624.0, "21365": 407761024.0, "21370": 407326848.0, "21375": 410102336.0, "21380": 410612576.0, "21385": 410168704.0, "21390": 410229536.0, "21395": 417604640.0, "21400": 410156352.0, "21405": 415634880.0, "21410": 411956032.0, "21415": 412543232.0, "21420": 416703328.0, "21425": 417105216.0, "21430": 426720480.0, "21435": 410497984.0, "21440": 413776768.0, "21445": 411684608.0, "21450": 409483648.0, "21455": 416410528.0, "21460": 421085120.0, "21465": 410558176.0, "21470": 406632832.0, "21475": 410353536.0, "21480": 415309568.0, "21485": 413524384.0, "21490": 403552832.0, "21495": 421378464.0, "21500": 405187968.0, "21505": 418323008.0, "21510": 418468864.0, "21515": 404492000.0, "21520": 410015808.0, "21525": 406053760.0, "21530": 414179040.0, "21535": 414244352.0, "21540": 414883488.0, "21545": 414531776.0, "21550": 415780512.0, "21555": 422161120.0, "21560": 410273280.0, "21565": 408782912.0, "21570": 410825440.0, "21575": 413071616.0, "21580": 410123328.0, "21585": 413780000.0, "21590": 416297728.0, "21595": 413910688.0, "21600": 418441248.0, "21605": 405393696.0, "21610": 411727840.0, "21615": 418373792.0, "21620": 412804896.0, "21625": 414281472.0, "21630": 416207488.0, "21635": 412896640.0, "21640": 411683488.0, "21645": 421285024.0, "21650": 417178144.0, "21655": 412146368.0, "21660": 415211360.0, "21665": 412949984.0, "21670": 411336128.0, "21675": 415747456.0, "21680": 416875712.0, "21685": 414757568.0, "21690": 408075840.0, "21695": 407759168.0, "21700": 412863872.0, "21705": 413507232.0, "21710": 415013344.0, "21715": 405796672.0, "21720": 420579904.0, "21725": 407985088.0, "21730": 414165760.0, "21735": 415070112.0, "21740": 410656800.0, "21745": 418620448.0, "21750": 410313696.0, "21755": 412045184.0, "21760": 405807072.0, "21765": 406637120.0, "21770": 413810272.0, "21775": 414792704.0, "21780": 415978720.0, "21785": 398156832.0, "21790": 416569056.0, "21795": 417207392.0, "21800": 410837056.0, "21805": 409723616.0, "21810": 412680160.0, "21815": 411766432.0, "21820": 418321824.0, "21825": 
412911424.0, "21830": 404586848.0, "21835": 414994528.0, "21840": 409568288.0, "21845": 420917568.0, "21850": 406937376.0, "21855": 415503456.0, "21860": 419220544.0, "21865": 414234784.0, "21870": 416059104.0, "21875": 412058784.0, "21880": 408752128.0, "21885": 417451712.0, "21890": 417486656.0, "21895": 405274400.0, "21900": 414006208.0, "21905": 413477184.0, "21910": 412749440.0, "21915": 416164224.0, "21920": 417419264.0, "21925": 413481888.0, "21930": 410520608.0, "21935": 403785024.0, "21940": 411364288.0, "21945": 415223904.0, "21950": 404437216.0, "21955": 416512768.0, "21960": 412171200.0, "21965": 401654624.0, "21970": 406696448.0, "21975": 413364800.0, "21980": 414904576.0, "21985": 417176192.0, "21990": 412178720.0, "21995": 414390560.0, "22000": 407265248.0, "22005": 412203616.0, "22010": 422671808.0, "22015": 419999808.0, "22020": 403655488.0, "22025": 411228864.0, "22030": 410850560.0, "22035": 406487904.0, "22040": 415416032.0, "22045": 411512128.0, "22050": 412396128.0, "22055": 405322880.0, "22060": 412734688.0, "22065": 416716992.0, "22070": 405626784.0, "22075": 405568064.0, "22080": 409548000.0, "22085": 408137248.0, "22090": 412383712.0, "22095": 414239168.0, "22100": 413231872.0, "22105": 414366080.0, "22110": 404923520.0, "22115": 413890432.0, "22120": 412075168.0, "22125": 406513120.0, "22130": 404892448.0, "22135": 420842432.0, "22140": 419545696.0, "22145": 408543744.0, "22150": 412883360.0, "22155": 415954592.0, "22160": 411659712.0, "22165": 411608288.0, "22170": 411648832.0, "22175": 408149664.0, "22180": 409310304.0, "22185": 410286304.0, "22190": 410639104.0, "22195": 415396224.0, "22200": 409086528.0, "22205": 418904896.0, "22210": 414956352.0, "22215": 414642560.0, "22220": 411663616.0, "22225": 408153504.0, "22230": 414629920.0, "22235": 408318816.0, "22240": 416299008.0, "22245": 414156960.0, "22250": 406875392.0, "22255": 425967168.0, "22260": 414850976.0, "22265": 411607936.0, "22270": 419843936.0, "22275": 410490528.0, "22280": 409196192.0, "22285": 417775616.0, "22290": 408753152.0, "22295": 413545568.0, "22300": 418550592.0, "22305": 409473088.0, "22310": 413159520.0, "22315": 409225536.0, "22320": 411011296.0, "22325": 406960768.0, "22330": 408077888.0, "22335": 413782592.0, "22340": 414169568.0, "22345": 414355648.0, "22350": 406887616.0, "22355": 404243840.0, "22360": 410067008.0, "22365": 413235232.0, "22370": 411261888.0, "22375": 420588416.0, "22380": 413594496.0, "22385": 417551840.0, "22390": 417009920.0, "22395": 418159936.0, "22400": 406706368.0, "22405": 414818848.0, "22410": 421229376.0, "22415": 414184832.0, "22420": 406257184.0, "22425": 411109760.0, "22430": 400283424.0, "22435": 422028896.0, "22440": 412922848.0, "22445": 409315680.0, "22450": 419038432.0, "22455": 416010048.0, "22460": 410315840.0, "22465": 414482464.0, "22470": 414902272.0, "22475": 405131008.0, "22480": 414995104.0, "22485": 409898816.0, "22490": 407966304.0, "22495": 421072160.0, "22500": 428337600.0, "22505": 408863744.0, "22510": 416227136.0, "22515": 410890816.0, "22520": 407077952.0, "22525": 409521056.0, "22530": 417251040.0, "22535": 409878912.0, "22540": 412776992.0, "22545": 401911904.0, "22550": 405690144.0, "22555": 416563840.0, "22560": 410237408.0, "22565": 416030592.0, "22570": 408211808.0, "22575": 408420736.0, "22580": 421071552.0, "22585": 402720160.0, "22590": 413941888.0, "22595": 413268288.0, "22600": 413910784.0, "22605": 421648128.0, "22610": 409967328.0, "22615": 419691296.0, "22620": 403432096.0, "22625": 417361216.0, "22630": 414276480.0, 
"22635": 402035840.0, "22640": 411770240.0, "22645": 408708992.0, "22650": 408965440.0, "22655": 412522880.0, "22660": 405401952.0, "22665": 414056096.0, "22670": 416528128.0, "22675": 416147712.0, "22680": 407612576.0, "22685": 407888128.0, "22690": 409556928.0, "22695": 409764224.0, "22700": 415988416.0, "22705": 412861280.0, "22710": 407618496.0, "22715": 419989792.0, "22720": 414299776.0, "22725": 412752192.0, "22730": 408804896.0, "22735": 414049888.0, "22740": 416658624.0, "22745": 407330880.0, "22750": 408115968.0, "22755": 411174208.0, "22760": 414341504.0, "22765": 406373024.0, "22770": 418235360.0, "22775": 422619456.0, "22780": 412262592.0, "22785": 408533856.0, "22790": 404736160.0, "22795": 417565024.0, "22800": 402298688.0, "22805": 416776000.0, "22810": 416735456.0, "22815": 408511456.0, "22820": 413752672.0, "22825": 411781696.0, "22830": 410082048.0, "22835": 415565440.0, "22840": 414816832.0, "22845": 415513024.0, "22850": 410616416.0, "22855": 410765984.0, "22860": 420273376.0, "22865": 411123072.0, "22870": 404961056.0, "22875": 409490464.0, "22880": 410167360.0, "22885": 417436960.0, "22890": 410057472.0, "22895": 414495392.0, "22900": 419996448.0, "22905": 412133760.0, "22910": 414431072.0, "22915": 406904832.0, "22920": 413314336.0, "22925": 410163712.0, "22930": 413123520.0, "22935": 406870400.0, "22940": 405042144.0, "22945": 413342272.0, "22950": 413945056.0, "22955": 406187328.0, "22960": 416843168.0, "22965": 412350976.0, "22970": 411232672.0, "22975": 416423872.0, "22980": 403475040.0, "22985": 416208096.0, "22990": 411319072.0, "22995": 411721856.0, "23000": 411682688.0, "23005": 410348544.0, "23010": 406839648.0, "23015": 415314496.0, "23020": 400307296.0, "23025": 405767808.0, "23030": 421431168.0, "23035": 405604352.0, "23040": 413515168.0, "23045": 416463392.0, "23050": 422027168.0, "23055": 416224768.0, "23060": 415671040.0, "23065": 417717344.0, "23070": 408327424.0, "23075": 410774048.0, "23080": 427088576.0, "23085": 418210304.0, "23090": 412047424.0, "23095": 409226528.0, "23100": 413939840.0, "23105": 412627872.0, "23110": 417439008.0, "23115": 409292416.0, "23120": 415629984.0, "23125": 409678496.0, "23130": 410088800.0, "23135": 406104576.0, "23140": 411327040.0, "23145": 413737280.0, "23150": 408443232.0, "23155": 415731072.0, "23160": 406569216.0, "23165": 410426368.0, "23170": 410669824.0, "23175": 409251872.0, "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", 
"23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", 
"24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", 
"25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", 
"26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", 
"27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", 
"29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", 
"30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", 
"31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", 
"32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", 
"33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", 
"34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", 
"35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", 
"36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", 
"37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", 
"39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", 
"40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", 
"41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", 
"42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", 
"43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", 
"44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", 
"45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", 
"46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", 
"47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", 
"49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", 
"50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 17448259584.0, "5": 17448259584.0, "10": 17448259584.0, "15": 17448259584.0, "20": 17448259584.0, "25": 17448259584.0, "30": 17448259584.0, "35": 17448259584.0, "40": 17448259584.0, "45": 17448259584.0, "50": 17448259584.0, "55": 17448259584.0, "60": 17448259584.0, "65": 17448259584.0, "70": 17448341504.0, "75": 17448259584.0, "80": 17448259584.0, "85": 17448259584.0, "90": 17448259584.0, "95": 17448964096.0, "100": 17448259584.0, "105": 17448259584.0, "110": 17448259584.0, "115": 17448259584.0, "120": 17448259584.0, "125": 17448259584.0, "130": 17448259584.0, "135": 17448259584.0, "140": 17448259584.0, "145": 17448259584.0, "150": 17448259584.0, "155": 17448259584.0, "160": 17448259584.0, "165": 17448259584.0, "170": 17448259584.0, "175": 17448259584.0, "180": 17448259584.0, "185": 17448259584.0, "190": 17448259584.0, "195": 17448259584.0, "200": 17448259584.0, "205": 17448259584.0, "210": 17448259584.0, "215": 17448259584.0, "220": 17448259584.0, "225": 17448259584.0, "230": 17448259584.0, "235": 17448259584.0, "240": 17448259584.0, "245": 17448259584.0, "250": 17448259584.0, "255": 17448259584.0, "260": 17448259584.0, "265": 17448259584.0, "270": 17448259584.0, "275": 17448259584.0, "280": 17448259584.0, "285": 17448259584.0, "290": 
17448259584.0, "295": 17448259584.0, "300": 17448259584.0, "305": 17448259584.0, "310": 17448259584.0, "315": 17448259584.0, "320": 17448259584.0, "325": 17448259584.0, "330": 17448259584.0, "335": 17448259584.0, "340": 17448259584.0, "345": 17448259584.0, "350": 17448259584.0, "355": 17448259584.0, "360": 17448259584.0, "365": 17448259584.0, "370": 17448259584.0, "375": 17448259584.0, "380": 17448259584.0, "385": 17448259584.0, "390": 17448259584.0, "395": 17448259584.0, "400": 17448259584.0, "405": 17448259584.0, "410": 17448259584.0, "415": 17448259584.0, "420": 17448259584.0, "425": 17448259584.0, "430": 17448259584.0, "435": 17448259584.0, "440": 17448259584.0, "445": 17448259584.0, "450": 17448259584.0, "455": 17448259584.0, "460": 17448259584.0, "465": 17448259584.0, "470": 17448259584.0, "475": 17448259584.0, "480": 17448259584.0, "485": 17448259584.0, "490": 17448259584.0, "495": 17448259584.0, "500": 17448259584.0, "505": 17448259584.0, "510": 17448259584.0, "515": 17448259584.0, "520": 17448259584.0, "525": 17448259584.0, "530": 17448259584.0, "535": 17448908800.0, "540": 17448908800.0, "545": 17448908800.0, "550": 17448908800.0, "555": 17448908800.0, "560": 17448908800.0, "565": 17448908800.0, "570": 17448908800.0, "575": 17448908800.0, "580": 17448908800.0, "585": 17448908800.0, "590": 17448908800.0, "595": 17448908800.0, "600": 17448908800.0, "605": 17448908800.0, "610": 17448908800.0, "615": 17448908800.0, "620": 17448908800.0, "625": 17448978432.0, "630": 17448908800.0, "635": 17448908800.0, "640": 17449400320.0, "645": 17448908800.0, "650": 17448908800.0, "655": 17448908800.0, "660": 17448908800.0, "665": 17449768960.0, "670": 17448908800.0, "675": 17448908800.0, "680": 17448908800.0, "685": 17448908800.0, "690": 17449730048.0, "695": 17448908800.0, "700": 17448908800.0, "705": 17448908800.0, "710": 17448908800.0, "715": 17448908800.0, "720": 17448908800.0, "725": 17448908800.0, "730": 17448908800.0, "735": 17448908800.0, "740": 17448908800.0, "745": 17448908800.0, "750": 17448908800.0, "755": 17448908800.0, "760": 17448908800.0, "765": 17448908800.0, "770": 17448908800.0, "775": 17448908800.0, "780": 17448908800.0, "785": 17448908800.0, "790": 17449472000.0, "795": 17448908800.0, "800": 17448908800.0, "805": 17449019392.0, "810": 17448908800.0, "815": 17448908800.0, "820": 17448908800.0, "825": 17448908800.0, "830": 17448908800.0, "835": 17448908800.0, "840": 17448908800.0, "845": 17448908800.0, "850": 17448908800.0, "855": 17448908800.0, "860": 17448908800.0, "865": 17448908800.0, "870": 17448908800.0, "875": 17448908800.0, "880": 17448908800.0, "885": 17448908800.0, "890": 17448908800.0, "895": 17448908800.0, "900": 17448908800.0, "905": 17448908800.0, "910": 17448908800.0, "915": 17448908800.0, "920": 17448908800.0, "925": 17448908800.0, "930": 17448908800.0, "935": 17448908800.0, "940": 17448908800.0, "945": 17448908800.0, "950": 17448908800.0, "955": 17448908800.0, "960": 17448908800.0, "965": 17448908800.0, "970": 17448908800.0, "975": 17448908800.0, "980": 17448908800.0, "985": 17448908800.0, "990": 17448908800.0, "995": 17448908800.0, "1000": 17449949184.0, "1005": 17448908800.0, "1010": 17448908800.0, "1015": 17448908800.0, "1020": 17448908800.0, "1025": 17449342976.0, "1030": 17448908800.0, "1035": 17448908800.0, "1040": 17448908800.0, "1045": 17448908800.0, "1050": 17448908800.0, "1055": 17448908800.0, "1060": 17448908800.0, "1065": 17449875456.0, "1070": 17448714240.0, "1075": 17448714240.0, "1080": 17449574400.0, "1085": 17448714240.0, "1090": 17448714240.0, 
"1095": 17448714240.0, "1100": 17448714240.0, "1105": 17448714240.0, "1110": 17448714240.0, "1115": 17448714240.0, "1120": 17448714240.0, "1125": 17448714240.0, "1130": 17448714240.0, "1135": 17448714240.0, "1140": 17448714240.0, "1145": 17448714240.0, "1150": 17449545728.0, "1155": 17448714240.0, "1160": 17448714240.0, "1165": 17449132032.0, "1170": 17448714240.0, "1175": 17448714240.0, "1180": 17448714240.0, "1185": 17448714240.0, "1190": 17448714240.0, "1195": 17448714240.0, "1200": 17448714240.0, "1205": 17448714240.0, "1210": 17448714240.0, "1215": 17448714240.0, "1220": 17448714240.0, "1225": 17448714240.0, "1230": 17448714240.0, "1235": 17448714240.0, "1240": 17448714240.0, "1245": 17449238528.0, "1250": 17448714240.0, "1255": 17448714240.0, "1260": 17448714240.0, "1265": 17449310208.0, "1270": 17448714240.0, "1275": 17448935424.0, "1280": 17448714240.0, "1285": 17448714240.0, "1290": 17448714240.0, "1295": 17448714240.0, "1300": 17448714240.0, "1305": 17448714240.0, "1310": 17448714240.0, "1315": 17448714240.0, "1320": 17449074688.0, "1325": 17448714240.0, "1330": 17449750528.0, "1335": 17449717760.0, "1340": 17448714240.0, "1345": 17449656320.0, "1350": 17448714240.0, "1355": 17448714240.0, "1360": 17448714240.0, "1365": 17448714240.0, "1370": 17448714240.0, "1375": 17448714240.0, "1380": 17448714240.0, "1385": 17448714240.0, "1390": 17448714240.0, "1395": 17448714240.0, "1400": 17448931328.0, "1405": 17448714240.0, "1410": 17448714240.0, "1415": 17448714240.0, "1420": 17448714240.0, "1425": 17448964096.0, "1430": 17448714240.0, "1435": 17448714240.0, "1440": 17448714240.0, "1445": 17448714240.0, "1450": 17448714240.0, "1455": 17448714240.0, "1460": 17448714240.0, "1465": 17448714240.0, "1470": 17448714240.0, "1475": 17448714240.0, "1480": 17448714240.0, "1485": 17448714240.0, "1490": 17448714240.0, "1495": 17448714240.0, "1500": 17448951808.0, "1505": 17448861696.0, "1510": 17448714240.0, "1515": 17448714240.0, "1520": 17448714240.0, "1525": 17448714240.0, "1530": 17448714240.0, "1535": 17448714240.0, "1540": 17448714240.0, "1545": 17448714240.0, "1550": 17448714240.0, "1555": 17448714240.0, "1560": 17448714240.0, "1565": 17448714240.0, "1570": 17448714240.0, "1575": 17449312256.0, "1580": 17448714240.0, "1585": 17448714240.0, "1590": 17448937472.0, "1595": 17448714240.0, "1600": 17448714240.0, "1605": 17448714240.0, "1610": 17448714240.0, "1615": 17448714240.0, "1620": 17448714240.0, "1625": 17448130560.0, "1630": 17448130560.0, "1635": 17448130560.0, "1640": 17448130560.0, "1645": 17449048064.0, "1650": 17448130560.0, "1655": 17448130560.0, "1660": 17448130560.0, "1665": 17448130560.0, "1670": 17448130560.0, "1675": 17448130560.0, "1680": 17449146368.0, "1685": 17448798208.0, "1690": 17448130560.0, "1695": 17448130560.0, "1700": 17448130560.0, "1705": 17448130560.0, "1710": 17448130560.0, "1715": 17448615936.0, "1720": 17448130560.0, "1725": 17448130560.0, "1730": 17448130560.0, "1735": 17448994816.0, "1740": 17448130560.0, "1745": 17448130560.0, "1750": 17448130560.0, "1755": 17448130560.0, "1760": 17448878080.0, "1765": 17448130560.0, "1770": 17448130560.0, "1775": 17448130560.0, "1780": 17448130560.0, "1785": 17448130560.0, "1790": 17448130560.0, "1795": 17448130560.0, "1800": 17448130560.0, "1805": 17448130560.0, "1810": 17448130560.0, "1815": 17448130560.0, "1820": 17448130560.0, "1825": 17448130560.0, "1830": 17448445952.0, "1835": 17448130560.0, "1840": 17448130560.0, "1845": 17448130560.0, "1850": 17448130560.0, "1855": 17448130560.0, "1860": 17448130560.0, "1865": 
17448130560.0, "1870": 17448130560.0, "1875": 17448130560.0, "1880": 17448507392.0, "1885": 17448130560.0, "1890": 17448130560.0, "1895": 17448130560.0, "1900": 17448130560.0, "1905": 17448130560.0, "1910": 17448130560.0, "1915": 17448130560.0, "1920": 17448288256.0, "1925": 17448986624.0, "1930": 17448130560.0, "1935": 17448130560.0, "1940": 17448632320.0, "1945": 17448130560.0, "1950": 17448130560.0, "1955": 17448144896.0, "1960": 17448130560.0, "1965": 17448130560.0, "1970": 17448130560.0, "1975": 17448892416.0, "1980": 17448130560.0, "1985": 17448130560.0, "1990": 17448130560.0, "1995": 17448130560.0, "2000": 17448130560.0, "2005": 17448130560.0, "2010": 17448130560.0, "2015": 17448130560.0, "2020": 17448130560.0, "2025": 17448130560.0, "2030": 17448130560.0, "2035": 17448130560.0, "2040": 17448130560.0, "2045": 17448310784.0, "2050": 17448130560.0, "2055": 17448130560.0, "2060": 17448130560.0, "2065": 17448654848.0, "2070": 17448130560.0, "2075": 17448130560.0, "2080": 17448130560.0, "2085": 17448130560.0, "2090": 17448130560.0, "2095": 17448130560.0, "2100": 17448130560.0, "2105": 17448130560.0, "2110": 17448130560.0, "2115": 17448130560.0, "2120": 17448130560.0, "2125": 17448130560.0, "2130": 17448675328.0, "2135": 17448130560.0, "2140": 17448130560.0, "2145": 17448130560.0, "2150": 17448884224.0, "2155": 17448130560.0, "2160": 17448130560.0, "2165": 17448130560.0, "2170": 17448130560.0, "2175": 17448130560.0, "2180": 17448130560.0, "2185": 17448130560.0, "2190": 17448130560.0, "2195": 17448130560.0, "2200": 17448130560.0, "2205": 17448130560.0, "2210": 17448130560.0, "2215": 17448130560.0, "2220": 17448130560.0, "2225": 17448130560.0, "2230": 17448130560.0, "2235": 17448224768.0, "2240": 17448130560.0, "2245": 17448130560.0, "2250": 17448130560.0, "2255": 17448130560.0, "2260": 17448130560.0, "2265": 17448130560.0, "2270": 17448130560.0, "2275": 17448130560.0, "2280": 17448130560.0, "2285": 17448130560.0, "2290": 17448130560.0, "2295": 17448130560.0, "2300": 17448130560.0, "2305": 17448130560.0, "2310": 17448130560.0, "2315": 17448130560.0, "2320": 17448130560.0, "2325": 17448130560.0, "2330": 17448130560.0, "2335": 17448245248.0, "2340": 17448130560.0, "2345": 17448130560.0, "2350": 17448130560.0, "2355": 17448130560.0, "2360": 17448130560.0, "2365": 17448130560.0, "2370": 17448130560.0, "2375": 17448130560.0, "2380": 17448130560.0, "2385": 17448130560.0, "2390": 17448130560.0, "2395": 17448130560.0, "2400": 17448130560.0, "2405": 17448130560.0, "2410": 17448130560.0, "2415": 17448130560.0, "2420": 17448130560.0, "2425": 17448998912.0, "2430": 17448130560.0, "2435": 17448130560.0, "2440": 17448130560.0, "2445": 17448876032.0, "2450": 17448130560.0, "2455": 17448245248.0, "2460": 17448130560.0, "2465": 17448130560.0, "2470": 17448130560.0, "2475": 17448130560.0, "2480": 17448130560.0, "2485": 17448130560.0, "2490": 17448130560.0, "2495": 17448130560.0, "2500": 17448130560.0, "2505": 17448130560.0, "2510": 17448130560.0, "2515": 17448130560.0, "2520": 17448130560.0, "2525": 17448130560.0, "2530": 17448130560.0, "2535": 17448130560.0, "2540": 17448433664.0, "2545": 17448130560.0, "2550": 17448130560.0, "2555": 17448130560.0, "2560": 17448130560.0, "2565": 17448130560.0, "2570": 17448130560.0, "2575": 17448130560.0, "2580": 17448130560.0, "2585": 17448130560.0, "2590": 17448130560.0, "2595": 17448130560.0, "2600": 17448130560.0, "2605": 17448130560.0, "2610": 17448130560.0, "2615": 17448245248.0, "2620": 17448130560.0, "2625": 17448130560.0, "2630": 17448130560.0, "2635": 
17448130560.0, "2640": 17448130560.0, "2645": 17448130560.0, "2650": 17448130560.0, "2655": 17448130560.0, "2660": 17448130560.0, "2665": 17448130560.0, "2670": 17448130560.0, "2675": 17448130560.0, "2680": 17448130560.0, "2685": 17448245248.0, "2690": 17448130560.0, "2695": 17448130560.0, "2700": 17448130560.0, "2705": 17448130560.0, "2710": 17448130560.0, "2715": 17448130560.0, "2720": 17448130560.0, "2725": 17448130560.0, "2730": 17448130560.0, "2735": 17448130560.0, "2740": 17448130560.0, "2745": 17448986624.0, "2750": 17448503296.0, "2755": 17448130560.0, "2760": 17448585216.0, "2765": 17448130560.0, "2770": 17448130560.0, "2775": 17448130560.0, "2780": 17448130560.0, "2785": 17448130560.0, "2790": 17448130560.0, "2795": 17448204288.0, "2800": 17448130560.0, "2805": 17448130560.0, "2810": 17448130560.0, "2815": 17448183808.0, "2820": 17448599552.0, "2825": 17448130560.0, "2830": 17448130560.0, "2835": 17448130560.0, "2840": 17448130560.0, "2845": 17448130560.0, "2850": 17448130560.0, "2855": 17448130560.0, "2860": 17448130560.0, "2865": 17448130560.0, "2870": 17448130560.0, "2875": 17448130560.0, "2880": 17448130560.0, "2885": 17448130560.0, "2890": 17448130560.0, "2895": 17448130560.0, "2900": 17448130560.0, "2905": 17448130560.0, "2910": 17448130560.0, "2915": 17448130560.0, "2920": 17448173568.0, "2925": 17448876032.0, "2930": 17448130560.0, "2935": 17448130560.0, "2940": 17448130560.0, "2945": 17448445952.0, "2950": 17448130560.0, "2955": 17448130560.0, "2960": 17448130560.0, "2965": 17448130560.0, "2970": 17448130560.0, "2975": 17448130560.0, "2980": 17448130560.0, "2985": 17448130560.0, "2990": 17448804352.0, "2995": 17448130560.0, "3000": 17448130560.0, "3005": 17448130560.0, "3010": 17448130560.0, "3015": 17448130560.0, "3020": 17448130560.0, "3025": 17448130560.0, "3030": 17448130560.0, "3035": 17448130560.0, "3040": 17448583168.0, "3045": 17448130560.0, "3050": 17448130560.0, "3055": 17448830976.0, "3060": 17448130560.0, "3065": 17448130560.0, "3070": 17448130560.0, "3075": 17448130560.0, "3080": 17448130560.0, "3085": 17448402944.0, "3090": 17448130560.0, "3095": 17448130560.0, "3100": 17448130560.0, "3105": 17448130560.0, "3110": 17448130560.0, "3115": 17448130560.0, "3120": 17448130560.0, "3125": 17448130560.0, "3130": 17448130560.0, "3135": 17448130560.0, "3140": 17448130560.0, "3145": 17448130560.0, "3150": 17448130560.0, "3155": 17448130560.0, "3160": 17448130560.0, "3165": 17448130560.0, "3170": 17448130560.0, "3175": 17448130560.0, "3180": 17448130560.0, "3185": 17448130560.0, "3190": 17448130560.0, "3195": 17448130560.0, "3200": 17448130560.0, "3205": 17448130560.0, "3210": 17448343552.0, "3215": 17448130560.0, "3220": 17448130560.0, "3225": 17448130560.0, "3230": 17448130560.0, "3235": 17448937472.0, "3240": 17448130560.0, "3245": 17448130560.0, "3250": 17448130560.0, "3255": 17448171520.0, "3260": 17448130560.0, "3265": 17448130560.0, "3270": 17448130560.0, "3275": 17448130560.0, "3280": 17448146944.0, "3285": 17448632320.0, "3290": 17448130560.0, "3295": 17448130560.0, "3300": 17448716288.0, "3305": 17448130560.0, "3310": 17449105408.0, "3315": 17448130560.0, "3320": 17448130560.0, "3325": 17448130560.0, "3330": 17448130560.0, "3335": 17448130560.0, "3340": 17448130560.0, "3345": 17448130560.0, "3350": 17448130560.0, "3355": 17448130560.0, "3360": 17448130560.0, "3365": 17448130560.0, "3370": 17448130560.0, "3375": 17448253440.0, "3380": 17449097216.0, "3385": 17448130560.0, "3390": 17448130560.0, "3395": 17448130560.0, "3400": 17448130560.0, "3405": 
17448130560.0, "3410": 17448130560.0, "3415": 17448130560.0, "3420": 17448130560.0, "3425": 17448130560.0, "3430": 17448130560.0, "3435": 17448130560.0, "3440": 17448130560.0, "3445": 17449129984.0, "3450": 17448130560.0, "3455": 17448130560.0, "3460": 17448130560.0, "3465": 17448130560.0, "3470": 17448130560.0, "3475": 17448278016.0, "3480": 17448130560.0, "3485": 17448130560.0, "3490": 17448130560.0, "3495": 17448130560.0, "3500": 17448130560.0, "3505": 17448130560.0, "3510": 17448130560.0, "3515": 17448441856.0, "3520": 17448130560.0, "3525": 17448130560.0, "3530": 17448130560.0, "3535": 17448130560.0, "3540": 17448130560.0, "3545": 17448130560.0, "3550": 17448130560.0, "3555": 17448384512.0, "3560": 17448130560.0, "3565": 17448130560.0, "3570": 17448130560.0, "3575": 17448130560.0, "3580": 17448130560.0, "3585": 17448130560.0, "3590": 17448130560.0, "3595": 17448130560.0, "3600": 17448130560.0, "3605": 17448130560.0, "3610": 17448130560.0, "3615": 17448130560.0, "3620": 17448130560.0, "3625": 17448130560.0, "3630": 17448130560.0, "3635": 17448130560.0, "3640": 17448130560.0, "3645": 17448130560.0, "3650": 17448826880.0, "3655": 17448130560.0, "3660": 17448130560.0, "3665": 17448173568.0, "3670": 17448851456.0, "3675": 17448130560.0, "3680": 17448130560.0, "3685": 17448130560.0, "3690": 17448130560.0, "3695": 17448130560.0, "3700": 17448130560.0, "3705": 17448130560.0, "3710": 17448130560.0, "3715": 17448130560.0, "3720": 17448130560.0, "3725": 17448445952.0, "3730": 17449113600.0, "3735": 17448130560.0, "3740": 17448130560.0, "3745": 17448130560.0, "3750": 17448130560.0, "3755": 17448130560.0, "3760": 17449099264.0, "3765": 17448130560.0, "3770": 17448130560.0, "3775": 17448130560.0, "3780": 17448130560.0, "3785": 17448130560.0, "3790": 17448130560.0, "3795": 17448779776.0, "3800": 17449080832.0, "3805": 17448130560.0, "3810": 17448130560.0, "3815": 17448130560.0, "3820": 17448130560.0, "3825": 17448130560.0, "3830": 17449179136.0, "3835": 17448269824.0, "3840": 17448130560.0, "3845": 17448130560.0, "3850": 17448130560.0, "3855": 17448843264.0, "3860": 17448843264.0, "3865": 17448843264.0, "3870": 17448843264.0, "3875": 17448843264.0, "3880": 17448843264.0, "3885": 17448843264.0, "3890": 17448843264.0, "3895": 17449703424.0, "3900": 17448843264.0, "3905": 17448843264.0, "3910": 17448843264.0, "3915": 17448843264.0, "3920": 17448878080.0, "3925": 17448843264.0, "3930": 17448843264.0, "3935": 17448843264.0, "3940": 17448843264.0, "3945": 17448843264.0, "3950": 17448843264.0, "3955": 17448843264.0, "3960": 17448843264.0, "3965": 17449760768.0, "3970": 17448843264.0, "3975": 17448843264.0, "3980": 17448843264.0, "3985": 17448843264.0, "3990": 17448843264.0, "3995": 17449244672.0, "4000": 17448994816.0, "4005": 17448933376.0, "4010": 17448843264.0, "4015": 17448843264.0, "4020": 17448843264.0, "4025": 17448843264.0, "4030": 17448843264.0, "4035": 17448843264.0, "4040": 17448843264.0, "4045": 17448843264.0, "4050": 17448843264.0, "4055": 17448843264.0, "4060": 17448843264.0, "4065": 17448843264.0, "4070": 17448843264.0, "4075": 17448843264.0, "4080": 17448843264.0, "4085": 17449127936.0, "4090": 17448843264.0, "4095": 17448843264.0, "4100": 17448843264.0, "4105": 17448843264.0, "4110": 17448843264.0, "4115": 17448843264.0, "4120": 17448843264.0, "4125": 17449783296.0, "4130": 17448843264.0, "4135": 17448843264.0, "4140": 17448914944.0, "4145": 17448843264.0, "4150": 17448843264.0, "4155": 17448843264.0, "4160": 17448843264.0, "4165": 17449603072.0, "4170": 17449168896.0, "4175": 
17448843264.0, "4180": 17448843264.0, "4185": 17448843264.0, "4190": 17448843264.0, "4195": 17448843264.0, "4200": 17448843264.0, "4205": 17449515008.0, "4210": 17448843264.0, "4215": 17448843264.0, "4220": 17448843264.0, "4225": 17448843264.0, "4230": 17448843264.0, "4235": 17448843264.0, "4240": 17448843264.0, "4245": 17448843264.0, "4250": 17448843264.0, "4255": 17448843264.0, "4260": 17448843264.0, "4265": 17448843264.0, "4270": 17448843264.0, "4275": 17448843264.0, "4280": 17448843264.0, "4285": 17448843264.0, "4290": 17448843264.0, "4295": 17448843264.0, "4300": 17448843264.0, "4305": 17448843264.0, "4310": 17448859648.0, "4315": 17448843264.0, "4320": 17448843264.0, "4325": 17448843264.0, "4330": 17448843264.0, "4335": 17448843264.0, "4340": 17448843264.0, "4345": 17448843264.0, "4350": 17448843264.0, "4355": 17448843264.0, "4360": 17448843264.0, "4365": 17448843264.0, "4370": 17448699904.0, "4375": 17448699904.0, "4380": 17448785920.0, "4385": 17448699904.0, "4390": 17448699904.0, "4395": 17448699904.0, "4400": 17449680896.0, "4405": 17448699904.0, "4410": 17448699904.0, "4415": 17448699904.0, "4420": 17448699904.0, "4425": 17448699904.0, "4430": 17448699904.0, "4435": 17448699904.0, "4440": 17448699904.0, "4445": 17448699904.0, "4450": 17448699904.0, "4455": 17448699904.0, "4460": 17449646080.0, "4465": 17448699904.0, "4470": 17448699904.0, "4475": 17448699904.0, "4480": 17448699904.0, "4485": 17448699904.0, "4490": 17448699904.0, "4495": 17448699904.0, "4500": 17448699904.0, "4505": 17448699904.0, "4510": 17448699904.0, "4515": 17448699904.0, "4520": 17448962048.0, "4525": 17448699904.0, "4530": 17448699904.0, "4535": 17448699904.0, "4540": 17448699904.0, "4545": 17448699904.0, "4550": 17448699904.0, "4555": 17448699904.0, "4560": 17448699904.0, "4565": 17448699904.0, "4570": 17448699904.0, "4575": 17448699904.0, "4580": 17448699904.0, "4585": 17448699904.0, "4590": 17449420800.0, "4595": 17448699904.0, "4600": 17449023488.0, "4605": 17448699904.0, "4610": 17448699904.0, "4615": 17448699904.0, "4620": 17448699904.0, "4625": 17448699904.0, "4630": 17448699904.0, "4635": 17448699904.0, "4640": 17448699904.0, "4645": 17448699904.0, "4650": 17448699904.0, "4655": 17448699904.0, "4660": 17449011200.0, "4665": 17448699904.0, "4670": 17448699904.0, "4675": 17448699904.0, "4680": 17448699904.0, "4685": 17448699904.0, "4690": 17448699904.0, "4695": 17448699904.0, "4700": 17448699904.0, "4705": 17448699904.0, "4710": 17448699904.0, "4715": 17448699904.0, "4720": 17449338880.0, "4725": 17448699904.0, "4730": 17449275392.0, "4735": 17448699904.0, "4740": 17448699904.0, "4745": 17448699904.0, "4750": 17448699904.0, "4755": 17448699904.0, "4760": 17448699904.0, "4765": 17448699904.0, "4770": 17448699904.0, "4775": 17448826880.0, "4780": 17448699904.0, "4785": 17448699904.0, "4790": 17448714240.0, "4795": 17448699904.0, "4800": 17448699904.0, "4805": 17448699904.0, "4810": 17448699904.0, "4815": 17448699904.0, "4820": 17448699904.0, "4825": 17449084928.0, "4830": 17448699904.0, "4835": 17449592832.0, "4840": 17448699904.0, "4845": 17448699904.0, "4850": 17448699904.0, "4855": 17448699904.0, "4860": 17448699904.0, "4865": 17448699904.0, "4870": 17448699904.0, "4875": 17448699904.0, "4880": 17448699904.0, "4885": 17448699904.0, "4890": 17448699904.0, "4895": 17449322496.0, "4900": 17448699904.0, "4905": 17448699904.0, "4910": 17448699904.0, "4915": 17448699904.0, "4920": 17448699904.0, "4925": 17448130560.0, "4930": 17448130560.0, "4935": 17448810496.0, "4940": 17448130560.0, "4945": 
17448130560.0, "4950": 17448130560.0, "4955": 17448130560.0, "4960": 17448130560.0, "4965": 17448458240.0, "4970": 17448908800.0, "4975": 17448130560.0, "4980": 17448130560.0, "4985": 17448513536.0, "4990": 17448130560.0, "4995": 17448130560.0, "5000": 17448130560.0, "5005": 17448130560.0, "5010": 17448130560.0, "5015": 17448130560.0, "5020": 17448130560.0, "5025": 17448130560.0, "5030": 17448130560.0, "5035": 17448130560.0, "5040": 17448130560.0, "5045": 17448130560.0, "5050": 17448130560.0, "5055": 17448130560.0, "5060": 17448130560.0, "5065": 17448130560.0, "5070": 17448130560.0, "5075": 17448130560.0, "5080": 17448130560.0, "5085": 17448833024.0, "5090": 17448130560.0, "5095": 17448130560.0, "5100": 17448130560.0, "5105": 17448130560.0, "5110": 17448130560.0, "5115": 17448130560.0, "5120": 17448130560.0, "5125": 17448130560.0, "5130": 17448130560.0, "5135": 17448130560.0, "5140": 17448130560.0, "5145": 17448130560.0, "5150": 17448130560.0, "5155": 17448130560.0, "5160": 17448130560.0, "5165": 17448130560.0, "5170": 17448130560.0, "5175": 17448130560.0, "5180": 17448130560.0, "5185": 17448130560.0, "5190": 17448130560.0, "5195": 17448179712.0, "5200": 17448130560.0, "5205": 17448130560.0, "5210": 17448130560.0, "5215": 17448130560.0, "5220": 17448130560.0, "5225": 17448130560.0, "5230": 17448130560.0, "5235": 17448376320.0, "5240": 17448130560.0, "5245": 17448130560.0, "5250": 17448744960.0, "5255": 17448130560.0, "5260": 17448130560.0, "5265": 17448130560.0, "5270": 17448130560.0, "5275": 17448130560.0, "5280": 17448130560.0, "5285": 17448130560.0, "5290": 17448900608.0, "5295": 17448130560.0, "5300": 17448130560.0, "5305": 17448130560.0, "5310": 17448130560.0, "5315": 17448130560.0, "5320": 17448130560.0, "5325": 17448130560.0, "5330": 17448660992.0, "5335": 17448130560.0, "5340": 17448130560.0, "5345": 17448392704.0, "5350": 17448130560.0, "5355": 17448130560.0, "5360": 17448130560.0, "5365": 17448130560.0, "5370": 17448130560.0, "5375": 17448130560.0, "5380": 17448130560.0, "5385": 17448130560.0, "5390": 17448130560.0, "5395": 17448130560.0, "5400": 17448130560.0, "5405": 17448130560.0, "5410": 17448130560.0, "5415": 17448130560.0, "5420": 17449072640.0, "5425": 17448130560.0, "5430": 17448130560.0, "5435": 17448130560.0, "5440": 17448130560.0, "5445": 17448130560.0, "5450": 17448130560.0, "5455": 17448130560.0, "5460": 17448130560.0, "5465": 17448130560.0, "5470": 17448130560.0, "5475": 17448130560.0, "5480": 17448488960.0, "5485": 17449097216.0, "5490": 17448130560.0, "5495": 17448949760.0, "5500": 17448130560.0, "5505": 17448904704.0, "5510": 17448933376.0, "5515": 17448130560.0, "5520": 17448130560.0, "5525": 17448130560.0, "5530": 17448130560.0, "5535": 17448130560.0, "5540": 17448130560.0, "5545": 17448898560.0, "5550": 17448679424.0, "5555": 17448130560.0, "5560": 17448130560.0, "5565": 17448130560.0, "5570": 17448130560.0, "5575": 17448130560.0, "5580": 17448198144.0, "5585": 17448130560.0, "5590": 17448130560.0, "5595": 17448130560.0, "5600": 17449105408.0, "5605": 17448130560.0, "5610": 17448130560.0, "5615": 17448130560.0, "5620": 17448130560.0, "5625": 17448130560.0, "5630": 17448130560.0, "5635": 17448130560.0, "5640": 17448130560.0, "5645": 17448130560.0, "5650": 17448130560.0, "5655": 17448130560.0, "5660": 17448130560.0, "5665": 17448130560.0, "5670": 17448130560.0, "5675": 17448130560.0, "5680": 17448130560.0, "5685": 17448130560.0, "5690": 17449166848.0, "5695": 17448130560.0, "5700": 17448130560.0, "5705": 17448130560.0, "5710": 17449111552.0, "5715": 
17448130560.0, "5720": 17448728576.0, "5725": 17448749056.0, "5730": 17448130560.0, "5735": 17448130560.0, "5740": 17448130560.0, "5745": 17448130560.0, "5750": 17448130560.0, "5755": 17448130560.0, "5760": 17448130560.0, "5765": 17448130560.0, "5770": 17448130560.0, "5775": 17448130560.0, "5780": 17448130560.0, "5785": 17448130560.0, "5790": 17448130560.0, "5795": 17449023488.0, "5800": 17448130560.0, "5805": 17448130560.0, "5810": 17448441856.0, "5815": 17448130560.0, "5820": 17448130560.0, "5825": 17448130560.0, "5830": 17448130560.0, "5835": 17448130560.0, "5840": 17448130560.0, "5845": 17448130560.0, "5850": 17448130560.0, "5855": 17449031680.0, "5860": 17448130560.0, "5865": 17448130560.0, "5870": 17448130560.0, "5875": 17449033728.0, "5880": 17448130560.0, "5885": 17448130560.0, "5890": 17448130560.0, "5895": 17448130560.0, "5900": 17448130560.0, "5905": 17448130560.0, "5910": 17448130560.0, "5915": 17448247296.0, "5920": 17448130560.0, "5925": 17448130560.0, "5930": 17448130560.0, "5935": 17448130560.0, "5940": 17448130560.0, "5945": 17448130560.0, "5950": 17448130560.0, "5955": 17448130560.0, "5960": 17448130560.0, "5965": 17448130560.0, "5970": 17448916992.0, "5975": 17448130560.0, "5980": 17448130560.0, "5985": 17448130560.0, "5990": 17448603648.0, "5995": 17448130560.0, "6000": 17449031680.0, "6005": 17448413184.0, "6010": 17448413184.0, "6015": 17448413184.0, "6020": 17448413184.0, "6025": 17449338880.0, "6030": 17448413184.0, "6035": 17448413184.0, "6040": 17448413184.0, "6045": 17448413184.0, "6050": 17448413184.0, "6055": 17448413184.0, "6060": 17448413184.0, "6065": 17448413184.0, "6070": 17448413184.0, "6075": 17448413184.0, "6080": 17448413184.0, "6085": 17448413184.0, "6090": 17448413184.0, "6095": 17448413184.0, "6100": 17448413184.0, "6105": 17448413184.0, "6110": 17448495104.0, "6115": 17449449472.0, "6120": 17448413184.0, "6125": 17448413184.0, "6130": 17448413184.0, "6135": 17448413184.0, "6140": 17448413184.0, "6145": 17448413184.0, "6150": 17448413184.0, "6155": 17448413184.0, "6160": 17448413184.0, "6165": 17448413184.0, "6170": 17448413184.0, "6175": 17449187328.0, "6180": 17448413184.0, "6185": 17448716288.0, "6190": 17448413184.0, "6195": 17448413184.0, "6200": 17448413184.0, "6205": 17448628224.0, "6210": 17448413184.0, "6215": 17448413184.0, "6220": 17448413184.0, "6225": 17448798208.0, "6230": 17448413184.0, "6235": 17448413184.0, "6240": 17448413184.0, "6245": 17448413184.0, "6250": 17448413184.0, "6255": 17448413184.0, "6260": 17448413184.0, "6265": 17448413184.0, "6270": 17448413184.0, "6275": 17448886272.0, "6280": 17448413184.0, "6285": 17448413184.0, "6290": 17448413184.0, "6295": 17449162752.0, "6300": 17448413184.0, "6305": 17448413184.0, "6310": 17448413184.0, "6315": 17448413184.0, "6320": 17448413184.0, "6325": 17448413184.0, "6330": 17448413184.0, "6335": 17448413184.0, "6340": 17448413184.0, "6345": 17448413184.0, "6350": 17448413184.0, "6355": 17448413184.0, "6360": 17448413184.0, "6365": 17448497152.0, "6370": 17448413184.0, "6375": 17448413184.0, "6380": 17448413184.0, "6385": 17448413184.0, "6390": 17449021440.0, "6395": 17448413184.0, "6400": 17448413184.0, "6405": 17448413184.0, "6410": 17448413184.0, "6415": 17448413184.0, "6420": 17448527872.0, "6425": 17448413184.0, "6430": 17448413184.0, "6435": 17448413184.0, "6440": 17448888320.0, "6445": 17448413184.0, "6450": 17448413184.0, "6455": 17448413184.0, "6460": 17448413184.0, "6465": 17448413184.0, "6470": 17448644608.0, "6475": 17448413184.0, "6480": 17448413184.0, "6485": 
17448560640.0, "6490": 17448413184.0, "6495": 17448413184.0, "6500": 17449410560.0, "6505": 17448413184.0, "6510": 17448413184.0, "6515": 17448413184.0, "6520": 17448413184.0, "6525": 17449199616.0, "6530": 17448413184.0, "6535": 17448413184.0, "6540": 17448454144.0, "6545": 17448413184.0, "6550": 17448413184.0, "6555": 17448413184.0, "6560": 17448413184.0, "6565": 17448413184.0, "6570": 17448413184.0, "6575": 17448130560.0, "6580": 17448130560.0, "6585": 17448130560.0, "6590": 17448589312.0, "6595": 17448130560.0, "6600": 17448130560.0, "6605": 17448130560.0, "6610": 17448130560.0, "6615": 17448130560.0, "6620": 17448130560.0, "6625": 17448130560.0, "6630": 17448130560.0, "6635": 17448130560.0, "6640": 17448130560.0, "6645": 17448130560.0, "6650": 17448130560.0, "6655": 17448130560.0, "6660": 17448130560.0, "6665": 17448130560.0, "6670": 17448130560.0, "6675": 17448130560.0, "6680": 17448130560.0, "6685": 17448130560.0, "6690": 17448130560.0, "6695": 17448130560.0, "6700": 17448130560.0, "6705": 17448130560.0, "6710": 17448130560.0, "6715": 17448777728.0, "6720": 17448130560.0, "6725": 17448130560.0, "6730": 17448130560.0, "6735": 17448130560.0, "6740": 17448130560.0, "6745": 17448130560.0, "6750": 17448130560.0, "6755": 17448130560.0, "6760": 17448130560.0, "6765": 17448130560.0, "6770": 17448130560.0, "6775": 17448130560.0, "6780": 17448130560.0, "6785": 17448130560.0, "6790": 17448130560.0, "6795": 17448130560.0, "6800": 17448130560.0, "6805": 17448130560.0, "6810": 17448130560.0, "6815": 17448130560.0, "6820": 17448130560.0, "6825": 17448130560.0, "6830": 17448130560.0, "6835": 17448130560.0, "6840": 17448775680.0, "6845": 17448130560.0, "6850": 17448130560.0, "6855": 17448130560.0, "6860": 17448505344.0, "6865": 17448417280.0, "6870": 17448130560.0, "6875": 17448130560.0, "6880": 17448130560.0, "6885": 17448130560.0, "6890": 17448130560.0, "6895": 17448130560.0, "6900": 17448747008.0, "6905": 17448198144.0, "6910": 17448130560.0, "6915": 17448130560.0, "6920": 17448630272.0, "6925": 17448130560.0, "6930": 17448130560.0, "6935": 17448409088.0, "6940": 17448130560.0, "6945": 17448130560.0, "6950": 17448130560.0, "6955": 17448130560.0, "6960": 17448130560.0, "6965": 17448130560.0, "6970": 17448130560.0, "6975": 17448130560.0, "6980": 17448130560.0, "6985": 17448130560.0, "6990": 17448130560.0, "6995": 17448173568.0, "7000": 17448130560.0, "7005": 17448130560.0, "7010": 17448130560.0, "7015": 17448146944.0, "7020": 17448130560.0, "7025": 17448130560.0, "7030": 17448130560.0, "7035": 17448130560.0, "7040": 17448130560.0, "7045": 17448130560.0, "7050": 17448435712.0, "7055": 17448130560.0, "7060": 17448130560.0, "7065": 17448130560.0, "7070": 17448130560.0, "7075": 17449076736.0, "7080": 17448130560.0, "7085": 17448130560.0, "7090": 17448130560.0, "7095": 17448130560.0, "7100": 17448130560.0, "7105": 17448130560.0, "7110": 17448130560.0, "7115": 17448130560.0, "7120": 17448130560.0, "7125": 17448130560.0, "7130": 17448130560.0, "7135": 17448130560.0, "7140": 17448699904.0, "7145": 17448130560.0, "7150": 17448130560.0, "7155": 17448130560.0, "7160": 17449117696.0, "7165": 17448130560.0, "7170": 17448130560.0, "7175": 17448130560.0, "7180": 17448130560.0, "7185": 17448130560.0, "7190": 17448130560.0, "7195": 17448130560.0, "7200": 17448130560.0, "7205": 17448130560.0, "7210": 17448130560.0, "7215": 17448130560.0, "7220": 17448130560.0, "7225": 17448130560.0, "7230": 17448130560.0, "7235": 17448130560.0, "7240": 17448130560.0, "7245": 17448130560.0, "7250": 17448130560.0, "7255": 
17448130560.0, "7260": 17448130560.0, "7265": 17448466432.0, "7270": 17448130560.0, "7275": 17448130560.0, "7280": 17448130560.0, "7285": 17448130560.0, "7290": 17448566784.0, "7295": 17448130560.0, "7300": 17448130560.0, "7305": 17448130560.0, "7310": 17448130560.0, "7315": 17448531968.0, "7320": 17448130560.0, "7325": 17448130560.0, "7330": 17448130560.0, "7335": 17448130560.0, "7340": 17448130560.0, "7345": 17448130560.0, "7350": 17448130560.0, "7355": 17448130560.0, "7360": 17448130560.0, "7365": 17448130560.0, "7370": 17448716288.0, "7375": 17448597504.0, "7380": 17448130560.0, "7385": 17448130560.0, "7390": 17448130560.0, "7395": 17448130560.0, "7400": 17448720384.0, "7405": 17448130560.0, "7410": 17448130560.0, "7415": 17448130560.0, "7420": 17448130560.0, "7425": 17449136128.0, "7430": 17448130560.0, "7435": 17448130560.0, "7440": 17448130560.0, "7445": 17448130560.0, "7450": 17448130560.0, "7455": 17448130560.0, "7460": 17448130560.0, "7465": 17448130560.0, "7470": 17448130560.0, "7475": 17448130560.0, "7480": 17448130560.0, "7485": 17448130560.0, "7490": 17448130560.0, "7495": 17448130560.0, "7500": 17448130560.0, "7505": 17448130560.0, "7510": 17448130560.0, "7515": 17448130560.0, "7520": 17448130560.0, "7525": 17448130560.0, "7530": 17449048064.0, "7535": 17448130560.0, "7540": 17448130560.0, "7545": 17448130560.0, "7550": 17448130560.0, "7555": 17448130560.0, "7560": 17448130560.0, "7565": 17448130560.0, "7570": 17448130560.0, "7575": 17448130560.0, "7580": 17448130560.0, "7585": 17448802304.0, "7590": 17448130560.0, "7595": 17448130560.0, "7600": 17448130560.0, "7605": 17448130560.0, "7610": 17448130560.0, "7615": 17448513536.0, "7620": 17448130560.0, "7625": 17448130560.0, "7630": 17448728576.0, "7635": 17448130560.0, "7640": 17448130560.0, "7645": 17448130560.0, "7650": 17448130560.0, "7655": 17449031680.0, "7660": 17448130560.0, "7665": 17448130560.0, "7670": 17448130560.0, "7675": 17448130560.0, "7680": 17448130560.0, "7685": 17448130560.0, "7690": 17448130560.0, "7695": 17448130560.0, "7700": 17448130560.0, "7705": 17448130560.0, "7710": 17448130560.0, "7715": 17448130560.0, "7720": 17448130560.0, "7725": 17448130560.0, "7730": 17448130560.0, "7735": 17448130560.0, "7740": 17448130560.0, "7745": 17448130560.0, "7750": 17448130560.0, "7755": 17448130560.0, "7760": 17448130560.0, "7765": 17448130560.0, "7770": 17448130560.0, "7775": 17448992768.0, "7780": 17448130560.0, "7785": 17448130560.0, "7790": 17448235008.0, "7795": 17448603648.0, "7800": 17448130560.0, "7805": 17448130560.0, "7810": 17448130560.0, "7815": 17448130560.0, "7820": 17448130560.0, "7825": 17448130560.0, "7830": 17448130560.0, "7835": 17448130560.0, "7840": 17448130560.0, "7845": 17448130560.0, "7850": 17448990720.0, "7855": 17448130560.0, "7860": 17448130560.0, "7865": 17448615936.0, "7870": 17448130560.0, "7875": 17448130560.0, "7880": 17448130560.0, "7885": 17448675328.0, "7890": 17448198144.0, "7895": 17448130560.0, "7900": 17448130560.0, "7905": 17448130560.0, "7910": 17449121792.0, "7915": 17448130560.0, "7920": 17448130560.0, "7925": 17448130560.0, "7930": 17448130560.0, "7935": 17448130560.0, "7940": 17448130560.0, "7945": 17448130560.0, "7950": 17448130560.0, "7955": 17448130560.0, "7960": 17448130560.0, "7965": 17448304640.0, "7970": 17448835072.0, "7975": 17448130560.0, "7980": 17448130560.0, "7985": 17448130560.0, "7990": 17448130560.0, "7995": 17448130560.0, "8000": 17448265728.0, "8005": 17448388608.0, "8010": 17448130560.0, "8015": 17448130560.0, "8020": 17448445952.0, "8025": 
17448130560.0, "8030": 17448130560.0, "8035": 17448130560.0, "8040": 17448130560.0, "8045": 17448130560.0, "8050": 17448130560.0, "8055": 17448130560.0, "8060": 17448130560.0, "8065": 17448962048.0, "8070": 17448130560.0, "8075": 17448130560.0, "8080": 17448556544.0, "8085": 17448130560.0, "8090": 17448130560.0, "8095": 17448130560.0, "8100": 17448130560.0, "8105": 17448130560.0, "8110": 17448130560.0, "8115": 17448130560.0, "8120": 17448130560.0, "8125": 17448130560.0, "8130": 17448130560.0, "8135": 17448130560.0, "8140": 17448130560.0, "8145": 17448130560.0, "8150": 17448130560.0, "8155": 17448130560.0, "8160": 17448130560.0, "8165": 17448130560.0, "8170": 17448130560.0, "8175": 17448130560.0, "8180": 17448130560.0, "8185": 17448130560.0, "8190": 17448130560.0, "8195": 17448130560.0, "8200": 17448130560.0, "8205": 17448130560.0, "8210": 17448130560.0, "8215": 17448130560.0, "8220": 17448286208.0, "8225": 17448130560.0, "8230": 17448130560.0, "8235": 17448130560.0, "8240": 17448130560.0, "8245": 17448130560.0, "8250": 17448130560.0, "8255": 17448130560.0, "8260": 17448130560.0, "8265": 17448130560.0, "8270": 17448130560.0, "8275": 17448130560.0, "8280": 17448130560.0, "8285": 17448130560.0, "8290": 17448130560.0, "8295": 17448130560.0, "8300": 17448130560.0, "8305": 17448130560.0, "8310": 17448867840.0, "8315": 17448130560.0, "8320": 17448130560.0, "8325": 17448130560.0, "8330": 17448130560.0, "8335": 17448130560.0, "8340": 17449105408.0, "8345": 17448130560.0, "8350": 17448130560.0, "8355": 17448130560.0, "8360": 17448130560.0, "8365": 17448130560.0, "8370": 17448130560.0, "8375": 17448130560.0, "8380": 17448130560.0, "8385": 17448130560.0, "8390": 17448130560.0, "8395": 17448325120.0, "8400": 17448130560.0, "8405": 17448130560.0, "8410": 17448130560.0, "8415": 17448130560.0, "8420": 17448130560.0, "8425": 17448130560.0, "8430": 17448130560.0, "8435": 17448130560.0, "8440": 17448130560.0, "8445": 17448130560.0, "8450": 17448130560.0, "8455": 17448130560.0, "8460": 17448130560.0, "8465": 17448384512.0, "8470": 17448130560.0, "8475": 17448130560.0, "8480": 17448130560.0, "8485": 17448376320.0, "8490": 17448130560.0, "8495": 17448130560.0, "8500": 17448130560.0, "8505": 17448130560.0, "8510": 17448130560.0, "8515": 17448130560.0, "8520": 17448130560.0, "8525": 17448130560.0, "8530": 17448130560.0, "8535": 17448130560.0, "8540": 17448704000.0, "8545": 17448130560.0, "8550": 17448130560.0, "8555": 17448130560.0, "8560": 17448130560.0, "8565": 17448130560.0, "8570": 17448372224.0, "8575": 17448130560.0, "8580": 17448130560.0, "8585": 17448130560.0, "8590": 17448130560.0, "8595": 17448130560.0, "8600": 17448622080.0, "8605": 17448663040.0, "8610": 17448130560.0, "8615": 17448130560.0, "8620": 17448130560.0, "8625": 17448130560.0, "8630": 17448130560.0, "8635": 17448130560.0, "8640": 17448130560.0, "8645": 17448179712.0, "8650": 17448130560.0, "8655": 17448130560.0, "8660": 17448130560.0, "8665": 17448130560.0, "8670": 17448130560.0, "8675": 17448130560.0, "8680": 17448130560.0, "8685": 17448130560.0, "8690": 17448130560.0, "8695": 17449224192.0, "8700": 17448130560.0, "8705": 17448130560.0, "8710": 17448491008.0, "8715": 17448130560.0, "8720": 17448130560.0, "8725": 17448130560.0, "8730": 17448130560.0, "8735": 17448255488.0, "8740": 17448130560.0, "8745": 17448130560.0, "8750": 17448130560.0, "8755": 17448130560.0, "8760": 17448130560.0, "8765": 17448130560.0, "8770": 17448630272.0, "8775": 17448130560.0, "8780": 17448192000.0, "8785": 17448130560.0, "8790": 17448130560.0, "8795": 
17448130560.0, "8800": 17448130560.0, "8805": 17448130560.0, "8810": 17448130560.0, "8815": 17448130560.0, "8820": 17448130560.0, "8825": 17448130560.0, "8830": 17448130560.0, "8835": 17448130560.0, "8840": 17448130560.0, "8845": 17448130560.0, "8850": 17448130560.0, "8855": 17448130560.0, "8860": 17448599552.0, "8865": 17448130560.0, "8870": 17448130560.0, "8875": 17448130560.0, "8880": 17448130560.0, "8885": 17448130560.0, "8890": 17448130560.0, "8895": 17448130560.0, "8900": 17448130560.0, "8905": 17448130560.0, "8910": 17448695808.0, "8915": 17448130560.0, "8920": 17448130560.0, "8925": 17448130560.0, "8930": 17448130560.0, "8935": 17448130560.0, "8940": 17448331264.0, "8945": 17448130560.0, "8950": 17448130560.0, "8955": 17448130560.0, "8960": 17448331264.0, "8965": 17448130560.0, "8970": 17448130560.0, "8975": 17448951808.0, "8980": 17448288256.0, "8985": 17448523776.0, "8990": 17448130560.0, "8995": 17448130560.0, "9000": 17448130560.0, "9005": 17448130560.0, "9010": 17448130560.0, "9015": 17448130560.0, "9020": 17448130560.0, "9025": 17448130560.0, "9030": 17448851456.0, "9035": 17448130560.0, "9040": 17448130560.0, "9045": 17448130560.0, "9050": 17448130560.0, "9055": 17448130560.0, "9060": 17448130560.0, "9065": 17448130560.0, "9070": 17448564736.0, "9075": 17448130560.0, "9080": 17448130560.0, "9085": 17448130560.0, "9090": 17448130560.0, "9095": 17448130560.0, "9100": 17448130560.0, "9105": 17448130560.0, "9110": 17448130560.0, "9115": 17448138752.0, "9120": 17448564736.0, "9125": 17448288256.0, "9130": 17448130560.0, "9135": 17448130560.0, "9140": 17448130560.0, "9145": 17448130560.0, "9150": 17448130560.0, "9155": 17448130560.0, "9160": 17448130560.0, "9165": 17448130560.0, "9170": 17448130560.0, "9175": 17448130560.0, "9180": 17448130560.0, "9185": 17448130560.0, "9190": 17448130560.0, "9195": 17448130560.0, "9200": 17448130560.0, "9205": 17448130560.0, "9210": 17448130560.0, "9215": 17448130560.0, "9220": 17448130560.0, "9225": 17448503296.0, "9230": 17448130560.0, "9235": 17448843264.0, "9240": 17448130560.0, "9245": 17448130560.0, "9250": 17448130560.0, "9255": 17448130560.0, "9260": 17448130560.0, "9265": 17448130560.0, "9270": 17448130560.0, "9275": 17448130560.0, "9280": 17448130560.0, "9285": 17448130560.0, "9290": 17448130560.0, "9295": 17448130560.0, "9300": 17448130560.0, "9305": 17448130560.0, "9310": 17448130560.0, "9315": 17448130560.0, "9320": 17448130560.0, "9325": 17448130560.0, "9330": 17448130560.0, "9335": 17448130560.0, "9340": 17448130560.0, "9345": 17448130560.0, "9350": 17448663040.0, "9355": 17448130560.0, "9360": 17448130560.0, "9365": 17448130560.0, "9370": 17448130560.0, "9375": 17448130560.0, "9380": 17448130560.0, "9385": 17448445952.0, "9390": 17448130560.0, "9395": 17448130560.0, "9400": 17448130560.0, "9405": 17449052160.0, "9410": 17448130560.0, "9415": 17448130560.0, "9420": 17448130560.0, "9425": 17448130560.0, "9430": 17448130560.0, "9435": 17448130560.0, "9440": 17449314304.0, "9445": 17448704000.0, "9450": 17448130560.0, "9455": 17448130560.0, "9460": 17448130560.0, "9465": 17448130560.0, "9470": 17448130560.0, "9475": 17448826880.0, "9480": 17449019392.0, "9485": 17448130560.0, "9490": 17448130560.0, "9495": 17448130560.0, "9500": 17448398848.0, "9505": 17449019392.0, "9510": 17448130560.0, "9515": 17448130560.0, "9520": 17448189952.0, "9525": 17448130560.0, "9530": 17448130560.0, "9535": 17448130560.0, "9540": 17448130560.0, "9545": 17448130560.0, "9550": 17448452096.0, "9555": 17448130560.0, "9560": 17449048064.0, "9565": 
17448130560.0, "9570": 17448130560.0, "9575": 17448130560.0, "9580": 17448130560.0, "9585": 17448130560.0, "9590": 17448957952.0, "9595": 17448130560.0, "9600": 17448130560.0, "9605": 17448130560.0, "9610": 17448130560.0, "9615": 17448130560.0, "9620": 17448130560.0, "9625": 17448130560.0, "9630": 17449000960.0, "9635": 17448130560.0, "9640": 17448130560.0, "9645": 17448130560.0, "9650": 17448130560.0, "9655": 17448671232.0, "9660": 17448130560.0, "9665": 17448130560.0, "9670": 17448130560.0, "9675": 17448232960.0, "9680": 17448130560.0, "9685": 17448988672.0, "9690": 17448130560.0, "9695": 17448130560.0, "9700": 17448130560.0, "9705": 17448130560.0, "9710": 17448130560.0, "9715": 17448130560.0, "9720": 17448130560.0, "9725": 17448130560.0, "9730": 17448130560.0, "9735": 17448396800.0, "9740": 17448130560.0, "9745": 17448130560.0, "9750": 17448130560.0, "9755": 17448130560.0, "9760": 17448605696.0, "9765": 17448130560.0, "9770": 17448130560.0, "9775": 17448130560.0, "9780": 17448130560.0, "9785": 17448130560.0, "9790": 17448130560.0, "9795": 17448130560.0, "9800": 17448130560.0, "9805": 17449076736.0, "9810": 17449013248.0, "9815": 17448130560.0, "9820": 17448282112.0, "9825": 17448130560.0, "9830": 17448464384.0, "9835": 17448130560.0, "9840": 17448130560.0, "9845": 17448425472.0, "9850": 17448130560.0, "9855": 17448130560.0, "9860": 17448130560.0, "9865": 17448302592.0, "9870": 17448130560.0, "9875": 17448130560.0, "9880": 17448130560.0, "9885": 17448613888.0, "9890": 17448493056.0, "9895": 17448351744.0, "9900": 17448130560.0, "9905": 17448130560.0, "9910": 17448445952.0, "9915": 17448130560.0, "9920": 17448466432.0, "9925": 17448130560.0, "9930": 17448667136.0, "9935": 17448130560.0, "9940": 17448130560.0, "9945": 17448130560.0, "9950": 17448130560.0, "9955": 17448130560.0, "9960": 17448130560.0, "9965": 17448671232.0, "9970": 17448130560.0, "9975": 17448130560.0, "9980": 17448130560.0, "9985": 17448130560.0, "9990": 17448130560.0, "9995": 17448130560.0, "10000": 17448130560.0, "10005": 17448130560.0, "10010": 17448826880.0, "10015": 17448230912.0, "10020": 17448130560.0, "10025": 17448130560.0, "10030": 17448130560.0, "10035": 17448130560.0, "10040": 17448216576.0, "10045": 17448130560.0, "10050": 17448130560.0, "10055": 17448130560.0, "10060": 17448130560.0, "10065": 17448130560.0, "10070": 17449129984.0, "10075": 17448130560.0, "10080": 17448130560.0, "10085": 17448974336.0, "10090": 17448130560.0, "10095": 17448130560.0, "10100": 17448390656.0, "10105": 17448130560.0, "10110": 17448130560.0, "10115": 17448130560.0, "10120": 17448130560.0, "10125": 17448130560.0, "10130": 17448130560.0, "10135": 17448249344.0, "10140": 17448130560.0, "10145": 17448130560.0, "10150": 17448130560.0, "10155": 17448130560.0, "10160": 17448130560.0, "10165": 17448130560.0, "10170": 17448130560.0, "10175": 17448130560.0, "10180": 17448130560.0, "10185": 17448130560.0, "10190": 17448130560.0, "10195": 17448130560.0, "10200": 17448130560.0, "10205": 17448130560.0, "10210": 17448130560.0, "10215": 17448761344.0, "10220": 17448130560.0, "10225": 17448130560.0, "10230": 17448431616.0, "10235": 17448130560.0, "10240": 17448130560.0, "10245": 17448269824.0, "10250": 17448130560.0, "10255": 17448130560.0, "10260": 17448130560.0, "10265": 17448130560.0, "10270": 17448237056.0, "10275": 17448646656.0, "10280": 17448130560.0, "10285": 17448833024.0, "10290": 17448130560.0, "10295": 17448386560.0, "10300": 17448130560.0, "10305": 17448900608.0, "10310": 17448908800.0, "10315": 17448130560.0, "10320": 17448130560.0, 
"10325": 17448523776.0, "10330": 17448130560.0, "10335": 17448130560.0, "10340": 17448130560.0, "10345": 17448130560.0, "10350": 17448130560.0, "10355": 17448130560.0, "10360": 17448130560.0, "10365": 17448423424.0, "10370": 17448411136.0, "10375": 17448345600.0, "10380": 17448130560.0, "10385": 17448130560.0, "10390": 17448130560.0, "10395": 17448130560.0, "10400": 17448130560.0, "10405": 17448130560.0, "10410": 17448130560.0, "10415": 17448130560.0, "10420": 17449089024.0, "10425": 17448130560.0, "10430": 17448130560.0, "10435": 17448130560.0, "10440": 17448130560.0, "10445": 17449097216.0, "10450": 17448130560.0, "10455": 17448130560.0, "10460": 17448130560.0, "10465": 17448130560.0, "10470": 17448357888.0, "10475": 17448130560.0, "10480": 17448851456.0, "10485": 17448130560.0, "10490": 17448130560.0, "10495": 17448130560.0, "10500": 17448130560.0, "10505": 17448374272.0, "10510": 17448130560.0, "10515": 17448130560.0, "10520": 17448130560.0, "10525": 17448130560.0, "10530": 17448847360.0, "10535": 17448130560.0, "10540": 17448130560.0, "10545": 17448130560.0, "10550": 17448613888.0, "10555": 17448130560.0, "10560": 17448130560.0, "10565": 17448130560.0, "10570": 17448130560.0, "10575": 17448130560.0, "10580": 17448130560.0, "10585": 17448130560.0, "10590": 17448130560.0, "10595": 17448130560.0, "10600": 17448130560.0, "10605": 17448130560.0, "10610": 17448130560.0, "10615": 17448130560.0, "10620": 17448826880.0, "10625": 17448130560.0, "10630": 17448130560.0, "10635": 17448130560.0, "10640": 17448130560.0, "10645": 17448130560.0, "10650": 17448130560.0, "10655": 17448130560.0, "10660": 17448540160.0, "10665": 17448130560.0, "10670": 17448130560.0, "10675": 17448130560.0, "10680": 17448130560.0, "10685": 17448847360.0, "10690": 17448937472.0, "10695": 17448130560.0, "10700": 17448130560.0, "10705": 17448130560.0, "10710": 17448130560.0, "10715": 17448130560.0, "10720": 17448130560.0, "10725": 17448130560.0, "10730": 17448130560.0, "10735": 17448130560.0, "10740": 17448130560.0, "10745": 17448130560.0, "10750": 17448130560.0, "10755": 17448130560.0, "10760": 17448671232.0, "10765": 17448130560.0, "10770": 17448130560.0, "10775": 17448130560.0, "10780": 17448130560.0, "10785": 17448130560.0, "10790": 17448130560.0, "10795": 17448130560.0, "10800": 17448130560.0, "10805": 17448130560.0, "10810": 17448615936.0, "10815": 17448130560.0, "10820": 17448589312.0, "10825": 17448130560.0, "10830": 17448130560.0, "10835": 17448130560.0, "10840": 17448130560.0, "10845": 17448130560.0, "10850": 17448130560.0, "10855": 17448130560.0, "10860": 17448130560.0, "10865": 17448130560.0, "10870": 17448130560.0, "10875": 17448130560.0, "10880": 17448130560.0, "10885": 17448130560.0, "10890": 17448130560.0, "10895": 17448130560.0, "10900": 17448130560.0, "10905": 17448130560.0, "10910": 17448130560.0, "10915": 17448130560.0, "10920": 17448130560.0, "10925": 17448368128.0, "10930": 17448130560.0, "10935": 17448130560.0, "10940": 17448130560.0, "10945": 17448130560.0, "10950": 17448130560.0, "10955": 17448130560.0, "10960": 17448130560.0, "10965": 17448130560.0, "10970": 17448130560.0, "10975": 17448130560.0, "10980": 17448130560.0, "10985": 17448130560.0, "10990": 17448130560.0, "10995": 17448130560.0, "11000": 17448861696.0, "11005": 17448130560.0, "11010": 17448130560.0, "11015": 17448140800.0, "11020": 17448175616.0, "11025": 17448130560.0, "11030": 17448130560.0, "11035": 17448130560.0, "11040": 17448130560.0, "11045": 17448130560.0, "11050": 17448130560.0, "11055": 17448130560.0, "11060": 17448130560.0, 
"11065": 17448130560.0, "11070": 17448130560.0, "11075": 17448130560.0, "11080": 17448589312.0, "11085": 17448130560.0, "11090": 17448294400.0, "11095": 17448130560.0, "11100": 17448572928.0, "11105": 17448130560.0, "11110": 17448130560.0, "11115": 17448130560.0, "11120": 17448130560.0, "11125": 17448130560.0, "11130": 17448130560.0, "11135": 17448130560.0, "11140": 17448130560.0, "11145": 17448130560.0, "11150": 17448130560.0, "11155": 17448130560.0, "11160": 17448130560.0, "11165": 17448130560.0, "11170": 17448130560.0, "11175": 17448130560.0, "11180": 17448130560.0, "11185": 17448130560.0, "11190": 17448646656.0, "11195": 17448130560.0, "11200": 17448130560.0, "11205": 17448130560.0, "11210": 17448431616.0, "11215": 17448130560.0, "11220": 17448130560.0, "11225": 17448130560.0, "11230": 17448130560.0, "11235": 17448130560.0, "11240": 17448130560.0, "11245": 17448130560.0, "11250": 17448130560.0, "11255": 17448130560.0, "11260": 17448130560.0, "11265": 17448130560.0, "11270": 17448130560.0, "11275": 17448130560.0, "11280": 17448130560.0, "11285": 17448130560.0, "11290": 17448130560.0, "11295": 17448130560.0, "11300": 17448130560.0, "11305": 17448130560.0, "11310": 17448130560.0, "11315": 17448130560.0, "11320": 17448130560.0, "11325": 17448130560.0, "11330": 17448130560.0, "11335": 17448130560.0, "11340": 17448130560.0, "11345": 17448130560.0, "11350": 17448130560.0, "11355": 17448130560.0, "11360": 17448534016.0, "11365": 17448130560.0, "11370": 17448130560.0, "11375": 17448130560.0, "11380": 17448130560.0, "11385": 17448130560.0, "11390": 17448130560.0, "11395": 17448130560.0, "11400": 17448130560.0, "11405": 17448130560.0, "11410": 17448130560.0, "11415": 17448130560.0, "11420": 17448130560.0, "11425": 17448130560.0, "11430": 17448130560.0, "11435": 17448130560.0, "11440": 17448308736.0, "11445": 17448130560.0, "11450": 17448130560.0, "11455": 17448685568.0, "11460": 17448130560.0, "11465": 17448130560.0, "11470": 17448130560.0, "11475": 17448720384.0, "11480": 17448310784.0, "11485": 17448531968.0, "11490": 17448687616.0, "11495": 17448333312.0, "11500": 17448130560.0, "11505": 17448130560.0, "11510": 17448130560.0, "11515": 17448130560.0, "11520": 17448130560.0, "11525": 17448130560.0, "11530": 17448130560.0, "11535": 17448130560.0, "11540": 17448130560.0, "11545": 17448130560.0, "11550": 17448886272.0, "11555": 17448130560.0, "11560": 17448130560.0, "11565": 17448146944.0, "11570": 17448130560.0, "11575": 17448130560.0, "11580": 17448130560.0, "11585": 17448982528.0, "11590": 17448130560.0, "11595": 17448130560.0, "11600": 17448130560.0, "11605": 17448130560.0, "11610": 17448130560.0, "11615": 17448130560.0, "11620": 17448130560.0, "11625": 17448130560.0, "11630": 17448130560.0, "11635": 17448130560.0, "11640": 17448732672.0, "11645": 17448130560.0, "11650": 17448130560.0, "11655": 17448130560.0, "11660": 17448130560.0, "11665": 17448130560.0, "11670": 17448130560.0, "11675": 17448130560.0, "11680": 17448130560.0, "11685": 17448130560.0, "11690": 17448130560.0, "11695": 17448130560.0, "11700": 17448130560.0, "11705": 17448130560.0, "11710": 17448130560.0, "11715": 17448130560.0, "11720": 17448130560.0, "11725": 17448130560.0, "11730": 17448130560.0, "11735": 17448130560.0, "11740": 17448130560.0, "11745": 17448130560.0, "11750": 17448130560.0, "11755": 17448130560.0, "11760": 17448130560.0, "11765": 17448130560.0, "11770": 17448269824.0, "11775": 17448697856.0, "11780": 17448130560.0, "11785": 17448130560.0, "11790": 17448130560.0, "11795": 17448130560.0, "11800": 17448130560.0, 
"11805": 17448130560.0, "11810": 17448941568.0, "11815": 17448130560.0, "11820": 17448130560.0, "11825": 17448130560.0, "11830": 17448130560.0, "11835": 17448130560.0, "11840": 17448130560.0, "11845": 17448130560.0, "11850": 17448130560.0, "11855": 17448130560.0, "11860": 17448130560.0, "11865": 17448130560.0, "11870": 17449033728.0, "11875": 17448130560.0, "11880": 17448130560.0, "11885": 17448130560.0, "11890": 17448130560.0, "11895": 17448130560.0, "11900": 17448611840.0, "11905": 17448130560.0, "11910": 17448253440.0, "11915": 17448130560.0, "11920": 17448263680.0, "11925": 17448130560.0, "11930": 17448130560.0, "11935": 17448564736.0, "11940": 17448130560.0, "11945": 17448130560.0, "11950": 17448880128.0, "11955": 17448130560.0, "11960": 17448130560.0, "11965": 17448130560.0, "11970": 17448130560.0, "11975": 17448130560.0, "11980": 17448130560.0, "11985": 17448130560.0, "11990": 17448130560.0, "11995": 17449076736.0, "12000": 17449064448.0, "12005": 17448130560.0, "12010": 17448130560.0, "12015": 17448130560.0, "12020": 17448130560.0, "12025": 17448187904.0, "12030": 17448130560.0, "12035": 17448130560.0, "12040": 17448130560.0, "12045": 17448130560.0, "12050": 17448130560.0, "12055": 17448130560.0, "12060": 17448130560.0, "12065": 17448130560.0, "12070": 17448605696.0, "12075": 17448130560.0, "12080": 17448130560.0, "12085": 17448130560.0, "12090": 17448130560.0, "12095": 17448130560.0, "12100": 17448130560.0, "12105": 17448130560.0, "12110": 17448130560.0, "12115": 17448130560.0, "12120": 17448130560.0, "12125": 17448130560.0, "12130": 17448130560.0, "12135": 17448130560.0, "12140": 17449177088.0, "12145": 17448130560.0, "12150": 17448130560.0, "12155": 17448130560.0, "12160": 17448130560.0, "12165": 17448130560.0, "12170": 17448130560.0, "12175": 17448130560.0, "12180": 17448130560.0, "12185": 17448130560.0, "12190": 17448130560.0, "12195": 17448130560.0, "12200": 17448130560.0, "12205": 17448130560.0, "12210": 17448175616.0, "12215": 17448130560.0, "12220": 17448130560.0, "12225": 17448130560.0, "12230": 17448138752.0, "12235": 17448130560.0, "12240": 17448130560.0, "12245": 17448130560.0, "12250": 17448826880.0, "12255": 17448130560.0, "12260": 17448130560.0, "12265": 17448130560.0, "12270": 17448130560.0, "12275": 17448130560.0, "12280": 17448232960.0, "12285": 17448130560.0, "12290": 17448130560.0, "12295": 17448130560.0, "12300": 17448130560.0, "12305": 17448130560.0, "12310": 17448130560.0, "12315": 17448130560.0, "12320": 17448130560.0, "12325": 17448851456.0, "12330": 17448130560.0, "12335": 17448130560.0, "12340": 17448130560.0, "12345": 17448130560.0, "12350": 17448130560.0, "12355": 17448132608.0, "12360": 17448130560.0, "12365": 17448130560.0, "12370": 17448130560.0, "12375": 17448130560.0, "12380": 17448130560.0, "12385": 17448876032.0, "12390": 17448130560.0, "12395": 17448130560.0, "12400": 17448130560.0, "12405": 17448562688.0, "12410": 17448130560.0, "12415": 17448130560.0, "12420": 17448130560.0, "12425": 17448130560.0, "12430": 17448130560.0, "12435": 17448130560.0, "12440": 17448130560.0, "12445": 17448130560.0, "12450": 17448130560.0, "12455": 17448130560.0, "12460": 17448130560.0, "12465": 17448130560.0, "12470": 17448130560.0, "12475": 17448130560.0, "12480": 17448130560.0, "12485": 17448130560.0, "12490": 17448130560.0, "12495": 17448130560.0, "12500": 17448130560.0, "12505": 17448130560.0, "12510": 17448130560.0, "12515": 17448130560.0, "12520": 17448130560.0, "12525": 17448130560.0, "12530": 17448130560.0, "12535": 17448130560.0, "12540": 17448130560.0, 
"12545": 17448130560.0, "12550": 17448130560.0, "12555": 17448130560.0, "12560": 17448130560.0, "12565": 17448130560.0, "12570": 17448130560.0, "12575": 17448155136.0, "12580": 17448130560.0, "12585": 17448130560.0, "12590": 17448130560.0, "12595": 17448130560.0, "12600": 17448130560.0, "12605": 17448130560.0, "12610": 17448130560.0, "12615": 17448130560.0, "12620": 17448130560.0, "12625": 17448130560.0, "12630": 17448130560.0, "12635": 17448130560.0, "12640": 17448230912.0, "12645": 17448368128.0, "12650": 17448130560.0, "12655": 17448130560.0, "12660": 17448335360.0, "12665": 17448130560.0, "12670": 17448130560.0, "12675": 17448130560.0, "12680": 17448130560.0, "12685": 17448919040.0, "12690": 17448130560.0, "12695": 17448130560.0, "12700": 17448130560.0, "12705": 17448912896.0, "12710": 17448130560.0, "12715": 17449048064.0, "12720": 17448130560.0, "12725": 17448130560.0, "12730": 17448130560.0, "12735": 17448130560.0, "12740": 17448130560.0, "12745": 17448480768.0, "12750": 17448130560.0, "12755": 17448130560.0, "12760": 17448130560.0, "12765": 17448130560.0, "12770": 17448130560.0, "12775": 17448130560.0, "12780": 17448130560.0, "12785": 17448130560.0, "12790": 17448130560.0, "12795": 17448130560.0, "12800": 17448130560.0, "12805": 17448130560.0, "12810": 17448130560.0, "12815": 17448130560.0, "12820": 17448357888.0, "12825": 17449121792.0, "12830": 17448130560.0, "12835": 17448130560.0, "12840": 17448130560.0, "12845": 17448130560.0, "12850": 17448130560.0, "12855": 17448130560.0, "12860": 17448130560.0, "12865": 17448130560.0, "12870": 17448130560.0, "12875": 17448130560.0, "12880": 17448130560.0, "12885": 17448130560.0, "12890": 17448130560.0, "12895": 17448130560.0, "12900": 17448130560.0, "12905": 17448130560.0, "12910": 17448130560.0, "12915": 17448130560.0, "12920": 17448130560.0, "12925": 17448710144.0, "12930": 17448130560.0, "12935": 17448130560.0, "12940": 17448130560.0, "12945": 17448343552.0, "12950": 17448130560.0, "12955": 17448130560.0, "12960": 17448130560.0, "12965": 17448130560.0, "12970": 17448130560.0, "12975": 17448130560.0, "12980": 17448804352.0, "12985": 17448130560.0, "12990": 17448130560.0, "12995": 17448130560.0, "13000": 17448130560.0, "13005": 17448130560.0, "13010": 17448130560.0, "13015": 17448130560.0, "13020": 17448130560.0, "13025": 17448130560.0, "13030": 17448574976.0, "13035": 17448411136.0, "13040": 17448130560.0, "13045": 17448130560.0, "13050": 17448130560.0, "13055": 17448130560.0, "13060": 17448130560.0, "13065": 17448130560.0, "13070": 17448130560.0, "13075": 17448130560.0, "13080": 17448353792.0, "13085": 17448130560.0, "13090": 17448130560.0, "13095": 17448130560.0, "13100": 17448130560.0, "13105": 17448130560.0, "13110": 17449144320.0, "13115": 17448130560.0, "13120": 17448130560.0, "13125": 17448130560.0, "13130": 17448130560.0, "13135": 17448130560.0, "13140": 17448130560.0, "13145": 17448130560.0, "13150": 17449170944.0, "13155": 17448130560.0, "13160": 17448130560.0, "13165": 17448130560.0, "13170": 17448130560.0, "13175": 17448474624.0, "13180": 17448130560.0, "13185": 17448130560.0, "13190": 17448130560.0, "13195": 17449154560.0, "13200": 17448245248.0, "13205": 17448130560.0, "13210": 17449089024.0, "13215": 17448130560.0, "13220": 17448923136.0, "13225": 17448130560.0, "13230": 17448130560.0, "13235": 17448130560.0, "13240": 17448130560.0, "13245": 17448130560.0, "13250": 17448130560.0, "13255": 17448165376.0, "13260": 17448130560.0, "13265": 17448130560.0, "13270": 17448130560.0, "13275": 17448130560.0, "13280": 17448611840.0, 
"13285": 17448130560.0, "13290": 17448130560.0, "13295": 17448130560.0, "13300": 17448130560.0, "13305": 17448130560.0, "13310": 17448130560.0, "13315": 17448130560.0, "13320": 17448130560.0, "13325": 17448130560.0, "13330": 17448130560.0, "13335": 17448130560.0, "13340": 17448130560.0, "13345": 17448130560.0, "13350": 17448130560.0, "13355": 17448982528.0, "13360": 17448130560.0, "13365": 17448130560.0, "13370": 17448130560.0, "13375": 17448130560.0, "13380": 17448130560.0, "13385": 17448130560.0, "13390": 17448130560.0, "13395": 17448130560.0, "13400": 17448130560.0, "13405": 17448130560.0, "13410": 17448130560.0, "13415": 17448130560.0, "13420": 17448130560.0, "13425": 17448130560.0, "13430": 17448130560.0, "13435": 17448130560.0, "13440": 17448130560.0, "13445": 17449162752.0, "13450": 17448130560.0, "13455": 17448775680.0, "13460": 17448130560.0, "13465": 17448130560.0, "13470": 17448130560.0, "13475": 17448130560.0, "13480": 17448130560.0, "13485": 17448130560.0, "13490": 17448130560.0, "13495": 17448130560.0, "13500": 17448130560.0, "13505": 17448130560.0, "13510": 17448130560.0, "13515": 17448130560.0, "13520": 17448130560.0, "13525": 17448130560.0, "13530": 17448130560.0, "13535": 17448130560.0, "13540": 17448130560.0, "13545": 17448130560.0, "13550": 17448130560.0, "13555": 17448130560.0, "13560": 17448130560.0, "13565": 17448130560.0, "13570": 17448130560.0, "13575": 17448130560.0, "13580": 17448130560.0, "13585": 17448130560.0, "13590": 17448130560.0, "13595": 17448130560.0, "13600": 17448130560.0, "13605": 17448130560.0, "13610": 17448130560.0, "13615": 17448130560.0, "13620": 17448130560.0, "13625": 17448613888.0, "13630": 17448130560.0, "13635": 17448130560.0, "13640": 17448130560.0, "13645": 17448130560.0, "13650": 17448130560.0, "13655": 17448130560.0, "13660": 17448130560.0, "13665": 17448656896.0, "13670": 17448130560.0, "13675": 17448130560.0, "13680": 17448130560.0, "13685": 17448130560.0, "13690": 17448542208.0, "13695": 17448130560.0, "13700": 17448130560.0, "13705": 17448130560.0, "13710": 17448130560.0, "13715": 17448130560.0, "13720": 17448130560.0, "13725": 17448130560.0, "13730": 17448130560.0, "13735": 17448130560.0, "13740": 17448488960.0, "13745": 17448230912.0, "13750": 17448130560.0, "13755": 17448130560.0, "13760": 17448130560.0, "13765": 17448130560.0, "13770": 17448130560.0, "13775": 17448130560.0, "13780": 17448130560.0, "13785": 17448130560.0, "13790": 17448130560.0, "13795": 17448130560.0, "13800": 17448130560.0, "13805": 17448130560.0, "13810": 17448130560.0, "13815": 17448130560.0, "13820": 17448628224.0, "13825": 17448130560.0, "13830": 17448130560.0, "13835": 17448130560.0, "13840": 17448130560.0, "13845": 17448130560.0, "13850": 17448130560.0, "13855": 17448130560.0, "13860": 17448130560.0, "13865": 17448130560.0, "13870": 17448130560.0, "13875": 17448130560.0, "13880": 17448130560.0, "13885": 17448130560.0, "13890": 17448130560.0, "13895": 17448130560.0, "13900": 17448130560.0, "13905": 17448130560.0, "13910": 17448130560.0, "13915": 17448130560.0, "13920": 17448130560.0, "13925": 17448130560.0, "13930": 17448130560.0, "13935": 17448130560.0, "13940": 17449019392.0, "13945": 17448130560.0, "13950": 17448130560.0, "13955": 17448130560.0, "13960": 17448130560.0, "13965": 17448130560.0, "13970": 17448130560.0, "13975": 17448130560.0, "13980": 17448130560.0, "13985": 17448130560.0, "13990": 17448130560.0, "13995": 17448130560.0, "14000": 17448130560.0, "14005": 17448130560.0, "14010": 17448130560.0, "14015": 17448130560.0, "14020": 17448130560.0, 
"14025": 17448130560.0, "14030": 17448130560.0, "14035": 17448130560.0, "14040": 17448445952.0, "14045": 17448130560.0, "14050": 17448130560.0, "14055": 17448130560.0, "14060": 17448130560.0, "14065": 17448130560.0, "14070": 17448130560.0, "14075": 17448130560.0, "14080": 17448130560.0, "14085": 17448130560.0, "14090": 17448130560.0, "14095": 17449099264.0, "14100": 17448130560.0, "14105": 17448130560.0, "14110": 17448628224.0, "14115": 17448130560.0, "14120": 17448130560.0, "14125": 17448130560.0, "14130": 17448130560.0, "14135": 17448130560.0, "14140": 17448130560.0, "14145": 17448130560.0, "14150": 17448130560.0, "14155": 17448130560.0, "14160": 17448130560.0, "14165": 17448130560.0, "14170": 17448130560.0, "14175": 17448130560.0, "14180": 17448361984.0, "14185": 17448130560.0, "14190": 17448130560.0, "14195": 17448130560.0, "14200": 17448130560.0, "14205": 17448445952.0, "14210": 17448130560.0, "14215": 17448400896.0, "14220": 17448130560.0, "14225": 17448130560.0, "14230": 17448130560.0, "14235": 17448130560.0, "14240": 17448130560.0, "14245": 17448130560.0, "14250": 17448130560.0, "14255": 17448130560.0, "14260": 17448130560.0, "14265": 17448130560.0, "14270": 17448130560.0, "14275": 17448130560.0, "14280": 17448130560.0, "14285": 17448130560.0, "14290": 17448130560.0, "14295": 17448130560.0, "14300": 17448130560.0, "14305": 17449113600.0, "14310": 17448130560.0, "14315": 17448130560.0, "14320": 17449076736.0, "14325": 17448130560.0, "14330": 17448130560.0, "14335": 17448130560.0, "14340": 17448130560.0, "14345": 17448130560.0, "14350": 17448130560.0, "14355": 17448130560.0, "14360": 17448130560.0, "14365": 17448130560.0, "14370": 17448130560.0, "14375": 17448130560.0, "14380": 17448130560.0, "14385": 17448130560.0, "14390": 17448130560.0, "14395": 17448130560.0, "14400": 17448130560.0, "14405": 17448130560.0, "14410": 17448130560.0, "14415": 17448130560.0, "14420": 17448130560.0, "14425": 17448130560.0, "14430": 17448130560.0, "14435": 17448130560.0, "14440": 17448130560.0, "14445": 17448130560.0, "14450": 17448130560.0, "14455": 17448130560.0, "14460": 17448130560.0, "14465": 17448130560.0, "14470": 17448130560.0, "14475": 17448130560.0, "14480": 17448130560.0, "14485": 17448593408.0, "14490": 17448130560.0, "14495": 17448906752.0, "14500": 17448130560.0, "14505": 17448130560.0, "14510": 17448130560.0, "14515": 17448130560.0, "14520": 17448568832.0, "14525": 17448161280.0, "14530": 17448130560.0, "14535": 17448130560.0, "14540": 17448130560.0, "14545": 17448603648.0, "14550": 17448130560.0, "14555": 17448130560.0, "14560": 17448130560.0, "14565": 17448130560.0, "14570": 17448130560.0, "14575": 17448802304.0, "14580": 17448710144.0, "14585": 17448605696.0, "14590": 17448130560.0, "14595": 17448130560.0, "14600": 17448130560.0, "14605": 17448646656.0, "14610": 17448130560.0, "14615": 17448581120.0, "14620": 17448130560.0, "14625": 17448130560.0, "14630": 17448130560.0, "14635": 17448130560.0, "14640": 17448130560.0, "14645": 17448130560.0, "14650": 17448130560.0, "14655": 17448130560.0, "14660": 17448130560.0, "14665": 17448130560.0, "14670": 17448130560.0, "14675": 17448130560.0, "14680": 17448130560.0, "14685": 17448130560.0, "14690": 17448130560.0, "14695": 17448491008.0, "14700": 17448130560.0, "14705": 17448130560.0, "14710": 17448130560.0, "14715": 17448130560.0, "14720": 17448130560.0, "14725": 17448130560.0, "14730": 17448130560.0, "14735": 17448130560.0, "14740": 17448130560.0, "14745": 17448130560.0, "14750": 17448130560.0, "14755": 17448130560.0, "14760": 17448130560.0, 
"14765": 17448130560.0, "14770": 17448130560.0, "14775": 17448671232.0, "14780": 17448130560.0, "14785": 17448130560.0, "14790": 17448130560.0, "14795": 17448833024.0, "14800": 17448130560.0, "14805": 17448130560.0, "14810": 17449170944.0, "14815": 17448130560.0, "14820": 17448130560.0, "14825": 17448130560.0, "14830": 17448130560.0, "14835": 17448351744.0, "14840": 17448130560.0, "14845": 17448130560.0, "14850": 17448130560.0, "14855": 17448130560.0, "14860": 17448130560.0, "14865": 17449043968.0, "14870": 17448499200.0, "14875": 17448376320.0, "14880": 17448130560.0, "14885": 17448155136.0, "14890": 17448130560.0, "14895": 17448130560.0, "14900": 17448130560.0, "14905": 17448130560.0, "14910": 17448130560.0, "14915": 17448187904.0, "14920": 17448130560.0, "14925": 17448130560.0, "14930": 17448130560.0, "14935": 17448130560.0, "14940": 17448130560.0, "14945": 17448130560.0, "14950": 17448130560.0, "14955": 17448130560.0, "14960": 17448130560.0, "14965": 17448130560.0, "14970": 17448130560.0, "14975": 17448130560.0, "14980": 17448130560.0, "14985": 17448130560.0, "14990": 17448130560.0, "14995": 17448130560.0, "15000": 17448130560.0, "15005": 17448130560.0, "15010": 17448130560.0, "15015": 17448704000.0, "15020": 17448130560.0, "15025": 17448130560.0, "15030": 17448130560.0, "15035": 17448130560.0, "15040": 17448130560.0, "15045": 17448130560.0, "15050": 17448130560.0, "15055": 17448130560.0, "15060": 17448130560.0, "15065": 17448130560.0, "15070": 17448130560.0, "15075": 17448130560.0, "15080": 17448130560.0, "15085": 17448130560.0, "15090": 17448130560.0, "15095": 17449152512.0, "15100": 17448130560.0, "15105": 17448130560.0, "15110": 17448130560.0, "15115": 17448130560.0, "15120": 17448173568.0, "15125": 17448130560.0, "15130": 17448130560.0, "15135": 17448130560.0, "15140": 17448781824.0, "15145": 17448130560.0, "15150": 17448130560.0, "15155": 17448810496.0, "15160": 17448130560.0, "15165": 17448515584.0, "15170": 17448130560.0, "15175": 17448130560.0, "15180": 17448228864.0, "15185": 17448130560.0, "15190": 17448130560.0, "15195": 17448130560.0, "15200": 17448130560.0, "15205": 17448159232.0, "15210": 17448474624.0, "15215": 17448130560.0, "15220": 17448130560.0, "15225": 17448130560.0, "15230": 17448130560.0, "15235": 17448941568.0, "15240": 17448130560.0, "15245": 17448130560.0, "15250": 17448468480.0, "15255": 17448130560.0, "15260": 17448982528.0, "15265": 17448130560.0, "15270": 17448130560.0, "15275": 17448130560.0, "15280": 17448130560.0, "15285": 17448130560.0, "15290": 17448130560.0, "15295": 17448130560.0, "15300": 17448130560.0, "15305": 17448130560.0, "15310": 17448130560.0, "15315": 17448130560.0, "15320": 17448237056.0, "15325": 17448130560.0, "15330": 17448130560.0, "15335": 17448235008.0, "15340": 17448130560.0, "15345": 17448204288.0, "15350": 17448130560.0, "15355": 17448130560.0, "15360": 17448130560.0, "15365": 17448130560.0, "15370": 17448130560.0, "15375": 17448130560.0, "15380": 17448130560.0, "15385": 17448755200.0, "15390": 17448130560.0, "15395": 17448130560.0, "15400": 17448474624.0, "15405": 17448130560.0, "15410": 17448648704.0, "15415": 17448130560.0, "15420": 17448130560.0, "15425": 17448130560.0, "15430": 17448130560.0, "15435": 17448130560.0, "15440": 17448130560.0, "15445": 17448130560.0, "15450": 17448130560.0, "15455": 17448130560.0, "15460": 17448130560.0, "15465": 17448130560.0, "15470": 17448130560.0, "15475": 17448941568.0, "15480": 17448130560.0, "15485": 17448392704.0, "15490": 17448130560.0, "15495": 17448130560.0, "15500": 17448130560.0, 
"15505": 17448130560.0, "15510": 17448130560.0, "15515": 17448130560.0, "15520": 17448130560.0, "15525": 17448669184.0, "15530": 17448130560.0, "15535": 17448574976.0, "15540": 17448130560.0, "15545": 17448130560.0, "15550": 17448130560.0, "15555": 17448130560.0, "15560": 17448130560.0, "15565": 17448820736.0, "15570": 17448130560.0, "15575": 17448130560.0, "15580": 17448130560.0, "15585": 17448603648.0, "15590": 17448130560.0, "15595": 17448130560.0, "15600": 17448130560.0, "15605": 17448130560.0, "15610": 17448130560.0, "15615": 17448130560.0, "15620": 17448130560.0, "15625": 17448130560.0, "15630": 17448130560.0, "15635": 17448130560.0, "15640": 17449015296.0, "15645": 17448130560.0, "15650": 17448130560.0, "15655": 17448130560.0, "15660": 17448130560.0, "15665": 17448130560.0, "15670": 17448220672.0, "15675": 17448130560.0, "15680": 17448130560.0, "15685": 17448130560.0, "15690": 17448130560.0, "15695": 17448130560.0, "15700": 17448843264.0, "15705": 17448130560.0, "15710": 17448130560.0, "15715": 17448130560.0, "15720": 17448130560.0, "15725": 17448130560.0, "15730": 17448130560.0, "15735": 17448130560.0, "15740": 17448130560.0, "15745": 17448130560.0, "15750": 17448130560.0, "15755": 17448759296.0, "15760": 17448130560.0, "15765": 17448130560.0, "15770": 17448130560.0, "15775": 17448130560.0, "15780": 17448130560.0, "15785": 17448130560.0, "15790": 17448130560.0, "15795": 17448130560.0, "15800": 17448130560.0, "15805": 17448130560.0, "15810": 17449166848.0, "15815": 17448130560.0, "15820": 17448130560.0, "15825": 17448417280.0, "15830": 17448130560.0, "15835": 17448130560.0, "15840": 17448130560.0, "15845": 17448130560.0, "15850": 17448130560.0, "15855": 17448130560.0, "15860": 17448130560.0, "15865": 17448130560.0, "15870": 17448130560.0, "15875": 17448130560.0, "15880": 17448826880.0, "15885": 17448130560.0, "15890": 17448130560.0, "15895": 17448130560.0, "15900": 17448130560.0, "15905": 17448130560.0, "15910": 17448130560.0, "15915": 17448130560.0, "15920": 17448130560.0, "15925": 17448130560.0, "15930": 17448130560.0, "15935": 17448130560.0, "15940": 17448130560.0, "15945": 17448130560.0, "15950": 17448867840.0, "15955": 17448130560.0, "15960": 17448130560.0, "15965": 17448130560.0, "15970": 17448130560.0, "15975": 17448130560.0, "15980": 17448130560.0, "15985": 17448130560.0, "15990": 17448130560.0, "15995": 17449023488.0, "16000": 17448130560.0, "16005": 17448130560.0, "16010": 17448130560.0, "16015": 17448130560.0, "16020": 17448130560.0, "16025": 17448130560.0, "16030": 17448130560.0, "16035": 17448130560.0, "16040": 17448130560.0, "16045": 17448130560.0, "16050": 17448130560.0, "16055": 17448130560.0, "16060": 17448130560.0, "16065": 17448130560.0, "16070": 17448130560.0, "16075": 17448130560.0, "16080": 17448130560.0, "16085": 17448130560.0, "16090": 17448130560.0, "16095": 17448130560.0, "16100": 17448130560.0, "16105": 17448130560.0, "16110": 17448966144.0, "16115": 17448130560.0, "16120": 17448931328.0, "16125": 17448130560.0, "16130": 17448130560.0, "16135": 17448130560.0, "16140": 17448660992.0, "16145": 17448130560.0, "16150": 17448130560.0, "16155": 17448130560.0, "16160": 17448130560.0, "16165": 17448130560.0, "16170": 17448130560.0, "16175": 17448130560.0, "16180": 17448130560.0, "16185": 17448130560.0, "16190": 17448130560.0, "16195": 17448947712.0, "16200": 17448790016.0, "16205": 17448130560.0, "16210": 17448130560.0, "16215": 17448286208.0, "16220": 17448130560.0, "16225": 17448130560.0, "16230": 17448130560.0, "16235": 17448130560.0, "16240": 17448130560.0, 
"16245": 17448130560.0, "16250": 17448130560.0, "16255": 17448130560.0, "16260": 17448130560.0, "16265": 17448130560.0, "16270": 17448130560.0, "16275": 17448130560.0, "16280": 17448130560.0, "16285": 17448130560.0, "16290": 17448130560.0, "16295": 17448130560.0, "16300": 17448130560.0, "16305": 17448130560.0, "16310": 17448130560.0, "16315": 17448130560.0, "16320": 17448130560.0, "16325": 17448130560.0, "16330": 17448130560.0, "16335": 17448130560.0, "16340": 17448130560.0, "16345": 17448130560.0, "16350": 17448878080.0, "16355": 17448130560.0, "16360": 17448130560.0, "16365": 17448130560.0, "16370": 17448130560.0, "16375": 17448130560.0, "16380": 17448364032.0, "16385": 17448130560.0, "16390": 17448130560.0, "16395": 17448130560.0, "16400": 17448130560.0, "16405": 17448130560.0, "16410": 17448130560.0, "16415": 17448130560.0, "16420": 17448130560.0, "16425": 17448130560.0, "16430": 17448130560.0, "16435": 17448130560.0, "16440": 17448130560.0, "16445": 17448130560.0, "16450": 17448130560.0, "16455": 17448548352.0, "16460": 17448130560.0, "16465": 17448130560.0, "16470": 17448130560.0, "16475": 17448130560.0, "16480": 17448499200.0, "16485": 17448130560.0, "16490": 17448130560.0, "16495": 17448130560.0, "16500": 17448130560.0, "16505": 17448130560.0, "16510": 17448130560.0, "16515": 17448130560.0, "16520": 17448130560.0, "16525": 17448329216.0, "16530": 17448130560.0, "16535": 17448130560.0, "16540": 17448130560.0, "16545": 17448704000.0, "16550": 17448466432.0, "16555": 17448130560.0, "16560": 17448130560.0, "16565": 17448130560.0, "16570": 17448130560.0, "16575": 17448130560.0, "16580": 17448130560.0, "16585": 17448130560.0, "16590": 17448130560.0, "16595": 17448130560.0, "16600": 17448130560.0, "16605": 17448130560.0, "16610": 17448982528.0, "16615": 17448982528.0, "16620": 17448982528.0, "16625": 17449758720.0, "16630": 17449203712.0, "16635": 17448982528.0, "16640": 17449273344.0, "16645": 17448982528.0, "16650": 17448982528.0, "16655": 17448982528.0, "16660": 17448982528.0, "16665": 17448982528.0, "16670": 17448982528.0, "16675": 17448982528.0, "16680": 17448982528.0, "16685": 17448982528.0, "16690": 17448982528.0, "16695": 17448982528.0, "16700": 17448982528.0, "16705": 17449228288.0, "16710": 17448982528.0, "16715": 17448982528.0, "16720": 17448982528.0, "16725": 17448982528.0, "16730": 17448982528.0, "16735": 17448982528.0, "16740": 17448982528.0, "16745": 17448982528.0, "16750": 17448982528.0, "16755": 17448982528.0, "16760": 17448982528.0, "16765": 17448982528.0, "16770": 17448982528.0, "16775": 17448982528.0, "16780": 17448982528.0, "16785": 17448982528.0, "16790": 17449056256.0, "16795": 17448982528.0, "16800": 17449244672.0, "16805": 17448982528.0, "16810": 17448982528.0, "16815": 17448982528.0, "16820": 17448982528.0, "16825": 17449728000.0, "16830": 17448982528.0, "16835": 17448982528.0, "16840": 17448982528.0, "16845": 17448982528.0, "16850": 17448982528.0, "16855": 17448982528.0, "16860": 17448982528.0, "16865": 17448982528.0, "16870": 17448982528.0, "16875": 17448982528.0, "16880": 17448982528.0, "16885": 17448982528.0, "16890": 17448982528.0, "16895": 17449449472.0, "16900": 17448982528.0, "16905": 17448982528.0, "16910": 17448982528.0, "16915": 17448982528.0, "16920": 17448982528.0, "16925": 17448982528.0, "16930": 17448982528.0, "16935": 17448982528.0, "16940": 17448982528.0, "16945": 17448982528.0, "16950": 17448982528.0, "16955": 17448982528.0, "16960": 17448982528.0, "16965": 17448982528.0, "16970": 17449318400.0, "16975": 17448982528.0, "16980": 17448982528.0, 
"16985": 17448982528.0, "16990": 17448982528.0, "16995": 17448982528.0, "17000": 17448982528.0, "17005": 17448982528.0, "17010": 17448982528.0, "17015": 17448982528.0, "17020": 17448982528.0, "17025": 17448982528.0, "17030": 17449162752.0, "17035": 17448982528.0, "17040": 17448982528.0, "17045": 17448982528.0, "17050": 17448982528.0, "17055": 17448982528.0, "17060": 17448982528.0, "17065": 17448982528.0, "17070": 17448982528.0, "17075": 17448982528.0, "17080": 17448982528.0, "17085": 17448982528.0, "17090": 17448982528.0, "17095": 17448982528.0, "17100": 17448982528.0, "17105": 17448982528.0, "17110": 17448982528.0, "17115": 17448982528.0, "17120": 17448982528.0, "17125": 17449211904.0, "17130": 17448982528.0, "17135": 17448982528.0, "17140": 17449826304.0, "17145": 17448982528.0, "17150": 17448982528.0, "17155": 17448982528.0, "17160": 17449103360.0, "17165": 17448982528.0, "17170": 17448982528.0, "17175": 17448982528.0, "17180": 17448982528.0, "17185": 17448982528.0, "17190": 17448130560.0, "17195": 17448130560.0, "17200": 17448130560.0, "17205": 17448130560.0, "17210": 17448130560.0, "17215": 17448130560.0, "17220": 17448130560.0, "17225": 17448130560.0, "17230": 17448130560.0, "17235": 17448130560.0, "17240": 17448130560.0, "17245": 17448130560.0, "17250": 17448130560.0, "17255": 17448130560.0, "17260": 17448130560.0, "17265": 17448130560.0, "17270": 17448130560.0, "17275": 17448130560.0, "17280": 17448867840.0, "17285": 17448130560.0, "17290": 17448130560.0, "17295": 17448130560.0, "17300": 17448130560.0, "17305": 17448130560.0, "17310": 17448130560.0, "17315": 17448130560.0, "17320": 17448130560.0, "17325": 17448130560.0, "17330": 17449177088.0, "17335": 17448130560.0, "17340": 17448130560.0, "17345": 17448130560.0, "17350": 17448130560.0, "17355": 17448130560.0, "17360": 17448130560.0, "17365": 17448130560.0, "17370": 17448130560.0, "17375": 17448130560.0, "17380": 17448130560.0, "17385": 17448130560.0, "17390": 17448130560.0, "17395": 17448130560.0, "17400": 17448130560.0, "17405": 17448130560.0, "17410": 17448130560.0, "17415": 17448130560.0, "17420": 17448130560.0, "17425": 17448130560.0, "17430": 17448130560.0, "17435": 17448130560.0, "17440": 17448130560.0, "17445": 17448736768.0, "17450": 17448130560.0, "17455": 17448130560.0, "17460": 17448130560.0, "17465": 17448130560.0, "17470": 17448130560.0, "17475": 17448130560.0, "17480": 17448130560.0, "17485": 17448130560.0, "17490": 17448130560.0, "17495": 17448130560.0, "17500": 17448130560.0, "17505": 17448450048.0, "17510": 17448130560.0, "17515": 17448130560.0, "17520": 17448130560.0, "17525": 17448130560.0, "17530": 17448130560.0, "17535": 17448130560.0, "17540": 17448654848.0, "17545": 17448470528.0, "17550": 17448130560.0, "17555": 17448992768.0, "17560": 17448130560.0, "17565": 17448130560.0, "17570": 17448863744.0, "17575": 17448130560.0, "17580": 17448130560.0, "17585": 17448130560.0, "17590": 17448130560.0, "17595": 17448130560.0, "17600": 17448130560.0, "17605": 17448130560.0, "17610": 17448130560.0, "17615": 17448396800.0, "17620": 17448130560.0, "17625": 17448130560.0, "17630": 17448130560.0, "17635": 17448130560.0, "17640": 17448130560.0, "17645": 17448130560.0, "17650": 17448130560.0, "17655": 17448130560.0, "17660": 17448509440.0, "17665": 17448130560.0, "17670": 17448130560.0, "17675": 17448130560.0, "17680": 17448130560.0, "17685": 17448130560.0, "17690": 17448130560.0, "17695": 17448130560.0, "17700": 17448130560.0, "17705": 17448130560.0, "17710": 17448130560.0, "17715": 17448409088.0, "17720": 17448130560.0, 
"17725": 17448130560.0, "17730": 17448130560.0, "17735": 17448474624.0, "17740": 17448384512.0, "17745": 17448384512.0, "17750": 17448384512.0, "17755": 17448384512.0, "17760": 17448384512.0, "17765": 17448384512.0, "17770": 17448384512.0, "17775": 17448384512.0, "17780": 17448384512.0, "17785": 17448384512.0, "17790": 17448443904.0, "17795": 17448384512.0, "17800": 17448384512.0, "17805": 17448384512.0, "17810": 17448384512.0, "17815": 17448384512.0, "17820": 17448384512.0, "17825": 17448384512.0, "17830": 17448384512.0, "17835": 17448384512.0, "17840": 17448384512.0, "17845": 17448384512.0, "17850": 17448384512.0, "17855": 17448884224.0, "17860": 17448384512.0, "17865": 17448384512.0, "17870": 17448384512.0, "17875": 17448384512.0, "17880": 17448765440.0, "17885": 17448656896.0, "17890": 17448384512.0, "17895": 17448906752.0, "17900": 17448384512.0, "17905": 17448384512.0, "17910": 17448384512.0, "17915": 17448384512.0, "17920": 17448384512.0, "17925": 17448384512.0, "17930": 17448384512.0, "17935": 17448384512.0, "17940": 17448384512.0, "17945": 17448384512.0, "17950": 17448384512.0, "17955": 17448384512.0, "17960": 17448384512.0, "17965": 17448384512.0, "17970": 17448384512.0, "17975": 17448507392.0, "17980": 17448384512.0, "17985": 17448384512.0, "17990": 17448384512.0, "17995": 17448384512.0, "18000": 17448384512.0, "18005": 17448384512.0, "18010": 17449175040.0, "18015": 17448384512.0, "18020": 17449195520.0, "18025": 17449201664.0, "18030": 17448384512.0, "18035": 17448384512.0, "18040": 17448384512.0, "18045": 17448384512.0, "18050": 17448384512.0, "18055": 17448384512.0, "18060": 17448384512.0, "18065": 17448384512.0, "18070": 17448384512.0, "18075": 17448384512.0, "18080": 17448384512.0, "18085": 17448456192.0, "18090": 17448384512.0, "18095": 17448646656.0, "18100": 17448384512.0, "18105": 17448384512.0, "18110": 17448384512.0, "18115": 17448384512.0, "18120": 17448384512.0, "18125": 17448384512.0, "18130": 17448384512.0, "18135": 17448384512.0, "18140": 17448384512.0, "18145": 17448671232.0, "18150": 17448384512.0, "18155": 17448384512.0, "18160": 17448384512.0, "18165": 17448384512.0, "18170": 17448384512.0, "18175": 17448384512.0, "18180": 17448538112.0, "18185": 17448384512.0, "18190": 17448384512.0, "18195": 17448384512.0, "18200": 17448384512.0, "18205": 17448384512.0, "18210": 17448384512.0, "18215": 17448728576.0, "18220": 17448384512.0, "18225": 17448384512.0, "18230": 17448384512.0, "18235": 17448384512.0, "18240": 17448384512.0, "18245": 17448384512.0, "18250": 17448384512.0, "18255": 17448384512.0, "18260": 17448384512.0, "18265": 17448384512.0, "18270": 17448384512.0, "18275": 17448384512.0, "18280": 17449025536.0, "18285": 17448384512.0, "18290": 17448986624.0, "18295": 17448986624.0, "18300": 17448986624.0, "18305": 17449560064.0, "18310": 17448986624.0, "18315": 17448986624.0, "18320": 17448986624.0, "18325": 17448986624.0, "18330": 17448986624.0, "18335": 17448986624.0, "18340": 17448986624.0, "18345": 17448986624.0, "18350": 17448986624.0, "18355": 17448986624.0, "18360": 17448986624.0, "18365": 17449707520.0, "18370": 17448986624.0, "18375": 17448986624.0, "18380": 17448986624.0, "18385": 17448986624.0, "18390": 17448986624.0, "18395": 17448986624.0, "18400": 17448986624.0, "18405": 17448986624.0, "18410": 17448986624.0, "18415": 17448986624.0, "18420": 17448986624.0, "18425": 17448986624.0, "18430": 17449691136.0, "18435": 17448986624.0, "18440": 17448986624.0, "18445": 17448986624.0, "18450": 17448986624.0, "18455": 17448986624.0, "18460": 17448986624.0, 
"18465": 17449760768.0, "18470": 17449715712.0, "18475": 17448986624.0, "18480": 17448986624.0, "18485": 17448986624.0, "18490": 17449809920.0, "18495": 17448986624.0, "18500": 17448986624.0, "18505": 17448986624.0, "18510": 17448986624.0, "18515": 17448986624.0, "18520": 17448986624.0, "18525": 17448986624.0, "18530": 17448986624.0, "18535": 17448986624.0, "18540": 17448986624.0, "18545": 17448986624.0, "18550": 17448986624.0, "18555": 17448986624.0, "18560": 17448986624.0, "18565": 17448986624.0, "18570": 17448986624.0, "18575": 17448986624.0, "18580": 17448986624.0, "18585": 17448986624.0, "18590": 17448986624.0, "18595": 17449781248.0, "18600": 17448986624.0, "18605": 17448986624.0, "18610": 17448986624.0, "18615": 17448986624.0, "18620": 17448986624.0, "18625": 17448986624.0, "18630": 17448986624.0, "18635": 17449060352.0, "18640": 17448986624.0, "18645": 17448986624.0, "18650": 17448986624.0, "18655": 17448986624.0, "18660": 17448986624.0, "18665": 17448986624.0, "18670": 17448986624.0, "18675": 17448986624.0, "18680": 17449068544.0, "18685": 17448986624.0, "18690": 17448986624.0, "18695": 17448986624.0, "18700": 17448986624.0, "18705": 17449248768.0, "18710": 17448986624.0, "18715": 17448986624.0, "18720": 17448986624.0, "18725": 17448986624.0, "18730": 17448986624.0, "18735": 17448986624.0, "18740": 17448986624.0, "18745": 17448986624.0, "18750": 17448986624.0, "18755": 17448986624.0, "18760": 17448986624.0, "18765": 17448986624.0, "18770": 17448986624.0, "18775": 17449445376.0, "18780": 17448986624.0, "18785": 17449469952.0, "18790": 17449003008.0, "18795": 17448986624.0, "18800": 17448986624.0, "18805": 17448986624.0, "18810": 17449183232.0, "18815": 17448986624.0, "18820": 17448986624.0, "18825": 17448986624.0, "18830": 17448986624.0, "18835": 17448986624.0, "18840": 17448986624.0, "18845": 17448130560.0, "18850": 17448130560.0, "18855": 17448130560.0, "18860": 17448130560.0, "18865": 17448130560.0, "18870": 17448130560.0, "18875": 17448130560.0, "18880": 17448130560.0, "18885": 17448130560.0, "18890": 17448130560.0, "18895": 17448744960.0, "18900": 17448130560.0, "18905": 17448130560.0, "18910": 17448130560.0, "18915": 17448130560.0, "18920": 17448130560.0, "18925": 17448574976.0, "18930": 17448130560.0, "18935": 17448130560.0, "18940": 17448148992.0, "18945": 17448130560.0, "18950": 17448130560.0, "18955": 17448130560.0, "18960": 17448130560.0, "18965": 17448130560.0, "18970": 17448998912.0, "18975": 17448130560.0, "18980": 17448130560.0, "18985": 17448130560.0, "18990": 17448130560.0, "18995": 17448130560.0, "19000": 17448130560.0, "19005": 17448130560.0, "19010": 17448130560.0, "19015": 17448130560.0, "19020": 17448130560.0, "19025": 17448130560.0, "19030": 17448130560.0, "19035": 17448130560.0, "19040": 17448130560.0, "19045": 17448130560.0, "19050": 17448130560.0, "19055": 17448130560.0, "19060": 17448130560.0, "19065": 17448130560.0, "19070": 17448216576.0, "19075": 17448130560.0, "19080": 17448130560.0, "19085": 17448761344.0, "19090": 17448763392.0, "19095": 17448130560.0, "19100": 17448130560.0, "19105": 17448130560.0, "19110": 17448130560.0, "19115": 17448130560.0, "19120": 17448130560.0, "19125": 17448130560.0, "19130": 17448466432.0, "19135": 17448130560.0, "19140": 17448130560.0, "19145": 17448130560.0, "19150": 17448130560.0, "19155": 17448130560.0, "19160": 17448130560.0, "19165": 17448130560.0, "19170": 17448130560.0, "19175": 17448130560.0, "19180": 17448130560.0, "19185": 17448130560.0, "19190": 17448130560.0, "19195": 17448130560.0, "19200": 17448130560.0, 
"19205": 17448130560.0, "19210": 17448335360.0, "19215": 17449345024.0, "19220": 17448130560.0, "19225": 17448130560.0, "19230": 17448298496.0, "19235": 17448130560.0, "19240": 17448130560.0, "19245": 17448130560.0, "19250": 17448130560.0, "19255": 17448818688.0, "19260": 17448130560.0, "19265": 17448130560.0, "19270": 17448286208.0, "19275": 17448216576.0, "19280": 17448130560.0, "19285": 17448464384.0, "19290": 17448130560.0, "19295": 17448130560.0, "19300": 17448130560.0, "19305": 17448130560.0, "19310": 17448687616.0, "19315": 17448130560.0, "19320": 17448130560.0, "19325": 17448130560.0, "19330": 17448130560.0, "19335": 17448130560.0, "19340": 17448130560.0, "19345": 17448130560.0, "19350": 17448130560.0, "19355": 17448130560.0, "19360": 17448130560.0, "19365": 17448130560.0, "19370": 17448130560.0, "19375": 17448130560.0, "19380": 17448130560.0, "19385": 17448130560.0, "19390": 17448570880.0, "19395": 17448130560.0, "19400": 17448130560.0, "19405": 17448130560.0, "19410": 17448130560.0, "19415": 17448402944.0, "19420": 17448130560.0, "19425": 17448130560.0, "19430": 17448130560.0, "19435": 17448130560.0, "19440": 17448998912.0, "19445": 17448130560.0, "19450": 17448130560.0, "19455": 17448130560.0, "19460": 17448130560.0, "19465": 17448130560.0, "19470": 17448130560.0, "19475": 17448720384.0, "19480": 17448130560.0, "19485": 17448130560.0, "19490": 17448130560.0, "19495": 17448130560.0, "19500": 17448130560.0, "19505": 17448130560.0, "19510": 17448130560.0, "19515": 17448130560.0, "19520": 17448130560.0, "19525": 17448130560.0, "19530": 17448130560.0, "19535": 17448130560.0, "19540": 17448130560.0, "19545": 17448130560.0, "19550": 17448130560.0, "19555": 17448212480.0, "19560": 17448130560.0, "19565": 17448130560.0, "19570": 17448130560.0, "19575": 17448130560.0, "19580": 17448130560.0, "19585": 17448130560.0, "19590": 17448130560.0, "19595": 17448130560.0, "19600": 17449152512.0, "19605": 17448130560.0, "19610": 17448130560.0, "19615": 17448130560.0, "19620": 17448130560.0, "19625": 17448130560.0, "19630": 17448130560.0, "19635": 17448130560.0, "19640": 17448130560.0, "19645": 17448130560.0, "19650": 17448130560.0, "19655": 17448130560.0, "19660": 17448130560.0, "19665": 17449746432.0, "19670": 17448130560.0, "19675": 17448318976.0, "19680": 17448130560.0, "19685": 17449177088.0, "19690": 17449041920.0, "19695": 17448130560.0, "19700": 17448130560.0, "19705": 17448376320.0, "19710": 17448130560.0, "19715": 17448130560.0, "19720": 17448130560.0, "19725": 17448941568.0, "19730": 17448130560.0, "19735": 17448130560.0, "19740": 17448130560.0, "19745": 17448130560.0, "19750": 17448130560.0, "19755": 17448130560.0, "19760": 17448130560.0, "19765": 17448130560.0, "19770": 17448130560.0, "19775": 17448130560.0, "19780": 17448130560.0, "19785": 17448130560.0, "19790": 17448130560.0, "19795": 17448228864.0, "19800": 17448130560.0, "19805": 17448130560.0, "19810": 17448130560.0, "19815": 17448130560.0, "19820": 17448130560.0, "19825": 17448130560.0, "19830": 17448130560.0, "19835": 17448130560.0, "19840": 17448130560.0, "19845": 17448755200.0, "19850": 17448130560.0, "19855": 17449148416.0, "19860": 17448130560.0, "19865": 17448130560.0, "19870": 17448359936.0, "19875": 17448130560.0, "19880": 17448130560.0, "19885": 17448599552.0, "19890": 17448130560.0, "19895": 17448130560.0, "19900": 17448663040.0, "19905": 17448130560.0, "19910": 17448345600.0, "19915": 17448335360.0, "19920": 17448130560.0, "19925": 17448738816.0, "19930": 17448130560.0, "19935": 17448130560.0, "19940": 17448130560.0, 
"19945": 17448130560.0, "19950": 17448130560.0, "19955": 17448130560.0, "19960": 17448130560.0, "19965": 17448130560.0, "19970": 17448130560.0, "19975": 17448130560.0, "19980": 17448130560.0, "19985": 17448130560.0, "19990": 17448130560.0, "19995": 17448130560.0, "20000": 17448130560.0, "20005": 17448294400.0, "20010": 17448130560.0, "20015": 17448130560.0, "20020": 17448130560.0, "20025": 17448130560.0, "20030": 17448130560.0, "20035": 17448130560.0, "20040": 17448130560.0, "20045": 17448933376.0, "20050": 17448130560.0, "20055": 17448130560.0, "20060": 17448130560.0, "20065": 17448130560.0, "20070": 17448130560.0, "20075": 17448693760.0, "20080": 17448908800.0, "20085": 17448130560.0, "20090": 17448130560.0, "20095": 17448130560.0, "20100": 17448130560.0, "20105": 17448130560.0, "20110": 17448130560.0, "20115": 17448355840.0, "20120": 17448130560.0, "20125": 17448130560.0, "20130": 17448130560.0, "20135": 17448171520.0, "20140": 17448343552.0, "20145": 17448130560.0, "20150": 17448130560.0, "20155": 17448970240.0, "20160": 17448736768.0, "20165": 17448130560.0, "20170": 17448617984.0, "20175": 17448130560.0, "20180": 17448130560.0, "20185": 17448130560.0, "20190": 17448130560.0, "20195": 17448130560.0, "20200": 17448130560.0, "20205": 17448130560.0, "20210": 17448130560.0, "20215": 17448130560.0, "20220": 17448130560.0, "20225": 17448130560.0, "20230": 17448130560.0, "20235": 17448130560.0, "20240": 17448130560.0, "20245": 17448433664.0, "20250": 17448130560.0, "20255": 17449162752.0, "20260": 17448130560.0, "20265": 17449017344.0, "20270": 17448130560.0, "20275": 17448130560.0, "20280": 17448130560.0, "20285": 17448130560.0, "20290": 17448130560.0, "20295": 17448130560.0, "20300": 17448728576.0, "20305": 17448130560.0, "20310": 17448130560.0, "20315": 17448130560.0, "20320": 17448130560.0, "20325": 17448130560.0, "20330": 17448130560.0, "20335": 17449048064.0, "20340": 17448130560.0, "20345": 17448130560.0, "20350": 17448130560.0, "20355": 17448130560.0, "20360": 17448130560.0, "20365": 17448130560.0, "20370": 17448130560.0, "20375": 17448331264.0, "20380": 17448130560.0, "20385": 17448130560.0, "20390": 17448130560.0, "20395": 17448130560.0, "20400": 17448130560.0, "20405": 17448130560.0, "20410": 17448130560.0, "20415": 17448130560.0, "20420": 17448130560.0, "20425": 17449076736.0, "20430": 17448130560.0, "20435": 17448130560.0, "20440": 17448130560.0, "20445": 17448130560.0, "20450": 17448130560.0, "20455": 17448130560.0, "20460": 17448130560.0, "20465": 17448130560.0, "20470": 17448130560.0, "20475": 17448818688.0, "20480": 17448130560.0, "20485": 17448130560.0, "20490": 17448130560.0, "20495": 17448130560.0, "20500": 17448130560.0, "20505": 17448130560.0, "20510": 17448130560.0, "20515": 17448130560.0, "20520": 17448617984.0, "20525": 17448818688.0, "20530": 17448130560.0, "20535": 17448130560.0, "20540": 17448130560.0, "20545": 17448130560.0, "20550": 17448990720.0, "20555": 17448491008.0, "20560": 17448130560.0, "20565": 17448130560.0, "20570": 17448130560.0, "20575": 17448130560.0, "20580": 17448130560.0, "20585": 17448130560.0, "20590": 17448130560.0, "20595": 17448130560.0, "20600": 17448130560.0, "20605": 17449005056.0, "20610": 17448130560.0, "20615": 17448130560.0, "20620": 17448130560.0, "20625": 17448130560.0, "20630": 17448130560.0, "20635": 17448732672.0, "20640": 17448130560.0, "20645": 17448130560.0, "20650": 17448130560.0, "20655": 17448130560.0, "20660": 17448130560.0, "20665": 17448130560.0, "20670": 17448130560.0, "20675": 17448130560.0, "20680": 17448267776.0, 
"20685": 17448130560.0, "20690": 17448130560.0, "20695": 17448130560.0, "20700": 17448130560.0, "20705": 17448130560.0, "20710": 17448130560.0, "20715": 17448130560.0, "20720": 17448267776.0, "20725": 17448130560.0, "20730": 17448589312.0, "20735": 17448130560.0, "20740": 17448130560.0, "20745": 17448130560.0, "20750": 17448130560.0, "20755": 17448130560.0, "20760": 17448130560.0, "20765": 17448130560.0, "20770": 17448130560.0, "20775": 17448130560.0, "20780": 17448302592.0, "20785": 17448130560.0, "20790": 17448130560.0, "20795": 17448130560.0, "20800": 17448130560.0, "20805": 17448130560.0, "20810": 17448130560.0, "20815": 17448130560.0, "20820": 17448130560.0, "20825": 17448130560.0, "20830": 17448435712.0, "20835": 17449048064.0, "20840": 17448130560.0, "20845": 17448130560.0, "20850": 17448130560.0, "20855": 17448130560.0, "20860": 17448130560.0, "20865": 17448130560.0, "20870": 17448130560.0, "20875": 17448130560.0, "20880": 17448790016.0, "20885": 17448265728.0, "20890": 17448130560.0, "20895": 17448130560.0, "20900": 17448130560.0, "20905": 17448130560.0, "20910": 17448632320.0, "20915": 17448130560.0, "20920": 17449138176.0, "20925": 17448130560.0, "20930": 17448130560.0, "20935": 17448130560.0, "20940": 17448130560.0, "20945": 17448130560.0, "20950": 17448130560.0, "20955": 17448130560.0, "20960": 17448130560.0, "20965": 17448130560.0, "20970": 17448441856.0, "20975": 17448148992.0, "20980": 17448130560.0, "20985": 17448130560.0, "20990": 17448171520.0, "20995": 17448130560.0, "21000": 17448130560.0, "21005": 17448130560.0, "21010": 17448130560.0, "21015": 17448130560.0, "21020": 17448130560.0, "21025": 17448130560.0, "21030": 17448859648.0, "21035": 17448130560.0, "21040": 17448130560.0, "21045": 17448130560.0, "21050": 17448851456.0, "21055": 17448130560.0, "21060": 17448130560.0, "21065": 17448130560.0, "21070": 17448130560.0, "21075": 17448130560.0, "21080": 17448130560.0, "21085": 17448130560.0, "21090": 17448515584.0, "21095": 17448677376.0, "21100": 17448130560.0, "21105": 17448130560.0, "21110": 17448130560.0, "21115": 17448130560.0, "21120": 17448130560.0, "21125": 17448179712.0, "21130": 17448130560.0, "21135": 17448130560.0, "21140": 17448130560.0, "21145": 17448130560.0, "21150": 17448130560.0, "21155": 17448572928.0, "21160": 17448130560.0, "21165": 17448130560.0, "21170": 17448278016.0, "21175": 17448476672.0, "21180": 17448130560.0, "21185": 17448130560.0, "21190": 17448130560.0, "21195": 17448130560.0, "21200": 17448925184.0, "21205": 17448130560.0, "21210": 17448130560.0, "21215": 17448603648.0, "21220": 17448130560.0, "21225": 17448495104.0, "21230": 17448130560.0, "21235": 17448130560.0, "21240": 17448130560.0, "21245": 17448130560.0, "21250": 17448351744.0, "21255": 17448130560.0, "21260": 17448130560.0, "21265": 17448982528.0, "21270": 17448833024.0, "21275": 17448130560.0, "21280": 17448130560.0, "21285": 17448130560.0, "21290": 17448130560.0, "21295": 17448130560.0, "21300": 17448130560.0, "21305": 17448990720.0, "21310": 17448130560.0, "21315": 17449269248.0, "21320": 17448130560.0, "21325": 17448130560.0, "21330": 17448130560.0, "21335": 17448130560.0, "21340": 17448130560.0, "21345": 17448130560.0, "21350": 17448130560.0, "21355": 17448130560.0, "21360": 17448130560.0, "21365": 17449162752.0, "21370": 17449062400.0, "21375": 17448130560.0, "21380": 17448816640.0, "21385": 17448130560.0, "21390": 17448130560.0, "21395": 17448130560.0, "21400": 17448130560.0, "21405": 17448130560.0, "21410": 17448130560.0, "21415": 17448130560.0, "21420": 17448144896.0, 
"21425": 17448130560.0, "21430": 17448130560.0, "21435": 17448130560.0, "21440": 17448130560.0, "21445": 17448130560.0, "21450": 17448130560.0, "21455": 17448130560.0, "21460": 17448486912.0, "21465": 17448130560.0, "21470": 17448851456.0, "21475": 17448130560.0, "21480": 17448458240.0, "21485": 17448130560.0, "21490": 17448130560.0, "21495": 17448130560.0, "21500": 17448130560.0, "21505": 17448130560.0, "21510": 17448130560.0, "21515": 17448130560.0, "21520": 17448130560.0, "21525": 17448130560.0, "21530": 17448130560.0, "21535": 17448130560.0, "21540": 17448130560.0, "21545": 17448130560.0, "21550": 17448130560.0, "21555": 17448130560.0, "21560": 17448130560.0, "21565": 17448130560.0, "21570": 17448130560.0, "21575": 17448130560.0, "21580": 17448130560.0, "21585": 17448130560.0, "21590": 17448130560.0, "21595": 17448130560.0, "21600": 17448130560.0, "21605": 17448130560.0, "21610": 17448130560.0, "21615": 17448130560.0, "21620": 17448130560.0, "21625": 17448130560.0, "21630": 17448130560.0, "21635": 17448130560.0, "21640": 17448130560.0, "21645": 17448130560.0, "21650": 17448130560.0, "21655": 17448130560.0, "21660": 17448130560.0, "21665": 17448130560.0, "21670": 17448130560.0, "21675": 17448130560.0, "21680": 17448130560.0, "21685": 17448130560.0, "21690": 17448130560.0, "21695": 17448130560.0, "21700": 17448130560.0, "21705": 17448130560.0, "21710": 17448130560.0, "21715": 17448130560.0, "21720": 17448130560.0, "21725": 17448130560.0, "21730": 17448130560.0, "21735": 17448130560.0, "21740": 17448130560.0, "21745": 17448130560.0, "21750": 17448130560.0, "21755": 17448130560.0, "21760": 17448130560.0, "21765": 17448130560.0, "21770": 17448216576.0, "21775": 17448130560.0, "21780": 17448130560.0, "21785": 17448130560.0, "21790": 17448130560.0, "21795": 17448736768.0, "21800": 17448130560.0, "21805": 17448130560.0, "21810": 17448130560.0, "21815": 17448130560.0, "21820": 17448781824.0, "21825": 17448804352.0, "21830": 17448130560.0, "21835": 17448130560.0, "21840": 17448130560.0, "21845": 17448130560.0, "21850": 17448130560.0, "21855": 17448130560.0, "21860": 17448130560.0, "21865": 17448773632.0, "21870": 17448130560.0, "21875": 17448237056.0, "21880": 17448130560.0, "21885": 17448605696.0, "21890": 17448130560.0, "21895": 17448130560.0, "21900": 17448130560.0, "21905": 17448984576.0, "21910": 17448130560.0, "21915": 17448130560.0, "21920": 17448130560.0, "21925": 17448130560.0, "21930": 17448130560.0, "21935": 17448130560.0, "21940": 17448130560.0, "21945": 17448130560.0, "21950": 17448130560.0, "21955": 17448130560.0, "21960": 17448130560.0, "21965": 17448130560.0, "21970": 17448491008.0, "21975": 17448130560.0, "21980": 17448130560.0, "21985": 17448130560.0, "21990": 17448130560.0, "21995": 17449072640.0, "22000": 17448130560.0, "22005": 17448130560.0, "22010": 17448130560.0, "22015": 17448130560.0, "22020": 17448130560.0, "22025": 17449074688.0, "22030": 17448130560.0, "22035": 17448130560.0, "22040": 17448130560.0, "22045": 17448130560.0, "22050": 17448130560.0, "22055": 17448130560.0, "22060": 17448130560.0, "22065": 17448130560.0, "22070": 17448130560.0, "22075": 17448835072.0, "22080": 17448130560.0, "22085": 17448204288.0, "22090": 17448130560.0, "22095": 17448130560.0, "22100": 17448130560.0, "22105": 17448130560.0, "22110": 17448130560.0, "22115": 17448130560.0, "22120": 17448130560.0, "22125": 17448130560.0, "22130": 17448130560.0, "22135": 17448130560.0, "22140": 17448130560.0, "22145": 17448130560.0, "22150": 17448130560.0, "22155": 17448130560.0, "22160": 17448130560.0, 
"22165": 17448130560.0, "22170": 17448130560.0, "22175": 17448130560.0, "22180": 17448130560.0, "22185": 17448130560.0, "22190": 17448130560.0, "22195": 17448220672.0, "22200": 17448130560.0, "22205": 17448130560.0, "22210": 17448130560.0, "22215": 17448130560.0, "22220": 17448130560.0, "22225": 17448130560.0, "22230": 17448542208.0, "22235": 17449134080.0, "22240": 17448130560.0, "22245": 17448130560.0, "22250": 17448130560.0, "22255": 17448130560.0, "22260": 17448130560.0, "22265": 17448130560.0, "22270": 17448130560.0, "22275": 17448130560.0, "22280": 17448130560.0, "22285": 17448130560.0, "22290": 17448130560.0, "22295": 17448130560.0, "22300": 17448130560.0, "22305": 17448130560.0, "22310": 17449089024.0, "22315": 17448130560.0, "22320": 17448130560.0, "22325": 17448130560.0, "22330": 17448130560.0, "22335": 17448130560.0, "22340": 17448130560.0, "22345": 17448130560.0, "22350": 17448130560.0, "22355": 17448130560.0, "22360": 17448130560.0, "22365": 17448130560.0, "22370": 17448130560.0, "22375": 17448130560.0, "22380": 17448130560.0, "22385": 17448130560.0, "22390": 17448130560.0, "22395": 17448130560.0, "22400": 17448130560.0, "22405": 17448130560.0, "22410": 17448130560.0, "22415": 17448130560.0, "22420": 17448933376.0, "22425": 17448130560.0, "22430": 17448130560.0, "22435": 17448130560.0, "22440": 17448130560.0, "22445": 17448130560.0, "22450": 17448130560.0, "22455": 17448130560.0, "22460": 17448130560.0, "22465": 17448130560.0, "22470": 17448130560.0, "22475": 17448130560.0, "22480": 17448130560.0, "22485": 17448130560.0, "22490": 17448130560.0, "22495": 17448130560.0, "22500": 17448130560.0, "22505": 17449015296.0, "22510": 17448130560.0, "22515": 17448130560.0, "22520": 17448130560.0, "22525": 17448130560.0, "22530": 17448130560.0, "22535": 17448130560.0, "22540": 17448130560.0, "22545": 17448130560.0, "22550": 17448130560.0, "22555": 17448130560.0, "22560": 17448130560.0, "22565": 17448130560.0, "22570": 17448130560.0, "22575": 17448130560.0, "22580": 17448130560.0, "22585": 17448130560.0, "22590": 17448130560.0, "22595": 17448130560.0, "22600": 17448130560.0, "22605": 17448130560.0, "22610": 17448130560.0, "22615": 17448130560.0, "22620": 17448130560.0, "22625": 17448777728.0, "22630": 17448130560.0, "22635": 17448130560.0, "22640": 17448130560.0, "22645": 17448761344.0, "22650": 17448130560.0, "22655": 17448130560.0, "22660": 17448941568.0, "22665": 17448833024.0, "22670": 17448130560.0, "22675": 17449236480.0, "22680": 17449039872.0, "22685": 17448130560.0, "22690": 17448130560.0, "22695": 17448953856.0, "22700": 17448130560.0, "22705": 17448488960.0, "22710": 17448130560.0, "22715": 17448130560.0, "22720": 17449093120.0, "22725": 17448163328.0, "22730": 17448130560.0, "22735": 17448130560.0, "22740": 17448130560.0, "22745": 17448753152.0, "22750": 17448130560.0, "22755": 17448130560.0, "22760": 17448130560.0, "22765": 17448130560.0, "22770": 17448130560.0, "22775": 17448130560.0, "22780": 17448130560.0, "22785": 17448130560.0, "22790": 17448130560.0, "22795": 17448130560.0, "22800": 17448130560.0, "22805": 17448130560.0, "22810": 17448130560.0, "22815": 17448130560.0, "22820": 17448130560.0, "22825": 17448130560.0, "22830": 17448130560.0, "22835": 17448130560.0, "22840": 17448517632.0, "22845": 17448394752.0, "22850": 17448130560.0, "22855": 17448130560.0, "22860": 17448130560.0, "22865": 17448720384.0, "22870": 17448130560.0, "22875": 17448998912.0, "22880": 17448130560.0, "22885": 17448130560.0, "22890": 17448130560.0, "22895": 17448130560.0, "22900": 17448130560.0, 
"22905": 17448499200.0, "22910": 17448130560.0, "22915": 17448130560.0, "22920": 17448130560.0, "22925": 17448130560.0, "22930": 17448130560.0, "22935": 17448130560.0, "22940": 17448130560.0, "22945": 17448130560.0, "22950": 17448130560.0, "22955": 17448130560.0, "22960": 17448130560.0, "22965": 17448130560.0, "22970": 17448130560.0, "22975": 17448130560.0, "22980": 17448376320.0, "22985": 17448130560.0, "22990": 17448187904.0, "22995": 17448130560.0, "23000": 17448204288.0, "23005": 17448130560.0, "23010": 17448130560.0, "23015": 17448130560.0, "23020": 17448130560.0, "23025": 17448130560.0, "23030": 17448130560.0, "23035": 17448245248.0, "23040": 17448130560.0, "23045": 17448130560.0, "23050": 17448130560.0, "23055": 17448130560.0, "23060": 17448130560.0, "23065": 17448130560.0, "23070": 17448130560.0, "23075": 17448130560.0, "23080": 17448130560.0, "23085": 17448130560.0, "23090": 17448130560.0, "23095": 17448130560.0, "23100": 17448130560.0, "23105": 17448130560.0, "23110": 17448130560.0, "23115": 17448130560.0, "23120": 17448738816.0, "23125": 17448130560.0, "23130": 17448130560.0, "23135": 17448130560.0, "23140": 17448130560.0, "23145": 17448130560.0, "23150": 17448523776.0, "23155": 17448130560.0, "23160": 17448130560.0, "23165": 17448130560.0, "23170": 17448130560.0, "23175": 17448130560.0, "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": 
"nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": 
"nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": 
"nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": 
"nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": 
"nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": 
"nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": 
"nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": 
"nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": 
"nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": 
"nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": 
"nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": 
"nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": 
"nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": 
"nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": 
"nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": 
"nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": 
"nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": 
"nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": 
"nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": 
"nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": 
"nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": 
"nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": 
"nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": 
"nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": 
"nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 162.33488, "5": 27.63456, "10": 28.32344, "15": 31.97709, "20": 28.81754, "25": 29.27906, "30": 27.12259, "35": 26.974, "40": 25.73883, "45": 25.50964, "50": 26.38888, "55": 26.07553, "60": 24.80942, "65": 25.43277, "70": 25.65671, "75": 25.2229, "80": 24.71458, "85": 25.20529, "90": 25.22014, "95": 24.2876, "100": 24.27385, "105": 24.6462, "110": 25.39933, "115": 24.39181, "120": 24.02755, "125": 24.41257, "130": 25.10299, "135": 23.51835, "140": 23.51698, "145": 24.03484, "150": 23.89606, "155": 23.78217, "160": 23.73007, "165": 24.54976, "170": 23.63383, "175": 23.43749, "180": 23.42621, "185": 24.69216, "190": 23.83028, "195": 24.02066, "200": 23.30734, "205": 23.0702, "210": 23.19144, "215": 23.6325, "220": 24.54125, "225": 22.70217, "230": 23.47107, "235": 24.20423, "240": 23.36216, "245": 23.29042, "250": 22.73265, "255": 23.04983, "260": 23.78308, "265": 22.94376, "270": 22.6628, "275": 23.67569, "280": 23.45011, "285": 22.85767, "290": 23.28408, "295": 23.36654, "300": 22.82497, "305": 23.38641, "310": 22.51142, "315": 23.1091, "320": 23.15572, "325": 23.29043, "330": 22.63745, "335": 23.95273, "340": 22.72477, "345": 22.53634, "350": 23.67615, "355": 23.13276, "360": 22.57381, "365": 23.01695, "370": 23.12064, "375": 23.80065, "380": 23.19966, "385": 22.36432, "390": 22.63595, "395": 24.04463, "400": 22.66628, "405": 22.43683, "410": 23.02159, "415": 22.93113, "420": 22.34205, "425": 23.11462, "430": 22.41795, "435": 23.13698, "440": 23.2802, "445": 22.44474, "450": 22.77556, "455": 22.96077, "460": 22.64815, "465": 22.23746, "470": 23.14072, "475": 22.32019, "480": 22.67062, "485": 23.24182, "490": 22.88166, "495": 22.24888, "500": 23.11732, "505": 22.48098, "510": 23.2539, "515": 22.82385, "520": 22.24465, "525": 22.41246, "530": 24.63919, "535": 22.56963, "540": 22.67894, "545": 22.59, "550": 23.79467, "555": 22.53118, "560": 22.49428, "565": 22.48038, "570": 22.98204, "575": 22.37064, "580": 22.35894, "585": 22.323, "590": 22.38922, "595": 22.65984, "600": 22.72899, "605": 22.46005, "610": 22.26637, "615": 22.32573, "620": 22.34588, "625": 22.40991, "630": 22.34639, "635": 22.19154, "640": 22.74851, "645": 22.36226, "650": 22.24318, "655": 22.33952, "660": 22.18007, "665": 22.37838, "670": 22.2674, "675": 22.29029, "680": 22.51652, "685": 22.71382, "690": 22.44028, "695": 22.28969, "700": 
23.15655, "705": 22.26592, "710": 22.33902, "715": 22.28232, "720": 23.43602, "725": 22.20604, "730": 22.63268, "735": 22.10392, "740": 23.84919, "745": 22.20986, "750": 22.19889, "755": 22.28392, "760": 22.94526, "765": 22.18916, "770": 22.15143, "775": 22.72231, "780": 22.44263, "785": 22.23465, "790": 22.23648, "795": 22.36058, "800": 22.48532, "805": 22.25367, "810": 22.285, "815": 22.24517, "820": 22.72961, "825": 22.11721, "830": 22.46054, "835": 22.29391, "840": 22.20994, "845": 22.21984, "850": 22.10665, "855": 23.02529, "860": 22.19173, "865": 22.60161, "870": 22.33966, "875": 23.65689, "880": 22.11257, "885": 22.67924, "890": 22.2571, "895": 23.328, "900": 22.17895, "905": 22.33173, "910": 22.74791, "915": 22.49844, "920": 22.14545, "925": 22.35647, "930": 22.09751, "935": 22.37875, "940": 22.65919, "945": 22.35912, "950": 22.46622, "955": 22.99397, "960": 22.48404, "965": 22.27889, "970": 22.36857, "975": 22.2725, "980": 22.13928, "985": 22.34166, "990": 22.13013, "995": 22.13094, "1000": 22.51724, "1005": 22.14379, "1010": 22.99352, "1015": 22.09964, "1020": 22.15952, "1025": 22.4116, "1030": 23.97159, "1035": 22.30551, "1040": 22.3264, "1045": 22.65636, "1050": 23.32904, "1055": 22.37957, "1060": 22.27145, "1065": 22.12749, "1070": 22.6648, "1075": 23.12567, "1080": 22.73015, "1085": 23.82922, "1090": 22.61354, "1095": 22.51046, "1100": 22.818, "1105": 22.91434, "1110": 24.22485, "1115": 22.46807, "1120": 22.34262, "1125": 22.39447, "1130": 22.82205, "1135": 22.37659, "1140": 22.07755, "1145": 22.16987, "1150": 22.15043, "1155": 22.14706, "1160": 22.0581, "1165": 22.02658, "1170": 22.12057, "1175": 22.03482, "1180": 22.43241, "1185": 22.67298, "1190": 22.08127, "1195": 22.05525, "1200": 22.12501, "1205": 22.43366, "1210": 22.71879, "1215": 22.46294, "1220": 23.22723, "1225": 22.84288, "1230": 22.28835, "1235": 22.18096, "1240": 23.73816, "1245": 22.16224, "1250": 22.19501, "1255": 22.10131, "1260": 23.95247, "1265": 22.21467, "1270": 22.77909, "1275": 22.16369, "1280": 23.38634, "1285": 22.28984, "1290": 22.28826, "1295": 22.48742, "1300": 22.3813, "1305": 22.1684, "1310": 22.04331, "1315": 22.782, "1320": 22.354, "1325": 22.16697, "1330": 22.07247, "1335": 22.10322, "1340": 22.0373, "1345": 22.07995, "1350": 22.09757, "1355": 22.18128, "1360": 22.98071, "1365": 22.11201, "1370": 22.0308, "1375": 21.98418, "1380": 22.0096, "1385": 22.04115, "1390": 22.12334, "1395": 22.99491, "1400": 22.29047, "1405": 22.90941, "1410": 22.54972, "1415": 23.74327, "1420": 23.16532, "1425": 23.24104, "1430": 23.24638, "1435": 23.7338, "1440": 22.65042, "1445": 22.68182, "1450": 23.57668, "1455": 23.33279, "1460": 22.97514, "1465": 23.1572, "1470": 23.24433, "1475": 23.17847, "1480": 22.77608, "1485": 22.65029, "1490": 22.74621, "1495": 23.04861, "1500": 22.15706, "1505": 22.14463, "1510": 21.97704, "1515": 22.02301, "1520": 22.00818, "1525": 22.38097, "1530": 22.44401, "1535": 22.00382, "1540": 22.71898, "1545": 22.26884, "1550": 23.37392, "1555": 22.23961, "1560": 22.15388, "1565": 22.06374, "1570": 23.81895, "1575": 22.08185, "1580": 22.12565, "1585": 22.92753, "1590": 22.97729, "1595": 22.07155, "1600": 22.00831, "1605": 22.16134, "1610": 22.76439, "1615": 22.05666, "1620": 22.26955, "1625": 22.95159, "1630": 22.50156, "1635": 22.43812, "1640": 22.48025, "1645": 23.85414, "1650": 22.40979, "1655": 22.25589, "1660": 22.19897, "1665": 22.6261, "1670": 22.1556, "1675": 22.19371, "1680": 22.17979, "1685": 22.11633, "1690": 22.49332, "1695": 22.57699, "1700": 22.386, "1705": 22.12892, "1710": 
22.35769, "1715": 22.09777, "1720": 22.06297, "1725": 22.15442, "1730": 22.14226, "1735": 22.07652, "1740": 24.09398, "1745": 22.34081, "1750": 22.10917, "1755": 22.09681, "1760": 23.07998, "1765": 22.30123, "1770": 22.37368, "1775": 23.35021, "1780": 22.45834, "1785": 22.37906, "1790": 22.32725, "1795": 22.48836, "1800": 21.99504, "1805": 22.30828, "1810": 21.98612, "1815": 22.04731, "1820": 22.30447, "1825": 22.00674, "1830": 21.99485, "1835": 22.09578, "1840": 21.95287, "1845": 22.01643, "1850": 22.07223, "1855": 22.01531, "1860": 22.14847, "1865": 21.98391, "1870": 22.541, "1875": 22.15899, "1880": 22.04569, "1885": 22.00364, "1890": 24.15174, "1895": 22.02124, "1900": 22.00647, "1905": 22.04278, "1910": 23.16547, "1915": 22.12646, "1920": 22.02751, "1925": 22.21011, "1930": 23.0298, "1935": 22.62598, "1940": 22.01586, "1945": 22.16859, "1950": 22.48309, "1955": 22.07929, "1960": 21.99907, "1965": 22.03621, "1970": 22.29213, "1975": 22.04389, "1980": 22.2932, "1985": 22.13531, "1990": 22.38337, "1995": 22.09615, "2000": 22.18417, "2005": 22.00023, "2010": 22.01697, "2015": 22.00452, "2020": 22.03078, "2025": 22.69897, "2030": 21.95612, "2035": 22.07158, "2040": 22.02147, "2045": 23.36745, "2050": 21.99916, "2055": 22.02811, "2060": 21.95198, "2065": 23.55966, "2070": 22.01383, "2075": 22.08972, "2080": 22.04, "2085": 22.77204, "2090": 22.10589, "2095": 21.98873, "2100": 21.97686, "2105": 22.17561, "2110": 21.99627, "2115": 21.98763, "2120": 22.17884, "2125": 22.00586, "2130": 22.30566, "2135": 22.00526, "2140": 22.04128, "2145": 22.01381, "2150": 22.02008, "2155": 21.96066, "2160": 21.9781, "2165": 21.94464, "2170": 21.91412, "2175": 21.95968, "2180": 21.98511, "2185": 22.11197, "2190": 21.95374, "2195": 22.03738, "2200": 23.97869, "2205": 22.45066, "2210": 22.35224, "2215": 24.08174, "2220": 22.42296, "2225": 22.21296, "2230": 22.18343, "2235": 22.60632, "2240": 22.12869, "2245": 22.85544, "2250": 22.3811, "2255": 22.55375, "2260": 22.29183, "2265": 22.43185, "2270": 22.5969, "2275": 22.22061, "2280": 22.68011, "2285": 22.53192, "2290": 22.16648, "2295": 22.11318, "2300": 22.07938, "2305": 22.0225, "2310": 22.21074, "2315": 22.0967, "2320": 22.01473, "2325": 22.05989, "2330": 22.043, "2335": 22.01671, "2340": 22.06757, "2345": 22.05606, "2350": 22.6609, "2355": 22.0088, "2360": 22.09811, "2365": 22.02511, "2370": 24.06139, "2375": 22.02225, "2380": 22.02687, "2385": 22.02909, "2390": 23.07378, "2395": 22.07177, "2400": 22.0054, "2405": 22.03282, "2410": 22.47876, "2415": 22.24729, "2420": 22.1954, "2425": 22.22205, "2430": 22.13688, "2435": 22.01958, "2440": 22.00057, "2445": 22.09471, "2450": 22.1287, "2455": 22.496, "2460": 23.16273, "2465": 22.44182, "2470": 22.29766, "2475": 22.25696, "2480": 22.27658, "2485": 22.05119, "2490": 22.08958, "2495": 22.05101, "2500": 22.18049, "2505": 22.73885, "2510": 22.3013, "2515": 22.16159, "2520": 22.14283, "2525": 23.81545, "2530": 22.04112, "2535": 21.97344, "2540": 22.11779, "2545": 23.14048, "2550": 21.99081, "2555": 22.17471, "2560": 21.98548, "2565": 22.44279, "2570": 22.04601, "2575": 22.02526, "2580": 22.0334, "2585": 22.37937, "2590": 22.0229, "2595": 22.01771, "2600": 22.0375, "2605": 22.11882, "2610": 22.18145, "2615": 21.98777, "2620": 22.1291, "2625": 22.27949, "2630": 21.99291, "2635": 21.98551, "2640": 21.97771, "2645": 21.96018, "2650": 22.00741, "2655": 21.99028, "2660": 22.42281, "2665": 22.01176, "2670": 22.08069, "2675": 22.27134, "2680": 23.68705, "2685": 22.32166, "2690": 22.11497, "2695": 22.01227, "2700": 23.62869, 
"2705": 22.01168, "2710": 21.96153, "2715": 21.95652, "2720": 22.50496, "2725": 22.01491, "2730": 22.12278, "2735": 21.96662, "2740": 23.03789, "2745": 21.96039, "2750": 22.10556, "2755": 21.96966, "2760": 23.14078, "2765": 22.49746, "2770": 22.48436, "2775": 22.51472, "2780": 24.08761, "2785": 22.3488, "2790": 22.29295, "2795": 22.29061, "2800": 22.79278, "2805": 22.35583, "2810": 22.14458, "2815": 23.07112, "2820": 22.70057, "2825": 22.44947, "2830": 22.312, "2835": 22.50636, "2840": 22.23341, "2845": 22.43998, "2850": 22.11617, "2855": 22.10141, "2860": 22.1265, "2865": 22.08625, "2870": 22.38704, "2875": 22.11544, "2880": 22.08853, "2885": 22.17194, "2890": 22.1951, "2895": 22.10935, "2900": 22.3407, "2905": 22.11971, "2910": 22.37563, "2915": 22.11134, "2920": 22.23945, "2925": 22.08751, "2930": 22.87236, "2935": 22.1755, "2940": 22.12761, "2945": 22.0885, "2950": 23.70733, "2955": 22.11746, "2960": 22.05086, "2965": 22.10101, "2970": 23.67964, "2975": 22.18584, "2980": 22.22631, "2985": 22.56632, "2990": 22.62067, "2995": 22.07047, "3000": 22.10329, "3005": 22.1243, "3010": 22.13871, "3015": 22.13593, "3020": 22.10041, "3025": 22.08549, "3030": 22.13629, "3035": 22.07991, "3040": 22.19406, "3045": 22.07879, "3050": 22.12624, "3055": 22.13888, "3060": 22.25468, "3065": 22.11183, "3070": 22.09011, "3075": 22.09236, "3080": 22.05187, "3085": 22.69726, "3090": 22.10608, "3095": 22.09424, "3100": 22.35354, "3105": 23.13377, "3110": 22.01249, "3115": 22.23691, "3120": 22.00478, "3125": 23.30211, "3130": 22.01655, "3135": 22.00928, "3140": 22.00648, "3145": 23.17948, "3150": 22.00248, "3155": 22.01653, "3160": 22.07596, "3165": 22.41739, "3170": 22.11747, "3175": 22.16079, "3180": 22.14759, "3185": 22.05717, "3190": 22.08524, "3195": 22.20091, "3200": 22.03618, "3205": 22.08103, "3210": 22.0596, "3215": 22.28876, "3220": 22.21636, "3225": 22.18856, "3230": 22.27028, "3235": 22.20994, "3240": 22.77367, "3245": 22.17145, "3250": 22.06129, "3255": 22.04587, "3260": 22.83292, "3265": 22.08424, "3270": 22.07221, "3275": 22.29972, "3280": 23.36095, "3285": 22.10844, "3290": 22.14078, "3295": 22.0336, "3300": 23.55761, "3305": 22.07427, "3310": 131.56767, "3315": 22.8663, "3320": 22.41273, "3325": 22.32888, "3330": 22.29784, "3335": 22.40073, "3340": 22.71328, "3345": 22.62872, "3350": 22.28479, "3355": 22.35403, "3360": 22.78814, "3365": 22.2521, "3370": 24.19968, "3375": 22.20297, "3380": 22.16069, "3385": 22.22954, "3390": 22.42507, "3395": 22.03531, "3400": 22.05146, "3405": 22.49917, "3410": 22.05847, "3415": 22.40184, "3420": 22.40472, "3425": 22.49586, "3430": 22.27014, "3435": 22.11712, "3440": 22.33947, "3445": 22.7033, "3450": 22.66029, "3455": 22.19334, "3460": 22.76346, "3465": 22.09402, "3470": 22.09811, "3475": 22.47295, "3480": 22.15958, "3485": 22.01759, "3490": 22.69786, "3495": 22.10884, "3500": 22.26244, "3505": 22.48803, "3510": 22.04792, "3515": 22.26632, "3520": 23.02426, "3525": 21.97403, "3530": 21.96238, "3535": 22.82764, "3540": 23.1708, "3545": 21.9841, "3550": 22.37764, "3555": 22.07001, "3560": 23.26599, "3565": 22.63189, "3570": 22.1299, "3575": 22.05798, "3580": 23.61217, "3585": 22.05817, "3590": 22.05743, "3595": 22.38544, "3600": 22.30456, "3605": 22.0031, "3610": 22.39868, "3615": 21.96317, "3620": 21.98004, "3625": 22.37093, "3630": 21.94802, "3635": 22.3769, "3640": 22.34472, "3645": 22.1353, "3650": 22.26206, "3655": 22.61849, "3660": 21.94006, "3665": 21.96396, "3670": 22.37985, "3675": 22.82904, "3680": 22.03345, "3685": 22.91763, "3690": 22.04154, "3695": 
23.94508, "3700": 22.52962, "3705": 21.97761, "3710": 22.02327, "3715": 23.60847, "3720": 21.95106, "3725": 22.01696, "3730": 22.36269, "3735": 22.25172, "3740": 21.95469, "3745": 22.16049, "3750": 22.00949, "3755": 22.31208, "3760": 22.618, "3765": 22.50883, "3770": 21.99661, "3775": 22.49637, "3780": 21.9858, "3785": 22.07864, "3790": 21.97356, "3795": 22.00495, "3800": 21.98709, "3805": 22.25688, "3810": 22.3697, "3815": 22.00685, "3820": 22.11556, "3825": 21.98722, "3830": 22.79662, "3835": 22.20122, "3840": 22.25326, "3845": 22.05364, "3850": 23.67121, "3855": 22.67826, "3860": 22.64956, "3865": 22.51112, "3870": 22.28967, "3875": 22.21078, "3880": 22.24363, "3885": 22.17845, "3890": 22.56251, "3895": 23.75595, "3900": 22.19031, "3905": 22.04256, "3910": 22.22994, "3915": 22.91995, "3920": 22.08375, "3925": 22.0581, "3930": 22.04427, "3935": 22.01686, "3940": 22.05989, "3945": 22.03671, "3950": 22.29211, "3955": 22.05595, "3960": 22.02085, "3965": 22.27472, "3970": 22.53228, "3975": 22.04172, "3980": 22.00045, "3985": 22.10351, "3990": 23.41112, "3995": 22.15242, "4000": 21.97758, "4005": 22.99546, "4010": 22.19767, "4015": 22.17034, "4020": 21.98852, "4025": 24.02019, "4030": 21.98294, "4035": 22.06153, "4040": 21.97046, "4045": 23.57644, "4050": 22.00377, "4055": 22.23121, "4060": 21.98056, "4065": 22.72778, "4070": 21.95871, "4075": 22.1475, "4080": 122.68214, "4085": 21.99969, "4090": 22.1801, "4095": 22.09289, "4100": 22.162, "4105": 121.74252, "4110": 101.97162, "4115": 22.07785, "4120": 22.01744, "4125": 21.96856, "4130": 22.0968, "4135": 21.99934, "4140": 21.97188, "4145": 22.24035, "4150": 21.98399, "4155": 22.21056, "4160": 22.18513, "4165": 21.97149, "4170": 21.95493, "4175": 21.98644, "4180": 22.82884, "4185": 21.9335, "4190": 22.23795, "4195": 22.21049, "4200": 23.443, "4205": 21.96949, "4210": 22.31057, "4215": 21.98531, "4220": 22.39323, "4225": 21.96446, "4230": 21.9576, "4235": 22.38507, "4240": 22.14095, "4245": 22.0295, "4250": 22.04722, "4255": 22.01127, "4260": 21.99885, "4265": 22.03699, "4270": 21.99884, "4275": 22.14375, "4280": 22.20271, "4285": 22.21506, "4290": 21.97778, "4295": 21.96421, "4300": 22.0063, "4305": 21.97568, "4310": 22.1903, "4315": 22.26445, "4320": 22.01799, "4325": 22.22094, "4330": 21.99275, "4335": 23.51758, "4340": 22.0658, "4345": 21.99291, "4350": 22.15841, "4355": 24.40164, "4360": 21.96139, "4365": 21.98735, "4370": 150.09036, "4375": 22.68849, "4380": 22.3453, "4385": 22.26625, "4390": 22.25795, "4395": 22.28304, "4400": 22.47484, "4405": 22.23546, "4410": 22.18179, "4415": 22.61419, "4420": 22.15316, "4425": 22.20619, "4430": 22.11052, "4435": 22.2049, "4440": 22.11021, "4445": 22.02624, "4450": 22.11827, "4455": 22.07004, "4460": 22.47109, "4465": 23.06672, "4470": 22.1117, "4475": 22.10337, "4480": 22.0979, "4485": 23.63155, "4490": 22.19922, "4495": 22.0407, "4500": 22.19427, "4505": 24.20879, "4510": 22.31867, "4515": 22.04195, "4520": 22.00538, "4525": 22.81903, "4530": 21.99423, "4535": 22.3949, "4540": 22.02369, "4545": 22.45018, "4550": 22.37383, "4555": 21.99917, "4560": 22.03564, "4565": 22.35363, "4570": 21.99408, "4575": 22.01112, "4580": 22.39293, "4585": 21.98907, "4590": 22.14867, "4595": 22.63223, "4600": 22.09417, "4605": 22.09595, "4610": 22.39637, "4615": 22.0246, "4620": 23.20505, "4625": 22.65614, "4630": 22.10893, "4635": 21.9965, "4640": 24.52925, "4645": 22.00175, "4650": 21.99555, "4655": 22.32024, "4660": 23.37879, "4665": 22.04023, "4670": 22.37461, "4675": 21.99992, "4680": 22.85228, "4685": 22.85336, 
"4690": 22.00803, "4695": 22.11459, "4700": 22.80308, "4705": 22.07863, "4710": 21.99984, "4715": 22.37447, "4720": 21.98966, "4725": 21.92479, "4730": 22.85162, "4735": 22.00992, "4740": 22.27876, "4745": 22.57113, "4750": 21.98179, "4755": 22.01637, "4760": 22.53234, "4765": 21.98748, "4770": 22.13289, "4775": 23.05708, "4780": 22.02361, "4785": 21.94255, "4790": 22.3635, "4795": 22.90755, "4800": 22.12824, "4805": 22.21219, "4810": 21.97153, "4815": 23.44829, "4820": 22.64153, "4825": 22.23301, "4830": 22.0035, "4835": 23.07607, "4840": 22.12879, "4845": 22.14409, "4850": 22.26793, "4855": 22.42532, "4860": 22.26226, "4865": 22.64532, "4870": 22.06861, "4875": 22.06725, "4880": 22.21232, "4885": 21.99002, "4890": 22.18011, "4895": 22.18558, "4900": 22.11082, "4905": 21.98766, "4910": 22.5618, "4915": 22.01597, "4920": 21.97111, "4925": 22.15179, "4930": 22.11745, "4935": 22.7826, "4940": 22.13885, "4945": 22.16569, "4950": 22.06308, "4955": 22.0259, "4960": 22.37567, "4965": 22.75186, "4970": 22.26817, "4975": 22.06791, "4980": 22.40875, "4985": 22.05632, "4990": 22.00733, "4995": 22.09037, "5000": 22.09093, "5005": 22.37394, "5010": 22.42126, "5015": 22.10242, "5020": 22.26656, "5025": 22.39732, "5030": 22.64602, "5035": 22.12388, "5040": 23.01379, "5045": 22.31129, "5050": 22.33345, "5055": 22.31668, "5060": 23.71577, "5065": 22.08548, "5070": 22.18891, "5075": 22.17726, "5080": 24.2219, "5085": 22.55174, "5090": 22.41676, "5095": 22.60143, "5100": 22.79384, "5105": 22.12798, "5110": 22.25447, "5115": 22.22759, "5120": 22.64345, "5125": 22.23938, "5130": 22.26654, "5135": 22.24029, "5140": 22.51447, "5145": 22.09813, "5150": 22.54697, "5155": 22.25159, "5160": 22.35757, "5165": 22.12011, "5170": 22.39896, "5175": 22.21842, "5180": 22.17444, "5185": 22.19344, "5190": 22.22536, "5195": 22.20193, "5200": 22.05924, "5205": 22.28392, "5210": 22.04254, "5215": 24.30609, "5220": 22.07253, "5225": 22.62721, "5230": 22.28485, "5235": 24.50441, "5240": 22.4861, "5245": 22.13557, "5250": 22.18743, "5255": 22.93906, "5260": 22.16685, "5265": 22.24292, "5270": 22.56848, "5275": 22.78221, "5280": 22.55576, "5285": 22.15076, "5290": 22.31002, "5295": 22.39034, "5300": 22.31134, "5305": 22.17454, "5310": 22.2718, "5315": 22.53468, "5320": 22.29049, "5325": 22.177, "5330": 22.28793, "5335": 22.53156, "5340": 22.14626, "5345": 22.21185, "5350": 22.27034, "5355": 22.44983, "5360": 22.33185, "5365": 22.18905, "5370": 22.28895, "5375": 22.21138, "5380": 22.18258, "5385": 22.24382, "5390": 23.30736, "5395": 22.08914, "5400": 22.34981, "5405": 22.46976, "5410": 23.1576, "5415": 22.22333, "5420": 22.32389, "5425": 22.18253, "5430": 22.71762, "5435": 22.19057, "5440": 148.93217, "5445": 22.66985, "5450": 22.30999, "5455": 22.2951, "5460": 22.24916, "5465": 22.25541, "5470": 22.48145, "5475": 22.29708, "5480": 22.14487, "5485": 22.28732, "5490": 22.79772, "5495": 22.1176, "5500": 24.25361, "5505": 22.10745, "5510": 22.13419, "5515": 22.02963, "5520": 22.84109, "5525": 21.99457, "5530": 21.97918, "5535": 21.98436, "5540": 22.01363, "5545": 21.97736, "5550": 21.94858, "5555": 22.28431, "5560": 22.1008, "5565": 22.02172, "5570": 22.0353, "5575": 21.98161, "5580": 21.97964, "5585": 22.00535, "5590": 21.96853, "5595": 22.95347, "5600": 22.5506, "5605": 22.01258, "5610": 22.77281, "5615": 21.93018, "5620": 22.18177, "5625": 22.04426, "5630": 23.61154, "5635": 21.98179, "5640": 22.65718, "5645": 22.00652, "5650": 23.55085, "5655": 22.00494, "5660": 22.06071, "5665": 22.17428, "5670": 22.24871, "5675": 22.07433, 
"5680": 22.00482, "5685": 22.68914, "5690": 21.95072, "5695": 22.23748, "5700": 21.9508, "5705": 21.95094, "5710": 21.94092, "5715": 21.99002, "5720": 22.1387, "5725": 22.07474, "5730": 22.92476, "5735": 22.03674, "5740": 21.98351, "5745": 21.91907, "5750": 21.95641, "5755": 21.97255, "5760": 21.93521, "5765": 21.92315, "5770": 21.96932, "5775": 22.58241, "5780": 21.9299, "5785": 22.38168, "5790": 21.95647, "5795": 22.27818, "5800": 22.04144, "5805": 23.64312, "5810": 22.19651, "5815": 21.96606, "5820": 22.56641, "5825": 22.34323, "5830": 21.92401, "5835": 21.91235, "5840": 21.93956, "5845": 21.94814, "5850": 21.96033, "5855": 21.9338, "5860": 22.18756, "5865": 22.66376, "5870": 21.93988, "5875": 21.99238, "5880": 21.96291, "5885": 21.91295, "5890": 21.89454, "5895": 21.97108, "5900": 21.95609, "5905": 21.99241, "5910": 22.63597, "5915": 22.16386, "5920": 22.0326, "5925": 22.4308, "5930": 21.92863, "5935": 22.00329, "5940": 22.25858, "5945": 21.91022, "5950": 21.94404, "5955": 22.54575, "5960": 23.15301, "5965": 21.95755, "5970": 21.97058, "5975": 21.94571, "5980": 23.68132, "5985": 21.89135, "5990": 22.18465, "5995": 22.03832, "6000": 23.52294, "6005": 23.41191, "6010": 22.47964, "6015": 22.59445, "6020": 22.78111, "6025": 22.48378, "6030": 22.26068, "6035": 22.41574, "6040": 22.19973, "6045": 22.19263, "6050": 22.40874, "6055": 22.15181, "6060": 22.50071, "6065": 22.21777, "6070": 22.22245, "6075": 22.14805, "6080": 23.57126, "6085": 22.10081, "6090": 22.16173, "6095": 22.57333, "6100": 22.69498, "6105": 22.19985, "6110": 22.30345, "6115": 22.16158, "6120": 22.3965, "6125": 22.22338, "6130": 22.07825, "6135": 22.04495, "6140": 22.06659, "6145": 22.32382, "6150": 22.06566, "6155": 22.03929, "6160": 22.04333, "6165": 22.00618, "6170": 22.00706, "6175": 23.07625, "6180": 22.0934, "6185": 22.17764, "6190": 22.06692, "6195": 23.62051, "6200": 22.03628, "6205": 21.95453, "6210": 23.18004, "6215": 22.03549, "6220": 21.95019, "6225": 22.46337, "6230": 22.30274, "6235": 21.99409, "6240": 21.99592, "6245": 21.98967, "6250": 22.37721, "6255": 21.97039, "6260": 22.01765, "6265": 22.00992, "6270": 22.40863, "6275": 21.96039, "6280": 22.06437, "6285": 21.92888, "6290": 22.02604, "6295": 21.92496, "6300": 21.95849, "6305": 21.96011, "6310": 21.91646, "6315": 22.13515, "6320": 21.97319, "6325": 21.96372, "6330": 21.87708, "6335": 21.93014, "6340": 22.21349, "6345": 23.27369, "6350": 21.94696, "6355": 22.0983, "6360": 22.14908, "6365": 23.92337, "6370": 21.93204, "6375": 21.89591, "6380": 22.15334, "6385": 23.03106, "6390": 21.92112, "6395": 21.94427, "6400": 21.92783, "6405": 22.66198, "6410": 22.04295, "6415": 21.91041, "6420": 22.24819, "6425": 22.07724, "6430": 21.90217, "6435": 21.93184, "6440": 21.93073, "6445": 21.88912, "6450": 22.17038, "6455": 22.17722, "6460": 22.15259, "6465": 21.91748, "6470": 21.90756, "6475": 22.05631, "6480": 21.90704, "6485": 22.07197, "6490": 21.8689, "6495": 22.12539, "6500": 22.86117, "6505": 21.99132, "6510": 21.95923, "6515": 21.91191, "6520": 23.51231, "6525": 21.92571, "6530": 21.88683, "6535": 21.8532, "6540": 23.2809, "6545": 21.86293, "6550": 21.97318, "6555": 21.96698, "6560": 22.55755, "6565": 21.9006, "6570": 22.13745, "6575": 22.7285, "6580": 22.98955, "6585": 22.45682, "6590": 22.53654, "6595": 22.47824, "6600": 22.44446, "6605": 22.98781, "6610": 22.40294, "6615": 22.33958, "6620": 22.97656, "6625": 22.38149, "6630": 23.17301, "6635": 22.89001, "6640": 22.35189, "6645": 22.41434, "6650": 24.54115, "6655": 22.30987, "6660": 22.35794, "6665": 22.94333, 
"6670": 23.28049, "6675": 22.32491, "6680": 22.65227, "6685": 22.3802, "6690": 22.52013, "6695": 22.60755, "6700": 22.3155, "6705": 22.41335, "6710": 22.7474, "6715": 22.64615, "6720": 22.31463, "6725": 22.62525, "6730": 22.27418, "6735": 22.30114, "6740": 22.75749, "6745": 22.47966, "6750": 22.37958, "6755": 22.36938, "6760": 22.26806, "6765": 22.9111, "6770": 22.64593, "6775": 22.22297, "6780": 22.46553, "6785": 22.55216, "6790": 22.29045, "6795": 22.21224, "6800": 22.51292, "6805": 23.79652, "6810": 22.24627, "6815": 22.25011, "6820": 22.30964, "6825": 23.3865, "6830": 22.50271, "6835": 22.21374, "6840": 22.22594, "6845": 22.94583, "6850": 22.22235, "6855": 22.23193, "6860": 22.72064, "6865": 22.40304, "6870": 22.24204, "6875": 22.26489, "6880": 22.66457, "6885": 22.35872, "6890": 22.39153, "6895": 22.29087, "6900": 22.38144, "6905": 22.27717, "6910": 22.40191, "6915": 22.25621, "6920": 22.30208, "6925": 22.34158, "6930": 22.25384, "6935": 22.24471, "6940": 22.35268, "6945": 22.55802, "6950": 22.21382, "6955": 22.21777, "6960": 22.90266, "6965": 22.2588, "6970": 22.21535, "6975": 22.82372, "6980": 23.43578, "6985": 22.26215, "6990": 22.2469, "6995": 22.49872, "7000": 22.94036, "7005": 22.23551, "7010": 22.28692, "7015": 22.32198, "7020": 22.96482, "7025": 22.51452, "7030": 22.34023, "7035": 22.27154, "7040": 22.58448, "7045": 22.36326, "7050": 22.30007, "7055": 22.30032, "7060": 22.52231, "7065": 22.34128, "7070": 22.45164, "7075": 22.32186, "7080": 22.36049, "7085": 22.3123, "7090": 22.95593, "7095": 22.31165, "7100": 22.30756, "7105": 22.3815, "7110": 22.40465, "7115": 22.90583, "7120": 22.20924, "7125": 22.23734, "7130": 22.5073, "7135": 22.92679, "7140": 22.41357, "7145": 22.32356, "7150": 22.24071, "7155": 22.19905, "7160": 22.12316, "7165": 22.17672, "7170": 23.84509, "7175": 22.22408, "7180": 22.11787, "7185": 22.19463, "7190": 22.81707, "7195": 22.0221, "7200": 22.02441, "7205": 22.70228, "7210": 21.99292, "7215": 22.12439, "7220": 22.04139, "7225": 22.64929, "7230": 21.98927, "7235": 21.95256, "7240": 21.98599, "7245": 22.14966, "7250": 21.93459, "7255": 21.9735, "7260": 21.95693, "7265": 21.94983, "7270": 22.10505, "7275": 21.99558, "7280": 22.0763, "7285": 21.94581, "7290": 21.99727, "7295": 21.9706, "7300": 21.96315, "7305": 21.95616, "7310": 21.91838, "7315": 21.9405, "7320": 22.32232, "7325": 21.93466, "7330": 21.94574, "7335": 21.93991, "7340": 23.26225, "7345": 22.07154, "7350": 22.04687, "7355": 21.96688, "7360": 23.22071, "7365": 22.02719, "7370": 21.95731, "7375": 21.95009, "7380": 22.88505, "7385": 21.91981, "7390": 21.91168, "7395": 21.93168, "7400": 22.5368, "7405": 21.97361, "7410": 21.90746, "7415": 21.98165, "7420": 22.11563, "7425": 22.04902, "7430": 22.11028, "7435": 22.07722, "7440": 22.04177, "7445": 22.17855, "7450": 22.05793, "7455": 22.10711, "7460": 21.93718, "7465": 21.95884, "7470": 21.94373, "7475": 22.5823, "7480": 21.95772, "7485": 22.10644, "7490": 21.90468, "7495": 22.97159, "7500": 21.9061, "7505": 21.92269, "7510": 21.95582, "7515": 23.46624, "7520": 21.94287, "7525": 21.90595, "7530": 21.9317, "7535": 23.02629, "7540": 22.03888, "7545": 21.96158, "7550": 21.95929, "7555": 22.53657, "7560": 22.13281, "7565": 22.11452, "7570": 21.96967, "7575": 22.26788, "7580": 21.99497, "7585": 21.94751, "7590": 22.00127, "7595": 21.96601, "7600": 21.97744, "7605": 22.14733, "7610": 22.16239, "7615": 22.03168, "7620": 21.92726, "7625": 21.92345, "7630": 22.38333, "7635": 21.87859, "7640": 22.01314, "7645": 22.32154, "7650": 22.60569, "7655": 21.95937, "7660": 
21.93396, "7665": 21.93092, "7670": 23.52125, "7675": 21.961, "7680": 22.11011, "7685": 21.97565, "7690": 23.02219, "7695": 23.39515, "7700": 22.56087, "7705": 22.70469, "7710": 22.79294, "7715": 22.58659, "7720": 22.4462, "7725": 22.59032, "7730": 22.36973, "7735": 22.55464, "7740": 22.59935, "7745": 22.4473, "7750": 22.98614, "7755": 22.35078, "7760": 22.4762, "7765": 22.45739, "7770": 23.97258, "7775": 22.36117, "7780": 22.36047, "7785": 22.87189, "7790": 22.89751, "7795": 22.27469, "7800": 22.38182, "7805": 22.73872, "7810": 22.26821, "7815": 22.28769, "7820": 22.2849, "7825": 22.82652, "7830": 22.37185, "7835": 22.29845, "7840": 22.33222, "7845": 22.2762, "7850": 22.25741, "7855": 22.28212, "7860": 22.53258, "7865": 22.25523, "7870": 22.25707, "7875": 22.27488, "7880": 22.71931, "7885": 22.3053, "7890": 22.24338, "7895": 22.49386, "7900": 22.28202, "7905": 22.28061, "7910": 22.29494, "7915": 22.27202, "7920": 22.67196, "7925": 22.22246, "7930": 22.26949, "7935": 22.27304, "7940": 23.09534, "7945": 22.2416, "7950": 22.26685, "7955": 22.26832, "7960": 23.49038, "7965": 22.24053, "7970": 22.23108, "7975": 22.47435, "7980": 23.16756, "7985": 22.24102, "7990": 22.25222, "7995": 22.76021, "8000": 22.80934, "8005": 22.45395, "8010": 22.24392, "8015": 22.37659, "8020": 22.45716, "8025": 22.56563, "8030": 22.24505, "8035": 22.51422, "8040": 22.2566, "8045": 22.27507, "8050": 22.39083, "8055": 22.29866, "8060": 22.5829, "8065": 22.45741, "8070": 22.26728, "8075": 22.43254, "8080": 22.70552, "8085": 22.28991, "8090": 23.07297, "8095": 22.9786, "8100": 22.26268, "8105": 22.26661, "8110": 22.75854, "8115": 23.0899, "8120": 22.33212, "8125": 22.54357, "8130": 22.32941, "8135": 23.10976, "8140": 22.50407, "8145": 22.37353, "8150": 22.34679, "8155": 23.0604, "8160": 22.32842, "8165": 22.31148, "8170": 22.50634, "8175": 22.96704, "8180": 22.33179, "8185": 22.58222, "8190": 22.32176, "8195": 22.31031, "8200": 22.49125, "8205": 23.10764, "8210": 22.39403, "8215": 22.30886, "8220": 22.34794, "8225": 22.44214, "8230": 22.27193, "8235": 22.22294, "8240": 22.75418, "8245": 22.30252, "8250": 22.32677, "8255": 22.29416, "8260": 23.89183, "8265": 22.1848, "8270": 22.14688, "8275": 22.12786, "8280": 22.39072, "8285": 22.08048, "8290": 22.13431, "8295": 22.0821, "8300": 22.03043, "8305": 22.30047, "8310": 22.49924, "8315": 22.24033, "8320": 22.0342, "8325": 22.28211, "8330": 22.03233, "8335": 21.99454, "8340": 22.02875, "8345": 22.01483, "8350": 22.03599, "8355": 23.86466, "8360": 22.35957, "8365": 22.17941, "8370": 22.15026, "8375": 23.09934, "8380": 22.03894, "8385": 21.98834, "8390": 22.37127, "8395": 21.95023, "8400": 22.33841, "8405": 22.07427, "8410": 22.8483, "8415": 21.99837, "8420": 22.20466, "8425": 22.04719, "8430": 22.35748, "8435": 22.40579, "8440": 22.03366, "8445": 22.01361, "8450": 22.04, "8455": 22.05713, "8460": 22.03145, "8465": 22.03056, "8470": 22.04903, "8475": 22.40948, "8480": 22.01707, "8485": 22.23921, "8490": 22.20584, "8495": 22.09789, "8500": 22.0662, "8505": 22.63703, "8510": 21.98576, "8515": 22.17934, "8520": 22.01747, "8525": 23.1655, "8530": 22.00701, "8535": 21.99743, "8540": 21.98647, "8545": 22.94713, "8550": 22.45835, "8555": 22.06299, "8560": 22.24748, "8565": 22.82865, "8570": 22.00408, "8575": 21.99889, "8580": 22.01486, "8585": 22.49696, "8590": 21.96468, "8595": 22.16126, "8600": 21.98753, "8605": 21.94316, "8610": 22.14178, "8615": 21.9369, "8620": 21.90686, "8625": 22.16754, "8630": 21.9349, "8635": 21.9883, "8640": 22.09869, "8645": 21.9622, "8650": 22.00315, "8655": 
22.03627, "8660": 22.27315, "8665": 22.35669, "8670": 22.0541, "8675": 22.01756, "8680": 22.91834, "8685": 21.97789, "8690": 22.3073, "8695": 21.99139, "8700": 23.60592, "8705": 21.97623, "8710": 22.01172, "8715": 21.96568, "8720": 22.99048, "8725": 21.98824, "8730": 22.15562, "8735": 21.95707, "8740": 22.30359, "8745": 22.17102, "8750": 21.90694, "8755": 22.19823, "8760": 22.33809, "8765": 21.94947, "8770": 21.97606, "8775": 21.93649, "8780": 22.57959, "8785": 21.92982, "8790": 21.95145, "8795": 21.92154, "8800": 21.95541, "8805": 23.48988, "8810": 22.41885, "8815": 22.42604, "8820": 22.54779, "8825": 22.34691, "8830": 22.15295, "8835": 22.30553, "8840": 22.11031, "8845": 22.20286, "8850": 22.29507, "8855": 22.09297, "8860": 22.38299, "8865": 22.08562, "8870": 22.02831, "8875": 22.04208, "8880": 23.782, "8885": 21.99253, "8890": 22.06645, "8895": 22.95581, "8900": 22.54926, "8905": 22.01927, "8910": 22.09799, "8915": 22.01145, "8920": 22.12133, "8925": 22.00604, "8930": 21.98834, "8935": 22.14145, "8940": 22.10432, "8945": 22.26266, "8950": 22.00677, "8955": 22.00659, "8960": 21.99679, "8965": 22.02218, "8970": 22.07809, "8975": 23.18765, "8980": 21.96604, "8985": 22.07652, "8990": 21.94554, "8995": 23.95501, "9000": 21.93954, "9005": 22.06546, "9010": 23.56919, "9015": 22.07648, "9020": 22.01256, "9025": 22.70309, "9030": 22.64351, "9035": 21.99525, "9040": 21.971, "9045": 22.08554, "9050": 21.97161, "9055": 21.96478, "9060": 21.99598, "9065": 21.95947, "9070": 22.73004, "9075": 21.95444, "9080": 22.03729, "9085": 21.94419, "9090": 22.14635, "9095": 21.96329, "9100": 22.01926, "9105": 21.96484, "9110": 21.91991, "9115": 22.53967, "9120": 21.96962, "9125": 22.26048, "9130": 21.98578, "9135": 22.00093, "9140": 21.93592, "9145": 22.71933, "9150": 21.91722, "9155": 22.15394, "9160": 22.55117, "9165": 23.12428, "9170": 21.94296, "9175": 21.91244, "9180": 22.00831, "9185": 22.94268, "9190": 21.9414, "9195": 21.91754, "9200": 21.93224, "9205": 22.90435, "9210": 22.09219, "9215": 21.96885, "9220": 22.26377, "9225": 22.16986, "9230": 21.93732, "9235": 21.93362, "9240": 21.94523, "9245": 22.41921, "9250": 22.41826, "9255": 21.96296, "9260": 22.07682, "9265": 22.24896, "9270": 21.94977, "9275": 22.21, "9280": 21.99099, "9285": 22.27461, "9290": 21.90714, "9295": 22.40924, "9300": 22.36802, "9305": 21.9225, "9310": 21.93243, "9315": 21.97662, "9320": 23.0313, "9325": 21.93668, "9330": 21.93863, "9335": 21.94334, "9340": 23.68914, "9345": 22.02102, "9350": 22.19501, "9355": 21.9984, "9360": 23.47618, "9365": 21.95237, "9370": 21.97516, "9375": 23.36148, "9380": 22.48184, "9385": 22.40107, "9390": 22.46822, "9395": 22.42578, "9400": 22.13706, "9405": 22.15555, "9410": 22.16438, "9415": 22.33963, "9420": 22.06686, "9425": 22.19624, "9430": 22.05982, "9435": 22.28124, "9440": 22.0422, "9445": 22.08439, "9450": 22.10309, "9455": 22.33257, "9460": 22.06435, "9465": 22.0584, "9470": 22.00953, "9475": 22.02717, "9480": 22.25961, "9485": 22.42705, "9490": 22.2598, "9495": 21.98361, "9500": 22.00018, "9505": 23.2619, "9510": 21.98728, "9515": 22.02227, "9520": 22.05188, "9525": 23.42534, "9530": 22.01402, "9535": 22.03168, "9540": 21.97632, "9545": 22.86041, "9550": 22.01081, "9555": 22.21875, "9560": 21.98183, "9565": 22.39386, "9570": 22.29224, "9575": 21.99715, "9580": 22.04048, "9585": 22.03793, "9590": 21.97568, "9595": 22.00332, "9600": 21.99096, "9605": 21.97508, "9610": 22.16356, "9615": 21.97117, "9620": 22.2805, "9625": 22.12569, "9630": 21.95103, "9635": 22.03245, "9640": 22.44641, "9645": 
21.97786, "9650": 22.01248, "9655": 22.25473, "9660": 23.6538, "9665": 22.06386, "9670": 22.03203, "9675": 22.12429, "9680": 23.6531, "9685": 22.19912, "9690": 22.01809, "9695": 22.05893, "9700": 22.61082, "9705": 22.02442, "9710": 21.969, "9715": 21.98011, "9720": 22.27634, "9725": 21.98119, "9730": 21.97662, "9735": 22.05301, "9740": 22.18135, "9745": 22.05545, "9750": 22.03773, "9755": 22.0136, "9760": 22.08371, "9765": 21.99497, "9770": 22.28621, "9775": 21.96842, "9780": 22.01786, "9785": 22.01895, "9790": 22.02151, "9795": 22.11158, "9800": 21.99295, "9805": 22.39567, "9810": 21.96125, "9815": 23.27051, "9820": 22.16476, "9825": 22.06544, "9830": 21.99271, "9835": 23.65308, "9840": 22.0959, "9845": 21.96982, "9850": 22.15138, "9855": 23.26087, "9860": 21.95598, "9865": 22.1348, "9870": 22.1363, "9875": 22.45739, "9880": 22.25778, "9885": 22.23748, "9890": 22.06494, "9895": 22.17342, "9900": 21.96094, "9905": 21.95143, "9910": 22.13325, "9915": 21.98749, "9920": 22.00385, "9925": 22.15535, "9930": 23.93206, "9935": 22.41722, "9940": 22.31726, "9945": 22.34065, "9950": 22.90786, "9955": 22.96181, "9960": 22.48136, "9965": 22.16956, "9970": 22.28987, "9975": 22.29211, "9980": 22.22712, "9985": 22.22511, "9990": 22.46554, "9995": 22.21901, "10000": 22.26604, "10005": 22.39311, "10010": 22.68039, "10015": 22.14641, "10020": 22.10228, "10025": 22.22011, "10030": 22.13608, "10035": 22.41053, "10040": 22.19263, "10045": 22.15992, "10050": 22.17308, "10055": 22.36246, "10060": 22.09414, "10065": 22.70599, "10070": 22.21202, "10075": 23.20752, "10080": 21.95923, "10085": 22.09249, "10090": 21.96535, "10095": 23.1194, "10100": 22.26098, "10105": 21.93582, "10110": 21.9846, "10115": 23.24263, "10120": 22.17865, "10125": 22.0711, "10130": 22.20484, "10135": 23.02316, "10140": 21.96858, "10145": 22.03819, "10150": 21.94035, "10155": 22.27531, "10160": 22.02572, "10165": 21.93953, "10170": 22.04008, "10175": 22.0913, "10180": 21.94631, "10185": 22.16832, "10190": 21.9064, "10195": 22.18654, "10200": 21.93823, "10205": 22.22893, "10210": 22.00362, "10215": 21.94897, "10220": 22.03489, "10225": 21.8983, "10230": 22.79783, "10235": 22.13746, "10240": 21.88873, "10245": 22.07645, "10250": 23.16747, "10255": 21.94206, "10260": 22.23173, "10265": 22.05022, "10270": 22.93414, "10275": 21.91052, "10280": 22.03396, "10285": 21.91689, "10290": 23.12396, "10295": 21.90337, "10300": 22.00866, "10305": 22.03346, "10310": 22.18654, "10315": 22.49047, "10320": 21.87558, "10325": 22.18254, "10330": 22.37385, "10335": 22.00081, "10340": 22.05553, "10345": 22.12212, "10350": 22.48482, "10355": 21.99121, "10360": 22.15098, "10365": 22.04338, "10370": 22.3581, "10375": 22.19978, "10380": 21.97152, "10385": 21.99076, "10390": 22.3204, "10395": 21.89544, "10400": 22.08302, "10405": 22.81895, "10410": 21.97142, "10415": 22.04531, "10420": 22.16792, "10425": 23.21764, "10430": 22.00175, "10435": 22.29447, "10440": 21.94563, "10445": 23.24887, "10450": 22.28717, "10455": 22.13339, "10460": 22.06366, "10465": 23.12131, "10470": 21.9297, "10475": 140.1256, "10480": 22.73011, "10485": 22.41084, "10490": 22.36003, "10495": 22.26131, "10500": 22.25431, "10505": 22.49316, "10510": 22.34215, "10515": 22.16031, "10520": 22.24348, "10525": 22.60465, "10530": 22.04771, "10535": 23.66644, "10540": 22.09927, "10545": 22.04232, "10550": 21.99611, "10555": 22.86253, "10560": 22.01944, "10565": 21.99364, "10570": 22.02685, "10575": 22.02185, "10580": 21.99948, "10585": 21.97567, "10590": 22.29276, "10595": 22.07984, "10600": 21.97031, 
"10605": 21.97734, "10610": 22.03238, "10615": 22.29528, "10620": 22.02759, "10625": 22.24744, "10630": 21.98694, "10635": 22.00689, "10640": 21.9262, "10645": 21.94263, "10650": 21.95147, "10655": 21.93672, "10660": 21.94195, "10665": 22.75413, "10670": 21.9746, "10675": 21.9034, "10680": 21.94719, "10685": 23.56208, "10690": 22.11643, "10695": 21.99942, "10700": 21.9432, "10705": 23.29145, "10710": 22.00652, "10715": 21.98446, "10720": 21.99291, "10725": 22.87899, "10730": 22.22899, "10735": 21.97978, "10740": 21.97124, "10745": 22.2948, "10750": 21.92772, "10755": 22.05848, "10760": 21.9529, "10765": 21.94777, "10770": 21.94107, "10775": 21.96777, "10780": 21.94681, "10785": 21.9461, "10790": 21.94378, "10795": 21.93551, "10800": 21.97642, "10805": 21.97669, "10810": 22.08357, "10815": 21.94246, "10820": 22.44064, "10825": 22.09271, "10830": 21.98644, "10835": 21.97456, "10840": 23.1274, "10845": 21.91657, "10850": 21.91859, "10855": 22.06989, "10860": 23.68227, "10865": 21.96489, "10870": 21.91665, "10875": 21.95055, "10880": 23.27894, "10885": 22.08306, "10890": 21.95786, "10895": 21.94489, "10900": 22.53373, "10905": 21.91564, "10910": 21.91809, "10915": 21.95377, "10920": 22.22232, "10925": 21.93837, "10930": 21.94385, "10935": 22.03762, "10940": 22.03145, "10945": 21.9631, "10950": 22.13836, "10955": 21.91654, "10960": 22.06162, "10965": 21.91485, "10970": 21.93453, "10975": 21.989, "10980": 21.89547, "10985": 21.88159, "10990": 21.92781, "10995": 22.09275, "11000": 21.93096, "11005": 21.9418, "11010": 21.96949, "11015": 23.70097, "11020": 21.92893, "11025": 21.95695, "11030": 22.20091, "11035": 142.89355, "11040": 22.5926, "11045": 22.36504, "11050": 22.35024, "11055": 22.36231, "11060": 22.50457, "11065": 22.55788, "11070": 22.3299, "11075": 22.19959, "11080": 22.2085, "11085": 22.56955, "11090": 22.10896, "11095": 24.13467, "11100": 22.14111, "11105": 22.10239, "11110": 22.16594, "11115": 22.90691, "11120": 22.05528, "11125": 22.03406, "11130": 22.25815, "11135": 22.16077, "11140": 22.04629, "11145": 22.03982, "11150": 22.41739, "11155": 22.30052, "11160": 22.07073, "11165": 22.10942, "11170": 22.19751, "11175": 22.12399, "11180": 21.97286, "11185": 22.11489, "11190": 23.52588, "11195": 22.4343, "11200": 22.20617, "11205": 23.25856, "11210": 22.26933, "11215": 22.21999, "11220": 22.12105, "11225": 24.03827, "11230": 22.12546, "11235": 22.10118, "11240": 22.14506, "11245": 23.77409, "11250": 22.2313, "11255": 22.18394, "11260": 22.22734, "11265": 23.01459, "11270": 22.16313, "11275": 22.18533, "11280": 22.15733, "11285": 22.39277, "11290": 22.08727, "11295": 22.06366, "11300": 22.14344, "11305": 22.12801, "11310": 22.16378, "11315": 22.27419, "11320": 22.18672, "11325": 22.39353, "11330": 22.01839, "11335": 22.16974, "11340": 22.16062, "11345": 22.12285, "11350": 22.33393, "11355": 22.17613, "11360": 22.23771, "11365": 22.25522, "11370": 22.19194, "11375": 22.16371, "11380": 22.92581, "11385": 22.06856, "11390": 22.52404, "11395": 22.17185, "11400": 23.8426, "11405": 22.04615, "11410": 22.17242, "11415": 22.45349, "11420": 22.61803, "11425": 22.09953, "11430": 22.16346, "11435": 22.03363, "11440": 22.41405, "11445": 22.21785, "11450": 22.11374, "11455": 22.26627, "11460": 22.35772, "11465": 22.2638, "11470": 22.08309, "11475": 22.06068, "11480": 22.08989, "11485": 22.11845, "11490": 22.49276, "11495": 22.15837, "11500": 22.16976, "11505": 22.25486, "11510": 22.33572, "11515": 22.13069, "11520": 22.20598, "11525": 22.26255, "11530": 22.34658, "11535": 23.60326, "11540": 22.34267, 
"11545": 22.24829, "11550": 22.03141, "11555": 23.63577, "11560": 21.94727, "11565": 22.13788, "11570": 22.24198, "11575": 23.17917, "11580": 22.23528, "11585": 22.32687, "11590": 22.03948, "11595": 22.4274, "11600": 22.07506, "11605": 23.19544, "11610": 22.25594, "11615": 22.28393, "11620": 22.37688, "11625": 22.2648, "11630": 22.0992, "11635": 22.31651, "11640": 22.06785, "11645": 22.14657, "11650": 22.40668, "11655": 22.03332, "11660": 21.99637, "11665": 22.15314, "11670": 22.02197, "11675": 21.98647, "11680": 23.75401, "11685": 22.00033, "11690": 21.9883, "11695": 22.79285, "11700": 23.01733, "11705": 21.95646, "11710": 22.08797, "11715": 21.98842, "11720": 22.21652, "11725": 21.98715, "11730": 22.00206, "11735": 21.95181, "11740": 21.91771, "11745": 21.98524, "11750": 21.95975, "11755": 21.93496, "11760": 21.98787, "11765": 21.94955, "11770": 21.98976, "11775": 23.88894, "11780": 21.9895, "11785": 22.09234, "11790": 21.99721, "11795": 22.93048, "11800": 21.94187, "11805": 22.20019, "11810": 23.48027, "11815": 22.33556, "11820": 22.09226, "11825": 22.68929, "11830": 23.02668, "11835": 22.02526, "11840": 22.09567, "11845": 22.1899, "11850": 22.56017, "11855": 22.09973, "11860": 22.11393, "11865": 21.98043, "11870": 22.75241, "11875": 22.1833, "11880": 22.27929, "11885": 22.23516, "11890": 22.16733, "11895": 21.99921, "11900": 22.19715, "11905": 22.20871, "11910": 22.0395, "11915": 22.46126, "11920": 22.17677, "11925": 22.44914, "11930": 22.04399, "11935": 21.92573, "11940": 22.22817, "11945": 22.68874, "11950": 22.08961, "11955": 22.32596, "11960": 22.56467, "11965": 23.19425, "11970": 21.96477, "11975": 21.96205, "11980": 21.91429, "11985": 23.76249, "11990": 22.23063, "11995": 22.19607, "12000": 22.11092, "12005": 22.67843, "12010": 22.15535, "12015": 22.0871, "12020": 22.26075, "12025": 22.35197, "12030": 21.90504, "12035": 21.96282, "12040": 22.0123, "12045": 22.09485, "12050": 22.28031, "12055": 22.38512, "12060": 22.05363, "12065": 21.8818, "12070": 21.95825, "12075": 22.35514, "12080": 22.08761, "12085": 22.37116, "12090": 22.06028, "12095": 22.41449, "12100": 22.86658, "12105": 22.10044, "12110": 22.04147, "12115": 22.09418, "12120": 23.07068, "12125": 22.09069, "12130": 22.17834, "12135": 22.14473, "12140": 24.32488, "12145": 21.995, "12150": 22.11666, "12155": 22.55567, "12160": 22.99824, "12165": 22.41711, "12170": 22.32374, "12175": 22.24427, "12180": 22.25735, "12185": 22.19503, "12190": 22.18627, "12195": 23.61253, "12200": 22.12635, "12205": 22.09151, "12210": 23.45708, "12215": 22.27714, "12220": 22.03848, "12225": 22.09922, "12230": 24.4297, "12235": 22.01539, "12240": 22.0171, "12245": 22.46722, "12250": 23.02925, "12255": 21.99605, "12260": 22.25607, "12265": 22.08018, "12270": 22.2514, "12275": 22.21909, "12280": 21.94652, "12285": 21.98063, "12290": 22.42307, "12295": 22.38848, "12300": 21.93362, "12305": 22.36423, "12310": 22.0363, "12315": 22.00365, "12320": 22.2231, "12325": 21.9678, "12330": 22.01185, "12335": 22.49991, "12340": 21.98545, "12345": 22.18617, "12350": 22.17371, "12355": 21.99611, "12360": 22.11751, "12365": 22.83825, "12370": 22.0468, "12375": 22.04461, "12380": 22.28722, "12385": 23.31658, "12390": 22.04746, "12395": 22.25242, "12400": 22.12333, "12405": 22.98524, "12410": 22.42495, "12415": 22.1704, "12420": 21.93219, "12425": 23.06122, "12430": 22.01095, "12435": 22.23001, "12440": 22.26283, "12445": 22.1564, "12450": 21.9376, "12455": 22.27071, "12460": 22.00564, "12465": 22.02791, "12470": 22.41955, "12475": 22.06809, "12480": 22.26057, 
"12485": 22.20548, "12490": 22.20739, "12495": 21.97908, "12500": 22.21404, "12505": 22.03893, "12510": 22.01138, "12515": 22.22325, "12520": 22.47372, "12525": 22.32667, "12530": 22.18264, "12535": 21.99304, "12540": 23.26965, "12545": 22.39482, "12550": 22.00118, "12555": 22.08667, "12560": 23.64682, "12565": 21.99975, "12570": 22.02859, "12575": 21.96308, "12580": 23.54907, "12585": 22.02436, "12590": 22.03442, "12595": 21.99431, "12600": 22.58755, "12605": 22.17311, "12610": 22.21238, "12615": 22.22031, "12620": 22.17707, "12625": 21.97999, "12630": 22.1281, "12635": 21.9981, "12640": 22.3257, "12645": 22.07982, "12650": 22.18834, "12655": 22.01449, "12660": 21.96669, "12665": 21.96947, "12670": 21.99838, "12675": 22.36788, "12680": 22.05985, "12685": 22.13809, "12690": 22.00453, "12695": 23.25731, "12700": 21.99673, "12705": 21.98551, "12710": 22.79395, "12715": 22.42481, "12720": 22.33263, "12725": 22.24615, "12730": 23.56363, "12735": 22.22211, "12740": 22.23546, "12745": 22.14978, "12750": 22.80517, "12755": 22.15292, "12760": 22.16518, "12765": 22.03326, "12770": 22.1081, "12775": 22.36894, "12780": 22.78039, "12785": 22.146, "12790": 22.06842, "12795": 22.33502, "12800": 22.07475, "12805": 22.22698, "12810": 22.16503, "12815": 22.06227, "12820": 22.55505, "12825": 22.04563, "12830": 22.04068, "12835": 22.12907, "12840": 22.13998, "12845": 22.08681, "12850": 22.14841, "12855": 22.09265, "12860": 22.64504, "12865": 22.18937, "12870": 22.23819, "12875": 22.0782, "12880": 23.53246, "12885": 22.17256, "12890": 22.03847, "12895": 22.01203, "12900": 23.73529, "12905": 22.13377, "12910": 22.36154, "12915": 22.10386, "12920": 23.27136, "12925": 21.9956, "12930": 22.0008, "12935": 22.22032, "12940": 22.17382, "12945": 22.13734, "12950": 22.09854, "12955": 22.33516, "12960": 22.10708, "12965": 22.02188, "12970": 21.98382, "12975": 22.04335, "12980": 22.13405, "12985": 22.19769, "12990": 22.12807, "12995": 22.12966, "13000": 22.46542, "13005": 22.10598, "13010": 22.07956, "13015": 22.61186, "13020": 22.28414, "13025": 22.10376, "13030": 22.0406, "13035": 23.46393, "13040": 22.08534, "13045": 22.31457, "13050": 22.09795, "13055": 23.44545, "13060": 22.00071, "13065": 22.33506, "13070": 21.99311, "13075": 23.11664, "13080": 22.10192, "13085": 22.09293, "13090": 22.28759, "13095": 22.42964, "13100": 21.97656, "13105": 21.99427, "13110": 22.05748, "13115": 22.24468, "13120": 22.20023, "13125": 22.12956, "13130": 22.23451, "13135": 22.37536, "13140": 22.08675, "13145": 22.06699, "13150": 22.11744, "13155": 22.15729, "13160": 22.08482, "13165": 22.01248, "13170": 21.97218, "13175": 21.98092, "13180": 22.24579, "13185": 22.06405, "13190": 22.30727, "13195": 22.25513, "13200": 22.10453, "13205": 22.25217, "13210": 23.69952, "13215": 22.10014, "13220": 22.17127, "13225": 22.25203, "13230": 23.3001, "13235": 22.02072, "13240": 22.07172, "13245": 22.04093, "13250": 23.17788, "13255": 133.91409, "13260": 22.62094, "13265": 22.1946, "13270": 22.16227, "13275": 22.06576, "13280": 22.01955, "13285": 22.39165, "13290": 22.26613, "13295": 22.06898, "13300": 22.22915, "13305": 22.53251, "13310": 22.01786, "13315": 23.78334, "13320": 22.01678, "13325": 21.97404, "13330": 22.01097, "13335": 22.63951, "13340": 21.96553, "13345": 22.00771, "13350": 21.98575, "13355": 21.9418, "13360": 21.97369, "13365": 21.93784, "13370": 22.02976, "13375": 21.96005, "13380": 21.81782, "13385": 21.78727, "13390": 21.78551, "13395": 21.79165, "13400": 21.85636, "13405": 21.82886, "13410": 21.99624, "13415": 22.0077, "13420": 
21.78878, "13425": 22.09372, "13430": 21.71131, "13435": 21.79132, "13440": 21.76248, "13445": 23.28747, "13450": 21.8151, "13455": 21.94023, "13460": 21.78097, "13465": 23.152, "13470": 21.75696, "13475": 21.79356, "13480": 21.87265, "13485": 23.11723, "13490": 21.74818, "13495": 21.74591, "13500": 22.17114, "13505": 22.21649, "13510": 21.98698, "13515": 21.80642, "13520": 21.76113, "13525": 22.01144, "13530": 21.78823, "13535": 21.96043, "13540": 21.73865, "13545": 22.05772, "13550": 21.79445, "13555": 21.7557, "13560": 21.75585, "13565": 21.69761, "13570": 21.70947, "13575": 21.69053, "13580": 21.69119, "13585": 21.74169, "13590": 21.95509, "13595": 21.68591, "13600": 22.17367, "13605": 21.8074, "13610": 21.84495, "13615": 21.78314, "13620": 22.88842, "13625": 22.05142, "13630": 21.77302, "13635": 22.04763, "13640": 23.77504, "13645": 21.79239, "13650": 21.81045, "13655": 21.80258, "13660": 22.54945, "13665": 21.9712, "13670": 21.77458, "13675": 21.98166, "13680": 22.45228, "13685": 21.77985, "13690": 21.73596, "13695": 21.77319, "13700": 21.99868, "13705": 21.80252, "13710": 21.73892, "13715": 21.75089, "13720": 21.77395, "13725": 21.96215, "13730": 22.03257, "13735": 21.7478, "13740": 21.95617, "13745": 21.72558, "13750": 21.80472, "13755": 22.05616, "13760": 21.70652, "13765": 21.70721, "13770": 21.92185, "13775": 22.14086, "13780": 21.73077, "13785": 21.70171, "13790": 21.71062, "13795": 22.47261, "13800": 21.71364, "13805": 21.82465, "13810": 21.72585, "13815": 23.22019, "13820": 21.74176, "13825": 24.07915, "13830": 22.3798, "13835": 22.28879, "13840": 22.20954, "13845": 22.67719, "13850": 22.68886, "13855": 22.33484, "13860": 22.1424, "13865": 22.09684, "13870": 22.13012, "13875": 22.10701, "13880": 22.08084, "13885": 22.25946, "13890": 22.06547, "13895": 22.04471, "13900": 22.31215, "13905": 22.03117, "13910": 22.24296, "13915": 22.24053, "13920": 23.89937, "13925": 22.13568, "13930": 22.26528, "13935": 22.03752, "13940": 22.7777, "13945": 21.98299, "13950": 22.22578, "13955": 21.96372, "13960": 22.51018, "13965": 22.27927, "13970": 22.00571, "13975": 21.97065, "13980": 22.03901, "13985": 22.07873, "13990": 21.95193, "13995": 22.07337, "14000": 21.97547, "14005": 21.96792, "14010": 21.93331, "14015": 22.12572, "14020": 22.17054, "14025": 21.97291, "14030": 22.30716, "14035": 21.93266, "14040": 21.93427, "14045": 22.16105, "14050": 22.95036, "14055": 21.92698, "14060": 21.97454, "14065": 21.98784, "14070": 23.3189, "14075": 22.0039, "14080": 22.19252, "14085": 21.91045, "14090": 23.32331, "14095": 21.96317, "14100": 21.91262, "14105": 21.95431, "14110": 22.84181, "14115": 21.92552, "14120": 21.90922, "14125": 21.95605, "14130": 22.21186, "14135": 22.59133, "14140": 21.92341, "14145": 22.10275, "14150": 21.92349, "14155": 21.96023, "14160": 21.90552, "14165": 22.09058, "14170": 22.08087, "14175": 21.89317, "14180": 22.39481, "14185": 21.93173, "14190": 21.92035, "14195": 21.93642, "14200": 22.01221, "14205": 23.11691, "14210": 22.34803, "14215": 21.93791, "14220": 21.95076, "14225": 23.21857, "14230": 21.91676, "14235": 21.91725, "14240": 22.10584, "14245": 23.05373, "14250": 22.14016, "14255": 22.30779, "14260": 21.9487, "14265": 23.04198, "14270": 22.41118, "14275": 22.1885, "14280": 21.9538, "14285": 22.32764, "14290": 22.15199, "14295": 22.04809, "14300": 22.34587, "14305": 22.17259, "14310": 22.02486, "14315": 22.65886, "14320": 22.0293, "14325": 22.01501, "14330": 22.67477, "14335": 21.91805, "14340": 22.27902, "14345": 22.23618, "14350": 21.97415, "14355": 21.96105, 
"14360": 22.28853, "14365": 22.04996, "14370": 22.01356, "14375": 22.35211, "14380": 22.86362, "14385": 21.92613, "14390": 23.40712, "14395": 22.41395, "14400": 22.50469, "14405": 22.83736, "14410": 22.73094, "14415": 22.14303, "14420": 22.09163, "14425": 22.42316, "14430": 22.29966, "14435": 22.06694, "14440": 22.22504, "14445": 22.03381, "14450": 22.02065, "14455": 22.50831, "14460": 22.02576, "14465": 22.0321, "14470": 22.0106, "14475": 22.04285, "14480": 21.97993, "14485": 21.99907, "14490": 22.04905, "14495": 22.24996, "14500": 22.18711, "14505": 21.94444, "14510": 21.96205, "14515": 22.016, "14520": 22.3745, "14525": 21.9693, "14530": 22.11854, "14535": 21.93496, "14540": 23.69136, "14545": 22.17713, "14550": 21.94559, "14555": 22.20101, "14560": 23.49273, "14565": 22.00373, "14570": 22.04679, "14575": 21.95177, "14580": 22.38211, "14585": 21.93575, "14590": 22.19423, "14595": 21.89307, "14600": 22.18651, "14605": 21.93256, "14610": 21.90761, "14615": 21.83156, "14620": 21.91896, "14625": 22.16129, "14630": 21.89252, "14635": 22.49046, "14640": 21.88118, "14645": 21.88282, "14650": 21.88681, "14655": 22.11419, "14660": 21.90731, "14665": 21.94577, "14670": 22.1344, "14675": 22.858, "14680": 22.39261, "14685": 21.91915, "14690": 22.12244, "14695": 24.00669, "14700": 21.96905, "14705": 21.89809, "14710": 21.90922, "14715": 22.90939, "14720": 21.96496, "14725": 22.37723, "14730": 21.90957, "14735": 22.2633, "14740": 21.92648, "14745": 22.1453, "14750": 21.99546, "14755": 22.19026, "14760": 21.97872, "14765": 22.22985, "14770": 22.44757, "14775": 21.90815, "14780": 21.89699, "14785": 22.135, "14790": 21.89062, "14795": 21.96668, "14800": 21.88543, "14805": 21.94974, "14810": 22.33304, "14815": 22.63924, "14820": 22.3517, "14825": 21.96801, "14830": 23.38039, "14835": 21.91175, "14840": 21.91076, "14845": 21.96284, "14850": 23.88027, "14855": 22.01619, "14860": 22.4207, "14865": 22.10486, "14870": 22.98876, "14875": 21.95213, "14880": 22.19591, "14885": 22.0658, "14890": 22.28211, "14895": 22.33519, "14900": 22.16742, "14905": 22.44668, "14910": 22.25115, "14915": 21.90199, "14920": 21.91358, "14925": 22.12622, "14930": 21.93859, "14935": 21.91215, "14940": 22.10348, "14945": 21.89009, "14950": 161.22256, "14955": 22.67771, "14960": 22.29123, "14965": 22.29667, "14970": 22.2014, "14975": 22.17402, "14980": 22.41303, "14985": 22.31009, "14990": 22.13118, "14995": 22.14212, "15000": 22.54224, "15005": 22.02762, "15010": 22.07452, "15015": 22.04026, "15020": 22.9083, "15025": 22.06352, "15030": 22.02565, "15035": 23.08343, "15040": 24.12852, "15045": 22.39804, "15050": 22.09042, "15055": 22.07531, "15060": 23.47391, "15065": 22.01587, "15070": 22.00564, "15075": 22.15926, "15080": 23.31885, "15085": 21.94673, "15090": 22.11191, "15095": 21.92916, "15100": 22.61556, "15105": 21.98521, "15110": 22.10856, "15115": 22.42169, "15120": 21.97684, "15125": 22.91799, "15130": 21.95036, "15135": 22.0167, "15140": 21.95811, "15145": 21.97114, "15150": 21.9532, "15155": 21.97204, "15160": 21.98026, "15165": 22.28662, "15170": 22.86322, "15175": 22.14224, "15180": 21.93594, "15185": 21.98763, "15190": 21.95835, "15195": 22.76817, "15200": 21.93218, "15205": 21.94633, "15210": 21.92873, "15215": 24.42871, "15220": 22.06728, "15225": 21.89901, "15230": 22.39679, "15235": 22.57079, "15240": 22.02097, "15245": 21.98808, "15250": 22.23732, "15255": 22.16463, "15260": 22.82572, "15265": 21.89205, "15270": 21.89112, "15275": 22.16621, "15280": 21.98128, "15285": 22.08635, "15290": 22.02586, "15295": 22.04114, 
"15300": 21.91116, "15305": 22.9532, "15310": 21.91262, "15315": 22.07182, "15320": 22.0234, "15325": 22.07274, "15330": 22.23304, "15335": 21.9448, "15340": 21.98049, "15345": 22.21634, "15350": 23.38017, "15355": 22.00352, "15360": 22.07538, "15365": 21.90314, "15370": 23.10847, "15375": 21.91619, "15380": 21.97316, "15385": 21.93989, "15390": 23.2703, "15395": 22.78903, "15400": 21.96156, "15405": 22.13886, "15410": 22.45977, "15415": 21.90212, "15420": 21.91313, "15425": 22.08989, "15430": 22.50386, "15435": 22.03348, "15440": 22.52951, "15445": 21.88365, "15450": 22.17541, "15455": 21.95478, "15460": 22.33181, "15465": 21.95527, "15470": 22.01898, "15475": 21.95855, "15480": 21.96129, "15485": 22.65193, "15490": 22.07128, "15495": 24.30192, "15500": 22.38112, "15505": 22.3575, "15510": 22.27934, "15515": 22.83629, "15520": 22.965, "15525": 22.18816, "15530": 22.15574, "15535": 22.03695, "15540": 22.24264, "15545": 22.01675, "15550": 22.02803, "15555": 22.22521, "15560": 22.03292, "15565": 22.04287, "15570": 22.2303, "15575": 22.01487, "15580": 22.46519, "15585": 22.23634, "15590": 23.65717, "15595": 22.1475, "15600": 22.14018, "15605": 23.98221, "15610": 22.18391, "15615": 22.08681, "15620": 22.35152, "15625": 23.41282, "15630": 21.92107, "15635": 22.05351, "15640": 22.1837, "15645": 23.67183, "15650": 22.59735, "15655": 21.96719, "15660": 21.9655, "15665": 22.56305, "15670": 22.04348, "15675": 22.09167, "15680": 21.94769, "15685": 22.11028, "15690": 22.01306, "15695": 22.63082, "15700": 22.08688, "15705": 22.04392, "15710": 22.06222, "15715": 21.94851, "15720": 21.91309, "15725": 21.90957, "15730": 21.95182, "15735": 22.05425, "15740": 22.67622, "15745": 22.09776, "15750": 22.10507, "15755": 21.95467, "15760": 22.26957, "15765": 21.98493, "15770": 22.03597, "15775": 22.05091, "15780": 23.00842, "15785": 22.7603, "15790": 21.93141, "15795": 21.92843, "15800": 23.34736, "15805": 21.9155, "15810": 22.04649, "15815": 22.27317, "15820": 22.19632, "15825": 22.09675, "15830": 22.51524, "15835": 22.17936, "15840": 22.25717, "15845": 22.02612, "15850": 22.02588, "15855": 21.97403, "15860": 21.93187, "15865": 22.04156, "15870": 21.98181, "15875": 22.63392, "15880": 22.27211, "15885": 22.04447, "15890": 22.03282, "15895": 21.88806, "15900": 21.90593, "15905": 21.95288, "15910": 22.03103, "15915": 22.06687, "15920": 22.67458, "15925": 21.9641, "15930": 21.88143, "15935": 22.85567, "15940": 22.02842, "15945": 22.29238, "15950": 22.17156, "15955": 23.4971, "15960": 21.97079, "15965": 22.55252, "15970": 21.86451, "15975": 22.98416, "15980": 21.9894, "15985": 21.92251, "15990": 21.84213, "15995": 22.58547, "16000": 21.95108, "16005": 21.8676, "16010": 22.51476, "16015": 21.99742, "16020": 22.07765, "16025": 22.01026, "16030": 21.94289, "16035": 21.84743, "16040": 21.8747, "16045": 22.45749, "16050": 22.87942, "16055": 22.29397, "16060": 22.28759, "16065": 22.19205, "16070": 22.13676, "16075": 22.1017, "16080": 22.15021, "16085": 23.92552, "16090": 22.0205, "16095": 21.99142, "16100": 22.21778, "16105": 22.73438, "16110": 21.93568, "16115": 21.95924, "16120": 21.93472, "16125": 21.92962, "16130": 21.96838, "16135": 21.9417, "16140": 22.17733, "16145": 21.90434, "16150": 21.92115, "16155": 21.99357, "16160": 22.5194, "16165": 21.86393, "16170": 21.91697, "16175": 21.93702, "16180": 23.27366, "16185": 22.05267, "16190": 21.99697, "16195": 22.09659, "16200": 23.12389, "16205": 22.01891, "16210": 21.94482, "16215": 23.81885, "16220": 21.97519, "16225": 21.95769, "16230": 21.87399, "16235": 22.60628, 
"16240": 21.92314, "16245": 22.1358, "16250": 21.93063, "16255": 21.92493, "16260": 21.89503, "16265": 22.11575, "16270": 22.05503, "16275": 21.931, "16280": 22.34903, "16285": 21.98562, "16290": 22.1837, "16295": 21.9055, "16300": 21.879, "16305": 21.92065, "16310": 21.8983, "16315": 21.92049, "16320": 21.90755, "16325": 21.91014, "16330": 22.20518, "16335": 22.14083, "16340": 21.91228, "16345": 22.10702, "16350": 23.20566, "16355": 21.93574, "16360": 21.89434, "16365": 21.912, "16370": 23.34055, "16375": 21.88169, "16380": 22.13822, "16385": 22.05727, "16390": 23.22691, "16395": 21.90628, "16400": 21.90251, "16405": 21.93046, "16410": 22.04186, "16415": 21.96421, "16420": 21.87629, "16425": 22.66036, "16430": 21.93746, "16435": 21.92011, "16440": 22.10722, "16445": 21.94488, "16450": 21.91058, "16455": 21.92397, "16460": 21.8902, "16465": 22.01658, "16470": 22.07845, "16475": 22.14326, "16480": 21.86449, "16485": 22.18476, "16490": 21.85877, "16495": 21.95627, "16500": 22.01926, "16505": 22.8022, "16510": 21.90936, "16515": 22.05768, "16520": 21.8953, "16525": 23.32429, "16530": 21.81398, "16535": 21.87517, "16540": 22.01058, "16545": 22.49591, "16550": 21.88068, "16555": 21.91231, "16560": 22.12068, "16565": 22.21186, "16570": 21.84098, "16575": 21.89492, "16580": 21.88684, "16585": 22.10743, "16590": 21.92124, "16595": 21.86491, "16600": 21.85643, "16605": 22.38973, "16610": 23.41516, "16615": 22.37932, "16620": 22.42834, "16625": 22.42504, "16630": 22.34001, "16635": 22.13259, "16640": 22.31732, "16645": 22.06589, "16650": 22.1157, "16655": 22.23946, "16660": 22.04668, "16665": 22.34457, "16670": 22.08363, "16675": 22.02682, "16680": 21.96208, "16685": 23.66416, "16690": 23.08889, "16695": 21.9674, "16700": 22.63074, "16705": 22.52517, "16710": 21.91751, "16715": 22.08132, "16720": 21.93962, "16725": 22.25333, "16730": 21.92221, "16735": 21.90744, "16740": 21.92948, "16745": 21.90364, "16750": 22.20302, "16755": 21.91799, "16760": 21.90854, "16765": 21.92225, "16770": 21.89596, "16775": 21.9869, "16780": 22.84789, "16785": 21.91627, "16790": 22.12421, "16795": 22.08815, "16800": 23.9005, "16805": 21.93364, "16810": 21.91129, "16815": 23.51608, "16820": 22.02169, "16825": 21.97655, "16830": 22.42986, "16835": 22.99865, "16840": 21.89589, "16845": 21.86419, "16850": 22.05721, "16855": 22.1417, "16860": 21.86587, "16865": 21.87053, "16870": 21.86522, "16875": 22.24121, "16880": 21.93151, "16885": 21.95986, "16890": 22.23524, "16895": 22.05507, "16900": 21.94153, "16905": 21.97932, "16910": 21.95712, "16915": 21.91747, "16920": 22.34903, "16925": 21.91216, "16930": 21.8964, "16935": 21.94906, "16940": 21.91969, "16945": 21.98901, "16950": 22.78173, "16955": 21.91874, "16960": 22.26974, "16965": 22.33372, "16970": 23.30799, "16975": 21.98068, "16980": 21.9007, "16985": 22.03655, "16990": 23.0492, "16995": 21.93155, "17000": 21.93129, "17005": 22.17845, "17010": 22.75557, "17015": 21.89625, "17020": 21.89226, "17025": 22.24747, "17030": 21.88805, "17035": 21.99549, "17040": 21.9368, "17045": 21.97915, "17050": 21.87326, "17055": 22.14593, "17060": 21.90196, "17065": 21.96984, "17070": 21.97786, "17075": 21.85899, "17080": 21.9879, "17085": 21.91914, "17090": 22.25362, "17095": 21.93395, "17100": 22.23808, "17105": 22.68966, "17110": 21.92008, "17115": 21.96497, "17120": 22.27307, "17125": 22.98984, "17130": 21.92916, "17135": 21.97813, "17140": 21.90333, "17145": 23.34066, "17150": 21.89833, "17155": 22.10503, "17160": 21.94186, "17165": 23.08848, "17170": 22.11463, "17175": 21.8463, 
"17180": 21.91953, "17185": 22.89877, "17190": 23.73218, "17195": 22.42363, "17200": 22.46532, "17205": 22.91613, "17210": 22.62899, "17215": 22.24701, "17220": 22.3595, "17225": 22.48426, "17230": 22.46637, "17235": 22.19544, "17240": 22.38442, "17245": 22.20857, "17250": 22.12284, "17255": 22.48597, "17260": 22.03767, "17265": 22.29737, "17270": 22.35967, "17275": 22.21011, "17280": 22.25432, "17285": 22.28724, "17290": 22.5908, "17295": 22.28881, "17300": 22.54647, "17305": 22.21074, "17310": 22.17563, "17315": 22.26774, "17320": 23.55245, "17325": 22.21331, "17330": 22.20358, "17335": 22.02606, "17340": 24.4723, "17345": 22.75248, "17350": 22.29311, "17355": 22.47133, "17360": 23.71203, "17365": 22.10523, "17370": 22.00669, "17375": 22.00521, "17380": 22.69204, "17385": 22.37559, "17390": 22.61921, "17395": 22.33183, "17400": 22.4362, "17405": 22.28384, "17410": 22.28061, "17415": 22.20364, "17420": 22.16715, "17425": 22.46016, "17430": 22.05546, "17435": 22.60965, "17440": 22.11484, "17445": 21.98009, "17450": 22.04194, "17455": 22.04475, "17460": 21.9549, "17465": 22.04948, "17470": 22.24386, "17475": 23.31019, "17480": 22.35634, "17485": 22.03755, "17490": 22.22925, "17495": 24.16876, "17500": 22.12555, "17505": 22.22649, "17510": 22.16083, "17515": 23.83411, "17520": 22.21472, "17525": 22.50799, "17530": 22.05291, "17535": 22.84241, "17540": 22.0681, "17545": 22.04823, "17550": 21.94578, "17555": 22.40086, "17560": 22.15225, "17565": 22.26291, "17570": 22.66864, "17575": 21.93353, "17580": 21.93478, "17585": 22.22363, "17590": 22.00097, "17595": 22.03661, "17600": 22.11326, "17605": 21.97129, "17610": 22.00186, "17615": 22.80986, "17620": 22.51099, "17625": 22.1664, "17630": 22.69682, "17635": 21.99812, "17640": 22.08206, "17645": 22.01875, "17650": 23.24252, "17655": 22.08027, "17660": 22.67207, "17665": 22.24003, "17670": 23.51071, "17675": 22.06751, "17680": 22.36954, "17685": 22.18572, "17690": 23.61478, "17695": 22.53047, "17700": 22.24614, "17705": 22.62651, "17710": 22.79958, "17715": 21.96569, "17720": 22.15494, "17725": 22.38556, "17730": 22.12974, "17735": 140.92084, "17740": 22.56123, "17745": 22.25339, "17750": 22.16484, "17755": 22.13417, "17760": 22.15251, "17765": 22.32478, "17770": 22.28301, "17775": 22.06738, "17780": 22.23368, "17785": 22.67226, "17790": 21.94695, "17795": 23.35838, "17800": 21.91423, "17805": 22.09444, "17810": 23.38272, "17815": 22.00824, "17820": 22.10418, "17825": 22.81555, "17830": 23.46469, "17835": 21.99529, "17840": 22.13408, "17845": 21.96917, "17850": 23.32749, "17855": 22.10569, "17860": 21.8884, "17865": 22.15323, "17870": 23.38048, "17875": 22.14706, "17880": 22.06371, "17885": 22.2645, "17890": 22.08347, "17895": 21.90107, "17900": 22.10132, "17905": 22.21358, "17910": 21.89827, "17915": 22.49907, "17920": 21.90398, "17925": 21.90301, "17930": 22.10188, "17935": 21.8792, "17940": 22.16063, "17945": 22.11251, "17950": 21.90349, "17955": 21.89345, "17960": 22.48941, "17965": 22.25469, "17970": 21.86746, "17975": 22.12245, "17980": 21.86431, "17985": 22.55658, "17990": 22.14666, "17995": 22.0019, "18000": 21.8743, "18005": 24.5088, "18010": 21.90429, "18015": 22.00753, "18020": 22.34926, "18025": 23.10468, "18030": 21.87733, "18035": 22.11472, "18040": 21.92853, "18045": 22.26509, "18050": 22.76646, "18055": 21.88745, "18060": 21.97069, "18065": 22.10681, "18070": 22.11221, "18075": 21.96692, "18080": 22.21186, "18085": 22.09282, "18090": 21.96493, "18095": 22.82153, "18100": 21.92385, "18105": 21.9182, "18110": 22.02597, "18115": 
21.90279, "18120": 21.91149, "18125": 21.9898, "18130": 21.98556, "18135": 22.57119, "18140": 23.77576, "18145": 22.07531, "18150": 21.89292, "18155": 22.09519, "18160": 22.83916, "18165": 21.90342, "18170": 22.06823, "18175": 21.93637, "18180": 23.30968, "18185": 22.27253, "18190": 21.9272, "18195": 21.84724, "18200": 23.88655, "18205": 21.95126, "18210": 22.04781, "18215": 21.97089, "18220": 22.12606, "18225": 21.96691, "18230": 22.67038, "18235": 21.93763, "18240": 22.11482, "18245": 22.20917, "18250": 22.30054, "18255": 21.88757, "18260": 22.07828, "18265": 22.14773, "18270": 21.84265, "18275": 22.37273, "18280": 21.92939, "18285": 21.96561, "18290": 22.27472, "18295": 22.57948, "18300": 22.27416, "18305": 22.14346, "18310": 22.12561, "18315": 22.08073, "18320": 22.12568, "18325": 22.09466, "18330": 23.77893, "18335": 22.04626, "18340": 22.05358, "18345": 22.20372, "18350": 22.36179, "18355": 22.0075, "18360": 22.0408, "18365": 22.00042, "18370": 21.98481, "18375": 22.01639, "18380": 21.99979, "18385": 21.96556, "18390": 21.99689, "18395": 22.01745, "18400": 22.02278, "18405": 21.96906, "18410": 21.99706, "18415": 22.04051, "18420": 21.92503, "18425": 22.20583, "18430": 21.96526, "18435": 21.9694, "18440": 21.97391, "18445": 21.94984, "18450": 22.14592, "18455": 21.96716, "18460": 22.87592, "18465": 21.94625, "18470": 22.32421, "18475": 21.97687, "18480": 23.6807, "18485": 21.92821, "18490": 21.90706, "18495": 21.91689, "18500": 23.05102, "18505": 21.91281, "18510": 21.96933, "18515": 22.59068, "18520": 22.2703, "18525": 22.10871, "18530": 21.90704, "18535": 21.93079, "18540": 22.2627, "18545": 21.95236, "18550": 21.9388, "18555": 21.94889, "18560": 22.32835, "18565": 21.98171, "18570": 21.91051, "18575": 21.95267, "18580": 22.13834, "18585": 21.93518, "18590": 22.00776, "18595": 21.86336, "18600": 21.9089, "18605": 21.91834, "18610": 21.90193, "18615": 22.34741, "18620": 21.91473, "18625": 21.9129, "18630": 21.97435, "18635": 23.40218, "18640": 21.94948, "18645": 22.19461, "18650": 21.92497, "18655": 23.18721, "18660": 21.91219, "18665": 21.87075, "18670": 21.8798, "18675": 22.07025, "18680": 21.90715, "18685": 21.92243, "18690": 21.93414, "18695": 22.22524, "18700": 21.96453, "18705": 22.09474, "18710": 22.15372, "18715": 22.18124, "18720": 21.98959, "18725": 21.88475, "18730": 21.87731, "18735": 22.06944, "18740": 22.1632, "18745": 21.93652, "18750": 21.90155, "18755": 21.93402, "18760": 21.93659, "18765": 21.91844, "18770": 22.22085, "18775": 22.10082, "18780": 21.89503, "18785": 22.24442, "18790": 22.33903, "18795": 21.86492, "18800": 21.90925, "18805": 21.94518, "18810": 23.40622, "18815": 21.91047, "18820": 21.90311, "18825": 21.94192, "18830": 22.66415, "18835": 21.87047, "18840": 22.04506, "18845": 22.95342, "18850": 22.22917, "18855": 22.29291, "18860": 22.33777, "18865": 22.20746, "18870": 22.06872, "18875": 22.29098, "18880": 22.18869, "18885": 22.17466, "18890": 22.20113, "18895": 22.01383, "18900": 22.33329, "18905": 22.22148, "18910": 22.02099, "18915": 21.93171, "18920": 24.12339, "18925": 22.07284, "18930": 21.9047, "18935": 22.62148, "18940": 22.48013, "18945": 21.94094, "18950": 22.03331, "18955": 22.05208, "18960": 22.29285, "18965": 21.91331, "18970": 22.01707, "18975": 21.95478, "18980": 21.92721, "18985": 22.29436, "18990": 21.9933, "18995": 21.95528, "19000": 21.88792, "19005": 22.18183, "19010": 21.90917, "19015": 21.89099, "19020": 22.4842, "19025": 21.89539, "19030": 22.25591, "19035": 22.308, "19040": 21.89942, "19045": 22.02744, "19050": 22.71298, "19055": 
21.99767, "19060": 21.97372, "19065": 22.455, "19070": 23.32994, "19075": 21.87264, "19080": 22.12935, "19085": 21.98933, "19090": 23.04386, "19095": 22.41329, "19100": 21.86575, "19105": 21.87706, "19110": 22.90137, "19115": 21.92526, "19120": 22.04314, "19125": 22.18998, "19130": 22.17895, "19135": 21.92048, "19140": 22.22564, "19145": 21.86314, "19150": 21.87833, "19155": 22.42549, "19160": 21.87848, "19165": 21.89775, "19170": 22.21894, "19175": 22.02126, "19180": 21.9216, "19185": 22.29199, "19190": 21.90321, "19195": 21.85741, "19200": 22.34853, "19205": 22.83453, "19210": 22.04334, "19215": 21.86308, "19220": 21.86827, "19225": 22.6536, "19230": 22.0969, "19235": 21.87761, "19240": 21.98846, "19245": 23.75274, "19250": 22.09551, "19255": 21.94575, "19260": 22.05519, "19265": 22.7954, "19270": 21.87604, "19275": 21.89112, "19280": 21.87079, "19285": 22.20078, "19290": 22.0325, "19295": 21.90048, "19300": 21.8947, "19305": 22.19234, "19310": 21.85884, "19315": 22.06618, "19320": 21.9218, "19325": 22.12058, "19330": 21.88987, "19335": 21.89909, "19340": 21.99312, "19345": 21.85139, "19350": 21.88832, "19355": 21.87834, "19360": 22.18779, "19365": 21.89986, "19370": 21.99228, "19375": 21.88754, "19380": 22.76864, "19385": 21.93106, "19390": 21.90834, "19395": 21.88733, "19400": 22.7232, "19405": 23.41444, "19410": 22.20091, "19415": 22.27267, "19420": 22.34888, "19425": 22.17844, "19430": 22.09881, "19435": 22.27759, "19440": 22.02811, "19445": 21.98665, "19450": 22.2212, "19455": 22.03121, "19460": 22.21875, "19465": 22.08487, "19470": 21.97694, "19475": 21.96782, "19480": 24.03193, "19485": 21.99308, "19490": 21.96586, "19495": 22.42752, "19500": 22.56327, "19505": 22.05542, "19510": 22.15744, "19515": 21.99814, "19520": 22.26136, "19525": 21.96748, "19530": 21.94836, "19535": 21.90509, "19540": 21.93329, "19545": 22.16497, "19550": 21.96943, "19555": 21.9505, "19560": 21.95476, "19565": 21.91866, "19570": 21.91355, "19575": 23.34159, "19580": 21.94623, "19585": 22.08283, "19590": 21.9853, "19595": 23.12875, "19600": 21.96253, "19605": 22.06066, "19610": 23.14593, "19615": 22.06447, "19620": 22.01226, "19625": 22.29865, "19630": 22.85391, "19635": 21.87824, "19640": 21.89916, "19645": 22.12746, "19650": 22.26227, "19655": 22.0648, "19660": 21.90477, "19665": 21.86888, "19670": 22.1456, "19675": 21.86069, "19680": 21.9772, "19685": 21.89295, "19690": 21.97826, "19695": 21.91662, "19700": 21.92912, "19705": 21.98575, "19710": 21.91143, "19715": 22.15994, "19720": 21.94224, "19725": 22.05104, "19730": 21.90543, "19735": 21.9424, "19740": 21.91512, "19745": 22.48978, "19750": 21.9768, "19755": 22.1227, "19760": 22.25965, "19765": 23.58655, "19770": 22.15499, "19775": 21.89788, "19780": 21.87019, "19785": 23.37408, "19790": 21.87133, "19795": 21.87757, "19800": 21.89523, "19805": 22.75735, "19810": 22.15304, "19815": 21.93394, "19820": 22.15983, "19825": 22.12523, "19830": 21.98576, "19835": 21.96263, "19840": 21.9474, "19845": 21.91399, "19850": 22.16323, "19855": 21.90883, "19860": 21.97966, "19865": 21.91658, "19870": 21.96461, "19875": 22.167, "19880": 21.95076, "19885": 22.20116, "19890": 21.98542, "19895": 22.18981, "19900": 22.2946, "19905": 21.97437, "19910": 22.01799, "19915": 21.99041, "19920": 23.20966, "19925": 21.93251, "19930": 21.9233, "19935": 21.93795, "19940": 23.63294, "19945": 21.94828, "19950": 22.14548, "19955": 21.99578, "19960": 23.00164, "19965": 21.95378, "19970": 24.24783, "19975": 22.4001, "19980": 22.23607, "19985": 22.22004, "19990": 22.5407, "19995": 
22.56643, "20000": 22.19036, "20005": 22.16625, "20010": 22.11535, "20015": 22.01523, "20020": 23.21604, "20025": 22.0347, "20030": 22.0361, "20035": 21.98577, "20040": 22.89814, "20045": 22.26544, "20050": 22.00888, "20055": 21.91735, "20060": 22.04664, "20065": 21.89163, "20070": 21.8958, "20075": 21.91866, "20080": 21.87426, "20085": 21.86937, "20090": 21.86504, "20095": 22.02927, "20100": 21.95953, "20105": 22.26679, "20110": 21.85839, "20115": 21.90893, "20120": 21.87455, "20125": 21.86229, "20130": 21.9169, "20135": 22.34606, "20140": 21.97619, "20145": 21.94842, "20150": 21.90304, "20155": 22.60253, "20160": 22.45318, "20165": 21.92985, "20170": 22.18373, "20175": 23.32951, "20180": 21.875, "20185": 21.86106, "20190": 21.8593, "20195": 23.87326, "20200": 21.86536, "20205": 21.85523, "20210": 21.90631, "20215": 22.00688, "20220": 21.99679, "20225": 22.0325, "20230": 21.92027, "20235": 22.34104, "20240": 21.90581, "20245": 21.93014, "20250": 21.88323, "20255": 22.0817, "20260": 21.8717, "20265": 21.84916, "20270": 21.83305, "20275": 21.96905, "20280": 21.9195, "20285": 21.87681, "20290": 22.27952, "20295": 22.0045, "20300": 22.30791, "20305": 21.8932, "20310": 22.4212, "20315": 21.93566, "20320": 21.86809, "20325": 21.87465, "20330": 22.18711, "20335": 21.85629, "20340": 21.97021, "20345": 22.22873, "20350": 23.19128, "20355": 22.02861, "20360": 21.78632, "20365": 22.25433, "20370": 23.22731, "20375": 21.80064, "20380": 21.8721, "20385": 21.98899, "20390": 22.91793, "20395": 21.85881, "20400": 22.17363, "20405": 21.86264, "20410": 22.15514, "20415": 21.98962, "20420": 21.87876, "20425": 21.87883, "20430": 22.08578, "20435": 22.34193, "20440": 22.00141, "20445": 22.31351, "20450": 21.95465, "20455": 21.95295, "20460": 22.02845, "20465": 22.26912, "20470": 21.88778, "20475": 22.32305, "20480": 22.28701, "20485": 22.03921, "20490": 21.86562, "20495": 21.96003, "20500": 21.84065, "20505": 23.38371, "20510": 21.83109, "20515": 21.86938, "20520": 23.29243, "20525": 22.27982, "20530": 22.3264, "20535": 22.33935, "20540": 22.21033, "20545": 22.05206, "20550": 22.24322, "20555": 22.0282, "20560": 22.00593, "20565": 22.1896, "20570": 21.99199, "20575": 22.22231, "20580": 22.0971, "20585": 21.97023, "20590": 21.9557, "20595": 23.75948, "20600": 21.94612, "20605": 21.94234, "20610": 22.89706, "20615": 21.97001, "20620": 21.93197, "20625": 21.98767, "20630": 23.57337, "20635": 22.07767, "20640": 21.95705, "20645": 22.05142, "20650": 23.08367, "20655": 21.92001, "20660": 21.92312, "20665": 22.01255, "20670": 21.92256, "20675": 22.13815, "20680": 21.90127, "20685": 21.97626, "20690": 22.08382, "20695": 21.93223, "20700": 22.22619, "20705": 22.00922, "20710": 22.20775, "20715": 22.20736, "20720": 22.16002, "20725": 21.95503, "20730": 21.92285, "20735": 21.93532, "20740": 21.93621, "20745": 22.04563, "20750": 21.90855, "20755": 22.11181, "20760": 21.89808, "20765": 22.52566, "20770": 22.03439, "20775": 22.11573, "20780": 21.88609, "20785": 22.94617, "20790": 21.96451, "20795": 21.99632, "20800": 22.24578, "20805": 23.44532, "20810": 21.96582, "20815": 22.36996, "20820": 21.97754, "20825": 22.42654, "20830": 22.74564, "20835": 21.93002, "20840": 22.14953, "20845": 22.41521, "20850": 21.94217, "20855": 22.23694, "20860": 22.17575, "20865": 22.03621, "20870": 21.88211, "20875": 22.22401, "20880": 21.89464, "20885": 21.90497, "20890": 22.25689, "20895": 21.9718, "20900": 22.10847, "20905": 22.47204, "20910": 21.94412, "20915": 21.92227, "20920": 22.39898, "20925": 21.88926, "20930": 21.88895, "20935": 
22.3142, "20940": 22.35787, "20945": 22.11632, "20950": 22.21154, "20955": 21.97493, "20960": 22.9748, "20965": 22.24159, "20970": 22.02841, "20975": 21.86926, "20980": 23.42767, "20985": 21.89917, "20990": 22.12198, "20995": 22.22296, "21000": 23.3937, "21005": 21.84537, "21010": 22.40702, "21015": 21.91698, "21020": 22.00727, "21025": 22.27846, "21030": 21.89568, "21035": 21.97612, "21040": 22.18355, "21045": 21.89477, "21050": 21.87252, "21055": 22.39557, "21060": 21.87317, "21065": 21.90295, "21070": 22.52442, "21075": 22.61439, "21080": 22.28873, "21085": 22.19503, "21090": 22.21828, "21095": 22.15216, "21100": 22.14238, "21105": 22.09633, "21110": 23.95334, "21115": 22.10181, "21120": 21.99934, "21125": 22.21178, "21130": 22.46131, "21135": 22.06959, "21140": 21.97888, "21145": 21.95407, "21150": 21.90532, "21155": 21.95326, "21160": 21.87381, "21165": 21.94204, "21170": 21.92915, "21175": 21.85864, "21180": 22.28867, "21185": 22.34218, "21190": 21.94105, "21195": 21.91789, "21200": 21.89, "21205": 21.89745, "21210": 21.92304, "21215": 21.93211, "21220": 22.61766, "21225": 22.49672, "21230": 22.05075, "21235": 21.91123, "21240": 22.77618, "21245": 21.91178, "21250": 22.00982, "21255": 21.87792, "21260": 22.95895, "21265": 21.87749, "21270": 22.3765, "21275": 21.90096, "21280": 23.472, "21285": 21.88679, "21290": 21.84865, "21295": 22.32765, "21300": 22.57172, "21305": 21.93024, "21310": 21.87886, "21315": 22.36621, "21320": 21.90438, "21325": 21.89115, "21330": 21.88831, "21335": 21.86276, "21340": 21.83491, "21345": 21.90537, "21350": 21.98884, "21355": 21.86384, "21360": 22.73313, "21365": 21.87223, "21370": 21.92554, "21375": 21.88264, "21380": 21.83027, "21385": 21.91513, "21390": 21.85397, "21395": 22.20498, "21400": 21.8539, "21405": 22.32035, "21410": 22.30253, "21415": 22.79622, "21420": 21.8373, "21425": 22.12549, "21430": 21.88865, "21435": 22.99749, "21440": 21.88109, "21445": 21.88198, "21450": 22.3217, "21455": 22.89747, "21460": 21.90877, "21465": 21.90104, "21470": 21.86651, "21475": 22.27553, "21480": 22.0044, "21485": 21.89744, "21490": 22.20341, "21495": 22.33273, "21500": 21.84427, "21505": 21.90413, "21510": 21.87869, "21515": 21.88772, "21520": 21.9021, "21525": 22.16344, "21530": 21.84345, "21535": 21.87458, "21540": 22.36913, "21545": 22.1598, "21550": 22.25897, "21555": 22.14168, "21560": 22.01805, "21565": 21.92558, "21570": 22.3771, "21575": 21.91211, "21580": 21.89195, "21585": 22.40404, "21590": 22.98244, "21595": 21.91455, "21600": 21.92152, "21605": 21.88309, "21610": 23.39477, "21615": 21.88572, "21620": 22.078, "21625": 133.74451, "21630": 22.62783, "21635": 22.32628, "21640": 22.26815, "21645": 22.21248, "21650": 22.19584, "21655": 22.40244, "21660": 22.22299, "21665": 22.11975, "21670": 22.23244, "21675": 22.55411, "21680": 22.05063, "21685": 23.39307, "21690": 22.03546, "21695": 22.00919, "21700": 22.01572, "21705": 22.72183, "21710": 22.0405, "21715": 22.02016, "21720": 22.02699, "21725": 22.03443, "21730": 21.97663, "21735": 21.99784, "21740": 22.33317, "21745": 22.14944, "21750": 22.03799, "21755": 22.0183, "21760": 22.02608, "21765": 21.98645, "21770": 21.9997, "21775": 22.01763, "21780": 23.35506, "21785": 22.46805, "21790": 21.97471, "21795": 21.97886, "21800": 22.77055, "21805": 22.09554, "21810": 21.99421, "21815": 23.34917, "21820": 22.08627, "21825": 22.18935, "21830": 21.97758, "21835": 23.57194, "21840": 22.11357, "21845": 22.12866, "21850": 22.08065, "21855": 22.02983, "21860": 22.01534, "21865": 21.9845, "21870": 22.20008, "21875": 
21.97146, "21880": 22.26416, "21885": 21.98709, "21890": 21.98241, "21895": 21.99776, "21900": 21.94696, "21905": 22.04811, "21910": 21.94711, "21915": 22.27807, "21920": 22.00271, "21925": 22.00988, "21930": 22.28717, "21935": 22.00438, "21940": 21.98936, "21945": 21.99446, "21950": 22.44096, "21955": 21.96816, "21960": 22.18998, "21965": 21.94467, "21970": 23.38386, "21975": 21.98161, "21980": 22.17849, "21985": 21.97219, "21990": 23.59452, "21995": 22.25184, "22000": 21.94024, "22005": 22.24574, "22010": 22.2541, "22015": 21.97323, "22020": 21.9345, "22025": 21.95242, "22030": 22.00922, "22035": 22.08757, "22040": 21.94958, "22045": 22.12907, "22050": 22.2376, "22055": 21.94202, "22060": 21.97822, "22065": 22.20518, "22070": 21.98839, "22075": 21.94455, "22080": 21.95866, "22085": 21.99364, "22090": 21.96275, "22095": 22.23389, "22100": 22.09759, "22105": 22.12181, "22110": 22.35469, "22115": 21.96972, "22120": 21.94735, "22125": 23.02531, "22130": 21.95334, "22135": 21.96091, "22140": 22.21155, "22145": 23.5641, "22150": 21.98764, "22155": 21.95839, "22160": 21.99208, "22165": 23.67375, "22170": 21.94698, "22175": 22.07282, "22180": 21.93818, "22185": 23.23411, "22190": 22.57034, "22195": 22.19437, "22200": 22.17878, "22205": 22.08418, "22210": 22.04082, "22215": 22.00334, "22220": 22.12255, "22225": 23.00884, "22230": 21.96922, "22235": 22.44013, "22240": 21.99205, "22245": 23.45363, "22250": 21.93119, "22255": 21.9347, "22260": 21.90236, "22265": 23.07167, "22270": 21.93202, "22275": 21.92163, "22280": 22.399, "22285": 22.06894, "22290": 21.88502, "22295": 21.90339, "22300": 22.26072, "22305": 21.86575, "22310": 21.85253, "22315": 21.86361, "22320": 21.83476, "22325": 22.37215, "22330": 22.06427, "22335": 21.86362, "22340": 22.01734, "22345": 21.87365, "22350": 21.94982, "22355": 22.00068, "22360": 21.84621, "22365": 21.85452, "22370": 22.34996, "22375": 21.85311, "22380": 22.70627, "22385": 21.88983, "22390": 21.85641, "22395": 21.93656, "22400": 22.68195, "22405": 22.06765, "22410": 21.90212, "22415": 22.64166, "22420": 22.37843, "22425": 21.84844, "22430": 21.86065, "22435": 21.87164, "22440": 22.05172, "22445": 21.87425, "22450": 21.83163, "22455": 21.87706, "22460": 22.35563, "22465": 21.83521, "22470": 22.08659, "22475": 21.84178, "22480": 21.85463, "22485": 21.84945, "22490": 21.86816, "22495": 21.84891, "22500": 21.83025, "22505": 22.29218, "22510": 21.84039, "22515": 22.11007, "22520": 21.85208, "22525": 21.90591, "22530": 22.12516, "22535": 22.78424, "22540": 21.77506, "22545": 21.82374, "22550": 22.26541, "22555": 23.35794, "22560": 21.7653, "22565": 21.8312, "22570": 21.80776, "22575": 22.7471, "22580": 21.87969, "22585": 21.81216, "22590": 21.92033, "22595": 22.94689, "22600": 22.01585, "22605": 21.79555, "22610": 21.7912, "22615": 21.82168, "22620": 21.8457, "22625": 21.86891, "22630": 21.82476, "22635": 21.89085, "22640": 22.37118, "22645": 22.09606, "22650": 21.98536, "22655": 21.88792, "22660": 21.89652, "22665": 22.12687, "22670": 22.1144, "22675": 21.87835, "22680": 21.82204, "22685": 22.30981, "22690": 22.98802, "22695": 21.75546, "22700": 21.85481, "22705": 21.81232, "22710": 23.06424, "22715": 21.81678, "22720": 21.88474, "22725": 21.84141, "22730": 23.09712, "22735": 21.79733, "22740": 21.8877, "22745": 21.86501, "22750": 22.82389, "22755": 22.46114, "22760": 22.40315, "22765": 22.33706, "22770": 23.82304, "22775": 22.20775, "22780": 22.22513, "22785": 22.09407, "22790": 22.80057, "22795": 22.1306, "22800": 22.07873, "22805": 22.98512, "22810": 22.06503, 
"22815": 22.58817, "22820": 22.0665, "22825": 22.02495, "22830": 22.09456, "22835": 22.1639, "22840": 22.01363, "22845": 22.01716, "22850": 22.00336, "22855": 22.00767, "22860": 22.41607, "22865": 22.03979, "22870": 22.00028, "22875": 21.99678, "22880": 21.98605, "22885": 22.01232, "22890": 22.08104, "22895": 22.07173, "22900": 22.25581, "22905": 22.42612, "22910": 22.0072, "22915": 21.97762, "22920": 22.56423, "22925": 22.02015, "22930": 22.00479, "22935": 22.02855, "22940": 23.33064, "22945": 21.96675, "22950": 22.39393, "22955": 21.97058, "22960": 23.47817, "22965": 22.19776, "22970": 22.07031, "22975": 22.07102, "22980": 22.81465, "22985": 22.01989, "22990": 21.97789, "22995": 22.20746, "23000": 22.62698, "23005": 21.99861, "23010": 22.09623, "23015": 22.00708, "23020": 22.00046, "23025": 21.98772, "23030": 22.12618, "23035": 22.02309, "23040": 22.50072, "23045": 22.03792, "23050": 22.00023, "23055": 22.03956, "23060": 21.9974, "23065": 21.96917, "23070": 21.99286, "23075": 22.17406, "23080": 21.98059, "23085": 22.37513, "23090": 21.94758, "23095": 23.26035, "23100": 21.97728, "23105": 21.99266, "23110": 21.96836, "23115": 23.6834, "23120": 21.98486, "23125": 21.98735, "23130": 22.5139, "23135": 23.05521, "23140": 22.0435, "23145": 22.0416, "23150": 22.11742, "23155": 22.94217, "23160": 22.22262, "23165": 21.98633, "23170": 22.11476, "23175": 22.70853, "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", 
"23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", 
"24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", 
"26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", 
"27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", 
"28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", 
"29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", 
"30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", 
"31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", 
"32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", 
"33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", 
"34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", 
"36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", 
"37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", 
"38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", 
"39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", 
"40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", 
"41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", 
"42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", 
"43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", 
"44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", 
"46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", 
"47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", 
"48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", 
"49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", 
"50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json index b3244d5..7be5a98 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.8.0.json @@ -1,15509 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 25809, - "step_interval": 5, - "values": [ - 12.66411, - 12.57516, - 11.54354, - 10.6032, - 10.16449, - 9.88042, - 9.63438, - 9.41891, - 9.20503, - 9.03148, - 8.87789, - 8.67233, - 8.53839, - 8.43406, - 8.31108, - 8.16115, - 8.02824, - 7.92113, - 7.76569, - 7.64618, - 7.56482, - 7.423, - 7.33899, - 7.1926, - 7.12876, - 7.00496, - 6.94097, - 6.84124, - 6.75131, - 6.66666, - 6.61212, - 6.52689, - 6.46099, - 6.38008, - 6.33837, - 6.26728, - 6.21, - 6.11653, - 6.08526, - 5.99383, - 5.97289, - 5.87339, - 5.84685, - 5.8009, - 5.73867, - 5.66111, - 5.64924, - 5.61117, - 5.54497, - 5.52944, - 5.44052, - 5.4127, - 5.34505, - 5.32588, - 5.31378, - 5.21715, - 5.153, - 5.15225, - 5.1334, - 5.10311, - 5.06526, - 5.01847, - 4.98702, - 4.94667, - 4.91664, - 4.91943, - 4.87036, - 4.82483, - 4.81318, - 4.77824, - 4.74309, - 4.73812, - 4.66233, - 4.64263, - 4.66767, - 4.60771, - 4.59091, - 4.55776, - 4.51109, - 4.4562, - 4.4568, - 4.39769, - 4.39211, - 4.38708, - 4.32148, - 4.3179, - 4.25069, - 4.22698, - 4.18783, - 4.17126, - 4.15768, - 4.12308, - 4.10039, - 4.03635, - 4.04794, - 4.05032, - 3.98542, - 4.01068, - 3.96227, - 3.89516, - 3.91924, - 3.92424, - 3.84845, - 3.82708, - 3.81442, - 3.80739, - 3.76773, - 3.76194, - 3.74276, - 3.70848, - 3.71628, - 3.70514, - 3.67254, - 3.69372, - 3.73836, - 3.67484, - 3.69449, - 3.69509, - 3.63909, - 3.61671, - 3.86641, - 3.91108, - 3.86229, - 3.8476, - 3.80902, - 3.79599, - 3.77916, - 3.76237, - 3.73642, - 3.7123, - 3.71527, - 3.68633, - 3.69328, - 3.6695, - 3.67081, - 3.67204, - 3.64524, - 3.61728, - 3.58576, - 3.61171, - 3.59952, - 3.58549, - 3.55617, - 3.5589, - 3.54904, - 3.52894, - 3.49346, - 3.47675, - 3.4653, - 3.46219, - 3.45321, - 3.45618, - 3.45439, - 3.4839, - 3.43183, - 3.45602, - 3.44469, - 3.44021, - 3.40449, - 3.37885, - 3.40424, - 
3.36315, - 3.36924, - 3.34641, - 3.36711, - 3.33065, - 3.30393, - 3.30704, - 3.32833, - 3.35603, - 3.36083, - 3.31763, - 3.31707, - 3.3254, - 3.31376, - 3.30202, - 3.29341, - 3.28155, - 3.26409, - 3.23184, - 3.23391, - 3.24111, - 3.22041, - 3.24121, - 3.22107, - 3.22913, - 3.24452, - 3.24685, - 3.24123, - 3.22875, - 3.23874, - 3.23119, - 3.21755, - 3.20204, - 3.20408, - 3.23557, - 3.202, - 3.16036, - 3.14542, - 3.1504, - 3.13228, - 3.13436, - 3.11197, - 3.11828, - 3.15679, - 3.1374, - 3.12728, - 3.10044, - 3.11871, - 3.07607, - 3.09491, - 3.07588, - 3.07614, - 3.09542, - 3.12474, - 3.12076, - 3.1064, - 3.12262, - 3.14063, - 3.15886, - 3.10728, - 3.10984, - 3.1073, - 3.07684, - 3.08415, - 3.07667, - 3.05886, - 3.06151, - 3.0475, - 3.01151, - 3.03355, - 3.02966, - 3.02163, - 3.0594, - 3.04414, - 3.03074, - 3.0045, - 2.99584, - 3.00557, - 2.99064, - 2.98265, - 3.0317, - 3.0242, - 3.00816, - 2.99402, - 3.00563, - 2.97254, - 3.00519, - 2.99428, - 2.97898, - 2.97925, - 2.95006, - 2.97934, - 2.96066, - 2.95033, - 2.94045, - 2.92782, - 2.93269, - 2.95276, - 3.00208, - 3.00598, - 2.9958, - 3.02247, - 3.05693, - 3.0513, - 3.03139, - 3.04019, - 3.0275, - 3.03915, - 3.06306, - 3.09514, - 3.01386, - 2.96103, - 2.94824, - 2.92383, - 2.93269, - 2.91472, - 2.91698, - 2.90928, - 2.93277, - 2.89275, - 2.89732, - 2.90346, - 2.90917, - 2.88319, - 2.90531, - 2.90678, - 2.88025, - 2.88212, - 2.88666, - 2.89034, - 2.95103, - 2.9194, - 2.88403, - 2.88091, - 2.86091, - 2.85296, - 2.83686, - 2.8802, - 2.85111, - 2.84398, - 2.83726, - 2.87247, - 2.89281, - 2.89314, - 2.88111, - 2.88313, - 2.86382, - 2.83568, - 2.84982, - 2.82808, - 2.83919, - 2.82193, - 2.82643, - 2.815, - 2.82335, - 2.80299, - 2.83569, - 2.83059, - 2.83417, - 2.81645, - 2.79908, - 2.81806, - 2.82235, - 2.81913, - 2.80616, - 2.80297, - 2.80908, - 2.80267, - 2.82718, - 2.79742, - 2.7676, - 2.77967, - 2.79068, - 2.80364, - 2.7967, - 2.78296, - 2.77958, - 2.78218, - 2.79398, - 2.96053, - 2.93975, - 2.89807, - 2.90914, - 2.86565, - 2.93572, - 2.98157, - 3.12438, - 3.03965, - 3.07819, - 2.94204, - 2.88763, - 2.83853, - 2.83218, - 2.79569, - 2.78657, - 2.762, - 2.77675, - 2.78343, - 2.78284, - 2.78346, - 2.73175, - 2.77196, - 2.77058, - 2.75471, - 2.75461, - 2.76067, - 2.7878, - 2.77527, - 2.77343, - 2.76018, - 2.78462, - 2.75518, - 2.73606, - 2.74057, - 2.74578, - 2.76842, - 2.75133, - 2.75878, - 2.76826, - 2.75262, - 2.75032, - 2.74467, - 2.73292, - 2.73767, - 2.73096, - 2.76454, - 2.74557, - 2.74463, - 2.74477, - 2.71386, - 2.72494, - 2.71917, - 2.72265, - 2.71687, - 2.72912, - 2.71285, - 2.72567, - 2.70247, - 2.7046, - 2.70247, - 2.69536, - 2.7269, - 2.69956, - 2.75905, - 2.72384, - 2.7216, - 2.70528, - 2.70104, - 2.72049, - 2.71635, - 2.74128, - 2.73336, - 2.72151, - 2.69487, - 2.70528, - 2.68494, - 2.6742, - 2.67271, - 2.70942, - 2.66563, - 2.69598, - 2.67056, - 2.66522, - 2.69677, - 2.68403, - 2.68064, - 2.67474, - 2.87777, - 2.72613, - 2.72961, - 2.70526, - 2.69693, - 2.68454, - 2.66846, - 2.67258, - 2.66899, - 2.65032, - 2.68423, - 2.66745, - 2.67757, - 2.67157, - 2.68437, - 2.69593, - 2.6777, - 2.7056, - 2.66653, - 2.66106, - 2.67401, - 2.65086, - 2.64777, - 2.66265, - 2.67707, - 2.66609, - 2.63845, - 2.67924, - 2.64907, - 2.63357, - 2.64204, - 2.64246, - 2.63656, - 2.63001, - 2.6428, - 2.67454, - 2.65072, - 2.65904, - 2.64678, - 2.65651, - 2.6273, - 2.60058, - 2.62801, - 2.6597, - 2.60682, - 2.62805, - 2.63717, - 2.62339, - 2.63626, - 2.6438, - 2.64716, - 2.62449, - 2.64257, - 2.67059, - 2.6379, - 2.64702, - 2.69813, - 2.68945, - 2.66396, - 
2.63082, - 2.64437, - 2.62969, - 2.61701, - 2.62118, - 2.61583, - 2.57513, - 2.61832, - 2.62818, - 2.5981, - 2.61345, - 2.64531, - 2.63026, - 2.64755, - 2.60326, - 2.63456, - 2.60604, - 2.62234, - 2.63267, - 2.59304, - 2.64316, - 2.61999, - 2.63293, - 2.60151, - 2.62664, - 2.58264, - 2.6135, - 2.58512, - 2.65074, - 2.60605, - 2.57324, - 2.58708, - 2.6458, - 2.62067, - 2.57395, - 2.59338, - 2.61362, - 2.57774, - 2.58543, - 2.57094, - 2.58595, - 2.58277, - 2.60221, - 2.59871, - 2.61073, - 2.6131, - 2.58232, - 2.58274, - 5.10252, - 3.7827, - 2.85664, - 2.8929, - 2.81138, - 2.8178, - 2.82754, - 2.65995, - 2.64274, - 2.59685, - 2.58541, - 2.59865, - 2.57182, - 2.60874, - 2.56996, - 2.56967, - 2.55983, - 2.59211, - 2.5685, - 2.68655, - 2.63724, - 2.6228, - 2.59465, - 2.58816, - 2.54588, - 2.5631, - 2.55327, - 2.55339, - 2.58847, - 2.59301, - 2.55715, - 2.59674, - 2.56258, - 2.57543, - 2.57048, - 2.57652, - 2.57145, - 2.57921, - 2.59337, - 2.57918, - 2.55959, - 2.56019, - 2.57094, - 2.54186, - 2.55944, - 2.54007, - 2.56213, - 2.57086, - 2.54538, - 2.5387, - 2.55329, - 2.54965, - 2.58243, - 2.52765, - 2.53317, - 2.54771, - 2.57974, - 2.54652, - 2.57573, - 2.5414, - 2.57058, - 2.54752, - 2.55178, - 2.56092, - 2.65328, - 2.63202, - 2.76889, - 2.68693, - 2.59635, - 2.57176, - 2.55804, - 2.54201, - 2.5494, - 2.54898, - 2.54794, - 2.55814, - 2.524, - 2.53347, - 2.55295, - 2.54841, - 2.53277, - 2.5371, - 2.54656, - 2.54167, - 2.49941, - 2.53562, - 2.5576, - 2.57073, - 2.65897, - 2.62885, - 2.57782, - 2.57227, - 2.5502, - 2.52615, - 2.51846, - 2.54957, - 2.5441, - 2.53438, - 2.54987, - 2.52454, - 2.52552, - 2.52362, - 2.52257, - 2.54204, - 2.51418, - 2.52265, - 2.52699, - 2.54211, - 2.92649, - 2.56868, - 2.57149, - 2.55966, - 2.54272, - 2.52941, - 2.52977, - 2.55518, - 2.5059, - 2.49772, - 2.52544, - 2.54471, - 2.50476, - 2.52263, - 2.49689, - 2.54787, - 2.50406, - 2.52705, - 2.52693, - 2.49849, - 2.51595, - 2.51793, - 2.48373, - 2.50489, - 2.52277, - 2.4983, - 2.51945, - 2.48681, - 2.51802, - 2.49539, - 2.5186, - 2.51261, - 2.4912, - 2.49299, - 2.58307, - 2.55548, - 2.51293, - 2.49444, - 2.52876, - 2.50204, - 2.51253, - 2.51834, - 2.49593, - 2.49698, - 2.49959, - 2.54374, - 2.50829, - 2.50251, - 2.4714, - 2.48828, - 2.48606, - 2.48724, - 2.4802, - 2.4646, - 2.46644, - 2.47273, - 2.47736, - 2.48761, - 2.48264, - 2.50997, - 2.48164, - 2.5124, - 2.48913, - 2.47703, - 2.57013, - 2.51527, - 2.50437, - 2.49668, - 2.52706, - 2.48805, - 2.4938, - 2.47834, - 2.46217, - 2.50757, - 2.48795, - 2.47117, - 2.47748, - 2.50137, - 2.48898, - 2.49565, - 2.45997, - 2.48252, - 2.45257, - 2.51143, - 2.46898, - 2.4731, - 3.45631, - 2.66496, - 2.5822, - 2.61394, - 2.54199, - 2.51064, - 2.49616, - 2.50271, - 2.47927, - 2.49807, - 2.49834, - 2.46281, - 2.47762, - 2.47519, - 2.46263, - 2.48371, - 2.44151, - 2.45273, - 2.45813, - 2.4672, - 2.47065, - 2.45921, - 2.47448, - 2.48647, - 2.4493, - 2.48145, - 5.60101, - 3.04163, - 2.61459, - 2.61974, - 2.52342, - 2.4954, - 2.48044, - 2.48996, - 2.46989, - 2.45434, - 2.46322, - 2.50222, - 2.46887, - 2.42965, - 2.44857, - 2.45906, - 2.46297, - 2.44755, - 2.46167, - 2.48561, - 2.45674, - 2.46964, - 2.42551, - 2.46506, - 2.47014, - 2.44821, - 2.44763, - 2.46011, - 2.46478, - 2.4834, - 2.50231, - 2.47178, - 2.45658, - 2.47718, - 2.44636, - 2.4529, - 2.43527, - 2.43681, - 2.45868, - 2.43822, - 2.4501, - 2.4549, - 2.43058, - 2.44892, - 2.66355, - 2.50838, - 2.49106, - 2.46143, - 2.44137, - 2.4442, - 2.44763, - 2.44496, - 2.4441, - 2.43145, - 2.44059, - 2.4207, - 2.45088, - 2.42472, - 2.43283, 
- 2.45799, - 2.44037, - 2.41054, - 2.43189, - 2.44633, - 2.40592, - 2.44642, - 2.40853, - 2.41919, - 2.41243, - 2.44535, - 2.41295, - 2.4487, - 2.43023, - 2.42297, - 2.45679, - 2.56554, - 2.52767, - 2.46144, - 2.42239, - 2.43187, - 2.40826, - 2.41466, - 2.40446, - 2.4212, - 2.42113, - 2.43036, - 2.41904, - 2.40481, - 2.42822, - 2.41741, - 2.39981, - 2.40896, - 2.40466, - 2.41905, - 2.39711, - 2.40311, - 2.40408, - 2.40879, - 2.41018, - 2.40198, - 2.42203, - 2.41935, - 2.40528, - 2.43275, - 2.44511, - 2.45021, - 2.41582, - 2.41097, - 2.39785, - 2.41581, - 2.40562, - 2.39796, - 2.41277, - 2.37093, - 2.40407, - 2.37606, - 2.38526, - 2.39534, - 2.40719, - 2.39547, - 2.41441, - 2.40578, - 2.40664, - 2.40259, - 2.43356, - 2.39976, - 2.40539, - 2.41574, - 2.39213, - 2.39022, - 2.40815, - 2.4108, - 2.39537, - 2.38769, - 2.40217, - 2.36938, - 2.37087, - 2.40508, - 2.40523, - 2.41153, - 2.38363, - 2.37615, - 2.38623, - 2.37808, - 2.40562, - 2.35967, - 2.38508, - 2.37367, - 2.36898, - 2.39865, - 2.37925, - 2.39824, - 2.36595, - 2.38837, - 2.37899, - 2.37416, - 2.37449, - 2.3935, - 2.39858, - 2.38075, - 2.36845, - 2.38085, - 2.37411, - 2.3665, - 2.37798, - 3.4126, - 2.45681, - 2.45932, - 2.42545, - 2.40192, - 2.3757, - 2.38718, - 2.39098, - 2.389, - 2.38218, - 2.35271, - 2.37676, - 2.37624, - 2.40922, - 2.35151, - 2.39615, - 2.37704, - 2.36568, - 2.34517, - 2.35607, - 3.41815, - 2.45154, - 2.45173, - 2.4075, - 2.39719, - 2.37313, - 2.3852, - 2.39014, - 2.38838, - 2.38082, - 2.35184, - 2.37625, - 2.37518, - 2.40951, - 2.35183, - 2.3963, - 2.37721, - 2.35644, - 2.34411, - 2.34907, - 2.35, - 2.37084, - 2.38258, - 2.34244, - 2.33619, - 2.35127, - 2.37487, - 2.36946, - 2.36555, - 2.36622, - 2.36664, - 2.3518, - 2.38268, - 2.37313, - 2.36951, - 2.3556, - 2.35122, - 2.35177, - 2.3484, - 2.37416, - 2.34384, - 2.38254, - 2.34784, - 2.34734, - 2.35937, - 2.35188, - 2.36656, - 2.37593, - 2.36648, - 2.35294, - 2.35873, - 2.35593, - 2.33805, - 2.36769, - 2.34278, - 2.3452, - 2.3501, - 2.3606, - 2.33848, - 2.3521, - 2.35697, - 2.34791, - 2.33823, - 2.33585, - 2.3376, - 2.37852, - 2.37086, - 2.34487, - 2.32444, - 2.37847, - 2.31607, - 2.36662, - 2.35298, - 2.36544, - 2.32139, - 2.3497, - 2.32667, - 2.31209, - 2.36248, - 2.33577, - 2.32924, - 2.34536, - 2.35568, - 2.32816, - 2.34109, - 2.35313, - 2.34368, - 2.32868, - 2.31828, - 2.33574, - 2.33602, - 2.35537, - 2.34132, - 2.32738, - 2.33634, - 2.32236, - 2.30612, - 2.32071, - 2.30058, - 2.33707, - 2.34003, - 2.33346, - 2.3392, - 2.3368, - 2.29906, - 2.30426, - 2.34929, - 2.33691, - 2.30409, - 2.31856, - 2.30877, - 2.34753, - 2.31753, - 2.30473, - 2.30711, - 2.34629, - 2.31416, - 2.32336, - 2.32901, - 2.33992, - 2.32014, - 2.35699, - 2.29662, - 2.30752, - 2.33833, - 2.34731, - 2.32189, - 2.3342, - 2.3325, - 2.2962, - 2.32674, - 2.3346, - 2.30586, - 2.31866, - 2.33417, - 2.33007, - 2.31537, - 2.32835, - 2.30873, - 2.32413, - 2.30499, - 2.34434, - 2.29632, - 2.29852, - 2.32797, - 2.32733, - 2.3215, - 2.33831, - 2.32226, - 2.31503, - 2.31293, - 2.29553, - 2.29585, - 2.31594, - 2.29929, - 2.31303, - 2.32006, - 2.33263, - 2.30624, - 2.29536, - 2.33261, - 2.29497, - 2.31418, - 2.30805, - 2.32763, - 2.36516, - 2.31831, - 2.31479, - 2.31257, - 2.2919, - 2.29083, - 2.30541, - 2.33874, - 2.29163, - 2.31391, - 2.32125, - 2.32191, - 2.30909, - 2.29203, - 2.31719, - 2.29465, - 2.30653, - 2.29871, - 2.30002, - 2.31042, - 2.2853, - 2.31587, - 2.31252, - 2.2793, - 2.30282, - 2.25167, - 2.29225, - 2.30705, - 2.31875, - 2.2839, - 2.29688, - 2.31421, - 2.29834, - 2.2981, - 2.29318, - 
2.28765, - 2.31016, - 2.29365, - 2.30703, - 2.29611, - 2.29438, - 2.28643, - 2.27507, - 2.27993, - 2.29851, - 2.31715, - 2.27945, - 2.32453, - 2.29726, - 2.28811, - 2.27647, - 2.29779, - 2.31235, - 2.28765, - 2.30079, - 2.32162, - 2.29821, - 2.27832, - 2.28576, - 2.30729, - 2.30097, - 2.2833, - 2.286, - 2.30791, - 2.27955, - 2.2937, - 2.29328, - 2.28288, - 2.30789, - 2.3047, - 2.31643, - 2.33528, - 2.29746, - 2.30297, - 2.29795, - 2.25887, - 2.28062, - 2.29151, - 2.26852, - 2.27986, - 2.27989, - 2.29265, - 2.33602, - 2.2692, - 2.28938, - 2.27693, - 2.28194, - 2.26056, - 2.28424, - 2.28435, - 2.28953, - 2.2745, - 2.27479, - 2.26439, - 2.28375, - 2.2738, - 2.25722, - 2.26773, - 2.2875, - 2.28001, - 2.28734, - 2.23003, - 2.28859, - 2.26699, - 2.26021, - 2.28559, - 2.28204, - 2.2819, - 2.30033, - 2.2699, - 2.28156, - 2.29762, - 2.27843, - 2.27219, - 2.28373, - 2.27144, - 2.26943, - 2.26467, - 2.28622, - 2.27833, - 2.2711, - 2.29905, - 2.27272, - 2.25613, - 2.26406, - 2.26998, - 2.22571, - 2.27079, - 2.26904, - 2.27769, - 2.25549, - 2.26324, - 2.3207, - 2.24748, - 2.28025, - 2.26555, - 2.24703, - 2.23219, - 2.26615, - 2.26764, - 2.25261, - 2.24459, - 2.25994, - 2.25425, - 2.26257, - 2.26304, - 2.2658, - 2.23069, - 2.27564, - 2.27945, - 2.26938, - 2.26596, - 2.24777, - 2.27221, - 2.2627, - 2.25783, - 2.23139, - 2.29444, - 2.24838, - 2.26498, - 2.25982, - 2.26647, - 2.27729, - 2.25634, - 2.26301, - 2.2431, - 2.26673, - 2.24341, - 2.25452, - 2.26073, - 2.27015, - 2.26451, - 2.2372, - 2.28087, - 2.25998, - 2.26951, - 2.27372, - 2.26628, - 2.25288, - 2.24016, - 2.2463, - 2.2412, - 2.24088, - 2.27045, - 2.25563, - 2.25336, - 2.24708, - 2.23368, - 2.28392, - 2.22941, - 2.24152, - 2.25285, - 2.27771, - 2.2596, - 2.25145, - 2.25431, - 2.25111, - 2.22676, - 2.2383, - 2.22913, - 2.23077, - 2.26189, - 2.26198, - 2.27155, - 2.26289, - 2.25613, - 2.24493, - 2.24488, - 2.21664, - 2.25535, - 2.25616, - 2.25566, - 2.257, - 2.25213, - 2.25392, - 2.24508, - 2.24833, - 2.2831, - 2.24146, - 2.23173, - 2.22154, - 2.23891, - 2.23213, - 2.25906, - 2.23966, - 2.24831, - 2.24413, - 2.24186, - 2.25136, - 2.22626, - 2.20194, - 2.23917, - 2.22365, - 2.23584, - 2.25988, - 2.24301, - 2.23764, - 2.24454, - 2.21896, - 2.21993, - 2.25314, - 2.23316, - 2.22256, - 2.22445, - 2.22593, - 2.25032, - 2.23803, - 2.25304, - 2.24287, - 2.25814, - 2.22384, - 2.21532, - 2.20589, - 2.23821, - 2.22417, - 2.21108, - 2.23594, - 2.21555, - 2.25195, - 2.26063, - 2.24206, - 2.22611, - 2.25112, - 2.23082, - 2.23036, - 2.2277, - 2.23037, - 2.20874, - 2.22116, - 2.23917, - 2.24361, - 2.20392, - 2.22179, - 2.23097, - 2.22229, - 2.21195, - 2.22944, - 2.25981, - 2.2434, - 2.20831, - 2.24115, - 2.21434, - 2.22974, - 2.2362, - 2.21264, - 2.20396, - 2.23692, - 2.26001, - 2.21333, - 2.23951, - 2.24333, - 2.22447, - 2.21248, - 2.23774, - 2.21791, - 2.24057, - 2.22342, - 2.23545, - 2.22227, - 2.21786, - 2.20227, - 2.23391, - 2.22201, - 2.21595, - 2.22192, - 2.21282, - 2.23323, - 2.2344, - 2.22201, - 2.2026, - 2.20419, - 2.2483, - 2.21553, - 2.20059, - 2.24563, - 2.20672, - 2.21503, - 2.20151, - 2.20084, - 2.219, - 2.20243, - 2.19927, - 2.22923, - 2.21072, - 2.21969, - 2.2213, - 2.20264, - 2.25217, - 2.23773, - 2.21575, - 2.20187, - 2.21114, - 2.22712, - 2.20509, - 2.2168, - 2.19591, - 2.21125, - 2.21122, - 2.23691, - 2.19949, - 2.21691, - 2.2007, - 2.24638, - 2.22655, - 2.20339, - 2.22853, - 2.1873, - 2.21884, - 2.2094, - 2.2086, - 2.20743, - 2.21903, - 2.19814, - 2.19975, - 2.20395, - 2.2373, - 2.20414, - 2.21871, - 2.23264, - 2.20313, - 2.22064, - 
2.21361, - 2.18704, - 2.22281, - 2.20231, - 2.22411, - 2.22443, - 2.20549, - 2.20824, - 2.2348, - 2.2069, - 2.22117, - 2.19895, - 2.17462, - 2.21554, - 2.19418, - 2.20804, - 2.2141, - 2.20324, - 2.21361, - 2.22517, - 2.19254, - 2.19933, - 2.21123, - 2.1993, - 2.1968, - 2.21417, - 2.21512, - 2.21611, - 2.20759, - 2.22837, - 2.21474, - 2.21309, - 2.19111, - 2.2002, - 2.21002, - 2.20039, - 2.21654, - 2.35729, - 2.24048, - 2.22567, - 2.20266, - 2.20885, - 2.21111, - 2.20912, - 2.21097, - 2.18819, - 2.22907, - 2.20253, - 2.1596, - 2.19965, - 2.20757, - 2.18336, - 2.19658, - 2.17928, - 2.23315, - 2.17944, - 2.19513, - 2.18579, - 2.19091, - 2.18981, - 2.19793, - 2.19356, - 2.20001, - 2.20008, - 2.1974, - 2.17898, - 2.21242, - 2.18683, - 2.19748, - 2.20972, - 2.18406, - 2.19211, - 2.22904, - 2.21988, - 2.21199, - 2.18348, - 2.17357, - 2.20285, - 2.1977, - 2.20577, - 2.18578, - 2.17496, - 2.18366, - 2.21152, - 2.18982, - 2.23573, - 2.19042, - 2.20649, - 2.2025, - 2.19027, - 2.1962, - 2.2164, - 2.19403, - 2.20102, - 2.1985, - 2.16246, - 2.18342, - 2.18692, - 2.19626, - 2.18192, - 2.1893, - 2.18755, - 2.21025, - 2.18549, - 2.184, - 2.20517, - 2.20886, - 2.20518, - 2.17352, - 2.17371, - 2.20078, - 2.18592, - 2.18403, - 2.18033, - 2.19754, - 2.19426, - 2.19499, - 2.20602, - 2.17739, - 2.21333, - 2.1663, - 2.15994, - 2.19678, - 2.21246, - 2.15862, - 2.18358, - 2.15428, - 2.20359, - 2.19003, - 2.1953, - 2.19557, - 2.16132, - 2.21895, - 2.19617, - 2.21634, - 2.19686, - 2.19147, - 2.18437, - 2.19547, - 2.20941, - 2.17363, - 2.18971, - 2.18604, - 2.18042, - 2.17109, - 2.19788, - 2.16382, - 2.15782, - 2.17956, - 2.18243, - 2.1787, - 2.17642, - 2.18644, - 2.14688, - 2.17485, - 2.21044, - 2.19769, - 2.19495, - 2.1608, - 2.18587, - 2.16831, - 2.20116, - 2.17414, - 2.16728, - 2.18941, - 2.19834, - 2.15607, - 2.19672, - 2.17378, - 2.17543, - 2.18507, - 2.1903, - 2.16206, - 2.16569, - 2.17585, - 2.19927, - 2.14874, - 2.16111, - 2.16594, - 2.21272, - 2.20347, - 2.16851, - 2.18174, - 2.1722, - 2.16502, - 2.18958, - 2.172, - 2.17576, - 2.19585, - 2.15571, - 2.15914, - 2.19858, - 2.16805, - 2.15536, - 2.19079, - 2.19912, - 2.17785, - 2.19722, - 2.18203, - 2.18803, - 2.15101, - 2.19091, - 2.15855, - 2.14759, - 2.18355, - 2.17852, - 2.17394, - 2.16678, - 2.17352, - 2.17239, - 2.16823, - 2.17916, - 2.16634, - 2.16794, - 2.16985, - 2.14855, - 2.17634, - 2.17512, - 2.16301, - 2.1526, - 2.16815, - 2.19929, - 2.17279, - 2.16724, - 2.17854, - 2.17462, - 2.15162, - 2.17402, - 2.2037, - 2.1857, - 2.16011, - 2.1677, - 2.1605, - 2.16044, - 2.16289, - 2.16693, - 2.15834, - 2.15576, - 2.17548, - 2.17367, - 2.19603, - 2.17902, - 2.19339, - 2.15507, - 2.18984, - 2.16392, - 2.17049, - 2.16408, - 2.18821, - 2.17378, - 2.17612, - 2.15704, - 2.17436, - 2.16806, - 2.17331, - 2.18089, - 2.19023, - 2.17341, - 2.1837, - 2.16447, - 2.17717, - 2.12845, - 2.16581, - 2.16576, - 2.17878, - 2.15896, - 2.14349, - 2.13857, - 2.163, - 2.16686, - 2.13574, - 2.17099, - 2.16829, - 2.1957, - 2.14049, - 2.1614, - 2.33308, - 2.18864, - 2.19581, - 2.15764, - 2.21001, - 2.17369, - 2.169, - 2.16057, - 2.1555, - 2.17984, - 2.17026, - 2.13552, - 2.15683, - 2.144, - 2.15337, - 2.15827, - 2.17272, - 2.15098, - 2.16686, - 2.16543, - 2.14474, - 2.17108, - 2.17368, - 2.15313, - 2.15852, - 2.15723, - 2.16181, - 2.17457, - 2.15197, - 2.15349, - 2.15066, - 2.15799, - 2.16662, - 2.15251, - 2.15903, - 2.16832, - 2.16734, - 2.14137, - 2.14993, - 2.16748, - 2.19773, - 2.16805, - 2.15964, - 2.1804, - 2.17998, - 2.14806, - 2.14573, - 2.13933, - 2.14742, - 2.15124, - 2.14117, 
- 2.15974, - 2.15591, - 2.16682, - 2.16508, - 2.14472, - 2.14973, - 2.16258, - 2.14212, - 2.19087, - 2.18512, - 2.15518, - 2.13408, - 2.1584, - 2.13969, - 2.15498, - 2.15836, - 2.15812, - 2.15092, - 2.14058, - 2.16166, - 2.19202, - 2.18302, - 2.16288, - 2.14476, - 2.19021, - 2.16748, - 2.16459, - 2.15818, - 2.15253, - 2.17882, - 2.17051, - 2.13662, - 2.15769, - 2.1451, - 2.15455, - 2.15933, - 2.17352, - 2.15205, - 2.16782, - 2.16651, - 2.14543, - 2.17196, - 2.17428, - 2.15367, - 2.15865, - 2.15753, - 2.16251, - 2.17474, - 2.15179, - 2.15464, - 2.15189, - 2.15825, - 2.16679, - 2.15247, - 2.15879, - 2.16848, - 2.16712, - 2.14151, - 2.14919, - 2.16636, - 2.19694, - 2.16746, - 2.15615, - 2.1801, - 2.18019, - 2.14781, - 2.14405, - 2.13878, - 2.14619, - 2.15067, - 2.14029, - 2.15864, - 2.15524, - 2.16666, - 2.16502, - 2.14454, - 2.14967, - 2.16244, - 2.14155, - 2.19212, - 2.18411, - 2.1545, - 2.13298, - 2.15686, - 2.13777, - 2.15407, - 2.15742, - 2.15722, - 2.14982, - 2.12737, - 2.15411, - 2.15453, - 2.14356, - 2.17199, - 2.15532, - 2.12601, - 2.12197, - 2.17268, - 2.13875, - 2.18042, - 2.13088, - 2.15764, - 2.17407, - 2.13045, - 2.15704, - 2.16287, - 2.1617, - 2.13503, - 2.15413, - 2.14423, - 2.14843, - 2.14099, - 2.16652, - 2.16624, - 2.16699, - 2.14701, - 2.14252, - 2.14079, - 2.15245, - 2.15248, - 2.16716, - 2.1652, - 2.17333, - 2.15225, - 2.15625, - 2.1559, - 2.15638, - 2.14564, - 2.13573, - 2.18864, - 2.14585, - 2.16181, - 2.14622, - 2.14284, - 2.14361, - 2.1353, - 2.13868, - 2.18464, - 2.13446, - 2.14149, - 2.15089, - 2.16825, - 2.15287, - 2.14872, - 2.11852, - 2.1368, - 2.1548, - 2.15594, - 2.15019, - 2.12168, - 2.14385, - 2.11972, - 2.12978, - 2.1364, - 2.15372, - 2.15559, - 2.14493, - 2.15871, - 2.14851, - 2.16254, - 2.15676, - 2.1324, - 2.13414, - 2.13716, - 2.15354, - 2.13055, - 2.14861, - 2.13414, - 2.13118, - 2.16083, - 2.14755, - 2.16996, - 2.15333, - 2.14687, - 2.13754, - 2.12017, - 2.12175, - 2.15103, - 2.12596, - 2.14087, - 2.15069, - 2.14017, - 2.14556, - 2.14779, - 2.11721, - 2.13546, - 2.14762, - 2.12142, - 2.11681, - 2.12942, - 2.16537, - 2.14594, - 2.14403, - 2.13581, - 2.14601, - 2.15087, - 2.13722, - 2.136, - 2.13283, - 2.15993, - 2.10791, - 2.12652, - 2.12944, - 2.12434, - 2.16751, - 2.1412, - 2.14415, - 2.1601, - 2.15032, - 2.15054, - 2.13025, - 2.12893, - 2.13228, - 2.12559, - 2.14819, - 2.1192, - 2.14483, - 2.13315, - 2.11682, - 2.11695, - 2.14524, - 2.11143, - 2.11339, - 2.11413, - 2.13984, - 2.13872, - 2.14782, - 2.14373, - 2.12765, - 2.12166, - 2.14038, - 2.1169, - 2.16891, - 2.11816, - 2.11764, - 2.10502, - 2.11715, - 2.16007, - 2.1139, - 2.12358, - 2.13892, - 2.15004, - 2.11246, - 2.12922, - 2.14736, - 2.13472, - 2.10951, - 2.12747, - 2.13798, - 2.12388, - 2.11521, - 2.10739, - 2.13998, - 2.13769, - 2.14859, - 2.13339, - 2.15248, - 2.14247, - 2.13312, - 2.14542, - 2.12039, - 2.11279, - 2.13326, - 2.14623, - 2.12046, - 2.12902, - 2.15093, - 2.14723, - 2.13488, - 2.15025, - 2.13168, - 2.14272, - 2.12932, - 2.13982, - 2.13424, - 2.11723, - 2.14033, - 2.11476, - 2.11145, - 2.12764, - 2.13232, - 2.11847, - 2.1461, - 2.10997, - 2.10156, - 2.1451, - 2.12625, - 2.13328, - 2.11557, - 2.1215, - 2.12135, - 2.15984, - 2.14912, - 2.12044, - 2.11027, - 2.10736, - 2.1285, - 2.13769, - 2.14091, - 2.10334, - 2.12345, - 2.12627, - 2.13376, - 2.14276, - 2.15602, - 2.15069, - 2.14161, - 2.1043, - 2.13112, - 2.11701, - 2.12521, - 2.08875, - 2.12792, - 2.13596, - 2.12691, - 2.12076, - 2.13896, - 2.13719, - 2.15087, - 2.11978, - 2.0985, - 2.12918, - 2.13974, - 2.12134, - 2.13189, - 
2.12789, - 2.12962, - 2.13089, - 2.14811, - 2.12857, - 2.11768, - 2.12173, - 2.10441, - 2.14866, - 2.13166, - 2.12901, - 2.127, - 2.11426, - 2.12093, - 2.11143, - 2.11727, - 2.11241, - 2.12266, - 2.13044, - 2.10739, - 2.10831, - 2.15523, - 2.11048, - 2.13542, - 2.13614, - 2.12683, - 2.13448, - 2.12596, - 2.12179, - 2.12048, - 2.1139, - 2.10651, - 2.11425, - 2.11126, - 2.14146, - 2.11739, - 2.12012, - 2.09532, - 2.10843, - 2.09704, - 2.11482, - 2.11549, - 2.13335, - 2.12748, - 2.12996, - 2.12102, - 2.10231, - 2.121, - 2.08735, - 2.1264, - 2.13147, - 2.11565, - 2.13246, - 2.11584, - 2.13548, - 2.12057, - 2.13249, - 2.13311, - 2.13539, - 2.08873, - 2.15552, - 2.13632, - 2.1273, - 2.10797, - 2.10855, - 2.12145, - 2.09884, - 2.11454, - 2.10846, - 2.11284, - 2.11202, - 2.12415, - 2.10981, - 2.13325, - 2.11918, - 2.11938, - 2.10863, - 2.11764, - 2.12571, - 2.11926, - 2.11383, - 2.14034, - 2.11653, - 2.10883, - 2.11607, - 2.11223, - 2.13003, - 2.10391, - 2.09898, - 2.12297, - 2.11622, - 2.11255, - 2.11382, - 2.10276, - 2.0993, - 2.13575, - 2.10113, - 2.10347, - 2.13801, - 2.11259, - 2.1356, - 2.11331, - 2.14302, - 2.11484, - 2.1231, - 2.14666, - 2.09468, - 2.10025, - 2.11826, - 2.10354, - 2.12973, - 2.10786, - 2.10133, - 2.1188, - 2.12139, - 2.10567, - 2.10296, - 2.1229, - 2.13631, - 2.11626, - 2.09, - 2.09436, - 2.12306, - 2.12402, - 2.11397, - 2.11184, - 2.11068, - 2.1035, - 2.1186, - 2.12232, - 2.10365, - 2.11107, - 2.09657, - 2.10619, - 2.11737, - 2.10038, - 2.10319, - 2.13439, - 2.10429, - 2.07575, - 2.12834, - 2.11125, - 2.087, - 2.09909, - 2.13771, - 2.11033, - 2.09643, - 2.11279, - 2.11157, - 2.08541, - 2.11924, - 2.11518, - 2.11957, - 2.11874, - 2.08321, - 2.12935, - 2.09743, - 2.11283, - 2.10512, - 2.11416, - 2.10964, - 2.11671, - 2.07233, - 2.12294, - 2.09786, - 2.10687, - 2.1019, - 2.1202, - 2.11577, - 2.1137, - 2.08861, - 2.10085, - 2.10267, - 2.12121, - 2.10177, - 2.09619, - 2.09794, - 2.08094, - 2.08729, - 2.09336, - 2.09897, - 2.10286, - 2.07176, - 2.10334, - 2.12713, - 2.11912, - 2.11999, - 2.08836, - 2.10282, - 2.12619, - 2.0978, - 2.10238, - 2.10465, - 2.1121, - 2.12913, - 2.09269, - 2.11261, - 2.11606, - 2.07935, - 2.09366, - 2.12006, - 2.09347, - 2.07733, - 2.10526, - 2.10092, - 2.10797, - 2.10158, - 2.12027, - 2.10471, - 2.09255, - 2.0975, - 2.0737, - 2.11164, - 2.11574, - 2.09266, - 2.09184, - 2.09209, - 2.10541, - 2.09615, - 2.11114, - 2.08241, - 2.1174, - 2.11024, - 2.07316, - 2.09176, - 2.10127, - 2.08781, - 2.08613, - 2.09108, - 2.11006, - 2.10495, - 2.10946, - 2.07477, - 2.11336, - 2.09873, - 2.10383, - 2.14032, - 2.094, - 2.09863, - 2.11004, - 2.10177, - 2.09064, - 2.09376, - 2.09919, - 2.1078, - 2.10378, - 2.088, - 2.10266, - 2.0971, - 2.11202, - 2.06814, - 2.09322, - 2.10195, - 2.09977, - 2.08712, - 2.08943, - 2.0943, - 2.09088, - 2.07683, - 2.09816, - 2.0957, - 2.09438, - 2.08377, - 2.10353, - 2.09148, - 2.12309, - 2.07554, - 2.10233, - 2.10267, - 2.12013, - 2.07702, - 2.11946, - 2.09854, - 2.11316, - 2.10328, - 2.10833, - 2.12354, - 2.09029, - 2.08101, - 2.08138, - 2.10166, - 2.09347, - 2.12793, - 2.11543, - 2.09397, - 2.09456, - 2.07508, - 2.08559, - 2.10014, - 2.09946, - 2.0938, - 2.10062, - 2.08581, - 2.09366, - 2.10412, - 2.09658, - 2.12119, - 2.10416, - 2.10553, - 2.10884, - 2.10399, - 2.09831, - 2.07083, - 2.10862, - 2.08491, - 2.07786, - 2.06987, - 2.10105, - 2.08836, - 2.11082, - 2.08967, - 2.096, - 2.09845, - 2.11367, - 2.0919, - 2.08398, - 2.08567, - 2.10261, - 2.08733, - 2.07127, - 2.10659, - 2.10412, - 2.08127, - 2.0879, - 2.09321, - 2.0969, - 2.1155, - 
2.09746, - 2.07711, - 2.09989, - 2.07658, - 2.08498, - 2.10385, - 2.09724, - 2.1108, - 2.09525, - 2.09183, - 2.1127, - 2.07946, - 2.09587, - 2.08618, - 2.05932, - 2.07322, - 2.09423, - 2.08995, - 2.08346, - 2.12977, - 2.08545, - 2.09628, - 2.08662, - 2.08522, - 2.09505, - 2.09735, - 2.08041, - 2.07145, - 2.11214, - 2.11189, - 2.07796, - 2.10217, - 2.08391, - 2.08151, - 2.08785, - 2.09681, - 2.07159, - 2.08265, - 2.09753, - 2.08791, - 2.10463, - 2.07866, - 2.07685, - 2.07439, - 2.12679, - 2.10319, - 2.07957, - 2.11112, - 2.09587, - 2.10383, - 2.08998, - 2.09877, - 2.08149, - 2.0726, - 2.09733, - 2.10202, - 2.05536, - 2.06957, - 2.07942, - 2.10035, - 2.07557, - 2.11221, - 2.10861, - 2.07354, - 2.08198, - 2.11816, - 2.10121, - 2.09839, - 2.08926, - 2.08913, - 2.06694, - 2.09322, - 2.12166, - 2.0856, - 2.10069, - 2.08259, - 2.088, - 2.06491, - 2.06815, - 2.05263, - 2.07064, - 2.09024, - 2.08155, - 2.07271, - 2.09329, - 2.07103, - 2.08115, - 2.09324, - 2.11059, - 2.09349, - 2.0868, - 2.09298, - 2.08033, - 2.11991, - 2.10219, - 2.08265, - 2.0745, - 2.08067, - 2.08228, - 2.07887, - 2.08947, - 2.08852, - 2.0846, - 2.10233, - 2.07347, - 2.09132, - 2.11081, - 2.07605, - 2.10372, - 2.09598, - 2.08573, - 2.06331, - 2.08668, - 2.07473, - 2.08458, - 2.08127, - 2.08422, - 2.11135, - 2.07743, - 2.08303, - 2.06754, - 2.08068, - 2.08845, - 2.07029, - 2.07641, - 2.09877, - 2.07114, - 2.06937, - 2.07108, - 2.08874, - 2.08498, - 2.08842, - 2.07386, - 2.08716, - 2.07466, - 2.07795, - 2.08073, - 2.08535, - 2.0606, - 2.09839, - 2.08545, - 2.0932, - 2.09564, - 2.08916, - 2.09524, - 2.06897, - 2.09949, - 2.06747, - 2.06616, - 2.08769, - 2.06691, - 2.08399, - 2.09025, - 2.08435, - 2.0922, - 2.08444, - 2.07771, - 2.1019, - 2.08006, - 2.10182, - 2.04187, - 2.06098, - 2.07087, - 2.08449, - 2.08222, - 2.0773, - 2.07871, - 2.06898, - 2.07074, - 2.08891, - 2.07142, - 2.0769, - 2.05867, - 2.08408, - 2.07476, - 2.08503, - 2.08507, - 2.09966, - 2.0936, - 2.08102, - 2.08051, - 2.08716, - 2.10569, - 2.04886, - 2.08287, - 2.08698, - 2.08574, - 2.08143, - 2.06543, - 2.09331, - 2.07571, - 2.08896, - 2.0924, - 2.09625, - 2.06282, - 2.07882, - 2.06549, - 2.09371, - 2.08219, - 2.07266, - 2.06664, - 2.06603, - 2.10642, - 2.07823, - 2.09126, - 2.06788, - 2.07061, - 2.06201, - 2.07877, - 2.07682, - 2.08231, - 2.08118, - 2.07654, - 2.06766, - 2.08435, - 2.05273, - 2.07367, - 2.08997, - 2.07393, - 2.10362, - 2.09741, - 2.07105, - 2.06079, - 2.08238, - 2.07444, - 2.08509, - 2.07566, - 2.08896, - 2.07058, - 2.08798, - 2.08435, - 2.06113, - 2.08116, - 2.06203, - 2.07101, - 2.06705, - 2.07565, - 2.04901, - 2.06124, - 2.06711, - 2.07743, - 2.05564, - 2.07932, - 2.09322, - 2.07225, - 2.07562, - 2.06527, - 2.0762, - 2.08281, - 2.0767, - 2.0748, - 2.07047, - 2.08225, - 2.06854, - 2.06512, - 2.0742, - 2.07513, - 2.06373, - 2.07743, - 2.08095, - 2.08841, - 2.07355, - 2.06643, - 2.07799, - 2.06675, - 2.07423, - 2.10812, - 2.06436, - 2.09897, - 2.07502, - 2.07737, - 2.04712, - 2.08047, - 2.04774, - 2.0649, - 2.09461, - 2.07892, - 2.0363, - 2.07714, - 2.05921, - 2.06925, - 2.07907, - 2.04963, - 2.09296, - 2.09086, - 2.06722, - 2.10081, - 2.09291, - 2.06089, - 2.06722, - 2.06642, - 2.09322, - 2.07335, - 2.07798, - 2.05836, - 2.07796, - 2.0808, - 2.06395, - 2.06751, - 2.05447, - 2.06104, - 2.06063, - 2.06766, - 2.06221, - 2.07257, - 2.06574, - 2.04905, - 2.03481, - 2.04832, - 2.05878, - 2.02979, - 2.07279, - 2.05071, - 2.0645, - 2.07826, - 2.07363, - 2.08398, - 2.07578, - 2.04699, - 2.06644, - 2.05969, - 2.05606, - 2.06473, - 2.04984, - 2.07189, - 
2.05034, - 2.05124, - 2.06808, - 2.06996, - 2.06724, - 2.06324, - 2.05736, - 2.06497, - 2.04036, - 2.06733, - 2.05616, - 2.07322, - 2.05645, - 2.07276, - 2.05856, - 2.07256, - 2.03945, - 2.11163, - 2.0619, - 2.08546, - 2.07413, - 2.07061, - 2.04996, - 2.06793, - 2.07484, - 2.06008, - 2.06218, - 2.09877, - 2.06978, - 2.06143, - 2.06929, - 2.06508, - 2.07316, - 2.06215, - 2.07606, - 2.08038, - 2.06814, - 2.10101, - 2.07255, - 2.05784, - 2.08767, - 2.07738, - 2.03792, - 2.04016, - 2.06784, - 2.06786, - 2.06087, - 2.05665, - 2.06969, - 2.05982, - 2.07825, - 2.06744, - 2.06036, - 2.08139, - 2.08364, - 2.05996, - 2.05479, - 2.05167, - 2.05077, - 2.05922, - 2.07963, - 2.04633, - 2.061, - 2.07461, - 2.05146, - 2.08967, - 2.0543, - 2.06519, - 2.05693, - 2.06047, - 2.09078, - 2.06547, - 2.06655, - 2.04579, - 2.07219, - 2.05517, - 2.07714, - 2.07292, - 2.05494, - 2.08399, - 2.04845, - 2.0271, - 2.07541, - 2.08763, - 2.06062, - 2.06451, - 2.04971, - 2.06807, - 2.06973, - 2.04771, - 2.07481, - 2.04728, - 2.07123, - 2.10208, - 2.07216, - 2.04981, - 2.07723, - 2.0563, - 2.08333, - 2.05147, - 2.06321, - 2.04382, - 2.02393, - 2.05965, - 2.03862, - 2.05323, - 2.08049, - 2.08626, - 2.06566, - 2.07277, - 2.05743, - 2.05562, - 2.04274, - 2.06746, - 2.03728, - 2.05617, - 2.05681, - 2.06702, - 2.04731, - 2.05774, - 2.07996, - 2.05683, - 2.04402, - 2.04403, - 2.01992, - 2.04123, - 2.06046, - 2.04875, - 2.0466, - 2.06237, - 2.04971, - 2.04946, - 2.08544, - 2.05453, - 2.0264, - 2.06103, - 2.06825, - 2.07077, - 2.06739, - 2.07046, - 2.07204, - 2.07155, - 2.04056, - 2.06434, - 2.06275, - 2.06904, - 2.06548, - 2.06135, - 2.07188, - 2.06119, - 2.06055, - 2.0949, - 2.02424, - 2.05931, - 2.04845, - 2.07085, - 2.05544, - 2.06672, - 2.07003, - 2.03386, - 2.06494, - 2.08279, - 2.06862, - 2.04196, - 2.07868, - 2.04035, - 2.06889, - 2.02584, - 2.04468, - 2.0504, - 2.0388, - 2.05739, - 2.08007, - 2.0722, - 2.03968, - 2.06537, - 2.06581, - 2.03513, - 2.06123, - 2.05413, - 2.0505, - 2.04006, - 2.04391, - 2.05829, - 2.05854, - 2.03776, - 2.0529, - 2.04568, - 2.05123, - 2.04132, - 2.07814, - 2.03212, - 2.05699, - 2.04265, - 2.05987, - 2.0619, - 2.05647, - 2.04949, - 2.04947, - 2.03799, - 2.07108, - 2.03083, - 2.0576, - 2.07711, - 2.0508, - 2.04764, - 2.06956, - 2.0506, - 2.08523, - 2.05784, - 2.07594, - 2.06797, - 2.0562, - 2.04647, - 2.06524, - 2.02976, - 2.04842, - 2.07655, - 2.05525, - 2.03493, - 2.0666, - 2.05273, - 2.05187, - 2.04375, - 2.06658, - 2.05532, - 2.06008, - 2.0566, - 2.07965, - 2.08018, - 2.04848, - 2.03559, - 2.04089, - 2.0178, - 2.04963, - 2.04755, - 2.02811, - 2.06052, - 2.04175, - 2.05502, - 2.02278, - 2.04766, - 2.06112, - 2.03887, - 2.02798, - 2.04829, - 2.06336, - 2.04651, - 2.05795, - 2.05212, - 2.06047, - 2.0286, - 2.01909, - 2.06535, - 2.05403, - 2.0821, - 2.02458, - 2.05066, - 2.06295, - 2.0543, - 2.05905, - 2.04452, - 2.06969, - 2.06715, - 2.05956, - 2.05587, - 2.06945, - 2.03875, - 2.05269, - 2.05739, - 2.05056, - 2.04221, - 2.05828, - 2.06287, - 2.0695, - 2.08111, - 2.04066, - 2.04745, - 2.04967, - 2.0342, - 2.0318, - 2.02745, - 2.05636, - 2.04144, - 2.04963, - 2.03494, - 2.0634, - 2.05987, - 2.04363, - 2.03157, - 2.04925, - 2.05193, - 2.03998, - 2.06308, - 2.06588, - 2.04694, - 2.05157, - 2.05087, - 2.04383, - 2.06034, - 2.03071, - 2.03856, - 2.05594, - 2.04312, - 2.07479, - 2.07823, - 2.02631, - 2.04821, - 2.0792, - 2.04349, - 2.06049, - 2.04056, - 2.05241, - 2.04747, - 2.05308, - 2.03352, - 2.04522, - 2.06442, - 2.04325, - 2.05879, - 2.06124, - 2.04282, - 2.04139, - 2.05254, - 2.01988, - 2.07762, 
- 2.04611, - 2.03033, - 2.05727, - 2.05424, - 2.06047, - 2.04054, - 2.05252, - 2.04745, - 2.0531, - 2.0335, - 2.04512, - 2.06421, - 2.04357, - 2.05865, - 2.06117, - 2.04304, - 2.04141, - 2.05248, - 2.02, - 2.07693, - 2.04586, - 2.03029, - 2.05742, - 2.0541, - 2.06525, - 2.06902, - 2.0432, - 2.04453, - 2.06192, - 2.04707, - 2.04869, - 2.04354, - 2.05001, - 2.03991, - 2.0685, - 2.0549, - 2.05505, - 2.04703, - 2.03358, - 2.05194, - 2.05436, - 2.06724, - 2.05656, - 2.07674, - 2.07072, - 2.03293, - 2.03157, - 2.04006, - 2.04293, - 2.05827, - 2.03175, - 2.01841, - 2.05883, - 2.04812, - 2.03408, - 2.03289, - 2.03097, - 2.0434, - 2.04684, - 2.03107, - 2.06299, - 2.04331, - 2.04469, - 2.06301, - 2.0327, - 2.06513, - 2.03301, - 2.05957, - 2.04292, - 2.02398, - 2.04747, - 2.04785, - 2.03174, - 2.02171, - 2.05919, - 2.03983, - 2.05566, - 2.04248, - 2.03221, - 2.0759, - 2.05008, - 2.0214, - 2.06179, - 2.01749, - 2.04065, - 2.02708, - 2.05848, - 2.05042, - 2.05003, - 2.07077, - 2.04236, - 2.05066, - 2.03207, - 2.03696, - 2.03066, - 2.03533, - 2.0552, - 2.04942, - 2.04416, - 2.04847, - 2.03375, - 2.05024, - 2.02224, - 2.0599, - 2.03886, - 2.06545, - 2.05957, - 2.02021, - 2.06053, - 2.02396, - 2.03988, - 2.06241, - 2.01066, - 2.04243, - 2.05078, - 2.07304, - 2.04773, - 2.06107, - 2.04046, - 2.03072, - 2.06806, - 2.0502, - 2.05373, - 2.04114, - 2.02716, - 2.05167, - 2.04071, - 2.04664, - 2.04539, - 2.04807, - 2.01564, - 2.04137, - 2.03569, - 2.06744, - 2.07131, - 2.02967, - 2.01392, - 2.06078, - 2.05455, - 2.01983, - 2.02859, - 2.05341, - 2.01784, - 2.04694, - 2.04951, - 2.04892, - 2.06394, - 2.0479, - 2.03549, - 2.01551, - 2.04039, - 2.0363, - 2.03762, - 2.0608, - 2.01959, - 2.06367, - 2.04835, - 2.04411, - 2.02332, - 2.0585, - 2.04193, - 2.0603, - 2.0682, - 2.05464, - 2.02563, - 2.04411, - 2.04524, - 2.04669, - 2.03029, - 2.0362, - 2.02253, - 2.05388, - 2.05496, - 2.06212, - 2.04333, - 2.0413, - 2.02525, - 2.00874, - 2.0428, - 2.03114, - 2.03954, - 2.0378, - 2.04635, - 2.06999, - 2.05191, - 2.04536, - 2.03394, - 2.05732, - 2.04309, - 2.03061, - 2.05865, - 2.05048, - 2.03652, - 2.03049, - 2.01085, - 2.03067, - 2.01741, - 2.02034, - 2.04522, - 2.03736, - 2.06574, - 2.02185, - 2.03204, - 2.02819, - 2.05875, - 2.03848, - 2.07065, - 2.03875, - 2.01548, - 2.06044, - 2.0509, - 2.03823, - 2.03869, - 2.04014, - 2.03673, - 2.03314, - 2.01973, - 2.05239, - 2.06154, - 2.04174, - 2.03178, - 2.02154, - 2.00685, - 2.02756, - 2.03287, - 2.0427, - 2.05606, - 2.04018, - 2.01783, - 2.02935, - 2.016, - 2.05266, - 2.03158, - 2.04107, - 2.0517, - 2.03739, - 2.02115, - 2.0316, - 2.05073, - 2.04688, - 2.04303, - 2.0674, - 2.03838, - 2.01294, - 2.04581, - 2.02689, - 2.03504, - 2.01239, - 2.02324, - 2.05401, - 2.01266, - 2.03732, - 2.02325, - 2.04265, - 2.04579, - 2.00625, - 2.03277, - 2.03646, - 2.01592, - 2.03994, - 2.01572, - 2.01955, - 2.03168, - 2.02651, - 2.04041, - 2.0268, - 2.01381, - 2.05137, - 2.03582, - 2.01582, - 2.01213, - 2.01781, - 2.04045, - 2.0411, - 2.02934, - 2.03793, - 2.02468, - 2.0318, - 2.04112, - 2.0365, - 2.04224, - 2.05205, - 2.0668, - 2.04054, - 2.02819, - 2.0254, - 2.02306, - 2.04228, - 2.02134, - 2.05392, - 2.02807, - 2.02953, - 2.05391, - 2.05151, - 2.01489, - 2.03046, - 2.03306, - 2.03355, - 2.02705, - 2.00358, - 2.04511, - 2.03331, - 2.01168, - 2.02215, - 2.03613, - 2.03859, - 2.03608, - 2.04183, - 2.01935, - 2.04378, - 2.03376, - 2.04583, - 2.07143, - 2.03132, - 2.045, - 2.01276, - 2.05921, - 2.03287, - 2.04978, - 2.02679, - 2.04721, - 2.02158, - 2.04761, - 2.02592, - 2.01646, - 2.04388, - 
2.05599, - 2.04995, - 2.01475, - 2.03737, - 2.03914, - 2.02618, - 2.01273, - 2.03062, - 2.0391, - 2.05022, - 2.02877, - 2.06806, - 2.0398, - 2.02339, - 2.02826, - 2.0283, - 2.05834, - 2.02902, - 1.99534, - 2.0505, - 2.00959, - 2.02836, - 2.00366, - 2.04647, - 2.03224, - 2.0056, - 2.04715, - 2.038, - 2.01394, - 2.02793, - 2.03377, - 2.02536, - 2.04284, - 2.03622, - 2.04047, - 2.04737, - 2.0126, - 2.04873, - 2.01303, - 2.04299, - 2.03197, - 2.02903, - 2.01212, - 2.02437, - 2.01794, - 2.02022, - 2.04984, - 2.04139, - 2.05848, - 2.03098, - 2.02086, - 2.00389, - 2.0592, - 2.01986, - 1.99799, - 2.04708, - 2.04642, - 2.05958, - 2.05049, - 2.03111, - 2.03582, - 2.02262, - 2.03563, - 2.03222, - 2.04899, - 2.02787, - 2.03317, - 2.04468, - 2.03544, - 2.01406, - 2.05183, - 2.03062, - 2.02943, - 2.03072, - 2.02441, - 2.01968, - 2.03337, - 2.01212, - 2.01679, - 2.03688, - 2.00323, - 2.05195, - 2.03035, - 2.0453, - 2.03253, - 2.05581, - 2.01793, - 2.03642, - 2.03252, - 2.0387, - 2.04706, - 2.02217, - 2.03086, - 2.02223, - 2.04418, - 2.03613, - 2.02383, - 2.02233, - 2.01692, - 2.03767, - 2.02427, - 2.01682, - 2.02529, - 2.00427, - 2.02606, - 2.03293, - 2.04867, - 2.04001, - 2.0225, - 2.03806, - 2.01906, - 2.03452, - 2.03287, - 2.00488, - 2.02604, - 2.02431, - 2.01111, - 2.0092, - 2.02263, - 2.01799, - 2.03186, - 2.02335, - 2.04214, - 2.03045, - 2.02994, - 2.01811, - 2.03178, - 2.05296, - 2.05152, - 2.00785, - 2.01546, - 2.05441, - 2.01446, - 2.00887, - 2.04831, - 2.01926, - 2.01434, - 2.02356, - 2.0183, - 2.03328, - 2.01008, - 2.02262, - 2.04957, - 2.02712, - 2.01721, - 2.04747, - 2.02184, - 2.02848, - 2.05733, - 2.03521, - 2.0195, - 2.04916, - 2.03439, - 2.02555, - 2.03685, - 2.00242, - 2.03878, - 2.04221, - 2.03542, - 2.02895, - 2.04015, - 2.02528, - 2.02639, - 2.04139, - 2.03501, - 2.0306, - 2.0051, - 2.02541, - 2.02449, - 2.02796, - 2.00731, - 2.01045, - 2.01817, - 2.04808, - 2.03134, - 2.02478, - 2.00888, - 1.99585, - 2.04413, - 2.0439, - 2.02972, - 2.04554, - 2.02551, - 2.02213, - 2.01853, - 2.0138, - 2.0115, - 2.02771, - 2.00542, - 2.04709, - 2.01674, - 2.02613, - 2.02933, - 1.99911, - 2.014, - 2.01743, - 1.99774, - 2.06495, - 2.0163, - 2.0329, - 2.03451, - 2.00671, - 2.02704, - 2.00913, - 2.00733, - 2.0169, - 2.02783, - 2.04017, - 2.0208, - 2.01728, - 2.03693, - 2.03491, - 2.00363, - 2.01592, - 2.02132, - 1.99621, - 2.01636, - 2.03577, - 2.05908, - 2.03387, - 2.00804, - 2.01834, - 2.01652, - 2.01748, - 2.02298, - 2.01874, - 2.00515, - 2.01887, - 2.04895, - 2.02251, - 2.01912, - 2.01777, - 2.02806, - 2.0269, - 2.02511, - 2.00423, - 2.0156, - 2.04654, - 2.02458, - 2.0275, - 2.01452, - 2.05435, - 1.99932, - 2.01555, - 2.00119, - 2.0053, - 2.00118, - 2.01676, - 2.03184, - 2.02566, - 2.01218, - 2.04158, - 2.01946, - 2.02495, - 2.00391, - 2.02647, - 2.04178, - 2.03745, - 2.01808, - 2.02752, - 2.03446, - 2.02934, - 2.02554, - 2.03386, - 2.03394, - 2.04926, - 2.02909, - 2.01161, - 2.03058, - 2.02171, - 2.02723, - 2.00443, - 2.03198, - 2.01503, - 2.03542, - 2.00337, - 2.02797, - 2.02077, - 2.04468, - 2.02087, - 2.03417, - 2.02033, - 1.99726, - 2.0323, - 2.02571, - 2.00141, - 2.00281, - 2.02224, - 2.01187, - 2.01136, - 1.9966, - 2.02486, - 2.0454, - 1.99753, - 2.03451, - 2.00934, - 1.99168, - 2.02524, - 1.99821, - 2.00111, - 2.03213, - 2.02918, - 2.00051, - 2.00875, - 2.01081, - 2.02113, - 1.99404, - 2.01046, - 2.01033, - 2.01276, - 2.0307, - 2.0092, - 2.00691, - 2.01202, - 2.04273, - 2.00016, - 2.01178, - 2.03478, - 2.02252, - 2.03838, - 1.99518, - 2.02079, - 2.04536, - 1.98687, - 2.02205, - 2.00979, - 
2.04894, - 2.01404, - 2.03524, - 2.00443, - 2.02494, - 2.04453, - 2.00302, - 2.04026, - 2.03446, - 2.02769, - 2.01116, - 2.03618, - 2.061, - 2.02197, - 2.02747, - 2.03101, - 2.00854, - 2.02438, - 2.05939, - 2.02841, - 2.02124, - 2.00556, - 1.99604, - 2.02265, - 2.03088, - 2.00321, - 2.03285, - 2.01809, - 1.99459, - 2.02022, - 2.0229, - 2.01434, - 2.01916, - 2.02617, - 2.02603, - 2.01054, - 2.03832, - 1.98517, - 1.99417, - 2.01887, - 2.01682, - 2.02548, - 2.00015, - 2.03368, - 2.00086, - 2.01037, - 2.01429, - 2.00769, - 2.01118, - 2.00724, - 1.99551, - 2.01562, - 2.01609, - 2.00438, - 2.00593, - 2.02104, - 1.99666, - 2.01457, - 2.02156, - 1.9999, - 2.01153, - 2.00066, - 2.01639, - 2.02296, - 2.03506, - 2.00573, - 2.02935, - 2.04206, - 1.9967, - 2.02594, - 2.01435, - 2.0098, - 1.99997, - 2.01668, - 2.01697, - 2.01821, - 2.01434, - 2.01171, - 2.0176, - 2.00208, - 1.99654, - 2.00702, - 2.04028, - 2.01667, - 2.0269, - 2.01935, - 2.00899, - 2.01318, - 2.00988, - 2.0243, - 2.02081, - 2.00014, - 2.00777, - 2.03004, - 2.03963, - 2.03199, - 2.01695, - 1.99405, - 2.02884, - 2.02228, - 2.0097, - 2.02368, - 2.00031, - 1.97936, - 2.03661, - 1.99792, - 2.01396, - 2.00069, - 2.00372, - 2.01857, - 1.99959, - 2.00549, - 2.00833, - 2.00331, - 2.01386, - 2.01692, - 2.01799, - 2.0099, - 2.01079, - 2.03109, - 2.01696, - 2.01297, - 2.02409, - 2.02104, - 2.00718, - 2.01694, - 2.03406, - 2.01178, - 2.02006, - 1.99202, - 2.03438, - 2.01452, - 2.01791, - 2.00299, - 2.02679, - 2.00163, - 1.99945, - 2.00887, - 2.00057, - 2.00117, - 2.01481, - 2.0096, - 2.01508, - 2.00965, - 2.0271, - 2.00588, - 2.01586, - 2.0164, - 1.9802, - 2.01347, - 2.00002, - 2.00323, - 2.00534, - 2.01073, - 2.02406, - 2.02117, - 2.03012, - 2.00444, - 2.02137, - 1.99835, - 2.0141, - 1.98976, - 2.00178, - 2.02313, - 1.99839, - 2.03356, - 2.00942, - 2.02542, - 2.02327, - 1.99888, - 2.0115, - 1.99114, - 2.00245, - 1.99929, - 2.0199, - 2.03375, - 2.00886, - 2.02669, - 2.00426, - 2.02167, - 2.01747, - 2.01655, - 2.02242, - 2.02559, - 2.03004, - 2.02225, - 2.00754, - 1.97787, - 2.01462, - 1.99438, - 2.00506, - 2.02177, - 2.02731, - 1.9834, - 1.99755, - 1.99039, - 1.99425, - 2.01127, - 1.99564, - 2.00543, - 2.00145, - 2.0029, - 2.02316, - 2.01676, - 2.02277, - 2.01266, - 2.02716, - 1.99984, - 2.01757, - 2.00437, - 2.02128, - 2.0105, - 1.98912, - 2.00272, - 2.00987, - 2.01566, - 2.00122, - 1.98888, - 2.02972, - 2.02648, - 2.00617, - 2.0047, - 2.00636, - 2.02052, - 1.97765, - 1.9983, - 2.01733, - 2.01399, - 1.98946, - 2.05508, - 1.98109, - 1.98817, - 1.98658, - 1.99598, - 2.02788, - 1.99796, - 1.99547, - 2.02652, - 1.98941, - 1.99852, - 1.99472, - 2.00705, - 1.98575, - 1.99383, - 2.03304, - 1.99509, - 1.98603, - 2.00891, - 1.99476, - 2.00099, - 2.00052, - 2.01095, - 1.98485, - 2.02779, - 2.01766, - 2.00527, - 2.00705, - 1.99733, - 1.99805, - 1.99989, - 2.03851, - 2.00999, - 2.00448, - 2.0579, - 2.02868, - 2.02933, - 2.01409, - 2.00733, - 1.99399, - 1.98921, - 2.02756, - 1.98632, - 1.99522, - 1.98417, - 2.03794, - 1.98576, - 2.00464, - 2.02554, - 1.99239, - 2.00178, - 2.02655, - 2.00645, - 1.99684, - 2.01606, - 2.01443, - 1.9893, - 1.99015, - 1.99984, - 1.99745, - 2.0214, - 2.00721, - 1.99406, - 2.00279, - 2.02279, - 2.01922, - 2.01888, - 1.99817, - 2.00661, - 2.00941, - 2.00641, - 2.02468, - 1.99389, - 2.02113, - 1.99036, - 1.99003, - 2.01775, - 1.97272, - 2.01412, - 2.01143, - 2.00612, - 2.0146, - 2.00421, - 1.97847, - 2.01189, - 2.00629, - 1.98394, - 1.98192, - 1.98684, - 2.02731, - 2.00926, - 1.98187, - 2.00506, - 1.99795, - 2.00851, - 1.98334, - 
1.98238, - 2.04913, - 2.01102, - 2.02372, - 2.02041, - 2.01756, - 1.99475, - 1.99402, - 1.96987, - 2.00352, - 1.98591, - 2.01374, - 2.00922, - 2.04849, - 1.99265, - 2.02093, - 2.0265, - 2.01523, - 1.98564, - 2.00247, - 1.98999, - 1.98939, - 2.01501, - 1.9914, - 2.00423, - 2.00071, - 2.02579, - 1.99256, - 1.99939, - 1.98541, - 1.99062, - 1.99484, - 2.00761, - 1.98857, - 2.0126, - 2.02232, - 2.01144, - 1.99891, - 2.00123, - 1.98839, - 2.00482, - 2.01331, - 1.9949, - 2.01185, - 1.99291, - 1.987, - 1.99669, - 2.01233, - 1.995, - 1.99357, - 1.99618, - 2.00486, - 2.00775, - 2.01924, - 2.00946, - 1.99399, - 2.00289, - 1.99571, - 1.98544, - 1.98196, - 2.01932, - 2.00375, - 2.00328, - 2.01648, - 2.00601, - 2.00308, - 1.98958, - 1.98415, - 2.02451, - 1.97622, - 1.99278, - 2.00709, - 1.9868, - 1.99317, - 2.0123, - 1.97666, - 1.97333, - 1.98052, - 1.98892, - 1.98048, - 2.02524, - 2.01807, - 1.97017, - 1.99807, - 1.9883, - 1.99095, - 2.00642, - 2.00431, - 2.01061, - 2.0326, - 2.00601, - 1.99722, - 1.99716, - 2.0085, - 2.00989, - 2.0007, - 2.00165, - 2.0141, - 1.99425, - 2.01475, - 1.9979, - 1.9876, - 2.02655, - 1.98569, - 1.98635, - 1.97076, - 1.98299, - 1.99767, - 2.0068, - 2.00752, - 2.01987, - 2.00339, - 2.01815, - 1.9816, - 1.99435, - 2.01083, - 2.01796, - 2.01531, - 2.03965, - 2.00477, - 2.01696, - 1.99056, - 1.98327, - 1.97754, - 1.99461, - 2.00059, - 2.00292, - 2.00937, - 2.02811, - 1.99617, - 1.99303, - 1.98569, - 2.00092, - 2.00718, - 2.00535, - 2.004, - 2.00416, - 2.00602, - 1.99007, - 1.98861, - 2.01652, - 1.99676, - 1.99282, - 2.01531, - 2.01286, - 2.00251, - 1.9917, - 1.98763, - 1.99212, - 2.00956, - 1.99525, - 2.01498, - 1.99689, - 2.01323, - 1.99353, - 2.00582, - 1.9922, - 2.00139, - 1.99641, - 1.99755, - 2.00076, - 2.00369, - 2.00498, - 2.00312, - 1.98471, - 2.0274, - 2.00147, - 1.9983, - 1.98119, - 2.01039, - 2.00926, - 2.00267, - 2.00749, - 2.00973, - 1.99064, - 1.98996, - 2.02164, - 1.9959, - 1.98124, - 2.00078, - 1.97757, - 1.98484, - 2.03268, - 1.99141, - 2.00327, - 1.98188, - 1.98364, - 2.01089, - 1.9924, - 2.00753, - 1.98206, - 1.98813, - 2.00954, - 1.97593, - 1.9745, - 2.01673, - 1.98959, - 2.02987, - 1.99085, - 2.02622, - 1.99347, - 2.00147, - 1.9956, - 1.99497, - 2.00223, - 2.00453, - 1.98743, - 1.98802, - 2.00409, - 2.00746, - 2.00977, - 2.00103, - 1.988, - 2.01477, - 1.99461, - 1.97404, - 1.98651, - 1.99028, - 1.99109, - 1.96326, - 1.99836, - 2.01111, - 2.01581, - 1.99938, - 1.98806, - 2.00891, - 1.99398, - 1.97624, - 1.99773, - 2.00823, - 1.99673, - 2.00302, - 1.99769, - 2.00555, - 2.03036, - 1.98132, - 1.99229, - 1.99362, - 2.0112, - 1.98501, - 1.9797, - 2.02853, - 1.98163, - 1.96786, - 2.0283, - 1.99061, - 1.99207, - 1.99668, - 1.9965, - 1.99253, - 1.98392, - 2.01956, - 2.01446, - 1.97614, - 1.98919, - 2.00085, - 1.97105, - 1.98078, - 2.00407, - 1.99237, - 1.98181, - 1.99109, - 1.97399, - 1.98097, - 1.98522, - 2.01025, - 2.01331, - 1.9859, - 1.99829, - 2.01144, - 2.00631, - 1.98287, - 1.99957, - 1.98278, - 1.9945, - 1.99219, - 2.00339, - 2.02496, - 1.98643, - 1.98436, - 1.9627, - 2.00079, - 2.00263, - 1.99184, - 1.99782, - 1.96953, - 1.98637, - 2.01861, - 1.97249, - 2.00423, - 1.99863, - 1.9702, - 1.98323, - 2.00875, - 1.98979, - 2.00072, - 2.01774, - 1.97834, - 1.99512, - 2.01396, - 1.97102, - 1.95655, - 1.99876, - 1.97568, - 1.98228, - 2.01858, - 2.01429, - 2.00076, - 1.98709, - 1.98613, - 2.01134, - 1.9852, - 1.97227, - 1.98728, - 1.98726, - 1.99978, - 1.98708, - 2.00129, - 1.98729, - 1.99865, - 1.98798, - 1.97864, - 1.98159, - 1.97724, - 1.99481, - 1.97354, - 2.00312, 
- 1.96164, - 1.97868, - 1.97595, - 1.99928, - 1.99311, - 2.01131, - 1.97432, - 1.99207, - 1.98909, - 1.99246, - 1.96602, - 1.97762, - 1.99757, - 2.00961, - 1.9767, - 1.97187, - 1.96383, - 1.99208, - 1.99792, - 1.98571, - 1.98426, - 2.0025, - 1.9886, - 1.99308, - 1.99431, - 1.97669, - 1.97736, - 1.98303, - 1.98092, - 2.00043, - 1.98022, - 2.01022, - 2.01455, - 1.99816, - 1.98871, - 1.98828, - 2.00851, - 1.96608, - 1.98804, - 1.98792, - 2.00853, - 1.98868, - 2.01477, - 1.97169, - 1.99693, - 1.98185, - 1.99157, - 2.00689, - 1.98726, - 1.97279, - 1.97607, - 1.99306, - 1.95529, - 2.01146, - 1.98777, - 1.98887, - 1.99853, - 1.98238, - 1.98201, - 2.00866, - 1.98484, - 1.97555, - 1.98664, - 1.97711, - 1.97722, - 2.00163, - 1.96501, - 1.97489, - 1.95798, - 1.99451, - 2.00438, - 1.97202, - 1.96737, - 1.98471, - 1.99732, - 1.98041, - 1.98379, - 1.98053, - 1.99641, - 1.9982, - 2.01328, - 1.98576, - 2.0032, - 1.99804, - 1.98635, - 1.9723, - 2.00564, - 2.00397, - 1.98169, - 1.99382, - 1.98857, - 1.98617, - 1.99168, - 1.97545, - 2.0027, - 2.00172, - 1.97751, - 1.98791, - 1.9923, - 1.99519, - 1.98804, - 1.9836, - 1.97195, - 1.97929, - 2.00433, - 1.98983, - 1.99124, - 1.98435, - 1.98178, - 1.9847, - 1.97866, - 1.96976, - 2.00239, - 1.95769, - 1.98415, - 1.99727, - 1.97566, - 1.98747, - 1.99506, - 1.98033, - 1.99536, - 1.99391, - 1.98904, - 1.99856, - 1.97625, - 2.00373, - 1.97841, - 1.97855, - 1.98864, - 1.9855, - 2.00417, - 1.99105, - 1.98511, - 1.98772, - 1.96643, - 2.00789, - 1.99686, - 2.0118, - 1.98208, - 1.99895, - 1.97595, - 1.98534, - 1.99223, - 2.00952, - 2.01319, - 1.98188, - 1.98363, - 1.98229, - 1.98778, - 1.97717, - 1.98371, - 1.98789, - 1.96225, - 1.9968, - 1.98601, - 1.99461, - 1.98586, - 1.99986, - 1.98264, - 1.98036, - 1.969, - 1.97158, - 1.9879, - 2.00237, - 1.99451, - 1.98611, - 1.96552, - 1.99081, - 1.99038, - 1.99089, - 2.00337, - 1.96334, - 1.983, - 1.95732, - 2.00282, - 1.99067, - 1.98402, - 1.9872, - 1.9902, - 1.9943, - 1.9717, - 2.00013, - 1.98988, - 1.99439, - 2.00095, - 1.98589, - 1.9919, - 1.98123, - 1.97352, - 1.97565, - 1.99066, - 1.9955, - 1.98609, - 2.00386, - 1.97897, - 1.99454, - 1.98226, - 1.98498, - 1.96271, - 2.00686, - 2.00453, - 1.9649, - 2.00981, - 1.97186, - 1.99293, - 1.97264, - 1.99619, - 2.02632, - 1.97267, - 1.96717, - 1.98792, - 1.99683, - 1.99289, - 1.99649, - 1.97657, - 1.97365, - 1.98683, - 1.97917, - 2.00608, - 2.01071, - 2.0069, - 2.00026, - 2.0043, - 1.99967, - 1.9832, - 1.96642, - 2.00364, - 1.97538, - 1.98045, - 1.99331, - 2.00766, - 2.01853, - 1.97273, - 2.01051, - 1.99416, - 2.00261, - 2.00741, - 1.97464, - 1.97467, - 1.97655, - 1.9756, - 1.95839, - 1.99758, - 1.97169, - 2.00909, - 2.0063, - 1.98495, - 2.00171, - 1.99286, - 1.97807, - 1.98479, - 1.9771, - 1.9943, - 1.97175, - 2.00013, - 1.98967, - 1.99431, - 2.00086, - 1.98579, - 1.99182, - 1.98115, - 1.97357, - 1.97528, - 1.99092, - 1.99548, - 1.98627, - 2.00394, - 1.97918, - 1.99447, - 1.98197, - 1.98489, - 1.96278, - 2.00684, - 2.0045, - 1.96498, - 2.00965, - 1.97172, - 1.99271, - 1.97253, - 1.99606, - 2.02626, - 1.97262, - 1.96719, - 1.98802, - 1.99651, - 1.99298, - 1.99652, - 1.97639, - 1.97329, - 1.987, - 1.97916, - 2.00615, - 2.01054, - 2.0072, - 1.9998, - 2.00422, - 1.99935, - 1.9831, - 1.96587, - 2.00294, - 1.97508, - 1.98032, - 1.99288, - 2.00712, - 2.0182, - 1.97226, - 2.01042, - 1.99371, - 2.00243, - 2.00727, - 1.97448, - 1.97464, - 1.97609, - 1.97561, - 1.95871, - 1.99913, - 1.9729, - 2.00971, - 2.00666, - 1.98505, - 1.98455, - 1.99249, - 1.97757, - 1.98489, - 1.97755, - 1.99165, - 
2.00795, - 1.97903, - 1.99561, - 1.99716, - 1.97597, - 1.98804, - 1.97229, - 1.98554, - 1.98359, - 1.96783, - 1.99351, - 1.99628, - 2.00636, - 1.97529, - 1.9645, - 1.9795, - 1.99802, - 1.98153, - 2.01646, - 2.00502, - 1.97651, - 1.96467, - 1.98538, - 1.97484, - 1.97258, - 1.99876, - 1.97798, - 1.95536, - 1.9648, - 1.9662, - 1.99113, - 1.97484, - 1.9693, - 1.9735, - 1.98358, - 1.98638, - 2.00481, - 1.98793, - 2.00433, - 1.98754, - 2.00651, - 1.97492, - 1.98932, - 1.96623, - 1.98071, - 1.99392, - 1.98575, - 1.98861, - 1.96117, - 2.00127, - 1.98909, - 1.98382, - 1.9622, - 2.00328, - 1.97404, - 1.97576, - 1.96676, - 1.97996, - 1.97118, - 1.98848, - 2.00312, - 1.97302, - 1.98437, - 1.96605, - 1.98589, - 1.97225, - 1.99622, - 1.9936, - 1.97503, - 1.99069, - 1.99038, - 1.9771, - 2.00708, - 1.96959, - 1.98315, - 1.99011, - 1.95911, - 1.98614, - 1.98645, - 2.00538, - 1.97181, - 1.98426, - 1.99817, - 1.9744, - 1.98926, - 1.95839, - 1.982, - 1.98206, - 1.97567, - 1.98474, - 1.9855, - 1.98157, - 1.9813, - 1.97829, - 1.98378, - 2.00878, - 1.98318, - 1.99073, - 1.99813, - 1.98265, - 1.97987, - 1.98524, - 1.99257, - 1.97869, - 1.98485, - 2.00174, - 1.98818, - 1.98683, - 1.9736, - 1.97434, - 1.99292, - 1.98882, - 1.96963, - 1.97404, - 1.98262, - 1.97464, - 1.98076, - 2.00526, - 1.9995, - 1.98502, - 1.99879, - 1.9635, - 1.97154, - 1.98464, - 1.9755, - 1.9701, - 1.97747, - 1.96825, - 1.97191, - 1.95972, - 1.97326, - 1.96545, - 1.99198, - 1.99267, - 1.97666, - 1.99272, - 1.98163, - 1.98814, - 1.97387, - 1.9937, - 1.99245, - 1.98775, - 1.97258, - 2.00928, - 1.98538, - 1.99269, - 1.95022, - 1.9893, - 1.97631, - 1.99963, - 1.95413, - 1.96557, - 1.99451, - 1.9618, - 1.98107, - 1.98544, - 1.97545, - 1.96815, - 2.00798, - 1.98341, - 1.96386, - 1.96991, - 1.9771, - 1.96925, - 1.98404, - 1.98587, - 1.96237, - 1.95556, - 2.01202, - 1.98558, - 1.96215, - 1.97795, - 1.96097, - 1.96226, - 1.97746, - 1.96483, - 2.0027, - 1.98065, - 1.96986, - 1.98146, - 1.95507, - 1.96814, - 1.95787, - 1.9922, - 2.00465, - 1.99461, - 1.96622, - 1.97541, - 1.9582, - 1.96199, - 1.95646, - 1.98649, - 1.97577, - 1.96806, - 1.99681, - 1.98368, - 1.97493, - 1.96493, - 1.98542, - 2.0028, - 1.98204, - 1.97053, - 1.97051, - 1.96748, - 1.95835, - 1.971, - 1.95626, - 1.98603, - 1.97422, - 2.00138, - 1.95297, - 1.97297, - 1.98101, - 1.99482, - 1.99712, - 1.96936, - 1.99282, - 1.96858, - 1.98167, - 1.97467, - 1.96191, - 1.99738, - 1.95675, - 1.9749, - 1.95954, - 1.98859, - 1.99459, - 1.99903, - 1.96739, - 1.98151, - 1.9794, - 1.97253, - 1.99918, - 1.97579, - 1.97503, - 1.96025, - 1.96986, - 1.96948, - 1.98609, - 1.97586, - 1.97815, - 1.99705, - 1.97278, - 1.95803, - 1.98839, - 1.97515, - 1.97986, - 1.98236, - 1.96523, - 1.94251, - 1.99873, - 1.98118, - 1.97671, - 1.98255, - 1.96328, - 1.98177, - 1.98727, - 2.01537, - 1.9762, - 1.98885, - 1.98333, - 1.98675, - 1.97591, - 1.98025, - 1.96073, - 1.96238, - 1.98245, - 1.9725, - 2.00569, - 1.98257, - 1.97134, - 1.96917, - 1.99463, - 1.99105, - 1.97196, - 1.98023, - 1.9641, - 1.96138, - 1.98619, - 1.98262, - 1.99244, - 1.99036, - 1.99788, - 1.98222, - 1.98048, - 1.99969, - 1.9594, - 1.9809, - 1.9755, - 1.97206, - 1.99469, - 1.98807, - 1.99204, - 1.99401, - 1.95878, - 1.99493, - 1.96649, - 1.97731, - 1.9754, - 1.9754, - 1.97617, - 1.9744, - 1.98489, - 1.96886, - 2.00684, - 1.99592, - 1.9705, - 1.93113, - 1.9588, - 1.98189, - 1.96977, - 1.97269, - 1.98538, - 2.01774, - 1.97998, - 2.00738, - 1.97844, - 1.9572, - 1.98586, - 1.97157, - 1.97045, - 1.97222, - 1.98839, - 1.9772, - 1.95744, - 1.98938, - 1.97459, - 
1.99735, - 1.95376, - 1.961, - 1.99066, - 1.95808, - 1.96907, - 1.98435, - 1.9809, - 1.97695, - 2.00311, - 1.9777, - 1.96266, - 1.97628, - 1.97564, - 1.99391, - 1.9793, - 1.94884, - 1.95541, - 1.97429, - 1.9392, - 1.99286, - 2.00065, - 1.97458, - 1.97711, - 1.9856, - 1.99472, - 1.9714, - 1.97708, - 1.97306, - 1.97078, - 1.99141, - 1.96657, - 1.97138, - 1.97852, - 1.96772, - 1.98967, - 2.00586, - 1.98355, - 1.98048, - 1.99165, - 1.99138, - 1.99213, - 1.97628, - 1.96309, - 2.0017, - 1.9599, - 1.95549, - 1.99777, - 1.96126, - 1.99871, - 1.97656, - 1.98567, - 1.9758, - 1.99049, - 1.98399, - 1.9758, - 1.97488, - 1.97796, - 1.97353, - 1.96161, - 1.96738, - 1.98444, - 1.98228, - 1.94666, - 1.97055, - 1.97462, - 1.99476, - 1.97612, - 2.00026, - 1.97502, - 1.95661, - 1.96336, - 1.98773, - 1.9851, - 1.97208, - 1.98689, - 1.97892, - 1.97377, - 1.97999, - 2.01994, - 1.98484, - 1.97806, - 1.98171, - 1.98249, - 1.97804, - 1.98512, - 1.99712, - 1.95851, - 1.97592, - 1.98949, - 1.9661, - 1.99311, - 1.98943, - 2.00002, - 1.98275, - 1.98982, - 1.96812, - 1.9881, - 1.96642, - 1.97642, - 1.96986, - 1.96485, - 1.98819, - 1.95736, - 1.98679, - 1.97612, - 1.9838, - 1.9883, - 1.97728 - ] - }, - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 25809, - "step_interval": 5, - "values": [ - 17448312832.0, - 17448214528.0, - 17448243200.0, - 17447923712.0, - 17448040448.0, - 17448124416.0, - 17448331264.0, - 17448151040.0, - 17448157184.0, - 17448271872.0, - 17448185856.0, - 17448304640.0, - 17448306688.0, - 17448359936.0, - 17448329216.0, - 17448173568.0, - 17448312832.0, - 17448181760.0, - 17448278016.0, - 17448253440.0, - 17448331264.0, - 17448394752.0, - 17448251392.0, - 17448341504.0, - 17448284160.0, - 17448210432.0, - 17448198144.0, - 17448226816.0, - 17448251392.0, - 17448212480.0, - 17448351744.0, - 17448347648.0, - 17448235008.0, - 17448189952.0, - 17448259584.0, - 17448318976.0, - 17448214528.0, - 17448271872.0, - 17448235008.0, - 17448286208.0, - 17448230912.0, - 17448288256.0, - 17448288256.0, - 17448230912.0, - 17448284160.0, - 17449197568.0, - 17448337408.0, - 17448259584.0, - 17448253440.0, - 17448259584.0, - 17448224768.0, - 17448280064.0, - 17448230912.0, - 17448224768.0, - 17448267776.0, - 17448263680.0, - 17448296448.0, - 17448230912.0, - 17448220672.0, - 17448257536.0, - 17448200192.0, - 17448306688.0, - 17448265728.0, - 17448226816.0, - 17448304640.0, - 17448230912.0, - 17448230912.0, - 17448310784.0, - 17448253440.0, - 17448253440.0, - 17448308736.0, - 17448243200.0, - 17448239104.0, - 17448294400.0, - 17448282112.0, - 17448296448.0, - 17448280064.0, - 17448251392.0, - 17448259584.0, - 17448282112.0, - 17448308736.0, - 17448294400.0, - 17448286208.0, - 17448290304.0, - 17448280064.0, - 17448288256.0, - 17448278016.0, - 17448284160.0, - 17448290304.0, - 17448308736.0, - 17448267776.0, - 17448259584.0, - 17448302592.0, - 17448284160.0, - 17448243200.0, - 17448298496.0, - 17448243200.0, - 17448286208.0, - 17448269824.0, - 17448267776.0, - 17448247296.0, - 17447884800.0, - 17447876608.0, - 17447878656.0, - 17447907328.0, - 17447874560.0, - 17447862272.0, - 17447847936.0, - 17447882752.0, - 17447886848.0, - 17447886848.0, - 17447870464.0, - 17447862272.0, - 17447862272.0, - 17447835648.0, - 17447903232.0, - 17447911424.0, - 17447843840.0, - 17447915520.0, - 17447847936.0, - 17447886848.0, - 17447897088.0, - 17447876608.0, - 17447890944.0, - 17447874560.0, - 17447892992.0, - 17447895040.0, - 17447860224.0, - 17447899136.0, - 17447892992.0, - 17447845888.0, - 17448572928.0, - 17447882752.0, - 
17447907328.0, - 17447892992.0, - 17447866368.0, - 17447903232.0, - 17447886848.0, - 17447903232.0, - 17447864320.0, - 17447866368.0, - 17447880704.0, - 17447864320.0, - 17447856128.0, - 17447874560.0, - 17447854080.0, - 17447878656.0, - 17447892992.0, - 17447874560.0, - 17447892992.0, - 17447886848.0, - 17447876608.0, - 17447870464.0, - 17447878656.0, - 17447897088.0, - 17447907328.0, - 17447890944.0, - 17447866368.0, - 17447901184.0, - 17447886848.0, - 17447886848.0, - 17447895040.0, - 17447876608.0, - 17447854080.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447890944.0, - 17447886848.0, - 17447886848.0, - 17447890944.0, - 17447868416.0, - 17447888896.0, - 17447895040.0, - 17447890944.0, - 17447870464.0, - 17447862272.0, - 17447876608.0, - 17447870464.0, - 17447870464.0, - 17447882752.0, - 17447886848.0, - 17447878656.0, - 17447876608.0, - 17447874560.0, - 17447874560.0, - 17448663040.0, - 17447874560.0, - 17447886848.0, - 17447872512.0, - 17447899136.0, - 17447907328.0, - 17447868416.0, - 17447886848.0, - 17447874560.0, - 17447858176.0, - 17447880704.0, - 17447895040.0, - 17447870464.0, - 17447868416.0, - 17447884800.0, - 17447874560.0, - 17447882752.0, - 17447890944.0, - 17447862272.0, - 17447890944.0, - 17447901184.0, - 17448677376.0, - 17447895040.0, - 17447866368.0, - 17447890944.0, - 17447870464.0, - 17447895040.0, - 17447874560.0, - 17447854080.0, - 17447870464.0, - 17447890944.0, - 17447892992.0, - 17447940096.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447880704.0, - 17447868416.0, - 17447888896.0, - 17447890944.0, - 17447890944.0, - 17447862272.0, - 17447882752.0, - 17447876608.0, - 17448890368.0, - 17448923136.0, - 17448880128.0, - 17448890368.0, - 17448894464.0, - 17448882176.0, - 17448914944.0, - 17448886272.0, - 17448892416.0, - 17448890368.0, - 17448878080.0, - 17448871936.0, - 17448890368.0, - 17448906752.0, - 17448863744.0, - 17448886272.0, - 17448894464.0, - 17448884224.0, - 17448869888.0, - 17448898560.0, - 17448890368.0, - 17448890368.0, - 17448892416.0, - 17448906752.0, - 17448871936.0, - 17448853504.0, - 17448892416.0, - 17449691136.0, - 17448900608.0, - 17448970240.0, - 17448902656.0, - 17448876032.0, - 17448873984.0, - 17448869888.0, - 17448861696.0, - 17448906752.0, - 17448904704.0, - 17448904704.0, - 17448894464.0, - 17448853504.0, - 17448845312.0, - 17448865792.0, - 17448869888.0, - 17448896512.0, - 17448886272.0, - 17448882176.0, - 17448869888.0, - 17448882176.0, - 17448894464.0, - 17448888320.0, - 17448884224.0, - 17448890368.0, - 17448902656.0, - 17448896512.0, - 17448890368.0, - 17448880128.0, - 17448898560.0, - 17448878080.0, - 17448880128.0, - 17448896512.0, - 17448888320.0, - 17448900608.0, - 17448884224.0, - 17448892416.0, - 17448906752.0, - 17448888320.0, - 17448890368.0, - 17448890368.0, - 17448873984.0, - 17448898560.0, - 17448921088.0, - 17448910848.0, - 17448898560.0, - 17448867840.0, - 17448884224.0, - 17448886272.0, - 17448894464.0, - 17448906752.0, - 17448898560.0, - 17448890368.0, - 17448886272.0, - 17448896512.0, - 17448902656.0, - 17448888320.0, - 17448888320.0, - 17448878080.0, - 17448890368.0, - 17448902656.0, - 17448890368.0, - 17448921088.0, - 17448873984.0, - 17448894464.0, - 17448878080.0, - 17448904704.0, - 17448849408.0, - 17448890368.0, - 17448890368.0, - 17448894464.0, - 17448890368.0, - 17448882176.0, - 17448900608.0, - 17448882176.0, - 17448878080.0, - 17448898560.0, - 17448902656.0, - 17448894464.0, - 17448900608.0, - 17448890368.0, - 17448882176.0, - 17448902656.0, - 17448867840.0, - 17448906752.0, - 
17448886272.0, - 17447884800.0, - 17447849984.0, - 17447870464.0, - 17447923712.0, - 17447845888.0, - 17447735296.0, - 17447874560.0, - 17447929856.0, - 17447868416.0, - 17447895040.0, - 17447890944.0, - 17447890944.0, - 17447880704.0, - 17447901184.0, - 17447888896.0, - 17447890944.0, - 17447884800.0, - 17447866368.0, - 17447899136.0, - 17448316928.0, - 17447872512.0, - 17447880704.0, - 17447897088.0, - 17447903232.0, - 17447880704.0, - 17447862272.0, - 17447884800.0, - 17447895040.0, - 17447888896.0, - 17447890944.0, - 17447876608.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447870464.0, - 17447872512.0, - 17447942144.0, - 17447886848.0, - 17447868416.0, - 17447874560.0, - 17447868416.0, - 17447878656.0, - 17447886848.0, - 17447880704.0, - 17447862272.0, - 17447888896.0, - 17447864320.0, - 17447890944.0, - 17447880704.0, - 17447892992.0, - 17447888896.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447897088.0, - 17447870464.0, - 17447878656.0, - 17447882752.0, - 17447856128.0, - 17447858176.0, - 17447899136.0, - 17447897088.0, - 17447858176.0, - 17447862272.0, - 17447864320.0, - 17447872512.0, - 17447868416.0, - 17447895040.0, - 17447880704.0, - 17447886848.0, - 17447927808.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447890944.0, - 17447872512.0, - 17447882752.0, - 17447874560.0, - 17447888896.0, - 17447874560.0, - 17447874560.0, - 17447886848.0, - 17447870464.0, - 17447884800.0, - 17447880704.0, - 17447888896.0, - 17447862272.0, - 17447895040.0, - 17447882752.0, - 17448146944.0, - 17447880704.0, - 17447872512.0, - 17447888896.0, - 17447888896.0, - 17447886848.0, - 17447890944.0, - 17447880704.0, - 17447903232.0, - 17447890944.0, - 17447874560.0, - 17447899136.0, - 17447874560.0, - 17447868416.0, - 17447901184.0, - 17447876608.0, - 17447866368.0, - 17447880704.0, - 17447874560.0, - 17447866368.0, - 17447903232.0, - 17447882752.0, - 17447862272.0, - 17447860224.0, - 17447860224.0, - 17447882752.0, - 17447895040.0, - 17447866368.0, - 17447878656.0, - 17447890944.0, - 17447870464.0, - 17447870464.0, - 17447890944.0, - 17447862272.0, - 17447884800.0, - 17447852032.0, - 17447874560.0, - 17447882752.0, - 17447895040.0, - 17447915520.0, - 17447903232.0, - 17447890944.0, - 17447862272.0, - 17447882752.0, - 17447886848.0, - 17447878656.0, - 17447895040.0, - 17447890944.0, - 17447874560.0, - 17447872512.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447792640.0, - 17447829504.0, - 17447892992.0, - 17447876608.0, - 17447870464.0, - 17447882752.0, - 17447876608.0, - 17447899136.0, - 17447858176.0, - 17447886848.0, - 17447886848.0, - 17447864320.0, - 17447862272.0, - 17447860224.0, - 17447852032.0, - 17447899136.0, - 17447845888.0, - 17447886848.0, - 17447888896.0, - 17447886848.0, - 17448161280.0, - 17447890944.0, - 17447878656.0, - 17447882752.0, - 17447872512.0, - 17447886848.0, - 17447872512.0, - 17447886848.0, - 17447886848.0, - 17447870464.0, - 17448452096.0, - 17447876608.0, - 17447892992.0, - 17447882752.0, - 17447854080.0, - 17447882752.0, - 17447888896.0, - 17447880704.0, - 17447890944.0, - 17447886848.0, - 17447872512.0, - 17447882752.0, - 17447884800.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447874560.0, - 17447888896.0, - 17447895040.0, - 17447870464.0, - 17447919616.0, - 17447888896.0, - 17447880704.0, - 17447882752.0, - 17447854080.0, - 17447899136.0, - 17447882752.0, - 17447858176.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447870464.0, - 17447886848.0, - 17447862272.0, - 17447876608.0, - 17447876608.0, - 
17447890944.0, - 17447884800.0, - 17447878656.0, - 17447905280.0, - 17447864320.0, - 17447886848.0, - 17447919616.0, - 17447888896.0, - 17447858176.0, - 17447868416.0, - 17447876608.0, - 17448615936.0, - 17447897088.0, - 17447872512.0, - 17447884800.0, - 17447868416.0, - 17447903232.0, - 17447880704.0, - 17447882752.0, - 17447872512.0, - 17447864320.0, - 17447880704.0, - 17447882752.0, - 17447868416.0, - 17447878656.0, - 17447888896.0, - 17447890944.0, - 17447890944.0, - 17447882752.0, - 17447901184.0, - 17447892992.0, - 17447890944.0, - 17447878656.0, - 17447872512.0, - 17447878656.0, - 17447884800.0, - 17447884800.0, - 17447882752.0, - 17447886848.0, - 17447882752.0, - 17447866368.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447886848.0, - 17447872512.0, - 17447911424.0, - 17447878656.0, - 17447849984.0, - 17447911424.0, - 17447854080.0, - 17447876608.0, - 17447884800.0, - 17447876608.0, - 17447880704.0, - 17447880704.0, - 17447876608.0, - 17447888896.0, - 17447864320.0, - 17447870464.0, - 17447878656.0, - 17447862272.0, - 17447876608.0, - 17447886848.0, - 17447874560.0, - 17447880704.0, - 17447878656.0, - 17447874560.0, - 17447866368.0, - 17447872512.0, - 17447878656.0, - 17447899136.0, - 17447878656.0, - 17447870464.0, - 17447862272.0, - 17447890944.0, - 17447870464.0, - 17447866368.0, - 17448325120.0, - 17447874560.0, - 17447890944.0, - 17447888896.0, - 17447892992.0, - 17447886848.0, - 17447890944.0, - 17447895040.0, - 17447895040.0, - 17447864320.0, - 17447895040.0, - 17447864320.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447862272.0, - 17447880704.0, - 17447868416.0, - 17447882752.0, - 17447870464.0, - 17447895040.0, - 17447866368.0, - 17447888896.0, - 17447872512.0, - 17447886848.0, - 17447878656.0, - 17447862272.0, - 17447856128.0, - 17447880704.0, - 17447880704.0, - 17447886848.0, - 17447862272.0, - 17447876608.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447880704.0, - 17447874560.0, - 17447868416.0, - 17447882752.0, - 17447864320.0, - 17447860224.0, - 17447882752.0, - 17447874560.0, - 17447858176.0, - 17447888896.0, - 17447872512.0, - 17447886848.0, - 17447845888.0, - 17448595456.0, - 17448609792.0, - 17448605696.0, - 17448591360.0, - 17448609792.0, - 17448603648.0, - 17448595456.0, - 17448615936.0, - 17448593408.0, - 17448611840.0, - 17448617984.0, - 17448599552.0, - 17448601600.0, - 17448622080.0, - 17448607744.0, - 17448611840.0, - 17448611840.0, - 17448611840.0, - 17448620032.0, - 17448599552.0, - 17448601600.0, - 17448603648.0, - 17448628224.0, - 17448611840.0, - 17448607744.0, - 17448611840.0, - 17448609792.0, - 17448607744.0, - 17448605696.0, - 17448574976.0, - 17448615936.0, - 17448607744.0, - 17448617984.0, - 17448628224.0, - 17448611840.0, - 17448615936.0, - 17448609792.0, - 17448587264.0, - 17448603648.0, - 17448624128.0, - 17448611840.0, - 17448615936.0, - 17448617984.0, - 17448620032.0, - 17448601600.0, - 17448624128.0, - 17448595456.0, - 17448611840.0, - 17448620032.0, - 17448605696.0, - 17448581120.0, - 17448605696.0, - 17448591360.0, - 17448607744.0, - 17449242624.0, - 17448583168.0, - 17448615936.0, - 17448607744.0, - 17448617984.0, - 17448589312.0, - 17448591360.0, - 17448603648.0, - 17448624128.0, - 17448609792.0, - 17448654848.0, - 17448609792.0, - 17448601600.0, - 17448615936.0, - 17448607744.0, - 17448622080.0, - 17448630272.0, - 17448615936.0, - 17448620032.0, - 17448562688.0, - 17448544256.0, - 17448611840.0, - 17448603648.0, - 17448611840.0, - 17448609792.0, - 
17448617984.0, - 17448630272.0, - 17448605696.0, - 17448599552.0, - 17448615936.0, - 17448615936.0, - 17448626176.0, - 17448615936.0, - 17448599552.0, - 17448611840.0, - 17448628224.0, - 17448603648.0, - 17448624128.0, - 17448611840.0, - 17448597504.0, - 17448607744.0, - 17448603648.0, - 17448613888.0, - 17448591360.0, - 17448615936.0, - 17448603648.0, - 17448624128.0, - 17448620032.0, - 17448617984.0, - 17448595456.0, - 17448601600.0, - 17448605696.0, - 17448613888.0, - 17448599552.0, - 17448609792.0, - 17448624128.0, - 17448622080.0, - 17448601600.0, - 17448605696.0, - 17447880704.0, - 17447874560.0, - 17447890944.0, - 17447890944.0, - 17447849984.0, - 17447856128.0, - 17447903232.0, - 17447874560.0, - 17447884800.0, - 17447874560.0, - 17447868416.0, - 17447868416.0, - 17447878656.0, - 17447872512.0, - 17447866368.0, - 17447858176.0, - 17447874560.0, - 17447884800.0, - 17447882752.0, - 17447890944.0, - 17447876608.0, - 17447870464.0, - 17447884800.0, - 17447886848.0, - 17447870464.0, - 17447890944.0, - 17447895040.0, - 17447886848.0, - 17447878656.0, - 17447862272.0, - 17447890944.0, - 17447874560.0, - 17447876608.0, - 17447880704.0, - 17447890944.0, - 17447895040.0, - 17447874560.0, - 17447852032.0, - 17447892992.0, - 17447878656.0, - 17447874560.0, - 17447878656.0, - 17447866368.0, - 17447870464.0, - 17447892992.0, - 17447874560.0, - 17447866368.0, - 17447870464.0, - 17447872512.0, - 17447890944.0, - 17447880704.0, - 17447870464.0, - 17447882752.0, - 17447872512.0, - 17447880704.0, - 17447874560.0, - 17447888896.0, - 17447884800.0, - 17447874560.0, - 17447866368.0, - 17447886848.0, - 17447888896.0, - 17447872512.0, - 17447878656.0, - 17447878656.0, - 17447880704.0, - 17447862272.0, - 17447866368.0, - 17447878656.0, - 17447858176.0, - 17447890944.0, - 17447876608.0, - 17447866368.0, - 17447874560.0, - 17447892992.0, - 17447864320.0, - 17447876608.0, - 17447888896.0, - 17447882752.0, - 17447886848.0, - 17447872512.0, - 17447991296.0, - 17447878656.0, - 17447890944.0, - 17447882752.0, - 17447890944.0, - 17447880704.0, - 17447880704.0, - 17447874560.0, - 17447876608.0, - 17447870464.0, - 17447876608.0, - 17447890944.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447882752.0, - 17447874560.0, - 17447890944.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447862272.0, - 17447886848.0, - 17447870464.0, - 17447880704.0, - 17447862272.0, - 17447874560.0, - 17447868416.0, - 17447880704.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447888896.0, - 17447895040.0, - 17447872512.0, - 17447872512.0, - 17447895040.0, - 17447868416.0, - 17447878656.0, - 17447872512.0, - 17447886848.0, - 17447880704.0, - 17447890944.0, - 17447872512.0, - 17447874560.0, - 17447895040.0, - 17447858176.0, - 17447899136.0, - 17448153088.0, - 17447874560.0, - 17447886848.0, - 17447866368.0, - 17447895040.0, - 17447872512.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447868416.0, - 17447886848.0, - 17447878656.0, - 17447870464.0, - 17447870464.0, - 17447876608.0, - 17447870464.0, - 17448894464.0, - 17448910848.0, - 17448882176.0, - 17448910848.0, - 17448894464.0, - 17448886272.0, - 17448902656.0, - 17448876032.0, - 17448910848.0, - 17448890368.0, - 17448906752.0, - 17448884224.0, - 17448902656.0, - 17448886272.0, - 17448900608.0, - 17448894464.0, - 17448882176.0, - 17448890368.0, - 17448892416.0, - 17448900608.0, - 17448894464.0, - 17448902656.0, - 17448892416.0, - 17448910848.0, - 17448894464.0, - 17448882176.0, - 17448890368.0, - 17448890368.0, - 17449883648.0, - 
17448886272.0, - 17448908800.0, - 17448900608.0, - 17448898560.0, - 17448894464.0, - 17448894464.0, - 17448894464.0, - 17448882176.0, - 17448894464.0, - 17448910848.0, - 17448888320.0, - 17448898560.0, - 17448896512.0, - 17448896512.0, - 17448910848.0, - 17448886272.0, - 17448902656.0, - 17448906752.0, - 17448884224.0, - 17448906752.0, - 17448892416.0, - 17448894464.0, - 17448890368.0, - 17448904704.0, - 17448890368.0, - 17448894464.0, - 17448890368.0, - 17448900608.0, - 17448896512.0, - 17448894464.0, - 17448892416.0, - 17448890368.0, - 17448898560.0, - 17448878080.0, - 17448890368.0, - 17448892416.0, - 17448898560.0, - 17448873984.0, - 17448894464.0, - 17448886272.0, - 17448878080.0, - 17448894464.0, - 17448906752.0, - 17448888320.0, - 17448871936.0, - 17448904704.0, - 17448894464.0, - 17448898560.0, - 17448898560.0, - 17448892416.0, - 17448906752.0, - 17448896512.0, - 17448902656.0, - 17448894464.0, - 17449725952.0, - 17448894464.0, - 17448892416.0, - 17448896512.0, - 17448910848.0, - 17448888320.0, - 17448884224.0, - 17448878080.0, - 17448898560.0, - 17448884224.0, - 17448890368.0, - 17448898560.0, - 17448900608.0, - 17448882176.0, - 17448892416.0, - 17448904704.0, - 17448892416.0, - 17448894464.0, - 17448892416.0, - 17448900608.0, - 17448902656.0, - 17448910848.0, - 17448880128.0, - 17448906752.0, - 17448890368.0, - 17448906752.0, - 17448896512.0, - 17448890368.0, - 17448902656.0, - 17448900608.0, - 17448906752.0, - 17447888896.0, - 17447872512.0, - 17447888896.0, - 17447880704.0, - 17447878656.0, - 17447878656.0, - 17447888896.0, - 17447870464.0, - 17447878656.0, - 17447872512.0, - 17447878656.0, - 17447866368.0, - 17447880704.0, - 17447880704.0, - 17447880704.0, - 17447876608.0, - 17447868416.0, - 17447878656.0, - 17447895040.0, - 17447872512.0, - 17447888896.0, - 17447866368.0, - 17447878656.0, - 17447882752.0, - 17447884800.0, - 17447874560.0, - 17447862272.0, - 17447874560.0, - 17447880704.0, - 17447862272.0, - 17447878656.0, - 17447890944.0, - 17447874560.0, - 17447876608.0, - 17447890944.0, - 17447886848.0, - 17447884800.0, - 17447876608.0, - 17447870464.0, - 17447892992.0, - 17447886848.0, - 17447884800.0, - 17447866368.0, - 17447874560.0, - 17447874560.0, - 17447884800.0, - 17447892992.0, - 17447878656.0, - 17447870464.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447897088.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447858176.0, - 17447874560.0, - 17447890944.0, - 17447874560.0, - 17447901184.0, - 17448857600.0, - 17447874560.0, - 17447872512.0, - 17447878656.0, - 17447911424.0, - 17447878656.0, - 17447890944.0, - 17447876608.0, - 17447874560.0, - 17447868416.0, - 17447876608.0, - 17447874560.0, - 17447862272.0, - 17447870464.0, - 17447888896.0, - 17447884800.0, - 17447886848.0, - 17447874560.0, - 17447874560.0, - 17447892992.0, - 17447878656.0, - 17447888896.0, - 17447880704.0, - 17447878656.0, - 17447880704.0, - 17447870464.0, - 17447886848.0, - 17447876608.0, - 17447884800.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447872512.0, - 17447866368.0, - 17447895040.0, - 17447874560.0, - 17447876608.0, - 17447874560.0, - 17447878656.0, - 17447882752.0, - 17447884800.0, - 17447870464.0, - 17447884800.0, - 17447884800.0, - 17447892992.0, - 17447888896.0, - 17447870464.0, - 17447870464.0, - 17447880704.0, - 17447878656.0, - 17447876608.0, - 17447874560.0, - 17447864320.0, - 17447890944.0, - 17447876608.0, - 17447884800.0, - 17447872512.0, - 17447884800.0, - 17447874560.0, - 17447872512.0, - 
17447878656.0, - 17447882752.0, - 17447876608.0, - 17447882752.0, - 17447878656.0, - 17447884800.0, - 17447870464.0, - 17447872512.0, - 17447892992.0, - 17447886848.0, - 17447878656.0, - 17447888896.0, - 17447870464.0, - 17447882752.0, - 17447903232.0, - 17447882752.0, - 17447886848.0, - 17447868416.0, - 17447886848.0, - 17447872512.0, - 17447888896.0, - 17447872512.0, - 17447876608.0, - 17447878656.0, - 17447888896.0, - 17447868416.0, - 17447895040.0, - 17447876608.0, - 17447870464.0, - 17447882752.0, - 17447876608.0, - 17447874560.0, - 17447868416.0, - 17447870464.0, - 17447882752.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447870464.0, - 17447874560.0, - 17447899136.0, - 17447876608.0, - 17447878656.0, - 17447876608.0, - 17447880704.0, - 17447880704.0, - 17447878656.0, - 17447878656.0, - 17447897088.0, - 17447880704.0, - 17447882752.0, - 17447874560.0, - 17447872512.0, - 17447876608.0, - 17447870464.0, - 17447886848.0, - 17447872512.0, - 17447880704.0, - 17447878656.0, - 17447882752.0, - 17447884800.0, - 17447874560.0, - 17447886848.0, - 17447874560.0, - 17447876608.0, - 17447878656.0, - 17448779776.0, - 17447890944.0, - 17447866368.0, - 17447870464.0, - 17447874560.0, - 17447987200.0, - 17447878656.0, - 17447895040.0, - 17447874560.0, - 17447886848.0, - 17447866368.0, - 17447884800.0, - 17447895040.0, - 17447884800.0, - 17447888896.0, - 17447874560.0, - 17447880704.0, - 17447868416.0, - 17447895040.0, - 17447880704.0, - 17447872512.0, - 17447852032.0, - 17447890944.0, - 17447890944.0, - 17447868416.0, - 17447892992.0, - 17447876608.0, - 17447890944.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447895040.0, - 17447888896.0, - 17447874560.0, - 17447886848.0, - 17447878656.0, - 17447886848.0, - 17447870464.0, - 17447890944.0, - 17447874560.0, - 17447862272.0, - 17447880704.0, - 17447886848.0, - 17447890944.0, - 17447890944.0, - 17447880704.0, - 17447884800.0, - 17447890944.0, - 17447886848.0, - 17447862272.0, - 17447882752.0, - 17447876608.0, - 17447874560.0, - 17447880704.0, - 17447882752.0, - 17447880704.0, - 17447878656.0, - 17447895040.0, - 17447876608.0, - 17447866368.0, - 17447886848.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447866368.0, - 17447886848.0, - 17447886848.0, - 17447884800.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447897088.0, - 17447897088.0, - 17447876608.0, - 17447901184.0, - 17447890944.0, - 17447866368.0, - 17447874560.0, - 17447862272.0, - 17447890944.0, - 17447878656.0, - 17447870464.0, - 17447878656.0, - 17447876608.0, - 17447870464.0, - 17447880704.0, - 17447876608.0, - 17447888896.0, - 17447882752.0, - 17447899136.0, - 17447870464.0, - 17447876608.0, - 17447882752.0, - 17447866368.0, - 17447878656.0, - 17447868416.0, - 17447886848.0, - 17447870464.0, - 17447890944.0, - 17447880704.0, - 17447874560.0, - 17447878656.0, - 17447886848.0, - 17447876608.0, - 17447880704.0, - 17447880704.0, - 17447876608.0, - 17447880704.0, - 17447882752.0, - 17447880704.0, - 17447882752.0, - 17447897088.0, - 17447874560.0, - 17447878656.0, - 17447870464.0, - 17447880704.0, - 17447864320.0, - 17447872512.0, - 17447876608.0, - 17447878656.0, - 17447878656.0, - 17447884800.0, - 17447890944.0, - 17447870464.0, - 17447874560.0, - 17447890944.0, - 17447882752.0, - 17447868416.0, - 17447876608.0, - 17447870464.0, - 17447864320.0, - 17447870464.0, - 17447880704.0, - 17447880704.0, - 17447862272.0, - 17447892992.0, - 17447870464.0, - 17447872512.0, - 17447884800.0, - 17447878656.0, - 17447878656.0, - 
17447874560.0, - 17447882752.0, - 17447874560.0, - 17447870464.0, - 17447890944.0, - 17447997440.0, - 17447997440.0, - 17448005632.0, - 17448007680.0, - 17448001536.0, - 17448013824.0, - 17448017920.0, - 17447997440.0, - 17448005632.0, - 17448019968.0, - 17447989248.0, - 17448001536.0, - 17448017920.0, - 17447985152.0, - 17448003584.0, - 17447991296.0, - 17448003584.0, - 17447997440.0, - 17448009728.0, - 17448009728.0, - 17447997440.0, - 17448001536.0, - 17448007680.0, - 17447983104.0, - 17448017920.0, - 17448001536.0, - 17448007680.0, - 17448005632.0, - 17448005632.0, - 17447999488.0, - 17448003584.0, - 17448009728.0, - 17448005632.0, - 17448009728.0, - 17448003584.0, - 17447993344.0, - 17448011776.0, - 17448001536.0, - 17448017920.0, - 17448007680.0, - 17448019968.0, - 17448009728.0, - 17447995392.0, - 17447997440.0, - 17448005632.0, - 17448052736.0, - 17448017920.0, - 17447985152.0, - 17447999488.0, - 17447997440.0, - 17448013824.0, - 17447993344.0, - 17447997440.0, - 17448017920.0, - 17447995392.0, - 17447993344.0, - 17448022016.0, - 17447997440.0, - 17448005632.0, - 17447993344.0, - 17448001536.0, - 17448009728.0, - 17448011776.0, - 17448009728.0, - 17448005632.0, - 17448005632.0, - 17448007680.0, - 17447987200.0, - 17447999488.0, - 17447993344.0, - 17448011776.0, - 17448005632.0, - 17447995392.0, - 17448001536.0, - 17447989248.0, - 17448005632.0, - 17448228864.0, - 17448007680.0, - 17447999488.0, - 17448001536.0, - 17447997440.0, - 17448007680.0, - 17447999488.0, - 17447985152.0, - 17448005632.0, - 17447995392.0, - 17448013824.0, - 17448003584.0, - 17448013824.0, - 17447995392.0, - 17447991296.0, - 17448017920.0, - 17448009728.0, - 17447989248.0, - 17448001536.0, - 17448007680.0, - 17447976960.0, - 17448009728.0, - 17448017920.0, - 17448001536.0, - 17448001536.0, - 17448005632.0, - 17448007680.0, - 17448007680.0, - 17448005632.0, - 17448005632.0, - 17448005632.0, - 17447997440.0, - 17448005632.0, - 17448009728.0, - 17448007680.0, - 17448017920.0, - 17448005632.0, - 17448009728.0, - 17448122368.0, - 17448122368.0, - 17448114176.0, - 17448110080.0, - 17448114176.0, - 17448132608.0, - 17448122368.0, - 17448112128.0, - 17448103936.0, - 17448110080.0, - 17448118272.0, - 17448118272.0, - 17448118272.0, - 17448103936.0, - 17448124416.0, - 17448134656.0, - 17448120320.0, - 17448114176.0, - 17448118272.0, - 17448103936.0, - 17448134656.0, - 17448128512.0, - 17448116224.0, - 17448120320.0, - 17448118272.0, - 17448120320.0, - 17448120320.0, - 17448116224.0, - 17448120320.0, - 17448118272.0, - 17448118272.0, - 17448108032.0, - 17448112128.0, - 17448116224.0, - 17448140800.0, - 17448110080.0, - 17448116224.0, - 17448118272.0, - 17448128512.0, - 17448091648.0, - 17448128512.0, - 17448116224.0, - 17448118272.0, - 17448112128.0, - 17448105984.0, - 17448120320.0, - 17448128512.0, - 17448114176.0, - 17448116224.0, - 17448128512.0, - 17448108032.0, - 17448116224.0, - 17448124416.0, - 17448103936.0, - 17448097792.0, - 17448122368.0, - 17448116224.0, - 17448112128.0, - 17448122368.0, - 17448114176.0, - 17448130560.0, - 17448636416.0, - 17448116224.0, - 17448120320.0, - 17448134656.0, - 17448116224.0, - 17448108032.0, - 17448128512.0, - 17448116224.0, - 17448120320.0, - 17448120320.0, - 17448108032.0, - 17448130560.0, - 17448122368.0, - 17448118272.0, - 17448124416.0, - 17448114176.0, - 17448116224.0, - 17448116224.0, - 17448128512.0, - 17448118272.0, - 17448099840.0, - 17448114176.0, - 17448116224.0, - 17448112128.0, - 17448118272.0, - 17448112128.0, - 17448116224.0, - 17448116224.0, - 17448126464.0, - 
17448112128.0, - 17448112128.0, - 17448120320.0, - 17448118272.0, - 17448120320.0, - 17448132608.0, - 17448103936.0, - 17448116224.0, - 17448124416.0, - 17448118272.0, - 17448112128.0, - 17448132608.0, - 17448118272.0, - 17448116224.0, - 17448108032.0, - 17448114176.0, - 17448120320.0, - 17448122368.0, - 17448114176.0, - 17448126464.0, - 17448114176.0, - 17448114176.0, - 17448124416.0, - 17447862272.0, - 17447880704.0, - 17447876608.0, - 17447880704.0, - 17447872512.0, - 17447884800.0, - 17447864320.0, - 17447895040.0, - 17447876608.0, - 17447866368.0, - 17447886848.0, - 17447880704.0, - 17447874560.0, - 17447862272.0, - 17447870464.0, - 17447868416.0, - 17447864320.0, - 17447876608.0, - 17447858176.0, - 17447870464.0, - 17447866368.0, - 17447870464.0, - 17447890944.0, - 17447895040.0, - 17447876608.0, - 17447884800.0, - 17447872512.0, - 17447870464.0, - 17447878656.0, - 17447892992.0, - 17447870464.0, - 17447872512.0, - 17447878656.0, - 17447880704.0, - 17447890944.0, - 17447888896.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447876608.0, - 17447884800.0, - 17447868416.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447876608.0, - 17447878656.0, - 17447878656.0, - 17448871936.0, - 17447880704.0, - 17447880704.0, - 17447866368.0, - 17447886848.0, - 17447876608.0, - 17447882752.0, - 17447876608.0, - 17447886848.0, - 17447886848.0, - 17447882752.0, - 17447886848.0, - 17447886848.0, - 17447876608.0, - 17447866368.0, - 17447874560.0, - 17447884800.0, - 17447882752.0, - 17447882752.0, - 17447890944.0, - 17447858176.0, - 17447895040.0, - 17447872512.0, - 17447874560.0, - 17447886848.0, - 17447878656.0, - 17447886848.0, - 17447870464.0, - 17447876608.0, - 17447882752.0, - 17447880704.0, - 17447870464.0, - 17447866368.0, - 17447874560.0, - 17447897088.0, - 17447874560.0, - 17447897088.0, - 17447880704.0, - 17447874560.0, - 17447895040.0, - 17447878656.0, - 17447895040.0, - 17447866368.0, - 17447880704.0, - 17447876608.0, - 17447876608.0, - 17447882752.0, - 17447876608.0, - 17447872512.0, - 17447874560.0, - 17447876608.0, - 17448566784.0, - 17447866368.0, - 17447874560.0, - 17447886848.0, - 17448607744.0, - 17447886848.0, - 17447872512.0, - 17447862272.0, - 17447884800.0, - 17447876608.0, - 17447890944.0, - 17447890944.0, - 17447868416.0, - 17447895040.0, - 17447882752.0, - 17447864320.0, - 17447890944.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447895040.0, - 17447886848.0, - 17447872512.0, - 17447874560.0, - 17447886848.0, - 17447862272.0, - 17447884800.0, - 17447874560.0, - 17447882752.0, - 17447866368.0, - 17447919616.0, - 17447876608.0, - 17447886848.0, - 17447923712.0, - 17447880704.0, - 17447892992.0, - 17447878656.0, - 17447878656.0, - 17447884800.0, - 17447884800.0, - 17447878656.0, - 17447884800.0, - 17447876608.0, - 17447880704.0, - 17447874560.0, - 17447888896.0, - 17447870464.0, - 17447886848.0, - 17447868416.0, - 17447884800.0, - 17447880704.0, - 17447884800.0, - 17447868416.0, - 17447872512.0, - 17447890944.0, - 17447870464.0, - 17447874560.0, - 17447874560.0, - 17447890944.0, - 17447880704.0, - 17447886848.0, - 17447878656.0, - 17447870464.0, - 17447876608.0, - 17447880704.0, - 17447895040.0, - 17447849984.0, - 17447876608.0, - 17447876608.0, - 17447876608.0, - 17447890944.0, - 17447878656.0, - 17447874560.0, - 17447858176.0, - 17447948288.0, - 17447870464.0, - 17447870464.0, - 17447876608.0, - 17447874560.0, - 17447880704.0, - 17448407040.0, - 17447874560.0, - 17447890944.0, - 
17447870464.0, - 17447878656.0, - 17447868416.0, - 17447874560.0, - 17447874560.0, - 17447899136.0, - 17447880704.0, - 17447878656.0, - 17447888896.0, - 17447882752.0, - 17447866368.0, - 17447882752.0, - 17447878656.0, - 17447870464.0, - 17447888896.0, - 17447870464.0, - 17447882752.0, - 17447872512.0, - 17447854080.0, - 17447892992.0, - 17447886848.0, - 17447903232.0, - 17447878656.0, - 17447888896.0, - 17447876608.0, - 17447862272.0, - 17447884800.0, - 17447874560.0, - 17447882752.0, - 17447890944.0, - 17447872512.0, - 17447888896.0, - 17447884800.0, - 17447886848.0, - 17447870464.0, - 17447886848.0, - 17447868416.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447872512.0, - 17447876608.0, - 17447890944.0, - 17447870464.0, - 17447872512.0, - 17447868416.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447886848.0, - 17447868416.0, - 17447872512.0, - 17447878656.0, - 17447897088.0, - 17447854080.0, - 17447866368.0, - 17447870464.0, - 17447874560.0, - 17447892992.0, - 17447874560.0, - 17447866368.0, - 17447874560.0, - 17447905280.0, - 17447866368.0, - 17447878656.0, - 17447878656.0, - 17447872512.0, - 17447878656.0, - 17448136704.0, - 17447882752.0, - 17447884800.0, - 17447866368.0, - 17447884800.0, - 17447866368.0, - 17447866368.0, - 17447878656.0, - 17447892992.0, - 17447872512.0, - 17447882752.0, - 17447886848.0, - 17447872512.0, - 17447866368.0, - 17447868416.0, - 17447884800.0, - 17447878656.0, - 17447878656.0, - 17447860224.0, - 17447892992.0, - 17448552448.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447886848.0, - 17447874560.0, - 17448427520.0, - 17447872512.0, - 17447872512.0, - 17447870464.0, - 17447870464.0, - 17447872512.0, - 17447899136.0, - 17447880704.0, - 17447882752.0, - 17447888896.0, - 17447870464.0, - 17447880704.0, - 17447862272.0, - 17447884800.0, - 17447884800.0, - 17447886848.0, - 17448183808.0, - 17447864320.0, - 17447882752.0, - 17447895040.0, - 17447878656.0, - 17447882752.0, - 17447886848.0, - 17447882752.0, - 17447874560.0, - 17447892992.0, - 17447866368.0, - 17447880704.0, - 17447860224.0, - 17447882752.0, - 17447870464.0, - 17447878656.0, - 17447876608.0, - 17447878656.0, - 17447876608.0, - 17447868416.0, - 17447888896.0, - 17447868416.0, - 17447878656.0, - 17447876608.0, - 17447882752.0, - 17447866368.0, - 17447897088.0, - 17447888896.0, - 17447890944.0, - 17447880704.0, - 17447886848.0, - 17447862272.0, - 17447892992.0, - 17447874560.0, - 17447880704.0, - 17447874560.0, - 17447886848.0, - 17447878656.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447892992.0, - 17447874560.0, - 17447872512.0, - 17447874560.0, - 17447888896.0, - 17447886848.0, - 17447886848.0, - 17447882752.0, - 17447878656.0, - 17447864320.0, - 17447892992.0, - 17447878656.0, - 17447878656.0, - 17447892992.0, - 17447872512.0, - 17447862272.0, - 17447886848.0, - 17447872512.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447888896.0, - 17447874560.0, - 17447866368.0, - 17447866368.0, - 17447874560.0, - 17447866368.0, - 17447895040.0, - 17447882752.0, - 17447882752.0, - 17447895040.0, - 17447878656.0, - 17447876608.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447895040.0, - 17447882752.0, - 17448458240.0, - 17447884800.0, - 17447886848.0, - 17447874560.0, - 17447876608.0, - 17447874560.0, - 17447882752.0, - 17447884800.0, - 17447884800.0, - 17447882752.0, - 17447880704.0, - 17447878656.0, - 17447886848.0, - 17447872512.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447884800.0, - 
17447876608.0, - 17447874560.0, - 17447888896.0, - 17447878656.0, - 17447870464.0, - 17447876608.0, - 17447872512.0, - 17447874560.0, - 17447872512.0, - 17447866368.0, - 17447874560.0, - 17447870464.0, - 17447882752.0, - 17447886848.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447880704.0, - 17447878656.0, - 17447876608.0, - 17447876608.0, - 17447872512.0, - 17447884800.0, - 17447882752.0, - 17447876608.0, - 17447870464.0, - 17447886848.0, - 17447868416.0, - 17447901184.0, - 17447886848.0, - 17447886848.0, - 17447878656.0, - 17447874560.0, - 17447886848.0, - 17447880704.0, - 17447868416.0, - 17447890944.0, - 17447878656.0, - 17447874560.0, - 17447874560.0, - 17447876608.0, - 17447872512.0, - 17447878656.0, - 17447892992.0, - 17447864320.0, - 17447880704.0, - 17447892992.0, - 17447870464.0, - 17447884800.0, - 17447874560.0, - 17447876608.0, - 17447876608.0, - 17447892992.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447890944.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17447886848.0, - 17447876608.0, - 17447858176.0, - 17447868416.0, - 17447866368.0, - 17447874560.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17447884800.0, - 17447874560.0, - 17447872512.0, - 17447884800.0, - 17447890944.0, - 17447886848.0, - 17447874560.0, - 17447882752.0, - 17447895040.0, - 17447862272.0, - 17447868416.0, - 17447864320.0, - 17448421376.0, - 17447876608.0, - 17447876608.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447880704.0, - 17447897088.0, - 17447880704.0, - 17447874560.0, - 17447890944.0, - 17447880704.0, - 17447899136.0, - 17448837120.0, - 17447870464.0, - 17447890944.0, - 17447856128.0, - 17447890944.0, - 17447878656.0, - 17447886848.0, - 17447874560.0, - 17447878656.0, - 17447868416.0, - 17447876608.0, - 17447888896.0, - 17447882752.0, - 17447872512.0, - 17447880704.0, - 17447907328.0, - 17447876608.0, - 17447886848.0, - 17447878656.0, - 17447876608.0, - 17447874560.0, - 17447892992.0, - 17447886848.0, - 17447878656.0, - 17447874560.0, - 17447892992.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447874560.0, - 17447854080.0, - 17447862272.0, - 17447882752.0, - 17447878656.0, - 17447882752.0, - 17447876608.0, - 17447856128.0, - 17447866368.0, - 17447890944.0, - 17447880704.0, - 17447872512.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447890944.0, - 17447878656.0, - 17447849984.0, - 17447878656.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447882752.0, - 17447870464.0, - 17447895040.0, - 17447878656.0, - 17447899136.0, - 17447895040.0, - 17447872512.0, - 17447880704.0, - 17447874560.0, - 17447886848.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447866368.0, - 17447878656.0, - 17447888896.0, - 17447874560.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447884800.0, - 17447884800.0, - 17447866368.0, - 17447895040.0, - 17447991296.0, - 17447886848.0, - 17447888896.0, - 17447866368.0, - 17447872512.0, - 17447884800.0, - 17448570880.0, - 17447890944.0, - 17447884800.0, - 17447874560.0, - 17447880704.0, - 17447890944.0, - 17447882752.0, - 17447868416.0, - 17447880704.0, - 17447882752.0, - 17447886848.0, - 17447880704.0, - 17447892992.0, - 17447886848.0, - 17447890944.0, - 17447874560.0, - 17447880704.0, - 17447874560.0, - 17447876608.0, - 17447870464.0, - 17447886848.0, - 17447870464.0, - 17447882752.0, - 17447884800.0, - 17447892992.0, - 17447880704.0, - 17447882752.0, - 17447890944.0, - 17447882752.0, - 17447882752.0, - 
17447882752.0, - 17447876608.0, - 17447876608.0, - 17447874560.0, - 17447878656.0, - 17447870464.0, - 17447870464.0, - 17447870464.0, - 17447892992.0, - 17447876608.0, - 17447878656.0, - 17447870464.0, - 17447878656.0, - 17447880704.0, - 17447870464.0, - 17447890944.0, - 17447888896.0, - 17447872512.0, - 17447878656.0, - 17447884800.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447888896.0, - 17447874560.0, - 17447866368.0, - 17447876608.0, - 17447868416.0, - 17447886848.0, - 17447872512.0, - 17447870464.0, - 17447878656.0, - 17447878656.0, - 17447886848.0, - 17447860224.0, - 17447874560.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447884800.0, - 17448579072.0, - 17447886848.0, - 17447874560.0, - 17447876608.0, - 17447886848.0, - 17447886848.0, - 17447872512.0, - 17447878656.0, - 17447886848.0, - 17447870464.0, - 17447874560.0, - 17447878656.0, - 17447874560.0, - 17447868416.0, - 17447888896.0, - 17447886848.0, - 17447866368.0, - 17447886848.0, - 17447884800.0, - 17447858176.0, - 17447878656.0, - 17447880704.0, - 17448126464.0, - 17447878656.0, - 17447890944.0, - 17447880704.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447870464.0, - 17447872512.0, - 17447892992.0, - 17447878656.0, - 17447868416.0, - 17447888896.0, - 17447884800.0, - 17447882752.0, - 17447858176.0, - 17447892992.0, - 17447882752.0, - 17448316928.0, - 17447882752.0, - 17447864320.0, - 17447876608.0, - 17447880704.0, - 17447874560.0, - 17447864320.0, - 17447876608.0, - 17447874560.0, - 17447872512.0, - 17447882752.0, - 17447892992.0, - 17447890944.0, - 17447880704.0, - 17447892992.0, - 17447870464.0, - 17447874560.0, - 17447870464.0, - 17447870464.0, - 17447888896.0, - 17447878656.0, - 17447876608.0, - 17447866368.0, - 17447862272.0, - 17447884800.0, - 17447890944.0, - 17447864320.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447866368.0, - 17447870464.0, - 17447886848.0, - 17447878656.0, - 17447880704.0, - 17447880704.0, - 17447878656.0, - 17447860224.0, - 17447874560.0, - 17447868416.0, - 17447876608.0, - 17447886848.0, - 17447874560.0, - 17447886848.0, - 17447878656.0, - 17447864320.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447866368.0, - 17447888896.0, - 17447876608.0, - 17447874560.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447884800.0, - 17447878656.0, - 17447874560.0, - 17447874560.0, - 17447876608.0, - 17447880704.0, - 17447870464.0, - 17447876608.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447884800.0, - 17447897088.0, - 17447874560.0, - 17447860224.0, - 17447903232.0, - 17447899136.0, - 17447921664.0, - 17447915520.0, - 17447905280.0, - 17447901184.0, - 17447903232.0, - 17447905280.0, - 17447899136.0, - 17447919616.0, - 17447911424.0, - 17447903232.0, - 17447886848.0, - 17447915520.0, - 17447903232.0, - 17447890944.0, - 17447913472.0, - 17447890944.0, - 17447909376.0, - 17447913472.0, - 17447905280.0, - 17447911424.0, - 17447909376.0, - 17447903232.0, - 17447913472.0, - 17447897088.0, - 17447907328.0, - 17447911424.0, - 17447901184.0, - 17447903232.0, - 17447909376.0, - 17447899136.0, - 17447911424.0, - 17447897088.0, - 17447915520.0, - 17447899136.0, - 17447911424.0, - 17447899136.0, - 17447907328.0, - 17447907328.0, - 17447911424.0, - 17447911424.0, - 17447903232.0, - 17447915520.0, - 17447919616.0, - 17447903232.0, - 17447895040.0, - 17447911424.0, - 17447915520.0, - 17447899136.0, - 17447899136.0, - 17447911424.0, - 17447907328.0, - 
17447905280.0, - 17447909376.0, - 17447915520.0, - 17447905280.0, - 17447892992.0, - 17447925760.0, - 17447913472.0, - 17447907328.0, - 17448826880.0, - 17447892992.0, - 17447901184.0, - 17447921664.0, - 17447907328.0, - 17447915520.0, - 17447903232.0, - 17447919616.0, - 17447909376.0, - 17447921664.0, - 17447899136.0, - 17447895040.0, - 17447909376.0, - 17447903232.0, - 17447913472.0, - 17447919616.0, - 17447917568.0, - 17447905280.0, - 17447905280.0, - 17447913472.0, - 17447899136.0, - 17447911424.0, - 17447909376.0, - 17447915520.0, - 17447913472.0, - 17447905280.0, - 17447909376.0, - 17447897088.0, - 17447909376.0, - 17447890944.0, - 17447899136.0, - 17447919616.0, - 17447913472.0, - 17447913472.0, - 17447915520.0, - 17447919616.0, - 17447913472.0, - 17447901184.0, - 17447895040.0, - 17447903232.0, - 17447899136.0, - 17447892992.0, - 17447909376.0, - 17447909376.0, - 17447905280.0, - 17447903232.0, - 17447909376.0, - 17447907328.0, - 17447909376.0, - 17447895040.0, - 17447919616.0, - 17447907328.0, - 17447868416.0, - 17447870464.0, - 17447868416.0, - 17447870464.0, - 17447864320.0, - 17447874560.0, - 17447878656.0, - 17447876608.0, - 17447876608.0, - 17447874560.0, - 17447876608.0, - 17447888896.0, - 17447866368.0, - 17447876608.0, - 17447874560.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447868416.0, - 17447886848.0, - 17447862272.0, - 17447888896.0, - 17447882752.0, - 17447884800.0, - 17447886848.0, - 17447880704.0, - 17447897088.0, - 17447882752.0, - 17447882752.0, - 17447878656.0, - 17447874560.0, - 17447872512.0, - 17447888896.0, - 17447884800.0, - 17447876608.0, - 17447882752.0, - 17447890944.0, - 17447876608.0, - 17447886848.0, - 17447895040.0, - 17447876608.0, - 17447884800.0, - 17447870464.0, - 17447886848.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447882752.0, - 17447866368.0, - 17447886848.0, - 17447890944.0, - 17447868416.0, - 17447876608.0, - 17447882752.0, - 17448462336.0, - 17447886848.0, - 17447868416.0, - 17447864320.0, - 17447882752.0, - 17447890944.0, - 17447878656.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447878656.0, - 17447862272.0, - 17447874560.0, - 17447882752.0, - 17447864320.0, - 17447886848.0, - 17447874560.0, - 17447882752.0, - 17447886848.0, - 17447878656.0, - 17447870464.0, - 17447866368.0, - 17447882752.0, - 17447882752.0, - 17447866368.0, - 17447892992.0, - 17447890944.0, - 17447886848.0, - 17447882752.0, - 17447901184.0, - 17447862272.0, - 17447876608.0, - 17447878656.0, - 17447870464.0, - 17447878656.0, - 17447874560.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17448341504.0, - 17447870464.0, - 17447872512.0, - 17447882752.0, - 17447876608.0, - 17447901184.0, - 17447868416.0, - 17447888896.0, - 17447892992.0, - 17447868416.0, - 17447878656.0, - 17447899136.0, - 17447878656.0, - 17447880704.0, - 17447870464.0, - 17447868416.0, - 17447874560.0, - 17447882752.0, - 17447862272.0, - 17447886848.0, - 17447882752.0, - 17447899136.0, - 17447874560.0, - 17447866368.0, - 17447878656.0, - 17447878656.0, - 17447880704.0, - 17447870464.0, - 17447862272.0, - 17447884800.0, - 17447876608.0, - 17447876608.0, - 17447886848.0, - 17447884800.0, - 17447882752.0, - 17447874560.0, - 17447876608.0, - 17447878656.0, - 17448806400.0, - 17448820736.0, - 17448804352.0, - 17448808448.0, - 17448816640.0, - 17448816640.0, - 17448835072.0, - 17448810496.0, - 17448826880.0, - 17448804352.0, - 17448812544.0, - 17448814592.0, - 17448806400.0, - 17448826880.0, - 17448824832.0, - 17448798208.0, - 
17448814592.0, - 17448816640.0, - 17448804352.0, - 17448818688.0, - 17448816640.0, - 17448810496.0, - 17448820736.0, - 17448822784.0, - 17448806400.0, - 17448794112.0, - 17448794112.0, - 17448828928.0, - 17448808448.0, - 17448802304.0, - 17448800256.0, - 17448820736.0, - 17448816640.0, - 17448808448.0, - 17448808448.0, - 17448812544.0, - 17448804352.0, - 17448796160.0, - 17448822784.0, - 17448818688.0, - 17448833024.0, - 17448804352.0, - 17448796160.0, - 17448800256.0, - 17448802304.0, - 17448820736.0, - 17448806400.0, - 17448814592.0, - 17449668608.0, - 17448792064.0, - 17448816640.0, - 17448808448.0, - 17448792064.0, - 17448804352.0, - 17448820736.0, - 17448812544.0, - 17448812544.0, - 17448806400.0, - 17448808448.0, - 17448814592.0, - 17448820736.0, - 17448816640.0, - 17448802304.0, - 17448802304.0, - 17448810496.0, - 17448812544.0, - 17448808448.0, - 17448802304.0, - 17448824832.0, - 17448806400.0, - 17448802304.0, - 17449644032.0, - 17448826880.0, - 17448808448.0, - 17448794112.0, - 17448820736.0, - 17448812544.0, - 17448808448.0, - 17448800256.0, - 17448814592.0, - 17448810496.0, - 17448810496.0, - 17448808448.0, - 17448814592.0, - 17448824832.0, - 17448804352.0, - 17448808448.0, - 17448806400.0, - 17448802304.0, - 17448804352.0, - 17448816640.0, - 17448804352.0, - 17448812544.0, - 17448810496.0, - 17448810496.0, - 17448812544.0, - 17448792064.0, - 17448816640.0, - 17448796160.0, - 17448816640.0, - 17448800256.0, - 17448812544.0, - 17448816640.0, - 17448812544.0, - 17448816640.0, - 17448816640.0, - 17448814592.0, - 17448792064.0, - 17448816640.0, - 17447880704.0, - 17447888896.0, - 17447882752.0, - 17447852032.0, - 17447882752.0, - 17447874560.0, - 17447888896.0, - 17447880704.0, - 17447866368.0, - 17448683520.0, - 17447882752.0, - 17447880704.0, - 17447878656.0, - 17447866368.0, - 17447874560.0, - 17447866368.0, - 17447882752.0, - 17447884800.0, - 17447876608.0, - 17447866368.0, - 17447856128.0, - 17447888896.0, - 17447897088.0, - 17447878656.0, - 17447864320.0, - 17447888896.0, - 17447882752.0, - 17447872512.0, - 17447880704.0, - 17447880704.0, - 17447890944.0, - 17447870464.0, - 17447872512.0, - 17447878656.0, - 17447866368.0, - 17447886848.0, - 17447892992.0, - 17447878656.0, - 17447872512.0, - 17447866368.0, - 17447874560.0, - 17447864320.0, - 17448878080.0, - 17447870464.0, - 17447882752.0, - 17447878656.0, - 17447864320.0, - 17447880704.0, - 17447884800.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447886848.0, - 17447866368.0, - 17447876608.0, - 17447872512.0, - 17447886848.0, - 17447858176.0, - 17447874560.0, - 17447886848.0, - 17447892992.0, - 17447868416.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447866368.0, - 17447866368.0, - 17447880704.0, - 17447876608.0, - 17447878656.0, - 17447886848.0, - 17447901184.0, - 17447882752.0, - 17447878656.0, - 17447884800.0, - 17447892992.0, - 17447874560.0, - 17447880704.0, - 17447874560.0, - 17447872512.0, - 17447886848.0, - 17447880704.0, - 17447866368.0, - 17447886848.0, - 17447862272.0, - 17447880704.0, - 17447884800.0, - 17447874560.0, - 17447890944.0, - 17447880704.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447880704.0, - 17447884800.0, - 17447897088.0, - 17447878656.0, - 17447872512.0, - 17447845888.0, - 17447870464.0, - 17447876608.0, - 17447882752.0, - 17447880704.0, - 17447866368.0, - 17447886848.0, - 17447862272.0, - 17447886848.0, - 17447882752.0, - 17447880704.0, - 17447882752.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447890944.0, - 
17447866368.0, - 17447880704.0, - 17447862272.0, - 17447868416.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17447862272.0, - 17447876608.0, - 17447882752.0, - 17447880704.0, - 17447872512.0, - 17447888896.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447864320.0, - 17447872512.0, - 17447882752.0, - 17447874560.0, - 17447884800.0, - 17447882752.0, - 17447876608.0, - 17447874560.0, - 17447886848.0, - 17447886848.0, - 17447878656.0, - 17447878656.0, - 17447868416.0, - 17447862272.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447864320.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17447874560.0, - 17447872512.0, - 17447888896.0, - 17447874560.0, - 17447870464.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447868416.0, - 17447880704.0, - 17447878656.0, - 17448001536.0, - 17447868416.0, - 17447874560.0, - 17447884800.0, - 17447870464.0, - 17447884800.0, - 17447895040.0, - 17447892992.0, - 17447870464.0, - 17447872512.0, - 17447870464.0, - 17447866368.0, - 17447886848.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447886848.0, - 17447872512.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17447868416.0, - 17447878656.0, - 17447886848.0, - 17447876608.0, - 17447911424.0, - 17447884800.0, - 17447876608.0, - 17447888896.0, - 17447880704.0, - 17447880704.0, - 17447882752.0, - 17447882752.0, - 17447878656.0, - 17447870464.0, - 17447874560.0, - 17447886848.0, - 17447868416.0, - 17447874560.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447862272.0, - 17447888896.0, - 17447874560.0, - 17447886848.0, - 17448714240.0, - 17447895040.0, - 17447880704.0, - 17447878656.0, - 17447884800.0, - 17447864320.0, - 17448050688.0, - 17447882752.0, - 17447886848.0, - 17447876608.0, - 17447866368.0, - 17447882752.0, - 17447895040.0, - 17447866368.0, - 17447890944.0, - 17447880704.0, - 17447890944.0, - 17447872512.0, - 17447878656.0, - 17447880704.0, - 17447882752.0, - 17447870464.0, - 17447892992.0, - 17447888896.0, - 17447880704.0, - 17447882752.0, - 17447884800.0, - 17447880704.0, - 17447882752.0, - 17447888896.0, - 17447888896.0, - 17447890944.0, - 17447878656.0, - 17447886848.0, - 17447886848.0, - 17447870464.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447872512.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447880704.0, - 17447884800.0, - 17447872512.0, - 17447882752.0, - 17447874560.0, - 17447884800.0, - 17447876608.0, - 17447895040.0, - 17447874560.0, - 17447872512.0, - 17447880704.0, - 17447882752.0, - 17447882752.0, - 17447890944.0, - 17447892992.0, - 17447878656.0, - 17447876608.0, - 17447870464.0, - 17447866368.0, - 17447876608.0, - 17447882752.0, - 17447872512.0, - 17447878656.0, - 17447872512.0, - 17447895040.0, - 17447882752.0, - 17447876608.0, - 17447874560.0, - 17447888896.0, - 17447884800.0, - 17447880704.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447874560.0, - 17447876608.0, - 17447888896.0, - 17447866368.0, - 17447880704.0, - 17447895040.0, - 17447884800.0, - 17447872512.0, - 17447884800.0, - 17447874560.0, - 17447876608.0, - 17447876608.0, - 17447874560.0, - 17447876608.0, - 17447897088.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447866368.0, - 17447897088.0, - 17447870464.0, - 17447862272.0, - 17447890944.0, - 17447874560.0, - 17447886848.0, - 17447864320.0, - 17447888896.0, - 17447882752.0, - 17447882752.0, - 17447890944.0, - 17447886848.0, - 17447876608.0, - 17447890944.0, - 17447854080.0, - 
17447878656.0, - 17447870464.0, - 17447888896.0, - 17447884800.0, - 17447878656.0, - 17447884800.0, - 17447854080.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447876608.0, - 17447882752.0, - 17447872512.0, - 17447878656.0, - 17447870464.0, - 17447874560.0, - 17447886848.0, - 17447890944.0, - 17447882752.0, - 17447878656.0, - 17447866368.0, - 17447878656.0, - 17447866368.0, - 17447884800.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447876608.0, - 17447868416.0, - 17447882752.0, - 17447882752.0, - 17447876608.0, - 17447876608.0, - 17447968768.0, - 17447892992.0, - 17447882752.0, - 17447862272.0, - 17447878656.0, - 17447878656.0, - 17447862272.0, - 17447886848.0, - 17447868416.0, - 17447876608.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447872512.0, - 17447878656.0, - 17447868416.0, - 17447884800.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447880704.0, - 17447886848.0, - 17447882752.0, - 17447866368.0, - 17447880704.0, - 17447886848.0, - 17447884800.0, - 17447878656.0, - 17447890944.0, - 17447884800.0, - 17447880704.0, - 17447890944.0, - 17447874560.0, - 17447876608.0, - 17447880704.0, - 17447886848.0, - 17447884800.0, - 17447866368.0, - 17447882752.0, - 17447874560.0, - 17447862272.0, - 17447878656.0, - 17447878656.0, - 17447882752.0, - 17447864320.0, - 17447890944.0, - 17447890944.0, - 17447874560.0, - 17447878656.0, - 17447880704.0, - 17447878656.0, - 17447880704.0, - 17447862272.0, - 17447882752.0, - 17447878656.0, - 17447884800.0, - 17447882752.0, - 17447884800.0, - 17447886848.0, - 17447882752.0, - 17447870464.0, - 17447880704.0, - 17447884800.0, - 17447878656.0, - 17447878656.0, - 17447895040.0, - 17447884800.0, - 17447880704.0, - 17447866368.0, - 17447880704.0, - 17447882752.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447876608.0, - 17448185856.0, - 17447886848.0, - 17447858176.0, - 17447870464.0, - 17447890944.0, - 17447864320.0, - 17447864320.0, - 17447876608.0, - 17447874560.0, - 17447882752.0, - 17447882752.0, - 17447876608.0, - 17447882752.0, - 17447876608.0, - 17447890944.0, - 17447876608.0, - 17447882752.0, - 17447878656.0, - 17447872512.0, - 17447886848.0, - 17447870464.0, - 17447868416.0, - 17447882752.0, - 17447874560.0, - 17447860224.0, - 17447868416.0, - 17447878656.0, - 17447866368.0, - 17447892992.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447895040.0, - 17447880704.0, - 17447866368.0, - 17447874560.0, - 17447897088.0, - 17447868416.0, - 17447901184.0, - 17447880704.0, - 17447862272.0, - 17447874560.0, - 17447886848.0, - 17447876608.0, - 17447872512.0, - 17447878656.0, - 17447882752.0, - 17447886848.0, - 17447882752.0, - 17447876608.0, - 17447874560.0, - 17447880704.0, - 17447868416.0, - 17447876608.0, - 17447874560.0, - 17447878656.0, - 17447888896.0, - 17447866368.0, - 17447876608.0, - 17447886848.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447899136.0, - 17447884800.0, - 17447878656.0, - 17447870464.0, - 17447860224.0, - 17447878656.0, - 17447886848.0, - 17447870464.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447878656.0, - 17447888896.0, - 17447876608.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447872512.0, - 17447870464.0, - 17447872512.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17447878656.0, - 17447860224.0, - 17447886848.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447872512.0, - 17447878656.0, - 
17447882752.0, - 17447876608.0, - 17447886848.0, - 17447882752.0, - 17447868416.0, - 17447874560.0, - 17447890944.0, - 17447866368.0, - 17447882752.0, - 17447872512.0, - 17447874560.0, - 17447868416.0, - 17447886848.0, - 17447876608.0, - 17447870464.0, - 17447874560.0, - 17447882752.0, - 17447886848.0, - 17447868416.0, - 17447878656.0, - 17447866368.0, - 17447876608.0, - 17447878656.0, - 17447868416.0, - 17447874560.0, - 17447862272.0, - 17447864320.0, - 17447862272.0, - 17447864320.0, - 17447884800.0, - 17447872512.0, - 17447886848.0, - 17447880704.0, - 17447876608.0, - 17447868416.0, - 17447874560.0, - 17448923136.0, - 17447866368.0, - 17447874560.0, - 17447878656.0, - 17447890944.0, - 17447888896.0, - 17447876608.0, - 17447884800.0, - 17447897088.0, - 17447876608.0, - 17447868416.0, - 17447888896.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17448142848.0, - 17447884800.0, - 17447874560.0, - 17447874560.0, - 17447884800.0, - 17447878656.0, - 17447897088.0, - 17447895040.0, - 17448318976.0, - 17447899136.0, - 17447886848.0, - 17447895040.0, - 17447890944.0, - 17447886848.0, - 17447888896.0, - 17447882752.0, - 17447890944.0, - 17447907328.0, - 17447884800.0, - 17447890944.0, - 17447882752.0, - 17447886848.0, - 17447895040.0, - 17447874560.0, - 17447880704.0, - 17447888896.0, - 17447895040.0, - 17447895040.0, - 17447903232.0, - 17447868416.0, - 17447892992.0, - 17447888896.0, - 17447890944.0, - 17448044544.0, - 17447890944.0, - 17447897088.0, - 17447886848.0, - 17447890944.0, - 17447907328.0, - 17447876608.0, - 17447892992.0, - 17447882752.0, - 17447880704.0, - 17447899136.0, - 17447888896.0, - 17447882752.0, - 17447907328.0, - 17447892992.0, - 17447911424.0, - 17447895040.0, - 17448478720.0, - 17447882752.0, - 17447899136.0, - 17447878656.0, - 17447880704.0, - 17447903232.0, - 17447892992.0, - 17447901184.0, - 17447895040.0, - 17447882752.0, - 17447899136.0, - 17447899136.0, - 17447888896.0, - 17447890944.0, - 17447886848.0, - 17447899136.0, - 17447880704.0, - 17447878656.0, - 17447876608.0, - 17447892992.0, - 17447895040.0, - 17447890944.0, - 17447892992.0, - 17447905280.0, - 17447888896.0, - 17447892992.0, - 17447890944.0, - 17447890944.0, - 17447888896.0, - 17447907328.0, - 17447899136.0, - 17447897088.0, - 17447890944.0, - 17447886848.0, - 17447886848.0, - 17447903232.0, - 17447899136.0, - 17447888896.0, - 17447897088.0, - 17447895040.0, - 17447892992.0, - 17447884800.0, - 17447890944.0, - 17447897088.0, - 17447876608.0, - 17447907328.0, - 17447882752.0, - 17447903232.0, - 17447903232.0, - 17447907328.0, - 17447888896.0, - 17447890944.0, - 17447876608.0, - 17447886848.0, - 17447882752.0, - 17447897088.0, - 17447895040.0, - 17447890944.0, - 17447895040.0, - 17447890944.0, - 17447878656.0, - 17447901184.0, - 17447903232.0, - 17447888896.0, - 17447884800.0, - 17447886848.0, - 17447888896.0, - 17447890944.0, - 17447895040.0, - 17447888896.0, - 17447913472.0, - 17448865792.0, - 17448259584.0, - 17448257536.0, - 17448278016.0, - 17448267776.0, - 17448269824.0, - 17448263680.0, - 17448278016.0, - 17448269824.0, - 17448278016.0, - 17448275968.0, - 17448271872.0, - 17448280064.0, - 17448259584.0, - 17448261632.0, - 17448284160.0, - 17448263680.0, - 17448259584.0, - 17448275968.0, - 17448271872.0, - 17448261632.0, - 17448267776.0, - 17448259584.0, - 17448284160.0, - 17448267776.0, - 17448280064.0, - 17448269824.0, - 17448462336.0, - 17448275968.0, - 17448263680.0, - 17448271872.0, - 17448280064.0, - 17448284160.0, - 17448286208.0, - 17448267776.0, - 17448271872.0, - 17448257536.0, - 
17448275968.0, - 17448267776.0, - 17448267776.0, - 17448263680.0, - 17448271872.0, - 17448269824.0, - 17448282112.0, - 17448280064.0, - 17448280064.0, - 17448271872.0, - 17448267776.0, - 17448282112.0, - 17448275968.0, - 17448269824.0, - 17448267776.0, - 17448273920.0, - 17448278016.0, - 17448267776.0, - 17448275968.0, - 17448271872.0, - 17448280064.0, - 17448265728.0, - 17448273920.0, - 17448269824.0, - 17448265728.0, - 17448267776.0, - 17448265728.0, - 17448265728.0, - 17448275968.0, - 17448269824.0, - 17448263680.0, - 17448261632.0, - 17448267776.0, - 17448267776.0, - 17448269824.0, - 17448271872.0, - 17448271872.0, - 17448275968.0, - 17448284160.0, - 17448263680.0, - 17448275968.0, - 17448271872.0, - 17448280064.0, - 17448273920.0, - 17448282112.0, - 17448292352.0, - 17448271872.0, - 17448255488.0, - 17448269824.0, - 17448280064.0, - 17448263680.0, - 17448275968.0, - 17448278016.0, - 17448271872.0, - 17448255488.0, - 17448282112.0, - 17448280064.0, - 17448284160.0, - 17448265728.0, - 17448280064.0, - 17448261632.0, - 17448255488.0, - 17448263680.0, - 17448275968.0, - 17448280064.0, - 17448280064.0, - 17448273920.0, - 17448265728.0, - 17448271872.0, - 17448273920.0, - 17448280064.0, - 17448296448.0, - 17448280064.0, - 17448275968.0, - 17448261632.0, - 17448251392.0, - 17448247296.0, - 17448263680.0, - 17447874560.0, - 17447874560.0, - 17447880704.0, - 17447876608.0, - 17447874560.0, - 17447862272.0, - 17447884800.0, - 17447878656.0, - 17447886848.0, - 17447864320.0, - 17447876608.0, - 17447888896.0, - 17447876608.0, - 17447868416.0, - 17447872512.0, - 17447888896.0, - 17447882752.0, - 17447878656.0, - 17447872512.0, - 17447899136.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447864320.0, - 17447882752.0, - 17447874560.0, - 17447890944.0, - 17447874560.0, - 17447890944.0, - 17447872512.0, - 17447878656.0, - 17447890944.0, - 17447866368.0, - 17447872512.0, - 17447882752.0, - 17447876608.0, - 17447876608.0, - 17447872512.0, - 17447892992.0, - 17447880704.0, - 17447870464.0, - 17447888896.0, - 17447874560.0, - 17447858176.0, - 17447890944.0, - 17447878656.0, - 17447872512.0, - 17447884800.0, - 17447866368.0, - 17447880704.0, - 17448083456.0, - 17447870464.0, - 17447882752.0, - 17448239104.0, - 17447872512.0, - 17447870464.0, - 17447880704.0, - 17447884800.0, - 17447895040.0, - 17447866368.0, - 17447884800.0, - 17447862272.0, - 17447878656.0, - 17447876608.0, - 17447874560.0, - 17447882752.0, - 17447884800.0, - 17447880704.0, - 17447876608.0, - 17447890944.0, - 17447878656.0, - 17447874560.0, - 17447890944.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447886848.0, - 17447876608.0, - 17447880704.0, - 17447874560.0, - 17447874560.0, - 17447876608.0, - 17447880704.0, - 17447882752.0, - 17447870464.0, - 17447876608.0, - 17447862272.0, - 17447870464.0, - 17447868416.0, - 17447876608.0, - 17447886848.0, - 17447880704.0, - 17447882752.0, - 17447868416.0, - 17447876608.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447860224.0, - 17447876608.0, - 17447864320.0, - 17447884800.0, - 17447874560.0, - 17447878656.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447870464.0, - 17447888896.0, - 17447880704.0, - 17447874560.0, - 17447866368.0, - 17447890944.0, - 17447864320.0, - 17447878656.0, - 17447858176.0, - 17447878656.0, - 17447872512.0, - 17447876608.0, - 17447880704.0, - 17447876608.0, - 17447882752.0, - 17447872512.0, - 17447884800.0, - 17447886848.0, - 17447870464.0, - 17447870464.0, - 17447882752.0, - 17447866368.0, - 17447886848.0, - 
17447878656.0, - 17447870464.0, - 17447890944.0, - 17447876608.0, - 17447880704.0, - 17447870464.0, - 17447884800.0, - 17447886848.0, - 17447884800.0, - 17447882752.0, - 17447880704.0, - 17447872512.0, - 17447886848.0, - 17447866368.0, - 17447864320.0, - 17447870464.0, - 17447878656.0, - 17447886848.0, - 17447886848.0, - 17447886848.0, - 17447870464.0, - 17447874560.0, - 17447870464.0, - 17448024064.0, - 17447890944.0, - 17447878656.0, - 17447884800.0, - 17447874560.0, - 17447882752.0, - 17447862272.0, - 17447860224.0, - 17447868416.0, - 17447890944.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447874560.0, - 17447874560.0, - 17447872512.0, - 17447874560.0, - 17447874560.0, - 17447880704.0, - 17447878656.0, - 17447874560.0, - 17447884800.0, - 17447874560.0, - 17447878656.0, - 17447895040.0, - 17447870464.0, - 17447874560.0, - 17447886848.0, - 17447888896.0, - 17447878656.0, - 17447870464.0, - 17447880704.0, - 17447880704.0, - 17447876608.0, - 17447870464.0, - 17447878656.0, - 17447890944.0, - 17447880704.0, - 17447862272.0, - 17447878656.0, - 17447888896.0, - 17447882752.0, - 17447864320.0, - 17447874560.0, - 17447882752.0, - 17447868416.0, - 17447892992.0, - 17447876608.0, - 17447878656.0, - 17447886848.0, - 17447866368.0, - 17447868416.0, - 17447874560.0, - 17447874560.0, - 17447882752.0, - 17447878656.0, - 17447870464.0, - 17447903232.0, - 17447874560.0, - 17447890944.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447878656.0, - 17447884800.0, - 17447876608.0, - 17447882752.0, - 17447876608.0, - 17447890944.0, - 17447872512.0, - 17447874560.0, - 17447882752.0, - 17447890944.0, - 17447874560.0, - 17447888896.0, - 17447890944.0, - 17447860224.0, - 17447862272.0, - 17447884800.0, - 17447864320.0, - 17447890944.0, - 17447878656.0, - 17447862272.0, - 17448318976.0, - 17447886848.0, - 17447892992.0, - 17447876608.0, - 17447862272.0, - 17447872512.0, - 17447870464.0, - 17447890944.0, - 17447876608.0, - 17447872512.0, - 17447868416.0, - 17447872512.0, - 17447880704.0, - 17447882752.0, - 17447886848.0, - 17447882752.0, - 17447866368.0, - 17447874560.0, - 17447874560.0, - 17447874560.0, - 17447892992.0, - 17448849408.0, - 17447882752.0, - 17447874560.0, - 17447895040.0, - 17447876608.0, - 17447880704.0, - 17447892992.0, - 17447882752.0, - 17447862272.0, - 17447882752.0, - 17447876608.0, - 17447886848.0, - 17447888896.0, - 17447884800.0, - 17447878656.0, - 17447866368.0, - 17447884800.0, - 17447882752.0, - 17447876608.0, - 17447897088.0, - 17447895040.0, - 17447858176.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447886848.0, - 17447884800.0, - 17447890944.0, - 17447884800.0, - 17447870464.0, - 17447862272.0, - 17447876608.0, - 17447886848.0, - 17447884800.0, - 17447880704.0, - 17447870464.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447882752.0, - 17447880704.0, - 17448357888.0, - 17447876608.0, - 17447874560.0, - 17447878656.0, - 17447874560.0, - 17447878656.0, - 17447884800.0, - 17447876608.0, - 17447874560.0, - 17447882752.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447870464.0, - 17447884800.0, - 17447868416.0, - 17447874560.0, - 17447901184.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17447895040.0, - 17447876608.0, - 17447880704.0, - 17447872512.0, - 17448165376.0, - 17447876608.0, - 17448275968.0, - 17447872512.0, - 17447878656.0, - 17447880704.0, - 17447882752.0, - 17447892992.0, - 17447874560.0, - 17447874560.0, - 17447880704.0, - 17447888896.0, - 17447880704.0, - 17447876608.0, - 17447882752.0, - 17447884800.0, - 
17447872512.0, - 17447876608.0, - 17447874560.0, - 17447880704.0, - 17448116224.0, - 17447888896.0, - 17447907328.0, - 17447872512.0, - 17447895040.0, - 17447872512.0, - 17447862272.0, - 17447876608.0, - 17447870464.0, - 17447874560.0, - 17447882752.0, - 17447878656.0, - 17448624128.0, - 17448597504.0, - 17447878656.0, - 17447884800.0, - 17447886848.0, - 17447874560.0, - 17447862272.0, - 17447876608.0, - 17447878656.0, - 17447872512.0, - 17447876608.0, - 17447884800.0, - 17447886848.0, - 17447880704.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447878656.0, - 17447890944.0, - 17447878656.0, - 17447882752.0, - 17447884800.0, - 17447862272.0, - 17447884800.0, - 17447878656.0, - 17447872512.0, - 17447888896.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447874560.0, - 17447870464.0, - 17447907328.0, - 17447884800.0, - 17447890944.0, - 17447862272.0, - 17447864320.0, - 17447882752.0, - 17447868416.0, - 17447882752.0, - 17447878656.0, - 17447874560.0, - 17447876608.0, - 17447876608.0, - 17447866368.0, - 17447882752.0, - 17447858176.0, - 17447874560.0, - 17447874560.0, - 17447864320.0, - 17447880704.0, - 17447886848.0, - 17447892992.0, - 17447874560.0, - 17447866368.0, - 17447880704.0, - 17447868416.0, - 17447888896.0, - 17447886848.0, - 17447878656.0, - 17447892992.0, - 17447888896.0, - 17447890944.0, - 17447886848.0, - 17447886848.0, - 17447890944.0, - 17447892992.0, - 17447874560.0, - 17447880704.0, - 17447878656.0, - 17447874560.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447876608.0, - 17448359936.0, - 17447886848.0, - 17447870464.0, - 17447870464.0, - 17447878656.0, - 17447876608.0, - 17447880704.0, - 17447868416.0, - 17447880704.0, - 17447870464.0, - 17447882752.0, - 17447890944.0, - 17447872512.0, - 17447882752.0, - 17447876608.0, - 17447872512.0, - 17447882752.0, - 17447882752.0, - 17447886848.0, - 17447886848.0, - 17447874560.0, - 17447866368.0, - 17447880704.0, - 17447878656.0, - 17447876608.0, - 17448390656.0, - 17448382464.0, - 17448382464.0, - 17448380416.0, - 17448769536.0, - 17448390656.0, - 17448386560.0, - 17448394752.0, - 17448384512.0, - 17448388608.0, - 17449306112.0, - 17448386560.0, - 17448396800.0, - 17448402944.0, - 17448390656.0, - 17448392704.0, - 17448392704.0, - 17448398848.0, - 17448372224.0, - 17448384512.0, - 17448378368.0, - 17448390656.0, - 17448390656.0, - 17448396800.0, - 17448378368.0, - 17448384512.0, - 17448388608.0, - 17448390656.0, - 17448384512.0, - 17448378368.0, - 17448372224.0, - 17448402944.0, - 17448374272.0, - 17448388608.0, - 17448384512.0, - 17448400896.0, - 17448390656.0, - 17448384512.0, - 17448388608.0, - 17448386560.0, - 17448398848.0, - 17448372224.0, - 17448374272.0, - 17448400896.0, - 17448380416.0, - 17448398848.0, - 17448386560.0, - 17448378368.0, - 17449261056.0, - 17448382464.0, - 17448392704.0, - 17448392704.0, - 17448390656.0, - 17448380416.0, - 17448382464.0, - 17448394752.0, - 17448384512.0, - 17448378368.0, - 17448390656.0, - 17448380416.0, - 17448382464.0, - 17448388608.0, - 17448382464.0, - 17448382464.0, - 17448382464.0, - 17448394752.0, - 17448382464.0, - 17448378368.0, - 17448390656.0, - 17448388608.0, - 17448394752.0, - 17448394752.0, - 17448386560.0, - 17448382464.0, - 17448374272.0, - 17448376320.0, - 17448382464.0, - 17448384512.0, - 17448392704.0, - 17448964096.0, - 17448386560.0, - 17448374272.0, - 17448382464.0, - 17448394752.0, - 17448364032.0, - 17448394752.0, - 17448392704.0, - 17448392704.0, - 17448390656.0, - 17448390656.0, - 17448378368.0, - 17448382464.0, - 17448390656.0, - 
17448382464.0, - 17448390656.0, - 17448386560.0, - 17448382464.0, - 17448394752.0, - 17448390656.0, - 17448390656.0, - 17448388608.0, - 17448398848.0, - 17448384512.0, - 17448386560.0, - 17448394752.0, - 17448386560.0, - 17448402944.0, - 17448386560.0, - 17448388608.0, - 17448396800.0, - 17448388608.0, - 17448390656.0, - 17448382464.0, - 17448386560.0, - 17447870464.0, - 17447878656.0, - 17447888896.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447888896.0, - 17447884800.0, - 17447870464.0, - 17447874560.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447872512.0, - 17447880704.0, - 17447876608.0, - 17447874560.0, - 17447876608.0, - 17447868416.0, - 17447882752.0, - 17447882752.0, - 17447868416.0, - 17447886848.0, - 17447872512.0, - 17447886848.0, - 17447882752.0, - 17447880704.0, - 17447890944.0, - 17447876608.0, - 17447878656.0, - 17448468480.0, - 17447880704.0, - 17447886848.0, - 17447878656.0, - 17447874560.0, - 17447868416.0, - 17447870464.0, - 17447874560.0, - 17447874560.0, - 17447884800.0, - 17447880704.0, - 17447882752.0, - 17447864320.0, - 17447862272.0, - 17447878656.0, - 17447870464.0, - 17447862272.0, - 17447888896.0, - 17447880704.0, - 17447874560.0, - 17447901184.0, - 17447870464.0, - 17447882752.0, - 17447882752.0, - 17447886848.0, - 17447880704.0, - 17447874560.0, - 17447868416.0, - 17447878656.0, - 17447872512.0, - 17447884800.0, - 17447886848.0, - 17447864320.0, - 17447901184.0, - 17447880704.0, - 17447862272.0, - 17447876608.0, - 17447880704.0, - 17447876608.0, - 17447886848.0, - 17447868416.0, - 17447876608.0, - 17447880704.0, - 17447880704.0, - 17447878656.0, - 17447880704.0, - 17447890944.0, - 17447882752.0, - 17447870464.0, - 17447870464.0, - 17447888896.0, - 17447870464.0, - 17447876608.0, - 17447878656.0, - 17447864320.0, - 17447884800.0, - 17447870464.0, - 17447888896.0, - 17447882752.0, - 17447890944.0, - 17447882752.0, - 17447895040.0, - 17447874560.0, - 17447884800.0, - 17447888896.0, - 17447882752.0, - 17447872512.0, - 17447882752.0, - 17447870464.0, - 17447886848.0, - 17447870464.0, - 17447874560.0, - 17447866368.0, - 17447878656.0, - 17447876608.0, - 17447870464.0, - 17447876608.0, - 17447866368.0, - 17447878656.0, - 17447888896.0, - 17447874560.0, - 17447884800.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447882752.0, - 17447866368.0, - 17447880704.0, - 17447884800.0, - 17447882752.0, - 17447872512.0, - 17447876608.0, - 17447886848.0, - 17447882752.0, - 17447878656.0, - 17447874560.0, - 17447890944.0, - 17447882752.0, - 17447886848.0, - 17447874560.0, - 17447876608.0, - 17447874560.0, - 17447884800.0, - 17447878656.0, - 17447864320.0, - 17447884800.0, - 17447874560.0, - 17447872512.0, - 17447880704.0, - 17447878656.0, - 17448693760.0, - 17447878656.0, - 17447890944.0, - 17447868416.0, - 17447878656.0, - 17447882752.0, - 17447892992.0, - 17447884800.0, - 17447888896.0, - 17447880704.0, - 17447880704.0, - 17447878656.0, - 17447868416.0, - 17447876608.0, - 17447890944.0, - 17447886848.0, - 17447876608.0, - 17447872512.0, - 17447888896.0, - 17447890944.0, - 17447866368.0, - 17447880704.0, - 17447864320.0, - 17447890944.0, - 17447886848.0, - 17447870464.0, - 17447878656.0, - 17447903232.0, - 17447876608.0, - 17447892992.0, - 17447866368.0, - 17447884800.0, - 17447852032.0, - 17447880704.0, - 17447882752.0, - 17447874560.0, - 17447866368.0, - 17447899136.0, - 17447872512.0, - 17447878656.0, - 17447880704.0, - 17447874560.0, - 17447856128.0, - 17447886848.0, - 17447895040.0, - 17447866368.0, - 17447874560.0, - 17447874560.0, - 
17447878656.0, - 17447862272.0, - 17447870464.0, - 17448798208.0, - 17447878656.0, - 17447870464.0, - 17447870464.0, - 17447864320.0, - 17447886848.0, - 17447874560.0, - 17447878656.0, - 17447888896.0, - 17447899136.0, - 17447886848.0, - 17447882752.0, - 17447878656.0, - 17447864320.0, - 17447888896.0, - 17447882752.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447868416.0, - 17447876608.0, - 17447888896.0, - 17447874560.0, - 17447884800.0, - 17447882752.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447870464.0, - 17447874560.0, - 17447882752.0, - 17447886848.0, - 17447876608.0, - 17447878656.0, - 17447870464.0, - 17448114176.0, - 17447884800.0, - 17447878656.0, - 17447884800.0, - 17447874560.0, - 17447878656.0, - 17448140800.0, - 17447878656.0, - 17447870464.0, - 17447892992.0, - 17447870464.0, - 17447892992.0, - 17447890944.0, - 17447870464.0, - 17447890944.0, - 17447888896.0, - 17447878656.0, - 17447874560.0, - 17447880704.0, - 17447895040.0, - 17447872512.0, - 17447878656.0, - 17447874560.0, - 17447886848.0, - 17448515584.0, - 17448247296.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447872512.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447884800.0, - 17447878656.0, - 17447866368.0, - 17447878656.0, - 17447864320.0, - 17447884800.0, - 17447878656.0, - 17447880704.0, - 17447878656.0, - 17447892992.0, - 17447870464.0, - 17447876608.0, - 17447878656.0, - 17447880704.0, - 17447880704.0, - 17447884800.0, - 17447876608.0, - 17447895040.0, - 17447870464.0, - 17447874560.0, - 17447872512.0, - 17447868416.0, - 17447890944.0, - 17447882752.0, - 17447892992.0, - 17447899136.0, - 17447866368.0, - 17447878656.0, - 17447868416.0, - 17447866368.0, - 17447890944.0, - 17447878656.0, - 17447866368.0, - 17447878656.0, - 17447876608.0, - 17447876608.0, - 17447874560.0, - 17447895040.0, - 17447866368.0, - 17447890944.0, - 17447882752.0, - 17447882752.0, - 17447868416.0, - 17447870464.0, - 17447880704.0, - 17447884800.0, - 17447876608.0, - 17447886848.0, - 17447870464.0, - 17447905280.0, - 17447884800.0, - 17447880704.0, - 17447878656.0, - 17447882752.0, - 17447870464.0, - 17447874560.0, - 17447870464.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447862272.0, - 17447886848.0, - 17447884800.0, - 17447874560.0, - 17447884800.0, - 17447890944.0, - 17447872512.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447878656.0, - 17447876608.0, - 17447895040.0, - 17447884800.0, - 17447882752.0, - 17447870464.0, - 17447872512.0, - 17447874560.0, - 17447878656.0, - 17447862272.0, - 17447892992.0, - 17447882752.0, - 17447872512.0, - 17447890944.0, - 17447870464.0, - 17447878656.0, - 17447874560.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17448763392.0, - 17447878656.0, - 17447878656.0, - 17447890944.0, - 17447862272.0, - 17447876608.0, - 17447884800.0, - 17447888896.0, - 17447895040.0, - 17447870464.0, - 17447878656.0, - 17447868416.0, - 17447872512.0, - 17447866368.0, - 17447880704.0, - 17447870464.0, - 17447864320.0, - 17447890944.0, - 17447872512.0, - 17447870464.0, - 17447884800.0, - 17447882752.0, - 17447890944.0, - 17447976960.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447878656.0, - 17447866368.0, - 17447890944.0, - 17447870464.0, - 17447888896.0, - 17447890944.0, - 17447878656.0, - 17447882752.0, - 17447886848.0, - 17447886848.0, - 17447878656.0, - 17447880704.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447878656.0, - 17447886848.0, - 
17447868416.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447866368.0, - 17447888896.0, - 17447878656.0, - 17447874560.0, - 17447892992.0, - 17447874560.0, - 17447886848.0, - 17447870464.0, - 17447880704.0, - 17447876608.0, - 17447886848.0, - 17447872512.0, - 17447884800.0, - 17447884800.0, - 17447888896.0, - 17447878656.0, - 17447862272.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17447882752.0, - 17447866368.0, - 17447880704.0, - 17447890944.0, - 17447876608.0, - 17447882752.0, - 17447868416.0, - 17447878656.0, - 17448085504.0, - 17447882752.0, - 17447882752.0, - 17447882752.0, - 17447880704.0, - 17447866368.0, - 17447886848.0, - 17447866368.0, - 17447858176.0, - 17447876608.0, - 17447878656.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17447888896.0, - 17447872512.0, - 17447866368.0, - 17447886848.0, - 17447876608.0, - 17447886848.0, - 17447870464.0, - 17447866368.0, - 17447882752.0, - 17447870464.0, - 17447892992.0, - 17447872512.0, - 17447882752.0, - 17447878656.0, - 17447862272.0, - 17447880704.0, - 17447886848.0, - 17447882752.0, - 17447872512.0, - 17447878656.0, - 17447872512.0, - 17447884800.0, - 17447884800.0, - 17447874560.0, - 17447872512.0, - 17447890944.0, - 17447886848.0, - 17447876608.0, - 17447878656.0, - 17447895040.0, - 17447880704.0, - 17447872512.0, - 17447884800.0, - 17447876608.0, - 17447884800.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447882752.0, - 17447882752.0, - 17448259584.0, - 17447880704.0, - 17447876608.0, - 17447864320.0, - 17447882752.0, - 17447874560.0, - 17447878656.0, - 17447882752.0, - 17447870464.0, - 17447878656.0, - 17447882752.0, - 17447880704.0, - 17447878656.0, - 17447899136.0, - 17447884800.0, - 17447872512.0, - 17448570880.0, - 17447866368.0, - 17447888896.0, - 17447878656.0, - 17447866368.0, - 17447882752.0, - 17447895040.0, - 17447878656.0, - 17447878656.0, - 17447888896.0, - 17447884800.0, - 17447880704.0, - 17447874560.0, - 17447901184.0, - 17447878656.0, - 17447874560.0, - 17447878656.0, - 17447872512.0, - 17447880704.0, - 17447880704.0, - 17447872512.0, - 17447878656.0, - 17447868416.0, - 17447886848.0, - 17447870464.0, - 17447872512.0, - 17447890944.0, - 17447870464.0, - 17447882752.0, - 17447882752.0, - 17447862272.0, - 17447878656.0, - 17447886848.0, - 17447882752.0, - 17447874560.0, - 17447878656.0, - 17447874560.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17448110080.0, - 17447890944.0, - 17447886848.0, - 17447874560.0, - 17447878656.0, - 17447892992.0, - 17447878656.0, - 17447872512.0, - 17447886848.0, - 17447874560.0, - 17447886848.0, - 17447884800.0, - 17447878656.0, - 17447882752.0, - 17447876608.0, - 17447880704.0, - 17447876608.0, - 17447880704.0, - 17447882752.0, - 17447874560.0, - 17447862272.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17447876608.0, - 17447876608.0, - 17447876608.0, - 17447876608.0, - 17448497152.0, - 17447876608.0, - 17447899136.0, - 17447884800.0, - 17447870464.0, - 17447876608.0, - 17447862272.0, - 17447890944.0, - 17447874560.0, - 17447870464.0, - 17447882752.0, - 17447895040.0, - 17447876608.0, - 17447882752.0, - 17447888896.0, - 17447884800.0, - 17447880704.0, - 17447878656.0, - 17447897088.0, - 17447878656.0, - 17447872512.0, - 17447868416.0, - 17447872512.0, - 17447876608.0, - 17447878656.0, - 17447874560.0, - 17447870464.0, - 17447872512.0, - 17447890944.0, - 17447874560.0, - 17447864320.0, - 17447878656.0, - 17447870464.0, - 17448939520.0, - 17447858176.0, - 17447874560.0, - 
17447882752.0, - 17447878656.0, - 17447866368.0, - 17447882752.0, - 17447864320.0, - 17447882752.0, - 17447862272.0, - 17447874560.0, - 17447882752.0, - 17447886848.0, - 17447872512.0, - 17447880704.0, - 17447862272.0, - 17447880704.0, - 17447868416.0, - 17447862272.0, - 17447874560.0, - 17448544256.0, - 17447895040.0, - 17447886848.0, - 17447895040.0, - 17447880704.0, - 17447874560.0, - 17447890944.0, - 17447882752.0, - 17447870464.0, - 17447870464.0, - 17447890944.0, - 17447882752.0, - 17447870464.0, - 17447880704.0, - 17447882752.0, - 17447895040.0, - 17447878656.0, - 17447886848.0, - 17447872512.0, - 17447886848.0, - 17447872512.0, - 17447878656.0, - 17447882752.0, - 17447876608.0, - 17447878656.0, - 17447878656.0, - 17447897088.0, - 17447872512.0, - 17447886848.0, - 17447870464.0, - 17447886848.0, - 17447866368.0, - 17447886848.0, - 17447874560.0, - 17447888896.0, - 17447870464.0, - 17447874560.0, - 17447878656.0, - 17447882752.0, - 17447868416.0, - 17447880704.0, - 17447872512.0, - 17447880704.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447874560.0, - 17447880704.0, - 17447880704.0, - 17447876608.0, - 17447888896.0, - 17447878656.0, - 17447868416.0, - 17447878656.0, - 17447874560.0, - 17447870464.0, - 17447866368.0, - 17447890944.0, - 17447872512.0, - 17447874560.0, - 17447880704.0, - 17447888896.0, - 17447874560.0, - 17447878656.0, - 17447872512.0, - 17447872512.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447878656.0, - 17447884800.0, - 17447878656.0, - 17447880704.0, - 17447866368.0, - 17447874560.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447866368.0, - 17447886848.0, - 17447888896.0, - 17447882752.0, - 17447874560.0, - 17447882752.0, - 17447884800.0, - 17447882752.0, - 17447897088.0, - 17447878656.0, - 17447895040.0, - 17447886848.0, - 17447882752.0, - 17447870464.0, - 17447882752.0, - 17447868416.0, - 17447884800.0, - 17447882752.0, - 17447882752.0, - 17447864320.0, - 17447868416.0, - 17447880704.0, - 17447890944.0, - 17447876608.0, - 17447886848.0, - 17447886848.0, - 17447868416.0, - 17447874560.0, - 17447884800.0, - 17447866368.0, - 17447866368.0, - 17447872512.0, - 17447872512.0, - 17447868416.0, - 17447878656.0, - 17447874560.0, - 17447888896.0, - 17447880704.0, - 17447872512.0, - 17447886848.0, - 17447872512.0, - 17447890944.0, - 17447874560.0, - 17447888896.0, - 17447866368.0, - 17447880704.0, - 17447882752.0, - 17447878656.0, - 17447876608.0, - 17447878656.0, - 17447884800.0, - 17447876608.0, - 17447888896.0, - 17447870464.0, - 17447892992.0, - 17447870464.0, - 17447868416.0, - 17447886848.0, - 17447882752.0, - 17447884800.0, - 17447880704.0, - 17447882752.0, - 17447874560.0, - 17447886848.0, - 17447878656.0, - 17447862272.0, - 17447876608.0, - 17447878656.0, - 17447872512.0, - 17447882752.0, - 17447895040.0, - 17447886848.0, - 17447874560.0, - 17447860224.0, - 17447880704.0, - 17447882752.0, - 17447874560.0, - 17447874560.0, - 17447878656.0, - 17447876608.0, - 17447880704.0, - 17447878656.0, - 17447882752.0, - 17447874560.0, - 17447888896.0, - 17447886848.0, - 17447872512.0, - 17447882752.0, - 17447880704.0, - 17447880704.0, - 17447870464.0, - 17447866368.0, - 17447882752.0, - 17447874560.0, - 17447878656.0, - 17447884800.0, - 17447882752.0, - 17447874560.0, - 17447878656.0, - 17447878656.0, - 17447866368.0, - 17447880704.0, - 17447876608.0, - 17447874560.0, - 17447870464.0, - 17447880704.0, - 17447870464.0, - 17447884800.0, - 17447897088.0, - 17447878656.0, - 17447888896.0, - 17447870464.0, - 17447876608.0, - 
17447874560.0, - 17447878656.0, - 17447886848.0, - 17447872512.0, - 17447868416.0, - 17447878656.0, - 17447884800.0, - 17447886848.0, - 17447872512.0, - 17447874560.0, - 17447874560.0, - 17447886848.0, - 17447872512.0, - 17447878656.0, - 17447876608.0, - 17447886848.0, - 17447870464.0, - 17447872512.0, - 17447872512.0, - 17447864320.0, - 17447880704.0, - 17447890944.0, - 17447884800.0, - 17447878656.0, - 17447907328.0, - 17447870464.0, - 17447870464.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447899136.0, - 17447882752.0, - 17448333312.0, - 17447874560.0, - 17447892992.0, - 17447874560.0, - 17447882752.0, - 17447878656.0, - 17447870464.0, - 17447874560.0, - 17447870464.0, - 17447874560.0, - 17447888896.0, - 17447878656.0, - 17447878656.0, - 17447886848.0, - 17447878656.0, - 17447882752.0, - 17447876608.0, - 17447936000.0, - 17447878656.0, - 17447884800.0, - 17447876608.0, - 17447880704.0, - 17447888896.0, - 17447866368.0, - 17447872512.0, - 17447874560.0, - 17447872512.0, - 17447882752.0, - 17447876608.0, - 17447862272.0, - 17448724480.0, - 17447878656.0, - 17447876608.0, - 17447876608.0, - 17447872512.0, - 17447880704.0, - 17447884800.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17447878656.0, - 17447864320.0, - 17447878656.0, - 17447880704.0, - 17447882752.0, - 17447878656.0, - 17447878656.0, - 17447870464.0, - 17447866368.0, - 17447878656.0, - 17447878656.0, - 17447876608.0, - 17447882752.0, - 17447880704.0, - 17447886848.0, - 17447895040.0, - 17447890944.0, - 17447862272.0, - 17447878656.0, - 17447878656.0, - 17447866368.0, - 17447876608.0, - 17447888896.0, - 17447884800.0, - 17447872512.0, - 17447882752.0, - 17447870464.0, - 17447892992.0, - 17447866368.0, - 17447878656.0, - 17447880704.0, - 17447870464.0, - 17447866368.0, - 17447876608.0, - 17447880704.0, - 17447892992.0, - 17447882752.0, - 17447884800.0, - 17447882752.0, - 17447874560.0, - 17447890944.0, - 17447895040.0, - 17447890944.0, - 17447886848.0, - 17447872512.0, - 17447882752.0, - 17447884800.0, - 17447882752.0, - 17447874560.0, - 17447882752.0, - 17447872512.0, - 17447888896.0, - 17447868416.0, - 17447878656.0, - 17447870464.0, - 17447880704.0, - 17447874560.0, - 17448169472.0, - 17447878656.0, - 17447880704.0, - 17447878656.0, - 17447882752.0, - 17447882752.0, - 17447874560.0, - 17447876608.0, - 17447880704.0, - 17447868416.0, - 17447878656.0, - 17447878656.0, - 17447878656.0, - 17447868416.0, - 17447880704.0, - 17447882752.0, - 17447878656.0, - 17447876608.0, - 17447878656.0, - 17447874560.0, - 17447884800.0, - 17447880704.0, - 17447882752.0, - 17447872512.0, - 17447880704.0, - 17447878656.0, - 17447870464.0, - 17447872512.0, - 17447886848.0, - 17448013824.0, - 17447872512.0, - 17447884800.0, - 17447880704.0, - 17447862272.0, - 17447886848.0, - 17447874560.0, - 17447890944.0, - 17447866368.0, - 17447884800.0, - 17447878656.0, - 17447864320.0, - 17447876608.0, - 17447870464.0, - 17447872512.0, - 17447882752.0, - 17447876608.0, - 17447882752.0, - 17447878656.0, - 17447880704.0, - 17447872512.0, - 17447874560.0, - 17447872512.0, - 17447876608.0, - 17447895040.0, - 17447874560.0, - 17447874560.0, - 17447870464.0, - 17447876608.0, - 17447872512.0, - 17447868416.0, - 17447878656.0, - 17447862272.0, - 17447878656.0, - 17447876608.0, - 17447880704.0, - 17447870464.0, - 17447876608.0, - 17447890944.0, - 17447874560.0, - 17447886848.0, - 17447882752.0, - 17447888896.0, - 17447880704.0, - 17448466432.0, - 17447882752.0, - 17447876608.0, - 17447868416.0, - 17447872512.0, - 17447890944.0, - 17447897088.0, - 
17447876608.0, - 17447874560.0, - 17447890944.0, - 17447878656.0, - 17447870464.0, - 17447882752.0, - 17447872512.0, - 17447886848.0, - 17447888896.0, - 17447882752.0, - 17447872512.0, - 17447866368.0, - 17447878656.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 25809, - "step_interval": 5, - "values": [ - 105.86866, - 27.56126, - 28.82349, - 29.53482, - 27.89586, - 28.03171, - 26.76686, - 27.44711, - 27.49381, - 26.2265, - 26.34585, - 26.49051, - 25.37542, - 25.01744, - 25.80256, - 25.40128, - 24.8858, - 25.58665, - 24.75191, - 25.04627, - 24.2937, - 24.7563, - 24.02316, - 24.34371, - 24.1251, - 23.96596, - 24.00971, - 23.89089, - 23.58458, - 24.4027, - 24.01048, - 23.99876, - 23.99977, - 23.84646, - 24.00587, - 24.41593, - 23.62381, - 23.21431, - 23.60982, - 23.42319, - 23.37656, - 23.99874, - 23.14469, - 23.10061, - 23.28335, - 23.36868, - 23.1209, - 23.39396, - 23.47888, - 23.09894, - 23.64079, - 22.88334, - 23.72844, - 23.62627, - 22.73817, - 22.86507, - 23.453, - 23.09974, - 22.69251, - 24.12787, - 22.81395, - 22.66667, - 23.18731, - 22.85296, - 23.01887, - 23.04897, - 22.88361, - 22.74143, - 22.74174, - 22.75465, - 23.50667, - 23.00953, - 22.53933, - 22.55209, - 22.99388, - 22.5802, - 22.61953, - 23.25686, - 23.04985, - 22.48606, - 22.77353, - 23.16327, - 22.37138, - 22.76908, - 22.68125, - 22.87267, - 22.54488, - 22.61455, - 23.20255, - 22.35706, - 22.78544, - 22.51313, - 22.8067, - 22.63311, - 22.36641, - 22.93204, - 22.8089, - 22.69756, - 22.35847, - 22.84454, - 22.16427, - 22.42444, - 22.43595, - 22.46487, - 22.40865, - 22.44312, - 22.45533, - 22.71659, - 22.41388, - 22.36669, - 22.49695, - 22.49306, - 22.65398, - 22.64062, - 22.85151, - 22.6082, - 22.72738, - 22.56372, - 22.70258, - 22.43925, - 101.14027, - 22.5031, - 22.76764, - 22.67679, - 22.41643, - 22.6319, - 22.398, - 22.86879, - 22.67579, - 22.62794, - 22.53665, - 22.67882, - 22.5028, - 22.52929, - 23.00784, - 22.50065, - 22.44123, - 22.76723, - 22.51196, - 22.7051, - 22.76956, - 22.52012, - 22.43069, - 22.40474, - 22.38156, - 22.47368, - 22.32673, - 22.40841, - 22.2759, - 22.51299, - 22.3214, - 22.86805, - 22.57032, - 22.37732, - 22.69439, - 22.65036, - 34.68773, - 25.7873, - 23.00085, - 22.46626, - 22.42371, - 23.02043, - 22.3282, - 22.45572, - 23.16323, - 22.28081, - 22.40856, - 23.19218, - 22.47156, - 23.06928, - 23.54648, - 22.44444, - 22.51854, - 23.50013, - 25.00345, - 32.67469, - 23.51427, - 22.31341, - 22.34525, - 22.84754, - 22.49431, - 22.44482, - 23.15204, - 22.29314, - 22.3289, - 22.44074, - 22.36134, - 23.06536, - 22.62574, - 22.56191, - 22.75284, - 22.55342, - 22.49709, - 22.30702, - 23.17389, - 22.35194, - 22.47066, - 22.50252, - 22.38508, - 22.32332, - 22.29499, - 22.64989, - 25.34019, - 26.20888, - 34.42688, - 22.71979, - 22.34598, - 22.32874, - 22.40121, - 22.29541, - 22.49414, - 22.34285, - 22.72862, - 22.65599, - 22.53123, - 22.3385, - 22.85989, - 22.42258, - 22.65887, - 23.03068, - 22.46347, - 22.4894, - 22.7975, - 22.94465, - 22.49659, - 23.17386, - 22.3175, - 22.39908, - 23.28626, - 22.32511, - 109.73788, - 22.4802, - 22.72729, - 22.61836, - 22.47513, - 22.44307, - 22.47037, - 22.40571, - 22.39138, - 22.51142, - 22.45977, - 22.42165, - 22.36773, - 22.32747, - 22.62535, - 22.35597, - 22.31357, - 22.87909, - 22.61735, - 22.3368, - 22.48093, - 22.49195, - 22.29134, - 22.46662, - 22.28344, - 22.48509, - 22.3982, - 22.31272, - 22.54745, - 22.79593, - 22.66751, - 22.7888, - 22.44623, - 22.90924, - 22.94298, - 22.70551, - 22.59248, - 22.44114, - 23.25265, - 22.6757, - 22.81174, - 
22.79008, - 22.40932, - 22.52846, - 22.74684, - 22.64011, - 22.24557, - 22.44391, - 22.22307, - 22.20709, - 22.96877, - 22.22865, - 22.5563, - 22.75453, - 22.27962, - 22.35249, - 22.90046, - 22.31525, - 22.21288, - 22.95827, - 22.21294, - 22.43736, - 22.93256, - 22.69221, - 22.29764, - 22.3734, - 22.82716, - 22.44497, - 22.37052, - 22.33652, - 22.42637, - 22.30613, - 22.42651, - 22.4247, - 22.33259, - 22.30497, - 22.42634, - 22.2886, - 22.26643, - 22.23274, - 22.21864, - 22.64359, - 22.24904, - 22.36227, - 22.47831, - 22.39154, - 22.28922, - 22.68583, - 22.69337, - 22.33331, - 22.66439, - 22.29401, - 22.32352, - 22.75153, - 22.30951, - 22.38224, - 22.95873, - 22.35417, - 22.30513, - 23.46101, - 22.2886, - 22.24117, - 23.07443, - 22.4121, - 22.32479, - 22.83049, - 22.32771, - 22.36772, - 22.60619, - 22.26879, - 22.70377, - 22.97411, - 22.54233, - 22.6727, - 22.69834, - 23.01474, - 23.07424, - 23.89499, - 23.76587, - 23.45024, - 23.09168, - 22.51823, - 22.40998, - 22.32227, - 22.41722, - 22.23259, - 22.38729, - 22.3257, - 22.41275, - 22.21203, - 22.39303, - 22.17919, - 22.20379, - 22.2459, - 22.23867, - 22.36098, - 22.4702, - 22.32046, - 22.27016, - 22.31057, - 22.24971, - 22.25786, - 22.61771, - 22.22671, - 22.34153, - 22.38118, - 22.26394, - 22.24669, - 22.44497, - 22.23526, - 23.25095, - 22.23099, - 25.221, - 23.47947, - 22.21563, - 22.54813, - 23.25688, - 22.40309, - 22.19077, - 23.20723, - 22.24504, - 22.50768, - 22.69789, - 22.26528, - 22.24601, - 22.57661, - 22.22315, - 22.30669, - 22.64958, - 22.19949, - 22.19627, - 22.16858, - 22.27374, - 22.22293, - 22.6518, - 22.50734, - 22.323, - 22.29104, - 22.24173, - 22.55919, - 22.26411, - 22.46166, - 22.28385, - 22.47416, - 22.31791, - 22.2841, - 22.59189, - 22.30555, - 22.46978, - 22.16957, - 22.3074, - 22.19136, - 22.15528, - 22.18854, - 22.2021, - 22.14673, - 22.60293, - 22.6481, - 22.46713, - 23.36876, - 22.39404, - 22.22066, - 23.27526, - 22.17223, - 22.62513, - 23.22205, - 22.2436, - 22.2744, - 22.87858, - 22.22367, - 22.19553, - 22.74681, - 22.33299, - 22.39791, - 22.68906, - 22.62132, - 22.23763, - 22.31749, - 22.23967, - 22.26274, - 22.16136, - 22.4197, - 22.49426, - 22.14672, - 22.28955, - 22.19643, - 22.3853, - 22.41279, - 22.23421, - 22.30954, - 22.26539, - 22.31166, - 22.32302, - 22.26051, - 22.51379, - 22.29998, - 22.31581, - 22.28776, - 22.21906, - 22.34208, - 22.24649, - 22.37438, - 22.30338, - 22.44025, - 22.29842, - 22.4917, - 22.25071, - 22.22369, - 22.37264, - 22.26021, - 22.22922, - 22.9261, - 22.55762, - 22.29391, - 23.25415, - 22.6554, - 22.46727, - 23.43125, - 22.33364, - 22.32415, - 23.30188, - 22.3106, - 22.30622, - 23.30781, - 22.29728, - 22.29022, - 22.5379, - 22.30253, - 22.36467, - 22.38128, - 22.44048, - 22.31472, - 22.48322, - 22.266, - 22.33748, - 22.36523, - 22.4067, - 22.24718, - 22.27639, - 22.26624, - 22.23374, - 22.46478, - 22.27094, - 22.24064, - 22.20455, - 22.28345, - 22.27359, - 22.22132, - 22.34988, - 22.26994, - 22.50601, - 22.34611, - 22.30626, - 22.33995, - 22.2312, - 22.27587, - 22.23085, - 22.54672, - 22.25329, - 22.43076, - 22.96232, - 22.36468, - 22.37718, - 23.43173, - 22.27805, - 23.78584, - 24.4831, - 22.90033, - 22.81812, - 23.65196, - 56.45613, - 22.51331, - 23.30863, - 22.29567, - 22.25118, - 22.94326, - 22.21761, - 22.17075, - 22.74069, - 22.27514, - 22.15032, - 22.50908, - 22.19934, - 22.55052, - 22.82322, - 22.28077, - 22.36117, - 22.44909, - 22.4424, - 22.22169, - 22.22557, - 22.22998, - 22.16221, - 22.38628, - 22.30353, - 22.23189, - 22.24877, - 22.3081, - 22.20495, - 22.2328, - 
22.3289, - 22.26328, - 22.16943, - 22.22003, - 22.18421, - 22.13651, - 22.19386, - 22.33811, - 75.57841, - 22.83766, - 22.49433, - 22.90823, - 22.10073, - 22.17331, - 22.91005, - 22.0739, - 38.58989, - 23.2531, - 22.19735, - 22.1543, - 23.24873, - 22.21465, - 22.16186, - 23.30331, - 22.10781, - 22.24317, - 22.22847, - 22.15637, - 22.49435, - 22.30383, - 22.74896, - 22.72693, - 22.34111, - 22.2892, - 22.26019, - 22.18476, - 22.17116, - 22.27654, - 22.09598, - 22.25638, - 22.55965, - 22.13537, - 22.12425, - 22.12707, - 22.25503, - 22.3358, - 22.29519, - 22.13488, - 22.26938, - 22.19761, - 22.4934, - 22.24306, - 22.11744, - 22.28918, - 22.45942, - 22.64582, - 22.23536, - 22.71051, - 22.12984, - 22.15548, - 22.87831, - 22.04995, - 22.14385, - 23.33722, - 22.32115, - 22.13066, - 23.09654, - 22.25108, - 22.21047, - 23.01985, - 22.24864, - 22.14587, - 22.42055, - 22.24742, - 22.20138, - 22.66302, - 22.25027, - 22.321, - 22.18202, - 22.13944, - 22.08795, - 22.13778, - 22.72377, - 22.09366, - 22.25969, - 22.13122, - 22.12656, - 22.50283, - 22.11498, - 22.22658, - 22.11015, - 22.10616, - 22.53533, - 22.44845, - 22.11857, - 22.13022, - 22.2749, - 22.37151, - 22.15915, - 22.15242, - 22.27226, - 22.09876, - 22.40813, - 22.34806, - 22.06896, - 22.11633, - 22.45255, - 22.56616, - 22.19688, - 22.91029, - 22.23645, - 22.17638, - 22.39302, - 22.16422, - 22.13814, - 22.22944, - 22.15951, - 22.36833, - 22.11834, - 22.19846, - 22.15721, - 22.14138, - 22.24758, - 22.18874, - 22.29269, - 22.15148, - 22.5053, - 22.13033, - 22.1671, - 22.16595, - 22.51783, - 22.22311, - 22.13156, - 22.58138, - 22.57103, - 22.22161, - 23.10209, - 22.36046, - 22.2058, - 23.24473, - 22.1824, - 22.18779, - 23.21699, - 22.30294, - 22.32474, - 23.0402, - 22.13272, - 22.10887, - 22.34825, - 22.17337, - 22.08873, - 22.1289, - 22.69025, - 22.13729, - 22.16747, - 22.11914, - 22.22668, - 22.29111, - 22.32997, - 22.97981, - 22.32437, - 22.34959, - 22.32594, - 22.42304, - 22.26817, - 22.16518, - 22.24685, - 22.25327, - 22.2315, - 22.15087, - 22.75643, - 22.09856, - 22.23405, - 22.18762, - 22.08163, - 22.14593, - 22.31931, - 22.0885, - 22.1177, - 22.85615, - 22.06519, - 22.02122, - 23.03752, - 22.14087, - 22.17897, - 25.75191, - 22.93589, - 22.30614, - 23.35775, - 22.1795, - 22.19582, - 22.8428, - 22.08013, - 22.13661, - 22.37544, - 22.09806, - 22.17831, - 22.20607, - 22.09212, - 22.23389, - 22.07772, - 22.18924, - 22.0577, - 22.19938, - 22.09173, - 22.31145, - 22.36939, - 22.04991, - 22.18527, - 22.10738, - 22.18981, - 22.11068, - 22.07264, - 22.25061, - 22.12102, - 22.13982, - 22.15264, - 22.44484, - 22.07088, - 22.20173, - 22.14096, - 22.10879, - 22.71354, - 22.10233, - 96.94515, - 22.27471, - 22.32662, - 22.37228, - 22.32926, - 22.41883, - 22.3726, - 22.45572, - 22.3245, - 22.48049, - 22.32897, - 22.28501, - 22.26884, - 22.26314, - 22.35017, - 22.28479, - 22.25477, - 22.27602, - 22.41632, - 22.23596, - 22.30393, - 22.42352, - 22.2961, - 22.25686, - 22.29131, - 22.67199, - 22.26909, - 22.44259, - 22.23191, - 22.83599, - 22.25297, - 22.24627, - 22.22356, - 22.2168, - 22.34749, - 22.52471, - 22.71684, - 22.39006, - 22.88928, - 22.28347, - 22.25723, - 22.72161, - 22.28623, - 22.3949, - 22.99483, - 22.20708, - 22.2303, - 23.13258, - 22.29917, - 22.18401, - 23.22085, - 22.2282, - 22.2045, - 23.05483, - 22.23938, - 22.49996, - 23.0514, - 22.22065, - 22.25204, - 22.26876, - 22.25576, - 22.28014, - 22.73024, - 22.23362, - 22.21972, - 22.24227, - 22.33502, - 22.33718, - 22.22531, - 22.43032, - 22.18942, - 22.30852, - 22.20391, - 22.22912, - 22.5215, 
[... bulk of the old per-step golden values deleted by this patch (several thousand entries, mostly ≈22–23 with occasional spikes up to ~105); the tail of the deleted array and the replacement JSON follow below ...]
22.11309, - 22.02022, - 22.06121, - 22.0363, - 22.07602, - 22.02511, - 22.03806, - 22.49011, - 22.08332, - 22.04208, - 22.0424, - 22.02196, - 22.12873, - 22.07355, - 22.39268, - 22.90289, - 22.21884, - 22.05382, - 23.32278, - 22.01646, - 22.04866, - 23.09335, - 22.03294, - 22.05951, - 23.07175, - 22.33506, - 22.13579, - 22.96479, - 22.17044, - 22.06808, - 22.71606, - 22.06192, - 22.2198, - 22.76581, - 22.04501, - 22.07784, - 22.45968, - 22.02073, - 22.06513, - 22.02161, - 22.05107, - 22.01897, - 22.12474, - 22.30654, - 22.05217, - 22.06245, - 22.03632, - 22.05141, - 22.04536, - 22.04668, - 22.07617, - 22.21171, - 22.04614, - 22.03868, - 22.27957, - 22.15533, - 22.10648, - 22.02181, - 22.08012, - 22.11044, - 23.19676, - 22.11926, - 22.36305, - 22.08336, - 22.18096, - 22.12117, - 22.12299, - 22.08193, - 22.06577, - 22.11211, - 22.08488, - 22.50658, - 22.08343, - 22.08416, - 22.10853, - 22.06203, - 22.05712, - 22.13873, - 22.35144, - 22.18615, - 22.0991, - 22.05517, - 22.16001, - 22.04568, - 22.10196, - 22.27976, - 22.04611, - 22.51055, - 22.06527, - 22.25575, - 22.26271, - 22.07975, - 22.08833, - 22.50771, - 22.08065, - 22.03076, - 22.93063, - 22.05803, - 22.04597, - 23.21894, - 22.18984, - 22.37802, - 22.98876, - 22.06177, - 22.30177, - 22.92668, - 22.23802, - 22.0502, - 22.87797 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12.66411, "5": 12.64021, "10": 11.72963, "15": 10.70883, "20": 10.24226, "25": 9.93838, "30": 9.71042, "35": 9.45611, "40": 9.25839, "45": 9.07306, "50": 8.89878, "55": 8.69221, "60": 8.58088, "65": 8.47806, "70": 8.3086, "75": 8.19317, "80": 8.06814, "85": 7.96125, "90": 7.7794, "95": 7.68724, "100": 7.56886, "105": 7.41296, "110": 7.32034, "115": 7.22153, "120": 7.12124, "125": 7.04036, "130": 6.94323, "135": 6.84027, "140": 6.80103, "145": 6.7193, "150": 6.63725, "155": 6.58149, "160": 6.50715, "165": 6.4355, "170": 6.37194, "175": 6.30827, "180": 6.23217, "185": 6.19631, "190": 6.07631, "195": 6.07991, "200": 5.97816, "205": 5.92, "210": 5.90802, "215": 5.81966, "220": 5.75272, "225": 5.71611, "230": 5.65962, "235": 5.63261, "240": 5.5126, "245": 5.51878, "250": 5.45283, "255": 5.41503, "260": 5.36589, "265": 5.32477, "270": 5.29108, "275": 5.26988, "280": 5.20879, "285": 5.18847, "290": 5.12924, "295": 5.10051, "300": 5.06067, "305": 5.04041, "310": 5.00125, "315": 4.95355, "320": 4.90367, "325": 4.90664, "330": 4.88035, "335": 4.84212, "340": 4.82769, "345": 4.80236, "350": 4.7426, "355": 4.73828, "360": 4.64164, "365": 4.64317, "370": 4.65196, "375": 4.64069, "380": 4.5892, "385": 4.5759, "390": 4.50715, "395": 4.492, "400": 4.46359, "405": 4.38816, "410": 4.39796, "415": 4.39008, "420": 4.32878, "425": 4.30737, "430": 4.25774, "435": 4.2308, "440": 4.19181, "445": 4.19218, "450": 4.17901, "455": 4.14874, "460": 4.08716, "465": 4.08221, "470": 4.01645, "475": 4.03527, "480": 4.00754, "485": 3.98316, "490": 3.96761, "495": 3.93743, "500": 3.88443, "505": 3.89242, "510": 3.87446, "515": 3.82429, "520": 3.84473, "525": 3.81867, "530": 3.76356, "535": 3.79118, "540": 3.75097, "545": 3.75095, "550": 3.72171, "555": 3.72512, "560": 3.67878, "565": 3.71357, "570": 3.71852, "575": 3.67244, "580": 3.70812, "585": 3.71058, "590": 3.66302, "595": 3.61888, "600": 3.64151, "605": 3.5873, "610": 3.5862, "615": 3.55484, "620": 3.54643, "625": 3.51212, "630": 3.48139, "635": 3.46239, "640": 3.45605, "645": 3.43952, "650": 3.49027, "655": 3.44519, "660": 3.43842, "665": 3.45182, "670": 3.48254, "675": 
3.42095, "680": 3.44318, "685": 3.41881, "690": 3.42804, "695": 3.40947, "700": 3.41075, "705": 3.37695, "710": 3.37344, "715": 3.37012, "720": 3.35591, "725": 3.32489, "730": 3.31888, "735": 3.31492, "740": 3.32924, "745": 3.34213, "750": 3.35773, "755": 3.32241, "760": 3.32871, "765": 3.32376, "770": 3.30408, "775": 3.32199, "780": 3.30419, "785": 3.30323, "790": 3.25662, "795": 3.23898, "800": 3.275, "805": 3.22782, "810": 3.22636, "815": 3.22581, "820": 3.23497, "825": 3.22089, "830": 3.21916, "835": 3.24535, "840": 3.23402, "845": 3.23567, "850": 3.29257, "855": 3.24736, "860": 3.2387, "865": 3.20784, "870": 3.20865, "875": 3.23421, "880": 3.19783, "885": 3.19245, "890": 3.13723, "895": 3.15447, "900": 3.1439, "905": 3.13103, "910": 3.10493, "915": 3.11855, "920": 3.12498, "925": 3.1334, "930": 3.13156, "935": 3.14156, "940": 3.14732, "945": 3.09229, "950": 3.08955, "955": 3.10987, "960": 3.06949, "965": 3.07022, "970": 3.07431, "975": 3.12594, "980": 3.11256, "985": 3.10389, "990": 3.10572, "995": 3.15864, "1000": 3.12616, "1005": 3.12913, "1010": 3.1023, "1015": 3.08399, "1020": 3.09166, "1025": 3.09559, "1030": 3.08828, "1035": 3.07263, "1040": 3.04487, "1045": 3.01423, "1050": 3.00853, "1055": 3.00811, "1060": 3.03613, "1065": 3.00905, "1070": 3.04255, "1075": 3.04145, "1080": 3.05113, "1085": 3.01414, "1090": 3.02485, "1095": 2.99417, "1100": 3.00272, "1105": 2.9862, "1110": 3.00849, "1115": 2.98831, "1120": 3.00367, "1125": 2.98774, "1130": 2.96967, "1135": 2.9947, "1140": 3.04721, "1145": 3.02743, "1150": 3.01942, "1155": 2.99707, "1160": 2.95055, "1165": 2.9621, "1170": 2.97029, "1175": 2.95284, "1180": 2.93842, "1185": 2.9381, "1190": 2.95507, "1195": 2.9216, "1200": 2.99076, "1205": 3.00849, "1210": 3.01121, "1215": 3.04401, "1220": 3.20816, "1225": 3.04625, "1230": 3.05786, "1235": 3.06214, "1240": 2.97933, "1245": 3.11157, "1250": 3.07464, "1255": 3.05108, "1260": 2.96997, "1265": 2.96255, "1270": 2.9632, "1275": 2.89905, "1280": 2.92489, "1285": 2.90365, "1290": 2.91916, "1295": 2.90429, "1300": 2.90714, "1305": 2.92676, "1310": 2.8971, "1315": 2.87803, "1320": 2.89708, "1325": 2.91254, "1330": 2.85653, "1335": 2.91443, "1340": 2.87133, "1345": 2.90682, "1350": 2.8556, "1355": 2.89722, "1360": 2.93215, "1365": 2.89651, "1370": 2.90563, "1375": 2.862, "1380": 2.87867, "1385": 2.88961, "1390": 2.8682, "1395": 2.88433, "1400": 2.84048, "1405": 2.87542, "1410": 2.8665, "1415": 2.90275, "1420": 2.88737, "1425": 2.88463, "1430": 2.85982, "1435": 2.8297, "1440": 2.8166, "1445": 2.85727, "1450": 2.82409, "1455": 2.85156, "1460": 2.85793, "1465": 2.81153, "1470": 2.84774, "1475": 2.7993, "1480": 2.80416, "1485": 2.82008, "1490": 2.80498, "1495": 2.82359, "1500": 2.83467, "1505": 2.85428, "1510": 2.81199, "1515": 2.83901, "1520": 2.81577, "1525": 2.78343, "1530": 2.79771, "1535": 2.82361, "1540": 2.81201, "1545": 2.83242, "1550": 2.79423, "1555": 2.8356, "1560": 2.82755, "1565": 2.79271, "1570": 2.80031, "1575": 2.79011, "1580": 2.78415, "1585": 2.80245, "1590": 2.77473, "1595": 2.80574, "1600": 2.80713, "1605": 2.95083, "1610": 2.85233, "1615": 2.91338, "1620": 2.90922, "1625": 2.97057, "1630": 3.00526, "1635": 3.10822, "1640": 3.06912, "1645": 3.36609, "1650": 3.05955, "1655": 2.95857, "1660": 2.87221, "1665": 2.81923, "1670": 2.78938, "1675": 2.80592, "1680": 2.77442, "1685": 2.7691, "1690": 2.76308, "1695": 2.75021, "1700": 2.78099, "1705": 2.74739, "1710": 2.76181, "1715": 2.78159, "1720": 2.77978, "1725": 2.75508, "1730": 2.78393, "1735": 2.78744, "1740": 2.76355, "1745": 
2.75727, "1750": 2.77019, "1755": 2.73833, "1760": 2.80449, "1765": 2.75408, "1770": 2.78207, "1775": 2.73778, "1780": 2.75836, "1785": 2.76592, "1790": 2.78281, "1795": 2.74737, "1800": 2.73144, "1805": 2.70391, "1810": 2.76272, "1815": 2.74367, "1820": 2.73923, "1825": 2.74053, "1830": 2.72811, "1835": 2.73009, "1840": 2.7351, "1845": 2.73444, "1850": 2.73503, "1855": 2.73684, "1860": 2.74074, "1865": 2.70737, "1870": 2.73372, "1875": 2.71026, "1880": 2.72026, "1885": 2.73758, "1890": 2.70824, "1895": 2.69402, "1900": 2.73512, "1905": 2.6833, "1910": 2.70278, "1915": 2.71115, "1920": 2.73725, "1925": 2.73979, "1930": 2.73919, "1935": 2.74132, "1940": 2.69451, "1945": 2.70347, "1950": 2.70337, "1955": 2.71781, "1960": 2.7251, "1965": 2.75283, "1970": 2.70863, "1975": 2.70834, "1980": 2.71838, "1985": 2.67842, "1990": 2.65008, "1995": 2.68685, "2000": 2.67756, "2005": 2.70405, "2010": 2.66731, "2015": 2.69047, "2020": 2.67582, "2025": 2.68411, "2030": 2.69769, "2035": 2.66866, "2040": 3.46304, "2045": 2.75699, "2050": 2.74898, "2055": 2.71489, "2060": 2.68255, "2065": 2.68808, "2070": 2.67285, "2075": 2.67145, "2080": 2.67732, "2085": 2.66734, "2090": 2.65806, "2095": 2.67225, "2100": 2.66107, "2105": 2.68477, "2110": 2.70104, "2115": 2.69393, "2120": 2.67222, "2125": 2.67735, "2130": 2.65973, "2135": 2.68453, "2140": 2.6613, "2145": 2.66542, "2150": 2.67253, "2155": 2.64775, "2160": 2.65958, "2165": 2.64856, "2170": 2.66651, "2175": 2.66033, "2180": 2.66566, "2185": 2.6376, "2190": 2.62834, "2195": 2.66177, "2200": 2.64981, "2205": 2.66123, "2210": 2.64346, "2215": 2.64162, "2220": 2.67017, "2225": 2.61983, "2230": 2.6326, "2235": 2.65058, "2240": 2.63798, "2245": 2.63755, "2250": 2.63354, "2255": 2.64266, "2260": 2.64371, "2265": 2.65246, "2270": 2.66468, "2275": 2.65214, "2280": 2.62492, "2285": 2.63533, "2290": 2.65351, "2295": 2.62423, "2300": 2.64546, "2305": 2.66514, "2310": 2.64721, "2315": 2.6835, "2320": 2.67897, "2325": 2.68065, "2330": 2.65328, "2335": 2.64829, "2340": 2.66565, "2345": 2.6284, "2350": 2.62951, "2355": 2.6165, "2360": 2.6277, "2365": 2.61969, "2370": 2.58857, "2375": 2.58489, "2380": 2.62716, "2385": 2.59116, "2390": 2.61877, "2395": 2.66566, "2400": 2.60929, "2405": 2.61286, "2410": 2.60755, "2415": 2.64377, "2420": 2.6309, "2425": 2.63344, "2430": 2.58796, "2435": 2.60278, "2440": 2.6259, "2445": 2.60412, "2450": 2.59229, "2455": 2.61069, "2460": 2.62454, "2465": 2.59015, "2470": 2.57043, "2475": 2.58861, "2480": 2.60488, "2485": 2.59975, "2490": 2.60245, "2495": 2.61672, "2500": 2.60716, "2505": 2.6022, "2510": 2.56936, "2515": 2.59702, "2520": 2.59291, "2525": 2.5744, "2530": 2.58186, "2535": 2.5941, "2540": 2.58194, "2545": 2.59928, "2550": 2.61, "2555": 2.609, "2560": 2.61861, "2565": 2.58962, "2570": 2.60516, "2575": 2.60298, "2580": 2.88239, "2585": 3.32085, "2590": 2.80136, "2595": 2.79183, "2600": 2.79046, "2605": 2.745, "2610": 2.73849, "2615": 2.68268, "2620": 2.62203, "2625": 2.62341, "2630": 2.59442, "2635": 2.56904, "2640": 2.57495, "2645": 2.57681, "2650": 2.56329, "2655": 2.61082, "2660": 2.58442, "2665": 2.59089, "2670": 2.86156, "2675": 2.67747, "2680": 2.58516, "2685": 2.60277, "2690": 2.56928, "2695": 2.58653, "2700": 2.60981, "2705": 2.55784, "2710": 2.57534, "2715": 2.55475, "2720": 2.52398, "2725": 2.55049, "2730": 2.57278, "2735": 2.60023, "2740": 2.55859, "2745": 2.57473, "2750": 2.54103, "2755": 2.59622, "2760": 2.56842, "2765": 2.54671, "2770": 2.57558, "2775": 2.55559, "2780": 2.55887, "2785": 2.55295, "2790": 2.55918, "2795": 
2.56631, "2800": 2.56039, "2805": 2.57411, "2810": 2.5702, "2815": 2.56137, "2820": 2.55714, "2825": 2.54679, "2830": 2.55173, "2835": 2.56152, "2840": 2.54288, "2845": 2.54184, "2850": 2.55055, "2855": 2.53274, "2860": 2.57341, "2865": 2.56224, "2870": 2.53414, "2875": 2.54899, "2880": 2.56973, "2885": 2.56172, "2890": 2.56575, "2895": 5.18896, "2900": 2.65065, "2905": 2.65143, "2910": 2.69529, "2915": 2.61214, "2920": 2.59439, "2925": 2.57066, "2930": 2.54223, "2935": 2.54706, "2940": 2.55689, "2945": 2.54139, "2950": 2.52629, "2955": 2.56811, "2960": 2.52637, "2965": 2.5388, "2970": 2.54384, "2975": 2.54409, "2980": 2.56387, "2985": 2.5541, "2990": 2.57059, "2995": 2.57128, "3000": 2.534, "3005": 2.51732, "3010": 2.54177, "3015": 2.54772, "3020": 2.68067, "3025": 2.63053, "3030": 2.59877, "3035": 2.54805, "3040": 2.56442, "3045": 2.5445, "3050": 2.50745, "3055": 2.49596, "3060": 2.53168, "3065": 2.54038, "3070": 2.52694, "3075": 2.5251, "3080": 2.51083, "3085": 2.50272, "3090": 2.53438, "3095": 2.53602, "3100": 2.51698, "3105": 2.5008, "3110": 2.53155, "3115": 2.48533, "3120": 2.59495, "3125": 2.57236, "3130": 2.56113, "3135": 2.5364, "3140": 2.53198, "3145": 2.53274, "3150": 2.55771, "3155": 2.5181, "3160": 2.5275, "3165": 2.52725, "3170": 2.49897, "3175": 2.50791, "3180": 2.48768, "3185": 2.51598, "3190": 2.51381, "3195": 2.52192, "3200": 2.50424, "3205": 2.50656, "3210": 2.52309, "3215": 2.55745, "3220": 2.52428, "3225": 2.46029, "3230": 2.51554, "3235": 2.52768, "3240": 2.49578, "3245": 2.49449, "3250": 2.48825, "3255": 2.49953, "3260": 2.50688, "3265": 2.49805, "3270": 2.52658, "3275": 2.50685, "3280": 2.47615, "3285": 3.40989, "3290": 2.60072, "3295": 2.5626, "3300": 2.50263, "3305": 2.48974, "3310": 2.49746, "3315": 2.5026, "3320": 2.4833, "3325": 2.51132, "3330": 2.50954, "3335": 2.48464, "3340": 2.49681, "3345": 2.48831, "3350": 2.48874, "3355": 2.49131, "3360": 2.50782, "3365": 2.50568, "3370": 2.48966, "3375": 2.49956, "3380": 2.49002, "3385": 2.46893, "3390": 2.49587, "3395": 2.49785, "3400": 2.4934, "3405": 2.48043, "3410": 2.46752, "3415": 2.471, "3420": 2.48949, "3425": 2.49602, "3430": 2.48545, "3435": 2.50901, "3440": 2.66201, "3445": 2.52961, "3450": 2.48811, "3455": 2.49402, "3460": 2.48017, "3465": 2.49713, "3470": 2.47035, "3475": 2.48991, "3480": 2.46304, "3485": 2.47732, "3490": 2.47833, "3495": 2.48454, "3500": 2.44796, "3505": 2.48546, "3510": 2.49152, "3515": 2.49301, "3520": 2.46373, "3525": 2.50058, "3530": 2.49888, "3535": 2.47654, "3540": 2.50516, "3545": 3.92623, "3550": 2.6424, "3555": 2.68908, "3560": 2.61137, "3565": 2.57258, "3570": 2.51499, "3575": 2.50623, "3580": 2.45953, "3585": 2.50145, "3590": 2.50253, "3595": 2.4964, "3600": 2.46187, "3605": 2.50007, "3610": 2.48991, "3615": 2.46913, "3620": 2.4612, "3625": 2.45776, "3630": 2.46678, "3635": 2.46496, "3640": 2.45956, "3645": 2.48638, "3650": 2.44975, "3655": 2.47279, "3660": 2.47649, "3665": 2.46752, "3670": 2.4857, "3675": 2.60223, "3680": 3.43582, "3685": 2.86781, "3690": 2.63264, "3695": 2.56492, "3700": 2.53013, "3705": 2.50671, "3710": 2.48222, "3715": 2.47653, "3720": 2.46889, "3725": 2.47236, "3730": 2.46586, "3735": 2.46996, "3740": 2.45589, "3745": 2.46144, "3750": 2.45082, "3755": 2.43143, "3760": 2.46168, "3765": 2.44906, "3770": 2.47233, "3775": 2.47305, "3780": 2.42442, "3785": 2.49274, "3790": 2.46094, "3795": 2.45032, "3800": 2.45302, "3805": 2.46456, "3810": 2.45048, "3815": 2.46996, "3820": 2.47262, "3825": 2.47111, "3830": 2.47027, "3835": 2.47452, "3840": 2.46217, "3845": 
2.44546, "3850": 2.459, "3855": 2.49795, "3860": 2.46945, "3865": 2.45576, "3870": 2.43294, "3875": 2.43994, "3880": 2.44112, "3885": 2.46364, "3890": 2.4399, "3895": 2.44225, "3900": 2.49675, "3905": 2.50779, "3910": 2.46379, "3915": 2.48268, "3920": 2.4625, "3925": 2.39091, "3930": 2.43662, "3935": 2.41674, "3940": 2.43467, "3945": 2.45007, "3950": 2.45455, "3955": 2.42626, "3960": 2.43755, "3965": 2.46886, "3970": 2.44968, "3975": 2.42788, "3980": 2.42236, "3985": 2.44671, "3990": 2.44998, "3995": 2.43001, "4000": 2.46362, "4005": 2.40651, "4010": 2.42988, "4015": 2.45371, "4020": 2.41933, "4025": 2.44769, "4030": 2.43013, "4035": 2.43137, "4040": 2.42707, "4045": 2.43891, "4050": 2.41075, "4055": 2.51468, "4060": 2.49015, "4065": 2.44674, "4070": 2.42051, "4075": 2.41874, "4080": 2.43459, "4085": 2.4213, "4090": 2.43379, "4095": 2.45209, "4100": 2.42664, "4105": 2.41994, "4110": 2.40856, "4115": 2.42049, "4120": 2.4172, "4125": 2.43249, "4130": 2.4161, "4135": 2.42639, "4140": 2.38782, "4145": 2.42087, "4150": 2.39396, "4155": 2.40325, "4160": 2.40844, "4165": 2.37812, "4170": 2.40117, "4175": 2.40833, "4180": 2.41148, "4185": 2.40585, "4190": 2.41652, "4195": 2.3896, "4200": 2.42309, "4205": 2.43802, "4210": 2.41251, "4215": 2.4119, "4220": 2.43019, "4225": 2.42548, "4230": 2.40676, "4235": 2.42793, "4240": 2.39325, "4245": 2.41642, "4250": 2.40749, "4255": 2.41865, "4260": 2.4014, "4265": 2.4104, "4270": 2.4071, "4275": 2.37913, "4280": 2.42263, "4285": 2.38924, "4290": 2.39564, "4295": 2.40078, "4300": 2.3887, "4305": 2.40657, "4310": 2.39325, "4315": 2.41025, "4320": 2.3831, "4325": 2.37438, "4330": 2.41042, "4335": 2.3954, "4340": 2.39839, "4345": 2.41619, "4350": 2.40115, "4355": 2.4174, "4360": 2.38934, "4365": 2.3975, "4370": 2.39148, "4375": 2.40125, "4380": 2.38571, "4385": 2.40878, "4390": 2.37725, "4395": 2.37478, "4400": 2.38704, "4405": 2.38906, "4410": 2.36636, "4415": 2.38518, "4420": 2.35116, "4425": 2.40453, "4430": 2.42424, "4435": 2.38007, "4440": 2.39056, "4445": 2.37843, "4450": 2.38105, "4455": 2.374, "4460": 2.38257, "4465": 2.38743, "4470": 2.38028, "4475": 2.37771, "4480": 2.35806, "4485": 2.39168, "4490": 2.37519, "4495": 2.36724, "4500": 2.36132, "4505": 2.62198, "4510": 2.53822, "4515": 2.45031, "4520": 2.43555, "4525": 2.41994, "4530": 2.41931, "4535": 2.38755, "4540": 2.3849, "4545": 2.36328, "4550": 2.37083, "4555": 2.35698, "4560": 2.376, "4565": 2.37108, "4570": 2.38464, "4575": 2.3634, "4580": 2.38035, "4585": 2.36866, "4590": 2.38091, "4595": 2.3875, "4600": 2.40077, "4605": 2.38068, "4610": 2.3496, "4615": 2.32709, "4620": 2.36329, "4625": 2.33937, "4630": 2.36933, "4635": 2.35598, "4640": 2.37701, "4645": 2.36855, "4650": 2.33989, "4655": 2.36305, "4660": 2.3474, "4665": 2.3612, "4670": 2.37569, "4675": 2.38639, "4680": 2.33108, "4685": 2.37838, "4690": 2.35403, "4695": 2.35517, "4700": 2.34631, "4705": 2.36576, "4710": 2.36294, "4715": 2.34026, "4720": 2.36003, "4725": 2.36801, "4730": 2.36611, "4735": 2.36296, "4740": 2.35448, "4745": 2.37154, "4750": 2.36716, "4755": 2.36863, "4760": 2.3472, "4765": 2.3426, "4770": 2.33464, "4775": 2.3593, "4780": 2.35764, "4785": 2.33139, "4790": 2.35176, "4795": 2.33673, "4800": 2.34524, "4805": 2.35392, "4810": 2.34607, "4815": 2.33645, "4820": 2.33833, "4825": 2.36905, "4830": 2.3581, "4835": 2.33999, "4840": 2.33254, "4845": 2.7087, "4850": 2.37493, "4855": 2.36512, "4860": 2.3598, "4865": 2.3521, "4870": 2.32714, "4875": 2.32685, "4880": 2.34034, "4885": 2.34418, "4890": 2.34584, "4895": 2.30389, "4900": 
2.3518, "4905": 2.31818, "4910": 2.3327, "4915": 2.33861, "4920": 2.34912, "4925": 2.34413, "4930": 2.31879, "4935": 2.33329, "4940": 2.34506, "4945": 2.31692, "4950": 2.35193, "4955": 2.33122, "4960": 2.34357, "4965": 2.35032, "4970": 2.31873, "4975": 2.3305, "4980": 2.32914, "4985": 2.35472, "4990": 2.35834, "4995": 2.30177, "5000": 2.34831, "5005": 2.35402, "5010": 2.32683, "5015": 2.31401, "5020": 2.35466, "5025": 2.32447, "5030": 2.34618, "5035": 2.33856, "5040": 2.33919, "5045": 2.31932, "5050": 2.33646, "5055": 2.34031, "5060": 2.32703, "5065": 2.34463, "5070": 2.34774, "5075": 2.32854, "5080": 2.32584, "5085": 2.31683, "5090": 2.32877, "5095": 2.31126, "5100": 2.3098, "5105": 2.32516, "5110": 2.3338, "5115": 2.31797, "5120": 2.31603, "5125": 2.29935, "5130": 2.32473, "5135": 2.33008, "5140": 2.32878, "5145": 2.33524, "5150": 2.33357, "5155": 2.32467, "5160": 2.33658, "5165": 2.31726, "5170": 2.32477, "5175": 2.32454, "5180": 2.31847, "5185": 2.29949, "5190": 2.29014, "5195": 2.33971, "5200": 2.32046, "5205": 2.30188, "5210": 2.34015, "5215": 2.30099, "5220": 2.32183, "5225": 2.28844, "5230": 2.31504, "5235": 2.30541, "5240": 2.32579, "5245": 2.31129, "5250": 2.28012, "5255": 2.32198, "5260": 2.29313, "5265": 2.31005, "5270": 2.31157, "5275": 2.33757, "5280": 2.34018, "5285": 2.32977, "5290": 2.32329, "5295": 2.31046, "5300": 2.33701, "5305": 2.28623, "5310": 2.31842, "5315": 2.29807, "5320": 2.28257, "5325": 2.34686, "5330": 2.32018, "5335": 2.32145, "5340": 2.33313, "5345": 2.31853, "5350": 2.29056, "5355": 2.32015, "5360": 2.34699, "5365": 2.32684, "5370": 2.31858, "5375": 2.33663, "5380": 2.30587, "5385": 2.30209, "5390": 2.28651, "5395": 2.26938, "5400": 2.30877, "5405": 2.29218, "5410": 2.30422, "5415": 2.32161, "5420": 2.32497, "5425": 2.31964, "5430": 2.30829, "5435": 2.32869, "5440": 2.29023, "5445": 2.28093, "5450": 2.3056, "5455": 2.32326, "5460": 2.28401, "5465": 2.30581, "5470": 2.3075, "5475": 2.30979, "5480": 2.3059, "5485": 2.32117, "5490": 2.2976, "5495": 2.30316, "5500": 2.28491, "5505": 2.30329, "5510": 2.28627, "5515": 2.26883, "5520": 2.27267, "5525": 2.28411, "5530": 2.293, "5535": 2.26747, "5540": 2.27729, "5545": 2.3057, "5550": 2.30365, "5555": 2.31692, "5560": 2.27919, "5565": 2.31494, "5570": 2.2673, "5575": 2.29172, "5580": 2.29239, "5585": 2.29669, "5590": 2.28745, "5595": 2.29328, "5600": 2.29602, "5605": 2.30327, "5610": 2.27945, "5615": 2.26993, "5620": 2.28622, "5625": 2.26019, "5630": 2.28122, "5635": 2.27661, "5640": 2.28876, "5645": 2.30688, "5650": 2.28778, "5655": 2.26885, "5660": 2.27678, "5665": 2.28038, "5670": 2.34546, "5675": 2.3046, "5680": 2.28511, "5685": 2.31067, "5690": 2.28674, "5695": 2.3069, "5700": 2.27677, "5705": 2.26272, "5710": 2.30592, "5715": 2.26306, "5720": 2.24506, "5725": 2.29126, "5730": 2.29557, "5735": 2.26091, "5740": 2.27497, "5745": 2.29267, "5750": 2.31244, "5755": 2.29322, "5760": 2.27452, "5765": 2.293, "5770": 2.25502, "5775": 2.32546, "5780": 2.31493, "5785": 2.28048, "5790": 2.28447, "5795": 2.24472, "5800": 2.25724, "5805": 2.27624, "5810": 2.26445, "5815": 2.27368, "5820": 2.25596, "5825": 2.27053, "5830": 2.29028, "5835": 2.2912, "5840": 2.29496, "5845": 2.2638, "5850": 2.29487, "5855": 2.29865, "5860": 2.27004, "5865": 2.29106, "5870": 2.2716, "5875": 2.28325, "5880": 2.26452, "5885": 2.30113, "5890": 2.2543, "5895": 2.26808, "5900": 2.29177, "5905": 2.26388, "5910": 2.27379, "5915": 2.28101, "5920": 2.25829, "5925": 2.29122, "5930": 2.28616, "5935": 2.26931, "5940": 2.26338, "5945": 2.28182, "5950": 
2.26878, "5955": 2.26417, "5960": 2.27047, "5965": 2.29714, "5970": 2.27237, "5975": 2.28559, "5980": 2.27539, "5985": 2.27498, "5990": 2.25432, "5995": 2.27338, "6000": 2.24259, "6005": 2.26539, "6010": 2.27524, "6015": 2.29345, "6020": 2.25137, "6025": 2.27606, "6030": 2.25772, "6035": 2.26826, "6040": 2.29626, "6045": 2.26125, "6050": 2.27631, "6055": 2.26726, "6060": 2.28857, "6065": 2.28428, "6070": 2.25218, "6075": 2.23101, "6080": 2.26479, "6085": 2.26846, "6090": 2.30281, "6095": 2.22107, "6100": 2.25528, "6105": 2.23911, "6110": 2.23974, "6115": 2.24889, "6120": 2.28424, "6125": 2.27514, "6130": 2.24631, "6135": 2.28306, "6140": 2.24779, "6145": 2.254, "6150": 2.24351, "6155": 2.25007, "6160": 2.26422, "6165": 2.27728, "6170": 2.21334, "6175": 2.25901, "6180": 2.23384, "6185": 2.25715, "6190": 2.26051, "6195": 2.2722, "6200": 2.24689, "6205": 2.24084, "6210": 2.28207, "6215": 2.28134, "6220": 2.26501, "6225": 2.25367, "6230": 2.21663, "6235": 2.26026, "6240": 2.26855, "6245": 2.27675, "6250": 2.25827, "6255": 2.23931, "6260": 2.24925, "6265": 2.24656, "6270": 2.24888, "6275": 2.2634, "6280": 2.25306, "6285": 2.2658, "6290": 2.25072, "6295": 2.2584, "6300": 2.26539, "6305": 2.22623, "6310": 2.26737, "6315": 2.25489, "6320": 2.22879, "6325": 2.26154, "6330": 2.24244, "6335": 2.25723, "6340": 2.26416, "6345": 2.24803, "6350": 2.25271, "6355": 2.24771, "6360": 2.26862, "6365": 2.25908, "6370": 2.25409, "6375": 2.27216, "6380": 2.23715, "6385": 2.23521, "6390": 2.24208, "6395": 2.23548, "6400": 2.22983, "6405": 2.25443, "6410": 2.25017, "6415": 2.2435, "6420": 2.23513, "6425": 2.24672, "6430": 2.24541, "6435": 2.29874, "6440": 2.24434, "6445": 2.2518, "6450": 2.26925, "6455": 2.23621, "6460": 2.26422, "6465": 2.24587, "6470": 2.25632, "6475": 2.2354, "6480": 2.22455, "6485": 2.25072, "6490": 2.25049, "6495": 2.23817, "6500": 2.24094, "6505": 2.24158, "6510": 2.22196, "6515": 2.24905, "6520": 2.23711, "6525": 2.2318, "6530": 2.24804, "6535": 2.26136, "6540": 2.26971, "6545": 2.24002, "6550": 2.22423, "6555": 2.2402, "6560": 2.27131, "6565": 2.23258, "6570": 2.22282, "6575": 2.2636, "6580": 2.24274, "6585": 2.22997, "6590": 2.2312, "6595": 2.24078, "6600": 2.25623, "6605": 2.24197, "6610": 2.22924, "6615": 2.22421, "6620": 2.25234, "6625": 2.24343, "6630": 2.22848, "6635": 2.23694, "6640": 2.25568, "6645": 2.23207, "6650": 2.23458, "6655": 2.22856, "6660": 2.23924, "6665": 2.2257, "6670": 2.23719, "6675": 2.22311, "6680": 2.2394, "6685": 2.23981, "6690": 2.23688, "6695": 2.24384, "6700": 2.23461, "6705": 2.21195, "6710": 2.24923, "6715": 2.25071, "6720": 2.24699, "6725": 2.23032, "6730": 2.23115, "6735": 2.20404, "6740": 2.23106, "6745": 2.23481, "6750": 2.23322, "6755": 2.20884, "6760": 2.22681, "6765": 2.25526, "6770": 2.2374, "6775": 2.21959, "6780": 2.20524, "6785": 2.21713, "6790": 2.23309, "6795": 2.24265, "6800": 2.24987, "6805": 2.23491, "6810": 2.25977, "6815": 2.22207, "6820": 2.22199, "6825": 2.22894, "6830": 2.19656, "6835": 2.25614, "6840": 2.22364, "6845": 2.22305, "6850": 2.24242, "6855": 2.21989, "6860": 2.2285, "6865": 2.24999, "6870": 2.21648, "6875": 2.23819, "6880": 2.21614, "6885": 2.23388, "6890": 2.24214, "6895": 2.21377, "6900": 2.22042, "6905": 2.21198, "6910": 2.22168, "6915": 2.20143, "6920": 2.25627, "6925": 2.20957, "6930": 2.20398, "6935": 2.23827, "6940": 2.2067, "6945": 2.22553, "6950": 2.23839, "6955": 2.19847, "6960": 2.22537, "6965": 2.2221, "6970": 2.23073, "6975": 2.19702, "6980": 2.19732, "6985": 2.21343, "6990": 2.21535, "6995": 2.23899, "7000": 
2.22649, "7005": 2.21575, "7010": 2.19951, "7015": 2.20103, "7020": 2.24653, "7025": 2.20735, "7030": 2.21726, "7035": 2.20436, "7040": 2.21282, "7045": 2.21548, "7050": 2.21942, "7055": 2.21918, "7060": 2.20985, "7065": 2.19177, "7070": 2.20371, "7075": 2.19572, "7080": 2.21551, "7085": 2.23142, "7090": 2.22323, "7095": 2.21912, "7100": 2.23174, "7105": 2.22333, "7110": 2.22842, "7115": 2.20397, "7120": 2.21585, "7125": 2.20055, "7130": 2.22096, "7135": 2.17236, "7140": 2.19515, "7145": 2.21438, "7150": 2.23649, "7155": 2.2074, "7160": 2.18943, "7165": 2.21145, "7170": 2.19399, "7175": 2.1919, "7180": 2.24021, "7185": 2.18574, "7190": 2.22709, "7195": 2.21223, "7200": 2.2138, "7205": 2.20628, "7210": 2.20584, "7215": 2.19088, "7220": 2.2152, "7225": 2.1892, "7230": 2.21665, "7235": 2.17803, "7240": 2.23287, "7245": 2.22462, "7250": 2.23931, "7255": 2.1976, "7260": 2.17724, "7265": 2.21967, "7270": 2.18669, "7275": 2.20589, "7280": 2.19714, "7285": 2.19885, "7290": 2.20352, "7295": 2.19497, "7300": 2.20837, "7305": 2.20803, "7310": 2.20296, "7315": 2.38478, "7320": 2.23433, "7325": 2.20637, "7330": 2.20872, "7335": 2.22345, "7340": 2.19489, "7345": 2.20591, "7350": 2.20293, "7355": 2.18942, "7360": 2.19126, "7365": 2.19491, "7370": 2.19538, "7375": 2.18237, "7380": 2.21254, "7385": 2.19603, "7390": 2.22774, "7395": 2.20451, "7400": 2.20544, "7405": 2.19915, "7410": 2.15846, "7415": 2.19826, "7420": 2.20345, "7425": 2.19704, "7430": 2.20806, "7435": 2.22366, "7440": 2.22473, "7445": 2.18866, "7450": 2.18828, "7455": 2.19292, "7460": 2.17218, "7465": 2.19004, "7470": 2.17279, "7475": 2.17416, "7480": 2.19653, "7485": 2.19067, "7490": 2.1959, "7495": 2.20813, "7500": 2.20931, "7505": 2.21014, "7510": 2.16182, "7515": 2.19388, "7520": 2.19852, "7525": 2.20383, "7530": 2.20184, "7535": 2.19659, "7540": 2.18408, "7545": 2.1868, "7550": 2.21179, "7555": 2.19503, "7560": 2.20841, "7565": 2.19256, "7570": 2.18081, "7575": 2.20343, "7580": 2.17773, "7585": 2.23031, "7590": 2.19092, "7595": 2.18066, "7600": 2.1832, "7605": 2.17203, "7610": 2.21945, "7615": 2.20124, "7620": 2.20764, "7625": 2.20396, "7630": 2.17773, "7635": 2.20572, "7640": 2.19348, "7645": 2.19965, "7650": 2.19784, "7655": 2.21598, "7660": 2.20179, "7665": 2.17899, "7670": 2.19029, "7675": 2.19667, "7680": 2.19748, "7685": 2.18028, "7690": 2.20117, "7695": 2.18252, "7700": 2.16228, "7705": 2.18706, "7710": 2.17674, "7715": 2.18712, "7720": 2.16278, "7725": 2.17531, "7730": 2.21444, "7735": 2.18214, "7740": 2.2033, "7745": 2.19704, "7750": 2.18667, "7755": 2.19594, "7760": 2.17697, "7765": 2.21328, "7770": 2.21296, "7775": 2.19982, "7780": 2.19919, "7785": 2.19276, "7790": 2.16406, "7795": 2.20359, "7800": 2.21612, "7805": 2.20079, "7810": 2.18841, "7815": 2.202, "7820": 2.19015, "7825": 2.20653, "7830": 2.17691, "7835": 2.20106, "7840": 2.19519, "7845": 2.20114, "7850": 2.21167, "7855": 2.16594, "7860": 2.18579, "7865": 2.17975, "7870": 2.18494, "7875": 2.17006, "7880": 2.19726, "7885": 2.17135, "7890": 2.17732, "7895": 2.16038, "7900": 2.19028, "7905": 2.17672, "7910": 2.16472, "7915": 2.18171, "7920": 2.19923, "7925": 2.17887, "7930": 2.17281, "7935": 2.18484, "7940": 2.1593, "7945": 2.17469, "7950": 2.17892, "7955": 2.18226, "7960": 2.1753, "7965": 2.17215, "7970": 2.18389, "7975": 2.21293, "7980": 2.21028, "7985": 2.16018, "7990": 2.18201, "7995": 2.18109, "8000": 2.1892, "8005": 2.18642, "8010": 2.16101, "8015": 2.17045, "8020": 2.15121, "8025": 2.17404, "8030": 2.19271, "8035": 2.20098, "8040": 2.18594, "8045": 2.19425, "8050": 
2.17269, "8055": 2.18993, "8060": 2.18663, "8065": 2.18035, "8070": 2.18944, "8075": 2.17984, "8080": 2.16906, "8085": 2.18389, "8090": 2.21737, "8095": 2.17874, "8100": 2.17991, "8105": 2.19371, "8110": 2.17184, "8115": 2.19124, "8120": 2.1694, "8125": 2.14537, "8130": 2.17523, "8135": 2.18904, "8140": 2.19372, "8145": 2.19681, "8150": 2.15935, "8155": 2.18163, "8160": 2.17959, "8165": 2.17193, "8170": 2.15792, "8175": 2.18882, "8180": 2.16874, "8185": 2.19883, "8190": 2.18377, "8195": 2.17753, "8200": 2.17725, "8205": 2.18541, "8210": 2.16295, "8215": 2.17804, "8220": 2.15842, "8225": 2.14955, "8230": 2.16858, "8235": 2.16288, "8240": 2.15204, "8245": 2.15781, "8250": 2.18562, "8255": 2.20094, "8260": 2.15461, "8265": 2.18522, "8270": 2.17842, "8275": 2.1738, "8280": 2.19082, "8285": 2.16486, "8290": 2.17509, "8295": 2.19122, "8300": 2.19853, "8305": 2.16002, "8310": 2.15581, "8315": 2.18074, "8320": 2.16662, "8325": 2.1773, "8330": 2.16891, "8335": 2.1622, "8340": 2.1517, "8345": 2.13725, "8350": 2.14203, "8355": 2.18428, "8360": 2.18782, "8365": 2.15801, "8370": 2.16538, "8375": 2.1672, "8380": 2.17771, "8385": 2.1775, "8390": 2.16945, "8395": 2.15728, "8400": 2.18275, "8405": 2.16928, "8410": 2.18316, "8415": 2.19337, "8420": 2.16647, "8425": 2.15634, "8430": 2.15846, "8435": 2.14069, "8440": 2.18021, "8445": 2.18083, "8450": 2.17201, "8455": 2.18718, "8460": 2.17766, "8465": 2.17184, "8470": 2.16609, "8475": 2.15621, "8480": 2.14864, "8485": 2.1832, "8490": 2.16292, "8495": 2.17199, "8500": 2.14306, "8505": 2.15086, "8510": 2.40544, "8515": 2.17626, "8520": 2.18909, "8525": 2.18052, "8530": 2.16597, "8535": 2.19802, "8540": 2.17643, "8545": 2.1466, "8550": 2.16009, "8555": 2.16541, "8560": 2.15312, "8565": 2.17698, "8570": 2.16311, "8575": 2.16126, "8580": 2.15864, "8585": 2.17383, "8590": 2.16669, "8595": 2.14619, "8600": 2.15335, "8605": 2.14443, "8610": 2.16923, "8615": 2.17841, "8620": 2.17112, "8625": 2.14906, "8630": 2.15811, "8635": 2.14874, "8640": 2.16039, "8645": 2.1569, "8650": 2.1438, "8655": 2.15589, "8660": 2.15017, "8665": 2.13261, "8670": 2.16722, "8675": 2.15393, "8680": 2.15241, "8685": 2.15387, "8690": 2.19956, "8695": 2.15789, "8700": 2.14051, "8705": 2.16632, "8710": 2.14455, "8715": 2.16025, "8720": 2.1535, "8725": 2.15995, "8730": 2.15498, "8735": 2.15924, "8740": 2.17585, "8745": 2.15231, "8750": 2.1523, "8755": 2.13979, "8760": 2.15555, "8765": 2.18117, "8770": 2.16231, "8775": 2.14311, "8780": 2.16902, "8785": 2.16914, "8790": 2.14296, "8795": 2.13497, "8800": 2.15907, "8805": 2.15523, "8810": 2.15384, "8815": 2.1366, "8820": 2.16817, "8825": 2.16262, "8830": 2.15158, "8835": 2.14656, "8840": 2.16677, "8845": 2.16763, "8850": 2.12785, "8855": 2.14542, "8860": 2.18006, "8865": 2.16993, "8870": 2.17595, "8875": 2.17533, "8880": 2.17575, "8885": 2.16277, "8890": 2.1391, "8895": 2.13945, "8900": 2.14162, "8905": 2.16739, "8910": 2.16303, "8915": 2.13261, "8920": 2.15314, "8925": 2.13349, "8930": 2.16121, "8935": 2.16375, "8940": 2.14626, "8945": 2.14505, "8950": 2.13894, "8955": 2.1571, "8960": 2.15631, "8965": 2.13122, "8970": 2.15212, "8975": 2.14731, "8980": 2.12974, "8985": 2.14061, "8990": 2.17354, "8995": 2.15007, "9000": 2.16716, "9005": 2.1758, "9010": 2.15193, "9015": 2.1473, "9020": 2.13851, "9025": 2.13605, "9030": 2.13141, "9035": 2.153, "9040": 2.14391, "9045": 2.15375, "9050": 2.16372, "9055": 2.15204, "9060": 2.13067, "9065": 2.12643, "9070": 2.1418, "9075": 2.17211, "9080": 2.14376, "9085": 2.16601, "9090": 2.15978, "9095": 2.16227, "9100": 
2.1561, "9105": 2.16983, "9110": 2.14221, "9115": 2.15533, "9120": 2.12411, "9125": 2.17324, "9130": 2.14848, "9135": 2.14552, "9140": 2.14371, "9145": 2.14305, "9150": 2.15654, "9155": 2.15405, "9160": 2.15271, "9165": 2.13091, "9170": 2.1321, "9175": 2.13651, "9180": 2.14292, "9185": 2.1555, "9190": 2.14213, "9195": 2.14838, "9200": 2.15965, "9205": 2.1421, "9210": 2.17341, "9215": 2.14487, "9220": 2.15022, "9225": 2.16013, "9230": 2.16032, "9235": 2.11161, "9240": 2.15261, "9245": 2.13998, "9250": 2.12777, "9255": 2.1562, "9260": 2.16673, "9265": 2.14301, "9270": 2.13117, "9275": 2.15508, "9280": 2.14862, "9285": 2.14664, "9290": 2.17344, "9295": 2.13981, "9300": 2.13657, "9305": 2.13738, "9310": 2.14166, "9315": 2.15453, "9320": 2.13955, "9325": 2.1295, "9330": 2.14111, "9335": 2.11403, "9340": 2.15517, "9345": 2.10941, "9350": 2.14826, "9355": 2.12995, "9360": 2.12968, "9365": 2.15884, "9370": 2.13539, "9375": 2.10821, "9380": 2.15308, "9385": 2.12623, "9390": 2.11203, "9395": 2.13811, "9400": 2.13999, "9405": 2.13692, "9410": 2.13415, "9415": 2.13145, "9420": 2.12957, "9425": 2.12476, "9430": 2.16007, "9435": 2.13128, "9440": 2.13203, "9445": 2.13647, "9450": 2.12894, "9455": 2.15364, "9460": 2.12645, "9465": 2.12132, "9470": 2.14967, "9475": 2.14732, "9480": 2.14033, "9485": 2.15122, "9490": 2.12515, "9495": 2.13101, "9500": 2.15846, "9505": 2.14737, "9510": 2.11252, "9515": 2.11819, "9520": 2.15371, "9525": 2.08714, "9530": 2.13661, "9535": 2.14792, "9540": 2.10598, "9545": 2.12899, "9550": 2.14059, "9555": 2.11601, "9560": 2.13989, "9565": 2.14268, "9570": 2.13548, "9575": 2.14891, "9580": 2.13441, "9585": 2.12421, "9590": 2.16832, "9595": 2.15196, "9600": 2.15095, "9605": 2.15385, "9610": 2.14506, "9615": 2.14474, "9620": 2.14694, "9625": 2.13747, "9630": 2.13865, "9635": 2.14533, "9640": 2.12638, "9645": 2.12443, "9650": 2.11853, "9655": 2.11948, "9660": 2.1355, "9665": 2.13388, "9670": 2.13934, "9675": 2.12837, "9680": 2.12723, "9685": 2.13779, "9690": 2.15535, "9695": 2.15998, "9700": 2.14392, "9705": 2.12487, "9710": 2.12613, "9715": 2.10271, "9720": 2.11462, "9725": 2.12771, "9730": 2.09509, "9735": 2.14763, "9740": 2.16734, "9745": 2.14303, "9750": 2.15335, "9755": 2.13509, "9760": 2.12693, "9765": 2.12637, "9770": 2.11478, "9775": 2.14082, "9780": 2.15019, "9785": 2.11575, "9790": 2.12827, "9795": 2.10666, "9800": 2.13251, "9805": 2.12986, "9810": 2.11097, "9815": 2.12741, "9820": 2.12029, "9825": 2.10187, "9830": 2.1271, "9835": 2.14742, "9840": 2.13603, "9845": 2.14962, "9850": 2.16489, "9855": 2.11376, "9860": 2.10995, "9865": 2.13277, "9870": 2.12612, "9875": 2.13938, "9880": 2.14343, "9885": 2.12054, "9890": 2.12974, "9895": 2.13763, "9900": 2.11855, "9905": 2.11434, "9910": 2.11583, "9915": 2.14415, "9920": 2.14113, "9925": 2.11161, "9930": 2.1216, "9935": 2.13515, "9940": 2.11477, "9945": 2.10234, "9950": 2.10529, "9955": 2.1282, "9960": 2.10325, "9965": 2.11869, "9970": 2.12123, "9975": 2.12523, "9980": 2.13823, "9985": 2.11988, "9990": 2.09177, "9995": 2.11342, "10000": 2.13854, "10005": 2.12194, "10010": 2.10386, "10015": 2.13136, "10020": 2.10611, "10025": 2.13059, "10030": 2.12256, "10035": 2.11022, "10040": 2.12001, "10045": 2.09271, "10050": 2.10158, "10055": 2.14922, "10060": 2.13514, "10065": 2.11771, "10070": 2.14508, "10075": 2.13156, "10080": 2.1296, "10085": 2.12016, "10090": 2.11532, "10095": 2.1227, "10100": 2.09999, "10105": 2.11193, "10110": 2.139, "10115": 2.12373, "10120": 2.13098, "10125": 2.13424, "10130": 2.10839, "10135": 2.12904, "10140": 
2.12923, "10145": 2.12979, "10150": 2.1142, "10155": 2.11039, "10160": 2.13252, "10165": 2.13688, "10170": 2.12788, "10175": 2.11398, "10180": 2.11675, "10185": 2.10472, "10190": 2.12969, "10195": 2.11586, "10200": 2.11639, "10205": 2.1001, "10210": 2.10978, "10215": 2.11731, "10220": 2.11225, "10225": 2.13302, "10230": 2.12012, "10235": 2.12515, "10240": 2.11928, "10245": 2.15379, "10250": 2.11987, "10255": 2.11534, "10260": 2.14185, "10265": 2.12446, "10270": 2.1184, "10275": 2.1146, "10280": 2.11167, "10285": 2.1239, "10290": 2.12601, "10295": 2.10491, "10300": 2.12304, "10305": 2.11449, "10310": 2.14741, "10315": 2.12113, "10320": 2.10633, "10325": 2.11069, "10330": 2.13972, "10335": 2.10647, "10340": 2.1045, "10345": 2.10764, "10350": 2.12491, "10355": 2.1352, "10360": 2.1122, "10365": 2.11747, "10370": 2.12142, "10375": 2.13044, "10380": 2.12833, "10385": 2.12995, "10390": 2.1269, "10395": 2.11774, "10400": 2.11681, "10405": 2.1192, "10410": 2.10153, "10415": 2.10683, "10420": 2.11452, "10425": 2.10974, "10430": 2.13645, "10435": 2.14063, "10440": 2.12928, "10445": 2.12476, "10450": 2.12131, "10455": 2.10256, "10460": 2.12457, "10465": 2.09785, "10470": 2.12933, "10475": 2.11503, "10480": 2.11948, "10485": 2.10483, "10490": 2.11534, "10495": 2.12882, "10500": 2.1164, "10505": 2.12127, "10510": 2.09067, "10515": 2.14095, "10520": 2.09181, "10525": 2.11171, "10530": 2.13753, "10535": 2.112, "10540": 2.10794, "10545": 2.09618, "10550": 2.0725, "10555": 2.1134, "10560": 2.13036, "10565": 2.12685, "10570": 2.10164, "10575": 2.12433, "10580": 2.1054, "10585": 2.12781, "10590": 2.10763, "10595": 2.13329, "10600": 2.13198, "10605": 2.12557, "10610": 2.11103, "10615": 2.09926, "10620": 2.11657, "10625": 2.11343, "10630": 2.11784, "10635": 2.12668, "10640": 2.09483, "10645": 2.11929, "10650": 2.0958, "10655": 2.11777, "10660": 2.11025, "10665": 2.09562, "10670": 2.08843, "10675": 2.15729, "10680": 2.11501, "10685": 2.12138, "10690": 2.10552, "10695": 2.10981, "10700": 2.12918, "10705": 2.1035, "10710": 2.12367, "10715": 2.10404, "10720": 2.08926, "10725": 2.1013, "10730": 2.09302, "10735": 2.08157, "10740": 2.12055, "10745": 2.11085, "10750": 2.09314, "10755": 2.10292, "10760": 2.0934, "10765": 2.10961, "10770": 2.09886, "10775": 2.11735, "10780": 2.10149, "10785": 2.11622, "10790": 2.08609, "10795": 2.12986, "10800": 2.10496, "10805": 2.10998, "10810": 2.08895, "10815": 2.12045, "10820": 2.08622, "10825": 2.1245, "10830": 2.12459, "10835": 2.0965, "10840": 2.09287, "10845": 2.10734, "10850": 2.09932, "10855": 2.08237, "10860": 2.10712, "10865": 2.10737, "10870": 2.11395, "10875": 2.11475, "10880": 2.11405, "10885": 2.10514, "10890": 2.12544, "10895": 2.09941, "10900": 2.09874, "10905": 2.11984, "10910": 2.10836, "10915": 2.09846, "10920": 2.06549, "10925": 2.11073, "10930": 2.1005, "10935": 2.09634, "10940": 2.11947, "10945": 2.11952, "10950": 2.11216, "10955": 2.12539, "10960": 2.12306, "10965": 2.07728, "10970": 2.10393, "10975": 2.1185, "10980": 2.09614, "10985": 2.06279, "10990": 2.08723, "10995": 2.08527, "11000": 2.08759, "11005": 2.13725, "11010": 2.12389, "11015": 2.10074, "11020": 2.09983, "11025": 2.08648, "11030": 2.08756, "11035": 2.12011, "11040": 2.07672, "11045": 2.078, "11050": 2.1185, "11055": 2.12474, "11060": 2.08932, "11065": 2.12016, "11070": 2.11833, "11075": 2.07795, "11080": 2.10216, "11085": 2.11298, "11090": 2.09516, "11095": 2.09868, "11100": 2.12603, "11105": 2.12075, "11110": 2.10547, "11115": 2.09182, "11120": 2.11064, "11125": 2.09215, "11130": 2.10079, "11135": 
2.12277, "11140": 2.13543, "11145": 2.11233, "11150": 2.10877, "11155": 2.11897, "11160": 2.09914, "11165": 2.10918, "11170": 2.11092, "11175": 2.09923, "11180": 2.07321, "11185": 2.10272, "11190": 2.11064, "11195": 2.1193, "11200": 2.13959, "11205": 2.10032, "11210": 2.09674, "11215": 2.09411, "11220": 2.1163, "11225": 2.09655, "11230": 2.09582, "11235": 2.12403, "11240": 2.06841, "11245": 2.1119, "11250": 2.12269, "11255": 2.11941, "11260": 2.11145, "11265": 2.10581, "11270": 2.1229, "11275": 2.09478, "11280": 2.08966, "11285": 2.1198, "11290": 2.10135, "11295": 2.09891, "11300": 2.112, "11305": 2.07537, "11310": 2.10706, "11315": 2.09722, "11320": 2.10158, "11325": 2.11103, "11330": 2.09939, "11335": 2.10801, "11340": 2.0865, "11345": 2.09801, "11350": 2.095, "11355": 2.10472, "11360": 2.13738, "11365": 2.11704, "11370": 2.11649, "11375": 2.07905, "11380": 2.0931, "11385": 2.0827, "11390": 2.11863, "11395": 2.12776, "11400": 2.08022, "11405": 2.09274, "11410": 2.1015, "11415": 2.07786, "11420": 2.09822, "11425": 2.11023, "11430": 2.1167, "11435": 2.09542, "11440": 2.10972, "11445": 2.11733, "11450": 2.10729, "11455": 2.09625, "11460": 2.08587, "11465": 2.08758, "11470": 2.10749, "11475": 2.12256, "11480": 2.07987, "11485": 2.08774, "11490": 2.07772, "11495": 2.09254, "11500": 2.10801, "11505": 2.06887, "11510": 2.11199, "11515": 2.10198, "11520": 2.0925, "11525": 2.10232, "11530": 2.10033, "11535": 2.09809, "11540": 2.10222, "11545": 2.11269, "11550": 2.10107, "11555": 2.09332, "11560": 2.07441, "11565": 2.0951, "11570": 2.08108, "11575": 2.09733, "11580": 2.08752, "11585": 2.0757, "11590": 2.11538, "11595": 2.08926, "11600": 2.10313, "11605": 2.08983, "11610": 2.08347, "11615": 2.09511, "11620": 2.10848, "11625": 2.07471, "11630": 2.08417, "11635": 2.10496, "11640": 2.09555, "11645": 2.09502, "11650": 2.09632, "11655": 2.0632, "11660": 2.08471, "11665": 2.11486, "11670": 2.10066, "11675": 2.09101, "11680": 2.08371, "11685": 2.06865, "11690": 2.11203, "11695": 2.09528, "11700": 2.09425, "11705": 2.07926, "11710": 2.08847, "11715": 2.08972, "11720": 2.06414, "11725": 2.09963, "11730": 2.09811, "11735": 2.08154, "11740": 2.08861, "11745": 2.10359, "11750": 2.08838, "11755": 2.08847, "11760": 2.11085, "11765": 2.10158, "11770": 2.08703, "11775": 2.08795, "11780": 2.11712, "11785": 2.09748, "11790": 2.11454, "11795": 2.10224, "11800": 2.07659, "11805": 2.06463, "11810": 2.10134, "11815": 2.0971, "11820": 2.11214, "11825": 2.10878, "11830": 2.09399, "11835": 2.0965, "11840": 2.09872, "11845": 2.10488, "11850": 2.10591, "11855": 2.07718, "11860": 2.1047, "11865": 2.07736, "11870": 2.08625, "11875": 2.08261, "11880": 2.11515, "11885": 2.08426, "11890": 2.08588, "11895": 2.07694, "11900": 2.07274, "11905": 2.08425, "11910": 2.1012, "11915": 2.09838, "11920": 2.08933, "11925": 2.08923, "11930": 2.0669, "11935": 2.09341, "11940": 2.12176, "11945": 2.08589, "11950": 2.10089, "11955": 2.08288, "11960": 2.0882, "11965": 2.06461, "11970": 2.06759, "11975": 2.04914, "11980": 2.07021, "11985": 2.08988, "11990": 2.08166, "11995": 2.07303, "12000": 2.09366, "12005": 2.07103, "12010": 2.08115, "12015": 2.09324, "12020": 2.11059, "12025": 2.09349, "12030": 2.0868, "12035": 2.09298, "12040": 2.08033, "12045": 2.11991, "12050": 2.10219, "12055": 2.08265, "12060": 2.0745, "12065": 2.08067, "12070": 2.08228, "12075": 2.07887, "12080": 2.08947, "12085": 2.08852, "12090": 2.0846, "12095": 2.10233, "12100": 2.07347, "12105": 2.09132, "12110": 2.11081, "12115": 2.07605, "12120": 2.10372, "12125": 2.09598, "12130": 
2.08573, "12135": 2.06331, "12140": 2.08668, "12145": 2.07473, "12150": 2.08458, "12155": 2.08127, "12160": 2.08422, "12165": 2.11135, "12170": 2.07743, "12175": 2.08303, "12180": 2.06754, "12185": 2.08068, "12190": 2.08845, "12195": 2.07029, "12200": 2.07641, "12205": 2.09877, "12210": 2.07114, "12215": 2.06937, "12220": 2.07108, "12225": 2.08874, "12230": 2.08498, "12235": 2.08842, "12240": 2.07386, "12245": 2.08716, "12250": 2.07466, "12255": 2.07795, "12260": 2.08073, "12265": 2.08535, "12270": 2.0606, "12275": 2.09839, "12280": 2.08545, "12285": 2.0932, "12290": 2.09564, "12295": 2.08916, "12300": 2.09524, "12305": 2.06897, "12310": 2.09949, "12315": 2.06747, "12320": 2.06616, "12325": 2.08769, "12330": 2.06691, "12335": 2.08399, "12340": 2.09025, "12345": 2.08435, "12350": 2.0922, "12355": 2.08444, "12360": 2.07771, "12365": 2.1019, "12370": 2.08006, "12375": 2.10182, "12380": 2.04187, "12385": 2.06098, "12390": 2.07087, "12395": 2.08449, "12400": 2.08222, "12405": 2.0773, "12410": 2.07871, "12415": 2.06898, "12420": 2.07074, "12425": 2.08891, "12430": 2.07142, "12435": 2.0769, "12440": 2.05867, "12445": 2.08408, "12450": 2.07476, "12455": 2.08503, "12460": 2.08507, "12465": 2.09966, "12470": 2.0936, "12475": 2.08102, "12480": 2.08051, "12485": 2.08716, "12490": 2.10569, "12495": 2.04886, "12500": 2.08287, "12505": 2.08698, "12510": 2.08574, "12515": 2.08143, "12520": 2.06543, "12525": 2.09331, "12530": 2.07571, "12535": 2.08896, "12540": 2.0924, "12545": 2.09625, "12550": 2.06282, "12555": 2.07882, "12560": 2.06549, "12565": 2.09371, "12570": 2.08219, "12575": 2.07266, "12580": 2.06664, "12585": 2.06603, "12590": 2.10642, "12595": 2.07823, "12600": 2.09126, "12605": 2.06788, "12610": 2.07061, "12615": 2.06201, "12620": 2.07877, "12625": 2.07682, "12630": 2.08231, "12635": 2.08118, "12640": 2.07654, "12645": 2.06766, "12650": 2.08435, "12655": 2.05273, "12660": 2.07367, "12665": 2.08997, "12670": 2.07393, "12675": 2.10362, "12680": 2.09741, "12685": 2.07105, "12690": 2.06079, "12695": 2.08238, "12700": 2.07444, "12705": 2.08509, "12710": 2.07566, "12715": 2.08896, "12720": 2.07058, "12725": 2.08798, "12730": 2.08435, "12735": 2.06113, "12740": 2.08116, "12745": 2.06203, "12750": 2.07101, "12755": 2.06705, "12760": 2.07565, "12765": 2.04901, "12770": 2.06124, "12775": 2.06711, "12780": 2.07743, "12785": 2.05564, "12790": 2.07932, "12795": 2.09322, "12800": 2.07225, "12805": 2.07562, "12810": 2.06527, "12815": 2.0762, "12820": 2.08281, "12825": 2.0767, "12830": 2.0748, "12835": 2.07047, "12840": 2.08225, "12845": 2.06854, "12850": 2.06512, "12855": 2.0742, "12860": 2.07513, "12865": 2.06373, "12870": 2.07743, "12875": 2.08095, "12880": 2.08841, "12885": 2.07355, "12890": 2.06643, "12895": 2.07799, "12900": 2.06675, "12905": 2.07423, "12910": 2.10812, "12915": 2.06436, "12920": 2.09897, "12925": 2.07502, "12930": 2.07737, "12935": 2.04712, "12940": 2.08047, "12945": 2.04774, "12950": 2.0649, "12955": 2.09461, "12960": 2.07892, "12965": 2.0363, "12970": 2.07714, "12975": 2.05921, "12980": 2.06925, "12985": 2.07907, "12990": 2.04963, "12995": 2.09296, "13000": 2.09086, "13005": 2.06722, "13010": 2.10081, "13015": 2.09291, "13020": 2.06089, "13025": 2.06722, "13030": 2.06642, "13035": 2.09322, "13040": 2.07335, "13045": 2.07798, "13050": 2.05836, "13055": 2.07796, "13060": 2.0808, "13065": 2.06395, "13070": 2.06751, "13075": 2.05447, "13080": 2.06104, "13085": 2.06063, "13090": 2.06766, "13095": 2.06221, "13100": 2.07257, "13105": 2.06574, "13110": 2.04905, "13115": 2.03481, "13120": 
2.04832, "13125": 2.05878, "13130": 2.02979, "13135": 2.07279, "13140": 2.05071, "13145": 2.0645, "13150": 2.07826, "13155": 2.07363, "13160": 2.08398, "13165": 2.07578, "13170": 2.04699, "13175": 2.06644, "13180": 2.05969, "13185": 2.05606, "13190": 2.06473, "13195": 2.04984, "13200": 2.07189, "13205": 2.05034, "13210": 2.05124, "13215": 2.06808, "13220": 2.06996, "13225": 2.06724, "13230": 2.06324, "13235": 2.05736, "13240": 2.06497, "13245": 2.04036, "13250": 2.06733, "13255": 2.05616, "13260": 2.07322, "13265": 2.05645, "13270": 2.07276, "13275": 2.05856, "13280": 2.07256, "13285": 2.03945, "13290": 2.11163, "13295": 2.0619, "13300": 2.08546, "13305": 2.07413, "13310": 2.07061, "13315": 2.04996, "13320": 2.06793, "13325": 2.07484, "13330": 2.06008, "13335": 2.06218, "13340": 2.09877, "13345": 2.06978, "13350": 2.06143, "13355": 2.06929, "13360": 2.06508, "13365": 2.07316, "13370": 2.06215, "13375": 2.07606, "13380": 2.08038, "13385": 2.06814, "13390": 2.10101, "13395": 2.07255, "13400": 2.05784, "13405": 2.08767, "13410": 2.07738, "13415": 2.03792, "13420": 2.04016, "13425": 2.06784, "13430": 2.06786, "13435": 2.06087, "13440": 2.05665, "13445": 2.06969, "13450": 2.05982, "13455": 2.07825, "13460": 2.06744, "13465": 2.06036, "13470": 2.08139, "13475": 2.08364, "13480": 2.05996, "13485": 2.05479, "13490": 2.05167, "13495": 2.05077, "13500": 2.05922, "13505": 2.07963, "13510": 2.04633, "13515": 2.061, "13520": 2.07461, "13525": 2.05146, "13530": 2.08967, "13535": 2.0543, "13540": 2.06519, "13545": 2.05693, "13550": 2.06047, "13555": 2.09078, "13560": 2.06547, "13565": 2.06655, "13570": 2.04579, "13575": 2.07219, "13580": 2.05517, "13585": 2.07714, "13590": 2.07292, "13595": 2.05494, "13600": 2.08399, "13605": 2.04845, "13610": 2.0271, "13615": 2.07541, "13620": 2.08763, "13625": 2.06062, "13630": 2.06451, "13635": 2.04971, "13640": 2.06807, "13645": 2.06973, "13650": 2.04771, "13655": 2.07481, "13660": 2.04728, "13665": 2.07123, "13670": 2.10208, "13675": 2.07216, "13680": 2.04981, "13685": 2.07723, "13690": 2.0563, "13695": 2.08333, "13700": 2.05147, "13705": 2.06321, "13710": 2.04382, "13715": 2.02393, "13720": 2.05965, "13725": 2.03862, "13730": 2.05323, "13735": 2.08049, "13740": 2.08626, "13745": 2.06566, "13750": 2.07277, "13755": 2.05743, "13760": 2.05562, "13765": 2.04274, "13770": 2.06746, "13775": 2.03728, "13780": 2.05617, "13785": 2.05681, "13790": 2.06702, "13795": 2.04731, "13800": 2.05774, "13805": 2.07996, "13810": 2.05683, "13815": 2.04402, "13820": 2.04403, "13825": 2.01992, "13830": 2.04123, "13835": 2.06046, "13840": 2.04875, "13845": 2.0466, "13850": 2.06237, "13855": 2.04971, "13860": 2.04946, "13865": 2.08544, "13870": 2.05453, "13875": 2.0264, "13880": 2.06103, "13885": 2.06825, "13890": 2.07077, "13895": 2.06739, "13900": 2.07046, "13905": 2.07204, "13910": 2.07155, "13915": 2.04056, "13920": 2.06434, "13925": 2.06275, "13930": 2.06904, "13935": 2.06548, "13940": 2.06135, "13945": 2.07188, "13950": 2.06119, "13955": 2.06055, "13960": 2.0949, "13965": 2.02424, "13970": 2.05931, "13975": 2.04845, "13980": 2.07085, "13985": 2.05544, "13990": 2.06672, "13995": 2.07003, "14000": 2.03386, "14005": 2.06494, "14010": 2.08279, "14015": 2.06862, "14020": 2.04196, "14025": 2.07868, "14030": 2.04035, "14035": 2.06889, "14040": 2.02584, "14045": 2.04468, "14050": 2.0504, "14055": 2.0388, "14060": 2.05739, "14065": 2.08007, "14070": 2.0722, "14075": 2.03968, "14080": 2.06537, "14085": 2.06581, "14090": 2.03513, "14095": 2.06123, "14100": 2.05413, "14105": 2.0505, "14110": 
2.04006, "14115": 2.04391, "14120": 2.05829, "14125": 2.05854, "14130": 2.03776, "14135": 2.0529, "14140": 2.04568, "14145": 2.05123, "14150": 2.04132, "14155": 2.07814, "14160": 2.03212, "14165": 2.05699, "14170": 2.04265, "14175": 2.05987, "14180": 2.0619, "14185": 2.05647, "14190": 2.04949, "14195": 2.04947, "14200": 2.03799, "14205": 2.07108, "14210": 2.03083, "14215": 2.0576, "14220": 2.07711, "14225": 2.0508, "14230": 2.04764, "14235": 2.06956, "14240": 2.0506, "14245": 2.08523, "14250": 2.05784, "14255": 2.07594, "14260": 2.06797, "14265": 2.0562, "14270": 2.04647, "14275": 2.06524, "14280": 2.02976, "14285": 2.04842, "14290": 2.07655, "14295": 2.05525, "14300": 2.03493, "14305": 2.0666, "14310": 2.05273, "14315": 2.05187, "14320": 2.04375, "14325": 2.06658, "14330": 2.05532, "14335": 2.06008, "14340": 2.0566, "14345": 2.07965, "14350": 2.08018, "14355": 2.04848, "14360": 2.03559, "14365": 2.04089, "14370": 2.0178, "14375": 2.04963, "14380": 2.04755, "14385": 2.02811, "14390": 2.06052, "14395": 2.04175, "14400": 2.05502, "14405": 2.02278, "14410": 2.04766, "14415": 2.06112, "14420": 2.03887, "14425": 2.02798, "14430": 2.04829, "14435": 2.06336, "14440": 2.04651, "14445": 2.05795, "14450": 2.05212, "14455": 2.06047, "14460": 2.0286, "14465": 2.01909, "14470": 2.06535, "14475": 2.05403, "14480": 2.0821, "14485": 2.02458, "14490": 2.05066, "14495": 2.06295, "14500": 2.0543, "14505": 2.05905, "14510": 2.04452, "14515": 2.06969, "14520": 2.06715, "14525": 2.05956, "14530": 2.05587, "14535": 2.06945, "14540": 2.03875, "14545": 2.05269, "14550": 2.05739, "14555": 2.05056, "14560": 2.04221, "14565": 2.05828, "14570": 2.06287, "14575": 2.0695, "14580": 2.08111, "14585": 2.04066, "14590": 2.04745, "14595": 2.04967, "14600": 2.0342, "14605": 2.0318, "14610": 2.02745, "14615": 2.05636, "14620": 2.04144, "14625": 2.04963, "14630": 2.03494, "14635": 2.0634, "14640": 2.05987, "14645": 2.04363, "14650": 2.03157, "14655": 2.04925, "14660": 2.05193, "14665": 2.03998, "14670": 2.06308, "14675": 2.06588, "14680": 2.04694, "14685": 2.05157, "14690": 2.05087, "14695": 2.04383, "14700": 2.06034, "14705": 2.03071, "14710": 2.03856, "14715": 2.05594, "14720": 2.04312, "14725": 2.07479, "14730": 2.07823, "14735": 2.02631, "14740": 2.04821, "14745": 2.0792, "14750": 2.04349, "14755": 2.0525, "14760": 2.03477, "14765": 2.04746, "14770": 2.0603, "14775": 2.0502, "14780": 2.05729, "14785": 2.0323, "14790": 2.04278, "14795": 2.04539, "14800": 2.04941, "14805": 2.08214, "14810": 2.03532, "14815": 2.05857, "14820": 2.0447, "14825": 2.05447, "14830": 2.05213, "14835": 2.05369, "14840": 2.02644, "14845": 2.03114, "14850": 2.0485, "14855": 2.05214, "14860": 2.05764, "14865": 2.03307, "14870": 2.05948, "14875": 2.03536, "14880": 2.03029, "14885": 2.04846, "14890": 2.03168, "14895": 2.08082, "14900": 2.05982, "14905": 2.03368, "14910": 2.05321, "14915": 2.0489, "14920": 2.03718, "14925": 2.05307, "14930": 2.01811, "14935": 2.05254, "14940": 2.07612, "14945": 2.03277, "14950": 2.04548, "14955": 2.03324, "14960": 2.05377, "14965": 2.05138, "14970": 2.04164, "14975": 2.04142, "14980": 2.04749, "14985": 2.04808, "14990": 2.07102, "14995": 2.04386, "15000": 2.04923, "15005": 2.04564, "15010": 2.03546, "15015": 2.01486, "15020": 2.04014, "15025": 2.04775, "15030": 2.06135, "15035": 2.02751, "15040": 2.02389, "15045": 2.02658, "15050": 2.04885, "15055": 2.02834, "15060": 2.02965, "15065": 2.03616, "15070": 2.04209, "15075": 2.02343, "15080": 2.0688, "15085": 2.05588, "15090": 2.03012, "15095": 2.06411, "15100": 2.05567, 
"15105": 2.03431, "15110": 2.04186, "15115": 2.05678, "15120": 2.07524, "15125": 2.03705, "15130": 2.05729, "15135": 2.05145, "15140": 2.06779, "15145": 2.0275, "15150": 2.03868, "15155": 2.04294, "15160": 2.04348, "15165": 2.02634, "15170": 2.05348, "15175": 2.0493, "15180": 2.05488, "15185": 2.04741, "15190": 2.05516, "15195": 2.04293, "15200": 2.06922, "15205": 2.02965, "15210": 2.05966, "15215": 2.04167, "15220": 2.04158, "15225": 2.02962, "15230": 2.05262, "15235": 2.01635, "15240": 2.04607, "15245": 2.06124, "15250": 2.00632, "15255": 2.02932, "15260": 2.02569, "15265": 2.02203, "15270": 2.02378, "15275": 2.03134, "15280": 2.06721, "15285": 2.05205, "15290": 2.04354, "15295": 2.02913, "15300": 2.03178, "15305": 2.05048, "15310": 2.03166, "15315": 2.0279, "15320": 2.04001, "15325": 2.06164, "15330": 2.01833, "15335": 2.04519, "15340": 2.0323, "15345": 2.02152, "15350": 2.02234, "15355": 2.02348, "15360": 2.05125, "15365": 2.03252, "15370": 2.0409, "15375": 2.02636, "15380": 2.0565, "15385": 2.06456, "15390": 2.04544, "15395": 2.06803, "15400": 2.05629, "15405": 2.06747, "15410": 2.05218, "15415": 2.04379, "15420": 2.0635, "15425": 2.06827, "15430": 2.0364, "15435": 2.04583, "15440": 2.05302, "15445": 2.0271, "15450": 2.01645, "15455": 2.03662, "15460": 2.03906, "15465": 2.04635, "15470": 2.01841, "15475": 2.01842, "15480": 2.05746, "15485": 2.0366, "15490": 2.02852, "15495": 2.04081, "15500": 2.02579, "15505": 2.03615, "15510": 2.06457, "15515": 2.0553, "15520": 2.02792, "15525": 2.0325, "15530": 2.01838, "15535": 2.03247, "15540": 2.05114, "15545": 2.0308, "15550": 2.04841, "15555": 2.03334, "15560": 2.0585, "15565": 2.04336, "15570": 2.03795, "15575": 2.03198, "15580": 2.04589, "15585": 2.04814, "15590": 2.02731, "15595": 2.05103, "15600": 2.02325, "15605": 2.04805, "15610": 2.02673, "15615": 2.04036, "15620": 2.02742, "15625": 2.03809, "15630": 2.01787, "15635": 2.02658, "15640": 2.04166, "15645": 2.01176, "15650": 2.0471, "15655": 2.04027, "15660": 2.06246, "15665": 2.04401, "15670": 2.04368, "15675": 2.04808, "15680": 2.03484, "15685": 2.02428, "15690": 2.05539, "15695": 2.03462, "15700": 2.00931, "15705": 2.04123, "15710": 2.0605, "15715": 2.03659, "15720": 2.0331, "15725": 2.05994, "15730": 2.03363, "15735": 2.04177, "15740": 2.02447, "15745": 2.05297, "15750": 2.0411, "15755": 2.03711, "15760": 2.03623, "15765": 2.06683, "15770": 2.04645, "15775": 2.03165, "15780": 2.04541, "15785": 2.0252, "15790": 2.03564, "15795": 2.04521, "15800": 2.05525, "15805": 2.04341, "15810": 2.04776, "15815": 2.05212, "15820": 2.02846, "15825": 2.03875, "15830": 2.04468, "15835": 2.04445, "15840": 2.06069, "15845": 2.04765, "15850": 2.01755, "15855": 2.0654, "15860": 2.04635, "15865": 2.01604, "15870": 2.02662, "15875": 2.02211, "15880": 2.0443, "15885": 2.00419, "15890": 2.05585, "15895": 2.01396, "15900": 2.04409, "15905": 2.01657, "15910": 2.06217, "15915": 2.00915, "15920": 2.02271, "15925": 2.04373, "15930": 1.99696, "15935": 2.0354, "15940": 2.03974, "15945": 2.03946, "15950": 2.03215, "15955": 2.03614, "15960": 2.00604, "15965": 2.02908, "15970": 2.04185, "15975": 2.04842, "15980": 2.04135, "15985": 2.03033, "15990": 2.03008, "15995": 2.02277, "16000": 2.01759, "16005": 2.03887, "16010": 2.0382, "16015": 2.01549, "16020": 2.02998, "16025": 2.02091, "16030": 2.0254, "16035": 2.01819, "16040": 2.00998, "16045": 2.05742, "16050": 2.05599, "16055": 2.04174, "16060": 2.02451, "16065": 2.01145, "16070": 2.02334, "16075": 2.03734, "16080": 2.02682, "16085": 2.04471, "16090": 2.04522, "16095": 
2.02862, "16100": 2.03931, "16105": 2.00813, "16110": 2.03098, "16115": 2.05101, "16120": 2.04866, "16125": 2.01983, "16130": 2.01687, "16135": 2.03894, "16140": 2.02609, "16145": 2.02487, "16150": 2.00903, "16155": 2.04488, "16160": 2.02566, "16165": 2.0397, "16170": 2.02331, "16175": 2.04133, "16180": 2.04914, "16185": 2.02041, "16190": 2.01953, "16195": 2.0443, "16200": 2.03881, "16205": 2.00391, "16210": 2.04867, "16215": 2.02515, "16220": 2.04395, "16225": 2.03693, "16230": 2.01184, "16235": 2.00104, "16240": 2.06187, "16245": 2.03587, "16250": 2.0466, "16255": 2.03888, "16260": 2.01266, "16265": 2.03394, "16270": 2.03424, "16275": 2.02866, "16280": 2.03154, "16285": 2.02812, "16290": 2.05638, "16295": 2.02091, "16300": 2.02696, "16305": 2.04192, "16310": 2.05387, "16315": 2.0321, "16320": 2.03388, "16325": 2.05533, "16330": 2.01053, "16335": 2.02699, "16340": 2.04457, "16345": 2.03301, "16350": 2.01807, "16355": 2.0501, "16360": 2.01888, "16365": 2.01464, "16370": 2.01125, "16375": 2.00258, "16380": 2.02756, "16385": 2.03651, "16390": 2.03924, "16395": 2.04149, "16400": 2.03541, "16405": 2.02029, "16410": 2.02898, "16415": 2.05444, "16420": 2.00436, "16425": 2.0268, "16430": 2.04282, "16435": 2.02889, "16440": 2.01663, "16445": 2.02928, "16450": 2.05406, "16455": 2.02298, "16460": 2.01148, "16465": 2.02021, "16470": 2.02082, "16475": 2.02904, "16480": 2.01577, "16485": 2.02881, "16490": 2.04885, "16495": 2.02535, "16500": 2.02688, "16505": 2.04233, "16510": 2.04863, "16515": 2.02236, "16520": 2.02784, "16525": 2.01138, "16530": 2.02195, "16535": 2.03755, "16540": 2.03747, "16545": 2.03127, "16550": 2.0225, "16555": 2.03097, "16560": 2.04385, "16565": 2.01655, "16570": 2.02583, "16575": 2.01112, "16580": 2.03694, "16585": 2.03886, "16590": 2.03163, "16595": 2.0167, "16600": 2.048, "16605": 2.02331, "16610": 2.04542, "16615": 2.01948, "16620": 2.01559, "16625": 2.03258, "16630": 2.04092, "16635": 2.02614, "16640": 2.04323, "16645": 2.0263, "16650": 2.03575, "16655": 2.02499, "16660": 2.04057, "16665": 2.0195, "16670": 2.02641, "16675": 1.99415, "16680": 2.03152, "16685": 2.03662, "16690": 2.01136, "16695": 2.00759, "16700": 2.01896, "16705": 2.04535, "16710": 2.02293, "16715": 2.04066, "16720": 2.01616, "16725": 2.043, "16730": 2.01647, "16735": 2.04036, "16740": 2.00693, "16745": 2.0238, "16750": 2.02757, "16755": 2.03287, "16760": 2.02074, "16765": 2.02326, "16770": 2.02222, "16775": 2.04236, "16780": 2.00832, "16785": 2.01884, "16790": 2.05149, "16795": 2.02479, "16800": 2.03982, "16805": 2.03996, "16810": 2.04169, "16815": 2.0412, "16820": 2.05591, "16825": 2.04371, "16830": 2.03461, "16835": 2.018, "16840": 2.02114, "16845": 2.02291, "16850": 2.02477, "16855": 2.00529, "16860": 2.04403, "16865": 2.03993, "16870": 2.01935, "16875": 2.00818, "16880": 2.02337, "16885": 2.00468, "16890": 2.02484, "16895": 2.01484, "16900": 2.03596, "16905": 2.0041, "16910": 2.03729, "16915": 2.04743, "16920": 2.03227, "16925": 2.0274, "16930": 2.0228, "16935": 2.00324, "16940": 2.00424, "16945": 2.01978, "16950": 2.0388, "16955": 2.02682, "16960": 2.05007, "16965": 2.03417, "16970": 2.05457, "16975": 2.03224, "16980": 2.00373, "16985": 2.01785, "16990": 2.02115, "16995": 2.02051, "17000": 2.03509, "17005": 2.02666, "17010": 2.00269, "17015": 2.00851, "17020": 2.02655, "17025": 2.01734, "17030": 2.04522, "17035": 2.00739, "17040": 2.0196, "17045": 2.01981, "17050": 2.04178, "17055": 2.03151, "17060": 2.01826, "17065": 2.03369, "17070": 2.03566, "17075": 2.02703, "17080": 2.01262, "17085": 2.01776, 
"17090": 2.04203, "17095": 2.02641, "17100": 2.02584, "17105": 2.02807, "17110": 2.001, "17115": 2.02718, "17120": 2.03867, "17125": 2.02034, "17130": 2.01409, "17135": 2.0187, "17140": 2.0277, "17145": 1.98075, "17150": 2.02939, "17155": 2.00212, "17160": 2.02093, "17165": 2.03652, "17170": 1.9921, "17175": 2.03324, "17180": 2.01231, "17185": 2.01064, "17190": 2.03856, "17195": 2.04457, "17200": 2.02507, "17205": 2.03456, "17210": 2.04902, "17215": 2.03588, "17220": 2.03779, "17225": 2.0224, "17230": 2.0464, "17235": 2.02456, "17240": 2.03397, "17245": 2.03006, "17250": 2.04889, "17255": 2.01039, "17260": 2.01919, "17265": 2.0636, "17270": 2.01102, "17275": 2.02266, "17280": 2.02271, "17285": 2.03911, "17290": 2.02248, "17295": 2.0097, "17300": 2.01217, "17305": 1.99885, "17310": 2.03413, "17315": 2.02519, "17320": 2.00972, "17325": 2.03692, "17330": 2.0074, "17335": 2.00976, "17340": 2.0219, "17345": 2.02941, "17350": 2.02006, "17355": 2.04134, "17360": 2.04683, "17365": 2.02288, "17370": 2.01285, "17375": 2.02263, "17380": 2.0339, "17385": 2.04474, "17390": 2.02295, "17395": 2.01162, "17400": 2.01106, "17405": 2.01902, "17410": 2.01055, "17415": 2.04416, "17420": 2.03327, "17425": 2.0228, "17430": 2.03065, "17435": 2.00746, "17440": 2.01145, "17445": 2.00447, "17450": 2.01254, "17455": 1.99202, "17460": 2.00516, "17465": 2.05461, "17470": 2.034, "17475": 2.0314, "17480": 2.01182, "17485": 2.03455, "17490": 2.04655, "17495": 2.01926, "17500": 2.02769, "17505": 2.00719, "17510": 2.03689, "17515": 2.03591, "17520": 2.02397, "17525": 2.02259, "17530": 2.01364, "17535": 2.05481, "17540": 2.0203, "17545": 2.02704, "17550": 2.01323, "17555": 2.01799, "17560": 2.03747, "17565": 2.02903, "17570": 2.02463, "17575": 2.0058, "17580": 2.02155, "17585": 2.02285, "17590": 2.01244, "17595": 2.01786, "17600": 2.01061, "17605": 2.00626, "17610": 2.01702, "17615": 2.03067, "17620": 2.0355, "17625": 2.03485, "17630": 1.99677, "17635": 2.03738, "17640": 1.97553, "17645": 2.03006, "17650": 2.0221, "17655": 2.00935, "17660": 2.01302, "17665": 2.02938, "17670": 2.01299, "17675": 2.01816, "17680": 2.02184, "17685": 2.02458, "17690": 1.99473, "17695": 2.01551, "17700": 2.02568, "17705": 2.01714, "17710": 2.03676, "17715": 2.03762, "17720": 2.0396, "17725": 2.03042, "17730": 1.99709, "17735": 2.02132, "17740": 2.01273, "17745": 2.03461, "17750": 2.04869, "17755": 2.03783, "17760": 2.02252, "17765": 2.02109, "17770": 2.02364, "17775": 2.02151, "17780": 2.02899, "17785": 2.00965, "17790": 2.0042, "17795": 2.00554, "17800": 2.00239, "17805": 2.02439, "17810": 2.01538, "17815": 2.02454, "17820": 2.03366, "17825": 2.01802, "17830": 2.00447, "17835": 2.01651, "17840": 2.01004, "17845": 2.00461, "17850": 2.03918, "17855": 2.0204, "17860": 2.03332, "17865": 2.04374, "17870": 2.00343, "17875": 2.03638, "17880": 1.98611, "17885": 1.9977, "17890": 2.02651, "17895": 2.00069, "17900": 2.02726, "17905": 2.01899, "17910": 2.02192, "17915": 2.00657, "17920": 2.03239, "17925": 2.02634, "17930": 2.04076, "17935": 2.01166, "17940": 2.02806, "17945": 2.01915, "17950": 1.99125, "17955": 2.01731, "17960": 2.02342, "17965": 2.02704, "17970": 2.02427, "17975": 2.019, "17980": 2.01833, "17985": 2.04038, "17990": 1.9933, "17995": 1.99191, "18000": 2.02924, "18005": 2.00598, "18010": 2.0087, "18015": 2.02504, "18020": 2.01043, "18025": 2.02605, "18030": 2.01262, "18035": 2.02138, "18040": 2.01102, "18045": 2.0184, "18050": 1.99115, "18055": 1.98063, "18060": 1.98232, "18065": 1.99498, "18070": 2.00598, "18075": 2.01447, "18080": 2.03294, 
"18085": 1.98697, "18090": 2.01334, "18095": 2.00544, "18100": 2.01891, "18105": 1.98872, "18110": 2.03017, "18115": 2.02223, "18120": 2.01339, "18125": 2.02681, "18130": 2.01983, "18135": 2.02008, "18140": 2.0152, "18145": 2.03576, "18150": 2.01935, "18155": 2.01558, "18160": 2.00749, "18165": 2.02658, "18170": 2.02692, "18175": 2.01172, "18180": 2.01646, "18185": 2.00857, "18190": 2.0077, "18195": 1.99312, "18200": 2.02151, "18205": 2.03051, "18210": 1.99735, "18215": 2.03674, "18220": 2.00088, "18225": 2.02607, "18230": 1.99104, "18235": 2.00505, "18240": 2.02054, "18245": 2.00029, "18250": 2.02568, "18255": 2.01376, "18260": 1.99341, "18265": 2.0249, "18270": 1.97773, "18275": 2.00753, "18280": 1.99633, "18285": 1.99506, "18290": 2.0244, "18295": 2.02124, "18300": 2.01548, "18305": 1.99185, "18310": 1.99109, "18315": 1.99623, "18320": 1.99144, "18325": 2.0144, "18330": 2.00442, "18335": 2.00649, "18340": 2.02709, "18345": 2.02247, "18350": 2.00197, "18355": 2.02442, "18360": 2.00829, "18365": 2.00847, "18370": 2.01919, "18375": 2.01343, "18380": 2.0131, "18385": 2.00356, "18390": 2.03257, "18395": 2.00668, "18400": 2.02881, "18405": 2.02568, "18410": 2.029, "18415": 2.02972, "18420": 2.05174, "18425": 2.01339, "18430": 2.01848, "18435": 1.97805, "18440": 2.02308, "18445": 2.01502, "18450": 1.99209, "18455": 1.99914, "18460": 2.01933, "18465": 2.00063, "18470": 2.01392, "18475": 1.99387, "18480": 2.01331, "18485": 2.00022, "18490": 2.00667, "18495": 2.04631, "18500": 1.98982, "18505": 2.01253, "18510": 2.00402, "18515": 2.01554, "18520": 1.99384, "18525": 2.01754, "18530": 1.98365, "18535": 1.98714, "18540": 2.01227, "18545": 1.98565, "18550": 1.99799, "18555": 2.00673, "18560": 2.02725, "18565": 2.00065, "18570": 2.02803, "18575": 2.02284, "18580": 1.99482, "18585": 1.99514, "18590": 2.00661, "18595": 1.99885, "18600": 2.00622, "18605": 2.02899, "18610": 2.02028, "18615": 2.00523, "18620": 1.99913, "18625": 2.01435, "18630": 1.98997, "18635": 1.99914, "18640": 1.99849, "18645": 2.00814, "18650": 2.00285, "18655": 2.00393, "18660": 1.98784, "18665": 1.99054, "18670": 2.00413, "18675": 2.00573, "18680": 1.99666, "18685": 1.98991, "18690": 2.01523, "18695": 2.02537, "18700": 2.03218, "18705": 2.02084, "18710": 1.99711, "18715": 2.0245, "18720": 2.01772, "18725": 2.01599, "18730": 2.02435, "18735": 2.01872, "18740": 2.02298, "18745": 1.9966, "18750": 2.04177, "18755": 1.99895, "18760": 2.01827, "18765": 2.00345, "18770": 2.00956, "18775": 1.99698, "18780": 2.03937, "18785": 2.03389, "18790": 2.01744, "18795": 2.00471, "18800": 2.01998, "18805": 1.99774, "18810": 1.99961, "18815": 2.02862, "18820": 2.01196, "18825": 2.01768, "18830": 2.00459, "18835": 1.99768, "18840": 2.01587, "18845": 2.01021, "18850": 2.00144, "18855": 1.99765, "18860": 1.99821, "18865": 2.02432, "18870": 2.00559, "18875": 1.99931, "18880": 1.98856, "18885": 2.02525, "18890": 2.01404, "18895": 2.02304, "18900": 1.99292, "18905": 1.99343, "18910": 1.99609, "18915": 1.98469, "18920": 2.01925, "18925": 2.01309, "18930": 2.02713, "18935": 2.01104, "18940": 2.00891, "18945": 1.99912, "18950": 2.01048, "18955": 2.00192, "18960": 2.00586, "18965": 2.02274, "18970": 2.02614, "18975": 2.00926, "18980": 2.00244, "18985": 1.99488, "18990": 2.01996, "18995": 1.99597, "19000": 1.99229, "19005": 2.01313, "19010": 2.00653, "19015": 2.00603, "19020": 1.99386, "19025": 2.00639, "19030": 2.00367, "19035": 2.02682, "19040": 1.9908, "19045": 2.01357, "19050": 2.00254, "19055": 2.00567, "19060": 2.01639, "19065": 1.9963, "19070": 2.01983, 
"19075": 1.99275, "19080": 2.00277, "19085": 1.99706, "19090": 1.98476, "19095": 2.02326, "19100": 2.01702, "19105": 2.00563, "19110": 2.01457, "19115": 2.03625, "19120": 2.03135, "19125": 2.01411, "19130": 1.99917, "19135": 2.01562, "19140": 2.00461, "19145": 2.0091, "19150": 1.97721, "19155": 2.01424, "19160": 1.99929, "19165": 2.00466, "19170": 1.98414, "19175": 2.01095, "19180": 2.00892, "19185": 2.0048, "19190": 1.98371, "19195": 1.99743, "19200": 1.99893, "19205": 1.99485, "19210": 2.01172, "19215": 1.99288, "19220": 2.02595, "19225": 2.00858, "19230": 2.00917, "19235": 1.98683, "19240": 1.99347, "19245": 2.01101, "19250": 1.98923, "19255": 2.00531, "19260": 1.9811, "19265": 1.99274, "19270": 2.01545, "19275": 2.01137, "19280": 2.01541, "19285": 2.0126, "19290": 2.03437, "19295": 2.0001, "19300": 2.02021, "19305": 1.99358, "19310": 2.00119, "19315": 1.98839, "19320": 2.01085, "19325": 2.01929, "19330": 2.0016, "19335": 2.01952, "19340": 2.0154, "19345": 1.97181, "19350": 2.02515, "19355": 1.99556, "19360": 1.98599, "19365": 1.99543, "19370": 2.02486, "19375": 1.99962, "19380": 2.00339, "19385": 2.02722, "19390": 2.00698, "19395": 2.02186, "19400": 2.00483, "19405": 2.00954, "19410": 2.00295, "19415": 1.97556, "19420": 2.01199, "19425": 1.98735, "19430": 1.98935, "19435": 2.02165, "19440": 2.01415, "19445": 1.99557, "19450": 1.99122, "19455": 1.99602, "19460": 1.98279, "19465": 1.99654, "19470": 1.99933, "19475": 2.00382, "19480": 1.9868, "19485": 1.96827, "19490": 1.99874, "19495": 1.99292, "19500": 2.00575, "19505": 2.00256, "19510": 1.99854, "19515": 2.02579, "19520": 2.01553, "19525": 1.99298, "19530": 1.99249, "19535": 2.02625, "19540": 1.99018, "19545": 2.0035, "19550": 2.02191, "19555": 2.00483, "19560": 2.0076, "19565": 2.01004, "19570": 1.99418, "19575": 2.02016, "19580": 2.00316, "19585": 1.99615, "19590": 1.99072, "19595": 2.01431, "19600": 2.00915, "19605": 2.00874, "19610": 2.01353, "19615": 2.00412, "19620": 1.99764, "19625": 2.02692, "19630": 2.00869, "19635": 1.99354, "19640": 2.02213, "19645": 2.01509, "19650": 2.00658, "19655": 2.01427, "19660": 2.00138, "19665": 2.03063, "19670": 2.00218, "19675": 1.9965, "19680": 1.98281, "19685": 2.01162, "19690": 2.00824, "19695": 2.00338, "19700": 1.9877, "19705": 2.01941, "19710": 2.0404, "19715": 2.0159, "19720": 1.99985, "19725": 2.0243, "19730": 2.02939, "19735": 2.02048, "19740": 2.01239, "19745": 1.99261, "19750": 1.9864, "19755": 1.99391, "19760": 1.98698, "19765": 2.01413, "19770": 2.01399, "19775": 1.99689, "19780": 2.00215, "19785": 1.98733, "19790": 1.99794, "19795": 2.00191, "19800": 1.99407, "19805": 1.98234, "19810": 1.98461, "19815": 2.00706, "19820": 2.01179, "19825": 1.99957, "19830": 2.01491, "19835": 2.01579, "19840": 2.0179, "19845": 1.95987, "19850": 2.00997, "19855": 2.0022, "19860": 1.98368, "19865": 2.01517, "19870": 2.00243, "19875": 2.01711, "19880": 2.00953, "19885": 1.97971, "19890": 2.00013, "19895": 2.00511, "19900": 1.99558, "19905": 2.0079, "19910": 2.024, "19915": 1.99924, "19920": 1.99034, "19925": 1.97932, "19930": 1.98383, "19935": 1.99994, "19940": 1.97606, "19945": 2.02091, "19950": 2.01543, "19955": 2.01561, "19960": 2.02208, "19965": 1.9927, "19970": 1.97809, "19975": 2.0025, "19980": 2.00916, "19985": 2.00153, "19990": 1.99631, "19995": 2.00003, "20000": 2.00787, "20005": 1.9912, "20010": 2.00069, "20015": 1.99693, "20020": 2.00805, "20025": 2.00504, "20030": 2.01146, "20035": 1.99621, "20040": 1.98871, "20045": 1.9713, "20050": 2.01279, "20055": 1.99543, "20060": 1.98804, "20065": 
2.00237, "20070": 2.00657, "20075": 1.98391, "20080": 1.99035, "20085": 1.97871, "20090": 1.99052, "20095": 1.98479, "20100": 1.99529, "20105": 1.98636, "20110": 2.02097, "20115": 2.00177, "20120": 1.9957, "20125": 1.98625, "20130": 1.99622, "20135": 1.98675, "20140": 1.98871, "20145": 1.99505, "20150": 1.99319, "20155": 2.00845, "20160": 2.01073, "20165": 1.99514, "20170": 2.01842, "20175": 1.99685, "20180": 1.99322, "20185": 1.99849, "20190": 1.97828, "20195": 1.98117, "20200": 1.97203, "20205": 2.01383, "20210": 1.98801, "20215": 2.00414, "20220": 2.02423, "20225": 1.99055, "20230": 1.99152, "20235": 2.0039, "20240": 2.00963, "20245": 1.99278, "20250": 1.99617, "20255": 2.00762, "20260": 1.98484, "20265": 2.00662, "20270": 1.98617, "20275": 1.98031, "20280": 2.00571, "20285": 1.99417, "20290": 1.98067, "20295": 1.99868, "20300": 1.99899, "20305": 1.99766, "20310": 2.01904, "20315": 1.99709, "20320": 1.98878, "20325": 1.99867, "20330": 2.00772, "20335": 1.96077, "20340": 2.00659, "20345": 2.00675, "20350": 1.98593, "20355": 1.98727, "20360": 2.00513, "20365": 1.96654, "20370": 1.99484, "20375": 1.98754, "20380": 1.99599, "20385": 1.99305, "20390": 1.98255, "20395": 1.99917, "20400": 1.98175, "20405": 1.99437, "20410": 1.97942, "20415": 2.01814, "20420": 2.00994, "20425": 1.98478, "20430": 1.99475, "20435": 1.98791, "20440": 1.99163, "20445": 2.02556, "20450": 1.9759, "20455": 2.00543, "20460": 1.98665, "20465": 2.00299, "20470": 1.99472, "20475": 1.99362, "20480": 1.97397, "20485": 1.99014, "20490": 1.99332, "20495": 2.00645, "20500": 1.99137, "20505": 1.98431, "20510": 1.9856, "20515": 2.02018, "20520": 1.98992, "20525": 1.99077, "20530": 1.96611, "20535": 1.98261, "20540": 1.9788, "20545": 1.98459, "20550": 1.99968, "20555": 1.99984, "20560": 1.95413, "20565": 2.0123, "20570": 1.98881, "20575": 1.97224, "20580": 1.99498, "20585": 1.96893, "20590": 1.99311, "20595": 1.99749, "20600": 2.01757, "20605": 1.99739, "20610": 2.00544, "20615": 1.98844, "20620": 1.98379, "20625": 1.99721, "20630": 2.00431, "20635": 1.99394, "20640": 1.98785, "20645": 2.00129, "20650": 1.96922, "20655": 1.99233, "20660": 1.99043, "20665": 2.00051, "20670": 2.00195, "20675": 1.9925, "20680": 1.99252, "20685": 1.98392, "20690": 2.02723, "20695": 1.95754, "20700": 1.99937, "20705": 2.007, "20710": 1.98702, "20715": 1.98874, "20720": 2.00579, "20725": 1.98894, "20730": 2.00142, "20735": 2.01918, "20740": 1.99084, "20745": 2.01833, "20750": 1.99424, "20755": 1.95991, "20760": 2.01131, "20765": 1.99006, "20770": 1.99187, "20775": 1.9956, "20780": 1.99478, "20785": 1.97872, "20790": 1.98272, "20795": 1.99606, "20800": 1.9722, "20805": 1.99636, "20810": 1.9755, "20815": 1.98143, "20820": 2.01116, "20825": 1.99797, "20830": 2.00324, "20835": 1.97765, "20840": 1.98351, "20845": 2.01804, "20850": 1.97985, "20855": 2.00656, "20860": 1.99954, "20865": 1.97562, "20870": 1.98906, "20875": 1.98755, "20880": 1.99281, "20885": 2.00699, "20890": 1.98514, "20895": 1.99128, "20900": 1.97799, "20905": 2.00223, "20910": 2.02685, "20915": 1.99066, "20920": 2.00628, "20925": 1.99908, "20930": 1.97791, "20935": 2.00204, "20940": 1.98071, "20945": 2.01001, "20950": 1.98753, "20955": 1.99147, "20960": 2.01099, "20965": 1.97805, "20970": 1.97472, "20975": 1.97999, "20980": 2.00597, "20985": 1.96166, "20990": 1.99298, "20995": 1.99471, "21000": 1.99863, "21005": 1.98586, "21010": 2.01168, "21015": 1.97063, "21020": 1.96757, "21025": 2.00264, "21030": 1.98121, "21035": 2.00902, "21040": 2.01114, "21045": 2.00084, "21050": 1.98773, "21055": 
1.99143, "21060": 1.98844, "21065": 2.00184, "21070": 1.9922, "21075": 1.98497, "21080": 1.96644, "21085": 1.98796, "21090": 1.96871, "21095": 1.98875, "21100": 1.96726, "21105": 1.99635, "21110": 1.97768, "21115": 1.98811, "21120": 1.98921, "21125": 1.98395, "21130": 1.98934, "21135": 1.98222, "21140": 2.01355, "21145": 2.0036, "21150": 1.99152, "21155": 1.98213, "21160": 1.9774, "21165": 1.97547, "21170": 1.9981, "21175": 2.0103, "21180": 1.97017, "21185": 2.02565, "21190": 1.99536, "21195": 1.97407, "21200": 1.98206, "21205": 1.98683, "21210": 2.00249, "21215": 1.97203, "21220": 1.98277, "21225": 1.9808, "21230": 2.00516, "21235": 1.99941, "21240": 1.98463, "21245": 1.97608, "21250": 2.01119, "21255": 1.95993, "21260": 1.96535, "21265": 1.96604, "21270": 1.99108, "21275": 1.97383, "21280": 1.98604, "21285": 1.97562, "21290": 1.9884, "21295": 2.00168, "21300": 1.99281, "21305": 1.98955, "21310": 1.97799, "21315": 1.99262, "21320": 1.9697, "21325": 1.97089, "21330": 1.98685, "21335": 2.00234, "21340": 1.98584, "21345": 2.00997, "21350": 1.98689, "21355": 1.97423, "21360": 1.986, "21365": 1.97988, "21370": 1.99998, "21375": 1.98641, "21380": 1.97988, "21385": 1.98408, "21390": 1.98784, "21395": 1.97392, "21400": 1.9718, "21405": 1.996, "21410": 2.00593, "21415": 1.98585, "21420": 2.00781, "21425": 1.99173, "21430": 2.00043, "21435": 1.98707, "21440": 1.98378, "21445": 2.01332, "21450": 2.01174, "21455": 1.9981, "21460": 1.98473, "21465": 1.99072, "21470": 2.00373, "21475": 1.99008, "21480": 1.99047, "21485": 1.97356, "21490": 1.99429, "21495": 1.98515, "21500": 1.97536, "21505": 1.99428, "21510": 1.97173, "21515": 2.0001, "21520": 1.98971, "21525": 1.99435, "21530": 2.00082, "21535": 1.9858, "21540": 1.99166, "21545": 1.98148, "21550": 1.97336, "21555": 1.97502, "21560": 1.99078, "21565": 1.99548, "21570": 1.98613, "21575": 2.00412, "21580": 1.97893, "21585": 1.99447, "21590": 1.98189, "21595": 1.9848, "21600": 1.96298, "21605": 2.00639, "21610": 2.00451, "21615": 1.96498, "21620": 2.00964, "21625": 1.97184, "21630": 1.9928, "21635": 1.97251, "21640": 1.99611, "21645": 2.02613, "21650": 1.97246, "21655": 1.96717, "21660": 1.98811, "21665": 1.99659, "21670": 1.99307, "21675": 1.99655, "21680": 1.97644, "21685": 1.97361, "21690": 1.98693, "21695": 1.97914, "21700": 2.00606, "21705": 2.01066, "21710": 2.00674, "21715": 1.99986, "21720": 2.00416, "21725": 1.99941, "21730": 1.98295, "21735": 1.96593, "21740": 2.00317, "21745": 1.97578, "21750": 1.98081, "21755": 1.99309, "21760": 2.00704, "21765": 2.01826, "21770": 1.97199, "21775": 2.0105, "21780": 1.99323, "21785": 2.00237, "21790": 2.00718, "21795": 1.97402, "21800": 1.97441, "21805": 1.97614, "21810": 1.97544, "21815": 1.9583, "21820": 1.99739, "21825": 1.97144, "21830": 2.00842, "21835": 2.00603, "21840": 1.98454, "21845": 1.98594, "21850": 1.9926, "21855": 1.97726, "21860": 1.98433, "21865": 1.97712, "21870": 1.99165, "21875": 2.00795, "21880": 1.97903, "21885": 1.99561, "21890": 1.99716, "21895": 1.97597, "21900": 1.98804, "21905": 1.97229, "21910": 1.98554, "21915": 1.98359, "21920": 1.96783, "21925": 1.99351, "21930": 1.99628, "21935": 2.00636, "21940": 1.97529, "21945": 1.9645, "21950": 1.9795, "21955": 1.99802, "21960": 1.98153, "21965": 2.01646, "21970": 2.00502, "21975": 1.97651, "21980": 1.96467, "21985": 1.98538, "21990": 1.97484, "21995": 1.97258, "22000": 1.99876, "22005": 1.97798, "22010": 1.95536, "22015": 1.9648, "22020": 1.9662, "22025": 1.99113, "22030": 1.97484, "22035": 1.9693, "22040": 1.9735, "22045": 1.98358, "22050": 
1.98638, "22055": 2.00481, "22060": 1.98793, "22065": 2.00433, "22070": 1.98754, "22075": 2.00651, "22080": 1.97492, "22085": 1.98932, "22090": 1.96623, "22095": 1.98071, "22100": 1.99392, "22105": 1.98575, "22110": 1.98861, "22115": 1.96117, "22120": 2.00127, "22125": 1.98909, "22130": 1.98382, "22135": 1.9622, "22140": 2.00328, "22145": 1.97404, "22150": 1.97576, "22155": 1.96676, "22160": 1.97996, "22165": 1.97118, "22170": 1.98848, "22175": 2.00312, "22180": 1.97302, "22185": 1.98437, "22190": 1.96605, "22195": 1.98589, "22200": 1.97225, "22205": 1.99622, "22210": 1.9936, "22215": 1.97503, "22220": 1.99069, "22225": 1.99038, "22230": 1.9771, "22235": 2.00708, "22240": 1.96959, "22245": 1.98315, "22250": 1.99011, "22255": 1.95911, "22260": 1.98614, "22265": 1.98645, "22270": 2.00538, "22275": 1.97181, "22280": 1.98426, "22285": 1.99817, "22290": 1.9744, "22295": 1.98926, "22300": 1.95839, "22305": 1.982, "22310": 1.98206, "22315": 1.97567, "22320": 1.98474, "22325": 1.9855, "22330": 1.98157, "22335": 1.9813, "22340": 1.97829, "22345": 1.98378, "22350": 2.00878, "22355": 1.98318, "22360": 1.99073, "22365": 1.99813, "22370": 1.98265, "22375": 1.97987, "22380": 1.98524, "22385": 1.99257, "22390": 1.97869, "22395": 1.98485, "22400": 2.00174, "22405": 1.98818, "22410": 1.98683, "22415": 1.9736, "22420": 1.97434, "22425": 1.99292, "22430": 1.98882, "22435": 1.96963, "22440": 1.97404, "22445": 1.98262, "22450": 1.97464, "22455": 1.98076, "22460": 2.00526, "22465": 1.9995, "22470": 1.98502, "22475": 1.99879, "22480": 1.9635, "22485": 1.97154, "22490": 1.98464, "22495": 1.9755, "22500": 1.9701, "22505": 1.97747, "22510": 1.96825, "22515": 1.97191, "22520": 1.95972, "22525": 1.97326, "22530": 1.96545, "22535": 1.99198, "22540": 1.99267, "22545": 1.97666, "22550": 1.99272, "22555": 1.98163, "22560": 1.98814, "22565": 1.97387, "22570": 1.9937, "22575": 1.99245, "22580": 1.98775, "22585": 1.97258, "22590": 2.00928, "22595": 1.98538, "22600": 1.99269, "22605": 1.95022, "22610": 1.9893, "22615": 1.97631, "22620": 1.99963, "22625": 1.95413, "22630": 1.96557, "22635": 1.99451, "22640": 1.9618, "22645": 1.98107, "22650": 1.98544, "22655": 1.97545, "22660": 1.96815, "22665": 2.00798, "22670": 1.98341, "22675": 1.96386, "22680": 1.96991, "22685": 1.9771, "22690": 1.96925, "22695": 1.98404, "22700": 1.98587, "22705": 1.96237, "22710": 1.95556, "22715": 2.01202, "22720": 1.98558, "22725": 1.96215, "22730": 1.97795, "22735": 1.96097, "22740": 1.96226, "22745": 1.97746, "22750": 1.96483, "22755": 2.0027, "22760": 1.98065, "22765": 1.96986, "22770": 1.98146, "22775": 1.95507, "22780": 1.96814, "22785": 1.95787, "22790": 1.9922, "22795": 2.00465, "22800": 1.99461, "22805": 1.96622, "22810": 1.97541, "22815": 1.9582, "22820": 1.96199, "22825": 1.95646, "22830": 1.98649, "22835": 1.97577, "22840": 1.96806, "22845": 1.99681, "22850": 1.98368, "22855": 1.97493, "22860": 1.96493, "22865": 1.98542, "22870": 2.0028, "22875": 1.98204, "22880": 1.97053, "22885": 1.97051, "22890": 1.96748, "22895": 1.95835, "22900": 1.971, "22905": 1.95626, "22910": 1.98603, "22915": 1.97422, "22920": 2.00138, "22925": 1.95297, "22930": 1.97297, "22935": 1.98101, "22940": 1.99482, "22945": 1.99712, "22950": 1.96936, "22955": 1.99282, "22960": 1.96858, "22965": 1.98167, "22970": 1.97467, "22975": 1.96191, "22980": 1.99738, "22985": 1.95675, "22990": 1.9749, "22995": 1.95954, "23000": 1.98859, "23005": 1.99459, "23010": 1.99903, "23015": 1.96739, "23020": 1.98151, "23025": 1.9794, "23030": 1.97253, "23035": 1.99918, "23040": 1.97579, 
"23045": 1.97503, "23050": 1.96025, "23055": 1.96986, "23060": 1.96948, "23065": 1.98609, "23070": 1.97586, "23075": 1.97815, "23080": 1.99705, "23085": 1.97278, "23090": 1.95803, "23095": 1.98839, "23100": 1.97515, "23105": 1.97986, "23110": 1.98236, "23115": 1.96523, "23120": 1.94251, "23125": 1.99873, "23130": 1.98118, "23135": 1.97671, "23140": 1.98255, "23145": 1.96328, "23150": 1.98177, "23155": 1.98727, "23160": 2.01537, "23165": 1.9762, "23170": 1.98885, "23175": 1.98333, "23180": 1.98675, "23185": 1.97591, "23190": 1.98025, "23195": 1.96073, "23200": 1.96238, "23205": 1.98245, "23210": 1.9725, "23215": 1.97763, "23220": 2.00261, "23225": 1.96808, "23230": 1.93576, "23235": 1.98489, "23240": 1.99048, "23245": 1.97787, "23250": 1.96609, "23255": 1.98889, "23260": 1.98712, "23265": 1.95868, "23270": 2.01375, "23275": 1.98385, "23280": 1.95872, "23285": 1.98819, "23290": 2.00706, "23295": 1.99373, "23300": 1.97099, "23305": 1.97591, "23310": 1.99427, "23315": 1.97855, "23320": 2.00474, "23325": 1.97064, "23330": 1.97649, "23335": 1.97216, "23340": 1.95778, "23345": 1.99042, "23350": 1.98434, "23355": 1.98325, "23360": 1.97398, "23365": 1.961, "23370": 1.97334, "23375": 1.98359, "23380": 2.00015, "23385": 1.99297, "23390": 1.99408, "23395": 1.97418, "23400": 1.96354, "23405": 1.97278, "23410": 1.98706, "23415": 1.96289, "23420": 1.98456, "23425": 1.98665, "23430": 1.98357, "23435": 1.96696, "23440": 1.96334, "23445": 1.9554, "23450": 1.96255, "23455": 1.96268, "23460": 1.97477, "23465": 1.99041, "23470": 1.99086, "23475": 1.9721, "23480": 1.97742, "23485": 1.97343, "23490": 1.98449, "23495": 1.96309, "23500": 1.98834, "23505": 1.96242, "23510": 1.98074, "23515": 1.98999, "23520": 1.98429, "23525": 1.97239, "23530": 1.99762, "23535": 1.98687, "23540": 1.97093, "23545": 1.9932, "23550": 1.95007, "23555": 1.96693, "23560": 1.94592, "23565": 1.9695, "23570": 1.9852, "23575": 1.97085, "23580": 1.98145, "23585": 1.98022, "23590": 1.97718, "23595": 1.96784, "23600": 1.94066, "23605": 1.96984, "23610": 1.97309, "23615": 1.97188, "23620": 1.96292, "23625": 1.98972, "23630": 1.96226, "23635": 1.99879, "23640": 1.99116, "23645": 1.96075, "23650": 1.95793, "23655": 1.97755, "23660": 1.9741, "23665": 1.96666, "23670": 1.97708, "23675": 1.94909, "23680": 1.98047, "23685": 1.95793, "23690": 1.96713, "23695": 1.97021, "23700": 1.97189, "23705": 1.96699, "23710": 1.9758, "23715": 1.95715, "23720": 1.95801, "23725": 1.97261, "23730": 2.00241, "23735": 1.97245, "23740": 1.96837, "23745": 1.94756, "23750": 1.96799, "23755": 1.99248, "23760": 1.97079, "23765": 1.98199, "23770": 1.97414, "23775": 1.96921, "23780": 1.97531, "23785": 1.98154, "23790": 1.97077, "23795": 1.96505, "23800": 1.96674, "23805": 1.95325, "23810": 1.95753, "23815": 1.97031, "23820": 1.97481, "23825": 1.98496, "23830": 1.97454, "23835": 1.97977, "23840": 1.9734, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": 
"nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": 
"nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": 
"nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": 
"nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": 
"nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": 
"nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": 
"nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": 
"nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": 
"nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": 
"nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": 
"nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": 
"nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": 
"nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": 
"nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": 
"nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": 
"nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": 
"nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": 
"nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": 
"nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": 
"nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": 
"nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": 
"nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": 
"nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": 
"nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": 
"nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 406787520.0, "5": 406592480.0, "10": 414796032.0, "15": 419864672.0, "20": 411723840.0, "25": 406621568.0, "30": 413034432.0, "35": 409553568.0, "40": 414712960.0, "45": 411202912.0, "50": 413175584.0, "55": 414169344.0, "60": 407747424.0, "65": 411423104.0, "70": 410920192.0, "75": 409333024.0, "80": 406662176.0, "85": 406727328.0, "90": 412169440.0, "95": 414241824.0, "100": 414835392.0, "105": 421843232.0, "110": 413671136.0, "115": 408997152.0, "120": 411562720.0, "125": 406562816.0, "130": 411449824.0, "135": 417567232.0, "140": 403017760.0, "145": 410669344.0, "150": 417476928.0, "155": 405319904.0, "160": 402155648.0, "165": 408990496.0, "170": 400902368.0, "175": 407146976.0, "180": 409548032.0, "185": 402995712.0, "190": 414753984.0, "195": 403489152.0, "200": 414942528.0, "205": 414930144.0, "210": 404130272.0, "215": 413629024.0, "220": 406038144.0, "225": 405370176.0, "230": 418522688.0, "235": 410755744.0, "240": 415723040.0, "245": 412102976.0, "250": 411169440.0, "255": 422414176.0, "260": 405247808.0, "265": 410731520.0, "270": 409320800.0, "275": 407085760.0, "280": 407996192.0, "285": 413277152.0, "290": 405430592.0, "295": 412640160.0, "300": 412000672.0, "305": 413315776.0, "310": 411113632.0, "315": 415751936.0, "320": 414909984.0, "325": 406883008.0, "330": 413362400.0, "335": 414362304.0, "340": 404909856.0, "345": 409994912.0, "350": 416837824.0, "355": 414216576.0, "360": 415052864.0, "365": 405027136.0, "370": 416622624.0, "375": 402489152.0, "380": 412619904.0, "385": 406799232.0, "390": 413798656.0, "395": 412018176.0, "400": 413234464.0, "405": 417365184.0, "410": 410793408.0, "415": 413901024.0, "420": 415209760.0, "425": 416627008.0, "430": 410909984.0, "435": 413999168.0, "440": 406105408.0, "445": 410400384.0, "450": 407893408.0, "455": 406201824.0, "460": 414312864.0, "465": 411987872.0, "470": 425867648.0, "475": 412910656.0, "480": 407133920.0, "485": 415675296.0, "490": 408964480.0, "495": 414695808.0, "500": 413683616.0, "505": 409271168.0, "510": 417078976.0, "515": 420418752.0, "520": 406485376.0, "525": 398835680.0, "530": 413279808.0, "535": 404575520.0, "540": 403464544.0, "545": 410433024.0, "550": 407044000.0, "555": 409852640.0, "560": 427453664.0, "565": 406158464.0, "570": 404851936.0, "575": 412399168.0, "580": 407224736.0, "585": 414318656.0, "590": 412625984.0, "595": 419191200.0, "600": 412163776.0, "605": 418644096.0, "610": 406102528.0, "615": 411867872.0, "620": 414480896.0, "625": 413538752.0, "630": 414244704.0, "635": 415758656.0, "640": 418912256.0, "645": 419574720.0, "650": 414618240.0, "655": 425491104.0, "660": 409903744.0, "665": 415277536.0, "670": 404457472.0, "675": 416199424.0, "680": 416538560.0, "685": 415696192.0, "690": 412358048.0, "695": 417982816.0, "700": 410626880.0, "705": 418421216.0, "710": 414205120.0, "715": 411012800.0, "720": 410524992.0, "725": 410405120.0, "730": 417943904.0, "735": 
411973440.0, "740": 415874496.0, "745": 411145376.0, "750": 413323264.0, "755": 413919360.0, "760": 410635776.0, "765": 413231168.0, "770": 414552800.0, "775": 407338688.0, "780": 421717248.0, "785": 409852032.0, "790": 410570240.0, "795": 420403136.0, "800": 418185984.0, "805": 414144416.0, "810": 414469216.0, "815": 412919712.0, "820": 408251168.0, "825": 407975104.0, "830": 415394912.0, "835": 414052928.0, "840": 415207840.0, "845": 411018336.0, "850": 404443616.0, "855": 411949344.0, "860": 412367488.0, "865": 406101408.0, "870": 419935584.0, "875": 409720960.0, "880": 407169088.0, "885": 413696480.0, "890": 410887008.0, "895": 412955904.0, "900": 413047552.0, "905": 406278688.0, "910": 409220960.0, "915": 406271456.0, "920": 415126272.0, "925": 400839520.0, "930": 410067808.0, "935": 411185728.0, "940": 403095584.0, "945": 417415200.0, "950": 412871936.0, "955": 409717024.0, "960": 420151488.0, "965": 407942080.0, "970": 414695840.0, "975": 403904800.0, "980": 406758848.0, "985": 412576352.0, "990": 407734688.0, "995": 410222336.0, "1000": 414074720.0, "1005": 405781856.0, "1010": 419718112.0, "1015": 413891680.0, "1020": 413900320.0, "1025": 410999328.0, "1030": 403687968.0, "1035": 420490816.0, "1040": 408928928.0, "1045": 411334816.0, "1050": 414040000.0, "1055": 415260928.0, "1060": 409937056.0, "1065": 412780416.0, "1070": 407123200.0, "1075": 409015488.0, "1080": 418024256.0, "1085": 412478336.0, "1090": 414585600.0, "1095": 418749632.0, "1100": 411093216.0, "1105": 413018112.0, "1110": 407327040.0, "1115": 415984448.0, "1120": 407400448.0, "1125": 417686976.0, "1130": 417922848.0, "1135": 407439712.0, "1140": 415212160.0, "1145": 413782176.0, "1150": 412067456.0, "1155": 406358720.0, "1160": 415345344.0, "1165": 412422432.0, "1170": 410875392.0, "1175": 412858080.0, "1180": 412682560.0, "1185": 411930784.0, "1190": 417114016.0, "1195": 406447584.0, "1200": 410972320.0, "1205": 424624608.0, "1210": 409678944.0, "1215": 408700128.0, "1220": 407518720.0, "1225": 410064544.0, "1230": 418411552.0, "1235": 412543520.0, "1240": 410219584.0, "1245": 412481760.0, "1250": 406503936.0, "1255": 415942560.0, "1260": 408755456.0, "1265": 409597408.0, "1270": 406937952.0, "1275": 413294432.0, "1280": 409163104.0, "1285": 410126784.0, "1290": 409286240.0, "1295": 414242048.0, "1300": 409152832.0, "1305": 407935872.0, "1310": 413584800.0, "1315": 409717280.0, "1320": 407098656.0, "1325": 413078208.0, "1330": 417862336.0, "1335": 408945920.0, "1340": 417989408.0, "1345": 412727680.0, "1350": 419564160.0, "1355": 412065472.0, "1360": 402343616.0, "1365": 415138464.0, "1370": 417915872.0, "1375": 400933952.0, "1380": 413303616.0, "1385": 414372160.0, "1390": 412121920.0, "1395": 413721024.0, "1400": 404603008.0, "1405": 412338656.0, "1410": 419108064.0, "1415": 407761760.0, "1420": 411903872.0, "1425": 408413728.0, "1430": 411996608.0, "1435": 415376416.0, "1440": 408488544.0, "1445": 416848832.0, "1450": 412753248.0, "1455": 410339072.0, "1460": 414054304.0, "1465": 413369056.0, "1470": 413868864.0, "1475": 414425152.0, "1480": 413743840.0, "1485": 413896288.0, "1490": 418773824.0, "1495": 409660160.0, "1500": 418482656.0, "1505": 403655936.0, "1510": 414426048.0, "1515": 404487680.0, "1520": 413757728.0, "1525": 413695040.0, "1530": 419422304.0, "1535": 404635072.0, "1540": 411434592.0, "1545": 409016992.0, "1550": 411309344.0, "1555": 406650720.0, "1560": 406775872.0, "1565": 413991872.0, "1570": 418585728.0, "1575": 412424064.0, "1580": 411311680.0, "1585": 417941888.0, "1590": 409018976.0, 
"1595": 404163904.0, "1600": 409882656.0, "1605": 407609184.0, "1610": 420194976.0, "1615": 408820576.0, "1620": 415816064.0, "1625": 411775840.0, "1630": 408939328.0, "1635": 413065184.0, "1640": 411029952.0, "1645": 412690336.0, "1650": 412669984.0, "1655": 417726656.0, "1660": 416720640.0, "1665": 421187872.0, "1670": 417582464.0, "1675": 405751008.0, "1680": 416786112.0, "1685": 416329952.0, "1690": 412530240.0, "1695": 413672448.0, "1700": 407052768.0, "1705": 408814336.0, "1710": 416923136.0, "1715": 410372416.0, "1720": 404131424.0, "1725": 410464192.0, "1730": 418648448.0, "1735": 417124768.0, "1740": 411034272.0, "1745": 411071328.0, "1750": 410844320.0, "1755": 417420288.0, "1760": 406787552.0, "1765": 416367136.0, "1770": 404459552.0, "1775": 417543552.0, "1780": 413474432.0, "1785": 412874944.0, "1790": 407661632.0, "1795": 415059456.0, "1800": 409079232.0, "1805": 426667552.0, "1810": 407199968.0, "1815": 413999936.0, "1820": 415243424.0, "1825": 417352608.0, "1830": 407460256.0, "1835": 420255424.0, "1840": 407119744.0, "1845": 412622112.0, "1850": 421212096.0, "1855": 412245312.0, "1860": 417109312.0, "1865": 407812992.0, "1870": 406376672.0, "1875": 414693472.0, "1880": 410792096.0, "1885": 412041056.0, "1890": 414642720.0, "1895": 406859456.0, "1900": 410935104.0, "1905": 413755008.0, "1910": 417161824.0, "1915": 416156384.0, "1920": 406427776.0, "1925": 415857472.0, "1930": 410062464.0, "1935": 404456256.0, "1940": 418425056.0, "1945": 405736064.0, "1950": 413199936.0, "1955": 416181408.0, "1960": 415724992.0, "1965": 408035616.0, "1970": 417083712.0, "1975": 411146272.0, "1980": 405922208.0, "1985": 407796320.0, "1990": 409387488.0, "1995": 406984768.0, "2000": 418201600.0, "2005": 412551968.0, "2010": 407924096.0, "2015": 404343552.0, "2020": 414575360.0, "2025": 411195680.0, "2030": 410997728.0, "2035": 410663232.0, "2040": 413131328.0, "2045": 418046432.0, "2050": 416187424.0, "2055": 410046752.0, "2060": 414154656.0, "2065": 416042944.0, "2070": 408420000.0, "2075": 408083456.0, "2080": 415031200.0, "2085": 408801024.0, "2090": 422519872.0, "2095": 412104320.0, "2100": 406876256.0, "2105": 409882880.0, "2110": 412471328.0, "2115": 416000864.0, "2120": 417545888.0, "2125": 412494944.0, "2130": 415155712.0, "2135": 411722528.0, "2140": 416608576.0, "2145": 407847584.0, "2150": 412196704.0, "2155": 421872032.0, "2160": 414864096.0, "2165": 417309952.0, "2170": 412891744.0, "2175": 412058784.0, "2180": 401520512.0, "2185": 419656416.0, "2190": 407853344.0, "2195": 411336672.0, "2200": 416486752.0, "2205": 416238688.0, "2210": 406785376.0, "2215": 414889280.0, "2220": 409236896.0, "2225": 416554240.0, "2230": 411538720.0, "2235": 414088832.0, "2240": 410443776.0, "2245": 422941824.0, "2250": 407624672.0, "2255": 411855552.0, "2260": 415275136.0, "2265": 401930336.0, "2270": 411847552.0, "2275": 403334208.0, "2280": 412774048.0, "2285": 409613088.0, "2290": 405387232.0, "2295": 412124576.0, "2300": 414685152.0, "2305": 412053184.0, "2310": 413968768.0, "2315": 406127008.0, "2320": 414424032.0, "2325": 407837760.0, "2330": 407155328.0, "2335": 414217856.0, "2340": 396988000.0, "2345": 410649760.0, "2350": 417693632.0, "2355": 414642720.0, "2360": 413548192.0, "2365": 415827296.0, "2370": 408944160.0, "2375": 416058624.0, "2380": 414399776.0, "2385": 416522176.0, "2390": 410234688.0, "2395": 407590784.0, "2400": 418749824.0, "2405": 413568544.0, "2410": 412590560.0, "2415": 408923552.0, "2420": 410427936.0, "2425": 418363648.0, "2430": 411182880.0, "2435": 417279968.0, 
"2440": 415496096.0, "2445": 409339072.0, "2450": 415650528.0, "2455": 408185248.0, "2460": 412614400.0, "2465": 412159392.0, "2470": 417437600.0, "2475": 409695456.0, "2480": 412302688.0, "2485": 405355456.0, "2490": 408197440.0, "2495": 409798624.0, "2500": 414512480.0, "2505": 409076064.0, "2510": 408936064.0, "2515": 419017920.0, "2520": 423228096.0, "2525": 412772608.0, "2530": 415984832.0, "2535": 411768768.0, "2540": 414284992.0, "2545": 416077344.0, "2550": 426342208.0, "2555": 417130144.0, "2560": 408565120.0, "2565": 410202592.0, "2570": 405218080.0, "2575": 409918112.0, "2580": 407686720.0, "2585": 414997312.0, "2590": 418695392.0, "2595": 406941024.0, "2600": 404454688.0, "2605": 422336832.0, "2610": 407384896.0, "2615": 415182816.0, "2620": 410346624.0, "2625": 413425664.0, "2630": 418701344.0, "2635": 417756768.0, "2640": 414691072.0, "2645": 409664352.0, "2650": 408149344.0, "2655": 414080960.0, "2660": 408577280.0, "2665": 409611776.0, "2670": 413776032.0, "2675": 400311328.0, "2680": 415572736.0, "2685": 410069056.0, "2690": 402840192.0, "2695": 420057760.0, "2700": 413597216.0, "2705": 409035872.0, "2710": 407149664.0, "2715": 410084704.0, "2720": 410700480.0, "2725": 413178464.0, "2730": 412584576.0, "2735": 413076448.0, "2740": 412834944.0, "2745": 417259552.0, "2750": 413596224.0, "2755": 410171520.0, "2760": 416743872.0, "2765": 421545504.0, "2770": 410954432.0, "2775": 416081696.0, "2780": 410019264.0, "2785": 416410656.0, "2790": 409065728.0, "2795": 414711936.0, "2800": 406917312.0, "2805": 414279424.0, "2810": 414379200.0, "2815": 404258272.0, "2820": 414118784.0, "2825": 417695840.0, "2830": 422071712.0, "2835": 416370560.0, "2840": 415198112.0, "2845": 412387328.0, "2850": 411374720.0, "2855": 423513056.0, "2860": 412038432.0, "2865": 411379328.0, "2870": 412293664.0, "2875": 420638560.0, "2880": 405579680.0, "2885": 412432832.0, "2890": 411907296.0, "2895": 407688608.0, "2900": 423112672.0, "2905": 404241536.0, "2910": 420966752.0, "2915": 407544832.0, "2920": 415805696.0, "2925": 415967552.0, "2930": 414606592.0, "2935": 413695616.0, "2940": 409965600.0, "2945": 410858016.0, "2950": 419697440.0, "2955": 402783328.0, "2960": 411678912.0, "2965": 409907040.0, "2970": 409766080.0, "2975": 418173408.0, "2980": 410430144.0, "2985": 407754624.0, "2990": 412374688.0, "2995": 401154400.0, "3000": 409474784.0, "3005": 410471936.0, "3010": 419281504.0, "3015": 409504064.0, "3020": 403232544.0, "3025": 408271840.0, "3030": 409526304.0, "3035": 411916032.0, "3040": 411906720.0, "3045": 403002496.0, "3050": 420560576.0, "3055": 422635296.0, "3060": 409080224.0, "3065": 408836704.0, "3070": 419813984.0, "3075": 409167584.0, "3080": 424930816.0, "3085": 407316064.0, "3090": 416144032.0, "3095": 413032256.0, "3100": 421664032.0, "3105": 416491296.0, "3110": 413192384.0, "3115": 413482112.0, "3120": 422866592.0, "3125": 409825568.0, "3130": 421932896.0, "3135": 415607040.0, "3140": 408571072.0, "3145": 421601152.0, "3150": 412155040.0, "3155": 411871936.0, "3160": 414743904.0, "3165": 412631040.0, "3170": 414463104.0, "3175": 414958048.0, "3180": 411850656.0, "3185": 416226912.0, "3190": 413623872.0, "3195": 410476544.0, "3200": 419148800.0, "3205": 409017216.0, "3210": 408178912.0, "3215": 413908896.0, "3220": 410773728.0, "3225": 416885184.0, "3230": 417519424.0, "3235": 404044032.0, "3240": 414908000.0, "3245": 410562976.0, "3250": 412317440.0, "3255": 404856096.0, "3260": 425139264.0, "3265": 413812448.0, "3270": 408447136.0, "3275": 405111200.0, "3280": 424216000.0, 
"3285": 418650688.0, "3290": 410552160.0, "3295": 413472448.0, "3300": 416158880.0, "3305": 410676480.0, "3310": 415461792.0, "3315": 413031584.0, "3320": 412408416.0, "3325": 410737408.0, "3330": 408939712.0, "3335": 414388576.0, "3340": 408211872.0, "3345": 409493184.0, "3350": 413845088.0, "3355": 420478240.0, "3360": 409090528.0, "3365": 409058752.0, "3370": 414138912.0, "3375": 409597376.0, "3380": 417731296.0, "3385": 411409408.0, "3390": 421702688.0, "3395": 415478720.0, "3400": 413851648.0, "3405": 417916704.0, "3410": 412494624.0, "3415": 409163296.0, "3420": 414290208.0, "3425": 410991968.0, "3430": 413637728.0, "3435": 409447936.0, "3440": 416489952.0, "3445": 411919168.0, "3450": 418141824.0, "3455": 406595776.0, "3460": 409709312.0, "3465": 416336096.0, "3470": 414253728.0, "3475": 415527936.0, "3480": 417283008.0, "3485": 413668256.0, "3490": 414843744.0, "3495": 408656224.0, "3500": 424663072.0, "3505": 409291104.0, "3510": 410937824.0, "3515": 411994976.0, "3520": 421331584.0, "3525": 404112704.0, "3530": 421461664.0, "3535": 418734464.0, "3540": 411952064.0, "3545": 413008576.0, "3550": 418656448.0, "3555": 409463648.0, "3560": 406303456.0, "3565": 409483072.0, "3570": 407474112.0, "3575": 413772640.0, "3580": 414237120.0, "3585": 416869056.0, "3590": 414359424.0, "3595": 409143616.0, "3600": 414504032.0, "3605": 406114720.0, "3610": 414010656.0, "3615": 411591936.0, "3620": 413609824.0, "3625": 412588576.0, "3630": 411896096.0, "3635": 416900416.0, "3640": 413177888.0, "3645": 411121632.0, "3650": 424189216.0, "3655": 413261856.0, "3660": 415787776.0, "3665": 410498208.0, "3670": 411584256.0, "3675": 412776704.0, "3680": 417748448.0, "3685": 407783328.0, "3690": 403662656.0, "3695": 409786848.0, "3700": 418492864.0, "3705": 407271712.0, "3710": 409967808.0, "3715": 413823104.0, "3720": 402373248.0, "3725": 413975008.0, "3730": 398626944.0, "3735": 414608480.0, "3740": 415426720.0, "3745": 413135264.0, "3750": 414614784.0, "3755": 416328416.0, "3760": 409056320.0, "3765": 414411424.0, "3770": 413950368.0, "3775": 412203424.0, "3780": 411484928.0, "3785": 413938528.0, "3790": 411309696.0, "3795": 403620000.0, "3800": 415103744.0, "3805": 409889568.0, "3810": 413182624.0, "3815": 410953504.0, "3820": 409127328.0, "3825": 414568832.0, "3830": 415761760.0, "3835": 410011872.0, "3840": 418846144.0, "3845": 418044832.0, "3850": 408646496.0, "3855": 407298432.0, "3860": 412501312.0, "3865": 422725312.0, "3870": 417772000.0, "3875": 416375648.0, "3880": 417630528.0, "3885": 408414784.0, "3890": 421968320.0, "3895": 417223360.0, "3900": 406862336.0, "3905": 408388800.0, "3910": 410466464.0, "3915": 411193792.0, "3920": 409258624.0, "3925": 420950528.0, "3930": 421766240.0, "3935": 407371232.0, "3940": 407527488.0, "3945": 411227712.0, "3950": 408199808.0, "3955": 409808288.0, "3960": 412020736.0, "3965": 407734720.0, "3970": 410356576.0, "3975": 409690080.0, "3980": 422085824.0, "3985": 407105344.0, "3990": 417155808.0, "3995": 413276768.0, "4000": 409683456.0, "4005": 420248576.0, "4010": 420237760.0, "4015": 402524832.0, "4020": 410113056.0, "4025": 407380256.0, "4030": 413353408.0, "4035": 410746528.0, "4040": 411806272.0, "4045": 394842976.0, "4050": 422394496.0, "4055": 410412256.0, "4060": 414335264.0, "4065": 403571584.0, "4070": 411274048.0, "4075": 411381280.0, "4080": 409931232.0, "4085": 409806464.0, "4090": 407945632.0, "4095": 418261568.0, "4100": 415938464.0, "4105": 408053312.0, "4110": 410109472.0, "4115": 408823424.0, "4120": 409600704.0, "4125": 416023456.0, 
"4130": 409615936.0, "4135": 412093600.0, "4140": 417433184.0, "4145": 411492256.0, "4150": 417287936.0, "4155": 414910240.0, "4160": 414634880.0, "4165": 411900960.0, "4170": 416480992.0, "4175": 416329952.0, "4180": 412018208.0, "4185": 420822624.0, "4190": 414833952.0, "4195": 405721984.0, "4200": 422442720.0, "4205": 405714432.0, "4210": 414668544.0, "4215": 412816416.0, "4220": 409211808.0, "4225": 408457632.0, "4230": 411757376.0, "4235": 409846688.0, "4240": 410592480.0, "4245": 414698176.0, "4250": 412068672.0, "4255": 407949952.0, "4260": 414240480.0, "4265": 411941216.0, "4270": 406332064.0, "4275": 416401056.0, "4280": 409251744.0, "4285": 412433792.0, "4290": 412137440.0, "4295": 410661344.0, "4300": 406251424.0, "4305": 410490048.0, "4310": 415785120.0, "4315": 411728192.0, "4320": 413725472.0, "4325": 410911104.0, "4330": 407227968.0, "4335": 413550816.0, "4340": 413427680.0, "4345": 414332448.0, "4350": 411803392.0, "4355": 419961408.0, "4360": 407156416.0, "4365": 415368384.0, "4370": 413754464.0, "4375": 407903904.0, "4380": 417096672.0, "4385": 400659744.0, "4390": 412815712.0, "4395": 411875264.0, "4400": 413934240.0, "4405": 417650208.0, "4410": 416623968.0, "4415": 409992448.0, "4420": 415535872.0, "4425": 407122144.0, "4430": 405725952.0, "4435": 403982176.0, "4440": 405511456.0, "4445": 417672480.0, "4450": 410330400.0, "4455": 412820864.0, "4460": 415227072.0, "4465": 407152608.0, "4470": 414263648.0, "4475": 418538112.0, "4480": 413878912.0, "4485": 413100384.0, "4490": 413431296.0, "4495": 406261216.0, "4500": 417938208.0, "4505": 412168448.0, "4510": 404689824.0, "4515": 415188640.0, "4520": 406930176.0, "4525": 401566144.0, "4530": 413608384.0, "4535": 416106048.0, "4540": 412023840.0, "4545": 418901632.0, "4550": 406146528.0, "4555": 415872288.0, "4560": 411939936.0, "4565": 415973056.0, "4570": 415513024.0, "4575": 410447296.0, "4580": 415248384.0, "4585": 419177376.0, "4590": 415934912.0, "4595": 411039712.0, "4600": 411234880.0, "4605": 410501056.0, "4610": 409306880.0, "4615": 411091072.0, "4620": 408869248.0, "4625": 418067488.0, "4630": 413921280.0, "4635": 412628096.0, "4640": 406219488.0, "4645": 414081824.0, "4650": 413127552.0, "4655": 408587264.0, "4660": 415375936.0, "4665": 411055008.0, "4670": 406017632.0, "4675": 407418976.0, "4680": 412045280.0, "4685": 401736224.0, "4690": 412037184.0, "4695": 413019808.0, "4700": 411674272.0, "4705": 414052096.0, "4710": 406642208.0, "4715": 412716256.0, "4720": 418103296.0, "4725": 414817824.0, "4730": 400912416.0, "4735": 406276736.0, "4740": 411904896.0, "4745": 405695040.0, "4750": 409201792.0, "4755": 405900480.0, "4760": 412470144.0, "4765": 411690880.0, "4770": 414659296.0, "4775": 407465536.0, "4780": 414013344.0, "4785": 416539296.0, "4790": 415739616.0, "4795": 403474720.0, "4800": 410493056.0, "4805": 412289824.0, "4810": 405198688.0, "4815": 413806944.0, "4820": 415481568.0, "4825": 409346080.0, "4830": 410953792.0, "4835": 411579456.0, "4840": 419485376.0, "4845": 404130816.0, "4850": 404556128.0, "4855": 411607488.0, "4860": 415919424.0, "4865": 415292480.0, "4870": 402863648.0, "4875": 416291168.0, "4880": 415523904.0, "4885": 419949696.0, "4890": 409046304.0, "4895": 410465632.0, "4900": 410577824.0, "4905": 408999456.0, "4910": 405696928.0, "4915": 412922464.0, "4920": 409820704.0, "4925": 405077664.0, "4930": 418715456.0, "4935": 414266400.0, "4940": 413330528.0, "4945": 421128480.0, "4950": 418268608.0, "4955": 416239232.0, "4960": 410115744.0, "4965": 409395200.0, "4970": 414350496.0, 
"4975": 411729536.0, "4980": 415933728.0, "4985": 411333056.0, "4990": 411462784.0, "4995": 423259552.0, "5000": 407557984.0, "5005": 418368800.0, "5010": 414669504.0, "5015": 409324672.0, "5020": 417505760.0, "5025": 413801184.0, "5030": 414360288.0, "5035": 406839968.0, "5040": 415636128.0, "5045": 415750176.0, "5050": 411820896.0, "5055": 415545152.0, "5060": 412474208.0, "5065": 412387520.0, "5070": 405806080.0, "5075": 402033504.0, "5080": 414822496.0, "5085": 418128000.0, "5090": 408607168.0, "5095": 417364992.0, "5100": 415794336.0, "5105": 418494528.0, "5110": 410620032.0, "5115": 422610432.0, "5120": 414145504.0, "5125": 416546976.0, "5130": 409353632.0, "5135": 417953120.0, "5140": 413615840.0, "5145": 411641856.0, "5150": 413686016.0, "5155": 415762400.0, "5160": 411735616.0, "5165": 408948960.0, "5170": 422829984.0, "5175": 415294592.0, "5180": 408416224.0, "5185": 415889600.0, "5190": 409353472.0, "5195": 402503072.0, "5200": 416629728.0, "5205": 413331168.0, "5210": 414525376.0, "5215": 418582464.0, "5220": 407277824.0, "5225": 418558304.0, "5230": 415471360.0, "5235": 410741536.0, "5240": 410024992.0, "5245": 422281824.0, "5250": 410481792.0, "5255": 412055936.0, "5260": 417262432.0, "5265": 410521024.0, "5270": 418441216.0, "5275": 407217728.0, "5280": 408357568.0, "5285": 410931648.0, "5290": 412951520.0, "5295": 410419296.0, "5300": 405672768.0, "5305": 416849184.0, "5310": 406242656.0, "5315": 413686816.0, "5320": 410464832.0, "5325": 410738880.0, "5330": 407531680.0, "5335": 414709440.0, "5340": 418533184.0, "5345": 405538880.0, "5350": 419318592.0, "5355": 410044992.0, "5360": 410350400.0, "5365": 409319904.0, "5370": 414749280.0, "5375": 404875680.0, "5380": 415539648.0, "5385": 407600288.0, "5390": 415229184.0, "5395": 415802432.0, "5400": 400666208.0, "5405": 416253120.0, "5410": 415432480.0, "5415": 405908992.0, "5420": 409366784.0, "5425": 416489664.0, "5430": 409741152.0, "5435": 410575328.0, "5440": 408676224.0, "5445": 405253536.0, "5450": 410712096.0, "5455": 407337280.0, "5460": 414333120.0, "5465": 408165120.0, "5470": 408905792.0, "5475": 401470720.0, "5480": 405460096.0, "5485": 422281248.0, "5490": 410419744.0, "5495": 413917472.0, "5500": 406814816.0, "5505": 409153568.0, "5510": 411791616.0, "5515": 417159392.0, "5520": 411183744.0, "5525": 407144480.0, "5530": 412771936.0, "5535": 408203648.0, "5540": 414014176.0, "5545": 412591680.0, "5550": 411516480.0, "5555": 410817536.0, "5560": 414410304.0, "5565": 402175776.0, "5570": 415996256.0, "5575": 421379008.0, "5580": 404484128.0, "5585": 415410752.0, "5590": 412109344.0, "5595": 409702432.0, "5600": 416091168.0, "5605": 416287648.0, "5610": 410623008.0, "5615": 410266592.0, "5620": 411873568.0, "5625": 413388736.0, "5630": 414310784.0, "5635": 409590080.0, "5640": 409739200.0, "5645": 409822336.0, "5650": 411823360.0, "5655": 403945472.0, "5660": 412682368.0, "5665": 422400704.0, "5670": 410413184.0, "5675": 409435680.0, "5680": 414750400.0, "5685": 404162208.0, "5690": 412766016.0, "5695": 416484288.0, "5700": 413474944.0, "5705": 409526208.0, "5710": 405639584.0, "5715": 417141792.0, "5720": 424190176.0, "5725": 408261536.0, "5730": 409801408.0, "5735": 410570720.0, "5740": 412880640.0, "5745": 407143136.0, "5750": 408675328.0, "5755": 418951424.0, "5760": 405321792.0, "5765": 414485120.0, "5770": 415993056.0, "5775": 408643872.0, "5780": 412723648.0, "5785": 407653088.0, "5790": 405480768.0, "5795": 418272224.0, "5800": 417261472.0, "5805": 412915616.0, "5810": 416682848.0, "5815": 412133728.0, 
"5820": 410378624.0, "5825": 414537792.0, "5830": 405827872.0, "5835": 413125248.0, "5840": 406457152.0, "5845": 417834016.0, "5850": 410087744.0, "5855": 406537504.0, "5860": 414612256.0, "5865": 408107264.0, "5870": 412287328.0, "5875": 414209536.0, "5880": 418803040.0, "5885": 409206496.0, "5890": 412961920.0, "5895": 419941184.0, "5900": 415926208.0, "5905": 409969376.0, "5910": 414287104.0, "5915": 414409024.0, "5920": 412529024.0, "5925": 409249120.0, "5930": 407340608.0, "5935": 424559840.0, "5940": 412634848.0, "5945": 414439232.0, "5950": 421274080.0, "5955": 408342880.0, "5960": 412454208.0, "5965": 408245184.0, "5970": 415200864.0, "5975": 409502912.0, "5980": 417019936.0, "5985": 418621184.0, "5990": 410166336.0, "5995": 413908832.0, "6000": 419510656.0, "6005": 415226336.0, "6010": 419215328.0, "6015": 402344320.0, "6020": 399961568.0, "6025": 414645280.0, "6030": 411471488.0, "6035": 411865728.0, "6040": 408651968.0, "6045": 414154528.0, "6050": 410430112.0, "6055": 408756032.0, "6060": 413972224.0, "6065": 409856480.0, "6070": 406623712.0, "6075": 417531040.0, "6080": 418653920.0, "6085": 412222464.0, "6090": 409827808.0, "6095": 420232192.0, "6100": 420691936.0, "6105": 419886560.0, "6110": 414351040.0, "6115": 416971680.0, "6120": 410791552.0, "6125": 420743680.0, "6130": 414362016.0, "6135": 402875264.0, "6140": 415047008.0, "6145": 412447552.0, "6150": 413621440.0, "6155": 415962624.0, "6160": 410085952.0, "6165": 410891904.0, "6170": 419701568.0, "6175": 404838656.0, "6180": 412637120.0, "6185": 419900512.0, "6190": 424128736.0, "6195": 404344096.0, "6200": 411170144.0, "6205": 416982080.0, "6210": 412134944.0, "6215": 408962944.0, "6220": 410454304.0, "6225": 418106816.0, "6230": 415503840.0, "6235": 405718176.0, "6240": 407816384.0, "6245": 411393888.0, "6250": 408117120.0, "6255": 409041056.0, "6260": 402481824.0, "6265": 417878144.0, "6270": 413492384.0, "6275": 415052160.0, "6280": 418281120.0, "6285": 414462336.0, "6290": 412027968.0, "6295": 408839904.0, "6300": 406867936.0, "6305": 418176096.0, "6310": 413460320.0, "6315": 411008416.0, "6320": 415916640.0, "6325": 402055488.0, "6330": 416932928.0, "6335": 413974816.0, "6340": 415470688.0, "6345": 411635424.0, "6350": 413888928.0, "6355": 416072864.0, "6360": 407335648.0, "6365": 410248928.0, "6370": 420421856.0, "6375": 406308064.0, "6380": 410392832.0, "6385": 409398400.0, "6390": 409213088.0, "6395": 415792448.0, "6400": 422207968.0, "6405": 418616352.0, "6410": 416314176.0, "6415": 407346368.0, "6420": 409542816.0, "6425": 418042592.0, "6430": 415417024.0, "6435": 419618560.0, "6440": 408266720.0, "6445": 416458912.0, "6450": 408738272.0, "6455": 412932384.0, "6460": 413719136.0, "6465": 412624160.0, "6470": 409542400.0, "6475": 409972448.0, "6480": 408541536.0, "6485": 411310400.0, "6490": 405334880.0, "6495": 406966400.0, "6500": 415916320.0, "6505": 405725056.0, "6510": 413454592.0, "6515": 403530464.0, "6520": 411314112.0, "6525": 411358560.0, "6530": 412311456.0, "6535": 409026816.0, "6540": 410557088.0, "6545": 406088512.0, "6550": 412560352.0, "6555": 408191776.0, "6560": 411903616.0, "6565": 411151968.0, "6570": 418523424.0, "6575": 407058720.0, "6580": 405059872.0, "6585": 416183712.0, "6590": 416184096.0, "6595": 410650240.0, "6600": 411241056.0, "6605": 413198176.0, "6610": 417187264.0, "6615": 420742432.0, "6620": 405772128.0, "6625": 416095872.0, "6630": 407011232.0, "6635": 414514304.0, "6640": 405267968.0, "6645": 406513888.0, "6650": 406921792.0, "6655": 413602976.0, "6660": 414554752.0, 
"6665": 412124096.0, "6670": 410713760.0, "6675": 417216384.0, "6680": 419384736.0, "6685": 412483104.0, "6690": 413527072.0, "6695": 407057600.0, "6700": 408828512.0, "6705": 417045152.0, "6710": 408922208.0, "6715": 414512896.0, "6720": 401644256.0, "6725": 409625952.0, "6730": 411195328.0, "6735": 416880800.0, "6740": 408526080.0, "6745": 413800288.0, "6750": 418802304.0, "6755": 420689536.0, "6760": 410718848.0, "6765": 413942368.0, "6770": 413403264.0, "6775": 415309536.0, "6780": 409660384.0, "6785": 407186848.0, "6790": 410953728.0, "6795": 410342848.0, "6800": 406869632.0, "6805": 412720736.0, "6810": 401725056.0, "6815": 418264512.0, "6820": 409342752.0, "6825": 408748672.0, "6830": 415220192.0, "6835": 407817856.0, "6840": 408645408.0, "6845": 415763296.0, "6850": 402546272.0, "6855": 413446048.0, "6860": 416460832.0, "6865": 407661376.0, "6870": 411262496.0, "6875": 414464384.0, "6880": 407913536.0, "6885": 415787968.0, "6890": 407454336.0, "6895": 414814112.0, "6900": 410030208.0, "6905": 408211328.0, "6910": 412157504.0, "6915": 421944288.0, "6920": 419993632.0, "6925": 408509568.0, "6930": 413117088.0, "6935": 419478912.0, "6940": 410057184.0, "6945": 403101536.0, "6950": 413664768.0, "6955": 418628320.0, "6960": 410870560.0, "6965": 413976160.0, "6970": 408200672.0, "6975": 420129280.0, "6980": 422402688.0, "6985": 413408512.0, "6990": 416336288.0, "6995": 418589792.0, "7000": 404217376.0, "7005": 407996224.0, "7010": 411170496.0, "7015": 414182368.0, "7020": 411175840.0, "7025": 413026624.0, "7030": 410066176.0, "7035": 410422080.0, "7040": 411600064.0, "7045": 411173024.0, "7050": 410118848.0, "7055": 408950368.0, "7060": 416484000.0, "7065": 418327456.0, "7070": 408944736.0, "7075": 410022400.0, "7080": 415851744.0, "7085": 414021792.0, "7090": 404946240.0, "7095": 403127392.0, "7100": 412244064.0, "7105": 410492224.0, "7110": 418444512.0, "7115": 418524928.0, "7120": 415542688.0, "7125": 405306176.0, "7130": 413235936.0, "7135": 413055264.0, "7140": 414055616.0, "7145": 411334208.0, "7150": 411977024.0, "7155": 419347584.0, "7160": 422695584.0, "7165": 418109856.0, "7170": 413164928.0, "7175": 408590592.0, "7180": 411181888.0, "7185": 411889376.0, "7190": 412547264.0, "7195": 412611072.0, "7200": 407732288.0, "7205": 413126912.0, "7210": 414096224.0, "7215": 420879168.0, "7220": 407700416.0, "7225": 417756064.0, "7230": 407562464.0, "7235": 414960928.0, "7240": 409365760.0, "7245": 411047872.0, "7250": 406500800.0, "7255": 416817024.0, "7260": 412142912.0, "7265": 410186848.0, "7270": 416622304.0, "7275": 406956864.0, "7280": 413012960.0, "7285": 420293056.0, "7290": 413619616.0, "7295": 417696320.0, "7300": 413332672.0, "7305": 414994912.0, "7310": 420673504.0, "7315": 410827616.0, "7320": 419956800.0, "7325": 412520576.0, "7330": 408984896.0, "7335": 418206464.0, "7340": 406613504.0, "7345": 407021664.0, "7350": 408917472.0, "7355": 409325792.0, "7360": 412794368.0, "7365": 414192512.0, "7370": 413140384.0, "7375": 411673984.0, "7380": 412258144.0, "7385": 416494944.0, "7390": 409423072.0, "7395": 409351328.0, "7400": 414837504.0, "7405": 410540416.0, "7410": 402142880.0, "7415": 416336384.0, "7420": 411342560.0, "7425": 415922560.0, "7430": 413437664.0, "7435": 413011648.0, "7440": 420834240.0, "7445": 414146112.0, "7450": 422991200.0, "7455": 411853664.0, "7460": 417257088.0, "7465": 414837024.0, "7470": 418136352.0, "7475": 401464896.0, "7480": 410460896.0, "7485": 407165696.0, "7490": 407273408.0, "7495": 406767840.0, "7500": 418720384.0, "7505": 410186848.0, 
"7510": 414211296.0, "7515": 416279520.0, "7520": 407321696.0, "7525": 409796992.0, "7530": 413473888.0, "7535": 408674880.0, "7540": 413294240.0, "7545": 407203520.0, "7550": 406472192.0, "7555": 408158624.0, "7560": 420126592.0, "7565": 406878400.0, "7570": 407340832.0, "7575": 409939712.0, "7580": 411733440.0, "7585": 410515008.0, "7590": 409219200.0, "7595": 410073088.0, "7600": 397649824.0, "7605": 414513952.0, "7610": 404503328.0, "7615": 409361792.0, "7620": 417440032.0, "7625": 419100544.0, "7630": 410802464.0, "7635": 407831296.0, "7640": 411256704.0, "7645": 410431168.0, "7650": 420687360.0, "7655": 412449344.0, "7660": 410452960.0, "7665": 411399616.0, "7670": 408572160.0, "7675": 415496608.0, "7680": 416183232.0, "7685": 419025600.0, "7690": 411275456.0, "7695": 417968512.0, "7700": 414070432.0, "7705": 405256160.0, "7710": 410151520.0, "7715": 407150400.0, "7720": 416972928.0, "7725": 408501856.0, "7730": 416362816.0, "7735": 413586976.0, "7740": 410912448.0, "7745": 420445792.0, "7750": 411971584.0, "7755": 407272032.0, "7760": 406110176.0, "7765": 406779008.0, "7770": 413400064.0, "7775": 413942784.0, "7780": 410371360.0, "7785": 414093056.0, "7790": 413295712.0, "7795": 412614560.0, "7800": 409222368.0, "7805": 416100832.0, "7810": 416417664.0, "7815": 407279136.0, "7820": 401837568.0, "7825": 410667424.0, "7830": 412957312.0, "7835": 412029120.0, "7840": 407136320.0, "7845": 414756544.0, "7850": 417606208.0, "7855": 411267488.0, "7860": 410887936.0, "7865": 413622112.0, "7870": 409684544.0, "7875": 418857248.0, "7880": 416503296.0, "7885": 414876960.0, "7890": 410578688.0, "7895": 404973056.0, "7900": 417938080.0, "7905": 404369984.0, "7910": 412629984.0, "7915": 409937376.0, "7920": 410924224.0, "7925": 419166976.0, "7930": 403945440.0, "7935": 407397984.0, "7940": 413858400.0, "7945": 407746592.0, "7950": 411884992.0, "7955": 414415104.0, "7960": 412802944.0, "7965": 415665312.0, "7970": 411156032.0, "7975": 410106048.0, "7980": 408994464.0, "7985": 406480640.0, "7990": 421233120.0, "7995": 412904960.0, "8000": 403135776.0, "8005": 415336480.0, "8010": 413507648.0, "8015": 420555040.0, "8020": 417037184.0, "8025": 411944160.0, "8030": 412760960.0, "8035": 410014976.0, "8040": 410914688.0, "8045": 415953024.0, "8050": 416048896.0, "8055": 410061024.0, "8060": 405639200.0, "8065": 422258816.0, "8070": 411564448.0, "8075": 406325536.0, "8080": 420957728.0, "8085": 406942240.0, "8090": 413928448.0, "8095": 413809056.0, "8100": 413210080.0, "8105": 409856928.0, "8110": 411372544.0, "8115": 414317216.0, "8120": 414147136.0, "8125": 413543680.0, "8130": 411233376.0, "8135": 414399456.0, "8140": 408620384.0, "8145": 408973984.0, "8150": 411157024.0, "8155": 410143360.0, "8160": 413469344.0, "8165": 408488480.0, "8170": 418532640.0, "8175": 406543104.0, "8180": 411400224.0, "8185": 409656672.0, "8190": 409556832.0, "8195": 426111552.0, "8200": 407030592.0, "8205": 407949152.0, "8210": 420353376.0, "8215": 408578368.0, "8220": 410752096.0, "8225": 411472096.0, "8230": 421273376.0, "8235": 420522208.0, "8240": 411312640.0, "8245": 414484224.0, "8250": 407650304.0, "8255": 407368864.0, "8260": 414557280.0, "8265": 415229760.0, "8270": 405574496.0, "8275": 413205728.0, "8280": 412580928.0, "8285": 416922624.0, "8290": 411021120.0, "8295": 414796032.0, "8300": 417147840.0, "8305": 412017632.0, "8310": 416716512.0, "8315": 406085472.0, "8320": 416740192.0, "8325": 413308864.0, "8330": 411673760.0, "8335": 413850560.0, "8340": 412153888.0, "8345": 413106048.0, "8350": 424780608.0, 
"8355": 411027264.0, "8360": 407737536.0, "8365": 416018784.0, "8370": 405117472.0, "8375": 413794240.0, "8380": 407997280.0, "8385": 423798176.0, "8390": 407704448.0, "8395": 405698624.0, "8400": 414450080.0, "8405": 410666336.0, "8410": 415028992.0, "8415": 404146976.0, "8420": 409012832.0, "8425": 421378816.0, "8430": 407220640.0, "8435": 417416320.0, "8440": 407566976.0, "8445": 411865440.0, "8450": 412787072.0, "8455": 406507808.0, "8460": 415868704.0, "8465": 414736960.0, "8470": 406852416.0, "8475": 416826752.0, "8480": 418745344.0, "8485": 412590016.0, "8490": 417190080.0, "8495": 412560160.0, "8500": 417235840.0, "8505": 409110080.0, "8510": 411742176.0, "8515": 416828160.0, "8520": 409977184.0, "8525": 412805056.0, "8530": 411518208.0, "8535": 405133664.0, "8540": 416143200.0, "8545": 418823744.0, "8550": 406893760.0, "8555": 419191360.0, "8560": 412594528.0, "8565": 407756160.0, "8570": 407851008.0, "8575": 406469696.0, "8580": 406308640.0, "8585": 404394880.0, "8590": 406092352.0, "8595": 412734464.0, "8600": 419815776.0, "8605": 414261184.0, "8610": 409609664.0, "8615": 407538880.0, "8620": 410798752.0, "8625": 413301504.0, "8630": 409200256.0, "8635": 413166048.0, "8640": 410256288.0, "8645": 411349504.0, "8650": 416690752.0, "8655": 408998368.0, "8660": 413100640.0, "8665": 415106144.0, "8670": 411568416.0, "8675": 408023008.0, "8680": 413864416.0, "8685": 405199584.0, "8690": 409200864.0, "8695": 412889344.0, "8700": 410770912.0, "8705": 414158048.0, "8710": 406851520.0, "8715": 417044192.0, "8720": 408705728.0, "8725": 414094368.0, "8730": 403704128.0, "8735": 422540960.0, "8740": 409304544.0, "8745": 413730688.0, "8750": 414153280.0, "8755": 410938912.0, "8760": 413576096.0, "8765": 401473920.0, "8770": 416217216.0, "8775": 414733408.0, "8780": 413208480.0, "8785": 410490240.0, "8790": 412889344.0, "8795": 412614496.0, "8800": 416107296.0, "8805": 409994784.0, "8810": 412818592.0, "8815": 404951744.0, "8820": 407071520.0, "8825": 410584672.0, "8830": 404506400.0, "8835": 406681664.0, "8840": 416227712.0, "8845": 414738368.0, "8850": 416128320.0, "8855": 411384896.0, "8860": 416827488.0, "8865": 409738944.0, "8870": 411430496.0, "8875": 412862336.0, "8880": 415094880.0, "8885": 412164960.0, "8890": 412304672.0, "8895": 414627744.0, "8900": 415370144.0, "8905": 405735648.0, "8910": 413365312.0, "8915": 411752608.0, "8920": 415233216.0, "8925": 414454048.0, "8930": 403218784.0, "8935": 409607392.0, "8940": 414367136.0, "8945": 407543520.0, "8950": 415770944.0, "8955": 407967776.0, "8960": 414441792.0, "8965": 410897504.0, "8970": 409139936.0, "8975": 415188128.0, "8980": 408470944.0, "8985": 411162592.0, "8990": 412001664.0, "8995": 412860032.0, "9000": 401298624.0, "9005": 400413472.0, "9010": 413995232.0, "9015": 402868832.0, "9020": 418197824.0, "9025": 417279968.0, "9030": 413401184.0, "9035": 405175968.0, "9040": 419538976.0, "9045": 416866784.0, "9050": 413574272.0, "9055": 406227744.0, "9060": 412795776.0, "9065": 417129216.0, "9070": 407551712.0, "9075": 406862240.0, "9080": 413538144.0, "9085": 410917056.0, "9090": 412872320.0, "9095": 412829216.0, "9100": 414376480.0, "9105": 407343872.0, "9110": 406480928.0, "9115": 414557184.0, "9120": 412066272.0, "9125": 408422752.0, "9130": 410064832.0, "9135": 411936448.0, "9140": 415265152.0, "9145": 411107840.0, "9150": 416713312.0, "9155": 407650304.0, "9160": 410235296.0, "9165": 418044512.0, "9170": 411446656.0, "9175": 409887168.0, "9180": 420408576.0, "9185": 411003680.0, "9190": 414596416.0, "9195": 408882432.0, 
"9200": 409891808.0, "9205": 407002880.0, "9210": 417979808.0, "9215": 413842432.0, "9220": 409854464.0, "9225": 417698432.0, "9230": 406274304.0, "9235": 410519008.0, "9240": 412900992.0, "9245": 409602336.0, "9250": 417096000.0, "9255": 408218784.0, "9260": 411129792.0, "9265": 417641024.0, "9270": 416372000.0, "9275": 408729344.0, "9280": 410635424.0, "9285": 410336032.0, "9290": 401481888.0, "9295": 407183488.0, "9300": 410759680.0, "9305": 407812768.0, "9310": 413765088.0, "9315": 408961216.0, "9320": 410903584.0, "9325": 415339744.0, "9330": 408261376.0, "9335": 413883488.0, "9340": 404052480.0, "9345": 413370176.0, "9350": 403688768.0, "9355": 415585824.0, "9360": 415150688.0, "9365": 413478080.0, "9370": 417045312.0, "9375": 408534688.0, "9380": 408193696.0, "9385": 408852608.0, "9390": 416802816.0, "9395": 415052160.0, "9400": 411096736.0, "9405": 413282368.0, "9410": 413333024.0, "9415": 416750336.0, "9420": 413711136.0, "9425": 409061824.0, "9430": 413519008.0, "9435": 413635424.0, "9440": 408695328.0, "9445": 419650432.0, "9450": 414468320.0, "9455": 408240832.0, "9460": 414670144.0, "9465": 416346624.0, "9470": 418429792.0, "9475": 407975200.0, "9480": 416222688.0, "9485": 408079776.0, "9490": 410561504.0, "9495": 413249824.0, "9500": 409853696.0, "9505": 414253920.0, "9510": 418451584.0, "9515": 414512512.0, "9520": 405096032.0, "9525": 424282016.0, "9530": 406824416.0, "9535": 412367648.0, "9540": 412975520.0, "9545": 413232320.0, "9550": 412363520.0, "9555": 413959264.0, "9560": 413150624.0, "9565": 423668320.0, "9570": 424531904.0, "9575": 404440352.0, "9580": 414054368.0, "9585": 420359104.0, "9590": 412196416.0, "9595": 415081856.0, "9600": 411039424.0, "9605": 405948928.0, "9610": 422738176.0, "9615": 415435872.0, "9620": 414151456.0, "9625": 405527168.0, "9630": 406531168.0, "9635": 411129312.0, "9640": 413930528.0, "9645": 409114912.0, "9650": 409035456.0, "9655": 408248896.0, "9660": 418749696.0, "9665": 416888064.0, "9670": 408328320.0, "9675": 418749312.0, "9680": 409088320.0, "9685": 412925696.0, "9690": 408639328.0, "9695": 406003200.0, "9700": 408250752.0, "9705": 406167168.0, "9710": 407587616.0, "9715": 417775680.0, "9720": 411613280.0, "9725": 413711808.0, "9730": 426720608.0, "9735": 417435552.0, "9740": 410139680.0, "9745": 411513888.0, "9750": 410206976.0, "9755": 411148896.0, "9760": 414809152.0, "9765": 413849472.0, "9770": 407049184.0, "9775": 417419520.0, "9780": 409122720.0, "9785": 409847616.0, "9790": 420090784.0, "9795": 411554496.0, "9800": 413739744.0, "9805": 411365696.0, "9810": 412122656.0, "9815": 411116384.0, "9820": 415179232.0, "9825": 419954944.0, "9830": 412708512.0, "9835": 408650656.0, "9840": 410431040.0, "9845": 413527456.0, "9850": 408495520.0, "9855": 420032800.0, "9860": 407940160.0, "9865": 415900000.0, "9870": 411158368.0, "9875": 417983232.0, "9880": 411935584.0, "9885": 418490080.0, "9890": 402729824.0, "9895": 410948704.0, "9900": 410771744.0, "9905": 416962592.0, "9910": 411844928.0, "9915": 409476480.0, "9920": 407859200.0, "9925": 408462272.0, "9930": 409827808.0, "9935": 412717888.0, "9940": 411583168.0, "9945": 420685056.0, "9950": 411311904.0, "9955": 417093472.0, "9960": 412122624.0, "9965": 421832416.0, "9970": 411703840.0, "9975": 416289152.0, "9980": 414918944.0, "9985": 405888672.0, "9990": 417412416.0, "9995": 409660224.0, "10000": 408540256.0, "10005": 414417696.0, "10010": 411803136.0, "10015": 408420896.0, "10020": 412925536.0, "10025": 411044960.0, "10030": 404590112.0, "10035": 419791232.0, "10040": 
404037664.0, "10045": 419308672.0, "10050": 414825120.0, "10055": 402745632.0, "10060": 408554624.0, "10065": 411435456.0, "10070": 414234176.0, "10075": 405732608.0, "10080": 414237696.0, "10085": 407195200.0, "10090": 412756384.0, "10095": 410250176.0, "10100": 419281728.0, "10105": 410058176.0, "10110": 408360736.0, "10115": 409316800.0, "10120": 410984096.0, "10125": 414828416.0, "10130": 403197696.0, "10135": 420884064.0, "10140": 407277056.0, "10145": 411030336.0, "10150": 410952928.0, "10155": 417998976.0, "10160": 410851584.0, "10165": 399424256.0, "10170": 413001536.0, "10175": 416501664.0, "10180": 409684384.0, "10185": 404898560.0, "10190": 413433024.0, "10195": 415311936.0, "10200": 417970208.0, "10205": 408185984.0, "10210": 403573504.0, "10215": 413025280.0, "10220": 415769632.0, "10225": 413506240.0, "10230": 406391328.0, "10235": 399491904.0, "10240": 412150336.0, "10245": 412710944.0, "10250": 415775552.0, "10255": 405596800.0, "10260": 412600736.0, "10265": 419721376.0, "10270": 410324384.0, "10275": 410868352.0, "10280": 422137184.0, "10285": 411363936.0, "10290": 412399616.0, "10295": 408066656.0, "10300": 413131936.0, "10305": 412748160.0, "10310": 410512160.0, "10315": 411763776.0, "10320": 414620384.0, "10325": 407470464.0, "10330": 414774720.0, "10335": 412299360.0, "10340": 415857024.0, "10345": 404307232.0, "10350": 412128448.0, "10355": 409005152.0, "10360": 417741920.0, "10365": 407707584.0, "10370": 409910432.0, "10375": 411823552.0, "10380": 415282848.0, "10385": 407582976.0, "10390": 409371392.0, "10395": 401839968.0, "10400": 415573728.0, "10405": 406717088.0, "10410": 415023392.0, "10415": 423584768.0, "10420": 414196576.0, "10425": 408736416.0, "10430": 418721696.0, "10435": 405932704.0, "10440": 408958304.0, "10445": 418892384.0, "10450": 415706144.0, "10455": 407848000.0, "10460": 409088960.0, "10465": 409699840.0, "10470": 410556512.0, "10475": 411718048.0, "10480": 406102816.0, "10485": 411857152.0, "10490": 408786016.0, "10495": 422459328.0, "10500": 412520032.0, "10505": 418018592.0, "10510": 418577280.0, "10515": 407673024.0, "10520": 418183680.0, "10525": 409454400.0, "10530": 414837568.0, "10535": 417202816.0, "10540": 412183648.0, "10545": 419754880.0, "10550": 417558336.0, "10555": 411751808.0, "10560": 412118368.0, "10565": 415268256.0, "10570": 412249536.0, "10575": 409382176.0, "10580": 414494048.0, "10585": 413836576.0, "10590": 419257088.0, "10595": 405067872.0, "10600": 418748704.0, "10605": 406374144.0, "10610": 421514656.0, "10615": 412265728.0, "10620": 403737696.0, "10625": 416451680.0, "10630": 408946080.0, "10635": 407381536.0, "10640": 406193248.0, "10645": 409084416.0, "10650": 415346464.0, "10655": 414620000.0, "10660": 415590592.0, "10665": 417026176.0, "10670": 410603328.0, "10675": 410419456.0, "10680": 413287520.0, "10685": 415724928.0, "10690": 414612736.0, "10695": 411695712.0, "10700": 414157792.0, "10705": 405432032.0, "10710": 412632928.0, "10715": 418943104.0, "10720": 415132256.0, "10725": 402319008.0, "10730": 409919904.0, "10735": 412185248.0, "10740": 411042720.0, "10745": 419074304.0, "10750": 415545184.0, "10755": 415327872.0, "10760": 414845504.0, "10765": 403134272.0, "10770": 417090432.0, "10775": 410848160.0, "10780": 412423840.0, "10785": 407600480.0, "10790": 417901216.0, "10795": 405629344.0, "10800": 413234048.0, "10805": 408133568.0, "10810": 417022880.0, "10815": 416093792.0, "10820": 405812544.0, "10825": 415024064.0, "10830": 414973344.0, "10835": 407947136.0, "10840": 418333376.0, "10845": 415421696.0, 
"10850": 414045632.0, "10855": 415576064.0, "10860": 413296640.0, "10865": 412759712.0, "10870": 411405248.0, "10875": 405811776.0, "10880": 410952320.0, "10885": 404014560.0, "10890": 403169312.0, "10895": 416508192.0, "10900": 407618752.0, "10905": 415891424.0, "10910": 411086304.0, "10915": 414820896.0, "10920": 412473824.0, "10925": 410912800.0, "10930": 412597792.0, "10935": 413452640.0, "10940": 412719776.0, "10945": 405757696.0, "10950": 411555424.0, "10955": 412366880.0, "10960": 413537344.0, "10965": 409143072.0, "10970": 414292576.0, "10975": 416837216.0, "10980": 408902432.0, "10985": 413422944.0, "10990": 417216288.0, "10995": 411191392.0, "11000": 415593472.0, "11005": 410333888.0, "11010": 410156256.0, "11015": 418012480.0, "11020": 409698592.0, "11025": 417628832.0, "11030": 418405120.0, "11035": 408626336.0, "11040": 407228544.0, "11045": 421662336.0, "11050": 415127776.0, "11055": 408362560.0, "11060": 423477536.0, "11065": 406627808.0, "11070": 415279104.0, "11075": 427608352.0, "11080": 408126272.0, "11085": 406586016.0, "11090": 412008704.0, "11095": 410714048.0, "11100": 414169056.0, "11105": 415814592.0, "11110": 412272608.0, "11115": 412707840.0, "11120": 407247776.0, "11125": 408664192.0, "11130": 413283872.0, "11135": 407930112.0, "11140": 420617696.0, "11145": 411423136.0, "11150": 416209312.0, "11155": 413796320.0, "11160": 418413472.0, "11165": 409285024.0, "11170": 407095584.0, "11175": 413817504.0, "11180": 411708448.0, "11185": 410185728.0, "11190": 407468512.0, "11195": 407248960.0, "11200": 406558432.0, "11205": 406272032.0, "11210": 417142944.0, "11215": 414130848.0, "11220": 411091456.0, "11225": 411018144.0, "11230": 418702560.0, "11235": 410256384.0, "11240": 404296576.0, "11245": 408338976.0, "11250": 408159232.0, "11255": 414843584.0, "11260": 404108832.0, "11265": 413662208.0, "11270": 411123424.0, "11275": 411934016.0, "11280": 419098144.0, "11285": 409933920.0, "11290": 412109696.0, "11295": 416902528.0, "11300": 415485696.0, "11305": 421967456.0, "11310": 406470528.0, "11315": 417052448.0, "11320": 409628864.0, "11325": 411005024.0, "11330": 408036672.0, "11335": 405543744.0, "11340": 404196032.0, "11345": 425506944.0, "11350": 417446368.0, "11355": 407158112.0, "11360": 408809376.0, "11365": 410813696.0, "11370": 413799808.0, "11375": 415654784.0, "11380": 418118016.0, "11385": 408455424.0, "11390": 412902400.0, "11395": 413814176.0, "11400": 409442368.0, "11405": 406545152.0, "11410": 419913312.0, "11415": 408785920.0, "11420": 411110272.0, "11425": 412429632.0, "11430": 404851200.0, "11435": 413853376.0, "11440": 405855616.0, "11445": 415102720.0, "11450": 413070272.0, "11455": 406263680.0, "11460": 411430112.0, "11465": 414850144.0, "11470": 407798432.0, "11475": 416451232.0, "11480": 418448352.0, "11485": 413345376.0, "11490": 415264448.0, "11495": 406017728.0, "11500": 414144032.0, "11505": 410998304.0, "11510": 412111072.0, "11515": 404796064.0, "11520": 410693600.0, "11525": 415462816.0, "11530": 407963488.0, "11535": 417385920.0, "11540": 415985696.0, "11545": 406654432.0, "11550": 410019424.0, "11555": 415366336.0, "11560": 419553984.0, "11565": 412964640.0, "11570": 418128160.0, "11575": 416767424.0, "11580": 418956608.0, "11585": 414253696.0, "11590": 407006912.0, "11595": 409673184.0, "11600": 416353920.0, "11605": 420544992.0, "11610": 413525056.0, "11615": 416207712.0, "11620": 413556096.0, "11625": 418873888.0, "11630": 408682016.0, "11635": 405175360.0, "11640": 415254496.0, "11645": 409652672.0, "11650": 411527104.0, "11655": 
420431488.0, "11660": 411635616.0, "11665": 418984032.0, "11670": 412013792.0, "11675": 410970944.0, "11680": 418263104.0, "11685": 411540352.0, "11690": 408128864.0, "11695": 411538464.0, "11700": 410787936.0, "11705": 417722176.0, "11710": 408322112.0, "11715": 420154240.0, "11720": 419249440.0, "11725": 406260448.0, "11730": 413020640.0, "11735": 408858144.0, "11740": 420031648.0, "11745": 408926688.0, "11750": 411244128.0, "11755": 408871360.0, "11760": 411581920.0, "11765": 411260736.0, "11770": 409220704.0, "11775": 409224608.0, "11780": 411911168.0, "11785": 408890624.0, "11790": 401972800.0, "11795": 422691776.0, "11800": 402392448.0, "11805": 412519200.0, "11810": 413104448.0, "11815": 412110080.0, "11820": 411871488.0, "11825": 410684288.0, "11830": 409946816.0, "11835": 408322048.0, "11840": 413922176.0, "11845": 412716704.0, "11850": 406962272.0, "11855": 418393152.0, "11860": 408874560.0, "11865": 411767776.0, "11870": 413203936.0, "11875": 422614752.0, "11880": 411297664.0, "11885": 416826400.0, "11890": 402987968.0, "11895": 408370080.0, "11900": 420736224.0, "11905": 412078112.0, "11910": 410827296.0, "11915": 411673664.0, "11920": 414534368.0, "11925": 408615360.0, "11930": 419208672.0, "11935": 412860032.0, "11940": 410197440.0, "11945": 407750400.0, "11950": 409004512.0, "11955": 406608832.0, "11960": 408361856.0, "11965": 409618336.0, "11970": 412840352.0, "11975": 415589440.0, "11980": 414845632.0, "11985": 407881536.0, "11990": 412452960.0, "11995": 418135488.0, "12000": 410740352.0, "12005": 409564576.0, "12010": 419407904.0, "12015": 417320320.0, "12020": 407113248.0, "12025": 416847968.0, "12030": 413147616.0, "12035": 404793056.0, "12040": 415404672.0, "12045": 410120128.0, "12050": 402666528.0, "12055": 407683328.0, "12060": 410431552.0, "12065": 410121568.0, "12070": 413424896.0, "12075": 416450848.0, "12080": 405155456.0, "12085": 410429760.0, "12090": 414273888.0, "12095": 404103104.0, "12100": 413845504.0, "12105": 404287232.0, "12110": 403656288.0, "12115": 419011168.0, "12120": 407207808.0, "12125": 410896320.0, "12130": 417277120.0, "12135": 408156352.0, "12140": 411857504.0, "12145": 415913664.0, "12150": 412600736.0, "12155": 411894112.0, "12160": 414863936.0, "12165": 405730784.0, "12170": 407008448.0, "12175": 413695008.0, "12180": 418941312.0, "12185": 412464000.0, "12190": 412007776.0, "12195": 411149952.0, "12200": 406629024.0, "12205": 415145408.0, "12210": 405980416.0, "12215": 408412672.0, "12220": 411260544.0, "12225": 415790080.0, "12230": 411267136.0, "12235": 422770080.0, "12240": 410749280.0, "12245": 405786880.0, "12250": 419524928.0, "12255": 413605888.0, "12260": 405170752.0, "12265": 417495808.0, "12270": 417302400.0, "12275": 408273600.0, "12280": 418895680.0, "12285": 409238720.0, "12290": 410364352.0, "12295": 416008064.0, "12300": 421216576.0, "12305": 408811712.0, "12310": 409148736.0, "12315": 410062528.0, "12320": 415679168.0, "12325": 415180000.0, "12330": 416663296.0, "12335": 406759328.0, "12340": 411120192.0, "12345": 413722592.0, "12350": 405139712.0, "12355": 411673728.0, "12360": 413329216.0, "12365": 409110624.0, "12370": 418473472.0, "12375": 408344352.0, "12380": 414494624.0, "12385": 418001952.0, "12390": 413071648.0, "12395": 412159424.0, "12400": 413477664.0, "12405": 416028608.0, "12410": 412684736.0, "12415": 413609248.0, "12420": 414251744.0, "12425": 414495232.0, "12430": 411053408.0, "12435": 407379520.0, "12440": 413251520.0, "12445": 412653440.0, "12450": 414716416.0, "12455": 410517664.0, "12460": 417387392.0, 
"12465": 413956960.0, "12470": 408413056.0, "12475": 411133184.0, "12480": 415160640.0, "12485": 404816608.0, "12490": 413823424.0, "12495": 414616736.0, "12500": 410438976.0, "12505": 414518752.0, "12510": 420609216.0, "12515": 406607648.0, "12520": 421914560.0, "12525": 412359392.0, "12530": 413123168.0, "12535": 412534880.0, "12540": 416281248.0, "12545": 408964544.0, "12550": 420781504.0, "12555": 409916768.0, "12560": 406605664.0, "12565": 422660832.0, "12570": 419787680.0, "12575": 416039040.0, "12580": 414689472.0, "12585": 415681664.0, "12590": 412494784.0, "12595": 414015200.0, "12600": 414389376.0, "12605": 404899136.0, "12610": 420843616.0, "12615": 405006432.0, "12620": 407330272.0, "12625": 411293632.0, "12630": 411424864.0, "12635": 407165408.0, "12640": 414151968.0, "12645": 414519008.0, "12650": 410146560.0, "12655": 411488352.0, "12660": 412063616.0, "12665": 408885184.0, "12670": 411464928.0, "12675": 406730304.0, "12680": 404442176.0, "12685": 408523744.0, "12690": 420230912.0, "12695": 407342944.0, "12700": 418219648.0, "12705": 413307552.0, "12710": 410611008.0, "12715": 414373056.0, "12720": 413130624.0, "12725": 405259968.0, "12730": 419739168.0, "12735": 411650880.0, "12740": 406905728.0, "12745": 418505024.0, "12750": 404234112.0, "12755": 420262688.0, "12760": 408241248.0, "12765": 413207008.0, "12770": 409233344.0, "12775": 425057280.0, "12780": 409264896.0, "12785": 412743136.0, "12790": 413922656.0, "12795": 410430848.0, "12800": 412560512.0, "12805": 410806464.0, "12810": 416913120.0, "12815": 410988160.0, "12820": 400898848.0, "12825": 410587616.0, "12830": 408656128.0, "12835": 411073952.0, "12840": 408245792.0, "12845": 417891520.0, "12850": 408890784.0, "12855": 408971456.0, "12860": 403438976.0, "12865": 416693792.0, "12870": 411813184.0, "12875": 405518976.0, "12880": 413809024.0, "12885": 405719392.0, "12890": 418467936.0, "12895": 410191456.0, "12900": 410871552.0, "12905": 409207392.0, "12910": 409681792.0, "12915": 407427936.0, "12920": 418255712.0, "12925": 416425088.0, "12930": 411543904.0, "12935": 415358144.0, "12940": 416870688.0, "12945": 412524480.0, "12950": 413972480.0, "12955": 409755968.0, "12960": 406184416.0, "12965": 417399232.0, "12970": 417775296.0, "12975": 408076640.0, "12980": 406977440.0, "12985": 413422624.0, "12990": 413053696.0, "12995": 403677088.0, "13000": 409820416.0, "13005": 413109408.0, "13010": 410628000.0, "13015": 414508608.0, "13020": 407704640.0, "13025": 409709600.0, "13030": 414323232.0, "13035": 407610592.0, "13040": 409703744.0, "13045": 416563648.0, "13050": 408595136.0, "13055": 410941024.0, "13060": 421787488.0, "13065": 405411104.0, "13070": 416714656.0, "13075": 403608128.0, "13080": 407627200.0, "13085": 416632384.0, "13090": 407419584.0, "13095": 411575552.0, "13100": 410031552.0, "13105": 410890912.0, "13110": 415514816.0, "13115": 421492864.0, "13120": 418119200.0, "13125": 408668640.0, "13130": 418441632.0, "13135": 413817824.0, "13140": 410614176.0, "13145": 413523168.0, "13150": 409453248.0, "13155": 413511744.0, "13160": 408437568.0, "13165": 405222400.0, "13170": 414834688.0, "13175": 421358912.0, "13180": 397422496.0, "13185": 411437568.0, "13190": 405329632.0, "13195": 416468896.0, "13200": 408476960.0, "13205": 408172576.0, "13210": 413864512.0, "13215": 409623104.0, "13220": 417277184.0, "13225": 409056704.0, "13230": 414697888.0, "13235": 417922464.0, "13240": 414292064.0, "13245": 415574784.0, "13250": 413401696.0, "13255": 412968864.0, "13260": 413560992.0, "13265": 414455392.0, "13270": 
409323168.0, "13275": 418499872.0, "13280": 417407712.0, "13285": 411205728.0, "13290": 406795264.0, "13295": 411903616.0, "13300": 407022048.0, "13305": 403556320.0, "13310": 417866240.0, "13315": 409348960.0, "13320": 413888992.0, "13325": 410253760.0, "13330": 410481568.0, "13335": 418151552.0, "13340": 409082304.0, "13345": 409326464.0, "13350": 417276640.0, "13355": 421799840.0, "13360": 408415424.0, "13365": 409427328.0, "13370": 409442144.0, "13375": 418270976.0, "13380": 406724608.0, "13385": 417713536.0, "13390": 409952224.0, "13395": 413540928.0, "13400": 418847584.0, "13405": 413434528.0, "13410": 407525856.0, "13415": 412537216.0, "13420": 411436384.0, "13425": 419354784.0, "13430": 421878432.0, "13435": 413795936.0, "13440": 411654848.0, "13445": 415427040.0, "13450": 406827328.0, "13455": 408226560.0, "13460": 406269856.0, "13465": 411011552.0, "13470": 413876448.0, "13475": 408902336.0, "13480": 414787456.0, "13485": 405258176.0, "13490": 415762400.0, "13495": 411243872.0, "13500": 415868000.0, "13505": 412075264.0, "13510": 406803392.0, "13515": 412699392.0, "13520": 412709408.0, "13525": 407277408.0, "13530": 405813984.0, "13535": 414394560.0, "13540": 408278752.0, "13545": 414350016.0, "13550": 415644992.0, "13555": 404176256.0, "13560": 402817600.0, "13565": 410319072.0, "13570": 404871744.0, "13575": 407894624.0, "13580": 415385728.0, "13585": 411583872.0, "13590": 419402816.0, "13595": 411402976.0, "13600": 404385792.0, "13605": 410441632.0, "13610": 414964288.0, "13615": 406824960.0, "13620": 408078464.0, "13625": 410123168.0, "13630": 412658848.0, "13635": 416448832.0, "13640": 416549248.0, "13645": 410003040.0, "13650": 414289408.0, "13655": 413664128.0, "13660": 415533344.0, "13665": 404728192.0, "13670": 412797536.0, "13675": 403840640.0, "13680": 410857376.0, "13685": 412997344.0, "13690": 402448096.0, "13695": 410487840.0, "13700": 415339936.0, "13705": 415509120.0, "13710": 411051008.0, "13715": 412772096.0, "13720": 414665728.0, "13725": 413314112.0, "13730": 414704704.0, "13735": 411512896.0, "13740": 410180288.0, "13745": 415541056.0, "13750": 408164576.0, "13755": 411180288.0, "13760": 418633728.0, "13765": 403934656.0, "13770": 410696128.0, "13775": 421774464.0, "13780": 401838400.0, "13785": 413076160.0, "13790": 408691968.0, "13795": 412151456.0, "13800": 414569248.0, "13805": 415389024.0, "13810": 415282880.0, "13815": 408538688.0, "13820": 419681024.0, "13825": 422063808.0, "13830": 407585184.0, "13835": 414582944.0, "13840": 413089792.0, "13845": 406274208.0, "13850": 419195968.0, "13855": 416188128.0, "13860": 411486688.0, "13865": 411248384.0, "13870": 399900320.0, "13875": 415944640.0, "13880": 411714752.0, "13885": 414683328.0, "13890": 412703616.0, "13895": 414789504.0, "13900": 408710912.0, "13905": 406560736.0, "13910": 408790976.0, "13915": 419511744.0, "13920": 410149824.0, "13925": 411009792.0, "13930": 415236224.0, "13935": 414774400.0, "13940": 412452544.0, "13945": 406517120.0, "13950": 414838208.0, "13955": 411456224.0, "13960": 408677152.0, "13965": 424231040.0, "13970": 410121504.0, "13975": 421832768.0, "13980": 410714432.0, "13985": 418405408.0, "13990": 409807808.0, "13995": 409165472.0, "14000": 418782848.0, "14005": 408035008.0, "14010": 413440032.0, "14015": 410650912.0, "14020": 412754400.0, "14025": 407716960.0, "14030": 421836576.0, "14035": 408538304.0, "14040": 422267328.0, "14045": 416009056.0, "14050": 410340512.0, "14055": 417611040.0, "14060": 413705152.0, "14065": 411687872.0, "14070": 407399584.0, "14075": 416001184.0, 
"14080": 410863552.0, "14085": 410327008.0, "14090": 418860704.0, "14095": 412305664.0, "14100": 416172544.0, "14105": 410987392.0, "14110": 416007520.0, "14115": 414343872.0, "14120": 416272640.0, "14125": 414570752.0, "14130": 423086528.0, "14135": 416701344.0, "14140": 415356736.0, "14145": 409709280.0, "14150": 402864448.0, "14155": 410195936.0, "14160": 417179808.0, "14165": 413682784.0, "14170": 404039488.0, "14175": 402102528.0, "14180": 415883136.0, "14185": 415870976.0, "14190": 414750400.0, "14195": 415851808.0, "14200": 407512576.0, "14205": 406138752.0, "14210": 423769472.0, "14215": 406034688.0, "14220": 415261248.0, "14225": 414267936.0, "14230": 406269376.0, "14235": 415245600.0, "14240": 409532064.0, "14245": 409015680.0, "14250": 406875488.0, "14255": 410239296.0, "14260": 407693856.0, "14265": 410925824.0, "14270": 411779104.0, "14275": 416125792.0, "14280": 416722336.0, "14285": 414810624.0, "14290": 412991552.0, "14295": 416306144.0, "14300": 413589280.0, "14305": 411374496.0, "14310": 416485312.0, "14315": 421519744.0, "14320": 407860704.0, "14325": 414239328.0, "14330": 410978240.0, "14335": 413154848.0, "14340": 408625504.0, "14345": 416427392.0, "14350": 415292256.0, "14355": 418242304.0, "14360": 409767808.0, "14365": 416084928.0, "14370": 416401088.0, "14375": 414096128.0, "14380": 410676032.0, "14385": 414409152.0, "14390": 414871552.0, "14395": 415684768.0, "14400": 409896320.0, "14405": 419105408.0, "14410": 407632640.0, "14415": 412156864.0, "14420": 413313536.0, "14425": 418070144.0, "14430": 409908064.0, "14435": 413153536.0, "14440": 416741184.0, "14445": 406764320.0, "14450": 420124224.0, "14455": 407193248.0, "14460": 417536416.0, "14465": 420553760.0, "14470": 415695520.0, "14475": 412438272.0, "14480": 404459072.0, "14485": 417214272.0, "14490": 407673024.0, "14495": 423033760.0, "14500": 409569568.0, "14505": 415296160.0, "14510": 418554784.0, "14515": 416720288.0, "14520": 410231456.0, "14525": 415292800.0, "14530": 407106752.0, "14535": 413969216.0, "14540": 408975360.0, "14545": 416316864.0, "14550": 413731072.0, "14555": 419261216.0, "14560": 413561344.0, "14565": 406435136.0, "14570": 416075392.0, "14575": 408256896.0, "14580": 406780672.0, "14585": 412237760.0, "14590": 411619936.0, "14595": 417964960.0, "14600": 410199712.0, "14605": 404269952.0, "14610": 408270112.0, "14615": 406296160.0, "14620": 406693152.0, "14625": 402484480.0, "14630": 417184576.0, "14635": 409946176.0, "14640": 413965248.0, "14645": 410726272.0, "14650": 419657184.0, "14655": 410885280.0, "14660": 417502560.0, "14665": 416200608.0, "14670": 407922240.0, "14675": 416649792.0, "14680": 414445152.0, "14685": 412389760.0, "14690": 412532640.0, "14695": 406862304.0, "14700": 412373760.0, "14705": 409176000.0, "14710": 416651936.0, "14715": 409518400.0, "14720": 415356832.0, "14725": 419891424.0, "14730": 407699136.0, "14735": 409688384.0, "14740": 407560896.0, "14745": 409030624.0, "14750": 406051744.0, "14755": 413578880.0, "14760": 408719264.0, "14765": 410984256.0, "14770": 415520704.0, "14775": 408994016.0, "14780": 415020608.0, "14785": 405298624.0, "14790": 416572032.0, "14795": 412753696.0, "14800": 415339552.0, "14805": 405153856.0, "14810": 412214752.0, "14815": 414616960.0, "14820": 415397888.0, "14825": 409566176.0, "14830": 409522624.0, "14835": 414845216.0, "14840": 413140672.0, "14845": 415726464.0, "14850": 408700768.0, "14855": 417771936.0, "14860": 418506112.0, "14865": 416525856.0, "14870": 408138464.0, "14875": 412082816.0, "14880": 414084928.0, "14885": 
405860736.0, "14890": 411868896.0, "14895": 407546496.0, "14900": 415243520.0, "14905": 423725888.0, "14910": 407894976.0, "14915": 407102304.0, "14920": 409701504.0, "14925": 409301792.0, "14930": 408798016.0, "14935": 408247584.0, "14940": 410281056.0, "14945": 421316352.0, "14950": 409856416.0, "14955": 426883328.0, "14960": 408501888.0, "14965": 407893216.0, "14970": 416523808.0, "14975": 413514336.0, "14980": 416363456.0, "14985": 419025408.0, "14990": 417028000.0, "14995": 419570912.0, "15000": 409845920.0, "15005": 412327680.0, "15010": 409413504.0, "15015": 417949920.0, "15020": 415064416.0, "15025": 413157536.0, "15030": 408808928.0, "15035": 414424224.0, "15040": 412775104.0, "15045": 423245728.0, "15050": 413866880.0, "15055": 409393312.0, "15060": 421926784.0, "15065": 416794912.0, "15070": 414954304.0, "15075": 411037440.0, "15080": 416047616.0, "15085": 412300576.0, "15090": 406381888.0, "15095": 410262144.0, "15100": 414591872.0, "15105": 411607168.0, "15110": 409747200.0, "15115": 414257632.0, "15120": 404834048.0, "15125": 411056000.0, "15130": 404825792.0, "15135": 410463680.0, "15140": 409510976.0, "15145": 412256896.0, "15150": 415048832.0, "15155": 408239552.0, "15160": 415953024.0, "15165": 412942592.0, "15170": 410109664.0, "15175": 412551424.0, "15180": 410073152.0, "15185": 406093952.0, "15190": 412133536.0, "15195": 408061344.0, "15200": 412056864.0, "15205": 420187392.0, "15210": 410817888.0, "15215": 413139680.0, "15220": 415376928.0, "15225": 417367680.0, "15230": 416033344.0, "15235": 406139776.0, "15240": 415804320.0, "15245": 413035168.0, "15250": 415092864.0, "15255": 408782816.0, "15260": 411990432.0, "15265": 419601600.0, "15270": 407985504.0, "15275": 407713856.0, "15280": 406512384.0, "15285": 410658912.0, "15290": 413919104.0, "15295": 410621760.0, "15300": 412330176.0, "15305": 407282432.0, "15310": 412851488.0, "15315": 412949632.0, "15320": 416182400.0, "15325": 408665440.0, "15330": 410813120.0, "15335": 410828576.0, "15340": 416280192.0, "15345": 414144736.0, "15350": 415664608.0, "15355": 416395776.0, "15360": 412887072.0, "15365": 410454400.0, "15370": 417859552.0, "15375": 408734272.0, "15380": 414760160.0, "15385": 406144672.0, "15390": 408428480.0, "15395": 404353984.0, "15400": 412101248.0, "15405": 416659136.0, "15410": 403633312.0, "15415": 410573408.0, "15420": 408682304.0, "15425": 407734080.0, "15430": 415000736.0, "15435": 411445760.0, "15440": 411303776.0, "15445": 415063328.0, "15450": 407293632.0, "15455": 416660352.0, "15460": 411420672.0, "15465": 414237696.0, "15470": 410556480.0, "15475": 413193632.0, "15480": 408369600.0, "15485": 414433888.0, "15490": 402909824.0, "15495": 406720832.0, "15500": 412213696.0, "15505": 409587008.0, "15510": 416304768.0, "15515": 412984608.0, "15520": 415399008.0, "15525": 412398656.0, "15530": 405126624.0, "15535": 413643776.0, "15540": 410175840.0, "15545": 411096160.0, "15550": 417085088.0, "15555": 415313728.0, "15560": 414876832.0, "15565": 413141408.0, "15570": 411025184.0, "15575": 410580032.0, "15580": 415824192.0, "15585": 407155936.0, "15590": 408310880.0, "15595": 413386784.0, "15600": 422916832.0, "15605": 407841632.0, "15610": 414773440.0, "15615": 406399168.0, "15620": 411313760.0, "15625": 417921568.0, "15630": 411122528.0, "15635": 410432832.0, "15640": 405831040.0, "15645": 416868736.0, "15650": 408591872.0, "15655": 420719552.0, "15660": 410556800.0, "15665": 406769024.0, "15670": 411310368.0, "15675": 410311168.0, "15680": 411795936.0, "15685": 414968288.0, "15690": 410903584.0, 
"15695": 402842240.0, "15700": 415272448.0, "15705": 411136544.0, "15710": 411248768.0, "15715": 419264192.0, "15720": 404410880.0, "15725": 414464352.0, "15730": 406734944.0, "15735": 413409344.0, "15740": 409978912.0, "15745": 409410016.0, "15750": 414221024.0, "15755": 410847424.0, "15760": 411803712.0, "15765": 410682656.0, "15770": 415537984.0, "15775": 413520352.0, "15780": 412607040.0, "15785": 419232736.0, "15790": 415274368.0, "15795": 402521024.0, "15800": 400181984.0, "15805": 419194208.0, "15810": 402599488.0, "15815": 413325760.0, "15820": 405920256.0, "15825": 419988096.0, "15830": 412328576.0, "15835": 411682528.0, "15840": 406818688.0, "15845": 400987328.0, "15850": 412715200.0, "15855": 413197664.0, "15860": 405929536.0, "15865": 417390656.0, "15870": 418248256.0, "15875": 410095936.0, "15880": 414137792.0, "15885": 418015072.0, "15890": 399548224.0, "15895": 408638432.0, "15900": 400241568.0, "15905": 414149440.0, "15910": 408544736.0, "15915": 419756576.0, "15920": 417293440.0, "15925": 412367552.0, "15930": 417094496.0, "15935": 409348864.0, "15940": 412591264.0, "15945": 411389280.0, "15950": 414572096.0, "15955": 414581216.0, "15960": 414777184.0, "15965": 409052256.0, "15970": 415105856.0, "15975": 411194752.0, "15980": 412649248.0, "15985": 412621952.0, "15990": 407834144.0, "15995": 407426368.0, "16000": 406322976.0, "16005": 413590528.0, "16010": 410469056.0, "16015": 413421088.0, "16020": 411755456.0, "16025": 415304288.0, "16030": 418302208.0, "16035": 410451328.0, "16040": 414280704.0, "16045": 408102912.0, "16050": 409532928.0, "16055": 410436288.0, "16060": 413540704.0, "16065": 420090272.0, "16070": 415799776.0, "16075": 418098400.0, "16080": 414010336.0, "16085": 411053824.0, "16090": 409959616.0, "16095": 407185120.0, "16100": 410065344.0, "16105": 407790432.0, "16110": 418946112.0, "16115": 402405568.0, "16120": 410172064.0, "16125": 420183488.0, "16130": 411939936.0, "16135": 413343136.0, "16140": 410052992.0, "16145": 415423808.0, "16150": 412071680.0, "16155": 417231136.0, "16160": 418578112.0, "16165": 409405440.0, "16170": 412998432.0, "16175": 418517856.0, "16180": 413669920.0, "16185": 409369536.0, "16190": 415917120.0, "16195": 420736224.0, "16200": 408669184.0, "16205": 417162624.0, "16210": 415781248.0, "16215": 411855776.0, "16220": 418696288.0, "16225": 405008352.0, "16230": 414692288.0, "16235": 420395968.0, "16240": 405545536.0, "16245": 415031840.0, "16250": 405468512.0, "16255": 405961728.0, "16260": 412304992.0, "16265": 411648160.0, "16270": 397768896.0, "16275": 411074528.0, "16280": 418671168.0, "16285": 408181472.0, "16290": 409746112.0, "16295": 405366240.0, "16300": 416262944.0, "16305": 410730560.0, "16310": 408015200.0, "16315": 416839648.0, "16320": 414383744.0, "16325": 406376832.0, "16330": 416982112.0, "16335": 419761152.0, "16340": 405955712.0, "16345": 409986816.0, "16350": 409844512.0, "16355": 410244960.0, "16360": 397442816.0, "16365": 410626304.0, "16370": 416444064.0, "16375": 410459968.0, "16380": 412291328.0, "16385": 404111232.0, "16390": 415198944.0, "16395": 410433088.0, "16400": 408763776.0, "16405": 409010592.0, "16410": 419970592.0, "16415": 408892832.0, "16420": 417246816.0, "16425": 417584384.0, "16430": 407035456.0, "16435": 410668064.0, "16440": 411442496.0, "16445": 405275040.0, "16450": 408098400.0, "16455": 410088288.0, "16460": 412891296.0, "16465": 409909024.0, "16470": 412751744.0, "16475": 409363840.0, "16480": 421083552.0, "16485": 413035328.0, "16490": 407724288.0, "16495": 406518304.0, "16500": 
408855040.0, "16505": 411512064.0, "16510": 400301728.0, "16515": 412769536.0, "16520": 413977856.0, "16525": 413940416.0, "16530": 415357664.0, "16535": 411284544.0, "16540": 406573504.0, "16545": 409501088.0, "16550": 416978848.0, "16555": 411705984.0, "16560": 411103424.0, "16565": 417139456.0, "16570": 411751584.0, "16575": 416761376.0, "16580": 409505472.0, "16585": 403769152.0, "16590": 410461824.0, "16595": 418268800.0, "16600": 404103552.0, "16605": 415538208.0, "16610": 409325888.0, "16615": 413388256.0, "16620": 415790400.0, "16625": 418094976.0, "16630": 411930560.0, "16635": 411517280.0, "16640": 411590784.0, "16645": 411386528.0, "16650": 407064384.0, "16655": 408090240.0, "16660": 403864768.0, "16665": 409256096.0, "16670": 408775456.0, "16675": 411934304.0, "16680": 410660736.0, "16685": 417443008.0, "16690": 412323968.0, "16695": 413861600.0, "16700": 413269152.0, "16705": 411860992.0, "16710": 411807264.0, "16715": 415560992.0, "16720": 409456416.0, "16725": 411155616.0, "16730": 416413440.0, "16735": 406513664.0, "16740": 416263040.0, "16745": 408085856.0, "16750": 401761632.0, "16755": 419634496.0, "16760": 410713984.0, "16765": 408922560.0, "16770": 417163456.0, "16775": 412312032.0, "16780": 411435200.0, "16785": 410332608.0, "16790": 406778944.0, "16795": 413263744.0, "16800": 412645280.0, "16805": 413756800.0, "16810": 407137920.0, "16815": 398753024.0, "16820": 412291648.0, "16825": 410135328.0, "16830": 411131008.0, "16835": 413747104.0, "16840": 406017440.0, "16845": 416572864.0, "16850": 414119872.0, "16855": 408132352.0, "16860": 410124096.0, "16865": 418439328.0, "16870": 412135936.0, "16875": 414604704.0, "16880": 414082688.0, "16885": 414198080.0, "16890": 410213728.0, "16895": 412129824.0, "16900": 404238624.0, "16905": 411876640.0, "16910": 417919584.0, "16915": 412153408.0, "16920": 409926688.0, "16925": 411628992.0, "16930": 411705184.0, "16935": 419495424.0, "16940": 420364320.0, "16945": 410796512.0, "16950": 412833856.0, "16955": 413331072.0, "16960": 410435552.0, "16965": 412666368.0, "16970": 411884320.0, "16975": 411071616.0, "16980": 412864288.0, "16985": 419940896.0, "16990": 420017920.0, "16995": 414495168.0, "17000": 424266528.0, "17005": 408955008.0, "17010": 414583552.0, "17015": 407920320.0, "17020": 405083584.0, "17025": 412008576.0, "17030": 410475136.0, "17035": 412893600.0, "17040": 413306656.0, "17045": 414110816.0, "17050": 403222848.0, "17055": 405555072.0, "17060": 415154944.0, "17065": 410753312.0, "17070": 411847680.0, "17075": 415563392.0, "17080": 417502304.0, "17085": 413168224.0, "17090": 419193600.0, "17095": 409448160.0, "17100": 417560736.0, "17105": 405868256.0, "17110": 416139520.0, "17115": 410675520.0, "17120": 413846752.0, "17125": 411935712.0, "17130": 417296800.0, "17135": 406642240.0, "17140": 414394976.0, "17145": 412657312.0, "17150": 414228640.0, "17155": 414033504.0, "17160": 405508832.0, "17165": 417023872.0, "17170": 407489248.0, "17175": 418033952.0, "17180": 419352000.0, "17185": 416809984.0, "17190": 414656704.0, "17195": 414267424.0, "17200": 409455872.0, "17205": 415083488.0, "17210": 404076672.0, "17215": 406711744.0, "17220": 408400416.0, "17225": 406875584.0, "17230": 412882496.0, "17235": 410709024.0, "17240": 414369664.0, "17245": 422358144.0, "17250": 404958848.0, "17255": 413645792.0, "17260": 420217440.0, "17265": 409224096.0, "17270": 417982880.0, "17275": 413831872.0, "17280": 407468640.0, "17285": 414569440.0, "17290": 411417920.0, "17295": 416554848.0, "17300": 405017280.0, "17305": 417159136.0, 
"17310": 407985120.0, "17315": 410005504.0, "17320": 415819232.0, "17325": 397262656.0, "17330": 416357664.0, "17335": 415017024.0, "17340": 410161664.0, "17345": 419651264.0, "17350": 416476320.0, "17355": 409191968.0, "17360": 409165568.0, "17365": 408712448.0, "17370": 402694144.0, "17375": 413784896.0, "17380": 415100928.0, "17385": 410343104.0, "17390": 416937024.0, "17395": 419525984.0, "17400": 409048256.0, "17405": 413914368.0, "17410": 414349120.0, "17415": 403801504.0, "17420": 404095968.0, "17425": 414287936.0, "17430": 406014880.0, "17435": 413975424.0, "17440": 404506112.0, "17445": 408372864.0, "17450": 413226112.0, "17455": 413216928.0, "17460": 413227008.0, "17465": 407124256.0, "17470": 407567520.0, "17475": 409246624.0, "17480": 406039232.0, "17485": 411338208.0, "17490": 409117952.0, "17495": 415273248.0, "17500": 417508032.0, "17505": 406851904.0, "17510": 416957504.0, "17515": 411765472.0, "17520": 411986560.0, "17525": 409270560.0, "17530": 411862752.0, "17535": 411788224.0, "17540": 412447392.0, "17545": 403051264.0, "17550": 416956832.0, "17555": 409880256.0, "17560": 408510912.0, "17565": 416721760.0, "17570": 405312288.0, "17575": 416094528.0, "17580": 411465472.0, "17585": 405900544.0, "17590": 417698880.0, "17595": 404111488.0, "17600": 409680480.0, "17605": 409251584.0, "17610": 409279072.0, "17615": 402016064.0, "17620": 402069184.0, "17625": 412044064.0, "17630": 412257888.0, "17635": 411715712.0, "17640": 413389760.0, "17645": 409785760.0, "17650": 414695936.0, "17655": 419360800.0, "17660": 411051968.0, "17665": 409312000.0, "17670": 405884256.0, "17675": 414984640.0, "17680": 413217056.0, "17685": 409422528.0, "17690": 421254496.0, "17695": 407050208.0, "17700": 415940544.0, "17705": 411930432.0, "17710": 406971072.0, "17715": 408774688.0, "17720": 410381440.0, "17725": 416168000.0, "17730": 411153568.0, "17735": 413657824.0, "17740": 415015200.0, "17745": 407876416.0, "17750": 409950976.0, "17755": 413230624.0, "17760": 406555200.0, "17765": 415104608.0, "17770": 420766912.0, "17775": 403816416.0, "17780": 411090784.0, "17785": 408567840.0, "17790": 411810432.0, "17795": 428170368.0, "17800": 412832128.0, "17805": 409146784.0, "17810": 412771968.0, "17815": 414323616.0, "17820": 408715296.0, "17825": 409904032.0, "17830": 414944768.0, "17835": 412523904.0, "17840": 419477984.0, "17845": 417005344.0, "17850": 409314144.0, "17855": 412074656.0, "17860": 417810848.0, "17865": 408925760.0, "17870": 411615520.0, "17875": 409833344.0, "17880": 420430016.0, "17885": 416230016.0, "17890": 415959168.0, "17895": 411581664.0, "17900": 412284224.0, "17905": 411523616.0, "17910": 411711488.0, "17915": 411049536.0, "17920": 408439808.0, "17925": 404232864.0, "17930": 415555840.0, "17935": 415505344.0, "17940": 406664864.0, "17945": 414045280.0, "17950": 420096672.0, "17955": 417597376.0, "17960": 422763744.0, "17965": 411750720.0, "17970": 410143488.0, "17975": 412726784.0, "17980": 410103264.0, "17985": 411085920.0, "17990": 411998336.0, "17995": 411391072.0, "18000": 408581632.0, "18005": 415408384.0, "18010": 418686048.0, "18015": 412999968.0, "18020": 407413024.0, "18025": 409514368.0, "18030": 411921088.0, "18035": 410165888.0, "18040": 418178688.0, "18045": 409341952.0, "18050": 416762208.0, "18055": 414937184.0, "18060": 412934816.0, "18065": 410699264.0, "18070": 409533728.0, "18075": 410187520.0, "18080": 414001728.0, "18085": 418107776.0, "18090": 402171200.0, "18095": 412337408.0, "18100": 418664704.0, "18105": 403323168.0, "18110": 410700192.0, "18115": 
413740224.0, "18120": 418257952.0, "18125": 415180160.0, "18130": 407997600.0, "18135": 407249824.0, "18140": 418688288.0, "18145": 409606944.0, "18150": 409122720.0, "18155": 421608416.0, "18160": 409018112.0, "18165": 407233088.0, "18170": 408765984.0, "18175": 413339584.0, "18180": 410954560.0, "18185": 416810112.0, "18190": 411684544.0, "18195": 408020096.0, "18200": 413988800.0, "18205": 407715680.0, "18210": 418077600.0, "18215": 409206432.0, "18220": 408611872.0, "18225": 408474080.0, "18230": 414017856.0, "18235": 415232960.0, "18240": 411956480.0, "18245": 408792128.0, "18250": 416211648.0, "18255": 409482656.0, "18260": 419769760.0, "18265": 408263872.0, "18270": 408878240.0, "18275": 418248576.0, "18280": 414109696.0, "18285": 422160224.0, "18290": 415265920.0, "18295": 403424480.0, "18300": 420770688.0, "18305": 406465440.0, "18310": 421072896.0, "18315": 428266944.0, "18320": 412463104.0, "18325": 413502656.0, "18330": 413545728.0, "18335": 417394720.0, "18340": 415301952.0, "18345": 413446720.0, "18350": 415153056.0, "18355": 423980352.0, "18360": 413320768.0, "18365": 415814208.0, "18370": 416946016.0, "18375": 416345984.0, "18380": 406015040.0, "18385": 402684672.0, "18390": 412275552.0, "18395": 411482656.0, "18400": 416902560.0, "18405": 405121184.0, "18410": 404132416.0, "18415": 420786784.0, "18420": 413248224.0, "18425": 411583200.0, "18430": 411434432.0, "18435": 410578144.0, "18440": 408565952.0, "18445": 410717600.0, "18450": 413047776.0, "18455": 411310752.0, "18460": 406725760.0, "18465": 417480448.0, "18470": 412568128.0, "18475": 410444352.0, "18480": 419527136.0, "18485": 405175776.0, "18490": 424105344.0, "18495": 411410240.0, "18500": 413726016.0, "18505": 414071200.0, "18510": 416767904.0, "18515": 414097312.0, "18520": 413526304.0, "18525": 404782528.0, "18530": 409178432.0, "18535": 413902016.0, "18540": 408655104.0, "18545": 414726592.0, "18550": 408550144.0, "18555": 419909632.0, "18560": 414628512.0, "18565": 411825728.0, "18570": 405693280.0, "18575": 413555520.0, "18580": 418522048.0, "18585": 415092192.0, "18590": 424287424.0, "18595": 417731072.0, "18600": 418601088.0, "18605": 411439296.0, "18610": 411263392.0, "18615": 412023360.0, "18620": 426006464.0, "18625": 408532960.0, "18630": 409520640.0, "18635": 412515968.0, "18640": 421149344.0, "18645": 413035008.0, "18650": 411299200.0, "18655": 407281440.0, "18660": 410919744.0, "18665": 410814112.0, "18670": 404548288.0, "18675": 410932416.0, "18680": 416844064.0, "18685": 419796896.0, "18690": 415867616.0, "18695": 404937280.0, "18700": 403831296.0, "18705": 413732544.0, "18710": 407218944.0, "18715": 415825216.0, "18720": 408642656.0, "18725": 411323616.0, "18730": 415402720.0, "18735": 419431488.0, "18740": 411222816.0, "18745": 417870816.0, "18750": 408758144.0, "18755": 415050208.0, "18760": 409719584.0, "18765": 410312000.0, "18770": 407073280.0, "18775": 412846688.0, "18780": 404211232.0, "18785": 409458464.0, "18790": 412207328.0, "18795": 407070592.0, "18800": 409813664.0, "18805": 419275040.0, "18810": 417547904.0, "18815": 408205248.0, "18820": 405967232.0, "18825": 416955616.0, "18830": 411561280.0, "18835": 413862432.0, "18840": 417080768.0, "18845": 407665632.0, "18850": 413380064.0, "18855": 407320160.0, "18860": 419145824.0, "18865": 418243456.0, "18870": 408898240.0, "18875": 409244512.0, "18880": 413332704.0, "18885": 408360352.0, "18890": 409466720.0, "18895": 415445152.0, "18900": 415234624.0, "18905": 413692160.0, "18910": 414739520.0, "18915": 414074208.0, "18920": 411152000.0, 
"18925": 420992256.0, "18930": 410630208.0, "18935": 411541536.0, "18940": 410691744.0, "18945": 420483808.0, "18950": 403750272.0, "18955": 417049696.0, "18960": 406669600.0, "18965": 412246176.0, "18970": 410859648.0, "18975": 416116800.0, "18980": 406215424.0, "18985": 414696672.0, "18990": 411610624.0, "18995": 412063232.0, "19000": 412446400.0, "19005": 411365792.0, "19010": 415934592.0, "19015": 415272160.0, "19020": 406672000.0, "19025": 406813184.0, "19030": 410920896.0, "19035": 404416896.0, "19040": 411256704.0, "19045": 413203776.0, "19050": 404277984.0, "19055": 413081632.0, "19060": 404962048.0, "19065": 420960096.0, "19070": 407555104.0, "19075": 407001792.0, "19080": 419034464.0, "19085": 407389344.0, "19090": 418145184.0, "19095": 411152128.0, "19100": 409439744.0, "19105": 408958784.0, "19110": 413988800.0, "19115": 403306304.0, "19120": 413580704.0, "19125": 403680224.0, "19130": 409334016.0, "19135": 419386016.0, "19140": 408331104.0, "19145": 415909600.0, "19150": 409684736.0, "19155": 412438016.0, "19160": 418479744.0, "19165": 401079200.0, "19170": 409706080.0, "19175": 404629216.0, "19180": 408687360.0, "19185": 413788480.0, "19190": 407550016.0, "19195": 414562848.0, "19200": 415306496.0, "19205": 414571232.0, "19210": 418399872.0, "19215": 407711808.0, "19220": 407669088.0, "19225": 407567936.0, "19230": 414892224.0, "19235": 405996832.0, "19240": 411456896.0, "19245": 408497504.0, "19250": 414920608.0, "19255": 419509728.0, "19260": 420326400.0, "19265": 409921984.0, "19270": 415164704.0, "19275": 413591456.0, "19280": 412715040.0, "19285": 414453888.0, "19290": 412362272.0, "19295": 409338688.0, "19300": 415076192.0, "19305": 418533152.0, "19310": 410020544.0, "19315": 420467104.0, "19320": 413499008.0, "19325": 419592736.0, "19330": 411146304.0, "19335": 407996480.0, "19340": 413900800.0, "19345": 415037632.0, "19350": 410591200.0, "19355": 415967392.0, "19360": 415635488.0, "19365": 415850464.0, "19370": 399080768.0, "19375": 401398880.0, "19380": 412629824.0, "19385": 405405216.0, "19390": 410637344.0, "19395": 411670816.0, "19400": 409167968.0, "19405": 411349184.0, "19410": 409335712.0, "19415": 422183488.0, "19420": 408534496.0, "19425": 412619200.0, "19430": 417307264.0, "19435": 409931296.0, "19440": 416475360.0, "19445": 421671520.0, "19450": 420125248.0, "19455": 415623040.0, "19460": 413892480.0, "19465": 417546496.0, "19470": 411881760.0, "19475": 415792672.0, "19480": 414585184.0, "19485": 416048928.0, "19490": 407578784.0, "19495": 413064032.0, "19500": 404273248.0, "19505": 415896704.0, "19510": 411385632.0, "19515": 415096352.0, "19520": 415147008.0, "19525": 407606912.0, "19530": 418691456.0, "19535": 404401184.0, "19540": 412495232.0, "19545": 423195040.0, "19550": 408894368.0, "19555": 416661504.0, "19560": 408847840.0, "19565": 416502272.0, "19570": 411210080.0, "19575": 414668992.0, "19580": 407004928.0, "19585": 415507936.0, "19590": 418813568.0, "19595": 412432992.0, "19600": 411316224.0, "19605": 413662336.0, "19610": 412975232.0, "19615": 412891424.0, "19620": 420607616.0, "19625": 409750560.0, "19630": 416612864.0, "19635": 407544896.0, "19640": 403941728.0, "19645": 405370752.0, "19650": 407345056.0, "19655": 409145472.0, "19660": 418982624.0, "19665": 412968288.0, "19670": 409396864.0, "19675": 407664928.0, "19680": 412711712.0, "19685": 415743616.0, "19690": 411041184.0, "19695": 409965824.0, "19700": 411163328.0, "19705": 409867392.0, "19710": 418023264.0, "19715": 408118400.0, "19720": 409301920.0, "19725": 409807008.0, "19730": 
410347040.0, "19735": 408404128.0, "19740": 403324672.0, "19745": 406558688.0, "19750": 412855968.0, "19755": 417858848.0, "19760": 411189120.0, "19765": 409530336.0, "19770": 413663072.0, "19775": 412732928.0, "19780": 412344896.0, "19785": 408025888.0, "19790": 417487136.0, "19795": 410689824.0, "19800": 418243264.0, "19805": 412183392.0, "19810": 416292192.0, "19815": 407150464.0, "19820": 410337440.0, "19825": 410763296.0, "19830": 411473824.0, "19835": 408444608.0, "19840": 414653760.0, "19845": 419564992.0, "19850": 406363648.0, "19855": 412311584.0, "19860": 415854464.0, "19865": 412066656.0, "19870": 408669568.0, "19875": 414936480.0, "19880": 413199680.0, "19885": 409919552.0, "19890": 412044768.0, "19895": 410016512.0, "19900": 410069696.0, "19905": 412936512.0, "19910": 414260256.0, "19915": 412673344.0, "19920": 407741664.0, "19925": 420244768.0, "19930": 411709920.0, "19935": 415618208.0, "19940": 414419712.0, "19945": 410098976.0, "19950": 408986816.0, "19955": 416253824.0, "19960": 402488128.0, "19965": 408744224.0, "19970": 422245920.0, "19975": 408893792.0, "19980": 413085760.0, "19985": 414571520.0, "19990": 411532832.0, "19995": 413506688.0, "20000": 402565600.0, "20005": 408887104.0, "20010": 418072224.0, "20015": 410046400.0, "20020": 416119168.0, "20025": 405884160.0, "20030": 413845024.0, "20035": 407409024.0, "20040": 411188768.0, "20045": 417174848.0, "20050": 416618016.0, "20055": 413744416.0, "20060": 417658560.0, "20065": 412241696.0, "20070": 416385600.0, "20075": 411061408.0, "20080": 418695584.0, "20085": 424829600.0, "20090": 413289792.0, "20095": 413813088.0, "20100": 406723552.0, "20105": 419154112.0, "20110": 404197344.0, "20115": 412806912.0, "20120": 412997952.0, "20125": 411455584.0, "20130": 410458592.0, "20135": 410843904.0, "20140": 408531840.0, "20145": 404059936.0, "20150": 417027520.0, "20155": 414557760.0, "20160": 407069984.0, "20165": 412731936.0, "20170": 408374592.0, "20175": 419102816.0, "20180": 412532864.0, "20185": 405928448.0, "20190": 414427616.0, "20195": 413323776.0, "20200": 405762912.0, "20205": 409934752.0, "20210": 416875840.0, "20215": 407881888.0, "20220": 413683328.0, "20225": 406859200.0, "20230": 416218944.0, "20235": 408203168.0, "20240": 423150848.0, "20245": 406581888.0, "20250": 409254720.0, "20255": 411865920.0, "20260": 412079616.0, "20265": 414861376.0, "20270": 414588128.0, "20275": 405077760.0, "20280": 415444384.0, "20285": 414014016.0, "20290": 413301888.0, "20295": 406311232.0, "20300": 410500512.0, "20305": 416355168.0, "20310": 414039072.0, "20315": 410516928.0, "20320": 410574656.0, "20325": 410109920.0, "20330": 420062688.0, "20335": 414455328.0, "20340": 408931200.0, "20345": 404064096.0, "20350": 403944160.0, "20355": 417240736.0, "20360": 406383392.0, "20365": 411880544.0, "20370": 412221184.0, "20375": 409740032.0, "20380": 416054528.0, "20385": 408748416.0, "20390": 413148864.0, "20395": 414879232.0, "20400": 409416992.0, "20405": 415198368.0, "20410": 421820512.0, "20415": 406802304.0, "20420": 411157024.0, "20425": 411034880.0, "20430": 411368864.0, "20435": 414508416.0, "20440": 409680736.0, "20445": 411534336.0, "20450": 411289696.0, "20455": 418504672.0, "20460": 407066592.0, "20465": 418161824.0, "20470": 413494880.0, "20475": 414120672.0, "20480": 412793280.0, "20485": 416331968.0, "20490": 409866560.0, "20495": 416311040.0, "20500": 413280160.0, "20505": 409780416.0, "20510": 419166336.0, "20515": 411706784.0, "20520": 414715264.0, "20525": 409339936.0, "20530": 409853216.0, "20535": 413851872.0, 
"20540": 408935488.0, "20545": 427377600.0, "20550": 405741056.0, "20555": 412875392.0, "20560": 415039616.0, "20565": 415021216.0, "20570": 415256864.0, "20575": 416401056.0, "20580": 403936512.0, "20585": 414412992.0, "20590": 408411200.0, "20595": 404911712.0, "20600": 408416160.0, "20605": 407507680.0, "20610": 408919968.0, "20615": 415694656.0, "20620": 408722016.0, "20625": 411366432.0, "20630": 415450016.0, "20635": 418439264.0, "20640": 415477984.0, "20645": 421592448.0, "20650": 416409376.0, "20655": 408114464.0, "20660": 411916096.0, "20665": 413584160.0, "20670": 413258816.0, "20675": 413048096.0, "20680": 412053024.0, "20685": 412823392.0, "20690": 409399488.0, "20695": 415799488.0, "20700": 409562848.0, "20705": 409881216.0, "20710": 411806720.0, "20715": 416490912.0, "20720": 413154368.0, "20725": 411161472.0, "20730": 411396576.0, "20735": 415442400.0, "20740": 419454752.0, "20745": 405160192.0, "20750": 417198432.0, "20755": 413084064.0, "20760": 422482240.0, "20765": 417026688.0, "20770": 408707936.0, "20775": 407246816.0, "20780": 410167840.0, "20785": 418902880.0, "20790": 423055328.0, "20795": 410385440.0, "20800": 408245664.0, "20805": 416959968.0, "20810": 413724576.0, "20815": 412903936.0, "20820": 416135776.0, "20825": 404767968.0, "20830": 417082720.0, "20835": 403041280.0, "20840": 410034784.0, "20845": 413320160.0, "20850": 416865120.0, "20855": 413172416.0, "20860": 405331840.0, "20865": 412470816.0, "20870": 415194528.0, "20875": 409435648.0, "20880": 413347488.0, "20885": 410199104.0, "20890": 409080576.0, "20895": 412552544.0, "20900": 412187424.0, "20905": 412256384.0, "20910": 408006944.0, "20915": 403732768.0, "20920": 416716032.0, "20925": 404654336.0, "20930": 415316896.0, "20935": 414530816.0, "20940": 423140992.0, "20945": 416454304.0, "20950": 408545184.0, "20955": 407835840.0, "20960": 418400128.0, "20965": 405082784.0, "20970": 412671104.0, "20975": 415451680.0, "20980": 404419136.0, "20985": 413510816.0, "20990": 410618176.0, "20995": 403737664.0, "21000": 409868992.0, "21005": 410435392.0, "21010": 418828480.0, "21015": 407938816.0, "21020": 413337408.0, "21025": 412251360.0, "21030": 415039584.0, "21035": 414164896.0, "21040": 408254336.0, "21045": 412311136.0, "21050": 417624128.0, "21055": 406229920.0, "21060": 409567232.0, "21065": 415949664.0, "21070": 416280160.0, "21075": 412723872.0, "21080": 417126496.0, "21085": 416679840.0, "21090": 412441312.0, "21095": 417909504.0, "21100": 410894592.0, "21105": 410439424.0, "21110": 412095808.0, "21115": 410350112.0, "21120": 417512736.0, "21125": 418091840.0, "21130": 410559104.0, "21135": 414792192.0, "21140": 418340736.0, "21145": 409462784.0, "21150": 407958336.0, "21155": 417525600.0, "21160": 409942848.0, "21165": 413738368.0, "21170": 407052544.0, "21175": 411069984.0, "21180": 413533856.0, "21185": 413025088.0, "21190": 426694784.0, "21195": 405754624.0, "21200": 409801152.0, "21205": 402700480.0, "21210": 409379744.0, "21215": 420294656.0, "21220": 408144256.0, "21225": 407523232.0, "21230": 416750944.0, "21235": 412313824.0, "21240": 407182816.0, "21245": 415487904.0, "21250": 405623104.0, "21255": 416091872.0, "21260": 409880640.0, "21265": 411606816.0, "21270": 405790656.0, "21275": 413778944.0, "21280": 415663232.0, "21285": 409323392.0, "21290": 410381952.0, "21295": 412146848.0, "21300": 411116000.0, "21305": 410582176.0, "21310": 410640640.0, "21315": 411707008.0, "21320": 418369248.0, "21325": 413615616.0, "21330": 424023040.0, "21335": 420995936.0, "21340": 419020224.0, "21345": 
408749568.0, "21350": 412607264.0, "21355": 414635712.0, "21360": 411042624.0, "21365": 407758080.0, "21370": 407320960.0, "21375": 410098528.0, "21380": 410607904.0, "21385": 410164576.0, "21390": 410225408.0, "21395": 417600768.0, "21400": 410155040.0, "21405": 415630144.0, "21410": 411950560.0, "21415": 412537760.0, "21420": 416699424.0, "21425": 417100320.0, "21430": 426714880.0, "21435": 410495072.0, "21440": 413770784.0, "21445": 411678368.0, "21450": 409476000.0, "21455": 416406080.0, "21460": 421080224.0, "21465": 410555200.0, "21470": 406626592.0, "21475": 410348256.0, "21480": 415304832.0, "21485": 413520416.0, "21490": 403549184.0, "21495": 421377280.0, "21500": 405184448.0, "21505": 418319712.0, "21510": 418465440.0, "21515": 404486880.0, "21520": 410012640.0, "21525": 406050688.0, "21530": 414173440.0, "21535": 414240064.0, "21540": 414879616.0, "21545": 414526656.0, "21550": 415775584.0, "21555": 422155936.0, "21560": 410268832.0, "21565": 408779488.0, "21570": 410821824.0, "21575": 413066624.0, "21580": 410119680.0, "21585": 413775040.0, "21590": 416292992.0, "21595": 413908384.0, "21600": 418435872.0, "21605": 405390304.0, "21610": 411720416.0, "21615": 418369728.0, "21620": 412800384.0, "21625": 414277440.0, "21630": 416204224.0, "21635": 412891104.0, "21640": 411680064.0, "21645": 421281024.0, "21650": 417174208.0, "21655": 412140576.0, "21660": 415205856.0, "21665": 412944384.0, "21670": 411331008.0, "21675": 415744448.0, "21680": 416871520.0, "21685": 414754080.0, "21690": 408071648.0, "21695": 407753536.0, "21700": 412859968.0, "21705": 413502688.0, "21710": 415009216.0, "21715": 405793472.0, "21720": 420577280.0, "21725": 407978976.0, "21730": 414161440.0, "21735": 415065088.0, "21740": 410652416.0, "21745": 418615008.0, "21750": 410309984.0, "21755": 412040320.0, "21760": 405801088.0, "21765": 406630912.0, "21770": 413805600.0, "21775": 414786592.0, "21780": 415973952.0, "21785": 398152256.0, "21790": 416564032.0, "21795": 417199904.0, "21800": 410831712.0, "21805": 409718912.0, "21810": 412675392.0, "21815": 411761120.0, "21820": 418316736.0, "21825": 412906880.0, "21830": 404584768.0, "21835": 414988736.0, "21840": 409563104.0, "21845": 420916928.0, "21850": 406930528.0, "21855": 415497376.0, "21860": 419218752.0, "21865": 414229152.0, "21870": 416052736.0, "21875": 412053504.0, "21880": 408746528.0, "21885": 417445216.0, "21890": 417479872.0, "21895": 405269536.0, "21900": 413999328.0, "21905": 413472768.0, "21910": 412744768.0, "21915": 416157888.0, "21920": 417414688.0, "21925": 413478848.0, "21930": 410514528.0, "21935": 403778752.0, "21940": 411358656.0, "21945": 415218880.0, "21950": 404432640.0, "21955": 416506240.0, "21960": 412167040.0, "21965": 401651328.0, "21970": 406692032.0, "21975": 413361536.0, "21980": 414899904.0, "21985": 417171072.0, "21990": 412173344.0, "21995": 414387328.0, "22000": 407261760.0, "22005": 412197600.0, "22010": 422669216.0, "22015": 419993888.0, "22020": 403650784.0, "22025": 411224672.0, "22030": 410846080.0, "22035": 406484384.0, "22040": 415411296.0, "22045": 411508320.0, "22050": 412391648.0, "22055": 405318656.0, "22060": 412730080.0, "22065": 416713152.0, "22070": 405621248.0, "22075": 405562432.0, "22080": 409542400.0, "22085": 408132640.0, "22090": 412378432.0, "22095": 414234464.0, "22100": 413224704.0, "22105": 414357952.0, "22110": 404915328.0, "22115": 413883616.0, "22120": 412068192.0, "22125": 406506880.0, "22130": 404884960.0, "22135": 420834848.0, "22140": 419540480.0, "22145": 408536736.0, "22150": 412877184.0, 
"22155": 415947232.0, "22160": 411652640.0, "22165": 411602624.0, "22170": 411642976.0, "22175": 408144320.0, "22180": 409304288.0, "22185": 410280320.0, "22190": 410638368.0, "22195": 415389472.0, "22200": 409081440.0, "22205": 418898912.0, "22210": 414949792.0, "22215": 414638272.0, "22220": 411658528.0, "22225": 408147520.0, "22230": 414624480.0, "22235": 408313440.0, "22240": 416294976.0, "22245": 414153664.0, "22250": 406869984.0, "22255": 425964384.0, "22260": 414845152.0, "22265": 411602688.0, "22270": 419838464.0, "22275": 410484320.0, "22280": 409192832.0, "22285": 417771136.0, "22290": 408748544.0, "22295": 413541728.0, "22300": 418545760.0, "22305": 409467424.0, "22310": 413154976.0, "22315": 409220256.0, "22320": 411004928.0, "22325": 406957408.0, "22330": 408074752.0, "22335": 413777632.0, "22340": 414164096.0, "22345": 414349664.0, "22350": 406883328.0, "22355": 404236736.0, "22360": 410061536.0, "22365": 413229344.0, "22370": 411256672.0, "22375": 420582112.0, "22380": 413588640.0, "22385": 417546688.0, "22390": 417002880.0, "22395": 418154688.0, "22400": 406703104.0, "22405": 414816320.0, "22410": 421225344.0, "22415": 414180704.0, "22420": 406251040.0, "22425": 411104896.0, "22430": 400278240.0, "22435": 422023808.0, "22440": 412917984.0, "22445": 409310176.0, "22450": 419033504.0, "22455": 416004000.0, "22460": 410310432.0, "22465": 414477280.0, "22470": 414896992.0, "22475": 405125088.0, "22480": 414988640.0, "22485": 409894304.0, "22490": 407959616.0, "22495": 421066624.0, "22500": 428334016.0, "22505": 408860192.0, "22510": 416223808.0, "22515": 410885312.0, "22520": 407074368.0, "22525": 409517056.0, "22530": 417248064.0, "22535": 409873824.0, "22540": 412772672.0, "22545": 401907776.0, "22550": 405684832.0, "22555": 416559360.0, "22560": 410231872.0, "22565": 416026304.0, "22570": 408207168.0, "22575": 408415520.0, "22580": 421066880.0, "22585": 402715392.0, "22590": 413939680.0, "22595": 413265952.0, "22600": 413906528.0, "22605": 421643264.0, "22610": 409962336.0, "22615": 419688768.0, "22620": 403428224.0, "22625": 417356384.0, "22630": 414270816.0, "22635": 402030464.0, "22640": 411766112.0, "22645": 408706144.0, "22650": 408960736.0, "22655": 412518464.0, "22660": 405395648.0, "22665": 414050144.0, "22670": 416521536.0, "22675": 416143360.0, "22680": 407607904.0, "22685": 407883456.0, "22690": 409551456.0, "22695": 409758464.0, "22700": 415984000.0, "22705": 412856032.0, "22710": 407612416.0, "22715": 419986368.0, "22720": 414294176.0, "22725": 412746624.0, "22730": 408801696.0, "22735": 414045728.0, "22740": 416653472.0, "22745": 407327360.0, "22750": 408112256.0, "22755": 411168576.0, "22760": 414334880.0, "22765": 406369216.0, "22770": 418230304.0, "22775": 422615680.0, "22780": 412257312.0, "22785": 408528608.0, "22790": 404729504.0, "22795": 417562688.0, "22800": 402293632.0, "22805": 416769984.0, "22810": 416731808.0, "22815": 408505440.0, "22820": 413746496.0, "22825": 411777952.0, "22830": 410075936.0, "22835": 415559968.0, "22840": 414809696.0, "22845": 415508192.0, "22850": 410611232.0, "22855": 410761696.0, "22860": 420270112.0, "22865": 411118080.0, "22870": 404955648.0, "22875": 409486112.0, "22880": 410163680.0, "22885": 417430944.0, "22890": 410052768.0, "22895": 414490656.0, "22900": 419991584.0, "22905": 412127360.0, "22910": 414426688.0, "22915": 406900320.0, "22920": 413309248.0, "22925": 410159392.0, "22930": 413118880.0, "22935": 406865248.0, "22940": 405036288.0, "22945": 413336416.0, "22950": 413939072.0, "22955": 406184096.0, "22960": 
416838560.0, "22965": 412345696.0, "22970": 411227712.0, "22975": 416418656.0, "22980": 403470144.0, "22985": 416202688.0, "22990": 411314208.0, "22995": 411716736.0, "23000": 411677760.0, "23005": 410344928.0, "23010": 406835424.0, "23015": 415309856.0, "23020": 400302784.0, "23025": 405762688.0, "23030": 421426112.0, "23035": 405599808.0, "23040": 413511808.0, "23045": 416459104.0, "23050": 422021312.0, "23055": 416221536.0, "23060": 415664608.0, "23065": 417712896.0, "23070": 408322880.0, "23075": 410767776.0, "23080": 427082848.0, "23085": 418205472.0, "23090": 412040832.0, "23095": 409220480.0, "23100": 413935104.0, "23105": 412622048.0, "23110": 417436288.0, "23115": 409289184.0, "23120": 415624608.0, "23125": 409673152.0, "23130": 410083424.0, "23135": 406098432.0, "23140": 411323776.0, "23145": 413734144.0, "23150": 408440224.0, "23155": 415728064.0, "23160": 406563936.0, "23165": 410422432.0, "23170": 410665280.0, "23175": 409248320.0, "23180": 410776192.0, "23185": 410368992.0, "23190": 420262976.0, "23195": 410506368.0, "23200": 415071232.0, "23205": 410574048.0, "23210": 414712640.0, "23215": 414167328.0, "23220": 410102656.0, "23225": 415665280.0, "23230": 414626912.0, "23235": 417607136.0, "23240": 411098080.0, "23245": 416877696.0, "23250": 413467584.0, "23255": 407741728.0, "23260": 409376416.0, "23265": 406184544.0, "23270": 411728480.0, "23275": 406444416.0, "23280": 412710688.0, "23285": 415865984.0, "23290": 411525152.0, "23295": 416550400.0, "23300": 417036288.0, "23305": 409060672.0, "23310": 418017472.0, "23315": 412003776.0, "23320": 408968992.0, "23325": 411260992.0, "23330": 406554176.0, "23335": 410467360.0, "23340": 412832768.0, "23345": 414015264.0, "23350": 407894816.0, "23355": 412451744.0, "23360": 413295488.0, "23365": 415191616.0, "23370": 412411104.0, "23375": 419196896.0, "23380": 408942464.0, "23385": 417425696.0, "23390": 410232032.0, "23395": 413273440.0, "23400": 402196480.0, "23405": 406666720.0, "23410": 408685728.0, "23415": 415657472.0, "23420": 408565472.0, "23425": 404238976.0, "23430": 411056512.0, "23435": 414427264.0, "23440": 416292256.0, "23445": 416174112.0, "23450": 412673248.0, "23455": 402290560.0, "23460": 421079712.0, "23465": 417811968.0, "23470": 409069696.0, "23475": 407092128.0, "23480": 420266880.0, "23485": 410928672.0, "23490": 411152096.0, "23495": 409462112.0, "23500": 411116960.0, "23505": 413943328.0, "23510": 406810016.0, "23515": 412527296.0, "23520": 405644032.0, "23525": 406163136.0, "23530": 409884320.0, "23535": 414526784.0, "23540": 406790144.0, "23545": 403331264.0, "23550": 417407584.0, "23555": 412919040.0, "23560": 409746496.0, "23565": 408280864.0, "23570": 411532832.0, "23575": 416516128.0, "23580": 403139552.0, "23585": 409820896.0, "23590": 404846368.0, "23595": 416784992.0, "23600": 415224864.0, "23605": 409120256.0, "23610": 419836192.0, "23615": 417178272.0, "23620": 409569696.0, "23625": 409388928.0, "23630": 416391840.0, "23635": 404075904.0, "23640": 409163392.0, "23645": 410788864.0, "23650": 416430880.0, "23655": 403504608.0, "23660": 413235456.0, "23665": 412669536.0, "23670": 410663552.0, "23675": 405478336.0, "23680": 405857472.0, "23685": 407952352.0, "23690": 414320128.0, "23695": 405869888.0, "23700": 413408128.0, "23705": 413630816.0, "23710": 420284608.0, "23715": 411334560.0, "23720": 416649280.0, "23725": 413527296.0, "23730": 403304384.0, "23735": 413854944.0, "23740": 410569824.0, "23745": 401980192.0, "23750": 421117408.0, "23755": 418312256.0, "23760": 406193504.0, "23765": 413858048.0, 
"23770": 401852576.0, "23775": 417962048.0, "23780": 408079200.0, "23785": 412911424.0, "23790": 409901920.0, "23795": 411784800.0, "23800": 413321056.0, "23805": 407862720.0, "23810": 415684832.0, "23815": 413967872.0, "23820": 409880416.0, "23825": 418403040.0, "23830": 410238720.0, "23835": 409737536.0, "23840": 412772576.0, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": 
"nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": 
"nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": 
"nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": 
"nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": 
"nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 
"nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": 
"nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": 
"nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": 
"nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": 
"nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": 
"nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": 
"nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": 
"nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": 
"nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": 
"nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 
"nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": 
"nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": 
"nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": 
"nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": 
"nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": 
"nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": 
"nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": 
"nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": 
"nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 17448312832.0, "5": 17448386560.0, "10": 17448083456.0, "15": 17447976960.0, "20": 17448022016.0, "25": 17448237056.0, "30": 17448200192.0, "35": 17448280064.0, "40": 17448224768.0, "45": 17448394752.0, "50": 17448083456.0, "55": 17448454144.0, "60": 17448280064.0, "65": 17448347648.0, "70": 17448210432.0, "75": 17448325120.0, "80": 17448228864.0, "85": 17448239104.0, "90": 17448218624.0, "95": 17448251392.0, "100": 17448216576.0, "105": 17448210432.0, "110": 17448411136.0, "115": 17448321024.0, "120": 17448249344.0, "125": 17448224768.0, "130": 17448243200.0, "135": 17448292352.0, "140": 17448257536.0, "145": 17448271872.0, "150": 17448411136.0, "155": 17448212480.0, "160": 17448224768.0, "165": 17448208384.0, "170": 17448259584.0, "175": 17448243200.0, "180": 17448292352.0, "185": 17448290304.0, "190": 17448230912.0, "195": 17448300544.0, "200": 17448222720.0, "205": 17448228864.0, "210": 17448239104.0, "215": 17448230912.0, "220": 17448241152.0, "225": 17448228864.0, "230": 17448253440.0, "235": 17448222720.0, "240": 17448278016.0, "245": 17448288256.0, "250": 17448280064.0, "255": 17448265728.0, "260": 17448280064.0, "265": 17448269824.0, "270": 17448249344.0, "275": 17448321024.0, "280": 17448282112.0, "285": 17448296448.0, "290": 17448263680.0, "295": 17448251392.0, "300": 17448245248.0, "305": 17448310784.0, "310": 17448230912.0, "315": 17448232960.0, "320": 17448251392.0, "325": 17448255488.0, "330": 17448259584.0, "335": 17448275968.0, "340": 17448212480.0, "345": 17448288256.0, "350": 17448300544.0, "355": 17448267776.0, "360": 17448230912.0, "365": 17448282112.0, "370": 17448259584.0, "375": 17448286208.0, "380": 17448282112.0, "385": 17448239104.0, "390": 17448263680.0, "395": 17448275968.0, "400": 17448265728.0, "405": 17448273920.0, "410": 17448292352.0, "415": 17448275968.0, "420": 17448302592.0, "425": 17448253440.0, "430": 17448261632.0, "435": 17448239104.0, 
"440": 17448280064.0, "445": 17448243200.0, "450": 17448310784.0, "455": 17448294400.0, "460": 17448263680.0, "465": 17448267776.0, "470": 17448278016.0, "475": 17448269824.0, "480": 17448286208.0, "485": 17448280064.0, "490": 17448261632.0, "495": 17448255488.0, "500": 17448249344.0, "505": 17447895040.0, "510": 17447874560.0, "515": 17447878656.0, "520": 17447866368.0, "525": 17447878656.0, "530": 17447892992.0, "535": 17447878656.0, "540": 17447895040.0, "545": 17447886848.0, "550": 17447880704.0, "555": 17447876608.0, "560": 17447858176.0, "565": 17447882752.0, "570": 17447874560.0, "575": 17447899136.0, "580": 17447907328.0, "585": 17447870464.0, "590": 17447892992.0, "595": 17447866368.0, "600": 17447907328.0, "605": 17447856128.0, "610": 17447895040.0, "615": 17447913472.0, "620": 17447890944.0, "625": 17447890944.0, "630": 17448611840.0, "635": 17447870464.0, "640": 17447890944.0, "645": 17447849984.0, "650": 17447868416.0, "655": 17447852032.0, "660": 17447849984.0, "665": 17447866368.0, "670": 17447868416.0, "675": 17447892992.0, "680": 17447878656.0, "685": 17447903232.0, "690": 17447886848.0, "695": 17447886848.0, "700": 17447868416.0, "705": 17447886848.0, "710": 17447884800.0, "715": 17447870464.0, "720": 17447899136.0, "725": 17447878656.0, "730": 17447858176.0, "735": 17447895040.0, "740": 17447886848.0, "745": 17447852032.0, "750": 17447890944.0, "755": 17447895040.0, "760": 17447890944.0, "765": 17447878656.0, "770": 17447897088.0, "775": 17447884800.0, "780": 17447913472.0, "785": 17447870464.0, "790": 17447880704.0, "795": 17447874560.0, "800": 17447874560.0, "805": 17447882752.0, "810": 17447882752.0, "815": 17447899136.0, "820": 17447878656.0, "825": 17447901184.0, "830": 17447878656.0, "835": 17447909376.0, "840": 17447888896.0, "845": 17447835648.0, "850": 17447882752.0, "855": 17447882752.0, "860": 17447876608.0, "865": 17447864320.0, "870": 17447888896.0, "875": 17447866368.0, "880": 17447882752.0, "885": 17447903232.0, "890": 17447874560.0, "895": 17447884800.0, "900": 17447878656.0, "905": 17447874560.0, "910": 17447892992.0, "915": 17447874560.0, "920": 17447888896.0, "925": 17447895040.0, "930": 17447878656.0, "935": 17447866368.0, "940": 17447897088.0, "945": 17447874560.0, "950": 17447886848.0, "955": 17447911424.0, "960": 17447886848.0, "965": 17447866368.0, "970": 17447882752.0, "975": 17447882752.0, "980": 17447868416.0, "985": 17447917568.0, "990": 17447860224.0, "995": 17447872512.0, "1000": 17447884800.0, "1005": 17447856128.0, "1010": 17447874560.0, "1015": 17447882752.0, "1020": 17447884800.0, "1025": 17447878656.0, "1030": 17447852032.0, "1035": 17447870464.0, "1040": 17447882752.0, "1045": 17447866368.0, "1050": 17447884800.0, "1055": 17448890368.0, "1060": 17448912896.0, "1065": 17448890368.0, "1070": 17448888320.0, "1075": 17448896512.0, "1080": 17448908800.0, "1085": 17448886272.0, "1090": 17448886272.0, "1095": 17449066496.0, "1100": 17448888320.0, "1105": 17448876032.0, "1110": 17448890368.0, "1115": 17448886272.0, "1120": 17449021440.0, "1125": 17448886272.0, "1130": 17448890368.0, "1135": 17448886272.0, "1140": 17448871936.0, "1145": 17448880128.0, "1150": 17448867840.0, "1155": 17448880128.0, "1160": 17448880128.0, "1165": 17448892416.0, "1170": 17448880128.0, "1175": 17448880128.0, "1180": 17448896512.0, "1185": 17448884224.0, "1190": 17448884224.0, "1195": 17448953856.0, "1200": 17448841216.0, "1205": 17448904704.0, "1210": 17448849408.0, "1215": 17448830976.0, "1220": 17448935424.0, "1225": 17448929280.0, "1230": 17448982528.0, "1235": 
17448859648.0, "1240": 17448882176.0, "1245": 17448892416.0, "1250": 17448859648.0, "1255": 17448833024.0, "1260": 17448867840.0, "1265": 17448878080.0, "1270": 17448884224.0, "1275": 17448890368.0, "1280": 17448886272.0, "1285": 17448878080.0, "1290": 17448892416.0, "1295": 17448857600.0, "1300": 17448896512.0, "1305": 17448884224.0, "1310": 17448888320.0, "1315": 17448898560.0, "1320": 17448863744.0, "1325": 17448902656.0, "1330": 17448900608.0, "1335": 17448873984.0, "1340": 17448882176.0, "1345": 17448890368.0, "1350": 17448890368.0, "1355": 17448876032.0, "1360": 17448884224.0, "1365": 17448892416.0, "1370": 17448878080.0, "1375": 17448898560.0, "1380": 17448884224.0, "1385": 17448880128.0, "1390": 17448890368.0, "1395": 17448896512.0, "1400": 17448878080.0, "1405": 17448882176.0, "1410": 17448876032.0, "1415": 17448916992.0, "1420": 17448910848.0, "1425": 17448892416.0, "1430": 17448890368.0, "1435": 17448882176.0, "1440": 17448882176.0, "1445": 17448882176.0, "1450": 17448876032.0, "1455": 17448894464.0, "1460": 17448890368.0, "1465": 17448882176.0, "1470": 17448882176.0, "1475": 17448890368.0, "1480": 17448892416.0, "1485": 17448882176.0, "1490": 17448894464.0, "1495": 17448871936.0, "1500": 17448876032.0, "1505": 17448900608.0, "1510": 17448886272.0, "1515": 17448882176.0, "1520": 17448890368.0, "1525": 17448880128.0, "1530": 17448890368.0, "1535": 17448894464.0, "1540": 17448884224.0, "1545": 17448886272.0, "1550": 17448880128.0, "1555": 17448902656.0, "1560": 17448886272.0, "1565": 17448884224.0, "1570": 17448902656.0, "1575": 17448898560.0, "1580": 17448871936.0, "1585": 17448892416.0, "1590": 17448892416.0, "1595": 17448894464.0, "1600": 17448863744.0, "1605": 17448861696.0, "1610": 17448898560.0, "1615": 17447837696.0, "1620": 17447903232.0, "1625": 17447868416.0, "1630": 17447825408.0, "1635": 17447882752.0, "1640": 17447831552.0, "1645": 17447917568.0, "1650": 17447878656.0, "1655": 17447919616.0, "1660": 17447882752.0, "1665": 17447858176.0, "1670": 17447886848.0, "1675": 17447903232.0, "1680": 17447880704.0, "1685": 17447892992.0, "1690": 17447895040.0, "1695": 17447880704.0, "1700": 17447886848.0, "1705": 17447884800.0, "1710": 17447870464.0, "1715": 17447880704.0, "1720": 17447870464.0, "1725": 17447886848.0, "1730": 17447882752.0, "1735": 17447895040.0, "1740": 17447878656.0, "1745": 17447913472.0, "1750": 17447884800.0, "1755": 17447878656.0, "1760": 17447884800.0, "1765": 17447890944.0, "1770": 17447872512.0, "1775": 17447854080.0, "1780": 17447886848.0, "1785": 17447882752.0, "1790": 17447890944.0, "1795": 17447874560.0, "1800": 17447866368.0, "1805": 17447876608.0, "1810": 17447856128.0, "1815": 17447872512.0, "1820": 17447884800.0, "1825": 17447892992.0, "1830": 17447874560.0, "1835": 17447888896.0, "1840": 17447876608.0, "1845": 17447892992.0, "1850": 17447886848.0, "1855": 17447866368.0, "1860": 17447890944.0, "1865": 17447890944.0, "1870": 17447895040.0, "1875": 17447890944.0, "1880": 17447884800.0, "1885": 17447880704.0, "1890": 17447882752.0, "1895": 17447882752.0, "1900": 17447864320.0, "1905": 17447888896.0, "1910": 17447884800.0, "1915": 17447886848.0, "1920": 17447831552.0, "1925": 17447903232.0, "1930": 17447872512.0, "1935": 17447878656.0, "1940": 17447882752.0, "1945": 17447884800.0, "1950": 17447862272.0, "1955": 17447886848.0, "1960": 17447860224.0, "1965": 17447858176.0, "1970": 17447897088.0, "1975": 17447849984.0, "1980": 17447878656.0, "1985": 17447876608.0, "1990": 17447878656.0, "1995": 17447880704.0, "2000": 17447884800.0, "2005": 
17447907328.0, "2010": 17447890944.0, "2015": 17447872512.0, "2020": 17447870464.0, "2025": 17447872512.0, "2030": 17447884800.0, "2035": 17447870464.0, "2040": 17447888896.0, "2045": 17447874560.0, "2050": 17447876608.0, "2055": 17447895040.0, "2060": 17447876608.0, "2065": 17447892992.0, "2070": 17447888896.0, "2075": 17447892992.0, "2080": 17447870464.0, "2085": 17447868416.0, "2090": 17447872512.0, "2095": 17447884800.0, "2100": 17447880704.0, "2105": 17447864320.0, "2110": 17447890944.0, "2115": 17447882752.0, "2120": 17447882752.0, "2125": 17447882752.0, "2130": 17447876608.0, "2135": 17447876608.0, "2140": 17447874560.0, "2145": 17447884800.0, "2150": 17447888896.0, "2155": 17447884800.0, "2160": 17447878656.0, "2165": 17447870464.0, "2170": 17447868416.0, "2175": 17447886848.0, "2180": 17447886848.0, "2185": 17447862272.0, "2190": 17447880704.0, "2195": 17447874560.0, "2200": 17447849984.0, "2205": 17447880704.0, "2210": 17447892992.0, "2215": 17447890944.0, "2220": 17447872512.0, "2225": 17447878656.0, "2230": 17447852032.0, "2235": 17447890944.0, "2240": 17447882752.0, "2245": 17447888896.0, "2250": 17447886848.0, "2255": 17447884800.0, "2260": 17447876608.0, "2265": 17447858176.0, "2270": 17447895040.0, "2275": 17447876608.0, "2280": 17447884800.0, "2285": 17447927808.0, "2290": 17447890944.0, "2295": 17447874560.0, "2300": 17447888896.0, "2305": 17447878656.0, "2310": 17447870464.0, "2315": 17447833600.0, "2320": 17447890944.0, "2325": 17447884800.0, "2330": 17447839744.0, "2335": 17447878656.0, "2340": 17447874560.0, "2345": 17447874560.0, "2350": 17447870464.0, "2355": 17447878656.0, "2360": 17447872512.0, "2365": 17447882752.0, "2370": 17447872512.0, "2375": 17447862272.0, "2380": 17447882752.0, "2385": 17447856128.0, "2390": 17447886848.0, "2395": 17447899136.0, "2400": 17447886848.0, "2405": 17447884800.0, "2410": 17447876608.0, "2415": 17447868416.0, "2420": 17447870464.0, "2425": 17447868416.0, "2430": 17447878656.0, "2435": 17447845888.0, "2440": 17447870464.0, "2445": 17447874560.0, "2450": 17447866368.0, "2455": 17447886848.0, "2460": 17447878656.0, "2465": 17447890944.0, "2470": 17447876608.0, "2475": 17447874560.0, "2480": 17447895040.0, "2485": 17447880704.0, "2490": 17447878656.0, "2495": 17447874560.0, "2500": 17447890944.0, "2505": 17447868416.0, "2510": 17447897088.0, "2515": 17448921088.0, "2520": 17447862272.0, "2525": 17447880704.0, "2530": 17447880704.0, "2535": 17447886848.0, "2540": 17447870464.0, "2545": 17447870464.0, "2550": 17447884800.0, "2555": 17447882752.0, "2560": 17447886848.0, "2565": 17447858176.0, "2570": 17447872512.0, "2575": 17447897088.0, "2580": 17447858176.0, "2585": 17447841792.0, "2590": 17447886848.0, "2595": 17447866368.0, "2600": 17447856128.0, "2605": 17447907328.0, "2610": 17447839744.0, "2615": 17447895040.0, "2620": 17447843840.0, "2625": 17447882752.0, "2630": 17447858176.0, "2635": 17447905280.0, "2640": 17447882752.0, "2645": 17447897088.0, "2650": 17447862272.0, "2655": 17447874560.0, "2660": 17447886848.0, "2665": 17447876608.0, "2670": 17447874560.0, "2675": 17447876608.0, "2680": 17447880704.0, "2685": 17447878656.0, "2690": 17447874560.0, "2695": 17447895040.0, "2700": 17447858176.0, "2705": 17447888896.0, "2710": 17447876608.0, "2715": 17447874560.0, "2720": 17447878656.0, "2725": 17447876608.0, "2730": 17447880704.0, "2735": 17447882752.0, "2740": 17447860224.0, "2745": 17447870464.0, "2750": 17447899136.0, "2755": 17447886848.0, "2760": 17447866368.0, "2765": 17447868416.0, "2770": 17447888896.0, "2775": 
17447882752.0, "2780": 17447886848.0, "2785": 17447870464.0, "2790": 17447872512.0, "2795": 17447872512.0, "2800": 17447878656.0, "2805": 17447880704.0, "2810": 17447878656.0, "2815": 17447890944.0, "2820": 17447892992.0, "2825": 17447886848.0, "2830": 17447878656.0, "2835": 17447868416.0, "2840": 17447878656.0, "2845": 17447874560.0, "2850": 17447868416.0, "2855": 17447864320.0, "2860": 17447890944.0, "2865": 17447882752.0, "2870": 17447882752.0, "2875": 17447890944.0, "2880": 17447901184.0, "2885": 17447882752.0, "2890": 17447907328.0, "2895": 17447841792.0, "2900": 17447878656.0, "2905": 17447880704.0, "2910": 17447854080.0, "2915": 17447878656.0, "2920": 17447872512.0, "2925": 17447880704.0, "2930": 17447886848.0, "2935": 17447884800.0, "2940": 17447884800.0, "2945": 17447886848.0, "2950": 17447888896.0, "2955": 17447964672.0, "2960": 17447895040.0, "2965": 17447884800.0, "2970": 17447884800.0, "2975": 17447862272.0, "2980": 17447874560.0, "2985": 17447882752.0, "2990": 17447870464.0, "2995": 17447886848.0, "3000": 17447880704.0, "3005": 17447874560.0, "3010": 17447868416.0, "3015": 17447882752.0, "3020": 17447884800.0, "3025": 17447864320.0, "3030": 17447890944.0, "3035": 17447872512.0, "3040": 17447874560.0, "3045": 17447858176.0, "3050": 17447892992.0, "3055": 17447888896.0, "3060": 17447880704.0, "3065": 17447874560.0, "3070": 17447901184.0, "3075": 17447878656.0, "3080": 17447874560.0, "3085": 17447884800.0, "3090": 17447886848.0, "3095": 17447874560.0, "3100": 17447882752.0, "3105": 17447884800.0, "3110": 17447882752.0, "3115": 17447882752.0, "3120": 17447878656.0, "3125": 17447880704.0, "3130": 17447880704.0, "3135": 17447923712.0, "3140": 17447899136.0, "3145": 17447874560.0, "3150": 17447888896.0, "3155": 17447878656.0, "3160": 17447913472.0, "3165": 17447866368.0, "3170": 17447903232.0, "3175": 17447878656.0, "3180": 17447890944.0, "3185": 17447878656.0, "3190": 17447872512.0, "3195": 17447903232.0, "3200": 17447858176.0, "3205": 17447882752.0, "3210": 17447890944.0, "3215": 17447888896.0, "3220": 17447884800.0, "3225": 17447888896.0, "3230": 17447872512.0, "3235": 17447890944.0, "3240": 17447874560.0, "3245": 17447890944.0, "3250": 17447876608.0, "3255": 17447886848.0, "3260": 17447870464.0, "3265": 17447880704.0, "3270": 17447876608.0, "3275": 17447870464.0, "3280": 17447872512.0, "3285": 17447882752.0, "3290": 17447890944.0, "3295": 17447866368.0, "3300": 17447880704.0, "3305": 17448617984.0, "3310": 17448603648.0, "3315": 17448599552.0, "3320": 17448601600.0, "3325": 17448615936.0, "3330": 17448609792.0, "3335": 17448617984.0, "3340": 17448620032.0, "3345": 17448622080.0, "3350": 17448605696.0, "3355": 17448615936.0, "3360": 17448601600.0, "3365": 17448605696.0, "3370": 17448599552.0, "3375": 17448607744.0, "3380": 17448609792.0, "3385": 17448613888.0, "3390": 17448628224.0, "3395": 17448601600.0, "3400": 17448626176.0, "3405": 17448609792.0, "3410": 17448601600.0, "3415": 17448605696.0, "3420": 17448609792.0, "3425": 17448593408.0, "3430": 17448617984.0, "3435": 17448605696.0, "3440": 17448607744.0, "3445": 17448611840.0, "3450": 17448620032.0, "3455": 17448603648.0, "3460": 17448603648.0, "3465": 17448581120.0, "3470": 17448601600.0, "3475": 17448599552.0, "3480": 17448603648.0, "3485": 17448617984.0, "3490": 17448624128.0, "3495": 17448607744.0, "3500": 17448607744.0, "3505": 17448620032.0, "3510": 17448597504.0, "3515": 17448599552.0, "3520": 17448601600.0, "3525": 17448624128.0, "3530": 17448611840.0, "3535": 17448601600.0, "3540": 17448603648.0, "3545": 
17448607744.0, "3550": 17448605696.0, "3555": 17448613888.0, "3560": 17448607744.0, "3565": 17448613888.0, "3570": 17448615936.0, "3575": 17448579072.0, "3580": 17448636416.0, "3585": 17448615936.0, "3590": 17448595456.0, "3595": 17448611840.0, "3600": 17448617984.0, "3605": 17448605696.0, "3610": 17448607744.0, "3615": 17448603648.0, "3620": 17448599552.0, "3625": 17448611840.0, "3630": 17448609792.0, "3635": 17448607744.0, "3640": 17448603648.0, "3645": 17448605696.0, "3650": 17448609792.0, "3655": 17448603648.0, "3660": 17448617984.0, "3665": 17448607744.0, "3670": 17448609792.0, "3675": 17448595456.0, "3680": 17448617984.0, "3685": 17448622080.0, "3690": 17448605696.0, "3695": 17448630272.0, "3700": 17448603648.0, "3705": 17448617984.0, "3710": 17448611840.0, "3715": 17448656896.0, "3720": 17448603648.0, "3725": 17448595456.0, "3730": 17448613888.0, "3735": 17448599552.0, "3740": 17448613888.0, "3745": 17448607744.0, "3750": 17448607744.0, "3755": 17448599552.0, "3760": 17448609792.0, "3765": 17448611840.0, "3770": 17448583168.0, "3775": 17448617984.0, "3780": 17448607744.0, "3785": 17448611840.0, "3790": 17448599552.0, "3795": 17448613888.0, "3800": 17448613888.0, "3805": 17448642560.0, "3810": 17448595456.0, "3815": 17448595456.0, "3820": 17448593408.0, "3825": 17448613888.0, "3830": 17448591360.0, "3835": 17448611840.0, "3840": 17448615936.0, "3845": 17448640512.0, "3850": 17448605696.0, "3855": 17448611840.0, "3860": 17448607744.0, "3865": 17448609792.0, "3870": 17448593408.0, "3875": 17447880704.0, "3880": 17447868416.0, "3885": 17447892992.0, "3890": 17447882752.0, "3895": 17447870464.0, "3900": 17447882752.0, "3905": 17447847936.0, "3910": 17447866368.0, "3915": 17447884800.0, "3920": 17447878656.0, "3925": 17447899136.0, "3930": 17447870464.0, "3935": 17447864320.0, "3940": 17447870464.0, "3945": 17447886848.0, "3950": 17447878656.0, "3955": 17447884800.0, "3960": 17447882752.0, "3965": 17447852032.0, "3970": 17447858176.0, "3975": 17447874560.0, "3980": 17447866368.0, "3985": 17447882752.0, "3990": 17447888896.0, "3995": 17447862272.0, "4000": 17447876608.0, "4005": 17447878656.0, "4010": 17447878656.0, "4015": 17447888896.0, "4020": 17447880704.0, "4025": 17447878656.0, "4030": 17447882752.0, "4035": 17447880704.0, "4040": 17447880704.0, "4045": 17447888896.0, "4050": 17447892992.0, "4055": 17447866368.0, "4060": 17447884800.0, "4065": 17447876608.0, "4070": 17447886848.0, "4075": 17447884800.0, "4080": 17447874560.0, "4085": 17447892992.0, "4090": 17447882752.0, "4095": 17447901184.0, "4100": 17447886848.0, "4105": 17447870464.0, "4110": 17447895040.0, "4115": 17448921088.0, "4120": 17447868416.0, "4125": 17447860224.0, "4130": 17447872512.0, "4135": 17447878656.0, "4140": 17447870464.0, "4145": 17447874560.0, "4150": 17447878656.0, "4155": 17447874560.0, "4160": 17447874560.0, "4165": 17447862272.0, "4170": 17447880704.0, "4175": 17447876608.0, "4180": 17447876608.0, "4185": 17447874560.0, "4190": 17447890944.0, "4195": 17447874560.0, "4200": 17447874560.0, "4205": 17447882752.0, "4210": 17447884800.0, "4215": 17447884800.0, "4220": 17447878656.0, "4225": 17447866368.0, "4230": 17447882752.0, "4235": 17447892992.0, "4240": 17447878656.0, "4245": 17447876608.0, "4250": 17447878656.0, "4255": 17447866368.0, "4260": 17447876608.0, "4265": 17447886848.0, "4270": 17447866368.0, "4275": 17447874560.0, "4280": 17447888896.0, "4285": 17447895040.0, "4290": 17447878656.0, "4295": 17447880704.0, "4300": 17447888896.0, "4305": 17447882752.0, "4310": 17447882752.0, "4315": 
17447878656.0, "4320": 17447874560.0, "4325": 17447882752.0, "4330": 17447890944.0, "4335": 17447886848.0, "4340": 17447882752.0, "4345": 17447890944.0, "4350": 17447890944.0, "4355": 17447888896.0, "4360": 17447878656.0, "4365": 17447876608.0, "4370": 17447870464.0, "4375": 17447878656.0, "4380": 17447870464.0, "4385": 17447886848.0, "4390": 17447878656.0, "4395": 17447890944.0, "4400": 17447874560.0, "4405": 17447882752.0, "4410": 17447876608.0, "4415": 17447880704.0, "4420": 17447886848.0, "4425": 17447872512.0, "4430": 17447874560.0, "4435": 17447880704.0, "4440": 17447876608.0, "4445": 17447866368.0, "4450": 17447882752.0, "4455": 17447880704.0, "4460": 17447880704.0, "4465": 17447884800.0, "4470": 17447878656.0, "4475": 17447884800.0, "4480": 17447884800.0, "4485": 17447858176.0, "4490": 17447862272.0, "4495": 17447882752.0, "4500": 17447890944.0, "4505": 17447886848.0, "4510": 17447847936.0, "4515": 17447874560.0, "4520": 17447895040.0, "4525": 17447892992.0, "4530": 17447888896.0, "4535": 17447880704.0, "4540": 17447874560.0, "4545": 17447876608.0, "4550": 17447882752.0, "4555": 17447872512.0, "4560": 17447884800.0, "4565": 17447866368.0, "4570": 17447866368.0, "4575": 17447868416.0, "4580": 17447897088.0, "4585": 17447882752.0, "4590": 17447890944.0, "4595": 17447901184.0, "4600": 17447882752.0, "4605": 17448906752.0, "4610": 17448878080.0, "4615": 17448892416.0, "4620": 17448886272.0, "4625": 17448892416.0, "4630": 17448900608.0, "4635": 17448923136.0, "4640": 17448890368.0, "4645": 17448894464.0, "4650": 17448906752.0, "4655": 17448886272.0, "4660": 17448886272.0, "4665": 17448904704.0, "4670": 17448884224.0, "4675": 17448886272.0, "4680": 17448896512.0, "4685": 17448904704.0, "4690": 17448867840.0, "4695": 17448902656.0, "4700": 17448906752.0, "4705": 17448898560.0, "4710": 17448890368.0, "4715": 17448898560.0, "4720": 17448894464.0, "4725": 17448896512.0, "4730": 17448892416.0, "4735": 17448896512.0, "4740": 17448898560.0, "4745": 17448884224.0, "4750": 17448890368.0, "4755": 17448919040.0, "4760": 17448902656.0, "4765": 17448882176.0, "4770": 17448880128.0, "4775": 17448896512.0, "4780": 17448888320.0, "4785": 17448902656.0, "4790": 17448900608.0, "4795": 17448902656.0, "4800": 17448890368.0, "4805": 17448902656.0, "4810": 17448896512.0, "4815": 17448910848.0, "4820": 17448869888.0, "4825": 17448880128.0, "4830": 17448902656.0, "4835": 17448906752.0, "4840": 17448904704.0, "4845": 17448916992.0, "4850": 17448910848.0, "4855": 17448886272.0, "4860": 17448896512.0, "4865": 17448908800.0, "4870": 17448890368.0, "4875": 17448890368.0, "4880": 17448890368.0, "4885": 17448894464.0, "4890": 17448894464.0, "4895": 17448896512.0, "4900": 17448906752.0, "4905": 17448908800.0, "4910": 17448900608.0, "4915": 17448896512.0, "4920": 17448900608.0, "4925": 17448900608.0, "4930": 17448904704.0, "4935": 17448888320.0, "4940": 17448898560.0, "4945": 17448904704.0, "4950": 17448898560.0, "4955": 17448906752.0, "4960": 17448892416.0, "4965": 17448878080.0, "4970": 17448886272.0, "4975": 17448900608.0, "4980": 17448896512.0, "4985": 17448888320.0, "4990": 17448894464.0, "4995": 17448910848.0, "5000": 17448882176.0, "5005": 17448896512.0, "5010": 17448898560.0, "5015": 17448894464.0, "5020": 17448921088.0, "5025": 17448904704.0, "5030": 17448908800.0, "5035": 17448898560.0, "5040": 17448894464.0, "5045": 17448908800.0, "5050": 17448884224.0, "5055": 17448886272.0, "5060": 17448876032.0, "5065": 17448894464.0, "5070": 17448906752.0, "5075": 17447862272.0, "5080": 17447858176.0, "5085": 
17447897088.0, "5090": 17447876608.0, "5095": 17447874560.0, "5100": 17447886848.0, "5105": 17447886848.0, "5110": 17447886848.0, "5115": 17447876608.0, "5120": 17447892992.0, "5125": 17447884800.0, "5130": 17447880704.0, "5135": 17447897088.0, "5140": 17447874560.0, "5145": 17447874560.0, "5150": 17447876608.0, "5155": 17447890944.0, "5160": 17447866368.0, "5165": 17447884800.0, "5170": 17448206336.0, "5175": 17447874560.0, "5180": 17447866368.0, "5185": 17447888896.0, "5190": 17447864320.0, "5195": 17447899136.0, "5200": 17447878656.0, "5205": 17447882752.0, "5210": 17447872512.0, "5215": 17447880704.0, "5220": 17447882752.0, "5225": 17447874560.0, "5230": 17447874560.0, "5235": 17447880704.0, "5240": 17447886848.0, "5245": 17447876608.0, "5250": 17447874560.0, "5255": 17447884800.0, "5260": 17448574976.0, "5265": 17447872512.0, "5270": 17447870464.0, "5275": 17447890944.0, "5280": 17447878656.0, "5285": 17447882752.0, "5290": 17447905280.0, "5295": 17447886848.0, "5300": 17447870464.0, "5305": 17447878656.0, "5310": 17447880704.0, "5315": 17447880704.0, "5320": 17447882752.0, "5325": 17447872512.0, "5330": 17447860224.0, "5335": 17447864320.0, "5340": 17447870464.0, "5345": 17447878656.0, "5350": 17447892992.0, "5355": 17447876608.0, "5360": 17447886848.0, "5365": 17447884800.0, "5370": 17447860224.0, "5375": 17447878656.0, "5380": 17447868416.0, "5385": 17447866368.0, "5390": 17447884800.0, "5395": 17447870464.0, "5400": 17447876608.0, "5405": 17447870464.0, "5410": 17447880704.0, "5415": 17447886848.0, "5420": 17447884800.0, "5425": 17447899136.0, "5430": 17447882752.0, "5435": 17448077312.0, "5440": 17447884800.0, "5445": 17447866368.0, "5450": 17447866368.0, "5455": 17447888896.0, "5460": 17447886848.0, "5465": 17447880704.0, "5470": 17447882752.0, "5475": 17447874560.0, "5480": 17447858176.0, "5485": 17447897088.0, "5490": 17447890944.0, "5495": 17447886848.0, "5500": 17447911424.0, "5505": 17447874560.0, "5510": 17447884800.0, "5515": 17447886848.0, "5520": 17447882752.0, "5525": 17447880704.0, "5530": 17447880704.0, "5535": 17447872512.0, "5540": 17447895040.0, "5545": 17447884800.0, "5550": 17447886848.0, "5555": 17447878656.0, "5560": 17447874560.0, "5565": 17448089600.0, "5570": 17447884800.0, "5575": 17447886848.0, "5580": 17447878656.0, "5585": 17447831552.0, "5590": 17447870464.0, "5595": 17447892992.0, "5600": 17447899136.0, "5605": 17447872512.0, "5610": 17447882752.0, "5615": 17447884800.0, "5620": 17447874560.0, "5625": 17447878656.0, "5630": 17447884800.0, "5635": 17447866368.0, "5640": 17447870464.0, "5645": 17447882752.0, "5650": 17447890944.0, "5655": 17447890944.0, "5660": 17447884800.0, "5665": 17447886848.0, "5670": 17447880704.0, "5675": 17447866368.0, "5680": 17447872512.0, "5685": 17447884800.0, "5690": 17447878656.0, "5695": 17447878656.0, "5700": 17447880704.0, "5705": 17447854080.0, "5710": 17447878656.0, "5715": 17447868416.0, "5720": 17447895040.0, "5725": 17447870464.0, "5730": 17447866368.0, "5735": 17447874560.0, "5740": 17447880704.0, "5745": 17447897088.0, "5750": 17447884800.0, "5755": 17447884800.0, "5760": 17447862272.0, "5765": 17447858176.0, "5770": 17447872512.0, "5775": 17447888896.0, "5780": 17447862272.0, "5785": 17447882752.0, "5790": 17447882752.0, "5795": 17447882752.0, "5800": 17447884800.0, "5805": 17447876608.0, "5810": 17447862272.0, "5815": 17447884800.0, "5820": 17447913472.0, "5825": 17447858176.0, "5830": 17447874560.0, "5835": 17447886848.0, "5840": 17447874560.0, "5845": 17447886848.0, "5850": 17447888896.0, "5855": 
17447866368.0, "5860": 17447876608.0, "5865": 17447886848.0, "5870": 17447882752.0, "5875": 17447886848.0, "5880": 17447890944.0, "5885": 17447880704.0, "5890": 17447888896.0, "5895": 17448878080.0, "5900": 17447886848.0, "5905": 17447880704.0, "5910": 17447884800.0, "5915": 17447878656.0, "5920": 17447909376.0, "5925": 17447886848.0, "5930": 17447876608.0, "5935": 17447878656.0, "5940": 17447862272.0, "5945": 17447899136.0, "5950": 17447878656.0, "5955": 17447878656.0, "5960": 17447878656.0, "5965": 17447882752.0, "5970": 17447899136.0, "5975": 17447876608.0, "5980": 17447884800.0, "5985": 17447876608.0, "5990": 17447880704.0, "5995": 17447884800.0, "6000": 17447866368.0, "6005": 17447868416.0, "6010": 17447882752.0, "6015": 17447892992.0, "6020": 17447886848.0, "6025": 17447864320.0, "6030": 17447888896.0, "6035": 17448200192.0, "6040": 17447888896.0, "6045": 17447884800.0, "6050": 17447874560.0, "6055": 17447886848.0, "6060": 17447860224.0, "6065": 17447880704.0, "6070": 17447870464.0, "6075": 17447882752.0, "6080": 17447880704.0, "6085": 17447895040.0, "6090": 17447880704.0, "6095": 17447882752.0, "6100": 17447878656.0, "6105": 17447878656.0, "6110": 17447890944.0, "6115": 17447874560.0, "6120": 17447888896.0, "6125": 17447874560.0, "6130": 17447882752.0, "6135": 17447870464.0, "6140": 17447884800.0, "6145": 17447884800.0, "6150": 17447890944.0, "6155": 17447878656.0, "6160": 17447919616.0, "6165": 17447886848.0, "6170": 17447899136.0, "6175": 17447868416.0, "6180": 17447884800.0, "6185": 17447892992.0, "6190": 17447874560.0, "6195": 17447878656.0, "6200": 17447882752.0, "6205": 17447876608.0, "6210": 17447878656.0, "6215": 17447872512.0, "6220": 17447895040.0, "6225": 17447886848.0, "6230": 17447870464.0, "6235": 17447884800.0, "6240": 17447858176.0, "6245": 17447884800.0, "6250": 17447878656.0, "6255": 17447878656.0, "6260": 17447870464.0, "6265": 17447880704.0, "6270": 17447880704.0, "6275": 17447884800.0, "6280": 17447880704.0, "6285": 17447882752.0, "6290": 17447878656.0, "6295": 17447876608.0, "6300": 17447882752.0, "6305": 17447878656.0, "6310": 17447860224.0, "6315": 17447890944.0, "6320": 17447899136.0, "6325": 17447878656.0, "6330": 17447876608.0, "6335": 17447884800.0, "6340": 17447874560.0, "6345": 17447866368.0, "6350": 17447870464.0, "6355": 17447882752.0, "6360": 17447858176.0, "6365": 17447870464.0, "6370": 17447874560.0, "6375": 17447874560.0, "6380": 17447864320.0, "6385": 17447878656.0, "6390": 17447878656.0, "6395": 17447895040.0, "6400": 17447876608.0, "6405": 17447870464.0, "6410": 17447886848.0, "6415": 17447878656.0, "6420": 17447874560.0, "6425": 17447866368.0, "6430": 17447874560.0, "6435": 17447878656.0, "6440": 17447870464.0, "6445": 17447874560.0, "6450": 17447890944.0, "6455": 17447884800.0, "6460": 17447866368.0, "6465": 17447876608.0, "6470": 17447866368.0, "6475": 17447884800.0, "6480": 17447882752.0, "6485": 17447884800.0, "6490": 17447878656.0, "6495": 17447890944.0, "6500": 17447858176.0, "6505": 17447880704.0, "6510": 17447880704.0, "6515": 17447890944.0, "6520": 17447870464.0, "6525": 17447882752.0, "6530": 17447870464.0, "6535": 17447882752.0, "6540": 17447868416.0, "6545": 17447886848.0, "6550": 17447878656.0, "6555": 17447874560.0, "6560": 17447882752.0, "6565": 17447876608.0, "6570": 17447880704.0, "6575": 17447878656.0, "6580": 17447876608.0, "6585": 17447892992.0, "6590": 17447874560.0, "6595": 17447899136.0, "6600": 17447878656.0, "6605": 17447999488.0, "6610": 17447909376.0, "6615": 17447878656.0, "6620": 17447884800.0, "6625": 
17447874560.0, "6630": 17447870464.0, "6635": 17447878656.0, "6640": 17447878656.0, "6645": 17447882752.0, "6650": 17447886848.0, "6655": 17447876608.0, "6660": 17447876608.0, "6665": 17447890944.0, "6670": 17447874560.0, "6675": 17447899136.0, "6680": 17447890944.0, "6685": 17447874560.0, "6690": 17447878656.0, "6695": 17447884800.0, "6700": 17447874560.0, "6705": 17447864320.0, "6710": 17447882752.0, "6715": 17447882752.0, "6720": 17447890944.0, "6725": 17447878656.0, "6730": 17447876608.0, "6735": 17447882752.0, "6740": 17447878656.0, "6745": 17447872512.0, "6750": 17447890944.0, "6755": 17447866368.0, "6760": 17447862272.0, "6765": 17448001536.0, "6770": 17448003584.0, "6775": 17448005632.0, "6780": 17448001536.0, "6785": 17447976960.0, "6790": 17447999488.0, "6795": 17447997440.0, "6800": 17448013824.0, "6805": 17447997440.0, "6810": 17447995392.0, "6815": 17448005632.0, "6820": 17448005632.0, "6825": 17448009728.0, "6830": 17448005632.0, "6835": 17447999488.0, "6840": 17448007680.0, "6845": 17447993344.0, "6850": 17448011776.0, "6855": 17448005632.0, "6860": 17448017920.0, "6865": 17448009728.0, "6870": 17448015872.0, "6875": 17448005632.0, "6880": 17447997440.0, "6885": 17448009728.0, "6890": 17448001536.0, "6895": 17447991296.0, "6900": 17448017920.0, "6905": 17448003584.0, "6910": 17448007680.0, "6915": 17447999488.0, "6920": 17447985152.0, "6925": 17448022016.0, "6930": 17448001536.0, "6935": 17447993344.0, "6940": 17447997440.0, "6945": 17448001536.0, "6950": 17447991296.0, "6955": 17447997440.0, "6960": 17447997440.0, "6965": 17448026112.0, "6970": 17448005632.0, "6975": 17448001536.0, "6980": 17448009728.0, "6985": 17447989248.0, "6990": 17448003584.0, "6995": 17448013824.0, "7000": 17448005632.0, "7005": 17448007680.0, "7010": 17447999488.0, "7015": 17447993344.0, "7020": 17448003584.0, "7025": 17448007680.0, "7030": 17447999488.0, "7035": 17448005632.0, "7040": 17447999488.0, "7045": 17447989248.0, "7050": 17448007680.0, "7055": 17448003584.0, "7060": 17448003584.0, "7065": 17448017920.0, "7070": 17448001536.0, "7075": 17447991296.0, "7080": 17448005632.0, "7085": 17448015872.0, "7090": 17448013824.0, "7095": 17448001536.0, "7100": 17448032256.0, "7105": 17448007680.0, "7110": 17448001536.0, "7115": 17448589312.0, "7120": 17447999488.0, "7125": 17447985152.0, "7130": 17448009728.0, "7135": 17447995392.0, "7140": 17448005632.0, "7145": 17448204288.0, "7150": 17447985152.0, "7155": 17448011776.0, "7160": 17448007680.0, "7165": 17448001536.0, "7170": 17448013824.0, "7175": 17448009728.0, "7180": 17448007680.0, "7185": 17447993344.0, "7190": 17448017920.0, "7195": 17448007680.0, "7200": 17448001536.0, "7205": 17448005632.0, "7210": 17448007680.0, "7215": 17448005632.0, "7220": 17447993344.0, "7225": 17448001536.0, "7230": 17448005632.0, "7235": 17447997440.0, "7240": 17447999488.0, "7245": 17448013824.0, "7250": 17448003584.0, "7255": 17447989248.0, "7260": 17448005632.0, "7265": 17448005632.0, "7270": 17447995392.0, "7275": 17448017920.0, "7280": 17447993344.0, "7285": 17447995392.0, "7290": 17448009728.0, "7295": 17448007680.0, "7300": 17448009728.0, "7305": 17448701952.0, "7310": 17447997440.0, "7315": 17448003584.0, "7320": 17447987200.0, "7325": 17447997440.0, "7330": 17448005632.0, "7335": 17448114176.0, "7340": 17448116224.0, "7345": 17448112128.0, "7350": 17448136704.0, "7355": 17448114176.0, "7360": 17448108032.0, "7365": 17448120320.0, "7370": 17448134656.0, "7375": 17448114176.0, "7380": 17448101888.0, "7385": 17448110080.0, "7390": 17448120320.0, "7395": 
17448110080.0, "7400": 17448140800.0, "7405": 17448108032.0, "7410": 17448116224.0, "7415": 17448128512.0, "7420": 17448136704.0, "7425": 17448118272.0, "7430": 17448120320.0, "7435": 17448114176.0, "7440": 17448116224.0, "7445": 17448112128.0, "7450": 17448118272.0, "7455": 17448124416.0, "7460": 17448118272.0, "7465": 17448124416.0, "7470": 17448118272.0, "7475": 17448091648.0, "7480": 17448114176.0, "7485": 17448120320.0, "7490": 17448126464.0, "7495": 17448116224.0, "7500": 17448118272.0, "7505": 17448136704.0, "7510": 17448120320.0, "7515": 17448108032.0, "7520": 17448120320.0, "7525": 17448116224.0, "7530": 17448108032.0, "7535": 17448120320.0, "7540": 17448110080.0, "7545": 17448118272.0, "7550": 17448114176.0, "7555": 17448136704.0, "7560": 17448124416.0, "7565": 17448112128.0, "7570": 17448112128.0, "7575": 17448108032.0, "7580": 17448124416.0, "7585": 17448116224.0, "7590": 17448116224.0, "7595": 17448116224.0, "7600": 17448118272.0, "7605": 17448112128.0, "7610": 17448126464.0, "7615": 17448120320.0, "7620": 17448118272.0, "7625": 17448120320.0, "7630": 17448110080.0, "7635": 17448122368.0, "7640": 17448118272.0, "7645": 17448116224.0, "7650": 17448122368.0, "7655": 17448105984.0, "7660": 17448120320.0, "7665": 17448112128.0, "7670": 17448118272.0, "7675": 17448118272.0, "7680": 17448097792.0, "7685": 17448126464.0, "7690": 17448091648.0, "7695": 17448112128.0, "7700": 17448110080.0, "7705": 17448122368.0, "7710": 17448116224.0, "7715": 17448105984.0, "7720": 17448116224.0, "7725": 17448114176.0, "7730": 17448116224.0, "7735": 17448138752.0, "7740": 17448103936.0, "7745": 17448089600.0, "7750": 17448116224.0, "7755": 17448114176.0, "7760": 17448116224.0, "7765": 17448120320.0, "7770": 17448112128.0, "7775": 17448120320.0, "7780": 17448122368.0, "7785": 17448112128.0, "7790": 17448124416.0, "7795": 17448116224.0, "7800": 17448116224.0, "7805": 17448114176.0, "7810": 17448116224.0, "7815": 17448112128.0, "7820": 17448132608.0, "7825": 17448128512.0, "7830": 17448118272.0, "7835": 17448105984.0, "7840": 17448126464.0, "7845": 17448138752.0, "7850": 17448120320.0, "7855": 17448132608.0, "7860": 17448108032.0, "7865": 17448122368.0, "7870": 17448124416.0, "7875": 17448108032.0, "7880": 17448112128.0, "7885": 17448103936.0, "7890": 17448126464.0, "7895": 17448124416.0, "7900": 17447886848.0, "7905": 17447874560.0, "7910": 17447880704.0, "7915": 17447890944.0, "7920": 17447874560.0, "7925": 17447895040.0, "7930": 17447886848.0, "7935": 17447880704.0, "7940": 17447890944.0, "7945": 17447866368.0, "7950": 17447876608.0, "7955": 17447876608.0, "7960": 17447880704.0, "7965": 17447858176.0, "7970": 17447878656.0, "7975": 17447890944.0, "7980": 17447880704.0, "7985": 17447876608.0, "7990": 17447866368.0, "7995": 17447884800.0, "8000": 17447880704.0, "8005": 17447862272.0, "8010": 17447870464.0, "8015": 17447870464.0, "8020": 17447876608.0, "8025": 17447878656.0, "8030": 17447878656.0, "8035": 17447874560.0, "8040": 17447878656.0, "8045": 17447890944.0, "8050": 17447862272.0, "8055": 17447886848.0, "8060": 17447874560.0, "8065": 17447884800.0, "8070": 17447882752.0, "8075": 17447882752.0, "8080": 17447884800.0, "8085": 17447895040.0, "8090": 17447858176.0, "8095": 17447886848.0, "8100": 17447882752.0, "8105": 17447886848.0, "8110": 17447874560.0, "8115": 17447874560.0, "8120": 17447886848.0, "8125": 17447884800.0, "8130": 17447880704.0, "8135": 17447884800.0, "8140": 17447886848.0, "8145": 17447870464.0, "8150": 17447876608.0, "8155": 17447862272.0, "8160": 17447884800.0, "8165": 
17447878656.0, "8170": 17447872512.0, "8175": 17447882752.0, "8180": 17448095744.0, "8185": 17447858176.0, "8190": 17447890944.0, "8195": 17447878656.0, "8200": 17447866368.0, "8205": 17447862272.0, "8210": 17447874560.0, "8215": 17447866368.0, "8220": 17447882752.0, "8225": 17447874560.0, "8230": 17447870464.0, "8235": 17447890944.0, "8240": 17447884800.0, "8245": 17447876608.0, "8250": 17447882752.0, "8255": 17447882752.0, "8260": 17447870464.0, "8265": 17447882752.0, "8270": 17447870464.0, "8275": 17448144896.0, "8280": 17447882752.0, "8285": 17447878656.0, "8290": 17447876608.0, "8295": 17447884800.0, "8300": 17447880704.0, "8305": 17447897088.0, "8310": 17447866368.0, "8315": 17447878656.0, "8320": 17447882752.0, "8325": 17447878656.0, "8330": 17447872512.0, "8335": 17447884800.0, "8340": 17447882752.0, "8345": 17447880704.0, "8350": 17447901184.0, "8355": 17447874560.0, "8360": 17447886848.0, "8365": 17447878656.0, "8370": 17447874560.0, "8375": 17447878656.0, "8380": 17447878656.0, "8385": 17447890944.0, "8390": 17447870464.0, "8395": 17447888896.0, "8400": 17447884800.0, "8405": 17447866368.0, "8410": 17447882752.0, "8415": 17447862272.0, "8420": 17447874560.0, "8425": 17447874560.0, "8430": 17447864320.0, "8435": 17447872512.0, "8440": 17447886848.0, "8445": 17447866368.0, "8450": 17448505344.0, "8455": 17447870464.0, "8460": 17447874560.0, "8465": 17447895040.0, "8470": 17447878656.0, "8475": 17447880704.0, "8480": 17447897088.0, "8485": 17447876608.0, "8490": 17447880704.0, "8495": 17447878656.0, "8500": 17447876608.0, "8505": 17447888896.0, "8510": 17447878656.0, "8515": 17447864320.0, "8520": 17447860224.0, "8525": 17447890944.0, "8530": 17447874560.0, "8535": 17447862272.0, "8540": 17447899136.0, "8545": 17447882752.0, "8550": 17447886848.0, "8555": 17447892992.0, "8560": 17447868416.0, "8565": 17447886848.0, "8570": 17447899136.0, "8575": 17447878656.0, "8580": 17447866368.0, "8585": 17447884800.0, "8590": 17447870464.0, "8595": 17447870464.0, "8600": 17447890944.0, "8605": 17447886848.0, "8610": 17447866368.0, "8615": 17447882752.0, "8620": 17447878656.0, "8625": 17447888896.0, "8630": 17447870464.0, "8635": 17447874560.0, "8640": 17447882752.0, "8645": 17447882752.0, "8650": 17447890944.0, "8655": 17447880704.0, "8660": 17447868416.0, "8665": 17447872512.0, "8670": 17447860224.0, "8675": 17447880704.0, "8680": 17447890944.0, "8685": 17447874560.0, "8690": 17447878656.0, "8695": 17447878656.0, "8700": 17447876608.0, "8705": 17447876608.0, "8710": 17447884800.0, "8715": 17447878656.0, "8720": 17447884800.0, "8725": 17447872512.0, "8730": 17447874560.0, "8735": 17447880704.0, "8740": 17447858176.0, "8745": 17447882752.0, "8750": 17447878656.0, "8755": 17447882752.0, "8760": 17447878656.0, "8765": 17447878656.0, "8770": 17448660992.0, "8775": 17447874560.0, "8780": 17447884800.0, "8785": 17447866368.0, "8790": 17447882752.0, "8795": 17447878656.0, "8800": 17447884800.0, "8805": 17447882752.0, "8810": 17447872512.0, "8815": 17447882752.0, "8820": 17447856128.0, "8825": 17447895040.0, "8830": 17447897088.0, "8835": 17447874560.0, "8840": 17447886848.0, "8845": 17447872512.0, "8850": 17447882752.0, "8855": 17447866368.0, "8860": 17447882752.0, "8865": 17447872512.0, "8870": 17447878656.0, "8875": 17447882752.0, "8880": 17447878656.0, "8885": 17447872512.0, "8890": 17447886848.0, "8895": 17447892992.0, "8900": 17447882752.0, "8905": 17447878656.0, "8910": 17447880704.0, "8915": 17447880704.0, "8920": 17447874560.0, "8925": 17447878656.0, "8930": 17447882752.0, "8935": 
17447876608.0, "8940": 17447872512.0, "8945": 17447880704.0, "8950": 17447886848.0, "8955": 17447878656.0, "8960": 17448419328.0, "8965": 17447876608.0, "8970": 17447897088.0, "8975": 17447858176.0, "8980": 17447870464.0, "8985": 17447874560.0, "8990": 17447876608.0, "8995": 17447874560.0, "9000": 17447874560.0, "9005": 17447882752.0, "9010": 17447874560.0, "9015": 17447876608.0, "9020": 17447860224.0, "9025": 17447884800.0, "9030": 17447876608.0, "9035": 17447890944.0, "9040": 17447862272.0, "9045": 17447886848.0, "9050": 17447878656.0, "9055": 17447870464.0, "9060": 17447878656.0, "9065": 17447878656.0, "9070": 17447880704.0, "9075": 17447866368.0, "9080": 17447874560.0, "9085": 17447858176.0, "9090": 17447874560.0, "9095": 17447886848.0, "9100": 17447858176.0, "9105": 17447888896.0, "9110": 17447876608.0, "9115": 17447876608.0, "9120": 17447880704.0, "9125": 17447890944.0, "9130": 17447878656.0, "9135": 17447882752.0, "9140": 17447878656.0, "9145": 17447874560.0, "9150": 17447870464.0, "9155": 17447880704.0, "9160": 17447886848.0, "9165": 17447876608.0, "9170": 17447870464.0, "9175": 17447878656.0, "9180": 17447880704.0, "9185": 17447864320.0, "9190": 17447886848.0, "9195": 17447882752.0, "9200": 17447874560.0, "9205": 17447897088.0, "9210": 17447888896.0, "9215": 17447874560.0, "9220": 17447886848.0, "9225": 17447882752.0, "9230": 17447874560.0, "9235": 17447890944.0, "9240": 17447895040.0, "9245": 17447872512.0, "9250": 17447890944.0, "9255": 17447880704.0, "9260": 17447882752.0, "9265": 17447876608.0, "9270": 17447870464.0, "9275": 17447862272.0, "9280": 17447876608.0, "9285": 17447886848.0, "9290": 17447880704.0, "9295": 17447886848.0, "9300": 17447882752.0, "9305": 17447876608.0, "9310": 17447882752.0, "9315": 17447878656.0, "9320": 17447880704.0, "9325": 17447886848.0, "9330": 17447872512.0, "9335": 17447874560.0, "9340": 17447882752.0, "9345": 17447866368.0, "9350": 17447890944.0, "9355": 17447884800.0, "9360": 17447880704.0, "9365": 17447872512.0, "9370": 17447874560.0, "9375": 17448161280.0, "9380": 17447874560.0, "9385": 17447874560.0, "9390": 17447870464.0, "9395": 17447886848.0, "9400": 17447878656.0, "9405": 17447872512.0, "9410": 17447868416.0, "9415": 17447890944.0, "9420": 17447880704.0, "9425": 17447878656.0, "9430": 17447880704.0, "9435": 17447876608.0, "9440": 17447888896.0, "9445": 17447884800.0, "9450": 17447878656.0, "9455": 17447895040.0, "9460": 17447878656.0, "9465": 17447868416.0, "9470": 17447882752.0, "9475": 17447868416.0, "9480": 17447872512.0, "9485": 17447880704.0, "9490": 17447874560.0, "9495": 17447876608.0, "9500": 17447878656.0, "9505": 17447882752.0, "9510": 17447872512.0, "9515": 17447872512.0, "9520": 17447866368.0, "9525": 17447874560.0, "9530": 17447884800.0, "9535": 17447895040.0, "9540": 17447872512.0, "9545": 17447890944.0, "9550": 17447878656.0, "9555": 17447866368.0, "9560": 17447876608.0, "9565": 17447882752.0, "9570": 17447862272.0, "9575": 17447876608.0, "9580": 17447882752.0, "9585": 17447884800.0, "9590": 17447897088.0, "9595": 17447880704.0, "9600": 17447880704.0, "9605": 17447897088.0, "9610": 17447876608.0, "9615": 17447882752.0, "9620": 17447874560.0, "9625": 17447892992.0, "9630": 17447876608.0, "9635": 17447882752.0, "9640": 17447880704.0, "9645": 17447874560.0, "9650": 17447876608.0, "9655": 17447862272.0, "9660": 17447878656.0, "9665": 17447868416.0, "9670": 17447876608.0, "9675": 17447892992.0, "9680": 17447870464.0, "9685": 17447874560.0, "9690": 17447911424.0, "9695": 17447874560.0, "9700": 17447870464.0, "9705": 
17447872512.0, "9710": 17447878656.0, "9715": 17447880704.0, "9720": 17447886848.0, "9725": 17447882752.0, "9730": 17447882752.0, "9735": 17447878656.0, "9740": 17447874560.0, "9745": 17447874560.0, "9750": 17447888896.0, "9755": 17447884800.0, "9760": 17447872512.0, "9765": 17447880704.0, "9770": 17447882752.0, "9775": 17447866368.0, "9780": 17447878656.0, "9785": 17447886848.0, "9790": 17447864320.0, "9795": 17447872512.0, "9800": 17447884800.0, "9805": 17447874560.0, "9810": 17447874560.0, "9815": 17447870464.0, "9820": 17447890944.0, "9825": 17447874560.0, "9830": 17447884800.0, "9835": 17447892992.0, "9840": 17447886848.0, "9845": 17447872512.0, "9850": 17447886848.0, "9855": 17447868416.0, "9860": 17447878656.0, "9865": 17447890944.0, "9870": 17447874560.0, "9875": 17448497152.0, "9880": 17447888896.0, "9885": 17447878656.0, "9890": 17447874560.0, "9895": 17447880704.0, "9900": 17447866368.0, "9905": 17447890944.0, "9910": 17447884800.0, "9915": 17447876608.0, "9920": 17447886848.0, "9925": 17447954432.0, "9930": 17447872512.0, "9935": 17447882752.0, "9940": 17447884800.0, "9945": 17447880704.0, "9950": 17447880704.0, "9955": 17447876608.0, "9960": 17447870464.0, "9965": 17447882752.0, "9970": 17447886848.0, "9975": 17447895040.0, "9980": 17447878656.0, "9985": 17447870464.0, "9990": 17447872512.0, "9995": 17447882752.0, "10000": 17447892992.0, "10005": 17447899136.0, "10010": 17447882752.0, "10015": 17447878656.0, "10020": 17447884800.0, "10025": 17447876608.0, "10030": 17447870464.0, "10035": 17447880704.0, "10040": 17447870464.0, "10045": 17447878656.0, "10050": 17447870464.0, "10055": 17447868416.0, "10060": 17447886848.0, "10065": 17447866368.0, "10070": 17447874560.0, "10075": 17447874560.0, "10080": 17447884800.0, "10085": 17447886848.0, "10090": 17447884800.0, "10095": 17447862272.0, "10100": 17447878656.0, "10105": 17447888896.0, "10110": 17447862272.0, "10115": 17447874560.0, "10120": 17447866368.0, "10125": 17447874560.0, "10130": 17447872512.0, "10135": 17447880704.0, "10140": 17447890944.0, "10145": 17447872512.0, "10150": 17447870464.0, "10155": 17447870464.0, "10160": 17447884800.0, "10165": 17447876608.0, "10170": 17447880704.0, "10175": 17447886848.0, "10180": 17447864320.0, "10185": 17447880704.0, "10190": 17447864320.0, "10195": 17447870464.0, "10200": 17447884800.0, "10205": 17447888896.0, "10210": 17447882752.0, "10215": 17447878656.0, "10220": 17447888896.0, "10225": 17447882752.0, "10230": 17447878656.0, "10235": 17447886848.0, "10240": 17447872512.0, "10245": 17447876608.0, "10250": 17447870464.0, "10255": 17447884800.0, "10260": 17447868416.0, "10265": 17447888896.0, "10270": 17447888896.0, "10275": 17447878656.0, "10280": 17447874560.0, "10285": 17447882752.0, "10290": 17447870464.0, "10295": 17447890944.0, "10300": 17447868416.0, "10305": 17447878656.0, "10310": 17447862272.0, "10315": 17447884800.0, "10320": 17448704000.0, "10325": 17447876608.0, "10330": 17447866368.0, "10335": 17447880704.0, "10340": 17447874560.0, "10345": 17447874560.0, "10350": 17447888896.0, "10355": 17447890944.0, "10360": 17447872512.0, "10365": 17447884800.0, "10370": 17447895040.0, "10375": 17447874560.0, "10380": 17447880704.0, "10385": 17447890944.0, "10390": 17447870464.0, "10395": 17447880704.0, "10400": 17447882752.0, "10405": 17447892992.0, "10410": 17447870464.0, "10415": 17447886848.0, "10420": 17447874560.0, "10425": 17447884800.0, "10430": 17447888896.0, "10435": 17447874560.0, "10440": 17447878656.0, "10445": 17447882752.0, "10450": 17447870464.0, "10455": 
17447888896.0, "10460": 17447870464.0, "10465": 17447878656.0, "10470": 17447870464.0, "10475": 17447864320.0, "10480": 17447872512.0, "10485": 17447876608.0, "10490": 17447874560.0, "10495": 17447870464.0, "10500": 17447876608.0, "10505": 17447899136.0, "10510": 17447888896.0, "10515": 17447890944.0, "10520": 17447884800.0, "10525": 17447870464.0, "10530": 17447886848.0, "10535": 17447882752.0, "10540": 17447878656.0, "10545": 17447866368.0, "10550": 17447882752.0, "10555": 17447868416.0, "10560": 17447880704.0, "10565": 17447878656.0, "10570": 17448724480.0, "10575": 17447880704.0, "10580": 17447870464.0, "10585": 17447888896.0, "10590": 17447890944.0, "10595": 17447868416.0, "10600": 17447870464.0, "10605": 17447882752.0, "10610": 17447874560.0, "10615": 17447886848.0, "10620": 17447905280.0, "10625": 17447872512.0, "10630": 17447886848.0, "10635": 17447858176.0, "10640": 17447866368.0, "10645": 17447882752.0, "10650": 17447874560.0, "10655": 17447876608.0, "10660": 17447878656.0, "10665": 17447884800.0, "10670": 17447858176.0, "10675": 17447880704.0, "10680": 17447886848.0, "10685": 17447888896.0, "10690": 17447882752.0, "10695": 17447884800.0, "10700": 17447882752.0, "10705": 17447870464.0, "10710": 17447895040.0, "10715": 17447878656.0, "10720": 17447874560.0, "10725": 17447886848.0, "10730": 17447868416.0, "10735": 17447886848.0, "10740": 17447876608.0, "10745": 17447882752.0, "10750": 17447895040.0, "10755": 17447880704.0, "10760": 17447882752.0, "10765": 17447880704.0, "10770": 17447876608.0, "10775": 17447884800.0, "10780": 17447876608.0, "10785": 17447868416.0, "10790": 17447884800.0, "10795": 17447882752.0, "10800": 17447878656.0, "10805": 17447876608.0, "10810": 17447878656.0, "10815": 17447870464.0, "10820": 17447876608.0, "10825": 17447874560.0, "10830": 17447862272.0, "10835": 17447886848.0, "10840": 17447876608.0, "10845": 17447890944.0, "10850": 17447880704.0, "10855": 17447888896.0, "10860": 17447876608.0, "10865": 17447892992.0, "10870": 17447884800.0, "10875": 17447899136.0, "10880": 17447882752.0, "10885": 17447866368.0, "10890": 17447870464.0, "10895": 17447864320.0, "10900": 17447886848.0, "10905": 17447886848.0, "10910": 17447899136.0, "10915": 17447872512.0, "10920": 17447874560.0, "10925": 17447884800.0, "10930": 17447878656.0, "10935": 17447884800.0, "10940": 17447886848.0, "10945": 17447884800.0, "10950": 17447862272.0, "10955": 17447878656.0, "10960": 17447878656.0, "10965": 17447890944.0, "10970": 17447880704.0, "10975": 17447870464.0, "10980": 17447884800.0, "10985": 17447874560.0, "10990": 17447872512.0, "10995": 17447872512.0, "11000": 17447866368.0, "11005": 17448349696.0, "11010": 17447880704.0, "11015": 17447899136.0, "11020": 17447895040.0, "11025": 17447884800.0, "11030": 17447890944.0, "11035": 17447866368.0, "11040": 17447899136.0, "11045": 17447878656.0, "11050": 17447899136.0, "11055": 17447882752.0, "11060": 17447872512.0, "11065": 17447886848.0, "11070": 17447882752.0, "11075": 17447880704.0, "11080": 17447870464.0, "11085": 17447868416.0, "11090": 17447890944.0, "11095": 17447874560.0, "11100": 17447866368.0, "11105": 17447876608.0, "11110": 17447876608.0, "11115": 17447880704.0, "11120": 17447874560.0, "11125": 17447884800.0, "11130": 17447890944.0, "11135": 17447872512.0, "11140": 17447868416.0, "11145": 17447870464.0, "11150": 17447882752.0, "11155": 17447878656.0, "11160": 17447892992.0, "11165": 17447862272.0, "11170": 17447878656.0, "11175": 17447888896.0, "11180": 17447870464.0, "11185": 17447880704.0, "11190": 17447886848.0, "11195": 
17447878656.0, "11200": 17447888896.0, "11205": 17447882752.0, "11210": 17447897088.0, "11215": 17447874560.0, "11220": 17447882752.0, "11225": 17447886848.0, "11230": 17447882752.0, "11235": 17447886848.0, "11240": 17447909376.0, "11245": 17447862272.0, "11250": 17447878656.0, "11255": 17447870464.0, "11260": 17447880704.0, "11265": 17447882752.0, "11270": 17447874560.0, "11275": 17447888896.0, "11280": 17447874560.0, "11285": 17447880704.0, "11290": 17447876608.0, "11295": 17447895040.0, "11300": 17447872512.0, "11305": 17447876608.0, "11310": 17447882752.0, "11315": 17447874560.0, "11320": 17447892992.0, "11325": 17447872512.0, "11330": 17447868416.0, "11335": 17447866368.0, "11340": 17447872512.0, "11345": 17447911424.0, "11350": 17447915520.0, "11355": 17447911424.0, "11360": 17447901184.0, "11365": 17448378368.0, "11370": 17447915520.0, "11375": 17447903232.0, "11380": 17448855552.0, "11385": 17447903232.0, "11390": 17447907328.0, "11395": 17447907328.0, "11400": 17447915520.0, "11405": 17447909376.0, "11410": 17447903232.0, "11415": 17447913472.0, "11420": 17447915520.0, "11425": 17447923712.0, "11430": 17447915520.0, "11435": 17447909376.0, "11440": 17447911424.0, "11445": 17447895040.0, "11450": 17447913472.0, "11455": 17447911424.0, "11460": 17447915520.0, "11465": 17447919616.0, "11470": 17447913472.0, "11475": 17447905280.0, "11480": 17447905280.0, "11485": 17447919616.0, "11490": 17447913472.0, "11495": 17447895040.0, "11500": 17447899136.0, "11505": 17447901184.0, "11510": 17447903232.0, "11515": 17447931904.0, "11520": 17447886848.0, "11525": 17447895040.0, "11530": 17447895040.0, "11535": 17447907328.0, "11540": 17447903232.0, "11545": 17447915520.0, "11550": 17447907328.0, "11555": 17447907328.0, "11560": 17447895040.0, "11565": 17447938048.0, "11570": 17447911424.0, "11575": 17447897088.0, "11580": 17447895040.0, "11585": 17447925760.0, "11590": 17447897088.0, "11595": 17447905280.0, "11600": 17447919616.0, "11605": 17447888896.0, "11610": 17447915520.0, "11615": 17447909376.0, "11620": 17447907328.0, "11625": 17447919616.0, "11630": 17447911424.0, "11635": 17447909376.0, "11640": 17447903232.0, "11645": 17447901184.0, "11650": 17447907328.0, "11655": 17447899136.0, "11660": 17447899136.0, "11665": 17447921664.0, "11670": 17447905280.0, "11675": 17448366080.0, "11680": 17447897088.0, "11685": 17447903232.0, "11690": 17447901184.0, "11695": 17447905280.0, "11700": 17447909376.0, "11705": 17447923712.0, "11710": 17447905280.0, "11715": 17447895040.0, "11720": 17447886848.0, "11725": 17447895040.0, "11730": 17447911424.0, "11735": 17447905280.0, "11740": 17447890944.0, "11745": 17447901184.0, "11750": 17447899136.0, "11755": 17447907328.0, "11760": 17447905280.0, "11765": 17447899136.0, "11770": 17447911424.0, "11775": 17447919616.0, "11780": 17447911424.0, "11785": 17447903232.0, "11790": 17447925760.0, "11795": 17447911424.0, "11800": 17448118272.0, "11805": 17447890944.0, "11810": 17447905280.0, "11815": 17447907328.0, "11820": 17447915520.0, "11825": 17447915520.0, "11830": 17447913472.0, "11835": 17447907328.0, "11840": 17447907328.0, "11845": 17447919616.0, "11850": 17447909376.0, "11855": 17447905280.0, "11860": 17447923712.0, "11865": 17447915520.0, "11870": 17447911424.0, "11875": 17447895040.0, "11880": 17447915520.0, "11885": 17447915520.0, "11890": 17447911424.0, "11895": 17447909376.0, "11900": 17447907328.0, "11905": 17447895040.0, "11910": 17447886848.0, "11915": 17447858176.0, "11920": 17447886848.0, "11925": 17447884800.0, "11930": 17447886848.0, "11935": 
17447886848.0, "11940": 17447878656.0, "11945": 17447899136.0, "11950": 17447882752.0, "11955": 17447882752.0, "11960": 17447876608.0, "11965": 17447872512.0, "11970": 17448699904.0, "11975": 17447890944.0, "11980": 17447884800.0, "11985": 17447876608.0, "11990": 17447882752.0, "11995": 17447895040.0, "12000": 17447874560.0, "12005": 17447886848.0, "12010": 17447895040.0, "12015": 17447876608.0, "12020": 17447884800.0, "12025": 17447870464.0, "12030": 17447886848.0, "12035": 17447878656.0, "12040": 17447882752.0, "12045": 17447878656.0, "12050": 17447882752.0, "12055": 17447866368.0, "12060": 17447886848.0, "12065": 17447890944.0, "12070": 17447868416.0, "12075": 17447876608.0, "12080": 17447882752.0, "12085": 17448462336.0, "12090": 17447886848.0, "12095": 17447868416.0, "12100": 17447864320.0, "12105": 17447882752.0, "12110": 17447890944.0, "12115": 17447878656.0, "12120": 17447874560.0, "12125": 17447874560.0, "12130": 17447870464.0, "12135": 17447878656.0, "12140": 17447862272.0, "12145": 17447874560.0, "12150": 17447882752.0, "12155": 17447864320.0, "12160": 17447886848.0, "12165": 17447874560.0, "12170": 17447882752.0, "12175": 17447886848.0, "12180": 17447878656.0, "12185": 17447870464.0, "12190": 17447866368.0, "12195": 17447882752.0, "12200": 17447882752.0, "12205": 17447866368.0, "12210": 17447892992.0, "12215": 17447890944.0, "12220": 17447886848.0, "12225": 17447882752.0, "12230": 17447901184.0, "12235": 17447862272.0, "12240": 17447876608.0, "12245": 17447878656.0, "12250": 17447870464.0, "12255": 17447878656.0, "12260": 17447874560.0, "12265": 17447882752.0, "12270": 17447882752.0, "12275": 17447882752.0, "12280": 17447876608.0, "12285": 17447878656.0, "12290": 17448341504.0, "12295": 17447870464.0, "12300": 17447872512.0, "12305": 17447882752.0, "12310": 17447876608.0, "12315": 17447901184.0, "12320": 17447868416.0, "12325": 17447888896.0, "12330": 17447892992.0, "12335": 17447868416.0, "12340": 17447878656.0, "12345": 17447899136.0, "12350": 17447878656.0, "12355": 17447880704.0, "12360": 17447870464.0, "12365": 17447868416.0, "12370": 17447874560.0, "12375": 17447882752.0, "12380": 17447862272.0, "12385": 17447886848.0, "12390": 17447882752.0, "12395": 17447899136.0, "12400": 17447874560.0, "12405": 17447866368.0, "12410": 17447878656.0, "12415": 17447878656.0, "12420": 17447880704.0, "12425": 17447870464.0, "12430": 17447862272.0, "12435": 17447884800.0, "12440": 17447876608.0, "12445": 17447876608.0, "12450": 17447886848.0, "12455": 17447884800.0, "12460": 17447882752.0, "12465": 17447874560.0, "12470": 17447876608.0, "12475": 17447878656.0, "12480": 17448806400.0, "12485": 17448820736.0, "12490": 17448804352.0, "12495": 17448808448.0, "12500": 17448816640.0, "12505": 17448816640.0, "12510": 17448835072.0, "12515": 17448810496.0, "12520": 17448826880.0, "12525": 17448804352.0, "12530": 17448812544.0, "12535": 17448814592.0, "12540": 17448806400.0, "12545": 17448826880.0, "12550": 17448824832.0, "12555": 17448798208.0, "12560": 17448814592.0, "12565": 17448816640.0, "12570": 17448804352.0, "12575": 17448818688.0, "12580": 17448816640.0, "12585": 17448810496.0, "12590": 17448820736.0, "12595": 17448822784.0, "12600": 17448806400.0, "12605": 17448794112.0, "12610": 17448794112.0, "12615": 17448828928.0, "12620": 17448808448.0, "12625": 17448802304.0, "12630": 17448800256.0, "12635": 17448820736.0, "12640": 17448816640.0, "12645": 17448808448.0, "12650": 17448808448.0, "12655": 17448812544.0, "12660": 17448804352.0, "12665": 17448796160.0, "12670": 17448822784.0, "12675": 
17448818688.0, "12680": 17448833024.0, "12685": 17448804352.0, "12690": 17448796160.0, "12695": 17448800256.0, "12700": 17448802304.0, "12705": 17448820736.0, "12710": 17448806400.0, "12715": 17448814592.0, "12720": 17449668608.0, "12725": 17448792064.0, "12730": 17448816640.0, "12735": 17448808448.0, "12740": 17448792064.0, "12745": 17448804352.0, "12750": 17448820736.0, "12755": 17448812544.0, "12760": 17448812544.0, "12765": 17448806400.0, "12770": 17448808448.0, "12775": 17448814592.0, "12780": 17448820736.0, "12785": 17448816640.0, "12790": 17448802304.0, "12795": 17448802304.0, "12800": 17448810496.0, "12805": 17448812544.0, "12810": 17448808448.0, "12815": 17448802304.0, "12820": 17448824832.0, "12825": 17448806400.0, "12830": 17448802304.0, "12835": 17449644032.0, "12840": 17448826880.0, "12845": 17448808448.0, "12850": 17448794112.0, "12855": 17448820736.0, "12860": 17448812544.0, "12865": 17448808448.0, "12870": 17448800256.0, "12875": 17448814592.0, "12880": 17448810496.0, "12885": 17448810496.0, "12890": 17448808448.0, "12895": 17448814592.0, "12900": 17448824832.0, "12905": 17448804352.0, "12910": 17448808448.0, "12915": 17448806400.0, "12920": 17448802304.0, "12925": 17448804352.0, "12930": 17448816640.0, "12935": 17448804352.0, "12940": 17448812544.0, "12945": 17448810496.0, "12950": 17448810496.0, "12955": 17448812544.0, "12960": 17448792064.0, "12965": 17448816640.0, "12970": 17448796160.0, "12975": 17448816640.0, "12980": 17448800256.0, "12985": 17448812544.0, "12990": 17448816640.0, "12995": 17448812544.0, "13000": 17448816640.0, "13005": 17448816640.0, "13010": 17448814592.0, "13015": 17448792064.0, "13020": 17448816640.0, "13025": 17447880704.0, "13030": 17447888896.0, "13035": 17447882752.0, "13040": 17447852032.0, "13045": 17447882752.0, "13050": 17447874560.0, "13055": 17447888896.0, "13060": 17447880704.0, "13065": 17447866368.0, "13070": 17448683520.0, "13075": 17447882752.0, "13080": 17447880704.0, "13085": 17447878656.0, "13090": 17447866368.0, "13095": 17447874560.0, "13100": 17447866368.0, "13105": 17447882752.0, "13110": 17447884800.0, "13115": 17447876608.0, "13120": 17447866368.0, "13125": 17447856128.0, "13130": 17447888896.0, "13135": 17447897088.0, "13140": 17447878656.0, "13145": 17447864320.0, "13150": 17447888896.0, "13155": 17447882752.0, "13160": 17447872512.0, "13165": 17447880704.0, "13170": 17447880704.0, "13175": 17447890944.0, "13180": 17447870464.0, "13185": 17447872512.0, "13190": 17447878656.0, "13195": 17447866368.0, "13200": 17447886848.0, "13205": 17447892992.0, "13210": 17447878656.0, "13215": 17447872512.0, "13220": 17447866368.0, "13225": 17447874560.0, "13230": 17447864320.0, "13235": 17448878080.0, "13240": 17447870464.0, "13245": 17447882752.0, "13250": 17447878656.0, "13255": 17447864320.0, "13260": 17447880704.0, "13265": 17447884800.0, "13270": 17447878656.0, "13275": 17447886848.0, "13280": 17447878656.0, "13285": 17447886848.0, "13290": 17447866368.0, "13295": 17447876608.0, "13300": 17447872512.0, "13305": 17447886848.0, "13310": 17447858176.0, "13315": 17447874560.0, "13320": 17447886848.0, "13325": 17447892992.0, "13330": 17447868416.0, "13335": 17447878656.0, "13340": 17447886848.0, "13345": 17447878656.0, "13350": 17447866368.0, "13355": 17447866368.0, "13360": 17447880704.0, "13365": 17447876608.0, "13370": 17447878656.0, "13375": 17447886848.0, "13380": 17447901184.0, "13385": 17447882752.0, "13390": 17447878656.0, "13395": 17447884800.0, "13400": 17447892992.0, "13405": 17447874560.0, "13410": 17447880704.0, "13415": 
17447874560.0, "13420": 17447872512.0, "13425": 17447886848.0, "13430": 17447880704.0, "13435": 17447866368.0, "13440": 17447886848.0, "13445": 17447862272.0, "13450": 17447880704.0, "13455": 17447884800.0, "13460": 17447874560.0, "13465": 17447890944.0, "13470": 17447880704.0, "13475": 17447878656.0, "13480": 17447878656.0, "13485": 17447878656.0, "13490": 17447886848.0, "13495": 17447878656.0, "13500": 17447880704.0, "13505": 17447884800.0, "13510": 17447897088.0, "13515": 17447878656.0, "13520": 17447872512.0, "13525": 17447845888.0, "13530": 17447870464.0, "13535": 17447876608.0, "13540": 17447882752.0, "13545": 17447880704.0, "13550": 17447866368.0, "13555": 17447886848.0, "13560": 17447862272.0, "13565": 17447886848.0, "13570": 17447882752.0, "13575": 17447880704.0, "13580": 17447882752.0, "13585": 17447882752.0, "13590": 17447870464.0, "13595": 17447882752.0, "13600": 17447890944.0, "13605": 17447866368.0, "13610": 17447880704.0, "13615": 17447862272.0, "13620": 17447868416.0, "13625": 17447874560.0, "13630": 17447882752.0, "13635": 17447874560.0, "13640": 17447862272.0, "13645": 17447876608.0, "13650": 17447882752.0, "13655": 17447880704.0, "13660": 17447872512.0, "13665": 17447888896.0, "13670": 17447874560.0, "13675": 17447882752.0, "13680": 17447874560.0, "13685": 17447886848.0, "13690": 17447882752.0, "13695": 17447864320.0, "13700": 17447872512.0, "13705": 17447882752.0, "13710": 17447874560.0, "13715": 17447884800.0, "13720": 17447882752.0, "13725": 17447876608.0, "13730": 17447874560.0, "13735": 17447886848.0, "13740": 17447886848.0, "13745": 17447878656.0, "13750": 17447878656.0, "13755": 17447868416.0, "13760": 17447862272.0, "13765": 17447876608.0, "13770": 17447878656.0, "13775": 17447882752.0, "13780": 17447864320.0, "13785": 17447882752.0, "13790": 17447876608.0, "13795": 17447878656.0, "13800": 17447874560.0, "13805": 17447872512.0, "13810": 17447888896.0, "13815": 17447874560.0, "13820": 17447870464.0, "13825": 17447882752.0, "13830": 17447878656.0, "13835": 17447878656.0, "13840": 17447882752.0, "13845": 17447874560.0, "13850": 17447868416.0, "13855": 17447880704.0, "13860": 17447878656.0, "13865": 17448001536.0, "13870": 17447868416.0, "13875": 17447874560.0, "13880": 17447884800.0, "13885": 17447870464.0, "13890": 17447884800.0, "13895": 17447895040.0, "13900": 17447892992.0, "13905": 17447870464.0, "13910": 17447872512.0, "13915": 17447870464.0, "13920": 17447866368.0, "13925": 17447886848.0, "13930": 17447878656.0, "13935": 17447870464.0, "13940": 17447882752.0, "13945": 17447886848.0, "13950": 17447872512.0, "13955": 17447882752.0, "13960": 17447878656.0, "13965": 17447880704.0, "13970": 17447868416.0, "13975": 17447878656.0, "13980": 17447886848.0, "13985": 17447876608.0, "13990": 17447911424.0, "13995": 17447884800.0, "14000": 17447876608.0, "14005": 17447888896.0, "14010": 17447880704.0, "14015": 17447880704.0, "14020": 17447882752.0, "14025": 17447882752.0, "14030": 17447878656.0, "14035": 17447870464.0, "14040": 17447874560.0, "14045": 17447886848.0, "14050": 17447868416.0, "14055": 17447874560.0, "14060": 17447876608.0, "14065": 17447878656.0, "14070": 17447882752.0, "14075": 17447862272.0, "14080": 17447888896.0, "14085": 17447874560.0, "14090": 17447886848.0, "14095": 17448714240.0, "14100": 17447895040.0, "14105": 17447880704.0, "14110": 17447878656.0, "14115": 17447884800.0, "14120": 17447864320.0, "14125": 17448050688.0, "14130": 17447882752.0, "14135": 17447886848.0, "14140": 17447876608.0, "14145": 17447866368.0, "14150": 17447882752.0, "14155": 
17447895040.0, "14160": 17447866368.0, "14165": 17447890944.0, "14170": 17447880704.0, "14175": 17447890944.0, "14180": 17447872512.0, "14185": 17447878656.0, "14190": 17447880704.0, "14195": 17447882752.0, "14200": 17447870464.0, "14205": 17447892992.0, "14210": 17447888896.0, "14215": 17447880704.0, "14220": 17447882752.0, "14225": 17447884800.0, "14230": 17447880704.0, "14235": 17447882752.0, "14240": 17447888896.0, "14245": 17447888896.0, "14250": 17447890944.0, "14255": 17447878656.0, "14260": 17447886848.0, "14265": 17447886848.0, "14270": 17447870464.0, "14275": 17447874560.0, "14280": 17447874560.0, "14285": 17447878656.0, "14290": 17447872512.0, "14295": 17447882752.0, "14300": 17447886848.0, "14305": 17447874560.0, "14310": 17447880704.0, "14315": 17447884800.0, "14320": 17447872512.0, "14325": 17447882752.0, "14330": 17447874560.0, "14335": 17447884800.0, "14340": 17447876608.0, "14345": 17447895040.0, "14350": 17447874560.0, "14355": 17447872512.0, "14360": 17447880704.0, "14365": 17447882752.0, "14370": 17447882752.0, "14375": 17447890944.0, "14380": 17447892992.0, "14385": 17447878656.0, "14390": 17447876608.0, "14395": 17447870464.0, "14400": 17447866368.0, "14405": 17447876608.0, "14410": 17447882752.0, "14415": 17447872512.0, "14420": 17447878656.0, "14425": 17447872512.0, "14430": 17447895040.0, "14435": 17447882752.0, "14440": 17447876608.0, "14445": 17447874560.0, "14450": 17447888896.0, "14455": 17447884800.0, "14460": 17447880704.0, "14465": 17447872512.0, "14470": 17447874560.0, "14475": 17447878656.0, "14480": 17447874560.0, "14485": 17447876608.0, "14490": 17447888896.0, "14495": 17447866368.0, "14500": 17447880704.0, "14505": 17447895040.0, "14510": 17447884800.0, "14515": 17447872512.0, "14520": 17447884800.0, "14525": 17447874560.0, "14530": 17447876608.0, "14535": 17447876608.0, "14540": 17447874560.0, "14545": 17447876608.0, "14550": 17447897088.0, "14555": 17447872512.0, "14560": 17447874560.0, "14565": 17447878656.0, "14570": 17447866368.0, "14575": 17447897088.0, "14580": 17447870464.0, "14585": 17447862272.0, "14590": 17447890944.0, "14595": 17447874560.0, "14600": 17447886848.0, "14605": 17447864320.0, "14610": 17447888896.0, "14615": 17447882752.0, "14620": 17447882752.0, "14625": 17447890944.0, "14630": 17447886848.0, "14635": 17447876608.0, "14640": 17447890944.0, "14645": 17447854080.0, "14650": 17447878656.0, "14655": 17447870464.0, "14660": 17447888896.0, "14665": 17447884800.0, "14670": 17447878656.0, "14675": 17447884800.0, "14680": 17447854080.0, "14685": 17447878656.0, "14690": 17447882752.0, "14695": 17447882752.0, "14700": 17447876608.0, "14705": 17447882752.0, "14710": 17447872512.0, "14715": 17447878656.0, "14720": 17447870464.0, "14725": 17447874560.0, "14730": 17447886848.0, "14735": 17447890944.0, "14740": 17447882752.0, "14745": 17447878656.0, "14750": 17447866368.0, "14755": 17447872512.0, "14760": 17447882752.0, "14765": 17447858176.0, "14770": 17447866368.0, "14775": 17447886848.0, "14780": 17447884800.0, "14785": 17447892992.0, "14790": 17447874560.0, "14795": 17447864320.0, "14800": 17447880704.0, "14805": 17447866368.0, "14810": 17447888896.0, "14815": 17447870464.0, "14820": 17447870464.0, "14825": 17447878656.0, "14830": 17447876608.0, "14835": 17447870464.0, "14840": 17447874560.0, "14845": 17447884800.0, "14850": 17447882752.0, "14855": 17447884800.0, "14860": 17447878656.0, "14865": 17447878656.0, "14870": 17447878656.0, "14875": 17447890944.0, "14880": 17447876608.0, "14885": 17447895040.0, "14890": 17447882752.0, "14895": 
17447884800.0, "14900": 17447874560.0, "14905": 17447858176.0, "14910": 17447878656.0, "14915": 17447880704.0, "14920": 17447882752.0, "14925": 17447882752.0, "14930": 17447886848.0, "14935": 17447886848.0, "14940": 17447888896.0, "14945": 17447878656.0, "14950": 17447870464.0, "14955": 17447874560.0, "14960": 17447870464.0, "14965": 17447878656.0, "14970": 17447870464.0, "14975": 17447882752.0, "14980": 17447870464.0, "14985": 17447895040.0, "14990": 17447870464.0, "14995": 17447872512.0, "15000": 17447882752.0, "15005": 17447882752.0, "15010": 17447899136.0, "15015": 17447874560.0, "15020": 17447886848.0, "15025": 17447878656.0, "15030": 17447884800.0, "15035": 17447892992.0, "15040": 17447868416.0, "15045": 17447890944.0, "15050": 17447872512.0, "15055": 17447876608.0, "15060": 17447895040.0, "15065": 17447878656.0, "15070": 17447866368.0, "15075": 17447895040.0, "15080": 17447897088.0, "15085": 17447895040.0, "15090": 17447880704.0, "15095": 17447870464.0, "15100": 17447876608.0, "15105": 17447878656.0, "15110": 17447884800.0, "15115": 17447874560.0, "15120": 17447886848.0, "15125": 17447884800.0, "15130": 17447878656.0, "15135": 17447874560.0, "15140": 17447880704.0, "15145": 17447874560.0, "15150": 17447878656.0, "15155": 17447886848.0, "15160": 17447868416.0, "15165": 17447878656.0, "15170": 17447870464.0, "15175": 17447895040.0, "15180": 17447882752.0, "15185": 17447882752.0, "15190": 17447872512.0, "15195": 17447882752.0, "15200": 17447882752.0, "15205": 17447888896.0, "15210": 17447886848.0, "15215": 17447880704.0, "15220": 17447882752.0, "15225": 17447888896.0, "15230": 17447884800.0, "15235": 17448220672.0, "15240": 17447880704.0, "15245": 17447874560.0, "15250": 17447882752.0, "15255": 17447886848.0, "15260": 17447884800.0, "15265": 17447886848.0, "15270": 17447888896.0, "15275": 17447880704.0, "15280": 17447870464.0, "15285": 17447870464.0, "15290": 17447874560.0, "15295": 17447890944.0, "15300": 17447880704.0, "15305": 17447872512.0, "15310": 17447888896.0, "15315": 17447884800.0, "15320": 17447866368.0, "15325": 17447890944.0, "15330": 17447874560.0, "15335": 17447882752.0, "15340": 17447880704.0, "15345": 17447882752.0, "15350": 17447878656.0, "15355": 17447876608.0, "15360": 17447886848.0, "15365": 17447897088.0, "15370": 17447884800.0, "15375": 17447874560.0, "15380": 17447882752.0, "15385": 17447876608.0, "15390": 17447884800.0, "15395": 17447888896.0, "15400": 17447872512.0, "15405": 17448863744.0, "15410": 17447878656.0, "15415": 17447878656.0, "15420": 17447870464.0, "15425": 17447886848.0, "15430": 17447872512.0, "15435": 17447878656.0, "15440": 17447884800.0, "15445": 17447880704.0, "15450": 17447886848.0, "15455": 17447886848.0, "15460": 17447866368.0, "15465": 17447886848.0, "15470": 17447895040.0, "15475": 17447880704.0, "15480": 17447886848.0, "15485": 17447886848.0, "15490": 17447880704.0, "15495": 17447882752.0, "15500": 17447870464.0, "15505": 17447884800.0, "15510": 17447868416.0, "15515": 17447876608.0, "15520": 17447868416.0, "15525": 17447888896.0, "15530": 17447870464.0, "15535": 17447880704.0, "15540": 17447878656.0, "15545": 17447884800.0, "15550": 17447886848.0, "15555": 17447882752.0, "15560": 17447876608.0, "15565": 17447876608.0, "15570": 17447872512.0, "15575": 17447872512.0, "15580": 17447892992.0, "15585": 17447874560.0, "15590": 17447872512.0, "15595": 17447876608.0, "15600": 17447880704.0, "15605": 17447882752.0, "15610": 17447866368.0, "15615": 17447884800.0, "15620": 17447876608.0, "15625": 17447882752.0, "15630": 17447890944.0, "15635": 
17447886848.0, "15640": 17448374272.0, "15645": 17447895040.0, "15650": 17447890944.0, "15655": 17447884800.0, "15660": 17447905280.0, "15665": 17447888896.0, "15670": 17447895040.0, "15675": 17447890944.0, "15680": 17447892992.0, "15685": 17447895040.0, "15690": 17447905280.0, "15695": 17447884800.0, "15700": 17447886848.0, "15705": 17447895040.0, "15710": 17447905280.0, "15715": 17447903232.0, "15720": 17447870464.0, "15725": 17447882752.0, "15730": 17447903232.0, "15735": 17447899136.0, "15740": 17447874560.0, "15745": 17447884800.0, "15750": 17447888896.0, "15755": 17447878656.0, "15760": 17447890944.0, "15765": 17447901184.0, "15770": 17447895040.0, "15775": 17447895040.0, "15780": 17447880704.0, "15785": 17447895040.0, "15790": 17447886848.0, "15795": 17447890944.0, "15800": 17447890944.0, "15805": 17447903232.0, "15810": 17447892992.0, "15815": 17447888896.0, "15820": 17447892992.0, "15825": 17447878656.0, "15830": 17447901184.0, "15835": 17447884800.0, "15840": 17447890944.0, "15845": 17447899136.0, "15850": 17447890944.0, "15855": 17447897088.0, "15860": 17447880704.0, "15865": 17447897088.0, "15870": 17447888896.0, "15875": 17447903232.0, "15880": 17447876608.0, "15885": 17447886848.0, "15890": 17447890944.0, "15895": 17447886848.0, "15900": 17447907328.0, "15905": 17447892992.0, "15910": 17447890944.0, "15915": 17448353792.0, "15920": 17447895040.0, "15925": 17447899136.0, "15930": 17447892992.0, "15935": 17447905280.0, "15940": 17447895040.0, "15945": 17447882752.0, "15950": 17447897088.0, "15955": 17447888896.0, "15960": 17447882752.0, "15965": 17447882752.0, "15970": 17447886848.0, "15975": 17447886848.0, "15980": 17447888896.0, "15985": 17447905280.0, "15990": 17447895040.0, "15995": 17447886848.0, "16000": 17448570880.0, "16005": 17447882752.0, "16010": 17447911424.0, "16015": 17447880704.0, "16020": 17447886848.0, "16025": 17447884800.0, "16030": 17447897088.0, "16035": 17447892992.0, "16040": 17447892992.0, "16045": 17447878656.0, "16050": 17447884800.0, "16055": 17447872512.0, "16060": 17447878656.0, "16065": 17447886848.0, "16070": 17447882752.0, "16075": 17447899136.0, "16080": 17447903232.0, "16085": 17447878656.0, "16090": 17447892992.0, "16095": 17447892992.0, "16100": 17447882752.0, "16105": 17447878656.0, "16110": 17447890944.0, "16115": 17447874560.0, "16120": 17447899136.0, "16125": 17447882752.0, "16130": 17447903232.0, "16135": 17447884800.0, "16140": 17447880704.0, "16145": 17447882752.0, "16150": 17447903232.0, "16155": 17447899136.0, "16160": 17447886848.0, "16165": 17447895040.0, "16170": 17447886848.0, "16175": 17447901184.0, "16180": 17447899136.0, "16185": 17447901184.0, "16190": 17447890944.0, "16195": 17448312832.0, "16200": 17448255488.0, "16205": 17448275968.0, "16210": 17448263680.0, "16215": 17448267776.0, "16220": 17448275968.0, "16225": 17448730624.0, "16230": 17448269824.0, "16235": 17448261632.0, "16240": 17448271872.0, "16245": 17448271872.0, "16250": 17448273920.0, "16255": 17448267776.0, "16260": 17448284160.0, "16265": 17448278016.0, "16270": 17448290304.0, "16275": 17448273920.0, "16280": 17448280064.0, "16285": 17448275968.0, "16290": 17448275968.0, "16295": 17448271872.0, "16300": 17448284160.0, "16305": 17448275968.0, "16310": 17448271872.0, "16315": 17448278016.0, "16320": 17448263680.0, "16325": 17448286208.0, "16330": 17448263680.0, "16335": 17448267776.0, "16340": 17448267776.0, "16345": 17448278016.0, "16350": 17448271872.0, "16355": 17448275968.0, "16360": 17448251392.0, "16365": 17448278016.0, "16370": 17448474624.0, "16375": 
17448288256.0, "16380": 17448263680.0, "16385": 17448278016.0, "16390": 17448275968.0, "16395": 17448275968.0, "16400": 17448245248.0, "16405": 17448255488.0, "16410": 17448265728.0, "16415": 17448820736.0, "16420": 17448282112.0, "16425": 17448280064.0, "16430": 17448263680.0, "16435": 17448263680.0, "16440": 17448251392.0, "16445": 17448271872.0, "16450": 17448275968.0, "16455": 17448271872.0, "16460": 17448282112.0, "16465": 17448286208.0, "16470": 17448265728.0, "16475": 17448275968.0, "16480": 17448294400.0, "16485": 17448265728.0, "16490": 17448255488.0, "16495": 17448271872.0, "16500": 17448251392.0, "16505": 17448278016.0, "16510": 17448282112.0, "16515": 17448267776.0, "16520": 17448257536.0, "16525": 17448275968.0, "16530": 17448568832.0, "16535": 17448259584.0, "16540": 17448286208.0, "16545": 17448267776.0, "16550": 17448300544.0, "16555": 17448288256.0, "16560": 17448263680.0, "16565": 17448265728.0, "16570": 17448282112.0, "16575": 17448278016.0, "16580": 17448284160.0, "16585": 17448282112.0, "16590": 17448280064.0, "16595": 17448273920.0, "16600": 17448280064.0, "16605": 17448263680.0, "16610": 17449129984.0, "16615": 17448261632.0, "16620": 17448286208.0, "16625": 17448271872.0, "16630": 17448280064.0, "16635": 17448292352.0, "16640": 17448286208.0, "16645": 17448259584.0, "16650": 17448257536.0, "16655": 17448271872.0, "16660": 17448282112.0, "16665": 17448251392.0, "16670": 17448282112.0, "16675": 17448269824.0, "16680": 17448273920.0, "16685": 17448273920.0, "16690": 17448275968.0, "16695": 17448288256.0, "16700": 17448251392.0, "16705": 17448271872.0, "16710": 17448267776.0, "16715": 17448263680.0, "16720": 17448286208.0, "16725": 17448269824.0, "16730": 17448275968.0, "16735": 17448259584.0, "16740": 17448269824.0, "16745": 17448261632.0, "16750": 17448282112.0, "16755": 17448259584.0, "16760": 17448288256.0, "16765": 17448269824.0, "16770": 17448271872.0, "16775": 17447876608.0, "16780": 17447886848.0, "16785": 17447860224.0, "16790": 17447884800.0, "16795": 17447870464.0, "16800": 17447882752.0, "16805": 17447882752.0, "16810": 17447880704.0, "16815": 17447874560.0, "16820": 17447872512.0, "16825": 17447880704.0, "16830": 17447874560.0, "16835": 17447874560.0, "16840": 17447856128.0, "16845": 17447874560.0, "16850": 17447856128.0, "16855": 17447880704.0, "16860": 17447880704.0, "16865": 17447907328.0, "16870": 17447874560.0, "16875": 17447888896.0, "16880": 17447860224.0, "16885": 17447882752.0, "16890": 17447862272.0, "16895": 17447888896.0, "16900": 17447888896.0, "16905": 17447890944.0, "16910": 17447886848.0, "16915": 17447864320.0, "16920": 17447862272.0, "16925": 17447886848.0, "16930": 17447870464.0, "16935": 17447882752.0, "16940": 17447868416.0, "16945": 17447878656.0, "16950": 17447888896.0, "16955": 17447886848.0, "16960": 17447878656.0, "16965": 17447874560.0, "16970": 17447878656.0, "16975": 17447876608.0, "16980": 17447876608.0, "16985": 17447886848.0, "16990": 17447888896.0, "16995": 17447872512.0, "17000": 17447882752.0, "17005": 17447884800.0, "17010": 17447884800.0, "17015": 17447868416.0, "17020": 17447878656.0, "17025": 17447886848.0, "17030": 17447886848.0, "17035": 17447874560.0, "17040": 17447882752.0, "17045": 17447874560.0, "17050": 17447880704.0, "17055": 17447882752.0, "17060": 17447874560.0, "17065": 17447884800.0, "17070": 17447878656.0, "17075": 17447874560.0, "17080": 17447878656.0, "17085": 17447870464.0, "17090": 17447872512.0, "17095": 17447870464.0, "17100": 17447882752.0, "17105": 17447868416.0, "17110": 17447882752.0, "17115": 
17447880704.0, "17120": 17447870464.0, "17125": 17447868416.0, "17130": 17447868416.0, "17135": 17447876608.0, "17140": 17447874560.0, "17145": 17447868416.0, "17150": 17448017920.0, "17155": 17447888896.0, "17160": 17447884800.0, "17165": 17447882752.0, "17170": 17447886848.0, "17175": 17447886848.0, "17180": 17447876608.0, "17185": 17447890944.0, "17190": 17448022016.0, "17195": 17447874560.0, "17200": 17447872512.0, "17205": 17447878656.0, "17210": 17447880704.0, "17215": 17447882752.0, "17220": 17447870464.0, "17225": 17447862272.0, "17230": 17447878656.0, "17235": 17447882752.0, "17240": 17447866368.0, "17245": 17447874560.0, "17250": 17447892992.0, "17255": 17447880704.0, "17260": 17447866368.0, "17265": 17447890944.0, "17270": 17447874560.0, "17275": 17447864320.0, "17280": 17447874560.0, "17285": 17447884800.0, "17290": 17447878656.0, "17295": 17447880704.0, "17300": 17447866368.0, "17305": 17447866368.0, "17310": 17447870464.0, "17315": 17447888896.0, "17320": 17447876608.0, "17325": 17447868416.0, "17330": 17447874560.0, "17335": 17447892992.0, "17340": 17447878656.0, "17345": 17447878656.0, "17350": 17447890944.0, "17355": 17447870464.0, "17360": 17448247296.0, "17365": 17447882752.0, "17370": 17447886848.0, "17375": 17447880704.0, "17380": 17447870464.0, "17385": 17447860224.0, "17390": 17447870464.0, "17395": 17447868416.0, "17400": 17447882752.0, "17405": 17447876608.0, "17410": 17447888896.0, "17415": 17447872512.0, "17420": 17447868416.0, "17425": 17447868416.0, "17430": 17447876608.0, "17435": 17447890944.0, "17440": 17447862272.0, "17445": 17447858176.0, "17450": 17447880704.0, "17455": 17447864320.0, "17460": 17447872512.0, "17465": 17447878656.0, "17470": 17447878656.0, "17475": 17447876608.0, "17480": 17447884800.0, "17485": 17447886848.0, "17490": 17447876608.0, "17495": 17447880704.0, "17500": 17447876608.0, "17505": 17447880704.0, "17510": 17447888896.0, "17515": 17447903232.0, "17520": 17447874560.0, "17525": 17447876608.0, "17530": 17447886848.0, "17535": 17447866368.0, "17540": 17447899136.0, "17545": 17447862272.0, "17550": 17447880704.0, "17555": 17447880704.0, "17560": 17447870464.0, "17565": 17447874560.0, "17570": 17447878656.0, "17575": 17447874560.0, "17580": 17447874560.0, "17585": 17447878656.0, "17590": 17447874560.0, "17595": 17447890944.0, "17600": 17447882752.0, "17605": 17447890944.0, "17610": 17447878656.0, "17615": 17447866368.0, "17620": 17447868416.0, "17625": 17447880704.0, "17630": 17447876608.0, "17635": 17447866368.0, "17640": 17447876608.0, "17645": 17447876608.0, "17650": 17447878656.0, "17655": 17447874560.0, "17660": 17447899136.0, "17665": 17447870464.0, "17670": 17447862272.0, "17675": 17447888896.0, "17680": 17447876608.0, "17685": 17447886848.0, "17690": 17447876608.0, "17695": 17448132608.0, "17700": 17447888896.0, "17705": 17447854080.0, "17710": 17447878656.0, "17715": 17447880704.0, "17720": 17447876608.0, "17725": 17447880704.0, "17730": 17447878656.0, "17735": 17447874560.0, "17740": 17448849408.0, "17745": 17447884800.0, "17750": 17447878656.0, "17755": 17447880704.0, "17760": 17447878656.0, "17765": 17447886848.0, "17770": 17447878656.0, "17775": 17447880704.0, "17780": 17447878656.0, "17785": 17447882752.0, "17790": 17447872512.0, "17795": 17447874560.0, "17800": 17447878656.0, "17805": 17447876608.0, "17810": 17447874560.0, "17815": 17447878656.0, "17820": 17447882752.0, "17825": 17447860224.0, "17830": 17447886848.0, "17835": 17447860224.0, "17840": 17447874560.0, "17845": 17447870464.0, "17850": 17447868416.0, "17855": 
17447890944.0, "17860": 17447872512.0, "17865": 17447886848.0, "17870": 17447874560.0, "17875": 17447872512.0, "17880": 17447880704.0, "17885": 17447880704.0, "17890": 17447890944.0, "17895": 17447876608.0, "17900": 17447870464.0, "17905": 17447872512.0, "17910": 17447876608.0, "17915": 17447878656.0, "17920": 17447878656.0, "17925": 17447878656.0, "17930": 17447878656.0, "17935": 17447878656.0, "17940": 17447866368.0, "17945": 17447886848.0, "17950": 17447878656.0, "17955": 17447886848.0, "17960": 17447884800.0, "17965": 17447870464.0, "17970": 17447870464.0, "17975": 17447880704.0, "17980": 17447878656.0, "17985": 17447874560.0, "17990": 17447872512.0, "17995": 17447884800.0, "18000": 17447882752.0, "18005": 17447886848.0, "18010": 17448413184.0, "18015": 17447874560.0, "18020": 17447890944.0, "18025": 17447864320.0, "18030": 17447882752.0, "18035": 17447860224.0, "18040": 17447870464.0, "18045": 17447882752.0, "18050": 17447882752.0, "18055": 17447876608.0, "18060": 17447874560.0, "18065": 17447888896.0, "18070": 17448026112.0, "18075": 17447882752.0, "18080": 17447874560.0, "18085": 17447882752.0, "18090": 17447892992.0, "18095": 17447876608.0, "18100": 17447878656.0, "18105": 17447886848.0, "18110": 17447890944.0, "18115": 17447872512.0, "18120": 17447880704.0, "18125": 17447886848.0, "18130": 17447862272.0, "18135": 17447870464.0, "18140": 17447882752.0, "18145": 17447856128.0, "18150": 17447880704.0, "18155": 17447890944.0, "18160": 17448081408.0, "18165": 17447870464.0, "18170": 17447878656.0, "18175": 17447884800.0, "18180": 17447886848.0, "18185": 17447878656.0, "18190": 17447886848.0, "18195": 17447876608.0, "18200": 17447882752.0, "18205": 17447868416.0, "18210": 17447870464.0, "18215": 17447858176.0, "18220": 17447888896.0, "18225": 17447884800.0, "18230": 17447868416.0, "18235": 17447882752.0, "18240": 17447868416.0, "18245": 17447882752.0, "18250": 17447882752.0, "18255": 17447882752.0, "18260": 17447890944.0, "18265": 17447972864.0, "18270": 17447895040.0, "18275": 17447895040.0, "18280": 17447876608.0, "18285": 17447874560.0, "18290": 17447895040.0, "18295": 17447884800.0, "18300": 17447882752.0, "18305": 17447882752.0, "18310": 17447890944.0, "18315": 17447866368.0, "18320": 17447880704.0, "18325": 17447876608.0, "18330": 17447882752.0, "18335": 17447901184.0, "18340": 17447874560.0, "18345": 17447884800.0, "18350": 17447876608.0, "18355": 17447868416.0, "18360": 17447882752.0, "18365": 17447878656.0, "18370": 17447868416.0, "18375": 17447878656.0, "18380": 17447876608.0, "18385": 17447866368.0, "18390": 17447876608.0, "18395": 17447882752.0, "18400": 17447897088.0, "18405": 17447864320.0, "18410": 17447874560.0, "18415": 17447874560.0, "18420": 17447860224.0, "18425": 17447886848.0, "18430": 17447874560.0, "18435": 17448368128.0, "18440": 17447884800.0, "18445": 17447868416.0, "18450": 17447874560.0, "18455": 17447868416.0, "18460": 17447874560.0, "18465": 17447870464.0, "18470": 17447874560.0, "18475": 17447874560.0, "18480": 17447882752.0, "18485": 17447888896.0, "18490": 17447886848.0, "18495": 17447874560.0, "18500": 17448284160.0, "18505": 17447870464.0, "18510": 17447866368.0, "18515": 17447882752.0, "18520": 17447878656.0, "18525": 17447880704.0, "18530": 17447882752.0, "18535": 17447872512.0, "18540": 17447886848.0, "18545": 17447876608.0, "18550": 17447892992.0, "18555": 17447870464.0, "18560": 17447882752.0, "18565": 17447876608.0, "18570": 17447880704.0, "18575": 17447866368.0, "18580": 17447872512.0, "18585": 17447882752.0, "18590": 17447897088.0, "18595": 
17447878656.0, "18600": 17447870464.0, "18605": 17447874560.0, "18610": 17447878656.0, "18615": 17447870464.0, "18620": 17447890944.0, "18625": 17447882752.0, "18630": 17447878656.0, "18635": 17447890944.0, "18640": 17447886848.0, "18645": 17448550400.0, "18650": 17447866368.0, "18655": 17447862272.0, "18660": 17447886848.0, "18665": 17447874560.0, "18670": 17447870464.0, "18675": 17447870464.0, "18680": 17447868416.0, "18685": 17447845888.0, "18690": 17447882752.0, "18695": 17447874560.0, "18700": 17447878656.0, "18705": 17447878656.0, "18710": 17447890944.0, "18715": 17447892992.0, "18720": 17447870464.0, "18725": 17447870464.0, "18730": 17447870464.0, "18735": 17447874560.0, "18740": 17447868416.0, "18745": 17447858176.0, "18750": 17447878656.0, "18755": 17447890944.0, "18760": 17447882752.0, "18765": 17447878656.0, "18770": 17447876608.0, "18775": 17447878656.0, "18780": 17447888896.0, "18785": 17447892992.0, "18790": 17447880704.0, "18795": 17447876608.0, "18800": 17447874560.0, "18805": 17447874560.0, "18810": 17447872512.0, "18815": 17447880704.0, "18820": 17447872512.0, "18825": 17447882752.0, "18830": 17447876608.0, "18835": 17447882752.0, "18840": 17447870464.0, "18845": 17447886848.0, "18850": 17447876608.0, "18855": 17447890944.0, "18860": 17447882752.0, "18865": 17447876608.0, "18870": 17447882752.0, "18875": 17447876608.0, "18880": 17447862272.0, "18885": 17447870464.0, "18890": 17447878656.0, "18895": 17447874560.0, "18900": 17447884800.0, "18905": 17447876608.0, "18910": 17447890944.0, "18915": 17447870464.0, "18920": 17447854080.0, "18925": 17447862272.0, "18930": 17447882752.0, "18935": 17447876608.0, "18940": 17447880704.0, "18945": 17447874560.0, "18950": 17447886848.0, "18955": 17447878656.0, "18960": 17447886848.0, "18965": 17447878656.0, "18970": 17447878656.0, "18975": 17447858176.0, "18980": 17447882752.0, "18985": 17447882752.0, "18990": 17447878656.0, "18995": 17447878656.0, "19000": 17447878656.0, "19005": 17447880704.0, "19010": 17447874560.0, "19015": 17447886848.0, "19020": 17447880704.0, "19025": 17447876608.0, "19030": 17448398848.0, "19035": 17448386560.0, "19040": 17448390656.0, "19045": 17448386560.0, "19050": 17448400896.0, "19055": 17448394752.0, "19060": 17448384512.0, "19065": 17448382464.0, "19070": 17448390656.0, "19075": 17448398848.0, "19080": 17448384512.0, "19085": 17448390656.0, "19090": 17448384512.0, "19095": 17448376320.0, "19100": 17448390656.0, "19105": 17448392704.0, "19110": 17448386560.0, "19115": 17448370176.0, "19120": 17448382464.0, "19125": 17448394752.0, "19130": 17448386560.0, "19135": 17448386560.0, "19140": 17448380416.0, "19145": 17448376320.0, "19150": 17448390656.0, "19155": 17448386560.0, "19160": 17448396800.0, "19165": 17448384512.0, "19170": 17448374272.0, "19175": 17448382464.0, "19180": 17448382464.0, "19185": 17448380416.0, "19190": 17448388608.0, "19195": 17448388608.0, "19200": 17448374272.0, "19205": 17448392704.0, "19210": 17448400896.0, "19215": 17448402944.0, "19220": 17448394752.0, "19225": 17448390656.0, "19230": 17448382464.0, "19235": 17448380416.0, "19240": 17448402944.0, "19245": 17448388608.0, "19250": 17448388608.0, "19255": 17448390656.0, "19260": 17448394752.0, "19265": 17448394752.0, "19270": 17448382464.0, "19275": 17448388608.0, "19280": 17448388608.0, "19285": 17448376320.0, "19290": 17448390656.0, "19295": 17448390656.0, "19300": 17448392704.0, "19305": 17448398848.0, "19310": 17448390656.0, "19315": 17448388608.0, "19320": 17448386560.0, "19325": 17448398848.0, "19330": 17448382464.0, "19335": 
17448382464.0, "19340": 17448390656.0, "19345": 17448388608.0, "19350": 17448404992.0, "19355": 17448394752.0, "19360": 17448398848.0, "19365": 17448390656.0, "19370": 17448386560.0, "19375": 17448374272.0, "19380": 17448388608.0, "19385": 17448386560.0, "19390": 17448384512.0, "19395": 17448380416.0, "19400": 17448394752.0, "19405": 17448398848.0, "19410": 17448394752.0, "19415": 17448378368.0, "19420": 17448390656.0, "19425": 17448388608.0, "19430": 17448976384.0, "19435": 17448390656.0, "19440": 17448390656.0, "19445": 17448386560.0, "19450": 17448386560.0, "19455": 17448378368.0, "19460": 17448394752.0, "19465": 17448407040.0, "19470": 17448370176.0, "19475": 17448404992.0, "19480": 17448390656.0, "19485": 17448398848.0, "19490": 17448392704.0, "19495": 17448370176.0, "19500": 17448394752.0, "19505": 17448382464.0, "19510": 17448396800.0, "19515": 17448382464.0, "19520": 17448386560.0, "19525": 17448374272.0, "19530": 17448376320.0, "19535": 17448382464.0, "19540": 17448849408.0, "19545": 17448374272.0, "19550": 17448394752.0, "19555": 17448390656.0, "19560": 17448382464.0, "19565": 17448384512.0, "19570": 17448394752.0, "19575": 17448396800.0, "19580": 17448378368.0, "19585": 17448400896.0, "19590": 17448392704.0, "19595": 17448386560.0, "19600": 17447870464.0, "19605": 17447878656.0, "19610": 17447886848.0, "19615": 17447876608.0, "19620": 17447872512.0, "19625": 17447880704.0, "19630": 17447895040.0, "19635": 17447874560.0, "19640": 17447872512.0, "19645": 17447886848.0, "19650": 17447882752.0, "19655": 17447876608.0, "19660": 17447876608.0, "19665": 17447868416.0, "19670": 17447886848.0, "19675": 17447864320.0, "19680": 17447882752.0, "19685": 17447872512.0, "19690": 17447866368.0, "19695": 17447886848.0, "19700": 17447870464.0, "19705": 17447884800.0, "19710": 17447876608.0, "19715": 17447870464.0, "19720": 17447874560.0, "19725": 17447874560.0, "19730": 17447870464.0, "19735": 17447878656.0, "19740": 17447874560.0, "19745": 17447882752.0, "19750": 17447874560.0, "19755": 17447888896.0, "19760": 17447866368.0, "19765": 17447888896.0, "19770": 17447890944.0, "19775": 17447874560.0, "19780": 17447874560.0, "19785": 17447897088.0, "19790": 17447868416.0, "19795": 17447901184.0, "19800": 17448364032.0, "19805": 17447878656.0, "19810": 17447860224.0, "19815": 17447872512.0, "19820": 17447886848.0, "19825": 17447882752.0, "19830": 17447878656.0, "19835": 17447880704.0, "19840": 17447874560.0, "19845": 17447890944.0, "19850": 17447856128.0, "19855": 17447866368.0, "19860": 17447876608.0, "19865": 17447878656.0, "19870": 17447866368.0, "19875": 17447874560.0, "19880": 17447882752.0, "19885": 17447870464.0, "19890": 17447870464.0, "19895": 17447866368.0, "19900": 17447884800.0, "19905": 17448294400.0, "19910": 17447870464.0, "19915": 17447882752.0, "19920": 17447876608.0, "19925": 17447874560.0, "19930": 17447886848.0, "19935": 17447868416.0, "19940": 17447874560.0, "19945": 17447886848.0, "19950": 17447862272.0, "19955": 17447878656.0, "19960": 17447886848.0, "19965": 17447878656.0, "19970": 17447882752.0, "19975": 17447874560.0, "19980": 17447888896.0, "19985": 17447890944.0, "19990": 17447882752.0, "19995": 17447884800.0, "20000": 17447876608.0, "20005": 17447868416.0, "20010": 17447882752.0, "20015": 17447876608.0, "20020": 17447880704.0, "20025": 17447866368.0, "20030": 17447868416.0, "20035": 17447866368.0, "20040": 17447874560.0, "20045": 17447888896.0, "20050": 17447880704.0, "20055": 17447870464.0, "20060": 17447895040.0, "20065": 17447886848.0, "20070": 17447886848.0, "20075": 
17447874560.0, "20080": 17447880704.0, "20085": 17447878656.0, "20090": 17447882752.0, "20095": 17447884800.0, "20100": 17447864320.0, "20105": 17447882752.0, "20110": 17447874560.0, "20115": 17447888896.0, "20120": 17447870464.0, "20125": 17447878656.0, "20130": 17447890944.0, "20135": 17447874560.0, "20140": 17447880704.0, "20145": 17447874560.0, "20150": 17447866368.0, "20155": 17447874560.0, "20160": 17447890944.0, "20165": 17447874560.0, "20170": 17447878656.0, "20175": 17447880704.0, "20180": 17447882752.0, "20185": 17447882752.0, "20190": 17447888896.0, "20195": 17447866368.0, "20200": 17447878656.0, "20205": 17447866368.0, "20210": 17447878656.0, "20215": 17447882752.0, "20220": 17447874560.0, "20225": 17447874560.0, "20230": 17447878656.0, "20235": 17447882752.0, "20240": 17447874560.0, "20245": 17447882752.0, "20250": 17447872512.0, "20255": 17447872512.0, "20260": 17447878656.0, "20265": 17447878656.0, "20270": 17447874560.0, "20275": 17447880704.0, "20280": 17447880704.0, "20285": 17447870464.0, "20290": 17447895040.0, "20295": 17447878656.0, "20300": 17447882752.0, "20305": 17447886848.0, "20310": 17447878656.0, "20315": 17447874560.0, "20320": 17447870464.0, "20325": 17447874560.0, "20330": 17447895040.0, "20335": 17447886848.0, "20340": 17447890944.0, "20345": 17447870464.0, "20350": 17447890944.0, "20355": 17447882752.0, "20360": 17447888896.0, "20365": 17447880704.0, "20370": 17447882752.0, "20375": 17447874560.0, "20380": 17447886848.0, "20385": 17447864320.0, "20390": 17447888896.0, "20395": 17447874560.0, "20400": 17447858176.0, "20405": 17447876608.0, "20410": 17447876608.0, "20415": 17447884800.0, "20420": 17447870464.0, "20425": 17447886848.0, "20430": 17447876608.0, "20435": 17447880704.0, "20440": 17447892992.0, "20445": 17447890944.0, "20450": 17447882752.0, "20455": 17447876608.0, "20460": 17447868416.0, "20465": 17447878656.0, "20470": 17447884800.0, "20475": 17447870464.0, "20480": 17447884800.0, "20485": 17447882752.0, "20490": 17447882752.0, "20495": 17448466432.0, "20500": 17447886848.0, "20505": 17447870464.0, "20510": 17447878656.0, "20515": 17447878656.0, "20520": 17447882752.0, "20525": 17447882752.0, "20530": 17447868416.0, "20535": 17447895040.0, "20540": 17447882752.0, "20545": 17447884800.0, "20550": 17447874560.0, "20555": 17447880704.0, "20560": 17447876608.0, "20565": 17447882752.0, "20570": 17447874560.0, "20575": 17447868416.0, "20580": 17447878656.0, "20585": 17447878656.0, "20590": 17447878656.0, "20595": 17447888896.0, "20600": 17447882752.0, "20605": 17447870464.0, "20610": 17447903232.0, "20615": 17447872512.0, "20620": 17447884800.0, "20625": 17447872512.0, "20630": 17447899136.0, "20635": 17448435712.0, "20640": 17447870464.0, "20645": 17447872512.0, "20650": 17447870464.0, "20655": 17447866368.0, "20660": 17447882752.0, "20665": 17447862272.0, "20670": 17447858176.0, "20675": 17447880704.0, "20680": 17447872512.0, "20685": 17447870464.0, "20690": 17447876608.0, "20695": 17447870464.0, "20700": 17447874560.0, "20705": 17447884800.0, "20710": 17447880704.0, "20715": 17447868416.0, "20720": 17447872512.0, "20725": 17447886848.0, "20730": 17447866368.0, "20735": 17447878656.0, "20740": 17447878656.0, "20745": 17447872512.0, "20750": 17447882752.0, "20755": 17447876608.0, "20760": 17447882752.0, "20765": 17447884800.0, "20770": 17447878656.0, "20775": 17447890944.0, "20780": 17447890944.0, "20785": 17447870464.0, "20790": 17447862272.0, "20795": 17447868416.0, "20800": 17447882752.0, "20805": 17447895040.0, "20810": 17447890944.0, "20815": 
17447874560.0, "20820": 17447886848.0, "20825": 17447876608.0, "20830": 17447872512.0, "20835": 17447878656.0, "20840": 17447878656.0, "20845": 17447874560.0, "20850": 17447878656.0, "20855": 17447874560.0, "20860": 17447890944.0, "20865": 17447882752.0, "20870": 17447872512.0, "20875": 17447892992.0, "20880": 17447874560.0, "20885": 17447870464.0, "20890": 17447872512.0, "20895": 17447878656.0, "20900": 17447874560.0, "20905": 17447868416.0, "20910": 17447870464.0, "20915": 17447876608.0, "20920": 17447884800.0, "20925": 17447874560.0, "20930": 17447870464.0, "20935": 17447868416.0, "20940": 17447884800.0, "20945": 17447870464.0, "20950": 17447886848.0, "20955": 17447884800.0, "20960": 17447892992.0, "20965": 17447882752.0, "20970": 17447880704.0, "20975": 17447874560.0, "20980": 17447876608.0, "20985": 17447868416.0, "20990": 17447878656.0, "20995": 17447872512.0, "21000": 17447884800.0, "21005": 17447886848.0, "21010": 17447892992.0, "21015": 17447858176.0, "21020": 17447882752.0, "21025": 17447878656.0, "21030": 17448554496.0, "21035": 17447870464.0, "21040": 17447895040.0, "21045": 17447866368.0, "21050": 17447878656.0, "21055": 17447880704.0, "21060": 17447874560.0, "21065": 17447868416.0, "21070": 17447878656.0, "21075": 17447880704.0, "21080": 17447886848.0, "21085": 17447870464.0, "21090": 17447890944.0, "21095": 17447874560.0, "21100": 17447880704.0, "21105": 17447878656.0, "21110": 17447878656.0, "21115": 17447876608.0, "21120": 17447874560.0, "21125": 17447874560.0, "21130": 17448554496.0, "21135": 17447866368.0, "21140": 17447884800.0, "21145": 17447858176.0, "21150": 17447880704.0, "21155": 17447888896.0, "21160": 17447888896.0, "21165": 17447878656.0, "21170": 17447872512.0, "21175": 17447892992.0, "21180": 17447872512.0, "21185": 17447878656.0, "21190": 17447876608.0, "21195": 17447899136.0, "21200": 17447888896.0, "21205": 17447874560.0, "21210": 17447880704.0, "21215": 17447876608.0, "21220": 17447884800.0, "21225": 17447876608.0, "21230": 17447880704.0, "21235": 17447864320.0, "21240": 17447868416.0, "21245": 17447874560.0, "21250": 17447866368.0, "21255": 17447888896.0, "21260": 17447880704.0, "21265": 17447884800.0, "21270": 17447866368.0, "21275": 17447872512.0, "21280": 17448685568.0, "21285": 17447880704.0, "21290": 17447870464.0, "21295": 17447878656.0, "21300": 17447878656.0, "21305": 17447901184.0, "21310": 17447878656.0, "21315": 17447895040.0, "21320": 17447884800.0, "21325": 17447876608.0, "21330": 17447878656.0, "21335": 17447880704.0, "21340": 17447870464.0, "21345": 17447890944.0, "21350": 17447874560.0, "21355": 17447868416.0, "21360": 17447888896.0, "21365": 17447878656.0, "21370": 17447888896.0, "21375": 17447882752.0, "21380": 17447868416.0, "21385": 17448744960.0, "21390": 17447886848.0, "21395": 17447880704.0, "21400": 17447870464.0, "21405": 17447870464.0, "21410": 17447866368.0, "21415": 17447882752.0, "21420": 17447870464.0, "21425": 17447870464.0, "21430": 17447884800.0, "21435": 17447890944.0, "21440": 17447895040.0, "21445": 17447890944.0, "21450": 17447876608.0, "21455": 17447880704.0, "21460": 17447874560.0, "21465": 17447880704.0, "21470": 17447884800.0, "21475": 17447870464.0, "21480": 17447870464.0, "21485": 17447880704.0, "21490": 17447876608.0, "21495": 17447870464.0, "21500": 17447886848.0, "21505": 17447872512.0, "21510": 17447895040.0, "21515": 17447870464.0, "21520": 17447880704.0, "21525": 17447878656.0, "21530": 17447862272.0, "21535": 17448232960.0, "21540": 17447888896.0, "21545": 17447882752.0, "21550": 17447874560.0, "21555": 
17447876608.0, "21560": 17447874560.0, "21565": 17447882752.0, "21570": 17447880704.0, "21575": 17447874560.0, "21580": 17447872512.0, "21585": 17447892992.0, "21590": 17448513536.0, "21595": 17447872512.0, "21600": 17447874560.0, "21605": 17447890944.0, "21610": 17447878656.0, "21615": 17447872512.0, "21620": 17447884800.0, "21625": 17447880704.0, "21630": 17447888896.0, "21635": 17447882752.0, "21640": 17447876608.0, "21645": 17447880704.0, "21650": 17447878656.0, "21655": 17447882752.0, "21660": 17447880704.0, "21665": 17447878656.0, "21670": 17447878656.0, "21675": 17447876608.0, "21680": 17447862272.0, "21685": 17447880704.0, "21690": 17447876608.0, "21695": 17447876608.0, "21700": 17447880704.0, "21705": 17447878656.0, "21710": 17447874560.0, "21715": 17447882752.0, "21720": 17447880704.0, "21725": 17447878656.0, "21730": 17447897088.0, "21735": 17447886848.0, "21740": 17447874560.0, "21745": 17447874560.0, "21750": 17447860224.0, "21755": 17447895040.0, "21760": 17447882752.0, "21765": 17447872512.0, "21770": 17447884800.0, "21775": 17447890944.0, "21780": 17447878656.0, "21785": 17447882752.0, "21790": 17447892992.0, "21795": 17447882752.0, "21800": 17447880704.0, "21805": 17447870464.0, "21810": 17447895040.0, "21815": 17447880704.0, "21820": 17447870464.0, "21825": 17447882752.0, "21830": 17447872512.0, "21835": 17447876608.0, "21840": 17447882752.0, "21845": 17447874560.0, "21850": 17447874560.0, "21855": 17447862272.0, "21860": 17447886848.0, "21865": 17447870464.0, "21870": 17447864320.0, "21875": 17447878656.0, "21880": 17447870464.0, "21885": 17448939520.0, "21890": 17447858176.0, "21895": 17447874560.0, "21900": 17447882752.0, "21905": 17447878656.0, "21910": 17447866368.0, "21915": 17447882752.0, "21920": 17447864320.0, "21925": 17447882752.0, "21930": 17447862272.0, "21935": 17447874560.0, "21940": 17447882752.0, "21945": 17447886848.0, "21950": 17447872512.0, "21955": 17447880704.0, "21960": 17447862272.0, "21965": 17447880704.0, "21970": 17447868416.0, "21975": 17447862272.0, "21980": 17447874560.0, "21985": 17448544256.0, "21990": 17447895040.0, "21995": 17447886848.0, "22000": 17447895040.0, "22005": 17447880704.0, "22010": 17447874560.0, "22015": 17447890944.0, "22020": 17447882752.0, "22025": 17447870464.0, "22030": 17447870464.0, "22035": 17447890944.0, "22040": 17447882752.0, "22045": 17447870464.0, "22050": 17447880704.0, "22055": 17447882752.0, "22060": 17447895040.0, "22065": 17447878656.0, "22070": 17447886848.0, "22075": 17447872512.0, "22080": 17447886848.0, "22085": 17447872512.0, "22090": 17447878656.0, "22095": 17447882752.0, "22100": 17447876608.0, "22105": 17447878656.0, "22110": 17447878656.0, "22115": 17447897088.0, "22120": 17447872512.0, "22125": 17447886848.0, "22130": 17447870464.0, "22135": 17447886848.0, "22140": 17447866368.0, "22145": 17447886848.0, "22150": 17447874560.0, "22155": 17447888896.0, "22160": 17447870464.0, "22165": 17447874560.0, "22170": 17447878656.0, "22175": 17447882752.0, "22180": 17447868416.0, "22185": 17447880704.0, "22190": 17447872512.0, "22195": 17447880704.0, "22200": 17447882752.0, "22205": 17447878656.0, "22210": 17447878656.0, "22215": 17447874560.0, "22220": 17447880704.0, "22225": 17447880704.0, "22230": 17447876608.0, "22235": 17447888896.0, "22240": 17447878656.0, "22245": 17447868416.0, "22250": 17447878656.0, "22255": 17447874560.0, "22260": 17447870464.0, "22265": 17447866368.0, "22270": 17447890944.0, "22275": 17447872512.0, "22280": 17447874560.0, "22285": 17447880704.0, "22290": 17447888896.0, "22295": 
17447874560.0, "22300": 17447878656.0, "22305": 17447872512.0, "22310": 17447872512.0, "22315": 17447878656.0, "22320": 17447878656.0, "22325": 17447876608.0, "22330": 17447878656.0, "22335": 17447884800.0, "22340": 17447878656.0, "22345": 17447880704.0, "22350": 17447866368.0, "22355": 17447874560.0, "22360": 17447882752.0, "22365": 17447874560.0, "22370": 17447874560.0, "22375": 17447870464.0, "22380": 17447866368.0, "22385": 17447886848.0, "22390": 17447888896.0, "22395": 17447882752.0, "22400": 17447874560.0, "22405": 17447882752.0, "22410": 17447884800.0, "22415": 17447882752.0, "22420": 17447897088.0, "22425": 17447878656.0, "22430": 17447895040.0, "22435": 17447886848.0, "22440": 17447882752.0, "22445": 17447870464.0, "22450": 17447882752.0, "22455": 17447868416.0, "22460": 17447884800.0, "22465": 17447882752.0, "22470": 17447882752.0, "22475": 17447864320.0, "22480": 17447868416.0, "22485": 17447880704.0, "22490": 17447890944.0, "22495": 17447876608.0, "22500": 17447886848.0, "22505": 17447886848.0, "22510": 17447868416.0, "22515": 17447874560.0, "22520": 17447884800.0, "22525": 17447866368.0, "22530": 17447866368.0, "22535": 17447872512.0, "22540": 17447872512.0, "22545": 17447868416.0, "22550": 17447878656.0, "22555": 17447874560.0, "22560": 17447888896.0, "22565": 17447880704.0, "22570": 17447872512.0, "22575": 17447886848.0, "22580": 17447872512.0, "22585": 17447890944.0, "22590": 17447874560.0, "22595": 17447888896.0, "22600": 17447866368.0, "22605": 17447880704.0, "22610": 17447882752.0, "22615": 17447878656.0, "22620": 17447876608.0, "22625": 17447878656.0, "22630": 17447884800.0, "22635": 17447876608.0, "22640": 17447888896.0, "22645": 17447870464.0, "22650": 17447892992.0, "22655": 17447870464.0, "22660": 17447868416.0, "22665": 17447886848.0, "22670": 17447882752.0, "22675": 17447884800.0, "22680": 17447880704.0, "22685": 17447882752.0, "22690": 17447874560.0, "22695": 17447886848.0, "22700": 17447878656.0, "22705": 17447862272.0, "22710": 17447876608.0, "22715": 17447878656.0, "22720": 17447872512.0, "22725": 17447882752.0, "22730": 17447895040.0, "22735": 17447886848.0, "22740": 17447874560.0, "22745": 17447860224.0, "22750": 17447880704.0, "22755": 17447882752.0, "22760": 17447874560.0, "22765": 17447874560.0, "22770": 17447878656.0, "22775": 17447876608.0, "22780": 17447880704.0, "22785": 17447878656.0, "22790": 17447882752.0, "22795": 17447874560.0, "22800": 17447888896.0, "22805": 17447886848.0, "22810": 17447872512.0, "22815": 17447882752.0, "22820": 17447880704.0, "22825": 17447880704.0, "22830": 17447870464.0, "22835": 17447866368.0, "22840": 17447882752.0, "22845": 17447874560.0, "22850": 17447878656.0, "22855": 17447884800.0, "22860": 17447882752.0, "22865": 17447874560.0, "22870": 17447878656.0, "22875": 17447878656.0, "22880": 17447866368.0, "22885": 17447880704.0, "22890": 17447876608.0, "22895": 17447874560.0, "22900": 17447870464.0, "22905": 17447880704.0, "22910": 17447870464.0, "22915": 17447884800.0, "22920": 17447897088.0, "22925": 17447878656.0, "22930": 17447888896.0, "22935": 17447870464.0, "22940": 17447876608.0, "22945": 17447874560.0, "22950": 17447878656.0, "22955": 17447886848.0, "22960": 17447872512.0, "22965": 17447868416.0, "22970": 17447878656.0, "22975": 17447884800.0, "22980": 17447886848.0, "22985": 17447872512.0, "22990": 17447874560.0, "22995": 17447874560.0, "23000": 17447886848.0, "23005": 17447872512.0, "23010": 17447878656.0, "23015": 17447876608.0, "23020": 17447886848.0, "23025": 17447870464.0, "23030": 17447872512.0, "23035": 
17447872512.0, "23040": 17447864320.0, "23045": 17447880704.0, "23050": 17447890944.0, "23055": 17447884800.0, "23060": 17447878656.0, "23065": 17447907328.0, "23070": 17447870464.0, "23075": 17447870464.0, "23080": 17447878656.0, "23085": 17447878656.0, "23090": 17447878656.0, "23095": 17447899136.0, "23100": 17447882752.0, "23105": 17448333312.0, "23110": 17447874560.0, "23115": 17447892992.0, "23120": 17447874560.0, "23125": 17447882752.0, "23130": 17447878656.0, "23135": 17447870464.0, "23140": 17447874560.0, "23145": 17447870464.0, "23150": 17447874560.0, "23155": 17447888896.0, "23160": 17447878656.0, "23165": 17447878656.0, "23170": 17447886848.0, "23175": 17447878656.0, "23180": 17447882752.0, "23185": 17447876608.0, "23190": 17447936000.0, "23195": 17447878656.0, "23200": 17447884800.0, "23205": 17447876608.0, "23210": 17447880704.0, "23215": 17447878656.0, "23220": 17447876608.0, "23225": 17447878656.0, "23230": 17447884800.0, "23235": 17447878656.0, "23240": 17447872512.0, "23245": 17447888896.0, "23250": 17447880704.0, "23255": 17447884800.0, "23260": 17447878656.0, "23265": 17447895040.0, "23270": 17447880704.0, "23275": 17447907328.0, "23280": 17447880704.0, "23285": 17447880704.0, "23290": 17447874560.0, "23295": 17447878656.0, "23300": 17447872512.0, "23305": 17447868416.0, "23310": 17447890944.0, "23315": 17447866368.0, "23320": 17447886848.0, "23325": 17447868416.0, "23330": 17447874560.0, "23335": 17447880704.0, "23340": 17447862272.0, "23345": 17447890944.0, "23350": 17447874560.0, "23355": 17447874560.0, "23360": 17447882752.0, "23365": 17447882752.0, "23370": 17447882752.0, "23375": 17447886848.0, "23380": 17447872512.0, "23385": 17447897088.0, "23390": 17447890944.0, "23395": 17447874560.0, "23400": 17447876608.0, "23405": 17447876608.0, "23410": 17447880704.0, "23415": 17447886848.0, "23420": 17447895040.0, "23425": 17447866368.0, "23430": 17447897088.0, "23435": 17447888896.0, "23440": 17447880704.0, "23445": 17447872512.0, "23450": 17447874560.0, "23455": 17447878656.0, "23460": 17447874560.0, "23465": 17447876608.0, "23470": 17447878656.0, "23475": 17447882752.0, "23480": 17447872512.0, "23485": 17447886848.0, "23490": 17447882752.0, "23495": 17447890944.0, "23500": 17447872512.0, "23505": 17447890944.0, "23510": 17447888896.0, "23515": 17448146944.0, "23520": 17447876608.0, "23525": 17447876608.0, "23530": 17447872512.0, "23535": 17447878656.0, "23540": 17447874560.0, "23545": 17447890944.0, "23550": 17447870464.0, "23555": 17447886848.0, "23560": 17447874560.0, "23565": 17447878656.0, "23570": 17447878656.0, "23575": 17448314880.0, "23580": 17447872512.0, "23585": 17447895040.0, "23590": 17447888896.0, "23595": 17447882752.0, "23600": 17447878656.0, "23605": 17447882752.0, "23610": 17447874560.0, "23615": 17447882752.0, "23620": 17447864320.0, "23625": 17447872512.0, "23630": 17447886848.0, "23635": 17447886848.0, "23640": 17447874560.0, "23645": 17447878656.0, "23650": 17447890944.0, "23655": 17447858176.0, "23660": 17447870464.0, "23665": 17447874560.0, "23670": 17447878656.0, "23675": 17447878656.0, "23680": 17447872512.0, "23685": 17447874560.0, "23690": 17447884800.0, "23695": 17447878656.0, "23700": 17447880704.0, "23705": 17447880704.0, "23710": 17447876608.0, "23715": 17447878656.0, "23720": 17447880704.0, "23725": 17447886848.0, "23730": 17447882752.0, "23735": 17447872512.0, "23740": 17447880704.0, "23745": 17447886848.0, "23750": 17447884800.0, "23755": 17447882752.0, "23760": 17447870464.0, "23765": 17447874560.0, "23770": 17447870464.0, "23775": 
17447882752.0, "23780": 17447868416.0, "23785": 17447876608.0, "23790": 17447880704.0, "23795": 17447888896.0, "23800": 17447886848.0, "23805": 17447872512.0, "23810": 17447878656.0, "23815": 17447870464.0, "23820": 17447874560.0, "23825": 17447895040.0, "23830": 17447874560.0, "23835": 17447866368.0, "23840": 17447878656.0, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": 
"nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": 
"nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": 
"nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": 
"nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": 
"nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 
"nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": 
"nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": 
"nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": 
"nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": 
"nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": 
"nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": 
"nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": 
"nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": 
"nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": 
"nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 
"nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": 
"nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": 
"nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": 
"nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": 
"nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": 
"nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": 
"nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": 
"nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": 
"nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 105.86866, "5": 27.5243, "10": 28.1298, "15": 31.20783, "20": 27.87376, "25": 29.49552, "30": 27.54191, "35": 27.57738, "40": 25.33253, "45": 26.15694, "50": 26.28548, "55": 25.51003, "60": 25.30287, "65": 25.30045, "70": 26.20496, "75": 26.00594, "80": 24.83331, "85": 24.5867, "90": 24.71129, "95": 24.79795, "100": 24.35243, "105": 25.68495, "110": 24.28126, "115": 24.4563, "120": 23.79027, "125": 24.39165, "130": 23.9158, "135": 23.51006, "140": 24.76995, "145": 23.63327, "150": 23.66437, "155": 23.64294, "160": 24.00689, "165": 23.74082, "170": 23.73023, "175": 24.5099, "180": 23.67345, "185": 23.27434, "190": 23.62969, "195": 23.9198, "200": 23.1559, "205": 23.92701, "210": 23.11186, "215": 23.28003, "220": 23.84027, "225": 23.63863, "230": 22.895, "235": 23.49538, "240": 23.21926, "245": 22.94419, "250": 23.81609, "255": 22.93311, "260": 24.12377, "265": 23.15621, "270": 22.82685, "275": 22.97502, "280": 23.20544, "285": 23.03408, "290": 22.7701, "295": 23.08958, "300": 22.73783, "305": 22.67177, "310": 22.96961, "315": 23.46347, "320": 22.9418, "325": 22.96319, "330": 22.77395, "335": 22.73034, "340": 22.89067, "345": 22.92514, "350": 23.44664, "355": 23.2911, "360": 22.78186, "365": 22.5755, "370": 22.98301, "375": 23.01979, "380": 22.82335, "385": 22.61654, "390": 22.81808, "395": 22.59931, "400": 22.46535, "405": 23.45793, "410": 22.45848, "415": 22.76169, "420": 22.94383, "425": 22.81616, "430": 22.49236, "435": 22.92228, "440": 23.1105, "445": 22.44164, "450": 23.57092, "455": 22.59991, "460": 22.60979, "465": 22.83875, "470": 22.36905, "475": 22.37728, "480": 22.97937, "485": 22.69111, "490": 22.3485, "495": 23.34196, "500": 22.7308, "505": 22.5235, "510": 22.46247, "515": 23.71997, "520": 22.75588, "525": 22.57718, "530": 23.56207, "535": 22.51443, "540": 22.48361, "545": 23.29201, "550": 22.40779, "555": 22.8444, "560": 23.37572, "565": 22.49735, "570": 22.75488, "575": 
22.89492, "580": 22.74477, "585": 22.60436, "590": 23.16329, "595": 22.39527, "600": 22.79847, "605": 22.46625, "610": 22.54849, "615": 23.37304, "620": 22.44486, "625": 22.32187, "630": 23.15831, "635": 22.31844, "640": 22.84602, "645": 22.36468, "650": 22.3273, "655": 22.50677, "660": 22.50828, "665": 22.37353, "670": 22.33741, "675": 23.32347, "680": 22.46549, "685": 22.51376, "690": 23.99761, "695": 22.53893, "700": 22.49407, "705": 22.59101, "710": 22.37598, "715": 22.32939, "720": 22.39665, "725": 22.27563, "730": 22.96876, "735": 22.31432, "740": 22.48942, "745": 22.69561, "750": 22.47822, "755": 22.58422, "760": 22.33484, "765": 22.81884, "770": 22.31515, "775": 34.74235, "780": 24.18534, "785": 22.49987, "790": 22.38627, "795": 22.32725, "800": 22.37145, "805": 22.3399, "810": 22.30009, "815": 22.39986, "820": 22.77884, "825": 22.33778, "830": 22.50133, "835": 22.95916, "840": 22.77323, "845": 22.97551, "850": 22.66539, "855": 22.65729, "860": 23.41249, "865": 22.3742, "870": 22.35325, "875": 23.49277, "880": 22.32836, "885": 22.43145, "890": 23.85402, "895": 22.33376, "900": 22.2893, "905": 23.46962, "910": 22.65806, "915": 30.91117, "920": 27.9404, "925": 25.5039, "930": 22.42043, "935": 22.50394, "940": 22.3523, "945": 22.64808, "950": 22.80169, "955": 22.34261, "960": 22.31586, "965": 22.67122, "970": 22.37831, "975": 22.32088, "980": 22.82978, "985": 22.38588, "990": 22.41922, "995": 22.55842, "1000": 22.99728, "1005": 22.42272, "1010": 22.35328, "1015": 22.59459, "1020": 22.417, "1025": 22.33861, "1030": 22.52776, "1035": 22.61799, "1040": 22.29925, "1045": 22.27448, "1050": 22.41, "1055": 22.29965, "1060": 22.76227, "1065": 23.60236, "1070": 22.90237, "1075": 22.70822, "1080": 23.56038, "1085": 22.42088, "1090": 22.31116, "1095": 22.96551, "1100": 22.26456, "1105": 22.70461, "1110": 23.02264, "1115": 22.4029, "1120": 22.30211, "1125": 22.93764, "1130": 22.4981, "1135": 22.6124, "1140": 23.87217, "1145": 22.37103, "1150": 22.44505, "1155": 23.14266, "1160": 22.33476, "1165": 22.28105, "1170": 22.82821, "1175": 23.04803, "1180": 22.2928, "1185": 22.80058, "1190": 22.40369, "1195": 22.57008, "1200": 23.48947, "1205": 23.0083, "1210": 23.6272, "1215": 22.61008, "1220": 23.36132, "1225": 23.67034, "1230": 23.53979, "1235": 22.45733, "1240": 22.94668, "1245": 23.28349, "1250": 22.71836, "1255": 22.86751, "1260": 22.78431, "1265": 22.33577, "1270": 22.27667, "1275": 22.25684, "1280": 22.45495, "1285": 22.23076, "1290": 22.23001, "1295": 22.34294, "1300": 22.26883, "1305": 22.20764, "1310": 22.23297, "1315": 22.59344, "1320": 22.31138, "1325": 22.26384, "1330": 22.27967, "1335": 22.43728, "1340": 22.27787, "1345": 22.23125, "1350": 23.34727, "1355": 22.3046, "1360": 22.54434, "1365": 22.66493, "1370": 22.40846, "1375": 22.3753, "1380": 22.92916, "1385": 22.48588, "1390": 22.32022, "1395": 22.87651, "1400": 22.36661, "1405": 22.2445, "1410": 22.94393, "1415": 22.6391, "1420": 22.3417, "1425": 23.16752, "1430": 22.31012, "1435": 22.23718, "1440": 23.28394, "1445": 22.33133, "1450": 22.27417, "1455": 23.08275, "1460": 22.37399, "1465": 22.24181, "1470": 22.56442, "1475": 22.47041, "1480": 22.38119, "1485": 22.3673, "1490": 22.22658, "1495": 22.29031, "1500": 22.32967, "1505": 22.28572, "1510": 22.22748, "1515": 22.41056, "1520": 22.26986, "1525": 22.46393, "1530": 22.62662, "1535": 22.41455, "1540": 22.41286, "1545": 22.24332, "1550": 22.78843, "1555": 22.33269, "1560": 22.27978, "1565": 22.51116, "1570": 22.29514, "1575": 22.41509, "1580": 22.52286, "1585": 22.46716, "1590": 
22.44685, "1595": 22.93538, "1600": 22.45712, "1605": 22.81687, "1610": 23.11505, "1615": 101.43819, "1620": 22.50706, "1625": 23.05721, "1630": 23.91206, "1635": 23.85742, "1640": 23.18831, "1645": 24.23958, "1650": 23.10976, "1655": 22.87432, "1660": 22.61725, "1665": 22.41515, "1670": 22.50571, "1675": 22.25202, "1680": 22.19893, "1685": 22.37028, "1690": 22.21771, "1695": 22.19765, "1700": 22.22444, "1705": 22.3618, "1710": 22.20334, "1715": 22.20086, "1720": 22.41741, "1725": 22.21877, "1730": 22.24269, "1735": 22.47463, "1740": 22.71409, "1745": 22.29684, "1750": 22.21664, "1755": 22.52973, "1760": 22.29104, "1765": 22.25748, "1770": 22.24553, "1775": 22.24678, "1780": 22.20543, "1785": 22.30902, "1790": 22.79206, "1795": 22.19903, "1800": 22.22973, "1805": 22.57196, "1810": 22.20957, "1815": 22.22766, "1820": 22.36847, "1825": 22.28322, "1830": 22.28194, "1835": 22.31064, "1840": 22.50887, "1845": 22.23597, "1850": 22.35818, "1855": 22.32158, "1860": 22.33559, "1865": 22.28448, "1870": 22.22008, "1875": 22.55212, "1880": 22.41352, "1885": 22.22631, "1890": 22.22529, "1895": 22.52517, "1900": 22.18494, "1905": 22.16923, "1910": 23.35993, "1915": 22.21531, "1920": 22.51151, "1925": 23.57921, "1930": 22.57907, "1935": 22.30088, "1940": 23.20473, "1945": 22.28976, "1950": 22.25124, "1955": 22.95404, "1960": 22.38381, "1965": 22.79575, "1970": 23.14964, "1975": 22.39994, "1980": 22.27198, "1985": 22.35673, "1990": 22.17216, "1995": 22.27648, "2000": 22.17346, "2005": 22.30537, "2010": 22.16763, "2015": 22.17264, "2020": 22.65557, "2025": 22.1944, "2030": 22.30959, "2035": 22.2, "2040": 23.5494, "2045": 22.32322, "2050": 22.36683, "2055": 22.53657, "2060": 22.24138, "2065": 22.2901, "2070": 22.21868, "2075": 22.14485, "2080": 22.29335, "2085": 22.2037, "2090": 22.21704, "2095": 22.24307, "2100": 22.29622, "2105": 22.2972, "2110": 22.9633, "2115": 22.30458, "2120": 22.21123, "2125": 22.95883, "2130": 22.26369, "2135": 22.31276, "2140": 23.3112, "2145": 22.45406, "2150": 22.16488, "2155": 23.23859, "2160": 22.28608, "2165": 22.15799, "2170": 23.15516, "2175": 22.17103, "2180": 22.206, "2185": 22.5087, "2190": 22.18877, "2195": 22.28504, "2200": 22.25598, "2205": 22.39778, "2210": 22.28323, "2215": 23.23729, "2220": 22.26385, "2225": 22.26701, "2230": 23.20972, "2235": 22.37248, "2240": 22.29094, "2245": 23.03885, "2250": 22.26215, "2255": 22.20325, "2260": 22.6105, "2265": 22.24911, "2270": 22.43573, "2275": 22.30151, "2280": 22.29351, "2285": 22.26292, "2290": 22.27442, "2295": 22.47322, "2300": 22.46659, "2305": 22.47113, "2310": 22.30881, "2315": 22.81986, "2320": 22.43278, "2325": 22.54671, "2330": 22.6375, "2335": 22.30869, "2340": 22.27465, "2345": 22.29144, "2350": 22.48658, "2355": 22.27998, "2360": 22.25107, "2365": 22.27279, "2370": 22.21696, "2375": 22.25252, "2380": 22.21462, "2385": 22.60005, "2390": 22.32992, "2395": 22.36949, "2400": 23.03567, "2405": 22.23046, "2410": 22.24693, "2415": 22.5995, "2420": 22.26181, "2425": 22.28953, "2430": 23.00037, "2435": 22.26845, "2440": 22.41168, "2445": 23.42136, "2450": 22.16479, "2455": 22.32153, "2460": 23.47361, "2465": 22.25866, "2470": 22.17888, "2475": 23.26573, "2480": 22.40333, "2485": 22.24768, "2490": 22.73425, "2495": 22.25463, "2500": 22.26711, "2505": 22.44958, "2510": 22.24719, "2515": 22.24683, "2520": 22.35456, "2525": 22.24772, "2530": 22.44319, "2535": 22.37726, "2540": 22.21642, "2545": 22.22575, "2550": 22.29765, "2555": 22.33317, "2560": 22.29099, "2565": 22.49906, "2570": 22.22897, "2575": 22.24928, "2580": 
22.74883, "2585": 23.08116, "2590": 22.63166, "2595": 22.40653, "2600": 22.899, "2605": 22.74613, "2610": 22.64863, "2615": 22.30009, "2620": 22.29029, "2625": 22.33052, "2630": 22.19127, "2635": 22.41666, "2640": 22.21327, "2645": 22.52947, "2650": 22.19826, "2655": 22.35276, "2660": 22.79174, "2665": 22.1971, "2670": 22.34809, "2675": 23.46691, "2680": 22.30905, "2685": 22.344, "2690": 23.37738, "2695": 22.2309, "2700": 22.3796, "2705": 23.10741, "2710": 22.20201, "2715": 22.40287, "2720": 22.88854, "2725": 22.27402, "2730": 22.32103, "2735": 22.6414, "2740": 22.22147, "2745": 22.46682, "2750": 22.46146, "2755": 99.24538, "2760": 22.23207, "2765": 22.36683, "2770": 22.22324, "2775": 22.16938, "2780": 22.17318, "2785": 22.12214, "2790": 46.60334, "2795": 26.06869, "2800": 63.15344, "2805": 22.10689, "2810": 22.28581, "2815": 22.11837, "2820": 22.12693, "2825": 22.14184, "2830": 22.15474, "2835": 24.14747, "2840": 22.28615, "2845": 22.23348, "2850": 22.09157, "2855": 22.27747, "2860": 22.24855, "2865": 22.17903, "2870": 22.28846, "2875": 22.48808, "2880": 22.23365, "2885": 22.46195, "2890": 22.16868, "2895": 24.48564, "2900": 23.33514, "2905": 22.47393, "2910": 22.60634, "2915": 23.34789, "2920": 22.21467, "2925": 22.33244, "2930": 23.37488, "2935": 22.26254, "2940": 22.10736, "2945": 23.05563, "2950": 22.20954, "2955": 22.12093, "2960": 22.7311, "2965": 22.31778, "2970": 22.16017, "2975": 22.14517, "2980": 22.14704, "2985": 22.23082, "2990": 22.21963, "2995": 22.18401, "3000": 22.13491, "3005": 22.15477, "3010": 22.24669, "3015": 22.23991, "3020": 23.02974, "3025": 22.24038, "3030": 22.18465, "3035": 22.15706, "3040": 22.12254, "3045": 22.72721, "3050": 22.14801, "3055": 22.26619, "3060": 22.09819, "3065": 22.1396, "3070": 22.50871, "3075": 22.26149, "3080": 22.20454, "3085": 22.12651, "3090": 22.15909, "3095": 22.38609, "3100": 22.35572, "3105": 22.16256, "3110": 22.40048, "3115": 22.49888, "3120": 22.31226, "3125": 22.284, "3130": 22.69374, "3135": 23.76019, "3140": 22.15743, "3145": 22.97207, "3150": 22.38021, "3155": 22.14977, "3160": 23.1806, "3165": 22.24388, "3170": 22.10132, "3175": 23.37873, "3180": 22.25147, "3185": 22.26884, "3190": 23.1536, "3195": 22.13065, "3200": 22.39951, "3205": 22.99851, "3210": 22.19897, "3215": 22.11232, "3220": 22.67458, "3225": 22.15104, "3230": 22.36649, "3235": 22.53986, "3240": 22.08869, "3245": 22.23223, "3250": 22.20198, "3255": 22.45701, "3260": 22.12204, "3265": 22.13658, "3270": 22.12446, "3275": 22.2497, "3280": 22.6675, "3285": 22.62234, "3290": 22.57391, "3295": 22.13742, "3300": 22.16718, "3305": 22.15584, "3310": 22.20104, "3315": 22.23745, "3320": 22.21457, "3325": 22.22325, "3330": 22.24699, "3335": 22.16347, "3340": 22.12353, "3345": 22.90854, "3350": 22.11311, "3355": 22.14784, "3360": 23.48514, "3365": 22.13694, "3370": 22.13198, "3375": 23.11855, "3380": 22.13233, "3385": 22.1627, "3390": 23.38291, "3395": 22.24522, "3400": 22.19156, "3405": 22.59836, "3410": 22.18769, "3415": 22.28733, "3420": 22.12476, "3425": 22.12371, "3430": 22.10616, "3435": 22.17273, "3440": 22.34721, "3445": 22.18947, "3450": 22.40031, "3455": 22.15453, "3460": 22.13014, "3465": 22.17549, "3470": 22.14501, "3475": 22.20536, "3480": 22.30028, "3485": 22.25307, "3490": 22.1322, "3495": 22.84692, "3500": 22.14734, "3505": 22.11012, "3510": 22.99085, "3515": 22.08622, "3520": 22.31067, "3525": 23.05423, "3530": 22.12271, "3535": 22.1119, "3540": 23.2336, "3545": 23.20149, "3550": 22.39926, "3555": 23.29594, "3560": 22.38292, "3565": 22.3066, "3570": 22.84816, 
"3575": 22.22589, "3580": 22.39174, "3585": 22.68009, "3590": 22.35172, "3595": 22.09822, "3600": 22.0804, "3605": 22.12297, "3610": 22.06953, "3615": 22.30298, "3620": 22.06896, "3625": 22.06624, "3630": 22.06095, "3635": 22.09915, "3640": 22.08731, "3645": 22.08672, "3650": 22.13634, "3655": 22.05377, "3660": 22.07894, "3665": 22.12926, "3670": 22.09975, "3675": 22.63814, "3680": 22.94945, "3685": 22.73075, "3690": 22.3893, "3695": 22.40092, "3700": 22.28482, "3705": 22.10991, "3710": 22.31551, "3715": 22.12316, "3720": 22.07805, "3725": 22.74531, "3730": 22.25086, "3735": 22.08884, "3740": 22.92793, "3745": 22.31309, "3750": 22.07883, "3755": 22.91697, "3760": 22.08838, "3765": 22.35881, "3770": 22.89681, "3775": 22.06254, "3780": 22.21965, "3785": 22.70291, "3790": 22.10561, "3795": 22.117, "3800": 22.6845, "3805": 22.27401, "3810": 22.10784, "3815": 22.99611, "3820": 22.24563, "3825": 22.28077, "3830": 22.56835, "3835": 22.14892, "3840": 22.16629, "3845": 22.19701, "3850": 22.16381, "3855": 22.36064, "3860": 22.06355, "3865": 22.15808, "3870": 22.06509, "3875": 22.30857, "3880": 22.31805, "3885": 23.76531, "3890": 22.70623, "3895": 22.46667, "3900": 23.20569, "3905": 22.58357, "3910": 22.37154, "3915": 23.06784, "3920": 22.32197, "3925": 22.55769, "3930": 22.82133, "3935": 22.30026, "3940": 22.27654, "3945": 22.98222, "3950": 22.30622, "3955": 22.27985, "3960": 23.03239, "3965": 22.26647, "3970": 22.31575, "3975": 23.29106, "3980": 22.20276, "3985": 22.26123, "3990": 22.71136, "3995": 22.73831, "4000": 22.2304, "4005": 22.27919, "4010": 22.3829, "4015": 22.9861, "4020": 22.32907, "4025": 22.18992, "4030": 22.61412, "4035": 22.33365, "4040": 22.24266, "4045": 22.48053, "4050": 22.25873, "4055": 22.41666, "4060": 22.47737, "4065": 22.78752, "4070": 22.28217, "4075": 22.23876, "4080": 22.35876, "4085": 22.28275, "4090": 22.16164, "4095": 22.23063, "4100": 22.44295, "4105": 22.18727, "4110": 22.23501, "4115": 22.24432, "4120": 22.20763, "4125": 22.61488, "4130": 22.31009, "4135": 22.64333, "4140": 22.51318, "4145": 22.25328, "4150": 22.31807, "4155": 22.59032, "4160": 22.34318, "4165": 22.18311, "4170": 22.76894, "4175": 22.24581, "4180": 22.20513, "4185": 22.83229, "4190": 22.29694, "4195": 22.56538, "4200": 23.08197, "4205": 22.22903, "4210": 22.22707, "4215": 22.90014, "4220": 22.33725, "4225": 22.48818, "4230": 23.75595, "4235": 22.22435, "4240": 22.2368, "4245": 23.27431, "4250": 22.24164, "4255": 22.20056, "4260": 22.83662, "4265": 22.3728, "4270": 22.40924, "4275": 22.33744, "4280": 22.31959, "4285": 22.17503, "4290": 22.27055, "4295": 22.33686, "4300": 22.50303, "4305": 22.22035, "4310": 22.20641, "4315": 22.52134, "4320": 22.26591, "4325": 22.31173, "4330": 22.22271, "4335": 22.21073, "4340": 22.23991, "4345": 22.23457, "4350": 22.23894, "4355": 22.17741, "4360": 22.30576, "4365": 22.2028, "4370": 22.60817, "4375": 22.21872, "4380": 22.31853, "4385": 22.44722, "4390": 22.18763, "4395": 22.51367, "4400": 22.68101, "4405": 22.59529, "4410": 22.41267, "4415": 22.48739, "4420": 22.41369, "4425": 22.18191, "4430": 22.90411, "4435": 22.18082, "4440": 99.49797, "4445": 22.12262, "4450": 22.20474, "4455": 22.13596, "4460": 22.05674, "4465": 22.11161, "4470": 22.07819, "4475": 22.05337, "4480": 22.15776, "4485": 22.26007, "4490": 22.07221, "4495": 22.07016, "4500": 22.08345, "4505": 22.13976, "4510": 22.26136, "4515": 22.19949, "4520": 22.13747, "4525": 22.11575, "4530": 22.18919, "4535": 22.06343, "4540": 22.11646, "4545": 22.05526, "4550": 22.02565, "4555": 22.04382, "4560": 22.0325, 
"4565": 22.38262, "4570": 22.17642, "4575": 22.0772, "4580": 22.17647, "4585": 22.03959, "4590": 22.12272, "4595": 22.08479, "4600": 22.04704, "4605": 22.18312, "4610": 22.18009, "4615": 22.40414, "4620": 22.41693, "4625": 22.21491, "4630": 22.68585, "4635": 22.23743, "4640": 22.11703, "4645": 22.79907, "4650": 22.1202, "4655": 22.32061, "4660": 23.5672, "4665": 22.13417, "4670": 22.34442, "4675": 23.43162, "4680": 22.31361, "4685": 22.13164, "4690": 23.0389, "4695": 22.15807, "4700": 22.09999, "4705": 22.3279, "4710": 22.46598, "4715": 22.13638, "4720": 22.2042, "4725": 22.11925, "4730": 22.20306, "4735": 22.196, "4740": 22.134, "4745": 22.19464, "4750": 22.0967, "4755": 22.29041, "4760": 22.1549, "4765": 22.20745, "4770": 22.11027, "4775": 22.09078, "4780": 22.11296, "4785": 22.08661, "4790": 22.2871, "4795": 22.1475, "4800": 22.58398, "4805": 22.10457, "4810": 22.12545, "4815": 22.15838, "4820": 22.12468, "4825": 22.23039, "4830": 22.11066, "4835": 22.1893, "4840": 22.15476, "4845": 22.5009, "4850": 22.27532, "4855": 22.16191, "4860": 22.48751, "4865": 22.21384, "4870": 22.10462, "4875": 22.84896, "4880": 22.13924, "4885": 22.10012, "4890": 23.30805, "4895": 22.19012, "4900": 22.10889, "4905": 23.53894, "4910": 22.25789, "4915": 22.11059, "4920": 22.92012, "4925": 22.2142, "4930": 22.22024, "4935": 22.62997, "4940": 22.11659, "4945": 22.14752, "4950": 22.23664, "4955": 22.09451, "4960": 22.09845, "4965": 22.29268, "4970": 22.18848, "4975": 22.09718, "4980": 22.55378, "4985": 22.1627, "4990": 22.19914, "4995": 22.20334, "5000": 22.45484, "5005": 22.13007, "5010": 22.20427, "5015": 22.16255, "5020": 22.07658, "5025": 22.44861, "5030": 22.1156, "5035": 22.12901, "5040": 22.11248, "5045": 22.08506, "5050": 22.17819, "5055": 22.1272, "5060": 22.18801, "5065": 22.09503, "5070": 22.41882, "5075": 22.78638, "5080": 26.27642, "5085": 22.18617, "5090": 22.14356, "5095": 22.22733, "5100": 30.46112, "5105": 47.43751, "5110": 35.96645, "5115": 22.18387, "5120": 22.17562, "5125": 22.12924, "5130": 22.17035, "5135": 22.24651, "5140": 22.34181, "5145": 22.15347, "5150": 22.3334, "5155": 22.15379, "5160": 22.17209, "5165": 22.21446, "5170": 22.15951, "5175": 22.38038, "5180": 22.11671, "5185": 22.26187, "5190": 22.19746, "5195": 22.15034, "5200": 22.1046, "5205": 22.21761, "5210": 22.1316, "5215": 22.37744, "5220": 22.14268, "5225": 22.23649, "5230": 22.47943, "5235": 22.20737, "5240": 22.47734, "5245": 22.80323, "5250": 22.17441, "5255": 22.26766, "5260": 23.49327, "5265": 22.15138, "5270": 22.12088, "5275": 23.35054, "5280": 22.1591, "5285": 22.18443, "5290": 23.40368, "5295": 22.11016, "5300": 22.2025, "5305": 22.94553, "5310": 22.50829, "5315": 22.29598, "5320": 22.80223, "5325": 22.26336, "5330": 22.26128, "5335": 22.19511, "5340": 22.13634, "5345": 22.16444, "5350": 22.53715, "5355": 22.20471, "5360": 22.16942, "5365": 22.17329, "5370": 22.21541, "5375": 22.13123, "5380": 22.15361, "5385": 22.11794, "5390": 22.12846, "5395": 22.13329, "5400": 22.1391, "5405": 22.3054, "5410": 22.20903, "5415": 22.20131, "5420": 22.14307, "5425": 22.21831, "5430": 22.14025, "5435": 22.17322, "5440": 22.70807, "5445": 22.19349, "5450": 22.18649, "5455": 22.16108, "5460": 22.59336, "5465": 22.17204, "5470": 22.15743, "5475": 23.01254, "5480": 22.16465, "5485": 22.27566, "5490": 22.61131, "5495": 22.30609, "5500": 22.19457, "5505": 22.77441, "5510": 22.43567, "5515": 22.16294, "5520": 23.01877, "5525": 22.13216, "5530": 22.33573, "5535": 23.37022, "5540": 22.17239, "5545": 22.26816, "5550": 23.40328, "5555": 22.1335, 
"5560": 22.10801, "5565": 22.78751, "5570": 22.35264, "5575": 22.15508, "5580": 22.39675, "5585": 22.43397, "5590": 22.16937, "5595": 22.27273, "5600": 22.11896, "5605": 22.38024, "5610": 22.25853, "5615": 22.20405, "5620": 22.42439, "5625": 103.24894, "5630": 22.21828, "5635": 22.34653, "5640": 22.27605, "5645": 22.25786, "5650": 22.2069, "5655": 22.23782, "5660": 22.45121, "5665": 22.27886, "5670": 22.51813, "5675": 22.27682, "5680": 22.18743, "5685": 22.29732, "5690": 22.19879, "5695": 22.49294, "5700": 22.19459, "5705": 22.1403, "5710": 22.19565, "5715": 22.33154, "5720": 22.14482, "5725": 22.20657, "5730": 22.53198, "5735": 22.16334, "5740": 22.15288, "5745": 22.2123, "5750": 22.51036, "5755": 22.31442, "5760": 22.22625, "5765": 22.60485, "5770": 22.18501, "5775": 22.26288, "5780": 22.29196, "5785": 22.25457, "5790": 22.19913, "5795": 22.30567, "5800": 31.7086, "5805": 22.14331, "5810": 22.09806, "5815": 22.10286, "5820": 22.16739, "5825": 22.15615, "5830": 22.1299, "5835": 22.09128, "5840": 22.27086, "5845": 22.15464, "5850": 22.18943, "5855": 22.15398, "5860": 22.10228, "5865": 22.21758, "5870": 22.31213, "5875": 22.27539, "5880": 22.05962, "5885": 22.70877, "5890": 22.21589, "5895": 22.15617, "5900": 22.26716, "5905": 22.67423, "5910": 22.33739, "5915": 22.09927, "5920": 23.10401, "5925": 22.1314, "5930": 22.08376, "5935": 24.9588, "5940": 30.31479, "5945": 22.22004, "5950": 23.25044, "5955": 22.13658, "5960": 22.26241, "5965": 23.21275, "5970": 22.20257, "5975": 22.74516, "5980": 22.79046, "5985": 22.13266, "5990": 22.11488, "5995": 22.22801, "6000": 22.13595, "6005": 22.24459, "6010": 22.41358, "6015": 22.13501, "6020": 22.1081, "6025": 22.11963, "6030": 22.11053, "6035": 22.08994, "6040": 22.26074, "6045": 22.31068, "6050": 22.09389, "6055": 22.2186, "6060": 22.08819, "6065": 22.65383, "6070": 22.10055, "6075": 22.34503, "6080": 22.30633, "6085": 22.14835, "6090": 22.11087, "6095": 22.10192, "6100": 22.09816, "6105": 22.19774, "6110": 22.37954, "6115": 22.09901, "6120": 22.22704, "6125": 22.09652, "6130": 22.23116, "6135": 22.46113, "6140": 22.08116, "6145": 22.28795, "6150": 22.43662, "6155": 22.47462, "6160": 22.14327, "6165": 23.03357, "6170": 22.15186, "6175": 22.06621, "6180": 23.18677, "6185": 22.14213, "6190": 22.10347, "6195": 23.47568, "6200": 22.26376, "6205": 22.21753, "6210": 22.2904, "6215": 22.32977, "6220": 22.3098, "6225": 22.28718, "6230": 22.32039, "6235": 22.23838, "6240": 22.26958, "6245": 22.35067, "6250": 22.47291, "6255": 22.18571, "6260": 22.18113, "6265": 22.3722, "6270": 22.28153, "6275": 22.24068, "6280": 22.21768, "6285": 22.23186, "6290": 22.27298, "6295": 22.18786, "6300": 22.32139, "6305": 22.3139, "6310": 22.24514, "6315": 22.21682, "6320": 22.34309, "6325": 22.38277, "6330": 22.17771, "6335": 22.31608, "6340": 22.18571, "6345": 22.65169, "6350": 22.24255, "6355": 22.38634, "6360": 22.99979, "6365": 22.23075, "6370": 22.29765, "6375": 23.26567, "6380": 22.20839, "6385": 22.23107, "6390": 23.75523, "6395": 22.36391, "6400": 22.30357, "6405": 23.58806, "6410": 22.38488, "6415": 22.43644, "6420": 29.07883, "6425": 29.24209, "6430": 25.11951, "6435": 23.74628, "6440": 22.28473, "6445": 22.41172, "6450": 22.79672, "6455": 22.23213, "6460": 22.20994, "6465": 22.33991, "6470": 22.22613, "6475": 22.15819, "6480": 22.27304, "6485": 22.41617, "6490": 22.47667, "6495": 22.20027, "6500": 22.42458, "6505": 22.17988, "6510": 22.19755, "6515": 22.24649, "6520": 22.37561, "6525": 22.33315, "6530": 22.22128, "6535": 22.47802, "6540": 22.21011, "6545": 22.22193, 
"6550": 22.23321, "6555": 22.19644, "6560": 22.33516, "6565": 22.27763, "6570": 22.22648, "6575": 22.20945, "6580": 22.27722, "6585": 22.19865, "6590": 22.31174, "6595": 22.22154, "6600": 22.24693, "6605": 22.21748, "6610": 22.20741, "6615": 22.22942, "6620": 22.74405, "6625": 22.39287, "6630": 22.358, "6635": 22.87103, "6640": 22.241, "6645": 22.33371, "6650": 23.30417, "6655": 22.23416, "6660": 22.19745, "6665": 23.55109, "6670": 22.25535, "6675": 22.3179, "6680": 23.27462, "6685": 22.25774, "6690": 22.37132, "6695": 23.02143, "6700": 22.33427, "6705": 22.24655, "6710": 22.53585, "6715": 22.37226, "6720": 22.39018, "6725": 22.32856, "6730": 22.17057, "6735": 22.40882, "6740": 22.19919, "6745": 22.19179, "6750": 22.23405, "6755": 22.25271, "6760": 22.26263, "6765": 22.09608, "6770": 22.19822, "6775": 23.36043, "6780": 22.44472, "6785": 22.39983, "6790": 22.89771, "6795": 22.15928, "6800": 22.1542, "6805": 22.30882, "6810": 22.40828, "6815": 22.48279, "6820": 22.17847, "6825": 22.08433, "6830": 22.05925, "6835": 22.08404, "6840": 22.234, "6845": 22.40369, "6850": 22.09312, "6855": 22.24943, "6860": 22.21459, "6865": 22.33335, "6870": 22.08782, "6875": 22.09183, "6880": 22.13621, "6885": 22.22154, "6890": 22.11917, "6895": 22.12065, "6900": 22.48497, "6905": 22.10022, "6910": 22.32149, "6915": 22.06992, "6920": 22.04691, "6925": 22.2124, "6930": 22.19155, "6935": 22.31481, "6940": 22.22634, "6945": 22.08258, "6950": 22.06888, "6955": 22.97098, "6960": 22.06645, "6965": 22.04283, "6970": 22.76188, "6975": 22.0062, "6980": 22.16549, "6985": 22.7854, "6990": 22.11481, "6995": 22.02166, "7000": 23.12796, "7005": 22.15152, "7010": 22.21705, "7015": 23.04217, "7020": 22.06438, "7025": 22.02324, "7030": 23.2385, "7035": 22.07287, "7040": 22.30084, "7045": 23.34607, "7050": 22.03607, "7055": 22.05585, "7060": 22.56476, "7065": 22.19787, "7070": 22.06845, "7075": 22.3341, "7080": 22.22524, "7085": 22.12028, "7090": 22.39585, "7095": 22.2827, "7100": 22.2966, "7105": 22.09398, "7110": 22.10655, "7115": 22.09072, "7120": 22.2741, "7125": 22.05015, "7130": 22.09771, "7135": 22.73044, "7140": 21.98928, "7145": 22.27981, "7150": 22.04251, "7155": 22.18527, "7160": 22.03371, "7165": 22.05405, "7170": 22.54849, "7175": 22.18119, "7180": 22.07185, "7185": 22.26735, "7190": 22.05075, "7195": 22.04575, "7200": 22.36095, "7205": 22.44167, "7210": 22.02475, "7215": 22.51617, "7220": 27.26551, "7225": 22.54332, "7230": 22.73224, "7235": 22.03629, "7240": 22.00316, "7245": 22.59466, "7250": 21.99549, "7255": 22.07763, "7260": 22.66253, "7265": 21.99195, "7270": 22.07233, "7275": 23.02845, "7280": 22.23888, "7285": 22.26086, "7290": 23.16966, "7295": 22.03305, "7300": 22.25165, "7305": 23.14895, "7310": 22.20446, "7315": 22.65925, "7320": 22.27551, "7325": 22.07253, "7330": 22.12652, "7335": 22.30247, "7340": 22.52141, "7345": 23.40992, "7350": 22.59553, "7355": 22.383, "7360": 23.37441, "7365": 22.22348, "7370": 22.26614, "7375": 22.9041, "7380": 22.17506, "7385": 22.66914, "7390": 22.9449, "7395": 22.19632, "7400": 22.23226, "7405": 22.17986, "7410": 22.3653, "7415": 22.96289, "7420": 22.27491, "7425": 22.24875, "7430": 22.70344, "7435": 22.34845, "7440": 22.2249, "7445": 22.49621, "7450": 22.25832, "7455": 22.22613, "7460": 22.20862, "7465": 22.24491, "7470": 22.41099, "7475": 22.23769, "7480": 22.17268, "7485": 22.21024, "7490": 22.23409, "7495": 22.21972, "7500": 22.38885, "7505": 22.50325, "7510": 22.21956, "7515": 22.16976, "7520": 22.19408, "7525": 22.38549, "7530": 22.28072, "7535": 22.23389, "7540": 
22.38602, "7545": 22.24982, "7550": 22.19032, "7555": 22.32655, "7560": 22.3778, "7565": 22.20291, "7570": 22.60423, "7575": 22.62853, "7580": 22.23581, "7585": 22.76169, "7590": 22.26329, "7595": 22.27645, "7600": 23.03451, "7605": 22.21012, "7610": 22.59076, "7615": 22.92364, "7620": 22.20176, "7625": 22.21186, "7630": 23.28506, "7635": 22.17274, "7640": 22.20979, "7645": 23.34105, "7650": 22.39515, "7655": 22.19322, "7660": 23.45533, "7665": 22.45455, "7670": 22.39973, "7675": 25.11262, "7680": 40.93679, "7685": 22.22136, "7690": 22.71146, "7695": 22.31897, "7700": 22.20709, "7705": 22.57125, "7710": 22.22332, "7715": 22.20828, "7720": 22.2384, "7725": 22.17922, "7730": 22.24653, "7735": 22.22637, "7740": 22.83158, "7745": 22.28104, "7750": 22.24304, "7755": 22.26106, "7760": 22.20046, "7765": 22.23604, "7770": 22.23495, "7775": 22.63812, "7780": 22.17662, "7785": 22.20692, "7790": 22.22672, "7795": 22.40768, "7800": 22.18934, "7805": 22.2592, "7810": 22.51566, "7815": 22.2639, "7820": 22.23743, "7825": 22.20561, "7830": 22.71536, "7835": 22.23521, "7840": 22.2444, "7845": 22.45869, "7850": 22.21514, "7855": 22.30334, "7860": 22.96298, "7865": 22.20679, "7870": 22.21905, "7875": 23.164, "7880": 22.1836, "7885": 22.34075, "7890": 23.24594, "7895": 22.19698, "7900": 53.45726, "7905": 32.99909, "7910": 22.92894, "7915": 22.1155, "7920": 22.07602, "7925": 22.27373, "7930": 22.23307, "7935": 22.09579, "7940": 22.09776, "7945": 22.0992, "7950": 22.26774, "7955": 22.32025, "7960": 22.09738, "7965": 22.27355, "7970": 22.07183, "7975": 22.06745, "7980": 22.07201, "7985": 22.19389, "7990": 22.07008, "7995": 22.06647, "8000": 22.12961, "8005": 22.10275, "8010": 22.83882, "8015": 22.69885, "8020": 22.14302, "8025": 23.17859, "8030": 22.10075, "8035": 22.07495, "8040": 23.52867, "8045": 22.07579, "8050": 22.13875, "8055": 23.09711, "8060": 22.07001, "8065": 22.06692, "8070": 22.76334, "8075": 22.17485, "8080": 22.06992, "8085": 22.3488, "8090": 22.20768, "8095": 22.06231, "8100": 22.0765, "8105": 22.59097, "8110": 22.12416, "8115": 22.06335, "8120": 22.05422, "8125": 22.05515, "8130": 22.22866, "8135": 22.09855, "8140": 22.03806, "8145": 22.06163, "8150": 22.06428, "8155": 22.07578, "8160": 22.13539, "8165": 22.04664, "8170": 22.08935, "8175": 22.06919, "8180": 22.34776, "8185": 22.35088, "8190": 22.08636, "8195": 22.40378, "8200": 22.05979, "8205": 22.08539, "8210": 22.165, "8215": 22.17785, "8220": 53.04314, "8225": 22.20604, "8230": 22.09289, "8235": 22.07924, "8240": 22.43967, "8245": 22.07031, "8250": 22.35645, "8255": 22.62668, "8260": 22.10239, "8265": 22.22715, "8270": 22.95084, "8275": 22.11325, "8280": 22.08289, "8285": 23.37266, "8290": 22.22956, "8295": 22.13399, "8300": 22.96706, "8305": 22.08218, "8310": 22.05185, "8315": 22.84957, "8320": 22.11887, "8325": 22.07095, "8330": 22.52073, "8335": 22.08339, "8340": 22.12881, "8345": 22.33493, "8350": 22.21923, "8355": 22.0322, "8360": 22.0951, "8365": 22.03762, "8370": 22.07253, "8375": 22.4061, "8380": 22.04716, "8385": 22.10449, "8390": 22.08067, "8395": 22.20531, "8400": 22.06153, "8405": 22.08362, "8410": 22.02524, "8415": 22.05032, "8420": 22.28056, "8425": 22.04263, "8430": 22.00302, "8435": 22.1024, "8440": 22.2583, "8445": 22.04058, "8450": 22.00832, "8455": 22.14919, "8460": 22.26068, "8465": 22.26582, "8470": 22.21223, "8475": 22.25274, "8480": 22.38541, "8485": 22.1755, "8490": 22.19175, "8495": 23.30241, "8500": 22.18971, "8505": 22.16525, "8510": 23.60507, "8515": 22.26125, "8520": 22.32621, "8525": 23.50128, "8530": 22.24552, 
"8535": 22.27076, "8540": 23.41005, "8545": 22.2513, "8550": 22.16935, "8555": 23.40381, "8560": 22.23459, "8565": 22.1904, "8570": 23.2497, "8575": 22.16975, "8580": 22.15671, "8585": 23.43326, "8590": 22.35414, "8595": 22.16223, "8600": 23.39594, "8605": 24.61445, "8610": 22.40071, "8615": 22.16857, "8620": 22.11372, "8625": 22.58491, "8630": 22.15061, "8635": 22.21091, "8640": 22.16912, "8645": 22.38004, "8650": 22.109, "8655": 22.12862, "8660": 22.14253, "8665": 22.15641, "8670": 22.19219, "8675": 22.14897, "8680": 22.34878, "8685": 22.21552, "8690": 22.21264, "8695": 22.16636, "8700": 22.18324, "8705": 22.73439, "8710": 22.2813, "8715": 22.46159, "8720": 22.4155, "8725": 22.15293, "8730": 22.16414, "8735": 22.90765, "8740": 22.44373, "8745": 22.12415, "8750": 22.51438, "8755": 22.10564, "8760": 22.26367, "8765": 22.89709, "8770": 22.14726, "8775": 22.23862, "8780": 23.2484, "8785": 22.09908, "8790": 22.12075, "8795": 23.07084, "8800": 22.2509, "8805": 22.49255, "8810": 23.333, "8815": 22.13602, "8820": 22.19905, "8825": 23.41385, "8830": 22.10555, "8835": 22.3, "8840": 22.44968, "8845": 22.15783, "8850": 22.1352, "8855": 22.44918, "8860": 22.30376, "8865": 22.20801, "8870": 22.1637, "8875": 22.35806, "8880": 22.11538, "8885": 22.14429, "8890": 23.00185, "8895": 22.3649, "8900": 22.21049, "8905": 23.02655, "8910": 22.20998, "8915": 22.16202, "8920": 23.19881, "8925": 22.20987, "8930": 22.34136, "8935": 24.22688, "8940": 22.21169, "8945": 22.12955, "8950": 22.63745, "8955": 22.25144, "8960": 22.25997, "8965": 22.89122, "8970": 22.1856, "8975": 22.19368, "8980": 22.81463, "8985": 22.28907, "8990": 22.19656, "8995": 22.24194, "9000": 22.61523, "9005": 22.1499, "9010": 22.23188, "9015": 22.20747, "9020": 22.16624, "9025": 22.38108, "9030": 22.17899, "9035": 22.18589, "9040": 22.17251, "9045": 22.16409, "9050": 22.17139, "9055": 22.1745, "9060": 22.19186, "9065": 22.19594, "9070": 21.98423, "9075": 22.00778, "9080": 22.46712, "9085": 22.03249, "9090": 21.95666, "9095": 22.55937, "9100": 22.02744, "9105": 21.95906, "9110": 23.17565, "9115": 21.96349, "9120": 21.98799, "9125": 23.22648, "9130": 21.9851, "9135": 21.95992, "9140": 22.98667, "9145": 21.9793, "9150": 22.02813, "9155": 22.89812, "9160": 22.03173, "9165": 21.98526, "9170": 23.0548, "9175": 22.0432, "9180": 21.94008, "9185": 22.96663, "9190": 22.01049, "9195": 21.97027, "9200": 22.77464, "9205": 22.24907, "9210": 22.2371, "9215": 22.0286, "9220": 21.95671, "9225": 22.26431, "9230": 21.98755, "9235": 21.97192, "9240": 22.5144, "9245": 21.93497, "9250": 22.15899, "9255": 22.0903, "9260": 21.9412, "9265": 21.99362, "9270": 22.06408, "9275": 21.97468, "9280": 21.95948, "9285": 22.15624, "9290": 21.95877, "9295": 22.11506, "9300": 21.94297, "9305": 22.1018, "9310": 22.16219, "9315": 22.04265, "9320": 22.55411, "9325": 21.95647, "9330": 22.52259, "9335": 22.91832, "9340": 22.01918, "9345": 22.00379, "9350": 22.8554, "9355": 22.05588, "9360": 21.98375, "9365": 22.7864, "9370": 21.95231, "9375": 22.0678, "9380": 22.51586, "9385": 22.11022, "9390": 22.00177, "9395": 22.51933, "9400": 21.93394, "9405": 21.96035, "9410": 22.4527, "9415": 22.13795, "9420": 22.48981, "9425": 22.31381, "9430": 22.18449, "9435": 22.02475, "9440": 22.41546, "9445": 21.93916, "9450": 22.13111, "9455": 22.2714, "9460": 21.90876, "9465": 21.9598, "9470": 22.19502, "9475": 22.10623, "9480": 21.97772, "9485": 22.41354, "9490": 21.93377, "9495": 21.91193, "9500": 22.21857, "9505": 21.97581, "9510": 22.53487, "9515": 22.19272, "9520": 22.06078, "9525": 21.97569, "9530": 
22.13252, "9535": 22.12395, "9540": 22.07738, "9545": 22.01481, "9550": 22.23908, "9555": 21.97073, "9560": 21.94536, "9565": 22.87949, "9570": 22.00836, "9575": 22.21476, "9580": 22.70388, "9585": 21.97215, "9590": 21.94867, "9595": 22.8448, "9600": 22.36749, "9605": 21.96319, "9610": 22.81803, "9615": 22.11481, "9620": 21.95095, "9625": 22.56796, "9630": 21.90881, "9635": 21.9722, "9640": 22.30388, "9645": 22.22468, "9650": 22.19157, "9655": 22.22965, "9660": 22.15518, "9665": 22.29204, "9670": 22.19466, "9675": 22.35566, "9680": 22.11694, "9685": 22.13942, "9690": 22.25841, "9695": 22.12883, "9700": 22.15007, "9705": 22.13577, "9710": 22.28766, "9715": 22.15547, "9720": 22.13088, "9725": 22.08594, "9730": 22.25325, "9735": 22.12068, "9740": 22.15193, "9745": 22.12902, "9750": 22.08906, "9755": 22.20238, "9760": 22.06963, "9765": 22.09591, "9770": 22.07376, "9775": 22.11555, "9780": 22.14328, "9785": 22.08157, "9790": 22.35168, "9795": 22.05882, "9800": 22.24632, "9805": 22.11541, "9810": 22.33757, "9815": 22.69866, "9820": 22.15718, "9825": 22.29345, "9830": 23.49537, "9835": 22.84824, "9840": 128.88266, "9845": 23.48947, "9850": 35.51412, "9855": 23.94723, "9860": 22.91788, "9865": 22.07471, "9870": 22.07955, "9875": 22.82455, "9880": 22.14459, "9885": 22.53755, "9890": 22.19123, "9895": 22.13429, "9900": 22.13563, "9905": 22.42955, "9910": 22.12101, "9915": 22.07885, "9920": 22.09481, "9925": 22.11706, "9930": 24.85629, "9935": 31.06566, "9940": 26.77195, "9945": 23.61518, "9950": 22.11485, "9955": 22.06971, "9960": 22.1095, "9965": 22.08752, "9970": 22.74727, "9975": 22.49427, "9980": 22.14488, "9985": 22.26837, "9990": 22.08599, "9995": 22.08447, "10000": 25.35205, "10005": 22.15294, "10010": 22.37109, "10015": 22.44926, "10020": 22.10043, "10025": 22.30091, "10030": 22.6349, "10035": 22.06495, "10040": 22.12453, "10045": 22.26281, "10050": 22.10278, "10055": 22.18731, "10060": 22.7437, "10065": 22.38121, "10070": 22.12015, "10075": 22.96265, "10080": 22.0964, "10085": 22.10132, "10090": 23.22612, "10095": 22.09406, "10100": 22.09487, "10105": 23.29468, "10110": 22.10822, "10115": 22.10501, "10120": 23.13972, "10125": 22.11292, "10130": 22.09733, "10135": 22.33392, "10140": 22.50828, "10145": 22.27964, "10150": 22.24866, "10155": 22.41376, "10160": 22.10304, "10165": 22.28984, "10170": 22.26124, "10175": 22.28037, "10180": 22.05875, "10185": 22.14143, "10190": 22.06727, "10195": 22.16259, "10200": 101.97756, "10205": 22.4821, "10210": 22.11868, "10215": 22.11367, "10220": 22.11846, "10225": 22.13254, "10230": 22.30273, "10235": 22.12703, "10240": 22.10506, "10245": 22.14105, "10250": 22.24271, "10255": 22.30038, "10260": 22.15532, "10265": 22.09753, "10270": 23.05843, "10275": 22.09241, "10280": 22.09207, "10285": 23.27097, "10290": 22.2443, "10295": 22.08452, "10300": 23.80042, "10305": 22.08397, "10310": 22.11101, "10315": 23.47885, "10320": 22.29992, "10325": 22.24928, "10330": 22.81079, "10335": 22.12664, "10340": 22.26965, "10345": 22.41671, "10350": 22.13992, "10355": 22.09051, "10360": 22.11427, "10365": 22.1463, "10370": 22.26406, "10375": 22.24996, "10380": 22.11483, "10385": 22.15321, "10390": 22.06986, "10395": 22.09425, "10400": 22.12232, "10405": 22.14654, "10410": 22.09284, "10415": 22.11785, "10420": 22.11506, "10425": 22.08544, "10430": 22.2364, "10435": 22.14491, "10440": 22.37325, "10445": 22.0971, "10450": 22.11637, "10455": 22.1669, "10460": 22.12866, "10465": 22.29393, "10470": 22.12645, "10475": 22.08962, "10480": 22.08515, "10485": 22.06031, "10490": 22.27549, 
"10495": 22.09451, "10500": 22.296, "10505": 22.09745, "10510": 22.12888, "10515": 23.09202, "10520": 22.23685, "10525": 22.22499, "10530": 23.34186, "10535": 22.11355, "10540": 22.22273, "10545": 22.96212, "10550": 22.20344, "10555": 22.25673, "10560": 22.88347, "10565": 22.15214, "10570": 22.26138, "10575": 22.75884, "10580": 22.12616, "10585": 22.10187, "10590": 22.68881, "10595": 22.08986, "10600": 22.12656, "10605": 22.74291, "10610": 22.235, "10615": 22.1146, "10620": 22.68305, "10625": 22.14334, "10630": 22.08119, "10635": 22.31149, "10640": 22.09023, "10645": 22.25244, "10650": 22.20387, "10655": 22.30048, "10660": 22.04618, "10665": 22.0785, "10670": 22.07668, "10675": 22.25254, "10680": 22.1268, "10685": 22.12798, "10690": 22.17195, "10695": 22.08708, "10700": 22.22559, "10705": 22.0746, "10710": 22.09898, "10715": 22.17993, "10720": 22.07825, "10725": 22.09645, "10730": 22.44508, "10735": 22.3023, "10740": 22.11653, "10745": 22.77413, "10750": 22.15383, "10755": 22.22246, "10760": 23.46852, "10765": 22.19683, "10770": 22.10464, "10775": 22.21106, "10780": 22.3805, "10785": 22.17453, "10790": 22.16545, "10795": 22.22606, "10800": 22.15372, "10805": 22.16939, "10810": 22.274, "10815": 22.12713, "10820": 22.43771, "10825": 22.42843, "10830": 22.16453, "10835": 22.15679, "10840": 23.04396, "10845": 22.17541, "10850": 22.25764, "10855": 22.94838, "10860": 22.3138, "10865": 22.13037, "10870": 23.76912, "10875": 22.15067, "10880": 22.17236, "10885": 23.14013, "10890": 22.17699, "10895": 22.16659, "10900": 22.62814, "10905": 22.15198, "10910": 22.56608, "10915": 22.28891, "10920": 22.54427, "10925": 22.16468, "10930": 22.15282, "10935": 22.25576, "10940": 22.22941, "10945": 22.25619, "10950": 22.18205, "10955": 22.31164, "10960": 22.18445, "10965": 22.12024, "10970": 22.17815, "10975": 22.1429, "10980": 22.14663, "10985": 22.18594, "10990": 22.21734, "10995": 22.12268, "11000": 22.49431, "11005": 22.10394, "11010": 22.13009, "11015": 22.12579, "11020": 22.11646, "11025": 22.18301, "11030": 22.11151, "11035": 22.11219, "11040": 22.356, "11045": 22.17641, "11050": 22.29986, "11055": 22.48542, "11060": 22.15294, "11065": 22.1032, "11070": 22.67881, "11075": 22.1118, "11080": 22.13179, "11085": 23.27412, "11090": 22.46468, "11095": 22.17099, "11100": 23.27808, "11105": 22.15407, "11110": 22.14417, "11115": 22.96478, "11120": 22.30308, "11125": 22.10439, "11130": 22.4145, "11135": 22.12197, "11140": 22.17729, "11145": 22.14077, "11150": 22.11646, "11155": 22.29928, "11160": 22.28522, "11165": 22.13093, "11170": 22.19452, "11175": 22.08306, "11180": 22.457, "11185": 22.08146, "11190": 22.23018, "11195": 22.16382, "11200": 22.13537, "11205": 22.15779, "11210": 22.1152, "11215": 22.25417, "11220": 22.11868, "11225": 22.11885, "11230": 22.14136, "11235": 22.10404, "11240": 22.13352, "11245": 22.11283, "11250": 22.2983, "11255": 22.09851, "11260": 22.10505, "11265": 22.13692, "11270": 22.70712, "11275": 22.26513, "11280": 22.15008, "11285": 22.75834, "11290": 22.15089, "11295": 22.13554, "11300": 22.79744, "11305": 22.35216, "11310": 22.15399, "11315": 23.25233, "11320": 22.33022, "11325": 22.17033, "11330": 23.01187, "11335": 22.11407, "11340": 22.10059, "11345": 22.27149, "11350": 22.52204, "11355": 22.29699, "11360": 22.25294, "11365": 22.30442, "11370": 22.34749, "11375": 22.46684, "11380": 22.29022, "11385": 22.3012, "11390": 22.26943, "11395": 22.30338, "11400": 22.3053, "11405": 22.28502, "11410": 22.2379, "11415": 22.28391, "11420": 22.34019, "11425": 22.6653, "11430": 22.43627, "11435": 
22.33025, "11440": 22.93355, "11445": 22.26089, "11450": 22.24721, "11455": 23.36638, "11460": 22.33755, "11465": 22.41657, "11470": 23.04111, "11475": 22.2519, "11480": 22.2217, "11485": 23.3825, "11490": 22.28565, "11495": 22.1571, "11500": 23.25355, "11505": 22.26813, "11510": 22.41761, "11515": 22.83356, "11520": 22.2121, "11525": 22.35864, "11530": 22.29991, "11535": 22.19191, "11540": 22.17762, "11545": 22.31162, "11550": 22.3533, "11555": 22.27555, "11560": 22.31289, "11565": 22.16978, "11570": 22.22899, "11575": 22.15807, "11580": 22.43921, "11585": 22.20556, "11590": 22.16677, "11595": 22.24484, "11600": 22.20983, "11605": 22.46577, "11610": 22.20846, "11615": 22.19917, "11620": 22.27347, "11625": 22.2886, "11630": 22.4653, "11635": 22.27466, "11640": 22.51916, "11645": 22.23322, "11650": 22.47868, "11655": 22.31395, "11660": 22.20894, "11665": 22.45072, "11670": 22.54201, "11675": 22.27139, "11680": 22.23893, "11685": 22.68264, "11690": 22.23997, "11695": 22.37311, "11700": 23.07201, "11705": 22.291, "11710": 22.26224, "11715": 23.24905, "11720": 22.26347, "11725": 22.27679, "11730": 23.08867, "11735": 22.26443, "11740": 22.23344, "11745": 22.3408, "11750": 22.22912, "11755": 22.24784, "11760": 22.46883, "11765": 22.30462, "11770": 22.26951, "11775": 22.51387, "11780": 22.26943, "11785": 22.41247, "11790": 22.47682, "11795": 22.51523, "11800": 22.2418, "11805": 22.53921, "11810": 22.2396, "11815": 22.44403, "11820": 22.58167, "11825": 22.30815, "11830": 22.38436, "11835": 22.65164, "11840": 22.19449, "11845": 22.21537, "11850": 22.31073, "11855": 22.18873, "11860": 22.23126, "11865": 22.40333, "11870": 22.38537, "11875": 22.31637, "11880": 22.34187, "11885": 22.40536, "11890": 22.29694, "11895": 22.18617, "11900": 22.5396, "11905": 22.2517, "11910": 22.30146, "11915": 22.30698, "11920": 23.6902, "11925": 22.67433, "11930": 22.41608, "11935": 23.48715, "11940": 22.26064, "11945": 22.21751, "11950": 22.91148, "11955": 22.19921, "11960": 22.55457, "11965": 23.1771, "11970": 22.20712, "11975": 22.19817, "11980": 22.93439, "11985": 22.24188, "11990": 22.25578, "11995": 23.26297, "12000": 22.20928, "12005": 22.62054, "12010": 22.15549, "12015": 22.18171, "12020": 22.64815, "12025": 22.27023, "12030": 22.2545, "12035": 22.1845, "12040": 22.17325, "12045": 22.55884, "12050": 22.17352, "12055": 22.24216, "12060": 22.13593, "12065": 22.14586, "12070": 22.20862, "12075": 22.17643, "12080": 22.12239, "12085": 22.16304, "12090": 22.14181, "12095": 22.09371, "12100": 22.41703, "12105": 22.29277, "12110": 22.14284, "12115": 22.10438, "12120": 22.16169, "12125": 22.25554, "12130": 22.29576, "12135": 22.5565, "12140": 22.13078, "12145": 22.41166, "12150": 22.26812, "12155": 22.25377, "12160": 22.76081, "12165": 22.12841, "12170": 22.3889, "12175": 23.38486, "12180": 22.30836, "12185": 22.30256, "12190": 23.05643, "12195": 22.28499, "12200": 22.20536, "12205": 23.07939, "12210": 22.23701, "12215": 22.16145, "12220": 23.01979, "12225": 22.56773, "12230": 22.40174, "12235": 22.60494, "12240": 22.30154, "12245": 22.15902, "12250": 22.51167, "12255": 22.34958, "12260": 22.19127, "12265": 22.28122, "12270": 22.16833, "12275": 22.18465, "12280": 22.15229, "12285": 22.1467, "12290": 22.28804, "12295": 22.15804, "12300": 22.21382, "12305": 22.13951, "12310": 22.16174, "12315": 22.44447, "12320": 22.15885, "12325": 22.30613, "12330": 22.15337, "12335": 22.30589, "12340": 22.1999, "12345": 22.1745, "12350": 22.27547, "12355": 22.33437, "12360": 22.28582, "12365": 22.1519, "12370": 22.3119, "12375": 22.8598, 
"12380": 22.16582, "12385": 22.23767, "12390": 23.01784, "12395": 22.33382, "12400": 22.15389, "12405": 23.28004, "12410": 22.14173, "12415": 22.15368, "12420": 23.09755, "12425": 22.22303, "12430": 22.15798, "12435": 22.78196, "12440": 22.2945, "12445": 22.1587, "12450": 22.73261, "12455": 22.17113, "12460": 22.30944, "12465": 22.71167, "12470": 22.10199, "12475": 22.14638, "12480": 22.30165, "12485": 22.19011, "12490": 22.32598, "12495": 22.15787, "12500": 22.27633, "12505": 22.18818, "12510": 22.29677, "12515": 22.19943, "12520": 22.15767, "12525": 22.19997, "12530": 22.48665, "12535": 22.14347, "12540": 22.17856, "12545": 22.3226, "12550": 22.18066, "12555": 22.14245, "12560": 22.2881, "12565": 22.31239, "12570": 22.13641, "12575": 22.14189, "12580": 22.1446, "12585": 22.16268, "12590": 22.39175, "12595": 22.14793, "12600": 22.19722, "12605": 23.45894, "12610": 22.13176, "12615": 22.1367, "12620": 23.44023, "12625": 22.1299, "12630": 22.4474, "12635": 24.83104, "12640": 22.16282, "12645": 22.17059, "12650": 23.12659, "12655": 22.54311, "12660": 22.14508, "12665": 22.87791, "12670": 22.29035, "12675": 22.10859, "12680": 22.60427, "12685": 22.32424, "12690": 22.14501, "12695": 22.2353, "12700": 22.11713, "12705": 23.62788, "12710": 76.19838, "12715": 35.15617, "12720": 53.52323, "12725": 22.13418, "12730": 22.11021, "12735": 22.1342, "12740": 22.27757, "12745": 22.11459, "12750": 22.13136, "12755": 22.11779, "12760": 22.38937, "12765": 22.21383, "12770": 22.12602, "12775": 22.31502, "12780": 22.15772, "12785": 22.15176, "12790": 22.12988, "12795": 22.18483, "12800": 22.23671, "12805": 22.12091, "12810": 22.46193, "12815": 22.39495, "12820": 22.09328, "12825": 22.12302, "12830": 22.3467, "12835": 22.52687, "12840": 22.13686, "12845": 22.26756, "12850": 22.67041, "12855": 22.11642, "12860": 22.11507, "12865": 23.23445, "12870": 22.19371, "12875": 22.11082, "12880": 23.07766, "12885": 22.1318, "12890": 22.13628, "12895": 22.75204, "12900": 22.44869, "12905": 22.2348, "12910": 23.24037, "12915": 22.12242, "12920": 22.099, "12925": 23.1955, "12930": 22.08957, "12935": 22.09665, "12940": 22.25121, "12945": 22.12469, "12950": 22.16928, "12955": 22.36078, "12960": 22.11298, "12965": 22.25122, "12970": 22.13628, "12975": 22.17261, "12980": 22.11671, "12985": 22.11718, "12990": 22.58086, "12995": 22.29782, "13000": 22.30813, "13005": 22.10063, "13010": 22.30149, "13015": 22.1296, "13020": 22.11914, "13025": 22.21392, "13030": 22.19986, "13035": 23.48234, "13040": 22.49181, "13045": 22.45885, "13050": 23.25093, "13055": 22.21008, "13060": 22.14938, "13065": 23.1092, "13070": 22.17394, "13075": 22.65149, "13080": 22.96326, "13085": 22.1142, "13090": 22.11965, "13095": 22.84835, "13100": 22.18065, "13105": 22.29337, "13110": 23.03745, "13115": 22.14559, "13120": 22.18902, "13125": 23.22768, "13130": 22.22001, "13135": 22.13229, "13140": 22.6899, "13145": 22.64023, "13150": 22.16417, "13155": 22.70918, "13160": 22.22631, "13165": 22.10449, "13170": 22.76635, "13175": 22.11324, "13180": 22.48252, "13185": 22.20778, "13190": 22.09545, "13195": 22.21494, "13200": 22.37453, "13205": 22.1122, "13210": 23.61911, "13215": 22.24059, "13220": 22.12228, "13225": 22.88989, "13230": 22.29422, "13235": 22.21959, "13240": 22.4712, "13245": 22.12836, "13250": 22.20519, "13255": 22.22461, "13260": 22.33928, "13265": 22.55437, "13270": 22.13461, "13275": 22.11088, "13280": 22.13063, "13285": 22.24762, "13290": 22.14007, "13295": 22.1073, "13300": 22.15536, "13305": 22.15056, "13310": 22.2833, "13315": 22.17607, 
"13320": 22.45576, "13325": 22.12186, "13330": 22.11487, "13335": 22.28336, "13340": 22.12592, "13345": 22.39547, "13350": 22.42283, "13355": 22.65163, "13360": 22.24287, "13365": 22.62111, "13370": 22.30455, "13375": 22.13848, "13380": 22.693, "13385": 22.17488, "13390": 22.27557, "13395": 23.01438, "13400": 22.11642, "13405": 22.17809, "13410": 22.93026, "13415": 22.23291, "13420": 22.41226, "13425": 22.91538, "13430": 22.13111, "13435": 22.09849, "13440": 23.16933, "13445": 22.40582, "13450": 22.13057, "13455": 23.20319, "13460": 22.09818, "13465": 22.1228, "13470": 26.65474, "13475": 22.51962, "13480": 22.09971, "13485": 22.97486, "13490": 22.13328, "13495": 22.25854, "13500": 22.71712, "13505": 22.11959, "13510": 22.11576, "13515": 22.2498, "13520": 22.48635, "13525": 22.14451, "13530": 22.28473, "13535": 22.5087, "13540": 22.11036, "13545": 22.39715, "13550": 22.14277, "13555": 22.47507, "13560": 22.10215, "13565": 22.29449, "13570": 22.41286, "13575": 22.12502, "13580": 22.64326, "13585": 22.24268, "13590": 22.69601, "13595": 22.64694, "13600": 22.12512, "13605": 22.06712, "13610": 22.27097, "13615": 22.04664, "13620": 22.02911, "13625": 22.08369, "13630": 22.06847, "13635": 22.2674, "13640": 22.05704, "13645": 22.03395, "13650": 22.02212, "13655": 22.01405, "13660": 22.10292, "13665": 22.04765, "13670": 22.1624, "13675": 22.01057, "13680": 22.42028, "13685": 22.04494, "13690": 22.04976, "13695": 22.1887, "13700": 23.97383, "13705": 28.59691, "13710": 27.46884, "13715": 22.09613, "13720": 22.00944, "13725": 23.47335, "13730": 22.03805, "13735": 22.02014, "13740": 22.19552, "13745": 22.05961, "13750": 22.02592, "13755": 22.0102, "13760": 22.23346, "13765": 22.04236, "13770": 22.02031, "13775": 22.0292, "13780": 22.01072, "13785": 22.01593, "13790": 22.00968, "13795": 22.36829, "13800": 22.02921, "13805": 22.15732, "13810": 22.00256, "13815": 22.1639, "13820": 22.54104, "13825": 22.27217, "13830": 22.02895, "13835": 23.10168, "13840": 22.26862, "13845": 22.01213, "13850": 23.25629, "13855": 22.07204, "13860": 22.27703, "13865": 22.89068, "13870": 22.05503, "13875": 22.04289, "13880": 22.69295, "13885": 22.12263, "13890": 21.98553, "13895": 22.57166, "13900": 22.01637, "13905": 22.021, "13910": 22.22902, "13915": 22.39313, "13920": 22.13025, "13925": 21.99196, "13930": 22.01081, "13935": 22.01796, "13940": 22.03293, "13945": 22.07697, "13950": 22.18752, "13955": 21.99396, "13960": 22.33779, "13965": 22.02495, "13970": 22.05429, "13975": 21.98904, "13980": 22.11115, "13985": 22.04974, "13990": 22.02577, "13995": 22.07866, "14000": 21.98906, "14005": 22.39023, "14010": 21.96216, "14015": 22.2517, "14020": 22.23386, "14025": 22.00722, "14030": 22.06658, "14035": 22.58047, "14040": 22.26459, "14045": 22.00987, "14050": 23.29017, "14055": 22.0715, "14060": 22.02243, "14065": 23.29697, "14070": 21.98552, "14075": 22.00917, "14080": 23.33665, "14085": 22.15608, "14090": 22.03961, "14095": 22.96184, "14100": 22.03391, "14105": 22.16316, "14110": 22.40831, "14115": 22.01907, "14120": 22.13336, "14125": 22.22098, "14130": 22.01658, "14135": 21.99148, "14140": 22.07202, "14145": 22.05245, "14150": 22.06187, "14155": 22.02708, "14160": 22.0033, "14165": 22.03901, "14170": 22.02391, "14175": 22.02047, "14180": 22.23359, "14185": 22.13673, "14190": 22.15379, "14195": 23.38139, "14200": 22.53242, "14205": 22.40147, "14210": 22.08361, "14215": 22.35783, "14220": 22.14361, "14225": 22.08543, "14230": 22.14679, "14235": 22.06928, "14240": 22.13064, "14245": 22.09093, "14250": 22.40817, "14255": 22.0675, 
"14260": 22.18981, "14265": 22.06542, "14270": 22.02903, "14275": 22.07273, "14280": 22.06194, "14285": 22.22455, "14290": 22.11695, "14295": 22.07998, "14300": 22.09878, "14305": 22.24274, "14310": 22.06553, "14315": 22.18964, "14320": 22.16847, "14325": 22.08908, "14330": 22.07437, "14335": 22.07371, "14340": 22.33582, "14345": 22.13176, "14350": 22.09109, "14355": 22.08477, "14360": 22.58906, "14365": 22.18727, "14370": 22.26394, "14375": 22.89701, "14380": 22.30961, "14385": 22.08732, "14390": 23.13605, "14395": 22.25897, "14400": 22.2024, "14405": 23.02925, "14410": 22.08079, "14415": 22.32117, "14420": 23.33656, "14425": 22.0643, "14430": 22.25512, "14435": 22.97935, "14440": 22.11083, "14445": 22.06071, "14450": 22.99703, "14455": 22.0818, "14460": 22.07658, "14465": 23.13362, "14470": 22.08196, "14475": 22.06038, "14480": 22.32988, "14485": 22.40493, "14490": 22.06483, "14495": 22.08828, "14500": 22.28645, "14505": 22.05807, "14510": 22.05097, "14515": 22.0599, "14520": 22.26943, "14525": 22.05993, "14530": 22.08459, "14535": 22.22258, "14540": 22.05577, "14545": 22.06454, "14550": 22.09444, "14555": 22.07581, "14560": 22.05407, "14565": 22.05447, "14570": 22.06135, "14575": 22.19512, "14580": 22.07505, "14585": 22.08514, "14590": 22.09018, "14595": 22.03577, "14600": 22.13656, "14605": 22.06639, "14610": 22.23185, "14615": 22.22575, "14620": 22.7029, "14625": 22.08141, "14630": 22.06996, "14635": 22.79906, "14640": 22.03634, "14645": 22.08697, "14650": 23.15145, "14655": 22.08298, "14660": 22.08974, "14665": 22.98047, "14670": 22.02896, "14675": 22.0517, "14680": 23.07168, "14685": 22.23171, "14690": 22.05078, "14695": 22.92055, "14700": 22.23906, "14705": 22.04827, "14710": 22.6036, "14715": 22.03553, "14720": 22.01876, "14725": 22.14338, "14730": 22.03045, "14735": 22.04494, "14740": 22.00404, "14745": 22.06206, "14750": 22.05579, "14755": 22.34571, "14760": 22.14629, "14765": 22.49629, "14770": 22.26082, "14775": 22.14379, "14780": 22.1596, "14785": 22.15635, "14790": 22.3504, "14795": 22.1896, "14800": 22.75355, "14805": 22.1423, "14810": 32.16642, "14815": 48.67271, "14820": 22.28576, "14825": 22.26926, "14830": 29.45678, "14835": 39.5044, "14840": 24.31624, "14845": 22.23618, "14850": 22.23816, "14855": 22.1697, "14860": 22.36467, "14865": 22.20577, "14870": 22.25344, "14875": 22.45305, "14880": 22.1841, "14885": 22.1588, "14890": 22.838, "14895": 22.26452, "14900": 22.14722, "14905": 23.29372, "14910": 22.12777, "14915": 22.19135, "14920": 23.44043, "14925": 22.13463, "14930": 22.29764, "14935": 23.45678, "14940": 22.18376, "14945": 22.17472, "14950": 23.00582, "14955": 22.19426, "14960": 22.2669, "14965": 22.78173, "14970": 22.16047, "14975": 22.14402, "14980": 22.67739, "14985": 22.1759, "14990": 22.32978, "14995": 22.27211, "15000": 22.49821, "15005": 22.14336, "15010": 22.30566, "15015": 22.1375, "15020": 22.23084, "15025": 22.26679, "15030": 22.18375, "15035": 22.2613, "15040": 22.2026, "15045": 22.21928, "15050": 22.20055, "15055": 108.75283, "15060": 22.21413, "15065": 22.46298, "15070": 23.23013, "15075": 24.7711, "15080": 23.97366, "15085": 22.41145, "15090": 22.29458, "15095": 22.20754, "15100": 22.32429, "15105": 22.13062, "15110": 22.15679, "15115": 22.15394, "15120": 22.11325, "15125": 22.31055, "15130": 22.16434, "15135": 22.13494, "15140": 22.16431, "15145": 22.34642, "15150": 22.1574, "15155": 22.23988, "15160": 22.34189, "15165": 22.12986, "15170": 22.22083, "15175": 22.17054, "15180": 22.31262, "15185": 22.28185, "15190": 22.14968, "15195": 22.38445, 
"15200": 22.25157, "15205": 22.14167, "15210": 22.28101, "15215": 22.14116, "15220": 22.12233, "15225": 22.1588, "15230": 22.15644, "15235": 22.27138, "15240": 22.145, "15245": 22.27718, "15250": 22.23054, "15255": 22.12166, "15260": 22.34333, "15265": 22.15505, "15270": 22.50488, "15275": 22.35867, "15280": 22.37681, "15285": 22.1438, "15290": 22.25179, "15295": 22.22634, "15300": 22.26645, "15305": 22.68257, "15310": 22.16404, "15315": 22.1599, "15320": 22.72198, "15325": 22.14424, "15330": 22.12457, "15335": 23.50373, "15340": 22.14722, "15345": 22.15547, "15350": 23.20711, "15355": 22.15286, "15360": 22.15391, "15365": 22.66223, "15370": 22.38403, "15375": 22.15479, "15380": 22.58415, "15385": 22.19517, "15390": 22.29277, "15395": 22.47288, "15400": 22.29634, "15405": 22.1994, "15410": 22.64123, "15415": 22.1131, "15420": 22.13666, "15425": 22.55303, "15430": 22.13969, "15435": 22.53339, "15440": 22.35246, "15445": 22.11829, "15450": 22.13727, "15455": 22.25045, "15460": 22.4341, "15465": 22.14739, "15470": 22.4151, "15475": 22.15448, "15480": 22.2669, "15485": 22.28845, "15490": 22.14882, "15495": 22.20642, "15500": 22.14014, "15505": 22.51099, "15510": 22.0916, "15515": 22.42788, "15520": 22.2763, "15525": 22.14355, "15530": 22.26678, "15535": 22.28767, "15540": 22.16939, "15545": 22.14751, "15550": 22.66261, "15555": 22.12391, "15560": 22.12855, "15565": 23.05983, "15570": 22.28159, "15575": 22.12816, "15580": 23.35321, "15585": 22.19283, "15590": 22.1841, "15595": 22.88772, "15600": 22.50205, "15605": 22.44481, "15610": 22.93856, "15615": 22.18609, "15620": 22.17825, "15625": 22.00053, "15630": 22.01649, "15635": 23.08682, "15640": 22.439, "15645": 22.16217, "15650": 22.96402, "15655": 22.08262, "15660": 21.95173, "15665": 22.63973, "15670": 21.96672, "15675": 22.30079, "15680": 22.63839, "15685": 21.9711, "15690": 22.02605, "15695": 22.59423, "15700": 21.97532, "15705": 21.98491, "15710": 22.73383, "15715": 21.93264, "15720": 21.99724, "15725": 22.63097, "15730": 21.9426, "15735": 21.95534, "15740": 22.33343, "15745": 22.62256, "15750": 21.95236, "15755": 22.35481, "15760": 22.10618, "15765": 21.92896, "15770": 22.4942, "15775": 21.94821, "15780": 22.4913, "15785": 22.26284, "15790": 21.96854, "15795": 22.07945, "15800": 22.33829, "15805": 21.92376, "15810": 22.97289, "15815": 21.99586, "15820": 21.92305, "15825": 22.28597, "15830": 21.96006, "15835": 21.95524, "15840": 22.00846, "15845": 22.04317, "15850": 21.99815, "15855": 21.95104, "15860": 22.11152, "15865": 22.57453, "15870": 21.97742, "15875": 21.92173, "15880": 21.96242, "15885": 22.20045, "15890": 21.93126, "15895": 21.8915, "15900": 21.99263, "15905": 21.89421, "15910": 21.9604, "15915": 22.04197, "15920": 22.09448, "15925": 21.93892, "15930": 21.89792, "15935": 22.1066, "15940": 21.94705, "15945": 21.90697, "15950": 22.19069, "15955": 22.60943, "15960": 21.97397, "15965": 22.47096, "15970": 21.98518, "15975": 21.97247, "15980": 22.82549, "15985": 21.9513, "15990": 22.13158, "15995": 22.88777, "16000": 21.90521, "16005": 21.95262, "16010": 23.0454, "16015": 22.03298, "16020": 21.92265, "16025": 22.83389, "16030": 21.91979, "16035": 21.94841, "16040": 22.45642, "16045": 22.39673, "16050": 22.04059, "16055": 22.20955, "16060": 21.92415, "16065": 21.94788, "16070": 22.03529, "16075": 21.93197, "16080": 22.00692, "16085": 22.12937, "16090": 21.93263, "16095": 21.94885, "16100": 22.03064, "16105": 22.05147, "16110": 21.94212, "16115": 21.92407, "16120": 22.0495, "16125": 21.88312, "16130": 21.94234, "16135": 22.48603, 
"16140": 21.96682, "16145": 21.93308, "16150": 22.02487, "16155": 22.02672, "16160": 33.43018, "16165": 23.61525, "16170": 22.06911, "16175": 21.93616, "16180": 22.61345, "16185": 21.9276, "16190": 25.40707, "16195": 27.02272, "16200": 104.14045, "16205": 22.34764, "16210": 22.1918, "16215": 22.00356, "16220": 22.01277, "16225": 21.98099, "16230": 22.25757, "16235": 22.03492, "16240": 22.0114, "16245": 22.01644, "16250": 22.22062, "16255": 22.17115, "16260": 22.21522, "16265": 22.00399, "16270": 22.53341, "16275": 21.96496, "16280": 22.06112, "16285": 23.1412, "16290": 22.16302, "16295": 21.98638, "16300": 23.24363, "16305": 21.97185, "16310": 22.02549, "16315": 22.95304, "16320": 22.16008, "16325": 22.08178, "16330": 22.49899, "16335": 22.04814, "16340": 22.23632, "16345": 22.07533, "16350": 22.06702, "16355": 21.9729, "16360": 22.01179, "16365": 21.99241, "16370": 22.13124, "16375": 22.14697, "16380": 22.03236, "16385": 21.96984, "16390": 22.01395, "16395": 21.99321, "16400": 22.03203, "16405": 22.06293, "16410": 21.9916, "16415": 21.98322, "16420": 22.00043, "16425": 21.96304, "16430": 22.31954, "16435": 21.96426, "16440": 22.1185, "16445": 21.98404, "16450": 21.96013, "16455": 22.07676, "16460": 21.96087, "16465": 22.14537, "16470": 22.05673, "16475": 21.97383, "16480": 21.9608, "16485": 22.26588, "16490": 22.13402, "16495": 21.99042, "16500": 22.93027, "16505": 21.95022, "16510": 21.97729, "16515": 23.37983, "16520": 22.16826, "16525": 22.07872, "16530": 22.9956, "16535": 21.97522, "16540": 22.08923, "16545": 22.4059, "16550": 22.04046, "16555": 22.16227, "16560": 22.09264, "16565": 21.9615, "16570": 22.0931, "16575": 22.12273, "16580": 21.99587, "16585": 22.00193, "16590": 21.95689, "16595": 22.0054, "16600": 22.01548, "16605": 22.20527, "16610": 22.1915, "16615": 22.00586, "16620": 22.18361, "16625": 22.0285, "16630": 21.99127, "16635": 21.97832, "16640": 21.95394, "16645": 22.13967, "16650": 21.98068, "16655": 22.11913, "16660": 22.02795, "16665": 22.0345, "16670": 21.99637, "16675": 22.15095, "16680": 22.04639, "16685": 21.94195, "16690": 22.11814, "16695": 22.00198, "16700": 22.76109, "16705": 21.97088, "16710": 22.02067, "16715": 22.94449, "16720": 22.11781, "16725": 21.98099, "16730": 22.95282, "16735": 22.17155, "16740": 22.00339, "16745": 22.77605, "16750": 22.05262, "16755": 22.22301, "16760": 22.45618, "16765": 21.99033, "16770": 22.00841, "16775": 102.78548, "16780": 22.08105, "16785": 22.1599, "16790": 22.09765, "16795": 22.06284, "16800": 22.0399, "16805": 22.14494, "16810": 21.98739, "16815": 22.02694, "16820": 22.01059, "16825": 21.98972, "16830": 22.7657, "16835": 22.00032, "16840": 22.05194, "16845": 22.45416, "16850": 22.0064, "16855": 22.06947, "16860": 23.02737, "16865": 22.30564, "16870": 22.17124, "16875": 22.78482, "16880": 22.0219, "16885": 21.99942, "16890": 22.70627, "16895": 22.00008, "16900": 22.11326, "16905": 22.97222, "16910": 21.99474, "16915": 22.02094, "16920": 22.85675, "16925": 22.00788, "16930": 22.00177, "16935": 22.57514, "16940": 22.17808, "16945": 22.19998, "16950": 22.51337, "16955": 22.12283, "16960": 21.95642, "16965": 22.0996, "16970": 22.00142, "16975": 21.9388, "16980": 22.00098, "16985": 22.24541, "16990": 22.08238, "16995": 22.18641, "17000": 21.98, "17005": 21.96778, "17010": 21.96717, "17015": 22.3357, "17020": 22.02258, "17025": 22.09072, "17030": 22.36878, "17035": 21.98931, "17040": 22.49298, "17045": 22.00318, "17050": 22.13231, "17055": 22.02248, "17060": 22.25429, "17065": 22.33303, "17070": 22.00539, "17075": 22.6849, "17080": 
22.00874, "17085": 21.98753, "17090": 22.8741, "17095": 21.96132, "17100": 22.06234, "17105": 22.8776, "17110": 22.02037, "17115": 22.00928, "17120": 22.88946, "17125": 22.04385, "17130": 22.40008, "17135": 22.5898, "17140": 22.10558, "17145": 21.98245, "17150": 22.63395, "17155": 21.99476, "17160": 22.07993, "17165": 22.3421, "17170": 21.99951, "17175": 22.1299, "17180": 22.43877, "17185": 22.03425, "17190": 22.01426, "17195": 22.16917, "17200": 22.03557, "17205": 21.97984, "17210": 22.00562, "17215": 21.99617, "17220": 22.37739, "17225": 22.01841, "17230": 22.21338, "17235": 22.16782, "17240": 21.98746, "17245": 21.98829, "17250": 22.23601, "17255": 21.97335, "17260": 22.11067, "17265": 22.07148, "17270": 21.995, "17275": 22.27567, "17280": 59.57123, "17285": 21.98241, "17290": 22.39959, "17295": 21.96395, "17300": 26.5, "17305": 25.61316, "17310": 27.63273, "17315": 21.96714, "17320": 22.88083, "17325": 21.99797, "17330": 21.9755, "17335": 22.89678, "17340": 108.55729, "17345": 22.00624, "17350": 22.20455, "17355": 22.07398, "17360": 21.99376, "17365": 21.99253, "17370": 22.02192, "17375": 22.1787, "17380": 22.03727, "17385": 22.20255, "17390": 21.98809, "17395": 22.01012, "17400": 22.0191, "17405": 22.44017, "17410": 22.01764, "17415": 22.18406, "17420": 22.0475, "17425": 22.12873, "17430": 22.03171, "17435": 21.97336, "17440": 22.21947, "17445": 22.03744, "17450": 21.9838, "17455": 22.09471, "17460": 22.15073, "17465": 22.00501, "17470": 22.3344, "17475": 22.00299, "17480": 21.97998, "17485": 22.95447, "17490": 22.02015, "17495": 22.35135, "17500": 23.17098, "17505": 21.97502, "17510": 21.97556, "17515": 23.37412, "17520": 22.12029, "17525": 21.97167, "17530": 22.92391, "17535": 22.03224, "17540": 21.97089, "17545": 22.35937, "17550": 22.1408, "17555": 21.98587, "17560": 22.01266, "17565": 22.00514, "17570": 22.16556, "17575": 22.00796, "17580": 22.18963, "17585": 22.18623, "17590": 21.9791, "17595": 22.0793, "17600": 21.99541, "17605": 22.31197, "17610": 21.98766, "17615": 21.9704, "17620": 21.97371, "17625": 21.99799, "17630": 21.95933, "17635": 21.9562, "17640": 22.09629, "17645": 21.97268, "17650": 22.13341, "17655": 22.09126, "17660": 21.98376, "17665": 22.08392, "17670": 22.09101, "17675": 22.17549, "17680": 21.96966, "17685": 22.35471, "17690": 21.98837, "17695": 21.96703, "17700": 22.64849, "17705": 22.22724, "17710": 22.03005, "17715": 23.04508, "17720": 21.9465, "17725": 21.96678, "17730": 23.62211, "17735": 22.1444, "17740": 21.96842, "17745": 22.88523, "17750": 22.02585, "17755": 21.93829, "17760": 27.46476, "17765": 21.94905, "17770": 22.14762, "17775": 22.41342, "17780": 21.90354, "17785": 22.11543, "17790": 22.26307, "17795": 22.29889, "17800": 21.96047, "17805": 22.3723, "17810": 21.97787, "17815": 22.14546, "17820": 22.21171, "17825": 21.95588, "17830": 21.9494, "17835": 21.98499, "17840": 21.93675, "17845": 21.95003, "17850": 21.93418, "17855": 21.95887, "17860": 22.17711, "17865": 22.06028, "17870": 21.9671, "17875": 22.1171, "17880": 21.96055, "17885": 22.10432, "17890": 21.97672, "17895": 21.95165, "17900": 26.44584, "17905": 22.18544, "17910": 22.09242, "17915": 22.01001, "17920": 26.72145, "17925": 23.56852, "17930": 28.32198, "17935": 22.12374, "17940": 21.99697, "17945": 22.26221, "17950": 21.98222, "17955": 21.98755, "17960": 22.01588, "17965": 21.98175, "17970": 21.96262, "17975": 21.99837, "17980": 22.23151, "17985": 22.0217, "17990": 21.97939, "17995": 21.98702, "18000": 22.2317, "18005": 21.96911, "18010": 23.31272, "18015": 22.02362, "18020": 21.98358, 
"18025": 23.35131, "18030": 22.00043, "18035": 22.21697, "18040": 23.05941, "18045": 22.02504, "18050": 22.19326, "18055": 22.63777, "18060": 22.14659, "18065": 21.98502, "18070": 22.16636, "18075": 22.02879, "18080": 22.18116, "18085": 22.68525, "18090": 22.07384, "18095": 22.0734, "18100": 22.07659, "18105": 21.94316, "18110": 22.014, "18115": 22.00379, "18120": 21.93546, "18125": 21.99819, "18130": 22.16777, "18135": 21.989, "18140": 21.99708, "18145": 22.02609, "18150": 22.03176, "18155": 21.9769, "18160": 21.96837, "18165": 22.20785, "18170": 22.10235, "18175": 22.48914, "18180": 22.07178, "18185": 21.96585, "18190": 21.95092, "18195": 22.27635, "18200": 22.12732, "18205": 21.99511, "18210": 22.27462, "18215": 21.96392, "18220": 22.01683, "18225": 22.78276, "18230": 22.14031, "18235": 22.00997, "18240": 22.91005, "18245": 22.10516, "18250": 22.04361, "18255": 23.1747, "18260": 22.13474, "18265": 22.43275, "18270": 22.6549, "18275": 21.95833, "18280": 22.0007, "18285": 22.34029, "18290": 21.95577, "18295": 22.28424, "18300": 22.12564, "18305": 22.00367, "18310": 22.26914, "18315": 22.41239, "18320": 22.09215, "18325": 21.94787, "18330": 22.33199, "18335": 22.05616, "18340": 22.08605, "18345": 21.94572, "18350": 21.95664, "18355": 22.42528, "18360": 22.01061, "18365": 22.22847, "18370": 21.93708, "18375": 21.96013, "18380": 21.9217, "18385": 21.92542, "18390": 21.95933, "18395": 21.99837, "18400": 22.06573, "18405": 22.03477, "18410": 22.25431, "18415": 21.92775, "18420": 22.05329, "18425": 22.36522, "18430": 21.92698, "18435": 21.96432, "18440": 22.57409, "18445": 22.46712, "18450": 21.9793, "18455": 23.04648, "18460": 22.16213, "18465": 21.93308, "18470": 22.8007, "18475": 21.91814, "18480": 22.18781, "18485": 85.51256, "18490": 45.03661, "18495": 22.52975, "18500": 22.14979, "18505": 22.29557, "18510": 22.28647, "18515": 22.29387, "18520": 22.16978, "18525": 22.12654, "18530": 22.14587, "18535": 22.134, "18540": 22.2602, "18545": 22.10883, "18550": 22.27078, "18555": 22.14345, "18560": 22.32141, "18565": 22.08963, "18570": 22.25174, "18575": 22.1179, "18580": 22.09238, "18585": 22.1078, "18590": 22.08199, "18595": 22.2575, "18600": 22.22721, "18605": 22.26158, "18610": 22.64117, "18615": 22.10339, "18620": 22.02647, "18625": 22.8912, "18630": 22.07423, "18635": 22.03608, "18640": 23.20714, "18645": 22.24882, "18650": 22.30434, "18655": 23.00626, "18660": 22.01064, "18665": 22.05433, "18670": 22.44655, "18675": 22.26269, "18680": 22.05153, "18685": 22.27336, "18690": 22.42644, "18695": 22.12577, "18700": 22.07482, "18705": 22.09618, "18710": 22.2076, "18715": 22.0725, "18720": 22.10465, "18725": 22.11315, "18730": 22.06436, "18735": 22.02916, "18740": 22.12536, "18745": 22.45128, "18750": 22.08543, "18755": 22.07561, "18760": 22.09976, "18765": 22.07224, "18770": 22.05534, "18775": 22.0816, "18780": 22.44448, "18785": 22.07241, "18790": 22.09708, "18795": 22.39019, "18800": 22.03548, "18805": 22.30242, "18810": 22.46992, "18815": 22.32983, "18820": 22.20945, "18825": 22.63946, "18830": 22.15831, "18835": 22.28079, "18840": 22.91476, "18845": 22.07326, "18850": 22.09581, "18855": 22.91757, "18860": 22.05321, "18865": 22.06031, "18870": 22.96686, "18875": 22.32604, "18880": 22.1861, "18885": 22.76284, "18890": 22.07811, "18895": 22.10749, "18900": 22.5788, "18905": 22.18409, "18910": 22.3503, "18915": 22.55405, "18920": 22.18303, "18925": 22.30761, "18930": 22.41452, "18935": 22.2127, "18940": 22.09551, "18945": 22.04965, "18950": 22.07607, "18955": 22.13866, "18960": 22.30429, "18965": 
22.22454, "18970": 22.15844, "18975": 22.27153, "18980": 22.01352, "18985": 22.052, "18990": 22.14542, "18995": 22.04883, "19000": 22.07955, "19005": 22.23974, "19010": 22.08844, "19015": 22.20536, "19020": 22.04534, "19025": 22.19245, "19030": 22.09579, "19035": 22.12651, "19040": 23.34943, "19045": 22.69005, "19050": 22.24844, "19055": 22.69724, "19060": 22.0511, "19065": 22.04012, "19070": 22.58181, "19075": 22.04435, "19080": 22.54076, "19085": 22.53158, "19090": 22.05289, "19095": 22.04298, "19100": 22.41844, "19105": 22.13549, "19110": 22.03942, "19115": 22.91284, "19120": 22.01683, "19125": 22.1149, "19130": 22.61757, "19135": 22.00559, "19140": 22.02532, "19145": 22.45529, "19150": 22.88046, "19155": 22.03862, "19160": 22.23494, "19165": 22.08642, "19170": 22.0461, "19175": 25.99008, "19180": 25.63387, "19185": 35.86026, "19190": 22.01076, "19195": 22.03292, "19200": 22.02003, "19205": 22.03662, "19210": 22.04207, "19215": 23.25604, "19220": 22.02187, "19225": 22.01513, "19230": 23.03368, "19235": 22.35138, "19240": 22.19641, "19245": 22.43887, "19250": 22.01143, "19255": 22.08095, "19260": 22.11804, "19265": 22.06499, "19270": 22.19212, "19275": 22.00093, "19280": 22.02519, "19285": 22.01187, "19290": 22.27587, "19295": 22.02815, "19300": 21.99642, "19305": 22.03264, "19310": 22.20341, "19315": 22.07625, "19320": 22.02696, "19325": 22.55609, "19330": 22.02632, "19335": 22.02117, "19340": 22.03833, "19345": 22.05752, "19350": 22.00674, "19355": 22.02014, "19360": 22.30345, "19365": 22.05058, "19370": 21.98538, "19375": 22.25976, "19380": 21.99865, "19385": 22.10072, "19390": 22.04212, "19395": 22.10595, "19400": 22.11526, "19405": 22.17679, "19410": 22.02278, "19415": 22.43738, "19420": 22.06879, "19425": 22.05541, "19430": 22.40515, "19435": 21.98907, "19440": 22.01894, "19445": 22.93965, "19450": 22.18347, "19455": 22.24125, "19460": 23.01338, "19465": 22.01056, "19470": 22.20675, "19475": 23.40782, "19480": 22.17527, "19485": 22.07211, "19490": 22.74796, "19495": 22.03738, "19500": 22.11447, "19505": 22.57502, "19510": 22.00241, "19515": 22.03146, "19520": 22.03991, "19525": 22.20038, "19530": 22.04317, "19535": 22.0125, "19540": 22.02949, "19545": 22.01063, "19550": 22.05496, "19555": 33.90336, "19560": 22.2613, "19565": 22.03721, "19570": 22.00879, "19575": 22.01798, "19580": 22.01124, "19585": 22.00198, "19590": 22.01794, "19595": 22.40425, "19600": 98.4347, "19605": 22.36858, "19610": 22.27251, "19615": 22.08022, "19620": 22.06991, "19625": 22.05433, "19630": 22.33052, "19635": 22.04945, "19640": 22.23253, "19645": 22.049, "19650": 22.37647, "19655": 22.46999, "19660": 22.04059, "19665": 22.0598, "19670": 22.72879, "19675": 22.06605, "19680": 22.05153, "19685": 23.35295, "19690": 22.17223, "19695": 22.03728, "19700": 23.28033, "19705": 22.01045, "19710": 22.05778, "19715": 22.7803, "19720": 22.23862, "19725": 22.15659, "19730": 22.48713, "19735": 22.00779, "19740": 22.4242, "19745": 22.34846, "19750": 22.13171, "19755": 22.03015, "19760": 22.21631, "19765": 22.06672, "19770": 22.16635, "19775": 22.22536, "19780": 22.0389, "19785": 22.01851, "19790": 22.10226, "19795": 22.01061, "19800": 22.03065, "19805": 22.02805, "19810": 22.00309, "19815": 22.04072, "19820": 22.02055, "19825": 22.02235, "19830": 22.2932, "19835": 22.03941, "19840": 22.10083, "19845": 22.03088, "19850": 22.01888, "19855": 22.10422, "19860": 22.04202, "19865": 22.3845, "19870": 22.01616, "19875": 22.02625, "19880": 22.01494, "19885": 22.02904, "19890": 22.27254, "19895": 22.04226, "19900": 22.17991, 
"19905": 22.00352, "19910": 22.02254, "19915": 22.56996, "19920": 22.30608, "19925": 22.16682, "19930": 23.12385, "19935": 22.03989, "19940": 22.0322, "19945": 22.8917, "19950": 22.10655, "19955": 22.20061, "19960": 22.90237, "19965": 22.00061, "19970": 21.99526, "19975": 22.40026, "19980": 22.0098, "19985": 22.22567, "19990": 22.35216, "19995": 22.00283, "20000": 22.04188, "20005": 22.26902, "20010": 22.32064, "20015": 22.02303, "20020": 22.37736, "20025": 22.13094, "20030": 21.99634, "20035": 22.18323, "20040": 22.01093, "20045": 22.21833, "20050": 22.12236, "20055": 22.16846, "20060": 22.04838, "20065": 22.01725, "20070": 21.99143, "20075": 21.99588, "20080": 22.02362, "20085": 22.02354, "20090": 22.09042, "20095": 22.01284, "20100": 22.30773, "20105": 22.13522, "20110": 22.1261, "20115": 22.03151, "20120": 22.02832, "20125": 22.00983, "20130": 22.21355, "20135": 22.23477, "20140": 22.0273, "20145": 22.31248, "20150": 22.09927, "20155": 22.07896, "20160": 22.44126, "20165": 22.02259, "20170": 98.96542, "20175": 22.17371, "20180": 22.28248, "20185": 22.17067, "20190": 22.14543, "20195": 22.11274, "20200": 22.32753, "20205": 22.1307, "20210": 22.10396, "20215": 22.2908, "20220": 22.10666, "20225": 22.33956, "20230": 22.16375, "20235": 22.09034, "20240": 22.15895, "20245": 22.07995, "20250": 22.08437, "20255": 22.60809, "20260": 22.44728, "20265": 22.13148, "20270": 22.44908, "20275": 22.07796, "20280": 22.14147, "20285": 22.84522, "20290": 22.07817, "20295": 22.30904, "20300": 23.04139, "20305": 22.10123, "20310": 22.04711, "20315": 23.05948, "20320": 22.06648, "20325": 22.08965, "20330": 23.10643, "20335": 22.14765, "20340": 22.09733, "20345": 23.21094, "20350": 22.24224, "20355": 22.06424, "20360": 22.85851, "20365": 22.12695, "20370": 22.04433, "20375": 22.26573, "20380": 22.10571, "20385": 22.12283, "20390": 22.26415, "20395": 22.08856, "20400": 22.10566, "20405": 22.42141, "20410": 22.16165, "20415": 22.04951, "20420": 22.18219, "20425": 22.23341, "20430": 22.06647, "20435": 22.58652, "20440": 22.05153, "20445": 22.10931, "20450": 22.0407, "20455": 22.06367, "20460": 22.28207, "20465": 22.08816, "20470": 22.07063, "20475": 22.06141, "20480": 22.04268, "20485": 22.02898, "20490": 22.0553, "20495": 22.11768, "20500": 22.01413, "20505": 22.10583, "20510": 22.06167, "20515": 22.16174, "20520": 22.08622, "20525": 22.66258, "20530": 22.49535, "20535": 22.07575, "20540": 22.05479, "20545": 22.91244, "20550": 22.0737, "20555": 22.03957, "20560": 23.0199, "20565": 22.10902, "20570": 22.09393, "20575": 22.80055, "20580": 22.06504, "20585": 22.05664, "20590": 22.61193, "20595": 22.33854, "20600": 22.04448, "20605": 22.52912, "20610": 22.05245, "20615": 22.45452, "20620": 22.46644, "20625": 22.31393, "20630": 22.07061, "20635": 22.3742, "20640": 22.07707, "20645": 22.13266, "20650": 22.51493, "20655": 22.06928, "20660": 22.24127, "20665": 22.58221, "20670": 22.07293, "20675": 22.1689, "20680": 22.40063, "20685": 22.03456, "20690": 22.04648, "20695": 22.21044, "20700": 22.08368, "20705": 22.51399, "20710": 22.05567, "20715": 22.11821, "20720": 22.058, "20725": 22.08121, "20730": 22.18068, "20735": 22.09075, "20740": 22.12848, "20745": 22.13681, "20750": 22.36705, "20755": 22.08716, "20760": 22.25969, "20765": 22.12404, "20770": 22.40801, "20775": 22.09593, "20780": 22.06397, "20785": 22.17894, "20790": 22.06289, "20795": 22.10845, "20800": 22.11135, "20805": 22.96048, "20810": 22.11031, "20815": 22.09799, "20820": 23.2067, "20825": 22.21415, "20830": 22.10096, "20835": 23.04988, "20840": 
22.15696, "20845": 22.09985, "20850": 23.11044, "20855": 22.10461, "20860": 22.07559, "20865": 22.51228, "20870": 22.07763, "20875": 22.43639, "20880": 22.38419, "20885": 22.34445, "20890": 22.09507, "20895": 22.09455, "20900": 22.10687, "20905": 22.09304, "20910": 22.30083, "20915": 22.10806, "20920": 22.33636, "20925": 22.11913, "20930": 22.08962, "20935": 22.11232, "20940": 22.08051, "20945": 22.06372, "20950": 22.15435, "20955": 22.21595, "20960": 22.09835, "20965": 22.24853, "20970": 22.10371, "20975": 22.11889, "20980": 22.09375, "20985": 22.09609, "20990": 22.271, "20995": 22.08865, "21000": 22.47251, "21005": 22.26819, "21010": 22.2059, "21015": 22.22033, "21020": 22.63647, "21025": 22.10492, "21030": 22.08886, "21035": 23.08761, "21040": 22.06721, "21045": 22.04967, "21050": 23.18736, "21055": 22.18193, "21060": 22.06386, "21065": 22.9057, "21070": 22.28411, "21075": 22.11809, "21080": 22.62963, "21085": 22.30187, "21090": 22.39038, "21095": 22.52399, "21100": 22.10204, "21105": 22.09739, "21110": 22.44714, "21115": 22.10333, "21120": 22.29029, "21125": 22.48376, "21130": 22.08506, "21135": 22.07606, "21140": 22.166, "21145": 22.25564, "21150": 22.07999, "21155": 22.28068, "21160": 22.05645, "21165": 22.06884, "21170": 22.13417, "21175": 22.07036, "21180": 22.3929, "21185": 22.08393, "21190": 22.08051, "21195": 22.09407, "21200": 22.19248, "21205": 22.05552, "21210": 22.06678, "21215": 22.17284, "21220": 22.09283, "21225": 22.11928, "21230": 22.0553, "21235": 22.44683, "21240": 22.07104, "21245": 22.10604, "21250": 22.37072, "21255": 22.05393, "21260": 22.05842, "21265": 22.74839, "21270": 22.58553, "21275": 22.08839, "21280": 23.07223, "21285": 22.30255, "21290": 22.03623, "21295": 22.84942, "21300": 22.07417, "21305": 22.03115, "21310": 22.24228, "21315": 22.25573, "21320": 23.5793, "21325": 22.61039, "21330": 22.36532, "21335": 22.86068, "21340": 22.23742, "21345": 22.16316, "21350": 22.75761, "21355": 22.14952, "21360": 22.57283, "21365": 22.78905, "21370": 22.14692, "21375": 22.12825, "21380": 22.74569, "21385": 22.17358, "21390": 22.21782, "21395": 22.87909, "21400": 22.22842, "21405": 23.21243, "21410": 22.23678, "21415": 22.21271, "21420": 22.67222, "21425": 22.30411, "21430": 22.25876, "21435": 22.27433, "21440": 22.22074, "21445": 22.5895, "21450": 22.32478, "21455": 22.16705, "21460": 22.1451, "21465": 22.1889, "21470": 22.24654, "21475": 22.18555, "21480": 22.14676, "21485": 22.15877, "21490": 22.17779, "21495": 22.13825, "21500": 22.34591, "21505": 22.11419, "21510": 22.14428, "21515": 22.18977, "21520": 22.19932, "21525": 22.27118, "21530": 22.21516, "21535": 22.51826, "21540": 22.19528, "21545": 22.19451, "21550": 22.31046, "21555": 22.21383, "21560": 22.299, "21565": 22.11804, "21570": 22.39709, "21575": 22.48496, "21580": 22.16606, "21585": 22.31565, "21590": 22.66795, "21595": 22.27054, "21600": 22.18133, "21605": 22.56054, "21610": 22.14855, "21615": 22.20794, "21620": 22.83393, "21625": 22.57921, "21630": 22.1744, "21635": 23.17944, "21640": 22.27805, "21645": 22.18283, "21650": 23.06324, "21655": 22.13652, "21660": 22.13988, "21665": 23.17265, "21670": 22.21207, "21675": 22.1404, "21680": 22.72803, "21685": 22.17153, "21690": 22.27415, "21695": 22.44952, "21700": 22.17734, "21705": 22.11552, "21710": 22.14365, "21715": 22.48491, "21720": 22.18294, "21725": 22.29423, "21730": 22.17701, "21735": 22.33023, "21740": 22.14588, "21745": 22.18336, "21750": 22.44717, "21755": 22.21338, "21760": 22.30557, "21765": 22.14496, "21770": 22.32477, "21775": 22.16698, 
"21780": 22.27344, "21785": 22.24435, "21790": 22.18787, "21795": 22.24339, "21800": 22.20493, "21805": 22.69887, "21810": 22.24477, "21815": 22.12791, "21820": 22.23047, "21825": 22.11957, "21830": 22.17676, "21835": 22.42222, "21840": 22.27717, "21845": 22.19807, "21850": 22.4284, "21855": 22.21384, "21860": 22.32072, "21865": 23.00098, "21870": 22.06171, "21875": 23.18735, "21880": 22.06551, "21885": 22.04094, "21890": 23.01561, "21895": 22.1797, "21900": 22.0393, "21905": 22.36705, "21910": 22.23749, "21915": 22.05647, "21920": 22.27163, "21925": 22.03717, "21930": 22.23222, "21935": 22.03541, "21940": 22.09642, "21945": 22.07479, "21950": 22.04652, "21955": 22.0752, "21960": 22.0611, "21965": 22.155, "21970": 22.04841, "21975": 22.04367, "21980": 22.57311, "21985": 22.07823, "21990": 22.13918, "21995": 22.07624, "22000": 22.58741, "22005": 22.05358, "22010": 22.09416, "22015": 22.06915, "22020": 22.06697, "22025": 22.17179, "22030": 22.04659, "22035": 22.0679, "22040": 22.05597, "22045": 22.20582, "22050": 22.1163, "22055": 22.05879, "22060": 22.53564, "22065": 22.05523, "22070": 22.37207, "22075": 22.15885, "22080": 22.14002, "22085": 22.14307, "22090": 22.12354, "22095": 22.27465, "22100": 22.12406, "22105": 22.37709, "22110": 22.15483, "22115": 22.08713, "22120": 22.11552, "22125": 22.08857, "22130": 22.066, "22135": 22.08113, "22140": 22.30342, "22145": 22.08316, "22150": 22.09483, "22155": 22.08368, "22160": 22.31247, "22165": 22.07708, "22170": 22.09326, "22175": 22.02953, "22180": 22.04734, "22185": 22.21646, "22190": 22.18826, "22195": 22.1858, "22200": 22.06094, "22205": 22.2184, "22210": 22.05256, "22215": 22.58915, "22220": 22.16498, "22225": 22.40896, "22230": 22.76875, "22235": 22.0528, "22240": 22.13154, "22245": 23.05687, "22250": 22.05648, "22255": 22.18597, "22260": 23.14894, "22265": 22.23368, "22270": 22.11616, "22275": 22.59598, "22280": 22.35966, "22285": 22.07336, "22290": 22.17872, "22295": 22.06577, "22300": 22.32277, "22305": 22.08732, "22310": 22.08067, "22315": 22.36932, "22320": 22.07089, "22325": 22.07751, "22330": 22.0811, "22335": 22.31345, "22340": 22.06705, "22345": 22.05811, "22350": 22.06743, "22355": 22.06308, "22360": 22.1459, "22365": 22.06573, "22370": 22.44047, "22375": 22.06664, "22380": 22.08419, "22385": 22.1892, "22390": 22.04749, "22395": 22.09074, "22400": 22.64728, "22405": 22.51719, "22410": 22.09339, "22415": 22.60724, "22420": 22.05313, "22425": 22.05373, "22430": 22.73244, "22435": 29.9374, "22440": 23.23771, "22445": 26.12982, "22450": 22.0714, "22455": 22.04965, "22460": 23.02428, "22465": 22.26129, "22470": 22.26949, "22475": 23.02104, "22480": 22.06185, "22485": 22.05681, "22490": 23.15292, "22495": 22.45871, "22500": 22.16934, "22505": 22.56592, "22510": 22.04116, "22515": 22.05877, "22520": 22.45156, "22525": 22.18365, "22530": 22.03071, "22535": 22.37645, "22540": 22.06848, "22545": 22.15173, "22550": 22.51891, "22555": 22.19234, "22560": 22.02494, "22565": 22.16566, "22570": 22.22915, "22575": 22.07767, "22580": 22.15082, "22585": 22.22704, "22590": 22.06001, "22595": 22.20203, "22600": 22.04289, "22605": 22.08313, "22610": 22.32529, "22615": 22.04353, "22620": 22.07976, "22625": 22.06153, "22630": 22.14602, "22635": 22.23695, "22640": 97.32394, "22645": 22.15297, "22650": 22.25851, "22655": 22.20962, "22660": 22.15517, "22665": 22.09394, "22670": 22.31625, "22675": 22.21339, "22680": 22.13564, "22685": 22.28151, "22690": 22.08694, "22695": 22.05186, "22700": 22.08302, "22705": 22.06486, "22710": 22.24339, "22715": 22.04107, 
"22720": 22.05055, "22725": 22.05284, "22730": 22.19875, "22735": 22.08528, "22740": 22.04858, "22745": 22.1898, "22750": 22.04259, "22755": 22.08821, "22760": 22.04079, "22765": 22.26902, "22770": 22.09483, "22775": 22.0653, "22780": 22.3063, "22785": 22.04724, "22790": 22.03538, "22795": 22.11389, "22800": 22.17977, "22805": 22.19797, "22810": 22.09501, "22815": 22.05264, "22820": 22.23768, "22825": 22.06425, "22830": 22.19367, "22835": 22.15496, "22840": 22.04645, "22845": 22.01735, "22850": 22.05546, "22855": 22.22108, "22860": 22.52894, "22865": 22.17078, "22870": 22.04657, "22875": 22.66171, "22880": 22.08216, "22885": 22.14434, "22890": 22.91265, "22895": 22.04189, "22900": 22.30463, "22905": 22.8161, "22910": 22.10876, "22915": 22.15244, "22920": 23.07323, "22925": 22.07645, "22930": 22.07515, "22935": 22.45072, "22940": 22.06701, "22945": 22.05001, "22950": 22.81856, "22955": 22.2083, "22960": 22.07677, "22965": 22.49164, "22970": 22.06707, "22975": 22.04991, "22980": 22.50302, "22985": 22.19432, "22990": 22.05407, "22995": 22.17785, "23000": 22.17777, "23005": 22.0591, "23010": 22.42836, "23015": 22.04898, "23020": 22.25012, "23025": 22.02919, "23030": 22.03809, "23035": 22.02566, "23040": 22.04623, "23045": 22.19503, "23050": 22.03965, "23055": 22.13501, "23060": 22.03498, "23065": 22.24937, "23070": 22.12539, "23075": 22.04288, "23080": 22.01837, "23085": 22.0592, "23090": 22.14505, "23095": 22.05825, "23100": 22.33469, "23105": 22.28682, "23110": 22.0202, "23115": 22.06255, "23120": 22.3121, "23125": 22.04525, "23130": 22.05081, "23135": 22.87176, "23140": 22.02192, "23145": 22.02659, "23150": 23.14619, "23155": 22.01422, "23160": 22.0033, "23165": 22.77386, "23170": 22.04744, "23175": 22.02232, "23180": 22.71235, "23185": 22.23808, "23190": 22.33464, "23195": 22.51963, "23200": 22.04383, "23205": 22.09721, "23210": 22.492, "23215": 22.19905, "23220": 22.18149, "23225": 22.28442, "23230": 22.01794, "23235": 22.54617, "23240": 22.25495, "23245": 22.08296, "23250": 23.21325, "23255": 22.05708, "23260": 22.35512, "23265": 23.0728, "23270": 22.07326, "23275": 22.03076, "23280": 23.11003, "23285": 22.13873, "23290": 22.02885, "23295": 22.54902, "23300": 22.07867, "23305": 22.04812, "23310": 22.58596, "23315": 22.44736, "23320": 22.1713, "23325": 22.46432, "23330": 22.06162, "23335": 22.0646, "23340": 22.15957, "23345": 22.11225, "23350": 22.23356, "23355": 22.01955, "23360": 22.14925, "23365": 22.03971, "23370": 22.12007, "23375": 22.01747, "23380": 22.08666, "23385": 22.2317, "23390": 22.02904, "23395": 22.03382, "23400": 22.05095, "23405": 22.38572, "23410": 22.04568, "23415": 22.19506, "23420": 22.05439, "23425": 22.03643, "23430": 22.04934, "23435": 22.06603, "23440": 22.25674, "23445": 22.05488, "23450": 22.21124, "23455": 22.10399, "23460": 22.11652, "23465": 22.17046, "23470": 22.057, "23475": 22.03255, "23480": 22.86351, "23485": 22.16282, "23490": 22.06489, "23495": 23.34007, "23500": 21.99483, "23505": 22.39902, "23510": 22.82023, "23515": 22.03095, "23520": 22.15648, "23525": 22.74636, "23530": 22.00318, "23535": 22.02858, "23540": 22.67967, "23545": 22.06468, "23550": 22.2225, "23555": 22.53272, "23560": 22.0732, "23565": 22.02821, "23570": 22.2791, "23575": 22.02246, "23580": 22.34624, "23585": 22.47664, "23590": 22.10152, "23595": 22.32486, "23600": 22.09512, "23605": 22.11527, "23610": 22.35651, "23615": 22.32661, "23620": 22.08466, "23625": 22.59412, "23630": 22.03709, "23635": 22.22635, "23640": 22.70952, "23645": 22.12059, "23650": 22.16915, "23655": 22.79884, 
"23660": 22.08297, "23665": 22.25527, "23670": 23.18839, "23675": 22.17748, "23680": 22.06363, "23685": 22.77493, "23690": 22.2037, "23695": 22.08238, "23700": 22.86887, "23705": 22.0841, "23710": 22.05206, "23715": 22.13515, "23720": 22.08766, "23725": 22.24988, "23730": 22.10193, "23735": 22.27528, "23740": 22.03048, "23745": 22.05331, "23750": 22.07547, "23755": 22.03504, "23760": 22.072, "23765": 22.05542, "23770": 22.14354, "23775": 22.08246, "23780": 22.33715, "23785": 22.02071, "23790": 22.06893, "23795": 22.0253, "23800": 22.03884, "23805": 22.05743, "23810": 22.07715, "23815": 22.2371, "23820": 22.0828, "23825": 22.27429, "23830": 22.06306, "23835": 22.06641, "23840": 22.24307, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": 
"nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": 
"nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": 
"nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": 
"nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": 
"nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": 
"nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": 
"nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": 
"nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": 
"nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": 
"nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": 
"nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": 
"nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": 
"nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": 
"nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": 
"nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": 
"nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": 
"nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": 
"nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": 
"nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": 
"nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": 
"nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": 
"nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": 
"nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": 
"nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json index 3c34692..5c0fc7c 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/golden_values_0.9.0.json @@ -1,21878 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 27308, - "step_interval": 5, - "values": [ - 12.66411, - 12.57512, - 11.54347, - 10.60309, - 10.16435, - 9.88037, - 9.63458, - 9.42019, - 9.20416, - 9.03345, - 8.87633, - 8.68266, - 8.55282, - 8.44289, - 8.32071, - 8.18419, - 8.04222, - 7.93414, - 7.76829, - 7.65767, - 7.58631, - 7.42708, - 7.35614, - 7.20111, - 7.12867, - 7.00843, - 6.93027, - 6.84437, - 6.76406, - 6.68399, - 6.61684, - 6.54664, - 6.47692, - 6.37613, - 6.34276, - 6.27588, - 6.20124, - 6.12117, - 6.09124, - 5.98671, - 5.95872, - 5.87765, - 5.82396, - 5.78384, - 5.72361, - 5.66607, - 5.65114, - 5.61262, - 5.52993, - 5.54276, - 5.42221, - 5.41338, - 5.33586, - 5.3198, - 5.31586, - 5.18782, - 5.14439, - 5.14995, - 5.12504, - 5.09826, - 5.06345, - 5.0078, - 4.98392, - 4.94395, - 4.90681, - 4.90251, - 4.87224, - 4.82824, - 4.80728, - 4.77264, - 4.74214, - 4.73947, - 4.67142, - 4.65377, - 4.63964, - 4.56415, - 4.57758, - 4.54651, - 4.49286, - 4.4527, - 4.44914, - 4.38955, - 4.38042, - 4.3699, - 4.32201, - 4.32255, - 4.26145, - 4.22908, - 4.2008, - 4.16944, - 4.14805, - 4.11125, - 
- [… run of deleted metric values from the golden-values JSON elided …]
-    ]
-  },
-  "num-zeros": {
-    "start_step": 0,
-    "end_step": 27308,
-    "step_interval": 5,
-    "values": [
- [… run of deleted "num-zeros" values from the golden-values JSON elided …]
409830880.0, - 416810848.0, - 408781632.0, - 418782976.0, - 414165856.0, - 407091072.0, - 409238592.0, - 404495328.0, - 420747168.0, - 407005024.0, - 416681920.0, - 411595360.0, - 406162944.0, - 420033984.0, - 406441760.0, - 408478720.0, - 418332544.0, - 406750976.0, - 414735808.0, - 414474976.0, - 409515840.0, - 417684640.0, - 416059008.0, - 411617792.0, - 416979200.0, - 408480352.0, - 415941056.0, - 407626464.0, - 412022944.0, - 416289216.0, - 413785408.0, - 418021248.0, - 408511328.0, - 410923904.0, - 408390944.0, - 418289216.0, - 406867808.0, - 416811072.0, - 410955648.0, - 408530368.0, - 412900544.0, - 409033664.0, - 416651296.0, - 411760160.0, - 414473184.0, - 411769728.0, - 418971136.0, - 416610368.0, - 408131296.0, - 416810080.0, - 402708128.0, - 412841536.0, - 411517216.0, - 414437952.0, - 412923616.0, - 403544256.0, - 406644064.0, - 406387584.0, - 414336192.0, - 411493984.0, - 411756992.0, - 420298208.0, - 409809184.0, - 408256608.0, - 414552832.0, - 413182784.0, - 410785728.0, - 419386048.0, - 406448000.0, - 423340416.0, - 415421536.0, - 414696512.0, - 404446592.0, - 413190560.0, - 413374784.0, - 414593568.0, - 409145280.0, - 411784864.0, - 406730848.0, - 413557408.0, - 411929152.0, - 405978784.0, - 409845248.0, - 416652864.0, - 416609792.0, - 412913088.0, - 406085856.0, - 414405856.0, - 410309088.0, - 410516704.0, - 411279456.0, - 399318688.0, - 416109952.0, - 409008320.0, - 412100448.0, - 408904960.0, - 416812192.0, - 409706400.0, - 417021856.0, - 413425280.0, - 410688928.0, - 406638208.0, - 407053760.0, - 415109440.0, - 415483488.0, - 412891968.0, - 410448640.0, - 415244704.0, - 413658784.0, - 409372928.0, - 408230048.0, - 415841952.0, - 415542912.0, - 405444480.0, - 411262592.0, - 408095936.0, - 414814080.0, - 418206560.0, - 413436160.0, - 412992928.0, - 410922720.0, - 413137312.0, - 406111872.0, - 413145760.0, - 417047808.0, - 410370464.0, - 407832128.0, - 412872704.0, - 413201568.0, - 412345408.0, - 413109024.0, - 405144640.0, - 405829760.0, - 411015968.0, - 411314048.0, - 417690304.0, - 406290688.0, - 408407168.0, - 418117920.0, - 416025440.0, - 403458560.0, - 412439296.0, - 417282496.0, - 408072928.0, - 410581440.0, - 415703072.0, - 415324032.0, - 416606048.0, - 406160256.0, - 410540224.0, - 401445248.0, - 413973856.0, - 409098976.0, - 412462976.0, - 403681664.0, - 411389632.0, - 409947808.0, - 418828896.0, - 408873920.0, - 409302880.0, - 418188192.0, - 412517600.0, - 410344544.0, - 411640000.0, - 407261024.0, - 404093888.0, - 410984736.0, - 400889568.0, - 411950880.0, - 412493408.0, - 407747776.0, - 413701120.0, - 409582336.0, - 408507488.0, - 406885664.0, - 417050432.0, - 412286720.0, - 415426944.0, - 414444864.0, - 404300032.0, - 415707168.0, - 414249856.0, - 415407264.0, - 410956608.0, - 413761056.0, - 410058848.0, - 410680704.0, - 403680992.0, - 409937152.0, - 414854208.0, - 412045664.0, - 417461632.0, - 412588608.0, - 420142624.0, - 417332864.0, - 408357440.0, - 416706560.0, - 411769664.0, - 416028960.0, - 414781568.0, - 416319424.0, - 414125824.0, - 412868256.0, - 409322368.0, - 410261120.0, - 408841600.0, - 415018496.0, - 413197632.0, - 417073952.0, - 414226464.0, - 414086816.0, - 411827136.0, - 415918272.0, - 409309440.0, - 410951392.0, - 412000992.0, - 421333152.0, - 404112864.0, - 421464160.0, - 418736352.0, - 411955424.0, - 413171328.0, - 418679552.0, - 409491008.0, - 406307744.0, - 409476480.0, - 407457920.0, - 413756576.0, - 414218144.0, - 416857088.0, - 414353152.0, - 409134240.0, - 414500832.0, - 406113120.0, - 414014720.0, - 411596224.0, - 413613152.0, - 
412591808.0, - 411899968.0, - 416905184.0, - 413171584.0, - 411109920.0, - 424177440.0, - 413255808.0, - 415786016.0, - 410507488.0, - 411603296.0, - 412848320.0, - 417891872.0, - 407918624.0, - 403705888.0, - 409799488.0, - 418483936.0, - 407261408.0, - 409961280.0, - 413813472.0, - 402364032.0, - 413965152.0, - 398619360.0, - 414599104.0, - 415418496.0, - 413128736.0, - 414610560.0, - 416327296.0, - 409055008.0, - 414406688.0, - 413943904.0, - 412198944.0, - 411482784.0, - 413936064.0, - 411311168.0, - 403627776.0, - 415113440.0, - 409896640.0, - 413178912.0, - 410947520.0, - 409122304.0, - 414565056.0, - 415758080.0, - 410009184.0, - 418842176.0, - 418043712.0, - 408647072.0, - 407298464.0, - 412500704.0, - 422720288.0, - 417781952.0, - 416399552.0, - 417658496.0, - 408441664.0, - 421993632.0, - 417242592.0, - 406882208.0, - 408385536.0, - 410465728.0, - 411182848.0, - 409240768.0, - 420936320.0, - 421754944.0, - 407375616.0, - 407539360.0, - 411239040.0, - 408215488.0, - 409821152.0, - 412036768.0, - 407748608.0, - 410371040.0, - 409701664.0, - 422094752.0, - 407115584.0, - 417167424.0, - 413288672.0, - 409692480.0, - 420254624.0, - 420238848.0, - 402528320.0, - 410110240.0, - 407377792.0, - 413355616.0, - 410748160.0, - 411811360.0, - 394848320.0, - 422398752.0, - 410414560.0, - 414341536.0, - 403565216.0, - 411259168.0, - 411366752.0, - 409918784.0, - 409797568.0, - 407940064.0, - 418257472.0, - 415937344.0, - 408053568.0, - 410109984.0, - 408823296.0, - 409609568.0, - 416034112.0, - 409625344.0, - 412102464.0, - 417440128.0, - 411499392.0, - 417293600.0, - 414915360.0, - 414638240.0, - 411904576.0, - 416484576.0, - 416336224.0, - 412024736.0, - 420829440.0, - 414841280.0, - 405728576.0, - 422429472.0, - 405695968.0, - 414646272.0, - 412796736.0, - 409195520.0, - 408443616.0, - 411745856.0, - 409837184.0, - 410584384.0, - 414691648.0, - 412066336.0, - 407948032.0, - 414240704.0, - 411940864.0, - 406331488.0, - 416399616.0, - 409247872.0, - 412430592.0, - 412137312.0, - 410661632.0, - 406256448.0, - 410502208.0, - 415798528.0, - 411738272.0, - 413735456.0, - 410926400.0, - 407244448.0, - 413563104.0, - 413446752.0, - 414356448.0, - 411820768.0, - 419979008.0, - 407168800.0, - 415378848.0, - 413764064.0, - 407911008.0, - 417100224.0, - 400664832.0, - 412822944.0, - 411881056.0, - 413938400.0, - 417650976.0, - 416622656.0, - 409991328.0, - 415532096.0, - 407115104.0, - 405693472.0, - 403989152.0, - 405524896.0, - 417688224.0, - 410342592.0, - 412831008.0, - 415239424.0, - 407164416.0, - 414277888.0, - 418553344.0, - 413891552.0, - 413112896.0, - 413442432.0, - 406271936.0, - 417946688.0, - 412232000.0, - 404715040.0, - 415177632.0, - 406917696.0, - 401542208.0, - 413586144.0, - 416087104.0, - 412009856.0, - 418889856.0, - 406139392.0, - 415863872.0, - 411935744.0, - 415969536.0, - 415512672.0, - 410451104.0, - 415264224.0, - 419201984.0, - 415957472.0, - 411062432.0, - 411268832.0, - 410520480.0, - 409327520.0, - 411109600.0, - 408886272.0, - 418082080.0, - 413936256.0, - 412638176.0, - 406230368.0, - 414091328.0, - 415699072.0, - 419364576.0, - 406069984.0, - 406295776.0, - 420449568.0, - 416379104.0, - 409316544.0, - 420823776.0, - 404547168.0, - 411281792.0, - 406051104.0, - 414846816.0, - 409199328.0, - 405090528.0, - 410601408.0, - 411000544.0, - 407046688.0, - 413628832.0, - 409460192.0, - 412354656.0, - 412639360.0, - 406230272.0, - 414090848.0, - 413135328.0, - 408592576.0, - 415381472.0, - 411061952.0, - 406021152.0, - 407417312.0, - 412042304.0, - 401732800.0, - 412034944.0, - 
413013280.0, - 411671808.0, - 414052096.0, - 406646912.0, - 412723296.0, - 418110592.0, - 414825504.0, - 400923232.0, - 406290176.0, - 411916864.0, - 405706240.0, - 409212448.0, - 405911488.0, - 412483328.0, - 411705632.0, - 414675104.0, - 407481984.0, - 414027200.0, - 416551872.0, - 415750272.0, - 403483648.0, - 410502528.0, - 411331360.0, - 417783776.0, - 414624576.0, - 415714496.0, - 410190656.0, - 412778784.0, - 411114656.0, - 403733344.0, - 425629760.0, - 414116352.0, - 407972352.0, - 413478144.0, - 413768928.0, - 412927136.0, - 409713152.0, - 405392640.0, - 414133536.0, - 417484640.0, - 406474880.0, - 416604544.0, - 404454656.0, - 417528640.0, - 410242592.0, - 412910784.0, - 411525568.0, - 410256832.0, - 413854976.0, - 414780512.0, - 410807712.0, - 418133376.0, - 407462656.0, - 406418464.0, - 419102432.0, - 414808256.0, - 416596320.0, - 415926880.0, - 407450176.0, - 413364896.0, - 406537920.0, - 410979008.0, - 415708320.0, - 414475840.0, - 408255968.0, - 410307200.0, - 407299424.0, - 407976128.0, - 407831392.0, - 426551776.0, - 418021056.0, - 419212992.0, - 415467008.0, - 413498464.0, - 418373504.0, - 410553568.0, - 405214080.0, - 415341728.0, - 412864064.0, - 415497920.0, - 414048416.0, - 412196320.0, - 406169536.0, - 409683744.0, - 413723328.0, - 412323648.0, - 409598656.0, - 411558624.0, - 406827328.0, - 411510752.0, - 411926464.0, - 406827968.0, - 415451712.0, - 405978784.0, - 403861088.0, - 420599872.0, - 407671904.0, - 402235296.0, - 414055296.0, - 410003712.0, - 406041344.0, - 403981632.0, - 418595136.0, - 413900832.0, - 411205024.0, - 409972800.0, - 408655296.0, - 411394720.0, - 414434624.0, - 412015520.0, - 416597632.0, - 405979136.0, - 421419104.0, - 417429024.0, - 408709760.0, - 411811232.0, - 416481216.0, - 420598912.0, - 407672512.0, - 402235456.0, - 414054784.0, - 410005056.0, - 406040800.0, - 403983392.0, - 418596032.0, - 413902016.0, - 411203296.0, - 409972992.0, - 408654752.0, - 411316256.0, - 414445632.0, - 412035680.0, - 416609088.0, - 405993024.0, - 421428096.0, - 417433024.0, - 408711968.0, - 411811168.0, - 416480288.0, - 407109216.0, - 406314304.0, - 417575488.0, - 412714624.0, - 414520960.0, - 422196128.0, - 415706784.0, - 411734176.0, - 410722656.0, - 409332128.0, - 403014624.0, - 410644448.0, - 408423872.0, - 404717856.0, - 417809440.0, - 413385952.0, - 410551360.0, - 416090176.0, - 418011264.0, - 414745088.0, - 406070944.0, - 412089248.0, - 415224288.0, - 413866112.0, - 415380096.0, - 413101792.0, - 413683648.0, - 412534016.0, - 412169088.0, - 408649376.0, - 410575616.0, - 413011552.0, - 409895840.0, - 412050112.0, - 405428000.0, - 416176576.0, - 414112320.0, - 411594080.0, - 415684992.0, - 406517952.0, - 411042464.0, - 410219008.0, - 411653952.0, - 414974336.0, - 419418080.0, - 406841056.0, - 415087232.0, - 419770368.0, - 415165856.0, - 414039264.0, - 414520288.0, - 415471328.0, - 415148704.0, - 411513920.0, - 410708896.0, - 414162944.0, - 418914016.0, - 413238400.0, - 407973120.0, - 412226080.0, - 402654976.0, - 408145152.0, - 418581344.0, - 407750880.0, - 414617152.0, - 408159168.0, - 416370624.0, - 415928512.0, - 415441632.0, - 413011552.0, - 416887808.0, - 414649600.0, - 406928640.0, - 417463328.0, - 411969664.0, - 405575616.0, - 411237184.0, - 418786976.0, - 414282784.0, - 414012512.0, - 421826656.0, - 405228832.0, - 405841248.0, - 416138816.0, - 407559200.0, - 415596544.0, - 411477088.0, - 408120576.0, - 411998688.0, - 421387712.0, - 401538368.0, - 415624576.0, - 411668448.0, - 403466880.0, - 416273344.0, - 407900064.0, - 415062880.0, - 410174304.0, - 
417021056.0, - 428308928.0, - 410876288.0, - 409520864.0, - 411546944.0, - 406365856.0, - 410481792.0, - 417363296.0, - 408862304.0, - 414896832.0, - 413008480.0, - 410001632.0, - 415189664.0, - 414575840.0, - 420688512.0, - 413844448.0, - 412753120.0, - 412982816.0, - 410559968.0, - 416677376.0, - 407556448.0, - 408970912.0, - 406257696.0, - 408577088.0, - 413755360.0, - 416010624.0, - 414017472.0, - 414866080.0, - 407566560.0, - 410864864.0, - 419209024.0, - 418458016.0, - 410257600.0, - 415472096.0, - 407857056.0, - 412651168.0, - 417658432.0, - 412973600.0, - 410834976.0, - 412531584.0, - 414706496.0, - 413310912.0, - 410388960.0, - 417169376.0, - 407421728.0, - 414063616.0, - 408397536.0, - 408519296.0, - 414151584.0, - 403736192.0, - 411350944.0, - 419264608.0, - 406796064.0, - 409791360.0, - 407589024.0, - 410226400.0, - 411496608.0, - 414742656.0, - 413582624.0, - 408933248.0, - 416197728.0, - 419163584.0, - 414516320.0, - 421198496.0, - 410648000.0, - 413048576.0, - 413772576.0, - 401896032.0, - 415950848.0, - 416890112.0, - 409845728.0, - 402167520.0, - 406009440.0, - 413937728.0, - 408716800.0, - 410700928.0, - 413359520.0, - 417827456.0, - 407050464.0, - 414642272.0, - 416742176.0, - 415734208.0, - 403233888.0, - 408140352.0, - 411291008.0, - 407275296.0, - 417494208.0, - 412821152.0, - 410127744.0, - 412566144.0, - 407011712.0, - 416768544.0, - 411127168.0, - 419286464.0, - 415237952.0, - 403092224.0, - 411566272.0, - 410920064.0, - 408421888.0, - 416843200.0, - 406914048.0, - 414898656.0, - 412997024.0, - 413349856.0, - 414633856.0, - 412580928.0, - 408039328.0, - 417959680.0, - 415261664.0, - 416177760.0, - 405368864.0, - 410751744.0, - 412790784.0, - 413006112.0, - 416136192.0, - 405308480.0, - 410043520.0, - 414319424.0, - 405945952.0, - 406758528.0, - 411313472.0, - 406728768.0, - 415162272.0, - 415656672.0, - 417167424.0, - 411780992.0, - 415948512.0, - 414952608.0, - 408808224.0, - 411716640.0, - 404715520.0, - 417157472.0, - 412566400.0, - 410789152.0, - 412864064.0, - 410606528.0, - 409157952.0, - 407948192.0, - 410900128.0, - 419708032.0, - 404843840.0, - 412640352.0, - 419903200.0, - 424133056.0, - 404346752.0, - 411173472.0, - 416984192.0, - 412138496.0, - 408965856.0, - 410460576.0, - 418112608.0, - 415509856.0, - 405721152.0, - 407817632.0, - 411394240.0, - 408118976.0, - 409042144.0, - 402485056.0, - 417881568.0, - 413495808.0, - 415056768.0, - 418288448.0, - 414467264.0, - 412031456.0, - 408842496.0, - 406866752.0, - 418174144.0, - 413456992.0, - 411006048.0, - 415911232.0, - 402049952.0, - 416931200.0, - 413970720.0, - 415466976.0, - 411631488.0, - 413886304.0, - 416071040.0, - 407335488.0, - 410249760.0, - 420416832.0, - 406301504.0, - 410387584.0, - 409385632.0, - 409196832.0, - 415780800.0, - 422217024.0, - 418600704.0, - 416300672.0, - 407333856.0, - 409533408.0, - 418033280.0, - 415407360.0, - 419612864.0, - 408260800.0, - 416454464.0, - 408735392.0, - 412928928.0, - 413711648.0, - 412617280.0, - 409546400.0, - 409979680.0, - 408545952.0, - 411313472.0, - 405336832.0, - 406970528.0, - 415920288.0, - 405727360.0, - 413457184.0, - 403532448.0, - 411317408.0, - 411360416.0, - 412315744.0, - 409030400.0, - 410558816.0, - 406092416.0, - 412566880.0, - 408197120.0, - 411911584.0, - 411155200.0, - 418523520.0, - 407061600.0, - 405064160.0, - 416187744.0, - 416192032.0, - 410655200.0, - 411246144.0, - 413204000.0, - 417195456.0, - 420749888.0, - 405779968.0, - 416103328.0, - 407018624.0, - 414524640.0, - 405293248.0, - 406541600.0, - 406945600.0, - 413623136.0, - 
414572608.0, - 412146240.0, - 410737568.0, - 417239328.0, - 419405664.0, - 412509088.0, - 413554304.0, - 407086816.0, - 408855488.0, - 417070592.0, - 408946464.0, - 414534720.0, - 401662976.0, - 409642656.0, - 411211552.0, - 416893856.0, - 408541664.0, - 413814368.0, - 418817504.0, - 420705984.0, - 410736032.0, - 413955968.0, - 413418208.0, - 415320032.0, - 409672576.0, - 407198816.0, - 410964352.0, - 410353760.0, - 406880096.0, - 412727872.0, - 401732256.0, - 418271328.0, - 409351296.0, - 408754976.0, - 415226176.0, - 407825888.0, - 408653792.0, - 415771296.0, - 402553952.0, - 413453216.0, - 416467072.0, - 407665504.0, - 411260160.0, - 414475904.0, - 407920608.0, - 415790688.0, - 407459840.0, - 414817952.0, - 410033120.0, - 408214080.0, - 412158720.0, - 421948064.0, - 419996672.0, - 408512672.0, - 413122240.0, - 419484000.0, - 410063008.0, - 403108832.0, - 413669472.0, - 418633856.0, - 410876192.0, - 413980768.0, - 408199936.0, - 420128032.0, - 422401760.0, - 413406944.0, - 416335680.0, - 418586816.0, - 404216928.0, - 407996128.0, - 411172608.0, - 414184736.0, - 411180352.0, - 413033664.0, - 410072736.0, - 410428256.0, - 411608224.0, - 411179552.0, - 410125408.0, - 408956000.0, - 416491296.0, - 418332800.0, - 408952128.0, - 410032480.0, - 415864256.0, - 414027552.0, - 404950112.0, - 403128160.0, - 412242592.0, - 410491872.0, - 418445696.0, - 418528896.0, - 415546400.0, - 405308512.0, - 413236032.0, - 413057792.0, - 414054752.0, - 411334080.0, - 411977440.0, - 419346944.0, - 422696512.0, - 418111200.0, - 413165408.0, - 408591232.0, - 411180768.0, - 411891776.0, - 412547648.0, - 412614144.0, - 407733376.0, - 413129792.0, - 414097888.0, - 420883648.0, - 407706016.0, - 417759872.0, - 407569984.0, - 414966624.0, - 409372000.0, - 411054976.0, - 406504160.0, - 416825888.0, - 412147872.0, - 410194688.0, - 416626496.0, - 406960896.0, - 413014176.0, - 420288032.0, - 413616928.0, - 417692288.0, - 413332224.0, - 415002016.0, - 417877248.0, - 415546432.0, - 415646272.0, - 420121280.0, - 417948000.0, - 413164640.0, - 418486624.0, - 406207936.0, - 415000544.0, - 407112640.0, - 415200608.0, - 417214272.0, - 415140992.0, - 411136352.0, - 422206784.0, - 410856896.0, - 406010784.0, - 418315296.0, - 414234752.0, - 411561056.0, - 416129056.0, - 411089408.0, - 404215552.0, - 411018368.0, - 408019648.0, - 412223456.0, - 415269056.0, - 411960704.0, - 408578400.0, - 401909856.0, - 414824672.0, - 403048384.0, - 409670720.0, - 409082144.0, - 401939904.0, - 407654528.0, - 412529312.0, - 423408288.0, - 413573600.0, - 420621856.0, - 406756896.0, - 415775904.0, - 411422112.0, - 412043904.0, - 413662016.0, - 412162304.0, - 425109024.0, - 409776256.0, - 406453568.0, - 407947584.0, - 412233152.0, - 412104768.0, - 403309728.0, - 417805472.0, - 414457728.0, - 406951968.0, - 414498624.0, - 422965984.0, - 407377952.0, - 408374784.0, - 406376832.0, - 408520640.0, - 411607296.0, - 412678560.0, - 415551616.0, - 413230912.0, - 411958816.0, - 408714144.0, - 411806944.0, - 417081920.0, - 407238880.0, - 409748864.0, - 407716864.0, - 417937952.0, - 416423872.0, - 416592000.0, - 407355328.0, - 412408672.0, - 411665728.0, - 416709440.0, - 414633280.0, - 408626752.0, - 413042464.0, - 407127712.0, - 410180160.0, - 409107808.0, - 405647744.0, - 416609760.0, - 407224640.0, - 416332352.0, - 413701728.0, - 419689728.0, - 407962080.0, - 411231424.0, - 408937216.0, - 415902912.0, - 412646912.0, - 411165312.0, - 416003232.0, - 409245920.0, - 413049664.0, - 412192000.0, - 417156128.0, - 412322656.0, - 413019840.0, - 408328512.0, - 418740960.0, - 
414037600.0, - 413227680.0, - 408863968.0, - 413429696.0, - 412272768.0, - 408354592.0, - 410018048.0, - 414275552.0, - 410053056.0, - 409671776.0, - 408628608.0, - 418114144.0, - 412176288.0, - 407783040.0, - 412221984.0, - 410460864.0, - 415365664.0, - 408752800.0, - 415049024.0, - 417620640.0, - 405218944.0, - 411778304.0, - 402078112.0, - 411237216.0, - 421871328.0, - 408958336.0, - 410339264.0, - 410191808.0, - 419335104.0, - 410230176.0, - 418002912.0, - 412247904.0, - 414668960.0, - 418759776.0, - 402500160.0, - 407161920.0, - 420004896.0, - 413730048.0, - 416853152.0, - 411215232.0, - 411973056.0, - 422411040.0, - 410644736.0, - 401468352.0, - 417161664.0, - 410576384.0, - 415596064.0, - 408981152.0, - 403784960.0, - 412242304.0, - 413934336.0, - 410848416.0, - 412823872.0, - 410805664.0, - 410719040.0, - 406750272.0, - 413446848.0, - 410757216.0, - 401959040.0, - 412531776.0, - 409531520.0, - 408071392.0, - 409007520.0, - 411040512.0, - 415904064.0, - 408043488.0, - 420725408.0, - 410648608.0, - 411845792.0, - 410573120.0, - 414150720.0, - 408975072.0, - 406062848.0, - 410830048.0, - 410452000.0, - 408349440.0, - 416822592.0, - 415581440.0, - 416723520.0, - 420185856.0, - 411942432.0, - 408999552.0, - 419375008.0, - 404652000.0, - 415069312.0, - 417294784.0, - 408961600.0, - 416891712.0, - 416416800.0, - 408785120.0, - 418825024.0, - 409200416.0, - 426124416.0, - 415360320.0, - 413513824.0, - 417340544.0, - 419229056.0, - 412179872.0, - 411151488.0, - 414296608.0, - 413235520.0, - 409998496.0, - 410361856.0, - 418995488.0, - 404643008.0, - 413266112.0, - 412490144.0, - 422580800.0, - 413359104.0, - 412878048.0, - 423259744.0, - 416096096.0, - 411227488.0, - 414875680.0, - 410961344.0, - 414185760.0, - 417355872.0, - 408661760.0, - 412761920.0, - 411469120.0, - 410972928.0, - 415782368.0, - 413452608.0, - 423340480.0, - 410733088.0, - 419495200.0, - 411307072.0, - 409314848.0, - 415942080.0, - 410806464.0, - 407406368.0, - 421401568.0, - 414137152.0, - 411310432.0, - 412850048.0, - 410706016.0, - 418044320.0, - 412023328.0, - 405552832.0, - 415811616.0, - 417596192.0, - 416760992.0, - 413630112.0, - 409692320.0, - 414986080.0, - 409880800.0, - 409965856.0, - 411709056.0, - 417301600.0, - 414699648.0, - 405652544.0, - 412530624.0, - 408071712.0, - 413591616.0, - 422813408.0, - 406044064.0, - 416552800.0, - 412311808.0, - 417666720.0, - 412147584.0, - 404668960.0, - 419801984.0, - 413544416.0, - 401322976.0, - 410224224.0, - 421619808.0, - 412179104.0, - 413390944.0, - 416861888.0, - 408555584.0, - 413307296.0, - 415378368.0, - 418108448.0, - 406972864.0, - 415326432.0, - 410880160.0, - 413732544.0, - 430673664.0, - 406762016.0, - 401276704.0, - 407826816.0, - 410279680.0, - 412088832.0, - 403155456.0, - 413544192.0, - 410535872.0, - 417206624.0, - 413280448.0, - 409459008.0, - 414570048.0, - 425874528.0, - 407437312.0, - 414139744.0, - 413614848.0, - 412202656.0, - 413965728.0, - 402935424.0, - 413682976.0, - 410373152.0, - 409738976.0, - 411791200.0, - 424273760.0, - 419575936.0, - 407868608.0, - 416854272.0, - 414382848.0, - 407833696.0, - 411450528.0, - 423631904.0, - 413772928.0, - 406225952.0, - 410467392.0, - 415914560.0, - 418793760.0, - 404020640.0, - 410533440.0, - 408724160.0, - 412480320.0, - 417606656.0, - 407860736.0, - 411859968.0, - 408904672.0, - 413656416.0, - 409897728.0, - 404487936.0, - 415294176.0, - 419976640.0, - 405987648.0, - 405495200.0, - 417879808.0, - 409711136.0, - 407919328.0, - 414591136.0, - 419024640.0, - 411771040.0, - 414461344.0, - 411780992.0, - 
414850496.0, - 418810720.0, - 405728192.0, - 407869952.0, - 416555392.0, - 398807040.0, - 407760544.0, - 414825824.0, - 418454464.0, - 407254272.0, - 413662080.0, - 415556288.0, - 422430592.0, - 417553440.0, - 413331136.0, - 416795232.0, - 413878560.0, - 416997376.0, - 412182656.0, - 409385376.0, - 410559968.0, - 417041536.0, - 407615616.0, - 402000448.0, - 407001280.0, - 414213600.0, - 420888800.0, - 412536288.0, - 406384992.0, - 415570176.0, - 417120544.0, - 409088480.0, - 412024544.0, - 408856608.0, - 412241952.0, - 416309696.0, - 410448768.0, - 415036768.0, - 404358272.0, - 409275264.0, - 415528480.0, - 406755648.0, - 414033088.0, - 404672064.0, - 415610624.0, - 412227712.0, - 408588544.0, - 415302336.0, - 417671104.0, - 410247008.0, - 417821216.0, - 414434784.0, - 408395264.0, - 417179744.0, - 407203776.0, - 411779744.0, - 416305056.0, - 404792352.0, - 416134848.0, - 420800224.0, - 409513856.0, - 421324192.0, - 419191808.0, - 415797984.0, - 413020096.0, - 415885600.0, - 415902176.0, - 411819424.0, - 411780992.0, - 418692416.0, - 420165952.0, - 410124768.0, - 411392032.0, - 417797376.0, - 409862240.0, - 407935808.0, - 416633408.0, - 414467456.0, - 409981376.0, - 403319456.0, - 427653056.0, - 410264480.0, - 411934688.0, - 405917248.0, - 408851104.0, - 413802432.0, - 405862016.0, - 406409280.0, - 411256064.0, - 423556960.0, - 411126528.0, - 413178912.0, - 412017088.0, - 411701792.0, - 413904480.0, - 413946528.0, - 414430240.0, - 411184320.0, - 414515904.0, - 409554624.0, - 406645312.0, - 412776896.0, - 415207968.0, - 413887488.0, - 409591072.0, - 406176000.0, - 408026048.0, - 409906304.0, - 406780704.0, - 416084992.0, - 411565728.0, - 412250016.0, - 411364128.0, - 413419168.0, - 414338848.0, - 410083008.0, - 408145472.0, - 418742400.0, - 419969984.0, - 417148640.0, - 410822208.0, - 413411744.0, - 413090752.0, - 412696768.0, - 422743136.0, - 409285472.0, - 411498048.0, - 420191712.0, - 420993312.0, - 406663904.0, - 406604352.0, - 413842656.0, - 406360320.0, - 413906496.0, - 409208768.0, - 409753056.0, - 407764064.0, - 409253504.0, - 413748320.0, - 411913888.0, - 410366560.0, - 415059008.0, - 415389632.0, - 417152352.0, - 411053888.0, - 402669760.0, - 414325600.0, - 411284416.0, - 412581920.0, - 411596160.0, - 408676544.0, - 413155392.0, - 407500224.0, - 409621984.0, - 414093888.0, - 412344288.0, - 409056032.0, - 413420160.0, - 413331264.0, - 404019968.0, - 414227008.0, - 413383360.0, - 405955904.0, - 408800160.0, - 417740128.0, - 411586368.0, - 409975488.0, - 414719008.0, - 402412832.0, - 414348608.0, - 410783584.0, - 413035904.0, - 413852416.0, - 410354176.0, - 415001440.0, - 414301376.0, - 406717792.0, - 411501600.0, - 410624320.0, - 412769792.0, - 407556480.0, - 407640832.0, - 414769216.0, - 412796416.0, - 414765344.0, - 408631744.0, - 413902176.0, - 413608064.0, - 417752032.0, - 415843680.0, - 413497184.0, - 408318752.0, - 411286016.0, - 409982080.0, - 413915840.0, - 404801568.0, - 408353216.0, - 416261216.0, - 407356864.0, - 403871616.0, - 418378880.0, - 416149088.0, - 424280992.0, - 408796640.0, - 413845920.0, - 418971200.0, - 410723200.0, - 409083520.0, - 415870368.0, - 413700384.0, - 412606432.0, - 417984256.0, - 402308832.0, - 407871040.0, - 414843200.0, - 417298848.0, - 413123552.0, - 410529056.0, - 411936192.0, - 410095232.0, - 420276640.0, - 413477920.0, - 404721536.0, - 420981824.0, - 404661184.0, - 414980256.0, - 416487712.0, - 420964512.0, - 414274464.0, - 412628032.0, - 413912288.0, - 407990336.0, - 408985120.0, - 423830944.0, - 412061376.0, - 401733088.0, - 417962528.0, - 
412468384.0, - 418796320.0, - 404670592.0, - 408578496.0, - 418339328.0, - 410108448.0, - 404120992.0, - 413843264.0, - 413639552.0, - 412083232.0, - 420173952.0, - 414991360.0, - 407717920.0, - 407908096.0, - 419606176.0, - 416079680.0, - 401552384.0, - 412657856.0, - 408442368.0, - 412943680.0, - 418281184.0, - 413288000.0, - 415427104.0, - 413499232.0, - 416875968.0, - 410177984.0, - 414286592.0, - 406609312.0, - 408938560.0, - 416495904.0, - 413238912.0, - 405081280.0, - 420601056.0, - 416687104.0, - 410220288.0, - 407656800.0, - 407293760.0, - 418117632.0, - 408833536.0, - 415466080.0, - 413033536.0, - 415566592.0, - 412225856.0, - 415519136.0, - 417217248.0, - 415994208.0, - 408873600.0, - 419491200.0, - 413765920.0, - 407130688.0, - 411230720.0, - 413884096.0, - 410950496.0, - 412169856.0, - 410735712.0, - 407945312.0, - 414506528.0, - 414365312.0, - 418137792.0, - 407623552.0, - 420193312.0, - 410835104.0, - 412817920.0, - 424067936.0, - 408388128.0, - 418699008.0, - 412992960.0, - 403409056.0, - 413680448.0, - 417872448.0, - 406802240.0, - 415407840.0, - 410247232.0, - 419759712.0, - 404802624.0, - 415696448.0, - 417937472.0, - 408253600.0, - 411902112.0, - 408573408.0, - 409423648.0, - 414088960.0, - 401478240.0, - 411742528.0, - 408343648.0, - 407304224.0, - 410957120.0, - 421268832.0, - 412663840.0, - 410873120.0, - 410675360.0, - 410138272.0, - 409784064.0, - 407843648.0, - 412239680.0, - 412600000.0, - 414638464.0, - 404790400.0, - 408548288.0, - 409732128.0, - 418413984.0, - 409784288.0, - 416175200.0, - 415713600.0, - 415409568.0, - 414057056.0, - 419980224.0, - 405691744.0, - 418788224.0, - 412552992.0, - 408631488.0, - 412029696.0, - 420240480.0, - 415681632.0, - 415580864.0, - 406958848.0, - 412249344.0, - 413478432.0, - 406426208.0, - 410148896.0, - 418998176.0, - 410417632.0, - 415333728.0, - 416584000.0, - 415242304.0, - 412212096.0, - 415857280.0, - 412620384.0, - 407461184.0, - 409759744.0, - 418417024.0, - 406013248.0, - 406120928.0, - 406583136.0, - 414575488.0, - 411152704.0, - 407186560.0, - 406491904.0, - 413695904.0, - 420950880.0, - 415250464.0, - 408569792.0, - 412236512.0, - 418439616.0, - 406238048.0, - 416038464.0, - 400165088.0, - 411226912.0, - 408823104.0, - 415843360.0, - 413962656.0, - 412118304.0, - 411415264.0, - 413096384.0, - 418737664.0, - 407577312.0, - 408430784.0, - 408529504.0, - 413784064.0, - 410975392.0, - 410156928.0, - 416404096.0, - 407903520.0, - 421458272.0, - 412274848.0, - 405073952.0, - 413044256.0, - 418528960.0, - 410658560.0, - 411992480.0, - 403968416.0, - 411108288.0, - 415119680.0, - 403387392.0, - 411993024.0, - 418329088.0, - 408459872.0, - 416921280.0, - 405643424.0, - 408147744.0, - 413396000.0, - 406320640.0, - 421459648.0, - 416321312.0, - 409179648.0, - 414647392.0, - 417873888.0, - 412161664.0, - 410750816.0, - 422205216.0, - 406689888.0, - 407261248.0, - 406805888.0, - 414381376.0, - 408532320.0, - 406677696.0, - 413526272.0, - 408279712.0, - 412306944.0, - 416118816.0, - 412484224.0, - 408808352.0, - 410736992.0, - 414504448.0, - 418444480.0, - 407431328.0, - 411008672.0, - 411402464.0, - 410406624.0, - 406542400.0, - 414190880.0, - 411730528.0, - 406809056.0, - 408454528.0, - 409122304.0, - 416596416.0, - 415372416.0, - 413621472.0, - 419321152.0, - 408640352.0, - 417094624.0, - 407202720.0, - 412524576.0, - 406226656.0, - 404579616.0, - 414175200.0, - 407127040.0, - 410158848.0, - 420271744.0, - 413895072.0, - 416175968.0, - 422343520.0, - 414051168.0, - 411498976.0, - 413662496.0, - 414726048.0, - 413234336.0, - 
408260704.0, - 411350304.0, - 411811552.0, - 408372416.0, - 418412384.0, - 402269280.0, - 413677056.0, - 418753024.0, - 412217952.0, - 415215456.0, - 416648128.0, - 408234560.0, - 411213856.0, - 408790112.0, - 408121952.0, - 409170336.0, - 410734112.0, - 409936224.0, - 412276096.0, - 414539840.0, - 405619040.0, - 414992384.0, - 415291232.0, - 414335744.0, - 417380000.0, - 409549120.0, - 406891776.0, - 409049056.0, - 420720800.0, - 409671840.0, - 416345280.0, - 406489760.0, - 411682208.0, - 415073120.0, - 406077760.0, - 412551104.0, - 413092512.0, - 405305504.0, - 409754720.0, - 411273344.0, - 412325984.0, - 414492768.0, - 416958176.0, - 414128096.0, - 408105376.0, - 408754656.0, - 407315520.0, - 416939712.0, - 407366656.0, - 408556384.0, - 412100224.0, - 412307968.0, - 413936288.0, - 411327424.0, - 415825472.0, - 416874944.0, - 415247808.0, - 416807584.0, - 408765568.0, - 411392032.0, - 421282240.0, - 412509024.0, - 406195264.0, - 409552864.0, - 419496640.0, - 419015264.0, - 416641184.0, - 408564768.0, - 407659392.0, - 406930816.0, - 414664800.0, - 408869568.0, - 412012128.0, - 417340096.0, - 413850336.0, - 417076608.0, - 409370816.0, - 409628352.0, - 411424096.0, - 412042336.0, - 411818944.0, - 408846720.0, - 407841536.0, - 406151360.0, - 406319488.0, - 409120352.0, - 412615872.0, - 413532736.0, - 419171904.0, - 413866208.0, - 410164864.0, - 422770624.0, - 410631808.0, - 413956256.0, - 419620512.0, - 408846368.0, - 414635328.0, - 406362528.0, - 402708768.0, - 419613536.0, - 404847744.0, - 421550976.0, - 413543200.0, - 406893024.0, - 407650080.0, - 417774560.0, - 410705152.0, - 409986528.0, - 412831264.0, - 412717184.0, - 416972352.0, - 411505920.0, - 411700640.0, - 415884704.0, - 413376000.0, - 413832928.0, - 412735072.0, - 408822528.0, - 412971776.0, - 410920544.0, - 412344832.0, - 405632768.0, - 411159168.0, - 415580256.0, - 413999360.0, - 407473632.0, - 412041280.0, - 410532512.0, - 404566688.0, - 410197056.0, - 412254976.0, - 408523040.0, - 422427584.0, - 410615264.0, - 419350144.0, - 403884512.0, - 407252288.0, - 420443200.0, - 421425568.0, - 408452256.0, - 417916000.0, - 416775968.0, - 419099776.0, - 407547168.0, - 406765472.0, - 415332032.0, - 417052992.0, - 412604256.0, - 414826368.0, - 408118688.0, - 419557792.0, - 411729856.0, - 411672960.0, - 417175904.0, - 410632768.0, - 413532800.0, - 414665024.0, - 418662048.0, - 406574048.0, - 409988768.0, - 417109568.0, - 408678784.0, - 412142272.0, - 416801792.0, - 408941920.0, - 417166912.0, - 412325920.0, - 419871040.0, - 419650368.0, - 406610880.0, - 412993280.0, - 412550848.0, - 405127520.0, - 414458272.0, - 415903712.0, - 410621632.0, - 410580192.0, - 410456000.0, - 419746208.0, - 412518816.0, - 409092480.0, - 413411168.0, - 410308800.0, - 417502400.0, - 419797824.0, - 413532768.0, - 417780960.0, - 409911392.0, - 413185920.0, - 410197600.0, - 412674560.0, - 416234432.0, - 410191456.0, - 420617888.0, - 415609376.0, - 420792032.0, - 418711520.0, - 415262688.0, - 409744544.0, - 413882496.0, - 410282624.0, - 415323712.0, - 411371776.0, - 418940608.0, - 408532544.0, - 408758336.0, - 412250464.0, - 403105312.0, - 410416512.0, - 415844832.0, - 403932672.0, - 405284288.0, - 412304992.0, - 407686560.0, - 420514752.0, - 412744448.0, - 403093440.0, - 420757408.0, - 422156928.0, - 404139104.0, - 402234144.0, - 415565280.0, - 408738848.0, - 407156288.0, - 413337280.0, - 410476544.0, - 415218112.0, - 417073728.0, - 410918624.0, - 413596864.0, - 410684256.0, - 405601152.0, - 414670560.0, - 416290304.0, - 410909664.0, - 418249536.0, - 409838784.0, - 
411910048.0, - 411890336.0, - 407964928.0, - 407949504.0, - 407969632.0, - 416002176.0, - 412363360.0, - 407452544.0, - 417762272.0, - 410101504.0, - 423719232.0, - 405305408.0, - 410104960.0, - 424874272.0, - 420910496.0, - 410874304.0, - 413398016.0, - 415916768.0, - 412462880.0, - 413505888.0, - 406121248.0, - 419927584.0, - 413912672.0, - 409356000.0, - 410613056.0, - 411567840.0, - 414483264.0, - 400987968.0, - 419914912.0, - 414681216.0, - 406084352.0, - 414429888.0, - 412849632.0, - 412337824.0, - 416503072.0, - 420020544.0, - 410636576.0, - 410452000.0, - 417279072.0, - 414075232.0, - 419390976.0, - 413008032.0, - 414749856.0, - 414421024.0, - 411885696.0, - 408459392.0, - 425847936.0, - 400233696.0, - 404880160.0, - 418252736.0, - 416729056.0, - 406792704.0, - 413315616.0, - 415429888.0, - 413354752.0, - 414298848.0, - 413956544.0, - 414377280.0, - 410985344.0, - 411758848.0, - 413260128.0, - 413067872.0, - 412349504.0, - 408906624.0, - 418704320.0, - 407485024.0, - 413081152.0, - 418494112.0, - 407292192.0, - 409452544.0, - 415622272.0, - 415080736.0, - 412973536.0, - 413540768.0, - 407776736.0, - 413128544.0, - 412933728.0, - 412351552.0, - 410930048.0, - 415583424.0, - 418761024.0, - 411081440.0, - 419254016.0, - 410607392.0, - 416964448.0, - 412580512.0, - 418322432.0, - 416248864.0, - 414754272.0, - 418429536.0, - 422143040.0, - 416746720.0, - 408958208.0, - 413181408.0, - 411399776.0, - 399912832.0, - 412798848.0, - 409085984.0, - 418165440.0, - 400254528.0, - 413066368.0, - 409962528.0, - 412352096.0, - 414146048.0, - 408423744.0, - 416251552.0, - 408652000.0, - 413273280.0, - 410580384.0, - 412101824.0, - 415320704.0, - 410887616.0, - 420440704.0, - 401429440.0, - 407820384.0, - 417939328.0, - 408921792.0, - 407054592.0, - 415264192.0, - 404144160.0, - 410387296.0, - 419861152.0, - 411793760.0, - 407248736.0, - 416489664.0, - 409148640.0, - 412185472.0, - 411933376.0, - 410221984.0, - 416924800.0, - 416474016.0, - 415423904.0, - 408695008.0, - 418412224.0, - 411769216.0, - 412400160.0, - 411516896.0, - 408460416.0, - 403828544.0, - 413352224.0, - 405221632.0, - 418408672.0, - 413698016.0, - 414702240.0, - 411660704.0, - 411947200.0, - 417931072.0, - 417306720.0, - 416300256.0, - 410703072.0, - 418913088.0, - 410888928.0, - 414792896.0, - 408956864.0, - 409185760.0, - 412513856.0, - 405430176.0, - 417268288.0, - 411270240.0, - 408358976.0, - 408169280.0, - 408885088.0, - 417539776.0, - 400110304.0, - 413166752.0, - 413704768.0, - 418178432.0, - 409899200.0, - 412180032.0, - 408936448.0, - 416983968.0, - 410752128.0, - 406807296.0, - 406977856.0, - 407779328.0, - 412997728.0, - 410356704.0, - 408474208.0, - 409943168.0, - 416296992.0, - 411913344.0, - 412763904.0, - 407826208.0, - 412081312.0, - 410528512.0, - 410612640.0, - 411905664.0, - 404348896.0, - 416405504.0, - 410370304.0, - 413573696.0, - 418568800.0, - 414526176.0, - 406187648.0, - 409909088.0, - 412512832.0, - 412409088.0, - 411042592.0, - 413653536.0, - 414702464.0, - 412562560.0, - 414280224.0, - 415883424.0, - 403675616.0, - 412089248.0, - 408515456.0, - 418335744.0, - 411349888.0, - 404206336.0, - 414782080.0, - 411190048.0, - 405753760.0, - 409812160.0, - 413012512.0, - 413965888.0, - 416909696.0, - 414205504.0, - 406583456.0, - 403910592.0, - 417990240.0, - 404456896.0, - 417939296.0, - 405434496.0, - 412307264.0, - 416589504.0, - 414508448.0, - 413783296.0, - 407825792.0, - 411619104.0, - 409458336.0, - 402773504.0, - 417758560.0, - 413692704.0, - 409094112.0, - 418525408.0, - 413656000.0, - 403587776.0, - 
416889760.0, - 409511328.0, - 413061216.0, - 417074688.0, - 401520640.0, - 418245664.0, - 409211136.0, - 416336512.0, - 416596512.0, - 413691360.0, - 416336640.0, - 408581920.0, - 418484608.0, - 410611744.0, - 406622592.0, - 414445952.0, - 417665696.0, - 412304576.0, - 410998880.0, - 413205824.0, - 418866144.0, - 417385056.0, - 411238240.0, - 410852224.0, - 417827200.0, - 408697696.0, - 412004608.0, - 417878144.0, - 416696256.0, - 400275040.0, - 416025568.0, - 415134720.0, - 411819584.0, - 420903648.0, - 416375392.0, - 407875744.0, - 414635808.0, - 413061056.0, - 414031392.0, - 418138784.0, - 407766528.0, - 419056768.0, - 414834624.0, - 405367904.0, - 411640864.0, - 420512544.0, - 410596736.0, - 412505184.0, - 411529280.0, - 418171264.0, - 414528352.0, - 410746144.0, - 401523232.0, - 411170336.0, - 406806880.0, - 403549920.0, - 399703296.0, - 413465984.0, - 409570048.0, - 406891296.0, - 414745920.0, - 409857088.0, - 412629888.0, - 415331616.0, - 415388640.0, - 411000064.0, - 411473952.0, - 413842240.0, - 412345888.0, - 417958240.0, - 399448416.0, - 415723968.0, - 414086400.0, - 409938144.0, - 414793216.0, - 410372256.0, - 409621024.0, - 408433472.0, - 410472672.0, - 403508160.0, - 411948000.0, - 409381472.0, - 410839488.0, - 414824512.0, - 413173664.0, - 422487232.0, - 408493280.0, - 418438336.0, - 404510976.0, - 406437024.0, - 419742944.0, - 409776224.0, - 414145856.0, - 415367104.0, - 410615616.0, - 409414368.0, - 413264960.0, - 408429600.0, - 413213280.0, - 410542176.0, - 412621280.0, - 417195008.0, - 415857344.0, - 412075808.0, - 407025024.0, - 416864384.0, - 406006240.0, - 410357408.0, - 410466144.0, - 413489984.0, - 422346496.0, - 408409664.0, - 404822848.0, - 413623104.0, - 417135488.0, - 413184576.0, - 415751392.0, - 414974912.0, - 422248032.0, - 408304736.0, - 412700896.0, - 406231424.0, - 422602336.0, - 422375168.0, - 401396256.0, - 413774112.0, - 408714752.0, - 409816096.0, - 408833344.0, - 409475104.0, - 409888160.0, - 409251872.0, - 408407936.0, - 409487616.0, - 411059552.0, - 408933120.0, - 413142752.0, - 415504000.0, - 406859872.0, - 400262400.0, - 416990816.0, - 407815424.0, - 405070304.0, - 414449760.0, - 407524864.0, - 412588704.0, - 415973984.0, - 405801504.0, - 417083072.0, - 405406432.0, - 417092320.0, - 419425408.0, - 398769120.0, - 409619936.0, - 419184544.0, - 418183296.0, - 413439584.0, - 408257088.0, - 408395104.0, - 409987712.0, - 413147040.0, - 411692384.0, - 416098912.0, - 410718400.0, - 417983104.0, - 416508768.0, - 411693632.0, - 413714688.0, - 409650240.0, - 410810272.0, - 409166656.0, - 418381344.0, - 415022944.0, - 416013760.0, - 413185440.0, - 409006368.0, - 408300224.0, - 410016480.0, - 416380480.0, - 411470080.0, - 414281280.0, - 408139840.0, - 417026752.0, - 424993600.0, - 418707648.0, - 404901312.0, - 409670880.0, - 415935936.0, - 408295520.0, - 420807488.0, - 405990656.0, - 411857184.0, - 403794464.0, - 416856416.0, - 408281728.0, - 418706528.0, - 407098752.0, - 408099584.0, - 422021472.0, - 414068448.0, - 405964672.0, - 406380320.0, - 409431776.0, - 416689632.0, - 409117472.0, - 408712608.0, - 409188352.0, - 418025472.0, - 408787520.0, - 417809440.0, - 410713856.0, - 410838976.0, - 404538208.0, - 410644128.0, - 408829888.0, - 406812864.0, - 421082848.0, - 405078272.0, - 409454784.0, - 406151840.0, - 414860896.0, - 404874080.0, - 418170496.0, - 415090176.0, - 413429856.0, - 414018592.0, - 417080832.0, - 416350976.0, - 408085024.0, - 415680160.0, - 410764288.0, - 416525824.0, - 415515488.0, - 412741376.0, - 412186976.0, - 415023296.0, - 401767872.0, - 
408590400.0, - 410976576.0, - 412373984.0, - 413890976.0, - 413547936.0, - 413189408.0, - 409986752.0, - 410224992.0, - 401877792.0, - 408283648.0, - 411967040.0, - 406617024.0, - 409350912.0, - 417277568.0, - 404634848.0, - 414047360.0, - 408804224.0, - 415608000.0, - 410062016.0, - 417742560.0, - 416662336.0, - 406339264.0, - 414942208.0, - 412868608.0, - 407392064.0, - 413066528.0, - 415261536.0, - 414303040.0, - 409643072.0, - 408382400.0, - 412263328.0, - 408197632.0, - 408900128.0, - 414820128.0, - 409075200.0, - 411732768.0, - 414604608.0, - 409029472.0, - 419163104.0, - 416645216.0, - 402355488.0, - 416218432.0, - 413576480.0, - 416073152.0, - 414948928.0, - 402899360.0, - 409368416.0, - 414215712.0, - 409511872.0, - 416543392.0, - 405668096.0, - 414999040.0, - 411480608.0, - 417967744.0, - 406704608.0, - 410216352.0, - 418870528.0, - 411148000.0, - 404389440.0, - 414091712.0, - 404349600.0, - 411022048.0, - 410273760.0, - 408304032.0, - 416404640.0, - 414859328.0, - 413521152.0, - 409438240.0, - 411023776.0, - 415843808.0, - 420726848.0, - 418109856.0, - 415636768.0, - 410362688.0, - 414244832.0, - 408885056.0, - 414116288.0, - 411190912.0, - 412045856.0, - 414100352.0, - 408663040.0, - 416548992.0, - 408255072.0, - 410600576.0, - 418523008.0, - 405684992.0, - 407968256.0, - 424508736.0, - 408812800.0, - 417322016.0, - 409140704.0, - 410040416.0, - 419333984.0, - 414006144.0, - 412334592.0, - 409420672.0, - 417956064.0, - 415071200.0, - 413162592.0, - 408815072.0, - 414430464.0, - 412782176.0, - 423251232.0, - 413873696.0, - 409398336.0, - 421932320.0, - 416800352.0, - 414005952.0, - 410387072.0, - 400667680.0, - 410455936.0, - 410716480.0, - 412333536.0, - 409420128.0, - 417956544.0, - 415071584.0, - 413163072.0, - 408814528.0, - 414430720.0, - 412782368.0, - 423250528.0, - 413873280.0, - 409398144.0, - 421933888.0, - 416800608.0, - 414960064.0, - 411043040.0, - 416053696.0, - 412307296.0, - 406388960.0, - 410268512.0, - 414598272.0, - 411614656.0, - 409754944.0, - 414264000.0, - 404840576.0, - 411062368.0, - 404831232.0, - 410469312.0, - 409517952.0, - 412259776.0, - 415050816.0, - 408245568.0, - 415958720.0, - 412945088.0, - 410110656.0, - 412552160.0, - 410075424.0, - 406095648.0, - 412135808.0, - 408065856.0, - 412062496.0, - 420191392.0, - 410822912.0, - 413143296.0, - 415380320.0, - 417372288.0, - 416036800.0, - 406144064.0, - 415809440.0, - 413041184.0, - 415098464.0, - 408788608.0, - 411995072.0, - 419606432.0, - 407992160.0, - 407718688.0, - 406517632.0, - 410663232.0, - 413921824.0, - 410626336.0, - 412333888.0, - 407286336.0, - 412857472.0, - 412953568.0, - 416187744.0, - 408670496.0, - 410816736.0, - 410832320.0, - 416285056.0, - 414148096.0, - 415671680.0, - 416401472.0, - 412892800.0, - 410457568.0, - 417862816.0, - 408737408.0, - 414763840.0, - 406149536.0, - 408431296.0, - 404359424.0, - 412105440.0, - 416662720.0, - 403636864.0, - 410578208.0, - 408686784.0, - 407738848.0, - 415004192.0, - 411451360.0, - 411308224.0, - 415067328.0, - 407297920.0, - 416666208.0, - 411425760.0, - 414241088.0, - 410561920.0, - 413198336.0, - 408375040.0, - 414440000.0, - 402914720.0, - 406725216.0, - 412218432.0, - 412333824.0, - 409421280.0, - 417955552.0, - 415071680.0, - 413163104.0, - 408814464.0, - 414430784.0, - 412781856.0, - 423251040.0, - 413873792.0, - 409399872.0, - 421932416.0, - 416800576.0, - 414960032.0, - 411042464.0, - 416054080.0, - 412306368.0, - 406388608.0, - 410268128.0, - 414597280.0, - 411612736.0, - 408295104.0, - 414462272.0, - 417366784.0, - 411096192.0, - 
412285920.0, - 406202240.0, - 407254496.0, - 412605824.0, - 403345856.0, - 406529920.0, - 413622688.0, - 415196064.0, - 412086176.0, - 410344992.0, - 408565760.0, - 407707584.0, - 406999168.0, - 408540576.0, - 408720480.0, - 408075552.0, - 420701632.0, - 413992352.0, - 409516032.0, - 406258496.0, - 419734592.0, - 415636032.0, - 413339936.0, - 414134336.0, - 408552352.0, - 420962624.0, - 412519552.0, - 414985376.0, - 409112800.0, - 410114080.0, - 412866208.0, - 404519328.0, - 408306176.0, - 419277504.0, - 410477568.0, - 418033280.0, - 412887840.0, - 405576096.0, - 410093152.0, - 405674016.0, - 404280832.0, - 406234976.0, - 409424800.0, - 412385952.0, - 408543712.0, - 406378976.0, - 419656224.0, - 408405952.0, - 415772640.0, - 412971200.0, - 418634976.0, - 411540544.0, - 410815712.0, - 411672384.0, - 419577536.0, - 401775584.0, - 416125920.0, - 412564608.0, - 406396832.0, - 419172992.0, - 410975616.0, - 419229696.0, - 406012096.0, - 412721120.0, - 408335744.0, - 410184192.0, - 407970400.0, - 403651584.0, - 417332704.0, - 406419200.0, - 406705536.0, - 419962176.0, - 415639200.0, - 407573184.0, - 417041280.0, - 418201280.0, - 418428288.0, - 413459200.0, - 417342336.0, - 421775392.0, - 409215936.0, - 411485760.0, - 414967680.0, - 411455360.0, - 410077248.0, - 407133472.0, - 414610656.0, - 412223904.0, - 412128000.0, - 417865952.0, - 411240128.0, - 409370656.0, - 412870144.0, - 408209440.0, - 407686720.0, - 415734528.0, - 410805984.0, - 418054432.0, - 405390752.0, - 411940864.0, - 412018496.0, - 410426176.0, - 415427104.0, - 409086784.0, - 412518464.0, - 416869440.0, - 408008384.0, - 408546624.0, - 409969984.0, - 409345536.0, - 405880288.0, - 413686688.0, - 412068704.0, - 424414560.0, - 402884288.0, - 426367424.0, - 412332352.0, - 409420608.0, - 417955968.0, - 415070688.0, - 413162752.0, - 408815040.0, - 414430912.0, - 412783104.0, - 423251232.0, - 413872416.0, - 409399072.0, - 421932480.0, - 416800608.0, - 414960832.0, - 411042912.0, - 416052992.0, - 412306720.0, - 406389024.0, - 410268224.0, - 414599648.0, - 411613472.0, - 409754464.0, - 414264256.0, - 404839328.0, - 411061280.0, - 404830528.0, - 410469632.0, - 409517216.0, - 412260704.0, - 415051104.0, - 408246304.0, - 415958304.0, - 412944992.0, - 410110112.0, - 412552736.0, - 410074880.0, - 406096256.0, - 412135648.0, - 408065024.0, - 412060960.0, - 420191872.0, - 410823136.0, - 413143744.0, - 415381248.0, - 417372224.0, - 416035904.0, - 406144000.0, - 415809376.0, - 413040672.0, - 415096640.0, - 408788352.0, - 411994240.0, - 419606048.0, - 407992384.0, - 407720096.0, - 406516192.0, - 410663584.0, - 413923008.0, - 410626880.0, - 412333312.0, - 407287040.0, - 412855872.0, - 412953696.0, - 416186784.0, - 408670272.0, - 410817664.0, - 410832480.0, - 416285024.0, - 414148864.0, - 415670656.0, - 416400928.0, - 412892672.0, - 410457472.0, - 417862656.0, - 408740128.0, - 414764544.0, - 406150304.0, - 408434368.0, - 404361312.0, - 412107424.0, - 416664416.0, - 403638496.0, - 410577728.0, - 408685888.0, - 407741504.0, - 415004640.0, - 411453056.0, - 411308192.0, - 415068416.0, - 407297920.0, - 416666816.0, - 411425984.0, - 414243264.0, - 410560576.0, - 413197600.0, - 408374976.0, - 414438720.0, - 402913856.0, - 406726240.0, - 412217376.0, - 409593184.0, - 416310208.0, - 412989696.0, - 415405952.0, - 412404096.0, - 405132032.0, - 413649344.0, - 410179456.0, - 411101632.0, - 417092896.0, - 415317152.0, - 414881536.0, - 413145472.0, - 411031744.0, - 410585024.0, - 415829120.0, - 407160768.0, - 408316832.0, - 413392928.0, - 422922304.0, - 407847264.0, - 
414778048.0, - 406403968.0, - 411318240.0, - 417926656.0, - 411127392.0, - 410436608.0, - 405836544.0, - 416875072.0, - 408596352.0, - 420724736.0, - 410561056.0, - 406772576.0, - 411313696.0, - 410316672.0, - 411800672.0, - 414975584.0, - 410908608.0, - 402847744.0, - 415278624.0, - 411141760.0, - 411254048.0, - 419268960.0, - 404416832.0, - 414470112.0, - 406740992.0, - 413413248.0, - 409985792.0, - 409414560.0, - 414224896.0, - 410853600.0, - 411807456.0, - 410688000.0, - 415543008.0, - 413525568.0, - 412613504.0, - 419237952.0, - 415279904.0, - 402528928.0, - 400186944.0, - 419198688.0, - 402603456.0, - 413331072.0, - 405925888.0, - 419994272.0, - 412333088.0, - 411687040.0, - 406823904.0, - 400992736.0, - 412719648.0, - 413201152.0, - 405933184.0, - 417393216.0, - 418254144.0, - 410101344.0, - 414142720.0, - 418019616.0, - 399554336.0, - 408644256.0, - 400246624.0, - 414155328.0, - 408550272.0, - 419760512.0, - 417298816.0, - 412370784.0, - 417099648.0, - 409352416.0, - 412594432.0, - 411392928.0, - 414576800.0, - 414586048.0, - 414782528.0, - 409057664.0, - 415109056.0, - 411199104.0, - 412653664.0, - 412627008.0, - 407838048.0, - 407430880.0, - 406327904.0, - 413594976.0, - 410473088.0, - 413426016.0, - 411759328.0, - 415309632.0, - 418306752.0, - 410454976.0, - 414280256.0, - 408103904.0, - 409534496.0, - 410438720.0, - 413541440.0, - 420091712.0, - 415800704.0, - 418100384.0, - 414012928.0, - 411054496.0, - 409962272.0, - 407187520.0, - 410066592.0, - 407791200.0, - 418949696.0, - 402407872.0, - 410174944.0, - 420186208.0, - 411943712.0, - 413347712.0, - 410057984.0, - 415427744.0, - 412076544.0, - 417233568.0, - 418581472.0, - 409409632.0, - 413002272.0, - 418524032.0, - 413671904.0, - 409373600.0, - 415921600.0, - 420740320.0, - 408673344.0, - 417167360.0, - 415787264.0, - 411862272.0, - 418703520.0, - 405013344.0, - 414699040.0, - 420402368.0, - 405552128.0, - 415035360.0, - 405473056.0, - 405967136.0, - 412309152.0, - 411652032.0, - 397773056.0, - 411081856.0, - 418675712.0, - 408187680.0, - 409751104.0, - 405371040.0, - 416268480.0, - 410736000.0, - 408020000.0, - 416843104.0, - 414388288.0, - 406380672.0, - 416985696.0, - 419766272.0, - 405959520.0, - 409991584.0, - 409848096.0, - 410249568.0, - 397445888.0, - 410630560.0, - 416447264.0, - 410464640.0, - 412293184.0, - 404117728.0, - 415202464.0, - 410438720.0, - 408767520.0, - 409015616.0, - 419974016.0, - 408899456.0, - 417250784.0, - 417589472.0, - 407038272.0, - 410672352.0, - 411449056.0, - 405278528.0, - 408102336.0, - 410093280.0, - 412896768.0, - 409913344.0, - 412756224.0, - 409367392.0, - 421088064.0, - 413039744.0, - 407730176.0, - 406522240.0, - 408859456.0, - 411516544.0, - 400306400.0, - 412775552.0, - 413981024.0, - 413943360.0, - 415361728.0, - 411286880.0, - 406578432.0, - 409504800.0, - 416983520.0, - 411709376.0, - 411107776.0, - 417143296.0, - 411754048.0, - 416764768.0, - 409507232.0, - 403772224.0, - 410465504.0, - 418273152.0, - 404107648.0, - 415542528.0, - 409330784.0, - 413391520.0, - 415793696.0, - 418099104.0, - 411934400.0, - 411521536.0, - 411593632.0, - 411388736.0, - 407068224.0, - 408093696.0, - 403867776.0, - 409259392.0, - 408781184.0, - 411940320.0, - 410667296.0, - 417449312.0, - 412331392.0, - 413866432.0, - 413272960.0, - 411865344.0, - 411812320.0, - 415565376.0, - 409462784.0, - 411160480.0, - 416418496.0, - 406518336.0, - 416268800.0, - 408092160.0, - 401766432.0, - 419639840.0, - 410718944.0, - 408926048.0, - 417168512.0, - 412317408.0, - 411438624.0, - 410338432.0, - 406784000.0, - 
[... several hundred deleted per-step golden-value samples for the preceding memory metric (values roughly 3.97e8–4.28e8) elided ...]
-            ]
-        },
-        "mem-allocated-bytes": {
-            "start_step": 0,
-            "end_step": 27308,
-            "step_interval": 5,
-            "values": [
[... deleted per-step samples (each approximately 1.7447e10) continue ...]
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447620608.0, - 17446985728.0, - 17447604224.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447297024.0, - 17446985728.0, - 17447526400.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447239680.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447116800.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447731200.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447387136.0, - 17446985728.0, - 17446985728.0, - 17447665664.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447534592.0, - 17446985728.0, - 17447714816.0, - 17446985728.0, - 17446985728.0, - 17447632896.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447886848.0, - 17447124992.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447256064.0, - 17446985728.0, - 17446985728.0, - 17447157760.0, - 17447337984.0, - 17447702528.0, - 17446985728.0, - 17447833600.0, - 17447690240.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447395328.0, - 17447362560.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447428096.0, - 17446985728.0, - 17446985728.0, - 17447309312.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447636992.0, - 17446985728.0, - 17447616512.0, - 17446985728.0, - 17447288832.0, - 17446985728.0, - 17447456768.0, - 17446985728.0, - 17447579648.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447286784.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447301120.0, - 17446985728.0, - 17447084032.0, - 17446985728.0, - 17446985728.0, - 17447927808.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448034304.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447075840.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447755776.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447288832.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447477248.0, - 17447211008.0, - 17446985728.0, - 17446985728.0, - 17447690240.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 
17446985728.0, - 17446985728.0, - 17447387136.0, - 17447997440.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447976960.0, - 17446985728.0, - 17447985152.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447870464.0, - 17446985728.0, - 17446985728.0, - 17447026688.0, - 17446985728.0, - 17446985728.0, - 17447231488.0, - 17446985728.0, - 17446985728.0, - 17447927808.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447231488.0, - 17446985728.0, - 17447075840.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446983680.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447395328.0, - 17446985728.0, - 17446985728.0, - 17447690240.0, - 17446985728.0, - 17447178240.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447153664.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447919616.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447755776.0, - 17446985728.0, - 17447641088.0, - 17446985728.0, - 17446985728.0, - 17447002112.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447845888.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447165952.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447305216.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447788544.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447768064.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448185856.0, - 17447157760.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447436288.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447825408.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447165952.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447477248.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447133184.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447313408.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447878656.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447518208.0, - 17446985728.0, - 17446985728.0, - 17447182336.0, - 17446985728.0, - 17446985728.0, - 17447542784.0, - 17447944192.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447985152.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448132608.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447149568.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447886848.0, - 17446985728.0, - 17446985728.0, - 17447256064.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447108608.0, - 17446985728.0, - 17447624704.0, - 17447624704.0, - 17447624704.0, - 17447624704.0, - 17447624704.0, - 17448075264.0, - 17447624704.0, - 17447624704.0, - 17447624704.0, - 17448140800.0, - 17447624704.0, - 17447624704.0, - 17447624704.0, - 17446985728.0, - 17447337984.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447436288.0, - 17447985152.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447878656.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447346176.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447124992.0, - 17446985728.0, - 17447641088.0, - 17446985728.0, - 17447174144.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447133184.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447084032.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447264256.0, - 17447133184.0, - 17446985728.0, - 17447251968.0, - 17446985728.0, - 17447370752.0, - 17446985728.0, - 17446985728.0, - 17447849984.0, - 17447116800.0, - 17446985728.0, - 17446985728.0, - 17447108608.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448034304.0, - 17447051264.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447858176.0, - 17446985728.0, - 17447542784.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448341504.0, - 17447600128.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447804928.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447165952.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447706624.0, - 17448673280.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17447706624.0, - 17448185856.0, - 17447706624.0, - 17447706624.0, - 
17447706624.0, - 17447680000.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448038400.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447731200.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447936000.0, - 17447682048.0, - 17448099840.0, - 17448263680.0, - 17447682048.0, - 17448017920.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448165376.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448673280.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448030208.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448566784.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447845888.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448464384.0, - 17447682048.0, - 17448460288.0, - 17448697856.0, - 17448349696.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448660992.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17447682048.0, - 17448689664.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447419904.0, - 17446985728.0, - 17446985728.0, - 17447813120.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447280640.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447718912.0, - 17447854080.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447510016.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447862272.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447501824.0, - 17447305216.0, - 17446985728.0, - 17446993920.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447878656.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447714816.0, - 17446985728.0, - 17447432192.0, - 17446985728.0, - 17447976960.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448017920.0, - 17446985728.0, - 17446985728.0, - 17447661568.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447313408.0, - 17446985728.0, - 17447600128.0, - 17446985728.0, - 17447895040.0, - 17446985728.0, - 17447485440.0, - 17446985728.0, - 17447919616.0, - 17446985728.0, - 17447337984.0, - 17446985728.0, - 17446989824.0, - 17447358464.0, - 17447034880.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447018496.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447559168.0, - 17447493632.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447591936.0, - 17447485440.0, - 17446985728.0, - 17446985728.0, - 17447190528.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447510016.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447854080.0, - 17446985728.0, - 17446985728.0, - 17447370752.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447665664.0, - 17446985728.0, - 17447886848.0, - 17446985728.0, - 17446985728.0, - 17448038400.0, - 17446985728.0, - 17447559168.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447452672.0, - 17446985728.0, - 17446985728.0, - 17447198720.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447559168.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448034304.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447084032.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447649280.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447403520.0, - 17446985728.0, - 17448235008.0, - 17446985728.0, - 17447124992.0, - 17447862272.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447841792.0, - 17447907328.0, - 17446985728.0, - 17447837696.0, - 17447821312.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447100416.0, - 17446985728.0, - 17447059456.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447600128.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447100416.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447567360.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447231488.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447342080.0, - 17447084032.0, - 17446983680.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447002112.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447084032.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447444480.0, - 17448157184.0, - 17446985728.0, - 17447149568.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447403520.0, - 17446985728.0, - 17447972864.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447673856.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447100416.0, - 17446985728.0, - 17447772160.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447477248.0, - 17447464960.0, - 17447464960.0, - 17448144896.0, - 17448194048.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17448071168.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447624704.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447833600.0, - 17447464960.0, - 17447702528.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 
17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17448112128.0, - 17447464960.0, - 17448349696.0, - 17447464960.0, - 17447636992.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447686144.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447882752.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447907328.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447960576.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17448341504.0, - 17447464960.0, - 17447464960.0, - 17447464960.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447890944.0, - 17448525824.0, - 17447481344.0, - 17447481344.0, - 17448022016.0, - 17448292352.0, - 17448169472.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447579648.0, - 17448054784.0, - 17448103936.0, - 17447481344.0, - 17447989248.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17448398848.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447956480.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447514112.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447596032.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17448361984.0, - 17448443904.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17448374272.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447481344.0, - 17447297024.0, - 17448173568.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17448312832.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 
17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17448009728.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447493632.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17448239104.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447813120.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17447854080.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17448247296.0, - 17447297024.0, - 17447297024.0, - 17447297024.0, - 17448026112.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447718912.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447026688.0, - 17446985728.0, - 17447067648.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447141376.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447755776.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447534592.0, - 17446985728.0, - 17446985728.0, - 17447968768.0, - 17446985728.0, - 17447653376.0, - 17447383040.0, - 17446985728.0, - 17447018496.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447944192.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447821312.0, - 17446985728.0, - 17446983680.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447165952.0, - 17446985728.0, - 17447542784.0, - 17446985728.0, - 17446985728.0, - 17447776256.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447780352.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447567360.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447661568.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447075840.0, - 17447485440.0, - 17447239680.0, - 17447919616.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447837696.0, - 17447763968.0, - 17446985728.0, - 17446985728.0, - 17447493632.0, - 17446985728.0, - 17447051264.0, - 17447256064.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447510016.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448005632.0, - 17446985728.0, - 17446985728.0, - 17447227392.0, - 17446985728.0, - 17446985728.0, - 17447919616.0, - 17447821312.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447051264.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447071744.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447387136.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447153664.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447743488.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447596032.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447374848.0, - 17446985728.0, - 17447088128.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447862272.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447141376.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447305216.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447034880.0, - 17446985728.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448017920.0, - 17447739392.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447337984.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447387136.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447768064.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447268352.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447038976.0, - 17446985728.0, - 17447034880.0, - 17447493632.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447100416.0, - 17447403520.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447845888.0, - 17446985728.0, - 17447727104.0, - 17446985728.0, - 17446985728.0, - 17447923712.0, - 17447596032.0, - 17447141376.0, - 17446985728.0, - 17447997440.0, - 17446985728.0, - 17446985728.0, - 17447854080.0, - 17446985728.0, - 17447469056.0, - 17447018496.0, - 17446985728.0, - 17447321600.0, - 17446985728.0, - 17446985728.0, - 17447362560.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447288832.0, - 17446985728.0, - 17447436288.0, - 17446985728.0, - 17447342080.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447059456.0, - 17446985728.0, - 17446985728.0, - 17447702528.0, - 17446985728.0, - 17447727104.0, - 17446985728.0, - 17447387136.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447944192.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448009728.0, - 17446985728.0, - 17447141376.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447354368.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447108608.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447075840.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447034880.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447272448.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 
17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447108608.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447018496.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447071744.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447706624.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447088128.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447428096.0, - 17446985728.0, - 17447305216.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447780352.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447395328.0, - 17446985728.0, - 17447329792.0, - 17446985728.0, - 17447673856.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447182336.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447157760.0, - 17447321600.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447510016.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 
17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447845888.0, - 17447542784.0, - 17448312832.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447849984.0, - 17447542784.0, - 17447870464.0, - 17447542784.0, - 17448419328.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17448034304.0, - 17447542784.0, - 17447542784.0, - 17448140800.0, - 17447542784.0, - 17448054784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447813120.0, - 17448288256.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447854080.0, - 17448423424.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447604224.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447542784.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17448022016.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447641088.0, - 17447849984.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447907328.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447677952.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447972864.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447800832.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447514112.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17448136704.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447632896.0, - 17448226816.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17448071168.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447923712.0, - 17447325696.0, - 17447325696.0, - 
17447686144.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17447469056.0, - 17447325696.0, - 17447624704.0, - 17447325696.0, - 17447604224.0, - 17447325696.0, - 17447481344.0, - 17447325696.0, - 17447325696.0, - 17447325696.0, - 17446983680.0, - 17446985728.0, - 17447116800.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447817216.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447895040.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448026112.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447182336.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447677952.0, - 17446985728.0, - 17446985728.0, - 17447231488.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448947712.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447477248.0, - 17446985728.0, - 17446985728.0, - 17447112704.0, - 17447321600.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447702528.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447706624.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447452672.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448030208.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17448337408.0, - 17446985728.0, - 17447514112.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17447075840.0, - 17446985728.0, - 17447350272.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0, - 17446985728.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 27308, - "step_interval": 5, - "values": [ - 144.08891, - 27.18296, - 28.7759, - 28.37953, - 27.76792, - 28.07504, - 27.39385, - 26.78229, - 27.39429, - 25.28079, - 25.5494, - 26.54548, - 25.41502, - 25.6055, - 25.57833, - 24.08431, - 24.73616, - 25.01832, - 24.08821, - 23.51395, - 24.73726, - 24.21609, - 24.04016, - 25.09547, - 23.76432, - 23.56989, - 23.52949, - 23.4645, - 23.33365, - 23.6517, - 23.47858, - 23.61385, - 23.7676, - 23.47229, - 22.97194, - 23.39169, - 23.41737, - 23.56892, - 23.08883, - 22.66263, - 23.51845, - 22.96823, - 22.61889, - 23.5187, - 22.80851, - 22.96399, - 22.9144, - 22.42292, - 22.60671, - 23.16861, - 22.82373, - 22.84703, - 22.62976, - 22.53477, - 22.35819, - 22.7189, - 22.38451, - 22.50971, - 22.93356, - 22.64643, - 22.62173, - 22.53904, - 22.3477, - 22.37545, - 22.99236, - 22.56689, - 22.36764, - 22.76719, - 22.32971, - 22.26796, - 22.43175, - 22.86586, - 22.37827, - 22.31797, - 23.05517, - 22.38161, - 22.15415, - 22.85999, - 
[... remainder of the old per-step golden-value list removed by this patch (values mostly in the 21–24 range, with occasional spikes); list elided ...]
- 21.64982, - 21.56321, - 21.51924, - 21.56886, - 21.5423, - 21.71634, - 22.45646, - 21.58003, - 21.64402 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 12.66411, "5": 12.64025, "10": 11.72968, "15": 10.70873, "20": 10.24216, "25": 9.93828, "30": 9.71125, "35": 9.45753, "40": 9.26038, "45": 9.07455, "50": 8.89765, "55": 8.69455, "60": 8.57237, "65": 8.48259, "70": 8.33043, "75": 8.19871, "80": 8.08632, "85": 7.97122, "90": 7.79547, "95": 7.69826, "100": 7.56497, "105": 7.43212, "110": 7.31844, "115": 7.2373, "120": 7.12049, "125": 7.03009, "130": 6.95809, "135": 6.84825, "140": 6.80007, "145": 6.69784, "150": 6.63492, "155": 6.60686, "160": 6.51094, "165": 6.43745, "170": 6.365, "175": 6.31598, "180": 6.23122, "185": 6.1993, "190": 6.06316, "195": 6.07071, "200": 5.97765, "205": 5.9193, "210": 5.89537, "215": 5.80761, "220": 5.74233, "225": 5.70095, "230": 5.63184, "235": 5.62826, "240": 5.50954, "245": 5.52812, "250": 5.44773, "255": 5.40715, "260": 5.36239, "265": 5.31515, "270": 5.28549, "275": 5.25815, "280": 5.20107, "285": 5.17412, "290": 5.12817, "295": 5.09683, "300": 5.05893, "305": 5.04573, "310": 4.98507, "315": 4.94627, "320": 4.90326, "325": 4.92193, "330": 4.86892, "335": 4.84094, "340": 4.81937, "345": 4.7864, "350": 4.74458, "355": 4.74424, "360": 4.6582, "365": 4.66167, "370": 4.63049, "375": 4.60898, "380": 4.56595, "385": 4.56026, "390": 4.49866, "395": 4.47305, "400": 4.44746, "405": 4.37811, "410": 4.38857, "415": 4.38106, "420": 4.3253, "425": 4.30971, "430": 4.26114, "435": 4.24136, "440": 4.1953, "445": 4.19113, "450": 4.17079, "455": 4.12056, "460": 4.07583, "465": 4.07812, "470": 4.01335, "475": 4.02141, "480": 4.0084, "485": 3.98832, "490": 3.96538, "495": 3.94001, "500": 3.88805, "505": 3.88745, "510": 3.86525, "515": 3.81983, "520": 3.83807, "525": 3.82093, "530": 3.76801, "535": 3.81191, "540": 3.75445, "545": 3.75269, "550": 3.71949, "555": 3.72259, "560": 3.66108, "565": 3.68278, "570": 3.67212, "575": 3.63409, "580": 3.63034, "585": 3.63397, "590": 3.60541, "595": 3.564, "600": 3.57502, "605": 3.55125, "610": 3.54806, "615": 3.53962, "620": 3.51985, "625": 3.50042, "630": 3.48705, "635": 3.47419, "640": 3.47953, "645": 3.48245, "650": 3.52422, "655": 3.45583, "660": 3.46182, "665": 3.45366, "670": 3.50199, "675": 3.4372, "680": 3.44962, "685": 3.41756, "690": 3.41679, "695": 3.38499, "700": 3.36772, "705": 3.34605, "710": 3.35171, "715": 3.36858, "720": 3.35674, "725": 3.32575, "730": 3.3248, "735": 3.30758, "740": 3.30011, "745": 3.28958, "750": 3.30876, "755": 3.29698, "760": 3.30709, "765": 3.30662, "770": 3.31083, "775": 3.33269, "780": 3.30028, "785": 3.30615, "790": 3.29453, "795": 3.26362, "800": 3.31097, "805": 3.25187, "810": 3.26899, "815": 3.27435, "820": 3.28248, "825": 3.25339, "830": 3.22624, "835": 3.2306, "840": 3.17331, "845": 3.16177, "850": 3.21927, "855": 3.19184, "860": 3.17366, "865": 3.16416, "870": 3.15613, "875": 3.19139, "880": 3.18374, "885": 3.2121, "890": 3.19246, "895": 3.19623, "900": 3.18808, "905": 3.16886, "910": 3.11639, "915": 3.11507, "920": 3.11197, "925": 3.10839, "930": 3.11466, "935": 3.12819, "940": 3.15475, "945": 3.12843, "950": 3.12777, "955": 3.1579, "960": 3.13167, "965": 3.10987, "970": 3.07846, "975": 3.09835, "980": 3.07991, "985": 3.06312, "990": 3.03495, "995": 3.08474, "1000": 3.0593, "1005": 3.05179, "1010": 3.04255, "1015": 3.02784, "1020": 3.03595, "1025": 3.04677, "1030": 3.06063, "1035": 3.04862, "1040": 3.03742, "1045": 
3.01162, "1050": 3.01422, "1055": 3.00397, "1060": 3.06335, "1065": 3.07925, "1070": 3.97836, "1075": 3.41771, "1080": 3.23531, "1085": 3.22552, "1090": 3.24195, "1095": 3.22987, "1100": 3.19543, "1105": 3.15796, "1110": 3.18421, "1115": 3.11894, "1120": 3.26188, "1125": 3.23357, "1130": 3.09644, "1135": 3.07067, "1140": 3.05699, "1145": 3.04059, "1150": 3.02466, "1155": 3.01723, "1160": 2.97678, "1165": 2.98804, "1170": 3.00087, "1175": 2.98693, "1180": 2.97516, "1185": 2.96135, "1190": 2.97628, "1195": 2.93395, "1200": 3.05481, "1205": 3.05972, "1210": 3.01318, "1215": 3.00899, "1220": 2.97379, "1225": 2.95035, "1230": 2.91528, "1235": 2.96031, "1240": 2.91402, "1245": 2.92575, "1250": 2.93164, "1255": 2.95156, "1260": 2.91604, "1265": 2.94068, "1270": 2.95382, "1275": 2.89718, "1280": 2.92873, "1285": 2.92493, "1290": 2.93188, "1295": 2.9126, "1300": 2.91107, "1305": 2.93698, "1310": 2.9123, "1315": 2.9007, "1320": 2.89665, "1325": 2.91498, "1330": 2.85804, "1335": 2.91624, "1340": 2.87073, "1345": 2.90309, "1350": 2.85229, "1355": 2.90537, "1360": 2.92863, "1365": 2.88589, "1370": 2.89301, "1375": 2.85848, "1380": 2.87762, "1385": 2.88798, "1390": 2.86817, "1395": 2.88123, "1400": 2.89179, "1405": 2.90726, "1410": 2.9376, "1415": 2.9686, "1420": 2.94506, "1425": 2.9474, "1430": 2.93253, "1435": 2.88727, "1440": 2.84969, "1445": 2.88308, "1450": 2.83773, "1455": 2.85838, "1460": 2.87061, "1465": 2.83131, "1470": 2.86907, "1475": 2.81346, "1480": 2.81549, "1485": 2.82986, "1490": 2.81512, "1495": 2.85443, "1500": 2.86456, "1505": 2.87719, "1510": 2.83753, "1515": 2.85302, "1520": 2.81661, "1525": 2.78409, "1530": 2.80864, "1535": 2.85747, "1540": 2.84024, "1545": 2.85032, "1550": 2.81224, "1555": 2.84487, "1560": 2.83478, "1565": 2.80097, "1570": 2.80491, "1575": 2.80141, "1580": 2.79393, "1585": 2.80745, "1590": 2.77778, "1595": 2.81472, "1600": 2.80197, "1605": 2.7972, "1610": 2.76916, "1615": 2.79501, "1620": 2.77315, "1625": 2.75079, "1630": 2.78435, "1635": 2.80166, "1640": 2.79257, "1645": 2.7863, "1650": 2.79855, "1655": 2.84485, "1660": 2.82549, "1665": 2.84156, "1670": 2.93929, "1675": 2.87743, "1680": 2.88283, "1685": 2.9216, "1690": 2.88221, "1695": 2.86511, "1700": 2.85339, "1705": 2.7894, "1710": 2.78142, "1715": 2.78649, "1720": 2.77703, "1725": 2.74425, "1730": 2.78559, "1735": 2.76884, "1740": 2.74441, "1745": 2.74182, "1750": 2.76247, "1755": 2.73598, "1760": 2.78856, "1765": 2.74834, "1770": 2.77762, "1775": 2.73894, "1780": 2.76021, "1785": 2.75718, "1790": 2.77785, "1795": 2.74368, "1800": 2.72773, "1805": 2.70714, "1810": 2.7607, "1815": 2.74171, "1820": 2.7293, "1825": 2.72688, "1830": 2.72353, "1835": 2.73354, "1840": 2.74491, "1845": 2.74861, "1850": 2.74304, "1855": 2.75878, "1860": 2.74196, "1865": 2.70941, "1870": 2.72995, "1875": 2.7087, "1880": 2.71861, "1885": 2.73501, "1890": 2.70519, "1895": 2.69055, "1900": 2.73337, "1905": 2.68291, "1910": 2.70001, "1915": 2.70658, "1920": 2.7268, "1925": 2.71421, "1930": 2.74369, "1935": 2.77034, "1940": 2.73147, "1945": 2.72794, "1950": 2.72357, "1955": 2.69207, "1960": 2.67474, "1965": 2.71433, "1970": 2.69854, "1975": 2.70536, "1980": 2.72326, "1985": 2.7203, "1990": 2.71206, "1995": 2.72553, "2000": 2.69936, "2005": 2.71907, "2010": 2.6817, "2015": 2.71974, "2020": 2.69573, "2025": 2.70604, "2030": 2.72398, "2035": 2.68381, "2040": 2.69503, "2045": 2.67116, "2050": 2.6973, "2055": 2.67721, "2060": 2.67332, "2065": 2.68926, "2070": 2.66994, "2075": 2.66787, "2080": 2.67915, "2085": 2.67746, "2090": 2.66775, "2095": 
2.67829, "2100": 2.66715, "2105": 2.7004, "2110": 2.69574, "2115": 2.75547, "2120": 2.67509, "2125": 2.68612, "2130": 2.66527, "2135": 2.68799, "2140": 2.66268, "2145": 2.66884, "2150": 2.67767, "2155": 2.64851, "2160": 2.66353, "2165": 2.65056, "2170": 2.67095, "2175": 2.6584, "2180": 2.66306, "2185": 2.63752, "2190": 2.62686, "2195": 2.66552, "2200": 2.64347, "2205": 2.6618, "2210": 2.64136, "2215": 2.64114, "2220": 2.71045, "2225": 2.64335, "2230": 2.64361, "2235": 2.64845, "2240": 2.64176, "2245": 2.63915, "2250": 2.63519, "2255": 2.6365, "2260": 2.64118, "2265": 2.64453, "2270": 2.65999, "2275": 2.64213, "2280": 2.61531, "2285": 2.63257, "2290": 2.65365, "2295": 2.62124, "2300": 2.6477, "2305": 2.67718, "2310": 2.70744, "2315": 2.67927, "2320": 2.67804, "2325": 2.64056, "2330": 2.61535, "2335": 2.62917, "2340": 2.66209, "2345": 2.62845, "2350": 2.63902, "2355": 2.61339, "2360": 2.63046, "2365": 2.62942, "2370": 2.59173, "2375": 2.58542, "2380": 2.62102, "2385": 2.59214, "2390": 2.61148, "2395": 2.64054, "2400": 2.59722, "2405": 2.61233, "2410": 2.6018, "2415": 2.6437, "2420": 2.63791, "2425": 2.64274, "2430": 2.59276, "2435": 2.6036, "2440": 2.61552, "2445": 2.59808, "2450": 2.58709, "2455": 2.60842, "2460": 2.62093, "2465": 2.59042, "2470": 2.56879, "2475": 2.58903, "2480": 2.59836, "2485": 2.59674, "2490": 2.60158, "2495": 2.60849, "2500": 2.58505, "2505": 2.59374, "2510": 2.5679, "2515": 2.59808, "2520": 2.59656, "2525": 2.57778, "2530": 2.5943, "2535": 2.59878, "2540": 2.58501, "2545": 2.59571, "2550": 2.59502, "2555": 2.59854, "2560": 2.60331, "2565": 2.58004, "2570": 2.59966, "2575": 2.59963, "2580": 2.8128, "2585": 2.65809, "2590": 2.65929, "2595": 2.75143, "2600": 2.70818, "2605": 2.66193, "2610": 2.61506, "2615": 2.62751, "2620": 2.58555, "2625": 2.59935, "2630": 2.57915, "2635": 2.55835, "2640": 2.56615, "2645": 2.57026, "2650": 2.55799, "2655": 2.60527, "2660": 2.58058, "2665": 2.58446, "2670": 2.56055, "2675": 2.58648, "2680": 2.53466, "2685": 2.56775, "2690": 2.55049, "2695": 2.56732, "2700": 2.66566, "2705": 2.59367, "2710": 2.62193, "2715": 2.58995, "2720": 2.54105, "2725": 2.54948, "2730": 2.56565, "2735": 2.58441, "2740": 2.5468, "2745": 2.5698, "2750": 2.54425, "2755": 2.59405, "2760": 2.57424, "2765": 2.54882, "2770": 2.56716, "2775": 2.55046, "2780": 2.55724, "2785": 2.54974, "2790": 2.5585, "2795": 2.60696, "2800": 2.59997, "2805": 2.63121, "2810": 2.58955, "2815": 2.57576, "2820": 2.56967, "2825": 2.55728, "2830": 2.55115, "2835": 2.55685, "2840": 2.53837, "2845": 2.53745, "2850": 2.54749, "2855": 2.52753, "2860": 2.58275, "2865": 2.57298, "2870": 2.54208, "2875": 2.549, "2880": 2.56742, "2885": 2.55905, "2890": 2.56162, "2895": 2.56109, "2900": 2.52562, "2905": 2.55868, "2910": 2.52025, "2915": 2.53528, "2920": 2.54759, "2925": 2.54255, "2930": 2.52798, "2935": 2.56117, "2940": 2.5639, "2945": 2.54586, "2950": 2.5258, "2955": 2.56601, "2960": 2.52372, "2965": 2.52682, "2970": 2.5412, "2975": 2.53928, "2980": 2.55879, "2985": 2.54966, "2990": 2.56713, "2995": 2.58732, "3000": 2.62043, "3005": 2.54616, "3010": 2.5562, "3015": 2.523, "3020": 2.54534, "3025": 2.55758, "3030": 2.55486, "3035": 2.52368, "3040": 2.55472, "3045": 2.55348, "3050": 3.50555, "3055": 3.63174, "3060": 2.92022, "3065": 2.75255, "3070": 2.69081, "3075": 2.59987, "3080": 2.56219, "3085": 2.53174, "3090": 2.5529, "3095": 2.54693, "3100": 2.52401, "3105": 2.50367, "3110": 2.5328, "3115": 2.48541, "3120": 2.50737, "3125": 2.52038, "3130": 2.5088, "3135": 2.48838, "3140": 2.50779, "3145": 2.51972, 
"3150": 2.55196, "3155": 2.52025, "3160": 2.52688, "3165": 2.52796, "3170": 4.17319, "3175": 2.77985, "3180": 2.56865, "3185": 2.57197, "3190": 2.5472, "3195": 2.53452, "3200": 2.51396, "3205": 2.51495, "3210": 2.52743, "3215": 2.55762, "3220": 2.52617, "3225": 2.46009, "3230": 2.51883, "3235": 2.5312, "3240": 2.50293, "3245": 2.50115, "3250": 2.49839, "3255": 2.5034, "3260": 2.50892, "3265": 2.50159, "3270": 2.52619, "3275": 2.50846, "3280": 2.47671, "3285": 2.48644, "3290": 2.5375, "3295": 2.51955, "3300": 2.4833, "3305": 2.54535, "3310": 2.54767, "3315": 2.56026, "3320": 2.53333, "3325": 2.53809, "3330": 2.52475, "3335": 2.4946, "3340": 2.50215, "3345": 2.49012, "3350": 2.4903, "3355": 2.49625, "3360": 2.50736, "3365": 2.50567, "3370": 2.48857, "3375": 2.49988, "3380": 2.4899, "3385": 2.46945, "3390": 2.48901, "3395": 2.49895, "3400": 2.49295, "3405": 2.47971, "3410": 2.46929, "3415": 2.47556, "3420": 2.49036, "3425": 2.4996, "3430": 2.48681, "3435": 2.5131, "3440": 2.4909, "3445": 2.49033, "3450": 2.46533, "3455": 2.48465, "3460": 2.47808, "3465": 2.50284, "3470": 2.47673, "3475": 2.49841, "3480": 2.47043, "3485": 2.49074, "3490": 2.50219, "3495": 2.49873, "3500": 2.45696, "3505": 2.49171, "3510": 2.49058, "3515": 2.49019, "3520": 2.46089, "3525": 2.49939, "3530": 2.49725, "3535": 2.47231, "3540": 2.50289, "3545": 2.491, "3550": 2.47633, "3555": 2.46912, "3560": 2.47893, "3565": 2.46885, "3570": 2.46709, "3575": 2.46794, "3580": 2.43922, "3585": 2.48109, "3590": 2.47552, "3595": 2.48263, "3600": 2.45656, "3605": 2.49847, "3610": 2.49435, "3615": 2.47212, "3620": 2.46245, "3625": 2.46594, "3630": 2.52034, "3635": 2.48927, "3640": 2.46579, "3645": 2.48879, "3650": 2.45377, "3655": 2.47134, "3660": 2.47024, "3665": 2.46409, "3670": 2.47807, "3675": 2.47961, "3680": 2.48651, "3685": 2.97677, "3690": 2.71522, "3695": 2.54332, "3700": 2.5244, "3705": 2.49832, "3710": 2.47836, "3715": 2.45941, "3720": 2.45624, "3725": 2.46092, "3730": 2.45602, "3735": 2.46255, "3740": 2.45272, "3745": 2.45936, "3750": 2.4459, "3755": 2.42484, "3760": 2.45679, "3765": 2.44605, "3770": 2.46919, "3775": 2.46531, "3780": 2.4194, "3785": 2.48545, "3790": 2.4578, "3795": 2.44743, "3800": 2.45089, "3805": 2.45547, "3810": 2.44483, "3815": 2.46114, "3820": 2.4749, "3825": 2.4645, "3830": 2.46158, "3835": 2.46674, "3840": 2.4581, "3845": 2.4435, "3850": 2.45596, "3855": 2.49623, "3860": 2.46442, "3865": 2.47126, "3870": 2.45498, "3875": 2.44775, "3880": 2.44513, "3885": 2.47022, "3890": 2.43861, "3895": 2.43864, "3900": 2.43908, "3905": 2.44399, "3910": 2.41899, "3915": 2.45898, "3920": 2.44765, "3925": 2.38065, "3930": 2.43301, "3935": 2.41682, "3940": 2.44297, "3945": 2.45459, "3950": 2.45838, "3955": 2.42785, "3960": 2.43634, "3965": 2.46543, "3970": 2.44646, "3975": 2.42453, "3980": 2.41897, "3985": 2.44462, "3990": 2.44677, "3995": 2.42722, "4000": 2.45637, "4005": 2.40108, "4010": 2.42734, "4015": 2.44864, "4020": 2.4148, "4025": 2.4428, "4030": 2.42374, "4035": 2.42748, "4040": 2.42454, "4045": 2.43675, "4050": 2.39771, "4055": 2.41691, "4060": 2.42674, "4065": 2.41677, "4070": 2.40544, "4075": 2.41117, "4080": 2.43502, "4085": 2.42062, "4090": 2.43591, "4095": 2.45371, "4100": 2.42327, "4105": 2.41664, "4110": 2.4086, "4115": 2.44727, "4120": 2.4208, "4125": 2.43135, "4130": 2.41342, "4135": 2.42134, "4140": 2.38586, "4145": 2.41833, "4150": 2.39067, "4155": 2.39839, "4160": 2.40338, "4165": 2.37409, "4170": 2.39872, "4175": 2.40511, "4180": 2.40637, "4185": 2.40249, "4190": 2.4125, "4195": 2.38705, "4200": 
2.40897, "4205": 2.42774, "4210": 2.40223, "4215": 2.40561, "4220": 2.42666, "4225": 2.41957, "4230": 2.4042, "4235": 2.42502, "4240": 2.38898, "4245": 2.41357, "4250": 2.40634, "4255": 2.41681, "4260": 2.39775, "4265": 2.40796, "4270": 2.4032, "4275": 2.37535, "4280": 2.41899, "4285": 2.38559, "4290": 2.3912, "4295": 2.39589, "4300": 2.38517, "4305": 2.40207, "4310": 2.38928, "4315": 2.4074, "4320": 2.38044, "4325": 2.3739, "4330": 2.44088, "4335": 2.43452, "4340": 2.42374, "4345": 2.42461, "4350": 2.40463, "4355": 2.41599, "4360": 2.38614, "4365": 2.39198, "4370": 2.38546, "4375": 2.39558, "4380": 2.37887, "4385": 2.40355, "4390": 2.37008, "4395": 2.36908, "4400": 2.38129, "4405": 2.38291, "4410": 2.3617, "4415": 2.38131, "4420": 2.34726, "4425": 2.40769, "4430": 2.47172, "4435": 2.39215, "4440": 2.39478, "4445": 2.37947, "4450": 2.38038, "4455": 2.37322, "4460": 2.37966, "4465": 2.38359, "4470": 2.37862, "4475": 2.3733, "4480": 2.35494, "4485": 2.38871, "4490": 2.37306, "4495": 2.36491, "4500": 2.35944, "4505": 2.3974, "4510": 2.37231, "4515": 2.38846, "4520": 2.39679, "4525": 2.39883, "4530": 2.40719, "4535": 2.38082, "4540": 2.37977, "4545": 2.35828, "4550": 2.36703, "4555": 2.35675, "4560": 2.3746, "4565": 2.36973, "4570": 2.38381, "4575": 2.37212, "4580": 2.38227, "4585": 2.36506, "4590": 2.37879, "4595": 2.38272, "4600": 2.38627, "4605": 2.38176, "4610": 2.34656, "4615": 2.3249, "4620": 2.36355, "4625": 2.3385, "4630": 2.36851, "4635": 2.35391, "4640": 2.37452, "4645": 2.36621, "4650": 2.33672, "4655": 2.36083, "4660": 2.33753, "4665": 2.35215, "4670": 2.36016, "4675": 2.37484, "4680": 2.32399, "4685": 2.36954, "4690": 2.35136, "4695": 2.35497, "4700": 2.34698, "4705": 2.36598, "4710": 2.36267, "4715": 2.34093, "4720": 2.36069, "4725": 2.37138, "4730": 2.36609, "4735": 2.36398, "4740": 2.35311, "4745": 2.36877, "4750": 2.36581, "4755": 2.3668, "4760": 2.3457, "4765": 2.34705, "4770": 2.33717, "4775": 2.36028, "4780": 2.35904, "4785": 2.32872, "4790": 2.35047, "4795": 2.33366, "4800": 2.34168, "4805": 2.35083, "4810": 2.34344, "4815": 2.33294, "4820": 2.33443, "4825": 2.36506, "4830": 2.35435, "4835": 2.33586, "4840": 2.32864, "4845": 2.34796, "4850": 2.35206, "4855": 2.35579, "4860": 2.35329, "4865": 2.3478, "4870": 2.32396, "4875": 2.32439, "4880": 2.34381, "4885": 2.42031, "4890": 2.35929, "4895": 2.30579, "4900": 2.35225, "4905": 2.31904, "4910": 2.33352, "4915": 2.33983, "4920": 2.3475, "4925": 2.34209, "4930": 2.31686, "4935": 2.32966, "4940": 2.34683, "4945": 2.31263, "4950": 2.34841, "4955": 2.32994, "4960": 2.34121, "4965": 2.34746, "4970": 2.31652, "4975": 2.3251, "4980": 2.32397, "4985": 2.3481, "4990": 2.35425, "4995": 2.29722, "5000": 2.34557, "5005": 2.35165, "5010": 2.32104, "5015": 2.31022, "5020": 2.3499, "5025": 2.31919, "5030": 2.33996, "5035": 2.33426, "5040": 2.33412, "5045": 2.316, "5050": 2.33168, "5055": 2.33506, "5060": 2.32145, "5065": 2.34224, "5070": 2.34295, "5075": 2.32631, "5080": 2.32533, "5085": 2.35734, "5090": 2.33548, "5095": 2.31916, "5100": 2.30894, "5105": 2.32306, "5110": 2.32715, "5115": 2.3112, "5120": 2.31117, "5125": 2.29508, "5130": 2.3196, "5135": 2.32575, "5140": 2.32593, "5145": 2.33193, "5150": 2.3282, "5155": 2.32162, "5160": 2.33615, "5165": 2.31539, "5170": 2.32494, "5175": 2.33214, "5180": 2.31889, "5185": 2.29787, "5190": 2.28798, "5195": 2.33508, "5200": 2.31548, "5205": 2.29669, "5210": 2.33514, "5215": 2.29566, "5220": 2.31532, "5225": 2.28358, "5230": 2.31126, "5235": 2.30102, "5240": 2.32174, "5245": 2.30725, "5250": 
2.27634, "5255": 2.31798, "5260": 2.29064, "5265": 2.3063, "5270": 2.3082, "5275": 2.33477, "5280": 2.3377, "5285": 2.3285, "5290": 2.32063, "5295": 2.30702, "5300": 2.33602, "5305": 2.2854, "5310": 2.31931, "5315": 2.2981, "5320": 2.28135, "5325": 2.32354, "5330": 2.30013, "5335": 2.31285, "5340": 2.32265, "5345": 2.31206, "5350": 2.28534, "5355": 2.31015, "5360": 2.33319, "5365": 2.31661, "5370": 2.31605, "5375": 2.33683, "5380": 2.30956, "5385": 2.30764, "5390": 2.28631, "5395": 2.26695, "5400": 2.30586, "5405": 2.28987, "5410": 2.30127, "5415": 2.32316, "5420": 2.32174, "5425": 2.31181, "5430": 2.30214, "5435": 2.32363, "5440": 2.28713, "5445": 2.27582, "5450": 2.3011, "5455": 2.32147, "5460": 2.28161, "5465": 2.3034, "5470": 2.30601, "5475": 2.30569, "5480": 2.30279, "5485": 2.2732, "5490": 2.29245, "5495": 2.29726, "5500": 2.28031, "5505": 2.29958, "5510": 2.28175, "5515": 2.26521, "5520": 2.27019, "5525": 2.28209, "5530": 2.29196, "5535": 2.26995, "5540": 2.29036, "5545": 2.31318, "5550": 2.31029, "5555": 2.31645, "5560": 2.28007, "5565": 2.31464, "5570": 2.26997, "5575": 2.29357, "5580": 2.29204, "5585": 2.29394, "5590": 2.28442, "5595": 2.29326, "5600": 2.29342, "5605": 2.30195, "5610": 2.27769, "5615": 2.26726, "5620": 2.28515, "5625": 2.259, "5630": 2.27974, "5635": 2.27401, "5640": 2.28979, "5645": 2.30633, "5650": 2.28677, "5655": 2.26642, "5660": 2.27542, "5665": 2.277, "5670": 2.29122, "5675": 2.277, "5680": 2.34352, "5685": 2.30528, "5690": 2.28931, "5695": 2.30822, "5700": 2.2759, "5705": 2.26127, "5710": 2.30463, "5715": 2.26021, "5720": 2.24169, "5725": 2.28821, "5730": 2.2916, "5735": 2.25721, "5740": 2.27331, "5745": 2.28985, "5750": 2.30889, "5755": 2.29053, "5760": 2.2718, "5765": 2.29049, "5770": 2.25196, "5775": 2.32054, "5780": 2.3127, "5785": 2.27868, "5790": 2.28621, "5795": 2.24597, "5800": 2.25664, "5805": 2.2765, "5810": 2.26262, "5815": 2.27269, "5820": 2.25601, "5825": 2.26982, "5830": 2.28935, "5835": 2.29063, "5840": 2.2942, "5845": 2.26339, "5850": 2.31101, "5855": 2.29399, "5860": 2.2663, "5865": 2.28089, "5870": 2.26591, "5875": 2.2786, "5880": 2.29544, "5885": 2.302, "5890": 2.25458, "5895": 2.26583, "5900": 2.29313, "5905": 2.26241, "5910": 2.27254, "5915": 2.27949, "5920": 2.25498, "5925": 2.27743, "5930": 2.27932, "5935": 2.26255, "5940": 2.25945, "5945": 2.27853, "5950": 2.27145, "5955": 2.27436, "5960": 2.28295, "5965": 2.31275, "5970": 2.27714, "5975": 2.28937, "5980": 2.27496, "5985": 2.27325, "5990": 2.25291, "5995": 2.27148, "6000": 2.24006, "6005": 2.26312, "6010": 2.27238, "6015": 2.29088, "6020": 2.24915, "6025": 2.27275, "6030": 2.25582, "6035": 2.26816, "6040": 2.29457, "6045": 2.2592, "6050": 2.27474, "6055": 2.26474, "6060": 2.28767, "6065": 2.28174, "6070": 2.25084, "6075": 2.22948, "6080": 2.26225, "6085": 2.26628, "6090": 2.30311, "6095": 2.21967, "6100": 2.25432, "6105": 2.23638, "6110": 2.23793, "6115": 2.24777, "6120": 2.28274, "6125": 2.27364, "6130": 2.24409, "6135": 2.28028, "6140": 2.24617, "6145": 2.25305, "6150": 2.24068, "6155": 2.24914, "6160": 2.26293, "6165": 2.27514, "6170": 2.21195, "6175": 2.25672, "6180": 2.23065, "6185": 2.25453, "6190": 2.25575, "6195": 2.26989, "6200": 2.24407, "6205": 2.23989, "6210": 2.27985, "6215": 2.27757, "6220": 2.26277, "6225": 2.25185, "6230": 2.21647, "6235": 2.25821, "6240": 2.26671, "6245": 2.27496, "6250": 2.25508, "6255": 2.23748, "6260": 2.24792, "6265": 2.24425, "6270": 2.24828, "6275": 2.26042, "6280": 2.2506, "6285": 2.26472, "6290": 2.24804, "6295": 2.25437, "6300": 2.26313, 
"6305": 2.22383, "6310": 2.26468, "6315": 2.25201, "6320": 2.22707, "6325": 2.2597, "6330": 2.24138, "6335": 2.25423, "6340": 2.2621, "6345": 2.24576, "6350": 2.25048, "6355": 2.24546, "6360": 2.26679, "6365": 2.2574, "6370": 2.25016, "6375": 2.26902, "6380": 2.23078, "6385": 2.23128, "6390": 2.23901, "6395": 2.23162, "6400": 2.21177, "6405": 2.24905, "6410": 2.24624, "6415": 2.24036, "6420": 2.23302, "6425": 2.24519, "6430": 2.24625, "6435": 2.30239, "6440": 2.24714, "6445": 2.25193, "6450": 2.26974, "6455": 2.2357, "6460": 2.26385, "6465": 2.26139, "6470": 2.25835, "6475": 2.2364, "6480": 2.22322, "6485": 2.25002, "6490": 2.24943, "6495": 2.23566, "6500": 2.23905, "6505": 2.23952, "6510": 2.21951, "6515": 2.24697, "6520": 2.23577, "6525": 2.23046, "6530": 2.24607, "6535": 2.25833, "6540": 2.2677, "6545": 2.23739, "6550": 2.22333, "6555": 2.23828, "6560": 2.26917, "6565": 2.2308, "6570": 2.22023, "6575": 2.26161, "6580": 2.24056, "6585": 2.22889, "6590": 2.23077, "6595": 2.2399, "6600": 2.2547, "6605": 2.23963, "6610": 2.22847, "6615": 2.22303, "6620": 2.25143, "6625": 2.24214, "6630": 2.22738, "6635": 2.2492, "6640": 2.25634, "6645": 2.23278, "6650": 2.23352, "6655": 2.22727, "6660": 2.23876, "6665": 2.22395, "6670": 2.23621, "6675": 2.22148, "6680": 2.23977, "6685": 2.23883, "6690": 2.23685, "6695": 2.24441, "6700": 2.23751, "6705": 2.2107, "6710": 2.2459, "6715": 2.24785, "6720": 2.24492, "6725": 2.22868, "6730": 2.22927, "6735": 2.20284, "6740": 2.2295, "6745": 2.23444, "6750": 2.23173, "6755": 2.20784, "6760": 2.22443, "6765": 2.25378, "6770": 2.23748, "6775": 2.22177, "6780": 2.2047, "6785": 2.21618, "6790": 2.23123, "6795": 2.24187, "6800": 2.24805, "6805": 2.23277, "6810": 2.25623, "6815": 2.21824, "6820": 2.21982, "6825": 2.22696, "6830": 2.19515, "6835": 2.25431, "6840": 2.22253, "6845": 2.22053, "6850": 2.24161, "6855": 2.21587, "6860": 2.22632, "6865": 2.24762, "6870": 2.22113, "6875": 2.24292, "6880": 2.21537, "6885": 2.23194, "6890": 2.24111, "6895": 2.21203, "6900": 2.21692, "6905": 2.20881, "6910": 2.21976, "6915": 2.19951, "6920": 2.25468, "6925": 2.20831, "6930": 2.20419, "6935": 2.23648, "6940": 2.20517, "6945": 2.22458, "6950": 2.23751, "6955": 2.19601, "6960": 2.22394, "6965": 2.21334, "6970": 2.22503, "6975": 2.19357, "6980": 2.19617, "6985": 2.2109, "6990": 2.21355, "6995": 2.23827, "7000": 2.22569, "7005": 2.2143, "7010": 2.19897, "7015": 2.19982, "7020": 2.2469, "7025": 2.20684, "7030": 2.21741, "7035": 2.20364, "7040": 2.21216, "7045": 2.21416, "7050": 2.21838, "7055": 2.21879, "7060": 2.21076, "7065": 2.19334, "7070": 2.20261, "7075": 2.19426, "7080": 2.20914, "7085": 2.22493, "7090": 2.22029, "7095": 2.21708, "7100": 2.23053, "7105": 2.22254, "7110": 2.22852, "7115": 2.2025, "7120": 2.2155, "7125": 2.19965, "7130": 2.22, "7135": 2.17151, "7140": 2.19466, "7145": 2.21291, "7150": 2.23672, "7155": 2.20658, "7160": 2.1878, "7165": 2.21051, "7170": 2.19248, "7175": 2.19171, "7180": 2.23969, "7185": 2.18496, "7190": 2.22672, "7195": 2.21179, "7200": 2.21392, "7205": 2.20582, "7210": 2.20557, "7215": 2.18895, "7220": 2.21331, "7225": 2.18822, "7230": 2.21586, "7235": 2.17662, "7240": 2.23091, "7245": 2.22355, "7250": 2.23878, "7255": 2.19607, "7260": 2.177, "7265": 2.21798, "7270": 2.18291, "7275": 2.2016, "7280": 2.19151, "7285": 2.19461, "7290": 2.19927, "7295": 2.192, "7300": 2.20628, "7305": 2.20727, "7310": 2.19969, "7315": 2.19174, "7320": 2.21633, "7325": 2.20295, "7330": 2.2024, "7335": 2.21812, "7340": 2.1913, "7345": 2.2035, "7350": 2.20088, "7355": 
2.18777, "7360": 2.1903, "7365": 2.19297, "7370": 2.19442, "7375": 2.18027, "7380": 2.21151, "7385": 2.19373, "7390": 2.22416, "7395": 2.20347, "7400": 2.20371, "7405": 2.19769, "7410": 2.15579, "7415": 2.19553, "7420": 2.20369, "7425": 2.19549, "7430": 2.20509, "7435": 2.22097, "7440": 2.22219, "7445": 2.18566, "7450": 2.18607, "7455": 2.18995, "7460": 2.16956, "7465": 2.18893, "7470": 2.17104, "7475": 2.17284, "7480": 2.19555, "7485": 2.19016, "7490": 2.19582, "7495": 2.20777, "7500": 2.20788, "7505": 2.20863, "7510": 2.16062, "7515": 2.19309, "7520": 2.19798, "7525": 2.20339, "7530": 2.20007, "7535": 2.19668, "7540": 2.1837, "7545": 2.18613, "7550": 2.21083, "7555": 2.19346, "7560": 2.20763, "7565": 2.19136, "7570": 2.17953, "7575": 2.20109, "7580": 2.17641, "7585": 2.22924, "7590": 2.18952, "7595": 2.17995, "7600": 2.18249, "7605": 2.17038, "7610": 2.21817, "7615": 2.19902, "7620": 2.20634, "7625": 2.20253, "7630": 2.17647, "7635": 2.2037, "7640": 2.19408, "7645": 2.19889, "7650": 2.19712, "7655": 2.21484, "7660": 2.20025, "7665": 2.17647, "7670": 2.18925, "7675": 2.19431, "7680": 2.19476, "7685": 2.17843, "7690": 2.19832, "7695": 2.18076, "7700": 2.16052, "7705": 2.18514, "7710": 2.17446, "7715": 2.18592, "7720": 2.162, "7725": 2.17339, "7730": 2.21261, "7735": 2.18007, "7740": 2.20149, "7745": 2.19204, "7750": 2.18388, "7755": 2.19364, "7760": 2.17472, "7765": 2.21078, "7770": 2.21159, "7775": 2.19837, "7780": 2.1976, "7785": 2.19128, "7790": 2.16115, "7795": 2.20206, "7800": 2.21395, "7805": 2.19857, "7810": 2.1874, "7815": 2.19941, "7820": 2.18819, "7825": 2.20606, "7830": 2.25431, "7835": 2.20483, "7840": 2.19587, "7845": 2.20225, "7850": 2.21218, "7855": 2.16565, "7860": 2.18547, "7865": 2.17855, "7870": 2.18522, "7875": 2.17068, "7880": 2.19723, "7885": 2.17197, "7890": 2.17773, "7895": 2.15895, "7900": 2.18857, "7905": 2.17535, "7910": 2.16259, "7915": 2.18029, "7920": 2.19814, "7925": 2.17761, "7930": 2.17205, "7935": 2.18388, "7940": 2.15309, "7945": 2.17431, "7950": 2.18811, "7955": 2.18356, "7960": 2.17475, "7965": 2.17161, "7970": 2.18347, "7975": 2.21198, "7980": 2.20896, "7985": 2.15862, "7990": 2.18004, "7995": 2.1801, "8000": 2.18696, "8005": 2.18383, "8010": 2.16048, "8015": 2.16822, "8020": 2.14864, "8025": 2.17182, "8030": 2.19045, "8035": 2.19952, "8040": 2.18455, "8045": 2.19307, "8050": 2.17156, "8055": 2.18972, "8060": 2.1849, "8065": 2.17911, "8070": 2.18879, "8075": 2.17947, "8080": 2.17026, "8085": 2.18489, "8090": 2.21727, "8095": 2.17857, "8100": 2.1793, "8105": 2.19244, "8110": 2.1701, "8115": 2.18979, "8120": 2.16804, "8125": 2.14288, "8130": 2.17387, "8135": 2.18783, "8140": 2.19221, "8145": 2.19488, "8150": 2.15757, "8155": 2.1801, "8160": 2.17742, "8165": 2.17085, "8170": 2.15664, "8175": 2.187, "8180": 2.16736, "8185": 2.19824, "8190": 2.18124, "8195": 2.17535, "8200": 2.17592, "8205": 2.18282, "8210": 2.16029, "8215": 2.17645, "8220": 2.15781, "8225": 2.14826, "8230": 2.16814, "8235": 2.16111, "8240": 2.14986, "8245": 2.15582, "8250": 2.18246, "8255": 2.19923, "8260": 2.1535, "8265": 2.18556, "8270": 2.17658, "8275": 2.17337, "8280": 2.19041, "8285": 2.16414, "8290": 2.17413, "8295": 2.18989, "8300": 2.19827, "8305": 2.15954, "8310": 2.15521, "8315": 2.18705, "8320": 2.16816, "8325": 2.17724, "8330": 2.1678, "8335": 2.16106, "8340": 2.1497, "8345": 2.13501, "8350": 2.14604, "8355": 2.18231, "8360": 2.1851, "8365": 2.15552, "8370": 2.1627, "8375": 2.1642, "8380": 2.17533, "8385": 2.1756, "8390": 2.16975, "8395": 2.15939, "8400": 2.18245, "8405": 
2.16975, "8410": 2.18273, "8415": 2.19141, "8420": 2.16367, "8425": 2.15371, "8430": 2.15651, "8435": 2.13878, "8440": 2.17765, "8445": 2.17775, "8450": 2.17125, "8455": 2.1867, "8460": 2.17861, "8465": 2.17076, "8470": 2.16965, "8475": 2.15524, "8480": 2.14761, "8485": 2.18187, "8490": 2.16147, "8495": 2.16993, "8500": 2.14202, "8505": 2.14917, "8510": 2.16912, "8515": 2.14391, "8520": 2.15026, "8525": 2.15619, "8530": 2.15383, "8535": 2.1856, "8540": 2.16918, "8545": 2.14268, "8550": 2.15693, "8555": 2.1616, "8560": 2.15238, "8565": 2.17727, "8570": 2.16231, "8575": 2.16066, "8580": 2.15862, "8585": 2.17477, "8590": 2.16566, "8595": 2.14566, "8600": 2.15281, "8605": 2.1442, "8610": 2.16836, "8615": 2.17678, "8620": 2.16957, "8625": 2.14774, "8630": 2.15757, "8635": 2.14751, "8640": 2.15949, "8645": 2.15494, "8650": 2.14224, "8655": 2.15446, "8660": 2.14976, "8665": 2.13381, "8670": 2.1666, "8675": 2.15287, "8680": 2.14993, "8685": 2.15227, "8690": 2.19837, "8695": 2.15594, "8700": 2.1388, "8705": 2.16426, "8710": 2.14299, "8715": 2.15858, "8720": 2.15187, "8725": 2.15758, "8730": 2.15257, "8735": 2.15665, "8740": 2.17339, "8745": 2.14992, "8750": 2.15105, "8755": 2.13776, "8760": 2.15344, "8765": 2.17845, "8770": 2.16098, "8775": 2.14079, "8780": 2.16714, "8785": 2.16786, "8790": 2.14852, "8795": 2.13363, "8800": 2.1499, "8805": 2.15257, "8810": 2.15138, "8815": 2.13466, "8820": 2.16551, "8825": 2.16031, "8830": 2.14843, "8835": 2.1434, "8840": 2.1637, "8845": 2.16595, "8850": 2.12628, "8855": 2.14313, "8860": 2.17906, "8865": 2.16948, "8870": 2.1751, "8875": 2.17429, "8880": 2.17431, "8885": 2.16191, "8890": 2.13449, "8895": 2.13804, "8900": 2.13317, "8905": 2.16623, "8910": 2.15998, "8915": 2.13196, "8920": 2.15299, "8925": 2.13984, "8930": 2.16092, "8935": 2.22746, "8940": 2.1515, "8945": 2.14642, "8950": 2.13961, "8955": 2.1576, "8960": 2.15682, "8965": 2.13193, "8970": 2.15241, "8975": 2.14759, "8980": 2.12972, "8985": 2.14068, "8990": 2.17288, "8995": 2.14912, "9000": 2.16707, "9005": 2.17518, "9010": 2.15142, "9015": 2.14661, "9020": 2.13749, "9025": 2.13606, "9030": 2.13094, "9035": 2.15293, "9040": 2.14316, "9045": 2.15361, "9050": 2.1634, "9055": 2.1519, "9060": 2.13013, "9065": 2.12628, "9070": 2.14149, "9075": 2.17198, "9080": 2.14308, "9085": 2.16415, "9090": 2.15837, "9095": 2.16139, "9100": 2.15535, "9105": 2.16868, "9110": 2.14177, "9115": 2.15607, "9120": 2.12401, "9125": 2.17331, "9130": 2.14671, "9135": 2.14482, "9140": 2.14334, "9145": 2.14219, "9150": 2.15521, "9155": 2.15286, "9160": 2.15222, "9165": 2.12957, "9170": 2.13188, "9175": 2.13526, "9180": 2.14214, "9185": 2.15589, "9190": 2.1416, "9195": 2.14719, "9200": 2.15887, "9205": 2.14112, "9210": 2.17124, "9215": 2.14424, "9220": 2.14871, "9225": 2.15885, "9230": 2.15951, "9235": 2.1112, "9240": 2.1519, "9245": 2.13921, "9250": 2.12689, "9255": 2.15543, "9260": 2.16544, "9265": 2.14271, "9270": 2.13076, "9275": 2.15295, "9280": 2.148, "9285": 2.14597, "9290": 2.17268, "9295": 2.13936, "9300": 2.13623, "9305": 2.1372, "9310": 2.14075, "9315": 2.15344, "9320": 2.13832, "9325": 2.12847, "9330": 2.13957, "9335": 2.11342, "9340": 2.15355, "9345": 2.10835, "9350": 2.14723, "9355": 2.12909, "9360": 2.12946, "9365": 2.15889, "9370": 2.13435, "9375": 2.10592, "9380": 2.1519, "9385": 2.12589, "9390": 2.11101, "9395": 2.13652, "9400": 2.13968, "9405": 2.1363, "9410": 2.13276, "9415": 2.13081, "9420": 2.12948, "9425": 2.12455, "9430": 2.15956, "9435": 2.13011, "9440": 2.13051, "9445": 2.13553, "9450": 2.12758, "9455": 
2.15297, "9460": 2.12548, "9465": 2.12087, "9470": 2.14886, "9475": 2.1471, "9480": 2.13986, "9485": 2.15324, "9490": 2.1256, "9495": 2.13051, "9500": 2.15875, "9505": 2.14762, "9510": 2.11267, "9515": 2.11812, "9520": 2.1547, "9525": 2.08798, "9530": 2.13606, "9535": 2.17818, "9540": 2.11398, "9545": 2.13191, "9550": 2.14273, "9555": 2.11647, "9560": 2.13952, "9565": 2.14123, "9570": 2.13373, "9575": 2.14763, "9580": 2.13298, "9585": 2.12097, "9590": 2.16413, "9595": 2.14983, "9600": 2.14944, "9605": 2.15208, "9610": 2.14407, "9615": 2.14316, "9620": 2.14523, "9625": 2.13646, "9630": 2.13782, "9635": 2.14473, "9640": 2.12525, "9645": 2.12524, "9650": 2.11894, "9655": 2.11901, "9660": 2.13655, "9665": 2.13292, "9670": 2.13924, "9675": 2.128, "9680": 2.12642, "9685": 2.13617, "9690": 2.155, "9695": 2.15886, "9700": 2.14175, "9705": 2.12228, "9710": 2.12442, "9715": 2.09908, "9720": 2.11286, "9725": 2.12616, "9730": 2.09372, "9735": 2.14741, "9740": 2.16653, "9745": 2.14296, "9750": 2.15341, "9755": 2.1353, "9760": 2.12518, "9765": 2.12593, "9770": 2.11452, "9775": 2.14048, "9780": 2.14927, "9785": 2.11523, "9790": 2.12685, "9795": 2.10613, "9800": 2.13203, "9805": 2.12886, "9810": 2.10955, "9815": 2.12527, "9820": 2.11895, "9825": 2.10034, "9830": 2.13716, "9835": 2.1518, "9840": 2.13678, "9845": 2.14904, "9850": 2.16423, "9855": 2.1127, "9860": 2.10816, "9865": 2.13124, "9870": 2.12571, "9875": 2.13843, "9880": 2.14231, "9885": 2.11957, "9890": 2.12755, "9895": 2.1366, "9900": 2.11703, "9905": 2.11196, "9910": 2.11328, "9915": 2.14258, "9920": 2.13998, "9925": 2.11067, "9930": 2.12089, "9935": 2.13335, "9940": 2.11358, "9945": 2.10148, "9950": 2.10429, "9955": 2.12761, "9960": 2.10246, "9965": 2.11756, "9970": 2.12102, "9975": 2.12487, "9980": 2.13853, "9985": 2.11919, "9990": 2.0911, "9995": 2.1131, "10000": 2.13716, "10005": 2.12055, "10010": 2.10308, "10015": 2.13015, "10020": 2.10515, "10025": 2.12951, "10030": 2.12167, "10035": 2.10912, "10040": 2.11975, "10045": 2.09254, "10050": 2.10063, "10055": 2.14906, "10060": 2.13371, "10065": 2.11663, "10070": 2.14459, "10075": 2.13078, "10080": 2.12822, "10085": 2.11886, "10090": 2.1142, "10095": 2.12216, "10100": 2.09973, "10105": 2.11093, "10110": 2.13832, "10115": 2.12311, "10120": 2.13016, "10125": 2.13369, "10130": 2.1085, "10135": 2.12829, "10140": 2.12856, "10145": 2.128, "10150": 2.11364, "10155": 2.10938, "10160": 2.13169, "10165": 2.13518, "10170": 2.12695, "10175": 2.1125, "10180": 2.11614, "10185": 2.10421, "10190": 2.12809, "10195": 2.11469, "10200": 2.115, "10205": 2.09821, "10210": 2.10945, "10215": 2.11652, "10220": 2.11198, "10225": 2.13204, "10230": 2.1185, "10235": 2.12376, "10240": 2.11837, "10245": 2.15328, "10250": 2.11916, "10255": 2.11399, "10260": 2.14121, "10265": 2.12369, "10270": 2.1183, "10275": 2.11281, "10280": 2.11087, "10285": 2.12294, "10290": 2.12587, "10295": 2.10484, "10300": 2.12322, "10305": 2.11329, "10310": 2.14687, "10315": 2.12124, "10320": 2.10609, "10325": 2.10879, "10330": 2.13891, "10335": 2.10542, "10340": 2.10367, "10345": 2.10703, "10350": 2.12341, "10355": 2.13353, "10360": 2.11298, "10365": 2.11669, "10370": 2.12021, "10375": 2.12944, "10380": 2.12758, "10385": 2.12781, "10390": 2.12571, "10395": 2.11777, "10400": 2.11636, "10405": 2.11863, "10410": 2.1012, "10415": 2.10629, "10420": 2.11308, "10425": 2.10916, "10430": 2.13619, "10435": 2.13936, "10440": 2.1281, "10445": 2.12391, "10450": 2.12106, "10455": 2.10169, "10460": 2.12475, "10465": 2.09752, "10470": 2.1289, "10475": 2.11385, 
"10480": 2.11824, "10485": 2.10399, "10490": 2.11413, "10495": 2.12797, "10500": 2.11564, "10505": 2.1199, "10510": 2.08989, "10515": 2.13973, "10520": 2.09137, "10525": 2.11084, "10530": 2.13646, "10535": 2.11142, "10540": 2.10309, "10545": 2.09668, "10550": 2.07222, "10555": 2.11273, "10560": 2.13032, "10565": 2.12537, "10570": 2.10087, "10575": 2.12361, "10580": 2.10438, "10585": 2.12741, "10590": 2.10733, "10595": 2.13295, "10600": 2.13134, "10605": 2.12483, "10610": 2.11037, "10615": 2.0986, "10620": 2.11605, "10625": 2.11228, "10630": 2.11666, "10635": 2.12549, "10640": 2.09279, "10645": 2.118, "10650": 2.09519, "10655": 2.11728, "10660": 2.10962, "10665": 2.09459, "10670": 2.08811, "10675": 2.1565, "10680": 2.11267, "10685": 2.12059, "10690": 2.10418, "10695": 2.10876, "10700": 2.12888, "10705": 2.10347, "10710": 2.1224, "10715": 2.10331, "10720": 2.08782, "10725": 2.10057, "10730": 2.09167, "10735": 2.07963, "10740": 2.11934, "10745": 2.11057, "10750": 2.09177, "10755": 2.10233, "10760": 2.0928, "10765": 2.10776, "10770": 2.09769, "10775": 2.11675, "10780": 2.09993, "10785": 2.11493, "10790": 2.08612, "10795": 2.12922, "10800": 2.10411, "10805": 2.10915, "10810": 2.08765, "10815": 2.12015, "10820": 2.08509, "10825": 2.12525, "10830": 2.12467, "10835": 2.09599, "10840": 2.09297, "10845": 2.10701, "10850": 2.09909, "10855": 2.08185, "10860": 2.10656, "10865": 2.10672, "10870": 2.11422, "10875": 2.11503, "10880": 2.11281, "10885": 2.1049, "10890": 2.1238, "10895": 2.09945, "10900": 2.09815, "10905": 2.11936, "10910": 2.10709, "10915": 2.09849, "10920": 2.06506, "10925": 2.10936, "10930": 2.09927, "10935": 2.09619, "10940": 2.11792, "10945": 2.11932, "10950": 2.11146, "10955": 2.12454, "10960": 2.12234, "10965": 2.07622, "10970": 2.10452, "10975": 2.11751, "10980": 2.09578, "10985": 2.06193, "10990": 2.08593, "10995": 2.08399, "11000": 2.08655, "11005": 2.13455, "11010": 2.12308, "11015": 2.09995, "11020": 2.09952, "11025": 2.08166, "11030": 2.0871, "11035": 2.11957, "11040": 2.07593, "11045": 2.07732, "11050": 2.11767, "11055": 2.12392, "11060": 2.08935, "11065": 2.11907, "11070": 2.11838, "11075": 2.07731, "11080": 2.10111, "11085": 2.11196, "11090": 2.09433, "11095": 2.09819, "11100": 2.12541, "11105": 2.12034, "11110": 2.10418, "11115": 2.09139, "11120": 2.11014, "11125": 2.09105, "11130": 2.10063, "11135": 2.12179, "11140": 2.13454, "11145": 2.1115, "11150": 2.10737, "11155": 2.1189, "11160": 2.09814, "11165": 2.10755, "11170": 2.10982, "11175": 2.09786, "11180": 2.07284, "11185": 2.10102, "11190": 2.10984, "11195": 2.11781, "11200": 2.1382, "11205": 2.09986, "11210": 2.09643, "11215": 2.09314, "11220": 2.11546, "11225": 2.09573, "11230": 2.09484, "11235": 2.12336, "11240": 2.06689, "11245": 2.11083, "11250": 2.12167, "11255": 2.11869, "11260": 2.11156, "11265": 2.1049, "11270": 2.12246, "11275": 2.09351, "11280": 2.08974, "11285": 2.11938, "11290": 2.10088, "11295": 2.09814, "11300": 2.11012, "11305": 2.07607, "11310": 2.10703, "11315": 2.09604, "11320": 2.1009, "11325": 2.11022, "11330": 2.09836, "11335": 2.10719, "11340": 2.08575, "11345": 2.09604, "11350": 2.09482, "11355": 2.10416, "11360": 2.1371, "11365": 2.11653, "11370": 2.1153, "11375": 2.07807, "11380": 2.09342, "11385": 2.08193, "11390": 2.11752, "11395": 2.12798, "11400": 2.07978, "11405": 2.09352, "11410": 2.10114, "11415": 2.07741, "11420": 2.09746, "11425": 2.11016, "11430": 2.11582, "11435": 2.0941, "11440": 2.10889, "11445": 2.11766, "11450": 2.10732, "11455": 2.09699, "11460": 2.0863, "11465": 2.08777, "11470": 
2.10668, "11475": 2.12216, "11480": 2.07883, "11485": 2.08623, "11490": 2.07633, "11495": 2.09272, "11500": 2.10777, "11505": 2.06887, "11510": 2.11101, "11515": 2.10311, "11520": 2.09246, "11525": 2.10248, "11530": 2.1034, "11535": 2.09766, "11540": 2.10141, "11545": 2.1129, "11550": 2.10009, "11555": 2.09262, "11560": 2.07359, "11565": 2.09648, "11570": 2.08146, "11575": 2.0969, "11580": 2.08659, "11585": 2.07574, "11590": 2.11459, "11595": 2.08918, "11600": 2.10366, "11605": 2.08865, "11610": 2.08252, "11615": 2.0938, "11620": 2.10688, "11625": 2.07358, "11630": 2.08352, "11635": 2.10343, "11640": 2.09415, "11645": 2.09392, "11650": 2.09476, "11655": 2.06232, "11660": 2.08332, "11665": 2.11418, "11670": 2.10031, "11675": 2.09048, "11680": 2.08324, "11685": 2.06546, "11690": 2.11199, "11695": 2.09538, "11700": 2.09326, "11705": 2.07812, "11710": 2.0875, "11715": 2.08891, "11720": 2.0637, "11725": 2.09897, "11730": 2.09854, "11735": 2.08057, "11740": 2.08815, "11745": 2.10273, "11750": 2.08792, "11755": 2.08797, "11760": 2.10884, "11765": 2.10009, "11770": 2.08583, "11775": 2.08752, "11780": 2.11669, "11785": 2.11395, "11790": 2.11916, "11795": 2.10345, "11800": 2.07714, "11805": 2.06457, "11810": 2.10212, "11815": 2.09683, "11820": 2.11202, "11825": 2.11111, "11830": 2.09314, "11835": 2.09541, "11840": 2.09785, "11845": 2.10345, "11850": 2.10526, "11855": 2.07674, "11860": 2.10364, "11865": 2.07697, "11870": 2.08567, "11875": 2.08152, "11880": 2.11372, "11885": 2.08308, "11890": 2.08489, "11895": 2.07619, "11900": 2.07176, "11905": 2.08311, "11910": 2.10022, "11915": 2.09703, "11920": 2.08787, "11925": 2.08853, "11930": 2.06556, "11935": 2.09267, "11940": 2.12111, "11945": 2.08449, "11950": 2.09998, "11955": 2.08153, "11960": 2.08759, "11965": 2.06346, "11970": 2.06661, "11975": 2.05298, "11980": 2.06929, "11985": 2.08958, "11990": 2.08079, "11995": 2.0717, "12000": 2.09246, "12005": 2.07037, "12010": 2.08082, "12015": 2.09207, "12020": 2.10962, "12025": 2.0934, "12030": 2.08572, "12035": 2.09284, "12040": 2.07952, "12045": 2.11447, "12050": 2.10027, "12055": 2.0805, "12060": 2.0729, "12065": 2.07517, "12070": 2.08142, "12075": 2.07735, "12080": 2.08834, "12085": 2.08779, "12090": 2.0847, "12095": 2.10172, "12100": 2.07351, "12105": 2.0909, "12110": 2.10889, "12115": 2.07443, "12120": 2.10336, "12125": 2.09546, "12130": 2.08505, "12135": 2.06245, "12140": 2.08605, "12145": 2.07496, "12150": 2.08468, "12155": 2.0811, "12160": 2.0842, "12165": 2.11115, "12170": 2.07714, "12175": 2.08229, "12180": 2.06732, "12185": 2.0802, "12190": 2.08836, "12195": 2.06922, "12200": 2.07565, "12205": 2.0983, "12210": 2.07004, "12215": 2.06948, "12220": 2.07127, "12225": 2.08898, "12230": 2.08472, "12235": 2.08517, "12240": 2.07253, "12245": 2.08653, "12250": 2.07321, "12255": 2.0772, "12260": 2.08083, "12265": 2.08415, "12270": 2.05978, "12275": 2.09805, "12280": 2.08511, "12285": 2.09236, "12290": 2.09528, "12295": 2.08762, "12300": 2.09432, "12305": 2.06798, "12310": 2.09894, "12315": 2.06714, "12320": 2.06528, "12325": 2.08738, "12330": 2.06593, "12335": 2.08387, "12340": 2.08887, "12345": 2.08376, "12350": 2.09072, "12355": 2.0837, "12360": 2.07632, "12365": 2.10111, "12370": 2.07893, "12375": 2.10129, "12380": 2.04056, "12385": 2.05965, "12390": 2.07018, "12395": 2.0835, "12400": 2.08174, "12405": 2.07663, "12410": 2.07779, "12415": 2.06784, "12420": 2.06979, "12425": 2.08765, "12430": 2.07118, "12435": 2.07501, "12440": 2.05737, "12445": 2.08313, "12450": 2.07412, "12455": 2.08407, "12460": 2.0838, 
"12465": 2.09855, "12470": 2.0921, "12475": 2.07961, "12480": 2.07914, "12485": 2.08659, "12490": 2.10543, "12495": 2.05053, "12500": 2.08329, "12505": 2.08701, "12510": 2.08543, "12515": 2.08134, "12520": 2.06548, "12525": 2.09288, "12530": 2.07538, "12535": 2.08831, "12540": 2.09189, "12545": 2.09613, "12550": 2.06245, "12555": 2.07831, "12560": 2.06506, "12565": 2.09346, "12570": 2.08154, "12575": 2.07248, "12580": 2.06589, "12585": 2.06518, "12590": 2.10228, "12595": 2.0775, "12600": 2.09098, "12605": 2.06729, "12610": 2.06995, "12615": 2.06176, "12620": 2.07869, "12625": 2.07633, "12630": 2.08179, "12635": 2.0803, "12640": 2.07631, "12645": 2.06718, "12650": 2.08459, "12655": 2.05214, "12660": 2.07275, "12665": 2.08872, "12670": 2.07357, "12675": 2.10313, "12680": 2.09764, "12685": 2.07004, "12690": 2.05977, "12695": 2.08263, "12700": 2.07392, "12705": 2.08454, "12710": 2.07504, "12715": 2.08794, "12720": 2.0698, "12725": 2.08725, "12730": 2.08295, "12735": 2.0597, "12740": 2.07962, "12745": 2.06154, "12750": 2.07023, "12755": 2.06639, "12760": 2.07557, "12765": 2.04847, "12770": 2.06114, "12775": 2.06626, "12780": 2.07665, "12785": 2.0549, "12790": 2.07862, "12795": 2.09335, "12800": 2.07201, "12805": 2.0747, "12810": 2.06417, "12815": 2.075, "12820": 2.08175, "12825": 2.07626, "12830": 2.07452, "12835": 2.07, "12840": 2.0817, "12845": 2.06818, "12850": 2.06559, "12855": 2.07425, "12860": 2.07415, "12865": 2.0632, "12870": 2.07699, "12875": 2.08039, "12880": 2.08753, "12885": 2.07329, "12890": 2.06539, "12895": 2.07675, "12900": 2.06599, "12905": 2.0739, "12910": 2.10737, "12915": 2.0636, "12920": 2.09816, "12925": 2.07445, "12930": 2.07705, "12935": 2.04594, "12940": 2.08018, "12945": 2.04745, "12950": 2.06467, "12955": 2.09448, "12960": 2.07852, "12965": 2.03534, "12970": 2.07598, "12975": 2.0582, "12980": 2.06885, "12985": 2.07852, "12990": 2.04936, "12995": 2.09198, "13000": 2.0901, "13005": 2.06612, "13010": 2.09903, "13015": 2.09205, "13020": 2.06057, "13025": 2.06634, "13030": 2.0657, "13035": 2.09276, "13040": 2.07292, "13045": 2.07665, "13050": 2.05756, "13055": 2.07765, "13060": 2.07969, "13065": 2.06323, "13070": 2.06718, "13075": 2.05355, "13080": 2.06023, "13085": 2.05936, "13090": 2.06632, "13095": 2.06109, "13100": 2.07201, "13105": 2.06481, "13110": 2.0483, "13115": 2.03383, "13120": 2.04711, "13125": 2.05796, "13130": 2.02839, "13135": 2.07241, "13140": 2.04998, "13145": 2.06385, "13150": 2.07767, "13155": 2.0727, "13160": 2.08366, "13165": 2.07547, "13170": 2.04646, "13175": 2.06564, "13180": 2.05888, "13185": 2.05483, "13190": 2.06306, "13195": 2.04909, "13200": 2.07119, "13205": 2.04903, "13210": 2.04893, "13215": 2.06723, "13220": 2.06868, "13225": 2.06587, "13230": 2.06239, "13235": 2.05632, "13240": 2.0646, "13245": 2.04027, "13250": 2.06679, "13255": 2.05584, "13260": 2.07295, "13265": 2.05648, "13270": 2.07145, "13275": 2.05791, "13280": 2.07128, "13285": 2.0388, "13290": 2.1107, "13295": 2.06127, "13300": 2.08487, "13305": 2.07321, "13310": 2.06929, "13315": 2.0482, "13320": 2.06672, "13325": 2.07374, "13330": 2.0603, "13335": 2.06166, "13340": 2.09789, "13345": 2.0687, "13350": 2.06053, "13355": 2.0688, "13360": 2.06478, "13365": 2.07261, "13370": 2.06174, "13375": 2.07564, "13380": 2.07971, "13385": 2.06755, "13390": 2.10008, "13395": 2.07004, "13400": 2.05735, "13405": 2.08729, "13410": 2.07673, "13415": 2.03747, "13420": 2.03903, "13425": 2.06622, "13430": 2.06704, "13435": 2.05919, "13440": 2.05599, "13445": 2.0688, "13450": 2.05914, "13455": 2.07774, 
"13460": 2.06646, "13465": 2.05928, "13470": 2.08071, "13475": 2.08355, "13480": 2.05968, "13485": 2.05324, "13490": 2.05137, "13495": 2.04964, "13500": 2.05881, "13505": 2.07846, "13510": 2.04602, "13515": 2.0605, "13520": 2.07392, "13525": 2.05046, "13530": 2.08906, "13535": 2.05296, "13540": 2.06508, "13545": 2.05734, "13550": 2.06151, "13555": 2.09075, "13560": 2.06519, "13565": 2.06641, "13570": 2.04554, "13575": 2.07068, "13580": 2.05466, "13585": 2.07686, "13590": 2.07227, "13595": 2.05438, "13600": 2.08352, "13605": 2.04814, "13610": 2.02672, "13615": 2.07467, "13620": 2.08676, "13625": 2.05915, "13630": 2.06405, "13635": 2.04873, "13640": 2.06787, "13645": 2.06856, "13650": 2.04634, "13655": 2.07368, "13660": 2.04605, "13665": 2.06942, "13670": 2.10152, "13675": 2.07111, "13680": 2.0488, "13685": 2.07584, "13690": 2.05536, "13695": 2.08231, "13700": 2.05088, "13705": 2.0622, "13710": 2.04267, "13715": 2.02304, "13720": 2.05882, "13725": 2.03793, "13730": 2.05185, "13735": 2.08074, "13740": 2.08634, "13745": 2.06504, "13750": 2.07164, "13755": 2.05683, "13760": 2.05478, "13765": 2.04226, "13770": 2.06699, "13775": 2.03641, "13780": 2.05503, "13785": 2.05632, "13790": 2.06643, "13795": 2.0451, "13800": 2.05654, "13805": 2.07875, "13810": 2.0561, "13815": 2.04288, "13820": 2.04343, "13825": 2.01963, "13830": 2.04042, "13835": 2.06013, "13840": 2.04797, "13845": 2.04518, "13850": 2.06191, "13855": 2.04945, "13860": 2.04889, "13865": 2.0838, "13870": 2.05252, "13875": 2.02554, "13880": 2.05978, "13885": 2.06865, "13890": 2.07036, "13895": 2.06623, "13900": 2.0698, "13905": 2.07154, "13910": 2.07114, "13915": 2.03929, "13920": 2.06315, "13925": 2.06183, "13930": 2.06826, "13935": 2.06527, "13940": 2.05997, "13945": 2.07083, "13950": 2.06082, "13955": 2.05945, "13960": 2.09433, "13965": 2.02348, "13970": 2.05829, "13975": 2.04982, "13980": 2.07082, "13985": 2.05462, "13990": 2.06587, "13995": 2.0697, "14000": 2.03306, "14005": 2.06376, "14010": 2.08141, "14015": 2.06879, "14020": 2.04072, "14025": 2.07819, "14030": 2.03938, "14035": 2.06793, "14040": 2.02488, "14045": 2.04354, "14050": 2.04914, "14055": 2.03854, "14060": 2.05692, "14065": 2.07931, "14070": 2.07154, "14075": 2.03879, "14080": 2.06429, "14085": 2.06488, "14090": 2.03416, "14095": 2.06059, "14100": 2.05397, "14105": 2.04982, "14110": 2.03907, "14115": 2.04305, "14120": 2.05792, "14125": 2.05895, "14130": 2.03704, "14135": 2.05308, "14140": 2.04503, "14145": 2.05117, "14150": 2.04121, "14155": 2.07706, "14160": 2.03183, "14165": 2.05583, "14170": 2.04141, "14175": 2.05859, "14180": 2.06162, "14185": 2.05549, "14190": 2.04817, "14195": 2.0488, "14200": 2.03793, "14205": 2.07073, "14210": 2.03051, "14215": 2.05728, "14220": 2.07796, "14225": 2.05118, "14230": 2.04719, "14235": 2.06882, "14240": 2.05065, "14245": 2.08472, "14250": 2.05742, "14255": 2.0748, "14260": 2.06689, "14265": 2.05522, "14270": 2.04567, "14275": 2.06473, "14280": 2.02946, "14285": 2.04722, "14290": 2.07584, "14295": 2.0549, "14300": 2.03471, "14305": 2.06567, "14310": 2.05293, "14315": 2.05198, "14320": 2.04373, "14325": 2.06631, "14330": 2.06138, "14335": 2.06112, "14340": 2.05711, "14345": 2.07946, "14350": 2.07973, "14355": 2.04774, "14360": 2.03516, "14365": 2.04018, "14370": 2.01661, "14375": 2.04978, "14380": 2.04594, "14385": 2.0275, "14390": 2.05981, "14395": 2.0415, "14400": 2.05481, "14405": 2.02222, "14410": 2.04811, "14415": 2.06109, "14420": 2.03851, "14425": 2.02822, "14430": 2.048, "14435": 2.06288, "14440": 2.04581, "14445": 2.05781, 
"14450": 2.0512, "14455": 2.06051, "14460": 2.02749, "14465": 2.0184, "14470": 2.06577, "14475": 2.05386, "14480": 2.08122, "14485": 2.02383, "14490": 2.05062, "14495": 2.06237, "14500": 2.05403, "14505": 2.05747, "14510": 2.04476, "14515": 2.06937, "14520": 2.06635, "14525": 2.05926, "14530": 2.05375, "14535": 2.06786, "14540": 2.03869, "14545": 2.0519, "14550": 2.05702, "14555": 2.05093, "14560": 2.04233, "14565": 2.05725, "14570": 2.06248, "14575": 2.06814, "14580": 2.08068, "14585": 2.04127, "14590": 2.04621, "14595": 2.0492, "14600": 2.03417, "14605": 2.03084, "14610": 2.02714, "14615": 2.05616, "14620": 2.04066, "14625": 2.04876, "14630": 2.0352, "14635": 2.06343, "14640": 2.05997, "14645": 2.04447, "14650": 2.03186, "14655": 2.0489, "14660": 2.05179, "14665": 2.03956, "14670": 2.06318, "14675": 2.06554, "14680": 2.04604, "14685": 2.05162, "14690": 2.05569, "14695": 2.04292, "14700": 2.06101, "14705": 2.03055, "14710": 2.03781, "14715": 2.05479, "14720": 2.04301, "14725": 2.07425, "14730": 2.07842, "14735": 2.02706, "14740": 2.04919, "14745": 2.08055, "14750": 2.04379, "14755": 2.05252, "14760": 2.03489, "14765": 2.04663, "14770": 2.06053, "14775": 2.04988, "14780": 2.05704, "14785": 2.03242, "14790": 2.04326, "14795": 2.04476, "14800": 2.04946, "14805": 2.08154, "14810": 2.03529, "14815": 2.05913, "14820": 2.04393, "14825": 2.05448, "14830": 2.05115, "14835": 2.05229, "14840": 2.02544, "14845": 2.03105, "14850": 2.04689, "14855": 2.05259, "14860": 2.05724, "14865": 2.03317, "14870": 2.0588, "14875": 2.03492, "14880": 2.03085, "14885": 2.04775, "14890": 2.03152, "14895": 2.08086, "14900": 2.05907, "14905": 2.03327, "14910": 2.05324, "14915": 2.0485, "14920": 2.03706, "14925": 2.05199, "14930": 2.01786, "14935": 2.05012, "14940": 2.07539, "14945": 2.03124, "14950": 2.04542, "14955": 2.03327, "14960": 2.05317, "14965": 2.05129, "14970": 2.04079, "14975": 2.04116, "14980": 2.04687, "14985": 2.04796, "14990": 2.07066, "14995": 2.04346, "15000": 2.04935, "15005": 2.04392, "15010": 2.0336, "15015": 2.01354, "15020": 2.03896, "15025": 2.04644, "15030": 2.0616, "15035": 2.02654, "15040": 2.02292, "15045": 2.02599, "15050": 2.0478, "15055": 2.02721, "15060": 2.02933, "15065": 2.034, "15070": 2.04149, "15075": 2.02404, "15080": 2.06881, "15085": 2.05541, "15090": 2.03, "15095": 2.06325, "15100": 2.05576, "15105": 2.03434, "15110": 2.04154, "15115": 2.05645, "15120": 2.0754, "15125": 2.03702, "15130": 2.05585, "15135": 2.05022, "15140": 2.06735, "15145": 2.02693, "15150": 2.03098, "15155": 2.03773, "15160": 2.0409, "15165": 2.02471, "15170": 2.05199, "15175": 2.04826, "15180": 2.05405, "15185": 2.04706, "15190": 2.05467, "15195": 2.04219, "15200": 2.06868, "15205": 2.02924, "15210": 2.05956, "15215": 2.0422, "15220": 2.04101, "15225": 2.02943, "15230": 2.05235, "15235": 2.01587, "15240": 2.0456, "15245": 2.06034, "15250": 2.00481, "15255": 2.02813, "15260": 2.02533, "15265": 2.02134, "15270": 2.0237, "15275": 2.03117, "15280": 2.06598, "15285": 2.05188, "15290": 2.04349, "15295": 2.02788, "15300": 2.03197, "15305": 2.04952, "15310": 2.03158, "15315": 2.02688, "15320": 2.04042, "15325": 2.06156, "15330": 2.0179, "15335": 2.045, "15340": 2.0316, "15345": 2.02006, "15350": 2.01662, "15355": 2.02275, "15360": 2.05183, "15365": 2.03239, "15370": 2.03996, "15375": 2.02567, "15380": 2.05566, "15385": 2.06439, "15390": 2.04536, "15395": 2.06814, "15400": 2.05608, "15405": 2.06716, "15410": 2.05189, "15415": 2.04294, "15420": 2.06314, "15425": 2.06828, "15430": 2.03597, "15435": 2.04591, "15440": 
2.05287, "15445": 2.02678, "15450": 2.01602, "15455": 2.03592, "15460": 2.03815, "15465": 2.04632, "15470": 2.01799, "15475": 2.01732, "15480": 2.05624, "15485": 2.03592, "15490": 2.02787, "15495": 2.04043, "15500": 2.02578, "15505": 2.03622, "15510": 2.06447, "15515": 2.05447, "15520": 2.02725, "15525": 2.03168, "15530": 2.01764, "15535": 2.03132, "15540": 2.05044, "15545": 2.0299, "15550": 2.04748, "15555": 2.03286, "15560": 2.05793, "15565": 2.0432, "15570": 2.03761, "15575": 2.03125, "15580": 2.04541, "15585": 2.04817, "15590": 2.0265, "15595": 2.05073, "15600": 2.0224, "15605": 2.0479, "15610": 2.02607, "15615": 2.03992, "15620": 2.02724, "15625": 2.03698, "15630": 2.01763, "15635": 2.02642, "15640": 2.04083, "15645": 2.0115, "15650": 2.04666, "15655": 2.03939, "15660": 2.06161, "15665": 2.04346, "15670": 2.0432, "15675": 2.04746, "15680": 2.03375, "15685": 2.0242, "15690": 2.0539, "15695": 2.03408, "15700": 2.00949, "15705": 2.04119, "15710": 2.06036, "15715": 2.03598, "15720": 2.03167, "15725": 2.05879, "15730": 2.03298, "15735": 2.04085, "15740": 2.02361, "15745": 2.05218, "15750": 2.04051, "15755": 2.03673, "15760": 2.03554, "15765": 2.06707, "15770": 2.04583, "15775": 2.03151, "15780": 2.04519, "15785": 2.02609, "15790": 2.03599, "15795": 2.04496, "15800": 2.05446, "15805": 2.04293, "15810": 2.04716, "15815": 2.05103, "15820": 2.0279, "15825": 2.03785, "15830": 2.0435, "15835": 2.04388, "15840": 2.05922, "15845": 2.04812, "15850": 2.01589, "15855": 2.06412, "15860": 2.0452, "15865": 2.01446, "15870": 2.0251, "15875": 2.02092, "15880": 2.04435, "15885": 2.00331, "15890": 2.05554, "15895": 2.01352, "15900": 2.04411, "15905": 2.0167, "15910": 2.06144, "15915": 2.0096, "15920": 2.02281, "15925": 2.04379, "15930": 1.99617, "15935": 2.03532, "15940": 2.03883, "15945": 2.03948, "15950": 2.03198, "15955": 2.03645, "15960": 2.00508, "15965": 2.02869, "15970": 2.03915, "15975": 2.04765, "15980": 2.04023, "15985": 2.02952, "15990": 2.02942, "15995": 2.02132, "16000": 2.01645, "16005": 2.03758, "16010": 2.0374, "16015": 2.01416, "16020": 2.02903, "16025": 2.01951, "16030": 2.02498, "16035": 2.01839, "16040": 2.00845, "16045": 2.05646, "16050": 2.05556, "16055": 2.04136, "16060": 2.02348, "16065": 2.0104, "16070": 2.02331, "16075": 2.03587, "16080": 2.02512, "16085": 2.0444, "16090": 2.04504, "16095": 2.02787, "16100": 2.03921, "16105": 2.00719, "16110": 2.03029, "16115": 2.05034, "16120": 2.04776, "16125": 2.01935, "16130": 2.016, "16135": 2.03799, "16140": 2.02506, "16145": 2.02453, "16150": 2.00851, "16155": 2.04414, "16160": 2.02549, "16165": 2.03912, "16170": 2.0233, "16175": 2.04076, "16180": 2.04595, "16185": 2.01984, "16190": 2.01842, "16195": 2.03928, "16200": 2.03865, "16205": 2.00384, "16210": 2.04796, "16215": 2.02404, "16220": 2.04256, "16225": 2.03615, "16230": 2.01126, "16235": 1.99975, "16240": 2.06016, "16245": 2.03503, "16250": 2.04612, "16255": 2.03777, "16260": 2.01213, "16265": 2.03331, "16270": 2.03364, "16275": 2.02796, "16280": 2.03139, "16285": 2.02793, "16290": 2.05595, "16295": 2.0206, "16300": 2.02698, "16305": 2.04021, "16310": 2.05276, "16315": 2.03124, "16320": 2.03408, "16325": 2.05539, "16330": 2.01042, "16335": 2.02646, "16340": 2.04477, "16345": 2.03293, "16350": 2.01808, "16355": 2.05037, "16360": 2.01895, "16365": 2.0142, "16370": 2.01123, "16375": 2.00228, "16380": 2.03452, "16385": 2.03668, "16390": 2.03795, "16395": 2.04075, "16400": 2.0338, "16405": 2.02026, "16410": 2.02876, "16415": 2.05434, "16420": 2.00376, "16425": 2.0258, "16430": 2.0425, 
"16435": 2.02823, "16440": 2.01461, "16445": 2.02835, "16450": 2.05312, "16455": 2.0226, "16460": 2.01029, "16465": 2.0192, "16470": 2.01975, "16475": 2.02787, "16480": 2.01463, "16485": 2.02743, "16490": 2.04852, "16495": 2.02419, "16500": 2.02586, "16505": 2.04197, "16510": 2.04883, "16515": 2.02141, "16520": 2.02771, "16525": 2.01096, "16530": 2.02227, "16535": 2.036, "16540": 2.03664, "16545": 2.03069, "16550": 2.0215, "16555": 2.03019, "16560": 2.04333, "16565": 2.01624, "16570": 2.02534, "16575": 2.01035, "16580": 2.03591, "16585": 2.03826, "16590": 2.02992, "16595": 2.01607, "16600": 2.04707, "16605": 2.02211, "16610": 2.04492, "16615": 2.01874, "16620": 2.01465, "16625": 2.03188, "16630": 2.03963, "16635": 2.02568, "16640": 2.04292, "16645": 2.0253, "16650": 2.03506, "16655": 2.0252, "16660": 2.0404, "16665": 2.02266, "16670": 2.0265, "16675": 1.99374, "16680": 2.03086, "16685": 2.0363, "16690": 2.00907, "16695": 2.00728, "16700": 2.01826, "16705": 2.04402, "16710": 2.02234, "16715": 2.03909, "16720": 2.01504, "16725": 2.04241, "16730": 2.01518, "16735": 2.0381, "16740": 2.00526, "16745": 2.0232, "16750": 2.02637, "16755": 2.03172, "16760": 2.01971, "16765": 2.02255, "16770": 2.02098, "16775": 2.04131, "16780": 2.00762, "16785": 2.01746, "16790": 2.05109, "16795": 2.02451, "16800": 2.03881, "16805": 2.03773, "16810": 2.03991, "16815": 2.03909, "16820": 2.05305, "16825": 2.04252, "16830": 2.03305, "16835": 2.01598, "16840": 2.01951, "16845": 2.02095, "16850": 2.02267, "16855": 2.00457, "16860": 2.04229, "16865": 2.03862, "16870": 2.01822, "16875": 2.00703, "16880": 2.02232, "16885": 2.00473, "16890": 2.02345, "16895": 2.01431, "16900": 2.03504, "16905": 2.00394, "16910": 2.03596, "16915": 2.04642, "16920": 2.03118, "16925": 2.02664, "16930": 2.0215, "16935": 2.0014, "16940": 2.00328, "16945": 2.01929, "16950": 2.03842, "16955": 2.02697, "16960": 2.04953, "16965": 2.03403, "16970": 2.05436, "16975": 2.03211, "16980": 2.00312, "16985": 2.01717, "16990": 2.02091, "16995": 2.02073, "17000": 2.03551, "17005": 2.02636, "17010": 2.00197, "17015": 2.0068, "17020": 2.0264, "17025": 2.01595, "17030": 2.04482, "17035": 2.00658, "17040": 2.01882, "17045": 2.01991, "17050": 2.04207, "17055": 2.03125, "17060": 2.01756, "17065": 2.03217, "17070": 2.03539, "17075": 2.0259, "17080": 2.0113, "17085": 2.01748, "17090": 2.04184, "17095": 2.02499, "17100": 2.02478, "17105": 2.02734, "17110": 1.99993, "17115": 2.02587, "17120": 2.03754, "17125": 2.0196, "17130": 2.01352, "17135": 2.01831, "17140": 2.02719, "17145": 1.97957, "17150": 2.02861, "17155": 2.00141, "17160": 2.02072, "17165": 2.03559, "17170": 1.99199, "17175": 2.03251, "17180": 2.0117, "17185": 2.00998, "17190": 2.03799, "17195": 2.04407, "17200": 2.02457, "17205": 2.03279, "17210": 2.04851, "17215": 2.03535, "17220": 2.03706, "17225": 2.0222, "17230": 2.04565, "17235": 2.02396, "17240": 2.03269, "17245": 2.02883, "17250": 2.04738, "17255": 2.00884, "17260": 2.01463, "17265": 2.06277, "17270": 2.01061, "17275": 2.02274, "17280": 2.02174, "17285": 2.03885, "17290": 2.02175, "17295": 2.00945, "17300": 2.01173, "17305": 1.99839, "17310": 2.03348, "17315": 2.02483, "17320": 2.00947, "17325": 2.03681, "17330": 2.00672, "17335": 2.0102, "17340": 2.02135, "17345": 2.02997, "17350": 2.01814, "17355": 2.03341, "17360": 2.04105, "17365": 2.02039, "17370": 2.01078, "17375": 2.0211, "17380": 2.03391, "17385": 2.04414, "17390": 2.02224, "17395": 2.01061, "17400": 2.00997, "17405": 2.01806, "17410": 2.01049, "17415": 2.04389, "17420": 2.03295, "17425": 
2.02285, "17430": 2.02985, "17435": 2.00641, "17440": 2.01114, "17445": 2.00392, "17450": 2.01181, "17455": 1.99204, "17460": 2.0043, "17465": 2.05471, "17470": 2.03352, "17475": 2.03126, "17480": 2.01104, "17485": 2.03363, "17490": 2.04537, "17495": 2.01876, "17500": 2.02748, "17505": 2.00684, "17510": 2.03696, "17515": 2.03597, "17520": 2.02328, "17525": 2.02213, "17530": 2.0123, "17535": 2.05469, "17540": 2.02028, "17545": 2.02705, "17550": 2.0123, "17555": 2.01669, "17560": 2.03614, "17565": 2.02877, "17570": 2.0248, "17575": 2.00562, "17580": 2.02101, "17585": 2.02229, "17590": 2.01241, "17595": 2.01733, "17600": 2.01033, "17605": 2.0062, "17610": 2.01695, "17615": 2.02995, "17620": 2.03489, "17625": 2.03435, "17630": 1.99674, "17635": 2.03637, "17640": 1.97473, "17645": 2.0285, "17650": 2.02166, "17655": 2.00932, "17660": 2.01303, "17665": 2.02845, "17670": 2.0121, "17675": 2.01759, "17680": 2.02185, "17685": 2.02373, "17690": 1.99442, "17695": 2.01499, "17700": 2.0251, "17705": 2.01769, "17710": 2.0369, "17715": 2.03746, "17720": 2.03999, "17725": 2.02927, "17730": 1.99617, "17735": 2.02048, "17740": 2.01224, "17745": 2.03408, "17750": 2.04855, "17755": 2.03776, "17760": 2.02121, "17765": 2.02088, "17770": 2.02342, "17775": 2.02094, "17780": 2.02883, "17785": 2.0093, "17790": 2.00349, "17795": 2.00501, "17800": 2.00206, "17805": 2.02512, "17810": 2.01474, "17815": 2.02379, "17820": 2.03325, "17825": 2.01739, "17830": 2.00359, "17835": 2.01606, "17840": 2.00935, "17845": 2.0042, "17850": 2.0391, "17855": 2.01989, "17860": 2.03264, "17865": 2.04375, "17870": 2.00157, "17875": 2.03584, "17880": 1.98595, "17885": 1.99817, "17890": 2.02562, "17895": 1.99946, "17900": 2.02634, "17905": 2.01851, "17910": 2.02183, "17915": 2.00543, "17920": 2.02697, "17925": 2.02505, "17930": 2.03926, "17935": 2.0112, "17940": 2.0265, "17945": 2.01764, "17950": 1.9907, "17955": 2.01658, "17960": 2.02287, "17965": 2.02692, "17970": 2.02423, "17975": 2.01913, "17980": 2.01748, "17985": 2.03993, "17990": 1.99342, "17995": 1.99109, "18000": 2.0284, "18005": 2.00499, "18010": 2.00884, "18015": 2.02477, "18020": 2.00956, "18025": 2.02611, "18030": 2.01225, "18035": 2.02093, "18040": 2.00794, "18045": 2.01576, "18050": 1.98959, "18055": 1.97934, "18060": 1.98179, "18065": 1.99424, "18070": 2.00574, "18075": 2.01427, "18080": 2.03237, "18085": 1.98732, "18090": 2.01259, "18095": 2.00545, "18100": 2.01827, "18105": 1.98888, "18110": 2.02968, "18115": 2.02146, "18120": 2.01335, "18125": 2.02529, "18130": 2.01897, "18135": 2.0139, "18140": 2.01508, "18145": 2.03485, "18150": 2.01784, "18155": 2.01391, "18160": 2.00587, "18165": 2.02546, "18170": 2.02624, "18175": 2.01145, "18180": 2.01581, "18185": 2.0091, "18190": 2.00749, "18195": 1.99335, "18200": 2.02129, "18205": 2.03013, "18210": 1.99746, "18215": 2.03664, "18220": 2.00065, "18225": 2.02595, "18230": 1.99041, "18235": 2.00494, "18240": 2.01986, "18245": 2.00018, "18250": 2.02406, "18255": 2.01324, "18260": 1.99281, "18265": 2.02451, "18270": 1.9776, "18275": 2.00726, "18280": 1.99596, "18285": 1.99399, "18290": 2.02369, "18295": 2.02053, "18300": 2.01494, "18305": 1.99063, "18310": 1.99063, "18315": 1.99566, "18320": 1.991, "18325": 2.01349, "18330": 2.00353, "18335": 2.00615, "18340": 2.0272, "18345": 2.0215, "18350": 2.00099, "18355": 2.02368, "18360": 2.00792, "18365": 2.00765, "18370": 2.0192, "18375": 2.01224, "18380": 2.01247, "18385": 2.00374, "18390": 2.03229, "18395": 2.00682, "18400": 2.0282, "18405": 2.02579, "18410": 2.02739, "18415": 2.02702, 
"18420": 2.04966, "18425": 2.01156, "18430": 2.01702, "18435": 1.9772, "18440": 2.02185, "18445": 2.0135, "18450": 1.99074, "18455": 1.99859, "18460": 2.01884, "18465": 1.99996, "18470": 2.01244, "18475": 1.99301, "18480": 2.01261, "18485": 2.00005, "18490": 2.00642, "18495": 2.04607, "18500": 1.98873, "18505": 2.01114, "18510": 2.00259, "18515": 2.01393, "18520": 1.99178, "18525": 2.01583, "18530": 1.98222, "18535": 1.98603, "18540": 2.01218, "18545": 1.98422, "18550": 1.99595, "18555": 2.00548, "18560": 2.02611, "18565": 1.99943, "18570": 2.02716, "18575": 2.02111, "18580": 1.99357, "18585": 1.99446, "18590": 2.00576, "18595": 1.99796, "18600": 2.00541, "18605": 2.02915, "18610": 2.01934, "18615": 2.00474, "18620": 1.99838, "18625": 2.01315, "18630": 1.98912, "18635": 1.99828, "18640": 1.99746, "18645": 2.0068, "18650": 2.00148, "18655": 2.00274, "18660": 1.98749, "18665": 1.98955, "18670": 2.00288, "18675": 2.00494, "18680": 1.99547, "18685": 1.98932, "18690": 2.0152, "18695": 2.02474, "18700": 2.0319, "18705": 2.02131, "18710": 1.99666, "18715": 2.02336, "18720": 2.01748, "18725": 2.01568, "18730": 2.02383, "18735": 2.01804, "18740": 2.02191, "18745": 1.99647, "18750": 2.04113, "18755": 1.99835, "18760": 2.01757, "18765": 2.00291, "18770": 2.00795, "18775": 1.9965, "18780": 2.03833, "18785": 2.03312, "18790": 2.0159, "18795": 2.00347, "18800": 2.01815, "18805": 1.99738, "18810": 1.99865, "18815": 2.02775, "18820": 2.0118, "18825": 2.01652, "18830": 2.00365, "18835": 1.99708, "18840": 2.01478, "18845": 2.0096, "18850": 2.00053, "18855": 1.99631, "18860": 1.99676, "18865": 2.0218, "18870": 2.0036, "18875": 1.99673, "18880": 1.98744, "18885": 2.0243, "18890": 2.01288, "18895": 2.02169, "18900": 1.99193, "18905": 1.99207, "18910": 1.99385, "18915": 1.98364, "18920": 2.01838, "18925": 2.0119, "18930": 2.02606, "18935": 2.00953, "18940": 2.00799, "18945": 1.998, "18950": 2.0096, "18955": 2.00063, "18960": 2.00497, "18965": 2.02134, "18970": 2.02549, "18975": 2.00817, "18980": 2.00153, "18985": 1.99363, "18990": 2.01924, "18995": 1.99448, "19000": 1.99103, "19005": 2.0123, "19010": 2.00526, "19015": 2.00536, "19020": 1.99344, "19025": 2.00591, "19030": 2.00644, "19035": 2.02668, "19040": 1.9902, "19045": 2.01414, "19050": 2.00261, "19055": 2.00526, "19060": 2.01571, "19065": 1.99488, "19070": 2.01849, "19075": 1.99226, "19080": 2.00224, "19085": 1.9959, "19090": 1.98548, "19095": 2.02315, "19100": 2.0166, "19105": 2.00439, "19110": 2.01403, "19115": 2.03553, "19120": 2.03098, "19125": 2.01426, "19130": 1.99837, "19135": 2.01447, "19140": 2.00354, "19145": 2.00783, "19150": 1.9762, "19155": 2.01315, "19160": 1.99774, "19165": 2.00346, "19170": 1.98258, "19175": 2.00968, "19180": 2.00718, "19185": 2.00375, "19190": 1.98296, "19195": 1.99634, "19200": 1.99745, "19205": 1.9936, "19210": 2.01049, "19215": 1.99214, "19220": 2.02528, "19225": 2.00782, "19230": 2.00797, "19235": 1.98618, "19240": 1.99327, "19245": 2.0102, "19250": 1.98836, "19255": 2.00511, "19260": 1.98047, "19265": 1.9917, "19270": 2.01363, "19275": 2.01026, "19280": 2.01448, "19285": 2.0123, "19290": 2.03357, "19295": 1.99884, "19300": 2.01975, "19305": 1.99185, "19310": 1.99982, "19315": 1.9869, "19320": 2.00961, "19325": 2.01793, "19330": 2.0002, "19335": 2.01777, "19340": 2.01325, "19345": 1.96991, "19350": 2.0236, "19355": 1.99445, "19360": 1.98482, "19365": 1.994, "19370": 2.02403, "19375": 1.99803, "19380": 2.00216, "19385": 2.02583, "19390": 2.00572, "19395": 2.01962, "19400": 2.00463, "19405": 2.00918, "19410": 2.00188, 
"19415": 1.97518, "19420": 2.01101, "19425": 1.98695, "19430": 1.98816, "19435": 2.02163, "19440": 2.01294, "19445": 1.99473, "19450": 1.99036, "19455": 1.99521, "19460": 1.98195, "19465": 1.99594, "19470": 1.99873, "19475": 2.00363, "19480": 1.98531, "19485": 1.96729, "19490": 1.99796, "19495": 1.99204, "19500": 2.0046, "19505": 2.00107, "19510": 1.99765, "19515": 2.02475, "19520": 2.01531, "19525": 1.99235, "19530": 1.99118, "19535": 2.02512, "19540": 1.98952, "19545": 2.00246, "19550": 2.02206, "19555": 2.00464, "19560": 2.00631, "19565": 2.00843, "19570": 1.99384, "19575": 2.01929, "19580": 2.00276, "19585": 1.99631, "19590": 1.98986, "19595": 2.01423, "19600": 2.00843, "19605": 2.00873, "19610": 2.01348, "19615": 2.00372, "19620": 1.99799, "19625": 2.02631, "19630": 2.00887, "19635": 1.99379, "19640": 2.02305, "19645": 2.01456, "19650": 2.00642, "19655": 2.0145, "19660": 2.00127, "19665": 2.02978, "19670": 2.00249, "19675": 1.99584, "19680": 1.98228, "19685": 2.01136, "19690": 2.00759, "19695": 2.00296, "19700": 1.98735, "19705": 2.01883, "19710": 2.04026, "19715": 2.01551, "19720": 1.99944, "19725": 2.02439, "19730": 2.02915, "19735": 2.01985, "19740": 2.01156, "19745": 1.99161, "19750": 1.98691, "19755": 1.99373, "19760": 1.98676, "19765": 2.01398, "19770": 2.01424, "19775": 1.9962, "19780": 2.00248, "19785": 1.98727, "19790": 1.99739, "19795": 2.00205, "19800": 1.99389, "19805": 1.98172, "19810": 1.98394, "19815": 2.00599, "19820": 2.01084, "19825": 1.998, "19830": 2.01484, "19835": 2.01506, "19840": 2.01734, "19845": 1.95867, "19850": 2.00927, "19855": 2.00067, "19860": 1.9831, "19865": 2.01456, "19870": 2.00151, "19875": 2.01657, "19880": 2.00972, "19885": 1.98019, "19890": 1.99941, "19895": 2.00454, "19900": 1.99487, "19905": 2.00749, "19910": 2.0238, "19915": 1.99856, "19920": 1.98922, "19925": 1.97861, "19930": 1.98356, "19935": 2.00019, "19940": 1.9754, "19945": 2.02016, "19950": 2.01505, "19955": 2.01497, "19960": 2.02162, "19965": 1.99191, "19970": 1.97784, "19975": 2.00152, "19980": 2.00859, "19985": 2.00281, "19990": 1.99582, "19995": 1.99982, "20000": 2.00718, "20005": 1.99105, "20010": 1.99937, "20015": 1.99601, "20020": 2.00682, "20025": 2.00383, "20030": 2.01042, "20035": 1.99529, "20040": 1.98861, "20045": 1.96993, "20050": 2.01151, "20055": 1.99493, "20060": 1.98738, "20065": 2.00192, "20070": 2.00577, "20075": 1.98318, "20080": 1.99018, "20085": 1.97786, "20090": 1.98973, "20095": 1.98514, "20100": 1.99466, "20105": 1.98597, "20110": 2.01991, "20115": 2.00111, "20120": 1.99513, "20125": 1.98609, "20130": 1.99549, "20135": 1.98568, "20140": 1.98854, "20145": 1.99407, "20150": 1.99212, "20155": 2.00774, "20160": 2.0106, "20165": 1.99599, "20170": 2.01794, "20175": 1.99698, "20180": 1.99203, "20185": 1.99825, "20190": 1.97776, "20195": 1.98067, "20200": 1.97192, "20205": 2.0128, "20210": 1.98777, "20215": 2.00317, "20220": 2.02269, "20225": 1.98981, "20230": 1.99107, "20235": 2.00241, "20240": 2.0089, "20245": 1.99231, "20250": 1.99466, "20255": 2.0073, "20260": 1.98429, "20265": 2.00641, "20270": 1.98484, "20275": 1.97868, "20280": 2.00488, "20285": 1.99342, "20290": 1.97961, "20295": 1.99823, "20300": 1.99831, "20305": 1.99756, "20310": 2.01837, "20315": 1.9964, "20320": 1.98817, "20325": 1.9983, "20330": 2.0072, "20335": 1.95942, "20340": 2.00587, "20345": 2.0055, "20350": 1.98522, "20355": 1.98642, "20360": 2.00471, "20365": 1.96529, "20370": 1.99443, "20375": 1.9868, "20380": 1.99511, "20385": 1.99262, "20390": 1.98121, "20395": 1.99823, "20400": 1.98101, 
"20405": 1.99395, "20410": 1.97918, "20415": 2.01644, "20420": 2.00973, "20425": 1.98311, "20430": 1.99397, "20435": 1.98703, "20440": 1.99056, "20445": 2.02533, "20450": 1.97577, "20455": 2.00484, "20460": 1.98652, "20465": 2.00247, "20470": 1.99383, "20475": 1.99348, "20480": 1.97358, "20485": 1.99007, "20490": 1.99383, "20495": 2.00612, "20500": 1.99098, "20505": 1.98346, "20510": 1.98504, "20515": 2.02042, "20520": 1.98966, "20525": 1.98993, "20530": 1.9653, "20535": 1.98116, "20540": 1.97851, "20545": 1.98399, "20550": 1.99803, "20555": 1.99854, "20560": 1.95326, "20565": 2.01206, "20570": 1.9883, "20575": 1.97208, "20580": 1.99392, "20585": 1.96778, "20590": 1.99153, "20595": 1.99694, "20600": 2.01723, "20605": 1.99723, "20610": 2.00538, "20615": 1.98856, "20620": 1.9838, "20625": 1.99693, "20630": 2.0042, "20635": 1.99356, "20640": 1.98675, "20645": 2.00106, "20650": 1.96893, "20655": 1.99148, "20660": 1.98955, "20665": 1.99983, "20670": 2.00057, "20675": 1.99182, "20680": 1.99221, "20685": 1.98384, "20690": 2.0264, "20695": 1.95733, "20700": 1.99858, "20705": 2.00652, "20710": 1.9867, "20715": 1.99119, "20720": 2.00533, "20725": 1.98842, "20730": 2.0015, "20735": 2.01842, "20740": 1.99, "20745": 2.01771, "20750": 1.9948, "20755": 1.95961, "20760": 2.01107, "20765": 1.98955, "20770": 1.99167, "20775": 1.99483, "20780": 1.99381, "20785": 1.97862, "20790": 1.98275, "20795": 1.9984, "20800": 1.97274, "20805": 1.97934, "20810": 1.97584, "20815": 1.98197, "20820": 2.01116, "20825": 1.99772, "20830": 2.00267, "20835": 1.97656, "20840": 1.98257, "20845": 2.0175, "20850": 1.97877, "20855": 2.00538, "20860": 1.99873, "20865": 1.97461, "20870": 1.988, "20875": 1.98626, "20880": 1.99149, "20885": 2.0059, "20890": 1.98343, "20895": 1.98994, "20900": 1.97678, "20905": 2.00177, "20910": 2.02618, "20915": 1.99016, "20920": 2.00466, "20925": 1.99777, "20930": 1.97711, "20935": 2.001, "20940": 1.97949, "20945": 2.00864, "20950": 1.9868, "20955": 1.98909, "20960": 2.00929, "20965": 1.97703, "20970": 1.97347, "20975": 1.9786, "20980": 2.00475, "20985": 1.96084, "20990": 1.99219, "20995": 1.99315, "21000": 1.99878, "21005": 1.98498, "21010": 2.01073, "21015": 1.97037, "21020": 1.96679, "21025": 2.00134, "21030": 1.98144, "21035": 2.00838, "21040": 2.01109, "21045": 2.00081, "21050": 1.98762, "21055": 1.99078, "21060": 1.98843, "21065": 2.00061, "21070": 1.99174, "21075": 1.98376, "21080": 1.9658, "21085": 1.98703, "21090": 1.96768, "21095": 1.98668, "21100": 1.96562, "21105": 1.99416, "21110": 1.9771, "21115": 1.98767, "21120": 1.98824, "21125": 1.98331, "21130": 1.98867, "21135": 1.98199, "21140": 2.0128, "21145": 2.00291, "21150": 1.99064, "21155": 1.98182, "21160": 1.97698, "21165": 1.97598, "21170": 1.99764, "21175": 2.01044, "21180": 1.96939, "21185": 2.02565, "21190": 1.99414, "21195": 1.97399, "21200": 1.9811, "21205": 1.98576, "21210": 2.00258, "21215": 1.97614, "21220": 1.98381, "21225": 1.98132, "21230": 2.0054, "21235": 1.99913, "21240": 1.98434, "21245": 1.97586, "21250": 2.01047, "21255": 1.96043, "21260": 1.96485, "21265": 1.96549, "21270": 1.99039, "21275": 1.97356, "21280": 1.98531, "21285": 1.9736, "21290": 1.9881, "21295": 2.00054, "21300": 1.9915, "21305": 1.98831, "21310": 1.97704, "21315": 1.99218, "21320": 1.96905, "21325": 1.96997, "21330": 1.98602, "21335": 2.00213, "21340": 1.98472, "21345": 2.00915, "21350": 1.98712, "21355": 1.97335, "21360": 1.98435, "21365": 1.98019, "21370": 1.99907, "21375": 1.98555, "21380": 1.9794, "21385": 1.9833, "21390": 1.98759, "21395": 1.9739, 
"21400": 1.97072, "21405": 1.99543, "21410": 2.0046, "21415": 1.98496, "21420": 2.00707, "21425": 1.99034, "21430": 1.99959, "21435": 1.98613, "21440": 1.98244, "21445": 2.01219, "21450": 2.01181, "21455": 1.99683, "21460": 1.98363, "21465": 1.99042, "21470": 2.00333, "21475": 1.98869, "21480": 1.98984, "21485": 1.97126, "21490": 1.99389, "21495": 1.98415, "21500": 1.97493, "21505": 1.99372, "21510": 1.97052, "21515": 1.99946, "21520": 1.98945, "21525": 1.99372, "21530": 2.00014, "21535": 1.98606, "21540": 1.99123, "21545": 1.98091, "21550": 1.97301, "21555": 1.97437, "21560": 1.98973, "21565": 1.9945, "21570": 1.98571, "21575": 2.00405, "21580": 1.97876, "21585": 1.99408, "21590": 1.98102, "21595": 1.98366, "21600": 1.96198, "21605": 2.00596, "21610": 2.00458, "21615": 1.96415, "21620": 2.0093, "21625": 1.97088, "21630": 1.99221, "21635": 1.97215, "21640": 1.99583, "21645": 2.02515, "21650": 1.97191, "21655": 1.96611, "21660": 1.9876, "21665": 1.99635, "21670": 1.99328, "21675": 1.99522, "21680": 1.97658, "21685": 1.97281, "21690": 1.98563, "21695": 1.97909, "21700": 2.00599, "21705": 2.01052, "21710": 2.0059, "21715": 1.99928, "21720": 2.00409, "21725": 1.9995, "21730": 1.9827, "21735": 1.96514, "21740": 2.00301, "21745": 1.97483, "21750": 1.98658, "21755": 1.99226, "21760": 2.00692, "21765": 2.01763, "21770": 1.97241, "21775": 2.01049, "21780": 1.99232, "21785": 2.00145, "21790": 2.00695, "21795": 1.97336, "21800": 1.9731, "21805": 1.97484, "21810": 1.97478, "21815": 1.95817, "21820": 1.99751, "21825": 1.97089, "21830": 2.00821, "21835": 2.00549, "21840": 1.98289, "21845": 1.98547, "21850": 1.9927, "21855": 1.97683, "21860": 1.98381, "21865": 1.97642, "21870": 1.99029, "21875": 2.00601, "21880": 1.97765, "21885": 1.99498, "21890": 1.99673, "21895": 1.97494, "21900": 1.98723, "21905": 1.9711, "21910": 1.98442, "21915": 1.98201, "21920": 1.96729, "21925": 1.99265, "21930": 1.99556, "21935": 2.00511, "21940": 1.97418, "21945": 1.96359, "21950": 1.97762, "21955": 1.99707, "21960": 1.97991, "21965": 2.01571, "21970": 2.00365, "21975": 1.97552, "21980": 1.96444, "21985": 1.98316, "21990": 1.97419, "21995": 1.97064, "22000": 1.99781, "22005": 1.97707, "22010": 1.95463, "22015": 1.96371, "22020": 1.96548, "22025": 1.99055, "22030": 1.97352, "22035": 1.96774, "22040": 1.97162, "22045": 1.98249, "22050": 1.98541, "22055": 2.00375, "22060": 1.98719, "22065": 2.00367, "22070": 1.987, "22075": 2.00572, "22080": 1.97439, "22085": 1.98879, "22090": 1.96491, "22095": 1.97587, "22100": 1.99069, "22105": 1.9845, "22110": 1.98752, "22115": 1.96083, "22120": 2.00084, "22125": 1.98862, "22130": 1.98287, "22135": 1.96241, "22140": 2.00414, "22145": 1.97379, "22150": 1.97531, "22155": 1.9662, "22160": 1.97974, "22165": 1.97107, "22170": 1.98823, "22175": 2.00284, "22180": 1.97251, "22185": 1.98486, "22190": 1.96668, "22195": 1.98589, "22200": 1.97159, "22205": 1.99563, "22210": 1.99258, "22215": 1.97384, "22220": 1.98965, "22225": 1.98947, "22230": 1.97668, "22235": 2.00633, "22240": 1.96894, "22245": 1.98136, "22250": 1.99015, "22255": 1.95861, "22260": 1.98573, "22265": 1.99342, "22270": 2.00597, "22275": 1.97206, "22280": 1.98381, "22285": 1.99702, "22290": 1.97439, "22295": 1.98843, "22300": 1.95719, "22305": 1.98185, "22310": 1.98241, "22315": 1.97481, "22320": 1.98377, "22325": 1.98445, "22330": 1.98054, "22335": 1.9798, "22340": 1.97749, "22345": 1.98345, "22350": 2.00732, "22355": 1.98269, "22360": 1.99157, "22365": 1.99705, "22370": 1.98202, "22375": 1.97948, "22380": 1.98494, "22385": 1.99233, 
"22390": 1.97763, "22395": 1.98423, "22400": 2.00069, "22405": 1.98667, "22410": 1.98599, "22415": 1.97333, "22420": 1.97339, "22425": 1.99249, "22430": 1.98823, "22435": 1.96909, "22440": 1.97356, "22445": 1.98159, "22450": 1.97375, "22455": 1.98065, "22460": 2.00457, "22465": 1.99869, "22470": 1.98398, "22475": 1.99878, "22480": 1.9635, "22485": 1.97119, "22490": 1.98434, "22495": 1.97504, "22500": 1.96901, "22505": 1.97725, "22510": 1.96714, "22515": 1.97127, "22520": 1.95973, "22525": 1.97241, "22530": 1.96524, "22535": 1.9915, "22540": 1.99273, "22545": 1.97646, "22550": 1.99189, "22555": 1.98167, "22560": 1.98741, "22565": 1.97307, "22570": 1.99326, "22575": 1.99188, "22580": 1.987, "22585": 1.9721, "22590": 2.00908, "22595": 1.98555, "22600": 1.99278, "22605": 1.94972, "22610": 1.98954, "22615": 1.97538, "22620": 1.9991, "22625": 1.95426, "22630": 1.96562, "22635": 1.99426, "22640": 1.95996, "22645": 1.98076, "22650": 1.98461, "22655": 1.97527, "22660": 1.96841, "22665": 2.00776, "22670": 1.98209, "22675": 1.96333, "22680": 1.96884, "22685": 1.97706, "22690": 1.96897, "22695": 1.98313, "22700": 1.98575, "22705": 1.96154, "22710": 1.95592, "22715": 2.01229, "22720": 1.98472, "22725": 1.96145, "22730": 1.97777, "22735": 1.96051, "22740": 1.96177, "22745": 1.9769, "22750": 1.96424, "22755": 2.0027, "22760": 1.98037, "22765": 1.96893, "22770": 1.9812, "22775": 1.95436, "22780": 1.96731, "22785": 1.95811, "22790": 1.99082, "22795": 2.00444, "22800": 1.99383, "22805": 1.9656, "22810": 1.97393, "22815": 1.95711, "22820": 1.96134, "22825": 1.9554, "22830": 1.98527, "22835": 1.97485, "22840": 1.96718, "22845": 1.99629, "22850": 1.98282, "22855": 1.97442, "22860": 1.96452, "22865": 1.98487, "22870": 2.00233, "22875": 1.98142, "22880": 1.96904, "22885": 1.9699, "22890": 1.98097, "22895": 1.95794, "22900": 1.97021, "22905": 1.9557, "22910": 1.98502, "22915": 1.97359, "22920": 2.00107, "22925": 1.95209, "22930": 1.9723, "22935": 1.98058, "22940": 1.99452, "22945": 1.99648, "22950": 1.96937, "22955": 1.99233, "22960": 1.96806, "22965": 1.9805, "22970": 1.97405, "22975": 1.96153, "22980": 1.9965, "22985": 1.95573, "22990": 1.97474, "22995": 1.95908, "23000": 1.98803, "23005": 1.99438, "23010": 1.99887, "23015": 1.96679, "23020": 1.98081, "23025": 1.97811, "23030": 1.97198, "23035": 1.99855, "23040": 1.97571, "23045": 1.97483, "23050": 1.95936, "23055": 1.96897, "23060": 1.9691, "23065": 1.9858, "23070": 1.97546, "23075": 1.97766, "23080": 1.99581, "23085": 1.97171, "23090": 1.95718, "23095": 1.98769, "23100": 1.97461, "23105": 1.97983, "23110": 1.9819, "23115": 1.96415, "23120": 1.94201, "23125": 1.99727, "23130": 1.98169, "23135": 1.97768, "23140": 1.98197, "23145": 1.96376, "23150": 1.98183, "23155": 1.98713, "23160": 2.01436, "23165": 1.97583, "23170": 1.98837, "23175": 1.98309, "23180": 1.98664, "23185": 1.9753, "23190": 1.9801, "23195": 1.95972, "23200": 1.96134, "23205": 1.98201, "23210": 1.97206, "23215": 1.97711, "23220": 2.00291, "23225": 1.967, "23230": 1.93529, "23235": 1.98419, "23240": 1.98993, "23245": 1.97742, "23250": 1.96569, "23255": 1.98848, "23260": 1.98689, "23265": 1.95791, "23270": 2.01333, "23275": 1.98348, "23280": 1.958, "23285": 1.98794, "23290": 2.00688, "23295": 1.99255, "23300": 1.9713, "23305": 1.97596, "23310": 1.99389, "23315": 1.979, "23320": 2.0041, "23325": 1.96992, "23330": 1.97555, "23335": 1.97133, "23340": 1.9563, "23345": 1.98885, "23350": 1.98371, "23355": 1.98347, "23360": 1.97412, "23365": 1.96057, "23370": 1.9721, "23375": 1.98215, "23380": 1.9992, 
"23385": 1.9923, "23390": 1.99436, "23395": 1.97422, "23400": 1.9643, "23405": 1.97222, "23410": 1.98682, "23415": 1.963, "23420": 1.98439, "23425": 1.98681, "23430": 1.98423, "23435": 1.9673, "23440": 1.96342, "23445": 1.95552, "23450": 1.96355, "23455": 1.96227, "23460": 1.97463, "23465": 1.99012, "23470": 1.991, "23475": 1.97209, "23480": 1.97715, "23485": 1.97368, "23490": 1.98417, "23495": 1.96322, "23500": 1.98776, "23505": 1.96169, "23510": 1.97984, "23515": 1.98866, "23520": 1.98404, "23525": 1.97209, "23530": 1.99753, "23535": 1.98678, "23540": 1.9711, "23545": 1.99253, "23550": 1.95041, "23555": 1.96696, "23560": 1.94513, "23565": 1.9691, "23570": 1.98454, "23575": 1.96997, "23580": 1.9815, "23585": 1.97995, "23590": 1.9768, "23595": 1.96875, "23600": 1.93978, "23605": 1.96994, "23610": 1.97312, "23615": 1.97155, "23620": 1.96199, "23625": 1.98866, "23630": 1.96164, "23635": 1.99801, "23640": 1.99057, "23645": 1.96155, "23650": 1.95806, "23655": 1.97693, "23660": 1.97393, "23665": 1.96671, "23670": 1.97725, "23675": 1.94922, "23680": 1.98086, "23685": 1.95762, "23690": 1.96755, "23695": 1.96962, "23700": 1.97117, "23705": 1.96607, "23710": 1.97535, "23715": 1.95707, "23720": 1.95745, "23725": 1.97311, "23730": 2.00205, "23735": 1.97225, "23740": 1.96835, "23745": 1.94699, "23750": 1.96709, "23755": 1.99098, "23760": 1.96962, "23765": 1.98139, "23770": 1.97362, "23775": 1.96832, "23780": 1.97433, "23785": 1.98091, "23790": 1.97047, "23795": 1.96449, "23800": 1.96651, "23805": 1.95291, "23810": 1.95733, "23815": 1.96976, "23820": 1.97476, "23825": 1.98412, "23830": 1.97428, "23835": 1.97946, "23840": 1.97232, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", 
"24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", 
"25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", 
"26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", 
"27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", 
"28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", 
"29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", 
"31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", 
"32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", 
"33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", 
"34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", 
"35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", 
"36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", 
"37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", 
"38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", 
"39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", 
"41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", 
"42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", 
"43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", 
"44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", 
"45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", 
"46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", 
"47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", 
"48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", 
"49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 406787200.0, "5": 406592800.0, "10": 414795680.0, "15": 419864736.0, "20": 411723456.0, "25": 406621408.0, "30": 413032352.0, "35": 409552288.0, "40": 414714432.0, "45": 411202976.0, "50": 413174560.0, "55": 414168224.0, "60": 407748608.0, "65": 411426496.0, "70": 410917696.0, "75": 409330912.0, "80": 406662208.0, "85": 406727296.0, "90": 412170368.0, "95": 414242752.0, "100": 414837248.0, "105": 421840928.0, "110": 413674208.0, "115": 408996160.0, "120": 411563648.0, "125": 406562368.0, "130": 411449472.0, "135": 417567008.0, "140": 403017248.0, "145": 410665632.0, "150": 417478752.0, "155": 
405316512.0, "160": 402154976.0, "165": 408991232.0, "170": 400900224.0, "175": 407149088.0, "180": 409547744.0, "185": 402997280.0, "190": 414755328.0, "195": 403490912.0, "200": 414941408.0, "205": 414928320.0, "210": 404131456.0, "215": 413631456.0, "220": 406039552.0, "225": 405373344.0, "230": 418524480.0, "235": 410755168.0, "240": 415721312.0, "245": 412098208.0, "250": 411169568.0, "255": 422415392.0, "260": 405249152.0, "265": 410731360.0, "270": 409320832.0, "275": 407087264.0, "280": 407996544.0, "285": 413278496.0, "290": 405431616.0, "295": 412640544.0, "300": 412001056.0, "305": 413316000.0, "310": 411115552.0, "315": 415751968.0, "320": 414909216.0, "325": 406882400.0, "330": 413361120.0, "335": 414362784.0, "340": 404911424.0, "345": 409996768.0, "350": 416837664.0, "355": 414217472.0, "360": 415052928.0, "365": 405024928.0, "370": 416626336.0, "375": 402492320.0, "380": 412622720.0, "385": 406800992.0, "390": 413800352.0, "395": 412018176.0, "400": 413234400.0, "405": 417366112.0, "410": 410795296.0, "415": 413901536.0, "420": 415211744.0, "425": 416629920.0, "430": 410911968.0, "435": 414000768.0, "440": 406106112.0, "445": 410400576.0, "450": 407894272.0, "455": 406204640.0, "460": 414314688.0, "465": 411987008.0, "470": 425869408.0, "475": 412912448.0, "480": 407134560.0, "485": 415675520.0, "490": 408967616.0, "495": 414697344.0, "500": 413689344.0, "505": 409277696.0, "510": 417083264.0, "515": 420421536.0, "520": 406486176.0, "525": 398836800.0, "530": 413281408.0, "535": 404573472.0, "540": 403465728.0, "545": 410430624.0, "550": 407042464.0, "555": 409849952.0, "560": 427451808.0, "565": 406157312.0, "570": 404855232.0, "575": 412396704.0, "580": 407225888.0, "585": 414316256.0, "590": 412624544.0, "595": 419187904.0, "600": 412166656.0, "605": 418647616.0, "610": 406105568.0, "615": 411867520.0, "620": 414482592.0, "625": 413541920.0, "630": 414244288.0, "635": 415757984.0, "640": 418910848.0, "645": 419571712.0, "650": 414619456.0, "655": 425490336.0, "660": 409905920.0, "665": 415278240.0, "670": 404456640.0, "675": 416200448.0, "680": 416538080.0, "685": 415696928.0, "690": 412361344.0, "695": 417985376.0, "700": 410633376.0, "705": 418421024.0, "710": 414200160.0, "715": 411006176.0, "720": 410517440.0, "725": 410395968.0, "730": 417933952.0, "735": 411969504.0, "740": 415871680.0, "745": 411143136.0, "750": 413328160.0, "755": 413922624.0, "760": 410644960.0, "765": 413237696.0, "770": 414556320.0, "775": 407342016.0, "780": 421720960.0, "785": 409850688.0, "790": 410566048.0, "795": 420396768.0, "800": 418179520.0, "805": 414130336.0, "810": 414457536.0, "815": 412908960.0, "820": 408238752.0, "825": 407963936.0, "830": 415385696.0, "835": 414047104.0, "840": 415204928.0, "845": 411016192.0, "850": 404439328.0, "855": 411947872.0, "860": 412363616.0, "865": 406095232.0, "870": 419935936.0, "875": 409717184.0, "880": 407162624.0, "885": 413686496.0, "890": 410879104.0, "895": 412948128.0, "900": 413039584.0, "905": 406263104.0, "910": 409210848.0, "915": 406260512.0, "920": 415114912.0, "925": 400829856.0, "930": 410056256.0, "935": 411177696.0, "940": 403081504.0, "945": 417402752.0, "950": 412865888.0, "955": 409705024.0, "960": 420144416.0, "965": 407940672.0, "970": 414700576.0, "975": 403909728.0, "980": 406760032.0, "985": 412578464.0, "990": 407737568.0, "995": 410222336.0, "1000": 414078048.0, "1005": 405783904.0, "1010": 419715840.0, "1015": 413890944.0, "1020": 413898624.0, "1025": 410996128.0, "1030": 403688256.0, "1035": 420494176.0, "1040": 
408933920.0, "1045": 411340928.0, "1050": 414043200.0, "1055": 415266016.0, "1060": 409931392.0, "1065": 412781632.0, "1070": 407065792.0, "1075": 408986880.0, "1080": 418013600.0, "1085": 412463616.0, "1090": 414579712.0, "1095": 418743040.0, "1100": 411084032.0, "1105": 413011296.0, "1110": 407311296.0, "1115": 415975616.0, "1120": 407381536.0, "1125": 417662080.0, "1130": 417912384.0, "1135": 407432224.0, "1140": 415217952.0, "1145": 413782464.0, "1150": 412067776.0, "1155": 406357952.0, "1160": 415343296.0, "1165": 412422112.0, "1170": 410875136.0, "1175": 412856640.0, "1180": 412681024.0, "1185": 411930464.0, "1190": 417111008.0, "1195": 406445536.0, "1200": 410965888.0, "1205": 424617344.0, "1210": 409684320.0, "1215": 408699008.0, "1220": 407541952.0, "1225": 410070496.0, "1230": 418428000.0, "1235": 412550560.0, "1240": 410224704.0, "1245": 412510752.0, "1250": 406523808.0, "1255": 415952256.0, "1260": 408754528.0, "1265": 409595840.0, "1270": 406934400.0, "1275": 413291776.0, "1280": 409161472.0, "1285": 410123840.0, "1290": 409285568.0, "1295": 414244768.0, "1300": 409156576.0, "1305": 407935200.0, "1310": 413585760.0, "1315": 409715008.0, "1320": 407103872.0, "1325": 413081184.0, "1330": 417865984.0, "1335": 408947520.0, "1340": 417991424.0, "1345": 412728992.0, "1350": 419565344.0, "1355": 412067360.0, "1360": 402343104.0, "1365": 415137216.0, "1370": 417912416.0, "1375": 400929856.0, "1380": 413295840.0, "1385": 414364960.0, "1390": 412117472.0, "1395": 413717056.0, "1400": 404592352.0, "1405": 412337856.0, "1410": 419102400.0, "1415": 407758720.0, "1420": 411900128.0, "1425": 408406080.0, "1430": 411989824.0, "1435": 415365408.0, "1440": 408482944.0, "1445": 416845280.0, "1450": 412752544.0, "1455": 410340704.0, "1460": 414055008.0, "1465": 413369856.0, "1470": 413871296.0, "1475": 414429728.0, "1480": 413746208.0, "1485": 413902432.0, "1490": 418777536.0, "1495": 409660000.0, "1500": 418486528.0, "1505": 403660576.0, "1510": 414430720.0, "1515": 404493408.0, "1520": 413765504.0, "1525": 413699584.0, "1530": 419424320.0, "1535": 404636160.0, "1540": 411435840.0, "1545": 409019328.0, "1550": 411313088.0, "1555": 406654208.0, "1560": 406776864.0, "1565": 413993184.0, "1570": 418591232.0, "1575": 412425664.0, "1580": 411314368.0, "1585": 417945760.0, "1590": 409021280.0, "1595": 404167040.0, "1600": 409886848.0, "1605": 407624736.0, "1610": 420205152.0, "1615": 408829920.0, "1620": 415825504.0, "1625": 411794560.0, "1630": 408957056.0, "1635": 413076032.0, "1640": 411048960.0, "1645": 412716064.0, "1650": 412701312.0, "1655": 417733440.0, "1660": 416723872.0, "1665": 421185088.0, "1670": 417566336.0, "1675": 405740896.0, "1680": 416770400.0, "1685": 416311296.0, "1690": 412519168.0, "1695": 413662560.0, "1700": 407042112.0, "1705": 408810720.0, "1710": 416924960.0, "1715": 410375744.0, "1720": 404130944.0, "1725": 410461408.0, "1730": 418640288.0, "1735": 417127968.0, "1740": 411045024.0, "1745": 411080608.0, "1750": 410851072.0, "1755": 417427744.0, "1760": 406794816.0, "1765": 416368096.0, "1770": 404462048.0, "1775": 417543680.0, "1780": 413475968.0, "1785": 412876384.0, "1790": 407662368.0, "1795": 415060544.0, "1800": 409081728.0, "1805": 426666336.0, "1810": 407200448.0, "1815": 413999936.0, "1820": 415243616.0, "1825": 417351808.0, "1830": 407456640.0, "1835": 420251136.0, "1840": 407119776.0, "1845": 412623840.0, "1850": 421213568.0, "1855": 412256320.0, "1860": 417122208.0, "1865": 407822432.0, "1870": 406385216.0, "1875": 414701600.0, "1880": 410798464.0, "1885": 
412044544.0, "1890": 414643136.0, "1895": 406858912.0, "1900": 410935904.0, "1905": 413753664.0, "1910": 417157568.0, "1915": 416149120.0, "1920": 406425312.0, "1925": 415858752.0, "1930": 410068736.0, "1935": 404462496.0, "1940": 418424576.0, "1945": 405741280.0, "1950": 413204544.0, "1955": 416186912.0, "1960": 415729760.0, "1965": 408031616.0, "1970": 417080448.0, "1975": 411139040.0, "1980": 405911008.0, "1985": 407792032.0, "1990": 409383264.0, "1995": 406987680.0, "2000": 418206080.0, "2005": 412560480.0, "2010": 407928288.0, "2015": 404352224.0, "2020": 414600928.0, "2025": 411224192.0, "2030": 411018880.0, "2035": 410682304.0, "2040": 413223392.0, "2045": 418047296.0, "2050": 416196096.0, "2055": 410052800.0, "2060": 414153504.0, "2065": 416044096.0, "2070": 408422624.0, "2075": 408085760.0, "2080": 415035296.0, "2085": 408808256.0, "2090": 422525792.0, "2095": 412106784.0, "2100": 406880672.0, "2105": 409864416.0, "2110": 412466336.0, "2115": 416016736.0, "2120": 417554400.0, "2125": 412509920.0, "2130": 415167872.0, "2135": 411730624.0, "2140": 416616064.0, "2145": 407850848.0, "2150": 412204160.0, "2155": 421880032.0, "2160": 414872320.0, "2165": 417313824.0, "2170": 412888832.0, "2175": 412058592.0, "2180": 401517248.0, "2185": 419650112.0, "2190": 407847872.0, "2195": 411329792.0, "2200": 416483584.0, "2205": 416235456.0, "2210": 406784288.0, "2215": 414889920.0, "2220": 409257728.0, "2225": 416560576.0, "2230": 411542784.0, "2235": 414091904.0, "2240": 410443360.0, "2245": 422944896.0, "2250": 407622432.0, "2255": 411846688.0, "2260": 415267264.0, "2265": 401921472.0, "2270": 411845888.0, "2275": 403335456.0, "2280": 412776960.0, "2285": 409613952.0, "2290": 405388480.0, "2295": 412134688.0, "2300": 414693216.0, "2305": 412082080.0, "2310": 413971360.0, "2315": 406141216.0, "2320": 414435200.0, "2325": 407850208.0, "2330": 407168000.0, "2335": 414224480.0, "2340": 396992416.0, "2345": 410659808.0, "2350": 417715040.0, "2355": 414657088.0, "2360": 413562816.0, "2365": 415847232.0, "2370": 408956512.0, "2375": 416067840.0, "2380": 414410432.0, "2385": 416537440.0, "2390": 410253024.0, "2395": 407603872.0, "2400": 418760544.0, "2405": 413580864.0, "2410": 412598016.0, "2415": 408921376.0, "2420": 410432640.0, "2425": 418367584.0, "2430": 411183104.0, "2435": 417282784.0, "2440": 415494496.0, "2445": 409334016.0, "2450": 415647904.0, "2455": 408179168.0, "2460": 412609376.0, "2465": 412156000.0, "2470": 417436960.0, "2475": 409694688.0, "2480": 412302304.0, "2485": 405355488.0, "2490": 408202432.0, "2495": 409802272.0, "2500": 414514240.0, "2505": 409075872.0, "2510": 408931392.0, "2515": 419014688.0, "2520": 423230432.0, "2525": 412778592.0, "2530": 415990112.0, "2535": 411774944.0, "2540": 414291424.0, "2545": 416071936.0, "2550": 426326048.0, "2555": 417112448.0, "2560": 408545216.0, "2565": 410183168.0, "2570": 405201184.0, "2575": 409905536.0, "2580": 407675648.0, "2585": 415078784.0, "2590": 418690688.0, "2595": 406931648.0, "2600": 404455360.0, "2605": 422343360.0, "2610": 407391872.0, "2615": 415177696.0, "2620": 410339424.0, "2625": 413419136.0, "2630": 418694784.0, "2635": 417752608.0, "2640": 414685632.0, "2645": 409658656.0, "2650": 408144640.0, "2655": 414075904.0, "2660": 408572000.0, "2665": 409608992.0, "2670": 413822528.0, "2675": 400270176.0, "2680": 415554528.0, "2685": 410027392.0, "2690": 402805376.0, "2695": 420029120.0, "2700": 413570592.0, "2705": 409027072.0, "2710": 407153632.0, "2715": 410097824.0, "2720": 410712448.0, "2725": 413195200.0, "2730": 
412596640.0, "2735": 413084064.0, "2740": 412838688.0, "2745": 417260416.0, "2750": 413595008.0, "2755": 410175744.0, "2760": 416744928.0, "2765": 421540768.0, "2770": 410949856.0, "2775": 416078304.0, "2780": 410014240.0, "2785": 416408448.0, "2790": 409062752.0, "2795": 414748928.0, "2800": 406918208.0, "2805": 414317920.0, "2810": 414419136.0, "2815": 404296832.0, "2820": 414149280.0, "2825": 417729504.0, "2830": 422104640.0, "2835": 416386016.0, "2840": 415203904.0, "2845": 412396768.0, "2850": 411379840.0, "2855": 423512352.0, "2860": 412028768.0, "2865": 411407808.0, "2870": 412313408.0, "2875": 420658592.0, "2880": 405601312.0, "2885": 412448576.0, "2890": 411919648.0, "2895": 407876864.0, "2900": 423106976.0, "2905": 404229696.0, "2910": 420953952.0, "2915": 407533472.0, "2920": 415786720.0, "2925": 415944960.0, "2930": 414584192.0, "2935": 413681984.0, "2940": 409973664.0, "2945": 410869536.0, "2950": 419703808.0, "2955": 402785376.0, "2960": 411677344.0, "2965": 409903040.0, "2970": 409751168.0, "2975": 418162816.0, "2980": 410421728.0, "2985": 407747872.0, "2990": 412363072.0, "2995": 401145024.0, "3000": 409486432.0, "3005": 410491328.0, "3010": 419307872.0, "3015": 409528576.0, "3020": 403291296.0, "3025": 408300640.0, "3030": 409533440.0, "3035": 411915424.0, "3040": 411904992.0, "3045": 402992032.0, "3050": 420408224.0, "3055": 422485504.0, "3060": 409035584.0, "3065": 408836608.0, "3070": 419824224.0, "3075": 409186816.0, "3080": 424957664.0, "3085": 407349760.0, "3090": 416174496.0, "3095": 413058624.0, "3100": 421686144.0, "3105": 416513632.0, "3110": 413210528.0, "3115": 413495072.0, "3120": 422892416.0, "3125": 409825760.0, "3130": 421939456.0, "3135": 415594560.0, "3140": 408555136.0, "3145": 421588928.0, "3150": 412143488.0, "3155": 411858688.0, "3160": 414733920.0, "3165": 412623648.0, "3170": 414342400.0, "3175": 414913056.0, "3180": 411854720.0, "3185": 416245216.0, "3190": 413642048.0, "3195": 410496256.0, "3200": 419168064.0, "3205": 409037024.0, "3210": 408199840.0, "3215": 413927488.0, "3220": 410788288.0, "3225": 416898112.0, "3230": 417529888.0, "3235": 404059776.0, "3240": 414934336.0, "3245": 410583072.0, "3250": 412329280.0, "3255": 404869088.0, "3260": 425151776.0, "3265": 413824960.0, "3270": 408459872.0, "3275": 405119776.0, "3280": 424221312.0, "3285": 418752992.0, "3290": 410527264.0, "3295": 413462016.0, "3300": 416134944.0, "3305": 410637760.0, "3310": 415434496.0, "3315": 413029632.0, "3320": 412415872.0, "3325": 410753056.0, "3330": 408954912.0, "3335": 414404000.0, "3340": 408227328.0, "3345": 409507776.0, "3350": 413857280.0, "3355": 420489408.0, "3360": 409104864.0, "3365": 409069984.0, "3370": 414148032.0, "3375": 409605184.0, "3380": 417740800.0, "3385": 411416384.0, "3390": 421712192.0, "3395": 415479488.0, "3400": 413852032.0, "3405": 417919296.0, "3410": 412496512.0, "3415": 409168544.0, "3420": 414295968.0, "3425": 410996032.0, "3430": 413643008.0, "3435": 409450752.0, "3440": 416490080.0, "3445": 411897728.0, "3450": 418121376.0, "3455": 406573312.0, "3460": 409691200.0, "3465": 416324832.0, "3470": 414249312.0, "3475": 415538304.0, "3480": 417294208.0, "3485": 413677152.0, "3490": 414862304.0, "3495": 408679488.0, "3500": 424684448.0, "3505": 409308448.0, "3510": 410950816.0, "3515": 411999456.0, "3520": 421329856.0, "3525": 404110304.0, "3530": 421461088.0, "3535": 418736544.0, "3540": 411952704.0, "3545": 413166464.0, "3550": 418670976.0, "3555": 409479744.0, "3560": 406299808.0, "3565": 409469120.0, "3570": 407452480.0, "3575": 
413753888.0, "3580": 414224672.0, "3585": 416864672.0, "3590": 414358752.0, "3595": 409140832.0, "3600": 414502112.0, "3605": 406114528.0, "3610": 414011104.0, "3615": 411592672.0, "3620": 413609536.0, "3625": 412592576.0, "3630": 411909920.0, "3635": 416928320.0, "3640": 413198496.0, "3645": 411136608.0, "3650": 424203776.0, "3655": 413274656.0, "3660": 415800192.0, "3665": 410505760.0, "3670": 411598784.0, "3675": 412831968.0, "3680": 417850080.0, "3685": 407707776.0, "3690": 403624160.0, "3695": 409771296.0, "3700": 418474304.0, "3705": 407261536.0, "3710": 409960704.0, "3715": 413813472.0, "3720": 402364032.0, "3725": 413965152.0, "3730": 398619360.0, "3735": 414599104.0, "3740": 415418496.0, "3745": 413128736.0, "3750": 414610560.0, "3755": 416327296.0, "3760": 409055008.0, "3765": 414406688.0, "3770": 413943904.0, "3775": 412198944.0, "3780": 411482784.0, "3785": 413936064.0, "3790": 411311168.0, "3795": 403627776.0, "3800": 415113440.0, "3805": 409896640.0, "3810": 413178912.0, "3815": 410947520.0, "3820": 409122304.0, "3825": 414565056.0, "3830": 415758080.0, "3835": 410009184.0, "3840": 418842176.0, "3845": 418043712.0, "3850": 408647072.0, "3855": 407298464.0, "3860": 412500704.0, "3865": 422720288.0, "3870": 417781952.0, "3875": 416399552.0, "3880": 417658496.0, "3885": 408441664.0, "3890": 421993632.0, "3895": 417242592.0, "3900": 406882208.0, "3905": 408385536.0, "3910": 410465728.0, "3915": 411182848.0, "3920": 409240768.0, "3925": 420936320.0, "3930": 421754944.0, "3935": 407375616.0, "3940": 407539360.0, "3945": 411239040.0, "3950": 408215488.0, "3955": 409821152.0, "3960": 412036768.0, "3965": 407748608.0, "3970": 410371040.0, "3975": 409701664.0, "3980": 422094752.0, "3985": 407115584.0, "3990": 417167424.0, "3995": 413288672.0, "4000": 409692480.0, "4005": 420254624.0, "4010": 420238848.0, "4015": 402528320.0, "4020": 410110240.0, "4025": 407377792.0, "4030": 413355616.0, "4035": 410748160.0, "4040": 411811360.0, "4045": 394848320.0, "4050": 422398752.0, "4055": 410414560.0, "4060": 414341536.0, "4065": 403565216.0, "4070": 411259168.0, "4075": 411366752.0, "4080": 409918784.0, "4085": 409797568.0, "4090": 407940064.0, "4095": 418257472.0, "4100": 415937344.0, "4105": 408053568.0, "4110": 410109984.0, "4115": 408823296.0, "4120": 409609568.0, "4125": 416034112.0, "4130": 409625344.0, "4135": 412102464.0, "4140": 417440128.0, "4145": 411499392.0, "4150": 417293600.0, "4155": 414915360.0, "4160": 414638240.0, "4165": 411904576.0, "4170": 416484576.0, "4175": 416336224.0, "4180": 412024736.0, "4185": 420829440.0, "4190": 414841280.0, "4195": 405728576.0, "4200": 422429472.0, "4205": 405695968.0, "4210": 414646272.0, "4215": 412796736.0, "4220": 409195520.0, "4225": 408443616.0, "4230": 411745856.0, "4235": 409837184.0, "4240": 410584384.0, "4245": 414691648.0, "4250": 412066336.0, "4255": 407948032.0, "4260": 414240704.0, "4265": 411940864.0, "4270": 406331488.0, "4275": 416399616.0, "4280": 409247872.0, "4285": 412430592.0, "4290": 412137312.0, "4295": 410661632.0, "4300": 406256448.0, "4305": 410502208.0, "4310": 415798528.0, "4315": 411738272.0, "4320": 413735456.0, "4325": 410926400.0, "4330": 407244448.0, "4335": 413563104.0, "4340": 413446752.0, "4345": 414356448.0, "4350": 411820768.0, "4355": 419979008.0, "4360": 407168800.0, "4365": 415378848.0, "4370": 413764064.0, "4375": 407911008.0, "4380": 417100224.0, "4385": 400664832.0, "4390": 412822944.0, "4395": 411881056.0, "4400": 413938400.0, "4405": 417650976.0, "4410": 416622656.0, "4415": 409991328.0, "4420": 
415532096.0, "4425": 407115104.0, "4430": 405693472.0, "4435": 403989152.0, "4440": 405524896.0, "4445": 417688224.0, "4450": 410342592.0, "4455": 412831008.0, "4460": 415239424.0, "4465": 407164416.0, "4470": 414277888.0, "4475": 418553344.0, "4480": 413891552.0, "4485": 413112896.0, "4490": 413442432.0, "4495": 406271936.0, "4500": 417946688.0, "4505": 412232000.0, "4510": 404715040.0, "4515": 415177632.0, "4520": 406917696.0, "4525": 401542208.0, "4530": 413586144.0, "4535": 416087104.0, "4540": 412009856.0, "4545": 418889856.0, "4550": 406139392.0, "4555": 415863872.0, "4560": 411935744.0, "4565": 415969536.0, "4570": 415512672.0, "4575": 410451104.0, "4580": 415264224.0, "4585": 419201984.0, "4590": 415957472.0, "4595": 411062432.0, "4600": 411268832.0, "4605": 410520480.0, "4610": 409327520.0, "4615": 411109600.0, "4620": 408886272.0, "4625": 418082080.0, "4630": 413936256.0, "4635": 412638176.0, "4640": 406230368.0, "4645": 414091328.0, "4650": 413133792.0, "4655": 408592160.0, "4660": 415381600.0, "4665": 411059936.0, "4670": 406024576.0, "4675": 407422208.0, "4680": 412045120.0, "4685": 401737536.0, "4690": 412037152.0, "4695": 413021600.0, "4700": 411681312.0, "4705": 414061280.0, "4710": 406653824.0, "4715": 412725216.0, "4720": 418111488.0, "4725": 414832384.0, "4730": 400932768.0, "4735": 406290176.0, "4740": 411916864.0, "4745": 405706240.0, "4750": 409212448.0, "4755": 405911488.0, "4760": 412483328.0, "4765": 411705632.0, "4770": 414675104.0, "4775": 407481984.0, "4780": 414027200.0, "4785": 416551872.0, "4790": 415750272.0, "4795": 403483648.0, "4800": 410502528.0, "4805": 412297856.0, "4810": 405206432.0, "4815": 413810816.0, "4820": 415485248.0, "4825": 409352608.0, "4830": 410956416.0, "4835": 411579968.0, "4840": 419486720.0, "4845": 404231584.0, "4850": 404556000.0, "4855": 411596896.0, "4860": 415908640.0, "4865": 415283328.0, "4870": 402857568.0, "4875": 416288192.0, "4880": 415520032.0, "4885": 419919136.0, "4890": 409060896.0, "4895": 410480320.0, "4900": 410588672.0, "4905": 409010464.0, "4910": 405709248.0, "4915": 412933248.0, "4920": 409831168.0, "4925": 405079456.0, "4930": 418714720.0, "4935": 414269728.0, "4940": 413330944.0, "4945": 421132800.0, "4950": 418273920.0, "4955": 416244960.0, "4960": 410120544.0, "4965": 409400320.0, "4970": 414351744.0, "4975": 411731008.0, "4980": 415933248.0, "4985": 411335392.0, "4990": 411463584.0, "4995": 423262592.0, "5000": 407560992.0, "5005": 418368416.0, "5010": 414670688.0, "5015": 409324640.0, "5020": 417504672.0, "5025": 413801440.0, "5030": 414358848.0, "5035": 406837568.0, "5040": 415633056.0, "5045": 415747584.0, "5050": 411818496.0, "5055": 415541216.0, "5060": 412473760.0, "5065": 412388608.0, "5070": 405806016.0, "5075": 402035072.0, "5080": 414825248.0, "5085": 418113536.0, "5090": 408614400.0, "5095": 417369952.0, "5100": 415803936.0, "5105": 418499232.0, "5110": 410614912.0, "5115": 422598016.0, "5120": 414132800.0, "5125": 416534784.0, "5130": 409344576.0, "5135": 417942400.0, "5140": 413607584.0, "5145": 411636544.0, "5150": 413679744.0, "5155": 415759328.0, "5160": 411736480.0, "5165": 408955840.0, "5170": 422841088.0, "5175": 415305696.0, "5180": 408437568.0, "5185": 415913792.0, "5190": 409372064.0, "5195": 402516480.0, "5200": 416640416.0, "5205": 413339296.0, "5210": 414531296.0, "5215": 418585280.0, "5220": 407279232.0, "5225": 418559744.0, "5230": 415474272.0, "5235": 410745504.0, "5240": 410026688.0, "5245": 422285952.0, "5250": 410486400.0, "5255": 412058912.0, "5260": 417265696.0, "5265": 
410523520.0, "5270": 418443776.0, "5275": 407219648.0, "5280": 408358208.0, "5285": 410931072.0, "5290": 412952128.0, "5295": 410420320.0, "5300": 405674496.0, "5305": 416855744.0, "5310": 406246496.0, "5315": 413688064.0, "5320": 410469728.0, "5325": 410749408.0, "5330": 407536768.0, "5335": 414708960.0, "5340": 418528704.0, "5345": 405531744.0, "5350": 419316128.0, "5355": 410044960.0, "5360": 410357152.0, "5365": 409314528.0, "5370": 414738528.0, "5375": 404859104.0, "5380": 415522144.0, "5385": 407587424.0, "5390": 415223584.0, "5395": 415797696.0, "5400": 400663232.0, "5405": 416249024.0, "5410": 415430560.0, "5415": 405909536.0, "5420": 409366912.0, "5425": 416490528.0, "5430": 409741184.0, "5435": 410576480.0, "5440": 408677216.0, "5445": 405256800.0, "5450": 410712160.0, "5455": 407334592.0, "5460": 414335296.0, "5465": 408168416.0, "5470": 408908672.0, "5475": 401473184.0, "5480": 405461216.0, "5485": 422297504.0, "5490": 410417440.0, "5495": 413914112.0, "5500": 406811712.0, "5505": 409148384.0, "5510": 411790368.0, "5515": 417155104.0, "5520": 411180096.0, "5525": 407141536.0, "5530": 412763808.0, "5535": 408202272.0, "5540": 414013184.0, "5545": 412599296.0, "5550": 411525632.0, "5555": 410826208.0, "5560": 414422560.0, "5565": 402189952.0, "5570": 416011488.0, "5575": 421397760.0, "5580": 404505472.0, "5585": 415428704.0, "5590": 412124576.0, "5595": 409716160.0, "5600": 416105152.0, "5605": 416301376.0, "5610": 410637888.0, "5615": 410282144.0, "5620": 411885728.0, "5625": 413402624.0, "5630": 414323008.0, "5635": 409602304.0, "5640": 409750720.0, "5645": 409834304.0, "5650": 411833984.0, "5655": 403957696.0, "5660": 412693120.0, "5665": 422411584.0, "5670": 410471936.0, "5675": 409451392.0, "5680": 414692032.0, "5685": 404195328.0, "5690": 412776736.0, "5695": 416498208.0, "5700": 413486752.0, "5705": 409541312.0, "5710": 405652768.0, "5715": 417155328.0, "5720": 424202112.0, "5725": 408272192.0, "5730": 409808800.0, "5735": 410577344.0, "5740": 412886464.0, "5745": 407148000.0, "5750": 408681440.0, "5755": 418956160.0, "5760": 405325632.0, "5765": 414488416.0, "5770": 415998144.0, "5775": 408645056.0, "5780": 412722272.0, "5785": 407659168.0, "5790": 405493568.0, "5795": 418294016.0, "5800": 417279296.0, "5805": 412931936.0, "5810": 416697760.0, "5815": 412148768.0, "5820": 410391616.0, "5825": 414546272.0, "5830": 405839072.0, "5835": 413134752.0, "5840": 406467424.0, "5845": 417840608.0, "5850": 410097920.0, "5855": 406551488.0, "5860": 414625760.0, "5865": 408117120.0, "5870": 412291296.0, "5875": 414214816.0, "5880": 418787744.0, "5885": 409224576.0, "5890": 412982816.0, "5895": 419963968.0, "5900": 415943936.0, "5905": 409986848.0, "5910": 414298560.0, "5915": 414420576.0, "5920": 412538912.0, "5925": 409259136.0, "5930": 407345952.0, "5935": 424563584.0, "5940": 412636640.0, "5945": 414445792.0, "5950": 421280352.0, "5955": 408346752.0, "5960": 412463872.0, "5965": 408257696.0, "5970": 415219520.0, "5975": 409520736.0, "5980": 417036608.0, "5985": 418636608.0, "5990": 410178272.0, "5995": 413919040.0, "6000": 419519072.0, "6005": 415234368.0, "6010": 419222304.0, "6015": 402348768.0, "6020": 399966912.0, "6025": 414652544.0, "6030": 411478336.0, "6035": 411872224.0, "6040": 408655904.0, "6045": 414154784.0, "6050": 410426080.0, "6055": 408748192.0, "6060": 413963328.0, "6065": 409847296.0, "6070": 406615328.0, "6075": 417527968.0, "6080": 418646080.0, "6085": 412212704.0, "6090": 409817024.0, "6095": 420224640.0, "6100": 420685312.0, "6105": 419883488.0, "6110": 
414352256.0, "6115": 416973792.0, "6120": 410793920.0, "6125": 420745472.0, "6130": 414363392.0, "6135": 402876192.0, "6140": 415049280.0, "6145": 412451648.0, "6150": 413627040.0, "6155": 415966336.0, "6160": 410090528.0, "6165": 410899808.0, "6170": 419708032.0, "6175": 404844032.0, "6180": 412640608.0, "6185": 419904000.0, "6190": 424132896.0, "6195": 404346528.0, "6200": 411174592.0, "6205": 416985280.0, "6210": 412139488.0, "6215": 408967520.0, "6220": 410461024.0, "6225": 418113632.0, "6230": 415510816.0, "6235": 405722208.0, "6240": 407819584.0, "6245": 411395520.0, "6250": 408121248.0, "6255": 409042208.0, "6260": 402485056.0, "6265": 417879168.0, "6270": 413493920.0, "6275": 415054976.0, "6280": 418284608.0, "6285": 414468096.0, "6290": 412031456.0, "6295": 408842496.0, "6300": 406866752.0, "6305": 418174144.0, "6310": 413456992.0, "6315": 411006048.0, "6320": 415911232.0, "6325": 402049952.0, "6330": 416931200.0, "6335": 413970720.0, "6340": 415466976.0, "6345": 411631488.0, "6350": 413886304.0, "6355": 416071040.0, "6360": 407335488.0, "6365": 410249760.0, "6370": 420416832.0, "6375": 406301504.0, "6380": 410387584.0, "6385": 409385632.0, "6390": 409196832.0, "6395": 415780800.0, "6400": 422217024.0, "6405": 418600704.0, "6410": 416300672.0, "6415": 407333856.0, "6420": 409533408.0, "6425": 418033280.0, "6430": 415407360.0, "6435": 419612864.0, "6440": 408260800.0, "6445": 416454464.0, "6450": 408735392.0, "6455": 412928928.0, "6460": 413711648.0, "6465": 412617280.0, "6470": 409546400.0, "6475": 409979680.0, "6480": 408545952.0, "6485": 411313472.0, "6490": 405336832.0, "6495": 406970528.0, "6500": 415920288.0, "6505": 405727360.0, "6510": 413457184.0, "6515": 403532448.0, "6520": 411317408.0, "6525": 411360416.0, "6530": 412315744.0, "6535": 409030400.0, "6540": 410558816.0, "6545": 406092416.0, "6550": 412566880.0, "6555": 408197120.0, "6560": 411911584.0, "6565": 411155200.0, "6570": 418523520.0, "6575": 407061600.0, "6580": 405064160.0, "6585": 416187744.0, "6590": 416192032.0, "6595": 410655200.0, "6600": 411246144.0, "6605": 413204000.0, "6610": 417195456.0, "6615": 420749888.0, "6620": 405779968.0, "6625": 416103328.0, "6630": 407018624.0, "6635": 414524640.0, "6640": 405293248.0, "6645": 406541600.0, "6650": 406945600.0, "6655": 413623136.0, "6660": 414572608.0, "6665": 412146240.0, "6670": 410737568.0, "6675": 417239328.0, "6680": 419405664.0, "6685": 412509088.0, "6690": 413554304.0, "6695": 407086816.0, "6700": 408855488.0, "6705": 417070592.0, "6710": 408946464.0, "6715": 414534720.0, "6720": 401662976.0, "6725": 409642656.0, "6730": 411211552.0, "6735": 416893856.0, "6740": 408541664.0, "6745": 413814368.0, "6750": 418817504.0, "6755": 420705984.0, "6760": 410736032.0, "6765": 413955968.0, "6770": 413418208.0, "6775": 415320032.0, "6780": 409672576.0, "6785": 407198816.0, "6790": 410964352.0, "6795": 410353760.0, "6800": 406880096.0, "6805": 412727872.0, "6810": 401732256.0, "6815": 418271328.0, "6820": 409351296.0, "6825": 408754976.0, "6830": 415226176.0, "6835": 407825888.0, "6840": 408653792.0, "6845": 415771296.0, "6850": 402553952.0, "6855": 413453216.0, "6860": 416467072.0, "6865": 407665504.0, "6870": 411260160.0, "6875": 414475904.0, "6880": 407920608.0, "6885": 415790688.0, "6890": 407459840.0, "6895": 414817952.0, "6900": 410033120.0, "6905": 408214080.0, "6910": 412158720.0, "6915": 421948064.0, "6920": 419996672.0, "6925": 408512672.0, "6930": 413122240.0, "6935": 419484000.0, "6940": 410063008.0, "6945": 403108832.0, "6950": 413669472.0, "6955": 
418633856.0, "6960": 410876192.0, "6965": 413980768.0, "6970": 408199936.0, "6975": 420128032.0, "6980": 422401760.0, "6985": 413406944.0, "6990": 416335680.0, "6995": 418586816.0, "7000": 404216928.0, "7005": 407996128.0, "7010": 411172608.0, "7015": 414184736.0, "7020": 411180352.0, "7025": 413033664.0, "7030": 410072736.0, "7035": 410428256.0, "7040": 411608224.0, "7045": 411179552.0, "7050": 410125408.0, "7055": 408956000.0, "7060": 416491296.0, "7065": 418332800.0, "7070": 408952128.0, "7075": 410032480.0, "7080": 415864256.0, "7085": 414027552.0, "7090": 404950112.0, "7095": 403128160.0, "7100": 412242592.0, "7105": 410491872.0, "7110": 418445696.0, "7115": 418528896.0, "7120": 415546400.0, "7125": 405308512.0, "7130": 413236032.0, "7135": 413057792.0, "7140": 414054752.0, "7145": 411334080.0, "7150": 411977440.0, "7155": 419346944.0, "7160": 422696512.0, "7165": 418111200.0, "7170": 413165408.0, "7175": 408591232.0, "7180": 411180768.0, "7185": 411891776.0, "7190": 412547648.0, "7195": 412614144.0, "7200": 407733376.0, "7205": 413129792.0, "7210": 414097888.0, "7215": 420883648.0, "7220": 407706016.0, "7225": 417759872.0, "7230": 407569984.0, "7235": 414966624.0, "7240": 409372000.0, "7245": 411054976.0, "7250": 406504160.0, "7255": 416825888.0, "7260": 412147872.0, "7265": 410194688.0, "7270": 416626496.0, "7275": 406960896.0, "7280": 413014176.0, "7285": 420288032.0, "7290": 413616928.0, "7295": 417692288.0, "7300": 413332224.0, "7305": 415002016.0, "7310": 420674368.0, "7315": 410928768.0, "7320": 419926976.0, "7325": 412483296.0, "7330": 408956384.0, "7335": 418185120.0, "7340": 406595744.0, "7345": 407005664.0, "7350": 408902880.0, "7355": 409315584.0, "7360": 412784928.0, "7365": 414185536.0, "7370": 413133408.0, "7375": 411668352.0, "7380": 412253184.0, "7385": 416490528.0, "7390": 409418688.0, "7395": 409347968.0, "7400": 414836128.0, "7405": 410534944.0, "7410": 402136128.0, "7415": 416332224.0, "7420": 411340416.0, "7425": 415920832.0, "7430": 413435712.0, "7435": 413010208.0, "7440": 420833088.0, "7445": 414136032.0, "7450": 422978880.0, "7455": 411839584.0, "7460": 417245952.0, "7465": 414833344.0, "7470": 418131104.0, "7475": 401456928.0, "7480": 410455232.0, "7485": 407161344.0, "7490": 407271424.0, "7495": 406768608.0, "7500": 418722080.0, "7505": 410190272.0, "7510": 414215552.0, "7515": 416283008.0, "7520": 407325632.0, "7525": 409800864.0, "7530": 413481024.0, "7535": 408681280.0, "7540": 413302496.0, "7545": 407213664.0, "7550": 406486304.0, "7555": 408173792.0, "7560": 420137504.0, "7565": 406892096.0, "7570": 407353888.0, "7575": 409949952.0, "7580": 411743072.0, "7585": 410524832.0, "7590": 409227616.0, "7595": 410082432.0, "7600": 397659584.0, "7605": 414522720.0, "7610": 404512448.0, "7615": 409368096.0, "7620": 417447776.0, "7625": 419106400.0, "7630": 410811040.0, "7635": 407840416.0, "7640": 411266688.0, "7645": 410441088.0, "7650": 420695680.0, "7655": 412456864.0, "7660": 410460224.0, "7665": 411406208.0, "7670": 408579968.0, "7675": 415503712.0, "7680": 416189312.0, "7685": 419031744.0, "7690": 411282560.0, "7695": 417975392.0, "7700": 414073632.0, "7705": 405261856.0, "7710": 410155968.0, "7715": 407152896.0, "7720": 416979424.0, "7725": 408505408.0, "7730": 416369952.0, "7735": 413596096.0, "7740": 410920000.0, "7745": 420454272.0, "7750": 411973952.0, "7755": 407273728.0, "7760": 406111360.0, "7765": 406779488.0, "7770": 413399488.0, "7775": 413943168.0, "7780": 410371328.0, "7785": 414095456.0, "7790": 413300288.0, "7795": 412617024.0, "7800": 
409223264.0, "7805": 416101504.0, "7810": 416418912.0, "7815": 407282656.0, "7820": 401841152.0, "7825": 410669664.0, "7830": 412940736.0, "7835": 412050368.0, "7840": 407165792.0, "7845": 414783616.0, "7850": 417633600.0, "7855": 411289408.0, "7860": 410904800.0, "7865": 413634496.0, "7870": 409698624.0, "7875": 418869760.0, "7880": 416516640.0, "7885": 414891456.0, "7890": 410593152.0, "7895": 404985024.0, "7900": 417949888.0, "7905": 404382272.0, "7910": 412638560.0, "7915": 409947296.0, "7920": 410934528.0, "7925": 419174272.0, "7930": 403955968.0, "7935": 407407584.0, "7940": 413872224.0, "7945": 407754368.0, "7950": 411874784.0, "7955": 414425664.0, "7960": 412814208.0, "7965": 415675648.0, "7970": 411167648.0, "7975": 410116448.0, "7980": 409007712.0, "7985": 406490464.0, "7990": 421244096.0, "7995": 412916608.0, "8000": 403147520.0, "8005": 415348096.0, "8010": 413516288.0, "8015": 420566368.0, "8020": 417046432.0, "8025": 411952320.0, "8030": 412767616.0, "8035": 410021408.0, "8040": 410920384.0, "8045": 415961344.0, "8050": 416054752.0, "8055": 410068160.0, "8060": 405646784.0, "8065": 422266432.0, "8070": 411573472.0, "8075": 406336768.0, "8080": 420973152.0, "8085": 406959584.0, "8090": 413946752.0, "8095": 413828288.0, "8100": 413227168.0, "8105": 409873696.0, "8110": 411388256.0, "8115": 414332096.0, "8120": 414160096.0, "8125": 413553504.0, "8130": 411244928.0, "8135": 414408672.0, "8140": 408630816.0, "8145": 408984160.0, "8150": 411166624.0, "8155": 410153696.0, "8160": 413478528.0, "8165": 408497504.0, "8170": 418539104.0, "8175": 406549280.0, "8180": 411406560.0, "8185": 409661216.0, "8190": 409561440.0, "8195": 426114048.0, "8200": 407032480.0, "8205": 407951776.0, "8210": 420353248.0, "8215": 408578528.0, "8220": 410755328.0, "8225": 411475712.0, "8230": 421276416.0, "8235": 420524736.0, "8240": 411316416.0, "8245": 414486112.0, "8250": 407652960.0, "8255": 407370560.0, "8260": 414559104.0, "8265": 415232032.0, "8270": 405576736.0, "8275": 413209184.0, "8280": 412585504.0, "8285": 416929952.0, "8290": 411026080.0, "8295": 414803520.0, "8300": 417154240.0, "8305": 412023616.0, "8310": 416721920.0, "8315": 406089120.0, "8320": 416743904.0, "8325": 413315264.0, "8330": 411682016.0, "8335": 413860864.0, "8340": 412161824.0, "8345": 413113664.0, "8350": 424786976.0, "8355": 411032960.0, "8360": 407742720.0, "8365": 416020896.0, "8370": 405119456.0, "8375": 413796864.0, "8380": 407998080.0, "8385": 423801216.0, "8390": 407709216.0, "8395": 405706976.0, "8400": 414461184.0, "8405": 410678848.0, "8410": 415041728.0, "8415": 404159936.0, "8420": 409026624.0, "8425": 421390624.0, "8430": 407232544.0, "8435": 417423872.0, "8440": 407575072.0, "8445": 411874048.0, "8450": 412795584.0, "8455": 406517920.0, "8460": 415877312.0, "8465": 414745664.0, "8470": 406863264.0, "8475": 416838720.0, "8480": 418756896.0, "8485": 412603456.0, "8490": 417203776.0, "8495": 412572032.0, "8500": 417247232.0, "8505": 409122752.0, "8510": 411815968.0, "8515": 416824064.0, "8520": 409971904.0, "8525": 412788992.0, "8530": 411506912.0, "8535": 405128128.0, "8540": 416136512.0, "8545": 418818528.0, "8550": 406891296.0, "8555": 419192864.0, "8560": 412594880.0, "8565": 407756896.0, "8570": 407853792.0, "8575": 406472192.0, "8580": 406312832.0, "8585": 404399680.0, "8590": 406095712.0, "8595": 412739360.0, "8600": 419820672.0, "8605": 414267232.0, "8610": 409615392.0, "8615": 407546080.0, "8620": 410804768.0, "8625": 413307968.0, "8630": 409208000.0, "8635": 413176128.0, "8640": 410264448.0, "8645": 
411358368.0, "8650": 416700832.0, "8655": 409007552.0, "8660": 413110304.0, "8665": 415118592.0, "8670": 411581152.0, "8675": 408035712.0, "8680": 413876320.0, "8685": 405210688.0, "8690": 409212288.0, "8695": 412898048.0, "8700": 410780096.0, "8705": 414166016.0, "8710": 406857600.0, "8715": 417049088.0, "8720": 408712096.0, "8725": 414100672.0, "8730": 403708864.0, "8735": 422546912.0, "8740": 409308992.0, "8745": 413737696.0, "8750": 414157056.0, "8755": 410940800.0, "8760": 413579712.0, "8765": 401478272.0, "8770": 416224704.0, "8775": 414738272.0, "8780": 413214080.0, "8785": 410496320.0, "8790": 412890336.0, "8795": 412622528.0, "8800": 416115232.0, "8805": 410004928.0, "8810": 412825568.0, "8815": 404964096.0, "8820": 407080896.0, "8825": 410596736.0, "8830": 404515488.0, "8835": 406690720.0, "8840": 416235808.0, "8845": 414744960.0, "8850": 416135392.0, "8855": 411393664.0, "8860": 416835968.0, "8865": 409747200.0, "8870": 411440256.0, "8875": 412868928.0, "8880": 415102912.0, "8885": 412172352.0, "8890": 412311520.0, "8895": 414633248.0, "8900": 415379392.0, "8905": 405739808.0, "8910": 413368608.0, "8915": 411758208.0, "8920": 415239936.0, "8925": 414456512.0, "8930": 403222784.0, "8935": 409600736.0, "8940": 414381536.0, "8945": 407565984.0, "8950": 415789568.0, "8955": 407980288.0, "8960": 414450016.0, "8965": 410903808.0, "8970": 409146400.0, "8975": 415194816.0, "8980": 408475232.0, "8985": 411168608.0, "8990": 412009440.0, "8995": 412866720.0, "9000": 401304640.0, "9005": 400419712.0, "9010": 414002816.0, "9015": 402874304.0, "9020": 418203040.0, "9025": 417285696.0, "9030": 413407680.0, "9035": 405182400.0, "9040": 419542848.0, "9045": 416871424.0, "9050": 413580960.0, "9055": 406233600.0, "9060": 412802912.0, "9065": 417138240.0, "9070": 407559232.0, "9075": 406869760.0, "9080": 413544416.0, "9085": 410925024.0, "9090": 412876608.0, "9095": 412835200.0, "9100": 414380672.0, "9105": 407349088.0, "9110": 406486400.0, "9115": 414561024.0, "9120": 412073504.0, "9125": 408430496.0, "9130": 410071264.0, "9135": 411941824.0, "9140": 415271168.0, "9145": 411114368.0, "9150": 416719168.0, "9155": 407655808.0, "9160": 410241280.0, "9165": 418052992.0, "9170": 411455168.0, "9175": 409895360.0, "9180": 420417248.0, "9185": 411010816.0, "9190": 414604704.0, "9195": 408891200.0, "9200": 409898720.0, "9205": 407009024.0, "9210": 417983680.0, "9215": 413845984.0, "9220": 409857088.0, "9225": 417701344.0, "9230": 406274464.0, "9235": 410519232.0, "9240": 412903392.0, "9245": 409607104.0, "9250": 417098336.0, "9255": 408225184.0, "9260": 411135744.0, "9265": 417647200.0, "9270": 416378912.0, "9275": 408735552.0, "9280": 410644928.0, "9285": 410342400.0, "9290": 401492480.0, "9295": 407188480.0, "9300": 410765664.0, "9305": 407818592.0, "9310": 413769888.0, "9315": 408966528.0, "9320": 410909216.0, "9325": 415345536.0, "9330": 408267296.0, "9335": 413888160.0, "9340": 404057600.0, "9345": 413373792.0, "9350": 403692384.0, "9355": 415591008.0, "9360": 415157728.0, "9365": 413483520.0, "9370": 417051040.0, "9375": 408541120.0, "9380": 408199584.0, "9385": 408857952.0, "9390": 416809376.0, "9395": 415059616.0, "9400": 411103648.0, "9405": 413289728.0, "9410": 413339008.0, "9415": 416755872.0, "9420": 413716512.0, "9425": 409067744.0, "9430": 413525024.0, "9435": 413641056.0, "9440": 408700672.0, "9445": 419655136.0, "9450": 414472960.0, "9455": 408244832.0, "9460": 414675712.0, "9465": 416352160.0, "9470": 418433344.0, "9475": 407980192.0, "9480": 416227872.0, "9485": 408085408.0, "9490": 
410572512.0, "9495": 413262496.0, "9500": 409867200.0, "9505": 414266944.0, "9510": 418464576.0, "9515": 414525344.0, "9520": 405108288.0, "9525": 424295840.0, "9530": 406839936.0, "9535": 412383936.0, "9540": 412993312.0, "9545": 413253952.0, "9550": 412388640.0, "9555": 413980352.0, "9560": 413171968.0, "9565": 423689504.0, "9570": 424550144.0, "9575": 404456640.0, "9580": 414068640.0, "9585": 420372608.0, "9590": 412206624.0, "9595": 415092096.0, "9600": 411047264.0, "9605": 405957632.0, "9610": 422746208.0, "9615": 415443808.0, "9620": 414158560.0, "9625": 405536096.0, "9630": 406537856.0, "9635": 411136992.0, "9640": 413936960.0, "9645": 409119712.0, "9650": 409042496.0, "9655": 408255264.0, "9660": 418752928.0, "9665": 416893152.0, "9670": 408334624.0, "9675": 418754816.0, "9680": 409091712.0, "9685": 412930720.0, "9690": 408641024.0, "9695": 406007232.0, "9700": 408254464.0, "9705": 406172256.0, "9710": 407591808.0, "9715": 417780320.0, "9720": 411616352.0, "9725": 413716064.0, "9730": 426725344.0, "9735": 417438880.0, "9740": 410146976.0, "9745": 411524320.0, "9750": 410217760.0, "9755": 411158240.0, "9760": 414819200.0, "9765": 413861280.0, "9770": 407059552.0, "9775": 417429952.0, "9780": 409135552.0, "9785": 409857440.0, "9790": 420101056.0, "9795": 411563776.0, "9800": 413747872.0, "9805": 411372480.0, "9810": 412131328.0, "9815": 411124448.0, "9820": 415187296.0, "9825": 419963232.0, "9830": 412718336.0, "9835": 408660960.0, "9840": 410444288.0, "9845": 413540832.0, "9850": 408508160.0, "9855": 420044160.0, "9860": 407951456.0, "9865": 415911360.0, "9870": 411168704.0, "9875": 417993888.0, "9880": 411944704.0, "9885": 418500000.0, "9890": 402740000.0, "9895": 410958176.0, "9900": 410780000.0, "9905": 416972768.0, "9910": 411851968.0, "9915": 409483840.0, "9920": 407866688.0, "9925": 408468832.0, "9930": 409834240.0, "9935": 412723680.0, "9940": 411590880.0, "9945": 420691008.0, "9950": 411318560.0, "9955": 417098944.0, "9960": 412127456.0, "9965": 421839680.0, "9970": 411710336.0, "9975": 416295968.0, "9980": 414925344.0, "9985": 405896704.0, "9990": 417420992.0, "9995": 409665504.0, "10000": 408546912.0, "10005": 414422976.0, "10010": 411808864.0, "10015": 408428096.0, "10020": 412930752.0, "10025": 411051520.0, "10030": 404597920.0, "10035": 419797568.0, "10040": 404046016.0, "10045": 419314304.0, "10050": 414832064.0, "10055": 402751264.0, "10060": 408561824.0, "10065": 411441920.0, "10070": 414240992.0, "10075": 405739136.0, "10080": 414244992.0, "10085": 407202624.0, "10090": 412762048.0, "10095": 410256928.0, "10100": 419288192.0, "10105": 410062880.0, "10110": 408368224.0, "10115": 409323264.0, "10120": 410992704.0, "10125": 414838176.0, "10130": 403208992.0, "10135": 420892032.0, "10140": 407285024.0, "10145": 411040384.0, "10150": 410961920.0, "10155": 418008864.0, "10160": 410858976.0, "10165": 399431136.0, "10170": 413007040.0, "10175": 416505280.0, "10180": 409687776.0, "10185": 404901280.0, "10190": 413438464.0, "10195": 415317248.0, "10200": 417974016.0, "10205": 408189408.0, "10210": 403578848.0, "10215": 413030560.0, "10220": 415774944.0, "10225": 413510656.0, "10230": 406398976.0, "10235": 399499200.0, "10240": 412155168.0, "10245": 412719072.0, "10250": 415782240.0, "10255": 405601632.0, "10260": 412605728.0, "10265": 419728256.0, "10270": 410329248.0, "10275": 410872832.0, "10280": 422142304.0, "10285": 411368960.0, "10290": 412406336.0, "10295": 408072000.0, "10300": 413138592.0, "10305": 412758048.0, "10310": 410518816.0, "10315": 411771136.0, "10320": 
414627744.0, "10325": 407478176.0, "10330": 414779744.0, "10335": 412303520.0, "10340": 415859136.0, "10345": 404311680.0, "10350": 412133664.0, "10355": 409012000.0, "10360": 417745248.0, "10365": 407714464.0, "10370": 409914432.0, "10375": 411832000.0, "10380": 415288448.0, "10385": 407590432.0, "10390": 409378048.0, "10395": 401845248.0, "10400": 415578112.0, "10405": 406723680.0, "10410": 415030880.0, "10415": 423592864.0, "10420": 414206208.0, "10425": 408746880.0, "10430": 418731648.0, "10435": 405944128.0, "10440": 408965920.0, "10445": 418902208.0, "10450": 415715808.0, "10455": 407858336.0, "10460": 409102272.0, "10465": 409710304.0, "10470": 410567104.0, "10475": 411727872.0, "10480": 406112512.0, "10485": 411866624.0, "10490": 408795200.0, "10495": 422467456.0, "10500": 412527488.0, "10505": 418027232.0, "10510": 418584384.0, "10515": 407679072.0, "10520": 418188160.0, "10525": 409458656.0, "10530": 414842848.0, "10535": 417211168.0, "10540": 412191456.0, "10545": 419758176.0, "10550": 417561344.0, "10555": 411755744.0, "10560": 412122464.0, "10565": 415275648.0, "10570": 412255488.0, "10575": 409386944.0, "10580": 414496704.0, "10585": 413841824.0, "10590": 419260096.0, "10595": 405074336.0, "10600": 418754240.0, "10605": 406378592.0, "10610": 421518400.0, "10615": 412271168.0, "10620": 403742144.0, "10625": 416455744.0, "10630": 408952256.0, "10635": 407387136.0, "10640": 406198112.0, "10645": 409089568.0, "10650": 415351040.0, "10655": 414624896.0, "10660": 415596416.0, "10665": 417028928.0, "10670": 410606624.0, "10675": 410426080.0, "10680": 413294816.0, "10685": 415728928.0, "10690": 414616864.0, "10695": 411700032.0, "10700": 414162016.0, "10705": 405437408.0, "10710": 412639072.0, "10715": 418948064.0, "10720": 415138240.0, "10725": 402324000.0, "10730": 409925248.0, "10735": 412191072.0, "10740": 411047680.0, "10745": 419079648.0, "10750": 415551328.0, "10755": 415334080.0, "10760": 414852064.0, "10765": 403140736.0, "10770": 417098336.0, "10775": 410855680.0, "10780": 412429888.0, "10785": 407606400.0, "10790": 417907424.0, "10795": 405636256.0, "10800": 413241568.0, "10805": 408138016.0, "10810": 417028608.0, "10815": 416098336.0, "10820": 405817216.0, "10825": 415029888.0, "10830": 414977632.0, "10835": 407951296.0, "10840": 418338752.0, "10845": 415426880.0, "10850": 414050848.0, "10855": 415583392.0, "10860": 413300576.0, "10865": 412765152.0, "10870": 411409856.0, "10875": 405819328.0, "10880": 410958688.0, "10885": 404020128.0, "10890": 403172864.0, "10895": 416512640.0, "10900": 407623680.0, "10905": 415895584.0, "10910": 411092224.0, "10915": 414824960.0, "10920": 412478848.0, "10925": 410919264.0, "10930": 412605472.0, "10935": 413461344.0, "10940": 412726528.0, "10945": 405764864.0, "10950": 411563712.0, "10955": 412374592.0, "10960": 413543904.0, "10965": 409150240.0, "10970": 414300064.0, "10975": 416843296.0, "10980": 408909664.0, "10985": 413429760.0, "10990": 417226304.0, "10995": 411197376.0, "11000": 415599040.0, "11005": 410344000.0, "11010": 410160128.0, "11015": 418018976.0, "11020": 409703392.0, "11025": 417632608.0, "11030": 418410272.0, "11035": 408630624.0, "11040": 407233152.0, "11045": 421666080.0, "11050": 415134048.0, "11055": 408367392.0, "11060": 423483360.0, "11065": 406632032.0, "11070": 415285664.0, "11075": 427614400.0, "11080": 408132928.0, "11085": 406591392.0, "11090": 412013600.0, "11095": 410716704.0, "11100": 414172832.0, "11105": 415820096.0, "11110": 412274656.0, "11115": 412711488.0, "11120": 407252512.0, "11125": 408669216.0, 
"11130": 413285440.0, "11135": 407936736.0, "11140": 420623744.0, "11145": 411428992.0, "11150": 416215008.0, "11155": 413801408.0, "11160": 418419488.0, "11165": 409289824.0, "11170": 407100640.0, "11175": 413821280.0, "11180": 411713984.0, "11185": 410191488.0, "11190": 407475360.0, "11195": 407253984.0, "11200": 406563424.0, "11205": 406279136.0, "11210": 417149504.0, "11215": 414137408.0, "11220": 411097696.0, "11225": 411024416.0, "11230": 418709696.0, "11235": 410261760.0, "11240": 404302336.0, "11245": 408344800.0, "11250": 408166112.0, "11255": 414845792.0, "11260": 404113664.0, "11265": 413665440.0, "11270": 411128640.0, "11275": 411938144.0, "11280": 419103584.0, "11285": 409936928.0, "11290": 412113920.0, "11295": 416906336.0, "11300": 415488064.0, "11305": 421969312.0, "11310": 406470176.0, "11315": 417057632.0, "11320": 409633248.0, "11325": 411008000.0, "11330": 408041792.0, "11335": 405545856.0, "11340": 404201024.0, "11345": 425510560.0, "11350": 417449696.0, "11355": 407163648.0, "11360": 408813440.0, "11365": 410818720.0, "11370": 413803680.0, "11375": 415661248.0, "11380": 418123296.0, "11385": 408458368.0, "11390": 412904992.0, "11395": 413820128.0, "11400": 409450048.0, "11405": 406552608.0, "11410": 419920288.0, "11415": 408792768.0, "11420": 411117984.0, "11425": 412436224.0, "11430": 404857120.0, "11435": 413857920.0, "11440": 405861568.0, "11445": 415107808.0, "11450": 413075808.0, "11455": 406270112.0, "11460": 411438400.0, "11465": 414857216.0, "11470": 407807360.0, "11475": 416458112.0, "11480": 418457440.0, "11485": 413353376.0, "11490": 415273664.0, "11495": 406022592.0, "11500": 414151744.0, "11505": 411004576.0, "11510": 412120576.0, "11515": 404803712.0, "11520": 410703776.0, "11525": 415474144.0, "11530": 407971008.0, "11535": 417397184.0, "11540": 415996480.0, "11545": 406664864.0, "11550": 410031808.0, "11555": 415379680.0, "11560": 419565344.0, "11565": 412976384.0, "11570": 418143648.0, "11575": 416782400.0, "11580": 418971328.0, "11585": 414268768.0, "11590": 407019680.0, "11595": 409689440.0, "11600": 416366880.0, "11605": 420558112.0, "11610": 413538176.0, "11615": 416220256.0, "11620": 413565312.0, "11625": 418884128.0, "11630": 408690368.0, "11635": 405184256.0, "11640": 415263904.0, "11645": 409662080.0, "11650": 411535136.0, "11655": 420439616.0, "11660": 411644512.0, "11665": 418991744.0, "11670": 412023872.0, "11675": 410980800.0, "11680": 418272768.0, "11685": 411549888.0, "11690": 408138784.0, "11695": 411546720.0, "11700": 410795200.0, "11705": 417732640.0, "11710": 408334016.0, "11715": 420164000.0, "11720": 419258656.0, "11725": 406271808.0, "11730": 413029504.0, "11735": 408867392.0, "11740": 420039552.0, "11745": 408935904.0, "11750": 411251456.0, "11755": 408877536.0, "11760": 411588000.0, "11765": 411262592.0, "11770": 409224128.0, "11775": 409231136.0, "11780": 411916704.0, "11785": 408898752.0, "11790": 401986048.0, "11795": 422696352.0, "11800": 402401536.0, "11805": 412528032.0, "11810": 413111168.0, "11815": 412114944.0, "11820": 411875904.0, "11825": 410691648.0, "11830": 409953568.0, "11835": 408327104.0, "11840": 413929120.0, "11845": 412720256.0, "11850": 406968000.0, "11855": 418398496.0, "11860": 408878016.0, "11865": 411772448.0, "11870": 413209888.0, "11875": 422620704.0, "11880": 411303616.0, "11885": 416833120.0, "11890": 402991776.0, "11895": 408375200.0, "11900": 420742528.0, "11905": 412084864.0, "11910": 410833632.0, "11915": 411680736.0, "11920": 414540480.0, "11925": 408619616.0, "11930": 419214240.0, "11935": 
412866176.0, "11940": 410204032.0, "11945": 407755232.0, "11950": 409009600.0, "11955": 406614080.0, "11960": 408367616.0, "11965": 409623776.0, "11970": 412846752.0, "11975": 415592928.0, "11980": 414851680.0, "11985": 407887840.0, "11990": 412461344.0, "11995": 418142528.0, "12000": 410746720.0, "12005": 409569344.0, "12010": 419412640.0, "12015": 417326816.0, "12020": 407119040.0, "12025": 416853952.0, "12030": 413152864.0, "12035": 404797760.0, "12040": 415410048.0, "12045": 410130912.0, "12050": 402673984.0, "12055": 407685088.0, "12060": 410435648.0, "12065": 410124640.0, "12070": 413426944.0, "12075": 416450592.0, "12080": 405155616.0, "12085": 410430592.0, "12090": 414273568.0, "12095": 404104992.0, "12100": 413845184.0, "12105": 404290016.0, "12110": 403658784.0, "12115": 419011232.0, "12120": 407208544.0, "12125": 410897888.0, "12130": 417277408.0, "12135": 408155456.0, "12140": 411857120.0, "12145": 415914912.0, "12150": 412604544.0, "12155": 411894880.0, "12160": 414867616.0, "12165": 405734848.0, "12170": 407009472.0, "12175": 413700448.0, "12180": 418945216.0, "12185": 412468256.0, "12190": 412012256.0, "12195": 411155680.0, "12200": 406632800.0, "12205": 415151328.0, "12210": 405986816.0, "12215": 408417152.0, "12220": 411265280.0, "12225": 415794400.0, "12230": 411269440.0, "12235": 422774624.0, "12240": 410752608.0, "12245": 405789216.0, "12250": 419528320.0, "12255": 413611488.0, "12260": 405175200.0, "12265": 417499200.0, "12270": 417304544.0, "12275": 408278176.0, "12280": 418901088.0, "12285": 409243264.0, "12290": 410367520.0, "12295": 416011744.0, "12300": 421220320.0, "12305": 408816960.0, "12310": 409155104.0, "12315": 410068160.0, "12320": 415683808.0, "12325": 415184256.0, "12330": 416668160.0, "12335": 406764032.0, "12340": 411125952.0, "12345": 413729792.0, "12350": 405146560.0, "12355": 411679872.0, "12360": 413334912.0, "12365": 409113248.0, "12370": 418476768.0, "12375": 408348544.0, "12380": 414497792.0, "12385": 418006208.0, "12390": 413075936.0, "12395": 412161952.0, "12400": 413479840.0, "12405": 416030784.0, "12410": 412686496.0, "12415": 413610976.0, "12420": 414255200.0, "12425": 414497920.0, "12430": 411059168.0, "12435": 407387296.0, "12440": 413257920.0, "12445": 412659744.0, "12450": 414722976.0, "12455": 410524832.0, "12460": 417393216.0, "12465": 413964288.0, "12470": 408419040.0, "12475": 411138464.0, "12480": 415164672.0, "12485": 404820192.0, "12490": 413829152.0, "12495": 414623488.0, "12500": 410444960.0, "12505": 414524800.0, "12510": 420614112.0, "12515": 406614368.0, "12520": 421920928.0, "12525": 412366368.0, "12530": 413130272.0, "12535": 412542752.0, "12540": 416288192.0, "12545": 408971456.0, "12550": 420789184.0, "12555": 409925184.0, "12560": 406612032.0, "12565": 422667264.0, "12570": 419794368.0, "12575": 416043680.0, "12580": 414696576.0, "12585": 415686048.0, "12590": 412500096.0, "12595": 414020704.0, "12600": 414395872.0, "12605": 404905376.0, "12610": 420848448.0, "12615": 405012096.0, "12620": 407337344.0, "12625": 411301408.0, "12630": 411430400.0, "12635": 407173088.0, "12640": 414157056.0, "12645": 414524480.0, "12650": 410151744.0, "12655": 411492448.0, "12660": 412070400.0, "12665": 408891264.0, "12670": 411471104.0, "12675": 406734976.0, "12680": 404447360.0, "12685": 408529280.0, "12690": 420235264.0, "12695": 407347040.0, "12700": 418224064.0, "12705": 413311232.0, "12710": 410616640.0, "12715": 414377792.0, "12720": 413133984.0, "12725": 405265056.0, "12730": 419743584.0, "12735": 411655520.0, "12740": 406911552.0, 
"12745": 418508224.0, "12750": 404238752.0, "12755": 420265568.0, "12760": 408246272.0, "12765": 413213696.0, "12770": 409241056.0, "12775": 425064896.0, "12780": 409271200.0, "12785": 412748192.0, "12790": 413929312.0, "12795": 410438336.0, "12800": 412565376.0, "12805": 410812256.0, "12810": 416921216.0, "12815": 410994976.0, "12820": 400906144.0, "12825": 410593568.0, "12830": 408661120.0, "12835": 411079808.0, "12840": 408252096.0, "12845": 417897856.0, "12850": 408897152.0, "12855": 408977408.0, "12860": 403444864.0, "12865": 416702048.0, "12870": 411817792.0, "12875": 405525184.0, "12880": 413817120.0, "12885": 405724480.0, "12890": 418475968.0, "12895": 410196960.0, "12900": 410878432.0, "12905": 409214368.0, "12910": 409688320.0, "12915": 407434144.0, "12920": 418261504.0, "12925": 416430592.0, "12930": 411550784.0, "12935": 415364384.0, "12940": 416876704.0, "12945": 412528608.0, "12950": 413979456.0, "12955": 409762304.0, "12960": 406191456.0, "12965": 417406656.0, "12970": 417782432.0, "12975": 408083744.0, "12980": 406985344.0, "12985": 413429984.0, "12990": 413059520.0, "12995": 403685344.0, "13000": 409828192.0, "13005": 413118528.0, "13010": 410633472.0, "13015": 414516160.0, "13020": 407710816.0, "13025": 409715840.0, "13030": 414329824.0, "13035": 407616512.0, "13040": 409710560.0, "13045": 416569856.0, "13050": 408600448.0, "13055": 410949024.0, "13060": 421794048.0, "13065": 405416448.0, "13070": 416720288.0, "13075": 403614368.0, "13080": 407631936.0, "13085": 416637312.0, "13090": 407425344.0, "13095": 411581248.0, "13100": 410037600.0, "13105": 410897280.0, "13110": 415521120.0, "13115": 421495808.0, "13120": 418124096.0, "13125": 408675424.0, "13130": 418445120.0, "13135": 413822144.0, "13140": 410620512.0, "13145": 413529632.0, "13150": 409459008.0, "13155": 413516320.0, "13160": 408441024.0, "13165": 405228064.0, "13170": 414839936.0, "13175": 421365600.0, "13180": 397429696.0, "13185": 411442624.0, "13190": 405332768.0, "13195": 416474752.0, "13200": 408482688.0, "13205": 408178752.0, "13210": 413867872.0, "13215": 409628096.0, "13220": 417284384.0, "13225": 409062656.0, "13230": 414703040.0, "13235": 417926560.0, "13240": 414298016.0, "13245": 415581280.0, "13250": 413407424.0, "13255": 412974432.0, "13260": 413567488.0, "13265": 414462368.0, "13270": 409329472.0, "13275": 418507200.0, "13280": 417413888.0, "13285": 411213056.0, "13290": 406801568.0, "13295": 411908128.0, "13300": 407028000.0, "13305": 403561088.0, "13310": 417872992.0, "13315": 409353568.0, "13320": 413894880.0, "13325": 410260448.0, "13330": 410483872.0, "13335": 418155584.0, "13340": 409088224.0, "13345": 409330976.0, "13350": 417283168.0, "13355": 421804672.0, "13360": 408421216.0, "13365": 409431712.0, "13370": 409446688.0, "13375": 418277664.0, "13380": 406731872.0, "13385": 417717472.0, "13390": 409957792.0, "13395": 413546144.0, "13400": 418852512.0, "13405": 413440960.0, "13410": 407532096.0, "13415": 412543488.0, "13420": 411443904.0, "13425": 419361984.0, "13430": 421884960.0, "13435": 413801632.0, "13440": 411660960.0, "13445": 415432416.0, "13450": 406832672.0, "13455": 408232512.0, "13460": 406273504.0, "13465": 411017312.0, "13470": 413882304.0, "13475": 408906592.0, "13480": 414794208.0, "13485": 405264640.0, "13490": 415770048.0, "13495": 411248352.0, "13500": 415874208.0, "13505": 412080256.0, "13510": 406810624.0, "13515": 412704192.0, "13520": 412713792.0, "13525": 407282688.0, "13530": 405820640.0, "13535": 414400160.0, "13540": 408283136.0, "13545": 414355424.0, "13550": 
415651072.0, "13555": 404182240.0, "13560": 402821984.0, "13565": 410324832.0, "13570": 404875840.0, "13575": 407899072.0, "13580": 415393536.0, "13585": 411588352.0, "13590": 419408448.0, "13595": 411409696.0, "13600": 404391040.0, "13605": 410445152.0, "13610": 414971584.0, "13615": 406830112.0, "13620": 408084000.0, "13625": 410129248.0, "13630": 412664928.0, "13635": 416453792.0, "13640": 416555712.0, "13645": 410009952.0, "13650": 414295872.0, "13655": 413669632.0, "13660": 415538336.0, "13665": 404734784.0, "13670": 412804160.0, "13675": 403847616.0, "13680": 410863104.0, "13685": 413001920.0, "13690": 402454400.0, "13695": 410494240.0, "13700": 415346496.0, "13705": 415512864.0, "13710": 411056704.0, "13715": 412777728.0, "13720": 414671008.0, "13725": 413319584.0, "13730": 414710080.0, "13735": 411520192.0, "13740": 410185248.0, "13745": 415547808.0, "13750": 408172384.0, "13755": 411185600.0, "13760": 418638368.0, "13765": 403940160.0, "13770": 410702080.0, "13775": 421778944.0, "13780": 401844224.0, "13785": 413081152.0, "13790": 408697632.0, "13795": 412156192.0, "13800": 414573984.0, "13805": 415395968.0, "13810": 415289248.0, "13815": 408543584.0, "13820": 419686912.0, "13825": 422069152.0, "13830": 407589760.0, "13835": 414588384.0, "13840": 413094272.0, "13845": 406281984.0, "13850": 419202688.0, "13855": 416194464.0, "13860": 411493792.0, "13865": 411254016.0, "13870": 399905856.0, "13875": 415950752.0, "13880": 411722560.0, "13885": 414687840.0, "13890": 412708384.0, "13895": 414793632.0, "13900": 408715776.0, "13905": 406565920.0, "13910": 408796064.0, "13915": 419518816.0, "13920": 410156032.0, "13925": 411014528.0, "13930": 415241056.0, "13935": 414781248.0, "13940": 412457888.0, "13945": 406522624.0, "13950": 414846528.0, "13955": 411461920.0, "13960": 408683936.0, "13965": 424240128.0, "13970": 410125984.0, "13975": 421835392.0, "13980": 410720608.0, "13985": 418411968.0, "13990": 409814784.0, "13995": 409172064.0, "14000": 418789632.0, "14005": 408041632.0, "14010": 413444832.0, "14015": 410659520.0, "14020": 412762176.0, "14025": 407724896.0, "14030": 421844928.0, "14035": 408546976.0, "14040": 422274144.0, "14045": 416017184.0, "14050": 410347232.0, "14055": 417619776.0, "14060": 413712416.0, "14065": 411696800.0, "14070": 407407296.0, "14075": 416008416.0, "14080": 410870080.0, "14085": 410334432.0, "14090": 418866592.0, "14095": 412312928.0, "14100": 416181376.0, "14105": 410992864.0, "14110": 416013728.0, "14115": 414349984.0, "14120": 416280640.0, "14125": 414578624.0, "14130": 423093376.0, "14135": 416708256.0, "14140": 415363072.0, "14145": 409717216.0, "14150": 402871520.0, "14155": 410204288.0, "14160": 417186912.0, "14165": 413689728.0, "14170": 404047360.0, "14175": 402111616.0, "14180": 415892096.0, "14185": 415878304.0, "14190": 414755712.0, "14195": 415858624.0, "14200": 407519360.0, "14205": 406145920.0, "14210": 423775040.0, "14215": 406040192.0, "14220": 415267008.0, "14225": 414274880.0, "14230": 406276096.0, "14235": 415253664.0, "14240": 409539584.0, "14245": 409022912.0, "14250": 406881824.0, "14255": 410243104.0, "14260": 407702464.0, "14265": 410930464.0, "14270": 411785824.0, "14275": 416132640.0, "14280": 416727168.0, "14285": 414817024.0, "14290": 412997792.0, "14295": 416312256.0, "14300": 413597280.0, "14305": 411382336.0, "14310": 416493472.0, "14315": 421527424.0, "14320": 407870240.0, "14325": 414249184.0, "14330": 410982880.0, "14335": 413163840.0, "14340": 408633952.0, "14345": 416434720.0, "14350": 415301184.0, "14355": 418248992.0, 
"14360": 409776704.0, "14365": 416091168.0, "14370": 416410496.0, "14375": 414104448.0, "14380": 410684608.0, "14385": 414414624.0, "14390": 414878688.0, "14395": 415692480.0, "14400": 409905152.0, "14405": 419110464.0, "14410": 407641696.0, "14415": 412165312.0, "14420": 413320480.0, "14425": 418077792.0, "14430": 409916448.0, "14435": 413158592.0, "14440": 416747712.0, "14445": 406771616.0, "14450": 420131520.0, "14455": 407202016.0, "14460": 417543968.0, "14465": 420561344.0, "14470": 415702240.0, "14475": 412444384.0, "14480": 404465664.0, "14485": 417221472.0, "14490": 407677024.0, "14495": 423040896.0, "14500": 409574784.0, "14505": 415302080.0, "14510": 418560256.0, "14515": 416725376.0, "14520": 410239232.0, "14525": 415297152.0, "14530": 407113920.0, "14535": 413975104.0, "14540": 408979712.0, "14545": 416323456.0, "14550": 413736992.0, "14555": 419266976.0, "14560": 413564160.0, "14565": 406438720.0, "14570": 416079680.0, "14575": 408264352.0, "14580": 406787136.0, "14585": 412242752.0, "14590": 411626944.0, "14595": 417970144.0, "14600": 410205408.0, "14605": 404274336.0, "14610": 408274016.0, "14615": 406301728.0, "14620": 406698592.0, "14625": 402490752.0, "14630": 417189088.0, "14635": 409950624.0, "14640": 413970016.0, "14645": 410731712.0, "14650": 419663456.0, "14655": 410892224.0, "14660": 417507136.0, "14665": 416205152.0, "14670": 407927520.0, "14675": 416656384.0, "14680": 414451520.0, "14685": 412394272.0, "14690": 412534464.0, "14695": 406868448.0, "14700": 412381600.0, "14705": 409182624.0, "14710": 416656960.0, "14715": 409526816.0, "14720": 415362848.0, "14725": 419899136.0, "14730": 407707936.0, "14735": 409694400.0, "14740": 407568992.0, "14745": 409039360.0, "14750": 406059840.0, "14755": 413586752.0, "14760": 408729056.0, "14765": 410992480.0, "14770": 415529536.0, "14775": 409001792.0, "14780": 415029312.0, "14785": 405302656.0, "14790": 416578592.0, "14795": 412759456.0, "14800": 415346112.0, "14805": 405159648.0, "14810": 412222080.0, "14815": 414623488.0, "14820": 415405024.0, "14825": 409575264.0, "14830": 409529408.0, "14835": 414851488.0, "14840": 413148064.0, "14845": 415735008.0, "14850": 408706624.0, "14855": 417777408.0, "14860": 418512096.0, "14865": 416532192.0, "14870": 408145664.0, "14875": 412089248.0, "14880": 414091104.0, "14885": 405865184.0, "14890": 411873632.0, "14895": 407552000.0, "14900": 415247712.0, "14905": 423730176.0, "14910": 407900640.0, "14915": 407107296.0, "14920": 409708704.0, "14925": 409305792.0, "14930": 408804000.0, "14935": 408253504.0, "14940": 410284608.0, "14945": 421322208.0, "14950": 409860384.0, "14955": 426887680.0, "14960": 408507200.0, "14965": 407899008.0, "14970": 416527616.0, "14975": 413522176.0, "14980": 416366464.0, "14985": 419028640.0, "14990": 417032992.0, "14995": 419575040.0, "15000": 409853248.0, "15005": 412334112.0, "15010": 409420864.0, "15015": 417956384.0, "15020": 415071424.0, "15025": 413163008.0, "15030": 408814336.0, "15035": 414431264.0, "15040": 412782464.0, "15045": 423251232.0, "15050": 413873696.0, "15055": 409398336.0, "15060": 421932320.0, "15065": 416800352.0, "15070": 414960064.0, "15075": 411043040.0, "15080": 416053696.0, "15085": 412307296.0, "15090": 406388960.0, "15095": 410268512.0, "15100": 414598272.0, "15105": 411614656.0, "15110": 409754944.0, "15115": 414264000.0, "15120": 404840576.0, "15125": 411062368.0, "15130": 404831232.0, "15135": 410469312.0, "15140": 409517952.0, "15145": 412259776.0, "15150": 415050816.0, "15155": 408245568.0, "15160": 415958720.0, "15165": 
412945088.0, "15170": 410110656.0, "15175": 412552160.0, "15180": 410075424.0, "15185": 406095648.0, "15190": 412135808.0, "15195": 408065856.0, "15200": 412062496.0, "15205": 420191392.0, "15210": 410822912.0, "15215": 413143296.0, "15220": 415380320.0, "15225": 417372288.0, "15230": 416036800.0, "15235": 406144064.0, "15240": 415809440.0, "15245": 413041184.0, "15250": 415098464.0, "15255": 408788608.0, "15260": 411995072.0, "15265": 419606432.0, "15270": 407992160.0, "15275": 407718688.0, "15280": 406517632.0, "15285": 410663232.0, "15290": 413921824.0, "15295": 410626336.0, "15300": 412333888.0, "15305": 407286336.0, "15310": 412857472.0, "15315": 412953568.0, "15320": 416187744.0, "15325": 408670496.0, "15330": 410816736.0, "15335": 410832320.0, "15340": 416285056.0, "15345": 414148096.0, "15350": 415671680.0, "15355": 416401472.0, "15360": 412892800.0, "15365": 410457568.0, "15370": 417862816.0, "15375": 408737408.0, "15380": 414763840.0, "15385": 406149536.0, "15390": 408431296.0, "15395": 404359424.0, "15400": 412105440.0, "15405": 416662720.0, "15410": 403636864.0, "15415": 410578208.0, "15420": 408686784.0, "15425": 407738848.0, "15430": 415004192.0, "15435": 411451360.0, "15440": 411308224.0, "15445": 415067328.0, "15450": 407297920.0, "15455": 416666208.0, "15460": 411425760.0, "15465": 414241088.0, "15470": 410561920.0, "15475": 413198336.0, "15480": 408375040.0, "15485": 414440000.0, "15490": 402914720.0, "15495": 406725216.0, "15500": 412218432.0, "15505": 409592256.0, "15510": 416310368.0, "15515": 412989088.0, "15520": 415405312.0, "15525": 412403488.0, "15530": 405130048.0, "15535": 413649216.0, "15540": 410180608.0, "15545": 411101024.0, "15550": 417091840.0, "15555": 415316384.0, "15560": 414882400.0, "15565": 413145472.0, "15570": 411030144.0, "15575": 410586048.0, "15580": 415829696.0, "15585": 407159584.0, "15590": 408317184.0, "15595": 413392064.0, "15600": 422922304.0, "15605": 407847264.0, "15610": 414778048.0, "15615": 406403968.0, "15620": 411318240.0, "15625": 417926656.0, "15630": 411127392.0, "15635": 410436608.0, "15640": 405836544.0, "15645": 416875072.0, "15650": 408596352.0, "15655": 420724736.0, "15660": 410561056.0, "15665": 406772576.0, "15670": 411313696.0, "15675": 410316672.0, "15680": 411800672.0, "15685": 414975584.0, "15690": 410908608.0, "15695": 402847744.0, "15700": 415278624.0, "15705": 411141760.0, "15710": 411254048.0, "15715": 419268960.0, "15720": 404416832.0, "15725": 414470112.0, "15730": 406740992.0, "15735": 413413248.0, "15740": 409985792.0, "15745": 409414560.0, "15750": 414224896.0, "15755": 410853600.0, "15760": 411807456.0, "15765": 410688000.0, "15770": 415543008.0, "15775": 413525568.0, "15780": 412613504.0, "15785": 419237952.0, "15790": 415279904.0, "15795": 402528928.0, "15800": 400186944.0, "15805": 419198688.0, "15810": 402603456.0, "15815": 413331072.0, "15820": 405925888.0, "15825": 419994272.0, "15830": 412333088.0, "15835": 411687040.0, "15840": 406823904.0, "15845": 400992736.0, "15850": 412719648.0, "15855": 413201152.0, "15860": 405933184.0, "15865": 417393216.0, "15870": 418254144.0, "15875": 410101344.0, "15880": 414142720.0, "15885": 418019616.0, "15890": 399554336.0, "15895": 408644256.0, "15900": 400246624.0, "15905": 414155328.0, "15910": 408550272.0, "15915": 419760512.0, "15920": 417298816.0, "15925": 412370784.0, "15930": 417099648.0, "15935": 409352416.0, "15940": 412594432.0, "15945": 411392928.0, "15950": 414576800.0, "15955": 414586048.0, "15960": 414782528.0, "15965": 409057664.0, "15970": 415109056.0, 
"15975": 411199104.0, "15980": 412653664.0, "15985": 412627008.0, "15990": 407838048.0, "15995": 407430880.0, "16000": 406327904.0, "16005": 413594976.0, "16010": 410473088.0, "16015": 413426016.0, "16020": 411759328.0, "16025": 415309632.0, "16030": 418306752.0, "16035": 410454976.0, "16040": 414280256.0, "16045": 408103904.0, "16050": 409534496.0, "16055": 410438720.0, "16060": 413541440.0, "16065": 420091712.0, "16070": 415800704.0, "16075": 418100384.0, "16080": 414012928.0, "16085": 411054496.0, "16090": 409962272.0, "16095": 407187520.0, "16100": 410066592.0, "16105": 407791200.0, "16110": 418949696.0, "16115": 402407872.0, "16120": 410174944.0, "16125": 420186208.0, "16130": 411943712.0, "16135": 413347712.0, "16140": 410057984.0, "16145": 415427744.0, "16150": 412076544.0, "16155": 417233568.0, "16160": 418581472.0, "16165": 409409632.0, "16170": 413002272.0, "16175": 418524032.0, "16180": 413671904.0, "16185": 409373600.0, "16190": 415921600.0, "16195": 420740320.0, "16200": 408673344.0, "16205": 417167360.0, "16210": 415787264.0, "16215": 411862272.0, "16220": 418703520.0, "16225": 405013344.0, "16230": 414699040.0, "16235": 420402368.0, "16240": 405552128.0, "16245": 415035360.0, "16250": 405473056.0, "16255": 405967136.0, "16260": 412309152.0, "16265": 411652032.0, "16270": 397773056.0, "16275": 411081856.0, "16280": 418675712.0, "16285": 408187680.0, "16290": 409751104.0, "16295": 405371040.0, "16300": 416268480.0, "16305": 410736000.0, "16310": 408020000.0, "16315": 416843104.0, "16320": 414388288.0, "16325": 406380672.0, "16330": 416985696.0, "16335": 419766272.0, "16340": 405959520.0, "16345": 409991584.0, "16350": 409848096.0, "16355": 410249568.0, "16360": 397445888.0, "16365": 410630560.0, "16370": 416447264.0, "16375": 410464640.0, "16380": 412293184.0, "16385": 404117728.0, "16390": 415202464.0, "16395": 410438720.0, "16400": 408767520.0, "16405": 409015616.0, "16410": 419974016.0, "16415": 408899456.0, "16420": 417250784.0, "16425": 417589472.0, "16430": 407038272.0, "16435": 410672352.0, "16440": 411449056.0, "16445": 405278528.0, "16450": 408102336.0, "16455": 410093280.0, "16460": 412896768.0, "16465": 409913344.0, "16470": 412756224.0, "16475": 409367392.0, "16480": 421088064.0, "16485": 413039744.0, "16490": 407730176.0, "16495": 406522240.0, "16500": 408859456.0, "16505": 411516544.0, "16510": 400306400.0, "16515": 412775552.0, "16520": 413981024.0, "16525": 413943360.0, "16530": 415361728.0, "16535": 411286880.0, "16540": 406578432.0, "16545": 409504800.0, "16550": 416983520.0, "16555": 411709376.0, "16560": 411107776.0, "16565": 417143296.0, "16570": 411754048.0, "16575": 416764768.0, "16580": 409507232.0, "16585": 403772224.0, "16590": 410465504.0, "16595": 418273152.0, "16600": 404107648.0, "16605": 415542528.0, "16610": 409330784.0, "16615": 413391520.0, "16620": 415793696.0, "16625": 418099104.0, "16630": 411934400.0, "16635": 411521536.0, "16640": 411593632.0, "16645": 411388736.0, "16650": 407068224.0, "16655": 408093696.0, "16660": 403867776.0, "16665": 409259392.0, "16670": 408781184.0, "16675": 411940320.0, "16680": 410667296.0, "16685": 417449312.0, "16690": 412331392.0, "16695": 413866432.0, "16700": 413272960.0, "16705": 411865344.0, "16710": 411812320.0, "16715": 415565376.0, "16720": 409462784.0, "16725": 411160480.0, "16730": 416418496.0, "16735": 406518336.0, "16740": 416268800.0, "16745": 408092160.0, "16750": 401766432.0, "16755": 419639840.0, "16760": 410718944.0, "16765": 408926048.0, "16770": 417168512.0, "16775": 412317408.0, "16780": 
411438624.0, "16785": 410338432.0, "16790": 406784000.0, "16795": 413270304.0, "16800": 412651744.0, "16805": 413761696.0, "16810": 407144000.0, "16815": 398757760.0, "16820": 412297504.0, "16825": 410139488.0, "16830": 411136352.0, "16835": 413750688.0, "16840": 406022208.0, "16845": 416577056.0, "16850": 414127904.0, "16855": 408137536.0, "16860": 410128096.0, "16865": 418443968.0, "16870": 412141248.0, "16875": 414607392.0, "16880": 414087744.0, "16885": 414201952.0, "16890": 410218944.0, "16895": 412134272.0, "16900": 404243008.0, "16905": 411880224.0, "16910": 417923040.0, "16915": 412157152.0, "16920": 409931264.0, "16925": 411632736.0, "16930": 411707296.0, "16935": 419498848.0, "16940": 420366240.0, "16945": 410800384.0, "16950": 412836640.0, "16955": 413333472.0, "16960": 410439840.0, "16965": 412670464.0, "16970": 411889152.0, "16975": 411074144.0, "16980": 412865184.0, "16985": 419942048.0, "16990": 420019520.0, "16995": 414496608.0, "17000": 424268064.0, "17005": 408957312.0, "17010": 414585600.0, "17015": 407925216.0, "17020": 405087968.0, "17025": 412011264.0, "17030": 410478048.0, "17035": 412896864.0, "17040": 413307104.0, "17045": 414115552.0, "17050": 403227520.0, "17055": 405560896.0, "17060": 415158784.0, "17065": 410759744.0, "17070": 411851424.0, "17075": 415566080.0, "17080": 417507712.0, "17085": 413171392.0, "17090": 419198080.0, "17095": 409451168.0, "17100": 417564256.0, "17105": 405871776.0, "17110": 416142944.0, "17115": 410680192.0, "17120": 413849408.0, "17125": 411941056.0, "17130": 417300768.0, "17135": 406647648.0, "17140": 414399168.0, "17145": 412662080.0, "17150": 414233344.0, "17155": 414039232.0, "17160": 405511296.0, "17165": 417026560.0, "17170": 407493376.0, "17175": 418037792.0, "17180": 419356160.0, "17185": 416813216.0, "17190": 414660704.0, "17195": 414270688.0, "17200": 409459648.0, "17205": 415086176.0, "17210": 404081536.0, "17215": 406716512.0, "17220": 408404160.0, "17225": 406878560.0, "17230": 412887520.0, "17235": 410712384.0, "17240": 414372000.0, "17245": 422359616.0, "17250": 404960736.0, "17255": 413646528.0, "17260": 420218336.0, "17265": 409225024.0, "17270": 417984448.0, "17275": 413833280.0, "17280": 407472128.0, "17285": 414571264.0, "17290": 411421600.0, "17295": 416557984.0, "17300": 405020928.0, "17305": 417161728.0, "17310": 407989088.0, "17315": 410008704.0, "17320": 415822784.0, "17325": 397264352.0, "17330": 416360128.0, "17335": 415021120.0, "17340": 410166080.0, "17345": 419657312.0, "17350": 416481344.0, "17355": 409199136.0, "17360": 409173376.0, "17365": 408719904.0, "17370": 402699360.0, "17375": 413787072.0, "17380": 415104608.0, "17385": 410347680.0, "17390": 416941952.0, "17395": 419532416.0, "17400": 409054848.0, "17405": 413920096.0, "17410": 414353344.0, "17415": 403808288.0, "17420": 404103328.0, "17425": 414294368.0, "17430": 406022400.0, "17435": 413980512.0, "17440": 404513792.0, "17445": 408380256.0, "17450": 413233312.0, "17455": 413223264.0, "17460": 413232576.0, "17465": 407129600.0, "17470": 407573600.0, "17475": 409252224.0, "17480": 406044480.0, "17485": 411344128.0, "17490": 409123328.0, "17495": 415280256.0, "17500": 417513440.0, "17505": 406856032.0, "17510": 416962592.0, "17515": 411770048.0, "17520": 411990912.0, "17525": 409274112.0, "17530": 411866688.0, "17535": 411793280.0, "17540": 412453056.0, "17545": 403054848.0, "17550": 416962880.0, "17555": 409884544.0, "17560": 408514560.0, "17565": 416725792.0, "17570": 405316736.0, "17575": 416100480.0, "17580": 411469792.0, "17585": 405906208.0, 
"17590": 417704096.0, "17595": 404116544.0, "17600": 409684928.0, "17605": 409256736.0, "17610": 409281728.0, "17615": 402020768.0, "17620": 402074272.0, "17625": 412050336.0, "17630": 412262176.0, "17635": 411720864.0, "17640": 413394336.0, "17645": 409789696.0, "17650": 414700576.0, "17655": 419364960.0, "17660": 411055648.0, "17665": 409317088.0, "17670": 405888544.0, "17675": 414987008.0, "17680": 413221088.0, "17685": 409427616.0, "17690": 421257632.0, "17695": 407055040.0, "17700": 415942976.0, "17705": 411933920.0, "17710": 406975296.0, "17715": 408777184.0, "17720": 410383040.0, "17725": 416171104.0, "17730": 411157216.0, "17735": 413661216.0, "17740": 415019840.0, "17745": 407880480.0, "17750": 409953920.0, "17755": 413232992.0, "17760": 406559872.0, "17765": 415108480.0, "17770": 420771264.0, "17775": 403820608.0, "17780": 411093632.0, "17785": 408571072.0, "17790": 411816064.0, "17795": 428174016.0, "17800": 412835168.0, "17805": 409151328.0, "17810": 412774336.0, "17815": 414327680.0, "17820": 408718240.0, "17825": 409906720.0, "17830": 414947872.0, "17835": 412527616.0, "17840": 419480512.0, "17845": 417008320.0, "17850": 409319008.0, "17855": 412079296.0, "17860": 417816224.0, "17865": 408929600.0, "17870": 411617856.0, "17875": 409836416.0, "17880": 420434112.0, "17885": 416234656.0, "17890": 415962976.0, "17895": 411586560.0, "17900": 412288288.0, "17905": 411526976.0, "17910": 411715584.0, "17915": 411053312.0, "17920": 408447328.0, "17925": 404235200.0, "17930": 415558400.0, "17935": 415507200.0, "17940": 406669344.0, "17945": 414048128.0, "17950": 420099168.0, "17955": 417598912.0, "17960": 422765248.0, "17965": 411750880.0, "17970": 410144448.0, "17975": 412728064.0, "17980": 410105696.0, "17985": 411087424.0, "17990": 412000480.0, "17995": 411394240.0, "18000": 408583776.0, "18005": 415410720.0, "18010": 418687104.0, "18015": 413001824.0, "18020": 407414048.0, "18025": 409516160.0, "18030": 411923616.0, "18035": 410166016.0, "18040": 418181312.0, "18045": 409344192.0, "18050": 416763680.0, "18055": 414939104.0, "18060": 412936800.0, "18065": 410700128.0, "18070": 409537632.0, "18075": 410188832.0, "18080": 414002848.0, "18085": 418110496.0, "18090": 402172992.0, "18095": 412341504.0, "18100": 418667296.0, "18105": 403326464.0, "18110": 410703168.0, "18115": 413742592.0, "18120": 418261056.0, "18125": 415183584.0, "18130": 408002496.0, "18135": 407256992.0, "18140": 418691424.0, "18145": 409610944.0, "18150": 409124960.0, "18155": 421610240.0, "18160": 409020288.0, "18165": 407234976.0, "18170": 408767648.0, "18175": 413340096.0, "18180": 410958048.0, "18185": 416810624.0, "18190": 411687840.0, "18195": 408020512.0, "18200": 413992288.0, "18205": 407717920.0, "18210": 418078432.0, "18215": 409209888.0, "18220": 408614656.0, "18225": 408477312.0, "18230": 414019456.0, "18235": 415234976.0, "18240": 411960384.0, "18245": 408796128.0, "18250": 416215520.0, "18255": 409486816.0, "18260": 419772768.0, "18265": 408267360.0, "18270": 408882880.0, "18275": 418252192.0, "18280": 414112352.0, "18285": 422162848.0, "18290": 415268192.0, "18295": 403428544.0, "18300": 420774336.0, "18305": 406468864.0, "18310": 421077632.0, "18315": 428270144.0, "18320": 412467488.0, "18325": 413505152.0, "18330": 413549632.0, "18335": 417397472.0, "18340": 415305600.0, "18345": 413451328.0, "18350": 415158368.0, "18355": 423987296.0, "18360": 413324288.0, "18365": 415818240.0, "18370": 416950176.0, "18375": 416349664.0, "18380": 406019776.0, "18385": 402688960.0, "18390": 412278976.0, "18395": 
411485056.0, "18400": 416906624.0, "18405": 405126752.0, "18410": 404135136.0, "18415": 420790816.0, "18420": 413249600.0, "18425": 411586624.0, "18430": 411436192.0, "18435": 410582048.0, "18440": 408570944.0, "18445": 410722592.0, "18450": 413051776.0, "18455": 411314208.0, "18460": 406731296.0, "18465": 417484128.0, "18470": 412573248.0, "18475": 410448416.0, "18480": 419529632.0, "18485": 405180672.0, "18490": 424109728.0, "18495": 411415424.0, "18500": 413732256.0, "18505": 414075456.0, "18510": 416771648.0, "18515": 414102240.0, "18520": 413529600.0, "18525": 404785920.0, "18530": 409181664.0, "18535": 413906080.0, "18540": 408658848.0, "18545": 414729216.0, "18550": 408554848.0, "18555": 419915232.0, "18560": 414633376.0, "18565": 411829344.0, "18570": 405695264.0, "18575": 413557728.0, "18580": 418526208.0, "18585": 415096672.0, "18590": 424292576.0, "18595": 417733536.0, "18600": 418604704.0, "18605": 411442112.0, "18610": 411265728.0, "18615": 412027840.0, "18620": 426011040.0, "18625": 408536192.0, "18630": 409523744.0, "18635": 412519104.0, "18640": 421151968.0, "18645": 413040896.0, "18650": 411303808.0, "18655": 407286880.0, "18660": 410922688.0, "18665": 410816992.0, "18670": 404551648.0, "18675": 410934336.0, "18680": 416845888.0, "18685": 419800512.0, "18690": 415870752.0, "18695": 404941600.0, "18700": 403836512.0, "18705": 413734656.0, "18710": 407222944.0, "18715": 415828832.0, "18720": 408647296.0, "18725": 411327328.0, "18730": 415406624.0, "18735": 419435584.0, "18740": 411225152.0, "18745": 417874656.0, "18750": 408762400.0, "18755": 415056064.0, "18760": 409725664.0, "18765": 410317408.0, "18770": 407079520.0, "18775": 412851168.0, "18780": 404216000.0, "18785": 409463904.0, "18790": 412213408.0, "18795": 407073792.0, "18800": 409818592.0, "18805": 419280800.0, "18810": 417554528.0, "18815": 408209600.0, "18820": 405972256.0, "18825": 416959936.0, "18830": 411566080.0, "18835": 413864288.0, "18840": 417084224.0, "18845": 407670016.0, "18850": 413385312.0, "18855": 407325632.0, "18860": 419148608.0, "18865": 418247776.0, "18870": 408901248.0, "18875": 409249600.0, "18880": 413336608.0, "18885": 408365728.0, "18890": 409470528.0, "18895": 415449728.0, "18900": 415238656.0, "18905": 413695424.0, "18910": 414744096.0, "18915": 414077344.0, "18920": 411156800.0, "18925": 420996704.0, "18930": 410633536.0, "18935": 411545568.0, "18940": 410693760.0, "18945": 420488256.0, "18950": 403753568.0, "18955": 417051264.0, "18960": 406674688.0, "18965": 412248896.0, "18970": 410862752.0, "18975": 416118016.0, "18980": 406218176.0, "18985": 414699232.0, "18990": 411616128.0, "18995": 412067200.0, "19000": 412450560.0, "19005": 411369536.0, "19010": 415937952.0, "19015": 415274752.0, "19020": 406674144.0, "19025": 406815392.0, "19030": 410921888.0, "19035": 404419104.0, "19040": 411259520.0, "19045": 413207744.0, "19050": 404282880.0, "19055": 413085600.0, "19060": 404968000.0, "19065": 420965824.0, "19070": 407557920.0, "19075": 407005472.0, "19080": 419038464.0, "19085": 407394048.0, "19090": 418149056.0, "19095": 411156800.0, "19100": 409444384.0, "19105": 408961280.0, "19110": 413993856.0, "19115": 403310784.0, "19120": 413584640.0, "19125": 403683104.0, "19130": 409338912.0, "19135": 419388928.0, "19140": 408335584.0, "19145": 415915296.0, "19150": 409688480.0, "19155": 412441760.0, "19160": 418482464.0, "19165": 401084512.0, "19170": 409711584.0, "19175": 404632768.0, "19180": 408691488.0, "19185": 413791296.0, "19190": 407553984.0, "19195": 414567104.0, "19200": 415310112.0, 
"19205": 414574400.0, "19210": 418404064.0, "19215": 407714976.0, "19220": 407671136.0, "19225": 407571616.0, "19230": 414897344.0, "19235": 406000768.0, "19240": 411459680.0, "19245": 408501408.0, "19250": 414923872.0, "19255": 419512832.0, "19260": 420328416.0, "19265": 409924064.0, "19270": 415170848.0, "19275": 413594432.0, "19280": 412716832.0, "19285": 414456288.0, "19290": 412364800.0, "19295": 409342432.0, "19300": 415079936.0, "19305": 418535040.0, "19310": 410023008.0, "19315": 420469504.0, "19320": 413501888.0, "19325": 419594912.0, "19330": 411149248.0, "19335": 408000224.0, "19340": 413901856.0, "19345": 415041056.0, "19350": 410592320.0, "19355": 415970464.0, "19360": 415638016.0, "19365": 415852960.0, "19370": 399083488.0, "19375": 401402240.0, "19380": 412633376.0, "19385": 405406304.0, "19390": 410640768.0, "19395": 411674496.0, "19400": 409171904.0, "19405": 411352032.0, "19410": 409339680.0, "19415": 422185920.0, "19420": 408538464.0, "19425": 412623104.0, "19430": 417310048.0, "19435": 409934816.0, "19440": 416477760.0, "19445": 421674688.0, "19450": 420129632.0, "19455": 415626144.0, "19460": 413892192.0, "19465": 417549280.0, "19470": 411884928.0, "19475": 415794592.0, "19480": 414585408.0, "19485": 416051520.0, "19490": 407581632.0, "19495": 413066432.0, "19500": 404276800.0, "19505": 415900128.0, "19510": 411388224.0, "19515": 415099648.0, "19520": 415149504.0, "19525": 407609024.0, "19530": 418693792.0, "19535": 404404096.0, "19540": 412497984.0, "19545": 423197152.0, "19550": 408897408.0, "19555": 416664224.0, "19560": 408850912.0, "19565": 416506592.0, "19570": 411212800.0, "19575": 414671264.0, "19580": 407007872.0, "19585": 415510624.0, "19590": 418816544.0, "19595": 412434432.0, "19600": 411318688.0, "19605": 413666496.0, "19610": 412977760.0, "19615": 412893888.0, "19620": 420609088.0, "19625": 409751008.0, "19630": 416614688.0, "19635": 407548736.0, "19640": 403942496.0, "19645": 405373216.0, "19650": 407348128.0, "19655": 409148064.0, "19660": 418983808.0, "19665": 412971008.0, "19670": 409399776.0, "19675": 407666528.0, "19680": 412713760.0, "19685": 415746976.0, "19690": 411044800.0, "19695": 409970112.0, "19700": 411167104.0, "19705": 409869920.0, "19710": 418025152.0, "19715": 408120256.0, "19720": 409303392.0, "19725": 409807520.0, "19730": 410351392.0, "19735": 408406528.0, "19740": 403326656.0, "19745": 406561824.0, "19750": 412858560.0, "19755": 417861088.0, "19760": 411190528.0, "19765": 409534048.0, "19770": 413665792.0, "19775": 412734784.0, "19780": 412345312.0, "19785": 408027232.0, "19790": 417489312.0, "19795": 410693344.0, "19800": 418244800.0, "19805": 412187040.0, "19810": 416294528.0, "19815": 407152256.0, "19820": 410340160.0, "19825": 410764640.0, "19830": 411476448.0, "19835": 408448192.0, "19840": 414655808.0, "19845": 419568928.0, "19850": 406367680.0, "19855": 412313952.0, "19860": 415858848.0, "19865": 412070496.0, "19870": 408672160.0, "19875": 414939072.0, "19880": 413201248.0, "19885": 409922400.0, "19890": 412048800.0, "19895": 410020224.0, "19900": 410075840.0, "19905": 412940000.0, "19910": 414263168.0, "19915": 412676832.0, "19920": 407743520.0, "19925": 420247552.0, "19930": 411710720.0, "19935": 415620000.0, "19940": 414421344.0, "19945": 410101600.0, "19950": 408988352.0, "19955": 416256096.0, "19960": 402490112.0, "19965": 408745888.0, "19970": 422249504.0, "19975": 408895968.0, "19980": 413087200.0, "19985": 414572704.0, "19990": 411535168.0, "19995": 413508384.0, "20000": 402569472.0, "20005": 408889344.0, "20010": 
418075136.0, "20015": 410048768.0, "20020": 416121952.0, "20025": 405886240.0, "20030": 413847680.0, "20035": 407409408.0, "20040": 411192544.0, "20045": 417178944.0, "20050": 416621952.0, "20055": 413747104.0, "20060": 417660928.0, "20065": 412243200.0, "20070": 416387584.0, "20075": 411064096.0, "20080": 418697920.0, "20085": 424831648.0, "20090": 413290944.0, "20095": 413815904.0, "20100": 406725184.0, "20105": 419155872.0, "20110": 404200000.0, "20115": 412809440.0, "20120": 413000960.0, "20125": 411457216.0, "20130": 410462880.0, "20135": 410847232.0, "20140": 408533984.0, "20145": 404060992.0, "20150": 417029408.0, "20155": 414560768.0, "20160": 407073344.0, "20165": 412733536.0, "20170": 408379552.0, "20175": 419107040.0, "20180": 412535808.0, "20185": 405930624.0, "20190": 414432224.0, "20195": 413327968.0, "20200": 405766144.0, "20205": 409937984.0, "20210": 416881888.0, "20215": 407882944.0, "20220": 413686432.0, "20225": 406863168.0, "20230": 416222464.0, "20235": 408207200.0, "20240": 423153472.0, "20245": 406585056.0, "20250": 409257888.0, "20255": 411868384.0, "20260": 412083264.0, "20265": 414864128.0, "20270": 414590144.0, "20275": 405081696.0, "20280": 415446848.0, "20285": 414018176.0, "20290": 413303008.0, "20295": 406314944.0, "20300": 410501280.0, "20305": 416356384.0, "20310": 414040992.0, "20315": 410520576.0, "20320": 410577600.0, "20325": 410110720.0, "20330": 420064576.0, "20335": 414459744.0, "20340": 408932160.0, "20345": 404067104.0, "20350": 403946336.0, "20355": 417242976.0, "20360": 406385824.0, "20365": 411881312.0, "20370": 412223808.0, "20375": 409743360.0, "20380": 416056736.0, "20385": 408751584.0, "20390": 413151776.0, "20395": 414881408.0, "20400": 409417856.0, "20405": 415199200.0, "20410": 421822720.0, "20415": 406805536.0, "20420": 411158624.0, "20425": 411038336.0, "20430": 411371968.0, "20435": 414510304.0, "20440": 409683424.0, "20445": 411538048.0, "20450": 411293312.0, "20455": 418505024.0, "20460": 407069632.0, "20465": 418164384.0, "20470": 413494624.0, "20475": 414124096.0, "20480": 412794560.0, "20485": 416333664.0, "20490": 409870912.0, "20495": 416313184.0, "20500": 413283392.0, "20505": 409782848.0, "20510": 419167424.0, "20515": 411709088.0, "20520": 414716992.0, "20525": 409342944.0, "20530": 409857408.0, "20535": 413854976.0, "20540": 408939488.0, "20545": 427380896.0, "20550": 405747040.0, "20555": 412877824.0, "20560": 415042368.0, "20565": 415022336.0, "20570": 415259520.0, "20575": 416400896.0, "20580": 403938688.0, "20585": 414416544.0, "20590": 408415072.0, "20595": 404913056.0, "20600": 408419840.0, "20605": 407509696.0, "20610": 408921888.0, "20615": 415695872.0, "20620": 408726336.0, "20625": 411368608.0, "20630": 415452928.0, "20635": 418441184.0, "20640": 415481184.0, "20645": 421594144.0, "20650": 416409600.0, "20655": 408116480.0, "20660": 411919296.0, "20665": 413586688.0, "20670": 413259648.0, "20675": 413050400.0, "20680": 412055392.0, "20685": 412826016.0, "20690": 409402208.0, "20695": 415799104.0, "20700": 409565120.0, "20705": 409883936.0, "20710": 411809152.0, "20715": 416490720.0, "20720": 413156224.0, "20725": 411161728.0, "20730": 411398816.0, "20735": 415444864.0, "20740": 419458080.0, "20745": 405163808.0, "20750": 417201024.0, "20755": 413085888.0, "20760": 422484640.0, "20765": 417028032.0, "20770": 408711840.0, "20775": 407249184.0, "20780": 410171840.0, "20785": 418905568.0, "20790": 423057568.0, "20795": 410384928.0, "20800": 408250816.0, "20805": 416966944.0, "20810": 413731456.0, "20815": 412908544.0, 
"20820": 416137920.0, "20825": 404774080.0, "20830": 417087712.0, "20835": 403045440.0, "20840": 410037088.0, "20845": 413323264.0, "20850": 416867136.0, "20855": 413173088.0, "20860": 405334112.0, "20865": 412472320.0, "20870": 415194944.0, "20875": 409439616.0, "20880": 413350368.0, "20885": 410201664.0, "20890": 409082784.0, "20895": 412555040.0, "20900": 412189536.0, "20905": 412259840.0, "20910": 408011072.0, "20915": 403736832.0, "20920": 416718752.0, "20925": 404656608.0, "20930": 415319360.0, "20935": 414533184.0, "20940": 423143424.0, "20945": 416456448.0, "20950": 408547680.0, "20955": 407838112.0, "20960": 418401856.0, "20965": 405085184.0, "20970": 412671392.0, "20975": 415452992.0, "20980": 404419936.0, "20985": 413512672.0, "20990": 410620608.0, "20995": 403741440.0, "21000": 409871264.0, "21005": 410435584.0, "21010": 418829952.0, "21015": 407941408.0, "21020": 413339968.0, "21025": 412251168.0, "21030": 415039840.0, "21035": 414166944.0, "21040": 408257120.0, "21045": 412312064.0, "21050": 417625440.0, "21055": 406232224.0, "21060": 409569632.0, "21065": 415952832.0, "21070": 416282304.0, "21075": 412728128.0, "21080": 417127488.0, "21085": 416681792.0, "21090": 412442336.0, "21095": 417915776.0, "21100": 410897824.0, "21105": 410443168.0, "21110": 412096576.0, "21115": 410352160.0, "21120": 417513696.0, "21125": 418094336.0, "21130": 410561184.0, "21135": 414794080.0, "21140": 418340800.0, "21145": 409464672.0, "21150": 407962944.0, "21155": 417527008.0, "21160": 409945536.0, "21165": 413742272.0, "21170": 407055488.0, "21175": 411071520.0, "21180": 413535392.0, "21185": 413026080.0, "21190": 426695840.0, "21195": 405755936.0, "21200": 409803456.0, "21205": 402702208.0, "21210": 409381920.0, "21215": 420295296.0, "21220": 408148960.0, "21225": 407524064.0, "21230": 416752480.0, "21235": 412317312.0, "21240": 407183360.0, "21245": 415490816.0, "21250": 405625600.0, "21255": 416093440.0, "21260": 409883264.0, "21265": 411608928.0, "21270": 405792768.0, "21275": 413779296.0, "21280": 415663840.0, "21285": 409326752.0, "21290": 410384160.0, "21295": 412148960.0, "21300": 411116608.0, "21305": 410583616.0, "21310": 410644224.0, "21315": 411709120.0, "21320": 418371040.0, "21325": 413618400.0, "21330": 424024320.0, "21335": 420999232.0, "21340": 419021696.0, "21345": 408752224.0, "21350": 412612096.0, "21355": 414639648.0, "21360": 411044800.0, "21365": 407760032.0, "21370": 407324128.0, "21375": 410101248.0, "21380": 410610304.0, "21385": 410166592.0, "21390": 410226848.0, "21395": 417601344.0, "21400": 410154240.0, "21405": 415633344.0, "21410": 411953120.0, "21415": 412540160.0, "21420": 416702048.0, "21425": 417102784.0, "21430": 426717472.0, "21435": 410496448.0, "21440": 413774336.0, "21445": 411682272.0, "21450": 409479872.0, "21455": 416407904.0, "21460": 421082848.0, "21465": 410556768.0, "21470": 406629888.0, "21475": 410350048.0, "21480": 415307168.0, "21485": 413522240.0, "21490": 403550880.0, "21495": 421376960.0, "21500": 405186688.0, "21505": 418321568.0, "21510": 418466368.0, "21515": 404490592.0, "21520": 410016128.0, "21525": 406053024.0, "21530": 414175680.0, "21535": 414242912.0, "21540": 414882528.0, "21545": 414529504.0, "21550": 415778880.0, "21555": 422159808.0, "21560": 410270752.0, "21565": 408782528.0, "21570": 410824192.0, "21575": 413070240.0, "21580": 410121696.0, "21585": 413777472.0, "21590": 416295712.0, "21595": 413909344.0, "21600": 418438720.0, "21605": 405393696.0, "21610": 411723904.0, "21615": 418372928.0, "21620": 412801792.0, "21625": 
414278240.0, "21630": 416205728.0, "21635": 412894368.0, "21640": 411682080.0, "21645": 421283488.0, "21650": 417175968.0, "21655": 412144896.0, "21660": 415207744.0, "21665": 412947552.0, "21670": 411333920.0, "21675": 415746592.0, "21680": 416873440.0, "21685": 414757312.0, "21690": 408075520.0, "21695": 407757280.0, "21700": 412861472.0, "21705": 413505408.0, "21710": 415010496.0, "21715": 405795808.0, "21720": 420578720.0, "21725": 407982784.0, "21730": 414164864.0, "21735": 415067552.0, "21740": 410654464.0, "21745": 418618560.0, "21750": 410312160.0, "21755": 412042464.0, "21760": 405805984.0, "21765": 406633472.0, "21770": 413807712.0, "21775": 414789568.0, "21780": 415976960.0, "21785": 398154400.0, "21790": 416566752.0, "21795": 417202688.0, "21800": 410834176.0, "21805": 409721088.0, "21810": 412676896.0, "21815": 411763360.0, "21820": 418318400.0, "21825": 412911264.0, "21830": 404585600.0, "21835": 414992800.0, "21840": 409566784.0, "21845": 420918144.0, "21850": 406934560.0, "21855": 415502144.0, "21860": 419220160.0, "21865": 414232480.0, "21870": 416056128.0, "21875": 412057248.0, "21880": 408750304.0, "21885": 417448640.0, "21890": 417483872.0, "21895": 405272160.0, "21900": 414002944.0, "21905": 413475488.0, "21910": 412748128.0, "21915": 416160192.0, "21920": 417418048.0, "21925": 413482304.0, "21930": 410519136.0, "21935": 403782944.0, "21940": 411360384.0, "21945": 415220736.0, "21950": 404434176.0, "21955": 416508352.0, "21960": 412169120.0, "21965": 401651616.0, "21970": 406695104.0, "21975": 413363392.0, "21980": 414902112.0, "21985": 417173696.0, "21990": 412177152.0, "21995": 414389632.0, "22000": 407262976.0, "22005": 412202816.0, "22010": 422671264.0, "22015": 419997888.0, "22020": 403653056.0, "22025": 411229632.0, "22030": 410847392.0, "22035": 406487296.0, "22040": 415415072.0, "22045": 411510592.0, "22050": 412393632.0, "22055": 405321472.0, "22060": 412734304.0, "22065": 416715360.0, "22070": 405623520.0, "22075": 405564992.0, "22080": 409543360.0, "22085": 408135040.0, "22090": 412380128.0, "22095": 414238016.0, "22100": 413230240.0, "22105": 414362848.0, "22110": 404919904.0, "22115": 413887104.0, "22120": 412071808.0, "22125": 406509664.0, "22130": 404890400.0, "22135": 420840672.0, "22140": 419543360.0, "22145": 408540704.0, "22150": 412880032.0, "22155": 415953152.0, "22160": 411657312.0, "22165": 411606912.0, "22170": 411646176.0, "22175": 408148256.0, "22180": 409308032.0, "22185": 410284128.0, "22190": 410640576.0, "22195": 415392064.0, "22200": 409084576.0, "22205": 418902656.0, "22210": 414953280.0, "22215": 414640160.0, "22220": 411663168.0, "22225": 408150720.0, "22230": 414628928.0, "22235": 408316288.0, "22240": 416297312.0, "22245": 414155808.0, "22250": 406869408.0, "22255": 425966048.0, "22260": 414848160.0, "22265": 411601280.0, "22270": 419840960.0, "22275": 410488032.0, "22280": 409195520.0, "22285": 417774400.0, "22290": 408751968.0, "22295": 413544128.0, "22300": 418550656.0, "22305": 409471040.0, "22310": 413158208.0, "22315": 409223424.0, "22320": 411010144.0, "22325": 406960096.0, "22330": 408077088.0, "22335": 413780256.0, "22340": 414168096.0, "22345": 414353504.0, "22350": 406885408.0, "22355": 404241632.0, "22360": 410064128.0, "22365": 413233472.0, "22370": 411259744.0, "22375": 420584992.0, "22380": 413592384.0, "22385": 417549152.0, "22390": 417005952.0, "22395": 418157952.0, "22400": 406706240.0, "22405": 414817312.0, "22410": 421229280.0, "22415": 414182912.0, "22420": 406255200.0, "22425": 411109088.0, "22430": 400281568.0, 
"22435": 422028032.0, "22440": 412921728.0, "22445": 409312320.0, "22450": 419036736.0, "22455": 416006464.0, "22460": 410313120.0, "22465": 414479040.0, "22470": 414898976.0, "22475": 405126336.0, "22480": 414993280.0, "22485": 409896192.0, "22490": 407962784.0, "22495": 421068192.0, "22500": 428335488.0, "22505": 408860864.0, "22510": 416224096.0, "22515": 410887296.0, "22520": 407073792.0, "22525": 409517856.0, "22530": 417247712.0, "22535": 409874304.0, "22540": 412773888.0, "22545": 401909120.0, "22550": 405686208.0, "22555": 416561152.0, "22560": 410234720.0, "22565": 416026816.0, "22570": 408207744.0, "22575": 408415872.0, "22580": 421068864.0, "22585": 402716416.0, "22590": 413940512.0, "22595": 413268320.0, "22600": 413908480.0, "22605": 421645344.0, "22610": 409965280.0, "22615": 419689152.0, "22620": 403430976.0, "22625": 417358624.0, "22630": 414272736.0, "22635": 402032480.0, "22640": 411767200.0, "22645": 408708704.0, "22650": 408963424.0, "22655": 412519808.0, "22660": 405398048.0, "22665": 414052928.0, "22670": 416524000.0, "22675": 416145344.0, "22680": 407607392.0, "22685": 407885824.0, "22690": 409554016.0, "22695": 409760832.0, "22700": 415986176.0, "22705": 412858400.0, "22710": 407613664.0, "22715": 419989568.0, "22720": 414298176.0, "22725": 412750784.0, "22730": 408803072.0, "22735": 414047424.0, "22740": 416655520.0, "22745": 407329824.0, "22750": 408114144.0, "22755": 411170048.0, "22760": 414339232.0, "22765": 406370592.0, "22770": 418232000.0, "22775": 422617120.0, "22780": 412258688.0, "22785": 408531936.0, "22790": 404731776.0, "22795": 417563520.0, "22800": 402295488.0, "22805": 416774624.0, "22810": 416732512.0, "22815": 408508576.0, "22820": 413748704.0, "22825": 411780320.0, "22830": 410078784.0, "22835": 415563232.0, "22840": 414812672.0, "22845": 415509760.0, "22850": 410615040.0, "22855": 410763936.0, "22860": 420271360.0, "22865": 411120224.0, "22870": 404958368.0, "22875": 409488992.0, "22880": 410166080.0, "22885": 417433504.0, "22890": 410051712.0, "22895": 414493024.0, "22900": 419995456.0, "22905": 412130208.0, "22910": 414427424.0, "22915": 406901856.0, "22920": 413312704.0, "22925": 410162784.0, "22930": 413121152.0, "22935": 406866720.0, "22940": 405039232.0, "22945": 413339040.0, "22950": 413940320.0, "22955": 406184000.0, "22960": 416840640.0, "22965": 412347520.0, "22970": 411229760.0, "22975": 416422112.0, "22980": 403472032.0, "22985": 416203136.0, "22990": 411315840.0, "22995": 411718336.0, "23000": 411679616.0, "23005": 410345056.0, "23010": 406836960.0, "23015": 415311392.0, "23020": 400304192.0, "23025": 405766016.0, "23030": 421428480.0, "23035": 405602272.0, "23040": 413513376.0, "23045": 416460256.0, "23050": 422023680.0, "23055": 416221312.0, "23060": 415668896.0, "23065": 417714944.0, "23070": 408324512.0, "23075": 410770336.0, "23080": 427085024.0, "23085": 418206464.0, "23090": 412043488.0, "23095": 409222208.0, "23100": 413935296.0, "23105": 412623680.0, "23110": 417435552.0, "23115": 409290432.0, "23120": 415626752.0, "23125": 409674944.0, "23130": 410084288.0, "23135": 406099680.0, "23140": 411324288.0, "23145": 413734400.0, "23150": 408442272.0, "23155": 415728032.0, "23160": 406565952.0, "23165": 410423648.0, "23170": 410667328.0, "23175": 409247872.0, "23180": 410779552.0, "23185": 410370144.0, "23190": 420266208.0, "23195": 410509248.0, "23200": 415073568.0, "23205": 410576800.0, "23210": 414713376.0, "23215": 414170624.0, "23220": 410103712.0, "23225": 415668512.0, "23230": 414626752.0, "23235": 417606880.0, "23240": 
411099136.0, "23245": 416879328.0, "23250": 413469920.0, "23255": 407744672.0, "23260": 409376736.0, "23265": 406184096.0, "23270": 411729568.0, "23275": 406447008.0, "23280": 412712352.0, "23285": 415868864.0, "23290": 411529280.0, "23295": 416552224.0, "23300": 417038496.0, "23305": 409062528.0, "23310": 418020960.0, "23315": 412007104.0, "23320": 408970176.0, "23325": 411263616.0, "23330": 406555488.0, "23335": 410467904.0, "23340": 412834272.0, "23345": 414015744.0, "23350": 407894496.0, "23355": 412452224.0, "23360": 413296352.0, "23365": 415192128.0, "23370": 412412864.0, "23375": 419196480.0, "23380": 408946080.0, "23385": 417428704.0, "23390": 410234144.0, "23395": 413275392.0, "23400": 402197152.0, "23405": 406668544.0, "23410": 408687424.0, "23415": 415658432.0, "23420": 408566912.0, "23425": 404239840.0, "23430": 411056704.0, "23435": 414429248.0, "23440": 416294112.0, "23445": 416175008.0, "23450": 412674496.0, "23455": 402290752.0, "23460": 421080704.0, "23465": 417812608.0, "23470": 409071040.0, "23475": 407091232.0, "23480": 420267648.0, "23485": 410929664.0, "23490": 411153024.0, "23495": 409463616.0, "23500": 411115200.0, "23505": 413945408.0, "23510": 406810784.0, "23515": 412528288.0, "23520": 405644288.0, "23525": 406164608.0, "23530": 409886432.0, "23535": 414528288.0, "23540": 406788704.0, "23545": 403332160.0, "23550": 417408128.0, "23555": 412920448.0, "23560": 409746688.0, "23565": 408283104.0, "23570": 411532160.0, "23575": 416517280.0, "23580": 403141184.0, "23585": 409821120.0, "23590": 404848000.0, "23595": 416785568.0, "23600": 415226112.0, "23605": 409122496.0, "23610": 419836864.0, "23615": 417178912.0, "23620": 409572416.0, "23625": 409390048.0, "23630": 416394464.0, "23635": 404077920.0, "23640": 409163872.0, "23645": 410788448.0, "23650": 416434176.0, "23655": 403506752.0, "23660": 413237472.0, "23665": 412671584.0, "23670": 410666272.0, "23675": 405479168.0, "23680": 405858432.0, "23685": 407956000.0, "23690": 414320576.0, "23695": 405872384.0, "23700": 413409920.0, "23705": 413633408.0, "23710": 420287648.0, "23715": 411337568.0, "23720": 416650880.0, "23725": 413528288.0, "23730": 403308800.0, "23735": 413855936.0, "23740": 410571840.0, "23745": 401983040.0, "23750": 421118880.0, "23755": 418313728.0, "23760": 406194656.0, "23765": 413859392.0, "23770": 401853728.0, "23775": 417965984.0, "23780": 408081088.0, "23785": 412912032.0, "23790": 409903584.0, "23795": 411784832.0, "23800": 413323168.0, "23805": 407864448.0, "23810": 415687328.0, "23815": 413970336.0, "23820": 409883328.0, "23825": 418402944.0, "23830": 410240800.0, "23835": 409738496.0, "23840": 412774400.0, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", 
"24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", 
"25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", 
"26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", 
"27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", 
"28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", 
"29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", 
"30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", 
"31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", 
"33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", 
"34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", 
"35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", 
"36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", 
"37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", 
"38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", 
"39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", 
"40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", 
"41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", 
"42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", 
"44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", 
"45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", 
"46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", 
"47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", 
"48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", 
"49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", 
"50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 17447112704.0, "5": 17447112704.0, "10": 17447112704.0, "15": 17447112704.0, "20": 17447112704.0, "25": 17447112704.0, "30": 17447112704.0, "35": 17447112704.0, "40": 17447112704.0, "45": 17447112704.0, "50": 17447112704.0, "55": 17447112704.0, "60": 17447112704.0, "65": 17447112704.0, "70": 17447112704.0, "75": 17447112704.0, "80": 17447112704.0, "85": 17447112704.0, "90": 17447112704.0, "95": 17447112704.0, "100": 17447112704.0, "105": 17447112704.0, "110": 17447112704.0, "115": 17447112704.0, "120": 17447112704.0, "125": 17447112704.0, "130": 17447112704.0, "135": 17447112704.0, "140": 17447112704.0, "145": 17447112704.0, "150": 17447112704.0, "155": 17447112704.0, "160": 17447112704.0, "165": 17447112704.0, "170": 17447112704.0, "175": 17447112704.0, "180": 17447112704.0, "185": 17447112704.0, "190": 17447112704.0, "195": 17447112704.0, "200": 17447112704.0, "205": 17447112704.0, "210": 17447112704.0, "215": 17447112704.0, "220": 17447112704.0, "225": 17447112704.0, "230": 17447112704.0, "235": 17447112704.0, "240": 17447112704.0, "245": 17447112704.0, "250": 17447112704.0, "255": 17447112704.0, "260": 17447112704.0, "265": 17447112704.0, "270": 17447112704.0, "275": 17447112704.0, "280": 17447112704.0, "285": 17447112704.0, "290": 17447112704.0, "295": 17447112704.0, "300": 17447112704.0, "305": 17447112704.0, "310": 17447112704.0, "315": 17447112704.0, "320": 17447112704.0, "325": 17447112704.0, "330": 17447112704.0, "335": 17447112704.0, "340": 17447112704.0, "345": 17447112704.0, "350": 17447112704.0, "355": 17447112704.0, "360": 17447112704.0, "365": 17447112704.0, "370": 17447112704.0, "375": 17447112704.0, "380": 17447112704.0, "385": 17447112704.0, "390": 17447112704.0, "395": 17447112704.0, "400": 17447112704.0, "405": 17447112704.0, "410": 17447112704.0, "415": 17447112704.0, "420": 17447112704.0, "425": 17447112704.0, "430": 17447112704.0, "435": 17447112704.0, "440": 17447112704.0, "445": 17447112704.0, "450": 17447112704.0, "455": 17447112704.0, "460": 17447112704.0, "465": 17447112704.0, "470": 17447112704.0, "475": 17447112704.0, "480": 17447112704.0, "485": 17447112704.0, "490": 17447112704.0, "495": 17447112704.0, "500": 17447112704.0, "505": 17447112704.0, "510": 17447112704.0, "515": 17448062976.0, "520": 17448013824.0, "525": 17448480768.0, "530": 17448013824.0, "535": 17448013824.0, "540": 17448013824.0, "545": 17448013824.0, "550": 17448013824.0, "555": 17448013824.0, "560": 17448013824.0, "565": 17448013824.0, "570": 17448013824.0, "575": 17448013824.0, "580": 17448013824.0, "585": 17448013824.0, "590": 17448013824.0, "595": 17448013824.0, "600": 17448013824.0, "605": 17448013824.0, "610": 17448337408.0, "615": 17448013824.0, "620": 17448013824.0, "625": 17448013824.0, "630": 17448013824.0, "635": 17448013824.0, "640": 17448013824.0, "645": 17448013824.0, "650": 17448013824.0, "655": 17448013824.0, "660": 17448013824.0, "665": 17448013824.0, "670": 17448013824.0, "675": 17448013824.0, "680": 17448013824.0, "685": 17448546304.0, "690": 17448013824.0, "695": 17448013824.0, "700": 17448013824.0, "705": 17448136704.0, "710": 17448013824.0, "715": 
17448013824.0, "720": 17448493056.0, "725": 17448013824.0, "730": 17448013824.0, "735": 17448013824.0, "740": 17448013824.0, "745": 17448013824.0, "750": 17448013824.0, "755": 17448013824.0, "760": 17448013824.0, "765": 17448013824.0, "770": 17448013824.0, "775": 17448013824.0, "780": 17448013824.0, "785": 17448013824.0, "790": 17448013824.0, "795": 17448013824.0, "800": 17448013824.0, "805": 17448013824.0, "810": 17448013824.0, "815": 17448034304.0, "820": 17448013824.0, "825": 17448013824.0, "830": 17448013824.0, "835": 17448013824.0, "840": 17448013824.0, "845": 17448013824.0, "850": 17448013824.0, "855": 17448013824.0, "860": 17448013824.0, "865": 17448013824.0, "870": 17448013824.0, "875": 17448013824.0, "880": 17448013824.0, "885": 17448013824.0, "890": 17448013824.0, "895": 17448013824.0, "900": 17448173568.0, "905": 17448013824.0, "910": 17448013824.0, "915": 17448013824.0, "920": 17448013824.0, "925": 17448013824.0, "930": 17448013824.0, "935": 17448013824.0, "940": 17448013824.0, "945": 17448013824.0, "950": 17448501248.0, "955": 17448013824.0, "960": 17448013824.0, "965": 17448013824.0, "970": 17448013824.0, "975": 17448013824.0, "980": 17448013824.0, "985": 17448013824.0, "990": 17448013824.0, "995": 17448013824.0, "1000": 17448013824.0, "1005": 17448013824.0, "1010": 17448013824.0, "1015": 17448013824.0, "1020": 17448013824.0, "1025": 17448013824.0, "1030": 17448013824.0, "1035": 17448013824.0, "1040": 17448013824.0, "1045": 17448734720.0, "1050": 17448357888.0, "1055": 17448013824.0, "1060": 17448013824.0, "1065": 17448013824.0, "1070": 17448013824.0, "1075": 17448013824.0, "1080": 17446983680.0, "1085": 17446985728.0, "1090": 17447747584.0, "1095": 17446985728.0, "1100": 17446985728.0, "1105": 17446985728.0, "1110": 17446985728.0, "1115": 17446985728.0, "1120": 17447010304.0, "1125": 17446985728.0, "1130": 17446985728.0, "1135": 17446985728.0, "1140": 17446985728.0, "1145": 17446985728.0, "1150": 17446985728.0, "1155": 17446985728.0, "1160": 17446985728.0, "1165": 17446985728.0, "1170": 17446985728.0, "1175": 17446985728.0, "1180": 17446985728.0, "1185": 17447665664.0, "1190": 17446985728.0, "1195": 17446985728.0, "1200": 17446985728.0, "1205": 17446985728.0, "1210": 17447911424.0, "1215": 17446985728.0, "1220": 17446985728.0, "1225": 17446985728.0, "1230": 17446985728.0, "1235": 17447886848.0, "1240": 17446985728.0, "1245": 17446985728.0, "1250": 17446985728.0, "1255": 17447821312.0, "1260": 17446985728.0, "1265": 17446985728.0, "1270": 17446985728.0, "1275": 17446985728.0, "1280": 17446985728.0, "1285": 17446985728.0, "1290": 17446985728.0, "1295": 17446985728.0, "1300": 17446985728.0, "1305": 17446985728.0, "1310": 17446985728.0, "1315": 17446985728.0, "1320": 17446985728.0, "1325": 17446985728.0, "1330": 17446985728.0, "1335": 17446985728.0, "1340": 17446985728.0, "1345": 17446985728.0, "1350": 17446985728.0, "1355": 17446985728.0, "1360": 17446985728.0, "1365": 17447591936.0, "1370": 17447071744.0, "1375": 17446985728.0, "1380": 17446985728.0, "1385": 17446985728.0, "1390": 17446985728.0, "1395": 17446985728.0, "1400": 17446985728.0, "1405": 17446985728.0, "1410": 17446985728.0, "1415": 17446985728.0, "1420": 17446985728.0, "1425": 17446985728.0, "1430": 17446985728.0, "1435": 17447456768.0, "1440": 17446985728.0, "1445": 17447145472.0, "1450": 17447649280.0, "1455": 17446985728.0, "1460": 17447378944.0, "1465": 17446985728.0, "1470": 17448017920.0, "1475": 17446985728.0, "1480": 17446985728.0, "1485": 17447174144.0, "1490": 17446985728.0, "1495": 17446985728.0, "1500": 
17446985728.0, "1505": 17446985728.0, "1510": 17446985728.0, "1515": 17446985728.0, "1520": 17446985728.0, "1525": 17446985728.0, "1530": 17446985728.0, "1535": 17446985728.0, "1540": 17446985728.0, "1545": 17446985728.0, "1550": 17446985728.0, "1555": 17446985728.0, "1560": 17446985728.0, "1565": 17447239680.0, "1570": 17447428096.0, "1575": 17446985728.0, "1580": 17446985728.0, "1585": 17446985728.0, "1590": 17446985728.0, "1595": 17446985728.0, "1600": 17446985728.0, "1605": 17446985728.0, "1610": 17446985728.0, "1615": 17446985728.0, "1620": 17446985728.0, "1625": 17446985728.0, "1630": 17446985728.0, "1635": 17446985728.0, "1640": 17446985728.0, "1645": 17446985728.0, "1650": 17446985728.0, "1655": 17446985728.0, "1660": 17446985728.0, "1665": 17446985728.0, "1670": 17446985728.0, "1675": 17447780352.0, "1680": 17446985728.0, "1685": 17447010304.0, "1690": 17446985728.0, "1695": 17446985728.0, "1700": 17446985728.0, "1705": 17446985728.0, "1710": 17446985728.0, "1715": 17446985728.0, "1720": 17446985728.0, "1725": 17446985728.0, "1730": 17447448576.0, "1735": 17446985728.0, "1740": 17446985728.0, "1745": 17446985728.0, "1750": 17447858176.0, "1755": 17446985728.0, "1760": 17446985728.0, "1765": 17446985728.0, "1770": 17446985728.0, "1775": 17446985728.0, "1780": 17446985728.0, "1785": 17446985728.0, "1790": 17446985728.0, "1795": 17446985728.0, "1800": 17446985728.0, "1805": 17447194624.0, "1810": 17446985728.0, "1815": 17447223296.0, "1820": 17446985728.0, "1825": 17446985728.0, "1830": 17446985728.0, "1835": 17446985728.0, "1840": 17446985728.0, "1845": 17446985728.0, "1850": 17446985728.0, "1855": 17446985728.0, "1860": 17446985728.0, "1865": 17446985728.0, "1870": 17447272448.0, "1875": 17446985728.0, "1880": 17446985728.0, "1885": 17446985728.0, "1890": 17446985728.0, "1895": 17446985728.0, "1900": 17446985728.0, "1905": 17446985728.0, "1910": 17446985728.0, "1915": 17446985728.0, "1920": 17446985728.0, "1925": 17446985728.0, "1930": 17446985728.0, "1935": 17446985728.0, "1940": 17446985728.0, "1945": 17446985728.0, "1950": 17446985728.0, "1955": 17446985728.0, "1960": 17446985728.0, "1965": 17446985728.0, "1970": 17447698432.0, "1975": 17446985728.0, "1980": 17446985728.0, "1985": 17446985728.0, "1990": 17446985728.0, "1995": 17446985728.0, "2000": 17446985728.0, "2005": 17446985728.0, "2010": 17447337984.0, "2015": 17446985728.0, "2020": 17446985728.0, "2025": 17446985728.0, "2030": 17446985728.0, "2035": 17446985728.0, "2040": 17446985728.0, "2045": 17446985728.0, "2050": 17446985728.0, "2055": 17446985728.0, "2060": 17446985728.0, "2065": 17446985728.0, "2070": 17446985728.0, "2075": 17447124992.0, "2080": 17446985728.0, "2085": 17446985728.0, "2090": 17446985728.0, "2095": 17446985728.0, "2100": 17446985728.0, "2105": 17446985728.0, "2110": 17446985728.0, "2115": 17446985728.0, "2120": 17446985728.0, "2125": 17446985728.0, "2130": 17446985728.0, "2135": 17446985728.0, "2140": 17446985728.0, "2145": 17446985728.0, "2150": 17446985728.0, "2155": 17446985728.0, "2160": 17446985728.0, "2165": 17446985728.0, "2170": 17448009728.0, "2175": 17446985728.0, "2180": 17446985728.0, "2185": 17446985728.0, "2190": 17447755776.0, "2195": 17446985728.0, "2200": 17446985728.0, "2205": 17446985728.0, "2210": 17446985728.0, "2215": 17446985728.0, "2220": 17446985728.0, "2225": 17446985728.0, "2230": 17446985728.0, "2235": 17446985728.0, "2240": 17447182336.0, "2245": 17446985728.0, "2250": 17446985728.0, "2255": 17446985728.0, "2260": 17446985728.0, "2265": 17446985728.0, "2270": 
17446985728.0, "2275": 17447710720.0, "2280": 17446985728.0, "2285": 17446985728.0, "2290": 17446985728.0, "2295": 17446985728.0, "2300": 17446985728.0, "2305": 17446985728.0, "2310": 17446985728.0, "2315": 17446985728.0, "2320": 17446985728.0, "2325": 17446985728.0, "2330": 17446985728.0, "2335": 17446985728.0, "2340": 17446985728.0, "2345": 17446985728.0, "2350": 17446985728.0, "2355": 17446985728.0, "2360": 17446985728.0, "2365": 17446985728.0, "2370": 17446985728.0, "2375": 17446985728.0, "2380": 17446985728.0, "2385": 17446985728.0, "2390": 17446985728.0, "2395": 17448009728.0, "2400": 17446985728.0, "2405": 17446985728.0, "2410": 17446985728.0, "2415": 17446985728.0, "2420": 17446985728.0, "2425": 17446985728.0, "2430": 17446985728.0, "2435": 17446985728.0, "2440": 17446985728.0, "2445": 17447141376.0, "2450": 17446985728.0, "2455": 17446985728.0, "2460": 17446985728.0, "2465": 17446985728.0, "2470": 17446985728.0, "2475": 17446985728.0, "2480": 17446985728.0, "2485": 17446985728.0, "2490": 17446985728.0, "2495": 17446985728.0, "2500": 17446985728.0, "2505": 17446985728.0, "2510": 17447641088.0, "2515": 17447030784.0, "2520": 17447043072.0, "2525": 17446985728.0, "2530": 17446985728.0, "2535": 17446985728.0, "2540": 17446985728.0, "2545": 17446985728.0, "2550": 17446985728.0, "2555": 17446985728.0, "2560": 17446985728.0, "2565": 17446985728.0, "2570": 17446985728.0, "2575": 17447194624.0, "2580": 17446985728.0, "2585": 17447174144.0, "2590": 17447174144.0, "2595": 17447174144.0, "2600": 17447174144.0, "2605": 17447174144.0, "2610": 17447174144.0, "2615": 17447174144.0, "2620": 17447174144.0, "2625": 17447174144.0, "2630": 17447174144.0, "2635": 17447174144.0, "2640": 17447526400.0, "2645": 17447174144.0, "2650": 17447174144.0, "2655": 17447174144.0, "2660": 17447174144.0, "2665": 17447874560.0, "2670": 17447174144.0, "2675": 17447174144.0, "2680": 17447174144.0, "2685": 17447174144.0, "2690": 17447174144.0, "2695": 17447174144.0, "2700": 17447174144.0, "2705": 17447174144.0, "2710": 17447174144.0, "2715": 17447174144.0, "2720": 17447174144.0, "2725": 17447174144.0, "2730": 17447174144.0, "2735": 17447174144.0, "2740": 17447174144.0, "2745": 17447174144.0, "2750": 17447174144.0, "2755": 17447174144.0, "2760": 17447174144.0, "2765": 17447174144.0, "2770": 17447174144.0, "2775": 17447174144.0, "2780": 17447174144.0, "2785": 17447174144.0, "2790": 17447174144.0, "2795": 17447174144.0, "2800": 17447174144.0, "2805": 17447174144.0, "2810": 17447174144.0, "2815": 17447174144.0, "2820": 17447174144.0, "2825": 17447174144.0, "2830": 17447174144.0, "2835": 17447174144.0, "2840": 17447174144.0, "2845": 17447174144.0, "2850": 17447821312.0, "2855": 17447174144.0, "2860": 17447174144.0, "2865": 17447174144.0, "2870": 17447174144.0, "2875": 17447174144.0, "2880": 17447174144.0, "2885": 17447575552.0, "2890": 17447174144.0, "2895": 17447174144.0, "2900": 17447174144.0, "2905": 17447174144.0, "2910": 17447174144.0, "2915": 17447174144.0, "2920": 17447174144.0, "2925": 17447174144.0, "2930": 17447174144.0, "2935": 17447174144.0, "2940": 17447239680.0, "2945": 17447174144.0, "2950": 17447174144.0, "2955": 17447174144.0, "2960": 17447174144.0, "2965": 17447174144.0, "2970": 17447174144.0, "2975": 17447174144.0, "2980": 17447174144.0, "2985": 17447174144.0, "2990": 17447174144.0, "2995": 17447174144.0, "3000": 17447174144.0, "3005": 17447174144.0, "3010": 17447174144.0, "3015": 17447174144.0, "3020": 17447174144.0, "3025": 17447174144.0, "3030": 17447174144.0, "3035": 17447174144.0, "3040": 
17447174144.0, "3045": 17447174144.0, "3050": 17447174144.0, "3055": 17447174144.0, "3060": 17447174144.0, "3065": 17447174144.0, "3070": 17447174144.0, "3075": 17447174144.0, "3080": 17447174144.0, "3085": 17447174144.0, "3090": 17447174144.0, "3095": 17447723008.0, "3100": 17447174144.0, "3105": 17447174144.0, "3110": 17447530496.0, "3115": 17447174144.0, "3120": 17447174144.0, "3125": 17447174144.0, "3130": 17447174144.0, "3135": 17447174144.0, "3140": 17447174144.0, "3145": 17446985728.0, "3150": 17446985728.0, "3155": 17446985728.0, "3160": 17446985728.0, "3165": 17446985728.0, "3170": 17446985728.0, "3175": 17446985728.0, "3180": 17446985728.0, "3185": 17447309312.0, "3190": 17446985728.0, "3195": 17446985728.0, "3200": 17447526400.0, "3205": 17447133184.0, "3210": 17447014400.0, "3215": 17446985728.0, "3220": 17446985728.0, "3225": 17446985728.0, "3230": 17446985728.0, "3235": 17446985728.0, "3240": 17446985728.0, "3245": 17446985728.0, "3250": 17446985728.0, "3255": 17446985728.0, "3260": 17446985728.0, "3265": 17446985728.0, "3270": 17446985728.0, "3275": 17447010304.0, "3280": 17446985728.0, "3285": 17446985728.0, "3290": 17446985728.0, "3295": 17446985728.0, "3300": 17446985728.0, "3305": 17446985728.0, "3310": 17446985728.0, "3315": 17447854080.0, "3320": 17446985728.0, "3325": 17447632896.0, "3330": 17446985728.0, "3335": 17446985728.0, "3340": 17446985728.0, "3345": 17446985728.0, "3350": 17446985728.0, "3355": 17447133184.0, "3360": 17446985728.0, "3365": 17446985728.0, "3370": 17446985728.0, "3375": 17446985728.0, "3380": 17446985728.0, "3385": 17446985728.0, "3390": 17446985728.0, "3395": 17446985728.0, "3400": 17446985728.0, "3405": 17446985728.0, "3410": 17446985728.0, "3415": 17446985728.0, "3420": 17446985728.0, "3425": 17447141376.0, "3430": 17446985728.0, "3435": 17446985728.0, "3440": 17446985728.0, "3445": 17446985728.0, "3450": 17446985728.0, "3455": 17446985728.0, "3460": 17446985728.0, "3465": 17446985728.0, "3470": 17446985728.0, "3475": 17446985728.0, "3480": 17446985728.0, "3485": 17446985728.0, "3490": 17446985728.0, "3495": 17446985728.0, "3500": 17446985728.0, "3505": 17446985728.0, "3510": 17446985728.0, "3515": 17446985728.0, "3520": 17446985728.0, "3525": 17446985728.0, "3530": 17446985728.0, "3535": 17447272448.0, "3540": 17446985728.0, "3545": 17446985728.0, "3550": 17446985728.0, "3555": 17446985728.0, "3560": 17447378944.0, "3565": 17446985728.0, "3570": 17446985728.0, "3575": 17446985728.0, "3580": 17446985728.0, "3585": 17446985728.0, "3590": 17446985728.0, "3595": 17446985728.0, "3600": 17446985728.0, "3605": 17446985728.0, "3610": 17446985728.0, "3615": 17447034880.0, "3620": 17446985728.0, "3625": 17446985728.0, "3630": 17446985728.0, "3635": 17446985728.0, "3640": 17447440384.0, "3645": 17446985728.0, "3650": 17446985728.0, "3655": 17446985728.0, "3660": 17446985728.0, "3665": 17447337984.0, "3670": 17446985728.0, "3675": 17446985728.0, "3680": 17446985728.0, "3685": 17446985728.0, "3690": 17446985728.0, "3695": 17446985728.0, "3700": 17446985728.0, "3705": 17446985728.0, "3710": 17446985728.0, "3715": 17446985728.0, "3720": 17446985728.0, "3725": 17446985728.0, "3730": 17446985728.0, "3735": 17446985728.0, "3740": 17446985728.0, "3745": 17446985728.0, "3750": 17446985728.0, "3755": 17446985728.0, "3760": 17446985728.0, "3765": 17446985728.0, "3770": 17446985728.0, "3775": 17446985728.0, "3780": 17446985728.0, "3785": 17446985728.0, "3790": 17446985728.0, "3795": 17446985728.0, "3800": 17446985728.0, "3805": 17446985728.0, "3810": 
17446985728.0, "3815": 17446985728.0, "3820": 17446985728.0, "3825": 17446985728.0, "3830": 17447190528.0, "3835": 17447272448.0, "3840": 17446985728.0, "3845": 17446985728.0, "3850": 17446985728.0, "3855": 17446985728.0, "3860": 17446985728.0, "3865": 17446985728.0, "3870": 17446985728.0, "3875": 17446985728.0, "3880": 17446985728.0, "3885": 17446985728.0, "3890": 17446985728.0, "3895": 17446985728.0, "3900": 17446985728.0, "3905": 17447755776.0, "3910": 17446985728.0, "3915": 17446985728.0, "3920": 17446985728.0, "3925": 17446985728.0, "3930": 17446985728.0, "3935": 17446985728.0, "3940": 17446985728.0, "3945": 17446985728.0, "3950": 17446985728.0, "3955": 17446985728.0, "3960": 17446985728.0, "3965": 17446985728.0, "3970": 17446985728.0, "3975": 17446985728.0, "3980": 17446985728.0, "3985": 17446985728.0, "3990": 17446985728.0, "3995": 17446985728.0, "4000": 17446985728.0, "4005": 17446985728.0, "4010": 17446985728.0, "4015": 17446985728.0, "4020": 17446985728.0, "4025": 17446985728.0, "4030": 17446985728.0, "4035": 17446985728.0, "4040": 17446985728.0, "4045": 17446985728.0, "4050": 17446985728.0, "4055": 17446985728.0, "4060": 17446985728.0, "4065": 17446985728.0, "4070": 17446985728.0, "4075": 17446985728.0, "4080": 17446985728.0, "4085": 17446985728.0, "4090": 17446985728.0, "4095": 17446985728.0, "4100": 17446985728.0, "4105": 17447862272.0, "4110": 17446985728.0, "4115": 17446985728.0, "4120": 17447960576.0, "4125": 17446985728.0, "4130": 17446985728.0, "4135": 17446985728.0, "4140": 17446985728.0, "4145": 17446985728.0, "4150": 17446985728.0, "4155": 17446985728.0, "4160": 17446985728.0, "4165": 17446985728.0, "4170": 17446985728.0, "4175": 17446985728.0, "4180": 17447526400.0, "4185": 17446985728.0, "4190": 17447378944.0, "4195": 17446985728.0, "4200": 17446985728.0, "4205": 17446985728.0, "4210": 17447280640.0, "4215": 17447931904.0, "4220": 17447301120.0, "4225": 17446985728.0, "4230": 17446985728.0, "4235": 17446985728.0, "4240": 17446985728.0, "4245": 17446985728.0, "4250": 17446985728.0, "4255": 17447739392.0, "4260": 17447546880.0, "4265": 17446985728.0, "4270": 17446985728.0, "4275": 17447133184.0, "4280": 17446985728.0, "4285": 17447616512.0, "4290": 17446985728.0, "4295": 17447682048.0, "4300": 17446985728.0, "4305": 17446985728.0, "4310": 17446985728.0, "4315": 17447424000.0, "4320": 17446985728.0, "4325": 17446985728.0, "4330": 17446985728.0, "4335": 17446985728.0, "4340": 17446985728.0, "4345": 17447026688.0, "4350": 17446985728.0, "4355": 17446985728.0, "4360": 17446985728.0, "4365": 17446985728.0, "4370": 17446985728.0, "4375": 17447428096.0, "4380": 17446985728.0, "4385": 17446985728.0, "4390": 17446985728.0, "4395": 17446985728.0, "4400": 17446985728.0, "4405": 17446985728.0, "4410": 17446985728.0, "4415": 17446985728.0, "4420": 17446985728.0, "4425": 17446985728.0, "4430": 17446985728.0, "4435": 17446985728.0, "4440": 17446985728.0, "4445": 17446985728.0, "4450": 17446985728.0, "4455": 17446985728.0, "4460": 17446985728.0, "4465": 17446985728.0, "4470": 17446985728.0, "4475": 17446985728.0, "4480": 17446985728.0, "4485": 17446985728.0, "4490": 17446985728.0, "4495": 17446985728.0, "4500": 17446985728.0, "4505": 17446985728.0, "4510": 17446985728.0, "4515": 17446985728.0, "4520": 17446985728.0, "4525": 17446985728.0, "4530": 17447174144.0, "4535": 17446985728.0, "4540": 17447936000.0, "4545": 17446985728.0, "4550": 17446985728.0, "4555": 17446985728.0, "4560": 17446985728.0, "4565": 17446985728.0, "4570": 17446985728.0, "4575": 17446985728.0, "4580": 
17446985728.0, "4585": 17446985728.0, "4590": 17446985728.0, "4595": 17447583744.0, "4600": 17446985728.0, "4605": 17446985728.0, "4610": 17446985728.0, "4615": 17446985728.0, "4620": 17447460864.0, "4625": 17447747584.0, "4630": 17446985728.0, "4635": 17446985728.0, "4640": 17446985728.0, "4645": 17446985728.0, "4650": 17446985728.0, "4655": 17446985728.0, "4660": 17446985728.0, "4665": 17447395328.0, "4670": 17446985728.0, "4675": 17447223296.0, "4680": 17446985728.0, "4685": 17446985728.0, "4690": 17446985728.0, "4695": 17446985728.0, "4700": 17446985728.0, "4705": 17446985728.0, "4710": 17446985728.0, "4715": 17446985728.0, "4720": 17446985728.0, "4725": 17447030784.0, "4730": 17446985728.0, "4735": 17446985728.0, "4740": 17446985728.0, "4745": 17446985728.0, "4750": 17446985728.0, "4755": 17446985728.0, "4760": 17446985728.0, "4765": 17446985728.0, "4770": 17446985728.0, "4775": 17447952384.0, "4780": 17446985728.0, "4785": 17446985728.0, "4790": 17446985728.0, "4795": 17446985728.0, "4800": 17446985728.0, "4805": 17446985728.0, "4810": 17446985728.0, "4815": 17446985728.0, "4820": 17446985728.0, "4825": 17446985728.0, "4830": 17446985728.0, "4835": 17447211008.0, "4840": 17447944192.0, "4845": 17446985728.0, "4850": 17447723008.0, "4855": 17446985728.0, "4860": 17446985728.0, "4865": 17446985728.0, "4870": 17446985728.0, "4875": 17446985728.0, "4880": 17446985728.0, "4885": 17446985728.0, "4890": 17446985728.0, "4895": 17447071744.0, "4900": 17446985728.0, "4905": 17446985728.0, "4910": 17446985728.0, "4915": 17446985728.0, "4920": 17446985728.0, "4925": 17446985728.0, "4930": 17446985728.0, "4935": 17446985728.0, "4940": 17446985728.0, "4945": 17446985728.0, "4950": 17446985728.0, "4955": 17446985728.0, "4960": 17446985728.0, "4965": 17446985728.0, "4970": 17446985728.0, "4975": 17446985728.0, "4980": 17446985728.0, "4985": 17446985728.0, "4990": 17446985728.0, "4995": 17446985728.0, "5000": 17446985728.0, "5005": 17447239680.0, "5010": 17447370752.0, "5015": 17446985728.0, "5020": 17446985728.0, "5025": 17446985728.0, "5030": 17446985728.0, "5035": 17446985728.0, "5040": 17446985728.0, "5045": 17446985728.0, "5050": 17446985728.0, "5055": 17446985728.0, "5060": 17446985728.0, "5065": 17446985728.0, "5070": 17446985728.0, "5075": 17446985728.0, "5080": 17446985728.0, "5085": 17446985728.0, "5090": 17446985728.0, "5095": 17446985728.0, "5100": 17446985728.0, "5105": 17446985728.0, "5110": 17446985728.0, "5115": 17447903232.0, "5120": 17447903232.0, "5125": 17447903232.0, "5130": 17447903232.0, "5135": 17448157184.0, "5140": 17447903232.0, "5145": 17447903232.0, "5150": 17447903232.0, "5155": 17447903232.0, "5160": 17447903232.0, "5165": 17447903232.0, "5170": 17447903232.0, "5175": 17447903232.0, "5180": 17447903232.0, "5185": 17447903232.0, "5190": 17447903232.0, "5195": 17447903232.0, "5200": 17447903232.0, "5205": 17447903232.0, "5210": 17447903232.0, "5215": 17447903232.0, "5220": 17447903232.0, "5225": 17447903232.0, "5230": 17447903232.0, "5235": 17447903232.0, "5240": 17447903232.0, "5245": 17447903232.0, "5250": 17447903232.0, "5255": 17447903232.0, "5260": 17447903232.0, "5265": 17447903232.0, "5270": 17447903232.0, "5275": 17447903232.0, "5280": 17447903232.0, "5285": 17447903232.0, "5290": 17447903232.0, "5295": 17447903232.0, "5300": 17447903232.0, "5305": 17447903232.0, "5310": 17447903232.0, "5315": 17447903232.0, "5320": 17447903232.0, "5325": 17447903232.0, "5330": 17447903232.0, "5335": 17447903232.0, "5340": 17447903232.0, "5345": 17447903232.0, "5350": 
17447903232.0, "5355": 17447903232.0, "5360": 17447903232.0, "5365": 17447903232.0, "5370": 17447903232.0, "5375": 17447903232.0, "5380": 17447903232.0, "5385": 17447903232.0, "5390": 17447903232.0, "5395": 17447903232.0, "5400": 17447903232.0, "5405": 17447903232.0, "5410": 17447903232.0, "5415": 17447903232.0, "5420": 17447903232.0, "5425": 17447903232.0, "5430": 17447903232.0, "5435": 17447903232.0, "5440": 17447903232.0, "5445": 17447903232.0, "5450": 17447903232.0, "5455": 17447903232.0, "5460": 17447903232.0, "5465": 17447903232.0, "5470": 17447903232.0, "5475": 17447903232.0, "5480": 17447903232.0, "5485": 17447903232.0, "5490": 17447903232.0, "5495": 17447903232.0, "5500": 17447903232.0, "5505": 17447903232.0, "5510": 17447903232.0, "5515": 17447993344.0, "5520": 17447903232.0, "5525": 17447903232.0, "5530": 17447903232.0, "5535": 17447903232.0, "5540": 17447903232.0, "5545": 17447903232.0, "5550": 17447903232.0, "5555": 17448378368.0, "5560": 17447903232.0, "5565": 17447903232.0, "5570": 17447903232.0, "5575": 17447903232.0, "5580": 17447903232.0, "5585": 17447903232.0, "5590": 17447837696.0, "5595": 17447059456.0, "5600": 17447059456.0, "5605": 17447059456.0, "5610": 17447059456.0, "5615": 17447059456.0, "5620": 17447059456.0, "5625": 17447059456.0, "5630": 17447059456.0, "5635": 17447137280.0, "5640": 17447059456.0, "5645": 17447059456.0, "5650": 17447059456.0, "5655": 17447059456.0, "5660": 17447059456.0, "5665": 17447059456.0, "5670": 17447059456.0, "5675": 17447059456.0, "5680": 17447059456.0, "5685": 17447059456.0, "5690": 17447059456.0, "5695": 17447059456.0, "5700": 17447059456.0, "5705": 17447059456.0, "5710": 17447059456.0, "5715": 17447059456.0, "5720": 17447059456.0, "5725": 17447059456.0, "5730": 17447059456.0, "5735": 17447600128.0, "5740": 17447059456.0, "5745": 17447059456.0, "5750": 17447059456.0, "5755": 17447059456.0, "5760": 17447059456.0, "5765": 17447059456.0, "5770": 17447059456.0, "5775": 17447059456.0, "5780": 17447059456.0, "5785": 17447059456.0, "5790": 17447059456.0, "5795": 17447059456.0, "5800": 17447059456.0, "5805": 17447059456.0, "5810": 17447059456.0, "5815": 17447395328.0, "5820": 17447059456.0, "5825": 17447059456.0, "5830": 17447059456.0, "5835": 17447059456.0, "5840": 17447059456.0, "5845": 17447059456.0, "5850": 17447059456.0, "5855": 17447059456.0, "5860": 17447059456.0, "5865": 17447059456.0, "5870": 17447059456.0, "5875": 17447059456.0, "5880": 17447059456.0, "5885": 17447059456.0, "5890": 17447059456.0, "5895": 17447059456.0, "5900": 17447059456.0, "5905": 17447059456.0, "5910": 17447567360.0, "5915": 17447530496.0, "5920": 17447059456.0, "5925": 17447059456.0, "5930": 17447059456.0, "5935": 17447059456.0, "5940": 17447059456.0, "5945": 17447059456.0, "5950": 17447059456.0, "5955": 17447059456.0, "5960": 17447059456.0, "5965": 17447059456.0, "5970": 17447059456.0, "5975": 17447059456.0, "5980": 17447059456.0, "5985": 17447059456.0, "5990": 17447059456.0, "5995": 17447059456.0, "6000": 17447059456.0, "6005": 17448017920.0, "6010": 17447059456.0, "6015": 17447059456.0, "6020": 17447059456.0, "6025": 17447059456.0, "6030": 17447059456.0, "6035": 17447059456.0, "6040": 17447059456.0, "6045": 17447059456.0, "6050": 17447059456.0, "6055": 17447059456.0, "6060": 17447059456.0, "6065": 17447059456.0, "6070": 17447059456.0, "6075": 17447059456.0, "6080": 17447059456.0, "6085": 17447059456.0, "6090": 17447059456.0, "6095": 17447059456.0, "6100": 17447059456.0, "6105": 17447952384.0, "6110": 17447059456.0, "6115": 17447059456.0, "6120": 
17447059456.0, "6125": 17447059456.0, "6130": 17447059456.0, "6135": 17447194624.0, "6140": 17447059456.0, "6145": 17447059456.0, "6150": 17447059456.0, "6155": 17447059456.0, "6160": 17447059456.0, "6165": 17447485440.0, "6170": 17448132608.0, "6175": 17447489536.0, "6180": 17447485440.0, "6185": 17447485440.0, "6190": 17447485440.0, "6195": 17447485440.0, "6200": 17447485440.0, "6205": 17447485440.0, "6210": 17447485440.0, "6215": 17447485440.0, "6220": 17447485440.0, "6225": 17447485440.0, "6230": 17448255488.0, "6235": 17447485440.0, "6240": 17447485440.0, "6245": 17447485440.0, "6250": 17447485440.0, "6255": 17447485440.0, "6260": 17447485440.0, "6265": 17447485440.0, "6270": 17447485440.0, "6275": 17447485440.0, "6280": 17447485440.0, "6285": 17447485440.0, "6290": 17446985728.0, "6295": 17446985728.0, "6300": 17446985728.0, "6305": 17446985728.0, "6310": 17446985728.0, "6315": 17446985728.0, "6320": 17446985728.0, "6325": 17446985728.0, "6330": 17446985728.0, "6335": 17446985728.0, "6340": 17446985728.0, "6345": 17446985728.0, "6350": 17446985728.0, "6355": 17446985728.0, "6360": 17446985728.0, "6365": 17446985728.0, "6370": 17447018496.0, "6375": 17446985728.0, "6380": 17446985728.0, "6385": 17446985728.0, "6390": 17446985728.0, "6395": 17447653376.0, "6400": 17446985728.0, "6405": 17446985728.0, "6410": 17446985728.0, "6415": 17446985728.0, "6420": 17446985728.0, "6425": 17446985728.0, "6430": 17446985728.0, "6435": 17446985728.0, "6440": 17446985728.0, "6445": 17447813120.0, "6450": 17446985728.0, "6455": 17446985728.0, "6460": 17446985728.0, "6465": 17446985728.0, "6470": 17446985728.0, "6475": 17446985728.0, "6480": 17446985728.0, "6485": 17446985728.0, "6490": 17448026112.0, "6495": 17446985728.0, "6500": 17446985728.0, "6505": 17446985728.0, "6510": 17447112704.0, "6515": 17446985728.0, "6520": 17446985728.0, "6525": 17447960576.0, "6530": 17446985728.0, "6535": 17446985728.0, "6540": 17446985728.0, "6545": 17446985728.0, "6550": 17447477248.0, "6555": 17446985728.0, "6560": 17446985728.0, "6565": 17447727104.0, "6570": 17446985728.0, "6575": 17446985728.0, "6580": 17446985728.0, "6585": 17446985728.0, "6590": 17446985728.0, "6595": 17446985728.0, "6600": 17446985728.0, "6605": 17446985728.0, "6610": 17446985728.0, "6615": 17446985728.0, "6620": 17446985728.0, "6625": 17446985728.0, "6630": 17446985728.0, "6635": 17446985728.0, "6640": 17446985728.0, "6645": 17446985728.0, "6650": 17446985728.0, "6655": 17446985728.0, "6660": 17446985728.0, "6665": 17446985728.0, "6670": 17446985728.0, "6675": 17446985728.0, "6680": 17446985728.0, "6685": 17446985728.0, "6690": 17446985728.0, "6695": 17446985728.0, "6700": 17446985728.0, "6705": 17446985728.0, "6710": 17446985728.0, "6715": 17446985728.0, "6720": 17446985728.0, "6725": 17446985728.0, "6730": 17446985728.0, "6735": 17446985728.0, "6740": 17446985728.0, "6745": 17446985728.0, "6750": 17446985728.0, "6755": 17446985728.0, "6760": 17446985728.0, "6765": 17447907328.0, "6770": 17446985728.0, "6775": 17446985728.0, "6780": 17446985728.0, "6785": 17446985728.0, "6790": 17446985728.0, "6795": 17447985152.0, "6800": 17446985728.0, "6805": 17446985728.0, "6810": 17446985728.0, "6815": 17446985728.0, "6820": 17446985728.0, "6825": 17446985728.0, "6830": 17446985728.0, "6835": 17446985728.0, "6840": 17446985728.0, "6845": 17446985728.0, "6850": 17446985728.0, "6855": 17446985728.0, "6860": 17446985728.0, "6865": 17446985728.0, "6870": 17446985728.0, "6875": 17446985728.0, "6880": 17446985728.0, "6885": 17446985728.0, "6890": 
17446985728.0, "6895": 17447944192.0, "6900": 17446985728.0, "6905": 17446985728.0, "6910": 17446985728.0, "6915": 17446985728.0, "6920": 17446985728.0, "6925": 17447677952.0, "6930": 17446985728.0, "6935": 17446985728.0, "6940": 17446985728.0, "6945": 17446985728.0, "6950": 17446985728.0, "6955": 17446985728.0, "6960": 17446985728.0, "6965": 17446985728.0, "6970": 17447346176.0, "6975": 17446985728.0, "6980": 17447370752.0, "6985": 17446985728.0, "6990": 17446985728.0, "6995": 17446985728.0, "7000": 17447272448.0, "7005": 17447227392.0, "7010": 17446985728.0, "7015": 17446985728.0, "7020": 17446985728.0, "7025": 17446985728.0, "7030": 17446985728.0, "7035": 17446985728.0, "7040": 17446985728.0, "7045": 17446985728.0, "7050": 17446985728.0, "7055": 17446985728.0, "7060": 17446985728.0, "7065": 17446985728.0, "7070": 17446985728.0, "7075": 17446985728.0, "7080": 17446985728.0, "7085": 17446985728.0, "7090": 17446985728.0, "7095": 17446985728.0, "7100": 17446985728.0, "7105": 17446985728.0, "7110": 17446985728.0, "7115": 17446985728.0, "7120": 17446985728.0, "7125": 17446985728.0, "7130": 17447346176.0, "7135": 17446985728.0, "7140": 17446985728.0, "7145": 17446985728.0, "7150": 17446985728.0, "7155": 17446985728.0, "7160": 17446985728.0, "7165": 17446985728.0, "7170": 17446985728.0, "7175": 17446985728.0, "7180": 17446985728.0, "7185": 17446985728.0, "7190": 17446985728.0, "7195": 17446985728.0, "7200": 17446985728.0, "7205": 17446985728.0, "7210": 17446985728.0, "7215": 17446985728.0, "7220": 17446985728.0, "7225": 17446985728.0, "7230": 17446985728.0, "7235": 17446985728.0, "7240": 17446985728.0, "7245": 17446985728.0, "7250": 17446985728.0, "7255": 17446985728.0, "7260": 17446985728.0, "7265": 17446985728.0, "7270": 17446985728.0, "7275": 17446985728.0, "7280": 17447354368.0, "7285": 17446985728.0, "7290": 17446985728.0, "7295": 17447403520.0, "7300": 17446985728.0, "7305": 17446985728.0, "7310": 17446985728.0, "7315": 17446985728.0, "7320": 17446985728.0, "7325": 17447755776.0, "7330": 17446985728.0, "7335": 17446985728.0, "7340": 17446985728.0, "7345": 17446985728.0, "7350": 17447739392.0, "7355": 17446985728.0, "7360": 17447526400.0, "7365": 17446985728.0, "7370": 17446985728.0, "7375": 17446985728.0, "7380": 17446985728.0, "7385": 17446985728.0, "7390": 17446985728.0, "7395": 17446985728.0, "7400": 17446985728.0, "7405": 17446985728.0, "7410": 17446985728.0, "7415": 17446985728.0, "7420": 17446985728.0, "7425": 17446985728.0, "7430": 17447821312.0, "7435": 17446985728.0, "7440": 17446985728.0, "7445": 17446985728.0, "7450": 17446985728.0, "7455": 17446985728.0, "7460": 17446985728.0, "7465": 17446985728.0, "7470": 17446985728.0, "7475": 17446985728.0, "7480": 17446985728.0, "7485": 17446985728.0, "7490": 17446985728.0, "7495": 17446985728.0, "7500": 17446985728.0, "7505": 17446985728.0, "7510": 17446985728.0, "7515": 17446985728.0, "7520": 17446985728.0, "7525": 17446985728.0, "7530": 17447161856.0, "7535": 17446985728.0, "7540": 17446985728.0, "7545": 17447329792.0, "7550": 17447190528.0, "7555": 17446985728.0, "7560": 17447501824.0, "7565": 17447804928.0, "7570": 17447084032.0, "7575": 17447084032.0, "7580": 17447084032.0, "7585": 17447084032.0, "7590": 17447084032.0, "7595": 17447084032.0, "7600": 17447084032.0, "7605": 17447084032.0, "7610": 17447084032.0, "7615": 17447084032.0, "7620": 17447084032.0, "7625": 17447084032.0, "7630": 17447084032.0, "7635": 17447084032.0, "7640": 17447084032.0, "7645": 17447084032.0, "7650": 17447084032.0, "7655": 17447084032.0, "7660": 
17447084032.0, "7665": 17447084032.0, "7670": 17447084032.0, "7675": 17447084032.0, "7680": 17447084032.0, "7685": 17447084032.0, "7690": 17447084032.0, "7695": 17447084032.0, "7700": 17447624704.0, "7705": 17447690240.0, "7710": 17447084032.0, "7715": 17448128512.0, "7720": 17447084032.0, "7725": 17447084032.0, "7730": 17447084032.0, "7735": 17447534592.0, "7740": 17447084032.0, "7745": 17447084032.0, "7750": 17447084032.0, "7755": 17447084032.0, "7760": 17447084032.0, "7765": 17447084032.0, "7770": 17447084032.0, "7775": 17447084032.0, "7780": 17447084032.0, "7785": 17447084032.0, "7790": 17447084032.0, "7795": 17447346176.0, "7800": 17448132608.0, "7805": 17447084032.0, "7810": 17447084032.0, "7815": 17447084032.0, "7820": 17447084032.0, "7825": 17447084032.0, "7830": 17447084032.0, "7835": 17447084032.0, "7840": 17447084032.0, "7845": 17447084032.0, "7850": 17447653376.0, "7855": 17447084032.0, "7860": 17447084032.0, "7865": 17447084032.0, "7870": 17447084032.0, "7875": 17447084032.0, "7880": 17447084032.0, "7885": 17447084032.0, "7890": 17447084032.0, "7895": 17447084032.0, "7900": 17447723008.0, "7905": 17447084032.0, "7910": 17447084032.0, "7915": 17447084032.0, "7920": 17447084032.0, "7925": 17447084032.0, "7930": 17447800832.0, "7935": 17447084032.0, "7940": 17447084032.0, "7945": 17447084032.0, "7950": 17447084032.0, "7955": 17447084032.0, "7960": 17447084032.0, "7965": 17447084032.0, "7970": 17447084032.0, "7975": 17447084032.0, "7980": 17447084032.0, "7985": 17447084032.0, "7990": 17447084032.0, "7995": 17449050112.0, "8000": 17447084032.0, "8005": 17447084032.0, "8010": 17447084032.0, "8015": 17447084032.0, "8020": 17447084032.0, "8025": 17447084032.0, "8030": 17447084032.0, "8035": 17447084032.0, "8040": 17447084032.0, "8045": 17447313408.0, "8050": 17447084032.0, "8055": 17447084032.0, "8060": 17447084032.0, "8065": 17447510016.0, "8070": 17447084032.0, "8075": 17447518208.0, "8080": 17447436288.0, "8085": 17446985728.0, "8090": 17446985728.0, "8095": 17446985728.0, "8100": 17446985728.0, "8105": 17446985728.0, "8110": 17446985728.0, "8115": 17446985728.0, "8120": 17446985728.0, "8125": 17446985728.0, "8130": 17446985728.0, "8135": 17446985728.0, "8140": 17446985728.0, "8145": 17446985728.0, "8150": 17446985728.0, "8155": 17446985728.0, "8160": 17446985728.0, "8165": 17446985728.0, "8170": 17446985728.0, "8175": 17446985728.0, "8180": 17446985728.0, "8185": 17447477248.0, "8190": 17447276544.0, "8195": 17446985728.0, "8200": 17446985728.0, "8205": 17446985728.0, "8210": 17446985728.0, "8215": 17446985728.0, "8220": 17446985728.0, "8225": 17446985728.0, "8230": 17446985728.0, "8235": 17446985728.0, "8240": 17446985728.0, "8245": 17447149568.0, "8250": 17446985728.0, "8255": 17446985728.0, "8260": 17446985728.0, "8265": 17447505920.0, "8270": 17446985728.0, "8275": 17446985728.0, "8280": 17446985728.0, "8285": 17446985728.0, "8290": 17447559168.0, "8295": 17446985728.0, "8300": 17447297024.0, "8305": 17446985728.0, "8310": 17446985728.0, "8315": 17446985728.0, "8320": 17446985728.0, "8325": 17446985728.0, "8330": 17446985728.0, "8335": 17446985728.0, "8340": 17446985728.0, "8345": 17446985728.0, "8350": 17446985728.0, "8355": 17446985728.0, "8360": 17446985728.0, "8365": 17446985728.0, "8370": 17447669760.0, "8375": 17446985728.0, "8380": 17446985728.0, "8385": 17446985728.0, "8390": 17446985728.0, "8395": 17446985728.0, "8400": 17446985728.0, "8405": 17447165952.0, "8410": 17446985728.0, "8415": 17446985728.0, "8420": 17446985728.0, "8425": 17446985728.0, "8430": 
17446985728.0, "8435": 17446985728.0, "8440": 17446985728.0, "8445": 17447092224.0, "8450": 17447510016.0, "8455": 17446985728.0, "8460": 17446985728.0, "8465": 17446985728.0, "8470": 17446985728.0, "8475": 17446985728.0, "8480": 17446985728.0, "8485": 17446985728.0, "8490": 17446985728.0, "8495": 17446985728.0, "8500": 17446985728.0, "8505": 17446985728.0, "8510": 17446985728.0, "8515": 17446985728.0, "8520": 17446985728.0, "8525": 17446985728.0, "8530": 17446985728.0, "8535": 17447723008.0, "8540": 17446985728.0, "8545": 17446985728.0, "8550": 17446985728.0, "8555": 17446985728.0, "8560": 17446985728.0, "8565": 17446985728.0, "8570": 17446985728.0, "8575": 17446985728.0, "8580": 17447256064.0, "8585": 17446985728.0, "8590": 17446985728.0, "8595": 17446985728.0, "8600": 17446985728.0, "8605": 17446985728.0, "8610": 17446985728.0, "8615": 17446985728.0, "8620": 17447051264.0, "8625": 17446985728.0, "8630": 17446985728.0, "8635": 17446985728.0, "8640": 17446985728.0, "8645": 17447796736.0, "8650": 17446985728.0, "8655": 17446985728.0, "8660": 17446985728.0, "8665": 17447266304.0, "8670": 17447268352.0, "8675": 17447284736.0, "8680": 17447268352.0, "8685": 17447268352.0, "8690": 17447268352.0, "8695": 17447268352.0, "8700": 17447268352.0, "8705": 17447317504.0, "8710": 17447268352.0, "8715": 17447268352.0, "8720": 17447268352.0, "8725": 17447268352.0, "8730": 17448067072.0, "8735": 17447268352.0, "8740": 17447268352.0, "8745": 17447268352.0, "8750": 17447268352.0, "8755": 17447268352.0, "8760": 17447268352.0, "8765": 17447268352.0, "8770": 17447268352.0, "8775": 17447268352.0, "8780": 17447268352.0, "8785": 17447268352.0, "8790": 17447268352.0, "8795": 17447268352.0, "8800": 17447268352.0, "8805": 17447268352.0, "8810": 17447268352.0, "8815": 17447751680.0, "8820": 17447268352.0, "8825": 17447268352.0, "8830": 17447268352.0, "8835": 17447268352.0, "8840": 17447268352.0, "8845": 17447268352.0, "8850": 17447268352.0, "8855": 17447268352.0, "8860": 17447268352.0, "8865": 17447268352.0, "8870": 17447440384.0, "8875": 17447268352.0, "8880": 17447268352.0, "8885": 17447268352.0, "8890": 17447268352.0, "8895": 17447268352.0, "8900": 17447268352.0, "8905": 17447268352.0, "8910": 17447268352.0, "8915": 17447268352.0, "8920": 17447268352.0, "8925": 17447268352.0, "8930": 17447268352.0, "8935": 17447268352.0, "8940": 17447268352.0, "8945": 17447268352.0, "8950": 17447268352.0, "8955": 17447268352.0, "8960": 17447268352.0, "8965": 17447268352.0, "8970": 17447268352.0, "8975": 17448198144.0, "8980": 17447268352.0, "8985": 17447268352.0, "8990": 17447268352.0, "8995": 17447268352.0, "9000": 17447268352.0, "9005": 17447268352.0, "9010": 17447268352.0, "9015": 17447268352.0, "9020": 17447268352.0, "9025": 17447268352.0, "9030": 17447268352.0, "9035": 17447268352.0, "9040": 17447268352.0, "9045": 17447268352.0, "9050": 17447268352.0, "9055": 17447268352.0, "9060": 17447268352.0, "9065": 17447268352.0, "9070": 17447268352.0, "9075": 17447268352.0, "9080": 17447268352.0, "9085": 17447268352.0, "9090": 17447366656.0, "9095": 17447268352.0, "9100": 17447268352.0, "9105": 17447268352.0, "9110": 17447292928.0, "9115": 17447268352.0, "9120": 17447268352.0, "9125": 17447268352.0, "9130": 17447268352.0, "9135": 17447268352.0, "9140": 17447268352.0, "9145": 17447268352.0, "9150": 17447268352.0, "9155": 17447268352.0, "9160": 17447268352.0, "9165": 17447268352.0, "9170": 17447268352.0, "9175": 17447268352.0, "9180": 17447268352.0, "9185": 17447268352.0, "9190": 17447268352.0, "9195": 17447268352.0, "9200": 
17447268352.0, "9205": 17447284736.0, "9210": 17447919616.0, "9215": 17447268352.0, "9220": 17447895040.0, "9225": 17447268352.0, "9230": 17448112128.0, "9235": 17447268352.0, "9240": 17447268352.0, "9245": 17447268352.0, "9250": 17447411712.0, "9255": 17447411712.0, "9260": 17447411712.0, "9265": 17447411712.0, "9270": 17447411712.0, "9275": 17447411712.0, "9280": 17447411712.0, "9285": 17447411712.0, "9290": 17447411712.0, "9295": 17447411712.0, "9300": 17447411712.0, "9305": 17447411712.0, "9310": 17447411712.0, "9315": 17447411712.0, "9320": 17447411712.0, "9325": 17447411712.0, "9330": 17447411712.0, "9335": 17447411712.0, "9340": 17447411712.0, "9345": 17447411712.0, "9350": 17447411712.0, "9355": 17447411712.0, "9360": 17447411712.0, "9365": 17447411712.0, "9370": 17448181760.0, "9375": 17447411712.0, "9380": 17447411712.0, "9385": 17447411712.0, "9390": 17447411712.0, "9395": 17447411712.0, "9400": 17447411712.0, "9405": 17447411712.0, "9410": 17447411712.0, "9415": 17447411712.0, "9420": 17447411712.0, "9425": 17447411712.0, "9430": 17447411712.0, "9435": 17447411712.0, "9440": 17447411712.0, "9445": 17447411712.0, "9450": 17447411712.0, "9455": 17447411712.0, "9460": 17447411712.0, "9465": 17447411712.0, "9470": 17447411712.0, "9475": 17447411712.0, "9480": 17447411712.0, "9485": 17447411712.0, "9490": 17447411712.0, "9495": 17447411712.0, "9500": 17447411712.0, "9505": 17447411712.0, "9510": 17447411712.0, "9515": 17447411712.0, "9520": 17447411712.0, "9525": 17447411712.0, "9530": 17447411712.0, "9535": 17447411712.0, "9540": 17447411712.0, "9545": 17447411712.0, "9550": 17447411712.0, "9555": 17447411712.0, "9560": 17447411712.0, "9565": 17447411712.0, "9570": 17447411712.0, "9575": 17447411712.0, "9580": 17447411712.0, "9585": 17447411712.0, "9590": 17447411712.0, "9595": 17447411712.0, "9600": 17447411712.0, "9605": 17447411712.0, "9610": 17447411712.0, "9615": 17447411712.0, "9620": 17447411712.0, "9625": 17447411712.0, "9630": 17447411712.0, "9635": 17447411712.0, "9640": 17447411712.0, "9645": 17447411712.0, "9650": 17447411712.0, "9655": 17447411712.0, "9660": 17447411712.0, "9665": 17447411712.0, "9670": 17448083456.0, "9675": 17447411712.0, "9680": 17447411712.0, "9685": 17447411712.0, "9690": 17447411712.0, "9695": 17447411712.0, "9700": 17447411712.0, "9705": 17447411712.0, "9710": 17447411712.0, "9715": 17448067072.0, "9720": 17447411712.0, "9725": 17447411712.0, "9730": 17447411712.0, "9735": 17447411712.0, "9740": 17447411712.0, "9745": 17447411712.0, "9750": 17447411712.0, "9755": 17447411712.0, "9760": 17447411712.0, "9765": 17447411712.0, "9770": 17447411712.0, "9775": 17447411712.0, "9780": 17447411712.0, "9785": 17447411712.0, "9790": 17447993344.0, "9795": 17447411712.0, "9800": 17447411712.0, "9805": 17447411712.0, "9810": 17447411712.0, "9815": 17446983680.0, "9820": 17446985728.0, "9825": 17446985728.0, "9830": 17446985728.0, "9835": 17446985728.0, "9840": 17446985728.0, "9845": 17446985728.0, "9850": 17446985728.0, "9855": 17446985728.0, "9860": 17446985728.0, "9865": 17446985728.0, "9870": 17446985728.0, "9875": 17446985728.0, "9880": 17446985728.0, "9885": 17446985728.0, "9890": 17446985728.0, "9895": 17446985728.0, "9900": 17446985728.0, "9905": 17446985728.0, "9910": 17446985728.0, "9915": 17446985728.0, "9920": 17446985728.0, "9925": 17446985728.0, "9930": 17446985728.0, "9935": 17446985728.0, "9940": 17446985728.0, "9945": 17446985728.0, "9950": 17446985728.0, "9955": 17446985728.0, "9960": 17446985728.0, "9965": 17446985728.0, "9970": 
17446985728.0, "9975": 17447370752.0, "9980": 17447256064.0, "9985": 17446985728.0, "9990": 17446985728.0, "9995": 17446985728.0, "10000": 17446985728.0, "10005": 17446985728.0, "10010": 17446985728.0, "10015": 17446985728.0, "10020": 17446985728.0, "10025": 17446985728.0, "10030": 17447043072.0, "10035": 17446985728.0, "10040": 17446985728.0, "10045": 17446985728.0, "10050": 17446985728.0, "10055": 17446985728.0, "10060": 17446985728.0, "10065": 17446985728.0, "10070": 17446985728.0, "10075": 17446985728.0, "10080": 17446985728.0, "10085": 17446985728.0, "10090": 17446985728.0, "10095": 17446985728.0, "10100": 17446985728.0, "10105": 17446985728.0, "10110": 17446985728.0, "10115": 17446985728.0, "10120": 17446985728.0, "10125": 17446985728.0, "10130": 17446985728.0, "10135": 17446985728.0, "10140": 17446985728.0, "10145": 17446985728.0, "10150": 17446985728.0, "10155": 17446985728.0, "10160": 17446985728.0, "10165": 17447743488.0, "10170": 17446985728.0, "10175": 17447120896.0, "10180": 17446985728.0, "10185": 17446985728.0, "10190": 17446985728.0, "10195": 17446985728.0, "10200": 17446985728.0, "10205": 17447841792.0, "10210": 17446985728.0, "10215": 17446985728.0, "10220": 17446985728.0, "10225": 17446985728.0, "10230": 17447927808.0, "10235": 17448009728.0, "10240": 17446985728.0, "10245": 17446985728.0, "10250": 17446985728.0, "10255": 17446985728.0, "10260": 17446985728.0, "10265": 17446985728.0, "10270": 17446985728.0, "10275": 17446985728.0, "10280": 17447698432.0, "10285": 17446985728.0, "10290": 17446985728.0, "10295": 17446985728.0, "10300": 17446985728.0, "10305": 17446985728.0, "10310": 17446985728.0, "10315": 17446985728.0, "10320": 17446985728.0, "10325": 17447788544.0, "10330": 17446985728.0, "10335": 17446985728.0, "10340": 17446985728.0, "10345": 17446985728.0, "10350": 17446985728.0, "10355": 17447657472.0, "10360": 17446985728.0, "10365": 17446985728.0, "10370": 17446985728.0, "10375": 17446985728.0, "10380": 17446985728.0, "10385": 17446985728.0, "10390": 17446985728.0, "10395": 17446985728.0, "10400": 17447968768.0, "10405": 17447411712.0, "10410": 17447411712.0, "10415": 17447411712.0, "10420": 17447411712.0, "10425": 17447411712.0, "10430": 17448280064.0, "10435": 17447411712.0, "10440": 17448329216.0, "10445": 17447411712.0, "10450": 17448288256.0, "10455": 17447411712.0, "10460": 17447411712.0, "10465": 17447411712.0, "10470": 17447411712.0, "10475": 17447411712.0, "10480": 17447411712.0, "10485": 17447411712.0, "10490": 17447411712.0, "10495": 17447411712.0, "10500": 17447411712.0, "10505": 17447411712.0, "10510": 17448099840.0, "10515": 17447411712.0, "10520": 17447411712.0, "10525": 17447411712.0, "10530": 17447411712.0, "10535": 17447411712.0, "10540": 17447411712.0, "10545": 17447411712.0, "10550": 17447411712.0, "10555": 17447411712.0, "10560": 17447411712.0, "10565": 17447411712.0, "10570": 17448280064.0, "10575": 17447411712.0, "10580": 17447411712.0, "10585": 17447411712.0, "10590": 17447411712.0, "10595": 17447411712.0, "10600": 17447411712.0, "10605": 17447411712.0, "10610": 17447411712.0, "10615": 17447411712.0, "10620": 17447411712.0, "10625": 17447411712.0, "10630": 17447411712.0, "10635": 17447411712.0, "10640": 17447411712.0, "10645": 17447411712.0, "10650": 17447411712.0, "10655": 17447411712.0, "10660": 17447411712.0, "10665": 17447460864.0, "10670": 17447411712.0, "10675": 17447411712.0, "10680": 17447411712.0, "10685": 17447411712.0, "10690": 17447411712.0, "10695": 17447411712.0, "10700": 17447411712.0, "10705": 17447411712.0, "10710": 
17447411712.0, "10715": 17447411712.0, "10720": 17447411712.0, "10725": 17447419904.0, "10730": 17447411712.0, "10735": 17447411712.0, "10740": 17447411712.0, "10745": 17447411712.0, "10750": 17447411712.0, "10755": 17447411712.0, "10760": 17447411712.0, "10765": 17448345600.0, "10770": 17447411712.0, "10775": 17447411712.0, "10780": 17447411712.0, "10785": 17447411712.0, "10790": 17448243200.0, "10795": 17447411712.0, "10800": 17447411712.0, "10805": 17447534592.0, "10810": 17447583744.0, "10815": 17447411712.0, "10820": 17447411712.0, "10825": 17447411712.0, "10830": 17447411712.0, "10835": 17447411712.0, "10840": 17447411712.0, "10845": 17447411712.0, "10850": 17447411712.0, "10855": 17447411712.0, "10860": 17447411712.0, "10865": 17447411712.0, "10870": 17447411712.0, "10875": 17447411712.0, "10880": 17447411712.0, "10885": 17447411712.0, "10890": 17447772160.0, "10895": 17447411712.0, "10900": 17447411712.0, "10905": 17447411712.0, "10910": 17447411712.0, "10915": 17447411712.0, "10920": 17447411712.0, "10925": 17447411712.0, "10930": 17447849984.0, "10935": 17447411712.0, "10940": 17447411712.0, "10945": 17447411712.0, "10950": 17447411712.0, "10955": 17447411712.0, "10960": 17447526400.0, "10965": 17447411712.0, "10970": 17447411712.0, "10975": 17447411712.0, "10980": 17447411712.0, "10985": 17447411712.0, "10990": 17447411712.0, "10995": 17446985728.0, "11000": 17446985728.0, "11005": 17446985728.0, "11010": 17446985728.0, "11015": 17446985728.0, "11020": 17447321600.0, "11025": 17446985728.0, "11030": 17447096320.0, "11035": 17447165952.0, "11040": 17447972864.0, "11045": 17447124992.0, "11050": 17446985728.0, "11055": 17446985728.0, "11060": 17447915520.0, "11065": 17446985728.0, "11070": 17447858176.0, "11075": 17446985728.0, "11080": 17446985728.0, "11085": 17447313408.0, "11090": 17446985728.0, "11095": 17446985728.0, "11100": 17446985728.0, "11105": 17447415808.0, "11110": 17447931904.0, "11115": 17446985728.0, "11120": 17446985728.0, "11125": 17447600128.0, "11130": 17446985728.0, "11135": 17447460864.0, "11140": 17446985728.0, "11145": 17447723008.0, "11150": 17446985728.0, "11155": 17446985728.0, "11160": 17446985728.0, "11165": 17447464960.0, "11170": 17446985728.0, "11175": 17447010304.0, "11180": 17446985728.0, "11185": 17446985728.0, "11190": 17446985728.0, "11195": 17446985728.0, "11200": 17446985728.0, "11205": 17447370752.0, "11210": 17446985728.0, "11215": 17446985728.0, "11220": 17446985728.0, "11225": 17447215104.0, "11230": 17446985728.0, "11235": 17447510016.0, "11240": 17446985728.0, "11245": 17446985728.0, "11250": 17446985728.0, "11255": 17446985728.0, "11260": 17446985728.0, "11265": 17446985728.0, "11270": 17446985728.0, "11275": 17446985728.0, "11280": 17446985728.0, "11285": 17446985728.0, "11290": 17446985728.0, "11295": 17446985728.0, "11300": 17446985728.0, "11305": 17446985728.0, "11310": 17446985728.0, "11315": 17447841792.0, "11320": 17446985728.0, "11325": 17446985728.0, "11330": 17446985728.0, "11335": 17446985728.0, "11340": 17446985728.0, "11345": 17446985728.0, "11350": 17446985728.0, "11355": 17446985728.0, "11360": 17446985728.0, "11365": 17446985728.0, "11370": 17446985728.0, "11375": 17446985728.0, "11380": 17446985728.0, "11385": 17446985728.0, "11390": 17446985728.0, "11395": 17446985728.0, "11400": 17446985728.0, "11405": 17446985728.0, "11410": 17446985728.0, "11415": 17446985728.0, "11420": 17446985728.0, "11425": 17446985728.0, "11430": 17446985728.0, "11435": 17446985728.0, "11440": 17446985728.0, "11445": 17446985728.0, "11450": 
17446985728.0, "11455": 17446985728.0, "11460": 17446985728.0, "11465": 17446985728.0, "11470": 17446985728.0, "11475": 17446985728.0, "11480": 17446985728.0, "11485": 17446985728.0, "11490": 17447714816.0, "11495": 17446985728.0, "11500": 17446985728.0, "11505": 17446985728.0, "11510": 17447632896.0, "11515": 17446985728.0, "11520": 17447612416.0, "11525": 17446985728.0, "11530": 17447337984.0, "11535": 17446985728.0, "11540": 17446985728.0, "11545": 17446985728.0, "11550": 17446985728.0, "11555": 17446985728.0, "11560": 17446985728.0, "11565": 17446985728.0, "11570": 17446985728.0, "11575": 17446985728.0, "11580": 17446985728.0, "11585": 17447452672.0, "11590": 17446985728.0, "11595": 17446985728.0, "11600": 17446985728.0, "11605": 17446985728.0, "11610": 17446985728.0, "11615": 17447534592.0, "11620": 17446985728.0, "11625": 17447440384.0, "11630": 17446985728.0, "11635": 17446985728.0, "11640": 17446985728.0, "11645": 17446985728.0, "11650": 17446985728.0, "11655": 17446985728.0, "11660": 17446985728.0, "11665": 17446985728.0, "11670": 17447198720.0, "11675": 17446985728.0, "11680": 17446985728.0, "11685": 17446985728.0, "11690": 17446985728.0, "11695": 17446985728.0, "11700": 17447190528.0, "11705": 17447284736.0, "11710": 17446985728.0, "11715": 17447927808.0, "11720": 17446985728.0, "11725": 17446985728.0, "11730": 17447170048.0, "11735": 17446985728.0, "11740": 17446985728.0, "11745": 17446985728.0, "11750": 17446985728.0, "11755": 17446985728.0, "11760": 17446985728.0, "11765": 17446985728.0, "11770": 17446985728.0, "11775": 17446985728.0, "11780": 17448001536.0, "11785": 17446985728.0, "11790": 17446985728.0, "11795": 17446985728.0, "11800": 17446985728.0, "11805": 17446985728.0, "11810": 17446985728.0, "11815": 17446985728.0, "11820": 17446985728.0, "11825": 17446985728.0, "11830": 17446985728.0, "11835": 17446985728.0, "11840": 17446985728.0, "11845": 17446985728.0, "11850": 17446985728.0, "11855": 17447407616.0, "11860": 17446985728.0, "11865": 17446985728.0, "11870": 17446985728.0, "11875": 17446985728.0, "11880": 17446985728.0, "11885": 17446985728.0, "11890": 17446985728.0, "11895": 17446985728.0, "11900": 17447067648.0, "11905": 17446985728.0, "11910": 17447231488.0, "11915": 17447698432.0, "11920": 17446985728.0, "11925": 17446985728.0, "11930": 17446985728.0, "11935": 17446985728.0, "11940": 17447575552.0, "11945": 17447636992.0, "11950": 17446985728.0, "11955": 17446985728.0, "11960": 17446985728.0, "11965": 17447477248.0, "11970": 17446985728.0, "11975": 17446985728.0, "11980": 17446985728.0, "11985": 17446985728.0, "11990": 17446985728.0, "11995": 17446985728.0, "12000": 17446985728.0, "12005": 17446985728.0, "12010": 17446985728.0, "12015": 17446985728.0, "12020": 17446985728.0, "12025": 17446985728.0, "12030": 17446985728.0, "12035": 17446985728.0, "12040": 17446985728.0, "12045": 17447895040.0, "12050": 17447493632.0, "12055": 17446985728.0, "12060": 17446985728.0, "12065": 17446985728.0, "12070": 17447837696.0, "12075": 17446985728.0, "12080": 17446985728.0, "12085": 17446985728.0, "12090": 17446985728.0, "12095": 17446985728.0, "12100": 17446985728.0, "12105": 17446985728.0, "12110": 17446985728.0, "12115": 17447350272.0, "12120": 17446985728.0, "12125": 17446985728.0, "12130": 17446985728.0, "12135": 17446985728.0, "12140": 17446985728.0, "12145": 17446985728.0, "12150": 17446985728.0, "12155": 17447157760.0, "12160": 17446985728.0, "12165": 17446985728.0, "12170": 17446985728.0, "12175": 17446985728.0, "12180": 17446985728.0, "12185": 17446985728.0, "12190": 
17446985728.0, "12195": 17446985728.0, "12200": 17446985728.0, "12205": 17446985728.0, "12210": 17446985728.0, "12215": 17446985728.0, "12220": 17446985728.0, "12225": 17446985728.0, "12230": 17446985728.0, "12235": 17446985728.0, "12240": 17446985728.0, "12245": 17446985728.0, "12250": 17446985728.0, "12255": 17446985728.0, "12260": 17446985728.0, "12265": 17446985728.0, "12270": 17446985728.0, "12275": 17446985728.0, "12280": 17446985728.0, "12285": 17446985728.0, "12290": 17446985728.0, "12295": 17446985728.0, "12300": 17446985728.0, "12305": 17446985728.0, "12310": 17446985728.0, "12315": 17446985728.0, "12320": 17446985728.0, "12325": 17446985728.0, "12330": 17446985728.0, "12335": 17446985728.0, "12340": 17446985728.0, "12345": 17446985728.0, "12350": 17446985728.0, "12355": 17446985728.0, "12360": 17446985728.0, "12365": 17446985728.0, "12370": 17446985728.0, "12375": 17446985728.0, "12380": 17446985728.0, "12385": 17446985728.0, "12390": 17447976960.0, "12395": 17447428096.0, "12400": 17446985728.0, "12405": 17446985728.0, "12410": 17446985728.0, "12415": 17446985728.0, "12420": 17446985728.0, "12425": 17446985728.0, "12430": 17446985728.0, "12435": 17446985728.0, "12440": 17446985728.0, "12445": 17446985728.0, "12450": 17446985728.0, "12455": 17446985728.0, "12460": 17447559168.0, "12465": 17447886848.0, "12470": 17446985728.0, "12475": 17446985728.0, "12480": 17446985728.0, "12485": 17446985728.0, "12490": 17446985728.0, "12495": 17446985728.0, "12500": 17446985728.0, "12505": 17446985728.0, "12510": 17447411712.0, "12515": 17446985728.0, "12520": 17446985728.0, "12525": 17446985728.0, "12530": 17446985728.0, "12535": 17446985728.0, "12540": 17446985728.0, "12545": 17446985728.0, "12550": 17446985728.0, "12555": 17446985728.0, "12560": 17446985728.0, "12565": 17447804928.0, "12570": 17446985728.0, "12575": 17446985728.0, "12580": 17446985728.0, "12585": 17447804928.0, "12590": 17446985728.0, "12595": 17446985728.0, "12600": 17446985728.0, "12605": 17446985728.0, "12610": 17446985728.0, "12615": 17446985728.0, "12620": 17446985728.0, "12625": 17446985728.0, "12630": 17447518208.0, "12635": 17446985728.0, "12640": 17446985728.0, "12645": 17446985728.0, "12650": 17446985728.0, "12655": 17446985728.0, "12660": 17446985728.0, "12665": 17446985728.0, "12670": 17446985728.0, "12675": 17446985728.0, "12680": 17446985728.0, "12685": 17446985728.0, "12690": 17446985728.0, "12695": 17446985728.0, "12700": 17446985728.0, "12705": 17446985728.0, "12710": 17446985728.0, "12715": 17447555072.0, "12720": 17446985728.0, "12725": 17447100416.0, "12730": 17446985728.0, "12735": 17446985728.0, "12740": 17446985728.0, "12745": 17446985728.0, "12750": 17446985728.0, "12755": 17446985728.0, "12760": 17446985728.0, "12765": 17446985728.0, "12770": 17446985728.0, "12775": 17447190528.0, "12780": 17447903232.0, "12785": 17446985728.0, "12790": 17446985728.0, "12795": 17446985728.0, "12800": 17446985728.0, "12805": 17447862272.0, "12810": 17446985728.0, "12815": 17446985728.0, "12820": 17447235584.0, "12825": 17446985728.0, "12830": 17446985728.0, "12835": 17446985728.0, "12840": 17446985728.0, "12845": 17446985728.0, "12850": 17446985728.0, "12855": 17446985728.0, "12860": 17446985728.0, "12865": 17446985728.0, "12870": 17446985728.0, "12875": 17447739392.0, "12880": 17446985728.0, "12885": 17446985728.0, "12890": 17446985728.0, "12895": 17446985728.0, "12900": 17446985728.0, "12905": 17446985728.0, "12910": 17447501824.0, "12915": 17446985728.0, "12920": 17446985728.0, "12925": 17446985728.0, "12930": 
17446985728.0, "12935": 17446985728.0, "12940": 17446985728.0, "12945": 17446985728.0, "12950": 17446985728.0, "12955": 17446985728.0, "12960": 17447141376.0, "12965": 17446985728.0, "12970": 17446985728.0, "12975": 17446985728.0, "12980": 17447444480.0, "12985": 17446985728.0, "12990": 17446985728.0, "12995": 17446985728.0, "13000": 17446985728.0, "13005": 17446985728.0, "13010": 17446985728.0, "13015": 17446985728.0, "13020": 17446985728.0, "13025": 17446985728.0, "13030": 17446985728.0, "13035": 17447657472.0, "13040": 17446985728.0, "13045": 17446985728.0, "13050": 17446985728.0, "13055": 17446985728.0, "13060": 17446985728.0, "13065": 17446985728.0, "13070": 17446985728.0, "13075": 17446985728.0, "13080": 17446985728.0, "13085": 17446985728.0, "13090": 17446985728.0, "13095": 17446985728.0, "13100": 17446985728.0, "13105": 17447452672.0, "13110": 17447198720.0, "13115": 17446985728.0, "13120": 17446985728.0, "13125": 17446985728.0, "13130": 17446985728.0, "13135": 17446985728.0, "13140": 17446985728.0, "13145": 17446985728.0, "13150": 17446985728.0, "13155": 17446985728.0, "13160": 17446985728.0, "13165": 17447100416.0, "13170": 17446985728.0, "13175": 17446985728.0, "13180": 17446985728.0, "13185": 17446985728.0, "13190": 17447673856.0, "13195": 17446985728.0, "13200": 17446985728.0, "13205": 17447157760.0, "13210": 17446985728.0, "13215": 17446985728.0, "13220": 17446985728.0, "13225": 17447337984.0, "13230": 17447436288.0, "13235": 17446985728.0, "13240": 17446985728.0, "13245": 17446985728.0, "13250": 17446985728.0, "13255": 17446985728.0, "13260": 17446985728.0, "13265": 17446985728.0, "13270": 17446985728.0, "13275": 17447837696.0, "13280": 17446985728.0, "13285": 17446985728.0, "13290": 17446985728.0, "13295": 17446985728.0, "13300": 17446985728.0, "13305": 17446985728.0, "13310": 17446985728.0, "13315": 17447170048.0, "13320": 17446985728.0, "13325": 17446985728.0, "13330": 17446985728.0, "13335": 17446985728.0, "13340": 17447530496.0, "13345": 17446985728.0, "13350": 17446985728.0, "13355": 17446985728.0, "13360": 17446985728.0, "13365": 17446985728.0, "13370": 17446985728.0, "13375": 17446985728.0, "13380": 17446985728.0, "13385": 17446985728.0, "13390": 17446985728.0, "13395": 17446985728.0, "13400": 17446985728.0, "13405": 17446985728.0, "13410": 17446985728.0, "13415": 17446985728.0, "13420": 17446985728.0, "13425": 17446985728.0, "13430": 17447723008.0, "13435": 17446985728.0, "13440": 17447366656.0, "13445": 17446985728.0, "13450": 17446985728.0, "13455": 17446985728.0, "13460": 17446985728.0, "13465": 17447284736.0, "13470": 17447452672.0, "13475": 17446985728.0, "13480": 17447858176.0, "13485": 17446985728.0, "13490": 17446985728.0, "13495": 17446985728.0, "13500": 17446985728.0, "13505": 17446985728.0, "13510": 17446985728.0, "13515": 17446985728.0, "13520": 17446985728.0, "13525": 17446985728.0, "13530": 17447972864.0, "13535": 17446985728.0, "13540": 17446985728.0, "13545": 17446985728.0, "13550": 17446985728.0, "13555": 17446985728.0, "13560": 17446985728.0, "13565": 17446985728.0, "13570": 17446985728.0, "13575": 17447632896.0, "13580": 17446985728.0, "13585": 17446985728.0, "13590": 17446985728.0, "13595": 17446985728.0, "13600": 17446985728.0, "13605": 17446985728.0, "13610": 17447174144.0, "13615": 17447018496.0, "13620": 17447370752.0, "13625": 17446985728.0, "13630": 17446985728.0, "13635": 17446985728.0, "13640": 17446985728.0, "13645": 17446985728.0, "13650": 17447100416.0, "13655": 17446985728.0, "13660": 17446985728.0, "13665": 17446985728.0, "13670": 
17447571456.0, "13675": 17446985728.0, "13680": 17446985728.0, "13685": 17446985728.0, "13690": 17447211008.0, "13695": 17446985728.0, "13700": 17446985728.0, "13705": 17446985728.0, "13710": 17446985728.0, "13715": 17446985728.0, "13720": 17446985728.0, "13725": 17446985728.0, "13730": 17447219200.0, "13735": 17446985728.0, "13740": 17447395328.0, "13745": 17446985728.0, "13750": 17446985728.0, "13755": 17447190528.0, "13760": 17447702528.0, "13765": 17446985728.0, "13770": 17446985728.0, "13775": 17446985728.0, "13780": 17446985728.0, "13785": 17446985728.0, "13790": 17446985728.0, "13795": 17447657472.0, "13800": 17446985728.0, "13805": 17446985728.0, "13810": 17447534592.0, "13815": 17446985728.0, "13820": 17446985728.0, "13825": 17446985728.0, "13830": 17446985728.0, "13835": 17446985728.0, "13840": 17446985728.0, "13845": 17446985728.0, "13850": 17446985728.0, "13855": 17446985728.0, "13860": 17448722432.0, "13865": 17446985728.0, "13870": 17446985728.0, "13875": 17446985728.0, "13880": 17447579648.0, "13885": 17446985728.0, "13890": 17446985728.0, "13895": 17446985728.0, "13900": 17446985728.0, "13905": 17446985728.0, "13910": 17446985728.0, "13915": 17447026688.0, "13920": 17446985728.0, "13925": 17446985728.0, "13930": 17446985728.0, "13935": 17446985728.0, "13940": 17446985728.0, "13945": 17446985728.0, "13950": 17446985728.0, "13955": 17446985728.0, "13960": 17446985728.0, "13965": 17446985728.0, "13970": 17446985728.0, "13975": 17446985728.0, "13980": 17446985728.0, "13985": 17447354368.0, "13990": 17446985728.0, "13995": 17446985728.0, "14000": 17446985728.0, "14005": 17446985728.0, "14010": 17446985728.0, "14015": 17447198720.0, "14020": 17447575552.0, "14025": 17446985728.0, "14030": 17446985728.0, "14035": 17447493632.0, "14040": 17446985728.0, "14045": 17447788544.0, "14050": 17446985728.0, "14055": 17446985728.0, "14060": 17446985728.0, "14065": 17446985728.0, "14070": 17446985728.0, "14075": 17446985728.0, "14080": 17446985728.0, "14085": 17446985728.0, "14090": 17447956480.0, "14095": 17446985728.0, "14100": 17446985728.0, "14105": 17446985728.0, "14110": 17447313408.0, "14115": 17446985728.0, "14120": 17446985728.0, "14125": 17446985728.0, "14130": 17446985728.0, "14135": 17447141376.0, "14140": 17447469056.0, "14145": 17446985728.0, "14150": 17447641088.0, "14155": 17446985728.0, "14160": 17446985728.0, "14165": 17446985728.0, "14170": 17447739392.0, "14175": 17446985728.0, "14180": 17446985728.0, "14185": 17446985728.0, "14190": 17446985728.0, "14195": 17446985728.0, "14200": 17446985728.0, "14205": 17446985728.0, "14210": 17446985728.0, "14215": 17446985728.0, "14220": 17446985728.0, "14225": 17446985728.0, "14230": 17446985728.0, "14235": 17447600128.0, "14240": 17448185856.0, "14245": 17446985728.0, "14250": 17446985728.0, "14255": 17446985728.0, "14260": 17446985728.0, "14265": 17446985728.0, "14270": 17446985728.0, "14275": 17446985728.0, "14280": 17446985728.0, "14285": 17446985728.0, "14290": 17446985728.0, "14295": 17446985728.0, "14300": 17446985728.0, "14305": 17446985728.0, "14310": 17446985728.0, "14315": 17446985728.0, "14320": 17446985728.0, "14325": 17446985728.0, "14330": 17446985728.0, "14335": 17446985728.0, "14340": 17446985728.0, "14345": 17446985728.0, "14350": 17446985728.0, "14355": 17446985728.0, "14360": 17446985728.0, "14365": 17446985728.0, "14370": 17446985728.0, "14375": 17446985728.0, "14380": 17446985728.0, "14385": 17446985728.0, "14390": 17446985728.0, "14395": 17446985728.0, "14400": 17446985728.0, "14405": 17447534592.0, "14410": 
17446985728.0, "14415": 17446985728.0, "14420": 17446985728.0, "14425": 17446985728.0, "14430": 17446985728.0, "14435": 17446985728.0, "14440": 17446985728.0, "14445": 17446985728.0, "14450": 17446985728.0, "14455": 17446985728.0, "14460": 17446985728.0, "14465": 17446985728.0, "14470": 17446985728.0, "14475": 17446985728.0, "14480": 17446985728.0, "14485": 17446985728.0, "14490": 17446985728.0, "14495": 17446985728.0, "14500": 17446985728.0, "14505": 17446985728.0, "14510": 17446985728.0, "14515": 17446985728.0, "14520": 17446985728.0, "14525": 17446985728.0, "14530": 17446985728.0, "14535": 17446985728.0, "14540": 17446985728.0, "14545": 17447624704.0, "14550": 17447116800.0, "14555": 17446985728.0, "14560": 17446985728.0, "14565": 17447116800.0, "14570": 17446985728.0, "14575": 17446985728.0, "14580": 17446985728.0, "14585": 17446985728.0, "14590": 17446985728.0, "14595": 17446985728.0, "14600": 17446985728.0, "14605": 17446985728.0, "14610": 17446985728.0, "14615": 17446985728.0, "14620": 17446985728.0, "14625": 17446985728.0, "14630": 17446985728.0, "14635": 17446985728.0, "14640": 17446985728.0, "14645": 17446985728.0, "14650": 17446985728.0, "14655": 17446985728.0, "14660": 17446985728.0, "14665": 17447927808.0, "14670": 17446985728.0, "14675": 17446985728.0, "14680": 17446985728.0, "14685": 17446985728.0, "14690": 17446985728.0, "14695": 17446985728.0, "14700": 17446985728.0, "14705": 17446985728.0, "14710": 17446985728.0, "14715": 17446985728.0, "14720": 17446985728.0, "14725": 17446985728.0, "14730": 17446985728.0, "14735": 17446985728.0, "14740": 17446985728.0, "14745": 17446985728.0, "14750": 17446985728.0, "14755": 17446985728.0, "14760": 17446985728.0, "14765": 17446985728.0, "14770": 17446985728.0, "14775": 17446985728.0, "14780": 17446985728.0, "14785": 17447485440.0, "14790": 17446985728.0, "14795": 17447030784.0, "14800": 17447194624.0, "14805": 17446985728.0, "14810": 17446985728.0, "14815": 17446985728.0, "14820": 17446985728.0, "14825": 17446985728.0, "14830": 17446985728.0, "14835": 17446985728.0, "14840": 17446985728.0, "14845": 17446985728.0, "14850": 17446985728.0, "14855": 17446985728.0, "14860": 17446985728.0, "14865": 17446985728.0, "14870": 17446985728.0, "14875": 17446985728.0, "14880": 17446985728.0, "14885": 17446985728.0, "14890": 17447428096.0, "14895": 17446985728.0, "14900": 17446985728.0, "14905": 17447100416.0, "14910": 17446985728.0, "14915": 17446985728.0, "14920": 17446985728.0, "14925": 17446985728.0, "14930": 17447317504.0, "14935": 17446985728.0, "14940": 17447641088.0, "14945": 17446985728.0, "14950": 17447874560.0, "14955": 17446985728.0, "14960": 17446985728.0, "14965": 17446985728.0, "14970": 17447981056.0, "14975": 17446985728.0, "14980": 17446985728.0, "14985": 17446985728.0, "14990": 17447256064.0, "14995": 17446985728.0, "15000": 17446985728.0, "15005": 17446985728.0, "15010": 17446985728.0, "15015": 17446985728.0, "15020": 17446985728.0, "15025": 17446985728.0, "15030": 17446985728.0, "15035": 17447555072.0, "15040": 17446985728.0, "15045": 17447624704.0, "15050": 17448140800.0, "15055": 17447624704.0, "15060": 17447624704.0, "15065": 17447624704.0, "15070": 17447346176.0, "15075": 17446985728.0, "15080": 17446985728.0, "15085": 17446985728.0, "15090": 17446985728.0, "15095": 17446985728.0, "15100": 17446985728.0, "15105": 17446985728.0, "15110": 17446985728.0, "15115": 17447124992.0, "15120": 17446985728.0, "15125": 17447641088.0, "15130": 17446985728.0, "15135": 17447174144.0, "15140": 17446985728.0, "15145": 17446985728.0, "15150": 
17446985728.0, "15155": 17447133184.0, "15160": 17446985728.0, "15165": 17446985728.0, "15170": 17446985728.0, "15175": 17446985728.0, "15180": 17446985728.0, "15185": 17447084032.0, "15190": 17446985728.0, "15195": 17446985728.0, "15200": 17446985728.0, "15205": 17446985728.0, "15210": 17446985728.0, "15215": 17447264256.0, "15220": 17447133184.0, "15225": 17446985728.0, "15230": 17447251968.0, "15235": 17446985728.0, "15240": 17447370752.0, "15245": 17446985728.0, "15250": 17446985728.0, "15255": 17447849984.0, "15260": 17447116800.0, "15265": 17446985728.0, "15270": 17446985728.0, "15275": 17447108608.0, "15280": 17446985728.0, "15285": 17446985728.0, "15290": 17446985728.0, "15295": 17446985728.0, "15300": 17446985728.0, "15305": 17446985728.0, "15310": 17448034304.0, "15315": 17447051264.0, "15320": 17446985728.0, "15325": 17446985728.0, "15330": 17446985728.0, "15335": 17446985728.0, "15340": 17446985728.0, "15345": 17446985728.0, "15350": 17446985728.0, "15355": 17446985728.0, "15360": 17446985728.0, "15365": 17446985728.0, "15370": 17446985728.0, "15375": 17446985728.0, "15380": 17446985728.0, "15385": 17446985728.0, "15390": 17447858176.0, "15395": 17446985728.0, "15400": 17447542784.0, "15405": 17446985728.0, "15410": 17446985728.0, "15415": 17446985728.0, "15420": 17446985728.0, "15425": 17448341504.0, "15430": 17447600128.0, "15435": 17446985728.0, "15440": 17446985728.0, "15445": 17446985728.0, "15450": 17447804928.0, "15455": 17446985728.0, "15460": 17446985728.0, "15465": 17446985728.0, "15470": 17446985728.0, "15475": 17446985728.0, "15480": 17447165952.0, "15485": 17446985728.0, "15490": 17446985728.0, "15495": 17446985728.0, "15500": 17446985728.0, "15505": 17447682048.0, "15510": 17447682048.0, "15515": 17447682048.0, "15520": 17447682048.0, "15525": 17447682048.0, "15530": 17447682048.0, "15535": 17447682048.0, "15540": 17448304640.0, "15545": 17447682048.0, "15550": 17447682048.0, "15555": 17447682048.0, "15560": 17447682048.0, "15565": 17447682048.0, "15570": 17447682048.0, "15575": 17447682048.0, "15580": 17447739392.0, "15585": 17447682048.0, "15590": 17447682048.0, "15595": 17448124416.0, "15600": 17447034880.0, "15605": 17446985728.0, "15610": 17446985728.0, "15615": 17446985728.0, "15620": 17446985728.0, "15625": 17446985728.0, "15630": 17446985728.0, "15635": 17446985728.0, "15640": 17446985728.0, "15645": 17446985728.0, "15650": 17446985728.0, "15655": 17446985728.0, "15660": 17447018496.0, "15665": 17446985728.0, "15670": 17446985728.0, "15675": 17446985728.0, "15680": 17446985728.0, "15685": 17446985728.0, "15690": 17446985728.0, "15695": 17446985728.0, "15700": 17446985728.0, "15705": 17446985728.0, "15710": 17446985728.0, "15715": 17446985728.0, "15720": 17447559168.0, "15725": 17447493632.0, "15730": 17446985728.0, "15735": 17446985728.0, "15740": 17446985728.0, "15745": 17446985728.0, "15750": 17446985728.0, "15755": 17446985728.0, "15760": 17446985728.0, "15765": 17446985728.0, "15770": 17446985728.0, "15775": 17446985728.0, "15780": 17446985728.0, "15785": 17446985728.0, "15790": 17446985728.0, "15795": 17446985728.0, "15800": 17446985728.0, "15805": 17447591936.0, "15810": 17447485440.0, "15815": 17446985728.0, "15820": 17446985728.0, "15825": 17447190528.0, "15830": 17446985728.0, "15835": 17446985728.0, "15840": 17446985728.0, "15845": 17446985728.0, "15850": 17446985728.0, "15855": 17446985728.0, "15860": 17446985728.0, "15865": 17447510016.0, "15870": 17446985728.0, "15875": 17446985728.0, "15880": 17446985728.0, "15885": 17446985728.0, "15890": 
17446985728.0, "15895": 17446985728.0, "15900": 17446985728.0, "15905": 17446985728.0, "15910": 17446985728.0, "15915": 17446985728.0, "15920": 17446985728.0, "15925": 17446985728.0, "15930": 17446985728.0, "15935": 17447854080.0, "15940": 17446985728.0, "15945": 17446985728.0, "15950": 17447370752.0, "15955": 17446985728.0, "15960": 17446985728.0, "15965": 17446985728.0, "15970": 17446985728.0, "15975": 17447665664.0, "15980": 17446985728.0, "15985": 17447886848.0, "15990": 17446985728.0, "15995": 17446985728.0, "16000": 17448038400.0, "16005": 17446985728.0, "16010": 17447559168.0, "16015": 17446985728.0, "16020": 17446985728.0, "16025": 17446985728.0, "16030": 17446985728.0, "16035": 17446985728.0, "16040": 17446985728.0, "16045": 17446985728.0, "16050": 17446985728.0, "16055": 17447452672.0, "16060": 17446985728.0, "16065": 17446985728.0, "16070": 17447198720.0, "16075": 17446985728.0, "16080": 17446985728.0, "16085": 17446985728.0, "16090": 17446985728.0, "16095": 17446985728.0, "16100": 17447559168.0, "16105": 17446985728.0, "16110": 17446985728.0, "16115": 17446985728.0, "16120": 17446985728.0, "16125": 17446985728.0, "16130": 17446985728.0, "16135": 17446985728.0, "16140": 17448034304.0, "16145": 17446985728.0, "16150": 17446985728.0, "16155": 17446985728.0, "16160": 17446985728.0, "16165": 17447084032.0, "16170": 17446985728.0, "16175": 17446985728.0, "16180": 17446985728.0, "16185": 17446985728.0, "16190": 17446985728.0, "16195": 17446985728.0, "16200": 17446985728.0, "16205": 17446985728.0, "16210": 17446985728.0, "16215": 17446985728.0, "16220": 17446985728.0, "16225": 17446985728.0, "16230": 17446985728.0, "16235": 17446985728.0, "16240": 17446985728.0, "16245": 17446985728.0, "16250": 17446985728.0, "16255": 17446985728.0, "16260": 17447649280.0, "16265": 17446985728.0, "16270": 17446985728.0, "16275": 17446985728.0, "16280": 17446985728.0, "16285": 17446985728.0, "16290": 17446985728.0, "16295": 17446985728.0, "16300": 17446985728.0, "16305": 17446985728.0, "16310": 17447403520.0, "16315": 17446985728.0, "16320": 17448235008.0, "16325": 17446985728.0, "16330": 17447124992.0, "16335": 17447862272.0, "16340": 17446985728.0, "16345": 17446985728.0, "16350": 17446985728.0, "16355": 17446985728.0, "16360": 17447841792.0, "16365": 17447907328.0, "16370": 17446985728.0, "16375": 17447837696.0, "16380": 17447821312.0, "16385": 17446985728.0, "16390": 17446985728.0, "16395": 17446985728.0, "16400": 17446985728.0, "16405": 17446985728.0, "16410": 17446985728.0, "16415": 17446985728.0, "16420": 17446985728.0, "16425": 17446985728.0, "16430": 17447100416.0, "16435": 17446985728.0, "16440": 17447059456.0, "16445": 17446985728.0, "16450": 17446985728.0, "16455": 17446985728.0, "16460": 17446985728.0, "16465": 17446985728.0, "16470": 17446985728.0, "16475": 17446985728.0, "16480": 17446985728.0, "16485": 17446985728.0, "16490": 17446985728.0, "16495": 17446985728.0, "16500": 17446985728.0, "16505": 17446985728.0, "16510": 17446985728.0, "16515": 17446985728.0, "16520": 17446985728.0, "16525": 17446985728.0, "16530": 17446985728.0, "16535": 17446985728.0, "16540": 17447600128.0, "16545": 17446985728.0, "16550": 17446985728.0, "16555": 17446985728.0, "16560": 17446985728.0, "16565": 17447100416.0, "16570": 17446985728.0, "16575": 17446985728.0, "16580": 17446985728.0, "16585": 17446985728.0, "16590": 17447567360.0, "16595": 17446985728.0, "16600": 17446985728.0, "16605": 17446985728.0, "16610": 17446985728.0, "16615": 17446985728.0, "16620": 17446985728.0, "16625": 17446985728.0, "16630": 
17446985728.0, "16635": 17446985728.0, "16640": 17446985728.0, "16645": 17446985728.0, "16650": 17446985728.0, "16655": 17446985728.0, "16660": 17446985728.0, "16665": 17446985728.0, "16670": 17446985728.0, "16675": 17446985728.0, "16680": 17446985728.0, "16685": 17446985728.0, "16690": 17446985728.0, "16695": 17446985728.0, "16700": 17446985728.0, "16705": 17446985728.0, "16710": 17446985728.0, "16715": 17446985728.0, "16720": 17446985728.0, "16725": 17447231488.0, "16730": 17446985728.0, "16735": 17446985728.0, "16740": 17446985728.0, "16745": 17446985728.0, "16750": 17446985728.0, "16755": 17447342080.0, "16760": 17447084032.0, "16765": 17446983680.0, "16770": 17446985728.0, "16775": 17446985728.0, "16780": 17446985728.0, "16785": 17447002112.0, "16790": 17446985728.0, "16795": 17446985728.0, "16800": 17446985728.0, "16805": 17446985728.0, "16810": 17446985728.0, "16815": 17446985728.0, "16820": 17446985728.0, "16825": 17446985728.0, "16830": 17446985728.0, "16835": 17446985728.0, "16840": 17446985728.0, "16845": 17446985728.0, "16850": 17446985728.0, "16855": 17446985728.0, "16860": 17446985728.0, "16865": 17446985728.0, "16870": 17446985728.0, "16875": 17446985728.0, "16880": 17446985728.0, "16885": 17446985728.0, "16890": 17446985728.0, "16895": 17446985728.0, "16900": 17446985728.0, "16905": 17446985728.0, "16910": 17446985728.0, "16915": 17446985728.0, "16920": 17447084032.0, "16925": 17446985728.0, "16930": 17446985728.0, "16935": 17446985728.0, "16940": 17446985728.0, "16945": 17446985728.0, "16950": 17446985728.0, "16955": 17446985728.0, "16960": 17446985728.0, "16965": 17446985728.0, "16970": 17447444480.0, "16975": 17448157184.0, "16980": 17446985728.0, "16985": 17447149568.0, "16990": 17446985728.0, "16995": 17446985728.0, "17000": 17446985728.0, "17005": 17446985728.0, "17010": 17446985728.0, "17015": 17446985728.0, "17020": 17446985728.0, "17025": 17446985728.0, "17030": 17446985728.0, "17035": 17446985728.0, "17040": 17446985728.0, "17045": 17446985728.0, "17050": 17446985728.0, "17055": 17446985728.0, "17060": 17447403520.0, "17065": 17446985728.0, "17070": 17447972864.0, "17075": 17446985728.0, "17080": 17446985728.0, "17085": 17446985728.0, "17090": 17446985728.0, "17095": 17446985728.0, "17100": 17446985728.0, "17105": 17446985728.0, "17110": 17446985728.0, "17115": 17446985728.0, "17120": 17446985728.0, "17125": 17446985728.0, "17130": 17446985728.0, "17135": 17446985728.0, "17140": 17447673856.0, "17145": 17446985728.0, "17150": 17446985728.0, "17155": 17446985728.0, "17160": 17446985728.0, "17165": 17446985728.0, "17170": 17446985728.0, "17175": 17446985728.0, "17180": 17446985728.0, "17185": 17446985728.0, "17190": 17446985728.0, "17195": 17446985728.0, "17200": 17446985728.0, "17205": 17446985728.0, "17210": 17446985728.0, "17215": 17446985728.0, "17220": 17446985728.0, "17225": 17446985728.0, "17230": 17446985728.0, "17235": 17446985728.0, "17240": 17446985728.0, "17245": 17446985728.0, "17250": 17446985728.0, "17255": 17446985728.0, "17260": 17447100416.0, "17265": 17446985728.0, "17270": 17447772160.0, "17275": 17446985728.0, "17280": 17446985728.0, "17285": 17446985728.0, "17290": 17446985728.0, "17295": 17446985728.0, "17300": 17446985728.0, "17305": 17446985728.0, "17310": 17446985728.0, "17315": 17446985728.0, "17320": 17446985728.0, "17325": 17446985728.0, "17330": 17446985728.0, "17335": 17446985728.0, "17340": 17446985728.0, "17345": 17446985728.0, "17350": 17447464960.0, "17355": 17447464960.0, "17360": 17447464960.0, "17365": 17447464960.0, "17370": 
17447477248.0, "17375": 17447464960.0, "17380": 17447464960.0, "17385": 17448144896.0, "17390": 17448194048.0, "17395": 17447464960.0, "17400": 17447464960.0, "17405": 17447464960.0, "17410": 17447464960.0, "17415": 17448071168.0, "17420": 17447464960.0, "17425": 17447464960.0, "17430": 17447464960.0, "17435": 17447464960.0, "17440": 17447464960.0, "17445": 17447464960.0, "17450": 17447464960.0, "17455": 17447464960.0, "17460": 17447624704.0, "17465": 17447464960.0, "17470": 17447464960.0, "17475": 17447464960.0, "17480": 17447464960.0, "17485": 17447464960.0, "17490": 17447464960.0, "17495": 17447833600.0, "17500": 17447464960.0, "17505": 17447702528.0, "17510": 17447464960.0, "17515": 17447464960.0, "17520": 17447464960.0, "17525": 17447464960.0, "17530": 17447464960.0, "17535": 17447464960.0, "17540": 17447464960.0, "17545": 17447464960.0, "17550": 17447464960.0, "17555": 17447464960.0, "17560": 17447464960.0, "17565": 17447464960.0, "17570": 17448112128.0, "17575": 17447464960.0, "17580": 17448349696.0, "17585": 17447464960.0, "17590": 17447636992.0, "17595": 17447464960.0, "17600": 17447464960.0, "17605": 17447464960.0, "17610": 17447464960.0, "17615": 17447464960.0, "17620": 17447464960.0, "17625": 17447686144.0, "17630": 17447464960.0, "17635": 17447464960.0, "17640": 17447464960.0, "17645": 17447464960.0, "17650": 17447882752.0, "17655": 17447464960.0, "17660": 17447464960.0, "17665": 17447464960.0, "17670": 17447464960.0, "17675": 17447464960.0, "17680": 17447464960.0, "17685": 17447464960.0, "17690": 17447464960.0, "17695": 17447464960.0, "17700": 17447464960.0, "17705": 17447464960.0, "17710": 17447464960.0, "17715": 17447464960.0, "17720": 17447464960.0, "17725": 17447464960.0, "17730": 17447907328.0, "17735": 17447464960.0, "17740": 17447464960.0, "17745": 17447464960.0, "17750": 17447464960.0, "17755": 17447464960.0, "17760": 17447464960.0, "17765": 17447464960.0, "17770": 17447464960.0, "17775": 17447464960.0, "17780": 17447464960.0, "17785": 17447464960.0, "17790": 17447464960.0, "17795": 17447464960.0, "17800": 17447464960.0, "17805": 17447464960.0, "17810": 17447464960.0, "17815": 17447464960.0, "17820": 17447464960.0, "17825": 17447464960.0, "17830": 17447464960.0, "17835": 17447464960.0, "17840": 17447464960.0, "17845": 17447464960.0, "17850": 17447464960.0, "17855": 17447464960.0, "17860": 17447464960.0, "17865": 17447960576.0, "17870": 17447464960.0, "17875": 17447464960.0, "17880": 17447464960.0, "17885": 17447464960.0, "17890": 17447464960.0, "17895": 17447464960.0, "17900": 17447464960.0, "17905": 17447464960.0, "17910": 17448341504.0, "17915": 17447464960.0, "17920": 17447464960.0, "17925": 17447464960.0, "17930": 17447481344.0, "17935": 17447481344.0, "17940": 17447481344.0, "17945": 17447481344.0, "17950": 17447481344.0, "17955": 17447481344.0, "17960": 17447481344.0, "17965": 17447481344.0, "17970": 17447481344.0, "17975": 17447890944.0, "17980": 17448525824.0, "17985": 17447481344.0, "17990": 17447481344.0, "17995": 17448022016.0, "18000": 17448292352.0, "18005": 17448169472.0, "18010": 17447481344.0, "18015": 17447481344.0, "18020": 17447481344.0, "18025": 17447481344.0, "18030": 17447481344.0, "18035": 17447481344.0, "18040": 17447481344.0, "18045": 17447481344.0, "18050": 17447481344.0, "18055": 17447481344.0, "18060": 17447481344.0, "18065": 17447481344.0, "18070": 17447481344.0, "18075": 17447481344.0, "18080": 17447481344.0, "18085": 17447481344.0, "18090": 17447481344.0, "18095": 17447481344.0, "18100": 17447481344.0, "18105": 17447481344.0, "18110": 
17447481344.0, "18115": 17447481344.0, "18120": 17447481344.0, "18125": 17447481344.0, "18130": 17447481344.0, "18135": 17447481344.0, "18140": 17447481344.0, "18145": 17447481344.0, "18150": 17447481344.0, "18155": 17447481344.0, "18160": 17447481344.0, "18165": 17447481344.0, "18170": 17447481344.0, "18175": 17447481344.0, "18180": 17447481344.0, "18185": 17447481344.0, "18190": 17447481344.0, "18195": 17447481344.0, "18200": 17447579648.0, "18205": 17448054784.0, "18210": 17448103936.0, "18215": 17447481344.0, "18220": 17447989248.0, "18225": 17447481344.0, "18230": 17447481344.0, "18235": 17447481344.0, "18240": 17448398848.0, "18245": 17447481344.0, "18250": 17447481344.0, "18255": 17447481344.0, "18260": 17447956480.0, "18265": 17447481344.0, "18270": 17447481344.0, "18275": 17447481344.0, "18280": 17447481344.0, "18285": 17447481344.0, "18290": 17447514112.0, "18295": 17447481344.0, "18300": 17447481344.0, "18305": 17447481344.0, "18310": 17447481344.0, "18315": 17447596032.0, "18320": 17447481344.0, "18325": 17447481344.0, "18330": 17447481344.0, "18335": 17447481344.0, "18340": 17447481344.0, "18345": 17447481344.0, "18350": 17447481344.0, "18355": 17448361984.0, "18360": 17448443904.0, "18365": 17447481344.0, "18370": 17447481344.0, "18375": 17447481344.0, "18380": 17447481344.0, "18385": 17447481344.0, "18390": 17447481344.0, "18395": 17447481344.0, "18400": 17447481344.0, "18405": 17447481344.0, "18410": 17447481344.0, "18415": 17447481344.0, "18420": 17447481344.0, "18425": 17447481344.0, "18430": 17447481344.0, "18435": 17447481344.0, "18440": 17447481344.0, "18445": 17448374272.0, "18450": 17447481344.0, "18455": 17447481344.0, "18460": 17447481344.0, "18465": 17447481344.0, "18470": 17447481344.0, "18475": 17447481344.0, "18480": 17447481344.0, "18485": 17447481344.0, "18490": 17447481344.0, "18495": 17447481344.0, "18500": 17447481344.0, "18505": 17447481344.0, "18510": 17447297024.0, "18515": 17448173568.0, "18520": 17447297024.0, "18525": 17447297024.0, "18530": 17447297024.0, "18535": 17447297024.0, "18540": 17447297024.0, "18545": 17447297024.0, "18550": 17448312832.0, "18555": 17447297024.0, "18560": 17447297024.0, "18565": 17447297024.0, "18570": 17447297024.0, "18575": 17447297024.0, "18580": 17447297024.0, "18585": 17447297024.0, "18590": 17447297024.0, "18595": 17447297024.0, "18600": 17448009728.0, "18605": 17447297024.0, "18610": 17447297024.0, "18615": 17447297024.0, "18620": 17447493632.0, "18625": 17447297024.0, "18630": 17447297024.0, "18635": 17447297024.0, "18640": 17447297024.0, "18645": 17448239104.0, "18650": 17447297024.0, "18655": 17447297024.0, "18660": 17447297024.0, "18665": 17447297024.0, "18670": 17447297024.0, "18675": 17447297024.0, "18680": 17447297024.0, "18685": 17447297024.0, "18690": 17447297024.0, "18695": 17447297024.0, "18700": 17447297024.0, "18705": 17447297024.0, "18710": 17447297024.0, "18715": 17447297024.0, "18720": 17447297024.0, "18725": 17447297024.0, "18730": 17447297024.0, "18735": 17447297024.0, "18740": 17447297024.0, "18745": 17447297024.0, "18750": 17447297024.0, "18755": 17447297024.0, "18760": 17447297024.0, "18765": 17447297024.0, "18770": 17447297024.0, "18775": 17447297024.0, "18780": 17447297024.0, "18785": 17447297024.0, "18790": 17447297024.0, "18795": 17447297024.0, "18800": 17447297024.0, "18805": 17447297024.0, "18810": 17447297024.0, "18815": 17447297024.0, "18820": 17447297024.0, "18825": 17447297024.0, "18830": 17447297024.0, "18835": 17447297024.0, "18840": 17447297024.0, "18845": 17447297024.0, "18850": 
17447297024.0, "18855": 17447297024.0, "18860": 17447297024.0, "18865": 17447297024.0, "18870": 17447297024.0, "18875": 17447297024.0, "18880": 17447297024.0, "18885": 17447297024.0, "18890": 17447297024.0, "18895": 17447297024.0, "18900": 17447297024.0, "18905": 17447297024.0, "18910": 17447813120.0, "18915": 17447297024.0, "18920": 17447297024.0, "18925": 17447297024.0, "18930": 17447297024.0, "18935": 17447297024.0, "18940": 17447297024.0, "18945": 17447297024.0, "18950": 17447297024.0, "18955": 17447297024.0, "18960": 17447297024.0, "18965": 17447297024.0, "18970": 17447297024.0, "18975": 17447297024.0, "18980": 17447297024.0, "18985": 17447297024.0, "18990": 17447297024.0, "18995": 17447297024.0, "19000": 17447297024.0, "19005": 17447297024.0, "19010": 17447297024.0, "19015": 17447297024.0, "19020": 17447297024.0, "19025": 17447297024.0, "19030": 17447297024.0, "19035": 17447297024.0, "19040": 17447297024.0, "19045": 17447297024.0, "19050": 17447297024.0, "19055": 17447854080.0, "19060": 17447297024.0, "19065": 17447297024.0, "19070": 17447297024.0, "19075": 17448247296.0, "19080": 17447297024.0, "19085": 17447297024.0, "19090": 17447297024.0, "19095": 17448026112.0, "19100": 17446985728.0, "19105": 17446985728.0, "19110": 17446985728.0, "19115": 17446985728.0, "19120": 17446985728.0, "19125": 17446985728.0, "19130": 17446985728.0, "19135": 17446985728.0, "19140": 17446985728.0, "19145": 17446985728.0, "19150": 17446985728.0, "19155": 17446985728.0, "19160": 17446985728.0, "19165": 17446985728.0, "19170": 17446985728.0, "19175": 17446985728.0, "19180": 17446985728.0, "19185": 17446985728.0, "19190": 17446985728.0, "19195": 17446985728.0, "19200": 17446985728.0, "19205": 17446985728.0, "19210": 17446985728.0, "19215": 17446985728.0, "19220": 17446985728.0, "19225": 17446985728.0, "19230": 17446985728.0, "19235": 17446985728.0, "19240": 17446985728.0, "19245": 17446985728.0, "19250": 17446985728.0, "19255": 17446985728.0, "19260": 17446985728.0, "19265": 17446985728.0, "19270": 17446985728.0, "19275": 17446985728.0, "19280": 17446985728.0, "19285": 17446985728.0, "19290": 17446985728.0, "19295": 17446985728.0, "19300": 17446985728.0, "19305": 17447718912.0, "19310": 17446985728.0, "19315": 17446985728.0, "19320": 17446985728.0, "19325": 17446985728.0, "19330": 17446985728.0, "19335": 17446985728.0, "19340": 17446985728.0, "19345": 17447026688.0, "19350": 17446985728.0, "19355": 17447067648.0, "19360": 17446985728.0, "19365": 17446985728.0, "19370": 17446985728.0, "19375": 17446985728.0, "19380": 17446985728.0, "19385": 17446985728.0, "19390": 17446985728.0, "19395": 17446985728.0, "19400": 17446985728.0, "19405": 17446985728.0, "19410": 17446985728.0, "19415": 17446985728.0, "19420": 17446985728.0, "19425": 17446985728.0, "19430": 17446985728.0, "19435": 17446985728.0, "19440": 17446985728.0, "19445": 17446985728.0, "19450": 17446985728.0, "19455": 17446985728.0, "19460": 17446985728.0, "19465": 17446985728.0, "19470": 17447141376.0, "19475": 17446985728.0, "19480": 17446985728.0, "19485": 17446985728.0, "19490": 17446985728.0, "19495": 17446985728.0, "19500": 17446985728.0, "19505": 17446985728.0, "19510": 17446985728.0, "19515": 17447755776.0, "19520": 17446985728.0, "19525": 17446985728.0, "19530": 17446985728.0, "19535": 17447534592.0, "19540": 17446985728.0, "19545": 17446985728.0, "19550": 17447968768.0, "19555": 17446985728.0, "19560": 17447653376.0, "19565": 17447383040.0, "19570": 17446985728.0, "19575": 17447018496.0, "19580": 17446985728.0, "19585": 17446985728.0, "19590": 
17446985728.0, "19595": 17446985728.0, "19600": 17446985728.0, "19605": 17446985728.0, "19610": 17446985728.0, "19615": 17446985728.0, "19620": 17447944192.0, "19625": 17446985728.0, "19630": 17446985728.0, "19635": 17446985728.0, "19640": 17446985728.0, "19645": 17446985728.0, "19650": 17446985728.0, "19655": 17446985728.0, "19660": 17446985728.0, "19665": 17447821312.0, "19670": 17446985728.0, "19675": 17446983680.0, "19680": 17446985728.0, "19685": 17446985728.0, "19690": 17446985728.0, "19695": 17446985728.0, "19700": 17446985728.0, "19705": 17446985728.0, "19710": 17446985728.0, "19715": 17446985728.0, "19720": 17446985728.0, "19725": 17447165952.0, "19730": 17446985728.0, "19735": 17447542784.0, "19740": 17446985728.0, "19745": 17446985728.0, "19750": 17447776256.0, "19755": 17446985728.0, "19760": 17446985728.0, "19765": 17446985728.0, "19770": 17446985728.0, "19775": 17446985728.0, "19780": 17446985728.0, "19785": 17446985728.0, "19790": 17446985728.0, "19795": 17446985728.0, "19800": 17446985728.0, "19805": 17446985728.0, "19810": 17446985728.0, "19815": 17447780352.0, "19820": 17446985728.0, "19825": 17446985728.0, "19830": 17446985728.0, "19835": 17447567360.0, "19840": 17446985728.0, "19845": 17446985728.0, "19850": 17446985728.0, "19855": 17446985728.0, "19860": 17447661568.0, "19865": 17446985728.0, "19870": 17446985728.0, "19875": 17446985728.0, "19880": 17446985728.0, "19885": 17446985728.0, "19890": 17446985728.0, "19895": 17446985728.0, "19900": 17446985728.0, "19905": 17446985728.0, "19910": 17446985728.0, "19915": 17446985728.0, "19920": 17446985728.0, "19925": 17446985728.0, "19930": 17446985728.0, "19935": 17446985728.0, "19940": 17446985728.0, "19945": 17446985728.0, "19950": 17446985728.0, "19955": 17446985728.0, "19960": 17446985728.0, "19965": 17446985728.0, "19970": 17446985728.0, "19975": 17447075840.0, "19980": 17447485440.0, "19985": 17447239680.0, "19990": 17447919616.0, "19995": 17446985728.0, "20000": 17446985728.0, "20005": 17446985728.0, "20010": 17446985728.0, "20015": 17447837696.0, "20020": 17447763968.0, "20025": 17446985728.0, "20030": 17446985728.0, "20035": 17447493632.0, "20040": 17446985728.0, "20045": 17447051264.0, "20050": 17447256064.0, "20055": 17446985728.0, "20060": 17446985728.0, "20065": 17446985728.0, "20070": 17446985728.0, "20075": 17447510016.0, "20080": 17446985728.0, "20085": 17446985728.0, "20090": 17446985728.0, "20095": 17446985728.0, "20100": 17446985728.0, "20105": 17448005632.0, "20110": 17446985728.0, "20115": 17446985728.0, "20120": 17447227392.0, "20125": 17446985728.0, "20130": 17446985728.0, "20135": 17447919616.0, "20140": 17447821312.0, "20145": 17446985728.0, "20150": 17446985728.0, "20155": 17446985728.0, "20160": 17446985728.0, "20165": 17447051264.0, "20170": 17446985728.0, "20175": 17446985728.0, "20180": 17446985728.0, "20185": 17446985728.0, "20190": 17446985728.0, "20195": 17446985728.0, "20200": 17446985728.0, "20205": 17446985728.0, "20210": 17446985728.0, "20215": 17446985728.0, "20220": 17446985728.0, "20225": 17446985728.0, "20230": 17446985728.0, "20235": 17446985728.0, "20240": 17446985728.0, "20245": 17446985728.0, "20250": 17446985728.0, "20255": 17446985728.0, "20260": 17446985728.0, "20265": 17447071744.0, "20270": 17446985728.0, "20275": 17446985728.0, "20280": 17446985728.0, "20285": 17447387136.0, "20290": 17446985728.0, "20295": 17446985728.0, "20300": 17446985728.0, "20305": 17446985728.0, "20310": 17447153664.0, "20315": 17446985728.0, "20320": 17446985728.0, "20325": 17446985728.0, "20330": 
17447743488.0, "20335": 17446985728.0, "20340": 17446985728.0, "20345": 17446985728.0, "20350": 17446985728.0, "20355": 17446985728.0, "20360": 17446985728.0, "20365": 17446985728.0, "20370": 17446985728.0, "20375": 17446985728.0, "20380": 17446985728.0, "20385": 17446985728.0, "20390": 17446985728.0, "20395": 17447596032.0, "20400": 17446985728.0, "20405": 17446985728.0, "20410": 17446985728.0, "20415": 17446985728.0, "20420": 17446985728.0, "20425": 17447374848.0, "20430": 17446985728.0, "20435": 17447088128.0, "20440": 17446985728.0, "20445": 17446985728.0, "20450": 17446985728.0, "20455": 17446985728.0, "20460": 17446985728.0, "20465": 17446985728.0, "20470": 17447862272.0, "20475": 17446985728.0, "20480": 17446985728.0, "20485": 17446985728.0, "20490": 17446985728.0, "20495": 17446985728.0, "20500": 17446985728.0, "20505": 17446985728.0, "20510": 17446985728.0, "20515": 17446985728.0, "20520": 17447141376.0, "20525": 17446985728.0, "20530": 17446985728.0, "20535": 17446985728.0, "20540": 17446985728.0, "20545": 17446985728.0, "20550": 17446985728.0, "20555": 17446985728.0, "20560": 17446985728.0, "20565": 17446985728.0, "20570": 17446985728.0, "20575": 17446985728.0, "20580": 17446985728.0, "20585": 17446985728.0, "20590": 17446985728.0, "20595": 17446985728.0, "20600": 17446985728.0, "20605": 17446985728.0, "20610": 17446985728.0, "20615": 17446985728.0, "20620": 17446985728.0, "20625": 17446985728.0, "20630": 17447305216.0, "20635": 17446985728.0, "20640": 17446985728.0, "20645": 17446985728.0, "20650": 17446985728.0, "20655": 17446985728.0, "20660": 17447034880.0, "20665": 17446985728.0, "20670": 17446985728.0, "20675": 17446985728.0, "20680": 17446985728.0, "20685": 17446985728.0, "20690": 17446985728.0, "20695": 17446985728.0, "20700": 17446985728.0, "20705": 17446985728.0, "20710": 17446985728.0, "20715": 17446985728.0, "20720": 17446985728.0, "20725": 17446985728.0, "20730": 17446985728.0, "20735": 17446985728.0, "20740": 17446985728.0, "20745": 17446985728.0, "20750": 17446985728.0, "20755": 17446985728.0, "20760": 17446985728.0, "20765": 17446985728.0, "20770": 17446985728.0, "20775": 17446985728.0, "20780": 17446985728.0, "20785": 17446985728.0, "20790": 17446985728.0, "20795": 17446985728.0, "20800": 17448017920.0, "20805": 17447739392.0, "20810": 17446985728.0, "20815": 17446985728.0, "20820": 17446985728.0, "20825": 17446985728.0, "20830": 17447337984.0, "20835": 17446985728.0, "20840": 17446985728.0, "20845": 17446985728.0, "20850": 17446985728.0, "20855": 17446985728.0, "20860": 17446985728.0, "20865": 17446985728.0, "20870": 17446985728.0, "20875": 17446985728.0, "20880": 17446985728.0, "20885": 17446985728.0, "20890": 17447288832.0, "20895": 17446985728.0, "20900": 17447436288.0, "20905": 17446985728.0, "20910": 17447342080.0, "20915": 17446985728.0, "20920": 17446985728.0, "20925": 17446985728.0, "20930": 17446985728.0, "20935": 17446985728.0, "20940": 17446985728.0, "20945": 17446985728.0, "20950": 17446985728.0, "20955": 17446985728.0, "20960": 17446985728.0, "20965": 17446985728.0, "20970": 17446985728.0, "20975": 17446985728.0, "20980": 17446985728.0, "20985": 17447059456.0, "20990": 17446985728.0, "20995": 17446985728.0, "21000": 17447702528.0, "21005": 17446985728.0, "21010": 17447727104.0, "21015": 17446985728.0, "21020": 17447387136.0, "21025": 17446985728.0, "21030": 17446985728.0, "21035": 17446985728.0, "21040": 17446985728.0, "21045": 17446985728.0, "21050": 17446985728.0, "21055": 17446985728.0, "21060": 17446985728.0, "21065": 17446985728.0, "21070": 
17446985728.0, "21075": 17446985728.0, "21080": 17447944192.0, "21085": 17446985728.0, "21090": 17446985728.0, "21095": 17446985728.0, "21100": 17446985728.0, "21105": 17446985728.0, "21110": 17446985728.0, "21115": 17448009728.0, "21120": 17446985728.0, "21125": 17447141376.0, "21130": 17446985728.0, "21135": 17446985728.0, "21140": 17446985728.0, "21145": 17446985728.0, "21150": 17446985728.0, "21155": 17446985728.0, "21160": 17446985728.0, "21165": 17447354368.0, "21170": 17446985728.0, "21175": 17446985728.0, "21180": 17446985728.0, "21185": 17446985728.0, "21190": 17446985728.0, "21195": 17446985728.0, "21200": 17446985728.0, "21205": 17446985728.0, "21210": 17446985728.0, "21215": 17446985728.0, "21220": 17446985728.0, "21225": 17446985728.0, "21230": 17447108608.0, "21235": 17446985728.0, "21240": 17446985728.0, "21245": 17446985728.0, "21250": 17447075840.0, "21255": 17446985728.0, "21260": 17446985728.0, "21265": 17446985728.0, "21270": 17446985728.0, "21275": 17446985728.0, "21280": 17446985728.0, "21285": 17446985728.0, "21290": 17447034880.0, "21295": 17446985728.0, "21300": 17446985728.0, "21305": 17446985728.0, "21310": 17446985728.0, "21315": 17446985728.0, "21320": 17446985728.0, "21325": 17446985728.0, "21330": 17447272448.0, "21335": 17446985728.0, "21340": 17446985728.0, "21345": 17446985728.0, "21350": 17446985728.0, "21355": 17446985728.0, "21360": 17446985728.0, "21365": 17446985728.0, "21370": 17446985728.0, "21375": 17446985728.0, "21380": 17446985728.0, "21385": 17446985728.0, "21390": 17446985728.0, "21395": 17446985728.0, "21400": 17446985728.0, "21405": 17446985728.0, "21410": 17446985728.0, "21415": 17446985728.0, "21420": 17447108608.0, "21425": 17446985728.0, "21430": 17446985728.0, "21435": 17446985728.0, "21440": 17446985728.0, "21445": 17446985728.0, "21450": 17446985728.0, "21455": 17446985728.0, "21460": 17447018496.0, "21465": 17446985728.0, "21470": 17446985728.0, "21475": 17446985728.0, "21480": 17446985728.0, "21485": 17446985728.0, "21490": 17446985728.0, "21495": 17446985728.0, "21500": 17447071744.0, "21505": 17446985728.0, "21510": 17446985728.0, "21515": 17446985728.0, "21520": 17446985728.0, "21525": 17446985728.0, "21530": 17446985728.0, "21535": 17446985728.0, "21540": 17446985728.0, "21545": 17446985728.0, "21550": 17446985728.0, "21555": 17446985728.0, "21560": 17446985728.0, "21565": 17446985728.0, "21570": 17446985728.0, "21575": 17446985728.0, "21580": 17446985728.0, "21585": 17446985728.0, "21590": 17446985728.0, "21595": 17446985728.0, "21600": 17446985728.0, "21605": 17446985728.0, "21610": 17446985728.0, "21615": 17446985728.0, "21620": 17446985728.0, "21625": 17446985728.0, "21630": 17446985728.0, "21635": 17446985728.0, "21640": 17446985728.0, "21645": 17446985728.0, "21650": 17446985728.0, "21655": 17446985728.0, "21660": 17446985728.0, "21665": 17447706624.0, "21670": 17446985728.0, "21675": 17446985728.0, "21680": 17446985728.0, "21685": 17446985728.0, "21690": 17446985728.0, "21695": 17446985728.0, "21700": 17446985728.0, "21705": 17446985728.0, "21710": 17446985728.0, "21715": 17446985728.0, "21720": 17446985728.0, "21725": 17446985728.0, "21730": 17446985728.0, "21735": 17447088128.0, "21740": 17446985728.0, "21745": 17446985728.0, "21750": 17446985728.0, "21755": 17446985728.0, "21760": 17446985728.0, "21765": 17446985728.0, "21770": 17446985728.0, "21775": 17446985728.0, "21780": 17446985728.0, "21785": 17446985728.0, "21790": 17446985728.0, "21795": 17447428096.0, "21800": 17446985728.0, "21805": 17447305216.0, "21810": 
17446985728.0, "21815": 17446985728.0, "21820": 17446985728.0, "21825": 17446985728.0, "21830": 17446985728.0, "21835": 17446985728.0, "21840": 17447780352.0, "21845": 17446985728.0, "21850": 17446985728.0, "21855": 17446985728.0, "21860": 17446985728.0, "21865": 17446985728.0, "21870": 17446985728.0, "21875": 17446985728.0, "21880": 17446985728.0, "21885": 17446985728.0, "21890": 17446985728.0, "21895": 17446985728.0, "21900": 17446985728.0, "21905": 17446985728.0, "21910": 17446985728.0, "21915": 17446985728.0, "21920": 17446985728.0, "21925": 17446985728.0, "21930": 17446985728.0, "21935": 17446985728.0, "21940": 17446985728.0, "21945": 17446985728.0, "21950": 17446985728.0, "21955": 17446985728.0, "21960": 17447395328.0, "21965": 17446985728.0, "21970": 17447329792.0, "21975": 17446985728.0, "21980": 17447673856.0, "21985": 17446985728.0, "21990": 17446985728.0, "21995": 17446985728.0, "22000": 17446985728.0, "22005": 17446985728.0, "22010": 17447182336.0, "22015": 17446985728.0, "22020": 17446985728.0, "22025": 17446985728.0, "22030": 17446985728.0, "22035": 17446985728.0, "22040": 17446985728.0, "22045": 17446985728.0, "22050": 17446985728.0, "22055": 17446985728.0, "22060": 17446985728.0, "22065": 17446985728.0, "22070": 17446985728.0, "22075": 17446985728.0, "22080": 17446985728.0, "22085": 17446985728.0, "22090": 17446985728.0, "22095": 17446985728.0, "22100": 17446985728.0, "22105": 17446985728.0, "22110": 17447157760.0, "22115": 17447321600.0, "22120": 17446985728.0, "22125": 17446985728.0, "22130": 17446985728.0, "22135": 17446985728.0, "22140": 17446985728.0, "22145": 17446985728.0, "22150": 17446985728.0, "22155": 17446985728.0, "22160": 17446985728.0, "22165": 17446985728.0, "22170": 17446985728.0, "22175": 17446985728.0, "22180": 17446985728.0, "22185": 17446985728.0, "22190": 17446985728.0, "22195": 17446985728.0, "22200": 17446985728.0, "22205": 17446985728.0, "22210": 17446985728.0, "22215": 17447510016.0, "22220": 17446985728.0, "22225": 17446985728.0, "22230": 17446985728.0, "22235": 17446985728.0, "22240": 17446985728.0, "22245": 17446985728.0, "22250": 17446985728.0, "22255": 17446985728.0, "22260": 17446985728.0, "22265": 17446985728.0, "22270": 17446985728.0, "22275": 17446985728.0, "22280": 17446985728.0, "22285": 17446985728.0, "22290": 17446985728.0, "22295": 17446985728.0, "22300": 17446985728.0, "22305": 17446985728.0, "22310": 17446985728.0, "22315": 17446985728.0, "22320": 17446985728.0, "22325": 17446985728.0, "22330": 17446985728.0, "22335": 17446985728.0, "22340": 17446985728.0, "22345": 17446985728.0, "22350": 17446985728.0, "22355": 17446985728.0, "22360": 17447542784.0, "22365": 17447542784.0, "22370": 17447542784.0, "22375": 17447542784.0, "22380": 17448189952.0, "22385": 17447542784.0, "22390": 17447542784.0, "22395": 17447542784.0, "22400": 17447542784.0, "22405": 17447542784.0, "22410": 17447542784.0, "22415": 17448247296.0, "22420": 17447542784.0, "22425": 17447542784.0, "22430": 17447542784.0, "22435": 17447542784.0, "22440": 17447542784.0, "22445": 17447542784.0, "22450": 17447542784.0, "22455": 17447542784.0, "22460": 17447542784.0, "22465": 17447587840.0, "22470": 17447542784.0, "22475": 17447542784.0, "22480": 17447542784.0, "22485": 17447542784.0, "22490": 17447542784.0, "22495": 17447542784.0, "22500": 17447542784.0, "22505": 17447542784.0, "22510": 17447542784.0, "22515": 17447542784.0, "22520": 17447948288.0, "22525": 17447542784.0, "22530": 17447542784.0, "22535": 17447542784.0, "22540": 17447542784.0, "22545": 17447542784.0, "22550": 
17447542784.0, "22555": 17448480768.0, "22560": 17448542208.0, "22565": 17447542784.0, "22570": 17447542784.0, "22575": 17447542784.0, "22580": 17447542784.0, "22585": 17447542784.0, "22590": 17447936000.0, "22595": 17447325696.0, "22600": 17447325696.0, "22605": 17447325696.0, "22610": 17447325696.0, "22615": 17447325696.0, "22620": 17447768064.0, "22625": 17447325696.0, "22630": 17447325696.0, "22635": 17447325696.0, "22640": 17447325696.0, "22645": 17448095744.0, "22650": 17447325696.0, "22655": 17448046592.0, "22660": 17447325696.0, "22665": 17447325696.0, "22670": 17447325696.0, "22675": 17447325696.0, "22680": 17447325696.0, "22685": 17447325696.0, "22690": 17447325696.0, "22695": 17447325696.0, "22700": 17447350272.0, "22705": 17447325696.0, "22710": 17447325696.0, "22715": 17447325696.0, "22720": 17447325696.0, "22725": 17447325696.0, "22730": 17447325696.0, "22735": 17447325696.0, "22740": 17447325696.0, "22745": 17447325696.0, "22750": 17447325696.0, "22755": 17447325696.0, "22760": 17447325696.0, "22765": 17447784448.0, "22770": 17447325696.0, "22775": 17447661568.0, "22780": 17447325696.0, "22785": 17448202240.0, "22790": 17447325696.0, "22795": 17447325696.0, "22800": 17447325696.0, "22805": 17447325696.0, "22810": 17447325696.0, "22815": 17447325696.0, "22820": 17447325696.0, "22825": 17447538688.0, "22830": 17447497728.0, "22835": 17447325696.0, "22840": 17448128512.0, "22845": 17447325696.0, "22850": 17447325696.0, "22855": 17447325696.0, "22860": 17447325696.0, "22865": 17448075264.0, "22870": 17447325696.0, "22875": 17447596032.0, "22880": 17447325696.0, "22885": 17447325696.0, "22890": 17447325696.0, "22895": 17447325696.0, "22900": 17447325696.0, "22905": 17447325696.0, "22910": 17447325696.0, "22915": 17447325696.0, "22920": 17447325696.0, "22925": 17447325696.0, "22930": 17447325696.0, "22935": 17447325696.0, "22940": 17447325696.0, "22945": 17447325696.0, "22950": 17447325696.0, "22955": 17447641088.0, "22960": 17447325696.0, "22965": 17447325696.0, "22970": 17447325696.0, "22975": 17447325696.0, "22980": 17447325696.0, "22985": 17447489536.0, "22990": 17447325696.0, "22995": 17447325696.0, "23000": 17447325696.0, "23005": 17447325696.0, "23010": 17447325696.0, "23015": 17447325696.0, "23020": 17447604224.0, "23025": 17447325696.0, "23030": 17447325696.0, "23035": 17447325696.0, "23040": 17447325696.0, "23045": 17447325696.0, "23050": 17447325696.0, "23055": 17447325696.0, "23060": 17447325696.0, "23065": 17447325696.0, "23070": 17447325696.0, "23075": 17447325696.0, "23080": 17447702528.0, "23085": 17447325696.0, "23090": 17447325696.0, "23095": 17447325696.0, "23100": 17447325696.0, "23105": 17448177664.0, "23110": 17447325696.0, "23115": 17447325696.0, "23120": 17447325696.0, "23125": 17447325696.0, "23130": 17447325696.0, "23135": 17447325696.0, "23140": 17447325696.0, "23145": 17447325696.0, "23150": 17447325696.0, "23155": 17447325696.0, "23160": 17447325696.0, "23165": 17447325696.0, "23170": 17447751680.0, "23175": 17446985728.0, "23180": 17446985728.0, "23185": 17446985728.0, "23190": 17446985728.0, "23195": 17446985728.0, "23200": 17446985728.0, "23205": 17447329792.0, "23210": 17446985728.0, "23215": 17446985728.0, "23220": 17446985728.0, "23225": 17446985728.0, "23230": 17446985728.0, "23235": 17446985728.0, "23240": 17447862272.0, "23245": 17446985728.0, "23250": 17447272448.0, "23255": 17446985728.0, "23260": 17447542784.0, "23265": 17446985728.0, "23270": 17446985728.0, "23275": 17446985728.0, "23280": 17446985728.0, "23285": 17446985728.0, "23290": 
17447706624.0, "23295": 17446985728.0, "23300": 17446985728.0, "23305": 17446985728.0, "23310": 17447903232.0, "23315": 17446985728.0, "23320": 17446985728.0, "23325": 17446985728.0, "23330": 17447460864.0, "23335": 17446985728.0, "23340": 17446985728.0, "23345": 17446985728.0, "23350": 17446985728.0, "23355": 17446985728.0, "23360": 17446985728.0, "23365": 17446985728.0, "23370": 17446985728.0, "23375": 17446985728.0, "23380": 17446985728.0, "23385": 17446985728.0, "23390": 17446985728.0, "23395": 17446985728.0, "23400": 17448022016.0, "23405": 17446985728.0, "23410": 17446985728.0, "23415": 17446985728.0, "23420": 17447354368.0, "23425": 17446985728.0, "23430": 17446985728.0, "23435": 17446985728.0, "23440": 17447739392.0, "23445": 17446985728.0, "23450": 17446985728.0, "23455": 17446985728.0, "23460": 17446985728.0, "23465": 17446985728.0, "23470": 17446985728.0, "23475": 17446985728.0, "23480": 17446985728.0, "23485": 17446985728.0, "23490": 17446985728.0, "23495": 17446985728.0, "23500": 17446985728.0, "23505": 17446985728.0, "23510": 17447084032.0, "23515": 17446985728.0, "23520": 17446985728.0, "23525": 17446985728.0, "23530": 17446985728.0, "23535": 17446985728.0, "23540": 17446985728.0, "23545": 17446985728.0, "23550": 17446985728.0, "23555": 17446985728.0, "23560": 17446985728.0, "23565": 17446985728.0, "23570": 17446985728.0, "23575": 17446985728.0, "23580": 17446985728.0, "23585": 17446985728.0, "23590": 17446985728.0, "23595": 17446985728.0, "23600": 17446985728.0, "23605": 17446985728.0, "23610": 17446985728.0, "23615": 17446985728.0, "23620": 17447518208.0, "23625": 17446985728.0, "23630": 17446985728.0, "23635": 17446985728.0, "23640": 17446985728.0, "23645": 17446985728.0, "23650": 17446985728.0, "23655": 17446985728.0, "23660": 17446985728.0, "23665": 17446985728.0, "23670": 17446985728.0, "23675": 17446985728.0, "23680": 17446985728.0, "23685": 17446985728.0, "23690": 17446985728.0, "23695": 17446985728.0, "23700": 17446985728.0, "23705": 17446985728.0, "23710": 17446985728.0, "23715": 17446985728.0, "23720": 17446985728.0, "23725": 17446985728.0, "23730": 17446985728.0, "23735": 17446985728.0, "23740": 17446985728.0, "23745": 17447968768.0, "23750": 17446985728.0, "23755": 17446983680.0, "23760": 17446985728.0, "23765": 17446985728.0, "23770": 17446985728.0, "23775": 17446985728.0, "23780": 17446985728.0, "23785": 17446985728.0, "23790": 17446985728.0, "23795": 17446985728.0, "23800": 17446985728.0, "23805": 17446985728.0, "23810": 17446985728.0, "23815": 17446985728.0, "23820": 17446985728.0, "23825": 17446985728.0, "23830": 17446985728.0, "23835": 17446985728.0, "23840": 17447215104.0, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", 
"24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", 
"25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", 
"26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", 
"27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", 
"28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", 
"29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", 
"30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", 
"31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", 
"33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", 
"34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", 
"35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", 
"36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", 
"37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", 
"38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", 
"39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", 
"40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", 
"41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", 
"42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", 
"44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", 
"45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", 
"46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", 
"47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", 
"48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", 
"49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", 
"50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 144.08891, "5": 27.2236, "10": 28.09763, "15": 31.10084, "20": 27.46968, "25": 29.33701, "30": 26.95577, "35": 26.02749, "40": 25.8881, "45": 25.14784, "50": 25.79603, "55": 26.72368, "60": 24.40982, "65": 26.94055, "70": 25.59108, "75": 24.08512, "80": 24.79284, "85": 25.17835, "90": 24.34322, "95": 24.14462, "100": 24.64424, "105": 24.19553, "110": 23.4671, "115": 24.63096, "120": 23.67427, "125": 23.56249, "130": 24.34286, "135": 24.07573, "140": 23.32341, "145": 24.15708, "150": 23.96347, "155": 23.56666, "160": 24.28301, "165": 23.3147, "170": 23.17521, "175": 24.10724, "180": 23.36111, "185": 24.03814, "190": 23.96846, "195": 23.07005, "200": 22.8993, "205": 22.77484, "210": 22.80292, "215": 23.34138, "220": 22.99706, "225": 22.64255, "230": 22.80857, "235": 22.93467, "240": 22.55644, "245": 22.99945, "250": 23.42438, "255": 22.77932, "260": 22.89401, "265": 22.42893, "270": 22.52478, "275": 22.82332, "280": 22.48665, "285": 23.53636, "290": 22.96945, "295": 22.45523, "300": 22.3766, "305": 22.57429, "310": 22.72433, "315": 22.36683, "320": 23.21283, "325": 23.23947, "330": 22.2804, "335": 22.45246, "340": 22.96137, "345": 22.32039, "350": 22.42004, "355": 23.71647, "360": 22.67693, "365": 22.40318, "370": 22.99184, "375": 22.12886, "380": 22.20955, "385": 23.10148, "390": 22.9053, "395": 22.41515, "400": 23.04054, "405": 22.17788, "410": 22.25365, "415": 22.87256, "420": 22.15085, "425": 22.52464, "430": 22.92884, "435": 22.41841, "440": 22.25757, "445": 22.92621, "450": 22.12018, "455": 22.10355, "460": 22.73441, "465": 22.23245, "470": 22.14096, "475": 22.41666, "480": 22.45723, "485": 22.29551, "490": 22.00924, "495": 22.30848, "500": 22.07598, "505": 23.39601, "510": 22.03132, "515": 22.22611, "520": 22.17413, "525": 22.22544, "530": 22.45463, "535": 22.40339, "540": 22.24098, "545": 22.27378, "550": 22.23532, "555": 22.30796, "560": 22.5161, "565": 22.16478, "570": 22.41689, "575": 22.2165, "580": 22.08362, "585": 22.26635, "590": 22.42732, "595": 22.37101, "600": 22.24292, "605": 22.23052, "610": 22.30187, "615": 22.6667, "620": 22.07428, "625": 22.12634, "630": 22.45551, "635": 22.39111, "640": 22.36744, "645": 22.28034, "650": 22.09702, "655": 22.4392, "660": 22.27983, "665": 22.09488, "670": 22.26451, "675": 22.32191, "680": 22.35475, "685": 22.11723, "690": 22.34981, "695": 22.81896, "700": 22.02452, "705": 22.01524, "710": 23.15947, "715": 22.07858, "720": 22.02034, "725": 22.49796, "730": 22.01114, "735": 22.01479, "740": 22.58305, "745": 22.09931, "750": 22.20321, "755": 22.60004, "760": 22.39039, "765": 22.11246, "770": 22.25137, "775": 22.19129, "780": 22.28213, "785": 22.07065, "790": 22.32926, "795": 22.46876, "800": 22.20442, "805": 22.22269, "810": 22.1231, "815": 22.22772, "820": 22.31096, "825": 22.20937, "830": 22.08298, "835": 22.64551, "840": 22.27708, "845": 21.98452, "850": 23.53863, "855": 21.95305, "860": 22.01676, "865": 22.71927, "870": 22.02309, "875": 22.24012, "880": 22.29851, "885": 22.096, "890": 22.06881, "895": 22.51989, "900": 22.15869, "905": 22.18991, "910": 22.24367, "915": 22.00235, "920": 22.37139, "925": 22.10406, "930": 21.96413, 
"935": 21.98803, "940": 22.14898, "945": 22.33693, "950": 22.03372, "955": 22.20567, "960": 22.51262, "965": 22.09421, "970": 22.15463, "975": 23.115, "980": 22.01194, "985": 21.97585, "990": 22.64977, "995": 21.97795, "1000": 21.99221, "1005": 22.15746, "1010": 22.05653, "1015": 22.17474, "1020": 22.06607, "1025": 22.30317, "1030": 22.01884, "1035": 22.01889, "1040": 22.04993, "1045": 22.00698, "1050": 22.05928, "1055": 21.98571, "1060": 22.96492, "1065": 22.33824, "1070": 22.82865, "1075": 23.20566, "1080": 141.95985, "1085": 22.94431, "1090": 22.88024, "1095": 23.57555, "1100": 22.86887, "1105": 22.51194, "1110": 23.15537, "1115": 22.54626, "1120": 22.89272, "1125": 23.15977, "1130": 22.17529, "1135": 22.5735, "1140": 22.55441, "1145": 22.00536, "1150": 22.40148, "1155": 22.29702, "1160": 22.01536, "1165": 22.03158, "1170": 22.5752, "1175": 21.9854, "1180": 21.99297, "1185": 22.32784, "1190": 22.04877, "1195": 22.43818, "1200": 22.43902, "1205": 22.44984, "1210": 22.29833, "1215": 22.14499, "1220": 22.11111, "1225": 22.01352, "1230": 22.42718, "1235": 22.0027, "1240": 22.26246, "1245": 23.03674, "1250": 22.05678, "1255": 22.61572, "1260": 22.72771, "1265": 22.16971, "1270": 21.96532, "1275": 22.72153, "1280": 21.98172, "1285": 21.9749, "1290": 22.77431, "1295": 22.09998, "1300": 22.13931, "1305": 22.00092, "1310": 22.30056, "1315": 22.11626, "1320": 21.99142, "1325": 22.15462, "1330": 21.94742, "1335": 22.21358, "1340": 22.2039, "1345": 22.29016, "1350": 22.11915, "1355": 22.38725, "1360": 22.14655, "1365": 21.99225, "1370": 23.09492, "1375": 22.04124, "1380": 22.12525, "1385": 22.65644, "1390": 22.03158, "1395": 21.97579, "1400": 22.75143, "1405": 22.26717, "1410": 22.08986, "1415": 23.18184, "1420": 22.13351, "1425": 22.30119, "1430": 22.58195, "1435": 22.11068, "1440": 22.17222, "1445": 22.00423, "1450": 22.14547, "1455": 22.02682, "1460": 22.17642, "1465": 22.10783, "1470": 22.23637, "1475": 22.09443, "1480": 22.22223, "1485": 22.22926, "1490": 22.00464, "1495": 22.4662, "1500": 22.37388, "1505": 22.80632, "1510": 22.0516, "1515": 22.16529, "1520": 22.95915, "1525": 21.98851, "1530": 22.2394, "1535": 23.29181, "1540": 22.02277, "1545": 21.99925, "1550": 22.32857, "1555": 22.32092, "1560": 22.06799, "1565": 21.96972, "1570": 22.46369, "1575": 22.02484, "1580": 21.97097, "1585": 22.27509, "1590": 22.53359, "1595": 21.98865, "1600": 22.27424, "1605": 22.60177, "1610": 22.22066, "1615": 22.11858, "1620": 22.06724, "1625": 22.32577, "1630": 22.89856, "1635": 21.98478, "1640": 22.41856, "1645": 23.19677, "1650": 22.65013, "1655": 22.70721, "1660": 22.43447, "1665": 22.51628, "1670": 22.86496, "1675": 22.47766, "1680": 22.59336, "1685": 22.80584, "1690": 22.53722, "1695": 22.48871, "1700": 22.53855, "1705": 22.7395, "1710": 22.32091, "1715": 22.2805, "1720": 22.52037, "1725": 22.32762, "1730": 22.34848, "1735": 22.2883, "1740": 22.45375, "1745": 22.27738, "1750": 22.26088, "1755": 22.53005, "1760": 22.25365, "1765": 22.23865, "1770": 22.25915, "1775": 22.45051, "1780": 22.5009, "1785": 22.48086, "1790": 22.6348, "1795": 22.19429, "1800": 22.21107, "1805": 22.30847, "1810": 23.01065, "1815": 22.30843, "1820": 22.43873, "1825": 22.87164, "1830": 22.27315, "1835": 22.35623, "1840": 23.04989, "1845": 22.34454, "1850": 22.45392, "1855": 23.43726, "1860": 22.3628, "1865": 22.51252, "1870": 22.74893, "1875": 22.34211, "1880": 22.27227, "1885": 22.2777, "1890": 22.26033, "1895": 22.25213, "1900": 22.53092, "1905": 22.31397, "1910": 22.35744, "1915": 22.38861, "1920": 22.41579, "1925": 22.46163, 
"1930": 22.27585, "1935": 22.89414, "1940": 22.42151, "1945": 22.52795, "1950": 22.98067, "1955": 22.31317, "1960": 22.38173, "1965": 23.15274, "1970": 22.74668, "1975": 22.33639, "1980": 23.01343, "1985": 22.50633, "1990": 22.42154, "1995": 22.38326, "2000": 22.55233, "2005": 22.77157, "2010": 22.53507, "2015": 22.29025, "2020": 22.71737, "2025": 22.32072, "2030": 22.42938, "2035": 22.55638, "2040": 22.52923, "2045": 22.25919, "2050": 22.40748, "2055": 22.38136, "2060": 22.31332, "2065": 22.42474, "2070": 23.01568, "2075": 22.3459, "2080": 22.2693, "2085": 23.20354, "2090": 22.27854, "2095": 22.51925, "2100": 23.12786, "2105": 22.64154, "2110": 22.43992, "2115": 22.79606, "2120": 22.79196, "2125": 22.46951, "2130": 22.53698, "2135": 22.38766, "2140": 22.23779, "2145": 22.52082, "2150": 22.35913, "2155": 22.46986, "2160": 22.3761, "2165": 22.40642, "2170": 21.99785, "2175": 22.20485, "2180": 22.35665, "2185": 22.01099, "2190": 22.00174, "2195": 22.0073, "2200": 22.12681, "2205": 21.98207, "2210": 23.83617, "2215": 22.15219, "2220": 22.04889, "2225": 22.95096, "2230": 22.40208, "2235": 22.11198, "2240": 22.12972, "2245": 22.00121, "2250": 21.99377, "2255": 21.98723, "2260": 22.03485, "2265": 22.06119, "2270": 22.02086, "2275": 21.94641, "2280": 21.98817, "2285": 21.97174, "2290": 21.98134, "2295": 21.99595, "2300": 22.39061, "2305": 22.24705, "2310": 22.10766, "2315": 22.27807, "2320": 22.63472, "2325": 22.18436, "2330": 21.99586, "2335": 23.36767, "2340": 22.00911, "2345": 22.00339, "2350": 23.29087, "2355": 21.97403, "2360": 22.39861, "2365": 22.1784, "2370": 21.96617, "2375": 21.96607, "2380": 21.96106, "2385": 22.17665, "2390": 22.10645, "2395": 22.2044, "2400": 21.96898, "2405": 22.21235, "2410": 22.06014, "2415": 21.97314, "2420": 22.00958, "2425": 22.0192, "2430": 22.07792, "2435": 21.96285, "2440": 21.94991, "2445": 22.10097, "2450": 21.96834, "2455": 21.95949, "2460": 22.63243, "2465": 22.04713, "2470": 21.95367, "2475": 22.63396, "2480": 21.97988, "2485": 21.99063, "2490": 23.35281, "2495": 22.28134, "2500": 22.17946, "2505": 22.08464, "2510": 22.46674, "2515": 22.01231, "2520": 21.94655, "2525": 22.06176, "2530": 22.03359, "2535": 21.99157, "2540": 21.99607, "2545": 22.54469, "2550": 22.1021, "2555": 22.00093, "2560": 21.96964, "2565": 22.49484, "2570": 21.98081, "2575": 21.9787, "2580": 23.18122, "2585": 22.66273, "2590": 22.48768, "2595": 22.77039, "2600": 22.19886, "2605": 22.44694, "2610": 22.45874, "2615": 22.56594, "2620": 22.06062, "2625": 22.74987, "2630": 22.01755, "2635": 22.46602, "2640": 22.83463, "2645": 22.10558, "2650": 22.13282, "2655": 22.87598, "2660": 21.96939, "2665": 21.95176, "2670": 22.72478, "2675": 22.07332, "2680": 22.07645, "2685": 22.03182, "2690": 22.20487, "2695": 22.0265, "2700": 22.54615, "2705": 22.15423, "2710": 22.10807, "2715": 22.02754, "2720": 22.07043, "2725": 22.04705, "2730": 22.01579, "2735": 21.9978, "2740": 21.94801, "2745": 22.1088, "2750": 22.48825, "2755": 21.98103, "2760": 22.42155, "2765": 22.57811, "2770": 21.94033, "2775": 21.95603, "2780": 23.10522, "2785": 21.93268, "2790": 21.94728, "2795": 23.00129, "2800": 21.97243, "2805": 22.05927, "2810": 22.49496, "2815": 22.36413, "2820": 21.98822, "2825": 22.00239, "2830": 22.07514, "2835": 21.941, "2840": 21.962, "2845": 21.98043, "2850": 22.14126, "2855": 21.93423, "2860": 21.97508, "2865": 22.05469, "2870": 22.00147, "2875": 22.21472, "2880": 21.90937, "2885": 22.11213, "2890": 22.47168, "2895": 22.00354, "2900": 21.95348, "2905": 22.5424, "2910": 22.08798, "2915": 21.94383, "2920": 
23.57422, "2925": 21.98254, "2930": 21.95837, "2935": 22.90391, "2940": 22.09406, "2945": 22.11863, "2950": 22.12591, "2955": 22.33026, "2960": 21.91195, "2965": 22.01255, "2970": 22.07826, "2975": 22.1348, "2980": 22.05192, "2985": 21.93209, "2990": 21.97496, "2995": 22.06938, "3000": 22.05776, "3005": 22.25947, "3010": 22.4516, "3015": 22.21274, "3020": 21.98523, "3025": 22.66941, "3030": 21.98487, "3035": 22.08728, "3040": 23.51621, "3045": 22.00331, "3050": 22.98313, "3055": 23.88533, "3060": 22.44783, "3065": 22.28096, "3070": 22.41115, "3075": 22.53901, "3080": 22.10982, "3085": 21.99411, "3090": 21.95357, "3095": 22.48489, "3100": 21.95122, "3105": 21.91855, "3110": 22.25745, "3115": 21.90812, "3120": 22.09901, "3125": 21.94687, "3130": 22.42758, "3135": 22.30041, "3140": 21.94234, "3145": 22.31524, "3150": 22.10401, "3155": 22.02902, "3160": 22.00084, "3165": 22.07582, "3170": 23.81692, "3175": 22.34463, "3180": 22.31254, "3185": 22.01924, "3190": 22.00481, "3195": 21.94896, "3200": 22.31445, "3205": 21.9779, "3210": 21.93103, "3215": 21.90668, "3220": 22.03311, "3225": 21.9683, "3230": 22.3476, "3235": 21.8893, "3240": 21.94247, "3245": 22.51957, "3250": 21.9957, "3255": 21.89219, "3260": 22.87314, "3265": 22.15628, "3270": 21.90489, "3275": 23.00701, "3280": 22.0631, "3285": 21.9211, "3290": 22.68334, "3295": 21.89549, "3300": 22.38505, "3305": 22.01871, "3310": 22.04052, "3315": 22.26692, "3320": 21.97196, "3325": 21.9538, "3330": 21.91738, "3335": 21.95123, "3340": 21.89427, "3345": 21.89893, "3350": 22.18461, "3355": 22.04362, "3360": 21.9007, "3365": 22.06639, "3370": 22.06829, "3375": 21.89323, "3380": 21.86426, "3385": 22.96109, "3390": 21.91624, "3395": 21.88387, "3400": 23.07858, "3405": 21.90828, "3410": 21.88213, "3415": 22.96194, "3420": 22.0547, "3425": 22.22236, "3430": 22.84489, "3435": 22.05076, "3440": 21.91535, "3445": 22.17423, "3450": 21.94291, "3455": 21.93241, "3460": 22.36056, "3465": 21.91161, "3470": 21.91147, "3475": 21.96016, "3480": 21.92586, "3485": 21.92666, "3490": 21.93728, "3495": 21.93778, "3500": 21.93863, "3505": 22.12587, "3510": 22.10105, "3515": 21.9277, "3520": 22.25658, "3525": 21.92694, "3530": 21.94781, "3535": 23.50185, "3540": 21.97174, "3545": 22.05844, "3550": 23.28739, "3555": 21.97104, "3560": 21.947, "3565": 22.36204, "3570": 22.28927, "3575": 21.95385, "3580": 22.72808, "3585": 21.96793, "3590": 21.96817, "3595": 21.91738, "3600": 21.93633, "3605": 21.9862, "3610": 21.9109, "3615": 22.63248, "3620": 21.89548, "3625": 21.86755, "3630": 21.97857, "3635": 22.18541, "3640": 22.00794, "3645": 22.13824, "3650": 21.90327, "3655": 21.91794, "3660": 22.63716, "3665": 22.21602, "3670": 22.14889, "3675": 22.84651, "3680": 22.12957, "3685": 22.26822, "3690": 23.06388, "3695": 22.0932, "3700": 22.41863, "3705": 22.88791, "3710": 22.16903, "3715": 21.969, "3720": 22.02032, "3725": 21.95162, "3730": 21.97527, "3735": 21.97671, "3740": 21.97859, "3745": 22.49228, "3750": 21.94657, "3755": 22.04616, "3760": 23.31876, "3765": 22.23427, "3770": 21.93586, "3775": 23.03057, "3780": 22.1601, "3785": 21.97717, "3790": 22.38684, "3795": 21.94359, "3800": 21.9093, "3805": 22.23889, "3810": 21.95759, "3815": 22.07084, "3820": 22.35077, "3825": 21.98614, "3830": 21.98721, "3835": 21.99153, "3840": 22.18873, "3845": 21.95713, "3850": 22.03424, "3855": 22.33623, "3860": 21.94898, "3865": 22.03167, "3870": 21.99354, "3875": 22.0926, "3880": 22.00058, "3885": 22.49012, "3890": 22.2445, "3895": 21.99326, "3900": 23.14098, "3905": 22.00826, "3910": 22.27556, 
"3915": 22.66539, "3920": 21.96698, "3925": 22.19655, "3930": 22.39693, "3935": 21.95024, "3940": 21.94962, "3945": 22.39099, "3950": 21.99116, "3955": 22.00551, "3960": 21.94971, "3965": 21.97359, "3970": 21.94154, "3975": 21.9862, "3980": 22.46948, "3985": 21.99518, "3990": 21.99948, "3995": 21.95742, "4000": 21.97806, "4005": 22.29998, "4010": 22.25772, "4015": 21.97304, "4020": 23.04687, "4025": 22.02255, "4030": 21.96136, "4035": 22.63988, "4040": 21.98201, "4045": 22.44684, "4050": 22.69289, "4055": 21.91054, "4060": 22.09969, "4065": 22.15419, "4070": 21.98784, "4075": 22.34465, "4080": 22.14339, "4085": 22.22435, "4090": 22.16608, "4095": 22.04499, "4100": 22.03883, "4105": 22.0194, "4110": 22.28322, "4115": 22.16577, "4120": 22.04861, "4125": 22.01207, "4130": 22.03022, "4135": 22.03551, "4140": 22.10007, "4145": 22.20531, "4150": 22.04516, "4155": 22.01998, "4160": 21.98422, "4165": 22.19016, "4170": 22.05819, "4175": 22.04256, "4180": 22.23628, "4185": 22.04532, "4190": 22.06464, "4195": 21.97782, "4200": 22.25726, "4205": 23.50028, "4210": 22.18097, "4215": 21.98326, "4220": 22.68992, "4225": 22.10064, "4230": 22.1042, "4235": 22.09756, "4240": 21.9846, "4245": 22.2915, "4250": 22.0134, "4255": 21.98359, "4260": 22.00443, "4265": 22.3594, "4270": 22.16943, "4275": 22.10875, "4280": 22.23036, "4285": 22.02488, "4290": 22.03753, "4295": 22.11202, "4300": 21.98034, "4305": 22.04396, "4310": 21.98521, "4315": 22.31947, "4320": 22.12728, "4325": 21.96752, "4330": 23.2102, "4335": 22.00819, "4340": 22.09734, "4345": 23.2734, "4350": 22.10175, "4355": 22.00907, "4360": 22.51192, "4365": 21.99216, "4370": 21.99815, "4375": 22.23182, "4380": 21.99145, "4385": 21.96195, "4390": 22.3484, "4395": 22.15858, "4400": 21.9582, "4405": 21.98637, "4410": 22.22783, "4415": 21.97977, "4420": 21.96251, "4425": 22.15796, "4430": 22.05459, "4435": 22.03964, "4440": 22.01487, "4445": 22.37922, "4450": 21.97776, "4455": 22.02979, "4460": 21.93978, "4465": 22.00505, "4470": 22.91704, "4475": 22.0008, "4480": 22.50814, "4485": 23.5463, "4490": 21.98618, "4495": 21.96548, "4500": 22.61999, "4505": 21.97729, "4510": 22.13021, "4515": 22.01193, "4520": 22.0045, "4525": 22.00856, "4530": 22.01993, "4535": 22.06798, "4540": 22.01047, "4545": 22.60098, "4550": 21.96739, "4555": 22.01616, "4560": 22.20296, "4565": 21.9668, "4570": 22.03036, "4575": 23.0835, "4580": 22.6443, "4585": 22.01308, "4590": 23.01417, "4595": 22.51771, "4600": 22.11776, "4605": 23.18986, "4610": 22.02416, "4615": 22.01537, "4620": 22.79275, "4625": 21.98761, "4630": 22.50517, "4635": 21.96502, "4640": 21.93878, "4645": 21.94931, "4650": 22.15909, "4655": 22.37708, "4660": 22.08815, "4665": 21.99406, "4670": 22.09759, "4675": 22.04793, "4680": 22.20862, "4685": 21.99964, "4690": 22.31631, "4695": 22.00274, "4700": 21.98257, "4705": 21.98885, "4710": 21.99165, "4715": 22.16433, "4720": 21.97777, "4725": 22.32039, "4730": 21.99257, "4735": 21.89009, "4740": 22.4678, "4745": 21.87928, "4750": 21.99146, "4755": 22.63725, "4760": 22.12453, "4765": 21.8854, "4770": 23.11332, "4775": 21.87945, "4780": 21.91698, "4785": 23.30958, "4790": 22.06861, "4795": 22.15321, "4800": 23.12633, "4805": 22.06231, "4810": 22.08964, "4815": 21.98205, "4820": 21.9092, "4825": 21.96551, "4830": 21.95651, "4835": 21.96523, "4840": 22.23273, "4845": 22.00668, "4850": 22.02847, "4855": 22.33499, "4860": 22.13925, "4865": 21.90338, "4870": 21.94092, "4875": 22.5508, "4880": 22.09006, "4885": 21.96061, "4890": 22.96961, "4895": 22.0147, "4900": 21.96034, "4905": 
23.141, "4910": 22.26535, "4915": 22.18551, "4920": 22.5092, "4925": 22.20076, "4930": 21.98051, "4935": 22.22873, "4940": 21.97664, "4945": 21.97348, "4950": 21.91624, "4955": 21.95363, "4960": 21.97394, "4965": 21.98544, "4970": 21.92302, "4975": 21.97388, "4980": 21.93803, "4985": 22.13757, "4990": 21.90994, "4995": 22.01317, "5000": 21.97192, "5005": 21.94302, "5010": 22.67313, "5015": 22.28849, "5020": 21.91929, "5025": 23.29525, "5030": 21.93552, "5035": 22.35952, "5040": 22.82124, "5045": 21.98222, "5050": 21.95873, "5055": 22.1207, "5060": 22.34079, "5065": 21.95256, "5070": 21.93158, "5075": 21.92893, "5080": 21.92085, "5085": 22.00254, "5090": 22.01328, "5095": 22.49701, "5100": 21.93652, "5105": 22.76964, "5110": 21.95815, "5115": 21.84544, "5120": 21.82763, "5125": 21.84404, "5130": 21.83533, "5135": 21.96136, "5140": 21.93581, "5145": 22.26999, "5150": 21.81122, "5155": 21.85867, "5160": 21.92011, "5165": 21.87472, "5170": 21.89301, "5175": 22.02641, "5180": 22.17353, "5185": 21.9203, "5190": 21.83911, "5195": 21.84457, "5200": 21.83714, "5205": 21.81046, "5210": 23.24974, "5215": 21.97638, "5220": 21.79544, "5225": 22.5593, "5230": 22.18374, "5235": 22.01617, "5240": 22.15708, "5245": 21.78251, "5250": 21.79684, "5255": 21.83175, "5260": 21.83991, "5265": 21.99808, "5270": 21.78432, "5275": 21.78188, "5280": 21.80626, "5285": 21.80569, "5290": 21.80653, "5295": 21.79046, "5300": 22.33391, "5305": 21.94377, "5310": 21.84149, "5315": 21.80647, "5320": 22.49636, "5325": 21.9502, "5330": 21.80807, "5335": 23.04185, "5340": 21.94571, "5345": 21.84908, "5350": 22.80553, "5355": 21.80885, "5360": 22.09529, "5365": 22.23081, "5370": 21.82996, "5375": 21.80382, "5380": 21.89, "5385": 21.81085, "5390": 21.96167, "5395": 22.01134, "5400": 21.84536, "5405": 21.85263, "5410": 21.95983, "5415": 21.83943, "5420": 21.84764, "5425": 21.82045, "5430": 22.00095, "5435": 21.81427, "5440": 21.81643, "5445": 21.94497, "5450": 21.77888, "5455": 21.79599, "5460": 22.81204, "5465": 21.86355, "5470": 21.79651, "5475": 22.39709, "5480": 21.76531, "5485": 21.95405, "5490": 22.44137, "5495": 22.01226, "5500": 21.80305, "5505": 21.93014, "5510": 22.35104, "5515": 21.79423, "5520": 21.82593, "5525": 21.88716, "5530": 21.86735, "5535": 21.8156, "5540": 21.9136, "5545": 22.3953, "5550": 21.98692, "5555": 21.85006, "5560": 21.84305, "5565": 22.34507, "5570": 21.82124, "5575": 21.83654, "5580": 23.02476, "5585": 21.81322, "5590": 22.02586, "5595": 21.95878, "5600": 21.98102, "5605": 22.27885, "5610": 22.1455, "5615": 23.27765, "5620": 21.99887, "5625": 22.09528, "5630": 23.00047, "5635": 21.92437, "5640": 22.05922, "5645": 22.39652, "5650": 22.06667, "5655": 22.14134, "5660": 22.17906, "5665": 21.83086, "5670": 21.87373, "5675": 22.01868, "5680": 22.11524, "5685": 22.00367, "5690": 21.9908, "5695": 22.05459, "5700": 21.90247, "5705": 21.9079, "5710": 21.85879, "5715": 21.88318, "5720": 21.85553, "5725": 21.8884, "5730": 22.10924, "5735": 21.89549, "5740": 22.59445, "5745": 21.90379, "5750": 22.14941, "5755": 23.48192, "5760": 21.86133, "5765": 22.03934, "5770": 22.6296, "5775": 22.20277, "5780": 21.91902, "5785": 22.35466, "5790": 21.90337, "5795": 21.93882, "5800": 21.90693, "5805": 21.931, "5810": 22.16471, "5815": 22.0019, "5820": 21.93389, "5825": 21.94625, "5830": 21.84773, "5835": 21.86732, "5840": 21.8938, "5845": 22.0256, "5850": 21.95057, "5855": 21.93024, "5860": 22.00319, "5865": 21.88673, "5870": 21.87843, "5875": 21.90192, "5880": 22.0788, "5885": 21.95549, "5890": 22.04225, "5895": 23.14906, 
"5900": 21.93836, "5905": 21.90501, "5910": 23.31015, "5915": 21.97833, "5920": 21.87582, "5925": 22.13619, "5930": 21.98223, "5935": 21.86652, "5940": 22.04678, "5945": 22.09672, "5950": 21.9062, "5955": 22.02973, "5960": 21.90345, "5965": 21.95963, "5970": 21.96059, "5975": 22.12381, "5980": 21.96594, "5985": 21.89312, "5990": 21.85318, "5995": 21.87226, "6000": 21.9701, "6005": 21.80434, "6010": 22.08702, "6015": 23.00717, "6020": 21.84365, "6025": 21.82831, "6030": 23.23804, "6035": 22.00694, "6040": 21.88731, "6045": 22.56864, "6050": 21.88132, "6055": 21.84251, "6060": 21.86149, "6065": 22.09724, "6070": 22.06419, "6075": 22.02699, "6080": 21.82297, "6085": 21.86571, "6090": 22.01798, "6095": 21.86373, "6100": 21.96488, "6105": 21.89965, "6110": 22.15713, "6115": 21.83216, "6120": 21.87837, "6125": 22.6587, "6130": 21.92566, "6135": 21.88253, "6140": 23.00492, "6145": 21.82252, "6150": 21.88084, "6155": 23.04906, "6160": 22.18391, "6165": 22.14412, "6170": 22.08843, "6175": 21.97922, "6180": 22.05498, "6185": 22.11354, "6190": 21.94268, "6195": 22.01121, "6200": 22.0075, "6205": 21.91371, "6210": 21.93917, "6215": 21.89548, "6220": 22.02784, "6225": 21.87357, "6230": 22.22938, "6235": 22.11996, "6240": 21.9288, "6245": 21.86802, "6250": 23.08231, "6255": 21.85498, "6260": 21.85146, "6265": 22.97198, "6270": 22.14654, "6275": 21.87953, "6280": 22.35458, "6285": 22.36305, "6290": 21.60123, "6295": 21.91667, "6300": 21.55509, "6305": 21.60043, "6310": 21.71148, "6315": 21.61902, "6320": 21.71052, "6325": 21.56121, "6330": 21.79125, "6335": 21.61895, "6340": 21.82243, "6345": 21.58892, "6350": 21.56771, "6355": 21.97018, "6360": 21.55632, "6365": 21.57243, "6370": 21.54972, "6375": 21.89003, "6380": 21.56867, "6385": 21.5805, "6390": 22.49199, "6395": 21.68268, "6400": 21.63866, "6405": 22.22682, "6410": 21.75737, "6415": 21.58986, "6420": 22.98403, "6425": 21.54404, "6430": 21.66838, "6435": 22.45726, "6440": 21.57826, "6445": 21.79136, "6450": 21.72834, "6455": 21.58094, "6460": 21.55374, "6465": 21.75886, "6470": 21.52991, "6475": 21.59133, "6480": 21.93324, "6485": 21.57468, "6490": 21.58156, "6495": 21.56442, "6500": 21.70763, "6505": 21.54559, "6510": 22.67019, "6515": 21.61771, "6520": 21.78113, "6525": 22.1951, "6530": 21.51687, "6535": 21.5471, "6540": 22.79739, "6545": 21.55815, "6550": 21.5762, "6555": 22.4953, "6560": 21.60437, "6565": 21.7942, "6570": 21.84409, "6575": 21.60122, "6580": 21.69897, "6585": 21.56287, "6590": 21.80823, "6595": 21.53247, "6600": 21.90339, "6605": 21.5872, "6610": 21.54108, "6615": 21.57595, "6620": 21.58918, "6625": 21.57443, "6630": 21.56687, "6635": 22.08588, "6640": 21.55605, "6645": 21.58208, "6650": 22.29118, "6655": 21.71883, "6660": 21.81912, "6665": 22.20041, "6670": 21.87253, "6675": 21.55853, "6680": 22.76485, "6685": 21.97927, "6690": 21.68519, "6695": 22.384, "6700": 21.65105, "6705": 21.56905, "6710": 22.01037, "6715": 21.57351, "6720": 21.84402, "6725": 21.93865, "6730": 21.57359, "6735": 21.57409, "6740": 21.56773, "6745": 22.17163, "6750": 21.61912, "6755": 21.57112, "6760": 22.0843, "6765": 21.72306, "6770": 21.63203, "6775": 22.80584, "6780": 21.71512, "6785": 21.62255, "6790": 22.9722, "6795": 21.65273, "6800": 21.73816, "6805": 21.56585, "6810": 21.63462, "6815": 21.84105, "6820": 21.54243, "6825": 21.55682, "6830": 21.66568, "6835": 21.6405, "6840": 21.56556, "6845": 21.55546, "6850": 21.86375, "6855": 21.72456, "6860": 21.48658, "6865": 21.65416, "6870": 21.55668, "6875": 21.69844, "6880": 22.20503, "6885": 22.06492, "6890": 
21.51941, "6895": 22.84571, "6900": 21.5346, "6905": 21.499, "6910": 22.80324, "6915": 21.49194, "6920": 21.50389, "6925": 21.84848, "6930": 21.92564, "6935": 21.48695, "6940": 21.69768, "6945": 21.66972, "6950": 21.52008, "6955": 21.76282, "6960": 21.52316, "6965": 21.81372, "6970": 21.53064, "6975": 21.81821, "6980": 21.51087, "6985": 21.53629, "6990": 21.64172, "6995": 21.49074, "7000": 21.55824, "7005": 21.68024, "7010": 21.67013, "7015": 22.87816, "7020": 21.53585, "7025": 21.51361, "7030": 22.50569, "7035": 21.5219, "7040": 22.20834, "7045": 21.71869, "7050": 21.48244, "7055": 21.58961, "7060": 21.54911, "7065": 21.7198, "7070": 21.5134, "7075": 21.50591, "7080": 21.94437, "7085": 21.50681, "7090": 21.56549, "7095": 21.66914, "7100": 21.52916, "7105": 21.54661, "7110": 21.806, "7115": 21.78521, "7120": 21.52422, "7125": 22.4037, "7130": 21.87564, "7135": 21.52815, "7140": 22.74947, "7145": 21.51337, "7150": 21.64755, "7155": 22.27027, "7160": 21.51728, "7165": 22.11304, "7170": 21.59328, "7175": 21.71752, "7180": 21.57915, "7185": 21.47227, "7190": 21.51114, "7195": 21.7332, "7200": 21.52916, "7205": 21.46917, "7210": 21.72661, "7215": 21.47586, "7220": 21.51426, "7225": 21.46909, "7230": 21.48341, "7235": 21.78691, "7240": 21.48813, "7245": 21.75961, "7250": 21.93572, "7255": 21.84052, "7260": 21.56804, "7265": 22.46383, "7270": 21.51143, "7275": 21.53648, "7280": 22.91481, "7285": 21.6764, "7290": 22.00167, "7295": 22.16194, "7300": 21.52871, "7305": 21.52373, "7310": 21.64676, "7315": 21.66761, "7320": 22.39133, "7325": 21.81146, "7330": 21.68017, "7335": 22.72589, "7340": 21.8913, "7345": 21.62648, "7350": 22.44115, "7355": 21.64332, "7360": 21.87795, "7365": 21.99484, "7370": 21.6657, "7375": 21.63131, "7380": 22.04182, "7385": 21.62982, "7390": 21.80899, "7395": 21.94722, "7400": 21.66708, "7405": 21.67367, "7410": 22.03996, "7415": 21.65468, "7420": 21.65413, "7425": 21.65841, "7430": 21.69575, "7435": 21.82306, "7440": 21.64474, "7445": 21.94554, "7450": 21.78938, "7455": 21.62659, "7460": 22.46404, "7465": 21.70502, "7470": 21.63697, "7475": 22.12899, "7480": 21.61485, "7485": 21.6462, "7490": 22.73169, "7495": 21.82973, "7500": 21.6226, "7505": 21.87097, "7510": 22.17775, "7515": 21.72797, "7520": 21.60932, "7525": 21.83348, "7530": 21.66551, "7535": 21.66183, "7540": 21.64903, "7545": 22.14012, "7550": 21.66554, "7555": 21.66904, "7560": 21.63442, "7565": 21.60158, "7570": 21.62441, "7575": 21.89958, "7580": 21.59731, "7585": 21.61016, "7590": 21.82005, "7595": 21.704, "7600": 21.54872, "7605": 21.79047, "7610": 21.57337, "7615": 21.86565, "7620": 22.16463, "7625": 21.56603, "7630": 21.57245, "7635": 22.49347, "7640": 21.72703, "7645": 21.56682, "7650": 22.54657, "7655": 21.79189, "7660": 21.72438, "7665": 22.21498, "7670": 21.59055, "7675": 21.5495, "7680": 21.59633, "7685": 21.68439, "7690": 21.59202, "7695": 22.02696, "7700": 21.58348, "7705": 21.58426, "7710": 21.73103, "7715": 21.6075, "7720": 21.70482, "7725": 21.83555, "7730": 21.55932, "7735": 21.56762, "7740": 21.58039, "7745": 21.97049, "7750": 21.59693, "7755": 21.60876, "7760": 22.18318, "7765": 21.73346, "7770": 21.5591, "7775": 22.61852, "7780": 21.95599, "7785": 21.58647, "7790": 22.27365, "7795": 21.84926, "7800": 21.60617, "7805": 22.37432, "7810": 21.58867, "7815": 21.6804, "7820": 22.46565, "7825": 21.57782, "7830": 21.69743, "7835": 21.74499, "7840": 21.65229, "7845": 21.59997, "7850": 21.60872, "7855": 21.92817, "7860": 21.56262, "7865": 21.59007, "7870": 21.55093, "7875": 21.7044, "7880": 21.59701, 
"7885": 21.59166, "7890": 21.75017, "7895": 21.59156, "7900": 22.10477, "7905": 21.54437, "7910": 21.80636, "7915": 22.27964, "7920": 21.57408, "7925": 21.95093, "7930": 22.42193, "7935": 21.54088, "7940": 21.56937, "7945": 22.93368, "7950": 21.57557, "7955": 21.54396, "7960": 21.83613, "7965": 21.56266, "7970": 21.5565, "7975": 21.6874, "7980": 22.49076, "7985": 21.68509, "7990": 21.54276, "7995": 21.65915, "8000": 21.60557, "8005": 21.58272, "8010": 21.95127, "8015": 21.58931, "8020": 22.1383, "8025": 21.66398, "8030": 21.57026, "8035": 22.27465, "8040": 21.57156, "8045": 22.24673, "8050": 22.45007, "8055": 21.62685, "8060": 21.71871, "8065": 22.29043, "8070": 21.58645, "8075": 21.77809, "8080": 22.16579, "8085": 22.23269, "8090": 21.66704, "8095": 21.66156, "8100": 21.83726, "8105": 21.94036, "8110": 21.66603, "8115": 22.65618, "8120": 21.73138, "8125": 21.59023, "8130": 22.7513, "8135": 21.66344, "8140": 21.90947, "8145": 22.61214, "8150": 21.74075, "8155": 21.57846, "8160": 22.70999, "8165": 21.56469, "8170": 21.55215, "8175": 23.04965, "8180": 21.57554, "8185": 21.67871, "8190": 22.98785, "8195": 21.58451, "8200": 21.68888, "8205": 21.59385, "8210": 21.55843, "8215": 21.70699, "8220": 21.57633, "8225": 21.88158, "8230": 21.59766, "8235": 21.61291, "8240": 21.57225, "8245": 21.73651, "8250": 21.76029, "8255": 21.58032, "8260": 21.93042, "8265": 22.20302, "8270": 21.57454, "8275": 21.56251, "8280": 22.92304, "8285": 21.66582, "8290": 21.6317, "8295": 22.7375, "8300": 21.56076, "8305": 21.58198, "8310": 21.86358, "8315": 21.73775, "8320": 21.58916, "8325": 21.69942, "8330": 21.59433, "8335": 21.59997, "8340": 21.61561, "8345": 21.63908, "8350": 21.75208, "8355": 21.5787, "8360": 21.56755, "8365": 21.57169, "8370": 21.6061, "8375": 21.58454, "8380": 21.58622, "8385": 21.89827, "8390": 21.81892, "8395": 21.57512, "8400": 21.71467, "8405": 21.96738, "8410": 21.71955, "8415": 21.57084, "8420": 22.76563, "8425": 21.57832, "8430": 21.67973, "8435": 22.94396, "8440": 21.57373, "8445": 21.69297, "8450": 22.57763, "8455": 21.78558, "8460": 21.55236, "8465": 21.83519, "8470": 21.61272, "8475": 21.62569, "8480": 21.81667, "8485": 21.63371, "8490": 21.69537, "8495": 21.61411, "8500": 21.62545, "8505": 21.76635, "8510": 21.83499, "8515": 21.65528, "8520": 21.63817, "8525": 21.99158, "8530": 21.6834, "8535": 21.84028, "8540": 22.65783, "8545": 21.58558, "8550": 21.64771, "8555": 22.46466, "8560": 21.59319, "8565": 21.66479, "8570": 22.36745, "8575": 21.9416, "8580": 21.75798, "8585": 22.00354, "8590": 21.64109, "8595": 21.75392, "8600": 21.84992, "8605": 21.58379, "8610": 22.42168, "8615": 21.77298, "8620": 21.54566, "8625": 21.77764, "8630": 21.91924, "8635": 21.61226, "8640": 21.6596, "8645": 21.92832, "8650": 21.6917, "8655": 21.71594, "8660": 21.6207, "8665": 127.65659, "8670": 21.73568, "8675": 21.58648, "8680": 22.79401, "8685": 21.85029, "8690": 21.55877, "8695": 22.45186, "8700": 21.59247, "8705": 21.69852, "8710": 22.07613, "8715": 21.54105, "8720": 21.55899, "8725": 22.0823, "8730": 21.5982, "8735": 21.84002, "8740": 22.12181, "8745": 21.54027, "8750": 21.54613, "8755": 21.9103, "8760": 21.56046, "8765": 21.54708, "8770": 22.21866, "8775": 21.55031, "8780": 21.52411, "8785": 21.54286, "8790": 21.71191, "8795": 21.65245, "8800": 21.59828, "8805": 21.83388, "8810": 21.5265, "8815": 21.79992, "8820": 21.63936, "8825": 21.56304, "8830": 21.80322, "8835": 21.55149, "8840": 21.74548, "8845": 22.31021, "8850": 21.67447, "8855": 21.68411, "8860": 23.0921, "8865": 21.53329, "8870": 21.55119, "8875": 
22.1934, "8880": 21.53795, "8885": 21.82398, "8890": 21.6961, "8895": 21.65758, "8900": 21.73318, "8905": 21.55457, "8910": 21.5221, "8915": 21.56268, "8920": 21.69941, "8925": 21.66265, "8930": 21.87395, "8935": 21.58968, "8940": 21.54634, "8945": 21.52449, "8950": 21.50537, "8955": 22.02127, "8960": 21.68428, "8965": 21.82043, "8970": 22.01865, "8975": 21.70309, "8980": 21.56232, "8985": 22.30345, "8990": 21.53018, "8995": 21.51232, "9000": 22.73436, "9005": 21.51629, "9010": 22.38945, "9015": 21.64242, "9020": 21.48082, "9025": 21.74582, "9030": 21.63259, "9035": 22.04145, "9040": 21.51809, "9045": 21.5237, "9050": 21.68526, "9055": 21.50102, "9060": 21.56537, "9065": 21.52971, "9070": 21.93938, "9075": 21.89991, "9080": 21.49982, "9085": 22.17616, "9090": 22.04565, "9095": 21.50913, "9100": 21.53263, "9105": 22.14279, "9110": 21.63013, "9115": 21.53979, "9120": 22.49989, "9125": 21.49387, "9130": 21.60884, "9135": 22.21774, "9140": 21.62466, "9145": 21.66836, "9150": 21.9373, "9155": 21.70565, "9160": 21.5345, "9165": 21.72869, "9170": 21.67548, "9175": 21.82239, "9180": 21.53729, "9185": 21.76357, "9190": 21.54337, "9195": 21.58407, "9200": 22.04953, "9205": 21.61364, "9210": 22.16068, "9215": 21.77185, "9220": 21.54499, "9225": 21.52921, "9230": 22.15282, "9235": 21.93449, "9240": 21.52877, "9245": 22.09845, "9250": 21.84352, "9255": 21.62616, "9260": 21.68924, "9265": 21.63352, "9270": 21.59504, "9275": 21.63042, "9280": 21.63003, "9285": 21.88115, "9290": 21.68674, "9295": 21.58672, "9300": 21.59512, "9305": 21.59006, "9310": 21.59279, "9315": 21.6522, "9320": 21.82965, "9325": 21.62949, "9330": 21.5879, "9335": 21.59741, "9340": 21.7395, "9345": 21.57283, "9350": 21.59166, "9355": 21.83885, "9360": 21.57381, "9365": 21.68646, "9370": 21.59539, "9375": 21.83309, "9380": 21.60085, "9385": 21.60231, "9390": 21.84914, "9395": 21.59133, "9400": 21.64677, "9405": 21.81709, "9410": 22.81671, "9415": 21.61724, "9420": 21.56074, "9425": 22.2435, "9430": 21.96477, "9435": 21.56739, "9440": 21.80337, "9445": 21.53958, "9450": 21.74758, "9455": 21.59174, "9460": 21.57208, "9465": 21.54961, "9470": 21.7567, "9475": 21.92704, "9480": 21.65824, "9485": 21.56882, "9490": 21.59968, "9495": 21.56084, "9500": 21.60707, "9505": 21.97343, "9510": 21.81296, "9515": 22.48313, "9520": 21.55685, "9525": 21.71434, "9530": 22.44564, "9535": 21.74208, "9540": 21.78547, "9545": 22.34882, "9550": 21.52664, "9555": 21.57649, "9560": 21.5683, "9565": 21.59571, "9570": 21.55621, "9575": 21.55806, "9580": 22.16448, "9585": 21.56369, "9590": 21.6439, "9595": 21.68091, "9600": 21.59875, "9605": 21.54942, "9610": 21.72925, "9615": 22.09424, "9620": 21.54016, "9625": 22.19154, "9630": 21.95685, "9635": 21.53125, "9640": 21.89888, "9645": 21.57056, "9650": 21.96979, "9655": 22.64751, "9660": 21.56405, "9665": 22.02174, "9670": 22.49898, "9675": 21.82321, "9680": 21.54589, "9685": 21.89242, "9690": 21.61946, "9695": 21.56654, "9700": 21.56055, "9705": 21.55786, "9710": 21.97888, "9715": 21.55525, "9720": 21.82048, "9725": 21.68368, "9730": 21.57574, "9735": 21.99, "9740": 21.54289, "9745": 22.17278, "9750": 22.04093, "9755": 21.83546, "9760": 21.53476, "9765": 21.94009, "9770": 21.5373, "9775": 21.59461, "9780": 22.62531, "9785": 21.57091, "9790": 22.07092, "9795": 22.73768, "9800": 21.5582, "9805": 21.53336, "9810": 22.77846, "9815": 151.56555, "9820": 21.76158, "9825": 21.58727, "9830": 22.79601, "9835": 21.81669, "9840": 21.81185, "9845": 22.49418, "9850": 21.66813, "9855": 21.74694, "9860": 22.34097, "9865": 
21.60657, "9870": 21.55815, "9875": 22.72419, "9880": 21.59769, "9885": 21.81663, "9890": 22.29588, "9895": 21.55961, "9900": 21.58406, "9905": 22.15191, "9910": 21.60336, "9915": 21.56689, "9920": 21.92275, "9925": 21.60723, "9930": 21.58311, "9935": 21.69279, "9940": 21.78867, "9945": 21.5606, "9950": 21.67206, "9955": 21.81135, "9960": 21.55233, "9965": 21.54428, "9970": 21.5556, "9975": 21.76476, "9980": 21.65493, "9985": 21.55541, "9990": 21.79139, "9995": 21.86553, "10000": 21.87056, "10005": 22.2947, "10010": 21.80388, "10015": 21.60044, "10020": 22.69172, "10025": 21.58146, "10030": 21.84229, "10035": 22.33141, "10040": 21.60271, "10045": 21.56093, "10050": 21.81465, "10055": 22.03131, "10060": 21.52378, "10065": 22.17086, "10070": 21.64502, "10075": 21.57193, "10080": 21.5388, "10085": 21.5844, "10090": 22.23949, "10095": 21.56088, "10100": 22.29833, "10105": 21.56012, "10110": 21.66204, "10115": 21.66165, "10120": 21.56301, "10125": 21.94571, "10130": 22.05202, "10135": 21.91853, "10140": 21.67032, "10145": 22.55575, "10150": 21.58157, "10155": 21.74572, "10160": 22.79743, "10165": 21.65935, "10170": 21.68104, "10175": 22.16625, "10180": 21.9581, "10185": 21.62725, "10190": 21.67642, "10195": 22.07462, "10200": 21.5968, "10205": 21.66735, "10210": 21.68722, "10215": 22.09408, "10220": 21.7072, "10225": 21.68306, "10230": 21.89629, "10235": 21.56992, "10240": 21.54678, "10245": 21.58816, "10250": 21.54674, "10255": 21.74142, "10260": 21.58282, "10265": 22.1365, "10270": 21.91608, "10275": 21.56388, "10280": 21.76403, "10285": 22.32128, "10290": 21.816, "10295": 21.54922, "10300": 22.87305, "10305": 21.74536, "10310": 21.87074, "10315": 22.13246, "10320": 21.56935, "10325": 21.60835, "10330": 21.74423, "10335": 22.01148, "10340": 22.09055, "10345": 21.7638, "10350": 21.63778, "10355": 21.55602, "10360": 21.57353, "10365": 21.56312, "10370": 21.64073, "10375": 21.87221, "10380": 22.09303, "10385": 21.76062, "10390": 21.52946, "10395": 22.37972, "10400": 21.5642, "10405": 21.74604, "10410": 21.71709, "10415": 21.67162, "10420": 21.61627, "10425": 21.78444, "10430": 21.67122, "10435": 21.67412, "10440": 21.73551, "10445": 21.64008, "10450": 21.61723, "10455": 21.63606, "10460": 21.94032, "10465": 21.65542, "10470": 21.63614, "10475": 21.82539, "10480": 21.61568, "10485": 21.63221, "10490": 21.76017, "10495": 21.71118, "10500": 21.61902, "10505": 21.79573, "10510": 21.73892, "10515": 21.60925, "10520": 21.57984, "10525": 21.91586, "10530": 21.79295, "10535": 21.61887, "10540": 21.69544, "10545": 21.63052, "10550": 21.71352, "10555": 21.63166, "10560": 22.15615, "10565": 21.60033, "10570": 21.5547, "10575": 21.94854, "10580": 21.59134, "10585": 21.58216, "10590": 21.55719, "10595": 22.17801, "10600": 21.67543, "10605": 23.05779, "10610": 21.59786, "10615": 21.96628, "10620": 21.85719, "10625": 21.69625, "10630": 21.54925, "10635": 21.68876, "10640": 21.78724, "10645": 21.55797, "10650": 21.79497, "10655": 21.59286, "10660": 21.65641, "10665": 21.97565, "10670": 21.58784, "10675": 21.98708, "10680": 21.5645, "10685": 21.62078, "10690": 21.68406, "10695": 21.77777, "10700": 21.8501, "10705": 21.60438, "10710": 21.78011, "10715": 22.35088, "10720": 21.65419, "10725": 21.64033, "10730": 23.24446, "10735": 21.597, "10740": 21.55706, "10745": 22.99714, "10750": 21.58522, "10755": 21.59997, "10760": 21.94814, "10765": 21.85138, "10770": 21.55478, "10775": 21.55384, "10780": 21.968, "10785": 21.79166, "10790": 21.57412, "10795": 21.56497, "10800": 21.7436, "10805": 21.67441, "10810": 21.66373, 
"10815": 21.84089, "10820": 21.72341, "10825": 21.83479, "10830": 21.57603, "10835": 21.59638, "10840": 21.72194, "10845": 21.59408, "10850": 21.71372, "10855": 22.15755, "10860": 21.57802, "10865": 21.55898, "10870": 22.57421, "10875": 21.59747, "10880": 21.57519, "10885": 22.90236, "10890": 21.9644, "10895": 21.58538, "10900": 22.33277, "10905": 21.74865, "10910": 21.86713, "10915": 21.89929, "10920": 21.57659, "10925": 21.84496, "10930": 21.62702, "10935": 21.71721, "10940": 21.60268, "10945": 22.06929, "10950": 21.58537, "10955": 21.57642, "10960": 21.60109, "10965": 21.91572, "10970": 21.62665, "10975": 21.59888, "10980": 22.38591, "10985": 21.59574, "10990": 21.5797, "10995": 22.00637, "11000": 21.55043, "11005": 21.79849, "11010": 21.66434, "11015": 21.51738, "11020": 21.53708, "11025": 22.00529, "11030": 21.52402, "11035": 21.56217, "11040": 21.52367, "11045": 21.5042, "11050": 21.66149, "11055": 21.50245, "11060": 21.79027, "11065": 21.49105, "11070": 21.48558, "11075": 21.60841, "11080": 21.79384, "11085": 21.52482, "11090": 21.47539, "11095": 22.42838, "11100": 21.52444, "11105": 21.46072, "11110": 22.62613, "11115": 21.64613, "11120": 21.47742, "11125": 22.58453, "11130": 22.28494, "11135": 21.48977, "11140": 21.64276, "11145": 21.62374, "11150": 21.87303, "11155": 21.48435, "11160": 21.46112, "11165": 21.72903, "11170": 21.47949, "11175": 21.44025, "11180": 21.43866, "11185": 21.626, "11190": 21.5046, "11195": 21.43367, "11200": 21.46787, "11205": 21.61202, "11210": 21.46475, "11215": 21.47767, "11220": 22.02934, "11225": 21.74898, "11230": 21.56461, "11235": 21.87267, "11240": 21.4503, "11245": 21.46367, "11250": 22.47386, "11255": 22.05935, "11260": 21.72062, "11265": 22.45335, "11270": 21.44576, "11275": 21.70266, "11280": 21.86421, "11285": 21.46184, "11290": 21.48755, "11295": 21.74409, "11300": 21.47899, "11305": 21.44563, "11310": 21.72706, "11315": 21.65402, "11320": 21.52074, "11325": 21.49227, "11330": 21.60765, "11335": 21.47067, "11340": 21.47387, "11345": 21.72912, "11350": 21.54684, "11355": 21.53583, "11360": 21.92639, "11365": 21.50307, "11370": 21.51838, "11375": 22.1273, "11380": 21.94207, "11385": 21.49373, "11390": 22.67451, "11395": 21.48798, "11400": 21.80422, "11405": 22.38069, "11410": 21.49938, "11415": 21.78708, "11420": 22.27447, "11425": 21.80911, "11430": 21.52279, "11435": 21.88351, "11440": 21.56434, "11445": 21.60929, "11450": 21.60526, "11455": 21.59477, "11460": 21.73674, "11465": 21.47443, "11470": 21.77086, "11475": 21.52573, "11480": 21.44042, "11485": 21.65492, "11490": 21.48695, "11495": 21.58564, "11500": 21.58722, "11505": 21.5644, "11510": 22.36676, "11515": 21.67685, "11520": 21.65263, "11525": 22.37508, "11530": 21.4986, "11535": 21.72329, "11540": 22.12507, "11545": 21.65914, "11550": 21.86658, "11555": 21.51907, "11560": 21.62185, "11565": 21.64038, "11570": 21.47554, "11575": 21.45363, "11580": 21.53543, "11585": 21.79704, "11590": 21.67655, "11595": 21.4903, "11600": 21.49477, "11605": 21.48401, "11610": 21.51333, "11615": 21.5194, "11620": 22.2627, "11625": 21.60813, "11630": 21.44605, "11635": 22.77659, "11640": 21.47142, "11645": 21.43653, "11650": 22.67036, "11655": 21.51531, "11660": 21.45664, "11665": 21.57061, "11670": 21.48244, "11675": 21.60415, "11680": 21.43867, "11685": 21.64391, "11690": 21.44037, "11695": 21.46859, "11700": 21.44069, "11705": 21.73577, "11710": 21.45403, "11715": 21.468, "11720": 21.46929, "11725": 21.45617, "11730": 21.45355, "11735": 21.43982, "11740": 21.67243, "11745": 21.87484, "11750": 
21.47155, "11755": 21.43858, "11760": 22.38133, "11765": 21.47427, "11770": 21.44333, "11775": 22.83941, "11780": 21.74309, "11785": 21.46695, "11790": 22.44658, "11795": 21.68696, "11800": 21.51694, "11805": 22.10842, "11810": 21.43115, "11815": 21.50044, "11820": 21.92413, "11825": 21.45865, "11830": 21.61057, "11835": 21.48603, "11840": 21.43667, "11845": 21.43761, "11850": 21.61038, "11855": 21.46433, "11860": 21.44342, "11865": 21.96065, "11870": 21.38328, "11875": 21.41819, "11880": 21.42155, "11885": 21.81397, "11890": 21.40855, "11895": 21.42314, "11900": 22.26982, "11905": 21.42524, "11910": 21.45903, "11915": 22.75192, "11920": 21.58611, "11925": 21.4204, "11930": 22.30042, "11935": 21.69407, "11940": 21.48053, "11945": 21.73587, "11950": 21.44548, "11955": 21.41111, "11960": 21.55378, "11965": 21.45482, "11970": 21.45361, "11975": 21.49812, "11980": 21.57734, "11985": 21.45443, "11990": 21.73035, "11995": 21.41397, "12000": 21.42239, "12005": 21.61517, "12010": 21.46054, "12015": 21.46152, "12020": 22.52307, "12025": 21.42209, "12030": 21.44903, "12035": 22.51284, "12040": 21.44854, "12045": 21.58349, "12050": 22.62533, "12055": 21.44621, "12060": 21.4594, "12065": 22.04221, "12070": 21.92102, "12075": 21.43761, "12080": 21.97777, "12085": 21.42347, "12090": 21.44908, "12095": 21.42377, "12100": 21.64592, "12105": 21.55773, "12110": 21.57445, "12115": 21.42309, "12120": 21.4455, "12125": 21.40452, "12130": 21.57345, "12135": 21.50946, "12140": 21.71177, "12145": 21.66754, "12150": 21.5412, "12155": 21.50365, "12160": 21.47304, "12165": 21.47925, "12170": 21.45877, "12175": 21.65892, "12180": 21.70531, "12185": 21.57044, "12190": 21.46856, "12195": 21.48228, "12200": 21.66249, "12205": 22.90198, "12210": 21.51564, "12215": 21.56502, "12220": 22.37253, "12225": 21.63502, "12230": 21.61318, "12235": 21.61384, "12240": 21.49976, "12245": 21.44758, "12250": 21.44435, "12255": 21.94019, "12260": 21.60688, "12265": 21.44777, "12270": 21.5017, "12275": 21.55071, "12280": 21.45389, "12285": 21.4545, "12290": 21.9092, "12295": 21.7624, "12300": 21.51999, "12305": 21.48282, "12310": 21.65015, "12315": 21.88399, "12320": 21.50419, "12325": 21.48042, "12330": 22.552, "12335": 21.47617, "12340": 21.472, "12345": 22.78477, "12350": 21.49545, "12355": 21.49036, "12360": 22.11566, "12365": 21.46568, "12370": 21.49421, "12375": 21.85968, "12380": 21.91388, "12385": 21.49374, "12390": 21.5826, "12395": 21.50048, "12400": 21.44433, "12405": 21.5987, "12410": 21.46551, "12415": 22.06684, "12420": 21.48181, "12425": 21.60139, "12430": 21.50382, "12435": 21.46998, "12440": 21.57354, "12445": 21.58788, "12450": 21.58726, "12455": 21.82892, "12460": 21.68517, "12465": 21.47382, "12470": 22.28658, "12475": 21.46249, "12480": 21.52638, "12485": 22.54328, "12490": 21.51919, "12495": 21.69679, "12500": 22.35291, "12505": 21.50376, "12510": 21.73136, "12515": 21.63528, "12520": 21.53332, "12525": 21.61321, "12530": 21.57574, "12535": 22.23831, "12540": 21.51934, "12545": 21.56581, "12550": 21.55042, "12555": 21.45912, "12560": 21.79288, "12565": 21.47727, "12570": 21.79046, "12575": 22.54236, "12580": 21.52412, "12585": 21.56193, "12590": 21.98412, "12595": 21.48803, "12600": 21.50795, "12605": 22.16623, "12610": 21.7218, "12615": 21.50393, "12620": 21.97116, "12625": 21.54801, "12630": 21.68775, "12635": 22.22385, "12640": 21.51289, "12645": 21.81682, "12650": 21.93101, "12655": 21.53138, "12660": 22.2622, "12665": 21.84435, "12670": 21.49919, "12675": 21.47799, "12680": 22.01047, "12685": 21.64819, 
"12690": 21.53729, "12695": 21.98008, "12700": 21.66586, "12705": 21.48809, "12710": 21.49349, "12715": 22.15223, "12720": 21.99257, "12725": 21.62145, "12730": 21.64613, "12735": 21.75548, "12740": 21.6521, "12745": 21.61051, "12750": 21.63185, "12755": 21.90262, "12760": 21.7436, "12765": 21.5958, "12770": 21.57196, "12775": 21.90403, "12780": 21.56477, "12785": 21.60679, "12790": 21.78636, "12795": 21.58458, "12800": 21.56487, "12805": 22.33099, "12810": 21.5636, "12815": 21.57817, "12820": 22.34179, "12825": 21.82286, "12830": 21.74731, "12835": 22.82137, "12840": 21.6029, "12845": 21.62568, "12850": 22.28851, "12855": 21.60385, "12860": 21.62706, "12865": 21.62757, "12870": 21.58222, "12875": 21.6197, "12880": 21.58771, "12885": 21.5776, "12890": 21.62252, "12895": 21.60278, "12900": 21.62772, "12905": 21.56567, "12910": 21.92736, "12915": 21.57349, "12920": 21.57879, "12925": 21.72888, "12930": 21.71012, "12935": 21.6772, "12940": 21.60519, "12945": 22.30622, "12950": 21.58275, "12955": 21.62265, "12960": 22.84175, "12965": 21.61345, "12970": 21.6175, "12975": 22.68385, "12980": 21.60849, "12985": 21.58135, "12990": 22.00411, "12995": 21.85328, "13000": 21.5723, "13005": 21.82672, "13010": 21.7237, "13015": 21.79214, "13020": 21.59617, "13025": 21.54512, "13030": 21.60958, "13035": 21.65689, "13040": 21.61138, "13045": 21.69513, "13050": 21.92732, "13055": 21.61363, "13060": 21.61536, "13065": 22.61986, "13070": 21.63413, "13075": 22.01696, "13080": 23.01829, "13085": 21.6023, "13090": 21.58063, "13095": 22.70541, "13100": 21.9691, "13105": 21.59551, "13110": 22.24728, "13115": 21.62613, "13120": 21.64755, "13125": 21.72704, "13130": 21.60915, "13135": 21.62201, "13140": 21.62486, "13145": 21.94408, "13150": 21.5744, "13155": 21.64725, "13160": 21.83994, "13165": 21.58792, "13170": 21.58788, "13175": 21.80955, "13180": 21.92812, "13185": 21.60308, "13190": 22.5064, "13195": 21.60572, "13200": 21.62548, "13205": 22.19876, "13210": 21.60969, "13215": 21.84933, "13220": 22.49453, "13225": 22.22618, "13230": 21.58603, "13235": 22.54226, "13240": 21.72653, "13245": 21.56951, "13250": 21.85226, "13255": 21.63083, "13260": 21.67212, "13265": 21.98415, "13270": 21.6197, "13275": 21.88316, "13280": 21.80199, "13285": 21.59441, "13290": 21.59322, "13295": 21.72264, "13300": 21.48303, "13305": 21.7049, "13310": 21.67469, "13315": 21.57007, "13320": 21.46219, "13325": 21.42864, "13330": 21.47959, "13335": 21.4176, "13340": 21.60612, "13345": 21.68073, "13350": 21.47815, "13355": 21.41202, "13360": 21.40956, "13365": 21.64684, "13370": 21.60559, "13375": 21.42614, "13380": 21.50074, "13385": 21.43863, "13390": 21.55859, "13395": 21.53936, "13400": 21.40669, "13405": 21.82875, "13410": 21.42614, "13415": 22.32137, "13420": 21.78052, "13425": 21.39802, "13430": 21.54455, "13435": 21.6822, "13440": 21.42881, "13445": 21.4156, "13450": 21.39977, "13455": 21.62577, "13460": 21.38998, "13465": 21.43396, "13470": 21.6628, "13475": 21.6207, "13480": 21.41978, "13485": 21.45634, "13490": 21.49826, "13495": 21.80225, "13500": 21.42062, "13505": 22.30364, "13510": 21.43527, "13515": 21.40382, "13520": 22.25791, "13525": 21.51362, "13530": 21.67249, "13535": 22.10803, "13540": 21.62712, "13545": 21.46257, "13550": 21.67437, "13555": 21.45462, "13560": 21.44348, "13565": 21.42244, "13570": 21.41248, "13575": 22.12766, "13580": 21.43477, "13585": 21.60398, "13590": 21.62201, "13595": 21.43866, "13600": 21.55331, "13605": 21.43503, "13610": 21.94469, "13615": 21.8829, "13620": 21.63883, "13625": 21.40699, 
"13630": 22.15603, "13635": 21.48322, "13640": 21.42593, "13645": 22.18381, "13650": 21.43666, "13655": 21.91888, "13660": 22.82527, "13665": 21.45207, "13670": 21.42267, "13675": 22.26449, "13680": 21.48815, "13685": 21.40531, "13690": 21.71576, "13695": 21.47489, "13700": 21.886, "13705": 21.42413, "13710": 21.43033, "13715": 21.63961, "13720": 21.44732, "13725": 21.81773, "13730": 21.57998, "13735": 22.17287, "13740": 21.43563, "13745": 21.41886, "13750": 21.39015, "13755": 21.91205, "13760": 21.45436, "13765": 21.40298, "13770": 22.42338, "13775": 21.43254, "13780": 21.4186, "13785": 22.33019, "13790": 21.43505, "13795": 21.54457, "13800": 22.3624, "13805": 21.40893, "13810": 21.46229, "13815": 22.36292, "13820": 21.7384, "13825": 21.42773, "13830": 21.93015, "13835": 21.40594, "13840": 21.66647, "13845": 21.41309, "13850": 21.39369, "13855": 21.86049, "13860": 22.08419, "13865": 21.41956, "13870": 21.40359, "13875": 21.46136, "13880": 21.67065, "13885": 21.57461, "13890": 21.69682, "13895": 21.69269, "13900": 21.61894, "13905": 21.53673, "13910": 21.53628, "13915": 21.51737, "13920": 21.52515, "13925": 21.58964, "13930": 21.73351, "13935": 21.57735, "13940": 21.53998, "13945": 21.51028, "13950": 21.78739, "13955": 21.68472, "13960": 21.51338, "13965": 21.58913, "13970": 21.51572, "13975": 21.58304, "13980": 21.57864, "13985": 21.5017, "13990": 21.88856, "13995": 21.53044, "14000": 21.56958, "14005": 21.67434, "14010": 21.72283, "14015": 21.54249, "14020": 21.50284, "14025": 21.53931, "14030": 21.57749, "14035": 21.65394, "14040": 21.58986, "14045": 21.47887, "14050": 21.48978, "14055": 21.69746, "14060": 21.82314, "14065": 21.58096, "14070": 21.45645, "14075": 22.08319, "14080": 21.46175, "14085": 21.48076, "14090": 22.43843, "14095": 21.45576, "14100": 21.62178, "14105": 22.28318, "14110": 21.79677, "14115": 21.65798, "14120": 22.16719, "14125": 21.95055, "14130": 21.4458, "14135": 21.89464, "14140": 21.50058, "14145": 21.66608, "14150": 21.63567, "14155": 21.49018, "14160": 21.65996, "14165": 21.49228, "14170": 21.54908, "14175": 21.50992, "14180": 21.53658, "14185": 21.46641, "14190": 21.47578, "14195": 21.57501, "14200": 21.77915, "14205": 21.46588, "14210": 21.61828, "14215": 21.60715, "14220": 21.51089, "14225": 21.51659, "14230": 22.00269, "14235": 21.58971, "14240": 21.85265, "14245": 22.11356, "14250": 21.99747, "14255": 21.51624, "14260": 22.6287, "14265": 21.56298, "14270": 21.50327, "14275": 22.59794, "14280": 21.47066, "14285": 21.72252, "14290": 21.504, "14295": 21.54349, "14300": 21.61707, "14305": 21.65881, "14310": 22.11751, "14315": 21.50737, "14320": 21.72044, "14325": 21.70982, "14330": 21.51329, "14335": 21.60866, "14340": 21.64289, "14345": 21.6417, "14350": 21.48618, "14355": 21.82107, "14360": 21.49479, "14365": 21.50335, "14370": 22.31182, "14375": 22.12, "14380": 21.75237, "14385": 22.60896, "14390": 21.47257, "14395": 21.48787, "14400": 22.43021, "14405": 21.61968, "14410": 21.66939, "14415": 22.53909, "14420": 21.47168, "14425": 21.61335, "14430": 21.8907, "14435": 21.60404, "14440": 21.81415, "14445": 21.63539, "14450": 21.60126, "14455": 21.57021, "14460": 21.49395, "14465": 21.55186, "14470": 21.92797, "14475": 21.54896, "14480": 21.7472, "14485": 21.54759, "14490": 21.54388, "14495": 21.53003, "14500": 21.58607, "14505": 21.76739, "14510": 21.8324, "14515": 21.54289, "14520": 21.65256, "14525": 21.50816, "14530": 21.56597, "14535": 22.0002, "14540": 21.58296, "14545": 21.9052, "14550": 21.76509, "14555": 21.5368, "14560": 21.5295, "14565": 21.52989, 
"14570": 21.87527, "14575": 21.85797, "14580": 21.80552, "14585": 21.50719, "14590": 21.63021, "14595": 21.50562, "14600": 21.51517, "14605": 21.51664, "14610": 23.01773, "14615": 21.65414, "14620": 21.50326, "14625": 22.03127, "14630": 21.7184, "14635": 21.87682, "14640": 21.50236, "14645": 21.52952, "14650": 21.48642, "14655": 21.51113, "14660": 21.6061, "14665": 21.52296, "14670": 21.69819, "14675": 21.51307, "14680": 21.5596, "14685": 21.6776, "14690": 21.98591, "14695": 21.69058, "14700": 21.49247, "14705": 21.79719, "14710": 21.52996, "14715": 21.54457, "14720": 22.56989, "14725": 22.01424, "14730": 21.99755, "14735": 22.87115, "14740": 21.78416, "14745": 21.51427, "14750": 22.33521, "14755": 21.52827, "14760": 21.48049, "14765": 21.90666, "14770": 21.45926, "14775": 21.64762, "14780": 21.61202, "14785": 21.47144, "14790": 21.54864, "14795": 21.61836, "14800": 22.08496, "14805": 21.49691, "14810": 21.52701, "14815": 21.51192, "14820": 21.47988, "14825": 21.63251, "14830": 21.48736, "14835": 21.97881, "14840": 21.46102, "14845": 21.87054, "14850": 22.06639, "14855": 21.49078, "14860": 22.05739, "14865": 21.50863, "14870": 21.74494, "14875": 22.38114, "14880": 21.48133, "14885": 21.48785, "14890": 22.64604, "14895": 21.75407, "14900": 21.4769, "14905": 22.22041, "14910": 21.47975, "14915": 21.5086, "14920": 21.67594, "14925": 21.48425, "14930": 21.74713, "14935": 21.53016, "14940": 21.76315, "14945": 21.59231, "14950": 21.48511, "14955": 21.86, "14960": 21.46894, "14965": 21.88525, "14970": 21.68151, "14975": 22.01841, "14980": 21.60061, "14985": 22.09468, "14990": 21.55797, "14995": 21.466, "15000": 22.32405, "15005": 21.98041, "15010": 22.67623, "15015": 21.51119, "15020": 21.96137, "15025": 22.2985, "15030": 21.47303, "15035": 21.47312, "15040": 21.71444, "15045": 21.75816, "15050": 21.62656, "15055": 21.50146, "15060": 21.73751, "15065": 21.52849, "15070": 21.61486, "15075": 21.77914, "15080": 21.58244, "15085": 21.60544, "15090": 21.79309, "15095": 21.86992, "15100": 21.67645, "15105": 21.602, "15110": 21.61173, "15115": 21.53684, "15120": 21.57035, "15125": 21.54446, "15130": 21.6553, "15135": 21.52828, "15140": 21.50856, "15145": 21.53533, "15150": 21.51644, "15155": 21.50335, "15160": 21.56032, "15165": 21.52578, "15170": 21.63123, "15175": 21.72904, "15180": 21.56399, "15185": 21.70109, "15190": 21.57628, "15195": 21.55785, "15200": 22.13417, "15205": 21.53338, "15210": 22.57949, "15215": 21.52532, "15220": 21.705, "15225": 21.61543, "15230": 21.53494, "15235": 21.52628, "15240": 21.55159, "15245": 21.7633, "15250": 21.55347, "15255": 21.84504, "15260": 21.70438, "15265": 21.54732, "15270": 21.77428, "15275": 21.5466, "15280": 21.54042, "15285": 21.6364, "15290": 22.14655, "15295": 21.52873, "15300": 21.50331, "15305": 22.14725, "15310": 21.54372, "15315": 21.53496, "15320": 22.62301, "15325": 21.50948, "15330": 21.57116, "15335": 23.08007, "15340": 21.81751, "15345": 21.5291, "15350": 22.00298, "15355": 21.53884, "15360": 21.52971, "15365": 21.54367, "15370": 21.96324, "15375": 21.53007, "15380": 21.61884, "15385": 21.89253, "15390": 21.53172, "15395": 21.52213, "15400": 21.52903, "15405": 21.66428, "15410": 21.53107, "15415": 21.54149, "15420": 21.64372, "15425": 21.49875, "15430": 21.52825, "15435": 21.53878, "15440": 21.62825, "15445": 21.97325, "15450": 21.58806, "15455": 21.80651, "15460": 22.17837, "15465": 21.61354, "15470": 21.52312, "15475": 22.51912, "15480": 21.56807, "15485": 21.52901, "15490": 22.46097, "15495": 21.93251, "15500": 21.55098, "15505": 21.52168, 
"15510": 21.85346, "15515": 21.63026, "15520": 21.53588, "15525": 22.15439, "15530": 21.51787, "15535": 21.8951, "15540": 21.55362, "15545": 21.74613, "15550": 22.20283, "15555": 21.49438, "15560": 21.75619, "15565": 22.58959, "15570": 21.66072, "15575": 21.53766, "15580": 22.67474, "15585": 21.52729, "15590": 21.57282, "15595": 22.07118, "15600": 21.72853, "15605": 21.51416, "15610": 22.9185, "15615": 21.95488, "15620": 21.64031, "15625": 22.4101, "15630": 21.51362, "15635": 21.45811, "15640": 21.56473, "15645": 21.46649, "15650": 21.45853, "15655": 21.4747, "15660": 21.44679, "15665": 21.55151, "15670": 21.44983, "15675": 21.46462, "15680": 21.54712, "15685": 21.53437, "15690": 21.46994, "15695": 21.48958, "15700": 21.51021, "15705": 21.61304, "15710": 21.46307, "15715": 21.61999, "15720": 21.44696, "15725": 21.50673, "15730": 21.43353, "15735": 21.72038, "15740": 21.78937, "15745": 21.43614, "15750": 23.14673, "15755": 21.4319, "15760": 21.4333, "15765": 22.79548, "15770": 21.47762, "15775": 21.43184, "15780": 21.43131, "15785": 21.60482, "15790": 21.42537, "15795": 21.50112, "15800": 21.42808, "15805": 21.43978, "15810": 21.49424, "15815": 21.43013, "15820": 21.54489, "15825": 21.41546, "15830": 21.50626, "15835": 21.46931, "15840": 21.45762, "15845": 21.50328, "15850": 21.40607, "15855": 21.44674, "15860": 21.47968, "15865": 21.78925, "15870": 21.75178, "15875": 21.40919, "15880": 21.4921, "15885": 21.43849, "15890": 22.33127, "15895": 21.423, "15900": 21.61097, "15905": 23.08025, "15910": 21.41651, "15915": 21.45202, "15920": 22.15586, "15925": 21.46312, "15930": 21.50652, "15935": 21.54555, "15940": 21.58263, "15945": 21.45347, "15950": 21.58255, "15955": 21.42158, "15960": 21.41072, "15965": 21.42724, "15970": 21.47008, "15975": 21.43735, "15980": 21.46616, "15985": 21.56521, "15990": 21.84152, "15995": 21.42992, "16000": 21.59851, "16005": 21.82737, "16010": 21.84893, "16015": 21.42644, "16020": 22.12304, "16025": 23.14375, "16030": 21.60519, "16035": 21.45527, "16040": 23.10497, "16045": 21.4592, "16050": 21.42501, "16055": 21.89466, "16060": 21.47457, "16065": 21.50773, "16070": 21.45204, "16075": 21.5374, "16080": 21.42299, "16085": 21.41122, "16090": 21.5085, "16095": 21.44824, "16100": 21.48767, "16105": 21.41712, "16110": 21.44367, "16115": 21.51082, "16120": 21.45433, "16125": 21.4379, "16130": 21.4432, "16135": 21.93589, "16140": 21.43155, "16145": 22.06327, "16150": 22.92958, "16155": 21.41656, "16160": 21.42872, "16165": 22.94827, "16170": 21.69178, "16175": 21.46226, "16180": 22.24065, "16185": 21.79442, "16190": 21.68378, "16195": 21.63927, "16200": 21.81347, "16205": 21.66978, "16210": 22.56515, "16215": 21.61945, "16220": 21.60239, "16225": 21.91619, "16230": 21.70785, "16235": 21.57907, "16240": 21.59388, "16245": 21.58731, "16250": 21.75914, "16255": 21.59023, "16260": 21.59088, "16265": 21.70108, "16270": 21.75731, "16275": 21.63198, "16280": 21.60036, "16285": 21.59559, "16290": 21.80771, "16295": 21.60708, "16300": 21.71292, "16305": 21.82598, "16310": 21.66252, "16315": 21.57252, "16320": 22.46304, "16325": 21.95076, "16330": 21.58654, "16335": 23.18729, "16340": 21.60266, "16345": 21.57577, "16350": 22.39223, "16355": 21.58335, "16360": 21.78007, "16365": 21.74344, "16370": 21.64603, "16375": 21.57589, "16380": 21.57082, "16385": 21.76869, "16390": 21.56773, "16395": 21.82486, "16400": 21.55803, "16405": 21.61142, "16410": 21.54349, "16415": 21.5602, "16420": 21.70089, "16425": 21.58088, "16430": 21.57338, "16435": 21.55651, "16440": 21.58702, "16445": 
21.58944, "16450": 21.7049, "16455": 21.86038, "16460": 21.91736, "16465": 21.73027, "16470": 21.5464, "16475": 22.589, "16480": 21.56515, "16485": 21.77919, "16490": 22.85871, "16495": 21.55888, "16500": 21.71895, "16505": 21.55665, "16510": 21.58562, "16515": 21.70024, "16520": 22.13453, "16525": 21.6026, "16530": 21.5868, "16535": 21.56531, "16540": 21.57685, "16545": 21.60075, "16550": 21.58372, "16555": 21.98746, "16560": 21.5833, "16565": 21.92795, "16570": 21.74113, "16575": 21.56639, "16580": 22.51809, "16585": 21.58413, "16590": 21.75057, "16595": 22.7856, "16600": 21.55994, "16605": 21.93107, "16610": 22.63202, "16615": 21.67662, "16620": 21.60911, "16625": 22.33818, "16630": 21.55804, "16635": 21.74773, "16640": 22.33305, "16645": 21.57394, "16650": 21.70216, "16655": 21.56695, "16660": 21.58503, "16665": 21.59897, "16670": 21.601, "16675": 21.61588, "16680": 21.58364, "16685": 21.93567, "16690": 21.69898, "16695": 21.58536, "16700": 21.5903, "16705": 21.93217, "16710": 21.61726, "16715": 21.62111, "16720": 22.57579, "16725": 21.62673, "16730": 22.05375, "16735": 22.47564, "16740": 21.59261, "16745": 21.60979, "16750": 22.51018, "16755": 21.77757, "16760": 21.77647, "16765": 148.99738, "16770": 21.45087, "16775": 21.45186, "16780": 21.45362, "16785": 21.41534, "16790": 21.69003, "16795": 21.41813, "16800": 21.45619, "16805": 21.60538, "16810": 21.68758, "16815": 21.41283, "16820": 21.43567, "16825": 21.41987, "16830": 21.39449, "16835": 21.58897, "16840": 21.65373, "16845": 21.40816, "16850": 21.42618, "16855": 22.23536, "16860": 21.39327, "16865": 21.49545, "16870": 22.84484, "16875": 21.41599, "16880": 21.40939, "16885": 22.64348, "16890": 21.63325, "16895": 21.46436, "16900": 22.00187, "16905": 21.58326, "16910": 21.4316, "16915": 21.43797, "16920": 21.39769, "16925": 21.92949, "16930": 21.41308, "16935": 21.42226, "16940": 21.71479, "16945": 21.43151, "16950": 21.52, "16955": 21.42525, "16960": 21.59853, "16965": 21.57578, "16970": 21.43446, "16975": 21.61681, "16980": 21.43927, "16985": 21.45015, "16990": 21.44897, "16995": 22.08352, "17000": 21.55701, "17005": 22.44639, "17010": 21.42849, "17015": 21.48295, "17020": 22.51484, "17025": 21.48636, "17030": 21.72884, "17035": 21.89283, "17040": 21.42343, "17045": 21.67812, "17050": 21.64483, "17055": 21.63708, "17060": 21.41266, "17065": 21.65123, "17070": 21.44618, "17075": 21.61533, "17080": 21.86241, "17085": 21.42007, "17090": 21.44216, "17095": 21.43338, "17100": 21.39772, "17105": 21.38327, "17110": 21.50204, "17115": 22.16446, "17120": 21.40958, "17125": 21.67229, "17130": 22.39931, "17135": 21.64397, "17140": 21.39064, "17145": 22.37575, "17150": 21.48587, "17155": 21.56677, "17160": 22.40684, "17165": 21.39897, "17170": 21.66671, "17175": 21.71957, "17180": 21.41849, "17185": 21.51428, "17190": 21.45091, "17195": 21.96433, "17200": 21.42896, "17205": 21.80562, "17210": 21.43006, "17215": 21.43935, "17220": 21.45932, "17225": 21.43191, "17230": 21.60964, "17235": 21.41457, "17240": 22.24236, "17245": 21.45485, "17250": 21.41674, "17255": 21.99351, "17260": 21.41894, "17265": 21.49025, "17270": 22.22929, "17275": 21.40828, "17280": 21.47861, "17285": 22.48122, "17290": 21.52944, "17295": 21.41681, "17300": 22.04969, "17305": 21.38011, "17310": 21.57997, "17315": 22.09864, "17320": 21.43407, "17325": 21.55106, "17330": 22.19244, "17335": 21.4537, "17340": 21.57575, "17345": 21.42574, "17350": 21.75951, "17355": 21.56903, "17360": 21.74613, "17365": 21.69635, "17370": 21.5352, "17375": 21.53788, "17380": 21.55136, 
"17385": 21.74194, "17390": 21.66495, "17395": 21.74068, "17400": 21.53686, "17405": 23.04973, "17410": 21.71376, "17415": 21.60627, "17420": 22.65402, "17425": 21.49118, "17430": 21.56297, "17435": 22.20888, "17440": 21.47583, "17445": 21.46699, "17450": 21.49504, "17455": 21.49498, "17460": 26.34066, "17465": 21.64714, "17470": 22.01499, "17475": 21.46068, "17480": 21.70976, "17485": 21.48282, "17490": 21.67193, "17495": 21.45333, "17500": 21.48813, "17505": 21.57205, "17510": 21.74557, "17515": 21.4878, "17520": 21.72144, "17525": 22.14816, "17530": 22.06482, "17535": 21.61135, "17540": 22.40082, "17545": 21.72118, "17550": 21.53062, "17555": 23.43495, "17560": 21.49529, "17565": 21.97108, "17570": 22.04965, "17575": 21.45288, "17580": 21.48275, "17585": 21.48481, "17590": 22.44759, "17595": 21.46132, "17600": 21.80707, "17605": 21.46533, "17610": 21.44985, "17615": 21.51299, "17620": 21.6095, "17625": 22.00613, "17630": 21.44863, "17635": 21.67141, "17640": 21.51904, "17645": 21.48117, "17650": 21.54589, "17655": 21.50514, "17660": 21.81355, "17665": 21.75925, "17670": 21.60631, "17675": 21.53182, "17680": 22.58563, "17685": 21.6423, "17690": 21.5126, "17695": 22.70399, "17700": 21.5176, "17705": 21.46538, "17710": 22.3679, "17715": 22.3979, "17720": 21.50148, "17725": 21.69178, "17730": 22.1631, "17735": 21.56535, "17740": 21.47041, "17745": 21.60833, "17750": 21.98674, "17755": 21.50263, "17760": 21.47645, "17765": 21.9439, "17770": 21.49958, "17775": 21.45705, "17780": 21.68547, "17785": 21.44871, "17790": 21.75395, "17795": 21.61946, "17800": 22.05081, "17805": 21.99069, "17810": 21.47692, "17815": 21.49688, "17820": 22.04703, "17825": 21.46369, "17830": 21.48954, "17835": 22.36658, "17840": 22.19523, "17845": 21.67834, "17850": 22.40389, "17855": 21.50949, "17860": 21.62486, "17865": 21.90676, "17870": 21.48558, "17875": 22.00095, "17880": 21.7934, "17885": 21.51948, "17890": 21.46257, "17895": 21.59903, "17900": 21.47098, "17905": 21.46803, "17910": 21.97705, "17915": 22.03763, "17920": 21.45286, "17925": 21.47488, "17930": 144.60007, "17935": 21.56963, "17940": 21.5342, "17945": 21.53681, "17950": 21.56406, "17955": 21.96356, "17960": 21.54307, "17965": 21.51891, "17970": 21.52546, "17975": 21.53364, "17980": 21.50927, "17985": 21.63958, "17990": 21.58509, "17995": 21.50613, "18000": 21.49883, "18005": 21.48584, "18010": 21.5892, "18015": 22.14145, "18020": 21.48442, "18025": 21.50465, "18030": 23.71029, "18035": 21.49158, "18040": 21.48361, "18045": 22.46544, "18050": 21.4845, "18055": 21.49207, "18060": 21.75065, "18065": 21.80818, "18070": 21.59829, "18075": 21.50598, "18080": 21.70931, "18085": 21.51391, "18090": 21.60423, "18095": 21.66108, "18100": 21.62796, "18105": 21.64064, "18110": 21.49036, "18115": 21.51825, "18120": 22.12746, "18125": 21.63203, "18130": 21.60022, "18135": 21.51107, "18140": 22.32683, "18145": 21.62702, "18150": 21.68162, "18155": 22.97898, "18160": 21.54192, "18165": 21.51468, "18170": 22.38544, "18175": 21.48763, "18180": 21.51053, "18185": 22.1996, "18190": 21.59543, "18195": 21.6692, "18200": 21.49052, "18205": 21.49631, "18210": 21.47779, "18215": 21.6864, "18220": 21.58671, "18225": 21.48205, "18230": 21.62892, "18235": 21.48467, "18240": 21.48016, "18245": 21.50617, "18250": 21.7303, "18255": 21.47185, "18260": 21.50715, "18265": 21.96781, "18270": 21.49542, "18275": 21.59906, "18280": 22.6447, "18285": 21.47831, "18290": 21.66787, "18295": 22.16209, "18300": 21.63028, "18305": 21.49444, "18310": 22.3151, "18315": 21.56746, "18320": 21.50691, 
"18325": 22.33439, "18330": 21.66591, "18335": 21.68378, "18340": 21.60958, "18345": 21.49365, "18350": 21.56534, "18355": 21.49094, "18360": 21.9099, "18365": 21.67978, "18370": 21.49052, "18375": 21.6604, "18380": 21.5277, "18385": 21.67594, "18390": 21.5013, "18395": 21.84143, "18400": 21.55081, "18405": 22.13372, "18410": 21.55198, "18415": 21.49173, "18420": 22.34639, "18425": 21.48882, "18430": 21.70618, "18435": 22.13215, "18440": 21.66935, "18445": 21.6016, "18450": 22.1598, "18455": 21.54518, "18460": 21.51286, "18465": 22.62902, "18470": 21.50501, "18475": 21.47023, "18480": 22.13453, "18485": 21.69733, "18490": 21.594, "18495": 21.50252, "18500": 21.70252, "18505": 21.54795, "18510": 22.79333, "18515": 21.59837, "18520": 21.67672, "18525": 23.2666, "18530": 22.24294, "18535": 21.75217, "18540": 23.23928, "18545": 21.74556, "18550": 21.66679, "18555": 22.93906, "18560": 21.69355, "18565": 21.98272, "18570": 22.91322, "18575": 21.99241, "18580": 21.83147, "18585": 22.5227, "18590": 21.67384, "18595": 21.62416, "18600": 22.47656, "18605": 21.67822, "18610": 21.63718, "18615": 21.64426, "18620": 21.7326, "18625": 21.76908, "18630": 21.66174, "18635": 21.79028, "18640": 21.92622, "18645": 21.64388, "18650": 21.95417, "18655": 21.67443, "18660": 22.16162, "18665": 21.66173, "18670": 21.78984, "18675": 22.66648, "18680": 21.63336, "18685": 22.12132, "18690": 22.48049, "18695": 21.71417, "18700": 21.75484, "18705": 22.52258, "18710": 21.86187, "18715": 21.68954, "18720": 21.7817, "18725": 21.78681, "18730": 21.84849, "18735": 21.62195, "18740": 21.57876, "18745": 21.88578, "18750": 21.58939, "18755": 21.61294, "18760": 21.5879, "18765": 21.81044, "18770": 21.58273, "18775": 21.81224, "18780": 21.8226, "18785": 21.68392, "18790": 21.66322, "18795": 21.59405, "18800": 22.64067, "18805": 21.68145, "18810": 21.99891, "18815": 22.12934, "18820": 21.65859, "18825": 21.76978, "18830": 22.48611, "18835": 21.64186, "18840": 21.7664, "18845": 22.76148, "18850": 21.70806, "18855": 21.66939, "18860": 22.07162, "18865": 21.72435, "18870": 21.66379, "18875": 21.67439, "18880": 21.70436, "18885": 21.64651, "18890": 21.78717, "18895": 22.14585, "18900": 21.70251, "18905": 21.63326, "18910": 21.63268, "18915": 21.6665, "18920": 21.74414, "18925": 21.7105, "18930": 21.80335, "18935": 21.86198, "18940": 21.6546, "18945": 21.62578, "18950": 21.65526, "18955": 22.23226, "18960": 21.63566, "18965": 22.01678, "18970": 22.88632, "18975": 21.64897, "18980": 21.58507, "18985": 22.62085, "18990": 21.54297, "18995": 21.57696, "19000": 21.9491, "19005": 21.56577, "19010": 21.60951, "19015": 21.62185, "19020": 21.68652, "19025": 21.79164, "19030": 21.8505, "19035": 21.5606, "19040": 21.58963, "19045": 21.66431, "19050": 21.653, "19055": 21.87288, "19060": 22.06897, "19065": 21.58569, "19070": 21.57682, "19075": 22.24193, "19080": 21.64965, "19085": 21.64543, "19090": 22.77604, "19095": 22.06601, "19100": 21.51956, "19105": 21.6099, "19110": 21.52744, "19115": 21.55185, "19120": 21.5442, "19125": 21.57829, "19130": 21.90724, "19135": 21.74616, "19140": 21.53469, "19145": 21.50715, "19150": 21.71646, "19155": 21.5009, "19160": 21.55751, "19165": 21.7219, "19170": 21.48802, "19175": 21.49234, "19180": 21.75059, "19185": 21.70982, "19190": 21.49529, "19195": 21.52759, "19200": 21.54493, "19205": 21.47167, "19210": 22.24105, "19215": 21.50892, "19220": 21.47983, "19225": 23.00498, "19230": 21.82787, "19235": 21.49047, "19240": 22.297, "19245": 21.47058, "19250": 21.61332, "19255": 21.45605, "19260": 21.50505, "19265": 
21.67595, "19270": 21.50675, "19275": 21.75465, "19280": 21.53391, "19285": 21.71179, "19290": 21.53099, "19295": 21.50627, "19300": 21.73101, "19305": 21.47213, "19310": 21.55113, "19315": 21.50538, "19320": 21.86218, "19325": 21.47282, "19330": 21.49278, "19335": 22.29646, "19340": 21.5022, "19345": 21.51271, "19350": 22.50128, "19355": 21.75631, "19360": 21.48092, "19365": 22.77996, "19370": 21.45921, "19375": 21.51245, "19380": 21.83765, "19385": 21.49476, "19390": 21.48503, "19395": 21.53251, "19400": 21.48063, "19405": 21.47698, "19410": 21.65149, "19415": 21.47668, "19420": 21.58117, "19425": 21.49317, "19430": 21.47561, "19435": 21.47919, "19440": 21.46605, "19445": 21.66778, "19450": 21.50228, "19455": 21.76958, "19460": 21.49623, "19465": 21.72803, "19470": 21.49773, "19475": 21.73565, "19480": 21.86163, "19485": 21.51171, "19490": 22.28914, "19495": 21.5011, "19500": 21.72346, "19505": 21.50976, "19510": 21.71791, "19515": 21.90563, "19520": 22.04996, "19525": 21.4957, "19530": 21.51403, "19535": 21.47697, "19540": 21.48074, "19545": 21.62856, "19550": 21.51559, "19555": 21.81358, "19560": 21.48551, "19565": 21.69962, "19570": 21.46548, "19575": 21.545, "19580": 21.54307, "19585": 21.50453, "19590": 21.61782, "19595": 22.00138, "19600": 22.11029, "19605": 21.44758, "19610": 22.03919, "19615": 21.50162, "19620": 21.48106, "19625": 22.7933, "19630": 21.50625, "19635": 22.26604, "19640": 22.44251, "19645": 21.48965, "19650": 21.58442, "19655": 21.56795, "19660": 21.50909, "19665": 21.51488, "19670": 21.72057, "19675": 138.06879, "19680": 21.54331, "19685": 21.59938, "19690": 21.5547, "19695": 21.52649, "19700": 21.74892, "19705": 21.51106, "19710": 21.58054, "19715": 21.49594, "19720": 21.5029, "19725": 21.5216, "19730": 21.48445, "19735": 21.60748, "19740": 21.50073, "19745": 21.50445, "19750": 21.52002, "19755": 21.52854, "19760": 21.75194, "19765": 21.50781, "19770": 21.50653, "19775": 21.53886, "19780": 21.6298, "19785": 21.65182, "19790": 21.53533, "19795": 21.50952, "19800": 21.50864, "19805": 21.50241, "19810": 21.61018, "19815": 21.72447, "19820": 21.50897, "19825": 21.85884, "19830": 21.5182, "19835": 21.52365, "19840": 22.42446, "19845": 21.49897, "19850": 22.17612, "19855": 22.69951, "19860": 21.67683, "19865": 21.50679, "19870": 21.79854, "19875": 21.49739, "19880": 21.51279, "19885": 21.63616, "19890": 21.48862, "19895": 21.68302, "19900": 21.50628, "19905": 21.51613, "19910": 21.57587, "19915": 21.51114, "19920": 21.54333, "19925": 21.48607, "19930": 21.67588, "19935": 21.59783, "19940": 21.48079, "19945": 21.52143, "19950": 21.71416, "19955": 21.57711, "19960": 21.47518, "19965": 21.87652, "19970": 21.65896, "19975": 22.1036, "19980": 22.50854, "19985": 21.52687, "19990": 21.53776, "19995": 22.77522, "20000": 21.48732, "20005": 22.44962, "20010": 22.01114, "20015": 21.49217, "20020": 21.72791, "20025": 21.47052, "20030": 21.51465, "20035": 21.54685, "20040": 21.66823, "20045": 21.74246, "20050": 21.49123, "20055": 21.63798, "20060": 21.51984, "20065": 21.52589, "20070": 21.9115, "20075": 21.49533, "20080": 22.02338, "20085": 21.98291, "20090": 21.50062, "20095": 21.88354, "20100": 22.5627, "20105": 21.70596, "20110": 21.61662, "20115": 22.8774, "20120": 21.49189, "20125": 21.48763, "20130": 22.67434, "20135": 21.50889, "20140": 21.64631, "20145": 21.5299, "20150": 21.64429, "20155": 21.51915, "20160": 21.61587, "20165": 21.91783, "20170": 21.52964, "20175": 21.49414, "20180": 21.67436, "20185": 21.47715, "20190": 21.49685, "20195": 21.8267, "20200": 21.49998, 
"20205": 21.7164, "20210": 22.01289, "20215": 21.48126, "20220": 21.51341, "20225": 21.95688, "20230": 21.53441, "20235": 21.57615, "20240": 22.40819, "20245": 21.89717, "20250": 21.50893, "20255": 23.16485, "20260": 21.69501, "20265": 21.48232, "20270": 21.41537, "20275": 21.38971, "20280": 21.38518, "20285": 21.52319, "20290": 21.59064, "20295": 21.48896, "20300": 21.38965, "20305": 21.81098, "20310": 21.41893, "20315": 21.40796, "20320": 21.94702, "20325": 21.42209, "20330": 21.45637, "20335": 22.17652, "20340": 21.56698, "20345": 21.39951, "20350": 22.85165, "20355": 21.4428, "20360": 21.41515, "20365": 22.79811, "20370": 21.6378, "20375": 21.76793, "20380": 22.69113, "20385": 21.41487, "20390": 21.4253, "20395": 22.55215, "20400": 21.40327, "20405": 21.38558, "20410": 21.39117, "20415": 21.73987, "20420": 21.39844, "20425": 21.45017, "20430": 21.53394, "20435": 21.58961, "20440": 21.35484, "20445": 21.41395, "20450": 21.43696, "20455": 21.3739, "20460": 21.36349, "20465": 21.56645, "20470": 22.28961, "20475": 21.40661, "20480": 21.36429, "20485": 22.58153, "20490": 21.36807, "20495": 21.3614, "20500": 22.44318, "20505": 21.37492, "20510": 21.50228, "20515": 21.36326, "20520": 21.35049, "20525": 21.35776, "20530": 21.34075, "20535": 21.86766, "20540": 21.40763, "20545": 21.62003, "20550": 21.39304, "20555": 21.36419, "20560": 21.41556, "20565": 21.39511, "20570": 21.73395, "20575": 22.1611, "20580": 21.85372, "20585": 21.35844, "20590": 22.49488, "20595": 21.37574, "20600": 21.34082, "20605": 22.17738, "20610": 21.46568, "20615": 21.65194, "20620": 21.91737, "20625": 21.3546, "20630": 21.35563, "20635": 22.09611, "20640": 21.57015, "20645": 21.36296, "20650": 21.65684, "20655": 21.38988, "20660": 21.89342, "20665": 21.37261, "20670": 21.38784, "20675": 21.45537, "20680": 21.40085, "20685": 21.40078, "20690": 21.36291, "20695": 21.57958, "20700": 21.55214, "20705": 21.4854, "20710": 21.6568, "20715": 22.21302, "20720": 21.43191, "20725": 21.3881, "20730": 22.48263, "20735": 21.40361, "20740": 21.36188, "20745": 22.04883, "20750": 21.36292, "20755": 21.40056, "20760": 22.04438, "20765": 21.4135, "20770": 21.36996, "20775": 21.78072, "20780": 21.70589, "20785": 21.89188, "20790": 21.38765, "20795": 21.37718, "20800": 21.38495, "20805": 21.44516, "20810": 21.38011, "20815": 21.74122, "20820": 21.65781, "20825": 21.57116, "20830": 21.36509, "20835": 21.463, "20840": 21.74009, "20845": 21.34059, "20850": 21.5431, "20855": 21.71187, "20860": 21.54911, "20865": 21.56912, "20870": 21.64602, "20875": 21.57613, "20880": 21.55509, "20885": 22.00905, "20890": 21.74969, "20895": 21.52967, "20900": 22.46437, "20905": 21.52287, "20910": 21.73389, "20915": 22.11148, "20920": 21.51169, "20925": 21.55012, "20930": 21.77282, "20935": 21.51785, "20940": 21.57759, "20945": 22.36341, "20950": 21.69684, "20955": 21.53758, "20960": 21.94524, "20965": 21.53507, "20970": 21.55589, "20975": 21.88176, "20980": 22.28848, "20985": 21.52125, "20990": 21.71257, "20995": 21.57439, "21000": 21.54072, "21005": 21.99073, "21010": 21.70533, "21015": 21.58484, "21020": 22.27408, "21025": 21.54493, "21030": 21.50619, "21035": 21.849, "21040": 21.52803, "21045": 22.09462, "21050": 22.22558, "21055": 21.54106, "21060": 21.81695, "21065": 21.91092, "21070": 21.5503, "21075": 21.5956, "21080": 21.78116, "21085": 21.47605, "21090": 21.65239, "21095": 21.63147, "21100": 21.55044, "21105": 21.48025, "21110": 21.47696, "21115": 21.44423, "21120": 21.46434, "21125": 21.73214, "21130": 21.66346, "21135": 21.4976, "21140": 21.46224, 
"21145": 21.45179, "21150": 21.51423, "21155": 21.68325, "21160": 21.47243, "21165": 21.55736, "21170": 21.44322, "21175": 21.55522, "21180": 21.50095, "21185": 21.46918, "21190": 21.80503, "21195": 21.48958, "21200": 21.51648, "21205": 21.72704, "21210": 21.42354, "21215": 21.56669, "21220": 21.51237, "21225": 21.55172, "21230": 21.43708, "21235": 21.44087, "21240": 21.65083, "21245": 21.41974, "21250": 21.4329, "21255": 21.40905, "21260": 21.59595, "21265": 21.48127, "21270": 21.4148, "21275": 21.65783, "21280": 21.41608, "21285": 21.4282, "21290": 21.54184, "21295": 21.53227, "21300": 21.44629, "21305": 21.39053, "21310": 22.54517, "21315": 21.45127, "21320": 21.4446, "21325": 23.09391, "21330": 21.57436, "21335": 21.50443, "21340": 21.81119, "21345": 21.4344, "21350": 21.45899, "21355": 21.41381, "21360": 21.61591, "21365": 21.64419, "21370": 21.42327, "21375": 21.4053, "21380": 21.4521, "21385": 21.48417, "21390": 21.43413, "21395": 21.49747, "21400": 21.61283, "21405": 21.42577, "21410": 21.44671, "21415": 21.40714, "21420": 21.46935, "21425": 21.44229, "21430": 21.43852, "21435": 21.7933, "21440": 21.43263, "21445": 21.41851, "21450": 21.97102, "21455": 21.57809, "21460": 21.43128, "21465": 23.03788, "21470": 21.43543, "21475": 21.44999, "21480": 22.51562, "21485": 21.4061, "21490": 21.77855, "21495": 21.55755, "21500": 21.41287, "21505": 21.4319, "21510": 21.88834, "21515": 21.47312, "21520": 22.12378, "21525": 21.43149, "21530": 21.43806, "21535": 21.48273, "21540": 21.44891, "21545": 21.61332, "21550": 21.46153, "21555": 22.06796, "21560": 21.42466, "21565": 21.4657, "21570": 22.29121, "21575": 21.41982, "21580": 21.46533, "21585": 22.59104, "21590": 21.62388, "21595": 21.41068, "21600": 21.92067, "21605": 21.52139, "21610": 21.46856, "21615": 22.54698, "21620": 21.43628, "21625": 21.47125, "21630": 21.76083, "21635": 21.44383, "21640": 21.59312, "21645": 21.72431, "21650": 21.45776, "21655": 21.4234, "21660": 21.45174, "21665": 21.5624, "21670": 22.3904, "21675": 21.41565, "21680": 21.39251, "21685": 22.8605, "21690": 22.05914, "21695": 21.42754, "21700": 23.04352, "21705": 21.50099, "21710": 21.51449, "21715": 22.71483, "21720": 21.41468, "21725": 21.928, "21730": 22.99737, "21735": 21.42427, "21740": 21.54309, "21745": 22.51813, "21750": 21.38641, "21755": 21.51526, "21760": 22.25174, "21765": 21.39354, "21770": 21.40944, "21775": 21.66403, "21780": 21.46622, "21785": 21.39181, "21790": 21.46091, "21795": 21.95235, "21800": 21.32834, "21805": 21.36681, "21810": 21.40896, "21815": 21.37978, "21820": 21.35006, "21825": 21.3709, "21830": 21.45846, "21835": 21.39653, "21840": 21.36419, "21845": 21.54063, "21850": 21.70045, "21855": 21.37952, "21860": 21.55238, "21865": 22.72036, "21870": 21.55484, "21875": 21.35218, "21880": 23.35183, "21885": 21.53639, "21890": 21.36385, "21895": 21.49827, "21900": 21.53132, "21905": 21.35807, "21910": 21.44452, "21915": 21.73125, "21920": 21.37169, "21925": 21.42118, "21930": 21.36254, "21935": 21.54614, "21940": 21.48963, "21945": 21.36327, "21950": 21.34729, "21955": 21.39861, "21960": 21.46427, "21965": 21.33024, "21970": 21.48868, "21975": 21.50216, "21980": 21.40308, "21985": 21.55654, "21990": 21.80919, "21995": 21.49762, "22000": 21.35313, "22005": 21.36458, "22010": 21.403, "22015": 21.61012, "22020": 21.40521, "22025": 21.46027, "22030": 21.36232, "22035": 22.13297, "22040": 21.52458, "22045": 21.35949, "22050": 21.675, "22055": 21.43788, "22060": 21.36499, "22065": 21.37114, "22070": 21.4986, "22075": 21.3778, "22080": 21.40485, 
"22085": 21.64723, "22090": 21.70011, "22095": 21.48531, "22100": 21.40276, "22105": 21.37167, "22110": 22.57043, "22115": 21.59715, "22120": 21.7825, "22125": 23.36697, "22130": 21.37002, "22135": 21.36447, "22140": 21.90403, "22145": 21.63566, "22150": 21.40192, "22155": 21.47657, "22160": 22.42685, "22165": 21.47748, "22170": 21.36917, "22175": 21.62378, "22180": 21.51085, "22185": 21.42121, "22190": 21.5183, "22195": 21.39837, "22200": 21.44077, "22205": 21.38947, "22210": 21.54976, "22215": 21.73644, "22220": 21.37281, "22225": 21.36561, "22230": 21.34189, "22235": 21.76994, "22240": 21.36634, "22245": 21.40091, "22250": 22.67479, "22255": 21.4168, "22260": 21.84795, "22265": 21.40952, "22270": 21.56366, "22275": 21.51928, "22280": 21.3866, "22285": 21.39426, "22290": 21.42005, "22295": 21.79225, "22300": 21.54788, "22305": 21.39025, "22310": 21.39838, "22315": 21.66749, "22320": 21.41071, "22325": 21.36489, "22330": 21.72653, "22335": 21.37733, "22340": 21.37247, "22345": 21.46795, "22350": 21.58604, "22355": 21.49767, "22360": 21.32467, "22365": 21.30616, "22370": 21.85665, "22375": 21.29946, "22380": 21.37441, "22385": 21.7395, "22390": 21.33826, "22395": 21.32506, "22400": 21.32955, "22405": 21.42051, "22410": 21.46222, "22415": 21.31912, "22420": 21.3521, "22425": 21.32245, "22430": 21.61823, "22435": 21.34861, "22440": 21.41424, "22445": 22.78149, "22450": 26.38544, "22455": 21.50164, "22460": 22.63375, "22465": 21.4092, "22470": 21.31944, "22475": 21.87564, "22480": 21.33537, "22485": 21.30283, "22490": 21.4784, "22495": 21.8485, "22500": 21.3292, "22505": 21.40292, "22510": 21.74034, "22515": 21.34722, "22520": 21.30472, "22525": 22.07998, "22530": 21.3226, "22535": 21.51434, "22540": 21.40409, "22545": 21.29554, "22550": 21.9548, "22555": 21.29991, "22560": 21.40239, "22565": 23.01395, "22570": 21.5401, "22575": 21.31053, "22580": 22.42257, "22585": 21.28983, "22590": 21.44865, "22595": 21.96175, "22600": 21.51267, "22605": 21.50241, "22610": 21.50763, "22615": 21.70193, "22620": 21.50846, "22625": 21.66996, "22630": 21.5356, "22635": 21.62751, "22640": 21.52052, "22645": 21.49152, "22650": 22.53581, "22655": 21.49658, "22660": 21.638, "22665": 22.96485, "22670": 21.47962, "22675": 21.54928, "22680": 22.4481, "22685": 21.48817, "22690": 21.51057, "22695": 21.49976, "22700": 21.50912, "22705": 21.48459, "22710": 21.48862, "22715": 21.80236, "22720": 21.61888, "22725": 21.49555, "22730": 21.55506, "22735": 45.06478, "22740": 21.45188, "22745": 21.45398, "22750": 21.47109, "22755": 21.69155, "22760": 21.52843, "22765": 21.47018, "22770": 21.49527, "22775": 21.47748, "22780": 21.44265, "22785": 21.48944, "22790": 21.85927, "22795": 21.53116, "22800": 21.48819, "22805": 22.08717, "22810": 21.49294, "22815": 21.46152, "22820": 23.17042, "22825": 21.66888, "22830": 21.48582, "22835": 22.55975, "22840": 22.08668, "22845": 21.48657, "22850": 21.5913, "22855": 21.51588, "22860": 21.61789, "22865": 21.47565, "22870": 21.46363, "22875": 21.64616, "22880": 21.53571, "22885": 21.51951, "22890": 21.49116, "22895": 21.47469, "22900": 21.52454, "22905": 21.47806, "22910": 21.7347, "22915": 21.5348, "22920": 21.55626, "22925": 21.50261, "22930": 21.48562, "22935": 21.49675, "22940": 21.51384, "22945": 21.82704, "22950": 21.45409, "22955": 21.68952, "22960": 23.20589, "22965": 21.93431, "22970": 21.46793, "22975": 23.16636, "22980": 21.60427, "22985": 21.49007, "22990": 21.96381, "22995": 21.49183, "23000": 21.81672, "23005": 21.46528, "23010": 21.83148, "23015": 21.52086, "23020": 21.47056, 
"23025": 21.49474, "23030": 21.91616, "23035": 21.66395, "23040": 21.51825, "23045": 21.91628, "23050": 21.46816, "23055": 21.54034, "23060": 21.60809, "23065": 21.47927, "23070": 21.60118, "23075": 21.49061, "23080": 22.57764, "23085": 21.672, "23090": 21.49585, "23095": 22.87542, "23100": 21.51574, "23105": 21.60457, "23110": 22.1074, "23115": 21.5001, "23120": 21.83433, "23125": 21.47449, "23130": 21.49049, "23135": 21.49064, "23140": 21.75202, "23145": 21.47334, "23150": 21.46536, "23155": 21.88377, "23160": 21.51617, "23165": 21.47766, "23170": 21.49204, "23175": 22.56072, "23180": 21.5331, "23185": 21.49089, "23190": 22.93863, "23195": 21.99305, "23200": 21.55367, "23205": 21.51152, "23210": 21.50225, "23215": 21.58077, "23220": 21.59636, "23225": 21.52393, "23230": 21.49599, "23235": 21.53474, "23240": 21.5284, "23245": 21.71694, "23250": 21.47949, "23255": 21.7089, "23260": 21.61483, "23265": 21.49986, "23270": 21.5173, "23275": 21.542, "23280": 22.38899, "23285": 21.48647, "23290": 21.64651, "23295": 22.79354, "23300": 21.60236, "23305": 21.53783, "23310": 22.57936, "23315": 21.87099, "23320": 21.48593, "23325": 22.03277, "23330": 21.6274, "23335": 21.62247, "23340": 21.50779, "23345": 21.51574, "23350": 21.57372, "23355": 21.65617, "23360": 21.71172, "23365": 21.51351, "23370": 21.68368, "23375": 21.49409, "23380": 21.63906, "23385": 21.67814, "23390": 21.48488, "23395": 21.48471, "23400": 21.51802, "23405": 21.79497, "23410": 21.52785, "23415": 21.5044, "23420": 21.72243, "23425": 21.48042, "23430": 21.48722, "23435": 22.28902, "23440": 21.80267, "23445": 21.76807, "23450": 22.39959, "23455": 21.67576, "23460": 21.52568, "23465": 22.13397, "23470": 21.51617, "23475": 21.60344, "23480": 21.54084, "23485": 21.51184, "23490": 21.67424, "23495": 21.5212, "23500": 21.48832, "23505": 21.49812, "23510": 21.86855, "23515": 21.4696, "23520": 21.5521, "23525": 21.81048, "23530": 21.52487, "23535": 21.56627, "23540": 21.61337, "23545": 21.69715, "23550": 21.4897, "23555": 22.31607, "23560": 21.9417, "23565": 21.49574, "23570": 22.92598, "23575": 21.51229, "23580": 21.5181, "23585": 22.36736, "23590": 21.49288, "23595": 22.05467, "23600": 21.49557, "23605": 21.6369, "23610": 21.52375, "23615": 21.7601, "23620": 21.50189, "23625": 21.60672, "23630": 21.85466, "23635": 21.47096, "23640": 21.6569, "23645": 21.49996, "23650": 21.50007, "23655": 21.71051, "23660": 21.51535, "23665": 21.76217, "23670": 21.49085, "23675": 21.77878, "23680": 21.70919, "23685": 21.69431, "23690": 21.56194, "23695": 22.15374, "23700": 21.57006, "23705": 21.50616, "23710": 35.48329, "23715": 21.53033, "23720": 22.14655, "23725": 22.59083, "23730": 21.50393, "23735": 21.56968, "23740": 22.19148, "23745": 21.50358, "23750": 21.56724, "23755": 139.08365, "23760": 21.55345, "23765": 21.58732, "23770": 21.55026, "23775": 21.75987, "23780": 21.80856, "23785": 21.54395, "23790": 21.5279, "23795": 21.5575, "23800": 21.56882, "23805": 21.66381, "23810": 21.53179, "23815": 21.53774, "23820": 21.57779, "23825": 21.78831, "23830": 21.6451, "23835": 21.73146, "23840": 21.59029, "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": 
"nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": 
"nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": 
"nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": 
"nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": 
"nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": 
"nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": 
"nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": 
"nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": 
"nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": 
"nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": 
"nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": 
"nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": 
"nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": 
"nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": 
"nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": 
"nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": 
"nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": 
"nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": 
"nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": 
"nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": 
"nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": 
"nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": 
"nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": 
"nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": 
"nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml index 1aab9ae..bcec2e1 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml @@ -1,96 +1,96 @@ -ENV_VARS: - NCCL_IB_SL: 1 - NCCL_IB_TIMEOUT: 19 - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_FWD_LAYERNORM_SM_MARGIN: 16 - NVTE_BWD_LAYERNORM_SM_MARGIN: 16 - NCCL_P2P_NET_CHUNKSIZE: 2097152 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 -TEST_TYPE: "release" -MODEL_ARGS: - # Distributed args - --distributed-timeout-minutes: 60 - --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 4 - --use-distributed-optimizer: true - --overlap-grad-reduce: true - --overlap-param-gather: true - --no-ckpt-fully-parallel-save: true - # Training args - --use-mcore-models: true - --sequence-parallel: true - --use-flash-attn: true - --disable-bias-linear: true - --micro-batch-size: 1 - --global-batch-size: 1024 - --train-samples: 24414063 - --exit-duration-in-mins: 230 - # Transformer Engine args - --transformer-impl: transformer_engine - # Data args - --data-cache-path: ${DATA_CACHE_PATH} - --tokenizer-type: GPTSentencePieceTokenizer - --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model - --data-path: $DATA_BLEND - --split: 99,1,0 - --no-mmap-bin-files: true - --num-workers: 6 - # Add network size args - --untie-embeddings-and-output-weights: true - --position-embedding-type: rope - --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container - --rotary-percent: 0.5 - --normalization: RMSNorm - --swiglu: true - --num-layers: 32 - --hidden-size: 4096 - --ffn-hidden-size: 14336 - --num-attention-heads: 32 - --group-query-attention: true - --num-query-groups: 8 - --seq-length: 4096 - --max-position-embeddings: 4096 - --make-vocab-size-divisible-by: 128 - # Add regularization args - --attention-dropout: 0.0 - --hidden-dropout: 0.0 - --clip-grad: 1.0 - --weight-decay: 0.1 - # Add learning rate args - --lr-decay-samples: 1949218748 - --lr-warmup-samples: 3906252 - --lr: 3.0e-4 - --min-lr: 3.0e-5 - --lr-decay-style: cosine - --adam-beta1: 0.9 - --adam-beta2: 0.95 - # Add MoE args - --expert-model-parallel-size: 4 - --num-experts: 8 - --moe-router-load-balancing-type: aux_loss - --moe-router-topk: 2 - --moe-grouped-gemm: true - --moe-aux-loss-coeff: 1e-2 - --moe-token-dispatcher-type: alltoall - # Add validation args - --eval-iters: 32 - --eval-interval: 200 - # Add checkpointing args - --load: ${OUTPUT_PATH}/checkpoints - --save: 
${OUTPUT_PATH}/checkpoints - --save-interval: 5000 - # Add initialization args - --init-method-std: 0.010 - # Add logging args - --log-timers-to-tensorboard: true - --log-memory-to-tensorboard: true - --log-num-zeros-in-grad: true - --log-params-norm: true - --log-validation-ppl-to-tensorboard: true - --log-throughput: true - --log-interval: 1 - --tensorboard-dir: ${OUTPUT_PATH}/tensorboard - --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args - --bf16: true +ENV_VARS: + NCCL_IB_SL: 1 + NCCL_IB_TIMEOUT: 19 + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_FWD_LAYERNORM_SM_MARGIN: 16 + NVTE_BWD_LAYERNORM_SM_MARGIN: 16 + NCCL_P2P_NET_CHUNKSIZE: 2097152 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 +TEST_TYPE: "release" +MODEL_ARGS: + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 4 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + --no-ckpt-fully-parallel-save: true + # Training args + --use-mcore-models: true + --sequence-parallel: true + --use-flash-attn: true + --disable-bias-linear: true + --micro-batch-size: 1 + --global-batch-size: 1024 + --train-samples: 24414063 + --exit-duration-in-mins: 230 + # Transformer Engine args + --transformer-impl: transformer_engine + # Data args + --data-cache-path: ${DATA_CACHE_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --data-path: $DATA_BLEND + --split: 99,1,0 + --no-mmap-bin-files: true + --num-workers: 6 + # Add network size args + --untie-embeddings-and-output-weights: true + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container + --rotary-percent: 0.5 + --normalization: RMSNorm + --swiglu: true + --num-layers: 32 + --hidden-size: 4096 + --ffn-hidden-size: 14336 + --num-attention-heads: 32 + --group-query-attention: true + --num-query-groups: 8 + --seq-length: 4096 + --max-position-embeddings: 4096 + --make-vocab-size-divisible-by: 128 + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + # Add learning rate args + --lr-decay-samples: 1949218748 + --lr-warmup-samples: 3906252 + --lr: 3.0e-4 + --min-lr: 3.0e-5 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + # Add MoE args + --expert-model-parallel-size: 4 + --num-experts: 8 + --moe-router-load-balancing-type: aux_loss + --moe-router-topk: 2 + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-2 + --moe-token-dispatcher-type: alltoall + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 5000 + # Add initialization args + --init-method-std: 0.010 + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true + --log-throughput: true + --log-interval: 1 + --tensorboard-dir: ${OUTPUT_PATH}/tensorboard + --wandb-project: megatron-core-release-runs + --wandb-exp-name: ${WANDB_EXPERIMENT} + # Add mixed precision args + --bf16: true diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml index 
c7ca1b0..a3c5895 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml @@ -1,96 +1,96 @@ -ENV_VARS: - NCCL_IB_SL: 1 - NCCL_IB_TIMEOUT: 19 - CUDA_DEVICE_MAX_CONNECTIONS: 1 - NVTE_FWD_LAYERNORM_SM_MARGIN: 16 - NVTE_BWD_LAYERNORM_SM_MARGIN: 16 - NCCL_P2P_NET_CHUNKSIZE: 2097152 - NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 -TEST_TYPE: "release" -MODEL_ARGS: - # Distributed args - --distributed-timeout-minutes: 60 - --tensor-model-parallel-size: 2 - --pipeline-model-parallel-size: 4 - --use-distributed-optimizer: true - --overlap-grad-reduce: true - --overlap-param-gather: true - --no-ckpt-fully-parallel-save: true - # Training args - --use-mcore-models: true - --sequence-parallel: true - --use-flash-attn: true - --disable-bias-linear: true - --micro-batch-size: 1 - --global-batch-size: 1024 - --train-samples: 6103515 - --exit-duration-in-mins: 230 - # Transformer Engine args - --transformer-impl: transformer_engine - # Data args - --data-cache-path: ${DATA_CACHE_PATH} - --tokenizer-type: GPTSentencePieceTokenizer - --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model - --data-path: $DATA_BLEND - --split: 99,1,0 - --no-mmap-bin-files: true - --num-workers: 6 - # Add network size args - --untie-embeddings-and-output-weights: true - --position-embedding-type: rope - --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container - --rotary-percent: 0.5 - --normalization: RMSNorm - --swiglu: true - --num-layers: 32 - --hidden-size: 4096 - --ffn-hidden-size: 14336 - --num-attention-heads: 32 - --group-query-attention: true - --num-query-groups: 8 - --seq-length: 4096 - --max-position-embeddings: 4096 - --make-vocab-size-divisible-by: 128 - # Add regularization args - --attention-dropout: 0.0 - --hidden-dropout: 0.0 - --clip-grad: 1.0 - --weight-decay: 0.1 - # Add learning rate args - --lr-decay-samples: 1949218748 - --lr-warmup-samples: 3906252 - --lr: 3.0e-4 - --min-lr: 3.0e-5 - --lr-decay-style: cosine - --adam-beta1: 0.9 - --adam-beta2: 0.95 - # Add MoE args - --expert-model-parallel-size: 4 - --num-experts: 8 - --moe-router-load-balancing-type: aux_loss - --moe-router-topk: 2 - --moe-grouped-gemm: true - --moe-aux-loss-coeff: 1e-2 - --moe-token-dispatcher-type: alltoall - # Add validation args - --eval-iters: 32 - --eval-interval: 200 - # Add checkpointing args - --load: ${OUTPUT_PATH}/checkpoints - --save: ${OUTPUT_PATH}/checkpoints - --save-interval: 500 - # Add initialization args - --init-method-std: 0.010 - # Add logging args - --log-timers-to-tensorboard: true - --log-memory-to-tensorboard: true - --log-num-zeros-in-grad: true - --log-params-norm: true - --log-validation-ppl-to-tensorboard: true - --log-throughput: true - --log-interval: 1 - --tensorboard-dir: ${OUTPUT_PATH}/tensorboard - --wandb-project: megatron-core-release-runs - --wandb-exp-name: ${WANDB_EXPERIMENT} - # Add mixed precision args - --bf16: true +ENV_VARS: + NCCL_IB_SL: 1 + NCCL_IB_TIMEOUT: 19 + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_FWD_LAYERNORM_SM_MARGIN: 16 + NVTE_BWD_LAYERNORM_SM_MARGIN: 16 + NCCL_P2P_NET_CHUNKSIZE: 2097152 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 +TEST_TYPE: "release" +MODEL_ARGS: + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 4 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + 
--no-ckpt-fully-parallel-save: true + # Training args + --use-mcore-models: true + --sequence-parallel: true + --use-flash-attn: true + --disable-bias-linear: true + --micro-batch-size: 1 + --global-batch-size: 1024 + --train-samples: 6103515 + --exit-duration-in-mins: 230 + # Transformer Engine args + --transformer-impl: transformer_engine + # Data args + --data-cache-path: ${DATA_CACHE_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --data-path: $DATA_BLEND + --split: 99,1,0 + --no-mmap-bin-files: true + --num-workers: 6 + # Add network size args + --untie-embeddings-and-output-weights: true + --position-embedding-type: rope + --no-rope-fusion: true #TODO: We can remove this once upgrading to the DEV container + --rotary-percent: 0.5 + --normalization: RMSNorm + --swiglu: true + --num-layers: 32 + --hidden-size: 4096 + --ffn-hidden-size: 14336 + --num-attention-heads: 32 + --group-query-attention: true + --num-query-groups: 8 + --seq-length: 4096 + --max-position-embeddings: 4096 + --make-vocab-size-divisible-by: 128 + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + # Add learning rate args + --lr-decay-samples: 1949218748 + --lr-warmup-samples: 3906252 + --lr: 3.0e-4 + --min-lr: 3.0e-5 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + # Add MoE args + --expert-model-parallel-size: 4 + --num-experts: 8 + --moe-router-load-balancing-type: aux_loss + --moe-router-topk: 2 + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-2 + --moe-token-dispatcher-type: alltoall + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 500 + # Add initialization args + --init-method-std: 0.010 + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true + --log-throughput: true + --log-interval: 1 + --tensorboard-dir: ${OUTPUT_PATH}/tensorboard + --wandb-project: megatron-core-release-runs + --wandb-exp-name: ${WANDB_EXPERIMENT} + # Add mixed precision args + --bf16: true diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.10.0.json new file mode 100644 index 0000000..dc34608 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 1.80637, "5": 1.8317, "10": 1.77654, "15": 1.78769, "20": 1.79515, "25": 1.75224, "30": 1.80504, "35": 1.7056, "40": 1.72624, "45": 1.72376, "50": 1.68552, "55": 1.69594, "60": 1.69313, "65": 1.68683, "70": 1.65621, "75": 1.68404, "80": 1.67447, "85": 1.66158, "90": 1.68292, "95": 1.66502, "100": 1.64202, "105": 1.62497, "110": 1.63683, "115": 1.63428, "120": 1.63299, "125": 1.64256, "130": 1.66909, "135": 1.62593, "140": 1.64117, "145": 1.60821, "150": 1.62737, "155": 1.61972, "160": 1.61876, "165": 1.62256, "170": 1.62578, "175": 1.60159, "180": 1.60661, "185": 1.67135, "190": 1.61268, "195": 1.6405, "200": 1.62432, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": 
"nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", 
"1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", 
"2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", 
"3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", 
"5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", 
"6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", 
"7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", 
"8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", 
"9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 
"nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": 
"nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": 
"nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": 
"nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": 
"nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": 
"nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": 
"nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": 
"nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": 
"nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": 
"nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 
"nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": 
"nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": 
"nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": 
"nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": 
"nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": 
"nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": 
"nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": 
"nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": 
"nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": 
"nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 
"nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": 
"nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": 
"nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": 
"nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": 
"nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": 
"nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": 
"nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": 
"nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": 
"nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": 
"nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 
"nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": 
"nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": 
"nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": 
"nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": 
"nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": 
"nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": 
"nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 30603822.0, "5": 32682148.0, "10": 31045572.0, "15": 33018754.0, "20": 33510834.0, "25": 33557528.0, "30": 31470404.0, "35": 31841352.0, "40": 31827464.0, "45": 32065052.0, "50": 31265236.0, "55": 33960616.0, "60": 31885906.0, "65": 33272050.0, "70": 30715592.0, "75": 31399474.0, "80": 32844178.0, "85": 31487264.0, "90": 32686198.0, "95": 32130336.0, "100": 32468560.0, "105": 33631048.0, "110": 33634672.0, "115": 29283322.0, "120": 30478856.0, "125": 32184674.0, "130": 32611400.0, "135": 32799708.0, "140": 32537316.0, "145": 30864032.0, "150": 33189970.0, "155": 33602408.0, "160": 32107236.0, "165": 33621956.0, "170": 32441776.0, "175": 31955124.0, "180": 31130886.0, "185": 31950278.0, "190": 33521704.0, "195": 32166632.0, "200": 30686888.0, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", 
"1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", 
"2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", 
"3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", 
"4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", 
"5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", 
"7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", 
"8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", 
"9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", 
"10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", 
"11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", 
"12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", 
"13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", 
"15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", 
"16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", 
"17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", 
"18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", 
"19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", 
"20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", 
"21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", 
"22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", 
"23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", 
"25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", 
"26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", 
"27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", 
"28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", 
"29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", 
"30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", 
"31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", 
"32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", 
"33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", 
"35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", 
"36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", 
"37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", 
"38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", 
"39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", 
"40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", 
"41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", 
"42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", 
"43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", 
"45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", 
"46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", 
"47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", 
"48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", 
"49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", 
"50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 20723163136.0, "5": 20723165184.0, "10": 20723165184.0, "15": 20723165184.0, "20": 20723165184.0, "25": 20723165184.0, "30": 20723165184.0, "35": 20723165184.0, "40": 20723165184.0, "45": 20723165184.0, "50": 20723165184.0, "55": 20723165184.0, "60": 20723165184.0, "65": 20723165184.0, "70": 20723165184.0, "75": 20723165184.0, "80": 20723165184.0, "85": 20723165184.0, "90": 20723165184.0, "95": 20723165184.0, "100": 20723165184.0, "105": 20723165184.0, "110": 20723165184.0, "115": 20723165184.0, "120": 20723165184.0, "125": 20723165184.0, "130": 20723165184.0, "135": 20723165184.0, "140": 20723165184.0, "145": 20723165184.0, "150": 20723165184.0, "155": 20723165184.0, "160": 20723165184.0, "165": 20723165184.0, "170": 20723165184.0, "175": 20723165184.0, "180": 20723165184.0, "185": 20723165184.0, "190": 20723165184.0, "195": 20723165184.0, "200": 20723165184.0, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": 
"nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", 
"1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", 
"3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", 
"4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", 
"5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", 
"6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", 
"7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", 
"9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": 
"nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": 
"nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": 
"nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": 
"nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": 
"nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 
"nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": 
"nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": 
"nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": 
"nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": 
"nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": 
"nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": 
"nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": 
"nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": 
"nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": 
"nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": 
"nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": 
"nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": 
"nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": 
"nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": 
"nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": 
"nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": 
"nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": 
"nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": 
"nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": 
"nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": 
"nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": 
"nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": 
"nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": 
"nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": 
"nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": 
"nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": 
"nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": 
"nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": 
"nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": 
"nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": 
"nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": 
"nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 156.0981, "5": 3.34665, "10": 3.3605, "15": 3.35106, "20": 3.36715, "25": 3.34384, "30": 3.34446, "35": 3.34122, "40": 3.35077, "45": 3.33024, "50": 3.34961, "55": 3.50977, "60": 3.46212, "65": 3.62063, "70": 3.78557, "75": 3.55006, "80": 3.29524, "85": 3.29729, "90": 3.29181, "95": 3.28442, "100": 3.28088, "105": 3.28191, "110": 3.2822, "115": 3.29324, "120": 3.40329, "125": 3.2722, "130": 3.25511, "135": 3.26415, "140": 3.2588, "145": 3.25775, "150": 3.27378, "155": 3.25593, "160": 3.25947, "165": 3.24649, "170": 3.25754, "175": 3.26231, "180": 3.2607, "185": 3.25802, "190": 3.26212, "195": 3.25712, "200": 3.36674, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": 
"nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": 
"nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": 
"nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": 
"nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": 
"nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": 
"nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": 
"nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 
"nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": 
"nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": 
"nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": 
"nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": 
"nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": 
"nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": 
"nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": 
"nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": 
"nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": 
"nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": 
"nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": 
"nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": 
"nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": 
"nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": 
"nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": 
"nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": 
"nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": 
"nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": 
"nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": 
"nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": 
"nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": 
"nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": 
"nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": 
"nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": 
"nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": 
"nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": 
"nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": 
"nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": 
"nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": 
"nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": 
"nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": 
"nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": 
"nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": 
"nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": 
"nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": 
"nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": 
"nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": 
"nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json index 3b0155a..4a727d7 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/golden_values_0.9.0.json @@ -1,275 +1 @@ -{ - "mem-allocated-bytes": { - "start_step": 0, - "end_step": 420, - "step_interval": 5, - "values": [ - 
20705730560.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705730560.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705730560.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0, - 20705732608.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 420, - "step_interval": 5, - "values": [ - 174.32498, - 5.03933, - 5.07613, - 7.42013, - 169.24701, - 3.36681, - 3.34591, - 3.34951, - 3.52622, - 3.5358, - 3.48786, - 3.36115, - 3.35303, - 3.33904, - 3.3418, - 3.45107, - 3.34203, - 3.51434, - 3.40521, - 3.31669, - 3.29789, - 3.31313, - 3.29411, - 3.29085, - 3.27948, - 3.2839, - 3.33829, - 3.2764, - 3.27646, - 3.28108, - 3.26077, - 3.26767, - 3.25715, - 3.26524, - 3.26767, - 3.26115, - 3.26032, - 3.25141, - 3.27231, - 3.24855, - 3.25906, - 3.38416, - 3.26765, - 3.26154, - 169.37907, - 3.29826, - 3.29074, - 3.32167, - 3.54332, - 3.56011, - 3.41217, - 3.29645, - 3.30239, - 3.28493, - 3.28615, - 3.38222, - 3.27917, - 3.42778, - 3.35594, - 3.27354, - 3.23432, - 3.24867, - 3.24654, - 3.23251, - 3.22087, - 3.21832, - 3.27523, - 3.21564, - 3.21386, - 3.21731, - 3.21401, - 3.21026, - 3.20818, - 3.20512, - 3.20698, - 3.21101, - 3.19753, - 3.20163, - 3.22271, - 3.18466, - 3.19733, - 3.32646, - 3.19771, - 3.19899 - ] - }, - "throughput": { - "start_step": 0, - "end_step": 420, - "step_interval": 5, - "values": [ - 7.79399, - 269.61679, - 267.66226, - 183.10829, - 8.02784, - 403.55313, - 406.07434, - 405.63708, - 385.30963, - 384.26593, - 389.54803, - 404.2323, - 405.21173, - 406.90967, - 406.57309, - 393.69977, - 406.54602, - 386.612, - 399.0025, - 409.65109, - 411.98703, - 410.09161, - 412.46014, - 412.86859, - 414.30011, - 413.74167, - 407.00095, - 414.68881, - 414.68198, - 414.09723, - 416.67682, - 415.79745, - 417.14041, - 416.10687, - 415.79706, - 416.6282, - 416.73474, - 417.87595, - 415.20795, - 418.24426, - 416.89496, - 401.48453, - 415.79965, - 416.57834, - 8.02158, - 411.94022, - 412.88141, - 409.03793, - 383.4502, - 381.64218, - 398.18808, - 412.16641, - 411.42493, - 413.61191, - 413.45926, - 401.71454, - 414.33859, - 396.37567, - 404.85992, - 415.05142, - 420.0842, - 418.22919, - 418.50348, - 420.31937, - 421.83838, - 422.17279, - 414.83759, - 422.52484, - 422.75912, - 422.30557, - 422.73874, - 423.2323, - 423.50696, - 423.91129, - 423.66608, - 423.13437, - 424.918, - 424.37387, 
- 421.59784, - 426.63443, - 424.94376, - 408.44785, - 424.89417, - 424.72318 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 10.49788, "5": 10.50045, "10": 10.4243, "15": 10.14281, "20": 9.41642, "25": 1.75239, "30": 1.80478, "35": 1.70564, "40": 1.72623, "45": 1.72361, "50": 1.6855, "55": 1.69592, "60": 1.69306, "65": 1.68684, "70": 1.65619, "75": 1.68425, "80": 1.67456, "85": 1.66162, "90": 1.6831, "95": 1.66488, "100": 1.64205, "105": 1.62503, "110": 1.63692, "115": 1.63431, "120": 1.63308, "125": 1.64252, "130": 1.6691, "135": 1.62602, "140": 1.64107, "145": 1.6082, "150": 1.62736, "155": 1.61966, "160": 1.61866, "165": 1.62246, "170": 1.6257, "175": 1.60148, "180": 1.60642, "185": 1.67132, "190": 1.61238, "195": 1.64047, "200": 1.62411, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", 
"1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", 
"2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", 
"3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", 
"4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", 
"5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", 
"7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", 
"8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", 
"9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 
"nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": 
"nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": 
"nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": 
"nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": 
"nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": 
"nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": 
"nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": 
"nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": 
"nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": 
"nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": 
"nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": 
"nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": 
"nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": 
"nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": 
"nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": 
"nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": 
"nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": 
"nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": 
"nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": 
"nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": 
"nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": 
"nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": 
"nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": 
"nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": 
"nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": 
"nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": 
"nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": 
"nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": 
"nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": 
"nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": 
"nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": 
"nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": 
"nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": 
"nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": 
"nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": 
"nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": 
"nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 30326678.0, "5": 32391428.0, "10": 30781540.0, "15": 32722572.0, "20": 33185490.0, "25": 33556448.0, "30": 31473592.0, "35": 31844712.0, "40": 31824396.0, "45": 32098404.0, "50": 31267756.0, "55": 33963476.0, "60": 31879064.0, "65": 33268092.0, "70": 30715010.0, "75": 31399052.0, "80": 32844988.0, "85": 31486526.0, "90": 32687740.0, "95": 32130810.0, "100": 32462664.0, "105": 33638344.0, "110": 33629648.0, "115": 29281796.0, "120": 30500668.0, "125": 32181704.0, "130": 32605190.0, "135": 32800060.0, "140": 32538972.0, "145": 30870348.0, "150": 33190744.0, "155": 33602592.0, "160": 32105356.0, "165": 33624008.0, "170": 32443560.0, "175": 31957776.0, "180": 31129932.0, "185": 31949800.0, "190": 33522066.0, "195": 32166264.0, "200": 30687200.0, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": 
"nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": 
"nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": 
"nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": 
"nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": 
"nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": 
"nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": 
"nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": 
"nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": 
"nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": 
"nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": 
"nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": 
"nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": 
"nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": 
"nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": 
"nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": 
"nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": 
"nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": 
"nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": 
"nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": 
"nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": 
"nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": 
"nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": 
"nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": 
"nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": 
"nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": 
"nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": 
"nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": 
"nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": 
"nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": 
"nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": 
"nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": 
"nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": 
"nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": 
"nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": 
"nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": 
"nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": 
"nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": 
"nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": 
"nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": 
"nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": 
"nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": 
"nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": 
"nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": 
"nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": 
"nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 20705730560.0, "5": 20705732608.0, "10": 20705732608.0, "15": 20705732608.0, "20": 20705732608.0, "25": 20705732608.0, "30": 20705732608.0, "35": 20705732608.0, "40": 20705732608.0, "45": 20705732608.0, "50": 20705732608.0, "55": 20705732608.0, "60": 20705732608.0, "65": 20705732608.0, "70": 20705732608.0, "75": 20705732608.0, "80": 20705732608.0, "85": 20705732608.0, "90": 20705732608.0, "95": 20705732608.0, "100": 20705732608.0, "105": 20705732608.0, "110": 20705732608.0, "115": 20705732608.0, "120": 20705732608.0, "125": 20705732608.0, "130": 20705732608.0, "135": 20705732608.0, "140": 20705732608.0, "145": 20705732608.0, "150": 20705732608.0, "155": 20705732608.0, "160": 20705732608.0, "165": 20705732608.0, "170": 20705732608.0, "175": 20705732608.0, "180": 20705732608.0, "185": 20705732608.0, "190": 20705732608.0, "195": 20705732608.0, "200": 20705732608.0, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": 
"nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", 
"1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", 
"2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", 
"4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", 
"5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", 
"6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", 
"7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", 
"8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", 
"9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": 
"nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": 
"nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": 
"nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": 
"nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 
"nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": 
"nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": 
"nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": 
"nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": 
"nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": 
"nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": 
"nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": 
"nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": 
"nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": 
"nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": 
"nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": 
"nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": 
"nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": 
"nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": 
"nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": 
"nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": 
"nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": 
"nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": 
"nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": 
"nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": 
"nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": 
"nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": 
"nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": 
"nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": 
"nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": 
"nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": 
"nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": 
"nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": 
"nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": 
"nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": 
"nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": 
"nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 174.32498, "5": 5.06149, "10": 5.07997, "15": 5.21862, "20": 9.95533, "25": 3.35271, "30": 3.35688, "35": 3.53178, "40": 3.35947, "45": 3.33351, "50": 3.3658, "55": 3.3383, "60": 3.32413, "65": 3.32342, "70": 3.32885, "75": 3.31745, "80": 3.29681, "85": 3.3079, "90": 3.38823, "95": 3.39618, "100": 3.28383, "105": 3.28995, "110": 3.2873, "115": 3.31179, "120": 3.27844, "125": 3.2892, "130": 3.26734, "135": 
3.52261, "140": 3.26552, "145": 3.26482, "150": 3.27187, "155": 3.39806, "160": 3.26054, "165": 3.25707, "170": 3.25038, "175": 3.27198, "180": 3.34554, "185": 3.26151, "190": 3.25863, "195": 3.25416, "200": 3.25999, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": "nan", "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": "nan", "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": "nan", "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": "nan", "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": "nan", "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": "nan", "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": "nan", "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": "nan", "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": "nan", "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": "nan", "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": "nan", "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", 
"1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": "nan", "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": "nan", "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": "nan", "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": "nan", "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": "nan", "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": "nan", "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": "nan", "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": "nan", "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": "nan", "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": "nan", "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": "nan", "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": "nan", "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", 
"2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": "nan", "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": "nan", "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": "nan", "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": "nan", "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": "nan", "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": "nan", "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": "nan", "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": "nan", "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": "nan", "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": "nan", "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": "nan", "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": "nan", "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", 
"3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": "nan", "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": "nan", "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": "nan", "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": "nan", "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": "nan", "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": "nan", "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": "nan", "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": "nan", "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": "nan", "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": "nan", "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": "nan", "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": "nan", "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", 
"4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": "nan", "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": "nan", "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": "nan", "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": "nan", "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": "nan", "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": "nan", "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": "nan", "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": "nan", "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": "nan", "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": "nan", "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": "nan", "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": "nan", "6105": "nan", 
"6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": "nan", "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": "nan", "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": "nan", "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": "nan", "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": "nan", "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": "nan", "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": "nan", "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": "nan", "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": "nan", "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": "nan", "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": "nan", "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", 
"7295": "nan", "7300": "nan", "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": "nan", "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": "nan", "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": "nan", "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": "nan", "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": "nan", "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": "nan", "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": "nan", "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": "nan", "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": "nan", "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": "nan", "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": "nan", "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", 
"8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": "nan", "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": "nan", "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": "nan", "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": "nan", "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": "nan", "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": "nan", "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": "nan", "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": "nan", "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": "nan", "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": "nan", "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": "nan", "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": "nan", "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", 
"9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": "nan", "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": "nan", "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": "nan", "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": "nan", "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": "nan", "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": "nan", "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": "nan", "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": "nan", "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": "nan", "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": "nan", "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": "nan", "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", 
"10795": "nan", "10800": "nan", "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": "nan", "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": "nan", "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": "nan", "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": "nan", "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": "nan", "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": "nan", "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": "nan", "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": "nan", "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": "nan", "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": "nan", "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": "nan", 
"11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": "nan", "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": "nan", "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": "nan", "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": "nan", "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": "nan", "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": "nan", "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": "nan", "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": "nan", "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan", "13005": "nan", "13010": "nan", 
"13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": "nan", "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": "nan", "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": "nan", "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": "nan", "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": "nan", "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": "nan", "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": "nan", "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": "nan", "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": "nan", "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": "nan", "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": "nan", "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", 
"14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": "nan", "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": "nan", "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": "nan", "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": "nan", "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": "nan", "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": "nan", "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": "nan", "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": "nan", "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": "nan", "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": "nan", "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": "nan", "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", 
"15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": "nan", "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": "nan", "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": "nan", "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": "nan", "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": "nan", "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": "nan", "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": "nan", "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": "nan", "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": "nan", "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": "nan", "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": "nan", "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", 
"16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": "nan", "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": "nan", "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": "nan", "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": "nan", "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": "nan", "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": "nan", "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": "nan", "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": "nan", "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": "nan", "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": "nan", "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": "nan", "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", 
"17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": "nan", "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": "nan", "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": "nan", "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": "nan", "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": "nan", "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": "nan", "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": "nan", "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": "nan", "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": "nan", "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": "nan", "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": "nan", "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", 
"18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": "nan", "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": "nan", "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": "nan", "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": "nan", "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": "nan", "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": "nan", "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": "nan", "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": "nan", "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": "nan", "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": "nan", "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": "nan", "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", 
"19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": "nan", "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": "nan", "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": "nan", "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": "nan", "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": "nan", "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": "nan", "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": "nan", "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": "nan", "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": "nan", "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": "nan", "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": "nan", "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", 
"20785": "nan", "20790": "nan", "20795": "nan", "20800": "nan", "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": "nan", "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": "nan", "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": "nan", "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": "nan", "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": "nan", "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": "nan", "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": "nan", "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": "nan", "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": "nan", "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": "nan", "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", 
"21895": "nan", "21900": "nan", "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": "nan", "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": "nan", "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": "nan", "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": "nan", "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": "nan", "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": "nan", "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": "nan", "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": "nan", "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": "nan", "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": "nan", "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": "nan", 
"23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": "nan", "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": "nan", "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": "nan", "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": "nan", "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": "nan", "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": "nan", "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": "nan", "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": "nan", "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": "nan", "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": "nan", "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": "nan", "24105": "nan", "24110": "nan", 
"24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": "nan", "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": "nan", "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": "nan", "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": "nan", "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": "nan", "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": "nan", "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": "nan", "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": "nan", "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": "nan", "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": "nan", "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": "nan", "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", 
"25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": "nan", "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": "nan", "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": "nan", "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": "nan", "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": "nan", "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": "nan", "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": "nan", "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": "nan", "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": "nan", "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": "nan", "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": "nan", "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", 
"26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": "nan", "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": "nan", "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": "nan", "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": "nan", "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": "nan", "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": "nan", "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": "nan", "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": "nan", "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": "nan", "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": "nan", "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": "nan", "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", 
"27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": "nan", "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": "nan", "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": "nan", "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": "nan", "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": "nan", "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": "nan", "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": "nan", "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": "nan", "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": "nan", "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": "nan", "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": "nan", "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", 
"28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": "nan", "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": "nan", "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": "nan", "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": "nan", "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": "nan", "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": "nan", "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": "nan", "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": "nan", "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": "nan", "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": "nan", "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": "nan", "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", 
"29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": "nan", "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": "nan", "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": "nan", "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": "nan", "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": "nan", "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": "nan", "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": "nan", "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": "nan", "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": "nan", "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": "nan", "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": "nan", "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", 
"30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": "nan", "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": "nan", "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": "nan", "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": "nan", "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": "nan", "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": "nan", "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": "nan", "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": "nan", "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": "nan", "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": "nan", "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": "nan", "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", 
"31885": "nan", "31890": "nan", "31895": "nan", "31900": "nan", "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": "nan", "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": "nan", "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": "nan", "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": "nan", "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": "nan", "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": "nan", "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": "nan", "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": "nan", "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": "nan", "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": "nan", "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", 
"32995": "nan", "33000": "nan", "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": "nan", "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": "nan", "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": "nan", "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": "nan", "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": "nan", "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": "nan", "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": "nan", "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": "nan", "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": "nan", "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": "nan", "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": "nan", 
"34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": "nan", "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": "nan", "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": "nan", "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": "nan", "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": "nan", "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": "nan", "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": "nan", "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": "nan", "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": "nan", "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": "nan", "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": "nan", "35205": "nan", "35210": "nan", 
"35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": "nan", "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": "nan", "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": "nan", "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": "nan", "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": "nan", "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": "nan", "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": "nan", "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": "nan", "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": "nan", "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": "nan", "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": "nan", "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", 
"36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": "nan", "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": "nan", "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": "nan", "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": "nan", "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": "nan", "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": "nan", "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": "nan", "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": "nan", "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": "nan", "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": "nan", "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": "nan", "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", 
"37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": "nan", "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": "nan", "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": "nan", "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": "nan", "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": "nan", "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": "nan", "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": "nan", "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": "nan", "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": "nan", "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": "nan", "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": "nan", "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", 
"38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": "nan", "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": "nan", "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": "nan", "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": "nan", "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": "nan", "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": "nan", "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": "nan", "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": "nan", "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": "nan", "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": "nan", "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": "nan", "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", 
"39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": "nan", "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": "nan", "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": "nan", "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": "nan", "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": "nan", "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": "nan", "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": "nan", "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": "nan", "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": "nan", "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": "nan", "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": "nan", "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", 
"40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": "nan", "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": "nan", "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": "nan", "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": "nan", "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": "nan", "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": "nan", "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": "nan", "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": "nan", "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": "nan", "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": "nan", "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": "nan", "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", 
"41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": "nan", "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": "nan", "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": "nan", "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": "nan", "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": "nan", "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": "nan", "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": "nan", "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": "nan", "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": "nan", "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": "nan", "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": "nan", "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", 
"42985": "nan", "42990": "nan", "42995": "nan", "43000": "nan", "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": "nan", "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": "nan", "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": "nan", "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": "nan", "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": "nan", "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": "nan", "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": "nan", "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": "nan", "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": "nan", "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": "nan", "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", 
"44095": "nan", "44100": "nan", "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": "nan", "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": "nan", "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": "nan", "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": "nan", "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": "nan", "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": "nan", "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": "nan", "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": "nan", "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": "nan", "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": "nan", "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": "nan", 
"45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": "nan", "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": "nan", "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": "nan", "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": "nan", "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": "nan", "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": "nan", "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": "nan", "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": "nan", "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": "nan", "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": "nan", "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": "nan", "46305": "nan", "46310": "nan", 
"46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": "nan", "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": "nan", "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": "nan", "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": "nan", "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": "nan", "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": "nan", "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": "nan", "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": "nan", "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": "nan", "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": "nan", "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": "nan", "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", 
"47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": "nan", "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": "nan", "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": "nan", "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": "nan", "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": "nan", "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": "nan", "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": "nan", "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": "nan", "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": "nan", "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": "nan", "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": "nan", "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", 
"48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": "nan", "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": "nan", "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": "nan", "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": "nan", "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": "nan", "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": "nan", "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": "nan", "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": "nan", "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": "nan", "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": "nan", "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": "nan", "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", 
"49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": "nan", "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": "nan", "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": "nan", "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": "nan", "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": "nan", "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": "nan", "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": "nan", "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": "nan", "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": "nan", "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": "nan", "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": "nan", "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", 
"50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": "nan", "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json index fdcf152..7a28081 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.1349, - 9.13328, - 9.129, - 9.11325, - 9.05402, - 9.0423, - 8.98255, - 8.93259, - 8.88939, - 8.78786 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3477378.0, - 3584431.0, - 3475109.0, - 3382848.0, - 3699812.0, - 3478561.0, - 3397873.0, - 3453618.0, - 3424934.0, - 3585113.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.79473, - 0.31292, - 0.31229, - 0.31273, - 0.31218, - 0.31206, - 0.31234, - 0.3114, - 0.31226, - 0.31109 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.1349, "5": 9.12964, "10": 9.12979, "15": 9.11614, "20": 9.0785, "25": 9.04422, "30": 8.98304, "35": 8.94254, "40": 8.85736, "45": 8.80768, "50": 8.75896}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3477378.0, "5": 3783692.0, "10": 3544072.0, "15": 3421136.0, "20": 3465605.0, "25": 3457186.0, "30": 3708351.0, "35": 3432280.0, "40": 3614440.0, "45": 3452707.0, "50": 3411252.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2402084352.0, "5": 2402084352.0, "10": 2402084352.0, "15": 2402084352.0, "20": 2402084352.0, "25": 2402084352.0, "30": 2402084352.0, "35": 2402084352.0, "40": 2402084352.0, "45": 2402084352.0, "50": 2402084352.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14680064000.0, "5": 15714056192.0, "10": 15714056192.0, "15": 15714056192.0, "20": 15714056192.0, "25": 15714056192.0, "30": 15714056192.0, "35": 15714056192.0, "40": 15714056192.0, "45": 15714056192.0, "50": 15714056192.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 7.20477, "5": 0.31364, "10": 0.31129, "15": 0.31047, "20": 0.31127, "25": 0.3103, "30": 0.30986, "35": 0.30958, "40": 0.31049, "45": 0.30941, "50": 0.31032}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json index f4b3908..e114e45 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json +++ 
b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.13495, 9.13325, 9.12905, 9.11323, 9.05401, 9.04233, 8.98255, 8.93258, 8.88937, 8.78788]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3477473.0, 3584371.0, 3475194.0, 3382773.0, 3699802.0, 3478715.0, 3397967.0, 3453615.0, 3424973.0, 3585127.0]},"iteration_timing_avg": 0.2253964705882353} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.13495, "5": 9.12967, "10": 9.12977, "15": 9.1161, "20": 9.07852, "25": 9.04418, "30": 8.98306, "35": 8.94256, "40": 8.85736, "45": 8.80772, "50": 8.75889}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3477473.0, "5": 3783651.0, "10": 3544032.0, "15": 3421007.0, "20": 3465554.0, "25": 3457236.0, "30": 3708300.0, "35": 3432250.0, "40": 3614349.0, "45": 3452827.0, "50": 3411525.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2402056704.0, "5": 2402056704.0, "10": 2402056704.0, "15": 2402056704.0, "20": 2402056704.0, "25": 2402056704.0, "30": 2402056704.0, "35": 2402056704.0, "40": 2402056704.0, "45": 2402056704.0, "50": 2402056704.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14680380416.0, "5": 15714027520.0, "10": 15714027520.0, "15": 15714027520.0, "20": 15714027520.0, "25": 15714027520.0, "30": 15714027520.0, "35": 15714027520.0, "40": 15714027520.0, "45": 15714027520.0, "50": 15714027520.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.07788, "5": 0.29508, "10": 0.54146, "15": 0.48777, "20": 0.29866, "25": 0.29452, "30": 0.29974, "35": 0.29512, "40": 0.29799, "45": 0.29506, "50": 0.29873}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml index b3b81d5..625bb91 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G/model_config.yaml @@ -21,8 +21,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -49,4 +49,5 @@ MODEL_ARGS: --img-w: 336 --patch-dim: 14 --mock-data: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json index 74173ee..eee62d9 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - 
"end_step": 50, - "step_interval": 5, - "values": [ - 9.16172, - 9.16209, - 9.15685, - 9.1402, - 9.09395, - 9.07144, - 9.01399, - 8.96508, - 8.91879, - 8.8258 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3557267.0, - 3663904.0, - 3554934.0, - 3462955.0, - 3780144.0, - 3559102.0, - 3477361.0, - 3533886.0, - 3504942.0, - 3665022.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 19.12182, - 0.63754, - 0.63824, - 0.6364, - 0.62383, - 0.62352, - 0.62268, - 0.62428, - 0.63616, - 0.6281 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.16172, "5": 9.1614, "10": 9.16229, "15": 9.14616, "20": 9.10769, "25": 9.07508, "30": 9.01761, "35": 8.97484, "40": 8.89629, "45": 8.83826, "50": 8.79161}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3557267.0, "5": 3864082.0, "10": 3624135.0, "15": 3500790.0, "20": 3546044.0, "25": 3537071.0, "30": 3787575.0, "35": 3512003.0, "40": 3694301.0, "45": 3532004.0, "50": 3492313.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 309738496.0, "5": 309738496.0, "10": 309738496.0, "15": 309738496.0, "20": 309738496.0, "25": 309738496.0, "30": 309738496.0, "35": 309738496.0, "40": 309738496.0, "45": 309738496.0, "50": 309738496.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2578621440.0, "5": 2675665408.0, "10": 2675665408.0, "15": 2675665408.0, "20": 2675665408.0, "25": 2675665408.0, "30": 2675665408.0, "35": 2675665408.0, "40": 2675665408.0, "45": 2675665408.0, "50": 2675665408.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 20.13525, "5": 0.63002, "10": 0.62634, "15": 0.62467, "20": 0.852, "25": 0.62647, "30": 0.61394, "35": 0.61186, "40": 0.61126, "45": 0.61295, "50": 0.61315}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json index 03e0dd0..43e5b87 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3557301.0, 3663955.0, 3555196.0, 3462888.0, 3780083.0, 3559007.0, 3477262.0, 3533752.0, 3505033.0, 3665096.0]},"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.16173, 9.16211, 9.15686, 9.14022, 9.09396, 9.07146, 9.01401, 8.9651, 8.91881, 8.82578]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.16173, "5": 9.16141, "10": 9.16225, "15": 9.14616, "20": 9.1077, "25": 9.07505, "30": 9.01758, "35": 8.9749, "40": 8.89623, "45": 8.83825, "50": 8.79166}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3557301.0, "5": 3863876.0, "10": 3624183.0, "15": 3500762.0, "20": 3545959.0, "25": 3537251.0, "30": 3787538.0, "35": 3512012.0, "40": 3694287.0, "45": 3532125.0, "50": 3492202.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, 
"step_interval": 5, "values": {"1": 310157312.0, "5": 310157312.0, "10": 310157312.0, "15": 310157312.0, "20": 310157312.0, "25": 310157312.0, "30": 310157312.0, "35": 310157312.0, "40": 310157312.0, "45": 310157312.0, "50": 310157312.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 2578968064.0, "5": 2675332096.0, "10": 2675332096.0, "15": 2675332096.0, "20": 2675332096.0, "25": 2675332096.0, "30": 2675332096.0, "35": 2675332096.0, "40": 2675332096.0, "45": 2675332096.0, "50": 2675332096.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 21.0705, "5": 0.58863, "10": 0.75694, "15": 0.58633, "20": 0.58482, "25": 0.58575, "30": 0.58831, "35": 0.58903, "40": 0.59135, "45": 0.58505, "50": 0.5887}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml index cdfdac5..3176a21 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G/model_config.yaml @@ -21,8 +21,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -50,4 +50,5 @@ MODEL_ARGS: --img-w: 336 --patch-dim: 14 --mock-data: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json index a7ef0e1..5ee4c95 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_dev.json @@ -1,53 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 9.19864, - 9.20111, - 9.19601, - 9.17296, - 9.11705, - 9.10224, - 9.04016, - 8.98428, - 8.94016, - 8.8386 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 3717664.0, - 3824288.0, - 3714705.0, - 3622894.0, - 3939791.0, - 3718740.0, - 3637227.0, - 3694225.0, - 3665435.0, - 3825408.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 50, - "step_interval": 5, - "values": [ - 12.72076, - 0.81802, - 0.8164, - 0.81573, - 0.81376, - 0.81495, - 0.81587, - 0.8178, - 0.82291, - 0.82279 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19864, "5": 9.19691, "10": 9.19094, "15": 9.17523, "20": 9.13891, "25": 9.10449, "30": 9.03731, "35": 8.99499, "40": 8.91463, "45": 8.85894, "50": 8.80907}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3717664.0, "5": 4023697.0, "10": 3784077.0, "15": 3660641.0, "20": 3705933.0, "25": 3697193.0, "30": 3947711.0, "35": 3672071.0, "40": 3854594.0, "45": 3692583.0, "50": 
3652269.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 404062208.0, "5": 403828736.0, "10": 403828736.0, "15": 403828736.0, "20": 403828736.0, "25": 403828736.0, "30": 404062208.0, "35": 404062208.0, "40": 404806144.0, "45": 404062208.0, "50": 403828736.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3450737152.0, "5": 3594671616.0, "10": 3594671616.0, "15": 3594671616.0, "20": 3594671616.0, "25": 3594671616.0, "30": 3595095040.0, "35": 3595095040.0, "40": 3595095040.0, "45": 3595095040.0, "50": 3595095040.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 11.64435, "5": 0.81028, "10": 0.79069, "15": 0.78106, "20": 0.78018, "25": 0.77926, "30": 1.05923, "35": 0.77819, "40": 0.83356, "45": 1.02801, "50": 0.9237}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json index 96f345a..ab5e62d 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19864, 9.20112, 9.19598, 9.17297, 9.1171, 9.10232, 9.04013, 8.98432, 8.94016, 8.83862]},"num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3717564.0, 3824205.0, 3714643.0, 3622971.0, 3939727.0, 3718836.0, 3637293.0, 3694227.0, 3665382.0, 3825257.0]}, "iteration_timing_avg": 0.5847132352941178} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19864, "5": 9.19687, "10": 9.19097, "15": 9.17523, "20": 9.13889, "25": 9.10451, "30": 9.03733, "35": 8.99498, "40": 8.91464, "45": 8.85892, "50": 8.80904}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3717564.0, "5": 4023735.0, "10": 3783982.0, "15": 3660711.0, "20": 3705787.0, "25": 3697298.0, "30": 3947821.0, "35": 3672132.0, "40": 3854582.0, "45": 3692819.0, "50": 3652068.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 404009472.0, "5": 404009472.0, "10": 404009472.0, "15": 404009472.0, "20": 404009472.0, "25": 404009472.0, "30": 404009472.0, "35": 404009472.0, "40": 404009472.0, "45": 404009472.0, "50": 404009472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3451528704.0, "5": 3595407360.0, "10": 3595407360.0, "15": 3595407360.0, "20": 3595407360.0, "25": 3595407360.0, "30": 3595407360.0, "35": 3595407360.0, "40": 3595407360.0, "45": 3595407360.0, "50": 3595407360.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 13.11679, "5": 0.80064, "10": 0.80087, "15": 0.97298, "20": 0.79677, "25": 0.91733, "30": 0.96384, "35": 0.79662, "40": 0.79439, "45": 0.79405, "50": 0.79352}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml 
b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml index 22f816c..840184f 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G/model_config.yaml @@ -22,8 +22,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -52,4 +52,5 @@ MODEL_ARGS: --img-w: 336 --patch-dim: 14 --mock-data: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_dev.json index a2ef225..fe06d99 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [7.96777, 0.62507, 0.62176, 0.62042, 0.62061, 0.62067, 0.62001, 0.61924, 0.61823, 0.6178]}, "forward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.04896, 0.30356, 0.30062, 0.29886, 0.29955, 0.29936, 0.29825, 0.29839, 0.2968, 0.29625]}, "backward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.99454, 0.28657, 0.28691, 0.28667, 0.28654, 0.28672, 0.28654, 0.2861, 0.28657, 0.28683]}, "batch-generator-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.3938, 0.01749, 0.01695, 0.01841, 0.01751, 0.01736, 0.01792, 0.01739, 0.01667, 0.01628]}, "forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.32161, 0.03012, 0.02986, 0.02994, 0.02968, 0.02964, 0.03016, 0.02977, 0.02991, 0.02985]}, "forward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.53192, 0.00018, 0.00018, 0.00018, 0.00019, 0.0002, 0.00019, 0.00019, 0.00019, 0.00018]}, "backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.07283, 0.07198, 0.07135, 0.07044, 0.07023, 0.07085, 0.07065, 0.07057, 0.0704, 0.07021]}, "backward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00023, 0.00029, 0.0002, 0.00027, 0.00027, 0.00032, 0.00032, 0.00028, 0.00027, 0.00021]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.46399, 0.30175, 0.30094, 0.29597, 0.29703, 0.29641, 0.2959, 0.29432, 0.29344, 0.29317]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.82172, 0.00243, 0.00247, 0.00234, 0.00236, 0.00228, 0.0023, 0.00235, 0.00232, 0.00233]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [4e-05, 3e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 50, 
"step_interval": 5, "values": [7e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.16382, 0.00025, 0.00025, 0.00025, 0.00024, 0.00024, 0.00024, 0.00024, 0.00023, 0.00026]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [7e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05, 5e-05]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.83319, 0.00053, 0.00052, 0.00044, 0.00052, 0.00043, 0.00043, 0.00043, 0.00043, 0.00043]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00895, 0.00069, 0.00069, 0.00068, 0.00069, 0.00069, 0.00068, 0.00068, 0.00068, 0.00069]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00119, 0.00025, 0.00024, 0.00023, 0.00023, 0.00025, 0.00024, 0.00024, 0.00024, 0.00025]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00014, 9e-05, 9e-05, 8e-05, 8e-05, 9e-05, 9e-05, 8e-05, 9e-05, 9e-05]}, "optimizer-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.84455, 0.00225, 0.00226, 0.00214, 0.00221, 0.00216, 0.00214, 0.00213, 0.00214, 0.00214]}, "learning-rate": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "learning-rate vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "batch-size": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19947, 9.20335, 9.20248, 9.19723, 9.19172, 9.18973, 9.18517, 9.17532, 9.17374, 9.1609]}, "lm loss vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19947, 9.20335, 9.20248, 9.19723, 9.19172, 9.18973, 9.18517, 9.17532, 9.17374, 9.1609]}, "loss-scale": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93277, 1.00171, 1.00056, 0.944, 1.16867, 0.98576, 0.91686, 0.9042, 0.83078, 0.88219]}, "grad-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93277, 1.00171, 1.00056, 0.944, 1.16867, 0.98576, 0.91686, 0.9042, 0.83078, 0.88219]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [106.0, 114.0, 108.0, 110.0, 81.0, 105.0, 85.0, 109.0, 146.0, 122.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [106.0, 114.0, 108.0, 110.0, 81.0, 105.0, 85.0, 109.0, 146.0, 122.0]}, "params-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67851, 35.6785, 35.67848, 35.67848]}, "params-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 
35.67851, 35.6785, 35.67848, 35.67848]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [8.83079, 0.64044, 0.63692, 0.63516, 0.63554, 0.63541, 0.63471, 0.63399, 0.63285, 0.63245]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.1542]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9.1542]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9454.09668]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9454.09668]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19947, "5": 9.20115, "10": 9.19966, "15": 9.19629, "20": 9.19324, "25": 9.19382, "30": 9.18139, "35": 9.17693, "40": 9.16914, "45": 9.16409, "50": 9.16138}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 106.0, "5": 75.0, "10": 88.0, "15": 114.0, "20": 98.0, "25": 88.0, "30": 92.0, "35": 110.0, "40": 91.0, "45": 114.0, "50": 148.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 116958208.0, "5": 116958208.0, "10": 116958208.0, "15": 116958208.0, "20": 116958208.0, "25": 116958208.0, "30": 116958208.0, "35": 116958208.0, "40": 116958208.0, "45": 116958208.0, "50": 116958208.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3010915328.0, "5": 3011058176.0, "10": 3011058176.0, "15": 3011614720.0, "20": 3011614720.0, "25": 3011615744.0, "30": 3011615744.0, "35": 3011615744.0, "40": 3011615744.0, "45": 3011615744.0, "50": 3011615744.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.37144, "5": 0.65091, "10": 0.6667, "15": 0.65488, "20": 0.64998, "25": 0.65251, "30": 0.65853, "35": 0.6369, "40": 0.63356, "45": 0.63189, "50": 0.63273}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_lts.json index 3c933e0..e7df6f6 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [12.7291, 0.62672, 0.60589, 0.60528, 0.60867, 0.60545, 0.60403, 0.61268, 0.61851, 0.60357]}, "forward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5.56178, 0.30066, 0.28459, 0.28176, 0.28541, 0.27947, 0.28138, 0.28895, 0.29453, 0.28039]}, "backward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.12115, 0.28858, 0.28597, 0.28809, 0.28772, 0.28811, 0.28721, 0.28849, 0.28849, 0.28829]}, "batch-generator-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2.85702, 0.03903, 0.0338, 0.03035, 0.03224, 0.03016, 0.02978, 0.03435, 0.03368, 0.02954]}, "forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5.26228, 0.03127, 0.02963, 0.02987, 0.02952, 0.03226, 0.02962, 0.02934, 0.02956, 
0.02928]}, "forward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2.5072, 0.00017, 0.00015, 0.00018, 0.00016, 0.00015, 0.00015, 0.00015, 0.00017, 0.00015]}, "backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.07163, 0.07147, 0.0696, 0.06982, 0.07399, 0.0702, 0.06973, 0.07326, 0.07023, 0.06973]}, "backward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00026, 0.00021, 0.00019, 0.00019, 0.00019, 0.00018, 0.00019, 0.0002, 0.0002, 0.00019]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [6.16563, 0.28249, 0.27763, 0.28103, 0.27952, 0.28051, 0.2813, 0.28172, 0.29124, 0.28177]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.92523, 0.00228, 0.00214, 0.00215, 0.00226, 0.00213, 0.00217, 0.00235, 0.00224, 0.00219]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [4e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [6e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.19033, 0.00022, 0.00021, 0.00022, 0.00022, 0.00023, 0.00022, 0.00022, 0.00022, 0.00022]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [6e-05, 4e-05, 4e-05, 4e-05, 5e-05, 4e-05, 4e-05, 4e-05, 4e-05, 5e-05]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2.24661, 0.00048, 0.00047, 0.00038, 0.00047, 0.00039, 0.00039, 0.00039, 0.00039, 0.0004]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00926, 0.00069, 0.00062, 0.00063, 0.00063, 0.00063, 0.00062, 0.00063, 0.00062, 0.00062]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00112, 0.0002, 0.0002, 0.00021, 0.00021, 0.00021, 0.00021, 0.00021, 0.00022, 0.00021]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00014, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05, 8e-05]}, "optimizer-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [2.25814, 0.0021, 0.00203, 0.00193, 0.00201, 0.00193, 0.00195, 0.00196, 0.00197, 0.00195]}, "learning-rate": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "learning-rate vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "batch-size": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19948, 9.20339, 9.20246, 9.19721, 9.1917, 9.18976, 9.18512, 9.17531, 9.17379, 9.16091]}, "lm loss vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19948, 9.20339, 9.20246, 9.19721, 9.1917, 9.18976, 9.18512, 9.17531, 9.17379, 9.16091]}, "loss-scale": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93282, 1.00192, 1.00046, 0.94405, 1.16906, 0.98576, 0.91648, 0.90421, 0.83062, 0.8822]}, "grad-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93282, 1.00192, 1.00046, 0.94405, 1.16906, 0.98576, 0.91648, 0.90421, 0.83062, 0.8822]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [103.0, 122.0, 112.0, 97.0, 93.0, 105.0, 109.0, 107.0, 125.0, 130.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [103.0, 122.0, 112.0, 97.0, 93.0, 105.0, 109.0, 107.0, 125.0, 130.0]}, "params-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67849, 35.67848]}, "params-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67849, 35.67848]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [15.00501, 0.64144, 0.62022, 0.6193, 0.62312, 0.61981, 0.61869, 0.62693, 0.63288, 0.61782]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.15419]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.15419]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9453.99707]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9453.99707]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19948, "5": 9.20116, "10": 9.19965, "15": 9.19624, "20": 9.19324, "25": 9.19379, "30": 9.18136, "35": 9.17694, "40": 9.16916, "45": 9.16412, "50": 9.16137}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 103.0, "5": 91.0, "10": 94.0, "15": 116.0, "20": 98.0, "25": 78.0, "30": 78.0, "35": 115.0, "40": 73.0, "45": 119.0, "50": 150.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 116349952.0, "5": 116349952.0, "10": 116349952.0, "15": 116349952.0, "20": 116349952.0, "25": 116349952.0, "30": 116349952.0, "35": 116349952.0, "40": 116349952.0, "45": 116349952.0, "50": 116349952.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3196701696.0, "5": 3196701696.0, "10": 3196701696.0, "15": 3196701696.0, "20": 3196701696.0, "25": 3196701696.0, "30": 3196701696.0, "35": 3196701696.0, "40": 3196701696.0, "45": 3196701696.0, "50": 3196701696.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 12.75881, "5": 0.64545, "10": 0.64259, "15": 0.64082, "20": 0.64129, "25": 0.63622, "30": 0.63893, "35": 0.63774, "40": 0.64187, "45": 0.63672, "50": 0.63606}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/model_config.yaml index e2ef184..0990142 100644 --- 
a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G/model_config.yaml @@ -22,8 +22,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -54,4 +54,5 @@ MODEL_ARGS: --mock-data: true --freeze-ViT: true --freeze-LM: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_dev.json index c4c1cff..c712e52 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.80164, 0.62602, 0.62115, 0.61347, 0.61356, 0.6148, 0.61452, 0.61389, 0.61239, 0.61187]}, "forward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5.14549, 0.30295, 0.29758, 0.29055, 0.29096, 0.29124, 0.29129, 0.2913, 0.29037, 0.28939]}, "backward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.12619, 0.28782, 0.28877, 0.28732, 0.28777, 0.28808, 0.28786, 0.28769, 0.28753, 0.28791]}, "batch-generator-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.29859, 0.02375, 0.02123, 0.01897, 0.01822, 0.01828, 0.01866, 0.01876, 0.01889, 0.01783]}, "forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.69025, 0.02974, 0.02963, 0.03036, 0.03015, 0.03018, 0.03047, 0.03047, 0.03, 0.03017]}, "forward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.06877, 0.00017, 0.00016, 0.00015, 0.00015, 0.00015, 0.00018, 0.00015, 0.00016, 0.00014]}, "backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.07001, 0.07185, 0.07034, 0.07062, 0.07068, 0.07076, 0.07093, 0.07034, 0.07033, 0.07056]}, "backward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00032, 0.00023, 0.00027, 0.00028, 0.00026, 0.0003, 0.00028, 0.00029, 0.00028, 0.00029]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5.90985, 0.29772, 0.29629, 0.28867, 0.29204, 0.29221, 0.29134, 0.28969, 0.29014, 0.29351]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.86713, 0.00263, 0.0025, 0.00238, 0.00246, 0.00238, 0.00237, 0.00259, 0.00243, 0.00254]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 3e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 
3e-05, 3e-05, 3e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.20519, 0.00031, 0.00025, 0.00025, 0.00026, 0.00025, 0.00025, 0.00025, 0.00025, 0.00025]}, "params-all-gather-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00016, 0.00013, 0.00012, 0.00011, 0.00011, 0.00011, 0.00011, 0.00011, 0.00011, 0.00011]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00015, 0.00013, 0.00011, 0.00011, 0.00011, 0.00011, 0.0001, 0.0001, 0.0001, 0.0001]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.01362, 0.00058, 0.00048, 0.00041, 0.00047, 0.0004, 0.0004, 0.00039, 0.0004, 0.0004]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00823, 0.00068, 0.00072, 0.00073, 0.00068, 0.00069, 0.00069, 0.0007, 0.00069, 0.00066]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00098, 0.00026, 0.00023, 0.00023, 0.00025, 0.00023, 0.00023, 0.00024, 0.00024, 0.00023]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00019, 0.00018, 0.00015, 0.00016, 0.00015, 0.00016, 0.00016, 0.00015, 0.00015, 0.00015]}, "optimizer-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.02427, 0.00277, 0.00256, 0.00257, 0.00249, 0.00243, 0.00242, 0.00241, 0.00241, 0.00237]}, "learning-rate": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "learning-rate vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "batch-size": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19947, 9.20335, 9.20248, 9.19723, 9.19172, 9.18973, 9.18517, 9.17532, 9.17374, 9.1609]}, "lm loss vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19947, 9.20335, 9.20248, 9.19723, 9.19172, 9.18973, 9.18517, 9.17532, 9.17374, 9.1609]}, "loss-scale": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93277, 1.00171, 1.00056, 0.944, 1.16867, 0.98576, 0.91686, 0.9042, 0.83078, 0.88219]}, "grad-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93277, 1.00171, 1.00056, 0.944, 1.16867, 0.98576, 0.91686, 0.9042, 0.83078, 0.88219]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [106.0, 114.0, 108.0, 110.0, 81.0, 105.0, 85.0, 109.0, 146.0, 122.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [106.0, 114.0, 108.0, 110.0, 81.0, 105.0, 85.0, 109.0, 146.0, 122.0]}, "params-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67851, 35.6785, 35.67848, 35.67848]}, "params-norm vs 
samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67851, 35.6785, 35.67848, 35.67848]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [11.71205, 0.64203, 0.63681, 0.62887, 0.62867, 0.62983, 0.6294, 0.62857, 0.62698, 0.62637]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.1542]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9.1542]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9454.09668]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [9454.09668]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19947, "5": 9.20115, "10": 9.19966, "15": 9.19629, "20": 9.19324, "25": 9.19382, "30": 9.18139, "35": 9.17693, "40": 9.16914, "45": 9.16409, "50": 9.16138}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 106.0, "5": 75.0, "10": 88.0, "15": 114.0, "20": 98.0, "25": 88.0, "30": 92.0, "35": 110.0, "40": 91.0, "45": 114.0, "50": 148.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 116168192.0, "5": 116168192.0, "10": 116168192.0, "15": 116168192.0, "20": 116168192.0, "25": 116168192.0, "30": 116168192.0, "35": 116168192.0, "40": 116168192.0, "45": 116168192.0, "50": 116168192.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3009881600.0, "5": 3010316800.0, "10": 3010316800.0, "15": 3010316800.0, "20": 3010316800.0, "25": 3010316800.0, "30": 3010389504.0, "35": 3010389504.0, "40": 3010389504.0, "45": 3010389504.0, "50": 3010389504.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 8.79591, "5": 0.63104, "10": 0.63019, "15": 0.62801, "20": 0.62803, "25": 0.63204, "30": 0.62942, "35": 0.63074, "40": 0.63003, "45": 0.62905, "50": 0.62883}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_lts.json index bfdacf1..d817975 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.12533, 0.61523, 0.612, 0.61274, 0.60959, 0.61563, 0.61043, 0.62211, 0.61259, 0.61475]}, "forward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.2886, 0.29298, 0.28952, 0.29035, 0.28755, 0.29301, 0.28608, 0.30023, 0.28978, 0.29236]}, "backward-compute-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.10925, 0.28738, 0.28707, 0.28715, 0.28829, 0.28813, 0.29022, 0.28846, 0.29053, 0.29005]}, "batch-generator-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.76471, 0.01852, 0.01694, 0.02369, 0.02029, 0.01651, 0.01633, 0.02469, 0.01956, 
0.01684]}, "forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3.67666, 0.02972, 0.02965, 0.02942, 0.02811, 0.0288, 0.0288, 0.02849, 0.02832, 0.02838]}, "forward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.9526, 0.00016, 0.00016, 0.00016, 0.00016, 0.00018, 0.00017, 0.00017, 0.00014, 0.00015]}, "backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.07105, 0.07081, 0.07084, 0.07037, 0.06972, 0.07299, 0.06941, 0.06963, 0.07091, 0.07042]}, "backward-send-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00019, 0.0002, 0.00021, 0.00019, 0.0002, 0.00019, 0.00019, 0.00018, 0.00018, 0.00018]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [4.17022, 0.29888, 0.30073, 0.30472, 0.30255, 0.30377, 0.30116, 0.3082, 0.3045, 0.30713]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.89549, 0.00229, 0.00225, 0.00218, 0.00224, 0.00218, 0.00214, 0.00228, 0.00208, 0.00209]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [3e-05, 3e-05, 4e-05, 2e-05, 3e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [5e-05, 3e-05, 5e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.19492, 0.00027, 0.00039, 0.00025, 0.00027, 0.00025, 0.00024, 0.00025, 0.00022, 0.00022]}, "params-all-gather-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00015, 0.0001, 0.00011, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 9e-05, 9e-05]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00013, 0.00011, 0.00011, 0.0001, 0.0001, 0.0001, 0.0001, 0.00011, 9e-05, 9e-05]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.02498, 0.00052, 0.00052, 0.00039, 0.00051, 0.00039, 0.00041, 0.00041, 0.00037, 0.00036]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00735, 0.00064, 0.00064, 0.00064, 0.00063, 0.00065, 0.00068, 0.00065, 0.00065, 0.00065]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00093, 0.00021, 0.00021, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.00018, 0.00018]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.00018, 0.00015, 0.00015, 0.00015, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014, 0.00014]}, "optimizer-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.03475, 0.00249, 0.00249, 0.0023, 0.00258, 0.0023, 0.00234, 0.00235, 0.00223, 0.00223]}, "learning-rate": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "learning-rate vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, "batch-size": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, 
"values": [9.19948, 9.20339, 9.20246, 9.19721, 9.1917, 9.18976, 9.18515, 9.17526, 9.1738, 9.16094]}, "lm loss vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [9.19948, 9.20339, 9.20246, 9.19721, 9.1917, 9.18976, 9.18515, 9.17526, 9.1738, 9.16094]}, "loss-scale": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93282, 1.00192, 1.00046, 0.94405, 1.16906, 0.98576, 0.91623, 0.90401, 0.83116, 0.88246]}, "grad-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [0.93282, 1.00192, 1.00046, 0.94405, 1.16906, 0.98576, 0.91623, 0.90401, 0.83116, 0.88246]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [103.0, 122.0, 112.0, 97.0, 93.0, 105.0, 105.0, 101.0, 126.0, 120.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [103.0, 122.0, 112.0, 97.0, 93.0, 105.0, 105.0, 101.0, 126.0, 120.0]}, "params-norm": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67849, 35.67848]}, "params-norm vs samples": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.67851, 35.6785, 35.67849, 35.67848]}, "iteration-time": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [11.25871, 0.63103, 0.62702, 0.628, 0.62436, 0.6304, 0.62504, 0.63626, 0.62666, 0.62873]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.1542]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9.1542]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9454.09668]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [9454.09668]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 9.19948, "5": 9.20116, "10": 9.19965, "15": 9.19624, "20": 9.19324, "25": 9.19379, "30": 9.18136, "35": 9.1769, "40": 9.16912, "45": 9.16403, "50": 9.16142}}, "num-zeros": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 103.0, "5": 91.0, "10": 94.0, "15": 116.0, "20": 98.0, "25": 78.0, "30": 78.0, "35": 100.0, "40": 79.0, "45": 112.0, "50": 154.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 116038144.0, "5": 116038144.0, "10": 116038144.0, "15": 116038144.0, "20": 116038144.0, "25": 116038144.0, "30": 116038144.0, "35": 116038144.0, "40": 116038144.0, "45": 116038144.0, "50": 116038144.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 3194514432.0, "5": 3195167232.0, "10": 3195167232.0, "15": 3195167232.0, "20": 3195167232.0, "25": 3195167232.0, "30": 3195167232.0, "35": 3195167232.0, "40": 3195167232.0, "45": 3195167232.0, "50": 3195167232.0}}, "iteration-time": {"start_step": 1, "end_step": 50, "step_interval": 5, "values": {"1": 14.04943, "5": 0.65318, "10": 0.6367, "15": 0.63196, "20": 0.63643, "25": 0.63971, "30": 0.63403, "35": 0.63752, "40": 0.6332, "45": 0.63749, "50": 0.6338}}} \ No newline at end of file diff 
--git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/model_config.yaml index 9a40c44..979a950 100644 --- a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G/model_config.yaml @@ -22,8 +22,8 @@ MODEL_ARGS: --train-iters: 50 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -55,4 +55,5 @@ MODEL_ARGS: --freeze-ViT: true --freeze-LM: true --use-distributed-optimizer: true + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_dev.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_dev.json new file mode 100644 index 0000000..5740bc5 --- /dev/null +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.19864, "5": 9.19691, "10": 9.19094, "15": 9.17523, "20": 9.13891, "25": 9.10449, "30": 9.03731, "35": 8.99499, "40": 8.91463, "45": 8.85894, "50": 8.80907, "55": 8.65925, "60": 8.57684, "65": 8.46083, "70": 8.3497, "75": 8.19176, "80": 8.10062, "85": 7.95999, "90": 7.84979, "95": 7.71733, "100": 7.61477}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3717664.0, "5": 4023697.0, "10": 3784077.0, "15": 3660641.0, "20": 3705933.0, "25": 3697193.0, "30": 3947711.0, "35": 3672071.0, "40": 3854594.0, "45": 3692583.0, "50": 3652269.0, "55": 3911052.0, "60": 3663335.0, "65": 3703672.0, "70": 3641289.0, "75": 3634834.0, "80": 3621572.0, "85": 3637171.0, "90": 3942162.0, "95": 3769981.0, "100": 3678054.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 404019712.0, "5": 404019712.0, "10": 404019712.0, "15": 404019712.0, "20": 404019712.0, "25": 404019712.0, "30": 404019712.0, "35": 404019712.0, "40": 404019712.0, "45": 404019712.0, "50": 404019712.0, "55": 404019712.0, "60": 404019712.0, "65": 404019712.0, "70": 404019712.0, "75": 404019712.0, "80": 404019712.0, "85": 404019712.0, "90": 404019712.0, "95": 404019712.0, "100": 404019712.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3452275712.0, "5": 3594629120.0, "10": 3594629120.0, "15": 3594629120.0, "20": 3595052544.0, "25": 3595052544.0, "30": 3595052544.0, "35": 3595052544.0, "40": 3595052544.0, "45": 3595052544.0, "50": 3595052544.0, "55": 3595052544.0, "60": 3595052544.0, "65": 3595052544.0, "70": 3595052544.0, "75": 3595052544.0, "80": 3595052544.0, "85": 3595052544.0, "90": 3595052544.0, "95": 3595052544.0, "100": 3595052544.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.8053, 
"5": 0.79659, "10": 0.78696, "15": 0.83282, "20": 0.79092, "25": 0.76982, "30": 1.0144, "35": 0.76992, "40": 0.77028, "45": 0.7698, "50": 0.77022, "55": 0.7708, "60": 0.77151, "65": 0.7711, "70": 0.77074, "75": 0.77119, "80": 0.7701, "85": 0.77011, "90": 0.77187, "95": 0.78677, "100": 0.7737}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_lts.json b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_lts.json new file mode 100644 index 0000000..9d8dc7a --- /dev/null +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 9.19864, "5": 9.19687, "10": 9.19097, "15": 9.17523, "20": 9.13889, "25": 9.10451, "30": 9.03733, "35": 8.99498, "40": 8.91464, "45": 8.85892, "50": 8.80904, "55": 8.65932, "60": 8.57683, "65": 8.46077, "70": 8.34959, "75": 8.19143, "80": 8.10051, "85": 7.95991, "90": 7.85011, "95": 7.71773, "100": 7.61557}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3717564.0, "5": 4023735.0, "10": 3783982.0, "15": 3660711.0, "20": 3705787.0, "25": 3697298.0, "30": 3947821.0, "35": 3672132.0, "40": 3854582.0, "45": 3692819.0, "50": 3652068.0, "55": 3910921.0, "60": 3663183.0, "65": 3703742.0, "70": 3641287.0, "75": 3634855.0, "80": 3621561.0, "85": 3637264.0, "90": 3942100.0, "95": 3769819.0, "100": 3677905.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 404009472.0, "5": 404009472.0, "10": 404009472.0, "15": 404009472.0, "20": 404009472.0, "25": 404009472.0, "30": 404009472.0, "35": 404009472.0, "40": 404009472.0, "45": 404009472.0, "50": 404009472.0, "55": 404009472.0, "60": 404009472.0, "65": 404009472.0, "70": 404009472.0, "75": 404009472.0, "80": 404009472.0, "85": 404009472.0, "90": 404009472.0, "95": 404009472.0, "100": 404009472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 3452274688.0, "5": 3594947584.0, "10": 3595075584.0, "15": 3595075584.0, "20": 3595075584.0, "25": 3595075584.0, "30": 3595077120.0, "35": 3595077632.0, "40": 3595077632.0, "45": 3595077632.0, "50": 3595077632.0, "55": 3595077632.0, "60": 3595077632.0, "65": 3595077632.0, "70": 3595077632.0, "75": 3595077632.0, "80": 3595077632.0, "85": 3595077632.0, "90": 3595077632.0, "95": 3595077632.0, "100": 3595077632.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.13332, "5": 0.78933, "10": 0.77798, "15": 0.99663, "20": 0.7783, "25": 0.99515, "30": 0.77887, "35": 0.77873, "40": 0.77334, "45": 0.77259, "50": 0.77616, "55": 0.77484, "60": 0.77463, "65": 0.77478, "70": 0.78279, "75": 0.7746, "80": 0.77489, "85": 0.77824, "90": 0.77607, "95": 0.77845, "100": 0.77538}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml index 4a829ac..fc0b193 100644 --- 
a/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml +++ b/tests/functional_tests/test_cases/multimodal-llava/multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G/model_config.yaml @@ -22,8 +22,8 @@ MODEL_ARGS: --train-iters: 100 --timing-log-level: 2 --lr-decay-iters: 320000 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --split: 949,50,1 --tokenizer-type: NullTokenizer --vocab-size: 8192 @@ -53,4 +53,5 @@ MODEL_ARGS: --img-w: 336 --patch-dim: 14 --mock-data: true + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json index 57cec73..6164642 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [15.71288, 0.61814, 0.60061, 0.609, 0.60606, 0.59974, 0.60053, 0.59718, 0.59636, 0.5993, 0.59616, 0.5993, 0.60208, 0.59842, 0.59448, 0.59772, 0.59415, 0.59624, 0.59651, 0.5939]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.12459, 0.22962, 0.23245, 0.23195, 0.2326, 0.23265, 0.23278, 0.23264, 0.23178, 0.23401, 0.23274, 0.23172, 0.23112, 0.23126, 0.23154, 0.23126, 0.23103, 0.23016, 0.23056, 0.2307]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.75709, 0.24327, 0.23169, 0.23456, 0.23046, 0.23375, 0.23087, 0.2308, 0.23214, 0.23045, 0.23106, 0.23154, 0.23148, 0.2296, 0.23124, 0.23083, 0.23167, 0.23065, 0.23137, 0.23138]}, "forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5.98096, 0.06178, 0.06132, 0.06307, 0.06477, 0.06243, 0.06383, 0.06234, 0.06107, 0.06323, 0.06113, 0.06283, 0.06447, 0.06275, 0.06124, 0.06359, 0.06095, 0.06391, 0.06239, 0.0601]}, "forward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.46683, 0.00046, 0.00053, 0.00048, 0.00057, 0.00042, 0.00051, 0.00053, 0.00042, 0.00054, 0.00044, 0.00051, 0.00053, 0.00042, 0.00076, 0.00043, 0.00042, 0.00051, 0.00053, 0.00051]}, "backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.12574, 0.1199, 0.11997, 0.12137, 0.12141, 0.12166, 0.12187, 0.12333, 0.12271, 0.12397, 0.12208, 0.12564, 0.12261, 0.12247, 0.12167, 0.1226, 0.12277, 0.12102, 0.12155, 0.12196]}, "backward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00058, 0.00051, 0.00055, 0.00049, 0.00052, 0.0005, 0.00055, 0.00054, 0.00056, 0.0005, 0.00049, 0.00056, 0.0005, 0.00055, 0.00056, 0.00056, 0.00057, 0.00055, 0.00055, 0.00055]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.64124, 0.21304, 0.19661, 0.2004, 0.20279, 0.21188, 0.21084, 0.20759, 0.20948, 0.20864, 0.20899, 0.21203, 0.20325, 0.1982, 0.20653, 0.21049, 0.2105, 0.20347, 0.20699, 0.20667]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.27348, 0.0208, 0.00376, 0.01105, 0.00428, 0.00581, 0.00423, 0.00361, 0.00435, 0.00393, 0.00433, 
0.00662, 0.00407, 0.00384, 0.00455, 0.00466, 0.00417, 0.00513, 0.00494, 0.00456]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 2e-05, 3e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.36384, 0.00053, 0.00053, 0.00052, 0.00053, 0.00053, 0.00053, 0.00052, 0.00052, 0.00052, 0.00054, 0.00054, 0.00052, 0.00053, 0.00052, 0.00053, 0.00052, 0.00051, 0.00053, 0.00051]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.35375, 0.00038, 0.00043, 0.00041, 0.00041, 0.0004, 0.00043, 0.00038, 0.00038, 0.00041, 0.00038, 0.00043, 0.00032, 0.00033, 0.00033, 0.00037, 0.00038, 0.00036, 0.00037, 0.00037]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0004, 0.00033, 0.00032, 0.00035, 0.00033, 0.00031, 0.00031, 0.00032, 0.00033, 0.00032, 0.00033, 0.00032, 0.00032, 0.00031, 0.00031, 0.00032, 0.0003, 0.0003, 0.0003, 0.0003]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.70516, 0.00125, 0.00124, 0.00125, 0.00126, 0.00121, 0.00122, 0.00122, 0.00123, 0.00122, 0.00126, 0.00125, 0.00124, 0.00119, 0.00128, 0.0012, 0.00121, 0.00122, 0.00125, 0.00124]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01732, 0.00791, 0.00778, 0.00782, 0.00776, 0.00784, 0.00778, 0.00777, 0.00777, 0.00789, 0.00777, 0.00776, 0.00774, 0.00776, 0.00787, 0.00778, 0.00785, 0.00775, 0.00775, 0.00781]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01232, 0.00107, 0.00103, 0.00105, 0.00103, 0.00104, 0.00103, 0.00105, 0.00103, 0.00104, 0.00103, 0.00104, 0.00103, 0.00103, 0.00104, 0.00104, 0.00103, 0.00104, 0.00103, 0.00104]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00143, 0.00103, 0.00099, 0.00099, 0.00099, 0.00099, 0.00098, 0.00099, 0.00099, 0.00099, 0.00098, 0.00098, 0.00099, 0.00099, 0.00104, 0.001, 0.00099, 0.00098, 0.00098, 0.00099]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.73804, 0.01225, 0.01201, 0.01214, 0.01201, 0.01205, 0.01198, 0.012, 0.012, 0.01212, 0.01203, 0.01202, 0.01198, 0.01192, 0.01221, 0.01199, 0.01202, 0.01192, 0.01194, 0.01204]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20437, 8.6213, 8.34434, 8.0846, 7.96908, 
7.68085, 7.3943, 7.2612, 7.19123, 7.30996, 7.16658, 7.0596, 6.99443, 6.85568, 6.93181, 6.95482, 7.02465, 6.66523, 6.93912]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20437, 8.6213, 8.34434, 8.0846, 7.96908, 7.68085, 7.3943, 7.2612, 7.19123, 7.30996, 7.16658, 7.0596, 6.99443, 6.85568, 6.93181, 6.95482, 7.02465, 6.66523, 6.93912]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.98993, 3.27236, 2.61222, 2.39606, 1.99737, 1.81218, 1.91449, 1.62396, 1.50901, 1.16214, 1.3245, 1.20365, 1.10605, 1.5131, 2.1239, 1.65989, 1.41738, 2.05605, 1.27075]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.98993, 3.27236, 2.61222, 2.39606, 1.99737, 1.81218, 1.91449, 1.62396, 1.50901, 1.16214, 1.3245, 1.20365, 1.10605, 1.5131, 2.1239, 1.65989, 1.41738, 2.05605, 1.27075]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117083.0, 112366.0, 118719.0, 116953.0, 111389.0, 114012.0, 118474.0, 116947.0, 111514.0, 115608.0, 108500.0, 119951.0, 115760.0, 116926.0, 119844.0, 120384.0, 121401.0, 118454.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117083.0, 112366.0, 118719.0, 116953.0, 111389.0, 114012.0, 118474.0, 116947.0, 111514.0, 115608.0, 108500.0, 119951.0, 115760.0, 116926.0, 119844.0, 120384.0, 121401.0, 118454.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64526, 309.72028, 309.80237, 309.88846, 309.97403, 310.056, 310.13495, 310.2077, 310.27109, 310.32544, 310.37173, 310.40884, 310.43594, 310.45645, 310.47226, 310.48434]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48447, 309.52603, 309.57944, 309.64526, 309.72028, 309.80237, 309.88846, 309.97403, 310.056, 310.13495, 310.2077, 310.27109, 310.32544, 310.37173, 310.40884, 310.43594, 310.45645, 310.47226, 310.48434]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [16.47856, 0.644, 0.62616, 0.63468, 0.63159, 0.62541, 0.626, 0.62264, 0.62187, 0.62505, 0.62162, 0.62466, 0.62765, 0.62375, 0.62026, 0.62331, 0.61955, 0.62155, 0.62176, 0.61929]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86562]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [6.86562]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [958.74249]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [958.74249]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41485, "5": 9.19075, "10": 8.80844, "15": 8.44371, "20": 8.04899, "25": 7.76059, "30": 7.70646, "35": 7.53725, "40": 7.39383, "45": 7.2546, "50": 7.08118, "55": 7.11611, "60": 7.09855, "65": 6.94952, "70": 7.03692, "75": 7.03624, "80": 6.91302, "85": 6.80843, "90": 7.22786, 
"95": 6.81498, "100": 6.95214}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119929.0, "10": 119903.0, "15": 116950.0, "20": 118681.0, "25": 114089.0, "30": 117114.0, "35": 115664.0, "40": 115637.0, "45": 115768.0, "50": 115850.0, "55": 116809.0, "60": 111657.0, "65": 118477.0, "70": 120201.0, "75": 118494.0, "80": 111311.0, "85": 117070.0, "90": 118471.0, "95": 117004.0, "100": 114091.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 805655040.0, "15": 806441472.0, "20": 805655040.0, "25": 806441472.0, "30": 805655040.0, "35": 806441472.0, "40": 805655040.0, "45": 806441472.0, "50": 805655040.0, "55": 806441472.0, "60": 805655040.0, "65": 806441472.0, "70": 805655040.0, "75": 806441472.0, "80": 805655040.0, "85": 806441472.0, "90": 805655040.0, "95": 806441472.0, "100": 805655040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1007401984.0, "10": 1007401984.0, "15": 1007401984.0, "20": 1007401984.0, "25": 1007401984.0, "30": 1007401984.0, "35": 1007401984.0, "40": 1007401984.0, "45": 1007401984.0, "50": 1007401984.0, "55": 1007401984.0, "60": 1007401984.0, "65": 1007401984.0, "70": 1007401984.0, "75": 1007401984.0, "80": 1007401984.0, "85": 1007401984.0, "90": 1007401984.0, "95": 1007401984.0, "100": 1007401984.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.99034, "5": 0.65406, "10": 0.64135, "15": 0.60161, "20": 0.60305, "25": 0.61369, "30": 0.60501, "35": 0.61134, "40": 0.60199, "45": 0.61383, "50": 0.60176, "55": 0.59845, "60": 0.59931, "65": 0.6009, "70": 0.60353, "75": 0.60406, "80": 0.60047, "85": 0.61302, "90": 0.60355, "95": 0.60358, "100": 0.64022}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index dbe2095..ff893be 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [19.90333, 0.58856, 0.59469, 0.58216, 0.59341, 0.57994, 0.58185, 0.5789, 0.57607, 0.58, 0.58007, 0.5753, 0.58464, 0.58037, 0.57413, 0.57523, 0.57405, 0.58554, 0.60294, 0.58005]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.42353, 0.2341, 0.23716, 0.23094, 0.23623, 0.22774, 0.22931, 0.22826, 0.22425, 0.22847, 0.22935, 0.22676, 0.23322, 0.22908, 0.22555, 0.22469, 0.22599, 0.22742, 0.25133, 0.2259]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.95079, 0.22368, 0.2273, 0.22252, 0.22476, 0.22289, 0.22216, 0.22126, 0.22084, 0.22183, 0.22121, 0.22178, 0.22286, 0.22446, 0.22459, 0.22527, 0.22402, 0.22983, 0.22118, 0.22371]}, "forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [8.01714, 0.06124, 0.06125, 0.0607, 0.06434, 0.06119, 0.06293, 0.06164, 0.06064, 0.06042, 0.06086, 0.06143, 0.06321, 0.06163, 0.05988, 0.0612, 0.05934, 0.06152, 0.06486, 0.05962]}, "forward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.40091, 0.00043, 0.00062, 
0.00053, 0.00045, 0.00042, 0.00068, 0.00049, 0.00045, 0.00043, 0.00058, 0.00043, 0.00053, 0.00043, 0.00056, 0.00042, 0.00042, 0.00044, 0.00042, 0.00055]}, "backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.11724, 0.11466, 0.11811, 0.11163, 0.11217, 0.11093, 0.11231, 0.11875, 0.11788, 0.11954, 0.11946, 0.11548, 0.11898, 0.11974, 0.11993, 0.11865, 0.12113, 0.11927, 0.12228, 0.1208]}, "backward-send-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00051, 0.00051, 0.0005, 0.00066, 0.00066, 0.00056, 0.00055, 0.00046, 0.00064, 0.00048, 0.00047, 0.00048, 0.00046, 0.00045, 0.00045, 0.00043, 0.00046, 0.00046, 0.00047, 0.00043]}, "forward-send-backward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [13.497, 0.20707, 0.2087, 0.20974, 0.2204, 0.21082, 0.21043, 0.20604, 0.20439, 0.20846, 0.20868, 0.20842, 0.2171, 0.21065, 0.20419, 0.20475, 0.2067, 0.21521, 0.22812, 0.2131]}, "backward-send-forward-recv-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4.98676, 0.02107, 0.02298, 0.01837, 0.01578, 0.01755, 0.01567, 0.01438, 0.01344, 0.01755, 0.01789, 0.01555, 0.01944, 0.01458, 0.01433, 0.01406, 0.01503, 0.01809, 0.03277, 0.01271]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 3e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.46106, 0.00051, 0.00051, 0.00052, 0.00051, 0.00052, 0.00051, 0.00051, 0.00051, 0.00062, 0.00051, 0.00053, 0.00051, 0.00051, 0.00052, 0.00051, 0.00051, 0.00059, 0.00051, 0.00063]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.40205, 0.00032, 0.00032, 0.00035, 0.00031, 0.00037, 0.00031, 0.0003, 0.00038, 0.00034, 0.00031, 0.00046, 0.00035, 0.00036, 0.00035, 0.00031, 0.00034, 0.00031, 0.00031, 0.0003]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00038, 0.00032, 0.00032, 0.00031, 0.00032, 0.0003, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.00032, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.00031, 0.0003, 0.00031]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.12765, 0.00122, 0.00122, 0.00122, 0.0012, 0.00121, 0.00121, 0.00121, 0.00123, 0.0012, 0.00121, 0.00137, 0.00125, 0.00125, 0.00126, 0.00124, 0.00127, 0.00121, 0.0012, 0.00122]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01111, 0.00722, 0.0072, 0.00709, 0.0071, 0.00708, 0.0071, 0.0071, 0.00715, 0.00709, 0.00708, 0.00888, 0.00709, 0.00704, 0.00711, 0.00709, 0.00705, 0.00716, 0.00716, 0.00707]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00991, 0.00103, 0.00104, 0.00103, 0.00103, 0.00103, 0.00101, 0.00102, 0.00103, 0.00102, 0.00103, 0.00105, 0.00103, 0.00103, 0.00102, 0.00102, 0.00103, 0.00103, 0.00102, 0.00102]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00112, 0.00098, 0.00098, 0.00098, 0.00098, 0.00097, 0.00097, 0.00097, 0.00097, 0.00097, 0.00098, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097, 0.00098, 0.00097, 0.00097]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.15127, 0.01146, 
0.01139, 0.01122, 0.01123, 0.01123, 0.01121, 0.01121, 0.01131, 0.01118, 0.0112, 0.01322, 0.01125, 0.01119, 0.01128, 0.01123, 0.01122, 0.01127, 0.01125, 0.01118]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20435, 8.6213, 8.34427, 8.08473, 7.96923, 7.68106, 7.39444, 7.26111, 7.19106, 7.31002, 7.16668, 7.05964, 6.99445, 6.85574, 6.93197, 6.95538, 7.0248, 6.66527, 6.93928]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.41485, 9.20435, 8.6213, 8.34427, 8.08473, 7.96923, 7.68106, 7.39444, 7.26111, 7.19106, 7.31002, 7.16668, 7.05964, 6.99445, 6.85574, 6.93197, 6.95538, 7.0248, 6.66527, 6.93928]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.9898, 3.27355, 2.61215, 2.39606, 1.99744, 1.81243, 1.91693, 1.62391, 1.50884, 1.1615, 1.33045, 1.20489, 1.10832, 1.51113, 2.13636, 1.66573, 1.41358, 2.06016, 1.27144]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [31.51805, 2.9898, 3.27355, 2.61215, 2.39606, 1.99744, 1.81243, 1.91693, 1.62391, 1.50884, 1.1615, 1.33045, 1.20489, 1.10832, 1.51113, 2.13636, 1.66573, 1.41358, 2.06016, 1.27144]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117061.0, 112406.0, 118709.0, 116945.0, 111380.0, 114030.0, 118469.0, 116944.0, 111511.0, 115606.0, 108490.0, 119961.0, 115771.0, 116922.0, 119839.0, 120381.0, 121405.0, 118441.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [115733.0, 111077.0, 117061.0, 112406.0, 118709.0, 116945.0, 111380.0, 114030.0, 118469.0, 116944.0, 111511.0, 115606.0, 108490.0, 119961.0, 115771.0, 116922.0, 119839.0, 120381.0, 121405.0, 118441.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48444, 309.52603, 309.57944, 309.64526, 309.72025, 309.80234, 309.88849, 309.97403, 310.056, 310.13495, 310.20767, 310.27103, 310.32535, 310.3717, 310.40875, 310.43588, 310.45633, 310.47214, 310.48419]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [309.46707, 309.48444, 309.52603, 309.57944, 309.64526, 309.72025, 309.80234, 309.88849, 309.97403, 310.056, 
310.13495, 310.20767, 310.27103, 310.32535, 310.3717, 310.40875, 310.43588, 310.45633, 310.47214, 310.48419]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.07582, 0.61292, 0.61886, 0.60601, 0.61744, 0.60406, 0.60575, 0.60271, 0.60001, 0.60403, 0.60393, 0.60127, 0.6086, 0.60424, 0.59816, 0.59917, 0.59804, 0.60976, 0.62704, 0.60404]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86596]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.86596]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [959.06805]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [959.06805]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41485, "5": 9.19081, "10": 8.80859, "15": 8.44364, "20": 8.04915, "25": 7.76061, "30": 7.70656, "35": 7.53748, "40": 7.39383, "45": 7.25459, "50": 7.08122, "55": 7.11641, "60": 7.09868, "65": 6.94978, "70": 7.03669, "75": 7.0363, "80": 6.91331, "85": 6.80899, "90": 7.22851, "95": 6.81485, "100": 6.95211}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119920.0, "10": 119892.0, "15": 116943.0, "20": 118653.0, "25": 114075.0, "30": 117125.0, "35": 115699.0, "40": 115651.0, "45": 115762.0, "50": 115843.0, "55": 116815.0, "60": 111626.0, "65": 118485.0, "70": 120225.0, "75": 118486.0, "80": 111289.0, "85": 117090.0, "90": 118475.0, "95": 117014.0, "100": 114103.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 805655040.0, "15": 806441472.0, "20": 805655040.0, "25": 806441472.0, "30": 805655040.0, "35": 806441472.0, "40": 805655040.0, "45": 806441472.0, "50": 805655040.0, "55": 806441472.0, "60": 805655040.0, "65": 806441472.0, "70": 805655040.0, "75": 806441472.0, "80": 805655040.0, "85": 806441472.0, "90": 805655040.0, "95": 806441472.0, "100": 805655040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1007401984.0, "10": 1007401984.0, "15": 1007401984.0, "20": 1007401984.0, "25": 1007401984.0, "30": 1007401984.0, "35": 1007401984.0, "40": 1007401984.0, "45": 1007401984.0, "50": 1007401984.0, "55": 1007401984.0, "60": 1007401984.0, "65": 1007401984.0, "70": 1007401984.0, "75": 1007401984.0, "80": 1007401984.0, "85": 1007401984.0, "90": 1007401984.0, "95": 1007401984.0, "100": 1007401984.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22.00903, "5": 0.60125, "10": 0.59706, "15": 0.59623, "20": 0.59871, "25": 0.59479, "30": 0.59163, "35": 0.59364, "40": 0.60899, "45": 0.60258, "50": 0.59837, "55": 0.59292, "60": 0.5965, "65": 0.58944, "70": 0.5896, "75": 0.66514, "80": 0.66123, "85": 0.65752, "90": 0.67162, "95": 0.61135, "100": 0.60224}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 8be8140..a2b1971 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: 
--tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --deterministic-mode: true --ckpt-format: torch --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..f25c831 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41485, "5": 9.19075, "10": 8.80844, "15": 8.44371, "20": 8.04899, "25": 7.76059, "30": 7.70646, "35": 7.53725, "40": 7.39383, "45": 7.2546, "50": 7.08118, "55": 7.11611, "60": 7.09855, "65": 6.94952, "70": 7.03692, "75": 7.03624, "80": 6.91302, "85": 6.80843, "90": 7.22786, "95": 6.81498, "100": 6.95214}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119929.0, "10": 119903.0, "15": 116950.0, "20": 118681.0, "25": 114089.0, "30": 117114.0, "35": 115664.0, "40": 115637.0, "45": 115768.0, "50": 115850.0, "55": 116809.0, "60": 111657.0, "65": 118477.0, "70": 120201.0, "75": 118494.0, "80": 111311.0, "85": 117070.0, "90": 118471.0, "95": 117004.0, "100": 114091.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 805655040.0, "15": 806441472.0, "20": 805655040.0, "25": 806441472.0, "30": 805655040.0, "35": 806441472.0, "40": 805655040.0, "45": 806441472.0, "50": 805655040.0, "55": 806441472.0, "60": 805655040.0, "65": 806441472.0, "70": 805655040.0, "75": 806441472.0, "80": 805655040.0, "85": 806441472.0, "90": 805655040.0, "95": 806441472.0, "100": 805655040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1007401984.0, "10": 1007401984.0, "15": 1007401984.0, "20": 1007401984.0, "25": 1007401984.0, "30": 1007401984.0, "35": 1007401984.0, "40": 1007401984.0, "45": 1007401984.0, "50": 1007401984.0, "55": 1007401984.0, "60": 1007401984.0, "65": 1007401984.0, "70": 1007401984.0, "75": 1007401984.0, "80": 1007401984.0, "85": 1007401984.0, "90": 1007401984.0, "95": 1007401984.0, "100": 1007401984.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 16.8302, "5": 0.67558, "10": 0.6039, "15": 0.5968, "20": 0.61642, "25": 0.63407, "30": 0.61435, "35": 0.61458, "40": 0.61232, "45": 0.61118, "50": 0.61079, "55": 0.60608, "60": 0.60591, "65": 0.60635, "70": 0.62838, "75": 0.61795, "80": 0.62067, "85": 0.62437, "90": 0.62433, "95": 0.61953, "100": 0.62907}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..4553fa8 --- /dev/null +++ 
b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41485, "5": 9.19081, "10": 8.80859, "15": 8.44364, "20": 8.04915, "25": 7.76061, "30": 7.70656, "35": 7.53748, "40": 7.39383, "45": 7.25459, "50": 7.08122, "55": 7.11641, "60": 7.09868, "65": 6.94978, "70": 7.03669, "75": 7.0363, "80": 6.91331, "85": 6.80899, "90": 7.22851, "95": 6.81485, "100": 6.95211}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119920.0, "10": 119892.0, "15": 116943.0, "20": 118653.0, "25": 114075.0, "30": 117125.0, "35": 115699.0, "40": 115651.0, "45": 115762.0, "50": 115843.0, "55": 116815.0, "60": 111626.0, "65": 118485.0, "70": 120225.0, "75": 118486.0, "80": 111289.0, "85": 117090.0, "90": 118475.0, "95": 117014.0, "100": 114103.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 806441472.0, "15": 806441472.0, "20": 806441472.0, "25": 806441472.0, "30": 806441472.0, "35": 806441472.0, "40": 806441472.0, "45": 806441472.0, "50": 806441472.0, "55": 806441472.0, "60": 806441472.0, "65": 806441472.0, "70": 806441472.0, "75": 806441472.0, "80": 806441472.0, "85": 806441472.0, "90": 806441472.0, "95": 806441472.0, "100": 806441472.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 22.50457, "5": 0.62358, "10": 0.62443, "15": 0.61172, "20": 0.62772, "25": 0.61857, "30": 0.61968, "35": 0.65588, "40": 0.61567, "45": 0.61562, "50": 0.6303, "55": 0.61102, "60": 0.6098, "65": 0.6321, "70": 0.61503, "75": 0.61512, "80": 0.61688, "85": 0.62202, "90": 0.62148, "95": 0.64227, "100": 0.62097}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/model_config.yaml new file mode 100644 index 0000000..617278d --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -0,0 +1,56 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + NCCL_ALGO: ^NVLS + CUBLAS_WORKSPACE_CONFIG: :4096:8 +MODEL_ARGS: + --encoder-num-layers: 12 + --decoder-num-layers: 12 + --hidden-size: 768 + --num-attention-heads: 12 + --kv-channels: 64 + --ffn-hidden-size: 3072 + --encoder-seq-length: 512 + --decoder-seq-length: 128 + --max-position-embeddings: 512 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --micro-batch-size: 4 + --global-batch-size: 32 + --lr: 0.0001 + --train-iters: 100 + --lr-decay-iters: 100 + --lr-decay-style: linear + --min-lr: 0.00001 + --weight-decay: 1e-2 + --lr-warmup-fraction: .01 + --clip-grad: 1.0 + --bf16: true + --vocab-extra-ids: 100 + --init-method-std: 0.015 + --transformer-impl: transformer_engine + --data-path: ${DATA_PATH}/my-t5_00_text_document + --vocab-file: ${DATA_PATH}/bert-large-cased-vocab.txt + --tokenizer-type: BertWordPieceCase + --calculate-per-token-loss: true + --split: 99982,9,9 + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --tensorboard-dir: ${TENSORBOARD_PATH} + --log-params-norm: true + --log-num-zeros-in-grad: true + --log-validation-ppl-to-tensorboard: true + --log-timers-to-tensorboard: true + 
--timing-log-level: 2 + --log-interval: 1 + --save-interval: 50 + --eval-interval: 1000 + --eval-iters: 10 + --distributed-backend: nccl + --data-cache-path: ${DATA_CACHE_PATH} + --encoder-pipeline-model-parallel-size: 2 + --deterministic-mode: true + --ckpt-format: torch + --attention-backend: unfused + --log-memory-to-tensorboard: true +TEST_TYPE: frozen-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..492f7f4 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41485, "5": 9.19075, "10": 8.80844, "15": 8.44371, "20": 8.04899, "25": 7.76059, "30": 7.70646, "35": 7.53725, "40": 7.39383, "45": 7.2546, "50": 7.08118, "55": 7.11611, "60": 7.09855, "65": 6.94952, "70": 7.03692, "75": 7.03624, "80": 6.91302, "85": 6.80843, "90": 7.22786, "95": 6.81498, "100": 6.95214}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119929.0, "10": 119903.0, "15": 116950.0, "20": 118681.0, "25": 114089.0, "30": 117114.0, "35": 115664.0, "40": 115637.0, "45": 115768.0, "50": 115850.0, "55": 116809.0, "60": 111657.0, "65": 118477.0, "70": 120201.0, "75": 118494.0, "80": 111311.0, "85": 117070.0, "90": 118471.0, "95": 117004.0, "100": 114091.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 805655040.0, "15": 806441472.0, "20": 805655040.0, "25": 806441472.0, "30": 805655040.0, "35": 806441472.0, "40": 805655040.0, "45": 806441472.0, "50": 805655040.0, "55": 806441472.0, "60": 805655040.0, "65": 806441472.0, "70": 805655040.0, "75": 806441472.0, "80": 805655040.0, "85": 806441472.0, "90": 805655040.0, "95": 806441472.0, "100": 805655040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1007401984.0, "10": 1007401984.0, "15": 1007401984.0, "20": 1007401984.0, "25": 1007401984.0, "30": 1007401984.0, "35": 1007401984.0, "40": 1007401984.0, "45": 1007401984.0, "50": 1007401984.0, "55": 1007401984.0, "60": 1007401984.0, "65": 1007401984.0, "70": 1007401984.0, "75": 1007401984.0, "80": 1007401984.0, "85": 1007401984.0, "90": 1007401984.0, "95": 1007401984.0, "100": 1007401984.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 18.52156, "5": 0.68221, "10": 0.60291, "15": 0.60035, "20": 0.60059, "25": 0.61542, "30": 0.59782, "35": 0.60756, "40": 0.60556, "45": 0.60301, "50": 0.60052, "55": 0.61184, "60": 0.59897, "65": 0.60789, "70": 0.59991, "75": 0.59967, "80": 0.6041, "85": 0.60089, "90": 0.60299, "95": 0.60274, "100": 0.60383}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..07a6cef --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, 
"values": {"1": 10.41485, "5": 9.19081, "10": 8.80859, "15": 8.44364, "20": 8.04915, "25": 7.76061, "30": 7.70656, "35": 7.53748, "40": 7.39383, "45": 7.25459, "50": 7.08122, "55": 7.11641, "60": 7.09868, "65": 6.94978, "70": 7.03669, "75": 7.0363, "80": 6.91331, "85": 6.80899, "90": 7.22851, "95": 6.81485, "100": 6.95211}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115733.0, "5": 119920.0, "10": 119892.0, "15": 116943.0, "20": 118653.0, "25": 114075.0, "30": 117125.0, "35": 115699.0, "40": 115651.0, "45": 115762.0, "50": 115843.0, "55": 116815.0, "60": 111626.0, "65": 118485.0, "70": 120225.0, "75": 118486.0, "80": 111289.0, "85": 117090.0, "90": 118475.0, "95": 117014.0, "100": 114103.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 805655040.0, "15": 806441472.0, "20": 805655040.0, "25": 806441472.0, "30": 805655040.0, "35": 806441472.0, "40": 805655040.0, "45": 806441472.0, "50": 805655040.0, "55": 806441472.0, "60": 805655040.0, "65": 806441472.0, "70": 805655040.0, "75": 806441472.0, "80": 805655040.0, "85": 806441472.0, "90": 805655040.0, "95": 806441472.0, "100": 805655040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1007401984.0, "10": 1007401984.0, "15": 1007401984.0, "20": 1007401984.0, "25": 1007401984.0, "30": 1007401984.0, "35": 1007401984.0, "40": 1007401984.0, "45": 1007401984.0, "50": 1007401984.0, "55": 1007401984.0, "60": 1007401984.0, "65": 1007401984.0, "70": 1007401984.0, "75": 1007401984.0, "80": 1007401984.0, "85": 1007401984.0, "90": 1007401984.0, "95": 1007401984.0, "100": 1007401984.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 17.27723, "5": 0.60678, "10": 0.60057, "15": 0.60085, "20": 0.60042, "25": 0.59146, "30": 0.5894, "35": 0.59793, "40": 0.59588, "45": 0.59464, "50": 0.59558, "55": 0.59259, "60": 0.59135, "65": 0.59003, "70": 0.59169, "75": 0.593, "80": 0.59314, "85": 0.59113, "90": 0.59357, "95": 0.59711, "100": 0.59075}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index c3a1a34..5008e60 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --deterministic-mode: true --ckpt-format: torch --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json index 494043e..fce6976 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json +++ 
b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.71086, 0.71893, 0.72885, 0.70321, 0.70401, 0.7141, 0.70976, 0.70408, 0.70335, 0.70493, 0.7093, 0.7085, 0.7048, 0.70419, 0.7078, 0.70467, 0.69381, 0.69597, 0.69193, 0.69684]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.79062, 0.35414, 0.36513, 0.33889, 0.34029, 0.3472, 0.34538, 0.33905, 0.33883, 0.3403, 0.34588, 0.34318, 0.34002, 0.33934, 0.33993, 0.34056, 0.32859, 0.33199, 0.32739, 0.33349]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.26804, 0.36177, 0.36023, 0.3614, 0.36044, 0.3688, 0.36315, 0.36233, 0.36183, 0.36219, 0.36248, 0.36207, 0.36158, 0.36184, 0.36344, 0.36275, 0.36265, 0.36201, 0.36266, 0.36271]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [7e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.72582, 0.0016, 0.00158, 0.0016, 0.00159, 0.0016, 0.00159, 0.00159, 0.00161, 0.0016, 0.00159, 0.00161, 0.00158, 0.00159, 0.00163, 0.0016, 0.00159, 0.00159, 0.00158, 0.00162]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00128, 0.00108, 0.00105, 0.00111, 0.00111, 0.00109, 0.00108, 0.00108, 0.00108, 0.00103, 0.00112, 0.00109, 0.00108, 0.00108, 0.00108, 0.00105, 0.00107, 0.00108, 0.00104, 0.00102]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.69392, 0.0034, 0.00322, 0.00351, 0.00348, 0.00346, 0.00349, 0.00351, 0.00338, 0.0036, 0.0035, 0.00345, 0.0032, 0.00342, 0.00312, 0.0032, 0.00325, 0.00328, 0.00326, 0.00293]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.04331, 0.02443, 0.02426, 0.02439, 0.02443, 0.02433, 0.02433, 0.02454, 0.02465, 0.0246, 0.02426, 0.02413, 0.02402, 0.0243, 0.02477, 0.0241, 0.02419, 0.02427, 0.02391, 0.02396]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0211, 0.00227, 0.00227, 0.00224, 0.00225, 0.00228, 0.00227, 0.00225, 0.0022, 0.00228, 0.00222, 0.00225, 0.00231, 0.0022, 0.00226, 0.00228, 0.00215, 0.00214, 0.0022, 0.00214]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00418, 0.00293, 0.00293, 0.00293, 0.00363, 0.00311, 0.00295, 0.00294, 0.00294, 0.00292, 0.00294, 0.00293, 0.00294, 0.00293, 0.00293, 0.00294, 0.00288, 0.00287, 0.00286, 0.00288]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.7649, 0.03478, 0.03443, 0.03485, 0.03558, 0.03495, 0.03478, 0.03499, 0.03496, 0.0351, 0.03473, 0.03451, 0.03421, 0.03459, 0.03483, 0.03425, 0.03418, 0.03429, 0.03391, 0.03358]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 
2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32668, 9.41412, 8.86385, 8.56561, 8.2879, 8.10364, 7.83672, 7.53771, 7.3931, 7.29349, 7.3775, 7.22521, 7.11281, 7.06743, 6.91842, 6.96698, 6.97826, 7.04906, 6.72131, 6.98252]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32668, 9.41412, 8.86385, 8.56561, 8.2879, 8.10364, 7.83672, 7.53771, 7.3931, 7.29349, 7.3775, 7.22521, 7.11281, 7.06743, 6.91842, 6.96698, 6.97826, 7.04906, 6.72131, 6.98252]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26364, 2.17403, 2.49719, 2.08969, 1.92529, 1.69973, 1.63605, 1.57249, 1.48395, 1.29577, 1.00881, 1.01474, 0.95564, 1.04584, 0.94469, 0.77682, 1.06965, 1.16858, 1.12415, 0.84938]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26364, 2.17403, 2.49719, 2.08969, 1.92529, 1.69973, 1.63605, 1.57249, 1.48395, 1.29577, 1.00881, 1.01474, 0.95564, 1.04584, 0.94469, 0.77682, 1.06965, 1.16858, 1.12415, 0.84938]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43310.0, 40962.0, 43962.0, 41624.0, 44767.0, 43912.0, 41094.0, 42478.0, 44664.0, 43895.0, 41151.0, 43234.0, 39728.0, 45361.0, 43347.0, 43904.0, 45366.0, 45690.0, 46175.0, 44681.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43310.0, 40962.0, 43962.0, 41624.0, 44767.0, 43912.0, 41094.0, 42478.0, 44664.0, 43895.0, 41151.0, 43234.0, 39728.0, 45361.0, 43347.0, 43904.0, 45366.0, 45690.0, 46175.0, 44681.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05209, 284.1051, 284.15646, 284.20462, 284.25775, 284.30688, 284.34857, 284.38318, 284.4115, 284.43536, 284.4545, 284.46991, 284.48178, 284.49057]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05209, 284.1051, 284.15646, 284.20462, 284.25775, 284.30688, 284.34857, 284.38318, 284.4115, 284.43536, 284.4545, 284.46991, 284.48178, 284.49057]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [11.50028, 0.77522, 0.78519, 0.75964, 0.76022, 0.77024, 0.76566, 0.76033, 0.75984, 0.76147, 0.76589, 0.76431, 0.76018, 0.76013, 0.76364, 0.7591, 0.7484, 0.75044, 0.74626, 0.75089]}, "lm loss validation": {"start_step": 0, 
"end_step": 2, "step_interval": 5, "values": [6.92026]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.92026]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.58026]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.58026]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32668, "5": 9.40846, "10": 9.02977, "15": 8.67236, "20": 8.29542, "25": 8.00392, "30": 7.88087, "35": 7.66788, "40": 7.5126, "45": 7.36879, "50": 7.17455, "55": 7.15371, "60": 7.14905, "65": 7.00078, "70": 7.0658, "75": 7.07358, "80": 6.9521, "85": 6.85915, "90": 7.25531, "95": 6.85027, "100": 6.99347}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43310.0, "5": 45382.0, "10": 45361.0, "15": 43950.0, "20": 44760.0, "25": 42473.0, "30": 43982.0, "35": 43258.0, "40": 43234.0, "45": 43314.0, "50": 43376.0, "55": 43892.0, "60": 41240.0, "65": 44692.0, "70": 45524.0, "75": 44655.0, "80": 41150.0, "85": 44006.0, "90": 44672.0, "95": 43939.0, "100": 42423.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089391616.0, "5": 1089391616.0, "10": 1089391616.0, "15": 1089391616.0, "20": 1089391616.0, "25": 1089391616.0, "30": 1089391616.0, "35": 1089391616.0, "40": 1089391616.0, "45": 1089391616.0, "50": 1089391616.0, "55": 1089391616.0, "60": 1089391616.0, "65": 1089391616.0, "70": 1089391616.0, "75": 1089391616.0, "80": 1089391616.0, "85": 1089391616.0, "90": 1089391616.0, "95": 1089391616.0, "100": 1089391616.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1357230080.0, "5": 1808195072.0, "10": 1814751232.0, "15": 1814751232.0, "20": 1814751232.0, "25": 1814751232.0, "30": 1814751232.0, "35": 1814751232.0, "40": 1814751232.0, "45": 1814751232.0, "50": 1814751232.0, "55": 1814751232.0, "60": 1814751232.0, "65": 1814751232.0, "70": 1814751232.0, "75": 1814751232.0, "80": 1814751232.0, "85": 1814751232.0, "90": 1814751232.0, "95": 1814751232.0, "100": 1814751232.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.2764, "5": 0.75149, "10": 0.72765, "15": 0.72655, "20": 0.72819, "25": 0.72634, "30": 0.72812, "35": 0.72468, "40": 0.72473, "45": 0.72545, "50": 0.74251, "55": 0.72722, "60": 0.72634, "65": 0.72781, "70": 0.74545, "75": 0.73208, "80": 0.73229, "85": 1.04504, "90": 0.72786, "95": 0.72803, "100": 0.72956}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json index 9b48e08..a2c4602 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.71001, 0.98167, 0.67602, 0.67957, 0.67383, 0.67833, 0.6786, 0.67439, 0.67925, 0.6775, 0.67433, 0.67851, 0.6788, 0.67556, 0.68114, 0.67962, 0.6773, 0.67444, 0.68438, 0.68066]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.44785, 0.63132, 0.32811, 0.32906, 0.32792, 0.32848, 0.32661, 
0.32879, 0.33029, 0.33137, 0.32765, 0.32823, 0.33021, 0.32849, 0.33404, 0.33227, 0.33082, 0.32824, 0.33316, 0.32945]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [3.10727, 0.34793, 0.34464, 0.34976, 0.34367, 0.34625, 0.34888, 0.34392, 0.34602, 0.34354, 0.34321, 0.34724, 0.34855, 0.34401, 0.34584, 0.34631, 0.34721, 0.34247, 0.34765, 0.34807]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [7e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.87223, 0.00177, 0.00184, 0.00158, 0.00162, 0.00156, 0.00156, 0.00155, 0.00156, 0.00155, 0.00156, 0.00157, 0.00156, 0.00154, 0.00179, 0.00155, 0.00155, 0.00155, 0.00181, 0.00156]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00108, 0.00104, 0.00095, 0.00093, 0.00095, 0.00095, 0.00096, 0.00094, 0.00096, 0.00095, 0.00093, 0.00093, 0.00093, 0.00094, 0.00093, 0.00095, 0.00093, 0.00093, 0.00093, 0.00092]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.44019, 0.00288, 0.00273, 0.0024, 0.00284, 0.00269, 0.00268, 0.0027, 0.00269, 0.00276, 0.00264, 0.0026, 0.00231, 0.00265, 0.00233, 0.00234, 0.00242, 0.00248, 0.00264, 0.00257]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.04271, 0.02276, 0.02251, 0.02261, 0.02452, 0.02248, 0.02262, 0.02283, 0.02299, 0.02287, 0.02278, 0.02297, 0.02272, 0.02268, 0.02282, 0.02275, 0.02281, 0.02271, 0.02275, 0.02318]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0133, 0.00197, 0.00183, 0.00183, 0.0037, 0.00184, 0.00184, 0.00184, 0.00186, 0.00184, 0.00183, 0.00185, 0.00184, 0.00188, 0.00183, 0.00183, 0.00183, 0.00184, 0.00185, 0.00184]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0028, 0.00282, 0.0028, 0.00275, 0.00296, 0.00276, 0.00275, 0.00276, 0.00276, 0.00277, 0.00275, 0.00276, 0.00274, 0.00275, 0.16325, 0.00275, 0.00274, 0.00276, 0.00275, 0.00275]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.50116, 0.03223, 0.03151, 0.03113, 0.03576, 0.03131, 0.03147, 0.03168, 0.03187, 0.03178, 0.03155, 0.03172, 0.03115, 0.0315, 0.19184, 0.03127, 0.03135, 0.03135, 0.03159, 0.03196]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, 
"step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32658, 9.41412, 8.86391, 8.56555, 8.28783, 8.10358, 7.83667, 7.53748, 7.39311, 7.29338, 7.37752, 7.22518, 7.1129, 7.06753, 6.91822, 6.96679, 6.97834, 7.04893, 6.72125, 6.98236]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.32658, 9.41412, 8.86391, 8.56555, 8.28783, 8.10358, 7.83667, 7.53748, 7.39311, 7.29338, 7.37752, 7.22518, 7.1129, 7.06753, 6.91822, 6.96679, 6.97834, 7.04893, 6.72125, 6.98236]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26881, 2.17405, 2.50113, 2.08969, 1.9252, 1.69978, 1.63604, 1.57247, 1.48489, 1.29657, 1.0094, 1.01529, 0.95501, 1.04473, 0.94493, 0.77746, 1.07392, 1.16913, 1.12613, 0.84986]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [21.26881, 2.17405, 2.50113, 2.08969, 1.9252, 1.69978, 1.63604, 1.57247, 1.48489, 1.29657, 1.0094, 1.01529, 0.95501, 1.04473, 0.94493, 0.77746, 1.07392, 1.16913, 1.12613, 0.84986]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43303.0, 40954.0, 43957.0, 41612.0, 44782.0, 43938.0, 41086.0, 42465.0, 44666.0, 43893.0, 41158.0, 43221.0, 39725.0, 45367.0, 43342.0, 43903.0, 45362.0, 45687.0, 46160.0, 44706.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43303.0, 40954.0, 43957.0, 41612.0, 44782.0, 43938.0, 41086.0, 42465.0, 44666.0, 43893.0, 41158.0, 43221.0, 39725.0, 45367.0, 43342.0, 43903.0, 45362.0, 45687.0, 46160.0, 44706.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.10513, 284.15649, 284.20465, 284.25775, 284.30688, 284.34854, 284.38315, 284.41147, 284.43546, 284.45453, 284.46994, 284.48181, 284.49063]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80362, 283.8273, 283.86472, 283.9053, 283.95062, 284.00027, 284.05212, 284.10513, 284.15649, 284.20465, 284.25775, 284.30688, 284.34854, 284.38315, 284.41147, 284.43546, 284.45453, 284.46994, 284.48181, 284.49063]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [12.23694, 1.03463, 0.72739, 0.72966, 0.72882, 0.72883, 0.72924, 0.72542, 0.73039, 0.72858, 0.72719, 0.7292, 0.72931, 0.72642, 0.89265, 0.73026, 0.72781, 0.72495, 0.73526, 0.7318]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9202]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.52478]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1012.52478]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32658, "5": 
9.40843, "10": 9.0296, "15": 8.67226, "20": 8.29523, "25": 8.00377, "30": 7.88064, "35": 7.66781, "40": 7.51261, "45": 7.36876, "50": 7.17456, "55": 7.15367, "60": 7.14902, "65": 7.00074, "70": 7.06582, "75": 7.07345, "80": 6.95226, "85": 6.85932, "90": 7.25528, "95": 6.85052, "100": 6.9934}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43303.0, "5": 45387.0, "10": 45361.0, "15": 43924.0, "20": 44759.0, "25": 42473.0, "30": 43995.0, "35": 43238.0, "40": 43236.0, "45": 43317.0, "50": 43386.0, "55": 43870.0, "60": 41240.0, "65": 44703.0, "70": 45514.0, "75": 44674.0, "80": 41143.0, "85": 44015.0, "90": 44683.0, "95": 43918.0, "100": 42409.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089408000.0, "5": 1089408000.0, "10": 1089408000.0, "15": 1089408000.0, "20": 1089408000.0, "25": 1089408000.0, "30": 1089408000.0, "35": 1089408000.0, "40": 1089408000.0, "45": 1089408000.0, "50": 1089408000.0, "55": 1089408000.0, "60": 1089408000.0, "65": 1089408000.0, "70": 1089408000.0, "75": 1089408000.0, "80": 1089408000.0, "85": 1089408000.0, "90": 1089408000.0, "95": 1089408000.0, "100": 1089408000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1357245440.0, "5": 1814240768.0, "10": 1815027200.0, "15": 1815027200.0, "20": 1817913344.0, "25": 1817913344.0, "30": 1817913344.0, "35": 1817913344.0, "40": 1817913344.0, "45": 1817913344.0, "50": 1817913344.0, "55": 1817913344.0, "60": 1817913344.0, "65": 1817913344.0, "70": 1817913344.0, "75": 1817913344.0, "80": 1817913344.0, "85": 1817913344.0, "90": 1817913344.0, "95": 1817913344.0, "100": 1817913344.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 14.80606, "5": 0.7493, "10": 0.74684, "15": 0.7426, "20": 0.75129, "25": 0.74314, "30": 0.74508, "35": 0.74417, "40": 0.74485, "45": 0.74453, "50": 0.73001, "55": 0.73092, "60": 0.72487, "65": 0.72475, "70": 0.73096, "75": 0.73221, "80": 0.72381, "85": 0.72806, "90": 0.73053, "95": 0.73338, "100": 0.72738}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml index c17493f..ba5ff7d 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --deterministic-mode: true --ckpt-format: torch_dist --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..82a81e2 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm 
loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32668, "5": 9.40846, "10": 9.02977, "15": 8.67236, "20": 8.29542, "25": 8.00392, "30": 7.88087, "35": 7.66788, "40": 7.5126, "45": 7.36879, "50": 7.17455, "55": 7.15371, "60": 7.14905, "65": 7.00078, "70": 7.0658, "75": 7.07358, "80": 6.9521, "85": 6.85915, "90": 7.25531, "95": 6.85027, "100": 6.99347}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43310.0, "5": 45382.0, "10": 45361.0, "15": 43950.0, "20": 44760.0, "25": 42473.0, "30": 43982.0, "35": 43258.0, "40": 43234.0, "45": 43314.0, "50": 43376.0, "55": 43892.0, "60": 41240.0, "65": 44692.0, "70": 45524.0, "75": 44655.0, "80": 41150.0, "85": 44006.0, "90": 44672.0, "95": 43939.0, "100": 42423.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089391616.0, "5": 1089391616.0, "10": 1089391616.0, "15": 1089391616.0, "20": 1089391616.0, "25": 1089391616.0, "30": 1089391616.0, "35": 1089391616.0, "40": 1089391616.0, "45": 1089391616.0, "50": 1089391616.0, "55": 1089391616.0, "60": 1089391616.0, "65": 1089391616.0, "70": 1089391616.0, "75": 1089391616.0, "80": 1089391616.0, "85": 1089391616.0, "90": 1089391616.0, "95": 1089391616.0, "100": 1089391616.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1350742016.0, "5": 1809508352.0, "10": 1809508352.0, "15": 1810819072.0, "20": 1810819072.0, "25": 1813440512.0, "30": 1813440512.0, "35": 1813440512.0, "40": 1813440512.0, "45": 1813440512.0, "50": 1813440512.0, "55": 1813440512.0, "60": 1813440512.0, "65": 1813440512.0, "70": 1813440512.0, "75": 1813440512.0, "80": 1813440512.0, "85": 1813440512.0, "90": 1813440512.0, "95": 1813440512.0, "100": 1813440512.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.89054, "5": 0.9436, "10": 0.74604, "15": 0.74683, "20": 0.74627, "25": 0.74824, "30": 0.74479, "35": 0.74559, "40": 0.74518, "45": 0.74642, "50": 0.74295, "55": 0.73939, "60": 1.10093, "65": 0.73049, "70": 0.73297, "75": 0.73691, "80": 0.73182, "85": 0.73184, "90": 0.73228, "95": 0.73006, "100": 0.72966}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..fe135ac --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32658, "5": 9.40843, "10": 9.0296, "15": 8.67226, "20": 8.29523, "25": 8.00377, "30": 7.88064, "35": 7.66781, "40": 7.51261, "45": 7.36876, "50": 7.17456, "55": 7.15367, "60": 7.14902, "65": 7.00074, "70": 7.06582, "75": 7.07345, "80": 6.95226, "85": 6.85932, "90": 7.25528, "95": 6.85052, "100": 6.9934}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43303.0, "5": 45387.0, "10": 45361.0, "15": 43924.0, "20": 44759.0, "25": 42473.0, "30": 43995.0, "35": 43238.0, "40": 43236.0, "45": 43317.0, "50": 43386.0, "55": 43870.0, "60": 41240.0, "65": 44703.0, "70": 45514.0, "75": 44674.0, "80": 41143.0, "85": 44015.0, "90": 44683.0, "95": 43918.0, "100": 42409.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, 
"values": {"1": 1089408000.0, "5": 1089408000.0, "10": 1089408000.0, "15": 1089408000.0, "20": 1089408000.0, "25": 1089408000.0, "30": 1089408000.0, "35": 1089408000.0, "40": 1089408000.0, "45": 1089408000.0, "50": 1089408000.0, "55": 1089408000.0, "60": 1089408000.0, "65": 1089408000.0, "70": 1089408000.0, "75": 1089408000.0, "80": 1089408000.0, "85": 1089408000.0, "90": 1089408000.0, "95": 1089408000.0, "100": 1089408000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1355476992.0, "5": 1815028736.0, "10": 1815028736.0, "15": 1815028736.0, "20": 1816602112.0, "25": 1816602112.0, "30": 1816602624.0, "35": 1816602624.0, "40": 1816602624.0, "45": 1816602624.0, "50": 1816602624.0, "55": 1816602624.0, "60": 1816602624.0, "65": 1816864256.0, "70": 1816864256.0, "75": 1816864256.0, "80": 1816864256.0, "85": 1816864256.0, "90": 1816864256.0, "95": 1816864256.0, "100": 1816864256.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 15.08305, "5": 0.82316, "10": 0.71275, "15": 0.71571, "20": 0.70855, "25": 0.71043, "30": 0.71757, "35": 0.71773, "40": 0.7158, "45": 0.71295, "50": 0.7163, "55": 0.72641, "60": 0.71371, "65": 0.73385, "70": 0.71874, "75": 0.71616, "80": 0.72036, "85": 0.72253, "90": 0.71561, "95": 0.71086, "100": 0.71987}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index b3cfe0d..f29684e 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --deterministic-mode: true --ckpt-format: torch_dist --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json index 67e211c..09f9f5c 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/golden_values_lts.json @@ -1 +1 @@ -{"lm loss": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [10.41501, 9.20443, 8.62112, 8.34419, 8.08454, 7.96905, 7.68086, 7.39418, 7.26109, 7.19122]}, "num-zeros": {"start_step": 0, "end_step": 50, "step_interval": 5, "values": [115751.0, 111072.0, 117055.0, 112398.0, 118712.0, 116944.0, 111387.0, 114025.0, 118464.0, 116959.0]}, "iteration_timing_avg": 0.2253964705882353} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41501, "5": 9.19098, "10": 8.80853, "15": 8.44356, "20": 8.04896, "25": 7.76037, "30": 7.70645, "35": 7.53733, "40": 7.3937, "45": 7.25471, "50": 7.081, "55": 7.11618, "60": 
7.09829, "65": 6.94934, "70": 7.03684, "75": 7.03626, "80": 6.91263, "85": 6.80799, "90": 7.22679, "95": 6.81458, "100": 6.9519}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115751.0, "5": 119917.0, "10": 119886.0, "15": 116972.0, "20": 118676.0, "25": 114081.0, "30": 117111.0, "35": 115695.0, "40": 115627.0, "45": 115766.0, "50": 115867.0, "55": 116804.0, "60": 111649.0, "65": 118486.0, "70": 120214.0, "75": 118506.0, "80": 111335.0, "85": 117078.0, "90": 118465.0, "95": 117007.0, "100": 114131.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 806441472.0, "15": 806441472.0, "20": 806441472.0, "25": 806441472.0, "30": 806441472.0, "35": 806441472.0, "40": 806441472.0, "45": 806441472.0, "50": 806441472.0, "55": 806441472.0, "60": 806441472.0, "65": 806441472.0, "70": 806441472.0, "75": 806441472.0, "80": 806441472.0, "85": 806441472.0, "90": 806441472.0, "95": 806441472.0, "100": 806441472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1009299456.0, "10": 1009299456.0, "15": 1009299456.0, "20": 1009299456.0, "25": 1009299456.0, "30": 1009299456.0, "35": 1009299456.0, "40": 1009299456.0, "45": 1009299456.0, "50": 1009299456.0, "55": 1009299456.0, "60": 1009299456.0, "65": 1009299456.0, "70": 1009299456.0, "75": 1009299456.0, "80": 1009299456.0, "85": 1009299456.0, "90": 1009299456.0, "95": 1009299456.0, "100": 1009299456.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 19.81003, "5": 0.62168, "10": 0.61071, "15": 0.61037, "20": 0.60905, "25": 0.63348, "30": 0.60509, "35": 0.62179, "40": 0.60729, "45": 0.61442, "50": 0.61194, "55": 0.60055, "60": 0.60086, "65": 0.61298, "70": 0.5987, "75": 0.59646, "80": 0.60465, "85": 0.60925, "90": 0.60209, "95": 0.60329, "100": 0.60833}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml index 2df13fd..23180cf 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -51,4 +51,5 @@ MODEL_ARGS: --encoder-pipeline-model-parallel-size: 2 --deterministic-mode: true --ckpt-format: torch + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..c2325c7 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41501, "5": 9.19098, "10": 8.80853, "15": 8.44356, "20": 8.04896, "25": 7.76037, "30": 7.70649, "35": 7.5374, "40": 
7.39376, "45": 7.25463, "50": 7.08105, "55": 7.11603, "60": 7.09832, "65": 6.94932, "70": 7.03698, "75": 7.03632, "80": 6.91259, "85": 6.80803, "90": 7.22685, "95": 6.81459, "100": 6.95185}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115751.0, "5": 119917.0, "10": 119886.0, "15": 116972.0, "20": 118679.0, "25": 114091.0, "30": 117119.0, "35": 115692.0, "40": 115660.0, "45": 115795.0, "50": 115829.0, "55": 116826.0, "60": 111634.0, "65": 118491.0, "70": 120216.0, "75": 118485.0, "80": 111288.0, "85": 117080.0, "90": 118473.0, "95": 116993.0, "100": 114130.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 806441472.0, "15": 806441472.0, "20": 806441472.0, "25": 806441472.0, "30": 806441472.0, "35": 806441472.0, "40": 806441472.0, "45": 806441472.0, "50": 806441472.0, "55": 806441472.0, "60": 806441472.0, "65": 806441472.0, "70": 806441472.0, "75": 806441472.0, "80": 806441472.0, "85": 806441472.0, "90": 806441472.0, "95": 806441472.0, "100": 806441472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1009299456.0, "10": 1009299456.0, "15": 1009299456.0, "20": 1009299456.0, "25": 1009299456.0, "30": 1009299456.0, "35": 1009299456.0, "40": 1009299456.0, "45": 1009299456.0, "50": 1009299456.0, "55": 1009299456.0, "60": 1009299456.0, "65": 1009299456.0, "70": 1009299456.0, "75": 1009299456.0, "80": 1009299456.0, "85": 1009299456.0, "90": 1009299456.0, "95": 1009299456.0, "100": 1009299456.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 18.89127, "5": 0.60445, "10": 0.59505, "15": 0.59404, "20": 0.59857, "25": 0.60897, "30": 0.59308, "35": 0.61154, "40": 0.59441, "45": 0.59606, "50": 0.5942, "55": 0.58841, "60": 0.59751, "65": 0.59219, "70": 0.59594, "75": 0.59615, "80": 0.59365, "85": 0.64261, "90": 0.59256, "95": 0.59724, "100": 0.59219}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..6ba9fb0 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.41501, "5": 9.19098, "10": 8.80853, "15": 8.44356, "20": 8.04896, "25": 7.76037, "30": 7.70645, "35": 7.53733, "40": 7.3937, "45": 7.25471, "50": 7.081, "55": 7.11618, "60": 7.09829, "65": 6.94934, "70": 7.03684, "75": 7.03626, "80": 6.91263, "85": 6.80799, "90": 7.22679, "95": 6.81458, "100": 6.9519}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 115751.0, "5": 119917.0, "10": 119886.0, "15": 116972.0, "20": 118676.0, "25": 114081.0, "30": 117111.0, "35": 115695.0, "40": 115627.0, "45": 115766.0, "50": 115867.0, "55": 116804.0, "60": 111649.0, "65": 118486.0, "70": 120214.0, "75": 118506.0, "80": 111335.0, "85": 117078.0, "90": 118465.0, "95": 117007.0, "100": 114131.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806441472.0, "5": 806441472.0, "10": 806441472.0, "15": 806441472.0, "20": 806441472.0, "25": 806441472.0, "30": 806441472.0, "35": 806441472.0, "40": 806441472.0, "45": 806441472.0, 
"50": 806441472.0, "55": 806441472.0, "60": 806441472.0, "65": 806441472.0, "70": 806441472.0, "75": 806441472.0, "80": 806441472.0, "85": 806441472.0, "90": 806441472.0, "95": 806441472.0, "100": 806441472.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 806446080.0, "5": 1009299456.0, "10": 1009299456.0, "15": 1009299456.0, "20": 1009299456.0, "25": 1009299456.0, "30": 1009299456.0, "35": 1009299456.0, "40": 1009299456.0, "45": 1009299456.0, "50": 1009299456.0, "55": 1009299456.0, "60": 1009299456.0, "65": 1009299456.0, "70": 1009299456.0, "75": 1009299456.0, "80": 1009299456.0, "85": 1009299456.0, "90": 1009299456.0, "95": 1009299456.0, "100": 1009299456.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 21.07152, "5": 0.61715, "10": 0.60362, "15": 0.60055, "20": 0.61535, "25": 0.61578, "30": 0.60708, "35": 0.61197, "40": 0.60846, "45": 0.61274, "50": 0.61868, "55": 0.60749, "60": 0.60595, "65": 0.60896, "70": 0.60225, "75": 0.60435, "80": 0.60823, "85": 0.60709, "90": 0.6091, "95": 0.59773, "100": 0.59673}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml index 23f9be2..50850fb 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -51,4 +51,5 @@ MODEL_ARGS: --encoder-pipeline-model-parallel-size: 2 --deterministic-mode: true --ckpt-format: torch + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json index d752d31..26d537a 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_dev.json @@ -1,83 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.32658, - 9.41413, - 8.86432, - 8.56546, - 8.2877, - 8.1035, - 7.83646, - 7.5377, - 7.39282, - 7.29333, - 7.37736, - 7.22498, - 7.11249, - 7.06739, - 6.91817, - 6.96674, - 6.97821, - 7.0494, - 6.72101, - 6.98229 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43310.0, - 40943.0, - 43952.0, - 41616.0, - 44789.0, - 43937.0, - 41093.0, - 42468.0, - 44652.0, - 43894.0, - 41154.0, - 43226.0, - 39719.0, - 45362.0, - 43332.0, - 43913.0, - 45362.0, - 45695.0, - 46170.0, - 44701.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 11.09527, - 0.74337, - 0.74502, - 0.74411, - 1.06685, - 0.74366, - 0.74354, - 0.74287, - 0.7419, - 0.74299, - 1.02516, - 0.74651, - 0.74175, - 0.74347, - 0.7457, - 0.74253, - 0.74391, - 0.74341, - 
0.74261, - 0.74236 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32658, "5": 9.40836, "10": 9.02928, "15": 8.67242, "20": 8.29515, "25": 8.00385, "30": 7.88061, "35": 7.66767, "40": 7.51246, "45": 7.36873, "50": 7.17416, "55": 7.15359, "60": 7.14886, "65": 7.00033, "70": 7.06557, "75": 7.07355, "80": 6.95215, "85": 6.85908, "90": 7.25527, "95": 6.85024, "100": 6.99333}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43310.0, "5": 45384.0, "10": 45373.0, "15": 43930.0, "20": 44761.0, "25": 42477.0, "30": 43999.0, "35": 43241.0, "40": 43236.0, "45": 43316.0, "50": 43381.0, "55": 43875.0, "60": 41238.0, "65": 44706.0, "70": 45512.0, "75": 44666.0, "80": 41128.0, "85": 44010.0, "90": 44664.0, "95": 43932.0, "100": 42421.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089915904.0, "5": 1089915904.0, "10": 1089915904.0, "15": 1089915904.0, "20": 1089915904.0, "25": 1089915904.0, "30": 1089915904.0, "35": 1089915904.0, "40": 1089915904.0, "45": 1089915904.0, "50": 1089915904.0, "55": 1089915904.0, "60": 1089915904.0, "65": 1089915904.0, "70": 1089915904.0, "75": 1089915904.0, "80": 1089915904.0, "85": 1089915904.0, "90": 1089915904.0, "95": 1089915904.0, "100": 1089915904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1368108032.0, "5": 1820057088.0, "10": 1820059648.0, "15": 1820059648.0, "20": 1820059648.0, "25": 1820059648.0, "30": 1820059648.0, "35": 1820059648.0, "40": 1820059648.0, "45": 1820059648.0, "50": 1820059648.0, "55": 1820059648.0, "60": 1820059648.0, "65": 1820059648.0, "70": 1820059648.0, "75": 1820059648.0, "80": 1820059648.0, "85": 1820059648.0, "90": 1820059648.0, "95": 1820059648.0, "100": 1820059648.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.64906, "5": 0.73826, "10": 0.72842, "15": 0.72533, "20": 0.72377, "25": 0.7257, "30": 0.72496, "35": 0.72316, "40": 0.72402, "45": 0.72699, "50": 0.72338, "55": 0.7255, "60": 0.72297, "65": 0.72409, "70": 0.72677, "75": 0.72542, "80": 0.72543, "85": 0.74564, "90": 0.72546, "95": 0.72636, "100": 0.72485}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json index d932464..4290c9b 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/golden_values_lts.json @@ -1,763 +1 @@ -{ - "forward-backward-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 14.18678, - 0.67885, - 0.68278, - 0.68333, - 0.67855, - 0.68179, - 0.68809, - 0.67808, - 0.67889, - 0.69586, - 0.69577, - 0.67938, - 0.68076, - 0.68551, - 0.69108, - 0.67821, - 0.68422, - 0.68947, - 0.67891, - 0.68614 - ] - }, - "forward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 8.91183, - 0.31386, - 0.31455, - 0.31529, - 0.31399, - 0.31376, - 0.3168, - 0.31219, - 0.31205, - 0.32539, - 0.32943, - 0.31424, - 0.31569, - 0.32161, - 0.32188, - 0.31166, - 0.31627, - 0.31935, - 0.31029, - 0.32078 - ] - }, - "backward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 4.25414, - 
0.3682, - 0.37658, - 0.37755, - 0.37333, - 0.37381, - 0.37727, - 0.37278, - 0.37206, - 0.37541, - 0.37183, - 0.37214, - 0.37101, - 0.37247, - 0.37485, - 0.36955, - 0.37359, - 0.3825, - 0.37545, - 0.37777 - ] - }, - "layernorm-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00004, - 0.00003, - 0.00003, - 0.00002, - 0.00002, - 0.00002, - 0.00002, - 0.00003, - 0.00002, - 0.00003, - 0.00002, - 0.00003, - 0.00002, - 0.00002, - 0.00004, - 0.00003, - 0.00002, - 0.00002, - 0.00002, - 0.00002 - ] - }, - "embedding-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00005, - 0.00004, - 0.00004, - 0.00004, - 0.00004, - 0.00003, - 0.00003, - 0.00004, - 0.00004, - 0.00003, - 0.00003, - 0.00004, - 0.00004, - 0.00004, - 0.00004, - 0.00003, - 0.00003, - 0.00003, - 0.00003, - 0.00003 - ] - }, - "all-grads-sync-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.9061, - 0.00163, - 0.00202, - 0.00163, - 0.00157, - 0.00156, - 0.00183, - 0.0016, - 0.00183, - 0.00157, - 0.00157, - 0.00158, - 0.00168, - 0.00158, - 0.00169, - 0.00156, - 0.00157, - 0.00157, - 0.00156, - 0.00185 - ] - }, - "optimizer-copy-to-main-grad-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0011, - 0.00104, - 0.00102, - 0.00101, - 0.00097, - 0.00098, - 0.001, - 0.00096, - 0.00096, - 0.00099, - 0.00095, - 0.00097, - 0.00096, - 0.00098, - 0.00097, - 0.00098, - 0.00095, - 0.00099, - 0.00098, - 0.00099 - ] - }, - "optimizer-clip-main-grad-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.59317, - 0.00265, - 0.00282, - 0.00284, - 0.00289, - 0.00298, - 0.00282, - 0.00294, - 0.00302, - 0.00301, - 0.00304, - 0.00294, - 0.00253, - 0.00296, - 0.00251, - 0.00227, - 0.00282, - 0.00287, - 0.00308, - 0.00276 - ] - }, - "optimizer-count-zeros-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.04375, - 0.02396, - 0.02387, - 0.02381, - 0.02385, - 0.02393, - 0.0241, - 0.02406, - 0.02393, - 0.024, - 0.02396, - 0.024, - 0.0241, - 0.02397, - 0.024, - 0.02378, - 0.0238, - 0.02393, - 0.02395, - 0.02405 - ] - }, - "optimizer-inner-step-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.01715, - 0.00212, - 0.0021, - 0.00212, - 0.00212, - 0.00211, - 0.00218, - 0.00213, - 0.00212, - 0.00214, - 0.00211, - 0.00226, - 0.00211, - 0.00209, - 0.00211, - 0.00218, - 0.00207, - 0.00211, - 0.00213, - 0.00218 - ] - }, - "optimizer-copy-main-to-model-params-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00281, - 0.00282, - 0.00281, - 0.00283, - 0.00281, - 0.00283, - 0.00289, - 0.00286, - 0.00281, - 0.00284, - 0.00282, - 0.00431, - 0.00295, - 0.00284, - 0.00283, - 0.00283, - 0.18259, - 0.00284, - 0.00283, - 0.00295 - ] - }, - "optimizer-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.65881, - 0.03322, - 0.03326, - 0.03323, - 0.03329, - 0.03345, - 0.03361, - 0.03357, - 0.03352, - 0.03364, - 0.03349, - 0.03532, - 0.03332, - 0.03347, - 0.03313, - 0.03267, - 0.21285, - 0.03336, - 0.03358, - 0.03357 - ] - }, - "learning-rate": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 0.00009, - 0.00009, - 0.00008, - 0.00008, - 0.00007, - 0.00007, - 0.00006, - 0.00006, - 0.00005, - 0.00005, - 0.00005, - 0.00004, - 0.00004, - 0.00003, - 0.00003, - 0.00002, - 0.00002, - 0.00001 - ] - 
}, - "learning-rate vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 0.00009, - 0.00009, - 0.00008, - 0.00008, - 0.00007, - 0.00007, - 0.00006, - 0.00006, - 0.00005, - 0.00005, - 0.00005, - 0.00004, - 0.00004, - 0.00003, - 0.00003, - 0.00002, - 0.00002, - 0.00001 - ] - }, - "batch-size": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32 - ] - }, - "batch-size vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32, - 32 - ] - }, - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.3267, - 9.41409, - 8.86422, - 8.56557, - 8.28779, - 8.10356, - 7.83669, - 7.53761, - 7.39304, - 7.29344, - 7.37755, - 7.22522, - 7.11288, - 7.06761, - 6.91847, - 6.96686, - 6.97827, - 7.04883, - 6.72143, - 6.98255 - ] - }, - "lm loss vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.3267, - 9.41409, - 8.86422, - 8.56557, - 8.28779, - 8.10356, - 7.83669, - 7.53761, - 7.39304, - 7.29344, - 7.37755, - 7.22522, - 7.11288, - 7.06761, - 6.91847, - 6.96686, - 6.97827, - 7.04883, - 6.72143, - 6.98255 - ] - }, - "loss-scale": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1 - ] - }, - "loss-scale vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1 - ] - }, - "grad-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 21.2635, - 2.17416, - 2.50475, - 2.08972, - 1.9252, - 1.69975, - 1.63606, - 1.57261, - 1.48503, - 1.29641, - 1.00944, - 1.01609, - 0.95592, - 1.04635, - 0.94502, - 0.7775, - 1.07117, - 1.16813, - 1.12672, - 0.85024 - ] - }, - "grad-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 21.2635, - 2.17416, - 2.50475, - 2.08972, - 1.9252, - 1.69975, - 1.63606, - 1.57261, - 1.48503, - 1.29641, - 1.00944, - 1.01609, - 0.95592, - 1.04635, - 0.94502, - 0.7775, - 1.07117, - 1.16813, - 1.12672, - 0.85024 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43318, - 40956, - 43957, - 41617, - 44756, - 43946, - 41064, - 42479, - 44668, - 43904, - 41151, - 43235, - 39712, - 45373, - 43360, - 43896, - 45353, - 45682, - 46166, - 44693 - ] - }, - "num-zeros vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43318, - 40956, - 43957, - 41617, - 44756, - 43946, - 41064, - 42479, - 44668, - 43904, - 41151, - 43235, - 39712, - 45373, - 43360, - 43896, - 45353, - 45682, - 46166, - 44693 - ] - }, - "params-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80362, - 283.8273, - 283.86469, - 283.90527, - 283.95059, - 284.00024, - 284.05206, - 284.10507, - 284.15643, - 284.20459, - 284.25775, - 284.30685, - 284.34851, - 284.38309, - 284.41144, - 284.43536, - 284.45441, - 284.46985, - 284.48169, - 284.49057 - ] - }, - "params-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80362, - 283.8273, - 
283.86469, - 283.90527, - 283.95059, - 284.00024, - 284.05206, - 284.10507, - 284.15643, - 284.20459, - 284.25775, - 284.30685, - 284.34851, - 284.38309, - 284.41144, - 284.43536, - 284.45441, - 284.46985, - 284.48169, - 284.49057 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 15.87098, - 0.73261, - 0.73669, - 0.73696, - 0.73228, - 0.73561, - 0.74191, - 0.73193, - 0.73279, - 0.75004, - 0.74974, - 0.73772, - 0.73447, - 0.73951, - 0.74553, - 0.73119, - 0.9162, - 0.74318, - 0.73275, - 0.74014 - ] - }, - "lm loss validation": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 6.92026 - ] - }, - "lm loss validation vs samples": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 6.92026 - ] - }, - "lm loss validation ppl": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 1012.58173 - ] - }, - "lm loss validation ppl vs samples": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 1012.58173 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.3267, "5": 9.40854, "10": 9.02953, "15": 8.67237, "20": 8.2953, "25": 8.00373, "30": 7.88073, "35": 7.66782, "40": 7.51273, "45": 7.36894, "50": 7.17436, "55": 7.15376, "60": 7.14912, "65": 7.00078, "70": 7.06584, "75": 7.07368, "80": 6.95216, "85": 6.85934, "90": 7.25521, "95": 6.85057, "100": 6.99348}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43318.0, "5": 45390.0, "10": 45358.0, "15": 43934.0, "20": 44749.0, "25": 42461.0, "30": 43987.0, "35": 43246.0, "40": 43241.0, "45": 43320.0, "50": 43381.0, "55": 43871.0, "60": 41249.0, "65": 44699.0, "70": 45523.0, "75": 44655.0, "80": 41135.0, "85": 44015.0, "90": 44688.0, "95": 43927.0, "100": 42412.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089915904.0, "5": 1089915904.0, "10": 1089915904.0, "15": 1089915904.0, "20": 1089915904.0, "25": 1089915904.0, "30": 1089915904.0, "35": 1089915904.0, "40": 1089915904.0, "45": 1089915904.0, "50": 1089915904.0, "55": 1089915904.0, "60": 1089915904.0, "65": 1089915904.0, "70": 1089915904.0, "75": 1089915904.0, "80": 1089915904.0, "85": 1089915904.0, "90": 1089915904.0, "95": 1089915904.0, "100": 1089915904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1368108032.0, "5": 1820059136.0, "10": 1820059648.0, "15": 1820059648.0, "20": 1820059648.0, "25": 1820059648.0, "30": 1820059648.0, "35": 1820059648.0, "40": 1820059648.0, "45": 1820059648.0, "50": 1820059648.0, "55": 1820059648.0, "60": 1820059648.0, "65": 1820059648.0, "70": 1820059648.0, "75": 1820059648.0, "80": 1820059648.0, "85": 1820059648.0, "90": 1820059648.0, "95": 1820059648.0, "100": 1820059648.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.08605, "5": 0.75327, "10": 0.74304, "15": 0.74192, "20": 0.747, "25": 0.75266, "30": 0.74716, "35": 0.74401, "40": 0.7449, "45": 0.74217, "50": 0.73961, "55": 0.74434, "60": 0.74076, "65": 0.74652, "70": 0.73804, "75": 0.91246, "80": 0.74423, "85": 0.74622, "90": 0.74649, "95": 0.7355, "100": 0.74109}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml index 
3f19d3a..1af30d5 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -51,4 +51,5 @@ MODEL_ARGS: --encoder-pipeline-model-parallel-size: 0 --deterministic-mode: true --ckpt-format: torch_dist + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json new file mode 100644 index 0000000..ae0bb83 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.32658, "5": 9.40836, "10": 9.02928, "15": 8.67242, "20": 8.29515, "25": 8.00385, "30": 7.88061, "35": 7.66767, "40": 7.51246, "45": 7.36873, "50": 7.17416, "55": 7.15359, "60": 7.14886, "65": 7.00033, "70": 7.06557, "75": 7.07355, "80": 6.95215, "85": 6.85908, "90": 7.25527, "95": 6.85024, "100": 6.99333}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43310.0, "5": 45384.0, "10": 45373.0, "15": 43930.0, "20": 44761.0, "25": 42477.0, "30": 43999.0, "35": 43241.0, "40": 43236.0, "45": 43316.0, "50": 43381.0, "55": 43875.0, "60": 41238.0, "65": 44706.0, "70": 45512.0, "75": 44666.0, "80": 41128.0, "85": 44010.0, "90": 44664.0, "95": 43932.0, "100": 42421.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1091161088.0, "5": 1091161088.0, "10": 1091161088.0, "15": 1091161088.0, "20": 1091161088.0, "25": 1091161088.0, "30": 1091161088.0, "35": 1091161088.0, "40": 1091161088.0, "45": 1091161088.0, "50": 1091161088.0, "55": 1091161088.0, "60": 1091161088.0, "65": 1091161088.0, "70": 1091161088.0, "75": 1091161088.0, "80": 1091161088.0, "85": 1091161088.0, "90": 1091161088.0, "95": 1091161088.0, "100": 1091161088.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1368108544.0, "5": 1821107712.0, "10": 1821108224.0, "15": 1821108224.0, "20": 1821108224.0, "25": 1821108224.0, "30": 1821108224.0, "35": 1821108224.0, "40": 1821108224.0, "45": 1821108224.0, "50": 1821108224.0, "55": 1821108224.0, "60": 1821108224.0, "65": 1821108224.0, "70": 1821108224.0, "75": 1821108224.0, "80": 1821108224.0, "85": 1821108224.0, "90": 1821108224.0, "95": 1821108224.0, "100": 1821108224.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.87761, "5": 0.74257, "10": 0.74846, "15": 0.74349, "20": 0.74744, "25": 0.74671, "30": 0.74697, "35": 0.74483, "40": 0.74834, "45": 0.74435, "50": 0.73052, "55": 0.73799, "60": 0.7208, "65": 0.72522, "70": 0.72689, "75": 0.72574, "80": 0.72562, "85": 0.72421, "90": 0.72026, "95": 0.72797, "100": 0.72824}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json new file mode 100644 index 0000000..8e15f21 --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.3267, "5": 9.40854, "10": 9.02953, "15": 8.67237, "20": 8.2953, "25": 8.00373, "30": 7.88073, "35": 7.66782, "40": 7.51273, "45": 7.36894, "50": 7.17436, "55": 7.15376, "60": 7.14912, "65": 7.00078, "70": 7.06584, "75": 7.07368, "80": 6.95216, "85": 6.85934, "90": 7.25521, "95": 6.85057, "100": 6.99348}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43318.0, "5": 45390.0, "10": 45358.0, "15": 43934.0, "20": 44749.0, "25": 42461.0, "30": 43987.0, "35": 43246.0, "40": 43241.0, "45": 43320.0, "50": 43381.0, "55": 43871.0, "60": 41249.0, "65": 44699.0, "70": 45523.0, "75": 44655.0, "80": 41135.0, "85": 44015.0, "90": 44688.0, "95": 43927.0, "100": 42412.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1089915904.0, "5": 1089915904.0, "10": 1089915904.0, "15": 1089915904.0, "20": 1089915904.0, "25": 1089915904.0, "30": 1089915904.0, "35": 1089915904.0, "40": 1089915904.0, "45": 1089915904.0, "50": 1089915904.0, "55": 1089915904.0, "60": 1089915904.0, "65": 1089915904.0, "70": 1089915904.0, "75": 1089915904.0, "80": 1089915904.0, "85": 1089915904.0, "90": 1089915904.0, "95": 1089915904.0, "100": 1089915904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 1368108544.0, "5": 1820059648.0, "10": 1820059648.0, "15": 1820059648.0, "20": 1820059648.0, "25": 1820059648.0, "30": 1820059648.0, "35": 1820321792.0, "40": 1820321792.0, "45": 1820321792.0, "50": 1820321792.0, "55": 1820321792.0, "60": 1820321792.0, "65": 1820321792.0, "70": 1820321792.0, "75": 1820321792.0, "80": 1820321792.0, "85": 1820321792.0, "90": 1820321792.0, "95": 1820321792.0, "100": 1820321792.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.89219, "5": 0.74072, "10": 0.73004, "15": 0.72217, "20": 0.73074, "25": 0.72948, "30": 0.72491, "35": 0.71792, "40": 0.71668, "45": 0.72144, "50": 0.7179, "55": 0.73741, "60": 0.73372, "65": 0.7275, "70": 0.739, "75": 0.73171, "80": 0.73186, "85": 0.73693, "90": 0.73959, "95": 0.73062, "100": 0.73857}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml index 243e1fc..dd907b9 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -51,4 +51,5 @@ MODEL_ARGS: 
--encoder-pipeline-model-parallel-size: 0 --deterministic-mode: true --ckpt-format: torch_dist + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_dev.json new file mode 100644 index 0000000..e3a6fea --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33692, "5": 9.40648, "10": 9.03817, "15": 8.6698, "20": 8.29229, "25": 8.01419, "30": 7.8897, "35": 7.66737, "40": 7.51209, "45": 7.36989, "50": 7.17993, "55": 7.1548, "60": 7.14501, "65": 6.99172, "70": 7.05571, "75": 7.06396, "80": 6.94199, "85": 6.85039, "90": 7.23433, "95": 6.84062, "100": 6.97772}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43334.0, "5": 45422.0, "10": 45371.0, "15": 43933.0, "20": 44825.0, "25": 42720.0, "30": 44038.0, "35": 43296.0, "40": 43276.0, "45": 43307.0, "50": 43376.0, "55": 43924.0, "60": 41325.0, "65": 44724.0, "70": 45526.0, "75": 44684.0, "80": 41101.0, "85": 44018.0, "90": 44722.0, "95": 44090.0, "100": 42478.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4094423552.0, "5": 4094423552.0, "10": 4094423552.0, "15": 4094423552.0, "20": 4094423552.0, "25": 4094423552.0, "30": 4094423552.0, "35": 4094423552.0, "40": 4094423552.0, "45": 4094423552.0, "50": 4094423552.0, "55": 4094423552.0, "60": 4094423552.0, "65": 4094423552.0, "70": 4094423552.0, "75": 4094423552.0, "80": 4094423552.0, "85": 4094423552.0, "90": 4094423552.0, "95": 4094423552.0, "100": 4094423552.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4352922112.0, "5": 6133625856.0, "10": 6133625856.0, "15": 6133625856.0, "20": 6133625856.0, "25": 6133625856.0, "30": 6133625856.0, "35": 6133625856.0, "40": 6133625856.0, "45": 6133625856.0, "50": 6133625856.0, "55": 6133625856.0, "60": 6133625856.0, "65": 6133625856.0, "70": 6133625856.0, "75": 6133625856.0, "80": 6133625856.0, "85": 6133625856.0, "90": 6133625856.0, "95": 6133625856.0, "100": 6133625856.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 7.28411, "5": 0.24039, "10": 0.24352, "15": 0.23971, "20": 0.23776, "25": 0.23765, "30": 0.23789, "35": 0.23831, "40": 0.23729, "45": 0.23855, "50": 0.23904, "55": 0.23694, "60": 0.23457, "65": 0.23394, "70": 0.23625, "75": 0.23477, "80": 0.24348, "85": 0.23375, "90": 0.23588, "95": 0.23366, "100": 0.24112}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_lts.json new file mode 100644 index 0000000..050db2e --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33692, "5": 9.40647, "10": 9.03812, "15": 8.66982, "20": 8.29233, "25": 8.01417, "30": 7.88975, "35": 7.66738, "40": 7.51226, "45": 7.37002, 
"50": 7.18009, "55": 7.15486, "60": 7.14528, "65": 6.99172, "70": 7.05573, "75": 7.06401, "80": 6.94204, "85": 6.85029, "90": 7.23443, "95": 6.84073, "100": 6.97784}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43335.0, "5": 45420.0, "10": 45381.0, "15": 43929.0, "20": 44836.0, "25": 42716.0, "30": 44035.0, "35": 43294.0, "40": 43289.0, "45": 43298.0, "50": 43362.0, "55": 43943.0, "60": 41332.0, "65": 44715.0, "70": 45520.0, "75": 44691.0, "80": 41115.0, "85": 44012.0, "90": 44713.0, "95": 44092.0, "100": 42473.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4094423040.0, "5": 4094423040.0, "10": 4094423040.0, "15": 4094423040.0, "20": 4094423040.0, "25": 4094423040.0, "30": 4094423040.0, "35": 4094423040.0, "40": 4094423040.0, "45": 4094423040.0, "50": 4094423040.0, "55": 4094423040.0, "60": 4094423040.0, "65": 4094423040.0, "70": 4094423040.0, "75": 4094423040.0, "80": 4094423040.0, "85": 4094423040.0, "90": 4094423040.0, "95": 4094423040.0, "100": 4094423040.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4339552256.0, "5": 6124447744.0, "10": 6124447744.0, "15": 6124447744.0, "20": 6124447744.0, "25": 6124447744.0, "30": 6124447744.0, "35": 6124447744.0, "40": 6124447744.0, "45": 6124447744.0, "50": 6124447744.0, "55": 6124447744.0, "60": 6124447744.0, "65": 6124447744.0, "70": 6124447744.0, "75": 6124447744.0, "80": 6124447744.0, "85": 6124447744.0, "90": 6124447744.0, "95": 6124447744.0, "100": 6124447744.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.19326, "5": 0.21899, "10": 0.22106, "15": 0.22825, "20": 0.22407, "25": 0.22833, "30": 0.2233, "35": 0.22334, "40": 0.22285, "45": 0.22288, "50": 0.2223, "55": 0.21966, "60": 0.22022, "65": 0.21946, "70": 0.21909, "75": 0.22086, "80": 0.22289, "85": 0.22592, "90": 0.22157, "95": 0.22247, "100": 0.21963}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml index bde4e72..ae465ae 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --attention-softmax-in-fp32: true --ckpt-format: torch --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json index 570eca0..24f434e 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json +++ 
b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_dev.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [8.5793, 0.62156, 0.34426, 0.34959, 0.34301, 0.34282, 0.35085, 0.34342, 0.34419, 0.34313, 0.34469, 0.3443, 0.34409, 0.34468, 0.34387, 0.34425, 0.34364, 0.34422, 0.34383, 0.34972]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5.11833, 0.43748, 0.16255, 0.16704, 0.16205, 0.16151, 0.16942, 0.16138, 0.16252, 0.16175, 0.16312, 0.16223, 0.16308, 0.16294, 0.16207, 0.16265, 0.1619, 0.16234, 0.16178, 0.16665]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.7297, 0.17954, 0.17726, 0.17654, 0.17682, 0.17671, 0.17681, 0.17739, 0.17716, 0.17701, 0.17743, 0.17721, 0.177, 0.17726, 0.17669, 0.17644, 0.1773, 0.17687, 0.17734, 0.17678]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 2e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 2e-05, 2e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05, 3e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [5e-05, 5e-05, 4e-05, 4e-05, 4e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 4e-05, 6e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.58321, 0.00365, 0.00367, 0.00381, 0.00361, 0.00362, 0.00361, 0.00361, 0.00361, 0.00362, 0.0036, 0.00362, 0.00363, 0.00361, 0.00362, 0.00362, 0.00366, 0.00366, 0.00366, 0.00362]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00128, 0.00104, 0.0009, 0.001, 0.00093, 0.0009, 0.00099, 0.00091, 0.00089, 0.00095, 0.00099, 0.00091, 0.00095, 0.00097, 0.00096, 0.00097, 0.00095, 0.00093, 0.00091, 0.00099]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.63878, 0.00531, 0.00498, 0.0055, 0.00476, 0.00472, 0.00508, 0.00477, 0.00474, 0.00476, 0.00488, 0.00414, 0.00418, 0.00419, 0.00476, 0.00458, 0.00422, 0.00478, 0.00475, 0.00476]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.03577, 0.02714, 0.02668, 0.02764, 0.0269, 0.02684, 0.02714, 0.02679, 0.02694, 0.02664, 0.02712, 0.02686, 0.02672, 0.02711, 0.02707, 0.02682, 0.02668, 0.02697, 0.02671, 0.02705]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01745, 0.00284, 0.00279, 0.00296, 0.0028, 0.0028, 0.00281, 0.00284, 0.0028, 0.00279, 0.00282, 0.00281, 0.0028, 0.0028, 0.00281, 0.00283, 0.00281, 0.0028, 0.00278, 0.00282]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00437, 0.00308, 0.00301, 0.00318, 0.00303, 0.00302, 0.00304, 0.00303, 0.00312, 0.003, 0.00305, 0.00302, 0.00304, 0.00303, 0.00305, 0.00304, 0.00303, 0.00302, 0.00302, 0.00306]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.69859, 0.04007, 0.03899, 0.04112, 0.03904, 0.03889, 0.03968, 0.03901, 0.03916, 0.03877, 0.03957, 0.03839, 0.03832, 0.03874, 0.03928, 0.03886, 0.03831, 0.03913, 0.03887, 0.03931]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 
3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41105, 8.88302, 8.56266, 8.28771, 8.10231, 7.83818, 7.53405, 7.39422, 7.28751, 7.36793, 7.22187, 7.10601, 7.05271, 6.91418, 6.96486, 6.973, 7.03533, 6.70377, 6.97036]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41105, 8.88302, 8.56266, 8.28771, 8.10231, 7.83818, 7.53405, 7.39422, 7.28751, 7.36793, 7.22187, 7.10601, 7.05271, 6.91418, 6.96486, 6.973, 7.03533, 6.70377, 6.97036]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20568, 2.60115, 2.08118, 1.91833, 1.69112, 1.62099, 1.56865, 1.46236, 1.32506, 1.0147, 0.9197, 0.96922, 0.92739, 1.02635, 0.93686, 0.8341, 1.06816, 1.06549, 1.00001]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20568, 2.60115, 2.08118, 1.91833, 1.69112, 1.62099, 1.56865, 1.46236, 1.32506, 1.0147, 0.9197, 0.96922, 0.92739, 1.02635, 0.93686, 0.8341, 1.06816, 1.06549, 1.00001]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40948.0, 43970.0, 41602.0, 44746.0, 43922.0, 41250.0, 42504.0, 44676.0, 43887.0, 41135.0, 43266.0, 39677.0, 45400.0, 43322.0, 43888.0, 45339.0, 45685.0, 46189.0, 44648.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40948.0, 43970.0, 41602.0, 44746.0, 43922.0, 41250.0, 42504.0, 44676.0, 43887.0, 41135.0, 43266.0, 39677.0, 45400.0, 43322.0, 43888.0, 45339.0, 45685.0, 46189.0, 44648.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95694, 284.00665, 284.05945, 284.11234, 284.1626, 284.21048, 284.26324, 284.31342, 284.35516, 284.39047, 284.41962, 284.44382, 284.46329, 284.47849, 284.49078, 284.50015]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95694, 284.00665, 284.05945, 284.11234, 284.1626, 284.21048, 284.26324, 284.31342, 284.35516, 284.39047, 284.41962, 284.44382, 284.46329, 284.47849, 284.49078, 284.50015]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [9.31458, 0.68504, 0.40618, 0.41526, 0.40511, 0.40469, 0.4134, 0.40519, 0.4059, 0.40491, 0.40713, 0.40544, 0.40546, 0.40622, 0.406, 0.40584, 0.40459, 0.40637, 0.40544, 0.41191]}, "lm loss validation": {"start_step": 
0, "end_step": 2, "step_interval": 5, "values": [6.91036]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [6.91036]}, "lm loss validation ppl": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [1002.60657]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 1, "step_interval": 5, "values": [1002.60657]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39855, "5": 9.39695, "10": 9.03364, "15": 8.67287, "20": 8.28233, "25": 8.00333, "30": 7.88912, "35": 7.67184, "40": 7.5092, "45": 7.35233, "50": 7.18228, "55": 7.15579, "60": 7.14159, "65": 6.99966, "70": 7.05537, "75": 7.05859, "80": 6.94158, "85": 6.84586, "90": 7.24025, "95": 6.84356, "100": 6.9685}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43312.0, "5": 45378.0, "10": 45380.0, "15": 43907.0, "20": 44780.0, "25": 42434.0, "30": 43988.0, "35": 43272.0, "40": 43231.0, "45": 43287.0, "50": 43355.0, "55": 43858.0, "60": 41265.0, "65": 44681.0, "70": 45527.0, "75": 44663.0, "80": 41032.0, "85": 43960.0, "90": 44713.0, "95": 44070.0, "100": 42463.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2131835904.0, "5": 2131835904.0, "10": 2131835904.0, "15": 2131835904.0, "20": 2131835904.0, "25": 2131835904.0, "30": 2131835904.0, "35": 2131835904.0, "40": 2131835904.0, "45": 2131835904.0, "50": 2131835904.0, "55": 2131835904.0, "60": 2131835904.0, "65": 2131835904.0, "70": 2131835904.0, "75": 2131835904.0, "80": 2131835904.0, "85": 2131835904.0, "90": 2131835904.0, "95": 2131835904.0, "100": 2131835904.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2382152192.0, "5": 3312016896.0, "10": 3312016896.0, "15": 3312016896.0, "20": 3312016896.0, "25": 3312016896.0, "30": 3312016896.0, "35": 3312016896.0, "40": 3312016896.0, "45": 3312016896.0, "50": 3312016896.0, "55": 3312016896.0, "60": 3312016896.0, "65": 3312016896.0, "70": 3312016896.0, "75": 3312016896.0, "80": 3312016896.0, "85": 3312016896.0, "90": 3312016896.0, "95": 3312016896.0, "100": 3312016896.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.98924, "5": 0.44597, "10": 0.4413, "15": 0.43981, "20": 0.44416, "25": 0.43901, "30": 0.44945, "35": 0.46365, "40": 0.44603, "45": 0.44055, "50": 0.44875, "55": 0.43358, "60": 0.43248, "65": 0.43199, "70": 0.43266, "75": 0.43124, "80": 0.43242, "85": 0.43226, "90": 0.45266, "95": 0.43088, "100": 0.43162}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json index 9eeb961..beb6a09 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/golden_values_lts.json @@ -1 +1 @@ -{"forward-backward-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.81404, 0.34462, 0.3516, 0.34439, 0.34393, 0.34401, 0.34441, 0.34482, 0.34542, 0.34424, 0.34662, 0.34945, 0.34949, 0.35118, 0.34866, 0.35191, 0.36263, 0.34951, 0.34899, 0.34768]}, "forward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6.31355, 0.16455, 
0.16846, 0.16401, 0.16385, 0.16431, 0.16442, 0.16553, 0.16499, 0.16496, 0.16485, 0.16563, 0.16533, 0.16845, 0.16921, 0.16981, 0.1806, 0.16911, 0.16754, 0.16714]}, "backward-compute-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [2.99825, 0.17436, 0.17778, 0.1744, 0.17441, 0.17407, 0.17356, 0.17524, 0.17452, 0.175, 0.17682, 0.17918, 0.17946, 0.17646, 0.1748, 0.17691, 0.17882, 0.17598, 0.17491, 0.17482]}, "layernorm-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [4e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05, 3e-05, 2e-05, 2e-05, 2e-05, 2e-05]}, "embedding-grads-all-reduce-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [6e-05, 3e-05, 3e-05, 3e-05, 3e-05, 4e-05, 3e-05, 4e-05, 3e-05, 3e-05, 4e-05, 3e-05, 3e-05, 3e-05, 4e-05, 4e-05, 4e-05, 4e-05, 3e-05, 4e-05]}, "all-grads-sync-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.32584, 0.00364, 0.00361, 0.00362, 0.00361, 0.00362, 0.00361, 0.00378, 0.00364, 0.0036, 0.00362, 0.00359, 0.00361, 0.00363, 0.00361, 0.0037, 0.0037, 0.0036, 0.00362, 0.0036]}, "optimizer-copy-to-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00127, 0.00097, 0.00102, 0.00098, 0.00096, 0.00097, 0.00096, 0.001, 0.00097, 0.00101, 0.00097, 0.00099, 0.00091, 0.00096, 0.00097, 0.001, 0.00099, 0.00097, 0.00096, 0.00098]}, "optimizer-clip-main-grad-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.82922, 0.00468, 0.00493, 0.00495, 0.00501, 0.00506, 0.00519, 0.00518, 0.00505, 0.00512, 0.00509, 0.00462, 0.00457, 0.0046, 0.00508, 0.00493, 0.00442, 0.00498, 0.00507, 0.00494]}, "optimizer-count-zeros-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.03499, 0.02591, 0.02578, 0.0258, 0.02614, 0.026, 0.02589, 0.02598, 0.026, 0.02573, 0.02873, 0.02584, 0.02574, 0.02595, 0.02589, 0.02585, 0.02573, 0.02574, 0.02577, 0.02573]}, "optimizer-inner-step-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.01559, 0.00285, 0.00288, 0.00284, 0.00283, 0.00286, 0.00287, 0.00298, 0.00288, 0.0041, 0.00302, 0.00287, 0.00288, 0.00286, 0.00287, 0.00293, 0.00287, 0.00287, 0.00285, 0.00287]}, "optimizer-copy-main-to-model-params-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.00316, 0.00308, 0.00312, 0.0031, 0.00346, 0.0031, 0.00311, 0.0031, 0.00312, 0.00459, 0.00309, 0.00308, 0.0031, 0.00311, 0.0031, 0.00312, 0.00307, 0.00309, 0.00308, 0.00308]}, "optimizer-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.88542, 0.03816, 0.03835, 0.03835, 0.03902, 0.03861, 0.03864, 0.03888, 0.03865, 0.04122, 0.04158, 0.03801, 0.03781, 0.0381, 0.03851, 0.0385, 0.03778, 0.03827, 0.03833, 0.03823]}, "learning-rate": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "learning-rate vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [0.0001, 0.0001, 9e-05, 9e-05, 8e-05, 8e-05, 7e-05, 7e-05, 6e-05, 6e-05, 5e-05, 5e-05, 5e-05, 4e-05, 4e-05, 3e-05, 3e-05, 2e-05, 2e-05, 1e-05]}, "batch-size": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "batch-size vs samples": {"start_step": 0, 
"end_step": 100, "step_interval": 5, "values": [32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0]}, "lm loss": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41112, 8.88304, 8.56269, 8.28765, 8.10224, 7.83813, 7.53409, 7.39411, 7.28757, 7.3679, 7.22194, 7.10575, 7.0526, 6.91422, 6.96483, 6.97306, 7.03511, 6.70374, 6.97038]}, "lm loss vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [10.39855, 9.41112, 8.88304, 8.56269, 8.28765, 8.10224, 7.83813, 7.53409, 7.39411, 7.28757, 7.3679, 7.22194, 7.10575, 7.0526, 6.91422, 6.96483, 6.97306, 7.03511, 6.70374, 6.97038]}, "loss-scale": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "loss-scale vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]}, "grad-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20571, 2.60016, 2.0812, 1.91834, 1.69111, 1.62094, 1.56876, 1.46252, 1.32493, 1.01436, 0.91945, 0.9683, 0.92765, 1.02683, 0.93685, 0.8336, 1.06608, 1.06564, 1.00043]}, "grad-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [22.34142, 2.20571, 2.60016, 2.0812, 1.91834, 1.69111, 1.62094, 1.56876, 1.46252, 1.32493, 1.01436, 0.91945, 0.9683, 0.92765, 1.02683, 0.93685, 0.8336, 1.06608, 1.06564, 1.00043]}, "num-zeros": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40958.0, 43972.0, 41597.0, 44750.0, 43923.0, 41262.0, 42494.0, 44656.0, 43889.0, 41161.0, 43247.0, 39676.0, 45397.0, 43316.0, 43882.0, 45349.0, 45684.0, 46190.0, 44647.0]}, "num-zeros vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [43312.0, 40958.0, 43972.0, 41597.0, 44750.0, 43923.0, 41262.0, 42494.0, 44656.0, 43889.0, 41161.0, 43247.0, 39676.0, 45397.0, 43316.0, 43882.0, 45349.0, 45684.0, 46190.0, 44647.0]}, "params-norm": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95691, 284.00662, 284.05942, 284.1123, 284.1626, 284.21048, 284.26328, 284.31339, 284.35516, 284.39047, 284.41965, 284.44385, 284.46332, 284.47849, 284.49078, 284.50018]}, "params-norm vs samples": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [283.80814, 283.83228, 283.87, 283.91107, 283.95691, 284.00662, 284.05942, 284.1123, 284.1626, 284.21048, 284.26328, 284.31339, 284.35516, 284.39047, 284.41965, 284.44385, 284.46332, 284.47849, 284.49078, 284.50018]}, "iteration-time": {"start_step": 0, "end_step": 100, "step_interval": 5, "values": [11.73555, 0.40514, 0.41329, 0.40506, 0.40504, 0.40534, 0.4059, 0.40634, 0.40634, 0.40933, 0.41129, 0.40992, 0.4098, 0.41183, 0.40987, 0.41385, 0.42316, 0.41023, 0.40995, 0.40824]}, "lm loss validation": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9103]}, "lm loss validation vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [6.9103]}, "lm loss validation ppl": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1002.54486]}, "lm loss validation ppl vs samples": {"start_step": 0, "end_step": 2, "step_interval": 5, "values": [1002.54486]}} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39855, "5": 
9.39702, "10": 9.03361, "15": 8.67281, "20": 8.28238, "25": 8.00338, "30": 7.88907, "35": 7.67185, "40": 7.50913, "45": 7.35229, "50": 7.18219, "55": 7.15572, "60": 7.14146, "65": 6.99973, "70": 7.05527, "75": 7.05849, "80": 6.94151, "85": 6.84587, "90": 7.24028, "95": 6.84322, "100": 6.96848}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43312.0, "5": 45375.0, "10": 45362.0, "15": 43904.0, "20": 44779.0, "25": 42437.0, "30": 44003.0, "35": 43282.0, "40": 43249.0, "45": 43267.0, "50": 43350.0, "55": 43864.0, "60": 41279.0, "65": 44688.0, "70": 45538.0, "75": 44675.0, "80": 41033.0, "85": 43959.0, "90": 44720.0, "95": 44060.0, "100": 42465.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2131048960.0, "5": 2131048960.0, "10": 2131048960.0, "15": 2131048960.0, "20": 2131048960.0, "25": 2131048960.0, "30": 2131048960.0, "35": 2131048960.0, "40": 2131048960.0, "45": 2131048960.0, "50": 2131048960.0, "55": 2131048960.0, "60": 2131048960.0, "65": 2131048960.0, "70": 2131048960.0, "75": 2131048960.0, "80": 2131048960.0, "85": 2131048960.0, "90": 2131048960.0, "95": 2131048960.0, "100": 2131048960.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2368255488.0, "5": 3298251264.0, "10": 3298251264.0, "15": 3298251264.0, "20": 3298251264.0, "25": 3298251264.0, "30": 3298251264.0, "35": 3298251264.0, "40": 3298251264.0, "45": 3298251264.0, "50": 3298251264.0, "55": 3298251264.0, "60": 3298251264.0, "65": 3298251264.0, "70": 3298251264.0, "75": 3298251264.0, "80": 3298251264.0, "85": 3298251264.0, "90": 3298251264.0, "95": 3298251264.0, "100": 3298251264.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.62491, "5": 0.39861, "10": 0.40396, "15": 0.3966, "20": 0.39223, "25": 0.39279, "30": 0.39613, "35": 0.39607, "40": 0.38747, "45": 0.38845, "50": 0.39379, "55": 0.38604, "60": 0.39118, "65": 0.39009, "70": 0.39017, "75": 0.38798, "80": 0.39294, "85": 0.39198, "90": 0.38862, "95": 0.38931, "100": 0.39383}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml index 289e213..4df31e3 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -52,4 +52,5 @@ MODEL_ARGS: --attention-softmax-in-fp32: true --ckpt-format: torch --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json index cac5161..fcb2532 100644 --- 
a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_dev.json @@ -1,763 +1 @@ -{ - "forward-backward-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 9.31314, - 0.40373, - 0.40036, - 0.40377, - 0.40009, - 0.40024, - 0.40008, - 0.40025, - 0.40037, - 0.40077, - 0.39995, - 0.39931, - 0.39853, - 0.40105, - 0.40045, - 0.40088, - 0.39933, - 0.39867, - 0.39862, - 0.40146 - ] - }, - "forward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 5.20489, - 0.17867, - 0.17875, - 0.18291, - 0.18015, - 0.18089, - 0.18006, - 0.1809, - 0.18013, - 0.18084, - 0.18042, - 0.18048, - 0.17867, - 0.18032, - 0.18036, - 0.17967, - 0.17941, - 0.1796, - 0.17815, - 0.18228 - ] - }, - "backward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 2.81105, - 0.21748, - 0.21374, - 0.21269, - 0.21168, - 0.21226, - 0.2121, - 0.21196, - 0.211, - 0.21203, - 0.21167, - 0.2108, - 0.21104, - 0.21136, - 0.21186, - 0.21203, - 0.21083, - 0.21074, - 0.21117, - 0.21195 - ] - }, - "layernorm-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00512, - 0.00431, - 0.00431, - 0.00429, - 0.00441, - 0.00434, - 0.00441, - 0.00436, - 0.00493, - 0.00433, - 0.00438, - 0.00473, - 0.00441, - 0.00528, - 0.00439, - 0.0044, - 0.00435, - 0.00437, - 0.00441, - 0.0045 - ] - }, - "embedding-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 5e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 5e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05, - 4e-05 - ] - }, - "all-grads-sync-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.05666, - 0.00366, - 0.00367, - 0.00368, - 0.00368, - 0.00368, - 0.00366, - 0.00366, - 0.00363, - 0.00367, - 0.00366, - 0.00368, - 0.00367, - 0.00368, - 0.00368, - 0.00369, - 0.00367, - 0.0037, - 0.00368, - 0.00368 - ] - }, - "optimizer-copy-to-main-grad-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0011, - 0.00069, - 0.00071, - 0.00073, - 0.00072, - 0.00072, - 0.00077, - 0.00071, - 0.00075, - 0.00074, - 0.00076, - 0.00075, - 0.00075, - 0.00089, - 0.00076, - 0.00076, - 0.00075, - 0.00076, - 0.00077, - 0.00076 - ] - }, - "optimizer-clip-main-grad-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.70283, - 0.00449, - 0.00444, - 0.00452, - 0.00448, - 0.00448, - 0.00443, - 0.00452, - 0.00448, - 0.00445, - 0.00453, - 0.00385, - 0.00391, - 0.00488, - 0.00448, - 0.00393, - 0.00454, - 0.00395, - 0.0045, - 0.00395 - ] - }, - "optimizer-count-zeros-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.03309, - 0.02705, - 0.02695, - 0.02681, - 0.02743, - 0.0274, - 0.02716, - 0.02692, - 0.02696, - 0.02694, - 0.02683, - 0.02723, - 0.02741, - 0.02693, - 0.02688, - 0.02703, - 0.02721, - 0.02743, - 0.02725, - 0.02672 - ] - }, - "optimizer-inner-step-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.01276, - 0.00279, - 0.00278, - 0.00279, - 0.00281, - 0.00283, - 0.0028, - 0.00278, - 0.00278, - 0.00277, - 0.00277, - 0.00282, - 0.00282, - 0.00286, - 0.00283, - 0.00278, - 0.00281, - 
0.0028, - 0.00283, - 0.00281 - ] - }, - "optimizer-copy-main-to-model-params-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00299, - 0.00342, - 0.00298, - 0.00298, - 0.00301, - 0.00299, - 0.00321, - 0.00299, - 0.00297, - 0.00296, - 0.00298, - 0.00298, - 0.00309, - 0.00309, - 0.00298, - 0.00299, - 0.00299, - 0.00298, - 0.00304, - 0.00303 - ] - }, - "optimizer-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.75369, - 0.03908, - 0.03853, - 0.03848, - 0.03909, - 0.03905, - 0.03905, - 0.03857, - 0.03857, - 0.0385, - 0.03853, - 0.03832, - 0.03863, - 0.0393, - 0.03858, - 0.03814, - 0.03897, - 0.03856, - 0.03903, - 0.03795 - ] - }, - "learning-rate": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 9e-05, - 9e-05, - 8e-05, - 8e-05, - 7e-05, - 7e-05, - 6e-05, - 6e-05, - 5e-05, - 5e-05, - 5e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 2e-05, - 2e-05, - 1e-05 - ] - }, - "learning-rate vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 9e-05, - 9e-05, - 8e-05, - 8e-05, - 7e-05, - 7e-05, - 6e-05, - 6e-05, - 5e-05, - 5e-05, - 5e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 2e-05, - 2e-05, - 1e-05 - ] - }, - "batch-size": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "batch-size vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39767, - 9.41317, - 8.87813, - 8.5684, - 8.2951, - 8.11103, - 7.84414, - 7.5425, - 7.39999, - 7.29586, - 7.3749, - 7.23104, - 7.11682, - 7.06328, - 6.92509, - 6.97755, - 6.98393, - 7.04582, - 6.71802, - 6.98051 - ] - }, - "lm loss vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39767, - 9.41317, - 8.87813, - 8.5684, - 8.2951, - 8.11103, - 7.84414, - 7.5425, - 7.39999, - 7.29586, - 7.3749, - 7.23104, - 7.11682, - 7.06328, - 6.92509, - 6.97755, - 6.98393, - 7.04582, - 6.71802, - 6.98051 - ] - }, - "loss-scale": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "loss-scale vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "grad-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 22.49022, - 2.20544, - 2.51715, - 2.08127, - 1.91884, - 1.69272, - 1.62465, - 1.57572, - 1.4803, - 1.31751, - 1.06666, - 0.8993, - 0.90904, - 1.01869, - 1.52232, - 0.87585, - 1.08829, - 0.93451, - 1.30493, - 0.90059 - ] - }, - "grad-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 22.49022, - 2.20544, - 2.51715, - 2.08127, - 1.91884, - 1.69272, - 1.62465, - 1.57572, - 1.4803, - 1.31751, - 1.06666, - 0.8993, - 0.90904, - 1.01869, - 1.52232, - 0.87585, - 
1.08829, - 0.93451, - 1.30493, - 0.90059 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43305.0, - 40966.0, - 43940.0, - 41620.0, - 44783.0, - 43929.0, - 41225.0, - 42517.0, - 44642.0, - 43905.0, - 41141.0, - 43266.0, - 39698.0, - 45369.0, - 43290.0, - 43888.0, - 45355.0, - 45686.0, - 46159.0, - 44703.0 - ] - }, - "num-zeros vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43305.0, - 40966.0, - 43940.0, - 41620.0, - 44783.0, - 43929.0, - 41225.0, - 42517.0, - 44642.0, - 43905.0, - 41141.0, - 43266.0, - 39698.0, - 45369.0, - 43290.0, - 43888.0, - 45355.0, - 45686.0, - 46159.0, - 44703.0 - ] - }, - "params-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80814, - 283.8324, - 283.87021, - 283.9111, - 283.95691, - 284.00668, - 284.05994, - 284.11295, - 284.16342, - 284.21112, - 284.26437, - 284.31451, - 284.35611, - 284.39172, - 284.42053, - 284.44376, - 284.46249, - 284.47748, - 284.48962, - 284.49857 - ] - }, - "params-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80814, - 283.8324, - 283.87021, - 283.9111, - 283.95691, - 284.00668, - 284.05994, - 284.11295, - 284.16342, - 284.21112, - 284.26437, - 284.31451, - 284.35611, - 284.39172, - 284.42053, - 284.44376, - 284.46249, - 284.47748, - 284.48962, - 284.49857 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.11234, - 0.4649, - 0.46098, - 0.46501, - 0.46182, - 0.46156, - 0.46171, - 0.46107, - 0.4613, - 0.46164, - 0.46086, - 0.46018, - 0.45981, - 0.4639, - 0.46112, - 0.46197, - 0.46097, - 0.45954, - 0.46005, - 0.4621 - ] - }, - "lm loss validation": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 6.91467 - ] - }, - "lm loss validation vs samples": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 6.91467 - ] - }, - "lm loss validation ppl": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 1006.93915 - ] - }, - "lm loss validation ppl vs samples": { - "start_step": 0, - "end_step": 1, - "step_interval": 5, - "values": [ - 1006.93915 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39767, "5": 9.39811, "10": 9.03957, "15": 8.67613, "20": 8.29, "25": 8.00994, "30": 7.89589, "35": 7.67934, "40": 7.51671, "45": 7.36013, "50": 7.18861, "55": 7.16636, "60": 7.15229, "65": 7.01105, "70": 7.06838, "75": 7.06893, "80": 6.95242, "85": 6.85857, "90": 7.25274, "95": 6.85205, "100": 6.98569}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43305.0, "5": 45379.0, "10": 45351.0, "15": 43903.0, "20": 44776.0, "25": 42469.0, "30": 43982.0, "35": 43280.0, "40": 43211.0, "45": 43310.0, "50": 43378.0, "55": 43821.0, "60": 41240.0, "65": 44642.0, "70": 45535.0, "75": 44656.0, "80": 41052.0, "85": 43990.0, "90": 44708.0, "95": 44056.0, "100": 42460.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2134326272.0, "5": 2134326272.0, "10": 2134326272.0, "15": 2134326272.0, "20": 2134326272.0, "25": 2134326272.0, "30": 2134326272.0, "35": 2134326272.0, "40": 2134326272.0, "45": 2134326272.0, "50": 2134326272.0, "55": 2134326272.0, "60": 2134326272.0, "65": 2134326272.0, "70": 2134326272.0, "75": 2134326272.0, "80": 2134326272.0, "85": 2134326272.0, "90": 2134326272.0, "95": 
2134326272.0, "100": 2134326272.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2241165824.0, "5": 3174700544.0, "10": 3174700544.0, "15": 3174700544.0, "20": 3174700544.0, "25": 3174700544.0, "30": 3174700544.0, "35": 3174700544.0, "40": 3174700544.0, "45": 3174700544.0, "50": 3174700544.0, "55": 3174700544.0, "60": 3174700544.0, "65": 3174700544.0, "70": 3174700544.0, "75": 3174700544.0, "80": 3174700544.0, "85": 3174700544.0, "90": 3174700544.0, "95": 3174700544.0, "100": 3174700544.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.96463, "5": 0.52303, "10": 0.5011, "15": 0.5156, "20": 0.50037, "25": 0.49862, "30": 0.50087, "35": 0.50985, "40": 0.50484, "45": 0.5061, "50": 0.4994, "55": 0.49853, "60": 0.49964, "65": 0.5, "70": 0.49799, "75": 0.49922, "80": 0.50111, "85": 0.49851, "90": 0.5062, "95": 0.50108, "100": 0.50192}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json index 27e890f..dc063f4 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/golden_values_lts.json @@ -1,763 +1 @@ -{ - "forward-backward-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.9967, - 0.401, - 0.40147, - 0.3912, - 0.39873, - 0.39107, - 0.39949, - 0.40485, - 0.39712, - 0.39832, - 0.39764, - 0.40869, - 0.39232, - 0.39721, - 0.39904, - 0.40227, - 0.39138, - 0.39833, - 0.40047, - 0.39544 - ] - }, - "forward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 6.48719, - 0.1808, - 0.18642, - 0.17754, - 0.18021, - 0.17845, - 0.17971, - 0.18366, - 0.18445, - 0.17837, - 0.18213, - 0.1862, - 0.17839, - 0.18306, - 0.17791, - 0.18267, - 0.17785, - 0.17902, - 0.1859, - 0.18165 - ] - }, - "backward-compute-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 2.90603, - 0.21569, - 0.20801, - 0.20679, - 0.21361, - 0.20617, - 0.21449, - 0.21342, - 0.20709, - 0.21379, - 0.20706, - 0.21465, - 0.20741, - 0.2069, - 0.2142, - 0.21282, - 0.20722, - 0.21411, - 0.20809, - 0.20825 - ] - }, - "layernorm-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00474, - 0.00397, - 0.00441, - 0.00441, - 0.0045, - 0.00432, - 0.00444, - 0.00454, - 0.00446, - 0.00429, - 0.00445, - 0.00452, - 0.00445, - 0.0045, - 0.00452, - 0.00501, - 0.00425, - 0.00435, - 0.00446, - 0.00455 - ] - }, - "embedding-grads-all-reduce-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 6e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 4e-05, - 3e-05, - 3e-05, - 3e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 3e-05, - 3e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 3e-05 - ] - }, - "all-grads-sync-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.3196, - 0.00359, - 0.0036, - 0.00358, - 0.00357, - 0.00358, - 0.0036, - 0.0036, - 0.00358, - 0.00361, - 0.00359, - 0.00357, - 0.00357, - 0.00359, - 0.0036, - 0.00374, - 0.00358, - 0.00358, - 0.00358, - 0.00357 - ] - }, - "optimizer-copy-to-main-grad-time": { - 
"start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00118, - 0.0006, - 0.0006, - 0.00059, - 0.00059, - 0.00059, - 0.00063, - 0.00059, - 0.00058, - 0.00064, - 0.00061, - 0.00059, - 0.00059, - 0.00058, - 0.0006, - 0.00065, - 0.00059, - 0.00058, - 0.00059, - 0.00058 - ] - }, - "optimizer-clip-main-grad-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.7916, - 0.00452, - 0.00459, - 0.00449, - 0.00456, - 0.00447, - 0.00456, - 0.00447, - 0.00454, - 0.00455, - 0.00455, - 0.00396, - 0.00391, - 0.00458, - 0.00535, - 0.00401, - 0.00486, - 0.00387, - 0.00445, - 0.00389 - ] - }, - "optimizer-count-zeros-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.03344, - 0.02605, - 0.02598, - 0.02583, - 0.02597, - 0.02572, - 0.02605, - 0.02578, - 0.02584, - 0.0262, - 0.03104, - 0.02591, - 0.026, - 0.02602, - 0.02589, - 0.02577, - 0.02595, - 0.02611, - 0.02591, - 0.02596 - ] - }, - "optimizer-inner-step-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.01284, - 0.00279, - 0.00282, - 0.00304, - 0.00277, - 0.00295, - 0.00282, - 0.0028, - 0.0028, - 0.0028, - 0.00322, - 0.00286, - 0.00278, - 0.00281, - 0.0028, - 0.00289, - 0.00281, - 0.0028, - 0.00283, - 0.00281 - ] - }, - "optimizer-copy-main-to-model-params-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.00383, - 0.00307, - 0.00307, - 0.00478, - 0.00306, - 0.00377, - 0.00308, - 0.00307, - 0.00306, - 0.00304, - 0.00394, - 0.00305, - 0.00306, - 0.00305, - 0.00307, - 0.00305, - 0.00394, - 0.00307, - 0.00307, - 0.00306 - ] - }, - "optimizer-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.84399, - 0.03764, - 0.03767, - 0.03939, - 0.03757, - 0.03834, - 0.03775, - 0.03732, - 0.03742, - 0.03785, - 0.04398, - 0.03697, - 0.03696, - 0.03764, - 0.03838, - 0.03699, - 0.03925, - 0.03705, - 0.03746, - 0.03691 - ] - }, - "learning-rate": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 9e-05, - 9e-05, - 8e-05, - 8e-05, - 7e-05, - 7e-05, - 6e-05, - 6e-05, - 5e-05, - 5e-05, - 5e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 2e-05, - 2e-05, - 1e-05 - ] - }, - "learning-rate vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 0.0001, - 0.0001, - 9e-05, - 9e-05, - 8e-05, - 8e-05, - 7e-05, - 7e-05, - 6e-05, - 6e-05, - 5e-05, - 5e-05, - 5e-05, - 4e-05, - 4e-05, - 3e-05, - 3e-05, - 2e-05, - 2e-05, - 1e-05 - ] - }, - "batch-size": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "batch-size vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0, - 32.0 - ] - }, - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39767, - 9.41313, - 8.87826, - 8.56837, - 8.29503, - 8.11096, - 7.84414, - 7.54251, - 7.39997, - 7.29573, - 7.37498, - 7.23101, - 7.11673, - 7.06342, - 6.92492, - 6.97751, - 6.98396, - 7.04575, - 6.71801, - 6.98043 - ] - }, - "lm loss vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39767, - 9.41313, - 8.87826, - 8.56837, - 
8.29503, - 8.11096, - 7.84414, - 7.54251, - 7.39997, - 7.29573, - 7.37498, - 7.23101, - 7.11673, - 7.06342, - 6.92492, - 6.97751, - 6.98396, - 7.04575, - 6.71801, - 6.98043 - ] - }, - "loss-scale": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "loss-scale vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0, - 1.0 - ] - }, - "grad-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 22.49022, - 2.20552, - 2.51692, - 2.08126, - 1.91884, - 1.69274, - 1.62471, - 1.57573, - 1.48035, - 1.31762, - 1.06619, - 0.8992, - 0.90925, - 1.01884, - 1.52306, - 0.87798, - 1.08796, - 0.9338, - 1.30663, - 0.90086 - ] - }, - "grad-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 22.49022, - 2.20552, - 2.51692, - 2.08126, - 1.91884, - 1.69274, - 1.62471, - 1.57573, - 1.48035, - 1.31762, - 1.06619, - 0.8992, - 0.90925, - 1.01884, - 1.52306, - 0.87798, - 1.08796, - 0.9338, - 1.30663, - 0.90086 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43305.0, - 40957.0, - 43944.0, - 41613.0, - 44764.0, - 43920.0, - 41215.0, - 42515.0, - 44647.0, - 43902.0, - 41129.0, - 43274.0, - 39706.0, - 45365.0, - 43273.0, - 43897.0, - 45345.0, - 45686.0, - 46161.0, - 44705.0 - ] - }, - "num-zeros vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43305.0, - 40957.0, - 43944.0, - 41613.0, - 44764.0, - 43920.0, - 41215.0, - 42515.0, - 44647.0, - 43902.0, - 41129.0, - 43274.0, - 39706.0, - 45365.0, - 43273.0, - 43897.0, - 45345.0, - 45686.0, - 46161.0, - 44705.0 - ] - }, - "params-norm": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80814, - 283.83237, - 283.87021, - 283.9111, - 283.95691, - 284.00668, - 284.05994, - 284.11295, - 284.16345, - 284.21112, - 284.2644, - 284.31454, - 284.35611, - 284.39169, - 284.42053, - 284.44376, - 284.46249, - 284.47751, - 284.48962, - 284.49857 - ] - }, - "params-norm vs samples": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 283.80814, - 283.83237, - 283.87021, - 283.9111, - 283.95691, - 284.00668, - 284.05994, - 284.11295, - 284.16345, - 284.21112, - 284.2644, - 284.31454, - 284.35611, - 284.39169, - 284.42053, - 284.44376, - 284.46249, - 284.47751, - 284.48962, - 284.49857 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 11.88485, - 0.46024, - 0.46083, - 0.45067, - 0.45779, - 0.45103, - 0.45872, - 0.46374, - 0.45605, - 0.45774, - 0.46418, - 0.46713, - 0.45087, - 0.45645, - 0.45979, - 0.46102, - 0.45129, - 0.45737, - 0.45953, - 0.45489 - ] - }, - "lm loss validation": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 6.91465 - ] - }, - "lm loss validation vs samples": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 6.91465 - ] - }, - "lm loss validation ppl": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 1006.91901 - ] - }, - "lm loss validation ppl vs samples": { - "start_step": 0, - "end_step": 2, - "step_interval": 5, - "values": [ - 1006.91901 - ] - } -} \ No newline at end of 
file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39767, "5": 9.39801, "10": 9.03956, "15": 8.67616, "20": 8.28988, "25": 8.00984, "30": 7.89594, "35": 7.67928, "40": 7.51659, "45": 7.36013, "50": 7.1886, "55": 7.16636, "60": 7.1523, "65": 7.01105, "70": 7.06824, "75": 7.06868, "80": 6.95225, "85": 6.85845, "90": 7.25256, "95": 6.85207, "100": 6.98564}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43305.0, "5": 45377.0, "10": 45352.0, "15": 43895.0, "20": 44779.0, "25": 42465.0, "30": 43995.0, "35": 43263.0, "40": 43220.0, "45": 43295.0, "50": 43399.0, "55": 43839.0, "60": 41256.0, "65": 44643.0, "70": 45539.0, "75": 44659.0, "80": 41043.0, "85": 43981.0, "90": 44714.0, "95": 44056.0, "100": 42470.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2132097536.0, "5": 2132097536.0, "10": 2132097536.0, "15": 2132097536.0, "20": 2132097536.0, "25": 2132097536.0, "30": 2132097536.0, "35": 2132097536.0, "40": 2132097536.0, "45": 2132097536.0, "50": 2132097536.0, "55": 2132097536.0, "60": 2132097536.0, "65": 2132097536.0, "70": 2132097536.0, "75": 2132097536.0, "80": 2132097536.0, "85": 2132097536.0, "90": 2132097536.0, "95": 2132097536.0, "100": 2132097536.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2227006976.0, "5": 3157132288.0, "10": 3157132288.0, "15": 3157132288.0, "20": 3157132288.0, "25": 3157132288.0, "30": 3157132288.0, "35": 3157132288.0, "40": 3157132288.0, "45": 3157132288.0, "50": 3157132288.0, "55": 3157132288.0, "60": 3157132288.0, "65": 3157132288.0, "70": 3157132288.0, "75": 3157132288.0, "80": 3157132288.0, "85": 3157132288.0, "90": 3157132288.0, "95": 3157132288.0, "100": 3157132288.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.52803, "5": 0.45782, "10": 0.45042, "15": 0.44846, "20": 0.45655, "25": 0.45299, "30": 0.44341, "35": 0.44859, "40": 0.44669, "45": 0.45191, "50": 0.45143, "55": 0.45215, "60": 0.44733, "65": 0.4527, "70": 0.44713, "75": 0.43804, "80": 0.44656, "85": 0.44561, "90": 0.44732, "95": 0.44212, "100": 0.44466}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml index 8cfc7e4..6a5a701 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -53,4 +53,5 @@ MODEL_ARGS: --attention-softmax-in-fp32: true --ckpt-format: torch --attention-backend: unfused + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json index 
8150d55..a7746fd 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_dev.json @@ -1,83 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.33709, - 9.42687, - 8.8635, - 8.56221, - 8.28399, - 8.10587, - 7.84887, - 7.53552, - 7.41074, - 7.29558, - 7.393, - 7.21933, - 7.10287, - 7.04869, - 6.90401, - 6.95994, - 6.9644, - 7.03536, - 6.70027, - 6.96648 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43333.0, - 41002.0, - 44020.0, - 41734.0, - 44800.0, - 43940.0, - 41271.0, - 42543.0, - 44725.0, - 43906.0, - 41149.0, - 43283.0, - 39763.0, - 45410.0, - 43320.0, - 43922.0, - 45383.0, - 45713.0, - 46318.0, - 44723.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 9.40905, - 0.23547, - 0.23339, - 0.23504, - 0.23331, - 0.23198, - 0.23546, - 0.22987, - 0.2342, - 0.23143, - 0.49625, - 0.2285, - 0.22833, - 0.22775, - 0.23156, - 0.22944, - 0.23033, - 0.23074, - 0.23117, - 0.22948 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33709, "5": 9.40642, "10": 9.0379, "15": 8.66992, "20": 8.29237, "25": 8.01431, "30": 7.88976, "35": 7.6675, "40": 7.51219, "45": 7.36991, "50": 7.1802, "55": 7.15485, "60": 7.14523, "65": 6.99168, "70": 7.05582, "75": 7.06391, "80": 6.94189, "85": 6.85025, "90": 7.23443, "95": 6.84062, "100": 6.9778}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43333.0, "5": 45405.0, "10": 45389.0, "15": 43950.0, "20": 44842.0, "25": 42711.0, "30": 44045.0, "35": 43296.0, "40": 43292.0, "45": 43297.0, "50": 43372.0, "55": 43931.0, "60": 41332.0, "65": 44727.0, "70": 45530.0, "75": 44687.0, "80": 41109.0, "85": 44005.0, "90": 44713.0, "95": 44106.0, "100": 42470.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4106481664.0, "5": 4106481664.0, "10": 4106481664.0, "15": 4106481664.0, "20": 4106481664.0, "25": 4106481664.0, "30": 4106481664.0, "35": 4106481664.0, "40": 4106481664.0, "45": 4106481664.0, "50": 4106481664.0, "55": 4106481664.0, "60": 4106481664.0, "65": 4106481664.0, "70": 4106481664.0, "75": 4106481664.0, "80": 4106481664.0, "85": 4106481664.0, "90": 4106481664.0, "95": 4106481664.0, "100": 4106481664.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4359213056.0, "5": 6137096192.0, "10": 6137096192.0, "15": 6137096192.0, "20": 6137096192.0, "25": 6137096192.0, "30": 6137096192.0, "35": 6137096192.0, "40": 6137096192.0, "45": 6137096192.0, "50": 6137096192.0, "55": 6137096192.0, "60": 6137096192.0, "65": 6137096192.0, "70": 6137096192.0, "75": 6137096192.0, "80": 6137096192.0, "85": 6137096192.0, "90": 6137096192.0, "95": 6137096192.0, "100": 6137096192.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.88682, "5": 0.2274, "10": 0.226, "15": 0.23097, "20": 0.22087, "25": 0.22518, "30": 0.72829, "35": 0.22423, "40": 0.21996, "45": 0.22667, "50": 0.21992, "55": 0.2215, "60": 0.21874, "65": 0.22089, "70": 0.21824, "75": 0.22488, "80": 0.2327, "85": 0.21827, "90": 0.23175, "95": 0.22662, "100": 0.22629}}} \ No newline at end of file diff --git 
a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json index bd1e723..f9869f3 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/golden_values_lts.json @@ -1,83 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.33709, - 9.42687, - 8.8634, - 8.56213, - 8.28406, - 8.10594, - 7.84882, - 7.53542, - 7.41068, - 7.29571, - 7.39283, - 7.2191, - 7.10262, - 7.04837, - 6.90357, - 6.96014, - 6.96438, - 7.03513, - 6.70023, - 6.96639 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43334.0, - 41023.0, - 44021.0, - 41733.0, - 44803.0, - 43935.0, - 41268.0, - 42516.0, - 44710.0, - 43908.0, - 41143.0, - 43285.0, - 39763.0, - 45410.0, - 43315.0, - 43919.0, - 45394.0, - 45708.0, - 46319.0, - 44709.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 14.36472, - 0.24447, - 0.24436, - 0.23998, - 0.23902, - 0.38149, - 0.25367, - 0.23963, - 0.23768, - 0.23812, - 0.24016, - 0.23918, - 0.239, - 0.23853, - 0.23868, - 0.23858, - 0.23757, - 0.2428, - 0.24091, - 0.2352 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33709, "5": 9.4065, "10": 9.03793, "15": 8.66978, "20": 8.29225, "25": 8.0142, "30": 7.88977, "35": 7.66719, "40": 7.51221, "45": 7.36993, "50": 7.18004, "55": 7.15482, "60": 7.14496, "65": 6.99164, "70": 7.05575, "75": 7.06375, "80": 6.94188, "85": 6.85016, "90": 7.23422, "95": 6.84058, "100": 6.97772}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43334.0, "5": 45398.0, "10": 45372.0, "15": 43928.0, "20": 44824.0, "25": 42699.0, "30": 44046.0, "35": 43283.0, "40": 43286.0, "45": 43296.0, "50": 43367.0, "55": 43941.0, "60": 41347.0, "65": 44731.0, "70": 45537.0, "75": 44665.0, "80": 41109.0, "85": 43998.0, "90": 44716.0, "95": 44097.0, "100": 42471.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4106481664.0, "5": 4106481664.0, "10": 4106481664.0, "15": 4106481664.0, "20": 4106481664.0, "25": 4106481664.0, "30": 4106481664.0, "35": 4106481664.0, "40": 4106481664.0, "45": 4106481664.0, "50": 4106481664.0, "55": 4106481664.0, "60": 4106481664.0, "65": 4106481664.0, "70": 4106481664.0, "75": 4106481664.0, "80": 4106481664.0, "85": 4106481664.0, "90": 4106481664.0, "95": 4106481664.0, "100": 4106481664.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4359213056.0, "5": 6137096192.0, "10": 6137096192.0, "15": 6137096192.0, "20": 6137096192.0, "25": 6137096192.0, "30": 6137096192.0, "35": 6137096192.0, "40": 6137096192.0, "45": 6137096192.0, "50": 6137096192.0, "55": 6137096192.0, "60": 6137096192.0, "65": 6137096192.0, "70": 6137096192.0, "75": 6137096192.0, "80": 6137096192.0, "85": 6137096192.0, "90": 6137096192.0, "95": 6137096192.0, "100": 6137096192.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 13.54255, "5": 0.23581, "10": 0.23482, "15": 0.22655, "20": 0.23567, "25": 0.22702, "30": 0.22841, "35": 0.24983, "40": 0.22588, "45": 0.23063, "50": 0.22458, "55": 
0.2276, "60": 0.22455, "65": 0.23121, "70": 0.22357, "75": 0.22668, "80": 0.22415, "85": 0.23064, "90": 0.22536, "95": 0.22355, "100": 0.22337}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml index a05129f..268cd27 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_dev.json new file mode 100644 index 0000000..929b2cb --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_dev.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33709, "5": 9.40642, "10": 9.0379, "15": 8.66992, "20": 8.29237, "25": 8.01431, "30": 7.88976, "35": 7.6675, "40": 7.51219, "45": 7.36991, "50": 7.1802, "55": 7.15485, "60": 7.14523, "65": 6.99168, "70": 7.05582, "75": 7.06391, "80": 6.94189, "85": 6.85025, "90": 7.23443, "95": 6.84062, "100": 6.9778}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43333.0, "5": 45405.0, "10": 45389.0, "15": 43950.0, "20": 44842.0, "25": 42711.0, "30": 44045.0, "35": 43296.0, "40": 43292.0, "45": 43297.0, "50": 43372.0, "55": 43931.0, "60": 41332.0, "65": 44727.0, "70": 45530.0, "75": 44687.0, "80": 41109.0, "85": 44005.0, "90": 44713.0, "95": 44106.0, "100": 42470.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4106481664.0, "5": 4106481664.0, "10": 4106481664.0, "15": 4106481664.0, "20": 4106481664.0, "25": 4106481664.0, "30": 4106481664.0, "35": 4106481664.0, "40": 4106481664.0, "45": 4106481664.0, "50": 4106481664.0, "55": 4106481664.0, "60": 4106481664.0, "65": 4106481664.0, "70": 4106481664.0, "75": 4106481664.0, "80": 4106481664.0, "85": 4106481664.0, "90": 4106481664.0, "95": 4106481664.0, "100": 4106481664.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4359213056.0, "5": 6137096192.0, "10": 6137096192.0, "15": 6137096192.0, "20": 6137096192.0, "25": 6137096192.0, "30": 6137096192.0, "35": 6137096192.0, "40": 6137096192.0, "45": 6137096192.0, "50": 6137096192.0, "55": 6137096192.0, "60": 6137096192.0, "65": 6137096192.0, "70": 6137096192.0, "75": 6137096192.0, "80": 6137096192.0, "85": 6137096192.0, "90": 6137096192.0, "95": 6137096192.0, "100": 6137096192.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 6.16054, "5": 0.22579, "10": 0.21966, "15": 
0.21784, "20": 0.21786, "25": 0.21942, "30": 0.21762, "35": 0.21855, "40": 0.21802, "45": 0.21777, "50": 0.21901, "55": 0.21589, "60": 0.21642, "65": 0.21503, "70": 0.21552, "75": 0.21711, "80": 0.21959, "85": 0.21847, "90": 0.23356, "95": 0.22411, "100": 0.21636}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_lts.json new file mode 100644 index 0000000..8a024fc --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/golden_values_lts.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.33709, "5": 9.4065, "10": 9.03793, "15": 8.66978, "20": 8.29225, "25": 8.0142, "30": 7.88977, "35": 7.66719, "40": 7.51221, "45": 7.36993, "50": 7.18004, "55": 7.15482, "60": 7.14496, "65": 6.99164, "70": 7.05575, "75": 7.06375, "80": 6.94188, "85": 6.85016, "90": 7.23422, "95": 6.84058, "100": 6.97772}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43334.0, "5": 45398.0, "10": 45372.0, "15": 43928.0, "20": 44824.0, "25": 42699.0, "30": 44046.0, "35": 43283.0, "40": 43286.0, "45": 43296.0, "50": 43367.0, "55": 43941.0, "60": 41347.0, "65": 44731.0, "70": 45537.0, "75": 44665.0, "80": 41109.0, "85": 43998.0, "90": 44716.0, "95": 44097.0, "100": 42471.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4106481664.0, "5": 4106481664.0, "10": 4106481664.0, "15": 4106481664.0, "20": 4106481664.0, "25": 4106481664.0, "30": 4106481664.0, "35": 4106481664.0, "40": 4106481664.0, "45": 4106481664.0, "50": 4106481664.0, "55": 4106481664.0, "60": 4106481664.0, "65": 4106481664.0, "70": 4106481664.0, "75": 4106481664.0, "80": 4106481664.0, "85": 4106481664.0, "90": 4106481664.0, "95": 4106481664.0, "100": 4106481664.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4359213056.0, "5": 6137096192.0, "10": 6137096192.0, "15": 6137096192.0, "20": 6137096192.0, "25": 6137096192.0, "30": 6137096192.0, "35": 6137096192.0, "40": 6137096192.0, "45": 6137096192.0, "50": 6137096192.0, "55": 6137096192.0, "60": 6137096192.0, "65": 6137096192.0, "70": 6137096192.0, "75": 6137096192.0, "80": 6137096192.0, "85": 6137096192.0, "90": 6137096192.0, "95": 6137096192.0, "100": 6137096192.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 11.07103, "5": 0.23651, "10": 0.22781, "15": 0.23347, "20": 0.23241, "25": 0.22558, "30": 0.22608, "35": 0.22795, "40": 0.22801, "45": 0.22558, "50": 0.22524, "55": 0.22104, "60": 0.22236, "65": 0.22224, "70": 0.22178, "75": 0.22471, "80": 0.22325, "85": 0.22262, "90": 0.22818, "95": 0.22191, "100": 0.22233}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml index 91c6e2e..8d87179 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: 
--tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch + --log-memory-to-tensorboard: true TEST_TYPE: ckpt-resume diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json index 77be5e6..4a06a80 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_dev.json @@ -1,83 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39854, - 9.4111, - 8.88311, - 8.56273, - 8.2877, - 8.10231, - 7.83823, - 7.53415, - 7.39419, - 7.28768, - 7.36789, - 7.22197, - 7.10581, - 7.05271, - 6.91415, - 6.9649, - 6.97292, - 7.03514, - 6.70368, - 6.97028 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43320.0, - 40947.0, - 43974.0, - 41600.0, - 44757.0, - 43928.0, - 41251.0, - 42505.0, - 44666.0, - 43890.0, - 41139.0, - 43267.0, - 39680.0, - 45388.0, - 43300.0, - 43886.0, - 45357.0, - 45697.0, - 46190.0, - 44658.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 11.77537, - 0.4173, - 0.41286, - 0.4207, - 0.40449, - 0.40246, - 0.40398, - 0.40397, - 0.83597, - 0.40504, - 0.40483, - 0.40662, - 0.40436, - 0.40355, - 0.40635, - 0.40423, - 0.40489, - 0.40503, - 0.40616, - 0.40556 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39854, "5": 9.39704, "10": 9.0336, "15": 8.67288, "20": 8.28246, "25": 8.00333, "30": 7.88914, "35": 7.6719, "40": 7.50921, "45": 7.35238, "50": 7.18217, "55": 7.15553, "60": 7.14148, "65": 6.99994, "70": 7.05511, "75": 7.05861, "80": 6.94159, "85": 6.84592, "90": 7.24038, "95": 6.8433, "100": 6.96845}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43320.0, "5": 45384.0, "10": 45363.0, "15": 43909.0, "20": 44776.0, "25": 42442.0, "30": 43994.0, "35": 43262.0, "40": 43237.0, "45": 43284.0, "50": 43360.0, "55": 43870.0, "60": 41274.0, "65": 44689.0, "70": 45511.0, "75": 44668.0, "80": 41044.0, "85": 43969.0, "90": 44715.0, "95": 44065.0, "100": 42462.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2139699712.0, "5": 2139699712.0, "10": 2139699712.0, "15": 2139699712.0, "20": 2139699712.0, "25": 2139699712.0, "30": 2139699712.0, "35": 2139699712.0, "40": 2139699712.0, "45": 2139699712.0, "50": 2139699712.0, "55": 2139699712.0, "60": 2139699712.0, "65": 2139699712.0, "70": 2139699712.0, "75": 2139699712.0, "80": 2139699712.0, "85": 2139699712.0, "90": 2139699712.0, "95": 2139699712.0, "100": 2139699712.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2371335680.0, "5": 3304868864.0, "10": 3304870400.0, "15": 3304870400.0, "20": 3304870400.0, "25": 3304870400.0, "30": 3304870400.0, "35": 3304870400.0, "40": 3304870400.0, "45": 
3304870400.0, "50": 3304870400.0, "55": 3304870400.0, "60": 3304870400.0, "65": 3304870400.0, "70": 3304870400.0, "75": 3304870400.0, "80": 3304870400.0, "85": 3304870400.0, "90": 3304870400.0, "95": 3304870400.0, "100": 3304870400.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 4.71073, "5": 0.39188, "10": 0.38193, "15": 0.38126, "20": 0.38342, "25": 0.38346, "30": 0.38239, "35": 0.38253, "40": 0.38346, "45": 0.38581, "50": 0.38254, "55": 0.38237, "60": 0.38182, "65": 0.38212, "70": 0.38162, "75": 0.38131, "80": 0.38235, "85": 0.38316, "90": 0.38152, "95": 0.38525, "100": 0.38568}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json index 3215a21..b1e5aa2 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/golden_values_lts.json @@ -1,83 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 10.39854, - 9.41109, - 8.8833, - 8.56279, - 8.28765, - 8.10226, - 7.83824, - 7.53414, - 7.39426, - 7.28765, - 7.36798, - 7.22207, - 7.10595, - 7.05273, - 6.91414, - 6.96485, - 6.97279, - 7.03525, - 6.70355, - 6.97029 - ] - }, - "num-zeros": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 43320.0, - 40948.0, - 43971.0, - 41622.0, - 44740.0, - 43919.0, - 41231.0, - 42497.0, - 44664.0, - 43894.0, - 41149.0, - 43254.0, - 39687.0, - 45400.0, - 43313.0, - 43891.0, - 45351.0, - 45692.0, - 46187.0, - 44657.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 100, - "step_interval": 5, - "values": [ - 14.46368, - 0.41717, - 0.42344, - 0.4102, - 0.40332, - 0.40531, - 0.40418, - 0.40386, - 0.40711, - 0.4048, - 0.40536, - 0.40331, - 0.40175, - 0.4047, - 0.40982, - 0.40834, - 0.40594, - 0.40872, - 0.40896, - 0.41014 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 10.39854, "5": 9.39701, "10": 9.03359, "15": 8.67298, "20": 8.28241, "25": 8.00349, "30": 7.88919, "35": 7.67196, "40": 7.50912, "45": 7.35246, "50": 7.18229, "55": 7.15567, "60": 7.14148, "65": 7.00001, "70": 7.0554, "75": 7.05859, "80": 6.94155, "85": 6.84584, "90": 7.2405, "95": 6.84353, "100": 6.96854}}, "num-zeros": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 43320.0, "5": 45392.0, "10": 45363.0, "15": 43919.0, "20": 44778.0, "25": 42432.0, "30": 43986.0, "35": 43261.0, "40": 43242.0, "45": 43266.0, "50": 43346.0, "55": 43875.0, "60": 41289.0, "65": 44697.0, "70": 45530.0, "75": 44661.0, "80": 41029.0, "85": 43973.0, "90": 44723.0, "95": 44054.0, "100": 42464.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 2140224000.0, "5": 2140224000.0, "10": 2140224000.0, "15": 2140224000.0, "20": 2140224000.0, "25": 2140224000.0, "30": 2140224000.0, "35": 2140224000.0, "40": 2140224000.0, "45": 2140224000.0, "50": 2140224000.0, "55": 2140224000.0, "60": 2140224000.0, "65": 2140224000.0, "70": 2140224000.0, "75": 2140224000.0, "80": 2140224000.0, "85": 2140224000.0, "90": 2140224000.0, "95": 2140224000.0, "100": 2140224000.0}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": 
{"1": 2372122112.0, "5": 3305918976.0, "10": 3305918976.0, "15": 3305918976.0, "20": 3305918976.0, "25": 3305918976.0, "30": 3305918976.0, "35": 3305918976.0, "40": 3305918976.0, "45": 3305918976.0, "50": 3305918976.0, "55": 3305918976.0, "60": 3305918976.0, "65": 3305918976.0, "70": 3305918976.0, "75": 3305918976.0, "80": 3305918976.0, "85": 3305918976.0, "90": 3305918976.0, "95": 3305918976.0, "100": 3306050048.0}}, "iteration-time": {"start_step": 1, "end_step": 100, "step_interval": 5, "values": {"1": 12.40654, "5": 0.40596, "10": 0.41633, "15": 0.39729, "20": 0.39823, "25": 0.39786, "30": 0.39874, "35": 0.39845, "40": 0.40982, "45": 0.39982, "50": 0.39604, "55": 0.39557, "60": 0.39545, "65": 0.39649, "70": 0.39623, "75": 0.39574, "80": 0.40039, "85": 0.39829, "90": 0.39569, "95": 0.39538, "100": 0.39981}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml index cf95759..d315b91 100644 --- a/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1/model_config.yaml @@ -34,8 +34,8 @@ MODEL_ARGS: --tokenizer-type: BertWordPieceCase --calculate-per-token-loss: true --split: 99982,9,9 - --save: ${CHECKPOINT_PATH} - --load: ${CHECKPOINT_PATH} + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} --tensorboard-dir: ${TENSORBOARD_PATH} --log-params-norm: true --log-num-zeros-in-grad: true @@ -50,4 +50,5 @@ MODEL_ARGS: --data-cache-path: ${DATA_CACHE_PATH} --deterministic-mode: true --ckpt-format: torch + --log-memory-to-tensorboard: true TEST_TYPE: regular diff --git a/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.10.0.json b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.10.0.json new file mode 100644 index 0000000..e77590a --- /dev/null +++ b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.10.0.json @@ -0,0 +1 @@ +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 10.34373, "5": 10.33855, "10": 10.25569, "15": 9.88106, "20": 9.67345, "25": 9.54549, "30": 9.43762, "35": 9.3672, "40": 9.28501, "45": 9.2071, "50": 9.15626, "55": 9.08668, "60": 9.0031, "65": 8.93069, "70": 8.93161, "75": 8.84474, "80": 8.84781, "85": 8.76, "90": 8.75481, "95": 8.70517, "100": 8.64932, "105": 8.60567, "110": 8.54205, "115": 8.48995, "120": 8.49354, "125": 8.42341, "130": 8.40226, "135": 8.32782, "140": 8.31369, "145": 8.28535, "150": 8.24697, "155": 8.1646, "160": 8.19814, "165": 8.1171, "170": 8.10944, "175": 8.08525, "180": 7.94271, "185": 7.94718, "190": 7.85632, "195": 7.79688, "200": 7.73069, "205": 7.70504, "210": 7.62635, "215": 7.59857, "220": 7.52364, "225": 7.4499, "230": 7.40579, "235": 7.37516, "240": 7.27585, "245": 7.21214, "250": 7.14555, "255": 7.10707, "260": 7.00568, "265": 6.98792, "270": 6.94625, "275": 6.86992, "280": 6.81511, "285": 6.78291, "290": 6.7873, "295": 6.67153, "300": 6.64963, "305": 6.59513, "310": 6.54339, "315": 6.46182, "320": 6.5203, "325": 6.49916, "330": 6.46671, "335": 6.41725, "340": 6.44749, "345": 6.38269, "350": 6.40767, "355": 6.35109, "360": 6.37964, "365": 6.33851, "370": 6.29025, "375": 6.2842, "380": 6.2651, "385": 6.23529, "390": 6.22468, "395": 6.20808, "400": 6.25388, "405": 6.17261, "410": 6.24493, "415": 6.16933, "420": 6.15378, 
"425": 6.10322, "430": 6.07944, "435": 6.08491, "440": 6.05466, "445": 6.02705, "450": 6.08078, "455": 5.99706, "460": 6.0091, "465": 5.99823, "470": 5.96513, "475": 5.95185, "480": 5.95842, "485": 5.93793, "490": 5.89913, "495": 5.89317, "500": 5.86326, "505": 5.86774, "510": 5.90165, "515": 5.82292, "520": 5.86862, "525": 5.8651, "530": 5.83201, "535": 5.83776, "540": 5.81752, "545": 5.76356, "550": 5.77684, "555": 5.74572, "560": 5.75079, "565": 5.72276, "570": 5.72427, "575": 5.71807, "580": 5.65531, "585": 5.69236, "590": 5.65107, "595": 5.63192, "600": 5.64838, "605": 5.64931, "610": 5.6647, "615": 5.62304, "620": 5.64463, "625": 5.61942, "630": 5.58523, "635": 5.60187, "640": 5.56528, "645": 5.59681, "650": 5.56625, "655": 5.5432, "660": 5.55742, "665": 5.57766, "670": 5.48936, "675": 5.51082, "680": 5.52987, "685": 5.5118, "690": 5.51949, "695": 5.48318, "700": 5.43745, "705": 5.38344, "710": 5.41567, "715": 5.49285, "720": 5.4272, "725": 5.45087, "730": 5.42554, "735": 5.43356, "740": 5.39754, "745": 5.41986, "750": 5.4463, "755": 5.45679, "760": 5.39975, "765": 5.3898, "770": 5.38604, "775": 5.34131, "780": 5.34038, "785": 5.31734, "790": 5.31676, "795": 5.32228, "800": 5.33093, "805": 5.34554, "810": 5.30766, "815": 5.25618, "820": 5.28777, "825": 5.29253, "830": 5.27956, "835": 5.30692, "840": 5.30163, "845": 5.22565, "850": 5.22479, "855": 5.25816, "860": 5.25967, "865": 5.24727, "870": 5.22242, "875": 5.20242, "880": 5.28622, "885": 5.26097, "890": 5.20789, "895": 5.2213, "900": 5.24091, "905": 5.23247, "910": 5.21549, "915": 5.22772, "920": 5.18973, "925": 5.20932, "930": 5.18467, "935": 5.15469, "940": 5.16071, "945": 5.19985, "950": 5.13407, "955": 5.09206, "960": 5.14539, "965": 5.09903, "970": 5.06261, "975": 5.12222, "980": 5.09013, "985": 5.0548, "990": 5.08546, "995": 5.09078, "1000": 5.10147, "1005": 5.10275, "1010": 5.09314, "1015": 5.06099, "1020": 5.10157, "1025": 5.04028, "1030": 4.99777, "1035": 5.079, "1040": 5.02758, "1045": 5.05137, "1050": 5.08537, "1055": 5.03412, "1060": 5.07852, "1065": 4.98447, "1070": 4.98147, "1075": 4.99446, "1080": 4.96727, "1085": 5.00919, "1090": 5.02104, "1095": 5.02971, "1100": 5.00844, "1105": 4.98152, "1110": 4.93565, "1115": 4.93359, "1120": 4.94646, "1125": 4.89808, "1130": 5.04134, "1135": 4.91846, "1140": 4.94814, "1145": 4.91299, "1150": 4.98516, "1155": 4.89725, "1160": 4.89734, "1165": 4.91698, "1170": 4.98865, "1175": 4.94765, "1180": 4.85047, "1185": 4.9258, "1190": 4.87912, "1195": 4.92408, "1200": 5.01939, "1205": 4.969, "1210": 4.83846, "1215": 4.88349, "1220": 4.88852, "1225": 4.8824, "1230": 4.90023, "1235": 4.88752, "1240": 4.87498, "1245": 4.88277, "1250": 4.83146, "1255": 4.88079, "1260": 4.80269, "1265": 4.87421, "1270": 4.93006, "1275": 4.84235, "1280": 4.81799, "1285": 4.91435, "1290": 4.72367, "1295": 4.82122, "1300": 4.84842, "1305": 4.82497, "1310": 4.91847, "1315": 4.79319, "1320": 4.87966, "1325": 4.77515, "1330": 4.81544, "1335": 4.79447, "1340": 4.813, "1345": 4.81843, "1350": 4.81781, "1355": 4.80846, "1360": 4.73603, "1365": 4.84669, "1370": 4.81742, "1375": 4.83794, "1380": 4.76658, "1385": 4.73424, "1390": 4.79136, "1395": 4.79311, "1400": 4.66705, "1405": 4.67893, "1410": 4.75841, "1415": 4.79013, "1420": 4.76499, "1425": 4.72912, "1430": 4.76043, "1435": 4.7706, "1440": 4.76119, "1445": 4.75276, "1450": 4.72328, "1455": 4.65186, "1460": 4.76294, "1465": 4.73985, "1470": 4.69713, "1475": 4.71323, "1480": 4.72648, "1485": 4.75028, "1490": 4.75551, "1495": 4.72689, "1500": 4.75752, "1505": 4.67818, 
"1510": 4.72246, "1515": 4.71601, "1520": 4.77559, "1525": 4.73571, "1530": 4.68283, "1535": 4.6909, "1540": 4.7182, "1545": 4.59999, "1550": 4.63695, "1555": 4.61199, "1560": 4.70465, "1565": 4.63195, "1570": 4.66107, "1575": 4.66866, "1580": 4.66083, "1585": 4.68075, "1590": 4.67066, "1595": 4.72121, "1600": 4.68216, "1605": 4.67519, "1610": 4.6172, "1615": 4.6267, "1620": 4.69874, "1625": 4.66292, "1630": 4.60635, "1635": 4.60763, "1640": 4.68401, "1645": 4.63932, "1650": 4.6569, "1655": 4.62405, "1660": 4.67525, "1665": 4.61909, "1670": 4.6946, "1675": 4.64751, "1680": 4.59597, "1685": 4.64714, "1690": 4.63953, "1695": 4.6664, "1700": 4.65485, "1705": 4.63608, "1710": 4.63385, "1715": 4.68949, "1720": 4.64248, "1725": 4.66167, "1730": 4.63339, "1735": 4.62405, "1740": 4.60647, "1745": 4.62659, "1750": 4.60624, "1755": 4.63647, "1760": 4.58248, "1765": 4.61897, "1770": 4.60537, "1775": 4.59025, "1780": 4.60569, "1785": 4.60668, "1790": 4.51577, "1795": 4.58243, "1800": 4.59834, "1805": 4.55914, "1810": 4.563, "1815": 4.57335, "1820": 4.59266, "1825": 4.62388, "1830": 4.56988, "1835": 4.55877, "1840": 4.48945, "1845": 4.52437, "1850": 4.63686, "1855": 4.58593, "1860": 4.56101, "1865": 4.54418, "1870": 4.58503, "1875": 4.54992, "1880": 4.5855, "1885": 4.51203, "1890": 4.50546, "1895": 4.64466, "1900": 4.49303, "1905": 4.56739, "1910": 4.59025, "1915": 4.57737, "1920": 4.55224, "1925": 4.52945, "1930": 4.55718, "1935": 4.5301, "1940": 4.51318, "1945": 4.53614, "1950": 4.51135, "1955": 4.56261, "1960": 4.54296, "1965": 4.56669, "1970": 4.51733, "1975": 4.55725, "1980": 4.50218, "1985": 4.56454, "1990": 4.46968, "1995": 4.55707, "2000": 4.54236, "2005": 4.52379, "2010": 4.5296, "2015": 4.5501, "2020": 4.48229, "2025": 4.47818, "2030": 4.54852, "2035": 4.51603, "2040": 4.5652, "2045": 4.43542, "2050": 4.51336, "2055": 4.50248, "2060": 4.48531, "2065": 4.53952, "2070": 4.48357, "2075": 4.55396, "2080": 4.54937, "2085": 4.4804, "2090": 4.46001, "2095": 4.43593, "2100": 4.40012, "2105": 4.44347, "2110": 4.55339, "2115": 4.54607, "2120": 4.55798, "2125": 4.4695, "2130": 4.46673, "2135": 4.49189, "2140": 4.48897, "2145": 4.43432, "2150": 4.47542, "2155": 4.44366, "2160": 4.40686, "2165": 4.53836, "2170": 4.46197, "2175": 4.50797, "2180": 4.49242, "2185": 4.40342, "2190": 4.4688, "2195": 4.48487, "2200": 4.45369, "2205": 4.36888, "2210": 4.46538, "2215": 4.44055, "2220": 4.42312, "2225": 4.41265, "2230": 4.41236, "2235": 4.43766, "2240": 4.41765, "2245": 4.46352, "2250": 4.42623, "2255": 4.49007, "2260": 4.44624, "2265": 4.40438, "2270": 4.38309, "2275": 4.37258, "2280": 4.47232, "2285": 4.43897, "2290": 4.43409, "2295": 4.47441, "2300": 4.45426, "2305": 4.42856, "2310": 4.38468, "2315": 4.40413, "2320": 4.31621, "2325": 4.42018, "2330": 4.43598, "2335": 4.44152, "2340": 4.41099, "2345": 4.43394, "2350": 4.33497, "2355": 4.42633, "2360": 4.45548, "2365": 4.38743, "2370": 4.36214, "2375": 4.43454, "2380": 4.42365, "2385": 4.32665, "2390": 4.38516, "2395": 4.35781, "2400": 4.38886, "2405": 4.42387, "2410": 4.33642, "2415": 4.47808, "2420": 4.42254, "2425": 4.3738, "2430": 4.4573, "2435": 4.33122, "2440": 4.39591, "2445": 4.39023, "2450": 4.37977, "2455": 4.44252, "2460": 4.3885, "2465": 4.40121, "2470": 4.3919, "2475": 4.35563, "2480": 4.42877, "2485": 4.42761, "2490": 4.32509, "2495": 4.34367, "2500": 4.39092, "2505": 4.35546, "2510": 4.32278, "2515": 4.34692, "2520": 4.41082, "2525": 4.30307, "2530": 4.36016, "2535": 4.42845, "2540": 4.35421, "2545": 4.31988, "2550": 4.37165, "2555": 4.3702, 
"2560": 4.38582, "2565": 4.35937, "2570": 4.41386, "2575": 4.36875, "2580": 4.35456, "2585": 4.3676, "2590": 4.38881, "2595": 4.29726, "2600": 4.3498, "2605": 4.4017, "2610": 4.34572, "2615": 4.3145, "2620": 4.36978, "2625": 4.40264, "2630": 4.34668, "2635": 4.39302, "2640": 4.37364, "2645": 4.36746, "2650": 4.34636, "2655": 4.40218, "2660": 4.29489, "2665": 4.24113, "2670": 4.34308, "2675": 4.25003, "2680": 4.30178, "2685": 4.284, "2690": 4.29731, "2695": 4.2387, "2700": 4.31279, "2705": 4.27998, "2710": 4.28462, "2715": 4.27085, "2720": 4.32126, "2725": 4.2947, "2730": 4.25667, "2735": 4.2663, "2740": 4.3753, "2745": 4.25858, "2750": 4.28991, "2755": 4.38789, "2760": 4.35329, "2765": 4.34414, "2770": 4.3091, "2775": 4.30006, "2780": 4.32318, "2785": 4.25846, "2790": 4.32715, "2795": 4.33212, "2800": 4.33399, "2805": 4.19668, "2810": 4.24825, "2815": 4.27589, "2820": 4.31176, "2825": 4.24226, "2830": 4.30215, "2835": 4.35489, "2840": 4.2871, "2845": 4.28532, "2850": 4.28945, "2855": 4.30689, "2860": 4.33647, "2865": 4.2694, "2870": 4.27456, "2875": 4.25531, "2880": 4.25603, "2885": 4.23401, "2890": 4.23071, "2895": 4.32901, "2900": 4.25511, "2905": 4.30237, "2910": 4.22836, "2915": 4.25905, "2920": 4.25223, "2925": 4.28701, "2930": 4.25433, "2935": 4.28748, "2940": 4.29283, "2945": 4.25611, "2950": 4.21366, "2955": 4.27353, "2960": 4.18162, "2965": 4.26663, "2970": 4.27293, "2975": 4.27101, "2980": 4.23164, "2985": 4.30162, "2990": 4.2143, "2995": 4.31287, "3000": 4.18779, "3005": 4.20192, "3010": 4.28184, "3015": 4.21154, "3020": 4.22735, "3025": 4.21064, "3030": 4.20726, "3035": 4.22906, "3040": 4.21856, "3045": 4.2039, "3050": 4.27938, "3055": 4.23123, "3060": 4.22316, "3065": 4.26368, "3070": 4.20915, "3075": 4.22851, "3080": 4.17393, "3085": 4.22756, "3090": 4.19516, "3095": 4.23734, "3100": 4.29387, "3105": 4.19866, "3110": 4.21012, "3115": 4.19282, "3120": 4.27089, "3125": 4.17596, "3130": 4.20171, "3135": 4.19776, "3140": 4.21762, "3145": 4.20133, "3150": 4.24748, "3155": 4.17835, "3160": 4.239, "3165": 4.22542, "3170": 4.15699, "3175": 4.26387, "3180": 4.17023, "3185": 4.15074, "3190": 4.21898, "3195": 4.18906, "3200": 4.22429, "3205": 4.18519, "3210": 4.19033, "3215": 4.21786, "3220": 4.16663, "3225": 4.21466, "3230": 4.20105, "3235": 4.22465, "3240": 4.14395, "3245": 4.18754, "3250": 4.25793, "3255": 4.22555, "3260": 4.19306, "3265": 4.11213, "3270": 4.17927, "3275": 4.19795, "3280": 4.20445, "3285": 4.16229, "3290": 4.21248, "3295": 4.25855, "3300": 4.24995, "3305": 4.16473, "3310": 4.22058, "3315": 4.16778, "3320": 4.218, "3325": 4.21766, "3330": 4.21956, "3335": 4.12481, "3340": 4.21185, "3345": 4.17605, "3350": 4.22124, "3355": 4.15217, "3360": 4.11837, "3365": 4.14482, "3370": 4.14752, "3375": 4.19705, "3380": 4.16132, "3385": 4.13556, "3390": 4.15108, "3395": 4.15783, "3400": 4.12948, "3405": 4.22487, "3410": 4.13376, "3415": 4.18698, "3420": 4.11745, "3425": 4.13269, "3430": 4.14891, "3435": 4.14902, "3440": 4.19632, "3445": 4.18022, "3450": 4.19456, "3455": 4.10639, "3460": 4.11668, "3465": 4.13534, "3470": 4.11293, "3475": 4.14744, "3480": 4.19392, "3485": 4.11114, "3490": 4.20226, "3495": 4.13097, "3500": 4.11839, "3505": 4.09013, "3510": 4.14695, "3515": 4.16858, "3520": 4.10784, "3525": 4.16444, "3530": 4.21005, "3535": 4.1966, "3540": 4.10319, "3545": 4.07543, "3550": 4.14334, "3555": 4.0432, "3560": 4.06421, "3565": 4.13532, "3570": 4.1447, "3575": 4.1767, "3580": 4.09621, "3585": 4.08965, "3590": 4.09949, "3595": 4.20076, "3600": 4.12621, "3605": 4.06399, "3610": 
4.12388, "3615": 4.12415, "3620": 4.13261, "3625": 4.11576, "3630": 4.02187, "3635": 4.06444, "3640": 4.09653, "3645": 4.13957, "3650": 4.08334, "3655": 4.13321, "3660": 4.05314, "3665": 4.19043, "3670": 4.13473, "3675": 4.12749, "3680": 4.09161, "3685": 4.08541, "3690": 4.03251, "3695": 4.09233, "3700": 4.0925, "3705": 4.07911, "3710": 4.11915, "3715": 4.08263, "3720": 4.01163, "3725": 4.13338, "3730": 4.08857, "3735": 4.09141, "3740": 4.04633, "3745": 4.02761, "3750": 4.03641, "3755": 3.94674, "3760": 4.08837, "3765": 4.06041, "3770": 4.04526, "3775": 4.05189, "3780": 4.08133, "3785": 3.9885, "3790": 4.10446, "3795": 4.11409, "3800": 4.09591, "3805": 4.0666, "3810": 4.08732, "3815": 4.06925, "3820": 4.09918, "3825": 4.01907, "3830": 3.98653, "3835": 4.07898, "3840": 4.03265, "3845": 4.07834, "3850": 4.02966, "3855": 4.01192, "3860": 4.10565, "3865": 4.05392, "3870": 4.13499, "3875": 4.02316, "3880": 4.10204, "3885": 4.02853, "3890": 4.0613, "3895": 4.07185, "3900": 4.06356, "3905": 4.0524, "3910": 4.10115, "3915": 4.03328, "3920": 4.05363, "3925": 4.02595, "3930": 4.06871, "3935": 4.03531, "3940": 4.09464, "3945": 4.07538, "3950": 4.04273, "3955": 4.04673, "3960": 4.00719, "3965": 4.05471, "3970": 3.99269, "3975": 4.06175, "3980": 3.982, "3985": 4.08134, "3990": 4.04627, "3995": 4.1019, "4000": 4.0155, "4005": 4.06013, "4010": 4.02701, "4015": 4.09332, "4020": 4.07974, "4025": 4.03226, "4030": 4.04876, "4035": 4.02409, "4040": 4.04151, "4045": 4.07389, "4050": 4.12821, "4055": 4.00914, "4060": 4.02794, "4065": 4.05093, "4070": 4.03673, "4075": 4.02756, "4080": 3.94473, "4085": 4.01593, "4090": 4.06604, "4095": 3.98654, "4100": 4.03017, "4105": 4.02047, "4110": 3.99623, "4115": 4.03371, "4120": 4.03454, "4125": 3.98163, "4130": 3.99995, "4135": 3.95574, "4140": 4.04429, "4145": 4.02242, "4150": 3.97358, "4155": 4.03528, "4160": 3.94947, "4165": 4.07876, "4170": 4.05157, "4175": 4.04127, "4180": 4.08132, "4185": 3.98269, "4190": 3.99035, "4195": 3.96352, "4200": 4.05395, "4205": 4.02821, "4210": 3.98905, "4215": 4.04032, "4220": 4.05633, "4225": 3.99032, "4230": 4.01297, "4235": 4.03559, "4240": 3.94962, "4245": 3.9398, "4250": 3.98542, "4255": 3.94602, "4260": 3.97106, "4265": 3.9783, "4270": 3.94855, "4275": 3.95053, "4280": 4.0275, "4285": 3.95695, "4290": 3.93272, "4295": 4.01883, "4300": 3.92205, "4305": 3.98926, "4310": 4.04331, "4315": 3.98158, "4320": 4.04004, "4325": 3.90573, "4330": 3.9982, "4335": 3.95388, "4340": 3.98424, "4345": 3.9643, "4350": 3.9976, "4355": 3.94681, "4360": 3.93657, "4365": 3.94094, "4370": 3.9685, "4375": 3.99603, "4380": 3.97986, "4385": 3.97943, "4390": 3.99366, "4395": 4.00064, "4400": 3.98543, "4405": 3.90394, "4410": 3.95322, "4415": 3.95879, "4420": 3.97724, "4425": 4.01833, "4430": 3.90871, "4435": 3.9831, "4440": 3.93824, "4445": 3.93476, "4450": 3.88369, "4455": 3.9776, "4460": 3.86985, "4465": 3.96515, "4470": 3.94816, "4475": 3.94333, "4480": 4.01224, "4485": 3.89121, "4490": 3.88383, "4495": 3.92873, "4500": 3.93304, "4505": 4.0037, "4510": 4.02592, "4515": 3.9533, "4520": 3.96387, "4525": 3.87594, "4530": 3.99479, "4535": 3.93196, "4540": 3.92944, "4545": 3.89907, "4550": 3.93401, "4555": 3.96996, "4560": 3.94019, "4565": 3.93179, "4570": 3.91806, "4575": 3.90053, "4580": 3.88486, "4585": 3.98783, "4590": 3.86447, "4595": 3.96638, "4600": 3.91895, "4605": 3.90232, "4610": 3.86563, "4615": 3.90204, "4620": 3.93427, "4625": 3.93897, "4630": 3.967, "4635": 3.91723, "4640": 3.97825, "4645": 3.94805, "4650": 3.92558, "4655": 3.94005, "4660": 
3.95379, "4665": 3.89411, "4670": 3.91285, "4675": 3.91002, "4680": 3.97659, "4685": 3.8905, "4690": 3.87566, "4695": 3.90924, "4700": 3.91282, "4705": 3.89168, "4710": 3.90272, "4715": 3.91273, "4720": 3.93602, "4725": 3.90995, "4730": 3.91693, "4735": 3.90354, "4740": 3.87933, "4745": 3.89622, "4750": 3.91625, "4755": 3.94461, "4760": 3.89007, "4765": 3.84348, "4770": 3.92026, "4775": 3.92328, "4780": 3.87932, "4785": 3.94608, "4790": 3.92693, "4795": 3.92456, "4800": 3.90515, "4805": 3.89125, "4810": 3.88502, "4815": 3.98372, "4820": 3.90738, "4825": 3.82435, "4830": 3.92337, "4835": 3.89131, "4840": 3.9148, "4845": 3.84694, "4850": 3.91744, "4855": 3.86399, "4860": 3.86602, "4865": 3.90987, "4870": 3.89117, "4875": 3.85254, "4880": 3.91177, "4885": 3.89707, "4890": 3.96861, "4895": 3.94168, "4900": 3.85595, "4905": 3.85925, "4910": 3.87576, "4915": 3.89915, "4920": 3.89231, "4925": 3.84858, "4930": 3.83138, "4935": 3.86932, "4940": 3.84319, "4945": 3.9681, "4950": 3.83031, "4955": 3.85306, "4960": 3.84911, "4965": 3.86292, "4970": 3.89539, "4975": 3.91362, "4980": 3.87599, "4985": 3.83733, "4990": 3.83995, "4995": 3.88889, "5000": 3.85296, "5005": 3.92969, "5010": 3.89428, "5015": 3.82292, "5020": 3.8586, "5025": 3.88727, "5030": 3.8023, "5035": 3.8955, "5040": 3.82501, "5045": 3.87057, "5050": 3.82553, "5055": 3.90251, "5060": 3.91365, "5065": 3.84634, "5070": 3.84597, "5075": 3.81175, "5080": 3.85556, "5085": 3.86311, "5090": 3.90125, "5095": 3.81329, "5100": 3.86085, "5105": 3.89795, "5110": 3.86219, "5115": 3.80749, "5120": 3.74018, "5125": 3.85569, "5130": 3.75942, "5135": 3.83889, "5140": 3.82761, "5145": 3.78361, "5150": 3.82771, "5155": 3.85718, "5160": 3.80953, "5165": 3.87967, "5170": 3.82486, "5175": 3.83389, "5180": 3.82018, "5185": 3.89964, "5190": 3.82672, "5195": 3.81821, "5200": 3.79609, "5205": 3.78106, "5210": 3.76474, "5215": 3.74551, "5220": 3.81629, "5225": 3.83809, "5230": 3.80839, "5235": 3.81863, "5240": 3.80485, "5245": 3.7897, "5250": 3.78233, "5255": 3.90076, "5260": 3.82432, "5265": 3.82612, "5270": 3.84145, "5275": 3.83637, "5280": 3.78397, "5285": 3.84884, "5290": 3.82359, "5295": 3.8157, "5300": 3.8032, "5305": 3.82915, "5310": 3.7973, "5315": 3.75693, "5320": 3.81037, "5325": 3.76489, "5330": 3.79447, "5335": 3.81268, "5340": 3.87726, "5345": 3.79598, "5350": 3.84541, "5355": 3.83295, "5360": 3.86841, "5365": 3.78674, "5370": 3.73942, "5375": 3.82772, "5380": 3.73799, "5385": 3.77805, "5390": 3.755, "5395": 3.73053, "5400": 3.82294, "5405": 3.83419, "5410": 3.80045, "5415": 3.78688, "5420": 3.83679, "5425": 3.71619, "5430": 3.82006, "5435": 3.7969, "5440": 3.78951, "5445": 3.7244, "5450": 3.71928, "5455": 3.72368, "5460": 3.71689, "5465": 3.74458, "5470": 3.80764, "5475": 3.72375, "5480": 3.84943, "5485": 3.79075, "5490": 3.73077, "5495": 3.75159, "5500": 3.7951, "5505": 3.80917, "5510": 3.79686, "5515": 3.78387, "5520": 3.81216, "5525": 3.77901, "5530": 3.74647, "5535": 3.74725, "5540": 3.72981, "5545": 3.80926, "5550": 3.73233, "5555": 3.8189, "5560": 3.77979, "5565": 3.72825, "5570": 3.85327, "5575": 3.78182, "5580": 3.71916, "5585": 3.73577, "5590": 3.73921, "5595": 3.77447, "5600": 3.74465, "5605": 3.71177, "5610": 3.73674, "5615": 3.68008, "5620": 3.72408, "5625": 3.72155, "5630": 3.74195, "5635": 3.72041, "5640": 3.74688, "5645": 3.75731, "5650": 3.72054, "5655": 3.74912, "5660": 3.75169, "5665": 3.76238, "5670": 3.73818, "5675": 3.78033, "5680": 3.75875, "5685": 3.73621, "5690": 3.70006, "5695": 3.78155, "5700": 3.75802, "5705": 3.70886, "5710": 
3.71055, "5715": 3.70654, "5720": 3.73202, "5725": 3.75022, "5730": 3.81464, "5735": 3.72012, "5740": 3.82371, "5745": 3.75383, "5750": 3.70352, "5755": 3.76115, "5760": 3.84464, "5765": 3.72342, "5770": 3.7924, "5775": 3.71178, "5780": 3.67998, "5785": 3.75573, "5790": 3.72196, "5795": 3.73771, "5800": 3.7433, "5805": 3.72965, "5810": 3.6789, "5815": 3.73677, "5820": 3.72203, "5825": 3.66932, "5830": 3.67718, "5835": 3.64403, "5840": 3.6676, "5845": 3.74038, "5850": 3.7864, "5855": 3.65285, "5860": 3.67923, "5865": 3.6858, "5870": 3.74586, "5875": 3.69492, "5880": 3.74494, "5885": 3.66183, "5890": 3.70753, "5895": 3.65041, "5900": 3.68043, "5905": 3.69823, "5910": 3.69549, "5915": 3.65219, "5920": 3.71119, "5925": 3.68026, "5930": 3.66413, "5935": 3.65905, "5940": 3.66452, "5945": 3.71598, "5950": 3.70013, "5955": 3.71466, "5960": 3.68825, "5965": 3.72972, "5970": 3.66173, "5975": 3.66545, "5980": 3.65156, "5985": 3.72237, "5990": 3.73511, "5995": 3.63302, "6000": 3.63591, "6005": 3.65087, "6010": 3.67384, "6015": 3.68181, "6020": 3.68643, "6025": 3.63638, "6030": 3.69682, "6035": 3.69354, "6040": 3.50767, "6045": 3.66861, "6050": 3.60445, "6055": 3.63497, "6060": 3.66496, "6065": 3.72478, "6070": 3.62991, "6075": 3.73873, "6080": 3.6316, "6085": 3.66764, "6090": 3.65142, "6095": 3.72069, "6100": 3.62623, "6105": 3.67284, "6110": 3.61333, "6115": 3.5809, "6120": 3.59692, "6125": 3.70611, "6130": 3.64036, "6135": 3.64809, "6140": 3.60504, "6145": 3.57777, "6150": 3.65647, "6155": 3.6244, "6160": 3.63658, "6165": 3.67308, "6170": 3.66997, "6175": 3.61739, "6180": 3.59783, "6185": 3.69238, "6190": 3.63332, "6195": 3.66591, "6200": 3.60755, "6205": 3.60926, "6210": 3.60936, "6215": 3.69495, "6220": 3.5875, "6225": 3.53062, "6230": 3.58868, "6235": 3.58645, "6240": 3.64498, "6245": 3.62742, "6250": 3.65073, "6255": 3.58693, "6260": 3.61904, "6265": 3.65833, "6270": 3.64805, "6275": 3.59192, "6280": 3.57177, "6285": 3.59656, "6290": 3.68089, "6295": 3.58734, "6300": 3.59015, "6305": 3.55976, "6310": 3.59656, "6315": 3.64383, "6320": 3.63444, "6325": 3.56479, "6330": 3.61757, "6335": 3.5669, "6340": 3.53934, "6345": 3.56958, "6350": 3.62395, "6355": 3.64535, "6360": 3.58097, "6365": 3.56116, "6370": 3.61611, "6375": 3.60927, "6380": 3.6087, "6385": 3.57765, "6390": 3.61489, "6395": 3.60815, "6400": 3.53432, "6405": 3.6049, "6410": 3.61929, "6415": 3.5555, "6420": 3.53713, "6425": 3.63439, "6430": 3.5974, "6435": 3.6297, "6440": 3.56239, "6445": 3.57521, "6450": 3.60789, "6455": 3.53321, "6460": 3.56002, "6465": 3.54479, "6470": 3.59764, "6475": 3.51448, "6480": 3.56448, "6485": 3.50987, "6490": 3.56054, "6495": 3.56432, "6500": 3.53342, "6505": 3.65715, "6510": 3.59233, "6515": 3.62116, "6520": 3.60816, "6525": 3.55655, "6530": 3.61076, "6535": 3.62977, "6540": 3.58136, "6545": 3.53043, "6550": 3.59765, "6555": 3.57079, "6560": 3.56554, "6565": 3.50385, "6570": 3.52173, "6575": 3.50977, "6580": 3.51933, "6585": 3.61749, "6590": 3.55537, "6595": 3.56217, "6600": 3.58113, "6605": 3.53836, "6610": 3.54789, "6615": 3.55075, "6620": 3.52177, "6625": 3.53744, "6630": 3.50182, "6635": 3.58227, "6640": 3.55257, "6645": 3.57316, "6650": 3.5919, "6655": 3.54036, "6660": 3.61481, "6665": 3.533, "6670": 3.49903, "6675": 3.63102, "6680": 3.48862, "6685": 3.50866, "6690": 3.5496, "6695": 3.52368, "6700": 3.5062, "6705": 3.53062, "6710": 3.56199, "6715": 3.55967, "6720": 3.5546, "6725": 3.58058, "6730": 3.4248, "6735": 3.55779, "6740": 3.56503, "6745": 3.48741, "6750": 3.49517, "6755": 3.58869, "6760": 
3.51634, "6765": 3.54901, "6770": 3.52356, "6775": 3.54394, "6780": 3.52942, "6785": 3.55451, "6790": 3.5487, "6795": 3.53214, "6800": 3.52485, "6805": 3.46449, "6810": 3.52292, "6815": 3.54947, "6820": 3.51786, "6825": 3.57689, "6830": 3.54032, "6835": 3.50713, "6840": 3.5105, "6845": 3.52581, "6850": 3.54205, "6855": 3.50677, "6860": 3.43863, "6865": 3.53903, "6870": 3.51628, "6875": 3.56694, "6880": 3.51085, "6885": 3.4263, "6890": 3.51941, "6895": 3.46403, "6900": 3.47026, "6905": 3.45397, "6910": 3.45992, "6915": 3.49624, "6920": 3.42635, "6925": 3.47338, "6930": 3.55032, "6935": 3.48705, "6940": 3.52226, "6945": 3.49742, "6950": 3.37755, "6955": 3.51492, "6960": 3.50448, "6965": 3.49043, "6970": 3.48009, "6975": 3.56594, "6980": 3.45585, "6985": 3.45905, "6990": 3.51163, "6995": 3.38135, "7000": 3.42425, "7005": 3.43093, "7010": 3.50424, "7015": 3.46326, "7020": 3.43438, "7025": 3.51815, "7030": 3.45768, "7035": 3.45919, "7040": 3.50022, "7045": 3.46933, "7050": 3.39984, "7055": 3.45222, "7060": 3.40546, "7065": 3.43116, "7070": 3.52776, "7075": 3.43922, "7080": 3.38876, "7085": 3.47967, "7090": 3.43736, "7095": 3.48524, "7100": 3.43887, "7105": 3.4614, "7110": 3.41065, "7115": 3.43796, "7120": 3.40471, "7125": 3.4299, "7130": 3.37878, "7135": 3.40957, "7140": 3.4664, "7145": 3.4667, "7150": 3.40627, "7155": 3.36021, "7160": 3.39122, "7165": 3.52705, "7170": 3.39161, "7175": 3.49874, "7180": 3.46873, "7185": 3.48768, "7190": 3.44766, "7195": 3.42326, "7200": 3.42042, "7205": 3.46607, "7210": 3.44923, "7215": 3.46017, "7220": 3.40418, "7225": 3.4172, "7230": 3.46905, "7235": 3.36334, "7240": 3.34325, "7245": 3.40616, "7250": 3.40178, "7255": 3.35405, "7260": 3.43742, "7265": 3.40438, "7270": 3.43028, "7275": 3.4467, "7280": 3.37358, "7285": 3.42738, "7290": 3.39251, "7295": 3.34722, "7300": 3.43413, "7305": 3.41985, "7310": 3.38553, "7315": 3.33633, "7320": 3.39109, "7325": 3.3935, "7330": 3.46259, "7335": 3.42918, "7340": 3.33528, "7345": 3.44071, "7350": 3.37246, "7355": 3.37798, "7360": 3.35867, "7365": 3.38763, "7370": 3.33853, "7375": 3.35272, "7380": 3.38653, "7385": 3.42799, "7390": 3.30547, "7395": 3.39884, "7400": 3.35676, "7405": 3.46583, "7410": 3.29506, "7415": 3.36596, "7420": 3.32471, "7425": 3.32613, "7430": 3.29646, "7435": 3.4149, "7440": 3.37071, "7445": 3.36867, "7450": 3.33083, "7455": 3.33694, "7460": 3.34766, "7465": 3.38066, "7470": 3.33912, "7475": 3.26749, "7480": 3.29524, "7485": 3.24388, "7490": 3.36158, "7495": 3.35318, "7500": 3.26088, "7505": 3.26061, "7510": 3.36374, "7515": 3.36047, "7520": 3.35768, "7525": 3.3297, "7530": 3.32166, "7535": 3.23309, "7540": 3.30174, "7545": 3.33649, "7550": 3.32455, "7555": 3.2532, "7560": 3.29536, "7565": 3.26603, "7570": 3.32493, "7575": 3.31373, "7580": 3.24472, "7585": 3.29328, "7590": 3.27588, "7595": 3.32635, "7600": 3.29377, "7605": 3.29018, "7610": 3.30994, "7615": 3.23625, "7620": 3.21079, "7625": 3.32776, "7630": 3.21842, "7635": 3.28686, "7640": 3.34183, "7645": 3.29475, "7650": 3.23009, "7655": 3.24817, "7660": 3.20092, "7665": 3.23828, "7670": 3.19927, "7675": 3.23655, "7680": 3.22404, "7685": 3.26981, "7690": 3.23138, "7695": 3.27039, "7700": 3.27132, "7705": 3.18254, "7710": 3.29105, "7715": 3.29647, "7720": 3.2387, "7725": 3.28956, "7730": 3.20666, "7735": 3.2382, "7740": 3.19503, "7745": 3.19719, "7750": 3.25255, "7755": 3.24808, "7760": 3.27941, "7765": 3.20657, "7770": 3.22893, "7775": 3.22373, "7780": 3.23811, "7785": 3.20329, "7790": 3.23783, "7795": 3.20233, "7800": 3.22959, "7805": 3.18655, "7810": 
3.23453, "7815": 3.24612, "7820": 3.24743, "7825": 3.20502, "7830": 3.18515, "7835": 3.1707, "7840": 3.16678, "7845": 3.26586, "7850": 3.15014, "7855": 3.13562, "7860": 3.16316, "7865": 3.16757, "7870": 3.15497, "7875": 3.13011, "7880": 3.17218, "7885": 3.20455, "7890": 3.16433, "7895": 3.0976, "7900": 3.22828, "7905": 3.11808, "7910": 3.12106, "7915": 3.20044, "7920": 3.13601, "7925": 3.16847, "7930": 3.13992, "7935": 3.19627, "7940": 3.14615, "7945": 3.0283, "7950": 3.1298, "7955": 3.10708, "7960": 3.06394, "7965": 3.14586, "7970": 3.16993, "7975": 3.1882, "7980": 3.12301, "7985": 3.16083, "7990": 3.1466, "7995": 3.1348, "8000": 3.08372, "8005": 3.12077, "8010": 3.13907, "8015": 3.11953, "8020": 3.16854, "8025": 3.0824, "8030": 3.14578, "8035": 3.04942, "8040": 3.11219, "8045": 3.15168, "8050": 3.14341, "8055": 3.15519, "8060": 3.15288, "8065": 3.07471, "8070": 3.10249, "8075": 3.11136, "8080": 3.07083, "8085": 3.06105, "8090": 3.09685, "8095": 3.07416, "8100": 3.1148, "8105": 3.11036, "8110": 3.0543, "8115": 3.05424, "8120": 3.0753, "8125": 3.10262, "8130": 3.07354, "8135": 3.122, "8140": 3.05313, "8145": 3.11007, "8150": 3.17652, "8155": 3.09334, "8160": 3.08268, "8165": 3.07632, "8170": 3.16285, "8175": 3.09183, "8180": 3.1198, "8185": 3.06824, "8190": 2.94978, "8195": 3.14417, "8200": 3.03051, "8205": 3.05557, "8210": 3.08217, "8215": 3.04214, "8220": 3.09174, "8225": 3.12112, "8230": 3.10369, "8235": 3.09214, "8240": 3.05669, "8245": 3.04599, "8250": 3.10197, "8255": 3.07793, "8260": 3.10212, "8265": 3.07373, "8270": 3.03911, "8275": 3.11212, "8280": 3.05801, "8285": 3.11207, "8290": 3.04823, "8295": 3.02463, "8300": 3.15681, "8305": 3.00859, "8310": 3.04075, "8315": 3.05357, "8320": 3.04139, "8325": 3.04353, "8330": 3.02976, "8335": 2.99575, "8340": 2.98712, "8345": 3.03023, "8350": 3.03013, "8355": 3.09382, "8360": 3.05638, "8365": 3.06623, "8370": 3.09117, "8375": 2.99747, "8380": 3.05575, "8385": 2.97438, "8390": 3.0285, "8395": 3.00162, "8400": 2.97868, "8405": 2.98123, "8410": 3.00504, "8415": 3.04812, "8420": 3.00344, "8425": 3.04317, "8430": 3.03734, "8435": 3.04149, "8440": 3.05998, "8445": 2.97956, "8450": 2.99144, "8455": 3.02614, "8460": 3.00738, "8465": 3.00281, "8470": 2.98006, "8475": 2.98334, "8480": 3.08484, "8485": 3.01581, "8490": 3.0426, "8495": 2.87854, "8500": 3.04028, "8505": 3.01786, "8510": 2.98733, "8515": 3.03475, "8520": 2.998, "8525": 3.04886, "8530": 2.96947, "8535": 3.01969, "8540": 3.04299, "8545": 3.05398, "8550": 3.00779, "8555": 2.96202, "8560": 2.97986, "8565": 3.0576, "8570": 3.02814, "8575": 3.00558, "8580": 3.05941, "8585": 2.99178, "8590": 3.01179, "8595": 2.9492, "8600": 3.01884, "8605": 3.02966, "8610": 3.02221, "8615": 2.97817, "8620": 3.06381, "8625": 3.01175, "8630": 2.99828, "8635": 3.04907, "8640": 3.01076, "8645": 3.06652, "8650": 2.92039, "8655": 2.98636, "8660": 3.02962, "8665": 3.01276, "8670": 2.94875, "8675": 2.95157, "8680": 2.93623, "8685": 3.00377, "8690": 2.94947, "8695": 2.97794, "8700": 3.02097, "8705": 2.91509, "8710": 3.04594, "8715": 2.97136, "8720": 2.98768, "8725": 2.91641, "8730": 3.00558, "8735": 2.9421, "8740": 2.96155, "8745": 2.91724, "8750": 2.88059, "8755": 2.95301, "8760": 2.95524, "8765": 2.9737, "8770": 2.91457, "8775": 2.93109, "8780": 2.99251, "8785": 3.01905, "8790": 2.98893, "8795": 2.88652, "8800": 2.8872, "8805": 2.92567, "8810": 3.03555, "8815": 2.90108, "8820": 2.89678, "8825": 2.94809, "8830": 2.96848, "8835": 2.99381, "8840": 2.94817, "8845": 2.99014, "8850": 2.93993, "8855": 2.92878, "8860": 
2.90801, "8865": 2.86821, "8870": 2.96922, "8875": 2.9595, "8880": 2.9031, "8885": 2.89049, "8890": 2.93309, "8895": 2.89729, "8900": 2.92864, "8905": 2.93537, "8910": 2.86823, "8915": 2.94141, "8920": 2.89898, "8925": 2.90317, "8930": 2.95898, "8935": 2.91456, "8940": 2.93832, "8945": 2.96202, "8950": 2.93163, "8955": 2.9349, "8960": 2.88125, "8965": 2.88356, "8970": 2.92694, "8975": 2.92838, "8980": 2.90395, "8985": 2.91888, "8990": 2.85165, "8995": 2.91536, "9000": 2.83986, "9005": 2.90637, "9010": 2.9266, "9015": 2.92566, "9020": 2.88667, "9025": 2.85988, "9030": 2.97739, "9035": 2.94211, "9040": 2.88793, "9045": 2.96471, "9050": 2.8611, "9055": 2.91867, "9060": 2.91574, "9065": 2.87209, "9070": 2.88276, "9075": 2.919, "9080": 3.00948, "9085": 2.86108, "9090": 2.91355, "9095": 2.8966, "9100": 2.84467, "9105": 2.90261, "9110": 2.84533, "9115": 2.87832, "9120": 2.88402, "9125": 2.91219, "9130": 2.8899, "9135": 2.90453, "9140": 2.84796, "9145": 2.88464, "9150": 2.8354, "9155": 2.84671, "9160": 2.89181, "9165": 2.88521, "9170": 2.9506, "9175": 2.84974, "9180": 2.89666, "9185": 2.85235, "9190": 2.85195, "9195": 2.84913, "9200": 2.88818, "9205": 2.88145, "9210": 2.87461, "9215": 2.93759, "9220": 2.9096, "9225": 2.90204, "9230": 2.84801, "9235": 2.93592, "9240": 2.87436, "9245": 2.93545, "9250": 2.8565, "9255": 2.91306, "9260": 2.81648, "9265": 2.77609, "9270": 2.87112, "9275": 2.92058, "9280": 2.86363, "9285": 2.8637, "9290": 2.82051, "9295": 2.86968, "9300": 2.89683, "9305": 2.94122, "9310": 2.90167, "9315": 2.8554, "9320": 2.84969, "9325": 2.94569, "9330": 2.87274, "9335": 2.91212, "9340": 2.89024, "9345": 2.90789, "9350": 2.85351, "9355": 2.93569, "9360": 2.833, "9365": 2.84836, "9370": 2.84216, "9375": 2.86702, "9380": 2.83874, "9385": 2.85376, "9390": 2.86825, "9395": 2.84465, "9400": 2.88115, "9405": 2.82411, "9410": 2.87711, "9415": 2.83626, "9420": 2.85958, "9425": 2.86856, "9430": 2.85344, "9435": 2.83414, "9440": 2.81761, "9445": 2.78682, "9450": 2.81676, "9455": 2.88599, "9460": 2.77246, "9465": 2.83656, "9470": 2.80338, "9475": 2.81755, "9480": 2.75213, "9485": 2.83818, "9490": 2.82138, "9495": 2.87926, "9500": 2.80685, "9505": 2.86205, "9510": 2.82847, "9515": 2.8306, "9520": 2.8062, "9525": 2.87281, "9530": 2.83671, "9535": 2.80868, "9540": 2.78397, "9545": 2.83638, "9550": 2.90854, "9555": 2.88609, "9560": 2.86725, "9565": 2.92881, "9570": 2.85826, "9575": 2.84622, "9580": 2.83774, "9585": 2.78839, "9590": 2.75198, "9595": 2.79365, "9600": 2.83202, "9605": 2.83313, "9610": 2.88542, "9615": 2.82057, "9620": 2.85027, "9625": 2.78259, "9630": 2.82299, "9635": 2.86118, "9640": 2.87586, "9645": 2.87579, "9650": 2.81046, "9655": 2.73458, "9660": 2.91709, "9665": 2.83103, "9670": 2.87719, "9675": 2.86594, "9680": 2.78985, "9685": 2.75631, "9690": 2.79676, "9695": 2.88059, "9700": 2.82923, "9705": 2.89245, "9710": 2.85379, "9715": 2.81232, "9720": 2.78922, "9725": 2.82539, "9730": 2.90027, "9735": 2.80767, "9740": 2.79123, "9745": 2.82364, "9750": 2.8531, "9755": 2.84836, "9760": 2.77105, "9765": 2.88072, "9770": 2.85756, "9775": 2.81596, "9780": 2.84078, "9785": 2.79745, "9790": 2.74067, "9795": 2.74568, "9800": 2.82554, "9805": 2.80929, "9810": 2.83042, "9815": 2.76668, "9820": 2.78218, "9825": 2.81722, "9830": 2.87058, "9835": 2.77493, "9840": 2.7847, "9845": 2.82749, "9850": 2.75867, "9855": 2.78395, "9860": 2.88608, "9865": 2.77684, "9870": 2.78117, "9875": 2.80214, "9880": 2.80629, "9885": 2.78763, "9890": 2.79679, "9895": 2.79596, "9900": 2.81529, "9905": 2.75659, "9910": 
2.83644, "9915": 2.73952, "9920": 2.82992, "9925": 2.76944, "9930": 2.79556, "9935": 2.79906, "9940": 2.84549, "9945": 2.74519, "9950": 2.87954, "9955": 2.74333, "9960": 2.8515, "9965": 2.75833, "9970": 2.76822, "9975": 2.83184, "9980": 2.78223, "9985": 2.7249, "9990": 2.77009, "9995": 2.81493, "10000": 2.78621, "10005": 2.82315, "10010": 2.78863, "10015": 2.71124, "10020": 2.73446, "10025": 2.7615, "10030": 2.79936, "10035": 2.84779, "10040": 2.70533, "10045": 2.83971, "10050": 2.78028, "10055": 2.74328, "10060": 2.8381, "10065": 2.77523, "10070": 2.81396, "10075": 2.7509, "10080": 2.76918, "10085": 2.78206, "10090": 2.74589, "10095": 2.80262, "10100": 2.77409, "10105": 2.82272, "10110": 2.72161, "10115": 2.83088, "10120": 2.80562, "10125": 2.69371, "10130": 2.75052, "10135": 2.71636, "10140": 2.79758, "10145": 2.80331, "10150": 2.73399, "10155": 2.75704, "10160": 2.82616, "10165": 2.83111, "10170": 2.76177, "10175": 2.82994, "10180": 2.76471, "10185": 2.74141, "10190": 2.79218, "10195": 2.78765, "10200": 2.70453, "10205": 2.74292, "10210": 2.74568, "10215": 2.76488, "10220": 2.75755, "10225": 2.70289, "10230": 2.74959, "10235": 2.78826, "10240": 2.76373, "10245": 2.75896, "10250": 2.77561, "10255": 2.77842, "10260": 2.84629, "10265": 2.73551, "10270": 2.74216, "10275": 2.79688, "10280": 2.77568, "10285": 2.7466, "10290": 2.76524, "10295": 2.8182, "10300": 2.7936, "10305": 2.77229, "10310": 2.76497, "10315": 2.74991, "10320": 2.81558, "10325": 2.82018, "10330": 2.74355, "10335": 2.72049, "10340": 2.78242, "10345": 2.81482, "10350": 2.78255, "10355": 2.7946, "10360": 2.80611, "10365": 2.77654, "10370": 2.783, "10375": 2.67516, "10380": 2.79118, "10385": 2.80521, "10390": 2.81971, "10395": 2.79357, "10400": 2.72363, "10405": 2.72425, "10410": 2.75363, "10415": 2.77701, "10420": 2.65189, "10425": 2.72275, "10430": 2.76379, "10435": 2.68916, "10440": 2.77767, "10445": 2.75536, "10450": 2.72285, "10455": 2.74584, "10460": 2.78835, "10465": 2.73548, "10470": 2.76566, "10475": 2.77933, "10480": 2.6863, "10485": 2.75938, "10490": 2.70079, "10495": 2.77293, "10500": 2.77187, "10505": 2.7135, "10510": 2.73274, "10515": 2.81575, "10520": 2.72519, "10525": 2.78839, "10530": 2.73117, "10535": 2.79908, "10540": 2.71684, "10545": 2.82693, "10550": 2.72104, "10555": 2.72262, "10560": 2.76268, "10565": 2.79184, "10570": 2.72904, "10575": 2.72743, "10580": 2.79204, "10585": 2.65433, "10590": 2.71124, "10595": 2.58049, "10600": 2.73177, "10605": 2.78194, "10610": 2.77024, "10615": 2.6485, "10620": 2.66556, "10625": 2.64811, "10630": 2.79123, "10635": 2.68345, "10640": 2.77812, "10645": 2.73692, "10650": 2.77726, "10655": 2.73212, "10660": 2.69732, "10665": 2.66242, "10670": 2.75307, "10675": 2.60015, "10680": 2.71926, "10685": 2.64816, "10690": 2.70659, "10695": 2.72877, "10700": 2.68413, "10705": 2.69555, "10710": 2.774, "10715": 2.65661, "10720": 2.77528, "10725": 2.73474, "10730": 2.67914, "10735": 2.76507, "10740": 2.67478, "10745": 2.72617, "10750": 2.7479, "10755": 2.74263, "10760": 2.73073, "10765": 2.72251, "10770": 2.76372, "10775": 2.74374, "10780": 2.69416, "10785": 2.7111, "10790": 2.68224, "10795": 2.73026, "10800": 2.68876, "10805": 2.77081, "10810": 2.77176, "10815": 2.68136, "10820": 2.75497, "10825": 2.7289, "10830": 2.72109, "10835": 2.72345, "10840": 2.75372, "10845": 2.75927, "10850": 2.67368, "10855": 2.66173, "10860": 2.76767, "10865": 2.62342, "10870": 2.74536, "10875": 2.72451, "10880": 2.68847, "10885": 2.71223, "10890": 2.64144, "10895": 2.65478, "10900": 2.70849, "10905": 2.72647, 
"10910": 2.69491, "10915": 2.72985, "10920": 2.64977, "10925": 2.73933, "10930": 2.67429, "10935": 2.6864, "10940": 2.66962, "10945": 2.67685, "10950": 2.68635, "10955": 2.70653, "10960": 2.72338, "10965": 2.57087, "10970": 2.7383, "10975": 2.69785, "10980": 2.68698, "10985": 2.66184, "10990": 2.692, "10995": 2.71866, "11000": 2.68766, "11005": 2.74688, "11010": 2.69556, "11015": 2.78167, "11020": 2.70956, "11025": 2.72884, "11030": 2.65276, "11035": 2.64902, "11040": 2.72963, "11045": 2.67701, "11050": 2.69404, "11055": 2.72594, "11060": 2.67375, "11065": 2.6317, "11070": 2.6794, "11075": 2.62368, "11080": 2.75616, "11085": 2.7049, "11090": 2.63052, "11095": 2.63937, "11100": 2.66143, "11105": 2.7075, "11110": 2.70345, "11115": 2.69002, "11120": 2.62756, "11125": 2.63158, "11130": 2.71598, "11135": 2.71961, "11140": 2.73845, "11145": 2.6557, "11150": 2.67433, "11155": 2.60537, "11160": 2.69027, "11165": 2.72802, "11170": 2.72852, "11175": 2.65702, "11180": 2.66002, "11185": 2.66814, "11190": 2.65108, "11195": 2.60171, "11200": 2.66329, "11205": 2.66693, "11210": 2.70556, "11215": 2.66241, "11220": 2.65762, "11225": 2.64655, "11230": 2.67509, "11235": 2.62236, "11240": 2.68781, "11245": 2.75295, "11250": 2.74123, "11255": 2.73783, "11260": 2.76418, "11265": 2.65261, "11270": 2.72723, "11275": 2.72429, "11280": 2.64531, "11285": 2.64735, "11290": 2.74409, "11295": 2.67721, "11300": 2.68325, "11305": 2.60893, "11310": 2.65062, "11315": 2.63728, "11320": 2.70284, "11325": 2.61653, "11330": 2.63144, "11335": 2.67125, "11340": 2.64044, "11345": 2.69127, "11350": 2.71217, "11355": 2.67089, "11360": 2.70591, "11365": 2.66058, "11370": 2.62929, "11375": 2.67324, "11380": 2.6018, "11385": 2.71277, "11390": 2.71264, "11395": 2.61697, "11400": 2.6756, "11405": 2.66404, "11410": 2.61194, "11415": 2.57056, "11420": 2.69557, "11425": 2.66363, "11430": 2.67557, "11435": 2.61644, "11440": 2.68498, "11445": 2.66019, "11450": 2.69706, "11455": 2.62787, "11460": 2.58864, "11465": 2.61132, "11470": 2.66435, "11475": 2.6183, "11480": 2.71872, "11485": 2.72904, "11490": 2.70163, "11495": 2.65852, "11500": 2.64153, "11505": 2.5942, "11510": 2.6529, "11515": 2.65978, "11520": 2.6408, "11525": 2.74532, "11530": 2.62084, "11535": 2.66241, "11540": 2.66788, "11545": 2.69579, "11550": 2.72058, "11555": 2.6444, "11560": 2.68865, "11565": 2.72947, "11570": 2.72045, "11575": 2.68158, "11580": 2.6746, "11585": 2.67973, "11590": 2.6879, "11595": 2.63322, "11600": 2.6505, "11605": 2.66341, "11610": 2.61595, "11615": 2.66167, "11620": 2.65026, "11625": 2.63843, "11630": 2.72024, "11635": 2.67299, "11640": 2.70076, "11645": 2.73928, "11650": 2.66381, "11655": 2.61872, "11660": 2.64922, "11665": 2.59434, "11670": 2.67932, "11675": 2.69265, "11680": 2.63867, "11685": 2.66047, "11690": 2.63867, "11695": 2.66348, "11700": 2.6234, "11705": 2.64052, "11710": 2.63203, "11715": 2.64453, "11720": 2.61324, "11725": 2.66784, "11730": 2.66596, "11735": 2.62808, "11740": 2.61476, "11745": 2.69365, "11750": 2.65268, "11755": 2.66145, "11760": 2.634, "11765": 2.63583, "11770": 2.63564, "11775": 2.60478, "11780": 2.59301, "11785": 2.76369, "11790": 2.6778, "11795": 2.61838, "11800": 2.68366, "11805": 2.67069, "11810": 2.60381, "11815": 2.72331, "11820": 2.6947, "11825": 2.62886, "11830": 2.63341, "11835": 2.64762, "11840": 2.66269, "11845": 2.64957, "11850": 2.58321, "11855": 2.6229, "11860": 2.68902, "11865": 2.71669, "11870": 2.60736, "11875": 2.58456, "11880": 2.63088, "11885": 2.61851, "11890": 2.61205, "11895": 2.60755, "11900": 
2.6518, "11905": 2.58732, "11910": 2.62695, "11915": 2.66082, "11920": 2.5863, "11925": 2.61868, "11930": 2.62991, "11935": 2.6531, "11940": 2.63112, "11945": 2.56727, "11950": 2.66651, "11955": 2.60308, "11960": 2.67455, "11965": 2.66714, "11970": 2.6697, "11975": 2.66589, "11980": 2.6778, "11985": 2.65905, "11990": 2.59962, "11995": 2.66944, "12000": 2.59937, "12005": 2.61246, "12010": 2.65575, "12015": 2.5752, "12020": 2.65213, "12025": 2.67874, "12030": 2.59926, "12035": 2.5537, "12040": 2.63665, "12045": 2.62687, "12050": 2.60616, "12055": 2.58853, "12060": 2.61971, "12065": 2.72069, "12070": 2.59778, "12075": 2.68404, "12080": 2.63482, "12085": 2.64472, "12090": 2.65618, "12095": 2.63679, "12100": 2.6144, "12105": 2.63906, "12110": 2.62582, "12115": 2.70058, "12120": 2.58198, "12125": 2.5733, "12130": 2.53267, "12135": 2.56819, "12140": 2.65881, "12145": 2.68735, "12150": 2.65059, "12155": 2.6284, "12160": 2.64504, "12165": 2.67008, "12170": 2.62136, "12175": 2.63891, "12180": 2.60228, "12185": 2.62034, "12190": 2.57074, "12195": 2.57234, "12200": 2.68905, "12205": 2.65192, "12210": 2.67921, "12215": 2.6171, "12220": 2.72807, "12225": 2.54761, "12230": 2.6028, "12235": 2.62904, "12240": 2.66762, "12245": 2.61202, "12250": 2.5588, "12255": 2.56822, "12260": 2.59098, "12265": 2.64667, "12270": 2.6067, "12275": 2.63649, "12280": 2.59273, "12285": 2.69431, "12290": 2.65189, "12295": 2.63632, "12300": 2.63729, "12305": 2.67018, "12310": 2.61192, "12315": 2.62402, "12320": 2.61662, "12325": 2.65623, "12330": 2.62489, "12335": 2.55117, "12340": 2.61841, "12345": 2.60587, "12350": 2.55066, "12355": 2.58261, "12360": 2.62326, "12365": 2.60426, "12370": 2.60004, "12375": 2.63933, "12380": 2.59488, "12385": 2.64497, "12390": 2.62279, "12395": 2.57949, "12400": 2.52892, "12405": 2.66837, "12410": 2.58806, "12415": 2.63665, "12420": 2.56557, "12425": 2.63239, "12430": 2.67078, "12435": 2.63037, "12440": 2.57457, "12445": 2.6132, "12450": 2.58453, "12455": 2.59255, "12460": 2.56691, "12465": 2.50022, "12470": 2.69623, "12475": 2.61177, "12480": 2.56991, "12485": 2.64382, "12490": 2.59916, "12495": 2.60517, "12500": 2.66517, "12505": 2.55525, "12510": 2.64625, "12515": 2.62922, "12520": 2.60914, "12525": 2.53641, "12530": 2.62482, "12535": 2.63691, "12540": 2.57252, "12545": 2.63375, "12550": 2.57743, "12555": 2.59238, "12560": 2.63586, "12565": 2.68928, "12570": 2.63116, "12575": 2.57104, "12580": 2.6315, "12585": 2.61095, "12590": 2.61514, "12595": 2.57671, "12600": 2.62523, "12605": 2.63591, "12610": 2.57553, "12615": 2.59755, "12620": 2.57325, "12625": 2.62245, "12630": 2.5441, "12635": 2.69572, "12640": 2.63416, "12645": 2.5538, "12650": 2.51497, "12655": 2.55205, "12660": 2.62786, "12665": 2.59165, "12670": 2.60416, "12675": 2.70627, "12680": 2.57841, "12685": 2.61666, "12690": 2.61799, "12695": 2.57309, "12700": 2.63751, "12705": 2.56397, "12710": 2.60517, "12715": 2.65275, "12720": 2.55862, "12725": 2.62626, "12730": 2.61539, "12735": 2.58237, "12740": 2.62056, "12745": 2.51559, "12750": 2.63168, "12755": 2.6279, "12760": 2.64722, "12765": 2.63219, "12770": 2.52202, "12775": 2.64378, "12780": 2.68548, "12785": 2.62687, "12790": 2.61682, "12795": 2.63023, "12800": 2.63661, "12805": 2.59536, "12810": 2.56072, "12815": 2.59763, "12820": 2.58467, "12825": 2.56767, "12830": 2.64248, "12835": 2.54535, "12840": 2.60088, "12845": 2.64354, "12850": 2.61982, "12855": 2.6034, "12860": 2.59268, "12865": 2.50562, "12870": 2.59195, "12875": 2.53416, "12880": 2.62355, "12885": 2.58454, "12890": 2.56907, 
"12895": 2.59119, "12900": 2.57594, "12905": 2.61317, "12910": 2.52146, "12915": 2.55696, "12920": 2.60694, "12925": 2.55523, "12930": 2.6304, "12935": 2.52705, "12940": 2.53131, "12945": 2.60041, "12950": 2.51971, "12955": 2.61234, "12960": 2.53555, "12965": 2.60813, "12970": 2.6336, "12975": 2.55963, "12980": 2.60058, "12985": 2.56116, "12990": 2.58672, "12995": 2.57078, "13000": 2.55764, "13005": 2.56541, "13010": 2.53737, "13015": 2.5886, "13020": 2.57668, "13025": 2.57133, "13030": 2.58046, "13035": 2.56866, "13040": 2.59754, "13045": 2.55919, "13050": 2.62375, "13055": 2.57373, "13060": 2.52503, "13065": 2.56914, "13070": 2.56289, "13075": 2.59277, "13080": 2.62682, "13085": 2.67045, "13090": 2.59264, "13095": 2.56606, "13100": 2.59488, "13105": 2.63756, "13110": 2.57107, "13115": 2.58638, "13120": 2.56728, "13125": 2.71526, "13130": 2.62742, "13135": 2.64123, "13140": 2.60837, "13145": 2.53586, "13150": 2.6515, "13155": 2.55855, "13160": 2.56744, "13165": 2.60521, "13170": 2.51803, "13175": 2.62026, "13180": 2.65334, "13185": 2.5575, "13190": 2.51807, "13195": 2.53575, "13200": 2.63824, "13205": 2.53228, "13210": 2.59998, "13215": 2.57834, "13220": 2.61797, "13225": 2.58699, "13230": 2.5514, "13235": 2.63808, "13240": 2.61201, "13245": 2.60179, "13250": 2.53932, "13255": 2.62221, "13260": 2.59215, "13265": 2.5482, "13270": 2.5407, "13275": 2.59347, "13280": 2.54946, "13285": 2.59453, "13290": 2.63624, "13295": 2.56089, "13300": 2.56257, "13305": 2.61559, "13310": 2.64047, "13315": 2.62613, "13320": 2.57968, "13325": 2.56845, "13330": 2.62741, "13335": 2.56416, "13340": 2.5876, "13345": 2.61715, "13350": 2.54261, "13355": 2.57478, "13360": 2.57558, "13365": 2.57203, "13370": 2.65889, "13375": 2.60655, "13380": 2.5712, "13385": 2.57197, "13390": 2.47932, "13395": 2.54308, "13400": 2.54287, "13405": 2.60172, "13410": 2.56533, "13415": 2.57702, "13420": 2.58898, "13425": 2.64237, "13430": 2.54929, "13435": 2.55719, "13440": 2.56766, "13445": 2.50912, "13450": 2.54854, "13455": 2.59088, "13460": 2.58884, "13465": 2.61791, "13470": 2.52516, "13475": 2.58921, "13480": 2.62476, "13485": 2.52471, "13490": 2.60909, "13495": 2.59524, "13500": 2.54781, "13505": 2.61394, "13510": 2.59857, "13515": 2.59888, "13520": 2.56803, "13525": 2.52762, "13530": 2.64271, "13535": 2.51992, "13540": 2.56217, "13545": 2.55239, "13550": 2.6756, "13555": 2.60427, "13560": 2.54746, "13565": 2.58994, "13570": 2.68741, "13575": 2.57651, "13580": 2.56945, "13585": 2.51041, "13590": 2.54349, "13595": 2.56837, "13600": 2.52876, "13605": 2.6248, "13610": 2.53533, "13615": 2.537, "13620": 2.61055, "13625": 2.57781, "13630": 2.56339, "13635": 2.52168, "13640": 2.65664, "13645": 2.55489, "13650": 2.61424, "13655": 2.45835, "13660": 2.54702, "13665": 2.54476, "13670": 2.55717, "13675": 2.62447, "13680": 2.51433, "13685": 2.54811, "13690": 2.59234, "13695": 2.51467, "13700": 2.54511, "13705": 2.58848, "13710": 2.55644, "13715": 2.53782, "13720": 2.54936, "13725": 2.55671, "13730": 2.5697, "13735": 2.53823, "13740": 2.59261, "13745": 2.53542, "13750": 2.5007, "13755": 2.54192, "13760": 2.6271, "13765": 2.52663, "13770": 2.5395, "13775": 2.57492, "13780": 2.56029, "13785": 2.58141, "13790": 2.56957, "13795": 2.55501, "13800": 2.50055, "13805": 2.54906, "13810": 2.59059, "13815": 2.58651, "13820": 2.56579, "13825": 2.58555, "13830": 2.5373, "13835": 2.57274, "13840": 2.56729, "13845": 2.62429, "13850": 2.50062, "13855": 2.53302, "13860": 2.61544, "13865": 2.44703, "13870": 2.64723, "13875": 2.53494, "13880": 2.48886, "13885": 
2.56703, "13890": 2.58238, "13895": 2.54614, "13900": 2.513, "13905": 2.57205, "13910": 2.63022, "13915": 2.50957, "13920": 2.53724, "13925": 2.646, "13930": 2.49701, "13935": 2.50142, "13940": 2.5996, "13945": 2.53818, "13950": 2.56019, "13955": 2.50514, "13960": 2.58747, "13965": 2.53405, "13970": 2.56664, "13975": 2.54674, "13980": 2.55483, "13985": 2.54781, "13990": 2.55832, "13995": 2.53151, "14000": 2.53513, "14005": 2.54899, "14010": 2.50325, "14015": 2.49923, "14020": 2.5348, "14025": 2.44673, "14030": 2.55698, "14035": 2.52466, "14040": 2.57399, "14045": 2.58473, "14050": 2.55808, "14055": 2.56655, "14060": 2.55867, "14065": 2.47957, "14070": 2.47346, "14075": 2.55475, "14080": 2.47093, "14085": 2.53132, "14090": 2.59353, "14095": 2.44782, "14100": 2.47953, "14105": 2.52912, "14110": 2.48901, "14115": 2.52541, "14120": 2.58565, "14125": 2.54468, "14130": 2.60062, "14135": 2.53037, "14140": 2.51363, "14145": 2.52582, "14150": 2.56797, "14155": 2.52747, "14160": 2.50703, "14165": 2.53475, "14170": 2.51354, "14175": 2.51231, "14180": 2.49028, "14185": 2.53436, "14190": 2.43852, "14195": 2.57103, "14200": 2.51694, "14205": 2.56459, "14210": 2.54143, "14215": 2.48153, "14220": 2.5038, "14225": 2.51258, "14230": 2.54808, "14235": 2.55006, "14240": 2.55199, "14245": 2.52679, "14250": 2.50121, "14255": 2.55201, "14260": 2.56319, "14265": 2.55444, "14270": 2.52186, "14275": 2.5378, "14280": 2.54127, "14285": 2.47667, "14290": 2.55945, "14295": 2.63283, "14300": 2.54012, "14305": 2.55313, "14310": 2.44285, "14315": 2.52148, "14320": 2.54937, "14325": 2.54371, "14330": 2.48088, "14335": 2.49358, "14340": 2.57898, "14345": 2.57241, "14350": 2.47678, "14355": 2.49254, "14360": 2.52623, "14365": 2.46837, "14370": 2.53101, "14375": 2.55297, "14380": 2.47552, "14385": 2.47012, "14390": 2.61002, "14395": 2.46474, "14400": 2.52926, "14405": 2.50534, "14410": 2.53872, "14415": 2.46496, "14420": 2.54748, "14425": 2.4745, "14430": 2.48522, "14435": 2.49869, "14440": 2.38348, "14445": 2.548, "14450": 2.55235, "14455": 2.51485, "14460": 2.61128, "14465": 2.46977, "14470": 2.51597, "14475": 2.47024, "14480": 2.50381, "14485": 2.47865, "14490": 2.61438, "14495": 2.59234, "14500": 2.47234, "14505": 2.62139, "14510": 2.47655, "14515": 2.45457, "14520": 2.50775, "14525": 2.55715, "14530": 2.47944, "14535": 2.51, "14540": 2.56701, "14545": 2.5184, "14550": 2.57054, "14555": 2.55882, "14560": 2.52728, "14565": 2.53767, "14570": 2.45317, "14575": 2.48191, "14580": 2.50638, "14585": 2.51519, "14590": 2.56012, "14595": 2.54637, "14600": 2.57749, "14605": 2.47272, "14610": 2.54844, "14615": 2.55955, "14620": 2.51261, "14625": 2.50055, "14630": 2.46677, "14635": 2.5693, "14640": 2.48963, "14645": 2.60714, "14650": 2.50895, "14655": 2.52651, "14660": 2.49404, "14665": 2.53759, "14670": 2.58254, "14675": 2.5602, "14680": 2.51686, "14685": 2.52724, "14690": 2.57181, "14695": 2.52172, "14700": 2.54111, "14705": 2.57362, "14710": 2.50387, "14715": 2.49679, "14720": 2.46795, "14725": 2.44992, "14730": 2.51812, "14735": 2.48557, "14740": 2.53221, "14745": 2.51225, "14750": 2.4892, "14755": 2.57135, "14760": 2.48469, "14765": 2.48247, "14770": 2.48621, "14775": 2.4766, "14780": 2.53595, "14785": 2.50758, "14790": 2.5894, "14795": 2.52775, "14800": 2.51704, "14805": 2.52047, "14810": 2.54194, "14815": 2.51753, "14820": 2.50229, "14825": 2.57713, "14830": 2.6214, "14835": 2.5582, "14840": 2.50678, "14845": 2.50295, "14850": 2.55495, "14855": 2.54837, "14860": 2.5016, "14865": 2.53925, "14870": 2.50385, "14875": 2.47113, 
"14880": 2.51974, "14885": 2.51555, "14890": 2.4309, "14895": 2.50415, "14900": 2.52952, "14905": 2.47934, "14910": 2.53182, "14915": 2.52612, "14920": 2.55562, "14925": 2.50501, "14930": 2.52282, "14935": 2.48675, "14940": 2.56662, "14945": 2.49063, "14950": 2.51689, "14955": 2.59257, "14960": 2.51669, "14965": 2.48009, "14970": 2.57317, "14975": 2.56462, "14980": 2.50847, "14985": 2.45549, "14990": 2.53052, "14995": 2.50715, "15000": 2.54051, "15005": 2.55395, "15010": 2.51647, "15015": 2.45769, "15020": 2.49226, "15025": 2.51795, "15030": 2.53211, "15035": 2.5242, "15040": 2.50607, "15045": 2.50431, "15050": 2.46404, "15055": 2.53407, "15060": 2.47909, "15065": 2.52917, "15070": 2.53447, "15075": 2.54444, "15080": 2.45074, "15085": 2.49691, "15090": 2.52216, "15095": 2.53786, "15100": 2.50923, "15105": 2.56961, "15110": 2.47396, "15115": 2.49042, "15120": 2.53848, "15125": 2.55762, "15130": 2.60805, "15135": 2.50913, "15140": 2.4808, "15145": 2.52995, "15150": 2.4492, "15155": 2.51, "15160": 2.49198, "15165": 2.50924, "15170": 2.55737, "15175": 2.46719, "15180": 2.46767, "15185": 2.52421, "15190": 2.5426, "15195": 2.48619, "15200": 2.49134, "15205": 2.53762, "15210": 2.49442, "15215": 2.49165, "15220": 2.45663, "15225": 2.46861, "15230": 2.45754, "15235": 2.51414, "15240": 2.50037, "15245": 2.46544, "15250": 2.51031, "15255": 2.54527, "15260": 2.50129, "15265": 2.54273, "15270": 2.47823, "15275": 2.44973, "15280": 2.49764, "15285": 2.54499, "15290": 2.44607, "15295": 2.46866, "15300": 2.4998, "15305": 2.52469, "15310": 2.49174, "15315": 2.53219, "15320": 2.52595, "15325": 2.51654, "15330": 2.52461, "15335": 2.49264, "15340": 2.4977, "15345": 2.51686, "15350": 2.53796, "15355": 2.55194, "15360": 2.489, "15365": 2.56138, "15370": 2.49066, "15375": 2.51368, "15380": 2.45266, "15385": 2.47453, "15390": 2.49703, "15395": 2.48652, "15400": 2.47641, "15405": 2.43079, "15410": 2.51089, "15415": 2.52553, "15420": 2.47751, "15425": 2.49303, "15430": 2.49635, "15435": 2.50018, "15440": 2.49207, "15445": 2.49047, "15450": 2.44702, "15455": 2.48174, "15460": 2.50566, "15465": 2.49167, "15470": 2.41572, "15475": 2.49127, "15480": 2.45375, "15485": 2.47078, "15490": 2.54099, "15495": 2.4913, "15500": 2.44936, "15505": 2.43694, "15510": 2.44685, "15515": 2.47036, "15520": 2.46755, "15525": 2.44782, "15530": 2.46013, "15535": 2.52451, "15540": 2.52984, "15545": 2.50061, "15550": 2.49259, "15555": 2.46399, "15560": 2.47404, "15565": 2.4887, "15570": 2.49138, "15575": 2.52594, "15580": 2.54912, "15585": 2.56719, "15590": 2.50713, "15595": 2.44509, "15600": 2.41372, "15605": 2.46097, "15610": 2.50809, "15615": 2.46035, "15620": 2.49319, "15625": 2.5112, "15630": 2.42868, "15635": 2.49691, "15640": 2.45903, "15645": 2.47477, "15650": 2.40959, "15655": 2.44772, "15660": 2.45096, "15665": 2.50064, "15670": 2.4499, "15675": 2.49245, "15680": 2.49132, "15685": 2.48121, "15690": 2.48929, "15695": 2.44007, "15700": 2.47365, "15705": 2.51747, "15710": 2.48863, "15715": 2.45031, "15720": 2.48492, "15725": 2.51189, "15730": 2.50253, "15735": 2.51222, "15740": 2.44764, "15745": 2.49243, "15750": 2.49646, "15755": 2.46726, "15760": 2.46359, "15765": 2.47231, "15770": 2.45777, "15775": 2.43411, "15780": 2.42696, "15785": 2.45965, "15790": 2.52273, "15795": 2.48772, "15800": 2.47926, "15805": 2.49072, "15810": 2.47354, "15815": 2.48364, "15820": 2.48983, "15825": 2.44673, "15830": 2.43785, "15835": 2.41303, "15840": 2.42195, "15845": 2.44792, "15850": 2.45737, "15855": 2.48158, "15860": 2.52756, "15865": 2.5056, 
"15870": 2.3898, "15875": 2.4816, "15880": 2.42618, "15885": 2.45568, "15890": 2.48605, "15895": 2.55664, "15900": 2.48389, "15905": 2.47747, "15910": 2.48984, "15915": 2.44328, "15920": 2.5385, "15925": 2.47816, "15930": 2.48153, "15935": 2.44754, "15940": 2.45328, "15945": 2.50203, "15950": 2.44309, "15955": 2.48324, "15960": 2.50825, "15965": 2.52521, "15970": 2.47815, "15975": 2.49696, "15980": 2.42072, "15985": 2.43086, "15990": 2.46603, "15995": 2.45128, "16000": 2.40496, "16005": 2.46461, "16010": 2.49573, "16015": 2.49557, "16020": 2.4956, "16025": 2.49583, "16030": 2.50431, "16035": 2.47618, "16040": 2.42737, "16045": 2.51069, "16050": 2.50198, "16055": 2.50308, "16060": 2.48399, "16065": 2.51257, "16070": 2.43721, "16075": 2.52069, "16080": 2.52175, "16085": 2.56192, "16090": 2.44789, "16095": 2.45504, "16100": 2.51356, "16105": 2.42471, "16110": 2.49479, "16115": 2.46588, "16120": 2.4336, "16125": 2.51239, "16130": 2.48463, "16135": 2.42739, "16140": 2.48636, "16145": 2.44716, "16150": 2.39461, "16155": 2.48547, "16160": 2.45536, "16165": 2.44804, "16170": 2.44541, "16175": 2.36202, "16180": 2.50395, "16185": 2.39566, "16190": 2.49597, "16195": 2.47765, "16200": 2.4381, "16205": 2.45674, "16210": 2.47912, "16215": 2.5187, "16220": 2.43046, "16225": 2.41398, "16230": 2.44873, "16235": 2.49673, "16240": 2.47924, "16245": 2.53034, "16250": 2.45959, "16255": 2.44933, "16260": 2.43243, "16265": 2.38695, "16270": 2.49234, "16275": 2.44726, "16280": 2.49864, "16285": 2.44253, "16290": 2.3657, "16295": 2.46481, "16300": 2.45001, "16305": 2.44041, "16310": 2.47898, "16315": 2.4445, "16320": 2.48635, "16325": 2.45317, "16330": 2.46161, "16335": 2.44303, "16340": 2.50838, "16345": 2.53326, "16350": 2.41259, "16355": 2.43974, "16360": 2.50655, "16365": 2.43726, "16370": 2.40555, "16375": 2.41926, "16380": 2.52422, "16385": 2.40456, "16390": 2.49969, "16395": 2.45075, "16400": 2.4142, "16405": 2.48007, "16410": 2.40725, "16415": 2.45204, "16420": 2.48571, "16425": 2.41324, "16430": 2.42244, "16435": 2.46227, "16440": 2.40666, "16445": 2.513, "16450": 2.44168, "16455": 2.47747, "16460": 2.48168, "16465": 2.44564, "16470": 2.4879, "16475": 2.4592, "16480": 2.4731, "16485": 2.44843, "16490": 2.44142, "16495": 2.41214, "16500": 2.49427, "16505": 2.41273, "16510": 2.469, "16515": 2.48084, "16520": 2.50784, "16525": 2.46091, "16530": 2.49641, "16535": 2.47074, "16540": 2.49757, "16545": 2.47899, "16550": 2.48171, "16555": 2.44297, "16560": 2.5034, "16565": 2.36417, "16570": 2.45414, "16575": 2.39732, "16580": 2.42455, "16585": 2.50249, "16590": 2.4489, "16595": 2.45152, "16600": 2.47188, "16605": 2.47726, "16610": 2.43356, "16615": 2.47861, "16620": 2.52104, "16625": 2.4159, "16630": 2.45526, "16635": 2.39504, "16640": 2.54482, "16645": 2.49832, "16650": 2.49973, "16655": 2.49677, "16660": 2.47425, "16665": 2.38319, "16670": 2.40819, "16675": 2.4163, "16680": 2.41892, "16685": 2.47293, "16690": 2.48028, "16695": 2.43836, "16700": 2.42113, "16705": 2.49793, "16710": 2.4682, "16715": 2.44613, "16720": 2.46214, "16725": 2.4646, "16730": 2.46518, "16735": 2.48973, "16740": 2.45451, "16745": 2.4402, "16750": 2.46095, "16755": 2.55828, "16760": 2.54512, "16765": 2.49886, "16770": 2.47801, "16775": 2.50382, "16780": 2.42387, "16785": 2.45355, "16790": 2.48988, "16795": 2.49787, "16800": 2.41944, "16805": 2.39615, "16810": 2.44072, "16815": 2.49019, "16820": 2.43406, "16825": 2.4786, "16830": 2.48173, "16835": 2.41852, "16840": 2.45135, "16845": 2.48897, "16850": 2.43333, "16855": 2.42701, "16860": 
2.39255, "16865": 2.44412, "16870": 2.42852, "16875": 2.46488, "16880": 2.45621, "16885": 2.49182, "16890": 2.45468, "16895": 2.42403, "16900": 2.51714, "16905": 2.43941, "16910": 2.45123, "16915": 2.41709, "16920": 2.42061, "16925": 2.51743, "16930": 2.51145, "16935": 2.47121, "16940": 2.45705, "16945": 2.47648, "16950": 2.3952, "16955": 2.40561, "16960": 2.44914, "16965": 2.44452, "16970": 2.45375, "16975": 2.47598, "16980": 2.41944, "16985": 2.40015, "16990": 2.42709, "16995": 2.406, "17000": 2.40551, "17005": 2.41182, "17010": 2.47111, "17015": 2.5243, "17020": 2.47188, "17025": 2.3945, "17030": 2.35537, "17035": 2.47306, "17040": 2.4954, "17045": 2.47069, "17050": 2.3938, "17055": 2.42186, "17060": 2.48148, "17065": 2.48042, "17070": 2.40329, "17075": 2.45216, "17080": 2.42048, "17085": 2.41868, "17090": 2.34541, "17095": 2.43817, "17100": 2.47529, "17105": 2.44024, "17110": 2.40316, "17115": 2.46158, "17120": 2.4592, "17125": 2.43662, "17130": 2.44501, "17135": 2.50504, "17140": 2.3255, "17145": 2.37211, "17150": 2.50763, "17155": 2.4365, "17160": 2.47676, "17165": 2.45084, "17170": 2.42123, "17175": 2.46927, "17180": 2.53405, "17185": 2.46928, "17190": 2.40347, "17195": 2.42605, "17200": 2.4592, "17205": 2.44091, "17210": 2.39332, "17215": 2.44654, "17220": 2.46383, "17225": 2.48457, "17230": 2.41352, "17235": 2.44827, "17240": 2.40546, "17245": 2.39759, "17250": 2.44277, "17255": 2.43563, "17260": 2.45721, "17265": 2.36993, "17270": 2.42919, "17275": 2.43293, "17280": 2.34419, "17285": 2.4664, "17290": 2.46384, "17295": 2.39899, "17300": 2.42177, "17305": 2.43774, "17310": 2.43814, "17315": 2.40864, "17320": 2.38905, "17325": 2.43212, "17330": 2.46917, "17335": 2.44338, "17340": 2.40546, "17345": 2.45713, "17350": 2.40209, "17355": 2.42045, "17360": 2.42684, "17365": 2.48148, "17370": 2.3838, "17375": 2.4376, "17380": 2.47337, "17385": 2.34403, "17390": 2.41918, "17395": 2.44033, "17400": 2.43775, "17405": 2.42476, "17410": 2.39657, "17415": 2.38495, "17420": 2.35706, "17425": 2.50211, "17430": 2.43332, "17435": 2.41288, "17440": 2.45428, "17445": 2.44479, "17450": 2.42002, "17455": 2.49392, "17460": 2.42323, "17465": 2.37673, "17470": 2.42724, "17475": 2.44552, "17480": 2.46967, "17485": 2.43613, "17490": 2.43368, "17495": 2.40922, "17500": 2.37625, "17505": 2.44845, "17510": 2.43083, "17515": 2.43062, "17520": 2.47645, "17525": 2.46705, "17530": 2.49624, "17535": 2.42266, "17540": 2.47968, "17545": 2.51642, "17550": 2.44719, "17555": 2.44205, "17560": 2.44272, "17565": 2.40909, "17570": 2.38656, "17575": 2.39089, "17580": 2.46535, "17585": 2.46662, "17590": 2.51274, "17595": 2.44063, "17600": 2.48396, "17605": 2.42971, "17610": 2.41466, "17615": 2.3959, "17620": 2.42656, "17625": 2.42163, "17630": 2.34902, "17635": 2.41727, "17640": 2.42908, "17645": 2.45633, "17650": 2.39428, "17655": 2.41976, "17660": 2.42647, "17665": 2.43743, "17670": 2.42228, "17675": 2.38728, "17680": 2.39554, "17685": 2.45746, "17690": 2.41751, "17695": 2.4206, "17700": 2.38284, "17705": 2.38236, "17710": 2.42481, "17715": 2.5367, "17720": 2.39077, "17725": 2.45618, "17730": 2.43272, "17735": 2.46245, "17740": 2.36387, "17745": 2.52177, "17750": 2.46785, "17755": 2.41181, "17760": 2.42189, "17765": 2.49631, "17770": 2.37977, "17775": 2.44955, "17780": 2.44358, "17785": 2.42979, "17790": 2.47238, "17795": 2.41614, "17800": 2.39141, "17805": 2.30612, "17810": 2.36194, "17815": 2.41323, "17820": 2.44998, "17825": 2.38217, "17830": 2.36344, "17835": 2.40839, "17840": 2.36134, "17845": 2.39056, "17850": 2.4056, 
"17855": 2.42441, "17860": 2.4678, "17865": 2.41908, "17870": 2.39208, "17875": 2.40244, "17880": 2.37676, "17885": 2.44609, "17890": 2.37577, "17895": 2.40089, "17900": 2.39443, "17905": 2.39667, "17910": 2.37887, "17915": 2.48064, "17920": 2.5147, "17925": 2.43166, "17930": 2.42804, "17935": 2.41024, "17940": 2.43555, "17945": 2.44583, "17950": 2.34442, "17955": 2.37603, "17960": 2.48223, "17965": 2.42399, "17970": 2.42597, "17975": 2.43686, "17980": 2.42038, "17985": 2.30797, "17990": 2.46654, "17995": 2.42178, "18000": 2.43888, "18005": 2.41845, "18010": 2.37627, "18015": 2.39848, "18020": 2.38071, "18025": 2.42429, "18030": 2.38881, "18035": 2.43121, "18040": 2.39287, "18045": 2.40773, "18050": 2.48876, "18055": 2.3926, "18060": 2.49749, "18065": 2.39598, "18070": 2.41372, "18075": 2.44045, "18080": 2.43111, "18085": 2.40779, "18090": 2.41684, "18095": 2.46468, "18100": 2.36652, "18105": 2.47032, "18110": 2.38169, "18115": 2.37629, "18120": 2.42039, "18125": 2.37882, "18130": 2.41203, "18135": 2.43465, "18140": 2.47643, "18145": 2.40054, "18150": 2.39025, "18155": 2.36708, "18160": 2.37533, "18165": 2.41515, "18170": 2.39142, "18175": 2.48507, "18180": 2.37881, "18185": 2.42664, "18190": 2.40111, "18195": 2.37567, "18200": 2.45754, "18205": 2.44652, "18210": 2.37053, "18215": 2.34708, "18220": 2.45756, "18225": 2.42595, "18230": 2.43828, "18235": 2.40852, "18240": 2.43419, "18245": 2.43789, "18250": 2.46067, "18255": 2.4784, "18260": 2.42888, "18265": 2.46003, "18270": 2.43324, "18275": 2.41679, "18280": 2.42222, "18285": 2.49988, "18290": 2.41431, "18295": 2.40181, "18300": 2.37895, "18305": 2.34427, "18310": 2.46268, "18315": 2.40605, "18320": 2.44888, "18325": 2.46157, "18330": 2.45411, "18335": 2.45376, "18340": 2.38397, "18345": 2.37488, "18350": 2.4082, "18355": 2.37267, "18360": 2.39132, "18365": 2.41457, "18370": 2.423, "18375": 2.36881, "18380": 2.3947, "18385": 2.42798, "18390": 2.48668, "18395": 2.37245, "18400": 2.37099, "18405": 2.44243, "18410": 2.41086, "18415": 2.47525, "18420": 2.46521, "18425": 2.40075, "18430": 2.44301, "18435": 2.3255, "18440": 2.43547, "18445": 2.41284, "18450": 2.43082, "18455": 2.33274, "18460": 2.41653, "18465": 2.3973, "18470": 2.36908, "18475": 2.46564, "18480": 2.44993, "18485": 2.42418, "18490": 2.37911, "18495": 2.48473, "18500": 2.4168, "18505": 2.44304, "18510": 2.40412, "18515": 2.4168, "18520": 2.48499, "18525": 2.43454, "18530": 2.42269, "18535": 2.50366, "18540": 2.37487, "18545": 2.42257, "18550": 2.45166, "18555": 2.34094, "18560": 2.36774, "18565": 2.47254, "18570": 2.41683, "18575": 2.33016, "18580": 2.4294, "18585": 2.37271, "18590": 2.42816, "18595": 2.45386, "18600": 2.43566, "18605": 2.34271, "18610": 2.40295, "18615": 2.35301, "18620": 2.43446, "18625": 2.37508, "18630": 2.38459, "18635": 2.34192, "18640": 2.42029, "18645": 2.41636, "18650": 2.37845, "18655": 2.46467, "18660": 2.38221, "18665": 2.42516, "18670": 2.35528, "18675": 2.43284, "18680": 2.48235, "18685": 2.47761, "18690": 2.38198, "18695": 2.48642, "18700": 2.34742, "18705": 2.43553, "18710": 2.39914, "18715": 2.39883, "18720": 2.37735, "18725": 2.38832, "18730": 2.41317, "18735": 2.40205, "18740": 2.4096, "18745": 2.39361, "18750": 2.49165, "18755": 2.37155, "18760": 2.46098, "18765": 2.35458, "18770": 2.37699, "18775": 2.35833, "18780": 2.45762, "18785": 2.39996, "18790": 2.36857, "18795": 2.40356, "18800": 2.37128, "18805": 2.3943, "18810": 2.33809, "18815": 2.41736, "18820": 2.39103, "18825": 2.43312, "18830": 2.36762, "18835": 2.41254, "18840": 2.425, 
"18845": 2.42869, "18850": 2.31579, "18855": 2.39871, "18860": 2.34794, "18865": 2.3959, "18870": 2.36778, "18875": 2.39222, "18880": 2.4762, "18885": 2.39689, "18890": 2.34042, "18895": 2.38953, "18900": 2.38134, "18905": 2.30373, "18910": 2.33727, "18915": 2.42243, "18920": 2.39787, "18925": 2.39317, "18930": 2.39796, "18935": 2.37159, "18940": 2.38655, "18945": 2.35764, "18950": 2.36596, "18955": 2.42772, "18960": 2.50969, "18965": 2.44955, "18970": 2.39652, "18975": 2.40257, "18980": 2.39553, "18985": 2.52201, "18990": 2.43734, "18995": 2.42542, "19000": 2.39653, "19005": 2.40917, "19010": 2.34446, "19015": 2.40616, "19020": 2.35189, "19025": 2.41675, "19030": 2.34652, "19035": 2.41663, "19040": 2.38631, "19045": 2.44107, "19050": 2.39706, "19055": 2.40741, "19060": 2.38411, "19065": 2.41564, "19070": 2.36857, "19075": 2.35327, "19080": 2.38747, "19085": 2.44892, "19090": 2.38754, "19095": 2.40876, "19100": 2.35874, "19105": 2.44681, "19110": 2.44715, "19115": 2.39994, "19120": 2.45628, "19125": 2.40859, "19130": 2.40452, "19135": 2.4079, "19140": 2.34946, "19145": 2.38648, "19150": 2.38342, "19155": 2.40601, "19160": 2.37017, "19165": 2.42529, "19170": 2.44622, "19175": 2.41276, "19180": 2.33968, "19185": 2.38553, "19190": 2.37275, "19195": 2.34306, "19200": 2.43968, "19205": 2.38122, "19210": 2.40294, "19215": 2.40924, "19220": 2.3186, "19225": 2.32856, "19230": 2.33527, "19235": 2.35411, "19240": 2.41046, "19245": 2.44643, "19250": 2.39749, "19255": 2.46706, "19260": 2.42789, "19265": 2.38296, "19270": 2.4031, "19275": 2.44072, "19280": 2.39037, "19285": 2.38325, "19290": 2.40355, "19295": 2.35335, "19300": 2.33202, "19305": 2.4495, "19310": 2.44715, "19315": 2.39553, "19320": 2.3801, "19325": 2.32493, "19330": 2.37953, "19335": 2.36259, "19340": 2.39048, "19345": 2.42083, "19350": 2.37465, "19355": 2.44569, "19360": 2.39819, "19365": 2.39324, "19370": 2.33053, "19375": 2.40529, "19380": 2.34855, "19385": 2.45045, "19390": 2.40835, "19395": 2.3657, "19400": 2.37979, "19405": 2.3332, "19410": 2.42831, "19415": 2.36848, "19420": 2.3576, "19425": 2.38642, "19430": 2.44992, "19435": 2.36695, "19440": 2.42176, "19445": 2.35118, "19450": 2.33638, "19455": 2.44396, "19460": 2.40305, "19465": 2.37406, "19470": 2.34536, "19475": 2.35016, "19480": 2.336, "19485": 2.40263, "19490": 2.38698, "19495": 2.32886, "19500": 2.37257, "19505": 2.3227, "19510": 2.36563, "19515": 2.41845, "19520": 2.38112, "19525": 2.45782, "19530": 2.40396, "19535": 2.3967, "19540": 2.41049, "19545": 2.38652, "19550": 2.41009, "19555": 2.34464, "19560": 2.44459, "19565": 2.3641, "19570": 2.3821, "19575": 2.30605, "19580": 2.46212, "19585": 2.37327, "19590": 2.39611, "19595": 2.35703, "19600": 2.40679, "19605": 2.46971, "19610": 2.38118, "19615": 2.34856, "19620": 2.34173, "19625": 2.41832, "19630": 2.35309, "19635": 2.36032, "19640": 2.41745, "19645": 2.34843, "19650": 2.31873, "19655": 2.36141, "19660": 2.37073, "19665": 2.37448, "19670": 2.36722, "19675": 2.41436, "19680": 2.42165, "19685": 2.32961, "19690": 2.36172, "19695": 2.43708, "19700": 2.37682, "19705": 2.4022, "19710": 2.38115, "19715": 2.38956, "19720": 2.31949, "19725": 2.42833, "19730": 2.32058, "19735": 2.40158, "19740": 2.33261, "19745": 2.35361, "19750": 2.31379, "19755": 2.35865, "19760": 2.40765, "19765": 2.29906, "19770": 2.37548, "19775": 2.4275, "19780": 2.32852, "19785": 2.339, "19790": 2.37538, "19795": 2.38606, "19800": 2.369, "19805": 2.40589, "19810": 2.40001, "19815": 2.42064, "19820": 2.41722, "19825": 2.42458, "19830": 2.41265, "19835": 
2.41552, "19840": 2.39723, "19845": 2.3385, "19850": 2.43494, "19855": 2.36312, "19860": 2.37535, "19865": 2.42295, "19870": 2.36038, "19875": 2.37414, "19880": 2.38298, "19885": 2.35939, "19890": 2.39491, "19895": 2.42312, "19900": 2.36329, "19905": 2.37997, "19910": 2.35264, "19915": 2.31892, "19920": 2.32988, "19925": 2.38648, "19930": 2.38002, "19935": 2.34798, "19940": 2.35272, "19945": 2.37277, "19950": 2.41277, "19955": 2.35683, "19960": 2.3704, "19965": 2.39593, "19970": 2.40095, "19975": 2.26372, "19980": 2.38692, "19985": 2.43809, "19990": 2.39434, "19995": 2.41372, "20000": 2.38906, "20005": 2.30032, "20010": 2.35922, "20015": 2.43212, "20020": 2.3337, "20025": 2.31074, "20030": 2.42182, "20035": 2.34895, "20040": 2.34632, "20045": 2.38086, "20050": 2.36027, "20055": 2.32707, "20060": 2.40298, "20065": 2.30458, "20070": 2.40015, "20075": 2.34998, "20080": 2.42817, "20085": 2.36446, "20090": 2.37308, "20095": 2.35532, "20100": 2.32496, "20105": 2.38745, "20110": 2.3737, "20115": 2.35025, "20120": 2.38688, "20125": 2.2909, "20130": 2.39916, "20135": 2.33251, "20140": 2.30799, "20145": 2.38779, "20150": 2.3502, "20155": 2.40807, "20160": 2.44448, "20165": 2.38212, "20170": 2.33249, "20175": 2.38048, "20180": 2.38184, "20185": 2.3968, "20190": 2.37854, "20195": 2.42879, "20200": 2.37715, "20205": 2.42592, "20210": 2.4693, "20215": 2.38244, "20220": 2.42324, "20225": 2.32065, "20230": 2.40346, "20235": 2.37148, "20240": 2.34954, "20245": 2.38754, "20250": 2.41018, "20255": 2.3833, "20260": 2.4334, "20265": 2.41381, "20270": 2.37899, "20275": 2.34398, "20280": 2.45173, "20285": 2.3564, "20290": 2.38202, "20295": 2.39073, "20300": 2.36707, "20305": 2.32775, "20310": 2.41006, "20315": 2.31644, "20320": 2.42561, "20325": 2.37666, "20330": 2.41325, "20335": 2.30728, "20340": 2.37706, "20345": 2.41626, "20350": 2.38124, "20355": 2.38684, "20360": 2.36716, "20365": 2.39931, "20370": 2.32089, "20375": 2.28651, "20380": 2.42274, "20385": 2.37945, "20390": 2.35088, "20395": 2.35963, "20400": 2.33601, "20405": 2.31496, "20410": 2.40255, "20415": 2.35648, "20420": 2.39091, "20425": 2.35714, "20430": 2.32674, "20435": 2.30927, "20440": 2.31027, "20445": 2.39674, "20450": 2.33185, "20455": 2.35252, "20460": 2.44892, "20465": 2.4129, "20470": 2.37852, "20475": 2.35412, "20480": 2.33811, "20485": 2.39076, "20490": 2.321, "20495": 2.36639, "20500": 2.40627, "20505": 2.368, "20510": 2.32483, "20515": 2.39983, "20520": 2.4049, "20525": 2.35408, "20530": 2.37154, "20535": 2.33654, "20540": 2.32926, "20545": 2.37767, "20550": 2.29299, "20555": 2.31743, "20560": 2.34778, "20565": 2.3867, "20570": 2.34457, "20575": 2.31684, "20580": 2.37415, "20585": 2.34101, "20590": 2.40572, "20595": 2.36284, "20600": 2.34243, "20605": 2.39897, "20610": 2.37088, "20615": 2.37972, "20620": 2.34836, "20625": 2.3585, "20630": 2.37168, "20635": 2.40286, "20640": 2.31664, "20645": 2.33201, "20650": 2.34929, "20655": 2.31962, "20660": 2.38719, "20665": 2.31494, "20670": 2.37108, "20675": 2.33254, "20680": 2.38914, "20685": 2.29669, "20690": 2.3775, "20695": 2.2948, "20700": 2.36725, "20705": 2.32257, "20710": 2.33786, "20715": 2.35524, "20720": 2.35394, "20725": 2.335, "20730": 2.37528, "20735": 2.38591, "20740": 2.424, "20745": 2.283, "20750": 2.34793, "20755": 2.30771, "20760": 2.37801, "20765": 2.31467, "20770": 2.31971, "20775": 2.35568, "20780": 2.3881, "20785": 2.38992, "20790": 2.33541, "20795": 2.35441, "20800": 2.3248, "20805": 2.37348, "20810": 2.41415, "20815": 2.37316, "20820": 2.34801, "20825": 2.338, "20830": 
2.40158, "20835": 2.41272, "20840": 2.36521, "20845": 2.41534, "20850": 2.2952, "20855": 2.37168, "20860": 2.3686, "20865": 2.36697, "20870": 2.3271, "20875": 2.36963, "20880": 2.3456, "20885": 2.31044, "20890": 2.32657, "20895": 2.30601, "20900": 2.36187, "20905": 2.41931, "20910": 2.33553, "20915": 2.39128, "20920": 2.32324, "20925": 2.309, "20930": 2.37342, "20935": 2.44946, "20940": 2.36332, "20945": 2.38698, "20950": 2.37933, "20955": 2.32653, "20960": 2.26662, "20965": 2.3766, "20970": 2.319, "20975": 2.39716, "20980": 2.36366, "20985": 2.38132, "20990": 2.40066, "20995": 2.39921, "21000": 2.37208, "21005": 2.35555, "21010": 2.35227, "21015": 2.42509, "21020": 2.36388, "21025": 2.33371, "21030": 2.3456, "21035": 2.33754, "21040": 2.29694, "21045": 2.34311, "21050": 2.27445, "21055": 2.40033, "21060": 2.39551, "21065": 2.35368, "21070": 2.34722, "21075": 2.35047, "21080": 2.36334, "21085": 2.29607, "21090": 2.34174, "21095": 2.28953, "21100": 2.39744, "21105": 2.36791, "21110": 2.3695, "21115": 2.34718, "21120": 2.34516, "21125": 2.30035, "21130": 2.31615, "21135": 2.36231, "21140": 2.22814, "21145": 2.39562, "21150": 2.35962, "21155": 2.3562, "21160": 2.36778, "21165": 2.36318, "21170": 2.30759, "21175": 2.32584, "21180": 2.28542, "21185": 2.3331, "21190": 2.33722, "21195": 2.27433, "21200": 2.40334, "21205": 2.40081, "21210": 2.38875, "21215": 2.28058, "21220": 2.37517, "21225": 2.37117, "21230": 2.39449, "21235": 2.26831, "21240": 2.31048, "21245": 2.28719, "21250": 2.30328, "21255": 2.37912, "21260": 2.32222, "21265": 2.31257, "21270": 2.34157, "21275": 2.3555, "21280": 2.36334, "21285": 2.34576, "21290": 2.3328, "21295": 2.2754, "21300": 2.3765, "21305": 2.44411, "21310": 2.32405, "21315": 2.31408, "21320": 2.37217, "21325": 2.35165, "21330": 2.30169, "21335": 2.34774, "21340": 2.33737, "21345": 2.3499, "21350": 2.37223, "21355": 2.36652, "21360": 2.38112, "21365": 2.32571, "21370": 2.39774, "21375": 2.30165, "21380": 2.35537, "21385": 2.3442, "21390": 2.31977, "21395": 2.41422, "21400": 2.35958, "21405": 2.38635, "21410": 2.36192, "21415": 2.36557, "21420": 2.32354, "21425": 2.36274, "21430": 2.32344, "21435": 2.34347, "21440": 2.369, "21445": 2.3602, "21450": 2.35174, "21455": 2.35133, "21460": 2.36281, "21465": 2.40361, "21470": 2.36179, "21475": 2.34198, "21480": 2.41852, "21485": 2.39509, "21490": 2.30163, "21495": 2.35594, "21500": 2.32735, "21505": 2.35938, "21510": 2.36749, "21515": 2.3936, "21520": 2.32981, "21525": 2.37621, "21530": 2.30468, "21535": 2.3269, "21540": 2.34583, "21545": 2.3405, "21550": 2.35218, "21555": 2.28883, "21560": 2.34381, "21565": 2.30482, "21570": 2.30516, "21575": 2.29348, "21580": 2.35647, "21585": 2.36413, "21590": 2.31424, "21595": 2.28653, "21600": 2.3517, "21605": 2.38602, "21610": 2.30338, "21615": 2.34563, "21620": 2.31, "21625": 2.33838, "21630": 2.30553, "21635": 2.38987, "21640": 2.30377, "21645": 2.29029, "21650": 2.43856, "21655": 2.3439, "21660": 2.3601, "21665": 2.31846, "21670": 2.32785, "21675": 2.3381, "21680": 2.33316, "21685": 2.37646, "21690": 2.28456, "21695": 2.36098, "21700": 2.32069, "21705": 2.31819, "21710": 2.32473, "21715": 2.28743, "21720": 2.27607, "21725": 2.30557, "21730": 2.35218, "21735": 2.335, "21740": 2.28915, "21745": 2.32701, "21750": 2.36288, "21755": 2.35264, "21760": 2.37322, "21765": 2.33278, "21770": 2.30145, "21775": 2.36588, "21780": 2.26477, "21785": 2.30928, "21790": 2.28321, "21795": 2.34553, "21800": 2.33673, "21805": 2.30522, "21810": 2.34417, "21815": 2.35217, "21820": 2.33528, "21825": 
2.29763, "21830": 2.32978, "21835": 2.35676, "21840": 2.35149, "21845": 2.32823, "21850": 2.3691, "21855": 2.27764, "21860": 2.32091, "21865": 2.32836, "21870": 2.31785, "21875": 2.35316, "21880": 2.31727, "21885": 2.31848, "21890": 2.33841, "21895": 2.41155, "21900": 2.39236, "21905": 2.36774, "21910": 2.33157, "21915": 2.30764, "21920": 2.34954, "21925": 2.35716, "21930": 2.45457, "21935": 2.34988, "21940": 2.29799, "21945": 2.36548, "21950": 2.2689, "21955": 2.38067, "21960": 2.37303, "21965": 2.33174, "21970": 2.33882, "21975": 2.29536, "21980": 2.28351, "21985": 2.4273, "21990": 2.32432, "21995": 2.26337, "22000": 2.36073, "22005": 2.2982, "22010": 2.37228, "22015": 2.29347, "22020": 2.35076, "22025": 2.348, "22030": 2.37576, "22035": 2.32125, "22040": 2.36306, "22045": 2.4095, "22050": 2.36132, "22055": 2.30507, "22060": 2.31505, "22065": 2.40206, "22070": 2.29607, "22075": 2.3666, "22080": 2.36552, "22085": 2.31075, "22090": 2.26969, "22095": 2.29905, "22100": 2.3761, "22105": 2.36154, "22110": 2.38563, "22115": 2.26871, "22120": 2.36021, "22125": 2.34432, "22130": 2.32849, "22135": 2.38036, "22140": 2.35602, "22145": 2.34871, "22150": 2.36045, "22155": 2.28823, "22160": 2.3177, "22165": 2.29047, "22170": 2.3982, "22175": 2.30427, "22180": 2.29836, "22185": 2.33451, "22190": 2.34322, "22195": 2.38041, "22200": 2.3326, "22205": 2.3271, "22210": 2.28711, "22215": 2.31402, "22220": 2.35495, "22225": 2.29459, "22230": 2.27964, "22235": 2.29333, "22240": 2.2934, "22245": 2.32211, "22250": 2.32617, "22255": 2.31803, "22260": 2.26665, "22265": 2.32881, "22270": 2.30094, "22275": 2.2534, "22280": 2.29381, "22285": 2.35557, "22290": 2.3059, "22295": 2.34799, "22300": 2.33201, "22305": 2.38076, "22310": 2.28177, "22315": 2.30418, "22320": 2.31816, "22325": 2.36308, "22330": 2.357, "22335": 2.36148, "22340": 2.37948, "22345": 2.30811, "22350": 2.30561, "22355": 2.33222, "22360": 2.28864, "22365": 2.32087, "22370": 2.34183, "22375": 2.34382, "22380": 2.24571, "22385": 2.28301, "22390": 2.3461, "22395": 2.34159, "22400": 2.43284, "22405": 2.39873, "22410": 2.38303, "22415": 2.29124, "22420": 2.32447, "22425": 2.29179, "22430": 2.3441, "22435": 2.3072, "22440": 2.3362, "22445": 2.30314, "22450": 2.39368, "22455": 2.3072, "22460": 2.32594, "22465": 2.35462, "22470": 2.32059, "22475": 2.31691, "22480": 2.27011, "22485": 2.35858, "22490": 2.29007, "22495": 2.2695, "22500": 2.34561, "22505": 2.40556, "22510": 2.36854, "22515": 2.39511, "22520": 2.3466, "22525": 2.32412, "22530": 2.34776, "22535": 2.28149, "22540": 2.31171, "22545": 2.2896, "22550": 2.27503, "22555": 2.22109, "22560": 2.28078, "22565": 2.31641, "22570": 2.2569, "22575": 2.29866, "22580": 2.2847, "22585": 2.2821, "22590": 2.32988, "22595": 2.31103, "22600": 2.34468, "22605": 2.34715, "22610": 2.32221, "22615": 2.34918, "22620": 2.31647, "22625": 2.30242, "22630": 2.26042, "22635": 2.3526, "22640": 2.35864, "22645": 2.34868, "22650": 2.27268, "22655": 2.28743, "22660": 2.27985, "22665": 2.27804, "22670": 2.35466, "22675": 2.31601, "22680": 2.37434, "22685": 2.24438, "22690": 2.25011, "22695": 2.4251, "22700": 2.2466, "22705": 2.31294, "22710": 2.37279, "22715": 2.32692, "22720": 2.26142, "22725": 2.32412, "22730": 2.34189, "22735": 2.26813, "22740": 2.35995, "22745": 2.34476, "22750": 2.31998, "22755": 2.34436, "22760": 2.26112, "22765": 2.36022, "22770": 2.3257, "22775": 2.28283, "22780": 2.36068, "22785": 2.32123, "22790": 2.30755, "22795": 2.31783, "22800": 2.38702, "22805": 2.32446, "22810": 2.32947, "22815": 2.326, "22820": 
2.29259, "22825": 2.3421, "22830": 2.35312, "22835": 2.32531, "22840": 2.28659, "22845": 2.24483, "22850": 2.28841, "22855": 2.37061, "22860": 2.29109, "22865": 2.30417, "22870": 2.32989, "22875": 2.3209, "22880": 2.27261, "22885": 2.27936, "22890": 2.31907, "22895": 2.28375, "22900": 2.33515, "22905": 2.3361, "22910": 2.33977, "22915": 2.28736, "22920": 2.32853, "22925": 2.23027, "22930": 2.35566, "22935": 2.30092, "22940": 2.26196, "22945": 2.29772, "22950": 2.3213, "22955": 2.38657, "22960": 2.30981, "22965": 2.28034, "22970": 2.29486, "22975": 2.31386, "22980": 2.30788, "22985": 2.32908, "22990": 2.39201, "22995": 2.32026, "23000": 2.30011, "23005": 2.36449, "23010": 2.33497, "23015": 2.31706, "23020": 2.29237, "23025": 2.37512, "23030": 2.38261, "23035": 2.32224, "23040": 2.22418, "23045": 2.38605, "23050": 2.29047, "23055": 2.27952, "23060": 2.29776, "23065": 2.35428, "23070": 2.28972, "23075": 2.33775, "23080": 2.24247, "23085": 2.34672, "23090": 2.30724, "23095": 2.28321, "23100": 2.25474, "23105": 2.26241, "23110": 2.33403, "23115": 2.26173, "23120": 2.29934, "23125": 2.31475, "23130": 2.34, "23135": 2.28875, "23140": 2.24299, "23145": 2.28648, "23150": 2.31871, "23155": 2.28526, "23160": 2.39018, "23165": 2.27711, "23170": 2.34558, "23175": 2.28796, "23180": 2.28924, "23185": 2.29233, "23190": 2.26593, "23195": 2.37003, "23200": 2.29656, "23205": 2.37604, "23210": 2.22549, "23215": 2.33736, "23220": 2.28151, "23225": 2.28295, "23230": 2.30864, "23235": 2.38318, "23240": 2.33012, "23245": 2.34728, "23250": 2.35289, "23255": 2.30926, "23260": 2.29133, "23265": 2.29919, "23270": 2.25814, "23275": 2.30764, "23280": 2.32036, "23285": 2.33018, "23290": 2.34402, "23295": 2.3178, "23300": 2.35001, "23305": 2.26644, "23310": 2.26541, "23315": 2.33417, "23320": 2.29286, "23325": 2.27776, "23330": 2.24671, "23335": 2.29891, "23340": 2.38223, "23345": 2.35168, "23350": 2.30825, "23355": 2.30879, "23360": 2.35207, "23365": 2.32254, "23370": 2.29135, "23375": 2.27191, "23380": 2.27938, "23385": 2.37218, "23390": 2.31101, "23395": 2.31023, "23400": 2.26986, "23405": 2.28417, "23410": 2.33691, "23415": 2.25886, "23420": 2.32142, "23425": 2.31565, "23430": 2.22102, "23435": 2.26726, "23440": 2.30905, "23445": 2.34232, "23450": 2.31587, "23455": 2.33091, "23460": 2.32883, "23465": 2.33392, "23470": 2.28553, "23475": 2.27309, "23480": 2.3266, "23485": 2.34224, "23490": 2.31635, "23495": 2.33976, "23500": 2.34584, "23505": 2.30191, "23510": 2.26525, "23515": 2.29628, "23520": 2.31944, "23525": 2.33216, "23530": 2.32422, "23535": 2.29452, "23540": 2.2632, "23545": 2.3614, "23550": 2.30736, "23555": 2.30342, "23560": 2.26454, "23565": 2.2943, "23570": 2.29461, "23575": 2.32311, "23580": 2.3263, "23585": 2.31741, "23590": 2.29608, "23595": 2.26783, "23600": 2.25194, "23605": 2.35514, "23610": 2.28041, "23615": 2.34173, "23620": 2.265, "23625": 2.30396, "23630": 2.31475, "23635": 2.33787, "23640": 2.22441, "23645": 2.25897, "23650": 2.29989, "23655": 2.2936, "23660": 2.32544, "23665": 2.39217, "23670": 2.39956, "23675": 2.32308, "23680": 2.32233, "23685": 2.33252, "23690": 2.27408, "23695": 2.29739, "23700": 2.31499, "23705": 2.35485, "23710": 2.29981, "23715": 2.35768, "23720": 2.31793, "23725": 2.29731, "23730": 2.29675, "23735": 2.32685, "23740": 2.32217, "23745": 2.35312, "23750": 2.30769, "23755": 2.29883, "23760": 2.28803, "23765": 2.30786, "23770": 2.32643, "23775": 2.28407, "23780": 2.28514, "23785": 2.29846, "23790": 2.22816, "23795": 2.30023, "23800": 2.22922, "23805": 2.39778, "23810": 
2.32001, "23815": 2.27874, "23820": 2.32531, "23825": 2.25779, "23830": 2.30443, "23835": 2.25676, "23840": 2.33278, "23845": 2.24694, "23850": 2.30261, "23855": 2.2791, "23860": 2.28027, "23865": 2.32609, "23870": 2.28051, "23875": 2.23485, "23880": 2.34105, "23885": 2.33555, "23890": 2.29962, "23895": 2.28015, "23900": 2.33785, "23905": 2.32031, "23910": 2.30208, "23915": 2.31709, "23920": 2.30071, "23925": 2.32277, "23930": 2.29301, "23935": 2.32284, "23940": 2.27049, "23945": 2.2846, "23950": 2.28805, "23955": 2.32648, "23960": 2.30992, "23965": 2.25461, "23970": 2.31257, "23975": 2.33339, "23980": 2.27734, "23985": 2.24691, "23990": 2.2777, "23995": 2.31041, "24000": 2.17844, "24005": 2.42648, "24010": 2.36298, "24015": 2.31179, "24020": 2.34997, "24025": 2.2901, "24030": 2.30891, "24035": 2.31196, "24040": 2.27429, "24045": 2.20028, "24050": 2.36079, "24055": 2.30396, "24060": 2.35281, "24065": 2.30465, "24070": 2.31303, "24075": 2.24512, "24080": 2.32417, "24085": 2.25834, "24090": 2.30296, "24095": 2.27571, "24100": 2.34106, "24105": 2.27396, "24110": 2.25428, "24115": 2.24965, "24120": 2.2847, "24125": 2.25091, "24130": 2.33238, "24135": 2.29167, "24140": 2.34393, "24145": 2.32914, "24150": 2.31894, "24155": 2.2936, "24160": 2.25259, "24165": 2.28091, "24170": 2.27501, "24175": 2.30824, "24180": 2.28634, "24185": 2.33679, "24190": 2.23498, "24195": 2.27197, "24200": 2.34713, "24205": 2.31627, "24210": 2.29744, "24215": 2.22561, "24220": 2.29614, "24225": 2.25731, "24230": 2.32605, "24235": 2.31821, "24240": 2.33772, "24245": 2.29188, "24250": 2.31035, "24255": 2.25418, "24260": 2.34671, "24265": 2.26122, "24270": 2.26987, "24275": 2.37611, "24280": 2.35167, "24285": 2.24377, "24290": 2.32152, "24295": 2.32301, "24300": 2.27517, "24305": 2.30869, "24310": 2.27725, "24315": 2.32717, "24320": 2.35203, "24325": 2.27295, "24330": 2.22935, "24335": 2.39384, "24340": 2.37018, "24345": 2.35489, "24350": 2.2788, "24355": 2.3396, "24360": 2.32099, "24365": 2.24576, "24370": 2.30058, "24375": 2.27063, "24380": 2.31262, "24385": 2.24273, "24390": 2.28497, "24395": 2.26377, "24400": 2.25414, "24405": 2.29553, "24410": 2.30149, "24415": 2.32113, "24420": 2.34188, "24425": 2.24204, "24430": 2.27739, "24435": 2.3151, "24440": 2.26484, "24445": 2.29748, "24450": 2.29021, "24455": 2.2845, "24460": 2.26156, "24465": 2.25738, "24470": 2.3063, "24475": 2.22654, "24480": 2.30552, "24485": 2.22106, "24490": 2.28203, "24495": 2.3498, "24500": 2.2658, "24505": 2.21389, "24510": 2.2182, "24515": 2.28762, "24520": 2.36633, "24525": 2.28347, "24530": 2.32858, "24535": 2.29208, "24540": 2.31258, "24545": 2.34663, "24550": 2.31369, "24555": 2.25148, "24560": 2.29737, "24565": 2.22498, "24570": 2.28772, "24575": 2.33051, "24580": 2.29951, "24585": 2.30588, "24590": 2.21874, "24595": 2.35266, "24600": 2.25064, "24605": 2.27678, "24610": 2.31993, "24615": 2.23383, "24620": 2.30332, "24625": 2.27522, "24630": 2.2669, "24635": 2.30594, "24640": 2.29726, "24645": 2.26258, "24650": 2.29478, "24655": 2.3239, "24660": 2.25646, "24665": 2.21263, "24670": 2.26616, "24675": 2.21443, "24680": 2.35061, "24685": 2.24245, "24690": 2.2733, "24695": 2.27686, "24700": 2.26435, "24705": 2.30698, "24710": 2.27006, "24715": 2.2698, "24720": 2.28323, "24725": 2.2095, "24730": 2.32252, "24735": 2.31341, "24740": 2.26286, "24745": 2.25572, "24750": 2.31249, "24755": 2.29103, "24760": 2.27424, "24765": 2.22619, "24770": 2.2479, "24775": 2.29172, "24780": 2.29627, "24785": 2.28095, "24790": 2.34961, "24795": 2.23633, "24800": 2.28471, 
"24805": 2.26217, "24810": 2.23976, "24815": 2.32424, "24820": 2.30682, "24825": 2.3113, "24830": 2.25742, "24835": 2.25497, "24840": 2.29744, "24845": 2.3184, "24850": 2.33908, "24855": 2.2513, "24860": 2.33911, "24865": 2.1707, "24870": 2.28052, "24875": 2.32182, "24880": 2.22549, "24885": 2.23617, "24890": 2.26718, "24895": 2.29864, "24900": 2.29808, "24905": 2.2398, "24910": 2.27968, "24915": 2.25917, "24920": 2.39697, "24925": 2.30032, "24930": 2.22699, "24935": 2.29142, "24940": 2.2625, "24945": 2.30275, "24950": 2.34014, "24955": 2.31831, "24960": 2.28096, "24965": 2.33597, "24970": 2.30266, "24975": 2.27916, "24980": 2.30564, "24985": 2.29643, "24990": 2.27962, "24995": 2.28059, "25000": 2.29551, "25005": 2.3275, "25010": 2.28975, "25015": 2.29585, "25020": 2.31695, "25025": 2.28429, "25030": 2.28513, "25035": 2.31822, "25040": 2.31285, "25045": 2.3242, "25050": 2.33198, "25055": 2.33218, "25060": 2.29239, "25065": 2.29056, "25070": 2.31779, "25075": 2.27424, "25080": 2.25049, "25085": 2.32185, "25090": 2.33205, "25095": 2.30569, "25100": 2.29792, "25105": 2.23652, "25110": 2.29296, "25115": 2.26884, "25120": 2.3091, "25125": 2.27647, "25130": 2.26323, "25135": 2.27608, "25140": 2.25127, "25145": 2.3231, "25150": 2.26484, "25155": 2.28918, "25160": 2.29053, "25165": 2.31284, "25170": 2.24928, "25175": 2.28052, "25180": 2.21485, "25185": 2.30911, "25190": 2.21813, "25195": 2.25132, "25200": 2.22255, "25205": 2.2356, "25210": 2.30963, "25215": 2.28978, "25220": 2.35675, "25225": 2.22552, "25230": 2.20448, "25235": 2.28181, "25240": 2.30015, "25245": 2.26887, "25250": 2.19642, "25255": 2.2684, "25260": 2.29982, "25265": 2.28011, "25270": 2.22397, "25275": 2.24655, "25280": 2.30366, "25285": 2.26078, "25290": 2.34227, "25295": 2.3382, "25300": 2.23481, "25305": 2.26693, "25310": 2.33536, "25315": 2.29434, "25320": 2.24549, "25325": 2.22288, "25330": 2.30841, "25335": 2.3429, "25340": 2.31455, "25345": 2.25822, "25350": 2.33019, "25355": 2.23247, "25360": 2.27344, "25365": 2.32, "25370": 2.27874, "25375": 2.27302, "25380": 2.23638, "25385": 2.35493, "25390": 2.33021, "25395": 2.22736, "25400": 2.31435, "25405": 2.30854, "25410": 2.27188, "25415": 2.26956, "25420": 2.24939, "25425": 2.23461, "25430": 2.24336, "25435": 2.27048, "25440": 2.32107, "25445": 2.17772, "25450": 2.30613, "25455": 2.36286, "25460": 2.33141, "25465": 2.23688, "25470": 2.30925, "25475": 2.24163, "25480": 2.30099, "25485": 2.27701, "25490": 2.27054, "25495": 2.30795, "25500": 2.23573, "25505": 2.29353, "25510": 2.27964, "25515": 2.27783, "25520": 2.26934, "25525": 2.26607, "25530": 2.23815, "25535": 2.27375, "25540": 2.27035, "25545": 2.33408, "25550": 2.29004, "25555": 2.29865, "25560": 2.29524, "25565": 2.28562, "25570": 2.32505, "25575": 2.34404, "25580": 2.3143, "25585": 2.30063, "25590": 2.29815, "25595": 2.27319, "25600": 2.24658, "25605": 2.28818, "25610": 2.2915, "25615": 2.26672, "25620": 2.29299, "25625": 2.33161, "25630": 2.28363, "25635": 2.246, "25640": 2.31543, "25645": 2.26979, "25650": 2.23723, "25655": 2.20837, "25660": 2.2891, "25665": 2.28701, "25670": 2.2513, "25675": 2.19288, "25680": 2.27743, "25685": 2.29284, "25690": 2.25335, "25695": 2.2663, "25700": 2.26351, "25705": 2.27152, "25710": 2.30474, "25715": 2.25622, "25720": 2.25258, "25725": 2.30751, "25730": 2.32321, "25735": 2.31648, "25740": 2.29521, "25745": 2.30894, "25750": 2.25574, "25755": 2.28237, "25760": 2.15493, "25765": 2.31432, "25770": 2.2945, "25775": 2.23937, "25780": 2.33698, "25785": 2.34008, "25790": 2.27974, "25795": 
2.26477, "25800": 2.33919, "25805": 2.23969, "25810": 2.23807, "25815": 2.19836, "25820": 2.27356, "25825": 2.23329, "25830": 2.31679, "25835": 2.19474, "25840": 2.28087, "25845": 2.29055, "25850": 2.23901, "25855": 2.24531, "25860": 2.31064, "25865": 2.25036, "25870": 2.28032, "25875": 2.20917, "25880": 2.25997, "25885": 2.30598, "25890": 2.2896, "25895": 2.19254, "25900": 2.29944, "25905": 2.24004, "25910": 2.24973, "25915": 2.28274, "25920": 2.23074, "25925": 2.26555, "25930": 2.22599, "25935": 2.27352, "25940": 2.23223, "25945": 2.28198, "25950": 2.27982, "25955": 2.33539, "25960": 2.23619, "25965": 2.24982, "25970": 2.26451, "25975": 2.26756, "25980": 2.30524, "25985": 2.32407, "25990": 2.31151, "25995": 2.2534, "26000": 2.29009, "26005": 2.24733, "26010": 2.26232, "26015": 2.27065, "26020": 2.26531, "26025": 2.31791, "26030": 2.23441, "26035": 2.27908, "26040": 2.23556, "26045": 2.34486, "26050": 2.25694, "26055": 2.32218, "26060": 2.23612, "26065": 2.1557, "26070": 2.28769, "26075": 2.28766, "26080": 2.22296, "26085": 2.32125, "26090": 2.21434, "26095": 2.15983, "26100": 2.19634, "26105": 2.18661, "26110": 2.29781, "26115": 2.19302, "26120": 2.26222, "26125": 2.27473, "26130": 2.24766, "26135": 2.23939, "26140": 2.24953, "26145": 2.18379, "26150": 2.23615, "26155": 2.23195, "26160": 2.33455, "26165": 2.19634, "26170": 2.3291, "26175": 2.24268, "26180": 2.28197, "26185": 2.21328, "26190": 2.29945, "26195": 2.27574, "26200": 2.29775, "26205": 2.20135, "26210": 2.24258, "26215": 2.29396, "26220": 2.22929, "26225": 2.29432, "26230": 2.23594, "26235": 2.17349, "26240": 2.28621, "26245": 2.29524, "26250": 2.29196, "26255": 2.30783, "26260": 2.23486, "26265": 2.22724, "26270": 2.29002, "26275": 2.22088, "26280": 2.26395, "26285": 2.25813, "26290": 2.31878, "26295": 2.26472, "26300": 2.29885, "26305": 2.31979, "26310": 2.29754, "26315": 2.28639, "26320": 2.20862, "26325": 2.24553, "26330": 2.25191, "26335": 2.27086, "26340": 2.29497, "26345": 2.30652, "26350": 2.25362, "26355": 2.19662, "26360": 2.29826, "26365": 2.24465, "26370": 2.23772, "26375": 2.29653, "26380": 2.28654, "26385": 2.23209, "26390": 2.21627, "26395": 2.29768, "26400": 2.2935, "26405": 2.25794, "26410": 2.32094, "26415": 2.34157, "26420": 2.24859, "26425": 2.23577, "26430": 2.2275, "26435": 2.37676, "26440": 2.20432, "26445": 2.30081, "26450": 2.26089, "26455": 2.25446, "26460": 2.29454, "26465": 2.21726, "26470": 2.20678, "26475": 2.29001, "26480": 2.27622, "26485": 2.21772, "26490": 2.2678, "26495": 2.24812, "26500": 2.3066, "26505": 2.2112, "26510": 2.23688, "26515": 2.31007, "26520": 2.25294, "26525": 2.33462, "26530": 2.20753, "26535": 2.32728, "26540": 2.25416, "26545": 2.2432, "26550": 2.25372, "26555": 2.24095, "26560": 2.31803, "26565": 2.28524, "26570": 2.22762, "26575": 2.29488, "26580": 2.31582, "26585": 2.20868, "26590": 2.24581, "26595": 2.2376, "26600": 2.2206, "26605": 2.27321, "26610": 2.34867, "26615": 2.23422, "26620": 2.24637, "26625": 2.29092, "26630": 2.2632, "26635": 2.27437, "26640": 2.26111, "26645": 2.29762, "26650": 2.22904, "26655": 2.26376, "26660": 2.23867, "26665": 2.30592, "26670": 2.24696, "26675": 2.26144, "26680": 2.28658, "26685": 2.27026, "26690": 2.23441, "26695": 2.25064, "26700": 2.24372, "26705": 2.23536, "26710": 2.34672, "26715": 2.24973, "26720": 2.19817, "26725": 2.27763, "26730": 2.25991, "26735": 2.26409, "26740": 2.31361, "26745": 2.328, "26750": 2.26203, "26755": 2.24139, "26760": 2.30922, "26765": 2.25223, "26770": 2.19261, "26775": 2.23628, "26780": 2.28245, "26785": 
2.25269, "26790": 2.30994, "26795": 2.24999, "26800": 2.21718, "26805": 2.26557, "26810": 2.22248, "26815": 2.30478, "26820": 2.33024, "26825": 2.24, "26830": 2.26123, "26835": 2.25228, "26840": 2.29157, "26845": 2.24441, "26850": 2.3232, "26855": 2.31278, "26860": 2.27897, "26865": 2.31467, "26870": 2.23489, "26875": 2.24487, "26880": 2.3053, "26885": 2.22366, "26890": 2.29385, "26895": 2.2507, "26900": 2.30969, "26905": 2.2677, "26910": 2.29203, "26915": 2.22705, "26920": 2.34168, "26925": 2.20812, "26930": 2.22316, "26935": 2.23353, "26940": 2.25127, "26945": 2.22917, "26950": 2.26642, "26955": 2.2417, "26960": 2.25057, "26965": 2.2075, "26970": 2.25365, "26975": 2.25378, "26980": 2.24026, "26985": 2.3257, "26990": 2.23745, "26995": 2.23135, "27000": 2.19629, "27005": 2.23513, "27010": 2.33034, "27015": 2.22868, "27020": 2.24257, "27025": 2.32802, "27030": 2.29313, "27035": 2.16541, "27040": 2.2131, "27045": 2.30507, "27050": 2.20128, "27055": 2.26603, "27060": 2.2957, "27065": 2.22566, "27070": 2.27977, "27075": 2.2501, "27080": 2.25136, "27085": 2.23653, "27090": 2.22171, "27095": 2.25399, "27100": 2.24263, "27105": 2.22891, "27110": 2.28573, "27115": 2.21224, "27120": 2.23985, "27125": 2.24913, "27130": 2.2342, "27135": 2.28531, "27140": 2.21929, "27145": 2.2397, "27150": 2.25289, "27155": 2.21134, "27160": 2.24755, "27165": 2.20898, "27170": 2.27144, "27175": 2.21811, "27180": 2.21758, "27185": 2.30933, "27190": 2.21508, "27195": 2.26067, "27200": 2.27326, "27205": 2.22965, "27210": 2.32782, "27215": 2.26957, "27220": 2.2714, "27225": 2.21916, "27230": 2.18027, "27235": 2.19793, "27240": 2.27973, "27245": 2.22809, "27250": 2.28765, "27255": 2.1865, "27260": 2.22732, "27265": 2.3036, "27270": 2.21398, "27275": 2.33417, "27280": 2.2502, "27285": 2.25521, "27290": 2.31899, "27295": 2.2366, "27300": 2.20735, "27305": 2.25547, "27310": 2.17179, "27315": 2.2734, "27320": 2.21514, "27325": 2.25462, "27330": 2.3115, "27335": 2.28913, "27340": 2.27504, "27345": 2.23282, "27350": 2.28407, "27355": 2.25885, "27360": 2.28204, "27365": 2.21099, "27370": 2.23931, "27375": 2.28743, "27380": 2.25023, "27385": 2.22142, "27390": 2.16771, "27395": 2.21424, "27400": 2.32858, "27405": 2.24665, "27410": 2.25822, "27415": 2.2546, "27420": 2.19732, "27425": 2.22031, "27430": 2.19461, "27435": 2.25903, "27440": 2.24393, "27445": 2.23367, "27450": 2.23198, "27455": 2.21481, "27460": 2.29376, "27465": 2.28075, "27470": 2.19595, "27475": 2.25764, "27480": 2.23884, "27485": 2.25641, "27490": 2.29473, "27495": 2.30164, "27500": 2.27566, "27505": 2.2104, "27510": 2.19689, "27515": 2.18783, "27520": 2.24641, "27525": 2.20083, "27530": 2.25922, "27535": 2.20511, "27540": 2.21136, "27545": 2.19263, "27550": 2.22068, "27555": 2.19285, "27560": 2.17569, "27565": 2.19191, "27570": 2.23297, "27575": 2.22493, "27580": 2.28728, "27585": 2.25431, "27590": 2.22465, "27595": 2.21778, "27600": 2.25138, "27605": 2.235, "27610": 2.18818, "27615": 2.25252, "27620": 2.25266, "27625": 2.26379, "27630": 2.20873, "27635": 2.27216, "27640": 2.26108, "27645": 2.28349, "27650": 2.24027, "27655": 2.20233, "27660": 2.23476, "27665": 2.25936, "27670": 2.21975, "27675": 2.17299, "27680": 2.29129, "27685": 2.20508, "27690": 2.20044, "27695": 2.25245, "27700": 2.21574, "27705": 2.25334, "27710": 2.31607, "27715": 2.19617, "27720": 2.19869, "27725": 2.18569, "27730": 2.2134, "27735": 2.24536, "27740": 2.24716, "27745": 2.24792, "27750": 2.23235, "27755": 2.28208, "27760": 2.27538, "27765": 2.24152, "27770": 2.23118, "27775": 2.32079, "27780": 
2.21399, "27785": 2.2183, "27790": 2.2299, "27795": 2.27366, "27800": 2.2108, "27805": 2.25951, "27810": 2.25188, "27815": 2.23305, "27820": 2.27543, "27825": 2.25557, "27830": 2.22637, "27835": 2.26659, "27840": 2.19116, "27845": 2.28312, "27850": 2.23134, "27855": 2.20925, "27860": 2.21148, "27865": 2.29257, "27870": 2.31503, "27875": 2.32458, "27880": 2.1995, "27885": 2.31872, "27890": 2.24244, "27895": 2.22849, "27900": 2.25708, "27905": 2.30534, "27910": 2.24595, "27915": 2.25508, "27920": 2.23253, "27925": 2.17368, "27930": 2.28276, "27935": 2.27583, "27940": 2.29949, "27945": 2.2671, "27950": 2.24587, "27955": 2.33299, "27960": 2.27357, "27965": 2.24904, "27970": 2.26459, "27975": 2.28125, "27980": 2.21976, "27985": 2.12579, "27990": 2.2557, "27995": 2.19261, "28000": 2.2505, "28005": 2.17378, "28010": 2.22445, "28015": 2.19558, "28020": 2.25809, "28025": 2.28847, "28030": 2.26829, "28035": 2.23162, "28040": 2.22745, "28045": 2.22331, "28050": 2.25793, "28055": 2.12392, "28060": 2.21395, "28065": 2.19046, "28070": 2.20287, "28075": 2.259, "28080": 2.20986, "28085": 2.23207, "28090": 2.2283, "28095": 2.21192, "28100": 2.20765, "28105": 2.31122, "28110": 2.21684, "28115": 2.19999, "28120": 2.2382, "28125": 2.21707, "28130": 2.22391, "28135": 2.24071, "28140": 2.25693, "28145": 2.23635, "28150": 2.24075, "28155": 2.1601, "28160": 2.21576, "28165": 2.21264, "28170": 2.12229, "28175": 2.25345, "28180": 2.32624, "28185": 2.24963, "28190": 2.25696, "28195": 2.24894, "28200": 2.25633, "28205": 2.21891, "28210": 2.24053, "28215": 2.21955, "28220": 2.20282, "28225": 2.24419, "28230": 2.20803, "28235": 2.2083, "28240": 2.23763, "28245": 2.27133, "28250": 2.23475, "28255": 2.18238, "28260": 2.14286, "28265": 2.1987, "28270": 2.24838, "28275": 2.19733, "28280": 2.24547, "28285": 2.28846, "28290": 2.19367, "28295": 2.26027, "28300": 2.21949, "28305": 2.27485, "28310": 2.20772, "28315": 2.20796, "28320": 2.22098, "28325": 2.25506, "28330": 2.24613, "28335": 2.22525, "28340": 2.22336, "28345": 2.26857, "28350": 2.18903, "28355": 2.23928, "28360": 2.25082, "28365": 2.22399, "28370": 2.29429, "28375": 2.18885, "28380": 2.25125, "28385": 2.19352, "28390": 2.27515, "28395": 2.183, "28400": 2.21574, "28405": 2.21992, "28410": 2.24183, "28415": 2.18882, "28420": 2.23014, "28425": 2.21104, "28430": 2.2484, "28435": 2.19406, "28440": 2.23508, "28445": 2.29829, "28450": 2.2224, "28455": 2.20412, "28460": 2.2341, "28465": 2.18846, "28470": 2.2613, "28475": 2.23165, "28480": 2.2113, "28485": 2.31138, "28490": 2.24195, "28495": 2.23256, "28500": 2.16405, "28505": 2.31158, "28510": 2.20995, "28515": 2.31595, "28520": 2.21827, "28525": 2.23205, "28530": 2.23811, "28535": 2.23506, "28540": 2.25472, "28545": 2.26503, "28550": 2.20467, "28555": 2.25506, "28560": 2.18782, "28565": 2.22924, "28570": 2.2555, "28575": 2.22682, "28580": 2.15356, "28585": 2.22675, "28590": 2.18103, "28595": 2.27831, "28600": 2.24003, "28605": 2.2508, "28610": 2.24484, "28615": 2.21392, "28620": 2.30585, "28625": 2.23767, "28630": 2.18882, "28635": 2.22295, "28640": 2.25784, "28645": 2.29236, "28650": 2.2832, "28655": 2.19837, "28660": 2.20057, "28665": 2.19075, "28670": 2.22001, "28675": 2.27804, "28680": 2.22289, "28685": 2.19914, "28690": 2.24101, "28695": 2.19377, "28700": 2.20616, "28705": 2.23423, "28710": 2.2652, "28715": 2.2584, "28720": 2.25962, "28725": 2.18234, "28730": 2.20975, "28735": 2.25805, "28740": 2.2432, "28745": 2.26343, "28750": 2.23342, "28755": 2.17383, "28760": 2.29062, "28765": 2.25156, "28770": 2.23572, "28775": 
2.2027, "28780": 2.1486, "28785": 2.1922, "28790": 2.20367, "28795": 2.1678, "28800": 2.29372, "28805": 2.26449, "28810": 2.223, "28815": 2.27761, "28820": 2.23829, "28825": 2.17803, "28830": 2.27492, "28835": 2.24893, "28840": 2.12707, "28845": 2.1561, "28850": 2.27175, "28855": 2.28116, "28860": 2.20383, "28865": 2.16877, "28870": 2.22254, "28875": 2.20776, "28880": 2.23226, "28885": 2.18846, "28890": 2.15348, "28895": 2.15568, "28900": 2.27201, "28905": 2.28547, "28910": 2.23334, "28915": 2.20703, "28920": 2.23281, "28925": 2.21376, "28930": 2.27263, "28935": 2.12706, "28940": 2.25297, "28945": 2.2337, "28950": 2.16624, "28955": 2.28843, "28960": 2.1715, "28965": 2.23456, "28970": 2.24589, "28975": 2.30907, "28980": 2.24952, "28985": 2.21776, "28990": 2.20331, "28995": 2.22936, "29000": 2.22575, "29005": 2.14539, "29010": 2.2015, "29015": 2.22818, "29020": 2.20783, "29025": 2.26671, "29030": 2.20775, "29035": 2.20036, "29040": 2.21482, "29045": 2.2327, "29050": 2.24933, "29055": 2.25172, "29060": 2.25411, "29065": 2.29787, "29070": 2.2295, "29075": 2.2628, "29080": 2.17842, "29085": 2.19586, "29090": 2.23435, "29095": 2.14855, "29100": 2.15389, "29105": 2.23318, "29110": 2.18694, "29115": 2.29232, "29120": 2.28327, "29125": 2.25616, "29130": 2.21053, "29135": 2.27775, "29140": 2.15091, "29145": 2.27436, "29150": 2.21268, "29155": 2.26767, "29160": 2.26691, "29165": 2.18041, "29170": 2.27934, "29175": 2.21754, "29180": 2.24683, "29185": 2.26425, "29190": 2.20317, "29195": 2.14827, "29200": 2.19916, "29205": 2.1638, "29210": 2.1844, "29215": 2.21022, "29220": 2.22206, "29225": 2.32036, "29230": 2.18152, "29235": 2.27654, "29240": 2.28359, "29245": 2.20737, "29250": 2.30056, "29255": 2.25386, "29260": 2.21854, "29265": 2.17959, "29270": 2.20815, "29275": 2.17527, "29280": 2.24866, "29285": 2.2226, "29290": 2.1985, "29295": 2.2136, "29300": 2.17358, "29305": 2.24748, "29310": 2.20312, "29315": 2.25845, "29320": 2.17185, "29325": 2.25923, "29330": 2.17038, "29335": 2.25721, "29340": 2.30265, "29345": 2.18313, "29350": 2.21065, "29355": 2.24529, "29360": 2.25972, "29365": 2.26818, "29370": 2.1833, "29375": 2.22455, "29380": 2.24795, "29385": 2.21502, "29390": 2.29738, "29395": 2.22012, "29400": 2.25872, "29405": 2.19077, "29410": 2.20649, "29415": 2.24791, "29420": 2.21148, "29425": 2.2671, "29430": 2.16346, "29435": 2.17228, "29440": 2.24879, "29445": 2.20469, "29450": 2.17401, "29455": 2.17735, "29460": 2.19294, "29465": 2.2699, "29470": 2.24494, "29475": 2.2418, "29480": 2.1811, "29485": 2.18442, "29490": 2.19039, "29495": 2.20501, "29500": 2.27548, "29505": 2.17339, "29510": 2.17168, "29515": 2.19969, "29520": 2.24236, "29525": 2.1348, "29530": 2.25602, "29535": 2.16588, "29540": 2.24542, "29545": 2.11766, "29550": 2.19067, "29555": 2.22996, "29560": 2.20655, "29565": 2.17662, "29570": 2.21829, "29575": 2.18635, "29580": 2.25781, "29585": 2.22119, "29590": 2.15659, "29595": 2.14632, "29600": 2.21409, "29605": 2.26708, "29610": 2.23266, "29615": 2.21315, "29620": 2.24577, "29625": 2.17935, "29630": 2.11705, "29635": 2.20122, "29640": 2.2567, "29645": 2.25786, "29650": 2.19518, "29655": 2.19485, "29660": 2.22984, "29665": 2.23792, "29670": 2.22523, "29675": 2.20413, "29680": 2.18958, "29685": 2.22682, "29690": 2.24367, "29695": 2.22626, "29700": 2.238, "29705": 2.25747, "29710": 2.18371, "29715": 2.21825, "29720": 2.20226, "29725": 2.20059, "29730": 2.14334, "29735": 2.22446, "29740": 2.20469, "29745": 2.2827, "29750": 2.19146, "29755": 2.1868, "29760": 2.22688, "29765": 2.24698, "29770": 
2.2561, "29775": 2.1832, "29780": 2.1615, "29785": 2.21636, "29790": 2.17194, "29795": 2.23803, "29800": 2.21348, "29805": 2.22674, "29810": 2.18789, "29815": 2.21075, "29820": 2.20074, "29825": 2.16419, "29830": 2.22397, "29835": 2.21157, "29840": 2.22506, "29845": 2.15844, "29850": 2.22602, "29855": 2.22582, "29860": 2.23033, "29865": 2.26128, "29870": 2.22342, "29875": 2.27021, "29880": 2.24685, "29885": 2.21952, "29890": 2.25544, "29895": 2.24551, "29900": 2.19516, "29905": 2.24733, "29910": 2.21484, "29915": 2.17002, "29920": 2.23219, "29925": 2.17589, "29930": 2.11808, "29935": 2.16641, "29940": 2.26569, "29945": 2.19069, "29950": 2.15231, "29955": 2.18342, "29960": 2.25696, "29965": 2.19235, "29970": 2.18351, "29975": 2.20873, "29980": 2.23526, "29985": 2.22136, "29990": 2.24497, "29995": 2.16511, "30000": 2.24195, "30005": 2.24563, "30010": 2.16775, "30015": 2.21087, "30020": 2.18417, "30025": 2.20954, "30030": 2.23414, "30035": 2.20336, "30040": 2.1951, "30045": 2.22535, "30050": 2.27279, "30055": 2.17239, "30060": 2.23606, "30065": 2.27954, "30070": 2.16845, "30075": 2.21608, "30080": 2.22897, "30085": 2.18517, "30090": 2.20901, "30095": 2.23121, "30100": 2.17748, "30105": 2.24084, "30110": 2.23133, "30115": 2.21774, "30120": 2.15719, "30125": 2.25046, "30130": 2.18023, "30135": 2.2843, "30140": 2.19803, "30145": 2.21362, "30150": 2.24038, "30155": 2.14841, "30160": 2.25073, "30165": 2.117, "30170": 2.28462, "30175": 2.19968, "30180": 2.27958, "30185": 2.23404, "30190": 2.23772, "30195": 2.19845, "30200": 2.20754, "30205": 2.18116, "30210": 2.17114, "30215": 2.18441, "30220": 2.17685, "30225": 2.15923, "30230": 2.25449, "30235": 2.16406, "30240": 2.26931, "30245": 2.19753, "30250": 2.19698, "30255": 2.20441, "30260": 2.17035, "30265": 2.21213, "30270": 2.22253, "30275": 2.21254, "30280": 2.22915, "30285": 2.24362, "30290": 2.19668, "30295": 2.2289, "30300": 2.23757, "30305": 2.20528, "30310": 2.16806, "30315": 2.22972, "30320": 2.21085, "30325": 2.2679, "30330": 2.19391, "30335": 2.24377, "30340": 2.22401, "30345": 2.2198, "30350": 2.22684, "30355": 2.17621, "30360": 2.22524, "30365": 2.19198, "30370": 2.23326, "30375": 2.14925, "30380": 2.23311, "30385": 2.16735, "30390": 2.23062, "30395": 2.2844, "30400": 2.25147, "30405": 2.20972, "30410": 2.19325, "30415": 2.19849, "30420": 2.18545, "30425": 2.18575, "30430": 2.1849, "30435": 2.20288, "30440": 2.20755, "30445": 2.20481, "30450": 2.13804, "30455": 2.17236, "30460": 2.23706, "30465": 2.13308, "30470": 2.17122, "30475": 2.26256, "30480": 2.2001, "30485": 2.24942, "30490": 2.22549, "30495": 2.19444, "30500": 2.18464, "30505": 2.22606, "30510": 2.23436, "30515": 2.31814, "30520": 2.23372, "30525": 2.10397, "30530": 2.2235, "30535": 2.24482, "30540": 2.1599, "30545": 2.19289, "30550": 2.11578, "30555": 2.14931, "30560": 2.22265, "30565": 2.24599, "30570": 2.19676, "30575": 2.16183, "30580": 2.26488, "30585": 2.2468, "30590": 2.25313, "30595": 2.22992, "30600": 2.21882, "30605": 2.19474, "30610": 2.18545, "30615": 2.21497, "30620": 2.21343, "30625": 2.26924, "30630": 2.26131, "30635": 2.22962, "30640": 2.19176, "30645": 2.23553, "30650": 2.21565, "30655": 2.21476, "30660": 2.2275, "30665": 2.19584, "30670": 2.20117, "30675": 2.15424, "30680": 2.17668, "30685": 2.13992, "30690": 2.19869, "30695": 2.22289, "30700": 2.25692, "30705": 2.27019, "30710": 2.18393, "30715": 2.24065, "30720": 2.16828, "30725": 2.17531, "30730": 2.2074, "30735": 2.20433, "30740": 2.20029, "30745": 2.24392, "30750": 2.16242, "30755": 2.23074, "30760": 2.18229, 
"30765": 2.15295, "30770": 2.17549, "30775": 2.24868, "30780": 2.21169, "30785": 2.27535, "30790": 2.23117, "30795": 2.24378, "30800": 2.24321, "30805": 2.13136, "30810": 2.24821, "30815": 2.21681, "30820": 2.24888, "30825": 2.14619, "30830": 2.24206, "30835": 2.23073, "30840": 2.28938, "30845": 2.19808, "30850": 2.16209, "30855": 2.1941, "30860": 2.15459, "30865": 2.19979, "30870": 2.18179, "30875": 2.17034, "30880": 2.24842, "30885": 2.14894, "30890": 2.26514, "30895": 2.18993, "30900": 2.155, "30905": 2.2399, "30910": 2.20063, "30915": 2.22005, "30920": 2.27109, "30925": 2.22937, "30930": 2.15651, "30935": 2.21039, "30940": 2.18813, "30945": 2.15158, "30950": 2.1423, "30955": 2.23965, "30960": 2.23005, "30965": 2.24675, "30970": 2.18079, "30975": 2.21179, "30980": 2.19699, "30985": 2.16669, "30990": 2.21708, "30995": 2.18616, "31000": 2.20787, "31005": 2.30233, "31010": 2.18863, "31015": 2.16996, "31020": 2.18676, "31025": 2.18819, "31030": 2.27004, "31035": 2.20728, "31040": 2.27025, "31045": 2.127, "31050": 2.16761, "31055": 2.22977, "31060": 2.09778, "31065": 2.25073, "31070": 2.2449, "31075": 2.21831, "31080": 2.19247, "31085": 2.2245, "31090": 2.22981, "31095": 2.20537, "31100": 2.21623, "31105": 2.17971, "31110": 2.16095, "31115": 2.21321, "31120": 2.19548, "31125": 2.18607, "31130": 2.12562, "31135": 2.1767, "31140": 2.24589, "31145": 2.21888, "31150": 2.25057, "31155": 2.17252, "31160": 2.21909, "31165": 2.22288, "31170": 2.17866, "31175": 2.22872, "31180": 2.20676, "31185": 2.16648, "31190": 2.17146, "31195": 2.21015, "31200": 2.1848, "31205": 2.19616, "31210": 2.18941, "31215": 2.23852, "31220": 2.17534, "31225": 2.17084, "31230": 2.23729, "31235": 2.18467, "31240": 2.19022, "31245": 2.20982, "31250": 2.26304, "31255": 2.15904, "31260": 2.20027, "31265": 2.18868, "31270": 2.19044, "31275": 2.22288, "31280": 2.20343, "31285": 2.12214, "31290": 2.23994, "31295": 2.21658, "31300": 2.14071, "31305": 2.23852, "31310": 2.19727, "31315": 2.16162, "31320": 2.24838, "31325": 2.20385, "31330": 2.27105, "31335": 2.24404, "31340": 2.2345, "31345": 2.16078, "31350": 2.14702, "31355": 2.19747, "31360": 2.21376, "31365": 2.1831, "31370": 2.1845, "31375": 2.26631, "31380": 2.14081, "31385": 2.17835, "31390": 2.1831, "31395": 2.1916, "31400": 2.16278, "31405": 2.16847, "31410": 2.24434, "31415": 2.18908, "31420": 2.26993, "31425": 2.17964, "31430": 2.2104, "31435": 2.21841, "31440": 2.17741, "31445": 2.12432, "31450": 2.20587, "31455": 2.26725, "31460": 2.20918, "31465": 2.17342, "31470": 2.22802, "31475": 2.13794, "31480": 2.22122, "31485": 2.19668, "31490": 2.16728, "31495": 2.17297, "31500": 2.22974, "31505": 2.21859, "31510": 2.18333, "31515": 2.14419, "31520": 2.12119, "31525": 2.17977, "31530": 2.24624, "31535": 2.2216, "31540": 2.22514, "31545": 2.1558, "31550": 2.19456, "31555": 2.28733, "31560": 2.22878, "31565": 2.20786, "31570": 2.27604, "31575": 2.20848, "31580": 2.15956, "31585": 2.18478, "31590": 2.17406, "31595": 2.17726, "31600": 2.26146, "31605": 2.19905, "31610": 2.20641, "31615": 2.18856, "31620": 2.23456, "31625": 2.19645, "31630": 2.1669, "31635": 2.18512, "31640": 2.23498, "31645": 2.19713, "31650": 2.21396, "31655": 2.18892, "31660": 2.23086, "31665": 2.20691, "31670": 2.20486, "31675": 2.21076, "31680": 2.18948, "31685": 2.21439, "31690": 2.17579, "31695": 2.26958, "31700": 2.20988, "31705": 2.15387, "31710": 2.19434, "31715": 2.16115, "31720": 2.1697, "31725": 2.17295, "31730": 2.1334, "31735": 2.1713, "31740": 2.20065, "31745": 2.15263, "31750": 2.08187, "31755": 
2.19, "31760": 2.18755, "31765": 2.22079, "31770": 2.18856, "31775": 2.16227, "31780": 2.1448, "31785": 2.20365, "31790": 2.23433, "31795": 2.2247, "31800": 2.22457, "31805": 2.26435, "31810": 2.24421, "31815": 2.18259, "31820": 2.19779, "31825": 2.15362, "31830": 2.21458, "31835": 2.23519, "31840": 2.23216, "31845": 2.18177, "31850": 2.18641, "31855": 2.17883, "31860": 2.21113, "31865": 2.16957, "31870": 2.23106, "31875": 2.25489, "31880": 2.18548, "31885": 2.14099, "31890": 2.19385, "31895": 2.22776, "31900": 2.17725, "31905": 2.205, "31910": 2.24704, "31915": 2.15797, "31920": 2.17621, "31925": 2.17753, "31930": 2.22932, "31935": 2.23593, "31940": 2.18611, "31945": 2.19383, "31950": 2.21966, "31955": 2.24864, "31960": 2.23831, "31965": 2.19904, "31970": 2.21332, "31975": 2.22339, "31980": 2.14081, "31985": 2.16401, "31990": 2.22107, "31995": 2.26323, "32000": 2.16653, "32005": 2.18805, "32010": 2.18572, "32015": 2.24013, "32020": 2.16666, "32025": 2.24011, "32030": 2.20081, "32035": 2.20683, "32040": 2.1924, "32045": 2.18319, "32050": 2.20604, "32055": 2.13721, "32060": 2.19632, "32065": 2.10642, "32070": 2.22616, "32075": 2.18389, "32080": 2.20731, "32085": 2.22569, "32090": 2.26254, "32095": 2.1803, "32100": 2.16684, "32105": 2.15404, "32110": 2.15992, "32115": 2.21648, "32120": 2.21393, "32125": 2.09799, "32130": 2.22675, "32135": 2.2211, "32140": 2.21385, "32145": 2.14795, "32150": 2.17773, "32155": 2.12748, "32160": 2.24114, "32165": 2.21734, "32170": 2.16047, "32175": 2.16647, "32180": 2.23539, "32185": 2.14988, "32190": 2.1616, "32195": 2.25357, "32200": 2.23396, "32205": 2.21126, "32210": 2.21914, "32215": 2.16194, "32220": 2.20808, "32225": 2.16614, "32230": 2.18502, "32235": 2.2231, "32240": 2.18235, "32245": 2.23908, "32250": 2.24657, "32255": 2.19077, "32260": 2.19058, "32265": 2.18834, "32270": 2.14259, "32275": 2.16409, "32280": 2.1588, "32285": 2.23153, "32290": 2.1722, "32295": 2.24334, "32300": 2.18145, "32305": 2.15383, "32310": 2.27051, "32315": 2.20315, "32320": 2.14912, "32325": 2.15502, "32330": 2.19848, "32335": 2.17287, "32340": 2.21274, "32345": 2.21923, "32350": 2.14221, "32355": 2.10733, "32360": 2.20905, "32365": 2.19584, "32370": 2.20641, "32375": 2.19, "32380": 2.20275, "32385": 2.20414, "32390": 2.25888, "32395": 2.21524, "32400": 2.20179, "32405": 2.1376, "32410": 2.19946, "32415": 2.20157, "32420": 2.20336, "32425": 2.17633, "32430": 2.28743, "32435": 2.07713, "32440": 2.22029, "32445": 2.11744, "32450": 2.27725, "32455": 2.20348, "32460": 2.19624, "32465": 2.14472, "32470": 2.21444, "32475": 2.17278, "32480": 2.18334, "32485": 2.24988, "32490": 2.16151, "32495": 2.253, "32500": 2.13573, "32505": 2.15303, "32510": 2.11283, "32515": 2.14561, "32520": 2.19792, "32525": 2.2058, "32530": 2.16351, "32535": 2.19562, "32540": 2.20891, "32545": 2.14767, "32550": 2.15307, "32555": 2.20645, "32560": 2.2077, "32565": 2.24427, "32570": 2.23659, "32575": 2.18265, "32580": 2.18428, "32585": 2.20873, "32590": 2.19416, "32595": 2.24349, "32600": 2.21188, "32605": 2.20495, "32610": 2.20605, "32615": 2.21165, "32620": 2.2295, "32625": 2.20727, "32630": 2.20562, "32635": 2.13093, "32640": 2.20234, "32645": 2.15826, "32650": 2.16976, "32655": 2.24385, "32660": 2.21538, "32665": 2.22744, "32670": 2.18832, "32675": 2.12643, "32680": 2.20599, "32685": 2.14904, "32690": 2.2151, "32695": 2.13399, "32700": 2.14533, "32705": 2.12, "32710": 2.09792, "32715": 2.19305, "32720": 2.18089, "32725": 2.19149, "32730": 2.13902, "32735": 2.22958, "32740": 2.15591, "32745": 2.15355, "32750": 
2.20351, "32755": 2.20796, "32760": 2.20524, "32765": 2.2189, "32770": 2.16729, "32775": 2.22136, "32780": 2.22334, "32785": 2.17071, "32790": 2.16741, "32795": 2.10559, "32800": 2.18429, "32805": 2.22495, "32810": 2.27527, "32815": 2.21871, "32820": 2.16754, "32825": 2.21269, "32830": 2.13222, "32835": 2.20103, "32840": 2.12797, "32845": 2.14156, "32850": 2.17136, "32855": 2.14684, "32860": 2.16482, "32865": 2.12452, "32870": 2.17243, "32875": 2.15589, "32880": 2.2017, "32885": 2.17217, "32890": 2.2157, "32895": 2.15404, "32900": 2.14311, "32905": 2.11017, "32910": 2.17312, "32915": 2.26673, "32920": 2.13232, "32925": 2.21065, "32930": 2.18345, "32935": 2.23656, "32940": 2.22423, "32945": 2.16162, "32950": 2.22802, "32955": 2.14357, "32960": 2.16982, "32965": 2.21677, "32970": 2.16882, "32975": 2.18423, "32980": 2.19561, "32985": 2.21724, "32990": 2.17797, "32995": 2.14018, "33000": 2.1884, "33005": 2.17057, "33010": 2.13292, "33015": 2.19868, "33020": 2.24595, "33025": 2.19093, "33030": 2.23331, "33035": 2.17692, "33040": 2.2166, "33045": 2.20091, "33050": 2.10712, "33055": 2.1804, "33060": 2.15384, "33065": 2.21679, "33070": 2.22955, "33075": 2.21172, "33080": 2.20789, "33085": 2.11702, "33090": 2.16399, "33095": 2.16897, "33100": 2.11559, "33105": 2.20951, "33110": 2.16985, "33115": 2.11757, "33120": 2.22882, "33125": 2.21533, "33130": 2.23128, "33135": 2.23246, "33140": 2.20879, "33145": 2.11983, "33150": 2.2181, "33155": 2.23333, "33160": 2.22494, "33165": 2.17457, "33170": 2.25319, "33175": 2.13221, "33180": 2.10864, "33185": 2.12958, "33190": 2.16075, "33195": 2.21393, "33200": 2.2082, "33205": 2.23698, "33210": 2.20765, "33215": 2.11221, "33220": 2.16287, "33225": 2.21005, "33230": 2.19297, "33235": 2.1814, "33240": 2.14011, "33245": 2.19754, "33250": 2.21587, "33255": 2.16695, "33260": 2.18318, "33265": 2.24699, "33270": 2.18081, "33275": 2.17773, "33280": 2.15288, "33285": 2.18812, "33290": 2.11156, "33295": 2.15637, "33300": 2.19874, "33305": 2.21912, "33310": 2.17033, "33315": 2.17367, "33320": 2.18567, "33325": 2.1143, "33330": 2.19611, "33335": 2.2132, "33340": 2.15657, "33345": 2.17872, "33350": 2.17312, "33355": 2.17133, "33360": 2.2578, "33365": 2.17776, "33370": 2.22497, "33375": 2.18991, "33380": 2.22421, "33385": 2.16879, "33390": 2.19987, "33395": 2.13503, "33400": 2.13613, "33405": 2.16364, "33410": 2.15161, "33415": 2.15264, "33420": 2.20344, "33425": 2.20886, "33430": 2.18493, "33435": 2.19143, "33440": 2.14193, "33445": 2.25482, "33450": 2.15237, "33455": 2.19805, "33460": 2.18859, "33465": 2.1394, "33470": 2.17243, "33475": 2.1991, "33480": 2.19854, "33485": 2.19524, "33490": 2.23418, "33495": 2.16554, "33500": 2.11486, "33505": 2.15904, "33510": 2.16403, "33515": 2.19737, "33520": 2.18645, "33525": 2.20135, "33530": 2.14711, "33535": 2.1589, "33540": 2.20454, "33545": 2.16609, "33550": 2.16889, "33555": 2.15768, "33560": 2.12478, "33565": 2.15475, "33570": 2.19115, "33575": 2.13253, "33580": 2.18201, "33585": 2.24237, "33590": 2.23772, "33595": 2.16228, "33600": 2.194, "33605": 2.18899, "33610": 2.17998, "33615": 2.14753, "33620": 2.2244, "33625": 2.14435, "33630": 2.20757, "33635": 2.16523, "33640": 2.2689, "33645": 2.22908, "33650": 2.18466, "33655": 2.1746, "33660": 2.19853, "33665": 2.16265, "33670": 2.15838, "33675": 2.1553, "33680": 2.21294, "33685": 2.1244, "33690": 2.21704, "33695": 2.20314, "33700": 2.14003, "33705": 2.19257, "33710": 2.22821, "33715": 2.23539, "33720": 2.15909, "33725": 2.1974, "33730": 2.20073, "33735": 2.18208, "33740": 2.07457, 
"33745": 2.12638, "33750": 2.18434, "33755": 2.15457, "33760": 2.18427, "33765": 2.19197, "33770": 2.13871, "33775": 2.28878, "33780": 2.16191, "33785": 2.1865, "33790": 2.12431, "33795": 2.20897, "33800": 2.12439, "33805": 2.19831, "33810": 2.17267, "33815": 2.07157, "33820": 2.19492, "33825": 2.14006, "33830": 2.11161, "33835": 2.1423, "33840": 2.19442, "33845": 2.16346, "33850": 2.22303, "33855": 2.20246, "33860": 2.21581, "33865": 2.13529, "33870": 2.15692, "33875": 2.19964, "33880": 2.13601, "33885": 2.16067, "33890": 2.11709, "33895": 2.17951, "33900": 2.17168, "33905": 2.13033, "33910": 2.12006, "33915": 2.12703, "33920": 2.1477, "33925": 2.15818, "33930": 2.15014, "33935": 2.17413, "33940": 2.19395, "33945": 2.1636, "33950": 2.18947, "33955": 2.18825, "33960": 2.14027, "33965": 2.191, "33970": 2.13141, "33975": 2.15292, "33980": 2.1664, "33985": 2.18427, "33990": 2.1293, "33995": 2.16592, "34000": 2.19379, "34005": 2.16705, "34010": 2.21046, "34015": 2.09622, "34020": 2.19545, "34025": 2.09721, "34030": 2.25954, "34035": 2.17065, "34040": 2.16303, "34045": 2.18198, "34050": 2.18198, "34055": 2.18857, "34060": 2.1554, "34065": 2.15323, "34070": 2.1948, "34075": 2.23736, "34080": 2.14716, "34085": 2.18277, "34090": 2.16819, "34095": 2.19868, "34100": 2.15516, "34105": 2.19976, "34110": 2.23611, "34115": 2.16166, "34120": 2.21578, "34125": 2.17901, "34130": 2.13671, "34135": 2.20909, "34140": 2.0799, "34145": 2.14707, "34150": 2.17862, "34155": 2.2051, "34160": 2.24602, "34165": 2.16843, "34170": 2.14777, "34175": 2.11396, "34180": 2.19624, "34185": 2.15382, "34190": 2.14971, "34195": 2.18247, "34200": 2.19639, "34205": 2.15562, "34210": 2.12834, "34215": 2.1627, "34220": 2.08282, "34225": 2.15223, "34230": 2.19629, "34235": 2.22344, "34240": 2.13136, "34245": 2.14722, "34250": 2.14381, "34255": 2.19747, "34260": 2.15387, "34265": 2.149, "34270": 2.19181, "34275": 2.15515, "34280": 2.13416, "34285": 2.19696, "34290": 2.19738, "34295": 2.22952, "34300": 2.13726, "34305": 2.17697, "34310": 2.20181, "34315": 2.17148, "34320": 2.14842, "34325": 2.18462, "34330": 2.17165, "34335": 2.13048, "34340": 2.15225, "34345": 2.1627, "34350": 2.19818, "34355": 2.16528, "34360": 2.17627, "34365": 2.19357, "34370": 2.16384, "34375": 2.14179, "34380": 2.19269, "34385": 2.12213, "34390": 2.16893, "34395": 2.16743, "34400": 2.14976, "34405": 2.18701, "34410": 2.19145, "34415": 2.19799, "34420": 2.23595, "34425": 2.25545, "34430": 2.17631, "34435": 2.17992, "34440": 2.18948, "34445": 2.1415, "34450": 2.15001, "34455": 2.19955, "34460": 2.15854, "34465": 2.22328, "34470": 2.20721, "34475": 2.16189, "34480": 2.22983, "34485": 2.22967, "34490": 2.15067, "34495": 2.14576, "34500": 2.21474, "34505": 2.22787, "34510": 2.14704, "34515": 2.12477, "34520": 2.19539, "34525": 2.20942, "34530": 2.16161, "34535": 2.09026, "34540": 2.11404, "34545": 2.17943, "34550": 2.18196, "34555": 2.19583, "34560": 2.21218, "34565": 2.16665, "34570": 2.21216, "34575": 2.2532, "34580": 2.10739, "34585": 2.21371, "34590": 2.14665, "34595": 2.22974, "34600": 2.18978, "34605": 2.15666, "34610": 2.20435, "34615": 2.1665, "34620": 2.20367, "34625": 2.20206, "34630": 2.12426, "34635": 2.1516, "34640": 2.16147, "34645": 2.22545, "34650": 2.16843, "34655": 2.23, "34660": 2.15257, "34665": 2.18665, "34670": 2.15673, "34675": 2.1706, "34680": 2.26088, "34685": 2.08799, "34690": 2.171, "34695": 2.11462, "34700": 2.12905, "34705": 2.20225, "34710": 2.16627, "34715": 2.18108, "34720": 2.1837, "34725": 2.19221, "34730": 2.15938, "34735": 2.13113, 
"34740": 2.19395, "34745": 2.10542, "34750": 2.16967, "34755": 2.2061, "34760": 2.08979, "34765": 2.12258, "34770": 2.18649, "34775": 2.13773, "34780": 2.19493, "34785": 2.1929, "34790": 2.17426, "34795": 2.15817, "34800": 2.11281, "34805": 2.17341, "34810": 2.14395, "34815": 2.16096, "34820": 2.09314, "34825": 2.21465, "34830": 2.21572, "34835": 2.18886, "34840": 2.17461, "34845": 2.16407, "34850": 2.1709, "34855": 2.11263, "34860": 2.18119, "34865": 2.15025, "34870": 2.18424, "34875": 2.17285, "34880": 2.22555, "34885": 2.15351, "34890": 2.18015, "34895": 2.19228, "34900": 2.15105, "34905": 2.1349, "34910": 2.14245, "34915": 2.17708, "34920": 2.14922, "34925": 2.14864, "34930": 2.21254, "34935": 2.17344, "34940": 2.15624, "34945": 2.15105, "34950": 2.12396, "34955": 2.11648, "34960": 2.17209, "34965": 2.22919, "34970": 2.19894, "34975": 2.20866, "34980": 2.17881, "34985": 2.17566, "34990": 2.17949, "34995": 2.14989, "35000": 2.14675, "35005": 2.14692, "35010": 2.14433, "35015": 2.14945, "35020": 2.1391, "35025": 2.17652, "35030": 2.1501, "35035": 2.15235, "35040": 2.1017, "35045": 2.20521, "35050": 2.15945, "35055": 2.1401, "35060": 2.18482, "35065": 2.22392, "35070": 2.16019, "35075": 2.22761, "35080": 2.12316, "35085": 2.14086, "35090": 2.14945, "35095": 2.13118, "35100": 2.15291, "35105": 2.2051, "35110": 2.20656, "35115": 2.14048, "35120": 2.10726, "35125": 2.17044, "35130": 2.14525, "35135": 2.18023, "35140": 2.08268, "35145": 2.1538, "35150": 2.1722, "35155": 2.16407, "35160": 2.18883, "35165": 2.15453, "35170": 2.13687, "35175": 2.19998, "35180": 2.1914, "35185": 2.1427, "35190": 2.13675, "35195": 2.16307, "35200": 2.21321, "35205": 2.19584, "35210": 2.16767, "35215": 2.20509, "35220": 2.17244, "35225": 2.18343, "35230": 2.15461, "35235": 2.12289, "35240": 2.13675, "35245": 2.11785, "35250": 2.19087, "35255": 2.14387, "35260": 2.16421, "35265": 2.12686, "35270": 2.1473, "35275": 2.20121, "35280": 2.14042, "35285": 2.16381, "35290": 2.12391, "35295": 2.0935, "35300": 2.13239, "35305": 2.14222, "35310": 2.15142, "35315": 2.1918, "35320": 2.1778, "35325": 2.19564, "35330": 2.13772, "35335": 2.11198, "35340": 2.18852, "35345": 2.18347, "35350": 2.167, "35355": 2.1534, "35360": 2.10686, "35365": 2.17683, "35370": 2.12899, "35375": 2.17486, "35380": 2.16248, "35385": 2.16808, "35390": 2.12281, "35395": 2.05079, "35400": 2.22131, "35405": 2.17149, "35410": 2.22012, "35415": 2.24945, "35420": 2.10495, "35425": 2.1694, "35430": 2.12219, "35435": 2.17508, "35440": 2.13157, "35445": 2.19033, "35450": 2.15707, "35455": 2.14355, "35460": 2.18299, "35465": 2.13184, "35470": 2.17691, "35475": 2.15793, "35480": 2.27924, "35485": 2.16638, "35490": 2.13905, "35495": 2.17527, "35500": 2.06738, "35505": 2.17094, "35510": 2.07497, "35515": 2.15894, "35520": 2.28889, "35525": 2.15266, "35530": 2.15317, "35535": 2.11069, "35540": 2.15333, "35545": 2.08117, "35550": 2.16076, "35555": 2.12096, "35560": 2.1906, "35565": 2.14436, "35570": 2.12753, "35575": 2.16442, "35580": 2.18599, "35585": 2.16769, "35590": 2.11668, "35595": 2.1389, "35600": 2.17875, "35605": 2.16762, "35610": 2.14241, "35615": 2.24057, "35620": 2.14815, "35625": 2.1519, "35630": 2.14415, "35635": 2.21177, "35640": 2.08634, "35645": 2.16834, "35650": 2.2226, "35655": 2.079, "35660": 2.26462, "35665": 2.17576, "35670": 2.18908, "35675": 2.11341, "35680": 2.15931, "35685": 2.17783, "35690": 2.23187, "35695": 2.16777, "35700": 2.18973, "35705": 2.14592, "35710": 2.18456, "35715": 2.07864, "35720": 2.22017, "35725": 2.10855, "35730": 2.09756, 
"35735": 2.13945, "35740": 2.1195, "35745": 2.12646, "35750": 2.13839, "35755": 2.23991, "35760": 2.1848, "35765": 2.18418, "35770": 2.18629, "35775": 2.21814, "35780": 2.1535, "35785": 2.2121, "35790": 2.19006, "35795": 2.15997, "35800": 2.10281, "35805": 2.17934, "35810": 2.08685, "35815": 2.11178, "35820": 2.1669, "35825": 2.16417, "35830": 2.13288, "35835": 2.1348, "35840": 2.15102, "35845": 2.12972, "35850": 2.13497, "35855": 2.15625, "35860": 2.13671, "35865": 2.14145, "35870": 2.16696, "35875": 2.1772, "35880": 2.16811, "35885": 2.1337, "35890": 2.15526, "35895": 2.14277, "35900": 2.0552, "35905": 2.06913, "35910": 2.12762, "35915": 2.18802, "35920": 2.07786, "35925": 2.19905, "35930": 2.21233, "35935": 2.12932, "35940": 2.19085, "35945": 2.14278, "35950": 2.13245, "35955": 2.19687, "35960": 2.1052, "35965": 2.13878, "35970": 2.09721, "35975": 2.16467, "35980": 2.19489, "35985": 2.13208, "35990": 2.15062, "35995": 2.19924, "36000": 2.13636, "36005": 2.12522, "36010": 2.17886, "36015": 2.18966, "36020": 2.14775, "36025": 2.12643, "36030": 2.08465, "36035": 2.11088, "36040": 2.07165, "36045": 2.13347, "36050": 2.14343, "36055": 2.15141, "36060": 2.11915, "36065": 2.1889, "36070": 2.18618, "36075": 2.12804, "36080": 2.1604, "36085": 2.18191, "36090": 2.21927, "36095": 2.17834, "36100": 2.11311, "36105": 2.19204, "36110": 2.0904, "36115": 2.19799, "36120": 2.11568, "36125": 2.18258, "36130": 2.25154, "36135": 2.13654, "36140": 2.14459, "36145": 2.09285, "36150": 2.13909, "36155": 2.1869, "36160": 2.10064, "36165": 2.17839, "36170": 2.18773, "36175": 2.19836, "36180": 2.14732, "36185": 2.18129, "36190": 2.16413, "36195": 2.09884, "36200": 2.19079, "36205": 2.20071, "36210": 2.11087, "36215": 2.19381, "36220": 2.10726, "36225": 2.06433, "36230": 2.13307, "36235": 2.13426, "36240": 2.14936, "36245": 2.13614, "36250": 2.13357, "36255": 2.15061, "36260": 2.20941, "36265": 2.11812, "36270": 2.21208, "36275": 2.20178, "36280": 2.13803, "36285": 2.141, "36290": 2.17059, "36295": 2.11685, "36300": 2.19532, "36305": 2.19079, "36310": 2.09326, "36315": 2.17199, "36320": 2.2067, "36325": 2.16259, "36330": 2.1708, "36335": 2.13233, "36340": 2.09095, "36345": 2.1572, "36350": 2.20088, "36355": 2.17605, "36360": 2.16884, "36365": 2.17975, "36370": 2.11324, "36375": 2.1861, "36380": 2.17115, "36385": 2.1896, "36390": 2.07517, "36395": 2.17209, "36400": 2.10736, "36405": 2.08801, "36410": 2.18273, "36415": 2.1342, "36420": 2.15892, "36425": 2.16718, "36430": 2.22262, "36435": 2.11873, "36440": 2.16973, "36445": 2.10848, "36450": 2.17749, "36455": 2.16009, "36460": 2.15199, "36465": 2.1202, "36470": 2.07561, "36475": 2.19028, "36480": 2.17175, "36485": 2.12122, "36490": 2.18122, "36495": 2.16216, "36500": 2.09148, "36505": 2.10633, "36510": 2.10091, "36515": 2.16794, "36520": 2.17079, "36525": 2.16007, "36530": 2.16843, "36535": 2.16447, "36540": 2.08402, "36545": 2.09771, "36550": 2.13632, "36555": 2.16184, "36560": 2.15272, "36565": 2.06543, "36570": 2.15415, "36575": 2.17237, "36580": 2.15795, "36585": 2.19588, "36590": 2.16867, "36595": 2.20383, "36600": 2.14724, "36605": 2.10775, "36610": 2.15918, "36615": 2.07787, "36620": 2.16883, "36625": 2.12539, "36630": 2.14381, "36635": 2.11192, "36640": 2.16663, "36645": 2.1771, "36650": 2.15274, "36655": 2.1044, "36660": 2.16333, "36665": 2.15221, "36670": 2.19984, "36675": 2.14464, "36680": 2.17043, "36685": 2.17995, "36690": 2.1103, "36695": 2.10401, "36700": 2.09678, "36705": 2.17073, "36710": 2.14227, "36715": 2.12449, "36720": 2.15584, "36725": 
2.19008, "36730": 2.09304, "36735": 2.14391, "36740": 2.07301, "36745": 2.13323, "36750": 2.20397, "36755": 2.13032, "36760": 2.17994, "36765": 2.18278, "36770": 2.19754, "36775": 2.203, "36780": 2.22204, "36785": 2.23236, "36790": 2.18363, "36795": 2.14652, "36800": 2.10638, "36805": 2.08651, "36810": 2.07427, "36815": 2.14517, "36820": 2.12875, "36825": 2.13349, "36830": 2.15655, "36835": 2.16218, "36840": 2.13405, "36845": 2.16734, "36850": 2.17495, "36855": 2.18444, "36860": 2.20256, "36865": 2.12485, "36870": 2.14398, "36875": 2.16342, "36880": 2.18733, "36885": 2.12345, "36890": 2.11581, "36895": 2.18076, "36900": 2.18329, "36905": 2.16131, "36910": 2.20832, "36915": 2.09239, "36920": 2.11659, "36925": 2.14577, "36930": 2.16558, "36935": 2.06789, "36940": 2.09675, "36945": 2.14415, "36950": 2.14696, "36955": 2.17056, "36960": 2.11068, "36965": 2.24832, "36970": 2.0746, "36975": 2.16797, "36980": 2.12893, "36985": 2.12816, "36990": 2.21317, "36995": 2.14574, "37000": 2.05879, "37005": 2.17159, "37010": 2.17721, "37015": 2.16233, "37020": 2.09251, "37025": 2.11761, "37030": 2.09938, "37035": 2.16106, "37040": 2.07786, "37045": 2.17112, "37050": 2.10381, "37055": 2.10687, "37060": 2.17141, "37065": 2.16457, "37070": 2.23269, "37075": 2.16251, "37080": 2.14986, "37085": 2.14063, "37090": 2.09934, "37095": 2.21842, "37100": 2.19918, "37105": 2.14578, "37110": 2.17189, "37115": 2.18509, "37120": 2.15171, "37125": 2.13632, "37130": 2.15687, "37135": 2.1092, "37140": 2.0986, "37145": 2.05502, "37150": 2.09629, "37155": 2.1628, "37160": 2.21358, "37165": 2.19372, "37170": 2.10545, "37175": 2.15073, "37180": 2.1593, "37185": 2.13045, "37190": 2.22248, "37195": 2.14628, "37200": 2.07162, "37205": 2.17699, "37210": 2.18438, "37215": 2.19352, "37220": 2.15373, "37225": 2.12322, "37230": 2.12826, "37235": 2.14813, "37240": 2.14263, "37245": 2.07072, "37250": 2.15839, "37255": 2.1508, "37260": 2.09761, "37265": 2.16003, "37270": 2.19964, "37275": 2.08676, "37280": 2.179, "37285": 2.17943, "37290": 2.18564, "37295": 2.20523, "37300": 2.16722, "37305": 2.10734, "37310": 2.1014, "37315": 2.16668, "37320": 2.14018, "37325": 2.18157, "37330": 2.15784, "37335": 2.13088, "37340": 2.20664, "37345": 2.10298, "37350": 2.15408, "37355": 2.13668, "37360": 2.07552, "37365": 2.09239, "37370": 2.09126, "37375": 2.13139, "37380": 2.11215, "37385": 2.1658, "37390": 2.14239, "37395": 2.14111, "37400": 2.1535, "37405": 2.15297, "37410": 2.16628, "37415": 2.06544, "37420": 2.22608, "37425": 2.13475, "37430": 2.14026, "37435": 2.18634, "37440": 2.09378, "37445": 2.07272, "37450": 2.16846, "37455": 2.11905, "37460": 2.06375, "37465": 2.1723, "37470": 2.24043, "37475": 2.07833, "37480": 2.13995, "37485": 2.21306, "37490": 2.12648, "37495": 2.1088, "37500": 2.15932, "37505": 2.10117, "37510": 2.11126, "37515": 2.18997, "37520": 2.13564, "37525": 2.12292, "37530": 2.09336, "37535": 2.12776, "37540": 2.11657, "37545": 2.17182, "37550": 2.15348, "37555": 2.15889, "37560": 2.09556, "37565": 2.12314, "37570": 2.03955, "37575": 2.16797, "37580": 2.15947, "37585": 2.15131, "37590": 2.13784, "37595": 2.11591, "37600": 2.2003, "37605": 2.11455, "37610": 2.11815, "37615": 2.12835, "37620": 2.19039, "37625": 2.19334, "37630": 2.13443, "37635": 2.18131, "37640": 2.1402, "37645": 2.18089, "37650": 2.13151, "37655": 2.13335, "37660": 2.13474, "37665": 2.11288, "37670": 2.1251, "37675": 2.14218, "37680": 2.19414, "37685": 2.12851, "37690": 2.11288, "37695": 2.16107, "37700": 2.13449, "37705": 2.17741, "37710": 2.13843, "37715": 2.10554, 
"37720": 2.08685, "37725": 2.06302, "37730": 2.16106, "37735": 2.12599, "37740": 2.14585, "37745": 2.14802, "37750": 2.14228, "37755": 2.17951, "37760": 2.12306, "37765": 2.10279, "37770": 2.22574, "37775": 2.16915, "37780": 2.1411, "37785": 2.16493, "37790": 2.1773, "37795": 2.22755, "37800": 2.09896, "37805": 2.1061, "37810": 2.06079, "37815": 2.13783, "37820": 2.11575, "37825": 2.12711, "37830": 2.13779, "37835": 2.17167, "37840": 2.14148, "37845": 2.16471, "37850": 2.20747, "37855": 2.15319, "37860": 2.1619, "37865": 2.04191, "37870": 2.10995, "37875": 2.06735, "37880": 2.14077, "37885": 2.13409, "37890": 2.11447, "37895": 2.10053, "37900": 2.13853, "37905": 2.11878, "37910": 2.15896, "37915": 2.20403, "37920": 2.09662, "37925": 2.16004, "37930": 2.19322, "37935": 2.1334, "37940": 2.1213, "37945": 2.15431, "37950": 2.09426, "37955": 2.08984, "37960": 2.09486, "37965": 2.1468, "37970": 2.16589, "37975": 2.12332, "37980": 2.12402, "37985": 2.11956, "37990": 2.14889, "37995": 2.17845, "38000": 2.13287, "38005": 2.15456, "38010": 2.16701, "38015": 2.12455, "38020": 2.14118, "38025": 2.19727, "38030": 2.20479, "38035": 2.05007, "38040": 2.20972, "38045": 2.15875, "38050": 2.1373, "38055": 2.16985, "38060": 2.12205, "38065": 2.11119, "38070": 2.08183, "38075": 2.1871, "38080": 2.12467, "38085": 2.17386, "38090": 2.1795, "38095": 2.04931, "38100": 2.18302, "38105": 2.10827, "38110": 2.11583, "38115": 2.17403, "38120": 2.06756, "38125": 2.15528, "38130": 2.15604, "38135": 2.10937, "38140": 2.16274, "38145": 2.06334, "38150": 2.11821, "38155": 2.09107, "38160": 2.07647, "38165": 2.058, "38170": 2.08854, "38175": 2.15399, "38180": 2.12172, "38185": 2.14851, "38190": 2.09899, "38195": 2.15123, "38200": 2.07236, "38205": 2.13885, "38210": 2.16195, "38215": 2.16054, "38220": 2.20514, "38225": 2.1372, "38230": 2.15003, "38235": 2.13447, "38240": 2.1568, "38245": 2.14259, "38250": 2.10646, "38255": 2.0868, "38260": 2.12568, "38265": 2.14844, "38270": 2.13318, "38275": 2.09293, "38280": 2.15907, "38285": 2.14312, "38290": 2.09121, "38295": 2.12762, "38300": 2.08925, "38305": 2.17693, "38310": 2.15634, "38315": 2.141, "38320": 2.16919, "38325": 2.07702, "38330": 2.13511, "38335": 2.16232, "38340": 2.1587, "38345": 2.13163, "38350": 2.13954, "38355": 2.20279, "38360": 2.14267, "38365": 2.09259, "38370": 2.18651, "38375": 2.09741, "38380": 2.23499, "38385": 2.1598, "38390": 2.13591, "38395": 2.13444, "38400": 2.06294, "38405": 2.19267, "38410": 2.08766, "38415": 2.14207, "38420": 2.16166, "38425": 2.1029, "38430": 2.21917, "38435": 2.15514, "38440": 2.22338, "38445": 2.13506, "38450": 2.0853, "38455": 2.1298, "38460": 2.10584, "38465": 2.09335, "38470": 2.20925, "38475": 2.13651, "38480": 2.15849, "38485": 2.11759, "38490": 2.12113, "38495": 2.16595, "38500": 2.19141, "38505": 2.14385, "38510": 2.13263, "38515": 2.12968, "38520": 2.11355, "38525": 2.09882, "38530": 2.15355, "38535": 2.1137, "38540": 2.16553, "38545": 2.1241, "38550": 2.16052, "38555": 2.08832, "38560": 2.14566, "38565": 2.11859, "38570": 2.16447, "38575": 2.12325, "38580": 2.17037, "38585": 2.14575, "38590": 2.06371, "38595": 2.08302, "38600": 2.13563, "38605": 2.10756, "38610": 2.10866, "38615": 2.16177, "38620": 2.18449, "38625": 2.08991, "38630": 2.12083, "38635": 2.06284, "38640": 2.11598, "38645": 2.20021, "38650": 2.12797, "38655": 2.15679, "38660": 2.07655, "38665": 2.19864, "38670": 2.10375, "38675": 2.09555, "38680": 2.18706, "38685": 2.12843, "38690": 2.19311, "38695": 2.19267, "38700": 2.13752, "38705": 2.08348, "38710": 
2.07951, "38715": 2.17884, "38720": 2.16345, "38725": 2.1419, "38730": 2.11672, "38735": 2.09569, "38740": 2.1121, "38745": 2.16913, "38750": 2.09024, "38755": 2.14444, "38760": 2.07016, "38765": 2.19434, "38770": 2.10732, "38775": 2.17154, "38780": 2.19736, "38785": 2.16505, "38790": 2.13417, "38795": 2.10544, "38800": 2.1673, "38805": 2.2359, "38810": 2.14117, "38815": 2.13041, "38820": 2.13943, "38825": 2.12378, "38830": 2.13804, "38835": 2.1533, "38840": 2.15028, "38845": 2.15686, "38850": 2.1234, "38855": 2.08503, "38860": 2.10656, "38865": 2.13683, "38870": 2.12647, "38875": 2.21354, "38880": 2.12945, "38885": 2.08103, "38890": 2.16963, "38895": 2.14091, "38900": 2.10943, "38905": 2.13816, "38910": 2.10866, "38915": 2.13798, "38920": 2.08537, "38925": 2.09894, "38930": 2.15561, "38935": 2.1629, "38940": 2.12985, "38945": 2.05662, "38950": 2.11572, "38955": 2.16236, "38960": 2.04758, "38965": 2.13498, "38970": 2.16629, "38975": 2.22061, "38980": 2.0923, "38985": 2.09306, "38990": 2.1422, "38995": 2.19515, "39000": 2.0605, "39005": 2.0843, "39010": 2.08171, "39015": 2.10471, "39020": 2.11167, "39025": 2.16662, "39030": 2.09316, "39035": 2.09665, "39040": 2.10416, "39045": 2.18291, "39050": 2.15486, "39055": 2.12367, "39060": 2.18283, "39065": 2.11733, "39070": 2.13052, "39075": 2.11833, "39080": 2.10101, "39085": 2.13951, "39090": 2.17508, "39095": 2.1506, "39100": 2.14043, "39105": 2.12551, "39110": 2.08914, "39115": 2.10152, "39120": 2.11171, "39125": 2.16364, "39130": 2.11424, "39135": 2.12351, "39140": 2.11824, "39145": 2.08793, "39150": 2.16833, "39155": 2.08064, "39160": 2.14634, "39165": 2.1157, "39170": 2.20163, "39175": 2.18667, "39180": 2.12771, "39185": 2.15863, "39190": 2.11431, "39195": 2.14255, "39200": 2.15761, "39205": 2.11758, "39210": 2.16546, "39215": 2.09914, "39220": 2.13753, "39225": 2.11625, "39230": 2.1432, "39235": 2.06144, "39240": 2.08595, "39245": 2.11538, "39250": 2.10716, "39255": 2.19398, "39260": 2.05649, "39265": 2.08657, "39270": 2.12506, "39275": 2.12841, "39280": 2.13676, "39285": 2.14045, "39290": 2.03333, "39295": 2.13038, "39300": 2.20669, "39305": 2.17429, "39310": 2.12668, "39315": 2.20535, "39320": 2.02511, "39325": 2.08474, "39330": 2.10722, "39335": 2.10319, "39340": 2.15176, "39345": 2.22634, "39350": 2.1253, "39355": 2.20704, "39360": 2.0773, "39365": 2.06672, "39370": 2.12142, "39375": 2.1198, "39380": 2.13405, "39385": 2.16633, "39390": 2.09157, "39395": 2.12978, "39400": 2.10702, "39405": 2.16155, "39410": 2.07252, "39415": 2.11184, "39420": 2.11962, "39425": 2.12081, "39430": 2.14054, "39435": 2.13511, "39440": 2.10819, "39445": 2.17232, "39450": 2.15097, "39455": 2.13015, "39460": 2.15865, "39465": 2.1681, "39470": 2.1753, "39475": 2.1047, "39480": 2.0449, "39485": 2.13758, "39490": 2.07086, "39495": 2.15543, "39500": 2.12314, "39505": 2.11416, "39510": 2.13759, "39515": 2.12582, "39520": 2.15213, "39525": 2.14083, "39530": 2.13745, "39535": 2.12565, "39540": 2.16912, "39545": 2.16159, "39550": 2.12141, "39555": 2.01427, "39560": 2.15386, "39565": 2.11255, "39570": 2.09773, "39575": 2.13058, "39580": 2.15216, "39585": 2.10934, "39590": 2.09231, "39595": 2.11567, "39600": 2.10076, "39605": 2.14196, "39610": 2.09517, "39615": 2.1093, "39620": 2.16724, "39625": 2.15456, "39630": 2.10374, "39635": 2.15507, "39640": 2.09327, "39645": 2.10881, "39650": 2.0764, "39655": 2.13669, "39660": 2.15168, "39665": 2.13008, "39670": 2.17835, "39675": 2.12604, "39680": 2.0977, "39685": 2.15676, "39690": 2.10957, "39695": 2.2124, "39700": 2.16443, 
"39705": 2.13549, "39710": 2.18978, "39715": 2.14418, "39720": 2.06889, "39725": 2.12657, "39730": 2.15735, "39735": 2.10739, "39740": 2.15774, "39745": 2.14436, "39750": 2.11173, "39755": 2.13125, "39760": 2.12388, "39765": 2.13142, "39770": 2.13787, "39775": 2.18797, "39780": 2.13415, "39785": 2.13972, "39790": 2.1339, "39795": 2.1423, "39800": 2.10255, "39805": 2.09311, "39810": 2.1087, "39815": 2.13246, "39820": 2.12986, "39825": 2.15626, "39830": 2.1893, "39835": 2.09175, "39840": 2.14399, "39845": 2.09856, "39850": 2.14332, "39855": 2.13783, "39860": 2.07468, "39865": 2.15817, "39870": 2.1349, "39875": 2.15335, "39880": 2.20828, "39885": 2.17168, "39890": 2.13618, "39895": 2.12644, "39900": 2.09736, "39905": 2.10073, "39910": 2.12938, "39915": 2.1336, "39920": 2.06771, "39925": 2.06747, "39930": 2.16072, "39935": 2.10533, "39940": 2.0894, "39945": 2.11439, "39950": 2.0775, "39955": 2.16378, "39960": 2.13422, "39965": 2.15961, "39970": 2.08387, "39975": 2.16451, "39980": 2.08582, "39985": 2.08074, "39990": 2.0577, "39995": 2.07529, "40000": 2.10444, "40005": 2.14883, "40010": 2.15416, "40015": 2.16701, "40020": 2.14202, "40025": 2.11468, "40030": 2.14864, "40035": 2.06053, "40040": 2.11083, "40045": 2.06794, "40050": 2.20144, "40055": 2.08608, "40060": 2.13323, "40065": 2.17581, "40070": 2.13138, "40075": 2.05581, "40080": 2.11901, "40085": 2.13028, "40090": 2.12494, "40095": 2.1046, "40100": 2.16482, "40105": 2.10615, "40110": 2.10641, "40115": 2.12789, "40120": 2.1508, "40125": 2.14396, "40130": 2.12316, "40135": 2.14702, "40140": 2.11166, "40145": 2.2011, "40150": 2.11225, "40155": 2.0876, "40160": 2.0989, "40165": 2.1914, "40170": 2.02984, "40175": 2.16339, "40180": 2.14677, "40185": 2.12287, "40190": 2.13771, "40195": 2.05427, "40200": 2.14644, "40205": 2.09956, "40210": 2.14478, "40215": 2.12576, "40220": 2.21381, "40225": 2.21846, "40230": 2.03606, "40235": 2.08954, "40240": 2.03126, "40245": 2.15305, "40250": 2.12294, "40255": 2.18881, "40260": 2.19636, "40265": 2.11026, "40270": 2.07397, "40275": 2.09655, "40280": 2.15688, "40285": 2.08608, "40290": 2.11222, "40295": 2.11779, "40300": 2.11913, "40305": 2.10353, "40310": 2.16666, "40315": 2.10155, "40320": 2.13462, "40325": 2.10676, "40330": 2.10907, "40335": 2.0927, "40340": 2.06649, "40345": 2.14482, "40350": 2.13584, "40355": 2.17458, "40360": 2.07862, "40365": 2.18629, "40370": 2.09312, "40375": 2.13525, "40380": 2.07977, "40385": 2.1168, "40390": 2.15878, "40395": 2.10252, "40400": 2.08614, "40405": 2.0558, "40410": 2.23823, "40415": 2.09881, "40420": 2.15241, "40425": 2.18609, "40430": 2.18008, "40435": 2.11229, "40440": 2.14256, "40445": 2.08488, "40450": 2.15515, "40455": 2.07091, "40460": 2.10367, "40465": 2.08982, "40470": 2.10724, "40475": 2.09918, "40480": 2.0623, "40485": 2.13643, "40490": 2.10807, "40495": 2.11118, "40500": 2.13243, "40505": 2.14213, "40510": 2.11051, "40515": 2.13069, "40520": 2.15691, "40525": 2.09695, "40530": 2.12055, "40535": 2.07352, "40540": 2.12787, "40545": 2.11208, "40550": 2.12955, "40555": 2.07176, "40560": 2.12584, "40565": 2.13533, "40570": 2.03302, "40575": 2.11814, "40580": 2.1015, "40585": 2.09605, "40590": 2.07233, "40595": 2.11195, "40600": 2.07762, "40605": 2.12818, "40610": 2.06818, "40615": 2.08624, "40620": 2.13636, "40625": 2.12554, "40630": 2.08911, "40635": 2.14108, "40640": 2.09273, "40645": 2.11325, "40650": 2.10544, "40655": 2.18323, "40660": 2.17431, "40665": 2.08551, "40670": 2.14437, "40675": 2.12549, "40680": 2.1431, "40685": 2.14302, "40690": 2.06871, "40695": 
2.10706, "40700": 2.10877, "40705": 2.07804, "40710": 2.13774, "40715": 2.17019, "40720": 2.12831, "40725": 2.09741, "40730": 2.09694, "40735": 2.07664, "40740": 2.11796, "40745": 2.07521, "40750": 2.08923, "40755": 2.13232, "40760": 2.07859, "40765": 2.10083, "40770": 2.1529, "40775": 2.07381, "40780": 2.11347, "40785": 2.1281, "40790": 2.10705, "40795": 2.14102, "40800": 2.11238, "40805": 2.1931, "40810": 2.15874, "40815": 2.14024, "40820": 2.08502, "40825": 2.08195, "40830": 2.12429, "40835": 2.13296, "40840": 2.15523, "40845": 2.0517, "40850": 2.05802, "40855": 2.06163, "40860": 2.09216, "40865": 2.12123, "40870": 2.11399, "40875": 2.12622, "40880": 2.13787, "40885": 2.1195, "40890": 2.18386, "40895": 2.12943, "40900": 2.11748, "40905": 2.10661, "40910": 2.04921, "40915": 2.08423, "40920": 2.15499, "40925": 2.12568, "40930": 2.10839, "40935": 2.1235, "40940": 2.10859, "40945": 2.11095, "40950": 2.07746, "40955": 2.07279, "40960": 2.08298, "40965": 2.10254, "40970": 2.05588, "40975": 2.11813, "40980": 2.18222, "40985": 2.10699, "40990": 2.11321, "40995": 2.15198, "41000": 2.09442, "41005": 2.13064, "41010": 2.08414, "41015": 2.13704, "41020": 2.08785, "41025": 2.10271, "41030": 2.09088, "41035": 2.11098, "41040": 2.0736, "41045": 2.13234, "41050": 2.05596, "41055": 2.13021, "41060": 2.00908, "41065": 2.16496, "41070": 2.1218, "41075": 2.12511, "41080": 2.05306, "41085": 2.12242, "41090": 2.12508, "41095": 2.15909, "41100": 2.12911, "41105": 2.06895, "41110": 2.06343, "41115": 2.10021, "41120": 2.07932, "41125": 2.11202, "41130": 2.1306, "41135": 2.14804, "41140": 2.14503, "41145": 2.12504, "41150": 2.12237, "41155": 2.07517, "41160": 2.15732, "41165": 2.04145, "41170": 2.12457, "41175": 2.13073, "41180": 2.14266, "41185": 2.10859, "41190": 2.09253, "41195": 2.01577, "41200": 2.17161, "41205": 2.10903, "41210": 2.07145, "41215": 2.115, "41220": 2.12829, "41225": 2.14297, "41230": 2.15354, "41235": 2.15089, "41240": 2.2263, "41245": 2.14866, "41250": 2.13925, "41255": 2.14822, "41260": 2.10143, "41265": 2.11651, "41270": 2.15198, "41275": 2.14503, "41280": 2.04691, "41285": 2.14244, "41290": 2.15803, "41295": 2.07544, "41300": 2.11973, "41305": 2.19456, "41310": 2.11787, "41315": 2.13762, "41320": 2.15937, "41325": 2.10657, "41330": 2.15392, "41335": 2.13239, "41340": 2.12689, "41345": 2.10036, "41350": 2.12652, "41355": 2.1371, "41360": 2.0563, "41365": 2.1142, "41370": 2.14253, "41375": 2.05459, "41380": 2.15176, "41385": 2.06289, "41390": 2.1291, "41395": 2.10003, "41400": 2.14535, "41405": 2.07299, "41410": 2.0566, "41415": 2.11533, "41420": 2.08351, "41425": 2.14998, "41430": 2.1096, "41435": 2.09441, "41440": 2.11235, "41445": 2.08218, "41450": 2.06236, "41455": 2.15795, "41460": 2.09005, "41465": 2.05069, "41470": 2.10724, "41475": 2.19264, "41480": 2.10234, "41485": 2.14428, "41490": 2.0779, "41495": 2.08722, "41500": 2.15881, "41505": 2.08938, "41510": 2.03988, "41515": 2.08363, "41520": 2.08409, "41525": 2.12783, "41530": 2.14304, "41535": 2.13036, "41540": 2.12247, "41545": 2.16432, "41550": 2.05794, "41555": 2.19832, "41560": 2.11512, "41565": 2.11263, "41570": 2.15004, "41575": 2.15281, "41580": 2.10931, "41585": 2.06557, "41590": 2.06944, "41595": 2.10529, "41600": 2.10878, "41605": 2.10579, "41610": 2.14558, "41615": 2.11907, "41620": 2.16579, "41625": 2.12684, "41630": 2.07308, "41635": 2.11994, "41640": 2.13343, "41645": 2.17163, "41650": 2.08633, "41655": 2.10545, "41660": 2.07903, "41665": 2.15233, "41670": 2.07975, "41675": 2.1667, "41680": 2.04457, "41685": 2.07806, 
"41690": 2.1103, "41695": 2.11473, "41700": 2.15464, "41705": 2.02171, "41710": 2.11632, "41715": 2.17715, "41720": 2.0709, "41725": 2.05629, "41730": 2.09304, "41735": 2.03987, "41740": 2.11869, "41745": 2.15197, "41750": 2.14225, "41755": 2.12192, "41760": 2.20221, "41765": 2.07281, "41770": 2.09467, "41775": 2.04563, "41780": 2.15797, "41785": 2.07399, "41790": 2.07365, "41795": 2.15068, "41800": 2.14649, "41805": 2.07811, "41810": 2.08125, "41815": 2.11069, "41820": 2.11417, "41825": 2.13237, "41830": 2.11533, "41835": 2.05379, "41840": 2.12744, "41845": 2.1072, "41850": 2.07447, "41855": 2.08627, "41860": 2.07693, "41865": 2.09484, "41870": 2.07007, "41875": 2.10318, "41880": 2.08892, "41885": 2.19332, "41890": 2.11418, "41895": 2.06541, "41900": 2.08764, "41905": 2.01787, "41910": 2.1303, "41915": 2.07064, "41920": 2.08619, "41925": 2.12972, "41930": 2.07906, "41935": 2.0617, "41940": 2.12536, "41945": 2.11054, "41950": 2.11892, "41955": 2.12319, "41960": 2.07257, "41965": 2.17133, "41970": 2.12673, "41975": 2.15952, "41980": 2.0481, "41985": 2.17447, "41990": 2.10198, "41995": 2.09605, "42000": 2.10379, "42005": 2.08103, "42010": 2.09944, "42015": 2.06622, "42020": 2.15964, "42025": 2.04351, "42030": 2.14083, "42035": 2.0918, "42040": 2.11524, "42045": 2.15546, "42050": 2.15682, "42055": 2.12442, "42060": 2.08671, "42065": 2.12705, "42070": 2.12334, "42075": 2.13116, "42080": 2.1535, "42085": 2.11435, "42090": 2.07376, "42095": 2.08885, "42100": 2.15759, "42105": 2.05173, "42110": 2.10691, "42115": 2.09723, "42120": 2.11427, "42125": 2.03209, "42130": 2.07215, "42135": 2.04208, "42140": 2.06853, "42145": 2.10643, "42150": 2.10349, "42155": 2.07343, "42160": 2.11067, "42165": 2.08493, "42170": 2.08086, "42175": 2.11751, "42180": 2.16662, "42185": 2.12681, "42190": 2.09883, "42195": 2.15818, "42200": 2.12339, "42205": 2.09223, "42210": 2.12502, "42215": 2.09969, "42220": 2.0585, "42225": 2.19704, "42230": 2.17659, "42235": 2.07737, "42240": 2.15668, "42245": 2.14094, "42250": 2.05067, "42255": 2.02912, "42260": 2.12929, "42265": 2.13524, "42270": 2.08357, "42275": 2.11172, "42280": 2.13814, "42285": 2.11729, "42290": 2.1517, "42295": 2.11552, "42300": 2.13389, "42305": 2.06616, "42310": 2.08471, "42315": 2.16879, "42320": 2.11632, "42325": 2.13288, "42330": 2.0351, "42335": 2.11966, "42340": 2.1324, "42345": 2.14769, "42350": 2.06884, "42355": 2.10969, "42360": 2.11679, "42365": 2.13097, "42370": 2.1278, "42375": 2.13026, "42380": 2.19277, "42385": 2.11275, "42390": 2.14388, "42395": 2.11882, "42400": 2.15358, "42405": 2.12336, "42410": 2.13325, "42415": 2.18197, "42420": 2.13244, "42425": 2.08533, "42430": 2.14055, "42435": 2.01472, "42440": 2.12241, "42445": 2.15776, "42450": 2.07796, "42455": 2.06562, "42460": 2.09818, "42465": 2.07458, "42470": 2.10162, "42475": 2.14694, "42480": 2.0444, "42485": 2.11443, "42490": 2.17567, "42495": 1.99725, "42500": 2.13676, "42505": 2.06451, "42510": 2.12728, "42515": 2.07176, "42520": 2.04431, "42525": 2.04475, "42530": 2.08229, "42535": 2.10355, "42540": 2.08163, "42545": 2.09064, "42550": 2.12824, "42555": 2.09563, "42560": 2.12354, "42565": 2.10328, "42570": 2.06909, "42575": 2.0871, "42580": 2.11591, "42585": 2.03417, "42590": 2.16221, "42595": 2.15095, "42600": 2.13097, "42605": 2.06035, "42610": 2.09751, "42615": 2.13686, "42620": 2.11139, "42625": 2.08425, "42630": 2.05771, "42635": 2.15315, "42640": 2.14057, "42645": 2.06717, "42650": 2.2048, "42655": 2.0562, "42660": 2.04902, "42665": 2.13103, "42670": 2.12373, "42675": 2.10064, 
"42680": 2.11787, "42685": 2.08605, "42690": 2.13757, "42695": 2.04345, "42700": 2.15003, "42705": 2.08059, "42710": 2.11893, "42715": 2.10895, "42720": 2.08262, "42725": 2.09467, "42730": 2.08416, "42735": 2.08126, "42740": 2.07239, "42745": 2.12001, "42750": 2.1076, "42755": 2.10825, "42760": 2.09826, "42765": 2.07653, "42770": 2.13413, "42775": 2.0529, "42780": 2.06974, "42785": 2.12315, "42790": 2.17415, "42795": 2.14187, "42800": 2.09729, "42805": 2.14443, "42810": 2.0905, "42815": 2.14412, "42820": 2.14441, "42825": 2.16027, "42830": 2.09215, "42835": 2.1248, "42840": 2.14965, "42845": 2.17433, "42850": 2.07416, "42855": 2.12945, "42860": 2.11529, "42865": 2.09987, "42870": 2.14379, "42875": 2.14001, "42880": 2.1092, "42885": 2.04781, "42890": 2.14328, "42895": 2.16833, "42900": 2.12611, "42905": 2.13697, "42910": 2.10995, "42915": 2.12056, "42920": 2.02464, "42925": 2.10238, "42930": 2.02091, "42935": 2.07019, "42940": 2.09163, "42945": 2.10099, "42950": 2.13242, "42955": 2.04919, "42960": 2.09833, "42965": 2.06799, "42970": 2.0363, "42975": 2.13394, "42980": 2.10052, "42985": 2.10627, "42990": 2.11498, "42995": 2.12101, "43000": 2.05691, "43005": 2.15408, "43010": 2.09952, "43015": 2.08828, "43020": 2.12459, "43025": 2.05938, "43030": 2.04767, "43035": 2.0834, "43040": 2.00839, "43045": 2.1062, "43050": 2.13503, "43055": 2.06982, "43060": 2.01825, "43065": 2.15179, "43070": 2.08229, "43075": 2.10577, "43080": 2.15058, "43085": 2.08966, "43090": 2.08637, "43095": 2.0965, "43100": 2.09512, "43105": 2.11274, "43110": 2.10197, "43115": 2.06316, "43120": 2.11183, "43125": 2.12215, "43130": 2.15317, "43135": 2.11035, "43140": 2.08001, "43145": 2.11525, "43150": 2.10217, "43155": 2.12263, "43160": 2.06588, "43165": 2.08579, "43170": 2.11198, "43175": 2.13234, "43180": 2.1211, "43185": 2.0415, "43190": 2.14479, "43195": 2.08505, "43200": 2.09575, "43205": 2.15294, "43210": 2.11643, "43215": 2.11826, "43220": 2.13472, "43225": 2.14567, "43230": 2.05921, "43235": 2.04946, "43240": 2.08383, "43245": 2.0894, "43250": 2.05, "43255": 2.1603, "43260": 2.10109, "43265": 2.09337, "43270": 2.15497, "43275": 2.0641, "43280": 2.14317, "43285": 2.07434, "43290": 2.07236, "43295": 2.06455, "43300": 2.21822, "43305": 2.09941, "43310": 2.1253, "43315": 2.15285, "43320": 2.09223, "43325": 2.10789, "43330": 2.10033, "43335": 2.14707, "43340": 2.1002, "43345": 2.10871, "43350": 2.09791, "43355": 2.06512, "43360": 2.08112, "43365": 2.06704, "43370": 2.13396, "43375": 2.03911, "43380": 2.02126, "43385": 2.03813, "43390": 2.16454, "43395": 2.10524, "43400": 2.11929, "43405": 2.20858, "43410": 2.07889, "43415": 2.03275, "43420": 2.12301, "43425": 2.17457, "43430": 2.08492, "43435": 2.05808, "43440": 2.07157, "43445": 2.09795, "43450": 2.1649, "43455": 2.10957, "43460": 2.0697, "43465": 2.05668, "43470": 2.0768, "43475": 2.07967, "43480": 2.06556, "43485": 2.02798, "43490": 2.07868, "43495": 2.08791, "43500": 2.07597, "43505": 2.20295, "43510": 2.04401, "43515": 2.17062, "43520": 2.15855, "43525": 2.01683, "43530": 2.07082, "43535": 2.08875, "43540": 2.1021, "43545": 2.0758, "43550": 2.09347, "43555": 2.11741, "43560": 2.10933, "43565": 2.14681, "43570": 2.05669, "43575": 2.12351, "43580": 2.15259, "43585": 2.04176, "43590": 2.10212, "43595": 2.06852, "43600": 2.07754, "43605": 2.09287, "43610": 2.16086, "43615": 2.09836, "43620": 2.09764, "43625": 2.10617, "43630": 2.05205, "43635": 2.11624, "43640": 2.13294, "43645": 2.10992, "43650": 2.10442, "43655": 2.09006, "43660": 2.1029, "43665": 2.11861, "43670": 
2.14578, "43675": 2.11172, "43680": 2.08682, "43685": 2.12453, "43690": 2.07515, "43695": 2.11776, "43700": 2.07803, "43705": 2.13187, "43710": 2.1489, "43715": 2.14245, "43720": 2.04892, "43725": 2.05129, "43730": 2.10818, "43735": 2.20764, "43740": 2.10166, "43745": 2.05251, "43750": 2.09294, "43755": 2.02584, "43760": 2.11792, "43765": 2.00623, "43770": 2.08517, "43775": 2.06774, "43780": 2.11813, "43785": 1.997, "43790": 2.10933, "43795": 2.13587, "43800": 2.05337, "43805": 2.02168, "43810": 2.07765, "43815": 2.08371, "43820": 2.073, "43825": 2.10451, "43830": 2.1697, "43835": 2.065, "43840": 2.0686, "43845": 2.11555, "43850": 2.07001, "43855": 2.10162, "43860": 2.10659, "43865": 2.09636, "43870": 2.075, "43875": 2.1283, "43880": 2.14582, "43885": 2.1179, "43890": 2.08615, "43895": 2.112, "43900": 2.07169, "43905": 2.10354, "43910": 2.03908, "43915": 2.0995, "43920": 2.12734, "43925": 2.13705, "43930": 2.14082, "43935": 2.08693, "43940": 2.12888, "43945": 2.11983, "43950": 2.06643, "43955": 2.07858, "43960": 2.14429, "43965": 2.06258, "43970": 2.07767, "43975": 2.07303, "43980": 2.10491, "43985": 2.10049, "43990": 2.1018, "43995": 2.08245, "44000": 2.09723, "44005": 2.03247, "44010": 2.08874, "44015": 2.11909, "44020": 2.12293, "44025": 2.06908, "44030": 2.09192, "44035": 2.12993, "44040": 2.08806, "44045": 2.07697, "44050": 2.06712, "44055": 2.10311, "44060": 2.0854, "44065": 2.02963, "44070": 2.06909, "44075": 2.06176, "44080": 2.11303, "44085": 2.08927, "44090": 2.11952, "44095": 2.07267, "44100": 2.1305, "44105": 2.13626, "44110": 2.12215, "44115": 2.01805, "44120": 2.07034, "44125": 2.0858, "44130": 2.12065, "44135": 2.05479, "44140": 2.11644, "44145": 2.10402, "44150": 2.05153, "44155": 2.07152, "44160": 2.07545, "44165": 2.07242, "44170": 2.12081, "44175": 1.99972, "44180": 2.14797, "44185": 2.10181, "44190": 2.02849, "44195": 2.04243, "44200": 2.05689, "44205": 2.089, "44210": 2.02305, "44215": 2.12583, "44220": 2.06537, "44225": 2.09013, "44230": 2.10646, "44235": 2.17595, "44240": 2.1306, "44245": 2.07229, "44250": 2.07371, "44255": 2.02229, "44260": 2.13491, "44265": 2.08482, "44270": 2.161, "44275": 2.10122, "44280": 2.04581, "44285": 2.12329, "44290": 2.0563, "44295": 2.0493, "44300": 2.11579, "44305": 2.08962, "44310": 2.14109, "44315": 2.01984, "44320": 2.09645, "44325": 2.08663, "44330": 2.11189, "44335": 2.09577, "44340": 2.02098, "44345": 2.11315, "44350": 2.10472, "44355": 2.14015, "44360": 1.9831, "44365": 2.15833, "44370": 2.14354, "44375": 2.07956, "44380": 2.12601, "44385": 2.15165, "44390": 2.06907, "44395": 2.13801, "44400": 2.13154, "44405": 2.04587, "44410": 2.08786, "44415": 2.02611, "44420": 2.08764, "44425": 2.12107, "44430": 2.16727, "44435": 2.04241, "44440": 2.03487, "44445": 2.06338, "44450": 2.06738, "44455": 2.08351, "44460": 2.10258, "44465": 2.09152, "44470": 2.13385, "44475": 2.04983, "44480": 2.08767, "44485": 2.07476, "44490": 2.0248, "44495": 2.10966, "44500": 2.08227, "44505": 2.13112, "44510": 2.08846, "44515": 2.07102, "44520": 2.11286, "44525": 2.08231, "44530": 2.11403, "44535": 2.03709, "44540": 2.07257, "44545": 2.0906, "44550": 2.10642, "44555": 2.1196, "44560": 2.04702, "44565": 2.08415, "44570": 2.17446, "44575": 2.11244, "44580": 2.06047, "44585": 2.11824, "44590": 2.06009, "44595": 2.11547, "44600": 2.07283, "44605": 2.10293, "44610": 2.06117, "44615": 2.11658, "44620": 2.15219, "44625": 2.13585, "44630": 2.11915, "44635": 2.07914, "44640": 2.09507, "44645": 2.06265, "44650": 2.11916, "44655": 2.10522, "44660": 2.13908, "44665": 
2.11261, "44670": 2.14136, "44675": 2.08034, "44680": 2.05603, "44685": 2.09155, "44690": 2.07893, "44695": 2.07036, "44700": 2.12683, "44705": 2.09582, "44710": 2.16106, "44715": 2.05186, "44720": 2.03408, "44725": 2.05793, "44730": 2.14369, "44735": 2.06019, "44740": 2.16321, "44745": 2.0521, "44750": 2.08731, "44755": 2.08406, "44760": 2.06782, "44765": 2.1026, "44770": 1.98892, "44775": 2.1043, "44780": 2.06059, "44785": 2.09259, "44790": 2.13088, "44795": 2.12723, "44800": 2.11701, "44805": 2.12473, "44810": 2.10861, "44815": 2.11927, "44820": 2.07401, "44825": 2.06492, "44830": 2.03511, "44835": 2.09676, "44840": 2.09242, "44845": 2.04793, "44850": 2.11921, "44855": 2.10803, "44860": 2.10005, "44865": 2.09665, "44870": 2.1077, "44875": 2.08433, "44880": 2.09047, "44885": 2.02239, "44890": 2.0516, "44895": 2.12813, "44900": 2.06261, "44905": 2.13653, "44910": 2.08901, "44915": 2.10326, "44920": 2.05687, "44925": 2.03928, "44930": 2.17839, "44935": 2.11236, "44940": 2.09049, "44945": 2.10815, "44950": 2.13623, "44955": 2.11491, "44960": 2.12443, "44965": 2.07557, "44970": 2.09369, "44975": 2.09234, "44980": 2.05104, "44985": 2.08083, "44990": 2.15148, "44995": 2.00514, "45000": 2.09694, "45005": 2.1166, "45010": 2.06516, "45015": 2.0902, "45020": 2.10703, "45025": 2.07962, "45030": 2.11495, "45035": 2.08941, "45040": 2.11412, "45045": 2.04922, "45050": 2.01798, "45055": 2.06533, "45060": 2.07136, "45065": 2.04013, "45070": 2.04664, "45075": 2.09578, "45080": 2.06998, "45085": 2.11225, "45090": 2.05673, "45095": 2.11769, "45100": 2.07373, "45105": 2.0687, "45110": 2.1404, "45115": 2.11944, "45120": 2.11264, "45125": 2.11419, "45130": 2.1122, "45135": 2.11407, "45140": 2.1384, "45145": 2.08107, "45150": 2.04144, "45155": 2.05793, "45160": 2.10627, "45165": 2.12383, "45170": 2.04932, "45175": 2.10445, "45180": 2.09575, "45185": 2.10585, "45190": 2.09574, "45195": 2.13984, "45200": 2.11586, "45205": 2.04101, "45210": 2.05595, "45215": 2.0381, "45220": 2.04781, "45225": 2.12747, "45230": 2.09273, "45235": 2.14587, "45240": 2.07223, "45245": 2.05302, "45250": 2.08145, "45255": 2.08341, "45260": 2.11912, "45265": 2.15939, "45270": 2.00152, "45275": 2.1172, "45280": 2.0765, "45285": 2.0689, "45290": 2.1463, "45295": 2.1431, "45300": 2.18631, "45305": 2.11851, "45310": 2.02747, "45315": 2.02342, "45320": 2.04659, "45325": 2.07983, "45330": 2.04701, "45335": 2.07752, "45340": 2.06552, "45345": 2.14531, "45350": 2.12802, "45355": 2.11245, "45360": 2.06201, "45365": 2.12805, "45370": 2.10144, "45375": 2.04469, "45380": 2.11976, "45385": 2.13884, "45390": 2.09415, "45395": 2.17667, "45400": 2.058, "45405": 2.10251, "45410": 2.12288, "45415": 2.07698, "45420": 2.05956, "45425": 2.0933, "45430": 2.05518, "45435": 2.10879, "45440": 2.03413, "45445": 2.03915, "45450": 2.19668, "45455": 2.04455, "45460": 2.12139, "45465": 2.08378, "45470": 2.11346, "45475": 2.08471, "45480": 2.12178, "45485": 2.08403, "45490": 2.06847, "45495": 2.08339, "45500": 2.10583, "45505": 2.10275, "45510": 2.12985, "45515": 2.10534, "45520": 2.14151, "45525": 2.0843, "45530": 2.01573, "45535": 2.10227, "45540": 2.09827, "45545": 2.05469, "45550": 2.10279, "45555": 2.10319, "45560": 2.12686, "45565": 2.06826, "45570": 2.07006, "45575": 2.09745, "45580": 2.04345, "45585": 2.15227, "45590": 2.12465, "45595": 2.10797, "45600": 2.0241, "45605": 2.05587, "45610": 2.05662, "45615": 2.09822, "45620": 2.02151, "45625": 2.05677, "45630": 2.0856, "45635": 2.08005, "45640": 2.05388, "45645": 2.12154, "45650": 2.09423, "45655": 2.04896, 
"45660": 2.14091, "45665": 2.07837, "45670": 2.07759, "45675": 2.05917, "45680": 2.08817, "45685": 2.06506, "45690": 2.10639, "45695": 2.06872, "45700": 2.14014, "45705": 2.13165, "45710": 2.06994, "45715": 2.04071, "45720": 2.10402, "45725": 2.08456, "45730": 2.10631, "45735": 2.11498, "45740": 2.1135, "45745": 2.0725, "45750": 2.04821, "45755": 2.0728, "45760": 2.09382, "45765": 2.1193, "45770": 2.06153, "45775": 2.07712, "45780": 2.01562, "45785": 2.02137, "45790": 2.0728, "45795": 2.04182, "45800": 2.09321, "45805": 2.13551, "45810": 2.06082, "45815": 2.0637, "45820": 2.01311, "45825": 2.07476, "45830": 2.10589, "45835": 2.1372, "45840": 2.11473, "45845": 2.05743, "45850": 2.07999, "45855": 2.00049, "45860": 2.07475, "45865": 2.08849, "45870": 2.08221, "45875": 2.08266, "45880": 2.10759, "45885": 2.118, "45890": 2.07963, "45895": 2.0084, "45900": 2.12535, "45905": 2.18278, "45910": 2.04039, "45915": 2.04764, "45920": 2.08932, "45925": 2.0834, "45930": 2.07623, "45935": 2.01325, "45940": 2.10753, "45945": 2.12435, "45950": 2.16426, "45955": 2.08999, "45960": 2.04044, "45965": 2.09499, "45970": 2.06218, "45975": 2.07978, "45980": 2.09128, "45985": 2.08902, "45990": 2.10296, "45995": 2.03381, "46000": 2.04995, "46005": 2.05209, "46010": 2.04399, "46015": 2.01397, "46020": 2.13331, "46025": 2.04528, "46030": 2.02056, "46035": 2.07237, "46040": 2.11763, "46045": 2.10094, "46050": 2.10539, "46055": 2.07107, "46060": 2.109, "46065": 2.08427, "46070": 2.07755, "46075": 2.08614, "46080": 2.03169, "46085": 2.10995, "46090": 2.10595, "46095": 2.07468, "46100": 2.06423, "46105": 2.09662, "46110": 2.0822, "46115": 2.05153, "46120": 2.04888, "46125": 2.02437, "46130": 1.97106, "46135": 2.01291, "46140": 2.07971, "46145": 2.10458, "46150": 2.11327, "46155": 2.12618, "46160": 2.01279, "46165": 2.07619, "46170": 2.07166, "46175": 2.11504, "46180": 2.08715, "46185": 2.07529, "46190": 2.01489, "46195": 2.06252, "46200": 2.06993, "46205": 2.08138, "46210": 2.12408, "46215": 2.06183, "46220": 1.97557, "46225": 2.1126, "46230": 2.17007, "46235": 2.05733, "46240": 2.09492, "46245": 2.052, "46250": 2.12401, "46255": 2.06137, "46260": 2.07513, "46265": 2.00928, "46270": 2.07402, "46275": 2.11802, "46280": 2.0784, "46285": 2.07451, "46290": 2.09043, "46295": 2.0556, "46300": 2.1008, "46305": 2.07578, "46310": 2.15961, "46315": 2.00845, "46320": 2.10933, "46325": 2.09859, "46330": 2.06667, "46335": 2.1135, "46340": 2.05904, "46345": 2.02169, "46350": 2.07023, "46355": 2.12045, "46360": 2.10682, "46365": 2.05741, "46370": 2.04697, "46375": 2.06707, "46380": 2.05419, "46385": 2.09789, "46390": 2.07539, "46395": 2.07826, "46400": 2.09077, "46405": 2.1193, "46410": 2.14053, "46415": 2.10271, "46420": 2.08657, "46425": 2.11546, "46430": 2.0752, "46435": 2.103, "46440": 2.02251, "46445": 2.06612, "46450": 2.11949, "46455": 2.04801, "46460": 2.11451, "46465": 2.04794, "46470": 2.07143, "46475": 2.05491, "46480": 2.04125, "46485": 1.99657, "46490": 2.08549, "46495": 2.04679, "46500": 2.09298, "46505": 2.09896, "46510": 2.08628, "46515": 2.05586, "46520": 2.07239, "46525": 2.15981, "46530": 2.09505, "46535": 2.09852, "46540": 2.08009, "46545": 2.10304, "46550": 2.11649, "46555": 2.05054, "46560": 2.12333, "46565": 2.14722, "46570": 2.08764, "46575": 2.05858, "46580": 2.05716, "46585": 2.06853, "46590": 2.11856, "46595": 2.05974, "46600": 2.06332, "46605": 2.12322, "46610": 2.06795, "46615": 2.09989, "46620": 2.08116, "46625": 2.05777, "46630": 2.0459, "46635": 2.06853, "46640": 2.11397, "46645": 2.11187, "46650": 
2.10993, "46655": 2.10451, "46660": 2.07506, "46665": 2.08831, "46670": 2.14237, "46675": 2.00086, "46680": 2.1081, "46685": 2.12161, "46690": 2.05489, "46695": 2.06771, "46700": 2.06524, "46705": 2.06671, "46710": 2.10205, "46715": 2.06007, "46720": 2.0438, "46725": 2.04293, "46730": 2.08769, "46735": 2.1037, "46740": 1.99185, "46745": 2.10189, "46750": 2.09231, "46755": 2.0936, "46760": 2.12368, "46765": 2.09739, "46770": 2.10228, "46775": 2.04579, "46780": 2.04766, "46785": 2.09265, "46790": 2.08361, "46795": 2.05025, "46800": 2.0494, "46805": 2.11082, "46810": 2.11535, "46815": 2.07899, "46820": 2.05841, "46825": 2.08779, "46830": 2.01627, "46835": 2.0643, "46840": 2.12981, "46845": 2.14038, "46850": 2.07425, "46855": 2.03758, "46860": 2.06504, "46865": 2.0159, "46870": 2.11804, "46875": 2.10314, "46880": 2.08777, "46885": 2.07811, "46890": 2.0829, "46895": 2.07245, "46900": 2.06428, "46905": 2.03661, "46910": 2.01816, "46915": 2.05972, "46920": 2.09748, "46925": 2.15784, "46930": 2.08819, "46935": 2.04716, "46940": 2.10577, "46945": 2.13065, "46950": 2.0682, "46955": 2.0247, "46960": 2.0804, "46965": 2.08725, "46970": 2.10646, "46975": 2.06993, "46980": 2.05095, "46985": 2.04977, "46990": 2.06195, "46995": 2.1043, "47000": 1.99644, "47005": 2.06216, "47010": 2.00799, "47015": 2.05693, "47020": 2.15794, "47025": 2.06826, "47030": 2.0676, "47035": 2.06074, "47040": 2.08534, "47045": 2.1175, "47050": 2.08926, "47055": 2.12009, "47060": 2.10907, "47065": 2.05725, "47070": 2.0369, "47075": 2.06131, "47080": 2.00795, "47085": 2.08959, "47090": 2.04809, "47095": 2.06893, "47100": 2.06445, "47105": 2.12251, "47110": 2.09119, "47115": 2.07867, "47120": 2.0858, "47125": 2.08924, "47130": 2.1421, "47135": 2.09666, "47140": 2.08099, "47145": 2.13928, "47150": 2.03818, "47155": 2.04347, "47160": 2.16744, "47165": 2.065, "47170": 2.11363, "47175": 2.08954, "47180": 2.09522, "47185": 2.09955, "47190": 2.07044, "47195": 2.10612, "47200": 2.04588, "47205": 2.15829, "47210": 2.10804, "47215": 2.11103, "47220": 2.13569, "47225": 2.03344, "47230": 2.07178, "47235": 2.00169, "47240": 2.09618, "47245": 2.06879, "47250": 2.07548, "47255": 2.11031, "47260": 2.11464, "47265": 2.05054, "47270": 2.04515, "47275": 2.0936, "47280": 2.07528, "47285": 2.0272, "47290": 2.02577, "47295": 2.11725, "47300": 2.10769, "47305": 2.19647, "47310": 2.04908, "47315": 2.06817, "47320": 2.09458, "47325": 2.12204, "47330": 2.09387, "47335": 2.02044, "47340": 2.0715, "47345": 2.07595, "47350": 2.06731, "47355": 2.05474, "47360": 2.09445, "47365": 2.09372, "47370": 2.05045, "47375": 2.06396, "47380": 2.10476, "47385": 2.10704, "47390": 1.98481, "47395": 2.08404, "47400": 2.12149, "47405": 2.04218, "47410": 2.09398, "47415": 2.08645, "47420": 2.07237, "47425": 2.12183, "47430": 2.12007, "47435": 2.10504, "47440": 2.03219, "47445": 2.01268, "47450": 2.0531, "47455": 2.07794, "47460": 2.02313, "47465": 2.09188, "47470": 2.06945, "47475": 2.10343, "47480": 2.06425, "47485": 2.04665, "47490": 2.00079, "47495": 2.01486, "47500": 2.0918, "47505": 2.08147, "47510": 2.11085, "47515": 2.05453, "47520": 2.1259, "47525": 2.05962, "47530": 2.02006, "47535": 2.05829, "47540": 2.11069, "47545": 2.09151, "47550": 2.10048, "47555": 2.12599, "47560": 2.05652, "47565": 2.07451, "47570": 2.02928, "47575": 2.09337, "47580": 2.1018, "47585": 2.06591, "47590": 2.09373, "47595": 2.12832, "47600": 2.09567, "47605": 2.08505, "47610": 2.0445, "47615": 2.03907, "47620": 2.11349, "47625": 2.02393, "47630": 2.07685, "47635": 2.01643, "47640": 2.03895, "47645": 
2.0094, "47650": 2.05654, "47655": 2.07208, "47660": 2.05029, "47665": 2.05843, "47670": 2.06792, "47675": 2.07235, "47680": 2.06529, "47685": 2.0765, "47690": 2.03662, "47695": 2.03067, "47700": 2.06042, "47705": 2.0679, "47710": 2.02301, "47715": 2.11737, "47720": 2.05716, "47725": 2.04148, "47730": 2.08961, "47735": 2.06972, "47740": 2.07072, "47745": 2.08502, "47750": 2.05919, "47755": 2.0884, "47760": 2.06489, "47765": 2.08112, "47770": 2.10263, "47775": 2.04655, "47780": 2.05948, "47785": 2.06597, "47790": 2.09248, "47795": 2.05501, "47800": 2.07947, "47805": 2.06267, "47810": 2.06377, "47815": 2.08879, "47820": 2.12134, "47825": 2.05235, "47830": 2.08257, "47835": 2.16951, "47840": 2.00629, "47845": 2.119, "47850": 2.13195, "47855": 2.08236, "47860": 2.09082, "47865": 2.0241, "47870": 2.10958, "47875": 2.08298, "47880": 2.05648, "47885": 2.11094, "47890": 2.08481, "47895": 2.09409, "47900": 2.08505, "47905": 2.07935, "47910": 2.09712, "47915": 2.08956, "47920": 2.04222, "47925": 2.07585, "47930": 2.12186, "47935": 2.07361, "47940": 2.12155, "47945": 2.05675, "47950": 2.06151, "47955": 2.07005, "47960": 2.07857, "47965": 2.04916, "47970": 2.07737, "47975": 2.10929, "47980": 2.13612, "47985": 2.08404, "47990": 2.02335, "47995": 2.07163, "48000": 1.98965, "48005": 2.0539, "48010": 2.10973, "48015": 2.07378, "48020": 2.02956, "48025": 2.10981, "48030": 2.09186, "48035": 2.03302, "48040": 2.08293, "48045": 2.07405, "48050": 2.01321, "48055": 2.08742, "48060": 2.06836, "48065": 2.00888, "48070": 2.09018, "48075": 2.08477, "48080": 2.04417, "48085": 2.07842, "48090": 2.13382, "48095": 2.07653, "48100": 1.99055, "48105": 2.04608, "48110": 2.18205, "48115": 2.0692, "48120": 2.02776, "48125": 1.99494, "48130": 1.98289, "48135": 2.10344, "48140": 2.05661, "48145": 2.06603, "48150": 2.05014, "48155": 2.0444, "48160": 2.11202, "48165": 2.072, "48170": 2.08388, "48175": 2.05099, "48180": 2.12907, "48185": 2.06201, "48190": 2.0211, "48195": 2.02823, "48200": 2.07428, "48205": 2.04091, "48210": 2.06767, "48215": 2.13186, "48220": 2.08421, "48225": 2.03973, "48230": 2.07603, "48235": 2.01757, "48240": 2.04608, "48245": 2.03903, "48250": 2.07492, "48255": 2.08584, "48260": 2.02213, "48265": 2.08444, "48270": 2.07149, "48275": 2.03553, "48280": 2.10797, "48285": 2.028, "48290": 2.05667, "48295": 2.02485, "48300": 2.09246, "48305": 2.04665, "48310": 2.09391, "48315": 2.12166, "48320": 2.01143, "48325": 2.02797, "48330": 2.00712, "48335": 2.05516, "48340": 2.06251, "48345": 2.09635, "48350": 2.07101, "48355": 2.04937, "48360": 2.05403, "48365": 2.06517, "48370": 2.09165, "48375": 2.08203, "48380": 2.11988, "48385": 2.07837, "48390": 2.11466, "48395": 2.08063, "48400": 2.11091, "48405": 2.05401, "48410": 2.04835, "48415": 2.10048, "48420": 2.0084, "48425": 2.04357, "48430": 2.05857, "48435": 2.06936, "48440": 2.00719, "48445": 2.08338, "48450": 2.02155, "48455": 2.01039, "48460": 2.07619, "48465": 2.11259, "48470": 2.08535, "48475": 1.99104, "48480": 2.03918, "48485": 2.00944, "48490": 2.11779, "48495": 2.038, "48500": 2.03285, "48505": 2.09031, "48510": 2.07835, "48515": 2.06457, "48520": 2.07378, "48525": 2.09044, "48530": 2.07561, "48535": 1.96718, "48540": 2.07109, "48545": 2.04757, "48550": 2.07157, "48555": 2.09844, "48560": 2.10068, "48565": 2.02376, "48570": 2.10283, "48575": 2.08268, "48580": 1.97246, "48585": 2.07261, "48590": 2.10277, "48595": 2.08965, "48600": 2.07211, "48605": 2.06556, "48610": 2.11468, "48615": 2.10811, "48620": 2.07735, "48625": 2.08406, "48630": 2.00022, "48635": 2.07065, 
"48640": 2.07928, "48645": 2.07397, "48650": 2.0856, "48655": 2.07098, "48660": 2.07101, "48665": 2.0282, "48670": 2.0832, "48675": 2.11727, "48680": 2.09142, "48685": 2.10353, "48690": 2.08131, "48695": 2.07937, "48700": 2.06835, "48705": 2.09351, "48710": 2.05672, "48715": 1.99993, "48720": 2.07506, "48725": 2.08313, "48730": 2.14697, "48735": 2.05847, "48740": 2.11057, "48745": 2.04191, "48750": 2.02973, "48755": 2.03922, "48760": 2.0305, "48765": 2.11115, "48770": 2.04287, "48775": 2.05606, "48780": 2.03794, "48785": 2.0767, "48790": 2.06918, "48795": 2.04321, "48800": 2.08899, "48805": 2.07095, "48810": 2.06661, "48815": 2.05145, "48820": 2.05097, "48825": 2.00945, "48830": 2.0159, "48835": 2.03184, "48840": 2.09959, "48845": 2.09014, "48850": 2.13641, "48855": 1.99931, "48860": 2.06548, "48865": 2.05146, "48870": 2.12468, "48875": 2.02351, "48880": 2.02448, "48885": 2.0155, "48890": 2.0469, "48895": 2.06997, "48900": 2.04367, "48905": 2.06467, "48910": 2.14757, "48915": 2.10587, "48920": 1.99259, "48925": 2.10683, "48930": 2.09993, "48935": 2.03771, "48940": 2.06052, "48945": 2.07266, "48950": 2.08888, "48955": 2.04782, "48960": 2.02477, "48965": 2.05418, "48970": 2.13171, "48975": 2.11539, "48980": 2.01216, "48985": 2.09328, "48990": 2.0532, "48995": 2.02856, "49000": 2.06304, "49005": 2.03498, "49010": 2.06559, "49015": 2.09393, "49020": 2.05947, "49025": 2.08384, "49030": 2.05448, "49035": 2.07917, "49040": 2.03614, "49045": 2.0254, "49050": 2.06681, "49055": 2.09932, "49060": 2.11461, "49065": 2.00628, "49070": 2.12836, "49075": 2.05318, "49080": 2.10661, "49085": 2.0149, "49090": 2.06181, "49095": 2.1179, "49100": 2.0224, "49105": 2.04219, "49110": 2.04213, "49115": 2.1062, "49120": 2.0619, "49125": 2.06401, "49130": 2.08694, "49135": 2.07793, "49140": 2.11149, "49145": 2.08495, "49150": 2.11265, "49155": 2.0957, "49160": 2.07742, "49165": 2.10126, "49170": 2.06035, "49175": 2.02477, "49180": 2.12268, "49185": 2.13841, "49190": 2.15511, "49195": 2.11697, "49200": 2.09386, "49205": 2.07624, "49210": 2.06874, "49215": 2.08247, "49220": 2.0639, "49225": 2.03397, "49230": 2.01514, "49235": 2.0568, "49240": 2.10648, "49245": 2.02051, "49250": 2.00871, "49255": 2.05571, "49260": 2.08442, "49265": 2.07073, "49270": 2.07018, "49275": 2.03629, "49280": 2.08601, "49285": 2.03716, "49290": 2.04221, "49295": 2.10895, "49300": 2.09597, "49305": 2.05182, "49310": 2.04167, "49315": 2.10027, "49320": 1.9984, "49325": 2.03837, "49330": 2.02067, "49335": 2.06718, "49340": 2.07492, "49345": 2.08495, "49350": 2.08349, "49355": 1.9883, "49360": 2.03919, "49365": 2.10305, "49370": 2.04006, "49375": 2.03241, "49380": 2.09307, "49385": 1.99171, "49390": 2.0409, "49395": 2.01044, "49400": 2.03678, "49405": 2.11975, "49410": 2.04214, "49415": 2.01593, "49420": 2.07139, "49425": 2.04924, "49430": 2.03796, "49435": 2.07715, "49440": 2.0593, "49445": 2.11919, "49450": 2.00655, "49455": 2.07775, "49460": 2.09545, "49465": 2.05764, "49470": 2.05844, "49475": 2.11049, "49480": 2.06227, "49485": 2.03437, "49490": 2.02589, "49495": 2.04341, "49500": 2.06062, "49505": 2.00591, "49510": 2.11086, "49515": 2.0443, "49520": 2.01287, "49525": 2.04838, "49530": 2.07854, "49535": 1.99493, "49540": 2.00836, "49545": 2.11415, "49550": 2.12057, "49555": 2.07023, "49560": 2.10217, "49565": 2.08376, "49570": 2.10222, "49575": 2.0694, "49580": 2.00483, "49585": 2.05804, "49590": 2.00928, "49595": 2.12551, "49600": 2.0639, "49605": 2.09831, "49610": 2.04838, "49615": 2.04433, "49620": 2.08464, "49625": 2.03792, "49630": 
2.0804, "49635": 2.06033, "49640": 2.0549, "49645": 2.09119, "49650": 2.02545, "49655": 2.05949, "49660": 2.00683, "49665": 2.03539, "49670": 1.9899, "49675": 2.07062, "49680": 2.0315, "49685": 2.07658, "49690": 2.08307, "49695": 2.0563, "49700": 2.11647, "49705": 2.15544, "49710": 2.11817, "49715": 2.13531, "49720": 1.99634, "49725": 2.06573, "49730": 2.06303, "49735": 2.12073, "49740": 2.05925, "49745": 2.06809, "49750": 2.05275, "49755": 2.06669, "49760": 2.08754, "49765": 2.00652, "49770": 2.12387, "49775": 2.05407, "49780": 2.12084, "49785": 2.04344, "49790": 2.03877, "49795": 2.08695, "49800": 2.05928, "49805": 2.01413, "49810": 2.04483, "49815": 2.07881, "49820": 2.05288, "49825": 2.06176, "49830": 2.08152, "49835": 2.09345, "49840": 2.11958, "49845": 2.1028, "49850": 2.04633, "49855": 2.05424, "49860": 2.13035, "49865": 2.08512, "49870": 2.10092, "49875": 2.07275, "49880": 2.05717, "49885": 2.04849, "49890": 2.08638, "49895": 2.04743, "49900": 2.1194, "49905": 1.97341, "49910": 2.16425, "49915": 2.02121, "49920": 1.98269, "49925": 2.16712, "49930": 2.07225, "49935": 1.99127, "49940": 2.04442, "49945": 2.10877, "49950": 2.06196, "49955": 2.08741, "49960": 2.04024, "49965": 2.0908, "49970": 2.03214, "49975": 2.08168, "49980": 2.10535, "49985": 2.0803, "49990": 2.04154, "49995": 2.00735, "50000": 2.06252, "50005": 2.07555, "50010": 2.06359, "50015": 2.06141, "50020": 2.07158, "50025": 2.04531, "50030": 2.04556, "50035": 2.02607, "50040": 2.05603, "50045": 2.07278, "50050": 2.04475, "50055": 2.00739, "50060": 2.1075, "50065": 2.08834, "50070": 2.10931, "50075": 2.0797, "50080": 2.0514, "50085": 2.07759, "50090": 2.04952, "50095": 2.14057, "50100": 2.05748, "50105": 2.07966, "50110": 2.09919, "50115": 2.06144, "50120": 2.02325, "50125": 2.07857, "50130": 1.99654, "50135": 2.10342, "50140": 2.07332, "50145": 2.10793, "50150": 1.95472, "50155": 2.01752, "50160": 2.08347, "50165": 2.08151, "50170": 2.00205, "50175": 2.01699, "50180": 2.09776, "50185": 2.03742, "50190": 2.04216, "50195": 2.03144, "50200": 2.0912, "50205": 2.04497, "50210": 2.03872, "50215": 2.04564, "50220": 2.05643, "50225": 2.08899, "50230": 2.00745, "50235": 2.1448, "50240": 1.98532, "50245": 2.10014, "50250": 1.98645, "50255": 2.03989, "50260": 2.12091, "50265": 2.03442, "50270": 2.14937, "50275": 2.06946, "50280": 2.11965, "50285": 2.00844, "50290": 2.00415, "50295": 2.04246, "50300": 2.0855, "50305": 2.07594, "50310": 2.031, "50315": 2.03553, "50320": 1.95007, "50325": 2.12666, "50330": 2.04751, "50335": 2.02645, "50340": 2.11556, "50345": 2.03411, "50350": 2.13395, "50355": 2.04576, "50360": 2.04195, "50365": 2.04875, "50370": 2.12035, "50375": 2.09594, "50380": 2.0705, "50385": 2.07435, "50390": 1.99775, "50395": 2.07063, "50400": 2.03295, "50405": 2.09595, "50410": 2.07553, "50415": 2.085, "50420": 2.04963, "50425": 2.08586, "50430": 2.10836, "50435": 2.06119, "50440": 1.98643, "50445": 2.04463, "50450": 2.0392, "50455": 2.00836, "50460": 2.01837, "50465": 2.09538, "50470": 2.11139, "50475": 2.11523, "50480": 2.0255, "50485": 2.03127, "50490": 2.00564, "50495": 2.0656, "50500": 2.06284, "50505": 2.02036, "50510": 1.98355, "50515": 2.07531, "50520": 2.00401, "50525": 2.10752, "50530": 2.13609, "50535": 2.08608, "50540": 2.08572, "50545": 1.9854, "50550": 2.05342, "50555": 2.09292, "50560": 2.05797, "50565": 2.04826, "50570": 2.05458, "50575": 2.01623, "50580": 2.06472, "50585": 2.06944, "50590": 2.04677, "50595": 2.06797, "50600": 2.02577, "50605": 2.03008, "50610": 1.96034, "50615": 2.03001, "50620": 2.08549, 
"50625": 2.00163, "50630": 1.99103, "50635": 2.02589, "50640": 2.06818, "50645": 2.07716, "50650": 2.01244, "50655": 2.14079, "50660": 2.02737, "50665": 2.11889, "50670": 2.01729, "50675": 1.98616, "50680": 2.02447, "50685": 2.06194, "50690": 1.99923, "50695": 2.03303, "50700": 2.06288, "50705": 2.0644, "50710": 2.09727, "50715": 2.08877, "50720": 2.08664, "50725": 2.02143, "50730": 2.04866, "50735": 2.09704, "50740": 2.01746, "50745": 2.11809, "50750": 2.00362, "50755": 2.04548, "50760": 2.09141, "50765": 2.05645, "50770": 2.07069, "50775": 2.04516, "50780": 2.03081, "50785": 2.08476, "50790": 2.00582, "50795": 2.01245, "50800": 2.09904, "50805": 2.08013, "50810": 2.04434, "50815": 2.02625, "50820": 1.97498, "50825": 2.05306, "50830": 2.08538, "50835": 2.02496, "50840": 2.0499, "50845": 1.98684, "50850": 2.08978, "50855": 2.07146, "50860": 2.03334}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 40024.0, "5": 40010.0, "10": 41481.0, "15": 37259.0, "20": 40804.0, "25": 40126.0, "30": 40729.0, "35": 39351.0, "40": 41495.0, "45": 42268.0, "50": 38428.0, "55": 39350.0, "60": 42253.0, "65": 39972.0, "70": 40719.0, "75": 41485.0, "80": 40874.0, "85": 40821.0, "90": 40712.0, "95": 40720.0, "100": 41568.0, "105": 40786.0, "110": 38717.0, "115": 41565.0, "120": 39269.0, "125": 40732.0, "130": 40724.0, "135": 37890.0, "140": 38700.0, "145": 40960.0, "150": 40036.0, "155": 40791.0, "160": 40103.0, "165": 38581.0, "170": 37354.0, "175": 40040.0, "180": 40104.0, "185": 38581.0, "190": 39321.0, "195": 41486.0, "200": 41565.0, "205": 40084.0, "210": 40795.0, "215": 40016.0, "220": 40797.0, "225": 39403.0, "230": 41483.0, "235": 37969.0, "240": 39340.0, "245": 38636.0, "250": 40036.0, "255": 41485.0, "260": 39257.0, "265": 41561.0, "270": 40726.0, "275": 40786.0, "280": 41497.0, "285": 41550.0, "290": 40796.0, "295": 38605.0, "300": 38750.0, "305": 41567.0, "310": 39264.0, "315": 40111.0, "320": 39330.0, "325": 40797.0, "330": 40793.0, "335": 40171.0, "340": 37981.0, "345": 39953.0, "350": 40716.0, "355": 39953.0, "360": 41569.0, "365": 37983.0, "370": 40044.0, "375": 41494.0, "380": 38710.0, "385": 37823.0, "390": 39952.0, "395": 40789.0, "400": 39256.0, "405": 40710.0, "410": 39471.0, "415": 40723.0, "420": 41490.0, "425": 39422.0, "430": 39426.0, "435": 42254.0, "440": 35823.0, "445": 40047.0, "450": 41491.0, "455": 39958.0, "460": 38580.0, "465": 39332.0, "470": 40798.0, "475": 40787.0, "480": 40804.0, "485": 39195.0, "490": 38731.0, "495": 40786.0, "500": 40716.0, "505": 41498.0, "510": 37748.0, "515": 40717.0, "520": 40018.0, "525": 39274.0, "530": 38823.0, "535": 40793.0, "540": 40022.0, "545": 41569.0, "550": 40798.0, "555": 40724.0, "560": 39953.0, "565": 40717.0, "570": 40166.0, "575": 39338.0, "580": 40786.0, "585": 40711.0, "590": 40028.0, "595": 40016.0, "600": 40047.0, "605": 40767.0, "610": 40891.0, "615": 39941.0, "620": 41554.0, "625": 40727.0, "630": 42248.0, "635": 39970.0, "640": 37053.0, "645": 40110.0, "650": 39268.0, "655": 39202.0, "660": 37320.0, "665": 40099.0, "670": 40799.0, "675": 39273.0, "680": 39947.0, "685": 39248.0, "690": 42260.0, "695": 38591.0, "700": 40039.0, "705": 38573.0, "710": 42250.0, "715": 42249.0, "720": 41495.0, "725": 40786.0, "730": 38651.0, "735": 40789.0, "740": 41565.0, "745": 41566.0, "750": 40056.0, "755": 38713.0, "760": 38568.0, "765": 39954.0, "770": 40710.0, "775": 40727.0, "780": 39329.0, "785": 41577.0, "790": 42316.0, "795": 41551.0, "800": 40808.0, "805": 37057.0, "810": 39324.0, "815": 40723.0, "820": 
43017.0, "825": 40027.0, "830": 39953.0, "835": 41479.0, "840": 38557.0, "845": 41561.0, "850": 38057.0, "855": 36501.0, "860": 41487.0, "865": 39963.0, "870": 35802.0, "875": 38574.0, "880": 39976.0, "885": 40036.0, "890": 39395.0, "895": 40717.0, "900": 40014.0, "905": 40024.0, "910": 40021.0, "915": 39267.0, "920": 40069.0, "925": 40121.0, "930": 40081.0, "935": 40723.0, "940": 40792.0, "945": 39956.0, "950": 37718.0, "955": 40108.0, "960": 40782.0, "965": 40121.0, "970": 40719.0, "975": 39950.0, "980": 40805.0, "985": 30963.0, "990": 39335.0, "995": 42259.0, "1000": 41533.0, "1005": 41494.0, "1010": 41657.0, "1015": 40047.0, "1020": 39273.0, "1025": 40034.0, "1030": 40190.0, "1035": 39425.0, "1040": 40104.0, "1045": 40807.0, "1050": 41493.0, "1055": 37943.0, "1060": 42250.0, "1065": 37148.0, "1070": 39267.0, "1075": 43018.0, "1080": 39322.0, "1085": 40034.0, "1090": 39970.0, "1095": 38493.0, "1100": 38637.0, "1105": 39347.0, "1110": 42338.0, "1115": 40764.0, "1120": 40723.0, "1125": 40729.0, "1130": 38657.0, "1135": 40126.0, "1140": 40125.0, "1145": 39338.0, "1150": 41551.0, "1155": 40720.0, "1160": 41485.0, "1165": 40038.0, "1170": 39282.0, "1175": 40032.0, "1180": 40728.0, "1185": 39285.0, "1190": 39261.0, "1195": 40722.0, "1200": 41556.0, "1205": 38591.0, "1210": 41559.0, "1215": 39319.0, "1220": 41575.0, "1225": 40126.0, "1230": 39358.0, "1235": 41493.0, "1240": 40712.0, "1245": 41482.0, "1250": 39408.0, "1255": 38691.0, "1260": 41490.0, "1265": 41499.0, "1270": 40725.0, "1275": 38454.0, "1280": 40108.0, "1285": 40728.0, "1290": 40026.0, "1295": 40789.0, "1300": 40731.0, "1305": 41503.0, "1310": 41493.0, "1315": 40802.0, "1320": 40732.0, "1325": 40727.0, "1330": 40807.0, "1335": 38660.0, "1340": 40796.0, "1345": 39973.0, "1350": 40797.0, "1355": 40106.0, "1360": 42256.0, "1365": 39421.0, "1370": 39342.0, "1375": 40800.0, "1380": 38631.0, "1385": 38606.0, "1390": 40037.0, "1395": 40801.0, "1400": 40806.0, "1405": 40813.0, "1410": 37810.0, "1415": 40019.0, "1420": 41493.0, "1425": 40787.0, "1430": 39963.0, "1435": 35918.0, "1440": 38578.0, "1445": 39346.0, "1450": 40102.0, "1455": 41548.0, "1460": 40807.0, "1465": 40734.0, "1470": 37194.0, "1475": 40727.0, "1480": 37366.0, "1485": 37829.0, "1490": 38508.0, "1495": 38594.0, "1500": 41483.0, "1505": 40037.0, "1510": 40723.0, "1515": 40806.0, "1520": 40803.0, "1525": 40908.0, "1530": 39277.0, "1535": 39343.0, "1540": 37882.0, "1545": 39979.0, "1550": 40889.0, "1555": 40797.0, "1560": 40797.0, "1565": 37882.0, "1570": 39961.0, "1575": 40037.0, "1580": 38651.0, "1585": 39276.0, "1590": 39346.0, "1595": 37963.0, "1600": 40800.0, "1605": 39337.0, "1610": 40721.0, "1615": 37873.0, "1620": 39283.0, "1625": 41587.0, "1630": 39964.0, "1635": 40812.0, "1640": 41498.0, "1645": 40054.0, "1650": 39350.0, "1655": 41503.0, "1660": 40875.0, "1665": 41568.0, "1670": 40802.0, "1675": 40804.0, "1680": 40895.0, "1685": 39962.0, "1690": 40034.0, "1695": 39334.0, "1700": 40801.0, "1705": 41618.0, "1710": 41492.0, "1715": 39348.0, "1720": 41554.0, "1725": 40053.0, "1730": 39335.0, "1735": 38673.0, "1740": 41484.0, "1745": 37743.0, "1750": 39286.0, "1755": 39348.0, "1760": 39261.0, "1765": 40781.0, "1770": 40800.0, "1775": 39419.0, "1780": 41496.0, "1785": 41565.0, "1790": 40718.0, "1795": 41487.0, "1800": 39449.0, "1805": 40724.0, "1810": 40044.0, "1815": 37735.0, "1820": 40731.0, "1825": 39397.0, "1830": 41499.0, "1835": 39461.0, "1840": 40802.0, "1845": 40805.0, "1850": 40805.0, "1855": 40808.0, "1860": 40724.0, "1865": 40044.0, "1870": 41643.0, "1875": 
40733.0, "1880": 39326.0, "1885": 39203.0, "1890": 41573.0, "1895": 40726.0, "1900": 37807.0, "1905": 41499.0, "1910": 41491.0, "1915": 40954.0, "1920": 41562.0, "1925": 39251.0, "1930": 41494.0, "1935": 40809.0, "1940": 40066.0, "1945": 39442.0, "1950": 41493.0, "1955": 41559.0, "1960": 40811.0, "1965": 40110.0, "1970": 39970.0, "1975": 42254.0, "1980": 36541.0, "1985": 40116.0, "1990": 41560.0, "1995": 40040.0, "2000": 41500.0, "2005": 41565.0, "2010": 37812.0, "2015": 38745.0, "2020": 40733.0, "2025": 38727.0, "2030": 38059.0, "2035": 40759.0, "2040": 41580.0, "2045": 40179.0, "2050": 39495.0, "2055": 41567.0, "2060": 39416.0, "2065": 42257.0, "2070": 41490.0, "2075": 39280.0, "2080": 42250.0, "2085": 39374.0, "2090": 40809.0, "2095": 41551.0, "2100": 41577.0, "2105": 41503.0, "2110": 40811.0, "2115": 40724.0, "2120": 41491.0, "2125": 40035.0, "2130": 39347.0, "2135": 39433.0, "2140": 41488.0, "2145": 38581.0, "2150": 40037.0, "2155": 41495.0, "2160": 41574.0, "2165": 40058.0, "2170": 40804.0, "2175": 39347.0, "2180": 40038.0, "2185": 41501.0, "2190": 40884.0, "2195": 38804.0, "2200": 40802.0, "2205": 39958.0, "2210": 39413.0, "2215": 40801.0, "2220": 39267.0, "2225": 40105.0, "2230": 40801.0, "2235": 37269.0, "2240": 40032.0, "2245": 40728.0, "2250": 42259.0, "2255": 40741.0, "2260": 41568.0, "2265": 40813.0, "2270": 40048.0, "2275": 40727.0, "2280": 39197.0, "2285": 40879.0, "2290": 39436.0, "2295": 37898.0, "2300": 41587.0, "2305": 40722.0, "2310": 40060.0, "2315": 38510.0, "2320": 40107.0, "2325": 41496.0, "2330": 40734.0, "2335": 37187.0, "2340": 40732.0, "2345": 40196.0, "2350": 40121.0, "2355": 40900.0, "2360": 39277.0, "2365": 40729.0, "2370": 40732.0, "2375": 38574.0, "2380": 40040.0, "2385": 40812.0, "2390": 40096.0, "2395": 40734.0, "2400": 41567.0, "2405": 40084.0, "2410": 40780.0, "2415": 39198.0, "2420": 39978.0, "2425": 37805.0, "2430": 39253.0, "2435": 41499.0, "2440": 40036.0, "2445": 39379.0, "2450": 40810.0, "2455": 42271.0, "2460": 40113.0, "2465": 39508.0, "2470": 37829.0, "2475": 40049.0, "2480": 40026.0, "2485": 39349.0, "2490": 40105.0, "2495": 41493.0, "2500": 39209.0, "2505": 40727.0, "2510": 40736.0, "2515": 38663.0, "2520": 40797.0, "2525": 41579.0, "2530": 40732.0, "2535": 39979.0, "2540": 39203.0, "2545": 37996.0, "2550": 40802.0, "2555": 41494.0, "2560": 39455.0, "2565": 39202.0, "2570": 39351.0, "2575": 40725.0, "2580": 40735.0, "2585": 40857.0, "2590": 40122.0, "2595": 37974.0, "2600": 39959.0, "2605": 38772.0, "2610": 41561.0, "2615": 40100.0, "2620": 41499.0, "2625": 39276.0, "2630": 40037.0, "2635": 39521.0, "2640": 40800.0, "2645": 40042.0, "2650": 40048.0, "2655": 39180.0, "2660": 37262.0, "2665": 40890.0, "2670": 40884.0, "2675": 38440.0, "2680": 40038.0, "2685": 41558.0, "2690": 40043.0, "2695": 40745.0, "2700": 39276.0, "2705": 41576.0, "2710": 40810.0, "2715": 38499.0, "2720": 40043.0, "2725": 38667.0, "2730": 40917.0, "2735": 40036.0, "2740": 38589.0, "2745": 40046.0, "2750": 40793.0, "2755": 38522.0, "2760": 38500.0, "2765": 39260.0, "2770": 41574.0, "2775": 39357.0, "2780": 40750.0, "2785": 40739.0, "2790": 39290.0, "2795": 40020.0, "2800": 39423.0, "2805": 40812.0, "2810": 41555.0, "2815": 42266.0, "2820": 40735.0, "2825": 40052.0, "2830": 40810.0, "2835": 41564.0, "2840": 40799.0, "2845": 38592.0, "2850": 42256.0, "2855": 40795.0, "2860": 39364.0, "2865": 39346.0, "2870": 40189.0, "2875": 42264.0, "2880": 40792.0, "2885": 39976.0, "2890": 38588.0, "2895": 40732.0, "2900": 39338.0, "2905": 39976.0, "2910": 40731.0, "2915": 40036.0, "2920": 
37885.0, "2925": 40805.0, "2930": 36517.0, "2935": 41579.0, "2940": 40817.0, "2945": 38892.0, "2950": 39408.0, "2955": 38050.0, "2960": 41491.0, "2965": 39199.0, "2970": 40818.0, "2975": 38799.0, "2980": 41495.0, "2985": 39253.0, "2990": 42327.0, "2995": 40799.0, "3000": 37945.0, "3005": 42264.0, "3010": 38741.0, "3015": 40804.0, "3020": 41500.0, "3025": 39342.0, "3030": 38449.0, "3035": 40034.0, "3040": 41577.0, "3045": 39330.0, "3050": 41494.0, "3055": 40729.0, "3060": 38508.0, "3065": 39267.0, "3070": 40798.0, "3075": 37934.0, "3080": 39212.0, "3085": 41580.0, "3090": 39272.0, "3095": 39367.0, "3100": 38592.0, "3105": 39980.0, "3110": 37133.0, "3115": 38465.0, "3120": 41661.0, "3125": 40087.0, "3130": 39299.0, "3135": 39256.0, "3140": 39996.0, "3145": 40801.0, "3150": 41512.0, "3155": 38586.0, "3160": 40889.0, "3165": 41488.0, "3170": 39255.0, "3175": 37443.0, "3180": 39457.0, "3185": 42265.0, "3190": 37957.0, "3195": 39263.0, "3200": 38677.0, "3205": 40038.0, "3210": 39975.0, "3215": 38590.0, "3220": 39957.0, "3225": 40126.0, "3230": 40107.0, "3235": 42349.0, "3240": 40031.0, "3245": 40740.0, "3250": 41495.0, "3255": 42262.0, "3260": 40096.0, "3265": 42257.0, "3270": 40794.0, "3275": 41587.0, "3280": 40165.0, "3285": 39283.0, "3290": 38639.0, "3295": 40722.0, "3300": 40181.0, "3305": 42266.0, "3310": 42352.0, "3315": 41496.0, "3320": 41501.0, "3325": 39281.0, "3330": 40800.0, "3335": 40129.0, "3340": 40816.0, "3345": 40823.0, "3350": 40810.0, "3355": 41595.0, "3360": 41506.0, "3365": 40740.0, "3370": 40024.0, "3375": 40726.0, "3380": 40018.0, "3385": 39343.0, "3390": 40025.0, "3395": 39972.0, "3400": 38082.0, "3405": 38667.0, "3410": 39972.0, "3415": 40047.0, "3420": 40801.0, "3425": 40815.0, "3430": 40734.0, "3435": 40042.0, "3440": 41568.0, "3445": 38003.0, "3450": 37886.0, "3455": 40817.0, "3460": 42266.0, "3465": 39343.0, "3470": 38561.0, "3475": 40211.0, "3480": 40818.0, "3485": 38815.0, "3490": 40048.0, "3495": 40052.0, "3500": 40037.0, "3505": 40873.0, "3510": 40818.0, "3515": 40042.0, "3520": 39268.0, "3525": 41577.0, "3530": 40720.0, "3535": 41573.0, "3540": 40746.0, "3545": 40807.0, "3550": 41564.0, "3555": 40114.0, "3560": 40760.0, "3565": 40802.0, "3570": 40822.0, "3575": 40054.0, "3580": 40049.0, "3585": 41581.0, "3590": 41505.0, "3595": 39335.0, "3600": 39348.0, "3605": 39276.0, "3610": 40032.0, "3615": 42266.0, "3620": 38681.0, "3625": 39965.0, "3630": 40812.0, "3635": 40106.0, "3640": 39204.0, "3645": 39333.0, "3650": 38652.0, "3655": 40203.0, "3660": 42337.0, "3665": 41494.0, "3670": 37361.0, "3675": 39412.0, "3680": 37316.0, "3685": 39347.0, "3690": 40781.0, "3695": 42342.0, "3700": 38173.0, "3705": 39488.0, "3710": 39273.0, "3715": 38524.0, "3720": 40804.0, "3725": 37253.0, "3730": 37841.0, "3735": 40806.0, "3740": 40792.0, "3745": 38567.0, "3750": 39393.0, "3755": 39329.0, "3760": 39367.0, "3765": 39346.0, "3770": 42336.0, "3775": 40816.0, "3780": 42262.0, "3785": 37895.0, "3790": 39340.0, "3795": 40029.0, "3800": 39509.0, "3805": 37152.0, "3810": 39406.0, "3815": 40730.0, "3820": 40136.0, "3825": 42267.0, "3830": 40044.0, "3835": 40796.0, "3840": 40035.0, "3845": 39351.0, "3850": 39430.0, "3855": 39364.0, "3860": 40051.0, "3865": 41487.0, "3870": 37754.0, "3875": 39971.0, "3880": 40038.0, "3885": 41491.0, "3890": 39960.0, "3895": 40739.0, "3900": 39278.0, "3905": 38584.0, "3910": 40044.0, "3915": 40788.0, "3920": 36535.0, "3925": 39971.0, "3930": 39258.0, "3935": 39348.0, "3940": 39427.0, "3945": 41506.0, "3950": 39985.0, "3955": 39399.0, "3960": 38674.0, "3965": 
39978.0, "3970": 39281.0, "3975": 39965.0, "3980": 40829.0, "3985": 38587.0, "3990": 39205.0, "3995": 39416.0, "4000": 40054.0, "4005": 40046.0, "4010": 35697.0, "4015": 41565.0, "4020": 39299.0, "4025": 40200.0, "4030": 37038.0, "4035": 41510.0, "4040": 34448.0, "4045": 40814.0, "4050": 37750.0, "4055": 39425.0, "4060": 37261.0, "4065": 40729.0, "4070": 38513.0, "4075": 40044.0, "4080": 40878.0, "4085": 41500.0, "4090": 40738.0, "4095": 41498.0, "4100": 37879.0, "4105": 41499.0, "4110": 41494.0, "4115": 40205.0, "4120": 40804.0, "4125": 38753.0, "4130": 40732.0, "4135": 39356.0, "4140": 39356.0, "4145": 40039.0, "4150": 39370.0, "4155": 39407.0, "4160": 40151.0, "4165": 39358.0, "4170": 40753.0, "4175": 39278.0, "4180": 40799.0, "4185": 40794.0, "4190": 39285.0, "4195": 40795.0, "4200": 41494.0, "4205": 40735.0, "4210": 40743.0, "4215": 40039.0, "4220": 38653.0, "4225": 40060.0, "4230": 38816.0, "4235": 39368.0, "4240": 40808.0, "4245": 40740.0, "4250": 40123.0, "4255": 39355.0, "4260": 38672.0, "4265": 40735.0, "4270": 40800.0, "4275": 40095.0, "4280": 40034.0, "4285": 41492.0, "4290": 40747.0, "4295": 37113.0, "4300": 39345.0, "4305": 40032.0, "4310": 40106.0, "4315": 40108.0, "4320": 41583.0, "4325": 40063.0, "4330": 38714.0, "4335": 42257.0, "4340": 40739.0, "4345": 40808.0, "4350": 37946.0, "4355": 41509.0, "4360": 40188.0, "4365": 40807.0, "4370": 37195.0, "4375": 40030.0, "4380": 38512.0, "4385": 40099.0, "4390": 40747.0, "4395": 39972.0, "4400": 39959.0, "4405": 40206.0, "4410": 41569.0, "4415": 41493.0, "4420": 40039.0, "4425": 40214.0, "4430": 40730.0, "4435": 37970.0, "4440": 39978.0, "4445": 39443.0, "4450": 39969.0, "4455": 40740.0, "4460": 40800.0, "4465": 41508.0, "4470": 41499.0, "4475": 39207.0, "4480": 41584.0, "4485": 40126.0, "4490": 42266.0, "4495": 39961.0, "4500": 40021.0, "4505": 40802.0, "4510": 40806.0, "4515": 37888.0, "4520": 39257.0, "4525": 40735.0, "4530": 39968.0, "4535": 39354.0, "4540": 39433.0, "4545": 38772.0, "4550": 40037.0, "4555": 40810.0, "4560": 39371.0, "4565": 40124.0, "4570": 41655.0, "4575": 40050.0, "4580": 37834.0, "4585": 39419.0, "4590": 37286.0, "4595": 39347.0, "4600": 40050.0, "4605": 40743.0, "4610": 40810.0, "4615": 40736.0, "4620": 40740.0, "4625": 41497.0, "4630": 40058.0, "4635": 40738.0, "4640": 41509.0, "4645": 41502.0, "4650": 40804.0, "4655": 39969.0, "4660": 40875.0, "4665": 41504.0, "4670": 39279.0, "4675": 40037.0, "4680": 41576.0, "4685": 38061.0, "4690": 40738.0, "4695": 39269.0, "4700": 39957.0, "4705": 40831.0, "4710": 42256.0, "4715": 41574.0, "4720": 40889.0, "4725": 40831.0, "4730": 38520.0, "4735": 40060.0, "4740": 37832.0, "4745": 39419.0, "4750": 40065.0, "4755": 39208.0, "4760": 40111.0, "4765": 41499.0, "4770": 40121.0, "4775": 36691.0, "4780": 39276.0, "4785": 40048.0, "4790": 42332.0, "4795": 39280.0, "4800": 42269.0, "4805": 39462.0, "4810": 40037.0, "4815": 42264.0, "4820": 39351.0, "4825": 37856.0, "4830": 40111.0, "4835": 41500.0, "4840": 40046.0, "4845": 41584.0, "4850": 38637.0, "4855": 40190.0, "4860": 38663.0, "4865": 40029.0, "4870": 41579.0, "4875": 40721.0, "4880": 41577.0, "4885": 40729.0, "4890": 40727.0, "4895": 40779.0, "4900": 40035.0, "4905": 40822.0, "4910": 40056.0, "4915": 40808.0, "4920": 40059.0, "4925": 40041.0, "4930": 39978.0, "4935": 40808.0, "4940": 39990.0, "4945": 39447.0, "4950": 40033.0, "4955": 39974.0, "4960": 38581.0, "4965": 38674.0, "4970": 39350.0, "4975": 39962.0, "4980": 37932.0, "4985": 37762.0, "4990": 39345.0, "4995": 40033.0, "5000": 38617.0, "5005": 40050.0, "5010": 
38671.0, "5015": 40732.0, "5020": 39208.0, "5025": 41490.0, "5030": 36565.0, "5035": 37978.0, "5040": 40748.0, "5045": 39217.0, "5050": 39288.0, "5055": 40000.0, "5060": 40805.0, "5065": 38589.0, "5070": 42261.0, "5075": 40057.0, "5080": 41503.0, "5085": 39291.0, "5090": 40804.0, "5095": 40881.0, "5100": 39353.0, "5105": 40069.0, "5110": 40804.0, "5115": 38564.0, "5120": 40111.0, "5125": 41504.0, "5130": 40135.0, "5135": 41508.0, "5140": 41585.0, "5145": 38045.0, "5150": 39337.0, "5155": 40045.0, "5160": 41572.0, "5165": 39960.0, "5170": 40747.0, "5175": 38766.0, "5180": 41495.0, "5185": 40818.0, "5190": 38653.0, "5195": 38617.0, "5200": 40123.0, "5205": 42264.0, "5210": 39269.0, "5215": 37200.0, "5220": 40036.0, "5225": 41570.0, "5230": 40183.0, "5235": 40047.0, "5240": 38668.0, "5245": 40819.0, "5250": 38538.0, "5255": 40046.0, "5260": 40805.0, "5265": 39280.0, "5270": 41496.0, "5275": 41508.0, "5280": 39289.0, "5285": 40048.0, "5290": 38727.0, "5295": 39338.0, "5300": 39435.0, "5305": 39273.0, "5310": 41573.0, "5315": 39222.0, "5320": 40905.0, "5325": 40815.0, "5330": 39347.0, "5335": 40133.0, "5340": 42269.0, "5345": 39975.0, "5350": 40804.0, "5355": 40135.0, "5360": 37339.0, "5365": 37914.0, "5370": 40814.0, "5375": 40032.0, "5380": 39334.0, "5385": 39338.0, "5390": 38025.0, "5395": 39436.0, "5400": 38710.0, "5405": 39367.0, "5410": 40047.0, "5415": 40747.0, "5420": 39975.0, "5425": 40110.0, "5430": 40813.0, "5435": 42263.0, "5440": 39291.0, "5445": 38579.0, "5450": 40046.0, "5455": 40155.0, "5460": 41494.0, "5465": 40892.0, "5470": 39419.0, "5475": 39471.0, "5480": 40030.0, "5485": 41498.0, "5490": 39397.0, "5495": 40816.0, "5500": 40902.0, "5505": 37764.0, "5510": 42259.0, "5515": 39289.0, "5520": 40734.0, "5525": 40800.0, "5530": 39271.0, "5535": 40756.0, "5540": 40132.0, "5545": 41582.0, "5550": 38748.0, "5555": 39982.0, "5560": 40180.0, "5565": 42346.0, "5570": 41499.0, "5575": 39448.0, "5580": 39330.0, "5585": 40732.0, "5590": 38590.0, "5595": 40060.0, "5600": 39345.0, "5605": 41573.0, "5610": 40801.0, "5615": 40782.0, "5620": 39451.0, "5625": 39411.0, "5630": 40100.0, "5635": 39338.0, "5640": 40805.0, "5645": 40742.0, "5650": 38583.0, "5655": 39267.0, "5660": 38736.0, "5665": 40823.0, "5670": 41589.0, "5675": 40819.0, "5680": 39341.0, "5685": 40744.0, "5690": 39969.0, "5695": 40814.0, "5700": 39346.0, "5705": 40039.0, "5710": 39346.0, "5715": 40806.0, "5720": 40898.0, "5725": 39961.0, "5730": 39977.0, "5735": 39966.0, "5740": 40050.0, "5745": 40792.0, "5750": 37219.0, "5755": 41493.0, "5760": 40806.0, "5765": 37154.0, "5770": 40742.0, "5775": 40033.0, "5780": 38669.0, "5785": 42334.0, "5790": 38013.0, "5795": 40864.0, "5800": 40810.0, "5805": 39969.0, "5810": 43024.0, "5815": 39271.0, "5820": 40732.0, "5825": 35612.0, "5830": 39428.0, "5835": 40043.0, "5840": 40046.0, "5845": 40819.0, "5850": 41502.0, "5855": 42257.0, "5860": 38530.0, "5865": 39379.0, "5870": 39408.0, "5875": 40038.0, "5880": 39349.0, "5885": 40801.0, "5890": 37910.0, "5895": 40730.0, "5900": 40046.0, "5905": 38679.0, "5910": 37964.0, "5915": 37855.0, "5920": 40805.0, "5925": 37941.0, "5930": 40857.0, "5935": 39290.0, "5940": 40100.0, "5945": 39329.0, "5950": 40133.0, "5955": 39352.0, "5960": 40051.0, "5965": 38733.0, "5970": 38429.0, "5975": 36496.0, "5980": 40734.0, "5985": 40053.0, "5990": 40805.0, "5995": 38595.0, "6000": 38753.0, "6005": 40051.0, "6010": 39963.0, "6015": 40052.0, "6020": 41573.0, "6025": 40049.0, "6030": 39357.0, "6035": 41587.0, "6040": 38598.0, "6045": 39972.0, "6050": 39964.0, "6055": 
41570.0, "6060": 40115.0, "6065": 41574.0, "6070": 38749.0, "6075": 41507.0, "6080": 38009.0, "6085": 35977.0, "6090": 40743.0, "6095": 41502.0, "6100": 40759.0, "6105": 35958.0, "6110": 40106.0, "6115": 41497.0, "6120": 40133.0, "6125": 37248.0, "6130": 39277.0, "6135": 39264.0, "6140": 41570.0, "6145": 40079.0, "6150": 39291.0, "6155": 41504.0, "6160": 39960.0, "6165": 40731.0, "6170": 37286.0, "6175": 40807.0, "6180": 37832.0, "6185": 40812.0, "6190": 41502.0, "6195": 38587.0, "6200": 41498.0, "6205": 41573.0, "6210": 38722.0, "6215": 41500.0, "6220": 39985.0, "6225": 40749.0, "6230": 37898.0, "6235": 40821.0, "6240": 40740.0, "6245": 40198.0, "6250": 41500.0, "6255": 40730.0, "6260": 41574.0, "6265": 39224.0, "6270": 40804.0, "6275": 40889.0, "6280": 43037.0, "6285": 40826.0, "6290": 41574.0, "6295": 40801.0, "6300": 39980.0, "6305": 38679.0, "6310": 38676.0, "6315": 41502.0, "6320": 40891.0, "6325": 37697.0, "6330": 39266.0, "6335": 41568.0, "6340": 40734.0, "6345": 41495.0, "6350": 41561.0, "6355": 40098.0, "6360": 38742.0, "6365": 40798.0, "6370": 38723.0, "6375": 40038.0, "6380": 37117.0, "6385": 40185.0, "6390": 39500.0, "6395": 41495.0, "6400": 36571.0, "6405": 40051.0, "6410": 40115.0, "6415": 41575.0, "6420": 40847.0, "6425": 37280.0, "6430": 39958.0, "6435": 40112.0, "6440": 41490.0, "6445": 40731.0, "6450": 38509.0, "6455": 38603.0, "6460": 39208.0, "6465": 42260.0, "6470": 41504.0, "6475": 40829.0, "6480": 40121.0, "6485": 39287.0, "6490": 35762.0, "6495": 39273.0, "6500": 41510.0, "6505": 40761.0, "6510": 40053.0, "6515": 40108.0, "6520": 40792.0, "6525": 42270.0, "6530": 41493.0, "6535": 39280.0, "6540": 40874.0, "6545": 37998.0, "6550": 39337.0, "6555": 40810.0, "6560": 35771.0, "6565": 39227.0, "6570": 40107.0, "6575": 41500.0, "6580": 41577.0, "6585": 40055.0, "6590": 35486.0, "6595": 40789.0, "6600": 39284.0, "6605": 39346.0, "6610": 41508.0, "6615": 40808.0, "6620": 41571.0, "6625": 40729.0, "6630": 37796.0, "6635": 38683.0, "6640": 40822.0, "6645": 38040.0, "6650": 40048.0, "6655": 40048.0, "6660": 41509.0, "6665": 41499.0, "6670": 39266.0, "6675": 39366.0, "6680": 42268.0, "6685": 40725.0, "6690": 40049.0, "6695": 40730.0, "6700": 40794.0, "6705": 41592.0, "6710": 39202.0, "6715": 38509.0, "6720": 39968.0, "6725": 41566.0, "6730": 41495.0, "6735": 38662.0, "6740": 37271.0, "6745": 41499.0, "6750": 40744.0, "6755": 38601.0, "6760": 39351.0, "6765": 39352.0, "6770": 39972.0, "6775": 38666.0, "6780": 38589.0, "6785": 40040.0, "6790": 40734.0, "6795": 40737.0, "6800": 40724.0, "6805": 40800.0, "6810": 40186.0, "6815": 40822.0, "6820": 40109.0, "6825": 40807.0, "6830": 41587.0, "6835": 38604.0, "6840": 40802.0, "6845": 42336.0, "6850": 40113.0, "6855": 39369.0, "6860": 41490.0, "6865": 40743.0, "6870": 40036.0, "6875": 40819.0, "6880": 40791.0, "6885": 39379.0, "6890": 40021.0, "6895": 40098.0, "6900": 37750.0, "6905": 40879.0, "6910": 41570.0, "6915": 41587.0, "6920": 40022.0, "6925": 40107.0, "6930": 42268.0, "6935": 38505.0, "6940": 39410.0, "6945": 40806.0, "6950": 39965.0, "6955": 39432.0, "6960": 40717.0, "6965": 40798.0, "6970": 40812.0, "6975": 41500.0, "6980": 39501.0, "6985": 40816.0, "6990": 40116.0, "6995": 39958.0, "7000": 39428.0, "7005": 39418.0, "7010": 39969.0, "7015": 38669.0, "7020": 40797.0, "7025": 39285.0, "7030": 38684.0, "7035": 39341.0, "7040": 42340.0, "7045": 36556.0, "7050": 38600.0, "7055": 40898.0, "7060": 40732.0, "7065": 37993.0, "7070": 40051.0, "7075": 39285.0, "7080": 40730.0, "7085": 40733.0, "7090": 36576.0, "7095": 40723.0, "7100": 
40810.0, "7105": 40734.0, "7110": 42334.0, "7115": 39296.0, "7120": 40819.0, "7125": 38525.0, "7130": 40886.0, "7135": 39286.0, "7140": 40107.0, "7145": 40737.0, "7150": 40800.0, "7155": 42337.0, "7160": 40058.0, "7165": 40026.0, "7170": 41501.0, "7175": 40816.0, "7180": 40827.0, "7185": 39296.0, "7190": 40809.0, "7195": 39962.0, "7200": 40118.0, "7205": 40811.0, "7210": 41580.0, "7215": 41567.0, "7220": 37359.0, "7225": 40822.0, "7230": 40115.0, "7235": 41504.0, "7240": 38572.0, "7245": 39416.0, "7250": 40816.0, "7255": 39963.0, "7260": 38504.0, "7265": 39357.0, "7270": 40733.0, "7275": 38580.0, "7280": 39448.0, "7285": 40125.0, "7290": 41554.0, "7295": 40739.0, "7300": 40044.0, "7305": 39502.0, "7310": 40121.0, "7315": 40865.0, "7320": 40127.0, "7325": 40818.0, "7330": 41493.0, "7335": 39371.0, "7340": 40812.0, "7345": 40113.0, "7350": 39357.0, "7355": 40736.0, "7360": 40194.0, "7365": 41498.0, "7370": 38580.0, "7375": 38442.0, "7380": 40731.0, "7385": 40121.0, "7390": 38677.0, "7395": 38469.0, "7400": 41504.0, "7405": 41499.0, "7410": 38494.0, "7415": 38521.0, "7420": 39354.0, "7425": 41499.0, "7430": 39361.0, "7435": 39976.0, "7440": 40138.0, "7445": 40724.0, "7450": 40786.0, "7455": 42267.0, "7460": 38801.0, "7465": 35634.0, "7470": 40803.0, "7475": 37824.0, "7480": 40869.0, "7485": 37975.0, "7490": 40819.0, "7495": 40104.0, "7500": 41493.0, "7505": 40810.0, "7510": 39214.0, "7515": 40060.0, "7520": 40743.0, "7525": 40113.0, "7530": 40812.0, "7535": 39262.0, "7540": 40045.0, "7545": 38447.0, "7550": 40733.0, "7555": 40136.0, "7560": 41506.0, "7565": 40053.0, "7570": 40735.0, "7575": 40743.0, "7580": 39961.0, "7585": 40739.0, "7590": 40811.0, "7595": 40799.0, "7600": 35669.0, "7605": 40727.0, "7610": 41501.0, "7615": 38466.0, "7620": 41578.0, "7625": 40722.0, "7630": 41577.0, "7635": 41514.0, "7640": 40130.0, "7645": 40131.0, "7650": 38646.0, "7655": 41565.0, "7660": 39966.0, "7665": 41573.0, "7670": 39972.0, "7675": 39450.0, "7680": 40736.0, "7685": 39357.0, "7690": 40110.0, "7695": 40051.0, "7700": 41504.0, "7705": 40188.0, "7710": 40804.0, "7715": 40038.0, "7720": 35771.0, "7725": 39348.0, "7730": 39361.0, "7735": 37969.0, "7740": 40864.0, "7745": 40731.0, "7750": 41501.0, "7755": 39400.0, "7760": 40145.0, "7765": 39345.0, "7770": 39351.0, "7775": 39347.0, "7780": 38603.0, "7785": 38689.0, "7790": 39361.0, "7795": 40028.0, "7800": 40142.0, "7805": 38082.0, "7810": 42345.0, "7815": 38590.0, "7820": 40800.0, "7825": 37898.0, "7830": 40882.0, "7835": 39363.0, "7840": 40818.0, "7845": 41505.0, "7850": 39344.0, "7855": 38675.0, "7860": 39970.0, "7865": 38816.0, "7870": 40820.0, "7875": 40116.0, "7880": 40732.0, "7885": 40745.0, "7890": 40050.0, "7895": 42261.0, "7900": 40137.0, "7905": 41499.0, "7910": 37906.0, "7915": 39269.0, "7920": 39361.0, "7925": 42275.0, "7930": 39255.0, "7935": 41586.0, "7940": 39337.0, "7945": 41566.0, "7950": 36614.0, "7955": 37920.0, "7960": 40046.0, "7965": 40811.0, "7970": 40879.0, "7975": 37766.0, "7980": 41493.0, "7985": 39973.0, "7990": 39392.0, "7995": 39349.0, "8000": 41507.0, "8005": 40798.0, "8010": 40731.0, "8015": 40885.0, "8020": 38647.0, "8025": 40878.0, "8030": 37826.0, "8035": 40049.0, "8040": 39232.0, "8045": 41555.0, "8050": 38596.0, "8055": 39272.0, "8060": 39292.0, "8065": 40827.0, "8070": 40808.0, "8075": 41583.0, "8080": 41564.0, "8085": 39292.0, "8090": 40819.0, "8095": 39366.0, "8100": 40733.0, "8105": 40884.0, "8110": 38677.0, "8115": 40814.0, "8120": 39358.0, "8125": 39973.0, "8130": 39282.0, "8135": 41510.0, "8140": 39974.0, "8145": 
40804.0, "8150": 41571.0, "8155": 41511.0, "8160": 39457.0, "8165": 40040.0, "8170": 38733.0, "8175": 40044.0, "8180": 37450.0, "8185": 40739.0, "8190": 39347.0, "8195": 40039.0, "8200": 40127.0, "8205": 40060.0, "8210": 39287.0, "8215": 40805.0, "8220": 38592.0, "8225": 39275.0, "8230": 40802.0, "8235": 40052.0, "8240": 39352.0, "8245": 38686.0, "8250": 41569.0, "8255": 39425.0, "8260": 40824.0, "8265": 41517.0, "8270": 42276.0, "8275": 40740.0, "8280": 40042.0, "8285": 40874.0, "8290": 40102.0, "8295": 39272.0, "8300": 40792.0, "8305": 40111.0, "8310": 39287.0, "8315": 39198.0, "8320": 41507.0, "8325": 39288.0, "8330": 40747.0, "8335": 40040.0, "8340": 40070.0, "8345": 39435.0, "8350": 40812.0, "8355": 40069.0, "8360": 40796.0, "8365": 39963.0, "8370": 39411.0, "8375": 38878.0, "8380": 40740.0, "8385": 40059.0, "8390": 40823.0, "8395": 39974.0, "8400": 40130.0, "8405": 39206.0, "8410": 41501.0, "8415": 38625.0, "8420": 40098.0, "8425": 39972.0, "8430": 37837.0, "8435": 39427.0, "8440": 37091.0, "8445": 38576.0, "8450": 39424.0, "8455": 39986.0, "8460": 38020.0, "8465": 37919.0, "8470": 38622.0, "8475": 40807.0, "8480": 40201.0, "8485": 37662.0, "8490": 41504.0, "8495": 40023.0, "8500": 39971.0, "8505": 38427.0, "8510": 41508.0, "8515": 39424.0, "8520": 39425.0, "8525": 38513.0, "8530": 40805.0, "8535": 40748.0, "8540": 40733.0, "8545": 39371.0, "8550": 40819.0, "8555": 40016.0, "8560": 40722.0, "8565": 39989.0, "8570": 39280.0, "8575": 40035.0, "8580": 40135.0, "8585": 40047.0, "8590": 40827.0, "8595": 40051.0, "8600": 40197.0, "8605": 39291.0, "8610": 40856.0, "8615": 38607.0, "8620": 40050.0, "8625": 42273.0, "8630": 40739.0, "8635": 39268.0, "8640": 40742.0, "8645": 40065.0, "8650": 37212.0, "8655": 40121.0, "8660": 38662.0, "8665": 40727.0, "8670": 40833.0, "8675": 40061.0, "8680": 38512.0, "8685": 42265.0, "8690": 37906.0, "8695": 39275.0, "8700": 38655.0, "8705": 40802.0, "8710": 40061.0, "8715": 40038.0, "8720": 41498.0, "8725": 39346.0, "8730": 40818.0, "8735": 38659.0, "8740": 39314.0, "8745": 38670.0, "8750": 38592.0, "8755": 40862.0, "8760": 40742.0, "8765": 39210.0, "8770": 41580.0, "8775": 40804.0, "8780": 40811.0, "8785": 39276.0, "8790": 41493.0, "8795": 40807.0, "8800": 41554.0, "8805": 41509.0, "8810": 39278.0, "8815": 40721.0, "8820": 39274.0, "8825": 41511.0, "8830": 42260.0, "8835": 40809.0, "8840": 39344.0, "8845": 42279.0, "8850": 41493.0, "8855": 42267.0, "8860": 39349.0, "8865": 39271.0, "8870": 41580.0, "8875": 38751.0, "8880": 38661.0, "8885": 38582.0, "8890": 39212.0, "8895": 40039.0, "8900": 40040.0, "8905": 38524.0, "8910": 40739.0, "8915": 37904.0, "8920": 39297.0, "8925": 39279.0, "8930": 42269.0, "8935": 37293.0, "8940": 41575.0, "8945": 41490.0, "8950": 40047.0, "8955": 41505.0, "8960": 41503.0, "8965": 42263.0, "8970": 37912.0, "8975": 36517.0, "8980": 40809.0, "8985": 41500.0, "8990": 40734.0, "8995": 39359.0, "9000": 40730.0, "9005": 39281.0, "9010": 36528.0, "9015": 39338.0, "9020": 40045.0, "9025": 39969.0, "9030": 40037.0, "9035": 40113.0, "9040": 40820.0, "9045": 39973.0, "9050": 40104.0, "9055": 40823.0, "9060": 38534.0, "9065": 38592.0, "9070": 38574.0, "9075": 40142.0, "9080": 40118.0, "9085": 40730.0, "9090": 40048.0, "9095": 39983.0, "9100": 38522.0, "9105": 38645.0, "9110": 40104.0, "9115": 39272.0, "9120": 34489.0, "9125": 39527.0, "9130": 42331.0, "9135": 39276.0, "9140": 39203.0, "9145": 40218.0, "9150": 39279.0, "9155": 42264.0, "9160": 40807.0, "9165": 40044.0, "9170": 40043.0, "9175": 38591.0, "9180": 39977.0, "9185": 40111.0, "9190": 
40048.0, "9195": 40052.0, "9200": 38032.0, "9205": 40736.0, "9210": 39272.0, "9215": 40747.0, "9220": 40804.0, "9225": 40809.0, "9230": 40818.0, "9235": 38856.0, "9240": 41498.0, "9245": 41578.0, "9250": 40727.0, "9255": 40110.0, "9260": 40202.0, "9265": 41500.0, "9270": 41511.0, "9275": 41566.0, "9280": 37275.0, "9285": 40953.0, "9290": 35886.0, "9295": 40815.0, "9300": 40799.0, "9305": 39428.0, "9310": 40738.0, "9315": 40805.0, "9320": 38590.0, "9325": 40123.0, "9330": 40812.0, "9335": 38530.0, "9340": 41497.0, "9345": 40785.0, "9350": 40811.0, "9355": 38447.0, "9360": 42269.0, "9365": 38692.0, "9370": 41498.0, "9375": 40813.0, "9380": 40806.0, "9385": 40740.0, "9390": 41576.0, "9395": 40876.0, "9400": 40793.0, "9405": 40040.0, "9410": 40800.0, "9415": 40060.0, "9420": 40748.0, "9425": 41495.0, "9430": 40735.0, "9435": 39429.0, "9440": 41515.0, "9445": 40804.0, "9450": 39356.0, "9455": 42336.0, "9460": 40803.0, "9465": 42267.0, "9470": 35947.0, "9475": 40804.0, "9480": 41589.0, "9485": 39402.0, "9490": 39277.0, "9495": 38581.0, "9500": 41497.0, "9505": 40047.0, "9510": 41550.0, "9515": 40096.0, "9520": 39273.0, "9525": 39499.0, "9530": 41580.0, "9535": 38676.0, "9540": 39288.0, "9545": 39300.0, "9550": 41567.0, "9555": 40746.0, "9560": 39355.0, "9565": 40055.0, "9570": 39430.0, "9575": 42265.0, "9580": 40038.0, "9585": 40054.0, "9590": 38509.0, "9595": 38661.0, "9600": 37198.0, "9605": 40810.0, "9610": 41583.0, "9615": 39984.0, "9620": 39968.0, "9625": 40811.0, "9630": 40814.0, "9635": 38441.0, "9640": 40805.0, "9645": 39957.0, "9650": 41498.0, "9655": 42273.0, "9660": 40821.0, "9665": 40067.0, "9670": 40039.0, "9675": 38717.0, "9680": 40742.0, "9685": 39986.0, "9690": 39443.0, "9695": 39275.0, "9700": 39207.0, "9705": 40885.0, "9710": 39282.0, "9715": 39336.0, "9720": 40181.0, "9725": 38750.0, "9730": 40812.0, "9735": 41593.0, "9740": 41584.0, "9745": 39361.0, "9750": 40814.0, "9755": 40890.0, "9760": 39304.0, "9765": 43035.0, "9770": 39432.0, "9775": 39973.0, "9780": 40122.0, "9785": 40886.0, "9790": 39220.0, "9795": 42280.0, "9800": 37206.0, "9805": 39987.0, "9810": 40822.0, "9815": 40803.0, "9820": 39267.0, "9825": 40798.0, "9830": 39509.0, "9835": 39269.0, "9840": 37891.0, "9845": 39961.0, "9850": 39283.0, "9855": 38546.0, "9860": 37977.0, "9865": 41503.0, "9870": 39341.0, "9875": 37138.0, "9880": 40052.0, "9885": 39229.0, "9890": 40041.0, "9895": 41554.0, "9900": 39967.0, "9905": 41582.0, "9910": 41574.0, "9915": 41493.0, "9920": 40005.0, "9925": 38665.0, "9930": 40028.0, "9935": 37892.0, "9940": 41644.0, "9945": 41501.0, "9950": 40804.0, "9955": 41507.0, "9960": 40821.0, "9965": 40824.0, "9970": 39343.0, "9975": 40737.0, "9980": 41504.0, "9985": 42274.0, "9990": 38508.0, "9995": 42274.0, "10000": 40031.0, "10005": 39334.0, "10010": 39443.0, "10015": 41489.0, "10020": 40732.0, "10025": 38516.0, "10030": 39315.0, "10035": 36376.0, "10040": 41577.0, "10045": 40052.0, "10050": 39273.0, "10055": 41510.0, "10060": 40058.0, "10065": 41484.0, "10070": 40824.0, "10075": 40814.0, "10080": 40875.0, "10085": 40123.0, "10090": 41493.0, "10095": 40744.0, "10100": 40738.0, "10105": 41504.0, "10110": 39295.0, "10115": 41636.0, "10120": 40800.0, "10125": 39344.0, "10130": 37764.0, "10135": 40048.0, "10140": 40742.0, "10145": 40735.0, "10150": 41563.0, "10155": 40113.0, "10160": 39517.0, "10165": 37944.0, "10170": 41508.0, "10175": 40825.0, "10180": 40032.0, "10185": 41496.0, "10190": 39981.0, "10195": 37894.0, "10200": 40114.0, "10205": 40888.0, "10210": 40831.0, "10215": 39431.0, "10220": 
40826.0, "10225": 37271.0, "10230": 38607.0, "10235": 40735.0, "10240": 39430.0, "10245": 40751.0, "10250": 39361.0, "10255": 40876.0, "10260": 40042.0, "10265": 37194.0, "10270": 40885.0, "10275": 41564.0, "10280": 40039.0, "10285": 40810.0, "10290": 37224.0, "10295": 40810.0, "10300": 38446.0, "10305": 39489.0, "10310": 38665.0, "10315": 40805.0, "10320": 39276.0, "10325": 37925.0, "10330": 36526.0, "10335": 38512.0, "10340": 41508.0, "10345": 40798.0, "10350": 41514.0, "10355": 39328.0, "10360": 39279.0, "10365": 37066.0, "10370": 41494.0, "10375": 37973.0, "10380": 40811.0, "10385": 40042.0, "10390": 41502.0, "10395": 39995.0, "10400": 40744.0, "10405": 35918.0, "10410": 39273.0, "10415": 40746.0, "10420": 40873.0, "10425": 40031.0, "10430": 40735.0, "10435": 40119.0, "10440": 39347.0, "10445": 38592.0, "10450": 38586.0, "10455": 39369.0, "10460": 40025.0, "10465": 41587.0, "10470": 37976.0, "10475": 41578.0, "10480": 39260.0, "10485": 40794.0, "10490": 42350.0, "10495": 39298.0, "10500": 39354.0, "10505": 40041.0, "10510": 40186.0, "10515": 37679.0, "10520": 39284.0, "10525": 40099.0, "10530": 40050.0, "10535": 40036.0, "10540": 39211.0, "10545": 40796.0, "10550": 40803.0, "10555": 39353.0, "10560": 40729.0, "10565": 39329.0, "10570": 39987.0, "10575": 37336.0, "10580": 39429.0, "10585": 40033.0, "10590": 39282.0, "10595": 39285.0, "10600": 41506.0, "10605": 40047.0, "10610": 37274.0, "10615": 40733.0, "10620": 38741.0, "10625": 38491.0, "10630": 40819.0, "10635": 40742.0, "10640": 39355.0, "10645": 39328.0, "10650": 40026.0, "10655": 40052.0, "10660": 39263.0, "10665": 35687.0, "10670": 36913.0, "10675": 40735.0, "10680": 40820.0, "10685": 38566.0, "10690": 39356.0, "10695": 38731.0, "10700": 35983.0, "10705": 41509.0, "10710": 40137.0, "10715": 39334.0, "10720": 39219.0, "10725": 39455.0, "10730": 40137.0, "10735": 40804.0, "10740": 39374.0, "10745": 40129.0, "10750": 40821.0, "10755": 42258.0, "10760": 39440.0, "10765": 40829.0, "10770": 40792.0, "10775": 41502.0, "10780": 40040.0, "10785": 41504.0, "10790": 41506.0, "10795": 39308.0, "10800": 40046.0, "10805": 39330.0, "10810": 40048.0, "10815": 36498.0, "10820": 42276.0, "10825": 39363.0, "10830": 40039.0, "10835": 37962.0, "10840": 40107.0, "10845": 41510.0, "10850": 41585.0, "10855": 41506.0, "10860": 41511.0, "10865": 41564.0, "10870": 36676.0, "10875": 42266.0, "10880": 41505.0, "10885": 41497.0, "10890": 40826.0, "10895": 40749.0, "10900": 41511.0, "10905": 40831.0, "10910": 40810.0, "10915": 41505.0, "10920": 40026.0, "10925": 41499.0, "10930": 40120.0, "10935": 39423.0, "10940": 40740.0, "10945": 39285.0, "10950": 41561.0, "10955": 38601.0, "10960": 39275.0, "10965": 41510.0, "10970": 39273.0, "10975": 39383.0, "10980": 40191.0, "10985": 40044.0, "10990": 40805.0, "10995": 40035.0, "11000": 37944.0, "11005": 38082.0, "11010": 40787.0, "11015": 40035.0, "11020": 40036.0, "11025": 40819.0, "11030": 40820.0, "11035": 41507.0, "11040": 40059.0, "11045": 40813.0, "11050": 40747.0, "11055": 40807.0, "11060": 38600.0, "11065": 40029.0, "11070": 40049.0, "11075": 40827.0, "11080": 40815.0, "11085": 41564.0, "11090": 40738.0, "11095": 39271.0, "11100": 41511.0, "11105": 41502.0, "11110": 38520.0, "11115": 38684.0, "11120": 38573.0, "11125": 41501.0, "11130": 40823.0, "11135": 38718.0, "11140": 39973.0, "11145": 40748.0, "11150": 40108.0, "11155": 41573.0, "11160": 41511.0, "11165": 38575.0, "11170": 38525.0, "11175": 39992.0, "11180": 40735.0, "11185": 38738.0, "11190": 39282.0, "11195": 40812.0, "11200": 37052.0, "11205": 37007.0, 
"11210": 39288.0, "11215": 40819.0, "11220": 40131.0, "11225": 40824.0, "11230": 41560.0, "11235": 41582.0, "11240": 40745.0, "11245": 40880.0, "11250": 37770.0, "11255": 40153.0, "11260": 39350.0, "11265": 38737.0, "11270": 40130.0, "11275": 40816.0, "11280": 40148.0, "11285": 40265.0, "11290": 40053.0, "11295": 39982.0, "11300": 40814.0, "11305": 40749.0, "11310": 41511.0, "11315": 38545.0, "11320": 39278.0, "11325": 40144.0, "11330": 38645.0, "11335": 41496.0, "11340": 39441.0, "11345": 37958.0, "11350": 39348.0, "11355": 40029.0, "11360": 40744.0, "11365": 38686.0, "11370": 42267.0, "11375": 39279.0, "11380": 39271.0, "11385": 40055.0, "11390": 41516.0, "11395": 38653.0, "11400": 39355.0, "11405": 40816.0, "11410": 38618.0, "11415": 36528.0, "11420": 39366.0, "11425": 37285.0, "11430": 40113.0, "11435": 41596.0, "11440": 41497.0, "11445": 39982.0, "11450": 40796.0, "11455": 39279.0, "11460": 40815.0, "11465": 40738.0, "11470": 37425.0, "11475": 39400.0, "11480": 37851.0, "11485": 39985.0, "11490": 40721.0, "11495": 40039.0, "11500": 38666.0, "11505": 42262.0, "11510": 39248.0, "11515": 40023.0, "11520": 40728.0, "11525": 41497.0, "11530": 40124.0, "11535": 40801.0, "11540": 40794.0, "11545": 40743.0, "11550": 39287.0, "11555": 36984.0, "11560": 41559.0, "11565": 39276.0, "11570": 40040.0, "11575": 39253.0, "11580": 40741.0, "11585": 38724.0, "11590": 40822.0, "11595": 41504.0, "11600": 40890.0, "11605": 38545.0, "11610": 38670.0, "11615": 38593.0, "11620": 39528.0, "11625": 41516.0, "11630": 41505.0, "11635": 42319.0, "11640": 40043.0, "11645": 40728.0, "11650": 40043.0, "11655": 40024.0, "11660": 37846.0, "11665": 40123.0, "11670": 40807.0, "11675": 41584.0, "11680": 42274.0, "11685": 39197.0, "11690": 41494.0, "11695": 40046.0, "11700": 41501.0, "11705": 40106.0, "11710": 41498.0, "11715": 39283.0, "11720": 38076.0, "11725": 40790.0, "11730": 40731.0, "11735": 39284.0, "11740": 39988.0, "11745": 38605.0, "11750": 38678.0, "11755": 38527.0, "11760": 41500.0, "11765": 40129.0, "11770": 39339.0, "11775": 40830.0, "11780": 39344.0, "11785": 40050.0, "11790": 40799.0, "11795": 38682.0, "11800": 40027.0, "11805": 38649.0, "11810": 40803.0, "11815": 40727.0, "11820": 42267.0, "11825": 40888.0, "11830": 41580.0, "11835": 40813.0, "11840": 39284.0, "11845": 41503.0, "11850": 40733.0, "11855": 38710.0, "11860": 41497.0, "11865": 38429.0, "11870": 40727.0, "11875": 41499.0, "11880": 40111.0, "11885": 40813.0, "11890": 40108.0, "11895": 38650.0, "11900": 38608.0, "11905": 39355.0, "11910": 40879.0, "11915": 40826.0, "11920": 40753.0, "11925": 41583.0, "11930": 40875.0, "11935": 41494.0, "11940": 39279.0, "11945": 40813.0, "11950": 40733.0, "11955": 38577.0, "11960": 40126.0, "11965": 40822.0, "11970": 41590.0, "11975": 39287.0, "11980": 40809.0, "11985": 41581.0, "11990": 41509.0, "11995": 40058.0, "12000": 38511.0, "12005": 41506.0, "12010": 40039.0, "12015": 41504.0, "12020": 40043.0, "12025": 39273.0, "12030": 40123.0, "12035": 38591.0, "12040": 40042.0, "12045": 40877.0, "12050": 39356.0, "12055": 39287.0, "12060": 40799.0, "12065": 39291.0, "12070": 39294.0, "12075": 40805.0, "12080": 39278.0, "12085": 40035.0, "12090": 40739.0, "12095": 40033.0, "12100": 40133.0, "12105": 37973.0, "12110": 39344.0, "12115": 40880.0, "12120": 40729.0, "12125": 38436.0, "12130": 38661.0, "12135": 40780.0, "12140": 40108.0, "12145": 40116.0, "12150": 40038.0, "12155": 40030.0, "12160": 39958.0, "12165": 40124.0, "12170": 40050.0, "12175": 42259.0, "12180": 40829.0, "12185": 40802.0, "12190": 40816.0, "12195": 
37966.0, "12200": 39983.0, "12205": 39966.0, "12210": 42337.0, "12215": 40885.0, "12220": 39218.0, "12225": 39290.0, "12230": 41503.0, "12235": 40194.0, "12240": 40041.0, "12245": 38678.0, "12250": 40814.0, "12255": 41496.0, "12260": 40135.0, "12265": 42272.0, "12270": 37304.0, "12275": 40038.0, "12280": 39973.0, "12285": 40814.0, "12290": 40049.0, "12295": 40805.0, "12300": 40871.0, "12305": 42268.0, "12310": 40828.0, "12315": 39438.0, "12320": 40806.0, "12325": 36603.0, "12330": 39340.0, "12335": 39567.0, "12340": 40197.0, "12345": 40802.0, "12350": 37813.0, "12355": 39495.0, "12360": 42276.0, "12365": 40730.0, "12370": 38667.0, "12375": 40128.0, "12380": 40805.0, "12385": 40044.0, "12390": 38455.0, "12395": 39970.0, "12400": 39302.0, "12405": 39966.0, "12410": 39973.0, "12415": 40805.0, "12420": 39342.0, "12425": 39984.0, "12430": 40784.0, "12435": 40798.0, "12440": 39292.0, "12445": 40803.0, "12450": 40109.0, "12455": 40135.0, "12460": 36307.0, "12465": 36599.0, "12470": 39216.0, "12475": 39963.0, "12480": 39986.0, "12485": 37394.0, "12490": 40033.0, "12495": 40767.0, "12500": 37838.0, "12505": 40813.0, "12510": 39204.0, "12515": 38600.0, "12520": 38524.0, "12525": 37221.0, "12530": 37979.0, "12535": 41508.0, "12540": 39326.0, "12545": 38599.0, "12550": 40050.0, "12555": 40046.0, "12560": 40048.0, "12565": 38102.0, "12570": 39969.0, "12575": 39969.0, "12580": 40027.0, "12585": 41509.0, "12590": 40046.0, "12595": 41501.0, "12600": 39369.0, "12605": 40068.0, "12610": 40217.0, "12615": 40841.0, "12620": 41502.0, "12625": 40812.0, "12630": 38524.0, "12635": 40127.0, "12640": 33835.0, "12645": 40187.0, "12650": 40041.0, "12655": 40117.0, "12660": 40820.0, "12665": 40879.0, "12670": 40877.0, "12675": 41590.0, "12680": 39217.0, "12685": 41492.0, "12690": 39286.0, "12695": 41498.0, "12700": 40093.0, "12705": 40822.0, "12710": 39436.0, "12715": 38440.0, "12720": 40200.0, "12725": 40799.0, "12730": 37297.0, "12735": 40801.0, "12740": 40874.0, "12745": 40837.0, "12750": 40813.0, "12755": 41571.0, "12760": 40116.0, "12765": 38577.0, "12770": 42284.0, "12775": 40050.0, "12780": 40036.0, "12785": 40731.0, "12790": 40125.0, "12795": 39349.0, "12800": 40113.0, "12805": 38098.0, "12810": 40127.0, "12815": 40741.0, "12820": 40826.0, "12825": 41509.0, "12830": 40819.0, "12835": 40200.0, "12840": 41502.0, "12845": 39330.0, "12850": 40810.0, "12855": 38675.0, "12860": 40822.0, "12865": 39971.0, "12870": 41513.0, "12875": 40039.0, "12880": 40040.0, "12885": 40755.0, "12890": 41563.0, "12895": 39970.0, "12900": 38672.0, "12905": 40743.0, "12910": 39435.0, "12915": 39305.0, "12920": 40792.0, "12925": 40810.0, "12930": 41506.0, "12935": 40130.0, "12940": 40823.0, "12945": 40737.0, "12950": 36543.0, "12955": 40912.0, "12960": 38501.0, "12965": 38516.0, "12970": 40889.0, "12975": 41597.0, "12980": 39362.0, "12985": 40747.0, "12990": 39213.0, "12995": 41574.0, "13000": 41569.0, "13005": 41499.0, "13010": 39441.0, "13015": 39223.0, "13020": 40105.0, "13025": 41582.0, "13030": 40039.0, "13035": 41488.0, "13040": 37349.0, "13045": 41508.0, "13050": 40117.0, "13055": 40034.0, "13060": 40133.0, "13065": 40756.0, "13070": 39325.0, "13075": 42339.0, "13080": 40111.0, "13085": 38669.0, "13090": 41500.0, "13095": 37207.0, "13100": 40870.0, "13105": 40053.0, "13110": 40041.0, "13115": 40739.0, "13120": 37290.0, "13125": 40063.0, "13130": 40887.0, "13135": 40819.0, "13140": 38660.0, "13145": 40125.0, "13150": 40039.0, "13155": 37216.0, "13160": 38538.0, "13165": 39393.0, "13170": 41503.0, "13175": 40801.0, "13180": 40043.0, 
"13185": 38601.0, "13190": 40810.0, "13195": 38662.0, "13200": 40728.0, "13205": 41514.0, "13210": 40742.0, "13215": 37470.0, "13220": 38618.0, "13225": 39970.0, "13230": 39373.0, "13235": 39302.0, "13240": 39198.0, "13245": 39968.0, "13250": 40801.0, "13255": 40107.0, "13260": 40107.0, "13265": 42271.0, "13270": 39352.0, "13275": 40039.0, "13280": 40126.0, "13285": 39281.0, "13290": 40133.0, "13295": 39275.0, "13300": 40050.0, "13305": 39954.0, "13310": 40043.0, "13315": 40805.0, "13320": 40055.0, "13325": 40884.0, "13330": 40816.0, "13335": 39974.0, "13340": 41567.0, "13345": 40048.0, "13350": 42268.0, "13355": 41570.0, "13360": 37978.0, "13365": 40053.0, "13370": 41493.0, "13375": 39272.0, "13380": 40044.0, "13385": 40813.0, "13390": 40834.0, "13395": 38636.0, "13400": 37894.0, "13405": 39222.0, "13410": 40798.0, "13415": 41508.0, "13420": 42268.0, "13425": 38529.0, "13430": 39260.0, "13435": 39433.0, "13440": 40806.0, "13445": 40799.0, "13450": 40736.0, "13455": 40724.0, "13460": 39363.0, "13465": 41495.0, "13470": 39311.0, "13475": 38520.0, "13480": 40743.0, "13485": 39215.0, "13490": 39970.0, "13495": 40050.0, "13500": 40732.0, "13505": 40807.0, "13510": 39275.0, "13515": 39349.0, "13520": 39343.0, "13525": 40108.0, "13530": 37430.0, "13535": 40804.0, "13540": 39270.0, "13545": 37117.0, "13550": 40036.0, "13555": 38578.0, "13560": 40735.0, "13565": 40736.0, "13570": 39992.0, "13575": 37923.0, "13580": 42265.0, "13585": 39969.0, "13590": 40749.0, "13595": 40040.0, "13600": 40136.0, "13605": 35298.0, "13610": 42273.0, "13615": 35804.0, "13620": 37230.0, "13625": 36371.0, "13630": 40027.0, "13635": 40730.0, "13640": 40738.0, "13645": 40821.0, "13650": 40812.0, "13655": 39271.0, "13660": 40052.0, "13665": 38667.0, "13670": 40808.0, "13675": 41509.0, "13680": 41499.0, "13685": 38586.0, "13690": 39428.0, "13695": 39383.0, "13700": 40033.0, "13705": 40059.0, "13710": 41513.0, "13715": 40045.0, "13720": 38657.0, "13725": 37840.0, "13730": 42263.0, "13735": 40173.0, "13740": 41513.0, "13745": 40745.0, "13750": 38529.0, "13755": 40028.0, "13760": 40204.0, "13765": 39971.0, "13770": 39976.0, "13775": 40746.0, "13780": 41575.0, "13785": 38599.0, "13790": 39283.0, "13795": 40736.0, "13800": 39978.0, "13805": 40732.0, "13810": 40880.0, "13815": 37194.0, "13820": 39261.0, "13825": 40804.0, "13830": 41592.0, "13835": 40140.0, "13840": 40737.0, "13845": 38518.0, "13850": 38003.0, "13855": 40737.0, "13860": 39417.0, "13865": 40736.0, "13870": 41500.0, "13875": 39266.0, "13880": 39264.0, "13885": 38528.0, "13890": 38068.0, "13895": 42274.0, "13900": 40025.0, "13905": 37760.0, "13910": 39426.0, "13915": 41589.0, "13920": 39295.0, "13925": 41572.0, "13930": 40038.0, "13935": 38536.0, "13940": 41569.0, "13945": 39965.0, "13950": 40740.0, "13955": 39437.0, "13960": 42267.0, "13965": 40050.0, "13970": 37829.0, "13975": 39343.0, "13980": 38510.0, "13985": 41506.0, "13990": 41506.0, "13995": 40890.0, "14000": 40727.0, "14005": 40092.0, "14010": 40814.0, "14015": 39989.0, "14020": 41586.0, "14025": 40753.0, "14030": 41578.0, "14035": 40811.0, "14040": 39448.0, "14045": 38652.0, "14050": 40819.0, "14055": 39344.0, "14060": 38683.0, "14065": 41589.0, "14070": 40820.0, "14075": 40793.0, "14080": 39965.0, "14085": 41593.0, "14090": 41581.0, "14095": 40217.0, "14100": 37764.0, "14105": 39964.0, "14110": 40743.0, "14115": 40148.0, "14120": 38640.0, "14125": 40801.0, "14130": 38667.0, "14135": 40800.0, "14140": 42287.0, "14145": 39979.0, "14150": 39451.0, "14155": 41504.0, "14160": 39444.0, "14165": 38601.0, "14170": 
41505.0, "14175": 40046.0, "14180": 40864.0, "14185": 40119.0, "14190": 39370.0, "14195": 38574.0, "14200": 38646.0, "14205": 39200.0, "14210": 39277.0, "14215": 38531.0, "14220": 40051.0, "14225": 42266.0, "14230": 38648.0, "14235": 40895.0, "14240": 39980.0, "14245": 39359.0, "14250": 39223.0, "14255": 38657.0, "14260": 38642.0, "14265": 40052.0, "14270": 41592.0, "14275": 40800.0, "14280": 41561.0, "14285": 39203.0, "14290": 42265.0, "14295": 40798.0, "14300": 38611.0, "14305": 41585.0, "14310": 41579.0, "14315": 40131.0, "14320": 38668.0, "14325": 40879.0, "14330": 39974.0, "14335": 40818.0, "14340": 40039.0, "14345": 40046.0, "14350": 39297.0, "14355": 40044.0, "14360": 39293.0, "14365": 39211.0, "14370": 41588.0, "14375": 41504.0, "14380": 40726.0, "14385": 36653.0, "14390": 40740.0, "14395": 39302.0, "14400": 39421.0, "14405": 38592.0, "14410": 40781.0, "14415": 40048.0, "14420": 40810.0, "14425": 41568.0, "14430": 41497.0, "14435": 41563.0, "14440": 37840.0, "14445": 39977.0, "14450": 40129.0, "14455": 39445.0, "14460": 40145.0, "14465": 41501.0, "14470": 40740.0, "14475": 40055.0, "14480": 39437.0, "14485": 41570.0, "14490": 38663.0, "14495": 41493.0, "14500": 40035.0, "14505": 39968.0, "14510": 38525.0, "14515": 39200.0, "14520": 42265.0, "14525": 40797.0, "14530": 39205.0, "14535": 41515.0, "14540": 35164.0, "14545": 40802.0, "14550": 40744.0, "14555": 40739.0, "14560": 38662.0, "14565": 37215.0, "14570": 39295.0, "14575": 39203.0, "14580": 39273.0, "14585": 40123.0, "14590": 40731.0, "14595": 40791.0, "14600": 39355.0, "14605": 40877.0, "14610": 40792.0, "14615": 38596.0, "14620": 39425.0, "14625": 40727.0, "14630": 41519.0, "14635": 40051.0, "14640": 39344.0, "14645": 38706.0, "14650": 41585.0, "14655": 40036.0, "14660": 39440.0, "14665": 40131.0, "14670": 41577.0, "14675": 38452.0, "14680": 39359.0, "14685": 38672.0, "14690": 40178.0, "14695": 39964.0, "14700": 39344.0, "14705": 40806.0, "14710": 42265.0, "14715": 40125.0, "14720": 40210.0, "14725": 40812.0, "14730": 42350.0, "14735": 38595.0, "14740": 40739.0, "14745": 40842.0, "14750": 39290.0, "14755": 41597.0, "14760": 39354.0, "14765": 40048.0, "14770": 40109.0, "14775": 40010.0, "14780": 41564.0, "14785": 38585.0, "14790": 41564.0, "14795": 39366.0, "14800": 38514.0, "14805": 41560.0, "14810": 42266.0, "14815": 40121.0, "14820": 38530.0, "14825": 42283.0, "14830": 40812.0, "14835": 41551.0, "14840": 41579.0, "14845": 38778.0, "14850": 42271.0, "14855": 39278.0, "14860": 39333.0, "14865": 40046.0, "14870": 40814.0, "14875": 40824.0, "14880": 40036.0, "14885": 39344.0, "14890": 40733.0, "14895": 40054.0, "14900": 39207.0, "14905": 40802.0, "14910": 40831.0, "14915": 39428.0, "14920": 40037.0, "14925": 36546.0, "14930": 40734.0, "14935": 38070.0, "14940": 40053.0, "14945": 40137.0, "14950": 40730.0, "14955": 42263.0, "14960": 38520.0, "14965": 40056.0, "14970": 41505.0, "14975": 40131.0, "14980": 40020.0, "14985": 38574.0, "14990": 40112.0, "14995": 41577.0, "15000": 40738.0, "15005": 40039.0, "15010": 40060.0, "15015": 41499.0, "15020": 40810.0, "15025": 40023.0, "15030": 39272.0, "15035": 39984.0, "15040": 40809.0, "15045": 40930.0, "15050": 38441.0, "15055": 42260.0, "15060": 41499.0, "15065": 39974.0, "15070": 41586.0, "15075": 41505.0, "15080": 36419.0, "15085": 38599.0, "15090": 40040.0, "15095": 41577.0, "15100": 40040.0, "15105": 40742.0, "15110": 40733.0, "15115": 41495.0, "15120": 37971.0, "15125": 39311.0, "15130": 39580.0, "15135": 38514.0, "15140": 40732.0, "15145": 38461.0, "15150": 40816.0, "15155": 40736.0, 
"15160": 40812.0, "15165": 40032.0, "15170": 42265.0, "15175": 40873.0, "15180": 39352.0, "15185": 40809.0, "15190": 41573.0, "15195": 39450.0, "15200": 39214.0, "15205": 41508.0, "15210": 41583.0, "15215": 38620.0, "15220": 40812.0, "15225": 40050.0, "15230": 40739.0, "15235": 40032.0, "15240": 42266.0, "15245": 40821.0, "15250": 40041.0, "15255": 41508.0, "15260": 37833.0, "15265": 38820.0, "15270": 41510.0, "15275": 38643.0, "15280": 40199.0, "15285": 39356.0, "15290": 41562.0, "15295": 37720.0, "15300": 40909.0, "15305": 39330.0, "15310": 40813.0, "15315": 40810.0, "15320": 40825.0, "15325": 40827.0, "15330": 41507.0, "15335": 40732.0, "15340": 40811.0, "15345": 40811.0, "15350": 40048.0, "15355": 42342.0, "15360": 39349.0, "15365": 39448.0, "15370": 41499.0, "15375": 42261.0, "15380": 41566.0, "15385": 39370.0, "15390": 39283.0, "15395": 37919.0, "15400": 37207.0, "15405": 40743.0, "15410": 40063.0, "15415": 40270.0, "15420": 37395.0, "15425": 40738.0, "15430": 40743.0, "15435": 39433.0, "15440": 38436.0, "15445": 41495.0, "15450": 39980.0, "15455": 40805.0, "15460": 41655.0, "15465": 40735.0, "15470": 39357.0, "15475": 40728.0, "15480": 37901.0, "15485": 39967.0, "15490": 39367.0, "15495": 40739.0, "15500": 37951.0, "15505": 40039.0, "15510": 40057.0, "15515": 42269.0, "15520": 40813.0, "15525": 38671.0, "15530": 40733.0, "15535": 38698.0, "15540": 42355.0, "15545": 39389.0, "15550": 38534.0, "15555": 40746.0, "15560": 40889.0, "15565": 38589.0, "15570": 38450.0, "15575": 38529.0, "15580": 40741.0, "15585": 40813.0, "15590": 39368.0, "15595": 39359.0, "15600": 40055.0, "15605": 38723.0, "15610": 39269.0, "15615": 40739.0, "15620": 40892.0, "15625": 39993.0, "15630": 40903.0, "15635": 41590.0, "15640": 38075.0, "15645": 39497.0, "15650": 39973.0, "15655": 39428.0, "15660": 37106.0, "15665": 40052.0, "15670": 38629.0, "15675": 40809.0, "15680": 40040.0, "15685": 40799.0, "15690": 40805.0, "15695": 40131.0, "15700": 40804.0, "15705": 40120.0, "15710": 39426.0, "15715": 40038.0, "15720": 40746.0, "15725": 40746.0, "15730": 38637.0, "15735": 38754.0, "15740": 41510.0, "15745": 40736.0, "15750": 40815.0, "15755": 40043.0, "15760": 40798.0, "15765": 39339.0, "15770": 40045.0, "15775": 41581.0, "15780": 41498.0, "15785": 42256.0, "15790": 40752.0, "15795": 40053.0, "15800": 40131.0, "15805": 38608.0, "15810": 39274.0, "15815": 38659.0, "15820": 37298.0, "15825": 39357.0, "15830": 38520.0, "15835": 40195.0, "15840": 39967.0, "15845": 37271.0, "15850": 41583.0, "15855": 41563.0, "15860": 39440.0, "15865": 40199.0, "15870": 40732.0, "15875": 40818.0, "15880": 39503.0, "15885": 40790.0, "15890": 39368.0, "15895": 40743.0, "15900": 40807.0, "15905": 40804.0, "15910": 39977.0, "15915": 38586.0, "15920": 41585.0, "15925": 40803.0, "15930": 41501.0, "15935": 39259.0, "15940": 39429.0, "15945": 40823.0, "15950": 38597.0, "15955": 39281.0, "15960": 39307.0, "15965": 40799.0, "15970": 39367.0, "15975": 39300.0, "15980": 41503.0, "15985": 38638.0, "15990": 37201.0, "15995": 40739.0, "16000": 41578.0, "16005": 40056.0, "16010": 40755.0, "16015": 39416.0, "16020": 38676.0, "16025": 40729.0, "16030": 37350.0, "16035": 37957.0, "16040": 41497.0, "16045": 37964.0, "16050": 39436.0, "16055": 41506.0, "16060": 41506.0, "16065": 41570.0, "16070": 42354.0, "16075": 38750.0, "16080": 38447.0, "16085": 40800.0, "16090": 40801.0, "16095": 39973.0, "16100": 39292.0, "16105": 40815.0, "16110": 37814.0, "16115": 39285.0, "16120": 39977.0, "16125": 40115.0, "16130": 41500.0, "16135": 40738.0, "16140": 39367.0, "16145": 
39361.0, "16150": 40736.0, "16155": 40821.0, "16160": 40029.0, "16165": 38063.0, "16170": 40824.0, "16175": 38668.0, "16180": 40039.0, "16185": 41499.0, "16190": 37958.0, "16195": 41506.0, "16200": 41577.0, "16205": 40033.0, "16210": 40731.0, "16215": 38663.0, "16220": 40131.0, "16225": 39979.0, "16230": 40798.0, "16235": 40807.0, "16240": 37810.0, "16245": 40748.0, "16250": 41487.0, "16255": 39962.0, "16260": 40048.0, "16265": 40040.0, "16270": 40723.0, "16275": 41501.0, "16280": 39357.0, "16285": 37950.0, "16290": 41581.0, "16295": 41507.0, "16300": 40741.0, "16305": 39309.0, "16310": 39271.0, "16315": 40055.0, "16320": 40046.0, "16325": 40824.0, "16330": 40050.0, "16335": 38757.0, "16340": 40812.0, "16345": 38635.0, "16350": 42279.0, "16355": 40810.0, "16360": 40113.0, "16365": 40899.0, "16370": 37829.0, "16375": 40900.0, "16380": 39972.0, "16385": 39292.0, "16390": 38817.0, "16395": 40105.0, "16400": 39403.0, "16405": 38602.0, "16410": 41490.0, "16415": 37744.0, "16420": 39970.0, "16425": 41574.0, "16430": 40809.0, "16435": 36437.0, "16440": 39398.0, "16445": 38665.0, "16450": 38105.0, "16455": 41502.0, "16460": 41582.0, "16465": 40807.0, "16470": 40838.0, "16475": 42276.0, "16480": 40799.0, "16485": 40113.0, "16490": 40047.0, "16495": 37076.0, "16500": 39366.0, "16505": 40813.0, "16510": 37272.0, "16515": 42350.0, "16520": 37409.0, "16525": 40100.0, "16530": 40044.0, "16535": 40752.0, "16540": 40052.0, "16545": 39282.0, "16550": 40730.0, "16555": 39367.0, "16560": 40062.0, "16565": 40118.0, "16570": 38655.0, "16575": 38440.0, "16580": 39280.0, "16585": 40785.0, "16590": 40827.0, "16595": 40738.0, "16600": 40047.0, "16605": 41517.0, "16610": 40820.0, "16615": 41500.0, "16620": 39230.0, "16625": 40748.0, "16630": 42264.0, "16635": 40047.0, "16640": 40042.0, "16645": 37825.0, "16650": 39269.0, "16655": 41555.0, "16660": 40807.0, "16665": 40811.0, "16670": 40117.0, "16675": 41580.0, "16680": 40815.0, "16685": 40047.0, "16690": 41502.0, "16695": 40799.0, "16700": 38584.0, "16705": 38737.0, "16710": 41514.0, "16715": 40054.0, "16720": 40044.0, "16725": 39196.0, "16730": 39279.0, "16735": 40041.0, "16740": 39455.0, "16745": 38538.0, "16750": 40731.0, "16755": 40885.0, "16760": 37979.0, "16765": 39212.0, "16770": 40043.0, "16775": 38593.0, "16780": 39426.0, "16785": 40733.0, "16790": 40834.0, "16795": 39230.0, "16800": 39295.0, "16805": 40047.0, "16810": 39965.0, "16815": 39336.0, "16820": 40731.0, "16825": 40057.0, "16830": 37210.0, "16835": 40046.0, "16840": 39444.0, "16845": 37117.0, "16850": 40840.0, "16855": 41503.0, "16860": 39275.0, "16865": 40729.0, "16870": 40041.0, "16875": 40111.0, "16880": 40741.0, "16885": 40815.0, "16890": 38599.0, "16895": 42263.0, "16900": 39214.0, "16905": 38498.0, "16910": 38589.0, "16915": 41595.0, "16920": 40049.0, "16925": 42256.0, "16930": 40814.0, "16935": 40745.0, "16940": 37686.0, "16945": 39965.0, "16950": 38593.0, "16955": 40066.0, "16960": 43097.0, "16965": 40057.0, "16970": 40046.0, "16975": 41508.0, "16980": 37141.0, "16985": 40742.0, "16990": 40105.0, "16995": 41581.0, "17000": 40804.0, "17005": 41650.0, "17010": 40039.0, "17015": 37963.0, "17020": 40741.0, "17025": 37736.0, "17030": 39370.0, "17035": 39339.0, "17040": 40723.0, "17045": 37947.0, "17050": 13805.0, "17055": 38510.0, "17060": 37987.0, "17065": 39280.0, "17070": 41499.0, "17075": 40809.0, "17080": 36933.0, "17085": 42261.0, "17090": 39261.0, "17095": 40183.0, "17100": 40057.0, "17105": 40826.0, "17110": 39448.0, "17115": 40035.0, "17120": 39296.0, "17125": 41568.0, "17130": 40055.0, 
"17135": 38793.0, "17140": 38040.0, "17145": 40747.0, "17150": 40733.0, "17155": 40815.0, "17160": 39495.0, "17165": 40734.0, "17170": 41507.0, "17175": 40045.0, "17180": 40801.0, "17185": 40822.0, "17190": 37972.0, "17195": 40813.0, "17200": 40110.0, "17205": 40041.0, "17210": 40823.0, "17215": 41519.0, "17220": 40012.0, "17225": 40742.0, "17230": 40037.0, "17235": 40035.0, "17240": 39363.0, "17245": 40908.0, "17250": 39404.0, "17255": 40736.0, "17260": 40887.0, "17265": 40056.0, "17270": 39966.0, "17275": 41520.0, "17280": 39381.0, "17285": 40750.0, "17290": 38598.0, "17295": 42266.0, "17300": 37914.0, "17305": 41518.0, "17310": 40190.0, "17315": 40127.0, "17320": 42276.0, "17325": 40045.0, "17330": 39973.0, "17335": 40115.0, "17340": 39353.0, "17345": 40793.0, "17350": 40730.0, "17355": 38659.0, "17360": 39985.0, "17365": 38594.0, "17370": 40805.0, "17375": 38521.0, "17380": 35645.0, "17385": 40052.0, "17390": 35215.0, "17395": 39295.0, "17400": 40031.0, "17405": 39358.0, "17410": 38683.0, "17415": 41498.0, "17420": 40735.0, "17425": 39978.0, "17430": 41505.0, "17435": 40802.0, "17440": 40808.0, "17445": 40040.0, "17450": 40801.0, "17455": 40801.0, "17460": 40048.0, "17465": 38730.0, "17470": 39272.0, "17475": 40034.0, "17480": 40058.0, "17485": 40203.0, "17490": 40055.0, "17495": 39965.0, "17500": 36671.0, "17505": 38584.0, "17510": 40891.0, "17515": 38528.0, "17520": 41573.0, "17525": 40827.0, "17530": 40745.0, "17535": 41501.0, "17540": 41507.0, "17545": 42270.0, "17550": 39337.0, "17555": 39280.0, "17560": 41511.0, "17565": 39409.0, "17570": 40841.0, "17575": 39368.0, "17580": 40739.0, "17585": 39356.0, "17590": 39352.0, "17595": 40060.0, "17600": 37265.0, "17605": 36540.0, "17610": 40147.0, "17615": 40113.0, "17620": 40810.0, "17625": 40801.0, "17630": 39354.0, "17635": 40734.0, "17640": 40143.0, "17645": 40882.0, "17650": 38654.0, "17655": 41582.0, "17660": 39424.0, "17665": 40044.0, "17670": 42278.0, "17675": 40896.0, "17680": 38072.0, "17685": 40802.0, "17690": 40729.0, "17695": 40809.0, "17700": 39371.0, "17705": 40810.0, "17710": 41575.0, "17715": 42282.0, "17720": 40759.0, "17725": 40820.0, "17730": 39968.0, "17735": 41513.0, "17740": 42275.0, "17745": 39304.0, "17750": 40747.0, "17755": 42349.0, "17760": 38635.0, "17765": 40031.0, "17770": 41497.0, "17775": 40722.0, "17780": 40741.0, "17785": 40749.0, "17790": 42264.0, "17795": 38559.0, "17800": 40112.0, "17805": 40741.0, "17810": 41504.0, "17815": 37773.0, "17820": 40818.0, "17825": 40130.0, "17830": 40082.0, "17835": 39971.0, "17840": 40131.0, "17845": 41490.0, "17850": 39277.0, "17855": 38536.0, "17860": 40098.0, "17865": 39400.0, "17870": 40056.0, "17875": 38509.0, "17880": 40723.0, "17885": 38671.0, "17890": 40795.0, "17895": 39292.0, "17900": 40070.0, "17905": 41507.0, "17910": 40736.0, "17915": 40011.0, "17920": 39266.0, "17925": 41505.0, "17930": 40041.0, "17935": 39963.0, "17940": 40042.0, "17945": 39966.0, "17950": 39993.0, "17955": 39322.0, "17960": 39967.0, "17965": 38603.0, "17970": 41495.0, "17975": 40184.0, "17980": 40805.0, "17985": 40040.0, "17990": 40054.0, "17995": 36516.0, "18000": 40062.0, "18005": 40038.0, "18010": 39211.0, "18015": 38649.0, "18020": 39344.0, "18025": 39337.0, "18030": 40804.0, "18035": 40124.0, "18040": 40198.0, "18045": 39362.0, "18050": 40731.0, "18055": 40042.0, "18060": 40734.0, "18065": 40069.0, "18070": 42271.0, "18075": 37216.0, "18080": 40829.0, "18085": 40112.0, "18090": 41503.0, "18095": 40050.0, "18100": 36499.0, "18105": 40729.0, "18110": 42267.0, "18115": 40739.0, "18120": 
38644.0, "18125": 40804.0, "18130": 43032.0, "18135": 40732.0, "18140": 39342.0, "18145": 40036.0, "18150": 40742.0, "18155": 40051.0, "18160": 40823.0, "18165": 41564.0, "18170": 41500.0, "18175": 40744.0, "18180": 39221.0, "18185": 37974.0, "18190": 40739.0, "18195": 39438.0, "18200": 40795.0, "18205": 40806.0, "18210": 37882.0, "18215": 40883.0, "18220": 39357.0, "18225": 39501.0, "18230": 41501.0, "18235": 40034.0, "18240": 41505.0, "18245": 40730.0, "18250": 40132.0, "18255": 41509.0, "18260": 39228.0, "18265": 38558.0, "18270": 38675.0, "18275": 41570.0, "18280": 39281.0, "18285": 40799.0, "18290": 39971.0, "18295": 40910.0, "18300": 39361.0, "18305": 41583.0, "18310": 37811.0, "18315": 38576.0, "18320": 40742.0, "18325": 39366.0, "18330": 39274.0, "18335": 38513.0, "18340": 38451.0, "18345": 40741.0, "18350": 37807.0, "18355": 41502.0, "18360": 38683.0, "18365": 37179.0, "18370": 41568.0, "18375": 39444.0, "18380": 38588.0, "18385": 38437.0, "18390": 40732.0, "18395": 39305.0, "18400": 38800.0, "18405": 40745.0, "18410": 38504.0, "18415": 39364.0, "18420": 41508.0, "18425": 38887.0, "18430": 39274.0, "18435": 39214.0, "18440": 40051.0, "18445": 39468.0, "18450": 39358.0, "18455": 41557.0, "18460": 40045.0, "18465": 38503.0, "18470": 38532.0, "18475": 38575.0, "18480": 41585.0, "18485": 37741.0, "18490": 39291.0, "18495": 39363.0, "18500": 40902.0, "18505": 39357.0, "18510": 39280.0, "18515": 41509.0, "18520": 40813.0, "18525": 39977.0, "18530": 40217.0, "18535": 37392.0, "18540": 39430.0, "18545": 40062.0, "18550": 39981.0, "18555": 39261.0, "18560": 38696.0, "18565": 37325.0, "18570": 40230.0, "18575": 39264.0, "18580": 39367.0, "18585": 39337.0, "18590": 40132.0, "18595": 39974.0, "18600": 40807.0, "18605": 40882.0, "18610": 39231.0, "18615": 40141.0, "18620": 41497.0, "18625": 39287.0, "18630": 39283.0, "18635": 38660.0, "18640": 40069.0, "18645": 40805.0, "18650": 37891.0, "18655": 39349.0, "18660": 37839.0, "18665": 39276.0, "18670": 42261.0, "18675": 41507.0, "18680": 39451.0, "18685": 40063.0, "18690": 41503.0, "18695": 41593.0, "18700": 38504.0, "18705": 42344.0, "18710": 41577.0, "18715": 36541.0, "18720": 39281.0, "18725": 38571.0, "18730": 40731.0, "18735": 41507.0, "18740": 39203.0, "18745": 40732.0, "18750": 41504.0, "18755": 39219.0, "18760": 38574.0, "18765": 39357.0, "18770": 40807.0, "18775": 38670.0, "18780": 38005.0, "18785": 38581.0, "18790": 40040.0, "18795": 38644.0, "18800": 41513.0, "18805": 40816.0, "18810": 40121.0, "18815": 40742.0, "18820": 40796.0, "18825": 36612.0, "18830": 37758.0, "18835": 36010.0, "18840": 38582.0, "18845": 37766.0, "18850": 37233.0, "18855": 38506.0, "18860": 40056.0, "18865": 40047.0, "18870": 35761.0, "18875": 39972.0, "18880": 41500.0, "18885": 38738.0, "18890": 37999.0, "18895": 38500.0, "18900": 38650.0, "18905": 40741.0, "18910": 39223.0, "18915": 39459.0, "18920": 39976.0, "18925": 37968.0, "18930": 41496.0, "18935": 40125.0, "18940": 40748.0, "18945": 40057.0, "18950": 41506.0, "18955": 40732.0, "18960": 41588.0, "18965": 38758.0, "18970": 40825.0, "18975": 40815.0, "18980": 40738.0, "18985": 40108.0, "18990": 39537.0, "18995": 39432.0, "19000": 40045.0, "19005": 40891.0, "19010": 41572.0, "19015": 40735.0, "19020": 42345.0, "19025": 40746.0, "19030": 39351.0, "19035": 38534.0, "19040": 41570.0, "19045": 38655.0, "19050": 39425.0, "19055": 40802.0, "19060": 40793.0, "19065": 40022.0, "19070": 41575.0, "19075": 41506.0, "19080": 39280.0, "19085": 41573.0, "19090": 37773.0, "19095": 39203.0, "19100": 40901.0, "19105": 40055.0, 
"19110": 41504.0, "19115": 40110.0, "19120": 39270.0, "19125": 38039.0, "19130": 40815.0, "19135": 40742.0, "19140": 39979.0, "19145": 40034.0, "19150": 38656.0, "19155": 40731.0, "19160": 40820.0, "19165": 37963.0, "19170": 40115.0, "19175": 40752.0, "19180": 37156.0, "19185": 39974.0, "19190": 35900.0, "19195": 38509.0, "19200": 39435.0, "19205": 34586.0, "19210": 41515.0, "19215": 40123.0, "19220": 40742.0, "19225": 40141.0, "19230": 40041.0, "19235": 40052.0, "19240": 40044.0, "19245": 40820.0, "19250": 40028.0, "19255": 37922.0, "19260": 37909.0, "19265": 40055.0, "19270": 40830.0, "19275": 39217.0, "19280": 39325.0, "19285": 39979.0, "19290": 40762.0, "19295": 40809.0, "19300": 39270.0, "19305": 39429.0, "19310": 40875.0, "19315": 39376.0, "19320": 40066.0, "19325": 39973.0, "19330": 41498.0, "19335": 40070.0, "19340": 39354.0, "19345": 40806.0, "19350": 39363.0, "19355": 40051.0, "19360": 40198.0, "19365": 41580.0, "19370": 40730.0, "19375": 40045.0, "19380": 40048.0, "19385": 39969.0, "19390": 39299.0, "19395": 40812.0, "19400": 40798.0, "19405": 40886.0, "19410": 40041.0, "19415": 40884.0, "19420": 40831.0, "19425": 39278.0, "19430": 41594.0, "19435": 40729.0, "19440": 41493.0, "19445": 40062.0, "19450": 40147.0, "19455": 41499.0, "19460": 41494.0, "19465": 39332.0, "19470": 39440.0, "19475": 39359.0, "19480": 40048.0, "19485": 40201.0, "19490": 40041.0, "19495": 39979.0, "19500": 36611.0, "19505": 42269.0, "19510": 41600.0, "19515": 39372.0, "19520": 41492.0, "19525": 39973.0, "19530": 39295.0, "19535": 39978.0, "19540": 39289.0, "19545": 38703.0, "19550": 40047.0, "19555": 40022.0, "19560": 39280.0, "19565": 40136.0, "19570": 42347.0, "19575": 39337.0, "19580": 40823.0, "19585": 40044.0, "19590": 40127.0, "19595": 40207.0, "19600": 39266.0, "19605": 40045.0, "19610": 40742.0, "19615": 37824.0, "19620": 39977.0, "19625": 37838.0, "19630": 41592.0, "19635": 40097.0, "19640": 39336.0, "19645": 40060.0, "19650": 40739.0, "19655": 40190.0, "19660": 40034.0, "19665": 40739.0, "19670": 39962.0, "19675": 40046.0, "19680": 41496.0, "19685": 40054.0, "19690": 40841.0, "19695": 40811.0, "19700": 40731.0, "19705": 38653.0, "19710": 40040.0, "19715": 36592.0, "19720": 40038.0, "19725": 40027.0, "19730": 35970.0, "19735": 40034.0, "19740": 39272.0, "19745": 39201.0, "19750": 39379.0, "19755": 40056.0, "19760": 41588.0, "19765": 39337.0, "19770": 40046.0, "19775": 40791.0, "19780": 39977.0, "19785": 40751.0, "19790": 39219.0, "19795": 39271.0, "19800": 39970.0, "19805": 40810.0, "19810": 40750.0, "19815": 39525.0, "19820": 37359.0, "19825": 39290.0, "19830": 41517.0, "19835": 40826.0, "19840": 38434.0, "19845": 39983.0, "19850": 40060.0, "19855": 41556.0, "19860": 41502.0, "19865": 43031.0, "19870": 39349.0, "19875": 40111.0, "19880": 37184.0, "19885": 39223.0, "19890": 40812.0, "19895": 39201.0, "19900": 42275.0, "19905": 40876.0, "19910": 40034.0, "19915": 40138.0, "19920": 39203.0, "19925": 38818.0, "19930": 40866.0, "19935": 40054.0, "19940": 40746.0, "19945": 40806.0, "19950": 39203.0, "19955": 38646.0, "19960": 39274.0, "19965": 42278.0, "19970": 40885.0, "19975": 39973.0, "19980": 40023.0, "19985": 40042.0, "19990": 40820.0, "19995": 38762.0, "20000": 36450.0, "20005": 39431.0, "20010": 41506.0, "20015": 40813.0, "20020": 40835.0, "20025": 40052.0, "20030": 37216.0, "20035": 38594.0, "20040": 40739.0, "20045": 37899.0, "20050": 40877.0, "20055": 39345.0, "20060": 39971.0, "20065": 36644.0, "20070": 40735.0, "20075": 41583.0, "20080": 39285.0, "20085": 40750.0, "20090": 40745.0, "20095": 
40809.0, "20100": 40752.0, "20105": 40057.0, "20110": 40056.0, "20115": 40038.0, "20120": 38594.0, "20125": 38750.0, "20130": 39265.0, "20135": 39339.0, "20140": 35273.0, "20145": 40747.0, "20150": 40741.0, "20155": 38673.0, "20160": 38590.0, "20165": 40825.0, "20170": 40029.0, "20175": 40740.0, "20180": 39368.0, "20185": 40831.0, "20190": 39305.0, "20195": 40732.0, "20200": 40042.0, "20205": 39354.0, "20210": 40063.0, "20215": 39296.0, "20220": 39288.0, "20225": 39966.0, "20230": 40128.0, "20235": 39287.0, "20240": 37360.0, "20245": 39374.0, "20250": 37978.0, "20255": 39447.0, "20260": 41510.0, "20265": 40741.0, "20270": 40199.0, "20275": 40797.0, "20280": 40878.0, "20285": 41506.0, "20290": 40817.0, "20295": 41594.0, "20300": 40807.0, "20305": 40906.0, "20310": 41501.0, "20315": 40045.0, "20320": 39973.0, "20325": 39296.0, "20330": 39286.0, "20335": 39260.0, "20340": 39295.0, "20345": 38600.0, "20350": 39424.0, "20355": 37963.0, "20360": 38745.0, "20365": 38081.0, "20370": 39363.0, "20375": 40733.0, "20380": 38762.0, "20385": 40794.0, "20390": 39343.0, "20395": 39287.0, "20400": 39290.0, "20405": 39513.0, "20410": 40116.0, "20415": 39356.0, "20420": 37453.0, "20425": 40101.0, "20430": 40734.0, "20435": 40034.0, "20440": 40824.0, "20445": 40055.0, "20450": 40065.0, "20455": 40809.0, "20460": 40803.0, "20465": 40744.0, "20470": 41557.0, "20475": 40136.0, "20480": 40921.0, "20485": 40121.0, "20490": 39429.0, "20495": 40729.0, "20500": 39390.0, "20505": 39978.0, "20510": 42271.0, "20515": 39278.0, "20520": 40814.0, "20525": 38586.0, "20530": 40047.0, "20535": 40041.0, "20540": 41571.0, "20545": 40066.0, "20550": 40121.0, "20555": 37971.0, "20560": 38727.0, "20565": 40123.0, "20570": 39462.0, "20575": 37295.0, "20580": 39408.0, "20585": 40027.0, "20590": 40743.0, "20595": 42269.0, "20600": 40814.0, "20605": 41568.0, "20610": 40031.0, "20615": 41503.0, "20620": 40036.0, "20625": 38748.0, "20630": 39516.0, "20635": 39268.0, "20640": 39334.0, "20645": 39377.0, "20650": 40124.0, "20655": 40993.0, "20660": 39368.0, "20665": 41595.0, "20670": 36410.0, "20675": 38030.0, "20680": 41573.0, "20685": 41594.0, "20690": 41555.0, "20695": 40127.0, "20700": 39330.0, "20705": 41579.0, "20710": 40111.0, "20715": 39287.0, "20720": 39962.0, "20725": 40826.0, "20730": 40812.0, "20735": 39287.0, "20740": 40802.0, "20745": 40823.0, "20750": 40044.0, "20755": 36984.0, "20760": 40043.0, "20765": 39382.0, "20770": 37045.0, "20775": 40790.0, "20780": 40120.0, "20785": 40746.0, "20790": 40901.0, "20795": 38670.0, "20800": 42280.0, "20805": 40740.0, "20810": 36604.0, "20815": 40735.0, "20820": 38598.0, "20825": 40751.0, "20830": 40809.0, "20835": 40053.0, "20840": 40038.0, "20845": 39970.0, "20850": 38641.0, "20855": 40055.0, "20860": 39413.0, "20865": 40802.0, "20870": 40829.0, "20875": 37964.0, "20880": 39275.0, "20885": 38447.0, "20890": 40807.0, "20895": 37847.0, "20900": 39276.0, "20905": 40196.0, "20910": 39424.0, "20915": 41498.0, "20920": 40058.0, "20925": 38518.0, "20930": 37907.0, "20935": 41501.0, "20940": 39961.0, "20945": 41495.0, "20950": 41590.0, "20955": 40138.0, "20960": 39978.0, "20965": 41502.0, "20970": 39286.0, "20975": 40192.0, "20980": 38643.0, "20985": 40805.0, "20990": 42279.0, "20995": 40822.0, "21000": 39974.0, "21005": 41580.0, "21010": 40061.0, "21015": 37196.0, "21020": 39283.0, "21025": 39283.0, "21030": 40826.0, "21035": 40738.0, "21040": 40727.0, "21045": 38526.0, "21050": 37359.0, "21055": 40056.0, "21060": 39439.0, "21065": 40730.0, "21070": 38610.0, "21075": 41508.0, "21080": 40032.0, 
"21085": 40883.0, "21090": 40889.0, "21095": 38739.0, "21100": 40820.0, "21105": 40743.0, "21110": 40813.0, "21115": 39433.0, "21120": 41520.0, "21125": 39409.0, "21130": 40046.0, "21135": 38081.0, "21140": 40739.0, "21145": 41510.0, "21150": 40050.0, "21155": 40134.0, "21160": 40120.0, "21165": 41568.0, "21170": 39995.0, "21175": 40746.0, "21180": 41563.0, "21185": 39347.0, "21190": 40066.0, "21195": 39293.0, "21200": 39982.0, "21205": 39978.0, "21210": 39279.0, "21215": 43036.0, "21220": 38663.0, "21225": 40733.0, "21230": 39271.0, "21235": 41512.0, "21240": 40111.0, "21245": 39360.0, "21250": 37207.0, "21255": 40814.0, "21260": 40118.0, "21265": 41596.0, "21270": 42276.0, "21275": 38599.0, "21280": 38455.0, "21285": 39336.0, "21290": 40884.0, "21295": 41503.0, "21300": 39282.0, "21305": 39969.0, "21310": 42275.0, "21315": 40819.0, "21320": 39360.0, "21325": 40801.0, "21330": 42278.0, "21335": 38033.0, "21340": 40056.0, "21345": 38716.0, "21350": 40111.0, "21355": 37075.0, "21360": 40748.0, "21365": 40807.0, "21370": 41573.0, "21375": 40813.0, "21380": 41583.0, "21385": 38530.0, "21390": 40789.0, "21395": 39332.0, "21400": 37834.0, "21405": 40746.0, "21410": 38516.0, "21415": 40029.0, "21420": 39359.0, "21425": 40054.0, "21430": 38730.0, "21435": 37747.0, "21440": 40071.0, "21445": 40878.0, "21450": 38490.0, "21455": 40039.0, "21460": 39264.0, "21465": 37997.0, "21470": 39981.0, "21475": 40141.0, "21480": 41570.0, "21485": 41505.0, "21490": 40110.0, "21495": 40058.0, "21500": 40746.0, "21505": 39434.0, "21510": 40058.0, "21515": 39300.0, "21520": 39980.0, "21525": 39981.0, "21530": 39975.0, "21535": 41508.0, "21540": 39976.0, "21545": 39367.0, "21550": 39212.0, "21555": 40050.0, "21560": 39426.0, "21565": 41496.0, "21570": 39295.0, "21575": 40738.0, "21580": 40744.0, "21585": 40057.0, "21590": 40758.0, "21595": 39974.0, "21600": 40107.0, "21605": 39274.0, "21610": 40890.0, "21615": 39968.0, "21620": 38598.0, "21625": 39210.0, "21630": 38747.0, "21635": 41501.0, "21640": 40731.0, "21645": 38669.0, "21650": 41511.0, "21655": 40063.0, "21660": 40788.0, "21665": 39206.0, "21670": 40888.0, "21675": 39345.0, "21680": 38018.0, "21685": 37289.0, "21690": 40134.0, "21695": 37749.0, "21700": 39981.0, "21705": 40059.0, "21710": 40104.0, "21715": 39990.0, "21720": 40889.0, "21725": 40810.0, "21730": 40891.0, "21735": 38830.0, "21740": 39467.0, "21745": 41650.0, "21750": 40789.0, "21755": 41576.0, "21760": 38736.0, "21765": 39345.0, "21770": 38663.0, "21775": 40736.0, "21780": 41487.0, "21785": 39538.0, "21790": 40029.0, "21795": 41502.0, "21800": 39212.0, "21805": 40878.0, "21810": 39380.0, "21815": 39416.0, "21820": 39290.0, "21825": 40064.0, "21830": 38746.0, "21835": 39289.0, "21840": 40052.0, "21845": 40821.0, "21850": 36292.0, "21855": 41585.0, "21860": 40747.0, "21865": 38654.0, "21870": 40797.0, "21875": 40738.0, "21880": 39205.0, "21885": 39212.0, "21890": 38758.0, "21895": 40134.0, "21900": 39461.0, "21905": 41503.0, "21910": 41504.0, "21915": 40818.0, "21920": 40789.0, "21925": 38663.0, "21930": 40750.0, "21935": 37903.0, "21940": 38739.0, "21945": 36577.0, "21950": 40739.0, "21955": 40807.0, "21960": 39435.0, "21965": 39973.0, "21970": 39965.0, "21975": 40742.0, "21980": 36705.0, "21985": 40817.0, "21990": 40808.0, "21995": 40041.0, "22000": 40803.0, "22005": 40132.0, "22010": 39430.0, "22015": 41500.0, "22020": 40805.0, "22025": 39966.0, "22030": 40110.0, "22035": 37897.0, "22040": 40199.0, "22045": 39365.0, "22050": 41511.0, "22055": 39357.0, "22060": 38594.0, "22065": 42282.0, "22070": 
41501.0, "22075": 40113.0, "22080": 38783.0, "22085": 40038.0, "22090": 40118.0, "22095": 40109.0, "22100": 41504.0, "22105": 40046.0, "22110": 39441.0, "22115": 42265.0, "22120": 41511.0, "22125": 39202.0, "22130": 40891.0, "22135": 39971.0, "22140": 41500.0, "22145": 40054.0, "22150": 38549.0, "22155": 40919.0, "22160": 40797.0, "22165": 40792.0, "22170": 41500.0, "22175": 40043.0, "22180": 39346.0, "22185": 40126.0, "22190": 40743.0, "22195": 41499.0, "22200": 41590.0, "22205": 40128.0, "22210": 41499.0, "22215": 41579.0, "22220": 39213.0, "22225": 40118.0, "22230": 40059.0, "22235": 40158.0, "22240": 41503.0, "22245": 41498.0, "22250": 39978.0, "22255": 40815.0, "22260": 39386.0, "22265": 40045.0, "22270": 39419.0, "22275": 40120.0, "22280": 38835.0, "22285": 40041.0, "22290": 37195.0, "22295": 41577.0, "22300": 40812.0, "22305": 39511.0, "22310": 36604.0, "22315": 37829.0, "22320": 37192.0, "22325": 40737.0, "22330": 38440.0, "22335": 39961.0, "22340": 40812.0, "22345": 41505.0, "22350": 40811.0, "22355": 40064.0, "22360": 38685.0, "22365": 38580.0, "22370": 40135.0, "22375": 40072.0, "22380": 40806.0, "22385": 40126.0, "22390": 39286.0, "22395": 39325.0, "22400": 40814.0, "22405": 39271.0, "22410": 39973.0, "22415": 40809.0, "22420": 41517.0, "22425": 38527.0, "22430": 41573.0, "22435": 39982.0, "22440": 40033.0, "22445": 41507.0, "22450": 39284.0, "22455": 41518.0, "22460": 35832.0, "22465": 39287.0, "22470": 40056.0, "22475": 39439.0, "22480": 41578.0, "22485": 40125.0, "22490": 38732.0, "22495": 42347.0, "22500": 40117.0, "22505": 40210.0, "22510": 41511.0, "22515": 40795.0, "22520": 40801.0, "22525": 40105.0, "22530": 41585.0, "22535": 40816.0, "22540": 40738.0, "22545": 37762.0, "22550": 40807.0, "22555": 39271.0, "22560": 40042.0, "22565": 41506.0, "22570": 42262.0, "22575": 42272.0, "22580": 40736.0, "22585": 40840.0, "22590": 39299.0, "22595": 40030.0, "22600": 40739.0, "22605": 38507.0, "22610": 39351.0, "22615": 36619.0, "22620": 41572.0, "22625": 40743.0, "22630": 41499.0, "22635": 40228.0, "22640": 37897.0, "22645": 40822.0, "22650": 40061.0, "22655": 40050.0, "22660": 40818.0, "22665": 40824.0, "22670": 41565.0, "22675": 38716.0, "22680": 37819.0, "22685": 40044.0, "22690": 40743.0, "22695": 40210.0, "22700": 41497.0, "22705": 41509.0, "22710": 40748.0, "22715": 41596.0, "22720": 39203.0, "22725": 38581.0, "22730": 39278.0, "22735": 40813.0, "22740": 38751.0, "22745": 38662.0, "22750": 40890.0, "22755": 38519.0, "22760": 40036.0, "22765": 40115.0, "22770": 40043.0, "22775": 40741.0, "22780": 40906.0, "22785": 40031.0, "22790": 37826.0, "22795": 39495.0, "22800": 40049.0, "22805": 41508.0, "22810": 39419.0, "22815": 37203.0, "22820": 40114.0, "22825": 40041.0, "22830": 40863.0, "22835": 42260.0, "22840": 38658.0, "22845": 40046.0, "22850": 40195.0, "22855": 40810.0, "22860": 40863.0, "22865": 40734.0, "22870": 41514.0, "22875": 39967.0, "22880": 39506.0, "22885": 41512.0, "22890": 39212.0, "22895": 40839.0, "22900": 40811.0, "22905": 40827.0, "22910": 39357.0, "22915": 37265.0, "22920": 40039.0, "22925": 39339.0, "22930": 39439.0, "22935": 39279.0, "22940": 40811.0, "22945": 40034.0, "22950": 38595.0, "22955": 40902.0, "22960": 41513.0, "22965": 39349.0, "22970": 41512.0, "22975": 40726.0, "22980": 38744.0, "22985": 38653.0, "22990": 38743.0, "22995": 38599.0, "23000": 40134.0, "23005": 41497.0, "23010": 40133.0, "23015": 39964.0, "23020": 42257.0, "23025": 41500.0, "23030": 42275.0, "23035": 38752.0, "23040": 40130.0, "23045": 40797.0, "23050": 41575.0, "23055": 36278.0, 
"23060": 40050.0, "23065": 40039.0, "23070": 40061.0, "23075": 39300.0, "23080": 40050.0, "23085": 39979.0, "23090": 40748.0, "23095": 39193.0, "23100": 39290.0, "23105": 40724.0, "23110": 40200.0, "23115": 39960.0, "23120": 37772.0, "23125": 40808.0, "23130": 39985.0, "23135": 42269.0, "23140": 42273.0, "23145": 41570.0, "23150": 40818.0, "23155": 40886.0, "23160": 39971.0, "23165": 39361.0, "23170": 40807.0, "23175": 40815.0, "23180": 38595.0, "23185": 39965.0, "23190": 40808.0, "23195": 39277.0, "23200": 40113.0, "23205": 39979.0, "23210": 38817.0, "23215": 40051.0, "23220": 40898.0, "23225": 40811.0, "23230": 40895.0, "23235": 38666.0, "23240": 40050.0, "23245": 41499.0, "23250": 38687.0, "23255": 37208.0, "23260": 37970.0, "23265": 38665.0, "23270": 40117.0, "23275": 40186.0, "23280": 39979.0, "23285": 40811.0, "23290": 40738.0, "23295": 40809.0, "23300": 40815.0, "23305": 37760.0, "23310": 39441.0, "23315": 38704.0, "23320": 40046.0, "23325": 40051.0, "23330": 39215.0, "23335": 41506.0, "23340": 40037.0, "23345": 41502.0, "23350": 40894.0, "23355": 41507.0, "23360": 40140.0, "23365": 39969.0, "23370": 40057.0, "23375": 40062.0, "23380": 41498.0, "23385": 40744.0, "23390": 39970.0, "23395": 40059.0, "23400": 39205.0, "23405": 40192.0, "23410": 37275.0, "23415": 40801.0, "23420": 40207.0, "23425": 42273.0, "23430": 41505.0, "23435": 40130.0, "23440": 40036.0, "23445": 39279.0, "23450": 40730.0, "23455": 39282.0, "23460": 37917.0, "23465": 40742.0, "23470": 41576.0, "23475": 40043.0, "23480": 39505.0, "23485": 39339.0, "23490": 39477.0, "23495": 40741.0, "23500": 40116.0, "23505": 39974.0, "23510": 40052.0, "23515": 39309.0, "23520": 42260.0, "23525": 40747.0, "23530": 40067.0, "23535": 40880.0, "23540": 40818.0, "23545": 38010.0, "23550": 40736.0, "23555": 40742.0, "23560": 39353.0, "23565": 40131.0, "23570": 40073.0, "23575": 40033.0, "23580": 40048.0, "23585": 40049.0, "23590": 38586.0, "23595": 40115.0, "23600": 38780.0, "23605": 38613.0, "23610": 41508.0, "23615": 38634.0, "23620": 37906.0, "23625": 38469.0, "23630": 39451.0, "23635": 40736.0, "23640": 37767.0, "23645": 40122.0, "23650": 42271.0, "23655": 40038.0, "23660": 39266.0, "23665": 40816.0, "23670": 40052.0, "23675": 40032.0, "23680": 39361.0, "23685": 40817.0, "23690": 39362.0, "23695": 40736.0, "23700": 39373.0, "23705": 39269.0, "23710": 40049.0, "23715": 37232.0, "23720": 40749.0, "23725": 41507.0, "23730": 39434.0, "23735": 39286.0, "23740": 39327.0, "23745": 40044.0, "23750": 39974.0, "23755": 39393.0, "23760": 41502.0, "23765": 40733.0, "23770": 39976.0, "23775": 39960.0, "23780": 39431.0, "23785": 37769.0, "23790": 41593.0, "23795": 40117.0, "23800": 40040.0, "23805": 40736.0, "23810": 40217.0, "23815": 37982.0, "23820": 41587.0, "23825": 39293.0, "23830": 40129.0, "23835": 38637.0, "23840": 39976.0, "23845": 40746.0, "23850": 39298.0, "23855": 40136.0, "23860": 38591.0, "23865": 39362.0, "23870": 40040.0, "23875": 40066.0, "23880": 40818.0, "23885": 37303.0, "23890": 39289.0, "23895": 40826.0, "23900": 41568.0, "23905": 40207.0, "23910": 40061.0, "23915": 40125.0, "23920": 38686.0, "23925": 39284.0, "23930": 40747.0, "23935": 39512.0, "23940": 39275.0, "23945": 38588.0, "23950": 40040.0, "23955": 40065.0, "23960": 40886.0, "23965": 41503.0, "23970": 40893.0, "23975": 39279.0, "23980": 40890.0, "23985": 41567.0, "23990": 40896.0, "23995": 37997.0, "24000": 39230.0, "24005": 39347.0, "24010": 39345.0, "24015": 39262.0, "24020": 41569.0, "24025": 41505.0, "24030": 40746.0, "24035": 40044.0, "24040": 41507.0, "24045": 
39459.0, "24050": 40814.0, "24055": 40749.0, "24060": 41521.0, "24065": 40829.0, "24070": 40815.0, "24075": 37834.0, "24080": 41501.0, "24085": 40735.0, "24090": 35982.0, "24095": 40815.0, "24100": 41501.0, "24105": 40054.0, "24110": 39412.0, "24115": 40804.0, "24120": 40803.0, "24125": 39348.0, "24130": 40071.0, "24135": 42289.0, "24140": 38605.0, "24145": 40814.0, "24150": 39273.0, "24155": 40724.0, "24160": 37835.0, "24165": 40045.0, "24170": 38748.0, "24175": 38683.0, "24180": 38665.0, "24185": 40741.0, "24190": 41582.0, "24195": 39283.0, "24200": 41568.0, "24205": 41580.0, "24210": 40746.0, "24215": 42271.0, "24220": 40130.0, "24225": 38513.0, "24230": 41494.0, "24235": 39281.0, "24240": 40732.0, "24245": 40814.0, "24250": 40063.0, "24255": 40145.0, "24260": 40738.0, "24265": 38463.0, "24270": 39297.0, "24275": 40748.0, "24280": 37354.0, "24285": 41494.0, "24290": 39349.0, "24295": 38712.0, "24300": 39434.0, "24305": 39267.0, "24310": 40805.0, "24315": 41495.0, "24320": 40736.0, "24325": 40738.0, "24330": 39311.0, "24335": 39274.0, "24340": 39281.0, "24345": 37832.0, "24350": 40116.0, "24355": 39348.0, "24360": 40742.0, "24365": 41581.0, "24370": 38018.0, "24375": 40887.0, "24380": 40810.0, "24385": 41499.0, "24390": 38689.0, "24395": 38530.0, "24400": 40799.0, "24405": 40891.0, "24410": 40107.0, "24415": 39429.0, "24420": 41515.0, "24425": 41507.0, "24430": 40113.0, "24435": 39303.0, "24440": 40734.0, "24445": 39273.0, "24450": 39334.0, "24455": 40808.0, "24460": 40805.0, "24465": 40127.0, "24470": 41496.0, "24475": 38790.0, "24480": 38671.0, "24485": 41503.0, "24490": 41502.0, "24495": 38679.0, "24500": 41571.0, "24505": 39331.0, "24510": 39981.0, "24515": 39287.0, "24520": 39967.0, "24525": 39354.0, "24530": 40016.0, "24535": 40824.0, "24540": 39367.0, "24545": 39208.0, "24550": 40025.0, "24555": 38454.0, "24560": 39351.0, "24565": 41566.0, "24570": 40036.0, "24575": 36727.0, "24580": 41591.0, "24585": 40037.0, "24590": 39281.0, "24595": 40060.0, "24600": 41495.0, "24605": 37755.0, "24610": 39458.0, "24615": 37108.0, "24620": 40189.0, "24625": 40063.0, "24630": 41512.0, "24635": 39479.0, "24640": 38538.0, "24645": 40813.0, "24650": 40109.0, "24655": 41507.0, "24660": 38571.0, "24665": 41500.0, "24670": 40734.0, "24675": 41577.0, "24680": 40026.0, "24685": 40733.0, "24690": 40869.0, "24695": 37960.0, "24700": 39979.0, "24705": 39357.0, "24710": 39982.0, "24715": 40746.0, "24720": 39204.0, "24725": 40149.0, "24730": 40801.0, "24735": 39272.0, "24740": 39278.0, "24745": 40750.0, "24750": 39208.0, "24755": 41502.0, "24760": 40758.0, "24765": 39357.0, "24770": 40816.0, "24775": 38656.0, "24780": 40876.0, "24785": 40111.0, "24790": 39962.0, "24795": 39363.0, "24800": 40069.0, "24805": 38746.0, "24810": 40815.0, "24815": 40112.0, "24820": 40047.0, "24825": 40044.0, "24830": 40820.0, "24835": 39373.0, "24840": 37072.0, "24845": 40190.0, "24850": 40030.0, "24855": 40069.0, "24860": 39332.0, "24865": 39368.0, "24870": 41496.0, "24875": 40090.0, "24880": 37211.0, "24885": 40730.0, "24890": 40840.0, "24895": 41570.0, "24900": 38632.0, "24905": 40737.0, "24910": 39979.0, "24915": 39967.0, "24920": 40737.0, "24925": 40028.0, "24930": 41494.0, "24935": 38051.0, "24940": 38563.0, "24945": 37985.0, "24950": 37268.0, "24955": 39478.0, "24960": 39487.0, "24965": 38588.0, "24970": 38575.0, "24975": 40743.0, "24980": 41522.0, "24985": 41501.0, "24990": 37835.0, "24995": 41512.0, "25000": 40875.0, "25005": 41505.0, "25010": 40056.0, "25015": 40048.0, "25020": 40829.0, "25025": 39335.0, "25030": 40909.0, 
"25035": 37112.0, "25040": 41640.0, "25045": 39344.0, "25050": 37958.0, "25055": 36679.0, "25060": 39375.0, "25065": 39269.0, "25070": 40135.0, "25075": 42271.0, "25080": 40812.0, "25085": 36541.0, "25090": 41588.0, "25095": 38043.0, "25100": 41506.0, "25105": 40817.0, "25110": 40784.0, "25115": 40742.0, "25120": 40815.0, "25125": 38589.0, "25130": 40059.0, "25135": 40742.0, "25140": 38651.0, "25145": 40808.0, "25150": 39294.0, "25155": 37768.0, "25160": 40101.0, "25165": 40739.0, "25170": 40737.0, "25175": 36557.0, "25180": 38681.0, "25185": 40203.0, "25190": 39975.0, "25195": 40099.0, "25200": 42259.0, "25205": 39968.0, "25210": 40867.0, "25215": 39419.0, "25220": 40887.0, "25225": 39444.0, "25230": 40884.0, "25235": 37905.0, "25240": 38595.0, "25245": 40802.0, "25250": 39361.0, "25255": 38654.0, "25260": 40794.0, "25265": 38668.0, "25270": 39975.0, "25275": 41504.0, "25280": 41583.0, "25285": 37763.0, "25290": 40737.0, "25295": 40746.0, "25300": 41497.0, "25305": 40809.0, "25310": 40828.0, "25315": 40816.0, "25320": 39978.0, "25325": 40734.0, "25330": 38663.0, "25335": 39376.0, "25340": 36470.0, "25345": 41512.0, "25350": 42350.0, "25355": 40732.0, "25360": 38751.0, "25365": 38789.0, "25370": 40734.0, "25375": 39297.0, "25380": 39969.0, "25385": 38681.0, "25390": 38607.0, "25395": 39346.0, "25400": 40872.0, "25405": 40021.0, "25410": 38008.0, "25415": 40064.0, "25420": 40122.0, "25425": 39344.0, "25430": 39285.0, "25435": 39429.0, "25440": 40102.0, "25445": 42275.0, "25450": 38688.0, "25455": 42262.0, "25460": 40827.0, "25465": 41582.0, "25470": 41504.0, "25475": 39215.0, "25480": 38443.0, "25485": 40113.0, "25490": 40745.0, "25495": 39421.0, "25500": 39448.0, "25505": 39439.0, "25510": 37144.0, "25515": 40051.0, "25520": 41509.0, "25525": 40039.0, "25530": 40032.0, "25535": 40836.0, "25540": 41587.0, "25545": 40826.0, "25550": 40039.0, "25555": 40146.0, "25560": 39968.0, "25565": 40050.0, "25570": 39981.0, "25575": 37200.0, "25580": 40746.0, "25585": 41499.0, "25590": 37899.0, "25595": 40735.0, "25600": 41492.0, "25605": 39275.0, "25610": 41508.0, "25615": 39288.0, "25620": 41666.0, "25625": 38767.0, "25630": 40819.0, "25635": 37332.0, "25640": 40145.0, "25645": 40903.0, "25650": 40792.0, "25655": 40730.0, "25660": 38530.0, "25665": 40806.0, "25670": 40744.0, "25675": 37866.0, "25680": 39355.0, "25685": 41579.0, "25690": 40732.0, "25695": 39357.0, "25700": 40053.0, "25705": 40802.0, "25710": 38518.0, "25715": 40192.0, "25720": 38745.0, "25725": 40816.0, "25730": 40848.0, "25735": 40112.0, "25740": 40055.0, "25745": 40056.0, "25750": 38681.0, "25755": 41578.0, "25760": 40963.0, "25765": 39424.0, "25770": 39270.0, "25775": 40201.0, "25780": 40798.0, "25785": 40742.0, "25790": 42263.0, "25795": 40052.0, "25800": 41510.0, "25805": 40746.0, "25810": 42268.0, "25815": 42268.0, "25820": 40046.0, "25825": 40055.0, "25830": 39988.0, "25835": 38740.0, "25840": 40740.0, "25845": 40057.0, "25850": 38628.0, "25855": 39279.0, "25860": 37008.0, "25865": 38598.0, "25870": 39966.0, "25875": 39353.0, "25880": 39365.0, "25885": 37886.0, "25890": 40059.0, "25895": 37899.0, "25900": 39272.0, "25905": 40743.0, "25910": 40055.0, "25915": 40113.0, "25920": 39369.0, "25925": 40835.0, "25930": 39264.0, "25935": 40025.0, "25940": 39349.0, "25945": 37886.0, "25950": 41583.0, "25955": 39359.0, "25960": 40798.0, "25965": 40817.0, "25970": 40818.0, "25975": 40030.0, "25980": 41493.0, "25985": 40088.0, "25990": 40729.0, "25995": 39379.0, "26000": 41507.0, "26005": 40041.0, "26010": 40815.0, "26015": 40732.0, "26020": 
39205.0, "26025": 39281.0, "26030": 40050.0, "26035": 39300.0, "26040": 40743.0, "26045": 40744.0, "26050": 41517.0, "26055": 40823.0, "26060": 40911.0, "26065": 40805.0, "26070": 38761.0, "26075": 39285.0, "26080": 37332.0, "26085": 39976.0, "26090": 40055.0, "26095": 36363.0, "26100": 39502.0, "26105": 40811.0, "26110": 37846.0, "26115": 40042.0, "26120": 40801.0, "26125": 40057.0, "26130": 39284.0, "26135": 41583.0, "26140": 40042.0, "26145": 39366.0, "26150": 39967.0, "26155": 37834.0, "26160": 40060.0, "26165": 38733.0, "26170": 39343.0, "26175": 39988.0, "26180": 39453.0, "26185": 37127.0, "26190": 39328.0, "26195": 40829.0, "26200": 40201.0, "26205": 40892.0, "26210": 38557.0, "26215": 42281.0, "26220": 39457.0, "26225": 38587.0, "26230": 37826.0, "26235": 40043.0, "26240": 40043.0, "26245": 39967.0, "26250": 41505.0, "26255": 40137.0, "26260": 41575.0, "26265": 40046.0, "26270": 39288.0, "26275": 40135.0, "26280": 38582.0, "26285": 38582.0, "26290": 37161.0, "26295": 41502.0, "26300": 41571.0, "26305": 37138.0, "26310": 39204.0, "26315": 40068.0, "26320": 40735.0, "26325": 39358.0, "26330": 40901.0, "26335": 39359.0, "26340": 40029.0, "26345": 40737.0, "26350": 41655.0, "26355": 42276.0, "26360": 39222.0, "26365": 41504.0, "26370": 39360.0, "26375": 38745.0, "26380": 40035.0, "26385": 40049.0, "26390": 40758.0, "26395": 40069.0, "26400": 41567.0, "26405": 37932.0, "26410": 41499.0, "26415": 39972.0, "26420": 40800.0, "26425": 42263.0, "26430": 40074.0, "26435": 39964.0, "26440": 38037.0, "26445": 38662.0, "26450": 40804.0, "26455": 39974.0, "26460": 40895.0, "26465": 40802.0, "26470": 40744.0, "26475": 41504.0, "26480": 41507.0, "26485": 38603.0, "26490": 39986.0, "26495": 39271.0, "26500": 40033.0, "26505": 39980.0, "26510": 39357.0, "26515": 40212.0, "26520": 40054.0, "26525": 37352.0, "26530": 40732.0, "26535": 39963.0, "26540": 40728.0, "26545": 39367.0, "26550": 40824.0, "26555": 37890.0, "26560": 38729.0, "26565": 40062.0, "26570": 38663.0, "26575": 41586.0, "26580": 38660.0, "26585": 39960.0, "26590": 38440.0, "26595": 40050.0, "26600": 37211.0, "26605": 40821.0, "26610": 40071.0, "26615": 40103.0, "26620": 41492.0, "26625": 40734.0, "26630": 41589.0, "26635": 37997.0, "26640": 40735.0, "26645": 41573.0, "26650": 39280.0, "26655": 40889.0, "26660": 41587.0, "26665": 40003.0, "26670": 41502.0, "26675": 39429.0, "26680": 40045.0, "26685": 41513.0, "26690": 40038.0, "26695": 40043.0, "26700": 40040.0, "26705": 40817.0, "26710": 40809.0, "26715": 38797.0, "26720": 40832.0, "26725": 40742.0, "26730": 40054.0, "26735": 38523.0, "26740": 39523.0, "26745": 40073.0, "26750": 40733.0, "26755": 37965.0, "26760": 40115.0, "26765": 41490.0, "26770": 40813.0, "26775": 40060.0, "26780": 39435.0, "26785": 40878.0, "26790": 39269.0, "26795": 40726.0, "26800": 38665.0, "26805": 39332.0, "26810": 40887.0, "26815": 40735.0, "26820": 41573.0, "26825": 39970.0, "26830": 42287.0, "26835": 40050.0, "26840": 41583.0, "26845": 39958.0, "26850": 41507.0, "26855": 40892.0, "26860": 38597.0, "26865": 40820.0, "26870": 41580.0, "26875": 39206.0, "26880": 37338.0, "26885": 38646.0, "26890": 40135.0, "26895": 40753.0, "26900": 36355.0, "26905": 42264.0, "26910": 40052.0, "26915": 40119.0, "26920": 40030.0, "26925": 40044.0, "26930": 39351.0, "26935": 40058.0, "26940": 39342.0, "26945": 37917.0, "26950": 40058.0, "26955": 39454.0, "26960": 40038.0, "26965": 38828.0, "26970": 40041.0, "26975": 39983.0, "26980": 39351.0, "26985": 40051.0, "26990": 40831.0, "26995": 40745.0, "27000": 41502.0, "27005": 41587.0, 
"27010": 36499.0, "27015": 37131.0, "27020": 40808.0, "27025": 40817.0, "27030": 40043.0, "27035": 39381.0, "27040": 40892.0, "27045": 40796.0, "27050": 38658.0, "27055": 40131.0, "27060": 42271.0, "27065": 37968.0, "27070": 40831.0, "27075": 40819.0, "27080": 40823.0, "27085": 39964.0, "27090": 40037.0, "27095": 41566.0, "27100": 39212.0, "27105": 40040.0, "27110": 37902.0, "27115": 40038.0, "27120": 39267.0, "27125": 39362.0, "27130": 39358.0, "27135": 42272.0, "27140": 39983.0, "27145": 41579.0, "27150": 38604.0, "27155": 41502.0, "27160": 39302.0, "27165": 40126.0, "27170": 40896.0, "27175": 39975.0, "27180": 40125.0, "27185": 38021.0, "27190": 40739.0, "27195": 39972.0, "27200": 40732.0, "27205": 40814.0, "27210": 39970.0, "27215": 37468.0, "27220": 39279.0, "27225": 40046.0, "27230": 40797.0, "27235": 37193.0, "27240": 38541.0, "27245": 40027.0, "27250": 40053.0, "27255": 40746.0, "27260": 39210.0, "27265": 41502.0, "27270": 35231.0, "27275": 40054.0, "27280": 39341.0, "27285": 41499.0, "27290": 39284.0, "27295": 37823.0, "27300": 38568.0, "27305": 40826.0, "27310": 40819.0, "27315": 41569.0, "27320": 41508.0, "27325": 39962.0, "27330": 40120.0, "27335": 40728.0, "27340": 39270.0, "27345": 40916.0, "27350": 40739.0, "27355": 40900.0, "27360": 39961.0, "27365": 42275.0, "27370": 40745.0, "27375": 40822.0, "27380": 40744.0, "27385": 39344.0, "27390": 40814.0, "27395": 42266.0, "27400": 37877.0, "27405": 40878.0, "27410": 39492.0, "27415": 38523.0, "27420": 37313.0, "27425": 40123.0, "27430": 37874.0, "27435": 40040.0, "27440": 41574.0, "27445": 40128.0, "27450": 40047.0, "27455": 38527.0, "27460": 41497.0, "27465": 40741.0, "27470": 40816.0, "27475": 40061.0, "27480": 40040.0, "27485": 40048.0, "27490": 39500.0, "27495": 40729.0, "27500": 40830.0, "27505": 40117.0, "27510": 39386.0, "27515": 39970.0, "27520": 40816.0, "27525": 40751.0, "27530": 39367.0, "27535": 40747.0, "27540": 39281.0, "27545": 40887.0, "27550": 39392.0, "27555": 39273.0, "27560": 40736.0, "27565": 41507.0, "27570": 39376.0, "27575": 41491.0, "27580": 40745.0, "27585": 40107.0, "27590": 40744.0, "27595": 38579.0, "27600": 39282.0, "27605": 41599.0, "27610": 38043.0, "27615": 39976.0, "27620": 38511.0, "27625": 40088.0, "27630": 38627.0, "27635": 37755.0, "27640": 41585.0, "27645": 40126.0, "27650": 39968.0, "27655": 40051.0, "27660": 39225.0, "27665": 40099.0, "27670": 41504.0, "27675": 41574.0, "27680": 39372.0, "27685": 38664.0, "27690": 38670.0, "27695": 40058.0, "27700": 39975.0, "27705": 39463.0, "27710": 38727.0, "27715": 41571.0, "27720": 40822.0, "27725": 41503.0, "27730": 39268.0, "27735": 39448.0, "27740": 41582.0, "27745": 40052.0, "27750": 41569.0, "27755": 39280.0, "27760": 40752.0, "27765": 41582.0, "27770": 40807.0, "27775": 40039.0, "27780": 41582.0, "27785": 38719.0, "27790": 39297.0, "27795": 38585.0, "27800": 40830.0, "27805": 39447.0, "27810": 39990.0, "27815": 40056.0, "27820": 40049.0, "27825": 42266.0, "27830": 39220.0, "27835": 40120.0, "27840": 41499.0, "27845": 37281.0, "27850": 40803.0, "27855": 40746.0, "27860": 39205.0, "27865": 39971.0, "27870": 37947.0, "27875": 41580.0, "27880": 39343.0, "27885": 42265.0, "27890": 41507.0, "27895": 41505.0, "27900": 39368.0, "27905": 41599.0, "27910": 39358.0, "27915": 40814.0, "27920": 39997.0, "27925": 40126.0, "27930": 40094.0, "27935": 39959.0, "27940": 37764.0, "27945": 40800.0, "27950": 41585.0, "27955": 39492.0, "27960": 41514.0, "27965": 40080.0, "27970": 40802.0, "27975": 40033.0, "27980": 39408.0, "27985": 37998.0, "27990": 40732.0, "27995": 
41520.0, "28000": 40726.0, "28005": 40113.0, "28010": 41580.0, "28015": 40119.0, "28020": 40738.0, "28025": 37929.0, "28030": 41501.0, "28035": 40746.0, "28040": 37965.0, "28045": 39352.0, "28050": 40801.0, "28055": 38684.0, "28060": 40836.0, "28065": 37043.0, "28070": 40122.0, "28075": 39415.0, "28080": 39391.0, "28085": 40738.0, "28090": 38441.0, "28095": 37700.0, "28100": 40741.0, "28105": 41509.0, "28110": 41597.0, "28115": 39364.0, "28120": 38628.0, "28125": 40130.0, "28130": 39968.0, "28135": 36456.0, "28140": 41512.0, "28145": 36509.0, "28150": 38609.0, "28155": 37201.0, "28160": 42266.0, "28165": 40058.0, "28170": 40824.0, "28175": 40122.0, "28180": 40817.0, "28185": 40738.0, "28190": 40741.0, "28195": 39431.0, "28200": 40817.0, "28205": 39979.0, "28210": 38673.0, "28215": 38673.0, "28220": 39517.0, "28225": 41505.0, "28230": 39352.0, "28235": 42271.0, "28240": 40729.0, "28245": 39221.0, "28250": 40042.0, "28255": 39291.0, "28260": 39982.0, "28265": 40754.0, "28270": 40271.0, "28275": 40808.0, "28280": 40724.0, "28285": 39336.0, "28290": 39295.0, "28295": 40738.0, "28300": 41511.0, "28305": 40048.0, "28310": 40742.0, "28315": 40054.0, "28320": 39966.0, "28325": 41503.0, "28330": 39980.0, "28335": 40054.0, "28340": 40816.0, "28345": 40733.0, "28350": 40037.0, "28355": 40830.0, "28360": 38033.0, "28365": 40054.0, "28370": 38649.0, "28375": 40799.0, "28380": 41587.0, "28385": 38862.0, "28390": 40817.0, "28395": 37882.0, "28400": 39363.0, "28405": 40730.0, "28410": 39961.0, "28415": 41589.0, "28420": 39276.0, "28425": 41565.0, "28430": 40813.0, "28435": 40766.0, "28440": 39358.0, "28445": 40207.0, "28450": 40133.0, "28455": 40157.0, "28460": 38602.0, "28465": 40925.0, "28470": 37127.0, "28475": 41510.0, "28480": 40756.0, "28485": 39976.0, "28490": 40816.0, "28495": 39352.0, "28500": 41500.0, "28505": 40743.0, "28510": 40125.0, "28515": 41651.0, "28520": 39372.0, "28525": 36659.0, "28530": 40737.0, "28535": 40802.0, "28540": 38584.0, "28545": 37870.0, "28550": 40820.0, "28555": 41579.0, "28560": 40796.0, "28565": 38607.0, "28570": 39372.0, "28575": 39354.0, "28580": 36502.0, "28585": 40804.0, "28590": 40828.0, "28595": 40823.0, "28600": 40131.0, "28605": 42268.0, "28610": 40131.0, "28615": 39969.0, "28620": 37188.0, "28625": 39284.0, "28630": 38610.0, "28635": 40797.0, "28640": 40040.0, "28645": 40910.0, "28650": 40107.0, "28655": 40806.0, "28660": 41502.0, "28665": 40123.0, "28670": 40802.0, "28675": 42289.0, "28680": 37835.0, "28685": 40789.0, "28690": 39359.0, "28695": 40812.0, "28700": 40810.0, "28705": 40138.0, "28710": 41574.0, "28715": 37951.0, "28720": 39286.0, "28725": 40895.0, "28730": 39200.0, "28735": 37884.0, "28740": 39965.0, "28745": 41563.0, "28750": 40129.0, "28755": 38646.0, "28760": 41499.0, "28765": 39506.0, "28770": 39196.0, "28775": 41509.0, "28780": 40756.0, "28785": 40812.0, "28790": 39348.0, "28795": 40036.0, "28800": 37959.0, "28805": 40122.0, "28810": 39274.0, "28815": 41507.0, "28820": 37259.0, "28825": 40067.0, "28830": 37892.0, "28835": 39288.0, "28840": 41514.0, "28845": 38658.0, "28850": 38502.0, "28855": 38734.0, "28860": 42276.0, "28865": 41578.0, "28870": 40820.0, "28875": 40808.0, "28880": 38558.0, "28885": 40037.0, "28890": 39969.0, "28895": 39285.0, "28900": 39264.0, "28905": 39971.0, "28910": 38742.0, "28915": 40038.0, "28920": 38660.0, "28925": 41496.0, "28930": 38733.0, "28935": 38466.0, "28940": 37242.0, "28945": 40748.0, "28950": 40044.0, "28955": 40135.0, "28960": 40046.0, "28965": 41500.0, "28970": 41505.0, "28975": 41498.0, "28980": 39342.0, 
"28985": 37329.0, "28990": 38605.0, "28995": 39275.0, "29000": 40031.0, "29005": 38686.0, "29010": 40822.0, "29015": 41564.0, "29020": 37961.0, "29025": 39283.0, "29030": 41510.0, "29035": 40744.0, "29040": 40819.0, "29045": 40793.0, "29050": 40744.0, "29055": 40799.0, "29060": 39974.0, "29065": 40114.0, "29070": 40130.0, "29075": 40121.0, "29080": 40801.0, "29085": 41505.0, "29090": 39363.0, "29095": 40119.0, "29100": 40122.0, "29105": 40141.0, "29110": 40048.0, "29115": 41514.0, "29120": 39273.0, "29125": 40896.0, "29130": 38652.0, "29135": 42266.0, "29140": 39265.0, "29145": 39443.0, "29150": 40806.0, "29155": 40888.0, "29160": 40747.0, "29165": 40047.0, "29170": 39197.0, "29175": 39971.0, "29180": 39208.0, "29185": 39430.0, "29190": 39965.0, "29195": 39422.0, "29200": 38747.0, "29205": 41496.0, "29210": 38668.0, "29215": 41576.0, "29220": 40189.0, "29225": 40818.0, "29230": 40815.0, "29235": 40834.0, "29240": 39385.0, "29245": 38516.0, "29250": 41508.0, "29255": 40741.0, "29260": 40049.0, "29265": 40817.0, "29270": 38736.0, "29275": 41501.0, "29280": 41500.0, "29285": 39979.0, "29290": 40815.0, "29295": 39974.0, "29300": 40753.0, "29305": 39279.0, "29310": 40131.0, "29315": 40035.0, "29320": 40050.0, "29325": 40820.0, "29330": 40108.0, "29335": 40046.0, "29340": 40808.0, "29345": 38119.0, "29350": 39961.0, "29355": 42261.0, "29360": 39988.0, "29365": 39479.0, "29370": 38429.0, "29375": 39293.0, "29380": 41573.0, "29385": 39284.0, "29390": 39284.0, "29395": 40131.0, "29400": 40788.0, "29405": 37772.0, "29410": 38668.0, "29415": 40821.0, "29420": 40803.0, "29425": 39424.0, "29430": 39359.0, "29435": 40793.0, "29440": 41504.0, "29445": 39513.0, "29450": 41499.0, "29455": 40065.0, "29460": 39468.0, "29465": 39373.0, "29470": 40735.0, "29475": 41499.0, "29480": 39972.0, "29485": 41490.0, "29490": 38641.0, "29495": 40750.0, "29500": 38665.0, "29505": 42267.0, "29510": 40047.0, "29515": 39431.0, "29520": 40051.0, "29525": 40820.0, "29530": 39983.0, "29535": 39343.0, "29540": 39344.0, "29545": 39211.0, "29550": 41599.0, "29555": 37737.0, "29560": 40107.0, "29565": 40734.0, "29570": 40896.0, "29575": 38529.0, "29580": 40819.0, "29585": 40213.0, "29590": 38740.0, "29595": 41505.0, "29600": 39347.0, "29605": 40823.0, "29610": 38555.0, "29615": 39357.0, "29620": 41580.0, "29625": 39451.0, "29630": 40817.0, "29635": 39377.0, "29640": 39360.0, "29645": 39277.0, "29650": 41566.0, "29655": 38733.0, "29660": 40742.0, "29665": 36453.0, "29670": 39992.0, "29675": 40043.0, "29680": 39968.0, "29685": 39285.0, "29690": 40813.0, "29695": 39297.0, "29700": 40812.0, "29705": 38559.0, "29710": 40816.0, "29715": 41577.0, "29720": 40841.0, "29725": 40060.0, "29730": 39210.0, "29735": 41505.0, "29740": 41506.0, "29745": 40973.0, "29750": 40066.0, "29755": 41494.0, "29760": 37887.0, "29765": 40805.0, "29770": 41504.0, "29775": 39978.0, "29780": 38428.0, "29785": 37909.0, "29790": 37967.0, "29795": 39279.0, "29800": 40098.0, "29805": 39376.0, "29810": 39417.0, "29815": 38448.0, "29820": 40145.0, "29825": 38731.0, "29830": 40076.0, "29835": 38457.0, "29840": 41498.0, "29845": 41504.0, "29850": 42269.0, "29855": 40027.0, "29860": 40048.0, "29865": 40743.0, "29870": 40810.0, "29875": 40820.0, "29880": 38516.0, "29885": 37750.0, "29890": 40737.0, "29895": 38797.0, "29900": 39983.0, "29905": 40815.0, "29910": 42284.0, "29915": 40113.0, "29920": 40111.0, "29925": 40032.0, "29930": 39282.0, "29935": 41589.0, "29940": 39351.0, "29945": 40034.0, "29950": 41504.0, "29955": 40107.0, "29960": 38580.0, "29965": 40736.0, "29970": 
39353.0, "29975": 37884.0, "29980": 40739.0, "29985": 38110.0, "29990": 40750.0, "29995": 40040.0, "30000": 41504.0, "30005": 39357.0, "30010": 40062.0, "30015": 40135.0, "30020": 40036.0, "30025": 39976.0, "30030": 40875.0, "30035": 41576.0, "30040": 39273.0, "30045": 39310.0, "30050": 40040.0, "30055": 40214.0, "30060": 40057.0, "30065": 39293.0, "30070": 40027.0, "30075": 34446.0, "30080": 37877.0, "30085": 39368.0, "30090": 40831.0, "30095": 39291.0, "30100": 40042.0, "30105": 41504.0, "30110": 40887.0, "30115": 37298.0, "30120": 40819.0, "30125": 39343.0, "30130": 40052.0, "30135": 40057.0, "30140": 40810.0, "30145": 39340.0, "30150": 37962.0, "30155": 40741.0, "30160": 41519.0, "30165": 38739.0, "30170": 40793.0, "30175": 40753.0, "30180": 39197.0, "30185": 38795.0, "30190": 39368.0, "30195": 40733.0, "30200": 39976.0, "30205": 39367.0, "30210": 40050.0, "30215": 39422.0, "30220": 41494.0, "30225": 40039.0, "30230": 39357.0, "30235": 40831.0, "30240": 40044.0, "30245": 40134.0, "30250": 38535.0, "30255": 38763.0, "30260": 42270.0, "30265": 38129.0, "30270": 40817.0, "30275": 40040.0, "30280": 40117.0, "30285": 39359.0, "30290": 40800.0, "30295": 40065.0, "30300": 41575.0, "30305": 39976.0, "30310": 40737.0, "30315": 39273.0, "30320": 39311.0, "30325": 40747.0, "30330": 39346.0, "30335": 40843.0, "30340": 39970.0, "30345": 38633.0, "30350": 40054.0, "30355": 35671.0, "30360": 41594.0, "30365": 41516.0, "30370": 38817.0, "30375": 38665.0, "30380": 38585.0, "30385": 40043.0, "30390": 40126.0, "30395": 39379.0, "30400": 39272.0, "30405": 42267.0, "30410": 40750.0, "30415": 39373.0, "30420": 40055.0, "30425": 40062.0, "30430": 40799.0, "30435": 41584.0, "30440": 40816.0, "30445": 40826.0, "30450": 39341.0, "30455": 40035.0, "30460": 39358.0, "30465": 38755.0, "30470": 40052.0, "30475": 40730.0, "30480": 40193.0, "30485": 40108.0, "30490": 41581.0, "30495": 40740.0, "30500": 40196.0, "30505": 39369.0, "30510": 40805.0, "30515": 39280.0, "30520": 39426.0, "30525": 38659.0, "30530": 39972.0, "30535": 40053.0, "30540": 40053.0, "30545": 38586.0, "30550": 40748.0, "30555": 41663.0, "30560": 37083.0, "30565": 40809.0, "30570": 38039.0, "30575": 41511.0, "30580": 40052.0, "30585": 40046.0, "30590": 40817.0, "30595": 40138.0, "30600": 40147.0, "30605": 40823.0, "30610": 40199.0, "30615": 39282.0, "30620": 39974.0, "30625": 40734.0, "30630": 40142.0, "30635": 38529.0, "30640": 40047.0, "30645": 37971.0, "30650": 40140.0, "30655": 40040.0, "30660": 41498.0, "30665": 40806.0, "30670": 40049.0, "30675": 40879.0, "30680": 40118.0, "30685": 40049.0, "30690": 40124.0, "30695": 40739.0, "30700": 40071.0, "30705": 41503.0, "30710": 39964.0, "30715": 40825.0, "30720": 40894.0, "30725": 38679.0, "30730": 40113.0, "30735": 39420.0, "30740": 38528.0, "30745": 40066.0, "30750": 40750.0, "30755": 40746.0, "30760": 39201.0, "30765": 39429.0, "30770": 40052.0, "30775": 40046.0, "30780": 38592.0, "30785": 41506.0, "30790": 40785.0, "30795": 40737.0, "30800": 37413.0, "30805": 39289.0, "30810": 39975.0, "30815": 40737.0, "30820": 38603.0, "30825": 41492.0, "30830": 39203.0, "30835": 40058.0, "30840": 39977.0, "30845": 40046.0, "30850": 40817.0, "30855": 39277.0, "30860": 40140.0, "30865": 37931.0, "30870": 40056.0, "30875": 41501.0, "30880": 39367.0, "30885": 41496.0, "30890": 40830.0, "30895": 39362.0, "30900": 41510.0, "30905": 40959.0, "30910": 40743.0, "30915": 40728.0, "30920": 42266.0, "30925": 37832.0, "30930": 40053.0, "30935": 40734.0, "30940": 38532.0, "30945": 37991.0, "30950": 41498.0, "30955": 39434.0, 
"30960": 38515.0, "30965": 38066.0, "30970": 37836.0, "30975": 40096.0, "30980": 40818.0, "30985": 40896.0, "30990": 37139.0, "30995": 38804.0, "31000": 41588.0, "31005": 40793.0, "31010": 38574.0, "31015": 43036.0, "31020": 39275.0, "31025": 40799.0, "31030": 39277.0, "31035": 40828.0, "31040": 39369.0, "31045": 37754.0, "31050": 42265.0, "31055": 40743.0, "31060": 40139.0, "31065": 37950.0, "31070": 39489.0, "31075": 40821.0, "31080": 41574.0, "31085": 40744.0, "31090": 40063.0, "31095": 41498.0, "31100": 40202.0, "31105": 42346.0, "31110": 39428.0, "31115": 41580.0, "31120": 40043.0, "31125": 40738.0, "31130": 39965.0, "31135": 38743.0, "31140": 40045.0, "31145": 41501.0, "31150": 38604.0, "31155": 40831.0, "31160": 39961.0, "31165": 41576.0, "31170": 40132.0, "31175": 40890.0, "31180": 40796.0, "31185": 39446.0, "31190": 39974.0, "31195": 41507.0, "31200": 40807.0, "31205": 38021.0, "31210": 38657.0, "31215": 42346.0, "31220": 40053.0, "31225": 41502.0, "31230": 39285.0, "31235": 40130.0, "31240": 42272.0, "31245": 40045.0, "31250": 39376.0, "31255": 40741.0, "31260": 40744.0, "31265": 40034.0, "31270": 39281.0, "31275": 39976.0, "31280": 40034.0, "31285": 38050.0, "31290": 40810.0, "31295": 37921.0, "31300": 41585.0, "31305": 40143.0, "31310": 41680.0, "31315": 38602.0, "31320": 36939.0, "31325": 39294.0, "31330": 40020.0, "31335": 40743.0, "31340": 40816.0, "31345": 40037.0, "31350": 40123.0, "31355": 40799.0, "31360": 38533.0, "31365": 40040.0, "31370": 40134.0, "31375": 40035.0, "31380": 40062.0, "31385": 38504.0, "31390": 39957.0, "31395": 40194.0, "31400": 39494.0, "31405": 41513.0, "31410": 40049.0, "31415": 40132.0, "31420": 40033.0, "31425": 40734.0, "31430": 40045.0, "31435": 40807.0, "31440": 39980.0, "31445": 40786.0, "31450": 38597.0, "31455": 40037.0, "31460": 39435.0, "31465": 37904.0, "31470": 40155.0, "31475": 38614.0, "31480": 41595.0, "31485": 39366.0, "31490": 40820.0, "31495": 38520.0, "31500": 37974.0, "31505": 39990.0, "31510": 39433.0, "31515": 37841.0, "31520": 38004.0, "31525": 35144.0, "31530": 39296.0, "31535": 41506.0, "31540": 42348.0, "31545": 40811.0, "31550": 40742.0, "31555": 38515.0, "31560": 42266.0, "31565": 40049.0, "31570": 40794.0, "31575": 41574.0, "31580": 40046.0, "31585": 42262.0, "31590": 39367.0, "31595": 41579.0, "31600": 38663.0, "31605": 36512.0, "31610": 40745.0, "31615": 37924.0, "31620": 40741.0, "31625": 38442.0, "31630": 38561.0, "31635": 39435.0, "31640": 40057.0, "31645": 39371.0, "31650": 40797.0, "31655": 40057.0, "31660": 39978.0, "31665": 40737.0, "31670": 38592.0, "31675": 40743.0, "31680": 37843.0, "31685": 40806.0, "31690": 40799.0, "31695": 37979.0, "31700": 37904.0, "31705": 39968.0, "31710": 40744.0, "31715": 40809.0, "31720": 39398.0, "31725": 39349.0, "31730": 39359.0, "31735": 40753.0, "31740": 37762.0, "31745": 40129.0, "31750": 40128.0, "31755": 39341.0, "31760": 39211.0, "31765": 40814.0, "31770": 41572.0, "31775": 38577.0, "31780": 41583.0, "31785": 40809.0, "31790": 38647.0, "31795": 40812.0, "31800": 40053.0, "31805": 40081.0, "31810": 39273.0, "31815": 37927.0, "31820": 39983.0, "31825": 40729.0, "31830": 40055.0, "31835": 40065.0, "31840": 40175.0, "31845": 40054.0, "31850": 40809.0, "31855": 40810.0, "31860": 39986.0, "31865": 40823.0, "31870": 40120.0, "31875": 38489.0, "31880": 40121.0, "31885": 40036.0, "31890": 39992.0, "31895": 40206.0, "31900": 38586.0, "31905": 39347.0, "31910": 39271.0, "31915": 41506.0, "31920": 38654.0, "31925": 39280.0, "31930": 35803.0, "31935": 39424.0, "31940": 39976.0, "31945": 
40740.0, "31950": 40795.0, "31955": 36516.0, "31960": 39267.0, "31965": 40064.0, "31970": 39966.0, "31975": 40808.0, "31980": 40062.0, "31985": 39365.0, "31990": 40991.0, "31995": 39435.0, "32000": 40878.0, "32005": 38625.0, "32010": 40797.0, "32015": 39987.0, "32020": 40870.0, "32025": 39352.0, "32030": 40800.0, "32035": 37720.0, "32040": 40102.0, "32045": 38680.0, "32050": 40813.0, "32055": 39299.0, "32060": 39968.0, "32065": 40168.0, "32070": 39364.0, "32075": 39290.0, "32080": 40803.0, "32085": 40117.0, "32090": 39970.0, "32095": 41498.0, "32100": 42263.0, "32105": 40824.0, "32110": 40045.0, "32115": 40830.0, "32120": 40052.0, "32125": 40736.0, "32130": 40734.0, "32135": 40811.0, "32140": 39363.0, "32145": 40057.0, "32150": 40746.0, "32155": 38573.0, "32160": 39962.0, "32165": 40823.0, "32170": 40100.0, "32175": 37224.0, "32180": 38587.0, "32185": 41498.0, "32190": 40040.0, "32195": 37980.0, "32200": 38574.0, "32205": 40196.0, "32210": 40035.0, "32215": 40756.0, "32220": 41591.0, "32225": 40817.0, "32230": 42270.0, "32235": 41588.0, "32240": 40173.0, "32245": 40139.0, "32250": 40891.0, "32255": 40800.0, "32260": 38665.0, "32265": 40815.0, "32270": 36695.0, "32275": 40835.0, "32280": 40123.0, "32285": 39969.0, "32290": 37169.0, "32295": 40127.0, "32300": 40123.0, "32305": 40733.0, "32310": 39253.0, "32315": 41555.0, "32320": 41503.0, "32325": 37224.0, "32330": 37825.0, "32335": 39967.0, "32340": 40741.0, "32345": 38785.0, "32350": 40825.0, "32355": 40033.0, "32360": 42265.0, "32365": 40027.0, "32370": 39321.0, "32375": 42274.0, "32380": 41496.0, "32385": 37828.0, "32390": 38587.0, "32395": 40868.0, "32400": 40810.0, "32405": 40051.0, "32410": 39385.0, "32415": 38499.0, "32420": 40034.0, "32425": 38660.0, "32430": 40107.0, "32435": 36685.0, "32440": 40053.0, "32445": 40894.0, "32450": 40127.0, "32455": 40815.0, "32460": 41595.0, "32465": 39978.0, "32470": 39198.0, "32475": 40831.0, "32480": 39971.0, "32485": 40817.0, "32490": 40114.0, "32495": 38515.0, "32500": 39349.0, "32505": 40734.0, "32510": 40810.0, "32515": 40114.0, "32520": 38673.0, "32525": 40729.0, "32530": 40149.0, "32535": 40815.0, "32540": 40056.0, "32545": 40881.0, "32550": 39349.0, "32555": 41508.0, "32560": 40820.0, "32565": 38515.0, "32570": 39368.0, "32575": 40126.0, "32580": 39447.0, "32585": 37145.0, "32590": 41586.0, "32595": 39972.0, "32600": 41509.0, "32605": 40824.0, "32610": 38680.0, "32615": 40809.0, "32620": 40056.0, "32625": 40909.0, "32630": 35994.0, "32635": 38575.0, "32640": 40050.0, "32645": 40029.0, "32650": 40130.0, "32655": 40798.0, "32660": 40893.0, "32665": 40055.0, "32670": 38052.0, "32675": 41575.0, "32680": 39356.0, "32685": 40736.0, "32690": 40197.0, "32695": 39294.0, "32700": 41513.0, "32705": 37197.0, "32710": 37826.0, "32715": 40734.0, "32720": 40066.0, "32725": 40888.0, "32730": 41552.0, "32735": 40873.0, "32740": 39195.0, "32745": 40091.0, "32750": 40806.0, "32755": 39345.0, "32760": 38645.0, "32765": 42270.0, "32770": 38026.0, "32775": 39290.0, "32780": 40876.0, "32785": 39461.0, "32790": 42274.0, "32795": 40738.0, "32800": 39370.0, "32805": 36437.0, "32810": 40742.0, "32815": 41578.0, "32820": 40819.0, "32825": 40809.0, "32830": 39355.0, "32835": 40888.0, "32840": 38726.0, "32845": 40816.0, "32850": 39376.0, "32855": 40736.0, "32860": 39271.0, "32865": 41519.0, "32870": 39346.0, "32875": 40098.0, "32880": 37972.0, "32885": 40051.0, "32890": 41592.0, "32895": 39415.0, "32900": 40060.0, "32905": 37843.0, "32910": 37963.0, "32915": 38661.0, "32920": 41587.0, "32925": 40823.0, "32930": 38590.0, 
"32935": 40065.0, "32940": 40806.0, "32945": 40816.0, "32950": 40811.0, "32955": 38056.0, "32960": 40742.0, "32965": 37835.0, "32970": 40822.0, "32975": 40207.0, "32980": 40751.0, "32985": 39383.0, "32990": 40182.0, "32995": 40745.0, "33000": 38456.0, "33005": 39474.0, "33010": 37972.0, "33015": 39365.0, "33020": 39284.0, "33025": 40813.0, "33030": 38651.0, "33035": 41499.0, "33040": 40886.0, "33045": 43030.0, "33050": 38617.0, "33055": 37972.0, "33060": 40046.0, "33065": 38667.0, "33070": 40215.0, "33075": 39208.0, "33080": 38662.0, "33085": 38539.0, "33090": 37323.0, "33095": 40113.0, "33100": 38681.0, "33105": 40069.0, "33110": 40119.0, "33115": 39212.0, "33120": 40127.0, "33125": 39987.0, "33130": 41496.0, "33135": 39281.0, "33140": 40036.0, "33145": 41503.0, "33150": 40205.0, "33155": 40047.0, "33160": 38646.0, "33165": 41511.0, "33170": 39210.0, "33175": 40803.0, "33180": 41499.0, "33185": 40822.0, "33190": 41574.0, "33195": 37919.0, "33200": 41581.0, "33205": 41508.0, "33210": 40752.0, "33215": 40812.0, "33220": 40141.0, "33225": 41506.0, "33230": 39352.0, "33235": 38761.0, "33240": 40751.0, "33245": 41511.0, "33250": 42270.0, "33255": 40822.0, "33260": 42266.0, "33265": 40126.0, "33270": 40057.0, "33275": 36605.0, "33280": 40729.0, "33285": 39444.0, "33290": 37269.0, "33295": 39318.0, "33300": 40811.0, "33305": 39975.0, "33310": 39287.0, "33315": 41512.0, "33320": 39984.0, "33325": 40733.0, "33330": 40057.0, "33335": 39975.0, "33340": 38519.0, "33345": 40042.0, "33350": 37300.0, "33355": 39525.0, "33360": 39286.0, "33365": 40954.0, "33370": 40825.0, "33375": 37376.0, "33380": 38690.0, "33385": 40741.0, "33390": 40741.0, "33395": 38668.0, "33400": 37848.0, "33405": 40113.0, "33410": 41511.0, "33415": 41498.0, "33420": 38494.0, "33425": 40748.0, "33430": 39352.0, "33435": 38523.0, "33440": 41504.0, "33445": 40803.0, "33450": 37337.0, "33455": 40815.0, "33460": 40811.0, "33465": 41647.0, "33470": 39443.0, "33475": 40813.0, "33480": 38659.0, "33485": 40891.0, "33490": 39281.0, "33495": 40800.0, "33500": 40810.0, "33505": 40049.0, "33510": 37759.0, "33515": 37995.0, "33520": 40729.0, "33525": 39215.0, "33530": 40799.0, "33535": 40906.0, "33540": 40831.0, "33545": 42347.0, "33550": 40892.0, "33555": 40803.0, "33560": 39366.0, "33565": 40035.0, "33570": 40209.0, "33575": 39419.0, "33580": 40817.0, "33585": 41518.0, "33590": 40117.0, "33595": 40070.0, "33600": 38526.0, "33605": 41518.0, "33610": 40727.0, "33615": 37957.0, "33620": 40810.0, "33625": 40064.0, "33630": 39993.0, "33635": 41505.0, "33640": 40728.0, "33645": 41499.0, "33650": 38681.0, "33655": 36766.0, "33660": 39980.0, "33665": 40125.0, "33670": 40735.0, "33675": 37980.0, "33680": 38679.0, "33685": 40818.0, "33690": 41506.0, "33695": 39975.0, "33700": 38610.0, "33705": 40043.0, "33710": 38562.0, "33715": 40067.0, "33720": 39335.0, "33725": 38700.0, "33730": 41511.0, "33735": 40746.0, "33740": 40822.0, "33745": 40754.0, "33750": 40811.0, "33755": 40149.0, "33760": 39348.0, "33765": 38445.0, "33770": 40739.0, "33775": 39431.0, "33780": 39480.0, "33785": 37888.0, "33790": 37728.0, "33795": 41518.0, "33800": 39264.0, "33805": 40050.0, "33810": 39288.0, "33815": 42273.0, "33820": 40126.0, "33825": 40755.0, "33830": 40208.0, "33835": 38524.0, "33840": 41574.0, "33845": 39368.0, "33850": 41502.0, "33855": 37967.0, "33860": 39468.0, "33865": 38065.0, "33870": 40730.0, "33875": 43027.0, "33880": 39439.0, "33885": 40030.0, "33890": 40049.0, "33895": 39977.0, "33900": 37001.0, "33905": 40744.0, "33910": 39962.0, "33915": 42267.0, "33920": 
40065.0, "33925": 37831.0, "33930": 39438.0, "33935": 39442.0, "33940": 40047.0, "33945": 40741.0, "33950": 39440.0, "33955": 40816.0, "33960": 40826.0, "33965": 41513.0, "33970": 40754.0, "33975": 40812.0, "33980": 40122.0, "33985": 40822.0, "33990": 40732.0, "33995": 41580.0, "34000": 40739.0, "34005": 39979.0, "34010": 40131.0, "34015": 38648.0, "34020": 40798.0, "34025": 41589.0, "34030": 39347.0, "34035": 40043.0, "34040": 37992.0, "34045": 40819.0, "34050": 40061.0, "34055": 39279.0, "34060": 37154.0, "34065": 39227.0, "34070": 41509.0, "34075": 41577.0, "34080": 40125.0, "34085": 38491.0, "34090": 40122.0, "34095": 38523.0, "34100": 41505.0, "34105": 38582.0, "34110": 40824.0, "34115": 39372.0, "34120": 41503.0, "34125": 40127.0, "34130": 39977.0, "34135": 39275.0, "34140": 38576.0, "34145": 42274.0, "34150": 38525.0, "34155": 40727.0, "34160": 39977.0, "34165": 40817.0, "34170": 41506.0, "34175": 37177.0, "34180": 40123.0, "34185": 39337.0, "34190": 39305.0, "34195": 40741.0, "34200": 38738.0, "34205": 40061.0, "34210": 40751.0, "34215": 38596.0, "34220": 39373.0, "34225": 40728.0, "34230": 38725.0, "34235": 39971.0, "34240": 39275.0, "34245": 40038.0, "34250": 38603.0, "34255": 39372.0, "34260": 40055.0, "34265": 40029.0, "34270": 39435.0, "34275": 40810.0, "34280": 40730.0, "34285": 39974.0, "34290": 43029.0, "34295": 38427.0, "34300": 37876.0, "34305": 40734.0, "34310": 39373.0, "34315": 39351.0, "34320": 39963.0, "34325": 41506.0, "34330": 39962.0, "34335": 40123.0, "34340": 42275.0, "34345": 40208.0, "34350": 39349.0, "34355": 40821.0, "34360": 40041.0, "34365": 39299.0, "34370": 39342.0, "34375": 42275.0, "34380": 37873.0, "34385": 40792.0, "34390": 39449.0, "34395": 39223.0, "34400": 39197.0, "34405": 40830.0, "34410": 41510.0, "34415": 40820.0, "34420": 40042.0, "34425": 38538.0, "34430": 41511.0, "34435": 39967.0, "34440": 40119.0, "34445": 39380.0, "34450": 41507.0, "34455": 40726.0, "34460": 41575.0, "34465": 41566.0, "34470": 38046.0, "34475": 40125.0, "34480": 40817.0, "34485": 38635.0, "34490": 40065.0, "34495": 40756.0, "34500": 40977.0, "34505": 38512.0, "34510": 40839.0, "34515": 40057.0, "34520": 39968.0, "34525": 40032.0, "34530": 37819.0, "34535": 40741.0, "34540": 40805.0, "34545": 40819.0, "34550": 41579.0, "34555": 37173.0, "34560": 41522.0, "34565": 39429.0, "34570": 40736.0, "34575": 38779.0, "34580": 40753.0, "34585": 37976.0, "34590": 39459.0, "34595": 40761.0, "34600": 40875.0, "34605": 40786.0, "34610": 37247.0, "34615": 39296.0, "34620": 39971.0, "34625": 42269.0, "34630": 39432.0, "34635": 40884.0, "34640": 39302.0, "34645": 40049.0, "34650": 41510.0, "34655": 39378.0, "34660": 39527.0, "34665": 41561.0, "34670": 40741.0, "34675": 39353.0, "34680": 39273.0, "34685": 39448.0, "34690": 41565.0, "34695": 39294.0, "34700": 39350.0, "34705": 39360.0, "34710": 42343.0, "34715": 39424.0, "34720": 40807.0, "34725": 39221.0, "34730": 36463.0, "34735": 38721.0, "34740": 40138.0, "34745": 40742.0, "34750": 40101.0, "34755": 40815.0, "34760": 40142.0, "34765": 40809.0, "34770": 38828.0, "34775": 39384.0, "34780": 39302.0, "34785": 40044.0, "34790": 40796.0, "34795": 39343.0, "34800": 40116.0, "34805": 39428.0, "34810": 40061.0, "34815": 42270.0, "34820": 41509.0, "34825": 40112.0, "34830": 39285.0, "34835": 39400.0, "34840": 40103.0, "34845": 41570.0, "34850": 39974.0, "34855": 38533.0, "34860": 37963.0, "34865": 39272.0, "34870": 40740.0, "34875": 37689.0, "34880": 41498.0, "34885": 42349.0, "34890": 41502.0, "34895": 38609.0, "34900": 40735.0, "34905": 41506.0, 
"34910": 40114.0, "34915": 41518.0, "34920": 39261.0, "34925": 39361.0, "34930": 39207.0, "34935": 39961.0, "34940": 39985.0, "34945": 40889.0, "34950": 40736.0, "34955": 39415.0, "34960": 40840.0, "34965": 40824.0, "34970": 38722.0, "34975": 37892.0, "34980": 40120.0, "34985": 40810.0, "34990": 40825.0, "34995": 41569.0, "35000": 37905.0, "35005": 41497.0, "35010": 37201.0, "35015": 39979.0, "35020": 40909.0, "35025": 39402.0, "35030": 39422.0, "35035": 39288.0, "35040": 41651.0, "35045": 38534.0, "35050": 41501.0, "35055": 40748.0, "35060": 40042.0, "35065": 39346.0, "35070": 38783.0, "35075": 40814.0, "35080": 39968.0, "35085": 40742.0, "35090": 39377.0, "35095": 38847.0, "35100": 39273.0, "35105": 38590.0, "35110": 40064.0, "35115": 38593.0, "35120": 37820.0, "35125": 39414.0, "35130": 36362.0, "35135": 37880.0, "35140": 41578.0, "35145": 40106.0, "35150": 37764.0, "35155": 41506.0, "35160": 40885.0, "35165": 40745.0, "35170": 39432.0, "35175": 41519.0, "35180": 40050.0, "35185": 41588.0, "35190": 39411.0, "35195": 40114.0, "35200": 38603.0, "35205": 39371.0, "35210": 40901.0, "35215": 39371.0, "35220": 40117.0, "35225": 40740.0, "35230": 40900.0, "35235": 39371.0, "35240": 40186.0, "35245": 40124.0, "35250": 40050.0, "35255": 40058.0, "35260": 40726.0, "35265": 35218.0, "35270": 38750.0, "35275": 40033.0, "35280": 39205.0, "35285": 40749.0, "35290": 39346.0, "35295": 41512.0, "35300": 41569.0, "35305": 38589.0, "35310": 39199.0, "35315": 40147.0, "35320": 39983.0, "35325": 39411.0, "35330": 39220.0, "35335": 40825.0, "35340": 40104.0, "35345": 40802.0, "35350": 38068.0, "35355": 39506.0, "35360": 39361.0, "35365": 38599.0, "35370": 40100.0, "35375": 40820.0, "35380": 39292.0, "35385": 41502.0, "35390": 40729.0, "35395": 40798.0, "35400": 39434.0, "35405": 40892.0, "35410": 40806.0, "35415": 39975.0, "35420": 40806.0, "35425": 39981.0, "35430": 40070.0, "35435": 40811.0, "35440": 40119.0, "35445": 39445.0, "35450": 40063.0, "35455": 40810.0, "35460": 40905.0, "35465": 36589.0, "35470": 41493.0, "35475": 38677.0, "35480": 39985.0, "35485": 39970.0, "35490": 40043.0, "35495": 39215.0, "35500": 40114.0, "35505": 38606.0, "35510": 39275.0, "35515": 39316.0, "35520": 40106.0, "35525": 40749.0, "35530": 40830.0, "35535": 40878.0, "35540": 40755.0, "35545": 39287.0, "35550": 40063.0, "35555": 40807.0, "35560": 36326.0, "35565": 40748.0, "35570": 41556.0, "35575": 39440.0, "35580": 41500.0, "35585": 38672.0, "35590": 40052.0, "35595": 38528.0, "35600": 40108.0, "35605": 40122.0, "35610": 40124.0, "35615": 35975.0, "35620": 41515.0, "35625": 40809.0, "35630": 38461.0, "35635": 39988.0, "35640": 39353.0, "35645": 40808.0, "35650": 38590.0, "35655": 40803.0, "35660": 40816.0, "35665": 40055.0, "35670": 40060.0, "35675": 40144.0, "35680": 38675.0, "35685": 40797.0, "35690": 41576.0, "35695": 42267.0, "35700": 39445.0, "35705": 39379.0, "35710": 40727.0, "35715": 40100.0, "35720": 39969.0, "35725": 40725.0, "35730": 39276.0, "35735": 42271.0, "35740": 41585.0, "35745": 40741.0, "35750": 40741.0, "35755": 39442.0, "35760": 40745.0, "35765": 40187.0, "35770": 42266.0, "35775": 40036.0, "35780": 41586.0, "35785": 41570.0, "35790": 38598.0, "35795": 40740.0, "35800": 40075.0, "35805": 39320.0, "35810": 40028.0, "35815": 38435.0, "35820": 42429.0, "35825": 41492.0, "35830": 40817.0, "35835": 40277.0, "35840": 40126.0, "35845": 39298.0, "35850": 40114.0, "35855": 39301.0, "35860": 40822.0, "35865": 38662.0, "35870": 41500.0, "35875": 41567.0, "35880": 38745.0, "35885": 39352.0, "35890": 38436.0, "35895": 
39970.0, "35900": 38745.0, "35905": 40109.0, "35910": 40051.0, "35915": 37359.0, "35920": 38754.0, "35925": 40888.0, "35930": 40807.0, "35935": 40034.0, "35940": 40047.0, "35945": 40736.0, "35950": 41509.0, "35955": 40049.0, "35960": 40046.0, "35965": 40132.0, "35970": 39349.0, "35975": 40039.0, "35980": 40743.0, "35985": 40196.0, "35990": 39354.0, "35995": 38540.0, "36000": 40195.0, "36005": 42353.0, "36010": 41496.0, "36015": 41569.0, "36020": 40829.0, "36025": 42265.0, "36030": 41565.0, "36035": 40762.0, "36040": 41505.0, "36045": 42256.0, "36050": 38540.0, "36055": 40041.0, "36060": 41508.0, "36065": 40127.0, "36070": 39409.0, "36075": 40806.0, "36080": 40147.0, "36085": 39284.0, "36090": 40138.0, "36095": 40723.0, "36100": 40810.0, "36105": 38739.0, "36110": 40149.0, "36115": 42265.0, "36120": 40892.0, "36125": 39527.0, "36130": 40819.0, "36135": 38521.0, "36140": 40037.0, "36145": 41588.0, "36150": 41507.0, "36155": 39484.0, "36160": 40744.0, "36165": 40798.0, "36170": 40803.0, "36175": 40040.0, "36180": 40736.0, "36185": 39427.0, "36190": 37902.0, "36195": 38583.0, "36200": 41581.0, "36205": 41586.0, "36210": 39970.0, "36215": 39960.0, "36220": 40128.0, "36225": 41558.0, "36230": 39305.0, "36235": 40094.0, "36240": 39413.0, "36245": 39968.0, "36250": 41496.0, "36255": 39992.0, "36260": 39431.0, "36265": 39452.0, "36270": 41500.0, "36275": 40127.0, "36280": 38586.0, "36285": 40796.0, "36290": 39291.0, "36295": 41499.0, "36300": 39426.0, "36305": 40731.0, "36310": 41562.0, "36315": 38670.0, "36320": 38824.0, "36325": 40740.0, "36330": 41572.0, "36335": 40746.0, "36340": 37883.0, "36345": 40813.0, "36350": 40744.0, "36355": 39985.0, "36360": 40749.0, "36365": 38674.0, "36370": 40189.0, "36375": 41507.0, "36380": 39343.0, "36385": 39973.0, "36390": 40740.0, "36395": 38058.0, "36400": 41512.0, "36405": 38500.0, "36410": 41509.0, "36415": 39202.0, "36420": 41586.0, "36425": 40814.0, "36430": 38727.0, "36435": 40754.0, "36440": 40131.0, "36445": 37898.0, "36450": 38660.0, "36455": 40123.0, "36460": 38018.0, "36465": 39299.0, "36470": 40118.0, "36475": 37829.0, "36480": 40817.0, "36485": 38671.0, "36490": 40757.0, "36495": 39440.0, "36500": 38626.0, "36505": 40809.0, "36510": 37825.0, "36515": 39210.0, "36520": 35643.0, "36525": 40064.0, "36530": 40112.0, "36535": 41506.0, "36540": 40802.0, "36545": 41512.0, "36550": 39219.0, "36555": 40812.0, "36560": 40748.0, "36565": 38731.0, "36570": 40805.0, "36575": 39206.0, "36580": 39451.0, "36585": 39969.0, "36590": 40812.0, "36595": 40029.0, "36600": 40735.0, "36605": 40152.0, "36610": 38795.0, "36615": 39435.0, "36620": 41514.0, "36625": 40057.0, "36630": 39439.0, "36635": 40057.0, "36640": 38030.0, "36645": 40132.0, "36650": 42357.0, "36655": 40136.0, "36660": 39365.0, "36665": 39428.0, "36670": 39990.0, "36675": 39206.0, "36680": 40824.0, "36685": 39275.0, "36690": 37952.0, "36695": 41562.0, "36700": 41644.0, "36705": 41582.0, "36710": 40739.0, "36715": 40746.0, "36720": 40045.0, "36725": 41578.0, "36730": 37777.0, "36735": 41513.0, "36740": 40111.0, "36745": 40884.0, "36750": 41568.0, "36755": 40129.0, "36760": 39987.0, "36765": 41501.0, "36770": 39363.0, "36775": 41507.0, "36780": 40110.0, "36785": 41510.0, "36790": 40050.0, "36795": 39342.0, "36800": 39290.0, "36805": 38521.0, "36810": 39301.0, "36815": 38524.0, "36820": 39287.0, "36825": 40720.0, "36830": 39976.0, "36835": 40049.0, "36840": 40206.0, "36845": 40130.0, "36850": 40052.0, "36855": 40877.0, "36860": 40130.0, "36865": 40142.0, "36870": 36520.0, "36875": 38439.0, "36880": 40126.0, 
"36885": 40067.0, "36890": 40734.0, "36895": 39282.0, "36900": 39272.0, "36905": 40055.0, "36910": 40146.0, "36915": 40740.0, "36920": 40800.0, "36925": 39271.0, "36930": 41512.0, "36935": 40808.0, "36940": 40256.0, "36945": 40117.0, "36950": 37914.0, "36955": 39288.0, "36960": 38659.0, "36965": 37914.0, "36970": 40124.0, "36975": 39348.0, "36980": 40742.0, "36985": 40906.0, "36990": 40734.0, "36995": 40118.0, "37000": 40070.0, "37005": 41581.0, "37010": 42264.0, "37015": 38741.0, "37020": 40128.0, "37025": 41579.0, "37030": 39294.0, "37035": 39378.0, "37040": 40061.0, "37045": 40811.0, "37050": 41574.0, "37055": 37825.0, "37060": 38696.0, "37065": 40068.0, "37070": 40091.0, "37075": 40052.0, "37080": 38819.0, "37085": 39355.0, "37090": 40043.0, "37095": 38574.0, "37100": 40257.0, "37105": 39353.0, "37110": 40811.0, "37115": 40050.0, "37120": 40813.0, "37125": 40951.0, "37130": 39427.0, "37135": 40812.0, "37140": 38719.0, "37145": 40833.0, "37150": 40163.0, "37155": 40041.0, "37160": 40117.0, "37165": 39967.0, "37170": 42336.0, "37175": 39350.0, "37180": 39342.0, "37185": 38598.0, "37190": 41506.0, "37195": 39979.0, "37200": 41578.0, "37205": 39393.0, "37210": 39983.0, "37215": 39970.0, "37220": 40123.0, "37225": 38581.0, "37230": 40738.0, "37235": 39967.0, "37240": 37768.0, "37245": 40002.0, "37250": 36459.0, "37255": 38730.0, "37260": 40118.0, "37265": 41508.0, "37270": 40736.0, "37275": 40057.0, "37280": 40815.0, "37285": 39436.0, "37290": 40053.0, "37295": 39429.0, "37300": 40817.0, "37305": 39977.0, "37310": 36611.0, "37315": 41591.0, "37320": 40073.0, "37325": 40044.0, "37330": 39981.0, "37335": 40813.0, "37340": 40110.0, "37345": 38590.0, "37350": 39966.0, "37355": 40804.0, "37360": 38552.0, "37365": 39445.0, "37370": 39211.0, "37375": 36981.0, "37380": 38553.0, "37385": 40738.0, "37390": 38723.0, "37395": 39264.0, "37400": 40817.0, "37405": 38663.0, "37410": 40816.0, "37415": 41573.0, "37420": 40804.0, "37425": 41499.0, "37430": 39220.0, "37435": 40119.0, "37440": 39289.0, "37445": 39970.0, "37450": 41499.0, "37455": 40117.0, "37460": 39360.0, "37465": 40801.0, "37470": 39967.0, "37475": 40071.0, "37480": 40733.0, "37485": 37262.0, "37490": 37256.0, "37495": 40053.0, "37500": 39988.0, "37505": 38105.0, "37510": 36533.0, "37515": 40116.0, "37520": 41506.0, "37525": 40892.0, "37530": 40736.0, "37535": 40054.0, "37540": 41505.0, "37545": 40841.0, "37550": 37757.0, "37555": 40080.0, "37560": 41584.0, "37565": 40024.0, "37570": 40737.0, "37575": 41641.0, "37580": 41572.0, "37585": 40027.0, "37590": 40816.0, "37595": 40965.0, "37600": 40109.0, "37605": 40807.0, "37610": 42264.0, "37615": 40823.0, "37620": 40800.0, "37625": 42343.0, "37630": 38725.0, "37635": 40813.0, "37640": 39452.0, "37645": 40829.0, "37650": 40885.0, "37655": 39284.0, "37660": 40121.0, "37665": 42277.0, "37670": 42275.0, "37675": 40045.0, "37680": 39200.0, "37685": 40042.0, "37690": 39351.0, "37695": 37990.0, "37700": 40901.0, "37705": 39362.0, "37710": 40130.0, "37715": 39290.0, "37720": 41506.0, "37725": 40820.0, "37730": 38496.0, "37735": 38794.0, "37740": 39980.0, "37745": 40124.0, "37750": 40044.0, "37755": 39981.0, "37760": 40789.0, "37765": 41514.0, "37770": 40197.0, "37775": 40745.0, "37780": 41516.0, "37785": 40734.0, "37790": 39290.0, "37795": 39204.0, "37800": 39288.0, "37805": 39291.0, "37810": 40744.0, "37815": 42350.0, "37820": 41559.0, "37825": 40742.0, "37830": 40049.0, "37835": 40031.0, "37840": 41515.0, "37845": 39283.0, "37850": 38042.0, "37855": 39220.0, "37860": 40810.0, "37865": 39971.0, "37870": 
40734.0, "37875": 40823.0, "37880": 39257.0, "37885": 39974.0, "37890": 39415.0, "37895": 39298.0, "37900": 39487.0, "37905": 37401.0, "37910": 40747.0, "37915": 38515.0, "37920": 39219.0, "37925": 40741.0, "37930": 40893.0, "37935": 38593.0, "37940": 40054.0, "37945": 39275.0, "37950": 40055.0, "37955": 38740.0, "37960": 40747.0, "37965": 41512.0, "37970": 37742.0, "37975": 40801.0, "37980": 40741.0, "37985": 38584.0, "37990": 42281.0, "37995": 37990.0, "38000": 39972.0, "38005": 40820.0, "38010": 40805.0, "38015": 40800.0, "38020": 41496.0, "38025": 40860.0, "38030": 38610.0, "38035": 42273.0, "38040": 39385.0, "38045": 41579.0, "38050": 39285.0, "38055": 40044.0, "38060": 37949.0, "38065": 39270.0, "38070": 40059.0, "38075": 42275.0, "38080": 40117.0, "38085": 41580.0, "38090": 39348.0, "38095": 40058.0, "38100": 41567.0, "38105": 38607.0, "38110": 36588.0, "38115": 40739.0, "38120": 39278.0, "38125": 40122.0, "38130": 39288.0, "38135": 39281.0, "38140": 40022.0, "38145": 37277.0, "38150": 39969.0, "38155": 40805.0, "38160": 43033.0, "38165": 36369.0, "38170": 40805.0, "38175": 39430.0, "38180": 37042.0, "38185": 40049.0, "38190": 40050.0, "38195": 40056.0, "38200": 40807.0, "38205": 41503.0, "38210": 39283.0, "38215": 40739.0, "38220": 41581.0, "38225": 38044.0, "38230": 40728.0, "38235": 40018.0, "38240": 40808.0, "38245": 37366.0, "38250": 39262.0, "38255": 40871.0, "38260": 38668.0, "38265": 39983.0, "38270": 40732.0, "38275": 40029.0, "38280": 41567.0, "38285": 37854.0, "38290": 40871.0, "38295": 41490.0, "38300": 43035.0, "38305": 40111.0, "38310": 37261.0, "38315": 39441.0, "38320": 40819.0, "38325": 39408.0, "38330": 40071.0, "38335": 40135.0, "38340": 40716.0, "38345": 37436.0, "38350": 41575.0, "38355": 39215.0, "38360": 38599.0, "38365": 39285.0, "38370": 40836.0, "38375": 39449.0, "38380": 40135.0, "38385": 40076.0, "38390": 39971.0, "38395": 38824.0, "38400": 39345.0, "38405": 40047.0, "38410": 40054.0, "38415": 40900.0, "38420": 40039.0, "38425": 41500.0, "38430": 39275.0, "38435": 41501.0, "38440": 39984.0, "38445": 37137.0, "38450": 39963.0, "38455": 40824.0, "38460": 39418.0, "38465": 39276.0, "38470": 37332.0, "38475": 40230.0, "38480": 39432.0, "38485": 37285.0, "38490": 39205.0, "38495": 39200.0, "38500": 40858.0, "38505": 40052.0, "38510": 40835.0, "38515": 39980.0, "38520": 38596.0, "38525": 40213.0, "38530": 40738.0, "38535": 38580.0, "38540": 40042.0, "38545": 41504.0, "38550": 40116.0, "38555": 40048.0, "38560": 39317.0, "38565": 40812.0, "38570": 40894.0, "38575": 39445.0, "38580": 40737.0, "38585": 39419.0, "38590": 39976.0, "38595": 38596.0, "38600": 39985.0, "38605": 40796.0, "38610": 39281.0, "38615": 40745.0, "38620": 41581.0, "38625": 39352.0, "38630": 39974.0, "38635": 40808.0, "38640": 40100.0, "38645": 40137.0, "38650": 41503.0, "38655": 39347.0, "38660": 40045.0, "38665": 40041.0, "38670": 40823.0, "38675": 37843.0, "38680": 39386.0, "38685": 37068.0, "38690": 38685.0, "38695": 40031.0, "38700": 40042.0, "38705": 40126.0, "38710": 41502.0, "38715": 41512.0, "38720": 40041.0, "38725": 40038.0, "38730": 41584.0, "38735": 42276.0, "38740": 39435.0, "38745": 40968.0, "38750": 39461.0, "38755": 41514.0, "38760": 37224.0, "38765": 41505.0, "38770": 40128.0, "38775": 39339.0, "38780": 37977.0, "38785": 40032.0, "38790": 34412.0, "38795": 39360.0, "38800": 40220.0, "38805": 37088.0, "38810": 39217.0, "38815": 39366.0, "38820": 39262.0, "38825": 39286.0, "38830": 40742.0, "38835": 38527.0, "38840": 41662.0, "38845": 40030.0, "38850": 41494.0, "38855": 38565.0, 
"38860": 38698.0, "38865": 40031.0, "38870": 40116.0, "38875": 41518.0, "38880": 40049.0, "38885": 39363.0, "38890": 39272.0, "38895": 41498.0, "38900": 40046.0, "38905": 42269.0, "38910": 40048.0, "38915": 39285.0, "38920": 40866.0, "38925": 37888.0, "38930": 41576.0, "38935": 38658.0, "38940": 41511.0, "38945": 40820.0, "38950": 38594.0, "38955": 40121.0, "38960": 40120.0, "38965": 40202.0, "38970": 41517.0, "38975": 40731.0, "38980": 40812.0, "38985": 40748.0, "38990": 40827.0, "38995": 40059.0, "39000": 39429.0, "39005": 38597.0, "39010": 38517.0, "39015": 40203.0, "39020": 40741.0, "39025": 40045.0, "39030": 40052.0, "39035": 39273.0, "39040": 40811.0, "39045": 41597.0, "39050": 39435.0, "39055": 40820.0, "39060": 38737.0, "39065": 35804.0, "39070": 42269.0, "39075": 41495.0, "39080": 41572.0, "39085": 41571.0, "39090": 39978.0, "39095": 39993.0, "39100": 38514.0, "39105": 39268.0, "39110": 39421.0, "39115": 40121.0, "39120": 40818.0, "39125": 39959.0, "39130": 41556.0, "39135": 41496.0, "39140": 39992.0, "39145": 40745.0, "39150": 40821.0, "39155": 43026.0, "39160": 38695.0, "39165": 39472.0, "39170": 39982.0, "39175": 40036.0, "39180": 38052.0, "39185": 37751.0, "39190": 41503.0, "39195": 38563.0, "39200": 41506.0, "39205": 40046.0, "39210": 40036.0, "39215": 40739.0, "39220": 42266.0, "39225": 39379.0, "39230": 40804.0, "39235": 40167.0, "39240": 41578.0, "39245": 40798.0, "39250": 41556.0, "39255": 40817.0, "39260": 40746.0, "39265": 40732.0, "39270": 40053.0, "39275": 40109.0, "39280": 40810.0, "39285": 40811.0, "39290": 39976.0, "39295": 38744.0, "39300": 40890.0, "39305": 38609.0, "39310": 41504.0, "39315": 39276.0, "39320": 39280.0, "39325": 40731.0, "39330": 41509.0, "39335": 40886.0, "39340": 40896.0, "39345": 40114.0, "39350": 40864.0, "39355": 40731.0, "39360": 41521.0, "39365": 40812.0, "39370": 40828.0, "39375": 40125.0, "39380": 40034.0, "39385": 37902.0, "39390": 39984.0, "39395": 40817.0, "39400": 38727.0, "39405": 40744.0, "39410": 35955.0, "39415": 39290.0, "39420": 40137.0, "39425": 38040.0, "39430": 40813.0, "39435": 39266.0, "39440": 40792.0, "39445": 39992.0, "39450": 40823.0, "39455": 41502.0, "39460": 39345.0, "39465": 38596.0, "39470": 41577.0, "39475": 41518.0, "39480": 39445.0, "39485": 41503.0, "39490": 39384.0, "39495": 35767.0, "39500": 41574.0, "39505": 40215.0, "39510": 40044.0, "39515": 39280.0, "39520": 39962.0, "39525": 38569.0, "39530": 41492.0, "39535": 40722.0, "39540": 39208.0, "39545": 38741.0, "39550": 35154.0, "39555": 37965.0, "39560": 41506.0, "39565": 40119.0, "39570": 41565.0, "39575": 40751.0, "39580": 39289.0, "39585": 39287.0, "39590": 41572.0, "39595": 40747.0, "39600": 40100.0, "39605": 40803.0, "39610": 41518.0, "39615": 39973.0, "39620": 40879.0, "39625": 40736.0, "39630": 40109.0, "39635": 41577.0, "39640": 40031.0, "39645": 41512.0, "39650": 39296.0, "39655": 39983.0, "39660": 39422.0, "39665": 39365.0, "39670": 39368.0, "39675": 40833.0, "39680": 39977.0, "39685": 40109.0, "39690": 40820.0, "39695": 41496.0, "39700": 40808.0, "39705": 40795.0, "39710": 39523.0, "39715": 40027.0, "39720": 41505.0, "39725": 39980.0, "39730": 39978.0, "39735": 40037.0, "39740": 41499.0, "39745": 40811.0, "39750": 38661.0, "39755": 41497.0, "39760": 39979.0, "39765": 35700.0, "39770": 38532.0, "39775": 39275.0, "39780": 38075.0, "39785": 41654.0, "39790": 40044.0, "39795": 40736.0, "39800": 38599.0, "39805": 38735.0, "39810": 40844.0, "39815": 39378.0, "39820": 39975.0, "39825": 40822.0, "39830": 39223.0, "39835": 39289.0, "39840": 38663.0, "39845": 
40730.0, "39850": 41571.0, "39855": 41567.0, "39860": 40049.0, "39865": 39301.0, "39870": 39341.0, "39875": 38635.0, "39880": 40799.0, "39885": 39342.0, "39890": 42290.0, "39895": 37880.0, "39900": 40814.0, "39905": 40041.0, "39910": 39291.0, "39915": 40802.0, "39920": 40903.0, "39925": 40203.0, "39930": 38720.0, "39935": 39198.0, "39940": 40741.0, "39945": 40883.0, "39950": 40728.0, "39955": 41497.0, "39960": 41579.0, "39965": 40061.0, "39970": 40824.0, "39975": 39272.0, "39980": 41510.0, "39985": 39279.0, "39990": 37905.0, "39995": 39408.0, "40000": 37907.0, "40005": 41500.0, "40010": 37966.0, "40015": 38682.0, "40020": 37215.0, "40025": 39267.0, "40030": 39285.0, "40035": 40130.0, "40040": 39970.0, "40045": 39338.0, "40050": 40031.0, "40055": 39213.0, "40060": 39976.0, "40065": 40026.0, "40070": 42275.0, "40075": 40798.0, "40080": 42261.0, "40085": 40835.0, "40090": 41632.0, "40095": 40740.0, "40100": 39279.0, "40105": 37331.0, "40110": 40892.0, "40115": 40892.0, "40120": 34528.0, "40125": 39293.0, "40130": 39338.0, "40135": 40805.0, "40140": 40815.0, "40145": 38524.0, "40150": 42266.0, "40155": 40106.0, "40160": 41588.0, "40165": 42274.0, "40170": 41582.0, "40175": 40134.0, "40180": 41647.0, "40185": 41494.0, "40190": 40056.0, "40195": 40026.0, "40200": 39331.0, "40205": 38443.0, "40210": 38607.0, "40215": 40804.0, "40220": 42271.0, "40225": 40076.0, "40230": 38636.0, "40235": 39292.0, "40240": 38742.0, "40245": 40823.0, "40250": 41503.0, "40255": 37889.0, "40260": 39448.0, "40265": 40124.0, "40270": 39402.0, "40275": 39347.0, "40280": 40193.0, "40285": 38602.0, "40290": 41501.0, "40295": 38587.0, "40300": 38546.0, "40305": 40112.0, "40310": 40728.0, "40315": 40882.0, "40320": 40813.0, "40325": 38057.0, "40330": 38116.0, "40335": 40811.0, "40340": 38582.0, "40345": 40750.0, "40350": 40867.0, "40355": 37983.0, "40360": 40739.0, "40365": 41500.0, "40370": 41497.0, "40375": 41573.0, "40380": 38689.0, "40385": 40800.0, "40390": 37964.0, "40395": 41518.0, "40400": 40126.0, "40405": 41507.0, "40410": 40810.0, "40415": 40815.0, "40420": 40811.0, "40425": 40112.0, "40430": 40031.0, "40435": 39262.0, "40440": 39971.0, "40445": 41596.0, "40450": 40156.0, "40455": 42261.0, "40460": 40042.0, "40465": 40817.0, "40470": 41511.0, "40475": 40826.0, "40480": 40113.0, "40485": 40726.0, "40490": 39207.0, "40495": 41603.0, "40500": 40067.0, "40505": 39972.0, "40510": 41580.0, "40515": 38664.0, "40520": 39347.0, "40525": 38592.0, "40530": 40129.0, "40535": 37453.0, "40540": 40054.0, "40545": 40738.0, "40550": 40042.0, "40555": 38826.0, "40560": 41579.0, "40565": 37974.0, "40570": 39363.0, "40575": 41593.0, "40580": 41520.0, "40585": 40048.0, "40590": 40879.0, "40595": 39980.0, "40600": 38637.0, "40605": 38583.0, "40610": 38039.0, "40615": 39310.0, "40620": 39286.0, "40625": 40751.0, "40630": 40057.0, "40635": 39984.0, "40640": 40810.0, "40645": 41505.0, "40650": 40747.0, "40655": 40736.0, "40660": 39291.0, "40665": 39284.0, "40670": 39286.0, "40675": 39429.0, "40680": 40807.0, "40685": 40798.0, "40690": 40814.0, "40695": 38589.0, "40700": 40035.0, "40705": 38721.0, "40710": 40070.0, "40715": 41509.0, "40720": 38499.0, "40725": 38580.0, "40730": 40804.0, "40735": 40212.0, "40740": 41519.0, "40745": 36578.0, "40750": 40806.0, "40755": 41496.0, "40760": 40113.0, "40765": 41558.0, "40770": 41503.0, "40775": 41507.0, "40780": 38722.0, "40785": 39966.0, "40790": 39365.0, "40795": 40046.0, "40800": 40868.0, "40805": 40803.0, "40810": 38030.0, "40815": 39980.0, "40820": 40039.0, "40825": 40737.0, "40830": 40053.0, 
"40835": 37246.0, "40840": 38525.0, "40845": 40051.0, "40850": 38721.0, "40855": 40047.0, "40860": 39355.0, "40865": 38729.0, "40870": 40100.0, "40875": 39365.0, "40880": 40815.0, "40885": 40806.0, "40890": 40740.0, "40895": 40055.0, "40900": 39301.0, "40905": 40052.0, "40910": 40806.0, "40915": 40017.0, "40920": 38688.0, "40925": 38532.0, "40930": 40180.0, "40935": 40051.0, "40940": 40125.0, "40945": 40812.0, "40950": 40131.0, "40955": 38592.0, "40960": 40138.0, "40965": 41576.0, "40970": 39367.0, "40975": 39970.0, "40980": 40050.0, "40985": 40038.0, "40990": 40052.0, "40995": 37900.0, "41000": 40731.0, "41005": 40730.0, "41010": 36567.0, "41015": 39964.0, "41020": 40811.0, "41025": 38590.0, "41030": 40749.0, "41035": 40046.0, "41040": 39964.0, "41045": 39973.0, "41050": 39987.0, "41055": 38670.0, "41060": 39288.0, "41065": 41582.0, "41070": 40032.0, "41075": 39954.0, "41080": 39360.0, "41085": 40743.0, "41090": 38645.0, "41095": 40729.0, "41100": 40101.0, "41105": 39973.0, "41110": 41621.0, "41115": 39346.0, "41120": 39206.0, "41125": 40907.0, "41130": 40037.0, "41135": 40810.0, "41140": 40813.0, "41145": 40052.0, "41150": 39200.0, "41155": 39368.0, "41160": 37288.0, "41165": 40834.0, "41170": 40815.0, "41175": 38601.0, "41180": 39287.0, "41185": 40819.0, "41190": 39422.0, "41195": 40118.0, "41200": 39421.0, "41205": 41502.0, "41210": 38663.0, "41215": 39363.0, "41220": 39985.0, "41225": 40037.0, "41230": 40054.0, "41235": 39296.0, "41240": 40112.0, "41245": 41496.0, "41250": 40798.0, "41255": 38614.0, "41260": 40088.0, "41265": 40796.0, "41270": 40042.0, "41275": 40817.0, "41280": 39531.0, "41285": 38766.0, "41290": 40753.0, "41295": 40110.0, "41300": 40060.0, "41305": 40743.0, "41310": 39958.0, "41315": 38512.0, "41320": 39201.0, "41325": 39291.0, "41330": 39433.0, "41335": 41506.0, "41340": 41499.0, "41345": 37917.0, "41350": 40822.0, "41355": 38510.0, "41360": 38619.0, "41365": 42264.0, "41370": 40201.0, "41375": 39280.0, "41380": 41563.0, "41385": 39310.0, "41390": 41487.0, "41395": 40058.0, "41400": 39973.0, "41405": 41505.0, "41410": 40803.0, "41415": 38735.0, "41420": 37274.0, "41425": 40177.0, "41430": 39255.0, "41435": 40044.0, "41440": 43050.0, "41445": 40135.0, "41450": 40111.0, "41455": 38660.0, "41460": 37983.0, "41465": 40827.0, "41470": 36571.0, "41475": 42269.0, "41480": 41497.0, "41485": 40061.0, "41490": 39302.0, "41495": 40809.0, "41500": 40040.0, "41505": 40813.0, "41510": 40055.0, "41515": 40891.0, "41520": 40829.0, "41525": 39966.0, "41530": 39978.0, "41535": 38518.0, "41540": 37673.0, "41545": 39286.0, "41550": 38598.0, "41555": 40811.0, "41560": 40814.0, "41565": 38511.0, "41570": 41497.0, "41575": 41498.0, "41580": 39255.0, "41585": 40050.0, "41590": 40817.0, "41595": 40875.0, "41600": 40737.0, "41605": 37988.0, "41610": 40744.0, "41615": 39372.0, "41620": 38495.0, "41625": 40042.0, "41630": 39232.0, "41635": 41656.0, "41640": 39443.0, "41645": 39356.0, "41650": 40051.0, "41655": 38540.0, "41660": 40829.0, "41665": 40832.0, "41670": 41511.0, "41675": 40801.0, "41680": 40803.0, "41685": 40751.0, "41690": 40741.0, "41695": 38051.0, "41700": 40814.0, "41705": 39414.0, "41710": 40055.0, "41715": 41554.0, "41720": 39339.0, "41725": 38674.0, "41730": 41506.0, "41735": 38055.0, "41740": 40141.0, "41745": 37330.0, "41750": 40217.0, "41755": 39987.0, "41760": 40123.0, "41765": 40735.0, "41770": 40830.0, "41775": 39967.0, "41780": 41514.0, "41785": 41502.0, "41790": 38628.0, "41795": 41503.0, "41800": 41497.0, "41805": 39967.0, "41810": 40808.0, "41815": 39447.0, "41820": 
40060.0, "41825": 40049.0, "41830": 40043.0, "41835": 39968.0, "41840": 40047.0, "41845": 38576.0, "41850": 40791.0, "41855": 36381.0, "41860": 40953.0, "41865": 39301.0, "41870": 40113.0, "41875": 39293.0, "41880": 39292.0, "41885": 40828.0, "41890": 40043.0, "41895": 37957.0, "41900": 42269.0, "41905": 40748.0, "41910": 39276.0, "41915": 40141.0, "41920": 38457.0, "41925": 41504.0, "41930": 37672.0, "41935": 39970.0, "41940": 40057.0, "41945": 40733.0, "41950": 41514.0, "41955": 38445.0, "41960": 40743.0, "41965": 41577.0, "41970": 41512.0, "41975": 40036.0, "41980": 40055.0, "41985": 39373.0, "41990": 40816.0, "41995": 39977.0, "42000": 40801.0, "42005": 41495.0, "42010": 40144.0, "42015": 37069.0, "42020": 40036.0, "42025": 40059.0, "42030": 41508.0, "42035": 41510.0, "42040": 41579.0, "42045": 39433.0, "42050": 40810.0, "42055": 42265.0, "42060": 40893.0, "42065": 40039.0, "42070": 40819.0, "42075": 37943.0, "42080": 42258.0, "42085": 41580.0, "42090": 41584.0, "42095": 40051.0, "42100": 41516.0, "42105": 40824.0, "42110": 39364.0, "42115": 40891.0, "42120": 37130.0, "42125": 40038.0, "42130": 40739.0, "42135": 38535.0, "42140": 40041.0, "42145": 41595.0, "42150": 39962.0, "42155": 39970.0, "42160": 38004.0, "42165": 42264.0, "42170": 40197.0, "42175": 41527.0, "42180": 40037.0, "42185": 41562.0, "42190": 39281.0, "42195": 40034.0, "42200": 41575.0, "42205": 40743.0, "42210": 38583.0, "42215": 41504.0, "42220": 38534.0, "42225": 39276.0, "42230": 37347.0, "42235": 40827.0, "42240": 37741.0, "42245": 40032.0, "42250": 41496.0, "42255": 39352.0, "42260": 37973.0, "42265": 41582.0, "42270": 39399.0, "42275": 40049.0, "42280": 40133.0, "42285": 37878.0, "42290": 39359.0, "42295": 40729.0, "42300": 40129.0, "42305": 39234.0, "42310": 39349.0, "42315": 39314.0, "42320": 38677.0, "42325": 38812.0, "42330": 39286.0, "42335": 41507.0, "42340": 38511.0, "42345": 40044.0, "42350": 37111.0, "42355": 37706.0, "42360": 39415.0, "42365": 41512.0, "42370": 39973.0, "42375": 38438.0, "42380": 40047.0, "42385": 37953.0, "42390": 40039.0, "42395": 40057.0, "42400": 40731.0, "42405": 39971.0, "42410": 40216.0, "42415": 41509.0, "42420": 38589.0, "42425": 40812.0, "42430": 40890.0, "42435": 40157.0, "42440": 39356.0, "42445": 41558.0, "42450": 40132.0, "42455": 39201.0, "42460": 40059.0, "42465": 40749.0, "42470": 39961.0, "42475": 40736.0, "42480": 39414.0, "42485": 40742.0, "42490": 41598.0, "42495": 39353.0, "42500": 40797.0, "42505": 40823.0, "42510": 37915.0, "42515": 34312.0, "42520": 40131.0, "42525": 40070.0, "42530": 40792.0, "42535": 38080.0, "42540": 40736.0, "42545": 38604.0, "42550": 40808.0, "42555": 41494.0, "42560": 40801.0, "42565": 35416.0, "42570": 40812.0, "42575": 40051.0, "42580": 39211.0, "42585": 40129.0, "42590": 40819.0, "42595": 40746.0, "42600": 40839.0, "42605": 40204.0, "42610": 40741.0, "42615": 40732.0, "42620": 38003.0, "42625": 38516.0, "42630": 38556.0, "42635": 39975.0, "42640": 39385.0, "42645": 39379.0, "42650": 40114.0, "42655": 39990.0, "42660": 40207.0, "42665": 40069.0, "42670": 40043.0, "42675": 38505.0, "42680": 40822.0, "42685": 40750.0, "42690": 41505.0, "42695": 40805.0, "42700": 41500.0, "42705": 41520.0, "42710": 40117.0, "42715": 38762.0, "42720": 41515.0, "42725": 39276.0, "42730": 37915.0, "42735": 40805.0, "42740": 40742.0, "42745": 38652.0, "42750": 41510.0, "42755": 40801.0, "42760": 42270.0, "42765": 42346.0, "42770": 41547.0, "42775": 40041.0, "42780": 40742.0, "42785": 39268.0, "42790": 40017.0, "42795": 41583.0, "42800": 37242.0, "42805": 40881.0, 
"42810": 41501.0, "42815": 39349.0, "42820": 40050.0, "42825": 35728.0, "42830": 40824.0, "42835": 40800.0, "42840": 37901.0, "42845": 39969.0, "42850": 39275.0, "42855": 40805.0, "42860": 37813.0, "42865": 40730.0, "42870": 37165.0, "42875": 41570.0, "42880": 41501.0, "42885": 39353.0, "42890": 40055.0, "42895": 41574.0, "42900": 40807.0, "42905": 41584.0, "42910": 38580.0, "42915": 38600.0, "42920": 40823.0, "42925": 43042.0, "42930": 40025.0, "42935": 41585.0, "42940": 41507.0, "42945": 39333.0, "42950": 39308.0, "42955": 40748.0, "42960": 38062.0, "42965": 38612.0, "42970": 39273.0, "42975": 41511.0, "42980": 38672.0, "42985": 41601.0, "42990": 40735.0, "42995": 39411.0, "43000": 38717.0, "43005": 40123.0, "43010": 40092.0, "43015": 40739.0, "43020": 41568.0, "43025": 40743.0, "43030": 35980.0, "43035": 37753.0, "43040": 38682.0, "43045": 40739.0, "43050": 40810.0, "43055": 40758.0, "43060": 40880.0, "43065": 40122.0, "43070": 37886.0, "43075": 38506.0, "43080": 40805.0, "43085": 39292.0, "43090": 39982.0, "43095": 39967.0, "43100": 39971.0, "43105": 38526.0, "43110": 40041.0, "43115": 41567.0, "43120": 40792.0, "43125": 37977.0, "43130": 40109.0, "43135": 40809.0, "43140": 40730.0, "43145": 41577.0, "43150": 41597.0, "43155": 39289.0, "43160": 40808.0, "43165": 36416.0, "43170": 38676.0, "43175": 37210.0, "43180": 41583.0, "43185": 41578.0, "43190": 39422.0, "43195": 41587.0, "43200": 38004.0, "43205": 40048.0, "43210": 39350.0, "43215": 41569.0, "43220": 40804.0, "43225": 38598.0, "43230": 40842.0, "43235": 41511.0, "43240": 40748.0, "43245": 41497.0, "43250": 40735.0, "43255": 40131.0, "43260": 39206.0, "43265": 37764.0, "43270": 41494.0, "43275": 40823.0, "43280": 41496.0, "43285": 37927.0, "43290": 40158.0, "43295": 42271.0, "43300": 40042.0, "43305": 40123.0, "43310": 40062.0, "43315": 40220.0, "43320": 40820.0, "43325": 41506.0, "43330": 38061.0, "43335": 42269.0, "43340": 39984.0, "43345": 39976.0, "43350": 40057.0, "43355": 41504.0, "43360": 40052.0, "43365": 41563.0, "43370": 40142.0, "43375": 40028.0, "43380": 39428.0, "43385": 41564.0, "43390": 39514.0, "43395": 42261.0, "43400": 39955.0, "43405": 40744.0, "43410": 40184.0, "43415": 39428.0, "43420": 40034.0, "43425": 40144.0, "43430": 39353.0, "43435": 40135.0, "43440": 40757.0, "43445": 41496.0, "43450": 38767.0, "43455": 40050.0, "43460": 40738.0, "43465": 38591.0, "43470": 37918.0, "43475": 40056.0, "43480": 40174.0, "43485": 41502.0, "43490": 40820.0, "43495": 40105.0, "43500": 40059.0, "43505": 40805.0, "43510": 40199.0, "43515": 40810.0, "43520": 40880.0, "43525": 41508.0, "43530": 38512.0, "43535": 39343.0, "43540": 39965.0, "43545": 39968.0, "43550": 39964.0, "43555": 40739.0, "43560": 37837.0, "43565": 39297.0, "43570": 37958.0, "43575": 37794.0, "43580": 39270.0, "43585": 39432.0, "43590": 40797.0, "43595": 41567.0, "43600": 40825.0, "43605": 40818.0, "43610": 40051.0, "43615": 40815.0, "43620": 42269.0, "43625": 41504.0, "43630": 41588.0, "43635": 40122.0, "43640": 41592.0, "43645": 38586.0, "43650": 40826.0, "43655": 40093.0, "43660": 37005.0, "43665": 41578.0, "43670": 39377.0, "43675": 40740.0, "43680": 39302.0, "43685": 39428.0, "43690": 39971.0, "43695": 40801.0, "43700": 39197.0, "43705": 40138.0, "43710": 41502.0, "43715": 41585.0, "43720": 39434.0, "43725": 40137.0, "43730": 40745.0, "43735": 40054.0, "43740": 42263.0, "43745": 40883.0, "43750": 39284.0, "43755": 38605.0, "43760": 39399.0, "43765": 39265.0, "43770": 40818.0, "43775": 41513.0, "43780": 40803.0, "43785": 40051.0, "43790": 40733.0, "43795": 
42260.0, "43800": 40731.0, "43805": 36921.0, "43810": 40052.0, "43815": 41589.0, "43820": 38669.0, "43825": 40102.0, "43830": 40740.0, "43835": 39264.0, "43840": 39970.0, "43845": 40807.0, "43850": 41495.0, "43855": 38598.0, "43860": 38594.0, "43865": 40059.0, "43870": 40032.0, "43875": 40735.0, "43880": 39356.0, "43885": 39449.0, "43890": 41498.0, "43895": 39305.0, "43900": 39357.0, "43905": 38563.0, "43910": 40053.0, "43915": 40113.0, "43920": 39958.0, "43925": 40736.0, "43930": 39264.0, "43935": 39283.0, "43940": 41503.0, "43945": 40193.0, "43950": 38677.0, "43955": 39300.0, "43960": 40748.0, "43965": 39282.0, "43970": 40051.0, "43975": 40049.0, "43980": 39517.0, "43985": 40811.0, "43990": 40031.0, "43995": 39294.0, "44000": 39364.0, "44005": 40747.0, "44010": 38529.0, "44015": 39290.0, "44020": 40818.0, "44025": 39287.0, "44030": 36607.0, "44035": 39969.0, "44040": 39346.0, "44045": 39376.0, "44050": 41512.0, "44055": 39430.0, "44060": 40746.0, "44065": 40110.0, "44070": 40216.0, "44075": 40818.0, "44080": 42272.0, "44085": 38739.0, "44090": 37122.0, "44095": 39439.0, "44100": 40902.0, "44105": 40745.0, "44110": 40188.0, "44115": 36668.0, "44120": 39361.0, "44125": 39200.0, "44130": 42339.0, "44135": 41501.0, "44140": 37811.0, "44145": 40041.0, "44150": 39354.0, "44155": 41500.0, "44160": 40753.0, "44165": 38733.0, "44170": 40744.0, "44175": 35905.0, "44180": 40815.0, "44185": 40100.0, "44190": 36221.0, "44195": 39366.0, "44200": 40038.0, "44205": 40042.0, "44210": 41504.0, "44215": 39350.0, "44220": 38084.0, "44225": 39356.0, "44230": 41581.0, "44235": 40831.0, "44240": 40025.0, "44245": 41584.0, "44250": 40053.0, "44255": 40867.0, "44260": 40810.0, "44265": 41505.0, "44270": 40805.0, "44275": 40907.0, "44280": 40822.0, "44285": 40887.0, "44290": 37355.0, "44295": 38086.0, "44300": 42266.0, "44305": 40758.0, "44310": 39210.0, "44315": 40037.0, "44320": 38664.0, "44325": 40739.0, "44330": 38563.0, "44335": 41580.0, "44340": 37896.0, "44345": 39363.0, "44350": 38666.0, "44355": 38597.0, "44360": 40133.0, "44365": 41496.0, "44370": 41663.0, "44375": 40876.0, "44380": 40805.0, "44385": 40869.0, "44390": 40736.0, "44395": 40756.0, "44400": 40741.0, "44405": 39978.0, "44410": 38091.0, "44415": 39338.0, "44420": 40741.0, "44425": 39264.0, "44430": 37120.0, "44435": 39980.0, "44440": 41576.0, "44445": 39346.0, "44450": 40813.0, "44455": 39370.0, "44460": 38603.0, "44465": 39354.0, "44470": 40057.0, "44475": 43035.0, "44480": 39974.0, "44485": 39226.0, "44490": 38575.0, "44495": 38442.0, "44500": 40083.0, "44505": 37913.0, "44510": 39289.0, "44515": 39967.0, "44520": 42258.0, "44525": 39273.0, "44530": 40734.0, "44535": 39287.0, "44540": 40808.0, "44545": 40748.0, "44550": 38671.0, "44555": 41498.0, "44560": 40749.0, "44565": 41583.0, "44570": 40052.0, "44575": 40098.0, "44580": 40827.0, "44585": 40823.0, "44590": 41563.0, "44595": 40740.0, "44600": 38584.0, "44605": 39984.0, "44610": 40834.0, "44615": 39351.0, "44620": 40051.0, "44625": 40035.0, "44630": 37988.0, "44635": 40742.0, "44640": 40101.0, "44645": 40040.0, "44650": 40045.0, "44655": 40051.0, "44660": 39367.0, "44665": 43043.0, "44670": 40042.0, "44675": 39337.0, "44680": 40731.0, "44685": 39285.0, "44690": 39283.0, "44695": 36359.0, "44700": 39207.0, "44705": 40811.0, "44710": 40026.0, "44715": 39979.0, "44720": 41511.0, "44725": 41506.0, "44730": 39357.0, "44735": 40740.0, "44740": 39205.0, "44745": 40806.0, "44750": 40800.0, "44755": 39291.0, "44760": 40826.0, "44765": 39273.0, "44770": 39220.0, "44775": 40109.0, "44780": 39430.0, 
"44785": 41515.0, "44790": 40803.0, "44795": 40141.0, "44800": 40819.0, "44805": 40826.0, "44810": 40110.0, "44815": 40119.0, "44820": 41507.0, "44825": 40815.0, "44830": 40184.0, "44835": 40732.0, "44840": 39281.0, "44845": 40165.0, "44850": 39348.0, "44855": 40045.0, "44860": 38673.0, "44865": 39981.0, "44870": 38583.0, "44875": 40803.0, "44880": 39969.0, "44885": 38675.0, "44890": 40793.0, "44895": 40970.0, "44900": 40031.0, "44905": 39333.0, "44910": 39456.0, "44915": 38534.0, "44920": 39281.0, "44925": 40817.0, "44930": 40804.0, "44935": 38661.0, "44940": 39273.0, "44945": 40048.0, "44950": 41505.0, "44955": 40061.0, "44960": 40867.0, "44965": 37989.0, "44970": 40045.0, "44975": 39189.0, "44980": 41580.0, "44985": 39305.0, "44990": 40800.0, "44995": 41505.0, "45000": 40047.0, "45005": 39272.0, "45010": 40062.0, "45015": 40119.0, "45020": 40055.0, "45025": 37195.0, "45030": 40734.0, "45035": 37230.0, "45040": 38586.0, "45045": 41512.0, "45050": 40734.0, "45055": 37817.0, "45060": 40860.0, "45065": 40033.0, "45070": 40039.0, "45075": 40807.0, "45080": 40114.0, "45085": 39337.0, "45090": 38434.0, "45095": 39292.0, "45100": 40733.0, "45105": 41495.0, "45110": 40822.0, "45115": 40735.0, "45120": 40817.0, "45125": 41581.0, "45130": 39209.0, "45135": 39418.0, "45140": 40107.0, "45145": 40889.0, "45150": 39283.0, "45155": 41495.0, "45160": 40736.0, "45165": 40100.0, "45170": 40070.0, "45175": 37908.0, "45180": 39280.0, "45185": 39435.0, "45190": 41571.0, "45195": 40829.0, "45200": 39326.0, "45205": 38665.0, "45210": 38606.0, "45215": 40877.0, "45220": 40822.0, "45225": 40809.0, "45230": 39375.0, "45235": 39372.0, "45240": 39416.0, "45245": 40745.0, "45250": 40826.0, "45255": 41497.0, "45260": 41498.0, "45265": 40056.0, "45270": 37757.0, "45275": 40051.0, "45280": 40824.0, "45285": 39971.0, "45290": 41572.0, "45295": 41512.0, "45300": 37304.0, "45305": 40050.0, "45310": 40030.0, "45315": 39444.0, "45320": 39978.0, "45325": 41506.0, "45330": 39275.0, "45335": 39966.0, "45340": 40102.0, "45345": 39973.0, "45350": 38455.0, "45355": 42420.0, "45360": 40139.0, "45365": 40796.0, "45370": 39308.0, "45375": 39976.0, "45380": 39346.0, "45385": 43038.0, "45390": 39307.0, "45395": 40820.0, "45400": 40184.0, "45405": 40820.0, "45410": 40819.0, "45415": 40822.0, "45420": 38431.0, "45425": 39355.0, "45430": 40049.0, "45435": 39365.0, "45440": 38699.0, "45445": 35196.0, "45450": 39306.0, "45455": 41504.0, "45460": 36537.0, "45465": 38684.0, "45470": 40048.0, "45475": 40054.0, "45480": 39225.0, "45485": 40072.0, "45490": 40802.0, "45495": 40728.0, "45500": 40055.0, "45505": 39959.0, "45510": 40029.0, "45515": 37701.0, "45520": 40041.0, "45525": 40029.0, "45530": 41596.0, "45535": 38669.0, "45540": 40806.0, "45545": 40147.0, "45550": 38736.0, "45555": 40742.0, "45560": 39277.0, "45565": 39973.0, "45570": 40058.0, "45575": 40736.0, "45580": 40752.0, "45585": 40806.0, "45590": 39966.0, "45595": 38458.0, "45600": 41661.0, "45605": 39197.0, "45610": 40818.0, "45615": 37373.0, "45620": 39407.0, "45625": 37461.0, "45630": 38823.0, "45635": 39226.0, "45640": 40049.0, "45645": 38499.0, "45650": 42275.0, "45655": 39979.0, "45660": 37760.0, "45665": 38651.0, "45670": 41492.0, "45675": 40752.0, "45680": 40733.0, "45685": 41572.0, "45690": 39318.0, "45695": 40753.0, "45700": 40042.0, "45705": 40730.0, "45710": 39974.0, "45715": 39367.0, "45720": 38730.0, "45725": 40805.0, "45730": 40888.0, "45735": 41561.0, "45740": 40731.0, "45745": 39193.0, "45750": 40816.0, "45755": 41512.0, "45760": 38494.0, "45765": 40895.0, "45770": 
39527.0, "45775": 38531.0, "45780": 40189.0, "45785": 41508.0, "45790": 39208.0, "45795": 40824.0, "45800": 40814.0, "45805": 40042.0, "45810": 41504.0, "45815": 38588.0, "45820": 41572.0, "45825": 41561.0, "45830": 39973.0, "45835": 40753.0, "45840": 40744.0, "45845": 41510.0, "45850": 40153.0, "45855": 40735.0, "45860": 40215.0, "45865": 39984.0, "45870": 40824.0, "45875": 40042.0, "45880": 40736.0, "45885": 37824.0, "45890": 40747.0, "45895": 21180.0, "45900": 40035.0, "45905": 40821.0, "45910": 40112.0, "45915": 39455.0, "45920": 39990.0, "45925": 40129.0, "45930": 39994.0, "45935": 39428.0, "45940": 41504.0, "45945": 38653.0, "45950": 40052.0, "45955": 38611.0, "45960": 40741.0, "45965": 40066.0, "45970": 40206.0, "45975": 40825.0, "45980": 37826.0, "45985": 40746.0, "45990": 40810.0, "45995": 39412.0, "46000": 37811.0, "46005": 39200.0, "46010": 39434.0, "46015": 41505.0, "46020": 40735.0, "46025": 37883.0, "46030": 40873.0, "46035": 36456.0, "46040": 39420.0, "46045": 38054.0, "46050": 40127.0, "46055": 40813.0, "46060": 38535.0, "46065": 40193.0, "46070": 41503.0, "46075": 39211.0, "46080": 40039.0, "46085": 37158.0, "46090": 38061.0, "46095": 40063.0, "46100": 41501.0, "46105": 39466.0, "46110": 41569.0, "46115": 40732.0, "46120": 40904.0, "46125": 40054.0, "46130": 38513.0, "46135": 40039.0, "46140": 39347.0, "46145": 40023.0, "46150": 40736.0, "46155": 42273.0, "46160": 40137.0, "46165": 40036.0, "46170": 38564.0, "46175": 40818.0, "46180": 41492.0, "46185": 41585.0, "46190": 40124.0, "46195": 39207.0, "46200": 42273.0, "46205": 40039.0, "46210": 39286.0, "46215": 38466.0, "46220": 39354.0, "46225": 40028.0, "46230": 41569.0, "46235": 39278.0, "46240": 40053.0, "46245": 36543.0, "46250": 42273.0, "46255": 40110.0, "46260": 37890.0, "46265": 36645.0, "46270": 40051.0, "46275": 38677.0, "46280": 39513.0, "46285": 39274.0, "46290": 39481.0, "46295": 40822.0, "46300": 38727.0, "46305": 38593.0, "46310": 40892.0, "46315": 38528.0, "46320": 41516.0, "46325": 40140.0, "46330": 39971.0, "46335": 39354.0, "46340": 42269.0, "46345": 39967.0, "46350": 39298.0, "46355": 40038.0, "46360": 40741.0, "46365": 39971.0, "46370": 39296.0, "46375": 42335.0, "46380": 37746.0, "46385": 38610.0, "46390": 39453.0, "46395": 39355.0, "46400": 40734.0, "46405": 37892.0, "46410": 40815.0, "46415": 40052.0, "46420": 39369.0, "46425": 38689.0, "46430": 36595.0, "46435": 39968.0, "46440": 41600.0, "46445": 40125.0, "46450": 40979.0, "46455": 39353.0, "46460": 38744.0, "46465": 39360.0, "46470": 40133.0, "46475": 40035.0, "46480": 40796.0, "46485": 38436.0, "46490": 38651.0, "46495": 41570.0, "46500": 40742.0, "46505": 38141.0, "46510": 39230.0, "46515": 41498.0, "46520": 37332.0, "46525": 41583.0, "46530": 40982.0, "46535": 40822.0, "46540": 40867.0, "46545": 40000.0, "46550": 39233.0, "46555": 40812.0, "46560": 40747.0, "46565": 41510.0, "46570": 40131.0, "46575": 41591.0, "46580": 40808.0, "46585": 39286.0, "46590": 40038.0, "46595": 40208.0, "46600": 39277.0, "46605": 40729.0, "46610": 39976.0, "46615": 39432.0, "46620": 40883.0, "46625": 39960.0, "46630": 40732.0, "46635": 41605.0, "46640": 40065.0, "46645": 41508.0, "46650": 39341.0, "46655": 41502.0, "46660": 39297.0, "46665": 40112.0, "46670": 38573.0, "46675": 40795.0, "46680": 37921.0, "46685": 41500.0, "46690": 39980.0, "46695": 40738.0, "46700": 39259.0, "46705": 40739.0, "46710": 40967.0, "46715": 38583.0, "46720": 40797.0, "46725": 39298.0, "46730": 38610.0, "46735": 40824.0, "46740": 39301.0, "46745": 40767.0, "46750": 43030.0, "46755": 40809.0, 
"46760": 40808.0, "46765": 40036.0, "46770": 39981.0, "46775": 40034.0, "46780": 38719.0, "46785": 40061.0, "46790": 42265.0, "46795": 39430.0, "46800": 42343.0, "46805": 39516.0, "46810": 39970.0, "46815": 40141.0, "46820": 38603.0, "46825": 38744.0, "46830": 38594.0, "46835": 40069.0, "46840": 40903.0, "46845": 37198.0, "46850": 41561.0, "46855": 41564.0, "46860": 39607.0, "46865": 41502.0, "46870": 38577.0, "46875": 40895.0, "46880": 40746.0, "46885": 41567.0, "46890": 39200.0, "46895": 39420.0, "46900": 40820.0, "46905": 39301.0, "46910": 39476.0, "46915": 40802.0, "46920": 40735.0, "46925": 40879.0, "46930": 37998.0, "46935": 42268.0, "46940": 41503.0, "46945": 38617.0, "46950": 40827.0, "46955": 41502.0, "46960": 40805.0, "46965": 39969.0, "46970": 41503.0, "46975": 39296.0, "46980": 40136.0, "46985": 41584.0, "46990": 41505.0, "46995": 39974.0, "47000": 39978.0, "47005": 36999.0, "47010": 40747.0, "47015": 40809.0, "47020": 38760.0, "47025": 38611.0, "47030": 38066.0, "47035": 39964.0, "47040": 40040.0, "47045": 41573.0, "47050": 39218.0, "47055": 40056.0, "47060": 41556.0, "47065": 40048.0, "47070": 40105.0, "47075": 40115.0, "47080": 41502.0, "47085": 40045.0, "47090": 39307.0, "47095": 41501.0, "47100": 39334.0, "47105": 40212.0, "47110": 41499.0, "47115": 40057.0, "47120": 41494.0, "47125": 40828.0, "47130": 40831.0, "47135": 40032.0, "47140": 39357.0, "47145": 40744.0, "47150": 38766.0, "47155": 40038.0, "47160": 40038.0, "47165": 36609.0, "47170": 40812.0, "47175": 41564.0, "47180": 40051.0, "47185": 39962.0, "47190": 40143.0, "47195": 39973.0, "47200": 38008.0, "47205": 40105.0, "47210": 39421.0, "47215": 39997.0, "47220": 38680.0, "47225": 39445.0, "47230": 40808.0, "47235": 40914.0, "47240": 40804.0, "47245": 39981.0, "47250": 40106.0, "47255": 41513.0, "47260": 40876.0, "47265": 39287.0, "47270": 40027.0, "47275": 40021.0, "47280": 40050.0, "47285": 38584.0, "47290": 40813.0, "47295": 40743.0, "47300": 40813.0, "47305": 41520.0, "47310": 42263.0, "47315": 38734.0, "47320": 40741.0, "47325": 40908.0, "47330": 40061.0, "47335": 40045.0, "47340": 39197.0, "47345": 39965.0, "47350": 41511.0, "47355": 41524.0, "47360": 38075.0, "47365": 40038.0, "47370": 40796.0, "47375": 40041.0, "47380": 39221.0, "47385": 37152.0, "47390": 40789.0, "47395": 40061.0, "47400": 40737.0, "47405": 40034.0, "47410": 40039.0, "47415": 40824.0, "47420": 39290.0, "47425": 40810.0, "47430": 40745.0, "47435": 39352.0, "47440": 39213.0, "47445": 40744.0, "47450": 41600.0, "47455": 40804.0, "47460": 40749.0, "47465": 40127.0, "47470": 40170.0, "47475": 40806.0, "47480": 40813.0, "47485": 37071.0, "47490": 40899.0, "47495": 38675.0, "47500": 42258.0, "47505": 40744.0, "47510": 40749.0, "47515": 41493.0, "47520": 37113.0, "47525": 40827.0, "47530": 40743.0, "47535": 40731.0, "47540": 40748.0, "47545": 39445.0, "47550": 40808.0, "47555": 40742.0, "47560": 40053.0, "47565": 39288.0, "47570": 42268.0, "47575": 40749.0, "47580": 40868.0, "47585": 38670.0, "47590": 37059.0, "47595": 39971.0, "47600": 40877.0, "47605": 40123.0, "47610": 41499.0, "47615": 38820.0, "47620": 40793.0, "47625": 39350.0, "47630": 39972.0, "47635": 41511.0, "47640": 39487.0, "47645": 40195.0, "47650": 40806.0, "47655": 39981.0, "47660": 40748.0, "47665": 41509.0, "47670": 40822.0, "47675": 39209.0, "47680": 41503.0, "47685": 40813.0, "47690": 39976.0, "47695": 40802.0, "47700": 39979.0, "47705": 39977.0, "47710": 38441.0, "47715": 39299.0, "47720": 39303.0, "47725": 39458.0, "47730": 40038.0, "47735": 41511.0, "47740": 41568.0, "47745": 
42274.0, "47750": 39354.0, "47755": 41567.0, "47760": 40816.0, "47765": 40841.0, "47770": 39258.0, "47775": 40806.0, "47780": 39269.0, "47785": 40040.0, "47790": 38056.0, "47795": 41662.0, "47800": 41502.0, "47805": 40739.0, "47810": 39581.0, "47815": 38596.0, "47820": 38738.0, "47825": 37978.0, "47830": 40728.0, "47835": 39363.0, "47840": 38462.0, "47845": 40050.0, "47850": 42271.0, "47855": 38523.0, "47860": 39960.0, "47865": 40045.0, "47870": 40812.0, "47875": 37899.0, "47880": 37315.0, "47885": 40046.0, "47890": 37886.0, "47895": 41499.0, "47900": 39491.0, "47905": 37301.0, "47910": 40041.0, "47915": 42266.0, "47920": 39984.0, "47925": 39370.0, "47930": 41589.0, "47935": 37809.0, "47940": 39424.0, "47945": 40040.0, "47950": 38528.0, "47955": 40032.0, "47960": 40121.0, "47965": 40741.0, "47970": 40106.0, "47975": 40749.0, "47980": 40140.0, "47985": 39494.0, "47990": 38603.0, "47995": 40058.0, "48000": 40143.0, "48005": 40745.0, "48010": 35015.0, "48015": 39975.0, "48020": 37910.0, "48025": 40127.0, "48030": 39344.0, "48035": 40817.0, "48040": 40734.0, "48045": 39264.0, "48050": 39348.0, "48055": 40808.0, "48060": 39215.0, "48065": 40726.0, "48070": 42347.0, "48075": 40126.0, "48080": 40733.0, "48085": 40055.0, "48090": 39984.0, "48095": 40050.0, "48100": 40809.0, "48105": 40827.0, "48110": 40800.0, "48115": 40815.0, "48120": 40823.0, "48125": 40818.0, "48130": 42260.0, "48135": 40176.0, "48140": 40043.0, "48145": 37758.0, "48150": 37324.0, "48155": 40732.0, "48160": 38726.0, "48165": 39300.0, "48170": 38659.0, "48175": 39971.0, "48180": 39971.0, "48185": 40872.0, "48190": 40057.0, "48195": 39279.0, "48200": 40051.0, "48205": 40903.0, "48210": 39283.0, "48215": 39328.0, "48220": 40878.0, "48225": 37903.0, "48230": 39420.0, "48235": 40056.0, "48240": 40885.0, "48245": 41501.0, "48250": 40738.0, "48255": 38460.0, "48260": 41562.0, "48265": 41499.0, "48270": 41507.0, "48275": 39295.0, "48280": 40151.0, "48285": 40727.0, "48290": 41508.0, "48295": 40728.0, "48300": 37973.0, "48305": 40836.0, "48310": 38460.0, "48315": 39208.0, "48320": 37371.0, "48325": 38591.0, "48330": 38590.0, "48335": 41578.0, "48340": 39977.0, "48345": 39356.0, "48350": 40761.0, "48355": 40115.0, "48360": 41580.0, "48365": 39437.0, "48370": 39437.0, "48375": 39341.0, "48380": 38069.0, "48385": 40052.0, "48390": 38520.0, "48395": 40049.0, "48400": 41521.0, "48405": 37224.0, "48410": 39346.0, "48415": 40794.0, "48420": 40793.0, "48425": 39369.0, "48430": 40142.0, "48435": 40806.0, "48440": 41565.0, "48445": 40101.0, "48450": 38651.0, "48455": 39313.0, "48460": 40040.0, "48465": 40036.0, "48470": 37046.0, "48475": 40129.0, "48480": 41491.0, "48485": 39979.0, "48490": 39339.0, "48495": 39282.0, "48500": 39422.0, "48505": 39964.0, "48510": 40099.0, "48515": 36462.0, "48520": 39336.0, "48525": 40820.0, "48530": 35585.0, "48535": 38756.0, "48540": 40735.0, "48545": 40814.0, "48550": 38610.0, "48555": 40052.0, "48560": 40198.0, "48565": 40807.0, "48570": 39350.0, "48575": 41512.0, "48580": 40802.0, "48585": 39346.0, "48590": 41506.0, "48595": 38565.0, "48600": 38576.0, "48605": 39453.0, "48610": 40799.0, "48615": 38039.0, "48620": 41567.0, "48625": 41585.0, "48630": 39971.0, "48635": 40797.0, "48640": 41579.0, "48645": 38769.0, "48650": 38619.0, "48655": 40750.0, "48660": 39208.0, "48665": 40788.0, "48670": 40037.0, "48675": 39441.0, "48680": 40819.0, "48685": 40810.0, "48690": 40746.0, "48695": 38532.0, "48700": 40801.0, "48705": 40051.0, "48710": 41504.0, "48715": 42275.0, "48720": 39208.0, "48725": 40808.0, "48730": 38527.0, 
"48735": 40830.0, "48740": 39977.0, "48745": 38458.0, "48750": 40822.0, "48755": 40036.0, "48760": 40828.0, "48765": 40832.0, "48770": 39978.0, "48775": 41591.0, "48780": 40797.0, "48785": 40875.0, "48790": 37981.0, "48795": 41575.0, "48800": 40114.0, "48805": 37159.0, "48810": 38062.0, "48815": 38681.0, "48820": 39284.0, "48825": 37840.0, "48830": 39269.0, "48835": 41516.0, "48840": 40045.0, "48845": 38050.0, "48850": 37829.0, "48855": 40108.0, "48860": 40825.0, "48865": 40821.0, "48870": 40796.0, "48875": 41566.0, "48880": 39282.0, "48885": 40124.0, "48890": 40733.0, "48895": 39368.0, "48900": 40117.0, "48905": 41586.0, "48910": 38762.0, "48915": 41581.0, "48920": 41506.0, "48925": 41657.0, "48930": 40886.0, "48935": 41510.0, "48940": 39529.0, "48945": 39987.0, "48950": 40188.0, "48955": 37901.0, "48960": 40807.0, "48965": 39290.0, "48970": 39973.0, "48975": 39279.0, "48980": 39984.0, "48985": 40126.0, "48990": 39381.0, "48995": 41562.0, "49000": 38598.0, "49005": 39576.0, "49010": 41502.0, "49015": 40892.0, "49020": 40119.0, "49025": 39964.0, "49030": 38684.0, "49035": 39373.0, "49040": 36494.0, "49045": 40050.0, "49050": 40044.0, "49055": 38570.0, "49060": 39278.0, "49065": 39288.0, "49070": 40957.0, "49075": 40799.0, "49080": 36918.0, "49085": 39387.0, "49090": 40112.0, "49095": 40828.0, "49100": 40121.0, "49105": 39345.0, "49110": 40751.0, "49115": 41503.0, "49120": 41515.0, "49125": 40875.0, "49130": 40730.0, "49135": 41576.0, "49140": 40737.0, "49145": 41507.0, "49150": 40812.0, "49155": 41499.0, "49160": 38425.0, "49165": 38750.0, "49170": 40065.0, "49175": 40730.0, "49180": 38509.0, "49185": 37208.0, "49190": 41514.0, "49195": 38019.0, "49200": 40893.0, "49205": 39297.0, "49210": 40746.0, "49215": 40045.0, "49220": 40030.0, "49225": 40736.0, "49230": 40826.0, "49235": 40744.0, "49240": 40058.0, "49245": 41502.0, "49250": 40817.0, "49255": 38596.0, "49260": 39272.0, "49265": 36513.0, "49270": 40729.0, "49275": 40051.0, "49280": 40739.0, "49285": 40128.0, "49290": 40045.0, "49295": 37903.0, "49300": 40055.0, "49305": 39978.0, "49310": 41501.0, "49315": 40136.0, "49320": 41506.0, "49325": 38604.0, "49330": 41515.0, "49335": 38528.0, "49340": 39369.0, "49345": 40814.0, "49350": 40053.0, "49355": 38741.0, "49360": 39356.0, "49365": 40810.0, "49370": 40732.0, "49375": 40737.0, "49380": 40053.0, "49385": 39344.0, "49390": 38762.0, "49395": 40134.0, "49400": 42263.0, "49405": 38582.0, "49410": 37899.0, "49415": 40052.0, "49420": 41497.0, "49425": 40040.0, "49430": 41509.0, "49435": 40812.0, "49440": 40120.0, "49445": 39282.0, "49450": 39986.0, "49455": 41505.0, "49460": 39348.0, "49465": 42334.0, "49470": 40138.0, "49475": 41513.0, "49480": 38661.0, "49485": 40056.0, "49490": 40891.0, "49495": 40809.0, "49500": 40879.0, "49505": 40112.0, "49510": 40882.0, "49515": 41514.0, "49520": 41498.0, "49525": 38606.0, "49530": 40056.0, "49535": 40736.0, "49540": 40820.0, "49545": 39368.0, "49550": 40731.0, "49555": 40818.0, "49560": 39958.0, "49565": 37957.0, "49570": 37875.0, "49575": 39411.0, "49580": 38613.0, "49585": 41593.0, "49590": 39970.0, "49595": 40826.0, "49600": 40804.0, "49605": 40068.0, "49610": 38735.0, "49615": 38678.0, "49620": 40807.0, "49625": 38582.0, "49630": 41574.0, "49635": 40872.0, "49640": 38748.0, "49645": 38503.0, "49650": 42274.0, "49655": 39972.0, "49660": 43043.0, "49665": 40806.0, "49670": 40725.0, "49675": 37813.0, "49680": 39967.0, "49685": 40741.0, "49690": 39349.0, "49695": 40037.0, "49700": 39419.0, "49705": 36521.0, "49710": 39361.0, "49715": 40734.0, "49720": 
40736.0, "49725": 40740.0, "49730": 41505.0, "49735": 40810.0, "49740": 41568.0, "49745": 38025.0, "49750": 37747.0, "49755": 39965.0, "49760": 41568.0, "49765": 40040.0, "49770": 38493.0, "49775": 37808.0, "49780": 40038.0, "49785": 42265.0, "49790": 39318.0, "49795": 39289.0, "49800": 40737.0, "49805": 40886.0, "49810": 39278.0, "49815": 40815.0, "49820": 36528.0, "49825": 39377.0, "49830": 39375.0, "49835": 40742.0, "49840": 39351.0, "49845": 40214.0, "49850": 39346.0, "49855": 40048.0, "49860": 41595.0, "49865": 38814.0, "49870": 36514.0, "49875": 40802.0, "49880": 41635.0, "49885": 40733.0, "49890": 40819.0, "49895": 40820.0, "49900": 40809.0, "49905": 40882.0, "49910": 40821.0, "49915": 40898.0, "49920": 40817.0, "49925": 40823.0, "49930": 42267.0, "49935": 41508.0, "49940": 40725.0, "49945": 40199.0, "49950": 40799.0, "49955": 40060.0, "49960": 40891.0, "49965": 40735.0, "49970": 38436.0, "49975": 39362.0, "49980": 39968.0, "49985": 41505.0, "49990": 39271.0, "49995": 40114.0, "50000": 40115.0, "50005": 37745.0, "50010": 38470.0, "50015": 37757.0, "50020": 40806.0, "50025": 35238.0, "50030": 41503.0, "50035": 40728.0, "50040": 39285.0, "50045": 39220.0, "50050": 40817.0, "50055": 40104.0, "50060": 39338.0, "50065": 40103.0, "50070": 40743.0, "50075": 38671.0, "50080": 41497.0, "50085": 38581.0, "50090": 40743.0, "50095": 38023.0, "50100": 37893.0, "50105": 40823.0, "50110": 38670.0, "50115": 38678.0, "50120": 37070.0, "50125": 38578.0, "50130": 39352.0, "50135": 39273.0, "50140": 39407.0, "50145": 40742.0, "50150": 37223.0, "50155": 40822.0, "50160": 40042.0, "50165": 39193.0, "50170": 40736.0, "50175": 37897.0, "50180": 40056.0, "50185": 40809.0, "50190": 37316.0, "50195": 39293.0, "50200": 40047.0, "50205": 38653.0, "50210": 39260.0, "50215": 40810.0, "50220": 39431.0, "50225": 39960.0, "50230": 40743.0, "50235": 40130.0, "50240": 39972.0, "50245": 42269.0, "50250": 37290.0, "50255": 40819.0, "50260": 40162.0, "50265": 40143.0, "50270": 36982.0, "50275": 40739.0, "50280": 41496.0, "50285": 40746.0, "50290": 40112.0, "50295": 41582.0, "50300": 40121.0, "50305": 40809.0, "50310": 38792.0, "50315": 40104.0, "50320": 40879.0, "50325": 38748.0, "50330": 39362.0, "50335": 38074.0, "50340": 40035.0, "50345": 39297.0, "50350": 39984.0, "50355": 40746.0, "50360": 40046.0, "50365": 40732.0, "50370": 39351.0, "50375": 40047.0, "50380": 39983.0, "50385": 40805.0, "50390": 39274.0, "50395": 40806.0, "50400": 37835.0, "50405": 40887.0, "50410": 36495.0, "50415": 40045.0, "50420": 40122.0, "50425": 38631.0, "50430": 39285.0, "50435": 39373.0, "50440": 41509.0, "50445": 40813.0, "50450": 41498.0, "50455": 39286.0, "50460": 41581.0, "50465": 38666.0, "50470": 41593.0, "50475": 40820.0, "50480": 39287.0, "50485": 38045.0, "50490": 40045.0, "50495": 39351.0, "50500": 40122.0, "50505": 41515.0, "50510": 40121.0, "50515": 40200.0, "50520": 42260.0, "50525": 39260.0, "50530": 38572.0, "50535": 38608.0, "50540": 39345.0, "50545": 40738.0, "50550": 39409.0, "50555": 40060.0, "50560": 40047.0, "50565": 42268.0, "50570": 37081.0, "50575": 40120.0, "50580": 39272.0, "50585": 40746.0, "50590": 39222.0, "50595": 40800.0, "50600": 41506.0, "50605": 41501.0, "50610": 40051.0, "50615": 40822.0, "50620": 39344.0, "50625": 40879.0, "50630": 38671.0, "50635": 40737.0, "50640": 41575.0, "50645": 40023.0, "50650": 40037.0, "50655": 40811.0, "50660": 40822.0, "50665": 40733.0, "50670": 40837.0, "50675": 40746.0, "50680": 40114.0, "50685": 39213.0, "50690": 37079.0, "50695": 40808.0, "50700": 40751.0, "50705": 41502.0, 
"50710": 39295.0, "50715": 38581.0, "50720": 38765.0, "50725": 39334.0, "50730": 37021.0, "50735": 36457.0, "50740": 40896.0, "50745": 40735.0, "50750": 39208.0, "50755": 39975.0, "50760": 39277.0, "50765": 41507.0, "50770": 38593.0, "50775": 37996.0, "50780": 40059.0, "50785": 39229.0, "50790": 41499.0, "50795": 40820.0, "50800": 38594.0, "50805": 38506.0, "50810": 40044.0, "50815": 40803.0, "50820": 42275.0, "50825": 39360.0, "50830": 40812.0, "50835": 42267.0, "50840": 41506.0, "50845": 36675.0, "50850": 40119.0, "50855": 39276.0, "50860": 39958.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 1116261376.0, "5": 1116261376.0, "10": 1116261376.0, "15": 1116261376.0, "20": 1116261376.0, "25": 1116261376.0, "30": 1116261376.0, "35": 1116261376.0, "40": 1116261376.0, "45": 1116261376.0, "50": 1116261376.0, "55": 1116261376.0, "60": 1116261376.0, "65": 1116261376.0, "70": 1116261376.0, "75": 1116261376.0, "80": 1116261376.0, "85": 1116261376.0, "90": 1116261376.0, "95": 1116261376.0, "100": 1116261376.0, "105": 1116261376.0, "110": 1116261376.0, "115": 1116261376.0, "120": 1116261376.0, "125": 1116261376.0, "130": 1116261376.0, "135": 1116261376.0, "140": 1116261376.0, "145": 1116261376.0, "150": 1116261376.0, "155": 1116261376.0, "160": 1116261376.0, "165": 1116261376.0, "170": 1116261376.0, "175": 1116261376.0, "180": 1116261376.0, "185": 1116261376.0, "190": 1116261376.0, "195": 1116261376.0, "200": 1116261376.0, "205": 1116261376.0, "210": 1116261376.0, "215": 1116261376.0, "220": 1116261376.0, "225": 1116261376.0, "230": 1116261376.0, "235": 1116261376.0, "240": 1116261376.0, "245": 1116261376.0, "250": 1116261376.0, "255": 1116261376.0, "260": 1116261376.0, "265": 1116261376.0, "270": 1116261376.0, "275": 1116261376.0, "280": 1116261376.0, "285": 1116261376.0, "290": 1116261376.0, "295": 1116261376.0, "300": 1116261376.0, "305": 1116261376.0, "310": 1116261376.0, "315": 1116261376.0, "320": 1116261376.0, "325": 1116261376.0, "330": 1116261376.0, "335": 1116261376.0, "340": 1116261376.0, "345": 1116261376.0, "350": 1116261376.0, "355": 1116261376.0, "360": 1116261376.0, "365": 1116261376.0, "370": 1116261376.0, "375": 1116261376.0, "380": 1116261376.0, "385": 1116261376.0, "390": 1116261376.0, "395": 1116261376.0, "400": 1116261376.0, "405": 1116261376.0, "410": 1116261376.0, "415": 1116261376.0, "420": 1116261376.0, "425": 1116261376.0, "430": 1116261376.0, "435": 1116261376.0, "440": 1116261376.0, "445": 1116261376.0, "450": 1116261376.0, "455": 1116261376.0, "460": 1116261376.0, "465": 1116261376.0, "470": 1116261376.0, "475": 1116261376.0, "480": 1116261376.0, "485": 1116261376.0, "490": 1116261376.0, "495": 1116261376.0, "500": 1116261376.0, "505": 1116261376.0, "510": 1116261376.0, "515": 1116261376.0, "520": 1116261376.0, "525": 1116261376.0, "530": 1116261376.0, "535": 1116261376.0, "540": 1116261376.0, "545": 1116261376.0, "550": 1116261376.0, "555": 1116261376.0, "560": 1116261376.0, "565": 1116261376.0, "570": 1116261376.0, "575": 1116261376.0, "580": 1116261376.0, "585": 1116261376.0, "590": 1116261376.0, "595": 1116261376.0, "600": 1116261376.0, "605": 1116261376.0, "610": 1116261376.0, "615": 1116261376.0, "620": 1116261376.0, "625": 1116261376.0, "630": 1116261376.0, "635": 1116261376.0, "640": 1116261376.0, "645": 1116261376.0, "650": 1116261376.0, "655": 1116261376.0, "660": 1116261376.0, "665": 1116261376.0, "670": 1116261376.0, "675": 1116261376.0, "680": 1116261376.0, "685": 1116261376.0, "690": 1116261376.0, "695": 
1116261376.0, "700": 1116261376.0, "705": 1116261376.0, "710": 1116261376.0, "715": 1116261376.0, "720": 1116261376.0, "725": 1116261376.0, "730": 1116261376.0, "735": 1116261376.0, "740": 1116261376.0, "745": 1116261376.0, "750": 1116261376.0, "755": 1116261376.0, "760": 1116261376.0, "765": 1116261376.0, "770": 1116261376.0, "775": 1116261376.0, "780": 1116261376.0, "785": 1116261376.0, "790": 1116261376.0, "795": 1116261376.0, "800": 1116261376.0, "805": 1116261376.0, "810": 1116261376.0, "815": 1116261376.0, "820": 1116261376.0, "825": 1116261376.0, "830": 1116261376.0, "835": 1116261376.0, "840": 1116261376.0, "845": 1116261376.0, "850": 1116261376.0, "855": 1116261376.0, "860": 1116261376.0, "865": 1116261376.0, "870": 1116261376.0, "875": 1116261376.0, "880": 1116261376.0, "885": 1116261376.0, "890": 1116261376.0, "895": 1116261376.0, "900": 1116261376.0, "905": 1116261376.0, "910": 1116261376.0, "915": 1116261376.0, "920": 1116261376.0, "925": 1116261376.0, "930": 1116261376.0, "935": 1116261376.0, "940": 1116261376.0, "945": 1116261376.0, "950": 1116261376.0, "955": 1116261376.0, "960": 1116261376.0, "965": 1116261376.0, "970": 1116261376.0, "975": 1116261376.0, "980": 1116261376.0, "985": 1116261376.0, "990": 1116261376.0, "995": 1116261376.0, "1000": 1116261376.0, "1005": 1116261376.0, "1010": 1116261376.0, "1015": 1116261376.0, "1020": 1116261376.0, "1025": 1116261376.0, "1030": 1116261376.0, "1035": 1116261376.0, "1040": 1116261376.0, "1045": 1116261376.0, "1050": 1116261376.0, "1055": 1116261376.0, "1060": 1116261376.0, "1065": 1116261376.0, "1070": 1116261376.0, "1075": 1116261376.0, "1080": 1116261376.0, "1085": 1116261376.0, "1090": 1116261376.0, "1095": 1116261376.0, "1100": 1116261376.0, "1105": 1116261376.0, "1110": 1116261376.0, "1115": 1116261376.0, "1120": 1116261376.0, "1125": 1116261376.0, "1130": 1116261376.0, "1135": 1116261376.0, "1140": 1116261376.0, "1145": 1116261376.0, "1150": 1116261376.0, "1155": 1116261376.0, "1160": 1116261376.0, "1165": 1116261376.0, "1170": 1116261376.0, "1175": 1116261376.0, "1180": 1116261376.0, "1185": 1116261376.0, "1190": 1116261376.0, "1195": 1116261376.0, "1200": 1116261376.0, "1205": 1116261376.0, "1210": 1116261376.0, "1215": 1116261376.0, "1220": 1116261376.0, "1225": 1116261376.0, "1230": 1116261376.0, "1235": 1116261376.0, "1240": 1116261376.0, "1245": 1116261376.0, "1250": 1116261376.0, "1255": 1116261376.0, "1260": 1116261376.0, "1265": 1116261376.0, "1270": 1116261376.0, "1275": 1116261376.0, "1280": 1116261376.0, "1285": 1116261376.0, "1290": 1116261376.0, "1295": 1116261376.0, "1300": 1116261376.0, "1305": 1116261376.0, "1310": 1116261376.0, "1315": 1116261376.0, "1320": 1116261376.0, "1325": 1116261376.0, "1330": 1116261376.0, "1335": 1116261376.0, "1340": 1116261376.0, "1345": 1116261376.0, "1350": 1116261376.0, "1355": 1116261376.0, "1360": 1116261376.0, "1365": 1116261376.0, "1370": 1116261376.0, "1375": 1116261376.0, "1380": 1116261376.0, "1385": 1116261376.0, "1390": 1116261376.0, "1395": 1116261376.0, "1400": 1116261376.0, "1405": 1116261376.0, "1410": 1116261376.0, "1415": 1116261376.0, "1420": 1116261376.0, "1425": 1116261376.0, "1430": 1116261376.0, "1435": 1116261376.0, "1440": 1116261376.0, "1445": 1116261376.0, "1450": 1116261376.0, "1455": 1116261376.0, "1460": 1116261376.0, "1465": 1116261376.0, "1470": 1116261376.0, "1475": 1116261376.0, "1480": 1116261376.0, "1485": 1116261376.0, "1490": 1116261376.0, "1495": 1116261376.0, "1500": 1116261376.0, "1505": 1116261376.0, "1510": 1116261376.0, "1515": 
1116261376.0, "1520": 1116261376.0, "1525": 1116261376.0, "1530": 1116261376.0, "1535": 1116261376.0, "1540": 1116261376.0, "1545": 1116261376.0, "1550": 1116261376.0, "1555": 1116261376.0, "1560": 1116261376.0, "1565": 1116261376.0, "1570": 1116261376.0, "1575": 1116261376.0, "1580": 1116261376.0, "1585": 1116261376.0, "1590": 1116261376.0, "1595": 1116261376.0, "1600": 1116261376.0, "1605": 1116261376.0, "1610": 1116261376.0, "1615": 1116261376.0, "1620": 1116261376.0, "1625": 1116261376.0, "1630": 1116261376.0, "1635": 1116261376.0, "1640": 1116261376.0, "1645": 1116261376.0, "1650": 1116261376.0, "1655": 1116261376.0, "1660": 1116261376.0, "1665": 1116261376.0, "1670": 1116261376.0, "1675": 1116261376.0, "1680": 1116261376.0, "1685": 1116261376.0, "1690": 1116261376.0, "1695": 1116261376.0, "1700": 1116261376.0, "1705": 1116261376.0, "1710": 1116261376.0, "1715": 1116261376.0, "1720": 1116261376.0, "1725": 1116261376.0, "1730": 1116261376.0, "1735": 1116261376.0, "1740": 1116261376.0, "1745": 1116261376.0, "1750": 1116261376.0, "1755": 1116261376.0, "1760": 1116261376.0, "1765": 1116261376.0, "1770": 1116261376.0, "1775": 1116261376.0, "1780": 1116261376.0, "1785": 1116261376.0, "1790": 1116261376.0, "1795": 1116261376.0, "1800": 1116261376.0, "1805": 1116261376.0, "1810": 1116261376.0, "1815": 1116261376.0, "1820": 1116261376.0, "1825": 1116261376.0, "1830": 1116261376.0, "1835": 1116261376.0, "1840": 1116261376.0, "1845": 1116261376.0, "1850": 1116261376.0, "1855": 1116261376.0, "1860": 1116261376.0, "1865": 1116261376.0, "1870": 1116261376.0, "1875": 1116261376.0, "1880": 1116261376.0, "1885": 1116261376.0, "1890": 1116261376.0, "1895": 1116261376.0, "1900": 1116261376.0, "1905": 1116261376.0, "1910": 1116261376.0, "1915": 1116261376.0, "1920": 1116261376.0, "1925": 1116261376.0, "1930": 1116261376.0, "1935": 1116261376.0, "1940": 1116261376.0, "1945": 1116261376.0, "1950": 1116261376.0, "1955": 1116261376.0, "1960": 1116261376.0, "1965": 1116261376.0, "1970": 1116261376.0, "1975": 1116261376.0, "1980": 1116261376.0, "1985": 1116261376.0, "1990": 1116261376.0, "1995": 1116261376.0, "2000": 1116261376.0, "2005": 1116261376.0, "2010": 1116261376.0, "2015": 1116261376.0, "2020": 1116261376.0, "2025": 1116261376.0, "2030": 1116261376.0, "2035": 1116261376.0, "2040": 1116261376.0, "2045": 1116261376.0, "2050": 1116261376.0, "2055": 1116261376.0, "2060": 1116261376.0, "2065": 1116261376.0, "2070": 1116261376.0, "2075": 1116261376.0, "2080": 1116261376.0, "2085": 1116261376.0, "2090": 1116261376.0, "2095": 1116261376.0, "2100": 1116261376.0, "2105": 1116261376.0, "2110": 1116261376.0, "2115": 1116261376.0, "2120": 1116261376.0, "2125": 1116261376.0, "2130": 1116261376.0, "2135": 1116261376.0, "2140": 1116261376.0, "2145": 1116261376.0, "2150": 1116261376.0, "2155": 1116261376.0, "2160": 1116261376.0, "2165": 1116261376.0, "2170": 1116261376.0, "2175": 1116261376.0, "2180": 1116261376.0, "2185": 1116261376.0, "2190": 1116261376.0, "2195": 1116261376.0, "2200": 1116261376.0, "2205": 1116261376.0, "2210": 1116261376.0, "2215": 1116261376.0, "2220": 1116261376.0, "2225": 1116261376.0, "2230": 1116261376.0, "2235": 1116261376.0, "2240": 1116261376.0, "2245": 1116261376.0, "2250": 1116261376.0, "2255": 1116261376.0, "2260": 1116261376.0, "2265": 1116261376.0, "2270": 1116261376.0, "2275": 1116261376.0, "2280": 1116261376.0, "2285": 1116261376.0, "2290": 1116261376.0, "2295": 1116261376.0, "2300": 1116261376.0, "2305": 1116261376.0, "2310": 1116261376.0, "2315": 1116261376.0, "2320": 
1116261376.0, "2325": 1116261376.0, "2330": 1116261376.0, "2335": 1116261376.0, "2340": 1116261376.0, "2345": 1116261376.0, "2350": 1116261376.0, "2355": 1116261376.0, "2360": 1116261376.0, "2365": 1116261376.0, "2370": 1116261376.0, "2375": 1116261376.0, "2380": 1116261376.0, "2385": 1116261376.0, "2390": 1116261376.0, "2395": 1116261376.0, "2400": 1116261376.0, "2405": 1116261376.0, "2410": 1116261376.0, "2415": 1116261376.0, "2420": 1116261376.0, "2425": 1116261376.0, "2430": 1116261376.0, "2435": 1116261376.0, "2440": 1116261376.0, "2445": 1116261376.0, "2450": 1116261376.0, "2455": 1116261376.0, "2460": 1116261376.0, "2465": 1116261376.0, "2470": 1116261376.0, "2475": 1116261376.0, "2480": 1116261376.0, "2485": 1116261376.0, "2490": 1116261376.0, "2495": 1116261376.0, "2500": 1116261376.0, "2505": 1116261376.0, "2510": 1116261376.0, "2515": 1116261376.0, "2520": 1116261376.0, "2525": 1116261376.0, "2530": 1116261376.0, "2535": 1116261376.0, "2540": 1116261376.0, "2545": 1116261376.0, "2550": 1116261376.0, "2555": 1116261376.0, "2560": 1116261376.0, "2565": 1116261376.0, "2570": 1116261376.0, "2575": 1116261376.0, "2580": 1116261376.0, "2585": 1116261376.0, "2590": 1116261376.0, "2595": 1116261376.0, "2600": 1116261376.0, "2605": 1116261376.0, "2610": 1116261376.0, "2615": 1116261376.0, "2620": 1116261376.0, "2625": 1116261376.0, "2630": 1116261376.0, "2635": 1116261376.0, "2640": 1116261376.0, "2645": 1116261376.0, "2650": 1116261376.0, "2655": 1116261376.0, "2660": 1116261376.0, "2665": 1116261376.0, "2670": 1116261376.0, "2675": 1116261376.0, "2680": 1116261376.0, "2685": 1116261376.0, "2690": 1116261376.0, "2695": 1116261376.0, "2700": 1116261376.0, "2705": 1116261376.0, "2710": 1116261376.0, "2715": 1116261376.0, "2720": 1116261376.0, "2725": 1116261376.0, "2730": 1116261376.0, "2735": 1116261376.0, "2740": 1116261376.0, "2745": 1116261376.0, "2750": 1116261376.0, "2755": 1116261376.0, "2760": 1116261376.0, "2765": 1116261376.0, "2770": 1116261376.0, "2775": 1116261376.0, "2780": 1116261376.0, "2785": 1116261376.0, "2790": 1116261376.0, "2795": 1116261376.0, "2800": 1116261376.0, "2805": 1116261376.0, "2810": 1116261376.0, "2815": 1116261376.0, "2820": 1116261376.0, "2825": 1116261376.0, "2830": 1116261376.0, "2835": 1116261376.0, "2840": 1116261376.0, "2845": 1116261376.0, "2850": 1116261376.0, "2855": 1116261376.0, "2860": 1116261376.0, "2865": 1116261376.0, "2870": 1116261376.0, "2875": 1116261376.0, "2880": 1116261376.0, "2885": 1116261376.0, "2890": 1116261376.0, "2895": 1116261376.0, "2900": 1116261376.0, "2905": 1116261376.0, "2910": 1116261376.0, "2915": 1116261376.0, "2920": 1116261376.0, "2925": 1116261376.0, "2930": 1116261376.0, "2935": 1116261376.0, "2940": 1116261376.0, "2945": 1116261376.0, "2950": 1116261376.0, "2955": 1116261376.0, "2960": 1116261376.0, "2965": 1116261376.0, "2970": 1116261376.0, "2975": 1116261376.0, "2980": 1116261376.0, "2985": 1116261376.0, "2990": 1116261376.0, "2995": 1116261376.0, "3000": 1116261376.0, "3005": 1116261376.0, "3010": 1116261376.0, "3015": 1116261376.0, "3020": 1116261376.0, "3025": 1116261376.0, "3030": 1116261376.0, "3035": 1116261376.0, "3040": 1116261376.0, "3045": 1116261376.0, "3050": 1116261376.0, "3055": 1116261376.0, "3060": 1116261376.0, "3065": 1116261376.0, "3070": 1116261376.0, "3075": 1116261376.0, "3080": 1116261376.0, "3085": 1116261376.0, "3090": 1116261376.0, "3095": 1116261376.0, "3100": 1116261376.0, "3105": 1116261376.0, "3110": 1116261376.0, "3115": 1116261376.0, "3120": 1116261376.0, "3125": 
1116261376.0, "3130": 1116261376.0, "3135": 1116261376.0, "3140": 1116261376.0, "3145": 1116261376.0, "3150": 1116261376.0, "3155": 1116261376.0, "3160": 1116261376.0, "3165": 1116261376.0, "3170": 1116261376.0, "3175": 1116261376.0, "3180": 1116261376.0, "3185": 1116261376.0, "3190": 1116261376.0, "3195": 1116261376.0, "3200": 1116261376.0, "3205": 1116261376.0, "3210": 1116261376.0, "3215": 1116261376.0, "3220": 1116261376.0, "3225": 1116261376.0, "3230": 1116261376.0, "3235": 1116261376.0, "3240": 1116261376.0, "3245": 1116261376.0, "3250": 1116261376.0, "3255": 1116261376.0, "3260": 1116261376.0, "3265": 1116261376.0, "3270": 1116261376.0, "3275": 1116261376.0, "3280": 1116261376.0, "3285": 1116261376.0, "3290": 1116261376.0, "3295": 1116261376.0, "3300": 1116261376.0, "3305": 1116261376.0, "3310": 1116261376.0, "3315": 1116261376.0, "3320": 1116261376.0, "3325": 1116261376.0, "3330": 1116261376.0, "3335": 1116261376.0, "3340": 1116261376.0, "3345": 1116261376.0, "3350": 1116261376.0, "3355": 1116261376.0, "3360": 1116261376.0, "3365": 1116261376.0, "3370": 1116261376.0, "3375": 1116261376.0, "3380": 1116261376.0, "3385": 1116261376.0, "3390": 1116261376.0, "3395": 1116261376.0, "3400": 1116261376.0, "3405": 1116261376.0, "3410": 1116261376.0, "3415": 1116261376.0, "3420": 1116261376.0, "3425": 1116261376.0, "3430": 1116261376.0, "3435": 1116261376.0, "3440": 1116261376.0, "3445": 1116261376.0, "3450": 1116261376.0, "3455": 1116261376.0, "3460": 1116261376.0, "3465": 1116261376.0, "3470": 1116261376.0, "3475": 1116261376.0, "3480": 1116261376.0, "3485": 1116261376.0, "3490": 1116261376.0, "3495": 1116261376.0, "3500": 1116261376.0, "3505": 1116261376.0, "3510": 1116261376.0, "3515": 1116261376.0, "3520": 1116261376.0, "3525": 1116261376.0, "3530": 1116261376.0, "3535": 1116261376.0, "3540": 1116261376.0, "3545": 1116261376.0, "3550": 1116261376.0, "3555": 1116261376.0, "3560": 1116261376.0, "3565": 1116261376.0, "3570": 1116261376.0, "3575": 1116261376.0, "3580": 1116261376.0, "3585": 1116261376.0, "3590": 1116261376.0, "3595": 1116261376.0, "3600": 1116261376.0, "3605": 1116261376.0, "3610": 1116261376.0, "3615": 1116261376.0, "3620": 1116261376.0, "3625": 1116261376.0, "3630": 1116261376.0, "3635": 1116261376.0, "3640": 1116261376.0, "3645": 1116261376.0, "3650": 1116261376.0, "3655": 1116261376.0, "3660": 1116261376.0, "3665": 1116261376.0, "3670": 1116261376.0, "3675": 1116261376.0, "3680": 1116261376.0, "3685": 1116261376.0, "3690": 1116261376.0, "3695": 1116261376.0, "3700": 1116261376.0, "3705": 1116261376.0, "3710": 1116261376.0, "3715": 1116261376.0, "3720": 1116261376.0, "3725": 1116261376.0, "3730": 1116261376.0, "3735": 1116261376.0, "3740": 1116261376.0, "3745": 1116261376.0, "3750": 1116261376.0, "3755": 1116261376.0, "3760": 1116261376.0, "3765": 1116261376.0, "3770": 1116261376.0, "3775": 1116261376.0, "3780": 1116261376.0, "3785": 1116261376.0, "3790": 1116261376.0, "3795": 1116261376.0, "3800": 1116261376.0, "3805": 1116261376.0, "3810": 1116261376.0, "3815": 1116261376.0, "3820": 1116261376.0, "3825": 1116261376.0, "3830": 1116261376.0, "3835": 1116261376.0, "3840": 1116261376.0, "3845": 1116261376.0, "3850": 1116261376.0, "3855": 1116261376.0, "3860": 1116261376.0, "3865": 1116261376.0, "3870": 1116261376.0, "3875": 1116261376.0, "3880": 1116261376.0, "3885": 1116261376.0, "3890": 1116261376.0, "3895": 1116261376.0, "3900": 1116261376.0, "3905": 1116261376.0, "3910": 1116261376.0, "3915": 1116261376.0, "3920": 1116261376.0, "3925": 1116261376.0, "3930": 
1116261376.0, "3935": 1116261376.0, "3940": 1116261376.0, "3945": 1116261376.0, "3950": 1116261376.0, "3955": 1116261376.0, "3960": 1116261376.0, "3965": 1116261376.0, "3970": 1116261376.0, "3975": 1116261376.0, "3980": 1116261376.0, "3985": 1116261376.0, "3990": 1116261376.0, "3995": 1116261376.0, "4000": 1116261376.0, "4005": 1116261376.0, "4010": 1116261376.0, "4015": 1116261376.0, "4020": 1116261376.0, "4025": 1116261376.0, "4030": 1116261376.0, "4035": 1116261376.0, "4040": 1116261376.0, "4045": 1116261376.0, "4050": 1116261376.0, "4055": 1116261376.0, "4060": 1116261376.0, "4065": 1116261376.0, "4070": 1116261376.0, "4075": 1116261376.0, "4080": 1116261376.0, "4085": 1116261376.0, "4090": 1116261376.0, "4095": 1116261376.0, "4100": 1116261376.0, "4105": 1116261376.0, "4110": 1116261376.0, "4115": 1116261376.0, "4120": 1116261376.0, "4125": 1116261376.0, "4130": 1116261376.0, "4135": 1116261376.0, "4140": 1116261376.0, "4145": 1116261376.0, "4150": 1116261376.0, "4155": 1116261376.0, "4160": 1116261376.0, "4165": 1116261376.0, "4170": 1116261376.0, "4175": 1116261376.0, "4180": 1116261376.0, "4185": 1116261376.0, "4190": 1116261376.0, "4195": 1116261376.0, "4200": 1116261376.0, "4205": 1116261376.0, "4210": 1116261376.0, "4215": 1116261376.0, "4220": 1116261376.0, "4225": 1116261376.0, "4230": 1116261376.0, "4235": 1116261376.0, "4240": 1116261376.0, "4245": 1116261376.0, "4250": 1116261376.0, "4255": 1116261376.0, "4260": 1116261376.0, "4265": 1116261376.0, "4270": 1116261376.0, "4275": 1116261376.0, "4280": 1116261376.0, "4285": 1116261376.0, "4290": 1116261376.0, "4295": 1116261376.0, "4300": 1116261376.0, "4305": 1116261376.0, "4310": 1116261376.0, "4315": 1116261376.0, "4320": 1116261376.0, "4325": 1116261376.0, "4330": 1116261376.0, "4335": 1116261376.0, "4340": 1116261376.0, "4345": 1116261376.0, "4350": 1116261376.0, "4355": 1116261376.0, "4360": 1116261376.0, "4365": 1116261376.0, "4370": 1116261376.0, "4375": 1116261376.0, "4380": 1116261376.0, "4385": 1116261376.0, "4390": 1116261376.0, "4395": 1116261376.0, "4400": 1116261376.0, "4405": 1116261376.0, "4410": 1116261376.0, "4415": 1116261376.0, "4420": 1116261376.0, "4425": 1116261376.0, "4430": 1116261376.0, "4435": 1116261376.0, "4440": 1116261376.0, "4445": 1116261376.0, "4450": 1116261376.0, "4455": 1116261376.0, "4460": 1116261376.0, "4465": 1116261376.0, "4470": 1116261376.0, "4475": 1116261376.0, "4480": 1116261376.0, "4485": 1116261376.0, "4490": 1116261376.0, "4495": 1116261376.0, "4500": 1116261376.0, "4505": 1116261376.0, "4510": 1116261376.0, "4515": 1116261376.0, "4520": 1116261376.0, "4525": 1116261376.0, "4530": 1116261376.0, "4535": 1116261376.0, "4540": 1116261376.0, "4545": 1116261376.0, "4550": 1116261376.0, "4555": 1116261376.0, "4560": 1116261376.0, "4565": 1116261376.0, "4570": 1116261376.0, "4575": 1116261376.0, "4580": 1116261376.0, "4585": 1116261376.0, "4590": 1116261376.0, "4595": 1116261376.0, "4600": 1116261376.0, "4605": 1116261376.0, "4610": 1116261376.0, "4615": 1116261376.0, "4620": 1116261376.0, "4625": 1116261376.0, "4630": 1116261376.0, "4635": 1116261376.0, "4640": 1116261376.0, "4645": 1116261376.0, "4650": 1116261376.0, "4655": 1116261376.0, "4660": 1116261376.0, "4665": 1116261376.0, "4670": 1116261376.0, "4675": 1116261376.0, "4680": 1116261376.0, "4685": 1116261376.0, "4690": 1116261376.0, "4695": 1116261376.0, "4700": 1116261376.0, "4705": 1116261376.0, "4710": 1116261376.0, "4715": 1116261376.0, "4720": 1116261376.0, "4725": 1116261376.0, "4730": 1116261376.0, "4735": 
1116261376.0, "4740": 1116261376.0, "4745": 1116261376.0, "4750": 1116261376.0, "4755": 1116261376.0, "4760": 1116261376.0, "4765": 1116261376.0, "4770": 1116261376.0, "4775": 1116261376.0, "4780": 1116261376.0, "4785": 1116261376.0, "4790": 1116261376.0, "4795": 1116261376.0, "4800": 1116261376.0, "4805": 1116261376.0, "4810": 1116261376.0, "4815": 1116261376.0, "4820": 1116261376.0, "4825": 1116261376.0, "4830": 1116261376.0, "4835": 1116261376.0, "4840": 1116261376.0, "4845": 1116261376.0, "4850": 1116261376.0, "4855": 1116261376.0, "4860": 1116261376.0, "4865": 1116261376.0, "4870": 1116261376.0, "4875": 1116261376.0, "4880": 1116261376.0, "4885": 1116261376.0, "4890": 1116261376.0, "4895": 1116261376.0, "4900": 1116261376.0, "4905": 1116261376.0, "4910": 1116261376.0, "4915": 1116261376.0, "4920": 1116261376.0, "4925": 1116261376.0, "4930": 1116261376.0, "4935": 1116261376.0, "4940": 1116261376.0, "4945": 1116261376.0, "4950": 1116261376.0, "4955": 1116261376.0, "4960": 1116261376.0, "4965": 1116261376.0, "4970": 1116261376.0, "4975": 1116261376.0, "4980": 1116261376.0, "4985": 1116261376.0, "4990": 1116261376.0, "4995": 1116261376.0, "5000": 1116261376.0, "5005": 1116261376.0, "5010": 1116261376.0, "5015": 1116261376.0, "5020": 1116261376.0, "5025": 1116261376.0, "5030": 1116261376.0, "5035": 1116261376.0, "5040": 1116261376.0, "5045": 1116261376.0, "5050": 1116261376.0, "5055": 1116261376.0, "5060": 1116261376.0, "5065": 1116261376.0, "5070": 1116261376.0, "5075": 1116261376.0, "5080": 1116261376.0, "5085": 1116261376.0, "5090": 1116261376.0, "5095": 1116261376.0, "5100": 1116261376.0, "5105": 1116261376.0, "5110": 1116261376.0, "5115": 1116261376.0, "5120": 1116261376.0, "5125": 1116261376.0, "5130": 1116261376.0, "5135": 1116261376.0, "5140": 1116261376.0, "5145": 1116261376.0, "5150": 1116261376.0, "5155": 1116261376.0, "5160": 1116261376.0, "5165": 1116261376.0, "5170": 1116261376.0, "5175": 1116261376.0, "5180": 1116261376.0, "5185": 1116261376.0, "5190": 1116261376.0, "5195": 1116261376.0, "5200": 1116261376.0, "5205": 1116261376.0, "5210": 1116261376.0, "5215": 1116261376.0, "5220": 1116261376.0, "5225": 1116261376.0, "5230": 1116261376.0, "5235": 1116261376.0, "5240": 1116261376.0, "5245": 1116261376.0, "5250": 1116261376.0, "5255": 1116261376.0, "5260": 1116261376.0, "5265": 1116261376.0, "5270": 1116261376.0, "5275": 1116261376.0, "5280": 1116261376.0, "5285": 1116261376.0, "5290": 1116261376.0, "5295": 1116261376.0, "5300": 1116261376.0, "5305": 1116261376.0, "5310": 1116261376.0, "5315": 1116261376.0, "5320": 1116261376.0, "5325": 1116261376.0, "5330": 1116261376.0, "5335": 1116261376.0, "5340": 1116261376.0, "5345": 1116261376.0, "5350": 1116261376.0, "5355": 1116261376.0, "5360": 1116261376.0, "5365": 1116261376.0, "5370": 1116261376.0, "5375": 1116261376.0, "5380": 1116261376.0, "5385": 1116261376.0, "5390": 1116261376.0, "5395": 1116261376.0, "5400": 1116261376.0, "5405": 1116261376.0, "5410": 1116261376.0, "5415": 1116261376.0, "5420": 1116261376.0, "5425": 1116261376.0, "5430": 1116261376.0, "5435": 1116261376.0, "5440": 1116261376.0, "5445": 1116261376.0, "5450": 1116261376.0, "5455": 1116261376.0, "5460": 1116261376.0, "5465": 1116261376.0, "5470": 1116261376.0, "5475": 1116261376.0, "5480": 1116261376.0, "5485": 1116261376.0, "5490": 1116261376.0, "5495": 1116261376.0, "5500": 1116261376.0, "5505": 1116261376.0, "5510": 1116261376.0, "5515": 1116261376.0, "5520": 1116261376.0, "5525": 1116261376.0, "5530": 1116261376.0, "5535": 1116261376.0, "5540": 
1116261376.0, "5545": 1116261376.0, "5550": 1116261376.0, "5555": 1116261376.0, "5560": 1116261376.0, "5565": 1116261376.0, "5570": 1116261376.0, "5575": 1116261376.0, "5580": 1116261376.0, "5585": 1116261376.0, "5590": 1116261376.0, "5595": 1116261376.0, "5600": 1116261376.0, "5605": 1116261376.0, "5610": 1116261376.0, "5615": 1116261376.0, "5620": 1116261376.0, "5625": 1116261376.0, "5630": 1116261376.0, "5635": 1116261376.0, "5640": 1116261376.0, "5645": 1116261376.0, "5650": 1116261376.0, "5655": 1116261376.0, "5660": 1116261376.0, "5665": 1116261376.0, "5670": 1116261376.0, "5675": 1116261376.0, "5680": 1116261376.0, "5685": 1116261376.0, "5690": 1116261376.0, "5695": 1116261376.0, "5700": 1116261376.0, "5705": 1116261376.0, "5710": 1116261376.0, "5715": 1116261376.0, "5720": 1116261376.0, "5725": 1116261376.0, "5730": 1116261376.0, "5735": 1116261376.0, "5740": 1116261376.0, "5745": 1116261376.0, "5750": 1116261376.0, "5755": 1116261376.0, "5760": 1116261376.0, "5765": 1116261376.0, "5770": 1116261376.0, "5775": 1116261376.0, "5780": 1116261376.0, "5785": 1116261376.0, "5790": 1116261376.0, "5795": 1116261376.0, "5800": 1116261376.0, "5805": 1116261376.0, "5810": 1116261376.0, "5815": 1116261376.0, "5820": 1116261376.0, "5825": 1116261376.0, "5830": 1116261376.0, "5835": 1116261376.0, "5840": 1116261376.0, "5845": 1116261376.0, "5850": 1116261376.0, "5855": 1116261376.0, "5860": 1116261376.0, "5865": 1116261376.0, "5870": 1116261376.0, "5875": 1116261376.0, "5880": 1116261376.0, "5885": 1116261376.0, "5890": 1116261376.0, "5895": 1116261376.0, "5900": 1116261376.0, "5905": 1116261376.0, "5910": 1116261376.0, "5915": 1116261376.0, "5920": 1116261376.0, "5925": 1116261376.0, "5930": 1116261376.0, "5935": 1116261376.0, "5940": 1116261376.0, "5945": 1116261376.0, "5950": 1116261376.0, "5955": 1116261376.0, "5960": 1116261376.0, "5965": 1116261376.0, "5970": 1116261376.0, "5975": 1116261376.0, "5980": 1116261376.0, "5985": 1116261376.0, "5990": 1116261376.0, "5995": 1116261376.0, "6000": 1116261376.0, "6005": 1116261376.0, "6010": 1116261376.0, "6015": 1116261376.0, "6020": 1116261376.0, "6025": 1116261376.0, "6030": 1116261376.0, "6035": 1116261376.0, "6040": 1116261376.0, "6045": 1116261376.0, "6050": 1116261376.0, "6055": 1116261376.0, "6060": 1116261376.0, "6065": 1116261376.0, "6070": 1116261376.0, "6075": 1116261376.0, "6080": 1116261376.0, "6085": 1116261376.0, "6090": 1116261376.0, "6095": 1116261376.0, "6100": 1116261376.0, "6105": 1116261376.0, "6110": 1116261376.0, "6115": 1116261376.0, "6120": 1116261376.0, "6125": 1116261376.0, "6130": 1116261376.0, "6135": 1116261376.0, "6140": 1116261376.0, "6145": 1116261376.0, "6150": 1116261376.0, "6155": 1116261376.0, "6160": 1116261376.0, "6165": 1116261376.0, "6170": 1116261376.0, "6175": 1116261376.0, "6180": 1116261376.0, "6185": 1116261376.0, "6190": 1116261376.0, "6195": 1116261376.0, "6200": 1116261376.0, "6205": 1116261376.0, "6210": 1116261376.0, "6215": 1116261376.0, "6220": 1116261376.0, "6225": 1116261376.0, "6230": 1116261376.0, "6235": 1116261376.0, "6240": 1116261376.0, "6245": 1116261376.0, "6250": 1116261376.0, "6255": 1116261376.0, "6260": 1116261376.0, "6265": 1116261376.0, "6270": 1116261376.0, "6275": 1116261376.0, "6280": 1116261376.0, "6285": 1116261376.0, "6290": 1116261376.0, "6295": 1116261376.0, "6300": 1116261376.0, "6305": 1116261376.0, "6310": 1116261376.0, "6315": 1116261376.0, "6320": 1116261376.0, "6325": 1116261376.0, "6330": 1116261376.0, "6335": 1116261376.0, "6340": 1116261376.0, "6345": 
1116261376.0, "6350": 1116261376.0, "6355": 1116261376.0, "6360": 1116261376.0, "6365": 1116261376.0, "6370": 1116261376.0, "6375": 1116261376.0, "6380": 1116261376.0, "6385": 1116261376.0, "6390": 1116261376.0, "6395": 1116261376.0, "6400": 1116261376.0, "6405": 1116261376.0, "6410": 1116261376.0, "6415": 1116261376.0, "6420": 1116261376.0, "6425": 1116261376.0, "6430": 1116261376.0, "6435": 1116261376.0, "6440": 1116261376.0, "6445": 1116261376.0, "6450": 1116261376.0, "6455": 1116261376.0, "6460": 1116261376.0, "6465": 1116261376.0, "6470": 1116261376.0, "6475": 1116261376.0, "6480": 1116261376.0, "6485": 1116261376.0, "6490": 1116261376.0, "6495": 1116261376.0, "6500": 1116261376.0, "6505": 1116261376.0, "6510": 1116261376.0, "6515": 1116261376.0, "6520": 1116261376.0, "6525": 1116261376.0, "6530": 1116261376.0, "6535": 1116261376.0, "6540": 1116261376.0, "6545": 1116261376.0, "6550": 1116261376.0, "6555": 1116261376.0, "6560": 1116261376.0, "6565": 1116261376.0, "6570": 1116261376.0, "6575": 1116261376.0, "6580": 1116261376.0, "6585": 1116261376.0, "6590": 1116261376.0, "6595": 1116261376.0, "6600": 1116261376.0, "6605": 1116261376.0, "6610": 1116261376.0, "6615": 1116261376.0, "6620": 1116261376.0, "6625": 1116261376.0, "6630": 1116261376.0, "6635": 1116261376.0, "6640": 1116261376.0, "6645": 1116261376.0, "6650": 1116261376.0, "6655": 1116261376.0, "6660": 1116261376.0, "6665": 1116261376.0, "6670": 1116261376.0, "6675": 1116261376.0, "6680": 1116261376.0, "6685": 1116261376.0, "6690": 1116261376.0, "6695": 1116261376.0, "6700": 1116261376.0, "6705": 1116261376.0, "6710": 1116261376.0, "6715": 1116261376.0, "6720": 1116261376.0, "6725": 1116261376.0, "6730": 1116261376.0, "6735": 1116261376.0, "6740": 1116261376.0, "6745": 1116261376.0, "6750": 1116261376.0, "6755": 1116261376.0, "6760": 1116261376.0, "6765": 1116261376.0, "6770": 1116261376.0, "6775": 1116261376.0, "6780": 1116261376.0, "6785": 1116261376.0, "6790": 1116261376.0, "6795": 1116261376.0, "6800": 1116261376.0, "6805": 1116261376.0, "6810": 1116261376.0, "6815": 1116261376.0, "6820": 1116261376.0, "6825": 1116261376.0, "6830": 1116261376.0, "6835": 1116261376.0, "6840": 1116261376.0, "6845": 1116261376.0, "6850": 1116261376.0, "6855": 1116261376.0, "6860": 1116261376.0, "6865": 1116261376.0, "6870": 1116261376.0, "6875": 1116261376.0, "6880": 1116261376.0, "6885": 1116261376.0, "6890": 1116261376.0, "6895": 1116261376.0, "6900": 1116261376.0, "6905": 1116261376.0, "6910": 1116261376.0, "6915": 1116261376.0, "6920": 1116261376.0, "6925": 1116261376.0, "6930": 1116261376.0, "6935": 1116261376.0, "6940": 1116261376.0, "6945": 1116261376.0, "6950": 1116261376.0, "6955": 1116261376.0, "6960": 1116261376.0, "6965": 1116261376.0, "6970": 1116261376.0, "6975": 1116261376.0, "6980": 1116261376.0, "6985": 1116261376.0, "6990": 1116261376.0, "6995": 1116261376.0, "7000": 1116261376.0, "7005": 1116261376.0, "7010": 1116261376.0, "7015": 1116261376.0, "7020": 1116261376.0, "7025": 1116261376.0, "7030": 1116261376.0, "7035": 1116261376.0, "7040": 1116261376.0, "7045": 1116261376.0, "7050": 1116261376.0, "7055": 1116261376.0, "7060": 1116261376.0, "7065": 1116261376.0, "7070": 1116261376.0, "7075": 1116261376.0, "7080": 1116261376.0, "7085": 1116261376.0, "7090": 1116261376.0, "7095": 1116261376.0, "7100": 1116261376.0, "7105": 1116261376.0, "7110": 1116261376.0, "7115": 1116261376.0, "7120": 1116261376.0, "7125": 1116261376.0, "7130": 1116261376.0, "7135": 1116261376.0, "7140": 1116261376.0, "7145": 1116261376.0, "7150": 
1116261376.0, "7155": 1116261376.0, "7160": 1116261376.0, "7165": 1116261376.0, "7170": 1116261376.0, "7175": 1116261376.0, "7180": 1116261376.0, "7185": 1116261376.0, "7190": 1116261376.0, "7195": 1116261376.0, "7200": 1116261376.0, "7205": 1116261376.0, "7210": 1116261376.0, "7215": 1116261376.0, "7220": 1116261376.0, "7225": 1116261376.0, "7230": 1116261376.0, "7235": 1116261376.0, "7240": 1116261376.0, "7245": 1116261376.0, "7250": 1116261376.0, "7255": 1116261376.0, "7260": 1116261376.0, "7265": 1116261376.0, "7270": 1116261376.0, "7275": 1116261376.0, "7280": 1116261376.0, "7285": 1116261376.0, "7290": 1116261376.0, "7295": 1116261376.0, "7300": 1116261376.0, "7305": 1116261376.0, "7310": 1116261376.0, "7315": 1116261376.0, "7320": 1116261376.0, "7325": 1116261376.0, "7330": 1116261376.0, "7335": 1116261376.0, "7340": 1116261376.0, "7345": 1116261376.0, "7350": 1116261376.0, "7355": 1116261376.0, "7360": 1116261376.0, "7365": 1116261376.0, "7370": 1116261376.0, "7375": 1116261376.0, "7380": 1116261376.0, "7385": 1116261376.0, "7390": 1116261376.0, "7395": 1116261376.0, "7400": 1116261376.0, "7405": 1116261376.0, "7410": 1116261376.0, "7415": 1116261376.0, "7420": 1116261376.0, "7425": 1116261376.0, "7430": 1116261376.0, "7435": 1116261376.0, "7440": 1116261376.0, "7445": 1116261376.0, "7450": 1116261376.0, "7455": 1116261376.0, "7460": 1116261376.0, "7465": 1116261376.0, "7470": 1116261376.0, "7475": 1116261376.0, "7480": 1116261376.0, "7485": 1116261376.0, "7490": 1116261376.0, "7495": 1116261376.0, "7500": 1116261376.0, "7505": 1116261376.0, "7510": 1116261376.0, "7515": 1116261376.0, "7520": 1116261376.0, "7525": 1116261376.0, "7530": 1116261376.0, "7535": 1116261376.0, "7540": 1116261376.0, "7545": 1116261376.0, "7550": 1116261376.0, "7555": 1116261376.0, "7560": 1116261376.0, "7565": 1116261376.0, "7570": 1116261376.0, "7575": 1116261376.0, "7580": 1116261376.0, "7585": 1116261376.0, "7590": 1116261376.0, "7595": 1116261376.0, "7600": 1116261376.0, "7605": 1116261376.0, "7610": 1116261376.0, "7615": 1116261376.0, "7620": 1116261376.0, "7625": 1116261376.0, "7630": 1116261376.0, "7635": 1116261376.0, "7640": 1116261376.0, "7645": 1116261376.0, "7650": 1116261376.0, "7655": 1116261376.0, "7660": 1116261376.0, "7665": 1116261376.0, "7670": 1116261376.0, "7675": 1116261376.0, "7680": 1116261376.0, "7685": 1116261376.0, "7690": 1116261376.0, "7695": 1116261376.0, "7700": 1116261376.0, "7705": 1116261376.0, "7710": 1116261376.0, "7715": 1116261376.0, "7720": 1116261376.0, "7725": 1116261376.0, "7730": 1116261376.0, "7735": 1116261376.0, "7740": 1116261376.0, "7745": 1116261376.0, "7750": 1116261376.0, "7755": 1116261376.0, "7760": 1116261376.0, "7765": 1116261376.0, "7770": 1116261376.0, "7775": 1116261376.0, "7780": 1116261376.0, "7785": 1116261376.0, "7790": 1116261376.0, "7795": 1116261376.0, "7800": 1116261376.0, "7805": 1116261376.0, "7810": 1116261376.0, "7815": 1116261376.0, "7820": 1116261376.0, "7825": 1116261376.0, "7830": 1116261376.0, "7835": 1116261376.0, "7840": 1116261376.0, "7845": 1116261376.0, "7850": 1116261376.0, "7855": 1116261376.0, "7860": 1116261376.0, "7865": 1116261376.0, "7870": 1116261376.0, "7875": 1116261376.0, "7880": 1116261376.0, "7885": 1116261376.0, "7890": 1116261376.0, "7895": 1116261376.0, "7900": 1116261376.0, "7905": 1116261376.0, "7910": 1116261376.0, "7915": 1116261376.0, "7920": 1116261376.0, "7925": 1116261376.0, "7930": 1116261376.0, "7935": 1116261376.0, "7940": 1116261376.0, "7945": 1116261376.0, "7950": 1116261376.0, "7955": 
1116261376.0, "7960": 1116261376.0, "7965": 1116261376.0, "7970": 1116261376.0, "7975": 1116261376.0, "7980": 1116261376.0, "7985": 1116261376.0, "7990": 1116261376.0, "7995": 1116261376.0, "8000": 1116261376.0, "8005": 1116261376.0, "8010": 1116261376.0, "8015": 1116261376.0, "8020": 1116261376.0, "8025": 1116261376.0, "8030": 1116261376.0, "8035": 1116261376.0, "8040": 1116261376.0, "8045": 1116261376.0, "8050": 1116261376.0, "8055": 1116261376.0, "8060": 1116261376.0, "8065": 1116261376.0, "8070": 1116261376.0, "8075": 1116261376.0, "8080": 1116261376.0, "8085": 1116261376.0, "8090": 1116261376.0, "8095": 1116261376.0, "8100": 1116261376.0, "8105": 1116261376.0, "8110": 1116261376.0, "8115": 1116261376.0, "8120": 1116261376.0, "8125": 1116261376.0, "8130": 1116261376.0, "8135": 1116261376.0, "8140": 1116261376.0, "8145": 1116261376.0, "8150": 1116261376.0, "8155": 1116261376.0, "8160": 1116261376.0, "8165": 1116261376.0, "8170": 1116261376.0, "8175": 1116261376.0, "8180": 1116261376.0, "8185": 1116261376.0, "8190": 1116261376.0, "8195": 1116261376.0, "8200": 1116261376.0, "8205": 1116261376.0, "8210": 1116261376.0, "8215": 1116261376.0, "8220": 1116261376.0, "8225": 1116261376.0, "8230": 1116261376.0, "8235": 1116261376.0, "8240": 1116261376.0, "8245": 1116261376.0, "8250": 1116261376.0, "8255": 1116261376.0, "8260": 1116261376.0, "8265": 1116261376.0, "8270": 1116261376.0, "8275": 1116261376.0, "8280": 1116261376.0, "8285": 1116261376.0, "8290": 1116261376.0, "8295": 1116261376.0, "8300": 1116261376.0, "8305": 1116261376.0, "8310": 1116261376.0, "8315": 1116261376.0, "8320": 1116261376.0, "8325": 1116261376.0, "8330": 1116261376.0, "8335": 1116261376.0, "8340": 1116261376.0, "8345": 1116261376.0, "8350": 1116261376.0, "8355": 1116261376.0, "8360": 1116261376.0, "8365": 1116261376.0, "8370": 1116261376.0, "8375": 1116261376.0, "8380": 1116261376.0, "8385": 1116261376.0, "8390": 1116261376.0, "8395": 1116261376.0, "8400": 1116261376.0, "8405": 1116261376.0, "8410": 1116261376.0, "8415": 1116261376.0, "8420": 1116261376.0, "8425": 1116261376.0, "8430": 1116261376.0, "8435": 1116261376.0, "8440": 1116261376.0, "8445": 1116261376.0, "8450": 1116261376.0, "8455": 1116261376.0, "8460": 1116261376.0, "8465": 1116261376.0, "8470": 1116261376.0, "8475": 1116261376.0, "8480": 1116261376.0, "8485": 1116261376.0, "8490": 1116261376.0, "8495": 1116261376.0, "8500": 1116261376.0, "8505": 1116261376.0, "8510": 1116261376.0, "8515": 1116261376.0, "8520": 1116261376.0, "8525": 1116261376.0, "8530": 1116261376.0, "8535": 1116261376.0, "8540": 1116261376.0, "8545": 1116261376.0, "8550": 1116261376.0, "8555": 1116261376.0, "8560": 1116261376.0, "8565": 1116261376.0, "8570": 1116261376.0, "8575": 1116261376.0, "8580": 1116261376.0, "8585": 1116261376.0, "8590": 1116261376.0, "8595": 1116261376.0, "8600": 1116261376.0, "8605": 1116261376.0, "8610": 1116261376.0, "8615": 1116261376.0, "8620": 1116261376.0, "8625": 1116261376.0, "8630": 1116261376.0, "8635": 1116261376.0, "8640": 1116261376.0, "8645": 1116261376.0, "8650": 1116261376.0, "8655": 1116261376.0, "8660": 1116261376.0, "8665": 1116261376.0, "8670": 1116261376.0, "8675": 1116261376.0, "8680": 1116261376.0, "8685": 1116261376.0, "8690": 1116261376.0, "8695": 1116261376.0, "8700": 1116261376.0, "8705": 1116261376.0, "8710": 1116261376.0, "8715": 1116261376.0, "8720": 1116261376.0, "8725": 1116261376.0, "8730": 1116261376.0, "8735": 1116261376.0, "8740": 1116261376.0, "8745": 1116261376.0, "8750": 1116261376.0, "8755": 1116261376.0, "8760": 
1116261376.0, "8765": 1116261376.0, "8770": 1116261376.0, "8775": 1116261376.0, "8780": 1116261376.0, "8785": 1116261376.0, "8790": 1116261376.0, "8795": 1116261376.0, "8800": 1116261376.0, "8805": 1116261376.0, "8810": 1116261376.0, "8815": 1116261376.0, "8820": 1116261376.0, "8825": 1116261376.0, "8830": 1116261376.0, "8835": 1116261376.0, "8840": 1116261376.0, "8845": 1116261376.0, "8850": 1116261376.0, "8855": 1116261376.0, "8860": 1116261376.0, "8865": 1116261376.0, "8870": 1116261376.0, "8875": 1116261376.0, "8880": 1116261376.0, "8885": 1116261376.0, "8890": 1116261376.0, "8895": 1116261376.0, "8900": 1116261376.0, "8905": 1116261376.0, "8910": 1116261376.0, "8915": 1116261376.0, "8920": 1116261376.0, "8925": 1116261376.0, "8930": 1116261376.0, "8935": 1116261376.0, "8940": 1116261376.0, "8945": 1116261376.0, "8950": 1116261376.0, "8955": 1116261376.0, "8960": 1116261376.0, "8965": 1116261376.0, "8970": 1116261376.0, "8975": 1116261376.0, "8980": 1116261376.0, "8985": 1116261376.0, "8990": 1116261376.0, "8995": 1116261376.0, "9000": 1116261376.0, "9005": 1116261376.0, "9010": 1116261376.0, "9015": 1116261376.0, "9020": 1116261376.0, "9025": 1116261376.0, "9030": 1116261376.0, "9035": 1116261376.0, "9040": 1116261376.0, "9045": 1116261376.0, "9050": 1116261376.0, "9055": 1116261376.0, "9060": 1116261376.0, "9065": 1116261376.0, "9070": 1116261376.0, "9075": 1116261376.0, "9080": 1116261376.0, "9085": 1116261376.0, "9090": 1116261376.0, "9095": 1116261376.0, "9100": 1116261376.0, "9105": 1116261376.0, "9110": 1116261376.0, "9115": 1116261376.0, "9120": 1116261376.0, "9125": 1116261376.0, "9130": 1116261376.0, "9135": 1116261376.0, "9140": 1116261376.0, "9145": 1116261376.0, "9150": 1116261376.0, "9155": 1116261376.0, "9160": 1116261376.0, "9165": 1116261376.0, "9170": 1116261376.0, "9175": 1116261376.0, "9180": 1116261376.0, "9185": 1116261376.0, "9190": 1116261376.0, "9195": 1116261376.0, "9200": 1116261376.0, "9205": 1116261376.0, "9210": 1116261376.0, "9215": 1116261376.0, "9220": 1116261376.0, "9225": 1116261376.0, "9230": 1116261376.0, "9235": 1116261376.0, "9240": 1116261376.0, "9245": 1116261376.0, "9250": 1116261376.0, "9255": 1116261376.0, "9260": 1116261376.0, "9265": 1116261376.0, "9270": 1116261376.0, "9275": 1116261376.0, "9280": 1116261376.0, "9285": 1116261376.0, "9290": 1116261376.0, "9295": 1116261376.0, "9300": 1116261376.0, "9305": 1116261376.0, "9310": 1116261376.0, "9315": 1116261376.0, "9320": 1116261376.0, "9325": 1116261376.0, "9330": 1116261376.0, "9335": 1116261376.0, "9340": 1116261376.0, "9345": 1116261376.0, "9350": 1116261376.0, "9355": 1116261376.0, "9360": 1116261376.0, "9365": 1116261376.0, "9370": 1116261376.0, "9375": 1116261376.0, "9380": 1116261376.0, "9385": 1116261376.0, "9390": 1116261376.0, "9395": 1116261376.0, "9400": 1116261376.0, "9405": 1116261376.0, "9410": 1116261376.0, "9415": 1116261376.0, "9420": 1116261376.0, "9425": 1116261376.0, "9430": 1116261376.0, "9435": 1116261376.0, "9440": 1116261376.0, "9445": 1116261376.0, "9450": 1116261376.0, "9455": 1116261376.0, "9460": 1116261376.0, "9465": 1116261376.0, "9470": 1116261376.0, "9475": 1116261376.0, "9480": 1116261376.0, "9485": 1116261376.0, "9490": 1116261376.0, "9495": 1116261376.0, "9500": 1116261376.0, "9505": 1116261376.0, "9510": 1116261376.0, "9515": 1116261376.0, "9520": 1116261376.0, "9525": 1116261376.0, "9530": 1116261376.0, "9535": 1116261376.0, "9540": 1116261376.0, "9545": 1116261376.0, "9550": 1116261376.0, "9555": 1116261376.0, "9560": 1116261376.0, "9565": 
1116261376.0, "9570": 1116261376.0, "9575": 1116261376.0, "9580": 1116261376.0, "9585": 1116261376.0, "9590": 1116261376.0, "9595": 1116261376.0, "9600": 1116261376.0, "9605": 1116261376.0, "9610": 1116261376.0, "9615": 1116261376.0, "9620": 1116261376.0, "9625": 1116261376.0, "9630": 1116261376.0, "9635": 1116261376.0, "9640": 1116261376.0, "9645": 1116261376.0, "9650": 1116261376.0, "9655": 1116261376.0, "9660": 1116261376.0, "9665": 1116261376.0, "9670": 1116261376.0, "9675": 1116261376.0, "9680": 1116261376.0, "9685": 1116261376.0, "9690": 1116261376.0, "9695": 1116261376.0, "9700": 1116261376.0, "9705": 1116261376.0, "9710": 1116261376.0, "9715": 1116261376.0, "9720": 1116261376.0, "9725": 1116261376.0, "9730": 1116261376.0, "9735": 1116261376.0, "9740": 1116261376.0, "9745": 1116261376.0, "9750": 1116261376.0, "9755": 1116261376.0, "9760": 1116261376.0, "9765": 1116261376.0, "9770": 1116261376.0, "9775": 1116261376.0, "9780": 1116261376.0, "9785": 1116261376.0, "9790": 1116261376.0, "9795": 1116261376.0, "9800": 1116261376.0, "9805": 1116261376.0, "9810": 1116261376.0, "9815": 1116261376.0, "9820": 1116261376.0, "9825": 1116261376.0, "9830": 1116261376.0, "9835": 1116261376.0, "9840": 1116261376.0, "9845": 1116261376.0, "9850": 1116261376.0, "9855": 1116261376.0, "9860": 1116261376.0, "9865": 1116261376.0, "9870": 1116261376.0, "9875": 1116261376.0, "9880": 1116261376.0, "9885": 1116261376.0, "9890": 1116261376.0, "9895": 1116261376.0, "9900": 1116261376.0, "9905": 1116261376.0, "9910": 1116261376.0, "9915": 1116261376.0, "9920": 1116261376.0, "9925": 1116261376.0, "9930": 1116261376.0, "9935": 1116261376.0, "9940": 1116261376.0, "9945": 1116261376.0, "9950": 1116261376.0, "9955": 1116261376.0, "9960": 1116261376.0, "9965": 1116261376.0, "9970": 1116261376.0, "9975": 1116261376.0, "9980": 1116261376.0, "9985": 1116261376.0, "9990": 1116261376.0, "9995": 1116261376.0, "10000": 1116261376.0, "10005": 1116261376.0, "10010": 1116261376.0, "10015": 1116261376.0, "10020": 1116261376.0, "10025": 1116261376.0, "10030": 1116261376.0, "10035": 1116261376.0, "10040": 1116261376.0, "10045": 1116261376.0, "10050": 1116261376.0, "10055": 1116261376.0, "10060": 1116261376.0, "10065": 1116261376.0, "10070": 1116261376.0, "10075": 1116261376.0, "10080": 1116261376.0, "10085": 1116261376.0, "10090": 1116261376.0, "10095": 1116261376.0, "10100": 1116261376.0, "10105": 1116261376.0, "10110": 1116261376.0, "10115": 1116261376.0, "10120": 1116261376.0, "10125": 1116261376.0, "10130": 1116261376.0, "10135": 1116261376.0, "10140": 1116261376.0, "10145": 1116261376.0, "10150": 1116261376.0, "10155": 1116261376.0, "10160": 1116261376.0, "10165": 1116261376.0, "10170": 1116261376.0, "10175": 1116261376.0, "10180": 1116261376.0, "10185": 1116261376.0, "10190": 1116261376.0, "10195": 1116261376.0, "10200": 1116261376.0, "10205": 1116261376.0, "10210": 1116261376.0, "10215": 1116261376.0, "10220": 1116261376.0, "10225": 1116261376.0, "10230": 1116261376.0, "10235": 1116261376.0, "10240": 1116261376.0, "10245": 1116261376.0, "10250": 1116261376.0, "10255": 1116261376.0, "10260": 1116261376.0, "10265": 1116261376.0, "10270": 1116261376.0, "10275": 1116261376.0, "10280": 1116261376.0, "10285": 1116261376.0, "10290": 1116261376.0, "10295": 1116261376.0, "10300": 1116261376.0, "10305": 1116261376.0, "10310": 1116261376.0, "10315": 1116261376.0, "10320": 1116261376.0, "10325": 1116261376.0, "10330": 1116261376.0, "10335": 1116261376.0, "10340": 1116261376.0, "10345": 1116261376.0, "10350": 1116261376.0, "10355": 
1116261376.0, "10360": 1116261376.0, "10365": 1116261376.0, "10370": 1116261376.0, "10375": 1116261376.0, "10380": 1116261376.0, "10385": 1116261376.0, "10390": 1116261376.0, "10395": 1116261376.0, "10400": 1116261376.0, "10405": 1116261376.0, "10410": 1116261376.0, "10415": 1116261376.0, "10420": 1116261376.0, "10425": 1116261376.0, "10430": 1116261376.0, "10435": 1116261376.0, "10440": 1116261376.0, "10445": 1116261376.0, "10450": 1116261376.0, "10455": 1116261376.0, "10460": 1116261376.0, "10465": 1116261376.0, "10470": 1116261376.0, "10475": 1116261376.0, "10480": 1116261376.0, "10485": 1116261376.0, "10490": 1116261376.0, "10495": 1116261376.0, "10500": 1116261376.0, "10505": 1116261376.0, "10510": 1116261376.0, "10515": 1116261376.0, "10520": 1116261376.0, "10525": 1116261376.0, "10530": 1116261376.0, "10535": 1116261376.0, "10540": 1116261376.0, "10545": 1116261376.0, "10550": 1116261376.0, "10555": 1116261376.0, "10560": 1116261376.0, "10565": 1116261376.0, "10570": 1116261376.0, "10575": 1116261376.0, "10580": 1116261376.0, "10585": 1116261376.0, "10590": 1116261376.0, "10595": 1116261376.0, "10600": 1116261376.0, "10605": 1116261376.0, "10610": 1116261376.0, "10615": 1116261376.0, "10620": 1116261376.0, "10625": 1116261376.0, "10630": 1116261376.0, "10635": 1116261376.0, "10640": 1116261376.0, "10645": 1116261376.0, "10650": 1116261376.0, "10655": 1116261376.0, "10660": 1116261376.0, "10665": 1116261376.0, "10670": 1116261376.0, "10675": 1116261376.0, "10680": 1116261376.0, "10685": 1116261376.0, "10690": 1116261376.0, "10695": 1116261376.0, "10700": 1116261376.0, "10705": 1116261376.0, "10710": 1116261376.0, "10715": 1116261376.0, "10720": 1116261376.0, "10725": 1116261376.0, "10730": 1116261376.0, "10735": 1116261376.0, "10740": 1116261376.0, "10745": 1116261376.0, "10750": 1116261376.0, "10755": 1116261376.0, "10760": 1116261376.0, "10765": 1116261376.0, "10770": 1116261376.0, "10775": 1116261376.0, "10780": 1116261376.0, "10785": 1116261376.0, "10790": 1116261376.0, "10795": 1116261376.0, "10800": 1116261376.0, "10805": 1116261376.0, "10810": 1116261376.0, "10815": 1116261376.0, "10820": 1116261376.0, "10825": 1116261376.0, "10830": 1116261376.0, "10835": 1116261376.0, "10840": 1116261376.0, "10845": 1116261376.0, "10850": 1116261376.0, "10855": 1116261376.0, "10860": 1116261376.0, "10865": 1116261376.0, "10870": 1116261376.0, "10875": 1116261376.0, "10880": 1116261376.0, "10885": 1116261376.0, "10890": 1116261376.0, "10895": 1116261376.0, "10900": 1116261376.0, "10905": 1116261376.0, "10910": 1116261376.0, "10915": 1116261376.0, "10920": 1116261376.0, "10925": 1116261376.0, "10930": 1116261376.0, "10935": 1116261376.0, "10940": 1116261376.0, "10945": 1116261376.0, "10950": 1116261376.0, "10955": 1116261376.0, "10960": 1116261376.0, "10965": 1116261376.0, "10970": 1116261376.0, "10975": 1116261376.0, "10980": 1116261376.0, "10985": 1116261376.0, "10990": 1116261376.0, "10995": 1116261376.0, "11000": 1116261376.0, "11005": 1116261376.0, "11010": 1116261376.0, "11015": 1116261376.0, "11020": 1116261376.0, "11025": 1116261376.0, "11030": 1116261376.0, "11035": 1116261376.0, "11040": 1116261376.0, "11045": 1116261376.0, "11050": 1116261376.0, "11055": 1116261376.0, "11060": 1116261376.0, "11065": 1116261376.0, "11070": 1116261376.0, "11075": 1116261376.0, "11080": 1116261376.0, "11085": 1116261376.0, "11090": 1116261376.0, "11095": 1116261376.0, "11100": 1116261376.0, "11105": 1116261376.0, "11110": 1116261376.0, "11115": 1116261376.0, "11120": 1116261376.0, "11125": 
1116261376.0, "11130": 1116261376.0, "11135": 1116261376.0, "11140": 1116261376.0, "11145": 1116261376.0, "11150": 1116261376.0, "11155": 1116261376.0, "11160": 1116261376.0, "11165": 1116261376.0, "11170": 1116261376.0, "11175": 1116261376.0, "11180": 1116261376.0, "11185": 1116261376.0, "11190": 1116261376.0, "11195": 1116261376.0, "11200": 1116261376.0, "11205": 1116261376.0, "11210": 1116261376.0, "11215": 1116261376.0, "11220": 1116261376.0, "11225": 1116261376.0, "11230": 1116261376.0, "11235": 1116261376.0, "11240": 1116261376.0, "11245": 1116261376.0, "11250": 1116261376.0, "11255": 1116261376.0, "11260": 1116261376.0, "11265": 1116261376.0, "11270": 1116261376.0, "11275": 1116261376.0, "11280": 1116261376.0, "11285": 1116261376.0, "11290": 1116261376.0, "11295": 1116261376.0, "11300": 1116261376.0, "11305": 1116261376.0, "11310": 1116261376.0, "11315": 1116261376.0, "11320": 1116261376.0, "11325": 1116261376.0, "11330": 1116261376.0, "11335": 1116261376.0, "11340": 1116261376.0, "11345": 1116261376.0, "11350": 1116261376.0, "11355": 1116261376.0, "11360": 1116261376.0, "11365": 1116261376.0, "11370": 1116261376.0, "11375": 1116261376.0, "11380": 1116261376.0, "11385": 1116261376.0, "11390": 1116261376.0, "11395": 1116261376.0, "11400": 1116261376.0, "11405": 1116261376.0, "11410": 1116261376.0, "11415": 1116261376.0, "11420": 1116261376.0, "11425": 1116261376.0, "11430": 1116261376.0, "11435": 1116261376.0, "11440": 1116261376.0, "11445": 1116261376.0, "11450": 1116261376.0, "11455": 1116261376.0, "11460": 1116261376.0, "11465": 1116261376.0, "11470": 1116261376.0, "11475": 1116261376.0, "11480": 1116261376.0, "11485": 1116261376.0, "11490": 1116261376.0, "11495": 1116261376.0, "11500": 1116261376.0, "11505": 1116261376.0, "11510": 1116261376.0, "11515": 1116261376.0, "11520": 1116261376.0, "11525": 1116261376.0, "11530": 1116261376.0, "11535": 1116261376.0, "11540": 1116261376.0, "11545": 1116261376.0, "11550": 1116261376.0, "11555": 1116261376.0, "11560": 1116261376.0, "11565": 1116261376.0, "11570": 1116261376.0, "11575": 1116261376.0, "11580": 1116261376.0, "11585": 1116261376.0, "11590": 1116261376.0, "11595": 1116261376.0, "11600": 1116261376.0, "11605": 1116261376.0, "11610": 1116261376.0, "11615": 1116261376.0, "11620": 1116261376.0, "11625": 1116261376.0, "11630": 1116261376.0, "11635": 1116261376.0, "11640": 1116261376.0, "11645": 1116261376.0, "11650": 1116261376.0, "11655": 1116261376.0, "11660": 1116261376.0, "11665": 1116261376.0, "11670": 1116261376.0, "11675": 1116261376.0, "11680": 1116261376.0, "11685": 1116261376.0, "11690": 1116261376.0, "11695": 1116261376.0, "11700": 1116261376.0, "11705": 1116261376.0, "11710": 1116261376.0, "11715": 1116261376.0, "11720": 1116261376.0, "11725": 1116261376.0, "11730": 1116261376.0, "11735": 1116261376.0, "11740": 1116261376.0, "11745": 1116261376.0, "11750": 1116261376.0, "11755": 1116261376.0, "11760": 1116261376.0, "11765": 1116261376.0, "11770": 1116261376.0, "11775": 1116261376.0, "11780": 1116261376.0, "11785": 1116261376.0, "11790": 1116261376.0, "11795": 1116261376.0, "11800": 1116261376.0, "11805": 1116261376.0, "11810": 1116261376.0, "11815": 1116261376.0, "11820": 1116261376.0, "11825": 1116261376.0, "11830": 1116261376.0, "11835": 1116261376.0, "11840": 1116261376.0, "11845": 1116261376.0, "11850": 1116261376.0, "11855": 1116261376.0, "11860": 1116261376.0, "11865": 1116261376.0, "11870": 1116261376.0, "11875": 1116261376.0, "11880": 1116261376.0, "11885": 1116261376.0, "11890": 1116261376.0, "11895": 
1116261376.0, "11900": 1116261376.0, "11905": 1116261376.0, "11910": 1116261376.0, "11915": 1116261376.0, "11920": 1116261376.0, "11925": 1116261376.0, "11930": 1116261376.0, "11935": 1116261376.0, "11940": 1116261376.0, "11945": 1116261376.0, "11950": 1116261376.0, "11955": 1116261376.0, "11960": 1116261376.0, "11965": 1116261376.0, "11970": 1116261376.0, "11975": 1116261376.0, "11980": 1116261376.0, "11985": 1116261376.0, "11990": 1116261376.0, "11995": 1116261376.0, "12000": 1116261376.0, "12005": 1116261376.0, "12010": 1116261376.0, "12015": 1116261376.0, "12020": 1116261376.0, "12025": 1116261376.0, "12030": 1116261376.0, "12035": 1116261376.0, "12040": 1116261376.0, "12045": 1116261376.0, "12050": 1116261376.0, "12055": 1116261376.0, "12060": 1116261376.0, "12065": 1116261376.0, "12070": 1116261376.0, "12075": 1116261376.0, "12080": 1116261376.0, "12085": 1116261376.0, "12090": 1116261376.0, "12095": 1116261376.0, "12100": 1116261376.0, "12105": 1116261376.0, "12110": 1116261376.0, "12115": 1116261376.0, "12120": 1116261376.0, "12125": 1116261376.0, "12130": 1116261376.0, "12135": 1116261376.0, "12140": 1116261376.0, "12145": 1116261376.0, "12150": 1116261376.0, "12155": 1116261376.0, "12160": 1116261376.0, "12165": 1116261376.0, "12170": 1116261376.0, "12175": 1116261376.0, "12180": 1116261376.0, "12185": 1116261376.0, "12190": 1116261376.0, "12195": 1116261376.0, "12200": 1116261376.0, "12205": 1116261376.0, "12210": 1116261376.0, "12215": 1116261376.0, "12220": 1116261376.0, "12225": 1116261376.0, "12230": 1116261376.0, "12235": 1116261376.0, "12240": 1116261376.0, "12245": 1116261376.0, "12250": 1116261376.0, "12255": 1116261376.0, "12260": 1116261376.0, "12265": 1116261376.0, "12270": 1116261376.0, "12275": 1116261376.0, "12280": 1116261376.0, "12285": 1116261376.0, "12290": 1116261376.0, "12295": 1116261376.0, "12300": 1116261376.0, "12305": 1116261376.0, "12310": 1116261376.0, "12315": 1116261376.0, "12320": 1116261376.0, "12325": 1116261376.0, "12330": 1116261376.0, "12335": 1116261376.0, "12340": 1116261376.0, "12345": 1116261376.0, "12350": 1116261376.0, "12355": 1116261376.0, "12360": 1116261376.0, "12365": 1116261376.0, "12370": 1116261376.0, "12375": 1116261376.0, "12380": 1116261376.0, "12385": 1116261376.0, "12390": 1116261376.0, "12395": 1116261376.0, "12400": 1116261376.0, "12405": 1116261376.0, "12410": 1116261376.0, "12415": 1116261376.0, "12420": 1116261376.0, "12425": 1116261376.0, "12430": 1116261376.0, "12435": 1116261376.0, "12440": 1116261376.0, "12445": 1116261376.0, "12450": 1116261376.0, "12455": 1116261376.0, "12460": 1116261376.0, "12465": 1116261376.0, "12470": 1116261376.0, "12475": 1116261376.0, "12480": 1116261376.0, "12485": 1116261376.0, "12490": 1116261376.0, "12495": 1116261376.0, "12500": 1116261376.0, "12505": 1116261376.0, "12510": 1116261376.0, "12515": 1116261376.0, "12520": 1116261376.0, "12525": 1116261376.0, "12530": 1116261376.0, "12535": 1116261376.0, "12540": 1116261376.0, "12545": 1116261376.0, "12550": 1116261376.0, "12555": 1116261376.0, "12560": 1116261376.0, "12565": 1116261376.0, "12570": 1116261376.0, "12575": 1116261376.0, "12580": 1116261376.0, "12585": 1116261376.0, "12590": 1116261376.0, "12595": 1116261376.0, "12600": 1116261376.0, "12605": 1116261376.0, "12610": 1116261376.0, "12615": 1116261376.0, "12620": 1116261376.0, "12625": 1116261376.0, "12630": 1116261376.0, "12635": 1116261376.0, "12640": 1116261376.0, "12645": 1116261376.0, "12650": 1116261376.0, "12655": 1116261376.0, "12660": 1116261376.0, "12665": 
1116261376.0, "12670": 1116261376.0, "12675": 1116261376.0, "12680": 1116261376.0, "12685": 1116261376.0, "12690": 1116261376.0, "12695": 1116261376.0, "12700": 1116261376.0, "12705": 1116261376.0, "12710": 1116261376.0, "12715": 1116261376.0, "12720": 1116261376.0, "12725": 1116261376.0, "12730": 1116261376.0, "12735": 1116261376.0, "12740": 1116261376.0, "12745": 1116261376.0, "12750": 1116261376.0, "12755": 1116261376.0, "12760": 1116261376.0, "12765": 1116261376.0, "12770": 1116261376.0, "12775": 1116261376.0, "12780": 1116261376.0, "12785": 1116261376.0, "12790": 1116261376.0, "12795": 1116261376.0, "12800": 1116261376.0, "12805": 1116261376.0, "12810": 1116261376.0, "12815": 1116261376.0, "12820": 1116261376.0, "12825": 1116261376.0, "12830": 1116261376.0, "12835": 1116261376.0, "12840": 1116261376.0, "12845": 1116261376.0, "12850": 1116261376.0, "12855": 1116261376.0, "12860": 1116261376.0, "12865": 1116261376.0, "12870": 1116261376.0, "12875": 1116261376.0, "12880": 1116261376.0, "12885": 1116261376.0, "12890": 1116261376.0, "12895": 1116261376.0, "12900": 1116261376.0, "12905": 1116261376.0, "12910": 1116261376.0, "12915": 1116261376.0, "12920": 1116261376.0, "12925": 1116261376.0, "12930": 1116261376.0, "12935": 1116261376.0, "12940": 1116261376.0, "12945": 1116261376.0, "12950": 1116261376.0, "12955": 1116261376.0, "12960": 1116261376.0, "12965": 1116261376.0, "12970": 1116261376.0, "12975": 1116261376.0, "12980": 1116261376.0, "12985": 1116261376.0, "12990": 1116261376.0, "12995": 1116261376.0, "13000": 1116261376.0, "13005": 1116261376.0, "13010": 1116261376.0, "13015": 1116261376.0, "13020": 1116261376.0, "13025": 1116261376.0, "13030": 1116261376.0, "13035": 1116261376.0, "13040": 1116261376.0, "13045": 1116261376.0, "13050": 1116261376.0, "13055": 1116261376.0, "13060": 1116261376.0, "13065": 1116261376.0, "13070": 1116261376.0, "13075": 1116261376.0, "13080": 1116261376.0, "13085": 1116261376.0, "13090": 1116261376.0, "13095": 1116261376.0, "13100": 1116261376.0, "13105": 1116261376.0, "13110": 1116261376.0, "13115": 1116261376.0, "13120": 1116261376.0, "13125": 1116261376.0, "13130": 1116261376.0, "13135": 1116261376.0, "13140": 1116261376.0, "13145": 1116261376.0, "13150": 1116261376.0, "13155": 1116261376.0, "13160": 1116261376.0, "13165": 1116261376.0, "13170": 1116261376.0, "13175": 1116261376.0, "13180": 1116261376.0, "13185": 1116261376.0, "13190": 1116261376.0, "13195": 1116261376.0, "13200": 1116261376.0, "13205": 1116261376.0, "13210": 1116261376.0, "13215": 1116261376.0, "13220": 1116261376.0, "13225": 1116261376.0, "13230": 1116261376.0, "13235": 1116261376.0, "13240": 1116261376.0, "13245": 1116261376.0, "13250": 1116261376.0, "13255": 1116261376.0, "13260": 1116261376.0, "13265": 1116261376.0, "13270": 1116261376.0, "13275": 1116261376.0, "13280": 1116261376.0, "13285": 1116261376.0, "13290": 1116261376.0, "13295": 1116261376.0, "13300": 1116261376.0, "13305": 1116261376.0, "13310": 1116261376.0, "13315": 1116261376.0, "13320": 1116261376.0, "13325": 1116261376.0, "13330": 1116261376.0, "13335": 1116261376.0, "13340": 1116261376.0, "13345": 1116261376.0, "13350": 1116261376.0, "13355": 1116261376.0, "13360": 1116261376.0, "13365": 1116261376.0, "13370": 1116261376.0, "13375": 1116261376.0, "13380": 1116261376.0, "13385": 1116261376.0, "13390": 1116261376.0, "13395": 1116261376.0, "13400": 1116261376.0, "13405": 1116261376.0, "13410": 1116261376.0, "13415": 1116261376.0, "13420": 1116261376.0, "13425": 1116261376.0, "13430": 1116261376.0, "13435": 
1116261376.0, "13440": 1116261376.0, "13445": 1116261376.0, "13450": 1116261376.0, "13455": 1116261376.0, "13460": 1116261376.0, "13465": 1116261376.0, "13470": 1116261376.0, "13475": 1116261376.0, "13480": 1116261376.0, "13485": 1116261376.0, "13490": 1116261376.0, "13495": 1116261376.0, "13500": 1116261376.0, "13505": 1116261376.0, "13510": 1116261376.0, "13515": 1116261376.0, "13520": 1116261376.0, "13525": 1116261376.0, "13530": 1116261376.0, "13535": 1116261376.0, "13540": 1116261376.0, "13545": 1116261376.0, "13550": 1116261376.0, "13555": 1116261376.0, "13560": 1116261376.0, "13565": 1116261376.0, "13570": 1116261376.0, "13575": 1116261376.0, "13580": 1116261376.0, "13585": 1116261376.0, "13590": 1116261376.0, "13595": 1116261376.0, "13600": 1116261376.0, "13605": 1116261376.0, "13610": 1116261376.0, "13615": 1116261376.0, "13620": 1116261376.0, "13625": 1116261376.0, "13630": 1116261376.0, "13635": 1116261376.0, "13640": 1116261376.0, "13645": 1116261376.0, "13650": 1116261376.0, "13655": 1116261376.0, "13660": 1116261376.0, "13665": 1116261376.0, "13670": 1116261376.0, "13675": 1116261376.0, "13680": 1116261376.0, "13685": 1116261376.0, "13690": 1116261376.0, "13695": 1116261376.0, "13700": 1116261376.0, "13705": 1116261376.0, "13710": 1116261376.0, "13715": 1116261376.0, "13720": 1116261376.0, "13725": 1116261376.0, "13730": 1116261376.0, "13735": 1116261376.0, "13740": 1116261376.0, "13745": 1116261376.0, "13750": 1116261376.0, "13755": 1116261376.0, "13760": 1116261376.0, "13765": 1116261376.0, "13770": 1116261376.0, "13775": 1116261376.0, "13780": 1116261376.0, "13785": 1116261376.0, "13790": 1116261376.0, "13795": 1116261376.0, "13800": 1116261376.0, "13805": 1116261376.0, "13810": 1116261376.0, "13815": 1116261376.0, "13820": 1116261376.0, "13825": 1116261376.0, "13830": 1116261376.0, "13835": 1116261376.0, "13840": 1116261376.0, "13845": 1116261376.0, "13850": 1116261376.0, "13855": 1116261376.0, "13860": 1116261376.0, "13865": 1116261376.0, "13870": 1116261376.0, "13875": 1116261376.0, "13880": 1116261376.0, "13885": 1116261376.0, "13890": 1116261376.0, "13895": 1116261376.0, "13900": 1116261376.0, "13905": 1116261376.0, "13910": 1116261376.0, "13915": 1116261376.0, "13920": 1116261376.0, "13925": 1116261376.0, "13930": 1116261376.0, "13935": 1116261376.0, "13940": 1116261376.0, "13945": 1116261376.0, "13950": 1116261376.0, "13955": 1116261376.0, "13960": 1116261376.0, "13965": 1116261376.0, "13970": 1116261376.0, "13975": 1116261376.0, "13980": 1116261376.0, "13985": 1116261376.0, "13990": 1116261376.0, "13995": 1116261376.0, "14000": 1116261376.0, "14005": 1116261376.0, "14010": 1116261376.0, "14015": 1116261376.0, "14020": 1116261376.0, "14025": 1116261376.0, "14030": 1116261376.0, "14035": 1116261376.0, "14040": 1116261376.0, "14045": 1116261376.0, "14050": 1116261376.0, "14055": 1116261376.0, "14060": 1116261376.0, "14065": 1116261376.0, "14070": 1116261376.0, "14075": 1116261376.0, "14080": 1116261376.0, "14085": 1116261376.0, "14090": 1116261376.0, "14095": 1116261376.0, "14100": 1116261376.0, "14105": 1116261376.0, "14110": 1116261376.0, "14115": 1116261376.0, "14120": 1116261376.0, "14125": 1116261376.0, "14130": 1116261376.0, "14135": 1116261376.0, "14140": 1116261376.0, "14145": 1116261376.0, "14150": 1116261376.0, "14155": 1116261376.0, "14160": 1116261376.0, "14165": 1116261376.0, "14170": 1116261376.0, "14175": 1116261376.0, "14180": 1116261376.0, "14185": 1116261376.0, "14190": 1116261376.0, "14195": 1116261376.0, "14200": 1116261376.0, "14205": 
1116261376.0, "14210": 1116261376.0, "14215": 1116261376.0, "14220": 1116261376.0, "14225": 1116261376.0, "14230": 1116261376.0, "14235": 1116261376.0, "14240": 1116261376.0, "14245": 1116261376.0, "14250": 1116261376.0, "14255": 1116261376.0, "14260": 1116261376.0, "14265": 1116261376.0, "14270": 1116261376.0, "14275": 1116261376.0, "14280": 1116261376.0, "14285": 1116261376.0, "14290": 1116261376.0, "14295": 1116261376.0, "14300": 1116261376.0, "14305": 1116261376.0, "14310": 1116261376.0, "14315": 1116261376.0, "14320": 1116261376.0, "14325": 1116261376.0, "14330": 1116261376.0, "14335": 1116261376.0, "14340": 1116261376.0, "14345": 1116261376.0, "14350": 1116261376.0, "14355": 1116261376.0, "14360": 1116261376.0, "14365": 1116261376.0, "14370": 1116261376.0, "14375": 1116261376.0, "14380": 1116261376.0, "14385": 1116261376.0, "14390": 1116261376.0, "14395": 1116261376.0, "14400": 1116261376.0, "14405": 1116261376.0, "14410": 1116261376.0, "14415": 1116261376.0, "14420": 1116261376.0, "14425": 1116261376.0, "14430": 1116261376.0, "14435": 1116261376.0, "14440": 1116261376.0, "14445": 1116261376.0, "14450": 1116261376.0, "14455": 1116261376.0, "14460": 1116261376.0, "14465": 1116261376.0, "14470": 1116261376.0, "14475": 1116261376.0, "14480": 1116261376.0, "14485": 1116261376.0, "14490": 1116261376.0, "14495": 1116261376.0, "14500": 1116261376.0, "14505": 1116261376.0, "14510": 1116261376.0, "14515": 1116261376.0, "14520": 1116261376.0, "14525": 1116261376.0, "14530": 1116261376.0, "14535": 1116261376.0, "14540": 1116261376.0, "14545": 1116261376.0, "14550": 1116261376.0, "14555": 1116261376.0, "14560": 1116261376.0, "14565": 1116261376.0, "14570": 1116261376.0, "14575": 1116261376.0, "14580": 1116261376.0, "14585": 1116261376.0, "14590": 1116261376.0, "14595": 1116261376.0, "14600": 1116261376.0, "14605": 1116261376.0, "14610": 1116261376.0, "14615": 1116261376.0, "14620": 1116261376.0, "14625": 1116261376.0, "14630": 1116261376.0, "14635": 1116261376.0, "14640": 1116261376.0, "14645": 1116261376.0, "14650": 1116261376.0, "14655": 1116261376.0, "14660": 1116261376.0, "14665": 1116261376.0, "14670": 1116261376.0, "14675": 1116261376.0, "14680": 1116261376.0, "14685": 1116261376.0, "14690": 1116261376.0, "14695": 1116261376.0, "14700": 1116261376.0, "14705": 1116261376.0, "14710": 1116261376.0, "14715": 1116261376.0, "14720": 1116261376.0, "14725": 1116261376.0, "14730": 1116261376.0, "14735": 1116261376.0, "14740": 1116261376.0, "14745": 1116261376.0, "14750": 1116261376.0, "14755": 1116261376.0, "14760": 1116261376.0, "14765": 1116261376.0, "14770": 1116261376.0, "14775": 1116261376.0, "14780": 1116261376.0, "14785": 1116261376.0, "14790": 1116261376.0, "14795": 1116261376.0, "14800": 1116261376.0, "14805": 1116261376.0, "14810": 1116261376.0, "14815": 1116261376.0, "14820": 1116261376.0, "14825": 1116261376.0, "14830": 1116261376.0, "14835": 1116261376.0, "14840": 1116261376.0, "14845": 1116261376.0, "14850": 1116261376.0, "14855": 1116261376.0, "14860": 1116261376.0, "14865": 1116261376.0, "14870": 1116261376.0, "14875": 1116261376.0, "14880": 1116261376.0, "14885": 1116261376.0, "14890": 1116261376.0, "14895": 1116261376.0, "14900": 1116261376.0, "14905": 1116261376.0, "14910": 1116261376.0, "14915": 1116261376.0, "14920": 1116261376.0, "14925": 1116261376.0, "14930": 1116261376.0, "14935": 1116261376.0, "14940": 1116261376.0, "14945": 1116261376.0, "14950": 1116261376.0, "14955": 1116261376.0, "14960": 1116261376.0, "14965": 1116261376.0, "14970": 1116261376.0, "14975": 
1116261376.0, "14980": 1116261376.0, "14985": 1116261376.0, "14990": 1116261376.0, "14995": 1116261376.0, "15000": 1116261376.0, "15005": 1116261376.0, "15010": 1116261376.0, "15015": 1116261376.0, "15020": 1116261376.0, "15025": 1116261376.0, "15030": 1116261376.0, "15035": 1116261376.0, "15040": 1116261376.0, "15045": 1116261376.0, "15050": 1116261376.0, "15055": 1116261376.0, "15060": 1116261376.0, "15065": 1116261376.0, "15070": 1116261376.0, "15075": 1116261376.0, "15080": 1116261376.0, "15085": 1116261376.0, "15090": 1116261376.0, "15095": 1116261376.0, "15100": 1116261376.0, "15105": 1116261376.0, "15110": 1116261376.0, "15115": 1116261376.0, "15120": 1116261376.0, "15125": 1116261376.0, "15130": 1116261376.0, "15135": 1116261376.0, "15140": 1116261376.0, "15145": 1116261376.0, "15150": 1116261376.0, "15155": 1116261376.0, "15160": 1116261376.0, "15165": 1116261376.0, "15170": 1116261376.0, "15175": 1116261376.0, "15180": 1116261376.0, "15185": 1116261376.0, "15190": 1116261376.0, "15195": 1116261376.0, "15200": 1116261376.0, "15205": 1116261376.0, "15210": 1116261376.0, "15215": 1116261376.0, "15220": 1116261376.0, "15225": 1116261376.0, "15230": 1116261376.0, "15235": 1116261376.0, "15240": 1116261376.0, "15245": 1116261376.0, "15250": 1116261376.0, "15255": 1116261376.0, "15260": 1116261376.0, "15265": 1116261376.0, "15270": 1116261376.0, "15275": 1116261376.0, "15280": 1116261376.0, "15285": 1116261376.0, "15290": 1116261376.0, "15295": 1116261376.0, "15300": 1116261376.0, "15305": 1116261376.0, "15310": 1116261376.0, "15315": 1116261376.0, "15320": 1116261376.0, "15325": 1116261376.0, "15330": 1116261376.0, "15335": 1116261376.0, "15340": 1116261376.0, "15345": 1116261376.0, "15350": 1116261376.0, "15355": 1116261376.0, "15360": 1116261376.0, "15365": 1116261376.0, "15370": 1116261376.0, "15375": 1116261376.0, "15380": 1116261376.0, "15385": 1116261376.0, "15390": 1116261376.0, "15395": 1116261376.0, "15400": 1116261376.0, "15405": 1116261376.0, "15410": 1116261376.0, "15415": 1116261376.0, "15420": 1116261376.0, "15425": 1116261376.0, "15430": 1116261376.0, "15435": 1116261376.0, "15440": 1116261376.0, "15445": 1116261376.0, "15450": 1116261376.0, "15455": 1116261376.0, "15460": 1116261376.0, "15465": 1116261376.0, "15470": 1116261376.0, "15475": 1116261376.0, "15480": 1116261376.0, "15485": 1116261376.0, "15490": 1116261376.0, "15495": 1116261376.0, "15500": 1116261376.0, "15505": 1116261376.0, "15510": 1116261376.0, "15515": 1116261376.0, "15520": 1116261376.0, "15525": 1116261376.0, "15530": 1116261376.0, "15535": 1116261376.0, "15540": 1116261376.0, "15545": 1116261376.0, "15550": 1116261376.0, "15555": 1116261376.0, "15560": 1116261376.0, "15565": 1116261376.0, "15570": 1116261376.0, "15575": 1116261376.0, "15580": 1116261376.0, "15585": 1116261376.0, "15590": 1116261376.0, "15595": 1116261376.0, "15600": 1116261376.0, "15605": 1116261376.0, "15610": 1116261376.0, "15615": 1116261376.0, "15620": 1116261376.0, "15625": 1116261376.0, "15630": 1116261376.0, "15635": 1116261376.0, "15640": 1116261376.0, "15645": 1116261376.0, "15650": 1116261376.0, "15655": 1116261376.0, "15660": 1116261376.0, "15665": 1116261376.0, "15670": 1116261376.0, "15675": 1116261376.0, "15680": 1116261376.0, "15685": 1116261376.0, "15690": 1116261376.0, "15695": 1116261376.0, "15700": 1116261376.0, "15705": 1116261376.0, "15710": 1116261376.0, "15715": 1116261376.0, "15720": 1116261376.0, "15725": 1116261376.0, "15730": 1116261376.0, "15735": 1116261376.0, "15740": 1116261376.0, "15745": 
1116261376.0, "15750": 1116261376.0, "15755": 1116261376.0, "15760": 1116261376.0, "15765": 1116261376.0, "15770": 1116261376.0, "15775": 1116261376.0, "15780": 1116261376.0, "15785": 1116261376.0, "15790": 1116261376.0, "15795": 1116261376.0, "15800": 1116261376.0, "15805": 1116261376.0, "15810": 1116261376.0, "15815": 1116261376.0, "15820": 1116261376.0, "15825": 1116261376.0, "15830": 1116261376.0, "15835": 1116261376.0, "15840": 1116261376.0, "15845": 1116261376.0, "15850": 1116261376.0, "15855": 1116261376.0, "15860": 1116261376.0, "15865": 1116261376.0, "15870": 1116261376.0, "15875": 1116261376.0, "15880": 1116261376.0, "15885": 1116261376.0, "15890": 1116261376.0, "15895": 1116261376.0, "15900": 1116261376.0, "15905": 1116261376.0, "15910": 1116261376.0, "15915": 1116261376.0, "15920": 1116261376.0, "15925": 1116261376.0, "15930": 1116261376.0, "15935": 1116261376.0, "15940": 1116261376.0, "15945": 1116261376.0, "15950": 1116261376.0, "15955": 1116261376.0, "15960": 1116261376.0, "15965": 1116261376.0, "15970": 1116261376.0, "15975": 1116261376.0, "15980": 1116261376.0, "15985": 1116261376.0, "15990": 1116261376.0, "15995": 1116261376.0, "16000": 1116261376.0, "16005": 1116261376.0, "16010": 1116261376.0, "16015": 1116261376.0, "16020": 1116261376.0, "16025": 1116261376.0, "16030": 1116261376.0, "16035": 1116261376.0, "16040": 1116261376.0, "16045": 1116261376.0, "16050": 1116261376.0, "16055": 1116261376.0, "16060": 1116261376.0, "16065": 1116261376.0, "16070": 1116261376.0, "16075": 1116261376.0, "16080": 1116261376.0, "16085": 1116261376.0, "16090": 1116261376.0, "16095": 1116261376.0, "16100": 1116261376.0, "16105": 1116261376.0, "16110": 1116261376.0, "16115": 1116261376.0, "16120": 1116261376.0, "16125": 1116261376.0, "16130": 1116261376.0, "16135": 1116261376.0, "16140": 1116261376.0, "16145": 1116261376.0, "16150": 1116261376.0, "16155": 1116261376.0, "16160": 1116261376.0, "16165": 1116261376.0, "16170": 1116261376.0, "16175": 1116261376.0, "16180": 1116261376.0, "16185": 1116261376.0, "16190": 1116261376.0, "16195": 1116261376.0, "16200": 1116261376.0, "16205": 1116261376.0, "16210": 1116261376.0, "16215": 1116261376.0, "16220": 1116261376.0, "16225": 1116261376.0, "16230": 1116261376.0, "16235": 1116261376.0, "16240": 1116261376.0, "16245": 1116261376.0, "16250": 1116261376.0, "16255": 1116261376.0, "16260": 1116261376.0, "16265": 1116261376.0, "16270": 1116261376.0, "16275": 1116261376.0, "16280": 1116261376.0, "16285": 1116261376.0, "16290": 1116261376.0, "16295": 1116261376.0, "16300": 1116261376.0, "16305": 1116261376.0, "16310": 1116261376.0, "16315": 1116261376.0, "16320": 1116261376.0, "16325": 1116261376.0, "16330": 1116261376.0, "16335": 1116261376.0, "16340": 1116261376.0, "16345": 1116261376.0, "16350": 1116261376.0, "16355": 1116261376.0, "16360": 1116261376.0, "16365": 1116261376.0, "16370": 1116261376.0, "16375": 1116261376.0, "16380": 1116261376.0, "16385": 1116261376.0, "16390": 1116261376.0, "16395": 1116261376.0, "16400": 1116261376.0, "16405": 1116261376.0, "16410": 1116261376.0, "16415": 1116261376.0, "16420": 1116261376.0, "16425": 1116261376.0, "16430": 1116261376.0, "16435": 1116261376.0, "16440": 1116261376.0, "16445": 1116261376.0, "16450": 1116261376.0, "16455": 1116261376.0, "16460": 1116261376.0, "16465": 1116261376.0, "16470": 1116261376.0, "16475": 1116261376.0, "16480": 1116261376.0, "16485": 1116261376.0, "16490": 1116261376.0, "16495": 1116261376.0, "16500": 1116261376.0, "16505": 1116261376.0, "16510": 1116261376.0, "16515": 
1116261376.0, "16520": 1116261376.0, "16525": 1116261376.0, "16530": 1116261376.0, "16535": 1116261376.0, "16540": 1116261376.0, "16545": 1116261376.0, "16550": 1116261376.0, "16555": 1116261376.0, "16560": 1116261376.0, "16565": 1116261376.0, "16570": 1116261376.0, "16575": 1116261376.0, "16580": 1116261376.0, "16585": 1116261376.0, "16590": 1116261376.0, "16595": 1116261376.0, "16600": 1116261376.0, "16605": 1116261376.0, "16610": 1116261376.0, "16615": 1116261376.0, "16620": 1116261376.0, "16625": 1116261376.0, "16630": 1116261376.0, "16635": 1116261376.0, "16640": 1116261376.0, "16645": 1116261376.0, "16650": 1116261376.0, "16655": 1116261376.0, "16660": 1116261376.0, "16665": 1116261376.0, "16670": 1116261376.0, "16675": 1116261376.0, "16680": 1116261376.0, "16685": 1116261376.0, "16690": 1116261376.0, "16695": 1116261376.0, "16700": 1116261376.0, "16705": 1116261376.0, "16710": 1116261376.0, "16715": 1116261376.0, "16720": 1116261376.0, "16725": 1116261376.0, "16730": 1116261376.0, "16735": 1116261376.0, "16740": 1116261376.0, "16745": 1116261376.0, "16750": 1116261376.0, "16755": 1116261376.0, "16760": 1116261376.0, "16765": 1116261376.0, "16770": 1116261376.0, "16775": 1116261376.0, "16780": 1116261376.0, "16785": 1116261376.0, "16790": 1116261376.0, "16795": 1116261376.0, "16800": 1116261376.0, "16805": 1116261376.0, "16810": 1116261376.0, "16815": 1116261376.0, "16820": 1116261376.0, "16825": 1116261376.0, "16830": 1116261376.0, "16835": 1116261376.0, "16840": 1116261376.0, "16845": 1116261376.0, "16850": 1116261376.0, "16855": 1116261376.0, "16860": 1116261376.0, "16865": 1116261376.0, "16870": 1116261376.0, "16875": 1116261376.0, "16880": 1116261376.0, "16885": 1116261376.0, "16890": 1116261376.0, "16895": 1116261376.0, "16900": 1116261376.0, "16905": 1116261376.0, "16910": 1116261376.0, "16915": 1116261376.0, "16920": 1116261376.0, "16925": 1116261376.0, "16930": 1116261376.0, "16935": 1116261376.0, "16940": 1116261376.0, "16945": 1116261376.0, "16950": 1116261376.0, "16955": 1116261376.0, "16960": 1116261376.0, "16965": 1116261376.0, "16970": 1116261376.0, "16975": 1116261376.0, "16980": 1116261376.0, "16985": 1116261376.0, "16990": 1116261376.0, "16995": 1116261376.0, "17000": 1116261376.0, "17005": 1116261376.0, "17010": 1116261376.0, "17015": 1116261376.0, "17020": 1116261376.0, "17025": 1116261376.0, "17030": 1116261376.0, "17035": 1116261376.0, "17040": 1116261376.0, "17045": 1116261376.0, "17050": 1116261376.0, "17055": 1116261376.0, "17060": 1116261376.0, "17065": 1116261376.0, "17070": 1116261376.0, "17075": 1116261376.0, "17080": 1116261376.0, "17085": 1116261376.0, "17090": 1116261376.0, "17095": 1116261376.0, "17100": 1116261376.0, "17105": 1116261376.0, "17110": 1116261376.0, "17115": 1116261376.0, "17120": 1116261376.0, "17125": 1116261376.0, "17130": 1116261376.0, "17135": 1116261376.0, "17140": 1116261376.0, "17145": 1116261376.0, "17150": 1116261376.0, "17155": 1116261376.0, "17160": 1116261376.0, "17165": 1116261376.0, "17170": 1116261376.0, "17175": 1116261376.0, "17180": 1116261376.0, "17185": 1116261376.0, "17190": 1116261376.0, "17195": 1116261376.0, "17200": 1116261376.0, "17205": 1116261376.0, "17210": 1116261376.0, "17215": 1116261376.0, "17220": 1116261376.0, "17225": 1116261376.0, "17230": 1116261376.0, "17235": 1116261376.0, "17240": 1116261376.0, "17245": 1116261376.0, "17250": 1116261376.0, "17255": 1116261376.0, "17260": 1116261376.0, "17265": 1116261376.0, "17270": 1116261376.0, "17275": 1116261376.0, "17280": 1116261376.0, "17285": 
1116261376.0, "17290": 1116261376.0, "17295": 1116261376.0, "17300": 1116261376.0, "17305": 1116261376.0, "17310": 1116261376.0, "17315": 1116261376.0, "17320": 1116261376.0, "17325": 1116261376.0, "17330": 1116261376.0, "17335": 1116261376.0, "17340": 1116261376.0, "17345": 1116261376.0, "17350": 1116261376.0, "17355": 1116261376.0, "17360": 1116261376.0, "17365": 1116261376.0, "17370": 1116261376.0, "17375": 1116261376.0, "17380": 1116261376.0, "17385": 1116261376.0, "17390": 1116261376.0, "17395": 1116261376.0, "17400": 1116261376.0, "17405": 1116261376.0, "17410": 1116261376.0, "17415": 1116261376.0, "17420": 1116261376.0, "17425": 1116261376.0, "17430": 1116261376.0, "17435": 1116261376.0, "17440": 1116261376.0, "17445": 1116261376.0, "17450": 1116261376.0, "17455": 1116261376.0, "17460": 1116261376.0, "17465": 1116261376.0, "17470": 1116261376.0, "17475": 1116261376.0, "17480": 1116261376.0, "17485": 1116261376.0, "17490": 1116261376.0, "17495": 1116261376.0, "17500": 1116261376.0, "17505": 1116261376.0, "17510": 1116261376.0, "17515": 1116261376.0, "17520": 1116261376.0, "17525": 1116261376.0, "17530": 1116261376.0, "17535": 1116261376.0, "17540": 1116261376.0, "17545": 1116261376.0, "17550": 1116261376.0, "17555": 1116261376.0, "17560": 1116261376.0, "17565": 1116261376.0, "17570": 1116261376.0, "17575": 1116261376.0, "17580": 1116261376.0, "17585": 1116261376.0, "17590": 1116261376.0, "17595": 1116261376.0, "17600": 1116261376.0, "17605": 1116261376.0, "17610": 1116261376.0, "17615": 1116261376.0, "17620": 1116261376.0, "17625": 1116261376.0, "17630": 1116261376.0, "17635": 1116261376.0, "17640": 1116261376.0, "17645": 1116261376.0, "17650": 1116261376.0, "17655": 1116261376.0, "17660": 1116261376.0, "17665": 1116261376.0, "17670": 1116261376.0, "17675": 1116261376.0, "17680": 1116261376.0, "17685": 1116261376.0, "17690": 1116261376.0, "17695": 1116261376.0, "17700": 1116261376.0, "17705": 1116261376.0, "17710": 1116261376.0, "17715": 1116261376.0, "17720": 1116261376.0, "17725": 1116261376.0, "17730": 1116261376.0, "17735": 1116261376.0, "17740": 1116261376.0, "17745": 1116261376.0, "17750": 1116261376.0, "17755": 1116261376.0, "17760": 1116261376.0, "17765": 1116261376.0, "17770": 1116261376.0, "17775": 1116261376.0, "17780": 1116261376.0, "17785": 1116261376.0, "17790": 1116261376.0, "17795": 1116261376.0, "17800": 1116261376.0, "17805": 1116261376.0, "17810": 1116261376.0, "17815": 1116261376.0, "17820": 1116261376.0, "17825": 1116261376.0, "17830": 1116261376.0, "17835": 1116261376.0, "17840": 1116261376.0, "17845": 1116261376.0, "17850": 1116261376.0, "17855": 1116261376.0, "17860": 1116261376.0, "17865": 1116261376.0, "17870": 1116261376.0, "17875": 1116261376.0, "17880": 1116261376.0, "17885": 1116261376.0, "17890": 1116261376.0, "17895": 1116261376.0, "17900": 1116261376.0, "17905": 1116261376.0, "17910": 1116261376.0, "17915": 1116261376.0, "17920": 1116261376.0, "17925": 1116261376.0, "17930": 1116261376.0, "17935": 1116261376.0, "17940": 1116261376.0, "17945": 1116261376.0, "17950": 1116261376.0, "17955": 1116261376.0, "17960": 1116261376.0, "17965": 1116261376.0, "17970": 1116261376.0, "17975": 1116261376.0, "17980": 1116261376.0, "17985": 1116261376.0, "17990": 1116261376.0, "17995": 1116261376.0, "18000": 1116261376.0, "18005": 1116261376.0, "18010": 1116261376.0, "18015": 1116261376.0, "18020": 1116261376.0, "18025": 1116261376.0, "18030": 1116261376.0, "18035": 1116261376.0, "18040": 1116261376.0, "18045": 1116261376.0, "18050": 1116261376.0, "18055": 
1116261376.0, "18060": 1116261376.0, "18065": 1116261376.0, "18070": 1116261376.0, "18075": 1116261376.0, "18080": 1116261376.0, "18085": 1116261376.0, "18090": 1116261376.0, "18095": 1116261376.0, "18100": 1116261376.0, "18105": 1116261376.0, "18110": 1116261376.0, "18115": 1116261376.0, "18120": 1116261376.0, "18125": 1116261376.0, "18130": 1116261376.0, "18135": 1116261376.0, "18140": 1116261376.0, "18145": 1116261376.0, "18150": 1116261376.0, "18155": 1116261376.0, "18160": 1116261376.0, "18165": 1116261376.0, "18170": 1116261376.0, "18175": 1116261376.0, "18180": 1116261376.0, "18185": 1116261376.0, "18190": 1116261376.0, "18195": 1116261376.0, "18200": 1116261376.0, "18205": 1116261376.0, "18210": 1116261376.0, "18215": 1116261376.0, "18220": 1116261376.0, "18225": 1116261376.0, "18230": 1116261376.0, "18235": 1116261376.0, "18240": 1116261376.0, "18245": 1116261376.0, "18250": 1116261376.0, "18255": 1116261376.0, "18260": 1116261376.0, "18265": 1116261376.0, "18270": 1116261376.0, "18275": 1116261376.0, "18280": 1116261376.0, "18285": 1116261376.0, "18290": 1116261376.0, "18295": 1116261376.0, "18300": 1116261376.0, "18305": 1116261376.0, "18310": 1116261376.0, "18315": 1116261376.0, "18320": 1116261376.0, "18325": 1116261376.0, "18330": 1116261376.0, "18335": 1116261376.0, "18340": 1116261376.0, "18345": 1116261376.0, "18350": 1116261376.0, "18355": 1116261376.0, "18360": 1116261376.0, "18365": 1116261376.0, "18370": 1116261376.0, "18375": 1116261376.0, "18380": 1116261376.0, "18385": 1116261376.0, "18390": 1116261376.0, "18395": 1116261376.0, "18400": 1116261376.0, "18405": 1116261376.0, "18410": 1116261376.0, "18415": 1116261376.0, "18420": 1116261376.0, "18425": 1116261376.0, "18430": 1116261376.0, "18435": 1116261376.0, "18440": 1116261376.0, "18445": 1116261376.0, "18450": 1116261376.0, "18455": 1116261376.0, "18460": 1116261376.0, "18465": 1116261376.0, "18470": 1116261376.0, "18475": 1116261376.0, "18480": 1116261376.0, "18485": 1116261376.0, "18490": 1116261376.0, "18495": 1116261376.0, "18500": 1116261376.0, "18505": 1116261376.0, "18510": 1116261376.0, "18515": 1116261376.0, "18520": 1116261376.0, "18525": 1116261376.0, "18530": 1116261376.0, "18535": 1116261376.0, "18540": 1116261376.0, "18545": 1116261376.0, "18550": 1116261376.0, "18555": 1116261376.0, "18560": 1116261376.0, "18565": 1116261376.0, "18570": 1116261376.0, "18575": 1116261376.0, "18580": 1116261376.0, "18585": 1116261376.0, "18590": 1116261376.0, "18595": 1116261376.0, "18600": 1116261376.0, "18605": 1116261376.0, "18610": 1116261376.0, "18615": 1116261376.0, "18620": 1116261376.0, "18625": 1116261376.0, "18630": 1116261376.0, "18635": 1116261376.0, "18640": 1116261376.0, "18645": 1116261376.0, "18650": 1116261376.0, "18655": 1116261376.0, "18660": 1116261376.0, "18665": 1116261376.0, "18670": 1116261376.0, "18675": 1116261376.0, "18680": 1116261376.0, "18685": 1116261376.0, "18690": 1116261376.0, "18695": 1116261376.0, "18700": 1116261376.0, "18705": 1116261376.0, "18710": 1116261376.0, "18715": 1116261376.0, "18720": 1116261376.0, "18725": 1116261376.0, "18730": 1116261376.0, "18735": 1116261376.0, "18740": 1116261376.0, "18745": 1116261376.0, "18750": 1116261376.0, "18755": 1116261376.0, "18760": 1116261376.0, "18765": 1116261376.0, "18770": 1116261376.0, "18775": 1116261376.0, "18780": 1116261376.0, "18785": 1116261376.0, "18790": 1116261376.0, "18795": 1116261376.0, "18800": 1116261376.0, "18805": 1116261376.0, "18810": 1116261376.0, "18815": 1116261376.0, "18820": 1116261376.0, "18825": 
1116261376.0, "18830": 1116261376.0, "18835": 1116261376.0, "18840": 1116261376.0, "18845": 1116261376.0, "18850": 1116261376.0, "18855": 1116261376.0, "18860": 1116261376.0, "18865": 1116261376.0, "18870": 1116261376.0, "18875": 1116261376.0, "18880": 1116261376.0, "18885": 1116261376.0, "18890": 1116261376.0, "18895": 1116261376.0, "18900": 1116261376.0, "18905": 1116261376.0, "18910": 1116261376.0, "18915": 1116261376.0, "18920": 1116261376.0, "18925": 1116261376.0, "18930": 1116261376.0, "18935": 1116261376.0, "18940": 1116261376.0, "18945": 1116261376.0, "18950": 1116261376.0, "18955": 1116261376.0, "18960": 1116261376.0, "18965": 1116261376.0, "18970": 1116261376.0, "18975": 1116261376.0, "18980": 1116261376.0, "18985": 1116261376.0, "18990": 1116261376.0, "18995": 1116261376.0, "19000": 1116261376.0, "19005": 1116261376.0, "19010": 1116261376.0, "19015": 1116261376.0, "19020": 1116261376.0, "19025": 1116261376.0, "19030": 1116261376.0, "19035": 1116261376.0, "19040": 1116261376.0, "19045": 1116261376.0, "19050": 1116261376.0, "19055": 1116261376.0, "19060": 1116261376.0, "19065": 1116261376.0, "19070": 1116261376.0, "19075": 1116261376.0, "19080": 1116261376.0, "19085": 1116261376.0, "19090": 1116261376.0, "19095": 1116261376.0, "19100": 1116261376.0, "19105": 1116261376.0, "19110": 1116261376.0, "19115": 1116261376.0, "19120": 1116261376.0, "19125": 1116261376.0, "19130": 1116261376.0, "19135": 1116261376.0, "19140": 1116261376.0, "19145": 1116261376.0, "19150": 1116261376.0, "19155": 1116261376.0, "19160": 1116261376.0, "19165": 1116261376.0, "19170": 1116261376.0, "19175": 1116261376.0, "19180": 1116261376.0, "19185": 1116261376.0, "19190": 1116261376.0, "19195": 1116261376.0, "19200": 1116261376.0, "19205": 1116261376.0, "19210": 1116261376.0, "19215": 1116261376.0, "19220": 1116261376.0, "19225": 1116261376.0, "19230": 1116261376.0, "19235": 1116261376.0, "19240": 1116261376.0, "19245": 1116261376.0, "19250": 1116261376.0, "19255": 1116261376.0, "19260": 1116261376.0, "19265": 1116261376.0, "19270": 1116261376.0, "19275": 1116261376.0, "19280": 1116261376.0, "19285": 1116261376.0, "19290": 1116261376.0, "19295": 1116261376.0, "19300": 1116261376.0, "19305": 1116261376.0, "19310": 1116261376.0, "19315": 1116261376.0, "19320": 1116261376.0, "19325": 1116261376.0, "19330": 1116261376.0, "19335": 1116261376.0, "19340": 1116261376.0, "19345": 1116261376.0, "19350": 1116261376.0, "19355": 1116261376.0, "19360": 1116261376.0, "19365": 1116261376.0, "19370": 1116261376.0, "19375": 1116261376.0, "19380": 1116261376.0, "19385": 1116261376.0, "19390": 1116261376.0, "19395": 1116261376.0, "19400": 1116261376.0, "19405": 1116261376.0, "19410": 1116261376.0, "19415": 1116261376.0, "19420": 1116261376.0, "19425": 1116261376.0, "19430": 1116261376.0, "19435": 1116261376.0, "19440": 1116261376.0, "19445": 1116261376.0, "19450": 1116261376.0, "19455": 1116261376.0, "19460": 1116261376.0, "19465": 1116261376.0, "19470": 1116261376.0, "19475": 1116261376.0, "19480": 1116261376.0, "19485": 1116261376.0, "19490": 1116261376.0, "19495": 1116261376.0, "19500": 1116261376.0, "19505": 1116261376.0, "19510": 1116261376.0, "19515": 1116261376.0, "19520": 1116261376.0, "19525": 1116261376.0, "19530": 1116261376.0, "19535": 1116261376.0, "19540": 1116261376.0, "19545": 1116261376.0, "19550": 1116261376.0, "19555": 1116261376.0, "19560": 1116261376.0, "19565": 1116261376.0, "19570": 1116261376.0, "19575": 1116261376.0, "19580": 1116261376.0, "19585": 1116261376.0, "19590": 1116261376.0, "19595": 
1116261376.0, "19600": 1116261376.0, "19605": 1116261376.0, "19610": 1116261376.0, "19615": 1116261376.0, "19620": 1116261376.0, "19625": 1116261376.0, "19630": 1116261376.0, "19635": 1116261376.0, "19640": 1116261376.0, "19645": 1116261376.0, "19650": 1116261376.0, "19655": 1116261376.0, "19660": 1116261376.0, "19665": 1116261376.0, "19670": 1116261376.0, "19675": 1116261376.0, "19680": 1116261376.0, "19685": 1116261376.0, "19690": 1116261376.0, "19695": 1116261376.0, "19700": 1116261376.0, "19705": 1116261376.0, "19710": 1116261376.0, "19715": 1116261376.0, "19720": 1116261376.0, "19725": 1116261376.0, "19730": 1116261376.0, "19735": 1116261376.0, "19740": 1116261376.0, "19745": 1116261376.0, "19750": 1116261376.0, "19755": 1116261376.0, "19760": 1116261376.0, "19765": 1116261376.0, "19770": 1116261376.0, "19775": 1116261376.0, "19780": 1116261376.0, "19785": 1116261376.0, "19790": 1116261376.0, "19795": 1116261376.0, "19800": 1116261376.0, "19805": 1116261376.0, "19810": 1116261376.0, "19815": 1116261376.0, "19820": 1116261376.0, "19825": 1116261376.0, "19830": 1116261376.0, "19835": 1116261376.0, "19840": 1116261376.0, "19845": 1116261376.0, "19850": 1116261376.0, "19855": 1116261376.0, "19860": 1116261376.0, "19865": 1116261376.0, "19870": 1116261376.0, "19875": 1116261376.0, "19880": 1116261376.0, "19885": 1116261376.0, "19890": 1116261376.0, "19895": 1116261376.0, "19900": 1116261376.0, "19905": 1116261376.0, "19910": 1116261376.0, "19915": 1116261376.0, "19920": 1116261376.0, "19925": 1116261376.0, "19930": 1116261376.0, "19935": 1116261376.0, "19940": 1116261376.0, "19945": 1116261376.0, "19950": 1116261376.0, "19955": 1116261376.0, "19960": 1116261376.0, "19965": 1116261376.0, "19970": 1116261376.0, "19975": 1116261376.0, "19980": 1116261376.0, "19985": 1116261376.0, "19990": 1116261376.0, "19995": 1116261376.0, "20000": 1116261376.0, "20005": 1116261376.0, "20010": 1116261376.0, "20015": 1116261376.0, "20020": 1116261376.0, "20025": 1116261376.0, "20030": 1116261376.0, "20035": 1116261376.0, "20040": 1116261376.0, "20045": 1116261376.0, "20050": 1116261376.0, "20055": 1116261376.0, "20060": 1116261376.0, "20065": 1116261376.0, "20070": 1116261376.0, "20075": 1116261376.0, "20080": 1116261376.0, "20085": 1116261376.0, "20090": 1116261376.0, "20095": 1116261376.0, "20100": 1116261376.0, "20105": 1116261376.0, "20110": 1116261376.0, "20115": 1116261376.0, "20120": 1116261376.0, "20125": 1116261376.0, "20130": 1116261376.0, "20135": 1116261376.0, "20140": 1116261376.0, "20145": 1116261376.0, "20150": 1116261376.0, "20155": 1116261376.0, "20160": 1116261376.0, "20165": 1116261376.0, "20170": 1116261376.0, "20175": 1116261376.0, "20180": 1116261376.0, "20185": 1116261376.0, "20190": 1116261376.0, "20195": 1116261376.0, "20200": 1116261376.0, "20205": 1116261376.0, "20210": 1116261376.0, "20215": 1116261376.0, "20220": 1116261376.0, "20225": 1116261376.0, "20230": 1116261376.0, "20235": 1116261376.0, "20240": 1116261376.0, "20245": 1116261376.0, "20250": 1116261376.0, "20255": 1116261376.0, "20260": 1116261376.0, "20265": 1116261376.0, "20270": 1116261376.0, "20275": 1116261376.0, "20280": 1116261376.0, "20285": 1116261376.0, "20290": 1116261376.0, "20295": 1116261376.0, "20300": 1116261376.0, "20305": 1116261376.0, "20310": 1116261376.0, "20315": 1116261376.0, "20320": 1116261376.0, "20325": 1116261376.0, "20330": 1116261376.0, "20335": 1116261376.0, "20340": 1116261376.0, "20345": 1116261376.0, "20350": 1116261376.0, "20355": 1116261376.0, "20360": 1116261376.0, "20365": 
1116261376.0, "20370": 1116261376.0, "20375": 1116261376.0, "20380": 1116261376.0, "20385": 1116261376.0, "20390": 1116261376.0, "20395": 1116261376.0, "20400": 1116261376.0, "20405": 1116261376.0, "20410": 1116261376.0, "20415": 1116261376.0, "20420": 1116261376.0, "20425": 1116261376.0, "20430": 1116261376.0, "20435": 1116261376.0, "20440": 1116261376.0, "20445": 1116261376.0, "20450": 1116261376.0, "20455": 1116261376.0, "20460": 1116261376.0, "20465": 1116261376.0, "20470": 1116261376.0, "20475": 1116261376.0, "20480": 1116261376.0, "20485": 1116261376.0, "20490": 1116261376.0, "20495": 1116261376.0, "20500": 1116261376.0, "20505": 1116261376.0, "20510": 1116261376.0, "20515": 1116261376.0, "20520": 1116261376.0, "20525": 1116261376.0, "20530": 1116261376.0, "20535": 1116261376.0, "20540": 1116261376.0, "20545": 1116261376.0, "20550": 1116261376.0, "20555": 1116261376.0, "20560": 1116261376.0, "20565": 1116261376.0, "20570": 1116261376.0, "20575": 1116261376.0, "20580": 1116261376.0, "20585": 1116261376.0, "20590": 1116261376.0, "20595": 1116261376.0, "20600": 1116261376.0, "20605": 1116261376.0, "20610": 1116261376.0, "20615": 1116261376.0, "20620": 1116261376.0, "20625": 1116261376.0, "20630": 1116261376.0, "20635": 1116261376.0, "20640": 1116261376.0, "20645": 1116261376.0, "20650": 1116261376.0, "20655": 1116261376.0, "20660": 1116261376.0, "20665": 1116261376.0, "20670": 1116261376.0, "20675": 1116261376.0, "20680": 1116261376.0, "20685": 1116261376.0, "20690": 1116261376.0, "20695": 1116261376.0, "20700": 1116261376.0, "20705": 1116261376.0, "20710": 1116261376.0, "20715": 1116261376.0, "20720": 1116261376.0, "20725": 1116261376.0, "20730": 1116261376.0, "20735": 1116261376.0, "20740": 1116261376.0, "20745": 1116261376.0, "20750": 1116261376.0, "20755": 1116261376.0, "20760": 1116261376.0, "20765": 1116261376.0, "20770": 1116261376.0, "20775": 1116261376.0, "20780": 1116261376.0, "20785": 1116261376.0, "20790": 1116261376.0, "20795": 1116261376.0, "20800": 1116261376.0, "20805": 1116261376.0, "20810": 1116261376.0, "20815": 1116261376.0, "20820": 1116261376.0, "20825": 1116261376.0, "20830": 1116261376.0, "20835": 1116261376.0, "20840": 1116261376.0, "20845": 1116261376.0, "20850": 1116261376.0, "20855": 1116261376.0, "20860": 1116261376.0, "20865": 1116261376.0, "20870": 1116261376.0, "20875": 1116261376.0, "20880": 1116261376.0, "20885": 1116261376.0, "20890": 1116261376.0, "20895": 1116261376.0, "20900": 1116261376.0, "20905": 1116261376.0, "20910": 1116261376.0, "20915": 1116261376.0, "20920": 1116261376.0, "20925": 1116261376.0, "20930": 1116261376.0, "20935": 1116261376.0, "20940": 1116261376.0, "20945": 1116261376.0, "20950": 1116261376.0, "20955": 1116261376.0, "20960": 1116261376.0, "20965": 1116261376.0, "20970": 1116261376.0, "20975": 1116261376.0, "20980": 1116261376.0, "20985": 1116261376.0, "20990": 1116261376.0, "20995": 1116261376.0, "21000": 1116261376.0, "21005": 1116261376.0, "21010": 1116261376.0, "21015": 1116261376.0, "21020": 1116261376.0, "21025": 1116261376.0, "21030": 1116261376.0, "21035": 1116261376.0, "21040": 1116261376.0, "21045": 1116261376.0, "21050": 1116261376.0, "21055": 1116261376.0, "21060": 1116261376.0, "21065": 1116261376.0, "21070": 1116261376.0, "21075": 1116261376.0, "21080": 1116261376.0, "21085": 1116261376.0, "21090": 1116261376.0, "21095": 1116261376.0, "21100": 1116261376.0, "21105": 1116261376.0, "21110": 1116261376.0, "21115": 1116261376.0, "21120": 1116261376.0, "21125": 1116261376.0, "21130": 1116261376.0, "21135": 
1116261376.0, "21140": 1116261376.0, "21145": 1116261376.0, "21150": 1116261376.0, "21155": 1116261376.0, "21160": 1116261376.0, "21165": 1116261376.0, "21170": 1116261376.0, "21175": 1116261376.0, "21180": 1116261376.0, "21185": 1116261376.0, "21190": 1116261376.0, "21195": 1116261376.0, "21200": 1116261376.0, "21205": 1116261376.0, "21210": 1116261376.0, "21215": 1116261376.0, "21220": 1116261376.0, "21225": 1116261376.0, "21230": 1116261376.0, "21235": 1116261376.0, "21240": 1116261376.0, "21245": 1116261376.0, "21250": 1116261376.0, "21255": 1116261376.0, "21260": 1116261376.0, "21265": 1116261376.0, "21270": 1116261376.0, "21275": 1116261376.0, "21280": 1116261376.0, "21285": 1116261376.0, "21290": 1116261376.0, "21295": 1116261376.0, "21300": 1116261376.0, "21305": 1116261376.0, "21310": 1116261376.0, "21315": 1116261376.0, "21320": 1116261376.0, "21325": 1116261376.0, "21330": 1116261376.0, "21335": 1116261376.0, "21340": 1116261376.0, "21345": 1116261376.0, "21350": 1116261376.0, "21355": 1116261376.0, "21360": 1116261376.0, "21365": 1116261376.0, "21370": 1116261376.0, "21375": 1116261376.0, "21380": 1116261376.0, "21385": 1116261376.0, "21390": 1116261376.0, "21395": 1116261376.0, "21400": 1116261376.0, "21405": 1116261376.0, "21410": 1116261376.0, "21415": 1116261376.0, "21420": 1116261376.0, "21425": 1116261376.0, "21430": 1116261376.0, "21435": 1116261376.0, "21440": 1116261376.0, "21445": 1116261376.0, "21450": 1116261376.0, "21455": 1116261376.0, "21460": 1116261376.0, "21465": 1116261376.0, "21470": 1116261376.0, "21475": 1116261376.0, "21480": 1116261376.0, "21485": 1116261376.0, "21490": 1116261376.0, "21495": 1116261376.0, "21500": 1116261376.0, "21505": 1116261376.0, "21510": 1116261376.0, "21515": 1116261376.0, "21520": 1116261376.0, "21525": 1116261376.0, "21530": 1116261376.0, "21535": 1116261376.0, "21540": 1116261376.0, "21545": 1116261376.0, "21550": 1116261376.0, "21555": 1116261376.0, "21560": 1116261376.0, "21565": 1116261376.0, "21570": 1116261376.0, "21575": 1116261376.0, "21580": 1116261376.0, "21585": 1116261376.0, "21590": 1116261376.0, "21595": 1116261376.0, "21600": 1116261376.0, "21605": 1116261376.0, "21610": 1116261376.0, "21615": 1116261376.0, "21620": 1116261376.0, "21625": 1116261376.0, "21630": 1116261376.0, "21635": 1116261376.0, "21640": 1116261376.0, "21645": 1116261376.0, "21650": 1116261376.0, "21655": 1116261376.0, "21660": 1116261376.0, "21665": 1116261376.0, "21670": 1116261376.0, "21675": 1116261376.0, "21680": 1116261376.0, "21685": 1116261376.0, "21690": 1116261376.0, "21695": 1116261376.0, "21700": 1116261376.0, "21705": 1116261376.0, "21710": 1116261376.0, "21715": 1116261376.0, "21720": 1116261376.0, "21725": 1116261376.0, "21730": 1116261376.0, "21735": 1116261376.0, "21740": 1116261376.0, "21745": 1116261376.0, "21750": 1116261376.0, "21755": 1116261376.0, "21760": 1116261376.0, "21765": 1116261376.0, "21770": 1116261376.0, "21775": 1116261376.0, "21780": 1116261376.0, "21785": 1116261376.0, "21790": 1116261376.0, "21795": 1116261376.0, "21800": 1116261376.0, "21805": 1116261376.0, "21810": 1116261376.0, "21815": 1116261376.0, "21820": 1116261376.0, "21825": 1116261376.0, "21830": 1116261376.0, "21835": 1116261376.0, "21840": 1116261376.0, "21845": 1116261376.0, "21850": 1116261376.0, "21855": 1116261376.0, "21860": 1116261376.0, "21865": 1116261376.0, "21870": 1116261376.0, "21875": 1116261376.0, "21880": 1116261376.0, "21885": 1116261376.0, "21890": 1116261376.0, "21895": 1116261376.0, "21900": 1116261376.0, "21905": 
1116261376.0, "21910": 1116261376.0, "21915": 1116261376.0, "21920": 1116261376.0, "21925": 1116261376.0, "21930": 1116261376.0, "21935": 1116261376.0, "21940": 1116261376.0, "21945": 1116261376.0, "21950": 1116261376.0, "21955": 1116261376.0, "21960": 1116261376.0, "21965": 1116261376.0, "21970": 1116261376.0, "21975": 1116261376.0, "21980": 1116261376.0, "21985": 1116261376.0, "21990": 1116261376.0, "21995": 1116261376.0, "22000": 1116261376.0, "22005": 1116261376.0, "22010": 1116261376.0, "22015": 1116261376.0, "22020": 1116261376.0, "22025": 1116261376.0, "22030": 1116261376.0, "22035": 1116261376.0, "22040": 1116261376.0, "22045": 1116261376.0, "22050": 1116261376.0, "22055": 1116261376.0, "22060": 1116261376.0, "22065": 1116261376.0, "22070": 1116261376.0, "22075": 1116261376.0, "22080": 1116261376.0, "22085": 1116261376.0, "22090": 1116261376.0, "22095": 1116261376.0, "22100": 1116261376.0, "22105": 1116261376.0, "22110": 1116261376.0, "22115": 1116261376.0, "22120": 1116261376.0, "22125": 1116261376.0, "22130": 1116261376.0, "22135": 1116261376.0, "22140": 1116261376.0, "22145": 1116261376.0, "22150": 1116261376.0, "22155": 1116261376.0, "22160": 1116261376.0, "22165": 1116261376.0, "22170": 1116261376.0, "22175": 1116261376.0, "22180": 1116261376.0, "22185": 1116261376.0, "22190": 1116261376.0, "22195": 1116261376.0, "22200": 1116261376.0, "22205": 1116261376.0, "22210": 1116261376.0, "22215": 1116261376.0, "22220": 1116261376.0, "22225": 1116261376.0, "22230": 1116261376.0, "22235": 1116261376.0, "22240": 1116261376.0, "22245": 1116261376.0, "22250": 1116261376.0, "22255": 1116261376.0, "22260": 1116261376.0, "22265": 1116261376.0, "22270": 1116261376.0, "22275": 1116261376.0, "22280": 1116261376.0, "22285": 1116261376.0, "22290": 1116261376.0, "22295": 1116261376.0, "22300": 1116261376.0, "22305": 1116261376.0, "22310": 1116261376.0, "22315": 1116261376.0, "22320": 1116261376.0, "22325": 1116261376.0, "22330": 1116261376.0, "22335": 1116261376.0, "22340": 1116261376.0, "22345": 1116261376.0, "22350": 1116261376.0, "22355": 1116261376.0, "22360": 1116261376.0, "22365": 1116261376.0, "22370": 1116261376.0, "22375": 1116261376.0, "22380": 1116261376.0, "22385": 1116261376.0, "22390": 1116261376.0, "22395": 1116261376.0, "22400": 1116261376.0, "22405": 1116261376.0, "22410": 1116261376.0, "22415": 1116261376.0, "22420": 1116261376.0, "22425": 1116261376.0, "22430": 1116261376.0, "22435": 1116261376.0, "22440": 1116261376.0, "22445": 1116261376.0, "22450": 1116261376.0, "22455": 1116261376.0, "22460": 1116261376.0, "22465": 1116261376.0, "22470": 1116261376.0, "22475": 1116261376.0, "22480": 1116261376.0, "22485": 1116261376.0, "22490": 1116261376.0, "22495": 1116261376.0, "22500": 1116261376.0, "22505": 1116261376.0, "22510": 1116261376.0, "22515": 1116261376.0, "22520": 1116261376.0, "22525": 1116261376.0, "22530": 1116261376.0, "22535": 1116261376.0, "22540": 1116261376.0, "22545": 1116261376.0, "22550": 1116261376.0, "22555": 1116261376.0, "22560": 1116261376.0, "22565": 1116261376.0, "22570": 1116261376.0, "22575": 1116261376.0, "22580": 1116261376.0, "22585": 1116261376.0, "22590": 1116261376.0, "22595": 1116261376.0, "22600": 1116261376.0, "22605": 1116261376.0, "22610": 1116261376.0, "22615": 1116261376.0, "22620": 1116261376.0, "22625": 1116261376.0, "22630": 1116261376.0, "22635": 1116261376.0, "22640": 1116261376.0, "22645": 1116261376.0, "22650": 1116261376.0, "22655": 1116261376.0, "22660": 1116261376.0, "22665": 1116261376.0, "22670": 1116261376.0, "22675": 
1116261376.0, "22680": 1116261376.0, "22685": 1116261376.0, "22690": 1116261376.0, "22695": 1116261376.0, "22700": 1116261376.0, "22705": 1116261376.0, "22710": 1116261376.0, "22715": 1116261376.0, "22720": 1116261376.0, "22725": 1116261376.0, "22730": 1116261376.0, "22735": 1116261376.0, "22740": 1116261376.0, "22745": 1116261376.0, "22750": 1116261376.0, "22755": 1116261376.0, "22760": 1116261376.0, "22765": 1116261376.0, "22770": 1116261376.0, "22775": 1116261376.0, "22780": 1116261376.0, "22785": 1116261376.0, "22790": 1116261376.0, "22795": 1116261376.0, "22800": 1116261376.0, "22805": 1116261376.0, "22810": 1116261376.0, "22815": 1116261376.0, "22820": 1116261376.0, "22825": 1116261376.0, "22830": 1116261376.0, "22835": 1116261376.0, "22840": 1116261376.0, "22845": 1116261376.0, "22850": 1116261376.0, "22855": 1116261376.0, "22860": 1116261376.0, "22865": 1116261376.0, "22870": 1116261376.0, "22875": 1116261376.0, "22880": 1116261376.0, "22885": 1116261376.0, "22890": 1116261376.0, "22895": 1116261376.0, "22900": 1116261376.0, "22905": 1116261376.0, "22910": 1116261376.0, "22915": 1116261376.0, "22920": 1116261376.0, "22925": 1116261376.0, "22930": 1116261376.0, "22935": 1116261376.0, "22940": 1116261376.0, "22945": 1116261376.0, "22950": 1116261376.0, "22955": 1116261376.0, "22960": 1116261376.0, "22965": 1116261376.0, "22970": 1116261376.0, "22975": 1116261376.0, "22980": 1116261376.0, "22985": 1116261376.0, "22990": 1116261376.0, "22995": 1116261376.0, "23000": 1116261376.0, "23005": 1116261376.0, "23010": 1116261376.0, "23015": 1116261376.0, "23020": 1116261376.0, "23025": 1116261376.0, "23030": 1116261376.0, "23035": 1116261376.0, "23040": 1116261376.0, "23045": 1116261376.0, "23050": 1116261376.0, "23055": 1116261376.0, "23060": 1116261376.0, "23065": 1116261376.0, "23070": 1116261376.0, "23075": 1116261376.0, "23080": 1116261376.0, "23085": 1116261376.0, "23090": 1116261376.0, "23095": 1116261376.0, "23100": 1116261376.0, "23105": 1116261376.0, "23110": 1116261376.0, "23115": 1116261376.0, "23120": 1116261376.0, "23125": 1116261376.0, "23130": 1116261376.0, "23135": 1116261376.0, "23140": 1116261376.0, "23145": 1116261376.0, "23150": 1116261376.0, "23155": 1116261376.0, "23160": 1116261376.0, "23165": 1116261376.0, "23170": 1116261376.0, "23175": 1116261376.0, "23180": 1116261376.0, "23185": 1116261376.0, "23190": 1116261376.0, "23195": 1116261376.0, "23200": 1116261376.0, "23205": 1116261376.0, "23210": 1116261376.0, "23215": 1116261376.0, "23220": 1116261376.0, "23225": 1116261376.0, "23230": 1116261376.0, "23235": 1116261376.0, "23240": 1116261376.0, "23245": 1116261376.0, "23250": 1116261376.0, "23255": 1116261376.0, "23260": 1116261376.0, "23265": 1116261376.0, "23270": 1116261376.0, "23275": 1116261376.0, "23280": 1116261376.0, "23285": 1116261376.0, "23290": 1116261376.0, "23295": 1116261376.0, "23300": 1116261376.0, "23305": 1116261376.0, "23310": 1116261376.0, "23315": 1116261376.0, "23320": 1116261376.0, "23325": 1116261376.0, "23330": 1116261376.0, "23335": 1116261376.0, "23340": 1116261376.0, "23345": 1116261376.0, "23350": 1116261376.0, "23355": 1116261376.0, "23360": 1116261376.0, "23365": 1116261376.0, "23370": 1116261376.0, "23375": 1116261376.0, "23380": 1116261376.0, "23385": 1116261376.0, "23390": 1116261376.0, "23395": 1116261376.0, "23400": 1116261376.0, "23405": 1116261376.0, "23410": 1116261376.0, "23415": 1116261376.0, "23420": 1116261376.0, "23425": 1116261376.0, "23430": 1116261376.0, "23435": 1116261376.0, "23440": 1116261376.0, "23445": 
1116261376.0, "23450": 1116261376.0, "23455": 1116261376.0, "23460": 1116261376.0, "23465": 1116261376.0, "23470": 1116261376.0, "23475": 1116261376.0, "23480": 1116261376.0, "23485": 1116261376.0, "23490": 1116261376.0, "23495": 1116261376.0, "23500": 1116261376.0, "23505": 1116261376.0, "23510": 1116261376.0, "23515": 1116261376.0, "23520": 1116261376.0, "23525": 1116261376.0, "23530": 1116261376.0, "23535": 1116261376.0, "23540": 1116261376.0, "23545": 1116261376.0, "23550": 1116261376.0, "23555": 1116261376.0, "23560": 1116261376.0, "23565": 1116261376.0, "23570": 1116261376.0, "23575": 1116261376.0, "23580": 1116261376.0, "23585": 1116261376.0, "23590": 1116261376.0, "23595": 1116261376.0, "23600": 1116261376.0, "23605": 1116261376.0, "23610": 1116261376.0, "23615": 1116261376.0, "23620": 1116261376.0, "23625": 1116261376.0, "23630": 1116261376.0, "23635": 1116261376.0, "23640": 1116261376.0, "23645": 1116261376.0, "23650": 1116261376.0, "23655": 1116261376.0, "23660": 1116261376.0, "23665": 1116261376.0, "23670": 1116261376.0, "23675": 1116261376.0, "23680": 1116261376.0, "23685": 1116261376.0, "23690": 1116261376.0, "23695": 1116261376.0, "23700": 1116261376.0, "23705": 1116261376.0, "23710": 1116261376.0, "23715": 1116261376.0, "23720": 1116261376.0, "23725": 1116261376.0, "23730": 1116261376.0, "23735": 1116261376.0, "23740": 1116261376.0, "23745": 1116261376.0, "23750": 1116261376.0, "23755": 1116261376.0, "23760": 1116261376.0, "23765": 1116261376.0, "23770": 1116261376.0, "23775": 1116261376.0, "23780": 1116261376.0, "23785": 1116261376.0, "23790": 1116261376.0, "23795": 1116261376.0, "23800": 1116261376.0, "23805": 1116261376.0, "23810": 1116261376.0, "23815": 1116261376.0, "23820": 1116261376.0, "23825": 1116261376.0, "23830": 1116261376.0, "23835": 1116261376.0, "23840": 1116261376.0, "23845": 1116261376.0, "23850": 1116261376.0, "23855": 1116261376.0, "23860": 1116261376.0, "23865": 1116261376.0, "23870": 1116261376.0, "23875": 1116261376.0, "23880": 1116261376.0, "23885": 1116261376.0, "23890": 1116261376.0, "23895": 1116261376.0, "23900": 1116261376.0, "23905": 1116261376.0, "23910": 1116261376.0, "23915": 1116261376.0, "23920": 1116261376.0, "23925": 1116261376.0, "23930": 1116261376.0, "23935": 1116261376.0, "23940": 1116261376.0, "23945": 1116261376.0, "23950": 1116261376.0, "23955": 1116261376.0, "23960": 1116261376.0, "23965": 1116261376.0, "23970": 1116261376.0, "23975": 1116261376.0, "23980": 1116261376.0, "23985": 1116261376.0, "23990": 1116261376.0, "23995": 1116261376.0, "24000": 1116261376.0, "24005": 1116261376.0, "24010": 1116261376.0, "24015": 1116261376.0, "24020": 1116261376.0, "24025": 1116261376.0, "24030": 1116261376.0, "24035": 1116261376.0, "24040": 1116261376.0, "24045": 1116261376.0, "24050": 1116261376.0, "24055": 1116261376.0, "24060": 1116261376.0, "24065": 1116261376.0, "24070": 1116261376.0, "24075": 1116261376.0, "24080": 1116261376.0, "24085": 1116261376.0, "24090": 1116261376.0, "24095": 1116261376.0, "24100": 1116261376.0, "24105": 1116261376.0, "24110": 1116261376.0, "24115": 1116261376.0, "24120": 1116261376.0, "24125": 1116261376.0, "24130": 1116261376.0, "24135": 1116261376.0, "24140": 1116261376.0, "24145": 1116261376.0, "24150": 1116261376.0, "24155": 1116261376.0, "24160": 1116261376.0, "24165": 1116261376.0, "24170": 1116261376.0, "24175": 1116261376.0, "24180": 1116261376.0, "24185": 1116261376.0, "24190": 1116261376.0, "24195": 1116261376.0, "24200": 1116261376.0, "24205": 1116261376.0, "24210": 1116261376.0, "24215": 
1116261376.0, "24220": 1116261376.0, "24225": 1116261376.0, "24230": 1116261376.0, "24235": 1116261376.0, "24240": 1116261376.0, "24245": 1116261376.0, "24250": 1116261376.0, "24255": 1116261376.0, "24260": 1116261376.0, "24265": 1116261376.0, "24270": 1116261376.0, "24275": 1116261376.0, "24280": 1116261376.0, "24285": 1116261376.0, "24290": 1116261376.0, "24295": 1116261376.0, "24300": 1116261376.0, "24305": 1116261376.0, "24310": 1116261376.0, "24315": 1116261376.0, "24320": 1116261376.0, "24325": 1116261376.0, "24330": 1116261376.0, "24335": 1116261376.0, "24340": 1116261376.0, "24345": 1116261376.0, "24350": 1116261376.0, "24355": 1116261376.0, "24360": 1116261376.0, "24365": 1116261376.0, "24370": 1116261376.0, "24375": 1116261376.0, "24380": 1116261376.0, "24385": 1116261376.0, "24390": 1116261376.0, "24395": 1116261376.0, "24400": 1116261376.0, "24405": 1116261376.0, "24410": 1116261376.0, "24415": 1116261376.0, "24420": 1116261376.0, "24425": 1116261376.0, "24430": 1116261376.0, "24435": 1116261376.0, "24440": 1116261376.0, "24445": 1116261376.0, "24450": 1116261376.0, "24455": 1116261376.0, "24460": 1116261376.0, "24465": 1116261376.0, "24470": 1116261376.0, "24475": 1116261376.0, "24480": 1116261376.0, "24485": 1116261376.0, "24490": 1116261376.0, "24495": 1116261376.0, "24500": 1116261376.0, "24505": 1116261376.0, "24510": 1116261376.0, "24515": 1116261376.0, "24520": 1116261376.0, "24525": 1116261376.0, "24530": 1116261376.0, "24535": 1116261376.0, "24540": 1116261376.0, "24545": 1116261376.0, "24550": 1116261376.0, "24555": 1116261376.0, "24560": 1116261376.0, "24565": 1116261376.0, "24570": 1116261376.0, "24575": 1116261376.0, "24580": 1116261376.0, "24585": 1116261376.0, "24590": 1116261376.0, "24595": 1116261376.0, "24600": 1116261376.0, "24605": 1116261376.0, "24610": 1116261376.0, "24615": 1116261376.0, "24620": 1116261376.0, "24625": 1116261376.0, "24630": 1116261376.0, "24635": 1116261376.0, "24640": 1116261376.0, "24645": 1116261376.0, "24650": 1116261376.0, "24655": 1116261376.0, "24660": 1116261376.0, "24665": 1116261376.0, "24670": 1116261376.0, "24675": 1116261376.0, "24680": 1116261376.0, "24685": 1116261376.0, "24690": 1116261376.0, "24695": 1116261376.0, "24700": 1116261376.0, "24705": 1116261376.0, "24710": 1116261376.0, "24715": 1116261376.0, "24720": 1116261376.0, "24725": 1116261376.0, "24730": 1116261376.0, "24735": 1116261376.0, "24740": 1116261376.0, "24745": 1116261376.0, "24750": 1116261376.0, "24755": 1116261376.0, "24760": 1116261376.0, "24765": 1116261376.0, "24770": 1116261376.0, "24775": 1116261376.0, "24780": 1116261376.0, "24785": 1116261376.0, "24790": 1116261376.0, "24795": 1116261376.0, "24800": 1116261376.0, "24805": 1116261376.0, "24810": 1116261376.0, "24815": 1116261376.0, "24820": 1116261376.0, "24825": 1116261376.0, "24830": 1116261376.0, "24835": 1116261376.0, "24840": 1116261376.0, "24845": 1116261376.0, "24850": 1116261376.0, "24855": 1116261376.0, "24860": 1116261376.0, "24865": 1116261376.0, "24870": 1116261376.0, "24875": 1116261376.0, "24880": 1116261376.0, "24885": 1116261376.0, "24890": 1116261376.0, "24895": 1116261376.0, "24900": 1116261376.0, "24905": 1116261376.0, "24910": 1116261376.0, "24915": 1116261376.0, "24920": 1116261376.0, "24925": 1116261376.0, "24930": 1116261376.0, "24935": 1116261376.0, "24940": 1116261376.0, "24945": 1116261376.0, "24950": 1116261376.0, "24955": 1116261376.0, "24960": 1116261376.0, "24965": 1116261376.0, "24970": 1116261376.0, "24975": 1116261376.0, "24980": 1116261376.0, "24985": 
1116261376.0, "24990": 1116261376.0, "24995": 1116261376.0, "25000": 1116261376.0, "25005": 1116261376.0, "25010": 1116261376.0, "25015": 1116261376.0, "25020": 1116261376.0, "25025": 1116261376.0, "25030": 1116261376.0, "25035": 1116261376.0, "25040": 1116261376.0, "25045": 1116261376.0, "25050": 1116261376.0, "25055": 1116261376.0, "25060": 1116261376.0, "25065": 1116261376.0, "25070": 1116261376.0, "25075": 1116261376.0, "25080": 1116261376.0, "25085": 1116261376.0, "25090": 1116261376.0, "25095": 1116261376.0, "25100": 1116261376.0, "25105": 1116261376.0, "25110": 1116261376.0, "25115": 1116261376.0, "25120": 1116261376.0, "25125": 1116261376.0, "25130": 1116261376.0, "25135": 1116261376.0, "25140": 1116261376.0, "25145": 1116261376.0, "25150": 1116261376.0, "25155": 1116261376.0, "25160": 1116261376.0, "25165": 1116261376.0, "25170": 1116261376.0, "25175": 1116261376.0, "25180": 1116261376.0, "25185": 1116261376.0, "25190": 1116261376.0, "25195": 1116261376.0, "25200": 1116261376.0, "25205": 1116261376.0, "25210": 1116261376.0, "25215": 1116261376.0, "25220": 1116261376.0, "25225": 1116261376.0, "25230": 1116261376.0, "25235": 1116261376.0, "25240": 1116261376.0, "25245": 1116261376.0, "25250": 1116261376.0, "25255": 1116261376.0, "25260": 1116261376.0, "25265": 1116261376.0, "25270": 1116261376.0, "25275": 1116261376.0, "25280": 1116261376.0, "25285": 1116261376.0, "25290": 1116261376.0, "25295": 1116261376.0, "25300": 1116261376.0, "25305": 1116261376.0, "25310": 1116261376.0, "25315": 1116261376.0, "25320": 1116261376.0, "25325": 1116261376.0, "25330": 1116261376.0, "25335": 1116261376.0, "25340": 1116261376.0, "25345": 1116261376.0, "25350": 1116261376.0, "25355": 1116261376.0, "25360": 1116261376.0, "25365": 1116261376.0, "25370": 1116261376.0, "25375": 1116261376.0, "25380": 1116261376.0, "25385": 1116261376.0, "25390": 1116261376.0, "25395": 1116261376.0, "25400": 1116261376.0, "25405": 1116261376.0, "25410": 1116261376.0, "25415": 1116261376.0, "25420": 1116261376.0, "25425": 1116261376.0, "25430": 1116261376.0, "25435": 1116261376.0, "25440": 1116261376.0, "25445": 1116261376.0, "25450": 1116261376.0, "25455": 1116261376.0, "25460": 1116261376.0, "25465": 1116261376.0, "25470": 1116261376.0, "25475": 1116261376.0, "25480": 1116261376.0, "25485": 1116261376.0, "25490": 1116261376.0, "25495": 1116261376.0, "25500": 1116261376.0, "25505": 1116261376.0, "25510": 1116261376.0, "25515": 1116261376.0, "25520": 1116261376.0, "25525": 1116261376.0, "25530": 1116261376.0, "25535": 1116261376.0, "25540": 1116261376.0, "25545": 1116261376.0, "25550": 1116261376.0, "25555": 1116261376.0, "25560": 1116261376.0, "25565": 1116261376.0, "25570": 1116261376.0, "25575": 1116261376.0, "25580": 1116261376.0, "25585": 1116261376.0, "25590": 1116261376.0, "25595": 1116261376.0, "25600": 1116261376.0, "25605": 1116261376.0, "25610": 1116261376.0, "25615": 1116261376.0, "25620": 1116261376.0, "25625": 1116261376.0, "25630": 1116261376.0, "25635": 1116261376.0, "25640": 1116261376.0, "25645": 1116261376.0, "25650": 1116261376.0, "25655": 1116261376.0, "25660": 1116261376.0, "25665": 1116261376.0, "25670": 1116261376.0, "25675": 1116261376.0, "25680": 1116261376.0, "25685": 1116261376.0, "25690": 1116261376.0, "25695": 1116261376.0, "25700": 1116261376.0, "25705": 1116261376.0, "25710": 1116261376.0, "25715": 1116261376.0, "25720": 1116261376.0, "25725": 1116261376.0, "25730": 1116261376.0, "25735": 1116261376.0, "25740": 1116261376.0, "25745": 1116261376.0, "25750": 1116261376.0, "25755": 
1116261376.0, "25760": 1116261376.0, "25765": 1116261376.0, "25770": 1116261376.0, "25775": 1116261376.0, "25780": 1116261376.0, "25785": 1116261376.0, "25790": 1116261376.0, "25795": 1116261376.0, "25800": 1116261376.0, "25805": 1116261376.0, "25810": 1116261376.0, "25815": 1116261376.0, "25820": 1116261376.0, "25825": 1116261376.0, "25830": 1116261376.0, "25835": 1116261376.0, "25840": 1116261376.0, "25845": 1116261376.0, "25850": 1116261376.0, "25855": 1116261376.0, "25860": 1116261376.0, "25865": 1116261376.0, "25870": 1116261376.0, "25875": 1116261376.0, "25880": 1116261376.0, "25885": 1116261376.0, "25890": 1116261376.0, "25895": 1116261376.0, "25900": 1116261376.0, "25905": 1116261376.0, "25910": 1116261376.0, "25915": 1116261376.0, "25920": 1116261376.0, "25925": 1116261376.0, "25930": 1116261376.0, "25935": 1116261376.0, "25940": 1116261376.0, "25945": 1116261376.0, "25950": 1116261376.0, "25955": 1116261376.0, "25960": 1116261376.0, "25965": 1116261376.0, "25970": 1116261376.0, "25975": 1116261376.0, "25980": 1116261376.0, "25985": 1116261376.0, "25990": 1116261376.0, "25995": 1116261376.0, "26000": 1116261376.0, "26005": 1116261376.0, "26010": 1116261376.0, "26015": 1116261376.0, "26020": 1116261376.0, "26025": 1116261376.0, "26030": 1116261376.0, "26035": 1116261376.0, "26040": 1116261376.0, "26045": 1116261376.0, "26050": 1116261376.0, "26055": 1116261376.0, "26060": 1116261376.0, "26065": 1116261376.0, "26070": 1116261376.0, "26075": 1116261376.0, "26080": 1116261376.0, "26085": 1116261376.0, "26090": 1116261376.0, "26095": 1116261376.0, "26100": 1116261376.0, "26105": 1116261376.0, "26110": 1116261376.0, "26115": 1116261376.0, "26120": 1116261376.0, "26125": 1116261376.0, "26130": 1116261376.0, "26135": 1116261376.0, "26140": 1116261376.0, "26145": 1116261376.0, "26150": 1116261376.0, "26155": 1116261376.0, "26160": 1116261376.0, "26165": 1116261376.0, "26170": 1116261376.0, "26175": 1116261376.0, "26180": 1116261376.0, "26185": 1116261376.0, "26190": 1116261376.0, "26195": 1116261376.0, "26200": 1116261376.0, "26205": 1116261376.0, "26210": 1116261376.0, "26215": 1116261376.0, "26220": 1116261376.0, "26225": 1116261376.0, "26230": 1116261376.0, "26235": 1116261376.0, "26240": 1116261376.0, "26245": 1116261376.0, "26250": 1116261376.0, "26255": 1116261376.0, "26260": 1116261376.0, "26265": 1116261376.0, "26270": 1116261376.0, "26275": 1116261376.0, "26280": 1116261376.0, "26285": 1116261376.0, "26290": 1116261376.0, "26295": 1116261376.0, "26300": 1116261376.0, "26305": 1116261376.0, "26310": 1116261376.0, "26315": 1116261376.0, "26320": 1116261376.0, "26325": 1116261376.0, "26330": 1116261376.0, "26335": 1116261376.0, "26340": 1116261376.0, "26345": 1116261376.0, "26350": 1116261376.0, "26355": 1116261376.0, "26360": 1116261376.0, "26365": 1116261376.0, "26370": 1116261376.0, "26375": 1116261376.0, "26380": 1116261376.0, "26385": 1116261376.0, "26390": 1116261376.0, "26395": 1116261376.0, "26400": 1116261376.0, "26405": 1116261376.0, "26410": 1116261376.0, "26415": 1116261376.0, "26420": 1116261376.0, "26425": 1116261376.0, "26430": 1116261376.0, "26435": 1116261376.0, "26440": 1116261376.0, "26445": 1116261376.0, "26450": 1116261376.0, "26455": 1116261376.0, "26460": 1116261376.0, "26465": 1116261376.0, "26470": 1116261376.0, "26475": 1116261376.0, "26480": 1116261376.0, "26485": 1116261376.0, "26490": 1116261376.0, "26495": 1116261376.0, "26500": 1116261376.0, "26505": 1116261376.0, "26510": 1116261376.0, "26515": 1116261376.0, "26520": 1116261376.0, "26525": 
1116261376.0, "26530": 1116261376.0, "26535": 1116261376.0, "26540": 1116261376.0, "26545": 1116261376.0, "26550": 1116261376.0, "26555": 1116261376.0, "26560": 1116261376.0, "26565": 1116261376.0, "26570": 1116261376.0, "26575": 1116261376.0, "26580": 1116261376.0, "26585": 1116261376.0, "26590": 1116261376.0, "26595": 1116261376.0, "26600": 1116261376.0, "26605": 1116261376.0, "26610": 1116261376.0, "26615": 1116261376.0, "26620": 1116261376.0, "26625": 1116261376.0, "26630": 1116261376.0, "26635": 1116261376.0, "26640": 1116261376.0, "26645": 1116261376.0, "26650": 1116261376.0, "26655": 1116261376.0, "26660": 1116261376.0, "26665": 1116261376.0, "26670": 1116261376.0, "26675": 1116261376.0, "26680": 1116261376.0, "26685": 1116261376.0, "26690": 1116261376.0, "26695": 1116261376.0, "26700": 1116261376.0, "26705": 1116261376.0, "26710": 1116261376.0, "26715": 1116261376.0, "26720": 1116261376.0, "26725": 1116261376.0, "26730": 1116261376.0, "26735": 1116261376.0, "26740": 1116261376.0, "26745": 1116261376.0, "26750": 1116261376.0, "26755": 1116261376.0, "26760": 1116261376.0, "26765": 1116261376.0, "26770": 1116261376.0, "26775": 1116261376.0, "26780": 1116261376.0, "26785": 1116261376.0, "26790": 1116261376.0, "26795": 1116261376.0, "26800": 1116261376.0, "26805": 1116261376.0, "26810": 1116261376.0, "26815": 1116261376.0, "26820": 1116261376.0, "26825": 1116261376.0, "26830": 1116261376.0, "26835": 1116261376.0, "26840": 1116261376.0, "26845": 1116261376.0, "26850": 1116261376.0, "26855": 1116261376.0, "26860": 1116261376.0, "26865": 1116261376.0, "26870": 1116261376.0, "26875": 1116261376.0, "26880": 1116261376.0, "26885": 1116261376.0, "26890": 1116261376.0, "26895": 1116261376.0, "26900": 1116261376.0, "26905": 1116261376.0, "26910": 1116261376.0, "26915": 1116261376.0, "26920": 1116261376.0, "26925": 1116261376.0, "26930": 1116261376.0, "26935": 1116261376.0, "26940": 1116261376.0, "26945": 1116261376.0, "26950": 1116261376.0, "26955": 1116261376.0, "26960": 1116261376.0, "26965": 1116261376.0, "26970": 1116261376.0, "26975": 1116261376.0, "26980": 1116261376.0, "26985": 1116261376.0, "26990": 1116261376.0, "26995": 1116261376.0, "27000": 1116261376.0, "27005": 1116261376.0, "27010": 1116261376.0, "27015": 1116261376.0, "27020": 1116261376.0, "27025": 1116261376.0, "27030": 1116261376.0, "27035": 1116261376.0, "27040": 1116261376.0, "27045": 1116261376.0, "27050": 1116261376.0, "27055": 1116261376.0, "27060": 1116261376.0, "27065": 1116261376.0, "27070": 1116261376.0, "27075": 1116261376.0, "27080": 1116261376.0, "27085": 1116261376.0, "27090": 1116261376.0, "27095": 1116261376.0, "27100": 1116261376.0, "27105": 1116261376.0, "27110": 1116261376.0, "27115": 1116261376.0, "27120": 1116261376.0, "27125": 1116261376.0, "27130": 1116261376.0, "27135": 1116261376.0, "27140": 1116261376.0, "27145": 1116261376.0, "27150": 1116261376.0, "27155": 1116261376.0, "27160": 1116261376.0, "27165": 1116261376.0, "27170": 1116261376.0, "27175": 1116261376.0, "27180": 1116261376.0, "27185": 1116261376.0, "27190": 1116261376.0, "27195": 1116261376.0, "27200": 1116261376.0, "27205": 1116261376.0, "27210": 1116261376.0, "27215": 1116261376.0, "27220": 1116261376.0, "27225": 1116261376.0, "27230": 1116261376.0, "27235": 1116261376.0, "27240": 1116261376.0, "27245": 1116261376.0, "27250": 1116261376.0, "27255": 1116261376.0, "27260": 1116261376.0, "27265": 1116261376.0, "27270": 1116261376.0, "27275": 1116261376.0, "27280": 1116261376.0, "27285": 1116261376.0, "27290": 1116261376.0, "27295": 
1116261376.0, "27300": 1116261376.0, "27305": 1116261376.0, "27310": 1116261376.0, "27315": 1116261376.0, "27320": 1116261376.0, "27325": 1116261376.0, "27330": 1116261376.0, "27335": 1116261376.0, "27340": 1116261376.0, "27345": 1116261376.0, "27350": 1116261376.0, "27355": 1116261376.0, "27360": 1116261376.0, "27365": 1116261376.0, "27370": 1116261376.0, "27375": 1116261376.0, "27380": 1116261376.0, "27385": 1116261376.0, "27390": 1116261376.0, "27395": 1116261376.0, "27400": 1116261376.0, "27405": 1116261376.0, "27410": 1116261376.0, "27415": 1116261376.0, "27420": 1116261376.0, "27425": 1116261376.0, "27430": 1116261376.0, "27435": 1116261376.0, "27440": 1116261376.0, "27445": 1116261376.0, "27450": 1116261376.0, "27455": 1116261376.0, "27460": 1116261376.0, "27465": 1116261376.0, "27470": 1116261376.0, "27475": 1116261376.0, "27480": 1116261376.0, "27485": 1116261376.0, "27490": 1116261376.0, "27495": 1116261376.0, "27500": 1116261376.0, "27505": 1116261376.0, "27510": 1116261376.0, "27515": 1116261376.0, "27520": 1116261376.0, "27525": 1116261376.0, "27530": 1116261376.0, "27535": 1116261376.0, "27540": 1116261376.0, "27545": 1116261376.0, "27550": 1116261376.0, "27555": 1116261376.0, "27560": 1116261376.0, "27565": 1116261376.0, "27570": 1116261376.0, "27575": 1116261376.0, "27580": 1116261376.0, "27585": 1116261376.0, "27590": 1116261376.0, "27595": 1116261376.0, "27600": 1116261376.0, "27605": 1116261376.0, "27610": 1116261376.0, "27615": 1116261376.0, "27620": 1116261376.0, "27625": 1116261376.0, "27630": 1116261376.0, "27635": 1116261376.0, "27640": 1116261376.0, "27645": 1116261376.0, "27650": 1116261376.0, "27655": 1116261376.0, "27660": 1116261376.0, "27665": 1116261376.0, "27670": 1116261376.0, "27675": 1116261376.0, "27680": 1116261376.0, "27685": 1116261376.0, "27690": 1116261376.0, "27695": 1116261376.0, "27700": 1116261376.0, "27705": 1116261376.0, "27710": 1116261376.0, "27715": 1116261376.0, "27720": 1116261376.0, "27725": 1116261376.0, "27730": 1116261376.0, "27735": 1116261376.0, "27740": 1116261376.0, "27745": 1116261376.0, "27750": 1116261376.0, "27755": 1116261376.0, "27760": 1116261376.0, "27765": 1116261376.0, "27770": 1116261376.0, "27775": 1116261376.0, "27780": 1116261376.0, "27785": 1116261376.0, "27790": 1116261376.0, "27795": 1116261376.0, "27800": 1116261376.0, "27805": 1116261376.0, "27810": 1116261376.0, "27815": 1116261376.0, "27820": 1116261376.0, "27825": 1116261376.0, "27830": 1116261376.0, "27835": 1116261376.0, "27840": 1116261376.0, "27845": 1116261376.0, "27850": 1116261376.0, "27855": 1116261376.0, "27860": 1116261376.0, "27865": 1116261376.0, "27870": 1116261376.0, "27875": 1116261376.0, "27880": 1116261376.0, "27885": 1116261376.0, "27890": 1116261376.0, "27895": 1116261376.0, "27900": 1116261376.0, "27905": 1116261376.0, "27910": 1116261376.0, "27915": 1116261376.0, "27920": 1116261376.0, "27925": 1116261376.0, "27930": 1116261376.0, "27935": 1116261376.0, "27940": 1116261376.0, "27945": 1116261376.0, "27950": 1116261376.0, "27955": 1116261376.0, "27960": 1116261376.0, "27965": 1116261376.0, "27970": 1116261376.0, "27975": 1116261376.0, "27980": 1116261376.0, "27985": 1116261376.0, "27990": 1116261376.0, "27995": 1116261376.0, "28000": 1116261376.0, "28005": 1116261376.0, "28010": 1116261376.0, "28015": 1116261376.0, "28020": 1116261376.0, "28025": 1116261376.0, "28030": 1116261376.0, "28035": 1116261376.0, "28040": 1116261376.0, "28045": 1116261376.0, "28050": 1116261376.0, "28055": 1116261376.0, "28060": 1116261376.0, "28065": 
1116261376.0, "28070": 1116261376.0, "28075": 1116261376.0, "28080": 1116261376.0, "28085": 1116261376.0, "28090": 1116261376.0, "28095": 1116261376.0, "28100": 1116261376.0, "28105": 1116261376.0, "28110": 1116261376.0, "28115": 1116261376.0, "28120": 1116261376.0, "28125": 1116261376.0, "28130": 1116261376.0, "28135": 1116261376.0, "28140": 1116261376.0, "28145": 1116261376.0, "28150": 1116261376.0, "28155": 1116261376.0, "28160": 1116261376.0, "28165": 1116261376.0, "28170": 1116261376.0, "28175": 1116261376.0, "28180": 1116261376.0, "28185": 1116261376.0, "28190": 1116261376.0, "28195": 1116261376.0, "28200": 1116261376.0, "28205": 1116261376.0, "28210": 1116261376.0, "28215": 1116261376.0, "28220": 1116261376.0, "28225": 1116261376.0, "28230": 1116261376.0, "28235": 1116261376.0, "28240": 1116261376.0, "28245": 1116261376.0, "28250": 1116261376.0, "28255": 1116261376.0, "28260": 1116261376.0, "28265": 1116261376.0, "28270": 1116261376.0, "28275": 1116261376.0, "28280": 1116261376.0, "28285": 1116261376.0, "28290": 1116261376.0, "28295": 1116261376.0, "28300": 1116261376.0, "28305": 1116261376.0, "28310": 1116261376.0, "28315": 1116261376.0, "28320": 1116261376.0, "28325": 1116261376.0, "28330": 1116261376.0, "28335": 1116261376.0, "28340": 1116261376.0, "28345": 1116261376.0, "28350": 1116261376.0, "28355": 1116261376.0, "28360": 1116261376.0, "28365": 1116261376.0, "28370": 1116261376.0, "28375": 1116261376.0, "28380": 1116261376.0, "28385": 1116261376.0, "28390": 1116261376.0, "28395": 1116261376.0, "28400": 1116261376.0, "28405": 1116261376.0, "28410": 1116261376.0, "28415": 1116261376.0, "28420": 1116261376.0, "28425": 1116261376.0, "28430": 1116261376.0, "28435": 1116261376.0, "28440": 1116261376.0, "28445": 1116261376.0, "28450": 1116261376.0, "28455": 1116261376.0, "28460": 1116261376.0, "28465": 1116261376.0, "28470": 1116261376.0, "28475": 1116261376.0, "28480": 1116261376.0, "28485": 1116261376.0, "28490": 1116261376.0, "28495": 1116261376.0, "28500": 1116261376.0, "28505": 1116261376.0, "28510": 1116261376.0, "28515": 1116261376.0, "28520": 1116261376.0, "28525": 1116261376.0, "28530": 1116261376.0, "28535": 1116261376.0, "28540": 1116261376.0, "28545": 1116261376.0, "28550": 1116261376.0, "28555": 1116261376.0, "28560": 1116261376.0, "28565": 1116261376.0, "28570": 1116261376.0, "28575": 1116261376.0, "28580": 1116261376.0, "28585": 1116261376.0, "28590": 1116261376.0, "28595": 1116261376.0, "28600": 1116261376.0, "28605": 1116261376.0, "28610": 1116261376.0, "28615": 1116261376.0, "28620": 1116261376.0, "28625": 1116261376.0, "28630": 1116261376.0, "28635": 1116261376.0, "28640": 1116261376.0, "28645": 1116261376.0, "28650": 1116261376.0, "28655": 1116261376.0, "28660": 1116261376.0, "28665": 1116261376.0, "28670": 1116261376.0, "28675": 1116261376.0, "28680": 1116261376.0, "28685": 1116261376.0, "28690": 1116261376.0, "28695": 1116261376.0, "28700": 1116261376.0, "28705": 1116261376.0, "28710": 1116261376.0, "28715": 1116261376.0, "28720": 1116261376.0, "28725": 1116261376.0, "28730": 1116261376.0, "28735": 1116261376.0, "28740": 1116261376.0, "28745": 1116261376.0, "28750": 1116261376.0, "28755": 1116261376.0, "28760": 1116261376.0, "28765": 1116261376.0, "28770": 1116261376.0, "28775": 1116261376.0, "28780": 1116261376.0, "28785": 1116261376.0, "28790": 1116261376.0, "28795": 1116261376.0, "28800": 1116261376.0, "28805": 1116261376.0, "28810": 1116261376.0, "28815": 1116261376.0, "28820": 1116261376.0, "28825": 1116261376.0, "28830": 1116261376.0, "28835": 
1116261376.0, "28840": 1116261376.0, "28845": 1116261376.0, "28850": 1116261376.0, "28855": 1116261376.0, "28860": 1116261376.0, "28865": 1116261376.0, "28870": 1116261376.0, "28875": 1116261376.0, "28880": 1116261376.0, "28885": 1116261376.0, "28890": 1116261376.0, "28895": 1116261376.0, "28900": 1116261376.0, "28905": 1116261376.0, "28910": 1116261376.0, "28915": 1116261376.0, "28920": 1116261376.0, "28925": 1116261376.0, "28930": 1116261376.0, "28935": 1116261376.0, "28940": 1116261376.0, "28945": 1116261376.0, "28950": 1116261376.0, "28955": 1116261376.0, "28960": 1116261376.0, "28965": 1116261376.0, "28970": 1116261376.0, "28975": 1116261376.0, "28980": 1116261376.0, "28985": 1116261376.0, "28990": 1116261376.0, "28995": 1116261376.0, "29000": 1116261376.0, "29005": 1116261376.0, "29010": 1116261376.0, "29015": 1116261376.0, "29020": 1116261376.0, "29025": 1116261376.0, "29030": 1116261376.0, "29035": 1116261376.0, "29040": 1116261376.0, "29045": 1116261376.0, "29050": 1116261376.0, "29055": 1116261376.0, "29060": 1116261376.0, "29065": 1116261376.0, "29070": 1116261376.0, "29075": 1116261376.0, "29080": 1116261376.0, "29085": 1116261376.0, "29090": 1116261376.0, "29095": 1116261376.0, "29100": 1116261376.0, "29105": 1116261376.0, "29110": 1116261376.0, "29115": 1116261376.0, "29120": 1116261376.0, "29125": 1116261376.0, "29130": 1116261376.0, "29135": 1116261376.0, "29140": 1116261376.0, "29145": 1116261376.0, "29150": 1116261376.0, "29155": 1116261376.0, "29160": 1116261376.0, "29165": 1116261376.0, "29170": 1116261376.0, "29175": 1116261376.0, "29180": 1116261376.0, "29185": 1116261376.0, "29190": 1116261376.0, "29195": 1116261376.0, "29200": 1116261376.0, "29205": 1116261376.0, "29210": 1116261376.0, "29215": 1116261376.0, "29220": 1116261376.0, "29225": 1116261376.0, "29230": 1116261376.0, "29235": 1116261376.0, "29240": 1116261376.0, "29245": 1116261376.0, "29250": 1116261376.0, "29255": 1116261376.0, "29260": 1116261376.0, "29265": 1116261376.0, "29270": 1116261376.0, "29275": 1116261376.0, "29280": 1116261376.0, "29285": 1116261376.0, "29290": 1116261376.0, "29295": 1116261376.0, "29300": 1116261376.0, "29305": 1116261376.0, "29310": 1116261376.0, "29315": 1116261376.0, "29320": 1116261376.0, "29325": 1116261376.0, "29330": 1116261376.0, "29335": 1116261376.0, "29340": 1116261376.0, "29345": 1116261376.0, "29350": 1116261376.0, "29355": 1116261376.0, "29360": 1116261376.0, "29365": 1116261376.0, "29370": 1116261376.0, "29375": 1116261376.0, "29380": 1116261376.0, "29385": 1116261376.0, "29390": 1116261376.0, "29395": 1116261376.0, "29400": 1116261376.0, "29405": 1116261376.0, "29410": 1116261376.0, "29415": 1116261376.0, "29420": 1116261376.0, "29425": 1116261376.0, "29430": 1116261376.0, "29435": 1116261376.0, "29440": 1116261376.0, "29445": 1116261376.0, "29450": 1116261376.0, "29455": 1116261376.0, "29460": 1116261376.0, "29465": 1116261376.0, "29470": 1116261376.0, "29475": 1116261376.0, "29480": 1116261376.0, "29485": 1116261376.0, "29490": 1116261376.0, "29495": 1116261376.0, "29500": 1116261376.0, "29505": 1116261376.0, "29510": 1116261376.0, "29515": 1116261376.0, "29520": 1116261376.0, "29525": 1116261376.0, "29530": 1116261376.0, "29535": 1116261376.0, "29540": 1116261376.0, "29545": 1116261376.0, "29550": 1116261376.0, "29555": 1116261376.0, "29560": 1116261376.0, "29565": 1116261376.0, "29570": 1116261376.0, "29575": 1116261376.0, "29580": 1116261376.0, "29585": 1116261376.0, "29590": 1116261376.0, "29595": 1116261376.0, "29600": 1116261376.0, "29605": 
1116261376.0, "29610": 1116261376.0, "29615": 1116261376.0, "29620": 1116261376.0, "29625": 1116261376.0, "29630": 1116261376.0, "29635": 1116261376.0, "29640": 1116261376.0, "29645": 1116261376.0, "29650": 1116261376.0, "29655": 1116261376.0, "29660": 1116261376.0, "29665": 1116261376.0, "29670": 1116261376.0, "29675": 1116261376.0, "29680": 1116261376.0, "29685": 1116261376.0, "29690": 1116261376.0, "29695": 1116261376.0, "29700": 1116261376.0, "29705": 1116261376.0, "29710": 1116261376.0, "29715": 1116261376.0, "29720": 1116261376.0, "29725": 1116261376.0, "29730": 1116261376.0, "29735": 1116261376.0, "29740": 1116261376.0, "29745": 1116261376.0, "29750": 1116261376.0, "29755": 1116261376.0, "29760": 1116261376.0, "29765": 1116261376.0, "29770": 1116261376.0, "29775": 1116261376.0, "29780": 1116261376.0, "29785": 1116261376.0, "29790": 1116261376.0, "29795": 1116261376.0, "29800": 1116261376.0, "29805": 1116261376.0, "29810": 1116261376.0, "29815": 1116261376.0, "29820": 1116261376.0, "29825": 1116261376.0, "29830": 1116261376.0, "29835": 1116261376.0, "29840": 1116261376.0, "29845": 1116261376.0, "29850": 1116261376.0, "29855": 1116261376.0, "29860": 1116261376.0, "29865": 1116261376.0, "29870": 1116261376.0, "29875": 1116261376.0, "29880": 1116261376.0, "29885": 1116261376.0, "29890": 1116261376.0, "29895": 1116261376.0, "29900": 1116261376.0, "29905": 1116261376.0, "29910": 1116261376.0, "29915": 1116261376.0, "29920": 1116261376.0, "29925": 1116261376.0, "29930": 1116261376.0, "29935": 1116261376.0, "29940": 1116261376.0, "29945": 1116261376.0, "29950": 1116261376.0, "29955": 1116261376.0, "29960": 1116261376.0, "29965": 1116261376.0, "29970": 1116261376.0, "29975": 1116261376.0, "29980": 1116261376.0, "29985": 1116261376.0, "29990": 1116261376.0, "29995": 1116261376.0, "30000": 1116261376.0, "30005": 1116261376.0, "30010": 1116261376.0, "30015": 1116261376.0, "30020": 1116261376.0, "30025": 1116261376.0, "30030": 1116261376.0, "30035": 1116261376.0, "30040": 1116261376.0, "30045": 1116261376.0, "30050": 1116261376.0, "30055": 1116261376.0, "30060": 1116261376.0, "30065": 1116261376.0, "30070": 1116261376.0, "30075": 1116261376.0, "30080": 1116261376.0, "30085": 1116261376.0, "30090": 1116261376.0, "30095": 1116261376.0, "30100": 1116261376.0, "30105": 1116261376.0, "30110": 1116261376.0, "30115": 1116261376.0, "30120": 1116261376.0, "30125": 1116261376.0, "30130": 1116261376.0, "30135": 1116261376.0, "30140": 1116261376.0, "30145": 1116261376.0, "30150": 1116261376.0, "30155": 1116261376.0, "30160": 1116261376.0, "30165": 1116261376.0, "30170": 1116261376.0, "30175": 1116261376.0, "30180": 1116261376.0, "30185": 1116261376.0, "30190": 1116261376.0, "30195": 1116261376.0, "30200": 1116261376.0, "30205": 1116261376.0, "30210": 1116261376.0, "30215": 1116261376.0, "30220": 1116261376.0, "30225": 1116261376.0, "30230": 1116261376.0, "30235": 1116261376.0, "30240": 1116261376.0, "30245": 1116261376.0, "30250": 1116261376.0, "30255": 1116261376.0, "30260": 1116261376.0, "30265": 1116261376.0, "30270": 1116261376.0, "30275": 1116261376.0, "30280": 1116261376.0, "30285": 1116261376.0, "30290": 1116261376.0, "30295": 1116261376.0, "30300": 1116261376.0, "30305": 1116261376.0, "30310": 1116261376.0, "30315": 1116261376.0, "30320": 1116261376.0, "30325": 1116261376.0, "30330": 1116261376.0, "30335": 1116261376.0, "30340": 1116261376.0, "30345": 1116261376.0, "30350": 1116261376.0, "30355": 1116261376.0, "30360": 1116261376.0, "30365": 1116261376.0, "30370": 1116261376.0, "30375": 
1116261376.0, "30380": 1116261376.0, "30385": 1116261376.0, "30390": 1116261376.0, "30395": 1116261376.0, "30400": 1116261376.0, "30405": 1116261376.0, "30410": 1116261376.0, "30415": 1116261376.0, "30420": 1116261376.0, "30425": 1116261376.0, "30430": 1116261376.0, "30435": 1116261376.0, "30440": 1116261376.0, "30445": 1116261376.0, "30450": 1116261376.0, "30455": 1116261376.0, "30460": 1116261376.0, "30465": 1116261376.0, "30470": 1116261376.0, "30475": 1116261376.0, "30480": 1116261376.0, "30485": 1116261376.0, "30490": 1116261376.0, "30495": 1116261376.0, "30500": 1116261376.0, "30505": 1116261376.0, "30510": 1116261376.0, "30515": 1116261376.0, "30520": 1116261376.0, "30525": 1116261376.0, "30530": 1116261376.0, "30535": 1116261376.0, "30540": 1116261376.0, "30545": 1116261376.0, "30550": 1116261376.0, "30555": 1116261376.0, "30560": 1116261376.0, "30565": 1116261376.0, "30570": 1116261376.0, "30575": 1116261376.0, "30580": 1116261376.0, "30585": 1116261376.0, "30590": 1116261376.0, "30595": 1116261376.0, "30600": 1116261376.0, "30605": 1116261376.0, "30610": 1116261376.0, "30615": 1116261376.0, "30620": 1116261376.0, "30625": 1116261376.0, "30630": 1116261376.0, "30635": 1116261376.0, "30640": 1116261376.0, "30645": 1116261376.0, "30650": 1116261376.0, "30655": 1116261376.0, "30660": 1116261376.0, "30665": 1116261376.0, "30670": 1116261376.0, "30675": 1116261376.0, "30680": 1116261376.0, "30685": 1116261376.0, "30690": 1116261376.0, "30695": 1116261376.0, "30700": 1116261376.0, "30705": 1116261376.0, "30710": 1116261376.0, "30715": 1116261376.0, "30720": 1116261376.0, "30725": 1116261376.0, "30730": 1116261376.0, "30735": 1116261376.0, "30740": 1116261376.0, "30745": 1116261376.0, "30750": 1116261376.0, "30755": 1116261376.0, "30760": 1116261376.0, "30765": 1116261376.0, "30770": 1116261376.0, "30775": 1116261376.0, "30780": 1116261376.0, "30785": 1116261376.0, "30790": 1116261376.0, "30795": 1116261376.0, "30800": 1116261376.0, "30805": 1116261376.0, "30810": 1116261376.0, "30815": 1116261376.0, "30820": 1116261376.0, "30825": 1116261376.0, "30830": 1116261376.0, "30835": 1116261376.0, "30840": 1116261376.0, "30845": 1116261376.0, "30850": 1116261376.0, "30855": 1116261376.0, "30860": 1116261376.0, "30865": 1116261376.0, "30870": 1116261376.0, "30875": 1116261376.0, "30880": 1116261376.0, "30885": 1116261376.0, "30890": 1116261376.0, "30895": 1116261376.0, "30900": 1116261376.0, "30905": 1116261376.0, "30910": 1116261376.0, "30915": 1116261376.0, "30920": 1116261376.0, "30925": 1116261376.0, "30930": 1116261376.0, "30935": 1116261376.0, "30940": 1116261376.0, "30945": 1116261376.0, "30950": 1116261376.0, "30955": 1116261376.0, "30960": 1116261376.0, "30965": 1116261376.0, "30970": 1116261376.0, "30975": 1116261376.0, "30980": 1116261376.0, "30985": 1116261376.0, "30990": 1116261376.0, "30995": 1116261376.0, "31000": 1116261376.0, "31005": 1116261376.0, "31010": 1116261376.0, "31015": 1116261376.0, "31020": 1116261376.0, "31025": 1116261376.0, "31030": 1116261376.0, "31035": 1116261376.0, "31040": 1116261376.0, "31045": 1116261376.0, "31050": 1116261376.0, "31055": 1116261376.0, "31060": 1116261376.0, "31065": 1116261376.0, "31070": 1116261376.0, "31075": 1116261376.0, "31080": 1116261376.0, "31085": 1116261376.0, "31090": 1116261376.0, "31095": 1116261376.0, "31100": 1116261376.0, "31105": 1116261376.0, "31110": 1116261376.0, "31115": 1116261376.0, "31120": 1116261376.0, "31125": 1116261376.0, "31130": 1116261376.0, "31135": 1116261376.0, "31140": 1116261376.0, "31145": 
1116261376.0, "31150": 1116261376.0, "31155": 1116261376.0, "31160": 1116261376.0, "31165": 1116261376.0, "31170": 1116261376.0, "31175": 1116261376.0, "31180": 1116261376.0, "31185": 1116261376.0, "31190": 1116261376.0, "31195": 1116261376.0, "31200": 1116261376.0, "31205": 1116261376.0, "31210": 1116261376.0, "31215": 1116261376.0, "31220": 1116261376.0, "31225": 1116261376.0, "31230": 1116261376.0, "31235": 1116261376.0, "31240": 1116261376.0, "31245": 1116261376.0, "31250": 1116261376.0, "31255": 1116261376.0, "31260": 1116261376.0, "31265": 1116261376.0, "31270": 1116261376.0, "31275": 1116261376.0, "31280": 1116261376.0, "31285": 1116261376.0, "31290": 1116261376.0, "31295": 1116261376.0, "31300": 1116261376.0, "31305": 1116261376.0, "31310": 1116261376.0, "31315": 1116261376.0, "31320": 1116261376.0, "31325": 1116261376.0, "31330": 1116261376.0, "31335": 1116261376.0, "31340": 1116261376.0, "31345": 1116261376.0, "31350": 1116261376.0, "31355": 1116261376.0, "31360": 1116261376.0, "31365": 1116261376.0, "31370": 1116261376.0, "31375": 1116261376.0, "31380": 1116261376.0, "31385": 1116261376.0, "31390": 1116261376.0, "31395": 1116261376.0, "31400": 1116261376.0, "31405": 1116261376.0, "31410": 1116261376.0, "31415": 1116261376.0, "31420": 1116261376.0, "31425": 1116261376.0, "31430": 1116261376.0, "31435": 1116261376.0, "31440": 1116261376.0, "31445": 1116261376.0, "31450": 1116261376.0, "31455": 1116261376.0, "31460": 1116261376.0, "31465": 1116261376.0, "31470": 1116261376.0, "31475": 1116261376.0, "31480": 1116261376.0, "31485": 1116261376.0, "31490": 1116261376.0, "31495": 1116261376.0, "31500": 1116261376.0, "31505": 1116261376.0, "31510": 1116261376.0, "31515": 1116261376.0, "31520": 1116261376.0, "31525": 1116261376.0, "31530": 1116261376.0, "31535": 1116261376.0, "31540": 1116261376.0, "31545": 1116261376.0, "31550": 1116261376.0, "31555": 1116261376.0, "31560": 1116261376.0, "31565": 1116261376.0, "31570": 1116261376.0, "31575": 1116261376.0, "31580": 1116261376.0, "31585": 1116261376.0, "31590": 1116261376.0, "31595": 1116261376.0, "31600": 1116261376.0, "31605": 1116261376.0, "31610": 1116261376.0, "31615": 1116261376.0, "31620": 1116261376.0, "31625": 1116261376.0, "31630": 1116261376.0, "31635": 1116261376.0, "31640": 1116261376.0, "31645": 1116261376.0, "31650": 1116261376.0, "31655": 1116261376.0, "31660": 1116261376.0, "31665": 1116261376.0, "31670": 1116261376.0, "31675": 1116261376.0, "31680": 1116261376.0, "31685": 1116261376.0, "31690": 1116261376.0, "31695": 1116261376.0, "31700": 1116261376.0, "31705": 1116261376.0, "31710": 1116261376.0, "31715": 1116261376.0, "31720": 1116261376.0, "31725": 1116261376.0, "31730": 1116261376.0, "31735": 1116261376.0, "31740": 1116261376.0, "31745": 1116261376.0, "31750": 1116261376.0, "31755": 1116261376.0, "31760": 1116261376.0, "31765": 1116261376.0, "31770": 1116261376.0, "31775": 1116261376.0, "31780": 1116261376.0, "31785": 1116261376.0, "31790": 1116261376.0, "31795": 1116261376.0, "31800": 1116261376.0, "31805": 1116261376.0, "31810": 1116261376.0, "31815": 1116261376.0, "31820": 1116261376.0, "31825": 1116261376.0, "31830": 1116261376.0, "31835": 1116261376.0, "31840": 1116261376.0, "31845": 1116261376.0, "31850": 1116261376.0, "31855": 1116261376.0, "31860": 1116261376.0, "31865": 1116261376.0, "31870": 1116261376.0, "31875": 1116261376.0, "31880": 1116261376.0, "31885": 1116261376.0, "31890": 1116261376.0, "31895": 1116261376.0, "31900": 1116261376.0, "31905": 1116261376.0, "31910": 1116261376.0, "31915": 
1116261376.0, "31920": 1116261376.0, "31925": 1116261376.0, "31930": 1116261376.0, "31935": 1116261376.0, "31940": 1116261376.0, "31945": 1116261376.0, "31950": 1116261376.0, "31955": 1116261376.0, "31960": 1116261376.0, "31965": 1116261376.0, "31970": 1116261376.0, "31975": 1116261376.0, "31980": 1116261376.0, "31985": 1116261376.0, "31990": 1116261376.0, "31995": 1116261376.0, "32000": 1116261376.0, "32005": 1116261376.0, "32010": 1116261376.0, "32015": 1116261376.0, "32020": 1116261376.0, "32025": 1116261376.0, "32030": 1116261376.0, "32035": 1116261376.0, "32040": 1116261376.0, "32045": 1116261376.0, "32050": 1116261376.0, "32055": 1116261376.0, "32060": 1116261376.0, "32065": 1116261376.0, "32070": 1116261376.0, "32075": 1116261376.0, "32080": 1116261376.0, "32085": 1116261376.0, "32090": 1116261376.0, "32095": 1116261376.0, "32100": 1116261376.0, "32105": 1116261376.0, "32110": 1116261376.0, "32115": 1116261376.0, "32120": 1116261376.0, "32125": 1116261376.0, "32130": 1116261376.0, "32135": 1116261376.0, "32140": 1116261376.0, "32145": 1116261376.0, "32150": 1116261376.0, "32155": 1116261376.0, "32160": 1116261376.0, "32165": 1116261376.0, "32170": 1116261376.0, "32175": 1116261376.0, "32180": 1116261376.0, "32185": 1116261376.0, "32190": 1116261376.0, "32195": 1116261376.0, "32200": 1116261376.0, "32205": 1116261376.0, "32210": 1116261376.0, "32215": 1116261376.0, "32220": 1116261376.0, "32225": 1116261376.0, "32230": 1116261376.0, "32235": 1116261376.0, "32240": 1116261376.0, "32245": 1116261376.0, "32250": 1116261376.0, "32255": 1116261376.0, "32260": 1116261376.0, "32265": 1116261376.0, "32270": 1116261376.0, "32275": 1116261376.0, "32280": 1116261376.0, "32285": 1116261376.0, "32290": 1116261376.0, "32295": 1116261376.0, "32300": 1116261376.0, "32305": 1116261376.0, "32310": 1116261376.0, "32315": 1116261376.0, "32320": 1116261376.0, "32325": 1116261376.0, "32330": 1116261376.0, "32335": 1116261376.0, "32340": 1116261376.0, "32345": 1116261376.0, "32350": 1116261376.0, "32355": 1116261376.0, "32360": 1116261376.0, "32365": 1116261376.0, "32370": 1116261376.0, "32375": 1116261376.0, "32380": 1116261376.0, "32385": 1116261376.0, "32390": 1116261376.0, "32395": 1116261376.0, "32400": 1116261376.0, "32405": 1116261376.0, "32410": 1116261376.0, "32415": 1116261376.0, "32420": 1116261376.0, "32425": 1116261376.0, "32430": 1116261376.0, "32435": 1116261376.0, "32440": 1116261376.0, "32445": 1116261376.0, "32450": 1116261376.0, "32455": 1116261376.0, "32460": 1116261376.0, "32465": 1116261376.0, "32470": 1116261376.0, "32475": 1116261376.0, "32480": 1116261376.0, "32485": 1116261376.0, "32490": 1116261376.0, "32495": 1116261376.0, "32500": 1116261376.0, "32505": 1116261376.0, "32510": 1116261376.0, "32515": 1116261376.0, "32520": 1116261376.0, "32525": 1116261376.0, "32530": 1116261376.0, "32535": 1116261376.0, "32540": 1116261376.0, "32545": 1116261376.0, "32550": 1116261376.0, "32555": 1116261376.0, "32560": 1116261376.0, "32565": 1116261376.0, "32570": 1116261376.0, "32575": 1116261376.0, "32580": 1116261376.0, "32585": 1116261376.0, "32590": 1116261376.0, "32595": 1116261376.0, "32600": 1116261376.0, "32605": 1116261376.0, "32610": 1116261376.0, "32615": 1116261376.0, "32620": 1116261376.0, "32625": 1116261376.0, "32630": 1116261376.0, "32635": 1116261376.0, "32640": 1116261376.0, "32645": 1116261376.0, "32650": 1116261376.0, "32655": 1116261376.0, "32660": 1116261376.0, "32665": 1116261376.0, "32670": 1116261376.0, "32675": 1116261376.0, "32680": 1116261376.0, "32685": 
1116261376.0, "32690": 1116261376.0, "32695": 1116261376.0, "32700": 1116261376.0, "32705": 1116261376.0, "32710": 1116261376.0, "32715": 1116261376.0, "32720": 1116261376.0, "32725": 1116261376.0, "32730": 1116261376.0, "32735": 1116261376.0, "32740": 1116261376.0, "32745": 1116261376.0, "32750": 1116261376.0, "32755": 1116261376.0, "32760": 1116261376.0, "32765": 1116261376.0, "32770": 1116261376.0, "32775": 1116261376.0, "32780": 1116261376.0, "32785": 1116261376.0, "32790": 1116261376.0, "32795": 1116261376.0, "32800": 1116261376.0, "32805": 1116261376.0, "32810": 1116261376.0, "32815": 1116261376.0, "32820": 1116261376.0, "32825": 1116261376.0, "32830": 1116261376.0, "32835": 1116261376.0, "32840": 1116261376.0, "32845": 1116261376.0, "32850": 1116261376.0, "32855": 1116261376.0, "32860": 1116261376.0, "32865": 1116261376.0, "32870": 1116261376.0, "32875": 1116261376.0, "32880": 1116261376.0, "32885": 1116261376.0, "32890": 1116261376.0, "32895": 1116261376.0, "32900": 1116261376.0, "32905": 1116261376.0, "32910": 1116261376.0, "32915": 1116261376.0, "32920": 1116261376.0, "32925": 1116261376.0, "32930": 1116261376.0, "32935": 1116261376.0, "32940": 1116261376.0, "32945": 1116261376.0, "32950": 1116261376.0, "32955": 1116261376.0, "32960": 1116261376.0, "32965": 1116261376.0, "32970": 1116261376.0, "32975": 1116261376.0, "32980": 1116261376.0, "32985": 1116261376.0, "32990": 1116261376.0, "32995": 1116261376.0, "33000": 1116261376.0, "33005": 1116261376.0, "33010": 1116261376.0, "33015": 1116261376.0, "33020": 1116261376.0, "33025": 1116261376.0, "33030": 1116261376.0, "33035": 1116261376.0, "33040": 1116261376.0, "33045": 1116261376.0, "33050": 1116261376.0, "33055": 1116261376.0, "33060": 1116261376.0, "33065": 1116261376.0, "33070": 1116261376.0, "33075": 1116261376.0, "33080": 1116261376.0, "33085": 1116261376.0, "33090": 1116261376.0, "33095": 1116261376.0, "33100": 1116261376.0, "33105": 1116261376.0, "33110": 1116261376.0, "33115": 1116261376.0, "33120": 1116261376.0, "33125": 1116261376.0, "33130": 1116261376.0, "33135": 1116261376.0, "33140": 1116261376.0, "33145": 1116261376.0, "33150": 1116261376.0, "33155": 1116261376.0, "33160": 1116261376.0, "33165": 1116261376.0, "33170": 1116261376.0, "33175": 1116261376.0, "33180": 1116261376.0, "33185": 1116261376.0, "33190": 1116261376.0, "33195": 1116261376.0, "33200": 1116261376.0, "33205": 1116261376.0, "33210": 1116261376.0, "33215": 1116261376.0, "33220": 1116261376.0, "33225": 1116261376.0, "33230": 1116261376.0, "33235": 1116261376.0, "33240": 1116261376.0, "33245": 1116261376.0, "33250": 1116261376.0, "33255": 1116261376.0, "33260": 1116261376.0, "33265": 1116261376.0, "33270": 1116261376.0, "33275": 1116261376.0, "33280": 1116261376.0, "33285": 1116261376.0, "33290": 1116261376.0, "33295": 1116261376.0, "33300": 1116261376.0, "33305": 1116261376.0, "33310": 1116261376.0, "33315": 1116261376.0, "33320": 1116261376.0, "33325": 1116261376.0, "33330": 1116261376.0, "33335": 1116261376.0, "33340": 1116261376.0, "33345": 1116261376.0, "33350": 1116261376.0, "33355": 1116261376.0, "33360": 1116261376.0, "33365": 1116261376.0, "33370": 1116261376.0, "33375": 1116261376.0, "33380": 1116261376.0, "33385": 1116261376.0, "33390": 1116261376.0, "33395": 1116261376.0, "33400": 1116261376.0, "33405": 1116261376.0, "33410": 1116261376.0, "33415": 1116261376.0, "33420": 1116261376.0, "33425": 1116261376.0, "33430": 1116261376.0, "33435": 1116261376.0, "33440": 1116261376.0, "33445": 1116261376.0, "33450": 1116261376.0, "33455": 
1116261376.0, "33460": 1116261376.0, "33465": 1116261376.0, "33470": 1116261376.0, "33475": 1116261376.0, "33480": 1116261376.0, "33485": 1116261376.0, "33490": 1116261376.0, "33495": 1116261376.0, "33500": 1116261376.0, "33505": 1116261376.0, "33510": 1116261376.0, "33515": 1116261376.0, "33520": 1116261376.0, "33525": 1116261376.0, "33530": 1116261376.0, "33535": 1116261376.0, "33540": 1116261376.0, "33545": 1116261376.0, "33550": 1116261376.0, "33555": 1116261376.0, "33560": 1116261376.0, "33565": 1116261376.0, "33570": 1116261376.0, "33575": 1116261376.0, "33580": 1116261376.0, "33585": 1116261376.0, "33590": 1116261376.0, "33595": 1116261376.0, "33600": 1116261376.0, "33605": 1116261376.0, "33610": 1116261376.0, "33615": 1116261376.0, "33620": 1116261376.0, "33625": 1116261376.0, "33630": 1116261376.0, "33635": 1116261376.0, "33640": 1116261376.0, "33645": 1116261376.0, "33650": 1116261376.0, "33655": 1116261376.0, "33660": 1116261376.0, "33665": 1116261376.0, "33670": 1116261376.0, "33675": 1116261376.0, "33680": 1116261376.0, "33685": 1116261376.0, "33690": 1116261376.0, "33695": 1116261376.0, "33700": 1116261376.0, "33705": 1116261376.0, "33710": 1116261376.0, "33715": 1116261376.0, "33720": 1116261376.0, "33725": 1116261376.0, "33730": 1116261376.0, "33735": 1116261376.0, "33740": 1116261376.0, "33745": 1116261376.0, "33750": 1116261376.0, "33755": 1116261376.0, "33760": 1116261376.0, "33765": 1116261376.0, "33770": 1116261376.0, "33775": 1116261376.0, "33780": 1116261376.0, "33785": 1116261376.0, "33790": 1116261376.0, "33795": 1116261376.0, "33800": 1116261376.0, "33805": 1116261376.0, "33810": 1116261376.0, "33815": 1116261376.0, "33820": 1116261376.0, "33825": 1116261376.0, "33830": 1116261376.0, "33835": 1116261376.0, "33840": 1116261376.0, "33845": 1116261376.0, "33850": 1116261376.0, "33855": 1116261376.0, "33860": 1116261376.0, "33865": 1116261376.0, "33870": 1116261376.0, "33875": 1116261376.0, "33880": 1116261376.0, "33885": 1116261376.0, "33890": 1116261376.0, "33895": 1116261376.0, "33900": 1116261376.0, "33905": 1116261376.0, "33910": 1116261376.0, "33915": 1116261376.0, "33920": 1116261376.0, "33925": 1116261376.0, "33930": 1116261376.0, "33935": 1116261376.0, "33940": 1116261376.0, "33945": 1116261376.0, "33950": 1116261376.0, "33955": 1116261376.0, "33960": 1116261376.0, "33965": 1116261376.0, "33970": 1116261376.0, "33975": 1116261376.0, "33980": 1116261376.0, "33985": 1116261376.0, "33990": 1116261376.0, "33995": 1116261376.0, "34000": 1116261376.0, "34005": 1116261376.0, "34010": 1116261376.0, "34015": 1116261376.0, "34020": 1116261376.0, "34025": 1116261376.0, "34030": 1116261376.0, "34035": 1116261376.0, "34040": 1116261376.0, "34045": 1116261376.0, "34050": 1116261376.0, "34055": 1116261376.0, "34060": 1116261376.0, "34065": 1116261376.0, "34070": 1116261376.0, "34075": 1116261376.0, "34080": 1116261376.0, "34085": 1116261376.0, "34090": 1116261376.0, "34095": 1116261376.0, "34100": 1116261376.0, "34105": 1116261376.0, "34110": 1116261376.0, "34115": 1116261376.0, "34120": 1116261376.0, "34125": 1116261376.0, "34130": 1116261376.0, "34135": 1116261376.0, "34140": 1116261376.0, "34145": 1116261376.0, "34150": 1116261376.0, "34155": 1116261376.0, "34160": 1116261376.0, "34165": 1116261376.0, "34170": 1116261376.0, "34175": 1116261376.0, "34180": 1116261376.0, "34185": 1116261376.0, "34190": 1116261376.0, "34195": 1116261376.0, "34200": 1116261376.0, "34205": 1116261376.0, "34210": 1116261376.0, "34215": 1116261376.0, "34220": 1116261376.0, "34225": 
1116261376.0, "34230": 1116261376.0, "34235": 1116261376.0, "34240": 1116261376.0, "34245": 1116261376.0, "34250": 1116261376.0, "34255": 1116261376.0, "34260": 1116261376.0, "34265": 1116261376.0, "34270": 1116261376.0, "34275": 1116261376.0, "34280": 1116261376.0, "34285": 1116261376.0, "34290": 1116261376.0, "34295": 1116261376.0, "34300": 1116261376.0, "34305": 1116261376.0, "34310": 1116261376.0, "34315": 1116261376.0, "34320": 1116261376.0, "34325": 1116261376.0, "34330": 1116261376.0, "34335": 1116261376.0, "34340": 1116261376.0, "34345": 1116261376.0, "34350": 1116261376.0, "34355": 1116261376.0, "34360": 1116261376.0, "34365": 1116261376.0, "34370": 1116261376.0, "34375": 1116261376.0, "34380": 1116261376.0, "34385": 1116261376.0, "34390": 1116261376.0, "34395": 1116261376.0, "34400": 1116261376.0, "34405": 1116261376.0, "34410": 1116261376.0, "34415": 1116261376.0, "34420": 1116261376.0, "34425": 1116261376.0, "34430": 1116261376.0, "34435": 1116261376.0, "34440": 1116261376.0, "34445": 1116261376.0, "34450": 1116261376.0, "34455": 1116261376.0, "34460": 1116261376.0, "34465": 1116261376.0, "34470": 1116261376.0, "34475": 1116261376.0, "34480": 1116261376.0, "34485": 1116261376.0, "34490": 1116261376.0, "34495": 1116261376.0, "34500": 1116261376.0, "34505": 1116261376.0, "34510": 1116261376.0, "34515": 1116261376.0, "34520": 1116261376.0, "34525": 1116261376.0, "34530": 1116261376.0, "34535": 1116261376.0, "34540": 1116261376.0, "34545": 1116261376.0, "34550": 1116261376.0, "34555": 1116261376.0, "34560": 1116261376.0, "34565": 1116261376.0, "34570": 1116261376.0, "34575": 1116261376.0, "34580": 1116261376.0, "34585": 1116261376.0, "34590": 1116261376.0, "34595": 1116261376.0, "34600": 1116261376.0, "34605": 1116261376.0, "34610": 1116261376.0, "34615": 1116261376.0, "34620": 1116261376.0, "34625": 1116261376.0, "34630": 1116261376.0, "34635": 1116261376.0, "34640": 1116261376.0, "34645": 1116261376.0, "34650": 1116261376.0, "34655": 1116261376.0, "34660": 1116261376.0, "34665": 1116261376.0, "34670": 1116261376.0, "34675": 1116261376.0, "34680": 1116261376.0, "34685": 1116261376.0, "34690": 1116261376.0, "34695": 1116261376.0, "34700": 1116261376.0, "34705": 1116261376.0, "34710": 1116261376.0, "34715": 1116261376.0, "34720": 1116261376.0, "34725": 1116261376.0, "34730": 1116261376.0, "34735": 1116261376.0, "34740": 1116261376.0, "34745": 1116261376.0, "34750": 1116261376.0, "34755": 1116261376.0, "34760": 1116261376.0, "34765": 1116261376.0, "34770": 1116261376.0, "34775": 1116261376.0, "34780": 1116261376.0, "34785": 1116261376.0, "34790": 1116261376.0, "34795": 1116261376.0, "34800": 1116261376.0, "34805": 1116261376.0, "34810": 1116261376.0, "34815": 1116261376.0, "34820": 1116261376.0, "34825": 1116261376.0, "34830": 1116261376.0, "34835": 1116261376.0, "34840": 1116261376.0, "34845": 1116261376.0, "34850": 1116261376.0, "34855": 1116261376.0, "34860": 1116261376.0, "34865": 1116261376.0, "34870": 1116261376.0, "34875": 1116261376.0, "34880": 1116261376.0, "34885": 1116261376.0, "34890": 1116261376.0, "34895": 1116261376.0, "34900": 1116261376.0, "34905": 1116261376.0, "34910": 1116261376.0, "34915": 1116261376.0, "34920": 1116261376.0, "34925": 1116261376.0, "34930": 1116261376.0, "34935": 1116261376.0, "34940": 1116261376.0, "34945": 1116261376.0, "34950": 1116261376.0, "34955": 1116261376.0, "34960": 1116261376.0, "34965": 1116261376.0, "34970": 1116261376.0, "34975": 1116261376.0, "34980": 1116261376.0, "34985": 1116261376.0, "34990": 1116261376.0, "34995": 
1116261376.0, "35000": 1116261376.0, "35005": 1116261376.0, "35010": 1116261376.0, "35015": 1116261376.0, "35020": 1116261376.0, "35025": 1116261376.0, "35030": 1116261376.0, "35035": 1116261376.0, "35040": 1116261376.0, "35045": 1116261376.0, "35050": 1116261376.0, "35055": 1116261376.0, "35060": 1116261376.0, "35065": 1116261376.0, "35070": 1116261376.0, "35075": 1116261376.0, "35080": 1116261376.0, "35085": 1116261376.0, "35090": 1116261376.0, "35095": 1116261376.0, "35100": 1116261376.0, "35105": 1116261376.0, "35110": 1116261376.0, "35115": 1116261376.0, "35120": 1116261376.0, "35125": 1116261376.0, "35130": 1116261376.0, "35135": 1116261376.0, "35140": 1116261376.0, "35145": 1116261376.0, "35150": 1116261376.0, "35155": 1116261376.0, "35160": 1116261376.0, "35165": 1116261376.0, "35170": 1116261376.0, "35175": 1116261376.0, "35180": 1116261376.0, "35185": 1116261376.0, "35190": 1116261376.0, "35195": 1116261376.0, "35200": 1116261376.0, "35205": 1116261376.0, "35210": 1116261376.0, "35215": 1116261376.0, "35220": 1116261376.0, "35225": 1116261376.0, "35230": 1116261376.0, "35235": 1116261376.0, "35240": 1116261376.0, "35245": 1116261376.0, "35250": 1116261376.0, "35255": 1116261376.0, "35260": 1116261376.0, "35265": 1116261376.0, "35270": 1116261376.0, "35275": 1116261376.0, "35280": 1116261376.0, "35285": 1116261376.0, "35290": 1116261376.0, "35295": 1116261376.0, "35300": 1116261376.0, "35305": 1116261376.0, "35310": 1116261376.0, "35315": 1116261376.0, "35320": 1116261376.0, "35325": 1116261376.0, "35330": 1116261376.0, "35335": 1116261376.0, "35340": 1116261376.0, "35345": 1116261376.0, "35350": 1116261376.0, "35355": 1116261376.0, "35360": 1116261376.0, "35365": 1116261376.0, "35370": 1116261376.0, "35375": 1116261376.0, "35380": 1116261376.0, "35385": 1116261376.0, "35390": 1116261376.0, "35395": 1116261376.0, "35400": 1116261376.0, "35405": 1116261376.0, "35410": 1116261376.0, "35415": 1116261376.0, "35420": 1116261376.0, "35425": 1116261376.0, "35430": 1116261376.0, "35435": 1116261376.0, "35440": 1116261376.0, "35445": 1116261376.0, "35450": 1116261376.0, "35455": 1116261376.0, "35460": 1116261376.0, "35465": 1116261376.0, "35470": 1116261376.0, "35475": 1116261376.0, "35480": 1116261376.0, "35485": 1116261376.0, "35490": 1116261376.0, "35495": 1116261376.0, "35500": 1116261376.0, "35505": 1116261376.0, "35510": 1116261376.0, "35515": 1116261376.0, "35520": 1116261376.0, "35525": 1116261376.0, "35530": 1116261376.0, "35535": 1116261376.0, "35540": 1116261376.0, "35545": 1116261376.0, "35550": 1116261376.0, "35555": 1116261376.0, "35560": 1116261376.0, "35565": 1116261376.0, "35570": 1116261376.0, "35575": 1116261376.0, "35580": 1116261376.0, "35585": 1116261376.0, "35590": 1116261376.0, "35595": 1116261376.0, "35600": 1116261376.0, "35605": 1116261376.0, "35610": 1116261376.0, "35615": 1116261376.0, "35620": 1116261376.0, "35625": 1116261376.0, "35630": 1116261376.0, "35635": 1116261376.0, "35640": 1116261376.0, "35645": 1116261376.0, "35650": 1116261376.0, "35655": 1116261376.0, "35660": 1116261376.0, "35665": 1116261376.0, "35670": 1116261376.0, "35675": 1116261376.0, "35680": 1116261376.0, "35685": 1116261376.0, "35690": 1116261376.0, "35695": 1116261376.0, "35700": 1116261376.0, "35705": 1116261376.0, "35710": 1116261376.0, "35715": 1116261376.0, "35720": 1116261376.0, "35725": 1116261376.0, "35730": 1116261376.0, "35735": 1116261376.0, "35740": 1116261376.0, "35745": 1116261376.0, "35750": 1116261376.0, "35755": 1116261376.0, "35760": 1116261376.0, "35765": 
1116261376.0, "35770": 1116261376.0, "35775": 1116261376.0, "35780": 1116261376.0, "35785": 1116261376.0, "35790": 1116261376.0, "35795": 1116261376.0, "35800": 1116261376.0, "35805": 1116261376.0, "35810": 1116261376.0, "35815": 1116261376.0, "35820": 1116261376.0, "35825": 1116261376.0, "35830": 1116261376.0, "35835": 1116261376.0, "35840": 1116261376.0, "35845": 1116261376.0, "35850": 1116261376.0, "35855": 1116261376.0, "35860": 1116261376.0, "35865": 1116261376.0, "35870": 1116261376.0, "35875": 1116261376.0, "35880": 1116261376.0, "35885": 1116261376.0, "35890": 1116261376.0, "35895": 1116261376.0, "35900": 1116261376.0, "35905": 1116261376.0, "35910": 1116261376.0, "35915": 1116261376.0, "35920": 1116261376.0, "35925": 1116261376.0, "35930": 1116261376.0, "35935": 1116261376.0, "35940": 1116261376.0, "35945": 1116261376.0, "35950": 1116261376.0, "35955": 1116261376.0, "35960": 1116261376.0, "35965": 1116261376.0, "35970": 1116261376.0, "35975": 1116261376.0, "35980": 1116261376.0, "35985": 1116261376.0, "35990": 1116261376.0, "35995": 1116261376.0, "36000": 1116261376.0, "36005": 1116261376.0, "36010": 1116261376.0, "36015": 1116261376.0, "36020": 1116261376.0, "36025": 1116261376.0, "36030": 1116261376.0, "36035": 1116261376.0, "36040": 1116261376.0, "36045": 1116261376.0, "36050": 1116261376.0, "36055": 1116261376.0, "36060": 1116261376.0, "36065": 1116261376.0, "36070": 1116261376.0, "36075": 1116261376.0, "36080": 1116261376.0, "36085": 1116261376.0, "36090": 1116261376.0, "36095": 1116261376.0, "36100": 1116261376.0, "36105": 1116261376.0, "36110": 1116261376.0, "36115": 1116261376.0, "36120": 1116261376.0, "36125": 1116261376.0, "36130": 1116261376.0, "36135": 1116261376.0, "36140": 1116261376.0, "36145": 1116261376.0, "36150": 1116261376.0, "36155": 1116261376.0, "36160": 1116261376.0, "36165": 1116261376.0, "36170": 1116261376.0, "36175": 1116261376.0, "36180": 1116261376.0, "36185": 1116261376.0, "36190": 1116261376.0, "36195": 1116261376.0, "36200": 1116261376.0, "36205": 1116261376.0, "36210": 1116261376.0, "36215": 1116261376.0, "36220": 1116261376.0, "36225": 1116261376.0, "36230": 1116261376.0, "36235": 1116261376.0, "36240": 1116261376.0, "36245": 1116261376.0, "36250": 1116261376.0, "36255": 1116261376.0, "36260": 1116261376.0, "36265": 1116261376.0, "36270": 1116261376.0, "36275": 1116261376.0, "36280": 1116261376.0, "36285": 1116261376.0, "36290": 1116261376.0, "36295": 1116261376.0, "36300": 1116261376.0, "36305": 1116261376.0, "36310": 1116261376.0, "36315": 1116261376.0, "36320": 1116261376.0, "36325": 1116261376.0, "36330": 1116261376.0, "36335": 1116261376.0, "36340": 1116261376.0, "36345": 1116261376.0, "36350": 1116261376.0, "36355": 1116261376.0, "36360": 1116261376.0, "36365": 1116261376.0, "36370": 1116261376.0, "36375": 1116261376.0, "36380": 1116261376.0, "36385": 1116261376.0, "36390": 1116261376.0, "36395": 1116261376.0, "36400": 1116261376.0, "36405": 1116261376.0, "36410": 1116261376.0, "36415": 1116261376.0, "36420": 1116261376.0, "36425": 1116261376.0, "36430": 1116261376.0, "36435": 1116261376.0, "36440": 1116261376.0, "36445": 1116261376.0, "36450": 1116261376.0, "36455": 1116261376.0, "36460": 1116261376.0, "36465": 1116261376.0, "36470": 1116261376.0, "36475": 1116261376.0, "36480": 1116261376.0, "36485": 1116261376.0, "36490": 1116261376.0, "36495": 1116261376.0, "36500": 1116261376.0, "36505": 1116261376.0, "36510": 1116261376.0, "36515": 1116261376.0, "36520": 1116261376.0, "36525": 1116261376.0, "36530": 1116261376.0, "36535": 
1116261376.0, "36540": 1116261376.0, "36545": 1116261376.0, "36550": 1116261376.0, "36555": 1116261376.0, "36560": 1116261376.0, "36565": 1116261376.0, "36570": 1116261376.0, "36575": 1116261376.0, "36580": 1116261376.0, "36585": 1116261376.0, "36590": 1116261376.0, "36595": 1116261376.0, "36600": 1116261376.0, "36605": 1116261376.0, "36610": 1116261376.0, "36615": 1116261376.0, "36620": 1116261376.0, "36625": 1116261376.0, "36630": 1116261376.0, "36635": 1116261376.0, "36640": 1116261376.0, "36645": 1116261376.0, "36650": 1116261376.0, "36655": 1116261376.0, "36660": 1116261376.0, "36665": 1116261376.0, "36670": 1116261376.0, "36675": 1116261376.0, "36680": 1116261376.0, "36685": 1116261376.0, "36690": 1116261376.0, "36695": 1116261376.0, "36700": 1116261376.0, "36705": 1116261376.0, "36710": 1116261376.0, "36715": 1116261376.0, "36720": 1116261376.0, "36725": 1116261376.0, "36730": 1116261376.0, "36735": 1116261376.0, "36740": 1116261376.0, "36745": 1116261376.0, "36750": 1116261376.0, "36755": 1116261376.0, "36760": 1116261376.0, "36765": 1116261376.0, "36770": 1116261376.0, "36775": 1116261376.0, "36780": 1116261376.0, "36785": 1116261376.0, "36790": 1116261376.0, "36795": 1116261376.0, "36800": 1116261376.0, "36805": 1116261376.0, "36810": 1116261376.0, "36815": 1116261376.0, "36820": 1116261376.0, "36825": 1116261376.0, "36830": 1116261376.0, "36835": 1116261376.0, "36840": 1116261376.0, "36845": 1116261376.0, "36850": 1116261376.0, "36855": 1116261376.0, "36860": 1116261376.0, "36865": 1116261376.0, "36870": 1116261376.0, "36875": 1116261376.0, "36880": 1116261376.0, "36885": 1116261376.0, "36890": 1116261376.0, "36895": 1116261376.0, "36900": 1116261376.0, "36905": 1116261376.0, "36910": 1116261376.0, "36915": 1116261376.0, "36920": 1116261376.0, "36925": 1116261376.0, "36930": 1116261376.0, "36935": 1116261376.0, "36940": 1116261376.0, "36945": 1116261376.0, "36950": 1116261376.0, "36955": 1116261376.0, "36960": 1116261376.0, "36965": 1116261376.0, "36970": 1116261376.0, "36975": 1116261376.0, "36980": 1116261376.0, "36985": 1116261376.0, "36990": 1116261376.0, "36995": 1116261376.0, "37000": 1116261376.0, "37005": 1116261376.0, "37010": 1116261376.0, "37015": 1116261376.0, "37020": 1116261376.0, "37025": 1116261376.0, "37030": 1116261376.0, "37035": 1116261376.0, "37040": 1116261376.0, "37045": 1116261376.0, "37050": 1116261376.0, "37055": 1116261376.0, "37060": 1116261376.0, "37065": 1116261376.0, "37070": 1116261376.0, "37075": 1116261376.0, "37080": 1116261376.0, "37085": 1116261376.0, "37090": 1116261376.0, "37095": 1116261376.0, "37100": 1116261376.0, "37105": 1116261376.0, "37110": 1116261376.0, "37115": 1116261376.0, "37120": 1116261376.0, "37125": 1116261376.0, "37130": 1116261376.0, "37135": 1116261376.0, "37140": 1116261376.0, "37145": 1116261376.0, "37150": 1116261376.0, "37155": 1116261376.0, "37160": 1116261376.0, "37165": 1116261376.0, "37170": 1116261376.0, "37175": 1116261376.0, "37180": 1116261376.0, "37185": 1116261376.0, "37190": 1116261376.0, "37195": 1116261376.0, "37200": 1116261376.0, "37205": 1116261376.0, "37210": 1116261376.0, "37215": 1116261376.0, "37220": 1116261376.0, "37225": 1116261376.0, "37230": 1116261376.0, "37235": 1116261376.0, "37240": 1116261376.0, "37245": 1116261376.0, "37250": 1116261376.0, "37255": 1116261376.0, "37260": 1116261376.0, "37265": 1116261376.0, "37270": 1116261376.0, "37275": 1116261376.0, "37280": 1116261376.0, "37285": 1116261376.0, "37290": 1116261376.0, "37295": 1116261376.0, "37300": 1116261376.0, "37305": 
1116261376.0, "37310": 1116261376.0, "37315": 1116261376.0, "37320": 1116261376.0, "37325": 1116261376.0, "37330": 1116261376.0, "37335": 1116261376.0, "37340": 1116261376.0, "37345": 1116261376.0, "37350": 1116261376.0, "37355": 1116261376.0, "37360": 1116261376.0, "37365": 1116261376.0, "37370": 1116261376.0, "37375": 1116261376.0, "37380": 1116261376.0, "37385": 1116261376.0, "37390": 1116261376.0, "37395": 1116261376.0, "37400": 1116261376.0, "37405": 1116261376.0, "37410": 1116261376.0, "37415": 1116261376.0, "37420": 1116261376.0, "37425": 1116261376.0, "37430": 1116261376.0, "37435": 1116261376.0, "37440": 1116261376.0, "37445": 1116261376.0, "37450": 1116261376.0, "37455": 1116261376.0, "37460": 1116261376.0, "37465": 1116261376.0, "37470": 1116261376.0, "37475": 1116261376.0, "37480": 1116261376.0, "37485": 1116261376.0, "37490": 1116261376.0, "37495": 1116261376.0, "37500": 1116261376.0, "37505": 1116261376.0, "37510": 1116261376.0, "37515": 1116261376.0, "37520": 1116261376.0, "37525": 1116261376.0, "37530": 1116261376.0, "37535": 1116261376.0, "37540": 1116261376.0, "37545": 1116261376.0, "37550": 1116261376.0, "37555": 1116261376.0, "37560": 1116261376.0, "37565": 1116261376.0, "37570": 1116261376.0, "37575": 1116261376.0, "37580": 1116261376.0, "37585": 1116261376.0, "37590": 1116261376.0, "37595": 1116261376.0, "37600": 1116261376.0, "37605": 1116261376.0, "37610": 1116261376.0, "37615": 1116261376.0, "37620": 1116261376.0, "37625": 1116261376.0, "37630": 1116261376.0, "37635": 1116261376.0, "37640": 1116261376.0, "37645": 1116261376.0, "37650": 1116261376.0, "37655": 1116261376.0, "37660": 1116261376.0, "37665": 1116261376.0, "37670": 1116261376.0, "37675": 1116261376.0, "37680": 1116261376.0, "37685": 1116261376.0, "37690": 1116261376.0, "37695": 1116261376.0, "37700": 1116261376.0, "37705": 1116261376.0, "37710": 1116261376.0, "37715": 1116261376.0, "37720": 1116261376.0, "37725": 1116261376.0, "37730": 1116261376.0, "37735": 1116261376.0, "37740": 1116261376.0, "37745": 1116261376.0, "37750": 1116261376.0, "37755": 1116261376.0, "37760": 1116261376.0, "37765": 1116261376.0, "37770": 1116261376.0, "37775": 1116261376.0, "37780": 1116261376.0, "37785": 1116261376.0, "37790": 1116261376.0, "37795": 1116261376.0, "37800": 1116261376.0, "37805": 1116261376.0, "37810": 1116261376.0, "37815": 1116261376.0, "37820": 1116261376.0, "37825": 1116261376.0, "37830": 1116261376.0, "37835": 1116261376.0, "37840": 1116261376.0, "37845": 1116261376.0, "37850": 1116261376.0, "37855": 1116261376.0, "37860": 1116261376.0, "37865": 1116261376.0, "37870": 1116261376.0, "37875": 1116261376.0, "37880": 1116261376.0, "37885": 1116261376.0, "37890": 1116261376.0, "37895": 1116261376.0, "37900": 1116261376.0, "37905": 1116261376.0, "37910": 1116261376.0, "37915": 1116261376.0, "37920": 1116261376.0, "37925": 1116261376.0, "37930": 1116261376.0, "37935": 1116261376.0, "37940": 1116261376.0, "37945": 1116261376.0, "37950": 1116261376.0, "37955": 1116261376.0, "37960": 1116261376.0, "37965": 1116261376.0, "37970": 1116261376.0, "37975": 1116261376.0, "37980": 1116261376.0, "37985": 1116261376.0, "37990": 1116261376.0, "37995": 1116261376.0, "38000": 1116261376.0, "38005": 1116261376.0, "38010": 1116261376.0, "38015": 1116261376.0, "38020": 1116261376.0, "38025": 1116261376.0, "38030": 1116261376.0, "38035": 1116261376.0, "38040": 1116261376.0, "38045": 1116261376.0, "38050": 1116261376.0, "38055": 1116261376.0, "38060": 1116261376.0, "38065": 1116261376.0, "38070": 1116261376.0, "38075": 
1116261376.0, "38080": 1116261376.0, "38085": 1116261376.0, "38090": 1116261376.0, "38095": 1116261376.0, "38100": 1116261376.0, "38105": 1116261376.0, "38110": 1116261376.0, "38115": 1116261376.0, "38120": 1116261376.0, "38125": 1116261376.0, "38130": 1116261376.0, "38135": 1116261376.0, "38140": 1116261376.0, "38145": 1116261376.0, "38150": 1116261376.0, "38155": 1116261376.0, "38160": 1116261376.0, "38165": 1116261376.0, "38170": 1116261376.0, "38175": 1116261376.0, "38180": 1116261376.0, "38185": 1116261376.0, "38190": 1116261376.0, "38195": 1116261376.0, "38200": 1116261376.0, "38205": 1116261376.0, "38210": 1116261376.0, "38215": 1116261376.0, "38220": 1116261376.0, "38225": 1116261376.0, "38230": 1116261376.0, "38235": 1116261376.0, "38240": 1116261376.0, "38245": 1116261376.0, "38250": 1116261376.0, "38255": 1116261376.0, "38260": 1116261376.0, "38265": 1116261376.0, "38270": 1116261376.0, "38275": 1116261376.0, "38280": 1116261376.0, "38285": 1116261376.0, "38290": 1116261376.0, "38295": 1116261376.0, "38300": 1116261376.0, "38305": 1116261376.0, "38310": 1116261376.0, "38315": 1116261376.0, "38320": 1116261376.0, "38325": 1116261376.0, "38330": 1116261376.0, "38335": 1116261376.0, "38340": 1116261376.0, "38345": 1116261376.0, "38350": 1116261376.0, "38355": 1116261376.0, "38360": 1116261376.0, "38365": 1116261376.0, "38370": 1116261376.0, "38375": 1116261376.0, "38380": 1116261376.0, "38385": 1116261376.0, "38390": 1116261376.0, "38395": 1116261376.0, "38400": 1116261376.0, "38405": 1116261376.0, "38410": 1116261376.0, "38415": 1116261376.0, "38420": 1116261376.0, "38425": 1116261376.0, "38430": 1116261376.0, "38435": 1116261376.0, "38440": 1116261376.0, "38445": 1116261376.0, "38450": 1116261376.0, "38455": 1116261376.0, "38460": 1116261376.0, "38465": 1116261376.0, "38470": 1116261376.0, "38475": 1116261376.0, "38480": 1116261376.0, "38485": 1116261376.0, "38490": 1116261376.0, "38495": 1116261376.0, "38500": 1116261376.0, "38505": 1116261376.0, "38510": 1116261376.0, "38515": 1116261376.0, "38520": 1116261376.0, "38525": 1116261376.0, "38530": 1116261376.0, "38535": 1116261376.0, "38540": 1116261376.0, "38545": 1116261376.0, "38550": 1116261376.0, "38555": 1116261376.0, "38560": 1116261376.0, "38565": 1116261376.0, "38570": 1116261376.0, "38575": 1116261376.0, "38580": 1116261376.0, "38585": 1116261376.0, "38590": 1116261376.0, "38595": 1116261376.0, "38600": 1116261376.0, "38605": 1116261376.0, "38610": 1116261376.0, "38615": 1116261376.0, "38620": 1116261376.0, "38625": 1116261376.0, "38630": 1116261376.0, "38635": 1116261376.0, "38640": 1116261376.0, "38645": 1116261376.0, "38650": 1116261376.0, "38655": 1116261376.0, "38660": 1116261376.0, "38665": 1116261376.0, "38670": 1116261376.0, "38675": 1116261376.0, "38680": 1116261376.0, "38685": 1116261376.0, "38690": 1116261376.0, "38695": 1116261376.0, "38700": 1116261376.0, "38705": 1116261376.0, "38710": 1116261376.0, "38715": 1116261376.0, "38720": 1116261376.0, "38725": 1116261376.0, "38730": 1116261376.0, "38735": 1116261376.0, "38740": 1116261376.0, "38745": 1116261376.0, "38750": 1116261376.0, "38755": 1116261376.0, "38760": 1116261376.0, "38765": 1116261376.0, "38770": 1116261376.0, "38775": 1116261376.0, "38780": 1116261376.0, "38785": 1116261376.0, "38790": 1116261376.0, "38795": 1116261376.0, "38800": 1116261376.0, "38805": 1116261376.0, "38810": 1116261376.0, "38815": 1116261376.0, "38820": 1116261376.0, "38825": 1116261376.0, "38830": 1116261376.0, "38835": 1116261376.0, "38840": 1116261376.0, "38845": 
1116261376.0, "38850": 1116261376.0, "38855": 1116261376.0, "38860": 1116261376.0, "38865": 1116261376.0, "38870": 1116261376.0, "38875": 1116261376.0, "38880": 1116261376.0, "38885": 1116261376.0, "38890": 1116261376.0, "38895": 1116261376.0, "38900": 1116261376.0, "38905": 1116261376.0, "38910": 1116261376.0, "38915": 1116261376.0, "38920": 1116261376.0, "38925": 1116261376.0, "38930": 1116261376.0, "38935": 1116261376.0, "38940": 1116261376.0, "38945": 1116261376.0, "38950": 1116261376.0, "38955": 1116261376.0, "38960": 1116261376.0, "38965": 1116261376.0, "38970": 1116261376.0, "38975": 1116261376.0, "38980": 1116261376.0, "38985": 1116261376.0, "38990": 1116261376.0, "38995": 1116261376.0, "39000": 1116261376.0, "39005": 1116261376.0, "39010": 1116261376.0, "39015": 1116261376.0, "39020": 1116261376.0, "39025": 1116261376.0, "39030": 1116261376.0, "39035": 1116261376.0, "39040": 1116261376.0, "39045": 1116261376.0, "39050": 1116261376.0, "39055": 1116261376.0, "39060": 1116261376.0, "39065": 1116261376.0, "39070": 1116261376.0, "39075": 1116261376.0, "39080": 1116261376.0, "39085": 1116261376.0, "39090": 1116261376.0, "39095": 1116261376.0, "39100": 1116261376.0, "39105": 1116261376.0, "39110": 1116261376.0, "39115": 1116261376.0, "39120": 1116261376.0, "39125": 1116261376.0, "39130": 1116261376.0, "39135": 1116261376.0, "39140": 1116261376.0, "39145": 1116261376.0, "39150": 1116261376.0, "39155": 1116261376.0, "39160": 1116261376.0, "39165": 1116261376.0, "39170": 1116261376.0, "39175": 1116261376.0, "39180": 1116261376.0, "39185": 1116261376.0, "39190": 1116261376.0, "39195": 1116261376.0, "39200": 1116261376.0, "39205": 1116261376.0, "39210": 1116261376.0, "39215": 1116261376.0, "39220": 1116261376.0, "39225": 1116261376.0, "39230": 1116261376.0, "39235": 1116261376.0, "39240": 1116261376.0, "39245": 1116261376.0, "39250": 1116261376.0, "39255": 1116261376.0, "39260": 1116261376.0, "39265": 1116261376.0, "39270": 1116261376.0, "39275": 1116261376.0, "39280": 1116261376.0, "39285": 1116261376.0, "39290": 1116261376.0, "39295": 1116261376.0, "39300": 1116261376.0, "39305": 1116261376.0, "39310": 1116261376.0, "39315": 1116261376.0, "39320": 1116261376.0, "39325": 1116261376.0, "39330": 1116261376.0, "39335": 1116261376.0, "39340": 1116261376.0, "39345": 1116261376.0, "39350": 1116261376.0, "39355": 1116261376.0, "39360": 1116261376.0, "39365": 1116261376.0, "39370": 1116261376.0, "39375": 1116261376.0, "39380": 1116261376.0, "39385": 1116261376.0, "39390": 1116261376.0, "39395": 1116261376.0, "39400": 1116261376.0, "39405": 1116261376.0, "39410": 1116261376.0, "39415": 1116261376.0, "39420": 1116261376.0, "39425": 1116261376.0, "39430": 1116261376.0, "39435": 1116261376.0, "39440": 1116261376.0, "39445": 1116261376.0, "39450": 1116261376.0, "39455": 1116261376.0, "39460": 1116261376.0, "39465": 1116261376.0, "39470": 1116261376.0, "39475": 1116261376.0, "39480": 1116261376.0, "39485": 1116261376.0, "39490": 1116261376.0, "39495": 1116261376.0, "39500": 1116261376.0, "39505": 1116261376.0, "39510": 1116261376.0, "39515": 1116261376.0, "39520": 1116261376.0, "39525": 1116261376.0, "39530": 1116261376.0, "39535": 1116261376.0, "39540": 1116261376.0, "39545": 1116261376.0, "39550": 1116261376.0, "39555": 1116261376.0, "39560": 1116261376.0, "39565": 1116261376.0, "39570": 1116261376.0, "39575": 1116261376.0, "39580": 1116261376.0, "39585": 1116261376.0, "39590": 1116261376.0, "39595": 1116261376.0, "39600": 1116261376.0, "39605": 1116261376.0, "39610": 1116261376.0, "39615": 
1116261376.0, "39620": 1116261376.0, "39625": 1116261376.0, "39630": 1116261376.0, "39635": 1116261376.0, "39640": 1116261376.0, "39645": 1116261376.0, "39650": 1116261376.0, "39655": 1116261376.0, "39660": 1116261376.0, "39665": 1116261376.0, "39670": 1116261376.0, "39675": 1116261376.0, "39680": 1116261376.0, "39685": 1116261376.0, "39690": 1116261376.0, "39695": 1116261376.0, "39700": 1116261376.0, "39705": 1116261376.0, "39710": 1116261376.0, "39715": 1116261376.0, "39720": 1116261376.0, "39725": 1116261376.0, "39730": 1116261376.0, "39735": 1116261376.0, "39740": 1116261376.0, "39745": 1116261376.0, "39750": 1116261376.0, "39755": 1116261376.0, "39760": 1116261376.0, "39765": 1116261376.0, "39770": 1116261376.0, "39775": 1116261376.0, "39780": 1116261376.0, "39785": 1116261376.0, "39790": 1116261376.0, "39795": 1116261376.0, "39800": 1116261376.0, "39805": 1116261376.0, "39810": 1116261376.0, "39815": 1116261376.0, "39820": 1116261376.0, "39825": 1116261376.0, "39830": 1116261376.0, "39835": 1116261376.0, "39840": 1116261376.0, "39845": 1116261376.0, "39850": 1116261376.0, "39855": 1116261376.0, "39860": 1116261376.0, "39865": 1116261376.0, "39870": 1116261376.0, "39875": 1116261376.0, "39880": 1116261376.0, "39885": 1116261376.0, "39890": 1116261376.0, "39895": 1116261376.0, "39900": 1116261376.0, "39905": 1116261376.0, "39910": 1116261376.0, "39915": 1116261376.0, "39920": 1116261376.0, "39925": 1116261376.0, "39930": 1116261376.0, "39935": 1116261376.0, "39940": 1116261376.0, "39945": 1116261376.0, "39950": 1116261376.0, "39955": 1116261376.0, "39960": 1116261376.0, "39965": 1116261376.0, "39970": 1116261376.0, "39975": 1116261376.0, "39980": 1116261376.0, "39985": 1116261376.0, "39990": 1116261376.0, "39995": 1116261376.0, "40000": 1116261376.0, "40005": 1116261376.0, "40010": 1116261376.0, "40015": 1116261376.0, "40020": 1116261376.0, "40025": 1116261376.0, "40030": 1116261376.0, "40035": 1116261376.0, "40040": 1116261376.0, "40045": 1116261376.0, "40050": 1116261376.0, "40055": 1116261376.0, "40060": 1116261376.0, "40065": 1116261376.0, "40070": 1116261376.0, "40075": 1116261376.0, "40080": 1116261376.0, "40085": 1116261376.0, "40090": 1116261376.0, "40095": 1116261376.0, "40100": 1116261376.0, "40105": 1116261376.0, "40110": 1116261376.0, "40115": 1116261376.0, "40120": 1116261376.0, "40125": 1116261376.0, "40130": 1116261376.0, "40135": 1116261376.0, "40140": 1116261376.0, "40145": 1116261376.0, "40150": 1116261376.0, "40155": 1116261376.0, "40160": 1116261376.0, "40165": 1116261376.0, "40170": 1116261376.0, "40175": 1116261376.0, "40180": 1116261376.0, "40185": 1116261376.0, "40190": 1116261376.0, "40195": 1116261376.0, "40200": 1116261376.0, "40205": 1116261376.0, "40210": 1116261376.0, "40215": 1116261376.0, "40220": 1116261376.0, "40225": 1116261376.0, "40230": 1116261376.0, "40235": 1116261376.0, "40240": 1116261376.0, "40245": 1116261376.0, "40250": 1116261376.0, "40255": 1116261376.0, "40260": 1116261376.0, "40265": 1116261376.0, "40270": 1116261376.0, "40275": 1116261376.0, "40280": 1116261376.0, "40285": 1116261376.0, "40290": 1116261376.0, "40295": 1116261376.0, "40300": 1116261376.0, "40305": 1116261376.0, "40310": 1116261376.0, "40315": 1116261376.0, "40320": 1116261376.0, "40325": 1116261376.0, "40330": 1116261376.0, "40335": 1116261376.0, "40340": 1116261376.0, "40345": 1116261376.0, "40350": 1116261376.0, "40355": 1116261376.0, "40360": 1116261376.0, "40365": 1116261376.0, "40370": 1116261376.0, "40375": 1116261376.0, "40380": 1116261376.0, "40385": 
1116261376.0, "40390": 1116261376.0, "40395": 1116261376.0, "40400": 1116261376.0, "40405": 1116261376.0, "40410": 1116261376.0, "40415": 1116261376.0, "40420": 1116261376.0, "40425": 1116261376.0, "40430": 1116261376.0, "40435": 1116261376.0, "40440": 1116261376.0, "40445": 1116261376.0, "40450": 1116261376.0, "40455": 1116261376.0, "40460": 1116261376.0, "40465": 1116261376.0, "40470": 1116261376.0, "40475": 1116261376.0, "40480": 1116261376.0, "40485": 1116261376.0, "40490": 1116261376.0, "40495": 1116261376.0, "40500": 1116261376.0, "40505": 1116261376.0, "40510": 1116261376.0, "40515": 1116261376.0, "40520": 1116261376.0, "40525": 1116261376.0, "40530": 1116261376.0, "40535": 1116261376.0, "40540": 1116261376.0, "40545": 1116261376.0, "40550": 1116261376.0, "40555": 1116261376.0, "40560": 1116261376.0, "40565": 1116261376.0, "40570": 1116261376.0, "40575": 1116261376.0, "40580": 1116261376.0, "40585": 1116261376.0, "40590": 1116261376.0, "40595": 1116261376.0, "40600": 1116261376.0, "40605": 1116261376.0, "40610": 1116261376.0, "40615": 1116261376.0, "40620": 1116261376.0, "40625": 1116261376.0, "40630": 1116261376.0, "40635": 1116261376.0, "40640": 1116261376.0, "40645": 1116261376.0, "40650": 1116261376.0, "40655": 1116261376.0, "40660": 1116261376.0, "40665": 1116261376.0, "40670": 1116261376.0, "40675": 1116261376.0, "40680": 1116261376.0, "40685": 1116261376.0, "40690": 1116261376.0, "40695": 1116261376.0, "40700": 1116261376.0, "40705": 1116261376.0, "40710": 1116261376.0, "40715": 1116261376.0, "40720": 1116261376.0, "40725": 1116261376.0, "40730": 1116261376.0, "40735": 1116261376.0, "40740": 1116261376.0, "40745": 1116261376.0, "40750": 1116261376.0, "40755": 1116261376.0, "40760": 1116261376.0, "40765": 1116261376.0, "40770": 1116261376.0, "40775": 1116261376.0, "40780": 1116261376.0, "40785": 1116261376.0, "40790": 1116261376.0, "40795": 1116261376.0, "40800": 1116261376.0, "40805": 1116261376.0, "40810": 1116261376.0, "40815": 1116261376.0, "40820": 1116261376.0, "40825": 1116261376.0, "40830": 1116261376.0, "40835": 1116261376.0, "40840": 1116261376.0, "40845": 1116261376.0, "40850": 1116261376.0, "40855": 1116261376.0, "40860": 1116261376.0, "40865": 1116261376.0, "40870": 1116261376.0, "40875": 1116261376.0, "40880": 1116261376.0, "40885": 1116261376.0, "40890": 1116261376.0, "40895": 1116261376.0, "40900": 1116261376.0, "40905": 1116261376.0, "40910": 1116261376.0, "40915": 1116261376.0, "40920": 1116261376.0, "40925": 1116261376.0, "40930": 1116261376.0, "40935": 1116261376.0, "40940": 1116261376.0, "40945": 1116261376.0, "40950": 1116261376.0, "40955": 1116261376.0, "40960": 1116261376.0, "40965": 1116261376.0, "40970": 1116261376.0, "40975": 1116261376.0, "40980": 1116261376.0, "40985": 1116261376.0, "40990": 1116261376.0, "40995": 1116261376.0, "41000": 1116261376.0, "41005": 1116261376.0, "41010": 1116261376.0, "41015": 1116261376.0, "41020": 1116261376.0, "41025": 1116261376.0, "41030": 1116261376.0, "41035": 1116261376.0, "41040": 1116261376.0, "41045": 1116261376.0, "41050": 1116261376.0, "41055": 1116261376.0, "41060": 1116261376.0, "41065": 1116261376.0, "41070": 1116261376.0, "41075": 1116261376.0, "41080": 1116261376.0, "41085": 1116261376.0, "41090": 1116261376.0, "41095": 1116261376.0, "41100": 1116261376.0, "41105": 1116261376.0, "41110": 1116261376.0, "41115": 1116261376.0, "41120": 1116261376.0, "41125": 1116261376.0, "41130": 1116261376.0, "41135": 1116261376.0, "41140": 1116261376.0, "41145": 1116261376.0, "41150": 1116261376.0, "41155": 
1116261376.0, "41160": 1116261376.0, "41165": 1116261376.0, "41170": 1116261376.0, "41175": 1116261376.0, "41180": 1116261376.0, "41185": 1116261376.0, "41190": 1116261376.0, "41195": 1116261376.0, "41200": 1116261376.0, "41205": 1116261376.0, "41210": 1116261376.0, "41215": 1116261376.0, "41220": 1116261376.0, "41225": 1116261376.0, "41230": 1116261376.0, "41235": 1116261376.0, "41240": 1116261376.0, "41245": 1116261376.0, "41250": 1116261376.0, "41255": 1116261376.0, "41260": 1116261376.0, "41265": 1116261376.0, "41270": 1116261376.0, "41275": 1116261376.0, "41280": 1116261376.0, "41285": 1116261376.0, "41290": 1116261376.0, "41295": 1116261376.0, "41300": 1116261376.0, "41305": 1116261376.0, "41310": 1116261376.0, "41315": 1116261376.0, "41320": 1116261376.0, "41325": 1116261376.0, "41330": 1116261376.0, "41335": 1116261376.0, "41340": 1116261376.0, "41345": 1116261376.0, "41350": 1116261376.0, "41355": 1116261376.0, "41360": 1116261376.0, "41365": 1116261376.0, "41370": 1116261376.0, "41375": 1116261376.0, "41380": 1116261376.0, "41385": 1116261376.0, "41390": 1116261376.0, "41395": 1116261376.0, "41400": 1116261376.0, "41405": 1116261376.0, "41410": 1116261376.0, "41415": 1116261376.0, "41420": 1116261376.0, "41425": 1116261376.0, "41430": 1116261376.0, "41435": 1116261376.0, "41440": 1116261376.0, "41445": 1116261376.0, "41450": 1116261376.0, "41455": 1116261376.0, "41460": 1116261376.0, "41465": 1116261376.0, "41470": 1116261376.0, "41475": 1116261376.0, "41480": 1116261376.0, "41485": 1116261376.0, "41490": 1116261376.0, "41495": 1116261376.0, "41500": 1116261376.0, "41505": 1116261376.0, "41510": 1116261376.0, "41515": 1116261376.0, "41520": 1116261376.0, "41525": 1116261376.0, "41530": 1116261376.0, "41535": 1116261376.0, "41540": 1116261376.0, "41545": 1116261376.0, "41550": 1116261376.0, "41555": 1116261376.0, "41560": 1116261376.0, "41565": 1116261376.0, "41570": 1116261376.0, "41575": 1116261376.0, "41580": 1116261376.0, "41585": 1116261376.0, "41590": 1116261376.0, "41595": 1116261376.0, "41600": 1116261376.0, "41605": 1116261376.0, "41610": 1116261376.0, "41615": 1116261376.0, "41620": 1116261376.0, "41625": 1116261376.0, "41630": 1116261376.0, "41635": 1116261376.0, "41640": 1116261376.0, "41645": 1116261376.0, "41650": 1116261376.0, "41655": 1116261376.0, "41660": 1116261376.0, "41665": 1116261376.0, "41670": 1116261376.0, "41675": 1116261376.0, "41680": 1116261376.0, "41685": 1116261376.0, "41690": 1116261376.0, "41695": 1116261376.0, "41700": 1116261376.0, "41705": 1116261376.0, "41710": 1116261376.0, "41715": 1116261376.0, "41720": 1116261376.0, "41725": 1116261376.0, "41730": 1116261376.0, "41735": 1116261376.0, "41740": 1116261376.0, "41745": 1116261376.0, "41750": 1116261376.0, "41755": 1116261376.0, "41760": 1116261376.0, "41765": 1116261376.0, "41770": 1116261376.0, "41775": 1116261376.0, "41780": 1116261376.0, "41785": 1116261376.0, "41790": 1116261376.0, "41795": 1116261376.0, "41800": 1116261376.0, "41805": 1116261376.0, "41810": 1116261376.0, "41815": 1116261376.0, "41820": 1116261376.0, "41825": 1116261376.0, "41830": 1116261376.0, "41835": 1116261376.0, "41840": 1116261376.0, "41845": 1116261376.0, "41850": 1116261376.0, "41855": 1116261376.0, "41860": 1116261376.0, "41865": 1116261376.0, "41870": 1116261376.0, "41875": 1116261376.0, "41880": 1116261376.0, "41885": 1116261376.0, "41890": 1116261376.0, "41895": 1116261376.0, "41900": 1116261376.0, "41905": 1116261376.0, "41910": 1116261376.0, "41915": 1116261376.0, "41920": 1116261376.0, "41925": 
1116261376.0, "41930": 1116261376.0, "41935": 1116261376.0, "41940": 1116261376.0, "41945": 1116261376.0, "41950": 1116261376.0, "41955": 1116261376.0, "41960": 1116261376.0, "41965": 1116261376.0, "41970": 1116261376.0, "41975": 1116261376.0, "41980": 1116261376.0, "41985": 1116261376.0, "41990": 1116261376.0, "41995": 1116261376.0, "42000": 1116261376.0, "42005": 1116261376.0, "42010": 1116261376.0, "42015": 1116261376.0, "42020": 1116261376.0, "42025": 1116261376.0, "42030": 1116261376.0, "42035": 1116261376.0, "42040": 1116261376.0, "42045": 1116261376.0, "42050": 1116261376.0, "42055": 1116261376.0, "42060": 1116261376.0, "42065": 1116261376.0, "42070": 1116261376.0, "42075": 1116261376.0, "42080": 1116261376.0, "42085": 1116261376.0, "42090": 1116261376.0, "42095": 1116261376.0, "42100": 1116261376.0, "42105": 1116261376.0, "42110": 1116261376.0, "42115": 1116261376.0, "42120": 1116261376.0, "42125": 1116261376.0, "42130": 1116261376.0, "42135": 1116261376.0, "42140": 1116261376.0, "42145": 1116261376.0, "42150": 1116261376.0, "42155": 1116261376.0, "42160": 1116261376.0, "42165": 1116261376.0, "42170": 1116261376.0, "42175": 1116261376.0, "42180": 1116261376.0, "42185": 1116261376.0, "42190": 1116261376.0, "42195": 1116261376.0, "42200": 1116261376.0, "42205": 1116261376.0, "42210": 1116261376.0, "42215": 1116261376.0, "42220": 1116261376.0, "42225": 1116261376.0, "42230": 1116261376.0, "42235": 1116261376.0, "42240": 1116261376.0, "42245": 1116261376.0, "42250": 1116261376.0, "42255": 1116261376.0, "42260": 1116261376.0, "42265": 1116261376.0, "42270": 1116261376.0, "42275": 1116261376.0, "42280": 1116261376.0, "42285": 1116261376.0, "42290": 1116261376.0, "42295": 1116261376.0, "42300": 1116261376.0, "42305": 1116261376.0, "42310": 1116261376.0, "42315": 1116261376.0, "42320": 1116261376.0, "42325": 1116261376.0, "42330": 1116261376.0, "42335": 1116261376.0, "42340": 1116261376.0, "42345": 1116261376.0, "42350": 1116261376.0, "42355": 1116261376.0, "42360": 1116261376.0, "42365": 1116261376.0, "42370": 1116261376.0, "42375": 1116261376.0, "42380": 1116261376.0, "42385": 1116261376.0, "42390": 1116261376.0, "42395": 1116261376.0, "42400": 1116261376.0, "42405": 1116261376.0, "42410": 1116261376.0, "42415": 1116261376.0, "42420": 1116261376.0, "42425": 1116261376.0, "42430": 1116261376.0, "42435": 1116261376.0, "42440": 1116261376.0, "42445": 1116261376.0, "42450": 1116261376.0, "42455": 1116261376.0, "42460": 1116261376.0, "42465": 1116261376.0, "42470": 1116261376.0, "42475": 1116261376.0, "42480": 1116261376.0, "42485": 1116261376.0, "42490": 1116261376.0, "42495": 1116261376.0, "42500": 1116261376.0, "42505": 1116261376.0, "42510": 1116261376.0, "42515": 1116261376.0, "42520": 1116261376.0, "42525": 1116261376.0, "42530": 1116261376.0, "42535": 1116261376.0, "42540": 1116261376.0, "42545": 1116261376.0, "42550": 1116261376.0, "42555": 1116261376.0, "42560": 1116261376.0, "42565": 1116261376.0, "42570": 1116261376.0, "42575": 1116261376.0, "42580": 1116261376.0, "42585": 1116261376.0, "42590": 1116261376.0, "42595": 1116261376.0, "42600": 1116261376.0, "42605": 1116261376.0, "42610": 1116261376.0, "42615": 1116261376.0, "42620": 1116261376.0, "42625": 1116261376.0, "42630": 1116261376.0, "42635": 1116261376.0, "42640": 1116261376.0, "42645": 1116261376.0, "42650": 1116261376.0, "42655": 1116261376.0, "42660": 1116261376.0, "42665": 1116261376.0, "42670": 1116261376.0, "42675": 1116261376.0, "42680": 1116261376.0, "42685": 1116261376.0, "42690": 1116261376.0, "42695": 
1116261376.0, "42700": 1116261376.0, "42705": 1116261376.0, "42710": 1116261376.0, "42715": 1116261376.0, "42720": 1116261376.0, "42725": 1116261376.0, "42730": 1116261376.0, "42735": 1116261376.0, "42740": 1116261376.0, "42745": 1116261376.0, "42750": 1116261376.0, "42755": 1116261376.0, "42760": 1116261376.0, "42765": 1116261376.0, "42770": 1116261376.0, "42775": 1116261376.0, "42780": 1116261376.0, "42785": 1116261376.0, "42790": 1116261376.0, "42795": 1116261376.0, "42800": 1116261376.0, "42805": 1116261376.0, "42810": 1116261376.0, "42815": 1116261376.0, "42820": 1116261376.0, "42825": 1116261376.0, "42830": 1116261376.0, "42835": 1116261376.0, "42840": 1116261376.0, "42845": 1116261376.0, "42850": 1116261376.0, "42855": 1116261376.0, "42860": 1116261376.0, "42865": 1116261376.0, "42870": 1116261376.0, "42875": 1116261376.0, "42880": 1116261376.0, "42885": 1116261376.0, "42890": 1116261376.0, "42895": 1116261376.0, "42900": 1116261376.0, "42905": 1116261376.0, "42910": 1116261376.0, "42915": 1116261376.0, "42920": 1116261376.0, "42925": 1116261376.0, "42930": 1116261376.0, "42935": 1116261376.0, "42940": 1116261376.0, "42945": 1116261376.0, "42950": 1116261376.0, "42955": 1116261376.0, "42960": 1116261376.0, "42965": 1116261376.0, "42970": 1116261376.0, "42975": 1116261376.0, "42980": 1116261376.0, "42985": 1116261376.0, "42990": 1116261376.0, "42995": 1116261376.0, "43000": 1116261376.0, "43005": 1116261376.0, "43010": 1116261376.0, "43015": 1116261376.0, "43020": 1116261376.0, "43025": 1116261376.0, "43030": 1116261376.0, "43035": 1116261376.0, "43040": 1116261376.0, "43045": 1116261376.0, "43050": 1116261376.0, "43055": 1116261376.0, "43060": 1116261376.0, "43065": 1116261376.0, "43070": 1116261376.0, "43075": 1116261376.0, "43080": 1116261376.0, "43085": 1116261376.0, "43090": 1116261376.0, "43095": 1116261376.0, "43100": 1116261376.0, "43105": 1116261376.0, "43110": 1116261376.0, "43115": 1116261376.0, "43120": 1116261376.0, "43125": 1116261376.0, "43130": 1116261376.0, "43135": 1116261376.0, "43140": 1116261376.0, "43145": 1116261376.0, "43150": 1116261376.0, "43155": 1116261376.0, "43160": 1116261376.0, "43165": 1116261376.0, "43170": 1116261376.0, "43175": 1116261376.0, "43180": 1116261376.0, "43185": 1116261376.0, "43190": 1116261376.0, "43195": 1116261376.0, "43200": 1116261376.0, "43205": 1116261376.0, "43210": 1116261376.0, "43215": 1116261376.0, "43220": 1116261376.0, "43225": 1116261376.0, "43230": 1116261376.0, "43235": 1116261376.0, "43240": 1116261376.0, "43245": 1116261376.0, "43250": 1116261376.0, "43255": 1116261376.0, "43260": 1116261376.0, "43265": 1116261376.0, "43270": 1116261376.0, "43275": 1116261376.0, "43280": 1116261376.0, "43285": 1116261376.0, "43290": 1116261376.0, "43295": 1116261376.0, "43300": 1116261376.0, "43305": 1116261376.0, "43310": 1116261376.0, "43315": 1116261376.0, "43320": 1116261376.0, "43325": 1116261376.0, "43330": 1116261376.0, "43335": 1116261376.0, "43340": 1116261376.0, "43345": 1116261376.0, "43350": 1116261376.0, "43355": 1116261376.0, "43360": 1116261376.0, "43365": 1116261376.0, "43370": 1116261376.0, "43375": 1116261376.0, "43380": 1116261376.0, "43385": 1116261376.0, "43390": 1116261376.0, "43395": 1116261376.0, "43400": 1116261376.0, "43405": 1116261376.0, "43410": 1116261376.0, "43415": 1116261376.0, "43420": 1116261376.0, "43425": 1116261376.0, "43430": 1116261376.0, "43435": 1116261376.0, "43440": 1116261376.0, "43445": 1116261376.0, "43450": 1116261376.0, "43455": 1116261376.0, "43460": 1116261376.0, "43465": 
1116261376.0, "43470": 1116261376.0, "43475": 1116261376.0, "43480": 1116261376.0, "43485": 1116261376.0, "43490": 1116261376.0, "43495": 1116261376.0, "43500": 1116261376.0, "43505": 1116261376.0, "43510": 1116261376.0, "43515": 1116261376.0, "43520": 1116261376.0, "43525": 1116261376.0, "43530": 1116261376.0, "43535": 1116261376.0, "43540": 1116261376.0, "43545": 1116261376.0, "43550": 1116261376.0, "43555": 1116261376.0, "43560": 1116261376.0, "43565": 1116261376.0, "43570": 1116261376.0, "43575": 1116261376.0, "43580": 1116261376.0, "43585": 1116261376.0, "43590": 1116261376.0, "43595": 1116261376.0, "43600": 1116261376.0, "43605": 1116261376.0, "43610": 1116261376.0, "43615": 1116261376.0, "43620": 1116261376.0, "43625": 1116261376.0, "43630": 1116261376.0, "43635": 1116261376.0, "43640": 1116261376.0, "43645": 1116261376.0, "43650": 1116261376.0, "43655": 1116261376.0, "43660": 1116261376.0, "43665": 1116261376.0, "43670": 1116261376.0, "43675": 1116261376.0, "43680": 1116261376.0, "43685": 1116261376.0, "43690": 1116261376.0, "43695": 1116261376.0, "43700": 1116261376.0, "43705": 1116261376.0, "43710": 1116261376.0, "43715": 1116261376.0, "43720": 1116261376.0, "43725": 1116261376.0, "43730": 1116261376.0, "43735": 1116261376.0, "43740": 1116261376.0, "43745": 1116261376.0, "43750": 1116261376.0, "43755": 1116261376.0, "43760": 1116261376.0, "43765": 1116261376.0, "43770": 1116261376.0, "43775": 1116261376.0, "43780": 1116261376.0, "43785": 1116261376.0, "43790": 1116261376.0, "43795": 1116261376.0, "43800": 1116261376.0, "43805": 1116261376.0, "43810": 1116261376.0, "43815": 1116261376.0, "43820": 1116261376.0, "43825": 1116261376.0, "43830": 1116261376.0, "43835": 1116261376.0, "43840": 1116261376.0, "43845": 1116261376.0, "43850": 1116261376.0, "43855": 1116261376.0, "43860": 1116261376.0, "43865": 1116261376.0, "43870": 1116261376.0, "43875": 1116261376.0, "43880": 1116261376.0, "43885": 1116261376.0, "43890": 1116261376.0, "43895": 1116261376.0, "43900": 1116261376.0, "43905": 1116261376.0, "43910": 1116261376.0, "43915": 1116261376.0, "43920": 1116261376.0, "43925": 1116261376.0, "43930": 1116261376.0, "43935": 1116261376.0, "43940": 1116261376.0, "43945": 1116261376.0, "43950": 1116261376.0, "43955": 1116261376.0, "43960": 1116261376.0, "43965": 1116261376.0, "43970": 1116261376.0, "43975": 1116261376.0, "43980": 1116261376.0, "43985": 1116261376.0, "43990": 1116261376.0, "43995": 1116261376.0, "44000": 1116261376.0, "44005": 1116261376.0, "44010": 1116261376.0, "44015": 1116261376.0, "44020": 1116261376.0, "44025": 1116261376.0, "44030": 1116261376.0, "44035": 1116261376.0, "44040": 1116261376.0, "44045": 1116261376.0, "44050": 1116261376.0, "44055": 1116261376.0, "44060": 1116261376.0, "44065": 1116261376.0, "44070": 1116261376.0, "44075": 1116261376.0, "44080": 1116261376.0, "44085": 1116261376.0, "44090": 1116261376.0, "44095": 1116261376.0, "44100": 1116261376.0, "44105": 1116261376.0, "44110": 1116261376.0, "44115": 1116261376.0, "44120": 1116261376.0, "44125": 1116261376.0, "44130": 1116261376.0, "44135": 1116261376.0, "44140": 1116261376.0, "44145": 1116261376.0, "44150": 1116261376.0, "44155": 1116261376.0, "44160": 1116261376.0, "44165": 1116261376.0, "44170": 1116261376.0, "44175": 1116261376.0, "44180": 1116261376.0, "44185": 1116261376.0, "44190": 1116261376.0, "44195": 1116261376.0, "44200": 1116261376.0, "44205": 1116261376.0, "44210": 1116261376.0, "44215": 1116261376.0, "44220": 1116261376.0, "44225": 1116261376.0, "44230": 1116261376.0, "44235": 
1116261376.0, "44240": 1116261376.0, "44245": 1116261376.0, "44250": 1116261376.0, "44255": 1116261376.0, "44260": 1116261376.0, "44265": 1116261376.0, "44270": 1116261376.0, "44275": 1116261376.0, "44280": 1116261376.0, "44285": 1116261376.0, "44290": 1116261376.0, "44295": 1116261376.0, "44300": 1116261376.0, "44305": 1116261376.0, "44310": 1116261376.0, "44315": 1116261376.0, "44320": 1116261376.0, "44325": 1116261376.0, "44330": 1116261376.0, "44335": 1116261376.0, "44340": 1116261376.0, "44345": 1116261376.0, "44350": 1116261376.0, "44355": 1116261376.0, "44360": 1116261376.0, "44365": 1116261376.0, "44370": 1116261376.0, "44375": 1116261376.0, "44380": 1116261376.0, "44385": 1116261376.0, "44390": 1116261376.0, "44395": 1116261376.0, "44400": 1116261376.0, "44405": 1116261376.0, "44410": 1116261376.0, "44415": 1116261376.0, "44420": 1116261376.0, "44425": 1116261376.0, "44430": 1116261376.0, "44435": 1116261376.0, "44440": 1116261376.0, "44445": 1116261376.0, "44450": 1116261376.0, "44455": 1116261376.0, "44460": 1116261376.0, "44465": 1116261376.0, "44470": 1116261376.0, "44475": 1116261376.0, "44480": 1116261376.0, "44485": 1116261376.0, "44490": 1116261376.0, "44495": 1116261376.0, "44500": 1116261376.0, "44505": 1116261376.0, "44510": 1116261376.0, "44515": 1116261376.0, "44520": 1116261376.0, "44525": 1116261376.0, "44530": 1116261376.0, "44535": 1116261376.0, "44540": 1116261376.0, "44545": 1116261376.0, "44550": 1116261376.0, "44555": 1116261376.0, "44560": 1116261376.0, "44565": 1116261376.0, "44570": 1116261376.0, "44575": 1116261376.0, "44580": 1116261376.0, "44585": 1116261376.0, "44590": 1116261376.0, "44595": 1116261376.0, "44600": 1116261376.0, "44605": 1116261376.0, "44610": 1116261376.0, "44615": 1116261376.0, "44620": 1116261376.0, "44625": 1116261376.0, "44630": 1116261376.0, "44635": 1116261376.0, "44640": 1116261376.0, "44645": 1116261376.0, "44650": 1116261376.0, "44655": 1116261376.0, "44660": 1116261376.0, "44665": 1116261376.0, "44670": 1116261376.0, "44675": 1116261376.0, "44680": 1116261376.0, "44685": 1116261376.0, "44690": 1116261376.0, "44695": 1116261376.0, "44700": 1116261376.0, "44705": 1116261376.0, "44710": 1116261376.0, "44715": 1116261376.0, "44720": 1116261376.0, "44725": 1116261376.0, "44730": 1116261376.0, "44735": 1116261376.0, "44740": 1116261376.0, "44745": 1116261376.0, "44750": 1116261376.0, "44755": 1116261376.0, "44760": 1116261376.0, "44765": 1116261376.0, "44770": 1116261376.0, "44775": 1116261376.0, "44780": 1116261376.0, "44785": 1116261376.0, "44790": 1116261376.0, "44795": 1116261376.0, "44800": 1116261376.0, "44805": 1116261376.0, "44810": 1116261376.0, "44815": 1116261376.0, "44820": 1116261376.0, "44825": 1116261376.0, "44830": 1116261376.0, "44835": 1116261376.0, "44840": 1116261376.0, "44845": 1116261376.0, "44850": 1116261376.0, "44855": 1116261376.0, "44860": 1116261376.0, "44865": 1116261376.0, "44870": 1116261376.0, "44875": 1116261376.0, "44880": 1116261376.0, "44885": 1116261376.0, "44890": 1116261376.0, "44895": 1116261376.0, "44900": 1116261376.0, "44905": 1116261376.0, "44910": 1116261376.0, "44915": 1116261376.0, "44920": 1116261376.0, "44925": 1116261376.0, "44930": 1116261376.0, "44935": 1116261376.0, "44940": 1116261376.0, "44945": 1116261376.0, "44950": 1116261376.0, "44955": 1116261376.0, "44960": 1116261376.0, "44965": 1116261376.0, "44970": 1116261376.0, "44975": 1116261376.0, "44980": 1116261376.0, "44985": 1116261376.0, "44990": 1116261376.0, "44995": 1116261376.0, "45000": 1116261376.0, "45005": 
1116261376.0, "45010": 1116261376.0, "45015": 1116261376.0, "45020": 1116261376.0, "45025": 1116261376.0, "45030": 1116261376.0, "45035": 1116261376.0, "45040": 1116261376.0, "45045": 1116261376.0, "45050": 1116261376.0, "45055": 1116261376.0, "45060": 1116261376.0, "45065": 1116261376.0, "45070": 1116261376.0, "45075": 1116261376.0, "45080": 1116261376.0, "45085": 1116261376.0, "45090": 1116261376.0, "45095": 1116261376.0, "45100": 1116261376.0, "45105": 1116261376.0, "45110": 1116261376.0, "45115": 1116261376.0, "45120": 1116261376.0, "45125": 1116261376.0, "45130": 1116261376.0, "45135": 1116261376.0, "45140": 1116261376.0, "45145": 1116261376.0, "45150": 1116261376.0, "45155": 1116261376.0, "45160": 1116261376.0, "45165": 1116261376.0, "45170": 1116261376.0, "45175": 1116261376.0, "45180": 1116261376.0, "45185": 1116261376.0, "45190": 1116261376.0, "45195": 1116261376.0, "45200": 1116261376.0, "45205": 1116261376.0, "45210": 1116261376.0, "45215": 1116261376.0, "45220": 1116261376.0, "45225": 1116261376.0, "45230": 1116261376.0, "45235": 1116261376.0, "45240": 1116261376.0, "45245": 1116261376.0, "45250": 1116261376.0, "45255": 1116261376.0, "45260": 1116261376.0, "45265": 1116261376.0, "45270": 1116261376.0, "45275": 1116261376.0, "45280": 1116261376.0, "45285": 1116261376.0, "45290": 1116261376.0, "45295": 1116261376.0, "45300": 1116261376.0, "45305": 1116261376.0, "45310": 1116261376.0, "45315": 1116261376.0, "45320": 1116261376.0, "45325": 1116261376.0, "45330": 1116261376.0, "45335": 1116261376.0, "45340": 1116261376.0, "45345": 1116261376.0, "45350": 1116261376.0, "45355": 1116261376.0, "45360": 1116261376.0, "45365": 1116261376.0, "45370": 1116261376.0, "45375": 1116261376.0, "45380": 1116261376.0, "45385": 1116261376.0, "45390": 1116261376.0, "45395": 1116261376.0, "45400": 1116261376.0, "45405": 1116261376.0, "45410": 1116261376.0, "45415": 1116261376.0, "45420": 1116261376.0, "45425": 1116261376.0, "45430": 1116261376.0, "45435": 1116261376.0, "45440": 1116261376.0, "45445": 1116261376.0, "45450": 1116261376.0, "45455": 1116261376.0, "45460": 1116261376.0, "45465": 1116261376.0, "45470": 1116261376.0, "45475": 1116261376.0, "45480": 1116261376.0, "45485": 1116261376.0, "45490": 1116261376.0, "45495": 1116261376.0, "45500": 1116261376.0, "45505": 1116261376.0, "45510": 1116261376.0, "45515": 1116261376.0, "45520": 1116261376.0, "45525": 1116261376.0, "45530": 1116261376.0, "45535": 1116261376.0, "45540": 1116261376.0, "45545": 1116261376.0, "45550": 1116261376.0, "45555": 1116261376.0, "45560": 1116261376.0, "45565": 1116261376.0, "45570": 1116261376.0, "45575": 1116261376.0, "45580": 1116261376.0, "45585": 1116261376.0, "45590": 1116261376.0, "45595": 1116261376.0, "45600": 1116261376.0, "45605": 1116261376.0, "45610": 1116261376.0, "45615": 1116261376.0, "45620": 1116261376.0, "45625": 1116261376.0, "45630": 1116261376.0, "45635": 1116261376.0, "45640": 1116261376.0, "45645": 1116261376.0, "45650": 1116261376.0, "45655": 1116261376.0, "45660": 1116261376.0, "45665": 1116261376.0, "45670": 1116261376.0, "45675": 1116261376.0, "45680": 1116261376.0, "45685": 1116261376.0, "45690": 1116261376.0, "45695": 1116261376.0, "45700": 1116261376.0, "45705": 1116261376.0, "45710": 1116261376.0, "45715": 1116261376.0, "45720": 1116261376.0, "45725": 1116261376.0, "45730": 1116261376.0, "45735": 1116261376.0, "45740": 1116261376.0, "45745": 1116261376.0, "45750": 1116261376.0, "45755": 1116261376.0, "45760": 1116261376.0, "45765": 1116261376.0, "45770": 1116261376.0, "45775": 
1116261376.0, "45780": 1116261376.0, "45785": 1116261376.0, "45790": 1116261376.0, "45795": 1116261376.0, "45800": 1116261376.0, "45805": 1116261376.0, "45810": 1116261376.0, "45815": 1116261376.0, "45820": 1116261376.0, "45825": 1116261376.0, "45830": 1116261376.0, "45835": 1116261376.0, "45840": 1116261376.0, "45845": 1116261376.0, "45850": 1116261376.0, "45855": 1116261376.0, "45860": 1116261376.0, "45865": 1116261376.0, "45870": 1116261376.0, "45875": 1116261376.0, "45880": 1116261376.0, "45885": 1116261376.0, "45890": 1116261376.0, "45895": 1116261376.0, "45900": 1116261376.0, "45905": 1116261376.0, "45910": 1116261376.0, "45915": 1116261376.0, "45920": 1116261376.0, "45925": 1116261376.0, "45930": 1116261376.0, "45935": 1116261376.0, "45940": 1116261376.0, "45945": 1116261376.0, "45950": 1116261376.0, "45955": 1116261376.0, "45960": 1116261376.0, "45965": 1116261376.0, "45970": 1116261376.0, "45975": 1116261376.0, "45980": 1116261376.0, "45985": 1116261376.0, "45990": 1116261376.0, "45995": 1116261376.0, "46000": 1116261376.0, "46005": 1116261376.0, "46010": 1116261376.0, "46015": 1116261376.0, "46020": 1116261376.0, "46025": 1116261376.0, "46030": 1116261376.0, "46035": 1116261376.0, "46040": 1116261376.0, "46045": 1116261376.0, "46050": 1116261376.0, "46055": 1116261376.0, "46060": 1116261376.0, "46065": 1116261376.0, "46070": 1116261376.0, "46075": 1116261376.0, "46080": 1116261376.0, "46085": 1116261376.0, "46090": 1116261376.0, "46095": 1116261376.0, "46100": 1116261376.0, "46105": 1116261376.0, "46110": 1116261376.0, "46115": 1116261376.0, "46120": 1116261376.0, "46125": 1116261376.0, "46130": 1116261376.0, "46135": 1116261376.0, "46140": 1116261376.0, "46145": 1116261376.0, "46150": 1116261376.0, "46155": 1116261376.0, "46160": 1116261376.0, "46165": 1116261376.0, "46170": 1116261376.0, "46175": 1116261376.0, "46180": 1116261376.0, "46185": 1116261376.0, "46190": 1116261376.0, "46195": 1116261376.0, "46200": 1116261376.0, "46205": 1116261376.0, "46210": 1116261376.0, "46215": 1116261376.0, "46220": 1116261376.0, "46225": 1116261376.0, "46230": 1116261376.0, "46235": 1116261376.0, "46240": 1116261376.0, "46245": 1116261376.0, "46250": 1116261376.0, "46255": 1116261376.0, "46260": 1116261376.0, "46265": 1116261376.0, "46270": 1116261376.0, "46275": 1116261376.0, "46280": 1116261376.0, "46285": 1116261376.0, "46290": 1116261376.0, "46295": 1116261376.0, "46300": 1116261376.0, "46305": 1116261376.0, "46310": 1116261376.0, "46315": 1116261376.0, "46320": 1116261376.0, "46325": 1116261376.0, "46330": 1116261376.0, "46335": 1116261376.0, "46340": 1116261376.0, "46345": 1116261376.0, "46350": 1116261376.0, "46355": 1116261376.0, "46360": 1116261376.0, "46365": 1116261376.0, "46370": 1116261376.0, "46375": 1116261376.0, "46380": 1116261376.0, "46385": 1116261376.0, "46390": 1116261376.0, "46395": 1116261376.0, "46400": 1116261376.0, "46405": 1116261376.0, "46410": 1116261376.0, "46415": 1116261376.0, "46420": 1116261376.0, "46425": 1116261376.0, "46430": 1116261376.0, "46435": 1116261376.0, "46440": 1116261376.0, "46445": 1116261376.0, "46450": 1116261376.0, "46455": 1116261376.0, "46460": 1116261376.0, "46465": 1116261376.0, "46470": 1116261376.0, "46475": 1116261376.0, "46480": 1116261376.0, "46485": 1116261376.0, "46490": 1116261376.0, "46495": 1116261376.0, "46500": 1116261376.0, "46505": 1116261376.0, "46510": 1116261376.0, "46515": 1116261376.0, "46520": 1116261376.0, "46525": 1116261376.0, "46530": 1116261376.0, "46535": 1116261376.0, "46540": 1116261376.0, "46545": 
1116261376.0, "46550": 1116261376.0, "46555": 1116261376.0, "46560": 1116261376.0, "46565": 1116261376.0, "46570": 1116261376.0, "46575": 1116261376.0, "46580": 1116261376.0, "46585": 1116261376.0, "46590": 1116261376.0, "46595": 1116261376.0, "46600": 1116261376.0, "46605": 1116261376.0, "46610": 1116261376.0, "46615": 1116261376.0, "46620": 1116261376.0, "46625": 1116261376.0, "46630": 1116261376.0, "46635": 1116261376.0, "46640": 1116261376.0, "46645": 1116261376.0, "46650": 1116261376.0, "46655": 1116261376.0, "46660": 1116261376.0, "46665": 1116261376.0, "46670": 1116261376.0, "46675": 1116261376.0, "46680": 1116261376.0, "46685": 1116261376.0, "46690": 1116261376.0, "46695": 1116261376.0, "46700": 1116261376.0, "46705": 1116261376.0, "46710": 1116261376.0, "46715": 1116261376.0, "46720": 1116261376.0, "46725": 1116261376.0, "46730": 1116261376.0, "46735": 1116261376.0, "46740": 1116261376.0, "46745": 1116261376.0, "46750": 1116261376.0, "46755": 1116261376.0, "46760": 1116261376.0, "46765": 1116261376.0, "46770": 1116261376.0, "46775": 1116261376.0, "46780": 1116261376.0, "46785": 1116261376.0, "46790": 1116261376.0, "46795": 1116261376.0, "46800": 1116261376.0, "46805": 1116261376.0, "46810": 1116261376.0, "46815": 1116261376.0, "46820": 1116261376.0, "46825": 1116261376.0, "46830": 1116261376.0, "46835": 1116261376.0, "46840": 1116261376.0, "46845": 1116261376.0, "46850": 1116261376.0, "46855": 1116261376.0, "46860": 1116261376.0, "46865": 1116261376.0, "46870": 1116261376.0, "46875": 1116261376.0, "46880": 1116261376.0, "46885": 1116261376.0, "46890": 1116261376.0, "46895": 1116261376.0, "46900": 1116261376.0, "46905": 1116261376.0, "46910": 1116261376.0, "46915": 1116261376.0, "46920": 1116261376.0, "46925": 1116261376.0, "46930": 1116261376.0, "46935": 1116261376.0, "46940": 1116261376.0, "46945": 1116261376.0, "46950": 1116261376.0, "46955": 1116261376.0, "46960": 1116261376.0, "46965": 1116261376.0, "46970": 1116261376.0, "46975": 1116261376.0, "46980": 1116261376.0, "46985": 1116261376.0, "46990": 1116261376.0, "46995": 1116261376.0, "47000": 1116261376.0, "47005": 1116261376.0, "47010": 1116261376.0, "47015": 1116261376.0, "47020": 1116261376.0, "47025": 1116261376.0, "47030": 1116261376.0, "47035": 1116261376.0, "47040": 1116261376.0, "47045": 1116261376.0, "47050": 1116261376.0, "47055": 1116261376.0, "47060": 1116261376.0, "47065": 1116261376.0, "47070": 1116261376.0, "47075": 1116261376.0, "47080": 1116261376.0, "47085": 1116261376.0, "47090": 1116261376.0, "47095": 1116261376.0, "47100": 1116261376.0, "47105": 1116261376.0, "47110": 1116261376.0, "47115": 1116261376.0, "47120": 1116261376.0, "47125": 1116261376.0, "47130": 1116261376.0, "47135": 1116261376.0, "47140": 1116261376.0, "47145": 1116261376.0, "47150": 1116261376.0, "47155": 1116261376.0, "47160": 1116261376.0, "47165": 1116261376.0, "47170": 1116261376.0, "47175": 1116261376.0, "47180": 1116261376.0, "47185": 1116261376.0, "47190": 1116261376.0, "47195": 1116261376.0, "47200": 1116261376.0, "47205": 1116261376.0, "47210": 1116261376.0, "47215": 1116261376.0, "47220": 1116261376.0, "47225": 1116261376.0, "47230": 1116261376.0, "47235": 1116261376.0, "47240": 1116261376.0, "47245": 1116261376.0, "47250": 1116261376.0, "47255": 1116261376.0, "47260": 1116261376.0, "47265": 1116261376.0, "47270": 1116261376.0, "47275": 1116261376.0, "47280": 1116261376.0, "47285": 1116261376.0, "47290": 1116261376.0, "47295": 1116261376.0, "47300": 1116261376.0, "47305": 1116261376.0, "47310": 1116261376.0, "47315": 
1116261376.0, "47320": 1116261376.0, "47325": 1116261376.0, "47330": 1116261376.0, "47335": 1116261376.0, "47340": 1116261376.0, "47345": 1116261376.0, "47350": 1116261376.0, "47355": 1116261376.0, "47360": 1116261376.0, "47365": 1116261376.0, "47370": 1116261376.0, "47375": 1116261376.0, "47380": 1116261376.0, "47385": 1116261376.0, "47390": 1116261376.0, "47395": 1116261376.0, "47400": 1116261376.0, "47405": 1116261376.0, "47410": 1116261376.0, "47415": 1116261376.0, "47420": 1116261376.0, "47425": 1116261376.0, "47430": 1116261376.0, "47435": 1116261376.0, "47440": 1116261376.0, "47445": 1116261376.0, "47450": 1116261376.0, "47455": 1116261376.0, "47460": 1116261376.0, "47465": 1116261376.0, "47470": 1116261376.0, "47475": 1116261376.0, "47480": 1116261376.0, "47485": 1116261376.0, "47490": 1116261376.0, "47495": 1116261376.0, "47500": 1116261376.0, "47505": 1116261376.0, "47510": 1116261376.0, "47515": 1116261376.0, "47520": 1116261376.0, "47525": 1116261376.0, "47530": 1116261376.0, "47535": 1116261376.0, "47540": 1116261376.0, "47545": 1116261376.0, "47550": 1116261376.0, "47555": 1116261376.0, "47560": 1116261376.0, "47565": 1116261376.0, "47570": 1116261376.0, "47575": 1116261376.0, "47580": 1116261376.0, "47585": 1116261376.0, "47590": 1116261376.0, "47595": 1116261376.0, "47600": 1116261376.0, "47605": 1116261376.0, "47610": 1116261376.0, "47615": 1116261376.0, "47620": 1116261376.0, "47625": 1116261376.0, "47630": 1116261376.0, "47635": 1116261376.0, "47640": 1116261376.0, "47645": 1116261376.0, "47650": 1116261376.0, "47655": 1116261376.0, "47660": 1116261376.0, "47665": 1116261376.0, "47670": 1116261376.0, "47675": 1116261376.0, "47680": 1116261376.0, "47685": 1116261376.0, "47690": 1116261376.0, "47695": 1116261376.0, "47700": 1116261376.0, "47705": 1116261376.0, "47710": 1116261376.0, "47715": 1116261376.0, "47720": 1116261376.0, "47725": 1116261376.0, "47730": 1116261376.0, "47735": 1116261376.0, "47740": 1116261376.0, "47745": 1116261376.0, "47750": 1116261376.0, "47755": 1116261376.0, "47760": 1116261376.0, "47765": 1116261376.0, "47770": 1116261376.0, "47775": 1116261376.0, "47780": 1116261376.0, "47785": 1116261376.0, "47790": 1116261376.0, "47795": 1116261376.0, "47800": 1116261376.0, "47805": 1116261376.0, "47810": 1116261376.0, "47815": 1116261376.0, "47820": 1116261376.0, "47825": 1116261376.0, "47830": 1116261376.0, "47835": 1116261376.0, "47840": 1116261376.0, "47845": 1116261376.0, "47850": 1116261376.0, "47855": 1116261376.0, "47860": 1116261376.0, "47865": 1116261376.0, "47870": 1116261376.0, "47875": 1116261376.0, "47880": 1116261376.0, "47885": 1116261376.0, "47890": 1116261376.0, "47895": 1116261376.0, "47900": 1116261376.0, "47905": 1116261376.0, "47910": 1116261376.0, "47915": 1116261376.0, "47920": 1116261376.0, "47925": 1116261376.0, "47930": 1116261376.0, "47935": 1116261376.0, "47940": 1116261376.0, "47945": 1116261376.0, "47950": 1116261376.0, "47955": 1116261376.0, "47960": 1116261376.0, "47965": 1116261376.0, "47970": 1116261376.0, "47975": 1116261376.0, "47980": 1116261376.0, "47985": 1116261376.0, "47990": 1116261376.0, "47995": 1116261376.0, "48000": 1116261376.0, "48005": 1116261376.0, "48010": 1116261376.0, "48015": 1116261376.0, "48020": 1116261376.0, "48025": 1116261376.0, "48030": 1116261376.0, "48035": 1116261376.0, "48040": 1116261376.0, "48045": 1116261376.0, "48050": 1116261376.0, "48055": 1116261376.0, "48060": 1116261376.0, "48065": 1116261376.0, "48070": 1116261376.0, "48075": 1116261376.0, "48080": 1116261376.0, "48085": 
1116261376.0, "48090": 1116261376.0, "48095": 1116261376.0, "48100": 1116261376.0, "48105": 1116261376.0, "48110": 1116261376.0, "48115": 1116261376.0, "48120": 1116261376.0, "48125": 1116261376.0, "48130": 1116261376.0, "48135": 1116261376.0, "48140": 1116261376.0, "48145": 1116261376.0, "48150": 1116261376.0, "48155": 1116261376.0, "48160": 1116261376.0, "48165": 1116261376.0, "48170": 1116261376.0, "48175": 1116261376.0, "48180": 1116261376.0, "48185": 1116261376.0, "48190": 1116261376.0, "48195": 1116261376.0, "48200": 1116261376.0, "48205": 1116261376.0, "48210": 1116261376.0, "48215": 1116261376.0, "48220": 1116261376.0, "48225": 1116261376.0, "48230": 1116261376.0, "48235": 1116261376.0, "48240": 1116261376.0, "48245": 1116261376.0, "48250": 1116261376.0, "48255": 1116261376.0, "48260": 1116261376.0, "48265": 1116261376.0, "48270": 1116261376.0, "48275": 1116261376.0, "48280": 1116261376.0, "48285": 1116261376.0, "48290": 1116261376.0, "48295": 1116261376.0, "48300": 1116261376.0, "48305": 1116261376.0, "48310": 1116261376.0, "48315": 1116261376.0, "48320": 1116261376.0, "48325": 1116261376.0, "48330": 1116261376.0, "48335": 1116261376.0, "48340": 1116261376.0, "48345": 1116261376.0, "48350": 1116261376.0, "48355": 1116261376.0, "48360": 1116261376.0, "48365": 1116261376.0, "48370": 1116261376.0, "48375": 1116261376.0, "48380": 1116261376.0, "48385": 1116261376.0, "48390": 1116261376.0, "48395": 1116261376.0, "48400": 1116261376.0, "48405": 1116261376.0, "48410": 1116261376.0, "48415": 1116261376.0, "48420": 1116261376.0, "48425": 1116261376.0, "48430": 1116261376.0, "48435": 1116261376.0, "48440": 1116261376.0, "48445": 1116261376.0, "48450": 1116261376.0, "48455": 1116261376.0, "48460": 1116261376.0, "48465": 1116261376.0, "48470": 1116261376.0, "48475": 1116261376.0, "48480": 1116261376.0, "48485": 1116261376.0, "48490": 1116261376.0, "48495": 1116261376.0, "48500": 1116261376.0, "48505": 1116261376.0, "48510": 1116261376.0, "48515": 1116261376.0, "48520": 1116261376.0, "48525": 1116261376.0, "48530": 1116261376.0, "48535": 1116261376.0, "48540": 1116261376.0, "48545": 1116261376.0, "48550": 1116261376.0, "48555": 1116261376.0, "48560": 1116261376.0, "48565": 1116261376.0, "48570": 1116261376.0, "48575": 1116261376.0, "48580": 1116261376.0, "48585": 1116261376.0, "48590": 1116261376.0, "48595": 1116261376.0, "48600": 1116261376.0, "48605": 1116261376.0, "48610": 1116261376.0, "48615": 1116261376.0, "48620": 1116261376.0, "48625": 1116261376.0, "48630": 1116261376.0, "48635": 1116261376.0, "48640": 1116261376.0, "48645": 1116261376.0, "48650": 1116261376.0, "48655": 1116261376.0, "48660": 1116261376.0, "48665": 1116261376.0, "48670": 1116261376.0, "48675": 1116261376.0, "48680": 1116261376.0, "48685": 1116261376.0, "48690": 1116261376.0, "48695": 1116261376.0, "48700": 1116261376.0, "48705": 1116261376.0, "48710": 1116261376.0, "48715": 1116261376.0, "48720": 1116261376.0, "48725": 1116261376.0, "48730": 1116261376.0, "48735": 1116261376.0, "48740": 1116261376.0, "48745": 1116261376.0, "48750": 1116261376.0, "48755": 1116261376.0, "48760": 1116261376.0, "48765": 1116261376.0, "48770": 1116261376.0, "48775": 1116261376.0, "48780": 1116261376.0, "48785": 1116261376.0, "48790": 1116261376.0, "48795": 1116261376.0, "48800": 1116261376.0, "48805": 1116261376.0, "48810": 1116261376.0, "48815": 1116261376.0, "48820": 1116261376.0, "48825": 1116261376.0, "48830": 1116261376.0, "48835": 1116261376.0, "48840": 1116261376.0, "48845": 1116261376.0, "48850": 1116261376.0, "48855": 
1116261376.0, "48860": 1116261376.0, "48865": 1116261376.0, "48870": 1116261376.0, "48875": 1116261376.0, "48880": 1116261376.0, "48885": 1116261376.0, "48890": 1116261376.0, "48895": 1116261376.0, "48900": 1116261376.0, "48905": 1116261376.0, "48910": 1116261376.0, "48915": 1116261376.0, "48920": 1116261376.0, "48925": 1116261376.0, "48930": 1116261376.0, "48935": 1116261376.0, "48940": 1116261376.0, "48945": 1116261376.0, "48950": 1116261376.0, "48955": 1116261376.0, "48960": 1116261376.0, "48965": 1116261376.0, "48970": 1116261376.0, "48975": 1116261376.0, "48980": 1116261376.0, "48985": 1116261376.0, "48990": 1116261376.0, "48995": 1116261376.0, "49000": 1116261376.0, "49005": 1116261376.0, "49010": 1116261376.0, "49015": 1116261376.0, "49020": 1116261376.0, "49025": 1116261376.0, "49030": 1116261376.0, "49035": 1116261376.0, "49040": 1116261376.0, "49045": 1116261376.0, "49050": 1116261376.0, "49055": 1116261376.0, "49060": 1116261376.0, "49065": 1116261376.0, "49070": 1116261376.0, "49075": 1116261376.0, "49080": 1116261376.0, "49085": 1116261376.0, "49090": 1116261376.0, "49095": 1116261376.0, "49100": 1116261376.0, "49105": 1116261376.0, "49110": 1116261376.0, "49115": 1116261376.0, "49120": 1116261376.0, "49125": 1116261376.0, "49130": 1116261376.0, "49135": 1116261376.0, "49140": 1116261376.0, "49145": 1116261376.0, "49150": 1116261376.0, "49155": 1116261376.0, "49160": 1116261376.0, "49165": 1116261376.0, "49170": 1116261376.0, "49175": 1116261376.0, "49180": 1116261376.0, "49185": 1116261376.0, "49190": 1116261376.0, "49195": 1116261376.0, "49200": 1116261376.0, "49205": 1116261376.0, "49210": 1116261376.0, "49215": 1116261376.0, "49220": 1116261376.0, "49225": 1116261376.0, "49230": 1116261376.0, "49235": 1116261376.0, "49240": 1116261376.0, "49245": 1116261376.0, "49250": 1116261376.0, "49255": 1116261376.0, "49260": 1116261376.0, "49265": 1116261376.0, "49270": 1116261376.0, "49275": 1116261376.0, "49280": 1116261376.0, "49285": 1116261376.0, "49290": 1116261376.0, "49295": 1116261376.0, "49300": 1116261376.0, "49305": 1116261376.0, "49310": 1116261376.0, "49315": 1116261376.0, "49320": 1116261376.0, "49325": 1116261376.0, "49330": 1116261376.0, "49335": 1116261376.0, "49340": 1116261376.0, "49345": 1116261376.0, "49350": 1116261376.0, "49355": 1116261376.0, "49360": 1116261376.0, "49365": 1116261376.0, "49370": 1116261376.0, "49375": 1116261376.0, "49380": 1116261376.0, "49385": 1116261376.0, "49390": 1116261376.0, "49395": 1116261376.0, "49400": 1116261376.0, "49405": 1116261376.0, "49410": 1116261376.0, "49415": 1116261376.0, "49420": 1116261376.0, "49425": 1116261376.0, "49430": 1116261376.0, "49435": 1116261376.0, "49440": 1116261376.0, "49445": 1116261376.0, "49450": 1116261376.0, "49455": 1116261376.0, "49460": 1116261376.0, "49465": 1116261376.0, "49470": 1116261376.0, "49475": 1116261376.0, "49480": 1116261376.0, "49485": 1116261376.0, "49490": 1116261376.0, "49495": 1116261376.0, "49500": 1116261376.0, "49505": 1116261376.0, "49510": 1116261376.0, "49515": 1116261376.0, "49520": 1116261376.0, "49525": 1116261376.0, "49530": 1116261376.0, "49535": 1116261376.0, "49540": 1116261376.0, "49545": 1116261376.0, "49550": 1116261376.0, "49555": 1116261376.0, "49560": 1116261376.0, "49565": 1116261376.0, "49570": 1116261376.0, "49575": 1116261376.0, "49580": 1116261376.0, "49585": 1116261376.0, "49590": 1116261376.0, "49595": 1116261376.0, "49600": 1116261376.0, "49605": 1116261376.0, "49610": 1116261376.0, "49615": 1116261376.0, "49620": 1116261376.0, "49625": 
1116261376.0, "49630": 1116261376.0, "49635": 1116261376.0, "49640": 1116261376.0, "49645": 1116261376.0, "49650": 1116261376.0, "49655": 1116261376.0, "49660": 1116261376.0, "49665": 1116261376.0, "49670": 1116261376.0, "49675": 1116261376.0, "49680": 1116261376.0, "49685": 1116261376.0, "49690": 1116261376.0, "49695": 1116261376.0, "49700": 1116261376.0, "49705": 1116261376.0, "49710": 1116261376.0, "49715": 1116261376.0, "49720": 1116261376.0, "49725": 1116261376.0, "49730": 1116261376.0, "49735": 1116261376.0, "49740": 1116261376.0, "49745": 1116261376.0, "49750": 1116261376.0, "49755": 1116261376.0, "49760": 1116261376.0, "49765": 1116261376.0, "49770": 1116261376.0, "49775": 1116261376.0, "49780": 1116261376.0, "49785": 1116261376.0, "49790": 1116261376.0, "49795": 1116261376.0, "49800": 1116261376.0, "49805": 1116261376.0, "49810": 1116261376.0, "49815": 1116261376.0, "49820": 1116261376.0, "49825": 1116261376.0, "49830": 1116261376.0, "49835": 1116261376.0, "49840": 1116261376.0, "49845": 1116261376.0, "49850": 1116261376.0, "49855": 1116261376.0, "49860": 1116261376.0, "49865": 1116261376.0, "49870": 1116261376.0, "49875": 1116261376.0, "49880": 1116261376.0, "49885": 1116261376.0, "49890": 1116261376.0, "49895": 1116261376.0, "49900": 1116261376.0, "49905": 1116261376.0, "49910": 1116261376.0, "49915": 1116261376.0, "49920": 1116261376.0, "49925": 1116261376.0, "49930": 1116261376.0, "49935": 1116261376.0, "49940": 1116261376.0, "49945": 1116261376.0, "49950": 1116261376.0, "49955": 1116261376.0, "49960": 1116261376.0, "49965": 1116261376.0, "49970": 1116261376.0, "49975": 1116261376.0, "49980": 1116261376.0, "49985": 1116261376.0, "49990": 1116261376.0, "49995": 1116261376.0, "50000": 1116261376.0, "50005": 1116261376.0, "50010": 1116261376.0, "50015": 1116261376.0, "50020": 1116261376.0, "50025": 1116261376.0, "50030": 1116261376.0, "50035": 1116261376.0, "50040": 1116261376.0, "50045": 1116261376.0, "50050": 1116261376.0, "50055": 1116261376.0, "50060": 1116261376.0, "50065": 1116261376.0, "50070": 1116261376.0, "50075": 1116261376.0, "50080": 1116261376.0, "50085": 1116261376.0, "50090": 1116261376.0, "50095": 1116261376.0, "50100": 1116261376.0, "50105": 1116261376.0, "50110": 1116261376.0, "50115": 1116261376.0, "50120": 1116261376.0, "50125": 1116261376.0, "50130": 1116261376.0, "50135": 1116261376.0, "50140": 1116261376.0, "50145": 1116261376.0, "50150": 1116261376.0, "50155": 1116261376.0, "50160": 1116261376.0, "50165": 1116261376.0, "50170": 1116261376.0, "50175": 1116261376.0, "50180": 1116261376.0, "50185": 1116261376.0, "50190": 1116261376.0, "50195": 1116261376.0, "50200": 1116261376.0, "50205": 1116261376.0, "50210": 1116261376.0, "50215": 1116261376.0, "50220": 1116261376.0, "50225": 1116261376.0, "50230": 1116261376.0, "50235": 1116261376.0, "50240": 1116261376.0, "50245": 1116261376.0, "50250": 1116261376.0, "50255": 1116261376.0, "50260": 1116261376.0, "50265": 1116261376.0, "50270": 1116261376.0, "50275": 1116261376.0, "50280": 1116261376.0, "50285": 1116261376.0, "50290": 1116261376.0, "50295": 1116261376.0, "50300": 1116261376.0, "50305": 1116261376.0, "50310": 1116261376.0, "50315": 1116261376.0, "50320": 1116261376.0, "50325": 1116261376.0, "50330": 1116261376.0, "50335": 1116261376.0, "50340": 1116261376.0, "50345": 1116261376.0, "50350": 1116261376.0, "50355": 1116261376.0, "50360": 1116261376.0, "50365": 1116261376.0, "50370": 1116261376.0, "50375": 1116261376.0, "50380": 1116261376.0, "50385": 1116261376.0, "50390": 1116261376.0, "50395": 
1116261376.0, "50400": 1116261376.0, "50405": 1116261376.0, "50410": 1116261376.0, "50415": 1116261376.0, "50420": 1116261376.0, "50425": 1116261376.0, "50430": 1116261376.0, "50435": 1116261376.0, "50440": 1116261376.0, "50445": 1116261376.0, "50450": 1116261376.0, "50455": 1116261376.0, "50460": 1116261376.0, "50465": 1116261376.0, "50470": 1116261376.0, "50475": 1116261376.0, "50480": 1116261376.0, "50485": 1116261376.0, "50490": 1116261376.0, "50495": 1116261376.0, "50500": 1116261376.0, "50505": 1116261376.0, "50510": 1116261376.0, "50515": 1116261376.0, "50520": 1116261376.0, "50525": 1116261376.0, "50530": 1116261376.0, "50535": 1116261376.0, "50540": 1116261376.0, "50545": 1116261376.0, "50550": 1116261376.0, "50555": 1116261376.0, "50560": 1116261376.0, "50565": 1116261376.0, "50570": 1116261376.0, "50575": 1116261376.0, "50580": 1116261376.0, "50585": 1116261376.0, "50590": 1116261376.0, "50595": 1116261376.0, "50600": 1116261376.0, "50605": 1116261376.0, "50610": 1116261376.0, "50615": 1116261376.0, "50620": 1116261376.0, "50625": 1116261376.0, "50630": 1116261376.0, "50635": 1116261376.0, "50640": 1116261376.0, "50645": 1116261376.0, "50650": 1116261376.0, "50655": 1116261376.0, "50660": 1116261376.0, "50665": 1116261376.0, "50670": 1116261376.0, "50675": 1116261376.0, "50680": 1116261376.0, "50685": 1116261376.0, "50690": 1116261376.0, "50695": 1116261376.0, "50700": 1116261376.0, "50705": 1116261376.0, "50710": 1116261376.0, "50715": 1116261376.0, "50720": 1116261376.0, "50725": 1116261376.0, "50730": 1116261376.0, "50735": 1116261376.0, "50740": 1116261376.0, "50745": 1116261376.0, "50750": 1116261376.0, "50755": 1116261376.0, "50760": 1116261376.0, "50765": 1116261376.0, "50770": 1116261376.0, "50775": 1116261376.0, "50780": 1116261376.0, "50785": 1116261376.0, "50790": 1116261376.0, "50795": 1116261376.0, "50800": 1116261376.0, "50805": 1116261376.0, "50810": 1116261376.0, "50815": 1116261376.0, "50820": 1116261376.0, "50825": 1116261376.0, "50830": 1116261376.0, "50835": 1116261376.0, "50840": 1116261376.0, "50845": 1116261376.0, "50850": 1116261376.0, "50855": 1116261376.0, "50860": 1116261376.0}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 0.36674, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 0.16893, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 0.17449, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 0.16534, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": 
"nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 0.17218, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 0.17618, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 0.17222, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 0.17332, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 0.17024, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 0.16874, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 0.17061, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 0.17645, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 0.16865, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 0.1748, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 0.16907, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 0.16884, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", 
"1685": "nan", "1690": "nan", "1695": "nan", "1700": 0.16248, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 0.16584, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 0.17722, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 0.16454, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 0.17124, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 0.17168, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 0.17384, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 0.18097, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 0.17322, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 0.16689, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 0.17309, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 0.17327, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", 
"2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 0.17066, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 0.18321, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 0.16944, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 0.16833, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 0.17451, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 0.17146, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 0.17797, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 0.17357, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 0.17342, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 0.16529, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 0.17537, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 0.17344, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", 
"4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 0.17242, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 0.17173, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 0.17048, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 0.18486, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 0.16836, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 0.16904, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 0.1758, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 0.16889, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 0.17559, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 0.17916, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 0.16397, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 0.17233, "5205": "nan", 
"5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 0.17735, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 0.17329, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 0.17751, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 0.16954, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 0.17045, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 0.16691, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 0.1733, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 0.17208, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 0.16734, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 0.18207, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 0.17126, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": 
"nan", "6390": "nan", "6395": "nan", "6400": 0.17877, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 0.167, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 0.16819, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 0.17135, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 0.17548, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 0.16681, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 0.1747, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 0.1715, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 0.16777, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 0.178, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 0.16969, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 0.17581, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", 
"7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 0.17562, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 0.16908, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 0.17669, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 0.17304, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 0.16662, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 0.16798, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 0.17386, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 0.17011, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 0.17636, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 0.16683, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 0.17044, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 0.17142, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", 
"8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 0.16767, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 0.17856, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 0.1752, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 0.16713, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 0.17324, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 0.17077, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 0.17205, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 0.18596, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 0.16555, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 0.17542, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 0.17262, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 0.16532, "9905": "nan", "9910": "nan", 
"9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 0.17179, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 0.17015, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 0.17143, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 0.1787, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 0.1727, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 0.17188, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 0.16999, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 0.18053, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 0.16777, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 0.17105, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 0.17608, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", 
"11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 0.16669, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 0.17371, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 0.16906, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 0.17246, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 0.17226, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 0.17213, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 0.17437, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 0.17298, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 0.17559, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 0.16832, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 0.16791, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": 
"nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 0.17174, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 0.17504, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 0.16438, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 0.1729, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 0.17025, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 0.17142, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": 0.17888, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 0.16941, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 0.17575, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 0.16834, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 0.17679, "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", 
"13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 0.17019, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 0.17318, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": "nan", "13495": "nan", "13500": 0.17538, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 0.17063, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 0.17456, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 0.17794, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": 0.16428, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 0.17114, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 0.16854, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 0.17775, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 0.17536, "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": 
"nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 0.16327, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 0.17379, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", "14595": "nan", "14600": 0.16829, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 0.17329, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 0.16934, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 0.17818, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": 0.1648, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 0.17129, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 0.17319, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 0.16467, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 0.18054, "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", 
"15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 0.16782, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 0.17473, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": "nan", "15700": 0.17826, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 0.17464, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 0.17056, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 0.16912, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": 0.16849, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 0.16955, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 0.17436, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 0.16964, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 0.17172, "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": 
"nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 0.17932, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 0.17211, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", "16800": 0.16302, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 0.16802, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 0.1675, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 0.1749, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": 0.16962, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 0.1703, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 0.1782, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 0.1676, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 0.18006, "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": 
"nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 0.17504, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 0.16968, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 0.16903, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": 0.1628, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 0.1623, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 0.17055, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": 0.1704, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 0.17508, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 0.17301, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 0.17379, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 0.16154, "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": 
"nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 0.17138, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 0.16966, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 0.17427, "19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": 0.16904, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 0.16682, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 0.17111, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 0.17348, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 0.17714, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 0.16963, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 0.17559, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 0.16992, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", 
"19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 0.16442, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 0.1712, "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": 0.16642, "20105": "nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": 0.17052, "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": 0.177, "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": 0.16697, "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": 0.17567, "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": 0.15896, "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": 0.17057, "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": 0.17268, "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": 0.16419, "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", 
"20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": 0.19294, "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": 0.17264, "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": 0.17147, "21205": "nan", "21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": 0.16749, "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": 0.17027, "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": 0.17276, "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": 0.17986, "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": 0.16439, "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": 0.17197, "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": 0.17155, "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 0.17189, "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": 
"nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": 0.16678, "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": 0.1689, "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": 0.1794, "22305": "nan", "22310": "nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": 0.18661, "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": 0.18308, "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": 0.18215, "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": 0.17809, "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": 0.17212, "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": 0.17695, "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": 0.18009, "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": 0.17337, "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", 
"23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": 0.1649, "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": 0.17521, "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": 0.17519, "23405": "nan", "23410": "nan", "23415": "nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": 0.17389, "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": 0.17857, "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": 0.17599, "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": 0.17461, "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": 0.17356, "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": 0.1728, "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": 0.17061, "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": 0.18183, "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": 
"nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": 0.17589, "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": 0.1805, "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": 0.17375, "24505": "nan", "24510": "nan", "24515": "nan", "24520": "nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": 0.16932, "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": 0.17046, "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": 0.1788, "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": 0.16992, "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": 0.17415, "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": 0.17471, "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": 0.17414, "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": 0.18062, "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", 
"25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": 0.17667, "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": 0.17067, "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": 0.17467, "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", "25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": 0.17638, "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": 0.17337, "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": 0.18263, "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": 0.16929, "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": 0.16849, "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": 0.17493, "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": 0.17282, "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": 0.17644, "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": 
"nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": 0.17595, "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": 0.17957, "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": 0.17789, "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": "nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": 0.16808, "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": 0.17082, "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": 0.18002, "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": 0.16771, "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": 0.17852, "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": 0.17314, "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": 0.17603, "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": 0.17415, "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", 
"27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": 0.1769, "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": 0.17369, "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": 0.17263, "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", "27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": 0.16842, "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": 0.17178, "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": 0.17091, "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": 0.17787, "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": 0.17779, "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": 0.17882, "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": 0.1667, "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": 0.17302, "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": 
"nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": 0.17105, "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": 0.17923, "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": 0.17286, "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": "nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": 0.18072, "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": 0.17834, "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": 0.176, "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": 0.18235, "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": 0.17461, "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": 0.18163, "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": 0.17305, "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": 0.17452, "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", 
"29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": 0.18081, "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": 0.17079, "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": 0.17261, "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", "30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": 0.16756, "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": 0.16734, "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": 0.17513, "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 0.17125, "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": 0.17879, "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": 0.16675, "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": 0.17418, "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": 0.18166, "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": 
"nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": 0.16325, "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": 0.19241, "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": 0.16838, "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": "nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": 0.17472, "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": 0.17952, "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": 0.17178, "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": 0.17948, "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": 0.18236, "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": 0.17485, "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": 0.17552, "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": 0.1656, "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", 
"31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": 0.17034, "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": 0.17048, "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": 0.17104, "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", "32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": 0.18391, "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": 0.17552, "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": 0.17502, "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": 0.17056, "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": 0.16878, "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": 0.17714, "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": 0.17314, "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": 0.17274, "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": 
"nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 0.17336, "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": 0.16937, "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": 0.17579, "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": "nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": 0.1737, "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": 0.16597, "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": 0.17258, "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": 0.1747, "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": 0.17978, "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": 0.17186, "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": 0.17825, "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": 0.17847, "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", 
"34185": "nan", "34190": "nan", "34195": "nan", "34200": 0.17485, "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": 0.17885, "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": 0.17628, "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": "nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": 0.17109, "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": 0.1749, "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": 0.17553, "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": 0.17746, "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": 0.17556, "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": 0.17456, "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": 0.17025, "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": 0.17999, "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": 
"nan", "35290": "nan", "35295": "nan", "35300": 0.17062, "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": 0.17814, "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": 0.17596, "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", "35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": 0.16798, "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": 0.18727, "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": 0.17688, "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": 0.17273, "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": 0.17694, "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": 0.18299, "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": 0.17807, "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": 0.1706, "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", 
"36390": "nan", "36395": "nan", "36400": 0.18064, "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": 0.16862, "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": 0.18349, "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": "nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": 0.17472, "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": 0.16437, "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": 0.17675, "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": 0.17184, "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": 0.16729, "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": 0.17674, "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": 0.17065, "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": 0.17232, "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": 
"nan", "37495": "nan", "37500": 0.18245, "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": 0.17027, "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": 0.16961, "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", "37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": 0.17461, "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": 0.17381, "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": 0.17381, "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": 0.17103, "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": 0.16867, "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": 0.17115, "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": 0.17057, "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": 0.17655, "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", 
"38595": "nan", "38600": 0.17332, "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": 0.17266, "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": 0.1759, "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": "nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": 0.16721, "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": 0.17828, "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": 0.17749, "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": 0.16965, "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": 0.17095, "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": 0.17097, "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": 0.17341, "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": 0.1773, "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": 
"nan", "39700": 0.17335, "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": 0.17186, "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": 0.16821, "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": "nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": 0.16795, "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": 0.16542, "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": 0.17282, "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": 0.17163, "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": 0.16879, "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": 0.1809, "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": 0.16715, "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": 0.16828, "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", 
"40800": 0.17609, "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": 0.16908, "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": 0.18146, "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", "41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": 0.16559, "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": 0.17151, "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": 0.17095, "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": 0.17366, "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 0.1705, "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": 0.177, "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": 0.17394, "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": 0.16247, "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": 
0.17919, "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": 0.16896, "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": 0.17612, "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": "nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": 0.16549, "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": 0.17416, "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": 0.17878, "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": 0.17442, "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": 0.16877, "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": 0.1727, "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": 0.17015, "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": 0.16287, "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": 0.18054, 
"43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": 0.16896, "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": 0.16898, "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", "43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": 0.17074, "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": 0.17305, "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": 0.17623, "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": 0.17124, "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": 0.1716, "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": 0.17074, "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": 0.16694, "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": 0.17283, "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": 0.16355, "44105": 
"nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 0.17572, "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": 0.17881, "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": "nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": 0.1733, "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": 0.16849, "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": 0.17068, "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": 0.17345, "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": 0.1681, "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": 0.17111, "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": 0.17935, "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": 0.16171, "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": 0.17142, "45205": "nan", 
"45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": 0.16908, "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": 0.16587, "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", "45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": 0.18339, "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": 0.1698, "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": 0.17314, "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": 0.16554, "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": 0.17138, "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": 0.17864, "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": 0.17545, "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": 0.17426, "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": 0.16162, "46305": "nan", "46310": 
"nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": 0.17983, "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": 0.17062, "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": "nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": 0.16965, "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": 0.17897, "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": 0.16603, "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": 0.17409, "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": 0.17375, "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": 0.16425, "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": 0.17495, "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": 0.17027, "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": 0.16911, "47405": "nan", "47410": "nan", 
"47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": 0.17949, "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": 0.17429, "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", "47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": 0.17049, "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": 0.17191, "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": 0.16982, "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": 0.1711, "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": 0.16665, "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": 0.16947, "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": 0.17413, "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": 0.17284, "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": 0.1715, "48505": "nan", "48510": "nan", "48515": 
"nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": 0.17238, "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": 0.16918, "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": "nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": 0.16491, "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": 0.17127, "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": 0.17187, "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": 0.16983, "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": 0.17282, "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": 0.17413, "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": 0.16899, "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": 0.1715, "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": 0.17653, "49605": "nan", "49610": "nan", "49615": "nan", 
"49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": 0.16878, "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": 0.17389, "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", "49885": "nan", "49890": "nan", "49895": "nan", "49900": 0.17368, "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": 0.16802, "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": 0.1706, "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": 0.16808, "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": 0.18104, "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": 0.17037, "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": 0.17282, "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": 0.16275, "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": 0.17208, "50705": "nan", "50710": "nan", "50715": "nan", "50720": 
"nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": 0.16957, "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json index d33c47a..009b9ad 100644 --- a/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json +++ b/tests/functional_tests/test_cases/t5/t5_release/golden_values_0.9.0.json @@ -1,40223 +1 @@ -{ - "lm loss": { - "start_step": 0, - "end_step": 100000, - "step_interval": 5, - "values": [ - 10.34371, - 10.32721, - 10.114, - 9.86649, - 9.64797, - 9.51665, - 9.43107, - 9.36024, - 9.28612, - 9.18335, - 9.13366, - 9.09549, - 9.00463, - 8.94915, - 8.92068, - 8.89332, - 8.84261, - 8.7778, - 8.74577, - 8.68576, - 8.66147, - 8.60809, - 8.59325, - 8.51331, - 8.45583, - 8.4516, - 8.39704, - 8.3649, - 8.28404, - 8.22978, - 8.2033, - 8.19542, - 8.12821, - 8.09811, - 8.02864, - 7.98128, - 7.91556, - 7.89997, - 7.87425, - 7.79892, - 7.72069, - 7.65651, - 7.64137, - 7.55316, - 7.45861, - 7.42432, - 7.36863, - 7.33937, - 7.22899, - 7.16727, - 7.11539, - 7.04258, - 7.0373, - 6.94246, - 6.85809, - 6.86439, - 6.80298, - 6.76349, - 6.70962, - 6.69861, - 6.66691, - 6.59053, - 6.54721, - 6.5453, - 6.51752, - 6.44991, - 6.54001, - 6.41416, - 6.38233, - 6.42955, - 6.37093, - 6.39886, - 6.36007, - 6.35539, - 6.31348, - 6.32511, - 6.26057, - 6.26525, - 6.25167, - 6.24934, - 6.24069, - 6.16234, - 6.18815, - 6.17433, - 6.1698, - 6.11567, - 6.11808, - 6.07284, - 6.12117, - 6.06599, - 6.03319, - 6.02723, - 6.0445, - 6.02115, - 6.0124, - 5.91088, - 5.97814, - 5.85118, - 5.87578, - 5.94438, - 5.91215, - 5.84502, - 5.85452, - 5.86563, - 5.82595, - 5.8257, - 5.84499, - 5.78783, - 5.76449, - 5.78957, - 5.75028, - 5.7297, - 5.77474, - 5.74849, - 5.73995, - 5.6496, - 5.68544, - 5.68631, - 5.62859, - 5.65657, - 5.64569, - 5.6526, - 5.64158, - 5.64334, - 5.55456, - 5.52606, - 5.54254, - 5.58907, - 5.61788, - 5.58637, - 5.51853, - 5.54271, - 5.55124, - 5.53125, - 5.55615, - 5.54975, - 5.54612, - 5.50163, - 5.53401, - 5.47103, - 5.44242, - 5.49341, - 5.43964, - 5.4582, - 5.38404, - 5.44417, - 5.45729, - 5.40678, - 5.48959, - 5.37385, - 5.40525, - 5.39967, - 5.37509, - 5.33497, - 5.39374, - 5.33408, - 5.37224, - 5.36061, - 5.29049, - 5.29867, - 5.33922, - 5.28809, - 5.28297, - 5.29188, - 5.31675, - 5.32539, - 5.32902, - 5.22632, - 5.33654, - 5.30256, - 5.29351, - 5.28235, - 5.29219, - 5.19923, - 5.23118, - 5.22195, - 5.24248, - 5.20525, - 5.19331, - 5.17488, - 5.20168, - 5.13312, - 5.23356, - 5.15915, - 5.14987, - 5.12961, - 5.17959, - 5.16337, - 5.17791, - 5.13279, - 5.15866, - 5.11402, - 5.10809, - 5.16762, - 5.0967, - 5.08165, - 5.13643, - 5.14252, - 5.14628, - 5.07924, - 5.11738, - 5.04207, - 5.04119, - 5.07161, - 5.02141, - 5.05205, - 5.06739, - 5.06261, - 5.01499, - 5.05365, - 5.05105, - 5.06245, - 5.01509, - 5.01269, - 5.02778, - 5.0117, - 4.99525, - 4.96393, - 4.98399, - 5.03623, - 5.0127, - 4.96259, - 5.00467, - 4.99258, - 4.91176, - 4.9443, - 4.99796, - 4.99819, - 4.94077, - 4.93736, - 4.96306, - 4.91808, - 4.92228, - 4.87653, - 4.95257, - 4.9784, - 4.90774, - 4.90829, - 
4.84604, - 4.88128, - 4.94029, - 4.89162, - 4.8621, - 4.89156, - 4.86422, - 4.78927, - 4.88608, - 4.84052, - 4.85941, - 4.84103, - 4.92018, - 4.87086, - 4.75272, - 4.81387, - 4.81981, - 4.81054, - 4.86339, - 4.83061, - 4.88123, - 4.83057, - 4.81621, - 4.82811, - 4.81344, - 4.87048, - 4.85872, - 4.7662, - 4.88862, - 4.83712, - 4.82332, - 4.85606, - 4.82294, - 4.83144, - 4.71875, - 4.82615, - 4.76198, - 4.7181, - 4.7939, - 4.78762, - 4.77938, - 4.81392, - 4.75002, - 4.73173, - 4.78803, - 4.81845, - 4.74332, - 4.84571, - 4.80402, - 4.73229, - 4.7338, - 4.70098, - 4.77377, - 4.76931, - 4.75162, - 4.73874, - 4.75287, - 4.72182, - 4.74306, - 4.76364, - 4.74807, - 4.75593, - 4.71463, - 4.73093, - 4.71701, - 4.6946, - 4.73624, - 4.71605, - 4.66674, - 4.67845, - 4.716, - 4.69358, - 4.65051, - 4.70965, - 4.71412, - 4.67758, - 4.69109, - 4.62664, - 4.67108, - 4.66478, - 4.64889, - 4.69847, - 4.66109, - 4.60784, - 4.64061, - 4.72245, - 4.66823, - 4.69203, - 4.62672, - 4.56931, - 4.69906, - 4.6596, - 4.60592, - 4.66496, - 4.63112, - 4.66863, - 4.6666, - 4.69607, - 4.70907, - 4.63781, - 4.57693, - 4.64554, - 4.62399, - 4.5774, - 4.65926, - 4.63967, - 4.61865, - 4.65526, - 4.65787, - 4.62302, - 4.63163, - 4.62148, - 4.62259, - 4.55848, - 4.57079, - 4.58421, - 4.57123, - 4.57655, - 4.58359, - 4.59391, - 4.57222, - 4.65079, - 4.58564, - 4.58319, - 4.53181, - 4.54073, - 4.55527, - 4.60676, - 4.62171, - 4.53496, - 4.61109, - 4.61188, - 4.64368, - 4.57979, - 4.46449, - 4.57862, - 4.62607, - 4.56378, - 4.62886, - 4.54314, - 4.56404, - 4.5332, - 4.54747, - 4.56644, - 4.5655, - 4.50503, - 4.53438, - 4.53179, - 4.54529, - 4.50102, - 4.45783, - 4.46511, - 4.53787, - 4.56745, - 4.53006, - 4.50951, - 4.52579, - 4.55778, - 4.53446, - 4.53667, - 4.57361, - 4.55073, - 4.46018, - 4.55381, - 4.47448, - 4.54257, - 4.53436, - 4.46738, - 4.51397, - 4.52642, - 4.52233, - 4.51263, - 4.47809, - 4.51756, - 4.49554, - 4.56551, - 4.49964, - 4.50747, - 4.50212, - 4.47716, - 4.53627, - 4.56063, - 4.46399, - 4.45834, - 4.46807, - 4.4765, - 4.48007, - 4.49675, - 4.45521, - 4.44142, - 4.48267, - 4.48807, - 4.49728, - 4.54687, - 4.44415, - 4.46507, - 4.47678, - 4.4658, - 4.43037, - 4.48776, - 4.38539, - 4.51719, - 4.38865, - 4.40015, - 4.4873, - 4.44821, - 4.52269, - 4.50812, - 4.45893, - 4.42479, - 4.458, - 4.41173, - 4.38105, - 4.45432, - 4.48549, - 4.53234, - 4.49588, - 4.47487, - 4.40138, - 4.39951, - 4.40127, - 4.42078, - 4.40868, - 4.38337, - 4.45332, - 4.40609, - 4.42202, - 4.43767, - 4.44993, - 4.44147, - 4.44211, - 4.43367, - 4.47342, - 4.46464, - 4.37303, - 4.40851, - 4.39862, - 4.39781, - 4.43557, - 4.34771, - 4.41679, - 4.3494, - 4.35542, - 4.43877, - 4.43076, - 4.42589, - 4.37757, - 4.36102, - 4.325, - 4.38068, - 4.41097, - 4.44037, - 4.40652, - 4.36263, - 4.37697, - 4.30277, - 4.39542, - 4.32018, - 4.31759, - 4.42157, - 4.30335, - 4.37803, - 4.33683, - 4.36159, - 4.33094, - 4.27205, - 4.36141, - 4.38782, - 4.31195, - 4.42062, - 4.35485, - 4.31702, - 4.38093, - 4.25977, - 4.35765, - 4.36693, - 4.35076, - 4.28993, - 4.37813, - 4.28099, - 4.25841, - 4.3138, - 4.50574, - 4.30034, - 4.31952, - 4.32474, - 4.28206, - 4.40133, - 4.388, - 4.30447, - 4.34673, - 4.27437, - 4.27176, - 4.27178, - 4.31596, - 4.35738, - 4.36794, - 4.32901, - 4.32664, - 4.32511, - 4.31891, - 4.44161, - 4.38934, - 4.26593, - 4.24697, - 4.29139, - 4.29503, - 4.2805, - 4.30744, - 4.28106, - 4.29376, - 4.34339, - 4.31353, - 4.26455, - 4.34641, - 4.28986, - 4.27105, - 4.30687, - 4.31653, - 4.26322, - 4.285, - 4.25663, - 4.27059, - 4.23069, - 4.24971, - 
4.29641, - 4.26077, - 4.22965, - 4.33005, - 4.24435, - 4.30421, - 4.27765, - 4.28617, - 4.3374, - 4.2579, - 4.19155, - 4.29224, - 4.275, - 4.27895, - 4.2813, - 4.21387, - 4.28236, - 4.30258, - 4.23456, - 4.24197, - 4.28329, - 4.28855, - 4.27254, - 4.24467, - 4.2486, - 4.27674, - 4.2098, - 4.21438, - 4.22464, - 4.28206, - 4.20106, - 4.29616, - 4.31549, - 4.27454, - 4.14934, - 4.18408, - 4.20249, - 4.1185, - 4.1766, - 4.25452, - 4.19783, - 4.21276, - 4.23118, - 4.18627, - 4.19913, - 4.2984, - 4.1896, - 4.19412, - 4.21993, - 4.23492, - 4.18918, - 4.21499, - 4.21815, - 4.18563, - 4.27453, - 4.19027, - 4.26236, - 4.25247, - 4.17194, - 4.23365, - 4.24633, - 4.21542, - 4.20471, - 4.11623, - 4.19141, - 4.19803, - 4.13584, - 4.22584, - 4.16821, - 4.22986, - 4.17502, - 4.20157, - 4.2042, - 4.15438, - 4.24046, - 4.15936, - 4.22629, - 4.15451, - 4.16778, - 4.21398, - 4.16408, - 4.27656, - 4.14559, - 4.24873, - 4.2216, - 4.10827, - 4.24151, - 4.14706, - 4.14237, - 4.15029, - 4.24328, - 4.1494, - 4.13806, - 4.16209, - 4.18968, - 4.19807, - 4.18528, - 4.15336, - 4.1921, - 4.21955, - 4.19537, - 4.17252, - 4.05469, - 4.23591, - 4.22929, - 4.16159, - 4.19924, - 4.13351, - 4.17162, - 4.22112, - 4.13728, - 4.19262, - 4.09591, - 4.18966, - 4.19159, - 4.16153, - 4.18441, - 4.24495, - 4.05146, - 4.11675, - 4.14561, - 4.13856, - 4.12771, - 4.13412, - 4.17317, - 4.10954, - 4.10103, - 4.10564, - 4.15103, - 4.06347, - 4.14064, - 4.13554, - 4.16036, - 4.13806, - 4.1411, - 4.13207, - 4.17111, - 4.13161, - 4.10581, - 4.14351, - 4.1418, - 4.12685, - 4.12491, - 4.17053, - 4.17197, - 4.08125, - 4.10622, - 4.08518, - 4.19901, - 4.18373, - 4.11784, - 4.13605, - 4.09085, - 4.16172, - 4.14396, - 4.08926, - 4.09725, - 4.07033, - 4.14794, - 4.09602, - 4.04872, - 4.11956, - 4.13134, - 4.17571, - 4.15728, - 4.04606, - 4.11036, - 4.10569, - 4.09439, - 4.08918, - 4.10652, - 4.04153, - 4.07967, - 4.14483, - 4.09258, - 4.11661, - 4.11553, - 4.05931, - 4.04687, - 4.05492, - 4.00914, - 4.14169, - 4.07154, - 4.01417, - 4.07498, - 4.05379, - 4.07445, - 4.12242, - 4.15678, - 4.09118, - 4.05464, - 4.09967, - 4.10054, - 4.07838, - 4.08205, - 4.10016, - 4.0927, - 4.0386, - 4.03104, - 4.09228, - 4.07933, - 4.03997, - 4.0703, - 4.0725, - 4.12135, - 4.05437, - 4.09376, - 4.10395, - 4.03578, - 4.05649, - 4.06444, - 3.99069, - 4.07636, - 4.06502, - 4.01864, - 4.09135, - 4.07911, - 4.06304, - 4.07942, - 4.00587, - 3.98571, - 4.01844, - 4.01845, - 4.0133, - 4.06635, - 4.05238, - 4.0415, - 4.08197, - 4.06864, - 4.06148, - 4.02985, - 4.1108, - 3.99637, - 4.02393, - 4.03333, - 4.00233, - 4.01089, - 3.99421, - 4.01976, - 3.98557, - 4.02879, - 4.02915, - 3.98361, - 4.01303, - 3.99182, - 4.01082, - 4.02917, - 3.98966, - 4.03798, - 3.98693, - 4.02806, - 3.9804, - 3.99154, - 3.95308, - 4.06131, - 3.98503, - 4.02242, - 4.04947, - 4.04755, - 4.05749, - 4.01964, - 4.04691, - 4.01903, - 4.00368, - 4.0223, - 3.96534, - 3.94413, - 3.95022, - 3.91459, - 4.01865, - 4.01447, - 4.01825, - 4.04712, - 3.90945, - 4.01035, - 3.93134, - 4.02347, - 4.0289, - 4.01944, - 4.02268, - 4.00379, - 3.98438, - 3.98494, - 4.00751, - 4.00539, - 4.01471, - 3.97883, - 3.96691, - 3.98118, - 3.95196, - 3.96805, - 3.9616, - 3.91135, - 3.9818, - 3.95048, - 3.96692, - 4.04797, - 3.95094, - 3.98129, - 4.00291, - 3.94687, - 3.99493, - 3.99943, - 3.91944, - 4.02828, - 3.97374, - 3.9849, - 4.02134, - 3.8844, - 4.0135, - 3.93749, - 3.9895, - 3.89734, - 3.91075, - 3.95003, - 3.94921, - 3.9051, - 3.86905, - 3.99393, - 3.95241, - 3.96172, - 3.99877, - 3.91178, - 3.97539, - 3.91908, - 3.989, - 
3.95961, - 3.91376, - 3.89508, - 3.94791, - 3.85501, - 3.92824, - 3.9345, - 3.91217, - 3.91427, - 3.93805, - 3.93775, - 3.93593, - 4.00061, - 3.99358, - 3.85265, - 3.92745, - 3.86778, - 3.88336, - 3.91641, - 3.86977, - 3.94184, - 3.99253, - 3.9565, - 3.90893, - 3.95547, - 3.91539, - 4.00609, - 3.94149, - 3.88706, - 3.88884, - 3.87887, - 3.84859, - 3.96994, - 3.83642, - 3.91187, - 3.93243, - 3.99307, - 3.94405, - 3.89238, - 3.85897, - 3.90837, - 3.94427, - 3.89752, - 3.90644, - 3.91271, - 3.86256, - 3.94143, - 3.89318, - 3.94167, - 3.86062, - 3.88939, - 3.86926, - 3.92992, - 3.89863, - 3.89253, - 3.87386, - 3.7964, - 3.92208, - 3.89098, - 3.86265, - 3.83529, - 3.88205, - 3.89735, - 3.88953, - 3.89208, - 3.87159, - 3.87154, - 3.85348, - 3.84535, - 3.81758, - 3.9064, - 3.92085, - 3.91365, - 3.83899, - 3.86635, - 3.87412, - 3.83715, - 3.86589, - 3.82874, - 3.87186, - 3.96878, - 3.88596, - 3.86261, - 3.84512, - 3.87305, - 3.93143, - 3.8972, - 3.91724, - 3.82514, - 3.87908, - 3.84294, - 3.87977, - 3.85227, - 3.88875, - 3.83649, - 3.91289, - 3.75757, - 3.90332, - 3.84783, - 3.78191, - 3.82763, - 3.87901, - 3.8072, - 3.94452, - 3.89707, - 3.82348, - 3.75937, - 3.80237, - 3.83533, - 3.84014, - 3.79384, - 3.88295, - 3.84588, - 3.82935, - 3.84494, - 3.8517, - 3.83153, - 3.84037, - 3.89638, - 3.80366, - 3.8738, - 3.79322, - 3.80552, - 3.80024, - 3.84643, - 3.84107, - 3.81869, - 3.87334, - 3.79885, - 3.89891, - 3.86192, - 3.83541, - 3.84327, - 3.84301, - 3.77504, - 3.83437, - 3.78309, - 3.73592, - 3.78098, - 3.80711, - 3.79688, - 3.79451, - 3.78697, - 3.81944, - 3.8357, - 3.78419, - 3.84716, - 3.78422, - 3.80811, - 3.81015, - 3.78557, - 3.79856, - 3.80035, - 3.80803, - 3.79067, - 3.78887, - 3.70707, - 3.81911, - 3.80337, - 3.86852, - 3.8238, - 3.79076, - 3.817, - 3.80191, - 3.86436, - 3.79506, - 3.77135, - 3.71988, - 3.76742, - 3.76852, - 3.79947, - 3.74223, - 3.82796, - 3.80137, - 3.75179, - 3.85419, - 3.74153, - 3.75233, - 3.74222, - 3.77405, - 3.76368, - 3.75689, - 3.77549, - 3.72838, - 3.79685, - 3.7622, - 3.74174, - 3.81635, - 3.81354, - 3.76734, - 3.79697, - 3.73373, - 3.78578, - 3.72265, - 3.78478, - 3.77295, - 3.77003, - 3.80455, - 3.73715, - 3.73299, - 3.75412, - 3.77077, - 3.80284, - 3.69181, - 3.7611, - 3.77744, - 3.67717, - 3.76498, - 3.72482, - 3.71854, - 3.78029, - 3.73392, - 3.73919, - 3.72154, - 3.72539, - 3.83116, - 3.71476, - 3.75519, - 3.75007, - 3.70735, - 3.71681, - 3.7788, - 3.62798, - 3.77322, - 3.6499, - 3.82058, - 3.70896, - 3.73358, - 3.6799, - 3.74943, - 3.65681, - 3.70177, - 3.77954, - 3.72156, - 3.72226, - 3.68523, - 3.68692, - 3.67229, - 3.7438, - 3.67946, - 3.69673, - 3.66724, - 3.6744, - 3.78139, - 3.7027, - 3.71637, - 3.68019, - 3.71413, - 3.63249, - 3.70117, - 3.70714, - 3.64921, - 3.71662, - 3.67793, - 3.61612, - 3.69623, - 3.66664, - 3.68843, - 3.71517, - 3.80243, - 3.68301, - 3.73884, - 3.63722, - 3.64617, - 3.71635, - 3.70133, - 3.66793, - 3.66688, - 3.69307, - 3.69747, - 3.66167, - 3.68218, - 3.70806, - 3.67807, - 3.69406, - 3.65958, - 3.66385, - 3.68838, - 3.65491, - 3.67502, - 3.693, - 3.67065, - 3.67303, - 3.62493, - 3.71113, - 3.66078, - 3.60537, - 3.66142, - 3.66626, - 3.66495, - 3.66852, - 3.69801, - 3.63677, - 3.62982, - 3.64909, - 3.62899, - 3.58792, - 3.65804, - 3.6867, - 3.67791, - 3.63415, - 3.62693, - 3.63352, - 3.59584, - 3.62589, - 3.59005, - 3.65756, - 3.67979, - 3.6218, - 3.61814, - 3.74461, - 3.65376, - 3.69396, - 3.70908, - 3.58418, - 3.60069, - 3.69807, - 3.6059, - 3.71573, - 3.57689, - 3.61656, - 3.55108, - 3.63637, - 3.66366, - 3.62931, - 
3.62951, - 3.65221, - 3.58482, - 3.60868, - 3.66425, - 3.65118, - 3.67675, - 3.658, - 3.61976, - 3.64246, - 3.62331, - 3.61776, - 3.62874, - 3.62721, - 3.59866, - 3.61873, - 3.5489, - 3.70696, - 3.57469, - 3.57608, - 3.64923, - 3.53588, - 3.61134, - 3.58014, - 3.6154, - 3.62417, - 3.60499, - 3.57437, - 3.59862, - 3.6083, - 3.56258, - 3.54283, - 3.48789, - 3.58356, - 3.54743, - 3.54125, - 3.68133, - 3.55024, - 3.62022, - 3.50064, - 3.52001, - 3.55301, - 3.55878, - 3.62301, - 3.61296, - 3.53876, - 3.55563, - 3.56008, - 3.53872, - 3.5625, - 3.52189, - 3.52659, - 3.52789, - 3.53299, - 3.50062, - 3.55139, - 3.54653, - 3.52656, - 3.54409, - 3.59934, - 3.56251, - 3.49642, - 3.54057, - 3.51033, - 3.50881, - 3.56371, - 3.50959, - 3.47596, - 3.4983, - 3.50324, - 3.51161, - 3.49018, - 3.45379, - 3.4568, - 3.4709, - 3.39537, - 3.4726, - 3.45765, - 3.46488, - 3.42513, - 3.4203, - 3.51239, - 3.49464, - 3.49605, - 3.47994, - 3.43017, - 3.49244, - 3.4508, - 3.45262, - 3.48298, - 3.43508, - 3.41518, - 3.49, - 3.40892, - 3.42355, - 3.49253, - 3.41237, - 3.38292, - 3.37708, - 3.45369, - 3.43094, - 3.42157, - 3.42184, - 3.40303, - 3.38357, - 3.32032, - 3.43462, - 3.42763, - 3.4259, - 3.41536, - 3.35857, - 3.36072, - 3.38797, - 3.38809, - 3.3164, - 3.39759, - 3.33031, - 3.38347, - 3.40914, - 3.3216, - 3.3373, - 3.33471, - 3.42567, - 3.43624, - 3.31601, - 3.35842, - 3.30376, - 3.3755, - 3.30036, - 3.304, - 3.34693, - 3.30717, - 3.34916, - 3.37777, - 3.33521, - 3.3354, - 3.33662, - 3.27124, - 3.3539, - 3.39383, - 3.37248, - 3.32546, - 3.28574, - 3.35235, - 3.34408, - 3.34222, - 3.3303, - 3.34022, - 3.27893, - 3.32112, - 3.30557, - 3.24484, - 3.29785, - 3.26682, - 3.22714, - 3.28872, - 3.30816, - 3.25746, - 3.29812, - 3.2934, - 3.3574, - 3.22733, - 3.28921, - 3.33915, - 3.21852, - 3.27923, - 3.23888, - 3.29058, - 3.20529, - 3.23681, - 3.26328, - 3.28397, - 3.30838, - 3.26096, - 3.2749, - 3.258, - 3.28091, - 3.27164, - 3.25485, - 3.26296, - 3.24127, - 3.26696, - 3.26689, - 3.21262, - 3.22802, - 3.26266, - 3.22859, - 3.28781, - 3.2253, - 3.23549, - 3.28202, - 3.30797, - 3.22898, - 3.17838, - 3.22148, - 3.21341, - 3.23912, - 3.19721, - 3.18832, - 3.2565, - 3.21436, - 3.1865, - 3.22391, - 3.20155, - 3.24919, - 3.23574, - 3.18696, - 3.17537, - 3.14401, - 3.20485, - 3.20609, - 3.17466, - 3.1378, - 3.15216, - 3.19468, - 3.15816, - 3.14527, - 3.19374, - 3.1484, - 3.20494, - 3.16096, - 3.15878, - 3.17442, - 3.24439, - 3.20999, - 3.16619, - 3.07025, - 3.1159, - 3.25497, - 3.18261, - 3.20949, - 3.15191, - 3.14302, - 3.04797, - 3.12089, - 3.12873, - 3.13918, - 3.12088, - 3.16562, - 3.06367, - 3.17184, - 3.12916, - 3.12642, - 3.14795, - 3.19024, - 3.0813, - 3.10649, - 3.1019, - 3.13557, - 3.11323, - 3.12541, - 3.1726, - 3.15794, - 3.07752, - 3.0946, - 3.13231, - 3.10344, - 3.11949, - 3.10301, - 3.05579, - 3.16942, - 3.0996, - 3.09904, - 3.15448, - 3.09789, - 3.09691, - 3.12681, - 3.1398, - 3.16618, - 3.11921, - 3.08365, - 3.07737, - 3.1531, - 3.09147, - 3.07162, - 3.03144, - 3.03893, - 3.07538, - 3.07841, - 3.05103, - 3.11952, - 3.11496, - 3.09061, - 3.10705, - 3.0946, - 3.1438, - 3.11292, - 3.05945, - 3.07554, - 3.06615, - 3.11348, - 3.08067, - 3.04709, - 3.10191, - 3.05431, - 3.12748, - 3.04764, - 3.01876, - 3.05853, - 3.03669, - 2.97918, - 3.0435, - 3.08119, - 3.06269, - 3.09626, - 3.08603, - 3.07461, - 3.08761, - 3.02338, - 3.04842, - 3.00278, - 2.9818, - 3.08616, - 3.07841, - 3.00485, - 3.00871, - 3.0374, - 3.0213, - 2.99273, - 3.03198, - 3.01008, - 3.05377, - 3.02347, - 3.07184, - 3.09238, - 3.0337, - 2.94648, - 
3.08056, - 3.11581, - 3.06111, - 2.99844, - 3.04809, - 3.00298, - 3.01841, - 3.08443, - 2.97501, - 3.055, - 3.01817, - 2.9941, - 2.95482, - 2.93857, - 3.03342, - 2.99739, - 2.96384, - 2.99674, - 3.00566, - 3.03091, - 2.96007, - 3.02182, - 2.93403, - 3.09829, - 3.0091, - 2.98855, - 3.01479, - 3.03527, - 3.02026, - 3.03447, - 3.03381, - 2.99644, - 3.01419, - 3.05048, - 2.96736, - 3.02802, - 3.13532, - 2.97867, - 2.95863, - 3.00951, - 2.98254, - 2.99559, - 2.91804, - 2.94361, - 3.01278, - 2.98653, - 3.00444, - 2.9757, - 2.99622, - 2.98816, - 3.00311, - 2.99989, - 2.98755, - 3.03377, - 2.97463, - 2.96327, - 2.98301, - 3.01855, - 2.94814, - 3.01632, - 3.02101, - 2.92578, - 2.9293, - 3.00537, - 2.93999, - 2.91726, - 2.95025, - 3.06795, - 2.90178, - 2.96537, - 3.03844, - 2.92099, - 3.01076, - 2.94878, - 2.91929, - 2.91717, - 3.02398, - 2.95694, - 2.89827, - 2.95817, - 2.93463, - 2.88714, - 3.01429, - 2.88445, - 2.93545, - 2.91244, - 2.95474, - 2.93962, - 2.8926, - 2.85307, - 2.93422, - 2.9297, - 2.92236, - 2.93161, - 2.95587, - 2.90156, - 2.98388, - 2.94396, - 2.93603, - 2.93848, - 2.96532, - 2.84699, - 2.86447, - 2.91252, - 2.95438, - 2.90619, - 2.95315, - 2.95224, - 2.91235, - 2.92049, - 2.90155, - 2.93415, - 3.00983, - 2.98178, - 2.89485, - 2.89593, - 2.86089, - 2.8884, - 2.90884, - 2.93988, - 2.90918, - 2.86846, - 2.95056, - 2.95628, - 2.92048, - 2.92831, - 2.86578, - 2.96543, - 2.90046, - 2.88209, - 2.9463, - 2.91948, - 2.96318, - 2.93245, - 2.9697, - 2.89533, - 2.95198, - 2.86938, - 2.82628, - 2.95756, - 2.95097, - 2.97077, - 2.93639, - 2.90521, - 2.95695, - 2.9037, - 2.92091, - 2.8628, - 2.93554, - 2.86756, - 2.92286, - 2.88841, - 2.96557, - 2.91396, - 2.89637, - 2.91341, - 2.88855, - 2.77714, - 2.90297, - 2.94488, - 2.94575, - 2.91736, - 2.83114, - 2.83237, - 2.93209, - 2.87038, - 2.8587, - 2.88183, - 2.84469, - 2.8989, - 2.9417, - 2.82079, - 2.86929, - 2.90045, - 2.98193, - 2.89512, - 2.9062, - 2.93924, - 2.82449, - 2.92485, - 2.87495, - 2.8365, - 2.8181, - 2.90693, - 2.81489, - 2.86948, - 2.87256, - 2.90533, - 2.90093, - 2.88342, - 2.77137, - 2.8786, - 2.84092, - 2.80635, - 2.78477, - 2.88779, - 2.73949, - 2.89247, - 2.79196, - 2.9072, - 2.81964, - 2.85877, - 2.88935, - 2.88218, - 2.83053, - 2.84917, - 2.81894, - 2.84817, - 2.90223, - 2.88839, - 2.88154, - 2.82994, - 2.78961, - 2.82896, - 2.74455, - 2.85291, - 2.90095, - 2.84824, - 2.86226, - 2.88905, - 2.80715, - 2.8626, - 2.87669, - 2.87899, - 2.88478, - 2.80931, - 2.89738, - 2.8037, - 2.81486, - 2.81346, - 2.84374, - 2.90051, - 2.8515, - 2.88707, - 2.88663, - 2.87102, - 2.84106, - 2.82347, - 2.87193, - 2.78659, - 2.90058, - 2.76909, - 2.81374, - 2.79345, - 2.85864, - 2.88459, - 2.79361, - 2.8044, - 2.84767, - 2.85486, - 2.82785, - 2.85836, - 2.86613, - 2.92563, - 2.81349, - 2.77303, - 2.85303, - 2.82634, - 2.74063, - 2.77044, - 2.86468, - 2.83577, - 2.82462, - 2.80297, - 2.79962, - 2.8223, - 2.88981, - 2.7985, - 2.77283, - 2.82732, - 2.82565, - 2.86194, - 2.8816, - 2.86627, - 2.7917, - 2.77768, - 2.81535, - 2.83914, - 2.74679, - 2.80587, - 2.81403, - 2.80038, - 2.79634, - 2.88313, - 2.86541, - 2.81117, - 2.82719, - 2.77105, - 2.81753, - 2.84877, - 2.80999, - 2.75832, - 2.83501, - 2.88573, - 2.73618, - 2.78669, - 2.82508, - 2.83497, - 2.86184, - 2.81411, - 2.80486, - 2.83339, - 2.77216, - 2.7664, - 2.83678, - 2.82966, - 2.8651, - 2.73586, - 2.77931, - 2.82145, - 2.82056, - 2.76942, - 2.82824, - 2.78171, - 2.83337, - 2.84238, - 2.8074, - 2.83586, - 2.81499, - 2.77751, - 2.78656, - 2.74025, - 2.78274, - 2.83574, - 2.87686, - 2.82694, - 
2.75606, - 2.80385, - 2.78596, - 2.80802, - 2.80465, - 2.79881, - 2.81739, - 2.7888, - 2.83816, - 2.80383, - 2.81455, - 2.85243, - 2.84293, - 2.79704, - 2.80649, - 2.81233, - 2.8055, - 2.80424, - 2.76885, - 2.76262, - 2.80149, - 2.79061, - 2.79671, - 2.80511, - 2.75307, - 2.80407, - 2.83569, - 2.7843, - 2.82479, - 2.80138, - 2.82107, - 2.78979, - 2.79239, - 2.77129, - 2.78763, - 2.74932, - 2.801, - 2.74313, - 2.79965, - 2.81306, - 2.77436, - 2.77067, - 2.84259, - 2.79077, - 2.80687, - 2.76434, - 2.75526, - 2.79594, - 2.77651, - 2.8763, - 2.72225, - 2.74088, - 2.85648, - 2.74197, - 2.76585, - 2.74744, - 2.73941, - 2.84705, - 2.76933, - 2.82295, - 2.8006, - 2.80583, - 2.73376, - 2.80069, - 2.75279, - 2.7493, - 2.7359, - 2.72292, - 2.74577, - 2.75061, - 2.77033, - 2.7877, - 2.76327, - 2.75848, - 2.7837, - 2.83026, - 2.78755, - 2.69023, - 2.76919, - 2.7289, - 2.73707, - 2.77825, - 2.73557, - 2.74949, - 2.78003, - 2.79292, - 2.72757, - 2.74697, - 2.69217, - 2.7304, - 2.71621, - 2.71694, - 2.76401, - 2.76801, - 2.78138, - 2.73347, - 2.80642, - 2.78506, - 2.71379, - 2.78032, - 2.78976, - 2.79134, - 2.80772, - 2.74918, - 2.70712, - 2.7587, - 2.74551, - 2.68356, - 2.80405, - 2.75191, - 2.80921, - 2.72457, - 2.74775, - 2.81151, - 2.66465, - 2.72849, - 2.71959, - 2.75387, - 2.75552, - 2.79577, - 2.7547, - 2.71633, - 2.69833, - 2.77585, - 2.77982, - 2.74336, - 2.78179, - 2.76975, - 2.78352, - 2.70881, - 2.73891, - 2.75507, - 2.72337, - 2.80237, - 2.80451, - 2.72218, - 2.71474, - 2.76943, - 2.75142, - 2.76966, - 2.79794, - 2.80761, - 2.81492, - 2.75243, - 2.72851, - 2.66692, - 2.78883, - 2.75137, - 2.70084, - 2.721, - 2.75057, - 2.6791, - 2.74507, - 2.81547, - 2.70009, - 2.81968, - 2.75444, - 2.78013, - 2.77986, - 2.74503, - 2.68274, - 2.74822, - 2.71928, - 2.76341, - 2.7392, - 2.70981, - 2.68247, - 2.78056, - 2.7008, - 2.69603, - 2.79023, - 2.73148, - 2.78412, - 2.78367, - 2.69007, - 2.74103, - 2.76041, - 2.69397, - 2.73454, - 2.79217, - 2.75188, - 2.73541, - 2.75435, - 2.67168, - 2.66605, - 2.75613, - 2.75529, - 2.68593, - 2.76386, - 2.67782, - 2.7735, - 2.74449, - 2.71107, - 2.68143, - 2.77062, - 2.7109, - 2.6776, - 2.72273, - 2.73666, - 2.76326, - 2.72386, - 2.81193, - 2.79333, - 2.72329, - 2.6656, - 2.64689, - 2.66826, - 2.73035, - 2.73958, - 2.71352, - 2.6232, - 2.67447, - 2.71078, - 2.72834, - 2.67008, - 2.72791, - 2.69784, - 2.71227, - 2.62515, - 2.68954, - 2.76627, - 2.6215, - 2.74541, - 2.72286, - 2.74895, - 2.64032, - 2.62844, - 2.7021, - 2.76356, - 2.75275, - 2.66259, - 2.75015, - 2.6293, - 2.68498, - 2.64215, - 2.64355, - 2.68438, - 2.71158, - 2.72629, - 2.56832, - 2.77191, - 2.75158, - 2.65353, - 2.71807, - 2.71046, - 2.75894, - 2.65446, - 2.74547, - 2.69499, - 2.68761, - 2.6913, - 2.74163, - 2.74886, - 2.67043, - 2.66168, - 2.68886, - 2.76689, - 2.74306, - 2.65098, - 2.70104, - 2.66722, - 2.71603, - 2.68891, - 2.67689, - 2.68424, - 2.76128, - 2.70074, - 2.69055, - 2.62151, - 2.71233, - 2.71145, - 2.56962, - 2.66729, - 2.68197, - 2.73717, - 2.75111, - 2.77256, - 2.73667, - 2.72777, - 2.67809, - 2.70789, - 2.65197, - 2.67535, - 2.68896, - 2.66942, - 2.66279, - 2.70952, - 2.66716, - 2.78037, - 2.69124, - 2.67769, - 2.65496, - 2.73923, - 2.64488, - 2.68576, - 2.73899, - 2.64938, - 2.70387, - 2.66367, - 2.73747, - 2.66893, - 2.67631, - 2.66314, - 2.64389, - 2.61873, - 2.64314, - 2.766, - 2.73337, - 2.68305, - 2.71639, - 2.61625, - 2.71792, - 2.68769, - 2.73993, - 2.70447, - 2.67, - 2.72517, - 2.73256, - 2.72007, - 2.72097, - 2.66064, - 2.70916, - 2.65783, - 2.6917, - 2.69324, - 2.5658, - 
2.65943, - 2.68232, - 2.7527, - 2.61684, - 2.6854, - 2.75141, - 2.65068, - 2.6931, - 2.64071, - 2.68162, - 2.65333, - 2.68028, - 2.63348, - 2.72617, - 2.66754, - 2.73209, - 2.68119, - 2.6864, - 2.64034, - 2.69337, - 2.62332, - 2.70951, - 2.73773, - 2.67288, - 2.62249, - 2.59525, - 2.72794, - 2.6466, - 2.67197, - 2.7226, - 2.63357, - 2.66373, - 2.63202, - 2.68662, - 2.67108, - 2.61592, - 2.6019, - 2.66101, - 2.6626, - 2.60034, - 2.65389, - 2.63549, - 2.61021, - 2.68758, - 2.71159, - 2.75712, - 2.6618, - 2.65398, - 2.70419, - 2.66052, - 2.66932, - 2.62803, - 2.66542, - 2.64726, - 2.58274, - 2.70265, - 2.58808, - 2.65158, - 2.65309, - 2.70866, - 2.55429, - 2.60902, - 2.62775, - 2.65961, - 2.73813, - 2.6892, - 2.67541, - 2.65591, - 2.69175, - 2.69494, - 2.63681, - 2.62478, - 2.67323, - 2.62809, - 2.69152, - 2.64142, - 2.74684, - 2.54882, - 2.6867, - 2.68145, - 2.70877, - 2.70729, - 2.61984, - 2.6673, - 2.63975, - 2.55461, - 2.66996, - 2.62989, - 2.61291, - 2.60881, - 2.59522, - 2.63217, - 2.66455, - 2.71612, - 2.65904, - 2.61188, - 2.63071, - 2.62894, - 2.65015, - 2.60086, - 2.60751, - 2.65635, - 2.61026, - 2.6486, - 2.68425, - 2.62975, - 2.62047, - 2.68684, - 2.72416, - 2.67282, - 2.67596, - 2.60035, - 2.67338, - 2.6874, - 2.64649, - 2.6895, - 2.66173, - 2.65004, - 2.66817, - 2.66857, - 2.63647, - 2.67898, - 2.69128, - 2.64617, - 2.69696, - 2.61101, - 2.6229, - 2.6265, - 2.61036, - 2.66572, - 2.60918, - 2.60118, - 2.68381, - 2.69382, - 2.66188, - 2.7231, - 2.65321, - 2.55765, - 2.66842, - 2.64541, - 2.61506, - 2.59532, - 2.63639, - 2.60841, - 2.62806, - 2.64608, - 2.67118, - 2.62389, - 2.55923, - 2.57586, - 2.62948, - 2.62331, - 2.60092, - 2.63199, - 2.61124, - 2.58761, - 2.64234, - 2.60936, - 2.61712, - 2.58712, - 2.65235, - 2.63345, - 2.67624, - 2.63538, - 2.5859, - 2.68176, - 2.68966, - 2.62908, - 2.66472, - 2.59177, - 2.56704, - 2.61299, - 2.64034, - 2.63382, - 2.6428, - 2.54883, - 2.58262, - 2.61183, - 2.6311, - 2.57346, - 2.57403, - 2.62809, - 2.57895, - 2.69194, - 2.62525, - 2.63167, - 2.59661, - 2.69256, - 2.70696, - 2.54479, - 2.70055, - 2.60821, - 2.61701, - 2.67208, - 2.61011, - 2.65011, - 2.62321, - 2.65866, - 2.5425, - 2.6093, - 2.60854, - 2.59741, - 2.58862, - 2.67295, - 2.7044, - 2.60812, - 2.68488, - 2.65197, - 2.57168, - 2.61187, - 2.61328, - 2.63525, - 2.62934, - 2.56182, - 2.63649, - 2.63364, - 2.62887, - 2.59577, - 2.60886, - 2.63652, - 2.65075, - 2.56499, - 2.70703, - 2.64762, - 2.62931, - 2.65009, - 2.67072, - 2.59086, - 2.64295, - 2.58587, - 2.61895, - 2.5797, - 2.62413, - 2.56185, - 2.66142, - 2.6316, - 2.62357, - 2.5959, - 2.63244, - 2.58769, - 2.63122, - 2.5933, - 2.56499, - 2.51952, - 2.63504, - 2.54099, - 2.64521, - 2.60912, - 2.6267, - 2.564, - 2.57348, - 2.56992, - 2.58418, - 2.61012, - 2.55381, - 2.56653, - 2.66297, - 2.6435, - 2.59938, - 2.60593, - 2.641, - 2.55413, - 2.57443, - 2.63708, - 2.64828, - 2.58094, - 2.6622, - 2.63222, - 2.67, - 2.5877, - 2.51709, - 2.52876, - 2.57926, - 2.61093, - 2.66773, - 2.62584, - 2.61201, - 2.61813, - 2.63209, - 2.61149, - 2.58899, - 2.55519, - 2.5915, - 2.61339, - 2.57118, - 2.55824, - 2.61613, - 2.5801, - 2.58463, - 2.56969, - 2.55443, - 2.62851, - 2.57225, - 2.6848, - 2.58631, - 2.59045, - 2.53288, - 2.59222, - 2.58792, - 2.62052, - 2.59499, - 2.56684, - 2.58895, - 2.59582, - 2.5789, - 2.57688, - 2.57849, - 2.65257, - 2.55409, - 2.52359, - 2.58454, - 2.59495, - 2.53446, - 2.57372, - 2.54588, - 2.62729, - 2.5586, - 2.65723, - 2.58125, - 2.60351, - 2.58585, - 2.51436, - 2.55796, - 2.50209, - 2.64614, - 2.60605, - 2.59766, - 
2.63874, - 2.52589, - 2.58287, - 2.54012, - 2.49623, - 2.64405, - 2.58353, - 2.65639, - 2.59984, - 2.52379, - 2.6299, - 2.57622, - 2.60262, - 2.6084, - 2.6076, - 2.57319, - 2.59715, - 2.57519, - 2.61333, - 2.63064, - 2.59368, - 2.6369, - 2.5333, - 2.49021, - 2.61736, - 2.54959, - 2.57231, - 2.56281, - 2.65289, - 2.56465, - 2.63305, - 2.59313, - 2.59101, - 2.5983, - 2.54118, - 2.61238, - 2.59537, - 2.61145, - 2.58803, - 2.60472, - 2.67877, - 2.56161, - 2.6101, - 2.56673, - 2.60268, - 2.60031, - 2.52168, - 2.6507, - 2.54765, - 2.63041, - 2.57828, - 2.59903, - 2.49068, - 2.59229, - 2.58171, - 2.60845, - 2.56928, - 2.58428, - 2.6247, - 2.52681, - 2.56191, - 2.58753, - 2.50335, - 2.60935, - 2.58442, - 2.49095, - 2.60589, - 2.56827, - 2.61591, - 2.61087, - 2.58495, - 2.61272, - 2.58798, - 2.54086, - 2.59552, - 2.61571, - 2.5995, - 2.52747, - 2.51579, - 2.63453, - 2.61821, - 2.56831, - 2.57385, - 2.59723, - 2.54406, - 2.61962, - 2.55937, - 2.62051, - 2.55239, - 2.5812, - 2.68362, - 2.54966, - 2.62374, - 2.57061, - 2.53222, - 2.57754, - 2.58206, - 2.6136, - 2.52934, - 2.5716, - 2.53918, - 2.51976, - 2.56665, - 2.44944, - 2.56967, - 2.55454, - 2.53906, - 2.55189, - 2.55023, - 2.57851, - 2.57355, - 2.557, - 2.57158, - 2.50214, - 2.51197, - 2.56256, - 2.51444, - 2.52839, - 2.58499, - 2.60438, - 2.52385, - 2.5747, - 2.50562, - 2.5617, - 2.5552, - 2.52638, - 2.5443, - 2.60336, - 2.52014, - 2.57715, - 2.56441, - 2.55141, - 2.57211, - 2.57972, - 2.52367, - 2.57278, - 2.54216, - 2.55236, - 2.54777, - 2.56982, - 2.59999, - 2.54135, - 2.58151, - 2.51634, - 2.61955, - 2.5675, - 2.4568, - 2.57342, - 2.55853, - 2.56717, - 2.63909, - 2.618, - 2.55715, - 2.60809, - 2.51439, - 2.5015, - 2.50281, - 2.5334, - 2.50071, - 2.55917, - 2.50471, - 2.56075, - 2.63811, - 2.51631, - 2.58247, - 2.5451, - 2.53291, - 2.5299, - 2.53253, - 2.53392, - 2.51032, - 2.58595, - 2.55135, - 2.57227, - 2.57543, - 2.54353, - 2.61402, - 2.56794, - 2.5604, - 2.55498, - 2.51499, - 2.52695, - 2.59009, - 2.51501, - 2.50967, - 2.48264, - 2.55001, - 2.5278, - 2.54164, - 2.52304, - 2.54214, - 2.48849, - 2.51753, - 2.58903, - 2.61956, - 2.56039, - 2.5406, - 2.54079, - 2.5449, - 2.51107, - 2.5658, - 2.52561, - 2.53839, - 2.55095, - 2.59917, - 2.53839, - 2.58099, - 2.62992, - 2.57205, - 2.57496, - 2.55759, - 2.60914, - 2.53817, - 2.5961, - 2.51283, - 2.55853, - 2.42765, - 2.53366, - 2.54295, - 2.54823, - 2.5644, - 2.53103, - 2.51332, - 2.51396, - 2.62756, - 2.46276, - 2.54627, - 2.595, - 2.48257, - 2.53466, - 2.52359, - 2.55915, - 2.54452, - 2.54712, - 2.52808, - 2.56123, - 2.54537, - 2.56587, - 2.52644, - 2.55813, - 2.54549, - 2.56297, - 2.45761, - 2.48587, - 2.49228, - 2.57336, - 2.61951, - 2.4818, - 2.45865, - 2.54354, - 2.46115, - 2.4485, - 2.51564, - 2.48489, - 2.57547, - 2.54891, - 2.50171, - 2.61323, - 2.57528, - 2.49208, - 2.48911, - 2.63947, - 2.51962, - 2.46058, - 2.50496, - 2.56047, - 2.50229, - 2.52409, - 2.5273, - 2.54956, - 2.55625, - 2.54374, - 2.52165, - 2.48175, - 2.57167, - 2.56448, - 2.50733, - 2.55954, - 2.53072, - 2.51991, - 2.51214, - 2.58552, - 2.47838, - 2.56448, - 2.52481, - 2.50555, - 2.49014, - 2.55007, - 2.55401, - 2.51096, - 2.55744, - 2.56583, - 2.51184, - 2.53594, - 2.53344, - 2.47268, - 2.53568, - 2.51197, - 2.56462, - 2.53845, - 2.50893, - 2.53091, - 2.54488, - 2.53861, - 2.56976, - 2.52347, - 2.52186, - 2.48405, - 2.5714, - 2.53902, - 2.56134, - 2.49359, - 2.49513, - 2.5278, - 2.53223, - 2.45371, - 2.55331, - 2.53556, - 2.56111, - 2.51521, - 2.49776, - 2.45491, - 2.54416, - 2.49937, - 2.53734, - 2.56064, - 2.54502, 
- 2.43262, - 2.52998, - 2.49131, - 2.53937, - 2.45889, - 2.45812, - 2.5329, - 2.46925, - 2.53378, - 2.51476, - 2.44329, - 2.50191, - 2.59317, - 2.56486, - 2.52811, - 2.46905, - 2.53522, - 2.51229, - 2.47238, - 2.59919, - 2.56517, - 2.51386, - 2.52101, - 2.50209, - 2.56061, - 2.55957, - 2.5346, - 2.55247, - 2.56498, - 2.54012, - 2.54842, - 2.58767, - 2.52982, - 2.43828, - 2.55407, - 2.47761, - 2.49028, - 2.50474, - 2.54748, - 2.53365, - 2.50861, - 2.46424, - 2.50986, - 2.45849, - 2.45363, - 2.51416, - 2.53037, - 2.53185, - 2.47771, - 2.46415, - 2.54037, - 2.49347, - 2.56565, - 2.48657, - 2.48515, - 2.49086, - 2.48235, - 2.48662, - 2.51988, - 2.4533, - 2.59623, - 2.54791, - 2.48602, - 2.55049, - 2.57616, - 2.47121, - 2.57921, - 2.48412, - 2.51028, - 2.48415, - 2.47141, - 2.56888, - 2.49364, - 2.51247, - 2.50614, - 2.4496, - 2.4561, - 2.53052, - 2.48028, - 2.54659, - 2.48437, - 2.52207, - 2.46704, - 2.49094, - 2.5086, - 2.52494, - 2.50704, - 2.4743, - 2.52148, - 2.47393, - 2.47473, - 2.50914, - 2.45272, - 2.42524, - 2.55252, - 2.45336, - 2.54388, - 2.52111, - 2.49833, - 2.47948, - 2.48883, - 2.52313, - 2.3921, - 2.44072, - 2.46335, - 2.5059, - 2.49504, - 2.50137, - 2.45563, - 2.45945, - 2.51307, - 2.47799, - 2.45586, - 2.47137, - 2.55418, - 2.46642, - 2.49773, - 2.50209, - 2.57988, - 2.44636, - 2.5325, - 2.53913, - 2.51121, - 2.44555, - 2.48821, - 2.5053, - 2.51159, - 2.44676, - 2.52829, - 2.55339, - 2.46706, - 2.51902, - 2.56035, - 2.53526, - 2.44858, - 2.44197, - 2.44784, - 2.52702, - 2.49211, - 2.51124, - 2.48739, - 2.48838, - 2.42239, - 2.50735, - 2.48765, - 2.53528, - 2.47403, - 2.47126, - 2.40944, - 2.45306, - 2.4385, - 2.55269, - 2.44388, - 2.52225, - 2.52264, - 2.52474, - 2.41298, - 2.4527, - 2.52612, - 2.48551, - 2.51101, - 2.56463, - 2.44662, - 2.53841, - 2.62289, - 2.50929, - 2.48694, - 2.4675, - 2.50383, - 2.48539, - 2.4656, - 2.43423, - 2.43326, - 2.46717, - 2.43426, - 2.49763, - 2.48805, - 2.41894, - 2.50256, - 2.50097, - 2.54449, - 2.53517, - 2.48893, - 2.55221, - 2.49779, - 2.49037, - 2.50485, - 2.46928, - 2.45018, - 2.44296, - 2.54036, - 2.50816, - 2.43497, - 2.44359, - 2.59455, - 2.51341, - 2.44948, - 2.47583, - 2.51782, - 2.40125, - 2.51056, - 2.52343, - 2.53308, - 2.4524, - 2.4995, - 2.46437, - 2.50152, - 2.41373, - 2.46085, - 2.54979, - 2.48368, - 2.49061, - 2.4516, - 2.51717, - 2.5328, - 2.4438, - 2.50285, - 2.44912, - 2.38315, - 2.43396, - 2.50824, - 2.44129, - 2.41037, - 2.48145, - 2.50363, - 2.37905, - 2.45995, - 2.46084, - 2.44395, - 2.48107, - 2.43907, - 2.47561, - 2.47779, - 2.48287, - 2.56597, - 2.48416, - 2.43324, - 2.51114, - 2.53984, - 2.41456, - 2.45317, - 2.44444, - 2.48929, - 2.49083, - 2.44818, - 2.47185, - 2.43723, - 2.55823, - 2.54137, - 2.45373, - 2.44897, - 2.44649, - 2.485, - 2.47959, - 2.40037, - 2.43593, - 2.46117, - 2.46449, - 2.47129, - 2.44506, - 2.51655, - 2.50383, - 2.51861, - 2.5298, - 2.46658, - 2.49133, - 2.47009, - 2.40181, - 2.45433, - 2.52508, - 2.53393, - 2.42816, - 2.44758, - 2.48871, - 2.50509, - 2.54517, - 2.44175, - 2.48583, - 2.506, - 2.41778, - 2.48236, - 2.47385, - 2.45025, - 2.42938, - 2.44768, - 2.49538, - 2.41138, - 2.44096, - 2.55329, - 2.51881, - 2.5045, - 2.49193, - 2.48855, - 2.44205, - 2.52298, - 2.50699, - 2.41615, - 2.39718, - 2.50678, - 2.41029, - 2.48705, - 2.50058, - 2.5181, - 2.48285, - 2.52447, - 2.56393, - 2.48324, - 2.57286, - 2.47213, - 2.45422, - 2.49593, - 2.46208, - 2.42037, - 2.48634, - 2.4893, - 2.47901, - 2.44354, - 2.49694, - 2.52512, - 2.50591, - 2.46428, - 2.42898, - 2.48041, - 2.5037, - 2.49226, - 
2.49609, - 2.4008, - 2.43324, - 2.54186, - 2.47446, - 2.49677, - 2.48796, - 2.34877, - 2.47584, - 2.45474, - 2.45576, - 2.44953, - 2.47731, - 2.53344, - 2.46746, - 2.41117, - 2.43148, - 2.49897, - 2.43484, - 2.36097, - 2.45879, - 2.39436, - 2.456, - 2.47828, - 2.5278, - 2.45388, - 2.5169, - 2.44678, - 2.43361, - 2.47447, - 2.43904, - 2.44716, - 2.41444, - 2.47599, - 2.48082, - 2.47923, - 2.48797, - 2.43862, - 2.46833, - 2.49863, - 2.43985, - 2.41255, - 2.51604, - 2.4771, - 2.44459, - 2.45696, - 2.4569, - 2.42946, - 2.43607, - 2.47287, - 2.50773, - 2.45398, - 2.42438, - 2.42476, - 2.49932, - 2.43083, - 2.56139, - 2.39153, - 2.42377, - 2.4326, - 2.47275, - 2.37569, - 2.43639, - 2.48065, - 2.37779, - 2.39973, - 2.47236, - 2.52, - 2.42616, - 2.42471, - 2.41076, - 2.42168, - 2.37664, - 2.49429, - 2.49674, - 2.40823, - 2.42678, - 2.39898, - 2.4886, - 2.46728, - 2.45683, - 2.41069, - 2.48299, - 2.44732, - 2.44496, - 2.48252, - 2.49997, - 2.43768, - 2.43672, - 2.46574, - 2.3854, - 2.44129, - 2.45887, - 2.47777, - 2.41973, - 2.48464, - 2.45327, - 2.43424, - 2.47941, - 2.43311, - 2.33966, - 2.38103, - 2.41504, - 2.43436, - 2.4045, - 2.39855, - 2.41776, - 2.48139, - 2.39193, - 2.40106, - 2.56399, - 2.41142, - 2.46308, - 2.42983, - 2.44596, - 2.45258, - 2.46746, - 2.47742, - 2.52757, - 2.4501, - 2.46035, - 2.44079, - 2.44111, - 2.45808, - 2.44631, - 2.44144, - 2.49393, - 2.45404, - 2.471, - 2.42071, - 2.35502, - 2.3958, - 2.39963, - 2.4572, - 2.48439, - 2.44288, - 2.45428, - 2.45226, - 2.44871, - 2.42287, - 2.41821, - 2.31632, - 2.41892, - 2.45868, - 2.46317, - 2.37192, - 2.43773, - 2.47889, - 2.44095, - 2.45007, - 2.428, - 2.45152, - 2.37038, - 2.46866, - 2.48546, - 2.42577, - 2.37846, - 2.36839, - 2.42522, - 2.43037, - 2.49233, - 2.45342, - 2.34117, - 2.45867, - 2.48703, - 2.41528, - 2.39737, - 2.49851, - 2.43516, - 2.46851, - 2.43343, - 2.50841, - 2.43086, - 2.36646, - 2.43614, - 2.41312, - 2.40969, - 2.42721, - 2.44625, - 2.51612, - 2.45477, - 2.44079, - 2.47306, - 2.47038, - 2.43168, - 2.45239, - 2.47242, - 2.44754, - 2.48656, - 2.47418, - 2.4529, - 2.44918, - 2.47144, - 2.48287, - 2.45669, - 2.44199, - 2.45045, - 2.44441, - 2.43335, - 2.44748, - 2.46681, - 2.38271, - 2.49157, - 2.43675, - 2.46981, - 2.44239, - 2.50267, - 2.48553, - 2.49532, - 2.41873, - 2.41314, - 2.52626, - 2.37738, - 2.39934, - 2.36168, - 2.38334, - 2.3858, - 2.47889, - 2.40401, - 2.43927, - 2.42859, - 2.469, - 2.40495, - 2.41213, - 2.4513, - 2.43545, - 2.34913, - 2.3702, - 2.49439, - 2.50536, - 2.44142, - 2.36121, - 2.42158, - 2.37616, - 2.42401, - 2.3981, - 2.40368, - 2.40608, - 2.47441, - 2.45675, - 2.41427, - 2.49563, - 2.46106, - 2.39419, - 2.42108, - 2.4423, - 2.36174, - 2.37434, - 2.3965, - 2.39443, - 2.44809, - 2.40736, - 2.37544, - 2.39908, - 2.45371, - 2.42536, - 2.49582, - 2.43336, - 2.42806, - 2.4212, - 2.42195, - 2.48005, - 2.44504, - 2.43007, - 2.41469, - 2.43028, - 2.45316, - 2.35527, - 2.34749, - 2.36553, - 2.48738, - 2.48837, - 2.43901, - 2.45175, - 2.46795, - 2.43799, - 2.38591, - 2.41503, - 2.47782, - 2.33491, - 2.35122, - 2.43248, - 2.37952, - 2.46948, - 2.39154, - 2.40167, - 2.47075, - 2.47539, - 2.43312, - 2.43355, - 2.35798, - 2.42126, - 2.39522, - 2.44165, - 2.53409, - 2.47253, - 2.43564, - 2.48541, - 2.52605, - 2.37531, - 2.45193, - 2.43064, - 2.33368, - 2.44635, - 2.3883, - 2.45411, - 2.43913, - 2.42699, - 2.4177, - 2.41209, - 2.40784, - 2.43533, - 2.38979, - 2.38954, - 2.35591, - 2.41254, - 2.47589, - 2.43805, - 2.45715, - 2.45687, - 2.42219, - 2.48148, - 2.38397, - 2.412, - 2.43789, - 2.39362, - 
2.46996, - 2.44852, - 2.43945, - 2.51257, - 2.44708, - 2.36097, - 2.41921, - 2.50602, - 2.46708, - 2.35945, - 2.43047, - 2.43858, - 2.40356, - 2.46117, - 2.39015, - 2.43291, - 2.40733, - 2.49934, - 2.45005, - 2.42611, - 2.39203, - 2.46268, - 2.46522, - 2.4188, - 2.45776, - 2.48719, - 2.42071, - 2.37559, - 2.45621, - 2.38608, - 2.3375, - 2.39569, - 2.41607, - 2.41182, - 2.45551, - 2.37164, - 2.43465, - 2.38836, - 2.29337, - 2.42226, - 2.38456, - 2.39973, - 2.36451, - 2.43549, - 2.39703, - 2.46514, - 2.34092, - 2.3686, - 2.36638, - 2.3963, - 2.38741, - 2.322, - 2.4522, - 2.42296, - 2.3946, - 2.35307, - 2.47029, - 2.44564, - 2.46324, - 2.38634, - 2.42638, - 2.39866, - 2.32799, - 2.427, - 2.34351, - 2.43408, - 2.41638, - 2.47459, - 2.36144, - 2.38345, - 2.40518, - 2.39887, - 2.38547, - 2.43809, - 2.43649, - 2.41806, - 2.34737, - 2.39533, - 2.44806, - 2.37867, - 2.34808, - 2.4283, - 2.3994, - 2.38463, - 2.33297, - 2.45357, - 2.39041, - 2.37299, - 2.37114, - 2.47348, - 2.4324, - 2.38278, - 2.387, - 2.38894, - 2.3825, - 2.36569, - 2.3973, - 2.4538, - 2.39107, - 2.35772, - 2.40367, - 2.47927, - 2.40236, - 2.41206, - 2.41355, - 2.40457, - 2.36882, - 2.46935, - 2.40173, - 2.47172, - 2.42129, - 2.39868, - 2.35595, - 2.45532, - 2.46093, - 2.41247, - 2.39015, - 2.43603, - 2.38937, - 2.38167, - 2.35432, - 2.39596, - 2.45203, - 2.44817, - 2.43994, - 2.40765, - 2.47365, - 2.37336, - 2.43105, - 2.2874, - 2.47444, - 2.44809, - 2.38903, - 2.42847, - 2.43097, - 2.42105, - 2.36719, - 2.41405, - 2.45951, - 2.42072, - 2.39682, - 2.43415, - 2.47979, - 2.38059, - 2.38185, - 2.36539, - 2.37576, - 2.4104, - 2.34443, - 2.40225, - 2.4358, - 2.39576, - 2.38854, - 2.31644, - 2.39867, - 2.46033, - 2.38285, - 2.40998, - 2.3774, - 2.43852, - 2.37564, - 2.39266, - 2.43871, - 2.3981, - 2.34756, - 2.38106, - 2.44591, - 2.45643, - 2.33291, - 2.45392, - 2.36207, - 2.3989, - 2.38159, - 2.46144, - 2.3897, - 2.39159, - 2.38726, - 2.40366, - 2.39406, - 2.40143, - 2.32614, - 2.34314, - 2.38278, - 2.3639, - 2.36335, - 2.47772, - 2.48295, - 2.3424, - 2.40592, - 2.42125, - 2.38847, - 2.4326, - 2.38761, - 2.32859, - 2.38169, - 2.3917, - 2.39386, - 2.4567, - 2.39554, - 2.35668, - 2.42333, - 2.35512, - 2.3518, - 2.37154, - 2.38232, - 2.34516, - 2.42604, - 2.39911, - 2.39493, - 2.37223, - 2.37286, - 2.39589, - 2.35676, - 2.31851, - 2.36512, - 2.39574, - 2.37361, - 2.37608, - 2.38294, - 2.40001, - 2.43503, - 2.34914, - 2.41414, - 2.2842, - 2.40146, - 2.361, - 2.3575, - 2.36846, - 2.41704, - 2.3053, - 2.37741, - 2.43156, - 2.42723, - 2.37159, - 2.36045, - 2.36558, - 2.33395, - 2.44232, - 2.35623, - 2.43426, - 2.46154, - 2.39019, - 2.33971, - 2.38337, - 2.37051, - 2.32992, - 2.32513, - 2.34353, - 2.35053, - 2.34599, - 2.37815, - 2.36871, - 2.36244, - 2.38412, - 2.42166, - 2.41477, - 2.41588, - 2.31442, - 2.36525, - 2.42305, - 2.42509, - 2.38108, - 2.48414, - 2.4747, - 2.36735, - 2.4386, - 2.37478, - 2.44656, - 2.45512, - 2.36073, - 2.38947, - 2.37061, - 2.37254, - 2.3647, - 2.38957, - 2.32266, - 2.41707, - 2.37172, - 2.32196, - 2.44195, - 2.35164, - 2.37721, - 2.45974, - 2.40125, - 2.37919, - 2.40121, - 2.40656, - 2.40431, - 2.39828, - 2.36251, - 2.34878, - 2.3192, - 2.36455, - 2.33588, - 2.4067, - 2.39346, - 2.37477, - 2.35897, - 2.38503, - 2.41422, - 2.40102, - 2.38295, - 2.35731, - 2.34536, - 2.36943, - 2.34382, - 2.38457, - 2.41553, - 2.41011, - 2.33812, - 2.39173, - 2.38359, - 2.46877, - 2.35994, - 2.31356, - 2.35452, - 2.44076, - 2.35765, - 2.31413, - 2.36351, - 2.40812, - 2.37623, - 2.37268, - 2.41153, - 2.3828, - 2.36721, - 2.35975, - 
2.41003, - 2.42775, - 2.38805, - 2.39763, - 2.33671, - 2.30849, - 2.43196, - 2.40053, - 2.40498, - 2.37281, - 2.33895, - 2.38814, - 2.38709, - 2.29562, - 2.40552, - 2.42674, - 2.28353, - 2.36709, - 2.38747, - 2.43536, - 2.28574, - 2.31932, - 2.33256, - 2.36615, - 2.3509, - 2.3465, - 2.33666, - 2.40038, - 2.42856, - 2.47235, - 2.32582, - 2.32998, - 2.40834, - 2.32001, - 2.3429, - 2.33184, - 2.35229, - 2.31496, - 2.35778, - 2.39379, - 2.30153, - 2.36632, - 2.3553, - 2.3968, - 2.30229, - 2.31862, - 2.38492, - 2.31996, - 2.40791, - 2.36851, - 2.33387, - 2.44133, - 2.36085, - 2.37109, - 2.32835, - 2.36442, - 2.41246, - 2.32801, - 2.33578, - 2.36342, - 2.38694, - 2.39458, - 2.39053, - 2.32132, - 2.34338, - 2.36383, - 2.43567, - 2.33884, - 2.40508, - 2.40711, - 2.40748, - 2.36651, - 2.45448, - 2.3411, - 2.3412, - 2.33847, - 2.29466, - 2.31834, - 2.33244, - 2.3318, - 2.34817, - 2.40952, - 2.37413, - 2.29033, - 2.38039, - 2.40061, - 2.38755, - 2.36713, - 2.3198, - 2.4009, - 2.37644, - 2.35729, - 2.33856, - 2.35551, - 2.31243, - 2.42418, - 2.35016, - 2.43423, - 2.40236, - 2.38754, - 2.41432, - 2.34497, - 2.38432, - 2.30964, - 2.3525, - 2.33479, - 2.41182, - 2.38985, - 2.41635, - 2.33682, - 2.43021, - 2.40384, - 2.34395, - 2.34698, - 2.39516, - 2.37112, - 2.33876, - 2.41652, - 2.34647, - 2.35761, - 2.43094, - 2.44124, - 2.32344, - 2.33098, - 2.38679, - 2.39217, - 2.38827, - 2.40402, - 2.36627, - 2.28741, - 2.36463, - 2.42916, - 2.28997, - 2.31332, - 2.32435, - 2.35909, - 2.34945, - 2.34203, - 2.36253, - 2.35494, - 2.30765, - 2.40377, - 2.39861, - 2.37706, - 2.34076, - 2.35282, - 2.33144, - 2.41193, - 2.41147, - 2.38108, - 2.392, - 2.424, - 2.32085, - 2.31582, - 2.31409, - 2.33267, - 2.35492, - 2.30452, - 2.35681, - 2.34307, - 2.42982, - 2.3299, - 2.37047, - 2.3758, - 2.37116, - 2.31265, - 2.37924, - 2.27602, - 2.36165, - 2.30245, - 2.35583, - 2.33128, - 2.37524, - 2.38862, - 2.28755, - 2.35508, - 2.40703, - 2.36397, - 2.39604, - 2.40241, - 2.35316, - 2.33623, - 2.40125, - 2.39651, - 2.36906, - 2.33148, - 2.31936, - 2.2974, - 2.33415, - 2.37516, - 2.40411, - 2.3965, - 2.33992, - 2.36064, - 2.374, - 2.33443, - 2.3703, - 2.3093, - 2.36726, - 2.38026, - 2.38113, - 2.33188, - 2.38845, - 2.31522, - 2.40702, - 2.32157, - 2.33237, - 2.40476, - 2.37072, - 2.3135, - 2.37444, - 2.40814, - 2.35038, - 2.32054, - 2.37754, - 2.41123, - 2.37526, - 2.37334, - 2.39234, - 2.33352, - 2.35454, - 2.34671, - 2.278, - 2.35701, - 2.31809, - 2.38648, - 2.37654, - 2.27011, - 2.3956, - 2.30964, - 2.35322, - 2.39058, - 2.3514, - 2.29601, - 2.40887, - 2.39479, - 2.38717, - 2.32845, - 2.32749, - 2.42149, - 2.35133, - 2.36205, - 2.36705, - 2.38024, - 2.27276, - 2.33031, - 2.39015, - 2.35107, - 2.37211, - 2.32647, - 2.34067, - 2.34266, - 2.34768, - 2.35381, - 2.29817, - 2.3358, - 2.35753, - 2.33894, - 2.34174, - 2.30702, - 2.37089, - 2.4002, - 2.36714, - 2.34439, - 2.38029, - 2.31557, - 2.31868, - 2.36817, - 2.3062, - 2.34969, - 2.3862, - 2.31742, - 2.37374, - 2.33592, - 2.30795, - 2.33078, - 2.30363, - 2.37755, - 2.32173, - 2.24658, - 2.38106, - 2.29931, - 2.40289, - 2.28121, - 2.3664, - 2.28871, - 2.25222, - 2.36338, - 2.33597, - 2.30395, - 2.33398, - 2.29544, - 2.35347, - 2.34537, - 2.39536, - 2.34465, - 2.36671, - 2.32264, - 2.29473, - 2.34713, - 2.35198, - 2.35651, - 2.32595, - 2.41528, - 2.42511, - 2.34961, - 2.36901, - 2.41455, - 2.35649, - 2.20305, - 2.37859, - 2.26474, - 2.30328, - 2.32076, - 2.32295, - 2.36271, - 2.33805, - 2.33653, - 2.35248, - 2.41576, - 2.35631, - 2.29582, - 2.30227, - 2.30052, - 2.37779, - 2.31777, - 
2.30457, - 2.33778, - 2.33725, - 2.38799, - 2.32624, - 2.35793, - 2.21489, - 2.3568, - 2.34665, - 2.37795, - 2.34979, - 2.33138, - 2.35222, - 2.33497, - 2.31229, - 2.32785, - 2.31261, - 2.34641, - 2.30966, - 2.33011, - 2.3203, - 2.35829, - 2.39546, - 2.29829, - 2.36049, - 2.28997, - 2.32363, - 2.36086, - 2.28007, - 2.29862, - 2.28738, - 2.32796, - 2.28469, - 2.37557, - 2.35971, - 2.34856, - 2.33371, - 2.43035, - 2.3364, - 2.34784, - 2.32915, - 2.45303, - 2.26319, - 2.27797, - 2.35049, - 2.30604, - 2.39091, - 2.38856, - 2.32811, - 2.3586, - 2.3763, - 2.40737, - 2.42468, - 2.29717, - 2.38079, - 2.33199, - 2.2844, - 2.35656, - 2.23873, - 2.32868, - 2.31588, - 2.38177, - 2.32162, - 2.37505, - 2.36034, - 2.39087, - 2.35306, - 2.3138, - 2.31102, - 2.3395, - 2.32402, - 2.28041, - 2.27591, - 2.27592, - 2.43852, - 2.3236, - 2.34216, - 2.33443, - 2.31428, - 2.3246, - 2.32937, - 2.31187, - 2.35044, - 2.33839, - 2.39611, - 2.32738, - 2.325, - 2.28703, - 2.34692, - 2.36431, - 2.35307, - 2.30053, - 2.25565, - 2.3464, - 2.3976, - 2.29805, - 2.36602, - 2.35222, - 2.41203, - 2.29111, - 2.39338, - 2.38202, - 2.28533, - 2.31149, - 2.3994, - 2.31048, - 2.32986, - 2.32638, - 2.2965, - 2.28237, - 2.34284, - 2.25593, - 2.32466, - 2.33789, - 2.38439, - 2.35992, - 2.32567, - 2.38335, - 2.36934, - 2.34376, - 2.31668, - 2.32295, - 2.37287, - 2.3162, - 2.30218, - 2.27904, - 2.32526, - 2.29081, - 2.26775, - 2.35042, - 2.33598, - 2.39387, - 2.27399, - 2.33851, - 2.31339, - 2.25865, - 2.30557, - 2.28222, - 2.31588, - 2.37114, - 2.33603, - 2.38974, - 2.31124, - 2.31247, - 2.38898, - 2.36064, - 2.3793, - 2.26656, - 2.38434, - 2.35168, - 2.37874, - 2.28458, - 2.34536, - 2.36558, - 2.38075, - 2.35071, - 2.35047, - 2.29922, - 2.28976, - 2.34538, - 2.38151, - 2.29953, - 2.34682, - 2.29819, - 2.32651, - 2.31358, - 2.37483, - 2.2137, - 2.38919, - 2.28122, - 2.35157, - 2.38775, - 2.36373, - 2.34145, - 2.35998, - 2.37029, - 2.34652, - 2.30105, - 2.36501, - 2.25023, - 2.30257, - 2.28682, - 2.34696, - 2.35959, - 2.309, - 2.30905, - 2.372, - 2.35475, - 2.29397, - 2.3221, - 2.32319, - 2.32089, - 2.31318, - 2.29314, - 2.29082, - 2.2888, - 2.32099, - 2.31974, - 2.32944, - 2.32869, - 2.26575, - 2.34882, - 2.33387, - 2.29807, - 2.34745, - 2.27568, - 2.3765, - 2.34131, - 2.38432, - 2.31787, - 2.3129, - 2.3479, - 2.34492, - 2.31494, - 2.33812, - 2.36501, - 2.27056, - 2.34073, - 2.31151, - 2.27308, - 2.36842, - 2.34132, - 2.3584, - 2.29073, - 2.27972, - 2.32033, - 2.28428, - 2.30867, - 2.32251, - 2.30674, - 2.3487, - 2.40238, - 2.31657, - 2.31371, - 2.36587, - 2.28718, - 2.39406, - 2.24531, - 2.27121, - 2.35616, - 2.35022, - 2.37819, - 2.38128, - 2.28521, - 2.28675, - 2.34507, - 2.3157, - 2.31316, - 2.39692, - 2.32902, - 2.38607, - 2.34733, - 2.3356, - 2.36899, - 2.3109, - 2.31256, - 2.34217, - 2.30109, - 2.26033, - 2.28311, - 2.33036, - 2.3561, - 2.30822, - 2.23943, - 2.30454, - 2.24015, - 2.34933, - 2.30544, - 2.29913, - 2.27381, - 2.301, - 2.3102, - 2.31376, - 2.32089, - 2.39854, - 2.32713, - 2.31341, - 2.34682, - 2.32585, - 2.25769, - 2.28464, - 2.35967, - 2.29777, - 2.34915, - 2.33855, - 2.30143, - 2.31598, - 2.27136, - 2.38314, - 2.30828, - 2.32727, - 2.27975, - 2.33638, - 2.33695, - 2.25556, - 2.27118, - 2.36187, - 2.32948, - 2.31856, - 2.31782, - 2.31759, - 2.32257, - 2.32951, - 2.32422, - 2.25847, - 2.3022, - 2.22775, - 2.31743, - 2.24807, - 2.34732, - 2.36938, - 2.26449, - 2.3781, - 2.34702, - 2.31158, - 2.32228, - 2.30409, - 2.3017, - 2.35076, - 2.3339, - 2.25519, - 2.26083, - 2.34709, - 2.32374, - 2.31691, - 2.31619, - 2.43835, 
[... long run of removed per-step loss values (a loss curve falling from roughly 2.41 to 1.94), flattened out of the original diff hunk; elided here as no further structure is recoverable ...]
2.01223, - 2.02784, - 2.04446, - 2.05916, - 2.11052, - 2.09213, - 1.99841, - 1.9766, - 2.04458, - 1.99501, - 2.10247, - 2.066, - 2.02093, - 1.98519, - 2.10046, - 2.02259, - 2.0452, - 2.04717, - 2.0968, - 1.99128, - 1.99461, - 2.04492, - 2.08868, - 1.99449, - 2.05135, - 2.04986, - 2.06184, - 2.03039, - 2.03804, - 2.0274, - 2.02479, - 2.0313, - 2.03745, - 2.04138, - 2.02565, - 2.05005, - 2.06094, - 1.9984, - 2.08405, - 2.11242, - 2.08307, - 2.03924, - 2.08906, - 2.04133, - 2.05965, - 2.02815, - 2.02263, - 2.0009, - 2.00766, - 2.04237, - 2.04047, - 2.08929, - 2.04549, - 1.95894, - 2.05369, - 2.01792, - 2.07557, - 2.02753, - 2.04762, - 1.96677, - 2.01277, - 2.0046, - 2.05989, - 2.02114, - 2.05902, - 2.04022, - 1.99867, - 1.98075, - 2.04126, - 2.03787, - 2.0874, - 2.063, - 2.04377, - 2.04205, - 2.05737, - 1.98219, - 2.06904, - 2.04775, - 2.06803, - 2.01797, - 2.039, - 2.03651, - 2.11954, - 2.06176, - 2.09317, - 2.02388, - 1.99481, - 2.0153, - 2.08242, - 2.05532, - 2.02236, - 2.00758, - 2.04008, - 2.05073, - 1.99605, - 2.02382, - 2.10455, - 1.97817, - 2.04235, - 2.02687, - 2.00991, - 2.02168, - 2.05494, - 2.0512, - 2.05067, - 2.00786, - 2.06875, - 2.0224, - 2.06234, - 2.00912, - 2.09214, - 1.95324, - 2.02738, - 2.08275, - 2.02254, - 2.0369, - 2.05405, - 2.02959, - 2.05703, - 1.99223, - 2.07428, - 2.02973, - 1.97431, - 2.061, - 2.07873, - 2.01556, - 1.98274, - 2.06137, - 2.00247, - 2.0947, - 2.01852, - 2.01967, - 1.94124, - 2.06542, - 2.04619, - 2.04536, - 2.01331, - 2.04072, - 1.99667, - 2.018, - 2.10627, - 2.00543, - 2.06958, - 2.10232, - 2.01031, - 2.01484, - 2.05005, - 2.08926, - 1.99118, - 2.07571, - 2.0442, - 2.01177, - 2.04327, - 2.03287, - 2.08929, - 2.03896, - 2.03296, - 2.05071, - 2.00438, - 1.993, - 2.04854, - 2.01181, - 2.06205, - 2.01158, - 2.00008, - 2.01962, - 2.05425, - 2.04649, - 2.01251, - 2.13246, - 2.02078, - 1.96197, - 1.98832, - 2.03155, - 2.04205, - 2.02571, - 2.03448, - 2.03671, - 1.98112, - 2.07774, - 2.00172, - 1.99759, - 2.10468, - 1.9926, - 2.04203, - 2.04605, - 2.08304, - 1.99226, - 2.01744, - 2.05274, - 2.01254, - 1.98196, - 2.04995, - 2.00141, - 2.02619, - 1.97542, - 2.01756, - 2.05893, - 2.03685, - 2.04299, - 2.03363, - 2.04344, - 2.05253, - 2.04273, - 2.049, - 2.04465, - 2.06437, - 2.05469, - 2.01664, - 2.0528, - 2.03139, - 2.03358, - 2.00775, - 2.13464, - 2.08799, - 1.99273, - 2.03076, - 2.05424, - 2.02467, - 1.99377, - 2.06463, - 2.00243, - 2.04052, - 2.01414, - 1.99525, - 1.98163, - 1.9722, - 2.0066, - 2.02137, - 1.95982, - 2.05045, - 1.96512, - 2.08604, - 2.00693, - 2.04563, - 1.99637, - 2.02522, - 1.95063, - 2.01126, - 1.99196, - 1.96953, - 2.00673, - 2.11076, - 2.05141, - 2.05908, - 2.03717, - 2.06208, - 1.98347, - 2.04901, - 2.08991, - 2.06519, - 1.94892, - 2.07483, - 2.04106, - 2.0238, - 2.04959, - 2.01121, - 2.03226, - 1.97948, - 2.02006, - 1.98296, - 2.00407, - 2.02294, - 1.99481, - 2.06786, - 2.01331, - 2.06993, - 2.04081, - 1.97166, - 1.96785, - 2.04559, - 1.99974, - 1.98193, - 2.09427, - 2.05862, - 2.06364, - 2.04382, - 2.07245, - 1.97886, - 2.08746, - 2.02099, - 2.0504, - 2.00904, - 2.06181, - 2.03075, - 2.05166, - 2.02199, - 2.06201, - 1.97316, - 2.10181, - 2.01546, - 2.07818, - 2.01619, - 2.07721, - 2.04741, - 2.07659, - 2.02654, - 2.06533, - 2.08106, - 1.98971, - 1.9816, - 2.02453, - 2.10511, - 1.99992, - 2.03092, - 1.95937, - 1.99368, - 2.05773, - 2.02116, - 1.98536, - 2.01015, - 2.10459, - 2.03902, - 2.03918, - 2.03325, - 2.01775, - 2.00205, - 2.04061, - 2.06224, - 2.04991, - 2.13514, - 2.05253, - 2.04615, - 2.01691, - 1.9955, - 2.05995, - 
2.10562, - 2.03446, - 1.98969, - 2.05353, - 1.92862, - 2.07712, - 2.02195, - 2.03035, - 2.0617, - 2.04521, - 2.11582, - 2.03336, - 2.1062, - 1.97303, - 2.04044, - 1.97689, - 1.96544, - 2.06958, - 2.07703, - 2.0125, - 2.02929, - 2.04616, - 2.08024, - 1.99276, - 2.03152, - 2.04875, - 2.06501, - 2.04279, - 2.01695, - 2.00081, - 2.01705, - 2.10031, - 2.0991, - 1.99026, - 2.02798, - 2.03765, - 2.04349, - 2.0691, - 1.99352, - 1.96085, - 2.05949, - 1.98782, - 2.00053, - 2.04778, - 2.01161, - 2.0263, - 2.04023, - 2.09427, - 2.0425, - 2.05877, - 2.01403, - 2.02845, - 1.99665, - 2.02719, - 1.98273, - 2.03832, - 2.02678, - 2.05003, - 2.09428, - 1.99382, - 2.01616, - 2.02085, - 2.01399, - 2.05093, - 2.08196, - 2.0974, - 2.00954, - 2.0579, - 2.00367, - 2.04651, - 2.00061, - 1.99142, - 2.09523, - 2.06945, - 1.98428, - 2.05986, - 2.05129, - 1.9787, - 2.04062, - 2.07625, - 2.03406, - 1.98366, - 2.00276, - 2.04209, - 1.99034, - 2.04436, - 2.01854, - 2.07582, - 2.02472, - 2.01564, - 2.04766, - 2.0021, - 2.02958, - 2.06718, - 2.0269, - 2.0562, - 1.98415, - 2.10495, - 2.07558, - 1.97873, - 2.06828, - 2.07391, - 2.04666, - 2.08702, - 2.00299, - 2.03966, - 1.90193, - 2.00991, - 1.96801, - 2.03322, - 2.05742, - 2.08016, - 2.00009, - 2.01803, - 2.05561, - 2.04927, - 2.00996, - 2.07946, - 1.99202, - 2.05029, - 2.05601, - 1.99476, - 2.03286, - 2.08657, - 1.99633, - 2.02739, - 1.98202, - 2.10259, - 1.99573, - 2.00333, - 2.04982, - 2.05528, - 1.99594, - 2.03069, - 2.07108, - 2.0565, - 2.0293, - 2.06936, - 2.05684, - 2.07113, - 2.05184, - 2.05938, - 2.06232, - 2.00901, - 2.0264, - 2.01848, - 2.00885, - 2.04134, - 1.93906, - 2.08677, - 2.02942, - 2.00517, - 2.01085, - 2.00384, - 2.01917, - 2.01199, - 1.99907, - 1.9842, - 1.98772, - 2.05759, - 2.0756, - 2.04736, - 2.04841, - 2.06533, - 2.02209, - 1.95722, - 2.05277, - 2.03147, - 2.01122, - 2.04154, - 1.99118, - 2.02905, - 2.01992, - 2.05153, - 2.00151, - 2.04448, - 2.01624, - 2.03142, - 2.07705, - 1.98829, - 2.05905, - 2.00661, - 2.04719, - 2.04164, - 1.94409, - 2.04687, - 1.99531, - 2.0431, - 1.96737, - 2.08512, - 2.00398, - 2.03257, - 2.04067, - 2.06084, - 2.05831, - 2.05144, - 2.0378, - 1.98551, - 2.00189, - 2.03009, - 1.99709, - 2.02987, - 2.07721, - 2.00797, - 1.98894, - 2.0588, - 1.96312, - 2.03794, - 1.99722, - 2.08, - 2.05966, - 2.00908, - 1.98005, - 1.98886, - 1.99833, - 2.03177, - 1.99676, - 2.06761, - 2.06546, - 1.99675, - 2.00105, - 2.0126, - 2.01483, - 2.03515, - 2.07148, - 2.04988, - 2.02312, - 2.02478, - 2.0675, - 2.00915, - 2.03448, - 2.00931, - 1.96812, - 2.09029, - 2.00158, - 2.02548, - 1.96033, - 2.05469, - 2.08831, - 2.10054, - 2.05097, - 2.06478, - 1.93357, - 1.9862, - 2.03489, - 2.00182, - 1.99074, - 2.05095, - 2.02907, - 1.95065, - 2.04738, - 1.97365, - 2.05899, - 2.01042, - 2.00248, - 1.91584, - 2.02787, - 2.029, - 2.02843, - 1.97224, - 1.98028, - 1.97923, - 2.0349, - 1.97383, - 1.96711, - 2.00871, - 2.04652, - 2.01933, - 2.01334, - 2.02175, - 2.04653, - 2.00607, - 2.12906, - 1.99195, - 2.03293, - 2.07709, - 2.00835, - 1.98402, - 2.02952, - 2.06772, - 2.05982, - 2.05761, - 1.99813, - 2.0301, - 2.01908, - 1.98472, - 2.01914, - 2.08002, - 2.03777, - 2.05484, - 2.04266, - 2.07644, - 2.01995, - 2.00252, - 2.01765, - 2.01819, - 2.01961, - 2.02911, - 1.988, - 2.08838, - 2.0543, - 2.03986, - 2.04175, - 2.11259, - 2.02308, - 2.11121, - 2.00928, - 1.97019, - 2.03228, - 1.99059, - 2.05269, - 2.0406, - 2.0514, - 2.06977, - 2.07301, - 1.98433, - 2.02284, - 2.05447, - 1.9911, - 2.1004, - 2.0019, - 2.04878, - 2.09615, - 2.03017, - 1.96198, - 2.05567, - 
2.03783, - 2.0176, - 2.06279, - 2.00846, - 1.9966, - 2.05103, - 1.97235, - 2.03745, - 1.98532, - 1.98366, - 1.99227, - 1.98912, - 1.9981, - 2.00532, - 2.01077, - 2.05767, - 2.02644, - 1.98781, - 2.03154, - 1.96607, - 2.0017, - 2.0502, - 2.05493, - 2.0798, - 2.0474, - 1.98818, - 1.99227, - 2.04269, - 2.03015, - 1.99726, - 2.08021, - 1.95536, - 1.99633, - 2.01104, - 1.9854, - 2.09295, - 2.00914, - 1.98836, - 2.05984, - 2.01752, - 2.01018, - 1.99307, - 2.07742, - 2.0338, - 2.04326, - 2.03325, - 2.06367, - 1.95861, - 2.04643, - 2.04298, - 2.07182, - 1.95904, - 2.06589, - 2.01601, - 2.02384, - 2.05404, - 1.99331, - 2.03091, - 2.03839, - 1.98751, - 1.99061, - 2.06377, - 1.98709, - 1.99511, - 2.02984, - 2.04086, - 1.917, - 2.01041, - 2.01561, - 2.01116, - 2.02548, - 1.97304, - 1.98645, - 2.00927, - 2.01387, - 2.02743, - 1.94947, - 1.97216, - 2.02591, - 2.01813, - 2.02633, - 2.05251, - 1.94656, - 2.02516, - 2.07575, - 2.05024, - 2.07926, - 2.03839, - 2.03793, - 2.03907, - 2.04937, - 2.071, - 2.06587, - 2.03193, - 2.02391, - 2.03961, - 2.02611, - 1.98718, - 2.0064, - 1.95923, - 2.01422, - 2.02635, - 2.01855, - 1.95932, - 1.98137, - 1.9382, - 1.98496, - 2.05682, - 2.00338, - 1.99249, - 2.02971, - 1.98475, - 1.99565, - 2.00011, - 1.98817, - 2.04617, - 1.95292, - 1.96558, - 1.97704, - 1.9639, - 2.00853, - 2.06038, - 1.93902, - 2.03269, - 2.05443, - 2.05108, - 1.97352, - 2.06641, - 1.96112, - 2.08331, - 1.97423, - 2.02683, - 1.97744, - 2.0362, - 2.06564, - 1.99807, - 2.01944, - 2.09912, - 2.08156, - 1.96018, - 2.0293, - 2.0936, - 1.95791, - 2.06562, - 2.04463, - 2.01874, - 1.99582, - 2.05538, - 2.03876, - 1.95537, - 2.0239, - 1.97208, - 2.00811, - 2.05162, - 2.0634, - 1.9526, - 2.06848, - 2.02276, - 1.99694, - 1.99792, - 2.03578, - 2.11844, - 2.09191, - 2.02243, - 1.87811, - 2.02906, - 2.03125, - 2.01584, - 2.05565, - 2.0127, - 2.05311, - 1.99147, - 2.01825, - 1.96421, - 2.00847, - 2.03262, - 2.05404, - 1.99861, - 2.03847, - 2.07007, - 2.08098, - 1.99097, - 1.96965, - 2.01327, - 1.96723, - 2.03507, - 2.01562, - 2.05189, - 2.05747, - 2.03642, - 2.03468, - 2.06061, - 2.09757, - 1.98072, - 2.04695, - 1.94565, - 2.06268, - 2.03412, - 1.93504, - 1.9653, - 2.03721, - 1.93384, - 1.9698, - 2.01241, - 2.05127, - 1.97721, - 2.05221, - 2.07942, - 1.98581, - 2.04671, - 2.03968, - 2.00701, - 1.98215, - 1.96589, - 2.02465, - 2.05796, - 2.03362, - 1.98102, - 2.04755, - 2.01727, - 1.99702, - 1.95521, - 1.97006, - 2.03422, - 2.00421, - 2.12456, - 2.02896, - 1.98881, - 1.98948, - 2.01639, - 1.99763, - 2.06432, - 2.00342, - 2.02628, - 1.94357, - 2.01706, - 2.05078, - 2.05807, - 1.99656, - 1.96201, - 2.00779, - 2.0257, - 2.03237, - 2.0297, - 2.02753, - 1.95626, - 2.0173, - 2.0552, - 2.01339, - 2.01701, - 2.02015, - 2.01077, - 1.98322, - 1.96444, - 2.03022, - 2.02724, - 2.10411, - 2.00826, - 2.02952, - 2.02855, - 2.07096, - 2.06074, - 2.00696, - 2.08547, - 1.97324, - 1.99811, - 1.96896, - 1.99855, - 1.97778, - 2.01804, - 2.0409, - 2.00016, - 2.05343, - 1.98898, - 2.03514, - 2.04517, - 2.00783, - 1.99026, - 1.97843, - 2.01287, - 2.00309, - 1.99703, - 1.94229, - 2.01806, - 2.00115, - 2.00361, - 1.98432, - 2.03043, - 2.08663, - 1.96306, - 2.0179, - 2.08255, - 2.04953, - 2.03675, - 1.99322, - 2.00494, - 2.03521, - 2.07294, - 2.00984, - 2.01965, - 2.06652, - 1.9971, - 1.98603, - 1.96039, - 2.04443, - 1.98842, - 2.03208, - 1.98713, - 2.0276, - 2.06413, - 1.97517, - 1.94964, - 1.98601, - 2.02599, - 1.96895, - 2.03406, - 2.00392, - 1.94878, - 1.93994, - 2.04878, - 2.02049, - 2.07027, - 2.03959, - 2.03564, - 1.96753, - 2.03455, - 
2.04722, - 2.07086, - 1.96425, - 1.9974, - 2.08203, - 1.9998, - 2.00913, - 1.99502, - 2.0213, - 2.04663, - 1.9605, - 2.07072, - 1.97065, - 2.02948, - 2.02303, - 2.07083, - 2.00865, - 1.95834, - 2.05494, - 1.95127, - 1.95866, - 2.03531, - 1.95642, - 2.04075, - 2.00111, - 1.95651, - 2.06501, - 2.04002, - 1.95657, - 2.05644, - 2.03245, - 1.99571, - 2.09864, - 2.05246, - 2.00419, - 1.98986, - 1.99285, - 1.99414, - 1.98582, - 2.05419, - 2.03268, - 1.96084, - 1.96931, - 2.03434, - 2.06422, - 2.02297, - 2.0169, - 1.9922, - 2.02366, - 2.01021, - 1.94237, - 2.0596, - 2.02884, - 1.95473, - 1.97729, - 2.01942, - 1.98257, - 2.00121, - 1.97581, - 1.98864, - 2.07926, - 2.04559, - 2.11119, - 2.0064, - 2.01953, - 2.0561, - 2.0152, - 2.00195, - 2.0488, - 2.05433, - 1.94545, - 1.98894, - 2.03514, - 1.96007, - 2.05129, - 2.00728, - 2.03702, - 1.96445, - 2.02548, - 2.12273, - 2.04321, - 2.01468, - 2.02275, - 1.98088, - 1.98887, - 2.02666, - 2.012, - 2.00707, - 1.9987, - 1.97281, - 2.01063, - 2.00517, - 2.04176, - 2.07291, - 2.02487, - 2.02908, - 2.04452, - 1.9954, - 2.02014, - 2.00692, - 1.98732, - 2.01584, - 2.04199, - 1.98595, - 2.02522, - 1.98916, - 1.97619, - 1.97789, - 2.0126, - 1.99261, - 2.01578, - 2.03327, - 2.04221, - 1.98237, - 2.00512, - 1.92235, - 2.04375, - 2.03261, - 2.06578, - 1.99043, - 2.04664, - 1.93456, - 2.0388, - 1.99526, - 1.99115, - 2.03796, - 2.03547, - 1.96898, - 1.97562, - 2.08045, - 2.02621, - 2.01901, - 2.0653, - 1.99854, - 2.05852, - 2.05129, - 2.02701, - 2.01379, - 2.02948, - 2.00735, - 2.04941, - 1.96573, - 2.01903, - 1.96895, - 1.96195, - 1.97505, - 2.02764, - 1.98727, - 1.99096, - 2.00394, - 2.0805, - 2.04087, - 1.96825, - 1.97602, - 1.95703, - 2.03198, - 1.9142, - 2.03639, - 1.94347, - 2.03689, - 2.00989, - 2.03822, - 1.99745, - 2.03986, - 2.01531, - 2.04774, - 2.02886, - 1.94095, - 1.98422, - 2.02463, - 2.00062, - 2.05377, - 2.00139, - 2.02391, - 2.00514, - 1.99956, - 1.99995, - 1.99346, - 1.98958, - 2.06951, - 2.02386, - 2.04238, - 1.98314, - 2.01808, - 1.98751, - 1.98229, - 1.9959, - 2.02373, - 1.94895, - 1.98692, - 2.10199, - 2.06477, - 1.98143, - 2.00136, - 2.05122, - 1.95947, - 2.04105, - 1.98372, - 1.95131, - 2.01702, - 1.9985, - 1.98936, - 2.05077, - 1.98544, - 1.99829, - 1.99232, - 1.99834, - 1.98451, - 2.05129, - 2.05385, - 2.00879, - 2.03047, - 2.05291, - 2.00253, - 1.95412, - 1.99365, - 1.91888, - 2.01307, - 2.02629, - 1.99914, - 1.95803, - 2.01059, - 1.99322, - 2.01757, - 2.01168, - 2.01442, - 2.03676, - 2.0081, - 1.89199, - 1.97492, - 1.94554, - 2.00253, - 2.02376, - 2.01736, - 2.05809, - 1.95855, - 1.99146, - 1.97251, - 2.01931, - 2.0197, - 2.00076, - 2.0824, - 1.96626, - 2.00595, - 2.00556, - 1.99692, - 2.00042, - 1.99194, - 2.02848, - 2.01454, - 1.92868, - 2.0128, - 2.01294, - 2.02245, - 2.00355, - 1.97926, - 1.99438, - 2.04544, - 1.98878, - 2.02317, - 2.05832, - 2.05176, - 1.99093, - 2.00458, - 2.09083, - 2.01218, - 2.01488, - 1.98868, - 2.05206, - 2.02418, - 2.04944, - 2.03538, - 1.98035, - 2.03976, - 1.96904, - 1.98689, - 2.00182, - 2.05096, - 2.04869, - 2.00459, - 2.0297, - 2.00987, - 1.98749, - 2.0019, - 2.02971, - 2.03556, - 1.9856, - 2.06113, - 2.03574, - 1.97064, - 2.08041, - 1.96483, - 1.99301, - 1.98006, - 1.9313, - 2.01808, - 2.0258, - 2.03275, - 2.09576, - 1.98446, - 1.98921, - 1.98268, - 1.97382, - 2.03328, - 2.0298, - 2.01399, - 2.06142, - 2.04923, - 2.01043, - 1.9741, - 2.03857, - 2.0282, - 2.0995, - 2.11682, - 2.07535, - 1.98859, - 1.95763, - 1.9381, - 2.04968, - 1.98562, - 2.08763, - 1.94718, - 1.96977, - 2.02407, - 1.97047, - 2.0147, - 
1.96208, - 1.90099, - 2.07603, - 2.02276, - 2.00562, - 2.03233, - 2.12088, - 2.06874, - 1.9812, - 1.95639, - 1.98698, - 2.05529, - 1.983, - 2.11055, - 2.01205, - 2.06332, - 2.04293, - 2.02461, - 2.00586, - 2.06079, - 1.97871, - 1.97443, - 2.02281, - 2.00214, - 2.0261, - 1.98808, - 2.06307, - 1.99366, - 1.98239, - 2.00326, - 1.99525, - 2.01102, - 2.03917, - 1.99459, - 2.03149, - 2.04708, - 1.98997, - 1.99754, - 1.97091, - 2.02839, - 1.98442, - 2.06248, - 2.03474, - 2.03616, - 1.97396, - 2.04268, - 1.99204, - 1.95996, - 2.03771, - 2.00482, - 1.95327, - 1.97945, - 2.00126, - 2.04572, - 1.97116, - 2.04714, - 2.0102, - 1.98112, - 1.92874, - 1.95191, - 2.01692, - 1.96376, - 1.98024, - 2.02489, - 1.99766, - 1.99019, - 1.95507, - 2.03374, - 1.91463, - 1.98136, - 1.96572, - 2.04854, - 2.01462, - 1.98584, - 1.97944, - 1.91392, - 1.93925, - 1.97923, - 1.9981, - 1.97254, - 2.05865, - 2.03985, - 2.02978, - 2.00912, - 2.09103, - 2.04664, - 2.03203, - 2.00625, - 2.02695, - 1.9299, - 2.01462, - 2.04031, - 1.98378, - 1.98164, - 2.01099, - 2.04143, - 2.03486, - 2.0398, - 1.99276, - 2.00627, - 2.03088, - 1.93286, - 1.97995, - 1.98387, - 1.96655, - 2.00029, - 1.96476, - 2.0436, - 2.01933, - 2.03058, - 2.00946, - 2.00662, - 1.98321, - 1.96428, - 2.06089, - 2.02815, - 1.97661, - 1.95311, - 1.99788, - 1.98392, - 2.023, - 1.9883, - 2.0231, - 2.01242, - 1.96769, - 2.03766, - 1.98989, - 1.95733, - 2.06986, - 2.02944, - 1.88962, - 1.98596, - 1.96756, - 2.07344, - 1.99616, - 2.07636, - 1.96153, - 2.01993, - 2.006, - 1.98924, - 1.98594, - 2.08265, - 1.99294, - 2.00128, - 2.01888, - 2.00446, - 2.04186, - 2.03706, - 1.98871, - 2.0367, - 1.98992, - 2.00194, - 1.98956, - 2.01477, - 2.07673, - 1.99776, - 2.00791, - 2.00243, - 2.05245, - 2.00527, - 1.89964, - 2.0233, - 2.02567, - 2.0068, - 1.92181, - 1.97317, - 1.95074, - 2.06205, - 1.96365, - 1.99552, - 2.03024, - 2.08255, - 2.00579, - 1.96697, - 1.95575, - 2.05837, - 2.01277, - 2.00968, - 1.95842, - 2.01428, - 1.98785, - 1.92533, - 2.01882, - 2.06527, - 1.96613, - 2.01629, - 2.0061, - 2.01929, - 2.00902, - 1.97217, - 1.97057, - 2.02872, - 1.9562, - 1.93554, - 2.10084, - 1.99287, - 1.99207, - 2.02983, - 2.00123, - 2.03857, - 2.03137, - 1.98541, - 1.95956, - 2.02009, - 1.93708, - 2.02226, - 2.04299, - 1.95262, - 2.03477, - 1.96713, - 2.04649, - 1.96283, - 2.05235, - 1.95168, - 1.99563, - 1.98333, - 1.9804, - 1.96479, - 2.01103, - 1.95921, - 2.02415, - 2.01369, - 1.99571, - 2.01753, - 2.06413, - 2.01131, - 2.01281, - 1.98365, - 2.04805, - 1.98333, - 2.00521, - 2.03218, - 2.00052, - 2.03325, - 2.03395, - 2.01898, - 2.05167, - 2.01596, - 2.02609, - 1.9922, - 2.03392, - 2.01698, - 1.97777, - 2.00345, - 2.02413, - 1.97269, - 2.01582, - 2.03331, - 1.99219, - 2.00692, - 1.99662, - 1.98049, - 2.00729, - 1.98974, - 2.00085, - 2.02075, - 1.90049, - 2.03939, - 1.9401, - 2.04572, - 1.98253, - 1.95721, - 1.99365, - 2.04621, - 1.9598, - 2.06474, - 1.9597, - 1.99697, - 2.00205, - 2.02449, - 1.9592, - 2.07183, - 2.04893, - 2.00964, - 1.99749, - 1.9637, - 2.02774, - 1.96726, - 1.98985, - 2.02242, - 1.97285, - 2.03987, - 2.00749, - 1.91543, - 2.04369, - 1.94382, - 1.95827, - 1.96691, - 2.00206, - 2.07647, - 2.02042, - 1.98448, - 2.01804, - 1.96448, - 2.03352, - 2.02048, - 1.95061, - 2.03489, - 2.01484, - 2.02283, - 1.95214, - 2.03393, - 2.01868, - 2.03471, - 1.98764, - 2.01705, - 1.95488, - 1.98411, - 2.01061, - 1.97284, - 1.98691, - 2.05997, - 2.00921, - 2.04649, - 1.96603, - 1.98895, - 1.98335, - 2.01348, - 1.95849, - 2.04201, - 2.04699, - 1.98494, - 1.99152, - 2.01163, - 2.03349, - 
1.97441, - 1.95745, - 1.94131, - 2.02055, - 2.06058, - 2.03908, - 2.02442, - 2.03803, - 2.00502, - 2.01744, - 2.04546, - 2.07086, - 1.95477, - 2.05745, - 1.97998, - 2.05611, - 1.99976, - 2.04745, - 1.98438, - 2.02153, - 2.01266, - 2.02685, - 1.99237, - 1.95874, - 2.01595, - 2.01275, - 1.99528, - 1.93453, - 2.03881, - 2.042, - 2.0232, - 2.0455, - 1.99861, - 1.99264, - 2.05347, - 1.96142, - 1.97577, - 1.94603, - 2.01496, - 1.93602, - 2.03565, - 1.96889, - 2.01638, - 1.97009, - 1.98204, - 2.00127, - 2.05713, - 2.00223, - 1.97572, - 1.95095, - 1.94675, - 2.03205, - 1.97211, - 1.97383, - 2.02932, - 1.99864, - 1.98542, - 1.93838, - 1.98474, - 2.00468, - 1.90209, - 2.01508, - 2.00664, - 1.9883, - 1.95055, - 2.01114, - 2.06622, - 1.91469, - 2.0693, - 1.99328, - 2.00079, - 1.98355, - 1.9891, - 1.98803, - 1.99355, - 1.97788, - 1.98502, - 1.98553, - 1.94578, - 2.04847, - 1.99754, - 1.99669, - 2.02536, - 1.96085, - 1.9855, - 2.01302, - 2.05116, - 1.99158, - 1.93569, - 1.96444, - 1.98112, - 1.97228, - 2.00323, - 1.97894, - 1.91352, - 2.00361, - 2.04402, - 2.0064, - 2.02979, - 1.98477, - 1.99644, - 2.00115, - 1.95118, - 1.95617, - 1.96624, - 2.05518, - 1.89362, - 2.01568, - 1.9944, - 2.02599, - 2.06907, - 1.93003, - 1.97998, - 1.96448, - 2.02148, - 2.00263, - 1.9826, - 2.00307, - 1.97674, - 2.04795, - 2.01112, - 2.06018, - 1.9703, - 1.97933, - 2.0022, - 1.99355, - 1.98898, - 1.97372, - 2.04092, - 2.01353, - 2.02296, - 1.9766, - 1.9998, - 1.93045, - 2.05486, - 2.03206, - 1.89151, - 1.96828, - 2.03969, - 1.99979, - 2.0169, - 1.97263, - 2.01506, - 1.98855, - 1.97664, - 2.06285, - 1.97189, - 2.02166, - 1.96846, - 1.99084, - 2.01495, - 1.99737, - 1.98845, - 2.04, - 1.89863, - 2.00204, - 2.04437, - 1.9923, - 1.98981, - 1.97009, - 1.9507, - 1.96559, - 1.9867, - 2.05348, - 1.98062, - 2.00027, - 1.95882, - 2.00115, - 1.9907, - 2.00334, - 1.97457, - 2.0031, - 2.00836, - 1.9097, - 1.9315, - 2.00495, - 1.95076, - 1.99167, - 2.02935, - 2.02231, - 1.99844, - 2.06407, - 1.98244, - 1.93732, - 1.94948, - 2.0558, - 2.04316, - 1.99596, - 1.97589, - 1.97237, - 1.99428, - 1.97414, - 2.02602, - 2.01618, - 1.99366, - 1.98207, - 1.98739, - 1.89958, - 1.98187, - 1.98361, - 2.00059, - 2.01874, - 1.96295, - 2.04907, - 2.03307, - 2.03817, - 2.00627, - 1.97757, - 1.99663, - 1.98184, - 1.99729, - 2.00995, - 1.88819, - 1.97794, - 2.00415, - 1.99307, - 2.00314, - 2.02864, - 2.02904, - 1.97873, - 1.97951, - 1.9679, - 1.9739, - 2.02483, - 1.94875, - 1.97001, - 2.02303, - 1.97568, - 2.03039, - 1.972, - 1.96526, - 1.95852, - 1.99328, - 1.96262, - 2.01939, - 2.00978, - 2.03351, - 2.04386, - 2.01462, - 1.98075, - 1.91643, - 1.9798, - 2.00099, - 2.01135, - 2.01561, - 2.00976, - 1.96302, - 1.96523, - 2.03429, - 2.03473, - 1.92108, - 2.03141, - 2.09516, - 2.00677, - 2.03369, - 1.99738, - 1.98227, - 1.9916, - 2.02027, - 2.04128, - 2.05798, - 2.0523, - 1.97825, - 2.07077, - 1.95376, - 2.02397, - 1.98578, - 1.99831, - 1.94968, - 2.01742, - 2.0109, - 1.96485, - 1.95675, - 1.98677, - 2.04235, - 2.04987, - 1.94219, - 2.05676, - 2.02581, - 2.03068, - 1.99321, - 2.01793, - 1.90772, - 2.05076, - 2.04089, - 1.98871, - 1.92802, - 1.97656, - 2.02284, - 1.96275, - 2.05975, - 1.99876, - 2.07755, - 1.93556, - 1.94664, - 2.00254, - 2.03218, - 1.96148, - 1.94981, - 1.95951, - 2.08401, - 2.03398, - 1.98407, - 1.98549, - 1.96512, - 1.98633, - 2.03149, - 2.00493, - 1.98666, - 2.02876, - 2.00091, - 2.0426, - 1.95763, - 1.91548, - 1.91078, - 1.97378, - 2.00277, - 2.02352, - 2.08331, - 2.01085, - 1.95839, - 1.97665, - 2.03236, - 1.99652, - 1.99873, - 2.02419, - 
1.96455, - 1.90486, - 2.01951, - 1.99785, - 2.03716, - 1.9734, - 2.04055, - 1.97903, - 1.9381, - 1.97781, - 2.03637, - 1.98255, - 1.98489, - 2.04846, - 1.95674, - 1.95809, - 1.98031, - 1.95848, - 2.01704, - 1.97616, - 1.94339, - 2.04096, - 2.05934, - 1.99289, - 2.0376, - 1.97598, - 2.00435, - 1.96602, - 2.01242, - 1.98324, - 1.97226, - 1.98835, - 1.92274, - 2.01217, - 1.98835, - 2.02167, - 1.98622, - 2.04031, - 2.02588, - 1.98607, - 2.03358, - 2.00742, - 1.94243, - 1.97613, - 1.96072, - 1.99119, - 1.99252, - 2.04808, - 1.98132, - 1.90744, - 1.9521, - 1.98523, - 1.97674, - 1.96921, - 2.0059, - 2.02196, - 2.09653, - 2.02984, - 2.03233, - 2.01399, - 1.97902, - 1.92289, - 2.02088, - 1.98795, - 1.97243, - 2.00055, - 1.99687, - 1.99595, - 1.96015, - 1.93251, - 1.99104, - 1.95964, - 1.98884, - 1.98333, - 2.03268, - 1.91441, - 2.06152, - 1.93455, - 1.96024, - 2.02305, - 2.02251, - 1.97979, - 1.93099, - 2.02761, - 1.93714, - 1.97679, - 2.01065, - 2.09354, - 1.95595, - 1.96252, - 2.04783, - 1.96374, - 1.9913, - 1.98251, - 2.01662, - 1.96123, - 2.02611, - 1.97044, - 2.00854, - 2.0152, - 1.98203, - 2.01076, - 1.99256, - 1.958, - 2.00109, - 2.0034, - 2.02911, - 1.96206, - 1.99128, - 2.01339, - 2.00852, - 2.04354, - 1.93514, - 2.01169, - 2.01617, - 1.89919, - 1.95354, - 1.95736, - 2.02089, - 2.00792, - 2.00597, - 2.0159, - 2.00293, - 1.9962, - 2.0171, - 1.98384, - 1.91738, - 1.98072, - 1.99734, - 2.0799, - 1.94829, - 1.89855, - 2.0291, - 2.01176, - 2.05298, - 2.02792, - 2.05886, - 1.99928, - 2.02507, - 2.05813, - 2.02668, - 1.95257, - 1.95227, - 1.968, - 1.96955, - 1.97169, - 1.94825, - 1.97716, - 1.98542, - 2.00687, - 1.98687, - 2.00347, - 2.03969, - 1.98224, - 1.935, - 1.9709, - 2.0671, - 1.99546, - 2.00251, - 2.01341, - 1.86798, - 1.97899, - 1.9975, - 2.03694, - 1.98567, - 2.00011, - 2.04276, - 1.98067, - 2.02486, - 2.00715, - 2.03001, - 2.00473, - 2.04593, - 2.02199, - 2.00787, - 1.98125, - 2.0041, - 1.96644, - 1.98402, - 2.04687, - 1.98445, - 1.96908, - 1.98546, - 2.05776, - 2.04457, - 1.98404, - 1.98669, - 1.93033, - 1.9852, - 1.94804, - 1.95895, - 1.96825, - 1.98975, - 2.02821, - 2.06057, - 1.99018, - 1.92653, - 2.00515, - 1.99945, - 1.97966, - 1.96691, - 2.00663, - 1.98157, - 2.03215, - 1.96618, - 2.05549, - 1.9983, - 1.97929, - 2.03801, - 1.94459, - 1.92648, - 2.0353, - 1.94629, - 2.02508, - 2.03577, - 1.9909, - 1.99029, - 1.9972, - 2.01723, - 1.98741, - 1.97019, - 2.0116, - 1.97402, - 2.00446, - 1.95901, - 1.94283, - 1.9989, - 2.01434, - 1.95845, - 2.00733, - 1.97276, - 1.97346, - 2.02668, - 2.01142, - 2.00703, - 2.0151, - 1.95583, - 1.94438, - 2.01065, - 1.93958, - 1.94426, - 1.99917, - 2.0056, - 2.03731, - 1.99175, - 2.00864, - 2.04502, - 1.96004, - 1.92537, - 1.9456, - 1.97112, - 1.96476, - 1.98412, - 2.01266, - 1.97465, - 2.03248, - 2.01574, - 1.93379, - 1.96352, - 2.07466, - 1.94021, - 1.92511, - 1.97332, - 2.00491, - 1.94898, - 1.98354, - 1.93344, - 2.0303, - 2.04397, - 2.03331, - 2.02834, - 2.03329, - 2.04104, - 2.02153, - 2.00073, - 1.99066, - 2.01512, - 2.0153, - 1.9408, - 1.98334, - 2.03944, - 2.02187, - 2.0345, - 1.94131, - 2.00797, - 1.98111, - 1.99203, - 2.03004, - 2.03545, - 2.02201, - 2.03476, - 1.97641, - 2.01004, - 1.99534, - 2.02757, - 2.027, - 1.94261, - 2.05076, - 1.92188, - 1.9429, - 2.09663, - 1.90244, - 1.97694, - 1.98409, - 1.95274, - 1.97645, - 1.98941, - 1.95427, - 1.96345, - 1.9693, - 1.99523, - 1.96543, - 2.05512, - 1.97311, - 1.97184, - 2.02727, - 1.96254, - 1.96313, - 1.98338, - 1.96345, - 2.00016, - 1.95226, - 1.96962, - 1.96841, - 2.01774, - 2.01013, - 1.9609, - 
1.90046, - 1.9943, - 2.01479, - 1.96584, - 1.94991, - 1.98248, - 1.94358, - 2.02598, - 1.98599, - 1.9788, - 1.964, - 2.00263, - 2.01156, - 1.94345, - 1.93722, - 1.98747, - 2.01206, - 1.99596, - 2.03204, - 1.92939, - 1.97974, - 1.97004, - 2.00422, - 2.00573, - 2.02825, - 2.06348, - 1.9778, - 1.97892, - 1.92993, - 2.00311, - 1.99318, - 2.00283, - 1.89879, - 1.95669, - 2.04127, - 1.99294, - 2.00856, - 1.97424, - 2.05307, - 1.95007, - 1.99605, - 1.97253, - 2.03717, - 2.00418, - 1.99459, - 1.98566, - 1.99275, - 1.98428, - 2.01674, - 2.0169, - 1.99546, - 1.96682, - 1.99448, - 2.01996, - 2.07104, - 2.00004, - 1.92634, - 2.03429, - 2.04954, - 1.97503, - 2.0191, - 1.94803, - 1.9294, - 2.01009, - 1.98563, - 1.97411, - 2.01039, - 1.97171, - 2.01617, - 1.9745, - 1.9717, - 2.0179, - 2.02169, - 1.96091, - 1.93472, - 1.93124, - 2.03503, - 2.00312, - 1.94756, - 1.97263, - 2.0053, - 2.01181, - 1.93185, - 1.99288, - 1.9604, - 2.03188, - 1.98252, - 1.94941, - 1.98199, - 1.98967, - 2.00364, - 2.00329, - 2.03105, - 2.02863, - 2.03405, - 1.95088, - 1.98236, - 2.00378, - 1.97968, - 1.96715, - 2.05643, - 1.99113, - 1.95354, - 2.02381, - 1.98066, - 1.95233, - 1.99064, - 1.99499, - 1.99963, - 1.98265, - 2.03129, - 2.05113, - 1.93927, - 1.94626, - 1.95358, - 2.0079, - 1.98633, - 1.927, - 1.91407, - 2.01291, - 1.9977, - 1.94055, - 1.92996, - 2.05607, - 1.98319, - 1.93848, - 1.97485, - 1.96573, - 1.98183, - 1.98029, - 1.9763, - 1.97673, - 1.95977, - 2.02845, - 2.04553, - 1.93552, - 1.95932, - 1.919, - 2.03002, - 2.03049, - 1.99282, - 2.01993, - 1.98707, - 2.00712, - 1.96717, - 1.96314, - 2.01438, - 2.0253, - 1.97594, - 1.98823, - 1.96277, - 1.96884, - 1.96481, - 2.01356, - 1.90224, - 1.97409, - 1.92016, - 1.99256, - 1.9705, - 2.04418, - 1.94863, - 1.99169, - 1.88822, - 1.98237, - 2.03701, - 2.00487, - 1.97934, - 1.97313, - 1.95245, - 1.94582, - 1.99571, - 1.98369, - 1.99128, - 1.97404, - 1.96798, - 2.03327, - 1.99452, - 1.9317, - 1.97406, - 1.98336, - 2.04028, - 2.04071, - 2.03543, - 1.96285, - 2.03403, - 1.96632, - 1.99084, - 1.97986, - 1.96514, - 1.9726, - 1.94514, - 1.99318, - 1.99782, - 1.99016, - 1.98098, - 2.04205, - 1.97103, - 2.02323, - 1.94867, - 1.99526, - 2.0218, - 1.98826, - 2.01249, - 2.00605, - 1.9782, - 1.92196, - 2.03419, - 1.95081, - 1.92547, - 1.97216, - 1.98277, - 2.04983, - 1.95157, - 1.99612, - 1.94277, - 1.91894, - 1.98716, - 1.96341, - 1.9547, - 1.93626, - 1.95351, - 1.96746, - 2.00362, - 1.96986, - 2.00854, - 2.03535, - 1.98909, - 2.0071, - 1.98053, - 1.89974, - 1.88706, - 1.99948, - 1.9944, - 2.06122, - 2.03833, - 2.00912, - 1.95391, - 1.96251, - 2.02318, - 1.99228, - 1.98454, - 1.96682, - 1.9963, - 1.93436, - 1.94906, - 2.02444, - 2.04053, - 1.98776, - 1.99624, - 1.96611, - 1.96937, - 1.95541, - 1.99131, - 1.93865, - 2.07497, - 2.03941, - 2.05973, - 1.96334, - 1.97828, - 2.00941, - 2.0231, - 1.96689, - 2.03658, - 1.95218, - 2.03254, - 2.05962, - 1.99608, - 1.90958, - 2.06436, - 2.00983, - 1.97181, - 1.96836, - 1.99543, - 2.02426, - 1.96266, - 1.96595, - 1.96847, - 2.03084, - 1.94589, - 2.00036, - 1.9347, - 1.96128, - 1.98817, - 1.99094, - 2.00073, - 1.96516, - 2.00657, - 2.03516, - 1.9641, - 2.01086, - 2.0202, - 1.97758, - 1.96737, - 1.96066, - 1.99637, - 1.99239, - 1.95635, - 1.93077, - 1.98171, - 1.99667, - 1.93671, - 2.00278, - 2.02386, - 1.97179, - 2.00508, - 1.9927, - 1.94199, - 1.97418, - 1.97833, - 1.98674, - 1.98324, - 1.99701, - 1.97478, - 1.96459, - 1.96923, - 2.01838, - 2.00544, - 1.92812, - 1.93194, - 1.95946, - 1.93229, - 1.98554, - 1.94472, - 1.96006, - 2.06347, - 2.03454, - 
2.02813, - 1.99065, - 1.88492, - 1.9695, - 2.02826, - 2.03011, - 1.99475, - 2.02767, - 2.09269, - 1.92003, - 1.93642, - 1.97548, - 1.91734, - 1.98807, - 1.94399, - 1.9875, - 2.03989, - 1.9735, - 2.01372, - 1.98959, - 1.9726, - 1.9682, - 2.00462, - 1.964, - 1.9971, - 2.00619, - 1.94498, - 2.01274, - 2.08062, - 2.01585, - 1.99568, - 2.06212, - 1.97864, - 2.02482, - 2.00044, - 1.93452, - 2.01283, - 1.98868, - 2.00252, - 1.94436, - 1.95456, - 1.98729, - 1.93025, - 2.01188, - 1.95522, - 2.00946, - 1.92741, - 2.0293, - 2.01412, - 1.96944, - 1.85562, - 2.03398, - 1.99448, - 1.98626, - 2.01263, - 2.03701, - 2.02779, - 1.9861, - 1.93431, - 2.05202, - 1.91912, - 1.96914, - 1.96211, - 1.9215, - 2.02252, - 1.9535, - 1.98695, - 1.9481, - 1.9923, - 1.98367, - 1.92088, - 2.02521, - 1.99033, - 1.98421, - 1.97445, - 2.03386, - 2.02991, - 2.03236, - 1.97375, - 1.98152, - 1.94662, - 2.00794, - 1.99559, - 1.99689, - 1.98376, - 1.96719, - 1.93885, - 1.93029, - 1.99269, - 1.97823, - 1.97119, - 2.00468, - 2.02014, - 1.96549, - 1.98446, - 1.99627, - 2.0587, - 1.98754, - 1.95387, - 2.00008, - 1.96028, - 1.97904, - 1.91734, - 1.99355, - 1.9515, - 2.00868, - 1.93325, - 1.97367, - 1.9764, - 1.93601, - 1.95077, - 1.99771, - 1.99598, - 1.93073, - 1.95586, - 1.95627, - 2.00006, - 1.98971, - 1.96715, - 2.02188, - 1.97787, - 1.96229, - 1.9209, - 1.94712, - 1.94313, - 1.9795, - 1.95527, - 1.92708, - 1.91806, - 2.0466, - 2.00079, - 2.00519, - 1.966, - 2.03785, - 1.94921, - 1.97676, - 1.9662, - 2.03085, - 1.93562, - 1.9313, - 2.01941, - 2.02013, - 1.93643, - 1.95894, - 1.95778, - 1.94561, - 1.95845, - 2.0194, - 1.94204, - 1.9897, - 1.97353, - 1.9965, - 1.93067, - 1.97084, - 2.00349, - 1.97769, - 1.96569, - 1.91816, - 1.95467, - 1.92357, - 1.95407, - 1.98378, - 2.00928, - 2.02088, - 1.96533, - 1.98272, - 1.96449, - 1.9888, - 1.9876, - 1.89257, - 1.98443, - 1.93691, - 1.98647, - 1.98377, - 1.96244, - 1.91485, - 2.02801, - 1.99371, - 1.98383, - 1.93932, - 2.03993, - 1.95617, - 1.90354, - 1.94911, - 1.98231, - 1.95849, - 2.01279, - 1.98692, - 1.97703, - 2.03021, - 1.97021, - 1.96368, - 2.0056, - 1.96479, - 2.00998, - 2.03106, - 1.93726, - 2.01484, - 1.95845, - 2.03382, - 1.97781, - 1.96391, - 1.91376, - 2.00831, - 2.05082, - 1.93713, - 1.96367, - 1.95695, - 1.94157, - 1.9053, - 1.98043, - 1.96037, - 2.04364, - 1.98088, - 1.93161, - 2.01679, - 1.96765, - 1.91298, - 1.96849, - 2.03841, - 1.95388, - 1.98285, - 1.99397, - 1.94903, - 1.98552, - 2.01108, - 1.90294, - 1.94041, - 2.02583, - 2.03383, - 2.07532, - 1.96256, - 1.95447, - 1.96777, - 1.95356, - 1.95474, - 1.92051, - 1.97469, - 1.99365, - 1.93624, - 1.92425, - 2.00907, - 2.02582, - 1.9966, - 1.95483, - 1.91602, - 2.01729, - 1.94688, - 1.9511, - 1.99284, - 1.97352, - 1.95443, - 1.96131, - 2.01319, - 1.9911, - 1.99706, - 1.96574, - 1.94709, - 1.97128, - 2.01347, - 2.00459, - 2.05158, - 2.00237, - 2.00458, - 1.98558, - 2.00432, - 2.01505, - 1.95335, - 2.0139, - 1.98579, - 1.94451, - 2.01946, - 1.96131, - 1.98425, - 1.96505, - 1.87638, - 2.02833, - 1.98527, - 1.93589, - 1.98291, - 2.00207, - 2.00821, - 1.93842, - 2.01899, - 1.96355, - 1.94923, - 1.97149, - 2.01003, - 2.021, - 1.90265, - 1.94123, - 1.99005, - 1.9667, - 1.98316, - 1.99619, - 1.94322, - 1.98903, - 2.02459, - 2.01778, - 1.93959, - 1.9572, - 2.01687, - 2.03342, - 1.98714, - 1.90974, - 1.96413, - 1.93967, - 2.00428, - 1.99324, - 1.93698, - 2.02305, - 2.01771, - 1.99757, - 1.95202, - 1.93205, - 1.95497, - 1.97572, - 1.94547, - 1.94131, - 1.87771, - 2.05968, - 1.92594, - 1.99585, - 1.97679, - 1.96619, - 1.97151, - 
1.93183, - 2.02339, - 1.96641, - 1.95669, - 1.95238, - 1.92394, - 2.01263, - 1.98686, - 1.99557, - 1.95669, - 1.97434, - 1.94185, - 2.00366, - 1.96482, - 2.00482, - 1.97337, - 1.93184, - 1.98171, - 2.00013, - 2.00078, - 1.9926, - 2.01497, - 1.91734, - 2.0471, - 1.99045, - 1.97346, - 2.0546, - 1.95712, - 1.91867, - 1.96107, - 1.96687, - 1.98602, - 2.01906, - 1.9422, - 1.92829, - 1.99356, - 2.00052, - 1.92881, - 2.03842, - 1.97915, - 2.00085, - 1.97143, - 1.96326, - 1.93283, - 1.96998, - 1.97348, - 1.91339, - 2.01583, - 1.97175, - 2.05243, - 2.05453, - 1.99339, - 1.98419, - 2.01361, - 1.93532, - 1.96542, - 1.9782, - 1.96069, - 1.98955, - 1.99741, - 1.99438, - 2.00907, - 1.94164, - 1.91727, - 1.97279, - 2.01746, - 1.99268, - 1.94287, - 2.02791, - 1.92978, - 1.9047, - 1.90564, - 1.99784, - 1.99989, - 2.06317, - 1.98358, - 1.9155, - 1.92227, - 2.00725, - 1.95086, - 1.99643, - 1.98353, - 2.02813, - 1.99828, - 2.07523, - 1.9931, - 1.98494, - 1.96496, - 2.02275, - 2.00813, - 1.92473, - 2.00383, - 1.96417, - 2.01452, - 1.99262, - 1.88807, - 1.90506, - 1.93445, - 1.96481, - 2.03627, - 1.94696, - 1.95402, - 1.9825, - 1.97432, - 1.9798, - 1.93927, - 1.98013, - 1.95889, - 1.95168, - 1.98974, - 1.93711, - 1.98389, - 2.00521, - 2.04882, - 1.96911, - 1.94369, - 2.10105, - 1.97562, - 2.01181, - 2.01213, - 2.02869, - 2.00185, - 1.91835, - 2.00355, - 1.96372, - 1.97117, - 1.98286, - 2.03665, - 1.95927, - 1.9663, - 2.00408, - 2.04361, - 1.9962, - 1.94799, - 1.95962, - 1.94746, - 1.97048, - 1.99226, - 2.01224, - 1.93817, - 1.94561, - 1.99782, - 1.94198, - 1.98114, - 1.93666, - 1.9584, - 1.97029, - 1.96347, - 1.96103, - 2.02238, - 1.98185, - 1.97127, - 2.01246, - 2.00018, - 2.00953, - 2.02532, - 2.03519, - 1.97326, - 1.95495, - 1.98598, - 1.96043, - 2.01431, - 2.00126, - 1.96306, - 1.92119, - 1.98395, - 1.91376, - 1.95375, - 1.92882, - 2.01989, - 2.00988, - 2.00782, - 1.98083, - 1.94331, - 1.95664, - 1.9685, - 1.93775, - 1.97353, - 1.95202, - 1.94563, - 1.94753, - 1.9342, - 1.95383, - 2.00884, - 1.95045, - 2.00743, - 2.02391, - 1.99232, - 1.98303, - 2.01668, - 1.98341, - 2.12, - 1.97469, - 1.95465, - 1.95191, - 1.93757, - 1.93613, - 1.95431, - 1.92264, - 1.94794, - 1.99006, - 1.98009, - 2.04625, - 1.98275, - 1.9321, - 1.98278, - 1.96495, - 1.96174, - 2.01025, - 1.99745, - 1.95494, - 1.92365, - 2.00088, - 1.95428, - 2.0119, - 2.03279, - 1.98256, - 1.98426, - 2.00448, - 1.9587, - 1.94967, - 1.98558, - 1.97571, - 2.0167, - 1.97, - 1.99878, - 1.99161, - 1.97537, - 2.00101, - 1.9866, - 1.94771, - 1.92996, - 1.94673, - 2.00313, - 1.97442, - 1.97999, - 1.96232, - 1.95125, - 1.93083, - 1.9764, - 2.0037, - 1.93986, - 1.95912, - 1.99717, - 1.94977, - 1.97692, - 2.00599, - 1.92449, - 2.01315, - 1.93977, - 1.96668, - 1.96718, - 1.99215, - 1.92846, - 1.9536, - 1.97173, - 1.97247, - 1.9761, - 1.93479, - 1.99013, - 2.02282, - 1.94592, - 2.00971, - 1.9754, - 2.0106, - 2.00716, - 2.02199, - 1.90274, - 1.9667, - 1.96439, - 1.9563, - 2.00954, - 2.01943, - 1.95102, - 2.01505, - 1.97, - 1.9571, - 2.02098, - 1.98598, - 1.93574, - 1.95752, - 1.96123, - 1.97996, - 1.88537, - 1.91621, - 2.00375, - 1.97274, - 1.97126, - 1.9414, - 1.96476, - 1.92179, - 1.99697, - 1.96214, - 2.04319, - 1.92058, - 1.99669, - 1.95231, - 1.99893, - 1.96724, - 2.00434, - 1.96359, - 2.02052, - 1.98201, - 1.98097, - 2.0416, - 1.93833, - 1.94685, - 1.8908, - 1.96725, - 2.00229, - 1.98477, - 1.95004, - 1.97548, - 1.94814, - 1.93435, - 1.98676, - 2.03156, - 1.94819, - 2.03513, - 2.06098, - 1.96503, - 1.94686, - 1.9525, - 1.9792, - 2.0509, - 1.96295, - 1.9403, - 
1.94524, - 1.94178, - 1.97712, - 1.88336, - 1.96105, - 1.99633, - 1.98437, - 1.99804, - 1.93821, - 1.99166, - 1.96774, - 1.89773, - 1.92836, - 1.88551, - 1.93865, - 1.93004, - 1.94561, - 1.96234, - 1.95982, - 1.97006, - 2.04929, - 1.98355, - 1.95069, - 1.96282, - 2.02303, - 1.89441, - 1.94946, - 1.96196, - 1.96048, - 1.94227, - 1.9771, - 1.95643, - 1.95222, - 1.96817, - 1.91682, - 1.93093, - 2.00938, - 1.95287, - 1.95115, - 1.99607, - 1.98889, - 2.04047, - 1.9963, - 1.92561, - 1.95427, - 2.00296, - 1.93019, - 1.98702, - 1.97153, - 1.94843, - 2.00609, - 2.00275, - 1.95366, - 1.99981, - 2.0396, - 1.98452, - 1.93443, - 1.93329, - 2.00219, - 1.99894, - 1.97154, - 1.97404, - 1.9506, - 2.03493, - 1.94391, - 1.94493, - 1.9338, - 1.99544, - 2.01323, - 1.90762, - 1.96144, - 2.00523, - 2.02091, - 2.06628, - 1.96535, - 1.94685, - 1.97524, - 1.95928, - 1.95921, - 1.99955, - 1.93487, - 2.02453, - 1.91431, - 2.00856, - 1.94713, - 2.01627, - 2.03416, - 1.94354, - 1.9831, - 1.98563, - 2.01353, - 1.96529, - 1.99574, - 1.94429, - 1.95839, - 1.96998, - 1.9868, - 2.00454, - 1.94127, - 1.95508, - 1.94047, - 1.97924, - 1.98295, - 1.99062, - 1.92712, - 1.93389, - 1.95819, - 1.94414, - 1.8819, - 1.95202, - 1.98718, - 1.99937, - 1.93831, - 1.9618, - 1.92638, - 1.96301, - 1.95276, - 1.94873, - 2.02361, - 1.97588, - 2.01239, - 1.98399, - 2.01884, - 1.96307, - 1.93774, - 1.93475, - 2.0152, - 1.94811, - 1.98276, - 1.98838, - 1.97724, - 1.90091, - 1.87406, - 1.97194, - 1.97741, - 1.95337, - 1.99019, - 1.94909, - 1.92047, - 1.99518, - 1.94543, - 1.97223, - 1.99569, - 1.9499, - 2.02308, - 1.97286, - 1.95651, - 2.0017, - 1.98428, - 1.95679, - 1.98119, - 1.96725, - 2.0006, - 1.96624, - 2.00056, - 1.94665, - 1.97609, - 2.00981, - 1.98482, - 1.90937, - 1.86038, - 1.95381, - 1.97141, - 1.9418, - 1.93867, - 1.96167, - 1.9798, - 1.9777, - 1.94992, - 1.96763, - 1.96742, - 1.97224, - 1.89956, - 1.99476, - 1.91959, - 1.96674, - 2.01863, - 1.95378, - 1.96567, - 1.91762, - 1.97196, - 1.99614, - 1.9843, - 1.93138, - 1.96464, - 1.99066, - 1.99496, - 1.94187, - 2.04153, - 2.00983, - 2.01253, - 1.98862, - 1.98532, - 1.93247, - 1.98124, - 1.98496, - 1.91601, - 2.00015, - 1.95752, - 1.85977, - 1.97536, - 1.91797, - 1.99533, - 1.98154, - 1.99169, - 1.98718, - 1.95177, - 2.00054, - 1.99086, - 1.98527, - 1.98955, - 1.98121, - 1.91877, - 2.03102, - 1.94662, - 1.96952, - 1.97537, - 1.93707, - 1.97287, - 1.98319, - 1.98094, - 1.98584, - 1.94898, - 2.03493, - 1.98483, - 1.95736, - 2.005, - 1.97067, - 1.92753, - 2.0404, - 2.01794, - 1.99445, - 1.96374, - 1.96249, - 1.96126, - 2.01567, - 1.97186, - 1.99377, - 1.96385, - 1.95966, - 1.91722, - 1.94026, - 2.04341, - 1.97561, - 2.03429, - 1.94834, - 1.95979, - 1.96698, - 1.99466, - 2.032, - 1.98647, - 1.97339, - 1.98541, - 1.99343, - 1.9975, - 2.00459, - 1.92977, - 1.94035, - 1.96027, - 1.96117, - 2.02045, - 1.95554, - 2.00729, - 1.97553, - 1.96472, - 1.90474, - 1.96908, - 1.9176, - 1.93222, - 1.97489, - 2.02916, - 1.95856, - 1.96698, - 1.982, - 1.98051, - 1.97411, - 1.94515, - 1.96233, - 1.96947, - 1.95161, - 1.98839, - 1.95187, - 1.95991, - 1.96441, - 2.02842, - 1.97327, - 1.92108, - 1.99463, - 1.97719, - 1.98958, - 2.00001, - 1.95279, - 1.90101, - 2.01805, - 2.01558, - 1.98936, - 1.99803, - 1.9932, - 1.95486, - 1.9493, - 1.93138, - 1.96692, - 1.964, - 1.99579, - 1.92504, - 2.0367, - 1.96875, - 1.9875, - 1.86965, - 1.93676, - 1.95676, - 1.98201, - 1.98704, - 1.90864, - 1.97297, - 1.95319, - 1.9565, - 1.96676, - 2.00463, - 1.88853, - 1.97872, - 1.95847, - 2.03037, - 1.99604, - 1.94762, - 2.01836, - 
1.95253, - 1.98769, - 1.93894, - 1.91301, - 2.024, - 1.97574, - 1.98434, - 1.9472, - 1.95914, - 1.94324, - 1.99734, - 1.94083, - 2.02947, - 2.00302, - 1.97415, - 1.91728, - 2.00511, - 1.93039, - 1.94029, - 1.96278, - 2.03847, - 1.99537, - 1.98783, - 1.98972, - 1.99169, - 2.04112, - 1.94444, - 1.92006, - 2.0123, - 1.96727, - 1.92559, - 1.99542, - 1.97775, - 1.99654, - 1.97345, - 1.97704, - 1.96876, - 1.9428, - 1.92134, - 1.97265, - 1.91729, - 1.9865, - 1.99779, - 1.95909, - 1.97465, - 1.98477, - 1.87031, - 1.92061, - 1.98045, - 1.99703, - 1.96988, - 2.00502, - 1.97002, - 2.01651, - 1.94624, - 1.90909, - 1.96184, - 2.03578, - 1.93211, - 2.00002, - 1.93402, - 1.98671, - 2.003, - 1.99881, - 1.93612, - 1.99127, - 1.89462, - 1.97984, - 1.98552, - 1.95373, - 1.9681, - 1.99415, - 2.03394, - 1.94494, - 1.96831, - 1.92203, - 2.05426, - 1.91021, - 1.91504, - 1.95663, - 1.98115, - 1.96429, - 1.95331, - 2.02275, - 1.94924, - 1.95192, - 1.98223, - 2.00738, - 2.01188, - 1.97933, - 2.0228, - 1.93587, - 1.99367, - 1.92953, - 1.92319, - 1.94797, - 1.96581, - 2.02049, - 1.92735, - 1.94909, - 1.94261, - 1.94637, - 1.93461, - 1.92548, - 1.96693, - 1.93239, - 1.93908, - 1.98171, - 1.93323, - 1.92038, - 1.90329, - 1.95412, - 1.96008, - 2.01787, - 1.91014, - 2.00295, - 1.94809, - 1.95648, - 1.916, - 1.94391, - 2.02286, - 1.92035, - 1.96339, - 1.98396, - 2.02977, - 1.94066, - 1.96189, - 1.96589, - 2.04575, - 1.9781, - 1.96108, - 2.01827, - 1.99769, - 1.93543, - 1.92655, - 1.98173, - 1.97946, - 1.98773, - 1.97598, - 1.96225, - 1.98576, - 1.97442, - 2.01132, - 2.00138, - 1.92463, - 1.94441, - 1.95364, - 1.94326, - 1.96604, - 1.91178, - 1.9505, - 1.97324, - 1.96651, - 1.91171, - 1.93661, - 2.05011, - 1.99516, - 1.93651, - 2.01667, - 2.04204, - 1.96781, - 1.9876, - 1.97798, - 1.99398, - 1.99633, - 1.9366, - 1.9785, - 1.97861, - 1.92202, - 1.99333, - 1.95395, - 1.95112, - 1.97162, - 1.96958, - 2.00216, - 1.9494, - 1.99109, - 2.01035, - 1.9599, - 1.9183, - 2.02702, - 1.94259, - 1.98105, - 1.99736, - 1.89613, - 1.99487, - 1.95124, - 2.00971, - 1.90702, - 1.95452, - 1.95907, - 1.96423, - 1.9766, - 1.99772, - 1.91466, - 1.98375, - 1.93421, - 1.92774, - 1.89509, - 1.95344, - 1.91103, - 2.00796, - 1.94012, - 2.0087, - 1.97784, - 1.8906, - 1.98044, - 1.95602, - 1.94264, - 1.95789, - 1.9387, - 1.96224, - 1.91959, - 1.93368, - 1.94242, - 2.02529, - 1.91847, - 1.96567, - 1.97997, - 1.98145, - 2.02076, - 1.94209, - 1.95255, - 2.04639, - 1.93688, - 2.00651, - 2.04311, - 1.8814, - 1.91513, - 1.95666, - 2.01217, - 1.96515, - 1.95301, - 1.96678, - 1.94906, - 1.95899, - 1.94074, - 2.0126, - 1.90498, - 1.9697, - 1.90526, - 1.96683, - 1.86889, - 1.96433, - 1.94823, - 1.93327, - 1.98054, - 1.95148, - 1.96087, - 1.95912, - 1.98236, - 1.98821, - 1.9516, - 1.95619, - 2.02611, - 1.98394, - 1.9687, - 1.9193, - 1.90065, - 1.97227, - 1.91581, - 1.93159, - 1.88678, - 1.96777, - 1.90822, - 2.00605, - 1.93586, - 1.98872, - 1.91784, - 1.87839, - 1.93603, - 1.90498, - 1.97621, - 1.97116, - 2.01805, - 1.88633, - 1.97953, - 1.9475, - 2.00233, - 1.96353, - 1.92185, - 1.92314, - 1.97937, - 1.99847, - 1.92785, - 2.00258, - 1.96824, - 2.00776, - 2.01612, - 2.01992, - 1.95369, - 1.93914, - 1.99563, - 1.94701, - 1.94031, - 1.94528, - 1.96042, - 1.87634, - 1.97201, - 2.00407, - 1.96966, - 1.91841, - 1.93842, - 1.98374, - 1.91854, - 2.01102, - 1.95802, - 1.93791, - 1.97447, - 1.99389, - 1.90215, - 1.97638, - 2.02795, - 1.96526, - 1.95481, - 2.00662, - 1.98545, - 1.98168, - 1.96571, - 1.9191, - 1.90479, - 1.95063, - 1.92533, - 1.98968, - 1.99873, - 1.9886, - 
2.01919, - 1.97103, - 1.93394, - 1.93393, - 1.99938, - 1.96804, - 1.94282, - 1.92131, - 1.95508, - 1.99982, - 1.94905, - 1.94513, - 2.00505, - 1.9914, - 1.99667, - 2.00357, - 1.94806, - 1.98821, - 1.91391, - 1.93545, - 1.90382, - 1.91899, - 1.90691, - 2.01546, - 1.92868, - 1.93954, - 1.95306, - 2.01139, - 1.93674, - 1.95268, - 1.91445, - 1.93099, - 1.96695, - 1.90718, - 1.96559, - 1.97965, - 1.99131, - 1.95215, - 1.98165, - 2.02754, - 1.98242, - 1.92454, - 1.90726, - 1.94256, - 1.98416, - 1.94241, - 1.95835, - 1.87194, - 1.915, - 1.94581, - 1.99088, - 1.95054, - 1.91561, - 1.96686, - 1.95393, - 1.8958, - 1.95457, - 1.97515, - 1.98473, - 1.98008, - 1.93856, - 1.95622, - 1.98293, - 1.90832, - 1.98032, - 1.98412, - 1.98345, - 2.00628, - 1.89234, - 1.93124, - 1.9189, - 1.96897, - 1.94453, - 1.97169, - 1.95243, - 1.98738, - 2.00436, - 1.96597, - 1.93939, - 2.0087, - 1.97986, - 1.93111, - 1.9553, - 1.9246, - 1.9193, - 1.96772, - 2.01156, - 1.96661, - 1.94821, - 1.85657, - 1.96243, - 1.94744, - 1.95039, - 2.00261, - 1.95025, - 1.93616, - 1.95649, - 2.01825, - 1.97371, - 1.91711, - 1.99027, - 1.93702, - 1.96006, - 1.92997, - 1.90419, - 1.97515, - 1.96562, - 1.91522, - 1.97064, - 1.94258, - 1.88581, - 1.95952, - 1.91051, - 1.98515, - 1.95377, - 1.98391, - 1.88486, - 1.98573, - 1.97312, - 2.01208, - 1.88471, - 1.96404, - 1.9231, - 1.92921, - 1.96775, - 1.91707, - 1.96622, - 1.98026, - 2.03567, - 2.02726, - 2.00526, - 1.96308, - 2.02671, - 1.92991, - 1.91613, - 1.9628, - 1.91566, - 1.93534, - 1.9043, - 1.93649, - 1.94982, - 1.90693, - 1.98251, - 1.99359, - 1.9303, - 2.00752, - 1.92463, - 1.94404, - 1.98053, - 1.90621, - 1.94625, - 1.96926, - 2.02117, - 1.95299, - 1.91649, - 1.98401, - 1.99524, - 1.9932, - 1.9009, - 1.96296, - 1.9222, - 1.92972, - 1.9293, - 1.97229, - 1.91057, - 1.98626, - 1.92968, - 1.98331, - 1.95597, - 1.93686, - 1.94116, - 2.00345, - 1.92524, - 2.01039, - 1.91759, - 1.93482, - 1.94821, - 1.95177, - 1.95889, - 1.86935, - 1.99405, - 1.87767, - 1.93979, - 1.96832, - 1.9717, - 1.87379, - 1.91173, - 1.97723, - 2.01459, - 1.91751, - 1.96033, - 1.95646, - 1.91157, - 1.90925, - 1.97586, - 1.94403, - 1.92181, - 1.95549, - 1.89846, - 1.99541, - 1.98837, - 1.92926, - 1.94585, - 2.00821, - 1.94127, - 1.96055, - 1.96686, - 1.9688, - 2.00608, - 2.03618, - 1.93263, - 1.93273, - 1.99351, - 1.97609, - 2.00285, - 1.95328, - 1.96078, - 1.96906, - 1.95953, - 1.93688, - 1.8941, - 1.9357, - 2.00772, - 2.0243, - 1.9744, - 1.99251, - 1.99392, - 1.94725, - 1.98753, - 1.87983, - 1.95964, - 1.97048, - 1.96031, - 2.01829, - 1.90627, - 1.94428, - 1.96609, - 1.97196, - 1.96765, - 1.95375, - 1.9182, - 2.01935, - 1.9988, - 1.98149, - 1.98468, - 1.96982, - 1.94275, - 1.96768, - 1.99241, - 1.91496, - 1.92985, - 1.9192, - 1.93568, - 1.86913, - 1.97695, - 1.90388, - 1.973, - 2.00545, - 1.99202, - 1.93116, - 1.91259, - 1.88296, - 1.94968, - 2.02245, - 1.99053, - 1.94634, - 1.92335, - 1.94601, - 1.91957, - 1.96721, - 1.96155, - 1.95578, - 1.99804, - 1.97308, - 1.97192, - 1.93278, - 1.99586, - 1.98785, - 2.00151, - 1.98252, - 1.9526, - 1.96387, - 1.95307, - 1.97407, - 2.00137, - 1.99633, - 1.90089, - 1.93632, - 1.91766, - 1.93775, - 1.99138, - 1.95878, - 1.93611, - 1.9049, - 2.02674, - 1.99672, - 1.99696, - 1.99015, - 1.94259, - 1.97976, - 1.95753, - 1.96631, - 1.93229, - 1.94634, - 1.93236, - 1.94069, - 1.95688, - 1.92525, - 1.95004, - 1.96046, - 1.95285, - 1.94777, - 1.90407, - 1.9985, - 1.95356, - 1.91561, - 1.93103, - 1.95786, - 1.92762, - 1.96006, - 1.99027, - 1.9632, - 1.90566, - 1.98402, - 1.9625, - 1.91858, - 
1.99667, - 2.00571, - 1.93598, - 1.94064, - 1.94169, - 1.9421, - 1.99361, - 1.98744, - 1.90862, - 1.94516, - 1.94857, - 1.98219, - 2.0496, - 2.01876, - 1.91018, - 1.96115, - 1.96214, - 1.94622, - 1.97607, - 1.89081, - 1.87321, - 1.98222, - 1.91435, - 1.95511, - 1.92419, - 1.91298, - 1.92271, - 1.88206, - 1.89561, - 1.9085, - 1.89732, - 1.99886, - 1.97409, - 1.9998, - 1.97167, - 1.97365, - 1.96472, - 2.0676, - 1.93329, - 1.91406, - 1.9499, - 1.94553, - 1.95389, - 1.90821, - 1.93315, - 1.98229, - 1.95678, - 1.96025, - 1.96028, - 1.9595, - 1.90981, - 1.89862, - 1.93178, - 1.95338, - 1.95793, - 1.92827, - 1.90126, - 1.98016, - 1.9693, - 1.97726, - 1.98079, - 1.93067, - 1.98612, - 2.02269, - 1.90535, - 1.90302, - 1.92914, - 1.87339, - 1.87628, - 1.97088, - 1.94866, - 1.9588, - 1.95355, - 1.95014, - 1.94164, - 1.9532, - 2.01957, - 1.92538, - 1.92938, - 1.98502, - 1.93127, - 1.96259, - 1.99424, - 1.98457, - 2.03483, - 1.95072, - 1.98271, - 2.01228, - 1.95502, - 2.02969, - 1.91887, - 2.00915, - 1.94795, - 1.98147, - 1.95175, - 1.8734, - 1.97696, - 1.99315, - 1.97147, - 1.95296, - 1.99764, - 1.93381, - 1.98352, - 1.96392, - 1.90621, - 1.97947, - 1.93631, - 1.97624, - 1.90753, - 1.96359, - 1.94559, - 1.91472, - 1.94847, - 1.97066, - 1.90796, - 1.90755, - 1.93825, - 1.97343, - 1.96213, - 1.93989, - 1.93812, - 2.00195, - 1.93497, - 1.94057, - 1.96496, - 1.94509, - 1.89868, - 1.96128, - 1.98457, - 1.95766, - 1.949, - 2.04589, - 1.96209, - 2.01578, - 1.97483, - 1.9516, - 1.95659, - 1.89522, - 1.91391, - 1.90362, - 1.95917, - 1.98161, - 1.953, - 1.94872, - 1.95364, - 1.92907, - 2.01951, - 1.87976, - 1.97935, - 1.9651, - 1.96125, - 1.98016, - 1.95402, - 1.89667, - 1.98883, - 1.92775, - 1.95007, - 2.01185, - 1.98455, - 1.97737, - 1.97814, - 1.94288, - 2.00561, - 1.932, - 1.97354, - 1.93004, - 1.96157, - 1.95592, - 1.96859, - 1.93378, - 1.92694, - 1.93169, - 1.89272, - 1.97236, - 1.98064, - 1.9593, - 1.96467, - 1.96668, - 1.95205, - 1.93102, - 1.90394, - 1.94362, - 1.93583, - 1.9786, - 2.01416, - 1.98787, - 1.99599, - 2.02246, - 1.98891, - 1.94502, - 1.92891, - 1.92293, - 1.98825, - 1.95673, - 1.92819, - 1.99713, - 1.88248, - 1.95218, - 1.88483, - 1.94384, - 1.95257, - 1.8953, - 1.95737, - 1.95864, - 1.94424, - 2.02371, - 1.95469, - 1.98219, - 1.95691, - 1.94304, - 1.90884, - 1.9809, - 1.96286, - 1.91628, - 1.92269, - 1.8572, - 1.92198, - 1.93977, - 1.97591, - 1.94359, - 1.87961, - 1.95293, - 1.94019, - 1.97773, - 1.96765, - 1.88061, - 1.90556, - 1.9363, - 2.00088, - 1.92137, - 1.90157, - 1.97114, - 1.93604, - 1.94127, - 1.92278, - 1.9119, - 1.95194, - 1.95393, - 1.95208, - 1.93649, - 1.90274, - 1.93547, - 1.96397, - 1.94352, - 1.96077, - 1.94851, - 1.914, - 1.90888, - 2.01122, - 1.95399, - 1.99894, - 1.92558, - 1.90957, - 1.95812, - 1.92526, - 1.92883, - 1.88316, - 1.92514, - 2.0001, - 1.927, - 1.98376, - 1.94136, - 1.95811, - 1.97758, - 1.9398, - 1.90329, - 1.92893, - 1.92894, - 1.96436, - 1.95364, - 1.88869, - 1.93606, - 2.03627, - 1.89387, - 1.94449, - 1.95805, - 1.9099, - 1.93298, - 1.94024, - 1.97732, - 1.9576, - 1.92632, - 1.88371, - 1.89318, - 1.89805, - 1.98557, - 1.9073, - 1.96748, - 1.98032, - 1.98804, - 1.96027, - 1.97784, - 1.97296, - 1.9718, - 1.90683, - 1.98335, - 1.90942, - 1.89952, - 1.93024, - 1.91363, - 1.95551, - 1.94315, - 1.95338, - 1.95067, - 1.94898, - 1.89859, - 1.89276, - 2.00752, - 1.93466, - 1.98859, - 1.97517, - 1.95262, - 1.89435, - 1.97489, - 1.94462, - 1.9635, - 1.893, - 1.9907, - 1.94562, - 1.9537, - 1.92536, - 1.96477, - 1.94561, - 1.92761, - 1.9499, - 1.88887, - 1.91358, - 
-            [... several thousand removed per-step loss golden values, roughly in the range 1.79 to 2.07 ...]
-        ]
-    },
-    "mem-allocated-bytes": {
-        "start_step": 0,
-        "end_step": 100000,
-        "step_interval": 5,
-        "values": [
-            1117047808.0,
-            [... repeated 1117048320.0 entries removed for the remaining sampled steps ...]
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
-        1117048320.0,
         [... long run of identical removed values `1117048320.0` collapsed; the original diff deletes several thousand repeated entries of this constant ...]
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 
1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1117048320.0, - 1118882816.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
-        1118883328.0,
-        [... removed golden-values entries: the value 1118883328.0 repeated for the remainder of this list ...]
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 
1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0, - 1118883328.0 - ] - }, - "iteration-time": { - "start_step": 0, - "end_step": 1000, - "step_interval": 5, - "values": [ - 0.45353, - 0.23209, - 0.25297, - 0.23205, - 0.2415, - 0.23918, - 0.24626, - 0.2488, - 0.2476, - 0.23596, - 0.2485, - 0.23586, - 0.24061, - 0.23338, - 0.24468, - 0.23241, - 0.23571, - 0.23584, - 0.24489, - 0.23889, - 0.23646, - 0.24278, - 0.25148, - 0.24502, - 0.23865, - 0.2462, - 0.24847, - 0.24321, - 0.24593, - 0.2318, - 0.23928, - 0.23065, - 0.24653, - 0.25709, - 0.24503, - 0.25272, - 0.23876, - 0.23279, - 0.24315, - 0.24757, - 0.23216, - 0.2345, - 0.23488, - 0.23029, - 0.23721, - 0.23297, - 0.23275, - 0.24479, - 0.23101, - 0.23709, - 0.23499, - 0.24015, - 0.22428, - 0.22672, - 0.23275, - 0.23251, - 0.24233, - 0.22902, - 0.23811, - 0.23007, - 0.22896, - 0.22706, - 0.23094, - 0.23004, - 0.2316, - 0.23295, - 0.23045, - 0.23442, - 0.2372, - 0.2457, - 0.24889, - 0.24452, - 0.24207, - 0.23029, - 0.23179, - 0.23908, - 0.23194, - 0.23722, - 0.23168, - 0.22972, - 
0.23308, - 0.23595, - 0.23116, - 0.23601, - 0.22899, - 0.22491, - 0.23136, - 0.23255, - 0.23006, - 0.23447, - 0.24359, - 0.23347, - 0.23242, - 0.23813, - 0.23653, - 0.23156, - 0.23175, - 0.22917, - 0.23357, - 0.23801, - 0.23139, - 0.24071, - 0.2432, - 0.23216, - 0.23038, - 0.23623, - 0.23784, - 0.24029, - 0.23416, - 0.2287, - 0.23405, - 0.22745, - 0.23034, - 0.23069, - 0.23327, - 0.23354, - 0.26181, - 0.23973, - 0.24615, - 0.24032, - 0.23533, - 0.23077, - 0.24415, - 0.24273, - 0.22938, - 0.23886, - 0.23963, - 0.23902, - 0.24358, - 0.23909, - 0.23603, - 0.23088, - 0.23813, - 0.23879, - 0.22401, - 0.22639, - 0.22532, - 0.23021, - 0.23264, - 0.23304, - 0.22785, - 0.23129, - 0.2273, - 0.2342, - 0.23183, - 0.24365, - 0.23386, - 0.22935, - 0.22818, - 0.23377, - 0.23758, - 0.23452, - 0.23466, - 0.23651, - 0.22953, - 0.23245, - 0.23621, - 0.23631, - 0.23014, - 0.23192, - 0.2339, - 0.22968, - 0.22665, - 0.22848, - 0.22875, - 0.22621, - 0.23896, - 0.23524, - 0.22545, - 0.22718, - 0.22611, - 0.22976, - 0.22134, - 0.2263, - 0.23067, - 0.23293, - 0.22112, - 0.22919, - 0.2383, - 0.23477, - 0.22381, - 0.2317, - 0.24013, - 0.23142, - 0.22907, - 0.2316, - 0.23856, - 0.22676, - 0.22578, - 0.22978, - 0.23092, - 0.2225, - 0.22875, - 0.22386, - 0.23257, - 0.23442, - 0.22749, - 0.22365, - 0.22888, - 0.22815 - ] - } -} \ No newline at end of file +{"lm loss": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 10.34371, "5": 10.33853, "10": 10.2557, "15": 9.88109, "20": 9.67347, "25": 9.54544, "30": 9.43761, "35": 9.36719, "40": 9.28505, "45": 9.20711, "50": 9.15625, "55": 9.08671, "60": 9.00309, "65": 8.93062, "70": 8.93165, "75": 8.84513, "80": 8.84433, "85": 8.76025, "90": 8.75761, "95": 8.71069, "100": 8.65811, "105": 8.61293, "110": 8.56214, "115": 8.52488, "120": 8.52724, "125": 8.45265, "130": 8.424, "135": 8.34476, "140": 8.32478, "145": 8.28538, "150": 8.24032, "155": 8.14898, "160": 8.13302, "165": 8.09971, "170": 8.05438, "175": 8.04411, "180": 7.89768, "185": 7.90338, "190": 7.81869, "195": 7.77442, "200": 7.71968, "205": 7.67746, "210": 7.59887, "215": 7.565, "220": 7.49497, "225": 7.44003, "230": 7.36349, "235": 7.34224, "240": 7.2445, "245": 7.18398, "250": 7.11755, "255": 7.08177, "260": 7.01159, "265": 6.9773, "270": 6.94125, "275": 6.86403, "280": 6.81156, "285": 6.77898, "290": 6.77247, "295": 6.66994, "300": 6.67428, "305": 6.59998, "310": 6.55363, "315": 6.48465, "320": 6.52796, "325": 6.51158, "330": 6.4762, "335": 6.42146, "340": 6.43132, "345": 6.38038, "350": 6.40887, "355": 6.35014, "360": 6.3656, "365": 6.33209, "370": 6.31018, "375": 6.28933, "380": 6.2611, "385": 6.25355, "390": 6.21382, "395": 6.20855, "400": 6.2672, "405": 6.17396, "410": 6.24914, "415": 6.16383, "420": 6.16776, "425": 6.12509, "430": 6.08937, "435": 6.14622, "440": 6.08621, "445": 6.03392, "450": 6.10477, "455": 6.00673, "460": 6.03403, "465": 6.03885, "470": 5.97666, "475": 5.96894, "480": 5.96344, "485": 5.98047, "490": 5.9231, "495": 5.86873, "500": 5.86811, "505": 5.88539, "510": 5.90765, "515": 5.81604, "520": 5.85792, "525": 5.86641, "530": 5.83808, "535": 5.8351, "540": 5.82754, "545": 5.78071, "550": 5.77758, "555": 5.77385, "560": 5.77339, "565": 5.74687, "570": 5.73921, "575": 5.71602, "580": 5.66883, "585": 5.70711, "590": 5.64448, "595": 5.64448, "600": 5.65093, "605": 5.64438, "610": 5.69319, "615": 5.64043, "620": 5.66491, "625": 5.62779, "630": 5.57851, "635": 5.60557, "640": 5.58071, "645": 5.59373, "650": 5.56705, "655": 5.57242, "660": 5.54323, "665": 5.57531, "670": 5.48103, 
"675": 5.51529, "680": 5.52298, "685": 5.50901, "690": 5.53093, "695": 5.47935, "700": 5.43906, "705": 5.38491, "710": 5.40785, "715": 5.45539, "720": 5.41745, "725": 5.44858, "730": 5.41744, "735": 5.41042, "740": 5.39219, "745": 5.40805, "750": 5.43092, "755": 5.41449, "760": 5.35723, "765": 5.36225, "770": 5.36139, "775": 5.32282, "780": 5.31874, "785": 5.30201, "790": 5.30331, "795": 5.31219, "800": 5.31844, "805": 5.35201, "810": 5.30197, "815": 5.25076, "820": 5.28532, "825": 5.28928, "830": 5.26522, "835": 5.28923, "840": 5.30911, "845": 5.22817, "850": 5.23066, "855": 5.25079, "860": 5.25387, "865": 5.24892, "870": 5.22343, "875": 5.1956, "880": 5.28355, "885": 5.26167, "890": 5.20718, "895": 5.21151, "900": 5.22583, "905": 5.22932, "910": 5.17863, "915": 5.21729, "920": 5.19138, "925": 5.19532, "930": 5.1769, "935": 5.14962, "940": 5.15273, "945": 5.18515, "950": 5.14362, "955": 5.0953, "960": 5.12993, "965": 5.08798, "970": 5.06337, "975": 5.11713, "980": 5.09179, "985": 5.0576, "990": 5.0926, "995": 5.08705, "1000": 5.09047, "1005": 5.09717, "1010": 5.09293, "1015": 5.06353, "1020": 5.10023, "1025": 5.04121, "1030": 4.99097, "1035": 5.07056, "1040": 5.02031, "1045": 5.05313, "1050": 5.08063, "1055": 5.03734, "1060": 5.07585, "1065": 4.98428, "1070": 4.98213, "1075": 4.98886, "1080": 4.97007, "1085": 5.01359, "1090": 5.01615, "1095": 5.03221, "1100": 5.01276, "1105": 4.99236, "1110": 4.93523, "1115": 4.93343, "1120": 4.95149, "1125": 4.89528, "1130": 5.04007, "1135": 4.92337, "1140": 4.94918, "1145": 4.91494, "1150": 4.98431, "1155": 4.89853, "1160": 4.89661, "1165": 4.91206, "1170": 4.99027, "1175": 4.95498, "1180": 4.84883, "1185": 4.93082, "1190": 4.8691, "1195": 4.92551, "1200": 5.01559, "1205": 4.96707, "1210": 4.8348, "1215": 4.88511, "1220": 4.89034, "1225": 4.88773, "1230": 4.8983, "1235": 4.88341, "1240": 4.87922, "1245": 4.88684, "1250": 4.8361, "1255": 4.87963, "1260": 4.80114, "1265": 4.86119, "1270": 4.93315, "1275": 4.84811, "1280": 4.81806, "1285": 4.91557, "1290": 4.74067, "1295": 4.83039, "1300": 4.85249, "1305": 4.82903, "1310": 4.92002, "1315": 4.7943, "1320": 4.87947, "1325": 4.7706, "1330": 4.81601, "1335": 4.7955, "1340": 4.81341, "1345": 4.81711, "1350": 4.83017, "1355": 4.79835, "1360": 4.73392, "1365": 4.85097, "1370": 4.8165, "1375": 4.83032, "1380": 4.7517, "1385": 4.73074, "1390": 4.79235, "1395": 4.79321, "1400": 4.66988, "1405": 4.67976, "1410": 4.75429, "1415": 4.79182, "1420": 4.75934, "1425": 4.72246, "1430": 4.75168, "1435": 4.76379, "1440": 4.76568, "1445": 4.74632, "1450": 4.71625, "1455": 4.65119, "1460": 4.7607, "1465": 4.73956, "1470": 4.70042, "1475": 4.70586, "1480": 4.72613, "1485": 4.75771, "1490": 4.75921, "1495": 4.72386, "1500": 4.75735, "1505": 4.68084, "1510": 4.70343, "1515": 4.70365, "1520": 4.76229, "1525": 4.7311, "1530": 4.68608, "1535": 4.6928, "1540": 4.71171, "1545": 4.60306, "1550": 4.63407, "1555": 4.60737, "1560": 4.70591, "1565": 4.62772, "1570": 4.65621, "1575": 4.66604, "1580": 4.65802, "1585": 4.69001, "1590": 4.67112, "1595": 4.7159, "1600": 4.68778, "1605": 4.67026, "1610": 4.60514, "1615": 4.62131, "1620": 4.69944, "1625": 4.66486, "1630": 4.60784, "1635": 4.60222, "1640": 4.68465, "1645": 4.63812, "1650": 4.65452, "1655": 4.61573, "1660": 4.67243, "1665": 4.61582, "1670": 4.69445, "1675": 4.64485, "1680": 4.59013, "1685": 4.63525, "1690": 4.63578, "1695": 4.66283, "1700": 4.64969, "1705": 4.63636, "1710": 4.6302, "1715": 4.6857, "1720": 4.64194, "1725": 4.65472, "1730": 4.62697, "1735": 4.6251, "1740": 4.59849, 
"1745": 4.62115, "1750": 4.5965, "1755": 4.63379, "1760": 4.5838, "1765": 4.61156, "1770": 4.60723, "1775": 4.58844, "1780": 4.59358, "1785": 4.60291, "1790": 4.50232, "1795": 4.57478, "1800": 4.59978, "1805": 4.56067, "1810": 4.55628, "1815": 4.56943, "1820": 4.58688, "1825": 4.61812, "1830": 4.56932, "1835": 4.55231, "1840": 4.47963, "1845": 4.51888, "1850": 4.62415, "1855": 4.58098, "1860": 4.55842, "1865": 4.54036, "1870": 4.57516, "1875": 4.54143, "1880": 4.5833, "1885": 4.50631, "1890": 4.49975, "1895": 4.62853, "1900": 4.47781, "1905": 4.55674, "1910": 4.58689, "1915": 4.57361, "1920": 4.54996, "1925": 4.52289, "1930": 4.54807, "1935": 4.52189, "1940": 4.50201, "1945": 4.5263, "1950": 4.50474, "1955": 4.5413, "1960": 4.53325, "1965": 4.55845, "1970": 4.51154, "1975": 4.53972, "1980": 4.49917, "1985": 4.56457, "1990": 4.46517, "1995": 4.54992, "2000": 4.54177, "2005": 4.51533, "2010": 4.52153, "2015": 4.54578, "2020": 4.47712, "2025": 4.47891, "2030": 4.54971, "2035": 4.50398, "2040": 4.55782, "2045": 4.43257, "2050": 4.50666, "2055": 4.49631, "2060": 4.47515, "2065": 4.52949, "2070": 4.47426, "2075": 4.54758, "2080": 4.53735, "2085": 4.47421, "2090": 4.45004, "2095": 4.41024, "2100": 4.38698, "2105": 4.42323, "2110": 4.5468, "2115": 4.5387, "2120": 4.54681, "2125": 4.46297, "2130": 4.45683, "2135": 4.48564, "2140": 4.49312, "2145": 4.43131, "2150": 4.46663, "2155": 4.42978, "2160": 4.40231, "2165": 4.53235, "2170": 4.46239, "2175": 4.51472, "2180": 4.48149, "2185": 4.39754, "2190": 4.44351, "2195": 4.47939, "2200": 4.44359, "2205": 4.35953, "2210": 4.45728, "2215": 4.43326, "2220": 4.42536, "2225": 4.41049, "2230": 4.4072, "2235": 4.42078, "2240": 4.40123, "2245": 4.45593, "2250": 4.41692, "2255": 4.48123, "2260": 4.43424, "2265": 4.39803, "2270": 4.37951, "2275": 4.36654, "2280": 4.45661, "2285": 4.42535, "2290": 4.42279, "2295": 4.46375, "2300": 4.44314, "2305": 4.4253, "2310": 4.37838, "2315": 4.39956, "2320": 4.30859, "2325": 4.41178, "2330": 4.4336, "2335": 4.41991, "2340": 4.40128, "2345": 4.42633, "2350": 4.33333, "2355": 4.41732, "2360": 4.44927, "2365": 4.38507, "2370": 4.35955, "2375": 4.44091, "2380": 4.41524, "2385": 4.31984, "2390": 4.37539, "2395": 4.34034, "2400": 4.37871, "2405": 4.40904, "2410": 4.32906, "2415": 4.4696, "2420": 4.41223, "2425": 4.36551, "2430": 4.43321, "2435": 4.32242, "2440": 4.39022, "2445": 4.38085, "2450": 4.37111, "2455": 4.43527, "2460": 4.36811, "2465": 4.39025, "2470": 4.38518, "2475": 4.36735, "2480": 4.42822, "2485": 4.42294, "2490": 4.30997, "2495": 4.32889, "2500": 4.38275, "2505": 4.35045, "2510": 4.31294, "2515": 4.34195, "2520": 4.40083, "2525": 4.29743, "2530": 4.35393, "2535": 4.411, "2540": 4.34479, "2545": 4.31938, "2550": 4.3651, "2555": 4.35978, "2560": 4.37331, "2565": 4.3505, "2570": 4.40727, "2575": 4.3531, "2580": 4.34965, "2585": 4.3483, "2590": 4.37918, "2595": 4.29284, "2600": 4.37153, "2605": 4.41005, "2610": 4.33914, "2615": 4.30025, "2620": 4.36731, "2625": 4.39024, "2630": 4.3334, "2635": 4.37064, "2640": 4.35982, "2645": 4.35308, "2650": 4.33975, "2655": 4.38811, "2660": 4.28717, "2665": 4.2314, "2670": 4.32982, "2675": 4.25209, "2680": 4.29281, "2685": 4.27255, "2690": 4.2898, "2695": 4.23506, "2700": 4.30573, "2705": 4.27107, "2710": 4.27236, "2715": 4.26134, "2720": 4.31644, "2725": 4.29543, "2730": 4.25104, "2735": 4.25677, "2740": 4.36652, "2745": 4.24933, "2750": 4.28773, "2755": 4.38289, "2760": 4.34521, "2765": 4.33845, "2770": 4.30247, "2775": 4.28684, "2780": 4.32327, "2785": 4.24997, "2790": 4.31999, 
"2795": 4.31879, "2800": 4.3261, "2805": 4.18362, "2810": 4.2413, "2815": 4.26759, "2820": 4.30562, "2825": 4.23553, "2830": 4.29445, "2835": 4.34597, "2840": 4.28125, "2845": 4.27406, "2850": 4.27479, "2855": 4.29609, "2860": 4.33494, "2865": 4.27481, "2870": 4.26574, "2875": 4.25478, "2880": 4.25976, "2885": 4.24211, "2890": 4.22476, "2895": 4.31304, "2900": 4.2372, "2905": 4.29239, "2910": 4.21809, "2915": 4.25855, "2920": 4.24577, "2925": 4.27783, "2930": 4.24629, "2935": 4.28219, "2940": 4.28764, "2945": 4.23954, "2950": 4.19459, "2955": 4.26614, "2960": 4.17207, "2965": 4.25917, "2970": 4.26519, "2975": 4.26429, "2980": 4.21875, "2985": 4.29041, "2990": 4.21088, "2995": 4.30904, "3000": 4.18008, "3005": 4.19456, "3010": 4.27703, "3015": 4.20629, "3020": 4.22336, "3025": 4.20244, "3030": 4.20024, "3035": 4.21957, "3040": 4.22995, "3045": 4.20935, "3050": 4.28007, "3055": 4.22704, "3060": 4.21312, "3065": 4.25691, "3070": 4.20031, "3075": 4.22052, "3080": 4.16913, "3085": 4.21684, "3090": 4.18342, "3095": 4.22938, "3100": 4.2866, "3105": 4.19009, "3110": 4.1995, "3115": 4.18522, "3120": 4.25933, "3125": 4.16719, "3130": 4.19366, "3135": 4.19428, "3140": 4.21306, "3145": 4.19234, "3150": 4.23805, "3155": 4.16479, "3160": 4.23507, "3165": 4.20815, "3170": 4.15227, "3175": 4.25601, "3180": 4.16051, "3185": 4.14206, "3190": 4.19373, "3195": 4.17519, "3200": 4.21916, "3205": 4.17508, "3210": 4.17622, "3215": 4.20876, "3220": 4.16136, "3225": 4.20412, "3230": 4.18525, "3235": 4.20589, "3240": 4.13467, "3245": 4.17448, "3250": 4.24697, "3255": 4.2179, "3260": 4.17552, "3265": 4.09919, "3270": 4.17706, "3275": 4.19882, "3280": 4.19569, "3285": 4.15487, "3290": 4.20047, "3295": 4.2508, "3300": 4.24001, "3305": 4.16673, "3310": 4.21831, "3315": 4.15983, "3320": 4.2077, "3325": 4.21262, "3330": 4.21204, "3335": 4.11599, "3340": 4.20285, "3345": 4.18401, "3350": 4.21235, "3355": 4.14345, "3360": 4.10781, "3365": 4.13952, "3370": 4.13845, "3375": 4.19006, "3380": 4.14362, "3385": 4.13013, "3390": 4.14096, "3395": 4.16026, "3400": 4.12168, "3405": 4.22297, "3410": 4.12653, "3415": 4.18092, "3420": 4.10859, "3425": 4.12167, "3430": 4.14101, "3435": 4.14105, "3440": 4.19374, "3445": 4.18063, "3450": 4.18825, "3455": 4.0978, "3460": 4.1116, "3465": 4.13023, "3470": 4.10904, "3475": 4.14462, "3480": 4.17622, "3485": 4.10885, "3490": 4.2036, "3495": 4.12504, "3500": 4.10687, "3505": 4.07999, "3510": 4.13932, "3515": 4.16504, "3520": 4.10465, "3525": 4.15419, "3530": 4.19639, "3535": 4.18952, "3540": 4.09097, "3545": 4.06298, "3550": 4.12819, "3555": 4.03027, "3560": 4.05385, "3565": 4.13329, "3570": 4.13952, "3575": 4.16902, "3580": 4.09053, "3585": 4.07902, "3590": 4.08606, "3595": 4.19215, "3600": 4.12193, "3605": 4.058, "3610": 4.12137, "3615": 4.13422, "3620": 4.13601, "3625": 4.11688, "3630": 4.01467, "3635": 4.05403, "3640": 4.09183, "3645": 4.12965, "3650": 4.07215, "3655": 4.13075, "3660": 4.03825, "3665": 4.1851, "3670": 4.12589, "3675": 4.12045, "3680": 4.08351, "3685": 4.0723, "3690": 4.02175, "3695": 4.08389, "3700": 4.09413, "3705": 4.06696, "3710": 4.10814, "3715": 4.07865, "3720": 4.01009, "3725": 4.12149, "3730": 4.07993, "3735": 4.08926, "3740": 4.03853, "3745": 4.02222, "3750": 4.03558, "3755": 3.94283, "3760": 4.08225, "3765": 4.05872, "3770": 4.02136, "3775": 4.03998, "3780": 4.07639, "3785": 3.98903, "3790": 4.09952, "3795": 4.10981, "3800": 4.09522, "3805": 4.06065, "3810": 4.07018, "3815": 4.05523, "3820": 4.09748, "3825": 4.01176, "3830": 3.98471, "3835": 4.07555, "3840": 4.02559, 
"3845": 4.06493, "3850": 4.02226, "3855": 4.00649, "3860": 4.09255, "3865": 4.04792, "3870": 4.12469, "3875": 4.01282, "3880": 4.08618, "3885": 4.02019, "3890": 4.04961, "3895": 4.06743, "3900": 4.05797, "3905": 4.03952, "3910": 4.08976, "3915": 4.01593, "3920": 4.045, "3925": 4.01142, "3930": 4.05464, "3935": 4.02945, "3940": 4.08413, "3945": 4.06772, "3950": 4.02866, "3955": 4.04284, "3960": 3.9987, "3965": 4.04091, "3970": 3.97555, "3975": 4.06161, "3980": 3.97731, "3985": 4.0735, "3990": 4.03991, "3995": 4.09025, "4000": 4.01181, "4005": 4.05571, "4010": 4.01103, "4015": 4.07681, "4020": 4.08229, "4025": 4.02857, "4030": 4.0409, "4035": 4.01203, "4040": 4.02223, "4045": 4.06363, "4050": 4.11416, "4055": 3.99819, "4060": 4.01008, "4065": 4.03779, "4070": 4.02893, "4075": 4.01767, "4080": 3.93547, "4085": 4.00094, "4090": 4.06044, "4095": 3.96883, "4100": 4.01642, "4105": 4.00651, "4110": 3.98443, "4115": 4.02512, "4120": 4.02879, "4125": 3.97846, "4130": 3.99113, "4135": 3.94084, "4140": 4.0332, "4145": 4.00581, "4150": 3.9541, "4155": 4.02133, "4160": 3.93958, "4165": 4.06565, "4170": 4.0315, "4175": 4.04759, "4180": 4.07089, "4185": 3.96936, "4190": 3.97004, "4195": 3.94854, "4200": 4.03374, "4205": 4.01773, "4210": 3.97487, "4215": 4.02645, "4220": 4.03945, "4225": 3.98553, "4230": 4.00267, "4235": 4.02386, "4240": 3.93154, "4245": 3.922, "4250": 3.969, "4255": 3.93054, "4260": 3.95254, "4265": 3.96321, "4270": 3.93407, "4275": 3.93888, "4280": 4.01583, "4285": 3.94557, "4290": 3.9129, "4295": 4.00895, "4300": 3.9107, "4305": 3.97426, "4310": 4.03452, "4315": 3.96913, "4320": 4.02498, "4325": 3.8834, "4330": 3.97545, "4335": 3.93646, "4340": 3.97369, "4345": 3.94569, "4350": 3.97709, "4355": 3.93039, "4360": 3.91948, "4365": 3.92385, "4370": 3.95471, "4375": 3.98175, "4380": 3.96526, "4385": 3.97384, "4390": 3.98534, "4395": 3.99564, "4400": 3.98173, "4405": 3.89058, "4410": 3.94153, "4415": 3.94393, "4420": 3.96072, "4425": 4.00626, "4430": 3.89229, "4435": 3.95437, "4440": 3.91692, "4445": 3.92511, "4450": 3.87321, "4455": 3.96235, "4460": 3.84984, "4465": 3.9506, "4470": 3.93465, "4475": 3.92112, "4480": 3.99442, "4485": 3.87966, "4490": 3.86387, "4495": 3.9056, "4500": 3.92038, "4505": 3.97854, "4510": 4.0114, "4515": 3.93981, "4520": 3.95509, "4525": 3.85741, "4530": 3.97886, "4535": 3.91812, "4540": 3.91861, "4545": 3.87911, "4550": 3.9172, "4555": 3.95671, "4560": 3.92454, "4565": 3.92719, "4570": 3.89842, "4575": 3.888, "4580": 3.87192, "4585": 3.96418, "4590": 3.84095, "4595": 3.94811, "4600": 3.90371, "4605": 3.88768, "4610": 3.84903, "4615": 3.89223, "4620": 3.91581, "4625": 3.91602, "4630": 3.95649, "4635": 3.89698, "4640": 3.96477, "4645": 3.93386, "4650": 3.90963, "4655": 3.93266, "4660": 3.93124, "4665": 3.87802, "4670": 3.90095, "4675": 3.89087, "4680": 3.9627, "4685": 3.87512, "4690": 3.85954, "4695": 3.89851, "4700": 3.9021, "4705": 3.87448, "4710": 3.88852, "4715": 3.90121, "4720": 3.91947, "4725": 3.89393, "4730": 3.90081, "4735": 3.88187, "4740": 3.86122, "4745": 3.87931, "4750": 3.90153, "4755": 3.92984, "4760": 3.86431, "4765": 3.83208, "4770": 3.9062, "4775": 3.91039, "4780": 3.86192, "4785": 3.92096, "4790": 3.91013, "4795": 3.91397, "4800": 3.89527, "4805": 3.87965, "4810": 3.87068, "4815": 3.97355, "4820": 3.8911, "4825": 3.80497, "4830": 3.90444, "4835": 3.87762, "4840": 3.90734, "4845": 3.83122, "4850": 3.90343, "4855": 3.83752, "4860": 3.84421, "4865": 3.89322, "4870": 3.86826, "4875": 3.82127, "4880": 3.89242, "4885": 3.87912, "4890": 3.94594, "4895": 
3.92286, "4900": 3.8409, "4905": 3.85458, "4910": 3.85607, "4915": 3.88997, "4920": 3.87315, "4925": 3.82807, "4930": 3.81168, "4935": 3.85486, "4940": 3.82972, "4945": 3.95373, "4950": 3.81951, "4955": 3.8502, "4960": 3.83429, "4965": 3.84149, "4970": 3.87445, "4975": 3.89503, "4980": 3.85506, "4985": 3.81859, "4990": 3.82953, "4995": 3.86747, "5000": 3.83404, "5005": 3.90591, "5010": 3.87554, "5015": 3.8045, "5020": 3.84346, "5025": 3.87707, "5030": 3.78441, "5035": 3.86925, "5040": 3.80256, "5045": 3.84662, "5050": 3.80845, "5055": 3.88214, "5060": 3.88744, "5065": 3.81398, "5070": 3.81822, "5075": 3.79382, "5080": 3.84163, "5085": 3.84376, "5090": 3.87747, "5095": 3.78825, "5100": 3.84455, "5105": 3.8722, "5110": 3.83785, "5115": 3.78854, "5120": 3.71526, "5125": 3.84503, "5130": 3.74454, "5135": 3.81167, "5140": 3.79811, "5145": 3.76187, "5150": 3.79535, "5155": 3.82824, "5160": 3.77995, "5165": 3.84519, "5170": 3.79695, "5175": 3.80875, "5180": 3.79695, "5185": 3.87481, "5190": 3.79992, "5195": 3.79596, "5200": 3.77034, "5205": 3.75795, "5210": 3.74002, "5215": 3.71664, "5220": 3.78978, "5225": 3.81443, "5230": 3.78768, "5235": 3.80113, "5240": 3.77785, "5245": 3.77061, "5250": 3.75713, "5255": 3.86979, "5260": 3.7957, "5265": 3.80408, "5270": 3.80759, "5275": 3.81143, "5280": 3.7536, "5285": 3.8201, "5290": 3.80333, "5295": 3.78949, "5300": 3.78159, "5305": 3.80054, "5310": 3.77504, "5315": 3.73102, "5320": 3.78768, "5325": 3.73621, "5330": 3.7593, "5335": 3.79179, "5340": 3.84902, "5345": 3.77484, "5350": 3.81958, "5355": 3.80826, "5360": 3.83915, "5365": 3.75445, "5370": 3.70426, "5375": 3.79851, "5380": 3.7088, "5385": 3.75542, "5390": 3.73098, "5395": 3.69435, "5400": 3.79824, "5405": 3.79746, "5410": 3.76881, "5415": 3.75937, "5420": 3.82375, "5425": 3.69335, "5430": 3.78944, "5435": 3.77046, "5440": 3.76148, "5445": 3.69243, "5450": 3.69186, "5455": 3.69016, "5460": 3.69969, "5465": 3.71728, "5470": 3.76309, "5475": 3.69306, "5480": 3.81659, "5485": 3.7634, "5490": 3.68584, "5495": 3.72111, "5500": 3.76286, "5505": 3.78389, "5510": 3.76636, "5515": 3.76071, "5520": 3.77272, "5525": 3.74925, "5530": 3.7091, "5535": 3.71308, "5540": 3.69987, "5545": 3.78166, "5550": 3.69851, "5555": 3.79452, "5560": 3.73309, "5565": 3.69702, "5570": 3.8246, "5575": 3.75632, "5580": 3.69312, "5585": 3.70282, "5590": 3.71556, "5595": 3.75172, "5600": 3.71392, "5605": 3.69191, "5610": 3.71155, "5615": 3.65669, "5620": 3.69153, "5625": 3.6906, "5630": 3.70807, "5635": 3.68766, "5640": 3.71076, "5645": 3.71996, "5650": 3.69039, "5655": 3.70972, "5660": 3.71751, "5665": 3.72121, "5670": 3.70234, "5675": 3.75938, "5680": 3.72657, "5685": 3.70066, "5690": 3.66638, "5695": 3.76142, "5700": 3.72955, "5705": 3.67267, "5710": 3.68184, "5715": 3.67582, "5720": 3.69506, "5725": 3.71471, "5730": 3.78665, "5735": 3.6842, "5740": 3.78414, "5745": 3.71754, "5750": 3.6603, "5755": 3.72812, "5760": 3.81327, "5765": 3.6861, "5770": 3.75597, "5775": 3.66853, "5780": 3.65079, "5785": 3.72025, "5790": 3.68816, "5795": 3.70554, "5800": 3.71792, "5805": 3.69868, "5810": 3.6462, "5815": 3.7034, "5820": 3.69838, "5825": 3.63591, "5830": 3.64868, "5835": 3.64386, "5840": 3.63491, "5845": 3.71707, "5850": 3.75531, "5855": 3.60936, "5860": 3.6562, "5865": 3.66447, "5870": 3.72012, "5875": 3.65331, "5880": 3.70789, "5885": 3.63385, "5890": 3.67496, "5895": 3.61135, "5900": 3.6495, "5905": 3.65534, "5910": 3.6624, "5915": 3.61019, "5920": 3.67019, "5925": 3.64382, "5930": 3.62538, "5935": 3.62903, "5940": 3.6257, "5945": 
3.67044, "5950": 3.65794, "5955": 3.68069, "5960": 3.64601, "5965": 3.69899, "5970": 3.62528, "5975": 3.62382, "5980": 3.61613, "5985": 3.68838, "5990": 3.69556, "5995": 3.59767, "6000": 3.60896, "6005": 3.61611, "6010": 3.63455, "6015": 3.6521, "6020": 3.6482, "6025": 3.60262, "6030": 3.66602, "6035": 3.66101, "6040": 3.47568, "6045": 3.63257, "6050": 3.56475, "6055": 3.59827, "6060": 3.62622, "6065": 3.68074, "6070": 3.59486, "6075": 3.70323, "6080": 3.59554, "6085": 3.61991, "6090": 3.61384, "6095": 3.68698, "6100": 3.58591, "6105": 3.62663, "6110": 3.56813, "6115": 3.53797, "6120": 3.55373, "6125": 3.65928, "6130": 3.58818, "6135": 3.60926, "6140": 3.55538, "6145": 3.53635, "6150": 3.61321, "6155": 3.57259, "6160": 3.59469, "6165": 3.61925, "6170": 3.62202, "6175": 3.57388, "6180": 3.54729, "6185": 3.64676, "6190": 3.58531, "6195": 3.61708, "6200": 3.55752, "6205": 3.55824, "6210": 3.56463, "6215": 3.65028, "6220": 3.53611, "6225": 3.48928, "6230": 3.53403, "6235": 3.53423, "6240": 3.58051, "6245": 3.57814, "6250": 3.60663, "6255": 3.52494, "6260": 3.56165, "6265": 3.60925, "6270": 3.598, "6275": 3.53428, "6280": 3.51099, "6285": 3.5397, "6290": 3.6286, "6295": 3.53184, "6300": 3.52676, "6305": 3.50438, "6310": 3.54262, "6315": 3.59489, "6320": 3.577, "6325": 3.50506, "6330": 3.55629, "6335": 3.50022, "6340": 3.46929, "6345": 3.49869, "6350": 3.55916, "6355": 3.56412, "6360": 3.51023, "6365": 3.4936, "6370": 3.54155, "6375": 3.53073, "6380": 3.54172, "6385": 3.49703, "6390": 3.53857, "6395": 3.52943, "6400": 3.46274, "6405": 3.51802, "6410": 3.53121, "6415": 3.46225, "6420": 3.44683, "6425": 3.53552, "6430": 3.50138, "6435": 3.53516, "6440": 3.4631, "6445": 3.47563, "6450": 3.49928, "6455": 3.42232, "6460": 3.45431, "6465": 3.44058, "6470": 3.48132, "6475": 3.39809, "6480": 3.45012, "6485": 3.38839, "6490": 3.4369, "6495": 3.44497, "6500": 3.40678, "6505": 3.5308, "6510": 3.4704, "6515": 3.50668, "6520": 3.47508, "6525": 3.41823, "6530": 3.46795, "6535": 3.49508, "6540": 3.42573, "6545": 3.3895, "6550": 3.44644, "6555": 3.42679, "6560": 3.42363, "6565": 3.35205, "6570": 3.36419, "6575": 3.36393, "6580": 3.36837, "6585": 3.46551, "6590": 3.40006, "6595": 3.39215, "6600": 3.41397, "6605": 3.38739, "6610": 3.39244, "6615": 3.39981, "6620": 3.34834, "6625": 3.35998, "6630": 3.33637, "6635": 3.39977, "6640": 3.36407, "6645": 3.39238, "6650": 3.42223, "6655": 3.37077, "6660": 3.44776, "6665": 3.36179, "6670": 3.31336, "6675": 3.46706, "6680": 3.31537, "6685": 3.33056, "6690": 3.36479, "6695": 3.33289, "6700": 3.31656, "6705": 3.33612, "6710": 3.36935, "6715": 3.37069, "6720": 3.36946, "6725": 3.3809, "6730": 3.22258, "6735": 3.36474, "6740": 3.36694, "6745": 3.29436, "6750": 3.3069, "6755": 3.39945, "6760": 3.31303, "6765": 3.35524, "6770": 3.32894, "6775": 3.32914, "6780": 3.3366, "6785": 3.3369, "6790": 3.3365, "6795": 3.31958, "6800": 3.32138, "6805": 3.25213, "6810": 3.30954, "6815": 3.33852, "6820": 3.30007, "6825": 3.35358, "6830": 3.3196, "6835": 3.2932, "6840": 3.28972, "6845": 3.29574, "6850": 3.32277, "6855": 3.29625, "6860": 3.22366, "6865": 3.3187, "6870": 3.29557, "6875": 3.34984, "6880": 3.28303, "6885": 3.19281, "6890": 3.28913, "6895": 3.22296, "6900": 3.23308, "6905": 3.21653, "6910": 3.23489, "6915": 3.26605, "6920": 3.19071, "6925": 3.23954, "6930": 3.31136, "6935": 3.24762, "6940": 3.28962, "6945": 3.25499, "6950": 3.12165, "6955": 3.27241, "6960": 3.27043, "6965": 3.24525, "6970": 3.22909, "6975": 3.32084, "6980": 3.21065, "6985": 3.21397, "6990": 3.26469, "6995": 
3.14973, "7000": 3.17663, "7005": 3.19907, "7010": 3.25685, "7015": 3.22291, "7020": 3.18487, "7025": 3.26147, "7030": 3.21543, "7035": 3.20828, "7040": 3.26546, "7045": 3.22205, "7050": 3.142, "7055": 3.2088, "7060": 3.15526, "7065": 3.17748, "7070": 3.27455, "7075": 3.18249, "7080": 3.13819, "7085": 3.20783, "7090": 3.18128, "7095": 3.2175, "7100": 3.18226, "7105": 3.19526, "7110": 3.16427, "7115": 3.19035, "7120": 3.14064, "7125": 3.16949, "7130": 3.1249, "7135": 3.14622, "7140": 3.20493, "7145": 3.20019, "7150": 3.13839, "7155": 3.09461, "7160": 3.13084, "7165": 3.26858, "7170": 3.15428, "7175": 3.23219, "7180": 3.20392, "7185": 3.21559, "7190": 3.20016, "7195": 3.16315, "7200": 3.14838, "7205": 3.20258, "7210": 3.19349, "7215": 3.18135, "7220": 3.15847, "7225": 3.15917, "7230": 3.19442, "7235": 3.09445, "7240": 3.08439, "7245": 3.15108, "7250": 3.13601, "7255": 3.09693, "7260": 3.17421, "7265": 3.14648, "7270": 3.16098, "7275": 3.19358, "7280": 3.1076, "7285": 3.16058, "7290": 3.11892, "7295": 3.06815, "7300": 3.1645, "7305": 3.15766, "7310": 3.11336, "7315": 3.08607, "7320": 3.12687, "7325": 3.13851, "7330": 3.20124, "7335": 3.18177, "7340": 3.07291, "7345": 3.18282, "7350": 3.13413, "7355": 3.10709, "7360": 3.08323, "7365": 3.13319, "7370": 3.07974, "7375": 3.07471, "7380": 3.13594, "7385": 3.17588, "7390": 3.05194, "7395": 3.14427, "7400": 3.10136, "7405": 3.21109, "7410": 3.03545, "7415": 3.13141, "7420": 3.08573, "7425": 3.08339, "7430": 3.04365, "7435": 3.15632, "7440": 3.11621, "7445": 3.12464, "7450": 3.08835, "7455": 3.08945, "7460": 3.10715, "7465": 3.13002, "7470": 3.09087, "7475": 3.02577, "7480": 3.073, "7485": 3.00456, "7490": 3.12703, "7495": 3.12329, "7500": 3.01675, "7505": 3.01166, "7510": 3.14088, "7515": 3.12573, "7520": 3.1161, "7525": 3.09315, "7530": 3.10105, "7535": 3.00905, "7540": 3.07964, "7545": 3.0928, "7550": 3.09865, "7555": 3.02019, "7560": 3.07037, "7565": 3.03114, "7570": 3.09591, "7575": 3.10504, "7580": 3.01367, "7585": 3.0738, "7590": 3.04879, "7595": 3.11093, "7600": 3.06279, "7605": 3.06544, "7610": 3.08591, "7615": 3.02383, "7620": 2.99756, "7625": 3.12574, "7630": 3.01993, "7635": 3.06961, "7640": 3.1353, "7645": 3.0818, "7650": 3.02754, "7655": 3.03171, "7660": 3.00502, "7665": 3.03645, "7670": 2.97774, "7675": 3.02555, "7680": 3.02284, "7685": 3.06744, "7690": 3.04739, "7695": 3.07661, "7700": 3.05901, "7705": 2.9806, "7710": 3.09188, "7715": 3.09841, "7720": 3.02888, "7725": 3.09344, "7730": 3.01224, "7735": 3.02845, "7740": 2.99617, "7745": 3.004, "7750": 3.06364, "7755": 3.05126, "7760": 3.08987, "7765": 2.99844, "7770": 3.02888, "7775": 3.0196, "7780": 3.0416, "7785": 3.0088, "7790": 3.03346, "7795": 2.99813, "7800": 3.02604, "7805": 2.99668, "7810": 3.02835, "7815": 3.05191, "7820": 3.05595, "7825": 3.02326, "7830": 2.99776, "7835": 2.99368, "7840": 2.98538, "7845": 3.08209, "7850": 2.94495, "7855": 2.94901, "7860": 2.97369, "7865": 2.98165, "7870": 2.97229, "7875": 2.93955, "7880": 2.98035, "7885": 3.01243, "7890": 2.96774, "7895": 2.8953, "7900": 3.04293, "7905": 2.92403, "7910": 2.95295, "7915": 3.036, "7920": 2.95807, "7925": 2.98609, "7930": 2.96484, "7935": 3.03053, "7940": 2.95729, "7945": 2.84074, "7950": 2.94956, "7955": 2.92759, "7960": 2.89436, "7965": 2.96532, "7970": 3.00266, "7975": 3.02605, "7980": 2.94817, "7985": 2.98148, "7990": 2.9623, "7995": 2.96842, "8000": 2.89393, "8005": 2.94405, "8010": 2.96615, "8015": 2.95593, "8020": 3.0029, "8025": 2.91447, "8030": 2.97606, "8035": 2.88579, "8040": 2.94055, "8045": 2.98474, 
"8050": 2.97931, "8055": 2.97969, "8060": 2.9982, "8065": 2.92281, "8070": 2.9475, "8075": 2.95517, "8080": 2.91093, "8085": 2.8896, "8090": 2.96158, "8095": 2.90818, "8100": 2.95325, "8105": 2.95137, "8110": 2.89438, "8115": 2.89066, "8120": 2.92177, "8125": 2.93261, "8130": 2.92208, "8135": 2.9622, "8140": 2.90304, "8145": 2.94931, "8150": 3.01626, "8155": 2.94773, "8160": 2.93434, "8165": 2.93116, "8170": 3.01596, "8175": 2.94694, "8180": 2.97726, "8185": 2.91845, "8190": 2.79476, "8195": 3.00955, "8200": 2.89978, "8205": 2.91349, "8210": 2.94737, "8215": 2.90878, "8220": 2.94695, "8225": 2.9734, "8230": 2.94558, "8235": 2.94599, "8240": 2.91156, "8245": 2.91059, "8250": 2.96726, "8255": 2.94627, "8260": 2.9462, "8265": 2.94195, "8270": 2.89668, "8275": 2.9709, "8280": 2.92438, "8285": 2.97717, "8290": 2.914, "8295": 2.88902, "8300": 3.0351, "8305": 2.87934, "8310": 2.9131, "8315": 2.92613, "8320": 2.92044, "8325": 2.90135, "8330": 2.88937, "8335": 2.85592, "8340": 2.86102, "8345": 2.90023, "8350": 2.90674, "8355": 2.96977, "8360": 2.92617, "8365": 2.93201, "8370": 2.97569, "8375": 2.86907, "8380": 2.93989, "8385": 2.85784, "8390": 2.89699, "8395": 2.88036, "8400": 2.85101, "8405": 2.85346, "8410": 2.87009, "8415": 2.93102, "8420": 2.87894, "8425": 2.91897, "8430": 2.91978, "8435": 2.92429, "8440": 2.9377, "8445": 2.85226, "8450": 2.87375, "8455": 2.89863, "8460": 2.8857, "8465": 2.88248, "8470": 2.86924, "8475": 2.86668, "8480": 2.96148, "8485": 2.88104, "8490": 2.92409, "8495": 2.76129, "8500": 2.92398, "8505": 2.89326, "8510": 2.86913, "8515": 2.9104, "8520": 2.88558, "8525": 2.94113, "8530": 2.86355, "8535": 2.9124, "8540": 2.94139, "8545": 2.92773, "8550": 2.88733, "8555": 2.84153, "8560": 2.85954, "8565": 2.93384, "8570": 2.91753, "8575": 2.89966, "8580": 2.94365, "8585": 2.87283, "8590": 2.89711, "8595": 2.82735, "8600": 2.89952, "8605": 2.91551, "8610": 2.91078, "8615": 2.86335, "8620": 2.95351, "8625": 2.89835, "8630": 2.88762, "8635": 2.95027, "8640": 2.91245, "8645": 2.95863, "8650": 2.81326, "8655": 2.874, "8660": 2.92784, "8665": 2.90458, "8670": 2.83841, "8675": 2.83933, "8680": 2.83554, "8685": 2.89676, "8690": 2.84922, "8695": 2.86556, "8700": 2.91489, "8705": 2.81684, "8710": 2.95349, "8715": 2.86611, "8720": 2.88513, "8725": 2.82012, "8730": 2.90198, "8735": 2.84897, "8740": 2.87846, "8745": 2.82105, "8750": 2.78836, "8755": 2.8553, "8760": 2.86356, "8765": 2.87947, "8770": 2.81841, "8775": 2.84673, "8780": 2.89725, "8785": 2.92615, "8790": 2.89377, "8795": 2.78413, "8800": 2.79085, "8805": 2.82742, "8810": 2.9382, "8815": 2.80301, "8820": 2.80598, "8825": 2.85682, "8830": 2.87629, "8835": 2.90192, "8840": 2.85667, "8845": 2.90635, "8850": 2.84906, "8855": 2.83664, "8860": 2.83101, "8865": 2.78842, "8870": 2.8764, "8875": 2.87252, "8880": 2.82095, "8885": 2.80631, "8890": 2.84771, "8895": 2.81235, "8900": 2.84013, "8905": 2.85022, "8910": 2.77218, "8915": 2.85518, "8920": 2.81193, "8925": 2.81216, "8930": 2.88001, "8935": 2.82626, "8940": 2.85284, "8945": 2.87665, "8950": 2.83711, "8955": 2.85929, "8960": 2.79253, "8965": 2.80654, "8970": 2.8509, "8975": 2.84428, "8980": 2.81918, "8985": 2.83963, "8990": 2.76411, "8995": 2.83217, "9000": 2.7463, "9005": 2.82863, "9010": 2.84658, "9015": 2.84915, "9020": 2.8088, "9025": 2.77401, "9030": 2.90375, "9035": 2.86403, "9040": 2.79799, "9045": 2.89197, "9050": 2.7831, "9055": 2.86183, "9060": 2.8589, "9065": 2.79226, "9070": 2.82465, "9075": 2.84531, "9080": 2.92434, "9085": 2.77482, "9090": 2.8391, "9095": 2.82507, "9100": 
2.77862, "9105": 2.82377, "9110": 2.7664, "9115": 2.80002, "9120": 2.82413, "9125": 2.84136, "9130": 2.80925, "9135": 2.83212, "9140": 2.78104, "9145": 2.81776, "9150": 2.76803, "9155": 2.77926, "9160": 2.82266, "9165": 2.80571, "9170": 2.87365, "9175": 2.78452, "9180": 2.83351, "9185": 2.78384, "9190": 2.78742, "9195": 2.77598, "9200": 2.82271, "9205": 2.81416, "9210": 2.80208, "9215": 2.86243, "9220": 2.84711, "9225": 2.83138, "9230": 2.78492, "9235": 2.87619, "9240": 2.81501, "9245": 2.87022, "9250": 2.79228, "9255": 2.84939, "9260": 2.7522, "9265": 2.71401, "9270": 2.80706, "9275": 2.8689, "9280": 2.80055, "9285": 2.80926, "9290": 2.76249, "9295": 2.81518, "9300": 2.83873, "9305": 2.87149, "9310": 2.83987, "9315": 2.79426, "9320": 2.79071, "9325": 2.88631, "9330": 2.80984, "9335": 2.84345, "9340": 2.82329, "9345": 2.8402, "9350": 2.79124, "9355": 2.87883, "9360": 2.7726, "9365": 2.78938, "9370": 2.78673, "9375": 2.8062, "9380": 2.78217, "9385": 2.78891, "9390": 2.80223, "9395": 2.78754, "9400": 2.81436, "9405": 2.7545, "9410": 2.8133, "9415": 2.77415, "9420": 2.80191, "9425": 2.8093, "9430": 2.79155, "9435": 2.77901, "9440": 2.7701, "9445": 2.73619, "9450": 2.76251, "9455": 2.82279, "9460": 2.73118, "9465": 2.79555, "9470": 2.76407, "9475": 2.76184, "9480": 2.70193, "9485": 2.78398, "9490": 2.76436, "9495": 2.82638, "9500": 2.75453, "9505": 2.80467, "9510": 2.77092, "9515": 2.7786, "9520": 2.75356, "9525": 2.82011, "9530": 2.77694, "9535": 2.75459, "9540": 2.73281, "9545": 2.77972, "9550": 2.86291, "9555": 2.82095, "9560": 2.80856, "9565": 2.88362, "9570": 2.80712, "9575": 2.78326, "9580": 2.78047, "9585": 2.72619, "9590": 2.69193, "9595": 2.73583, "9600": 2.76813, "9605": 2.7798, "9610": 2.83223, "9615": 2.768, "9620": 2.79366, "9625": 2.72885, "9630": 2.773, "9635": 2.80744, "9640": 2.82581, "9645": 2.82705, "9650": 2.75258, "9655": 2.6806, "9660": 2.85576, "9665": 2.78163, "9670": 2.82587, "9675": 2.81513, "9680": 2.74325, "9685": 2.7037, "9690": 2.74069, "9695": 2.8322, "9700": 2.77467, "9705": 2.83876, "9710": 2.79818, "9715": 2.74713, "9720": 2.73414, "9725": 2.76982, "9730": 2.8501, "9735": 2.75652, "9740": 2.7295, "9745": 2.76552, "9750": 2.80019, "9755": 2.79729, "9760": 2.71774, "9765": 2.83475, "9770": 2.80526, "9775": 2.7775, "9780": 2.79301, "9785": 2.75223, "9790": 2.69503, "9795": 2.69784, "9800": 2.77287, "9805": 2.76541, "9810": 2.78533, "9815": 2.7175, "9820": 2.72699, "9825": 2.77192, "9830": 2.82443, "9835": 2.73302, "9840": 2.72984, "9845": 2.78363, "9850": 2.71178, "9855": 2.7399, "9860": 2.83801, "9865": 2.72923, "9870": 2.73704, "9875": 2.7537, "9880": 2.75921, "9885": 2.73772, "9890": 2.7506, "9895": 2.74714, "9900": 2.76601, "9905": 2.70618, "9910": 2.80274, "9915": 2.68632, "9920": 2.78428, "9925": 2.73624, "9930": 2.74311, "9935": 2.76298, "9940": 2.79304, "9945": 2.7021, "9950": 2.83749, "9955": 2.70093, "9960": 2.81121, "9965": 2.71877, "9970": 2.71771, "9975": 2.78223, "9980": 2.73528, "9985": 2.67508, "9990": 2.72296, "9995": 2.76189, "10000": 2.7447, "10005": 2.77881, "10010": 2.74374, "10015": 2.66397, "10020": 2.68462, "10025": 2.72625, "10030": 2.75771, "10035": 2.81248, "10040": 2.66359, "10045": 2.80113, "10050": 2.74444, "10055": 2.7172, "10060": 2.80351, "10065": 2.72883, "10070": 2.77164, "10075": 2.72174, "10080": 2.73108, "10085": 2.74113, "10090": 2.7125, "10095": 2.77022, "10100": 2.7377, "10105": 2.78044, "10110": 2.67646, "10115": 2.79321, "10120": 2.76431, "10125": 2.65518, "10130": 2.70315, "10135": 2.67467, "10140": 2.75157, "10145": 
2.76704, "10150": 2.7002, "10155": 2.72323, "10160": 2.78814, "10165": 2.79215, "10170": 2.72118, "10175": 2.79913, "10180": 2.72086, "10185": 2.71311, "10190": 2.74494, "10195": 2.75115, "10200": 2.66138, "10205": 2.70477, "10210": 2.70592, "10215": 2.72447, "10220": 2.70927, "10225": 2.65624, "10230": 2.70644, "10235": 2.74026, "10240": 2.72318, "10245": 2.7129, "10250": 2.74091, "10255": 2.73449, "10260": 2.80881, "10265": 2.7004, "10270": 2.70329, "10275": 2.76398, "10280": 2.73464, "10285": 2.70732, "10290": 2.72985, "10295": 2.77497, "10300": 2.75274, "10305": 2.73391, "10310": 2.7267, "10315": 2.71352, "10320": 2.77661, "10325": 2.79815, "10330": 2.71252, "10335": 2.67451, "10340": 2.76059, "10345": 2.77396, "10350": 2.74602, "10355": 2.75209, "10360": 2.76615, "10365": 2.7417, "10370": 2.74554, "10375": 2.63618, "10380": 2.75391, "10385": 2.76746, "10390": 2.78191, "10395": 2.76619, "10400": 2.69427, "10405": 2.68998, "10410": 2.71723, "10415": 2.74149, "10420": 2.61014, "10425": 2.70085, "10430": 2.73729, "10435": 2.65139, "10440": 2.74514, "10445": 2.72415, "10450": 2.68966, "10455": 2.71475, "10460": 2.75992, "10465": 2.70615, "10470": 2.72687, "10475": 2.74159, "10480": 2.63564, "10485": 2.7295, "10490": 2.67461, "10495": 2.73261, "10500": 2.72894, "10505": 2.67537, "10510": 2.69891, "10515": 2.77786, "10520": 2.69533, "10525": 2.74924, "10530": 2.70014, "10535": 2.76718, "10540": 2.6846, "10545": 2.79021, "10550": 2.68329, "10555": 2.69691, "10560": 2.7298, "10565": 2.76427, "10570": 2.69079, "10575": 2.69247, "10580": 2.75376, "10585": 2.6204, "10590": 2.67886, "10595": 2.5468, "10600": 2.701, "10605": 2.73919, "10610": 2.71766, "10615": 2.61005, "10620": 2.62397, "10625": 2.60887, "10630": 2.75339, "10635": 2.64593, "10640": 2.74193, "10645": 2.69595, "10650": 2.7401, "10655": 2.69439, "10660": 2.66825, "10665": 2.62804, "10670": 2.71885, "10675": 2.57289, "10680": 2.68511, "10685": 2.612, "10690": 2.67576, "10695": 2.69917, "10700": 2.65748, "10705": 2.66061, "10710": 2.74462, "10715": 2.61889, "10720": 2.74822, "10725": 2.70981, "10730": 2.6539, "10735": 2.73256, "10740": 2.64503, "10745": 2.69813, "10750": 2.71945, "10755": 2.70818, "10760": 2.70891, "10765": 2.69481, "10770": 2.7402, "10775": 2.70876, "10780": 2.66733, "10785": 2.68837, "10790": 2.65408, "10795": 2.70909, "10800": 2.6639, "10805": 2.73876, "10810": 2.74522, "10815": 2.66658, "10820": 2.72376, "10825": 2.70219, "10830": 2.69441, "10835": 2.69016, "10840": 2.72242, "10845": 2.73645, "10850": 2.6451, "10855": 2.64098, "10860": 2.74048, "10865": 2.59549, "10870": 2.71264, "10875": 2.69743, "10880": 2.6686, "10885": 2.68224, "10890": 2.61159, "10895": 2.62831, "10900": 2.68467, "10905": 2.69785, "10910": 2.67274, "10915": 2.70356, "10920": 2.62723, "10925": 2.70697, "10930": 2.64472, "10935": 2.65275, "10940": 2.65109, "10945": 2.64533, "10950": 2.64837, "10955": 2.67932, "10960": 2.69146, "10965": 2.54395, "10970": 2.70796, "10975": 2.673, "10980": 2.64994, "10985": 2.63302, "10990": 2.66115, "10995": 2.68359, "11000": 2.65872, "11005": 2.71429, "11010": 2.66075, "11015": 2.75056, "11020": 2.67582, "11025": 2.69905, "11030": 2.61739, "11035": 2.62072, "11040": 2.69487, "11045": 2.64755, "11050": 2.65585, "11055": 2.69142, "11060": 2.64607, "11065": 2.60015, "11070": 2.64261, "11075": 2.58536, "11080": 2.72509, "11085": 2.67039, "11090": 2.59837, "11095": 2.602, "11100": 2.62958, "11105": 2.68679, "11110": 2.67522, "11115": 2.65571, "11120": 2.60564, "11125": 2.5981, "11130": 2.68835, "11135": 2.68739, 
"11140": 2.69937, "11145": 2.62538, "11150": 2.65013, "11155": 2.57476, "11160": 2.66765, "11165": 2.69646, "11170": 2.70504, "11175": 2.63075, "11180": 2.63106, "11185": 2.6519, "11190": 2.62468, "11195": 2.58526, "11200": 2.63842, "11205": 2.65048, "11210": 2.68418, "11215": 2.64019, "11220": 2.63326, "11225": 2.62529, "11230": 2.64631, "11235": 2.5957, "11240": 2.65223, "11245": 2.73654, "11250": 2.7182, "11255": 2.70826, "11260": 2.74463, "11265": 2.62845, "11270": 2.69719, "11275": 2.69174, "11280": 2.62275, "11285": 2.61852, "11290": 2.7153, "11295": 2.6428, "11300": 2.65235, "11305": 2.57565, "11310": 2.62152, "11315": 2.61922, "11320": 2.67885, "11325": 2.59314, "11330": 2.60855, "11335": 2.64784, "11340": 2.6076, "11345": 2.66666, "11350": 2.69125, "11355": 2.65376, "11360": 2.68645, "11365": 2.64417, "11370": 2.59467, "11375": 2.65154, "11380": 2.57852, "11385": 2.6848, "11390": 2.68807, "11395": 2.59262, "11400": 2.64618, "11405": 2.63713, "11410": 2.58437, "11415": 2.55213, "11420": 2.6711, "11425": 2.63893, "11430": 2.65244, "11435": 2.59648, "11440": 2.66218, "11445": 2.64118, "11450": 2.68381, "11455": 2.5951, "11460": 2.57181, "11465": 2.59042, "11470": 2.64949, "11475": 2.58662, "11480": 2.69351, "11485": 2.70411, "11490": 2.67277, "11495": 2.63684, "11500": 2.60679, "11505": 2.56846, "11510": 2.6258, "11515": 2.63466, "11520": 2.6116, "11525": 2.73198, "11530": 2.59406, "11535": 2.63967, "11540": 2.64228, "11545": 2.67348, "11550": 2.7039, "11555": 2.62442, "11560": 2.66374, "11565": 2.70635, "11570": 2.70501, "11575": 2.66028, "11580": 2.64982, "11585": 2.65374, "11590": 2.67254, "11595": 2.61225, "11600": 2.62094, "11605": 2.647, "11610": 2.59223, "11615": 2.6451, "11620": 2.63076, "11625": 2.61345, "11630": 2.69956, "11635": 2.64585, "11640": 2.67213, "11645": 2.71998, "11650": 2.6355, "11655": 2.59392, "11660": 2.62521, "11665": 2.56783, "11670": 2.65824, "11675": 2.66512, "11680": 2.61505, "11685": 2.63378, "11690": 2.61908, "11695": 2.63876, "11700": 2.59129, "11705": 2.61536, "11710": 2.61385, "11715": 2.61076, "11720": 2.58631, "11725": 2.64108, "11730": 2.64055, "11735": 2.60718, "11740": 2.59266, "11745": 2.66402, "11750": 2.62816, "11755": 2.63107, "11760": 2.60296, "11765": 2.60694, "11770": 2.61028, "11775": 2.57893, "11780": 2.56408, "11785": 2.72733, "11790": 2.64513, "11795": 2.59508, "11800": 2.6543, "11805": 2.64741, "11810": 2.57456, "11815": 2.70003, "11820": 2.67513, "11825": 2.61035, "11830": 2.60388, "11835": 2.61327, "11840": 2.62751, "11845": 2.62972, "11850": 2.56125, "11855": 2.59314, "11860": 2.65468, "11865": 2.69589, "11870": 2.57871, "11875": 2.5618, "11880": 2.59792, "11885": 2.59764, "11890": 2.59693, "11895": 2.5898, "11900": 2.63582, "11905": 2.56745, "11910": 2.60117, "11915": 2.64201, "11920": 2.56091, "11925": 2.59086, "11930": 2.60602, "11935": 2.63177, "11940": 2.60819, "11945": 2.541, "11950": 2.64343, "11955": 2.58113, "11960": 2.64468, "11965": 2.65319, "11970": 2.65006, "11975": 2.63953, "11980": 2.65029, "11985": 2.63531, "11990": 2.58336, "11995": 2.64439, "12000": 2.57576, "12005": 2.58782, "12010": 2.6297, "12015": 2.5584, "12020": 2.63524, "12025": 2.6512, "12030": 2.57957, "12035": 2.53457, "12040": 2.611, "12045": 2.60552, "12050": 2.58263, "12055": 2.56242, "12060": 2.59756, "12065": 2.70071, "12070": 2.57709, "12075": 2.66385, "12080": 2.60623, "12085": 2.61604, "12090": 2.63052, "12095": 2.60408, "12100": 2.58852, "12105": 2.61722, "12110": 2.60125, "12115": 2.67858, "12120": 2.55867, "12125": 2.55673, "12130": 2.5091, 
"12135": 2.53831, "12140": 2.6373, "12145": 2.66565, "12150": 2.63282, "12155": 2.60409, "12160": 2.61097, "12165": 2.6362, "12170": 2.59118, "12175": 2.61322, "12180": 2.58165, "12185": 2.59047, "12190": 2.54227, "12195": 2.54544, "12200": 2.6521, "12205": 2.62092, "12210": 2.64325, "12215": 2.58849, "12220": 2.69811, "12225": 2.526, "12230": 2.57616, "12235": 2.60367, "12240": 2.63673, "12245": 2.57587, "12250": 2.5384, "12255": 2.54411, "12260": 2.5728, "12265": 2.62769, "12270": 2.58044, "12275": 2.60906, "12280": 2.56441, "12285": 2.67757, "12290": 2.6276, "12295": 2.59882, "12300": 2.60496, "12305": 2.63981, "12310": 2.57771, "12315": 2.59676, "12320": 2.59458, "12325": 2.62246, "12330": 2.59977, "12335": 2.52291, "12340": 2.59901, "12345": 2.57496, "12350": 2.52344, "12355": 2.55843, "12360": 2.59088, "12365": 2.58816, "12370": 2.57545, "12375": 2.61219, "12380": 2.56987, "12385": 2.61144, "12390": 2.59368, "12395": 2.55578, "12400": 2.5103, "12405": 2.64192, "12410": 2.56182, "12415": 2.61815, "12420": 2.54987, "12425": 2.608, "12430": 2.65452, "12435": 2.60495, "12440": 2.55635, "12445": 2.58922, "12450": 2.55659, "12455": 2.57258, "12460": 2.53523, "12465": 2.47439, "12470": 2.66851, "12475": 2.58497, "12480": 2.54961, "12485": 2.62047, "12490": 2.57697, "12495": 2.57419, "12500": 2.6377, "12505": 2.53441, "12510": 2.62601, "12515": 2.6129, "12520": 2.57952, "12525": 2.51368, "12530": 2.60273, "12535": 2.61366, "12540": 2.55255, "12545": 2.61125, "12550": 2.55337, "12555": 2.56197, "12560": 2.60664, "12565": 2.65985, "12570": 2.60828, "12575": 2.54606, "12580": 2.60436, "12585": 2.58903, "12590": 2.59604, "12595": 2.55105, "12600": 2.5895, "12605": 2.60197, "12610": 2.54577, "12615": 2.56467, "12620": 2.55195, "12625": 2.59838, "12630": 2.51601, "12635": 2.66523, "12640": 2.59184, "12645": 2.51311, "12650": 2.48306, "12655": 2.52923, "12660": 2.59877, "12665": 2.54864, "12670": 2.57158, "12675": 2.67967, "12680": 2.54909, "12685": 2.5863, "12690": 2.5889, "12695": 2.54732, "12700": 2.60474, "12705": 2.53783, "12710": 2.57943, "12715": 2.62677, "12720": 2.53309, "12725": 2.59843, "12730": 2.58524, "12735": 2.55619, "12740": 2.59233, "12745": 2.48574, "12750": 2.60831, "12755": 2.59579, "12760": 2.61886, "12765": 2.60924, "12770": 2.49918, "12775": 2.6205, "12780": 2.65688, "12785": 2.60463, "12790": 2.60135, "12795": 2.59711, "12800": 2.61452, "12805": 2.571, "12810": 2.52824, "12815": 2.57265, "12820": 2.55741, "12825": 2.538, "12830": 2.61966, "12835": 2.51723, "12840": 2.56414, "12845": 2.62165, "12850": 2.59307, "12855": 2.57062, "12860": 2.57248, "12865": 2.46822, "12870": 2.56331, "12875": 2.50553, "12880": 2.59289, "12885": 2.55753, "12890": 2.54417, "12895": 2.56557, "12900": 2.55088, "12905": 2.59206, "12910": 2.50824, "12915": 2.5403, "12920": 2.58572, "12925": 2.54047, "12930": 2.6013, "12935": 2.50517, "12940": 2.50146, "12945": 2.56964, "12950": 2.49656, "12955": 2.57587, "12960": 2.50898, "12965": 2.58573, "12970": 2.61701, "12975": 2.53017, "12980": 2.57715, "12985": 2.52925, "12990": 2.55742, "12995": 2.54045, "13000": 2.52322, "13005": 2.53601, "13010": 2.51222, "13015": 2.56146, "13020": 2.55231, "13025": 2.54197, "13030": 2.5437, "13035": 2.53239, "13040": 2.57916, "13045": 2.5404, "13050": 2.59386, "13055": 2.54526, "13060": 2.49262, "13065": 2.54225, "13070": 2.54122, "13075": 2.57018, "13080": 2.59892, "13085": 2.63715, "13090": 2.56719, "13095": 2.53221, "13100": 2.57402, "13105": 2.61345, "13110": 2.54729, "13115": 2.56317, "13120": 2.54189, "13125": 2.699, 
"13130": 2.59573, "13135": 2.61085, "13140": 2.58636, "13145": 2.51293, "13150": 2.62933, "13155": 2.54541, "13160": 2.55121, "13165": 2.58421, "13170": 2.4906, "13175": 2.58938, "13180": 2.62822, "13185": 2.53121, "13190": 2.49384, "13195": 2.50654, "13200": 2.60902, "13205": 2.50734, "13210": 2.58035, "13215": 2.5514, "13220": 2.59554, "13225": 2.562, "13230": 2.52552, "13235": 2.61615, "13240": 2.59881, "13245": 2.58493, "13250": 2.50814, "13255": 2.58973, "13260": 2.57241, "13265": 2.52743, "13270": 2.52206, "13275": 2.57315, "13280": 2.53454, "13285": 2.57142, "13290": 2.60669, "13295": 2.54245, "13300": 2.54347, "13305": 2.5909, "13310": 2.61774, "13315": 2.59553, "13320": 2.55323, "13325": 2.5443, "13330": 2.60398, "13335": 2.53755, "13340": 2.55245, "13345": 2.59288, "13350": 2.52325, "13355": 2.54872, "13360": 2.54461, "13365": 2.54503, "13370": 2.62513, "13375": 2.58516, "13380": 2.54823, "13385": 2.54513, "13390": 2.44699, "13395": 2.51205, "13400": 2.52558, "13405": 2.56221, "13410": 2.53725, "13415": 2.55205, "13420": 2.562, "13425": 2.62223, "13430": 2.52536, "13435": 2.52749, "13440": 2.54727, "13445": 2.49086, "13450": 2.51784, "13455": 2.55874, "13460": 2.5613, "13465": 2.58778, "13470": 2.49988, "13475": 2.55869, "13480": 2.59984, "13485": 2.50216, "13490": 2.57837, "13495": 2.57699, "13500": 2.5287, "13505": 2.59096, "13510": 2.57386, "13515": 2.58307, "13520": 2.54106, "13525": 2.49605, "13530": 2.61432, "13535": 2.4985, "13540": 2.53297, "13545": 2.53055, "13550": 2.65636, "13555": 2.5832, "13560": 2.5197, "13565": 2.55683, "13570": 2.66044, "13575": 2.56051, "13580": 2.54437, "13585": 2.49324, "13590": 2.5228, "13595": 2.54971, "13600": 2.51227, "13605": 2.60317, "13610": 2.50612, "13615": 2.5104, "13620": 2.58579, "13625": 2.55174, "13630": 2.53766, "13635": 2.48991, "13640": 2.632, "13645": 2.52983, "13650": 2.59105, "13655": 2.43202, "13660": 2.52333, "13665": 2.51747, "13670": 2.53197, "13675": 2.60025, "13680": 2.48154, "13685": 2.528, "13690": 2.56725, "13695": 2.49368, "13700": 2.52905, "13705": 2.56745, "13710": 2.53329, "13715": 2.51303, "13720": 2.52534, "13725": 2.53453, "13730": 2.54418, "13735": 2.51102, "13740": 2.57012, "13745": 2.51563, "13750": 2.47746, "13755": 2.51813, "13760": 2.60321, "13765": 2.50118, "13770": 2.52534, "13775": 2.54845, "13780": 2.53061, "13785": 2.56148, "13790": 2.54326, "13795": 2.51878, "13800": 2.47401, "13805": 2.52107, "13810": 2.5591, "13815": 2.55934, "13820": 2.54555, "13825": 2.56582, "13830": 2.51934, "13835": 2.54547, "13840": 2.5452, "13845": 2.59894, "13850": 2.47864, "13855": 2.5137, "13860": 2.59469, "13865": 2.41662, "13870": 2.62394, "13875": 2.5135, "13880": 2.4695, "13885": 2.54232, "13890": 2.56567, "13895": 2.53027, "13900": 2.49403, "13905": 2.54582, "13910": 2.60988, "13915": 2.49093, "13920": 2.50794, "13925": 2.62588, "13930": 2.4636, "13935": 2.4699, "13940": 2.58441, "13945": 2.51917, "13950": 2.53554, "13955": 2.48181, "13960": 2.56566, "13965": 2.51187, "13970": 2.54201, "13975": 2.52338, "13980": 2.53772, "13985": 2.52955, "13990": 2.53168, "13995": 2.50522, "14000": 2.51104, "14005": 2.52455, "14010": 2.48184, "14015": 2.47053, "14020": 2.5179, "14025": 2.4266, "14030": 2.53427, "14035": 2.50823, "14040": 2.54168, "14045": 2.5521, "14050": 2.53398, "14055": 2.53883, "14060": 2.52916, "14065": 2.46465, "14070": 2.44875, "14075": 2.52525, "14080": 2.44026, "14085": 2.50318, "14090": 2.5753, "14095": 2.41668, "14100": 2.45227, "14105": 2.51159, "14110": 2.46489, "14115": 2.50366, "14120": 2.54989, 
"14125": 2.52468, "14130": 2.57263, "14135": 2.50908, "14140": 2.48334, "14145": 2.50174, "14150": 2.54349, "14155": 2.49711, "14160": 2.4836, "14165": 2.50895, "14170": 2.49169, "14175": 2.48687, "14180": 2.47343, "14185": 2.50976, "14190": 2.41288, "14195": 2.55204, "14200": 2.49097, "14205": 2.53801, "14210": 2.52033, "14215": 2.45486, "14220": 2.48186, "14225": 2.48377, "14230": 2.52633, "14235": 2.52603, "14240": 2.52853, "14245": 2.49671, "14250": 2.47619, "14255": 2.52493, "14260": 2.53362, "14265": 2.52946, "14270": 2.4961, "14275": 2.51382, "14280": 2.50965, "14285": 2.44887, "14290": 2.53268, "14295": 2.60711, "14300": 2.51571, "14305": 2.53915, "14310": 2.41487, "14315": 2.50052, "14320": 2.52806, "14325": 2.52387, "14330": 2.47011, "14335": 2.46812, "14340": 2.55551, "14345": 2.55918, "14350": 2.45054, "14355": 2.47181, "14360": 2.5037, "14365": 2.44398, "14370": 2.50757, "14375": 2.52438, "14380": 2.45046, "14385": 2.45065, "14390": 2.59309, "14395": 2.44712, "14400": 2.50906, "14405": 2.48271, "14410": 2.51606, "14415": 2.44179, "14420": 2.52134, "14425": 2.45196, "14430": 2.46524, "14435": 2.48907, "14440": 2.36172, "14445": 2.52586, "14450": 2.53312, "14455": 2.49321, "14460": 2.58525, "14465": 2.45257, "14470": 2.49559, "14475": 2.44926, "14480": 2.48274, "14485": 2.46006, "14490": 2.592, "14495": 2.56594, "14500": 2.45005, "14505": 2.59848, "14510": 2.45232, "14515": 2.4288, "14520": 2.48807, "14525": 2.53623, "14530": 2.46489, "14535": 2.48899, "14540": 2.54966, "14545": 2.50061, "14550": 2.55202, "14555": 2.53276, "14560": 2.50494, "14565": 2.51052, "14570": 2.43319, "14575": 2.46869, "14580": 2.48666, "14585": 2.48989, "14590": 2.53792, "14595": 2.52392, "14600": 2.55327, "14605": 2.45185, "14610": 2.53138, "14615": 2.53318, "14620": 2.49489, "14625": 2.47293, "14630": 2.44816, "14635": 2.5503, "14640": 2.47416, "14645": 2.58515, "14650": 2.4876, "14655": 2.50576, "14660": 2.47448, "14665": 2.52403, "14670": 2.55719, "14675": 2.53447, "14680": 2.48605, "14685": 2.50955, "14690": 2.55136, "14695": 2.5041, "14700": 2.52088, "14705": 2.54897, "14710": 2.48982, "14715": 2.48152, "14720": 2.44963, "14725": 2.42916, "14730": 2.48807, "14735": 2.47838, "14740": 2.51626, "14745": 2.48963, "14750": 2.46864, "14755": 2.5408, "14760": 2.46872, "14765": 2.46736, "14770": 2.46947, "14775": 2.45722, "14780": 2.51888, "14785": 2.49335, "14790": 2.5741, "14795": 2.50658, "14800": 2.49788, "14805": 2.49273, "14810": 2.52753, "14815": 2.49421, "14820": 2.48681, "14825": 2.55954, "14830": 2.60443, "14835": 2.53389, "14840": 2.48199, "14845": 2.48631, "14850": 2.53218, "14855": 2.53225, "14860": 2.48351, "14865": 2.52117, "14870": 2.47857, "14875": 2.44996, "14880": 2.50114, "14885": 2.49188, "14890": 2.40894, "14895": 2.48489, "14900": 2.51425, "14905": 2.46156, "14910": 2.51104, "14915": 2.50623, "14920": 2.55032, "14925": 2.48675, "14930": 2.5051, "14935": 2.46492, "14940": 2.53953, "14945": 2.46357, "14950": 2.49396, "14955": 2.56553, "14960": 2.49439, "14965": 2.4564, "14970": 2.55711, "14975": 2.54956, "14980": 2.48837, "14985": 2.43568, "14990": 2.50406, "14995": 2.48596, "15000": 2.52223, "15005": 2.5289, "15010": 2.49366, "15015": 2.43531, "15020": 2.47563, "15025": 2.49245, "15030": 2.50604, "15035": 2.5023, "15040": 2.48867, "15045": 2.48476, "15050": 2.43987, "15055": 2.52442, "15060": 2.46274, "15065": 2.50784, "15070": 2.51757, "15075": 2.52026, "15080": 2.43337, "15085": 2.48157, "15090": 2.51417, "15095": 2.51821, "15100": 2.49411, "15105": 2.55008, "15110": 2.45471, 
"15115": 2.47109, "15120": 2.5217, "15125": 2.54669, "15130": 2.59304, "15135": 2.4875, "15140": 2.4661, "15145": 2.5084, "15150": 2.42837, "15155": 2.48979, "15160": 2.47534, "15165": 2.48801, "15170": 2.53482, "15175": 2.44809, "15180": 2.44984, "15185": 2.50239, "15190": 2.51597, "15195": 2.47977, "15200": 2.47386, "15205": 2.51591, "15210": 2.47469, "15215": 2.46662, "15220": 2.42957, "15225": 2.43971, "15230": 2.436, "15235": 2.49004, "15240": 2.48075, "15245": 2.44334, "15250": 2.49471, "15255": 2.52791, "15260": 2.47688, "15265": 2.52103, "15270": 2.45329, "15275": 2.41693, "15280": 2.48042, "15285": 2.52012, "15290": 2.43195, "15295": 2.45252, "15300": 2.47549, "15305": 2.50784, "15310": 2.47468, "15315": 2.51205, "15320": 2.49859, "15325": 2.50594, "15330": 2.49992, "15335": 2.47237, "15340": 2.47606, "15345": 2.49301, "15350": 2.5216, "15355": 2.52701, "15360": 2.46497, "15365": 2.53207, "15370": 2.4565, "15375": 2.49529, "15380": 2.43186, "15385": 2.45298, "15390": 2.47424, "15395": 2.46658, "15400": 2.46014, "15405": 2.41231, "15410": 2.49418, "15415": 2.50391, "15420": 2.46143, "15425": 2.46157, "15430": 2.47116, "15435": 2.4736, "15440": 2.479, "15445": 2.47395, "15450": 2.42623, "15455": 2.46536, "15460": 2.47963, "15465": 2.47245, "15470": 2.39771, "15475": 2.4613, "15480": 2.44033, "15485": 2.44199, "15490": 2.52332, "15495": 2.47661, "15500": 2.42963, "15505": 2.41262, "15510": 2.42967, "15515": 2.45254, "15520": 2.45472, "15525": 2.43138, "15530": 2.44204, "15535": 2.50359, "15540": 2.50626, "15545": 2.48593, "15550": 2.47741, "15555": 2.45093, "15560": 2.45444, "15565": 2.46443, "15570": 2.47002, "15575": 2.50934, "15580": 2.52406, "15585": 2.54538, "15590": 2.49184, "15595": 2.42142, "15600": 2.39783, "15605": 2.44658, "15610": 2.48811, "15615": 2.4485, "15620": 2.47783, "15625": 2.49016, "15630": 2.40732, "15635": 2.4678, "15640": 2.44356, "15645": 2.45601, "15650": 2.39145, "15655": 2.42772, "15660": 2.43341, "15665": 2.4764, "15670": 2.42106, "15675": 2.47658, "15680": 2.47026, "15685": 2.46164, "15690": 2.46664, "15695": 2.41981, "15700": 2.46014, "15705": 2.50263, "15710": 2.47348, "15715": 2.42659, "15720": 2.46734, "15725": 2.4995, "15730": 2.48776, "15735": 2.48508, "15740": 2.42394, "15745": 2.46832, "15750": 2.4817, "15755": 2.45125, "15760": 2.44992, "15765": 2.45876, "15770": 2.44177, "15775": 2.41818, "15780": 2.40713, "15785": 2.44769, "15790": 2.51235, "15795": 2.47824, "15800": 2.45699, "15805": 2.46539, "15810": 2.46425, "15815": 2.45991, "15820": 2.47689, "15825": 2.43534, "15830": 2.41551, "15835": 2.40329, "15840": 2.40758, "15845": 2.43893, "15850": 2.44609, "15855": 2.46124, "15860": 2.51211, "15865": 2.48745, "15870": 2.37534, "15875": 2.46695, "15880": 2.41493, "15885": 2.44131, "15890": 2.4703, "15895": 2.54, "15900": 2.46913, "15905": 2.46092, "15910": 2.4806, "15915": 2.42878, "15920": 2.51969, "15925": 2.46565, "15930": 2.46839, "15935": 2.43438, "15940": 2.43377, "15945": 2.48539, "15950": 2.42655, "15955": 2.46057, "15960": 2.49335, "15965": 2.51602, "15970": 2.46908, "15975": 2.47011, "15980": 2.40666, "15985": 2.42022, "15990": 2.44801, "15995": 2.4321, "16000": 2.39129, "16005": 2.45347, "16010": 2.47729, "16015": 2.48513, "16020": 2.48772, "16025": 2.4836, "16030": 2.4869, "16035": 2.46333, "16040": 2.41057, "16045": 2.49617, "16050": 2.4887, "16055": 2.48953, "16060": 2.46791, "16065": 2.5034, "16070": 2.41877, "16075": 2.49838, "16080": 2.5009, "16085": 2.55222, "16090": 2.4295, "16095": 2.43446, "16100": 2.49441, "16105": 2.39929, 
"16110": 2.47941, "16115": 2.45426, "16120": 2.40983, "16125": 2.4966, "16130": 2.4664, "16135": 2.41139, "16140": 2.47124, "16145": 2.43503, "16150": 2.37495, "16155": 2.46953, "16160": 2.43802, "16165": 2.43331, "16170": 2.42569, "16175": 2.34901, "16180": 2.49153, "16185": 2.38483, "16190": 2.47568, "16195": 2.46174, "16200": 2.41579, "16205": 2.44023, "16210": 2.45814, "16215": 2.50449, "16220": 2.41393, "16225": 2.39392, "16230": 2.4363, "16235": 2.48321, "16240": 2.45911, "16245": 2.51595, "16250": 2.4406, "16255": 2.43617, "16260": 2.4178, "16265": 2.36669, "16270": 2.47496, "16275": 2.42794, "16280": 2.48774, "16285": 2.42477, "16290": 2.35841, "16295": 2.44612, "16300": 2.42932, "16305": 2.42531, "16310": 2.47286, "16315": 2.43816, "16320": 2.4726, "16325": 2.43736, "16330": 2.44171, "16335": 2.428, "16340": 2.49381, "16345": 2.51545, "16350": 2.39599, "16355": 2.42248, "16360": 2.49398, "16365": 2.42007, "16370": 2.38302, "16375": 2.40796, "16380": 2.50874, "16385": 2.38304, "16390": 2.47555, "16395": 2.42908, "16400": 2.39178, "16405": 2.45409, "16410": 2.38083, "16415": 2.42651, "16420": 2.47083, "16425": 2.39548, "16430": 2.4097, "16435": 2.44235, "16440": 2.39002, "16445": 2.4926, "16450": 2.41548, "16455": 2.45154, "16460": 2.47136, "16465": 2.42631, "16470": 2.46403, "16475": 2.4459, "16480": 2.44986, "16485": 2.42079, "16490": 2.42461, "16495": 2.39158, "16500": 2.4733, "16505": 2.39521, "16510": 2.4498, "16515": 2.46683, "16520": 2.47694, "16525": 2.44742, "16530": 2.47064, "16535": 2.4486, "16540": 2.47947, "16545": 2.45531, "16550": 2.45457, "16555": 2.42802, "16560": 2.48661, "16565": 2.34143, "16570": 2.44481, "16575": 2.37913, "16580": 2.39639, "16585": 2.48678, "16590": 2.42145, "16595": 2.43377, "16600": 2.44729, "16605": 2.45917, "16610": 2.41138, "16615": 2.46299, "16620": 2.49791, "16625": 2.40153, "16630": 2.44153, "16635": 2.3734, "16640": 2.5331, "16645": 2.48004, "16650": 2.48612, "16655": 2.48638, "16660": 2.46353, "16665": 2.36771, "16670": 2.38632, "16675": 2.3977, "16680": 2.39441, "16685": 2.45315, "16690": 2.46455, "16695": 2.42061, "16700": 2.41219, "16705": 2.47684, "16710": 2.44458, "16715": 2.42533, "16720": 2.44352, "16725": 2.44591, "16730": 2.45526, "16735": 2.47957, "16740": 2.44004, "16745": 2.42114, "16750": 2.44244, "16755": 2.54504, "16760": 2.52394, "16765": 2.48267, "16770": 2.46169, "16775": 2.49502, "16780": 2.41041, "16785": 2.43771, "16790": 2.47862, "16795": 2.4726, "16800": 2.40262, "16805": 2.37642, "16810": 2.42249, "16815": 2.47267, "16820": 2.41958, "16825": 2.45966, "16830": 2.46621, "16835": 2.39803, "16840": 2.43559, "16845": 2.46622, "16850": 2.41924, "16855": 2.4105, "16860": 2.37919, "16865": 2.42949, "16870": 2.40645, "16875": 2.44732, "16880": 2.4399, "16885": 2.47848, "16890": 2.43426, "16895": 2.40751, "16900": 2.50575, "16905": 2.423, "16910": 2.44025, "16915": 2.40601, "16920": 2.40956, "16925": 2.50698, "16930": 2.49285, "16935": 2.45319, "16940": 2.43993, "16945": 2.45934, "16950": 2.384, "16955": 2.38775, "16960": 2.43126, "16965": 2.42799, "16970": 2.44099, "16975": 2.45425, "16980": 2.40308, "16985": 2.36859, "16990": 2.41145, "16995": 2.38586, "17000": 2.39045, "17005": 2.39468, "17010": 2.45396, "17015": 2.50626, "17020": 2.44205, "17025": 2.37803, "17030": 2.338, "17035": 2.45196, "17040": 2.47715, "17045": 2.45188, "17050": 2.37037, "17055": 2.40525, "17060": 2.46248, "17065": 2.46639, "17070": 2.38797, "17075": 2.43317, "17080": 2.40264, "17085": 2.39668, "17090": 2.33219, "17095": 2.42084, "17100": 
2.45805, "17105": 2.42961, "17110": 2.38892, "17115": 2.44526, "17120": 2.44737, "17125": 2.41953, "17130": 2.42912, "17135": 2.48083, "17140": 2.30946, "17145": 2.35356, "17150": 2.49275, "17155": 2.41497, "17160": 2.4577, "17165": 2.4395, "17170": 2.40282, "17175": 2.45547, "17180": 2.51386, "17185": 2.45446, "17190": 2.38936, "17195": 2.41198, "17200": 2.44499, "17205": 2.41715, "17210": 2.37991, "17215": 2.42604, "17220": 2.44448, "17225": 2.46576, "17230": 2.40301, "17235": 2.43761, "17240": 2.38204, "17245": 2.37929, "17250": 2.41754, "17255": 2.41154, "17260": 2.43579, "17265": 2.35528, "17270": 2.41522, "17275": 2.41826, "17280": 2.33542, "17285": 2.44872, "17290": 2.45241, "17295": 2.38235, "17300": 2.40491, "17305": 2.42947, "17310": 2.41664, "17315": 2.39551, "17320": 2.36983, "17325": 2.41608, "17330": 2.45553, "17335": 2.41284, "17340": 2.38865, "17345": 2.44242, "17350": 2.39077, "17355": 2.40216, "17360": 2.406, "17365": 2.46508, "17370": 2.36069, "17375": 2.41848, "17380": 2.45908, "17385": 2.33048, "17390": 2.40191, "17395": 2.43048, "17400": 2.42393, "17405": 2.40379, "17410": 2.38126, "17415": 2.37245, "17420": 2.33582, "17425": 2.48376, "17430": 2.41592, "17435": 2.39606, "17440": 2.4356, "17445": 2.42262, "17450": 2.40337, "17455": 2.48261, "17460": 2.40442, "17465": 2.35686, "17470": 2.41862, "17475": 2.43214, "17480": 2.45639, "17485": 2.42246, "17490": 2.41208, "17495": 2.38998, "17500": 2.36903, "17505": 2.43295, "17510": 2.41993, "17515": 2.41832, "17520": 2.46191, "17525": 2.45511, "17530": 2.48125, "17535": 2.40931, "17540": 2.46566, "17545": 2.49546, "17550": 2.43337, "17555": 2.43314, "17560": 2.43506, "17565": 2.39764, "17570": 2.37689, "17575": 2.3777, "17580": 2.45645, "17585": 2.45442, "17590": 2.50403, "17595": 2.43422, "17600": 2.47434, "17605": 2.42352, "17610": 2.40644, "17615": 2.39317, "17620": 2.42254, "17625": 2.41353, "17630": 2.33534, "17635": 2.39493, "17640": 2.41011, "17645": 2.43491, "17650": 2.3799, "17655": 2.41664, "17660": 2.40949, "17665": 2.42544, "17670": 2.40998, "17675": 2.3668, "17680": 2.38166, "17685": 2.43715, "17690": 2.39867, "17695": 2.40975, "17700": 2.37024, "17705": 2.36529, "17710": 2.40644, "17715": 2.51413, "17720": 2.372, "17725": 2.44221, "17730": 2.42072, "17735": 2.44645, "17740": 2.33786, "17745": 2.50931, "17750": 2.45241, "17755": 2.39533, "17760": 2.40279, "17765": 2.48073, "17770": 2.35721, "17775": 2.43785, "17780": 2.41875, "17785": 2.4182, "17790": 2.466, "17795": 2.39857, "17800": 2.37323, "17805": 2.28504, "17810": 2.35099, "17815": 2.39818, "17820": 2.44138, "17825": 2.36892, "17830": 2.35366, "17835": 2.39384, "17840": 2.34259, "17845": 2.37679, "17850": 2.39712, "17855": 2.41479, "17860": 2.45033, "17865": 2.40161, "17870": 2.37782, "17875": 2.38093, "17880": 2.36351, "17885": 2.43202, "17890": 2.36391, "17895": 2.38182, "17900": 2.37298, "17905": 2.38353, "17910": 2.36118, "17915": 2.46317, "17920": 2.48868, "17925": 2.42644, "17930": 2.40935, "17935": 2.38797, "17940": 2.41726, "17945": 2.42886, "17950": 2.32548, "17955": 2.35215, "17960": 2.4646, "17965": 2.41121, "17970": 2.406, "17975": 2.42225, "17980": 2.41449, "17985": 2.29899, "17990": 2.45503, "17995": 2.40933, "18000": 2.42009, "18005": 2.40379, "18010": 2.37058, "18015": 2.37428, "18020": 2.36466, "18025": 2.40186, "18030": 2.38093, "18035": 2.41055, "18040": 2.37258, "18045": 2.38905, "18050": 2.47359, "18055": 2.3788, "18060": 2.48219, "18065": 2.38359, "18070": 2.4013, "18075": 2.41587, "18080": 2.41813, "18085": 2.39965, "18090": 2.40437, 
"18095": 2.45254, "18100": 2.34791, "18105": 2.45882, "18110": 2.36984, "18115": 2.36505, "18120": 2.40905, "18125": 2.36184, "18130": 2.39867, "18135": 2.41854, "18140": 2.46366, "18145": 2.38928, "18150": 2.38408, "18155": 2.35157, "18160": 2.36546, "18165": 2.40106, "18170": 2.39046, "18175": 2.47242, "18180": 2.36267, "18185": 2.41117, "18190": 2.37479, "18195": 2.3708, "18200": 2.44349, "18205": 2.42624, "18210": 2.34302, "18215": 2.33842, "18220": 2.44074, "18225": 2.41313, "18230": 2.4253, "18235": 2.38637, "18240": 2.42472, "18245": 2.43106, "18250": 2.44251, "18255": 2.46746, "18260": 2.41068, "18265": 2.45043, "18270": 2.41711, "18275": 2.39384, "18280": 2.40339, "18285": 2.47871, "18290": 2.38911, "18295": 2.38473, "18300": 2.36299, "18305": 2.32899, "18310": 2.44443, "18315": 2.39293, "18320": 2.43785, "18325": 2.45288, "18330": 2.44172, "18335": 2.44137, "18340": 2.37002, "18345": 2.37162, "18350": 2.40049, "18355": 2.37187, "18360": 2.38545, "18365": 2.40729, "18370": 2.40884, "18375": 2.36016, "18380": 2.38499, "18385": 2.41432, "18390": 2.46639, "18395": 2.35947, "18400": 2.35113, "18405": 2.43244, "18410": 2.40535, "18415": 2.4669, "18420": 2.45178, "18425": 2.39023, "18430": 2.43771, "18435": 2.30704, "18440": 2.42502, "18445": 2.39558, "18450": 2.41692, "18455": 2.31717, "18460": 2.40193, "18465": 2.38288, "18470": 2.36155, "18475": 2.44754, "18480": 2.4391, "18485": 2.40799, "18490": 2.36567, "18495": 2.47409, "18500": 2.40682, "18505": 2.43339, "18510": 2.38731, "18515": 2.40514, "18520": 2.4689, "18525": 2.41819, "18530": 2.41214, "18535": 2.48148, "18540": 2.36122, "18545": 2.4169, "18550": 2.44595, "18555": 2.33679, "18560": 2.36099, "18565": 2.46445, "18570": 2.40589, "18575": 2.31901, "18580": 2.42107, "18585": 2.35905, "18590": 2.41795, "18595": 2.44146, "18600": 2.42114, "18605": 2.3335, "18610": 2.39402, "18615": 2.3394, "18620": 2.42438, "18625": 2.36043, "18630": 2.37985, "18635": 2.33345, "18640": 2.40743, "18645": 2.40041, "18650": 2.36723, "18655": 2.45659, "18660": 2.37358, "18665": 2.4187, "18670": 2.3489, "18675": 2.42389, "18680": 2.47363, "18685": 2.47182, "18690": 2.3801, "18695": 2.4764, "18700": 2.3405, "18705": 2.43024, "18710": 2.38193, "18715": 2.38382, "18720": 2.36379, "18725": 2.37338, "18730": 2.39768, "18735": 2.38707, "18740": 2.39506, "18745": 2.37535, "18750": 2.47373, "18755": 2.3482, "18760": 2.44643, "18765": 2.33717, "18770": 2.36338, "18775": 2.35163, "18780": 2.44901, "18785": 2.37136, "18790": 2.36162, "18795": 2.39054, "18800": 2.35421, "18805": 2.38252, "18810": 2.32365, "18815": 2.40224, "18820": 2.38563, "18825": 2.42088, "18830": 2.35139, "18835": 2.40295, "18840": 2.40741, "18845": 2.41391, "18850": 2.29705, "18855": 2.38856, "18860": 2.33463, "18865": 2.38846, "18870": 2.3576, "18875": 2.3751, "18880": 2.46114, "18885": 2.39127, "18890": 2.32686, "18895": 2.37488, "18900": 2.37013, "18905": 2.2858, "18910": 2.32449, "18915": 2.41319, "18920": 2.38103, "18925": 2.38175, "18930": 2.388, "18935": 2.3492, "18940": 2.37392, "18945": 2.34081, "18950": 2.34513, "18955": 2.41188, "18960": 2.49602, "18965": 2.44139, "18970": 2.38586, "18975": 2.39986, "18980": 2.38824, "18985": 2.51103, "18990": 2.43128, "18995": 2.40872, "19000": 2.38895, "19005": 2.40248, "19010": 2.33125, "19015": 2.38943, "19020": 2.33656, "19025": 2.399, "19030": 2.33065, "19035": 2.4079, "19040": 2.3784, "19045": 2.43822, "19050": 2.37969, "19055": 2.39501, "19060": 2.36546, "19065": 2.40641, "19070": 2.36012, "19075": 2.34202, "19080": 2.37508, "19085": 
2.43935, "19090": 2.37924, "19095": 2.39471, "19100": 2.35382, "19105": 2.43142, "19110": 2.43386, "19115": 2.38905, "19120": 2.44728, "19125": 2.3855, "19130": 2.3883, "19135": 2.38727, "19140": 2.33662, "19145": 2.37199, "19150": 2.37482, "19155": 2.3852, "19160": 2.35997, "19165": 2.41649, "19170": 2.43499, "19175": 2.40293, "19180": 2.32684, "19185": 2.37287, "19190": 2.35761, "19195": 2.32996, "19200": 2.42655, "19205": 2.37064, "19210": 2.38901, "19215": 2.39364, "19220": 2.30794, "19225": 2.31575, "19230": 2.32752, "19235": 2.34357, "19240": 2.39731, "19245": 2.41973, "19250": 2.37887, "19255": 2.44816, "19260": 2.40453, "19265": 2.36902, "19270": 2.38236, "19275": 2.42886, "19280": 2.37894, "19285": 2.37275, "19290": 2.39213, "19295": 2.34637, "19300": 2.31753, "19305": 2.43025, "19310": 2.42873, "19315": 2.38465, "19320": 2.3711, "19325": 2.3213, "19330": 2.36756, "19335": 2.34752, "19340": 2.37847, "19345": 2.41152, "19350": 2.36438, "19355": 2.43425, "19360": 2.3869, "19365": 2.373, "19370": 2.3212, "19375": 2.39542, "19380": 2.33939, "19385": 2.43448, "19390": 2.39541, "19395": 2.35012, "19400": 2.36714, "19405": 2.31992, "19410": 2.41704, "19415": 2.35275, "19420": 2.34593, "19425": 2.36646, "19430": 2.43951, "19435": 2.35873, "19440": 2.40464, "19445": 2.33516, "19450": 2.31758, "19455": 2.4237, "19460": 2.39164, "19465": 2.35387, "19470": 2.33057, "19475": 2.33719, "19480": 2.32239, "19485": 2.38046, "19490": 2.36873, "19495": 2.31774, "19500": 2.35901, "19505": 2.30827, "19510": 2.35235, "19515": 2.39884, "19520": 2.36491, "19525": 2.44388, "19530": 2.38593, "19535": 2.38167, "19540": 2.39664, "19545": 2.36883, "19550": 2.38671, "19555": 2.31933, "19560": 2.42389, "19565": 2.34065, "19570": 2.36905, "19575": 2.29324, "19580": 2.44426, "19585": 2.36033, "19590": 2.38036, "19595": 2.34827, "19600": 2.39075, "19605": 2.45669, "19610": 2.36871, "19615": 2.32638, "19620": 2.32897, "19625": 2.39941, "19630": 2.33974, "19635": 2.3473, "19640": 2.4022, "19645": 2.32647, "19650": 2.3, "19655": 2.34624, "19660": 2.35005, "19665": 2.35983, "19670": 2.34987, "19675": 2.39676, "19680": 2.41155, "19685": 2.31704, "19690": 2.35522, "19695": 2.43854, "19700": 2.36039, "19705": 2.40097, "19710": 2.37562, "19715": 2.37099, "19720": 2.30453, "19725": 2.41385, "19730": 2.30833, "19735": 2.39547, "19740": 2.32353, "19745": 2.34113, "19750": 2.30347, "19755": 2.3412, "19760": 2.39447, "19765": 2.28289, "19770": 2.35501, "19775": 2.40877, "19780": 2.32444, "19785": 2.33035, "19790": 2.36558, "19795": 2.37562, "19800": 2.35482, "19805": 2.3917, "19810": 2.39151, "19815": 2.4116, "19820": 2.39933, "19825": 2.41086, "19830": 2.40092, "19835": 2.40479, "19840": 2.38134, "19845": 2.31925, "19850": 2.41399, "19855": 2.34907, "19860": 2.36077, "19865": 2.39945, "19870": 2.34812, "19875": 2.35745, "19880": 2.37244, "19885": 2.35571, "19890": 2.38014, "19895": 2.41518, "19900": 2.35089, "19905": 2.37199, "19910": 2.34167, "19915": 2.29789, "19920": 2.32483, "19925": 2.37497, "19930": 2.37245, "19935": 2.34139, "19940": 2.34162, "19945": 2.35841, "19950": 2.39921, "19955": 2.34989, "19960": 2.35994, "19965": 2.37842, "19970": 2.39533, "19975": 2.25716, "19980": 2.37257, "19985": 2.43061, "19990": 2.38025, "19995": 2.411, "20000": 2.37746, "20005": 2.29271, "20010": 2.34862, "20015": 2.4219, "20020": 2.32727, "20025": 2.29892, "20030": 2.40953, "20035": 2.33421, "20040": 2.33935, "20045": 2.37132, "20050": 2.34747, "20055": 2.31143, "20060": 2.38806, "20065": 2.29523, "20070": 2.39121, "20075": 2.34083, 
"20080": 2.42222, "20085": 2.34654, "20090": 2.36577, "20095": 2.3426, "20100": 2.31707, "20105": 2.37084, "20110": 2.36068, "20115": 2.34176, "20120": 2.37679, "20125": 2.27676, "20130": 2.38219, "20135": 2.3223, "20140": 2.29251, "20145": 2.37819, "20150": 2.33604, "20155": 2.39108, "20160": 2.42857, "20165": 2.36708, "20170": 2.32686, "20175": 2.37603, "20180": 2.36988, "20185": 2.38075, "20190": 2.35913, "20195": 2.41075, "20200": 2.36116, "20205": 2.41473, "20210": 2.44992, "20215": 2.36638, "20220": 2.4066, "20225": 2.30596, "20230": 2.394, "20235": 2.34948, "20240": 2.33938, "20245": 2.37324, "20250": 2.40315, "20255": 2.3711, "20260": 2.41676, "20265": 2.40438, "20270": 2.36884, "20275": 2.32169, "20280": 2.4355, "20285": 2.34325, "20290": 2.3797, "20295": 2.38253, "20300": 2.35818, "20305": 2.3111, "20310": 2.40197, "20315": 2.30656, "20320": 2.40962, "20325": 2.36058, "20330": 2.39665, "20335": 2.2977, "20340": 2.36607, "20345": 2.39715, "20350": 2.37141, "20355": 2.36995, "20360": 2.35983, "20365": 2.38548, "20370": 2.31216, "20375": 2.26888, "20380": 2.40335, "20385": 2.3592, "20390": 2.32899, "20395": 2.34139, "20400": 2.32355, "20405": 2.30003, "20410": 2.39521, "20415": 2.34597, "20420": 2.38116, "20425": 2.34355, "20430": 2.31042, "20435": 2.29178, "20440": 2.29938, "20445": 2.38616, "20450": 2.32003, "20455": 2.34016, "20460": 2.43207, "20465": 2.39777, "20470": 2.36155, "20475": 2.34142, "20480": 2.32223, "20485": 2.37496, "20490": 2.30801, "20495": 2.35163, "20500": 2.38543, "20505": 2.35251, "20510": 2.31061, "20515": 2.38656, "20520": 2.39269, "20525": 2.33225, "20530": 2.35992, "20535": 2.31829, "20540": 2.31804, "20545": 2.36583, "20550": 2.27733, "20555": 2.3084, "20560": 2.34371, "20565": 2.37169, "20570": 2.32747, "20575": 2.30854, "20580": 2.36568, "20585": 2.31828, "20590": 2.39167, "20595": 2.34744, "20600": 2.33115, "20605": 2.38426, "20610": 2.36317, "20615": 2.37313, "20620": 2.33961, "20625": 2.3471, "20630": 2.35796, "20635": 2.39246, "20640": 2.31087, "20645": 2.31371, "20650": 2.33859, "20655": 2.31458, "20660": 2.36882, "20665": 2.3035, "20670": 2.36789, "20675": 2.31634, "20680": 2.38669, "20685": 2.28589, "20690": 2.36081, "20695": 2.27787, "20700": 2.35506, "20705": 2.30953, "20710": 2.31743, "20715": 2.34201, "20720": 2.33859, "20725": 2.3185, "20730": 2.36705, "20735": 2.37003, "20740": 2.40609, "20745": 2.26833, "20750": 2.32508, "20755": 2.29269, "20760": 2.36494, "20765": 2.30009, "20770": 2.30922, "20775": 2.33908, "20780": 2.37566, "20785": 2.37455, "20790": 2.32479, "20795": 2.34805, "20800": 2.30837, "20805": 2.36114, "20810": 2.40769, "20815": 2.36896, "20820": 2.3349, "20825": 2.32261, "20830": 2.39501, "20835": 2.39706, "20840": 2.34539, "20845": 2.40804, "20850": 2.27739, "20855": 2.35368, "20860": 2.36074, "20865": 2.35027, "20870": 2.31905, "20875": 2.36095, "20880": 2.32561, "20885": 2.29494, "20890": 2.30827, "20895": 2.30344, "20900": 2.35692, "20905": 2.40696, "20910": 2.31581, "20915": 2.37943, "20920": 2.31451, "20925": 2.29132, "20930": 2.35967, "20935": 2.43602, "20940": 2.34851, "20945": 2.37527, "20950": 2.37096, "20955": 2.31328, "20960": 2.2574, "20965": 2.36556, "20970": 2.30577, "20975": 2.38797, "20980": 2.34861, "20985": 2.36285, "20990": 2.3886, "20995": 2.39076, "21000": 2.36495, "21005": 2.34013, "21010": 2.33696, "21015": 2.41429, "21020": 2.34269, "21025": 2.32523, "21030": 2.33783, "21035": 2.31948, "21040": 2.28744, "21045": 2.32836, "21050": 2.26124, "21055": 2.38592, "21060": 2.38798, "21065": 2.34286, "21070": 
2.33784, "21075": 2.34287, "21080": 2.35827, "21085": 2.28164, "21090": 2.3377, "21095": 2.27437, "21100": 2.37947, "21105": 2.36084, "21110": 2.36906, "21115": 2.33637, "21120": 2.33831, "21125": 2.29129, "21130": 2.31048, "21135": 2.34475, "21140": 2.21626, "21145": 2.37968, "21150": 2.34382, "21155": 2.3371, "21160": 2.35394, "21165": 2.34027, "21170": 2.28689, "21175": 2.31233, "21180": 2.27557, "21185": 2.32092, "21190": 2.3235, "21195": 2.26084, "21200": 2.39488, "21205": 2.39355, "21210": 2.37734, "21215": 2.27007, "21220": 2.35854, "21225": 2.36049, "21230": 2.38072, "21235": 2.25906, "21240": 2.30693, "21245": 2.27615, "21250": 2.29882, "21255": 2.36563, "21260": 2.30823, "21265": 2.30186, "21270": 2.33127, "21275": 2.34494, "21280": 2.35282, "21285": 2.34176, "21290": 2.32447, "21295": 2.27225, "21300": 2.36781, "21305": 2.42424, "21310": 2.30981, "21315": 2.31008, "21320": 2.36481, "21325": 2.34678, "21330": 2.29859, "21335": 2.33646, "21340": 2.32575, "21345": 2.33867, "21350": 2.36761, "21355": 2.35198, "21360": 2.37238, "21365": 2.31892, "21370": 2.38185, "21375": 2.28818, "21380": 2.34083, "21385": 2.33411, "21390": 2.31084, "21395": 2.41413, "21400": 2.34569, "21405": 2.37512, "21410": 2.34671, "21415": 2.35765, "21420": 2.31093, "21425": 2.347, "21430": 2.31487, "21435": 2.32704, "21440": 2.36001, "21445": 2.35494, "21450": 2.34445, "21455": 2.34141, "21460": 2.35491, "21465": 2.38996, "21470": 2.35108, "21475": 2.33041, "21480": 2.40637, "21485": 2.3861, "21490": 2.28206, "21495": 2.34588, "21500": 2.31592, "21505": 2.34286, "21510": 2.35376, "21515": 2.37729, "21520": 2.31218, "21525": 2.36456, "21530": 2.29263, "21535": 2.31916, "21540": 2.33649, "21545": 2.336, "21550": 2.34344, "21555": 2.27861, "21560": 2.34382, "21565": 2.29667, "21570": 2.30025, "21575": 2.28285, "21580": 2.35011, "21585": 2.35097, "21590": 2.3064, "21595": 2.28077, "21600": 2.34051, "21605": 2.3615, "21610": 2.29864, "21615": 2.33587, "21620": 2.2948, "21625": 2.32825, "21630": 2.29765, "21635": 2.37498, "21640": 2.28786, "21645": 2.28232, "21650": 2.42802, "21655": 2.32529, "21660": 2.34268, "21665": 2.30333, "21670": 2.31484, "21675": 2.32051, "21680": 2.31911, "21685": 2.36615, "21690": 2.26925, "21695": 2.35009, "21700": 2.31366, "21705": 2.30588, "21710": 2.30652, "21715": 2.27023, "21720": 2.26032, "21725": 2.29602, "21730": 2.33908, "21735": 2.31611, "21740": 2.27347, "21745": 2.31351, "21750": 2.35223, "21755": 2.34217, "21760": 2.3576, "21765": 2.32033, "21770": 2.29162, "21775": 2.35196, "21780": 2.256, "21785": 2.30102, "21790": 2.26646, "21795": 2.33167, "21800": 2.32581, "21805": 2.28673, "21810": 2.33133, "21815": 2.34637, "21820": 2.32815, "21825": 2.28572, "21830": 2.31778, "21835": 2.34817, "21840": 2.34527, "21845": 2.31587, "21850": 2.35866, "21855": 2.26685, "21860": 2.31296, "21865": 2.32074, "21870": 2.30665, "21875": 2.34661, "21880": 2.30524, "21885": 2.30961, "21890": 2.32903, "21895": 2.40118, "21900": 2.37567, "21905": 2.35832, "21910": 2.32388, "21915": 2.29675, "21920": 2.33396, "21925": 2.35313, "21930": 2.43884, "21935": 2.33665, "21940": 2.28168, "21945": 2.35797, "21950": 2.26368, "21955": 2.36318, "21960": 2.35227, "21965": 2.32306, "21970": 2.32566, "21975": 2.2764, "21980": 2.27503, "21985": 2.41982, "21990": 2.30971, "21995": 2.25206, "22000": 2.3497, "22005": 2.29529, "22010": 2.35531, "22015": 2.2803, "22020": 2.34077, "22025": 2.33492, "22030": 2.3575, "22035": 2.30657, "22040": 2.34515, "22045": 2.39148, "22050": 2.35221, "22055": 2.29632, "22060": 2.30235, 
"22065": 2.39608, "22070": 2.29355, "22075": 2.35665, "22080": 2.34983, "22085": 2.30597, "22090": 2.25828, "22095": 2.29635, "22100": 2.36876, "22105": 2.35204, "22110": 2.37004, "22115": 2.26154, "22120": 2.35164, "22125": 2.33366, "22130": 2.31752, "22135": 2.36612, "22140": 2.3353, "22145": 2.32559, "22150": 2.35034, "22155": 2.28304, "22160": 2.3051, "22165": 2.27671, "22170": 2.38764, "22175": 2.28459, "22180": 2.28267, "22185": 2.31644, "22190": 2.33002, "22195": 2.36858, "22200": 2.31794, "22205": 2.31884, "22210": 2.2645, "22215": 2.308, "22220": 2.34544, "22225": 2.28204, "22230": 2.26965, "22235": 2.28162, "22240": 2.28279, "22245": 2.30823, "22250": 2.30904, "22255": 2.30376, "22260": 2.24959, "22265": 2.3154, "22270": 2.28687, "22275": 2.24254, "22280": 2.28199, "22285": 2.34601, "22290": 2.29608, "22295": 2.34094, "22300": 2.31761, "22305": 2.37182, "22310": 2.26749, "22315": 2.29643, "22320": 2.30921, "22325": 2.35294, "22330": 2.34827, "22335": 2.3464, "22340": 2.37085, "22345": 2.2883, "22350": 2.29047, "22355": 2.32365, "22360": 2.28096, "22365": 2.30764, "22370": 2.32377, "22375": 2.32834, "22380": 2.2412, "22385": 2.26691, "22390": 2.33531, "22395": 2.33539, "22400": 2.41732, "22405": 2.38708, "22410": 2.37186, "22415": 2.2816, "22420": 2.30571, "22425": 2.27437, "22430": 2.32703, "22435": 2.29643, "22440": 2.32279, "22445": 2.29017, "22450": 2.37637, "22455": 2.30016, "22460": 2.30962, "22465": 2.33333, "22470": 2.30931, "22475": 2.31093, "22480": 2.2692, "22485": 2.34845, "22490": 2.28228, "22495": 2.25728, "22500": 2.33461, "22505": 2.38926, "22510": 2.35976, "22515": 2.3864, "22520": 2.32958, "22525": 2.31395, "22530": 2.33055, "22535": 2.273, "22540": 2.3065, "22545": 2.28148, "22550": 2.26572, "22555": 2.20902, "22560": 2.27898, "22565": 2.30282, "22570": 2.23828, "22575": 2.28649, "22580": 2.27568, "22585": 2.2656, "22590": 2.31648, "22595": 2.29458, "22600": 2.32997, "22605": 2.34234, "22610": 2.30501, "22615": 2.33397, "22620": 2.29709, "22625": 2.29677, "22630": 2.25102, "22635": 2.34409, "22640": 2.35141, "22645": 2.33538, "22650": 2.26796, "22655": 2.27042, "22660": 2.26782, "22665": 2.26063, "22670": 2.34363, "22675": 2.31268, "22680": 2.3602, "22685": 2.2382, "22690": 2.24273, "22695": 2.41818, "22700": 2.23825, "22705": 2.29588, "22710": 2.35368, "22715": 2.32179, "22720": 2.24942, "22725": 2.31151, "22730": 2.33172, "22735": 2.2597, "22740": 2.34756, "22745": 2.34295, "22750": 2.30804, "22755": 2.33225, "22760": 2.24958, "22765": 2.35319, "22770": 2.31627, "22775": 2.27168, "22780": 2.35223, "22785": 2.308, "22790": 2.30149, "22795": 2.3033, "22800": 2.37147, "22805": 2.32079, "22810": 2.3199, "22815": 2.31851, "22820": 2.28408, "22825": 2.32437, "22830": 2.34389, "22835": 2.31293, "22840": 2.28096, "22845": 2.23076, "22850": 2.277, "22855": 2.3694, "22860": 2.28469, "22865": 2.29737, "22870": 2.32369, "22875": 2.30854, "22880": 2.26449, "22885": 2.26976, "22890": 2.30775, "22895": 2.27538, "22900": 2.33066, "22905": 2.33494, "22910": 2.32833, "22915": 2.28654, "22920": 2.3176, "22925": 2.22757, "22930": 2.34072, "22935": 2.29351, "22940": 2.25307, "22945": 2.27923, "22950": 2.31119, "22955": 2.37879, "22960": 2.29758, "22965": 2.27141, "22970": 2.28461, "22975": 2.30547, "22980": 2.29585, "22985": 2.31715, "22990": 2.37993, "22995": 2.30408, "23000": 2.2878, "23005": 2.34158, "23010": 2.32433, "23015": 2.30519, "23020": 2.28624, "23025": 2.35969, "23030": 2.37364, "23035": 2.31008, "23040": 2.21397, "23045": 2.37257, "23050": 2.28084, "23055": 2.26817, 
"23060": 2.28802, "23065": 2.33434, "23070": 2.26961, "23075": 2.32403, "23080": 2.23128, "23085": 2.32948, "23090": 2.29221, "23095": 2.27118, "23100": 2.2438, "23105": 2.2494, "23110": 2.33221, "23115": 2.24613, "23120": 2.28111, "23125": 2.30704, "23130": 2.33376, "23135": 2.28528, "23140": 2.23759, "23145": 2.26859, "23150": 2.30713, "23155": 2.27188, "23160": 2.37501, "23165": 2.25296, "23170": 2.33726, "23175": 2.27524, "23180": 2.27952, "23185": 2.27952, "23190": 2.25697, "23195": 2.3593, "23200": 2.28771, "23205": 2.36108, "23210": 2.21572, "23215": 2.32433, "23220": 2.27345, "23225": 2.28356, "23230": 2.29418, "23235": 2.36959, "23240": 2.31643, "23245": 2.3353, "23250": 2.33568, "23255": 2.30482, "23260": 2.2862, "23265": 2.2887, "23270": 2.25228, "23275": 2.29305, "23280": 2.30704, "23285": 2.33052, "23290": 2.33065, "23295": 2.29567, "23300": 2.34186, "23305": 2.25193, "23310": 2.25517, "23315": 2.33323, "23320": 2.28704, "23325": 2.26612, "23330": 2.23442, "23335": 2.29605, "23340": 2.37017, "23345": 2.3463, "23350": 2.29898, "23355": 2.30313, "23360": 2.33823, "23365": 2.31207, "23370": 2.2829, "23375": 2.25997, "23380": 2.26103, "23385": 2.35892, "23390": 2.29355, "23395": 2.30262, "23400": 2.26443, "23405": 2.27289, "23410": 2.32845, "23415": 2.25296, "23420": 2.30639, "23425": 2.30308, "23430": 2.20299, "23435": 2.26083, "23440": 2.30109, "23445": 2.3329, "23450": 2.30493, "23455": 2.32132, "23460": 2.31538, "23465": 2.31849, "23470": 2.26544, "23475": 2.2632, "23480": 2.31342, "23485": 2.31889, "23490": 2.30332, "23495": 2.32629, "23500": 2.32884, "23505": 2.29132, "23510": 2.25712, "23515": 2.28213, "23520": 2.30834, "23525": 2.31975, "23530": 2.31513, "23535": 2.2879, "23540": 2.25107, "23545": 2.35512, "23550": 2.28931, "23555": 2.28847, "23560": 2.25482, "23565": 2.27618, "23570": 2.27704, "23575": 2.30933, "23580": 2.3162, "23585": 2.30993, "23590": 2.2857, "23595": 2.26775, "23600": 2.24463, "23605": 2.34706, "23610": 2.26903, "23615": 2.33398, "23620": 2.24948, "23625": 2.29716, "23630": 2.3004, "23635": 2.32623, "23640": 2.21091, "23645": 2.25078, "23650": 2.2864, "23655": 2.27592, "23660": 2.31527, "23665": 2.38231, "23670": 2.39457, "23675": 2.31251, "23680": 2.31164, "23685": 2.3178, "23690": 2.26681, "23695": 2.28689, "23700": 2.30465, "23705": 2.34371, "23710": 2.29941, "23715": 2.35521, "23720": 2.30073, "23725": 2.28786, "23730": 2.28203, "23735": 2.31166, "23740": 2.3087, "23745": 2.34057, "23750": 2.29394, "23755": 2.28745, "23760": 2.27444, "23765": 2.29809, "23770": 2.32069, "23775": 2.27651, "23780": 2.27505, "23785": 2.28538, "23790": 2.2196, "23795": 2.28973, "23800": 2.22375, "23805": 2.37798, "23810": 2.30758, "23815": 2.26109, "23820": 2.31254, "23825": 2.24501, "23830": 2.28648, "23835": 2.24706, "23840": 2.32718, "23845": 2.2316, "23850": 2.28885, "23855": 2.27694, "23860": 2.26883, "23865": 2.31495, "23870": 2.27533, "23875": 2.22602, "23880": 2.33126, "23885": 2.32389, "23890": 2.29292, "23895": 2.2759, "23900": 2.32959, "23905": 2.31008, "23910": 2.2924, "23915": 2.308, "23920": 2.30102, "23925": 2.31352, "23930": 2.2846, "23935": 2.31016, "23940": 2.26258, "23945": 2.2761, "23950": 2.28495, "23955": 2.31831, "23960": 2.30406, "23965": 2.24448, "23970": 2.29543, "23975": 2.31779, "23980": 2.26531, "23985": 2.23854, "23990": 2.27416, "23995": 2.30217, "24000": 2.16916, "24005": 2.42076, "24010": 2.35558, "24015": 2.30051, "24020": 2.33273, "24025": 2.28317, "24030": 2.30186, "24035": 2.30345, "24040": 2.26683, "24045": 2.1958, "24050": 
2.34978, "24055": 2.29327, "24060": 2.34759, "24065": 2.29974, "24070": 2.30518, "24075": 2.24095, "24080": 2.30759, "24085": 2.2509, "24090": 2.29766, "24095": 2.27188, "24100": 2.33542, "24105": 2.26703, "24110": 2.25345, "24115": 2.24618, "24120": 2.2745, "24125": 2.24596, "24130": 2.33156, "24135": 2.28627, "24140": 2.33969, "24145": 2.32733, "24150": 2.31292, "24155": 2.29571, "24160": 2.25046, "24165": 2.27941, "24170": 2.26589, "24175": 2.29893, "24180": 2.27594, "24185": 2.33034, "24190": 2.22799, "24195": 2.26759, "24200": 2.34226, "24205": 2.30109, "24210": 2.2904, "24215": 2.21911, "24220": 2.28566, "24225": 2.25116, "24230": 2.3178, "24235": 2.30283, "24240": 2.3299, "24245": 2.28492, "24250": 2.29953, "24255": 2.25128, "24260": 2.34178, "24265": 2.25218, "24270": 2.26416, "24275": 2.3733, "24280": 2.34297, "24285": 2.23442, "24290": 2.31039, "24295": 2.31261, "24300": 2.26248, "24305": 2.30804, "24310": 2.26872, "24315": 2.32316, "24320": 2.33543, "24325": 2.26858, "24330": 2.21198, "24335": 2.38426, "24340": 2.35754, "24345": 2.34883, "24350": 2.26914, "24355": 2.32977, "24360": 2.31148, "24365": 2.24103, "24370": 2.28424, "24375": 2.25945, "24380": 2.30205, "24385": 2.24118, "24390": 2.27993, "24395": 2.25798, "24400": 2.24503, "24405": 2.29776, "24410": 2.2999, "24415": 2.31977, "24420": 2.33364, "24425": 2.24422, "24430": 2.27039, "24435": 2.31688, "24440": 2.26323, "24445": 2.28788, "24450": 2.28959, "24455": 2.27984, "24460": 2.26348, "24465": 2.25225, "24470": 2.30283, "24475": 2.2152, "24480": 2.29822, "24485": 2.2192, "24490": 2.28151, "24495": 2.34883, "24500": 2.26123, "24505": 2.21037, "24510": 2.22104, "24515": 2.28009, "24520": 2.35625, "24525": 2.28061, "24530": 2.32431, "24535": 2.28867, "24540": 2.30622, "24545": 2.34624, "24550": 2.30966, "24555": 2.24875, "24560": 2.28671, "24565": 2.21887, "24570": 2.28495, "24575": 2.3284, "24580": 2.29303, "24585": 2.29357, "24590": 2.21403, "24595": 2.35477, "24600": 2.24254, "24605": 2.26886, "24610": 2.31208, "24615": 2.22472, "24620": 2.29798, "24625": 2.26246, "24630": 2.25765, "24635": 2.30014, "24640": 2.30011, "24645": 2.2484, "24650": 2.28275, "24655": 2.3131, "24660": 2.24833, "24665": 2.21564, "24670": 2.25174, "24675": 2.20663, "24680": 2.3438, "24685": 2.23332, "24690": 2.2623, "24695": 2.27513, "24700": 2.25293, "24705": 2.29674, "24710": 2.26307, "24715": 2.26386, "24720": 2.278, "24725": 2.20078, "24730": 2.31515, "24735": 2.30911, "24740": 2.24891, "24745": 2.24247, "24750": 2.30274, "24755": 2.2833, "24760": 2.26145, "24765": 2.21666, "24770": 2.25082, "24775": 2.28312, "24780": 2.28505, "24785": 2.27319, "24790": 2.34459, "24795": 2.2235, "24800": 2.27537, "24805": 2.24486, "24810": 2.2307, "24815": 2.32102, "24820": 2.2987, "24825": 2.30177, "24830": 2.25672, "24835": 2.24837, "24840": 2.28501, "24845": 2.30617, "24850": 2.33086, "24855": 2.25128, "24860": 2.32426, "24865": 2.156, "24870": 2.2609, "24875": 2.30781, "24880": 2.21909, "24885": 2.22987, "24890": 2.25847, "24895": 2.28558, "24900": 2.2878, "24905": 2.23103, "24910": 2.28108, "24915": 2.25481, "24920": 2.39339, "24925": 2.28989, "24930": 2.21837, "24935": 2.28628, "24940": 2.25187, "24945": 2.28772, "24950": 2.32654, "24955": 2.30837, "24960": 2.27318, "24965": 2.32452, "24970": 2.30271, "24975": 2.27301, "24980": 2.29474, "24985": 2.28756, "24990": 2.26915, "24995": 2.27452, "25000": 2.28494, "25005": 2.32506, "25010": 2.28678, "25015": 2.28576, "25020": 2.30945, "25025": 2.27428, "25030": 2.27686, "25035": 2.30616, "25040": 2.30226, 
"25045": 2.313, "25050": 2.31803, "25055": 2.32877, "25060": 2.28274, "25065": 2.28363, "25070": 2.30377, "25075": 2.26723, "25080": 2.23815, "25085": 2.31451, "25090": 2.32801, "25095": 2.29438, "25100": 2.29395, "25105": 2.22318, "25110": 2.27904, "25115": 2.25845, "25120": 2.3024, "25125": 2.26822, "25130": 2.25473, "25135": 2.26882, "25140": 2.24428, "25145": 2.32053, "25150": 2.26089, "25155": 2.27713, "25160": 2.28483, "25165": 2.30441, "25170": 2.24047, "25175": 2.26533, "25180": 2.211, "25185": 2.30927, "25190": 2.20932, "25195": 2.24574, "25200": 2.21495, "25205": 2.23733, "25210": 2.30041, "25215": 2.27823, "25220": 2.3474, "25225": 2.21424, "25230": 2.19004, "25235": 2.28674, "25240": 2.28809, "25245": 2.26253, "25250": 2.18615, "25255": 2.25864, "25260": 2.29704, "25265": 2.273, "25270": 2.2167, "25275": 2.24231, "25280": 2.3031, "25285": 2.25564, "25290": 2.32849, "25295": 2.32932, "25300": 2.22646, "25305": 2.26053, "25310": 2.33498, "25315": 2.28702, "25320": 2.23386, "25325": 2.21724, "25330": 2.29806, "25335": 2.33215, "25340": 2.3045, "25345": 2.24731, "25350": 2.32143, "25355": 2.22221, "25360": 2.26474, "25365": 2.30455, "25370": 2.27666, "25375": 2.2598, "25380": 2.22384, "25385": 2.34401, "25390": 2.31158, "25395": 2.22017, "25400": 2.30486, "25405": 2.29758, "25410": 2.25756, "25415": 2.26721, "25420": 2.2506, "25425": 2.22953, "25430": 2.23443, "25435": 2.26395, "25440": 2.31206, "25445": 2.16184, "25450": 2.28883, "25455": 2.34807, "25460": 2.32456, "25465": 2.22726, "25470": 2.30039, "25475": 2.23195, "25480": 2.28876, "25485": 2.27276, "25490": 2.26249, "25495": 2.30109, "25500": 2.22969, "25505": 2.2842, "25510": 2.2698, "25515": 2.27178, "25520": 2.2652, "25525": 2.25088, "25530": 2.23032, "25535": 2.26656, "25540": 2.25964, "25545": 2.33321, "25550": 2.27856, "25555": 2.28929, "25560": 2.28786, "25565": 2.27827, "25570": 2.31782, "25575": 2.33723, "25580": 2.30718, "25585": 2.28994, "25590": 2.286, "25595": 2.26152, "25600": 2.2379, "25605": 2.28146, "25610": 2.28406, "25615": 2.26728, "25620": 2.2875, "25625": 2.31453, "25630": 2.27804, "25635": 2.24534, "25640": 2.30927, "25645": 2.26306, "25650": 2.22642, "25655": 2.20372, "25660": 2.27827, "25665": 2.27687, "25670": 2.2436, "25675": 2.17833, "25680": 2.27072, "25685": 2.28815, "25690": 2.23759, "25695": 2.25222, "25700": 2.25234, "25705": 2.2637, "25710": 2.301, "25715": 2.23961, "25720": 2.24092, "25725": 2.29363, "25730": 2.31239, "25735": 2.29671, "25740": 2.28577, "25745": 2.29751, "25750": 2.25096, "25755": 2.27268, "25760": 2.15202, "25765": 2.30823, "25770": 2.28216, "25775": 2.23391, "25780": 2.32851, "25785": 2.3303, "25790": 2.27078, "25795": 2.2598, "25800": 2.3317, "25805": 2.22832, "25810": 2.22478, "25815": 2.18965, "25820": 2.26951, "25825": 2.2267, "25830": 2.30752, "25835": 2.1842, "25840": 2.27068, "25845": 2.28148, "25850": 2.23533, "25855": 2.23753, "25860": 2.30668, "25865": 2.23786, "25870": 2.26967, "25875": 2.20071, "25880": 2.25741, "25885": 2.29183, "25890": 2.28286, "25895": 2.19047, "25900": 2.27785, "25905": 2.22724, "25910": 2.23597, "25915": 2.27097, "25920": 2.22054, "25925": 2.25271, "25930": 2.2177, "25935": 2.27212, "25940": 2.21898, "25945": 2.27454, "25950": 2.2727, "25955": 2.32277, "25960": 2.23365, "25965": 2.24794, "25970": 2.26506, "25975": 2.25472, "25980": 2.28971, "25985": 2.31474, "25990": 2.3067, "25995": 2.24218, "26000": 2.28241, "26005": 2.2353, "26010": 2.24872, "26015": 2.26372, "26020": 2.25192, "26025": 2.30625, "26030": 2.22422, "26035": 2.26263, 
"26040": 2.22279, "26045": 2.33153, "26050": 2.24288, "26055": 2.31086, "26060": 2.22682, "26065": 2.14726, "26070": 2.2782, "26075": 2.27616, "26080": 2.20898, "26085": 2.31065, "26090": 2.20156, "26095": 2.1511, "26100": 2.18744, "26105": 2.18158, "26110": 2.28653, "26115": 2.18635, "26120": 2.25567, "26125": 2.2709, "26130": 2.23479, "26135": 2.22834, "26140": 2.24472, "26145": 2.16859, "26150": 2.22945, "26155": 2.22235, "26160": 2.32449, "26165": 2.18834, "26170": 2.32388, "26175": 2.23984, "26180": 2.27544, "26185": 2.20535, "26190": 2.28676, "26195": 2.26326, "26200": 2.29545, "26205": 2.19295, "26210": 2.22935, "26215": 2.28068, "26220": 2.21642, "26225": 2.27761, "26230": 2.22325, "26235": 2.16404, "26240": 2.27289, "26245": 2.28718, "26250": 2.28261, "26255": 2.29679, "26260": 2.22261, "26265": 2.21075, "26270": 2.28271, "26275": 2.21355, "26280": 2.25448, "26285": 2.24395, "26290": 2.30631, "26295": 2.25803, "26300": 2.29003, "26305": 2.30864, "26310": 2.2816, "26315": 2.27815, "26320": 2.1984, "26325": 2.23731, "26330": 2.24764, "26335": 2.25271, "26340": 2.28552, "26345": 2.29088, "26350": 2.2501, "26355": 2.17705, "26360": 2.27662, "26365": 2.23437, "26370": 2.22943, "26375": 2.29282, "26380": 2.2744, "26385": 2.2307, "26390": 2.20416, "26395": 2.28391, "26400": 2.28352, "26405": 2.24582, "26410": 2.31204, "26415": 2.32815, "26420": 2.24344, "26425": 2.22381, "26430": 2.21898, "26435": 2.36464, "26440": 2.19579, "26445": 2.29125, "26450": 2.25408, "26455": 2.24278, "26460": 2.28844, "26465": 2.20808, "26470": 2.19414, "26475": 2.2821, "26480": 2.25655, "26485": 2.20806, "26490": 2.26032, "26495": 2.23764, "26500": 2.29287, "26505": 2.20158, "26510": 2.23352, "26515": 2.29752, "26520": 2.24912, "26525": 2.31506, "26530": 2.19528, "26535": 2.32134, "26540": 2.24123, "26545": 2.2329, "26550": 2.24419, "26555": 2.22829, "26560": 2.30727, "26565": 2.26625, "26570": 2.21854, "26575": 2.28158, "26580": 2.2972, "26585": 2.1992, "26590": 2.23349, "26595": 2.23392, "26600": 2.21146, "26605": 2.2669, "26610": 2.34324, "26615": 2.22692, "26620": 2.22998, "26625": 2.2842, "26630": 2.24864, "26635": 2.26121, "26640": 2.25518, "26645": 2.29109, "26650": 2.21482, "26655": 2.24769, "26660": 2.2312, "26665": 2.29188, "26670": 2.23424, "26675": 2.24823, "26680": 2.28088, "26685": 2.26053, "26690": 2.21028, "26695": 2.25007, "26700": 2.22566, "26705": 2.2261, "26710": 2.34059, "26715": 2.24303, "26720": 2.18575, "26725": 2.26537, "26730": 2.2539, "26735": 2.25218, "26740": 2.29957, "26745": 2.3181, "26750": 2.25697, "26755": 2.23909, "26760": 2.29806, "26765": 2.24514, "26770": 2.18886, "26775": 2.23016, "26780": 2.26934, "26785": 2.24652, "26790": 2.31087, "26795": 2.24846, "26800": 2.20914, "26805": 2.25875, "26810": 2.21893, "26815": 2.29854, "26820": 2.32045, "26825": 2.233, "26830": 2.24837, "26835": 2.24753, "26840": 2.28621, "26845": 2.24006, "26850": 2.31152, "26855": 2.30131, "26860": 2.27187, "26865": 2.31596, "26870": 2.23534, "26875": 2.23099, "26880": 2.30888, "26885": 2.22028, "26890": 2.29541, "26895": 2.23506, "26900": 2.31237, "26905": 2.26329, "26910": 2.2821, "26915": 2.22324, "26920": 2.33733, "26925": 2.20244, "26930": 2.22547, "26935": 2.21696, "26940": 2.24398, "26945": 2.23053, "26950": 2.26901, "26955": 2.23867, "26960": 2.2478, "26965": 2.20654, "26970": 2.25221, "26975": 2.24548, "26980": 2.23888, "26985": 2.31992, "26990": 2.23521, "26995": 2.22456, "27000": 2.18706, "27005": 2.22469, "27010": 2.32879, "27015": 2.2211, "27020": 2.2405, "27025": 2.32147, "27030": 
2.28413, "27035": 2.1528, "27040": 2.21149, "27045": 2.30294, "27050": 2.19839, "27055": 2.25754, "27060": 2.2905, "27065": 2.21993, "27070": 2.27677, "27075": 2.24825, "27080": 2.23755, "27085": 2.23737, "27090": 2.21604, "27095": 2.24223, "27100": 2.24276, "27105": 2.22189, "27110": 2.28008, "27115": 2.20327, "27120": 2.23432, "27125": 2.25121, "27130": 2.22694, "27135": 2.28053, "27140": 2.21022, "27145": 2.23551, "27150": 2.24219, "27155": 2.19847, "27160": 2.24007, "27165": 2.20517, "27170": 2.25995, "27175": 2.20588, "27180": 2.2142, "27185": 2.30299, "27190": 2.20506, "27195": 2.24791, "27200": 2.26893, "27205": 2.22998, "27210": 2.32114, "27215": 2.26073, "27220": 2.26343, "27225": 2.21155, "27230": 2.17401, "27235": 2.19385, "27240": 2.28458, "27245": 2.21354, "27250": 2.28042, "27255": 2.18048, "27260": 2.22367, "27265": 2.29401, "27270": 2.2039, "27275": 2.32366, "27280": 2.24519, "27285": 2.25555, "27290": 2.30682, "27295": 2.22958, "27300": 2.19655, "27305": 2.25301, "27310": 2.15859, "27315": 2.27331, "27320": 2.20755, "27325": 2.23963, "27330": 2.30601, "27335": 2.28249, "27340": 2.27043, "27345": 2.23008, "27350": 2.28583, "27355": 2.24572, "27360": 2.26585, "27365": 2.20397, "27370": 2.23025, "27375": 2.27677, "27380": 2.23593, "27385": 2.20971, "27390": 2.15442, "27395": 2.19865, "27400": 2.3162, "27405": 2.23265, "27410": 2.24711, "27415": 2.24196, "27420": 2.19442, "27425": 2.21305, "27430": 2.18787, "27435": 2.25147, "27440": 2.23443, "27445": 2.22381, "27450": 2.22192, "27455": 2.2109, "27460": 2.28335, "27465": 2.26847, "27470": 2.1801, "27475": 2.24174, "27480": 2.22762, "27485": 2.24832, "27490": 2.29158, "27495": 2.28874, "27500": 2.26187, "27505": 2.19517, "27510": 2.19373, "27515": 2.17707, "27520": 2.23501, "27525": 2.197, "27530": 2.24937, "27535": 2.19298, "27540": 2.19457, "27545": 2.18731, "27550": 2.21971, "27555": 2.18153, "27560": 2.1665, "27565": 2.17845, "27570": 2.22321, "27575": 2.21943, "27580": 2.27948, "27585": 2.24846, "27590": 2.21741, "27595": 2.20152, "27600": 2.23795, "27605": 2.22637, "27610": 2.181, "27615": 2.24291, "27620": 2.24928, "27625": 2.2412, "27630": 2.19097, "27635": 2.26712, "27640": 2.24849, "27645": 2.26932, "27650": 2.22419, "27655": 2.19338, "27660": 2.21651, "27665": 2.24417, "27670": 2.20409, "27675": 2.16015, "27680": 2.27364, "27685": 2.1985, "27690": 2.19347, "27695": 2.24354, "27700": 2.20334, "27705": 2.2383, "27710": 2.30574, "27715": 2.18397, "27720": 2.19445, "27725": 2.17037, "27730": 2.20002, "27735": 2.23623, "27740": 2.23415, "27745": 2.23629, "27750": 2.22596, "27755": 2.27172, "27760": 2.26429, "27765": 2.2318, "27770": 2.22019, "27775": 2.31109, "27780": 2.20278, "27785": 2.20504, "27790": 2.2135, "27795": 2.26709, "27800": 2.20588, "27805": 2.244, "27810": 2.24096, "27815": 2.22346, "27820": 2.26196, "27825": 2.24656, "27830": 2.21706, "27835": 2.26048, "27840": 2.18393, "27845": 2.2764, "27850": 2.2183, "27855": 2.20416, "27860": 2.20869, "27865": 2.2891, "27870": 2.30339, "27875": 2.31555, "27880": 2.1927, "27885": 2.30811, "27890": 2.2315, "27895": 2.22313, "27900": 2.24465, "27905": 2.29243, "27910": 2.23813, "27915": 2.24597, "27920": 2.22061, "27925": 2.16596, "27930": 2.271, "27935": 2.26849, "27940": 2.29437, "27945": 2.26178, "27950": 2.23431, "27955": 2.31881, "27960": 2.26126, "27965": 2.24555, "27970": 2.2503, "27975": 2.27278, "27980": 2.2099, "27985": 2.11664, "27990": 2.24956, "27995": 2.18124, "28000": 2.24379, "28005": 2.16565, "28010": 2.22095, "28015": 2.19425, "28020": 2.25403, "28025": 
2.28082, "28030": 2.26434, "28035": 2.23453, "28040": 2.21304, "28045": 2.22614, "28050": 2.24505, "28055": 2.12445, "28060": 2.2103, "28065": 2.18452, "28070": 2.198, "28075": 2.24485, "28080": 2.20986, "28085": 2.22411, "28090": 2.225, "28095": 2.20368, "28100": 2.20008, "28105": 2.30364, "28110": 2.20628, "28115": 2.19956, "28120": 2.23462, "28125": 2.21517, "28130": 2.22086, "28135": 2.22711, "28140": 2.24902, "28145": 2.22946, "28150": 2.23256, "28155": 2.15154, "28160": 2.20439, "28165": 2.1973, "28170": 2.11051, "28175": 2.25113, "28180": 2.31846, "28185": 2.24102, "28190": 2.25675, "28195": 2.23438, "28200": 2.24816, "28205": 2.21471, "28210": 2.22897, "28215": 2.21421, "28220": 2.19117, "28225": 2.23668, "28230": 2.19311, "28235": 2.20734, "28240": 2.23024, "28245": 2.26786, "28250": 2.22247, "28255": 2.16282, "28260": 2.13785, "28265": 2.18768, "28270": 2.24378, "28275": 2.18905, "28280": 2.23632, "28285": 2.27203, "28290": 2.1817, "28295": 2.24671, "28300": 2.20719, "28305": 2.26873, "28310": 2.20056, "28315": 2.20293, "28320": 2.21212, "28325": 2.24391, "28330": 2.23721, "28335": 2.21742, "28340": 2.21862, "28345": 2.25876, "28350": 2.18635, "28355": 2.23276, "28360": 2.24237, "28365": 2.21998, "28370": 2.2753, "28375": 2.17968, "28380": 2.24024, "28385": 2.17875, "28390": 2.26847, "28395": 2.18367, "28400": 2.2055, "28405": 2.20918, "28410": 2.2349, "28415": 2.18423, "28420": 2.21605, "28425": 2.20211, "28430": 2.24046, "28435": 2.18691, "28440": 2.22372, "28445": 2.29189, "28450": 2.20932, "28455": 2.19386, "28460": 2.23085, "28465": 2.17859, "28470": 2.25272, "28475": 2.22107, "28480": 2.19837, "28485": 2.29531, "28490": 2.2264, "28495": 2.23165, "28500": 2.15088, "28505": 2.29562, "28510": 2.20122, "28515": 2.30276, "28520": 2.20872, "28525": 2.22587, "28530": 2.23855, "28535": 2.22785, "28540": 2.24915, "28545": 2.25936, "28550": 2.19746, "28555": 2.24732, "28560": 2.17682, "28565": 2.22165, "28570": 2.24816, "28575": 2.21505, "28580": 2.14866, "28585": 2.22083, "28590": 2.16615, "28595": 2.27269, "28600": 2.23434, "28605": 2.24854, "28610": 2.23689, "28615": 2.20302, "28620": 2.2999, "28625": 2.23198, "28630": 2.18157, "28635": 2.20538, "28640": 2.25446, "28645": 2.28381, "28650": 2.28343, "28655": 2.19036, "28660": 2.18966, "28665": 2.1782, "28670": 2.21472, "28675": 2.265, "28680": 2.21489, "28685": 2.19122, "28690": 2.22787, "28695": 2.18967, "28700": 2.20207, "28705": 2.22774, "28710": 2.25068, "28715": 2.25925, "28720": 2.25157, "28725": 2.17758, "28730": 2.20455, "28735": 2.25199, "28740": 2.24092, "28745": 2.25887, "28750": 2.22671, "28755": 2.17212, "28760": 2.2779, "28765": 2.24123, "28770": 2.22064, "28775": 2.19334, "28780": 2.14253, "28785": 2.18236, "28790": 2.20077, "28795": 2.15666, "28800": 2.28513, "28805": 2.25769, "28810": 2.20838, "28815": 2.26871, "28820": 2.23622, "28825": 2.16978, "28830": 2.26754, "28835": 2.23328, "28840": 2.11857, "28845": 2.14972, "28850": 2.26173, "28855": 2.26925, "28860": 2.19741, "28865": 2.16045, "28870": 2.21364, "28875": 2.20395, "28880": 2.22059, "28885": 2.17028, "28890": 2.13831, "28895": 2.14605, "28900": 2.26225, "28905": 2.28198, "28910": 2.22044, "28915": 2.20171, "28920": 2.22612, "28925": 2.20553, "28930": 2.26373, "28935": 2.11613, "28940": 2.2432, "28945": 2.22365, "28950": 2.1578, "28955": 2.28192, "28960": 2.16116, "28965": 2.22014, "28970": 2.24213, "28975": 2.30179, "28980": 2.24926, "28985": 2.21262, "28990": 2.19767, "28995": 2.21886, "29000": 2.22145, "29005": 2.14319, "29010": 2.19384, "29015": 2.21723, 
"29020": 2.19798, "29025": 2.25233, "29030": 2.19865, "29035": 2.19163, "29040": 2.20465, "29045": 2.22413, "29050": 2.23816, "29055": 2.24031, "29060": 2.24594, "29065": 2.28142, "29070": 2.22277, "29075": 2.24758, "29080": 2.172, "29085": 2.19015, "29090": 2.21942, "29095": 2.13782, "29100": 2.14832, "29105": 2.22092, "29110": 2.16758, "29115": 2.27325, "29120": 2.27596, "29125": 2.2435, "29130": 2.19992, "29135": 2.26999, "29140": 2.13994, "29145": 2.26407, "29150": 2.22393, "29155": 2.25844, "29160": 2.26034, "29165": 2.18049, "29170": 2.27054, "29175": 2.2142, "29180": 2.23606, "29185": 2.24705, "29190": 2.19604, "29195": 2.12898, "29200": 2.19368, "29205": 2.14969, "29210": 2.1749, "29215": 2.19823, "29220": 2.21116, "29225": 2.31593, "29230": 2.17734, "29235": 2.26758, "29240": 2.27203, "29245": 2.20013, "29250": 2.28769, "29255": 2.25067, "29260": 2.20998, "29265": 2.16497, "29270": 2.20041, "29275": 2.16104, "29280": 2.23695, "29285": 2.21808, "29290": 2.19262, "29295": 2.21486, "29300": 2.1634, "29305": 2.24398, "29310": 2.1924, "29315": 2.24958, "29320": 2.15895, "29325": 2.24709, "29330": 2.16033, "29335": 2.25027, "29340": 2.29748, "29345": 2.17404, "29350": 2.2039, "29355": 2.23234, "29360": 2.26274, "29365": 2.26656, "29370": 2.16695, "29375": 2.21712, "29380": 2.24496, "29385": 2.21022, "29390": 2.28536, "29395": 2.20363, "29400": 2.25217, "29405": 2.18457, "29410": 2.20051, "29415": 2.23807, "29420": 2.21241, "29425": 2.26162, "29430": 2.15255, "29435": 2.15646, "29440": 2.2445, "29445": 2.19641, "29450": 2.16837, "29455": 2.16124, "29460": 2.18715, "29465": 2.26672, "29470": 2.23561, "29475": 2.23252, "29480": 2.17475, "29485": 2.17308, "29490": 2.18578, "29495": 2.19656, "29500": 2.27308, "29505": 2.16054, "29510": 2.16231, "29515": 2.19289, "29520": 2.23295, "29525": 2.12328, "29530": 2.24739, "29535": 2.15901, "29540": 2.2348, "29545": 2.09977, "29550": 2.17326, "29555": 2.21812, "29560": 2.19917, "29565": 2.16829, "29570": 2.21783, "29575": 2.18249, "29580": 2.25325, "29585": 2.21658, "29590": 2.14692, "29595": 2.13111, "29600": 2.19993, "29605": 2.2573, "29610": 2.21946, "29615": 2.2007, "29620": 2.24324, "29625": 2.16749, "29630": 2.10982, "29635": 2.18626, "29640": 2.23956, "29645": 2.2483, "29650": 2.18913, "29655": 2.18898, "29660": 2.21633, "29665": 2.22518, "29670": 2.21406, "29675": 2.19313, "29680": 2.18251, "29685": 2.22086, "29690": 2.24002, "29695": 2.21809, "29700": 2.23285, "29705": 2.25414, "29710": 2.1768, "29715": 2.21083, "29720": 2.19287, "29725": 2.19395, "29730": 2.14193, "29735": 2.21747, "29740": 2.19912, "29745": 2.26912, "29750": 2.19124, "29755": 2.1814, "29760": 2.22363, "29765": 2.23394, "29770": 2.24902, "29775": 2.17226, "29780": 2.15123, "29785": 2.20233, "29790": 2.15904, "29795": 2.23755, "29800": 2.19806, "29805": 2.21752, "29810": 2.18172, "29815": 2.19879, "29820": 2.1823, "29825": 2.14994, "29830": 2.21149, "29835": 2.21042, "29840": 2.21786, "29845": 2.15589, "29850": 2.21106, "29855": 2.21502, "29860": 2.21921, "29865": 2.25245, "29870": 2.21342, "29875": 2.25632, "29880": 2.24296, "29885": 2.21401, "29890": 2.24731, "29895": 2.23924, "29900": 2.1838, "29905": 2.23863, "29910": 2.21145, "29915": 2.15839, "29920": 2.2341, "29925": 2.16069, "29930": 2.09932, "29935": 2.15732, "29940": 2.25526, "29945": 2.18387, "29950": 2.14324, "29955": 2.17002, "29960": 2.25067, "29965": 2.18274, "29970": 2.17536, "29975": 2.19528, "29980": 2.22472, "29985": 2.21339, "29990": 2.23169, "29995": 2.16168, "30000": 2.23141, "30005": 2.23636, "30010": 
2.16014, "30015": 2.19605, "30020": 2.17601, "30025": 2.20295, "30030": 2.22384, "30035": 2.18988, "30040": 2.17973, "30045": 2.21468, "30050": 2.26131, "30055": 2.16162, "30060": 2.23189, "30065": 2.26881, "30070": 2.1548, "30075": 2.21623, "30080": 2.22114, "30085": 2.18285, "30090": 2.20492, "30095": 2.21843, "30100": 2.16958, "30105": 2.22784, "30110": 2.21478, "30115": 2.21175, "30120": 2.15318, "30125": 2.23793, "30130": 2.17572, "30135": 2.27141, "30140": 2.1922, "30145": 2.21292, "30150": 2.23088, "30155": 2.13487, "30160": 2.2472, "30165": 2.10741, "30170": 2.26941, "30175": 2.18682, "30180": 2.27091, "30185": 2.22207, "30190": 2.22841, "30195": 2.19674, "30200": 2.20061, "30205": 2.17234, "30210": 2.16544, "30215": 2.17944, "30220": 2.17204, "30225": 2.14948, "30230": 2.24679, "30235": 2.15709, "30240": 2.26635, "30245": 2.1876, "30250": 2.1965, "30255": 2.19607, "30260": 2.15883, "30265": 2.20316, "30270": 2.2183, "30275": 2.20212, "30280": 2.22144, "30285": 2.22927, "30290": 2.18328, "30295": 2.2302, "30300": 2.22461, "30305": 2.19961, "30310": 2.1509, "30315": 2.21876, "30320": 2.19611, "30325": 2.26263, "30330": 2.19437, "30335": 2.23866, "30340": 2.2205, "30345": 2.21025, "30350": 2.22089, "30355": 2.16823, "30360": 2.2231, "30365": 2.18077, "30370": 2.22295, "30375": 2.14017, "30380": 2.22926, "30385": 2.15952, "30390": 2.21746, "30395": 2.28186, "30400": 2.25145, "30405": 2.20198, "30410": 2.18581, "30415": 2.1932, "30420": 2.17688, "30425": 2.18199, "30430": 2.17855, "30435": 2.19984, "30440": 2.19583, "30445": 2.19808, "30450": 2.13375, "30455": 2.17056, "30460": 2.23766, "30465": 2.12885, "30470": 2.17135, "30475": 2.2665, "30480": 2.19634, "30485": 2.24142, "30490": 2.22446, "30495": 2.18772, "30500": 2.17059, "30505": 2.22178, "30510": 2.23171, "30515": 2.30895, "30520": 2.22842, "30525": 2.09651, "30530": 2.21673, "30535": 2.24936, "30540": 2.14774, "30545": 2.19373, "30550": 2.10779, "30555": 2.15339, "30560": 2.22109, "30565": 2.23946, "30570": 2.17704, "30575": 2.15363, "30580": 2.25801, "30585": 2.23922, "30590": 2.23288, "30595": 2.22244, "30600": 2.20598, "30605": 2.1895, "30610": 2.18073, "30615": 2.20956, "30620": 2.21349, "30625": 2.25515, "30630": 2.24471, "30635": 2.21888, "30640": 2.18519, "30645": 2.22922, "30650": 2.207, "30655": 2.20405, "30660": 2.21689, "30665": 2.1911, "30670": 2.19752, "30675": 2.14411, "30680": 2.16322, "30685": 2.13184, "30690": 2.18902, "30695": 2.21567, "30700": 2.25429, "30705": 2.26733, "30710": 2.17743, "30715": 2.22476, "30720": 2.15394, "30725": 2.17446, "30730": 2.19944, "30735": 2.19934, "30740": 2.19021, "30745": 2.23465, "30750": 2.15212, "30755": 2.21456, "30760": 2.17649, "30765": 2.1422, "30770": 2.1664, "30775": 2.23837, "30780": 2.20289, "30785": 2.27559, "30790": 2.21991, "30795": 2.23606, "30800": 2.24927, "30805": 2.12864, "30810": 2.23753, "30815": 2.21078, "30820": 2.23973, "30825": 2.14017, "30830": 2.23983, "30835": 2.22122, "30840": 2.28183, "30845": 2.19885, "30850": 2.15332, "30855": 2.18742, "30860": 2.14185, "30865": 2.19195, "30870": 2.18193, "30875": 2.15745, "30880": 2.23973, "30885": 2.14293, "30890": 2.24963, "30895": 2.18436, "30900": 2.14477, "30905": 2.22895, "30910": 2.19233, "30915": 2.21386, "30920": 2.2718, "30925": 2.22216, "30930": 2.15322, "30935": 2.20126, "30940": 2.17458, "30945": 2.14607, "30950": 2.12851, "30955": 2.22802, "30960": 2.21358, "30965": 2.23859, "30970": 2.1825, "30975": 2.20477, "30980": 2.18903, "30985": 2.15848, "30990": 2.21015, "30995": 2.17865, "31000": 2.20574, 
"31005": 2.29466, "31010": 2.17862, "31015": 2.17265, "31020": 2.18004, "31025": 2.17454, "31030": 2.26212, "31035": 2.20056, "31040": 2.25588, "31045": 2.12743, "31050": 2.1572, "31055": 2.23202, "31060": 2.09183, "31065": 2.24008, "31070": 2.24377, "31075": 2.20643, "31080": 2.17756, "31085": 2.22051, "31090": 2.21932, "31095": 2.19942, "31100": 2.21775, "31105": 2.17229, "31110": 2.15391, "31115": 2.20306, "31120": 2.18505, "31125": 2.17483, "31130": 2.11169, "31135": 2.17046, "31140": 2.23495, "31145": 2.21239, "31150": 2.24093, "31155": 2.15888, "31160": 2.20756, "31165": 2.21663, "31170": 2.17061, "31175": 2.2146, "31180": 2.19288, "31185": 2.15761, "31190": 2.16084, "31195": 2.20368, "31200": 2.17622, "31205": 2.19022, "31210": 2.1871, "31215": 2.22037, "31220": 2.1744, "31225": 2.16151, "31230": 2.22665, "31235": 2.18366, "31240": 2.18526, "31245": 2.2029, "31250": 2.25072, "31255": 2.15032, "31260": 2.19013, "31265": 2.18392, "31270": 2.18446, "31275": 2.22078, "31280": 2.19366, "31285": 2.10874, "31290": 2.22962, "31295": 2.20968, "31300": 2.13956, "31305": 2.22535, "31310": 2.18558, "31315": 2.1451, "31320": 2.24257, "31325": 2.19681, "31330": 2.26501, "31335": 2.2256, "31340": 2.22629, "31345": 2.15183, "31350": 2.13755, "31355": 2.19505, "31360": 2.21014, "31365": 2.16916, "31370": 2.17722, "31375": 2.2621, "31380": 2.13268, "31385": 2.17371, "31390": 2.17809, "31395": 2.17817, "31400": 2.1623, "31405": 2.16281, "31410": 2.23392, "31415": 2.18139, "31420": 2.25643, "31425": 2.17242, "31430": 2.20449, "31435": 2.207, "31440": 2.17622, "31445": 2.11884, "31450": 2.19782, "31455": 2.25425, "31460": 2.19666, "31465": 2.16739, "31470": 2.22635, "31475": 2.13189, "31480": 2.21145, "31485": 2.18983, "31490": 2.16003, "31495": 2.16984, "31500": 2.2276, "31505": 2.21553, "31510": 2.17123, "31515": 2.13115, "31520": 2.11314, "31525": 2.17799, "31530": 2.23607, "31535": 2.21089, "31540": 2.21383, "31545": 2.14596, "31550": 2.18762, "31555": 2.2811, "31560": 2.22129, "31565": 2.20189, "31570": 2.26711, "31575": 2.20298, "31580": 2.1486, "31585": 2.17939, "31590": 2.16858, "31595": 2.17233, "31600": 2.2483, "31605": 2.19616, "31610": 2.19634, "31615": 2.18579, "31620": 2.23273, "31625": 2.19322, "31630": 2.16043, "31635": 2.17825, "31640": 2.2253, "31645": 2.18599, "31650": 2.21347, "31655": 2.18979, "31660": 2.22528, "31665": 2.19935, "31670": 2.19411, "31675": 2.20859, "31680": 2.18644, "31685": 2.20364, "31690": 2.16918, "31695": 2.25966, "31700": 2.20033, "31705": 2.15551, "31710": 2.18259, "31715": 2.14687, "31720": 2.16495, "31725": 2.16214, "31730": 2.13435, "31735": 2.16619, "31740": 2.1889, "31745": 2.1403, "31750": 2.07727, "31755": 2.18261, "31760": 2.18262, "31765": 2.21137, "31770": 2.18138, "31775": 2.15306, "31780": 2.13173, "31785": 2.19918, "31790": 2.22834, "31795": 2.21517, "31800": 2.21776, "31805": 2.25645, "31810": 2.23589, "31815": 2.17488, "31820": 2.19233, "31825": 2.14749, "31830": 2.21105, "31835": 2.23457, "31840": 2.22813, "31845": 2.18587, "31850": 2.17241, "31855": 2.16967, "31860": 2.20998, "31865": 2.15899, "31870": 2.22429, "31875": 2.24118, "31880": 2.1731, "31885": 2.13173, "31890": 2.18611, "31895": 2.21736, "31900": 2.16747, "31905": 2.19299, "31910": 2.24112, "31915": 2.15711, "31920": 2.16499, "31925": 2.17021, "31930": 2.224, "31935": 2.2222, "31940": 2.17497, "31945": 2.18946, "31950": 2.20645, "31955": 2.24041, "31960": 2.22206, "31965": 2.1907, "31970": 2.20004, "31975": 2.21073, "31980": 2.1353, "31985": 2.16258, "31990": 2.20663, "31995": 
2.26182, "32000": 2.15587, "32005": 2.17535, "32010": 2.17217, "32015": 2.23072, "32020": 2.16822, "32025": 2.22614, "32030": 2.19047, "32035": 2.19452, "32040": 2.18155, "32045": 2.18446, "32050": 2.20226, "32055": 2.125, "32060": 2.18507, "32065": 2.09656, "32070": 2.21743, "32075": 2.17834, "32080": 2.19884, "32085": 2.2306, "32090": 2.25578, "32095": 2.18119, "32100": 2.16181, "32105": 2.14992, "32110": 2.15573, "32115": 2.21048, "32120": 2.20432, "32125": 2.09162, "32130": 2.21952, "32135": 2.21351, "32140": 2.20605, "32145": 2.14259, "32150": 2.17325, "32155": 2.11391, "32160": 2.23627, "32165": 2.20107, "32170": 2.15758, "32175": 2.16194, "32180": 2.23012, "32185": 2.14457, "32190": 2.15441, "32195": 2.24454, "32200": 2.22995, "32205": 2.20238, "32210": 2.21329, "32215": 2.15862, "32220": 2.19414, "32225": 2.16182, "32230": 2.17519, "32235": 2.21998, "32240": 2.18058, "32245": 2.23252, "32250": 2.23944, "32255": 2.18549, "32260": 2.18595, "32265": 2.1833, "32270": 2.13854, "32275": 2.16163, "32280": 2.14803, "32285": 2.21425, "32290": 2.16434, "32295": 2.22869, "32300": 2.16942, "32305": 2.14087, "32310": 2.26131, "32315": 2.1944, "32320": 2.1422, "32325": 2.14563, "32330": 2.1874, "32335": 2.16462, "32340": 2.20505, "32345": 2.21045, "32350": 2.14075, "32355": 2.09645, "32360": 2.19685, "32365": 2.18531, "32370": 2.19443, "32375": 2.18709, "32380": 2.19483, "32385": 2.19124, "32390": 2.24766, "32395": 2.21556, "32400": 2.19553, "32405": 2.13483, "32410": 2.19294, "32415": 2.19155, "32420": 2.18753, "32425": 2.16779, "32430": 2.28606, "32435": 2.07265, "32440": 2.20342, "32445": 2.10718, "32450": 2.26901, "32455": 2.19664, "32460": 2.18343, "32465": 2.14091, "32470": 2.20838, "32475": 2.16326, "32480": 2.17938, "32485": 2.24722, "32490": 2.15728, "32495": 2.24981, "32500": 2.13583, "32505": 2.13902, "32510": 2.10846, "32515": 2.13464, "32520": 2.19289, "32525": 2.19867, "32530": 2.15526, "32535": 2.18616, "32540": 2.20225, "32545": 2.14235, "32550": 2.14261, "32555": 2.19891, "32560": 2.20147, "32565": 2.23685, "32570": 2.23206, "32575": 2.17198, "32580": 2.18017, "32585": 2.19682, "32590": 2.19146, "32595": 2.23546, "32600": 2.20634, "32605": 2.19648, "32610": 2.19548, "32615": 2.20171, "32620": 2.22289, "32625": 2.19874, "32630": 2.19992, "32635": 2.12346, "32640": 2.19737, "32645": 2.15453, "32650": 2.16457, "32655": 2.22775, "32660": 2.20178, "32665": 2.22805, "32670": 2.18255, "32675": 2.12161, "32680": 2.20295, "32685": 2.14182, "32690": 2.20574, "32695": 2.12088, "32700": 2.13899, "32705": 2.11907, "32710": 2.09203, "32715": 2.18141, "32720": 2.17444, "32725": 2.18697, "32730": 2.13983, "32735": 2.22792, "32740": 2.16113, "32745": 2.14665, "32750": 2.20099, "32755": 2.20848, "32760": 2.20113, "32765": 2.22339, "32770": 2.16342, "32775": 2.21752, "32780": 2.21654, "32785": 2.16928, "32790": 2.16321, "32795": 2.10175, "32800": 2.17928, "32805": 2.22371, "32810": 2.26561, "32815": 2.21938, "32820": 2.16067, "32825": 2.21275, "32830": 2.12264, "32835": 2.19172, "32840": 2.12392, "32845": 2.13995, "32850": 2.16626, "32855": 2.13331, "32860": 2.16205, "32865": 2.12357, "32870": 2.17157, "32875": 2.14576, "32880": 2.19693, "32885": 2.16762, "32890": 2.21091, "32895": 2.15029, "32900": 2.13941, "32905": 2.11077, "32910": 2.16526, "32915": 2.25689, "32920": 2.12168, "32925": 2.20234, "32930": 2.1721, "32935": 2.22708, "32940": 2.21809, "32945": 2.156, "32950": 2.23251, "32955": 2.13041, "32960": 2.16959, "32965": 2.21842, "32970": 2.15814, "32975": 2.17485, "32980": 2.18935, "32985": 
2.21617, "32990": 2.17092, "32995": 2.13893, "33000": 2.18598, "33005": 2.15917, "33010": 2.123, "33015": 2.19416, "33020": 2.23968, "33025": 2.17915, "33030": 2.22555, "33035": 2.16867, "33040": 2.20326, "33045": 2.19431, "33050": 2.09869, "33055": 2.17469, "33060": 2.14944, "33065": 2.21304, "33070": 2.22243, "33075": 2.2045, "33080": 2.19696, "33085": 2.10964, "33090": 2.15839, "33095": 2.1663, "33100": 2.1123, "33105": 2.20233, "33110": 2.16358, "33115": 2.11173, "33120": 2.22515, "33125": 2.21765, "33130": 2.22477, "33135": 2.22673, "33140": 2.21212, "33145": 2.11592, "33150": 2.21398, "33155": 2.22856, "33160": 2.21501, "33165": 2.16957, "33170": 2.24608, "33175": 2.12702, "33180": 2.11069, "33185": 2.12555, "33190": 2.16083, "33195": 2.2117, "33200": 2.2047, "33205": 2.22708, "33210": 2.2041, "33215": 2.1135, "33220": 2.15961, "33225": 2.19827, "33230": 2.18134, "33235": 2.17853, "33240": 2.13419, "33245": 2.19326, "33250": 2.21261, "33255": 2.15904, "33260": 2.17923, "33265": 2.24215, "33270": 2.17077, "33275": 2.17915, "33280": 2.14567, "33285": 2.17245, "33290": 2.10903, "33295": 2.15249, "33300": 2.19518, "33305": 2.20987, "33310": 2.16564, "33315": 2.17014, "33320": 2.17745, "33325": 2.10184, "33330": 2.18635, "33335": 2.20313, "33340": 2.15335, "33345": 2.17677, "33350": 2.16771, "33355": 2.16696, "33360": 2.25743, "33365": 2.16888, "33370": 2.21896, "33375": 2.18155, "33380": 2.22032, "33385": 2.15875, "33390": 2.18733, "33395": 2.12664, "33400": 2.13872, "33405": 2.15988, "33410": 2.14627, "33415": 2.13849, "33420": 2.1958, "33425": 2.20652, "33430": 2.18375, "33435": 2.18877, "33440": 2.13179, "33445": 2.25104, "33450": 2.14939, "33455": 2.19404, "33460": 2.18116, "33465": 2.12846, "33470": 2.16451, "33475": 2.18866, "33480": 2.19279, "33485": 2.18801, "33490": 2.22025, "33495": 2.1627, "33500": 2.1135, "33505": 2.1624, "33510": 2.16425, "33515": 2.19306, "33520": 2.17232, "33525": 2.18886, "33530": 2.13693, "33535": 2.15083, "33540": 2.19855, "33545": 2.15219, "33550": 2.16065, "33555": 2.15578, "33560": 2.11189, "33565": 2.14506, "33570": 2.19089, "33575": 2.1211, "33580": 2.17966, "33585": 2.23735, "33590": 2.22006, "33595": 2.14627, "33600": 2.18837, "33605": 2.18598, "33610": 2.18188, "33615": 2.14826, "33620": 2.22794, "33625": 2.13391, "33630": 2.20242, "33635": 2.15299, "33640": 2.25691, "33645": 2.22395, "33650": 2.18258, "33655": 2.17318, "33660": 2.18987, "33665": 2.15697, "33670": 2.15377, "33675": 2.15002, "33680": 2.20572, "33685": 2.12233, "33690": 2.21297, "33695": 2.19806, "33700": 2.13643, "33705": 2.18776, "33710": 2.22445, "33715": 2.23079, "33720": 2.15635, "33725": 2.19285, "33730": 2.19296, "33735": 2.18323, "33740": 2.0747, "33745": 2.11923, "33750": 2.17198, "33755": 2.14984, "33760": 2.17645, "33765": 2.18218, "33770": 2.12807, "33775": 2.28678, "33780": 2.15207, "33785": 2.17635, "33790": 2.12049, "33795": 2.20247, "33800": 2.11846, "33805": 2.19489, "33810": 2.17196, "33815": 2.05596, "33820": 2.18504, "33825": 2.13464, "33830": 2.10409, "33835": 2.14368, "33840": 2.19137, "33845": 2.15142, "33850": 2.21471, "33855": 2.20334, "33860": 2.20539, "33865": 2.12511, "33870": 2.15286, "33875": 2.18487, "33880": 2.1282, "33885": 2.15364, "33890": 2.11519, "33895": 2.1777, "33900": 2.16227, "33905": 2.12272, "33910": 2.11887, "33915": 2.1174, "33920": 2.13884, "33925": 2.15809, "33930": 2.14299, "33935": 2.16433, "33940": 2.18656, "33945": 2.1492, "33950": 2.17801, "33955": 2.17972, "33960": 2.13374, "33965": 2.18711, "33970": 2.11867, "33975": 2.15091, 
"33980": 2.15886, "33985": 2.1785, "33990": 2.1261, "33995": 2.16017, "34000": 2.19296, "34005": 2.15699, "34010": 2.20735, "34015": 2.08204, "34020": 2.18166, "34025": 2.09082, "34030": 2.24643, "34035": 2.16391, "34040": 2.16175, "34045": 2.17813, "34050": 2.17564, "34055": 2.18846, "34060": 2.15448, "34065": 2.15466, "34070": 2.19, "34075": 2.23088, "34080": 2.1402, "34085": 2.17817, "34090": 2.16659, "34095": 2.1902, "34100": 2.15054, "34105": 2.19849, "34110": 2.22602, "34115": 2.15514, "34120": 2.2028, "34125": 2.16567, "34130": 2.13427, "34135": 2.20667, "34140": 2.0793, "34145": 2.13724, "34150": 2.16902, "34155": 2.19565, "34160": 2.23928, "34165": 2.16472, "34170": 2.14241, "34175": 2.10304, "34180": 2.18962, "34185": 2.15204, "34190": 2.14323, "34195": 2.17081, "34200": 2.18959, "34205": 2.15129, "34210": 2.11661, "34215": 2.1543, "34220": 2.06865, "34225": 2.14379, "34230": 2.18456, "34235": 2.21215, "34240": 2.12583, "34245": 2.14019, "34250": 2.131, "34255": 2.18607, "34260": 2.14899, "34265": 2.1404, "34270": 2.18533, "34275": 2.14308, "34280": 2.13113, "34285": 2.18958, "34290": 2.18819, "34295": 2.21646, "34300": 2.12658, "34305": 2.16835, "34310": 2.19315, "34315": 2.16207, "34320": 2.1353, "34325": 2.17883, "34330": 2.16574, "34335": 2.12107, "34340": 2.14375, "34345": 2.15517, "34350": 2.18895, "34355": 2.14843, "34360": 2.17603, "34365": 2.19016, "34370": 2.15383, "34375": 2.13281, "34380": 2.19113, "34385": 2.10929, "34390": 2.16225, "34395": 2.15937, "34400": 2.13652, "34405": 2.1825, "34410": 2.19044, "34415": 2.19473, "34420": 2.23446, "34425": 2.24965, "34430": 2.16499, "34435": 2.17822, "34440": 2.18338, "34445": 2.13653, "34450": 2.14693, "34455": 2.18993, "34460": 2.15421, "34465": 2.21601, "34470": 2.21032, "34475": 2.14726, "34480": 2.22109, "34485": 2.21054, "34490": 2.13528, "34495": 2.14893, "34500": 2.20475, "34505": 2.22102, "34510": 2.14179, "34515": 2.11431, "34520": 2.18915, "34525": 2.20297, "34530": 2.15144, "34535": 2.08679, "34540": 2.104, "34545": 2.17151, "34550": 2.1799, "34555": 2.1867, "34560": 2.20502, "34565": 2.15814, "34570": 2.20662, "34575": 2.24085, "34580": 2.09811, "34585": 2.21549, "34590": 2.1382, "34595": 2.22143, "34600": 2.17622, "34605": 2.13718, "34610": 2.20176, "34615": 2.15031, "34620": 2.20285, "34625": 2.19776, "34630": 2.11915, "34635": 2.14503, "34640": 2.15008, "34645": 2.21347, "34650": 2.16037, "34655": 2.22929, "34660": 2.14539, "34665": 2.17771, "34670": 2.15359, "34675": 2.15949, "34680": 2.25886, "34685": 2.08113, "34690": 2.16214, "34695": 2.10612, "34700": 2.12255, "34705": 2.20393, "34710": 2.15353, "34715": 2.17091, "34720": 2.17059, "34725": 2.18085, "34730": 2.14931, "34735": 2.1295, "34740": 2.19162, "34745": 2.10463, "34750": 2.15977, "34755": 2.20176, "34760": 2.08211, "34765": 2.10995, "34770": 2.17572, "34775": 2.13222, "34780": 2.19674, "34785": 2.18313, "34790": 2.16245, "34795": 2.15075, "34800": 2.10373, "34805": 2.1648, "34810": 2.13132, "34815": 2.14839, "34820": 2.08863, "34825": 2.2087, "34830": 2.20872, "34835": 2.18709, "34840": 2.17028, "34845": 2.15371, "34850": 2.16837, "34855": 2.10292, "34860": 2.16756, "34865": 2.14294, "34870": 2.19075, "34875": 2.16843, "34880": 2.21439, "34885": 2.15007, "34890": 2.17377, "34895": 2.18042, "34900": 2.14626, "34905": 2.12739, "34910": 2.133, "34915": 2.17186, "34920": 2.13532, "34925": 2.14329, "34930": 2.21312, "34935": 2.16381, "34940": 2.15241, "34945": 2.13889, "34950": 2.1156, "34955": 2.10931, "34960": 2.16664, "34965": 2.23122, "34970": 
2.20221, "34975": 2.20217, "34980": 2.17006, "34985": 2.16208, "34990": 2.17206, "34995": 2.14461, "35000": 2.13985, "35005": 2.1398, "35010": 2.13757, "35015": 2.14476, "35020": 2.13544, "35025": 2.16735, "35030": 2.1535, "35035": 2.14841, "35040": 2.09277, "35045": 2.19916, "35050": 2.15311, "35055": 2.12836, "35060": 2.17826, "35065": 2.21594, "35070": 2.15048, "35075": 2.21595, "35080": 2.11854, "35085": 2.14061, "35090": 2.1435, "35095": 2.12946, "35100": 2.14879, "35105": 2.19695, "35110": 2.19733, "35115": 2.13591, "35120": 2.0974, "35125": 2.16393, "35130": 2.14212, "35135": 2.17523, "35140": 2.0788, "35145": 2.14563, "35150": 2.1708, "35155": 2.16351, "35160": 2.17858, "35165": 2.1597, "35170": 2.12739, "35175": 2.19523, "35180": 2.18679, "35185": 2.14548, "35190": 2.12583, "35195": 2.15004, "35200": 2.20195, "35205": 2.19269, "35210": 2.15847, "35215": 2.2022, "35220": 2.16254, "35225": 2.17127, "35230": 2.15227, "35235": 2.11248, "35240": 2.13223, "35245": 2.11545, "35250": 2.18577, "35255": 2.13749, "35260": 2.15985, "35265": 2.12257, "35270": 2.14516, "35275": 2.20051, "35280": 2.13917, "35285": 2.16006, "35290": 2.12224, "35295": 2.08512, "35300": 2.12652, "35305": 2.13359, "35310": 2.14378, "35315": 2.18712, "35320": 2.16712, "35325": 2.18923, "35330": 2.13544, "35335": 2.10888, "35340": 2.17961, "35345": 2.17865, "35350": 2.1599, "35355": 2.14497, "35360": 2.09951, "35365": 2.17135, "35370": 2.13113, "35375": 2.16504, "35380": 2.15054, "35385": 2.16735, "35390": 2.11514, "35395": 2.04295, "35400": 2.22109, "35405": 2.16545, "35410": 2.21451, "35415": 2.24549, "35420": 2.10128, "35425": 2.1681, "35430": 2.11536, "35435": 2.16605, "35440": 2.12426, "35445": 2.18075, "35450": 2.14967, "35455": 2.13595, "35460": 2.17972, "35465": 2.12543, "35470": 2.1759, "35475": 2.1515, "35480": 2.27514, "35485": 2.15623, "35490": 2.13567, "35495": 2.17199, "35500": 2.06697, "35505": 2.16272, "35510": 2.06838, "35515": 2.15441, "35520": 2.28302, "35525": 2.15491, "35530": 2.14881, "35535": 2.10613, "35540": 2.14347, "35545": 2.07159, "35550": 2.15662, "35555": 2.11341, "35560": 2.18338, "35565": 2.14207, "35570": 2.11673, "35575": 2.15703, "35580": 2.18256, "35585": 2.15659, "35590": 2.11537, "35595": 2.13744, "35600": 2.17012, "35605": 2.16374, "35610": 2.13228, "35615": 2.23732, "35620": 2.14394, "35625": 2.14374, "35630": 2.14513, "35635": 2.20688, "35640": 2.07899, "35645": 2.16418, "35650": 2.21036, "35655": 2.07141, "35660": 2.24945, "35665": 2.16699, "35670": 2.18046, "35675": 2.10532, "35680": 2.1501, "35685": 2.1729, "35690": 2.22504, "35695": 2.16007, "35700": 2.18066, "35705": 2.13882, "35710": 2.17846, "35715": 2.07784, "35720": 2.21136, "35725": 2.10231, "35730": 2.09605, "35735": 2.13274, "35740": 2.11266, "35745": 2.1133, "35750": 2.13829, "35755": 2.23523, "35760": 2.18139, "35765": 2.17523, "35770": 2.18065, "35775": 2.2127, "35780": 2.14682, "35785": 2.21105, "35790": 2.18085, "35795": 2.15792, "35800": 2.09711, "35805": 2.16548, "35810": 2.08549, "35815": 2.10303, "35820": 2.15607, "35825": 2.15202, "35830": 2.12918, "35835": 2.1305, "35840": 2.14321, "35845": 2.12921, "35850": 2.12827, "35855": 2.15259, "35860": 2.13198, "35865": 2.13193, "35870": 2.16687, "35875": 2.17401, "35880": 2.16883, "35885": 2.13124, "35890": 2.14876, "35895": 2.12969, "35900": 2.05052, "35905": 2.07004, "35910": 2.11811, "35915": 2.1886, "35920": 2.07065, "35925": 2.19726, "35930": 2.21367, "35935": 2.11955, "35940": 2.17672, "35945": 2.13824, "35950": 2.12896, "35955": 2.19486, "35960": 2.0925, 
"35965": 2.13003, "35970": 2.08809, "35975": 2.16006, "35980": 2.18529, "35985": 2.12303, "35990": 2.12724, "35995": 2.1884, "36000": 2.13281, "36005": 2.11214, "36010": 2.16538, "36015": 2.17847, "36020": 2.13638, "36025": 2.12109, "36030": 2.07543, "36035": 2.10759, "36040": 2.06553, "36045": 2.13022, "36050": 2.13883, "36055": 2.14408, "36060": 2.11004, "36065": 2.18393, "36070": 2.17888, "36075": 2.11813, "36080": 2.1491, "36085": 2.1753, "36090": 2.20589, "36095": 2.16856, "36100": 2.10583, "36105": 2.187, "36110": 2.07797, "36115": 2.18607, "36120": 2.10267, "36125": 2.17318, "36130": 2.2482, "36135": 2.13287, "36140": 2.14324, "36145": 2.08577, "36150": 2.13639, "36155": 2.18685, "36160": 2.09486, "36165": 2.17349, "36170": 2.18207, "36175": 2.2019, "36180": 2.14393, "36185": 2.16885, "36190": 2.15745, "36195": 2.09466, "36200": 2.18797, "36205": 2.18936, "36210": 2.09716, "36215": 2.18496, "36220": 2.10438, "36225": 2.06015, "36230": 2.13328, "36235": 2.13096, "36240": 2.14807, "36245": 2.12528, "36250": 2.1249, "36255": 2.14133, "36260": 2.20648, "36265": 2.12075, "36270": 2.20571, "36275": 2.19546, "36280": 2.13159, "36285": 2.13173, "36290": 2.16722, "36295": 2.10664, "36300": 2.18919, "36305": 2.18, "36310": 2.08008, "36315": 2.16678, "36320": 2.19749, "36325": 2.15779, "36330": 2.16208, "36335": 2.12421, "36340": 2.08321, "36345": 2.14964, "36350": 2.1911, "36355": 2.16312, "36360": 2.16413, "36365": 2.17713, "36370": 2.10807, "36375": 2.17913, "36380": 2.16023, "36385": 2.19307, "36390": 2.06065, "36395": 2.16611, "36400": 2.0997, "36405": 2.08725, "36410": 2.17872, "36415": 2.12249, "36420": 2.15479, "36425": 2.15808, "36430": 2.21227, "36435": 2.10997, "36440": 2.16144, "36445": 2.10685, "36450": 2.17283, "36455": 2.15225, "36460": 2.14419, "36465": 2.11713, "36470": 2.06491, "36475": 2.18581, "36480": 2.16662, "36485": 2.12058, "36490": 2.17584, "36495": 2.15704, "36500": 2.08849, "36505": 2.0972, "36510": 2.09546, "36515": 2.15391, "36520": 2.16897, "36525": 2.14325, "36530": 2.16216, "36535": 2.15256, "36540": 2.0802, "36545": 2.09125, "36550": 2.1309, "36555": 2.15141, "36560": 2.14155, "36565": 2.0578, "36570": 2.1509, "36575": 2.16366, "36580": 2.16195, "36585": 2.19613, "36590": 2.15613, "36595": 2.19258, "36600": 2.13656, "36605": 2.09965, "36610": 2.15725, "36615": 2.07008, "36620": 2.16677, "36625": 2.11218, "36630": 2.13682, "36635": 2.10356, "36640": 2.16011, "36645": 2.17431, "36650": 2.13926, "36655": 2.1019, "36660": 2.15465, "36665": 2.14164, "36670": 2.19915, "36675": 2.14448, "36680": 2.16814, "36685": 2.17505, "36690": 2.10307, "36695": 2.10227, "36700": 2.08538, "36705": 2.16633, "36710": 2.14357, "36715": 2.12446, "36720": 2.15178, "36725": 2.17856, "36730": 2.08597, "36735": 2.14078, "36740": 2.07882, "36745": 2.12887, "36750": 2.20297, "36755": 2.13068, "36760": 2.17786, "36765": 2.17432, "36770": 2.19173, "36775": 2.19759, "36780": 2.21068, "36785": 2.23257, "36790": 2.17919, "36795": 2.14332, "36800": 2.09411, "36805": 2.07469, "36810": 2.07146, "36815": 2.13868, "36820": 2.12829, "36825": 2.12786, "36830": 2.14474, "36835": 2.16098, "36840": 2.13105, "36845": 2.16999, "36850": 2.17701, "36855": 2.17467, "36860": 2.19373, "36865": 2.12168, "36870": 2.13785, "36875": 2.15681, "36880": 2.1809, "36885": 2.11188, "36890": 2.10783, "36895": 2.18181, "36900": 2.17889, "36905": 2.15691, "36910": 2.20351, "36915": 2.09129, "36920": 2.11141, "36925": 2.14342, "36930": 2.15812, "36935": 2.06085, "36940": 2.0964, "36945": 2.14817, "36950": 2.14086, "36955": 
2.16685, "36960": 2.10229, "36965": 2.24112, "36970": 2.07372, "36975": 2.15338, "36980": 2.12351, "36985": 2.12129, "36990": 2.20364, "36995": 2.14046, "37000": 2.04878, "37005": 2.1667, "37010": 2.17314, "37015": 2.15165, "37020": 2.07798, "37025": 2.1124, "37030": 2.09506, "37035": 2.15607, "37040": 2.0694, "37045": 2.16336, "37050": 2.10602, "37055": 2.10253, "37060": 2.16313, "37065": 2.15752, "37070": 2.22246, "37075": 2.15797, "37080": 2.14403, "37085": 2.13756, "37090": 2.09998, "37095": 2.21727, "37100": 2.20013, "37105": 2.13808, "37110": 2.16598, "37115": 2.17883, "37120": 2.14095, "37125": 2.12791, "37130": 2.15285, "37135": 2.10813, "37140": 2.09215, "37145": 2.04805, "37150": 2.09949, "37155": 2.15988, "37160": 2.20962, "37165": 2.18524, "37170": 2.10209, "37175": 2.14515, "37180": 2.15621, "37185": 2.13028, "37190": 2.2164, "37195": 2.13749, "37200": 2.06438, "37205": 2.17487, "37210": 2.1855, "37215": 2.18703, "37220": 2.14256, "37225": 2.11512, "37230": 2.12421, "37235": 2.14343, "37240": 2.13686, "37245": 2.06695, "37250": 2.15724, "37255": 2.14358, "37260": 2.09361, "37265": 2.15158, "37270": 2.19568, "37275": 2.07517, "37280": 2.16912, "37285": 2.18053, "37290": 2.19045, "37295": 2.20206, "37300": 2.15347, "37305": 2.09722, "37310": 2.09584, "37315": 2.15822, "37320": 2.13755, "37325": 2.17223, "37330": 2.1556, "37335": 2.12424, "37340": 2.20055, "37345": 2.10551, "37350": 2.14403, "37355": 2.13026, "37360": 2.07806, "37365": 2.08057, "37370": 2.07941, "37375": 2.11724, "37380": 2.10363, "37385": 2.16861, "37390": 2.13521, "37395": 2.13386, "37400": 2.14858, "37405": 2.1468, "37410": 2.15139, "37415": 2.05777, "37420": 2.2212, "37425": 2.13143, "37430": 2.13044, "37435": 2.18296, "37440": 2.08728, "37445": 2.06392, "37450": 2.16831, "37455": 2.11489, "37460": 2.05585, "37465": 2.16168, "37470": 2.2349, "37475": 2.07235, "37480": 2.14007, "37485": 2.20686, "37490": 2.12918, "37495": 2.10502, "37500": 2.15499, "37505": 2.09561, "37510": 2.10145, "37515": 2.17943, "37520": 2.13003, "37525": 2.11293, "37530": 2.08936, "37535": 2.12532, "37540": 2.11115, "37545": 2.16012, "37550": 2.152, "37555": 2.15225, "37560": 2.09151, "37565": 2.11104, "37570": 2.03042, "37575": 2.16219, "37580": 2.17015, "37585": 2.14569, "37590": 2.13226, "37595": 2.1136, "37600": 2.19751, "37605": 2.11134, "37610": 2.11719, "37615": 2.11973, "37620": 2.18156, "37625": 2.18768, "37630": 2.12996, "37635": 2.18097, "37640": 2.13758, "37645": 2.17384, "37650": 2.1288, "37655": 2.1252, "37660": 2.1276, "37665": 2.10385, "37670": 2.12045, "37675": 2.1425, "37680": 2.19105, "37685": 2.12202, "37690": 2.11136, "37695": 2.15556, "37700": 2.13218, "37705": 2.16632, "37710": 2.13593, "37715": 2.10016, "37720": 2.08351, "37725": 2.04839, "37730": 2.14969, "37735": 2.12249, "37740": 2.14448, "37745": 2.14804, "37750": 2.13838, "37755": 2.17544, "37760": 2.11495, "37765": 2.10054, "37770": 2.22442, "37775": 2.17041, "37780": 2.14116, "37785": 2.15464, "37790": 2.17569, "37795": 2.22084, "37800": 2.09663, "37805": 2.10192, "37810": 2.05639, "37815": 2.14129, "37820": 2.1146, "37825": 2.12038, "37830": 2.12651, "37835": 2.17101, "37840": 2.13637, "37845": 2.15979, "37850": 2.20582, "37855": 2.14805, "37860": 2.1545, "37865": 2.03504, "37870": 2.10223, "37875": 2.05528, "37880": 2.13515, "37885": 2.12361, "37890": 2.10881, "37895": 2.09927, "37900": 2.12798, "37905": 2.11624, "37910": 2.14496, "37915": 2.18751, "37920": 2.08666, "37925": 2.15441, "37930": 2.18724, "37935": 2.11466, "37940": 2.1192, "37945": 2.14751, 
"37950": 2.09204, "37955": 2.08637, "37960": 2.08768, "37965": 2.13256, "37970": 2.15145, "37975": 2.11608, "37980": 2.11502, "37985": 2.11505, "37990": 2.14236, "37995": 2.16939, "38000": 2.13042, "38005": 2.14996, "38010": 2.16391, "38015": 2.11876, "38020": 2.13301, "38025": 2.19452, "38030": 2.19314, "38035": 2.05403, "38040": 2.20455, "38045": 2.15022, "38050": 2.13348, "38055": 2.1675, "38060": 2.11489, "38065": 2.10642, "38070": 2.08388, "38075": 2.17719, "38080": 2.12045, "38085": 2.16443, "38090": 2.17208, "38095": 2.03981, "38100": 2.18502, "38105": 2.10543, "38110": 2.11508, "38115": 2.16297, "38120": 2.05721, "38125": 2.13784, "38130": 2.15455, "38135": 2.10575, "38140": 2.15568, "38145": 2.06091, "38150": 2.11832, "38155": 2.08542, "38160": 2.07212, "38165": 2.0564, "38170": 2.08596, "38175": 2.14598, "38180": 2.12716, "38185": 2.14343, "38190": 2.09273, "38195": 2.14417, "38200": 2.06284, "38205": 2.13296, "38210": 2.15094, "38215": 2.16394, "38220": 2.20139, "38225": 2.13064, "38230": 2.14659, "38235": 2.13577, "38240": 2.14344, "38245": 2.13802, "38250": 2.09335, "38255": 2.08694, "38260": 2.11712, "38265": 2.14815, "38270": 2.12571, "38275": 2.08969, "38280": 2.15084, "38285": 2.13509, "38290": 2.09321, "38295": 2.12357, "38300": 2.08574, "38305": 2.17512, "38310": 2.15632, "38315": 2.14044, "38320": 2.15447, "38325": 2.07148, "38330": 2.12913, "38335": 2.16378, "38340": 2.15172, "38345": 2.12932, "38350": 2.12758, "38355": 2.19912, "38360": 2.13571, "38365": 2.08599, "38370": 2.18756, "38375": 2.09855, "38380": 2.23092, "38385": 2.14404, "38390": 2.12462, "38395": 2.12637, "38400": 2.05723, "38405": 2.18756, "38410": 2.07514, "38415": 2.13162, "38420": 2.1605, "38425": 2.09079, "38430": 2.21304, "38435": 2.14648, "38440": 2.22248, "38445": 2.13204, "38450": 2.07994, "38455": 2.12941, "38460": 2.10874, "38465": 2.09045, "38470": 2.20107, "38475": 2.13404, "38480": 2.15419, "38485": 2.11186, "38490": 2.11261, "38495": 2.16144, "38500": 2.18173, "38505": 2.14315, "38510": 2.12837, "38515": 2.12533, "38520": 2.10683, "38525": 2.09164, "38530": 2.15031, "38535": 2.09909, "38540": 2.15479, "38545": 2.12317, "38550": 2.15625, "38555": 2.08068, "38560": 2.1402, "38565": 2.11924, "38570": 2.15941, "38575": 2.1164, "38580": 2.16436, "38585": 2.13914, "38590": 2.05459, "38595": 2.06911, "38600": 2.12299, "38605": 2.10064, "38610": 2.09924, "38615": 2.1504, "38620": 2.17622, "38625": 2.08035, "38630": 2.11099, "38635": 2.0544, "38640": 2.12039, "38645": 2.19138, "38650": 2.12247, "38655": 2.15304, "38660": 2.07301, "38665": 2.18796, "38670": 2.10177, "38675": 2.09832, "38680": 2.18744, "38685": 2.12593, "38690": 2.18534, "38695": 2.18747, "38700": 2.13507, "38705": 2.07354, "38710": 2.07651, "38715": 2.17511, "38720": 2.16081, "38725": 2.13855, "38730": 2.10941, "38735": 2.09467, "38740": 2.1087, "38745": 2.16685, "38750": 2.082, "38755": 2.1417, "38760": 2.05644, "38765": 2.18515, "38770": 2.10466, "38775": 2.16935, "38780": 2.18387, "38785": 2.16196, "38790": 2.12959, "38795": 2.09185, "38800": 2.15562, "38805": 2.23779, "38810": 2.1344, "38815": 2.1353, "38820": 2.1382, "38825": 2.1282, "38830": 2.13528, "38835": 2.14598, "38840": 2.14988, "38845": 2.159, "38850": 2.11026, "38855": 2.08533, "38860": 2.10442, "38865": 2.13133, "38870": 2.11898, "38875": 2.2031, "38880": 2.13031, "38885": 2.08017, "38890": 2.16514, "38895": 2.12927, "38900": 2.10506, "38905": 2.12435, "38910": 2.09786, "38915": 2.13771, "38920": 2.07685, "38925": 2.09032, "38930": 2.14775, "38935": 2.15277, "38940": 
2.12422, "38945": 2.05568, "38950": 2.11138, "38955": 2.15419, "38960": 2.04081, "38965": 2.1299, "38970": 2.15853, "38975": 2.21576, "38980": 2.08599, "38985": 2.07991, "38990": 2.14113, "38995": 2.1857, "39000": 2.05393, "39005": 2.08015, "39010": 2.08203, "39015": 2.09704, "39020": 2.1079, "39025": 2.15576, "39030": 2.0889, "39035": 2.08832, "39040": 2.09679, "39045": 2.17917, "39050": 2.151, "39055": 2.12455, "39060": 2.17964, "39065": 2.11479, "39070": 2.11981, "39075": 2.11156, "39080": 2.09237, "39085": 2.1334, "39090": 2.16856, "39095": 2.14243, "39100": 2.13598, "39105": 2.1167, "39110": 2.07688, "39115": 2.10183, "39120": 2.10064, "39125": 2.14808, "39130": 2.11198, "39135": 2.12579, "39140": 2.10694, "39145": 2.08101, "39150": 2.16873, "39155": 2.07522, "39160": 2.14508, "39165": 2.1057, "39170": 2.19782, "39175": 2.17857, "39180": 2.12218, "39185": 2.15733, "39190": 2.10694, "39195": 2.14029, "39200": 2.14765, "39205": 2.11405, "39210": 2.16634, "39215": 2.09354, "39220": 2.13065, "39225": 2.10459, "39230": 2.14026, "39235": 2.04538, "39240": 2.08958, "39245": 2.11157, "39250": 2.09919, "39255": 2.18267, "39260": 2.04943, "39265": 2.08569, "39270": 2.12669, "39275": 2.1311, "39280": 2.13795, "39285": 2.131, "39290": 2.01909, "39295": 2.118, "39300": 2.19704, "39305": 2.16954, "39310": 2.12155, "39315": 2.19875, "39320": 2.02309, "39325": 2.08034, "39330": 2.10805, "39335": 2.09507, "39340": 2.1455, "39345": 2.22004, "39350": 2.12007, "39355": 2.20153, "39360": 2.07109, "39365": 2.06496, "39370": 2.11936, "39375": 2.12003, "39380": 2.13152, "39385": 2.16089, "39390": 2.08334, "39395": 2.12516, "39400": 2.10371, "39405": 2.15801, "39410": 2.0674, "39415": 2.10775, "39420": 2.11191, "39425": 2.11764, "39430": 2.13379, "39435": 2.13566, "39440": 2.09898, "39445": 2.16432, "39450": 2.14978, "39455": 2.12346, "39460": 2.15549, "39465": 2.16464, "39470": 2.16444, "39475": 2.09965, "39480": 2.04021, "39485": 2.13423, "39490": 2.07296, "39495": 2.15355, "39500": 2.12035, "39505": 2.11195, "39510": 2.1319, "39515": 2.12191, "39520": 2.14347, "39525": 2.13692, "39530": 2.1297, "39535": 2.1174, "39540": 2.16244, "39545": 2.1552, "39550": 2.11618, "39555": 2.01082, "39560": 2.14771, "39565": 2.1098, "39570": 2.0832, "39575": 2.12637, "39580": 2.15348, "39585": 2.10151, "39590": 2.09228, "39595": 2.112, "39600": 2.10467, "39605": 2.13453, "39610": 2.08999, "39615": 2.11342, "39620": 2.16234, "39625": 2.14742, "39630": 2.09631, "39635": 2.14481, "39640": 2.09433, "39645": 2.10028, "39650": 2.0712, "39655": 2.12758, "39660": 2.14075, "39665": 2.12341, "39670": 2.17103, "39675": 2.11204, "39680": 2.0901, "39685": 2.15427, "39690": 2.10064, "39695": 2.20239, "39700": 2.15675, "39705": 2.12717, "39710": 2.17373, "39715": 2.13359, "39720": 2.06729, "39725": 2.12593, "39730": 2.1449, "39735": 2.0986, "39740": 2.14322, "39745": 2.13482, "39750": 2.09851, "39755": 2.12423, "39760": 2.11624, "39765": 2.12575, "39770": 2.13488, "39775": 2.18824, "39780": 2.13417, "39785": 2.13119, "39790": 2.12868, "39795": 2.13612, "39800": 2.10009, "39805": 2.0831, "39810": 2.10822, "39815": 2.13217, "39820": 2.12193, "39825": 2.15229, "39830": 2.18086, "39835": 2.09401, "39840": 2.14502, "39845": 2.09239, "39850": 2.13587, "39855": 2.13841, "39860": 2.06931, "39865": 2.15459, "39870": 2.13246, "39875": 2.15269, "39880": 2.20291, "39885": 2.16282, "39890": 2.13123, "39895": 2.12056, "39900": 2.09157, "39905": 2.09889, "39910": 2.12441, "39915": 2.13221, "39920": 2.06322, "39925": 2.06724, "39930": 2.16389, "39935": 
2.10578, "39940": 2.08406, "39945": 2.11501, "39950": 2.06968, "39955": 2.16627, "39960": 2.1269, "39965": 2.15647, "39970": 2.08404, "39975": 2.16049, "39980": 2.08325, "39985": 2.07651, "39990": 2.05034, "39995": 2.06803, "40000": 2.09414, "40005": 2.14225, "40010": 2.15108, "40015": 2.15377, "40020": 2.13804, "40025": 2.11667, "40030": 2.14317, "40035": 2.05431, "40040": 2.10506, "40045": 2.06426, "40050": 2.20054, "40055": 2.08077, "40060": 2.13193, "40065": 2.17142, "40070": 2.12265, "40075": 2.05412, "40080": 2.11188, "40085": 2.1202, "40090": 2.11678, "40095": 2.09002, "40100": 2.15326, "40105": 2.09857, "40110": 2.10733, "40115": 2.11762, "40120": 2.14206, "40125": 2.14092, "40130": 2.11665, "40135": 2.14027, "40140": 2.10313, "40145": 2.19568, "40150": 2.10539, "40155": 2.08513, "40160": 2.09572, "40165": 2.17835, "40170": 2.0209, "40175": 2.16349, "40180": 2.13898, "40185": 2.11754, "40190": 2.13656, "40195": 2.04663, "40200": 2.14568, "40205": 2.09098, "40210": 2.13798, "40215": 2.11218, "40220": 2.20706, "40225": 2.20846, "40230": 2.03087, "40235": 2.06974, "40240": 2.02442, "40245": 2.14422, "40250": 2.1227, "40255": 2.18676, "40260": 2.18942, "40265": 2.10617, "40270": 2.06414, "40275": 2.08622, "40280": 2.14591, "40285": 2.08399, "40290": 2.10664, "40295": 2.1097, "40300": 2.11345, "40305": 2.10548, "40310": 2.15617, "40315": 2.09503, "40320": 2.12718, "40325": 2.1017, "40330": 2.1001, "40335": 2.08529, "40340": 2.06129, "40345": 2.13905, "40350": 2.13261, "40355": 2.16817, "40360": 2.07408, "40365": 2.17246, "40370": 2.07788, "40375": 2.12788, "40380": 2.07234, "40385": 2.10596, "40390": 2.14824, "40395": 2.09376, "40400": 2.07775, "40405": 2.04796, "40410": 2.23005, "40415": 2.08821, "40420": 2.14813, "40425": 2.17892, "40430": 2.17959, "40435": 2.11014, "40440": 2.13668, "40445": 2.07569, "40450": 2.14296, "40455": 2.06254, "40460": 2.09154, "40465": 2.0894, "40470": 2.10453, "40475": 2.08959, "40480": 2.05314, "40485": 2.13147, "40490": 2.10557, "40495": 2.10497, "40500": 2.12861, "40505": 2.13053, "40510": 2.09388, "40515": 2.12222, "40520": 2.15043, "40525": 2.07909, "40530": 2.10718, "40535": 2.06613, "40540": 2.1215, "40545": 2.10594, "40550": 2.12976, "40555": 2.06488, "40560": 2.11875, "40565": 2.12709, "40570": 2.02537, "40575": 2.10832, "40580": 2.10403, "40585": 2.09042, "40590": 2.0694, "40595": 2.10361, "40600": 2.07278, "40605": 2.11825, "40610": 2.06474, "40615": 2.08187, "40620": 2.13845, "40625": 2.12119, "40630": 2.08292, "40635": 2.13126, "40640": 2.08039, "40645": 2.10825, "40650": 2.10014, "40655": 2.18238, "40660": 2.16392, "40665": 2.07934, "40670": 2.13644, "40675": 2.11522, "40680": 2.13675, "40685": 2.14036, "40690": 2.06178, "40695": 2.09573, "40700": 2.10125, "40705": 2.07373, "40710": 2.11845, "40715": 2.16232, "40720": 2.1181, "40725": 2.08984, "40730": 2.09457, "40735": 2.06718, "40740": 2.10733, "40745": 2.06283, "40750": 2.08128, "40755": 2.12486, "40760": 2.07462, "40765": 2.08474, "40770": 2.14575, "40775": 2.0657, "40780": 2.10442, "40785": 2.11576, "40790": 2.10428, "40795": 2.13364, "40800": 2.10487, "40805": 2.19092, "40810": 2.15336, "40815": 2.13626, "40820": 2.08235, "40825": 2.08068, "40830": 2.12448, "40835": 2.13257, "40840": 2.13579, "40845": 2.04404, "40850": 2.05399, "40855": 2.059, "40860": 2.08779, "40865": 2.10972, "40870": 2.10409, "40875": 2.12201, "40880": 2.13658, "40885": 2.10734, "40890": 2.17342, "40895": 2.12667, "40900": 2.11362, "40905": 2.10547, "40910": 2.04134, "40915": 2.07538, "40920": 2.14575, "40925": 
2.11386, "40930": 2.10408, "40935": 2.11658, "40940": 2.10233, "40945": 2.10385, "40950": 2.06664, "40955": 2.06852, "40960": 2.07498, "40965": 2.09927, "40970": 2.05167, "40975": 2.11229, "40980": 2.17076, "40985": 2.09947, "40990": 2.10651, "40995": 2.14345, "41000": 2.09062, "41005": 2.12047, "41010": 2.07939, "41015": 2.13607, "41020": 2.08386, "41025": 2.09575, "41030": 2.08882, "41035": 2.10586, "41040": 2.06385, "41045": 2.12735, "41050": 2.0477, "41055": 2.12258, "41060": 2.01052, "41065": 2.16189, "41070": 2.12221, "41075": 2.11344, "41080": 2.04596, "41085": 2.117, "41090": 2.11545, "41095": 2.15804, "41100": 2.11807, "41105": 2.06185, "41110": 2.05907, "41115": 2.09445, "41120": 2.07096, "41125": 2.10208, "41130": 2.11463, "41135": 2.13774, "41140": 2.13286, "41145": 2.12538, "41150": 2.11106, "41155": 2.072, "41160": 2.15163, "41165": 2.0344, "41170": 2.11613, "41175": 2.12071, "41180": 2.12912, "41185": 2.10848, "41190": 2.09128, "41195": 2.01529, "41200": 2.17139, "41205": 2.102, "41210": 2.05884, "41215": 2.10397, "41220": 2.11802, "41225": 2.13351, "41230": 2.14866, "41235": 2.1501, "41240": 2.22163, "41245": 2.13903, "41250": 2.13876, "41255": 2.13417, "41260": 2.0889, "41265": 2.10167, "41270": 2.14719, "41275": 2.13507, "41280": 2.04272, "41285": 2.13368, "41290": 2.15412, "41295": 2.06583, "41300": 2.11593, "41305": 2.19329, "41310": 2.10593, "41315": 2.13176, "41320": 2.1523, "41325": 2.10073, "41330": 2.14003, "41335": 2.12126, "41340": 2.11464, "41345": 2.09195, "41350": 2.12355, "41355": 2.13153, "41360": 2.05471, "41365": 2.10898, "41370": 2.13181, "41375": 2.0435, "41380": 2.14212, "41385": 2.05285, "41390": 2.12577, "41395": 2.08913, "41400": 2.14201, "41405": 2.06665, "41410": 2.05018, "41415": 2.10296, "41420": 2.06998, "41425": 2.15145, "41430": 2.09875, "41435": 2.08494, "41440": 2.10805, "41445": 2.06876, "41450": 2.06299, "41455": 2.14706, "41460": 2.07792, "41465": 2.04137, "41470": 2.10226, "41475": 2.18783, "41480": 2.0962, "41485": 2.13035, "41490": 2.07549, "41495": 2.07744, "41500": 2.13902, "41505": 2.08424, "41510": 2.03015, "41515": 2.07995, "41520": 2.08119, "41525": 2.10853, "41530": 2.1313, "41535": 2.11927, "41540": 2.10251, "41545": 2.15818, "41550": 2.05183, "41555": 2.18807, "41560": 2.10288, "41565": 2.105, "41570": 2.14225, "41575": 2.13816, "41580": 2.10868, "41585": 2.05463, "41590": 2.05838, "41595": 2.08952, "41600": 2.09923, "41605": 2.0963, "41610": 2.1354, "41615": 2.10996, "41620": 2.159, "41625": 2.1192, "41630": 2.06714, "41635": 2.11858, "41640": 2.11984, "41645": 2.16588, "41650": 2.07712, "41655": 2.09777, "41660": 2.07121, "41665": 2.14759, "41670": 2.07027, "41675": 2.16921, "41680": 2.04166, "41685": 2.07529, "41690": 2.10228, "41695": 2.1034, "41700": 2.1461, "41705": 2.00857, "41710": 2.10927, "41715": 2.18056, "41720": 2.06263, "41725": 2.0529, "41730": 2.08699, "41735": 2.03352, "41740": 2.10594, "41745": 2.15392, "41750": 2.13937, "41755": 2.11587, "41760": 2.20489, "41765": 2.06517, "41770": 2.08888, "41775": 2.04004, "41780": 2.14943, "41785": 2.06715, "41790": 2.07257, "41795": 2.14073, "41800": 2.13955, "41805": 2.07427, "41810": 2.0727, "41815": 2.0956, "41820": 2.12085, "41825": 2.12277, "41830": 2.10602, "41835": 2.05093, "41840": 2.1216, "41845": 2.10157, "41850": 2.06717, "41855": 2.08285, "41860": 2.0677, "41865": 2.09006, "41870": 2.07079, "41875": 2.09257, "41880": 2.0864, "41885": 2.19157, "41890": 2.10853, "41895": 2.05684, "41900": 2.08601, "41905": 2.00698, "41910": 2.12352, "41915": 2.06515, "41920": 
2.07755, "41925": 2.12148, "41930": 2.07327, "41935": 2.04747, "41940": 2.11489, "41945": 2.10669, "41950": 2.11911, "41955": 2.11968, "41960": 2.06776, "41965": 2.16161, "41970": 2.11935, "41975": 2.14821, "41980": 2.04015, "41985": 2.1654, "41990": 2.09076, "41995": 2.08589, "42000": 2.09665, "42005": 2.07255, "42010": 2.09961, "42015": 2.05378, "42020": 2.15279, "42025": 2.03688, "42030": 2.13777, "42035": 2.07877, "42040": 2.10177, "42045": 2.14849, "42050": 2.1487, "42055": 2.12102, "42060": 2.07542, "42065": 2.12345, "42070": 2.11655, "42075": 2.11709, "42080": 2.15067, "42085": 2.10747, "42090": 2.06921, "42095": 2.08187, "42100": 2.14928, "42105": 2.04925, "42110": 2.10569, "42115": 2.08736, "42120": 2.1049, "42125": 2.02263, "42130": 2.07138, "42135": 2.03034, "42140": 2.05793, "42145": 2.09885, "42150": 2.09646, "42155": 2.06611, "42160": 2.10442, "42165": 2.07477, "42170": 2.07123, "42175": 2.11249, "42180": 2.15702, "42185": 2.11847, "42190": 2.0909, "42195": 2.15069, "42200": 2.11867, "42205": 2.0812, "42210": 2.12409, "42215": 2.09902, "42220": 2.05236, "42225": 2.18297, "42230": 2.15924, "42235": 2.0662, "42240": 2.14605, "42245": 2.13089, "42250": 2.0448, "42255": 2.01941, "42260": 2.12362, "42265": 2.1226, "42270": 2.08634, "42275": 2.11252, "42280": 2.13075, "42285": 2.11682, "42290": 2.14278, "42295": 2.11204, "42300": 2.12333, "42305": 2.05541, "42310": 2.07455, "42315": 2.16218, "42320": 2.10703, "42325": 2.11823, "42330": 2.02438, "42335": 2.11315, "42340": 2.12524, "42345": 2.13511, "42350": 2.05691, "42355": 2.10843, "42360": 2.11344, "42365": 2.12279, "42370": 2.11665, "42375": 2.12328, "42380": 2.18771, "42385": 2.10781, "42390": 2.13183, "42395": 2.10804, "42400": 2.14518, "42405": 2.11371, "42410": 2.12578, "42415": 2.17294, "42420": 2.12929, "42425": 2.0705, "42430": 2.13113, "42435": 2.01064, "42440": 2.10922, "42445": 2.15332, "42450": 2.07366, "42455": 2.05898, "42460": 2.09665, "42465": 2.07609, "42470": 2.08827, "42475": 2.13123, "42480": 2.03545, "42485": 2.10985, "42490": 2.16314, "42495": 1.99401, "42500": 2.1288, "42505": 2.05927, "42510": 2.11776, "42515": 2.07041, "42520": 2.0427, "42525": 2.03486, "42530": 2.08013, "42535": 2.1025, "42540": 2.08269, "42545": 2.08249, "42550": 2.1208, "42555": 2.08812, "42560": 2.11838, "42565": 2.10077, "42570": 2.0677, "42575": 2.08084, "42580": 2.10494, "42585": 2.02763, "42590": 2.1444, "42595": 2.14689, "42600": 2.12456, "42605": 2.05516, "42610": 2.09077, "42615": 2.12788, "42620": 2.10573, "42625": 2.07496, "42630": 2.05101, "42635": 2.14359, "42640": 2.13028, "42645": 2.06388, "42650": 2.19898, "42655": 2.04861, "42660": 2.04318, "42665": 2.11912, "42670": 2.11971, "42675": 2.09004, "42680": 2.11791, "42685": 2.07748, "42690": 2.13463, "42695": 2.03359, "42700": 2.14372, "42705": 2.07117, "42710": 2.10778, "42715": 2.10697, "42720": 2.07997, "42725": 2.0869, "42730": 2.08005, "42735": 2.08102, "42740": 2.06295, "42745": 2.10793, "42750": 2.10483, "42755": 2.10262, "42760": 2.09256, "42765": 2.06933, "42770": 2.11228, "42775": 2.04702, "42780": 2.06247, "42785": 2.11765, "42790": 2.16381, "42795": 2.12791, "42800": 2.08332, "42805": 2.13265, "42810": 2.08109, "42815": 2.13826, "42820": 2.13158, "42825": 2.15296, "42830": 2.09182, "42835": 2.11031, "42840": 2.13715, "42845": 2.16617, "42850": 2.06573, "42855": 2.12094, "42860": 2.10638, "42865": 2.09572, "42870": 2.13759, "42875": 2.14093, "42880": 2.09684, "42885": 2.03511, "42890": 2.12848, "42895": 2.1535, "42900": 2.10836, "42905": 2.13145, "42910": 
2.10169, "42915": 2.11583, "42920": 2.01874, "42925": 2.09457, "42930": 2.01474, "42935": 2.06061, "42940": 2.08068, "42945": 2.08903, "42950": 2.12823, "42955": 2.03867, "42960": 2.08962, "42965": 2.06527, "42970": 2.0192, "42975": 2.12899, "42980": 2.08271, "42985": 2.09882, "42990": 2.10317, "42995": 2.12138, "43000": 2.05403, "43005": 2.15247, "43010": 2.09267, "43015": 2.08471, "43020": 2.1196, "43025": 2.04911, "43030": 2.03618, "43035": 2.07931, "43040": 2.00178, "43045": 2.09812, "43050": 2.13083, "43055": 2.06236, "43060": 2.01071, "43065": 2.14236, "43070": 2.07533, "43075": 2.10077, "43080": 2.14024, "43085": 2.08343, "43090": 2.08056, "43095": 2.09416, "43100": 2.08875, "43105": 2.09879, "43110": 2.0974, "43115": 2.05086, "43120": 2.1074, "43125": 2.11251, "43130": 2.14552, "43135": 2.10487, "43140": 2.0764, "43145": 2.10499, "43150": 2.09519, "43155": 2.12186, "43160": 2.06029, "43165": 2.07916, "43170": 2.09707, "43175": 2.12544, "43180": 2.11754, "43185": 2.03285, "43190": 2.13045, "43195": 2.07852, "43200": 2.09202, "43205": 2.14761, "43210": 2.11497, "43215": 2.11021, "43220": 2.12596, "43225": 2.13388, "43230": 2.0606, "43235": 2.04547, "43240": 2.07563, "43245": 2.07906, "43250": 2.04463, "43255": 2.14883, "43260": 2.09542, "43265": 2.08737, "43270": 2.15221, "43275": 2.0646, "43280": 2.13385, "43285": 2.07757, "43290": 2.07575, "43295": 2.05625, "43300": 2.21199, "43305": 2.09344, "43310": 2.11043, "43315": 2.14567, "43320": 2.08147, "43325": 2.09965, "43330": 2.09369, "43335": 2.13606, "43340": 2.09227, "43345": 2.10106, "43350": 2.08544, "43355": 2.0597, "43360": 2.07589, "43365": 2.06517, "43370": 2.12792, "43375": 2.0276, "43380": 2.01391, "43385": 2.03242, "43390": 2.15769, "43395": 2.09842, "43400": 2.11748, "43405": 2.19832, "43410": 2.07783, "43415": 2.02405, "43420": 2.11582, "43425": 2.16416, "43430": 2.07363, "43435": 2.05335, "43440": 2.07453, "43445": 2.08725, "43450": 2.15987, "43455": 2.10705, "43460": 2.0581, "43465": 2.04552, "43470": 2.06846, "43475": 2.07539, "43480": 2.05778, "43485": 2.01861, "43490": 2.06862, "43495": 2.07507, "43500": 2.07445, "43505": 2.20121, "43510": 2.03613, "43515": 2.16593, "43520": 2.15284, "43525": 2.00825, "43530": 2.06833, "43535": 2.08047, "43540": 2.10178, "43545": 2.06339, "43550": 2.08258, "43555": 2.10726, "43560": 2.10841, "43565": 2.1463, "43570": 2.04718, "43575": 2.11373, "43580": 2.14684, "43585": 2.03612, "43590": 2.09549, "43595": 2.06342, "43600": 2.06394, "43605": 2.08598, "43610": 2.15443, "43615": 2.09653, "43620": 2.09322, "43625": 2.0985, "43630": 2.03692, "43635": 2.10729, "43640": 2.12641, "43645": 2.0992, "43650": 2.09822, "43655": 2.08557, "43660": 2.10126, "43665": 2.11838, "43670": 2.14145, "43675": 2.09935, "43680": 2.07534, "43685": 2.12129, "43690": 2.06687, "43695": 2.10739, "43700": 2.0766, "43705": 2.11913, "43710": 2.14425, "43715": 2.12832, "43720": 2.0394, "43725": 2.04099, "43730": 2.09884, "43735": 2.19837, "43740": 2.09736, "43745": 2.04643, "43750": 2.08495, "43755": 2.02079, "43760": 2.11586, "43765": 1.99935, "43770": 2.08506, "43775": 2.06576, "43780": 2.11632, "43785": 1.9963, "43790": 2.10845, "43795": 2.12931, "43800": 2.04976, "43805": 2.0069, "43810": 2.07233, "43815": 2.07975, "43820": 2.06992, "43825": 2.09441, "43830": 2.16073, "43835": 2.06002, "43840": 2.06619, "43845": 2.10673, "43850": 2.06016, "43855": 2.09165, "43860": 2.08946, "43865": 2.08685, "43870": 2.06598, "43875": 2.11805, "43880": 2.13631, "43885": 2.10318, "43890": 2.07696, "43895": 2.11331, "43900": 2.0629, 
"43905": 2.09995, "43910": 2.04144, "43915": 2.0934, "43920": 2.11968, "43925": 2.12001, "43930": 2.13732, "43935": 2.0807, "43940": 2.11395, "43945": 2.11132, "43950": 2.06204, "43955": 2.07044, "43960": 2.13363, "43965": 2.04544, "43970": 2.06961, "43975": 2.07515, "43980": 2.09106, "43985": 2.09042, "43990": 2.09512, "43995": 2.07451, "44000": 2.09147, "44005": 2.02533, "44010": 2.07983, "44015": 2.1066, "44020": 2.11867, "44025": 2.06696, "44030": 2.08345, "44035": 2.11725, "44040": 2.07977, "44045": 2.07054, "44050": 2.06751, "44055": 2.08981, "44060": 2.08295, "44065": 2.02345, "44070": 2.06235, "44075": 2.0625, "44080": 2.10587, "44085": 2.0797, "44090": 2.10406, "44095": 2.05646, "44100": 2.11578, "44105": 2.12734, "44110": 2.12209, "44115": 2.01503, "44120": 2.06266, "44125": 2.07663, "44130": 2.11762, "44135": 2.04875, "44140": 2.1018, "44145": 2.10024, "44150": 2.05018, "44155": 2.07006, "44160": 2.07224, "44165": 2.05923, "44170": 2.11218, "44175": 1.99732, "44180": 2.14128, "44185": 2.08942, "44190": 2.02291, "44195": 2.03974, "44200": 2.04509, "44205": 2.0792, "44210": 2.02033, "44215": 2.12147, "44220": 2.05849, "44225": 2.07654, "44230": 2.10213, "44235": 2.16584, "44240": 2.12587, "44245": 2.06146, "44250": 2.06609, "44255": 2.01104, "44260": 2.12022, "44265": 2.07641, "44270": 2.15178, "44275": 2.09776, "44280": 2.04081, "44285": 2.115, "44290": 2.05104, "44295": 2.04377, "44300": 2.11131, "44305": 2.08649, "44310": 2.13828, "44315": 2.00965, "44320": 2.08808, "44325": 2.08443, "44330": 2.10506, "44335": 2.09098, "44340": 2.01498, "44345": 2.10883, "44350": 2.09521, "44355": 2.13596, "44360": 1.97261, "44365": 2.15424, "44370": 2.13585, "44375": 2.07213, "44380": 2.11912, "44385": 2.14563, "44390": 2.06422, "44395": 2.11841, "44400": 2.12091, "44405": 2.03966, "44410": 2.08282, "44415": 2.0178, "44420": 2.08129, "44425": 2.10272, "44430": 2.15975, "44435": 2.03552, "44440": 2.02792, "44445": 2.06115, "44450": 2.0646, "44455": 2.08347, "44460": 2.09709, "44465": 2.08712, "44470": 2.13106, "44475": 2.04228, "44480": 2.08589, "44485": 2.06292, "44490": 2.01753, "44495": 2.09814, "44500": 2.07067, "44505": 2.12548, "44510": 2.07512, "44515": 2.0615, "44520": 2.10236, "44525": 2.08037, "44530": 2.1025, "44535": 2.03431, "44540": 2.06385, "44545": 2.08908, "44550": 2.10299, "44555": 2.11885, "44560": 2.03587, "44565": 2.0849, "44570": 2.16864, "44575": 2.10355, "44580": 2.05407, "44585": 2.10785, "44590": 2.05057, "44595": 2.11268, "44600": 2.06311, "44605": 2.09714, "44610": 2.05357, "44615": 2.10744, "44620": 2.13989, "44625": 2.12353, "44630": 2.10579, "44635": 2.06761, "44640": 2.09282, "44645": 2.05016, "44650": 2.11916, "44655": 2.10148, "44660": 2.1281, "44665": 2.10626, "44670": 2.14216, "44675": 2.06523, "44680": 2.0534, "44685": 2.08445, "44690": 2.07295, "44695": 2.06792, "44700": 2.11665, "44705": 2.09017, "44710": 2.14716, "44715": 2.05211, "44720": 2.0299, "44725": 2.0508, "44730": 2.13536, "44735": 2.05153, "44740": 2.15671, "44745": 2.05696, "44750": 2.08721, "44755": 2.0773, "44760": 2.06122, "44765": 2.09575, "44770": 1.97327, "44775": 2.10103, "44780": 2.05048, "44785": 2.08794, "44790": 2.11795, "44795": 2.11829, "44800": 2.10564, "44805": 2.1211, "44810": 2.10338, "44815": 2.10661, "44820": 2.06568, "44825": 2.05993, "44830": 2.03047, "44835": 2.09459, "44840": 2.08873, "44845": 2.03555, "44850": 2.11477, "44855": 2.09877, "44860": 2.09833, "44865": 2.08859, "44870": 2.09278, "44875": 2.07804, "44880": 2.07916, "44885": 2.0139, "44890": 2.04583, "44895": 
2.12273, "44900": 2.04284, "44905": 2.12715, "44910": 2.07399, "44915": 2.09331, "44920": 2.04461, "44925": 2.02897, "44930": 2.16586, "44935": 2.10087, "44940": 2.07904, "44945": 2.09172, "44950": 2.12173, "44955": 2.10464, "44960": 2.10951, "44965": 2.05971, "44970": 2.06716, "44975": 2.07564, "44980": 2.03591, "44985": 2.06709, "44990": 2.13423, "44995": 1.99322, "45000": 2.08073, "45005": 2.10076, "45010": 2.04798, "45015": 2.07172, "45020": 2.10011, "45025": 2.06294, "45030": 2.10505, "45035": 2.07486, "45040": 2.09841, "45045": 2.03987, "45050": 2.01004, "45055": 2.04918, "45060": 2.05811, "45065": 2.02906, "45070": 2.04228, "45075": 2.08495, "45080": 2.05581, "45085": 2.10096, "45090": 2.0459, "45095": 2.11229, "45100": 2.05666, "45105": 2.062, "45110": 2.12762, "45115": 2.11029, "45120": 2.10668, "45125": 2.10688, "45130": 2.10506, "45135": 2.09333, "45140": 2.12962, "45145": 2.08108, "45150": 2.03427, "45155": 2.05095, "45160": 2.10373, "45165": 2.11823, "45170": 2.04895, "45175": 2.09306, "45180": 2.08869, "45185": 2.09512, "45190": 2.08676, "45195": 2.1276, "45200": 2.09992, "45205": 2.03104, "45210": 2.0527, "45215": 2.03598, "45220": 2.0401, "45225": 2.11669, "45230": 2.08318, "45235": 2.13966, "45240": 2.0651, "45245": 2.05102, "45250": 2.07297, "45255": 2.07493, "45260": 2.10777, "45265": 2.1594, "45270": 1.99893, "45275": 2.10771, "45280": 2.07892, "45285": 2.06323, "45290": 2.13958, "45295": 2.13592, "45300": 2.17805, "45305": 2.11022, "45310": 2.02613, "45315": 2.02601, "45320": 2.03937, "45325": 2.07232, "45330": 2.04495, "45335": 2.07778, "45340": 2.06078, "45345": 2.14231, "45350": 2.12148, "45355": 2.10375, "45360": 2.06091, "45365": 2.11294, "45370": 2.09554, "45375": 2.03761, "45380": 2.11138, "45385": 2.13428, "45390": 2.0924, "45395": 2.16834, "45400": 2.05274, "45405": 2.10646, "45410": 2.12583, "45415": 2.07858, "45420": 2.05476, "45425": 2.09157, "45430": 2.05726, "45435": 2.10305, "45440": 2.02774, "45445": 2.03504, "45450": 2.20256, "45455": 2.03235, "45460": 2.11044, "45465": 2.08171, "45470": 2.10957, "45475": 2.07306, "45480": 2.11231, "45485": 2.07873, "45490": 2.05465, "45495": 2.08396, "45500": 2.09661, "45505": 2.08884, "45510": 2.12423, "45515": 2.09563, "45520": 2.12992, "45525": 2.0843, "45530": 2.0073, "45535": 2.09859, "45540": 2.09015, "45545": 2.03969, "45550": 2.09814, "45555": 2.10243, "45560": 2.11933, "45565": 2.05978, "45570": 2.05977, "45575": 2.09107, "45580": 2.03155, "45585": 2.1452, "45590": 2.11673, "45595": 2.10156, "45600": 2.01363, "45605": 2.05213, "45610": 2.04749, "45615": 2.08353, "45620": 2.01614, "45625": 2.05248, "45630": 2.07923, "45635": 2.07258, "45640": 2.0524, "45645": 2.10925, "45650": 2.09457, "45655": 2.03657, "45660": 2.13355, "45665": 2.06692, "45670": 2.07529, "45675": 2.05258, "45680": 2.07808, "45685": 2.05934, "45690": 2.10409, "45695": 2.06156, "45700": 2.13759, "45705": 2.12381, "45710": 2.0656, "45715": 2.03271, "45720": 2.09586, "45725": 2.08226, "45730": 2.09795, "45735": 2.10877, "45740": 2.10679, "45745": 2.06287, "45750": 2.03626, "45755": 2.06985, "45760": 2.08766, "45765": 2.11187, "45770": 2.05256, "45775": 2.06838, "45780": 2.01392, "45785": 2.01424, "45790": 2.07094, "45795": 2.03261, "45800": 2.08309, "45805": 2.12696, "45810": 2.06156, "45815": 2.05893, "45820": 2.00116, "45825": 2.07266, "45830": 2.10002, "45835": 2.13015, "45840": 2.11822, "45845": 2.05102, "45850": 2.07402, "45855": 1.99756, "45860": 2.07145, "45865": 2.08935, "45870": 2.07918, "45875": 2.08444, "45880": 2.09815, "45885": 
2.10712, "45890": 2.07879, "45895": 2.01083, "45900": 2.11717, "45905": 2.18234, "45910": 2.02862, "45915": 2.03832, "45920": 2.08893, "45925": 2.07454, "45930": 2.06362, "45935": 2.01342, "45940": 2.10082, "45945": 2.11752, "45950": 2.15832, "45955": 2.08359, "45960": 2.03313, "45965": 2.09132, "45970": 2.05647, "45975": 2.07603, "45980": 2.09019, "45985": 2.08677, "45990": 2.09828, "45995": 2.02686, "46000": 2.0469, "46005": 2.05322, "46010": 2.04133, "46015": 2.0058, "46020": 2.12386, "46025": 2.04116, "46030": 2.01259, "46035": 2.06859, "46040": 2.10602, "46045": 2.10032, "46050": 2.0984, "46055": 2.06767, "46060": 2.10245, "46065": 2.08491, "46070": 2.07788, "46075": 2.07647, "46080": 2.0219, "46085": 2.10767, "46090": 2.10123, "46095": 2.07014, "46100": 2.05968, "46105": 2.09047, "46110": 2.06832, "46115": 2.04578, "46120": 2.04687, "46125": 2.01848, "46130": 1.97113, "46135": 2.00742, "46140": 2.0741, "46145": 2.0958, "46150": 2.10917, "46155": 2.1206, "46160": 2.00702, "46165": 2.07283, "46170": 2.06256, "46175": 2.10605, "46180": 2.08281, "46185": 2.07114, "46190": 2.01158, "46195": 2.05664, "46200": 2.06539, "46205": 2.07617, "46210": 2.11406, "46215": 2.05469, "46220": 1.97587, "46225": 2.10452, "46230": 2.1651, "46235": 2.05548, "46240": 2.08949, "46245": 2.04408, "46250": 2.11497, "46255": 2.05315, "46260": 2.0703, "46265": 2.00188, "46270": 2.06908, "46275": 2.10966, "46280": 2.07163, "46285": 2.06928, "46290": 2.08151, "46295": 2.04715, "46300": 2.09655, "46305": 2.06792, "46310": 2.14954, "46315": 2.00634, "46320": 2.10765, "46325": 2.09732, "46330": 2.06138, "46335": 2.11066, "46340": 2.0504, "46345": 2.01201, "46350": 2.06583, "46355": 2.1159, "46360": 2.09672, "46365": 2.05158, "46370": 2.04308, "46375": 2.05896, "46380": 2.04711, "46385": 2.08681, "46390": 2.06953, "46395": 2.08106, "46400": 2.08164, "46405": 2.10415, "46410": 2.13023, "46415": 2.09504, "46420": 2.07686, "46425": 2.10776, "46430": 2.06964, "46435": 2.0934, "46440": 2.01812, "46445": 2.06247, "46450": 2.11017, "46455": 2.04246, "46460": 2.09752, "46465": 2.03843, "46470": 2.06234, "46475": 2.05572, "46480": 2.0263, "46485": 1.99207, "46490": 2.07934, "46495": 2.03314, "46500": 2.08486, "46505": 2.09923, "46510": 2.08294, "46515": 2.04759, "46520": 2.05852, "46525": 2.15152, "46530": 2.08489, "46535": 2.09678, "46540": 2.0744, "46545": 2.09411, "46550": 2.10462, "46555": 2.05225, "46560": 2.11592, "46565": 2.13414, "46570": 2.08277, "46575": 2.0527, "46580": 2.04401, "46585": 2.05905, "46590": 2.10996, "46595": 2.05519, "46600": 2.05996, "46605": 2.11568, "46610": 2.05279, "46615": 2.09277, "46620": 2.07218, "46625": 2.04706, "46630": 2.03666, "46635": 2.06466, "46640": 2.10268, "46645": 2.10189, "46650": 2.09919, "46655": 2.10222, "46660": 2.06695, "46665": 2.07992, "46670": 2.12604, "46675": 1.99136, "46680": 2.10462, "46685": 2.11902, "46690": 2.04514, "46695": 2.05828, "46700": 2.05393, "46705": 2.0637, "46710": 2.09753, "46715": 2.05474, "46720": 2.03098, "46725": 2.03113, "46730": 2.07533, "46735": 2.11065, "46740": 1.9835, "46745": 2.09271, "46750": 2.08053, "46755": 2.08753, "46760": 2.11546, "46765": 2.09539, "46770": 2.09322, "46775": 2.03908, "46780": 2.02735, "46785": 2.08318, "46790": 2.07487, "46795": 2.03696, "46800": 2.04264, "46805": 2.1018, "46810": 2.10095, "46815": 2.07371, "46820": 2.05194, "46825": 2.07566, "46830": 2.0027, "46835": 2.05723, "46840": 2.12325, "46845": 2.12887, "46850": 2.06444, "46855": 2.03854, "46860": 2.0556, "46865": 2.01109, "46870": 2.11332, "46875": 2.09573, 
"46880": 2.07483, "46885": 2.06968, "46890": 2.07429, "46895": 2.06597, "46900": 2.05988, "46905": 2.03465, "46910": 2.01335, "46915": 2.05306, "46920": 2.09018, "46925": 2.14872, "46930": 2.07586, "46935": 2.03939, "46940": 2.09963, "46945": 2.12582, "46950": 2.06537, "46955": 2.01312, "46960": 2.07459, "46965": 2.06963, "46970": 2.09605, "46975": 2.06254, "46980": 2.04013, "46985": 2.03926, "46990": 2.06386, "46995": 2.09878, "47000": 1.98325, "47005": 2.06059, "47010": 2.00416, "47015": 2.04589, "47020": 2.14539, "47025": 2.05602, "47030": 2.0622, "47035": 2.05208, "47040": 2.07565, "47045": 2.10934, "47050": 2.08364, "47055": 2.11027, "47060": 2.1003, "47065": 2.04627, "47070": 2.03086, "47075": 2.056, "47080": 1.99792, "47085": 2.08063, "47090": 2.0388, "47095": 2.05661, "47100": 2.0595, "47105": 2.12422, "47110": 2.08064, "47115": 2.06542, "47120": 2.07836, "47125": 2.08257, "47130": 2.13153, "47135": 2.08815, "47140": 2.06966, "47145": 2.13944, "47150": 2.03029, "47155": 2.03613, "47160": 2.15557, "47165": 2.06413, "47170": 2.10565, "47175": 2.08489, "47180": 2.0852, "47185": 2.09226, "47190": 2.06302, "47195": 2.09654, "47200": 2.04261, "47205": 2.15199, "47210": 2.09651, "47215": 2.10965, "47220": 2.13015, "47225": 2.0293, "47230": 2.0734, "47235": 1.99932, "47240": 2.09505, "47245": 2.0654, "47250": 2.07227, "47255": 2.10209, "47260": 2.10403, "47265": 2.04029, "47270": 2.03821, "47275": 2.08407, "47280": 2.06806, "47285": 2.01797, "47290": 2.00676, "47295": 2.10676, "47300": 2.10137, "47305": 2.19883, "47310": 2.04668, "47315": 2.05988, "47320": 2.0842, "47325": 2.11851, "47330": 2.09125, "47335": 2.01332, "47340": 2.066, "47345": 2.05883, "47350": 2.0603, "47355": 2.04057, "47360": 2.09014, "47365": 2.0856, "47370": 2.04762, "47375": 2.05665, "47380": 2.09616, "47385": 2.09948, "47390": 1.97939, "47395": 2.07216, "47400": 2.1181, "47405": 2.03847, "47410": 2.09145, "47415": 2.0706, "47420": 2.06843, "47425": 2.10843, "47430": 2.11323, "47435": 2.09743, "47440": 2.02487, "47445": 2.0078, "47450": 2.04306, "47455": 2.07195, "47460": 2.02706, "47465": 2.0886, "47470": 2.06093, "47475": 2.09626, "47480": 2.06441, "47485": 2.03457, "47490": 1.99648, "47495": 2.01274, "47500": 2.08452, "47505": 2.07045, "47510": 2.10095, "47515": 2.04904, "47520": 2.11302, "47525": 2.04898, "47530": 2.00907, "47535": 2.05612, "47540": 2.10382, "47545": 2.0822, "47550": 2.09654, "47555": 2.11902, "47560": 2.05027, "47565": 2.06455, "47570": 2.02751, "47575": 2.08679, "47580": 2.08935, "47585": 2.07064, "47590": 2.09112, "47595": 2.11052, "47600": 2.09099, "47605": 2.0807, "47610": 2.03904, "47615": 2.03278, "47620": 2.10395, "47625": 2.01653, "47630": 2.07468, "47635": 2.0108, "47640": 2.03142, "47645": 2.00205, "47650": 2.05126, "47655": 2.07147, "47660": 2.04559, "47665": 2.05392, "47670": 2.06321, "47675": 2.07541, "47680": 2.06285, "47685": 2.06703, "47690": 2.03066, "47695": 2.01639, "47700": 2.06506, "47705": 2.05772, "47710": 2.01998, "47715": 2.10711, "47720": 2.05649, "47725": 2.03791, "47730": 2.08497, "47735": 2.06428, "47740": 2.06185, "47745": 2.08163, "47750": 2.06184, "47755": 2.0789, "47760": 2.05827, "47765": 2.07989, "47770": 2.09457, "47775": 2.0437, "47780": 2.0526, "47785": 2.0682, "47790": 2.08886, "47795": 2.05269, "47800": 2.06264, "47805": 2.05356, "47810": 2.05985, "47815": 2.08343, "47820": 2.114, "47825": 2.04182, "47830": 2.06753, "47835": 2.16078, "47840": 1.99857, "47845": 2.11035, "47850": 2.12445, "47855": 2.07731, "47860": 2.07752, "47865": 2.01319, "47870": 2.10248, 
"47875": 2.07921, "47880": 2.05048, "47885": 2.09302, "47890": 2.07251, "47895": 2.09052, "47900": 2.07663, "47905": 2.0737, "47910": 2.08677, "47915": 2.07836, "47920": 2.0305, "47925": 2.0658, "47930": 2.12049, "47935": 2.06659, "47940": 2.11087, "47945": 2.04973, "47950": 2.05406, "47955": 2.06493, "47960": 2.07085, "47965": 2.03646, "47970": 2.07082, "47975": 2.09982, "47980": 2.12722, "47985": 2.07591, "47990": 2.0174, "47995": 2.06599, "48000": 1.98045, "48005": 2.04557, "48010": 2.10603, "48015": 2.069, "48020": 2.02338, "48025": 2.10205, "48030": 2.08948, "48035": 2.03101, "48040": 2.08819, "48045": 2.06849, "48050": 2.00353, "48055": 2.08585, "48060": 2.06288, "48065": 2.00965, "48070": 2.08424, "48075": 2.07984, "48080": 2.03361, "48085": 2.07575, "48090": 2.12532, "48095": 2.06468, "48100": 1.98088, "48105": 2.04534, "48110": 2.17629, "48115": 2.05454, "48120": 2.02513, "48125": 1.98816, "48130": 1.98356, "48135": 2.1028, "48140": 2.04661, "48145": 2.06176, "48150": 2.04182, "48155": 2.03403, "48160": 2.10105, "48165": 2.06743, "48170": 2.08583, "48175": 2.03943, "48180": 2.11867, "48185": 2.06137, "48190": 2.01324, "48195": 2.01855, "48200": 2.06259, "48205": 2.03845, "48210": 2.05968, "48215": 2.12234, "48220": 2.07689, "48225": 2.03855, "48230": 2.07037, "48235": 2.0099, "48240": 2.03889, "48245": 2.03173, "48250": 2.0642, "48255": 2.08206, "48260": 2.01428, "48265": 2.07883, "48270": 2.07304, "48275": 2.02088, "48280": 2.10958, "48285": 2.02599, "48290": 2.04941, "48295": 2.02224, "48300": 2.08216, "48305": 2.04236, "48310": 2.09143, "48315": 2.11597, "48320": 1.99938, "48325": 2.02529, "48330": 2.00912, "48335": 2.05206, "48340": 2.05661, "48345": 2.09312, "48350": 2.0676, "48355": 2.04436, "48360": 2.05396, "48365": 2.06053, "48370": 2.08279, "48375": 2.07859, "48380": 2.11313, "48385": 2.07307, "48390": 2.11082, "48395": 2.07426, "48400": 2.10124, "48405": 2.04763, "48410": 2.03497, "48415": 2.09375, "48420": 1.99441, "48425": 2.03492, "48430": 2.05247, "48435": 2.06127, "48440": 2.00417, "48445": 2.09241, "48450": 2.0236, "48455": 2.00738, "48460": 2.06269, "48465": 2.11382, "48470": 2.08136, "48475": 1.98006, "48480": 2.03449, "48485": 2.00693, "48490": 2.11321, "48495": 2.02962, "48500": 2.0337, "48505": 2.08909, "48510": 2.07189, "48515": 2.05387, "48520": 2.06761, "48525": 2.08245, "48530": 2.06331, "48535": 1.95817, "48540": 2.06529, "48545": 2.04025, "48550": 2.07022, "48555": 2.09311, "48560": 2.0915, "48565": 2.015, "48570": 2.09661, "48575": 2.07264, "48580": 1.97247, "48585": 2.06048, "48590": 2.10404, "48595": 2.08112, "48600": 2.0565, "48605": 2.06251, "48610": 2.10496, "48615": 2.10294, "48620": 2.07173, "48625": 2.08156, "48630": 1.99368, "48635": 2.06615, "48640": 2.07534, "48645": 2.07501, "48650": 2.07937, "48655": 2.07164, "48660": 2.06064, "48665": 2.02331, "48670": 2.07979, "48675": 2.1146, "48680": 2.08609, "48685": 2.09963, "48690": 2.07472, "48695": 2.07812, "48700": 2.06052, "48705": 2.09114, "48710": 2.05311, "48715": 1.99597, "48720": 2.06192, "48725": 2.08231, "48730": 2.14107, "48735": 2.04603, "48740": 2.10749, "48745": 2.0347, "48750": 2.02396, "48755": 2.03615, "48760": 2.02439, "48765": 2.11387, "48770": 2.038, "48775": 2.05666, "48780": 2.03071, "48785": 2.07481, "48790": 2.06482, "48795": 2.04636, "48800": 2.07991, "48805": 2.06038, "48810": 2.06502, "48815": 2.03952, "48820": 2.04889, "48825": 2.00599, "48830": 2.01277, "48835": 2.02657, "48840": 2.09703, "48845": 2.08214, "48850": 2.13512, "48855": 1.99921, "48860": 2.05583, "48865": 
2.04907, "48870": 2.11722, "48875": 2.02609, "48880": 2.01887, "48885": 2.00488, "48890": 2.04001, "48895": 2.06929, "48900": 2.04485, "48905": 2.05756, "48910": 2.14197, "48915": 2.09088, "48920": 1.98887, "48925": 2.10499, "48930": 2.09464, "48935": 2.02611, "48940": 2.04845, "48945": 2.05523, "48950": 2.08578, "48955": 2.0453, "48960": 2.02113, "48965": 2.04801, "48970": 2.12249, "48975": 2.11042, "48980": 2.00159, "48985": 2.08927, "48990": 2.0415, "48995": 2.02535, "49000": 2.05491, "49005": 2.03521, "49010": 2.05743, "49015": 2.08392, "49020": 2.04908, "49025": 2.07824, "49030": 2.04416, "49035": 2.07153, "49040": 2.03041, "49045": 2.01937, "49050": 2.05599, "49055": 2.09676, "49060": 2.10936, "49065": 2.00084, "49070": 2.12344, "49075": 2.04506, "49080": 2.10036, "49085": 2.00663, "49090": 2.05421, "49095": 2.1138, "49100": 2.01677, "49105": 2.04111, "49110": 2.02936, "49115": 2.09716, "49120": 2.05507, "49125": 2.0612, "49130": 2.08605, "49135": 2.07052, "49140": 2.10422, "49145": 2.07881, "49150": 2.10819, "49155": 2.08646, "49160": 2.06952, "49165": 2.09326, "49170": 2.055, "49175": 2.01928, "49180": 2.10872, "49185": 2.14196, "49190": 2.14642, "49195": 2.10322, "49200": 2.08693, "49205": 2.05989, "49210": 2.06036, "49215": 2.07921, "49220": 2.05989, "49225": 2.0319, "49230": 2.00681, "49235": 2.0525, "49240": 2.10032, "49245": 2.01149, "49250": 1.99845, "49255": 2.05484, "49260": 2.07725, "49265": 2.06334, "49270": 2.06097, "49275": 2.02941, "49280": 2.07446, "49285": 2.03057, "49290": 2.03697, "49295": 2.09999, "49300": 2.08891, "49305": 2.04615, "49310": 2.03695, "49315": 2.09692, "49320": 1.99481, "49325": 2.03281, "49330": 2.01496, "49335": 2.06459, "49340": 2.06657, "49345": 2.07798, "49350": 2.0816, "49355": 1.98252, "49360": 2.02665, "49365": 2.09516, "49370": 2.02922, "49375": 2.0276, "49380": 2.07924, "49385": 1.99366, "49390": 2.0289, "49395": 2.00307, "49400": 2.03326, "49405": 2.1124, "49410": 2.03452, "49415": 2.00845, "49420": 2.06248, "49425": 2.04568, "49430": 2.02794, "49435": 2.06399, "49440": 2.04169, "49445": 2.1153, "49450": 1.9963, "49455": 2.06601, "49460": 2.09228, "49465": 2.04879, "49470": 2.04772, "49475": 2.1036, "49480": 2.05871, "49485": 2.0235, "49490": 2.01947, "49495": 2.04358, "49500": 2.05814, "49505": 2.0046, "49510": 2.11176, "49515": 2.03834, "49520": 2.00115, "49525": 2.03982, "49530": 2.06514, "49535": 1.98929, "49540": 1.9965, "49545": 2.10509, "49550": 2.11638, "49555": 2.06383, "49560": 2.0954, "49565": 2.07327, "49570": 2.09194, "49575": 2.06699, "49580": 1.99642, "49585": 2.04774, "49590": 1.99789, "49595": 2.11875, "49600": 2.05658, "49605": 2.08636, "49610": 2.03748, "49615": 2.03814, "49620": 2.07467, "49625": 2.0312, "49630": 2.07508, "49635": 2.05158, "49640": 2.04212, "49645": 2.08305, "49650": 2.01038, "49655": 2.05379, "49660": 2.00064, "49665": 2.02703, "49670": 1.98432, "49675": 2.06222, "49680": 2.02767, "49685": 2.06082, "49690": 2.07803, "49695": 2.04622, "49700": 2.11315, "49705": 2.14659, "49710": 2.116, "49715": 2.12966, "49720": 1.99154, "49725": 2.05585, "49730": 2.05099, "49735": 2.11364, "49740": 2.05516, "49745": 2.06033, "49750": 2.04577, "49755": 2.06201, "49760": 2.08009, "49765": 1.99156, "49770": 2.11599, "49775": 2.04902, "49780": 2.1194, "49785": 2.03334, "49790": 2.03381, "49795": 2.07305, "49800": 2.05528, "49805": 2.00959, "49810": 2.03032, "49815": 2.0755, "49820": 2.04255, "49825": 2.05842, "49830": 2.08344, "49835": 2.08023, "49840": 2.10264, "49845": 2.09318, "49850": 2.04565, "49855": 2.04935, 
"49860": 2.12767, "49865": 2.07638, "49870": 2.0965, "49875": 2.06348, "49880": 2.05093, "49885": 2.04259, "49890": 2.07275, "49895": 2.03687, "49900": 2.1122, "49905": 1.97504, "49910": 2.16352, "49915": 2.01508, "49920": 1.97511, "49925": 2.15806, "49930": 2.06859, "49935": 1.98656, "49940": 2.04004, "49945": 2.10249, "49950": 2.05002, "49955": 2.07755, "49960": 2.02959, "49965": 2.08989, "49970": 2.02409, "49975": 2.07533, "49980": 2.09695, "49985": 2.07002, "49990": 2.03484, "49995": 1.99377, "50000": 2.05312, "50005": 2.07199, "50010": 2.05219, "50015": 2.04993, "50020": 2.06136, "50025": 2.03322, "50030": 2.03413, "50035": 2.02472, "50040": 2.04837, "50045": 2.06873, "50050": 2.04281, "50055": 1.99876, "50060": 2.096, "50065": 2.08112, "50070": 2.10291, "50075": 2.07611, "50080": 2.04195, "50085": 2.06422, "50090": 2.04757, "50095": 2.13097, "50100": 2.05822, "50105": 2.07421, "50110": 2.09465, "50115": 2.04761, "50120": 2.02294, "50125": 2.07056, "50130": 1.98747, "50135": 2.09857, "50140": 2.06867, "50145": 2.09955, "50150": 1.9425, "50155": 2.00869, "50160": 2.07339, "50165": 2.07453, "50170": 1.99189, "50175": 2.00709, "50180": 2.08771, "50185": 2.02145, "50190": 2.03278, "50195": 2.0301, "50200": 2.08474, "50205": 2.03122, "50210": 2.03245, "50215": 2.04365, "50220": 2.04288, "50225": 2.07532, "50230": 2.0047, "50235": 2.14444, "50240": 1.97894, "50245": 2.09724, "50250": 1.98168, "50255": 2.03326, "50260": 2.11253, "50265": 2.02655, "50270": 2.1469, "50275": 2.0658, "50280": 2.11322, "50285": 2.00227, "50290": 2.00267, "50295": 2.03816, "50300": 2.08475, "50305": 2.06865, "50310": 2.02554, "50315": 2.02739, "50320": 1.94973, "50325": 2.11778, "50330": 2.03681, "50335": 2.02468, "50340": 2.10801, "50345": 2.03015, "50350": 2.12854, "50355": 2.03377, "50360": 2.03192, "50365": 2.03998, "50370": 2.11555, "50375": 2.0895, "50380": 2.06749, "50385": 2.07264, "50390": 1.99413, "50395": 2.06572, "50400": 2.0203, "50405": 2.08368, "50410": 2.06954, "50415": 2.07653, "50420": 2.04556, "50425": 2.07981, "50430": 2.097, "50435": 2.05384, "50440": 1.97585, "50445": 2.03391, "50450": 2.03459, "50455": 1.99967, "50460": 2.0146, "50465": 2.08744, "50470": 2.10025, "50475": 2.11029, "50480": 2.01703, "50485": 2.02907, "50490": 1.99282, "50495": 2.05968, "50500": 2.05378, "50505": 2.00509, "50510": 1.97824, "50515": 2.06906, "50520": 1.9953, "50525": 2.1004, "50530": 2.13048, "50535": 2.07285, "50540": 2.07509, "50545": 1.98057, "50550": 2.0378, "50555": 2.08501, "50560": 2.05121, "50565": 2.03879, "50570": 2.04483, "50575": 2.0085, "50580": 2.0581, "50585": 2.07073, "50590": 2.0371, "50595": 2.06553, "50600": 2.01773, "50605": 2.02484, "50610": 1.95621, "50615": 2.02083, "50620": 2.07876, "50625": 1.99832, "50630": 1.98673, "50635": 2.01756, "50640": 2.06048, "50645": 2.06998, "50650": 2.01031, "50655": 2.128, "50660": 2.02734, "50665": 2.11141, "50670": 2.00752, "50675": 1.98008, "50680": 2.01126, "50685": 2.05724, "50690": 1.98996, "50695": 2.02301, "50700": 2.06294, "50705": 2.05736, "50710": 2.0899, "50715": 2.08106, "50720": 2.07272, "50725": 2.01304, "50730": 2.04417, "50735": 2.08799, "50740": 2.01255, "50745": 2.10354, "50750": 1.99796, "50755": 2.03527, "50760": 2.07907, "50765": 2.05112, "50770": 2.07358, "50775": 2.03391, "50780": 2.02429, "50785": 2.07629, "50790": 1.9992, "50795": 2.00318, "50800": 2.08824, "50805": 2.06929, "50810": 2.03087, "50815": 2.02013, "50820": 1.96059, "50825": 2.04784, "50830": 2.07985, "50835": 2.01374, "50840": 2.0418, "50845": 1.98356, "50850": 
2.08713, "50855": 2.06099, "50860": 2.02775}}, "num-zeros": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 40023.0, "5": 40006.0, "10": 41478.0, "15": 37264.0, "20": 40806.0, "25": 40124.0, "30": 40730.0, "35": 39343.0, "40": 41487.0, "45": 42265.0, "50": 38436.0, "55": 39348.0, "60": 42264.0, "65": 39970.0, "70": 40715.0, "75": 41487.0, "80": 40875.0, "85": 40827.0, "90": 40713.0, "95": 40735.0, "100": 41568.0, "105": 40791.0, "110": 38720.0, "115": 41559.0, "120": 39266.0, "125": 40730.0, "130": 40733.0, "135": 37896.0, "140": 38696.0, "145": 40966.0, "150": 40040.0, "155": 40792.0, "160": 40106.0, "165": 38576.0, "170": 37355.0, "175": 40036.0, "180": 40110.0, "185": 38590.0, "190": 39320.0, "195": 41483.0, "200": 41568.0, "205": 40091.0, "210": 40794.0, "215": 40016.0, "220": 40796.0, "225": 39406.0, "230": 41485.0, "235": 37973.0, "240": 39340.0, "245": 38635.0, "250": 40043.0, "255": 41486.0, "260": 39248.0, "265": 41563.0, "270": 40725.0, "275": 40784.0, "280": 41495.0, "285": 41552.0, "290": 40793.0, "295": 38595.0, "300": 38748.0, "305": 41571.0, "310": 39269.0, "315": 40111.0, "320": 39325.0, "325": 40794.0, "330": 40793.0, "335": 40171.0, "340": 37976.0, "345": 39956.0, "350": 40721.0, "355": 39956.0, "360": 41570.0, "365": 37990.0, "370": 40033.0, "375": 41495.0, "380": 38717.0, "385": 37821.0, "390": 39956.0, "395": 40785.0, "400": 39255.0, "405": 40719.0, "410": 39467.0, "415": 40730.0, "420": 41489.0, "425": 39418.0, "430": 39419.0, "435": 42246.0, "440": 35821.0, "445": 40056.0, "450": 41488.0, "455": 39958.0, "460": 38570.0, "465": 39334.0, "470": 40797.0, "475": 40799.0, "480": 40799.0, "485": 39199.0, "490": 38724.0, "495": 40792.0, "500": 40717.0, "505": 41492.0, "510": 37754.0, "515": 40722.0, "520": 40014.0, "525": 39277.0, "530": 38826.0, "535": 40785.0, "540": 40030.0, "545": 41565.0, "550": 40795.0, "555": 40720.0, "560": 39962.0, "565": 40714.0, "570": 40160.0, "575": 39332.0, "580": 40790.0, "585": 40714.0, "590": 40032.0, "595": 40013.0, "600": 40056.0, "605": 40771.0, "610": 40893.0, "615": 39946.0, "620": 41553.0, "625": 40729.0, "630": 42254.0, "635": 39974.0, "640": 37046.0, "645": 40115.0, "650": 39269.0, "655": 39182.0, "660": 37328.0, "665": 40105.0, "670": 40797.0, "675": 39262.0, "680": 39948.0, "685": 39249.0, "690": 42258.0, "695": 38604.0, "700": 40036.0, "705": 38582.0, "710": 42260.0, "715": 42250.0, "720": 41493.0, "725": 40785.0, "730": 38650.0, "735": 40795.0, "740": 41561.0, "745": 41563.0, "750": 40051.0, "755": 38718.0, "760": 38583.0, "765": 39962.0, "770": 40710.0, "775": 40720.0, "780": 39332.0, "785": 41575.0, "790": 42324.0, "795": 41557.0, "800": 40814.0, "805": 37047.0, "810": 39332.0, "815": 40717.0, "820": 43020.0, "825": 40021.0, "830": 39960.0, "835": 41485.0, "840": 38551.0, "845": 41556.0, "850": 38049.0, "855": 36505.0, "860": 41490.0, "865": 39959.0, "870": 35796.0, "875": 38576.0, "880": 39976.0, "885": 40029.0, "890": 39403.0, "895": 40722.0, "900": 40020.0, "905": 40028.0, "910": 40033.0, "915": 39273.0, "920": 40078.0, "925": 40126.0, "930": 40084.0, "935": 40722.0, "940": 40786.0, "945": 39957.0, "950": 37729.0, "955": 40098.0, "960": 40796.0, "965": 40120.0, "970": 40720.0, "975": 39956.0, "980": 40800.0, "985": 30951.0, "990": 39327.0, "995": 42262.0, "1000": 41534.0, "1005": 41492.0, "1010": 41653.0, "1015": 40042.0, "1020": 39271.0, "1025": 40037.0, "1030": 40185.0, "1035": 39430.0, "1040": 40106.0, "1045": 40804.0, "1050": 41493.0, "1055": 37933.0, "1060": 42258.0, "1065": 37154.0, "1070": 
39259.0, "1075": 43022.0, "1080": 39322.0, "1085": 40035.0, "1090": 39974.0, "1095": 38489.0, "1100": 38638.0, "1105": 39344.0, "1110": 42340.0, "1115": 40771.0, "1120": 40722.0, "1125": 40728.0, "1130": 38656.0, "1135": 40118.0, "1140": 40127.0, "1145": 39334.0, "1150": 41550.0, "1155": 40716.0, "1160": 41479.0, "1165": 40042.0, "1170": 39281.0, "1175": 40032.0, "1180": 40734.0, "1185": 39268.0, "1190": 39259.0, "1195": 40715.0, "1200": 41566.0, "1205": 38592.0, "1210": 41547.0, "1215": 39315.0, "1220": 41562.0, "1225": 40122.0, "1230": 39359.0, "1235": 41487.0, "1240": 40724.0, "1245": 41487.0, "1250": 39397.0, "1255": 38697.0, "1260": 41485.0, "1265": 41500.0, "1270": 40723.0, "1275": 38436.0, "1280": 40115.0, "1285": 40722.0, "1290": 40023.0, "1295": 40788.0, "1300": 40731.0, "1305": 41490.0, "1310": 41495.0, "1315": 40798.0, "1320": 40735.0, "1325": 40730.0, "1330": 40805.0, "1335": 38659.0, "1340": 40798.0, "1345": 39972.0, "1350": 40793.0, "1355": 40112.0, "1360": 42261.0, "1365": 39416.0, "1370": 39338.0, "1375": 40801.0, "1380": 38627.0, "1385": 38604.0, "1390": 40035.0, "1395": 40801.0, "1400": 40790.0, "1405": 40805.0, "1410": 37805.0, "1415": 40021.0, "1420": 41492.0, "1425": 40787.0, "1430": 39965.0, "1435": 35926.0, "1440": 38568.0, "1445": 39348.0, "1450": 40108.0, "1455": 41555.0, "1460": 40808.0, "1465": 40736.0, "1470": 37184.0, "1475": 40730.0, "1480": 37370.0, "1485": 37809.0, "1490": 38512.0, "1495": 38592.0, "1500": 41489.0, "1505": 40026.0, "1510": 40724.0, "1515": 40809.0, "1520": 40813.0, "1525": 40892.0, "1530": 39278.0, "1535": 39335.0, "1540": 37892.0, "1545": 39972.0, "1550": 40890.0, "1555": 40799.0, "1560": 40790.0, "1565": 37883.0, "1570": 39958.0, "1575": 40044.0, "1580": 38661.0, "1585": 39264.0, "1590": 39347.0, "1595": 37956.0, "1600": 40805.0, "1605": 39341.0, "1610": 40735.0, "1615": 37876.0, "1620": 39279.0, "1625": 41580.0, "1630": 39969.0, "1635": 40813.0, "1640": 41498.0, "1645": 40054.0, "1650": 39348.0, "1655": 41504.0, "1660": 40864.0, "1665": 41577.0, "1670": 40798.0, "1675": 40801.0, "1680": 40895.0, "1685": 39960.0, "1690": 40034.0, "1695": 39333.0, "1700": 40802.0, "1705": 41604.0, "1710": 41507.0, "1715": 39348.0, "1720": 41554.0, "1725": 40049.0, "1730": 39343.0, "1735": 38686.0, "1740": 41483.0, "1745": 37740.0, "1750": 39277.0, "1755": 39352.0, "1760": 39259.0, "1765": 40791.0, "1770": 40792.0, "1775": 39421.0, "1780": 41495.0, "1785": 41562.0, "1790": 40734.0, "1795": 41489.0, "1800": 39440.0, "1805": 40719.0, "1810": 40038.0, "1815": 37739.0, "1820": 40728.0, "1825": 39397.0, "1830": 41510.0, "1835": 39462.0, "1840": 40796.0, "1845": 40800.0, "1850": 40814.0, "1855": 40813.0, "1860": 40721.0, "1865": 40046.0, "1870": 41638.0, "1875": 40734.0, "1880": 39321.0, "1885": 39198.0, "1890": 41573.0, "1895": 40725.0, "1900": 37821.0, "1905": 41492.0, "1910": 41496.0, "1915": 40952.0, "1920": 41560.0, "1925": 39256.0, "1930": 41498.0, "1935": 40814.0, "1940": 40070.0, "1945": 39445.0, "1950": 41490.0, "1955": 41566.0, "1960": 40801.0, "1965": 40110.0, "1970": 39971.0, "1975": 42259.0, "1980": 36548.0, "1985": 40107.0, "1990": 41555.0, "1995": 40036.0, "2000": 41499.0, "2005": 41566.0, "2010": 37816.0, "2015": 38738.0, "2020": 40727.0, "2025": 38720.0, "2030": 38057.0, "2035": 40754.0, "2040": 41578.0, "2045": 40181.0, "2050": 39504.0, "2055": 41561.0, "2060": 39417.0, "2065": 42256.0, "2070": 41499.0, "2075": 39279.0, "2080": 42258.0, "2085": 39366.0, "2090": 40809.0, "2095": 41553.0, "2100": 41574.0, "2105": 41511.0, "2110": 40812.0, "2115": 
40721.0, "2120": 41491.0, "2125": 40030.0, "2130": 39337.0, "2135": 39425.0, "2140": 41484.0, "2145": 38576.0, "2150": 40041.0, "2155": 41503.0, "2160": 41575.0, "2165": 40055.0, "2170": 40791.0, "2175": 39344.0, "2180": 40042.0, "2185": 41498.0, "2190": 40883.0, "2195": 38799.0, "2200": 40818.0, "2205": 39961.0, "2210": 39406.0, "2215": 40797.0, "2220": 39274.0, "2225": 40108.0, "2230": 40808.0, "2235": 37266.0, "2240": 40039.0, "2245": 40735.0, "2250": 42253.0, "2255": 40750.0, "2260": 41566.0, "2265": 40807.0, "2270": 40047.0, "2275": 40736.0, "2280": 39214.0, "2285": 40881.0, "2290": 39429.0, "2295": 37898.0, "2300": 41577.0, "2305": 40723.0, "2310": 40058.0, "2315": 38511.0, "2320": 40105.0, "2325": 41490.0, "2330": 40734.0, "2335": 37203.0, "2340": 40736.0, "2345": 40195.0, "2350": 40126.0, "2355": 40894.0, "2360": 39268.0, "2365": 40719.0, "2370": 40731.0, "2375": 38566.0, "2380": 40048.0, "2385": 40815.0, "2390": 40093.0, "2395": 40746.0, "2400": 41559.0, "2405": 40088.0, "2410": 40778.0, "2415": 39198.0, "2420": 39970.0, "2425": 37804.0, "2430": 39266.0, "2435": 41500.0, "2440": 40032.0, "2445": 39374.0, "2450": 40804.0, "2455": 42261.0, "2460": 40125.0, "2465": 39507.0, "2470": 37822.0, "2475": 40036.0, "2480": 40026.0, "2485": 39349.0, "2490": 40112.0, "2495": 41495.0, "2500": 39211.0, "2505": 40738.0, "2510": 40736.0, "2515": 38649.0, "2520": 40806.0, "2525": 41584.0, "2530": 40724.0, "2535": 39983.0, "2540": 39200.0, "2545": 37988.0, "2550": 40809.0, "2555": 41495.0, "2560": 39459.0, "2565": 39201.0, "2570": 39340.0, "2575": 40747.0, "2580": 40736.0, "2585": 40860.0, "2590": 40129.0, "2595": 37978.0, "2600": 39952.0, "2605": 38748.0, "2610": 41554.0, "2615": 40101.0, "2620": 41499.0, "2625": 39270.0, "2630": 40036.0, "2635": 39529.0, "2640": 40807.0, "2645": 40038.0, "2650": 40046.0, "2655": 39184.0, "2660": 37256.0, "2665": 40879.0, "2670": 40898.0, "2675": 38431.0, "2680": 40040.0, "2685": 41559.0, "2690": 40037.0, "2695": 40740.0, "2700": 39274.0, "2705": 41572.0, "2710": 40804.0, "2715": 38492.0, "2720": 40036.0, "2725": 38649.0, "2730": 40916.0, "2735": 40038.0, "2740": 38595.0, "2745": 40046.0, "2750": 40791.0, "2755": 38521.0, "2760": 38506.0, "2765": 39265.0, "2770": 41570.0, "2775": 39363.0, "2780": 40739.0, "2785": 40733.0, "2790": 39282.0, "2795": 40017.0, "2800": 39425.0, "2805": 40806.0, "2810": 41559.0, "2815": 42258.0, "2820": 40735.0, "2825": 40034.0, "2830": 40804.0, "2835": 41570.0, "2840": 40805.0, "2845": 38612.0, "2850": 42260.0, "2855": 40790.0, "2860": 39366.0, "2865": 39353.0, "2870": 40193.0, "2875": 42259.0, "2880": 40794.0, "2885": 39968.0, "2890": 38591.0, "2895": 40736.0, "2900": 39352.0, "2905": 39979.0, "2910": 40733.0, "2915": 40021.0, "2920": 37887.0, "2925": 40813.0, "2930": 36524.0, "2935": 41578.0, "2940": 40809.0, "2945": 38912.0, "2950": 39419.0, "2955": 38055.0, "2960": 41502.0, "2965": 39204.0, "2970": 40820.0, "2975": 38808.0, "2980": 41501.0, "2985": 39255.0, "2990": 42324.0, "2995": 40799.0, "3000": 37948.0, "3005": 42267.0, "3010": 38737.0, "3015": 40799.0, "3020": 41503.0, "3025": 39341.0, "3030": 38444.0, "3035": 40037.0, "3040": 41571.0, "3045": 39328.0, "3050": 41495.0, "3055": 40734.0, "3060": 38501.0, "3065": 39260.0, "3070": 40806.0, "3075": 37936.0, "3080": 39219.0, "3085": 41571.0, "3090": 39269.0, "3095": 39363.0, "3100": 38584.0, "3105": 39978.0, "3110": 37131.0, "3115": 38460.0, "3120": 41652.0, "3125": 40091.0, "3130": 39292.0, "3135": 39268.0, "3140": 39987.0, "3145": 40811.0, "3150": 41505.0, "3155": 38583.0, "3160": 
40888.0, "3165": 41501.0, "3170": 39247.0, "3175": 37445.0, "3180": 39466.0, "3185": 42264.0, "3190": 37963.0, "3195": 39261.0, "3200": 38676.0, "3205": 40031.0, "3210": 39985.0, "3215": 38604.0, "3220": 39952.0, "3225": 40113.0, "3230": 40104.0, "3235": 42351.0, "3240": 40035.0, "3245": 40749.0, "3250": 41492.0, "3255": 42264.0, "3260": 40107.0, "3265": 42270.0, "3270": 40792.0, "3275": 41569.0, "3280": 40179.0, "3285": 39270.0, "3290": 38642.0, "3295": 40727.0, "3300": 40186.0, "3305": 42263.0, "3310": 42349.0, "3315": 41495.0, "3320": 41496.0, "3325": 39281.0, "3330": 40792.0, "3335": 40141.0, "3340": 40802.0, "3345": 40810.0, "3350": 40812.0, "3355": 41577.0, "3360": 41493.0, "3365": 40737.0, "3370": 40042.0, "3375": 40734.0, "3380": 40027.0, "3385": 39355.0, "3390": 40035.0, "3395": 39973.0, "3400": 38098.0, "3405": 38660.0, "3410": 39978.0, "3415": 40053.0, "3420": 40800.0, "3425": 40808.0, "3430": 40736.0, "3435": 40048.0, "3440": 41578.0, "3445": 37997.0, "3450": 37885.0, "3455": 40819.0, "3460": 42255.0, "3465": 39333.0, "3470": 38573.0, "3475": 40207.0, "3480": 40831.0, "3485": 38807.0, "3490": 40046.0, "3495": 40042.0, "3500": 40046.0, "3505": 40882.0, "3510": 40826.0, "3515": 40036.0, "3520": 39273.0, "3525": 41575.0, "3530": 40734.0, "3535": 41576.0, "3540": 40741.0, "3545": 40817.0, "3550": 41575.0, "3555": 40114.0, "3560": 40754.0, "3565": 40811.0, "3570": 40823.0, "3575": 40052.0, "3580": 40053.0, "3585": 41579.0, "3590": 41509.0, "3595": 39340.0, "3600": 39344.0, "3605": 39278.0, "3610": 40027.0, "3615": 42255.0, "3620": 38676.0, "3625": 39957.0, "3630": 40819.0, "3635": 40106.0, "3640": 39200.0, "3645": 39334.0, "3650": 38648.0, "3655": 40202.0, "3660": 42340.0, "3665": 41492.0, "3670": 37372.0, "3675": 39398.0, "3680": 37325.0, "3685": 39354.0, "3690": 40786.0, "3695": 42334.0, "3700": 38166.0, "3705": 39476.0, "3710": 39277.0, "3715": 38525.0, "3720": 40819.0, "3725": 37259.0, "3730": 37839.0, "3735": 40811.0, "3740": 40797.0, "3745": 38573.0, "3750": 39399.0, "3755": 39339.0, "3760": 39364.0, "3765": 39345.0, "3770": 42343.0, "3775": 40827.0, "3780": 42253.0, "3785": 37894.0, "3790": 39340.0, "3795": 40023.0, "3800": 39512.0, "3805": 37153.0, "3810": 39416.0, "3815": 40727.0, "3820": 40133.0, "3825": 42259.0, "3830": 40035.0, "3835": 40795.0, "3840": 40036.0, "3845": 39354.0, "3850": 39416.0, "3855": 39365.0, "3860": 40057.0, "3865": 41504.0, "3870": 37762.0, "3875": 39966.0, "3880": 40048.0, "3885": 41496.0, "3890": 39975.0, "3895": 40730.0, "3900": 39263.0, "3905": 38583.0, "3910": 40040.0, "3915": 40790.0, "3920": 36543.0, "3925": 39972.0, "3930": 39268.0, "3935": 39350.0, "3940": 39432.0, "3945": 41507.0, "3950": 39985.0, "3955": 39400.0, "3960": 38663.0, "3965": 39970.0, "3970": 39290.0, "3975": 39963.0, "3980": 40820.0, "3985": 38591.0, "3990": 39214.0, "3995": 39413.0, "4000": 40046.0, "4005": 40052.0, "4010": 35698.0, "4015": 41581.0, "4020": 39297.0, "4025": 40191.0, "4030": 37029.0, "4035": 41509.0, "4040": 34435.0, "4045": 40814.0, "4050": 37746.0, "4055": 39432.0, "4060": 37257.0, "4065": 40729.0, "4070": 38512.0, "4075": 40042.0, "4080": 40872.0, "4085": 41495.0, "4090": 40727.0, "4095": 41513.0, "4100": 37887.0, "4105": 41504.0, "4110": 41504.0, "4115": 40206.0, "4120": 40810.0, "4125": 38743.0, "4130": 40743.0, "4135": 39350.0, "4140": 39360.0, "4145": 40036.0, "4150": 39360.0, "4155": 39422.0, "4160": 40151.0, "4165": 39360.0, "4170": 40748.0, "4175": 39272.0, "4180": 40794.0, "4185": 40789.0, "4190": 39276.0, "4195": 40786.0, "4200": 41498.0, "4205": 
40737.0, "4210": 40736.0, "4215": 40051.0, "4220": 38649.0, "4225": 40053.0, "4230": 38808.0, "4235": 39368.0, "4240": 40808.0, "4245": 40744.0, "4250": 40113.0, "4255": 39356.0, "4260": 38670.0, "4265": 40736.0, "4270": 40802.0, "4275": 40101.0, "4280": 40026.0, "4285": 41486.0, "4290": 40749.0, "4295": 37117.0, "4300": 39342.0, "4305": 40044.0, "4310": 40100.0, "4315": 40106.0, "4320": 41566.0, "4325": 40068.0, "4330": 38714.0, "4335": 42262.0, "4340": 40722.0, "4345": 40797.0, "4350": 37950.0, "4355": 41507.0, "4360": 40193.0, "4365": 40816.0, "4370": 37208.0, "4375": 40027.0, "4380": 38514.0, "4385": 40103.0, "4390": 40748.0, "4395": 39983.0, "4400": 39971.0, "4405": 40211.0, "4410": 41579.0, "4415": 41490.0, "4420": 40045.0, "4425": 40212.0, "4430": 40736.0, "4435": 37981.0, "4440": 39970.0, "4445": 39434.0, "4450": 39967.0, "4455": 40728.0, "4460": 40798.0, "4465": 41506.0, "4470": 41494.0, "4475": 39199.0, "4480": 41581.0, "4485": 40111.0, "4490": 42259.0, "4495": 39974.0, "4500": 40013.0, "4505": 40805.0, "4510": 40805.0, "4515": 37903.0, "4520": 39260.0, "4525": 40725.0, "4530": 39964.0, "4535": 39347.0, "4540": 39419.0, "4545": 38777.0, "4550": 40031.0, "4555": 40806.0, "4560": 39374.0, "4565": 40110.0, "4570": 41650.0, "4575": 40037.0, "4580": 37829.0, "4585": 39419.0, "4590": 37294.0, "4595": 39339.0, "4600": 40045.0, "4605": 40749.0, "4610": 40811.0, "4615": 40739.0, "4620": 40745.0, "4625": 41500.0, "4630": 40048.0, "4635": 40737.0, "4640": 41511.0, "4645": 41506.0, "4650": 40810.0, "4655": 39972.0, "4660": 40874.0, "4665": 41488.0, "4670": 39281.0, "4675": 40035.0, "4680": 41570.0, "4685": 38067.0, "4690": 40736.0, "4695": 39255.0, "4700": 39958.0, "4705": 40839.0, "4710": 42269.0, "4715": 41578.0, "4720": 40874.0, "4725": 40829.0, "4730": 38513.0, "4735": 40050.0, "4740": 37842.0, "4745": 39411.0, "4750": 40050.0, "4755": 39198.0, "4760": 40124.0, "4765": 41507.0, "4770": 40121.0, "4775": 36682.0, "4780": 39279.0, "4785": 40042.0, "4790": 42332.0, "4795": 39270.0, "4800": 42273.0, "4805": 39460.0, "4810": 40032.0, "4815": 42263.0, "4820": 39329.0, "4825": 37861.0, "4830": 40108.0, "4835": 41494.0, "4840": 40043.0, "4845": 41601.0, "4850": 38640.0, "4855": 40190.0, "4860": 38663.0, "4865": 40031.0, "4870": 41588.0, "4875": 40734.0, "4880": 41574.0, "4885": 40731.0, "4890": 40737.0, "4895": 40781.0, "4900": 40035.0, "4905": 40819.0, "4910": 40052.0, "4915": 40802.0, "4920": 40053.0, "4925": 40041.0, "4930": 39989.0, "4935": 40810.0, "4940": 39980.0, "4945": 39448.0, "4950": 40036.0, "4955": 39972.0, "4960": 38566.0, "4965": 38686.0, "4970": 39337.0, "4975": 39970.0, "4980": 37933.0, "4985": 37758.0, "4990": 39347.0, "4995": 40038.0, "5000": 38609.0, "5005": 40052.0, "5010": 38671.0, "5015": 40740.0, "5020": 39214.0, "5025": 41494.0, "5030": 36552.0, "5035": 37975.0, "5040": 40748.0, "5045": 39217.0, "5050": 39264.0, "5055": 39986.0, "5060": 40811.0, "5065": 38603.0, "5070": 42269.0, "5075": 40044.0, "5080": 41495.0, "5085": 39292.0, "5090": 40809.0, "5095": 40886.0, "5100": 39333.0, "5105": 40079.0, "5110": 40812.0, "5115": 38564.0, "5120": 40105.0, "5125": 41503.0, "5130": 40138.0, "5135": 41501.0, "5140": 41577.0, "5145": 38051.0, "5150": 39351.0, "5155": 40046.0, "5160": 41577.0, "5165": 39963.0, "5170": 40748.0, "5175": 38766.0, "5180": 41507.0, "5185": 40815.0, "5190": 38658.0, "5195": 38620.0, "5200": 40130.0, "5205": 42268.0, "5210": 39281.0, "5215": 37201.0, "5220": 40031.0, "5225": 41568.0, "5230": 40197.0, "5235": 40051.0, "5240": 38667.0, "5245": 40811.0, "5250": 
38540.0, "5255": 40050.0, "5260": 40804.0, "5265": 39284.0, "5270": 41503.0, "5275": 41505.0, "5280": 39284.0, "5285": 40039.0, "5290": 38727.0, "5295": 39346.0, "5300": 39434.0, "5305": 39276.0, "5310": 41572.0, "5315": 39219.0, "5320": 40900.0, "5325": 40812.0, "5330": 39339.0, "5335": 40136.0, "5340": 42267.0, "5345": 39979.0, "5350": 40807.0, "5355": 40130.0, "5360": 37332.0, "5365": 37914.0, "5370": 40823.0, "5375": 40032.0, "5380": 39340.0, "5385": 39348.0, "5390": 38028.0, "5395": 39446.0, "5400": 38701.0, "5405": 39369.0, "5410": 40045.0, "5415": 40742.0, "5420": 39980.0, "5425": 40115.0, "5430": 40813.0, "5435": 42263.0, "5440": 39292.0, "5445": 38584.0, "5450": 40045.0, "5455": 40139.0, "5460": 41489.0, "5465": 40882.0, "5470": 39419.0, "5475": 39470.0, "5480": 40037.0, "5485": 41493.0, "5490": 39394.0, "5495": 40808.0, "5500": 40901.0, "5505": 37764.0, "5510": 42271.0, "5515": 39293.0, "5520": 40728.0, "5525": 40801.0, "5530": 39279.0, "5535": 40748.0, "5540": 40131.0, "5545": 41585.0, "5550": 38747.0, "5555": 39968.0, "5560": 40184.0, "5565": 42335.0, "5570": 41492.0, "5575": 39450.0, "5580": 39343.0, "5585": 40734.0, "5590": 38571.0, "5595": 40051.0, "5600": 39351.0, "5605": 41576.0, "5610": 40809.0, "5615": 40791.0, "5620": 39461.0, "5625": 39399.0, "5630": 40100.0, "5635": 39344.0, "5640": 40813.0, "5645": 40733.0, "5650": 38587.0, "5655": 39263.0, "5660": 38742.0, "5665": 40818.0, "5670": 41592.0, "5675": 40820.0, "5680": 39353.0, "5685": 40740.0, "5690": 39974.0, "5695": 40814.0, "5700": 39337.0, "5705": 40043.0, "5710": 39352.0, "5715": 40806.0, "5720": 40890.0, "5725": 39969.0, "5730": 39970.0, "5735": 39976.0, "5740": 40051.0, "5745": 40798.0, "5750": 37216.0, "5755": 41509.0, "5760": 40800.0, "5765": 37152.0, "5770": 40728.0, "5775": 40026.0, "5780": 38674.0, "5785": 42342.0, "5790": 38013.0, "5795": 40866.0, "5800": 40804.0, "5805": 39967.0, "5810": 43040.0, "5815": 39275.0, "5820": 40734.0, "5825": 35624.0, "5830": 39413.0, "5835": 40048.0, "5840": 40049.0, "5845": 40818.0, "5850": 41496.0, "5855": 42260.0, "5860": 38510.0, "5865": 39368.0, "5870": 39417.0, "5875": 40043.0, "5880": 39354.0, "5885": 40802.0, "5890": 37906.0, "5895": 40734.0, "5900": 40049.0, "5905": 38690.0, "5910": 37971.0, "5915": 37850.0, "5920": 40803.0, "5925": 37925.0, "5930": 40867.0, "5935": 39283.0, "5940": 40099.0, "5945": 39320.0, "5950": 40132.0, "5955": 39351.0, "5960": 40046.0, "5965": 38736.0, "5970": 38434.0, "5975": 36509.0, "5980": 40732.0, "5985": 40041.0, "5990": 40805.0, "5995": 38591.0, "6000": 38756.0, "6005": 40060.0, "6010": 39972.0, "6015": 40046.0, "6020": 41577.0, "6025": 40031.0, "6030": 39348.0, "6035": 41586.0, "6040": 38607.0, "6045": 39967.0, "6050": 39973.0, "6055": 41578.0, "6060": 40117.0, "6065": 41572.0, "6070": 38746.0, "6075": 41507.0, "6080": 38008.0, "6085": 35980.0, "6090": 40743.0, "6095": 41504.0, "6100": 40752.0, "6105": 35948.0, "6110": 40106.0, "6115": 41504.0, "6120": 40134.0, "6125": 37234.0, "6130": 39283.0, "6135": 39264.0, "6140": 41564.0, "6145": 40063.0, "6150": 39290.0, "6155": 41493.0, "6160": 39965.0, "6165": 40724.0, "6170": 37290.0, "6175": 40820.0, "6180": 37843.0, "6185": 40809.0, "6190": 41504.0, "6195": 38588.0, "6200": 41499.0, "6205": 41577.0, "6210": 38722.0, "6215": 41511.0, "6220": 39989.0, "6225": 40750.0, "6230": 37895.0, "6235": 40810.0, "6240": 40741.0, "6245": 40208.0, "6250": 41501.0, "6255": 40725.0, "6260": 41577.0, "6265": 39237.0, "6270": 40812.0, "6275": 40873.0, "6280": 43030.0, "6285": 40825.0, "6290": 41571.0, "6295": 
40803.0, "6300": 39978.0, "6305": 38675.0, "6310": 38670.0, "6315": 41495.0, "6320": 40896.0, "6325": 37682.0, "6330": 39265.0, "6335": 41579.0, "6340": 40734.0, "6345": 41504.0, "6350": 41562.0, "6355": 40098.0, "6360": 38744.0, "6365": 40787.0, "6370": 38726.0, "6375": 40029.0, "6380": 37120.0, "6385": 40192.0, "6390": 39502.0, "6395": 41491.0, "6400": 36570.0, "6405": 40050.0, "6410": 40124.0, "6415": 41570.0, "6420": 40841.0, "6425": 37271.0, "6430": 39961.0, "6435": 40108.0, "6440": 41489.0, "6445": 40739.0, "6450": 38518.0, "6455": 38601.0, "6460": 39210.0, "6465": 42267.0, "6470": 41491.0, "6475": 40829.0, "6480": 40128.0, "6485": 39282.0, "6490": 35756.0, "6495": 39282.0, "6500": 41510.0, "6505": 40752.0, "6510": 40059.0, "6515": 40111.0, "6520": 40794.0, "6525": 42278.0, "6530": 41497.0, "6535": 39277.0, "6540": 40879.0, "6545": 38000.0, "6550": 39337.0, "6555": 40814.0, "6560": 35758.0, "6565": 39223.0, "6570": 40110.0, "6575": 41499.0, "6580": 41571.0, "6585": 40046.0, "6590": 35486.0, "6595": 40784.0, "6600": 39282.0, "6605": 39352.0, "6610": 41514.0, "6615": 40805.0, "6620": 41571.0, "6625": 40738.0, "6630": 37795.0, "6635": 38683.0, "6640": 40815.0, "6645": 38041.0, "6650": 40049.0, "6655": 40042.0, "6660": 41503.0, "6665": 41509.0, "6670": 39265.0, "6675": 39364.0, "6680": 42268.0, "6685": 40720.0, "6690": 40053.0, "6695": 40727.0, "6700": 40797.0, "6705": 41601.0, "6710": 39201.0, "6715": 38494.0, "6720": 39966.0, "6725": 41564.0, "6730": 41498.0, "6735": 38661.0, "6740": 37270.0, "6745": 41490.0, "6750": 40731.0, "6755": 38597.0, "6760": 39359.0, "6765": 39354.0, "6770": 39975.0, "6775": 38654.0, "6780": 38578.0, "6785": 40035.0, "6790": 40735.0, "6795": 40733.0, "6800": 40726.0, "6805": 40810.0, "6810": 40191.0, "6815": 40816.0, "6820": 40114.0, "6825": 40802.0, "6830": 41575.0, "6835": 38608.0, "6840": 40811.0, "6845": 42337.0, "6850": 40116.0, "6855": 39359.0, "6860": 41483.0, "6865": 40734.0, "6870": 40027.0, "6875": 40808.0, "6880": 40794.0, "6885": 39379.0, "6890": 40028.0, "6895": 40103.0, "6900": 37760.0, "6905": 40880.0, "6910": 41565.0, "6915": 41577.0, "6920": 40029.0, "6925": 40104.0, "6930": 42264.0, "6935": 38500.0, "6940": 39405.0, "6945": 40794.0, "6950": 39967.0, "6955": 39437.0, "6960": 40729.0, "6965": 40803.0, "6970": 40814.0, "6975": 41503.0, "6980": 39506.0, "6985": 40790.0, "6990": 40112.0, "6995": 39967.0, "7000": 39418.0, "7005": 39423.0, "7010": 39967.0, "7015": 38669.0, "7020": 40797.0, "7025": 39291.0, "7030": 38672.0, "7035": 39335.0, "7040": 42329.0, "7045": 36559.0, "7050": 38588.0, "7055": 40895.0, "7060": 40738.0, "7065": 37989.0, "7070": 40056.0, "7075": 39275.0, "7080": 40729.0, "7085": 40734.0, "7090": 36582.0, "7095": 40734.0, "7100": 40817.0, "7105": 40727.0, "7110": 42335.0, "7115": 39294.0, "7120": 40802.0, "7125": 38525.0, "7130": 40881.0, "7135": 39292.0, "7140": 40111.0, "7145": 40737.0, "7150": 40789.0, "7155": 42344.0, "7160": 40056.0, "7165": 40030.0, "7170": 41493.0, "7175": 40810.0, "7180": 40825.0, "7185": 39297.0, "7190": 40807.0, "7195": 39973.0, "7200": 40118.0, "7205": 40813.0, "7210": 41582.0, "7215": 41581.0, "7220": 37353.0, "7225": 40813.0, "7230": 40127.0, "7235": 41487.0, "7240": 38575.0, "7245": 39424.0, "7250": 40814.0, "7255": 39972.0, "7260": 38511.0, "7265": 39355.0, "7270": 40726.0, "7275": 38585.0, "7280": 39462.0, "7285": 40121.0, "7290": 41570.0, "7295": 40741.0, "7300": 40041.0, "7305": 39504.0, "7310": 40118.0, "7315": 40867.0, "7320": 40132.0, "7325": 40819.0, "7330": 41506.0, "7335": 39367.0, "7340": 
40815.0, "7345": 40111.0, "7350": 39356.0, "7355": 40751.0, "7360": 40194.0, "7365": 41504.0, "7370": 38585.0, "7375": 38439.0, "7380": 40746.0, "7385": 40112.0, "7390": 38673.0, "7395": 38456.0, "7400": 41511.0, "7405": 41489.0, "7410": 38497.0, "7415": 38518.0, "7420": 39352.0, "7425": 41507.0, "7430": 39373.0, "7435": 39977.0, "7440": 40142.0, "7445": 40734.0, "7450": 40801.0, "7455": 42266.0, "7460": 38808.0, "7465": 35627.0, "7470": 40811.0, "7475": 37828.0, "7480": 40877.0, "7485": 37984.0, "7490": 40799.0, "7495": 40099.0, "7500": 41498.0, "7505": 40803.0, "7510": 39210.0, "7515": 40053.0, "7520": 40743.0, "7525": 40113.0, "7530": 40814.0, "7535": 39274.0, "7540": 40044.0, "7545": 38441.0, "7550": 40724.0, "7555": 40126.0, "7560": 41501.0, "7565": 40050.0, "7570": 40739.0, "7575": 40734.0, "7580": 39971.0, "7585": 40735.0, "7590": 40814.0, "7595": 40798.0, "7600": 35672.0, "7605": 40728.0, "7610": 41499.0, "7615": 38470.0, "7620": 41572.0, "7625": 40736.0, "7630": 41581.0, "7635": 41506.0, "7640": 40127.0, "7645": 40122.0, "7650": 38655.0, "7655": 41573.0, "7660": 39968.0, "7665": 41585.0, "7670": 39967.0, "7675": 39438.0, "7680": 40739.0, "7685": 39356.0, "7690": 40103.0, "7695": 40047.0, "7700": 41507.0, "7705": 40185.0, "7710": 40790.0, "7715": 40046.0, "7720": 35768.0, "7725": 39348.0, "7730": 39354.0, "7735": 37974.0, "7740": 40858.0, "7745": 40737.0, "7750": 41506.0, "7755": 39395.0, "7760": 40154.0, "7765": 39342.0, "7770": 39350.0, "7775": 39350.0, "7780": 38598.0, "7785": 38691.0, "7790": 39354.0, "7795": 40036.0, "7800": 40153.0, "7805": 38076.0, "7810": 42353.0, "7815": 38580.0, "7820": 40804.0, "7825": 37904.0, "7830": 40902.0, "7835": 39367.0, "7840": 40828.0, "7845": 41507.0, "7850": 39340.0, "7855": 38674.0, "7860": 39975.0, "7865": 38836.0, "7870": 40815.0, "7875": 40124.0, "7880": 40729.0, "7885": 40741.0, "7890": 40051.0, "7895": 42264.0, "7900": 40126.0, "7905": 41499.0, "7910": 37908.0, "7915": 39268.0, "7920": 39361.0, "7925": 42265.0, "7930": 39248.0, "7935": 41590.0, "7940": 39345.0, "7945": 41557.0, "7950": 36614.0, "7955": 37915.0, "7960": 40042.0, "7965": 40813.0, "7970": 40886.0, "7975": 37763.0, "7980": 41500.0, "7985": 39972.0, "7990": 39385.0, "7995": 39361.0, "8000": 41509.0, "8005": 40799.0, "8010": 40736.0, "8015": 40891.0, "8020": 38639.0, "8025": 40889.0, "8030": 37827.0, "8035": 40051.0, "8040": 39226.0, "8045": 41566.0, "8050": 38591.0, "8055": 39275.0, "8060": 39284.0, "8065": 40829.0, "8070": 40805.0, "8075": 41585.0, "8080": 41562.0, "8085": 39287.0, "8090": 40809.0, "8095": 39372.0, "8100": 40733.0, "8105": 40873.0, "8110": 38673.0, "8115": 40819.0, "8120": 39350.0, "8125": 39964.0, "8130": 39284.0, "8135": 41494.0, "8140": 39973.0, "8145": 40812.0, "8150": 41569.0, "8155": 41511.0, "8160": 39453.0, "8165": 40054.0, "8170": 38735.0, "8175": 40047.0, "8180": 37447.0, "8185": 40734.0, "8190": 39348.0, "8195": 40048.0, "8200": 40114.0, "8205": 40074.0, "8210": 39283.0, "8215": 40800.0, "8220": 38580.0, "8225": 39282.0, "8230": 40808.0, "8235": 40049.0, "8240": 39362.0, "8245": 38685.0, "8250": 41571.0, "8255": 39423.0, "8260": 40828.0, "8265": 41508.0, "8270": 42270.0, "8275": 40739.0, "8280": 40051.0, "8285": 40897.0, "8290": 40109.0, "8295": 39252.0, "8300": 40785.0, "8305": 40108.0, "8310": 39278.0, "8315": 39205.0, "8320": 41514.0, "8325": 39283.0, "8330": 40740.0, "8335": 40048.0, "8340": 40076.0, "8345": 39444.0, "8350": 40820.0, "8355": 40067.0, "8360": 40796.0, "8365": 39971.0, "8370": 39414.0, "8375": 38871.0, "8380": 40744.0, "8385": 
40050.0, "8390": 40811.0, "8395": 39961.0, "8400": 40136.0, "8405": 39220.0, "8410": 41509.0, "8415": 38615.0, "8420": 40104.0, "8425": 39971.0, "8430": 37844.0, "8435": 39420.0, "8440": 37098.0, "8445": 38576.0, "8450": 39422.0, "8455": 39983.0, "8460": 38012.0, "8465": 37922.0, "8470": 38624.0, "8475": 40809.0, "8480": 40201.0, "8485": 37661.0, "8490": 41502.0, "8495": 40026.0, "8500": 39988.0, "8505": 38428.0, "8510": 41506.0, "8515": 39444.0, "8520": 39440.0, "8525": 38509.0, "8530": 40795.0, "8535": 40748.0, "8540": 40737.0, "8545": 39373.0, "8550": 40813.0, "8555": 40017.0, "8560": 40731.0, "8565": 39982.0, "8570": 39287.0, "8575": 40024.0, "8580": 40135.0, "8585": 40064.0, "8590": 40825.0, "8595": 40056.0, "8600": 40186.0, "8605": 39299.0, "8610": 40852.0, "8615": 38606.0, "8620": 40043.0, "8625": 42269.0, "8630": 40738.0, "8635": 39281.0, "8640": 40743.0, "8645": 40056.0, "8650": 37205.0, "8655": 40131.0, "8660": 38653.0, "8665": 40730.0, "8670": 40838.0, "8675": 40070.0, "8680": 38522.0, "8685": 42264.0, "8690": 37897.0, "8695": 39274.0, "8700": 38653.0, "8705": 40807.0, "8710": 40060.0, "8715": 40040.0, "8720": 41501.0, "8725": 39348.0, "8730": 40819.0, "8735": 38660.0, "8740": 39310.0, "8745": 38663.0, "8750": 38585.0, "8755": 40870.0, "8760": 40738.0, "8765": 39207.0, "8770": 41584.0, "8775": 40805.0, "8780": 40809.0, "8785": 39273.0, "8790": 41496.0, "8795": 40807.0, "8800": 41553.0, "8805": 41504.0, "8810": 39282.0, "8815": 40728.0, "8820": 39263.0, "8825": 41508.0, "8830": 42255.0, "8835": 40811.0, "8840": 39350.0, "8845": 42270.0, "8850": 41501.0, "8855": 42272.0, "8860": 39352.0, "8865": 39279.0, "8870": 41571.0, "8875": 38755.0, "8880": 38661.0, "8885": 38584.0, "8890": 39219.0, "8895": 40032.0, "8900": 40024.0, "8905": 38510.0, "8910": 40735.0, "8915": 37893.0, "8920": 39293.0, "8925": 39281.0, "8930": 42260.0, "8935": 37295.0, "8940": 41577.0, "8945": 41501.0, "8950": 40042.0, "8955": 41509.0, "8960": 41508.0, "8965": 42263.0, "8970": 37914.0, "8975": 36509.0, "8980": 40809.0, "8985": 41511.0, "8990": 40739.0, "8995": 39359.0, "9000": 40731.0, "9005": 39278.0, "9010": 36524.0, "9015": 39336.0, "9020": 40055.0, "9025": 39965.0, "9030": 40037.0, "9035": 40114.0, "9040": 40829.0, "9045": 39965.0, "9050": 40094.0, "9055": 40823.0, "9060": 38533.0, "9065": 38588.0, "9070": 38579.0, "9075": 40134.0, "9080": 40114.0, "9085": 40731.0, "9090": 40050.0, "9095": 39975.0, "9100": 38511.0, "9105": 38657.0, "9110": 40119.0, "9115": 39283.0, "9120": 34484.0, "9125": 39522.0, "9130": 42330.0, "9135": 39275.0, "9140": 39207.0, "9145": 40213.0, "9150": 39281.0, "9155": 42257.0, "9160": 40811.0, "9165": 40053.0, "9170": 40039.0, "9175": 38588.0, "9180": 39973.0, "9185": 40103.0, "9190": 40046.0, "9195": 40054.0, "9200": 38034.0, "9205": 40736.0, "9210": 39273.0, "9215": 40740.0, "9220": 40801.0, "9225": 40810.0, "9230": 40812.0, "9235": 38847.0, "9240": 41499.0, "9245": 41580.0, "9250": 40725.0, "9255": 40108.0, "9260": 40198.0, "9265": 41501.0, "9270": 41509.0, "9275": 41567.0, "9280": 37260.0, "9285": 40947.0, "9290": 35879.0, "9295": 40816.0, "9300": 40796.0, "9305": 39433.0, "9310": 40738.0, "9315": 40791.0, "9320": 38600.0, "9325": 40126.0, "9330": 40826.0, "9335": 38529.0, "9340": 41499.0, "9345": 40797.0, "9350": 40824.0, "9355": 38451.0, "9360": 42268.0, "9365": 38688.0, "9370": 41494.0, "9375": 40806.0, "9380": 40815.0, "9385": 40735.0, "9390": 41576.0, "9395": 40886.0, "9400": 40802.0, "9405": 40050.0, "9410": 40801.0, "9415": 40046.0, "9420": 40746.0, "9425": 41506.0, "9430": 
40731.0, "9435": 39420.0, "9440": 41501.0, "9445": 40795.0, "9450": 39359.0, "9455": 42341.0, "9460": 40810.0, "9465": 42263.0, "9470": 35945.0, "9475": 40798.0, "9480": 41576.0, "9485": 39412.0, "9490": 39287.0, "9495": 38585.0, "9500": 41512.0, "9505": 40061.0, "9510": 41552.0, "9515": 40100.0, "9520": 39272.0, "9525": 39511.0, "9530": 41572.0, "9535": 38669.0, "9540": 39294.0, "9545": 39298.0, "9550": 41581.0, "9555": 40736.0, "9560": 39358.0, "9565": 40055.0, "9570": 39421.0, "9575": 42278.0, "9580": 40037.0, "9585": 40045.0, "9590": 38518.0, "9595": 38663.0, "9600": 37192.0, "9605": 40808.0, "9610": 41572.0, "9615": 39973.0, "9620": 39961.0, "9625": 40811.0, "9630": 40819.0, "9635": 38437.0, "9640": 40809.0, "9645": 39960.0, "9650": 41503.0, "9655": 42282.0, "9660": 40813.0, "9665": 40068.0, "9670": 40034.0, "9675": 38719.0, "9680": 40743.0, "9685": 39974.0, "9690": 39445.0, "9695": 39277.0, "9700": 39211.0, "9705": 40887.0, "9710": 39280.0, "9715": 39336.0, "9720": 40177.0, "9725": 38754.0, "9730": 40820.0, "9735": 41595.0, "9740": 41588.0, "9745": 39378.0, "9750": 40809.0, "9755": 40885.0, "9760": 39303.0, "9765": 43036.0, "9770": 39438.0, "9775": 39964.0, "9780": 40129.0, "9785": 40880.0, "9790": 39221.0, "9795": 42279.0, "9800": 37200.0, "9805": 39978.0, "9810": 40805.0, "9815": 40806.0, "9820": 39270.0, "9825": 40795.0, "9830": 39504.0, "9835": 39263.0, "9840": 37901.0, "9845": 39963.0, "9850": 39284.0, "9855": 38533.0, "9860": 37978.0, "9865": 41502.0, "9870": 39348.0, "9875": 37140.0, "9880": 40052.0, "9885": 39224.0, "9890": 40039.0, "9895": 41556.0, "9900": 39979.0, "9905": 41600.0, "9910": 41585.0, "9915": 41495.0, "9920": 40003.0, "9925": 38662.0, "9930": 40043.0, "9935": 37879.0, "9940": 41639.0, "9945": 41499.0, "9950": 40813.0, "9955": 41501.0, "9960": 40810.0, "9965": 40828.0, "9970": 39345.0, "9975": 40747.0, "9980": 41508.0, "9985": 42276.0, "9990": 38504.0, "9995": 42276.0, "10000": 40017.0, "10005": 39339.0, "10010": 39439.0, "10015": 41504.0, "10020": 40731.0, "10025": 38515.0, "10030": 39319.0, "10035": 36369.0, "10040": 41583.0, "10045": 40036.0, "10050": 39275.0, "10055": 41507.0, "10060": 40056.0, "10065": 41495.0, "10070": 40826.0, "10075": 40796.0, "10080": 40876.0, "10085": 40125.0, "10090": 41495.0, "10095": 40739.0, "10100": 40730.0, "10105": 41506.0, "10110": 39293.0, "10115": 41639.0, "10120": 40798.0, "10125": 39347.0, "10130": 37760.0, "10135": 40055.0, "10140": 40744.0, "10145": 40739.0, "10150": 41568.0, "10155": 40116.0, "10160": 39519.0, "10165": 37938.0, "10170": 41505.0, "10175": 40822.0, "10180": 40037.0, "10185": 41497.0, "10190": 39974.0, "10195": 37901.0, "10200": 40100.0, "10205": 40898.0, "10210": 40833.0, "10215": 39435.0, "10220": 40823.0, "10225": 37282.0, "10230": 38608.0, "10235": 40737.0, "10240": 39440.0, "10245": 40749.0, "10250": 39361.0, "10255": 40885.0, "10260": 40045.0, "10265": 37185.0, "10270": 40878.0, "10275": 41577.0, "10280": 40039.0, "10285": 40815.0, "10290": 37239.0, "10295": 40804.0, "10300": 38441.0, "10305": 39497.0, "10310": 38673.0, "10315": 40805.0, "10320": 39271.0, "10325": 37912.0, "10330": 36532.0, "10335": 38516.0, "10340": 41498.0, "10345": 40801.0, "10350": 41523.0, "10355": 39342.0, "10360": 39275.0, "10365": 37064.0, "10370": 41503.0, "10375": 37976.0, "10380": 40809.0, "10385": 40040.0, "10390": 41505.0, "10395": 39985.0, "10400": 40737.0, "10405": 35924.0, "10410": 39270.0, "10415": 40742.0, "10420": 40868.0, "10425": 40035.0, "10430": 40741.0, "10435": 40114.0, "10440": 39347.0, "10445": 38596.0, 
"10450": 38584.0, "10455": 39361.0, "10460": 40019.0, "10465": 41583.0, "10470": 37979.0, "10475": 41576.0, "10480": 39266.0, "10485": 40785.0, "10490": 42354.0, "10495": 39294.0, "10500": 39360.0, "10505": 40045.0, "10510": 40189.0, "10515": 37680.0, "10520": 39275.0, "10525": 40109.0, "10530": 40050.0, "10535": 40035.0, "10540": 39211.0, "10545": 40810.0, "10550": 40808.0, "10555": 39349.0, "10560": 40738.0, "10565": 39335.0, "10570": 39978.0, "10575": 37337.0, "10580": 39425.0, "10585": 40040.0, "10590": 39294.0, "10595": 39272.0, "10600": 41504.0, "10605": 40047.0, "10610": 37274.0, "10615": 40724.0, "10620": 38745.0, "10625": 38495.0, "10630": 40817.0, "10635": 40733.0, "10640": 39343.0, "10645": 39330.0, "10650": 40027.0, "10655": 40044.0, "10660": 39259.0, "10665": 35683.0, "10670": 36924.0, "10675": 40729.0, "10680": 40817.0, "10685": 38561.0, "10690": 39358.0, "10695": 38729.0, "10700": 35982.0, "10705": 41513.0, "10710": 40131.0, "10715": 39343.0, "10720": 39219.0, "10725": 39450.0, "10730": 40122.0, "10735": 40794.0, "10740": 39384.0, "10745": 40128.0, "10750": 40817.0, "10755": 42272.0, "10760": 39449.0, "10765": 40830.0, "10770": 40796.0, "10775": 41501.0, "10780": 40042.0, "10785": 41511.0, "10790": 41517.0, "10795": 39303.0, "10800": 40051.0, "10805": 39338.0, "10810": 40056.0, "10815": 36497.0, "10820": 42263.0, "10825": 39358.0, "10830": 40047.0, "10835": 37964.0, "10840": 40116.0, "10845": 41495.0, "10850": 41577.0, "10855": 41502.0, "10860": 41500.0, "10865": 41563.0, "10870": 36676.0, "10875": 42266.0, "10880": 41503.0, "10885": 41501.0, "10890": 40830.0, "10895": 40758.0, "10900": 41504.0, "10905": 40818.0, "10910": 40804.0, "10915": 41513.0, "10920": 40031.0, "10925": 41504.0, "10930": 40136.0, "10935": 39436.0, "10940": 40733.0, "10945": 39279.0, "10950": 41569.0, "10955": 38607.0, "10960": 39278.0, "10965": 41501.0, "10970": 39272.0, "10975": 39382.0, "10980": 40194.0, "10985": 40050.0, "10990": 40799.0, "10995": 40046.0, "11000": 37943.0, "11005": 38079.0, "11010": 40784.0, "11015": 40036.0, "11020": 40042.0, "11025": 40809.0, "11030": 40802.0, "11035": 41507.0, "11040": 40055.0, "11045": 40817.0, "11050": 40755.0, "11055": 40820.0, "11060": 38596.0, "11065": 40040.0, "11070": 40043.0, "11075": 40827.0, "11080": 40807.0, "11085": 41565.0, "11090": 40743.0, "11095": 39267.0, "11100": 41519.0, "11105": 41499.0, "11110": 38527.0, "11115": 38688.0, "11120": 38570.0, "11125": 41503.0, "11130": 40816.0, "11135": 38727.0, "11140": 39974.0, "11145": 40742.0, "11150": 40113.0, "11155": 41561.0, "11160": 41501.0, "11165": 38583.0, "11170": 38520.0, "11175": 39986.0, "11180": 40733.0, "11185": 38735.0, "11190": 39278.0, "11195": 40802.0, "11200": 37067.0, "11205": 37013.0, "11210": 39285.0, "11215": 40816.0, "11220": 40137.0, "11225": 40819.0, "11230": 41560.0, "11235": 41583.0, "11240": 40735.0, "11245": 40880.0, "11250": 37758.0, "11255": 40144.0, "11260": 39349.0, "11265": 38726.0, "11270": 40122.0, "11275": 40821.0, "11280": 40141.0, "11285": 40260.0, "11290": 40052.0, "11295": 39992.0, "11300": 40815.0, "11305": 40745.0, "11310": 41500.0, "11315": 38532.0, "11320": 39277.0, "11325": 40141.0, "11330": 38661.0, "11335": 41490.0, "11340": 39441.0, "11345": 37957.0, "11350": 39347.0, "11355": 40035.0, "11360": 40731.0, "11365": 38679.0, "11370": 42274.0, "11375": 39265.0, "11380": 39274.0, "11385": 40057.0, "11390": 41512.0, "11395": 38655.0, "11400": 39355.0, "11405": 40815.0, "11410": 38626.0, "11415": 36505.0, "11420": 39370.0, "11425": 37286.0, "11430": 40124.0, "11435": 
41598.0, "11440": 41505.0, "11445": 39972.0, "11450": 40798.0, "11455": 39282.0, "11460": 40813.0, "11465": 40740.0, "11470": 37410.0, "11475": 39410.0, "11480": 37838.0, "11485": 39971.0, "11490": 40730.0, "11495": 40049.0, "11500": 38665.0, "11505": 42269.0, "11510": 39256.0, "11515": 40031.0, "11520": 40727.0, "11525": 41513.0, "11530": 40124.0, "11535": 40814.0, "11540": 40798.0, "11545": 40741.0, "11550": 39290.0, "11555": 36983.0, "11560": 41564.0, "11565": 39289.0, "11570": 40040.0, "11575": 39257.0, "11580": 40748.0, "11585": 38732.0, "11590": 40824.0, "11595": 41512.0, "11600": 40889.0, "11605": 38548.0, "11610": 38671.0, "11615": 38601.0, "11620": 39530.0, "11625": 41505.0, "11630": 41496.0, "11635": 42322.0, "11640": 40040.0, "11645": 40725.0, "11650": 40047.0, "11655": 40019.0, "11660": 37846.0, "11665": 40121.0, "11670": 40815.0, "11675": 41590.0, "11680": 42266.0, "11685": 39212.0, "11690": 41505.0, "11695": 40044.0, "11700": 41499.0, "11705": 40112.0, "11710": 41517.0, "11715": 39291.0, "11720": 38074.0, "11725": 40799.0, "11730": 40734.0, "11735": 39279.0, "11740": 39971.0, "11745": 38596.0, "11750": 38684.0, "11755": 38518.0, "11760": 41496.0, "11765": 40138.0, "11770": 39348.0, "11775": 40827.0, "11780": 39337.0, "11785": 40045.0, "11790": 40803.0, "11795": 38677.0, "11800": 40030.0, "11805": 38649.0, "11810": 40808.0, "11815": 40743.0, "11820": 42267.0, "11825": 40885.0, "11830": 41591.0, "11835": 40813.0, "11840": 39284.0, "11845": 41505.0, "11850": 40721.0, "11855": 38726.0, "11860": 41502.0, "11865": 38439.0, "11870": 40722.0, "11875": 41499.0, "11880": 40109.0, "11885": 40810.0, "11890": 40117.0, "11895": 38658.0, "11900": 38607.0, "11905": 39352.0, "11910": 40884.0, "11915": 40821.0, "11920": 40752.0, "11925": 41588.0, "11930": 40874.0, "11935": 41498.0, "11940": 39277.0, "11945": 40810.0, "11950": 40726.0, "11955": 38564.0, "11960": 40126.0, "11965": 40811.0, "11970": 41583.0, "11975": 39301.0, "11980": 40822.0, "11985": 41576.0, "11990": 41513.0, "11995": 40057.0, "12000": 38496.0, "12005": 41499.0, "12010": 40044.0, "12015": 41504.0, "12020": 40049.0, "12025": 39274.0, "12030": 40126.0, "12035": 38591.0, "12040": 40048.0, "12045": 40867.0, "12050": 39350.0, "12055": 39290.0, "12060": 40816.0, "12065": 39306.0, "12070": 39302.0, "12075": 40798.0, "12080": 39270.0, "12085": 40041.0, "12090": 40741.0, "12095": 40037.0, "12100": 40136.0, "12105": 37970.0, "12110": 39347.0, "12115": 40877.0, "12120": 40732.0, "12125": 38433.0, "12130": 38664.0, "12135": 40792.0, "12140": 40111.0, "12145": 40120.0, "12150": 40036.0, "12155": 40050.0, "12160": 39968.0, "12165": 40121.0, "12170": 40053.0, "12175": 42264.0, "12180": 40825.0, "12185": 40806.0, "12190": 40809.0, "12195": 37964.0, "12200": 39972.0, "12205": 39969.0, "12210": 42335.0, "12215": 40869.0, "12220": 39209.0, "12225": 39288.0, "12230": 41494.0, "12235": 40215.0, "12240": 40040.0, "12245": 38679.0, "12250": 40805.0, "12255": 41505.0, "12260": 40131.0, "12265": 42258.0, "12270": 37299.0, "12275": 40039.0, "12280": 39968.0, "12285": 40805.0, "12290": 40036.0, "12295": 40810.0, "12300": 40878.0, "12305": 42267.0, "12310": 40825.0, "12315": 39447.0, "12320": 40803.0, "12325": 36599.0, "12330": 39328.0, "12335": 39578.0, "12340": 40201.0, "12345": 40801.0, "12350": 37814.0, "12355": 39496.0, "12360": 42280.0, "12365": 40728.0, "12370": 38678.0, "12375": 40147.0, "12380": 40800.0, "12385": 40064.0, "12390": 38456.0, "12395": 39982.0, "12400": 39294.0, "12405": 39970.0, "12410": 39970.0, "12415": 40809.0, "12420": 39341.0, 
"12425": 39969.0, "12430": 40790.0, "12435": 40792.0, "12440": 39291.0, "12445": 40803.0, "12450": 40105.0, "12455": 40139.0, "12460": 36314.0, "12465": 36599.0, "12470": 39225.0, "12475": 39964.0, "12480": 39976.0, "12485": 37393.0, "12490": 40037.0, "12495": 40764.0, "12500": 37830.0, "12505": 40814.0, "12510": 39196.0, "12515": 38597.0, "12520": 38514.0, "12525": 37205.0, "12530": 37979.0, "12535": 41507.0, "12540": 39321.0, "12545": 38593.0, "12550": 40052.0, "12555": 40038.0, "12560": 40049.0, "12565": 38106.0, "12570": 39969.0, "12575": 39976.0, "12580": 40026.0, "12585": 41511.0, "12590": 40065.0, "12595": 41515.0, "12600": 39378.0, "12605": 40061.0, "12610": 40211.0, "12615": 40831.0, "12620": 41506.0, "12625": 40808.0, "12630": 38525.0, "12635": 40131.0, "12640": 33842.0, "12645": 40195.0, "12650": 40043.0, "12655": 40124.0, "12660": 40813.0, "12665": 40876.0, "12670": 40878.0, "12675": 41591.0, "12680": 39208.0, "12685": 41503.0, "12690": 39279.0, "12695": 41501.0, "12700": 40089.0, "12705": 40811.0, "12710": 39438.0, "12715": 38448.0, "12720": 40202.0, "12725": 40812.0, "12730": 37287.0, "12735": 40798.0, "12740": 40870.0, "12745": 40839.0, "12750": 40819.0, "12755": 41578.0, "12760": 40104.0, "12765": 38581.0, "12770": 42269.0, "12775": 40053.0, "12780": 40041.0, "12785": 40721.0, "12790": 40116.0, "12795": 39349.0, "12800": 40108.0, "12805": 38096.0, "12810": 40135.0, "12815": 40737.0, "12820": 40832.0, "12825": 41493.0, "12830": 40818.0, "12835": 40207.0, "12840": 41510.0, "12845": 39331.0, "12850": 40803.0, "12855": 38683.0, "12860": 40813.0, "12865": 39974.0, "12870": 41507.0, "12875": 40038.0, "12880": 40040.0, "12885": 40742.0, "12890": 41565.0, "12895": 39971.0, "12900": 38658.0, "12905": 40735.0, "12910": 39436.0, "12915": 39306.0, "12920": 40799.0, "12925": 40821.0, "12930": 41497.0, "12935": 40138.0, "12940": 40827.0, "12945": 40738.0, "12950": 36545.0, "12955": 40909.0, "12960": 38505.0, "12965": 38525.0, "12970": 40902.0, "12975": 41605.0, "12980": 39364.0, "12985": 40738.0, "12990": 39217.0, "12995": 41575.0, "13000": 41570.0, "13005": 41499.0, "13010": 39439.0, "13015": 39227.0, "13020": 40117.0, "13025": 41577.0, "13030": 40039.0, "13035": 41498.0, "13040": 37343.0, "13045": 41510.0, "13050": 40112.0, "13055": 40033.0, "13060": 40140.0, "13065": 40739.0, "13070": 39329.0, "13075": 42343.0, "13080": 40104.0, "13085": 38666.0, "13090": 41497.0, "13095": 37203.0, "13100": 40873.0, "13105": 40051.0, "13110": 40058.0, "13115": 40738.0, "13120": 37297.0, "13125": 40054.0, "13130": 40891.0, "13135": 40825.0, "13140": 38668.0, "13145": 40134.0, "13150": 40053.0, "13155": 37222.0, "13160": 38529.0, "13165": 39389.0, "13170": 41510.0, "13175": 40811.0, "13180": 40041.0, "13185": 38609.0, "13190": 40821.0, "13195": 38648.0, "13200": 40729.0, "13205": 41507.0, "13210": 40726.0, "13215": 37481.0, "13220": 38623.0, "13225": 39975.0, "13230": 39375.0, "13235": 39298.0, "13240": 39202.0, "13245": 39964.0, "13250": 40803.0, "13255": 40122.0, "13260": 40115.0, "13265": 42273.0, "13270": 39361.0, "13275": 40044.0, "13280": 40131.0, "13285": 39283.0, "13290": 40128.0, "13295": 39288.0, "13300": 40058.0, "13305": 39956.0, "13310": 40055.0, "13315": 40805.0, "13320": 40050.0, "13325": 40887.0, "13330": 40827.0, "13335": 39967.0, "13340": 41577.0, "13345": 40059.0, "13350": 42270.0, "13355": 41569.0, "13360": 37975.0, "13365": 40045.0, "13370": 41504.0, "13375": 39277.0, "13380": 40043.0, "13385": 40813.0, "13390": 40831.0, "13395": 38646.0, "13400": 37896.0, "13405": 39217.0, "13410": 
40799.0, "13415": 41519.0, "13420": 42258.0, "13425": 38520.0, "13430": 39262.0, "13435": 39437.0, "13440": 40799.0, "13445": 40802.0, "13450": 40736.0, "13455": 40731.0, "13460": 39383.0, "13465": 41503.0, "13470": 39305.0, "13475": 38519.0, "13480": 40745.0, "13485": 39210.0, "13490": 39975.0, "13495": 40049.0, "13500": 40743.0, "13505": 40802.0, "13510": 39269.0, "13515": 39354.0, "13520": 39345.0, "13525": 40110.0, "13530": 37429.0, "13535": 40800.0, "13540": 39273.0, "13545": 37121.0, "13550": 40037.0, "13555": 38582.0, "13560": 40731.0, "13565": 40735.0, "13570": 39991.0, "13575": 37923.0, "13580": 42273.0, "13585": 39968.0, "13590": 40746.0, "13595": 40038.0, "13600": 40145.0, "13605": 35298.0, "13610": 42255.0, "13615": 35809.0, "13620": 37235.0, "13625": 36372.0, "13630": 40024.0, "13635": 40735.0, "13640": 40740.0, "13645": 40812.0, "13650": 40810.0, "13655": 39262.0, "13660": 40051.0, "13665": 38678.0, "13670": 40802.0, "13675": 41511.0, "13680": 41512.0, "13685": 38584.0, "13690": 39435.0, "13695": 39372.0, "13700": 40028.0, "13705": 40052.0, "13710": 41505.0, "13715": 40041.0, "13720": 38657.0, "13725": 37835.0, "13730": 42265.0, "13735": 40181.0, "13740": 41505.0, "13745": 40735.0, "13750": 38525.0, "13755": 40037.0, "13760": 40221.0, "13765": 39977.0, "13770": 39979.0, "13775": 40737.0, "13780": 41580.0, "13785": 38595.0, "13790": 39290.0, "13795": 40736.0, "13800": 39996.0, "13805": 40734.0, "13810": 40876.0, "13815": 37189.0, "13820": 39268.0, "13825": 40791.0, "13830": 41588.0, "13835": 40160.0, "13840": 40728.0, "13845": 38506.0, "13850": 38003.0, "13855": 40730.0, "13860": 39425.0, "13865": 40746.0, "13870": 41492.0, "13875": 39262.0, "13880": 39271.0, "13885": 38533.0, "13890": 38072.0, "13895": 42260.0, "13900": 40028.0, "13905": 37764.0, "13910": 39427.0, "13915": 41583.0, "13920": 39296.0, "13925": 41575.0, "13930": 40047.0, "13935": 38534.0, "13940": 41565.0, "13945": 39975.0, "13950": 40725.0, "13955": 39443.0, "13960": 42261.0, "13965": 40051.0, "13970": 37837.0, "13975": 39338.0, "13980": 38514.0, "13985": 41491.0, "13990": 41506.0, "13995": 40884.0, "14000": 40737.0, "14005": 40105.0, "14010": 40819.0, "14015": 39991.0, "14020": 41590.0, "14025": 40757.0, "14030": 41572.0, "14035": 40824.0, "14040": 39445.0, "14045": 38650.0, "14050": 40816.0, "14055": 39341.0, "14060": 38683.0, "14065": 41594.0, "14070": 40817.0, "14075": 40797.0, "14080": 39970.0, "14085": 41584.0, "14090": 41583.0, "14095": 40208.0, "14100": 37753.0, "14105": 39967.0, "14110": 40744.0, "14115": 40146.0, "14120": 38632.0, "14125": 40796.0, "14130": 38667.0, "14135": 40805.0, "14140": 42279.0, "14145": 39982.0, "14150": 39463.0, "14155": 41492.0, "14160": 39446.0, "14165": 38603.0, "14170": 41500.0, "14175": 40043.0, "14180": 40877.0, "14185": 40126.0, "14190": 39372.0, "14195": 38568.0, "14200": 38652.0, "14205": 39209.0, "14210": 39269.0, "14215": 38547.0, "14220": 40052.0, "14225": 42283.0, "14230": 38641.0, "14235": 40894.0, "14240": 39962.0, "14245": 39364.0, "14250": 39231.0, "14255": 38657.0, "14260": 38644.0, "14265": 40059.0, "14270": 41590.0, "14275": 40791.0, "14280": 41559.0, "14285": 39214.0, "14290": 42267.0, "14295": 40794.0, "14300": 38608.0, "14305": 41588.0, "14310": 41572.0, "14315": 40136.0, "14320": 38664.0, "14325": 40877.0, "14330": 39981.0, "14335": 40816.0, "14340": 40036.0, "14345": 40046.0, "14350": 39307.0, "14355": 40053.0, "14360": 39288.0, "14365": 39218.0, "14370": 41578.0, "14375": 41511.0, "14380": 40730.0, "14385": 36640.0, "14390": 40744.0, "14395": 39296.0, 
"14400": 39427.0, "14405": 38595.0, "14410": 40793.0, "14415": 40056.0, "14420": 40805.0, "14425": 41580.0, "14430": 41496.0, "14435": 41561.0, "14440": 37851.0, "14445": 39976.0, "14450": 40129.0, "14455": 39454.0, "14460": 40136.0, "14465": 41514.0, "14470": 40718.0, "14475": 40047.0, "14480": 39437.0, "14485": 41575.0, "14490": 38652.0, "14495": 41490.0, "14500": 40026.0, "14505": 39978.0, "14510": 38525.0, "14515": 39195.0, "14520": 42278.0, "14525": 40810.0, "14530": 39195.0, "14535": 41513.0, "14540": 35171.0, "14545": 40799.0, "14550": 40732.0, "14555": 40737.0, "14560": 38671.0, "14565": 37213.0, "14570": 39295.0, "14575": 39212.0, "14580": 39278.0, "14585": 40130.0, "14590": 40734.0, "14595": 40793.0, "14600": 39349.0, "14605": 40885.0, "14610": 40795.0, "14615": 38593.0, "14620": 39437.0, "14625": 40727.0, "14630": 41510.0, "14635": 40054.0, "14640": 39361.0, "14645": 38712.0, "14650": 41586.0, "14655": 40044.0, "14660": 39436.0, "14665": 40130.0, "14670": 41574.0, "14675": 38455.0, "14680": 39353.0, "14685": 38676.0, "14690": 40191.0, "14695": 39966.0, "14700": 39340.0, "14705": 40816.0, "14710": 42267.0, "14715": 40129.0, "14720": 40209.0, "14725": 40811.0, "14730": 42342.0, "14735": 38604.0, "14740": 40735.0, "14745": 40839.0, "14750": 39289.0, "14755": 41598.0, "14760": 39364.0, "14765": 40044.0, "14770": 40105.0, "14775": 40021.0, "14780": 41565.0, "14785": 38583.0, "14790": 41571.0, "14795": 39371.0, "14800": 38518.0, "14805": 41567.0, "14810": 42279.0, "14815": 40125.0, "14820": 38531.0, "14825": 42274.0, "14830": 40820.0, "14835": 41573.0, "14840": 41577.0, "14845": 38776.0, "14850": 42274.0, "14855": 39292.0, "14860": 39333.0, "14865": 40051.0, "14870": 40809.0, "14875": 40809.0, "14880": 40032.0, "14885": 39336.0, "14890": 40732.0, "14895": 40052.0, "14900": 39208.0, "14905": 40809.0, "14910": 40838.0, "14915": 39417.0, "14920": 40040.0, "14925": 36552.0, "14930": 40729.0, "14935": 38076.0, "14940": 40043.0, "14945": 40137.0, "14950": 40728.0, "14955": 42266.0, "14960": 38514.0, "14965": 40055.0, "14970": 41518.0, "14975": 40120.0, "14980": 40018.0, "14985": 38592.0, "14990": 40128.0, "14995": 41586.0, "15000": 40737.0, "15005": 40046.0, "15010": 40051.0, "15015": 41499.0, "15020": 40813.0, "15025": 40022.0, "15030": 39274.0, "15035": 39991.0, "15040": 40826.0, "15045": 40927.0, "15050": 38442.0, "15055": 42258.0, "15060": 41493.0, "15065": 39972.0, "15070": 41579.0, "15075": 41496.0, "15080": 36417.0, "15085": 38596.0, "15090": 40044.0, "15095": 41581.0, "15100": 40053.0, "15105": 40729.0, "15110": 40728.0, "15115": 41504.0, "15120": 37969.0, "15125": 39312.0, "15130": 39585.0, "15135": 38515.0, "15140": 40737.0, "15145": 38468.0, "15150": 40826.0, "15155": 40730.0, "15160": 40814.0, "15165": 40032.0, "15170": 42267.0, "15175": 40868.0, "15180": 39350.0, "15185": 40812.0, "15190": 41568.0, "15195": 39444.0, "15200": 39216.0, "15205": 41503.0, "15210": 41592.0, "15215": 38623.0, "15220": 40810.0, "15225": 40050.0, "15230": 40734.0, "15235": 40031.0, "15240": 42267.0, "15245": 40808.0, "15250": 40050.0, "15255": 41511.0, "15260": 37830.0, "15265": 38810.0, "15270": 41506.0, "15275": 38659.0, "15280": 40198.0, "15285": 39363.0, "15290": 41572.0, "15295": 37719.0, "15300": 40911.0, "15305": 39343.0, "15310": 40799.0, "15315": 40813.0, "15320": 40828.0, "15325": 40828.0, "15330": 41505.0, "15335": 40742.0, "15340": 40805.0, "15345": 40818.0, "15350": 40035.0, "15355": 42336.0, "15360": 39364.0, "15365": 39448.0, "15370": 41504.0, "15375": 42256.0, "15380": 41584.0, "15385": 
39373.0, "15390": 39278.0, "15395": 37924.0, "15400": 37204.0, "15405": 40739.0, "15410": 40051.0, "15415": 40271.0, "15420": 37387.0, "15425": 40735.0, "15430": 40729.0, "15435": 39430.0, "15440": 38448.0, "15445": 41501.0, "15450": 39988.0, "15455": 40806.0, "15460": 41655.0, "15465": 40738.0, "15470": 39351.0, "15475": 40741.0, "15480": 37917.0, "15485": 39961.0, "15490": 39362.0, "15495": 40741.0, "15500": 37951.0, "15505": 40051.0, "15510": 40057.0, "15515": 42263.0, "15520": 40814.0, "15525": 38661.0, "15530": 40731.0, "15535": 38703.0, "15540": 42349.0, "15545": 39392.0, "15550": 38538.0, "15555": 40750.0, "15560": 40890.0, "15565": 38592.0, "15570": 38452.0, "15575": 38524.0, "15580": 40743.0, "15585": 40815.0, "15590": 39370.0, "15595": 39361.0, "15600": 40058.0, "15605": 38733.0, "15610": 39279.0, "15615": 40741.0, "15620": 40891.0, "15625": 39993.0, "15630": 40913.0, "15635": 41581.0, "15640": 38093.0, "15645": 39506.0, "15650": 39984.0, "15655": 39435.0, "15660": 37116.0, "15665": 40047.0, "15670": 38630.0, "15675": 40816.0, "15680": 40041.0, "15685": 40810.0, "15690": 40809.0, "15695": 40139.0, "15700": 40800.0, "15705": 40126.0, "15710": 39418.0, "15715": 40041.0, "15720": 40754.0, "15725": 40736.0, "15730": 38640.0, "15735": 38762.0, "15740": 41512.0, "15745": 40740.0, "15750": 40812.0, "15755": 40045.0, "15760": 40798.0, "15765": 39340.0, "15770": 40043.0, "15775": 41581.0, "15780": 41504.0, "15785": 42265.0, "15790": 40753.0, "15795": 40062.0, "15800": 40128.0, "15805": 38605.0, "15810": 39285.0, "15815": 38673.0, "15820": 37310.0, "15825": 39362.0, "15830": 38503.0, "15835": 40190.0, "15840": 39970.0, "15845": 37264.0, "15850": 41590.0, "15855": 41564.0, "15860": 39451.0, "15865": 40204.0, "15870": 40749.0, "15875": 40814.0, "15880": 39509.0, "15885": 40798.0, "15890": 39366.0, "15895": 40741.0, "15900": 40827.0, "15905": 40816.0, "15910": 39961.0, "15915": 38585.0, "15920": 41577.0, "15925": 40804.0, "15930": 41495.0, "15935": 39267.0, "15940": 39430.0, "15945": 40817.0, "15950": 38601.0, "15955": 39286.0, "15960": 39297.0, "15965": 40793.0, "15970": 39356.0, "15975": 39304.0, "15980": 41502.0, "15985": 38654.0, "15990": 37207.0, "15995": 40732.0, "16000": 41572.0, "16005": 40066.0, "16010": 40756.0, "16015": 39425.0, "16020": 38673.0, "16025": 40725.0, "16030": 37354.0, "16035": 37952.0, "16040": 41490.0, "16045": 37970.0, "16050": 39438.0, "16055": 41496.0, "16060": 41503.0, "16065": 41574.0, "16070": 42356.0, "16075": 38752.0, "16080": 38439.0, "16085": 40815.0, "16090": 40800.0, "16095": 39961.0, "16100": 39284.0, "16105": 40815.0, "16110": 37822.0, "16115": 39271.0, "16120": 39989.0, "16125": 40119.0, "16130": 41502.0, "16135": 40732.0, "16140": 39354.0, "16145": 39349.0, "16150": 40720.0, "16155": 40818.0, "16160": 40047.0, "16165": 38058.0, "16170": 40811.0, "16175": 38669.0, "16180": 40047.0, "16185": 41516.0, "16190": 37950.0, "16195": 41504.0, "16200": 41573.0, "16205": 40032.0, "16210": 40727.0, "16215": 38683.0, "16220": 40112.0, "16225": 39985.0, "16230": 40803.0, "16235": 40812.0, "16240": 37823.0, "16245": 40745.0, "16250": 41495.0, "16255": 39975.0, "16260": 40048.0, "16265": 40045.0, "16270": 40724.0, "16275": 41490.0, "16280": 39345.0, "16285": 37965.0, "16290": 41577.0, "16295": 41497.0, "16300": 40744.0, "16305": 39311.0, "16310": 39275.0, "16315": 40059.0, "16320": 40052.0, "16325": 40821.0, "16330": 40043.0, "16335": 38758.0, "16340": 40820.0, "16345": 38630.0, "16350": 42264.0, "16355": 40814.0, "16360": 40120.0, "16365": 40902.0, "16370": 37822.0, 
"16375": 40893.0, "16380": 39977.0, "16385": 39289.0, "16390": 38817.0, "16395": 40111.0, "16400": 39403.0, "16405": 38601.0, "16410": 41501.0, "16415": 37749.0, "16420": 39983.0, "16425": 41564.0, "16430": 40818.0, "16435": 36422.0, "16440": 39409.0, "16445": 38675.0, "16450": 38117.0, "16455": 41499.0, "16460": 41588.0, "16465": 40810.0, "16470": 40839.0, "16475": 42258.0, "16480": 40803.0, "16485": 40105.0, "16490": 40050.0, "16495": 37069.0, "16500": 39358.0, "16505": 40810.0, "16510": 37262.0, "16515": 42354.0, "16520": 37421.0, "16525": 40095.0, "16530": 40036.0, "16535": 40749.0, "16540": 40052.0, "16545": 39273.0, "16550": 40741.0, "16555": 39368.0, "16560": 40068.0, "16565": 40129.0, "16570": 38654.0, "16575": 38443.0, "16580": 39281.0, "16585": 40802.0, "16590": 40827.0, "16595": 40739.0, "16600": 40047.0, "16605": 41517.0, "16610": 40818.0, "16615": 41508.0, "16620": 39222.0, "16625": 40749.0, "16630": 42260.0, "16635": 40050.0, "16640": 40039.0, "16645": 37828.0, "16650": 39279.0, "16655": 41565.0, "16660": 40808.0, "16665": 40797.0, "16670": 40122.0, "16675": 41586.0, "16680": 40832.0, "16685": 40056.0, "16690": 41505.0, "16695": 40802.0, "16700": 38578.0, "16705": 38731.0, "16710": 41506.0, "16715": 40054.0, "16720": 40040.0, "16725": 39212.0, "16730": 39283.0, "16735": 40040.0, "16740": 39445.0, "16745": 38546.0, "16750": 40731.0, "16755": 40890.0, "16760": 37981.0, "16765": 39210.0, "16770": 40043.0, "16775": 38581.0, "16780": 39419.0, "16785": 40723.0, "16790": 40838.0, "16795": 39229.0, "16800": 39299.0, "16805": 40040.0, "16810": 39965.0, "16815": 39331.0, "16820": 40738.0, "16825": 40061.0, "16830": 37205.0, "16835": 40046.0, "16840": 39438.0, "16845": 37122.0, "16850": 40836.0, "16855": 41515.0, "16860": 39277.0, "16865": 40740.0, "16870": 40038.0, "16875": 40110.0, "16880": 40735.0, "16885": 40816.0, "16890": 38595.0, "16895": 42267.0, "16900": 39205.0, "16905": 38494.0, "16910": 38591.0, "16915": 41593.0, "16920": 40048.0, "16925": 42263.0, "16930": 40801.0, "16935": 40760.0, "16940": 37693.0, "16945": 39971.0, "16950": 38593.0, "16955": 40056.0, "16960": 43091.0, "16965": 40055.0, "16970": 40053.0, "16975": 41506.0, "16980": 37130.0, "16985": 40756.0, "16990": 40110.0, "16995": 41587.0, "17000": 40803.0, "17005": 41657.0, "17010": 40047.0, "17015": 37973.0, "17020": 40739.0, "17025": 37730.0, "17030": 39370.0, "17035": 39340.0, "17040": 40735.0, "17045": 37951.0, "17050": 13797.0, "17055": 38507.0, "17060": 37986.0, "17065": 39281.0, "17070": 41492.0, "17075": 40808.0, "17080": 36939.0, "17085": 42265.0, "17090": 39256.0, "17095": 40187.0, "17100": 40061.0, "17105": 40823.0, "17110": 39437.0, "17115": 40038.0, "17120": 39300.0, "17125": 41572.0, "17130": 40052.0, "17135": 38800.0, "17140": 38038.0, "17145": 40747.0, "17150": 40740.0, "17155": 40809.0, "17160": 39486.0, "17165": 40739.0, "17170": 41503.0, "17175": 40052.0, "17180": 40803.0, "17185": 40830.0, "17190": 37989.0, "17195": 40815.0, "17200": 40116.0, "17205": 40042.0, "17210": 40819.0, "17215": 41516.0, "17220": 40020.0, "17225": 40730.0, "17230": 40036.0, "17235": 40033.0, "17240": 39366.0, "17245": 40918.0, "17250": 39397.0, "17255": 40736.0, "17260": 40884.0, "17265": 40038.0, "17270": 39954.0, "17275": 41515.0, "17280": 39389.0, "17285": 40755.0, "17290": 38617.0, "17295": 42270.0, "17300": 37906.0, "17305": 41504.0, "17310": 40191.0, "17315": 40115.0, "17320": 42279.0, "17325": 40034.0, "17330": 39971.0, "17335": 40128.0, "17340": 39346.0, "17345": 40808.0, "17350": 40735.0, "17355": 38650.0, "17360": 
39971.0, "17365": 38599.0, "17370": 40808.0, "17375": 38504.0, "17380": 35643.0, "17385": 40042.0, "17390": 35215.0, "17395": 39294.0, "17400": 40042.0, "17405": 39355.0, "17410": 38675.0, "17415": 41514.0, "17420": 40740.0, "17425": 39968.0, "17430": 41505.0, "17435": 40807.0, "17440": 40813.0, "17445": 40048.0, "17450": 40821.0, "17455": 40800.0, "17460": 40043.0, "17465": 38739.0, "17470": 39281.0, "17475": 40028.0, "17480": 40067.0, "17485": 40195.0, "17490": 40044.0, "17495": 39977.0, "17500": 36668.0, "17505": 38584.0, "17510": 40902.0, "17515": 38523.0, "17520": 41585.0, "17525": 40817.0, "17530": 40732.0, "17535": 41495.0, "17540": 41509.0, "17545": 42262.0, "17550": 39340.0, "17555": 39297.0, "17560": 41517.0, "17565": 39413.0, "17570": 40849.0, "17575": 39361.0, "17580": 40737.0, "17585": 39345.0, "17590": 39343.0, "17595": 40045.0, "17600": 37262.0, "17605": 36548.0, "17610": 40143.0, "17615": 40103.0, "17620": 40800.0, "17625": 40798.0, "17630": 39348.0, "17635": 40746.0, "17640": 40143.0, "17645": 40894.0, "17650": 38649.0, "17655": 41592.0, "17660": 39431.0, "17665": 40041.0, "17670": 42274.0, "17675": 40905.0, "17680": 38068.0, "17685": 40794.0, "17690": 40738.0, "17695": 40819.0, "17700": 39383.0, "17705": 40814.0, "17710": 41584.0, "17715": 42278.0, "17720": 40741.0, "17725": 40820.0, "17730": 39971.0, "17735": 41498.0, "17740": 42277.0, "17745": 39292.0, "17750": 40745.0, "17755": 42353.0, "17760": 38646.0, "17765": 40035.0, "17770": 41499.0, "17775": 40732.0, "17780": 40744.0, "17785": 40750.0, "17790": 42260.0, "17795": 38563.0, "17800": 40106.0, "17805": 40742.0, "17810": 41498.0, "17815": 37768.0, "17820": 40814.0, "17825": 40130.0, "17830": 40080.0, "17835": 39977.0, "17840": 40111.0, "17845": 41507.0, "17850": 39282.0, "17855": 38548.0, "17860": 40103.0, "17865": 39403.0, "17870": 40047.0, "17875": 38521.0, "17880": 40740.0, "17885": 38675.0, "17890": 40791.0, "17895": 39292.0, "17900": 40061.0, "17905": 41509.0, "17910": 40737.0, "17915": 40003.0, "17920": 39255.0, "17925": 41500.0, "17930": 40048.0, "17935": 39971.0, "17940": 40043.0, "17945": 39963.0, "17950": 39992.0, "17955": 39306.0, "17960": 39971.0, "17965": 38605.0, "17970": 41496.0, "17975": 40178.0, "17980": 40798.0, "17985": 40036.0, "17990": 40046.0, "17995": 36514.0, "18000": 40069.0, "18005": 40050.0, "18010": 39217.0, "18015": 38648.0, "18020": 39348.0, "18025": 39330.0, "18030": 40806.0, "18035": 40113.0, "18040": 40210.0, "18045": 39361.0, "18050": 40728.0, "18055": 40043.0, "18060": 40747.0, "18065": 40060.0, "18070": 42270.0, "18075": 37211.0, "18080": 40838.0, "18085": 40124.0, "18090": 41498.0, "18095": 40046.0, "18100": 36494.0, "18105": 40730.0, "18110": 42265.0, "18115": 40739.0, "18120": 38658.0, "18125": 40809.0, "18130": 43028.0, "18135": 40736.0, "18140": 39347.0, "18145": 40033.0, "18150": 40730.0, "18155": 40054.0, "18160": 40830.0, "18165": 41565.0, "18170": 41498.0, "18175": 40745.0, "18180": 39222.0, "18185": 37972.0, "18190": 40734.0, "18195": 39452.0, "18200": 40805.0, "18205": 40808.0, "18210": 37881.0, "18215": 40874.0, "18220": 39361.0, "18225": 39504.0, "18230": 41510.0, "18235": 40043.0, "18240": 41507.0, "18245": 40737.0, "18250": 40136.0, "18255": 41502.0, "18260": 39223.0, "18265": 38556.0, "18270": 38677.0, "18275": 41576.0, "18280": 39287.0, "18285": 40814.0, "18290": 39988.0, "18295": 40893.0, "18300": 39366.0, "18305": 41584.0, "18310": 37803.0, "18315": 38578.0, "18320": 40728.0, "18325": 39366.0, "18330": 39279.0, "18335": 38516.0, "18340": 38441.0, "18345": 40739.0, 
"18350": 37806.0, "18355": 41505.0, "18360": 38682.0, "18365": 37175.0, "18370": 41572.0, "18375": 39444.0, "18380": 38582.0, "18385": 38450.0, "18390": 40726.0, "18395": 39299.0, "18400": 38801.0, "18405": 40732.0, "18410": 38504.0, "18415": 39359.0, "18420": 41491.0, "18425": 38878.0, "18430": 39264.0, "18435": 39213.0, "18440": 40050.0, "18445": 39464.0, "18450": 39359.0, "18455": 41556.0, "18460": 40038.0, "18465": 38516.0, "18470": 38530.0, "18475": 38569.0, "18480": 41580.0, "18485": 37736.0, "18490": 39278.0, "18495": 39364.0, "18500": 40889.0, "18505": 39360.0, "18510": 39288.0, "18515": 41509.0, "18520": 40816.0, "18525": 39978.0, "18530": 40212.0, "18535": 37383.0, "18540": 39437.0, "18545": 40060.0, "18550": 39982.0, "18555": 39265.0, "18560": 38679.0, "18565": 37321.0, "18570": 40211.0, "18575": 39266.0, "18580": 39373.0, "18585": 39321.0, "18590": 40129.0, "18595": 39978.0, "18600": 40810.0, "18605": 40883.0, "18610": 39222.0, "18615": 40135.0, "18620": 41496.0, "18625": 39287.0, "18630": 39282.0, "18635": 38657.0, "18640": 40062.0, "18645": 40812.0, "18650": 37884.0, "18655": 39347.0, "18660": 37837.0, "18665": 39270.0, "18670": 42261.0, "18675": 41510.0, "18680": 39448.0, "18685": 40066.0, "18690": 41513.0, "18695": 41584.0, "18700": 38507.0, "18705": 42348.0, "18710": 41578.0, "18715": 36543.0, "18720": 39269.0, "18725": 38577.0, "18730": 40744.0, "18735": 41498.0, "18740": 39214.0, "18745": 40746.0, "18750": 41507.0, "18755": 39211.0, "18760": 38576.0, "18765": 39348.0, "18770": 40816.0, "18775": 38664.0, "18780": 37985.0, "18785": 38578.0, "18790": 40048.0, "18795": 38649.0, "18800": 41501.0, "18805": 40808.0, "18810": 40133.0, "18815": 40739.0, "18820": 40804.0, "18825": 36614.0, "18830": 37757.0, "18835": 36003.0, "18840": 38594.0, "18845": 37758.0, "18850": 37232.0, "18855": 38515.0, "18860": 40049.0, "18865": 40053.0, "18870": 35769.0, "18875": 39975.0, "18880": 41508.0, "18885": 38718.0, "18890": 37994.0, "18895": 38498.0, "18900": 38658.0, "18905": 40733.0, "18910": 39222.0, "18915": 39447.0, "18920": 39965.0, "18925": 37960.0, "18930": 41502.0, "18935": 40127.0, "18940": 40759.0, "18945": 40055.0, "18950": 41503.0, "18955": 40732.0, "18960": 41583.0, "18965": 38756.0, "18970": 40817.0, "18975": 40808.0, "18980": 40745.0, "18985": 40114.0, "18990": 39532.0, "18995": 39429.0, "19000": 40048.0, "19005": 40886.0, "19010": 41570.0, "19015": 40726.0, "19020": 42343.0, "19025": 40745.0, "19030": 39339.0, "19035": 38527.0, "19040": 41562.0, "19045": 38655.0, "19050": 39419.0, "19055": 40816.0, "19060": 40802.0, "19065": 40026.0, "19070": 41585.0, "19075": 41505.0, "19080": 39276.0, "19085": 41569.0, "19090": 37767.0, "19095": 39206.0, "19100": 40887.0, "19105": 40027.0, "19110": 41499.0, "19115": 40106.0, "19120": 39264.0, "19125": 38039.0, "19130": 40812.0, "19135": 40735.0, "19140": 39984.0, "19145": 40030.0, "19150": 38654.0, "19155": 40734.0, "19160": 40827.0, "19165": 37975.0, "19170": 40117.0, "19175": 40731.0, "19180": 37145.0, "19185": 39968.0, "19190": 35911.0, "19195": 38499.0, "19200": 39434.0, "19205": 34589.0, "19210": 41506.0, "19215": 40139.0, "19220": 40740.0, "19225": 40143.0, "19230": 40035.0, "19235": 40048.0, "19240": 40052.0, "19245": 40815.0, "19250": 40021.0, "19255": 37915.0, "19260": 37918.0, "19265": 40051.0, "19270": 40828.0, "19275": 39209.0, "19280": 39316.0, "19285": 39985.0, "19290": 40748.0, "19295": 40821.0, "19300": 39280.0, "19305": 39427.0, "19310": 40875.0, "19315": 39370.0, "19320": 40067.0, "19325": 39986.0, "19330": 41502.0, "19335": 
40058.0, "19340": 39352.0, "19345": 40802.0, "19350": 39358.0, "19355": 40043.0, "19360": 40195.0, "19365": 41582.0, "19370": 40736.0, "19375": 40047.0, "19380": 40042.0, "19385": 39964.0, "19390": 39288.0, "19395": 40805.0, "19400": 40811.0, "19405": 40897.0, "19410": 40044.0, "19415": 40876.0, "19420": 40828.0, "19425": 39290.0, "19430": 41593.0, "19435": 40735.0, "19440": 41508.0, "19445": 40052.0, "19450": 40130.0, "19455": 41509.0, "19460": 41499.0, "19465": 39342.0, "19470": 39439.0, "19475": 39359.0, "19480": 40057.0, "19485": 40220.0, "19490": 40037.0, "19495": 39985.0, "19500": 36612.0, "19505": 42264.0, "19510": 41599.0, "19515": 39384.0, "19520": 41497.0, "19525": 39955.0, "19530": 39303.0, "19535": 39988.0, "19540": 39291.0, "19545": 38684.0, "19550": 40039.0, "19555": 40027.0, "19560": 39276.0, "19565": 40131.0, "19570": 42343.0, "19575": 39343.0, "19580": 40820.0, "19585": 40038.0, "19590": 40124.0, "19595": 40210.0, "19600": 39268.0, "19605": 40055.0, "19610": 40742.0, "19615": 37838.0, "19620": 39973.0, "19625": 37839.0, "19630": 41585.0, "19635": 40093.0, "19640": 39341.0, "19645": 40071.0, "19650": 40743.0, "19655": 40182.0, "19660": 40035.0, "19665": 40739.0, "19670": 39965.0, "19675": 40051.0, "19680": 41501.0, "19685": 40037.0, "19690": 40841.0, "19695": 40816.0, "19700": 40730.0, "19705": 38648.0, "19710": 40043.0, "19715": 36591.0, "19720": 40050.0, "19725": 40033.0, "19730": 35969.0, "19735": 40046.0, "19740": 39268.0, "19745": 39195.0, "19750": 39372.0, "19755": 40060.0, "19760": 41589.0, "19765": 39339.0, "19770": 40037.0, "19775": 40786.0, "19780": 39966.0, "19785": 40752.0, "19790": 39214.0, "19795": 39276.0, "19800": 39977.0, "19805": 40809.0, "19810": 40732.0, "19815": 39548.0, "19820": 37363.0, "19825": 39288.0, "19830": 41496.0, "19835": 40821.0, "19840": 38437.0, "19845": 39987.0, "19850": 40056.0, "19855": 41560.0, "19860": 41505.0, "19865": 43038.0, "19870": 39362.0, "19875": 40112.0, "19880": 37191.0, "19885": 39230.0, "19890": 40821.0, "19895": 39208.0, "19900": 42282.0, "19905": 40869.0, "19910": 40037.0, "19915": 40140.0, "19920": 39213.0, "19925": 38824.0, "19930": 40868.0, "19935": 40059.0, "19940": 40751.0, "19945": 40805.0, "19950": 39210.0, "19955": 38652.0, "19960": 39279.0, "19965": 42285.0, "19970": 40884.0, "19975": 39960.0, "19980": 40030.0, "19985": 40034.0, "19990": 40806.0, "19995": 38769.0, "20000": 36460.0, "20005": 39424.0, "20010": 41498.0, "20015": 40808.0, "20020": 40819.0, "20025": 40043.0, "20030": 37226.0, "20035": 38586.0, "20040": 40737.0, "20045": 37903.0, "20050": 40883.0, "20055": 39343.0, "20060": 39965.0, "20065": 36638.0, "20070": 40736.0, "20075": 41572.0, "20080": 39288.0, "20085": 40752.0, "20090": 40740.0, "20095": 40797.0, "20100": 40754.0, "20105": 40049.0, "20110": 40050.0, "20115": 40045.0, "20120": 38589.0, "20125": 38749.0, "20130": 39288.0, "20135": 39347.0, "20140": 35267.0, "20145": 40737.0, "20150": 40734.0, "20155": 38678.0, "20160": 38586.0, "20165": 40817.0, "20170": 40041.0, "20175": 40733.0, "20180": 39380.0, "20185": 40826.0, "20190": 39301.0, "20195": 40733.0, "20200": 40043.0, "20205": 39350.0, "20210": 40049.0, "20215": 39292.0, "20220": 39296.0, "20225": 39978.0, "20230": 40119.0, "20235": 39288.0, "20240": 37363.0, "20245": 39366.0, "20250": 37977.0, "20255": 39457.0, "20260": 41516.0, "20265": 40732.0, "20270": 40210.0, "20275": 40800.0, "20280": 40879.0, "20285": 41496.0, "20290": 40820.0, "20295": 41593.0, "20300": 40813.0, "20305": 40909.0, "20310": 41505.0, "20315": 40048.0, "20320": 39980.0, 
"20325": 39293.0, "20330": 39291.0, "20335": 39256.0, "20340": 39298.0, "20345": 38598.0, "20350": 39425.0, "20355": 37961.0, "20360": 38744.0, "20365": 38076.0, "20370": 39357.0, "20375": 40737.0, "20380": 38772.0, "20385": 40795.0, "20390": 39340.0, "20395": 39274.0, "20400": 39275.0, "20405": 39517.0, "20410": 40116.0, "20415": 39364.0, "20420": 37468.0, "20425": 40118.0, "20430": 40736.0, "20435": 40030.0, "20440": 40813.0, "20445": 40044.0, "20450": 40066.0, "20455": 40808.0, "20460": 40808.0, "20465": 40739.0, "20470": 41566.0, "20475": 40133.0, "20480": 40920.0, "20485": 40117.0, "20490": 39418.0, "20495": 40738.0, "20500": 39384.0, "20505": 39971.0, "20510": 42272.0, "20515": 39274.0, "20520": 40808.0, "20525": 38582.0, "20530": 40046.0, "20535": 40039.0, "20540": 41577.0, "20545": 40076.0, "20550": 40118.0, "20555": 37958.0, "20560": 38732.0, "20565": 40132.0, "20570": 39461.0, "20575": 37299.0, "20580": 39424.0, "20585": 40028.0, "20590": 40747.0, "20595": 42277.0, "20600": 40818.0, "20605": 41571.0, "20610": 40044.0, "20615": 41507.0, "20620": 40038.0, "20625": 38748.0, "20630": 39511.0, "20635": 39262.0, "20640": 39326.0, "20645": 39362.0, "20650": 40121.0, "20655": 40990.0, "20660": 39356.0, "20665": 41588.0, "20670": 36413.0, "20675": 38024.0, "20680": 41579.0, "20685": 41592.0, "20690": 41564.0, "20695": 40144.0, "20700": 39332.0, "20705": 41582.0, "20710": 40115.0, "20715": 39282.0, "20720": 39966.0, "20725": 40827.0, "20730": 40811.0, "20735": 39279.0, "20740": 40812.0, "20745": 40821.0, "20750": 40037.0, "20755": 36974.0, "20760": 40050.0, "20765": 39383.0, "20770": 37039.0, "20775": 40807.0, "20780": 40119.0, "20785": 40746.0, "20790": 40909.0, "20795": 38677.0, "20800": 42271.0, "20805": 40732.0, "20810": 36595.0, "20815": 40740.0, "20820": 38597.0, "20825": 40742.0, "20830": 40813.0, "20835": 40063.0, "20840": 40042.0, "20845": 39970.0, "20850": 38653.0, "20855": 40054.0, "20860": 39414.0, "20865": 40804.0, "20870": 40824.0, "20875": 37965.0, "20880": 39270.0, "20885": 38452.0, "20890": 40818.0, "20895": 37821.0, "20900": 39283.0, "20905": 40201.0, "20910": 39433.0, "20915": 41498.0, "20920": 40045.0, "20925": 38502.0, "20930": 37913.0, "20935": 41497.0, "20940": 39958.0, "20945": 41497.0, "20950": 41587.0, "20955": 40134.0, "20960": 39969.0, "20965": 41505.0, "20970": 39291.0, "20975": 40188.0, "20980": 38632.0, "20985": 40821.0, "20990": 42276.0, "20995": 40814.0, "21000": 39976.0, "21005": 41580.0, "21010": 40062.0, "21015": 37175.0, "21020": 39270.0, "21025": 39286.0, "21030": 40824.0, "21035": 40743.0, "21040": 40739.0, "21045": 38517.0, "21050": 37353.0, "21055": 40046.0, "21060": 39436.0, "21065": 40737.0, "21070": 38597.0, "21075": 41522.0, "21080": 40041.0, "21085": 40881.0, "21090": 40885.0, "21095": 38741.0, "21100": 40809.0, "21105": 40737.0, "21110": 40815.0, "21115": 39419.0, "21120": 41502.0, "21125": 39426.0, "21130": 40047.0, "21135": 38092.0, "21140": 40750.0, "21145": 41507.0, "21150": 40046.0, "21155": 40129.0, "21160": 40114.0, "21165": 41562.0, "21170": 39977.0, "21175": 40746.0, "21180": 41574.0, "21185": 39339.0, "21190": 40066.0, "21195": 39298.0, "21200": 39980.0, "21205": 39975.0, "21210": 39283.0, "21215": 43032.0, "21220": 38659.0, "21225": 40746.0, "21230": 39262.0, "21235": 41514.0, "21240": 40114.0, "21245": 39363.0, "21250": 37202.0, "21255": 40807.0, "21260": 40122.0, "21265": 41615.0, "21270": 42268.0, "21275": 38612.0, "21280": 38453.0, "21285": 39347.0, "21290": 40871.0, "21295": 41511.0, "21300": 39280.0, "21305": 39959.0, "21310": 
42271.0, "21315": 40818.0, "21320": 39358.0, "21325": 40797.0, "21330": 42280.0, "21335": 38036.0, "21340": 40048.0, "21345": 38723.0, "21350": 40102.0, "21355": 37067.0, "21360": 40746.0, "21365": 40821.0, "21370": 41580.0, "21375": 40811.0, "21380": 41579.0, "21385": 38525.0, "21390": 40807.0, "21395": 39342.0, "21400": 37835.0, "21405": 40740.0, "21410": 38523.0, "21415": 40034.0, "21420": 39343.0, "21425": 40064.0, "21430": 38731.0, "21435": 37741.0, "21440": 40072.0, "21445": 40882.0, "21450": 38485.0, "21455": 40038.0, "21460": 39265.0, "21465": 37987.0, "21470": 39986.0, "21475": 40151.0, "21480": 41567.0, "21485": 41506.0, "21490": 40110.0, "21495": 40054.0, "21500": 40728.0, "21505": 39436.0, "21510": 40058.0, "21515": 39303.0, "21520": 39985.0, "21525": 39977.0, "21530": 39963.0, "21535": 41503.0, "21540": 39979.0, "21545": 39370.0, "21550": 39220.0, "21555": 40049.0, "21560": 39431.0, "21565": 41505.0, "21570": 39301.0, "21575": 40724.0, "21580": 40743.0, "21585": 40060.0, "21590": 40757.0, "21595": 39973.0, "21600": 40117.0, "21605": 39266.0, "21610": 40890.0, "21615": 39962.0, "21620": 38600.0, "21625": 39214.0, "21630": 38737.0, "21635": 41502.0, "21640": 40728.0, "21645": 38666.0, "21650": 41522.0, "21655": 40046.0, "21660": 40792.0, "21665": 39200.0, "21670": 40887.0, "21675": 39349.0, "21680": 38022.0, "21685": 37280.0, "21690": 40135.0, "21695": 37754.0, "21700": 39985.0, "21705": 40060.0, "21710": 40110.0, "21715": 39985.0, "21720": 40881.0, "21725": 40814.0, "21730": 40883.0, "21735": 38842.0, "21740": 39460.0, "21745": 41650.0, "21750": 40791.0, "21755": 41583.0, "21760": 38733.0, "21765": 39348.0, "21770": 38655.0, "21775": 40740.0, "21780": 41509.0, "21785": 39530.0, "21790": 40041.0, "21795": 41507.0, "21800": 39208.0, "21805": 40881.0, "21810": 39376.0, "21815": 39420.0, "21820": 39288.0, "21825": 40071.0, "21830": 38751.0, "21835": 39293.0, "21840": 40051.0, "21845": 40837.0, "21850": 36303.0, "21855": 41581.0, "21860": 40738.0, "21865": 38651.0, "21870": 40801.0, "21875": 40738.0, "21880": 39195.0, "21885": 39223.0, "21890": 38758.0, "21895": 40123.0, "21900": 39453.0, "21905": 41503.0, "21910": 41507.0, "21915": 40823.0, "21920": 40788.0, "21925": 38664.0, "21930": 40747.0, "21935": 37901.0, "21940": 38743.0, "21945": 36585.0, "21950": 40731.0, "21955": 40803.0, "21960": 39433.0, "21965": 39971.0, "21970": 39976.0, "21975": 40736.0, "21980": 36706.0, "21985": 40823.0, "21990": 40823.0, "21995": 40040.0, "22000": 40808.0, "22005": 40146.0, "22010": 39427.0, "22015": 41504.0, "22020": 40801.0, "22025": 39981.0, "22030": 40113.0, "22035": 37902.0, "22040": 40193.0, "22045": 39355.0, "22050": 41511.0, "22055": 39360.0, "22060": 38587.0, "22065": 42282.0, "22070": 41502.0, "22075": 40132.0, "22080": 38789.0, "22085": 40029.0, "22090": 40109.0, "22095": 40117.0, "22100": 41512.0, "22105": 40046.0, "22110": 39438.0, "22115": 42279.0, "22120": 41503.0, "22125": 39200.0, "22130": 40877.0, "22135": 39979.0, "22140": 41485.0, "22145": 40049.0, "22150": 38545.0, "22155": 40915.0, "22160": 40788.0, "22165": 40797.0, "22170": 41495.0, "22175": 40038.0, "22180": 39362.0, "22185": 40130.0, "22190": 40737.0, "22195": 41500.0, "22200": 41587.0, "22205": 40134.0, "22210": 41504.0, "22215": 41583.0, "22220": 39213.0, "22225": 40120.0, "22230": 40056.0, "22235": 40161.0, "22240": 41501.0, "22245": 41501.0, "22250": 39978.0, "22255": 40814.0, "22260": 39379.0, "22265": 40045.0, "22270": 39416.0, "22275": 40126.0, "22280": 38835.0, "22285": 40038.0, "22290": 37198.0, "22295": 41571.0, 
"22300": 40815.0, "22305": 39513.0, "22310": 36598.0, "22315": 37830.0, "22320": 37206.0, "22325": 40737.0, "22330": 38437.0, "22335": 39964.0, "22340": 40813.0, "22345": 41508.0, "22350": 40804.0, "22355": 40064.0, "22360": 38694.0, "22365": 38583.0, "22370": 40131.0, "22375": 40063.0, "22380": 40804.0, "22385": 40120.0, "22390": 39291.0, "22395": 39332.0, "22400": 40814.0, "22405": 39276.0, "22410": 39974.0, "22415": 40814.0, "22420": 41514.0, "22425": 38520.0, "22430": 41561.0, "22435": 39982.0, "22440": 40035.0, "22445": 41516.0, "22450": 39285.0, "22455": 41507.0, "22460": 35831.0, "22465": 39287.0, "22470": 40065.0, "22475": 39438.0, "22480": 41578.0, "22485": 40123.0, "22490": 38730.0, "22495": 42359.0, "22500": 40108.0, "22505": 40212.0, "22510": 41513.0, "22515": 40793.0, "22520": 40803.0, "22525": 40106.0, "22530": 41593.0, "22535": 40807.0, "22540": 40734.0, "22545": 37746.0, "22550": 40808.0, "22555": 39271.0, "22560": 40054.0, "22565": 41505.0, "22570": 42259.0, "22575": 42266.0, "22580": 40738.0, "22585": 40837.0, "22590": 39293.0, "22595": 40035.0, "22600": 40739.0, "22605": 38509.0, "22610": 39355.0, "22615": 36623.0, "22620": 41571.0, "22625": 40734.0, "22630": 41496.0, "22635": 40220.0, "22640": 37895.0, "22645": 40814.0, "22650": 40055.0, "22655": 40040.0, "22660": 40820.0, "22665": 40833.0, "22670": 41575.0, "22675": 38720.0, "22680": 37821.0, "22685": 40054.0, "22690": 40734.0, "22695": 40216.0, "22700": 41499.0, "22705": 41512.0, "22710": 40735.0, "22715": 41584.0, "22720": 39202.0, "22725": 38582.0, "22730": 39281.0, "22735": 40806.0, "22740": 38737.0, "22745": 38669.0, "22750": 40886.0, "22755": 38515.0, "22760": 40052.0, "22765": 40117.0, "22770": 40045.0, "22775": 40739.0, "22780": 40899.0, "22785": 40041.0, "22790": 37815.0, "22795": 39514.0, "22800": 40042.0, "22805": 41509.0, "22810": 39437.0, "22815": 37196.0, "22820": 40102.0, "22825": 40040.0, "22830": 40872.0, "22835": 42266.0, "22840": 38665.0, "22845": 40044.0, "22850": 40184.0, "22855": 40805.0, "22860": 40861.0, "22865": 40746.0, "22870": 41513.0, "22875": 39966.0, "22880": 39504.0, "22885": 41507.0, "22890": 39217.0, "22895": 40818.0, "22900": 40809.0, "22905": 40822.0, "22910": 39355.0, "22915": 37263.0, "22920": 40047.0, "22925": 39346.0, "22930": 39438.0, "22935": 39284.0, "22940": 40808.0, "22945": 40028.0, "22950": 38603.0, "22955": 40896.0, "22960": 41509.0, "22965": 39353.0, "22970": 41495.0, "22975": 40733.0, "22980": 38740.0, "22985": 38653.0, "22990": 38731.0, "22995": 38604.0, "23000": 40130.0, "23005": 41493.0, "23010": 40130.0, "23015": 39967.0, "23020": 42270.0, "23025": 41498.0, "23030": 42272.0, "23035": 38746.0, "23040": 40135.0, "23045": 40801.0, "23050": 41583.0, "23055": 36284.0, "23060": 40033.0, "23065": 40045.0, "23070": 40052.0, "23075": 39295.0, "23080": 40041.0, "23085": 39979.0, "23090": 40739.0, "23095": 39205.0, "23100": 39293.0, "23105": 40734.0, "23110": 40195.0, "23115": 39961.0, "23120": 37764.0, "23125": 40803.0, "23130": 39971.0, "23135": 42273.0, "23140": 42270.0, "23145": 41563.0, "23150": 40807.0, "23155": 40886.0, "23160": 39972.0, "23165": 39371.0, "23170": 40818.0, "23175": 40810.0, "23180": 38596.0, "23185": 39978.0, "23190": 40815.0, "23195": 39274.0, "23200": 40119.0, "23205": 39969.0, "23210": 38807.0, "23215": 40049.0, "23220": 40888.0, "23225": 40801.0, "23230": 40887.0, "23235": 38681.0, "23240": 40045.0, "23245": 41509.0, "23250": 38679.0, "23255": 37204.0, "23260": 37978.0, "23265": 38658.0, "23270": 40116.0, "23275": 40188.0, "23280": 39971.0, "23285": 
40801.0, "23290": 40752.0, "23295": 40820.0, "23300": 40818.0, "23305": 37763.0, "23310": 39443.0, "23315": 38705.0, "23320": 40039.0, "23325": 40053.0, "23330": 39214.0, "23335": 41511.0, "23340": 40036.0, "23345": 41507.0, "23350": 40890.0, "23355": 41498.0, "23360": 40143.0, "23365": 39966.0, "23370": 40061.0, "23375": 40063.0, "23380": 41493.0, "23385": 40741.0, "23390": 39978.0, "23395": 40055.0, "23400": 39214.0, "23405": 40203.0, "23410": 37275.0, "23415": 40809.0, "23420": 40218.0, "23425": 42280.0, "23430": 41505.0, "23435": 40130.0, "23440": 40051.0, "23445": 39284.0, "23450": 40735.0, "23455": 39276.0, "23460": 37924.0, "23465": 40740.0, "23470": 41571.0, "23475": 40044.0, "23480": 39505.0, "23485": 39344.0, "23490": 39487.0, "23495": 40739.0, "23500": 40122.0, "23505": 39974.0, "23510": 40048.0, "23515": 39295.0, "23520": 42270.0, "23525": 40734.0, "23530": 40077.0, "23535": 40884.0, "23540": 40817.0, "23545": 38009.0, "23550": 40732.0, "23555": 40725.0, "23560": 39360.0, "23565": 40135.0, "23570": 40073.0, "23575": 40037.0, "23580": 40062.0, "23585": 40042.0, "23590": 38588.0, "23595": 40119.0, "23600": 38777.0, "23605": 38597.0, "23610": 41523.0, "23615": 38644.0, "23620": 37900.0, "23625": 38470.0, "23630": 39468.0, "23635": 40737.0, "23640": 37761.0, "23645": 40120.0, "23650": 42270.0, "23655": 40040.0, "23660": 39261.0, "23665": 40811.0, "23670": 40053.0, "23675": 40038.0, "23680": 39362.0, "23685": 40810.0, "23690": 39369.0, "23695": 40739.0, "23700": 39360.0, "23705": 39280.0, "23710": 40041.0, "23715": 37236.0, "23720": 40740.0, "23725": 41503.0, "23730": 39436.0, "23735": 39292.0, "23740": 39323.0, "23745": 40043.0, "23750": 39975.0, "23755": 39385.0, "23760": 41512.0, "23765": 40727.0, "23770": 39978.0, "23775": 39957.0, "23780": 39430.0, "23785": 37762.0, "23790": 41588.0, "23795": 40113.0, "23800": 40049.0, "23805": 40746.0, "23810": 40206.0, "23815": 37995.0, "23820": 41579.0, "23825": 39280.0, "23830": 40131.0, "23835": 38641.0, "23840": 39972.0, "23845": 40752.0, "23850": 39283.0, "23855": 40118.0, "23860": 38596.0, "23865": 39361.0, "23870": 40046.0, "23875": 40047.0, "23880": 40818.0, "23885": 37287.0, "23890": 39286.0, "23895": 40834.0, "23900": 41567.0, "23905": 40198.0, "23910": 40059.0, "23915": 40113.0, "23920": 38679.0, "23925": 39294.0, "23930": 40742.0, "23935": 39509.0, "23940": 39269.0, "23945": 38584.0, "23950": 40043.0, "23955": 40062.0, "23960": 40898.0, "23965": 41496.0, "23970": 40896.0, "23975": 39276.0, "23980": 40892.0, "23985": 41565.0, "23990": 40895.0, "23995": 37992.0, "24000": 39215.0, "24005": 39355.0, "24010": 39339.0, "24015": 39263.0, "24020": 41579.0, "24025": 41502.0, "24030": 40738.0, "24035": 40052.0, "24040": 41511.0, "24045": 39449.0, "24050": 40817.0, "24055": 40736.0, "24060": 41523.0, "24065": 40833.0, "24070": 40815.0, "24075": 37836.0, "24080": 41493.0, "24085": 40732.0, "24090": 35990.0, "24095": 40802.0, "24100": 41506.0, "24105": 40056.0, "24110": 39416.0, "24115": 40813.0, "24120": 40809.0, "24125": 39351.0, "24130": 40055.0, "24135": 42269.0, "24140": 38617.0, "24145": 40818.0, "24150": 39285.0, "24155": 40740.0, "24160": 37835.0, "24165": 40032.0, "24170": 38747.0, "24175": 38678.0, "24180": 38676.0, "24185": 40742.0, "24190": 41583.0, "24195": 39275.0, "24200": 41555.0, "24205": 41582.0, "24210": 40740.0, "24215": 42271.0, "24220": 40129.0, "24225": 38507.0, "24230": 41507.0, "24235": 39282.0, "24240": 40731.0, "24245": 40817.0, "24250": 40066.0, "24255": 40137.0, "24260": 40740.0, "24265": 38447.0, "24270": 39285.0, 
"24275": 40751.0, "24280": 37362.0, "24285": 41508.0, "24290": 39354.0, "24295": 38701.0, "24300": 39432.0, "24305": 39276.0, "24310": 40808.0, "24315": 41508.0, "24320": 40744.0, "24325": 40750.0, "24330": 39327.0, "24335": 39279.0, "24340": 39287.0, "24345": 37831.0, "24350": 40097.0, "24355": 39346.0, "24360": 40742.0, "24365": 41563.0, "24370": 38027.0, "24375": 40890.0, "24380": 40816.0, "24385": 41508.0, "24390": 38690.0, "24395": 38529.0, "24400": 40795.0, "24405": 40882.0, "24410": 40114.0, "24415": 39439.0, "24420": 41500.0, "24425": 41510.0, "24430": 40112.0, "24435": 39306.0, "24440": 40749.0, "24445": 39278.0, "24450": 39336.0, "24455": 40812.0, "24460": 40824.0, "24465": 40122.0, "24470": 41497.0, "24475": 38784.0, "24480": 38667.0, "24485": 41517.0, "24490": 41498.0, "24495": 38682.0, "24500": 41580.0, "24505": 39328.0, "24510": 39971.0, "24515": 39278.0, "24520": 39975.0, "24525": 39349.0, "24530": 40023.0, "24535": 40822.0, "24540": 39366.0, "24545": 39204.0, "24550": 40021.0, "24555": 38453.0, "24560": 39349.0, "24565": 41566.0, "24570": 40034.0, "24575": 36725.0, "24580": 41596.0, "24585": 40054.0, "24590": 39278.0, "24595": 40048.0, "24600": 41507.0, "24605": 37754.0, "24610": 39444.0, "24615": 37100.0, "24620": 40189.0, "24625": 40049.0, "24630": 41512.0, "24635": 39480.0, "24640": 38530.0, "24645": 40804.0, "24650": 40109.0, "24655": 41508.0, "24660": 38580.0, "24665": 41507.0, "24670": 40745.0, "24675": 41577.0, "24680": 40040.0, "24685": 40742.0, "24690": 40877.0, "24695": 37958.0, "24700": 39980.0, "24705": 39350.0, "24710": 39987.0, "24715": 40730.0, "24720": 39209.0, "24725": 40156.0, "24730": 40804.0, "24735": 39268.0, "24740": 39264.0, "24745": 40738.0, "24750": 39214.0, "24755": 41523.0, "24760": 40746.0, "24765": 39354.0, "24770": 40817.0, "24775": 38659.0, "24780": 40879.0, "24785": 40113.0, "24790": 39968.0, "24795": 39360.0, "24800": 40062.0, "24805": 38741.0, "24810": 40810.0, "24815": 40115.0, "24820": 40045.0, "24825": 40046.0, "24830": 40803.0, "24835": 39368.0, "24840": 37073.0, "24845": 40178.0, "24850": 40049.0, "24855": 40068.0, "24860": 39331.0, "24865": 39364.0, "24870": 41499.0, "24875": 40100.0, "24880": 37209.0, "24885": 40729.0, "24890": 40831.0, "24895": 41570.0, "24900": 38650.0, "24905": 40740.0, "24910": 39982.0, "24915": 39978.0, "24920": 40742.0, "24925": 40041.0, "24930": 41501.0, "24935": 38048.0, "24940": 38570.0, "24945": 37981.0, "24950": 37267.0, "24955": 39473.0, "24960": 39507.0, "24965": 38588.0, "24970": 38570.0, "24975": 40752.0, "24980": 41505.0, "24985": 41505.0, "24990": 37842.0, "24995": 41503.0, "25000": 40877.0, "25005": 41508.0, "25010": 40054.0, "25015": 40050.0, "25020": 40820.0, "25025": 39330.0, "25030": 40912.0, "25035": 37109.0, "25040": 41636.0, "25045": 39346.0, "25050": 37963.0, "25055": 36678.0, "25060": 39380.0, "25065": 39280.0, "25070": 40138.0, "25075": 42279.0, "25080": 40828.0, "25085": 36539.0, "25090": 41595.0, "25095": 38060.0, "25100": 41507.0, "25105": 40808.0, "25110": 40787.0, "25115": 40731.0, "25120": 40809.0, "25125": 38588.0, "25130": 40062.0, "25135": 40735.0, "25140": 38653.0, "25145": 40805.0, "25150": 39289.0, "25155": 37772.0, "25160": 40109.0, "25165": 40743.0, "25170": 40741.0, "25175": 36569.0, "25180": 38692.0, "25185": 40191.0, "25190": 39966.0, "25195": 40104.0, "25200": 42271.0, "25205": 39976.0, "25210": 40875.0, "25215": 39423.0, "25220": 40892.0, "25225": 39449.0, "25230": 40880.0, "25235": 37907.0, "25240": 38605.0, "25245": 40810.0, "25250": 39369.0, "25255": 38654.0, "25260": 
40787.0, "25265": 38668.0, "25270": 39969.0, "25275": 41509.0, "25280": 41596.0, "25285": 37762.0, "25290": 40744.0, "25295": 40727.0, "25300": 41508.0, "25305": 40811.0, "25310": 40828.0, "25315": 40813.0, "25320": 39984.0, "25325": 40740.0, "25330": 38646.0, "25335": 39366.0, "25340": 36463.0, "25345": 41499.0, "25350": 42346.0, "25355": 40740.0, "25360": 38752.0, "25365": 38780.0, "25370": 40742.0, "25375": 39290.0, "25380": 39981.0, "25385": 38672.0, "25390": 38611.0, "25395": 39360.0, "25400": 40873.0, "25405": 40036.0, "25410": 38008.0, "25415": 40059.0, "25420": 40122.0, "25425": 39335.0, "25430": 39279.0, "25435": 39421.0, "25440": 40111.0, "25445": 42267.0, "25450": 38697.0, "25455": 42271.0, "25460": 40807.0, "25465": 41590.0, "25470": 41518.0, "25475": 39211.0, "25480": 38432.0, "25485": 40126.0, "25490": 40740.0, "25495": 39420.0, "25500": 39457.0, "25505": 39439.0, "25510": 37154.0, "25515": 40050.0, "25520": 41507.0, "25525": 40037.0, "25530": 40043.0, "25535": 40825.0, "25540": 41592.0, "25545": 40821.0, "25550": 40044.0, "25555": 40139.0, "25560": 39982.0, "25565": 40050.0, "25570": 39965.0, "25575": 37210.0, "25580": 40739.0, "25585": 41510.0, "25590": 37909.0, "25595": 40729.0, "25600": 41503.0, "25605": 39266.0, "25610": 41502.0, "25615": 39273.0, "25620": 41669.0, "25625": 38763.0, "25630": 40818.0, "25635": 37336.0, "25640": 40138.0, "25645": 40902.0, "25650": 40793.0, "25655": 40732.0, "25660": 38522.0, "25665": 40812.0, "25670": 40737.0, "25675": 37879.0, "25680": 39354.0, "25685": 41573.0, "25690": 40731.0, "25695": 39371.0, "25700": 40054.0, "25705": 40804.0, "25710": 38522.0, "25715": 40193.0, "25720": 38748.0, "25725": 40814.0, "25730": 40839.0, "25735": 40118.0, "25740": 40052.0, "25745": 40046.0, "25750": 38690.0, "25755": 41590.0, "25760": 40966.0, "25765": 39420.0, "25770": 39268.0, "25775": 40212.0, "25780": 40808.0, "25785": 40728.0, "25790": 42270.0, "25795": 40040.0, "25800": 41509.0, "25805": 40735.0, "25810": 42285.0, "25815": 42275.0, "25820": 40035.0, "25825": 40051.0, "25830": 39979.0, "25835": 38730.0, "25840": 40738.0, "25845": 40053.0, "25850": 38623.0, "25855": 39278.0, "25860": 37007.0, "25865": 38592.0, "25870": 39969.0, "25875": 39348.0, "25880": 39360.0, "25885": 37890.0, "25890": 40052.0, "25895": 37900.0, "25900": 39280.0, "25905": 40748.0, "25910": 40068.0, "25915": 40111.0, "25920": 39376.0, "25925": 40825.0, "25930": 39267.0, "25935": 40031.0, "25940": 39349.0, "25945": 37886.0, "25950": 41572.0, "25955": 39356.0, "25960": 40795.0, "25965": 40816.0, "25970": 40823.0, "25975": 40044.0, "25980": 41494.0, "25985": 40090.0, "25990": 40729.0, "25995": 39368.0, "26000": 41508.0, "26005": 40054.0, "26010": 40814.0, "26015": 40738.0, "26020": 39198.0, "26025": 39291.0, "26030": 40046.0, "26035": 39289.0, "26040": 40733.0, "26045": 40736.0, "26050": 41519.0, "26055": 40821.0, "26060": 40890.0, "26065": 40803.0, "26070": 38761.0, "26075": 39280.0, "26080": 37313.0, "26085": 39964.0, "26090": 40047.0, "26095": 36374.0, "26100": 39499.0, "26105": 40800.0, "26110": 37859.0, "26115": 40023.0, "26120": 40799.0, "26125": 40043.0, "26130": 39278.0, "26135": 41585.0, "26140": 40044.0, "26145": 39358.0, "26150": 39967.0, "26155": 37836.0, "26160": 40055.0, "26165": 38723.0, "26170": 39346.0, "26175": 39977.0, "26180": 39453.0, "26185": 37138.0, "26190": 39324.0, "26195": 40833.0, "26200": 40188.0, "26205": 40879.0, "26210": 38569.0, "26215": 42272.0, "26220": 39451.0, "26225": 38600.0, "26230": 37820.0, "26235": 40050.0, "26240": 40040.0, "26245": 39969.0, 
"26250": 41507.0, "26255": 40137.0, "26260": 41571.0, "26265": 40049.0, "26270": 39287.0, "26275": 40124.0, "26280": 38596.0, "26285": 38578.0, "26290": 37139.0, "26295": 41500.0, "26300": 41559.0, "26305": 37136.0, "26310": 39205.0, "26315": 40052.0, "26320": 40734.0, "26325": 39372.0, "26330": 40898.0, "26335": 39352.0, "26340": 40029.0, "26345": 40728.0, "26350": 41669.0, "26355": 42260.0, "26360": 39204.0, "26365": 41508.0, "26370": 39352.0, "26375": 38753.0, "26380": 40047.0, "26385": 40052.0, "26390": 40760.0, "26395": 40064.0, "26400": 41576.0, "26405": 37931.0, "26410": 41503.0, "26415": 39967.0, "26420": 40795.0, "26425": 42269.0, "26430": 40062.0, "26435": 39983.0, "26440": 38038.0, "26445": 38658.0, "26450": 40805.0, "26455": 39974.0, "26460": 40890.0, "26465": 40802.0, "26470": 40752.0, "26475": 41505.0, "26480": 41501.0, "26485": 38594.0, "26490": 39972.0, "26495": 39278.0, "26500": 40030.0, "26505": 39972.0, "26510": 39362.0, "26515": 40212.0, "26520": 40053.0, "26525": 37360.0, "26530": 40737.0, "26535": 39971.0, "26540": 40726.0, "26545": 39362.0, "26550": 40825.0, "26555": 37896.0, "26560": 38725.0, "26565": 40057.0, "26570": 38658.0, "26575": 41584.0, "26580": 38663.0, "26585": 39968.0, "26590": 38458.0, "26595": 40053.0, "26600": 37206.0, "26605": 40825.0, "26610": 40056.0, "26615": 40111.0, "26620": 41506.0, "26625": 40727.0, "26630": 41596.0, "26635": 37984.0, "26640": 40733.0, "26645": 41576.0, "26650": 39278.0, "26655": 40890.0, "26660": 41591.0, "26665": 40008.0, "26670": 41493.0, "26675": 39425.0, "26680": 40052.0, "26685": 41518.0, "26690": 40033.0, "26695": 40036.0, "26700": 40053.0, "26705": 40815.0, "26710": 40802.0, "26715": 38800.0, "26720": 40833.0, "26725": 40757.0, "26730": 40054.0, "26735": 38539.0, "26740": 39522.0, "26745": 40067.0, "26750": 40735.0, "26755": 37976.0, "26760": 40117.0, "26765": 41501.0, "26770": 40811.0, "26775": 40045.0, "26780": 39443.0, "26785": 40875.0, "26790": 39271.0, "26795": 40731.0, "26800": 38666.0, "26805": 39331.0, "26810": 40897.0, "26815": 40731.0, "26820": 41580.0, "26825": 39965.0, "26830": 42275.0, "26835": 40037.0, "26840": 41579.0, "26845": 39961.0, "26850": 41503.0, "26855": 40896.0, "26860": 38600.0, "26865": 40816.0, "26870": 41592.0, "26875": 39214.0, "26880": 37341.0, "26885": 38644.0, "26890": 40122.0, "26895": 40752.0, "26900": 36352.0, "26905": 42259.0, "26910": 40063.0, "26915": 40117.0, "26920": 40035.0, "26925": 40050.0, "26930": 39355.0, "26935": 40059.0, "26940": 39342.0, "26945": 37912.0, "26950": 40055.0, "26955": 39450.0, "26960": 40049.0, "26965": 38824.0, "26970": 40035.0, "26975": 39967.0, "26980": 39358.0, "26985": 40048.0, "26990": 40833.0, "26995": 40735.0, "27000": 41507.0, "27005": 41585.0, "27010": 36496.0, "27015": 37130.0, "27020": 40814.0, "27025": 40823.0, "27030": 40048.0, "27035": 39361.0, "27040": 40892.0, "27045": 40809.0, "27050": 38656.0, "27055": 40140.0, "27060": 42274.0, "27065": 37967.0, "27070": 40810.0, "27075": 40812.0, "27080": 40812.0, "27085": 39966.0, "27090": 40032.0, "27095": 41567.0, "27100": 39209.0, "27105": 40044.0, "27110": 37899.0, "27115": 40050.0, "27120": 39280.0, "27125": 39373.0, "27130": 39379.0, "27135": 42277.0, "27140": 39982.0, "27145": 41572.0, "27150": 38605.0, "27155": 41501.0, "27160": 39296.0, "27165": 40128.0, "27170": 40888.0, "27175": 39973.0, "27180": 40126.0, "27185": 38032.0, "27190": 40726.0, "27195": 39980.0, "27200": 40733.0, "27205": 40809.0, "27210": 39954.0, "27215": 37467.0, "27220": 39270.0, "27225": 40052.0, "27230": 40805.0, "27235": 
37189.0, "27240": 38544.0, "27245": 40023.0, "27250": 40051.0, "27255": 40748.0, "27260": 39212.0, "27265": 41516.0, "27270": 35226.0, "27275": 40061.0, "27280": 39335.0, "27285": 41502.0, "27290": 39280.0, "27295": 37834.0, "27300": 38585.0, "27305": 40822.0, "27310": 40811.0, "27315": 41558.0, "27320": 41494.0, "27325": 39975.0, "27330": 40131.0, "27335": 40740.0, "27340": 39272.0, "27345": 40909.0, "27350": 40758.0, "27355": 40905.0, "27360": 39960.0, "27365": 42270.0, "27370": 40737.0, "27375": 40825.0, "27380": 40741.0, "27385": 39344.0, "27390": 40818.0, "27395": 42271.0, "27400": 37871.0, "27405": 40881.0, "27410": 39496.0, "27415": 38525.0, "27420": 37307.0, "27425": 40121.0, "27430": 37882.0, "27435": 40033.0, "27440": 41588.0, "27445": 40117.0, "27450": 40046.0, "27455": 38521.0, "27460": 41494.0, "27465": 40747.0, "27470": 40810.0, "27475": 40051.0, "27480": 40049.0, "27485": 40047.0, "27490": 39491.0, "27495": 40725.0, "27500": 40829.0, "27505": 40109.0, "27510": 39375.0, "27515": 39975.0, "27520": 40818.0, "27525": 40742.0, "27530": 39357.0, "27535": 40736.0, "27540": 39287.0, "27545": 40886.0, "27550": 39381.0, "27555": 39271.0, "27560": 40730.0, "27565": 41505.0, "27570": 39374.0, "27575": 41498.0, "27580": 40740.0, "27585": 40117.0, "27590": 40729.0, "27595": 38579.0, "27600": 39279.0, "27605": 41609.0, "27610": 38052.0, "27615": 39972.0, "27620": 38500.0, "27625": 40095.0, "27630": 38629.0, "27635": 37748.0, "27640": 41580.0, "27645": 40119.0, "27650": 39974.0, "27655": 40056.0, "27660": 39222.0, "27665": 40100.0, "27670": 41505.0, "27675": 41584.0, "27680": 39368.0, "27685": 38664.0, "27690": 38674.0, "27695": 40062.0, "27700": 39975.0, "27705": 39455.0, "27710": 38724.0, "27715": 41590.0, "27720": 40816.0, "27725": 41499.0, "27730": 39270.0, "27735": 39446.0, "27740": 41587.0, "27745": 40047.0, "27750": 41570.0, "27755": 39290.0, "27760": 40738.0, "27765": 41583.0, "27770": 40804.0, "27775": 40044.0, "27780": 41575.0, "27785": 38722.0, "27790": 39299.0, "27795": 38582.0, "27800": 40822.0, "27805": 39437.0, "27810": 39989.0, "27815": 40049.0, "27820": 40042.0, "27825": 42261.0, "27830": 39208.0, "27835": 40129.0, "27840": 41506.0, "27845": 37289.0, "27850": 40809.0, "27855": 40736.0, "27860": 39200.0, "27865": 39968.0, "27870": 37955.0, "27875": 41580.0, "27880": 39331.0, "27885": 42266.0, "27890": 41510.0, "27895": 41507.0, "27900": 39361.0, "27905": 41584.0, "27910": 39367.0, "27915": 40809.0, "27920": 40001.0, "27925": 40125.0, "27930": 40098.0, "27935": 39955.0, "27940": 37772.0, "27945": 40803.0, "27950": 41574.0, "27955": 39501.0, "27960": 41514.0, "27965": 40063.0, "27970": 40800.0, "27975": 40038.0, "27980": 39410.0, "27985": 37993.0, "27990": 40748.0, "27995": 41509.0, "28000": 40735.0, "28005": 40105.0, "28010": 41586.0, "28015": 40117.0, "28020": 40731.0, "28025": 37932.0, "28030": 41499.0, "28035": 40741.0, "28040": 37961.0, "28045": 39360.0, "28050": 40795.0, "28055": 38674.0, "28060": 40833.0, "28065": 37037.0, "28070": 40112.0, "28075": 39428.0, "28080": 39384.0, "28085": 40733.0, "28090": 38436.0, "28095": 37705.0, "28100": 40741.0, "28105": 41504.0, "28110": 41592.0, "28115": 39369.0, "28120": 38620.0, "28125": 40129.0, "28130": 39963.0, "28135": 36464.0, "28140": 41506.0, "28145": 36505.0, "28150": 38613.0, "28155": 37203.0, "28160": 42264.0, "28165": 40058.0, "28170": 40837.0, "28175": 40126.0, "28180": 40818.0, "28185": 40728.0, "28190": 40731.0, "28195": 39431.0, "28200": 40806.0, "28205": 39988.0, "28210": 38674.0, "28215": 38667.0, "28220": 39501.0, 
"28225": 41507.0, "28230": 39352.0, "28235": 42269.0, "28240": 40734.0, "28245": 39221.0, "28250": 40048.0, "28255": 39291.0, "28260": 39986.0, "28265": 40752.0, "28270": 40265.0, "28275": 40809.0, "28280": 40736.0, "28285": 39351.0, "28290": 39290.0, "28295": 40740.0, "28300": 41496.0, "28305": 40048.0, "28310": 40741.0, "28315": 40060.0, "28320": 39967.0, "28325": 41505.0, "28330": 39977.0, "28335": 40045.0, "28340": 40822.0, "28345": 40740.0, "28350": 40034.0, "28355": 40837.0, "28360": 38055.0, "28365": 40054.0, "28370": 38648.0, "28375": 40811.0, "28380": 41581.0, "28385": 38866.0, "28390": 40812.0, "28395": 37881.0, "28400": 39366.0, "28405": 40734.0, "28410": 39964.0, "28415": 41591.0, "28420": 39271.0, "28425": 41569.0, "28430": 40820.0, "28435": 40764.0, "28440": 39351.0, "28445": 40218.0, "28450": 40134.0, "28455": 40156.0, "28460": 38606.0, "28465": 40919.0, "28470": 37133.0, "28475": 41509.0, "28480": 40749.0, "28485": 39966.0, "28490": 40824.0, "28495": 39358.0, "28500": 41500.0, "28505": 40733.0, "28510": 40125.0, "28515": 41657.0, "28520": 39381.0, "28525": 36642.0, "28530": 40733.0, "28535": 40799.0, "28540": 38591.0, "28545": 37872.0, "28550": 40812.0, "28555": 41566.0, "28560": 40798.0, "28565": 38596.0, "28570": 39367.0, "28575": 39359.0, "28580": 36508.0, "28585": 40804.0, "28590": 40833.0, "28595": 40825.0, "28600": 40133.0, "28605": 42275.0, "28610": 40133.0, "28615": 39964.0, "28620": 37195.0, "28625": 39292.0, "28630": 38599.0, "28635": 40796.0, "28640": 40046.0, "28645": 40904.0, "28650": 40115.0, "28655": 40792.0, "28660": 41509.0, "28665": 40124.0, "28670": 40800.0, "28675": 42271.0, "28680": 37846.0, "28685": 40790.0, "28690": 39358.0, "28695": 40813.0, "28700": 40796.0, "28705": 40124.0, "28710": 41571.0, "28715": 37954.0, "28720": 39295.0, "28725": 40877.0, "28730": 39196.0, "28735": 37888.0, "28740": 39967.0, "28745": 41571.0, "28750": 40128.0, "28755": 38643.0, "28760": 41507.0, "28765": 39511.0, "28770": 39204.0, "28775": 41504.0, "28780": 40739.0, "28785": 40817.0, "28790": 39351.0, "28795": 40042.0, "28800": 37966.0, "28805": 40136.0, "28810": 39271.0, "28815": 41509.0, "28820": 37250.0, "28825": 40062.0, "28830": 37894.0, "28835": 39291.0, "28840": 41511.0, "28845": 38665.0, "28850": 38497.0, "28855": 38736.0, "28860": 42265.0, "28865": 41571.0, "28870": 40826.0, "28875": 40808.0, "28880": 38562.0, "28885": 40043.0, "28890": 39975.0, "28895": 39289.0, "28900": 39264.0, "28905": 39978.0, "28910": 38766.0, "28915": 40038.0, "28920": 38658.0, "28925": 41500.0, "28930": 38725.0, "28935": 38465.0, "28940": 37248.0, "28945": 40750.0, "28950": 40050.0, "28955": 40141.0, "28960": 40045.0, "28965": 41495.0, "28970": 41511.0, "28975": 41510.0, "28980": 39348.0, "28985": 37328.0, "28990": 38609.0, "28995": 39281.0, "29000": 40037.0, "29005": 38688.0, "29010": 40824.0, "29015": 41578.0, "29020": 37941.0, "29025": 39282.0, "29030": 41503.0, "29035": 40748.0, "29040": 40834.0, "29045": 40798.0, "29050": 40740.0, "29055": 40797.0, "29060": 39978.0, "29065": 40124.0, "29070": 40130.0, "29075": 40124.0, "29080": 40815.0, "29085": 41506.0, "29090": 39367.0, "29095": 40114.0, "29100": 40118.0, "29105": 40122.0, "29110": 40050.0, "29115": 41514.0, "29120": 39281.0, "29125": 40893.0, "29130": 38641.0, "29135": 42279.0, "29140": 39269.0, "29145": 39451.0, "29150": 40805.0, "29155": 40885.0, "29160": 40751.0, "29165": 40043.0, "29170": 39190.0, "29175": 39970.0, "29180": 39203.0, "29185": 39429.0, "29190": 39965.0, "29195": 39424.0, "29200": 38742.0, "29205": 41501.0, "29210": 
38662.0, "29215": 41595.0, "29220": 40195.0, "29225": 40798.0, "29230": 40812.0, "29235": 40831.0, "29240": 39388.0, "29245": 38513.0, "29250": 41514.0, "29255": 40742.0, "29260": 40045.0, "29265": 40818.0, "29270": 38736.0, "29275": 41500.0, "29280": 41497.0, "29285": 39984.0, "29290": 40826.0, "29295": 39970.0, "29300": 40739.0, "29305": 39275.0, "29310": 40125.0, "29315": 40043.0, "29320": 40052.0, "29325": 40829.0, "29330": 40113.0, "29335": 40036.0, "29340": 40799.0, "29345": 38118.0, "29350": 39976.0, "29355": 42273.0, "29360": 39970.0, "29365": 39488.0, "29370": 38432.0, "29375": 39285.0, "29380": 41581.0, "29385": 39285.0, "29390": 39280.0, "29395": 40127.0, "29400": 40800.0, "29405": 37784.0, "29410": 38673.0, "29415": 40820.0, "29420": 40819.0, "29425": 39421.0, "29430": 39363.0, "29435": 40801.0, "29440": 41501.0, "29445": 39521.0, "29450": 41500.0, "29455": 40063.0, "29460": 39464.0, "29465": 39371.0, "29470": 40741.0, "29475": 41497.0, "29480": 39964.0, "29485": 41509.0, "29490": 38639.0, "29495": 40741.0, "29500": 38653.0, "29505": 42263.0, "29510": 40047.0, "29515": 39452.0, "29520": 40043.0, "29525": 40804.0, "29530": 39984.0, "29535": 39338.0, "29540": 39351.0, "29545": 39214.0, "29550": 41597.0, "29555": 37731.0, "29560": 40108.0, "29565": 40734.0, "29570": 40903.0, "29575": 38534.0, "29580": 40816.0, "29585": 40214.0, "29590": 38739.0, "29595": 41515.0, "29600": 39347.0, "29605": 40828.0, "29610": 38559.0, "29615": 39358.0, "29620": 41576.0, "29625": 39443.0, "29630": 40819.0, "29635": 39384.0, "29640": 39365.0, "29645": 39271.0, "29650": 41566.0, "29655": 38740.0, "29660": 40755.0, "29665": 36455.0, "29670": 39982.0, "29675": 40027.0, "29680": 39961.0, "29685": 39283.0, "29690": 40816.0, "29695": 39285.0, "29700": 40818.0, "29705": 38560.0, "29710": 40824.0, "29715": 41579.0, "29720": 40830.0, "29725": 40054.0, "29730": 39201.0, "29735": 41506.0, "29740": 41496.0, "29745": 40972.0, "29750": 40059.0, "29755": 41503.0, "29760": 37903.0, "29765": 40812.0, "29770": 41500.0, "29775": 39982.0, "29780": 38432.0, "29785": 37915.0, "29790": 37980.0, "29795": 39274.0, "29800": 40097.0, "29805": 39365.0, "29810": 39417.0, "29815": 38454.0, "29820": 40123.0, "29825": 38733.0, "29830": 40070.0, "29835": 38465.0, "29840": 41493.0, "29845": 41508.0, "29850": 42272.0, "29855": 40022.0, "29860": 40041.0, "29865": 40744.0, "29870": 40810.0, "29875": 40836.0, "29880": 38514.0, "29885": 37733.0, "29890": 40734.0, "29895": 38807.0, "29900": 39983.0, "29905": 40813.0, "29910": 42275.0, "29915": 40096.0, "29920": 40125.0, "29925": 40039.0, "29930": 39281.0, "29935": 41581.0, "29940": 39353.0, "29945": 40032.0, "29950": 41501.0, "29955": 40104.0, "29960": 38582.0, "29965": 40735.0, "29970": 39360.0, "29975": 37889.0, "29980": 40739.0, "29985": 38108.0, "29990": 40749.0, "29995": 40042.0, "30000": 41500.0, "30005": 39355.0, "30010": 40054.0, "30015": 40139.0, "30020": 40051.0, "30025": 39973.0, "30030": 40887.0, "30035": 41592.0, "30040": 39274.0, "30045": 39310.0, "30050": 40041.0, "30055": 40229.0, "30060": 40058.0, "30065": 39293.0, "30070": 40030.0, "30075": 34451.0, "30080": 37884.0, "30085": 39369.0, "30090": 40824.0, "30095": 39293.0, "30100": 40042.0, "30105": 41510.0, "30110": 40885.0, "30115": 37295.0, "30120": 40814.0, "30125": 39366.0, "30130": 40054.0, "30135": 40044.0, "30140": 40808.0, "30145": 39343.0, "30150": 37967.0, "30155": 40741.0, "30160": 41518.0, "30165": 38737.0, "30170": 40791.0, "30175": 40747.0, "30180": 39209.0, "30185": 38802.0, "30190": 39358.0, "30195": 40732.0, 
"30200": 39978.0, "30205": 39359.0, "30210": 40053.0, "30215": 39429.0, "30220": 41500.0, "30225": 40049.0, "30230": 39347.0, "30235": 40819.0, "30240": 40039.0, "30245": 40125.0, "30250": 38532.0, "30255": 38753.0, "30260": 42262.0, "30265": 38134.0, "30270": 40812.0, "30275": 40032.0, "30280": 40111.0, "30285": 39368.0, "30290": 40809.0, "30295": 40064.0, "30300": 41569.0, "30305": 39976.0, "30310": 40730.0, "30315": 39281.0, "30320": 39301.0, "30325": 40735.0, "30330": 39346.0, "30335": 40844.0, "30340": 39974.0, "30345": 38642.0, "30350": 40064.0, "30355": 35671.0, "30360": 41590.0, "30365": 41508.0, "30370": 38808.0, "30375": 38665.0, "30380": 38598.0, "30385": 40040.0, "30390": 40128.0, "30395": 39375.0, "30400": 39262.0, "30405": 42272.0, "30410": 40752.0, "30415": 39376.0, "30420": 40062.0, "30425": 40065.0, "30430": 40802.0, "30435": 41577.0, "30440": 40810.0, "30445": 40817.0, "30450": 39351.0, "30455": 40042.0, "30460": 39370.0, "30465": 38747.0, "30470": 40048.0, "30475": 40740.0, "30480": 40198.0, "30485": 40095.0, "30490": 41589.0, "30495": 40744.0, "30500": 40202.0, "30505": 39361.0, "30510": 40790.0, "30515": 39279.0, "30520": 39436.0, "30525": 38672.0, "30530": 39968.0, "30535": 40036.0, "30540": 40063.0, "30545": 38578.0, "30550": 40754.0, "30555": 41670.0, "30560": 37069.0, "30565": 40814.0, "30570": 38035.0, "30575": 41518.0, "30580": 40063.0, "30585": 40045.0, "30590": 40813.0, "30595": 40134.0, "30600": 40146.0, "30605": 40815.0, "30610": 40189.0, "30615": 39283.0, "30620": 39975.0, "30625": 40735.0, "30630": 40124.0, "30635": 38531.0, "30640": 40038.0, "30645": 37976.0, "30650": 40141.0, "30655": 40039.0, "30660": 41499.0, "30665": 40792.0, "30670": 40057.0, "30675": 40879.0, "30680": 40114.0, "30685": 40042.0, "30690": 40122.0, "30695": 40731.0, "30700": 40070.0, "30705": 41518.0, "30710": 39972.0, "30715": 40823.0, "30720": 40893.0, "30725": 38685.0, "30730": 40123.0, "30735": 39422.0, "30740": 38549.0, "30745": 40061.0, "30750": 40741.0, "30755": 40746.0, "30760": 39208.0, "30765": 39434.0, "30770": 40062.0, "30775": 40054.0, "30780": 38591.0, "30785": 41497.0, "30790": 40794.0, "30795": 40734.0, "30800": 37412.0, "30805": 39271.0, "30810": 39971.0, "30815": 40745.0, "30820": 38604.0, "30825": 41503.0, "30830": 39199.0, "30835": 40060.0, "30840": 39974.0, "30845": 40035.0, "30850": 40811.0, "30855": 39280.0, "30860": 40140.0, "30865": 37922.0, "30870": 40052.0, "30875": 41500.0, "30880": 39376.0, "30885": 41502.0, "30890": 40826.0, "30895": 39345.0, "30900": 41515.0, "30905": 40969.0, "30910": 40745.0, "30915": 40730.0, "30920": 42267.0, "30925": 37829.0, "30930": 40057.0, "30935": 40742.0, "30940": 38537.0, "30945": 38003.0, "30950": 41500.0, "30955": 39428.0, "30960": 38511.0, "30965": 38056.0, "30970": 37833.0, "30975": 40099.0, "30980": 40812.0, "30985": 40889.0, "30990": 37140.0, "30995": 38815.0, "31000": 41574.0, "31005": 40795.0, "31010": 38577.0, "31015": 43031.0, "31020": 39270.0, "31025": 40790.0, "31030": 39289.0, "31035": 40820.0, "31040": 39367.0, "31045": 37754.0, "31050": 42268.0, "31055": 40744.0, "31060": 40138.0, "31065": 37945.0, "31070": 39497.0, "31075": 40817.0, "31080": 41579.0, "31085": 40742.0, "31090": 40055.0, "31095": 41504.0, "31100": 40217.0, "31105": 42352.0, "31110": 39427.0, "31115": 41577.0, "31120": 40033.0, "31125": 40740.0, "31130": 39981.0, "31135": 38730.0, "31140": 40062.0, "31145": 41513.0, "31150": 38600.0, "31155": 40829.0, "31160": 39963.0, "31165": 41581.0, "31170": 40126.0, "31175": 40892.0, "31180": 40801.0, "31185": 
39437.0, "31190": 39983.0, "31195": 41518.0, "31200": 40813.0, "31205": 38013.0, "31210": 38663.0, "31215": 42340.0, "31220": 40053.0, "31225": 41513.0, "31230": 39289.0, "31235": 40125.0, "31240": 42270.0, "31245": 40040.0, "31250": 39379.0, "31255": 40746.0, "31260": 40736.0, "31265": 40031.0, "31270": 39282.0, "31275": 39982.0, "31280": 40040.0, "31285": 38042.0, "31290": 40805.0, "31295": 37922.0, "31300": 41579.0, "31305": 40135.0, "31310": 41691.0, "31315": 38605.0, "31320": 36949.0, "31325": 39296.0, "31330": 40022.0, "31335": 40738.0, "31340": 40811.0, "31345": 40032.0, "31350": 40116.0, "31355": 40797.0, "31360": 38543.0, "31365": 40050.0, "31370": 40140.0, "31375": 40048.0, "31380": 40060.0, "31385": 38504.0, "31390": 39974.0, "31395": 40192.0, "31400": 39513.0, "31405": 41506.0, "31410": 40049.0, "31415": 40112.0, "31420": 40025.0, "31425": 40737.0, "31430": 40045.0, "31435": 40796.0, "31440": 39975.0, "31445": 40778.0, "31450": 38609.0, "31455": 40041.0, "31460": 39434.0, "31465": 37911.0, "31470": 40158.0, "31475": 38607.0, "31480": 41597.0, "31485": 39364.0, "31490": 40816.0, "31495": 38509.0, "31500": 37988.0, "31505": 39996.0, "31510": 39427.0, "31515": 37844.0, "31520": 37999.0, "31525": 35148.0, "31530": 39296.0, "31535": 41505.0, "31540": 42337.0, "31545": 40811.0, "31550": 40738.0, "31555": 38525.0, "31560": 42268.0, "31565": 40049.0, "31570": 40800.0, "31575": 41581.0, "31580": 40038.0, "31585": 42270.0, "31590": 39366.0, "31595": 41581.0, "31600": 38671.0, "31605": 36514.0, "31610": 40744.0, "31615": 37911.0, "31620": 40745.0, "31625": 38434.0, "31630": 38560.0, "31635": 39440.0, "31640": 40048.0, "31645": 39371.0, "31650": 40805.0, "31655": 40054.0, "31660": 39972.0, "31665": 40737.0, "31670": 38586.0, "31675": 40726.0, "31680": 37845.0, "31685": 40803.0, "31690": 40809.0, "31695": 37978.0, "31700": 37911.0, "31705": 39970.0, "31710": 40740.0, "31715": 40805.0, "31720": 39388.0, "31725": 39344.0, "31730": 39362.0, "31735": 40748.0, "31740": 37760.0, "31745": 40126.0, "31750": 40120.0, "31755": 39337.0, "31760": 39217.0, "31765": 40813.0, "31770": 41575.0, "31775": 38588.0, "31780": 41588.0, "31785": 40828.0, "31790": 38657.0, "31795": 40816.0, "31800": 40046.0, "31805": 40087.0, "31810": 39281.0, "31815": 37925.0, "31820": 39991.0, "31825": 40748.0, "31830": 40046.0, "31835": 40070.0, "31840": 40176.0, "31845": 40073.0, "31850": 40815.0, "31855": 40824.0, "31860": 39993.0, "31865": 40818.0, "31870": 40121.0, "31875": 38477.0, "31880": 40131.0, "31885": 40040.0, "31890": 39983.0, "31895": 40215.0, "31900": 38592.0, "31905": 39341.0, "31910": 39266.0, "31915": 41505.0, "31920": 38665.0, "31925": 39284.0, "31930": 35795.0, "31935": 39424.0, "31940": 39988.0, "31945": 40732.0, "31950": 40798.0, "31955": 36531.0, "31960": 39273.0, "31965": 40077.0, "31970": 39984.0, "31975": 40807.0, "31980": 40057.0, "31985": 39369.0, "31990": 40996.0, "31995": 39440.0, "32000": 40896.0, "32005": 38607.0, "32010": 40796.0, "32015": 39980.0, "32020": 40883.0, "32025": 39365.0, "32030": 40814.0, "32035": 37723.0, "32040": 40114.0, "32045": 38669.0, "32050": 40826.0, "32055": 39283.0, "32060": 39967.0, "32065": 40171.0, "32070": 39355.0, "32075": 39283.0, "32080": 40797.0, "32085": 40119.0, "32090": 39970.0, "32095": 41510.0, "32100": 42270.0, "32105": 40817.0, "32110": 40047.0, "32115": 40825.0, "32120": 40039.0, "32125": 40741.0, "32130": 40736.0, "32135": 40813.0, "32140": 39363.0, "32145": 40055.0, "32150": 40742.0, "32155": 38577.0, "32160": 39952.0, "32165": 40824.0, "32170": 40094.0, 
"32175": 37224.0, "32180": 38586.0, "32185": 41503.0, "32190": 40047.0, "32195": 37980.0, "32200": 38574.0, "32205": 40201.0, "32210": 40048.0, "32215": 40750.0, "32220": 41595.0, "32225": 40814.0, "32230": 42280.0, "32235": 41590.0, "32240": 40167.0, "32245": 40136.0, "32250": 40898.0, "32255": 40803.0, "32260": 38677.0, "32265": 40819.0, "32270": 36682.0, "32275": 40812.0, "32280": 40126.0, "32285": 39965.0, "32290": 37175.0, "32295": 40135.0, "32300": 40123.0, "32305": 40740.0, "32310": 39262.0, "32315": 41552.0, "32320": 41494.0, "32325": 37237.0, "32330": 37831.0, "32335": 39963.0, "32340": 40755.0, "32345": 38775.0, "32350": 40821.0, "32355": 40047.0, "32360": 42272.0, "32365": 40038.0, "32370": 39317.0, "32375": 42265.0, "32380": 41496.0, "32385": 37829.0, "32390": 38593.0, "32395": 40867.0, "32400": 40816.0, "32405": 40055.0, "32410": 39381.0, "32415": 38504.0, "32420": 40036.0, "32425": 38662.0, "32430": 40101.0, "32435": 36688.0, "32440": 40057.0, "32445": 40900.0, "32450": 40126.0, "32455": 40813.0, "32460": 41576.0, "32465": 39979.0, "32470": 39191.0, "32475": 40842.0, "32480": 39979.0, "32485": 40822.0, "32490": 40116.0, "32495": 38513.0, "32500": 39354.0, "32505": 40737.0, "32510": 40805.0, "32515": 40119.0, "32520": 38667.0, "32525": 40726.0, "32530": 40142.0, "32535": 40829.0, "32540": 40052.0, "32545": 40888.0, "32550": 39349.0, "32555": 41504.0, "32560": 40808.0, "32565": 38540.0, "32570": 39362.0, "32575": 40120.0, "32580": 39434.0, "32585": 37151.0, "32590": 41588.0, "32595": 39971.0, "32600": 41493.0, "32605": 40820.0, "32610": 38689.0, "32615": 40815.0, "32620": 40057.0, "32625": 40905.0, "32630": 35987.0, "32635": 38586.0, "32640": 40047.0, "32645": 40029.0, "32650": 40137.0, "32655": 40814.0, "32660": 40896.0, "32665": 40046.0, "32670": 38055.0, "32675": 41571.0, "32680": 39345.0, "32685": 40744.0, "32690": 40194.0, "32695": 39289.0, "32700": 41510.0, "32705": 37204.0, "32710": 37833.0, "32715": 40752.0, "32720": 40047.0, "32725": 40896.0, "32730": 41540.0, "32735": 40875.0, "32740": 39204.0, "32745": 40094.0, "32750": 40794.0, "32755": 39353.0, "32760": 38637.0, "32765": 42268.0, "32770": 38022.0, "32775": 39276.0, "32780": 40881.0, "32785": 39460.0, "32790": 42274.0, "32795": 40745.0, "32800": 39365.0, "32805": 36431.0, "32810": 40735.0, "32815": 41576.0, "32820": 40813.0, "32825": 40811.0, "32830": 39355.0, "32835": 40882.0, "32840": 38720.0, "32845": 40809.0, "32850": 39361.0, "32855": 40740.0, "32860": 39268.0, "32865": 41517.0, "32870": 39342.0, "32875": 40111.0, "32880": 37973.0, "32885": 40045.0, "32890": 41585.0, "32895": 39419.0, "32900": 40064.0, "32905": 37842.0, "32910": 37972.0, "32915": 38658.0, "32920": 41584.0, "32925": 40824.0, "32930": 38584.0, "32935": 40062.0, "32940": 40813.0, "32945": 40816.0, "32950": 40814.0, "32955": 38061.0, "32960": 40745.0, "32965": 37829.0, "32970": 40828.0, "32975": 40205.0, "32980": 40752.0, "32985": 39368.0, "32990": 40178.0, "32995": 40745.0, "33000": 38458.0, "33005": 39477.0, "33010": 37964.0, "33015": 39362.0, "33020": 39282.0, "33025": 40810.0, "33030": 38644.0, "33035": 41506.0, "33040": 40882.0, "33045": 43031.0, "33050": 38614.0, "33055": 37978.0, "33060": 40040.0, "33065": 38669.0, "33070": 40203.0, "33075": 39212.0, "33080": 38660.0, "33085": 38531.0, "33090": 37317.0, "33095": 40107.0, "33100": 38675.0, "33105": 40076.0, "33110": 40108.0, "33115": 39207.0, "33120": 40129.0, "33125": 39985.0, "33130": 41499.0, "33135": 39283.0, "33140": 40032.0, "33145": 41519.0, "33150": 40183.0, "33155": 40055.0, "33160": 
38650.0, "33165": 41515.0, "33170": 39203.0, "33175": 40813.0, "33180": 41500.0, "33185": 40808.0, "33190": 41582.0, "33195": 37922.0, "33200": 41580.0, "33205": 41506.0, "33210": 40753.0, "33215": 40808.0, "33220": 40146.0, "33225": 41507.0, "33230": 39354.0, "33235": 38760.0, "33240": 40747.0, "33245": 41500.0, "33250": 42276.0, "33255": 40828.0, "33260": 42260.0, "33265": 40131.0, "33270": 40060.0, "33275": 36616.0, "33280": 40733.0, "33285": 39444.0, "33290": 37286.0, "33295": 39311.0, "33300": 40810.0, "33305": 39976.0, "33310": 39267.0, "33315": 41501.0, "33320": 39975.0, "33325": 40743.0, "33330": 40050.0, "33335": 39968.0, "33340": 38518.0, "33345": 40049.0, "33350": 37297.0, "33355": 39522.0, "33360": 39274.0, "33365": 40953.0, "33370": 40821.0, "33375": 37370.0, "33380": 38683.0, "33385": 40732.0, "33390": 40745.0, "33395": 38662.0, "33400": 37851.0, "33405": 40106.0, "33410": 41502.0, "33415": 41503.0, "33420": 38495.0, "33425": 40751.0, "33430": 39349.0, "33435": 38527.0, "33440": 41510.0, "33445": 40803.0, "33450": 37337.0, "33455": 40806.0, "33460": 40809.0, "33465": 41650.0, "33470": 39456.0, "33475": 40810.0, "33480": 38662.0, "33485": 40887.0, "33490": 39281.0, "33495": 40813.0, "33500": 40835.0, "33505": 40054.0, "33510": 37761.0, "33515": 37987.0, "33520": 40740.0, "33525": 39207.0, "33530": 40807.0, "33535": 40898.0, "33540": 40842.0, "33545": 42356.0, "33550": 40889.0, "33555": 40821.0, "33560": 39370.0, "33565": 40040.0, "33570": 40219.0, "33575": 39417.0, "33580": 40826.0, "33585": 41534.0, "33590": 40119.0, "33595": 40062.0, "33600": 38521.0, "33605": 41503.0, "33610": 40737.0, "33615": 37949.0, "33620": 40813.0, "33625": 40056.0, "33630": 39969.0, "33635": 41498.0, "33640": 40736.0, "33645": 41501.0, "33650": 38680.0, "33655": 36775.0, "33660": 39975.0, "33665": 40130.0, "33670": 40743.0, "33675": 37974.0, "33680": 38672.0, "33685": 40805.0, "33690": 41503.0, "33695": 39973.0, "33700": 38593.0, "33705": 40057.0, "33710": 38556.0, "33715": 40077.0, "33720": 39340.0, "33725": 38678.0, "33730": 41511.0, "33735": 40752.0, "33740": 40813.0, "33745": 40750.0, "33750": 40798.0, "33755": 40144.0, "33760": 39345.0, "33765": 38446.0, "33770": 40740.0, "33775": 39425.0, "33780": 39469.0, "33785": 37892.0, "33790": 37724.0, "33795": 41512.0, "33800": 39278.0, "33805": 40054.0, "33810": 39291.0, "33815": 42275.0, "33820": 40116.0, "33825": 40754.0, "33830": 40211.0, "33835": 38531.0, "33840": 41585.0, "33845": 39367.0, "33850": 41500.0, "33855": 37960.0, "33860": 39467.0, "33865": 38060.0, "33870": 40741.0, "33875": 43036.0, "33880": 39427.0, "33885": 40030.0, "33890": 40048.0, "33895": 39982.0, "33900": 36997.0, "33905": 40750.0, "33910": 39960.0, "33915": 42277.0, "33920": 40054.0, "33925": 37833.0, "33930": 39441.0, "33935": 39435.0, "33940": 40043.0, "33945": 40740.0, "33950": 39444.0, "33955": 40813.0, "33960": 40823.0, "33965": 41510.0, "33970": 40753.0, "33975": 40813.0, "33980": 40125.0, "33985": 40810.0, "33990": 40744.0, "33995": 41575.0, "34000": 40736.0, "34005": 39977.0, "34010": 40130.0, "34015": 38646.0, "34020": 40813.0, "34025": 41590.0, "34030": 39354.0, "34035": 40039.0, "34040": 37994.0, "34045": 40827.0, "34050": 40054.0, "34055": 39279.0, "34060": 37152.0, "34065": 39232.0, "34070": 41500.0, "34075": 41586.0, "34080": 40130.0, "34085": 38489.0, "34090": 40111.0, "34095": 38519.0, "34100": 41506.0, "34105": 38576.0, "34110": 40820.0, "34115": 39363.0, "34120": 41498.0, "34125": 40117.0, "34130": 39973.0, "34135": 39283.0, "34140": 38581.0, "34145": 42272.0, 
"34150": 38524.0, "34155": 40734.0, "34160": 39975.0, "34165": 40802.0, "34170": 41504.0, "34175": 37183.0, "34180": 40126.0, "34185": 39337.0, "34190": 39297.0, "34195": 40745.0, "34200": 38729.0, "34205": 40054.0, "34210": 40741.0, "34215": 38598.0, "34220": 39364.0, "34225": 40734.0, "34230": 38741.0, "34235": 39972.0, "34240": 39277.0, "34245": 40036.0, "34250": 38613.0, "34255": 39362.0, "34260": 40042.0, "34265": 40042.0, "34270": 39448.0, "34275": 40796.0, "34280": 40740.0, "34285": 39982.0, "34290": 43037.0, "34295": 38442.0, "34300": 37888.0, "34305": 40735.0, "34310": 39371.0, "34315": 39351.0, "34320": 39971.0, "34325": 41508.0, "34330": 39976.0, "34335": 40125.0, "34340": 42274.0, "34345": 40205.0, "34350": 39351.0, "34355": 40818.0, "34360": 40050.0, "34365": 39289.0, "34370": 39341.0, "34375": 42278.0, "34380": 37880.0, "34385": 40795.0, "34390": 39433.0, "34395": 39204.0, "34400": 39193.0, "34405": 40822.0, "34410": 41501.0, "34415": 40805.0, "34420": 40049.0, "34425": 38537.0, "34430": 41511.0, "34435": 39964.0, "34440": 40128.0, "34445": 39375.0, "34450": 41507.0, "34455": 40734.0, "34460": 41583.0, "34465": 41569.0, "34470": 38047.0, "34475": 40131.0, "34480": 40813.0, "34485": 38640.0, "34490": 40054.0, "34495": 40730.0, "34500": 40967.0, "34505": 38515.0, "34510": 40836.0, "34515": 40054.0, "34520": 39977.0, "34525": 40035.0, "34530": 37844.0, "34535": 40737.0, "34540": 40800.0, "34545": 40810.0, "34550": 41571.0, "34555": 37166.0, "34560": 41528.0, "34565": 39434.0, "34570": 40742.0, "34575": 38778.0, "34580": 40744.0, "34585": 37983.0, "34590": 39464.0, "34595": 40757.0, "34600": 40881.0, "34605": 40798.0, "34610": 37247.0, "34615": 39283.0, "34620": 39965.0, "34625": 42268.0, "34630": 39431.0, "34635": 40889.0, "34640": 39312.0, "34645": 40050.0, "34650": 41509.0, "34655": 39362.0, "34660": 39525.0, "34665": 41567.0, "34670": 40746.0, "34675": 39351.0, "34680": 39275.0, "34685": 39450.0, "34690": 41563.0, "34695": 39291.0, "34700": 39337.0, "34705": 39369.0, "34710": 42348.0, "34715": 39424.0, "34720": 40823.0, "34725": 39214.0, "34730": 36461.0, "34735": 38718.0, "34740": 40138.0, "34745": 40730.0, "34750": 40101.0, "34755": 40820.0, "34760": 40132.0, "34765": 40812.0, "34770": 38834.0, "34775": 39385.0, "34780": 39305.0, "34785": 40047.0, "34790": 40793.0, "34795": 39341.0, "34800": 40124.0, "34805": 39428.0, "34810": 40064.0, "34815": 42256.0, "34820": 41495.0, "34825": 40125.0, "34830": 39286.0, "34835": 39399.0, "34840": 40103.0, "34845": 41573.0, "34850": 39969.0, "34855": 38539.0, "34860": 37975.0, "34865": 39280.0, "34870": 40732.0, "34875": 37692.0, "34880": 41495.0, "34885": 42357.0, "34890": 41511.0, "34895": 38622.0, "34900": 40740.0, "34905": 41494.0, "34910": 40112.0, "34915": 41509.0, "34920": 39267.0, "34925": 39365.0, "34930": 39221.0, "34935": 39960.0, "34940": 39979.0, "34945": 40891.0, "34950": 40739.0, "34955": 39420.0, "34960": 40838.0, "34965": 40818.0, "34970": 38728.0, "34975": 37903.0, "34980": 40115.0, "34985": 40804.0, "34990": 40830.0, "34995": 41568.0, "35000": 37898.0, "35005": 41517.0, "35010": 37209.0, "35015": 39988.0, "35020": 40922.0, "35025": 39413.0, "35030": 39427.0, "35035": 39279.0, "35040": 41659.0, "35045": 38528.0, "35050": 41508.0, "35055": 40746.0, "35060": 40050.0, "35065": 39350.0, "35070": 38791.0, "35075": 40818.0, "35080": 39989.0, "35085": 40748.0, "35090": 39379.0, "35095": 38847.0, "35100": 39276.0, "35105": 38585.0, "35110": 40061.0, "35115": 38599.0, "35120": 37806.0, "35125": 39419.0, "35130": 36362.0, "35135": 
37887.0, "35140": 41584.0, "35145": 40107.0, "35150": 37757.0, "35155": 41503.0, "35160": 40886.0, "35165": 40741.0, "35170": 39447.0, "35175": 41521.0, "35180": 40042.0, "35185": 41581.0, "35190": 39414.0, "35195": 40106.0, "35200": 38600.0, "35205": 39371.0, "35210": 40892.0, "35215": 39379.0, "35220": 40118.0, "35225": 40738.0, "35230": 40898.0, "35235": 39363.0, "35240": 40186.0, "35245": 40124.0, "35250": 40043.0, "35255": 40056.0, "35260": 40736.0, "35265": 35218.0, "35270": 38752.0, "35275": 40046.0, "35280": 39210.0, "35285": 40742.0, "35290": 39352.0, "35295": 41510.0, "35300": 41571.0, "35305": 38595.0, "35310": 39198.0, "35315": 40142.0, "35320": 39967.0, "35325": 39415.0, "35330": 39213.0, "35335": 40825.0, "35340": 40108.0, "35345": 40811.0, "35350": 38061.0, "35355": 39501.0, "35360": 39364.0, "35365": 38600.0, "35370": 40098.0, "35375": 40814.0, "35380": 39281.0, "35385": 41513.0, "35390": 40724.0, "35395": 40806.0, "35400": 39450.0, "35405": 40883.0, "35410": 40810.0, "35415": 39981.0, "35420": 40809.0, "35425": 39990.0, "35430": 40058.0, "35435": 40827.0, "35440": 40128.0, "35445": 39444.0, "35450": 40053.0, "35455": 40829.0, "35460": 40902.0, "35465": 36585.0, "35470": 41499.0, "35475": 38674.0, "35480": 39996.0, "35485": 39967.0, "35490": 40047.0, "35495": 39214.0, "35500": 40114.0, "35505": 38598.0, "35510": 39272.0, "35515": 39323.0, "35520": 40103.0, "35525": 40741.0, "35530": 40825.0, "35535": 40871.0, "35540": 40741.0, "35545": 39293.0, "35550": 40057.0, "35555": 40810.0, "35560": 36336.0, "35565": 40739.0, "35570": 41562.0, "35575": 39427.0, "35580": 41496.0, "35585": 38676.0, "35590": 40052.0, "35595": 38520.0, "35600": 40108.0, "35605": 40112.0, "35610": 40122.0, "35615": 35976.0, "35620": 41523.0, "35625": 40810.0, "35630": 38446.0, "35635": 39984.0, "35640": 39353.0, "35645": 40805.0, "35650": 38588.0, "35655": 40800.0, "35660": 40834.0, "35665": 40061.0, "35670": 40053.0, "35675": 40144.0, "35680": 38681.0, "35685": 40803.0, "35690": 41596.0, "35695": 42276.0, "35700": 39438.0, "35705": 39384.0, "35710": 40729.0, "35715": 40098.0, "35720": 39972.0, "35725": 40741.0, "35730": 39280.0, "35735": 42282.0, "35740": 41575.0, "35745": 40746.0, "35750": 40742.0, "35755": 39448.0, "35760": 40737.0, "35765": 40175.0, "35770": 42266.0, "35775": 40039.0, "35780": 41586.0, "35785": 41576.0, "35790": 38596.0, "35795": 40730.0, "35800": 40070.0, "35805": 39318.0, "35810": 40036.0, "35815": 38441.0, "35820": 42435.0, "35825": 41489.0, "35830": 40805.0, "35835": 40285.0, "35840": 40131.0, "35845": 39291.0, "35850": 40115.0, "35855": 39281.0, "35860": 40825.0, "35865": 38648.0, "35870": 41510.0, "35875": 41568.0, "35880": 38751.0, "35885": 39357.0, "35890": 38444.0, "35895": 39974.0, "35900": 38737.0, "35905": 40113.0, "35910": 40050.0, "35915": 37358.0, "35920": 38751.0, "35925": 40888.0, "35930": 40808.0, "35935": 40024.0, "35940": 40055.0, "35945": 40738.0, "35950": 41513.0, "35955": 40029.0, "35960": 40047.0, "35965": 40140.0, "35970": 39346.0, "35975": 40050.0, "35980": 40730.0, "35985": 40203.0, "35990": 39362.0, "35995": 38542.0, "36000": 40199.0, "36005": 42354.0, "36010": 41499.0, "36015": 41570.0, "36020": 40832.0, "36025": 42263.0, "36030": 41570.0, "36035": 40740.0, "36040": 41499.0, "36045": 42268.0, "36050": 38537.0, "36055": 40039.0, "36060": 41505.0, "36065": 40129.0, "36070": 39402.0, "36075": 40814.0, "36080": 40166.0, "36085": 39286.0, "36090": 40142.0, "36095": 40739.0, "36100": 40810.0, "36105": 38738.0, "36110": 40158.0, "36115": 42269.0, "36120": 40886.0, 
"36125": 39532.0, "36130": 40807.0, "36135": 38525.0, "36140": 40029.0, "36145": 41579.0, "36150": 41506.0, "36155": 39489.0, "36160": 40736.0, "36165": 40796.0, "36170": 40800.0, "36175": 40045.0, "36180": 40736.0, "36185": 39431.0, "36190": 37893.0, "36195": 38583.0, "36200": 41576.0, "36205": 41586.0, "36210": 39954.0, "36215": 39969.0, "36220": 40136.0, "36225": 41570.0, "36230": 39322.0, "36235": 40097.0, "36240": 39405.0, "36245": 39966.0, "36250": 41500.0, "36255": 39985.0, "36260": 39436.0, "36265": 39451.0, "36270": 41502.0, "36275": 40134.0, "36280": 38582.0, "36285": 40799.0, "36290": 39301.0, "36295": 41503.0, "36300": 39438.0, "36305": 40740.0, "36310": 41565.0, "36315": 38676.0, "36320": 38833.0, "36325": 40736.0, "36330": 41576.0, "36335": 40750.0, "36340": 37901.0, "36345": 40821.0, "36350": 40746.0, "36355": 39976.0, "36360": 40741.0, "36365": 38661.0, "36370": 40189.0, "36375": 41514.0, "36380": 39344.0, "36385": 39987.0, "36390": 40739.0, "36395": 38047.0, "36400": 41508.0, "36405": 38494.0, "36410": 41492.0, "36415": 39208.0, "36420": 41581.0, "36425": 40823.0, "36430": 38715.0, "36435": 40738.0, "36440": 40132.0, "36445": 37904.0, "36450": 38655.0, "36455": 40122.0, "36460": 38010.0, "36465": 39312.0, "36470": 40118.0, "36475": 37836.0, "36480": 40805.0, "36485": 38670.0, "36490": 40755.0, "36495": 39440.0, "36500": 38607.0, "36505": 40812.0, "36510": 37827.0, "36515": 39224.0, "36520": 35644.0, "36525": 40070.0, "36530": 40114.0, "36535": 41516.0, "36540": 40811.0, "36545": 41530.0, "36550": 39224.0, "36555": 40800.0, "36560": 40747.0, "36565": 38736.0, "36570": 40797.0, "36575": 39210.0, "36580": 39452.0, "36585": 39977.0, "36590": 40813.0, "36595": 40035.0, "36600": 40750.0, "36605": 40158.0, "36610": 38794.0, "36615": 39433.0, "36620": 41523.0, "36625": 40062.0, "36630": 39426.0, "36635": 40063.0, "36640": 38034.0, "36645": 40140.0, "36650": 42355.0, "36655": 40127.0, "36660": 39369.0, "36665": 39433.0, "36670": 39988.0, "36675": 39203.0, "36680": 40823.0, "36685": 39274.0, "36690": 37947.0, "36695": 41561.0, "36700": 41641.0, "36705": 41601.0, "36710": 40734.0, "36715": 40737.0, "36720": 40044.0, "36725": 41581.0, "36730": 37765.0, "36735": 41511.0, "36740": 40112.0, "36745": 40884.0, "36750": 41569.0, "36755": 40126.0, "36760": 39989.0, "36765": 41515.0, "36770": 39360.0, "36775": 41510.0, "36780": 40107.0, "36785": 41504.0, "36790": 40045.0, "36795": 39334.0, "36800": 39287.0, "36805": 38527.0, "36810": 39303.0, "36815": 38529.0, "36820": 39293.0, "36825": 40726.0, "36830": 39971.0, "36835": 40047.0, "36840": 40197.0, "36845": 40114.0, "36850": 40048.0, "36855": 40884.0, "36860": 40137.0, "36865": 40149.0, "36870": 36522.0, "36875": 38439.0, "36880": 40128.0, "36885": 40070.0, "36890": 40736.0, "36895": 39279.0, "36900": 39258.0, "36905": 40040.0, "36910": 40129.0, "36915": 40745.0, "36920": 40805.0, "36925": 39273.0, "36930": 41508.0, "36935": 40805.0, "36940": 40247.0, "36945": 40116.0, "36950": 37896.0, "36955": 39300.0, "36960": 38653.0, "36965": 37915.0, "36970": 40130.0, "36975": 39346.0, "36980": 40745.0, "36985": 40901.0, "36990": 40733.0, "36995": 40120.0, "37000": 40072.0, "37005": 41579.0, "37010": 42268.0, "37015": 38743.0, "37020": 40123.0, "37025": 41570.0, "37030": 39292.0, "37035": 39375.0, "37040": 40065.0, "37045": 40822.0, "37050": 41580.0, "37055": 37811.0, "37060": 38696.0, "37065": 40059.0, "37070": 40108.0, "37075": 40046.0, "37080": 38812.0, "37085": 39350.0, "37090": 40043.0, "37095": 38565.0, "37100": 40254.0, "37105": 39356.0, "37110": 
40809.0, "37115": 40047.0, "37120": 40809.0, "37125": 40948.0, "37130": 39423.0, "37135": 40818.0, "37140": 38704.0, "37145": 40822.0, "37150": 40173.0, "37155": 40039.0, "37160": 40125.0, "37165": 39973.0, "37170": 42340.0, "37175": 39345.0, "37180": 39338.0, "37185": 38603.0, "37190": 41502.0, "37195": 39979.0, "37200": 41584.0, "37205": 39398.0, "37210": 39973.0, "37215": 39980.0, "37220": 40135.0, "37225": 38586.0, "37230": 40730.0, "37235": 39974.0, "37240": 37770.0, "37245": 39997.0, "37250": 36452.0, "37255": 38739.0, "37260": 40128.0, "37265": 41495.0, "37270": 40743.0, "37275": 40063.0, "37280": 40818.0, "37285": 39436.0, "37290": 40053.0, "37295": 39422.0, "37300": 40813.0, "37305": 39974.0, "37310": 36600.0, "37315": 41600.0, "37320": 40061.0, "37325": 40046.0, "37330": 39983.0, "37335": 40819.0, "37340": 40103.0, "37345": 38583.0, "37350": 39957.0, "37355": 40798.0, "37360": 38554.0, "37365": 39450.0, "37370": 39198.0, "37375": 36992.0, "37380": 38540.0, "37385": 40736.0, "37390": 38723.0, "37395": 39271.0, "37400": 40812.0, "37405": 38672.0, "37410": 40807.0, "37415": 41566.0, "37420": 40812.0, "37425": 41499.0, "37430": 39235.0, "37435": 40108.0, "37440": 39274.0, "37445": 39968.0, "37450": 41496.0, "37455": 40117.0, "37460": 39355.0, "37465": 40810.0, "37470": 39981.0, "37475": 40065.0, "37480": 40736.0, "37485": 37263.0, "37490": 37256.0, "37495": 40059.0, "37500": 39975.0, "37505": 38105.0, "37510": 36531.0, "37515": 40120.0, "37520": 41502.0, "37525": 40890.0, "37530": 40737.0, "37535": 40044.0, "37540": 41503.0, "37545": 40839.0, "37550": 37748.0, "37555": 40071.0, "37560": 41584.0, "37565": 40026.0, "37570": 40739.0, "37575": 41639.0, "37580": 41568.0, "37585": 40022.0, "37590": 40797.0, "37595": 40969.0, "37600": 40121.0, "37605": 40811.0, "37610": 42262.0, "37615": 40832.0, "37620": 40796.0, "37625": 42345.0, "37630": 38732.0, "37635": 40814.0, "37640": 39454.0, "37645": 40828.0, "37650": 40884.0, "37655": 39280.0, "37660": 40111.0, "37665": 42268.0, "37670": 42279.0, "37675": 40046.0, "37680": 39211.0, "37685": 40053.0, "37690": 39353.0, "37695": 37994.0, "37700": 40895.0, "37705": 39358.0, "37710": 40122.0, "37715": 39282.0, "37720": 41511.0, "37725": 40810.0, "37730": 38519.0, "37735": 38794.0, "37740": 39968.0, "37745": 40137.0, "37750": 40050.0, "37755": 39973.0, "37760": 40794.0, "37765": 41510.0, "37770": 40205.0, "37775": 40744.0, "37780": 41498.0, "37785": 40723.0, "37790": 39299.0, "37795": 39210.0, "37800": 39288.0, "37805": 39279.0, "37810": 40747.0, "37815": 42345.0, "37820": 41556.0, "37825": 40746.0, "37830": 40049.0, "37835": 40045.0, "37840": 41505.0, "37845": 39281.0, "37850": 38046.0, "37855": 39214.0, "37860": 40799.0, "37865": 39971.0, "37870": 40731.0, "37875": 40819.0, "37880": 39255.0, "37885": 39971.0, "37890": 39424.0, "37895": 39294.0, "37900": 39489.0, "37905": 37395.0, "37910": 40734.0, "37915": 38509.0, "37920": 39205.0, "37925": 40724.0, "37930": 40882.0, "37935": 38609.0, "37940": 40059.0, "37945": 39285.0, "37950": 40044.0, "37955": 38733.0, "37960": 40741.0, "37965": 41508.0, "37970": 37727.0, "37975": 40808.0, "37980": 40741.0, "37985": 38588.0, "37990": 42283.0, "37995": 37984.0, "38000": 39974.0, "38005": 40815.0, "38010": 40812.0, "38015": 40804.0, "38020": 41485.0, "38025": 40869.0, "38030": 38603.0, "38035": 42263.0, "38040": 39381.0, "38045": 41582.0, "38050": 39289.0, "38055": 40040.0, "38060": 37959.0, "38065": 39279.0, "38070": 40054.0, "38075": 42263.0, "38080": 40105.0, "38085": 41576.0, "38090": 39356.0, "38095": 40064.0, 
"38100": 41579.0, "38105": 38596.0, "38110": 36593.0, "38115": 40728.0, "38120": 39289.0, "38125": 40131.0, "38130": 39291.0, "38135": 39276.0, "38140": 40023.0, "38145": 37283.0, "38150": 39971.0, "38155": 40813.0, "38160": 43035.0, "38165": 36379.0, "38170": 40802.0, "38175": 39431.0, "38180": 37041.0, "38185": 40047.0, "38190": 40052.0, "38195": 40047.0, "38200": 40808.0, "38205": 41506.0, "38210": 39271.0, "38215": 40744.0, "38220": 41576.0, "38225": 38041.0, "38230": 40738.0, "38235": 40020.0, "38240": 40810.0, "38245": 37355.0, "38250": 39269.0, "38255": 40872.0, "38260": 38676.0, "38265": 39978.0, "38270": 40737.0, "38275": 40029.0, "38280": 41572.0, "38285": 37865.0, "38290": 40879.0, "38295": 41502.0, "38300": 43039.0, "38305": 40106.0, "38310": 37268.0, "38315": 39436.0, "38320": 40830.0, "38325": 39410.0, "38330": 40073.0, "38335": 40129.0, "38340": 40725.0, "38345": 37438.0, "38350": 41578.0, "38355": 39208.0, "38360": 38604.0, "38365": 39283.0, "38370": 40827.0, "38375": 39446.0, "38380": 40140.0, "38385": 40075.0, "38390": 39974.0, "38395": 38823.0, "38400": 39341.0, "38405": 40046.0, "38410": 40056.0, "38415": 40885.0, "38420": 40051.0, "38425": 41504.0, "38430": 39280.0, "38435": 41505.0, "38440": 39979.0, "38445": 37145.0, "38450": 39980.0, "38455": 40829.0, "38460": 39412.0, "38465": 39286.0, "38470": 37347.0, "38475": 40227.0, "38480": 39437.0, "38485": 37286.0, "38490": 39207.0, "38495": 39205.0, "38500": 40862.0, "38505": 40046.0, "38510": 40835.0, "38515": 39973.0, "38520": 38587.0, "38525": 40216.0, "38530": 40739.0, "38535": 38580.0, "38540": 40029.0, "38545": 41501.0, "38550": 40102.0, "38555": 40035.0, "38560": 39315.0, "38565": 40811.0, "38570": 40884.0, "38575": 39440.0, "38580": 40739.0, "38585": 39423.0, "38590": 39965.0, "38595": 38605.0, "38600": 39991.0, "38605": 40798.0, "38610": 39273.0, "38615": 40746.0, "38620": 41579.0, "38625": 39352.0, "38630": 39973.0, "38635": 40813.0, "38640": 40104.0, "38645": 40135.0, "38650": 41510.0, "38655": 39344.0, "38660": 40040.0, "38665": 40051.0, "38670": 40817.0, "38675": 37837.0, "38680": 39364.0, "38685": 37060.0, "38690": 38687.0, "38695": 40033.0, "38700": 40044.0, "38705": 40126.0, "38710": 41500.0, "38715": 41504.0, "38720": 40049.0, "38725": 40025.0, "38730": 41573.0, "38735": 42265.0, "38740": 39432.0, "38745": 40966.0, "38750": 39467.0, "38755": 41522.0, "38760": 37218.0, "38765": 41497.0, "38770": 40139.0, "38775": 39348.0, "38780": 37953.0, "38785": 40035.0, "38790": 34418.0, "38795": 39358.0, "38800": 40201.0, "38805": 37082.0, "38810": 39215.0, "38815": 39359.0, "38820": 39280.0, "38825": 39285.0, "38830": 40738.0, "38835": 38521.0, "38840": 41662.0, "38845": 40039.0, "38850": 41504.0, "38855": 38564.0, "38860": 38698.0, "38865": 40026.0, "38870": 40124.0, "38875": 41508.0, "38880": 40045.0, "38885": 39365.0, "38890": 39266.0, "38895": 41496.0, "38900": 40043.0, "38905": 42272.0, "38910": 40047.0, "38915": 39277.0, "38920": 40869.0, "38925": 37895.0, "38930": 41578.0, "38935": 38670.0, "38940": 41504.0, "38945": 40828.0, "38950": 38589.0, "38955": 40118.0, "38960": 40113.0, "38965": 40201.0, "38970": 41520.0, "38975": 40738.0, "38980": 40809.0, "38985": 40754.0, "38990": 40814.0, "38995": 40048.0, "39000": 39428.0, "39005": 38600.0, "39010": 38517.0, "39015": 40204.0, "39020": 40739.0, "39025": 40054.0, "39030": 40046.0, "39035": 39280.0, "39040": 40815.0, "39045": 41591.0, "39050": 39431.0, "39055": 40807.0, "39060": 38745.0, "39065": 35817.0, "39070": 42263.0, "39075": 41510.0, "39080": 41577.0, "39085": 
41575.0, "39090": 39972.0, "39095": 39993.0, "39100": 38510.0, "39105": 39264.0, "39110": 39420.0, "39115": 40109.0, "39120": 40807.0, "39125": 39972.0, "39130": 41564.0, "39135": 41505.0, "39140": 39980.0, "39145": 40745.0, "39150": 40816.0, "39155": 43039.0, "39160": 38703.0, "39165": 39466.0, "39170": 39975.0, "39175": 40033.0, "39180": 38028.0, "39185": 37757.0, "39190": 41497.0, "39195": 38573.0, "39200": 41504.0, "39205": 40047.0, "39210": 40039.0, "39215": 40732.0, "39220": 42273.0, "39225": 39375.0, "39230": 40818.0, "39235": 40159.0, "39240": 41578.0, "39245": 40804.0, "39250": 41571.0, "39255": 40822.0, "39260": 40742.0, "39265": 40738.0, "39270": 40049.0, "39275": 40108.0, "39280": 40797.0, "39285": 40805.0, "39290": 39974.0, "39295": 38728.0, "39300": 40907.0, "39305": 38604.0, "39310": 41513.0, "39315": 39291.0, "39320": 39279.0, "39325": 40732.0, "39330": 41503.0, "39335": 40888.0, "39340": 40884.0, "39345": 40114.0, "39350": 40878.0, "39355": 40730.0, "39360": 41516.0, "39365": 40816.0, "39370": 40822.0, "39375": 40118.0, "39380": 40049.0, "39385": 37901.0, "39390": 39978.0, "39395": 40826.0, "39400": 38716.0, "39405": 40742.0, "39410": 35969.0, "39415": 39284.0, "39420": 40120.0, "39425": 38038.0, "39430": 40827.0, "39435": 39267.0, "39440": 40795.0, "39445": 39975.0, "39450": 40828.0, "39455": 41504.0, "39460": 39340.0, "39465": 38596.0, "39470": 41571.0, "39475": 41514.0, "39480": 39445.0, "39485": 41506.0, "39490": 39389.0, "39495": 35772.0, "39500": 41576.0, "39505": 40204.0, "39510": 40041.0, "39515": 39287.0, "39520": 39971.0, "39525": 38590.0, "39530": 41501.0, "39535": 40736.0, "39540": 39209.0, "39545": 38738.0, "39550": 35162.0, "39555": 37959.0, "39560": 41509.0, "39565": 40114.0, "39570": 41576.0, "39575": 40751.0, "39580": 39284.0, "39585": 39285.0, "39590": 41586.0, "39595": 40754.0, "39600": 40092.0, "39605": 40810.0, "39610": 41514.0, "39615": 39963.0, "39620": 40888.0, "39625": 40735.0, "39630": 40110.0, "39635": 41572.0, "39640": 40036.0, "39645": 41516.0, "39650": 39309.0, "39655": 39989.0, "39660": 39420.0, "39665": 39349.0, "39670": 39370.0, "39675": 40831.0, "39680": 39974.0, "39685": 40107.0, "39690": 40824.0, "39695": 41492.0, "39700": 40810.0, "39705": 40808.0, "39710": 39532.0, "39715": 40036.0, "39720": 41508.0, "39725": 39975.0, "39730": 39983.0, "39735": 40044.0, "39740": 41492.0, "39745": 40819.0, "39750": 38673.0, "39755": 41499.0, "39760": 39962.0, "39765": 35690.0, "39770": 38536.0, "39775": 39279.0, "39780": 38069.0, "39785": 41652.0, "39790": 40052.0, "39795": 40736.0, "39800": 38593.0, "39805": 38724.0, "39810": 40831.0, "39815": 39377.0, "39820": 39977.0, "39825": 40820.0, "39830": 39215.0, "39835": 39285.0, "39840": 38657.0, "39845": 40744.0, "39850": 41571.0, "39855": 41570.0, "39860": 40043.0, "39865": 39296.0, "39870": 39353.0, "39875": 38641.0, "39880": 40803.0, "39885": 39336.0, "39890": 42294.0, "39895": 37875.0, "39900": 40816.0, "39905": 40041.0, "39910": 39291.0, "39915": 40797.0, "39920": 40895.0, "39925": 40207.0, "39930": 38740.0, "39935": 39205.0, "39940": 40734.0, "39945": 40890.0, "39950": 40728.0, "39955": 41504.0, "39960": 41577.0, "39965": 40060.0, "39970": 40822.0, "39975": 39254.0, "39980": 41506.0, "39985": 39282.0, "39990": 37903.0, "39995": 39408.0, "40000": 37907.0, "40005": 41510.0, "40010": 37966.0, "40015": 38674.0, "40020": 37215.0, "40025": 39264.0, "40030": 39287.0, "40035": 40126.0, "40040": 39984.0, "40045": 39349.0, "40050": 40028.0, "40055": 39214.0, "40060": 39962.0, "40065": 40023.0, "40070": 42284.0, 
"40075": 40796.0, "40080": 42271.0, "40085": 40831.0, "40090": 41637.0, "40095": 40732.0, "40100": 39288.0, "40105": 37339.0, "40110": 40894.0, "40115": 40886.0, "40120": 3520.0, "40125": 39299.0, "40130": 39340.0, "40135": 40805.0, "40140": 40814.0, "40145": 38524.0, "40150": 42278.0, "40155": 40105.0, "40160": 41586.0, "40165": 42261.0, "40170": 41583.0, "40175": 40136.0, "40180": 41650.0, "40185": 41508.0, "40190": 40057.0, "40195": 40023.0, "40200": 39331.0, "40205": 38434.0, "40210": 38597.0, "40215": 40810.0, "40220": 42265.0, "40225": 40080.0, "40230": 38642.0, "40235": 39284.0, "40240": 38734.0, "40245": 40821.0, "40250": 41505.0, "40255": 37889.0, "40260": 39434.0, "40265": 40122.0, "40270": 39386.0, "40275": 39348.0, "40280": 40185.0, "40285": 38591.0, "40290": 41498.0, "40295": 38590.0, "40300": 38533.0, "40305": 40114.0, "40310": 40744.0, "40315": 40882.0, "40320": 40822.0, "40325": 38046.0, "40330": 38123.0, "40335": 40809.0, "40340": 38573.0, "40345": 40745.0, "40350": 40869.0, "40355": 37978.0, "40360": 40751.0, "40365": 41505.0, "40370": 41502.0, "40375": 41567.0, "40380": 38696.0, "40385": 40791.0, "40390": 37951.0, "40395": 41512.0, "40400": 40125.0, "40405": 41507.0, "40410": 40816.0, "40415": 40816.0, "40420": 40809.0, "40425": 40114.0, "40430": 40034.0, "40435": 39265.0, "40440": 39979.0, "40445": 41588.0, "40450": 40157.0, "40455": 42267.0, "40460": 40039.0, "40465": 40816.0, "40470": 41507.0, "40475": 40813.0, "40480": 40120.0, "40485": 40734.0, "40490": 39195.0, "40495": 41612.0, "40500": 40061.0, "40505": 39973.0, "40510": 41581.0, "40515": 38678.0, "40520": 39344.0, "40525": 38597.0, "40530": 40132.0, "40535": 37443.0, "40540": 40055.0, "40545": 40756.0, "40550": 40033.0, "40555": 38827.0, "40560": 41569.0, "40565": 37973.0, "40570": 39358.0, "40575": 41591.0, "40580": 41506.0, "40585": 40045.0, "40590": 40883.0, "40595": 39977.0, "40600": 38646.0, "40605": 38575.0, "40610": 38029.0, "40615": 39317.0, "40620": 39278.0, "40625": 40746.0, "40630": 40071.0, "40635": 39990.0, "40640": 40812.0, "40645": 41508.0, "40650": 40735.0, "40655": 40725.0, "40660": 39281.0, "40665": 39283.0, "40670": 39278.0, "40675": 39428.0, "40680": 40798.0, "40685": 40798.0, "40690": 40822.0, "40695": 38591.0, "40700": 40054.0, "40705": 38718.0, "40710": 40058.0, "40715": 41499.0, "40720": 38500.0, "40725": 38583.0, "40730": 40806.0, "40735": 40211.0, "40740": 41523.0, "40745": 36576.0, "40750": 40815.0, "40755": 41498.0, "40760": 40118.0, "40765": 41565.0, "40770": 41497.0, "40775": 41504.0, "40780": 38725.0, "40785": 39969.0, "40790": 39357.0, "40795": 40055.0, "40800": 40870.0, "40805": 40801.0, "40810": 38035.0, "40815": 39996.0, "40820": 40042.0, "40825": 40739.0, "40830": 40056.0, "40835": 37245.0, "40840": 38510.0, "40845": 40059.0, "40850": 38734.0, "40855": 40038.0, "40860": 39353.0, "40865": 38724.0, "40870": 40104.0, "40875": 39356.0, "40880": 40813.0, "40885": 40801.0, "40890": 40742.0, "40895": 40042.0, "40900": 39293.0, "40905": 40050.0, "40910": 40808.0, "40915": 40027.0, "40920": 38691.0, "40925": 38523.0, "40930": 40194.0, "40935": 40060.0, "40940": 40137.0, "40945": 40818.0, "40950": 40129.0, "40955": 38597.0, "40960": 40140.0, "40965": 41575.0, "40970": 39354.0, "40975": 39975.0, "40980": 40051.0, "40985": 40044.0, "40990": 40067.0, "40995": 37907.0, "41000": 40744.0, "41005": 40741.0, "41010": 36568.0, "41015": 39974.0, "41020": 40829.0, "41025": 38586.0, "41030": 40749.0, "41035": 40049.0, "41040": 39975.0, "41045": 39969.0, "41050": 39983.0, "41055": 38661.0, "41060": 
39291.0, "41065": 41575.0, "41070": 40035.0, "41075": 39959.0, "41080": 39357.0, "41085": 40735.0, "41090": 38651.0, "41095": 40728.0, "41100": 40102.0, "41105": 39963.0, "41110": 41621.0, "41115": 39344.0, "41120": 39196.0, "41125": 40907.0, "41130": 40035.0, "41135": 40825.0, "41140": 40812.0, "41145": 40070.0, "41150": 39203.0, "41155": 39356.0, "41160": 37293.0, "41165": 40827.0, "41170": 40829.0, "41175": 38594.0, "41180": 39285.0, "41185": 40817.0, "41190": 39418.0, "41195": 40125.0, "41200": 39408.0, "41205": 41497.0, "41210": 38669.0, "41215": 39371.0, "41220": 39983.0, "41225": 40042.0, "41230": 40057.0, "41235": 39290.0, "41240": 40105.0, "41245": 41493.0, "41250": 40811.0, "41255": 38614.0, "41260": 40085.0, "41265": 40810.0, "41270": 40047.0, "41275": 40806.0, "41280": 39526.0, "41285": 38756.0, "41290": 40744.0, "41295": 40100.0, "41300": 40054.0, "41305": 40737.0, "41310": 39958.0, "41315": 38511.0, "41320": 39192.0, "41325": 39284.0, "41330": 39438.0, "41335": 41514.0, "41340": 41501.0, "41345": 37909.0, "41350": 40811.0, "41355": 38500.0, "41360": 38629.0, "41365": 42271.0, "41370": 40201.0, "41375": 39279.0, "41380": 41566.0, "41385": 39300.0, "41390": 41496.0, "41395": 40069.0, "41400": 39977.0, "41405": 41515.0, "41410": 40810.0, "41415": 38738.0, "41420": 37274.0, "41425": 40172.0, "41430": 39275.0, "41435": 40045.0, "41440": 43045.0, "41445": 40132.0, "41450": 40107.0, "41455": 38673.0, "41460": 37983.0, "41465": 40811.0, "41470": 36574.0, "41475": 42264.0, "41480": 41501.0, "41485": 40068.0, "41490": 39296.0, "41495": 40813.0, "41500": 40039.0, "41505": 40807.0, "41510": 40045.0, "41515": 40906.0, "41520": 40828.0, "41525": 39976.0, "41530": 39974.0, "41535": 38518.0, "41540": 37664.0, "41545": 39286.0, "41550": 38596.0, "41555": 40812.0, "41560": 40818.0, "41565": 38506.0, "41570": 41490.0, "41575": 41509.0, "41580": 39267.0, "41585": 40060.0, "41590": 40828.0, "41595": 40873.0, "41600": 40753.0, "41605": 37977.0, "41610": 40748.0, "41615": 39385.0, "41620": 38506.0, "41625": 40044.0, "41630": 39219.0, "41635": 41660.0, "41640": 39433.0, "41645": 39358.0, "41650": 40045.0, "41655": 38529.0, "41660": 40833.0, "41665": 40838.0, "41670": 41510.0, "41675": 40805.0, "41680": 40808.0, "41685": 40735.0, "41690": 40740.0, "41695": 38053.0, "41700": 40810.0, "41705": 39414.0, "41710": 40054.0, "41715": 41558.0, "41720": 39345.0, "41725": 38684.0, "41730": 41510.0, "41735": 38043.0, "41740": 40135.0, "41745": 37343.0, "41750": 40225.0, "41755": 39984.0, "41760": 40139.0, "41765": 40744.0, "41770": 40830.0, "41775": 39964.0, "41780": 41492.0, "41785": 41504.0, "41790": 38619.0, "41795": 41510.0, "41800": 41500.0, "41805": 39976.0, "41810": 40807.0, "41815": 39448.0, "41820": 40046.0, "41825": 40058.0, "41830": 40043.0, "41835": 39962.0, "41840": 40052.0, "41845": 38583.0, "41850": 40804.0, "41855": 36383.0, "41860": 40967.0, "41865": 39313.0, "41870": 40126.0, "41875": 39289.0, "41880": 39304.0, "41885": 40820.0, "41890": 40045.0, "41895": 37949.0, "41900": 42269.0, "41905": 40759.0, "41910": 39280.0, "41915": 40142.0, "41920": 38455.0, "41925": 41506.0, "41930": 37680.0, "41935": 39967.0, "41940": 40059.0, "41945": 40734.0, "41950": 41502.0, "41955": 38440.0, "41960": 40734.0, "41965": 41575.0, "41970": 41505.0, "41975": 40046.0, "41980": 40056.0, "41985": 39380.0, "41990": 40812.0, "41995": 39978.0, "42000": 40809.0, "42005": 41492.0, "42010": 40145.0, "42015": 37068.0, "42020": 40029.0, "42025": 40069.0, "42030": 41514.0, "42035": 41506.0, "42040": 41581.0, "42045": 39430.0, 
"42050": 40812.0, "42055": 42267.0, "42060": 40899.0, "42065": 40045.0, "42070": 40816.0, "42075": 37944.0, "42080": 42261.0, "42085": 41583.0, "42090": 41581.0, "42095": 40050.0, "42100": 41495.0, "42105": 40835.0, "42110": 39360.0, "42115": 40905.0, "42120": 37124.0, "42125": 40034.0, "42130": 40747.0, "42135": 38533.0, "42140": 40040.0, "42145": 41587.0, "42150": 39967.0, "42155": 39973.0, "42160": 38002.0, "42165": 42263.0, "42170": 40191.0, "42175": 41513.0, "42180": 40046.0, "42185": 41574.0, "42190": 39289.0, "42195": 40033.0, "42200": 41571.0, "42205": 40748.0, "42210": 38574.0, "42215": 41504.0, "42220": 38544.0, "42225": 39281.0, "42230": 37359.0, "42235": 40826.0, "42240": 37740.0, "42245": 40030.0, "42250": 41494.0, "42255": 39337.0, "42260": 37972.0, "42265": 41582.0, "42270": 39393.0, "42275": 40057.0, "42280": 40130.0, "42285": 37883.0, "42290": 39368.0, "42295": 40731.0, "42300": 40142.0, "42305": 39206.0, "42310": 39364.0, "42315": 39302.0, "42320": 38670.0, "42325": 38810.0, "42330": 39296.0, "42335": 41510.0, "42340": 38504.0, "42345": 40043.0, "42350": 37115.0, "42355": 37699.0, "42360": 39425.0, "42365": 41517.0, "42370": 39972.0, "42375": 38437.0, "42380": 40047.0, "42385": 37961.0, "42390": 40029.0, "42395": 40065.0, "42400": 40729.0, "42405": 39973.0, "42410": 40209.0, "42415": 41510.0, "42420": 38591.0, "42425": 40820.0, "42430": 40881.0, "42435": 40158.0, "42440": 39350.0, "42445": 41554.0, "42450": 40133.0, "42455": 39210.0, "42460": 40065.0, "42465": 40747.0, "42470": 39965.0, "42475": 40738.0, "42480": 39415.0, "42485": 40741.0, "42490": 41599.0, "42495": 39360.0, "42500": 40797.0, "42505": 40818.0, "42510": 37925.0, "42515": 34313.0, "42520": 40122.0, "42525": 40060.0, "42530": 40786.0, "42535": 38085.0, "42540": 40749.0, "42545": 38600.0, "42550": 40815.0, "42555": 41502.0, "42560": 40799.0, "42565": 35414.0, "42570": 40823.0, "42575": 40064.0, "42580": 39212.0, "42585": 40132.0, "42590": 40814.0, "42595": 40748.0, "42600": 40835.0, "42605": 40206.0, "42610": 40737.0, "42615": 40738.0, "42620": 38011.0, "42625": 38528.0, "42630": 38557.0, "42635": 39972.0, "42640": 39382.0, "42645": 39367.0, "42650": 40117.0, "42655": 39985.0, "42660": 40193.0, "42665": 40067.0, "42670": 40044.0, "42675": 38497.0, "42680": 40813.0, "42685": 40765.0, "42690": 41502.0, "42695": 40813.0, "42700": 41502.0, "42705": 41523.0, "42710": 40103.0, "42715": 38745.0, "42720": 41502.0, "42725": 39280.0, "42730": 37929.0, "42735": 40812.0, "42740": 40737.0, "42745": 38649.0, "42750": 41514.0, "42755": 40809.0, "42760": 42273.0, "42765": 42350.0, "42770": 41565.0, "42775": 40046.0, "42780": 40742.0, "42785": 39259.0, "42790": 40031.0, "42795": 41577.0, "42800": 37241.0, "42805": 40874.0, "42810": 41507.0, "42815": 39342.0, "42820": 40049.0, "42825": 35740.0, "42830": 40827.0, "42835": 40801.0, "42840": 37911.0, "42845": 39976.0, "42850": 39276.0, "42855": 40809.0, "42860": 37791.0, "42865": 40745.0, "42870": 37162.0, "42875": 41586.0, "42880": 41508.0, "42885": 39360.0, "42890": 40050.0, "42895": 41577.0, "42900": 40804.0, "42905": 41582.0, "42910": 38571.0, "42915": 38593.0, "42920": 40826.0, "42925": 43036.0, "42930": 40032.0, "42935": 41575.0, "42940": 41514.0, "42945": 39330.0, "42950": 39297.0, "42955": 40754.0, "42960": 38075.0, "42965": 38608.0, "42970": 39275.0, "42975": 41522.0, "42980": 38679.0, "42985": 41597.0, "42990": 40738.0, "42995": 39424.0, "43000": 38711.0, "43005": 40131.0, "43010": 40081.0, "43015": 40734.0, "43020": 41555.0, "43025": 40747.0, "43030": 35972.0, "43035": 
37759.0, "43040": 38683.0, "43045": 40750.0, "43050": 40803.0, "43055": 40747.0, "43060": 40883.0, "43065": 40121.0, "43070": 37898.0, "43075": 38512.0, "43080": 40794.0, "43085": 39290.0, "43090": 39983.0, "43095": 39979.0, "43100": 39966.0, "43105": 38526.0, "43110": 40052.0, "43115": 41569.0, "43120": 40782.0, "43125": 37988.0, "43130": 40123.0, "43135": 40818.0, "43140": 40735.0, "43145": 41583.0, "43150": 41599.0, "43155": 39281.0, "43160": 40801.0, "43165": 36418.0, "43170": 38681.0, "43175": 37194.0, "43180": 41581.0, "43185": 41585.0, "43190": 39415.0, "43195": 41589.0, "43200": 38009.0, "43205": 40068.0, "43210": 39352.0, "43215": 41570.0, "43220": 40803.0, "43225": 38610.0, "43230": 40835.0, "43235": 41518.0, "43240": 40743.0, "43245": 41497.0, "43250": 40747.0, "43255": 40127.0, "43260": 39212.0, "43265": 37769.0, "43270": 41505.0, "43275": 40816.0, "43280": 41496.0, "43285": 37936.0, "43290": 40157.0, "43295": 42277.0, "43300": 40042.0, "43305": 40125.0, "43310": 40065.0, "43315": 40217.0, "43320": 40822.0, "43325": 41499.0, "43330": 38075.0, "43335": 42273.0, "43340": 39984.0, "43345": 39972.0, "43350": 40053.0, "43355": 41507.0, "43360": 40076.0, "43365": 41575.0, "43370": 40143.0, "43375": 40035.0, "43380": 39438.0, "43385": 41572.0, "43390": 39518.0, "43395": 42262.0, "43400": 39961.0, "43405": 40733.0, "43410": 40183.0, "43415": 39431.0, "43420": 40029.0, "43425": 40134.0, "43430": 39350.0, "43435": 40126.0, "43440": 40740.0, "43445": 41506.0, "43450": 38765.0, "43455": 40054.0, "43460": 40745.0, "43465": 38579.0, "43470": 37913.0, "43475": 40050.0, "43480": 40193.0, "43485": 41506.0, "43490": 40821.0, "43495": 40115.0, "43500": 40051.0, "43505": 40799.0, "43510": 40204.0, "43515": 40814.0, "43520": 40892.0, "43525": 41501.0, "43530": 38523.0, "43535": 39353.0, "43540": 39968.0, "43545": 39995.0, "43550": 39975.0, "43555": 40737.0, "43560": 37846.0, "43565": 39307.0, "43570": 37955.0, "43575": 37794.0, "43580": 39280.0, "43585": 39425.0, "43590": 40809.0, "43595": 41572.0, "43600": 40821.0, "43605": 40820.0, "43610": 40046.0, "43615": 40813.0, "43620": 42270.0, "43625": 41513.0, "43630": 41591.0, "43635": 40120.0, "43640": 41592.0, "43645": 38586.0, "43650": 40835.0, "43655": 40096.0, "43660": 37005.0, "43665": 41584.0, "43670": 39361.0, "43675": 40731.0, "43680": 39303.0, "43685": 39433.0, "43690": 39965.0, "43695": 40799.0, "43700": 39215.0, "43705": 40136.0, "43710": 41510.0, "43715": 41591.0, "43720": 39430.0, "43725": 40132.0, "43730": 40750.0, "43735": 40038.0, "43740": 42262.0, "43745": 40878.0, "43750": 39264.0, "43755": 38613.0, "43760": 39413.0, "43765": 39273.0, "43770": 40814.0, "43775": 41513.0, "43780": 40801.0, "43785": 40052.0, "43790": 40731.0, "43795": 42268.0, "43800": 40739.0, "43805": 36905.0, "43810": 40056.0, "43815": 41586.0, "43820": 38679.0, "43825": 40111.0, "43830": 40741.0, "43835": 39271.0, "43840": 39970.0, "43845": 40817.0, "43850": 41501.0, "43855": 38589.0, "43860": 38584.0, "43865": 40045.0, "43870": 40042.0, "43875": 40737.0, "43880": 39351.0, "43885": 39433.0, "43890": 41491.0, "43895": 39301.0, "43900": 39353.0, "43905": 38567.0, "43910": 40051.0, "43915": 40114.0, "43920": 39962.0, "43925": 40743.0, "43930": 39270.0, "43935": 39297.0, "43940": 41501.0, "43945": 40195.0, "43950": 38672.0, "43955": 39296.0, "43960": 40746.0, "43965": 39275.0, "43970": 40059.0, "43975": 40057.0, "43980": 39534.0, "43985": 40813.0, "43990": 40034.0, "43995": 39289.0, "44000": 39356.0, "44005": 40753.0, "44010": 38520.0, "44015": 39277.0, "44020": 40824.0, 
"44025": 39283.0, "44030": 36598.0, "44035": 39970.0, "44040": 39352.0, "44045": 39384.0, "44050": 41499.0, "44055": 39429.0, "44060": 40747.0, "44065": 40101.0, "44070": 40211.0, "44075": 40817.0, "44080": 42270.0, "44085": 38742.0, "44090": 37129.0, "44095": 39435.0, "44100": 40905.0, "44105": 40749.0, "44110": 40186.0, "44115": 36677.0, "44120": 39371.0, "44125": 39203.0, "44130": 42338.0, "44135": 41500.0, "44140": 37805.0, "44145": 40048.0, "44150": 39358.0, "44155": 41495.0, "44160": 40737.0, "44165": 38727.0, "44170": 40748.0, "44175": 35902.0, "44180": 40823.0, "44185": 40105.0, "44190": 36228.0, "44195": 39368.0, "44200": 40052.0, "44205": 40041.0, "44210": 41501.0, "44215": 39355.0, "44220": 38077.0, "44225": 39346.0, "44230": 41578.0, "44235": 40829.0, "44240": 40032.0, "44245": 41579.0, "44250": 40055.0, "44255": 40860.0, "44260": 40818.0, "44265": 41503.0, "44270": 40802.0, "44275": 40896.0, "44280": 40812.0, "44285": 40895.0, "44290": 37337.0, "44295": 38090.0, "44300": 42272.0, "44305": 40767.0, "44310": 39214.0, "44315": 40036.0, "44320": 38651.0, "44325": 40734.0, "44330": 38562.0, "44335": 41572.0, "44340": 37889.0, "44345": 39370.0, "44350": 38673.0, "44355": 38588.0, "44360": 40127.0, "44365": 41506.0, "44370": 41674.0, "44375": 40869.0, "44380": 40820.0, "44385": 40877.0, "44390": 40755.0, "44395": 40751.0, "44400": 40727.0, "44405": 39971.0, "44410": 38104.0, "44415": 39341.0, "44420": 40732.0, "44425": 39267.0, "44430": 37126.0, "44435": 39969.0, "44440": 41572.0, "44445": 39350.0, "44450": 40815.0, "44455": 39368.0, "44460": 38612.0, "44465": 39350.0, "44470": 40055.0, "44475": 43029.0, "44480": 39973.0, "44485": 39220.0, "44490": 38575.0, "44495": 38445.0, "44500": 40096.0, "44505": 37931.0, "44510": 39295.0, "44515": 39971.0, "44520": 42265.0, "44525": 39271.0, "44530": 40733.0, "44535": 39295.0, "44540": 40804.0, "44545": 40736.0, "44550": 38663.0, "44555": 41499.0, "44560": 40744.0, "44565": 41572.0, "44570": 40050.0, "44575": 40092.0, "44580": 40808.0, "44585": 40822.0, "44590": 41579.0, "44595": 40741.0, "44600": 38589.0, "44605": 39991.0, "44610": 40845.0, "44615": 39344.0, "44620": 40047.0, "44625": 40044.0, "44630": 37989.0, "44635": 40746.0, "44640": 40099.0, "44645": 40038.0, "44650": 40045.0, "44655": 40064.0, "44660": 39371.0, "44665": 43037.0, "44670": 40055.0, "44675": 39347.0, "44680": 40737.0, "44685": 39283.0, "44690": 39292.0, "44695": 36360.0, "44700": 39205.0, "44705": 40829.0, "44710": 40045.0, "44715": 39972.0, "44720": 41518.0, "44725": 41491.0, "44730": 39350.0, "44735": 40738.0, "44740": 39208.0, "44745": 40797.0, "44750": 40807.0, "44755": 39273.0, "44760": 40817.0, "44765": 39268.0, "44770": 39235.0, "44775": 40114.0, "44780": 39439.0, "44785": 41507.0, "44790": 40793.0, "44795": 40148.0, "44800": 40817.0, "44805": 40812.0, "44810": 40117.0, "44815": 40124.0, "44820": 41511.0, "44825": 40823.0, "44830": 40192.0, "44835": 40731.0, "44840": 39294.0, "44845": 40169.0, "44850": 39351.0, "44855": 40050.0, "44860": 38687.0, "44865": 39992.0, "44870": 38589.0, "44875": 40804.0, "44880": 39984.0, "44885": 38659.0, "44890": 40812.0, "44895": 40971.0, "44900": 40037.0, "44905": 39325.0, "44910": 39459.0, "44915": 38521.0, "44920": 39284.0, "44925": 40814.0, "44930": 40804.0, "44935": 38666.0, "44940": 39271.0, "44945": 40054.0, "44950": 41502.0, "44955": 40057.0, "44960": 40878.0, "44965": 37975.0, "44970": 40050.0, "44975": 39199.0, "44980": 41576.0, "44985": 39298.0, "44990": 40803.0, "44995": 41500.0, "45000": 40060.0, "45005": 39291.0, "45010": 
40058.0, "45015": 40105.0, "45020": 40045.0, "45025": 37200.0, "45030": 40753.0, "45035": 37229.0, "45040": 38588.0, "45045": 41510.0, "45050": 40737.0, "45055": 37812.0, "45060": 40869.0, "45065": 40038.0, "45070": 40036.0, "45075": 40811.0, "45080": 40119.0, "45085": 39347.0, "45090": 38444.0, "45095": 39291.0, "45100": 40729.0, "45105": 41500.0, "45110": 40823.0, "45115": 40734.0, "45120": 40820.0, "45125": 41580.0, "45130": 39205.0, "45135": 39422.0, "45140": 40109.0, "45145": 40895.0, "45150": 39277.0, "45155": 41501.0, "45160": 40743.0, "45165": 40090.0, "45170": 40065.0, "45175": 37919.0, "45180": 39287.0, "45185": 39424.0, "45190": 41579.0, "45195": 40828.0, "45200": 39316.0, "45205": 38670.0, "45210": 38613.0, "45215": 40882.0, "45220": 40818.0, "45225": 40811.0, "45230": 39386.0, "45235": 39361.0, "45240": 39429.0, "45245": 40740.0, "45250": 40831.0, "45255": 41502.0, "45260": 41496.0, "45265": 40040.0, "45270": 37754.0, "45275": 40050.0, "45280": 40816.0, "45285": 39961.0, "45290": 41590.0, "45295": 41499.0, "45300": 37311.0, "45305": 40051.0, "45310": 40031.0, "45315": 39448.0, "45320": 39979.0, "45325": 41505.0, "45330": 39274.0, "45335": 39969.0, "45340": 40101.0, "45345": 39976.0, "45350": 38454.0, "45355": 42426.0, "45360": 40149.0, "45365": 40796.0, "45370": 39291.0, "45375": 39969.0, "45380": 39346.0, "45385": 43034.0, "45390": 39310.0, "45395": 40825.0, "45400": 40181.0, "45405": 40818.0, "45410": 40818.0, "45415": 40817.0, "45420": 38436.0, "45425": 39359.0, "45430": 40054.0, "45435": 39372.0, "45440": 38689.0, "45445": 35213.0, "45450": 39306.0, "45455": 41510.0, "45460": 36537.0, "45465": 38682.0, "45470": 40057.0, "45475": 40042.0, "45480": 39220.0, "45485": 40068.0, "45490": 40794.0, "45495": 40739.0, "45500": 40048.0, "45505": 39971.0, "45510": 40041.0, "45515": 37694.0, "45520": 40052.0, "45525": 40031.0, "45530": 41598.0, "45535": 38662.0, "45540": 40807.0, "45545": 40128.0, "45550": 38731.0, "45555": 40738.0, "45560": 39276.0, "45565": 39995.0, "45570": 40065.0, "45575": 40736.0, "45580": 40759.0, "45585": 40813.0, "45590": 39967.0, "45595": 38447.0, "45600": 41662.0, "45605": 39197.0, "45610": 40819.0, "45615": 37376.0, "45620": 39403.0, "45625": 37447.0, "45630": 38823.0, "45635": 39223.0, "45640": 40061.0, "45645": 38503.0, "45650": 42283.0, "45655": 39973.0, "45660": 37754.0, "45665": 38650.0, "45670": 41494.0, "45675": 40747.0, "45680": 40734.0, "45685": 41564.0, "45690": 39312.0, "45695": 40745.0, "45700": 40051.0, "45705": 40736.0, "45710": 39988.0, "45715": 39376.0, "45720": 38732.0, "45725": 40814.0, "45730": 40895.0, "45735": 41555.0, "45740": 40746.0, "45745": 39195.0, "45750": 40811.0, "45755": 41516.0, "45760": 38498.0, "45765": 40900.0, "45770": 39512.0, "45775": 38531.0, "45780": 40190.0, "45785": 41499.0, "45790": 39199.0, "45795": 40808.0, "45800": 40814.0, "45805": 40042.0, "45810": 41501.0, "45815": 38587.0, "45820": 41565.0, "45825": 41559.0, "45830": 39977.0, "45835": 40737.0, "45840": 40743.0, "45845": 41504.0, "45850": 40143.0, "45855": 40738.0, "45860": 40218.0, "45865": 39976.0, "45870": 40827.0, "45875": 40053.0, "45880": 40726.0, "45885": 37829.0, "45890": 40748.0, "45895": 21199.0, "45900": 40036.0, "45905": 40823.0, "45910": 40111.0, "45915": 39450.0, "45920": 39970.0, "45925": 40132.0, "45930": 39977.0, "45935": 39434.0, "45940": 41520.0, "45945": 38662.0, "45950": 40059.0, "45955": 38626.0, "45960": 40742.0, "45965": 40073.0, "45970": 40207.0, "45975": 40815.0, "45980": 37825.0, "45985": 40743.0, "45990": 40817.0, "45995": 39415.0, 
"46000": 37826.0, "46005": 39203.0, "46010": 39434.0, "46015": 41501.0, "46020": 40742.0, "46025": 37887.0, "46030": 40872.0, "46035": 36447.0, "46040": 39439.0, "46045": 38052.0, "46050": 40122.0, "46055": 40816.0, "46060": 38542.0, "46065": 40191.0, "46070": 41508.0, "46075": 39213.0, "46080": 40044.0, "46085": 37154.0, "46090": 38060.0, "46095": 40052.0, "46100": 41504.0, "46105": 39463.0, "46110": 41578.0, "46115": 40744.0, "46120": 40914.0, "46125": 40047.0, "46130": 38520.0, "46135": 40037.0, "46140": 39346.0, "46145": 40033.0, "46150": 40745.0, "46155": 42286.0, "46160": 40134.0, "46165": 40037.0, "46170": 38568.0, "46175": 40826.0, "46180": 41499.0, "46185": 41588.0, "46190": 40112.0, "46195": 39200.0, "46200": 42265.0, "46205": 40036.0, "46210": 39275.0, "46215": 38459.0, "46220": 39360.0, "46225": 40035.0, "46230": 41572.0, "46235": 39289.0, "46240": 40051.0, "46245": 36552.0, "46250": 42275.0, "46255": 40107.0, "46260": 37889.0, "46265": 36639.0, "46270": 40039.0, "46275": 38679.0, "46280": 39517.0, "46285": 39283.0, "46290": 39480.0, "46295": 40824.0, "46300": 38719.0, "46305": 38582.0, "46310": 40894.0, "46315": 38523.0, "46320": 41513.0, "46325": 40138.0, "46330": 39973.0, "46335": 39359.0, "46340": 42281.0, "46345": 39966.0, "46350": 39285.0, "46355": 40034.0, "46360": 40743.0, "46365": 39979.0, "46370": 39293.0, "46375": 42349.0, "46380": 37731.0, "46385": 38603.0, "46390": 39455.0, "46395": 39345.0, "46400": 40722.0, "46405": 37882.0, "46410": 40803.0, "46415": 40047.0, "46420": 39367.0, "46425": 38688.0, "46430": 36605.0, "46435": 39972.0, "46440": 41601.0, "46445": 40124.0, "46450": 40979.0, "46455": 39342.0, "46460": 38746.0, "46465": 39364.0, "46470": 40132.0, "46475": 40045.0, "46480": 40803.0, "46485": 38439.0, "46490": 38642.0, "46495": 41569.0, "46500": 40742.0, "46505": 38141.0, "46510": 39237.0, "46515": 41496.0, "46520": 37326.0, "46525": 41587.0, "46530": 40983.0, "46535": 40821.0, "46540": 40875.0, "46545": 39982.0, "46550": 39233.0, "46555": 40818.0, "46560": 40743.0, "46565": 41507.0, "46570": 40125.0, "46575": 41594.0, "46580": 40807.0, "46585": 39291.0, "46590": 40038.0, "46595": 40214.0, "46600": 39268.0, "46605": 40731.0, "46610": 39978.0, "46615": 39436.0, "46620": 40883.0, "46625": 39956.0, "46630": 40738.0, "46635": 41597.0, "46640": 40067.0, "46645": 41513.0, "46650": 39333.0, "46655": 41502.0, "46660": 39293.0, "46665": 40107.0, "46670": 38581.0, "46675": 40793.0, "46680": 37915.0, "46685": 41507.0, "46690": 39978.0, "46695": 40745.0, "46700": 39269.0, "46705": 40737.0, "46710": 40966.0, "46715": 38580.0, "46720": 40800.0, "46725": 39292.0, "46730": 38618.0, "46735": 40822.0, "46740": 39308.0, "46745": 40765.0, "46750": 43026.0, "46755": 40817.0, "46760": 40815.0, "46765": 40045.0, "46770": 39979.0, "46775": 40032.0, "46780": 38722.0, "46785": 40051.0, "46790": 42264.0, "46795": 39425.0, "46800": 42344.0, "46805": 39512.0, "46810": 39978.0, "46815": 40155.0, "46820": 38605.0, "46825": 38745.0, "46830": 38599.0, "46835": 40068.0, "46840": 40896.0, "46845": 37196.0, "46850": 41578.0, "46855": 41571.0, "46860": 39606.0, "46865": 41515.0, "46870": 38578.0, "46875": 40891.0, "46880": 40742.0, "46885": 41571.0, "46890": 39190.0, "46895": 39411.0, "46900": 40827.0, "46905": 39285.0, "46910": 39484.0, "46915": 40801.0, "46920": 40743.0, "46925": 40901.0, "46930": 37990.0, "46935": 42270.0, "46940": 41506.0, "46945": 38620.0, "46950": 40824.0, "46955": 41491.0, "46960": 40802.0, "46965": 39972.0, "46970": 41511.0, "46975": 39311.0, "46980": 40135.0, "46985": 
41584.0, "46990": 41505.0, "46995": 39973.0, "47000": 39970.0, "47005": 37000.0, "47010": 40742.0, "47015": 40796.0, "47020": 38763.0, "47025": 38624.0, "47030": 38069.0, "47035": 39961.0, "47040": 40040.0, "47045": 41574.0, "47050": 39214.0, "47055": 40053.0, "47060": 41561.0, "47065": 40053.0, "47070": 40110.0, "47075": 40119.0, "47080": 41496.0, "47085": 40046.0, "47090": 39294.0, "47095": 41500.0, "47100": 39347.0, "47105": 40211.0, "47110": 41512.0, "47115": 40051.0, "47120": 41500.0, "47125": 40825.0, "47130": 40838.0, "47135": 40042.0, "47140": 39365.0, "47145": 40742.0, "47150": 38762.0, "47155": 40039.0, "47160": 40043.0, "47165": 36617.0, "47170": 40818.0, "47175": 41565.0, "47180": 40057.0, "47185": 39960.0, "47190": 40138.0, "47195": 39961.0, "47200": 37998.0, "47205": 40104.0, "47210": 39430.0, "47215": 39998.0, "47220": 38679.0, "47225": 39445.0, "47230": 40820.0, "47235": 40914.0, "47240": 40812.0, "47245": 39970.0, "47250": 40100.0, "47255": 41517.0, "47260": 40888.0, "47265": 39285.0, "47270": 40027.0, "47275": 40023.0, "47280": 40040.0, "47285": 38595.0, "47290": 40823.0, "47295": 40750.0, "47300": 40810.0, "47305": 41507.0, "47310": 42274.0, "47315": 38739.0, "47320": 40738.0, "47325": 40900.0, "47330": 40063.0, "47335": 40049.0, "47340": 39209.0, "47345": 39964.0, "47350": 41508.0, "47355": 41520.0, "47360": 38057.0, "47365": 40042.0, "47370": 40791.0, "47375": 40030.0, "47380": 39224.0, "47385": 37155.0, "47390": 40800.0, "47395": 40056.0, "47400": 40735.0, "47405": 40038.0, "47410": 40039.0, "47415": 40831.0, "47420": 39289.0, "47425": 40816.0, "47430": 40745.0, "47435": 39345.0, "47440": 39201.0, "47445": 40747.0, "47450": 41599.0, "47455": 40806.0, "47460": 40743.0, "47465": 40121.0, "47470": 40176.0, "47475": 40801.0, "47480": 40802.0, "47485": 37072.0, "47490": 40896.0, "47495": 38672.0, "47500": 42263.0, "47505": 40743.0, "47510": 40742.0, "47515": 41497.0, "47520": 37115.0, "47525": 40835.0, "47530": 40748.0, "47535": 40735.0, "47540": 40746.0, "47545": 39450.0, "47550": 40801.0, "47555": 40739.0, "47560": 40046.0, "47565": 39292.0, "47570": 42276.0, "47575": 40740.0, "47580": 40864.0, "47585": 38669.0, "47590": 37064.0, "47595": 39962.0, "47600": 40891.0, "47605": 40132.0, "47610": 41515.0, "47615": 38841.0, "47620": 40803.0, "47625": 39345.0, "47630": 39980.0, "47635": 41504.0, "47640": 39501.0, "47645": 40205.0, "47650": 40806.0, "47655": 39981.0, "47660": 40757.0, "47665": 41513.0, "47670": 40822.0, "47675": 39210.0, "47680": 41504.0, "47685": 40807.0, "47690": 39968.0, "47695": 40807.0, "47700": 39966.0, "47705": 39973.0, "47710": 38451.0, "47715": 39294.0, "47720": 39294.0, "47725": 39444.0, "47730": 40046.0, "47735": 41513.0, "47740": 41567.0, "47745": 42273.0, "47750": 39362.0, "47755": 41564.0, "47760": 40832.0, "47765": 40834.0, "47770": 39264.0, "47775": 40802.0, "47780": 39266.0, "47785": 40043.0, "47790": 38051.0, "47795": 41654.0, "47800": 41506.0, "47805": 40745.0, "47810": 39568.0, "47815": 38588.0, "47820": 38746.0, "47825": 37982.0, "47830": 40739.0, "47835": 39373.0, "47840": 38453.0, "47845": 40048.0, "47850": 42269.0, "47855": 38512.0, "47860": 39976.0, "47865": 40033.0, "47870": 40814.0, "47875": 37891.0, "47880": 37309.0, "47885": 40050.0, "47890": 37892.0, "47895": 41490.0, "47900": 39492.0, "47905": 37301.0, "47910": 40040.0, "47915": 42274.0, "47920": 39975.0, "47925": 39359.0, "47930": 41585.0, "47935": 37812.0, "47940": 39436.0, "47945": 40033.0, "47950": 38515.0, "47955": 40037.0, "47960": 40120.0, "47965": 40738.0, "47970": 40110.0, 
"47975": 40740.0, "47980": 40128.0, "47985": 39487.0, "47990": 38610.0, "47995": 40054.0, "48000": 40145.0, "48005": 40755.0, "48010": 35005.0, "48015": 39972.0, "48020": 37910.0, "48025": 40119.0, "48030": 39353.0, "48035": 40830.0, "48040": 40745.0, "48045": 39274.0, "48050": 39344.0, "48055": 40811.0, "48060": 39198.0, "48065": 40731.0, "48070": 42347.0, "48075": 40127.0, "48080": 40735.0, "48085": 40039.0, "48090": 39983.0, "48095": 40050.0, "48100": 40804.0, "48105": 40816.0, "48110": 40806.0, "48115": 40807.0, "48120": 40812.0, "48125": 40818.0, "48130": 42265.0, "48135": 40188.0, "48140": 40043.0, "48145": 37766.0, "48150": 37332.0, "48155": 40726.0, "48160": 38737.0, "48165": 39292.0, "48170": 38654.0, "48175": 39975.0, "48180": 39966.0, "48185": 40866.0, "48190": 40053.0, "48195": 39281.0, "48200": 40058.0, "48205": 40892.0, "48210": 39276.0, "48215": 39323.0, "48220": 40880.0, "48225": 37900.0, "48230": 39426.0, "48235": 40055.0, "48240": 40890.0, "48245": 41499.0, "48250": 40730.0, "48255": 38473.0, "48260": 41569.0, "48265": 41505.0, "48270": 41514.0, "48275": 39285.0, "48280": 40151.0, "48285": 40743.0, "48290": 41509.0, "48295": 40729.0, "48300": 37973.0, "48305": 40832.0, "48310": 38454.0, "48315": 39224.0, "48320": 37357.0, "48325": 38582.0, "48330": 38585.0, "48335": 41584.0, "48340": 39980.0, "48345": 39352.0, "48350": 40759.0, "48355": 40112.0, "48360": 41579.0, "48365": 39441.0, "48370": 39448.0, "48375": 39336.0, "48380": 38075.0, "48385": 40067.0, "48390": 38517.0, "48395": 40046.0, "48400": 41507.0, "48405": 37227.0, "48410": 39342.0, "48415": 40797.0, "48420": 40796.0, "48425": 39366.0, "48430": 40129.0, "48435": 40816.0, "48440": 41577.0, "48445": 40098.0, "48450": 38656.0, "48455": 39317.0, "48460": 40031.0, "48465": 40034.0, "48470": 37051.0, "48475": 40125.0, "48480": 41502.0, "48485": 39998.0, "48490": 39338.0, "48495": 39299.0, "48500": 39415.0, "48505": 39960.0, "48510": 40114.0, "48515": 36459.0, "48520": 39338.0, "48525": 40831.0, "48530": 35586.0, "48535": 38763.0, "48540": 40750.0, "48545": 40821.0, "48550": 38618.0, "48555": 40052.0, "48560": 40203.0, "48565": 40822.0, "48570": 39355.0, "48575": 41510.0, "48580": 40801.0, "48585": 39354.0, "48590": 41509.0, "48595": 38560.0, "48600": 38591.0, "48605": 39455.0, "48610": 40807.0, "48615": 38050.0, "48620": 41567.0, "48625": 41573.0, "48630": 39969.0, "48635": 40805.0, "48640": 41571.0, "48645": 38765.0, "48650": 38610.0, "48655": 40751.0, "48660": 39211.0, "48665": 40792.0, "48670": 40038.0, "48675": 39442.0, "48680": 40816.0, "48685": 40807.0, "48690": 40742.0, "48695": 38543.0, "48700": 40804.0, "48705": 40045.0, "48710": 41497.0, "48715": 42270.0, "48720": 39210.0, "48725": 40796.0, "48730": 38533.0, "48735": 40837.0, "48740": 39968.0, "48745": 38463.0, "48750": 40819.0, "48755": 40031.0, "48760": 40821.0, "48765": 40832.0, "48770": 39994.0, "48775": 41592.0, "48780": 40805.0, "48785": 40873.0, "48790": 37990.0, "48795": 41579.0, "48800": 40106.0, "48805": 37170.0, "48810": 38048.0, "48815": 38680.0, "48820": 39279.0, "48825": 37838.0, "48830": 39280.0, "48835": 41508.0, "48840": 40055.0, "48845": 38059.0, "48850": 37827.0, "48855": 40110.0, "48860": 40823.0, "48865": 40809.0, "48870": 40787.0, "48875": 41567.0, "48880": 39292.0, "48885": 40119.0, "48890": 40747.0, "48895": 39372.0, "48900": 40121.0, "48905": 41593.0, "48910": 38746.0, "48915": 41581.0, "48920": 41504.0, "48925": 41659.0, "48930": 40889.0, "48935": 41519.0, "48940": 39524.0, "48945": 39971.0, "48950": 40193.0, "48955": 37895.0, "48960": 
40807.0, "48965": 39287.0, "48970": 39983.0, "48975": 39286.0, "48980": 39973.0, "48985": 40117.0, "48990": 39382.0, "48995": 41554.0, "49000": 38601.0, "49005": 39571.0, "49010": 41509.0, "49015": 40894.0, "49020": 40117.0, "49025": 39986.0, "49030": 38696.0, "49035": 39385.0, "49040": 36501.0, "49045": 40049.0, "49050": 40041.0, "49055": 38573.0, "49060": 39280.0, "49065": 39282.0, "49070": 40964.0, "49075": 40801.0, "49080": 36919.0, "49085": 39382.0, "49090": 40112.0, "49095": 40816.0, "49100": 40115.0, "49105": 39350.0, "49110": 40735.0, "49115": 41499.0, "49120": 41514.0, "49125": 40878.0, "49130": 40742.0, "49135": 41568.0, "49140": 40739.0, "49145": 41501.0, "49150": 40818.0, "49155": 41496.0, "49160": 38432.0, "49165": 38755.0, "49170": 40068.0, "49175": 40730.0, "49180": 38522.0, "49185": 37195.0, "49190": 41510.0, "49195": 38012.0, "49200": 40887.0, "49205": 39286.0, "49210": 40742.0, "49215": 40058.0, "49220": 40031.0, "49225": 40741.0, "49230": 40813.0, "49235": 40741.0, "49240": 40056.0, "49245": 41494.0, "49250": 40804.0, "49255": 38585.0, "49260": 39284.0, "49265": 36537.0, "49270": 40739.0, "49275": 40046.0, "49280": 40746.0, "49285": 40126.0, "49290": 40044.0, "49295": 37901.0, "49300": 40043.0, "49305": 39982.0, "49310": 41514.0, "49315": 40129.0, "49320": 41506.0, "49325": 38612.0, "49330": 41497.0, "49335": 38530.0, "49340": 39368.0, "49345": 40814.0, "49350": 40063.0, "49355": 38743.0, "49360": 39357.0, "49365": 40812.0, "49370": 40750.0, "49375": 40736.0, "49380": 40049.0, "49385": 39339.0, "49390": 38776.0, "49395": 40130.0, "49400": 42264.0, "49405": 38581.0, "49410": 37902.0, "49415": 40051.0, "49420": 41507.0, "49425": 40052.0, "49430": 41508.0, "49435": 40801.0, "49440": 40118.0, "49445": 39283.0, "49450": 39994.0, "49455": 41506.0, "49460": 39350.0, "49465": 42333.0, "49470": 40135.0, "49475": 41515.0, "49480": 38670.0, "49485": 40054.0, "49490": 40889.0, "49495": 40809.0, "49500": 40882.0, "49505": 40110.0, "49510": 40889.0, "49515": 41503.0, "49520": 41498.0, "49525": 38609.0, "49530": 40063.0, "49535": 40727.0, "49540": 40811.0, "49545": 39372.0, "49550": 40731.0, "49555": 40814.0, "49560": 39968.0, "49565": 37957.0, "49570": 37876.0, "49575": 39427.0, "49580": 38625.0, "49585": 41603.0, "49590": 39981.0, "49595": 40821.0, "49600": 40812.0, "49605": 40069.0, "49610": 38729.0, "49615": 38674.0, "49620": 40803.0, "49625": 38585.0, "49630": 41578.0, "49635": 40872.0, "49640": 38757.0, "49645": 38500.0, "49650": 42283.0, "49655": 39988.0, "49660": 43035.0, "49665": 40819.0, "49670": 40733.0, "49675": 37820.0, "49680": 39969.0, "49685": 40745.0, "49690": 39343.0, "49695": 40043.0, "49700": 39412.0, "49705": 36518.0, "49710": 39372.0, "49715": 40742.0, "49720": 40734.0, "49725": 40735.0, "49730": 41508.0, "49735": 40803.0, "49740": 41563.0, "49745": 38051.0, "49750": 37755.0, "49755": 39978.0, "49760": 41570.0, "49765": 40050.0, "49770": 38497.0, "49775": 37810.0, "49780": 40040.0, "49785": 42264.0, "49790": 39328.0, "49795": 39285.0, "49800": 40740.0, "49805": 40879.0, "49810": 39281.0, "49815": 40807.0, "49820": 36537.0, "49825": 39387.0, "49830": 39373.0, "49835": 40746.0, "49840": 39340.0, "49845": 40200.0, "49850": 39345.0, "49855": 40054.0, "49860": 41588.0, "49865": 38819.0, "49870": 36512.0, "49875": 40805.0, "49880": 41638.0, "49885": 40726.0, "49890": 40814.0, "49895": 40828.0, "49900": 40814.0, "49905": 40879.0, "49910": 40814.0, "49915": 40900.0, "49920": 40814.0, "49925": 40826.0, "49930": 42274.0, "49935": 41504.0, "49940": 40728.0, "49945": 40212.0, 
"49950": 40799.0, "49955": 40062.0, "49960": 40890.0, "49965": 40729.0, "49970": 38433.0, "49975": 39362.0, "49980": 39980.0, "49985": 41511.0, "49990": 39260.0, "49995": 40103.0, "50000": 40115.0, "50005": 37757.0, "50010": 38463.0, "50015": 37757.0, "50020": 40810.0, "50025": 35238.0, "50030": 41499.0, "50035": 40735.0, "50040": 39289.0, "50045": 39207.0, "50050": 40813.0, "50055": 40117.0, "50060": 39350.0, "50065": 40109.0, "50070": 40749.0, "50075": 38673.0, "50080": 41504.0, "50085": 38586.0, "50090": 40739.0, "50095": 38027.0, "50100": 37906.0, "50105": 40817.0, "50110": 38665.0, "50115": 38670.0, "50120": 37074.0, "50125": 38576.0, "50130": 39352.0, "50135": 39270.0, "50140": 39420.0, "50145": 40744.0, "50150": 37219.0, "50155": 40822.0, "50160": 40043.0, "50165": 39199.0, "50170": 40738.0, "50175": 37901.0, "50180": 40050.0, "50185": 40805.0, "50190": 37318.0, "50195": 39293.0, "50200": 40038.0, "50205": 38660.0, "50210": 39265.0, "50215": 40801.0, "50220": 39445.0, "50225": 39975.0, "50230": 40731.0, "50235": 40135.0, "50240": 39968.0, "50245": 42263.0, "50250": 37294.0, "50255": 40811.0, "50260": 40149.0, "50265": 40135.0, "50270": 36985.0, "50275": 40743.0, "50280": 41497.0, "50285": 40744.0, "50290": 40120.0, "50295": 41577.0, "50300": 40124.0, "50305": 40808.0, "50310": 38779.0, "50315": 40097.0, "50320": 40890.0, "50325": 38730.0, "50330": 39366.0, "50335": 38068.0, "50340": 40037.0, "50345": 39301.0, "50350": 39980.0, "50355": 40749.0, "50360": 40037.0, "50365": 40724.0, "50370": 39351.0, "50375": 40052.0, "50380": 39979.0, "50385": 40809.0, "50390": 39271.0, "50395": 40805.0, "50400": 37824.0, "50405": 40879.0, "50410": 36495.0, "50415": 40032.0, "50420": 40118.0, "50425": 38626.0, "50430": 39286.0, "50435": 39391.0, "50440": 41499.0, "50445": 40807.0, "50450": 41503.0, "50455": 39278.0, "50460": 41592.0, "50465": 38658.0, "50470": 41597.0, "50475": 40821.0, "50480": 39280.0, "50485": 38047.0, "50490": 40052.0, "50495": 39348.0, "50500": 40117.0, "50505": 41515.0, "50510": 40118.0, "50515": 40196.0, "50520": 42274.0, "50525": 39266.0, "50530": 38573.0, "50535": 38607.0, "50540": 39340.0, "50545": 40746.0, "50550": 39416.0, "50555": 40048.0, "50560": 40057.0, "50565": 42262.0, "50570": 37074.0, "50575": 40126.0, "50580": 39282.0, "50585": 40746.0, "50590": 39218.0, "50595": 40793.0, "50600": 41516.0, "50605": 41511.0, "50610": 40048.0, "50615": 40823.0, "50620": 39332.0, "50625": 40882.0, "50630": 38663.0, "50635": 40733.0, "50640": 41575.0, "50645": 40024.0, "50650": 40040.0, "50655": 40821.0, "50660": 40820.0, "50665": 40737.0, "50670": 40830.0, "50675": 40752.0, "50680": 40113.0, "50685": 39210.0, "50690": 37074.0, "50695": 40797.0, "50700": 40734.0, "50705": 41497.0, "50710": 39290.0, "50715": 38577.0, "50720": 38762.0, "50725": 39346.0, "50730": 37037.0, "50735": 36437.0, "50740": 40906.0, "50745": 40730.0, "50750": 39220.0, "50755": 39961.0, "50760": 39266.0, "50765": 41489.0, "50770": 38602.0, "50775": 37988.0, "50780": 40053.0, "50785": 39230.0, "50790": 41498.0, "50795": 40819.0, "50800": 38595.0, "50805": 38499.0, "50810": 40039.0, "50815": 40804.0, "50820": 42277.0, "50825": 39364.0, "50830": 40819.0, "50835": 42276.0, "50840": 41505.0, "50845": 36666.0, "50850": 40126.0, "50855": 39285.0, "50860": 39961.0}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": 1117047808.0, "5": 1117048320.0, "10": 1117048320.0, "15": 1117048320.0, "20": 1117048320.0, "25": 1117048320.0, "30": 1117048320.0, "35": 1117048320.0, "40": 
1117048320.0, "45": 1117048320.0, "50": 1117048320.0, "55": 1117048320.0, "60": 1117048320.0, "65": 1117048320.0, "70": 1117048320.0, "75": 1117048320.0, "80": 1117048320.0, "85": 1117048320.0, "90": 1117048320.0, "95": 1117048320.0, "100": 1117048320.0, "105": 1117048320.0, "110": 1117048320.0, "115": 1117048320.0, "120": 1117048320.0, "125": 1117048320.0, "130": 1117048320.0, "135": 1117048320.0, "140": 1117048320.0, "145": 1117048320.0, "150": 1117048320.0, "155": 1117048320.0, "160": 1117048320.0, "165": 1117048320.0, "170": 1117048320.0, "175": 1117048320.0, "180": 1117048320.0, "185": 1117048320.0, "190": 1117048320.0, "195": 1117048320.0, "200": 1117048320.0, "205": 1117048320.0, "210": 1117048320.0, "215": 1117048320.0, "220": 1117048320.0, "225": 1117048320.0, "230": 1117048320.0, "235": 1117048320.0, "240": 1117048320.0, "245": 1117048320.0, "250": 1117048320.0, "255": 1117048320.0, "260": 1117048320.0, "265": 1117048320.0, "270": 1117048320.0, "275": 1117048320.0, "280": 1117048320.0, "285": 1117048320.0, "290": 1117048320.0, "295": 1117048320.0, "300": 1117048320.0, "305": 1117048320.0, "310": 1117048320.0, "315": 1117048320.0, "320": 1117048320.0, "325": 1117048320.0, "330": 1117048320.0, "335": 1117048320.0, "340": 1117048320.0, "345": 1117048320.0, "350": 1117048320.0, "355": 1117048320.0, "360": 1117048320.0, "365": 1117048320.0, "370": 1117048320.0, "375": 1117048320.0, "380": 1117048320.0, "385": 1117048320.0, "390": 1117048320.0, "395": 1117048320.0, "400": 1117048320.0, "405": 1117048320.0, "410": 1117048320.0, "415": 1117048320.0, "420": 1117048320.0, "425": 1117048320.0, "430": 1117048320.0, "435": 1117048320.0, "440": 1117048320.0, "445": 1117048320.0, "450": 1117048320.0, "455": 1117048320.0, "460": 1117048320.0, "465": 1117048320.0, "470": 1117048320.0, "475": 1117048320.0, "480": 1117048320.0, "485": 1117048320.0, "490": 1117048320.0, "495": 1117048320.0, "500": 1117048320.0, "505": 1117048320.0, "510": 1117048320.0, "515": 1117048320.0, "520": 1117048320.0, "525": 1117048320.0, "530": 1117048320.0, "535": 1117048320.0, "540": 1117048320.0, "545": 1117048320.0, "550": 1117048320.0, "555": 1117048320.0, "560": 1117048320.0, "565": 1117048320.0, "570": 1117048320.0, "575": 1117048320.0, "580": 1117048320.0, "585": 1117048320.0, "590": 1117048320.0, "595": 1117048320.0, "600": 1117048320.0, "605": 1117048320.0, "610": 1117048320.0, "615": 1117048320.0, "620": 1117048320.0, "625": 1117048320.0, "630": 1117048320.0, "635": 1117048320.0, "640": 1117048320.0, "645": 1117048320.0, "650": 1117048320.0, "655": 1117048320.0, "660": 1117048320.0, "665": 1117048320.0, "670": 1117048320.0, "675": 1117048320.0, "680": 1117048320.0, "685": 1117048320.0, "690": 1117048320.0, "695": 1117048320.0, "700": 1117048320.0, "705": 1117048320.0, "710": 1117048320.0, "715": 1117048320.0, "720": 1117048320.0, "725": 1117048320.0, "730": 1117048320.0, "735": 1117048320.0, "740": 1117048320.0, "745": 1117048320.0, "750": 1117048320.0, "755": 1117048320.0, "760": 1117048320.0, "765": 1117048320.0, "770": 1117048320.0, "775": 1117048320.0, "780": 1117048320.0, "785": 1117048320.0, "790": 1117048320.0, "795": 1117048320.0, "800": 1117048320.0, "805": 1117048320.0, "810": 1117048320.0, "815": 1117048320.0, "820": 1117048320.0, "825": 1117048320.0, "830": 1117048320.0, "835": 1117048320.0, "840": 1117048320.0, "845": 1117048320.0, "850": 1117048320.0, "855": 1117048320.0, "860": 1117048320.0, "865": 1117048320.0, "870": 1117048320.0, "875": 1117048320.0, "880": 1117048320.0, "885": 1117048320.0, 
"890": 1117048320.0, "895": 1117048320.0, "900": 1117048320.0, "905": 1117048320.0, "910": 1117048320.0, "915": 1117048320.0, "920": 1117048320.0, "925": 1117048320.0, "930": 1117048320.0, "935": 1117048320.0, "940": 1117048320.0, "945": 1117048320.0, "950": 1117048320.0, "955": 1117048320.0, "960": 1117048320.0, "965": 1117048320.0, "970": 1117048320.0, "975": 1117048320.0, "980": 1117048320.0, "985": 1117048320.0, "990": 1117048320.0, "995": 1117048320.0, "1000": 1117048320.0, "1005": 1117048320.0, "1010": 1117048320.0, "1015": 1117048320.0, "1020": 1117048320.0, "1025": 1117048320.0, "1030": 1117048320.0, "1035": 1117048320.0, "1040": 1117048320.0, "1045": 1117048320.0, "1050": 1117048320.0, "1055": 1117048320.0, "1060": 1117048320.0, "1065": 1117048320.0, "1070": 1117048320.0, "1075": 1117048320.0, "1080": 1117048320.0, "1085": 1117048320.0, "1090": 1117048320.0, "1095": 1117048320.0, "1100": 1117048320.0, "1105": 1117048320.0, "1110": 1117048320.0, "1115": 1117048320.0, "1120": 1117048320.0, "1125": 1117048320.0, "1130": 1117048320.0, "1135": 1117048320.0, "1140": 1117048320.0, "1145": 1117048320.0, "1150": 1117048320.0, "1155": 1117048320.0, "1160": 1117048320.0, "1165": 1117048320.0, "1170": 1117048320.0, "1175": 1117048320.0, "1180": 1117048320.0, "1185": 1117048320.0, "1190": 1117048320.0, "1195": 1117048320.0, "1200": 1117048320.0, "1205": 1117048320.0, "1210": 1117048320.0, "1215": 1117048320.0, "1220": 1117048320.0, "1225": 1117048320.0, "1230": 1117048320.0, "1235": 1117048320.0, "1240": 1117048320.0, "1245": 1117048320.0, "1250": 1117048320.0, "1255": 1117048320.0, "1260": 1117048320.0, "1265": 1117048320.0, "1270": 1117048320.0, "1275": 1117048320.0, "1280": 1117048320.0, "1285": 1117048320.0, "1290": 1117048320.0, "1295": 1117048320.0, "1300": 1117048320.0, "1305": 1117048320.0, "1310": 1117048320.0, "1315": 1117048320.0, "1320": 1117048320.0, "1325": 1117048320.0, "1330": 1117048320.0, "1335": 1117048320.0, "1340": 1117048320.0, "1345": 1117048320.0, "1350": 1117048320.0, "1355": 1117048320.0, "1360": 1117048320.0, "1365": 1117048320.0, "1370": 1117048320.0, "1375": 1117048320.0, "1380": 1117048320.0, "1385": 1117048320.0, "1390": 1117048320.0, "1395": 1117048320.0, "1400": 1117048320.0, "1405": 1117048320.0, "1410": 1117048320.0, "1415": 1117048320.0, "1420": 1117048320.0, "1425": 1117048320.0, "1430": 1117048320.0, "1435": 1117048320.0, "1440": 1117048320.0, "1445": 1117048320.0, "1450": 1117048320.0, "1455": 1117048320.0, "1460": 1117048320.0, "1465": 1117048320.0, "1470": 1117048320.0, "1475": 1117048320.0, "1480": 1117048320.0, "1485": 1117048320.0, "1490": 1117048320.0, "1495": 1117048320.0, "1500": 1117048320.0, "1505": 1117048320.0, "1510": 1117048320.0, "1515": 1117048320.0, "1520": 1117048320.0, "1525": 1117048320.0, "1530": 1117048320.0, "1535": 1117048320.0, "1540": 1117048320.0, "1545": 1117048320.0, "1550": 1117048320.0, "1555": 1117048320.0, "1560": 1117048320.0, "1565": 1117048320.0, "1570": 1117048320.0, "1575": 1117048320.0, "1580": 1117048320.0, "1585": 1117048320.0, "1590": 1117048320.0, "1595": 1117048320.0, "1600": 1117048320.0, "1605": 1117048320.0, "1610": 1117048320.0, "1615": 1117048320.0, "1620": 1117048320.0, "1625": 1117048320.0, "1630": 1117048320.0, "1635": 1117048320.0, "1640": 1117048320.0, "1645": 1117048320.0, "1650": 1117048320.0, "1655": 1117048320.0, "1660": 1117048320.0, "1665": 1117048320.0, "1670": 1117048320.0, "1675": 1117048320.0, "1680": 1117048320.0, "1685": 1117048320.0, "1690": 1117048320.0, "1695": 1117048320.0, "1700": 
1117048320.0, "1705": 1117048320.0, "1710": 1117048320.0, "1715": 1117048320.0, "1720": 1117048320.0, "1725": 1117048320.0, "1730": 1117048320.0, "1735": 1117048320.0, "1740": 1117048320.0, "1745": 1117048320.0, "1750": 1117048320.0, "1755": 1117048320.0, "1760": 1117048320.0, "1765": 1117048320.0, "1770": 1117048320.0, "1775": 1117048320.0, "1780": 1117048320.0, "1785": 1117048320.0, "1790": 1117048320.0, "1795": 1117048320.0, "1800": 1117048320.0, "1805": 1117048320.0, "1810": 1117048320.0, "1815": 1117048320.0, "1820": 1117048320.0, "1825": 1117048320.0, "1830": 1117048320.0, "1835": 1117048320.0, "1840": 1117048320.0, "1845": 1117048320.0, "1850": 1117048320.0, "1855": 1117048320.0, "1860": 1117048320.0, "1865": 1117048320.0, "1870": 1117048320.0, "1875": 1117048320.0, "1880": 1117048320.0, "1885": 1117048320.0, "1890": 1117048320.0, "1895": 1117048320.0, "1900": 1117048320.0, "1905": 1117048320.0, "1910": 1117048320.0, "1915": 1117048320.0, "1920": 1117048320.0, "1925": 1117048320.0, "1930": 1117048320.0, "1935": 1117048320.0, "1940": 1117048320.0, "1945": 1117048320.0, "1950": 1117048320.0, "1955": 1117048320.0, "1960": 1117048320.0, "1965": 1117048320.0, "1970": 1117048320.0, "1975": 1117048320.0, "1980": 1117048320.0, "1985": 1117048320.0, "1990": 1117048320.0, "1995": 1117048320.0, "2000": 1117048320.0, "2005": 1117048320.0, "2010": 1117048320.0, "2015": 1117048320.0, "2020": 1117048320.0, "2025": 1117048320.0, "2030": 1117048320.0, "2035": 1117048320.0, "2040": 1117048320.0, "2045": 1117048320.0, "2050": 1117048320.0, "2055": 1117048320.0, "2060": 1117048320.0, "2065": 1117048320.0, "2070": 1117048320.0, "2075": 1117048320.0, "2080": 1117048320.0, "2085": 1117048320.0, "2090": 1117048320.0, "2095": 1117048320.0, "2100": 1117048320.0, "2105": 1117048320.0, "2110": 1117048320.0, "2115": 1117048320.0, "2120": 1117048320.0, "2125": 1117048320.0, "2130": 1117048320.0, "2135": 1117048320.0, "2140": 1117048320.0, "2145": 1117048320.0, "2150": 1117048320.0, "2155": 1117048320.0, "2160": 1117048320.0, "2165": 1117048320.0, "2170": 1117048320.0, "2175": 1117048320.0, "2180": 1117048320.0, "2185": 1117048320.0, "2190": 1117048320.0, "2195": 1117048320.0, "2200": 1117048320.0, "2205": 1117048320.0, "2210": 1117048320.0, "2215": 1117048320.0, "2220": 1117048320.0, "2225": 1117048320.0, "2230": 1117048320.0, "2235": 1117048320.0, "2240": 1117048320.0, "2245": 1117048320.0, "2250": 1117048320.0, "2255": 1117048320.0, "2260": 1117048320.0, "2265": 1117048320.0, "2270": 1117048320.0, "2275": 1117048320.0, "2280": 1117048320.0, "2285": 1117048320.0, "2290": 1117048320.0, "2295": 1117048320.0, "2300": 1117048320.0, "2305": 1117048320.0, "2310": 1117048320.0, "2315": 1117048320.0, "2320": 1117048320.0, "2325": 1117048320.0, "2330": 1117048320.0, "2335": 1117048320.0, "2340": 1117048320.0, "2345": 1117048320.0, "2350": 1117048320.0, "2355": 1117048320.0, "2360": 1117048320.0, "2365": 1117048320.0, "2370": 1117048320.0, "2375": 1117048320.0, "2380": 1117048320.0, "2385": 1117048320.0, "2390": 1117048320.0, "2395": 1117048320.0, "2400": 1117048320.0, "2405": 1117048320.0, "2410": 1117048320.0, "2415": 1117048320.0, "2420": 1117048320.0, "2425": 1117048320.0, "2430": 1117048320.0, "2435": 1117048320.0, "2440": 1117048320.0, "2445": 1117048320.0, "2450": 1117048320.0, "2455": 1117048320.0, "2460": 1117048320.0, "2465": 1117048320.0, "2470": 1117048320.0, "2475": 1117048320.0, "2480": 1117048320.0, "2485": 1117048320.0, "2490": 1117048320.0, "2495": 1117048320.0, "2500": 1117048320.0, "2505": 
1117048320.0, "2510": 1117048320.0, "2515": 1117048320.0, "2520": 1117048320.0, "2525": 1117048320.0, "2530": 1117048320.0, "2535": 1117048320.0, "2540": 1117048320.0, "2545": 1117048320.0, "2550": 1117048320.0, "2555": 1117048320.0, "2560": 1117048320.0, "2565": 1117048320.0, "2570": 1117048320.0, "2575": 1117048320.0, "2580": 1117048320.0, "2585": 1117048320.0, "2590": 1117048320.0, "2595": 1117048320.0, "2600": 1117048320.0, "2605": 1117048320.0, "2610": 1117048320.0, "2615": 1117048320.0, "2620": 1117048320.0, "2625": 1117048320.0, "2630": 1117048320.0, "2635": 1117048320.0, "2640": 1117048320.0, "2645": 1117048320.0, "2650": 1117048320.0, "2655": 1117048320.0, "2660": 1117048320.0, "2665": 1117048320.0, "2670": 1117048320.0, "2675": 1117048320.0, "2680": 1117048320.0, "2685": 1117048320.0, "2690": 1117048320.0, "2695": 1117048320.0, "2700": 1117048320.0, "2705": 1117048320.0, "2710": 1117048320.0, "2715": 1117048320.0, "2720": 1117048320.0, "2725": 1117048320.0, "2730": 1117048320.0, "2735": 1117048320.0, "2740": 1117048320.0, "2745": 1117048320.0, "2750": 1117048320.0, "2755": 1117048320.0, "2760": 1117048320.0, "2765": 1117048320.0, "2770": 1117048320.0, "2775": 1117048320.0, "2780": 1117048320.0, "2785": 1117048320.0, "2790": 1117048320.0, "2795": 1117048320.0, "2800": 1117048320.0, "2805": 1117048320.0, "2810": 1117048320.0, "2815": 1117048320.0, "2820": 1117048320.0, "2825": 1117048320.0, "2830": 1117048320.0, "2835": 1117048320.0, "2840": 1117048320.0, "2845": 1117048320.0, "2850": 1117048320.0, "2855": 1117048320.0, "2860": 1117048320.0, "2865": 1117048320.0, "2870": 1117048320.0, "2875": 1117048320.0, "2880": 1117048320.0, "2885": 1117048320.0, "2890": 1117048320.0, "2895": 1117048320.0, "2900": 1117048320.0, "2905": 1117048320.0, "2910": 1117048320.0, "2915": 1117048320.0, "2920": 1117048320.0, "2925": 1117048320.0, "2930": 1117048320.0, "2935": 1117048320.0, "2940": 1117048320.0, "2945": 1117048320.0, "2950": 1117048320.0, "2955": 1117048320.0, "2960": 1117048320.0, "2965": 1117048320.0, "2970": 1117048320.0, "2975": 1117048320.0, "2980": 1117048320.0, "2985": 1117048320.0, "2990": 1117048320.0, "2995": 1117048320.0, "3000": 1117048320.0, "3005": 1117048320.0, "3010": 1117048320.0, "3015": 1117048320.0, "3020": 1117048320.0, "3025": 1117048320.0, "3030": 1117048320.0, "3035": 1117048320.0, "3040": 1117048320.0, "3045": 1117048320.0, "3050": 1117048320.0, "3055": 1117048320.0, "3060": 1117048320.0, "3065": 1117048320.0, "3070": 1117048320.0, "3075": 1117048320.0, "3080": 1117048320.0, "3085": 1117048320.0, "3090": 1117048320.0, "3095": 1117048320.0, "3100": 1117048320.0, "3105": 1117048320.0, "3110": 1117048320.0, "3115": 1117048320.0, "3120": 1117048320.0, "3125": 1117048320.0, "3130": 1117048320.0, "3135": 1117048320.0, "3140": 1117048320.0, "3145": 1117048320.0, "3150": 1117048320.0, "3155": 1117048320.0, "3160": 1117048320.0, "3165": 1117048320.0, "3170": 1117048320.0, "3175": 1117048320.0, "3180": 1117048320.0, "3185": 1117048320.0, "3190": 1117048320.0, "3195": 1117048320.0, "3200": 1117048320.0, "3205": 1117048320.0, "3210": 1117048320.0, "3215": 1117048320.0, "3220": 1117048320.0, "3225": 1117048320.0, "3230": 1117048320.0, "3235": 1117048320.0, "3240": 1117048320.0, "3245": 1117048320.0, "3250": 1117048320.0, "3255": 1117048320.0, "3260": 1117048320.0, "3265": 1117048320.0, "3270": 1117048320.0, "3275": 1117048320.0, "3280": 1117048320.0, "3285": 1117048320.0, "3290": 1117048320.0, "3295": 1117048320.0, "3300": 1117048320.0, "3305": 1117048320.0, "3310": 
1117048320.0, "3315": 1117048320.0, "3320": 1117048320.0, "3325": 1117048320.0, "3330": 1117048320.0, "3335": 1117048320.0, "3340": 1117048320.0, "3345": 1117048320.0, "3350": 1117048320.0, "3355": 1117048320.0, "3360": 1117048320.0, "3365": 1117048320.0, "3370": 1117048320.0, "3375": 1117048320.0, "3380": 1117048320.0, "3385": 1117048320.0, "3390": 1117048320.0, "3395": 1117048320.0, "3400": 1117048320.0, "3405": 1117048320.0, "3410": 1117048320.0, "3415": 1117048320.0, "3420": 1117048320.0, "3425": 1117048320.0, "3430": 1117048320.0, "3435": 1117048320.0, "3440": 1117048320.0, "3445": 1117048320.0, "3450": 1117048320.0, "3455": 1117048320.0, "3460": 1117048320.0, "3465": 1117048320.0, "3470": 1117048320.0, "3475": 1117048320.0, "3480": 1117048320.0, "3485": 1117048320.0, "3490": 1117048320.0, "3495": 1117048320.0, "3500": 1117048320.0, "3505": 1117048320.0, "3510": 1117048320.0, "3515": 1117048320.0, "3520": 1117048320.0, "3525": 1117048320.0, "3530": 1117048320.0, "3535": 1117048320.0, "3540": 1117048320.0, "3545": 1117048320.0, "3550": 1117048320.0, "3555": 1117048320.0, "3560": 1117048320.0, "3565": 1117048320.0, "3570": 1117048320.0, "3575": 1117048320.0, "3580": 1117048320.0, "3585": 1117048320.0, "3590": 1117048320.0, "3595": 1117048320.0, "3600": 1117048320.0, "3605": 1117048320.0, "3610": 1117048320.0, "3615": 1117048320.0, "3620": 1117048320.0, "3625": 1117048320.0, "3630": 1117048320.0, "3635": 1117048320.0, "3640": 1117048320.0, "3645": 1117048320.0, "3650": 1117048320.0, "3655": 1117048320.0, "3660": 1117048320.0, "3665": 1117048320.0, "3670": 1117048320.0, "3675": 1117048320.0, "3680": 1117048320.0, "3685": 1117048320.0, "3690": 1117048320.0, "3695": 1117048320.0, "3700": 1117048320.0, "3705": 1117048320.0, "3710": 1117048320.0, "3715": 1117048320.0, "3720": 1117048320.0, "3725": 1117048320.0, "3730": 1117048320.0, "3735": 1117048320.0, "3740": 1117048320.0, "3745": 1117048320.0, "3750": 1117048320.0, "3755": 1117048320.0, "3760": 1117048320.0, "3765": 1117048320.0, "3770": 1117048320.0, "3775": 1117048320.0, "3780": 1117048320.0, "3785": 1117048320.0, "3790": 1117048320.0, "3795": 1117048320.0, "3800": 1117048320.0, "3805": 1117048320.0, "3810": 1117048320.0, "3815": 1117048320.0, "3820": 1117048320.0, "3825": 1117048320.0, "3830": 1117048320.0, "3835": 1117048320.0, "3840": 1117048320.0, "3845": 1117048320.0, "3850": 1117048320.0, "3855": 1117048320.0, "3860": 1117048320.0, "3865": 1117048320.0, "3870": 1117048320.0, "3875": 1117048320.0, "3880": 1117048320.0, "3885": 1117048320.0, "3890": 1117048320.0, "3895": 1117048320.0, "3900": 1117048320.0, "3905": 1117048320.0, "3910": 1117048320.0, "3915": 1117048320.0, "3920": 1117048320.0, "3925": 1117048320.0, "3930": 1117048320.0, "3935": 1117048320.0, "3940": 1117048320.0, "3945": 1117048320.0, "3950": 1117048320.0, "3955": 1117048320.0, "3960": 1117048320.0, "3965": 1117048320.0, "3970": 1117048320.0, "3975": 1117048320.0, "3980": 1117048320.0, "3985": 1117048320.0, "3990": 1117048320.0, "3995": 1117048320.0, "4000": 1117048320.0, "4005": 1117048320.0, "4010": 1117048320.0, "4015": 1117048320.0, "4020": 1117048320.0, "4025": 1117048320.0, "4030": 1117048320.0, "4035": 1117048320.0, "4040": 1117048320.0, "4045": 1117048320.0, "4050": 1117048320.0, "4055": 1117048320.0, "4060": 1117048320.0, "4065": 1117048320.0, "4070": 1117048320.0, "4075": 1117048320.0, "4080": 1117048320.0, "4085": 1117048320.0, "4090": 1117048320.0, "4095": 1117048320.0, "4100": 1117048320.0, "4105": 1117048320.0, "4110": 1117048320.0, "4115": 
1117048320.0, "4120": 1117048320.0, "4125": 1117048320.0, "4130": 1117048320.0, "4135": 1117048320.0, "4140": 1117048320.0, "4145": 1117048320.0, "4150": 1117048320.0, "4155": 1117048320.0, "4160": 1117048320.0, "4165": 1117048320.0, "4170": 1117048320.0, "4175": 1117048320.0, "4180": 1117048320.0, "4185": 1117048320.0, "4190": 1117048320.0, "4195": 1117048320.0, "4200": 1117048320.0, "4205": 1117048320.0, "4210": 1117048320.0, "4215": 1117048320.0, "4220": 1117048320.0, "4225": 1117048320.0, "4230": 1117048320.0, "4235": 1117048320.0, "4240": 1117048320.0, "4245": 1117048320.0, "4250": 1117048320.0, "4255": 1117048320.0, "4260": 1117048320.0, "4265": 1117048320.0, "4270": 1117048320.0, "4275": 1117048320.0, "4280": 1117048320.0, "4285": 1117048320.0, "4290": 1117048320.0, "4295": 1117048320.0, "4300": 1117048320.0, "4305": 1117048320.0, "4310": 1117048320.0, "4315": 1117048320.0, "4320": 1117048320.0, "4325": 1117048320.0, "4330": 1117048320.0, "4335": 1117048320.0, "4340": 1117048320.0, "4345": 1117048320.0, "4350": 1117048320.0, "4355": 1117048320.0, "4360": 1117048320.0, "4365": 1117048320.0, "4370": 1117048320.0, "4375": 1117048320.0, "4380": 1117048320.0, "4385": 1117048320.0, "4390": 1117048320.0, "4395": 1117048320.0, "4400": 1117048320.0, "4405": 1117048320.0, "4410": 1117048320.0, "4415": 1117048320.0, "4420": 1117048320.0, "4425": 1117048320.0, "4430": 1117048320.0, "4435": 1117048320.0, "4440": 1117048320.0, "4445": 1117048320.0, "4450": 1117048320.0, "4455": 1117048320.0, "4460": 1117048320.0, "4465": 1117048320.0, "4470": 1117048320.0, "4475": 1117048320.0, "4480": 1117048320.0, "4485": 1117048320.0, "4490": 1117048320.0, "4495": 1117048320.0, "4500": 1117048320.0, "4505": 1117048320.0, "4510": 1117048320.0, "4515": 1117048320.0, "4520": 1117048320.0, "4525": 1117048320.0, "4530": 1117048320.0, "4535": 1117048320.0, "4540": 1117048320.0, "4545": 1117048320.0, "4550": 1117048320.0, "4555": 1117048320.0, "4560": 1117048320.0, "4565": 1117048320.0, "4570": 1117048320.0, "4575": 1117048320.0, "4580": 1117048320.0, "4585": 1117048320.0, "4590": 1117048320.0, "4595": 1117048320.0, "4600": 1117048320.0, "4605": 1117048320.0, "4610": 1117048320.0, "4615": 1117048320.0, "4620": 1117048320.0, "4625": 1117048320.0, "4630": 1117048320.0, "4635": 1117048320.0, "4640": 1117048320.0, "4645": 1117048320.0, "4650": 1117048320.0, "4655": 1117048320.0, "4660": 1117048320.0, "4665": 1117048320.0, "4670": 1117048320.0, "4675": 1117048320.0, "4680": 1117048320.0, "4685": 1117048320.0, "4690": 1117048320.0, "4695": 1117048320.0, "4700": 1117048320.0, "4705": 1117048320.0, "4710": 1117048320.0, "4715": 1117048320.0, "4720": 1117048320.0, "4725": 1117048320.0, "4730": 1117048320.0, "4735": 1117048320.0, "4740": 1117048320.0, "4745": 1117048320.0, "4750": 1117048320.0, "4755": 1117048320.0, "4760": 1117048320.0, "4765": 1117048320.0, "4770": 1117048320.0, "4775": 1117048320.0, "4780": 1117048320.0, "4785": 1117048320.0, "4790": 1117048320.0, "4795": 1117048320.0, "4800": 1117048320.0, "4805": 1117048320.0, "4810": 1117048320.0, "4815": 1117048320.0, "4820": 1117048320.0, "4825": 1117048320.0, "4830": 1117048320.0, "4835": 1117048320.0, "4840": 1117048320.0, "4845": 1117048320.0, "4850": 1117048320.0, "4855": 1117048320.0, "4860": 1117048320.0, "4865": 1117048320.0, "4870": 1117048320.0, "4875": 1117048320.0, "4880": 1117048320.0, "4885": 1117048320.0, "4890": 1117048320.0, "4895": 1117048320.0, "4900": 1117048320.0, "4905": 1117048320.0, "4910": 1117048320.0, "4915": 1117048320.0, "4920": 
1117048320.0, "4925": 1117048320.0, "4930": 1117048320.0, "4935": 1117048320.0, "4940": 1117048320.0, "4945": 1117048320.0, "4950": 1117048320.0, "4955": 1117048320.0, "4960": 1117048320.0, "4965": 1117048320.0, "4970": 1117048320.0, "4975": 1117048320.0, "4980": 1117048320.0, "4985": 1117048320.0, "4990": 1117048320.0, "4995": 1117048320.0, "5000": 1117048320.0, "5005": 1117048320.0, "5010": 1117048320.0, "5015": 1117048320.0, "5020": 1117048320.0, "5025": 1117048320.0, "5030": 1117048320.0, "5035": 1117048320.0, "5040": 1117048320.0, "5045": 1117048320.0, "5050": 1117048320.0, "5055": 1117048320.0, "5060": 1117048320.0, "5065": 1117048320.0, "5070": 1117048320.0, "5075": 1117048320.0, "5080": 1117048320.0, "5085": 1117048320.0, "5090": 1117048320.0, "5095": 1117048320.0, "5100": 1117048320.0, "5105": 1117048320.0, "5110": 1117048320.0, "5115": 1117048320.0, "5120": 1117048320.0, "5125": 1117048320.0, "5130": 1117048320.0, "5135": 1117048320.0, "5140": 1117048320.0, "5145": 1117048320.0, "5150": 1117048320.0, "5155": 1117048320.0, "5160": 1117048320.0, "5165": 1117048320.0, "5170": 1117048320.0, "5175": 1117048320.0, "5180": 1117048320.0, "5185": 1117048320.0, "5190": 1117048320.0, "5195": 1117048320.0, "5200": 1117048320.0, "5205": 1117048320.0, "5210": 1117048320.0, "5215": 1117048320.0, "5220": 1117048320.0, "5225": 1117048320.0, "5230": 1117048320.0, "5235": 1117048320.0, "5240": 1117048320.0, "5245": 1117048320.0, "5250": 1117048320.0, "5255": 1117048320.0, "5260": 1117048320.0, "5265": 1117048320.0, "5270": 1117048320.0, "5275": 1117048320.0, "5280": 1117048320.0, "5285": 1117048320.0, "5290": 1117048320.0, "5295": 1117048320.0, "5300": 1117048320.0, "5305": 1117048320.0, "5310": 1117048320.0, "5315": 1117048320.0, "5320": 1117048320.0, "5325": 1117048320.0, "5330": 1117048320.0, "5335": 1117048320.0, "5340": 1117048320.0, "5345": 1117048320.0, "5350": 1117048320.0, "5355": 1117048320.0, "5360": 1117048320.0, "5365": 1117048320.0, "5370": 1117048320.0, "5375": 1117048320.0, "5380": 1117048320.0, "5385": 1117048320.0, "5390": 1117048320.0, "5395": 1117048320.0, "5400": 1117048320.0, "5405": 1117048320.0, "5410": 1117048320.0, "5415": 1117048320.0, "5420": 1117048320.0, "5425": 1117048320.0, "5430": 1117048320.0, "5435": 1117048320.0, "5440": 1117048320.0, "5445": 1117048320.0, "5450": 1117048320.0, "5455": 1117048320.0, "5460": 1117048320.0, "5465": 1117048320.0, "5470": 1117048320.0, "5475": 1117048320.0, "5480": 1117048320.0, "5485": 1117048320.0, "5490": 1117048320.0, "5495": 1117048320.0, "5500": 1117048320.0, "5505": 1117048320.0, "5510": 1117048320.0, "5515": 1117048320.0, "5520": 1117048320.0, "5525": 1117048320.0, "5530": 1117048320.0, "5535": 1117048320.0, "5540": 1117048320.0, "5545": 1117048320.0, "5550": 1117048320.0, "5555": 1117048320.0, "5560": 1117048320.0, "5565": 1117048320.0, "5570": 1117048320.0, "5575": 1117048320.0, "5580": 1117048320.0, "5585": 1117048320.0, "5590": 1117048320.0, "5595": 1117048320.0, "5600": 1117048320.0, "5605": 1117048320.0, "5610": 1117048320.0, "5615": 1117048320.0, "5620": 1117048320.0, "5625": 1117048320.0, "5630": 1117048320.0, "5635": 1117048320.0, "5640": 1117048320.0, "5645": 1117048320.0, "5650": 1117048320.0, "5655": 1117048320.0, "5660": 1117048320.0, "5665": 1117048320.0, "5670": 1117048320.0, "5675": 1117048320.0, "5680": 1117048320.0, "5685": 1117048320.0, "5690": 1117048320.0, "5695": 1117048320.0, "5700": 1117048320.0, "5705": 1117048320.0, "5710": 1117048320.0, "5715": 1117048320.0, "5720": 1117048320.0, "5725": 
1117048320.0, "5730": 1117048320.0, "5735": 1117048320.0, "5740": 1117048320.0, "5745": 1117048320.0, "5750": 1117048320.0, "5755": 1117048320.0, "5760": 1117048320.0, "5765": 1117048320.0, "5770": 1117048320.0, "5775": 1117048320.0, "5780": 1117048320.0, "5785": 1117048320.0, "5790": 1117048320.0, "5795": 1117048320.0, "5800": 1117048320.0, "5805": 1117048320.0, "5810": 1117048320.0, "5815": 1117048320.0, "5820": 1117048320.0, "5825": 1117048320.0, "5830": 1117048320.0, "5835": 1117048320.0, "5840": 1117048320.0, "5845": 1117048320.0, "5850": 1117048320.0, "5855": 1117048320.0, "5860": 1117048320.0, "5865": 1117048320.0, "5870": 1117048320.0, "5875": 1117048320.0, "5880": 1117048320.0, "5885": 1117048320.0, "5890": 1117048320.0, "5895": 1117048320.0, "5900": 1117048320.0, "5905": 1117048320.0, "5910": 1117048320.0, "5915": 1117048320.0, "5920": 1117048320.0, "5925": 1117048320.0, "5930": 1117048320.0, "5935": 1117048320.0, "5940": 1117048320.0, "5945": 1117048320.0, "5950": 1117048320.0, "5955": 1117048320.0, "5960": 1117048320.0, "5965": 1117048320.0, "5970": 1117048320.0, "5975": 1117048320.0, "5980": 1117048320.0, "5985": 1117048320.0, "5990": 1117048320.0, "5995": 1117048320.0, "6000": 1117048320.0, "6005": 1117048320.0, "6010": 1117048320.0, "6015": 1117048320.0, "6020": 1117048320.0, "6025": 1117048320.0, "6030": 1117048320.0, "6035": 1117048320.0, "6040": 1117048320.0, "6045": 1117048320.0, "6050": 1117048320.0, "6055": 1117048320.0, "6060": 1117048320.0, "6065": 1117048320.0, "6070": 1117048320.0, "6075": 1117048320.0, "6080": 1117048320.0, "6085": 1117048320.0, "6090": 1117048320.0, "6095": 1117048320.0, "6100": 1117048320.0, "6105": 1117048320.0, "6110": 1117048320.0, "6115": 1117048320.0, "6120": 1117048320.0, "6125": 1117048320.0, "6130": 1117048320.0, "6135": 1117048320.0, "6140": 1117048320.0, "6145": 1117048320.0, "6150": 1117048320.0, "6155": 1117048320.0, "6160": 1117048320.0, "6165": 1117048320.0, "6170": 1117048320.0, "6175": 1117048320.0, "6180": 1117048320.0, "6185": 1117048320.0, "6190": 1117048320.0, "6195": 1117048320.0, "6200": 1117048320.0, "6205": 1117048320.0, "6210": 1117048320.0, "6215": 1117048320.0, "6220": 1117048320.0, "6225": 1117048320.0, "6230": 1117048320.0, "6235": 1117048320.0, "6240": 1117048320.0, "6245": 1117048320.0, "6250": 1117048320.0, "6255": 1117048320.0, "6260": 1117048320.0, "6265": 1117048320.0, "6270": 1117048320.0, "6275": 1117048320.0, "6280": 1117048320.0, "6285": 1117048320.0, "6290": 1117048320.0, "6295": 1117048320.0, "6300": 1117048320.0, "6305": 1117048320.0, "6310": 1117048320.0, "6315": 1117048320.0, "6320": 1117048320.0, "6325": 1117048320.0, "6330": 1117048320.0, "6335": 1117048320.0, "6340": 1117048320.0, "6345": 1117048320.0, "6350": 1117048320.0, "6355": 1117048320.0, "6360": 1117048320.0, "6365": 1117048320.0, "6370": 1117048320.0, "6375": 1117048320.0, "6380": 1117048320.0, "6385": 1117048320.0, "6390": 1117048320.0, "6395": 1117048320.0, "6400": 1117048320.0, "6405": 1117048320.0, "6410": 1117048320.0, "6415": 1117048320.0, "6420": 1117048320.0, "6425": 1117048320.0, "6430": 1117048320.0, "6435": 1117048320.0, "6440": 1117048320.0, "6445": 1117048320.0, "6450": 1117048320.0, "6455": 1117048320.0, "6460": 1117048320.0, "6465": 1117048320.0, "6470": 1117048320.0, "6475": 1117048320.0, "6480": 1117048320.0, "6485": 1117048320.0, "6490": 1117048320.0, "6495": 1117048320.0, "6500": 1117048320.0, "6505": 1117048320.0, "6510": 1117048320.0, "6515": 1117048320.0, "6520": 1117048320.0, "6525": 1117048320.0, "6530": 
1117048320.0, "6535": 1117048320.0, "6540": 1117048320.0, "6545": 1117048320.0, "6550": 1117048320.0, "6555": 1117048320.0, "6560": 1117048320.0, "6565": 1117048320.0, "6570": 1117048320.0, "6575": 1117048320.0, "6580": 1117048320.0, "6585": 1117048320.0, "6590": 1117048320.0, "6595": 1117048320.0, "6600": 1117048320.0, "6605": 1117048320.0, "6610": 1117048320.0, "6615": 1117048320.0, "6620": 1117048320.0, "6625": 1117048320.0, "6630": 1117048320.0, "6635": 1117048320.0, "6640": 1117048320.0, "6645": 1117048320.0, "6650": 1117048320.0, "6655": 1117048320.0, "6660": 1117048320.0, "6665": 1117048320.0, "6670": 1117048320.0, "6675": 1117048320.0, "6680": 1117048320.0, "6685": 1117048320.0, "6690": 1117048320.0, "6695": 1117048320.0, "6700": 1117048320.0, "6705": 1117048320.0, "6710": 1117048320.0, "6715": 1117048320.0, "6720": 1117048320.0, "6725": 1117048320.0, "6730": 1117048320.0, "6735": 1117048320.0, "6740": 1117048320.0, "6745": 1117048320.0, "6750": 1117048320.0, "6755": 1117048320.0, "6760": 1117048320.0, "6765": 1117048320.0, "6770": 1117048320.0, "6775": 1117048320.0, "6780": 1117048320.0, "6785": 1117048320.0, "6790": 1117048320.0, "6795": 1117048320.0, "6800": 1117048320.0, "6805": 1117048320.0, "6810": 1117048320.0, "6815": 1117048320.0, "6820": 1117048320.0, "6825": 1117048320.0, "6830": 1117048320.0, "6835": 1117048320.0, "6840": 1117048320.0, "6845": 1117048320.0, "6850": 1117048320.0, "6855": 1117048320.0, "6860": 1117048320.0, "6865": 1117048320.0, "6870": 1117048320.0, "6875": 1117048320.0, "6880": 1117048320.0, "6885": 1117048320.0, "6890": 1117048320.0, "6895": 1117048320.0, "6900": 1117048320.0, "6905": 1117048320.0, "6910": 1117048320.0, "6915": 1117048320.0, "6920": 1117048320.0, "6925": 1117048320.0, "6930": 1117048320.0, "6935": 1117048320.0, "6940": 1117048320.0, "6945": 1117048320.0, "6950": 1117048320.0, "6955": 1117048320.0, "6960": 1117048320.0, "6965": 1117048320.0, "6970": 1117048320.0, "6975": 1117048320.0, "6980": 1117048320.0, "6985": 1117048320.0, "6990": 1117048320.0, "6995": 1117048320.0, "7000": 1117048320.0, "7005": 1117048320.0, "7010": 1117048320.0, "7015": 1117048320.0, "7020": 1117048320.0, "7025": 1117048320.0, "7030": 1117048320.0, "7035": 1117048320.0, "7040": 1117048320.0, "7045": 1117048320.0, "7050": 1117048320.0, "7055": 1117048320.0, "7060": 1117048320.0, "7065": 1117048320.0, "7070": 1117048320.0, "7075": 1117048320.0, "7080": 1117048320.0, "7085": 1117048320.0, "7090": 1117048320.0, "7095": 1117048320.0, "7100": 1117048320.0, "7105": 1117048320.0, "7110": 1117048320.0, "7115": 1117048320.0, "7120": 1117048320.0, "7125": 1117048320.0, "7130": 1117048320.0, "7135": 1117048320.0, "7140": 1117048320.0, "7145": 1117048320.0, "7150": 1117048320.0, "7155": 1117048320.0, "7160": 1117048320.0, "7165": 1117048320.0, "7170": 1117048320.0, "7175": 1117048320.0, "7180": 1117048320.0, "7185": 1117048320.0, "7190": 1117048320.0, "7195": 1117048320.0, "7200": 1117048320.0, "7205": 1117048320.0, "7210": 1117048320.0, "7215": 1117048320.0, "7220": 1117048320.0, "7225": 1117048320.0, "7230": 1117048320.0, "7235": 1117048320.0, "7240": 1117048320.0, "7245": 1117048320.0, "7250": 1117048320.0, "7255": 1117048320.0, "7260": 1117048320.0, "7265": 1117048320.0, "7270": 1117048320.0, "7275": 1117048320.0, "7280": 1117048320.0, "7285": 1117048320.0, "7290": 1117048320.0, "7295": 1117048320.0, "7300": 1117048320.0, "7305": 1117048320.0, "7310": 1117048320.0, "7315": 1117048320.0, "7320": 1117048320.0, "7325": 1117048320.0, "7330": 1117048320.0, "7335": 
1117048320.0, "7340": 1117048320.0, "7345": 1117048320.0, "7350": 1117048320.0, "7355": 1117048320.0, "7360": 1117048320.0, "7365": 1117048320.0, "7370": 1117048320.0, "7375": 1117048320.0, "7380": 1117048320.0, "7385": 1117048320.0, "7390": 1117048320.0, "7395": 1117048320.0, "7400": 1117048320.0, "7405": 1117048320.0, "7410": 1117048320.0, "7415": 1117048320.0, "7420": 1117048320.0, "7425": 1117048320.0, "7430": 1117048320.0, "7435": 1117048320.0, "7440": 1117048320.0, "7445": 1117048320.0, "7450": 1117048320.0, "7455": 1117048320.0, "7460": 1117048320.0, "7465": 1117048320.0, "7470": 1117048320.0, "7475": 1117048320.0, "7480": 1117048320.0, "7485": 1117048320.0, "7490": 1117048320.0, "7495": 1117048320.0, "7500": 1117048320.0, "7505": 1117048320.0, "7510": 1117048320.0, "7515": 1117048320.0, "7520": 1117048320.0, "7525": 1117048320.0, "7530": 1117048320.0, "7535": 1117048320.0, "7540": 1117048320.0, "7545": 1117048320.0, "7550": 1117048320.0, "7555": 1117048320.0, "7560": 1117048320.0, "7565": 1117048320.0, "7570": 1117048320.0, "7575": 1117048320.0, "7580": 1117048320.0, "7585": 1117048320.0, "7590": 1117048320.0, "7595": 1117048320.0, "7600": 1117048320.0, "7605": 1117048320.0, "7610": 1117048320.0, "7615": 1117048320.0, "7620": 1117048320.0, "7625": 1117048320.0, "7630": 1117048320.0, "7635": 1117048320.0, "7640": 1117048320.0, "7645": 1117048320.0, "7650": 1117048320.0, "7655": 1117048320.0, "7660": 1117048320.0, "7665": 1117048320.0, "7670": 1117048320.0, "7675": 1117048320.0, "7680": 1117048320.0, "7685": 1117048320.0, "7690": 1117048320.0, "7695": 1117048320.0, "7700": 1117048320.0, "7705": 1117048320.0, "7710": 1117048320.0, "7715": 1117048320.0, "7720": 1117048320.0, "7725": 1117048320.0, "7730": 1117048320.0, "7735": 1117048320.0, "7740": 1117048320.0, "7745": 1117048320.0, "7750": 1117048320.0, "7755": 1117048320.0, "7760": 1117048320.0, "7765": 1117048320.0, "7770": 1117048320.0, "7775": 1117048320.0, "7780": 1117048320.0, "7785": 1117048320.0, "7790": 1117048320.0, "7795": 1117048320.0, "7800": 1117048320.0, "7805": 1117048320.0, "7810": 1117048320.0, "7815": 1117048320.0, "7820": 1117048320.0, "7825": 1117048320.0, "7830": 1117048320.0, "7835": 1117048320.0, "7840": 1117048320.0, "7845": 1117048320.0, "7850": 1117048320.0, "7855": 1117048320.0, "7860": 1117048320.0, "7865": 1117048320.0, "7870": 1117048320.0, "7875": 1117048320.0, "7880": 1117048320.0, "7885": 1117048320.0, "7890": 1117048320.0, "7895": 1117048320.0, "7900": 1117048320.0, "7905": 1117048320.0, "7910": 1117048320.0, "7915": 1117048320.0, "7920": 1117048320.0, "7925": 1117048320.0, "7930": 1117048320.0, "7935": 1117048320.0, "7940": 1117048320.0, "7945": 1117048320.0, "7950": 1117048320.0, "7955": 1117048320.0, "7960": 1117048320.0, "7965": 1117048320.0, "7970": 1117048320.0, "7975": 1117048320.0, "7980": 1117048320.0, "7985": 1117048320.0, "7990": 1117048320.0, "7995": 1117048320.0, "8000": 1117048320.0, "8005": 1117048320.0, "8010": 1117048320.0, "8015": 1117048320.0, "8020": 1117048320.0, "8025": 1117048320.0, "8030": 1117048320.0, "8035": 1117048320.0, "8040": 1117048320.0, "8045": 1117048320.0, "8050": 1117048320.0, "8055": 1117048320.0, "8060": 1117048320.0, "8065": 1117048320.0, "8070": 1117048320.0, "8075": 1117048320.0, "8080": 1117048320.0, "8085": 1117048320.0, "8090": 1117048320.0, "8095": 1117048320.0, "8100": 1117048320.0, "8105": 1117048320.0, "8110": 1117048320.0, "8115": 1117048320.0, "8120": 1117048320.0, "8125": 1117048320.0, "8130": 1117048320.0, "8135": 1117048320.0, "8140": 
1117048320.0, "8145": 1117048320.0, "8150": 1117048320.0, "8155": 1117048320.0, "8160": 1117048320.0, "8165": 1117048320.0, "8170": 1117048320.0, "8175": 1117048320.0, "8180": 1117048320.0, "8185": 1117048320.0, "8190": 1117048320.0, "8195": 1117048320.0, "8200": 1117048320.0, "8205": 1117048320.0, "8210": 1117048320.0, "8215": 1117048320.0, "8220": 1117048320.0, "8225": 1117048320.0, "8230": 1117048320.0, "8235": 1117048320.0, "8240": 1117048320.0, "8245": 1117048320.0, "8250": 1117048320.0, "8255": 1117048320.0, "8260": 1117048320.0, "8265": 1117048320.0, "8270": 1117048320.0, "8275": 1117048320.0, "8280": 1117048320.0, "8285": 1117048320.0, "8290": 1117048320.0, "8295": 1117048320.0, "8300": 1117048320.0, "8305": 1117048320.0, "8310": 1117048320.0, "8315": 1117048320.0, "8320": 1117048320.0, "8325": 1117048320.0, "8330": 1117048320.0, "8335": 1117048320.0, "8340": 1117048320.0, "8345": 1117048320.0, "8350": 1117048320.0, "8355": 1117048320.0, "8360": 1117048320.0, "8365": 1117048320.0, "8370": 1117048320.0, "8375": 1117048320.0, "8380": 1117048320.0, "8385": 1117048320.0, "8390": 1117048320.0, "8395": 1117048320.0, "8400": 1117048320.0, "8405": 1117048320.0, "8410": 1117048320.0, "8415": 1117048320.0, "8420": 1117048320.0, "8425": 1117048320.0, "8430": 1117048320.0, "8435": 1117048320.0, "8440": 1117048320.0, "8445": 1117048320.0, "8450": 1117048320.0, "8455": 1117048320.0, "8460": 1117048320.0, "8465": 1117048320.0, "8470": 1117048320.0, "8475": 1117048320.0, "8480": 1117048320.0, "8485": 1117048320.0, "8490": 1117048320.0, "8495": 1117048320.0, "8500": 1117048320.0, "8505": 1117048320.0, "8510": 1117048320.0, "8515": 1117048320.0, "8520": 1117048320.0, "8525": 1117048320.0, "8530": 1117048320.0, "8535": 1117048320.0, "8540": 1117048320.0, "8545": 1117048320.0, "8550": 1117048320.0, "8555": 1117048320.0, "8560": 1117048320.0, "8565": 1117048320.0, "8570": 1117048320.0, "8575": 1117048320.0, "8580": 1117048320.0, "8585": 1117048320.0, "8590": 1117048320.0, "8595": 1117048320.0, "8600": 1117048320.0, "8605": 1117048320.0, "8610": 1117048320.0, "8615": 1117048320.0, "8620": 1117048320.0, "8625": 1117048320.0, "8630": 1117048320.0, "8635": 1117048320.0, "8640": 1117048320.0, "8645": 1117048320.0, "8650": 1117048320.0, "8655": 1117048320.0, "8660": 1117048320.0, "8665": 1117048320.0, "8670": 1117048320.0, "8675": 1117048320.0, "8680": 1117048320.0, "8685": 1117048320.0, "8690": 1117048320.0, "8695": 1117048320.0, "8700": 1117048320.0, "8705": 1117048320.0, "8710": 1117048320.0, "8715": 1117048320.0, "8720": 1117048320.0, "8725": 1117048320.0, "8730": 1117048320.0, "8735": 1117048320.0, "8740": 1117048320.0, "8745": 1117048320.0, "8750": 1117048320.0, "8755": 1117048320.0, "8760": 1117048320.0, "8765": 1117048320.0, "8770": 1117048320.0, "8775": 1117048320.0, "8780": 1117048320.0, "8785": 1117048320.0, "8790": 1117048320.0, "8795": 1117048320.0, "8800": 1117048320.0, "8805": 1117048320.0, "8810": 1117048320.0, "8815": 1117048320.0, "8820": 1117048320.0, "8825": 1117048320.0, "8830": 1117048320.0, "8835": 1117048320.0, "8840": 1117048320.0, "8845": 1117048320.0, "8850": 1117048320.0, "8855": 1117048320.0, "8860": 1117048320.0, "8865": 1117048320.0, "8870": 1117048320.0, "8875": 1117048320.0, "8880": 1117048320.0, "8885": 1117048320.0, "8890": 1117048320.0, "8895": 1117048320.0, "8900": 1117048320.0, "8905": 1117048320.0, "8910": 1117048320.0, "8915": 1117048320.0, "8920": 1117048320.0, "8925": 1117048320.0, "8930": 1117048320.0, "8935": 1117048320.0, "8940": 1117048320.0, "8945": 
1117048320.0, "8950": 1117048320.0, "8955": 1117048320.0, "8960": 1117048320.0, "8965": 1117048320.0, "8970": 1117048320.0, "8975": 1117048320.0, "8980": 1117048320.0, "8985": 1117048320.0, "8990": 1117048320.0, "8995": 1117048320.0, "9000": 1117048320.0, "9005": 1117048320.0, "9010": 1117048320.0, "9015": 1117048320.0, "9020": 1117048320.0, "9025": 1117048320.0, "9030": 1117048320.0, "9035": 1117048320.0, "9040": 1117048320.0, "9045": 1117048320.0, "9050": 1117048320.0, "9055": 1117048320.0, "9060": 1117048320.0, "9065": 1117048320.0, "9070": 1117048320.0, "9075": 1117048320.0, "9080": 1117048320.0, "9085": 1117048320.0, "9090": 1117048320.0, "9095": 1117048320.0, "9100": 1117048320.0, "9105": 1117048320.0, "9110": 1117048320.0, "9115": 1117048320.0, "9120": 1117048320.0, "9125": 1117048320.0, "9130": 1117048320.0, "9135": 1117048320.0, "9140": 1117048320.0, "9145": 1117048320.0, "9150": 1117048320.0, "9155": 1117048320.0, "9160": 1117048320.0, "9165": 1117048320.0, "9170": 1117048320.0, "9175": 1117048320.0, "9180": 1117048320.0, "9185": 1117048320.0, "9190": 1117048320.0, "9195": 1117048320.0, "9200": 1117048320.0, "9205": 1117048320.0, "9210": 1117048320.0, "9215": 1117048320.0, "9220": 1117048320.0, "9225": 1117048320.0, "9230": 1117048320.0, "9235": 1117048320.0, "9240": 1117048320.0, "9245": 1117048320.0, "9250": 1117048320.0, "9255": 1117048320.0, "9260": 1117048320.0, "9265": 1117048320.0, "9270": 1117048320.0, "9275": 1117048320.0, "9280": 1117048320.0, "9285": 1117048320.0, "9290": 1117048320.0, "9295": 1117048320.0, "9300": 1117048320.0, "9305": 1117048320.0, "9310": 1117048320.0, "9315": 1117048320.0, "9320": 1117048320.0, "9325": 1117048320.0, "9330": 1117048320.0, "9335": 1117048320.0, "9340": 1117048320.0, "9345": 1117048320.0, "9350": 1117048320.0, "9355": 1117048320.0, "9360": 1117048320.0, "9365": 1117048320.0, "9370": 1117048320.0, "9375": 1117048320.0, "9380": 1117048320.0, "9385": 1117048320.0, "9390": 1117048320.0, "9395": 1117048320.0, "9400": 1117048320.0, "9405": 1117048320.0, "9410": 1117048320.0, "9415": 1117048320.0, "9420": 1117048320.0, "9425": 1117048320.0, "9430": 1117048320.0, "9435": 1117048320.0, "9440": 1117048320.0, "9445": 1117048320.0, "9450": 1117048320.0, "9455": 1117048320.0, "9460": 1117048320.0, "9465": 1117048320.0, "9470": 1117048320.0, "9475": 1117048320.0, "9480": 1117048320.0, "9485": 1117048320.0, "9490": 1117048320.0, "9495": 1117048320.0, "9500": 1117048320.0, "9505": 1117048320.0, "9510": 1117048320.0, "9515": 1117048320.0, "9520": 1117048320.0, "9525": 1117048320.0, "9530": 1117048320.0, "9535": 1117048320.0, "9540": 1117048320.0, "9545": 1117048320.0, "9550": 1117048320.0, "9555": 1117048320.0, "9560": 1117048320.0, "9565": 1117048320.0, "9570": 1117048320.0, "9575": 1117048320.0, "9580": 1117048320.0, "9585": 1117048320.0, "9590": 1117048320.0, "9595": 1117048320.0, "9600": 1117048320.0, "9605": 1117048320.0, "9610": 1117048320.0, "9615": 1117048320.0, "9620": 1117048320.0, "9625": 1117048320.0, "9630": 1117048320.0, "9635": 1117048320.0, "9640": 1117048320.0, "9645": 1117048320.0, "9650": 1117048320.0, "9655": 1117048320.0, "9660": 1117048320.0, "9665": 1117048320.0, "9670": 1117048320.0, "9675": 1117048320.0, "9680": 1117048320.0, "9685": 1117048320.0, "9690": 1117048320.0, "9695": 1117048320.0, "9700": 1117048320.0, "9705": 1117048320.0, "9710": 1117048320.0, "9715": 1117048320.0, "9720": 1117048320.0, "9725": 1117048320.0, "9730": 1117048320.0, "9735": 1117048320.0, "9740": 1117048320.0, "9745": 1117048320.0, "9750": 
1117048320.0, "9755": 1117048320.0, "9760": 1117048320.0, "9765": 1117048320.0, "9770": 1117048320.0, "9775": 1117048320.0, "9780": 1117048320.0, "9785": 1117048320.0, "9790": 1117048320.0, "9795": 1117048320.0, "9800": 1117048320.0, "9805": 1117048320.0, "9810": 1117048320.0, "9815": 1117048320.0, "9820": 1117048320.0, "9825": 1117048320.0, "9830": 1117048320.0, "9835": 1117048320.0, "9840": 1117048320.0, "9845": 1117048320.0, "9850": 1117048320.0, "9855": 1117048320.0, "9860": 1117048320.0, "9865": 1117048320.0, "9870": 1117048320.0, "9875": 1117048320.0, "9880": 1117048320.0, "9885": 1117048320.0, "9890": 1117048320.0, "9895": 1117048320.0, "9900": 1117048320.0, "9905": 1117048320.0, "9910": 1117048320.0, "9915": 1117048320.0, "9920": 1117048320.0, "9925": 1117048320.0, "9930": 1117048320.0, "9935": 1117048320.0, "9940": 1117048320.0, "9945": 1117048320.0, "9950": 1117048320.0, "9955": 1117048320.0, "9960": 1117048320.0, "9965": 1117048320.0, "9970": 1117048320.0, "9975": 1117048320.0, "9980": 1117048320.0, "9985": 1117048320.0, "9990": 1117048320.0, "9995": 1117048320.0, "10000": 1117048320.0, "10005": 1117048320.0, "10010": 1117048320.0, "10015": 1117048320.0, "10020": 1117048320.0, "10025": 1117048320.0, "10030": 1117048320.0, "10035": 1117048320.0, "10040": 1117048320.0, "10045": 1117048320.0, "10050": 1117048320.0, "10055": 1117048320.0, "10060": 1117048320.0, "10065": 1117048320.0, "10070": 1117048320.0, "10075": 1117048320.0, "10080": 1117048320.0, "10085": 1117048320.0, "10090": 1117048320.0, "10095": 1117048320.0, "10100": 1117048320.0, "10105": 1117048320.0, "10110": 1117048320.0, "10115": 1117048320.0, "10120": 1117048320.0, "10125": 1117048320.0, "10130": 1117048320.0, "10135": 1117048320.0, "10140": 1117048320.0, "10145": 1117048320.0, "10150": 1117048320.0, "10155": 1117048320.0, "10160": 1117048320.0, "10165": 1117048320.0, "10170": 1117048320.0, "10175": 1117048320.0, "10180": 1117048320.0, "10185": 1117048320.0, "10190": 1117048320.0, "10195": 1117048320.0, "10200": 1117048320.0, "10205": 1117048320.0, "10210": 1117048320.0, "10215": 1117048320.0, "10220": 1117048320.0, "10225": 1117048320.0, "10230": 1117048320.0, "10235": 1117048320.0, "10240": 1117048320.0, "10245": 1117048320.0, "10250": 1117048320.0, "10255": 1117048320.0, "10260": 1117048320.0, "10265": 1117048320.0, "10270": 1117048320.0, "10275": 1117048320.0, "10280": 1117048320.0, "10285": 1117048320.0, "10290": 1117048320.0, "10295": 1117048320.0, "10300": 1117048320.0, "10305": 1117048320.0, "10310": 1117048320.0, "10315": 1117048320.0, "10320": 1117048320.0, "10325": 1117048320.0, "10330": 1117048320.0, "10335": 1117048320.0, "10340": 1117048320.0, "10345": 1117048320.0, "10350": 1117048320.0, "10355": 1117048320.0, "10360": 1117048320.0, "10365": 1117048320.0, "10370": 1117048320.0, "10375": 1117048320.0, "10380": 1117048320.0, "10385": 1117048320.0, "10390": 1117048320.0, "10395": 1117048320.0, "10400": 1117048320.0, "10405": 1117048320.0, "10410": 1117048320.0, "10415": 1117048320.0, "10420": 1117048320.0, "10425": 1117048320.0, "10430": 1117048320.0, "10435": 1117048320.0, "10440": 1117048320.0, "10445": 1117048320.0, "10450": 1117048320.0, "10455": 1117048320.0, "10460": 1117048320.0, "10465": 1117048320.0, "10470": 1117048320.0, "10475": 1117048320.0, "10480": 1117048320.0, "10485": 1117048320.0, "10490": 1117048320.0, "10495": 1117048320.0, "10500": 1117048320.0, "10505": 1117048320.0, "10510": 1117048320.0, "10515": 1117048320.0, "10520": 1117048320.0, "10525": 1117048320.0, "10530": 1117048320.0, 
"10535": 1117048320.0, "10540": 1117048320.0, "10545": 1117048320.0, "10550": 1117048320.0, "10555": 1117048320.0, "10560": 1117048320.0, "10565": 1117048320.0, "10570": 1117048320.0, "10575": 1117048320.0, "10580": 1117048320.0, "10585": 1117048320.0, "10590": 1117048320.0, "10595": 1117048320.0, "10600": 1117048320.0, "10605": 1117048320.0, "10610": 1117048320.0, "10615": 1117048320.0, "10620": 1117048320.0, "10625": 1117048320.0, "10630": 1117048320.0, "10635": 1117048320.0, "10640": 1117048320.0, "10645": 1117048320.0, "10650": 1117048320.0, "10655": 1117048320.0, "10660": 1117048320.0, "10665": 1117048320.0, "10670": 1117048320.0, "10675": 1117048320.0, "10680": 1117048320.0, "10685": 1117048320.0, "10690": 1117048320.0, "10695": 1117048320.0, "10700": 1117048320.0, "10705": 1117048320.0, "10710": 1117048320.0, "10715": 1117048320.0, "10720": 1117048320.0, "10725": 1117048320.0, "10730": 1117048320.0, "10735": 1117048320.0, "10740": 1117048320.0, "10745": 1117048320.0, "10750": 1117048320.0, "10755": 1117048320.0, "10760": 1117048320.0, "10765": 1117048320.0, "10770": 1117048320.0, "10775": 1117048320.0, "10780": 1117048320.0, "10785": 1117048320.0, "10790": 1117048320.0, "10795": 1117048320.0, "10800": 1117048320.0, "10805": 1117048320.0, "10810": 1117048320.0, "10815": 1117048320.0, "10820": 1117048320.0, "10825": 1117048320.0, "10830": 1117048320.0, "10835": 1117048320.0, "10840": 1117048320.0, "10845": 1117048320.0, "10850": 1117048320.0, "10855": 1117048320.0, "10860": 1117048320.0, "10865": 1117048320.0, "10870": 1117048320.0, "10875": 1117048320.0, "10880": 1117048320.0, "10885": 1117048320.0, "10890": 1117048320.0, "10895": 1117048320.0, "10900": 1117048320.0, "10905": 1117048320.0, "10910": 1117048320.0, "10915": 1117048320.0, "10920": 1117048320.0, "10925": 1117048320.0, "10930": 1117048320.0, "10935": 1117048320.0, "10940": 1117048320.0, "10945": 1117048320.0, "10950": 1117048320.0, "10955": 1117048320.0, "10960": 1117048320.0, "10965": 1117048320.0, "10970": 1117048320.0, "10975": 1117048320.0, "10980": 1117048320.0, "10985": 1117048320.0, "10990": 1117048320.0, "10995": 1117048320.0, "11000": 1117048320.0, "11005": 1117048320.0, "11010": 1117048320.0, "11015": 1117048320.0, "11020": 1117048320.0, "11025": 1117048320.0, "11030": 1117048320.0, "11035": 1117048320.0, "11040": 1117048320.0, "11045": 1117048320.0, "11050": 1117048320.0, "11055": 1117048320.0, "11060": 1117048320.0, "11065": 1117048320.0, "11070": 1117048320.0, "11075": 1117048320.0, "11080": 1117048320.0, "11085": 1117048320.0, "11090": 1117048320.0, "11095": 1117048320.0, "11100": 1117048320.0, "11105": 1117048320.0, "11110": 1117048320.0, "11115": 1117048320.0, "11120": 1117048320.0, "11125": 1117048320.0, "11130": 1117048320.0, "11135": 1117048320.0, "11140": 1117048320.0, "11145": 1117048320.0, "11150": 1117048320.0, "11155": 1117048320.0, "11160": 1117048320.0, "11165": 1117048320.0, "11170": 1117048320.0, "11175": 1117048320.0, "11180": 1117048320.0, "11185": 1117048320.0, "11190": 1117048320.0, "11195": 1117048320.0, "11200": 1117048320.0, "11205": 1117048320.0, "11210": 1117048320.0, "11215": 1117048320.0, "11220": 1117048320.0, "11225": 1117048320.0, "11230": 1117048320.0, "11235": 1117048320.0, "11240": 1117048320.0, "11245": 1117048320.0, "11250": 1117048320.0, "11255": 1117048320.0, "11260": 1117048320.0, "11265": 1117048320.0, "11270": 1117048320.0, "11275": 1117048320.0, "11280": 1117048320.0, "11285": 1117048320.0, "11290": 1117048320.0, "11295": 1117048320.0, "11300": 1117048320.0, "11305": 
1117048320.0, "11310": 1117048320.0, "11315": 1117048320.0, "11320": 1117048320.0, "11325": 1117048320.0, "11330": 1117048320.0, "11335": 1117048320.0, "11340": 1117048320.0, "11345": 1117048320.0, "11350": 1117048320.0, "11355": 1117048320.0, "11360": 1117048320.0, "11365": 1117048320.0, "11370": 1117048320.0, "11375": 1117048320.0, "11380": 1117048320.0, "11385": 1117048320.0, "11390": 1117048320.0, "11395": 1117048320.0, "11400": 1117048320.0, "11405": 1117048320.0, "11410": 1117048320.0, "11415": 1117048320.0, "11420": 1117048320.0, "11425": 1117048320.0, "11430": 1117048320.0, "11435": 1117048320.0, "11440": 1117048320.0, "11445": 1117048320.0, "11450": 1117048320.0, "11455": 1117048320.0, "11460": 1117048320.0, "11465": 1117048320.0, "11470": 1117048320.0, "11475": 1117048320.0, "11480": 1117048320.0, "11485": 1117048320.0, "11490": 1117048320.0, "11495": 1117048320.0, "11500": 1117048320.0, "11505": 1117048320.0, "11510": 1117048320.0, "11515": 1117048320.0, "11520": 1117048320.0, "11525": 1117048320.0, "11530": 1117048320.0, "11535": 1117048320.0, "11540": 1117048320.0, "11545": 1117048320.0, "11550": 1117048320.0, "11555": 1117048320.0, "11560": 1117048320.0, "11565": 1117048320.0, "11570": 1117048320.0, "11575": 1117048320.0, "11580": 1117048320.0, "11585": 1117048320.0, "11590": 1117048320.0, "11595": 1117048320.0, "11600": 1117048320.0, "11605": 1117048320.0, "11610": 1117048320.0, "11615": 1117048320.0, "11620": 1117048320.0, "11625": 1117048320.0, "11630": 1117048320.0, "11635": 1117048320.0, "11640": 1117048320.0, "11645": 1117048320.0, "11650": 1117048320.0, "11655": 1117048320.0, "11660": 1117048320.0, "11665": 1117048320.0, "11670": 1117048320.0, "11675": 1117048320.0, "11680": 1117048320.0, "11685": 1117048320.0, "11690": 1117048320.0, "11695": 1117048320.0, "11700": 1117048320.0, "11705": 1117048320.0, "11710": 1117048320.0, "11715": 1117048320.0, "11720": 1117048320.0, "11725": 1117048320.0, "11730": 1117048320.0, "11735": 1117048320.0, "11740": 1117048320.0, "11745": 1117048320.0, "11750": 1117048320.0, "11755": 1117048320.0, "11760": 1117048320.0, "11765": 1117048320.0, "11770": 1117048320.0, "11775": 1117048320.0, "11780": 1117048320.0, "11785": 1117048320.0, "11790": 1117048320.0, "11795": 1117048320.0, "11800": 1117048320.0, "11805": 1117048320.0, "11810": 1117048320.0, "11815": 1117048320.0, "11820": 1117048320.0, "11825": 1117048320.0, "11830": 1117048320.0, "11835": 1117048320.0, "11840": 1117048320.0, "11845": 1117048320.0, "11850": 1117048320.0, "11855": 1117048320.0, "11860": 1117048320.0, "11865": 1117048320.0, "11870": 1117048320.0, "11875": 1117048320.0, "11880": 1117048320.0, "11885": 1117048320.0, "11890": 1117048320.0, "11895": 1117048320.0, "11900": 1117048320.0, "11905": 1117048320.0, "11910": 1117048320.0, "11915": 1117048320.0, "11920": 1117048320.0, "11925": 1117048320.0, "11930": 1117048320.0, "11935": 1117048320.0, "11940": 1117048320.0, "11945": 1117048320.0, "11950": 1117048320.0, "11955": 1117048320.0, "11960": 1117048320.0, "11965": 1117048320.0, "11970": 1117048320.0, "11975": 1117048320.0, "11980": 1117048320.0, "11985": 1117048320.0, "11990": 1117048320.0, "11995": 1117048320.0, "12000": 1117048320.0, "12005": 1117048320.0, "12010": 1117048320.0, "12015": 1117048320.0, "12020": 1117048320.0, "12025": 1117048320.0, "12030": 1117048320.0, "12035": 1117048320.0, "12040": 1117048320.0, "12045": 1117048320.0, "12050": 1117048320.0, "12055": 1117048320.0, "12060": 1117048320.0, "12065": 1117048320.0, "12070": 1117048320.0, "12075": 
1117048320.0, "12080": 1117048320.0, "12085": 1117048320.0, "12090": 1117048320.0, "12095": 1117048320.0, "12100": 1117048320.0, "12105": 1117048320.0, "12110": 1117048320.0, "12115": 1117048320.0, "12120": 1117048320.0, "12125": 1117048320.0, "12130": 1117048320.0, "12135": 1117048320.0, "12140": 1117048320.0, "12145": 1117048320.0, "12150": 1117048320.0, "12155": 1117048320.0, "12160": 1117048320.0, "12165": 1117048320.0, "12170": 1117048320.0, "12175": 1117048320.0, "12180": 1117048320.0, "12185": 1117048320.0, "12190": 1117048320.0, "12195": 1117048320.0, "12200": 1117048320.0, "12205": 1117048320.0, "12210": 1117048320.0, "12215": 1117048320.0, "12220": 1117048320.0, "12225": 1117048320.0, "12230": 1117048320.0, "12235": 1117048320.0, "12240": 1117048320.0, "12245": 1117048320.0, "12250": 1117048320.0, "12255": 1117048320.0, "12260": 1117048320.0, "12265": 1117048320.0, "12270": 1117048320.0, "12275": 1117048320.0, "12280": 1117048320.0, "12285": 1117048320.0, "12290": 1117048320.0, "12295": 1117048320.0, "12300": 1117048320.0, "12305": 1117048320.0, "12310": 1117048320.0, "12315": 1117048320.0, "12320": 1117048320.0, "12325": 1117048320.0, "12330": 1117048320.0, "12335": 1117048320.0, "12340": 1117048320.0, "12345": 1117048320.0, "12350": 1117048320.0, "12355": 1117048320.0, "12360": 1117048320.0, "12365": 1117048320.0, "12370": 1117048320.0, "12375": 1117048320.0, "12380": 1117048320.0, "12385": 1117048320.0, "12390": 1117048320.0, "12395": 1117048320.0, "12400": 1117048320.0, "12405": 1117048320.0, "12410": 1117048320.0, "12415": 1117048320.0, "12420": 1117048320.0, "12425": 1117048320.0, "12430": 1117048320.0, "12435": 1117048320.0, "12440": 1117048320.0, "12445": 1117048320.0, "12450": 1117048320.0, "12455": 1117048320.0, "12460": 1117048320.0, "12465": 1117048320.0, "12470": 1117048320.0, "12475": 1117048320.0, "12480": 1117048320.0, "12485": 1117048320.0, "12490": 1117048320.0, "12495": 1117048320.0, "12500": 1117048320.0, "12505": 1117048320.0, "12510": 1117048320.0, "12515": 1117048320.0, "12520": 1117048320.0, "12525": 1117048320.0, "12530": 1117048320.0, "12535": 1117048320.0, "12540": 1117048320.0, "12545": 1117048320.0, "12550": 1117048320.0, "12555": 1117048320.0, "12560": 1117048320.0, "12565": 1117048320.0, "12570": 1117048320.0, "12575": 1117048320.0, "12580": 1117048320.0, "12585": 1117048320.0, "12590": 1117048320.0, "12595": 1117048320.0, "12600": 1117048320.0, "12605": 1117048320.0, "12610": 1117048320.0, "12615": 1117048320.0, "12620": 1117048320.0, "12625": 1117048320.0, "12630": 1117048320.0, "12635": 1117048320.0, "12640": 1117048320.0, "12645": 1117048320.0, "12650": 1117048320.0, "12655": 1117048320.0, "12660": 1117048320.0, "12665": 1117048320.0, "12670": 1117048320.0, "12675": 1117048320.0, "12680": 1117048320.0, "12685": 1117048320.0, "12690": 1117048320.0, "12695": 1117048320.0, "12700": 1117048320.0, "12705": 1117048320.0, "12710": 1117048320.0, "12715": 1117048320.0, "12720": 1117048320.0, "12725": 1117048320.0, "12730": 1117048320.0, "12735": 1117048320.0, "12740": 1117048320.0, "12745": 1117048320.0, "12750": 1117048320.0, "12755": 1117048320.0, "12760": 1117048320.0, "12765": 1117048320.0, "12770": 1117048320.0, "12775": 1117048320.0, "12780": 1117048320.0, "12785": 1117048320.0, "12790": 1117048320.0, "12795": 1117048320.0, "12800": 1117048320.0, "12805": 1117048320.0, "12810": 1117048320.0, "12815": 1117048320.0, "12820": 1117048320.0, "12825": 1117048320.0, "12830": 1117048320.0, "12835": 1117048320.0, "12840": 1117048320.0, "12845": 
1117048320.0, "12850": 1117048320.0, "12855": 1117048320.0, "12860": 1117048320.0, "12865": 1117048320.0, "12870": 1117048320.0, "12875": 1117048320.0, "12880": 1117048320.0, "12885": 1117048320.0, "12890": 1117048320.0, "12895": 1117048320.0, "12900": 1117048320.0, "12905": 1117048320.0, "12910": 1117048320.0, "12915": 1117048320.0, "12920": 1117048320.0, "12925": 1117048320.0, "12930": 1117048320.0, "12935": 1117048320.0, "12940": 1117048320.0, "12945": 1117048320.0, "12950": 1117048320.0, "12955": 1117048320.0, "12960": 1117048320.0, "12965": 1117048320.0, "12970": 1117048320.0, "12975": 1117048320.0, "12980": 1117048320.0, "12985": 1117048320.0, "12990": 1117048320.0, "12995": 1117048320.0, "13000": 1117048320.0, "13005": 1117048320.0, "13010": 1117048320.0, "13015": 1117048320.0, "13020": 1117048320.0, "13025": 1117048320.0, "13030": 1117048320.0, "13035": 1117048320.0, "13040": 1117048320.0, "13045": 1117048320.0, "13050": 1117048320.0, "13055": 1117048320.0, "13060": 1117048320.0, "13065": 1117048320.0, "13070": 1117048320.0, "13075": 1117048320.0, "13080": 1117048320.0, "13085": 1117048320.0, "13090": 1117048320.0, "13095": 1117048320.0, "13100": 1117048320.0, "13105": 1117048320.0, "13110": 1117048320.0, "13115": 1117048320.0, "13120": 1117048320.0, "13125": 1117048320.0, "13130": 1117048320.0, "13135": 1117048320.0, "13140": 1117048320.0, "13145": 1117048320.0, "13150": 1117048320.0, "13155": 1117048320.0, "13160": 1117048320.0, "13165": 1117048320.0, "13170": 1117048320.0, "13175": 1117048320.0, "13180": 1117048320.0, "13185": 1117048320.0, "13190": 1117048320.0, "13195": 1117048320.0, "13200": 1117048320.0, "13205": 1117048320.0, "13210": 1117048320.0, "13215": 1117048320.0, "13220": 1117048320.0, "13225": 1117048320.0, "13230": 1117048320.0, "13235": 1117048320.0, "13240": 1117048320.0, "13245": 1117048320.0, "13250": 1117048320.0, "13255": 1117048320.0, "13260": 1117048320.0, "13265": 1117048320.0, "13270": 1117048320.0, "13275": 1117048320.0, "13280": 1117048320.0, "13285": 1117048320.0, "13290": 1117048320.0, "13295": 1117048320.0, "13300": 1117048320.0, "13305": 1117048320.0, "13310": 1117048320.0, "13315": 1117048320.0, "13320": 1117048320.0, "13325": 1117048320.0, "13330": 1117048320.0, "13335": 1117048320.0, "13340": 1117048320.0, "13345": 1117048320.0, "13350": 1117048320.0, "13355": 1117048320.0, "13360": 1117048320.0, "13365": 1117048320.0, "13370": 1117048320.0, "13375": 1117048320.0, "13380": 1117048320.0, "13385": 1117048320.0, "13390": 1117048320.0, "13395": 1117048320.0, "13400": 1117048320.0, "13405": 1117048320.0, "13410": 1117048320.0, "13415": 1117048320.0, "13420": 1117048320.0, "13425": 1117048320.0, "13430": 1117048320.0, "13435": 1117048320.0, "13440": 1117048320.0, "13445": 1117048320.0, "13450": 1117048320.0, "13455": 1117048320.0, "13460": 1117048320.0, "13465": 1117048320.0, "13470": 1117048320.0, "13475": 1117048320.0, "13480": 1117048320.0, "13485": 1117048320.0, "13490": 1117048320.0, "13495": 1117048320.0, "13500": 1117048320.0, "13505": 1117048320.0, "13510": 1117048320.0, "13515": 1117048320.0, "13520": 1117048320.0, "13525": 1117048320.0, "13530": 1117048320.0, "13535": 1117048320.0, "13540": 1117048320.0, "13545": 1117048320.0, "13550": 1117048320.0, "13555": 1117048320.0, "13560": 1117048320.0, "13565": 1117048320.0, "13570": 1117048320.0, "13575": 1117048320.0, "13580": 1117048320.0, "13585": 1117048320.0, "13590": 1117048320.0, "13595": 1117048320.0, "13600": 1117048320.0, "13605": 1117048320.0, "13610": 1117048320.0, "13615": 
1117048320.0, "13620": 1117048320.0, "13625": 1117048320.0, "13630": 1117048320.0, "13635": 1117048320.0, "13640": 1117048320.0, "13645": 1117048320.0, "13650": 1117048320.0, "13655": 1117048320.0, "13660": 1117048320.0, "13665": 1117048320.0, "13670": 1117048320.0, "13675": 1117048320.0, "13680": 1117048320.0, "13685": 1117048320.0, "13690": 1117048320.0, "13695": 1117048320.0, "13700": 1117048320.0, "13705": 1117048320.0, "13710": 1117048320.0, "13715": 1117048320.0, "13720": 1117048320.0, "13725": 1117048320.0, "13730": 1117048320.0, "13735": 1117048320.0, "13740": 1117048320.0, "13745": 1117048320.0, "13750": 1117048320.0, "13755": 1117048320.0, "13760": 1117048320.0, "13765": 1117048320.0, "13770": 1117048320.0, "13775": 1117048320.0, "13780": 1117048320.0, "13785": 1117048320.0, "13790": 1117048320.0, "13795": 1117048320.0, "13800": 1117048320.0, "13805": 1117048320.0, "13810": 1117048320.0, "13815": 1117048320.0, "13820": 1117048320.0, "13825": 1117048320.0, "13830": 1117048320.0, "13835": 1117048320.0, "13840": 1117048320.0, "13845": 1117048320.0, "13850": 1117048320.0, "13855": 1117048320.0, "13860": 1117048320.0, "13865": 1117048320.0, "13870": 1117048320.0, "13875": 1117048320.0, "13880": 1117048320.0, "13885": 1117048320.0, "13890": 1117048320.0, "13895": 1117048320.0, "13900": 1117048320.0, "13905": 1117048320.0, "13910": 1117048320.0, "13915": 1117048320.0, "13920": 1117048320.0, "13925": 1117048320.0, "13930": 1117048320.0, "13935": 1117048320.0, "13940": 1117048320.0, "13945": 1117048320.0, "13950": 1117048320.0, "13955": 1117048320.0, "13960": 1117048320.0, "13965": 1117048320.0, "13970": 1117048320.0, "13975": 1117048320.0, "13980": 1117048320.0, "13985": 1117048320.0, "13990": 1117048320.0, "13995": 1117048320.0, "14000": 1117048320.0, "14005": 1117048320.0, "14010": 1117048320.0, "14015": 1117048320.0, "14020": 1117048320.0, "14025": 1117048320.0, "14030": 1117048320.0, "14035": 1117048320.0, "14040": 1117048320.0, "14045": 1117048320.0, "14050": 1117048320.0, "14055": 1117048320.0, "14060": 1117048320.0, "14065": 1117048320.0, "14070": 1117048320.0, "14075": 1117048320.0, "14080": 1117048320.0, "14085": 1117048320.0, "14090": 1117048320.0, "14095": 1117048320.0, "14100": 1117048320.0, "14105": 1117048320.0, "14110": 1117048320.0, "14115": 1117048320.0, "14120": 1117048320.0, "14125": 1117048320.0, "14130": 1117048320.0, "14135": 1117048320.0, "14140": 1117048320.0, "14145": 1117048320.0, "14150": 1117048320.0, "14155": 1117048320.0, "14160": 1117048320.0, "14165": 1117048320.0, "14170": 1117048320.0, "14175": 1117048320.0, "14180": 1117048320.0, "14185": 1117048320.0, "14190": 1117048320.0, "14195": 1117048320.0, "14200": 1117048320.0, "14205": 1117048320.0, "14210": 1117048320.0, "14215": 1117048320.0, "14220": 1117048320.0, "14225": 1117048320.0, "14230": 1117048320.0, "14235": 1117048320.0, "14240": 1117048320.0, "14245": 1117048320.0, "14250": 1117048320.0, "14255": 1117048320.0, "14260": 1117048320.0, "14265": 1117048320.0, "14270": 1117048320.0, "14275": 1117048320.0, "14280": 1117048320.0, "14285": 1117048320.0, "14290": 1117048320.0, "14295": 1117048320.0, "14300": 1117048320.0, "14305": 1117048320.0, "14310": 1117048320.0, "14315": 1117048320.0, "14320": 1117048320.0, "14325": 1117048320.0, "14330": 1117048320.0, "14335": 1117048320.0, "14340": 1117048320.0, "14345": 1117048320.0, "14350": 1117048320.0, "14355": 1117048320.0, "14360": 1117048320.0, "14365": 1117048320.0, "14370": 1117048320.0, "14375": 1117048320.0, "14380": 1117048320.0, "14385": 
1117048320.0, "14390": 1117048320.0, "14395": 1117048320.0, "14400": 1117048320.0, "14405": 1117048320.0, "14410": 1117048320.0, "14415": 1117048320.0, "14420": 1117048320.0, "14425": 1117048320.0, "14430": 1117048320.0, "14435": 1117048320.0, "14440": 1117048320.0, "14445": 1117048320.0, "14450": 1117048320.0, "14455": 1117048320.0, "14460": 1117048320.0, "14465": 1117048320.0, "14470": 1117048320.0, "14475": 1117048320.0, "14480": 1117048320.0, "14485": 1117048320.0, "14490": 1117048320.0, "14495": 1117048320.0, "14500": 1117048320.0, "14505": 1117048320.0, "14510": 1117048320.0, "14515": 1117048320.0, "14520": 1117048320.0, "14525": 1117048320.0, "14530": 1117048320.0, "14535": 1117048320.0, "14540": 1117048320.0, "14545": 1117048320.0, "14550": 1117048320.0, "14555": 1117048320.0, "14560": 1117048320.0, "14565": 1117048320.0, "14570": 1117048320.0, "14575": 1117048320.0, "14580": 1117048320.0, "14585": 1117048320.0, "14590": 1117048320.0, "14595": 1117048320.0, "14600": 1117048320.0, "14605": 1117048320.0, "14610": 1117048320.0, "14615": 1117048320.0, "14620": 1117048320.0, "14625": 1117048320.0, "14630": 1117048320.0, "14635": 1117048320.0, "14640": 1117048320.0, "14645": 1117048320.0, "14650": 1117048320.0, "14655": 1117048320.0, "14660": 1117048320.0, "14665": 1117048320.0, "14670": 1117048320.0, "14675": 1117048320.0, "14680": 1117048320.0, "14685": 1117048320.0, "14690": 1117048320.0, "14695": 1117048320.0, "14700": 1117048320.0, "14705": 1117048320.0, "14710": 1117048320.0, "14715": 1117048320.0, "14720": 1117048320.0, "14725": 1117048320.0, "14730": 1117048320.0, "14735": 1117048320.0, "14740": 1117048320.0, "14745": 1117048320.0, "14750": 1117048320.0, "14755": 1117048320.0, "14760": 1117048320.0, "14765": 1117048320.0, "14770": 1117048320.0, "14775": 1117048320.0, "14780": 1117048320.0, "14785": 1117048320.0, "14790": 1117048320.0, "14795": 1117048320.0, "14800": 1117048320.0, "14805": 1117048320.0, "14810": 1117048320.0, "14815": 1117048320.0, "14820": 1117048320.0, "14825": 1117048320.0, "14830": 1117048320.0, "14835": 1117048320.0, "14840": 1117048320.0, "14845": 1117048320.0, "14850": 1117048320.0, "14855": 1117048320.0, "14860": 1117048320.0, "14865": 1117048320.0, "14870": 1117048320.0, "14875": 1117048320.0, "14880": 1117048320.0, "14885": 1117048320.0, "14890": 1117048320.0, "14895": 1117048320.0, "14900": 1117048320.0, "14905": 1117048320.0, "14910": 1117048320.0, "14915": 1117048320.0, "14920": 1117048320.0, "14925": 1117048320.0, "14930": 1117048320.0, "14935": 1117048320.0, "14940": 1117048320.0, "14945": 1117048320.0, "14950": 1117048320.0, "14955": 1117048320.0, "14960": 1117048320.0, "14965": 1117048320.0, "14970": 1117048320.0, "14975": 1117048320.0, "14980": 1117048320.0, "14985": 1117048320.0, "14990": 1117048320.0, "14995": 1117048320.0, "15000": 1117048320.0, "15005": 1117048320.0, "15010": 1117048320.0, "15015": 1117048320.0, "15020": 1117048320.0, "15025": 1117048320.0, "15030": 1117048320.0, "15035": 1117048320.0, "15040": 1117048320.0, "15045": 1117048320.0, "15050": 1117048320.0, "15055": 1117048320.0, "15060": 1117048320.0, "15065": 1117048320.0, "15070": 1117048320.0, "15075": 1117048320.0, "15080": 1117048320.0, "15085": 1117048320.0, "15090": 1117048320.0, "15095": 1117048320.0, "15100": 1117048320.0, "15105": 1117048320.0, "15110": 1117048320.0, "15115": 1117048320.0, "15120": 1117048320.0, "15125": 1117048320.0, "15130": 1117048320.0, "15135": 1117048320.0, "15140": 1117048320.0, "15145": 1117048320.0, "15150": 1117048320.0, "15155": 
1117048320.0, "15160": 1117048320.0, "15165": 1117048320.0, "15170": 1117048320.0, "15175": 1117048320.0, "15180": 1117048320.0, "15185": 1117048320.0, "15190": 1117048320.0, "15195": 1117048320.0, "15200": 1117048320.0, "15205": 1117048320.0, "15210": 1117048320.0, "15215": 1117048320.0, "15220": 1117048320.0, "15225": 1117048320.0, "15230": 1117048320.0, "15235": 1117048320.0, "15240": 1117048320.0, "15245": 1117048320.0, "15250": 1117048320.0, "15255": 1117048320.0, "15260": 1117048320.0, "15265": 1117048320.0, "15270": 1117048320.0, "15275": 1117048320.0, "15280": 1117048320.0, "15285": 1117048320.0, "15290": 1117048320.0, "15295": 1117048320.0, "15300": 1117048320.0, "15305": 1117048320.0, "15310": 1117048320.0, "15315": 1117048320.0, "15320": 1117048320.0, "15325": 1117048320.0, "15330": 1117048320.0, "15335": 1117048320.0, "15340": 1117048320.0, "15345": 1117048320.0, "15350": 1117048320.0, "15355": 1117048320.0, "15360": 1117048320.0, "15365": 1117048320.0, "15370": 1117048320.0, "15375": 1117048320.0, "15380": 1117048320.0, "15385": 1117048320.0, "15390": 1117048320.0, "15395": 1117048320.0, "15400": 1117048320.0, "15405": 1117048320.0, "15410": 1117048320.0, "15415": 1117048320.0, "15420": 1117048320.0, "15425": 1117048320.0, "15430": 1117048320.0, "15435": 1117048320.0, "15440": 1117048320.0, "15445": 1117048320.0, "15450": 1117048320.0, "15455": 1117048320.0, "15460": 1117048320.0, "15465": 1117048320.0, "15470": 1117048320.0, "15475": 1117048320.0, "15480": 1117048320.0, "15485": 1117048320.0, "15490": 1117048320.0, "15495": 1117048320.0, "15500": 1117048320.0, "15505": 1117048320.0, "15510": 1117048320.0, "15515": 1117048320.0, "15520": 1117048320.0, "15525": 1117048320.0, "15530": 1117048320.0, "15535": 1117048320.0, "15540": 1117048320.0, "15545": 1117048320.0, "15550": 1117048320.0, "15555": 1117048320.0, "15560": 1117048320.0, "15565": 1117048320.0, "15570": 1117048320.0, "15575": 1117048320.0, "15580": 1117048320.0, "15585": 1117048320.0, "15590": 1117048320.0, "15595": 1117048320.0, "15600": 1117048320.0, "15605": 1117048320.0, "15610": 1117048320.0, "15615": 1117048320.0, "15620": 1117048320.0, "15625": 1117048320.0, "15630": 1117048320.0, "15635": 1117048320.0, "15640": 1117048320.0, "15645": 1117048320.0, "15650": 1117048320.0, "15655": 1117048320.0, "15660": 1117048320.0, "15665": 1117048320.0, "15670": 1117048320.0, "15675": 1117048320.0, "15680": 1117048320.0, "15685": 1117048320.0, "15690": 1117048320.0, "15695": 1117048320.0, "15700": 1117048320.0, "15705": 1117048320.0, "15710": 1117048320.0, "15715": 1117048320.0, "15720": 1117048320.0, "15725": 1117048320.0, "15730": 1117048320.0, "15735": 1117048320.0, "15740": 1117048320.0, "15745": 1117048320.0, "15750": 1117048320.0, "15755": 1117048320.0, "15760": 1117048320.0, "15765": 1117048320.0, "15770": 1117048320.0, "15775": 1117048320.0, "15780": 1117048320.0, "15785": 1117048320.0, "15790": 1117048320.0, "15795": 1117048320.0, "15800": 1117048320.0, "15805": 1117048320.0, "15810": 1117048320.0, "15815": 1117048320.0, "15820": 1117048320.0, "15825": 1117048320.0, "15830": 1117048320.0, "15835": 1117048320.0, "15840": 1117048320.0, "15845": 1117048320.0, "15850": 1117048320.0, "15855": 1117048320.0, "15860": 1117048320.0, "15865": 1117048320.0, "15870": 1117048320.0, "15875": 1117048320.0, "15880": 1117048320.0, "15885": 1117048320.0, "15890": 1117048320.0, "15895": 1117048320.0, "15900": 1117048320.0, "15905": 1117048320.0, "15910": 1117048320.0, "15915": 1117048320.0, "15920": 1117048320.0, "15925": 
1117048320.0, "15930": 1117048320.0, "15935": 1117048320.0, "15940": 1117048320.0, "15945": 1117048320.0, "15950": 1117048320.0, "15955": 1117048320.0, "15960": 1117048320.0, "15965": 1117048320.0, "15970": 1117048320.0, "15975": 1117048320.0, "15980": 1117048320.0, "15985": 1117048320.0, "15990": 1117048320.0, "15995": 1117048320.0, "16000": 1117048320.0, "16005": 1117048320.0, "16010": 1117048320.0, "16015": 1117048320.0, "16020": 1117048320.0, "16025": 1117048320.0, "16030": 1117048320.0, "16035": 1117048320.0, "16040": 1117048320.0, "16045": 1117048320.0, "16050": 1117048320.0, "16055": 1117048320.0, "16060": 1117048320.0, "16065": 1117048320.0, "16070": 1117048320.0, "16075": 1117048320.0, "16080": 1117048320.0, "16085": 1117048320.0, "16090": 1117048320.0, "16095": 1117048320.0, "16100": 1117048320.0, "16105": 1117048320.0, "16110": 1117048320.0, "16115": 1117048320.0, "16120": 1117048320.0, "16125": 1117048320.0, "16130": 1117048320.0, "16135": 1117048320.0, "16140": 1117048320.0, "16145": 1117048320.0, "16150": 1117048320.0, "16155": 1117048320.0, "16160": 1117048320.0, "16165": 1117048320.0, "16170": 1117048320.0, "16175": 1117048320.0, "16180": 1117048320.0, "16185": 1117048320.0, "16190": 1117048320.0, "16195": 1117048320.0, "16200": 1117048320.0, "16205": 1117048320.0, "16210": 1117048320.0, "16215": 1117048320.0, "16220": 1117048320.0, "16225": 1117048320.0, "16230": 1117048320.0, "16235": 1117048320.0, "16240": 1117048320.0, "16245": 1117048320.0, "16250": 1117048320.0, "16255": 1117048320.0, "16260": 1117048320.0, "16265": 1117048320.0, "16270": 1117048320.0, "16275": 1117048320.0, "16280": 1117048320.0, "16285": 1117048320.0, "16290": 1117048320.0, "16295": 1117048320.0, "16300": 1117048320.0, "16305": 1117048320.0, "16310": 1117048320.0, "16315": 1117048320.0, "16320": 1117048320.0, "16325": 1117048320.0, "16330": 1117048320.0, "16335": 1117048320.0, "16340": 1117048320.0, "16345": 1117048320.0, "16350": 1117048320.0, "16355": 1117048320.0, "16360": 1117048320.0, "16365": 1117048320.0, "16370": 1117048320.0, "16375": 1117048320.0, "16380": 1117048320.0, "16385": 1117048320.0, "16390": 1117048320.0, "16395": 1117048320.0, "16400": 1117048320.0, "16405": 1117048320.0, "16410": 1117048320.0, "16415": 1117048320.0, "16420": 1117048320.0, "16425": 1117048320.0, "16430": 1117048320.0, "16435": 1117048320.0, "16440": 1117048320.0, "16445": 1117048320.0, "16450": 1117048320.0, "16455": 1117048320.0, "16460": 1117048320.0, "16465": 1117048320.0, "16470": 1117048320.0, "16475": 1117048320.0, "16480": 1117048320.0, "16485": 1117048320.0, "16490": 1117048320.0, "16495": 1117048320.0, "16500": 1117048320.0, "16505": 1117048320.0, "16510": 1117048320.0, "16515": 1117048320.0, "16520": 1117048320.0, "16525": 1117048320.0, "16530": 1117048320.0, "16535": 1117048320.0, "16540": 1117048320.0, "16545": 1117048320.0, "16550": 1117048320.0, "16555": 1117048320.0, "16560": 1117048320.0, "16565": 1117048320.0, "16570": 1117048320.0, "16575": 1117048320.0, "16580": 1117048320.0, "16585": 1117048320.0, "16590": 1117048320.0, "16595": 1117048320.0, "16600": 1117048320.0, "16605": 1117048320.0, "16610": 1117048320.0, "16615": 1117048320.0, "16620": 1117048320.0, "16625": 1117048320.0, "16630": 1117048320.0, "16635": 1117048320.0, "16640": 1117048320.0, "16645": 1117048320.0, "16650": 1117048320.0, "16655": 1117048320.0, "16660": 1117048320.0, "16665": 1117048320.0, "16670": 1117048320.0, "16675": 1117048320.0, "16680": 1117048320.0, "16685": 1117048320.0, "16690": 1117048320.0, "16695": 
1117048320.0, "16700": 1117048320.0, "16705": 1117048320.0, "16710": 1117048320.0, "16715": 1117048320.0, "16720": 1117048320.0, "16725": 1117048320.0, "16730": 1117048320.0, "16735": 1117048320.0, "16740": 1117048320.0, "16745": 1117048320.0, "16750": 1117048320.0, "16755": 1117048320.0, "16760": 1117048320.0, "16765": 1117048320.0, "16770": 1117048320.0, "16775": 1117048320.0, "16780": 1117048320.0, "16785": 1117048320.0, "16790": 1117048320.0, "16795": 1117048320.0, "16800": 1117048320.0, "16805": 1117048320.0, "16810": 1117048320.0, "16815": 1117048320.0, "16820": 1117048320.0, "16825": 1117048320.0, "16830": 1117048320.0, "16835": 1117048320.0, "16840": 1117048320.0, "16845": 1117048320.0, "16850": 1117048320.0, "16855": 1117048320.0, "16860": 1117048320.0, "16865": 1117048320.0, "16870": 1117048320.0, "16875": 1117048320.0, "16880": 1117048320.0, "16885": 1117048320.0, "16890": 1117048320.0, "16895": 1117048320.0, "16900": 1117048320.0, "16905": 1117048320.0, "16910": 1117048320.0, "16915": 1117048320.0, "16920": 1117048320.0, "16925": 1117048320.0, "16930": 1117048320.0, "16935": 1117048320.0, "16940": 1117048320.0, "16945": 1117048320.0, "16950": 1117048320.0, "16955": 1117048320.0, "16960": 1117048320.0, "16965": 1117048320.0, "16970": 1117048320.0, "16975": 1117048320.0, "16980": 1117048320.0, "16985": 1117048320.0, "16990": 1117048320.0, "16995": 1117048320.0, "17000": 1117048320.0, "17005": 1117048320.0, "17010": 1117048320.0, "17015": 1117048320.0, "17020": 1117048320.0, "17025": 1117048320.0, "17030": 1117048320.0, "17035": 1117048320.0, "17040": 1117048320.0, "17045": 1117048320.0, "17050": 1117048320.0, "17055": 1117048320.0, "17060": 1117048320.0, "17065": 1117048320.0, "17070": 1117048320.0, "17075": 1117048320.0, "17080": 1117048320.0, "17085": 1117048320.0, "17090": 1117048320.0, "17095": 1117048320.0, "17100": 1117048320.0, "17105": 1117048320.0, "17110": 1117048320.0, "17115": 1117048320.0, "17120": 1117048320.0, "17125": 1117048320.0, "17130": 1117048320.0, "17135": 1117048320.0, "17140": 1117048320.0, "17145": 1117048320.0, "17150": 1117048320.0, "17155": 1117048320.0, "17160": 1117048320.0, "17165": 1117048320.0, "17170": 1117048320.0, "17175": 1117048320.0, "17180": 1117048320.0, "17185": 1117048320.0, "17190": 1117048320.0, "17195": 1117048320.0, "17200": 1117048320.0, "17205": 1117048320.0, "17210": 1117048320.0, "17215": 1117048320.0, "17220": 1117048320.0, "17225": 1117048320.0, "17230": 1117048320.0, "17235": 1117048320.0, "17240": 1117048320.0, "17245": 1117048320.0, "17250": 1117048320.0, "17255": 1117048320.0, "17260": 1117048320.0, "17265": 1117048320.0, "17270": 1117048320.0, "17275": 1117048320.0, "17280": 1117048320.0, "17285": 1117048320.0, "17290": 1117048320.0, "17295": 1117048320.0, "17300": 1117048320.0, "17305": 1117048320.0, "17310": 1117048320.0, "17315": 1117048320.0, "17320": 1117048320.0, "17325": 1117048320.0, "17330": 1117048320.0, "17335": 1117048320.0, "17340": 1117048320.0, "17345": 1117048320.0, "17350": 1117048320.0, "17355": 1117048320.0, "17360": 1117048320.0, "17365": 1117048320.0, "17370": 1117048320.0, "17375": 1117048320.0, "17380": 1117048320.0, "17385": 1117048320.0, "17390": 1117048320.0, "17395": 1117048320.0, "17400": 1117048320.0, "17405": 1117048320.0, "17410": 1117048320.0, "17415": 1117048320.0, "17420": 1117048320.0, "17425": 1117048320.0, "17430": 1117048320.0, "17435": 1117048320.0, "17440": 1117048320.0, "17445": 1117048320.0, "17450": 1117048320.0, "17455": 1117048320.0, "17460": 1117048320.0, "17465": 
1117048320.0, "17470": 1117048320.0, "17475": 1117048320.0, "17480": 1117048320.0, "17485": 1117048320.0, "17490": 1117048320.0, "17495": 1117048320.0, "17500": 1117048320.0, "17505": 1117048320.0, "17510": 1117048320.0, "17515": 1117048320.0, "17520": 1117048320.0, "17525": 1117048320.0, "17530": 1117048320.0, "17535": 1117048320.0, "17540": 1117048320.0, "17545": 1117048320.0, "17550": 1117048320.0, "17555": 1117048320.0, "17560": 1117048320.0, "17565": 1117048320.0, "17570": 1117048320.0, "17575": 1117048320.0, "17580": 1117048320.0, "17585": 1117048320.0, "17590": 1117048320.0, "17595": 1117048320.0, "17600": 1117048320.0, "17605": 1117048320.0, "17610": 1117048320.0, "17615": 1117048320.0, "17620": 1117048320.0, "17625": 1117048320.0, "17630": 1117048320.0, "17635": 1117048320.0, "17640": 1117048320.0, "17645": 1117048320.0, "17650": 1117048320.0, "17655": 1117048320.0, "17660": 1117048320.0, "17665": 1117048320.0, "17670": 1117048320.0, "17675": 1117048320.0, "17680": 1117048320.0, "17685": 1117048320.0, "17690": 1117048320.0, "17695": 1117048320.0, "17700": 1117048320.0, "17705": 1117048320.0, "17710": 1117048320.0, "17715": 1117048320.0, "17720": 1117048320.0, "17725": 1117048320.0, "17730": 1117048320.0, "17735": 1117048320.0, "17740": 1117048320.0, "17745": 1117048320.0, "17750": 1117048320.0, "17755": 1117048320.0, "17760": 1117048320.0, "17765": 1117048320.0, "17770": 1117048320.0, "17775": 1117048320.0, "17780": 1117048320.0, "17785": 1117048320.0, "17790": 1117048320.0, "17795": 1117048320.0, "17800": 1117048320.0, "17805": 1117048320.0, "17810": 1117048320.0, "17815": 1117048320.0, "17820": 1117048320.0, "17825": 1117048320.0, "17830": 1117048320.0, "17835": 1117048320.0, "17840": 1117048320.0, "17845": 1117048320.0, "17850": 1117048320.0, "17855": 1117048320.0, "17860": 1117048320.0, "17865": 1117048320.0, "17870": 1117048320.0, "17875": 1117048320.0, "17880": 1117048320.0, "17885": 1117048320.0, "17890": 1117048320.0, "17895": 1117048320.0, "17900": 1117048320.0, "17905": 1117048320.0, "17910": 1117048320.0, "17915": 1117048320.0, "17920": 1117048320.0, "17925": 1117048320.0, "17930": 1117048320.0, "17935": 1117048320.0, "17940": 1117048320.0, "17945": 1117048320.0, "17950": 1117048320.0, "17955": 1117048320.0, "17960": 1117048320.0, "17965": 1117048320.0, "17970": 1117048320.0, "17975": 1117048320.0, "17980": 1117048320.0, "17985": 1117048320.0, "17990": 1117048320.0, "17995": 1117048320.0, "18000": 1117048320.0, "18005": 1117048320.0, "18010": 1117048320.0, "18015": 1117048320.0, "18020": 1117048320.0, "18025": 1117048320.0, "18030": 1117048320.0, "18035": 1117048320.0, "18040": 1117048320.0, "18045": 1117048320.0, "18050": 1117048320.0, "18055": 1117048320.0, "18060": 1117048320.0, "18065": 1117048320.0, "18070": 1117048320.0, "18075": 1117048320.0, "18080": 1117048320.0, "18085": 1117048320.0, "18090": 1117048320.0, "18095": 1117048320.0, "18100": 1117048320.0, "18105": 1117048320.0, "18110": 1117048320.0, "18115": 1117048320.0, "18120": 1117048320.0, "18125": 1117048320.0, "18130": 1117048320.0, "18135": 1117048320.0, "18140": 1117048320.0, "18145": 1117048320.0, "18150": 1117048320.0, "18155": 1117048320.0, "18160": 1117048320.0, "18165": 1117048320.0, "18170": 1117048320.0, "18175": 1117048320.0, "18180": 1117048320.0, "18185": 1117048320.0, "18190": 1117048320.0, "18195": 1117048320.0, "18200": 1117048320.0, "18205": 1117048320.0, "18210": 1117048320.0, "18215": 1117048320.0, "18220": 1117048320.0, "18225": 1117048320.0, "18230": 1117048320.0, "18235": 
1117048320.0, "18240": 1117048320.0, "18245": 1117048320.0, "18250": 1117048320.0, "18255": 1117048320.0, "18260": 1117048320.0, "18265": 1117048320.0, "18270": 1117048320.0, "18275": 1117048320.0, "18280": 1117048320.0, "18285": 1117048320.0, "18290": 1117048320.0, "18295": 1117048320.0, "18300": 1117048320.0, "18305": 1117048320.0, "18310": 1117048320.0, "18315": 1117048320.0, "18320": 1117048320.0, "18325": 1117048320.0, "18330": 1117048320.0, "18335": 1117048320.0, "18340": 1117048320.0, "18345": 1117048320.0, "18350": 1117048320.0, "18355": 1117048320.0, "18360": 1117048320.0, "18365": 1117048320.0, "18370": 1117048320.0, "18375": 1117048320.0, "18380": 1117048320.0, "18385": 1117048320.0, "18390": 1117048320.0, "18395": 1117048320.0, "18400": 1117048320.0, "18405": 1117048320.0, "18410": 1117048320.0, "18415": 1117048320.0, "18420": 1117048320.0, "18425": 1117048320.0, "18430": 1117048320.0, "18435": 1117048320.0, "18440": 1117048320.0, "18445": 1117048320.0, "18450": 1117048320.0, "18455": 1117048320.0, "18460": 1117048320.0, "18465": 1117048320.0, "18470": 1117048320.0, "18475": 1117048320.0, "18480": 1117048320.0, "18485": 1117048320.0, "18490": 1117048320.0, "18495": 1117048320.0, "18500": 1117048320.0, "18505": 1117048320.0, "18510": 1117048320.0, "18515": 1117048320.0, "18520": 1117048320.0, "18525": 1117048320.0, "18530": 1117048320.0, "18535": 1117048320.0, "18540": 1117048320.0, "18545": 1117048320.0, "18550": 1117048320.0, "18555": 1117048320.0, "18560": 1117048320.0, "18565": 1117048320.0, "18570": 1117048320.0, "18575": 1117048320.0, "18580": 1117048320.0, "18585": 1117048320.0, "18590": 1117048320.0, "18595": 1117048320.0, "18600": 1117048320.0, "18605": 1117048320.0, "18610": 1117048320.0, "18615": 1117048320.0, "18620": 1117048320.0, "18625": 1117048320.0, "18630": 1117048320.0, "18635": 1117048320.0, "18640": 1117048320.0, "18645": 1117048320.0, "18650": 1117048320.0, "18655": 1117048320.0, "18660": 1117048320.0, "18665": 1117048320.0, "18670": 1117048320.0, "18675": 1117048320.0, "18680": 1117048320.0, "18685": 1117048320.0, "18690": 1117048320.0, "18695": 1117048320.0, "18700": 1117048320.0, "18705": 1117048320.0, "18710": 1117048320.0, "18715": 1117048320.0, "18720": 1117048320.0, "18725": 1117048320.0, "18730": 1117048320.0, "18735": 1117048320.0, "18740": 1117048320.0, "18745": 1117048320.0, "18750": 1117048320.0, "18755": 1117048320.0, "18760": 1117048320.0, "18765": 1117048320.0, "18770": 1117048320.0, "18775": 1117048320.0, "18780": 1117048320.0, "18785": 1117048320.0, "18790": 1117048320.0, "18795": 1117048320.0, "18800": 1117048320.0, "18805": 1117048320.0, "18810": 1117048320.0, "18815": 1117048320.0, "18820": 1117048320.0, "18825": 1117048320.0, "18830": 1117048320.0, "18835": 1117048320.0, "18840": 1117048320.0, "18845": 1117048320.0, "18850": 1117048320.0, "18855": 1117048320.0, "18860": 1117048320.0, "18865": 1117048320.0, "18870": 1117048320.0, "18875": 1117048320.0, "18880": 1117048320.0, "18885": 1117048320.0, "18890": 1117048320.0, "18895": 1117048320.0, "18900": 1117048320.0, "18905": 1117048320.0, "18910": 1117048320.0, "18915": 1117048320.0, "18920": 1117048320.0, "18925": 1117048320.0, "18930": 1117048320.0, "18935": 1117048320.0, "18940": 1117048320.0, "18945": 1117048320.0, "18950": 1117048320.0, "18955": 1117048320.0, "18960": 1117048320.0, "18965": 1117048320.0, "18970": 1117048320.0, "18975": 1117048320.0, "18980": 1117048320.0, "18985": 1117048320.0, "18990": 1117048320.0, "18995": 1117048320.0, "19000": 1117048320.0, "19005": 
1117048320.0, "19010": 1117048320.0, "19015": 1117048320.0, "19020": 1117048320.0, "19025": 1117048320.0, "19030": 1117048320.0, "19035": 1117048320.0, "19040": 1117048320.0, "19045": 1117048320.0, "19050": 1117048320.0, "19055": 1117048320.0, "19060": 1117048320.0, "19065": 1117048320.0, "19070": 1117048320.0, "19075": 1117048320.0, "19080": 1117048320.0, "19085": 1117048320.0, "19090": 1117048320.0, "19095": 1117048320.0, "19100": 1117048320.0, "19105": 1117048320.0, "19110": 1117048320.0, "19115": 1117048320.0, "19120": 1117048320.0, "19125": 1117048320.0, "19130": 1117048320.0, "19135": 1117048320.0, "19140": 1117048320.0, "19145": 1117048320.0, "19150": 1117048320.0, "19155": 1117048320.0, "19160": 1117048320.0, "19165": 1117048320.0, "19170": 1117048320.0, "19175": 1117048320.0, "19180": 1117048320.0, "19185": 1117048320.0, "19190": 1117048320.0, "19195": 1117048320.0, "19200": 1117048320.0, "19205": 1117048320.0, "19210": 1117048320.0, "19215": 1117048320.0, "19220": 1117048320.0, "19225": 1117048320.0, "19230": 1117048320.0, "19235": 1117048320.0, "19240": 1117048320.0, "19245": 1117048320.0, "19250": 1117048320.0, "19255": 1117048320.0, "19260": 1117048320.0, "19265": 1117048320.0, "19270": 1117048320.0, "19275": 1117048320.0, "19280": 1117048320.0, "19285": 1117048320.0, "19290": 1117048320.0, "19295": 1117048320.0, "19300": 1117048320.0, "19305": 1117048320.0, "19310": 1117048320.0, "19315": 1117048320.0, "19320": 1117048320.0, "19325": 1117048320.0, "19330": 1117048320.0, "19335": 1117048320.0, "19340": 1117048320.0, "19345": 1117048320.0, "19350": 1117048320.0, "19355": 1117048320.0, "19360": 1117048320.0, "19365": 1117048320.0, "19370": 1117048320.0, "19375": 1117048320.0, "19380": 1117048320.0, "19385": 1117048320.0, "19390": 1117048320.0, "19395": 1117048320.0, "19400": 1117048320.0, "19405": 1117048320.0, "19410": 1117048320.0, "19415": 1117048320.0, "19420": 1117048320.0, "19425": 1117048320.0, "19430": 1117048320.0, "19435": 1117048320.0, "19440": 1117048320.0, "19445": 1117048320.0, "19450": 1117048320.0, "19455": 1117048320.0, "19460": 1117048320.0, "19465": 1117048320.0, "19470": 1117048320.0, "19475": 1117048320.0, "19480": 1117048320.0, "19485": 1117048320.0, "19490": 1117048320.0, "19495": 1117048320.0, "19500": 1117048320.0, "19505": 1117048320.0, "19510": 1117048320.0, "19515": 1117048320.0, "19520": 1117048320.0, "19525": 1117048320.0, "19530": 1117048320.0, "19535": 1117048320.0, "19540": 1117048320.0, "19545": 1117048320.0, "19550": 1117048320.0, "19555": 1117048320.0, "19560": 1117048320.0, "19565": 1117048320.0, "19570": 1117048320.0, "19575": 1117048320.0, "19580": 1117048320.0, "19585": 1117048320.0, "19590": 1117048320.0, "19595": 1117048320.0, "19600": 1117048320.0, "19605": 1117048320.0, "19610": 1117048320.0, "19615": 1117048320.0, "19620": 1117048320.0, "19625": 1117048320.0, "19630": 1117048320.0, "19635": 1117048320.0, "19640": 1117048320.0, "19645": 1117048320.0, "19650": 1117048320.0, "19655": 1117048320.0, "19660": 1117048320.0, "19665": 1117048320.0, "19670": 1117048320.0, "19675": 1117048320.0, "19680": 1117048320.0, "19685": 1117048320.0, "19690": 1117048320.0, "19695": 1117048320.0, "19700": 1117048320.0, "19705": 1117048320.0, "19710": 1117048320.0, "19715": 1117048320.0, "19720": 1117048320.0, "19725": 1117048320.0, "19730": 1117048320.0, "19735": 1117048320.0, "19740": 1117048320.0, "19745": 1117048320.0, "19750": 1117048320.0, "19755": 1117048320.0, "19760": 1117048320.0, "19765": 1117048320.0, "19770": 1117048320.0, "19775": 
1117048320.0, "19780": 1117048320.0, "19785": 1117048320.0, "19790": 1117048320.0, "19795": 1117048320.0, "19800": 1117048320.0, "19805": 1117048320.0, "19810": 1117048320.0, "19815": 1117048320.0, "19820": 1117048320.0, "19825": 1117048320.0, "19830": 1117048320.0, "19835": 1117048320.0, "19840": 1117048320.0, "19845": 1117048320.0, "19850": 1117048320.0, "19855": 1117048320.0, "19860": 1117048320.0, "19865": 1117048320.0, "19870": 1117048320.0, "19875": 1117048320.0, "19880": 1117048320.0, "19885": 1117048320.0, "19890": 1117048320.0, "19895": 1117048320.0, "19900": 1117048320.0, "19905": 1117048320.0, "19910": 1117048320.0, "19915": 1117048320.0, "19920": 1117048320.0, "19925": 1117048320.0, "19930": 1117048320.0, "19935": 1117048320.0, "19940": 1117048320.0, "19945": 1117048320.0, "19950": 1117048320.0, "19955": 1117048320.0, "19960": 1117048320.0, "19965": 1117048320.0, "19970": 1117048320.0, "19975": 1117048320.0, "19980": 1117048320.0, "19985": 1117048320.0, "19990": 1117048320.0, "19995": 1117048320.0, "20000": 1117048320.0, "20005": 1117048320.0, "20010": 1117048320.0, "20015": 1117048320.0, "20020": 1117048320.0, "20025": 1117048320.0, "20030": 1117048320.0, "20035": 1117048320.0, "20040": 1117048320.0, "20045": 1117048320.0, "20050": 1117048320.0, "20055": 1117048320.0, "20060": 1117048320.0, "20065": 1117048320.0, "20070": 1117048320.0, "20075": 1117048320.0, "20080": 1117048320.0, "20085": 1117048320.0, "20090": 1117048320.0, "20095": 1117048320.0, "20100": 1117048320.0, "20105": 1117048320.0, "20110": 1117048320.0, "20115": 1117048320.0, "20120": 1117048320.0, "20125": 1117048320.0, "20130": 1117048320.0, "20135": 1117048320.0, "20140": 1117048320.0, "20145": 1117048320.0, "20150": 1117048320.0, "20155": 1117048320.0, "20160": 1117048320.0, "20165": 1117048320.0, "20170": 1117048320.0, "20175": 1117048320.0, "20180": 1117048320.0, "20185": 1117048320.0, "20190": 1117048320.0, "20195": 1117048320.0, "20200": 1117048320.0, "20205": 1117048320.0, "20210": 1117048320.0, "20215": 1117048320.0, "20220": 1117048320.0, "20225": 1117048320.0, "20230": 1117048320.0, "20235": 1117048320.0, "20240": 1117048320.0, "20245": 1117048320.0, "20250": 1117048320.0, "20255": 1117048320.0, "20260": 1117048320.0, "20265": 1117048320.0, "20270": 1117048320.0, "20275": 1117048320.0, "20280": 1117048320.0, "20285": 1117048320.0, "20290": 1117048320.0, "20295": 1117048320.0, "20300": 1117048320.0, "20305": 1117048320.0, "20310": 1117048320.0, "20315": 1117048320.0, "20320": 1117048320.0, "20325": 1117048320.0, "20330": 1117048320.0, "20335": 1117048320.0, "20340": 1117048320.0, "20345": 1117048320.0, "20350": 1117048320.0, "20355": 1117048320.0, "20360": 1117048320.0, "20365": 1117048320.0, "20370": 1117048320.0, "20375": 1117048320.0, "20380": 1117048320.0, "20385": 1117048320.0, "20390": 1117048320.0, "20395": 1117048320.0, "20400": 1117048320.0, "20405": 1117048320.0, "20410": 1117048320.0, "20415": 1117048320.0, "20420": 1117048320.0, "20425": 1117048320.0, "20430": 1117048320.0, "20435": 1117048320.0, "20440": 1117048320.0, "20445": 1117048320.0, "20450": 1117048320.0, "20455": 1117048320.0, "20460": 1117048320.0, "20465": 1117048320.0, "20470": 1117048320.0, "20475": 1117048320.0, "20480": 1117048320.0, "20485": 1117048320.0, "20490": 1117048320.0, "20495": 1117048320.0, "20500": 1117048320.0, "20505": 1117048320.0, "20510": 1117048320.0, "20515": 1117048320.0, "20520": 1117048320.0, "20525": 1117048320.0, "20530": 1117048320.0, "20535": 1117048320.0, "20540": 1117048320.0, "20545": 
1117048320.0, "20550": 1117048320.0, "20555": 1117048320.0, "20560": 1117048320.0, "20565": 1117048320.0, "20570": 1117048320.0, "20575": 1117048320.0, "20580": 1117048320.0, "20585": 1117048320.0, "20590": 1117048320.0, "20595": 1117048320.0, "20600": 1117048320.0, "20605": 1117048320.0, "20610": 1117048320.0, "20615": 1117048320.0, "20620": 1117048320.0, "20625": 1117048320.0, "20630": 1117048320.0, "20635": 1117048320.0, "20640": 1117048320.0, "20645": 1117048320.0, "20650": 1117048320.0, "20655": 1117048320.0, "20660": 1117048320.0, "20665": 1117048320.0, "20670": 1117048320.0, "20675": 1117048320.0, "20680": 1117048320.0, "20685": 1117048320.0, "20690": 1117048320.0, "20695": 1117048320.0, "20700": 1117048320.0, "20705": 1117048320.0, "20710": 1117048320.0, "20715": 1117048320.0, "20720": 1117048320.0, "20725": 1117048320.0, "20730": 1117048320.0, "20735": 1117048320.0, "20740": 1117048320.0, "20745": 1117048320.0, "20750": 1117048320.0, "20755": 1117048320.0, "20760": 1117048320.0, "20765": 1117048320.0, "20770": 1117048320.0, "20775": 1117048320.0, "20780": 1117048320.0, "20785": 1117048320.0, "20790": 1117048320.0, "20795": 1117048320.0, "20800": 1117048320.0, "20805": 1117048320.0, "20810": 1117048320.0, "20815": 1117048320.0, "20820": 1117048320.0, "20825": 1117048320.0, "20830": 1117048320.0, "20835": 1117048320.0, "20840": 1117048320.0, "20845": 1117048320.0, "20850": 1117048320.0, "20855": 1117048320.0, "20860": 1117048320.0, "20865": 1117048320.0, "20870": 1117048320.0, "20875": 1117048320.0, "20880": 1117048320.0, "20885": 1117048320.0, "20890": 1117048320.0, "20895": 1117048320.0, "20900": 1117048320.0, "20905": 1117048320.0, "20910": 1117048320.0, "20915": 1117048320.0, "20920": 1117048320.0, "20925": 1117048320.0, "20930": 1117048320.0, "20935": 1117048320.0, "20940": 1117048320.0, "20945": 1117048320.0, "20950": 1117048320.0, "20955": 1117048320.0, "20960": 1117048320.0, "20965": 1117048320.0, "20970": 1117048320.0, "20975": 1117048320.0, "20980": 1117048320.0, "20985": 1117048320.0, "20990": 1117048320.0, "20995": 1117048320.0, "21000": 1117048320.0, "21005": 1117048320.0, "21010": 1117048320.0, "21015": 1117048320.0, "21020": 1117048320.0, "21025": 1117048320.0, "21030": 1117048320.0, "21035": 1117048320.0, "21040": 1117048320.0, "21045": 1117048320.0, "21050": 1117048320.0, "21055": 1117048320.0, "21060": 1117048320.0, "21065": 1117048320.0, "21070": 1117048320.0, "21075": 1117048320.0, "21080": 1117048320.0, "21085": 1117048320.0, "21090": 1117048320.0, "21095": 1117048320.0, "21100": 1117048320.0, "21105": 1117048320.0, "21110": 1117048320.0, "21115": 1117048320.0, "21120": 1117048320.0, "21125": 1117048320.0, "21130": 1117048320.0, "21135": 1117048320.0, "21140": 1117048320.0, "21145": 1117048320.0, "21150": 1117048320.0, "21155": 1117048320.0, "21160": 1117048320.0, "21165": 1117048320.0, "21170": 1117048320.0, "21175": 1117048320.0, "21180": 1117048320.0, "21185": 1117048320.0, "21190": 1117048320.0, "21195": 1117048320.0, "21200": 1117048320.0, "21205": 1117048320.0, "21210": 1117048320.0, "21215": 1117048320.0, "21220": 1117048320.0, "21225": 1117048320.0, "21230": 1117048320.0, "21235": 1117048320.0, "21240": 1117048320.0, "21245": 1117048320.0, "21250": 1117048320.0, "21255": 1117048320.0, "21260": 1117048320.0, "21265": 1117048320.0, "21270": 1117048320.0, "21275": 1117048320.0, "21280": 1117048320.0, "21285": 1117048320.0, "21290": 1117048320.0, "21295": 1117048320.0, "21300": 1117048320.0, "21305": 1117048320.0, "21310": 1117048320.0, "21315": 
1117048320.0, "21320": 1117048320.0, "21325": 1117048320.0, "21330": 1117048320.0, "21335": 1117048320.0, "21340": 1117048320.0, "21345": 1117048320.0, "21350": 1117048320.0, "21355": 1117048320.0, "21360": 1117048320.0, "21365": 1117048320.0, "21370": 1117048320.0, "21375": 1117048320.0, "21380": 1117048320.0, "21385": 1117048320.0, "21390": 1117048320.0, "21395": 1117048320.0, "21400": 1117048320.0, "21405": 1117048320.0, "21410": 1117048320.0, "21415": 1117048320.0, "21420": 1117048320.0, "21425": 1117048320.0, "21430": 1117048320.0, "21435": 1117048320.0, "21440": 1117048320.0, "21445": 1117048320.0, "21450": 1117048320.0, "21455": 1117048320.0, "21460": 1117048320.0, "21465": 1117048320.0, "21470": 1117048320.0, "21475": 1117048320.0, "21480": 1117048320.0, "21485": 1117048320.0, "21490": 1117048320.0, "21495": 1117048320.0, "21500": 1117048320.0, "21505": 1117048320.0, "21510": 1117048320.0, "21515": 1117048320.0, "21520": 1117048320.0, "21525": 1117048320.0, "21530": 1117048320.0, "21535": 1117048320.0, "21540": 1117048320.0, "21545": 1117048320.0, "21550": 1117048320.0, "21555": 1117048320.0, "21560": 1117048320.0, "21565": 1117048320.0, "21570": 1117048320.0, "21575": 1117048320.0, "21580": 1117048320.0, "21585": 1117048320.0, "21590": 1117048320.0, "21595": 1117048320.0, "21600": 1117048320.0, "21605": 1117048320.0, "21610": 1117048320.0, "21615": 1117048320.0, "21620": 1117048320.0, "21625": 1117048320.0, "21630": 1117048320.0, "21635": 1117048320.0, "21640": 1117048320.0, "21645": 1117048320.0, "21650": 1117048320.0, "21655": 1117048320.0, "21660": 1117048320.0, "21665": 1117048320.0, "21670": 1117048320.0, "21675": 1117048320.0, "21680": 1117048320.0, "21685": 1117048320.0, "21690": 1117048320.0, "21695": 1117048320.0, "21700": 1117048320.0, "21705": 1117048320.0, "21710": 1117048320.0, "21715": 1117048320.0, "21720": 1117048320.0, "21725": 1117048320.0, "21730": 1117048320.0, "21735": 1117048320.0, "21740": 1117048320.0, "21745": 1117048320.0, "21750": 1117048320.0, "21755": 1117048320.0, "21760": 1117048320.0, "21765": 1117048320.0, "21770": 1117048320.0, "21775": 1117048320.0, "21780": 1117048320.0, "21785": 1117048320.0, "21790": 1117048320.0, "21795": 1117048320.0, "21800": 1117048320.0, "21805": 1117048320.0, "21810": 1117048320.0, "21815": 1117048320.0, "21820": 1117048320.0, "21825": 1117048320.0, "21830": 1117048320.0, "21835": 1117048320.0, "21840": 1117048320.0, "21845": 1117048320.0, "21850": 1117048320.0, "21855": 1117048320.0, "21860": 1117048320.0, "21865": 1117048320.0, "21870": 1117048320.0, "21875": 1117048320.0, "21880": 1117048320.0, "21885": 1117048320.0, "21890": 1117048320.0, "21895": 1117048320.0, "21900": 1117048320.0, "21905": 1117048320.0, "21910": 1117048320.0, "21915": 1117048320.0, "21920": 1117048320.0, "21925": 1117048320.0, "21930": 1117048320.0, "21935": 1117048320.0, "21940": 1117048320.0, "21945": 1117048320.0, "21950": 1117048320.0, "21955": 1117048320.0, "21960": 1117048320.0, "21965": 1117048320.0, "21970": 1117048320.0, "21975": 1117048320.0, "21980": 1117048320.0, "21985": 1117048320.0, "21990": 1117048320.0, "21995": 1117048320.0, "22000": 1117048320.0, "22005": 1117048320.0, "22010": 1117048320.0, "22015": 1117048320.0, "22020": 1117048320.0, "22025": 1117048320.0, "22030": 1117048320.0, "22035": 1117048320.0, "22040": 1117048320.0, "22045": 1117048320.0, "22050": 1117048320.0, "22055": 1117048320.0, "22060": 1117048320.0, "22065": 1117048320.0, "22070": 1117048320.0, "22075": 1117048320.0, "22080": 1117048320.0, "22085": 
1117048320.0, "22090": 1117048320.0, "22095": 1117048320.0, "22100": 1117048320.0, "22105": 1117048320.0, "22110": 1117048320.0, "22115": 1117048320.0, "22120": 1117048320.0, "22125": 1117048320.0, "22130": 1117048320.0, "22135": 1117048320.0, "22140": 1117048320.0, "22145": 1117048320.0, "22150": 1117048320.0, "22155": 1117048320.0, "22160": 1117048320.0, "22165": 1117048320.0, "22170": 1117048320.0, "22175": 1117048320.0, "22180": 1117048320.0, "22185": 1117048320.0, "22190": 1117048320.0, "22195": 1117048320.0, "22200": 1117048320.0, "22205": 1117048320.0, "22210": 1117048320.0, "22215": 1117048320.0, "22220": 1117048320.0, "22225": 1117048320.0, "22230": 1117048320.0, "22235": 1117048320.0, "22240": 1117048320.0, "22245": 1117048320.0, "22250": 1117048320.0, "22255": 1117048320.0, "22260": 1117048320.0, "22265": 1117048320.0, "22270": 1117048320.0, "22275": 1117048320.0, "22280": 1117048320.0, "22285": 1117048320.0, "22290": 1117048320.0, "22295": 1117048320.0, "22300": 1117048320.0, "22305": 1117048320.0, "22310": 1117048320.0, "22315": 1117048320.0, "22320": 1117048320.0, "22325": 1117048320.0, "22330": 1117048320.0, "22335": 1117048320.0, "22340": 1117048320.0, "22345": 1117048320.0, "22350": 1117048320.0, "22355": 1117048320.0, "22360": 1117048320.0, "22365": 1117048320.0, "22370": 1117048320.0, "22375": 1117048320.0, "22380": 1117048320.0, "22385": 1117048320.0, "22390": 1117048320.0, "22395": 1117048320.0, "22400": 1117048320.0, "22405": 1117048320.0, "22410": 1117048320.0, "22415": 1117048320.0, "22420": 1117048320.0, "22425": 1117048320.0, "22430": 1117048320.0, "22435": 1117048320.0, "22440": 1117048320.0, "22445": 1117048320.0, "22450": 1117048320.0, "22455": 1117048320.0, "22460": 1117048320.0, "22465": 1117048320.0, "22470": 1117048320.0, "22475": 1117048320.0, "22480": 1117048320.0, "22485": 1117048320.0, "22490": 1117048320.0, "22495": 1117048320.0, "22500": 1117048320.0, "22505": 1117048320.0, "22510": 1117048320.0, "22515": 1117048320.0, "22520": 1117048320.0, "22525": 1117048320.0, "22530": 1117048320.0, "22535": 1117048320.0, "22540": 1117048320.0, "22545": 1117048320.0, "22550": 1117048320.0, "22555": 1117048320.0, "22560": 1117048320.0, "22565": 1117048320.0, "22570": 1117048320.0, "22575": 1117048320.0, "22580": 1117048320.0, "22585": 1117048320.0, "22590": 1117048320.0, "22595": 1117048320.0, "22600": 1117048320.0, "22605": 1117048320.0, "22610": 1117048320.0, "22615": 1117048320.0, "22620": 1117048320.0, "22625": 1117048320.0, "22630": 1117048320.0, "22635": 1117048320.0, "22640": 1117048320.0, "22645": 1117048320.0, "22650": 1117048320.0, "22655": 1117048320.0, "22660": 1117048320.0, "22665": 1117048320.0, "22670": 1117048320.0, "22675": 1117048320.0, "22680": 1117048320.0, "22685": 1117048320.0, "22690": 1117048320.0, "22695": 1117048320.0, "22700": 1117048320.0, "22705": 1117048320.0, "22710": 1117048320.0, "22715": 1117048320.0, "22720": 1117048320.0, "22725": 1117048320.0, "22730": 1117048320.0, "22735": 1117048320.0, "22740": 1117048320.0, "22745": 1117048320.0, "22750": 1117048320.0, "22755": 1117048320.0, "22760": 1117048320.0, "22765": 1117048320.0, "22770": 1117048320.0, "22775": 1117048320.0, "22780": 1117048320.0, "22785": 1117048320.0, "22790": 1117048320.0, "22795": 1117048320.0, "22800": 1117048320.0, "22805": 1117048320.0, "22810": 1117048320.0, "22815": 1117048320.0, "22820": 1117048320.0, "22825": 1117048320.0, "22830": 1117048320.0, "22835": 1117048320.0, "22840": 1117048320.0, "22845": 1117048320.0, "22850": 1117048320.0, "22855": 
1117048320.0, "22860": 1117048320.0, "22865": 1117048320.0, "22870": 1117048320.0, "22875": 1117048320.0, "22880": 1117048320.0, "22885": 1117048320.0, "22890": 1117048320.0, "22895": 1117048320.0, "22900": 1117048320.0, "22905": 1117048320.0, "22910": 1117048320.0, "22915": 1117048320.0, "22920": 1117048320.0, "22925": 1117048320.0, "22930": 1117048320.0, "22935": 1117048320.0, "22940": 1117048320.0, "22945": 1117048320.0, "22950": 1117048320.0, "22955": 1117048320.0, "22960": 1117048320.0, "22965": 1117048320.0, "22970": 1117048320.0, "22975": 1117048320.0, "22980": 1117048320.0, "22985": 1117048320.0, "22990": 1117048320.0, "22995": 1117048320.0, "23000": 1117048320.0, "23005": 1117048320.0, "23010": 1117048320.0, "23015": 1117048320.0, "23020": 1117048320.0, "23025": 1117048320.0, "23030": 1117048320.0, "23035": 1117048320.0, "23040": 1117048320.0, "23045": 1117048320.0, "23050": 1117048320.0, "23055": 1117048320.0, "23060": 1117048320.0, "23065": 1117048320.0, "23070": 1117048320.0, "23075": 1117048320.0, "23080": 1117048320.0, "23085": 1117048320.0, "23090": 1117048320.0, "23095": 1117048320.0, "23100": 1117048320.0, "23105": 1117048320.0, "23110": 1117048320.0, "23115": 1117048320.0, "23120": 1117048320.0, "23125": 1117048320.0, "23130": 1117048320.0, "23135": 1117048320.0, "23140": 1117048320.0, "23145": 1117048320.0, "23150": 1117048320.0, "23155": 1117048320.0, "23160": 1117048320.0, "23165": 1117048320.0, "23170": 1117048320.0, "23175": 1117048320.0, "23180": 1117048320.0, "23185": 1117048320.0, "23190": 1117048320.0, "23195": 1117048320.0, "23200": 1117048320.0, "23205": 1117048320.0, "23210": 1117048320.0, "23215": 1117048320.0, "23220": 1117048320.0, "23225": 1117048320.0, "23230": 1117048320.0, "23235": 1117048320.0, "23240": 1117048320.0, "23245": 1117048320.0, "23250": 1117048320.0, "23255": 1117048320.0, "23260": 1117048320.0, "23265": 1117048320.0, "23270": 1117048320.0, "23275": 1117048320.0, "23280": 1117048320.0, "23285": 1117048320.0, "23290": 1117048320.0, "23295": 1117048320.0, "23300": 1117048320.0, "23305": 1117048320.0, "23310": 1117048320.0, "23315": 1117048320.0, "23320": 1117048320.0, "23325": 1117048320.0, "23330": 1117048320.0, "23335": 1117048320.0, "23340": 1117048320.0, "23345": 1117048320.0, "23350": 1117048320.0, "23355": 1117048320.0, "23360": 1117048320.0, "23365": 1117048320.0, "23370": 1117048320.0, "23375": 1117048320.0, "23380": 1117048320.0, "23385": 1117048320.0, "23390": 1117048320.0, "23395": 1117048320.0, "23400": 1117048320.0, "23405": 1117048320.0, "23410": 1117048320.0, "23415": 1117048320.0, "23420": 1117048320.0, "23425": 1117048320.0, "23430": 1117048320.0, "23435": 1117048320.0, "23440": 1117048320.0, "23445": 1117048320.0, "23450": 1117048320.0, "23455": 1117048320.0, "23460": 1117048320.0, "23465": 1117048320.0, "23470": 1117048320.0, "23475": 1117048320.0, "23480": 1117048320.0, "23485": 1117048320.0, "23490": 1117048320.0, "23495": 1117048320.0, "23500": 1117048320.0, "23505": 1117048320.0, "23510": 1117048320.0, "23515": 1117048320.0, "23520": 1117048320.0, "23525": 1117048320.0, "23530": 1117048320.0, "23535": 1117048320.0, "23540": 1117048320.0, "23545": 1117048320.0, "23550": 1117048320.0, "23555": 1117048320.0, "23560": 1117048320.0, "23565": 1117048320.0, "23570": 1117048320.0, "23575": 1117048320.0, "23580": 1117048320.0, "23585": 1117048320.0, "23590": 1117048320.0, "23595": 1117048320.0, "23600": 1117048320.0, "23605": 1117048320.0, "23610": 1117048320.0, "23615": 1117048320.0, "23620": 1117048320.0, "23625": 
1117048320.0, "23630": 1117048320.0, "23635": 1117048320.0, "23640": 1117048320.0, "23645": 1117048320.0, "23650": 1117048320.0, "23655": 1117048320.0, "23660": 1117048320.0, "23665": 1117048320.0, "23670": 1117048320.0, "23675": 1117048320.0, "23680": 1117048320.0, "23685": 1117048320.0, "23690": 1117048320.0, "23695": 1117048320.0, "23700": 1117048320.0, "23705": 1117048320.0, "23710": 1117048320.0, "23715": 1117048320.0, "23720": 1117048320.0, "23725": 1117048320.0, "23730": 1117048320.0, "23735": 1117048320.0, "23740": 1117048320.0, "23745": 1117048320.0, "23750": 1117048320.0, "23755": 1117048320.0, "23760": 1117048320.0, "23765": 1117048320.0, "23770": 1117048320.0, "23775": 1117048320.0, "23780": 1117048320.0, "23785": 1117048320.0, "23790": 1117048320.0, "23795": 1117048320.0, "23800": 1117048320.0, "23805": 1117048320.0, "23810": 1117048320.0, "23815": 1117048320.0, "23820": 1117048320.0, "23825": 1117048320.0, "23830": 1117048320.0, "23835": 1117048320.0, "23840": 1117048320.0, "23845": 1117048320.0, "23850": 1117048320.0, "23855": 1117048320.0, "23860": 1117048320.0, "23865": 1117048320.0, "23870": 1117048320.0, "23875": 1117048320.0, "23880": 1117048320.0, "23885": 1117048320.0, "23890": 1117048320.0, "23895": 1117048320.0, "23900": 1117048320.0, "23905": 1117048320.0, "23910": 1117048320.0, "23915": 1117048320.0, "23920": 1117048320.0, "23925": 1117048320.0, "23930": 1117048320.0, "23935": 1117048320.0, "23940": 1117048320.0, "23945": 1117048320.0, "23950": 1117048320.0, "23955": 1117048320.0, "23960": 1117048320.0, "23965": 1117048320.0, "23970": 1117048320.0, "23975": 1117048320.0, "23980": 1117048320.0, "23985": 1117048320.0, "23990": 1117048320.0, "23995": 1117048320.0, "24000": 1117048320.0, "24005": 1117048320.0, "24010": 1117048320.0, "24015": 1117048320.0, "24020": 1117048320.0, "24025": 1117048320.0, "24030": 1117048320.0, "24035": 1117048320.0, "24040": 1117048320.0, "24045": 1117048320.0, "24050": 1117048320.0, "24055": 1117048320.0, "24060": 1117048320.0, "24065": 1117048320.0, "24070": 1117048320.0, "24075": 1117048320.0, "24080": 1117048320.0, "24085": 1117048320.0, "24090": 1117048320.0, "24095": 1117048320.0, "24100": 1117048320.0, "24105": 1117048320.0, "24110": 1117048320.0, "24115": 1117048320.0, "24120": 1117048320.0, "24125": 1117048320.0, "24130": 1117048320.0, "24135": 1117048320.0, "24140": 1117048320.0, "24145": 1117048320.0, "24150": 1117048320.0, "24155": 1117048320.0, "24160": 1117048320.0, "24165": 1117048320.0, "24170": 1117048320.0, "24175": 1117048320.0, "24180": 1117048320.0, "24185": 1117048320.0, "24190": 1117048320.0, "24195": 1117048320.0, "24200": 1117048320.0, "24205": 1117048320.0, "24210": 1117048320.0, "24215": 1117048320.0, "24220": 1117048320.0, "24225": 1117048320.0, "24230": 1117048320.0, "24235": 1117048320.0, "24240": 1117048320.0, "24245": 1117048320.0, "24250": 1117048320.0, "24255": 1117048320.0, "24260": 1117048320.0, "24265": 1117048320.0, "24270": 1117048320.0, "24275": 1117048320.0, "24280": 1117048320.0, "24285": 1117048320.0, "24290": 1117048320.0, "24295": 1117048320.0, "24300": 1117048320.0, "24305": 1117048320.0, "24310": 1117048320.0, "24315": 1117048320.0, "24320": 1117048320.0, "24325": 1117048320.0, "24330": 1117048320.0, "24335": 1117048320.0, "24340": 1117048320.0, "24345": 1117048320.0, "24350": 1117048320.0, "24355": 1117048320.0, "24360": 1117048320.0, "24365": 1117048320.0, "24370": 1117048320.0, "24375": 1117048320.0, "24380": 1117048320.0, "24385": 1117048320.0, "24390": 1117048320.0, "24395": 
1117048320.0, "24400": 1117048320.0, "24405": 1117048320.0, "24410": 1117048320.0, "24415": 1117048320.0, "24420": 1117048320.0, "24425": 1117048320.0, "24430": 1117048320.0, "24435": 1117048320.0, "24440": 1117048320.0, "24445": 1117048320.0, "24450": 1117048320.0, "24455": 1117048320.0, "24460": 1117048320.0, "24465": 1117048320.0, "24470": 1117048320.0, "24475": 1117048320.0, "24480": 1117048320.0, "24485": 1117048320.0, "24490": 1117048320.0, "24495": 1117048320.0, "24500": 1117048320.0, "24505": 1117048320.0, "24510": 1117048320.0, "24515": 1117048320.0, "24520": 1117048320.0, "24525": 1117048320.0, "24530": 1117048320.0, "24535": 1117048320.0, "24540": 1117048320.0, "24545": 1117048320.0, "24550": 1117048320.0, "24555": 1117048320.0, "24560": 1117048320.0, "24565": 1117048320.0, "24570": 1117048320.0, "24575": 1117048320.0, "24580": 1117048320.0, "24585": 1117048320.0, "24590": 1117048320.0, "24595": 1117048320.0, "24600": 1117048320.0, "24605": 1117048320.0, "24610": 1117048320.0, "24615": 1117048320.0, "24620": 1117048320.0, "24625": 1117048320.0, "24630": 1117048320.0, "24635": 1117048320.0, "24640": 1117048320.0, "24645": 1117048320.0, "24650": 1117048320.0, "24655": 1117048320.0, "24660": 1117048320.0, "24665": 1117048320.0, "24670": 1117048320.0, "24675": 1117048320.0, "24680": 1117048320.0, "24685": 1117048320.0, "24690": 1117048320.0, "24695": 1117048320.0, "24700": 1117048320.0, "24705": 1117048320.0, "24710": 1117048320.0, "24715": 1117048320.0, "24720": 1117048320.0, "24725": 1117048320.0, "24730": 1117048320.0, "24735": 1117048320.0, "24740": 1117048320.0, "24745": 1117048320.0, "24750": 1117048320.0, "24755": 1117048320.0, "24760": 1117048320.0, "24765": 1117048320.0, "24770": 1117048320.0, "24775": 1117048320.0, "24780": 1117048320.0, "24785": 1117048320.0, "24790": 1117048320.0, "24795": 1117048320.0, "24800": 1117048320.0, "24805": 1117048320.0, "24810": 1117048320.0, "24815": 1117048320.0, "24820": 1117048320.0, "24825": 1117048320.0, "24830": 1117048320.0, "24835": 1117048320.0, "24840": 1117048320.0, "24845": 1117048320.0, "24850": 1117048320.0, "24855": 1117048320.0, "24860": 1117048320.0, "24865": 1117048320.0, "24870": 1117048320.0, "24875": 1117048320.0, "24880": 1117048320.0, "24885": 1117048320.0, "24890": 1117048320.0, "24895": 1117048320.0, "24900": 1117048320.0, "24905": 1117048320.0, "24910": 1117048320.0, "24915": 1117048320.0, "24920": 1117048320.0, "24925": 1117048320.0, "24930": 1117048320.0, "24935": 1117048320.0, "24940": 1117048320.0, "24945": 1117048320.0, "24950": 1117048320.0, "24955": 1117048320.0, "24960": 1117048320.0, "24965": 1117048320.0, "24970": 1117048320.0, "24975": 1117048320.0, "24980": 1117048320.0, "24985": 1117048320.0, "24990": 1117048320.0, "24995": 1117048320.0, "25000": 1117048320.0, "25005": 1117048320.0, "25010": 1117048320.0, "25015": 1117048320.0, "25020": 1117048320.0, "25025": 1117048320.0, "25030": 1117048320.0, "25035": 1117048320.0, "25040": 1117048320.0, "25045": 1117048320.0, "25050": 1117048320.0, "25055": 1117048320.0, "25060": 1117048320.0, "25065": 1117048320.0, "25070": 1117048320.0, "25075": 1117048320.0, "25080": 1117048320.0, "25085": 1117048320.0, "25090": 1117048320.0, "25095": 1117048320.0, "25100": 1117048320.0, "25105": 1117048320.0, "25110": 1117048320.0, "25115": 1117048320.0, "25120": 1117048320.0, "25125": 1117048320.0, "25130": 1117048320.0, "25135": 1117048320.0, "25140": 1117048320.0, "25145": 1117048320.0, "25150": 1117048320.0, "25155": 1117048320.0, "25160": 1117048320.0, "25165": 
1117048320.0, "25170": 1117048320.0, "25175": 1117048320.0, "25180": 1117048320.0, "25185": 1117048320.0, "25190": 1117048320.0, "25195": 1117048320.0, "25200": 1117048320.0, "25205": 1117048320.0, "25210": 1117048320.0, "25215": 1117048320.0, "25220": 1117048320.0, "25225": 1117048320.0, "25230": 1117048320.0, "25235": 1117048320.0, "25240": 1117048320.0, "25245": 1117048320.0, "25250": 1117048320.0, "25255": 1117048320.0, "25260": 1117048320.0, "25265": 1117048320.0, "25270": 1117048320.0, "25275": 1117048320.0, "25280": 1117048320.0, "25285": 1117048320.0, "25290": 1117048320.0, "25295": 1117048320.0, "25300": 1117048320.0, "25305": 1117048320.0, "25310": 1117048320.0, "25315": 1117048320.0, "25320": 1117048320.0, "25325": 1117048320.0, "25330": 1117048320.0, "25335": 1117048320.0, "25340": 1117048320.0, "25345": 1117048320.0, "25350": 1117048320.0, "25355": 1117048320.0, "25360": 1117048320.0, "25365": 1117048320.0, "25370": 1117048320.0, "25375": 1117048320.0, "25380": 1117048320.0, "25385": 1117048320.0, "25390": 1117048320.0, "25395": 1117048320.0, "25400": 1117048320.0, "25405": 1117048320.0, "25410": 1117048320.0, "25415": 1117048320.0, "25420": 1117048320.0, "25425": 1117048320.0, "25430": 1117048320.0, "25435": 1117048320.0, "25440": 1117048320.0, "25445": 1117048320.0, "25450": 1117048320.0, "25455": 1117048320.0, "25460": 1117048320.0, "25465": 1117048320.0, "25470": 1117048320.0, "25475": 1117048320.0, "25480": 1117048320.0, "25485": 1117048320.0, "25490": 1117048320.0, "25495": 1117048320.0, "25500": 1117048320.0, "25505": 1117048320.0, "25510": 1117048320.0, "25515": 1117048320.0, "25520": 1117048320.0, "25525": 1117048320.0, "25530": 1117048320.0, "25535": 1117048320.0, "25540": 1117048320.0, "25545": 1117048320.0, "25550": 1117048320.0, "25555": 1117048320.0, "25560": 1117048320.0, "25565": 1117048320.0, "25570": 1117048320.0, "25575": 1117048320.0, "25580": 1117048320.0, "25585": 1117048320.0, "25590": 1117048320.0, "25595": 1117048320.0, "25600": 1117048320.0, "25605": 1117048320.0, "25610": 1117048320.0, "25615": 1117048320.0, "25620": 1117048320.0, "25625": 1117048320.0, "25630": 1117048320.0, "25635": 1117048320.0, "25640": 1117048320.0, "25645": 1117048320.0, "25650": 1117048320.0, "25655": 1117048320.0, "25660": 1117048320.0, "25665": 1117048320.0, "25670": 1117048320.0, "25675": 1117048320.0, "25680": 1117048320.0, "25685": 1117048320.0, "25690": 1117048320.0, "25695": 1117048320.0, "25700": 1117048320.0, "25705": 1117048320.0, "25710": 1117048320.0, "25715": 1117048320.0, "25720": 1117048320.0, "25725": 1117048320.0, "25730": 1117048320.0, "25735": 1117048320.0, "25740": 1117048320.0, "25745": 1117048320.0, "25750": 1117048320.0, "25755": 1117048320.0, "25760": 1117048320.0, "25765": 1117048320.0, "25770": 1117048320.0, "25775": 1117048320.0, "25780": 1117048320.0, "25785": 1117048320.0, "25790": 1117048320.0, "25795": 1117048320.0, "25800": 1117048320.0, "25805": 1117048320.0, "25810": 1117048320.0, "25815": 1117048320.0, "25820": 1117048320.0, "25825": 1117048320.0, "25830": 1117048320.0, "25835": 1117048320.0, "25840": 1117048320.0, "25845": 1117048320.0, "25850": 1117048320.0, "25855": 1117048320.0, "25860": 1117048320.0, "25865": 1117048320.0, "25870": 1117048320.0, "25875": 1117048320.0, "25880": 1117048320.0, "25885": 1117048320.0, "25890": 1117048320.0, "25895": 1117048320.0, "25900": 1117048320.0, "25905": 1117048320.0, "25910": 1117048320.0, "25915": 1117048320.0, "25920": 1117048320.0, "25925": 1117048320.0, "25930": 1117048320.0, "25935": 
1117048320.0, "25940": 1117048320.0, "25945": 1117048320.0, "25950": 1117048320.0, "25955": 1117048320.0, "25960": 1117048320.0, "25965": 1117048320.0, "25970": 1117048320.0, "25975": 1117048320.0, "25980": 1117048320.0, "25985": 1117048320.0, "25990": 1117048320.0, "25995": 1117048320.0, "26000": 1117048320.0, "26005": 1117048320.0, "26010": 1117048320.0, "26015": 1117048320.0, "26020": 1117048320.0, "26025": 1117048320.0, "26030": 1117048320.0, "26035": 1117048320.0, "26040": 1117048320.0, "26045": 1117048320.0, "26050": 1117048320.0, "26055": 1117048320.0, "26060": 1117048320.0, "26065": 1117048320.0, "26070": 1117048320.0, "26075": 1117048320.0, "26080": 1117048320.0, "26085": 1117048320.0, "26090": 1117048320.0, "26095": 1117048320.0, "26100": 1117048320.0, "26105": 1117048320.0, "26110": 1117048320.0, "26115": 1117048320.0, "26120": 1117048320.0, "26125": 1117048320.0, "26130": 1117048320.0, "26135": 1117048320.0, "26140": 1117048320.0, "26145": 1117048320.0, "26150": 1117048320.0, "26155": 1117048320.0, "26160": 1117048320.0, "26165": 1117048320.0, "26170": 1117048320.0, "26175": 1117048320.0, "26180": 1117048320.0, "26185": 1117048320.0, "26190": 1117048320.0, "26195": 1117048320.0, "26200": 1117048320.0, "26205": 1117048320.0, "26210": 1117048320.0, "26215": 1117048320.0, "26220": 1117048320.0, "26225": 1117048320.0, "26230": 1117048320.0, "26235": 1117048320.0, "26240": 1117048320.0, "26245": 1117048320.0, "26250": 1117048320.0, "26255": 1117048320.0, "26260": 1117048320.0, "26265": 1117048320.0, "26270": 1117048320.0, "26275": 1117048320.0, "26280": 1117048320.0, "26285": 1117048320.0, "26290": 1117048320.0, "26295": 1117048320.0, "26300": 1117048320.0, "26305": 1117048320.0, "26310": 1117048320.0, "26315": 1117048320.0, "26320": 1117048320.0, "26325": 1117048320.0, "26330": 1117048320.0, "26335": 1117048320.0, "26340": 1117048320.0, "26345": 1117048320.0, "26350": 1117048320.0, "26355": 1117048320.0, "26360": 1117048320.0, "26365": 1117048320.0, "26370": 1117048320.0, "26375": 1117048320.0, "26380": 1117048320.0, "26385": 1117048320.0, "26390": 1117048320.0, "26395": 1117048320.0, "26400": 1117048320.0, "26405": 1117048320.0, "26410": 1117048320.0, "26415": 1117048320.0, "26420": 1117048320.0, "26425": 1117048320.0, "26430": 1117048320.0, "26435": 1117048320.0, "26440": 1117048320.0, "26445": 1117048320.0, "26450": 1117048320.0, "26455": 1117048320.0, "26460": 1117048320.0, "26465": 1117048320.0, "26470": 1117048320.0, "26475": 1117048320.0, "26480": 1117048320.0, "26485": 1117048320.0, "26490": 1117048320.0, "26495": 1117048320.0, "26500": 1117048320.0, "26505": 1117048320.0, "26510": 1117048320.0, "26515": 1117048320.0, "26520": 1117048320.0, "26525": 1117048320.0, "26530": 1117048320.0, "26535": 1117048320.0, "26540": 1117048320.0, "26545": 1117048320.0, "26550": 1117048320.0, "26555": 1117048320.0, "26560": 1117048320.0, "26565": 1117048320.0, "26570": 1117048320.0, "26575": 1117048320.0, "26580": 1117048320.0, "26585": 1117048320.0, "26590": 1117048320.0, "26595": 1117048320.0, "26600": 1117048320.0, "26605": 1117048320.0, "26610": 1117048320.0, "26615": 1117048320.0, "26620": 1117048320.0, "26625": 1117048320.0, "26630": 1117048320.0, "26635": 1117048320.0, "26640": 1117048320.0, "26645": 1117048320.0, "26650": 1117048320.0, "26655": 1117048320.0, "26660": 1117048320.0, "26665": 1117048320.0, "26670": 1117048320.0, "26675": 1117048320.0, "26680": 1117048320.0, "26685": 1117048320.0, "26690": 1117048320.0, "26695": 1117048320.0, "26700": 1117048320.0, "26705": 
1117048320.0, "26710": 1117048320.0, "26715": 1117048320.0, "26720": 1117048320.0, "26725": 1117048320.0, "26730": 1117048320.0, "26735": 1117048320.0, "26740": 1117048320.0, "26745": 1117048320.0, "26750": 1117048320.0, "26755": 1117048320.0, "26760": 1117048320.0, "26765": 1117048320.0, "26770": 1117048320.0, "26775": 1117048320.0, "26780": 1117048320.0, "26785": 1117048320.0, "26790": 1117048320.0, "26795": 1117048320.0, "26800": 1117048320.0, "26805": 1117048320.0, "26810": 1117048320.0, "26815": 1117048320.0, "26820": 1117048320.0, "26825": 1117048320.0, "26830": 1117048320.0, "26835": 1117048320.0, "26840": 1117048320.0, "26845": 1117048320.0, "26850": 1117048320.0, "26855": 1117048320.0, "26860": 1117048320.0, "26865": 1117048320.0, "26870": 1117048320.0, "26875": 1117048320.0, "26880": 1117048320.0, "26885": 1117048320.0, "26890": 1117048320.0, "26895": 1117048320.0, "26900": 1117048320.0, "26905": 1117048320.0, "26910": 1117048320.0, "26915": 1117048320.0, "26920": 1117048320.0, "26925": 1117048320.0, "26930": 1117048320.0, "26935": 1117048320.0, "26940": 1117048320.0, "26945": 1117048320.0, "26950": 1117048320.0, "26955": 1117048320.0, "26960": 1117048320.0, "26965": 1117048320.0, "26970": 1117048320.0, "26975": 1117048320.0, "26980": 1117048320.0, "26985": 1117048320.0, "26990": 1117048320.0, "26995": 1117048320.0, "27000": 1117048320.0, "27005": 1117048320.0, "27010": 1117048320.0, "27015": 1117048320.0, "27020": 1117048320.0, "27025": 1117048320.0, "27030": 1117048320.0, "27035": 1117048320.0, "27040": 1117048320.0, "27045": 1117048320.0, "27050": 1117048320.0, "27055": 1117048320.0, "27060": 1117048320.0, "27065": 1117048320.0, "27070": 1117048320.0, "27075": 1117048320.0, "27080": 1117048320.0, "27085": 1117048320.0, "27090": 1117048320.0, "27095": 1117048320.0, "27100": 1117048320.0, "27105": 1117048320.0, "27110": 1117048320.0, "27115": 1117048320.0, "27120": 1117048320.0, "27125": 1117048320.0, "27130": 1117048320.0, "27135": 1117048320.0, "27140": 1117048320.0, "27145": 1117048320.0, "27150": 1117048320.0, "27155": 1117048320.0, "27160": 1117048320.0, "27165": 1117048320.0, "27170": 1117048320.0, "27175": 1117048320.0, "27180": 1117048320.0, "27185": 1117048320.0, "27190": 1117048320.0, "27195": 1117048320.0, "27200": 1117048320.0, "27205": 1117048320.0, "27210": 1117048320.0, "27215": 1117048320.0, "27220": 1117048320.0, "27225": 1117048320.0, "27230": 1117048320.0, "27235": 1117048320.0, "27240": 1117048320.0, "27245": 1117048320.0, "27250": 1117048320.0, "27255": 1117048320.0, "27260": 1117048320.0, "27265": 1117048320.0, "27270": 1117048320.0, "27275": 1117048320.0, "27280": 1117048320.0, "27285": 1117048320.0, "27290": 1117048320.0, "27295": 1117048320.0, "27300": 1117048320.0, "27305": 1117048320.0, "27310": 1117048320.0, "27315": 1117048320.0, "27320": 1117048320.0, "27325": 1117048320.0, "27330": 1117048320.0, "27335": 1117048320.0, "27340": 1117048320.0, "27345": 1117048320.0, "27350": 1117048320.0, "27355": 1117048320.0, "27360": 1117048320.0, "27365": 1117048320.0, "27370": 1117048320.0, "27375": 1117048320.0, "27380": 1117048320.0, "27385": 1117048320.0, "27390": 1117048320.0, "27395": 1117048320.0, "27400": 1117048320.0, "27405": 1117048320.0, "27410": 1117048320.0, "27415": 1117048320.0, "27420": 1117048320.0, "27425": 1117048320.0, "27430": 1117048320.0, "27435": 1117048320.0, "27440": 1117048320.0, "27445": 1117048320.0, "27450": 1117048320.0, "27455": 1117048320.0, "27460": 1117048320.0, "27465": 1117048320.0, "27470": 1117048320.0, "27475": 
1117048320.0, "27480": 1117048320.0, "27485": 1117048320.0, "27490": 1117048320.0, "27495": 1117048320.0, "27500": 1117048320.0, "27505": 1117048320.0, "27510": 1117048320.0, "27515": 1117048320.0, "27520": 1117048320.0, "27525": 1117048320.0, "27530": 1117048320.0, "27535": 1117048320.0, "27540": 1117048320.0, "27545": 1117048320.0, "27550": 1117048320.0, "27555": 1117048320.0, "27560": 1117048320.0, "27565": 1117048320.0, "27570": 1117048320.0, "27575": 1117048320.0, "27580": 1117048320.0, "27585": 1117048320.0, "27590": 1117048320.0, "27595": 1117048320.0, "27600": 1117048320.0, "27605": 1117048320.0, "27610": 1117048320.0, "27615": 1117048320.0, "27620": 1117048320.0, "27625": 1117048320.0, "27630": 1117048320.0, "27635": 1117048320.0, "27640": 1117048320.0, "27645": 1117048320.0, "27650": 1117048320.0, "27655": 1117048320.0, "27660": 1117048320.0, "27665": 1117048320.0, "27670": 1117048320.0, "27675": 1117048320.0, "27680": 1117048320.0, "27685": 1117048320.0, "27690": 1117048320.0, "27695": 1117048320.0, "27700": 1117048320.0, "27705": 1117048320.0, "27710": 1117048320.0, "27715": 1117048320.0, "27720": 1117048320.0, "27725": 1117048320.0, "27730": 1117048320.0, "27735": 1117048320.0, "27740": 1117048320.0, "27745": 1117048320.0, "27750": 1117048320.0, "27755": 1117048320.0, "27760": 1117048320.0, "27765": 1117048320.0, "27770": 1117048320.0, "27775": 1117048320.0, "27780": 1117048320.0, "27785": 1117048320.0, "27790": 1117048320.0, "27795": 1117048320.0, "27800": 1117048320.0, "27805": 1117048320.0, "27810": 1117048320.0, "27815": 1117048320.0, "27820": 1117048320.0, "27825": 1117048320.0, "27830": 1117048320.0, "27835": 1117048320.0, "27840": 1117048320.0, "27845": 1117048320.0, "27850": 1117048320.0, "27855": 1117048320.0, "27860": 1117048320.0, "27865": 1117048320.0, "27870": 1117048320.0, "27875": 1117048320.0, "27880": 1117048320.0, "27885": 1117048320.0, "27890": 1117048320.0, "27895": 1117048320.0, "27900": 1117048320.0, "27905": 1117048320.0, "27910": 1117048320.0, "27915": 1117048320.0, "27920": 1117048320.0, "27925": 1117048320.0, "27930": 1117048320.0, "27935": 1117048320.0, "27940": 1117048320.0, "27945": 1117048320.0, "27950": 1117048320.0, "27955": 1117048320.0, "27960": 1117048320.0, "27965": 1117048320.0, "27970": 1117048320.0, "27975": 1117048320.0, "27980": 1117048320.0, "27985": 1117048320.0, "27990": 1117048320.0, "27995": 1117048320.0, "28000": 1117048320.0, "28005": 1117048320.0, "28010": 1117048320.0, "28015": 1117048320.0, "28020": 1117048320.0, "28025": 1117048320.0, "28030": 1117048320.0, "28035": 1117048320.0, "28040": 1117048320.0, "28045": 1117048320.0, "28050": 1117048320.0, "28055": 1117048320.0, "28060": 1117048320.0, "28065": 1117048320.0, "28070": 1117048320.0, "28075": 1117048320.0, "28080": 1117048320.0, "28085": 1117048320.0, "28090": 1117048320.0, "28095": 1117048320.0, "28100": 1117048320.0, "28105": 1117048320.0, "28110": 1117048320.0, "28115": 1117048320.0, "28120": 1117048320.0, "28125": 1117048320.0, "28130": 1117048320.0, "28135": 1117048320.0, "28140": 1117048320.0, "28145": 1117048320.0, "28150": 1117048320.0, "28155": 1117048320.0, "28160": 1117048320.0, "28165": 1117048320.0, "28170": 1117048320.0, "28175": 1117048320.0, "28180": 1117048320.0, "28185": 1117048320.0, "28190": 1117048320.0, "28195": 1117048320.0, "28200": 1117048320.0, "28205": 1117048320.0, "28210": 1117048320.0, "28215": 1117048320.0, "28220": 1117048320.0, "28225": 1117048320.0, "28230": 1117048320.0, "28235": 1117048320.0, "28240": 1117048320.0, "28245": 
1117048320.0, "28250": 1117048320.0, "28255": 1117048320.0, "28260": 1117048320.0, "28265": 1117048320.0, "28270": 1117048320.0, "28275": 1117048320.0, "28280": 1117048320.0, "28285": 1117048320.0, "28290": 1117048320.0, "28295": 1117048320.0, "28300": 1117048320.0, "28305": 1117048320.0, "28310": 1117048320.0, "28315": 1117048320.0, "28320": 1117048320.0, "28325": 1117048320.0, "28330": 1117048320.0, "28335": 1117048320.0, "28340": 1117048320.0, "28345": 1117048320.0, "28350": 1117048320.0, "28355": 1117048320.0, "28360": 1117048320.0, "28365": 1117048320.0, "28370": 1117048320.0, "28375": 1117048320.0, "28380": 1117048320.0, "28385": 1117048320.0, "28390": 1117048320.0, "28395": 1117048320.0, "28400": 1117048320.0, "28405": 1117048320.0, "28410": 1117048320.0, "28415": 1117048320.0, "28420": 1117048320.0, "28425": 1117048320.0, "28430": 1117048320.0, "28435": 1117048320.0, "28440": 1117048320.0, "28445": 1117048320.0, "28450": 1117048320.0, "28455": 1117048320.0, "28460": 1117048320.0, "28465": 1117048320.0, "28470": 1117048320.0, "28475": 1117048320.0, "28480": 1117048320.0, "28485": 1117048320.0, "28490": 1117048320.0, "28495": 1117048320.0, "28500": 1117048320.0, "28505": 1117048320.0, "28510": 1117048320.0, "28515": 1117048320.0, "28520": 1117048320.0, "28525": 1117048320.0, "28530": 1117048320.0, "28535": 1117048320.0, "28540": 1117048320.0, "28545": 1117048320.0, "28550": 1117048320.0, "28555": 1117048320.0, "28560": 1117048320.0, "28565": 1117048320.0, "28570": 1117048320.0, "28575": 1117048320.0, "28580": 1117048320.0, "28585": 1117048320.0, "28590": 1117048320.0, "28595": 1117048320.0, "28600": 1117048320.0, "28605": 1117048320.0, "28610": 1117048320.0, "28615": 1117048320.0, "28620": 1117048320.0, "28625": 1117048320.0, "28630": 1117048320.0, "28635": 1117048320.0, "28640": 1117048320.0, "28645": 1117048320.0, "28650": 1117048320.0, "28655": 1117048320.0, "28660": 1117048320.0, "28665": 1117048320.0, "28670": 1117048320.0, "28675": 1117048320.0, "28680": 1117048320.0, "28685": 1117048320.0, "28690": 1117048320.0, "28695": 1117048320.0, "28700": 1117048320.0, "28705": 1117048320.0, "28710": 1117048320.0, "28715": 1117048320.0, "28720": 1117048320.0, "28725": 1117048320.0, "28730": 1117048320.0, "28735": 1117048320.0, "28740": 1117048320.0, "28745": 1117048320.0, "28750": 1117048320.0, "28755": 1117048320.0, "28760": 1117048320.0, "28765": 1117048320.0, "28770": 1117048320.0, "28775": 1117048320.0, "28780": 1117048320.0, "28785": 1117048320.0, "28790": 1117048320.0, "28795": 1117048320.0, "28800": 1117048320.0, "28805": 1117048320.0, "28810": 1117048320.0, "28815": 1117048320.0, "28820": 1117048320.0, "28825": 1117048320.0, "28830": 1117048320.0, "28835": 1117048320.0, "28840": 1117048320.0, "28845": 1117048320.0, "28850": 1117048320.0, "28855": 1117048320.0, "28860": 1117048320.0, "28865": 1117048320.0, "28870": 1117048320.0, "28875": 1117048320.0, "28880": 1117048320.0, "28885": 1117048320.0, "28890": 1117048320.0, "28895": 1117048320.0, "28900": 1117048320.0, "28905": 1117048320.0, "28910": 1117048320.0, "28915": 1117048320.0, "28920": 1117048320.0, "28925": 1117048320.0, "28930": 1117048320.0, "28935": 1117048320.0, "28940": 1117048320.0, "28945": 1117048320.0, "28950": 1117048320.0, "28955": 1117048320.0, "28960": 1117048320.0, "28965": 1117048320.0, "28970": 1117048320.0, "28975": 1117048320.0, "28980": 1117048320.0, "28985": 1117048320.0, "28990": 1117048320.0, "28995": 1117048320.0, "29000": 1117048320.0, "29005": 1117048320.0, "29010": 1117048320.0, "29015": 
1117048320.0, "29020": 1117048320.0, "29025": 1117048320.0, "29030": 1117048320.0, "29035": 1117048320.0, "29040": 1117048320.0, "29045": 1117048320.0, "29050": 1117048320.0, "29055": 1117048320.0, "29060": 1117048320.0, "29065": 1117048320.0, "29070": 1117048320.0, "29075": 1117048320.0, "29080": 1117048320.0, "29085": 1117048320.0, "29090": 1117048320.0, "29095": 1117048320.0, "29100": 1117048320.0, "29105": 1117048320.0, "29110": 1117048320.0, "29115": 1117048320.0, "29120": 1117048320.0, "29125": 1117048320.0, "29130": 1117048320.0, "29135": 1117048320.0, "29140": 1117048320.0, "29145": 1117048320.0, "29150": 1117048320.0, "29155": 1117048320.0, "29160": 1117048320.0, "29165": 1117048320.0, "29170": 1117048320.0, "29175": 1117048320.0, "29180": 1117048320.0, "29185": 1117048320.0, "29190": 1117048320.0, "29195": 1117048320.0, "29200": 1117048320.0, "29205": 1117048320.0, "29210": 1117048320.0, "29215": 1117048320.0, "29220": 1117048320.0, "29225": 1117048320.0, "29230": 1117048320.0, "29235": 1117048320.0, "29240": 1117048320.0, "29245": 1117048320.0, "29250": 1117048320.0, "29255": 1117048320.0, "29260": 1117048320.0, "29265": 1117048320.0, "29270": 1117048320.0, "29275": 1117048320.0, "29280": 1117048320.0, "29285": 1117048320.0, "29290": 1117048320.0, "29295": 1117048320.0, "29300": 1117048320.0, "29305": 1117048320.0, "29310": 1117048320.0, "29315": 1117048320.0, "29320": 1117048320.0, "29325": 1117048320.0, "29330": 1117048320.0, "29335": 1117048320.0, "29340": 1117048320.0, "29345": 1117048320.0, "29350": 1117048320.0, "29355": 1117048320.0, "29360": 1117048320.0, "29365": 1117048320.0, "29370": 1117048320.0, "29375": 1117048320.0, "29380": 1117048320.0, "29385": 1117048320.0, "29390": 1117048320.0, "29395": 1117048320.0, "29400": 1117048320.0, "29405": 1117048320.0, "29410": 1117048320.0, "29415": 1117048320.0, "29420": 1117048320.0, "29425": 1117048320.0, "29430": 1117048320.0, "29435": 1117048320.0, "29440": 1117048320.0, "29445": 1117048320.0, "29450": 1117048320.0, "29455": 1117048320.0, "29460": 1117048320.0, "29465": 1117048320.0, "29470": 1117048320.0, "29475": 1117048320.0, "29480": 1117048320.0, "29485": 1117048320.0, "29490": 1117048320.0, "29495": 1117048320.0, "29500": 1117048320.0, "29505": 1117048320.0, "29510": 1117048320.0, "29515": 1117048320.0, "29520": 1117048320.0, "29525": 1117048320.0, "29530": 1117048320.0, "29535": 1117048320.0, "29540": 1117048320.0, "29545": 1117048320.0, "29550": 1117048320.0, "29555": 1117048320.0, "29560": 1117048320.0, "29565": 1117048320.0, "29570": 1117048320.0, "29575": 1117048320.0, "29580": 1117048320.0, "29585": 1117048320.0, "29590": 1117048320.0, "29595": 1117048320.0, "29600": 1117048320.0, "29605": 1117048320.0, "29610": 1117048320.0, "29615": 1117048320.0, "29620": 1117048320.0, "29625": 1117048320.0, "29630": 1117048320.0, "29635": 1117048320.0, "29640": 1117048320.0, "29645": 1117048320.0, "29650": 1117048320.0, "29655": 1117048320.0, "29660": 1117048320.0, "29665": 1117048320.0, "29670": 1117048320.0, "29675": 1117048320.0, "29680": 1117048320.0, "29685": 1117048320.0, "29690": 1117048320.0, "29695": 1117048320.0, "29700": 1117048320.0, "29705": 1117048320.0, "29710": 1117048320.0, "29715": 1117048320.0, "29720": 1117048320.0, "29725": 1117048320.0, "29730": 1117048320.0, "29735": 1117048320.0, "29740": 1117048320.0, "29745": 1117048320.0, "29750": 1117048320.0, "29755": 1117048320.0, "29760": 1117048320.0, "29765": 1117048320.0, "29770": 1117048320.0, "29775": 1117048320.0, "29780": 1117048320.0, "29785": 
1117048320.0, "29790": 1117048320.0, "29795": 1117048320.0, "29800": 1117048320.0, "29805": 1117048320.0, "29810": 1117048320.0, "29815": 1117048320.0, "29820": 1117048320.0, "29825": 1117048320.0, "29830": 1117048320.0, "29835": 1117048320.0, "29840": 1117048320.0, "29845": 1117048320.0, "29850": 1117048320.0, "29855": 1117048320.0, "29860": 1117048320.0, "29865": 1117048320.0, "29870": 1117048320.0, "29875": 1117048320.0, "29880": 1117048320.0, "29885": 1117048320.0, "29890": 1117048320.0, "29895": 1117048320.0, "29900": 1117048320.0, "29905": 1117048320.0, "29910": 1117048320.0, "29915": 1117048320.0, "29920": 1117048320.0, "29925": 1117048320.0, "29930": 1117048320.0, "29935": 1117048320.0, "29940": 1117048320.0, "29945": 1117048320.0, "29950": 1117048320.0, "29955": 1117048320.0, "29960": 1117048320.0, "29965": 1117048320.0, "29970": 1117048320.0, "29975": 1117048320.0, "29980": 1117048320.0, "29985": 1117048320.0, "29990": 1117048320.0, "29995": 1117048320.0, "30000": 1117048320.0, "30005": 1117048320.0, "30010": 1117048320.0, "30015": 1117048320.0, "30020": 1117048320.0, "30025": 1117048320.0, "30030": 1117048320.0, "30035": 1117048320.0, "30040": 1117048320.0, "30045": 1117048320.0, "30050": 1117048320.0, "30055": 1117048320.0, "30060": 1117048320.0, "30065": 1117048320.0, "30070": 1117048320.0, "30075": 1117048320.0, "30080": 1117048320.0, "30085": 1117048320.0, "30090": 1117048320.0, "30095": 1117048320.0, "30100": 1117048320.0, "30105": 1117048320.0, "30110": 1117048320.0, "30115": 1117048320.0, "30120": 1117048320.0, "30125": 1117048320.0, "30130": 1117048320.0, "30135": 1117048320.0, "30140": 1117048320.0, "30145": 1117048320.0, "30150": 1117048320.0, "30155": 1117048320.0, "30160": 1117048320.0, "30165": 1117048320.0, "30170": 1117048320.0, "30175": 1117048320.0, "30180": 1117048320.0, "30185": 1117048320.0, "30190": 1117048320.0, "30195": 1117048320.0, "30200": 1117048320.0, "30205": 1117048320.0, "30210": 1117048320.0, "30215": 1117048320.0, "30220": 1117048320.0, "30225": 1117048320.0, "30230": 1117048320.0, "30235": 1117048320.0, "30240": 1117048320.0, "30245": 1117048320.0, "30250": 1117048320.0, "30255": 1117048320.0, "30260": 1117048320.0, "30265": 1117048320.0, "30270": 1117048320.0, "30275": 1117048320.0, "30280": 1117048320.0, "30285": 1117048320.0, "30290": 1117048320.0, "30295": 1117048320.0, "30300": 1117048320.0, "30305": 1117048320.0, "30310": 1117048320.0, "30315": 1117048320.0, "30320": 1117048320.0, "30325": 1117048320.0, "30330": 1117048320.0, "30335": 1117048320.0, "30340": 1117048320.0, "30345": 1117048320.0, "30350": 1117048320.0, "30355": 1117048320.0, "30360": 1117048320.0, "30365": 1117048320.0, "30370": 1117048320.0, "30375": 1117048320.0, "30380": 1117048320.0, "30385": 1117048320.0, "30390": 1117048320.0, "30395": 1117048320.0, "30400": 1117048320.0, "30405": 1117048320.0, "30410": 1117048320.0, "30415": 1117048320.0, "30420": 1117048320.0, "30425": 1117048320.0, "30430": 1117048320.0, "30435": 1117048320.0, "30440": 1117048320.0, "30445": 1117048320.0, "30450": 1117048320.0, "30455": 1117048320.0, "30460": 1117048320.0, "30465": 1117048320.0, "30470": 1117048320.0, "30475": 1117048320.0, "30480": 1117048320.0, "30485": 1117048320.0, "30490": 1117048320.0, "30495": 1117048320.0, "30500": 1117048320.0, "30505": 1117048320.0, "30510": 1117048320.0, "30515": 1117048320.0, "30520": 1117048320.0, "30525": 1117048320.0, "30530": 1117048320.0, "30535": 1117048320.0, "30540": 1117048320.0, "30545": 1117048320.0, "30550": 1117048320.0, "30555": 
1117048320.0, "30560": 1117048320.0, "30565": 1117048320.0, "30570": 1117048320.0, "30575": 1117048320.0, "30580": 1117048320.0, "30585": 1117048320.0, "30590": 1117048320.0, "30595": 1117048320.0, "30600": 1117048320.0, "30605": 1117048320.0, "30610": 1117048320.0, "30615": 1117048320.0, "30620": 1117048320.0, "30625": 1117048320.0, "30630": 1117048320.0, "30635": 1117048320.0, "30640": 1117048320.0, "30645": 1117048320.0, "30650": 1117048320.0, "30655": 1117048320.0, "30660": 1117048320.0, "30665": 1117048320.0, "30670": 1117048320.0, "30675": 1117048320.0, "30680": 1117048320.0, "30685": 1117048320.0, "30690": 1117048320.0, "30695": 1117048320.0, "30700": 1117048320.0, "30705": 1117048320.0, "30710": 1117048320.0, "30715": 1117048320.0, "30720": 1117048320.0, "30725": 1117048320.0, "30730": 1117048320.0, "30735": 1117048320.0, "30740": 1117048320.0, "30745": 1117048320.0, "30750": 1117048320.0, "30755": 1117048320.0, "30760": 1117048320.0, "30765": 1117048320.0, "30770": 1117048320.0, "30775": 1117048320.0, "30780": 1117048320.0, "30785": 1117048320.0, "30790": 1117048320.0, "30795": 1117048320.0, "30800": 1117048320.0, "30805": 1117048320.0, "30810": 1117048320.0, "30815": 1117048320.0, "30820": 1117048320.0, "30825": 1117048320.0, "30830": 1117048320.0, "30835": 1117048320.0, "30840": 1117048320.0, "30845": 1117048320.0, "30850": 1117048320.0, "30855": 1117048320.0, "30860": 1117048320.0, "30865": 1117048320.0, "30870": 1117048320.0, "30875": 1117048320.0, "30880": 1117048320.0, "30885": 1117048320.0, "30890": 1117048320.0, "30895": 1117048320.0, "30900": 1117048320.0, "30905": 1117048320.0, "30910": 1117048320.0, "30915": 1117048320.0, "30920": 1117048320.0, "30925": 1117048320.0, "30930": 1117048320.0, "30935": 1117048320.0, "30940": 1117048320.0, "30945": 1117048320.0, "30950": 1117048320.0, "30955": 1117048320.0, "30960": 1117048320.0, "30965": 1117048320.0, "30970": 1117048320.0, "30975": 1117048320.0, "30980": 1117048320.0, "30985": 1117048320.0, "30990": 1117048320.0, "30995": 1117048320.0, "31000": 1117048320.0, "31005": 1117048320.0, "31010": 1117048320.0, "31015": 1117048320.0, "31020": 1117048320.0, "31025": 1117048320.0, "31030": 1117048320.0, "31035": 1117048320.0, "31040": 1117048320.0, "31045": 1117048320.0, "31050": 1117048320.0, "31055": 1117048320.0, "31060": 1117048320.0, "31065": 1117048320.0, "31070": 1117048320.0, "31075": 1117048320.0, "31080": 1117048320.0, "31085": 1117048320.0, "31090": 1117048320.0, "31095": 1117048320.0, "31100": 1117048320.0, "31105": 1117048320.0, "31110": 1117048320.0, "31115": 1117048320.0, "31120": 1117048320.0, "31125": 1117048320.0, "31130": 1117048320.0, "31135": 1117048320.0, "31140": 1117048320.0, "31145": 1117048320.0, "31150": 1117048320.0, "31155": 1117048320.0, "31160": 1117048320.0, "31165": 1117048320.0, "31170": 1117048320.0, "31175": 1117048320.0, "31180": 1117048320.0, "31185": 1117048320.0, "31190": 1117048320.0, "31195": 1117048320.0, "31200": 1117048320.0, "31205": 1117048320.0, "31210": 1117048320.0, "31215": 1117048320.0, "31220": 1117048320.0, "31225": 1117048320.0, "31230": 1117048320.0, "31235": 1117048320.0, "31240": 1117048320.0, "31245": 1117048320.0, "31250": 1117048320.0, "31255": 1117048320.0, "31260": 1117048320.0, "31265": 1117048320.0, "31270": 1117048320.0, "31275": 1117048320.0, "31280": 1117048320.0, "31285": 1117048320.0, "31290": 1117048320.0, "31295": 1117048320.0, "31300": 1117048320.0, "31305": 1117048320.0, "31310": 1117048320.0, "31315": 1117048320.0, "31320": 1117048320.0, "31325": 
1117048320.0, "31330": 1117048320.0, "31335": 1117048320.0, "31340": 1117048320.0, "31345": 1117048320.0, "31350": 1117048320.0, "31355": 1117048320.0, "31360": 1117048320.0, "31365": 1117048320.0, "31370": 1117048320.0, "31375": 1117048320.0, "31380": 1117048320.0, "31385": 1117048320.0, "31390": 1117048320.0, "31395": 1117048320.0, "31400": 1117048320.0, "31405": 1117048320.0, "31410": 1117048320.0, "31415": 1117048320.0, "31420": 1117048320.0, "31425": 1117048320.0, "31430": 1117048320.0, "31435": 1117048320.0, "31440": 1117048320.0, "31445": 1117048320.0, "31450": 1117048320.0, "31455": 1117048320.0, "31460": 1117048320.0, "31465": 1117048320.0, "31470": 1117048320.0, "31475": 1117048320.0, "31480": 1117048320.0, "31485": 1117048320.0, "31490": 1117048320.0, "31495": 1117048320.0, "31500": 1117048320.0, "31505": 1117048320.0, "31510": 1117048320.0, "31515": 1117048320.0, "31520": 1117048320.0, "31525": 1117048320.0, "31530": 1117048320.0, "31535": 1117048320.0, "31540": 1117048320.0, "31545": 1117048320.0, "31550": 1117048320.0, "31555": 1117048320.0, "31560": 1117048320.0, "31565": 1117048320.0, "31570": 1117048320.0, "31575": 1117048320.0, "31580": 1117048320.0, "31585": 1117048320.0, "31590": 1117048320.0, "31595": 1117048320.0, "31600": 1117048320.0, "31605": 1117048320.0, "31610": 1117048320.0, "31615": 1117048320.0, "31620": 1117048320.0, "31625": 1117048320.0, "31630": 1117048320.0, "31635": 1117048320.0, "31640": 1117048320.0, "31645": 1117048320.0, "31650": 1117048320.0, "31655": 1117048320.0, "31660": 1117048320.0, "31665": 1117048320.0, "31670": 1117048320.0, "31675": 1117048320.0, "31680": 1117048320.0, "31685": 1117048320.0, "31690": 1117048320.0, "31695": 1117048320.0, "31700": 1117048320.0, "31705": 1117048320.0, "31710": 1117048320.0, "31715": 1117048320.0, "31720": 1117048320.0, "31725": 1117048320.0, "31730": 1117048320.0, "31735": 1117048320.0, "31740": 1117048320.0, "31745": 1117048320.0, "31750": 1117048320.0, "31755": 1117048320.0, "31760": 1117048320.0, "31765": 1117048320.0, "31770": 1117048320.0, "31775": 1117048320.0, "31780": 1117048320.0, "31785": 1117048320.0, "31790": 1117048320.0, "31795": 1117048320.0, "31800": 1117048320.0, "31805": 1117048320.0, "31810": 1117048320.0, "31815": 1117048320.0, "31820": 1117048320.0, "31825": 1117048320.0, "31830": 1117048320.0, "31835": 1117048320.0, "31840": 1117048320.0, "31845": 1117048320.0, "31850": 1117048320.0, "31855": 1117048320.0, "31860": 1117048320.0, "31865": 1117048320.0, "31870": 1117048320.0, "31875": 1117048320.0, "31880": 1117048320.0, "31885": 1117048320.0, "31890": 1117048320.0, "31895": 1117048320.0, "31900": 1117048320.0, "31905": 1117048320.0, "31910": 1117048320.0, "31915": 1117048320.0, "31920": 1117048320.0, "31925": 1117048320.0, "31930": 1117048320.0, "31935": 1117048320.0, "31940": 1117048320.0, "31945": 1117048320.0, "31950": 1117048320.0, "31955": 1117048320.0, "31960": 1117048320.0, "31965": 1117048320.0, "31970": 1117048320.0, "31975": 1117048320.0, "31980": 1117048320.0, "31985": 1117048320.0, "31990": 1117048320.0, "31995": 1117048320.0, "32000": 1117048320.0, "32005": 1117048320.0, "32010": 1117048320.0, "32015": 1117048320.0, "32020": 1117048320.0, "32025": 1117048320.0, "32030": 1117048320.0, "32035": 1117048320.0, "32040": 1117048320.0, "32045": 1117048320.0, "32050": 1117048320.0, "32055": 1117048320.0, "32060": 1117048320.0, "32065": 1117048320.0, "32070": 1117048320.0, "32075": 1117048320.0, "32080": 1117048320.0, "32085": 1117048320.0, "32090": 1117048320.0, "32095": 
1117048320.0, "32100": 1117048320.0, "32105": 1117048320.0, "32110": 1117048320.0, "32115": 1117048320.0, "32120": 1117048320.0, "32125": 1117048320.0, "32130": 1117048320.0, "32135": 1117048320.0, "32140": 1117048320.0, "32145": 1117048320.0, "32150": 1117048320.0, "32155": 1117048320.0, "32160": 1117048320.0, "32165": 1117048320.0, "32170": 1117048320.0, "32175": 1117048320.0, "32180": 1117048320.0, "32185": 1117048320.0, "32190": 1117048320.0, "32195": 1117048320.0, "32200": 1117048320.0, "32205": 1117048320.0, "32210": 1117048320.0, "32215": 1117048320.0, "32220": 1117048320.0, "32225": 1117048320.0, "32230": 1117048320.0, "32235": 1117048320.0, "32240": 1117048320.0, "32245": 1117048320.0, "32250": 1117048320.0, "32255": 1117048320.0, "32260": 1117048320.0, "32265": 1117048320.0, "32270": 1117048320.0, "32275": 1117048320.0, "32280": 1117048320.0, "32285": 1117048320.0, "32290": 1117048320.0, "32295": 1117048320.0, "32300": 1117048320.0, "32305": 1117048320.0, "32310": 1117048320.0, "32315": 1117048320.0, "32320": 1117048320.0, "32325": 1117048320.0, "32330": 1117048320.0, "32335": 1117048320.0, "32340": 1117048320.0, "32345": 1117048320.0, "32350": 1117048320.0, "32355": 1117048320.0, "32360": 1117048320.0, "32365": 1117048320.0, "32370": 1117048320.0, "32375": 1117048320.0, "32380": 1117048320.0, "32385": 1117048320.0, "32390": 1117048320.0, "32395": 1117048320.0, "32400": 1117048320.0, "32405": 1117048320.0, "32410": 1117048320.0, "32415": 1117048320.0, "32420": 1117048320.0, "32425": 1117048320.0, "32430": 1117048320.0, "32435": 1117048320.0, "32440": 1117048320.0, "32445": 1117048320.0, "32450": 1117048320.0, "32455": 1117048320.0, "32460": 1117048320.0, "32465": 1117048320.0, "32470": 1117048320.0, "32475": 1117048320.0, "32480": 1117048320.0, "32485": 1117048320.0, "32490": 1117048320.0, "32495": 1117048320.0, "32500": 1117048320.0, "32505": 1117048320.0, "32510": 1117048320.0, "32515": 1117048320.0, "32520": 1117048320.0, "32525": 1117048320.0, "32530": 1117048320.0, "32535": 1117048320.0, "32540": 1117048320.0, "32545": 1117048320.0, "32550": 1117048320.0, "32555": 1117048320.0, "32560": 1117048320.0, "32565": 1117048320.0, "32570": 1117048320.0, "32575": 1117048320.0, "32580": 1117048320.0, "32585": 1117048320.0, "32590": 1117048320.0, "32595": 1117048320.0, "32600": 1117048320.0, "32605": 1117048320.0, "32610": 1117048320.0, "32615": 1117048320.0, "32620": 1117048320.0, "32625": 1117048320.0, "32630": 1117048320.0, "32635": 1117048320.0, "32640": 1117048320.0, "32645": 1117048320.0, "32650": 1117048320.0, "32655": 1117048320.0, "32660": 1117048320.0, "32665": 1117048320.0, "32670": 1117048320.0, "32675": 1117048320.0, "32680": 1117048320.0, "32685": 1117048320.0, "32690": 1117048320.0, "32695": 1117048320.0, "32700": 1117048320.0, "32705": 1117048320.0, "32710": 1117048320.0, "32715": 1117048320.0, "32720": 1117048320.0, "32725": 1117048320.0, "32730": 1117048320.0, "32735": 1117048320.0, "32740": 1117048320.0, "32745": 1117048320.0, "32750": 1117048320.0, "32755": 1117048320.0, "32760": 1117048320.0, "32765": 1117048320.0, "32770": 1117048320.0, "32775": 1117048320.0, "32780": 1117048320.0, "32785": 1117048320.0, "32790": 1117048320.0, "32795": 1117048320.0, "32800": 1117048320.0, "32805": 1117048320.0, "32810": 1117048320.0, "32815": 1117048320.0, "32820": 1117048320.0, "32825": 1117048320.0, "32830": 1117048320.0, "32835": 1117048320.0, "32840": 1117048320.0, "32845": 1117048320.0, "32850": 1117048320.0, "32855": 1117048320.0, "32860": 1117048320.0, "32865": 
1117048320.0, "32870": 1117048320.0, "32875": 1117048320.0, "32880": 1117048320.0, "32885": 1117048320.0, "32890": 1117048320.0, "32895": 1117048320.0, "32900": 1117048320.0, "32905": 1117048320.0, "32910": 1117048320.0, "32915": 1117048320.0, "32920": 1117048320.0, "32925": 1117048320.0, "32930": 1117048320.0, "32935": 1117048320.0, "32940": 1117048320.0, "32945": 1117048320.0, "32950": 1117048320.0, "32955": 1117048320.0, "32960": 1117048320.0, "32965": 1117048320.0, "32970": 1117048320.0, "32975": 1117048320.0, "32980": 1117048320.0, "32985": 1117048320.0, "32990": 1117048320.0, "32995": 1117048320.0, "33000": 1117048320.0, "33005": 1117048320.0, "33010": 1117048320.0, "33015": 1117048320.0, "33020": 1117048320.0, "33025": 1117048320.0, "33030": 1117048320.0, "33035": 1117048320.0, "33040": 1117048320.0, "33045": 1117048320.0, "33050": 1117048320.0, "33055": 1117048320.0, "33060": 1117048320.0, "33065": 1117048320.0, "33070": 1117048320.0, "33075": 1117048320.0, "33080": 1117048320.0, "33085": 1117048320.0, "33090": 1117048320.0, "33095": 1117048320.0, "33100": 1117048320.0, "33105": 1117048320.0, "33110": 1117048320.0, "33115": 1117048320.0, "33120": 1117048320.0, "33125": 1117048320.0, "33130": 1117048320.0, "33135": 1117048320.0, "33140": 1117048320.0, "33145": 1117048320.0, "33150": 1117048320.0, "33155": 1117048320.0, "33160": 1117048320.0, "33165": 1117048320.0, "33170": 1117048320.0, "33175": 1117048320.0, "33180": 1117048320.0, "33185": 1117048320.0, "33190": 1117048320.0, "33195": 1117048320.0, "33200": 1117048320.0, "33205": 1117048320.0, "33210": 1117048320.0, "33215": 1117048320.0, "33220": 1117048320.0, "33225": 1117048320.0, "33230": 1117048320.0, "33235": 1117048320.0, "33240": 1117048320.0, "33245": 1117048320.0, "33250": 1117048320.0, "33255": 1117048320.0, "33260": 1117048320.0, "33265": 1117048320.0, "33270": 1117048320.0, "33275": 1117048320.0, "33280": 1117048320.0, "33285": 1117048320.0, "33290": 1117048320.0, "33295": 1117048320.0, "33300": 1117048320.0, "33305": 1117048320.0, "33310": 1117048320.0, "33315": 1117048320.0, "33320": 1117048320.0, "33325": 1117048320.0, "33330": 1117048320.0, "33335": 1117048320.0, "33340": 1117048320.0, "33345": 1117048320.0, "33350": 1117048320.0, "33355": 1117048320.0, "33360": 1117048320.0, "33365": 1117048320.0, "33370": 1117048320.0, "33375": 1117048320.0, "33380": 1117048320.0, "33385": 1117048320.0, "33390": 1117048320.0, "33395": 1117048320.0, "33400": 1117048320.0, "33405": 1117048320.0, "33410": 1117048320.0, "33415": 1117048320.0, "33420": 1117048320.0, "33425": 1117048320.0, "33430": 1117048320.0, "33435": 1117048320.0, "33440": 1117048320.0, "33445": 1117048320.0, "33450": 1117048320.0, "33455": 1117048320.0, "33460": 1117048320.0, "33465": 1117048320.0, "33470": 1117048320.0, "33475": 1117048320.0, "33480": 1117048320.0, "33485": 1117048320.0, "33490": 1117048320.0, "33495": 1117048320.0, "33500": 1117048320.0, "33505": 1117048320.0, "33510": 1117048320.0, "33515": 1117048320.0, "33520": 1117048320.0, "33525": 1117048320.0, "33530": 1117048320.0, "33535": 1117048320.0, "33540": 1117048320.0, "33545": 1117048320.0, "33550": 1117048320.0, "33555": 1117048320.0, "33560": 1117048320.0, "33565": 1117048320.0, "33570": 1117048320.0, "33575": 1117048320.0, "33580": 1117048320.0, "33585": 1117048320.0, "33590": 1117048320.0, "33595": 1117048320.0, "33600": 1117048320.0, "33605": 1117048320.0, "33610": 1117048320.0, "33615": 1117048320.0, "33620": 1117048320.0, "33625": 1117048320.0, "33630": 1117048320.0, "33635": 
1117048320.0, "33640": 1117048320.0, "33645": 1117048320.0, "33650": 1117048320.0, "33655": 1117048320.0, "33660": 1117048320.0, "33665": 1117048320.0, "33670": 1117048320.0, "33675": 1117048320.0, "33680": 1117048320.0, "33685": 1117048320.0, "33690": 1117048320.0, "33695": 1117048320.0, "33700": 1117048320.0, "33705": 1117048320.0, "33710": 1117048320.0, "33715": 1117048320.0, "33720": 1117048320.0, "33725": 1117048320.0, "33730": 1117048320.0, "33735": 1117048320.0, "33740": 1117048320.0, "33745": 1117048320.0, "33750": 1117048320.0, "33755": 1117048320.0, "33760": 1117048320.0, "33765": 1117048320.0, "33770": 1117048320.0, "33775": 1117048320.0, "33780": 1117048320.0, "33785": 1117048320.0, "33790": 1117048320.0, "33795": 1117048320.0, "33800": 1117048320.0, "33805": 1117048320.0, "33810": 1117048320.0, "33815": 1117048320.0, "33820": 1117048320.0, "33825": 1117048320.0, "33830": 1117048320.0, "33835": 1117048320.0, "33840": 1117048320.0, "33845": 1117048320.0, "33850": 1117048320.0, "33855": 1117048320.0, "33860": 1117048320.0, "33865": 1117048320.0, "33870": 1117048320.0, "33875": 1117048320.0, "33880": 1117048320.0, "33885": 1117048320.0, "33890": 1117048320.0, "33895": 1117048320.0, "33900": 1117048320.0, "33905": 1117048320.0, "33910": 1117048320.0, "33915": 1117048320.0, "33920": 1117048320.0, "33925": 1117048320.0, "33930": 1117048320.0, "33935": 1117048320.0, "33940": 1117048320.0, "33945": 1117048320.0, "33950": 1117048320.0, "33955": 1117048320.0, "33960": 1117048320.0, "33965": 1117048320.0, "33970": 1117048320.0, "33975": 1117048320.0, "33980": 1117048320.0, "33985": 1117048320.0, "33990": 1117048320.0, "33995": 1117048320.0, "34000": 1117048320.0, "34005": 1117048320.0, "34010": 1117048320.0, "34015": 1117048320.0, "34020": 1117048320.0, "34025": 1117048320.0, "34030": 1117048320.0, "34035": 1117048320.0, "34040": 1117048320.0, "34045": 1117048320.0, "34050": 1117048320.0, "34055": 1117048320.0, "34060": 1117048320.0, "34065": 1117048320.0, "34070": 1117048320.0, "34075": 1117048320.0, "34080": 1117048320.0, "34085": 1117048320.0, "34090": 1117048320.0, "34095": 1117048320.0, "34100": 1117048320.0, "34105": 1117048320.0, "34110": 1117048320.0, "34115": 1117048320.0, "34120": 1117048320.0, "34125": 1117048320.0, "34130": 1117048320.0, "34135": 1117048320.0, "34140": 1117048320.0, "34145": 1117048320.0, "34150": 1117048320.0, "34155": 1117048320.0, "34160": 1117048320.0, "34165": 1117048320.0, "34170": 1117048320.0, "34175": 1117048320.0, "34180": 1117048320.0, "34185": 1117048320.0, "34190": 1117048320.0, "34195": 1117048320.0, "34200": 1117048320.0, "34205": 1117048320.0, "34210": 1117048320.0, "34215": 1117048320.0, "34220": 1117048320.0, "34225": 1117048320.0, "34230": 1117048320.0, "34235": 1117048320.0, "34240": 1117048320.0, "34245": 1117048320.0, "34250": 1117048320.0, "34255": 1117048320.0, "34260": 1117048320.0, "34265": 1117048320.0, "34270": 1117048320.0, "34275": 1117048320.0, "34280": 1117048320.0, "34285": 1117048320.0, "34290": 1117048320.0, "34295": 1117048320.0, "34300": 1117048320.0, "34305": 1117048320.0, "34310": 1117048320.0, "34315": 1117048320.0, "34320": 1117048320.0, "34325": 1117048320.0, "34330": 1117048320.0, "34335": 1117048320.0, "34340": 1117048320.0, "34345": 1117048320.0, "34350": 1117048320.0, "34355": 1117048320.0, "34360": 1117048320.0, "34365": 1117048320.0, "34370": 1117048320.0, "34375": 1117048320.0, "34380": 1117048320.0, "34385": 1117048320.0, "34390": 1117048320.0, "34395": 1117048320.0, "34400": 1117048320.0, "34405": 
1117048320.0, "34410": 1117048320.0, "34415": 1117048320.0, "34420": 1117048320.0, "34425": 1117048320.0, "34430": 1117048320.0, "34435": 1117048320.0, "34440": 1117048320.0, "34445": 1117048320.0, "34450": 1117048320.0, "34455": 1117048320.0, "34460": 1117048320.0, "34465": 1117048320.0, "34470": 1117048320.0, "34475": 1117048320.0, "34480": 1117048320.0, "34485": 1117048320.0, "34490": 1117048320.0, "34495": 1117048320.0, "34500": 1117048320.0, "34505": 1117048320.0, "34510": 1117048320.0, "34515": 1117048320.0, "34520": 1117048320.0, "34525": 1117048320.0, "34530": 1117048320.0, "34535": 1117048320.0, "34540": 1117048320.0, "34545": 1117048320.0, "34550": 1117048320.0, "34555": 1117048320.0, "34560": 1117048320.0, "34565": 1117048320.0, "34570": 1117048320.0, "34575": 1117048320.0, "34580": 1117048320.0, "34585": 1117048320.0, "34590": 1117048320.0, "34595": 1117048320.0, "34600": 1117048320.0, "34605": 1117048320.0, "34610": 1117048320.0, "34615": 1117048320.0, "34620": 1117048320.0, "34625": 1117048320.0, "34630": 1117048320.0, "34635": 1117048320.0, "34640": 1117048320.0, "34645": 1117048320.0, "34650": 1117048320.0, "34655": 1117048320.0, "34660": 1117048320.0, "34665": 1117048320.0, "34670": 1117048320.0, "34675": 1117048320.0, "34680": 1117048320.0, "34685": 1117048320.0, "34690": 1117048320.0, "34695": 1117048320.0, "34700": 1117048320.0, "34705": 1117048320.0, "34710": 1117048320.0, "34715": 1117048320.0, "34720": 1117048320.0, "34725": 1117048320.0, "34730": 1117048320.0, "34735": 1117048320.0, "34740": 1117048320.0, "34745": 1117048320.0, "34750": 1117048320.0, "34755": 1117048320.0, "34760": 1117048320.0, "34765": 1117048320.0, "34770": 1117048320.0, "34775": 1117048320.0, "34780": 1117048320.0, "34785": 1117048320.0, "34790": 1117048320.0, "34795": 1117048320.0, "34800": 1117048320.0, "34805": 1117048320.0, "34810": 1117048320.0, "34815": 1117048320.0, "34820": 1117048320.0, "34825": 1117048320.0, "34830": 1117048320.0, "34835": 1117048320.0, "34840": 1117048320.0, "34845": 1117048320.0, "34850": 1117048320.0, "34855": 1117048320.0, "34860": 1117048320.0, "34865": 1117048320.0, "34870": 1117048320.0, "34875": 1117048320.0, "34880": 1117048320.0, "34885": 1117048320.0, "34890": 1117048320.0, "34895": 1117048320.0, "34900": 1117048320.0, "34905": 1117048320.0, "34910": 1117048320.0, "34915": 1117048320.0, "34920": 1117048320.0, "34925": 1117048320.0, "34930": 1117048320.0, "34935": 1117048320.0, "34940": 1117048320.0, "34945": 1117048320.0, "34950": 1117048320.0, "34955": 1117048320.0, "34960": 1117048320.0, "34965": 1117048320.0, "34970": 1117048320.0, "34975": 1117048320.0, "34980": 1117048320.0, "34985": 1117048320.0, "34990": 1117048320.0, "34995": 1117048320.0, "35000": 1117048320.0, "35005": 1117048320.0, "35010": 1117048320.0, "35015": 1117048320.0, "35020": 1117048320.0, "35025": 1117048320.0, "35030": 1117048320.0, "35035": 1117048320.0, "35040": 1117048320.0, "35045": 1117048320.0, "35050": 1117048320.0, "35055": 1117048320.0, "35060": 1117048320.0, "35065": 1117048320.0, "35070": 1117048320.0, "35075": 1117048320.0, "35080": 1117048320.0, "35085": 1117048320.0, "35090": 1117048320.0, "35095": 1117048320.0, "35100": 1117048320.0, "35105": 1117048320.0, "35110": 1117048320.0, "35115": 1117048320.0, "35120": 1117048320.0, "35125": 1117048320.0, "35130": 1117048320.0, "35135": 1117048320.0, "35140": 1117048320.0, "35145": 1117048320.0, "35150": 1117048320.0, "35155": 1117048320.0, "35160": 1117048320.0, "35165": 1117048320.0, "35170": 1117048320.0, "35175": 
1117048320.0, "35180": 1117048320.0, "35185": 1117048320.0, "35190": 1117048320.0, "35195": 1117048320.0, "35200": 1117048320.0, "35205": 1117048320.0, "35210": 1117048320.0, "35215": 1117048320.0, "35220": 1117048320.0, "35225": 1117048320.0, "35230": 1117048320.0, "35235": 1117048320.0, "35240": 1117048320.0, "35245": 1117048320.0, "35250": 1117048320.0, "35255": 1117048320.0, "35260": 1117048320.0, "35265": 1117048320.0, "35270": 1117048320.0, "35275": 1117048320.0, "35280": 1117048320.0, "35285": 1117048320.0, "35290": 1117048320.0, "35295": 1117048320.0, "35300": 1117048320.0, "35305": 1117048320.0, "35310": 1117048320.0, "35315": 1117048320.0, "35320": 1117048320.0, "35325": 1117048320.0, "35330": 1117048320.0, "35335": 1117048320.0, "35340": 1117048320.0, "35345": 1117048320.0, "35350": 1117048320.0, "35355": 1117048320.0, "35360": 1117048320.0, "35365": 1117048320.0, "35370": 1117048320.0, "35375": 1117048320.0, "35380": 1117048320.0, "35385": 1117048320.0, "35390": 1117048320.0, "35395": 1117048320.0, "35400": 1117048320.0, "35405": 1117048320.0, "35410": 1117048320.0, "35415": 1117048320.0, "35420": 1117048320.0, "35425": 1117048320.0, "35430": 1117048320.0, "35435": 1117048320.0, "35440": 1117048320.0, "35445": 1117048320.0, "35450": 1117048320.0, "35455": 1117048320.0, "35460": 1117048320.0, "35465": 1117048320.0, "35470": 1117048320.0, "35475": 1117048320.0, "35480": 1117048320.0, "35485": 1117048320.0, "35490": 1117048320.0, "35495": 1117048320.0, "35500": 1117048320.0, "35505": 1117048320.0, "35510": 1117048320.0, "35515": 1117048320.0, "35520": 1117048320.0, "35525": 1117048320.0, "35530": 1117048320.0, "35535": 1117048320.0, "35540": 1117048320.0, "35545": 1117048320.0, "35550": 1117048320.0, "35555": 1117048320.0, "35560": 1117048320.0, "35565": 1117048320.0, "35570": 1117048320.0, "35575": 1117048320.0, "35580": 1117048320.0, "35585": 1117048320.0, "35590": 1117048320.0, "35595": 1117048320.0, "35600": 1117048320.0, "35605": 1117048320.0, "35610": 1117048320.0, "35615": 1117048320.0, "35620": 1117048320.0, "35625": 1117048320.0, "35630": 1117048320.0, "35635": 1117048320.0, "35640": 1117048320.0, "35645": 1117048320.0, "35650": 1117048320.0, "35655": 1117048320.0, "35660": 1117048320.0, "35665": 1117048320.0, "35670": 1117048320.0, "35675": 1117048320.0, "35680": 1117048320.0, "35685": 1117048320.0, "35690": 1117048320.0, "35695": 1117048320.0, "35700": 1117048320.0, "35705": 1117048320.0, "35710": 1117048320.0, "35715": 1117048320.0, "35720": 1117048320.0, "35725": 1117048320.0, "35730": 1117048320.0, "35735": 1117048320.0, "35740": 1117048320.0, "35745": 1117048320.0, "35750": 1117048320.0, "35755": 1117048320.0, "35760": 1117048320.0, "35765": 1117048320.0, "35770": 1117048320.0, "35775": 1117048320.0, "35780": 1117048320.0, "35785": 1117048320.0, "35790": 1117048320.0, "35795": 1117048320.0, "35800": 1117048320.0, "35805": 1117048320.0, "35810": 1117048320.0, "35815": 1117048320.0, "35820": 1117048320.0, "35825": 1117048320.0, "35830": 1117048320.0, "35835": 1117048320.0, "35840": 1117048320.0, "35845": 1117048320.0, "35850": 1117048320.0, "35855": 1117048320.0, "35860": 1117048320.0, "35865": 1117048320.0, "35870": 1117048320.0, "35875": 1117048320.0, "35880": 1117048320.0, "35885": 1117048320.0, "35890": 1117048320.0, "35895": 1117048320.0, "35900": 1117048320.0, "35905": 1117048320.0, "35910": 1117048320.0, "35915": 1117048320.0, "35920": 1117048320.0, "35925": 1117048320.0, "35930": 1117048320.0, "35935": 1117048320.0, "35940": 1117048320.0, "35945": 
1117048320.0, "35950": 1117048320.0, "35955": 1117048320.0, "35960": 1117048320.0, "35965": 1117048320.0, "35970": 1117048320.0, "35975": 1117048320.0, "35980": 1117048320.0, "35985": 1117048320.0, "35990": 1117048320.0, "35995": 1117048320.0, "36000": 1117048320.0, "36005": 1117048320.0, "36010": 1117048320.0, "36015": 1117048320.0, "36020": 1117048320.0, "36025": 1117048320.0, "36030": 1117048320.0, "36035": 1117048320.0, "36040": 1117048320.0, "36045": 1117048320.0, "36050": 1117048320.0, "36055": 1117048320.0, "36060": 1117048320.0, "36065": 1117048320.0, "36070": 1117048320.0, "36075": 1117048320.0, "36080": 1117048320.0, "36085": 1117048320.0, "36090": 1117048320.0, "36095": 1117048320.0, "36100": 1117048320.0, "36105": 1117048320.0, "36110": 1117048320.0, "36115": 1117048320.0, "36120": 1117048320.0, "36125": 1117048320.0, "36130": 1117048320.0, "36135": 1117048320.0, "36140": 1117048320.0, "36145": 1117048320.0, "36150": 1117048320.0, "36155": 1117048320.0, "36160": 1117048320.0, "36165": 1117048320.0, "36170": 1117048320.0, "36175": 1117048320.0, "36180": 1117048320.0, "36185": 1117048320.0, "36190": 1117048320.0, "36195": 1117048320.0, "36200": 1117048320.0, "36205": 1117048320.0, "36210": 1117048320.0, "36215": 1117048320.0, "36220": 1117048320.0, "36225": 1117048320.0, "36230": 1117048320.0, "36235": 1117048320.0, "36240": 1117048320.0, "36245": 1117048320.0, "36250": 1117048320.0, "36255": 1117048320.0, "36260": 1117048320.0, "36265": 1117048320.0, "36270": 1117048320.0, "36275": 1117048320.0, "36280": 1117048320.0, "36285": 1117048320.0, "36290": 1117048320.0, "36295": 1117048320.0, "36300": 1117048320.0, "36305": 1117048320.0, "36310": 1117048320.0, "36315": 1117048320.0, "36320": 1117048320.0, "36325": 1117048320.0, "36330": 1117048320.0, "36335": 1117048320.0, "36340": 1117048320.0, "36345": 1117048320.0, "36350": 1117048320.0, "36355": 1117048320.0, "36360": 1117048320.0, "36365": 1117048320.0, "36370": 1117048320.0, "36375": 1117048320.0, "36380": 1117048320.0, "36385": 1117048320.0, "36390": 1117048320.0, "36395": 1117048320.0, "36400": 1117048320.0, "36405": 1117048320.0, "36410": 1117048320.0, "36415": 1117048320.0, "36420": 1117048320.0, "36425": 1117048320.0, "36430": 1117048320.0, "36435": 1117048320.0, "36440": 1117048320.0, "36445": 1117048320.0, "36450": 1117048320.0, "36455": 1117048320.0, "36460": 1117048320.0, "36465": 1117048320.0, "36470": 1117048320.0, "36475": 1117048320.0, "36480": 1117048320.0, "36485": 1117048320.0, "36490": 1117048320.0, "36495": 1117048320.0, "36500": 1117048320.0, "36505": 1117048320.0, "36510": 1117048320.0, "36515": 1117048320.0, "36520": 1117048320.0, "36525": 1117048320.0, "36530": 1117048320.0, "36535": 1117048320.0, "36540": 1117048320.0, "36545": 1117048320.0, "36550": 1117048320.0, "36555": 1117048320.0, "36560": 1117048320.0, "36565": 1117048320.0, "36570": 1117048320.0, "36575": 1117048320.0, "36580": 1117048320.0, "36585": 1117048320.0, "36590": 1117048320.0, "36595": 1117048320.0, "36600": 1117048320.0, "36605": 1117048320.0, "36610": 1117048320.0, "36615": 1117048320.0, "36620": 1117048320.0, "36625": 1117048320.0, "36630": 1117048320.0, "36635": 1117048320.0, "36640": 1117048320.0, "36645": 1117048320.0, "36650": 1117048320.0, "36655": 1117048320.0, "36660": 1117048320.0, "36665": 1117048320.0, "36670": 1117048320.0, "36675": 1117048320.0, "36680": 1117048320.0, "36685": 1117048320.0, "36690": 1117048320.0, "36695": 1117048320.0, "36700": 1117048320.0, "36705": 1117048320.0, "36710": 1117048320.0, "36715": 
1117048320.0, "36720": 1117048320.0, "36725": 1117048320.0, "36730": 1117048320.0, "36735": 1117048320.0, "36740": 1117048320.0, "36745": 1117048320.0, "36750": 1117048320.0, "36755": 1117048320.0, "36760": 1117048320.0, "36765": 1117048320.0, "36770": 1117048320.0, "36775": 1117048320.0, "36780": 1117048320.0, "36785": 1117048320.0, "36790": 1117048320.0, "36795": 1117048320.0, "36800": 1117048320.0, "36805": 1117048320.0, "36810": 1117048320.0, "36815": 1117048320.0, "36820": 1117048320.0, "36825": 1117048320.0, "36830": 1117048320.0, "36835": 1117048320.0, "36840": 1117048320.0, "36845": 1117048320.0, "36850": 1117048320.0, "36855": 1117048320.0, "36860": 1117048320.0, "36865": 1117048320.0, "36870": 1117048320.0, "36875": 1117048320.0, "36880": 1117048320.0, "36885": 1117048320.0, "36890": 1117048320.0, "36895": 1117048320.0, "36900": 1117048320.0, "36905": 1117048320.0, "36910": 1117048320.0, "36915": 1117048320.0, "36920": 1117048320.0, "36925": 1117048320.0, "36930": 1117048320.0, "36935": 1117048320.0, "36940": 1117048320.0, "36945": 1117048320.0, "36950": 1117048320.0, "36955": 1117048320.0, "36960": 1117048320.0, "36965": 1117048320.0, "36970": 1117048320.0, "36975": 1117048320.0, "36980": 1117048320.0, "36985": 1117048320.0, "36990": 1117048320.0, "36995": 1117048320.0, "37000": 1117048320.0, "37005": 1117048320.0, "37010": 1117048320.0, "37015": 1117048320.0, "37020": 1117048320.0, "37025": 1117048320.0, "37030": 1117048320.0, "37035": 1117048320.0, "37040": 1117048320.0, "37045": 1117048320.0, "37050": 1117048320.0, "37055": 1117048320.0, "37060": 1117048320.0, "37065": 1117048320.0, "37070": 1117048320.0, "37075": 1117048320.0, "37080": 1117048320.0, "37085": 1117048320.0, "37090": 1117048320.0, "37095": 1117048320.0, "37100": 1117048320.0, "37105": 1117048320.0, "37110": 1117048320.0, "37115": 1117048320.0, "37120": 1117048320.0, "37125": 1117048320.0, "37130": 1117048320.0, "37135": 1117048320.0, "37140": 1117048320.0, "37145": 1117048320.0, "37150": 1117048320.0, "37155": 1117048320.0, "37160": 1117048320.0, "37165": 1117048320.0, "37170": 1117048320.0, "37175": 1117048320.0, "37180": 1117048320.0, "37185": 1117048320.0, "37190": 1117048320.0, "37195": 1117048320.0, "37200": 1117048320.0, "37205": 1117048320.0, "37210": 1117048320.0, "37215": 1117048320.0, "37220": 1117048320.0, "37225": 1117048320.0, "37230": 1117048320.0, "37235": 1117048320.0, "37240": 1117048320.0, "37245": 1117048320.0, "37250": 1117048320.0, "37255": 1117048320.0, "37260": 1117048320.0, "37265": 1117048320.0, "37270": 1117048320.0, "37275": 1117048320.0, "37280": 1117048320.0, "37285": 1117048320.0, "37290": 1117048320.0, "37295": 1117048320.0, "37300": 1117048320.0, "37305": 1117048320.0, "37310": 1117048320.0, "37315": 1117048320.0, "37320": 1117048320.0, "37325": 1117048320.0, "37330": 1117048320.0, "37335": 1117048320.0, "37340": 1117048320.0, "37345": 1117048320.0, "37350": 1117048320.0, "37355": 1117048320.0, "37360": 1117048320.0, "37365": 1117048320.0, "37370": 1117048320.0, "37375": 1117048320.0, "37380": 1117048320.0, "37385": 1117048320.0, "37390": 1117048320.0, "37395": 1117048320.0, "37400": 1117048320.0, "37405": 1117048320.0, "37410": 1117048320.0, "37415": 1117048320.0, "37420": 1117048320.0, "37425": 1117048320.0, "37430": 1117048320.0, "37435": 1117048320.0, "37440": 1117048320.0, "37445": 1117048320.0, "37450": 1117048320.0, "37455": 1117048320.0, "37460": 1117048320.0, "37465": 1117048320.0, "37470": 1117048320.0, "37475": 1117048320.0, "37480": 1117048320.0, "37485": 
1117048320.0, "37490": 1117048320.0, "37495": 1117048320.0, "37500": 1117048320.0, "37505": 1117048320.0, "37510": 1117048320.0, "37515": 1117048320.0, "37520": 1117048320.0, "37525": 1117048320.0, "37530": 1117048320.0, "37535": 1117048320.0, "37540": 1117048320.0, "37545": 1117048320.0, "37550": 1117048320.0, "37555": 1117048320.0, "37560": 1117048320.0, "37565": 1117048320.0, "37570": 1117048320.0, "37575": 1117048320.0, "37580": 1117048320.0, "37585": 1117048320.0, "37590": 1117048320.0, "37595": 1117048320.0, "37600": 1117048320.0, "37605": 1117048320.0, "37610": 1117048320.0, "37615": 1117048320.0, "37620": 1117048320.0, "37625": 1117048320.0, "37630": 1117048320.0, "37635": 1117048320.0, "37640": 1117048320.0, "37645": 1117048320.0, "37650": 1117048320.0, "37655": 1117048320.0, "37660": 1117048320.0, "37665": 1117048320.0, "37670": 1117048320.0, "37675": 1117048320.0, "37680": 1117048320.0, "37685": 1117048320.0, "37690": 1117048320.0, "37695": 1117048320.0, "37700": 1117048320.0, "37705": 1117048320.0, "37710": 1117048320.0, "37715": 1117048320.0, "37720": 1117048320.0, "37725": 1117048320.0, "37730": 1117048320.0, "37735": 1117048320.0, "37740": 1117048320.0, "37745": 1117048320.0, "37750": 1117048320.0, "37755": 1117048320.0, "37760": 1117048320.0, "37765": 1117048320.0, "37770": 1117048320.0, "37775": 1117048320.0, "37780": 1117048320.0, "37785": 1117048320.0, "37790": 1117048320.0, "37795": 1117048320.0, "37800": 1117048320.0, "37805": 1117048320.0, "37810": 1117048320.0, "37815": 1117048320.0, "37820": 1117048320.0, "37825": 1117048320.0, "37830": 1117048320.0, "37835": 1117048320.0, "37840": 1117048320.0, "37845": 1117048320.0, "37850": 1117048320.0, "37855": 1117048320.0, "37860": 1117048320.0, "37865": 1117048320.0, "37870": 1117048320.0, "37875": 1117048320.0, "37880": 1117048320.0, "37885": 1117048320.0, "37890": 1117048320.0, "37895": 1117048320.0, "37900": 1117048320.0, "37905": 1117048320.0, "37910": 1117048320.0, "37915": 1117048320.0, "37920": 1117048320.0, "37925": 1117048320.0, "37930": 1117048320.0, "37935": 1117048320.0, "37940": 1117048320.0, "37945": 1117048320.0, "37950": 1117048320.0, "37955": 1117048320.0, "37960": 1117048320.0, "37965": 1117048320.0, "37970": 1117048320.0, "37975": 1117048320.0, "37980": 1117048320.0, "37985": 1117048320.0, "37990": 1117048320.0, "37995": 1117048320.0, "38000": 1117048320.0, "38005": 1117048320.0, "38010": 1117048320.0, "38015": 1117048320.0, "38020": 1117048320.0, "38025": 1117048320.0, "38030": 1117048320.0, "38035": 1117048320.0, "38040": 1117048320.0, "38045": 1117048320.0, "38050": 1117048320.0, "38055": 1117048320.0, "38060": 1117048320.0, "38065": 1117048320.0, "38070": 1117048320.0, "38075": 1117048320.0, "38080": 1117048320.0, "38085": 1117048320.0, "38090": 1117048320.0, "38095": 1117048320.0, "38100": 1117048320.0, "38105": 1117048320.0, "38110": 1117048320.0, "38115": 1117048320.0, "38120": 1117048320.0, "38125": 1117048320.0, "38130": 1117048320.0, "38135": 1117048320.0, "38140": 1117048320.0, "38145": 1117048320.0, "38150": 1117048320.0, "38155": 1117048320.0, "38160": 1117048320.0, "38165": 1117048320.0, "38170": 1117048320.0, "38175": 1117048320.0, "38180": 1117048320.0, "38185": 1117048320.0, "38190": 1117048320.0, "38195": 1117048320.0, "38200": 1117048320.0, "38205": 1117048320.0, "38210": 1117048320.0, "38215": 1117048320.0, "38220": 1117048320.0, "38225": 1117048320.0, "38230": 1117048320.0, "38235": 1117048320.0, "38240": 1117048320.0, "38245": 1117048320.0, "38250": 1117048320.0, "38255": 
1117048320.0, "38260": 1117048320.0, "38265": 1117048320.0, "38270": 1117048320.0, "38275": 1117048320.0, "38280": 1117048320.0, "38285": 1117048320.0, "38290": 1117048320.0, "38295": 1117048320.0, "38300": 1117048320.0, "38305": 1117048320.0, "38310": 1117048320.0, "38315": 1117048320.0, "38320": 1117048320.0, "38325": 1117048320.0, "38330": 1117048320.0, "38335": 1117048320.0, "38340": 1117048320.0, "38345": 1117048320.0, "38350": 1117048320.0, "38355": 1117048320.0, "38360": 1117048320.0, "38365": 1117048320.0, "38370": 1117048320.0, "38375": 1117048320.0, "38380": 1117048320.0, "38385": 1117048320.0, "38390": 1117048320.0, "38395": 1117048320.0, "38400": 1117048320.0, "38405": 1117048320.0, "38410": 1117048320.0, "38415": 1117048320.0, "38420": 1117048320.0, "38425": 1117048320.0, "38430": 1117048320.0, "38435": 1117048320.0, "38440": 1117048320.0, "38445": 1117048320.0, "38450": 1117048320.0, "38455": 1117048320.0, "38460": 1117048320.0, "38465": 1117048320.0, "38470": 1117048320.0, "38475": 1117048320.0, "38480": 1117048320.0, "38485": 1117048320.0, "38490": 1117048320.0, "38495": 1117048320.0, "38500": 1117048320.0, "38505": 1117048320.0, "38510": 1117048320.0, "38515": 1117048320.0, "38520": 1117048320.0, "38525": 1117048320.0, "38530": 1117048320.0, "38535": 1117048320.0, "38540": 1117048320.0, "38545": 1117048320.0, "38550": 1117048320.0, "38555": 1117048320.0, "38560": 1117048320.0, "38565": 1117048320.0, "38570": 1117048320.0, "38575": 1117048320.0, "38580": 1117048320.0, "38585": 1117048320.0, "38590": 1117048320.0, "38595": 1117048320.0, "38600": 1117048320.0, "38605": 1117048320.0, "38610": 1117048320.0, "38615": 1117048320.0, "38620": 1117048320.0, "38625": 1117048320.0, "38630": 1117048320.0, "38635": 1117048320.0, "38640": 1117048320.0, "38645": 1117048320.0, "38650": 1117048320.0, "38655": 1117048320.0, "38660": 1117048320.0, "38665": 1117048320.0, "38670": 1117048320.0, "38675": 1117048320.0, "38680": 1117048320.0, "38685": 1117048320.0, "38690": 1117048320.0, "38695": 1117048320.0, "38700": 1117048320.0, "38705": 1117048320.0, "38710": 1117048320.0, "38715": 1117048320.0, "38720": 1117048320.0, "38725": 1117048320.0, "38730": 1117048320.0, "38735": 1117048320.0, "38740": 1117048320.0, "38745": 1117048320.0, "38750": 1117048320.0, "38755": 1117048320.0, "38760": 1117048320.0, "38765": 1117048320.0, "38770": 1117048320.0, "38775": 1117048320.0, "38780": 1117048320.0, "38785": 1117048320.0, "38790": 1117048320.0, "38795": 1117048320.0, "38800": 1117048320.0, "38805": 1117048320.0, "38810": 1117048320.0, "38815": 1117048320.0, "38820": 1117048320.0, "38825": 1117048320.0, "38830": 1117048320.0, "38835": 1117048320.0, "38840": 1117048320.0, "38845": 1117048320.0, "38850": 1117048320.0, "38855": 1117048320.0, "38860": 1117048320.0, "38865": 1117048320.0, "38870": 1117048320.0, "38875": 1117048320.0, "38880": 1117048320.0, "38885": 1117048320.0, "38890": 1117048320.0, "38895": 1117048320.0, "38900": 1117048320.0, "38905": 1117048320.0, "38910": 1117048320.0, "38915": 1117048320.0, "38920": 1117048320.0, "38925": 1117048320.0, "38930": 1117048320.0, "38935": 1117048320.0, "38940": 1117048320.0, "38945": 1117048320.0, "38950": 1117048320.0, "38955": 1117048320.0, "38960": 1117048320.0, "38965": 1117048320.0, "38970": 1117048320.0, "38975": 1117048320.0, "38980": 1117048320.0, "38985": 1117048320.0, "38990": 1117048320.0, "38995": 1117048320.0, "39000": 1117048320.0, "39005": 1117048320.0, "39010": 1117048320.0, "39015": 1117048320.0, "39020": 1117048320.0, "39025": 
1117048320.0, "39030": 1117048320.0, "39035": 1117048320.0, "39040": 1117048320.0, "39045": 1117048320.0, "39050": 1117048320.0, "39055": 1117048320.0, "39060": 1117048320.0, "39065": 1117048320.0, "39070": 1117048320.0, "39075": 1117048320.0, "39080": 1117048320.0, "39085": 1117048320.0, "39090": 1117048320.0, "39095": 1117048320.0, "39100": 1117048320.0, "39105": 1117048320.0, "39110": 1117048320.0, "39115": 1117048320.0, "39120": 1117048320.0, "39125": 1117048320.0, "39130": 1117048320.0, "39135": 1117048320.0, "39140": 1117048320.0, "39145": 1117048320.0, "39150": 1117048320.0, "39155": 1117048320.0, "39160": 1117048320.0, "39165": 1117048320.0, "39170": 1117048320.0, "39175": 1117048320.0, "39180": 1117048320.0, "39185": 1117048320.0, "39190": 1117048320.0, "39195": 1117048320.0, "39200": 1117048320.0, "39205": 1117048320.0, "39210": 1117048320.0, "39215": 1117048320.0, "39220": 1117048320.0, "39225": 1117048320.0, "39230": 1117048320.0, "39235": 1117048320.0, "39240": 1117048320.0, "39245": 1117048320.0, "39250": 1117048320.0, "39255": 1117048320.0, "39260": 1117048320.0, "39265": 1117048320.0, "39270": 1117048320.0, "39275": 1117048320.0, "39280": 1117048320.0, "39285": 1117048320.0, "39290": 1117048320.0, "39295": 1117048320.0, "39300": 1117048320.0, "39305": 1117048320.0, "39310": 1117048320.0, "39315": 1117048320.0, "39320": 1117048320.0, "39325": 1117048320.0, "39330": 1117048320.0, "39335": 1117048320.0, "39340": 1117048320.0, "39345": 1117048320.0, "39350": 1117048320.0, "39355": 1117048320.0, "39360": 1117048320.0, "39365": 1117048320.0, "39370": 1117048320.0, "39375": 1117048320.0, "39380": 1117048320.0, "39385": 1117048320.0, "39390": 1117048320.0, "39395": 1117048320.0, "39400": 1117048320.0, "39405": 1117048320.0, "39410": 1117048320.0, "39415": 1117048320.0, "39420": 1117048320.0, "39425": 1117048320.0, "39430": 1117048320.0, "39435": 1117048320.0, "39440": 1117048320.0, "39445": 1117048320.0, "39450": 1117048320.0, "39455": 1117048320.0, "39460": 1117048320.0, "39465": 1117048320.0, "39470": 1117048320.0, "39475": 1117048320.0, "39480": 1117048320.0, "39485": 1117048320.0, "39490": 1117048320.0, "39495": 1117048320.0, "39500": 1117048320.0, "39505": 1117048320.0, "39510": 1117048320.0, "39515": 1117048320.0, "39520": 1117048320.0, "39525": 1117048320.0, "39530": 1117048320.0, "39535": 1117048320.0, "39540": 1117048320.0, "39545": 1117048320.0, "39550": 1117048320.0, "39555": 1117048320.0, "39560": 1117048320.0, "39565": 1117048320.0, "39570": 1117048320.0, "39575": 1117048320.0, "39580": 1117048320.0, "39585": 1117048320.0, "39590": 1117048320.0, "39595": 1117048320.0, "39600": 1117048320.0, "39605": 1117048320.0, "39610": 1117048320.0, "39615": 1117048320.0, "39620": 1117048320.0, "39625": 1117048320.0, "39630": 1117048320.0, "39635": 1117048320.0, "39640": 1117048320.0, "39645": 1117048320.0, "39650": 1117048320.0, "39655": 1117048320.0, "39660": 1117048320.0, "39665": 1117048320.0, "39670": 1117048320.0, "39675": 1117048320.0, "39680": 1117048320.0, "39685": 1117048320.0, "39690": 1117048320.0, "39695": 1117048320.0, "39700": 1117048320.0, "39705": 1117048320.0, "39710": 1117048320.0, "39715": 1117048320.0, "39720": 1117048320.0, "39725": 1117048320.0, "39730": 1117048320.0, "39735": 1117048320.0, "39740": 1117048320.0, "39745": 1117048320.0, "39750": 1117048320.0, "39755": 1117048320.0, "39760": 1117048320.0, "39765": 1117048320.0, "39770": 1117048320.0, "39775": 1117048320.0, "39780": 1117048320.0, "39785": 1117048320.0, "39790": 1117048320.0, "39795": 
1117048320.0, "39800": 1117048320.0, "39805": 1117048320.0, "39810": 1117048320.0, "39815": 1117048320.0, "39820": 1117048320.0, "39825": 1117048320.0, "39830": 1117048320.0, "39835": 1117048320.0, "39840": 1117048320.0, "39845": 1117048320.0, "39850": 1117048320.0, "39855": 1117048320.0, "39860": 1117048320.0, "39865": 1117048320.0, "39870": 1117048320.0, "39875": 1117048320.0, "39880": 1117048320.0, "39885": 1117048320.0, "39890": 1117048320.0, "39895": 1117048320.0, "39900": 1117048320.0, "39905": 1117048320.0, "39910": 1117048320.0, "39915": 1117048320.0, "39920": 1117048320.0, "39925": 1117048320.0, "39930": 1117048320.0, "39935": 1117048320.0, "39940": 1117048320.0, "39945": 1117048320.0, "39950": 1117048320.0, "39955": 1117048320.0, "39960": 1117048320.0, "39965": 1117048320.0, "39970": 1117048320.0, "39975": 1117048320.0, "39980": 1117048320.0, "39985": 1117048320.0, "39990": 1117048320.0, "39995": 1117048320.0, "40000": 1117048320.0, "40005": 1117048320.0, "40010": 1117048320.0, "40015": 1117048320.0, "40020": 1117048320.0, "40025": 1117048320.0, "40030": 1117048320.0, "40035": 1117048320.0, "40040": 1117048320.0, "40045": 1117048320.0, "40050": 1117048320.0, "40055": 1117048320.0, "40060": 1117048320.0, "40065": 1117048320.0, "40070": 1117048320.0, "40075": 1117048320.0, "40080": 1117048320.0, "40085": 1117048320.0, "40090": 1117048320.0, "40095": 1117048320.0, "40100": 1117048320.0, "40105": 1117048320.0, "40110": 1117048320.0, "40115": 1117048320.0, "40120": 1117048320.0, "40125": 1117048320.0, "40130": 1117048320.0, "40135": 1117048320.0, "40140": 1117048320.0, "40145": 1117048320.0, "40150": 1117048320.0, "40155": 1117048320.0, "40160": 1117048320.0, "40165": 1117048320.0, "40170": 1117048320.0, "40175": 1117048320.0, "40180": 1117048320.0, "40185": 1117048320.0, "40190": 1117048320.0, "40195": 1117048320.0, "40200": 1117048320.0, "40205": 1117048320.0, "40210": 1117048320.0, "40215": 1117048320.0, "40220": 1117048320.0, "40225": 1117048320.0, "40230": 1117048320.0, "40235": 1117048320.0, "40240": 1117048320.0, "40245": 1117048320.0, "40250": 1117048320.0, "40255": 1117048320.0, "40260": 1117048320.0, "40265": 1117048320.0, "40270": 1117048320.0, "40275": 1117048320.0, "40280": 1117048320.0, "40285": 1117048320.0, "40290": 1117048320.0, "40295": 1117048320.0, "40300": 1117048320.0, "40305": 1117048320.0, "40310": 1117048320.0, "40315": 1117048320.0, "40320": 1117048320.0, "40325": 1117048320.0, "40330": 1117048320.0, "40335": 1117048320.0, "40340": 1117048320.0, "40345": 1117048320.0, "40350": 1117048320.0, "40355": 1117048320.0, "40360": 1117048320.0, "40365": 1117048320.0, "40370": 1117048320.0, "40375": 1117048320.0, "40380": 1117048320.0, "40385": 1117048320.0, "40390": 1117048320.0, "40395": 1117048320.0, "40400": 1117048320.0, "40405": 1117048320.0, "40410": 1117048320.0, "40415": 1117048320.0, "40420": 1117048320.0, "40425": 1117048320.0, "40430": 1117048320.0, "40435": 1117048320.0, "40440": 1117048320.0, "40445": 1117048320.0, "40450": 1117048320.0, "40455": 1117048320.0, "40460": 1117048320.0, "40465": 1117048320.0, "40470": 1117048320.0, "40475": 1117048320.0, "40480": 1117048320.0, "40485": 1117048320.0, "40490": 1117048320.0, "40495": 1117048320.0, "40500": 1117048320.0, "40505": 1117048320.0, "40510": 1117048320.0, "40515": 1117048320.0, "40520": 1117048320.0, "40525": 1117048320.0, "40530": 1117048320.0, "40535": 1117048320.0, "40540": 1117048320.0, "40545": 1117048320.0, "40550": 1117048320.0, "40555": 1117048320.0, "40560": 1117048320.0, "40565": 
1117048320.0, "40570": 1117048320.0, "40575": 1117048320.0, "40580": 1117048320.0, "40585": 1117048320.0, "40590": 1117048320.0, "40595": 1117048320.0, "40600": 1117048320.0, "40605": 1117048320.0, "40610": 1117048320.0, "40615": 1117048320.0, "40620": 1117048320.0, "40625": 1117048320.0, "40630": 1117048320.0, "40635": 1117048320.0, "40640": 1117048320.0, "40645": 1117048320.0, "40650": 1117048320.0, "40655": 1117048320.0, "40660": 1117048320.0, "40665": 1117048320.0, "40670": 1117048320.0, "40675": 1117048320.0, "40680": 1117048320.0, "40685": 1117048320.0, "40690": 1117048320.0, "40695": 1117048320.0, "40700": 1117048320.0, "40705": 1117048320.0, "40710": 1117048320.0, "40715": 1117048320.0, "40720": 1117048320.0, "40725": 1117048320.0, "40730": 1117048320.0, "40735": 1117048320.0, "40740": 1117048320.0, "40745": 1117048320.0, "40750": 1117048320.0, "40755": 1117048320.0, "40760": 1117048320.0, "40765": 1117048320.0, "40770": 1117048320.0, "40775": 1117048320.0, "40780": 1117048320.0, "40785": 1117048320.0, "40790": 1117048320.0, "40795": 1117048320.0, "40800": 1117048320.0, "40805": 1117048320.0, "40810": 1117048320.0, "40815": 1117048320.0, "40820": 1117048320.0, "40825": 1117048320.0, "40830": 1117048320.0, "40835": 1117048320.0, "40840": 1117048320.0, "40845": 1117048320.0, "40850": 1117048320.0, "40855": 1117048320.0, "40860": 1117048320.0, "40865": 1117048320.0, "40870": 1117048320.0, "40875": 1117048320.0, "40880": 1117048320.0, "40885": 1117048320.0, "40890": 1117048320.0, "40895": 1117048320.0, "40900": 1117048320.0, "40905": 1117048320.0, "40910": 1117048320.0, "40915": 1117048320.0, "40920": 1117048320.0, "40925": 1117048320.0, "40930": 1117048320.0, "40935": 1117048320.0, "40940": 1117048320.0, "40945": 1117048320.0, "40950": 1117048320.0, "40955": 1117048320.0, "40960": 1117048320.0, "40965": 1117048320.0, "40970": 1117048320.0, "40975": 1117048320.0, "40980": 1117048320.0, "40985": 1117048320.0, "40990": 1117048320.0, "40995": 1117048320.0, "41000": 1117048320.0, "41005": 1117048320.0, "41010": 1117048320.0, "41015": 1117048320.0, "41020": 1117048320.0, "41025": 1117048320.0, "41030": 1117048320.0, "41035": 1117048320.0, "41040": 1117048320.0, "41045": 1117048320.0, "41050": 1117048320.0, "41055": 1117048320.0, "41060": 1117048320.0, "41065": 1117048320.0, "41070": 1117048320.0, "41075": 1117048320.0, "41080": 1117048320.0, "41085": 1117048320.0, "41090": 1117048320.0, "41095": 1117048320.0, "41100": 1117048320.0, "41105": 1117048320.0, "41110": 1117048320.0, "41115": 1117048320.0, "41120": 1117048320.0, "41125": 1117048320.0, "41130": 1117048320.0, "41135": 1117048320.0, "41140": 1117048320.0, "41145": 1117048320.0, "41150": 1117048320.0, "41155": 1117048320.0, "41160": 1117048320.0, "41165": 1117048320.0, "41170": 1117048320.0, "41175": 1117048320.0, "41180": 1117048320.0, "41185": 1117048320.0, "41190": 1117048320.0, "41195": 1117048320.0, "41200": 1117048320.0, "41205": 1117048320.0, "41210": 1117048320.0, "41215": 1117048320.0, "41220": 1117048320.0, "41225": 1117048320.0, "41230": 1117048320.0, "41235": 1117048320.0, "41240": 1117048320.0, "41245": 1117048320.0, "41250": 1117048320.0, "41255": 1117048320.0, "41260": 1117048320.0, "41265": 1117048320.0, "41270": 1117048320.0, "41275": 1117048320.0, "41280": 1117048320.0, "41285": 1117048320.0, "41290": 1117048320.0, "41295": 1117048320.0, "41300": 1117048320.0, "41305": 1117048320.0, "41310": 1117048320.0, "41315": 1117048320.0, "41320": 1117048320.0, "41325": 1117048320.0, "41330": 1117048320.0, "41335": 
1117048320.0, "41340": 1117048320.0, "41345": 1117048320.0, "41350": 1117048320.0, "41355": 1117048320.0, "41360": 1117048320.0, "41365": 1117048320.0, "41370": 1117048320.0, "41375": 1117048320.0, "41380": 1117048320.0, "41385": 1117048320.0, "41390": 1117048320.0, "41395": 1117048320.0, "41400": 1117048320.0, "41405": 1117048320.0, "41410": 1117048320.0, "41415": 1117048320.0, "41420": 1117048320.0, "41425": 1117048320.0, "41430": 1117048320.0, "41435": 1117048320.0, "41440": 1117048320.0, "41445": 1117048320.0, "41450": 1117048320.0, "41455": 1117048320.0, "41460": 1117048320.0, "41465": 1117048320.0, "41470": 1117048320.0, "41475": 1117048320.0, "41480": 1117048320.0, "41485": 1117048320.0, "41490": 1117048320.0, "41495": 1117048320.0, "41500": 1117048320.0, "41505": 1117048320.0, "41510": 1117048320.0, "41515": 1117048320.0, "41520": 1117048320.0, "41525": 1117048320.0, "41530": 1117048320.0, "41535": 1117048320.0, "41540": 1117048320.0, "41545": 1117048320.0, "41550": 1117048320.0, "41555": 1117048320.0, "41560": 1117048320.0, "41565": 1117048320.0, "41570": 1117048320.0, "41575": 1117048320.0, "41580": 1117048320.0, "41585": 1117048320.0, "41590": 1117048320.0, "41595": 1117048320.0, "41600": 1117048320.0, "41605": 1117048320.0, "41610": 1117048320.0, "41615": 1117048320.0, "41620": 1117048320.0, "41625": 1117048320.0, "41630": 1117048320.0, "41635": 1117048320.0, "41640": 1117048320.0, "41645": 1117048320.0, "41650": 1117048320.0, "41655": 1117048320.0, "41660": 1117048320.0, "41665": 1117048320.0, "41670": 1117048320.0, "41675": 1117048320.0, "41680": 1117048320.0, "41685": 1117048320.0, "41690": 1117048320.0, "41695": 1117048320.0, "41700": 1117048320.0, "41705": 1117048320.0, "41710": 1117048320.0, "41715": 1117048320.0, "41720": 1117048320.0, "41725": 1117048320.0, "41730": 1117048320.0, "41735": 1117048320.0, "41740": 1117048320.0, "41745": 1117048320.0, "41750": 1117048320.0, "41755": 1117048320.0, "41760": 1117048320.0, "41765": 1117048320.0, "41770": 1117048320.0, "41775": 1117048320.0, "41780": 1117048320.0, "41785": 1117048320.0, "41790": 1117048320.0, "41795": 1117048320.0, "41800": 1117048320.0, "41805": 1117048320.0, "41810": 1117048320.0, "41815": 1117048320.0, "41820": 1117048320.0, "41825": 1117048320.0, "41830": 1117048320.0, "41835": 1117048320.0, "41840": 1117048320.0, "41845": 1117048320.0, "41850": 1117048320.0, "41855": 1117048320.0, "41860": 1117048320.0, "41865": 1117048320.0, "41870": 1117048320.0, "41875": 1117048320.0, "41880": 1117048320.0, "41885": 1117048320.0, "41890": 1117048320.0, "41895": 1117048320.0, "41900": 1117048320.0, "41905": 1117048320.0, "41910": 1117048320.0, "41915": 1117048320.0, "41920": 1117048320.0, "41925": 1117048320.0, "41930": 1117048320.0, "41935": 1117048320.0, "41940": 1117048320.0, "41945": 1117048320.0, "41950": 1117048320.0, "41955": 1117048320.0, "41960": 1117048320.0, "41965": 1117048320.0, "41970": 1117048320.0, "41975": 1117048320.0, "41980": 1117048320.0, "41985": 1117048320.0, "41990": 1117048320.0, "41995": 1117048320.0, "42000": 1117048320.0, "42005": 1117048320.0, "42010": 1117048320.0, "42015": 1117048320.0, "42020": 1117048320.0, "42025": 1117048320.0, "42030": 1117048320.0, "42035": 1117048320.0, "42040": 1117048320.0, "42045": 1117048320.0, "42050": 1117048320.0, "42055": 1117048320.0, "42060": 1117048320.0, "42065": 1117048320.0, "42070": 1117048320.0, "42075": 1117048320.0, "42080": 1117048320.0, "42085": 1117048320.0, "42090": 1117048320.0, "42095": 1117048320.0, "42100": 1117048320.0, "42105": 
1117048320.0, "42110": 1117048320.0, "42115": 1117048320.0, "42120": 1117048320.0, "42125": 1117048320.0, "42130": 1117048320.0, "42135": 1117048320.0, "42140": 1117048320.0, "42145": 1117048320.0, "42150": 1117048320.0, "42155": 1117048320.0, "42160": 1117048320.0, "42165": 1117048320.0, "42170": 1117048320.0, "42175": 1117048320.0, "42180": 1117048320.0, "42185": 1117048320.0, "42190": 1117048320.0, "42195": 1117048320.0, "42200": 1117048320.0, "42205": 1117048320.0, "42210": 1117048320.0, "42215": 1117048320.0, "42220": 1117048320.0, "42225": 1117048320.0, "42230": 1117048320.0, "42235": 1117048320.0, "42240": 1117048320.0, "42245": 1117048320.0, "42250": 1117048320.0, "42255": 1117048320.0, "42260": 1117048320.0, "42265": 1117048320.0, "42270": 1117048320.0, "42275": 1117048320.0, "42280": 1117048320.0, "42285": 1117048320.0, "42290": 1117048320.0, "42295": 1117048320.0, "42300": 1117048320.0, "42305": 1117048320.0, "42310": 1117048320.0, "42315": 1117048320.0, "42320": 1117048320.0, "42325": 1117048320.0, "42330": 1117048320.0, "42335": 1117048320.0, "42340": 1117048320.0, "42345": 1117048320.0, "42350": 1117048320.0, "42355": 1117048320.0, "42360": 1117048320.0, "42365": 1117048320.0, "42370": 1117048320.0, "42375": 1117048320.0, "42380": 1117048320.0, "42385": 1117048320.0, "42390": 1117048320.0, "42395": 1117048320.0, "42400": 1117048320.0, "42405": 1117048320.0, "42410": 1117048320.0, "42415": 1117048320.0, "42420": 1117048320.0, "42425": 1117048320.0, "42430": 1117048320.0, "42435": 1117048320.0, "42440": 1117048320.0, "42445": 1117048320.0, "42450": 1117048320.0, "42455": 1117048320.0, "42460": 1117048320.0, "42465": 1117048320.0, "42470": 1117048320.0, "42475": 1117048320.0, "42480": 1117048320.0, "42485": 1117048320.0, "42490": 1117048320.0, "42495": 1117048320.0, "42500": 1117048320.0, "42505": 1117048320.0, "42510": 1117048320.0, "42515": 1117048320.0, "42520": 1117048320.0, "42525": 1117048320.0, "42530": 1117048320.0, "42535": 1117048320.0, "42540": 1117048320.0, "42545": 1117048320.0, "42550": 1117048320.0, "42555": 1117048320.0, "42560": 1117048320.0, "42565": 1117048320.0, "42570": 1117048320.0, "42575": 1117048320.0, "42580": 1117048320.0, "42585": 1117048320.0, "42590": 1117048320.0, "42595": 1117048320.0, "42600": 1117048320.0, "42605": 1117048320.0, "42610": 1117048320.0, "42615": 1117048320.0, "42620": 1117048320.0, "42625": 1117048320.0, "42630": 1117048320.0, "42635": 1117048320.0, "42640": 1117048320.0, "42645": 1117048320.0, "42650": 1117048320.0, "42655": 1117048320.0, "42660": 1117048320.0, "42665": 1117048320.0, "42670": 1117048320.0, "42675": 1117048320.0, "42680": 1117048320.0, "42685": 1117048320.0, "42690": 1117048320.0, "42695": 1117048320.0, "42700": 1117048320.0, "42705": 1117048320.0, "42710": 1117048320.0, "42715": 1117048320.0, "42720": 1117048320.0, "42725": 1117048320.0, "42730": 1117048320.0, "42735": 1117048320.0, "42740": 1117048320.0, "42745": 1117048320.0, "42750": 1117048320.0, "42755": 1117048320.0, "42760": 1117048320.0, "42765": 1117048320.0, "42770": 1117048320.0, "42775": 1117048320.0, "42780": 1117048320.0, "42785": 1117048320.0, "42790": 1117048320.0, "42795": 1117048320.0, "42800": 1117048320.0, "42805": 1117048320.0, "42810": 1117048320.0, "42815": 1117048320.0, "42820": 1117048320.0, "42825": 1117048320.0, "42830": 1117048320.0, "42835": 1117048320.0, "42840": 1117048320.0, "42845": 1117048320.0, "42850": 1117048320.0, "42855": 1117048320.0, "42860": 1117048320.0, "42865": 1117048320.0, "42870": 1117048320.0, "42875": 
1117048320.0, "42880": 1117048320.0, "42885": 1117048320.0, "42890": 1117048320.0, "42895": 1117048320.0, "42900": 1117048320.0, "42905": 1117048320.0, "42910": 1117048320.0, "42915": 1117048320.0, "42920": 1117048320.0, "42925": 1117048320.0, "42930": 1117048320.0, "42935": 1117048320.0, "42940": 1117048320.0, "42945": 1117048320.0, "42950": 1117048320.0, "42955": 1117048320.0, "42960": 1117048320.0, "42965": 1117048320.0, "42970": 1117048320.0, "42975": 1117048320.0, "42980": 1117048320.0, "42985": 1117048320.0, "42990": 1117048320.0, "42995": 1117048320.0, "43000": 1117048320.0, "43005": 1117048320.0, "43010": 1117048320.0, "43015": 1117048320.0, "43020": 1117048320.0, "43025": 1117048320.0, "43030": 1117048320.0, "43035": 1117048320.0, "43040": 1117048320.0, "43045": 1117048320.0, "43050": 1117048320.0, "43055": 1117048320.0, "43060": 1117048320.0, "43065": 1117048320.0, "43070": 1117048320.0, "43075": 1117048320.0, "43080": 1117048320.0, "43085": 1117048320.0, "43090": 1117048320.0, "43095": 1117048320.0, "43100": 1117048320.0, "43105": 1117048320.0, "43110": 1117048320.0, "43115": 1117048320.0, "43120": 1117048320.0, "43125": 1117048320.0, "43130": 1117048320.0, "43135": 1117048320.0, "43140": 1117048320.0, "43145": 1117048320.0, "43150": 1117048320.0, "43155": 1117048320.0, "43160": 1117048320.0, "43165": 1117048320.0, "43170": 1117048320.0, "43175": 1117048320.0, "43180": 1117048320.0, "43185": 1117048320.0, "43190": 1117048320.0, "43195": 1117048320.0, "43200": 1117048320.0, "43205": 1117048320.0, "43210": 1117048320.0, "43215": 1117048320.0, "43220": 1117048320.0, "43225": 1117048320.0, "43230": 1117048320.0, "43235": 1117048320.0, "43240": 1117048320.0, "43245": 1117048320.0, "43250": 1117048320.0, "43255": 1117048320.0, "43260": 1117048320.0, "43265": 1117048320.0, "43270": 1117048320.0, "43275": 1117048320.0, "43280": 1117048320.0, "43285": 1117048320.0, "43290": 1117048320.0, "43295": 1117048320.0, "43300": 1117048320.0, "43305": 1117048320.0, "43310": 1117048320.0, "43315": 1117048320.0, "43320": 1117048320.0, "43325": 1117048320.0, "43330": 1117048320.0, "43335": 1117048320.0, "43340": 1117048320.0, "43345": 1117048320.0, "43350": 1117048320.0, "43355": 1117048320.0, "43360": 1117048320.0, "43365": 1117048320.0, "43370": 1117048320.0, "43375": 1117048320.0, "43380": 1117048320.0, "43385": 1117048320.0, "43390": 1117048320.0, "43395": 1117048320.0, "43400": 1117048320.0, "43405": 1117048320.0, "43410": 1117048320.0, "43415": 1117048320.0, "43420": 1117048320.0, "43425": 1117048320.0, "43430": 1117048320.0, "43435": 1117048320.0, "43440": 1117048320.0, "43445": 1117048320.0, "43450": 1117048320.0, "43455": 1117048320.0, "43460": 1117048320.0, "43465": 1117048320.0, "43470": 1117048320.0, "43475": 1117048320.0, "43480": 1117048320.0, "43485": 1117048320.0, "43490": 1117048320.0, "43495": 1117048320.0, "43500": 1117048320.0, "43505": 1117048320.0, "43510": 1117048320.0, "43515": 1117048320.0, "43520": 1117048320.0, "43525": 1117048320.0, "43530": 1117048320.0, "43535": 1117048320.0, "43540": 1117048320.0, "43545": 1117048320.0, "43550": 1117048320.0, "43555": 1117048320.0, "43560": 1117048320.0, "43565": 1117048320.0, "43570": 1117048320.0, "43575": 1117048320.0, "43580": 1117048320.0, "43585": 1117048320.0, "43590": 1117048320.0, "43595": 1117048320.0, "43600": 1117048320.0, "43605": 1117048320.0, "43610": 1117048320.0, "43615": 1117048320.0, "43620": 1117048320.0, "43625": 1117048320.0, "43630": 1117048320.0, "43635": 1117048320.0, "43640": 1117048320.0, "43645": 
1117048320.0, "43650": 1117048320.0, "43655": 1117048320.0, "43660": 1117048320.0, "43665": 1117048320.0, "43670": 1117048320.0, "43675": 1117048320.0, "43680": 1117048320.0, "43685": 1117048320.0, "43690": 1117048320.0, "43695": 1117048320.0, "43700": 1117048320.0, "43705": 1117048320.0, "43710": 1117048320.0, "43715": 1117048320.0, "43720": 1117048320.0, "43725": 1117048320.0, "43730": 1117048320.0, "43735": 1117048320.0, "43740": 1117048320.0, "43745": 1117048320.0, "43750": 1117048320.0, "43755": 1117048320.0, "43760": 1117048320.0, "43765": 1117048320.0, "43770": 1117048320.0, "43775": 1117048320.0, "43780": 1117048320.0, "43785": 1117048320.0, "43790": 1117048320.0, "43795": 1117048320.0, "43800": 1117048320.0, "43805": 1117048320.0, "43810": 1117048320.0, "43815": 1117048320.0, "43820": 1117048320.0, "43825": 1117048320.0, "43830": 1117048320.0, "43835": 1117048320.0, "43840": 1117048320.0, "43845": 1117048320.0, "43850": 1117048320.0, "43855": 1117048320.0, "43860": 1117048320.0, "43865": 1117048320.0, "43870": 1117048320.0, "43875": 1117048320.0, "43880": 1117048320.0, "43885": 1117048320.0, "43890": 1117048320.0, "43895": 1117048320.0, "43900": 1117048320.0, "43905": 1117048320.0, "43910": 1117048320.0, "43915": 1117048320.0, "43920": 1117048320.0, "43925": 1117048320.0, "43930": 1117048320.0, "43935": 1117048320.0, "43940": 1117048320.0, "43945": 1117048320.0, "43950": 1117048320.0, "43955": 1117048320.0, "43960": 1117048320.0, "43965": 1117048320.0, "43970": 1117048320.0, "43975": 1117048320.0, "43980": 1117048320.0, "43985": 1117048320.0, "43990": 1117048320.0, "43995": 1117048320.0, "44000": 1117048320.0, "44005": 1117048320.0, "44010": 1117048320.0, "44015": 1117048320.0, "44020": 1117048320.0, "44025": 1117048320.0, "44030": 1117048320.0, "44035": 1117048320.0, "44040": 1117048320.0, "44045": 1117048320.0, "44050": 1117048320.0, "44055": 1117048320.0, "44060": 1117048320.0, "44065": 1117048320.0, "44070": 1117048320.0, "44075": 1117048320.0, "44080": 1117048320.0, "44085": 1117048320.0, "44090": 1117048320.0, "44095": 1117048320.0, "44100": 1117048320.0, "44105": 1117048320.0, "44110": 1117048320.0, "44115": 1117048320.0, "44120": 1117048320.0, "44125": 1117048320.0, "44130": 1117048320.0, "44135": 1117048320.0, "44140": 1117048320.0, "44145": 1117048320.0, "44150": 1117048320.0, "44155": 1117048320.0, "44160": 1117048320.0, "44165": 1117048320.0, "44170": 1117048320.0, "44175": 1117048320.0, "44180": 1117048320.0, "44185": 1117048320.0, "44190": 1117048320.0, "44195": 1117048320.0, "44200": 1117048320.0, "44205": 1117048320.0, "44210": 1117048320.0, "44215": 1117048320.0, "44220": 1117048320.0, "44225": 1117048320.0, "44230": 1117048320.0, "44235": 1117048320.0, "44240": 1117048320.0, "44245": 1117048320.0, "44250": 1117048320.0, "44255": 1117048320.0, "44260": 1117048320.0, "44265": 1117048320.0, "44270": 1117048320.0, "44275": 1117048320.0, "44280": 1117048320.0, "44285": 1117048320.0, "44290": 1117048320.0, "44295": 1117048320.0, "44300": 1117048320.0, "44305": 1117048320.0, "44310": 1117048320.0, "44315": 1117048320.0, "44320": 1117048320.0, "44325": 1117048320.0, "44330": 1117048320.0, "44335": 1117048320.0, "44340": 1117048320.0, "44345": 1117048320.0, "44350": 1117048320.0, "44355": 1117048320.0, "44360": 1117048320.0, "44365": 1117048320.0, "44370": 1117048320.0, "44375": 1117048320.0, "44380": 1117048320.0, "44385": 1117048320.0, "44390": 1117048320.0, "44395": 1117048320.0, "44400": 1117048320.0, "44405": 1117048320.0, "44410": 1117048320.0, "44415": 
1117048320.0, "44420": 1117048320.0, "44425": 1117048320.0, "44430": 1117048320.0, "44435": 1117048320.0, "44440": 1117048320.0, "44445": 1117048320.0, "44450": 1117048320.0, "44455": 1117048320.0, "44460": 1117048320.0, "44465": 1117048320.0, "44470": 1117048320.0, "44475": 1117048320.0, "44480": 1117048320.0, "44485": 1117048320.0, "44490": 1117048320.0, "44495": 1117048320.0, "44500": 1117048320.0, "44505": 1117048320.0, "44510": 1117048320.0, "44515": 1117048320.0, "44520": 1117048320.0, "44525": 1117048320.0, "44530": 1117048320.0, "44535": 1117048320.0, "44540": 1117048320.0, "44545": 1117048320.0, "44550": 1117048320.0, "44555": 1117048320.0, "44560": 1117048320.0, "44565": 1117048320.0, "44570": 1117048320.0, "44575": 1117048320.0, "44580": 1117048320.0, "44585": 1117048320.0, "44590": 1117048320.0, "44595": 1117048320.0, "44600": 1117048320.0, "44605": 1117048320.0, "44610": 1117048320.0, "44615": 1117048320.0, "44620": 1117048320.0, "44625": 1117048320.0, "44630": 1117048320.0, "44635": 1117048320.0, "44640": 1117048320.0, "44645": 1117048320.0, "44650": 1117048320.0, "44655": 1117048320.0, "44660": 1117048320.0, "44665": 1117048320.0, "44670": 1117048320.0, "44675": 1117048320.0, "44680": 1117048320.0, "44685": 1117048320.0, "44690": 1117048320.0, "44695": 1117048320.0, "44700": 1117048320.0, "44705": 1117048320.0, "44710": 1117048320.0, "44715": 1117048320.0, "44720": 1117048320.0, "44725": 1117048320.0, "44730": 1117048320.0, "44735": 1117048320.0, "44740": 1117048320.0, "44745": 1117048320.0, "44750": 1117048320.0, "44755": 1117048320.0, "44760": 1117048320.0, "44765": 1117048320.0, "44770": 1117048320.0, "44775": 1117048320.0, "44780": 1117048320.0, "44785": 1117048320.0, "44790": 1117048320.0, "44795": 1117048320.0, "44800": 1117048320.0, "44805": 1117048320.0, "44810": 1117048320.0, "44815": 1117048320.0, "44820": 1117048320.0, "44825": 1117048320.0, "44830": 1117048320.0, "44835": 1117048320.0, "44840": 1117048320.0, "44845": 1117048320.0, "44850": 1117048320.0, "44855": 1117048320.0, "44860": 1117048320.0, "44865": 1117048320.0, "44870": 1117048320.0, "44875": 1117048320.0, "44880": 1117048320.0, "44885": 1117048320.0, "44890": 1117048320.0, "44895": 1117048320.0, "44900": 1117048320.0, "44905": 1117048320.0, "44910": 1117048320.0, "44915": 1117048320.0, "44920": 1117048320.0, "44925": 1117048320.0, "44930": 1117048320.0, "44935": 1117048320.0, "44940": 1117048320.0, "44945": 1117048320.0, "44950": 1117048320.0, "44955": 1117048320.0, "44960": 1117048320.0, "44965": 1117048320.0, "44970": 1117048320.0, "44975": 1117048320.0, "44980": 1117048320.0, "44985": 1117048320.0, "44990": 1117048320.0, "44995": 1117048320.0, "45000": 1117048320.0, "45005": 1117048320.0, "45010": 1117048320.0, "45015": 1117048320.0, "45020": 1117048320.0, "45025": 1117048320.0, "45030": 1117048320.0, "45035": 1117048320.0, "45040": 1117048320.0, "45045": 1117048320.0, "45050": 1117048320.0, "45055": 1117048320.0, "45060": 1117048320.0, "45065": 1117048320.0, "45070": 1117048320.0, "45075": 1117048320.0, "45080": 1117048320.0, "45085": 1117048320.0, "45090": 1117048320.0, "45095": 1117048320.0, "45100": 1117048320.0, "45105": 1117048320.0, "45110": 1117048320.0, "45115": 1117048320.0, "45120": 1117048320.0, "45125": 1117048320.0, "45130": 1117048320.0, "45135": 1117048320.0, "45140": 1117048320.0, "45145": 1117048320.0, "45150": 1117048320.0, "45155": 1117048320.0, "45160": 1117048320.0, "45165": 1117048320.0, "45170": 1117048320.0, "45175": 1117048320.0, "45180": 1117048320.0, "45185": 
1117048320.0, "45190": 1117048320.0, "45195": 1117048320.0, "45200": 1117048320.0, "45205": 1117048320.0, "45210": 1117048320.0, "45215": 1117048320.0, "45220": 1117048320.0, "45225": 1117048320.0, "45230": 1117048320.0, "45235": 1117048320.0, "45240": 1117048320.0, "45245": 1117048320.0, "45250": 1117048320.0, "45255": 1117048320.0, "45260": 1117048320.0, "45265": 1117048320.0, "45270": 1117048320.0, "45275": 1117048320.0, "45280": 1117048320.0, "45285": 1117048320.0, "45290": 1117048320.0, "45295": 1117048320.0, "45300": 1117048320.0, "45305": 1117048320.0, "45310": 1117048320.0, "45315": 1117048320.0, "45320": 1117048320.0, "45325": 1117048320.0, "45330": 1117048320.0, "45335": 1117048320.0, "45340": 1117048320.0, "45345": 1117048320.0, "45350": 1117048320.0, "45355": 1117048320.0, "45360": 1117048320.0, "45365": 1117048320.0, "45370": 1117048320.0, "45375": 1117048320.0, "45380": 1117048320.0, "45385": 1117048320.0, "45390": 1117048320.0, "45395": 1117048320.0, "45400": 1117048320.0, "45405": 1117048320.0, "45410": 1117048320.0, "45415": 1117048320.0, "45420": 1117048320.0, "45425": 1117048320.0, "45430": 1117048320.0, "45435": 1117048320.0, "45440": 1117048320.0, "45445": 1117048320.0, "45450": 1117048320.0, "45455": 1117048320.0, "45460": 1117048320.0, "45465": 1117048320.0, "45470": 1117048320.0, "45475": 1117048320.0, "45480": 1117048320.0, "45485": 1117048320.0, "45490": 1117048320.0, "45495": 1117048320.0, "45500": 1117048320.0, "45505": 1117048320.0, "45510": 1117048320.0, "45515": 1117048320.0, "45520": 1117048320.0, "45525": 1117048320.0, "45530": 1117048320.0, "45535": 1117048320.0, "45540": 1117048320.0, "45545": 1117048320.0, "45550": 1117048320.0, "45555": 1117048320.0, "45560": 1117048320.0, "45565": 1117048320.0, "45570": 1117048320.0, "45575": 1117048320.0, "45580": 1117048320.0, "45585": 1117048320.0, "45590": 1117048320.0, "45595": 1117048320.0, "45600": 1117048320.0, "45605": 1117048320.0, "45610": 1117048320.0, "45615": 1117048320.0, "45620": 1117048320.0, "45625": 1117048320.0, "45630": 1117048320.0, "45635": 1117048320.0, "45640": 1117048320.0, "45645": 1117048320.0, "45650": 1117048320.0, "45655": 1117048320.0, "45660": 1117048320.0, "45665": 1117048320.0, "45670": 1117048320.0, "45675": 1117048320.0, "45680": 1117048320.0, "45685": 1117048320.0, "45690": 1117048320.0, "45695": 1117048320.0, "45700": 1117048320.0, "45705": 1117048320.0, "45710": 1117048320.0, "45715": 1117048320.0, "45720": 1117048320.0, "45725": 1117048320.0, "45730": 1117048320.0, "45735": 1117048320.0, "45740": 1117048320.0, "45745": 1117048320.0, "45750": 1117048320.0, "45755": 1117048320.0, "45760": 1117048320.0, "45765": 1117048320.0, "45770": 1117048320.0, "45775": 1117048320.0, "45780": 1117048320.0, "45785": 1117048320.0, "45790": 1117048320.0, "45795": 1117048320.0, "45800": 1117048320.0, "45805": 1117048320.0, "45810": 1117048320.0, "45815": 1117048320.0, "45820": 1117048320.0, "45825": 1117048320.0, "45830": 1117048320.0, "45835": 1117048320.0, "45840": 1117048320.0, "45845": 1117048320.0, "45850": 1117048320.0, "45855": 1117048320.0, "45860": 1117048320.0, "45865": 1117048320.0, "45870": 1117048320.0, "45875": 1117048320.0, "45880": 1117048320.0, "45885": 1117048320.0, "45890": 1117048320.0, "45895": 1117048320.0, "45900": 1117048320.0, "45905": 1117048320.0, "45910": 1117048320.0, "45915": 1117048320.0, "45920": 1117048320.0, "45925": 1117048320.0, "45930": 1117048320.0, "45935": 1117048320.0, "45940": 1117048320.0, "45945": 1117048320.0, "45950": 1117048320.0, "45955": 
1117048320.0, "45960": 1117048320.0, "45965": 1117048320.0, "45970": 1117048320.0, "45975": 1117048320.0, "45980": 1117048320.0, "45985": 1117048320.0, "45990": 1117048320.0, "45995": 1117048320.0, "46000": 1117048320.0, "46005": 1117048320.0, "46010": 1117048320.0, "46015": 1117048320.0, "46020": 1117048320.0, "46025": 1117048320.0, "46030": 1117048320.0, "46035": 1117048320.0, "46040": 1117048320.0, "46045": 1117048320.0, "46050": 1117048320.0, "46055": 1117048320.0, "46060": 1117048320.0, "46065": 1117048320.0, "46070": 1117048320.0, "46075": 1117048320.0, "46080": 1117048320.0, "46085": 1117048320.0, "46090": 1117048320.0, "46095": 1117048320.0, "46100": 1117048320.0, "46105": 1117048320.0, "46110": 1117048320.0, "46115": 1117048320.0, "46120": 1117048320.0, "46125": 1117048320.0, "46130": 1117048320.0, "46135": 1117048320.0, "46140": 1117048320.0, "46145": 1117048320.0, "46150": 1117048320.0, "46155": 1117048320.0, "46160": 1117048320.0, "46165": 1117048320.0, "46170": 1117048320.0, "46175": 1117048320.0, "46180": 1117048320.0, "46185": 1117048320.0, "46190": 1117048320.0, "46195": 1117048320.0, "46200": 1117048320.0, "46205": 1117048320.0, "46210": 1117048320.0, "46215": 1117048320.0, "46220": 1117048320.0, "46225": 1117048320.0, "46230": 1117048320.0, "46235": 1117048320.0, "46240": 1117048320.0, "46245": 1117048320.0, "46250": 1117048320.0, "46255": 1117048320.0, "46260": 1117048320.0, "46265": 1117048320.0, "46270": 1117048320.0, "46275": 1117048320.0, "46280": 1117048320.0, "46285": 1117048320.0, "46290": 1117048320.0, "46295": 1117048320.0, "46300": 1117048320.0, "46305": 1117048320.0, "46310": 1117048320.0, "46315": 1117048320.0, "46320": 1117048320.0, "46325": 1117048320.0, "46330": 1117048320.0, "46335": 1117048320.0, "46340": 1117048320.0, "46345": 1117048320.0, "46350": 1117048320.0, "46355": 1117048320.0, "46360": 1117048320.0, "46365": 1117048320.0, "46370": 1117048320.0, "46375": 1117048320.0, "46380": 1117048320.0, "46385": 1117048320.0, "46390": 1117048320.0, "46395": 1117048320.0, "46400": 1117048320.0, "46405": 1117048320.0, "46410": 1117048320.0, "46415": 1117048320.0, "46420": 1117048320.0, "46425": 1117048320.0, "46430": 1117048320.0, "46435": 1117048320.0, "46440": 1117048320.0, "46445": 1117048320.0, "46450": 1117048320.0, "46455": 1117048320.0, "46460": 1117048320.0, "46465": 1117048320.0, "46470": 1117048320.0, "46475": 1117048320.0, "46480": 1117048320.0, "46485": 1117048320.0, "46490": 1117048320.0, "46495": 1117048320.0, "46500": 1117048320.0, "46505": 1117048320.0, "46510": 1117048320.0, "46515": 1117048320.0, "46520": 1117048320.0, "46525": 1117048320.0, "46530": 1117048320.0, "46535": 1117048320.0, "46540": 1117048320.0, "46545": 1117048320.0, "46550": 1117048320.0, "46555": 1117048320.0, "46560": 1117048320.0, "46565": 1117048320.0, "46570": 1117048320.0, "46575": 1117048320.0, "46580": 1117048320.0, "46585": 1117048320.0, "46590": 1117048320.0, "46595": 1117048320.0, "46600": 1117048320.0, "46605": 1117048320.0, "46610": 1117048320.0, "46615": 1117048320.0, "46620": 1117048320.0, "46625": 1117048320.0, "46630": 1117048320.0, "46635": 1117048320.0, "46640": 1117048320.0, "46645": 1117048320.0, "46650": 1117048320.0, "46655": 1117048320.0, "46660": 1117048320.0, "46665": 1117048320.0, "46670": 1117048320.0, "46675": 1117048320.0, "46680": 1117048320.0, "46685": 1117048320.0, "46690": 1117048320.0, "46695": 1117048320.0, "46700": 1117048320.0, "46705": 1117048320.0, "46710": 1117048320.0, "46715": 1117048320.0, "46720": 1117048320.0, "46725": 
1117048320.0, "46730": 1117048320.0, "46735": 1117048320.0, "46740": 1117048320.0, "46745": 1117048320.0, "46750": 1117048320.0, "46755": 1117048320.0, "46760": 1117048320.0, "46765": 1117048320.0, "46770": 1117048320.0, "46775": 1117048320.0, "46780": 1117048320.0, "46785": 1117048320.0, "46790": 1117048320.0, "46795": 1117048320.0, "46800": 1117048320.0, "46805": 1117048320.0, "46810": 1117048320.0, "46815": 1117048320.0, "46820": 1117048320.0, "46825": 1117048320.0, "46830": 1117048320.0, "46835": 1117048320.0, "46840": 1117048320.0, "46845": 1117048320.0, "46850": 1117048320.0, "46855": 1117048320.0, "46860": 1117048320.0, "46865": 1117048320.0, "46870": 1117048320.0, "46875": 1117048320.0, "46880": 1117048320.0, "46885": 1117048320.0, "46890": 1117048320.0, "46895": 1117048320.0, "46900": 1117048320.0, "46905": 1117048320.0, "46910": 1117048320.0, "46915": 1117048320.0, "46920": 1117048320.0, "46925": 1117048320.0, "46930": 1117048320.0, "46935": 1117048320.0, "46940": 1117048320.0, "46945": 1117048320.0, "46950": 1117048320.0, "46955": 1117048320.0, "46960": 1117048320.0, "46965": 1117048320.0, "46970": 1117048320.0, "46975": 1117048320.0, "46980": 1117048320.0, "46985": 1117048320.0, "46990": 1117048320.0, "46995": 1117048320.0, "47000": 1117048320.0, "47005": 1117048320.0, "47010": 1117048320.0, "47015": 1117048320.0, "47020": 1117048320.0, "47025": 1117048320.0, "47030": 1117048320.0, "47035": 1117048320.0, "47040": 1117048320.0, "47045": 1117048320.0, "47050": 1117048320.0, "47055": 1117048320.0, "47060": 1117048320.0, "47065": 1117048320.0, "47070": 1117048320.0, "47075": 1117048320.0, "47080": 1117048320.0, "47085": 1117048320.0, "47090": 1117048320.0, "47095": 1117048320.0, "47100": 1117048320.0, "47105": 1117048320.0, "47110": 1117048320.0, "47115": 1117048320.0, "47120": 1117048320.0, "47125": 1117048320.0, "47130": 1117048320.0, "47135": 1117048320.0, "47140": 1117048320.0, "47145": 1117048320.0, "47150": 1117048320.0, "47155": 1117048320.0, "47160": 1117048320.0, "47165": 1117048320.0, "47170": 1117048320.0, "47175": 1117048320.0, "47180": 1117048320.0, "47185": 1117048320.0, "47190": 1117048320.0, "47195": 1117048320.0, "47200": 1117048320.0, "47205": 1117048320.0, "47210": 1117048320.0, "47215": 1117048320.0, "47220": 1117048320.0, "47225": 1117048320.0, "47230": 1117048320.0, "47235": 1117048320.0, "47240": 1117048320.0, "47245": 1117048320.0, "47250": 1117048320.0, "47255": 1117048320.0, "47260": 1117048320.0, "47265": 1117048320.0, "47270": 1117048320.0, "47275": 1117048320.0, "47280": 1117048320.0, "47285": 1117048320.0, "47290": 1117048320.0, "47295": 1117048320.0, "47300": 1117048320.0, "47305": 1117048320.0, "47310": 1117048320.0, "47315": 1117048320.0, "47320": 1117048320.0, "47325": 1117048320.0, "47330": 1117048320.0, "47335": 1117048320.0, "47340": 1117048320.0, "47345": 1117048320.0, "47350": 1117048320.0, "47355": 1117048320.0, "47360": 1117048320.0, "47365": 1117048320.0, "47370": 1117048320.0, "47375": 1117048320.0, "47380": 1117048320.0, "47385": 1117048320.0, "47390": 1117048320.0, "47395": 1117048320.0, "47400": 1117048320.0, "47405": 1117048320.0, "47410": 1117048320.0, "47415": 1117048320.0, "47420": 1117048320.0, "47425": 1117048320.0, "47430": 1117048320.0, "47435": 1117048320.0, "47440": 1117048320.0, "47445": 1117048320.0, "47450": 1117048320.0, "47455": 1117048320.0, "47460": 1117048320.0, "47465": 1117048320.0, "47470": 1117048320.0, "47475": 1117048320.0, "47480": 1117048320.0, "47485": 1117048320.0, "47490": 1117048320.0, "47495": 
1117048320.0, "47500": 1117048320.0, "47505": 1117048320.0, "47510": 1117048320.0, "47515": 1117048320.0, "47520": 1117048320.0, "47525": 1117048320.0, "47530": 1117048320.0, "47535": 1117048320.0, "47540": 1117048320.0, "47545": 1117048320.0, "47550": 1117048320.0, "47555": 1117048320.0, "47560": 1117048320.0, "47565": 1117048320.0, "47570": 1117048320.0, "47575": 1117048320.0, "47580": 1117048320.0, "47585": 1117048320.0, "47590": 1117048320.0, "47595": 1117048320.0, "47600": 1117048320.0, "47605": 1117048320.0, "47610": 1117048320.0, "47615": 1117048320.0, "47620": 1117048320.0, "47625": 1117048320.0, "47630": 1117048320.0, "47635": 1117048320.0, "47640": 1117048320.0, "47645": 1117048320.0, "47650": 1117048320.0, "47655": 1117048320.0, "47660": 1117048320.0, "47665": 1117048320.0, "47670": 1117048320.0, "47675": 1117048320.0, "47680": 1117048320.0, "47685": 1117048320.0, "47690": 1117048320.0, "47695": 1117048320.0, "47700": 1117048320.0, "47705": 1117048320.0, "47710": 1117048320.0, "47715": 1117048320.0, "47720": 1117048320.0, "47725": 1117048320.0, "47730": 1117048320.0, "47735": 1117048320.0, "47740": 1117048320.0, "47745": 1117048320.0, "47750": 1117048320.0, "47755": 1117048320.0, "47760": 1117048320.0, "47765": 1117048320.0, "47770": 1117048320.0, "47775": 1117048320.0, "47780": 1117048320.0, "47785": 1117048320.0, "47790": 1117048320.0, "47795": 1117048320.0, "47800": 1117048320.0, "47805": 1117048320.0, "47810": 1117048320.0, "47815": 1117048320.0, "47820": 1117048320.0, "47825": 1117048320.0, "47830": 1117048320.0, "47835": 1117048320.0, "47840": 1117048320.0, "47845": 1117048320.0, "47850": 1117048320.0, "47855": 1117048320.0, "47860": 1117048320.0, "47865": 1117048320.0, "47870": 1117048320.0, "47875": 1117048320.0, "47880": 1117048320.0, "47885": 1117048320.0, "47890": 1117048320.0, "47895": 1117048320.0, "47900": 1117048320.0, "47905": 1117048320.0, "47910": 1117048320.0, "47915": 1117048320.0, "47920": 1117048320.0, "47925": 1117048320.0, "47930": 1117048320.0, "47935": 1117048320.0, "47940": 1117048320.0, "47945": 1117048320.0, "47950": 1117048320.0, "47955": 1117048320.0, "47960": 1117048320.0, "47965": 1117048320.0, "47970": 1117048320.0, "47975": 1117048320.0, "47980": 1117048320.0, "47985": 1117048320.0, "47990": 1117048320.0, "47995": 1117048320.0, "48000": 1117048320.0, "48005": 1117048320.0, "48010": 1117048320.0, "48015": 1117048320.0, "48020": 1117048320.0, "48025": 1117048320.0, "48030": 1117048320.0, "48035": 1117048320.0, "48040": 1117048320.0, "48045": 1117048320.0, "48050": 1117048320.0, "48055": 1117048320.0, "48060": 1117048320.0, "48065": 1117048320.0, "48070": 1117048320.0, "48075": 1117048320.0, "48080": 1117048320.0, "48085": 1117048320.0, "48090": 1117048320.0, "48095": 1117048320.0, "48100": 1117048320.0, "48105": 1117048320.0, "48110": 1117048320.0, "48115": 1117048320.0, "48120": 1117048320.0, "48125": 1117048320.0, "48130": 1117048320.0, "48135": 1117048320.0, "48140": 1117048320.0, "48145": 1117048320.0, "48150": 1117048320.0, "48155": 1117048320.0, "48160": 1117048320.0, "48165": 1117048320.0, "48170": 1117048320.0, "48175": 1117048320.0, "48180": 1117048320.0, "48185": 1117048320.0, "48190": 1117048320.0, "48195": 1117048320.0, "48200": 1117048320.0, "48205": 1117048320.0, "48210": 1117048320.0, "48215": 1117048320.0, "48220": 1117048320.0, "48225": 1117048320.0, "48230": 1117048320.0, "48235": 1117048320.0, "48240": 1117048320.0, "48245": 1117048320.0, "48250": 1117048320.0, "48255": 1117048320.0, "48260": 1117048320.0, "48265": 
1117048320.0, "48270": 1117048320.0, "48275": 1117048320.0, "48280": 1117048320.0, "48285": 1117048320.0, "48290": 1117048320.0, "48295": 1117048320.0, "48300": 1117048320.0, "48305": 1117048320.0, "48310": 1117048320.0, "48315": 1117048320.0, "48320": 1117048320.0, "48325": 1117048320.0, "48330": 1117048320.0, "48335": 1117048320.0, "48340": 1117048320.0, "48345": 1117048320.0, "48350": 1117048320.0, "48355": 1117048320.0, "48360": 1117048320.0, "48365": 1117048320.0, "48370": 1117048320.0, "48375": 1117048320.0, "48380": 1117048320.0, "48385": 1117048320.0, "48390": 1117048320.0, "48395": 1117048320.0, "48400": 1117048320.0, "48405": 1117048320.0, "48410": 1117048320.0, "48415": 1117048320.0, "48420": 1117048320.0, "48425": 1117048320.0, "48430": 1117048320.0, "48435": 1117048320.0, "48440": 1117048320.0, "48445": 1117048320.0, "48450": 1117048320.0, "48455": 1117048320.0, "48460": 1117048320.0, "48465": 1117048320.0, "48470": 1117048320.0, "48475": 1117048320.0, "48480": 1117048320.0, "48485": 1117048320.0, "48490": 1117048320.0, "48495": 1117048320.0, "48500": 1117048320.0, "48505": 1117048320.0, "48510": 1117048320.0, "48515": 1117048320.0, "48520": 1117048320.0, "48525": 1117048320.0, "48530": 1117048320.0, "48535": 1117048320.0, "48540": 1117048320.0, "48545": 1117048320.0, "48550": 1117048320.0, "48555": 1117048320.0, "48560": 1117048320.0, "48565": 1117048320.0, "48570": 1117048320.0, "48575": 1117048320.0, "48580": 1117048320.0, "48585": 1117048320.0, "48590": 1117048320.0, "48595": 1117048320.0, "48600": 1117048320.0, "48605": 1117048320.0, "48610": 1117048320.0, "48615": 1117048320.0, "48620": 1117048320.0, "48625": 1117048320.0, "48630": 1117048320.0, "48635": 1117048320.0, "48640": 1117048320.0, "48645": 1117048320.0, "48650": 1117048320.0, "48655": 1117048320.0, "48660": 1117048320.0, "48665": 1117048320.0, "48670": 1117048320.0, "48675": 1117048320.0, "48680": 1117048320.0, "48685": 1117048320.0, "48690": 1117048320.0, "48695": 1117048320.0, "48700": 1117048320.0, "48705": 1117048320.0, "48710": 1117048320.0, "48715": 1117048320.0, "48720": 1117048320.0, "48725": 1117048320.0, "48730": 1117048320.0, "48735": 1117048320.0, "48740": 1117048320.0, "48745": 1117048320.0, "48750": 1117048320.0, "48755": 1117048320.0, "48760": 1117048320.0, "48765": 1117048320.0, "48770": 1117048320.0, "48775": 1117048320.0, "48780": 1117048320.0, "48785": 1117048320.0, "48790": 1117048320.0, "48795": 1117048320.0, "48800": 1117048320.0, "48805": 1117048320.0, "48810": 1117048320.0, "48815": 1117048320.0, "48820": 1117048320.0, "48825": 1117048320.0, "48830": 1117048320.0, "48835": 1117048320.0, "48840": 1117048320.0, "48845": 1117048320.0, "48850": 1117048320.0, "48855": 1117048320.0, "48860": 1117048320.0, "48865": 1117048320.0, "48870": 1117048320.0, "48875": 1117048320.0, "48880": 1117048320.0, "48885": 1117048320.0, "48890": 1117048320.0, "48895": 1117048320.0, "48900": 1117048320.0, "48905": 1117048320.0, "48910": 1117048320.0, "48915": 1117048320.0, "48920": 1117048320.0, "48925": 1117048320.0, "48930": 1117048320.0, "48935": 1117048320.0, "48940": 1117048320.0, "48945": 1117048320.0, "48950": 1117048320.0, "48955": 1117048320.0, "48960": 1117048320.0, "48965": 1117048320.0, "48970": 1117048320.0, "48975": 1117048320.0, "48980": 1117048320.0, "48985": 1117048320.0, "48990": 1117048320.0, "48995": 1117048320.0, "49000": 1117048320.0, "49005": 1117048320.0, "49010": 1117048320.0, "49015": 1117048320.0, "49020": 1117048320.0, "49025": 1117048320.0, "49030": 1117048320.0, "49035": 
1117048320.0, "49040": 1117048320.0, "49045": 1117048320.0, "49050": 1117048320.0, "49055": 1117048320.0, "49060": 1117048320.0, "49065": 1117048320.0, "49070": 1117048320.0, "49075": 1117048320.0, "49080": 1117048320.0, "49085": 1117048320.0, "49090": 1117048320.0, "49095": 1117048320.0, "49100": 1117048320.0, "49105": 1117048320.0, "49110": 1117048320.0, "49115": 1117048320.0, "49120": 1117048320.0, "49125": 1117048320.0, "49130": 1117048320.0, "49135": 1117048320.0, "49140": 1117048320.0, "49145": 1117048320.0, "49150": 1117048320.0, "49155": 1117048320.0, "49160": 1117048320.0, "49165": 1117048320.0, "49170": 1117048320.0, "49175": 1117048320.0, "49180": 1117048320.0, "49185": 1117048320.0, "49190": 1117048320.0, "49195": 1117048320.0, "49200": 1117048320.0, "49205": 1117048320.0, "49210": 1117048320.0, "49215": 1117048320.0, "49220": 1117048320.0, "49225": 1117048320.0, "49230": 1117048320.0, "49235": 1117048320.0, "49240": 1117048320.0, "49245": 1117048320.0, "49250": 1117048320.0, "49255": 1117048320.0, "49260": 1117048320.0, "49265": 1117048320.0, "49270": 1117048320.0, "49275": 1117048320.0, "49280": 1117048320.0, "49285": 1117048320.0, "49290": 1117048320.0, "49295": 1117048320.0, "49300": 1117048320.0, "49305": 1117048320.0, "49310": 1117048320.0, "49315": 1117048320.0, "49320": 1117048320.0, "49325": 1117048320.0, "49330": 1117048320.0, "49335": 1117048320.0, "49340": 1117048320.0, "49345": 1117048320.0, "49350": 1117048320.0, "49355": 1117048320.0, "49360": 1117048320.0, "49365": 1117048320.0, "49370": 1117048320.0, "49375": 1117048320.0, "49380": 1117048320.0, "49385": 1117048320.0, "49390": 1117048320.0, "49395": 1117048320.0, "49400": 1117048320.0, "49405": 1117048320.0, "49410": 1117048320.0, "49415": 1117048320.0, "49420": 1117048320.0, "49425": 1117048320.0, "49430": 1117048320.0, "49435": 1117048320.0, "49440": 1117048320.0, "49445": 1117048320.0, "49450": 1117048320.0, "49455": 1117048320.0, "49460": 1117048320.0, "49465": 1117048320.0, "49470": 1117048320.0, "49475": 1117048320.0, "49480": 1117048320.0, "49485": 1117048320.0, "49490": 1117048320.0, "49495": 1117048320.0, "49500": 1117048320.0, "49505": 1117048320.0, "49510": 1117048320.0, "49515": 1117048320.0, "49520": 1117048320.0, "49525": 1117048320.0, "49530": 1117048320.0, "49535": 1117048320.0, "49540": 1117048320.0, "49545": 1117048320.0, "49550": 1117048320.0, "49555": 1117048320.0, "49560": 1117048320.0, "49565": 1117048320.0, "49570": 1117048320.0, "49575": 1117048320.0, "49580": 1117048320.0, "49585": 1117048320.0, "49590": 1117048320.0, "49595": 1117048320.0, "49600": 1117048320.0, "49605": 1117048320.0, "49610": 1117048320.0, "49615": 1117048320.0, "49620": 1117048320.0, "49625": 1117048320.0, "49630": 1117048320.0, "49635": 1117048320.0, "49640": 1117048320.0, "49645": 1117048320.0, "49650": 1117048320.0, "49655": 1117048320.0, "49660": 1117048320.0, "49665": 1117048320.0, "49670": 1117048320.0, "49675": 1117048320.0, "49680": 1117048320.0, "49685": 1117048320.0, "49690": 1117048320.0, "49695": 1117048320.0, "49700": 1117048320.0, "49705": 1117048320.0, "49710": 1117048320.0, "49715": 1117048320.0, "49720": 1117048320.0, "49725": 1117048320.0, "49730": 1117048320.0, "49735": 1117048320.0, "49740": 1117048320.0, "49745": 1117048320.0, "49750": 1117048320.0, "49755": 1117048320.0, "49760": 1117048320.0, "49765": 1117048320.0, "49770": 1117048320.0, "49775": 1117048320.0, "49780": 1117048320.0, "49785": 1117048320.0, "49790": 1117048320.0, "49795": 1117048320.0, "49800": 1117048320.0, "49805": 
1117048320.0, "49810": 1117048320.0, "49815": 1117048320.0, "49820": 1117048320.0, "49825": 1117048320.0, "49830": 1117048320.0, "49835": 1117048320.0, "49840": 1117048320.0, "49845": 1117048320.0, "49850": 1117048320.0, "49855": 1117048320.0, "49860": 1117048320.0, "49865": 1117048320.0, "49870": 1117048320.0, "49875": 1117048320.0, "49880": 1117048320.0, "49885": 1117048320.0, "49890": 1117048320.0, "49895": 1117048320.0, "49900": 1117048320.0, "49905": 1117048320.0, "49910": 1117048320.0, "49915": 1117048320.0, "49920": 1117048320.0, "49925": 1117048320.0, "49930": 1117048320.0, "49935": 1117048320.0, "49940": 1117048320.0, "49945": 1117048320.0, "49950": 1117048320.0, "49955": 1117048320.0, "49960": 1117048320.0, "49965": 1117048320.0, "49970": 1117048320.0, "49975": 1117048320.0, "49980": 1117048320.0, "49985": 1117048320.0, "49990": 1117048320.0, "49995": 1117048320.0, "50000": 1117048320.0, "50005": 1117048320.0, "50010": 1117048320.0, "50015": 1117048320.0, "50020": 1117048320.0, "50025": 1117048320.0, "50030": 1117048320.0, "50035": 1117048320.0, "50040": 1117048320.0, "50045": 1117048320.0, "50050": 1117048320.0, "50055": 1117048320.0, "50060": 1117048320.0, "50065": 1117048320.0, "50070": 1117048320.0, "50075": 1117048320.0, "50080": 1117048320.0, "50085": 1117048320.0, "50090": 1117048320.0, "50095": 1117048320.0, "50100": 1117048320.0, "50105": 1117048320.0, "50110": 1117048320.0, "50115": 1117048320.0, "50120": 1117048320.0, "50125": 1117048320.0, "50130": 1117048320.0, "50135": 1117048320.0, "50140": 1117048320.0, "50145": 1117048320.0, "50150": 1117048320.0, "50155": 1117048320.0, "50160": 1117048320.0, "50165": 1117048320.0, "50170": 1117048320.0, "50175": 1117048320.0, "50180": 1117048320.0, "50185": 1117048320.0, "50190": 1117048320.0, "50195": 1117048320.0, "50200": 1117048320.0, "50205": 1117048320.0, "50210": 1117048320.0, "50215": 1117048320.0, "50220": 1117048320.0, "50225": 1117048320.0, "50230": 1117048320.0, "50235": 1117048320.0, "50240": 1117048320.0, "50245": 1117048320.0, "50250": 1117048320.0, "50255": 1117048320.0, "50260": 1117048320.0, "50265": 1117048320.0, "50270": 1117048320.0, "50275": 1117048320.0, "50280": 1117048320.0, "50285": 1117048320.0, "50290": 1117048320.0, "50295": 1117048320.0, "50300": 1117048320.0, "50305": 1117048320.0, "50310": 1117048320.0, "50315": 1117048320.0, "50320": 1117048320.0, "50325": 1117048320.0, "50330": 1117048320.0, "50335": 1117048320.0, "50340": 1117048320.0, "50345": 1117048320.0, "50350": 1117048320.0, "50355": 1117048320.0, "50360": 1117048320.0, "50365": 1117048320.0, "50370": 1117048320.0, "50375": 1117048320.0, "50380": 1117048320.0, "50385": 1117048320.0, "50390": 1117048320.0, "50395": 1117048320.0, "50400": 1117048320.0, "50405": 1117048320.0, "50410": 1117048320.0, "50415": 1117048320.0, "50420": 1117048320.0, "50425": 1117048320.0, "50430": 1117048320.0, "50435": 1117048320.0, "50440": 1117048320.0, "50445": 1117048320.0, "50450": 1117048320.0, "50455": 1117048320.0, "50460": 1117048320.0, "50465": 1117048320.0, "50470": 1117048320.0, "50475": 1117048320.0, "50480": 1117048320.0, "50485": 1117048320.0, "50490": 1117048320.0, "50495": 1117048320.0, "50500": 1117048320.0, "50505": 1117048320.0, "50510": 1117048320.0, "50515": 1117048320.0, "50520": 1117048320.0, "50525": 1117048320.0, "50530": 1117048320.0, "50535": 1117048320.0, "50540": 1117048320.0, "50545": 1117048320.0, "50550": 1117048320.0, "50555": 1117048320.0, "50560": 1117048320.0, "50565": 1117048320.0, "50570": 1117048320.0, "50575": 
1117048320.0, "50580": 1117048320.0, "50585": 1117048320.0, "50590": 1117048320.0, "50595": 1117048320.0, "50600": 1117048320.0, "50605": 1117048320.0, "50610": 1117048320.0, "50615": 1117048320.0, "50620": 1117048320.0, "50625": 1117048320.0, "50630": 1117048320.0, "50635": 1117048320.0, "50640": 1117048320.0, "50645": 1117048320.0, "50650": 1117048320.0, "50655": 1117048320.0, "50660": 1117048320.0, "50665": 1117048320.0, "50670": 1117048320.0, "50675": 1117048320.0, "50680": 1117048320.0, "50685": 1117048320.0, "50690": 1117048320.0, "50695": 1117048320.0, "50700": 1117048320.0, "50705": 1117048320.0, "50710": 1117048320.0, "50715": 1117048320.0, "50720": 1117048320.0, "50725": 1117048320.0, "50730": 1117048320.0, "50735": 1117048320.0, "50740": 1117048320.0, "50745": 1117048320.0, "50750": 1117048320.0, "50755": 1117048320.0, "50760": 1117048320.0, "50765": 1117048320.0, "50770": 1117048320.0, "50775": 1117048320.0, "50780": 1117048320.0, "50785": 1117048320.0, "50790": 1117048320.0, "50795": 1117048320.0, "50800": 1117048320.0, "50805": 1117048320.0, "50810": 1117048320.0, "50815": 1117048320.0, "50820": 1117048320.0, "50825": 1117048320.0, "50830": 1117048320.0, "50835": 1117048320.0, "50840": 1117048320.0, "50845": 1117048320.0, "50850": 1117048320.0, "50855": 1117048320.0, "50860": 1117048320.0}}, "iteration-time": {"start_step": 1, "end_step": 50860, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 0.45353, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 0.23215, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 0.23419, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 0.23443, "405": "nan", "410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 0.22917, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 0.23209, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 0.23677, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": 
"nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 0.23234, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 0.23488, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 0.22983, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 0.25297, "1105": "nan", "1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 0.24387, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 0.23937, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 0.2322, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 0.23242, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 0.23205, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 0.23299, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": "nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 0.23604, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 0.23043, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", 
"1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 0.23387, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 0.2415, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 0.24658, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 0.24527, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 0.23745, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", "2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 0.25435, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 0.23918, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 0.24163, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 0.25076, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 0.24498, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 0.2419, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": "nan", "3100": 0.24626, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": 
"nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 0.25339, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 0.2482, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 0.25065, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 0.24493, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 0.2488, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 0.24153, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": "nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 0.2409, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 0.24482, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 0.23841, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 0.2476, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 0.24304, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 0.23418, "4305": "nan", "4310": "nan", 
"4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 0.23495, "4405": "nan", "4410": "nan", "4415": "nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 0.24618, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 0.23596, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 0.24195, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 0.2339, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 0.23807, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 0.23053, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", "5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 0.2485, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 0.24192, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 0.23702, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 0.24164, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": 
"nan", "5495": "nan", "5500": 0.24122, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 0.23586, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 0.23901, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": "nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 0.23694, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 0.24062, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 0.24325, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 0.24061, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 0.24118, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 0.23432, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 0.23449, "6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 0.25099, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 0.23338, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": 
"nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 0.23696, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 0.24166, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 0.24098, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 0.24003, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": "nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 0.24468, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 0.24514, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 0.23508, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 0.24439, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 0.23704, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 0.23241, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 0.2362, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", "7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 0.23483, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", 
"7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 0.23364, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 0.23736, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 0.23571, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 0.23457, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 0.23875, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": "nan", "8395": "nan", "8400": 0.22943, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 0.22995, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 0.23584, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 0.23004, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 0.23994, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 0.23961, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 0.24847, "9005": "nan", "9010": "nan", "9015": "nan", 
"9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", "9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 0.24489, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 0.23915, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 0.23756, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 0.23213, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 0.23758, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 0.23889, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 0.24527, "9705": "nan", "9710": "nan", "9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 0.24016, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 0.23866, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 0.23392, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 0.23646, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", 
"10185": "nan", "10190": "nan", "10195": "nan", "10200": 0.23929, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 0.24596, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": "nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 0.24276, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 0.24373, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 0.24278, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 0.24419, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 0.24718, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 0.23731, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": "nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 0.24223, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 0.25148, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 0.23979, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": 
"nan", "11290": "nan", "11295": "nan", "11300": 0.23678, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 0.24987, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 0.24427, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": "nan", "11595": "nan", "11600": 0.24502, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 0.24705, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 0.23725, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 0.24873, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 0.24518, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 0.23865, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 0.23926, "12205": "nan", "12210": "nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 0.24854, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", 
"12390": "nan", "12395": "nan", "12400": 0.25086, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 0.24645, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 0.2462, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 0.2469, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": 0.25107, "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": 0.23638, "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": 0.23826, "13005": "nan", "13010": "nan", "13015": "nan", "13020": "nan", "13025": "nan", "13030": "nan", "13035": "nan", "13040": "nan", "13045": "nan", "13050": "nan", "13055": "nan", "13060": "nan", "13065": "nan", "13070": "nan", "13075": "nan", "13080": "nan", "13085": "nan", "13090": "nan", "13095": "nan", "13100": 0.24847, "13105": "nan", "13110": "nan", "13115": "nan", "13120": "nan", "13125": "nan", "13130": "nan", "13135": "nan", "13140": "nan", "13145": "nan", "13150": "nan", "13155": "nan", "13160": "nan", "13165": "nan", "13170": "nan", "13175": "nan", "13180": "nan", "13185": "nan", "13190": "nan", "13195": "nan", "13200": 0.24825, "13205": "nan", "13210": "nan", "13215": "nan", "13220": "nan", "13225": "nan", "13230": "nan", "13235": "nan", "13240": "nan", "13245": "nan", "13250": "nan", "13255": "nan", "13260": "nan", "13265": "nan", "13270": "nan", "13275": "nan", "13280": "nan", "13285": "nan", "13290": "nan", "13295": "nan", "13300": 0.25015, "13305": "nan", "13310": "nan", "13315": "nan", "13320": "nan", "13325": "nan", "13330": "nan", "13335": "nan", "13340": "nan", "13345": "nan", "13350": "nan", "13355": "nan", "13360": "nan", "13365": "nan", "13370": "nan", "13375": "nan", "13380": "nan", "13385": "nan", "13390": "nan", "13395": "nan", "13400": 0.24102, "13405": "nan", "13410": "nan", "13415": "nan", "13420": "nan", "13425": "nan", "13430": "nan", "13435": "nan", "13440": "nan", "13445": "nan", "13450": "nan", "13455": "nan", "13460": "nan", "13465": "nan", "13470": "nan", "13475": "nan", "13480": "nan", "13485": "nan", "13490": 
"nan", "13495": "nan", "13500": 0.25301, "13505": "nan", "13510": "nan", "13515": "nan", "13520": "nan", "13525": "nan", "13530": "nan", "13535": "nan", "13540": "nan", "13545": "nan", "13550": "nan", "13555": "nan", "13560": "nan", "13565": "nan", "13570": "nan", "13575": "nan", "13580": "nan", "13585": "nan", "13590": "nan", "13595": "nan", "13600": 0.24321, "13605": "nan", "13610": "nan", "13615": "nan", "13620": "nan", "13625": "nan", "13630": "nan", "13635": "nan", "13640": "nan", "13645": "nan", "13650": "nan", "13655": "nan", "13660": "nan", "13665": "nan", "13670": "nan", "13675": "nan", "13680": "nan", "13685": "nan", "13690": "nan", "13695": "nan", "13700": 0.24289, "13705": "nan", "13710": "nan", "13715": "nan", "13720": "nan", "13725": "nan", "13730": "nan", "13735": "nan", "13740": "nan", "13745": "nan", "13750": "nan", "13755": "nan", "13760": "nan", "13765": "nan", "13770": "nan", "13775": "nan", "13780": "nan", "13785": "nan", "13790": "nan", "13795": "nan", "13800": 0.23579, "13805": "nan", "13810": "nan", "13815": "nan", "13820": "nan", "13825": "nan", "13830": "nan", "13835": "nan", "13840": "nan", "13845": "nan", "13850": "nan", "13855": "nan", "13860": "nan", "13865": "nan", "13870": "nan", "13875": "nan", "13880": "nan", "13885": "nan", "13890": "nan", "13895": "nan", "13900": 0.24682, "13905": "nan", "13910": "nan", "13915": "nan", "13920": "nan", "13925": "nan", "13930": "nan", "13935": "nan", "13940": "nan", "13945": "nan", "13950": "nan", "13955": "nan", "13960": "nan", "13965": "nan", "13970": "nan", "13975": "nan", "13980": "nan", "13985": "nan", "13990": "nan", "13995": "nan", "14000": 0.23975, "14005": "nan", "14010": "nan", "14015": "nan", "14020": "nan", "14025": "nan", "14030": "nan", "14035": "nan", "14040": "nan", "14045": "nan", "14050": "nan", "14055": "nan", "14060": "nan", "14065": "nan", "14070": "nan", "14075": "nan", "14080": "nan", "14085": "nan", "14090": "nan", "14095": "nan", "14100": 0.24593, "14105": "nan", "14110": "nan", "14115": "nan", "14120": "nan", "14125": "nan", "14130": "nan", "14135": "nan", "14140": "nan", "14145": "nan", "14150": "nan", "14155": "nan", "14160": "nan", "14165": "nan", "14170": "nan", "14175": "nan", "14180": "nan", "14185": "nan", "14190": "nan", "14195": "nan", "14200": 0.24654, "14205": "nan", "14210": "nan", "14215": "nan", "14220": "nan", "14225": "nan", "14230": "nan", "14235": "nan", "14240": "nan", "14245": "nan", "14250": "nan", "14255": "nan", "14260": "nan", "14265": "nan", "14270": "nan", "14275": "nan", "14280": "nan", "14285": "nan", "14290": "nan", "14295": "nan", "14300": 0.24465, "14305": "nan", "14310": "nan", "14315": "nan", "14320": "nan", "14325": "nan", "14330": "nan", "14335": "nan", "14340": "nan", "14345": "nan", "14350": "nan", "14355": "nan", "14360": "nan", "14365": "nan", "14370": "nan", "14375": "nan", "14380": "nan", "14385": "nan", "14390": "nan", "14395": "nan", "14400": 0.23359, "14405": "nan", "14410": "nan", "14415": "nan", "14420": "nan", "14425": "nan", "14430": "nan", "14435": "nan", "14440": "nan", "14445": "nan", "14450": "nan", "14455": "nan", "14460": "nan", "14465": "nan", "14470": "nan", "14475": "nan", "14480": "nan", "14485": "nan", "14490": "nan", "14495": "nan", "14500": 0.24298, "14505": "nan", "14510": "nan", "14515": "nan", "14520": "nan", "14525": "nan", "14530": "nan", "14535": "nan", "14540": "nan", "14545": "nan", "14550": "nan", "14555": "nan", "14560": "nan", "14565": "nan", "14570": "nan", "14575": "nan", "14580": "nan", "14585": "nan", "14590": "nan", 
"14595": "nan", "14600": 0.2318, "14605": "nan", "14610": "nan", "14615": "nan", "14620": "nan", "14625": "nan", "14630": "nan", "14635": "nan", "14640": "nan", "14645": "nan", "14650": "nan", "14655": "nan", "14660": "nan", "14665": "nan", "14670": "nan", "14675": "nan", "14680": "nan", "14685": "nan", "14690": "nan", "14695": "nan", "14700": 0.23144, "14705": "nan", "14710": "nan", "14715": "nan", "14720": "nan", "14725": "nan", "14730": "nan", "14735": "nan", "14740": "nan", "14745": "nan", "14750": "nan", "14755": "nan", "14760": "nan", "14765": "nan", "14770": "nan", "14775": "nan", "14780": "nan", "14785": "nan", "14790": "nan", "14795": "nan", "14800": 0.22979, "14805": "nan", "14810": "nan", "14815": "nan", "14820": "nan", "14825": "nan", "14830": "nan", "14835": "nan", "14840": "nan", "14845": "nan", "14850": "nan", "14855": "nan", "14860": "nan", "14865": "nan", "14870": "nan", "14875": "nan", "14880": "nan", "14885": "nan", "14890": "nan", "14895": "nan", "14900": 0.23252, "14905": "nan", "14910": "nan", "14915": "nan", "14920": "nan", "14925": "nan", "14930": "nan", "14935": "nan", "14940": "nan", "14945": "nan", "14950": "nan", "14955": "nan", "14960": "nan", "14965": "nan", "14970": "nan", "14975": "nan", "14980": "nan", "14985": "nan", "14990": "nan", "14995": "nan", "15000": 0.23174, "15005": "nan", "15010": "nan", "15015": "nan", "15020": "nan", "15025": "nan", "15030": "nan", "15035": "nan", "15040": "nan", "15045": "nan", "15050": "nan", "15055": "nan", "15060": "nan", "15065": "nan", "15070": "nan", "15075": "nan", "15080": "nan", "15085": "nan", "15090": "nan", "15095": "nan", "15100": 0.23928, "15105": "nan", "15110": "nan", "15115": "nan", "15120": "nan", "15125": "nan", "15130": "nan", "15135": "nan", "15140": "nan", "15145": "nan", "15150": "nan", "15155": "nan", "15160": "nan", "15165": "nan", "15170": "nan", "15175": "nan", "15180": "nan", "15185": "nan", "15190": "nan", "15195": "nan", "15200": 0.23024, "15205": "nan", "15210": "nan", "15215": "nan", "15220": "nan", "15225": "nan", "15230": "nan", "15235": "nan", "15240": "nan", "15245": "nan", "15250": "nan", "15255": "nan", "15260": "nan", "15265": "nan", "15270": "nan", "15275": "nan", "15280": "nan", "15285": "nan", "15290": "nan", "15295": "nan", "15300": 0.23789, "15305": "nan", "15310": "nan", "15315": "nan", "15320": "nan", "15325": "nan", "15330": "nan", "15335": "nan", "15340": "nan", "15345": "nan", "15350": "nan", "15355": "nan", "15360": "nan", "15365": "nan", "15370": "nan", "15375": "nan", "15380": "nan", "15385": "nan", "15390": "nan", "15395": "nan", "15400": 0.23426, "15405": "nan", "15410": "nan", "15415": "nan", "15420": "nan", "15425": "nan", "15430": "nan", "15435": "nan", "15440": "nan", "15445": "nan", "15450": "nan", "15455": "nan", "15460": "nan", "15465": "nan", "15470": "nan", "15475": "nan", "15480": "nan", "15485": "nan", "15490": "nan", "15495": "nan", "15500": 0.23849, "15505": "nan", "15510": "nan", "15515": "nan", "15520": "nan", "15525": "nan", "15530": "nan", "15535": "nan", "15540": "nan", "15545": "nan", "15550": "nan", "15555": "nan", "15560": "nan", "15565": "nan", "15570": "nan", "15575": "nan", "15580": "nan", "15585": "nan", "15590": "nan", "15595": "nan", "15600": 0.23065, "15605": "nan", "15610": "nan", "15615": "nan", "15620": "nan", "15625": "nan", "15630": "nan", "15635": "nan", "15640": "nan", "15645": "nan", "15650": "nan", "15655": "nan", "15660": "nan", "15665": "nan", "15670": "nan", "15675": "nan", "15680": "nan", "15685": "nan", "15690": "nan", "15695": 
"nan", "15700": 0.23286, "15705": "nan", "15710": "nan", "15715": "nan", "15720": "nan", "15725": "nan", "15730": "nan", "15735": "nan", "15740": "nan", "15745": "nan", "15750": "nan", "15755": "nan", "15760": "nan", "15765": "nan", "15770": "nan", "15775": "nan", "15780": "nan", "15785": "nan", "15790": "nan", "15795": "nan", "15800": 0.22931, "15805": "nan", "15810": "nan", "15815": "nan", "15820": "nan", "15825": "nan", "15830": "nan", "15835": "nan", "15840": "nan", "15845": "nan", "15850": "nan", "15855": "nan", "15860": "nan", "15865": "nan", "15870": "nan", "15875": "nan", "15880": "nan", "15885": "nan", "15890": "nan", "15895": "nan", "15900": 0.23516, "15905": "nan", "15910": "nan", "15915": "nan", "15920": "nan", "15925": "nan", "15930": "nan", "15935": "nan", "15940": "nan", "15945": "nan", "15950": "nan", "15955": "nan", "15960": "nan", "15965": "nan", "15970": "nan", "15975": "nan", "15980": "nan", "15985": "nan", "15990": "nan", "15995": "nan", "16000": 0.2404, "16005": "nan", "16010": "nan", "16015": "nan", "16020": "nan", "16025": "nan", "16030": "nan", "16035": "nan", "16040": "nan", "16045": "nan", "16050": "nan", "16055": "nan", "16060": "nan", "16065": "nan", "16070": "nan", "16075": "nan", "16080": "nan", "16085": "nan", "16090": "nan", "16095": "nan", "16100": 0.24653, "16105": "nan", "16110": "nan", "16115": "nan", "16120": "nan", "16125": "nan", "16130": "nan", "16135": "nan", "16140": "nan", "16145": "nan", "16150": "nan", "16155": "nan", "16160": "nan", "16165": "nan", "16170": "nan", "16175": "nan", "16180": "nan", "16185": "nan", "16190": "nan", "16195": "nan", "16200": 0.25315, "16205": "nan", "16210": "nan", "16215": "nan", "16220": "nan", "16225": "nan", "16230": "nan", "16235": "nan", "16240": "nan", "16245": "nan", "16250": "nan", "16255": "nan", "16260": "nan", "16265": "nan", "16270": "nan", "16275": "nan", "16280": "nan", "16285": "nan", "16290": "nan", "16295": "nan", "16300": 0.24875, "16305": "nan", "16310": "nan", "16315": "nan", "16320": "nan", "16325": "nan", "16330": "nan", "16335": "nan", "16340": "nan", "16345": "nan", "16350": "nan", "16355": "nan", "16360": "nan", "16365": "nan", "16370": "nan", "16375": "nan", "16380": "nan", "16385": "nan", "16390": "nan", "16395": "nan", "16400": 0.25819, "16405": "nan", "16410": "nan", "16415": "nan", "16420": "nan", "16425": "nan", "16430": "nan", "16435": "nan", "16440": "nan", "16445": "nan", "16450": "nan", "16455": "nan", "16460": "nan", "16465": "nan", "16470": "nan", "16475": "nan", "16480": "nan", "16485": "nan", "16490": "nan", "16495": "nan", "16500": 0.25611, "16505": "nan", "16510": "nan", "16515": "nan", "16520": "nan", "16525": "nan", "16530": "nan", "16535": "nan", "16540": "nan", "16545": "nan", "16550": "nan", "16555": "nan", "16560": "nan", "16565": "nan", "16570": "nan", "16575": "nan", "16580": "nan", "16585": "nan", "16590": "nan", "16595": "nan", "16600": 0.25709, "16605": "nan", "16610": "nan", "16615": "nan", "16620": "nan", "16625": "nan", "16630": "nan", "16635": "nan", "16640": "nan", "16645": "nan", "16650": "nan", "16655": "nan", "16660": "nan", "16665": "nan", "16670": "nan", "16675": "nan", "16680": "nan", "16685": "nan", "16690": "nan", "16695": "nan", "16700": 0.25764, "16705": "nan", "16710": "nan", "16715": "nan", "16720": "nan", "16725": "nan", "16730": "nan", "16735": "nan", "16740": "nan", "16745": "nan", "16750": "nan", "16755": "nan", "16760": "nan", "16765": "nan", "16770": "nan", "16775": "nan", "16780": "nan", "16785": "nan", "16790": "nan", "16795": "nan", 
"16800": 0.24864, "16805": "nan", "16810": "nan", "16815": "nan", "16820": "nan", "16825": "nan", "16830": "nan", "16835": "nan", "16840": "nan", "16845": "nan", "16850": "nan", "16855": "nan", "16860": "nan", "16865": "nan", "16870": "nan", "16875": "nan", "16880": "nan", "16885": "nan", "16890": "nan", "16895": "nan", "16900": 0.24505, "16905": "nan", "16910": "nan", "16915": "nan", "16920": "nan", "16925": "nan", "16930": "nan", "16935": "nan", "16940": "nan", "16945": "nan", "16950": "nan", "16955": "nan", "16960": "nan", "16965": "nan", "16970": "nan", "16975": "nan", "16980": "nan", "16985": "nan", "16990": "nan", "16995": "nan", "17000": 0.24237, "17005": "nan", "17010": "nan", "17015": "nan", "17020": "nan", "17025": "nan", "17030": "nan", "17035": "nan", "17040": "nan", "17045": "nan", "17050": "nan", "17055": "nan", "17060": "nan", "17065": "nan", "17070": "nan", "17075": "nan", "17080": "nan", "17085": "nan", "17090": "nan", "17095": "nan", "17100": 0.24503, "17105": "nan", "17110": "nan", "17115": "nan", "17120": "nan", "17125": "nan", "17130": "nan", "17135": "nan", "17140": "nan", "17145": "nan", "17150": "nan", "17155": "nan", "17160": "nan", "17165": "nan", "17170": "nan", "17175": "nan", "17180": "nan", "17185": "nan", "17190": "nan", "17195": "nan", "17200": 0.24918, "17205": "nan", "17210": "nan", "17215": "nan", "17220": "nan", "17225": "nan", "17230": "nan", "17235": "nan", "17240": "nan", "17245": "nan", "17250": "nan", "17255": "nan", "17260": "nan", "17265": "nan", "17270": "nan", "17275": "nan", "17280": "nan", "17285": "nan", "17290": "nan", "17295": "nan", "17300": 0.25737, "17305": "nan", "17310": "nan", "17315": "nan", "17320": "nan", "17325": "nan", "17330": "nan", "17335": "nan", "17340": "nan", "17345": "nan", "17350": "nan", "17355": "nan", "17360": "nan", "17365": "nan", "17370": "nan", "17375": "nan", "17380": "nan", "17385": "nan", "17390": "nan", "17395": "nan", "17400": 0.26118, "17405": "nan", "17410": "nan", "17415": "nan", "17420": "nan", "17425": "nan", "17430": "nan", "17435": "nan", "17440": "nan", "17445": "nan", "17450": "nan", "17455": "nan", "17460": "nan", "17465": "nan", "17470": "nan", "17475": "nan", "17480": "nan", "17485": "nan", "17490": "nan", "17495": "nan", "17500": 0.25082, "17505": "nan", "17510": "nan", "17515": "nan", "17520": "nan", "17525": "nan", "17530": "nan", "17535": "nan", "17540": "nan", "17545": "nan", "17550": "nan", "17555": "nan", "17560": "nan", "17565": "nan", "17570": "nan", "17575": "nan", "17580": "nan", "17585": "nan", "17590": "nan", "17595": "nan", "17600": 0.25272, "17605": "nan", "17610": "nan", "17615": "nan", "17620": "nan", "17625": "nan", "17630": "nan", "17635": "nan", "17640": "nan", "17645": "nan", "17650": "nan", "17655": "nan", "17660": "nan", "17665": "nan", "17670": "nan", "17675": "nan", "17680": "nan", "17685": "nan", "17690": "nan", "17695": "nan", "17700": 0.24154, "17705": "nan", "17710": "nan", "17715": "nan", "17720": "nan", "17725": "nan", "17730": "nan", "17735": "nan", "17740": "nan", "17745": "nan", "17750": "nan", "17755": "nan", "17760": "nan", "17765": "nan", "17770": "nan", "17775": "nan", "17780": "nan", "17785": "nan", "17790": "nan", "17795": "nan", "17800": 0.24054, "17805": "nan", "17810": "nan", "17815": "nan", "17820": "nan", "17825": "nan", "17830": "nan", "17835": "nan", "17840": "nan", "17845": "nan", "17850": "nan", "17855": "nan", "17860": "nan", "17865": "nan", "17870": "nan", "17875": "nan", "17880": "nan", "17885": "nan", "17890": "nan", "17895": "nan", "17900": 
0.24608, "17905": "nan", "17910": "nan", "17915": "nan", "17920": "nan", "17925": "nan", "17930": "nan", "17935": "nan", "17940": "nan", "17945": "nan", "17950": "nan", "17955": "nan", "17960": "nan", "17965": "nan", "17970": "nan", "17975": "nan", "17980": "nan", "17985": "nan", "17990": "nan", "17995": "nan", "18000": 0.24313, "18005": "nan", "18010": "nan", "18015": "nan", "18020": "nan", "18025": "nan", "18030": "nan", "18035": "nan", "18040": "nan", "18045": "nan", "18050": "nan", "18055": "nan", "18060": "nan", "18065": "nan", "18070": "nan", "18075": "nan", "18080": "nan", "18085": "nan", "18090": "nan", "18095": "nan", "18100": 0.23876, "18105": "nan", "18110": "nan", "18115": "nan", "18120": "nan", "18125": "nan", "18130": "nan", "18135": "nan", "18140": "nan", "18145": "nan", "18150": "nan", "18155": "nan", "18160": "nan", "18165": "nan", "18170": "nan", "18175": "nan", "18180": "nan", "18185": "nan", "18190": "nan", "18195": "nan", "18200": 0.25324, "18205": "nan", "18210": "nan", "18215": "nan", "18220": "nan", "18225": "nan", "18230": "nan", "18235": "nan", "18240": "nan", "18245": "nan", "18250": "nan", "18255": "nan", "18260": "nan", "18265": "nan", "18270": "nan", "18275": "nan", "18280": "nan", "18285": "nan", "18290": "nan", "18295": "nan", "18300": 0.23157, "18305": "nan", "18310": "nan", "18315": "nan", "18320": "nan", "18325": "nan", "18330": "nan", "18335": "nan", "18340": "nan", "18345": "nan", "18350": "nan", "18355": "nan", "18360": "nan", "18365": "nan", "18370": "nan", "18375": "nan", "18380": "nan", "18385": "nan", "18390": "nan", "18395": "nan", "18400": 0.23746, "18405": "nan", "18410": "nan", "18415": "nan", "18420": "nan", "18425": "nan", "18430": "nan", "18435": "nan", "18440": "nan", "18445": "nan", "18450": "nan", "18455": "nan", "18460": "nan", "18465": "nan", "18470": "nan", "18475": "nan", "18480": "nan", "18485": "nan", "18490": "nan", "18495": "nan", "18500": 0.23828, "18505": "nan", "18510": "nan", "18515": "nan", "18520": "nan", "18525": "nan", "18530": "nan", "18535": "nan", "18540": "nan", "18545": "nan", "18550": "nan", "18555": "nan", "18560": "nan", "18565": "nan", "18570": "nan", "18575": "nan", "18580": "nan", "18585": "nan", "18590": "nan", "18595": "nan", "18600": 0.23279, "18605": "nan", "18610": "nan", "18615": "nan", "18620": "nan", "18625": "nan", "18630": "nan", "18635": "nan", "18640": "nan", "18645": "nan", "18650": "nan", "18655": "nan", "18660": "nan", "18665": "nan", "18670": "nan", "18675": "nan", "18680": "nan", "18685": "nan", "18690": "nan", "18695": "nan", "18700": 0.23583, "18705": "nan", "18710": "nan", "18715": "nan", "18720": "nan", "18725": "nan", "18730": "nan", "18735": "nan", "18740": "nan", "18745": "nan", "18750": "nan", "18755": "nan", "18760": "nan", "18765": "nan", "18770": "nan", "18775": "nan", "18780": "nan", "18785": "nan", "18790": "nan", "18795": "nan", "18800": 0.23938, "18805": "nan", "18810": "nan", "18815": "nan", "18820": "nan", "18825": "nan", "18830": "nan", "18835": "nan", "18840": "nan", "18845": "nan", "18850": "nan", "18855": "nan", "18860": "nan", "18865": "nan", "18870": "nan", "18875": "nan", "18880": "nan", "18885": "nan", "18890": "nan", "18895": "nan", "18900": 0.23588, "18905": "nan", "18910": "nan", "18915": "nan", "18920": "nan", "18925": "nan", "18930": "nan", "18935": "nan", "18940": "nan", "18945": "nan", "18950": "nan", "18955": "nan", "18960": "nan", "18965": "nan", "18970": "nan", "18975": "nan", "18980": "nan", "18985": "nan", "18990": "nan", "18995": "nan", "19000": 0.24304, 
"19005": "nan", "19010": "nan", "19015": "nan", "19020": "nan", "19025": "nan", "19030": "nan", "19035": "nan", "19040": "nan", "19045": "nan", "19050": "nan", "19055": "nan", "19060": "nan", "19065": "nan", "19070": "nan", "19075": "nan", "19080": "nan", "19085": "nan", "19090": "nan", "19095": "nan", "19100": 0.24315, "19105": "nan", "19110": "nan", "19115": "nan", "19120": "nan", "19125": "nan", "19130": "nan", "19135": "nan", "19140": "nan", "19145": "nan", "19150": "nan", "19155": "nan", "19160": "nan", "19165": "nan", "19170": "nan", "19175": "nan", "19180": "nan", "19185": "nan", "19190": "nan", "19195": "nan", "19200": 0.2419, "19205": "nan", "19210": "nan", "19215": "nan", "19220": "nan", "19225": "nan", "19230": "nan", "19235": "nan", "19240": "nan", "19245": "nan", "19250": "nan", "19255": "nan", "19260": "nan", "19265": "nan", "19270": "nan", "19275": "nan", "19280": "nan", "19285": "nan", "19290": "nan", "19295": "nan", "19300": 0.24534, "19305": "nan", "19310": "nan", "19315": "nan", "19320": "nan", "19325": "nan", "19330": "nan", "19335": "nan", "19340": "nan", "19345": "nan", "19350": "nan", "19355": "nan", "19360": "nan", "19365": "nan", "19370": "nan", "19375": "nan", "19380": "nan", "19385": "nan", "19390": "nan", "19395": "nan", "19400": 0.2458, "19405": "nan", "19410": "nan", "19415": "nan", "19420": "nan", "19425": "nan", "19430": "nan", "19435": "nan", "19440": "nan", "19445": "nan", "19450": "nan", "19455": "nan", "19460": "nan", "19465": "nan", "19470": "nan", "19475": "nan", "19480": "nan", "19485": "nan", "19490": "nan", "19495": "nan", "19500": 0.23627, "19505": "nan", "19510": "nan", "19515": "nan", "19520": "nan", "19525": "nan", "19530": "nan", "19535": "nan", "19540": "nan", "19545": "nan", "19550": "nan", "19555": "nan", "19560": "nan", "19565": "nan", "19570": "nan", "19575": "nan", "19580": "nan", "19585": "nan", "19590": "nan", "19595": "nan", "19600": 0.24757, "19605": "nan", "19610": "nan", "19615": "nan", "19620": "nan", "19625": "nan", "19630": "nan", "19635": "nan", "19640": "nan", "19645": "nan", "19650": "nan", "19655": "nan", "19660": "nan", "19665": "nan", "19670": "nan", "19675": "nan", "19680": "nan", "19685": "nan", "19690": "nan", "19695": "nan", "19700": 0.23331, "19705": "nan", "19710": "nan", "19715": "nan", "19720": "nan", "19725": "nan", "19730": "nan", "19735": "nan", "19740": "nan", "19745": "nan", "19750": "nan", "19755": "nan", "19760": "nan", "19765": "nan", "19770": "nan", "19775": "nan", "19780": "nan", "19785": "nan", "19790": "nan", "19795": "nan", "19800": 0.23373, "19805": "nan", "19810": "nan", "19815": "nan", "19820": "nan", "19825": "nan", "19830": "nan", "19835": "nan", "19840": "nan", "19845": "nan", "19850": "nan", "19855": "nan", "19860": "nan", "19865": "nan", "19870": "nan", "19875": "nan", "19880": "nan", "19885": "nan", "19890": "nan", "19895": "nan", "19900": 0.23661, "19905": "nan", "19910": "nan", "19915": "nan", "19920": "nan", "19925": "nan", "19930": "nan", "19935": "nan", "19940": "nan", "19945": "nan", "19950": "nan", "19955": "nan", "19960": "nan", "19965": "nan", "19970": "nan", "19975": "nan", "19980": "nan", "19985": "nan", "19990": "nan", "19995": "nan", "20000": 0.23118, "20005": "nan", "20010": "nan", "20015": "nan", "20020": "nan", "20025": "nan", "20030": "nan", "20035": "nan", "20040": "nan", "20045": "nan", "20050": "nan", "20055": "nan", "20060": "nan", "20065": "nan", "20070": "nan", "20075": "nan", "20080": "nan", "20085": "nan", "20090": "nan", "20095": "nan", "20100": 0.23216, "20105": 
"nan", "20110": "nan", "20115": "nan", "20120": "nan", "20125": "nan", "20130": "nan", "20135": "nan", "20140": "nan", "20145": "nan", "20150": "nan", "20155": "nan", "20160": "nan", "20165": "nan", "20170": "nan", "20175": "nan", "20180": "nan", "20185": "nan", "20190": "nan", "20195": "nan", "20200": 0.23738, "20205": "nan", "20210": "nan", "20215": "nan", "20220": "nan", "20225": "nan", "20230": "nan", "20235": "nan", "20240": "nan", "20245": "nan", "20250": "nan", "20255": "nan", "20260": "nan", "20265": "nan", "20270": "nan", "20275": "nan", "20280": "nan", "20285": "nan", "20290": "nan", "20295": "nan", "20300": 0.23323, "20305": "nan", "20310": "nan", "20315": "nan", "20320": "nan", "20325": "nan", "20330": "nan", "20335": "nan", "20340": "nan", "20345": "nan", "20350": "nan", "20355": "nan", "20360": "nan", "20365": "nan", "20370": "nan", "20375": "nan", "20380": "nan", "20385": "nan", "20390": "nan", "20395": "nan", "20400": 0.23548, "20405": "nan", "20410": "nan", "20415": "nan", "20420": "nan", "20425": "nan", "20430": "nan", "20435": "nan", "20440": "nan", "20445": "nan", "20450": "nan", "20455": "nan", "20460": "nan", "20465": "nan", "20470": "nan", "20475": "nan", "20480": "nan", "20485": "nan", "20490": "nan", "20495": "nan", "20500": 0.24239, "20505": "nan", "20510": "nan", "20515": "nan", "20520": "nan", "20525": "nan", "20530": "nan", "20535": "nan", "20540": "nan", "20545": "nan", "20550": "nan", "20555": "nan", "20560": "nan", "20565": "nan", "20570": "nan", "20575": "nan", "20580": "nan", "20585": "nan", "20590": "nan", "20595": "nan", "20600": 0.2345, "20605": "nan", "20610": "nan", "20615": "nan", "20620": "nan", "20625": "nan", "20630": "nan", "20635": "nan", "20640": "nan", "20645": "nan", "20650": "nan", "20655": "nan", "20660": "nan", "20665": "nan", "20670": "nan", "20675": "nan", "20680": "nan", "20685": "nan", "20690": "nan", "20695": "nan", "20700": 0.2325, "20705": "nan", "20710": "nan", "20715": "nan", "20720": "nan", "20725": "nan", "20730": "nan", "20735": "nan", "20740": "nan", "20745": "nan", "20750": "nan", "20755": "nan", "20760": "nan", "20765": "nan", "20770": "nan", "20775": "nan", "20780": "nan", "20785": "nan", "20790": "nan", "20795": "nan", "20800": 0.23906, "20805": "nan", "20810": "nan", "20815": "nan", "20820": "nan", "20825": "nan", "20830": "nan", "20835": "nan", "20840": "nan", "20845": "nan", "20850": "nan", "20855": "nan", "20860": "nan", "20865": "nan", "20870": "nan", "20875": "nan", "20880": "nan", "20885": "nan", "20890": "nan", "20895": "nan", "20900": 0.23415, "20905": "nan", "20910": "nan", "20915": "nan", "20920": "nan", "20925": "nan", "20930": "nan", "20935": "nan", "20940": "nan", "20945": "nan", "20950": "nan", "20955": "nan", "20960": "nan", "20965": "nan", "20970": "nan", "20975": "nan", "20980": "nan", "20985": "nan", "20990": "nan", "20995": "nan", "21000": 0.23624, "21005": "nan", "21010": "nan", "21015": "nan", "21020": "nan", "21025": "nan", "21030": "nan", "21035": "nan", "21040": "nan", "21045": "nan", "21050": "nan", "21055": "nan", "21060": "nan", "21065": "nan", "21070": "nan", "21075": "nan", "21080": "nan", "21085": "nan", "21090": "nan", "21095": "nan", "21100": 0.23488, "21105": "nan", "21110": "nan", "21115": "nan", "21120": "nan", "21125": "nan", "21130": "nan", "21135": "nan", "21140": "nan", "21145": "nan", "21150": "nan", "21155": "nan", "21160": "nan", "21165": "nan", "21170": "nan", "21175": "nan", "21180": "nan", "21185": "nan", "21190": "nan", "21195": "nan", "21200": 0.23226, "21205": "nan", 
"21210": "nan", "21215": "nan", "21220": "nan", "21225": "nan", "21230": "nan", "21235": "nan", "21240": "nan", "21245": "nan", "21250": "nan", "21255": "nan", "21260": "nan", "21265": "nan", "21270": "nan", "21275": "nan", "21280": "nan", "21285": "nan", "21290": "nan", "21295": "nan", "21300": 0.23237, "21305": "nan", "21310": "nan", "21315": "nan", "21320": "nan", "21325": "nan", "21330": "nan", "21335": "nan", "21340": "nan", "21345": "nan", "21350": "nan", "21355": "nan", "21360": "nan", "21365": "nan", "21370": "nan", "21375": "nan", "21380": "nan", "21385": "nan", "21390": "nan", "21395": "nan", "21400": 0.24113, "21405": "nan", "21410": "nan", "21415": "nan", "21420": "nan", "21425": "nan", "21430": "nan", "21435": "nan", "21440": "nan", "21445": "nan", "21450": "nan", "21455": "nan", "21460": "nan", "21465": "nan", "21470": "nan", "21475": "nan", "21480": "nan", "21485": "nan", "21490": "nan", "21495": "nan", "21500": 0.22645, "21505": "nan", "21510": "nan", "21515": "nan", "21520": "nan", "21525": "nan", "21530": "nan", "21535": "nan", "21540": "nan", "21545": "nan", "21550": "nan", "21555": "nan", "21560": "nan", "21565": "nan", "21570": "nan", "21575": "nan", "21580": "nan", "21585": "nan", "21590": "nan", "21595": "nan", "21600": 0.23029, "21605": "nan", "21610": "nan", "21615": "nan", "21620": "nan", "21625": "nan", "21630": "nan", "21635": "nan", "21640": "nan", "21645": "nan", "21650": "nan", "21655": "nan", "21660": "nan", "21665": "nan", "21670": "nan", "21675": "nan", "21680": "nan", "21685": "nan", "21690": "nan", "21695": "nan", "21700": 0.23431, "21705": "nan", "21710": "nan", "21715": "nan", "21720": "nan", "21725": "nan", "21730": "nan", "21735": "nan", "21740": "nan", "21745": "nan", "21750": "nan", "21755": "nan", "21760": "nan", "21765": "nan", "21770": "nan", "21775": "nan", "21780": "nan", "21785": "nan", "21790": "nan", "21795": "nan", "21800": 0.23359, "21805": "nan", "21810": "nan", "21815": "nan", "21820": "nan", "21825": "nan", "21830": "nan", "21835": "nan", "21840": "nan", "21845": "nan", "21850": "nan", "21855": "nan", "21860": "nan", "21865": "nan", "21870": "nan", "21875": "nan", "21880": "nan", "21885": "nan", "21890": "nan", "21895": "nan", "21900": 0.23589, "21905": "nan", "21910": "nan", "21915": "nan", "21920": "nan", "21925": "nan", "21930": "nan", "21935": "nan", "21940": "nan", "21945": "nan", "21950": "nan", "21955": "nan", "21960": "nan", "21965": "nan", "21970": "nan", "21975": "nan", "21980": "nan", "21985": "nan", "21990": "nan", "21995": "nan", "22000": 0.23406, "22005": "nan", "22010": "nan", "22015": "nan", "22020": "nan", "22025": "nan", "22030": "nan", "22035": "nan", "22040": "nan", "22045": "nan", "22050": "nan", "22055": "nan", "22060": "nan", "22065": "nan", "22070": "nan", "22075": "nan", "22080": "nan", "22085": "nan", "22090": "nan", "22095": "nan", "22100": 0.23721, "22105": "nan", "22110": "nan", "22115": "nan", "22120": "nan", "22125": "nan", "22130": "nan", "22135": "nan", "22140": "nan", "22145": "nan", "22150": "nan", "22155": "nan", "22160": "nan", "22165": "nan", "22170": "nan", "22175": "nan", "22180": "nan", "22185": "nan", "22190": "nan", "22195": "nan", "22200": 0.23412, "22205": "nan", "22210": "nan", "22215": "nan", "22220": "nan", "22225": "nan", "22230": "nan", "22235": "nan", "22240": "nan", "22245": "nan", "22250": "nan", "22255": "nan", "22260": "nan", "22265": "nan", "22270": "nan", "22275": "nan", "22280": "nan", "22285": "nan", "22290": "nan", "22295": "nan", "22300": 0.2372, "22305": "nan", "22310": 
"nan", "22315": "nan", "22320": "nan", "22325": "nan", "22330": "nan", "22335": "nan", "22340": "nan", "22345": "nan", "22350": "nan", "22355": "nan", "22360": "nan", "22365": "nan", "22370": "nan", "22375": "nan", "22380": "nan", "22385": "nan", "22390": "nan", "22395": "nan", "22400": 0.23298, "22405": "nan", "22410": "nan", "22415": "nan", "22420": "nan", "22425": "nan", "22430": "nan", "22435": "nan", "22440": "nan", "22445": "nan", "22450": "nan", "22455": "nan", "22460": "nan", "22465": "nan", "22470": "nan", "22475": "nan", "22480": "nan", "22485": "nan", "22490": "nan", "22495": "nan", "22500": 0.23468, "22505": "nan", "22510": "nan", "22515": "nan", "22520": "nan", "22525": "nan", "22530": "nan", "22535": "nan", "22540": "nan", "22545": "nan", "22550": "nan", "22555": "nan", "22560": "nan", "22565": "nan", "22570": "nan", "22575": "nan", "22580": "nan", "22585": "nan", "22590": "nan", "22595": "nan", "22600": 0.23297, "22605": "nan", "22610": "nan", "22615": "nan", "22620": "nan", "22625": "nan", "22630": "nan", "22635": "nan", "22640": "nan", "22645": "nan", "22650": "nan", "22655": "nan", "22660": "nan", "22665": "nan", "22670": "nan", "22675": "nan", "22680": "nan", "22685": "nan", "22690": "nan", "22695": "nan", "22700": 0.23386, "22705": "nan", "22710": "nan", "22715": "nan", "22720": "nan", "22725": "nan", "22730": "nan", "22735": "nan", "22740": "nan", "22745": "nan", "22750": "nan", "22755": "nan", "22760": "nan", "22765": "nan", "22770": "nan", "22775": "nan", "22780": "nan", "22785": "nan", "22790": "nan", "22795": "nan", "22800": 0.235, "22805": "nan", "22810": "nan", "22815": "nan", "22820": "nan", "22825": "nan", "22830": "nan", "22835": "nan", "22840": "nan", "22845": "nan", "22850": "nan", "22855": "nan", "22860": "nan", "22865": "nan", "22870": "nan", "22875": "nan", "22880": "nan", "22885": "nan", "22890": "nan", "22895": "nan", "22900": 0.22293, "22905": "nan", "22910": "nan", "22915": "nan", "22920": "nan", "22925": "nan", "22930": "nan", "22935": "nan", "22940": "nan", "22945": "nan", "22950": "nan", "22955": "nan", "22960": "nan", "22965": "nan", "22970": "nan", "22975": "nan", "22980": "nan", "22985": "nan", "22990": "nan", "22995": "nan", "23000": 0.23081, "23005": "nan", "23010": "nan", "23015": "nan", "23020": "nan", "23025": "nan", "23030": "nan", "23035": "nan", "23040": "nan", "23045": "nan", "23050": "nan", "23055": "nan", "23060": "nan", "23065": "nan", "23070": "nan", "23075": "nan", "23080": "nan", "23085": "nan", "23090": "nan", "23095": "nan", "23100": 0.23275, "23105": "nan", "23110": "nan", "23115": "nan", "23120": "nan", "23125": "nan", "23130": "nan", "23135": "nan", "23140": "nan", "23145": "nan", "23150": "nan", "23155": "nan", "23160": "nan", "23165": "nan", "23170": "nan", "23175": "nan", "23180": "nan", "23185": "nan", "23190": "nan", "23195": "nan", "23200": 0.23778, "23205": "nan", "23210": "nan", "23215": "nan", "23220": "nan", "23225": "nan", "23230": "nan", "23235": "nan", "23240": "nan", "23245": "nan", "23250": "nan", "23255": "nan", "23260": "nan", "23265": "nan", "23270": "nan", "23275": "nan", "23280": "nan", "23285": "nan", "23290": "nan", "23295": "nan", "23300": 0.2323, "23305": "nan", "23310": "nan", "23315": "nan", "23320": "nan", "23325": "nan", "23330": "nan", "23335": "nan", "23340": "nan", "23345": "nan", "23350": "nan", "23355": "nan", "23360": "nan", "23365": "nan", "23370": "nan", "23375": "nan", "23380": "nan", "23385": "nan", "23390": "nan", "23395": "nan", "23400": 0.23187, "23405": "nan", "23410": "nan", "23415": 
"nan", "23420": "nan", "23425": "nan", "23430": "nan", "23435": "nan", "23440": "nan", "23445": "nan", "23450": "nan", "23455": "nan", "23460": "nan", "23465": "nan", "23470": "nan", "23475": "nan", "23480": "nan", "23485": "nan", "23490": "nan", "23495": "nan", "23500": 0.23379, "23505": "nan", "23510": "nan", "23515": "nan", "23520": "nan", "23525": "nan", "23530": "nan", "23535": "nan", "23540": "nan", "23545": "nan", "23550": "nan", "23555": "nan", "23560": "nan", "23565": "nan", "23570": "nan", "23575": "nan", "23580": "nan", "23585": "nan", "23590": "nan", "23595": "nan", "23600": 0.24479, "23605": "nan", "23610": "nan", "23615": "nan", "23620": "nan", "23625": "nan", "23630": "nan", "23635": "nan", "23640": "nan", "23645": "nan", "23650": "nan", "23655": "nan", "23660": "nan", "23665": "nan", "23670": "nan", "23675": "nan", "23680": "nan", "23685": "nan", "23690": "nan", "23695": "nan", "23700": 0.22942, "23705": "nan", "23710": "nan", "23715": "nan", "23720": "nan", "23725": "nan", "23730": "nan", "23735": "nan", "23740": "nan", "23745": "nan", "23750": "nan", "23755": "nan", "23760": "nan", "23765": "nan", "23770": "nan", "23775": "nan", "23780": "nan", "23785": "nan", "23790": "nan", "23795": "nan", "23800": 0.22788, "23805": "nan", "23810": "nan", "23815": "nan", "23820": "nan", "23825": "nan", "23830": "nan", "23835": "nan", "23840": "nan", "23845": "nan", "23850": "nan", "23855": "nan", "23860": "nan", "23865": "nan", "23870": "nan", "23875": "nan", "23880": "nan", "23885": "nan", "23890": "nan", "23895": "nan", "23900": 0.23451, "23905": "nan", "23910": "nan", "23915": "nan", "23920": "nan", "23925": "nan", "23930": "nan", "23935": "nan", "23940": "nan", "23945": "nan", "23950": "nan", "23955": "nan", "23960": "nan", "23965": "nan", "23970": "nan", "23975": "nan", "23980": "nan", "23985": "nan", "23990": "nan", "23995": "nan", "24000": 0.23503, "24005": "nan", "24010": "nan", "24015": "nan", "24020": "nan", "24025": "nan", "24030": "nan", "24035": "nan", "24040": "nan", "24045": "nan", "24050": "nan", "24055": "nan", "24060": "nan", "24065": "nan", "24070": "nan", "24075": "nan", "24080": "nan", "24085": "nan", "24090": "nan", "24095": "nan", "24100": 0.23101, "24105": "nan", "24110": "nan", "24115": "nan", "24120": "nan", "24125": "nan", "24130": "nan", "24135": "nan", "24140": "nan", "24145": "nan", "24150": "nan", "24155": "nan", "24160": "nan", "24165": "nan", "24170": "nan", "24175": "nan", "24180": "nan", "24185": "nan", "24190": "nan", "24195": "nan", "24200": 0.2418, "24205": "nan", "24210": "nan", "24215": "nan", "24220": "nan", "24225": "nan", "24230": "nan", "24235": "nan", "24240": "nan", "24245": "nan", "24250": "nan", "24255": "nan", "24260": "nan", "24265": "nan", "24270": "nan", "24275": "nan", "24280": "nan", "24285": "nan", "24290": "nan", "24295": "nan", "24300": 0.2409, "24305": "nan", "24310": "nan", "24315": "nan", "24320": "nan", "24325": "nan", "24330": "nan", "24335": "nan", "24340": "nan", "24345": "nan", "24350": "nan", "24355": "nan", "24360": "nan", "24365": "nan", "24370": "nan", "24375": "nan", "24380": "nan", "24385": "nan", "24390": "nan", "24395": "nan", "24400": 0.23205, "24405": "nan", "24410": "nan", "24415": "nan", "24420": "nan", "24425": "nan", "24430": "nan", "24435": "nan", "24440": "nan", "24445": "nan", "24450": "nan", "24455": "nan", "24460": "nan", "24465": "nan", "24470": "nan", "24475": "nan", "24480": "nan", "24485": "nan", "24490": "nan", "24495": "nan", "24500": 0.2321, "24505": "nan", "24510": "nan", "24515": "nan", "24520": 
"nan", "24525": "nan", "24530": "nan", "24535": "nan", "24540": "nan", "24545": "nan", "24550": "nan", "24555": "nan", "24560": "nan", "24565": "nan", "24570": "nan", "24575": "nan", "24580": "nan", "24585": "nan", "24590": "nan", "24595": "nan", "24600": 0.23709, "24605": "nan", "24610": "nan", "24615": "nan", "24620": "nan", "24625": "nan", "24630": "nan", "24635": "nan", "24640": "nan", "24645": "nan", "24650": "nan", "24655": "nan", "24660": "nan", "24665": "nan", "24670": "nan", "24675": "nan", "24680": "nan", "24685": "nan", "24690": "nan", "24695": "nan", "24700": 0.23512, "24705": "nan", "24710": "nan", "24715": "nan", "24720": "nan", "24725": "nan", "24730": "nan", "24735": "nan", "24740": "nan", "24745": "nan", "24750": "nan", "24755": "nan", "24760": "nan", "24765": "nan", "24770": "nan", "24775": "nan", "24780": "nan", "24785": "nan", "24790": "nan", "24795": "nan", "24800": 0.23051, "24805": "nan", "24810": "nan", "24815": "nan", "24820": "nan", "24825": "nan", "24830": "nan", "24835": "nan", "24840": "nan", "24845": "nan", "24850": "nan", "24855": "nan", "24860": "nan", "24865": "nan", "24870": "nan", "24875": "nan", "24880": "nan", "24885": "nan", "24890": "nan", "24895": "nan", "24900": 0.23649, "24905": "nan", "24910": "nan", "24915": "nan", "24920": "nan", "24925": "nan", "24930": "nan", "24935": "nan", "24940": "nan", "24945": "nan", "24950": "nan", "24955": "nan", "24960": "nan", "24965": "nan", "24970": "nan", "24975": "nan", "24980": "nan", "24985": "nan", "24990": "nan", "24995": "nan", "25000": 0.2331, "25005": "nan", "25010": "nan", "25015": "nan", "25020": "nan", "25025": "nan", "25030": "nan", "25035": "nan", "25040": "nan", "25045": "nan", "25050": "nan", "25055": "nan", "25060": "nan", "25065": "nan", "25070": "nan", "25075": "nan", "25080": "nan", "25085": "nan", "25090": "nan", "25095": "nan", "25100": 0.23499, "25105": "nan", "25110": "nan", "25115": "nan", "25120": "nan", "25125": "nan", "25130": "nan", "25135": "nan", "25140": "nan", "25145": "nan", "25150": "nan", "25155": "nan", "25160": "nan", "25165": "nan", "25170": "nan", "25175": "nan", "25180": "nan", "25185": "nan", "25190": "nan", "25195": "nan", "25200": 0.22895, "25205": "nan", "25210": "nan", "25215": "nan", "25220": "nan", "25225": "nan", "25230": "nan", "25235": "nan", "25240": "nan", "25245": "nan", "25250": "nan", "25255": "nan", "25260": "nan", "25265": "nan", "25270": "nan", "25275": "nan", "25280": "nan", "25285": "nan", "25290": "nan", "25295": "nan", "25300": 0.23368, "25305": "nan", "25310": "nan", "25315": "nan", "25320": "nan", "25325": "nan", "25330": "nan", "25335": "nan", "25340": "nan", "25345": "nan", "25350": "nan", "25355": "nan", "25360": "nan", "25365": "nan", "25370": "nan", "25375": "nan", "25380": "nan", "25385": "nan", "25390": "nan", "25395": "nan", "25400": 0.23408, "25405": "nan", "25410": "nan", "25415": "nan", "25420": "nan", "25425": "nan", "25430": "nan", "25435": "nan", "25440": "nan", "25445": "nan", "25450": "nan", "25455": "nan", "25460": "nan", "25465": "nan", "25470": "nan", "25475": "nan", "25480": "nan", "25485": "nan", "25490": "nan", "25495": "nan", "25500": 0.24136, "25505": "nan", "25510": "nan", "25515": "nan", "25520": "nan", "25525": "nan", "25530": "nan", "25535": "nan", "25540": "nan", "25545": "nan", "25550": "nan", "25555": "nan", "25560": "nan", "25565": "nan", "25570": "nan", "25575": "nan", "25580": "nan", "25585": "nan", "25590": "nan", "25595": "nan", "25600": 0.24015, "25605": "nan", "25610": "nan", "25615": "nan", "25620": "nan", 
"25625": "nan", "25630": "nan", "25635": "nan", "25640": "nan", "25645": "nan", "25650": "nan", "25655": "nan", "25660": "nan", "25665": "nan", "25670": "nan", "25675": "nan", "25680": "nan", "25685": "nan", "25690": "nan", "25695": "nan", "25700": 0.24101, "25705": "nan", "25710": "nan", "25715": "nan", "25720": "nan", "25725": "nan", "25730": "nan", "25735": "nan", "25740": "nan", "25745": "nan", "25750": "nan", "25755": "nan", "25760": "nan", "25765": "nan", "25770": "nan", "25775": "nan", "25780": "nan", "25785": "nan", "25790": "nan", "25795": "nan", "25800": 0.24204, "25805": "nan", "25810": "nan", "25815": "nan", "25820": "nan", "25825": "nan", "25830": "nan", "25835": "nan", "25840": "nan", "25845": "nan", "25850": "nan", "25855": "nan", "25860": "nan", "25865": "nan", "25870": "nan", "25875": "nan", "25880": "nan", "25885": "nan", "25890": "nan", "25895": "nan", "25900": 0.23935, "25905": "nan", "25910": "nan", "25915": "nan", "25920": "nan", "25925": "nan", "25930": "nan", "25935": "nan", "25940": "nan", "25945": "nan", "25950": "nan", "25955": "nan", "25960": "nan", "25965": "nan", "25970": "nan", "25975": "nan", "25980": "nan", "25985": "nan", "25990": "nan", "25995": "nan", "26000": 0.23423, "26005": "nan", "26010": "nan", "26015": "nan", "26020": "nan", "26025": "nan", "26030": "nan", "26035": "nan", "26040": "nan", "26045": "nan", "26050": "nan", "26055": "nan", "26060": "nan", "26065": "nan", "26070": "nan", "26075": "nan", "26080": "nan", "26085": "nan", "26090": "nan", "26095": "nan", "26100": 0.22428, "26105": "nan", "26110": "nan", "26115": "nan", "26120": "nan", "26125": "nan", "26130": "nan", "26135": "nan", "26140": "nan", "26145": "nan", "26150": "nan", "26155": "nan", "26160": "nan", "26165": "nan", "26170": "nan", "26175": "nan", "26180": "nan", "26185": "nan", "26190": "nan", "26195": "nan", "26200": 0.23481, "26205": "nan", "26210": "nan", "26215": "nan", "26220": "nan", "26225": "nan", "26230": "nan", "26235": "nan", "26240": "nan", "26245": "nan", "26250": "nan", "26255": "nan", "26260": "nan", "26265": "nan", "26270": "nan", "26275": "nan", "26280": "nan", "26285": "nan", "26290": "nan", "26295": "nan", "26300": 0.23238, "26305": "nan", "26310": "nan", "26315": "nan", "26320": "nan", "26325": "nan", "26330": "nan", "26335": "nan", "26340": "nan", "26345": "nan", "26350": "nan", "26355": "nan", "26360": "nan", "26365": "nan", "26370": "nan", "26375": "nan", "26380": "nan", "26385": "nan", "26390": "nan", "26395": "nan", "26400": 0.2334, "26405": "nan", "26410": "nan", "26415": "nan", "26420": "nan", "26425": "nan", "26430": "nan", "26435": "nan", "26440": "nan", "26445": "nan", "26450": "nan", "26455": "nan", "26460": "nan", "26465": "nan", "26470": "nan", "26475": "nan", "26480": "nan", "26485": "nan", "26490": "nan", "26495": "nan", "26500": 0.2301, "26505": "nan", "26510": "nan", "26515": "nan", "26520": "nan", "26525": "nan", "26530": "nan", "26535": "nan", "26540": "nan", "26545": "nan", "26550": "nan", "26555": "nan", "26560": "nan", "26565": "nan", "26570": "nan", "26575": "nan", "26580": "nan", "26585": "nan", "26590": "nan", "26595": "nan", "26600": 0.22672, "26605": "nan", "26610": "nan", "26615": "nan", "26620": "nan", "26625": "nan", "26630": "nan", "26635": "nan", "26640": "nan", "26645": "nan", "26650": "nan", "26655": "nan", "26660": "nan", "26665": "nan", "26670": "nan", "26675": "nan", "26680": "nan", "26685": "nan", "26690": "nan", "26695": "nan", "26700": 0.23061, "26705": "nan", "26710": "nan", "26715": "nan", "26720": "nan", "26725": 
"nan", "26730": "nan", "26735": "nan", "26740": "nan", "26745": "nan", "26750": "nan", "26755": "nan", "26760": "nan", "26765": "nan", "26770": "nan", "26775": "nan", "26780": "nan", "26785": "nan", "26790": "nan", "26795": "nan", "26800": 0.23223, "26805": "nan", "26810": "nan", "26815": "nan", "26820": "nan", "26825": "nan", "26830": "nan", "26835": "nan", "26840": "nan", "26845": "nan", "26850": "nan", "26855": "nan", "26860": "nan", "26865": "nan", "26870": "nan", "26875": "nan", "26880": "nan", "26885": "nan", "26890": "nan", "26895": "nan", "26900": 0.23057, "26905": "nan", "26910": "nan", "26915": "nan", "26920": "nan", "26925": "nan", "26930": "nan", "26935": "nan", "26940": "nan", "26945": "nan", "26950": "nan", "26955": "nan", "26960": "nan", "26965": "nan", "26970": "nan", "26975": "nan", "26980": "nan", "26985": "nan", "26990": "nan", "26995": "nan", "27000": 0.22765, "27005": "nan", "27010": "nan", "27015": "nan", "27020": "nan", "27025": "nan", "27030": "nan", "27035": "nan", "27040": "nan", "27045": "nan", "27050": "nan", "27055": "nan", "27060": "nan", "27065": "nan", "27070": "nan", "27075": "nan", "27080": "nan", "27085": "nan", "27090": "nan", "27095": "nan", "27100": 0.23275, "27105": "nan", "27110": "nan", "27115": "nan", "27120": "nan", "27125": "nan", "27130": "nan", "27135": "nan", "27140": "nan", "27145": "nan", "27150": "nan", "27155": "nan", "27160": "nan", "27165": "nan", "27170": "nan", "27175": "nan", "27180": "nan", "27185": "nan", "27190": "nan", "27195": "nan", "27200": 0.22568, "27205": "nan", "27210": "nan", "27215": "nan", "27220": "nan", "27225": "nan", "27230": "nan", "27235": "nan", "27240": "nan", "27245": "nan", "27250": "nan", "27255": "nan", "27260": "nan", "27265": "nan", "27270": "nan", "27275": "nan", "27280": "nan", "27285": "nan", "27290": "nan", "27295": "nan", "27300": 0.22746, "27305": "nan", "27310": "nan", "27315": "nan", "27320": "nan", "27325": "nan", "27330": "nan", "27335": "nan", "27340": "nan", "27345": "nan", "27350": "nan", "27355": "nan", "27360": "nan", "27365": "nan", "27370": "nan", "27375": "nan", "27380": "nan", "27385": "nan", "27390": "nan", "27395": "nan", "27400": 0.23785, "27405": "nan", "27410": "nan", "27415": "nan", "27420": "nan", "27425": "nan", "27430": "nan", "27435": "nan", "27440": "nan", "27445": "nan", "27450": "nan", "27455": "nan", "27460": "nan", "27465": "nan", "27470": "nan", "27475": "nan", "27480": "nan", "27485": "nan", "27490": "nan", "27495": "nan", "27500": 0.23143, "27505": "nan", "27510": "nan", "27515": "nan", "27520": "nan", "27525": "nan", "27530": "nan", "27535": "nan", "27540": "nan", "27545": "nan", "27550": "nan", "27555": "nan", "27560": "nan", "27565": "nan", "27570": "nan", "27575": "nan", "27580": "nan", "27585": "nan", "27590": "nan", "27595": "nan", "27600": 0.23251, "27605": "nan", "27610": "nan", "27615": "nan", "27620": "nan", "27625": "nan", "27630": "nan", "27635": "nan", "27640": "nan", "27645": "nan", "27650": "nan", "27655": "nan", "27660": "nan", "27665": "nan", "27670": "nan", "27675": "nan", "27680": "nan", "27685": "nan", "27690": "nan", "27695": "nan", "27700": 0.22818, "27705": "nan", "27710": "nan", "27715": "nan", "27720": "nan", "27725": "nan", "27730": "nan", "27735": "nan", "27740": "nan", "27745": "nan", "27750": "nan", "27755": "nan", "27760": "nan", "27765": "nan", "27770": "nan", "27775": "nan", "27780": "nan", "27785": "nan", "27790": "nan", "27795": "nan", "27800": 0.23261, "27805": "nan", "27810": "nan", "27815": "nan", "27820": "nan", "27825": "nan", 
"27830": "nan", "27835": "nan", "27840": "nan", "27845": "nan", "27850": "nan", "27855": "nan", "27860": "nan", "27865": "nan", "27870": "nan", "27875": "nan", "27880": "nan", "27885": "nan", "27890": "nan", "27895": "nan", "27900": 0.23813, "27905": "nan", "27910": "nan", "27915": "nan", "27920": "nan", "27925": "nan", "27930": "nan", "27935": "nan", "27940": "nan", "27945": "nan", "27950": "nan", "27955": "nan", "27960": "nan", "27965": "nan", "27970": "nan", "27975": "nan", "27980": "nan", "27985": "nan", "27990": "nan", "27995": "nan", "28000": 0.22943, "28005": "nan", "28010": "nan", "28015": "nan", "28020": "nan", "28025": "nan", "28030": "nan", "28035": "nan", "28040": "nan", "28045": "nan", "28050": "nan", "28055": "nan", "28060": "nan", "28065": "nan", "28070": "nan", "28075": "nan", "28080": "nan", "28085": "nan", "28090": "nan", "28095": "nan", "28100": 0.24233, "28105": "nan", "28110": "nan", "28115": "nan", "28120": "nan", "28125": "nan", "28130": "nan", "28135": "nan", "28140": "nan", "28145": "nan", "28150": "nan", "28155": "nan", "28160": "nan", "28165": "nan", "28170": "nan", "28175": "nan", "28180": "nan", "28185": "nan", "28190": "nan", "28195": "nan", "28200": 0.23721, "28205": "nan", "28210": "nan", "28215": "nan", "28220": "nan", "28225": "nan", "28230": "nan", "28235": "nan", "28240": "nan", "28245": "nan", "28250": "nan", "28255": "nan", "28260": "nan", "28265": "nan", "28270": "nan", "28275": "nan", "28280": "nan", "28285": "nan", "28290": "nan", "28295": "nan", "28300": 0.23056, "28305": "nan", "28310": "nan", "28315": "nan", "28320": "nan", "28325": "nan", "28330": "nan", "28335": "nan", "28340": "nan", "28345": "nan", "28350": "nan", "28355": "nan", "28360": "nan", "28365": "nan", "28370": "nan", "28375": "nan", "28380": "nan", "28385": "nan", "28390": "nan", "28395": "nan", "28400": 0.23027, "28405": "nan", "28410": "nan", "28415": "nan", "28420": "nan", "28425": "nan", "28430": "nan", "28435": "nan", "28440": "nan", "28445": "nan", "28450": "nan", "28455": "nan", "28460": "nan", "28465": "nan", "28470": "nan", "28475": "nan", "28480": "nan", "28485": "nan", "28490": "nan", "28495": "nan", "28500": 0.22835, "28505": "nan", "28510": "nan", "28515": "nan", "28520": "nan", "28525": "nan", "28530": "nan", "28535": "nan", "28540": "nan", "28545": "nan", "28550": "nan", "28555": "nan", "28560": "nan", "28565": "nan", "28570": "nan", "28575": "nan", "28580": "nan", "28585": "nan", "28590": "nan", "28595": "nan", "28600": 0.22902, "28605": "nan", "28610": "nan", "28615": "nan", "28620": "nan", "28625": "nan", "28630": "nan", "28635": "nan", "28640": "nan", "28645": "nan", "28650": "nan", "28655": "nan", "28660": "nan", "28665": "nan", "28670": "nan", "28675": "nan", "28680": "nan", "28685": "nan", "28690": "nan", "28695": "nan", "28700": 0.23735, "28705": "nan", "28710": "nan", "28715": "nan", "28720": "nan", "28725": "nan", "28730": "nan", "28735": "nan", "28740": "nan", "28745": "nan", "28750": "nan", "28755": "nan", "28760": "nan", "28765": "nan", "28770": "nan", "28775": "nan", "28780": "nan", "28785": "nan", "28790": "nan", "28795": "nan", "28800": 0.23425, "28805": "nan", "28810": "nan", "28815": "nan", "28820": "nan", "28825": "nan", "28830": "nan", "28835": "nan", "28840": "nan", "28845": "nan", "28850": "nan", "28855": "nan", "28860": "nan", "28865": "nan", "28870": "nan", "28875": "nan", "28880": "nan", "28885": "nan", "28890": "nan", "28895": "nan", "28900": 0.22882, "28905": "nan", "28910": "nan", "28915": "nan", "28920": "nan", "28925": "nan", "28930": 
"nan", "28935": "nan", "28940": "nan", "28945": "nan", "28950": "nan", "28955": "nan", "28960": "nan", "28965": "nan", "28970": "nan", "28975": "nan", "28980": "nan", "28985": "nan", "28990": "nan", "28995": "nan", "29000": 0.22506, "29005": "nan", "29010": "nan", "29015": "nan", "29020": "nan", "29025": "nan", "29030": "nan", "29035": "nan", "29040": "nan", "29045": "nan", "29050": "nan", "29055": "nan", "29060": "nan", "29065": "nan", "29070": "nan", "29075": "nan", "29080": "nan", "29085": "nan", "29090": "nan", "29095": "nan", "29100": 0.23811, "29105": "nan", "29110": "nan", "29115": "nan", "29120": "nan", "29125": "nan", "29130": "nan", "29135": "nan", "29140": "nan", "29145": "nan", "29150": "nan", "29155": "nan", "29160": "nan", "29165": "nan", "29170": "nan", "29175": "nan", "29180": "nan", "29185": "nan", "29190": "nan", "29195": "nan", "29200": 0.23495, "29205": "nan", "29210": "nan", "29215": "nan", "29220": "nan", "29225": "nan", "29230": "nan", "29235": "nan", "29240": "nan", "29245": "nan", "29250": "nan", "29255": "nan", "29260": "nan", "29265": "nan", "29270": "nan", "29275": "nan", "29280": "nan", "29285": "nan", "29290": "nan", "29295": "nan", "29300": 0.23601, "29305": "nan", "29310": "nan", "29315": "nan", "29320": "nan", "29325": "nan", "29330": "nan", "29335": "nan", "29340": "nan", "29345": "nan", "29350": "nan", "29355": "nan", "29360": "nan", "29365": "nan", "29370": "nan", "29375": "nan", "29380": "nan", "29385": "nan", "29390": "nan", "29395": "nan", "29400": 0.23031, "29405": "nan", "29410": "nan", "29415": "nan", "29420": "nan", "29425": "nan", "29430": "nan", "29435": "nan", "29440": "nan", "29445": "nan", "29450": "nan", "29455": "nan", "29460": "nan", "29465": "nan", "29470": "nan", "29475": "nan", "29480": "nan", "29485": "nan", "29490": "nan", "29495": "nan", "29500": 0.23157, "29505": "nan", "29510": "nan", "29515": "nan", "29520": "nan", "29525": "nan", "29530": "nan", "29535": "nan", "29540": "nan", "29545": "nan", "29550": "nan", "29555": "nan", "29560": "nan", "29565": "nan", "29570": "nan", "29575": "nan", "29580": "nan", "29585": "nan", "29590": "nan", "29595": "nan", "29600": 0.23007, "29605": "nan", "29610": "nan", "29615": "nan", "29620": "nan", "29625": "nan", "29630": "nan", "29635": "nan", "29640": "nan", "29645": "nan", "29650": "nan", "29655": "nan", "29660": "nan", "29665": "nan", "29670": "nan", "29675": "nan", "29680": "nan", "29685": "nan", "29690": "nan", "29695": "nan", "29700": 0.23384, "29705": "nan", "29710": "nan", "29715": "nan", "29720": "nan", "29725": "nan", "29730": "nan", "29735": "nan", "29740": "nan", "29745": "nan", "29750": "nan", "29755": "nan", "29760": "nan", "29765": "nan", "29770": "nan", "29775": "nan", "29780": "nan", "29785": "nan", "29790": "nan", "29795": "nan", "29800": 0.22881, "29805": "nan", "29810": "nan", "29815": "nan", "29820": "nan", "29825": "nan", "29830": "nan", "29835": "nan", "29840": "nan", "29845": "nan", "29850": "nan", "29855": "nan", "29860": "nan", "29865": "nan", "29870": "nan", "29875": "nan", "29880": "nan", "29885": "nan", "29890": "nan", "29895": "nan", "29900": 0.23024, "29905": "nan", "29910": "nan", "29915": "nan", "29920": "nan", "29925": "nan", "29930": "nan", "29935": "nan", "29940": "nan", "29945": "nan", "29950": "nan", "29955": "nan", "29960": "nan", "29965": "nan", "29970": "nan", "29975": "nan", "29980": "nan", "29985": "nan", "29990": "nan", "29995": "nan", "30000": 0.23425, "30005": "nan", "30010": "nan", "30015": "nan", "30020": "nan", "30025": "nan", "30030": "nan", 
"30035": "nan", "30040": "nan", "30045": "nan", "30050": "nan", "30055": "nan", "30060": "nan", "30065": "nan", "30070": "nan", "30075": "nan", "30080": "nan", "30085": "nan", "30090": "nan", "30095": "nan", "30100": 0.22896, "30105": "nan", "30110": "nan", "30115": "nan", "30120": "nan", "30125": "nan", "30130": "nan", "30135": "nan", "30140": "nan", "30145": "nan", "30150": "nan", "30155": "nan", "30160": "nan", "30165": "nan", "30170": "nan", "30175": "nan", "30180": "nan", "30185": "nan", "30190": "nan", "30195": "nan", "30200": 0.22793, "30205": "nan", "30210": "nan", "30215": "nan", "30220": "nan", "30225": "nan", "30230": "nan", "30235": "nan", "30240": "nan", "30245": "nan", "30250": "nan", "30255": "nan", "30260": "nan", "30265": "nan", "30270": "nan", "30275": "nan", "30280": "nan", "30285": "nan", "30290": "nan", "30295": "nan", "30300": 0.2412, "30305": "nan", "30310": "nan", "30315": "nan", "30320": "nan", "30325": "nan", "30330": "nan", "30335": "nan", "30340": "nan", "30345": "nan", "30350": "nan", "30355": "nan", "30360": "nan", "30365": "nan", "30370": "nan", "30375": "nan", "30380": "nan", "30385": "nan", "30390": "nan", "30395": "nan", "30400": 0.22885, "30405": "nan", "30410": "nan", "30415": "nan", "30420": "nan", "30425": "nan", "30430": "nan", "30435": "nan", "30440": "nan", "30445": "nan", "30450": "nan", "30455": "nan", "30460": "nan", "30465": "nan", "30470": "nan", "30475": "nan", "30480": "nan", "30485": "nan", "30490": "nan", "30495": "nan", "30500": 0.23717, "30505": "nan", "30510": "nan", "30515": "nan", "30520": "nan", "30525": "nan", "30530": "nan", "30535": "nan", "30540": "nan", "30545": "nan", "30550": "nan", "30555": "nan", "30560": "nan", "30565": "nan", "30570": "nan", "30575": "nan", "30580": "nan", "30585": "nan", "30590": "nan", "30595": "nan", "30600": 0.22706, "30605": "nan", "30610": "nan", "30615": "nan", "30620": "nan", "30625": "nan", "30630": "nan", "30635": "nan", "30640": "nan", "30645": "nan", "30650": "nan", "30655": "nan", "30660": "nan", "30665": "nan", "30670": "nan", "30675": "nan", "30680": "nan", "30685": "nan", "30690": "nan", "30695": "nan", "30700": 0.24135, "30705": "nan", "30710": "nan", "30715": "nan", "30720": "nan", "30725": "nan", "30730": "nan", "30735": "nan", "30740": "nan", "30745": "nan", "30750": "nan", "30755": "nan", "30760": "nan", "30765": "nan", "30770": "nan", "30775": "nan", "30780": "nan", "30785": "nan", "30790": "nan", "30795": "nan", "30800": 0.22847, "30805": "nan", "30810": "nan", "30815": "nan", "30820": "nan", "30825": "nan", "30830": "nan", "30835": "nan", "30840": "nan", "30845": "nan", "30850": "nan", "30855": "nan", "30860": "nan", "30865": "nan", "30870": "nan", "30875": "nan", "30880": "nan", "30885": "nan", "30890": "nan", "30895": "nan", "30900": 0.22965, "30905": "nan", "30910": "nan", "30915": "nan", "30920": "nan", "30925": "nan", "30930": "nan", "30935": "nan", "30940": "nan", "30945": "nan", "30950": "nan", "30955": "nan", "30960": "nan", "30965": "nan", "30970": "nan", "30975": "nan", "30980": "nan", "30985": "nan", "30990": "nan", "30995": "nan", "31000": 0.23356, "31005": "nan", "31010": "nan", "31015": "nan", "31020": "nan", "31025": "nan", "31030": "nan", "31035": "nan", "31040": "nan", "31045": "nan", "31050": "nan", "31055": "nan", "31060": "nan", "31065": "nan", "31070": "nan", "31075": "nan", "31080": "nan", "31085": "nan", "31090": "nan", "31095": "nan", "31100": 0.23094, "31105": "nan", "31110": "nan", "31115": "nan", "31120": "nan", "31125": "nan", "31130": "nan", "31135": 
"nan", "31140": "nan", "31145": "nan", "31150": "nan", "31155": "nan", "31160": "nan", "31165": "nan", "31170": "nan", "31175": "nan", "31180": "nan", "31185": "nan", "31190": "nan", "31195": "nan", "31200": 0.23575, "31205": "nan", "31210": "nan", "31215": "nan", "31220": "nan", "31225": "nan", "31230": "nan", "31235": "nan", "31240": "nan", "31245": "nan", "31250": "nan", "31255": "nan", "31260": "nan", "31265": "nan", "31270": "nan", "31275": "nan", "31280": "nan", "31285": "nan", "31290": "nan", "31295": "nan", "31300": 0.23106, "31305": "nan", "31310": "nan", "31315": "nan", "31320": "nan", "31325": "nan", "31330": "nan", "31335": "nan", "31340": "nan", "31345": "nan", "31350": "nan", "31355": "nan", "31360": "nan", "31365": "nan", "31370": "nan", "31375": "nan", "31380": "nan", "31385": "nan", "31390": "nan", "31395": "nan", "31400": 0.22731, "31405": "nan", "31410": "nan", "31415": "nan", "31420": "nan", "31425": "nan", "31430": "nan", "31435": "nan", "31440": "nan", "31445": "nan", "31450": "nan", "31455": "nan", "31460": "nan", "31465": "nan", "31470": "nan", "31475": "nan", "31480": "nan", "31485": "nan", "31490": "nan", "31495": "nan", "31500": 0.23077, "31505": "nan", "31510": "nan", "31515": "nan", "31520": "nan", "31525": "nan", "31530": "nan", "31535": "nan", "31540": "nan", "31545": "nan", "31550": "nan", "31555": "nan", "31560": "nan", "31565": "nan", "31570": "nan", "31575": "nan", "31580": "nan", "31585": "nan", "31590": "nan", "31595": "nan", "31600": 0.23004, "31605": "nan", "31610": "nan", "31615": "nan", "31620": "nan", "31625": "nan", "31630": "nan", "31635": "nan", "31640": "nan", "31645": "nan", "31650": "nan", "31655": "nan", "31660": "nan", "31665": "nan", "31670": "nan", "31675": "nan", "31680": "nan", "31685": "nan", "31690": "nan", "31695": "nan", "31700": 0.23845, "31705": "nan", "31710": "nan", "31715": "nan", "31720": "nan", "31725": "nan", "31730": "nan", "31735": "nan", "31740": "nan", "31745": "nan", "31750": "nan", "31755": "nan", "31760": "nan", "31765": "nan", "31770": "nan", "31775": "nan", "31780": "nan", "31785": "nan", "31790": "nan", "31795": "nan", "31800": 0.22682, "31805": "nan", "31810": "nan", "31815": "nan", "31820": "nan", "31825": "nan", "31830": "nan", "31835": "nan", "31840": "nan", "31845": "nan", "31850": "nan", "31855": "nan", "31860": "nan", "31865": "nan", "31870": "nan", "31875": "nan", "31880": "nan", "31885": "nan", "31890": "nan", "31895": "nan", "31900": 0.22448, "31905": "nan", "31910": "nan", "31915": "nan", "31920": "nan", "31925": "nan", "31930": "nan", "31935": "nan", "31940": "nan", "31945": "nan", "31950": "nan", "31955": "nan", "31960": "nan", "31965": "nan", "31970": "nan", "31975": "nan", "31980": "nan", "31985": "nan", "31990": "nan", "31995": "nan", "32000": 0.22687, "32005": "nan", "32010": "nan", "32015": "nan", "32020": "nan", "32025": "nan", "32030": "nan", "32035": "nan", "32040": "nan", "32045": "nan", "32050": "nan", "32055": "nan", "32060": "nan", "32065": "nan", "32070": "nan", "32075": "nan", "32080": "nan", "32085": "nan", "32090": "nan", "32095": "nan", "32100": 0.2316, "32105": "nan", "32110": "nan", "32115": "nan", "32120": "nan", "32125": "nan", "32130": "nan", "32135": "nan", "32140": "nan", "32145": "nan", "32150": "nan", "32155": "nan", "32160": "nan", "32165": "nan", "32170": "nan", "32175": "nan", "32180": "nan", "32185": "nan", "32190": "nan", "32195": "nan", "32200": 0.23272, "32205": "nan", "32210": "nan", "32215": "nan", "32220": "nan", "32225": "nan", "32230": "nan", "32235": "nan", 
"32240": "nan", "32245": "nan", "32250": "nan", "32255": "nan", "32260": "nan", "32265": "nan", "32270": "nan", "32275": "nan", "32280": "nan", "32285": "nan", "32290": "nan", "32295": "nan", "32300": 0.22869, "32305": "nan", "32310": "nan", "32315": "nan", "32320": "nan", "32325": "nan", "32330": "nan", "32335": "nan", "32340": "nan", "32345": "nan", "32350": "nan", "32355": "nan", "32360": "nan", "32365": "nan", "32370": "nan", "32375": "nan", "32380": "nan", "32385": "nan", "32390": "nan", "32395": "nan", "32400": 0.22935, "32405": "nan", "32410": "nan", "32415": "nan", "32420": "nan", "32425": "nan", "32430": "nan", "32435": "nan", "32440": "nan", "32445": "nan", "32450": "nan", "32455": "nan", "32460": "nan", "32465": "nan", "32470": "nan", "32475": "nan", "32480": "nan", "32485": "nan", "32490": "nan", "32495": "nan", "32500": 0.24135, "32505": "nan", "32510": "nan", "32515": "nan", "32520": "nan", "32525": "nan", "32530": "nan", "32535": "nan", "32540": "nan", "32545": "nan", "32550": "nan", "32555": "nan", "32560": "nan", "32565": "nan", "32570": "nan", "32575": "nan", "32580": "nan", "32585": "nan", "32590": "nan", "32595": "nan", "32600": 0.23295, "32605": "nan", "32610": "nan", "32615": "nan", "32620": "nan", "32625": "nan", "32630": "nan", "32635": "nan", "32640": "nan", "32645": "nan", "32650": "nan", "32655": "nan", "32660": "nan", "32665": "nan", "32670": "nan", "32675": "nan", "32680": "nan", "32685": "nan", "32690": "nan", "32695": "nan", "32700": 0.22856, "32705": "nan", "32710": "nan", "32715": "nan", "32720": "nan", "32725": "nan", "32730": "nan", "32735": "nan", "32740": "nan", "32745": "nan", "32750": "nan", "32755": "nan", "32760": "nan", "32765": "nan", "32770": "nan", "32775": "nan", "32780": "nan", "32785": "nan", "32790": "nan", "32795": "nan", "32800": 0.23196, "32805": "nan", "32810": "nan", "32815": "nan", "32820": "nan", "32825": "nan", "32830": "nan", "32835": "nan", "32840": "nan", "32845": "nan", "32850": "nan", "32855": "nan", "32860": "nan", "32865": "nan", "32870": "nan", "32875": "nan", "32880": "nan", "32885": "nan", "32890": "nan", "32895": "nan", "32900": 0.22798, "32905": "nan", "32910": "nan", "32915": "nan", "32920": "nan", "32925": "nan", "32930": "nan", "32935": "nan", "32940": "nan", "32945": "nan", "32950": "nan", "32955": "nan", "32960": "nan", "32965": "nan", "32970": "nan", "32975": "nan", "32980": "nan", "32985": "nan", "32990": "nan", "32995": "nan", "33000": 0.23311, "33005": "nan", "33010": "nan", "33015": "nan", "33020": "nan", "33025": "nan", "33030": "nan", "33035": "nan", "33040": "nan", "33045": "nan", "33050": "nan", "33055": "nan", "33060": "nan", "33065": "nan", "33070": "nan", "33075": "nan", "33080": "nan", "33085": "nan", "33090": "nan", "33095": "nan", "33100": 0.23045, "33105": "nan", "33110": "nan", "33115": "nan", "33120": "nan", "33125": "nan", "33130": "nan", "33135": "nan", "33140": "nan", "33145": "nan", "33150": "nan", "33155": "nan", "33160": "nan", "33165": "nan", "33170": "nan", "33175": "nan", "33180": "nan", "33185": "nan", "33190": "nan", "33195": "nan", "33200": 0.22957, "33205": "nan", "33210": "nan", "33215": "nan", "33220": "nan", "33225": "nan", "33230": "nan", "33235": "nan", "33240": "nan", "33245": "nan", "33250": "nan", "33255": "nan", "33260": "nan", "33265": "nan", "33270": "nan", "33275": "nan", "33280": "nan", "33285": "nan", "33290": "nan", "33295": "nan", "33300": 0.22877, "33305": "nan", "33310": "nan", "33315": "nan", "33320": "nan", "33325": "nan", "33330": "nan", "33335": "nan", "33340": 
"nan", "33345": "nan", "33350": "nan", "33355": "nan", "33360": "nan", "33365": "nan", "33370": "nan", "33375": "nan", "33380": "nan", "33385": "nan", "33390": "nan", "33395": "nan", "33400": 0.23363, "33405": "nan", "33410": "nan", "33415": "nan", "33420": "nan", "33425": "nan", "33430": "nan", "33435": "nan", "33440": "nan", "33445": "nan", "33450": "nan", "33455": "nan", "33460": "nan", "33465": "nan", "33470": "nan", "33475": "nan", "33480": "nan", "33485": "nan", "33490": "nan", "33495": "nan", "33500": 0.22832, "33505": "nan", "33510": "nan", "33515": "nan", "33520": "nan", "33525": "nan", "33530": "nan", "33535": "nan", "33540": "nan", "33545": "nan", "33550": "nan", "33555": "nan", "33560": "nan", "33565": "nan", "33570": "nan", "33575": "nan", "33580": "nan", "33585": "nan", "33590": "nan", "33595": "nan", "33600": 0.23442, "33605": "nan", "33610": "nan", "33615": "nan", "33620": "nan", "33625": "nan", "33630": "nan", "33635": "nan", "33640": "nan", "33645": "nan", "33650": "nan", "33655": "nan", "33660": "nan", "33665": "nan", "33670": "nan", "33675": "nan", "33680": "nan", "33685": "nan", "33690": "nan", "33695": "nan", "33700": 0.23186, "33705": "nan", "33710": "nan", "33715": "nan", "33720": "nan", "33725": "nan", "33730": "nan", "33735": "nan", "33740": "nan", "33745": "nan", "33750": "nan", "33755": "nan", "33760": "nan", "33765": "nan", "33770": "nan", "33775": "nan", "33780": "nan", "33785": "nan", "33790": "nan", "33795": "nan", "33800": 0.22452, "33805": "nan", "33810": "nan", "33815": "nan", "33820": "nan", "33825": "nan", "33830": "nan", "33835": "nan", "33840": "nan", "33845": "nan", "33850": "nan", "33855": "nan", "33860": "nan", "33865": "nan", "33870": "nan", "33875": "nan", "33880": "nan", "33885": "nan", "33890": "nan", "33895": "nan", "33900": 0.23883, "33905": "nan", "33910": "nan", "33915": "nan", "33920": "nan", "33925": "nan", "33930": "nan", "33935": "nan", "33940": "nan", "33945": "nan", "33950": "nan", "33955": "nan", "33960": "nan", "33965": "nan", "33970": "nan", "33975": "nan", "33980": "nan", "33985": "nan", "33990": "nan", "33995": "nan", "34000": 0.22615, "34005": "nan", "34010": "nan", "34015": "nan", "34020": "nan", "34025": "nan", "34030": "nan", "34035": "nan", "34040": "nan", "34045": "nan", "34050": "nan", "34055": "nan", "34060": "nan", "34065": "nan", "34070": "nan", "34075": "nan", "34080": "nan", "34085": "nan", "34090": "nan", "34095": "nan", "34100": 0.2372, "34105": "nan", "34110": "nan", "34115": "nan", "34120": "nan", "34125": "nan", "34130": "nan", "34135": "nan", "34140": "nan", "34145": "nan", "34150": "nan", "34155": "nan", "34160": "nan", "34165": "nan", "34170": "nan", "34175": "nan", "34180": "nan", "34185": "nan", "34190": "nan", "34195": "nan", "34200": 0.239, "34205": "nan", "34210": "nan", "34215": "nan", "34220": "nan", "34225": "nan", "34230": "nan", "34235": "nan", "34240": "nan", "34245": "nan", "34250": "nan", "34255": "nan", "34260": "nan", "34265": "nan", "34270": "nan", "34275": "nan", "34280": "nan", "34285": "nan", "34290": "nan", "34295": "nan", "34300": 0.23999, "34305": "nan", "34310": "nan", "34315": "nan", "34320": "nan", "34325": "nan", "34330": "nan", "34335": "nan", "34340": "nan", "34345": "nan", "34350": "nan", "34355": "nan", "34360": "nan", "34365": "nan", "34370": "nan", "34375": "nan", "34380": "nan", "34385": "nan", "34390": "nan", "34395": "nan", "34400": 0.23321, "34405": "nan", "34410": "nan", "34415": "nan", "34420": "nan", "34425": "nan", "34430": "nan", "34435": "nan", "34440": "nan", "34445": 
"nan", "34450": "nan", "34455": "nan", "34460": "nan", "34465": "nan", "34470": "nan", "34475": "nan", "34480": "nan", "34485": "nan", "34490": "nan", "34495": "nan", "34500": 0.23478, "34505": "nan", "34510": "nan", "34515": "nan", "34520": "nan", "34525": "nan", "34530": "nan", "34535": "nan", "34540": "nan", "34545": "nan", "34550": "nan", "34555": "nan", "34560": "nan", "34565": "nan", "34570": "nan", "34575": "nan", "34580": "nan", "34585": "nan", "34590": "nan", "34595": "nan", "34600": 0.2457, "34605": "nan", "34610": "nan", "34615": "nan", "34620": "nan", "34625": "nan", "34630": "nan", "34635": "nan", "34640": "nan", "34645": "nan", "34650": "nan", "34655": "nan", "34660": "nan", "34665": "nan", "34670": "nan", "34675": "nan", "34680": "nan", "34685": "nan", "34690": "nan", "34695": "nan", "34700": 0.24548, "34705": "nan", "34710": "nan", "34715": "nan", "34720": "nan", "34725": "nan", "34730": "nan", "34735": "nan", "34740": "nan", "34745": "nan", "34750": "nan", "34755": "nan", "34760": "nan", "34765": "nan", "34770": "nan", "34775": "nan", "34780": "nan", "34785": "nan", "34790": "nan", "34795": "nan", "34800": 0.24237, "34805": "nan", "34810": "nan", "34815": "nan", "34820": "nan", "34825": "nan", "34830": "nan", "34835": "nan", "34840": "nan", "34845": "nan", "34850": "nan", "34855": "nan", "34860": "nan", "34865": "nan", "34870": "nan", "34875": "nan", "34880": "nan", "34885": "nan", "34890": "nan", "34895": "nan", "34900": 0.25143, "34905": "nan", "34910": "nan", "34915": "nan", "34920": "nan", "34925": "nan", "34930": "nan", "34935": "nan", "34940": "nan", "34945": "nan", "34950": "nan", "34955": "nan", "34960": "nan", "34965": "nan", "34970": "nan", "34975": "nan", "34980": "nan", "34985": "nan", "34990": "nan", "34995": "nan", "35000": 0.24663, "35005": "nan", "35010": "nan", "35015": "nan", "35020": "nan", "35025": "nan", "35030": "nan", "35035": "nan", "35040": "nan", "35045": "nan", "35050": "nan", "35055": "nan", "35060": "nan", "35065": "nan", "35070": "nan", "35075": "nan", "35080": "nan", "35085": "nan", "35090": "nan", "35095": "nan", "35100": 0.24889, "35105": "nan", "35110": "nan", "35115": "nan", "35120": "nan", "35125": "nan", "35130": "nan", "35135": "nan", "35140": "nan", "35145": "nan", "35150": "nan", "35155": "nan", "35160": "nan", "35165": "nan", "35170": "nan", "35175": "nan", "35180": "nan", "35185": "nan", "35190": "nan", "35195": "nan", "35200": 0.23622, "35205": "nan", "35210": "nan", "35215": "nan", "35220": "nan", "35225": "nan", "35230": "nan", "35235": "nan", "35240": "nan", "35245": "nan", "35250": "nan", "35255": "nan", "35260": "nan", "35265": "nan", "35270": "nan", "35275": "nan", "35280": "nan", "35285": "nan", "35290": "nan", "35295": "nan", "35300": 0.2367, "35305": "nan", "35310": "nan", "35315": "nan", "35320": "nan", "35325": "nan", "35330": "nan", "35335": "nan", "35340": "nan", "35345": "nan", "35350": "nan", "35355": "nan", "35360": "nan", "35365": "nan", "35370": "nan", "35375": "nan", "35380": "nan", "35385": "nan", "35390": "nan", "35395": "nan", "35400": 0.24652, "35405": "nan", "35410": "nan", "35415": "nan", "35420": "nan", "35425": "nan", "35430": "nan", "35435": "nan", "35440": "nan", "35445": "nan", "35450": "nan", "35455": "nan", "35460": "nan", "35465": "nan", "35470": "nan", "35475": "nan", "35480": "nan", "35485": "nan", "35490": "nan", "35495": "nan", "35500": 0.23425, "35505": "nan", "35510": "nan", "35515": "nan", "35520": "nan", "35525": "nan", "35530": "nan", "35535": "nan", "35540": "nan", "35545": "nan", 
"35550": "nan", "35555": "nan", "35560": "nan", "35565": "nan", "35570": "nan", "35575": "nan", "35580": "nan", "35585": "nan", "35590": "nan", "35595": "nan", "35600": 0.24452, "35605": "nan", "35610": "nan", "35615": "nan", "35620": "nan", "35625": "nan", "35630": "nan", "35635": "nan", "35640": "nan", "35645": "nan", "35650": "nan", "35655": "nan", "35660": "nan", "35665": "nan", "35670": "nan", "35675": "nan", "35680": "nan", "35685": "nan", "35690": "nan", "35695": "nan", "35700": 0.23813, "35705": "nan", "35710": "nan", "35715": "nan", "35720": "nan", "35725": "nan", "35730": "nan", "35735": "nan", "35740": "nan", "35745": "nan", "35750": "nan", "35755": "nan", "35760": "nan", "35765": "nan", "35770": "nan", "35775": "nan", "35780": "nan", "35785": "nan", "35790": "nan", "35795": "nan", "35800": 0.24101, "35805": "nan", "35810": "nan", "35815": "nan", "35820": "nan", "35825": "nan", "35830": "nan", "35835": "nan", "35840": "nan", "35845": "nan", "35850": "nan", "35855": "nan", "35860": "nan", "35865": "nan", "35870": "nan", "35875": "nan", "35880": "nan", "35885": "nan", "35890": "nan", "35895": "nan", "35900": 0.23783, "35905": "nan", "35910": "nan", "35915": "nan", "35920": "nan", "35925": "nan", "35930": "nan", "35935": "nan", "35940": "nan", "35945": "nan", "35950": "nan", "35955": "nan", "35960": "nan", "35965": "nan", "35970": "nan", "35975": "nan", "35980": "nan", "35985": "nan", "35990": "nan", "35995": "nan", "36000": 0.23821, "36005": "nan", "36010": "nan", "36015": "nan", "36020": "nan", "36025": "nan", "36030": "nan", "36035": "nan", "36040": "nan", "36045": "nan", "36050": "nan", "36055": "nan", "36060": "nan", "36065": "nan", "36070": "nan", "36075": "nan", "36080": "nan", "36085": "nan", "36090": "nan", "36095": "nan", "36100": 0.24207, "36105": "nan", "36110": "nan", "36115": "nan", "36120": "nan", "36125": "nan", "36130": "nan", "36135": "nan", "36140": "nan", "36145": "nan", "36150": "nan", "36155": "nan", "36160": "nan", "36165": "nan", "36170": "nan", "36175": "nan", "36180": "nan", "36185": "nan", "36190": "nan", "36195": "nan", "36200": 0.24479, "36205": "nan", "36210": "nan", "36215": "nan", "36220": "nan", "36225": "nan", "36230": "nan", "36235": "nan", "36240": "nan", "36245": "nan", "36250": "nan", "36255": "nan", "36260": "nan", "36265": "nan", "36270": "nan", "36275": "nan", "36280": "nan", "36285": "nan", "36290": "nan", "36295": "nan", "36300": 0.23949, "36305": "nan", "36310": "nan", "36315": "nan", "36320": "nan", "36325": "nan", "36330": "nan", "36335": "nan", "36340": "nan", "36345": "nan", "36350": "nan", "36355": "nan", "36360": "nan", "36365": "nan", "36370": "nan", "36375": "nan", "36380": "nan", "36385": "nan", "36390": "nan", "36395": "nan", "36400": 0.23364, "36405": "nan", "36410": "nan", "36415": "nan", "36420": "nan", "36425": "nan", "36430": "nan", "36435": "nan", "36440": "nan", "36445": "nan", "36450": "nan", "36455": "nan", "36460": "nan", "36465": "nan", "36470": "nan", "36475": "nan", "36480": "nan", "36485": "nan", "36490": "nan", "36495": "nan", "36500": 0.23887, "36505": "nan", "36510": "nan", "36515": "nan", "36520": "nan", "36525": "nan", "36530": "nan", "36535": "nan", "36540": "nan", "36545": "nan", "36550": "nan", "36555": "nan", "36560": "nan", "36565": "nan", "36570": "nan", "36575": "nan", "36580": "nan", "36585": "nan", "36590": "nan", "36595": "nan", "36600": 0.23029, "36605": "nan", "36610": "nan", "36615": "nan", "36620": "nan", "36625": "nan", "36630": "nan", "36635": "nan", "36640": "nan", "36645": "nan", "36650": 
"nan", "36655": "nan", "36660": "nan", "36665": "nan", "36670": "nan", "36675": "nan", "36680": "nan", "36685": "nan", "36690": "nan", "36695": "nan", "36700": 0.23611, "36705": "nan", "36710": "nan", "36715": "nan", "36720": "nan", "36725": "nan", "36730": "nan", "36735": "nan", "36740": "nan", "36745": "nan", "36750": "nan", "36755": "nan", "36760": "nan", "36765": "nan", "36770": "nan", "36775": "nan", "36780": "nan", "36785": "nan", "36790": "nan", "36795": "nan", "36800": 0.23575, "36805": "nan", "36810": "nan", "36815": "nan", "36820": "nan", "36825": "nan", "36830": "nan", "36835": "nan", "36840": "nan", "36845": "nan", "36850": "nan", "36855": "nan", "36860": "nan", "36865": "nan", "36870": "nan", "36875": "nan", "36880": "nan", "36885": "nan", "36890": "nan", "36895": "nan", "36900": 0.22865, "36905": "nan", "36910": "nan", "36915": "nan", "36920": "nan", "36925": "nan", "36930": "nan", "36935": "nan", "36940": "nan", "36945": "nan", "36950": "nan", "36955": "nan", "36960": "nan", "36965": "nan", "36970": "nan", "36975": "nan", "36980": "nan", "36985": "nan", "36990": "nan", "36995": "nan", "37000": 0.22869, "37005": "nan", "37010": "nan", "37015": "nan", "37020": "nan", "37025": "nan", "37030": "nan", "37035": "nan", "37040": "nan", "37045": "nan", "37050": "nan", "37055": "nan", "37060": "nan", "37065": "nan", "37070": "nan", "37075": "nan", "37080": "nan", "37085": "nan", "37090": "nan", "37095": "nan", "37100": 0.23179, "37105": "nan", "37110": "nan", "37115": "nan", "37120": "nan", "37125": "nan", "37130": "nan", "37135": "nan", "37140": "nan", "37145": "nan", "37150": "nan", "37155": "nan", "37160": "nan", "37165": "nan", "37170": "nan", "37175": "nan", "37180": "nan", "37185": "nan", "37190": "nan", "37195": "nan", "37200": 0.2289, "37205": "nan", "37210": "nan", "37215": "nan", "37220": "nan", "37225": "nan", "37230": "nan", "37235": "nan", "37240": "nan", "37245": "nan", "37250": "nan", "37255": "nan", "37260": "nan", "37265": "nan", "37270": "nan", "37275": "nan", "37280": "nan", "37285": "nan", "37290": "nan", "37295": "nan", "37300": 0.22897, "37305": "nan", "37310": "nan", "37315": "nan", "37320": "nan", "37325": "nan", "37330": "nan", "37335": "nan", "37340": "nan", "37345": "nan", "37350": "nan", "37355": "nan", "37360": "nan", "37365": "nan", "37370": "nan", "37375": "nan", "37380": "nan", "37385": "nan", "37390": "nan", "37395": "nan", "37400": 0.2324, "37405": "nan", "37410": "nan", "37415": "nan", "37420": "nan", "37425": "nan", "37430": "nan", "37435": "nan", "37440": "nan", "37445": "nan", "37450": "nan", "37455": "nan", "37460": "nan", "37465": "nan", "37470": "nan", "37475": "nan", "37480": "nan", "37485": "nan", "37490": "nan", "37495": "nan", "37500": 0.22397, "37505": "nan", "37510": "nan", "37515": "nan", "37520": "nan", "37525": "nan", "37530": "nan", "37535": "nan", "37540": "nan", "37545": "nan", "37550": "nan", "37555": "nan", "37560": "nan", "37565": "nan", "37570": "nan", "37575": "nan", "37580": "nan", "37585": "nan", "37590": "nan", "37595": "nan", "37600": 0.23908, "37605": "nan", "37610": "nan", "37615": "nan", "37620": "nan", "37625": "nan", "37630": "nan", "37635": "nan", "37640": "nan", "37645": "nan", "37650": "nan", "37655": "nan", "37660": "nan", "37665": "nan", "37670": "nan", "37675": "nan", "37680": "nan", "37685": "nan", "37690": "nan", "37695": "nan", "37700": 0.23087, "37705": "nan", "37710": "nan", "37715": "nan", "37720": "nan", "37725": "nan", "37730": "nan", "37735": "nan", "37740": "nan", "37745": "nan", "37750": "nan", 
"37755": "nan", "37760": "nan", "37765": "nan", "37770": "nan", "37775": "nan", "37780": "nan", "37785": "nan", "37790": "nan", "37795": "nan", "37800": 0.22374, "37805": "nan", "37810": "nan", "37815": "nan", "37820": "nan", "37825": "nan", "37830": "nan", "37835": "nan", "37840": "nan", "37845": "nan", "37850": "nan", "37855": "nan", "37860": "nan", "37865": "nan", "37870": "nan", "37875": "nan", "37880": "nan", "37885": "nan", "37890": "nan", "37895": "nan", "37900": 0.23534, "37905": "nan", "37910": "nan", "37915": "nan", "37920": "nan", "37925": "nan", "37930": "nan", "37935": "nan", "37940": "nan", "37945": "nan", "37950": "nan", "37955": "nan", "37960": "nan", "37965": "nan", "37970": "nan", "37975": "nan", "37980": "nan", "37985": "nan", "37990": "nan", "37995": "nan", "38000": 0.22762, "38005": "nan", "38010": "nan", "38015": "nan", "38020": "nan", "38025": "nan", "38030": "nan", "38035": "nan", "38040": "nan", "38045": "nan", "38050": "nan", "38055": "nan", "38060": "nan", "38065": "nan", "38070": "nan", "38075": "nan", "38080": "nan", "38085": "nan", "38090": "nan", "38095": "nan", "38100": 0.23194, "38105": "nan", "38110": "nan", "38115": "nan", "38120": "nan", "38125": "nan", "38130": "nan", "38135": "nan", "38140": "nan", "38145": "nan", "38150": "nan", "38155": "nan", "38160": "nan", "38165": "nan", "38170": "nan", "38175": "nan", "38180": "nan", "38185": "nan", "38190": "nan", "38195": "nan", "38200": 0.23414, "38205": "nan", "38210": "nan", "38215": "nan", "38220": "nan", "38225": "nan", "38230": "nan", "38235": "nan", "38240": "nan", "38245": "nan", "38250": "nan", "38255": "nan", "38260": "nan", "38265": "nan", "38270": "nan", "38275": "nan", "38280": "nan", "38285": "nan", "38290": "nan", "38295": "nan", "38300": 0.23092, "38305": "nan", "38310": "nan", "38315": "nan", "38320": "nan", "38325": "nan", "38330": "nan", "38335": "nan", "38340": "nan", "38345": "nan", "38350": "nan", "38355": "nan", "38360": "nan", "38365": "nan", "38370": "nan", "38375": "nan", "38380": "nan", "38385": "nan", "38390": "nan", "38395": "nan", "38400": 0.23016, "38405": "nan", "38410": "nan", "38415": "nan", "38420": "nan", "38425": "nan", "38430": "nan", "38435": "nan", "38440": "nan", "38445": "nan", "38450": "nan", "38455": "nan", "38460": "nan", "38465": "nan", "38470": "nan", "38475": "nan", "38480": "nan", "38485": "nan", "38490": "nan", "38495": "nan", "38500": 0.24016, "38505": "nan", "38510": "nan", "38515": "nan", "38520": "nan", "38525": "nan", "38530": "nan", "38535": "nan", "38540": "nan", "38545": "nan", "38550": "nan", "38555": "nan", "38560": "nan", "38565": "nan", "38570": "nan", "38575": "nan", "38580": "nan", "38585": "nan", "38590": "nan", "38595": "nan", "38600": 0.23722, "38605": "nan", "38610": "nan", "38615": "nan", "38620": "nan", "38625": "nan", "38630": "nan", "38635": "nan", "38640": "nan", "38645": "nan", "38650": "nan", "38655": "nan", "38660": "nan", "38665": "nan", "38670": "nan", "38675": "nan", "38680": "nan", "38685": "nan", "38690": "nan", "38695": "nan", "38700": 0.23017, "38705": "nan", "38710": "nan", "38715": "nan", "38720": "nan", "38725": "nan", "38730": "nan", "38735": "nan", "38740": "nan", "38745": "nan", "38750": "nan", "38755": "nan", "38760": "nan", "38765": "nan", "38770": "nan", "38775": "nan", "38780": "nan", "38785": "nan", "38790": "nan", "38795": "nan", "38800": 0.23701, "38805": "nan", "38810": "nan", "38815": "nan", "38820": "nan", "38825": "nan", "38830": "nan", "38835": "nan", "38840": "nan", "38845": "nan", "38850": "nan", "38855": 
"nan", "38860": "nan", "38865": "nan", "38870": "nan", "38875": "nan", "38880": "nan", "38885": "nan", "38890": "nan", "38895": "nan", "38900": 0.23362, "38905": "nan", "38910": "nan", "38915": "nan", "38920": "nan", "38925": "nan", "38930": "nan", "38935": "nan", "38940": "nan", "38945": "nan", "38950": "nan", "38955": "nan", "38960": "nan", "38965": "nan", "38970": "nan", "38975": "nan", "38980": "nan", "38985": "nan", "38990": "nan", "38995": "nan", "39000": 0.23759, "39005": "nan", "39010": "nan", "39015": "nan", "39020": "nan", "39025": "nan", "39030": "nan", "39035": "nan", "39040": "nan", "39045": "nan", "39050": "nan", "39055": "nan", "39060": "nan", "39065": "nan", "39070": "nan", "39075": "nan", "39080": "nan", "39085": "nan", "39090": "nan", "39095": "nan", "39100": 0.23168, "39105": "nan", "39110": "nan", "39115": "nan", "39120": "nan", "39125": "nan", "39130": "nan", "39135": "nan", "39140": "nan", "39145": "nan", "39150": "nan", "39155": "nan", "39160": "nan", "39165": "nan", "39170": "nan", "39175": "nan", "39180": "nan", "39185": "nan", "39190": "nan", "39195": "nan", "39200": 0.23392, "39205": "nan", "39210": "nan", "39215": "nan", "39220": "nan", "39225": "nan", "39230": "nan", "39235": "nan", "39240": "nan", "39245": "nan", "39250": "nan", "39255": "nan", "39260": "nan", "39265": "nan", "39270": "nan", "39275": "nan", "39280": "nan", "39285": "nan", "39290": "nan", "39295": "nan", "39300": 0.23575, "39305": "nan", "39310": "nan", "39315": "nan", "39320": "nan", "39325": "nan", "39330": "nan", "39335": "nan", "39340": "nan", "39345": "nan", "39350": "nan", "39355": "nan", "39360": "nan", "39365": "nan", "39370": "nan", "39375": "nan", "39380": "nan", "39385": "nan", "39390": "nan", "39395": "nan", "39400": 0.2359, "39405": "nan", "39410": "nan", "39415": "nan", "39420": "nan", "39425": "nan", "39430": "nan", "39435": "nan", "39440": "nan", "39445": "nan", "39450": "nan", "39455": "nan", "39460": "nan", "39465": "nan", "39470": "nan", "39475": "nan", "39480": "nan", "39485": "nan", "39490": "nan", "39495": "nan", "39500": 0.2374, "39505": "nan", "39510": "nan", "39515": "nan", "39520": "nan", "39525": "nan", "39530": "nan", "39535": "nan", "39540": "nan", "39545": "nan", "39550": "nan", "39555": "nan", "39560": "nan", "39565": "nan", "39570": "nan", "39575": "nan", "39580": "nan", "39585": "nan", "39590": "nan", "39595": "nan", "39600": 0.22972, "39605": "nan", "39610": "nan", "39615": "nan", "39620": "nan", "39625": "nan", "39630": "nan", "39635": "nan", "39640": "nan", "39645": "nan", "39650": "nan", "39655": "nan", "39660": "nan", "39665": "nan", "39670": "nan", "39675": "nan", "39680": "nan", "39685": "nan", "39690": "nan", "39695": "nan", "39700": 0.2383, "39705": "nan", "39710": "nan", "39715": "nan", "39720": "nan", "39725": "nan", "39730": "nan", "39735": "nan", "39740": "nan", "39745": "nan", "39750": "nan", "39755": "nan", "39760": "nan", "39765": "nan", "39770": "nan", "39775": "nan", "39780": "nan", "39785": "nan", "39790": "nan", "39795": "nan", "39800": 0.23065, "39805": "nan", "39810": "nan", "39815": "nan", "39820": "nan", "39825": "nan", "39830": "nan", "39835": "nan", "39840": "nan", "39845": "nan", "39850": "nan", "39855": "nan", "39860": "nan", "39865": "nan", "39870": "nan", "39875": "nan", "39880": "nan", "39885": "nan", "39890": "nan", "39895": "nan", "39900": 0.23743, "39905": "nan", "39910": "nan", "39915": "nan", "39920": "nan", "39925": "nan", "39930": "nan", "39935": "nan", "39940": "nan", "39945": "nan", "39950": "nan", "39955": "nan", "39960": 
"nan", "39965": "nan", "39970": "nan", "39975": "nan", "39980": "nan", "39985": "nan", "39990": "nan", "39995": "nan", "40000": 0.22949, "40005": "nan", "40010": "nan", "40015": "nan", "40020": "nan", "40025": "nan", "40030": "nan", "40035": "nan", "40040": "nan", "40045": "nan", "40050": "nan", "40055": "nan", "40060": "nan", "40065": "nan", "40070": "nan", "40075": "nan", "40080": "nan", "40085": "nan", "40090": "nan", "40095": "nan", "40100": 0.23308, "40105": "nan", "40110": "nan", "40115": "nan", "40120": "nan", "40125": "nan", "40130": "nan", "40135": "nan", "40140": "nan", "40145": "nan", "40150": "nan", "40155": "nan", "40160": "nan", "40165": "nan", "40170": "nan", "40175": "nan", "40180": "nan", "40185": "nan", "40190": "nan", "40195": "nan", "40200": 0.23691, "40205": "nan", "40210": "nan", "40215": "nan", "40220": "nan", "40225": "nan", "40230": "nan", "40235": "nan", "40240": "nan", "40245": "nan", "40250": "nan", "40255": "nan", "40260": "nan", "40265": "nan", "40270": "nan", "40275": "nan", "40280": "nan", "40285": "nan", "40290": "nan", "40295": "nan", "40300": 0.23728, "40305": "nan", "40310": "nan", "40315": "nan", "40320": "nan", "40325": "nan", "40330": "nan", "40335": "nan", "40340": "nan", "40345": "nan", "40350": "nan", "40355": "nan", "40360": "nan", "40365": "nan", "40370": "nan", "40375": "nan", "40380": "nan", "40385": "nan", "40390": "nan", "40395": "nan", "40400": 0.23481, "40405": "nan", "40410": "nan", "40415": "nan", "40420": "nan", "40425": "nan", "40430": "nan", "40435": "nan", "40440": "nan", "40445": "nan", "40450": "nan", "40455": "nan", "40460": "nan", "40465": "nan", "40470": "nan", "40475": "nan", "40480": "nan", "40485": "nan", "40490": "nan", "40495": "nan", "40500": 0.23682, "40505": "nan", "40510": "nan", "40515": "nan", "40520": "nan", "40525": "nan", "40530": "nan", "40535": "nan", "40540": "nan", "40545": "nan", "40550": "nan", "40555": "nan", "40560": "nan", "40565": "nan", "40570": "nan", "40575": "nan", "40580": "nan", "40585": "nan", "40590": "nan", "40595": "nan", "40600": 0.23595, "40605": "nan", "40610": "nan", "40615": "nan", "40620": "nan", "40625": "nan", "40630": "nan", "40635": "nan", "40640": "nan", "40645": "nan", "40650": "nan", "40655": "nan", "40660": "nan", "40665": "nan", "40670": "nan", "40675": "nan", "40680": "nan", "40685": "nan", "40690": "nan", "40695": "nan", "40700": 0.23708, "40705": "nan", "40710": "nan", "40715": "nan", "40720": "nan", "40725": "nan", "40730": "nan", "40735": "nan", "40740": "nan", "40745": "nan", "40750": "nan", "40755": "nan", "40760": "nan", "40765": "nan", "40770": "nan", "40775": "nan", "40780": "nan", "40785": "nan", "40790": "nan", "40795": "nan", "40800": 0.23836, "40805": "nan", "40810": "nan", "40815": "nan", "40820": "nan", "40825": "nan", "40830": "nan", "40835": "nan", "40840": "nan", "40845": "nan", "40850": "nan", "40855": "nan", "40860": "nan", "40865": "nan", "40870": "nan", "40875": "nan", "40880": "nan", "40885": "nan", "40890": "nan", "40895": "nan", "40900": 0.23135, "40905": "nan", "40910": "nan", "40915": "nan", "40920": "nan", "40925": "nan", "40930": "nan", "40935": "nan", "40940": "nan", "40945": "nan", "40950": "nan", "40955": "nan", "40960": "nan", "40965": "nan", "40970": "nan", "40975": "nan", "40980": "nan", "40985": "nan", "40990": "nan", "40995": "nan", "41000": 0.23705, "41005": "nan", "41010": "nan", "41015": "nan", "41020": "nan", "41025": "nan", "41030": "nan", "41035": "nan", "41040": "nan", "41045": "nan", "41050": "nan", "41055": "nan", "41060": "nan", 
"41065": "nan", "41070": "nan", "41075": "nan", "41080": "nan", "41085": "nan", "41090": "nan", "41095": "nan", "41100": 0.23116, "41105": "nan", "41110": "nan", "41115": "nan", "41120": "nan", "41125": "nan", "41130": "nan", "41135": "nan", "41140": "nan", "41145": "nan", "41150": "nan", "41155": "nan", "41160": "nan", "41165": "nan", "41170": "nan", "41175": "nan", "41180": "nan", "41185": "nan", "41190": "nan", "41195": "nan", "41200": 0.23421, "41205": "nan", "41210": "nan", "41215": "nan", "41220": "nan", "41225": "nan", "41230": "nan", "41235": "nan", "41240": "nan", "41245": "nan", "41250": "nan", "41255": "nan", "41260": "nan", "41265": "nan", "41270": "nan", "41275": "nan", "41280": "nan", "41285": "nan", "41290": "nan", "41295": "nan", "41300": 0.23652, "41305": "nan", "41310": "nan", "41315": "nan", "41320": "nan", "41325": "nan", "41330": "nan", "41335": "nan", "41340": "nan", "41345": "nan", "41350": "nan", "41355": "nan", "41360": "nan", "41365": "nan", "41370": "nan", "41375": "nan", "41380": "nan", "41385": "nan", "41390": "nan", "41395": "nan", "41400": 0.22969, "41405": "nan", "41410": "nan", "41415": "nan", "41420": "nan", "41425": "nan", "41430": "nan", "41435": "nan", "41440": "nan", "41445": "nan", "41450": "nan", "41455": "nan", "41460": "nan", "41465": "nan", "41470": "nan", "41475": "nan", "41480": "nan", "41485": "nan", "41490": "nan", "41495": "nan", "41500": 0.22656, "41505": "nan", "41510": "nan", "41515": "nan", "41520": "nan", "41525": "nan", "41530": "nan", "41535": "nan", "41540": "nan", "41545": "nan", "41550": "nan", "41555": "nan", "41560": "nan", "41565": "nan", "41570": "nan", "41575": "nan", "41580": "nan", "41585": "nan", "41590": "nan", "41595": "nan", "41600": 0.23601, "41605": "nan", "41610": "nan", "41615": "nan", "41620": "nan", "41625": "nan", "41630": "nan", "41635": "nan", "41640": "nan", "41645": "nan", "41650": "nan", "41655": "nan", "41660": "nan", "41665": "nan", "41670": "nan", "41675": "nan", "41680": "nan", "41685": "nan", "41690": "nan", "41695": "nan", "41700": 0.23334, "41705": "nan", "41710": "nan", "41715": "nan", "41720": "nan", "41725": "nan", "41730": "nan", "41735": "nan", "41740": "nan", "41745": "nan", "41750": "nan", "41755": "nan", "41760": "nan", "41765": "nan", "41770": "nan", "41775": "nan", "41780": "nan", "41785": "nan", "41790": "nan", "41795": "nan", "41800": 0.23117, "41805": "nan", "41810": "nan", "41815": "nan", "41820": "nan", "41825": "nan", "41830": "nan", "41835": "nan", "41840": "nan", "41845": "nan", "41850": "nan", "41855": "nan", "41860": "nan", "41865": "nan", "41870": "nan", "41875": "nan", "41880": "nan", "41885": "nan", "41890": "nan", "41895": "nan", "41900": 0.23231, "41905": "nan", "41910": "nan", "41915": "nan", "41920": "nan", "41925": "nan", "41930": "nan", "41935": "nan", "41940": "nan", "41945": "nan", "41950": "nan", "41955": "nan", "41960": "nan", "41965": "nan", "41970": "nan", "41975": "nan", "41980": "nan", "41985": "nan", "41990": "nan", "41995": "nan", "42000": 0.231, "42005": "nan", "42010": "nan", "42015": "nan", "42020": "nan", "42025": "nan", "42030": "nan", "42035": "nan", "42040": "nan", "42045": "nan", "42050": "nan", "42055": "nan", "42060": "nan", "42065": "nan", "42070": "nan", "42075": "nan", "42080": "nan", "42085": "nan", "42090": "nan", "42095": "nan", "42100": 0.22899, "42105": "nan", "42110": "nan", "42115": "nan", "42120": "nan", "42125": "nan", "42130": "nan", "42135": "nan", "42140": "nan", "42145": "nan", "42150": "nan", "42155": "nan", "42160": "nan", "42165": 
"nan", "42170": "nan", "42175": "nan", "42180": "nan", "42185": "nan", "42190": "nan", "42195": "nan", "42200": 0.23062, "42205": "nan", "42210": "nan", "42215": "nan", "42220": "nan", "42225": "nan", "42230": "nan", "42235": "nan", "42240": "nan", "42245": "nan", "42250": "nan", "42255": "nan", "42260": "nan", "42265": "nan", "42270": "nan", "42275": "nan", "42280": "nan", "42285": "nan", "42290": "nan", "42295": "nan", "42300": 0.23033, "42305": "nan", "42310": "nan", "42315": "nan", "42320": "nan", "42325": "nan", "42330": "nan", "42335": "nan", "42340": "nan", "42345": "nan", "42350": "nan", "42355": "nan", "42360": "nan", "42365": "nan", "42370": "nan", "42375": "nan", "42380": "nan", "42385": "nan", "42390": "nan", "42395": "nan", "42400": 0.22888, "42405": "nan", "42410": "nan", "42415": "nan", "42420": "nan", "42425": "nan", "42430": "nan", "42435": "nan", "42440": "nan", "42445": "nan", "42450": "nan", "42455": "nan", "42460": "nan", "42465": "nan", "42470": "nan", "42475": "nan", "42480": "nan", "42485": "nan", "42490": "nan", "42495": "nan", "42500": 0.23458, "42505": "nan", "42510": "nan", "42515": "nan", "42520": "nan", "42525": "nan", "42530": "nan", "42535": "nan", "42540": "nan", "42545": "nan", "42550": "nan", "42555": "nan", "42560": "nan", "42565": "nan", "42570": "nan", "42575": "nan", "42580": "nan", "42585": "nan", "42590": "nan", "42595": "nan", "42600": 0.22491, "42605": "nan", "42610": "nan", "42615": "nan", "42620": "nan", "42625": "nan", "42630": "nan", "42635": "nan", "42640": "nan", "42645": "nan", "42650": "nan", "42655": "nan", "42660": "nan", "42665": "nan", "42670": "nan", "42675": "nan", "42680": "nan", "42685": "nan", "42690": "nan", "42695": "nan", "42700": 0.23428, "42705": "nan", "42710": "nan", "42715": "nan", "42720": "nan", "42725": "nan", "42730": "nan", "42735": "nan", "42740": "nan", "42745": "nan", "42750": "nan", "42755": "nan", "42760": "nan", "42765": "nan", "42770": "nan", "42775": "nan", "42780": "nan", "42785": "nan", "42790": "nan", "42795": "nan", "42800": 0.2369, "42805": "nan", "42810": "nan", "42815": "nan", "42820": "nan", "42825": "nan", "42830": "nan", "42835": "nan", "42840": "nan", "42845": "nan", "42850": "nan", "42855": "nan", "42860": "nan", "42865": "nan", "42870": "nan", "42875": "nan", "42880": "nan", "42885": "nan", "42890": "nan", "42895": "nan", "42900": 0.23024, "42905": "nan", "42910": "nan", "42915": "nan", "42920": "nan", "42925": "nan", "42930": "nan", "42935": "nan", "42940": "nan", "42945": "nan", "42950": "nan", "42955": "nan", "42960": "nan", "42965": "nan", "42970": "nan", "42975": "nan", "42980": "nan", "42985": "nan", "42990": "nan", "42995": "nan", "43000": 0.23224, "43005": "nan", "43010": "nan", "43015": "nan", "43020": "nan", "43025": "nan", "43030": "nan", "43035": "nan", "43040": "nan", "43045": "nan", "43050": "nan", "43055": "nan", "43060": "nan", "43065": "nan", "43070": "nan", "43075": "nan", "43080": "nan", "43085": "nan", "43090": "nan", "43095": "nan", "43100": 0.23136, "43105": "nan", "43110": "nan", "43115": "nan", "43120": "nan", "43125": "nan", "43130": "nan", "43135": "nan", "43140": "nan", "43145": "nan", "43150": "nan", "43155": "nan", "43160": "nan", "43165": "nan", "43170": "nan", "43175": "nan", "43180": "nan", "43185": "nan", "43190": "nan", "43195": "nan", "43200": 0.22862, "43205": "nan", "43210": "nan", "43215": "nan", "43220": "nan", "43225": "nan", "43230": "nan", "43235": "nan", "43240": "nan", "43245": "nan", "43250": "nan", "43255": "nan", "43260": "nan", "43265": "nan", 
"43270": "nan", "43275": "nan", "43280": "nan", "43285": "nan", "43290": "nan", "43295": "nan", "43300": 0.23215, "43305": "nan", "43310": "nan", "43315": "nan", "43320": "nan", "43325": "nan", "43330": "nan", "43335": "nan", "43340": "nan", "43345": "nan", "43350": "nan", "43355": "nan", "43360": "nan", "43365": "nan", "43370": "nan", "43375": "nan", "43380": "nan", "43385": "nan", "43390": "nan", "43395": "nan", "43400": 0.22904, "43405": "nan", "43410": "nan", "43415": "nan", "43420": "nan", "43425": "nan", "43430": "nan", "43435": "nan", "43440": "nan", "43445": "nan", "43450": "nan", "43455": "nan", "43460": "nan", "43465": "nan", "43470": "nan", "43475": "nan", "43480": "nan", "43485": "nan", "43490": "nan", "43495": "nan", "43500": 0.23262, "43505": "nan", "43510": "nan", "43515": "nan", "43520": "nan", "43525": "nan", "43530": "nan", "43535": "nan", "43540": "nan", "43545": "nan", "43550": "nan", "43555": "nan", "43560": "nan", "43565": "nan", "43570": "nan", "43575": "nan", "43580": "nan", "43585": "nan", "43590": "nan", "43595": "nan", "43600": 0.23255, "43605": "nan", "43610": "nan", "43615": "nan", "43620": "nan", "43625": "nan", "43630": "nan", "43635": "nan", "43640": "nan", "43645": "nan", "43650": "nan", "43655": "nan", "43660": "nan", "43665": "nan", "43670": "nan", "43675": "nan", "43680": "nan", "43685": "nan", "43690": "nan", "43695": "nan", "43700": 0.23573, "43705": "nan", "43710": "nan", "43715": "nan", "43720": "nan", "43725": "nan", "43730": "nan", "43735": "nan", "43740": "nan", "43745": "nan", "43750": "nan", "43755": "nan", "43760": "nan", "43765": "nan", "43770": "nan", "43775": "nan", "43780": "nan", "43785": "nan", "43790": "nan", "43795": "nan", "43800": 0.22683, "43805": "nan", "43810": "nan", "43815": "nan", "43820": "nan", "43825": "nan", "43830": "nan", "43835": "nan", "43840": "nan", "43845": "nan", "43850": "nan", "43855": "nan", "43860": "nan", "43865": "nan", "43870": "nan", "43875": "nan", "43880": "nan", "43885": "nan", "43890": "nan", "43895": "nan", "43900": 0.23487, "43905": "nan", "43910": "nan", "43915": "nan", "43920": "nan", "43925": "nan", "43930": "nan", "43935": "nan", "43940": "nan", "43945": "nan", "43950": "nan", "43955": "nan", "43960": "nan", "43965": "nan", "43970": "nan", "43975": "nan", "43980": "nan", "43985": "nan", "43990": "nan", "43995": "nan", "44000": 0.23556, "44005": "nan", "44010": "nan", "44015": "nan", "44020": "nan", "44025": "nan", "44030": "nan", "44035": "nan", "44040": "nan", "44045": "nan", "44050": "nan", "44055": "nan", "44060": "nan", "44065": "nan", "44070": "nan", "44075": "nan", "44080": "nan", "44085": "nan", "44090": "nan", "44095": "nan", "44100": 0.23006, "44105": "nan", "44110": "nan", "44115": "nan", "44120": "nan", "44125": "nan", "44130": "nan", "44135": "nan", "44140": "nan", "44145": "nan", "44150": "nan", "44155": "nan", "44160": "nan", "44165": "nan", "44170": "nan", "44175": "nan", "44180": "nan", "44185": "nan", "44190": "nan", "44195": "nan", "44200": 0.23686, "44205": "nan", "44210": "nan", "44215": "nan", "44220": "nan", "44225": "nan", "44230": "nan", "44235": "nan", "44240": "nan", "44245": "nan", "44250": "nan", "44255": "nan", "44260": "nan", "44265": "nan", "44270": "nan", "44275": "nan", "44280": "nan", "44285": "nan", "44290": "nan", "44295": "nan", "44300": 0.23338, "44305": "nan", "44310": "nan", "44315": "nan", "44320": "nan", "44325": "nan", "44330": "nan", "44335": "nan", "44340": "nan", "44345": "nan", "44350": "nan", "44355": "nan", "44360": "nan", "44365": "nan", "44370": 
"nan", "44375": "nan", "44380": "nan", "44385": "nan", "44390": "nan", "44395": "nan", "44400": 0.23249, "44405": "nan", "44410": "nan", "44415": "nan", "44420": "nan", "44425": "nan", "44430": "nan", "44435": "nan", "44440": "nan", "44445": "nan", "44450": "nan", "44455": "nan", "44460": "nan", "44465": "nan", "44470": "nan", "44475": "nan", "44480": "nan", "44485": "nan", "44490": "nan", "44495": "nan", "44500": 0.23397, "44505": "nan", "44510": "nan", "44515": "nan", "44520": "nan", "44525": "nan", "44530": "nan", "44535": "nan", "44540": "nan", "44545": "nan", "44550": "nan", "44555": "nan", "44560": "nan", "44565": "nan", "44570": "nan", "44575": "nan", "44580": "nan", "44585": "nan", "44590": "nan", "44595": "nan", "44600": 0.23447, "44605": "nan", "44610": "nan", "44615": "nan", "44620": "nan", "44625": "nan", "44630": "nan", "44635": "nan", "44640": "nan", "44645": "nan", "44650": "nan", "44655": "nan", "44660": "nan", "44665": "nan", "44670": "nan", "44675": "nan", "44680": "nan", "44685": "nan", "44690": "nan", "44695": "nan", "44700": 0.23402, "44705": "nan", "44710": "nan", "44715": "nan", "44720": "nan", "44725": "nan", "44730": "nan", "44735": "nan", "44740": "nan", "44745": "nan", "44750": "nan", "44755": "nan", "44760": "nan", "44765": "nan", "44770": "nan", "44775": "nan", "44780": "nan", "44785": "nan", "44790": "nan", "44795": "nan", "44800": 0.23319, "44805": "nan", "44810": "nan", "44815": "nan", "44820": "nan", "44825": "nan", "44830": "nan", "44835": "nan", "44840": "nan", "44845": "nan", "44850": "nan", "44855": "nan", "44860": "nan", "44865": "nan", "44870": "nan", "44875": "nan", "44880": "nan", "44885": "nan", "44890": "nan", "44895": "nan", "44900": 0.24066, "44905": "nan", "44910": "nan", "44915": "nan", "44920": "nan", "44925": "nan", "44930": "nan", "44935": "nan", "44940": "nan", "44945": "nan", "44950": "nan", "44955": "nan", "44960": "nan", "44965": "nan", "44970": "nan", "44975": "nan", "44980": "nan", "44985": "nan", "44990": "nan", "44995": "nan", "45000": 0.23496, "45005": "nan", "45010": "nan", "45015": "nan", "45020": "nan", "45025": "nan", "45030": "nan", "45035": "nan", "45040": "nan", "45045": "nan", "45050": "nan", "45055": "nan", "45060": "nan", "45065": "nan", "45070": "nan", "45075": "nan", "45080": "nan", "45085": "nan", "45090": "nan", "45095": "nan", "45100": 0.24359, "45105": "nan", "45110": "nan", "45115": "nan", "45120": "nan", "45125": "nan", "45130": "nan", "45135": "nan", "45140": "nan", "45145": "nan", "45150": "nan", "45155": "nan", "45160": "nan", "45165": "nan", "45170": "nan", "45175": "nan", "45180": "nan", "45185": "nan", "45190": "nan", "45195": "nan", "45200": 0.23678, "45205": "nan", "45210": "nan", "45215": "nan", "45220": "nan", "45225": "nan", "45230": "nan", "45235": "nan", "45240": "nan", "45245": "nan", "45250": "nan", "45255": "nan", "45260": "nan", "45265": "nan", "45270": "nan", "45275": "nan", "45280": "nan", "45285": "nan", "45290": "nan", "45295": "nan", "45300": 0.24255, "45305": "nan", "45310": "nan", "45315": "nan", "45320": "nan", "45325": "nan", "45330": "nan", "45335": "nan", "45340": "nan", "45345": "nan", "45350": "nan", "45355": "nan", "45360": "nan", "45365": "nan", "45370": "nan", "45375": "nan", "45380": "nan", "45385": "nan", "45390": "nan", "45395": "nan", "45400": 0.23023, "45405": "nan", "45410": "nan", "45415": "nan", "45420": "nan", "45425": "nan", "45430": "nan", "45435": "nan", "45440": "nan", "45445": "nan", "45450": "nan", "45455": "nan", "45460": "nan", "45465": "nan", "45470": "nan", 
"45475": "nan", "45480": "nan", "45485": "nan", "45490": "nan", "45495": "nan", "45500": 0.24207, "45505": "nan", "45510": "nan", "45515": "nan", "45520": "nan", "45525": "nan", "45530": "nan", "45535": "nan", "45540": "nan", "45545": "nan", "45550": "nan", "45555": "nan", "45560": "nan", "45565": "nan", "45570": "nan", "45575": "nan", "45580": "nan", "45585": "nan", "45590": "nan", "45595": "nan", "45600": 0.23347, "45605": "nan", "45610": "nan", "45615": "nan", "45620": "nan", "45625": "nan", "45630": "nan", "45635": "nan", "45640": "nan", "45645": "nan", "45650": "nan", "45655": "nan", "45660": "nan", "45665": "nan", "45670": "nan", "45675": "nan", "45680": "nan", "45685": "nan", "45690": "nan", "45695": "nan", "45700": 0.23164, "45705": "nan", "45710": "nan", "45715": "nan", "45720": "nan", "45725": "nan", "45730": "nan", "45735": "nan", "45740": "nan", "45745": "nan", "45750": "nan", "45755": "nan", "45760": "nan", "45765": "nan", "45770": "nan", "45775": "nan", "45780": "nan", "45785": "nan", "45790": "nan", "45795": "nan", "45800": 0.22798, "45805": "nan", "45810": "nan", "45815": "nan", "45820": "nan", "45825": "nan", "45830": "nan", "45835": "nan", "45840": "nan", "45845": "nan", "45850": "nan", "45855": "nan", "45860": "nan", "45865": "nan", "45870": "nan", "45875": "nan", "45880": "nan", "45885": "nan", "45890": "nan", "45895": "nan", "45900": 0.23713, "45905": "nan", "45910": "nan", "45915": "nan", "45920": "nan", "45925": "nan", "45930": "nan", "45935": "nan", "45940": "nan", "45945": "nan", "45950": "nan", "45955": "nan", "45960": "nan", "45965": "nan", "45970": "nan", "45975": "nan", "45980": "nan", "45985": "nan", "45990": "nan", "45995": "nan", "46000": 0.23646, "46005": "nan", "46010": "nan", "46015": "nan", "46020": "nan", "46025": "nan", "46030": "nan", "46035": "nan", "46040": "nan", "46045": "nan", "46050": "nan", "46055": "nan", "46060": "nan", "46065": "nan", "46070": "nan", "46075": "nan", "46080": "nan", "46085": "nan", "46090": "nan", "46095": "nan", "46100": 0.23242, "46105": "nan", "46110": "nan", "46115": "nan", "46120": "nan", "46125": "nan", "46130": "nan", "46135": "nan", "46140": "nan", "46145": "nan", "46150": "nan", "46155": "nan", "46160": "nan", "46165": "nan", "46170": "nan", "46175": "nan", "46180": "nan", "46185": "nan", "46190": "nan", "46195": "nan", "46200": 0.23845, "46205": "nan", "46210": "nan", "46215": "nan", "46220": "nan", "46225": "nan", "46230": "nan", "46235": "nan", "46240": "nan", "46245": "nan", "46250": "nan", "46255": "nan", "46260": "nan", "46265": "nan", "46270": "nan", "46275": "nan", "46280": "nan", "46285": "nan", "46290": "nan", "46295": "nan", "46300": 0.23345, "46305": "nan", "46310": "nan", "46315": "nan", "46320": "nan", "46325": "nan", "46330": "nan", "46335": "nan", "46340": "nan", "46345": "nan", "46350": "nan", "46355": "nan", "46360": "nan", "46365": "nan", "46370": "nan", "46375": "nan", "46380": "nan", "46385": "nan", "46390": "nan", "46395": "nan", "46400": 0.22682, "46405": "nan", "46410": "nan", "46415": "nan", "46420": "nan", "46425": "nan", "46430": "nan", "46435": "nan", "46440": "nan", "46445": "nan", "46450": "nan", "46455": "nan", "46460": "nan", "46465": "nan", "46470": "nan", "46475": "nan", "46480": "nan", "46485": "nan", "46490": "nan", "46495": "nan", "46500": 0.23133, "46505": "nan", "46510": "nan", "46515": "nan", "46520": "nan", "46525": "nan", "46530": "nan", "46535": "nan", "46540": "nan", "46545": "nan", "46550": "nan", "46555": "nan", "46560": "nan", "46565": "nan", "46570": "nan", "46575": 
"nan", "46580": "nan", "46585": "nan", "46590": "nan", "46595": "nan", "46600": 0.23813, "46605": "nan", "46610": "nan", "46615": "nan", "46620": "nan", "46625": "nan", "46630": "nan", "46635": "nan", "46640": "nan", "46645": "nan", "46650": "nan", "46655": "nan", "46660": "nan", "46665": "nan", "46670": "nan", "46675": "nan", "46680": "nan", "46685": "nan", "46690": "nan", "46695": "nan", "46700": 0.22507, "46705": "nan", "46710": "nan", "46715": "nan", "46720": "nan", "46725": "nan", "46730": "nan", "46735": "nan", "46740": "nan", "46745": "nan", "46750": "nan", "46755": "nan", "46760": "nan", "46765": "nan", "46770": "nan", "46775": "nan", "46780": "nan", "46785": "nan", "46790": "nan", "46795": "nan", "46800": 0.23741, "46805": "nan", "46810": "nan", "46815": "nan", "46820": "nan", "46825": "nan", "46830": "nan", "46835": "nan", "46840": "nan", "46845": "nan", "46850": "nan", "46855": "nan", "46860": "nan", "46865": "nan", "46870": "nan", "46875": "nan", "46880": "nan", "46885": "nan", "46890": "nan", "46895": "nan", "46900": 0.23462, "46905": "nan", "46910": "nan", "46915": "nan", "46920": "nan", "46925": "nan", "46930": "nan", "46935": "nan", "46940": "nan", "46945": "nan", "46950": "nan", "46955": "nan", "46960": "nan", "46965": "nan", "46970": "nan", "46975": "nan", "46980": "nan", "46985": "nan", "46990": "nan", "46995": "nan", "47000": 0.23625, "47005": "nan", "47010": "nan", "47015": "nan", "47020": "nan", "47025": "nan", "47030": "nan", "47035": "nan", "47040": "nan", "47045": "nan", "47050": "nan", "47055": "nan", "47060": "nan", "47065": "nan", "47070": "nan", "47075": "nan", "47080": "nan", "47085": "nan", "47090": "nan", "47095": "nan", "47100": 0.23653, "47105": "nan", "47110": "nan", "47115": "nan", "47120": "nan", "47125": "nan", "47130": "nan", "47135": "nan", "47140": "nan", "47145": "nan", "47150": "nan", "47155": "nan", "47160": "nan", "47165": "nan", "47170": "nan", "47175": "nan", "47180": "nan", "47185": "nan", "47190": "nan", "47195": "nan", "47200": 0.23832, "47205": "nan", "47210": "nan", "47215": "nan", "47220": "nan", "47225": "nan", "47230": "nan", "47235": "nan", "47240": "nan", "47245": "nan", "47250": "nan", "47255": "nan", "47260": "nan", "47265": "nan", "47270": "nan", "47275": "nan", "47280": "nan", "47285": "nan", "47290": "nan", "47295": "nan", "47300": 0.23337, "47305": "nan", "47310": "nan", "47315": "nan", "47320": "nan", "47325": "nan", "47330": "nan", "47335": "nan", "47340": "nan", "47345": "nan", "47350": "nan", "47355": "nan", "47360": "nan", "47365": "nan", "47370": "nan", "47375": "nan", "47380": "nan", "47385": "nan", "47390": "nan", "47395": "nan", "47400": 0.23776, "47405": "nan", "47410": "nan", "47415": "nan", "47420": "nan", "47425": "nan", "47430": "nan", "47435": "nan", "47440": "nan", "47445": "nan", "47450": "nan", "47455": "nan", "47460": "nan", "47465": "nan", "47470": "nan", "47475": "nan", "47480": "nan", "47485": "nan", "47490": "nan", "47495": "nan", "47500": 0.24122, "47505": "nan", "47510": "nan", "47515": "nan", "47520": "nan", "47525": "nan", "47530": "nan", "47535": "nan", "47540": "nan", "47545": "nan", "47550": "nan", "47555": "nan", "47560": "nan", "47565": "nan", "47570": "nan", "47575": "nan", "47580": "nan", "47585": "nan", "47590": "nan", "47595": "nan", "47600": 0.23156, "47605": "nan", "47610": "nan", "47615": "nan", "47620": "nan", "47625": "nan", "47630": "nan", "47635": "nan", "47640": "nan", "47645": "nan", "47650": "nan", "47655": "nan", "47660": "nan", "47665": "nan", "47670": "nan", "47675": "nan", 
"47680": "nan", "47685": "nan", "47690": "nan", "47695": "nan", "47700": 0.23915, "47705": "nan", "47710": "nan", "47715": "nan", "47720": "nan", "47725": "nan", "47730": "nan", "47735": "nan", "47740": "nan", "47745": "nan", "47750": "nan", "47755": "nan", "47760": "nan", "47765": "nan", "47770": "nan", "47775": "nan", "47780": "nan", "47785": "nan", "47790": "nan", "47795": "nan", "47800": 0.23374, "47805": "nan", "47810": "nan", "47815": "nan", "47820": "nan", "47825": "nan", "47830": "nan", "47835": "nan", "47840": "nan", "47845": "nan", "47850": "nan", "47855": "nan", "47860": "nan", "47865": "nan", "47870": "nan", "47875": "nan", "47880": "nan", "47885": "nan", "47890": "nan", "47895": "nan", "47900": 0.24112, "47905": "nan", "47910": "nan", "47915": "nan", "47920": "nan", "47925": "nan", "47930": "nan", "47935": "nan", "47940": "nan", "47945": "nan", "47950": "nan", "47955": "nan", "47960": "nan", "47965": "nan", "47970": "nan", "47975": "nan", "47980": "nan", "47985": "nan", "47990": "nan", "47995": "nan", "48000": 0.23719, "48005": "nan", "48010": "nan", "48015": "nan", "48020": "nan", "48025": "nan", "48030": "nan", "48035": "nan", "48040": "nan", "48045": "nan", "48050": "nan", "48055": "nan", "48060": "nan", "48065": "nan", "48070": "nan", "48075": "nan", "48080": "nan", "48085": "nan", "48090": "nan", "48095": "nan", "48100": 0.23175, "48105": "nan", "48110": "nan", "48115": "nan", "48120": "nan", "48125": "nan", "48130": "nan", "48135": "nan", "48140": "nan", "48145": "nan", "48150": "nan", "48155": "nan", "48160": "nan", "48165": "nan", "48170": "nan", "48175": "nan", "48180": "nan", "48185": "nan", "48190": "nan", "48195": "nan", "48200": 0.23798, "48205": "nan", "48210": "nan", "48215": "nan", "48220": "nan", "48225": "nan", "48230": "nan", "48235": "nan", "48240": "nan", "48245": "nan", "48250": "nan", "48255": "nan", "48260": "nan", "48265": "nan", "48270": "nan", "48275": "nan", "48280": "nan", "48285": "nan", "48290": "nan", "48295": "nan", "48300": 0.24183, "48305": "nan", "48310": "nan", "48315": "nan", "48320": "nan", "48325": "nan", "48330": "nan", "48335": "nan", "48340": "nan", "48345": "nan", "48350": "nan", "48355": "nan", "48360": "nan", "48365": "nan", "48370": "nan", "48375": "nan", "48380": "nan", "48385": "nan", "48390": "nan", "48395": "nan", "48400": 0.23364, "48405": "nan", "48410": "nan", "48415": "nan", "48420": "nan", "48425": "nan", "48430": "nan", "48435": "nan", "48440": "nan", "48445": "nan", "48450": "nan", "48455": "nan", "48460": "nan", "48465": "nan", "48470": "nan", "48475": "nan", "48480": "nan", "48485": "nan", "48490": "nan", "48495": "nan", "48500": 0.23473, "48505": "nan", "48510": "nan", "48515": "nan", "48520": "nan", "48525": "nan", "48530": "nan", "48535": "nan", "48540": "nan", "48545": "nan", "48550": "nan", "48555": "nan", "48560": "nan", "48565": "nan", "48570": "nan", "48575": "nan", "48580": "nan", "48585": "nan", "48590": "nan", "48595": "nan", "48600": 0.22917, "48605": "nan", "48610": "nan", "48615": "nan", "48620": "nan", "48625": "nan", "48630": "nan", "48635": "nan", "48640": "nan", "48645": "nan", "48650": "nan", "48655": "nan", "48660": "nan", "48665": "nan", "48670": "nan", "48675": "nan", "48680": "nan", "48685": "nan", "48690": "nan", "48695": "nan", "48700": 0.23939, "48705": "nan", "48710": "nan", "48715": "nan", "48720": "nan", "48725": "nan", "48730": "nan", "48735": "nan", "48740": "nan", "48745": "nan", "48750": "nan", "48755": "nan", "48760": "nan", "48765": "nan", "48770": "nan", "48775": "nan", "48780": 
"nan", "48785": "nan", "48790": "nan", "48795": "nan", "48800": 0.23355, "48805": "nan", "48810": "nan", "48815": "nan", "48820": "nan", "48825": "nan", "48830": "nan", "48835": "nan", "48840": "nan", "48845": "nan", "48850": "nan", "48855": "nan", "48860": "nan", "48865": "nan", "48870": "nan", "48875": "nan", "48880": "nan", "48885": "nan", "48890": "nan", "48895": "nan", "48900": 0.23542, "48905": "nan", "48910": "nan", "48915": "nan", "48920": "nan", "48925": "nan", "48930": "nan", "48935": "nan", "48940": "nan", "48945": "nan", "48950": "nan", "48955": "nan", "48960": "nan", "48965": "nan", "48970": "nan", "48975": "nan", "48980": "nan", "48985": "nan", "48990": "nan", "48995": "nan", "49000": 0.23191, "49005": "nan", "49010": "nan", "49015": "nan", "49020": "nan", "49025": "nan", "49030": "nan", "49035": "nan", "49040": "nan", "49045": "nan", "49050": "nan", "49055": "nan", "49060": "nan", "49065": "nan", "49070": "nan", "49075": "nan", "49080": "nan", "49085": "nan", "49090": "nan", "49095": "nan", "49100": 0.23357, "49105": "nan", "49110": "nan", "49115": "nan", "49120": "nan", "49125": "nan", "49130": "nan", "49135": "nan", "49140": "nan", "49145": "nan", "49150": "nan", "49155": "nan", "49160": "nan", "49165": "nan", "49170": "nan", "49175": "nan", "49180": "nan", "49185": "nan", "49190": "nan", "49195": "nan", "49200": 0.24605, "49205": "nan", "49210": "nan", "49215": "nan", "49220": "nan", "49225": "nan", "49230": "nan", "49235": "nan", "49240": "nan", "49245": "nan", "49250": "nan", "49255": "nan", "49260": "nan", "49265": "nan", "49270": "nan", "49275": "nan", "49280": "nan", "49285": "nan", "49290": "nan", "49295": "nan", "49300": 0.23803, "49305": "nan", "49310": "nan", "49315": "nan", "49320": "nan", "49325": "nan", "49330": "nan", "49335": "nan", "49340": "nan", "49345": "nan", "49350": "nan", "49355": "nan", "49360": "nan", "49365": "nan", "49370": "nan", "49375": "nan", "49380": "nan", "49385": "nan", "49390": "nan", "49395": "nan", "49400": 0.24015, "49405": "nan", "49410": "nan", "49415": "nan", "49420": "nan", "49425": "nan", "49430": "nan", "49435": "nan", "49440": "nan", "49445": "nan", "49450": "nan", "49455": "nan", "49460": "nan", "49465": "nan", "49470": "nan", "49475": "nan", "49480": "nan", "49485": "nan", "49490": "nan", "49495": "nan", "49500": 0.23625, "49505": "nan", "49510": "nan", "49515": "nan", "49520": "nan", "49525": "nan", "49530": "nan", "49535": "nan", "49540": "nan", "49545": "nan", "49550": "nan", "49555": "nan", "49560": "nan", "49565": "nan", "49570": "nan", "49575": "nan", "49580": "nan", "49585": "nan", "49590": "nan", "49595": "nan", "49600": 0.23801, "49605": "nan", "49610": "nan", "49615": "nan", "49620": "nan", "49625": "nan", "49630": "nan", "49635": "nan", "49640": "nan", "49645": "nan", "49650": "nan", "49655": "nan", "49660": "nan", "49665": "nan", "49670": "nan", "49675": "nan", "49680": "nan", "49685": "nan", "49690": "nan", "49695": "nan", "49700": 0.23834, "49705": "nan", "49710": "nan", "49715": "nan", "49720": "nan", "49725": "nan", "49730": "nan", "49735": "nan", "49740": "nan", "49745": "nan", "49750": "nan", "49755": "nan", "49760": "nan", "49765": "nan", "49770": "nan", "49775": "nan", "49780": "nan", "49785": "nan", "49790": "nan", "49795": "nan", "49800": 0.22811, "49805": "nan", "49810": "nan", "49815": "nan", "49820": "nan", "49825": "nan", "49830": "nan", "49835": "nan", "49840": "nan", "49845": "nan", "49850": "nan", "49855": "nan", "49860": "nan", "49865": "nan", "49870": "nan", "49875": "nan", "49880": "nan", 
"49885": "nan", "49890": "nan", "49895": "nan", "49900": 0.23871, "49905": "nan", "49910": "nan", "49915": "nan", "49920": "nan", "49925": "nan", "49930": "nan", "49935": "nan", "49940": "nan", "49945": "nan", "49950": "nan", "49955": "nan", "49960": "nan", "49965": "nan", "49970": "nan", "49975": "nan", "49980": "nan", "49985": "nan", "49990": "nan", "49995": "nan", "50000": 0.23411, "50005": "nan", "50010": "nan", "50015": "nan", "50020": "nan", "50025": "nan", "50030": "nan", "50035": "nan", "50040": "nan", "50045": "nan", "50050": "nan", "50055": "nan", "50060": "nan", "50065": "nan", "50070": "nan", "50075": "nan", "50080": "nan", "50085": "nan", "50090": "nan", "50095": "nan", "50100": 0.23139, "50105": "nan", "50110": "nan", "50115": "nan", "50120": "nan", "50125": "nan", "50130": "nan", "50135": "nan", "50140": "nan", "50145": "nan", "50150": "nan", "50155": "nan", "50160": "nan", "50165": "nan", "50170": "nan", "50175": "nan", "50180": "nan", "50185": "nan", "50190": "nan", "50195": "nan", "50200": 0.24115, "50205": "nan", "50210": "nan", "50215": "nan", "50220": "nan", "50225": "nan", "50230": "nan", "50235": "nan", "50240": "nan", "50245": "nan", "50250": "nan", "50255": "nan", "50260": "nan", "50265": "nan", "50270": "nan", "50275": "nan", "50280": "nan", "50285": "nan", "50290": "nan", "50295": "nan", "50300": 0.23394, "50305": "nan", "50310": "nan", "50315": "nan", "50320": "nan", "50325": "nan", "50330": "nan", "50335": "nan", "50340": "nan", "50345": "nan", "50350": "nan", "50355": "nan", "50360": "nan", "50365": "nan", "50370": "nan", "50375": "nan", "50380": "nan", "50385": "nan", "50390": "nan", "50395": "nan", "50400": 0.23034, "50405": "nan", "50410": "nan", "50415": "nan", "50420": "nan", "50425": "nan", "50430": "nan", "50435": "nan", "50440": "nan", "50445": "nan", "50450": "nan", "50455": "nan", "50460": "nan", "50465": "nan", "50470": "nan", "50475": "nan", "50480": "nan", "50485": "nan", "50490": "nan", "50495": "nan", "50500": 0.23715, "50505": "nan", "50510": "nan", "50515": "nan", "50520": "nan", "50525": "nan", "50530": "nan", "50535": "nan", "50540": "nan", "50545": "nan", "50550": "nan", "50555": "nan", "50560": "nan", "50565": "nan", "50570": "nan", "50575": "nan", "50580": "nan", "50585": "nan", "50590": "nan", "50595": "nan", "50600": 0.24071, "50605": "nan", "50610": "nan", "50615": "nan", "50620": "nan", "50625": "nan", "50630": "nan", "50635": "nan", "50640": "nan", "50645": "nan", "50650": "nan", "50655": "nan", "50660": "nan", "50665": "nan", "50670": "nan", "50675": "nan", "50680": "nan", "50685": "nan", "50690": "nan", "50695": "nan", "50700": 0.23781, "50705": "nan", "50710": "nan", "50715": "nan", "50720": "nan", "50725": "nan", "50730": "nan", "50735": "nan", "50740": "nan", "50745": "nan", "50750": "nan", "50755": "nan", "50760": "nan", "50765": "nan", "50770": "nan", "50775": "nan", "50780": "nan", "50785": "nan", "50790": "nan", "50795": "nan", "50800": 0.22938, "50805": "nan", "50810": "nan", "50815": "nan", "50820": "nan", "50825": "nan", "50830": "nan", "50835": "nan", "50840": "nan", "50845": "nan", "50850": "nan", "50855": "nan", "50860": "nan"}}} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml index 7547eec..ee9cca9 100644 --- a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml @@ -1,61 +1,59 @@ -ENV_VARS: - 
CUDA_DEVICE_MAX_CONNECTIONS: '1'
-  NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1'
-
-TEST_TYPE: 'release'
-MODEL_ARGS:
-  # T5 model args
-  --encoder-num-layers: 12
-  --decoder-num-layers: 12
-  --hidden-size: 768
-  --num-attention-heads: 12
-  --kv-channels: 64
-  --ffn-hidden-size: 3072
-  --encoder-seq-length: 512
-  --decoder-seq-length: 128
-  --max-position-embeddings: 512
-  --init-method-std: 0.015
-  --attention-backend: unfused
-
-  # Training args
-  --micro-batch-size: 32
-  --global-batch-size: 512
-  --train-iters: 100000
-  --weight-decay: 1e-2
-  --clip-grad: 1.0
-  --bf16: true
-  --lr: 0.0001
-  --lr-decay-style: linear
-  --min-lr: 1.0e-5
-  --lr-warmup-fraction: .01
-  --distributed-backend: nccl
-  # Transformer Engine args
-  --use-mcore-models: true
-  --transformer-impl: transformer_engine
-  # Model parallel
-  --tensor-model-parallel-size: 4
-  --pipeline-model-parallel-size: 1
-  --encoder-pipeline-model-parallel-size: 0
-  # Data args
-  --data-path: ${DATA_BLEND}
-  --vocab-file: ${DATA_PATH}/bert-large-cased-vocab.txt
-  --tokenizer-type: BertWordPieceCase
-  --split: 99982,9,9
-  --data-cache-path: ${DATA_CACHE_PATH}
-  --vocab-extra-ids: 100
-  # EVAL_AND_LOGGING_ARGS
-  --log-interval: 100
-  --save-interval: 2000
-  --eval-interval: 1000
-  --save: ${CHECKPOINT_PATH}
-  --load: ${CHECKPOINT_PATH}
-  --eval-iters: 10
-  --tensorboard-dir: ${TENSORBOARD_PATH}
-  --log-timers-to-tensorboard: true
-  --log-memory-to-tensorboard: true
-  --log-num-zeros-in-grad: true
-  --log-params-norm: true
-  --log-validation-ppl-to-tensorboard: true
-  --timing-log-level: 2
-  --wandb-project: megatron-core-release-runs
-  --wandb-exp-name: ${WANDB_EXPERIMENT}
\ No newline at end of file
+ENV_VARS:
+  CUDA_DEVICE_MAX_CONNECTIONS: '1'
+  NVTE_ALLOW_NONDETERMINISTIC_ALGO: '1'
+TEST_TYPE: 'release'
+MODEL_ARGS:
+  # T5 model args
+  --encoder-num-layers: 12
+  --decoder-num-layers: 12
+  --hidden-size: 768
+  --num-attention-heads: 12
+  --kv-channels: 64
+  --ffn-hidden-size: 3072
+  --encoder-seq-length: 512
+  --decoder-seq-length: 128
+  --max-position-embeddings: 512
+  --init-method-std: 0.015
+  --attention-backend: unfused
+  # Training args
+  --micro-batch-size: 32
+  --global-batch-size: 512
+  --train-iters: 100000
+  --weight-decay: 1e-2
+  --clip-grad: 1.0
+  --bf16: true
+  --lr: 0.0001
+  --lr-decay-style: linear
+  --min-lr: 1.0e-5
+  --lr-warmup-fraction: .01
+  --distributed-backend: nccl
+  # Transformer Engine args
+  --use-mcore-models: true
+  --transformer-impl: transformer_engine
+  # Model parallel
+  --tensor-model-parallel-size: 4
+  --pipeline-model-parallel-size: 1
+  --encoder-pipeline-model-parallel-size: 0
+  # Data args
+  --data-path: ${DATA_BLEND}
+  --vocab-file: ${DATA_PATH}/bert-large-cased-vocab.txt
+  --tokenizer-type: BertWordPieceCase
+  --split: 99982,9,9
+  --data-cache-path: ${DATA_CACHE_PATH}
+  --vocab-extra-ids: 100
+  # EVAL_AND_LOGGING_ARGS
+  --log-interval: 100
+  --save-interval: 2000
+  --eval-interval: 1000
+  --save: ${CHECKPOINT_SAVE_PATH}
+  --load: ${CHECKPOINT_LOAD_PATH}
+  --eval-iters: 10
+  --tensorboard-dir: ${TENSORBOARD_PATH}
+  --log-timers-to-tensorboard: true
+  --log-memory-to-tensorboard: true
+  --log-num-zeros-in-grad: true
+  --log-params-norm: true
+  --log-validation-ppl-to-tensorboard: true
+  --timing-log-level: 2
+  --wandb-project: megatron-core-release-runs
+  --wandb-exp-name: ${WANDB_EXPERIMENT}
diff --git a/tests/test_utils/python_scripts/common.py b/tests/test_utils/python_scripts/common.py
index dd2e2e4..2032d46 100644
--- a/tests/test_utils/python_scripts/common.py
+++ 
b/tests/test_utils/python_scripts/common.py @@ -21,13 +21,28 @@ def resolve_cluster_config(cluster: str) -> str: raise ValueError(f"Unknown cluster {cluster} provided.") +def resolve_artifact_config(cluster: str) -> str: + if cluster == "dgxh100_eos": + return "eos_lustre" + if cluster == "dgxa100_dracooci": + return "draco-oci_lustre" + if cluster == "dgxa100_dracooci-ord": + return "draco-oci-ord_lustre" + if cluster == "dgxh100_coreweave": + return "coreweave_lustre" + raise ValueError(f"Unknown cluster {cluster} provided.") + + def flatten_products( workload_manifest: jetclient.JETWorkloadManifest, ) -> jetclient.JETWorkloadManifest: """Flattens a nested dict of products""" + workload_manifest.products = [ - dict(zip(inp.keys(), values)) - for inp in workload_manifest.products + dict(**dict(zip(inp.keys(), values)), **{"test_case": product['test_case'][0]}) + for product in workload_manifest.products + if "products" in product + for inp in product['products'] for values in itertools.product(*inp.values()) ] @@ -195,6 +210,7 @@ def load_workloads( model: Optional[str] = None, test_case: Optional[str] = None, container_image: Optional[str] = None, + record_checkpoints: Optional[str] = None, ) -> List[jetclient.JETWorkloadManifest]: """Return all workloads from disk that match scope and platform.""" recipes_dir = BASE_PATH / ".." / "recipes" @@ -238,4 +254,17 @@ def load_workloads( workloads.append(build_workload) workload.spec.n_repeat = n_repeat workload.spec.time_limit = time_limit + + if record_checkpoints == 'true': + workload.outputs = [ + { + "type": "artifact", + "key": f"unverified/model/mcore-ci/{container_tag}/{{model}}/{{name}}", + "subdir": "checkpoints", + "name": r"{model}/{name}", + "description": r"Checkpoint of {model}/{name}", + "pic": {"name": "Mcore CI", "email": "okoenig@nvidia.com"}, + "labels": {"origin": "ADLR/Megatron-LM"}, + } + ] return workloads diff --git a/tests/test_utils/python_scripts/download_coverage_results.py b/tests/test_utils/python_scripts/download_coverage_results.py new file mode 100644 index 0000000..8af82bc --- /dev/null +++ b/tests/test_utils/python_scripts/download_coverage_results.py @@ -0,0 +1,98 @@ +import glob +import logging +import os +import pathlib +import shutil +import zipfile + +import click +import gitlab + +BASE_PATH = pathlib.Path(__file__).parent.resolve() +PROJECT_ID = int(os.getenv("CI_PROJECT_ID", 19378)) + +logger = logging.getLogger(__name__) + + +@click.command() +@click.option("--pipeline-id", required=True, type=int, help="Pipeline ID") +def main(pipeline_id: int): + logging.basicConfig(level=logging.INFO) + logger.info('Started') + + gl = gitlab.Gitlab( + f"https://{os.getenv('GITLAB_ENDPOINT')}", private_token=os.getenv("RO_API_TOKEN") + ) + + project = gl.projects.get(PROJECT_ID) + pipeline = project.pipelines.get(pipeline_id) + print(pipeline.bridges.list()) + + pipeline_bridges = [ + pipeline_bridge + for pipeline_bridge in pipeline.bridges.list() + if pipeline_bridge.name.startswith("test:unit_tests") + and pipeline_bridge.downstream_pipeline is not None + ] + + ASSETS_DIR = pathlib.Path("tmp") / "results" / "iteration=0" + for pipeline_bridge in pipeline_bridges: + functional_pipeline = project.pipelines.get(pipeline_bridge.downstream_pipeline['id']) + + functional_pipeline_jobs = functional_pipeline.jobs.list(get_all=True) + if "legacy" in pipeline_bridge.name: + continue + + logger.info("Starting with pipeline %s", pipeline_bridge.name) + for functional_pipeline_job in functional_pipeline_jobs: + job = 
project.jobs.get(functional_pipeline_job.id) + logger.info("Starting with job %s", job.name) + + try: + file_name = '__artifacts.zip' + with open(file_name, "wb") as f: + job.artifacts(streamed=True, action=f.write) + zip = zipfile.ZipFile(file_name) + zip.extractall("tmp") + logger.info("Downloaded artifacts of job %s", job.name) + except Exception: + continue + + os.unlink(file_name) + restart_dir = os.listdir(pathlib.Path("tmp") / "results" / "iteration=0")[-1] + coverage_report_source = list( + glob.glob( + str( + pathlib.Path(ASSETS_DIR) + / f"{restart_dir}" + / "assets" + / "basic" + / "*" + / "coverage_report" + ) + ) + )[0] + + coverage_report_target = ( + pathlib.Path("coverage_results") / job.name.replace("/", "-") / "coverage_report" + ) + + if pathlib.Path(coverage_report_source).exists(): + pathlib.Path(coverage_report_target.parent).mkdir(parents=True, exist_ok=True) + logger.info( + "Move artifacts from %s to %s", coverage_report_source, coverage_report_target + ) + + shutil.move(coverage_report_source, coverage_report_target) + else: + logger.info( + "coverage_report for %s does not exist. Skip.", str(f"{job.stage} / {job.name}") + ) + + shutil.rmtree("tmp") + + logger.info("beep boop: All done!") + + +if __name__ == "__main__": + main() diff --git a/tests/test_utils/python_scripts/download_golden_values.py b/tests/test_utils/python_scripts/download_golden_values.py new file mode 100644 index 0000000..55ff48d --- /dev/null +++ b/tests/test_utils/python_scripts/download_golden_values.py @@ -0,0 +1,94 @@ +import logging +import os +import pathlib +import shutil +import zipfile + +import click +import gitlab + +BASE_PATH = pathlib.Path(__file__).parent.resolve() +PROJECT_ID = int(os.getenv("CI_PROJECT_ID", 19378)) + +logger = logging.getLogger(__name__) + + +@click.command() +@click.option("--pipeline-id", required=True, type=int, help="Pipeline ID") +def main(pipeline_id: int): + logging.basicConfig(level=logging.INFO) + logger.info('Started') + + gl = gitlab.Gitlab( + f"https://{os.getenv('GITLAB_ENDPOINT')}", private_token=os.getenv("RO_API_TOKEN") + ) + + project = gl.projects.get(PROJECT_ID) + pipeline = project.pipelines.get(pipeline_id) + print(pipeline.bridges.list()) + + pipeline_bridges = [ + pipeline_bridge + for pipeline_bridge in pipeline.bridges.list() + if pipeline_bridge.name.startswith("functional") + and pipeline_bridge.downstream_pipeline is not None + ] + + ASSETS_DIR = pathlib.Path("tmp") / "results" / "iteration=0" + for pipeline_bridge in pipeline_bridges: + functional_pipeline = project.pipelines.get(pipeline_bridge.downstream_pipeline['id']) + environment = pipeline_bridge.name[len("functional:run_") :] + functional_pipeline_jobs = functional_pipeline.jobs.list(get_all=True) + logger.info("Starting with pipeline %s", pipeline_bridge.name) + for functional_pipeline_job in functional_pipeline_jobs: + job = project.jobs.get(functional_pipeline_job.id) + logger.info("Starting with job %s", job.name) + + try: + file_name = '__artifacts.zip' + with open(file_name, "wb") as f: + job.artifacts(streamed=True, action=f.write) + zip = zipfile.ZipFile(file_name) + zip.extractall("tmp") + logger.info("Downloaded artifacts of job %s", job.name) + except Exception: + continue + + os.unlink(file_name) + restart_dir = os.listdir(pathlib.Path("tmp") / "results" / "iteration=0")[-1] + golden_values_source = ( + pathlib.Path(ASSETS_DIR) + / f"{restart_dir}" + / "assets" + / "basic" + / f"{job.name.replace('_', '-').lower()}-{environment}" + / 
f"golden_values_{environment}.json" + ) + golden_values_target = ( + pathlib.Path("tests") + / "functional_tests" + / 'test_cases' + / job.stage + / job.name + / f"golden_values_{environment}.json" + ) + + if golden_values_source.exists(): + pathlib.Path(golden_values_target.parent).mkdir(parents=True, exist_ok=True) + logger.info( + "Move artifacts from %s to %s", golden_values_source, golden_values_target + ) + + shutil.move(golden_values_source, golden_values_target) + else: + logger.info( + "Golden values for %s does not exist. Skip.", str(f"{job.stage} / {job.name}") + ) + + shutil.rmtree("tmp") + + logger.info("beep boop: All done!") + + +if __name__ == "__main__": + main() diff --git a/tests/test_utils/python_scripts/generate_jet_trigger_job.py b/tests/test_utils/python_scripts/generate_jet_trigger_job.py index 0913b19..3c247c4 100644 --- a/tests/test_utils/python_scripts/generate_jet_trigger_job.py +++ b/tests/test_utils/python_scripts/generate_jet_trigger_job.py @@ -1,155 +1,178 @@ -import pathlib -from typing import Optional - -import click -import yaml - -from tests.test_utils.python_scripts import common - -BASE_PATH = pathlib.Path(__file__).parent.resolve() - - -@click.command() -@click.option("--scope", required=True, type=str, help="Test scope") -@click.option("--environment", required=True, type=str, help="LTS or dev features") -@click.option("--n-repeat", required=False, default=1, type=int) -@click.option("--time-limit", required=False, default=1, type=int) -@click.option( - "--test-cases", required=True, type=str, help="Comma-separated list of test_cases, or 'all'" -) -@click.option("--a100-cluster", required=True, type=str, help="A100 Cluster to run on") -@click.option("--h100-cluster", required=True, type=str, help="H100 Cluster to run on") -@click.option("--output-path", required=True, type=str, help="Path to write GitLab job to") -@click.option("--container-image", required=True, type=str, help="LTS Container image to use") -@click.option("--container-tag", required=True, type=str, help="Container tag to use") -@click.option( - "--dependent-job", - required=True, - type=str, - help="Name of job that created the downstream pipeline", -) -@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") -@click.option( - "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" -) -@click.option( - "--wandb-experiment", - required=False, - type=str, - help="Wandb experiment (only relevant for release tests)", -) -def main( - scope: str, - environment: str, - n_repeat: int, - time_limit: int, - test_cases: str, - a100_cluster: str, - h100_cluster: str, - output_path: str, - container_image: str, - container_tag: str, - dependent_job: str, - tag: Optional[str] = None, - run_name: Optional[str] = None, - wandb_experiment: Optional[str] = None, -): - list_of_test_cases = [ - test_case - for test_case in common.load_workloads( - scope=scope, - container_tag=container_tag, - environment=environment, - test_cases=test_cases, - tag=tag, - ) - if test_case.type != "build" - ] - - tags = [ - "arch/amd64", - "env/prod", - "origin/jet-fleet", - "owner/jet-core", - "purpose/jet-client", - "team/megatron", - ] - - if not list_of_test_cases: - gitlab_pipeline = { - "stages": ["empty-pipeline-placeholder"], - "default": {"interruptible": True}, - "empty-pipeline-placeholder-job": { - "stage": "empty-pipeline-placeholder", - "image": f"{container_image}:{container_tag}", - "tags": tags, - "rules": [ - {"if": '$CI_PIPELINE_SOURCE 
== "parent_pipeline"'}, - {"if": '$CI_MERGE_REQUEST_ID'}, - ], - "timeout": "7 days", - "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": "functional:configure"}], - "script": ["sleep 1"], - "artifacts": {"paths": ["results/"], "when": "always"}, - }, - } - - else: - gitlab_pipeline = { - "stages": list(set([test_case.spec.model for test_case in list_of_test_cases])), - "default": {"interruptible": True}, - } - - for test_case in list_of_test_cases: - if test_case.spec.platforms == "dgx_a100": - cluster = a100_cluster - elif test_case.spec.platforms == "dgx_h100": - cluster = h100_cluster - else: - raise ValueError(f"Platform {test_case.spec.platforms} unknown") - - job_tags = list(tags) - job_tags.append(f"cluster/{common.resolve_cluster_config(cluster)}") - - script = [ - "export PYTHONPATH=$(pwd); " - "python tests/test_utils/python_scripts/launch_jet_workload.py", - f"--model {test_case.spec.model}", - f"--environment {test_case.spec.environment}", - f"--n-repeat {n_repeat}", - f"--time-limit {time_limit}", - f"--test-case '{test_case.spec.test_case}'", - f"--container-tag {container_tag}", - f"--cluster {cluster}", - ] - - if tag is not None: - script.append(f"--tag {tag}") - - if run_name is not None and wandb_experiment is not None: - script.append(f"--run-name {run_name}") - test_case.spec.model - script.append( - f"--wandb-experiment {wandb_experiment}-{test_case.spec.model}-{test_case.spec.test_case}" - ) - - gitlab_pipeline[test_case.spec.test_case] = { - "stage": f"{test_case.spec.model}", - "image": f"{container_image}:{container_tag}", - "tags": job_tags, - "rules": [ - {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, - {"if": '$CI_MERGE_REQUEST_ID'}, - ], - "timeout": "7 days", - "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": dependent_job}], - "script": [" ".join(script)], - "artifacts": {"paths": ["results/"], "when": "always"}, - } - - with open(output_path, 'w') as outfile: - yaml.dump(gitlab_pipeline, outfile, default_flow_style=False) - - -if __name__ == "__main__": - main() +import pathlib +from typing import Optional + +import click +import yaml + +from tests.test_utils.python_scripts import common + +BASE_PATH = pathlib.Path(__file__).parent.resolve() + + +@click.command() +@click.option("--scope", required=True, type=str, help="Test scope") +@click.option("--environment", required=True, type=str, help="LTS or dev features") +@click.option("--n-repeat", required=False, default=1, type=int) +@click.option("--time-limit", required=False, default=1, type=int) +@click.option( + "--test-cases", required=True, type=str, help="Comma-separated list of test_cases, or 'all'" +) +@click.option("--a100-cluster", required=True, type=str, help="A100 Cluster to run on") +@click.option("--h100-cluster", required=True, type=str, help="H100 Cluster to run on") +@click.option( + "--a100-partition", required=False, type=str, help="Slurm partition to use", default=None +) +@click.option( + "--h100-partition", required=False, type=str, help="Slurm partition to use", default=None +) +@click.option("--output-path", required=True, type=str, help="Path to write GitLab job to") +@click.option("--container-image", required=True, type=str, help="LTS Container image to use") +@click.option("--container-tag", required=True, type=str, help="Container tag to use") +@click.option( + "--dependent-job", + required=True, + type=str, + help="Name of job that created the downstream pipeline", +) +@click.option("--record-checkpoints", required=False, type=str, help="Values are 'true' or 
'false'") +@click.option("--slurm-account", required=True, type=str, help="Slurm account to use") +@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") +@click.option( + "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" +) +@click.option( + "--wandb-experiment", + required=False, + type=str, + help="Wandb experiment (only relevant for release tests)", +) +def main( + scope: str, + environment: str, + n_repeat: int, + time_limit: int, + test_cases: str, + a100_cluster: str, + h100_cluster: str, + a100_partition: Optional[str], + h100_partition: Optional[str], + output_path: str, + container_image: str, + container_tag: str, + dependent_job: str, + record_checkpoints: str, + slurm_account: str, + tag: Optional[str] = None, + run_name: Optional[str] = None, + wandb_experiment: Optional[str] = None, +): + list_of_test_cases = [ + test_case + for test_case in common.load_workloads( + scope=scope, + container_tag=container_tag, + environment=environment, + test_cases=test_cases, + tag=tag, + ) + if test_case.type != "build" + ] + + tags = [ + "arch/amd64", + "env/prod", + "origin/jet-fleet", + "owner/jet-core", + "purpose/jet-client", + "team/megatron", + ] + + if not list_of_test_cases: + gitlab_pipeline = { + "stages": ["empty-pipeline-placeholder"], + "default": {"interruptible": True}, + "empty-pipeline-placeholder-job": { + "stage": "empty-pipeline-placeholder", + "image": f"{container_image}:{container_tag}", + "tags": tags, + "rules": [ + {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, + {"if": '$CI_MERGE_REQUEST_ID'}, + ], + "timeout": "7 days", + "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": "functional:configure"}], + "script": ["sleep 1"], + "artifacts": {"paths": ["results/"], "when": "always"}, + }, + } + + else: + gitlab_pipeline = { + "stages": list(set([test_case.spec.model for test_case in list_of_test_cases])), + "default": { + "interruptible": True, + "retry": {"max": 2, "when": "runner_system_failure"}, + }, + } + + for test_case in list_of_test_cases: + if test_case.spec.platforms == "dgx_a100": + cluster = a100_cluster + partition = a100_partition + elif test_case.spec.platforms == "dgx_h100": + cluster = h100_cluster + partition = h100_partition + else: + raise ValueError(f"Platform {test_case.spec.platforms} unknown") + + job_tags = list(tags) + job_tags.append(f"cluster/{common.resolve_cluster_config(cluster)}") + + script = [ + "export PYTHONPATH=$(pwd); " + "python tests/test_utils/python_scripts/launch_jet_workload.py", + f"--model {test_case.spec.model}", + f"--environment {test_case.spec.environment}", + f"--n-repeat {n_repeat}", + f"--time-limit {time_limit}", + f"--scope {scope}", + f"--test-case '{test_case.spec.test_case}'", + f"--container-tag {container_tag}", + f"--cluster {cluster}", + f"--record-checkpoints {record_checkpoints}", + f"--account {slurm_account}", + ] + + if partition is not None: + script.append(f"--partition {partition}") + + if tag is not None: + script.append(f"--tag {tag}") + + if run_name is not None and wandb_experiment is not None: + script.append(f"--run-name {run_name}") + test_case.spec.model + script.append( + f"--wandb-experiment {wandb_experiment}-{test_case.spec.model}-{test_case.spec.test_case}" + ) + + gitlab_pipeline[test_case.spec.test_case] = { + "stage": f"{test_case.spec.model}", + "image": f"{container_image}:{container_tag}", + "tags": job_tags, + "rules": [ + {"if": '$CI_PIPELINE_SOURCE == "parent_pipeline"'}, + {"if": 
'$CI_MERGE_REQUEST_ID'}, + ], + "timeout": "7 days", + "needs": [{"pipeline": '$PARENT_PIPELINE_ID', "job": dependent_job}], + "script": [" ".join(script)], + "artifacts": {"paths": ["results/"], "when": "always"}, + } + + with open(output_path, 'w') as outfile: + yaml.dump(gitlab_pipeline, outfile, default_flow_style=False) + + +if __name__ == "__main__": + main() diff --git a/tests/test_utils/python_scripts/generate_local_jobs.py b/tests/test_utils/python_scripts/generate_local_jobs.py index 1754921..179ccae 100644 --- a/tests/test_utils/python_scripts/generate_local_jobs.py +++ b/tests/test_utils/python_scripts/generate_local_jobs.py @@ -29,6 +29,12 @@ def load_script(config_path: str) -> str: @click.option( "--test-case", required=False, type=str, help="Returns a single test-case with matching name." ) +@click.option( + "--environment", + required=True, + type=str, + help="Pass 'lts' for PyTorch 24.01 and 'dev' for a more recent version.", +) @click.option( "--output-path", required=True, @@ -36,9 +42,20 @@ def load_script(config_path: str) -> str: help="Directory where the functional test will write its artifacts to (Tensorboard logs)", default="/opt/megatron-lm", ) -def main(model: Optional[str], scope: Optional[str], test_case: Optional[str], output_path: str): +def main( + model: Optional[str], + scope: Optional[str], + test_case: Optional[str], + environment: str, + output_path: str, +): workloads = common.load_workloads( - container_image='none', scope=scope, model=model, test_case=test_case, container_tag='none' + container_image='none', + scope=scope, + model=model, + test_case=test_case, + environment=environment, + container_tag='none', ) for workload in workloads: @@ -46,6 +63,7 @@ def main(model: Optional[str], scope: Optional[str], test_case: Optional[str], o continue magic_values = dict(workload.spec) magic_values["assets_dir"] = output_path + magic_values["artifacts_dir"] = output_path file_path = ( pathlib.Path.cwd() diff --git a/tests/test_utils/python_scripts/launch_jet_workload.py b/tests/test_utils/python_scripts/launch_jet_workload.py index 6e0580f..86f13d9 100644 --- a/tests/test_utils/python_scripts/launch_jet_workload.py +++ b/tests/test_utils/python_scripts/launch_jet_workload.py @@ -1,302 +1,352 @@ -import json -import os -import pathlib -import re -import signal -import sys -import tempfile -import time -from typing import List, Optional - -import click -import jetclient -import requests -import yaml -from jet import workloads -from jetclient.facades.objects import log as jet_log -from jetclient.services.dtos.pipeline import PipelineStatus - -from tests.test_utils.python_scripts import common - -BASE_PATH = pathlib.Path(__file__).parent.resolve() - - -def register_pipeline_terminator(pipeline: jetclient.JETPipeline): - def sigterm_handler(_signo, _stack_frame): - print(f"Trying to terminate pipeline {pipeline.jet_id}") - pipeline.cancel() - print(f"Pipeline {pipeline.jet_id} terminated") - sys.exit(0) - - signal.signal(signal.SIGINT, sigterm_handler) - signal.signal(signal.SIGTERM, sigterm_handler) - - -def launch_and_wait_for_completion( - test_case: str, - environment: str, - n_repeat: int, - time_limit: int, - container_image: Optional[str], - container_tag: str, - cluster: str, - account: str, - tag: Optional[str], - run_name: Optional[str], - wandb_experiment: Optional[str], -) -> jetclient.JETPipeline: - n_submit_errors = 0 - - while n_submit_errors < 3: - pipeline = jetclient.JETClient( - customer='mcore', gitlab_ci_token=os.getenv("RO_API_TOKEN"), 
env="prod" - ).workloads.submit( - workloads=common.load_workloads( - test_case=test_case, - n_repeat=n_repeat, - time_limit=time_limit, - tag=tag, - container_image=container_image, - container_tag=container_tag, - environment=environment, - ), - config_id=f"mcore/{common.resolve_cluster_config(cluster)}", - custom_config={ - "launchers": {cluster: {"account": account, "ntasks_per_node": 8}}, - "executors": { - "jet-ci": { - "environments": { - cluster: { - "variables": { - "RUN_NAME": run_name or "", - "WANDB_API_KEY": os.getenv("WANDB_API_KEY") or "", - "WANDB_EXPERIMENT": wandb_experiment or "", - } - } - } - } - }, - }, - wait_for_validation=True, - max_wait_time=(60 * 60), - ) - if pipeline.get_status() == PipelineStatus.SUBMISSION_FAILED: - n_submit_errors += 1 - print(f"Failed submitting pipeline. Let's try again ({n_submit_errors}/3)") - continue - break - - register_pipeline_terminator(pipeline=pipeline) - - print( - f"Pipeline triggered; inspect it here: https://gitlab-master.nvidia.com/dl/jet/ci/-/pipelines/{pipeline.jet_id}", - flush=True, - ) - - n_wait_attempts = 0 - while n_wait_attempts < 3: - try: - pipeline.wait(max_wait_time=60 * 60 * 24 * 7, interval=60 * 1) - break - except (requests.exceptions.ConnectionError, json.decoder.JSONDecodeError) as e: - print(e) - time.sleep(60 * 3**n_wait_attempts) - pipeline = workloads.get_pipeline(pipeline.jet_id) - n_wait_attempts += 1 - - print(f"Pipeline terminated; status: {pipeline.get_status()}") - return pipeline - - -def download_job_assets(logs: List[jet_log.JETLog], iteration: int = 0) -> List[str]: - if not logs: - return [""] - - assets_base_path = BASE_PATH / ".." / ".." / ".." / ".." / "results" / f"iteration={iteration}" - - for restart_idx, log in enumerate(logs): - assets = log.get_assets() - assets_path = assets_base_path / f"restart={restart_idx}" - assets_path.mkdir(parents=True, exist_ok=True) - for log_filename in assets.keys(): - with open(assets_path / log_filename, "w") as fh: - assets[log_filename].download(pathlib.Path(fh.name)) - return assets - - -def extract_logs_to_string(logs: List[jet_log.JETLog]) -> List[str]: - if not logs: - return [""] - - assets = logs[0].get_assets() - log_filename = [key for key in assets.keys() if key.endswith(".log")][0] - - with tempfile.NamedTemporaryFile() as tmp_file: - assets[log_filename].download(pathlib.Path(tmp_file.name)) - with open(pathlib.Path(tmp_file.name), "r") as fh: - return fh.readlines() - - -def parse_failed_job(logs: List[str]) -> Optional[bool]: - for log_row in logs[::-1]: - match = re.search(r"Job finished with status 'FAILED'", log_row) - if match is not None: - return True - return False - - -def parse_finished_training(logs: List[str]) -> Optional[bool]: - for log_row in logs[::-1]: - match = re.search(r"after training is done", log_row) - if match is not None: - return True - return False - - -@click.command() -@click.option("--model", required=True, type=str, help="Model") -@click.option("--test-case", required=True, type=str, help="Test case") -@click.option( - "--environment", required=True, type=click.Choice(['dev', 'lts']), help="Pytorch LTS or DEV" -) -@click.option("--n-repeat", required=False, default=1, type=int) -@click.option("--time-limit", required=False, default=1800, type=int) -@click.option( - "--account", - required=False, - type=str, - help="Slurm account to use", - default="coreai_dlalgo_mcore", -) -@click.option("--cluster", required=True, type=str, help="Cluster to run on") -@click.option("--container-tag", required=True, 
type=str, help="Base image of Mcore image") -@click.option("--container-image", required=False, type=str, help="Base image of Mcore image") -@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") -@click.option( - "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" -) -@click.option( - "--wandb-experiment", - required=False, - type=str, - help="Wandb experiment (only relevant for release tests)", -) -def main( - model: str, - test_case: str, - environment: str, - n_repeat: int, - time_limit: int, - account: str, - cluster: str, - container_tag: str, - tag: Optional[str] = None, - container_image: Optional[str] = None, - run_name: Optional[str] = None, - wandb_experiment: Optional[str] = None, -): - model_config_path = pathlib.Path( - BASE_PATH - / ".." - / ".." - / "functional_tests" - / "test_cases" - / model - / test_case - / "model_config.yaml" - ) - - if model_config_path.exists(): - with open(model_config_path) as stream: - try: - test_case_dict = yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - test_type = test_case_dict['TEST_TYPE'] - else: - test_type = "unit_test" - - if test_type == "release" and (run_name is None or wandb_experiment is None): - print(f"Not all arguments provided ({run_name=}, {wandb_experiment=})") - sys.exit(1) - - n_attempts = 0 - n_nondeterminism_attemps = 0 - n_iteration = 0 - while True and n_attempts < 3 and n_nondeterminism_attemps < 2: - pipeline = launch_and_wait_for_completion( - test_case=test_case, - environment=environment, - n_repeat=n_repeat, - time_limit=time_limit, - container_image=container_image, - container_tag=container_tag, - cluster=cluster, - account=account, - tag=tag, - run_name=run_name, - wandb_experiment=wandb_experiment, - ) - - main_job = [job for job in pipeline.get_jobs() if job.name.startswith("basic")][0] - - n_download_attempt = 0 - while n_download_attempt < 3: - try: - jet_log = main_job.get_logs() - logs = extract_logs_to_string(logs=jet_log) - download_job_assets(logs=jet_log, iteration=n_iteration) - break - except (requests.exceptions.ConnectionError, json.decoder.JSONDecodeError) as e: - print(e) - time.sleep((3**n_download_attempt) * 60) - n_download_attempt += 1 - - concat_logs = "\n".join(logs) - print(f"Logs:\n{concat_logs}") - - success = pipeline.get_status() == PipelineStatus.SUCCESS - - if test_type == "unit_test": - success = success and ( - ( - re.search(r'=.*?\bpassed\b.*?=', concat_logs) - and not re.search(r'=.*?\bfailed\b.*?=', concat_logs) - ) - or "0 selected" in concat_logs - ) - sys.exit(int(not success)) # invert for exit 0 - - if test_type != "release": - if success: - sys.exit(int(not success)) # invert for exit 0 - - if ( - "Some NCCL operations have failed or timed out." 
in concat_logs - or "uncorrectable ECC error encountered" in concat_logs - or "illegal memory access" in concat_logs - or "illegal instruction" in concat_logs - ): - print("Detected NCCL failure, attempt restart.") - n_attempts += 1 - continue - - if "FAILED tests/functional_tests/python_test_utils/test_ci_pipeline.py" in concat_logs: - print("Non-determinism, let's try another node.") - n_nondeterminism_attemps += 1 - continue - - if parse_failed_job(logs=logs): - n_attempts += 1 - continue - - if parse_finished_training(logs=logs): - success = pipeline.get_status() == PipelineStatus.SUCCESS - sys.exit(int(not success)) # invert for exit 0 - n_iteration += 1 - sys.exit(1) - - -if __name__ == "__main__": - main() +import json +import logging +import os +import pathlib +import re +import signal +import sys +import tempfile +import time +from typing import List, Optional + +import click +import jetclient +import requests +import yaml +from jetclient.facades.objects import log as jet_log +from jetclient.services.dtos.pipeline import PipelineStatus + +from tests.test_utils.python_scripts import common + +BASE_PATH = pathlib.Path(__file__).parent.resolve() + +logger = logging.getLogger(__name__) + + +def register_pipeline_terminator(pipeline: jetclient.JETPipeline): + def sigterm_handler(_signo, _stack_frame): + print(f"Trying to terminate pipeline {pipeline.jet_id}") + pipeline.cancel() + print(f"Pipeline {pipeline.jet_id} terminated") + sys.exit(0) + + signal.signal(signal.SIGINT, sigterm_handler) + signal.signal(signal.SIGTERM, sigterm_handler) + + +def launch_and_wait_for_completion( + test_case: str, + environment: str, + n_repeat: int, + time_limit: int, + scope: str, + container_image: Optional[str], + container_tag: str, + cluster: str, + account: str, + record_checkpoints: str, + partition: Optional[str], + tag: Optional[str], + run_name: Optional[str], + wandb_experiment: Optional[str], +) -> jetclient.JETPipeline: + cluster_config = {"account": account} + if partition is not None: + cluster_config['partition'] = partition + + n_submission_attempts = 0 + while n_submission_attempts < 3: + try: + pipeline = jetclient.JETClient( + customer='mcore', gitlab_ci_token=os.getenv("RO_API_TOKEN"), env="prod" + ).workloads.submit( + workloads=common.load_workloads( + test_case=test_case, + n_repeat=n_repeat, + time_limit=time_limit, + tag=tag, + scope=scope, + container_image=container_image, + container_tag=container_tag, + environment=environment, + record_checkpoints=record_checkpoints, + ), + config_id=f"mcore/{common.resolve_cluster_config(cluster)}", + custom_config={ + "launchers": {cluster: cluster_config}, + "executors": { + "jet-ci": { + "environments": { + cluster: { + "variables": { + "RUN_NAME": run_name or "", + "WANDB_API_KEY": os.getenv("WANDB_API_KEY") or "", + "WANDB_EXPERIMENT": wandb_experiment or "", + "RECORD_CHECKPOINTS": str( + "Record checkpoints" + in os.getenv("CI_MERGE_REQUEST_LABELS", "") + ).lower(), + } + } + } + } + }, + "outputs": { + "enabled": True, + "artifacts_storages": [common.resolve_artifact_config(cluster)], + }, + }, + wait_for_validation=True, + max_wait_time=(60 * 60), + ) + except jetclient.clients.gitlab.GitlabAPIError as e: + logger.error(f"Faced {str(e)}. 
Waiting and retrying...") + n_submission_attempts += 1 + time.sleep(2**n_submission_attempts * 5) + continue + + if pipeline.get_status() == PipelineStatus.SUBMISSION_FAILED: + n_submission_attempts += 1 + logger.info("Submission failed, attempt again (%s/3)", str(n_submission_attempts)) + continue + break + + register_pipeline_terminator(pipeline=pipeline) + + logger.info( + "Pipeline triggered; inspect it here: https://gitlab-master.nvidia.com/dl/jet/ci/-/pipelines/%s", + pipeline.jet_id, + ) + + pipeline.wait(max_wait_time=60 * 60 * 24 * 7, interval=60 * 1, retries_on_error=3) + + logger.info(f"Pipeline terminated; status: {pipeline.get_status()}") + return pipeline + + +def download_job_assets(logs: List[jet_log.JETLog], iteration: int = 0) -> List[str]: + if not logs: + logger.info("No logs found for download.") + return [""] + + assets_base_path = BASE_PATH / ".." / ".." / ".." / "results" / f"iteration={iteration}" + + for restart_idx, log in enumerate(logs): + assets = log.get_assets() + assets_path = assets_base_path / f"restart={restart_idx}" + assets_path.mkdir(parents=True, exist_ok=True) + for asset in assets: + (assets_path / asset.source_path).parent.mkdir(parents=True, exist_ok=True) + with open(assets_path / asset.source_path, "w") as fh: + dest = pathlib.Path(fh.name) + logger.info("Downloading log %s to %s", asset.source_path, str(dest)) + asset.download(dest) + return assets + + +def extract_logs_to_string(logs: List[jet_log.JETLog]) -> List[str]: + if not logs: + logger.info("No logs found for download.") + return [""] + + with tempfile.NamedTemporaryFile() as tmp_file: + assets = logs[-1].get_assets() + asset = [asset for asset in assets if asset.name == "output_script-0.log"][0] + asset.download(pathlib.Path(tmp_file.name)) + with open(pathlib.Path(tmp_file.name), "r") as fh: + return fh.readlines() + + +def parse_failed_job(logs: List[str]) -> Optional[bool]: + for log_row in logs[::-1]: + match = re.search(r"Job finished with status 'FAILED'", log_row) + if match is not None: + return True + return False + + +def parse_finished_training(logs: List[str]) -> Optional[bool]: + for log_row in logs[::-1]: + match = re.search(r"after training is done", log_row) + if match is not None: + return True + return False + + +@click.command() +@click.option("--model", required=True, type=str, help="Model") +@click.option("--test-case", required=True, type=str, help="Test case") +@click.option( + "--environment", required=True, type=click.Choice(['dev', 'lts']), help="Pytorch LTS or DEV" +) +@click.option("--n-repeat", required=False, default=1, type=int) +@click.option("--time-limit", required=False, default=1800, type=int) +@click.option("--scope", required=False, default="mr", type=str) +@click.option( + "--account", + required=False, + type=str, + help="Slurm account to use", + default="coreai_dlalgo_mcore", +) +@click.option("--partition", required=False, type=str, help="Slurm partition to use", default=None) +@click.option("--cluster", required=True, type=str, help="Cluster to run on") +@click.option("--container-tag", required=True, type=str, help="Base image of Mcore image") +@click.option("--container-image", required=False, type=str, help="Base image of Mcore image") +@click.option("--tag", required=False, type=str, help="Tag (only relevant for unit tests)") +@click.option("--record-checkpoints", required=False, type=str, help="Values are 'true' or 'false'") +@click.option( + "--run-name", required=False, type=str, help="Run name (only relevant for release tests)" +) 
+@click.option( + "--wandb-experiment", + required=False, + type=str, + help="Wandb experiment (only relevant for release tests)", +) +def main( + model: str, + test_case: str, + environment: str, + n_repeat: int, + time_limit: int, + scope: str, + account: str, + partition: Optional[str], + cluster: str, + container_tag: str, + record_checkpoints: str, + tag: Optional[str] = None, + container_image: Optional[str] = None, + run_name: Optional[str] = None, + wandb_experiment: Optional[str] = None, +): + logging.basicConfig(level=logging.INFO) + logger.info('Started') + + model_config_path = pathlib.Path( + BASE_PATH + / ".." + / ".." + / "functional_tests" + / "test_cases" + / model + / test_case + / "model_config.yaml" + ) + + if model_config_path.exists(): + with open(model_config_path) as stream: + try: + test_case_dict = yaml.safe_load(stream) + except yaml.YAMLError as exc: + print(exc) + + test_type = test_case_dict['TEST_TYPE'] + else: + test_type = "unit_test" + + logger.info('test_type will be %s', test_type) + + if test_type == "release" and (run_name is None or wandb_experiment is None): + logger.error(f"Not all arguments provided ({run_name=}, {wandb_experiment=})") + sys.exit(1) + + n_attempts = 0 + n_nondeterminism_attemps = 0 + n_iteration = 0 + while True and n_attempts < 3 and n_nondeterminism_attemps < 2: + pipeline = launch_and_wait_for_completion( + test_case=test_case, + environment=environment, + n_repeat=n_repeat, + time_limit=time_limit, + scope=scope, + container_image=container_image, + container_tag=container_tag, + cluster=cluster, + account=account, + partition=partition, + tag=tag, + run_name=run_name, + wandb_experiment=wandb_experiment, + record_checkpoints=record_checkpoints, + ) + + main_job = [job for job in pipeline.get_jobs() if job.name.startswith("basic")][0] + + n_download_attempt = 0 + while n_download_attempt < 3: + try: + jet_log = main_job.get_logs() + logs = extract_logs_to_string(logs=jet_log) + download_job_assets(logs=jet_log, iteration=n_iteration) + no_log = False + break + except ( + requests.exceptions.ConnectionError, + json.decoder.JSONDecodeError, + UnicodeDecodeError, + ) as e: + logger.error(e) + time.sleep(2 * n_download_attempt * 15) + n_download_attempt += 1 + no_log = True + except (KeyError, IndexError) as e: + logger.error(e) + no_log = True + break + + if no_log: + logger.error("Did not find any logs to download, retry.") + continue + + concat_logs = "\n".join(logs) + if concat_logs.strip() == "": + logger.error("No logs found. Try again.") + n_attempts += 1 + continue + + if test_type != "release": + print(f"Logs:\n{concat_logs}") + + success = pipeline.get_status() == PipelineStatus.SUCCESS + logger.info("Pipeline terminated with status %s", pipeline.get_status().name) + + if test_type == "unit_test": + sys.exit(int(not success)) # invert for exit 0 + + if test_type != "release": + if success: + sys.exit(int(not success)) # invert for exit 0 + + if ( + "Some NCCL operations have failed or timed out." 
in concat_logs + or "uncorrectable ECC error encountered" in concat_logs + or "illegal memory access" in concat_logs + or "illegal instruction" in concat_logs + or "torch.distributed.DistNetworkError" in concat_logs + ): + logger.error("Detected NCCL failure, attempt restart.") + n_attempts += 1 + continue + + if "FAILED tests/functional_tests/python_test_utils" in concat_logs: + logger.error("Non-determinism, let's try another node.") + n_nondeterminism_attemps += 1 + continue + + sys.exit(1) + + if parse_failed_job(logs=logs): + n_attempts += 1 + continue + + if parse_finished_training(logs=logs): + sys.exit(int(not success)) # invert for exit 0 + n_iteration += 1 + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/tests/test_utils/recipes/bert.yaml b/tests/test_utils/recipes/bert.yaml index 5a4d5a8..1f21c3c 100644 --- a/tests/test_utils/recipes/bert.yaml +++ b/tests/test_utils/recipes/bert.yaml @@ -1,55 +1,108 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: "{test_case}" - model: bert - nodes: 1 - build: mcore-pyt-{environment} - gpus: 8 - platforms: dgx_a100 - artifacts: - /workspace/data/bert_data: text/the_pile/bert_shard00 - script: |- - ls - cd /opt/megatron-lm - - ARGUMENTS=( - "DATA_PATH=/workspace/data/bert_data" - "DATA_CACHE_PATH=/workspace/data/cache" - "OUTPUT_PATH={assets_dir}" - "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" - "TRAINING_SCRIPT_PATH=pretrain_bert.py" - "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" - "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" - "N_REPEAT={n_repeat}" - ) - - bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} - -products: - - environment: [lts, dev] - scope: [mr] - time_limit: [1800] - n_repeat: [5] - test_case: - - bert_mr_mcore_tp2_pp2_dgx_a100_1N8G - - bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G - - bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G - - bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G - - bert_mr_tp1_pp4_vp2_dgx_a100_1N8G - - bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G - - bert_mr_tp2_pp2_dgx_a100_1N8G - - bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G - - environment: [lts, dev] - scope: [nightly] - n_repeat: [5] - time_limit: [3600] - test_case: - - bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2 - - bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2 - - bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1 - - bert_nightly_dgx_a100_1N8G_tp1_pp2 - - bert_nightly_dgx_a100_1N8G_tp4_pp1 +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +spec: + name: '{test_case}_{environment}' + model: bert + nodes: 1 + build: mcore-pyt-{environment} + gpus: 8 + platforms: dgx_a100 + time_limit: + n_repeat: + artifacts: + /workspace/data/bert_data: text/the_pile/bert_shard00 + /workspace/checkpoints/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G_dev: model/mcore_bert/bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G_dev/22410107 + script: |- + ls + cd /opt/megatron-lm + + ARGUMENTS=( + "DATA_PATH=/workspace/data/bert_data" + "DATA_CACHE_PATH=/workspace/data/cache" + "OUTPUT_PATH={assets_dir}" + "TENSORBOARD_PATH={assets_dir}/tensorboard" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints/{name}" + "TRAINING_SCRIPT_PATH=pretrain_bert.py" + 
"TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" + ) + + bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} + +products: + - test_case: [bert_mr_mcore_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_mcore_tp2_pp2_local_spec_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_mcore_tp2_pp2_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_mcore_tp2_pp2_resume_torch_dist_local_spec_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_tp1_pp4_vp2_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_tp1_pp4_vp2_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_mr_tp2_pp2_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [bert_nightly_dgx_a100_1N8G_mcore_tp1_pp2] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [bert_nightly_dgx_a100_1N8G_mcore_tp1_pp4_vp2] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [bert_nightly_dgx_a100_1N8G_mcore_tp4_pp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [bert_nightly_dgx_a100_1N8G_tp1_pp2] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [bert_nightly_dgx_a100_1N8G_tp4_pp1] + products: + - environment: [dev, lts] + scope: [nightly] + # - test_case: [bert_mr_mcore_tp2_pp2_frozen_resume_torch_dist_dgx_a100_1N8G] + # products: + # - environment: [dev] Update checkpoint + # scope: [mr] diff --git a/tests/test_utils/recipes/gpt-modelopt.yaml b/tests/test_utils/recipes/gpt-modelopt.yaml index d75b1db..7985f88 100644 --- a/tests/test_utils/recipes/gpt-modelopt.yaml +++ b/tests/test_utils/recipes/gpt-modelopt.yaml @@ -3,11 +3,14 @@ format_version: 1 maintainers: [mcore] loggers: [stdout] spec: - name: "{test_case}" + name: '{test_case}_{environment}' model: gpt build: mcore-pyt-{environment} nodes: 1 gpus: 2 + platforms: dgx_a100 + time_limit: + n_repeat: artifacts: /workspace/data/gpt3_data: text/the_pile/shard00 /workspace/checkpoints/teacher: model/gpt_dummy_pyt/ckpt/24.10.0_bf16_teacher @@ -20,7 +23,8 @@ spec: "DATA_CACHE_PATH=/workspace/data/cache" "OUTPUT_PATH={assets_dir}" "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints" "TRAINING_SCRIPT_PATH=./examples/export/knowledge_distillation/pretrain_gpt_modelopt.py" "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" @@ -29,9 +33,7 @@ spec: bash 
./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - scope: [nightly] - platforms: [dgx_a100] - time_limit: [1200] - environment: [lts, dev] # Disable dev for now - test_case: - - gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume + - test_case: [gpt3_nightly_mcore_te_tp2_pp1_modelopt_distill_resume] + products: + - environment: [dev, lts] + scope: [nightly] diff --git a/tests/test_utils/recipes/gpt-nemo.yaml b/tests/test_utils/recipes/gpt-nemo.yaml index 01e79b4..d012fd4 100644 --- a/tests/test_utils/recipes/gpt-nemo.yaml +++ b/tests/test_utils/recipes/gpt-nemo.yaml @@ -3,24 +3,25 @@ format_version: 1 maintainers: [mcore] loggers: [stdout] spec: - name: "{test_case}" + name: '{test_case}_{environment}' model: gpt-nemo build: mcore-nemo nodes: 1 gpus: 8 platforms: dgx_a100 time_limit: 1800 - scope: null + scope: script: |- ls cd /opt/NeMo - + ARGUMENTS=( "DATA_PATH='-'" "DATA_CACHE_PATH='-'" "OUTPUT_PATH={assets_dir}" "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints/{name}" "TRAINING_SCRIPT_PATH=/opt/NeMo/examples/nlp/language_modeling/megatron_gpt_pretraining.py" "TRAINING_PARAMS_PATH=/opt/megatron-lm/tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" "GOLDEN_VALUES_PATH=/opt/megatron-lm/tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" @@ -30,10 +31,15 @@ spec: bash /opt/megatron-lm/tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - environment: [dev] - scope: [mr] - n_repeat: [5] - test_case: - - gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G - - gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G - \ No newline at end of file + - test_case: [gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_tp2_pp4_vp3_seq_par_overlap_p2p_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3-nemo_126m_mr_mbs4_gbs64_mcore_te_tp1_pp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3-nemo_126m_mr_mbs1_gbs8_mcore_te_8experts_tp2_ep2_pp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] diff --git a/tests/test_utils/recipes/gpt.yaml b/tests/test_utils/recipes/gpt.yaml index 966d7ef..972aa68 100644 --- a/tests/test_utils/recipes/gpt.yaml +++ b/tests/test_utils/recipes/gpt.yaml @@ -1,166 +1,727 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: "{test_case}" - model: gpt - build: mcore-pyt-{environment} - nodes: 1 - gpus: 8 - artifacts: - /workspace/data/gpt3_data: text/the_pile/shard00 - script: |- - ls - cd /opt/megatron-lm - - ARGUMENTS=( - "DATA_PATH=/workspace/data/gpt3_data" - "DATA_CACHE_PATH=/workspace/data/cache" - "OUTPUT_PATH={assets_dir}" - "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" - "TRAINING_SCRIPT_PATH=pretrain_gpt.py" - "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" - "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" - "N_REPEAT={n_repeat}" - ) - - bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} - -products: - - environment: [lts, dev] - scope: [mr] - platforms: [dgx_a100] - time_limit: [1800] - n_repeat: [5] - test_case: - - 
gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G - # - gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G # torch >= 2.4.0 - - gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G - - 
gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G - - gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G - - gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G - - gpt3_mr_te_tp2_pp2_dgx_a100_1N8G - - gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G - - gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G - - gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G - - gpt3_mr_tp2_pp2_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G - - gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G # cp and attention - - gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G # cp and attention - - gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G # cp and attention - - gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G # cp and attention - - gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G # cp and attention with a2a+p2p comm type - - gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G # cp and attention with a2a+p2p comm type - - environment: [lts, dev] - scope: [nightly] - platforms: [dgx_a100] - time_limit: [3600] - n_repeat: [5] - test_case: - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather - # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te # torch >= 2.4.0 - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2 - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4 - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel - # - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts # non-determinism - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1 - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch - - 
gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2 - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4 - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch - - gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts - - gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1 - - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce - - gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch - - environment: [lts] - scope: [nightly] - platforms: [dgx_a100] - time_limit: [3600] - n_repeat: [5] - test_case: - - gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel # non-determinism in dev - - environment: [lts, dev] - scope: [weekly] - platforms: [dgx_h100] - time_limit: [9000] - test_case: - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp - - gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +spec: + name: '{test_case}_{environment}' + model: gpt + build: mcore-pyt-{environment} + nodes: 1 + gpus: 8 + n_repeat: 5 + platforms: dgx_a100 + artifacts: + /workspace/data/gpt3_data: text/the_pile/shard00 + /workspace/checkpoints/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G_dev: model/mcore_gpt/gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G_dev/22410107 + /workspace/checkpoints/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G_dev: model/mcore_gpt/gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G_dev/22410107 + script: |- + ls + cd /opt/megatron-lm + + ARGUMENTS=( + "DATA_PATH=/workspace/data/gpt3_data" + "DATA_CACHE_PATH=/workspace/data/cache" + "OUTPUT_PATH={assets_dir}" + "TENSORBOARD_PATH={assets_dir}/tensorboard" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints/{name}" + "TRAINING_SCRIPT_PATH=pretrain_gpt.py" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" + ) + + bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} + +products: + ####################################################################### + # Nightly tests: Run both DEV and LTS unless something is flaky # + ####################################################################### + - test_case: 
[gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_dist_optimizer_overlap_grad_reduce_param_gather] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp2_resume_torch_dist] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp4_resume_torch_dist] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_resume_torch_dist_te_4experts2parallel] + products: + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_te_4experts2parallel] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel] + products: + - environment: [dev] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_ep2_etp2_te_4experts2parallel_dp_last] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_cp2_pp2_ep2_te_4experts2parallel_dp_last] + products: + - environment: [dev] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_cp2_pp2_ep2_te_4experts2parallel_nondeterministic_dp_last] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp4_pp1_resume_torch_dist] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_dist_optimizer_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp1_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp2_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp1_pp4_vp1_overlap_grad_reduce] + products: + - environment: [dev, lts] + 
scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_4experts] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp2_pp2_resume_torch_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_overlap_grad_reduce] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_tp4_pp1_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + # - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp2_pp2_resume_torch_dist_te_2experts] + # products: + # - environment: [dev, lts] + # scope: [nightly] + # - test_case: [gpt3_345m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_fsdp2_resume_torch_dist_te] + # products: + # - environment: [dev, lts] + # scope: [nightly] + ####################################################################### + # Weekly tests: Run both DEV and LTS unless something is flaky # + ####################################################################### + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_nondet_tp1_pp1_fp8_no_model_parallel] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_bf16_baseline] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp1_fp8_no_model_parallel] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp1_pp2_fp8_pp] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_fp8_tp_pp_sp] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_native_fp8_tp_pp_sp] + products: + - environment: [dev, lts] + scope: [weekly] + - test_case: [gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp4_pp2_fp8_tp_pp] + products: + - environment: [dev, lts] + scope: [weekly] + ####################################################################### + # MR tests: Mostly DEV on MR, and LTS on nightly cadence, except for # + # some very important tests. 
# + ####################################################################### + - test_case: [gpt3_mr_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer_no_mmap_bin_files_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp1_uniform_full_recompute_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G] + products: + # - environment: [dev] Until TE is at 1.12 + # scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_cp4_a2a_p2p_nondeterministic_dgx_a100_1N8G] + products: + # - environment: [dev] Until TE is at 1.12 + # scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp2_rope_embeddings_interleaved_no_fusion_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_disable_bias_linear_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_persistent_ckpt_disable_bias_linear_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_sequence_parallel_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_swiglu_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + 
- environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_sequence_parallel_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_swiglu_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_untie_embeddings_and_outputs_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_calculate_per_token_loss_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_decoupled_lr_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_untied_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_tunable_overlap_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_uneven_pipeline_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: 
[gpt3_mr_mcore_te_tp2_pp1_multi_dist_optimizer_instances_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_groupedGEMM_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_top2router_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_te_8experts_etp1_ep4_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_cp2_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_dp_last_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_cp2_nondeterministic_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + # - test_case: [gpt3_mr_mcore_te_tp2_pp2_cross_entropy_loss_fusion_dgx_a100_1N8G] Failing on max-memory + # products: + # - environment: [dev] + # scope: [mr] + # - environment: [lts] + # scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_ddp_average_in_collective_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_defer_embedding_wgrad_compute_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + + - test_case: [gpt3_mr_mcore_te_tp2_pp2_no_create_attention_mask_in_dataloader_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_no_mmap_bin_files_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + # - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic_dgx_a100_1N8G] + # products: + # - environment: [dev] + # scope: [mr] + # - environment: [lts] + # 
scope: [nightly] + # - test_case: # Failing on max-memory[gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion_dgx_a100_1N8G] + # products: + # - environment: [dev] + # scope: [mr] + # - environment: [lts] + # scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_create_attention_mask_in_dataloader_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_no_mmap_bin_files_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_qk_layernorm_test_mode_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_tp2_pp2_uninstall_te_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_te_tp2_pp2_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_tp1_pp4_vp1_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_7b_mr_dgx_a100_1N8G_tp1_pp4_memory_speed] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_7b_mr_dgx_a100_1N8G_tp4_pp1_memory_speed] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [gpt3_mr_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + + # - test_case: [gpt3_mr_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective_dgx_a100_1N8G] + # products: + # - environment: [dev] + # scope: [mr] + # - environment: [lts] + # scope: [nightly] + # - test_case: [gpt3_mr_mcore_te_tp2_pp1_fsdp2_resume_torch_dist_dgx_a100_1N8G] + # products: + # - 
environment: [dev] + # scope: [mr] + # - environment: [lts] + # scope: [nightly] + ####################################################################### + # Super important MR tests that run for both DEV and LTS per MR # + ####################################################################### + - test_case: [gpt3_mr_mcore_te_tp2_pp1_cp2_nondeterministic_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp1_pp4_vp1_dist_optimizer_overlap_grad_reduce_param_gather_overlap_optimizer_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce_param_gather_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_te_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_tp1_pp4_vp1_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev, lts] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp4_pp2_frozen_resume_torch_dist_reshard_8x1xNone_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - test_case: [gpt3_mr_mcore_te_tp2_pp1_frozen_resume_torch_dist_te_8experts2parallel_dist_optimizer_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + + # - test_case: [gpt3_mr_mcore_tp2_pp2_resume_torch_dist_uninstall_te_dgx_a100_1N8G] + # products: + # - environment: [dev, lts] + # scope: [mr] diff --git a/tests/test_utils/recipes/multimodal-llava.yaml b/tests/test_utils/recipes/multimodal-llava.yaml index 0d43c64..13a88e1 100644 --- a/tests/test_utils/recipes/multimodal-llava.yaml +++ b/tests/test_utils/recipes/multimodal-llava.yaml @@ -2,18 +2,17 @@ type: basic format_version: 1 maintainers: [mcore] loggers: [stdout] -launchers: - type:slurm: - ntasks_per_node: '{gpus}' spec: - name: '{test_case}' + name: '{test_case}_{environment}' model: multimodal-llava build: mcore-pyt-{environment} nodes: 1 gpus: 8 platforms: dgx_a100 - time_limit: 1800 - scope: null + time_limit: + n_repeat: + test_case: + scope: script: |- ls cd /opt/megatron-lm @@ -23,7 +22,8 @@ spec: "DATA_CACHE_PATH='-'" "OUTPUT_PATH={assets_dir}" "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints/{name}/checkpoints" "TRAINING_SCRIPT_PATH=pretrain_vlm.py" "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" @@ -33,19 +33,39 @@ spec: bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - environment: [lts, dev] - scope: [mr] - n_repeat: [5] - gpus: [8] - test_case: - - multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G - - multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G - - multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G - - 
multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G - - environment: [lts, dev] - scope: [mr] - n_repeat: [5] - gpus: [7] - test_case: - - multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G - - multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G + - test_case: [multimodal_llava_mr_mcore_te_tp1_pp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [multimodal_llava_mr_mcore_te_tp2_pp3_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [multimodal_llava_mr_mcore_te_tp4_pp1_etp3_dgx_a100_1N7G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [multimodal_llava_mr_mcore_te_tp4_pp1_freeze_vit_freeze_lm_dist_opt_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [multimodal_llava_mr_mcore_te_tp4_pp1_resume_torch_etp3_dgx_a100_1N7G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] diff --git a/tests/test_utils/recipes/t5.yaml b/tests/test_utils/recipes/t5.yaml index e9583a3..d167430 100644 --- a/tests/test_utils/recipes/t5.yaml +++ b/tests/test_utils/recipes/t5.yaml @@ -1,61 +1,108 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: "{test_case}" - model: t5 - build: mcore-pyt-{environment} - nodes: 1 - gpus: 8 - platforms: dgx_a100 - artifacts: - /workspace/data/t5_data: text/the_pile/t5_shard00 - script: |- - ls - cd /opt/megatron-lm - - ARGUMENTS=( - "DATA_PATH=/workspace/data/t5_data" - "DATA_CACHE_PATH=/workspace/data/cache" - "OUTPUT_PATH={assets_dir}" - "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_PATH=/workspace/checkpoints" - "TRAINING_SCRIPT_PATH=pretrain_t5.py" - "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" - "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" - "N_REPEAT={n_repeat}" - ) - - bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} - -products: - - environment: [lts, dev] - scope: [mr] - time_limit: [1800] - n_repeat: [5] - test_case: - - t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G - - t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G - - t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G - - t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G - - t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G - - t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G - - t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G - - environment: [lts] - scope: [mr] - time_limit: [1800] - n_repeat: [5] - test_case: - - t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G - - environment: [lts, dev] - scope: [nightly] - time_limit: [9000] - n_repeat: [1] - test_case: - - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch - - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1 - - t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel - - t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1 - - t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch - - t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1 +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] 
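The restructuring applied to the gpt3 and multimodal-llava recipes above, and to t5.yaml and unit-tests.yaml below, is the same everywhere: the single flat products list becomes one entry per test case, each carrying its own environment/scope matrix, so dev jobs can stay in the per-MR scope while most lts jobs move to nightly. A minimal sketch of how such a nested recipe could be expanded into concrete (test_case, environment, scope) jobs follows; the field names mirror the YAML in this patch, but the helper itself is illustrative only and ignores per-entry fields such as tag, n_repeat, and time_limit.

# Illustrative helper (not part of Megatron-LM): expand a per-test-case
# recipe into concrete job tuples. Field names follow the recipes in this patch.
import itertools
import yaml

def expand_products(recipe_path):
    with open(recipe_path) as f:
        recipe = yaml.safe_load(f)
    jobs = []
    for entry in recipe["products"]:
        for product in entry["products"]:
            # One job per (test_case, environment, scope) combination.
            for test_case, environment, scope in itertools.product(
                entry["test_case"], product["environment"], product["scope"]
            ):
                jobs.append((test_case, environment, scope))
    return jobs

# Example: an entry with dev/mr and lts/nightly products yields two jobs
# per test case, e.g. expand_products("tests/test_utils/recipes/t5.yaml").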
+spec: + name: '{test_case}_{environment}' + model: t5 + build: mcore-pyt-{environment} + nodes: 1 + gpus: 8 + platforms: dgx_a100 + artifacts: + /workspace/data/t5_data: text/the_pile/t5_shard00 + /workspace/checkpoints/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G_dev: model/mcore_t5/t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G_dev/22410107 + script: |- + ls + cd /opt/megatron-lm + + ARGUMENTS=( + "DATA_PATH=/workspace/data/t5_data" + "DATA_CACHE_PATH=/workspace/data/cache" + "OUTPUT_PATH={assets_dir}" + "TENSORBOARD_PATH={assets_dir}/tensorboard" + "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" + "CHECKPOINT_LOAD_PATH=/workspace/checkpoints/{name}" + "TRAINING_SCRIPT_PATH=pretrain_t5.py" + "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" + "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}.json" + "N_REPEAT={n_repeat}" + ) + + bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} + +products: + - test_case: [t5_220m_mr_mcore_te_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_te_tp2_pp2_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_te_tp4_pp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_te_tp4_pp1_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_tp2_pp2_dgx_a100_1N8G] + products: + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_tp2_pp2_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_tp4_pp1_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_tp4_pp1_resume_torch_dist_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] + - environment: [lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp1_pp1_vp1_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_te_tp2_pp1_vp1_sequence_parallel] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_tp1_pp1_vp1_resume_torch] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_nightly_dgx_a100_1N8G_mcore_tp2_pp1_vp1] + products: + - environment: [dev, lts] + scope: [nightly] + - test_case: [t5_220m_mr_mcore_te_tp2_pp2_frozen_resume_torch_dgx_a100_1N8G] + products: + - environment: [dev] + scope: [mr] diff --git a/tests/test_utils/recipes/unit-tests.yaml b/tests/test_utils/recipes/unit-tests.yaml index cda58d9..aec33d5 100644 --- a/tests/test_utils/recipes/unit-tests.yaml +++ b/tests/test_utils/recipes/unit-tests.yaml @@ -1,80 +1,143 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: '{test_case}' - 
model: unit-tests - nodes: 1 - build: mcore-pyt-{environment} - gpus: 8 - platforms: dgx_h100 - script: |- - ls - - export TAG={tag} - export ENVIRONMENT={environment} - export BUCKET="{test_case}" - export UNIT_TEST_REPEAT={n_repeat} - export UNIT_TEST_TIMEOUT=10 - - set -euxo pipefail - - if [[ "$TAG" == "latest" ]]; then - TEST_PATH="/opt/megatron-lm" - else - TEST_PATH="/opt/megatron-lm-legacy/" - fi - - cd $TEST_PATH - - MARKER=() - if [[ "$TAG" == "legacy" ]]; then - MARKER+=("not internal") - fi - - if [[ "$ENVIRONMENT" == "lts" ]]; then - MARKER+=("not flaky") - fi - - if [[ "$ENVIRONMENT" == "dev" ]]; then - MARKER+=("not flaky_in_dev") - fi - - MARKER_ARG=$(printf "%s" "${{MARKER[0]}}") - for element in "${{MARKER[@]:1}}"; do - MARKER_ARG+=" and $element" - done - - IGNORE_TEST_CASES=$(cat /opt/megatron-lm/tests/test_utils/recipes/unit-tests.yaml | yq eval 'with(.products[].test_case; del(.[] | select(. == env(BUCKET)))) | .products[].test_case[]' | tr " " "\n") - IGNORE_ARGS=() - while IFS= read -r test_case; do - if [[ $test_case == *\** ]]; then - FILES=($(ls $test_case)) - echo ${{FILES[@]}} - for file in "${{FILES[@]}}"; do - IGNORE_ARGS+=("--ignore='$file'") - done - else - IGNORE_ARGS+=("--ignore=$test_case") - fi - done <<< "$IGNORE_TEST_CASES" - - for i in $(seq $UNIT_TEST_REPEAT); do - CMD=$(echo pytest -xvs --cov-report=term --cov-report=html --cov=megatron/core --no-cov-on-fail ${{IGNORE_ARGS[@]}} -m "'${{MARKER_ARG}}'" $BUCKET) - eval "$CMD" - done - -products: - - environment: [lts, dev] - tag: [latest, legacy] - scope: [unit-tests] - n_repeat: [1] - time_limit: [1800] - test_case: - - tests/unit_tests/data/ - - tests/unit_tests/dist_checkpointing/*.py - - tests/unit_tests/dist_checkpointing/models/ - - tests/unit_tests/transformer/*.py - - tests/unit_tests/transformer/moe - - tests/unit_tests +type: basic +format_version: 1 +maintainers: [mcore] +loggers: [stdout] +spec: + name: '{test_case}_{environment}_{tag}' + model: unit-tests + nodes: 1 + build: mcore-pyt-{environment} + gpus: 8 + platforms: dgx_h100 + script: |- + ls + + export TAG={tag} + export ENVIRONMENT={environment} + export BUCKET="{test_case}" + export UNIT_TEST_REPEAT={n_repeat} + export UNIT_TEST_TIMEOUT=10 + + set -euxo pipefail + + if [[ "$TAG" == "latest" ]]; then + TEST_PATH="/opt/megatron-lm" + else + TEST_PATH="/opt/megatron-lm-legacy/" + fi + + cd $TEST_PATH + + MARKER=() + if [[ "$TAG" == "legacy" ]]; then + MARKER+=("not internal") + fi + + if [[ "$ENVIRONMENT" == "lts" ]]; then + MARKER+=("not flaky") + fi + + if [[ "$ENVIRONMENT" == "dev" ]]; then + MARKER+=("not flaky_in_dev") + fi + + MARKER_ARG=$(printf "%s" "${{MARKER[0]}}") + for element in "${{MARKER[@]:1}}"; do + MARKER_ARG+=" and $element" + done + + IGNORE_TEST_CASES=$(cat /opt/megatron-lm/tests/test_utils/recipes/unit-tests.yaml | yq eval 'with(.products[].test_case; del(.[] | select(. 
== env(BUCKET)))) | .products[].test_case[]' | tr " " "\n") + IGNORE_ARGS=() + while IFS= read -r test_case; do + if [[ $test_case == *\** ]]; then + FILES=($(ls $test_case)) + echo ${{FILES[@]}} + for file in "${{FILES[@]}}"; do + IGNORE_ARGS+=("--ignore='$file'") + done + else + IGNORE_ARGS+=("--ignore=$test_case") + fi + done <<< "$IGNORE_TEST_CASES" + + echo "------ARGUMENTS for SLURM ---" + MASTER_ADDR=${{MASTER_ADDR:-localhost}} + MASTER_PORT=${{MASTER_PORT:-6000}} + NUM_NODES=${{NUM_NODES:-${{SLURM_NNODES}}}} + GPUS_PER_NODE=${{GPUS_PER_NODE:-8}} + NODE_RANK=${{SLURM_NODEID:-${{SLURM_NODEID}}}} + DISTRIBUTED_ARGS=( + --nproc_per_node $GPUS_PER_NODE + --nnodes $NUM_NODES + --master_addr $MASTER_ADDR + --master_port $MASTER_PORT + --node_rank $SLURM_NODEID + --log-dir {assets_dir} + --tee "0:3" + --redirects "3" + ) + + # Reduce memory usage by NCCL + export NCCL_MAX_NCHANNELS=1 + export NCCL_NVLS_ENABLE=0 + + for i in $(seq $UNIT_TEST_REPEAT); do + CMD=$(echo torchrun ${{DISTRIBUTED_ARGS[@]}} -m pytest \ + -xvs \ + --cov-report=term \ + --cov-branch \ + --cov=megatron/core \ + --cov-report xml:coverage.xml \ + --no-cov-on-fail ${{IGNORE_ARGS[@]}} \ + -m "'${{MARKER_ARG}}'" $BUCKET) + + eval "$CMD" + done + + ls -al + cp .coverage_0 {assets_dir}/coverage_report + cp coverage.xml {assets_dir} + +products: + - test_case: [tests/unit_tests/data/] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + - test_case: [tests/unit_tests/dist_checkpointing/*.py] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + - test_case: [tests/unit_tests/dist_checkpointing/models/] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + - test_case: [tests/unit_tests/transformer/*.py] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + - test_case: [tests/unit_tests/transformer/moe] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] + - test_case: [tests/unit_tests] + products: + - environment: [lts, dev] + tag: [latest, legacy] + scope: [unit-tests] + n_repeat: [1] + time_limit: [1800] diff --git a/tests/unit_tests/data/test_builder.py b/tests/unit_tests/data/test_builder.py index 221eb4a..e83f643 100644 --- a/tests/unit_tests/data/test_builder.py +++ b/tests/unit_tests/data/test_builder.py @@ -1,395 +1,290 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
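For reference, the yq pipeline in the unit-tests recipe above collects every test_case path declared in the recipe, drops the current bucket, and turns each remaining path into a pytest --ignore argument, so the catch-all tests/unit_tests bucket does not re-run paths owned by the narrower buckets. A rough Python equivalent of that selection logic is sketched below; the helper name is made up and the wildcard handling is simplified relative to the shell version.

# Rough Python equivalent of the yq-based ignore list in unit-tests.yaml:
# every test_case path from every bucket, minus the current bucket, becomes
# a pytest --ignore argument. Helper name and structure are illustrative only.
import glob
import yaml

def build_ignore_args(recipe_path, current_bucket):
    with open(recipe_path) as f:
        recipe = yaml.safe_load(f)
    ignore_args = []
    for entry in recipe["products"]:
        for test_case in entry["test_case"]:
            if test_case == current_bucket:
                continue
            # Wildcard buckets (e.g. dist_checkpointing/*.py) expand to files.
            paths = glob.glob(test_case) if "*" in test_case else [test_case]
            ignore_args.extend(f"--ignore={path}" for path in paths)
    return ignore_args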
- -## -# Compile megatron.core.datasets.helpers_cpp dependencies before BlendedDataset import -## - -import os -import tempfile -from collections import defaultdict -from typing import Dict, Optional - -import numpy -import pytest -import torch - -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig -from megatron.core.datasets.megatron_dataset import LowLevelDataset, MegatronDataset -from megatron.core.datasets.utils import Split, compile_helpers, get_blend_from_list -from tests.unit_tests.test_utilities import Utils - -_NUM_DATASETS = 10 - -_SEQUENCE_LENGTH = 10 - -_SIZES = {} -for split in Split: - _SIZES[split] = [] - for i in range(_NUM_DATASETS): - _SIZES[split].append({Split.train: 1000, Split.valid: 100, Split.test: 10}[split] * (i + 1)) - -_MARGIN = 0.005 - - -def do_setup(odir): - paths = defaultdict(list) - - for i in range(_NUM_DATASETS): - path_to_data = os.path.join(odir, str(i)) - os.mkdir(path_to_data) - - for split in _SIZES: - data = numpy.zeros((_SIZES[split][i], _SEQUENCE_LENGTH)) - path = os.path.join(path_to_data, f"{split.name}.npy") - numpy.save(path, data) - paths[split].append(path) - - return paths - - -def test_builder(): - if torch.distributed.is_available(): - Utils.initialize_distributed() - if torch.distributed.get_rank() == 0: - compile_helpers() - torch.distributed.barrier() - else: - compile_helpers() - - # Define the class here to avoid pytest warnings - - class TestDataset(MegatronDataset): - def __init__( - self, - dataset: LowLevelDataset, - dataset_path: Optional[str], - indices: numpy.ndarray, - num_samples: Optional[int], - index_split: Split, - config: BlendedMegatronDatasetConfig, - ) -> None: - super().__init__(dataset, dataset_path, indices, num_samples, index_split, config) - - if self.num_samples is None: - self.num_samples = len(self.indices) - - self.sample_index = numpy.random.choice(self.indices, size=self.num_samples) - - @staticmethod - def numel_low_level_dataset(low_level_dataset: LowLevelDataset) -> int: - return len(low_level_dataset) - - @staticmethod - def build_low_level_dataset( - dataset_path: str, config: BlendedMegatronDatasetConfig - ) -> LowLevelDataset: - return numpy.load(dataset_path) - - def __len__(self) -> int: - return len(self.sample_index) - - def __getitem__(self, idx: int) -> Dict[str, numpy.ndarray]: - return {"text": self.dataset[self.sample_index[idx]]} - - with tempfile.TemporaryDirectory() as temp_dir: - - paths = do_setup(temp_dir) - - blends = { - split: get_blend_from_list( - [ - weight_or_path - for pair in zip(list(range(1, len(paths[split]) + 1, 1)), paths[split]) - for weight_or_path in pair - ] - ) - for split in Split - } - - blends_unweighted = {split: (blends[split][0], None) for split in blends} - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[blends[Split.train], None, None], - ) - try: - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [None, None, None], lambda: True, config - ).build() - raise RuntimeError - except AssertionError: - pass - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[get_blend_from_list([paths[Split.train][0]]), None, None], - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, None, None], lambda: True, config - ).build() - assert len(datasets[0]) == 1000 and 
isinstance(datasets[0], TestDataset) - assert datasets[1] is None - assert datasets[2] is None - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[ - blends_unweighted[Split.train], - blends_unweighted[Split.valid], - blends_unweighted[Split.test], - ], - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, 1000, 1000], lambda: True, config - ).build() - assert len(datasets[0]) == 1000 - assert len(datasets[1]) == 1000 - assert len(datasets[2]) == sum(_SIZES[Split.test]) - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[ - blends_unweighted[Split.train], - blends_unweighted[Split.valid], - blends_unweighted[Split.test], - ], - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [None, None, None], lambda: True, config - ).build() - assert len(datasets[0]) == sum(_SIZES[Split.train]) - assert numpy.all( - numpy.array(datasets[0].weights) - == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] - ) - assert len(datasets[1]) == sum(_SIZES[Split.valid]) - assert numpy.all( - numpy.array(datasets[1].weights) - == numpy.unique(datasets[1].dataset_index, return_counts=True)[1] - ) - assert len(datasets[2]) == sum(_SIZES[Split.test]) - assert numpy.all( - numpy.array(datasets[2].weights) - == numpy.unique(datasets[2].dataset_index, return_counts=True)[1] - ) - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[blends_unweighted[Split.train], None, None], - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, None, None], lambda: True, config - ).build() - assert len(datasets[0]) == 1000 - for i in range(_NUM_DATASETS): - assert len(datasets[0].datasets[i]) == _SIZES[Split.train][i] - assert datasets[1] is None - assert datasets[2] is None - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[blends[Split.train], None, None], - ) - try: - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, None, None], lambda: True, config - ).build() - raise RuntimeError - except IndexError: - ## - # - # The size per dataset is a function of the requested size, the weight per dataset, - # and a constant coefficient. The sizes, and consequently the total size to request, - # are modified such that the weights may or may not be sufficiently representative. 
- # To fix this, the weights should be reset according to the new sizes: - # - # S := size - # W := weights - # - # S = func(S, W) - # - # W = S / sum(S) - # - ## - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[blends[Split.train], None, None], - renormalize_blend_weights=True, - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, None, None], lambda: True, config - ).build() - assert ( - len(datasets[0]) >= 1000 - and len(datasets[0]) <= 1000 * (1 + _MARGIN) + _NUM_DATASETS - ) - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend_per_split=[blends[Split.train], blends[Split.valid], blends[Split.test]], - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [100, 100, 100], lambda: True, config - ).build() - assert ( - len(datasets[0]) >= 100 and len(datasets[0]) <= 100 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert ( - len(datasets[1]) >= 100 and len(datasets[1]) <= 100 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert ( - len(datasets[2]) >= 100 and len(datasets[2]) <= 100 * (1 + _MARGIN) + _NUM_DATASETS - ) - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends_unweighted[Split.train], - split="100,0,0", - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [None, None, None], lambda: True, config - ).build() - assert len(datasets[0]) == sum(_SIZES[Split.train]) - assert numpy.all( - numpy.array(datasets[0].weights) - == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] - ) - assert datasets[1] is None - assert datasets[2] is None - - if torch.distributed.is_initialized(): - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends_unweighted[Split.train], - split="100,0,0", - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, - [None, None, None], - lambda: torch.distributed.get_rank() % 2 == 0, - config, - ).build() - if torch.distributed.get_rank() % 2 == 0: - assert len(datasets[0]) == sum(_SIZES[Split.train]) - assert numpy.all( - numpy.array(datasets[0].weights) - == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] - ) - else: - assert datasets[0] is None - assert datasets[1] is None - assert datasets[2] is None - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends_unweighted[Split.train], - split="50,50,0", - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [1000, 0, None], lambda: True, config - ).build() - assert len(datasets[0]) == 1000 - assert sum(map(len, datasets[0].datasets)) == sum(_SIZES[Split.train]) / 2 - assert sum(map(len, datasets[1].datasets)) == sum(_SIZES[Split.train]) / 2 - assert datasets[1] is not None and len(datasets[1]) == 0 - assert datasets[2] is None - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends_unweighted[Split.train], - split="50,50,0", - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, - [int(sum(_SIZES[Split.train]) / 4), int(sum(_SIZES[Split.train])), None], - lambda: True, - config, - ).build() - assert len(datasets[0]) == sum(_SIZES[Split.train]) / 4 - assert len(datasets[1]) == sum(_SIZES[Split.train]) / 2 - assert datasets[2] is None - - # 990 9 1 - # 100000 1000 1 - # [] - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends[Split.train], - 
split="990,9,1", - ) - try: - # All three of 100000, 1000, and 1 result in error, yet 10000 and 100 do not - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [100000, 1000, 1], lambda: True, config - ).build() - except IndexError: - ## - # - # The size per dataset is a function of the requested size, the weight per dataset, - # and a constant coefficient. The sizes, and consequently the total size to request, - # are modified such that the weights may or may not be sufficiently representative. - # To fix this, the weights should be reset according to the new sizes: - # - # S := size - # W := weights - # - # S = func(S, W) - # - # W = S / sum(S) - # - ## - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends[Split.train], - split="990,9,1", - renormalize_blend_weights=True, - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [100000, 1000, 1], lambda: True, config - ).build() - assert ( - len(datasets[0]) >= 100000 - and len(datasets[0]) <= 100000 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert ( - len(datasets[1]) >= 1000 - and len(datasets[1]) <= 1000 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert len(datasets[2]) >= 1 and len(datasets[2]) <= 1 * (1 + _MARGIN) + _NUM_DATASETS - - config = BlendedMegatronDatasetConfig( - random_seed=1234, - sequence_length=_SEQUENCE_LENGTH, - blend=blends[Split.train], - split="990,9,1", - ) - datasets = BlendedMegatronDatasetBuilder( - TestDataset, [10000, 100, 0], lambda: True, config - ).build() - assert ( - len(datasets[0]) >= 10000 - and len(datasets[0]) <= 10000 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert ( - len(datasets[1]) >= 100 and len(datasets[1]) <= 100 * (1 + _MARGIN) + _NUM_DATASETS - ) - assert len(datasets[2]) == 0 - - -if __name__ == "__main__": - test_builder() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +## +# Compile megatron.core.datasets.helpers_cpp dependencies before BlendedDataset import +## + +import os +import tempfile +from collections import defaultdict +from typing import Dict, Optional + +import numpy +import pytest +import torch + +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.blended_megatron_dataset_config import BlendedMegatronDatasetConfig +from megatron.core.datasets.megatron_dataset import LowLevelDataset, MegatronDataset +from megatron.core.datasets.utils import Split, compile_helpers, get_blend_from_list +from tests.unit_tests.test_utilities import Utils + +_NUM_DATASETS = 10 + +_SEQUENCE_LENGTH = 10 + +_SIZES = {} +for split in Split: + _SIZES[split] = [] + for i in range(_NUM_DATASETS): + _SIZES[split].append({Split.train: 1000, Split.valid: 100, Split.test: 10}[split] * (i + 1)) + +_MARGIN = 0.005 + + +def do_setup(odir): + paths = defaultdict(list) + + for i in range(_NUM_DATASETS): + path_to_data = os.path.join(odir, str(i)) + os.mkdir(path_to_data) + + for split in _SIZES: + data = numpy.zeros((_SIZES[split][i], _SEQUENCE_LENGTH)) + path = os.path.join(path_to_data, f"{split.name}.npy") + numpy.save(path, data) + paths[split].append(path) + + return paths + + +def test_builder(): + if torch.distributed.is_available(): + Utils.initialize_distributed() + if torch.distributed.get_rank() == 0: + compile_helpers() + torch.distributed.barrier() + else: + compile_helpers() + + # Define the class here to avoid pytest warnings + + class TestDataset(MegatronDataset): + def __init__( + self, + dataset: LowLevelDataset, + dataset_path: Optional[str], + indices: numpy.ndarray, + num_samples: Optional[int], + index_split: Split, + config: BlendedMegatronDatasetConfig, + ) -> None: + super().__init__(dataset, dataset_path, indices, num_samples, index_split, config) + + if self.num_samples is None: + self.num_samples = len(self.indices) + + self.sample_index = numpy.random.choice(self.indices, size=self.num_samples) + + @staticmethod + def numel_low_level_dataset(low_level_dataset: LowLevelDataset) -> int: + return len(low_level_dataset) + + @staticmethod + def build_low_level_dataset( + dataset_path: str, config: BlendedMegatronDatasetConfig + ) -> LowLevelDataset: + return numpy.load(dataset_path) + + def __len__(self) -> int: + return len(self.sample_index) + + def __getitem__(self, idx: int) -> Dict[str, numpy.ndarray]: + return {"text": self.dataset[self.sample_index[idx]]} + + with tempfile.TemporaryDirectory() as temp_dir: + + paths = do_setup(temp_dir) + + blends = { + split: get_blend_from_list( + [ + weight_or_path + for pair in zip(list(range(1, len(paths[split]) + 1, 1)), paths[split]) + for weight_or_path in pair + ] + ) + for split in Split + } + + blends_unweighted = {split: (blends[split][0], None) for split in blends} + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[blends[Split.train], None, None], + ) + try: + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [None, None, None], lambda: True, config + ).build() + raise RuntimeError + except AssertionError: + pass + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[get_blend_from_list([paths[Split.train][0]]), None, None], + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [1000, None, None], lambda: True, config + ).build() + assert len(datasets[0]) == 1000 and 
isinstance(datasets[0], TestDataset) + assert datasets[1] is None + assert datasets[2] is None + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[ + blends_unweighted[Split.train], + blends_unweighted[Split.valid], + blends_unweighted[Split.test], + ], + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [1000, 1000, 1000], lambda: True, config + ).build() + assert len(datasets[0]) == 1000 + assert len(datasets[1]) == 1000 + assert len(datasets[2]) == sum(_SIZES[Split.test]) + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[ + blends_unweighted[Split.train], + blends_unweighted[Split.valid], + blends_unweighted[Split.test], + ], + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [None, None, None], lambda: True, config + ).build() + assert len(datasets[0]) == sum(_SIZES[Split.train]) + assert numpy.all( + numpy.array(datasets[0].weights) + == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] + ) + assert len(datasets[1]) == sum(_SIZES[Split.valid]) + assert numpy.all( + numpy.array(datasets[1].weights) + == numpy.unique(datasets[1].dataset_index, return_counts=True)[1] + ) + assert len(datasets[2]) == sum(_SIZES[Split.test]) + assert numpy.all( + numpy.array(datasets[2].weights) + == numpy.unique(datasets[2].dataset_index, return_counts=True)[1] + ) + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[blends_unweighted[Split.train], None, None], + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [1000, None, None], lambda: True, config + ).build() + assert len(datasets[0]) == 1000 + for i in range(_NUM_DATASETS): + assert len(datasets[0].datasets[i]) == _SIZES[Split.train][i] + assert datasets[1] is None + assert datasets[2] is None + + # This build used to fail when building datasets without a sample buffer + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend_per_split=[blends[Split.train], None, None], + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [1000, None, None], lambda: True, config + ).build() + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend=blends_unweighted[Split.train], + split="100,0,0", + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [None, None, None], lambda: True, config + ).build() + assert len(datasets[0]) == sum(_SIZES[Split.train]) + assert numpy.all( + numpy.array(datasets[0].weights) + == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] + ) + assert datasets[1] is None + assert datasets[2] is None + + if torch.distributed.is_initialized(): + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend=blends_unweighted[Split.train], + split="100,0,0", + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, + [None, None, None], + lambda: torch.distributed.get_rank() % 2 == 0, + config, + ).build() + if torch.distributed.get_rank() % 2 == 0: + assert len(datasets[0]) == sum(_SIZES[Split.train]) + assert numpy.all( + numpy.array(datasets[0].weights) + == numpy.unique(datasets[0].dataset_index, return_counts=True)[1] + ) + else: + assert datasets[0] is None + assert datasets[1] is None + assert datasets[2] is None + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + 
blend=blends_unweighted[Split.train], + split="50,50,0", + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [1000, 0, None], lambda: True, config + ).build() + assert len(datasets[0]) == 1000 + assert sum(map(len, datasets[0].datasets)) == sum(_SIZES[Split.train]) / 2 + assert sum(map(len, datasets[1].datasets)) == sum(_SIZES[Split.train]) / 2 + assert datasets[1] is not None and len(datasets[1]) == 0 + assert datasets[2] is None + + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend=blends_unweighted[Split.train], + split="50,50,0", + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, + [int(sum(_SIZES[Split.train]) / 4), int(sum(_SIZES[Split.train])), None], + lambda: True, + config, + ).build() + assert len(datasets[0]) == sum(_SIZES[Split.train]) / 4 + assert len(datasets[1]) == sum(_SIZES[Split.train]) / 2 + assert datasets[2] is None + + # This build used to fail when building datasets without a sample buffer + config = BlendedMegatronDatasetConfig( + random_seed=1234, + sequence_length=_SEQUENCE_LENGTH, + blend=blends[Split.train], + split="990,9,1", + ) + datasets = BlendedMegatronDatasetBuilder( + TestDataset, [100000, 1000, 1], lambda: True, config + ).build() + + +if __name__ == "__main__": + test_builder() diff --git a/tests/unit_tests/dist_checkpointing/conftest.py b/tests/unit_tests/dist_checkpointing/conftest.py index 3702ac5..2a8acc6 100644 --- a/tests/unit_tests/dist_checkpointing/conftest.py +++ b/tests/unit_tests/dist_checkpointing/conftest.py @@ -1,22 +1,27 @@ -from unittest import mock - -import pytest - -from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy - - -def pytest_sessionfinish(session, exitstatus): - if exitstatus == 5: - session.exitstatus = 0 - - -@pytest.fixture(scope='session', autouse=True) -def set_default_dist_ckpt_strategy(): - def get_pyt_dist_save_sharded_strategy(): - return get_default_strategy(StrategyAction.SAVE_SHARDED, 'torch_dist', 1) - - with mock.patch( - 'megatron.core.dist_checkpointing.serialization.get_default_save_sharded_strategy', - new=get_pyt_dist_save_sharded_strategy, - ) as _fixture: - yield _fixture +from unittest import mock + +import pytest + +from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy + + +def pytest_sessionfinish(session, exitstatus): + if exitstatus == 5: + session.exitstatus = 0 + + +@pytest.fixture(scope="class") +def tmp_dir_per_class(tmp_path_factory): + return tmp_path_factory.mktemp("data") + + +@pytest.fixture(scope='session', autouse=True) +def set_default_dist_ckpt_strategy(): + def get_pyt_dist_save_sharded_strategy(): + return get_default_strategy(StrategyAction.SAVE_SHARDED, 'torch_dist', 1) + + with mock.patch( + 'megatron.core.dist_checkpointing.serialization.get_default_save_sharded_strategy', + new=get_pyt_dist_save_sharded_strategy, + ) as _fixture: + yield _fixture diff --git a/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py b/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py index 54a60fc..38698cd 100644 --- a/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py +++ b/tests/unit_tests/dist_checkpointing/models/test_moe_experts.py @@ -1,385 +1,402 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
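The rewritten test_builder.py above builds its blends as flat, alternating weight/path lists and keeps only builds that are expected to succeed. As a quick reference for that blend format, here is a minimal sketch with placeholder paths; the return shape is described only as far as the test's own indexing (blend[0] plus optional weights) implies.

# Blend format exercised by test_builder.py: a flat, alternating
# [weight, path, weight, path, ...] list that get_blend_from_list splits
# into paths plus optional weights; a plain [path] list yields no weights.
# The .npy paths below are placeholders, not real dataset shards.
from megatron.core.datasets.utils import get_blend_from_list

weighted = get_blend_from_list([1, "/data/shard_0.npy", 3, "/data/shard_1.npy"])
unweighted = (weighted[0], None)  # same construction the test uses for blends_unweighted
single = get_blend_from_list(["/data/shard_0.npy"])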
- -import pytest -import torch -from transformer_engine.pytorch.fp8 import check_fp8_support, fp8_autocast - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing import load, load_plain_tensors, save -from megatron.core.dist_checkpointing.dict_utils import diff -from megatron.core.dist_checkpointing.serialization import ( - get_default_load_sharded_strategy, - get_default_save_sharded_strategy, -) -from megatron.core.dist_checkpointing.strategies.fully_parallel import ( - FullyParallelLoadStrategyWrapper, - FullyParallelSaveStrategyWrapper, -) -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.moe.experts import GroupedMLP, SequentialMLP, TEGroupedMLP -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import is_te_min_version -from tests.unit_tests.dist_checkpointing import TempNamedDir -from tests.unit_tests.test_utilities import Utils - -fp8_available, reason_for_no_fp8 = check_fp8_support() - - -def initialize_expert_layer(seed, glu=True, expert_type='sequential', fp8=False, **config_kwargs): - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - - pp_size = parallel_state.get_pipeline_model_parallel_world_size() - num_moe_experts = 8 - num_local_experts = num_moe_experts // parallel_state.get_expert_model_parallel_world_size() - default_config_kwargs = dict( - num_layers=pp_size, - hidden_size=16, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - gated_linear_unit=glu, - ) - default_config_kwargs.update(**config_kwargs) - transformer_config = TransformerConfig(**default_config_kwargs) - if expert_type == 'grouped': - model = GroupedMLP(num_local_experts, transformer_config) - elif expert_type == 'te_grouped': - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=num_moe_experts, moe_grouped_gemm=True - ) - model = TEGroupedMLP( - num_local_experts, - transformer_config, - transformer_layer_spec.submodules.mlp.submodules.experts.submodules, - ) - elif expert_type == 'sequential': - transformer_layer_spec = get_gpt_layer_local_spec( - num_experts=num_moe_experts, moe_grouped_gemm=False - ) - model = SequentialMLP( - num_local_experts, - transformer_config, - transformer_layer_spec.submodules.mlp.submodules.experts.submodules, - ) - else: - raise ValueError('expert_type can only be one of ["sequential", "grouped", "te_grouped"]') - return model - - -def get_pp_offsets(): - pp_rank = parallel_state.get_pipeline_model_parallel_rank() - pp_size = parallel_state.get_pipeline_model_parallel_world_size() - return ((0, pp_rank, pp_size),) - - -expert_type = ['sequential', 'grouped'] -src_dest_expert_type = [('sequential', 'grouped'), ('grouped', 'sequential')] -if is_te_min_version("1.9.0.dev0"): - expert_type.append('te_grouped') - src_dest_expert_type.append(('sequential', 'te_grouped')) - src_dest_expert_type.append(('te_grouped', 'sequential')) - - -class TestExpertLayerReconfiguration: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - @pytest.mark.parametrize( - "use_fpsl,src_tp_pp_ep_etp,dest_tp_pp_ep_etp,use_glu", - [ - # changing PP is impossible because the number of layers must be the same - (False, (2, 4, 1, 2), (2, 4, 1, 2), False), - 
(True, (2, 4, 1, 2), (2, 4, 1, 2), False), - (False, (2, 4, 1, 2), (1, 4, 1, 2), False), - (True, (2, 1, 1, 2), (1, 1, 1, 2), False), - (False, (1, 1, 1, 1), (1, 1, 1, 1), False), - (True, (1, 1, 1, 1), (1, 1, 4, 1), False), - (False, (1, 1, 8, 1), (1, 1, 2, 1), False), - (False, (2, 2, 2, 2), (4, 2, 1, 4), False), - (True, (1, 1, 4, 1), (8, 1, 1, 1), False), - (False, (1, 8, 1, 1), (1, 8, 1, 1), False), - (False, (1, 1, 4, 1), (2, 1, 1, 2), False), - (False, (2, 1, 4, 1), (2, 1, 1, 4), False), - (False, (1, 1, 1, 1), (1, 1, 1, 1), True), - (False, (1, 1, 1, 1), (1, 1, 4, 1), True), - (True, (1, 1, 1, 1), (2, 1, 1, 1), True), - (False, (1, 1, 4, 1), (8, 1, 1, 8), True), - ], - ) - @pytest.mark.parametrize("expert_type", expert_type) - @pytest.mark.parametrize( - "load_order,store_order", - [ - ("tp-ep-dp-pp", "tp-ep-dp-pp"), - # ("tp-ep-dp-pp", "ep-tp-dp-pp"), - # ("ep-tp-dp-pp", "ep-tp-dp-pp"), - # ("ep-tp-dp-pp", "tp-ep-dp-pp"), - ], - ) - def test_parallel_reconfiguration_e2e( - self, - tmp_path_dist_ckpt, - src_tp_pp_ep_etp, - dest_tp_pp_ep_etp, - use_glu, - use_fpsl, - expert_type, - load_order, - store_order, - ): - """Test model saving and loading with different TP/PP/EP/ETP(expert-tensor-parallel)""" - src_tp, src_pp, src_ep, src_etp = src_tp_pp_ep_etp - dest_tp, dest_pp, dest_ep, dest_etp = dest_tp_pp_ep_etp - if expert_type == 'grouped': - add_bias_linear = False - else: - add_bias_linear = True - # Save checkpoint A - Utils.initialize_model_parallel( - src_tp, - src_pp, - expert_model_parallel_size=src_ep, - expert_tensor_parallel_size=src_etp, - order=store_order, - ) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_expert_layer_reconfiguration_model_A' - ) as ckpt_dir_A, TempNamedDir( - tmp_path_dist_ckpt / 'test_expert_layer_reconfiguration_model_B' - ) as ckpt_dir_B: - model_A = initialize_expert_layer( - 1, use_glu, expert_type, add_bias_linear=add_bias_linear - ) - sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) - - save_strategy = get_default_save_sharded_strategy() - if use_fpsl: - save_strategy = FullyParallelSaveStrategyWrapper( - save_strategy, - parallel_state.get_data_parallel_group(with_context_parallel=True), - True, - ) - save(sharded_state_dict, ckpt_dir_A, save_strategy) - Utils.destroy_model_parallel() - - # Load checkpoint A with different TP/PP/EP and save as checkpoint B - # No FPS this time, only FPL - Utils.initialize_model_parallel( - dest_tp, - dest_pp, - expert_model_parallel_size=dest_ep, - expert_tensor_parallel_size=dest_etp, - order=load_order, - ) - model_B = initialize_expert_layer( - 1, use_glu, expert_type, add_bias_linear=add_bias_linear - ) - if use_fpsl: - load_strategy = get_default_load_sharded_strategy(ckpt_dir_A) - load_strategy = FullyParallelLoadStrategyWrapper( - load_strategy, - parallel_state.get_data_parallel_group(with_context_parallel=True), - ) - else: - load_strategy = None - state_dict = load( - model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), - ckpt_dir_A, - load_strategy, - ) - model_B.load_state_dict(state_dict) - save(model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_B) - Utils.destroy_model_parallel() - - # Test both checkpoints are equal - Utils.initialize_model_parallel(1, 1) - state_dict_A = load_plain_tensors(ckpt_dir_A) - state_dict_B = load_plain_tensors(ckpt_dir_B) - diffs = diff(state_dict_A, state_dict_B) - assert not any(map(bool, diffs)), diffs - - @pytest.mark.internal - @pytest.mark.parametrize( - "src_tp_pp_exp,dest_tp_pp_exp,use_glu", - [ 
- # changing PP is impossible because the number of layers must be the same - ((2, 4, 1), (2, 4, 1), False), - ((1, 1, 1), (1, 1, 4), False), - ((2, 2, 2), (4, 2, 1), False), - ((1, 1, 4), (8, 1, 1), False), - ((2, 1, 4), (1, 1, 8), False), - ((2, 4, 1), (2, 4, 1), True), - ((1, 1, 1), (1, 1, 4), True), - ((2, 2, 2), (4, 2, 1), True), - ((1, 1, 4), (8, 1, 1), True), - ((2, 1, 4), (1, 1, 8), True), - ], - ) - @pytest.mark.parametrize("src_module,dest_module", src_dest_expert_type) - def test_sequential_grouped_mlp_interchangeable( - self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, use_glu, src_module, dest_module - ): - """Test model saving and loading with different TP/PP/expert parallelism""" - src_tp, src_pp, src_exp = src_tp_pp_exp - dest_tp, dest_pp, dest_exp = dest_tp_pp_exp - if src_module == 'grouped' or dest_module == 'grouped': - add_bias_linear = False - else: - add_bias_linear = True - # Save checkpoint A - Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_sequential_grouped_mlp_interchangeable_model_A' - ) as ckpt_dir_A, TempNamedDir( - tmp_path_dist_ckpt / 'test_sequential_grouped_mlp_interchangeable_model_B' - ) as ckpt_dir_B: - - model_A = initialize_expert_layer( - 1, use_glu, expert_type=src_module, add_bias_linear=add_bias_linear - ) - sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) - - save_strategy = get_default_save_sharded_strategy() - save(sharded_state_dict, ckpt_dir_A, save_strategy) - Utils.destroy_model_parallel() - - Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) - model_B = initialize_expert_layer( - 1, use_glu, expert_type=dest_module, add_bias_linear=add_bias_linear - ) - load_strategy = None - state_dict = load( - model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), - ckpt_dir_A, - load_strategy, - ) - model_B.load_state_dict(state_dict) - save(model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_B) - Utils.destroy_model_parallel() - - # Test both checkpoints are equal - Utils.initialize_model_parallel(1, 1) - state_dict_A = load_plain_tensors(ckpt_dir_A) - state_dict_B = load_plain_tensors(ckpt_dir_B) - diffs = diff(state_dict_A, state_dict_B) - assert not any(map(bool, diffs)), diffs - Utils.destroy_model_parallel() - - @pytest.mark.skipif( - not is_te_min_version("1.11.0"), - reason="FP8 support of TEGroupedMLP is only available in TE 1.11.0 and later.", - ) - @pytest.mark.skipif(not fp8_available, reason=reason_for_no_fp8) - @pytest.mark.parametrize( - "src_module,dst_module,src_tp_pp_exp,dest_tp_pp_exp", - [ - # Changing tp/pp/dp doesn't affect _extra_state - ('sequential', 'te_grouped', (1, 1, 1), (1, 1, 4)), - ('sequential', 'te_grouped', (1, 1, 4), (1, 1, 1)), - ('te_grouped', 'sequential', (1, 1, 1), (1, 1, 4)), - ('te_grouped', 'sequential', (1, 1, 4), (1, 1, 1)), - ], - ) - def test_sequential_grouped_mlp_extra_state( - self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, src_module, dst_module - ): - """Test saving and loading _extra_state""" - src_tp, src_pp, src_exp = src_tp_pp_exp - dest_tp, dest_pp, dest_exp = dest_tp_pp_exp - use_glu = True - Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_A' - ) as ckpt_dir_A, TempNamedDir( - tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_B' - ) as ckpt_dir_B, fp8_autocast(): - 
tokens_per_expert = torch.tensor([16] * (8 // src_exp)) - input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") - - # Save checkpoint A - model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) - model_A = model_A.cuda() - # fp8 meta is initialized at the first step - model_A(input_tensor, tokens_per_expert) - sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) - - save_strategy = get_default_save_sharded_strategy() - save(sharded_state_dict, ckpt_dir_A, save_strategy) - Utils.destroy_model_parallel() - - Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) - load_strategy = None - - # model_A load checkpoint A - model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) - model_A = model_A.cuda() - state_dict = load( - model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()), - ckpt_dir_A, - load_strategy, - ) - model_A.load_state_dict(state_dict) - - # model_B load checkpoint A - model_B = initialize_expert_layer(1, use_glu, expert_type=dst_module, fp8=True) - model_B = model_B.cuda() - state_dict = load( - model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), - ckpt_dir_A, - load_strategy, - ) - model_B.load_state_dict(state_dict) - - # Should be bitwise equal - if src_module == "te_grouped": - model_A, model_B = model_B, model_A - torch.testing.assert_close( - torch.cat( - [ - model_A.local_experts[i] - .linear_fc1.fp8_meta["scaling_fwd"] - .amax_history.view(-1, 1) - for i in range(8 // dest_exp) - ], - dim=1, - ).view(1024, -1), - model_B.linear_fc1.fp8_meta["scaling_fwd"].amax_history, - rtol=0, - atol=0, - ) - - Utils.destroy_model_parallel() - - @pytest.mark.skipif( - not is_te_min_version("1.9.0"), - reason="TEGroupedMLP is only supported in TE 1.9.0 and later.", - ) - @pytest.mark.parametrize("ep_size", [1, 2]) - def test_te_grouped_linear_torch_native(self, tmp_path_dist_ckpt, ep_size): - """Test saving and loading torch native checkpoints""" - use_glu = True - Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=ep_size) - with TempNamedDir(tmp_path_dist_ckpt / 'test_te_grouped_linear_torch_native') as ckpt_dir: - tokens_per_expert = torch.tensor([16] * (8 // ep_size)) - input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") - - # Save checkpoint - model = initialize_expert_layer(1, use_glu, expert_type="te_grouped") - model = model.cuda() - model(input_tensor, tokens_per_expert) - torch.save(model.state_dict(), ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") - - # Load checkpoint - state_dict = torch.load(ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") - model.load_state_dict(state_dict) - - Utils.destroy_model_parallel() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
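The reconfiguration tests removed above, and re-added with TE-sequential and FP8 coverage below, all follow the same save/reload shape. A condensed sketch of that round trip is given here; it uses only calls that appear in the test itself, the parallel sizes are example values, and build_model stands in for initialize_expert_layer.

# Condensed shape of the reshard round trip exercised by the
# reconfiguration tests; sizes and build_model are illustrative stand-ins.
from megatron.core import parallel_state
from megatron.core.dist_checkpointing import load, load_plain_tensors, save
from megatron.core.dist_checkpointing.dict_utils import diff
from tests.unit_tests.test_utilities import Utils

def get_pp_offsets():
    pp_rank = parallel_state.get_pipeline_model_parallel_rank()
    pp_size = parallel_state.get_pipeline_model_parallel_world_size()
    return ((0, pp_rank, pp_size),)

def reshard_round_trip(ckpt_dir_A, ckpt_dir_B, build_model):
    # Save checkpoint A under the source parallel layout (example sizes).
    Utils.initialize_model_parallel(2, 1, expert_model_parallel_size=4)
    save(build_model().sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_A)
    Utils.destroy_model_parallel()

    # Rebuild under a different layout, load checkpoint A, re-save as B.
    Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=8)
    model_B = build_model()
    state_dict = load(
        model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_A, None
    )
    model_B.load_state_dict(state_dict)
    save(model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_B)
    Utils.destroy_model_parallel()

    # Both checkpoints should resolve to identical plain tensors.
    Utils.initialize_model_parallel(1, 1)
    diffs = diff(load_plain_tensors(ckpt_dir_A), load_plain_tensors(ckpt_dir_B))
    assert not any(map(bool, diffs)), diffs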
+ +import pytest +import torch +from transformer_engine.pytorch.fp8 import check_fp8_support, fp8_autocast + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing import load, load_plain_tensors, save +from megatron.core.dist_checkpointing.dict_utils import diff +from megatron.core.dist_checkpointing.serialization import ( + get_default_load_sharded_strategy, + get_default_save_sharded_strategy, +) +from megatron.core.dist_checkpointing.strategies.fully_parallel import ( + FullyParallelLoadStrategyWrapper, + FullyParallelSaveStrategyWrapper, +) +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.moe.experts import GroupedMLP, SequentialMLP, TEGroupedMLP +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import is_te_min_version +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + +fp8_available, reason_for_no_fp8 = check_fp8_support() + + +def initialize_expert_layer(seed, glu=True, expert_type='sequential', fp8=False, **config_kwargs): + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + + pp_size = parallel_state.get_pipeline_model_parallel_world_size() + num_moe_experts = 8 + num_local_experts = num_moe_experts // parallel_state.get_expert_model_parallel_world_size() + default_config_kwargs = dict( + num_layers=pp_size, + hidden_size=16, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + gated_linear_unit=glu, + fp8="hybrid" if fp8 else None, + ) + default_config_kwargs.update(**config_kwargs) + transformer_config = TransformerConfig(**default_config_kwargs) + if expert_type == 'grouped': + model = GroupedMLP(num_local_experts, transformer_config) + elif expert_type == 'te_grouped': + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=True + ) + model = TEGroupedMLP( + num_local_experts, + transformer_config, + transformer_layer_spec.submodules.mlp.submodules.experts.submodules, + ) + elif expert_type == 'sequential': + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=num_moe_experts, moe_grouped_gemm=False + ) + model = SequentialMLP( + num_local_experts, + transformer_config, + transformer_layer_spec.submodules.mlp.submodules.experts.submodules, + ) + elif expert_type == 'te_sequential': + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=False + ) + model = SequentialMLP( + num_local_experts, + transformer_config, + transformer_layer_spec.submodules.mlp.submodules.experts.submodules, + ) + else: + raise ValueError( + 'expert_type can only be one of ["sequential", "te_sequential", "grouped",' + ' "te_grouped"]' + ) + return model + + +def get_pp_offsets(): + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + pp_size = parallel_state.get_pipeline_model_parallel_world_size() + return ((0, pp_rank, pp_size),) + + +expert_type = ['sequential', 'grouped'] +src_dest_expert_type = [('sequential', 'grouped'), ('grouped', 'sequential')] +if is_te_min_version("1.7.0.dev0"): + expert_type.append('te_sequential') + src_dest_expert_type.append(('sequential', 'te_sequential')) + src_dest_expert_type.append(('te_sequential', 'sequential')) +if 
is_te_min_version("1.9.0.dev0"): + expert_type.append('te_grouped') + src_dest_expert_type.append(('te_sequential', 'te_grouped')) + src_dest_expert_type.append(('te_grouped', 'te_sequential')) + + +class TestExpertLayerReconfiguration: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.parametrize( + "use_fpsl,src_tp_pp_ep_etp,dest_tp_pp_ep_etp,use_glu", + [ + # changing PP is impossible because the number of layers must be the same + (False, (2, 4, 1, 2), (2, 4, 1, 2), False), + (True, (2, 4, 1, 2), (2, 4, 1, 2), False), + (False, (2, 4, 1, 2), (1, 4, 1, 2), False), + (True, (2, 1, 1, 2), (1, 1, 1, 2), False), + (False, (1, 1, 1, 1), (1, 1, 1, 1), False), + (True, (1, 1, 1, 1), (1, 1, 4, 1), False), + (False, (1, 1, 8, 1), (1, 1, 2, 1), False), + (False, (2, 2, 2, 2), (4, 2, 1, 4), False), + (True, (1, 1, 4, 1), (8, 1, 1, 1), False), + (False, (1, 8, 1, 1), (1, 8, 1, 1), False), + (False, (1, 1, 4, 1), (2, 1, 1, 2), False), + (False, (2, 1, 4, 1), (2, 1, 1, 4), False), + (False, (1, 1, 1, 1), (1, 1, 1, 1), True), + (False, (1, 1, 1, 1), (1, 1, 4, 1), True), + (True, (1, 1, 1, 1), (2, 1, 1, 1), True), + (False, (1, 1, 4, 1), (8, 1, 1, 8), True), + ], + ) + @pytest.mark.parametrize("expert_type", expert_type) + @pytest.mark.parametrize( + "load_order,store_order", + [ + ("tp-ep-dp-pp", "tp-ep-dp-pp"), + # ("tp-ep-dp-pp", "ep-tp-dp-pp"), + # ("ep-tp-dp-pp", "ep-tp-dp-pp"), + # ("ep-tp-dp-pp", "tp-ep-dp-pp"), + ], + ) + def test_parallel_reconfiguration_e2e( + self, + tmp_path_dist_ckpt, + src_tp_pp_ep_etp, + dest_tp_pp_ep_etp, + use_glu, + use_fpsl, + expert_type, + load_order, + store_order, + ): + """Test model saving and loading with different TP/PP/EP/ETP(expert-tensor-parallel)""" + src_tp, src_pp, src_ep, src_etp = src_tp_pp_ep_etp + dest_tp, dest_pp, dest_ep, dest_etp = dest_tp_pp_ep_etp + if expert_type == 'grouped': + add_bias_linear = False + else: + add_bias_linear = True + # Save checkpoint A + Utils.initialize_model_parallel( + src_tp, + src_pp, + expert_model_parallel_size=src_ep, + expert_tensor_parallel_size=src_etp, + order=store_order, + ) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_expert_layer_reconfiguration_model_A' + ) as ckpt_dir_A, TempNamedDir( + tmp_path_dist_ckpt / 'test_expert_layer_reconfiguration_model_B' + ) as ckpt_dir_B: + model_A = initialize_expert_layer( + 1, use_glu, expert_type, add_bias_linear=add_bias_linear + ) + sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) + + save_strategy = get_default_save_sharded_strategy() + if use_fpsl: + save_strategy = FullyParallelSaveStrategyWrapper( + save_strategy, + parallel_state.get_data_parallel_group(with_context_parallel=True), + True, + ) + save(sharded_state_dict, ckpt_dir_A, save_strategy) + Utils.destroy_model_parallel() + + # Load checkpoint A with different TP/PP/EP and save as checkpoint B + # No FPS this time, only FPL + Utils.initialize_model_parallel( + dest_tp, + dest_pp, + expert_model_parallel_size=dest_ep, + expert_tensor_parallel_size=dest_etp, + order=load_order, + ) + model_B = initialize_expert_layer( + 1, use_glu, expert_type, add_bias_linear=add_bias_linear + ) + if use_fpsl: + load_strategy = get_default_load_sharded_strategy(ckpt_dir_A) + load_strategy = FullyParallelLoadStrategyWrapper( + load_strategy, + parallel_state.get_data_parallel_group(with_context_parallel=True), + ) + else: + load_strategy = None + state_dict = load( + 
model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_B.load_state_dict(state_dict) + save(model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_B) + Utils.destroy_model_parallel() + + # Test both checkpoints are equal + Utils.initialize_model_parallel(1, 1) + state_dict_A = load_plain_tensors(ckpt_dir_A) + state_dict_B = load_plain_tensors(ckpt_dir_B) + diffs = diff(state_dict_A, state_dict_B) + assert not any(map(bool, diffs)), diffs + + @pytest.mark.internal + @pytest.mark.parametrize( + "src_tp_pp_exp,dest_tp_pp_exp,use_glu", + [ + # changing PP is impossible because the number of layers must be the same + ((2, 4, 1), (2, 4, 1), False), + ((1, 1, 1), (1, 1, 4), False), + ((2, 2, 2), (4, 2, 1), False), + ((1, 1, 4), (8, 1, 1), False), + ((2, 1, 4), (1, 1, 8), False), + ((2, 4, 1), (2, 4, 1), True), + ((1, 1, 1), (1, 1, 4), True), + ((2, 2, 2), (4, 2, 1), True), + ((1, 1, 4), (8, 1, 1), True), + ((2, 1, 4), (1, 1, 8), True), + ], + ) + @pytest.mark.parametrize("src_module,dest_module", src_dest_expert_type) + def test_sequential_grouped_mlp_interchangeable( + self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, use_glu, src_module, dest_module + ): + """Test model saving and loading with different TP/PP/expert parallelism""" + src_tp, src_pp, src_exp = src_tp_pp_exp + dest_tp, dest_pp, dest_exp = dest_tp_pp_exp + if src_module == 'grouped' or dest_module == 'grouped': + add_bias_linear = False + else: + add_bias_linear = True + # Save checkpoint A + Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_sequential_grouped_mlp_interchangeable_model_A' + ) as ckpt_dir_A, TempNamedDir( + tmp_path_dist_ckpt / 'test_sequential_grouped_mlp_interchangeable_model_B' + ) as ckpt_dir_B: + + model_A = initialize_expert_layer( + 1, use_glu, expert_type=src_module, add_bias_linear=add_bias_linear + ) + sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) + + save_strategy = get_default_save_sharded_strategy() + save(sharded_state_dict, ckpt_dir_A, save_strategy) + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) + model_B = initialize_expert_layer( + 1, use_glu, expert_type=dest_module, add_bias_linear=add_bias_linear + ) + load_strategy = None + state_dict = load( + model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_B.load_state_dict(state_dict) + save(model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), ckpt_dir_B) + Utils.destroy_model_parallel() + + # Test both checkpoints are equal + Utils.initialize_model_parallel(1, 1) + state_dict_A = load_plain_tensors(ckpt_dir_A) + state_dict_B = load_plain_tensors(ckpt_dir_B) + diffs = diff(state_dict_A, state_dict_B) + assert not any(map(bool, diffs)), diffs + Utils.destroy_model_parallel() + + @pytest.mark.skipif( + not is_te_min_version("1.11.0"), + reason="FP8 support of TEGroupedMLP is only available in TE 1.11.0 and later.", + ) + @pytest.mark.skipif(not fp8_available, reason=reason_for_no_fp8) + @pytest.mark.parametrize( + "src_module,dst_module,src_tp_pp_exp,dest_tp_pp_exp", + [ + # Changing tp/pp/dp doesn't affect _extra_state + ('te_sequential', 'te_grouped', (1, 1, 1), (1, 1, 4)), + ('te_sequential', 'te_grouped', (1, 1, 4), (1, 1, 1)), + ('te_grouped', 'te_sequential', (1, 1, 1), (1, 1, 4)), + ('te_grouped', 'te_sequential', (1, 1, 
4), (1, 1, 1)), + ], + ) + def test_sequential_grouped_mlp_extra_state( + self, tmp_path_dist_ckpt, src_tp_pp_exp, dest_tp_pp_exp, src_module, dst_module + ): + """Test saving and loading _extra_state""" + src_tp, src_pp, src_exp = src_tp_pp_exp + dest_tp, dest_pp, dest_exp = dest_tp_pp_exp + use_glu = True + Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_A' + ) as ckpt_dir_A, TempNamedDir( + tmp_path_dist_ckpt / 'test_grouped_mlp_extra_state_model_B' + ) as ckpt_dir_B, fp8_autocast(): + tokens_per_expert = torch.tensor([16] * (8 // src_exp)) + input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") + + # Save checkpoint A + model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) + model_A = model_A.cuda() + # fp8 meta is initialized at the first step + model_A(input_tensor, tokens_per_expert) + sharded_state_dict = model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()) + + save_strategy = get_default_save_sharded_strategy() + save(sharded_state_dict, ckpt_dir_A, save_strategy) + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel(dest_tp, dest_pp, expert_model_parallel_size=dest_exp) + load_strategy = None + + # model_A load checkpoint A + model_A = initialize_expert_layer(1, use_glu, expert_type=src_module, fp8=True) + model_A = model_A.cuda() + state_dict = load( + model_A.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_A.load_state_dict(state_dict) + + # model_B load checkpoint A + model_B = initialize_expert_layer(1, use_glu, expert_type=dst_module, fp8=True) + model_B = model_B.cuda() + state_dict = load( + model_B.sharded_state_dict(sharded_offsets=get_pp_offsets()), + ckpt_dir_A, + load_strategy, + ) + model_B.load_state_dict(state_dict) + + # Should be bitwise equal + if src_module == "te_grouped": + model_A, model_B = model_B, model_A + torch.testing.assert_close( + torch.cat( + [ + model_A.local_experts[i] + .linear_fc1.fp8_meta["scaling_fwd"] + .amax_history.view(-1, 1) + for i in range(8 // dest_exp) + ], + dim=1, + ).view(1024, -1), + model_B.linear_fc1.fp8_meta["scaling_fwd"].amax_history, + rtol=0, + atol=0, + ) + + Utils.destroy_model_parallel() + + @pytest.mark.skipif( + not is_te_min_version("1.9.0"), + reason="TEGroupedMLP is only supported in TE 1.9.0 and later.", + ) + @pytest.mark.parametrize("ep_size", [1, 2]) + def test_te_grouped_linear_torch_native(self, tmp_path_dist_ckpt, ep_size): + """Test saving and loading torch native checkpoints""" + use_glu = True + Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=ep_size) + with TempNamedDir(tmp_path_dist_ckpt / 'test_te_grouped_linear_torch_native') as ckpt_dir: + tokens_per_expert = torch.tensor([16] * (8 // ep_size)) + input_tensor = torch.randn(tokens_per_expert.sum(), 16, device="cuda") + + # Save checkpoint + model = initialize_expert_layer(1, use_glu, expert_type="te_grouped") + model = model.cuda() + model(input_tensor, tokens_per_expert) + torch.save(model.state_dict(), ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") + + # Load checkpoint + state_dict = torch.load(ckpt_dir / f"model_ep{torch.distributed.get_rank()}.pt") + model.load_state_dict(state_dict) + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/test_async_save.py b/tests/unit_tests/dist_checkpointing/test_async_save.py index d6aa879..f751a52 100644 --- 
a/tests/unit_tests/dist_checkpointing/test_async_save.py +++ b/tests/unit_tests/dist_checkpointing/test_async_save.py @@ -1,102 +1,104 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from unittest import mock - -import pytest -import torch - -from megatron.core.dist_checkpointing import ShardedTensor, load, save -from megatron.core.dist_checkpointing.dict_utils import diff -from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue -from megatron.core.dist_checkpointing.strategies.filesystem_async import FileSystemWriterAsync -from megatron.core.dist_checkpointing.strategies.torch import TorchDistSaveShardedStrategy -from tests.unit_tests.dist_checkpointing import TempNamedDir -from tests.unit_tests.test_utilities import Utils - - -def write_data_os_err_mock_fn(local_proc_idx, write_bucket, results_queue, count_queue, use_fsync): - """Raises an error on worker #2 during storage save""" - try: - if local_proc_idx == 2: - raise OSError('worker #2 critical failure') - output = (local_proc_idx, []) - except Exception as e: - output = (local_proc_idx, e) - results_queue.put(output) - count_queue.get() - count_queue.task_done() - - -class TestAsyncSave: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_async_is_equivalent_to_sync(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - sharded_state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), replica_id=Utils.rank - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 - ), - } - - with TempNamedDir( - tmp_path_dist_ckpt / 'test_equivalence_async' - ) as async_ckpt_dir, TempNamedDir( - tmp_path_dist_ckpt / 'test_equivalence_sync' - ) as sync_ckpt_dir: - # async - async_calls = AsyncCallsQueue() - async_request = save(sharded_state_dict, async_ckpt_dir, async_sharded_save=True) - async_calls.schedule_async_request(async_request) - - # sync - save(sharded_state_dict, sync_ckpt_dir, async_sharded_save=False) - - # finalize async - async_calls.maybe_finalize_async_calls(blocking=True) - - # load and compare - loaded_async_state_dict = load(sharded_state_dict, async_ckpt_dir) - loaded_sync_state_dict = load(sharded_state_dict, sync_ckpt_dir) - diffs = diff(loaded_async_state_dict, loaded_sync_state_dict) - assert not any(map(bool, diffs)), diffs - - Utils.destroy_model_parallel() - - @pytest.mark.parametrize('async_save', [False, True]) - @pytest.mark.parametrize('worker_fn', [write_data_os_err_mock_fn]) - def test_errors_are_reported(self, tmp_path_dist_ckpt, async_save, worker_fn): - Utils.initialize_model_parallel(2, 4) - sharded_state_dict = { - f'key{i}': ShardedTensor.from_rank_offsets(f'key{i}_rank{Utils.rank}', torch.ones(2, 4)) - for i in range(4) # make sure there is enough non-empty saving workers - } - - with TempNamedDir(tmp_path_dist_ckpt / 'test_errors_are_reported') as ckpt_dir: - async_calls = AsyncCallsQueue() - save_strategy = TorchDistSaveShardedStrategy('torch_dist', 1, thread_count=8) - - try: - orig_fn = FileSystemWriterAsync.write_preloaded_data - FileSystemWriterAsync.write_preloaded_data = worker_fn - with pytest.raises(RuntimeError) as exc_info: - if async_save: - async_request = save( - sharded_state_dict, ckpt_dir, save_strategy, async_sharded_save=True - ) - async_calls.schedule_async_request(async_request) - async_calls.maybe_finalize_async_calls(blocking=True) - else: - 
save(sharded_state_dict, ckpt_dir, save_strategy) - assert 'Worker failure' in str(exc_info.value) - - finally: - FileSystemWriterAsync.write_preloaded_data = orig_fn - - Utils.destroy_model_parallel() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from unittest import mock + +import pytest +import torch + +from megatron.core.dist_checkpointing import ShardedTensor, load, save +from megatron.core.dist_checkpointing.dict_utils import diff +from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue +from megatron.core.dist_checkpointing.strategies.filesystem_async import FileSystemWriterAsync +from megatron.core.dist_checkpointing.strategies.torch import TorchDistSaveShardedStrategy +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +def write_data_os_err_mock_fn(local_proc_idx, write_bucket, results_queue, count_queue, use_fsync): + """Raises an error on worker #2 during storage save""" + try: + if local_proc_idx == 2: + raise OSError('worker #2 critical failure') + output = (local_proc_idx, []) + except Exception as e: + output = (local_proc_idx, e) + results_queue.put(output) + count_queue.get() + count_queue.task_done() + + +class TestAsyncSave: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize('persistent', [True, False]) + def test_async_is_equivalent_to_sync(self, tmp_path_dist_ckpt, persistent): + Utils.initialize_model_parallel(2, 4) + + sharded_state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 + ), + } + + with TempNamedDir( + tmp_path_dist_ckpt / 'test_equivalence_async' + ) as async_ckpt_dir, TempNamedDir( + tmp_path_dist_ckpt / 'test_equivalence_sync' + ) as sync_ckpt_dir: + # async + async_calls = AsyncCallsQueue(persistent) + async_request = save(sharded_state_dict, async_ckpt_dir, async_sharded_save=True) + async_calls.schedule_async_request(async_request) + + # sync + save(sharded_state_dict, sync_ckpt_dir, async_sharded_save=False) + + # finalize async + async_calls.maybe_finalize_async_calls(blocking=True) + + # load and compare + loaded_async_state_dict = load(sharded_state_dict, async_ckpt_dir) + loaded_sync_state_dict = load(sharded_state_dict, sync_ckpt_dir) + diffs = diff(loaded_async_state_dict, loaded_sync_state_dict) + assert not any(map(bool, diffs)), diffs + async_calls.close() + + Utils.destroy_model_parallel() + + @pytest.mark.parametrize('async_save', [False, True]) + @pytest.mark.parametrize('worker_fn', [write_data_os_err_mock_fn]) + def test_errors_are_reported(self, tmp_path_dist_ckpt, async_save, worker_fn): + Utils.initialize_model_parallel(2, 4) + sharded_state_dict = { + f'key{i}': ShardedTensor.from_rank_offsets(f'key{i}_rank{Utils.rank}', torch.ones(2, 4)) + for i in range(4) # make sure there is enough non-empty saving workers + } + + with TempNamedDir(tmp_path_dist_ckpt / 'test_errors_are_reported') as ckpt_dir: + async_calls = AsyncCallsQueue() + save_strategy = TorchDistSaveShardedStrategy('torch_dist', 1, thread_count=8) + + try: + orig_fn = FileSystemWriterAsync.write_preloaded_data + FileSystemWriterAsync.write_preloaded_data = worker_fn + with pytest.raises(RuntimeError) as exc_info: + if async_save: + async_request = save( + sharded_state_dict, ckpt_dir, 
save_strategy, async_sharded_save=True + ) + async_calls.schedule_async_request(async_request) + async_calls.maybe_finalize_async_calls(blocking=True) + else: + save(sharded_state_dict, ckpt_dir, save_strategy) + assert 'Worker failure' in str(exc_info.value) + + finally: + FileSystemWriterAsync.write_preloaded_data = orig_fn + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/test_fully_parallel.py b/tests/unit_tests/dist_checkpointing/test_fully_parallel.py index a383bd3..1c0dec3 100644 --- a/tests/unit_tests/dist_checkpointing/test_fully_parallel.py +++ b/tests/unit_tests/dist_checkpointing/test_fully_parallel.py @@ -1,379 +1,404 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -from pathlib import Path -from typing import List, Tuple -from unittest import mock - -import pytest -import torch - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing import ShardedTensor -from megatron.core.dist_checkpointing.dict_utils import ( - dict_list_map_outplace, - map_reduce, - nested_values, -) -from megatron.core.dist_checkpointing.exchange_utils import _get_empty_tensor_for_exchange -from megatron.core.dist_checkpointing.mapping import ShardedStateDict, is_main_replica -from megatron.core.dist_checkpointing.strategies.base import ( - LoadShardedStrategy, - SaveShardedStrategy, -) -from megatron.core.dist_checkpointing.strategies.fully_parallel import ( - FullyParallelLoadStrategyWrapper, - FullyParallelSaveStrategyWrapper, - _sharded_tensor_shard_id, -) -from tests.unit_tests.dist_checkpointing import TempNamedDir -from tests.unit_tests.test_utilities import Utils - - -class MockSaveStrategy(SaveShardedStrategy): - def __init__(self): - super().__init__('mock', 1) - self.save_keys = set() - - def save(self, sharded_state_dict, ckpt_dir): - self.save_keys = { - sh_ten.key - for sh_ten in nested_values(sharded_state_dict) - if is_main_replica(sh_ten.replica_id) - } - - -class MockLoadStrategy(LoadShardedStrategy): - def __init__(self, device='cpu'): - super().__init__() - self.device = device - self.load_keys = set() - - def load(self, sharded_state_dict, ckpt_dir): - self.load_keys = { - sh_ten.key - for sh_ten in nested_values(sharded_state_dict) - if is_main_replica(sh_ten.replica_id) - } - - def load_rand(x): - assert isinstance(x, ShardedTensor) - x.init_data(self.device) - x.data.fill_(Utils.rank) - return x.data - - return dict_list_map_outplace(load_rand, sharded_state_dict) - - def load_tensors_metadata(self, checkpoint_dir: Path): - pass - - def check_backend_compatibility(self, loaded_version): - pass - - def check_version_compatibility(self, loaded_version): - pass - - -class TestFullyParallelSaveAndLoad: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @staticmethod - def get_sharded_state_dict(): - return { - 'sd_key_tp_repl1': ShardedTensor.from_rank_offsets( - 'key_TP_repl1', - torch.ones(10), - ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ), - replica_id=parallel_state.get_data_parallel_rank(with_context_parallel=True), - ), - 'sd_key_tp_repl2': ShardedTensor.from_rank_offsets( - 'key_TP_repl2', - torch.ones(10), - ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ), - replica_id=parallel_state.get_data_parallel_rank(with_context_parallel=True), - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 
'keyB', torch.ones(20), (0, Utils.rank, Utils.world_size) - ), - 'sd_keyE_no_C': ShardedTensor.from_rank_offsets( - 'keyC', torch.ones(100), replica_id=Utils.rank - ), - 'sd_keyX_no_D': ShardedTensor.from_rank_offsets( - 'keyD', torch.ones(1000), replica_id=Utils.rank - ), - 'sd_keyC_no_E': ShardedTensor.from_rank_offsets( - 'keyE', torch.ones(100), replica_id=Utils.rank - ), - } - - @pytest.mark.parametrize("parallelization_along_dp", [False, True]) - def test_save_distribution(self, parallelization_along_dp, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 1) - state_dict = self.get_sharded_state_dict() - - # Ranks assignment: - # 1. Lowest coverage - # 2. Largest tensor - # 3. Shard id (key) - if not parallelization_along_dp: - expected_key_to_saving_ranks = { - 'keyB': list( - range(Utils.world_size) - ), # everyone must save (disjoint shards, coverage == 1) - 'key_TP_repl1': [0, 1], # lowest coverage (4), first TP domain - 'key_TP_repl2': [2, 3], # lowest coverage (4), second TP domain - 'keyD': [4], # largest tensor - 'keyC': [5], # second largest tensor - 'keyE': [6], # second largest tensor - } - else: - if parallel_state.get_tensor_model_parallel_rank() == 0: - expected_key_to_saving_ranks = { - # everyone must save (disjoint shards, coverage == 1): - 'keyB': list( - range( - parallel_state.get_data_parallel_world_size(with_context_parallel=True) - ) - ), - # this time, TP sharded tensors have the same coverage as fully replicated! - 'keyD': [0], # largest tensor - 'keyC': [1], # second largest tensor - 'keyE': [2], # second largest tensor - 'key_TP_repl1': [3], # smallest tensor - 'key_TP_repl2': [3], # smallest tensor, last rank is the least occupied - } - else: - expected_key_to_saving_ranks = { - # everyone must save (disjoint shards, coverage == 1): - 'keyB': list( - range( - parallel_state.get_data_parallel_world_size(with_context_parallel=True) - ) - ), - # tensors C, D, E are absent in this DP group - 'key_TP_repl1': [0], # smallest tensor - 'key_TP_repl2': [1], # smallest tensor, last rank is the least occupied - } - - parallelization_group = ( - parallel_state.get_data_parallel_group(with_context_parallel=True) - if parallelization_along_dp - else None - ) - dp_rank = torch.distributed.get_rank(parallelization_group) - expected_keys_saved_by_current_rank = { - k for k, v in expected_key_to_saving_ranks.items() if dp_rank in v - } - - # Run save and tests - mock_strategy = MockSaveStrategy() - save_strategy = FullyParallelSaveStrategyWrapper( - mock_strategy, parallelization_group, do_cache_distribution=True - ) - with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: - save_strategy.save(state_dict, ckpt_dir_A) - key_to_saving_rank = dict( - map_reduce( - save_strategy.cached_distribution.main_rank_for_shard.items(), - lambda shard_rank: shard_rank[0][0], - lambda shard_rank: shard_rank[1], - ) - ) - assert expected_key_to_saving_ranks == key_to_saving_rank - - for _, sh_ten in state_dict.items(): - if ( - _sharded_tensor_shard_id(sh_ten) - in save_strategy.cached_distribution.shards_in_this_group - ): - is_expected_to_be_saved_by_this_rank = dp_rank in expected_key_to_saving_ranks.get( - sh_ten.key, [] - ) - assert sh_ten.replica_id == int( - not is_expected_to_be_saved_by_this_rank - ), expected_key_to_saving_ranks - - assert mock_strategy.save_keys == expected_keys_saved_by_current_rank, ( - Utils.rank, - mock_strategy.save_keys, - expected_keys_saved_by_current_rank, - ) - - @pytest.mark.parametrize("parallelization_along_dp", [False, True]) - def 
test_load_distribution(self, parallelization_along_dp, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 1) - - state_dict = self.get_sharded_state_dict() - - # Ranks assignment: - # 1. Lowest coverage - # 2. Largest tensor - # 3. Shard id (key) - if not parallelization_along_dp: - expected_key_to_saving_ranks = { - 'keyB': list( - range(Utils.world_size) - ), # everyone must save (disjoint shards, coverage == 1) - 'key_TP_repl1': [0, 1], # lowest coverage (4), first TP domain - 'key_TP_repl2': [2, 3], # lowest coverage (4), second TP domain - 'keyD': [4], # largest tensor - 'keyC': [5], # second largest tensor - 'keyE': [6], # second largest tensor - } - else: - # When loading, expected key distribution is the same across TP, because every replica - # needs to be loaded - expected_key_to_saving_ranks = { - # everyone must load (disjoint shards, coverage == 1): - 'keyB': list( - range(parallel_state.get_data_parallel_world_size(with_context_parallel=True)) - ), - # this time, TP sharded tensors have the same coverage as fully replicated! - 'keyD': [0], # largest tensor - 'keyC': [1], # second largest tensor - 'keyE': [2], # second largest tensor - 'key_TP_repl1': [3], # smallest tensor - 'key_TP_repl2': [3], # smallest tensor, last rank is the least occupied - } - - parallelization_group = ( - parallel_state.get_data_parallel_group(with_context_parallel=True) - if parallelization_along_dp - else None - ) - dp_rank = torch.distributed.get_rank(parallelization_group) - expected_keys_saved_by_current_rank = { - k for k, v in expected_key_to_saving_ranks.items() if dp_rank in v - } - - # Run save and tests - mock_strategy = MockLoadStrategy() - load_strategy = FullyParallelLoadStrategyWrapper( - mock_strategy, parallelization_group, do_cache_distribution=True - ) - with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: - loaded_state_dict = load_strategy.load(state_dict, ckpt_dir_A) - key_to_saving_rank = dict( - map_reduce( - load_strategy.cached_distribution.main_rank_for_shard.items(), - lambda shard_rank: shard_rank[0][0], - lambda shard_rank: shard_rank[1], - ) - ) - assert expected_key_to_saving_ranks == key_to_saving_rank - - assert mock_strategy.load_keys == expected_keys_saved_by_current_rank, ( - Utils.rank, - mock_strategy.load_keys, - expected_keys_saved_by_current_rank, - ) - - assert loaded_state_dict.keys() == state_dict.keys() - - @pytest.mark.parametrize('state_dict_device', ['cpu', 'cuda']) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_memory_usage(self, state_dict_device, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 1) - - megabytes = 1024 * 1024 - mock_strategy = MockLoadStrategy(state_dict_device) - - mem_alloc = [] - - real_get_empty_tensor_for_exchange = _get_empty_tensor_for_exchange - - def mock_get_empty_tensor_for_exchange(*args, **kwargs) -> torch.Tensor: - ret = real_get_empty_tensor_for_exchange(*args, **kwargs) - mem_alloc.append(torch.cuda.memory_allocated()) - return ret - - load_strategy = FullyParallelLoadStrategyWrapper(mock_strategy) - torch.distributed.barrier() - - # Each tensor is 4MB, 40MB in total. 
- # We expect extra memory usage peak at ~32MB, not 1GB - sharded_state_dict = { - f'ten_{i}': ShardedTensor.from_rank_offsets( - f'ten_{i}', - torch.rand(megabytes, dtype=torch.float, device=state_dict_device), - (0, Utils.rank, Utils.world_size), - ) - for i in range(10) - } - - mem_alloc_start = torch.cuda.memory_allocated() - - with mock.patch( - 'megatron.core.dist_checkpointing.exchange_utils._get_empty_tensor_for_exchange', - new=mock_get_empty_tensor_for_exchange, - ), TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: - _ = load_strategy.load(sharded_state_dict, ckpt_dir_A) - - # Each rank is expected to do 7 * 10 empty allocations - assert len(mem_alloc) == 7 * 10 - # Peak mem usage should be within 4MB (single tensor) - assert max(mem_alloc) - mem_alloc_start < 4.01 * megabytes, ( - max(mem_alloc), - mem_alloc_start, - ) - - Utils.destroy_model_parallel() - - def test_only_necessary_exchanges_performed_during_load(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 1) - - # State dict with 2 expected exchanges - sharded_state_dict_baseline_two_exchanges = { - 'needed_by_all_A': ShardedTensor.from_rank_offsets( - 'needed_by_all_A', - torch.ones(4, dtype=torch.float, device='cuda'), - replica_id=Utils.rank, - ), - 'needed_by_all_B': ShardedTensor.from_rank_offsets( - 'needed_by_all_B', - torch.ones(4, dtype=torch.float, device='cuda'), - replica_id=Utils.rank, - ), - } - # State dict with 1 expected exchange - sharded_state_dict_baseline_one_exchange = { - 'needed_by_all': sharded_state_dict_baseline_two_exchanges['needed_by_all_A'] - } - # State dict with 1 expected exchanges even though there are 2 tensors to load (1 is unique for each rank) - sharded_state_dict_test_one_exchange = sharded_state_dict_baseline_one_exchange.copy() - sharded_state_dict_test_one_exchange['unique'] = ShardedTensor.from_rank_offsets( - 'unique', - torch.ones(4, dtype=torch.float, device='cuda'), - (0, Utils.rank, Utils.world_size), - ) - - expected_call_counts: List[Tuple[ShardedStateDict, int]] = [ - (sharded_state_dict_baseline_one_exchange, 1), - (sharded_state_dict_baseline_two_exchanges, 2), - (sharded_state_dict_test_one_exchange, 1), - ] - - mock_strategy = MockLoadStrategy() - with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir: - for sharded_state_dict, expected_count in expected_call_counts: - load_strategy = FullyParallelLoadStrategyWrapper( - mock_strategy, None, do_cache_distribution=True, exchange_algo='broadcast' - ) - with mock.patch( - 'megatron.core.dist_checkpointing.strategies.fully_parallel.torch.distributed.broadcast' - ) as broadcast_mock: - _ = load_strategy.load(sharded_state_dict, ckpt_dir) - assert broadcast_mock.call_count == expected_count - - Utils.destroy_model_parallel() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
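+# Descriptive summary of the tests defined below: these unit tests exercise
+# FullyParallelSaveStrategyWrapper and FullyParallelLoadStrategyWrapper via
+# mock base strategies, checking how shards are assigned to saving/loading
+# ranks, peak memory usage during tensor exchange, the number of broadcasts
+# performed during load, and broadcasting of ShardedObjects.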
+from pathlib import Path +from typing import List, Tuple +from unittest import mock + +import pytest +import torch +import torch.distributed + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing import ShardedTensor +from megatron.core.dist_checkpointing.dict_utils import ( + dict_list_map_outplace, + map_reduce, + nested_values, +) +from megatron.core.dist_checkpointing.exchange_utils import _get_empty_tensor_for_exchange +from megatron.core.dist_checkpointing.mapping import ( + ShardedObject, + ShardedStateDict, + ShardedTensorFactory, + is_main_replica, +) +from megatron.core.dist_checkpointing.strategies.base import ( + LoadShardedStrategy, + SaveShardedStrategy, +) +from megatron.core.dist_checkpointing.strategies.fully_parallel import ( + FullyParallelLoadStrategyWrapper, + FullyParallelSaveStrategyWrapper, + _sharded_tensor_shard_id, +) +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +class MockSaveStrategy(SaveShardedStrategy): + def __init__(self): + super().__init__('mock', 1) + self.save_keys = set() + + def save(self, sharded_state_dict, ckpt_dir): + for sh_ten in nested_values(sharded_state_dict): + if is_main_replica(sh_ten.replica_id): + self.save_keys.add(sh_ten.key) + + +class MockLoadStrategy(LoadShardedStrategy): + def __init__(self, device='cpu'): + super().__init__() + self.device = device + self.load_keys = set() + + def load(self, sharded_state_dict, ckpt_dir): + for sh_ten in nested_values(sharded_state_dict): + if is_main_replica(sh_ten.replica_id): + self.load_keys.add(sh_ten.key) + + def load_rand(x): + assert isinstance(x, ShardedTensor) or isinstance(x, ShardedObject) + if isinstance(x, ShardedTensor): + x.init_data(self.device) + x.data.fill_(Utils.rank) + return x.data + else: + x.data = [Utils.rank] + return x.data + + return dict_list_map_outplace(load_rand, sharded_state_dict) + + def load_tensors_metadata(self, checkpoint_dir: Path): + pass + + def check_backend_compatibility(self, loaded_version): + pass + + def check_version_compatibility(self, loaded_version): + pass + + +class TestFullyParallelSaveAndLoad: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @staticmethod + def get_sharded_state_dict(): + return { + 'sd_key_tp_repl1': ShardedTensor.from_rank_offsets( + 'key_TP_repl1', + torch.ones(10), + ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ), + replica_id=parallel_state.get_data_parallel_rank(with_context_parallel=True), + ), + 'sd_key_tp_repl2': ShardedTensor.from_rank_offsets( + 'key_TP_repl2', + torch.ones(10), + ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ), + replica_id=parallel_state.get_data_parallel_rank(with_context_parallel=True), + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(20), (0, Utils.rank, Utils.world_size) + ), + 'sd_keyE_no_C': ShardedTensor.from_rank_offsets( + 'keyC', torch.ones(100), replica_id=Utils.rank + ), + 'sd_keyX_no_D': ShardedTensor.from_rank_offsets( + 'keyD', torch.ones(1000), replica_id=Utils.rank + ), + 'sd_keyC_no_E': ShardedTensor.from_rank_offsets( + 'keyE', torch.ones(100), replica_id=Utils.rank + ), + } + + @pytest.mark.parametrize("parallelization_along_dp", [False, True]) + def test_save_distribution(self, parallelization_along_dp, tmp_path_dist_ckpt): + 
Utils.initialize_model_parallel(2, 1) + state_dict = self.get_sharded_state_dict() + + # Ranks assignment: + # 1. Lowest coverage + # 2. Largest tensor + # 3. Shard id (key) + if not parallelization_along_dp: + expected_key_to_saving_ranks = { + 'keyB': list( + range(Utils.world_size) + ), # everyone must save (disjoint shards, coverage == 1) + 'key_TP_repl1': [0, 1], # lowest coverage (4), first TP domain + 'key_TP_repl2': [2, 3], # lowest coverage (4), second TP domain + 'keyD': [4], # largest tensor + 'keyC': [5], # second largest tensor + 'keyE': [6], # second largest tensor + } + else: + if parallel_state.get_tensor_model_parallel_rank() == 0: + expected_key_to_saving_ranks = { + # everyone must save (disjoint shards, coverage == 1): + 'keyB': list( + range( + parallel_state.get_data_parallel_world_size(with_context_parallel=True) + ) + ), + # this time, TP sharded tensors have the same coverage as fully replicated! + 'keyD': [0], # largest tensor + 'keyC': [1], # second largest tensor + 'keyE': [2], # second largest tensor + 'key_TP_repl1': [3], # smallest tensor + 'key_TP_repl2': [3], # smallest tensor, last rank is the least occupied + } + else: + expected_key_to_saving_ranks = { + # everyone must save (disjoint shards, coverage == 1): + 'keyB': list( + range( + parallel_state.get_data_parallel_world_size(with_context_parallel=True) + ) + ), + # tensors C, D, E are absent in this DP group + 'key_TP_repl1': [0], # smallest tensor + 'key_TP_repl2': [1], # smallest tensor, last rank is the least occupied + } + + parallelization_group = ( + parallel_state.get_data_parallel_group(with_context_parallel=True) + if parallelization_along_dp + else None + ) + dp_rank = torch.distributed.get_rank(parallelization_group) + expected_keys_saved_by_current_rank = { + k for k, v in expected_key_to_saving_ranks.items() if dp_rank in v + } + + # Run save and tests + mock_strategy = MockSaveStrategy() + save_strategy = FullyParallelSaveStrategyWrapper( + mock_strategy, parallelization_group, do_cache_distribution=True + ) + with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: + save_strategy.save(state_dict, ckpt_dir_A) + key_to_saving_rank = dict( + map_reduce( + save_strategy.cached_distribution.main_rank_for_shard.items(), + lambda shard_rank: shard_rank[0][0], + lambda shard_rank: shard_rank[1], + ) + ) + assert expected_key_to_saving_ranks == key_to_saving_rank + + for _, sh_ten in state_dict.items(): + if ( + _sharded_tensor_shard_id(sh_ten) + in save_strategy.cached_distribution.shards_in_this_group + ): + is_expected_to_be_saved_by_this_rank = dp_rank in expected_key_to_saving_ranks.get( + sh_ten.key, [] + ) + assert sh_ten.replica_id == int( + not is_expected_to_be_saved_by_this_rank + ), expected_key_to_saving_ranks + + assert mock_strategy.save_keys == expected_keys_saved_by_current_rank, ( + Utils.rank, + mock_strategy.save_keys, + expected_keys_saved_by_current_rank, + ) + + @pytest.mark.parametrize("parallelization_along_dp", [False, True]) + def test_load_distribution(self, parallelization_along_dp, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 1) + + state_dict = self.get_sharded_state_dict() + + # Ranks assignment: + # 1. Lowest coverage + # 2. Largest tensor + # 3. 
Shard id (key) + if not parallelization_along_dp: + expected_key_to_saving_ranks = { + 'keyB': list( + range(Utils.world_size) + ), # everyone must save (disjoint shards, coverage == 1) + 'key_TP_repl1': [0, 1], # lowest coverage (4), first TP domain + 'key_TP_repl2': [2, 3], # lowest coverage (4), second TP domain + 'keyD': [4], # largest tensor + 'keyC': [5], # second largest tensor + 'keyE': [6], # second largest tensor + } + else: + # When loading, expected key distribution is the same across TP, because every replica + # needs to be loaded + expected_key_to_saving_ranks = { + # everyone must load (disjoint shards, coverage == 1): + 'keyB': list( + range(parallel_state.get_data_parallel_world_size(with_context_parallel=True)) + ), + # this time, TP sharded tensors have the same coverage as fully replicated! + 'keyD': [0], # largest tensor + 'keyC': [1], # second largest tensor + 'keyE': [2], # second largest tensor + 'key_TP_repl1': [3], # smallest tensor + 'key_TP_repl2': [3], # smallest tensor, last rank is the least occupied + } + + parallelization_group = ( + parallel_state.get_data_parallel_group(with_context_parallel=True) + if parallelization_along_dp + else None + ) + dp_rank = torch.distributed.get_rank(parallelization_group) + expected_keys_saved_by_current_rank = { + k for k, v in expected_key_to_saving_ranks.items() if dp_rank in v + } + + # Run save and tests + mock_strategy = MockLoadStrategy() + load_strategy = FullyParallelLoadStrategyWrapper( + mock_strategy, parallelization_group, do_cache_distribution=True + ) + with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: + loaded_state_dict = load_strategy.load(state_dict, ckpt_dir_A) + key_to_saving_rank = dict( + map_reduce( + load_strategy.cached_distribution.main_rank_for_shard.items(), + lambda shard_rank: shard_rank[0][0], + lambda shard_rank: shard_rank[1], + ) + ) + assert expected_key_to_saving_ranks == key_to_saving_rank + + assert mock_strategy.load_keys == expected_keys_saved_by_current_rank, ( + Utils.rank, + mock_strategy.load_keys, + expected_keys_saved_by_current_rank, + ) + + assert loaded_state_dict.keys() == state_dict.keys() + + @pytest.mark.parametrize('state_dict_device', ['cpu', 'cuda']) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_memory_usage(self, state_dict_device, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 1) + + megabytes = 1024 * 1024 + mock_strategy = MockLoadStrategy(state_dict_device) + + mem_alloc = [] + + real_get_empty_tensor_for_exchange = _get_empty_tensor_for_exchange + + def mock_get_empty_tensor_for_exchange(*args, **kwargs) -> torch.Tensor: + ret = real_get_empty_tensor_for_exchange(*args, **kwargs) + mem_alloc.append(torch.cuda.memory_allocated()) + return ret + + load_strategy = FullyParallelLoadStrategyWrapper(mock_strategy) + torch.distributed.barrier() + + # Each tensor is 4MB, 40MB in total. 
+ # We expect extra memory usage peak at ~32MB, not 1GB + sharded_state_dict = { + f'ten_{i}': ShardedTensor.from_rank_offsets( + f'ten_{i}', + torch.rand(megabytes, dtype=torch.float, device=state_dict_device), + (0, Utils.rank, Utils.world_size), + ) + for i in range(10) + } + + mem_alloc_start = torch.cuda.memory_allocated() + + with mock.patch( + 'megatron.core.dist_checkpointing.exchange_utils._get_empty_tensor_for_exchange', + new=mock_get_empty_tensor_for_exchange, + ), TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir_A: + _ = load_strategy.load(sharded_state_dict, ckpt_dir_A) + + # Each rank is expected to do 7 * 10 empty allocations + assert len(mem_alloc) == 7 * 10 + # Peak mem usage should be within 4MB (single tensor) + assert max(mem_alloc) - mem_alloc_start < 4.01 * megabytes, ( + max(mem_alloc), + mem_alloc_start, + ) + + Utils.destroy_model_parallel() + + def test_only_necessary_exchanges_performed_during_load(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 1) + + # State dict with 2 expected exchanges + sharded_state_dict_baseline_two_exchanges = { + 'needed_by_all_A': ShardedTensor.from_rank_offsets( + 'needed_by_all_A', + torch.ones(4, dtype=torch.float, device='cuda'), + replica_id=Utils.rank, + ), + 'needed_by_all_B': ShardedTensor.from_rank_offsets( + 'needed_by_all_B', + torch.ones(4, dtype=torch.float, device='cuda'), + replica_id=Utils.rank, + ), + } + # State dict with 1 expected exchange + sharded_state_dict_baseline_one_exchange = { + 'needed_by_all': sharded_state_dict_baseline_two_exchanges['needed_by_all_A'] + } + # State dict with 1 expected exchanges even though there are 2 tensors to load (1 is unique for each rank) + sharded_state_dict_test_one_exchange = sharded_state_dict_baseline_one_exchange.copy() + sharded_state_dict_test_one_exchange['unique'] = ShardedTensor.from_rank_offsets( + 'unique', + torch.ones(4, dtype=torch.float, device='cuda'), + (0, Utils.rank, Utils.world_size), + ) + + expected_call_counts: List[Tuple[ShardedStateDict, int]] = [ + (sharded_state_dict_baseline_one_exchange, 1), + (sharded_state_dict_baseline_two_exchanges, 2), + (sharded_state_dict_test_one_exchange, 1), + ] + + mock_strategy = MockLoadStrategy() + with TempNamedDir(tmp_path_dist_ckpt / 'mock_dir') as ckpt_dir: + for sharded_state_dict, expected_count in expected_call_counts: + load_strategy = FullyParallelLoadStrategyWrapper( + mock_strategy, None, do_cache_distribution=True, exchange_algo='broadcast' + ) + with mock.patch( + 'megatron.core.dist_checkpointing.strategies.fully_parallel.torch.distributed.broadcast' + ) as broadcast_mock: + _ = load_strategy.load(sharded_state_dict, ckpt_dir) + assert broadcast_mock.call_count == expected_count + + Utils.destroy_model_parallel() + + def test_broadcast_sharded_objects(self, tmp_path_dist_ckpt): + + sharded_state_dict = { + f'Obj_{i}': ShardedObject(f'Obj_{i}', None, (1,), (0,), replica_id=abs(Utils.rank - i)) + for i in range(Utils.world_size) + } + + with TempNamedDir(tmp_path_dist_ckpt / 'test_broadcast_sharded_objects') as ckpt_dir: + load_strategy = MockLoadStrategy() + load_strategy = FullyParallelLoadStrategyWrapper(load_strategy, None) + + loaded_state_dict = load_strategy.load(sharded_state_dict, ckpt_dir) + + # each rank is supposed to only load obj_rank because of how replica_id is set + assert load_strategy.base_strategy.load_keys == set({f'Obj_{Utils.rank}'}) + + # since each rank only loaded their Obj they were broadcasted + assert set(sharded_state_dict.keys()) == 
set(loaded_state_dict.keys()) diff --git a/tests/unit_tests/dist_checkpointing/test_global_metadata_reuse.py b/tests/unit_tests/dist_checkpointing/test_global_metadata_reuse.py new file mode 100644 index 0000000..3565bad --- /dev/null +++ b/tests/unit_tests/dist_checkpointing/test_global_metadata_reuse.py @@ -0,0 +1,170 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + + +from unittest import mock + +import pytest + +from megatron.training.arguments import parse_args +from megatron.training.checkpointing import load_checkpoint, save_checkpoint +from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, + setup_model_and_optimizer, +) +from tests.unit_tests.test_utilities import Utils + + +class TestGlobalMetadataReuse: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + def test_global_metadata_reuse(self, tmp_path_dist_ckpt, tp, pp): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = parse_args(ignore_unknown_args=True) + with TempNamedDir( + tmp_path_dist_ckpt / "test_global_metadata_reuse" + ) as non_persistent_ckpt_dir, mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ): + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, non_persistent_ckpt_dir) + mock_args.non_persistent_ckpt_type = "global" + mock_args.ckpt_assume_constant_structure = True + save_ckpt_context = {} + + # Check we avoid reduce_scatter + with mock.patch( + 'torch.distributed.checkpoint.utils._DistWrapper.reduce_scatter' + ) as reduce_scatter_mock: + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + save_ckpt_context, + ) + + assert reduce_scatter_mock.call_count == 0 + + assert save_ckpt_context['save_strategy'].cached_global_metadata is None + + resume_ckpt_context = {} + _, _ = load_checkpoint( + model, optimizer, opt_param_scheduler, checkpointing_context=resume_ckpt_context + ) + + load_strategy_cached_metadata = resume_ckpt_context[ + 'load_strategy' + ].cached_global_metadata + assert load_strategy_cached_metadata is not None + assert getattr(load_strategy_cached_metadata, "all_local_plans", None) is not None + + # Check we avoid reduce_scatter + with mock.patch( + 'torch.distributed.checkpoint.utils._DistWrapper.reduce_scatter' + ) as reduce_scatter_mock: + save_checkpoint( + 2, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + resume_ckpt_context, + ) + assert reduce_scatter_mock.call_count == 0 + + assert ( + load_strategy_cached_metadata + is resume_ckpt_context['save_strategy'].cached_global_metadata + ) + + assert resume_ckpt_context['save_strategy'].validated_loaded_metadata_reuse + + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + def test_no_global_metadata_reuse_on_different_parallelism(self, tmp_path_dist_ckpt, tp, pp): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = parse_args(ignore_unknown_args=True) + with TempNamedDir( + tmp_path_dist_ckpt / "test_global_metadata_reuse" + ) as non_persistent_ckpt_dir, 
mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ): + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, non_persistent_ckpt_dir) + mock_args.non_persistent_ckpt_type = "global" + mock_args.ckpt_assume_constant_structure = True + mock_args.ckpt_fully_parallel_save = True + + save_ckpt_context = {} + + # Check we avoid reduce_scatter + with mock.patch( + 'torch.distributed.checkpoint.utils._DistWrapper.reduce_scatter' + ) as reduce_scatter_mock: + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + save_ckpt_context, + ) + + assert reduce_scatter_mock.call_count == 0 + + assert save_ckpt_context['save_strategy'].base_strategy.cached_global_metadata is None + + Utils.destroy_model_parallel() + Utils.initialize_model_parallel(pp, tp) + model, optimizer = setup_model_and_optimizer(1, pp, tp) + init_basic_mock_args(mock_args, pp, tp) + mock_args.no_load_rng = True + + resume_ckpt_context = {} + _, _ = load_checkpoint( + model, optimizer, opt_param_scheduler, checkpointing_context=resume_ckpt_context + ) + + load_strategy_cached_metadata = resume_ckpt_context[ + 'load_strategy' + ].cached_global_metadata + + assert load_strategy_cached_metadata is not None + assert getattr(load_strategy_cached_metadata, "all_local_plans", None) is not None + + # Check we avoid reduce_scatter + with mock.patch( + 'torch.distributed.checkpoint.utils._DistWrapper.reduce_scatter' + ) as reduce_scatter_mock: + save_checkpoint( + 2, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + resume_ckpt_context, + ) + assert reduce_scatter_mock.call_count == 0 + + assert not resume_ckpt_context[ + 'save_strategy' + ].base_strategy.validated_loaded_metadata_reuse diff --git a/tests/unit_tests/dist_checkpointing/test_local.py b/tests/unit_tests/dist_checkpointing/test_local.py index 2b7370d..a8f416b 100644 --- a/tests/unit_tests/dist_checkpointing/test_local.py +++ b/tests/unit_tests/dist_checkpointing/test_local.py @@ -1,219 +1,329 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -import filecmp -import shutil -from pathlib import Path -from types import SimpleNamespace -from typing import Any, Callable, Tuple, Union -from unittest import mock - -import pytest -import torch - -from megatron.core.dist_checkpointing import ShardedTensor -from megatron.core.dist_checkpointing.dict_utils import diff -from megatron.core.dist_checkpointing.mapping import ShardedBase, ShardedTensorFactory -from megatron.core.dist_checkpointing.state_dict_transformation import ( - prepare_state_dict_for_save, - recreate_state_dict_after_load, -) -from megatron.core.dist_checkpointing.utils import extract_nonpersistent -from megatron.training.async_utils import maybe_finalize_async_save -from megatron.training.checkpointing import generate_state_dict, load_checkpoint, save_checkpoint -from tests.unit_tests.dist_checkpointing import ( - TempNamedDir, - init_basic_mock_args, - init_checkpointing_mock_args, - setup_model_and_optimizer, -) -from tests.unit_tests.test_utilities import Utils - - -def find_matching_values( - x: Union[dict, list], predicate: Callable[[Any], bool] -) -> Tuple[Union[dict, list], Union[dict, list]]: - """Return matching values in a single list - - Args: - x (Union[dict, list]) : state dict to process. 
Top-level argument must be a dict or list - predicate (object -> bool): determines matching values - """ - - matching_vals = [] - if isinstance(x, dict): - values = x.values() - elif isinstance(x, list): - values = x - else: - raise ValueError(f'Unexpected top-level object type: {type(x)}') - for v in values: - if isinstance(v, (list, dict)): - matching_vals += find_matching_values(v, predicate) - elif predicate(v): - matching_vals.append(v) - return matching_vals - - -class TestLocalCheckpointing: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - @pytest.mark.parametrize(('use_torch_fsdp2'), [True, False]) - def test_sharded_tensors(self, tp, pp, use_torch_fsdp2): - Utils.initialize_model_parallel(tp, pp) - num_floating_point_operations_so_far = 0 - model, optimizer = setup_model_and_optimizer(1, tp, pp) - opt_param_scheduler = None - rng_state = None - use_dist_ckpt = True - iteration = None - optim_sd_kwargs = dict(sharding_type='fully_sharded_model_space') - mock_args = SimpleNamespace() - mock_args.no_save_optim = False - mock_args.no_save_rng = True - mock_args.use_torch_fsdp2 = use_torch_fsdp2 - # Test save_local - state_dict = generate_state_dict( - mock_args, - model, - optimizer, - opt_param_scheduler, - rng_state, - use_dist_ckpt=use_dist_ckpt, - iteration=iteration, - optim_sd_kwargs=optim_sd_kwargs, - ) - sharded_tensor_factories = find_matching_values( - state_dict, lambda x: isinstance(x, ShardedTensorFactory) - ) - sharded_tensors = find_matching_values(state_dict, lambda x: isinstance(x, ShardedTensor)) - for ten in sharded_tensors: - assert ten.data != None - saved_state_dict = prepare_state_dict_for_save(state_dict) - saved_sharded_tensors = find_matching_values( - saved_state_dict, lambda x: isinstance(x, ShardedTensor) - ) - for ten in saved_sharded_tensors: - assert ten.data == None - assert ( - len(saved_sharded_tensors) - == len(sharded_tensors) + 2 * len(sharded_tensor_factories) - == len(saved_state_dict['raw_tensors']) - ) - common_sharded_tensors = find_matching_values( - saved_state_dict["common"], lambda x: isinstance(x, ShardedTensor) - ) - assert common_sharded_tensors == [] - # Test load_local - state_dict = generate_state_dict( - mock_args, - model, - optimizer, - opt_param_scheduler, - rng_state, - use_dist_ckpt=True, - iteration=iteration, - optim_sd_kwargs=optim_sd_kwargs, - ) - nonpersistent_state_dict, _ = extract_nonpersistent(state_dict) - # For a given use case - assert not nonpersistent_state_dict - loaded_state_dict = recreate_state_dict_after_load(state_dict, saved_state_dict) - only_left, only_right, mismatch = diff(loaded_state_dict, state_dict) - assert not only_left - assert not only_right - for i in mismatch: - # ShardedObjects and ShardedTensors should be replaced - assert issubclass(i[-1], ShardedBase) - - @pytest.mark.parametrize(('tp,pp'), [(2, 4), (1, 1)]) - @pytest.mark.parametrize(('use_ramdisk'), [True, False]) - @pytest.mark.parametrize(('async_save'), [True, False]) - @pytest.mark.parametrize(('algo'), ['atomic', 'fully_parallel']) - @pytest.mark.skip(reason="BasicLocalCheckpointManager is not yet integrated") - def test_basic_save_load_scenarios( - self, tmp_path_dist_ckpt, tp, pp, use_ramdisk, async_save, algo - ): - Utils.initialize_model_parallel(tp, pp) - num_floating_point_operations_so_far = 0 - model, optimizer = setup_model_and_optimizer(1, tp, pp) - opt_param_scheduler = None - - mock_args = 
SimpleNamespace() - if use_ramdisk: - tmp_path_dist_ckpt = Path("/dev/shm") - with TempNamedDir(tmp_path_dist_ckpt / "test_local") as local_ckpt_dir, mock.patch( - 'megatron.training.checkpointing.get_args', new=lambda: mock_args - ), mock.patch('megatron.training.async_utils.get_args', new=lambda: mock_args), mock.patch( - "megatron.training.checkpointing.update_num_microbatches" - ): - local_ckpt_dir = local_ckpt_dir / "subdir" # Test handling of non-existent directories - init_basic_mock_args(mock_args, tp, pp) - init_checkpointing_mock_args(mock_args, None) - mock_args.non_persistent_ckpt_type = 'local' - mock_args.non_persistent_local_ckpt_algo = algo - mock_args.async_save = async_save - checkpointing_context = { - 'local_checkpoint_manager': BasicLocalCheckpointManager(local_ckpt_dir) - } - - save_checkpoint( - 1, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context=checkpointing_context, - non_persistent_ckpt=True, - ) - if async_save: - maybe_finalize_async_save(True) - iteration, _ = load_checkpoint( - model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context - ) - assert iteration == 1 - ckpt_path = checkpointing_context['local_checkpoint_manager'].local_ckpt_path - backup_path = ckpt_path.with_name('backup_' + ckpt_path.name) - checkpointing_context['local_checkpoint_manager'].latest_iteration = -1 - iteration, _ = load_checkpoint( - model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context - ) - assert iteration == 1 - shutil.move(ckpt_path, backup_path) - checkpointing_context['local_checkpoint_manager'].latest_iteration = -1 - torch.distributed.barrier() - iteration, _ = load_checkpoint( - model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context - ) - assert iteration == 0 - save_checkpoint( - 1, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context=checkpointing_context, - non_persistent_ckpt=True, - ) - if async_save: - maybe_finalize_async_save(True) - assert filecmp.cmp(ckpt_path, backup_path, shallow=False), [ckpt_path, backup_path] - save_checkpoint( - 2, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - checkpointing_context=checkpointing_context, - non_persistent_ckpt=True, - ) - if async_save: - maybe_finalize_async_save(True) - assert not ckpt_path.exists() - ckpt_path = checkpointing_context['local_checkpoint_manager'].local_ckpt_path - assert ckpt_path.exists() - - Utils.destroy_model_parallel() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
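+# Covers node-local (non-persistent) checkpointing: MCoreTensorAwareStateDict round-trips and
+# LocalCheckpointManager save/load, including simulated per-rank save failures.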
+import filecmp +import logging +import shutil +import tempfile +from pathlib import Path +from types import SimpleNamespace +from typing import Any, Callable, Tuple, Union +from unittest import mock + +import pytest +import torch + +from megatron.training.arguments import parse_args + +nvidia_resiliency_ext = pytest.importorskip( + "nvidia_resiliency_ext", + reason="nvidia_resiliency_ext is required for local checkpointing tests", +) + +from nvidia_resiliency_ext.checkpointing.local.ckpt_managers.base_manager import ( + CheckpointingException, +) +from nvidia_resiliency_ext.checkpointing.local.ckpt_managers.local_manager import ( + LocalCheckpointManager, +) + +from megatron.core.dist_checkpointing import ShardedTensor +from megatron.core.dist_checkpointing.dict_utils import diff +from megatron.core.dist_checkpointing.mapping import ShardedBase, ShardedTensorFactory +from megatron.core.dist_checkpointing.tensor_aware_state_dict import MCoreTensorAwareStateDict +from megatron.core.dist_checkpointing.utils import extract_nonpersistent +from megatron.training.async_utils import maybe_finalize_async_save +from megatron.training.checkpointing import generate_state_dict, load_checkpoint, save_checkpoint +from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, + setup_model_and_optimizer, +) +from tests.unit_tests.test_utilities import Utils + +from .utils import find_matching_values + + +# TODO: Use mock local checkpointing? +class TestLocalCheckpointingReplication: + + def test_filename_to_id(self): + iteration_string = "0000123" + rank = "4" + with tempfile.TemporaryDirectory() as tmpdir: + ckpt_mgr = LocalCheckpointManager(tmpdir) + filename = ckpt_mgr._filename_from_template(iteration_string, rank) + assert (123, 4) == ckpt_mgr._filename_to_id(filename)[:2] + + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + def test_sharded_tensors(self, tp, pp): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + + +class TestLocalCheckpointing: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + @pytest.mark.parametrize(('use_torch_fsdp2'), [True, False]) + def test_sharded_tensors(self, tp, pp, use_torch_fsdp2): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + rng_state = None + use_dist_ckpt = True + iteration = None + optim_sd_kwargs = dict(sharding_type='fully_sharded_model_space') + mock_args = parse_args(ignore_unknown_args=True) + mock_args.no_save_optim = False + mock_args.no_save_rng = True + mock_args.use_torch_fsdp2 = use_torch_fsdp2 + # Test save_local + state_dict = generate_state_dict( + mock_args, + model, + optimizer, + opt_param_scheduler, + rng_state, + use_dist_ckpt=use_dist_ckpt, + iteration=iteration, + optim_sd_kwargs=optim_sd_kwargs, + ) + sharded_tensor_factories = find_matching_values( + state_dict, lambda x: isinstance(x, ShardedTensorFactory) + ) + sharded_tensors = find_matching_values(state_dict, lambda x: isinstance(x, ShardedTensor)) + for ten in sharded_tensors: + assert ten.data != None + saved_state_dict, _ = MCoreTensorAwareStateDict.from_state_dict(state_dict, algo='atomic') + saved_sharded_tensors = find_matching_values( + saved_state_dict, lambda x: isinstance(x, ShardedTensor) 
+ ) + assert ( + len(saved_sharded_tensors) + == len(sharded_tensors) + 2 * len(sharded_tensor_factories) + == len(list(saved_state_dict.tensors)) + ) + tensors = saved_state_dict.pop_tensors() + for ten in saved_sharded_tensors: + assert ten.data is None + assert saved_state_dict.is_hollow + hollow_sharded_tensors = find_matching_values( + saved_state_dict, lambda x: isinstance(x, torch.Tensor) + ) + assert hollow_sharded_tensors == [] + saved_state_dict.insert_tensors(tensors) + common_sharded_tensors = find_matching_values( + saved_state_dict.common_state_dict, lambda x: isinstance(x, ShardedTensor) + ) + assert common_sharded_tensors == [] + # Test load_local + state_dict = generate_state_dict( + mock_args, + model, + optimizer, + opt_param_scheduler, + rng_state, + use_dist_ckpt=True, + iteration=iteration, + optim_sd_kwargs=optim_sd_kwargs, + ) + nonpersistent_state_dict, _ = extract_nonpersistent(state_dict) + # For a given use case + assert not nonpersistent_state_dict + loaded_state_dict = saved_state_dict.to_state_dict(state_dict) + only_left, only_right, mismatch = diff(loaded_state_dict, state_dict) + assert not only_left + assert not only_right + for i in mismatch: + # ShardedObjects and ShardedTensors should be replaced + assert issubclass(i[-1], ShardedBase) + + @pytest.mark.parametrize(('tp,pp'), [(2, 4), (1, 1)]) + @pytest.mark.parametrize(('use_ramdisk'), [True, False]) + @pytest.mark.parametrize(('async_save'), [True, False]) + @pytest.mark.parametrize(('algo'), ['atomic', 'fully_parallel']) + def test_basic_save_load_scenarios( + self, tmp_path_dist_ckpt, tp, pp, use_ramdisk, async_save, algo + ): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = ( + SimpleNamespace() + ) # FIXME: fails with additional arguments (e.g.,'weight_decay') + if use_ramdisk: + tmp_path_dist_ckpt = Path("/dev/shm") + with TempNamedDir( + tmp_path_dist_ckpt / "test_local", sync=True + ) as local_ckpt_dir, mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch( + 'megatron.training.async_utils.get_args', new=lambda: mock_args + ), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ): + local_ckpt_dir = local_ckpt_dir / "subdir" # Test handling of non-existent directories + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, None) + mock_args.non_persistent_ckpt_type = 'local' + mock_args.non_persistent_local_ckpt_algo = algo + mock_args.async_save = async_save + checkpointing_context = { + 'local_checkpoint_manager': LocalCheckpointManager(local_ckpt_dir) + } + + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context=checkpointing_context, + non_persistent_ckpt=True, + ) + if async_save: + maybe_finalize_async_save(True) + iteration, _ = load_checkpoint( + model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context + ) + assert iteration == 1 + ckpt_id = checkpointing_context['local_checkpoint_manager']._ckpt_id(iteration) + ckpt_path = checkpointing_context['local_checkpoint_manager']._local_ckpt_path_from_id( + ckpt_id + ) + backup_path = ckpt_path.with_name('backup_' + ckpt_path.name) + checkpointing_context['local_checkpoint_manager'].latest_iteration = -1 + iteration, _ = load_checkpoint( + model, optimizer, opt_param_scheduler, 
checkpointing_context=checkpointing_context + ) + assert iteration == 1 + shutil.move(ckpt_path, backup_path) + checkpointing_context['local_checkpoint_manager'].latest_iteration = -1 + torch.distributed.barrier() + iteration, _ = load_checkpoint( + model, optimizer, opt_param_scheduler, checkpointing_context=checkpointing_context + ) + assert iteration == 0 + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context=checkpointing_context, + non_persistent_ckpt=True, + ) + if async_save: + maybe_finalize_async_save(True) + if Utils.rank > 0: # Skip assertion on rank 0 due to harmless nondeterminism + assert filecmp.cmp(ckpt_path, backup_path, shallow=False), [ckpt_path, backup_path] + save_checkpoint( + 2, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context=checkpointing_context, + non_persistent_ckpt=True, + ) + if async_save: + maybe_finalize_async_save(True) + assert not ckpt_path.exists() + ckpt_id = checkpointing_context['local_checkpoint_manager']._ckpt_id(2) + ckpt_path = checkpointing_context['local_checkpoint_manager']._local_ckpt_path_from_id( + ckpt_id + ) + assert ckpt_path.exists() + + Utils.destroy_model_parallel() + + @pytest.mark.parametrize(('tp,pp'), [(1, 1), (2, 4)]) + @pytest.mark.parametrize(('use_ramdisk'), [True, False]) + @pytest.mark.parametrize(('async_save'), [True, False]) + @pytest.mark.parametrize(('algo'), ['atomic', 'fully_parallel']) + @pytest.mark.flaky_in_dev + def test_failed_save(self, caplog, tmp_path_dist_ckpt, tp, pp, use_ramdisk, async_save, algo): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = parse_args(ignore_unknown_args=True) + if use_ramdisk: + tmp_path_dist_ckpt = Path("/dev/shm") + + def test_save_wrapper(save_wrapper, subdir): + with TempNamedDir(tmp_path_dist_ckpt / subdir, sync=True) as local_ckpt_dir, mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch( + 'megatron.training.async_utils.get_args', new=lambda: mock_args + ), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ), mock.patch.object( + LocalCheckpointManager, '_save', new=save_wrapper + ), caplog.at_level( + logging.INFO + ): + + local_ckpt_dir = ( + local_ckpt_dir / "subdir" + ) # Test handling of non-existent directories + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, None) + mock_args.non_persistent_ckpt_type = 'local' + mock_args.non_persistent_local_ckpt_algo = algo + mock_args.async_save = async_save + checkpointing_context = { + 'local_checkpoint_manager': LocalCheckpointManager(local_ckpt_dir) + } + + with pytest.raises(CheckpointingException): + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context=checkpointing_context, + non_persistent_ckpt=True, + ) + if async_save: + maybe_finalize_async_save(True) + iteration, _ = load_checkpoint( + model, + optimizer, + opt_param_scheduler, + checkpointing_context=checkpointing_context, + ) + assert iteration == 0 + assert not any((local_ckpt_dir / str(Utils.rank)).iterdir()) + + if Utils.rank == 1: + assert f"iter_0000001_{Utils.rank}_local.pt" not in caplog.text + else: + assert f"iter_0000001_{Utils.rank}_local.pt" in caplog.text + + original_save = LocalCheckpointManager._save + + 
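+        # The two wrappers below replace LocalCheckpointManager._save on rank 1 only:
+        # silent_error drops the save without raising, while exception raises (used for the
+        # async path). In both cases test_save_wrapper expects a CheckpointingException and
+        # verifies that no local checkpoint files are left behind for this rank.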
def silent_error(self, *args, **kwargs): + if self.rank == 1: + return + return original_save(self, *args, **kwargs) + + def exception(self, *args, **kwargs): + if self.rank == 1: + raise Exception("TEST") + return original_save(self, *args, **kwargs) + + test_save_wrapper(silent_error, "test_sync") + if async_save: + test_save_wrapper(exception, "test_async") + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/test_nonpersistent.py b/tests/unit_tests/dist_checkpointing/test_nonpersistent.py index 89e609a..8b7e798 100644 --- a/tests/unit_tests/dist_checkpointing/test_nonpersistent.py +++ b/tests/unit_tests/dist_checkpointing/test_nonpersistent.py @@ -1,140 +1,140 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import filecmp -import os -from types import SimpleNamespace -from unittest import mock - -import pytest - -from megatron.training.checkpointing import ( - _NON_PERSISTENT_CKPT_SUBDIR, - load_checkpoint, - save_checkpoint, -) -from tests.unit_tests.dist_checkpointing import ( - TempNamedDir, - init_basic_mock_args, - init_checkpointing_mock_args, - setup_model_and_optimizer, -) -from tests.unit_tests.test_utilities import Utils - - -class TestNonPersistentSaveAndLoad: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - def test_basic_save_load_scenarios(self, tmp_path_dist_ckpt, tp, pp): - Utils.initialize_model_parallel(tp, pp) - num_floating_point_operations_so_far = 0 - model, optimizer = setup_model_and_optimizer(1, tp, pp) - opt_param_scheduler = None - - mock_args = SimpleNamespace() - with TempNamedDir( - tmp_path_dist_ckpt / "test_non_persistent" - ) as non_persistent_ckpt_dir, mock.patch( - 'megatron.training.checkpointing.get_args', new=lambda: mock_args - ), mock.patch( - "megatron.training.checkpointing.update_num_microbatches" - ): - init_basic_mock_args(mock_args, tp, pp) - init_checkpointing_mock_args(mock_args, non_persistent_ckpt_dir) - mock_args.non_persistent_ckpt_type = "global" - - save_checkpoint( - 2, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - {}, - non_persistent_ckpt=True, - ) - save_checkpoint( - 3, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} - ) - save_checkpoint( - 4, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - {}, - non_persistent_ckpt=True, - ) - iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) - assert iteration == 4 - save_checkpoint( - 6, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} - ) - iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) - assert iteration == 6 - save_checkpoint( - 8, - model, - optimizer, - opt_param_scheduler, - num_floating_point_operations_so_far, - {}, - non_persistent_ckpt=True, - ) - iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) - assert iteration == 8 - assert "iter_0000003" in os.listdir(non_persistent_ckpt_dir) - assert "iter_0000006" in os.listdir(non_persistent_ckpt_dir) - assert "iter_0000002" not in os.listdir( - os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) - ) - assert "iter_0000004" in os.listdir( - os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) - ) - assert "iter_0000008" in os.listdir( - os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) - ) - ckpt_dirs = [ - "iter_0000003", - 
"iter_0000006", - _NON_PERSISTENT_CKPT_SUBDIR + "/iter_0000004", - _NON_PERSISTENT_CKPT_SUBDIR + "/iter_0000008", - ] - for ckpt_a in ckpt_dirs: - for ckpt_b in ckpt_dirs: - for filename in os.listdir(os.path.join(non_persistent_ckpt_dir, ckpt_a)): - if filename != "common.pt" and filename != ".metadata": - assert filecmp.cmp( - os.path.join(non_persistent_ckpt_dir, ckpt_a, filename), - os.path.join(non_persistent_ckpt_dir, ckpt_b, filename), - shallow=False, - ), [filename, ckpt_a, ckpt_b] - Utils.destroy_model_parallel() - - -class TestLegacySaveAndLoad: - @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) - def test_basic_save_load_scenario(self, tmp_path_dist_ckpt, tp, pp): - Utils.initialize_model_parallel(tp, pp) - num_floating_point_operations_so_far = 0 - model, optimizer = setup_model_and_optimizer(1, tp, pp) - opt_param_scheduler = None - - mock_args = SimpleNamespace() - with TempNamedDir(tmp_path_dist_ckpt / "test_legacy") as legacy_ckpt_dir, mock.patch( - 'megatron.training.checkpointing.get_args', new=lambda: mock_args - ), mock.patch("megatron.training.checkpointing.update_num_microbatches"): - init_basic_mock_args(mock_args, tp, pp) - init_checkpointing_mock_args(mock_args, legacy_ckpt_dir) - - save_checkpoint( - 2, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} - ) - iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) - assert iteration == 2 - assert "iter_0000002" in os.listdir(legacy_ckpt_dir) - - Utils.destroy_model_parallel() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import filecmp +import os +from unittest import mock + +import pytest + +from megatron.training.arguments import parse_args +from megatron.training.checkpointing import ( + _NON_PERSISTENT_CKPT_SUBDIR, + load_checkpoint, + save_checkpoint, +) +from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, + setup_model_and_optimizer, +) +from tests.unit_tests.test_utilities import Utils + + +class TestNonPersistentSaveAndLoad: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + def test_basic_save_load_scenarios(self, tmp_path_dist_ckpt, tp, pp): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = parse_args(ignore_unknown_args=True) + with TempNamedDir( + tmp_path_dist_ckpt / "test_non_persistent" + ) as non_persistent_ckpt_dir, mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ): + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, non_persistent_ckpt_dir) + mock_args.non_persistent_ckpt_type = "global" + + save_checkpoint( + 2, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + {}, + non_persistent_ckpt=True, + ) + save_checkpoint( + 3, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} + ) + save_checkpoint( + 4, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + {}, + non_persistent_ckpt=True, + ) + iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) + assert iteration == 4 + save_checkpoint( + 6, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} 
+ ) + iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) + assert iteration == 6 + save_checkpoint( + 8, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + {}, + non_persistent_ckpt=True, + ) + iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) + assert iteration == 8 + assert "iter_0000003" in os.listdir(non_persistent_ckpt_dir) + assert "iter_0000006" in os.listdir(non_persistent_ckpt_dir) + assert "iter_0000002" not in os.listdir( + os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) + ) + assert "iter_0000004" in os.listdir( + os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) + ) + assert "iter_0000008" in os.listdir( + os.path.join(non_persistent_ckpt_dir, _NON_PERSISTENT_CKPT_SUBDIR) + ) + ckpt_dirs = [ + "iter_0000003", + "iter_0000006", + _NON_PERSISTENT_CKPT_SUBDIR + "/iter_0000004", + _NON_PERSISTENT_CKPT_SUBDIR + "/iter_0000008", + ] + for ckpt_a in ckpt_dirs: + for ckpt_b in ckpt_dirs: + for filename in os.listdir(os.path.join(non_persistent_ckpt_dir, ckpt_a)): + if filename != "common.pt" and filename != ".metadata": + assert filecmp.cmp( + os.path.join(non_persistent_ckpt_dir, ckpt_a, filename), + os.path.join(non_persistent_ckpt_dir, ckpt_b, filename), + shallow=False, + ), [filename, ckpt_a, ckpt_b] + Utils.destroy_model_parallel() + + +class TestLegacySaveAndLoad: + @pytest.mark.parametrize(('tp,pp'), [(2, 4)]) + def test_basic_save_load_scenario(self, tmp_path_dist_ckpt, tp, pp): + Utils.initialize_model_parallel(tp, pp) + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + mock_args = parse_args(ignore_unknown_args=True) + with TempNamedDir(tmp_path_dist_ckpt / "test_legacy") as legacy_ckpt_dir, mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch("megatron.training.checkpointing.update_num_microbatches"): + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, legacy_ckpt_dir) + + save_checkpoint( + 2, model, optimizer, opt_param_scheduler, num_floating_point_operations_so_far, {} + ) + iteration, _ = load_checkpoint(model, optimizer, opt_param_scheduler) + assert iteration == 2 + assert "iter_0000002" in os.listdir(legacy_ckpt_dir) + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/test_optimizer.py b/tests/unit_tests/dist_checkpointing/test_optimizer.py index ab43cc4..5909637 100644 --- a/tests/unit_tests/dist_checkpointing/test_optimizer.py +++ b/tests/unit_tests/dist_checkpointing/test_optimizer.py @@ -1,599 +1,602 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-from copy import deepcopy -from functools import partial -from time import sleep -from types import MethodType, SimpleNamespace -from unittest import mock - -import pytest -import torch -from torch.optim import Adam - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing import ( - ShardedTensor, - load, - load_plain_tensors, - load_tensors_metadata, - save, -) -from megatron.core.dist_checkpointing.dict_utils import diff, nested_values -from megatron.core.dist_checkpointing.optimizer import ( - get_param_id_to_sharded_param_map, - optim_state_to_sharding_state, -) -from megatron.core.dist_checkpointing.serialization import get_default_save_sharded_strategy -from megatron.core.dist_checkpointing.strategies.fully_parallel import ( - FullyParallelSaveStrategyWrapper, -) -from megatron.core.dist_checkpointing.utils import extract_sharded_tensors -from megatron.core.tensor_parallel import model_parallel_cuda_manual_seed -from megatron.core.transformer import TransformerConfig -from megatron.core.transformer.mlp import apply_swiglu_sharded_factory -from megatron.training.checkpointing import load_checkpoint, save_checkpoint -from tests.unit_tests.dist_checkpointing import ( - TempNamedDir, - init_basic_mock_args, - init_checkpointing_mock_args, - initialize_gpt_model, - setup_model_and_optimizer, - setup_moe_model_and_optimizer, -) -from tests.unit_tests.test_utilities import Utils - - -class Model(torch.nn.Module): - def __init__(self): - super().__init__() - self.conv = torch.nn.Conv1d(8, 16, 3) - self.proj = torch.nn.Linear(8, 5) - self.config = TransformerConfig(hidden_size=8, num_attention_heads=1, num_layers=1) - - def sharded_state_dict(self): - sharded_state_dict = self.state_dict(keep_vars=True) - # conv - sharded_state_dict['conv.weight'] = ShardedTensor.from_rank_offsets( - 'conv.weight', - sharded_state_dict['conv.weight'], - ( - 1, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ), - ) - # bias is non-sharded - sharded_state_dict['conv.bias'] = ShardedTensor.from_rank_offsets( - 'conv.bias', sharded_state_dict['conv.bias'] - ) - - # proj - sharded_state_dict['proj.weight'] = ShardedTensor.from_rank_offsets( - 'proj.weight', sharded_state_dict['proj.weight'], (0, Utils.rank, Utils.world_size) - ) - sharded_state_dict['proj.bias'] = ShardedTensor.from_rank_offsets( - 'proj.bias', sharded_state_dict['proj.bias'], (0, Utils.rank, Utils.world_size) - ) - return sharded_state_dict - - -class SwigluFactoryModel(torch.nn.Module): - def __init__(self): - super().__init__() - self.linear = torch.nn.Linear( - 5, 64 // parallel_state.get_tensor_model_parallel_world_size(), bias=False - ) - self.config = TransformerConfig(hidden_size=8, num_attention_heads=1, num_layers=1) - - def sharded_state_dict(self): - sharded_state_dict = self.state_dict(keep_vars=True) - sharded_state_dict['linear.weight'] = ShardedTensor.from_rank_offsets( - 'linear.weight', - sharded_state_dict['linear.weight'], - ( - ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ) - ), - replica_id=( - ( - parallel_state.get_pipeline_model_parallel_rank(), - 0, - parallel_state.get_data_parallel_rank(with_context_parallel=True), - ) - ), - ) - sharded_state_dict['linear.weight'] = apply_swiglu_sharded_factory( - sharded_state_dict['linear.weight'], () - ) - return sharded_state_dict - - -class TestOptimizer: - def setup_method(self, method): - pass - - def 
teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_optimizer_params(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(1, 1) - model = Model() - # Force optimizer state initialization - for p in model.parameters(): - p.grad = torch.ones_like(p.data) - optim = Adam(model.parameters()) - optim.step() - - model_state_dict = model.sharded_state_dict() - param_map = get_param_id_to_sharded_param_map( - model_state_dict, optim.param_groups[0]['params'] - ) - optim_state_dict = optim.state_dict() - optim_state_to_sharding_state(optim_state_dict, param_map, exclude_keys=('step',)) - - optim_sharded_tensors = nested_values(extract_sharded_tensors(optim_state_dict)[0]) - optim_sharded_keys = {sh_ten.key for sh_ten in optim_sharded_tensors} - assert len(optim_sharded_keys) == 2 * len(model_state_dict) - assert optim_sharded_keys == set( - [ - f'optimizer.state.{state_key}.{layer_name}' - for state_key in ['exp_avg', 'exp_avg_sq'] - for layer_name in model_state_dict - ] - ) - - -def initialize_small_model(pre_process=True, post_process=True, seed=0, **config_kwargs): - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - - return SwigluFactoryModel() - - -def load_checkpoint_no_arg_checks(*args, **kwargs): - with mock.patch('megatron.training.checkpointing.check_checkpoint_args'): - with mock.patch('megatron.training.checkpointing.update_num_microbatches'): - return load_checkpoint(*args, **kwargs) - - -class TestDistributedOptimizer: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.parametrize("initialize_fn", [initialize_small_model, initialize_gpt_model]) - @pytest.mark.parametrize("use_fpsl", [False, True]) - # TODO: changing DP doesn't work in unit tests because of NCCL crashes - @pytest.mark.parametrize( - "tp_pp,src_dp,dest_dp", - [ - ((4, 1), 2, 2), - # ((1, 1), 8, 1), - # ((1, 1), 1, 8), - # ((2, 1), 2, 1), - # ((2, 1), 2, 2), - ], - ) - def test_dp_sharding(self, tmp_path_dist_ckpt, tp_pp, src_dp, dest_dp, use_fpsl, initialize_fn): - src_world_size = tp_pp[0] * tp_pp[1] * src_dp - dest_world_size = tp_pp[0] * tp_pp[1] * dest_dp - assert src_world_size <= Utils.world_size, (tp_pp, src_dp) - assert dest_world_size <= Utils.world_size, (tp_pp, dest_dp) - - sharding_type = 'fully_sharded_model_space' if use_fpsl else 'dp_zero_gather_scatter' - - Utils.initialize_model_parallel(*tp_pp) - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir(tmp_path_dist_ckpt / 'test_dp_sharding', sync=True) as ckpt_dir: - try: - Utils.set_world_size(src_world_size) - if Utils.rank >= 0: - # Save checkpoint A - model, optimizer_A = setup_model_and_optimizer( - seed=2, tp=tp_pp[0], pp=tp_pp[1], initialize_fn=initialize_fn - ) - - save_strategy = get_default_save_sharded_strategy() - if use_fpsl: - save_strategy = FullyParallelSaveStrategyWrapper( - save_strategy, - parallel_state.get_data_parallel_group(with_context_parallel=True), - True, - ) - save( - optimizer_A.sharded_state_dict( - model[0].sharded_state_dict(), sharding_type=sharding_type - ), - ckpt_dir, - save_strategy, - ) - optim_param_state_A = optimizer_A.get_parameter_state_dp_zero() - Utils.destroy_model_parallel() - else: - # this prevents NCCL errors when changing DP. 
TODO: fix it properly - sleep(20) - - # Load checkpoint A with different TP/PP and save as checkpoint B - Utils.set_world_size(dest_world_size) - if Utils.rank == 0: - print('_____________________') - if Utils.rank >= 0: - Utils.initialize_model_parallel(*tp_pp) - - model, optimizer_B = setup_model_and_optimizer( - seed=3, tp=tp_pp[0], pp=tp_pp[1], initialize_fn=initialize_fn - ) - optim_param_state_B = optimizer_B.get_parameter_state_dp_zero() - diffs = diff(optim_param_state_A, optim_param_state_B) - # Expect a mismatch in values - diffs[2] nonempty - if parallel_state.get_data_parallel_rank(with_context_parallel=True) == 0: - assert not diffs[0] and not diffs[1] and diffs[2], diffs - - sharded_state_dict = optimizer_B.sharded_state_dict( - model[0].sharded_state_dict(), is_loading=True, sharding_type=sharding_type - ) - optim_state_dict = load(sharded_state_dict, ckpt_dir) - optimizer_B.load_state_dict(optim_state_dict) - optim_param_state_B = optimizer_B.get_parameter_state_dp_zero() - - # Test both param state dicts are equal - diffs = diff(optim_param_state_A, optim_param_state_B) - assert not any(map(bool, diffs)), diffs - - else: - # this prevents NCCL errors when changing DP. TODO: fix it properly - sleep(20) - finally: - Utils.set_world_size() - - @pytest.mark.parametrize( - ('src_tp_pp', 'dest_tp_pp', 'use_glu'), - [((2, 2), (2, 4), False), ((1, 8), (4, 1), True), ((2, 4), (4, 2), False)], - ) - def test_finetune_doesnt_load_optimizer( - self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_glu - ): - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - Utils.initialize_model_parallel(*src_tp_pp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_finetune_doesnt_load_optimizer', sync=True - ) as ckpt_dir: - mock_args = SimpleNamespace() - with mock.patch('megatron.training.checkpointing.get_args', new=lambda: mock_args): - init_basic_mock_args(mock_args, tp=src_tp_pp[0], pp=src_tp_pp[1]) - init_checkpointing_mock_args(mock_args, ckpt_dir, False) - - model, optimizer = setup_model_and_optimizer( - seed=2, - tp=src_tp_pp[0], - pp=src_tp_pp[1], - initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), - ) - - save_checkpoint(10, model, optimizer, None, 0) - Utils.destroy_model_parallel() - - Utils.initialize_model_parallel(*dest_tp_pp) - mock_args.tensor_model_parallel_size = dest_tp_pp[0] - mock_args.pipeline_model_parallel_size = dest_tp_pp[1] - model, optimizer = setup_model_and_optimizer( - seed=3, - tp=dest_tp_pp[0], - pp=dest_tp_pp[1], - initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), - ) - model_unloaded_state_dict = deepcopy(model[0].state_dict()) - optim_unloaded_state_dict = deepcopy(optimizer.state_dict()) - - # Load with different TPxPP should raise DistributeOptimizer error - with pytest.raises(RuntimeError) as exc_info: - load_checkpoint_no_arg_checks(model, optimizer, None) - # "(TP, PP) mismatch" check is for backwards compatibility tests - assert "(TP, PP) mismatch" in str( - exc_info.value - ) or "(TP, PP, encoder TP, encoder PP) mismatch" in str(exc_info.value) - - # Check that the state didn't change - assert not any(diff(model[0].state_dict(), model_unloaded_state_dict)) - assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) - - # Now test the same with a `finetune` flag - mock_args.finetune = True - load_checkpoint_no_arg_checks(model, optimizer, None) - - # Model weights should be different, but optimizer state is unchanged - diffs = diff(model[0].state_dict(), model_unloaded_state_dict) - # 
diffs[0] and diffs[1] is structural diff, diffs[2] is values diff - - # we expect only values diff - assert not diffs[0] and not diffs[1] and diffs[2] - assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) - - # ... or `no_load_optim` flag - model, optimizer = setup_model_and_optimizer( - seed=3, - tp=dest_tp_pp[0], - pp=dest_tp_pp[1], - initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), - ) - mock_args.finetune = False - mock_args.no_load_optim = True - mock_args.no_load_rng = True - load_checkpoint_no_arg_checks(model, optimizer, None) - - # Model weights should be different, but optimizer state is unchanged - diffs = diff(model[0].state_dict(), model_unloaded_state_dict) - # diffs[0] and diffs[1] is structural diff, diffs[2] is values diff - - # we expect only values diff - assert not diffs[0] and not diffs[1] and diffs[2] - assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) - - def test_can_load_deprecated_bucket_space_format(self, tmp_path_dist_ckpt): - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - tp = 4 - pp = 2 - - Utils.initialize_model_parallel(tp, pp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_can_load_deprecated_bucket_space_format', sync=True - ) as ckpt_dir: - mock_args = SimpleNamespace() - with mock.patch('megatron.training.checkpointing.get_args', new=lambda: mock_args): - - init_basic_mock_args(mock_args, tp=tp, pp=pp) - init_checkpointing_mock_args(mock_args, ckpt_dir, True) - - model, optimizer = setup_model_and_optimizer( - seed=2, tp=tp, pp=pp, initialize_fn=initialize_gpt_model - ) - - # Mock optimizer sharded_state_dict so that it ignores the externally - # passed sharding_type and uses 'fully_sharded_bucket_space' instead - orig_optim_sharded_state_dict_fn = optimizer.sharded_state_dict - - def sharded_state_dict_bucket_space( - self, *args, sharding_type: str = 'fully_sharded_model_space', **kwargs - ): - return orig_optim_sharded_state_dict_fn( - *args, sharding_type='fully_sharded_bucket_space', **kwargs - ) - - optimizer.sharded_state_dict = MethodType( - sharded_state_dict_bucket_space, optimizer - ) - save_checkpoint(10, model, optimizer, None, 0) - - flag = 0 - key_list = [] - torch.distributed.barrier() - if Utils.rank == 0: - sharded_metadata = load_tensors_metadata(ckpt_dir / 'iter_0000010') - key_list = list(sharded_metadata.keys()) - # Check if actually using `fully_parallel_bucket_space` format. - key = ( - "optimizer.distributed.dp_group_idx_0.gbuf_idx_0.dtype_" - "(torch.bfloat16, torch.bfloat16).bucket_idx_0.exp_avg_sq" - ) - if key in key_list: - flag = 1 - - tensor = torch.tensor([flag], dtype=torch.long, device='cuda') - torch.distributed.broadcast(tensor, 0) - flag = tensor[0].item() - assert flag == 1, key_list - - optimizer.sharded_state_dict = orig_optim_sharded_state_dict_fn - load_checkpoint_no_arg_checks(model, optimizer, None) - - -class TestFP32Optimizer: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.parametrize( - ('src_tp_pp', 'dest_tp_pp'), [((2, 4), (2, 4)), ((2, 4), (4, 2)), ((8, 1), (1, 2))] - ) - def test_fp32_optimizer_resharding(self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp): - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
- - def preprocess_fn(optim_common_dict): - import copy - - preprocessed_optimzier_common_dict = copy.deepcopy(optim_common_dict) - list = preprocessed_optimzier_common_dict['optimizer']['param_groups'] - for dict_item in list: - del dict_item['wd_mult'] - return preprocessed_optimzier_common_dict - - Utils.initialize_model_parallel(*src_tp_pp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=True - ) as ckpt_dir_A: - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=True - ) as ckpt_dir_B: - - model_A, optimizer_A = setup_model_and_optimizer( - seed=2, - tp=src_tp_pp[0], - pp=src_tp_pp[1], - initialize_fn=initialize_small_model, - bf16=False, - ) - - save( - optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), - ckpt_dir_A, - preprocess_common_before_consistancy_check=preprocess_fn, - ) - Utils.destroy_model_parallel() - - # Load checkpoint A with different TP/PP and save as checkpoint B - Utils.initialize_model_parallel(*dest_tp_pp) - model_B, optimizer_B = setup_model_and_optimizer( - seed=3, - tp=dest_tp_pp[0], - pp=dest_tp_pp[1], - initialize_fn=initialize_small_model, - bf16=False, - ) - load_sharded_state_dict = optimizer_B.sharded_state_dict( - model_B[0].sharded_state_dict() - ) - state_dict = load(load_sharded_state_dict, ckpt_dir_A) - - optimizer_B.load_state_dict(state_dict) - save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) - Utils.destroy_model_parallel() - - # Test both checkpoints are equal - Utils.initialize_model_parallel(1, 1) - plain_state_dict_A = load_plain_tensors(ckpt_dir_A) - plain_state_dict_B = load_plain_tensors(ckpt_dir_B) - diffs = diff(plain_state_dict_A, plain_state_dict_B) - assert not any(map(bool, diffs)), diffs - - -class TestOptimizerResharding: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.parametrize( - ('use_dist_opt', 'bf16'), - ( - (False, True), # regular BF16 - (True, True), # DistOpt BF16 - # (False, False), # FP32 - ), - ) - @pytest.mark.parametrize( - ('src_tp_pp', 'dest_tp_pp'), - [((2, 4), (2, 4)), ((2, 4), (2, 2)), ((2, 4), (4, 2)), ((8, 1), (1, 2))], - ) - def test_optimizer_resharding( - self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_dist_opt, bf16 - ): - Utils.initialize_model_parallel(*src_tp_pp) - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=False - ) as ckpt_dir_A: - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=False - ) as ckpt_dir_B: - - model_A, optimizer_A = setup_model_and_optimizer( - seed=2, tp=src_tp_pp[0], pp=src_tp_pp[1], bf16=bf16, dist_opt=use_dist_opt - ) - - save(optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), ckpt_dir_A) - Utils.destroy_model_parallel() - - # Load checkpoint A with different TP/PP and save as checkpoint B - Utils.initialize_model_parallel(*dest_tp_pp) - model_B, optimizer_B = setup_model_and_optimizer( - seed=3, tp=dest_tp_pp[0], pp=dest_tp_pp[1], bf16=bf16, dist_opt=use_dist_opt - ) - load_sharded_state_dict = optimizer_B.sharded_state_dict( - model_B[0].sharded_state_dict() - ) - state_dict = load(load_sharded_state_dict, ckpt_dir_A) - - optimizer_B.load_state_dict(state_dict) - save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) - Utils.destroy_model_parallel() - - # Test both checkpoints are equal - Utils.initialize_model_parallel(1, 1) - plain_state_dict_A = 
load_plain_tensors(ckpt_dir_A) - plain_state_dict_B = load_plain_tensors(ckpt_dir_B) - diffs = diff(plain_state_dict_A, plain_state_dict_B) - assert not any(map(bool, diffs)), diffs - - @pytest.mark.parametrize(('use_dist_opt', 'bf16'), ((True, True),)) # DistOpt BF16 - @pytest.mark.parametrize(('use_te', 'use_grouped_mlp'), ((False, False), (False, True))) - @pytest.mark.parametrize('use_glu', [False, True]) - @pytest.mark.parametrize( - ('src_tp_pp_exp', 'dest_tp_pp_exp'), - [ - ((2, 2, 2), (2, 2, 2)), - ((4, 1, 2), (1, 2, 2)), - ((1, 1, 2), (1, 1, 4)), - ((2, 1, 2), (1, 1, 8)), - ], - ) - def test_chained_optimizer_resharding( - self, - tmp_path_dist_ckpt, - src_tp_pp_exp, - dest_tp_pp_exp, - use_dist_opt, - bf16, - use_te, - use_grouped_mlp, - use_glu, - ): - src_tp, src_pp, src_exp = src_tp_pp_exp - dest_tp, dest_pp, dest_exp = dest_tp_pp_exp - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=False - ) as ckpt_dir_A: - with TempNamedDir( - tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=False - ) as ckpt_dir_B: - Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) - model_A, optimizer_A = setup_moe_model_and_optimizer( - seed=2, - tp=src_tp, - pp=src_pp, - ep=src_exp, - bf16=bf16, - dist_opt=use_dist_opt, - use_te=use_te, - use_grouped_mlp=use_grouped_mlp, - use_glu=use_glu, - ) - - save(optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), ckpt_dir_A) - Utils.destroy_model_parallel() - - # Load checkpoint A with different TP/PP and save as checkpoint B - Utils.initialize_model_parallel( - dest_tp, dest_pp, expert_model_parallel_size=dest_exp - ) - model_B, optimizer_B = setup_moe_model_and_optimizer( - seed=3, - tp=dest_tp, - pp=dest_pp, - ep=dest_exp, - bf16=bf16, - dist_opt=use_dist_opt, - use_te=use_te, - use_grouped_mlp=use_grouped_mlp, - use_glu=use_glu, - ) - load_sharded_state_dict = optimizer_B.sharded_state_dict( - model_B[0].sharded_state_dict() - ) - state_dict = load(load_sharded_state_dict, ckpt_dir_A) - - optimizer_B.load_state_dict(state_dict) - save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) - Utils.destroy_model_parallel() - - # Test both checkpoints are equal - Utils.initialize_model_parallel(1, 1) - plain_state_dict_A = load_plain_tensors(ckpt_dir_A) - plain_state_dict_B = load_plain_tensors(ckpt_dir_B) - diffs = diff(plain_state_dict_A, plain_state_dict_B) - assert not any(map(bool, diffs)), diffs - Utils.destroy_model_parallel() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
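+# Optimizer dist-checkpointing tests: parameter-state sharding, resharding across TP/PP/EP
+# layouts, and the finetune / no-load-optim flags.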
+from copy import deepcopy +from functools import partial +from time import sleep +from unittest import mock + +import pytest +import torch +from torch.optim import Adam + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing import ( + ShardedTensor, + load, + load_plain_tensors, + load_tensors_metadata, + save, +) +from megatron.core.dist_checkpointing.dict_utils import diff, nested_values +from megatron.core.dist_checkpointing.optimizer import ( + get_param_id_to_sharded_param_map, + optim_state_to_sharding_state, +) +from megatron.core.dist_checkpointing.serialization import get_default_save_sharded_strategy +from megatron.core.dist_checkpointing.strategies.fully_parallel import ( + FullyParallelSaveStrategyWrapper, +) +from megatron.core.dist_checkpointing.utils import extract_sharded_tensors +from megatron.core.tensor_parallel import model_parallel_cuda_manual_seed +from megatron.core.transformer import TransformerConfig +from megatron.core.transformer.mlp import apply_swiglu_sharded_factory +from megatron.training.arguments import parse_args +from megatron.training.checkpointing import load_checkpoint, save_checkpoint +from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, + initialize_gpt_model, + setup_model_and_optimizer, + setup_moe_model_and_optimizer, +) +from tests.unit_tests.test_utilities import Utils + + +class Model(torch.nn.Module): + def __init__(self): + super().__init__() + self.conv = torch.nn.Conv1d(8, 16, 3) + self.proj = torch.nn.Linear(8, 5) + self.config = TransformerConfig(hidden_size=8, num_attention_heads=1, num_layers=1) + + def sharded_state_dict(self): + sharded_state_dict = self.state_dict(keep_vars=True) + # conv + sharded_state_dict['conv.weight'] = ShardedTensor.from_rank_offsets( + 'conv.weight', + sharded_state_dict['conv.weight'], + ( + 1, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ), + ) + # bias is non-sharded + sharded_state_dict['conv.bias'] = ShardedTensor.from_rank_offsets( + 'conv.bias', sharded_state_dict['conv.bias'] + ) + + # proj + sharded_state_dict['proj.weight'] = ShardedTensor.from_rank_offsets( + 'proj.weight', sharded_state_dict['proj.weight'], (0, Utils.rank, Utils.world_size) + ) + sharded_state_dict['proj.bias'] = ShardedTensor.from_rank_offsets( + 'proj.bias', sharded_state_dict['proj.bias'], (0, Utils.rank, Utils.world_size) + ) + return sharded_state_dict + + +class SwigluFactoryModel(torch.nn.Module): + def __init__(self): + super().__init__() + self.linear = torch.nn.Linear( + 5, 64 // parallel_state.get_tensor_model_parallel_world_size(), bias=False + ) + self.config = TransformerConfig(hidden_size=8, num_attention_heads=1, num_layers=1) + + def sharded_state_dict(self): + sharded_state_dict = self.state_dict(keep_vars=True) + sharded_state_dict['linear.weight'] = ShardedTensor.from_rank_offsets( + 'linear.weight', + sharded_state_dict['linear.weight'], + ( + ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ) + ), + replica_id=( + ( + parallel_state.get_pipeline_model_parallel_rank(), + 0, + parallel_state.get_data_parallel_rank(with_context_parallel=True), + ) + ), + ) + sharded_state_dict['linear.weight'] = apply_swiglu_sharded_factory( + sharded_state_dict['linear.weight'], () + ) + return sharded_state_dict + + +class Model1dFlattenTensor(torch.nn.Module): + """This model is used to test whether 
a 1d flatten tensor can be correctly + transformed into torch dist-ckpt form + """ + + def __init__(self): + super().__init__() + self.config = TransformerConfig(hidden_size=128, num_attention_heads=1, num_layers=1) + self.weight_1d = torch.nn.Parameter(torch.randn(self.config.hidden_size)) + + def sharded_state_dict(self): + sharded_state_dict = self.state_dict(keep_vars=True) + sharded_state_dict['weight_1d'] = ShardedTensor.from_rank_offsets( + 'weight_1d', + sharded_state_dict['weight_1d'], + ( + ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ) + ), + replica_id=( + ( + parallel_state.get_pipeline_model_parallel_rank(), + 0, + parallel_state.get_data_parallel_rank(with_context_parallel=True), + ) + ), + ) + return sharded_state_dict + + +class TestOptimizer: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_optimizer_params(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(1, 1) + model = Model() + # Force optimizer state initialization + for p in model.parameters(): + p.grad = torch.ones_like(p.data) + optim = Adam(model.parameters()) + optim.step() + + model_state_dict = model.sharded_state_dict() + param_map = get_param_id_to_sharded_param_map( + model_state_dict, optim.param_groups[0]['params'] + ) + optim_state_dict = optim.state_dict() + optim_state_to_sharding_state(optim_state_dict, param_map, exclude_keys=('step',)) + + optim_sharded_tensors = nested_values(extract_sharded_tensors(optim_state_dict)[0]) + optim_sharded_keys = {sh_ten.key for sh_ten in optim_sharded_tensors} + assert len(optim_sharded_keys) == 2 * len(model_state_dict) + assert optim_sharded_keys == set( + [ + f'optimizer.state.{state_key}.{layer_name}' + for state_key in ['exp_avg', 'exp_avg_sq'] + for layer_name in model_state_dict + ] + ) + + +def initialize_small_model(pre_process=True, post_process=True, seed=0, **config_kwargs): + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + + return SwigluFactoryModel() + + +def initialize_1d_flatten_tensor_model( + pre_process=True, post_process=True, seed=0, **config_kwargs +): + # This model is used to test whether a 1d flatten tensor can be correctly + # transformed into torch dist-ckpt form + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + + return Model1dFlattenTensor() + + +def load_checkpoint_no_arg_checks(*args, **kwargs): + with mock.patch('megatron.training.checkpointing.check_checkpoint_args'): + with mock.patch('megatron.training.checkpointing.update_num_microbatches'): + return load_checkpoint(*args, **kwargs) + + +class TestDistributedOptimizer: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize( + "initialize_fn", + [initialize_small_model, initialize_gpt_model, initialize_1d_flatten_tensor_model], + ) + @pytest.mark.parametrize("use_fpsl", [False, True]) + # TODO: changing DP doesn't work in unit tests because of NCCL crashes + @pytest.mark.parametrize( + "tp_pp,src_dp,dest_dp", + [ + ((4, 1), 2, 2), + # ((1, 1), 8, 1), + # ((1, 1), 1, 8), + # ((2, 1), 2, 1), + # ((2, 1), 2, 2), + ], + ) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_dp_sharding(self, tmp_path_dist_ckpt, tp_pp, src_dp, dest_dp, use_fpsl, initialize_fn): + src_world_size = tp_pp[0] * tp_pp[1] * src_dp + dest_world_size = tp_pp[0] * tp_pp[1] * dest_dp + assert src_world_size <= Utils.world_size, 
(tp_pp, src_dp) + assert dest_world_size <= Utils.world_size, (tp_pp, dest_dp) + + sharding_type = 'fully_sharded_model_space' if use_fpsl else 'dp_zero_gather_scatter' + + Utils.initialize_model_parallel(*tp_pp) + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir(tmp_path_dist_ckpt / 'test_dp_sharding', sync=True) as ckpt_dir: + try: + Utils.set_world_size(src_world_size) + if Utils.rank >= 0: + # Save checkpoint A + model, optimizer_A = setup_model_and_optimizer( + seed=2, tp=tp_pp[0], pp=tp_pp[1], initialize_fn=initialize_fn + ) + + save_strategy = get_default_save_sharded_strategy() + if use_fpsl: + save_strategy = FullyParallelSaveStrategyWrapper( + save_strategy, + parallel_state.get_data_parallel_group(with_context_parallel=True), + True, + ) + save( + optimizer_A.sharded_state_dict( + model[0].sharded_state_dict(), sharding_type=sharding_type + ), + ckpt_dir, + save_strategy, + ) + optim_param_state_A = optimizer_A.get_parameter_state_dp_zero() + Utils.destroy_model_parallel() + else: + # this prevents NCCL errors when changing DP. TODO: fix it properly + sleep(20) + + # Load checkpoint A with different TP/PP and save as checkpoint B + Utils.set_world_size(dest_world_size) + if Utils.rank == 0: + print('_____________________') + if Utils.rank >= 0: + Utils.initialize_model_parallel(*tp_pp) + + model, optimizer_B = setup_model_and_optimizer( + seed=3, tp=tp_pp[0], pp=tp_pp[1], initialize_fn=initialize_fn + ) + optim_param_state_B = optimizer_B.get_parameter_state_dp_zero() + diffs = diff(optim_param_state_A, optim_param_state_B) + # Expect a mismatch in values - diffs[2] nonempty + if parallel_state.get_data_parallel_rank(with_context_parallel=True) == 0: + assert not diffs[0] and not diffs[1] and diffs[2], diffs + + sharded_state_dict = optimizer_B.sharded_state_dict( + model[0].sharded_state_dict(), is_loading=True, sharding_type=sharding_type + ) + optim_state_dict = load(sharded_state_dict, ckpt_dir) + optimizer_B.load_state_dict(optim_state_dict) + optim_param_state_B = optimizer_B.get_parameter_state_dp_zero() + + # Test both param state dicts are equal + diffs = diff(optim_param_state_A, optim_param_state_B) + assert not any(map(bool, diffs)), diffs + + else: + # this prevents NCCL errors when changing DP. TODO: fix it properly + sleep(20) + finally: + Utils.set_world_size() + + @pytest.mark.parametrize( + ('src_tp_pp', 'dest_tp_pp', 'use_glu'), + [((2, 2), (2, 4), False), ((1, 8), (4, 1), True), ((2, 4), (4, 2), False)], + ) + def test_finetune_doesnt_load_optimizer( + self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_glu + ): + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
+ Utils.initialize_model_parallel(*src_tp_pp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_finetune_doesnt_load_optimizer', sync=True + ) as ckpt_dir: + mock_args = parse_args(ignore_unknown_args=True) + with mock.patch('megatron.training.checkpointing.get_args', new=lambda: mock_args): + init_basic_mock_args(mock_args, tp=src_tp_pp[0], pp=src_tp_pp[1]) + init_checkpointing_mock_args(mock_args, ckpt_dir, False) + + model, optimizer = setup_model_and_optimizer( + seed=2, + tp=src_tp_pp[0], + pp=src_tp_pp[1], + initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), + ) + + save_checkpoint(10, model, optimizer, None, 0) + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel(*dest_tp_pp) + mock_args.tensor_model_parallel_size = dest_tp_pp[0] + mock_args.pipeline_model_parallel_size = dest_tp_pp[1] + model, optimizer = setup_model_and_optimizer( + seed=3, + tp=dest_tp_pp[0], + pp=dest_tp_pp[1], + initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), + ) + model_unloaded_state_dict = deepcopy(model[0].state_dict()) + optim_unloaded_state_dict = deepcopy(optimizer.state_dict()) + + # Load with different TPxPP should raise DistributeOptimizer error + with pytest.raises(RuntimeError) as exc_info: + load_checkpoint_no_arg_checks(model, optimizer, None) + # "(TP, PP) mismatch" check is for backwards compatibility tests + assert "(TP, PP) mismatch" in str( + exc_info.value + ) or "(TP, PP, encoder TP, encoder PP) mismatch" in str(exc_info.value) + + # Check that the state didn't change + assert not any(diff(model[0].state_dict(), model_unloaded_state_dict)) + assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) + + # Now test the same with a `finetune` flag + mock_args.finetune = True + load_checkpoint_no_arg_checks(model, optimizer, None) + + # Model weights should be different, but optimizer state is unchanged + diffs = diff(model[0].state_dict(), model_unloaded_state_dict) + # diffs[0] and diffs[1] is structural diff, diffs[2] is values diff - + # we expect only values diff + assert not diffs[0] and not diffs[1] and diffs[2] + assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) + + # ... or `no_load_optim` flag + model, optimizer = setup_model_and_optimizer( + seed=3, + tp=dest_tp_pp[0], + pp=dest_tp_pp[1], + initialize_fn=partial(initialize_gpt_model, use_glu=use_glu), + ) + mock_args.finetune = False + mock_args.no_load_optim = True + mock_args.no_load_rng = True + load_checkpoint_no_arg_checks(model, optimizer, None) + + # Model weights should be different, but optimizer state is unchanged + diffs = diff(model[0].state_dict(), model_unloaded_state_dict) + # diffs[0] and diffs[1] is structural diff, diffs[2] is values diff - + # we expect only values diff + assert not diffs[0] and not diffs[1] and diffs[2] + assert not any(diff(optimizer.state_dict(), optim_unloaded_state_dict)) + + +class TestFP32Optimizer: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize( + ('src_tp_pp', 'dest_tp_pp'), [((2, 4), (2, 4)), ((2, 4), (4, 2)), ((8, 1), (1, 2))] + ) + def test_fp32_optimizer_resharding(self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp): + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
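+        # preprocess_fn strips the 'wd_mult' entry from every optimizer param group before the
+        # common state-dict consistency check that save() performs.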
+ + def preprocess_fn(optim_common_dict): + import copy + + preprocessed_optimzier_common_dict = copy.deepcopy(optim_common_dict) + list = preprocessed_optimzier_common_dict['optimizer']['param_groups'] + for dict_item in list: + del dict_item['wd_mult'] + return preprocessed_optimzier_common_dict + + Utils.initialize_model_parallel(*src_tp_pp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=True + ) as ckpt_dir_A: + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=True + ) as ckpt_dir_B: + + model_A, optimizer_A = setup_model_and_optimizer( + seed=2, + tp=src_tp_pp[0], + pp=src_tp_pp[1], + initialize_fn=initialize_small_model, + bf16=False, + ) + + save( + optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), + ckpt_dir_A, + preprocess_common_before_consistancy_check=preprocess_fn, + ) + Utils.destroy_model_parallel() + + # Load checkpoint A with different TP/PP and save as checkpoint B + Utils.initialize_model_parallel(*dest_tp_pp) + model_B, optimizer_B = setup_model_and_optimizer( + seed=3, + tp=dest_tp_pp[0], + pp=dest_tp_pp[1], + initialize_fn=initialize_small_model, + bf16=False, + ) + load_sharded_state_dict = optimizer_B.sharded_state_dict( + model_B[0].sharded_state_dict() + ) + state_dict = load(load_sharded_state_dict, ckpt_dir_A) + + optimizer_B.load_state_dict(state_dict) + save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) + Utils.destroy_model_parallel() + + # Test both checkpoints are equal + Utils.initialize_model_parallel(1, 1) + plain_state_dict_A = load_plain_tensors(ckpt_dir_A) + plain_state_dict_B = load_plain_tensors(ckpt_dir_B) + diffs = diff(plain_state_dict_A, plain_state_dict_B) + assert not any(map(bool, diffs)), diffs + + +class TestOptimizerResharding: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize( + ('use_dist_opt', 'bf16', 'use_custom_fsdp'), + ( + (False, True, False), # regular BF16 + (True, True, False), # DistOpt BF16 + (True, True, True), # DistOpt + custom FSDP BF16 + # (False, False), # FP32 + ), + ) + @pytest.mark.parametrize( + ('src_tp_pp', 'dest_tp_pp'), + [((2, 4), (2, 4)), ((2, 4), (2, 2)), ((2, 4), (4, 2)), ((8, 1), (1, 2))], + ) + def test_optimizer_resharding( + self, tmp_path_dist_ckpt, src_tp_pp, dest_tp_pp, use_dist_opt, bf16, use_custom_fsdp + ): + Utils.initialize_model_parallel(*src_tp_pp) + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=False + ) as ckpt_dir_A: + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=False + ) as ckpt_dir_B: + extra_kwargs = {} + if use_custom_fsdp: + extra_kwargs['use_custom_fsdp'] = True + + model_A, optimizer_A = setup_model_and_optimizer( + seed=2, tp=src_tp_pp[0], pp=src_tp_pp[1], bf16=bf16, dist_opt=use_dist_opt + ) + + save(optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), ckpt_dir_A) + Utils.destroy_model_parallel() + + # Load checkpoint A with different TP/PP and save as checkpoint B + Utils.initialize_model_parallel(*dest_tp_pp) + model_B, optimizer_B = setup_model_and_optimizer( + seed=3, tp=dest_tp_pp[0], pp=dest_tp_pp[1], bf16=bf16, dist_opt=use_dist_opt + ) + load_sharded_state_dict = optimizer_B.sharded_state_dict( + model_B[0].sharded_state_dict() + ) + state_dict = load(load_sharded_state_dict, ckpt_dir_A) + + optimizer_B.load_state_dict(state_dict) + 
save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) + Utils.destroy_model_parallel() + + # Test both checkpoints are equal + Utils.initialize_model_parallel(1, 1) + plain_state_dict_A = load_plain_tensors(ckpt_dir_A) + plain_state_dict_B = load_plain_tensors(ckpt_dir_B) + diffs = diff(plain_state_dict_A, plain_state_dict_B) + assert not any(map(bool, diffs)), diffs + + if use_custom_fsdp and hasattr(torch.nn.parameter.Parameter, "main_grad"): + # Custom fsdp adds the `main_grad` attribute function to the + # torch Parameter, remove this attribute function so that + # it doesn't conflict with the code in the non-custom fsdp + # test branch. + delattr(torch.nn.parameter.Parameter, "main_grad") + + @pytest.mark.parametrize(('use_dist_opt', 'bf16'), ((True, True),)) # DistOpt BF16 + @pytest.mark.parametrize(('use_te', 'use_grouped_mlp'), ((False, False), (False, True))) + @pytest.mark.parametrize('use_glu', [False, True]) + @pytest.mark.parametrize( + ('src_tp_pp_exp', 'dest_tp_pp_exp'), + [ + ((2, 2, 2), (2, 2, 2)), + ((4, 1, 2), (1, 2, 2)), + ((1, 1, 2), (1, 1, 4)), + ((2, 1, 2), (1, 1, 8)), + ], + ) + def test_chained_optimizer_resharding( + self, + tmp_path_dist_ckpt, + src_tp_pp_exp, + dest_tp_pp_exp, + use_dist_opt, + bf16, + use_te, + use_grouped_mlp, + use_glu, + ): + src_tp, src_pp, src_exp = src_tp_pp_exp + dest_tp, dest_pp, dest_exp = dest_tp_pp_exp + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_A', sync=False + ) as ckpt_dir_A: + with TempNamedDir( + tmp_path_dist_ckpt / 'test_fp32_optimizer_state_dict_B', sync=False + ) as ckpt_dir_B: + Utils.initialize_model_parallel(src_tp, src_pp, expert_model_parallel_size=src_exp) + model_A, optimizer_A = setup_moe_model_and_optimizer( + seed=2, + tp=src_tp, + pp=src_pp, + ep=src_exp, + bf16=bf16, + dist_opt=use_dist_opt, + use_te=use_te, + use_grouped_mlp=use_grouped_mlp, + use_glu=use_glu, + ) + + save(optimizer_A.sharded_state_dict(model_A[0].sharded_state_dict()), ckpt_dir_A) + Utils.destroy_model_parallel() + + # Load checkpoint A with different TP/PP and save as checkpoint B + Utils.initialize_model_parallel( + dest_tp, dest_pp, expert_model_parallel_size=dest_exp + ) + model_B, optimizer_B = setup_moe_model_and_optimizer( + seed=3, + tp=dest_tp, + pp=dest_pp, + ep=dest_exp, + bf16=bf16, + dist_opt=use_dist_opt, + use_te=use_te, + use_grouped_mlp=use_grouped_mlp, + use_glu=use_glu, + ) + load_sharded_state_dict = optimizer_B.sharded_state_dict( + model_B[0].sharded_state_dict() + ) + state_dict = load(load_sharded_state_dict, ckpt_dir_A) + + optimizer_B.load_state_dict(state_dict) + save(optimizer_B.sharded_state_dict(model_B[0].sharded_state_dict()), ckpt_dir_B) + Utils.destroy_model_parallel() + + # Test both checkpoints are equal + Utils.initialize_model_parallel(1, 1) + plain_state_dict_A = load_plain_tensors(ckpt_dir_A) + plain_state_dict_B = load_plain_tensors(ckpt_dir_B) + diffs = diff(plain_state_dict_A, plain_state_dict_B) + assert not any(map(bool, diffs)), diffs + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/dist_checkpointing/test_replication.py b/tests/unit_tests/dist_checkpointing/test_replication.py new file mode 100644 index 0000000..a2a570f --- /dev/null +++ b/tests/unit_tests/dist_checkpointing/test_replication.py @@ -0,0 +1,163 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
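+# Module note (added for clarity): these tests cover local non-persistent checkpointing with
+# replication, assuming nvidia_resiliency_ext is available: a batched all-gather over
+# replication groups (GroupWrapper.all_gather_batch) and save/load through
+# LocalCheckpointManager with a CliqueReplicationStrategy, including recovery when some
+# ranks lose their local checkpoint files.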
+import os +from contextlib import contextmanager +from dataclasses import dataclass +from pathlib import Path +from shutil import rmtree +from typing import Any, Dict, List, Optional +from unittest import mock + +import pytest +import torch +import torch.distributed as dist + +from megatron.training.arguments import parse_args + +nvidia_resiliency_ext = pytest.importorskip( + "nvidia_resiliency_ext", + reason="nvidia_resiliency_ext is required for local checkpointing tests", +) + +from nvidia_resiliency_ext.checkpointing.local.ckpt_managers.local_manager import ( + LocalCheckpointManager, +) +from nvidia_resiliency_ext.checkpointing.local.replication.group_utils import GroupWrapper +from nvidia_resiliency_ext.checkpointing.local.replication.strategies import ( + CliqueReplicationStrategy, +) + +from megatron.training.async_utils import maybe_finalize_async_save +from megatron.training.checkpointing import load_checkpoint, save_checkpoint +from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, + setup_model_and_optimizer, +) +from tests.unit_tests.test_utilities import Utils + + +def equal_(a, b): + def bool_generator(): + if isinstance(a, list): + yield isinstance(b, list) + yield len(a) == len(b) + yield all(equal_(aa, bb) for aa, bb in zip(a, b)) + elif isinstance(a, torch.Tensor): + yield isinstance(b, torch.Tensor) + yield torch.equal(a, b) + else: + yield a == b + + return all(bool_generator()) + + +@pytest.mark.parametrize(('tp,pp'), [(2, 4), (1, 1)]) +def test_all_gather_batch(tp, pp): + Utils.initialize_model_parallel(tp, pp) + torch.cuda.set_device(dist.get_rank()) + t0 = torch.arange(4, device="cuda").reshape((2, 2)) + t1 = torch.arange(6, device="cuda").reshape((3, 1, 2)) + t2 = torch.arange(12, device="cuda").reshape((2, 3, 2)) + test_ranks = [0, 3, 7] + test_group = GroupWrapper(dist.new_group(test_ranks)) + rank = dist.get_rank() + if rank not in test_ranks: + dist.barrier() + return + batch = [[t1, t2], [t0], []] + pred_batch = test_group.all_gather_batch(batch[test_group.my_group_rank]) + assert equal_(batch, pred_batch) + dist.barrier() + + +# TODO: Use mock local checkpointing? 
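+
+# Minimal illustrative sketch (hypothetical helper, not exercised by the test suite):
+# expected behaviour of the `equal_` helper above on simple nested inputs.
+def _equal_sketch():
+    # identical nested lists of tensors compare equal
+    assert equal_([torch.zeros(2)], [torch.zeros(2)])
+    # differing tensor values are detected
+    assert not equal_([torch.zeros(2)], [torch.ones(2)])
+    # plain Python scalars fall through to `==`
+    assert equal_([[1, 2], 3], [[1, 2], 3])
+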
+@pytest.mark.parametrize(('tp,pp'), [(2, 4), (1, 1)]) +@pytest.mark.parametrize(('async_save'), [True, False]) +@pytest.mark.parametrize(('algo'), ['atomic', 'fully_parallel']) +@pytest.mark.parametrize( + ("repl_groups"), [[[0, 1], [2, 3], [4, 5], [6, 7]], [[2, 6, 7], [3, 1], [5], [0, 4]]] +) +class TestLocalCheckpointingReplication: + # tp: int + # pp: int + # async_save: bool + # algo: str + # repl_groups: List[List[int]] + # # To be filled by post_init + # checkpointing_context: Optional[Dict[str, LocalCheckpointManager]] + # repl_groups: Optional[List[dist.ProcessGroup]] + # local_ckpt_dir: Optional[Path] + + @contextmanager + def post_init(self, root_tmp_dir, tp, pp, async_save, algo, repl_groups): + Utils.initialize_model_parallel(tp, pp) + + mock_args = parse_args(ignore_unknown_args=True) + with mock.patch( + 'megatron.training.checkpointing.get_args', new=lambda: mock_args + ), mock.patch('megatron.training.async_utils.get_args', new=lambda: mock_args), mock.patch( + "megatron.training.checkpointing.update_num_microbatches" + ): + self.local_ckpt_dir = ( + root_tmp_dir / "subdir" + ) # Test handling of non-existent directories + init_basic_mock_args(mock_args, tp, pp) + init_checkpointing_mock_args(mock_args, None) + mock_args.non_persistent_ckpt_type = 'local' + mock_args.non_persistent_local_ckpt_algo = algo + mock_args.async_save = async_save + repl_groups_init = [dist.new_group(g) for g in repl_groups] + my_process_group = GroupWrapper.from_list_of_groups(repl_groups_init) + repl_strategy = CliqueReplicationStrategy(my_process_group, target_device="cpu") + self.checkpointing_context = { + 'local_checkpoint_manager': LocalCheckpointManager( + self.local_ckpt_dir, repl_strategy=repl_strategy + ) + } + self.local_ckpt_dir /= str(dist.get_rank()) + yield + Utils.destroy_model_parallel() + + def test_repl_save_and_load(self, tmp_dir_per_class, tp, pp, async_save, algo, repl_groups): + with self.post_init(tmp_dir_per_class, tp, pp, async_save, algo, repl_groups): + num_floating_point_operations_so_far = 0 + model, optimizer = setup_model_and_optimizer(1, tp, pp) + opt_param_scheduler = None + + save_checkpoint( + 1, + model, + optimizer, + opt_param_scheduler, + num_floating_point_operations_so_far, + checkpointing_context=self.checkpointing_context, + non_persistent_ckpt=True, + ) + if async_save: + maybe_finalize_async_save(True) + + my_group = [group for group in repl_groups if dist.get_rank() in group][0] + assert {f"iter_0000001_{rank}_local.pt" for rank in my_group} == { + f.name for f in self.local_ckpt_dir.rglob("*") + } + with self.post_init(tmp_dir_per_class, tp, pp, async_save, algo, repl_groups): + + ranks_to_break = [6, 3, 4] + if dist.get_rank() in ranks_to_break: + rmtree(self.local_ckpt_dir) + os.makedirs(self.local_ckpt_dir) + + model, optimizer = setup_model_and_optimizer(2, tp, pp) + opt_param_scheduler = None + + iteration, _ = load_checkpoint( + model, + optimizer, + opt_param_scheduler, + checkpointing_context=self.checkpointing_context, + ) + assert iteration == 1 + # Perform cleanup to ensure no side effects on subsequent tests + torch.distributed.barrier() + rmtree(self.local_ckpt_dir) diff --git a/tests/unit_tests/dist_checkpointing/test_serialization.py b/tests/unit_tests/dist_checkpointing/test_serialization.py index 1f2d229..c860d1d 100644 --- a/tests/unit_tests/dist_checkpointing/test_serialization.py +++ b/tests/unit_tests/dist_checkpointing/test_serialization.py @@ -1,851 +1,852 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import io -import logging -import os - -import numpy as np -import pytest -import torch -from torch.distributed.checkpoint import CheckpointException as PyTCheckpointingException -from torch.distributed.checkpoint import FileSystemReader - -try: - from torch.distributed import DeviceMesh - from torch.distributed._tensor import DTensor - - HAVE_DTENSOR = True -except ImportError: - HAVE_DTENSOR = False - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing import ShardedTensor, load, remove_sharded_tensors, save -from megatron.core.dist_checkpointing.core import CheckpointingException, maybe_load_config -from megatron.core.dist_checkpointing.dict_utils import diff -from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedTensorFactory -from megatron.core.dist_checkpointing.serialization import ( - load_sharded_metadata, - load_tensors_metadata, -) -from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy -from megatron.core.dist_checkpointing.strategies.torch import TorchDistSaveShardedStrategy -from megatron.core.dist_checkpointing.validation import StrictHandling -from megatron.core.utils import is_torch_min_version -from tests.unit_tests.dist_checkpointing import TempNamedDir -from tests.unit_tests.test_utilities import Utils - - -class TestSerialization: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_single_process_save_load(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(1, 1) - - sharded_state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), replica_id=Utils.rank - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), replica_id=Utils.rank - ), - } - - if HAVE_DTENSOR: - mesh = DeviceMesh.from_group( - parallel_state.get_data_parallel_group(with_context_parallel=True), "cuda" - ) - sharded_state_dict['sd_keyD'] = ShardedTensor.from_rank_offsets( - 'keyD', - DTensor.from_local(torch.ones(3, 5, 7), mesh)._local_tensor, - replica_id=Utils.rank, - ) - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
- with TempNamedDir( - tmp_path_dist_ckpt / 'test_single_process_save_load', sync=True - ) as ckpt_dir: - save(sharded_state_dict, ckpt_dir) - torch.distributed.barrier() - - saved_config = maybe_load_config(ckpt_dir) - if saved_config.sharded_backend == 'zarr': - assert (ckpt_dir / 'keyA').is_dir() - assert (ckpt_dir / 'keyB').is_dir() - assert not (ckpt_dir / 'keyC').exists() - assert not (ckpt_dir / 'sd_keyA').is_dir() - - if HAVE_DTENSOR: - assert (ckpt_dir / 'keyD').is_dir() - - load_ssd = { - 'load_sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), replica_id=Utils.rank - ) - } - loaded_state_dict = load(load_ssd, ckpt_dir) - - assert set(loaded_state_dict.keys()) == {'load_sd_keyA'} - assert isinstance(loaded_state_dict['load_sd_keyA'], torch.Tensor) - assert loaded_state_dict['load_sd_keyA'].shape == (2, 4) - - Utils.destroy_model_parallel() - - def test_multi_process_save(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) - ), - 'lr': 0.01, - 'rank': torch.distributed.get_rank(), - } - - def preprocess_fn(x): - del x['rank'] - return x - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir(tmp_path_dist_ckpt / 'test_multi_process_save', sync=True) as ckpt_dir: - save( - state_dict, - ckpt_dir, - validate_access_integrity=True, - preprocess_common_before_consistancy_check=preprocess_fn, - ) - - saved_config = maybe_load_config(ckpt_dir) - if saved_config.sharded_backend == 'zarr': - assert (ckpt_dir / 'keyA').is_dir() - assert (ckpt_dir / 'keyB').is_dir() - assert not (ckpt_dir / 'keyC').exists() - assert not (ckpt_dir / 'sd_keyA').is_dir() - - Utils.destroy_model_parallel() - - def test_multi_process_save_log_difference(self, tmp_path_dist_ckpt, caplog): - Utils.initialize_model_parallel(2, 4) - - state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) - ), - 'rank': torch.distributed.get_rank(), - } - - def preprocess_fn(x): - return x - - with caplog.at_level(logging.WARNING): - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir( - tmp_path_dist_ckpt / 'test_multi_process_save', sync=True - ) as ckpt_dir: - save( - state_dict, - ckpt_dir, - validate_access_integrity=True, - preprocess_common_before_consistancy_check=preprocess_fn, - ) - # pylint: disable=line-too-long - if torch.distributed.get_rank() == 0: - assert ( - "There is difference in the common state dict in different ranks. 
The differences are {1: ([], [], [(('rank',), , )]), 2: ([], [], [(('rank',), , )]), 3: ([], [], [(('rank',), , )]), 4: ([], [], [(('rank',), , )]), 5: ([], [], [(('rank',), , )]), 6: ([], [], [(('rank',), , )]), 7: ([], [], [(('rank',), , )])}" - in caplog.text - ) - - Utils.destroy_model_parallel() - - def test_partition_change_save_load(self, tmp_path_dist_ckpt, strategy=None): - Utils.initialize_model_parallel(2, 4) - - # ten_a: global shape (2, 4): - ten_a_global = torch.tensor([[0, 1, 2, 3], [10, 11, 12, 13]]) - ten_a = ( - torch.zeros(1, 1) - + 10 * parallel_state.get_tensor_model_parallel_rank() - + parallel_state.get_pipeline_model_parallel_rank() - ) - assert ten_a.shape == (1, 1) - - # ten_b: global shape (4, 5, 80), where (x, y, z) is (100x + z) - ten_b = torch.zeros(4, 5, 10) + (torch.arange(10) + 10 * Utils.rank) - ten_b += torch.arange(4).unsqueeze(-1).unsqueeze(-1) * 100 - assert ten_b.shape == (4, 5, 10) - - state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', - ten_a, - ( - 0, - parallel_state.get_tensor_model_parallel_rank(), - parallel_state.get_tensor_model_parallel_world_size(), - ), - ( - 1, - parallel_state.get_pipeline_model_parallel_rank(), - parallel_state.get_pipeline_model_parallel_world_size(), - ), - replica_id=0, - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', ten_b, (2, Utils.rank, Utils.world_size) - ), - } - - ten_a_global_shape = ten_a_global.shape - ten_b_global_shape = (4, 5, 10 * 8) - - assert state_dict['sd_keyA'].local_shape == (1, 1) - assert state_dict['sd_keyA'].global_shape == ten_a_global_shape - assert state_dict['sd_keyB'].global_shape == ten_b_global_shape - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir( - tmp_path_dist_ckpt / 'test_partition_change_save_load', sync=True - ) as ckpt_dir: - save(state_dict, ckpt_dir, strategy) - - del ten_a, ten_b - - # without changing TPxPP, load tensors without any sharding - load_sd = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.empty(ten_a_global_shape), replica_id=Utils.rank - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.empty(ten_b_global_shape), replica_id=Utils.rank - ), - } - loaded_state_dict = load(load_sd, ckpt_dir) - - ten_a = loaded_state_dict['sd_keyA'] - ten_b = loaded_state_dict['sd_keyB'] - assert isinstance(ten_a, torch.Tensor) - assert ten_a.shape == ten_a_global_shape - assert torch.all(ten_a == ten_a_global) - - assert isinstance(ten_b, torch.Tensor) - assert ten_b.shape == ten_b_global_shape - assert np.all( - [ - val == 100 * x + z - for x, x_row in enumerate(ten_b) - for y, y_row in enumerate(x_row) - for z, val in enumerate(y_row) - ] - ) - - del ten_a, ten_b - - # change TPxPP - Utils.destroy_model_parallel() - Utils.initialize_model_parallel(1, 2) - - load_sd = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', - torch.empty(2, 1), - ( - 1, - parallel_state.get_data_parallel_rank(), - parallel_state.get_data_parallel_world_size(), - ), - replica_id=parallel_state.get_pipeline_model_parallel_rank(), - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', - torch.empty(5, 80), - (0, Utils.rank // 2, 4), - prepend_axis_num=1, - replica_id=Utils.rank % 2, - ), - } - - loaded_state_dict = load(load_sd, ckpt_dir) - ten_a = loaded_state_dict['sd_keyA'] - ten_b = loaded_state_dict['sd_keyB'] - - assert isinstance(ten_a, torch.Tensor) - assert ten_a.shape == (2, 1) - assert torch.all( - ten_a[:, 0] == ten_a_global[:, 
parallel_state.get_data_parallel_rank()] - ) - - assert isinstance(ten_b, torch.Tensor) - assert ten_b.shape == (5, 10 * 8) - assert torch.all( - ten_b == torch.arange(80).unsqueeze(0).expand(5, 80) + Utils.rank // 2 * 100 - ) - - def test_load_tensors_metadata(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.arange(10) + Utils.rank * 10, (0, Utils.rank, Utils.world_size) - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) - ), - } - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir(tmp_path_dist_ckpt / 'test_load_tensors_metadata', sync=True) as ckpt_dir: - save(state_dict, ckpt_dir) - - del state_dict - sharded_state_dict = load_tensors_metadata(ckpt_dir) - # loaded dict keys are ShardedTensor keys! - assert 'keyA' in sharded_state_dict - assert 'sd_keyA' not in sharded_state_dict - - # Check metadata - assert sharded_state_dict['keyA'].global_shape == (10 * Utils.world_size,) - assert sharded_state_dict['keyB'].global_shape == (3, 5, 7 * Utils.world_size) - assert sharded_state_dict['keyA'].local_shape == sharded_state_dict['keyA'].global_shape - assert sharded_state_dict['keyB'].local_shape == sharded_state_dict['keyB'].global_shape - assert sharded_state_dict['keyA'].global_offset == (0,) - assert sharded_state_dict['keyB'].global_offset == (0, 0, 0) - assert sharded_state_dict['keyA'].axis_fragmentations == (1,) - assert sharded_state_dict['keyB'].axis_fragmentations == (1, 1, 1) - assert sharded_state_dict['keyA'].replica_id == 0 - assert sharded_state_dict['keyB'].replica_id == 0 - - # metadata dict can be loaded. We don't validate access because there are multiple replica_id=0 - state_dict = load(sharded_state_dict, ckpt_dir, validate_access_integrity=False) - assert torch.all(state_dict['keyA'] == torch.arange(10 * Utils.world_size)) - - Utils.destroy_model_parallel() - - def test_can_mix_sharded_tensors_and_factories(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(1, 1) - - def _build_fn(key, tensor, replica_id, flattened_range): - assert flattened_range is None - return [ - ShardedTensor.from_rank_offsets(key + 'part1', tensor, replica_id=replica_id), - ShardedTensor.from_rank_offsets(key + 'part2', tensor, replica_id=replica_id), - ShardedTensor.from_rank_offsets(key + 'part3', tensor, replica_id=replica_id), - ] - - # state dict can be modified by dist_checkpointing.save, so two copies - def get_sharded_state_dict(base=0): - return { - 'all': [ - ShardedTensor.from_rank_offsets( - 'A', torch.arange(2) + base, replica_id=Utils.rank - ), - ShardedTensor.from_rank_offsets( - 'B', torch.arange(3) + base, replica_id=Utils.rank - ), - ShardedTensor.from_rank_offsets( - 'C', torch.arange(4) + base, replica_id=Utils.rank - ), - ShardedTensorFactory( - 'D', torch.arange(5) + base, _build_fn, sum, replica_id=Utils.rank - ), - ] - } - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
- with TempNamedDir( - tmp_path_dist_ckpt / 'test_can_mix_sharded_tensors_and_factories', sync=True - ) as ckpt_dir: - save(get_sharded_state_dict(0), ckpt_dir) - loaded_state_dict = load(get_sharded_state_dict(10), ckpt_dir) - - expected_sd = { - 'all': [ - torch.arange(2), - torch.arange(3), - torch.arange(4), - torch.arange(5) * 3, # sum of three parts, as specified in merge_fn - ] - } - diffs = diff(loaded_state_dict, expected_sd) - assert not any(map(bool, diffs)), diffs - - Utils.destroy_model_parallel() - - def test_load_error_msg(self, tmp_path_dist_ckpt): - ckpt_dir_name = 'test_load_error_msg' - Utils.initialize_model_parallel(1, 1) - sh_ten = ShardedTensor.from_rank_offsets('keyA', torch.rand(10), replica_id=Utils.rank) - state_dict = {'some_key': sh_ten} - - # Non-existent directory - non_ex_path = f'/tmp/non-existent-path/{ckpt_dir_name}' - with pytest.raises(CheckpointingException) as exc_info: - load(state_dict, non_ex_path) - assert f'directory {non_ex_path} does not exist' in str(exc_info.value) - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir(tmp_path_dist_ckpt / ckpt_dir_name, sync=True) as ckpt_dir: - # Empty directory - not a distributed checkpoint - with pytest.raises(CheckpointingException) as exc_info: - load(state_dict, ckpt_dir) - assert f'is not a distributed checkpoint' in str(exc_info.value) - - # Missing Zarr arrays - torch.distributed.barrier() - save(state_dict, ckpt_dir) - sh_ten.key = 'different_key' - with pytest.raises((CheckpointingException, PyTCheckpointingException)) as exc_info: - load(state_dict, ckpt_dir) - assert "different_key" in str(exc_info.value) - - def test_sharded_object_serialization(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(1, 1) - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. - with TempNamedDir(tmp_path_dist_ckpt / 'test_sh_obj', sync=True) as ckpt_dir: - state = {'some': 'dict'} - state_serialized = io.BytesIO() - torch.save(state, state_serialized) - state_dict = { - 'some_key': ShardedObject( - 'sh_obj_A', state_serialized, (1,), (0,), replica_id=Utils.rank - ) - } - - save(state_dict, ckpt_dir) - del state, state_serialized, state_dict - other_state = {'other': 'dictionary'} - other_serialized = io.BytesIO() - torch.save(other_state, other_serialized) - state_dict = { - 'other_key': ShardedObject( - 'sh_obj_A', other_serialized, (1,), (0,), replica_id=Utils.rank - ) - } - load_state_dict = load(state_dict, ckpt_dir) - assert 'other_key' in load_state_dict - load_state_dict['other_key'].seek(0) - loaded_state = torch.load(load_state_dict['other_key']) - - assert loaded_state == {'some': 'dict'} - - Utils.destroy_model_parallel() - - def test_tensor_shape_mismatch(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - # Global tensor is just a range(32) repeated twice over the first dimension - local_tensor = torch.arange(4).unsqueeze(0).expand(2, 4) + Utils.rank * 4 - - state_dict = { - 'rigid': ShardedTensor.from_rank_offsets( - 'keyA', local_tensor, (1, Utils.rank, Utils.world_size) - ), - 'flexible': ShardedTensor.from_rank_offsets( - 'keyB', local_tensor, (1, Utils.rank, Utils.world_size), allow_shape_mismatch=True - ), - } - assert state_dict['rigid'].global_shape == (2, 32) - assert state_dict['flexible'].global_shape == (2, 32) - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
- with TempNamedDir(tmp_path_dist_ckpt / 'test_tensor_shape_mismatch', sync=True) as ckpt_dir: - save(state_dict, ckpt_dir) - - pp_size = parallel_state.get_pipeline_model_parallel_world_size() - pp_rank = parallel_state.get_pipeline_model_parallel_rank() - tp_rank = parallel_state.get_tensor_model_parallel_rank() - - # Smaller coverage than expected (28 < 32) - state_dict = { - 'rigid': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 7), (1, pp_rank, pp_size), replica_id=tp_rank - ) - } - with pytest.raises((CheckpointingException, PyTCheckpointingException)): - load(state_dict, ckpt_dir) - - state_dict = { - 'flexible': ShardedTensor.from_rank_offsets( - 'keyB', - torch.ones(2, 7), - (1, pp_rank, pp_size), - replica_id=tp_rank, - allow_shape_mismatch=True, - ) - } - loaded_state_dict = load(state_dict, ckpt_dir) - assert torch.all( - loaded_state_dict['flexible'] - == torch.arange(7).unsqueeze(0).expand(2, 7) + pp_rank * 7 - ) - - # Larger coverage than expected (36 > 32) - state_dict = { - 'rigid': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 9), (1, pp_rank, pp_size), replica_id=tp_rank - ) - } - with pytest.raises((CheckpointingException, PyTCheckpointingException)): - load(state_dict, ckpt_dir) - - state_dict = { - 'flexible': ShardedTensor.from_rank_offsets( - 'keyB', - torch.ones(2, 9), - (1, pp_rank, pp_size), - replica_id=tp_rank, - allow_shape_mismatch=True, - ) - } - loaded_state_dict = load(state_dict, ckpt_dir) - expected_tensor = torch.arange(9).unsqueeze(0).expand(2, 9) + pp_rank * 9 - - if pp_rank >= (32 // 9): - assert pp_rank == 3, pp_rank - expected_tensor[:, 5:] = 0 # padding with 0s - assert torch.all(loaded_state_dict['flexible'] == expected_tensor) - - Utils.destroy_model_parallel() - - @pytest.mark.skipif( - not is_torch_min_version("2.3.0"), - reason="remove_sharded_tensors relies on Torch APIs introduced in v2.3.0", - ) - def test_remove_sharded_tensors(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - # Global tensor is just a range(32) repeated twice over the first dimension - global_tensor = torch.arange(4).unsqueeze(0).expand(2, 4) - state_dict = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) - ), - 'sd_prefix_key_to_remove': ShardedTensor.from_rank_offsets( - 'prefix_key_to_remove', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) - ), - } - - prefix_name = "prefix" ## we will drop all tensors whose keys begin with "prefix" - - # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
- with TempNamedDir( - tmp_path_dist_ckpt / 'test_remove_sharded_tensor_prefix', sync=True - ) as ckpt_dir: - save_strategy = TorchDistSaveShardedStrategy( - "torch_dist", 1, separation_hint=prefix_name - ) - save(state_dict, ckpt_dir, save_strategy) - - files = os.listdir(ckpt_dir) - prefix_files = [f for f in files if f.startswith(prefix_name)] - assert len(prefix_files) == torch.distributed.get_world_size() - - fs_reader = FileSystemReader(ckpt_dir) - original_metadata = fs_reader.read_metadata() - assert set(original_metadata.state_dict_metadata.keys()) == { - 'keyA', - 'prefix_key_to_remove', - } - - if torch.distributed.get_rank() == 0: - remove_sharded_tensors(ckpt_dir, key_prefix=prefix_name) - torch.distributed.barrier() - - files = os.listdir(ckpt_dir) - prefix_files = [f for f in files if f.startswith(prefix_name)] - assert len(prefix_files) == 0 - - new_metadata = fs_reader.read_metadata() - assert set(new_metadata.state_dict_metadata.keys()) == {'keyA'} - - Utils.destroy_model_parallel() - - def test_empty_load(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - if Utils.rank == 0: - state_dict = {'common': 'common-value'} - elif Utils.rank == 1: - state_dict = {'a': 3} # this is not saved at all (common saved by rank 0 only) - elif Utils.rank == 2: - state_dict = {'b': 3} # this is not saved at all (common saved by rank 0 only) - else: - state_dict = { - 'a': ShardedTensor.from_rank_offsets( - 'x', torch.ones((2,)) * Utils.rank, replica_id=Utils.rank - 3 - ) - } - - with TempNamedDir(tmp_path_dist_ckpt / 'test_empty_load', sync=True) as ckpt_dir: - save(state_dict, ckpt_dir) - torch.distributed.barrier() - loaded_state_dict = load(state_dict, ckpt_dir) - assert loaded_state_dict['common'] == 'common-value' - - if Utils.rank <= 2: - assert loaded_state_dict.keys() == {'common'} - else: - assert loaded_state_dict.keys() == {'common', 'a'} - loaded_state_dict['a'].cpu().numpy().tolist() == [ - 3, - 3, - ] # rank 3 held the main replica so did the saving - - Utils.destroy_model_parallel() - - -class TestNonStrictLoad: - def setup_method(self, method): - Utils.initialize_model_parallel(2, 4) # doesn't matter for this test - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def _get_base_state_dict(self): - return { - 'TenA': ShardedTensor.from_rank_offsets('TenA', torch.arange(2), replica_id=Utils.rank), - 'TenB': ShardedTensor.from_rank_offsets( - 'TenB', torch.arange(3), (0, Utils.rank, Utils.world_size), replica_id=0 - ), - 'TenC': ShardedTensor.from_rank_offsets( - 'TenC', torch.arange(3), replica_id=Utils.world_size - Utils.rank - 1 - ), - 'ObjA': ShardedObject('ObjA', list(range(10)), (1,), (0,), replica_id=Utils.rank), - 'ObjB': ShardedObject( - 'ObjB', {Utils.rank + 7}, (1, Utils.world_size), (0, Utils.rank), replica_id=0 - ), - } - - @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) - @pytest.mark.parametrize('validate_integrity', [True, False]) - def test_unexpected_keys_handling_during_validation( - self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format - ): - sharded_state_dict = self._get_base_state_dict() - with TempNamedDir( - tmp_path_dist_ckpt / 'test_unexpected_keys_raises_error_during_validation' - ) as ckpt_dir: - save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) - save(sharded_state_dict, ckpt_dir, save_strategy) - - def load_with_flag(strict): - sharded_state_dict = self._get_base_state_dict() - sharded_state_dict['TenD'] = ShardedTensor.from_rank_offsets( - 
'UnexpectedTenD', torch.arange(3), replica_id=Utils.rank - ) - sharded_state_dict['ObjD'] = ShardedObject( - 'UnexpectedObjD', None, (1,), (0,), replica_id=Utils.rank - ) - return load( - sharded_state_dict, - ckpt_dir, - validate_access_integrity=validate_integrity, - strict=strict, - ) - - def test_error(error_msg): - assert 'Unexpected keys' in error_msg - assert 'UnexpectedTenD' in error_msg - assert 'UnexpectedObjD' in error_msg - assert 'Missing keys' not in error_msg - - # ASSUME_OK_UNEXPECTED results in an exception raised by the underlying strategy - with pytest.raises( - PyTCheckpointingException if save_format == 'torch_dist' else CheckpointingException - ) as exc_info: - load_with_flag(StrictHandling.ASSUME_OK_UNEXPECTED) - # Informative exceptions with `RAISE_*` options: - with pytest.raises(CheckpointingException) as exc_info: - load_with_flag(StrictHandling.RAISE_UNEXPECTED) - test_error(str(exc_info.value)) - with pytest.raises(CheckpointingException) as exc_info: - load_with_flag(StrictHandling.RAISE_ALL) - test_error(str(exc_info.value)) - - # Logged mismatches: - with caplog.at_level(logging.WARNING): - loaded_state_dict = load_with_flag(StrictHandling.LOG_UNEXPECTED) - assert 'TenA' in loaded_state_dict - test_error(caplog.text) - with caplog.at_level(logging.WARNING): - loaded_state_dict = load_with_flag(StrictHandling.LOG_ALL) - assert 'TenA' in loaded_state_dict - test_error(caplog.text) - - # Returned mismatches - loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( - StrictHandling.RETURN_UNEXPECTED - ) - assert 'TenA' in loaded_state_dict - assert unexpected_keys == {'UnexpectedTenD', 'UnexpectedObjD'} - assert missing_keys == set() - loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( - StrictHandling.RETURN_ALL - ) - assert 'TenA' in loaded_state_dict - assert unexpected_keys == {'UnexpectedTenD', 'UnexpectedObjD'} - assert missing_keys == set() - - # Ignore mismatch - loaded_state_dict = load_with_flag(StrictHandling.IGNORE_ALL) - assert 'TenA' in loaded_state_dict - - @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) - @pytest.mark.parametrize('validate_integrity', [True, False]) - def test_missing_keys_raises_error_during_validation( - self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format - ): - sharded_state_dict = self._get_base_state_dict() - with TempNamedDir( - tmp_path_dist_ckpt / 'test_missing_keys_raises_error_during_validation' - ) as ckpt_dir: - save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) - save(sharded_state_dict, ckpt_dir, save_strategy) - - def load_with_flag(strict): - sharded_state_dict = self._get_base_state_dict() - del sharded_state_dict['TenA'] - del sharded_state_dict['ObjB'] - return load( - sharded_state_dict, - ckpt_dir, - validate_access_integrity=validate_integrity, - strict=strict, - ) - - def test_error(error_msg): - assert 'Unexpected keys' not in error_msg - assert 'TenA' in error_msg - assert 'ObjB' in error_msg - assert 'Missing keys' in error_msg - - # no mismatch for `*_UNEXPECTED` flag - loaded_state_dict = load_with_flag(StrictHandling.ASSUME_OK_UNEXPECTED) - assert 'TenB' in loaded_state_dict - - loaded_state_dict = load_with_flag(StrictHandling.RAISE_UNEXPECTED) - assert 'TenB' in loaded_state_dict - - with caplog.at_level(logging.WARNING): - loaded_state_dict = load_with_flag(StrictHandling.LOG_UNEXPECTED) - assert ( - caplog.text == '' - or '`zarr` distributed checkpoint backend is deprecated' in caplog.text - ) - assert 'TenB' 
in loaded_state_dict - - loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( - StrictHandling.RETURN_UNEXPECTED - ) - assert 'TenB' in loaded_state_dict - assert missing_keys == set() - assert unexpected_keys == set() - - loaded_state_dict = load_with_flag(StrictHandling.IGNORE_ALL) - assert 'TenB' in loaded_state_dict - - # Informative exceptions with `RAISE_ALL` option: - with pytest.raises(CheckpointingException) as exc_info: - load_with_flag(StrictHandling.RAISE_ALL) - test_error(str(exc_info.value)) - - # Logged mismatches: - with caplog.at_level(logging.WARNING): - loaded_state_dict = load_with_flag(StrictHandling.LOG_ALL) - assert 'TenB' in loaded_state_dict - test_error(caplog.text) - - # Returned mismatches - loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( - StrictHandling.RETURN_ALL - ) - assert 'TenB' in loaded_state_dict - assert unexpected_keys == set() - assert missing_keys == {'TenA', 'ObjB'} - - @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) - @pytest.mark.parametrize('validate_integrity', [True, False]) - def test_exact_load_handling(self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format): - sharded_state_dict = self._get_base_state_dict() - with TempNamedDir(tmp_path_dist_ckpt / 'test_exact_load_handling') as ckpt_dir: - save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) - save(sharded_state_dict, ckpt_dir, save_strategy) - - def load_with_flag(strict): - sharded_state_dict = self._get_base_state_dict() - return load( - sharded_state_dict, - ckpt_dir, - validate_access_integrity=validate_integrity, - strict=strict, - ) - - for strict in ( - StrictHandling.ASSUME_OK_UNEXPECTED, - StrictHandling.LOG_UNEXPECTED, - StrictHandling.LOG_ALL, - StrictHandling.RAISE_UNEXPECTED, - StrictHandling.RAISE_ALL, - StrictHandling.IGNORE_ALL, - ): - with caplog.at_level(logging.WARNING): - loaded_state_dict = load_with_flag(strict) - assert ( - caplog.text == '' - or '`zarr` distributed checkpoint backend is deprecated' in caplog.text - ) - assert 'TenB' in loaded_state_dict - assert 'ObjB' in loaded_state_dict - - for strict in (StrictHandling.RETURN_UNEXPECTED, StrictHandling.RETURN_ALL): - with caplog.at_level(logging.WARNING): - loaded_state_dict, missing_keys, unexpected_keys = load_with_flag(strict) - assert ( - caplog.text == '' - or '`zarr` distributed checkpoint backend is deprecated' in caplog.text - ) - assert 'TenB' in loaded_state_dict - assert 'ObjB' in loaded_state_dict - assert missing_keys == set() - assert unexpected_keys == set() - - @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) - def test_sharded_metadata(self, tmp_path_dist_ckpt, save_format): - - sharded_state_dict = self._get_base_state_dict() - with TempNamedDir(tmp_path_dist_ckpt / 'test_exact_load_handling') as ckpt_dir: - save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) - save(sharded_state_dict, ckpt_dir, save_strategy) - torch.distributed.barrier() - sharded_metadata = load_sharded_metadata(ckpt_dir) - assert set(sh_base.key for sh_base in sharded_metadata.values()) == { - 'TenA', - 'TenB', - 'TenC', - 'ObjA', - 'ObjB', - } - assert set(sharded_metadata.keys()) == { - 'TenA', - 'TenB', - 'TenC', - 'ObjA/shard_0_1', - *(f'ObjB/shard_0.{i}_1.8' for i in range(8)), - } - - loaded_state_dict = load(sharded_metadata, ckpt_dir, validate_access_integrity=False) - - assert loaded_state_dict['ObjA/shard_0_1'] == list(range(10)) - for shard_idx in range(8): - assert 
loaded_state_dict[f'ObjB/shard_0.{shard_idx}_1.8'] == {shard_idx + 7} - assert torch.all(loaded_state_dict['TenA'] == torch.arange(2)) - assert torch.all(loaded_state_dict['TenB'] == torch.arange(3).repeat(8)) - assert torch.all(loaded_state_dict['TenC'] == torch.arange(3)) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import io +import logging +import os + +import numpy as np +import pytest +import torch +from torch.distributed.checkpoint import CheckpointException as PyTCheckpointingException +from torch.distributed.checkpoint import FileSystemReader + +try: + from torch.distributed import DeviceMesh + from torch.distributed._tensor import DTensor + + HAVE_DTENSOR = True +except ImportError: + HAVE_DTENSOR = False + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing import ShardedTensor, load, remove_sharded_tensors, save +from megatron.core.dist_checkpointing.core import CheckpointingException, maybe_load_config +from megatron.core.dist_checkpointing.dict_utils import diff +from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedTensorFactory +from megatron.core.dist_checkpointing.serialization import ( + load_sharded_metadata, + load_tensors_metadata, +) +from megatron.core.dist_checkpointing.strategies.base import StrategyAction, get_default_strategy +from megatron.core.dist_checkpointing.strategies.torch import TorchDistSaveShardedStrategy +from megatron.core.dist_checkpointing.validation import StrictHandling +from megatron.core.utils import is_torch_min_version +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +class TestSerialization: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_single_process_save_load(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(1, 1) + + sharded_state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), replica_id=Utils.rank + ), + } + + if HAVE_DTENSOR: + mesh = DeviceMesh.from_group( + parallel_state.get_data_parallel_group(with_context_parallel=True), "cuda" + ) + sharded_state_dict['sd_keyD'] = ShardedTensor.from_rank_offsets( + 'keyD', + DTensor.from_local(torch.ones(3, 5, 7), mesh)._local_tensor, + replica_id=Utils.rank, + ) + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
+ with TempNamedDir( + tmp_path_dist_ckpt / 'test_single_process_save_load', sync=True + ) as ckpt_dir: + save(sharded_state_dict, ckpt_dir) + torch.distributed.barrier() + + saved_config = maybe_load_config(ckpt_dir) + if saved_config.sharded_backend == 'zarr': + assert (ckpt_dir / 'keyA').is_dir() + assert (ckpt_dir / 'keyB').is_dir() + assert not (ckpt_dir / 'keyC').exists() + assert not (ckpt_dir / 'sd_keyA').is_dir() + + if HAVE_DTENSOR: + assert (ckpt_dir / 'keyD').is_dir() + + load_ssd = { + 'load_sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), replica_id=Utils.rank + ) + } + loaded_state_dict = load(load_ssd, ckpt_dir) + + assert set(loaded_state_dict.keys()) == {'load_sd_keyA'} + assert isinstance(loaded_state_dict['load_sd_keyA'], torch.Tensor) + assert loaded_state_dict['load_sd_keyA'].shape == (2, 4) + + Utils.destroy_model_parallel() + + def test_multi_process_save(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + 'lr': 0.01, + 'rank': torch.distributed.get_rank(), + } + + def preprocess_fn(x): + del x['rank'] + return x + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir(tmp_path_dist_ckpt / 'test_multi_process_save', sync=True) as ckpt_dir: + save( + state_dict, + ckpt_dir, + validate_access_integrity=True, + preprocess_common_before_consistancy_check=preprocess_fn, + ) + + saved_config = maybe_load_config(ckpt_dir) + if saved_config.sharded_backend == 'zarr': + assert (ckpt_dir / 'keyA').is_dir() + assert (ckpt_dir / 'keyB').is_dir() + assert not (ckpt_dir / 'keyC').exists() + assert not (ckpt_dir / 'sd_keyA').is_dir() + + Utils.destroy_model_parallel() + + def test_multi_process_save_log_difference(self, tmp_path_dist_ckpt, caplog): + Utils.initialize_model_parallel(2, 4) + + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + 'rank': torch.distributed.get_rank(), + } + + def preprocess_fn(x): + return x + + with caplog.at_level(logging.WARNING): + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir( + tmp_path_dist_ckpt / 'test_multi_process_save', sync=True + ) as ckpt_dir: + save( + state_dict, + ckpt_dir, + validate_access_integrity=True, + preprocess_common_before_consistancy_check=preprocess_fn, + ) + # pylint: disable=line-too-long + if torch.distributed.get_rank() == 0: + assert ( + "There is difference in the common state dict in different ranks. 
The differences are {1: ([], [], [(('rank',), , )]), 2: ([], [], [(('rank',), , )]), 3: ([], [], [(('rank',), , )]), 4: ([], [], [(('rank',), , )]), 5: ([], [], [(('rank',), , )]), 6: ([], [], [(('rank',), , )]), 7: ([], [], [(('rank',), , )])}" + in caplog.text + ) + + Utils.destroy_model_parallel() + + def test_partition_change_save_load(self, tmp_path_dist_ckpt, strategy=None): + Utils.initialize_model_parallel(2, 4) + + # ten_a: global shape (2, 4): + ten_a_global = torch.tensor([[0, 1, 2, 3], [10, 11, 12, 13]]) + ten_a = ( + torch.zeros(1, 1) + + 10 * parallel_state.get_tensor_model_parallel_rank() + + parallel_state.get_pipeline_model_parallel_rank() + ) + assert ten_a.shape == (1, 1) + + # ten_b: global shape (4, 5, 80), where (x, y, z) is (100x + z) + ten_b = torch.zeros(4, 5, 10) + (torch.arange(10) + 10 * Utils.rank) + ten_b += torch.arange(4).unsqueeze(-1).unsqueeze(-1) * 100 + assert ten_b.shape == (4, 5, 10) + + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', + ten_a, + ( + 0, + parallel_state.get_tensor_model_parallel_rank(), + parallel_state.get_tensor_model_parallel_world_size(), + ), + ( + 1, + parallel_state.get_pipeline_model_parallel_rank(), + parallel_state.get_pipeline_model_parallel_world_size(), + ), + replica_id=0, + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', ten_b, (2, Utils.rank, Utils.world_size) + ), + } + + ten_a_global_shape = ten_a_global.shape + ten_b_global_shape = (4, 5, 10 * 8) + + assert state_dict['sd_keyA'].local_shape == (1, 1) + assert state_dict['sd_keyA'].global_shape == ten_a_global_shape + assert state_dict['sd_keyB'].global_shape == ten_b_global_shape + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir( + tmp_path_dist_ckpt / 'test_partition_change_save_load', sync=True + ) as ckpt_dir: + save(state_dict, ckpt_dir, strategy) + + del ten_a, ten_b + + # without changing TPxPP, load tensors without any sharding + load_sd = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.empty(ten_a_global_shape), replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.empty(ten_b_global_shape), replica_id=Utils.rank + ), + } + loaded_state_dict = load(load_sd, ckpt_dir) + + ten_a = loaded_state_dict['sd_keyA'] + ten_b = loaded_state_dict['sd_keyB'] + assert isinstance(ten_a, torch.Tensor) + assert ten_a.shape == ten_a_global_shape + assert torch.all(ten_a == ten_a_global) + + assert isinstance(ten_b, torch.Tensor) + assert ten_b.shape == ten_b_global_shape + assert np.all( + [ + val == 100 * x + z + for x, x_row in enumerate(ten_b) + for y, y_row in enumerate(x_row) + for z, val in enumerate(y_row) + ] + ) + + del ten_a, ten_b + + # change TPxPP + Utils.destroy_model_parallel() + Utils.initialize_model_parallel(1, 2) + + load_sd = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', + torch.empty(2, 1), + ( + 1, + parallel_state.get_data_parallel_rank(), + parallel_state.get_data_parallel_world_size(), + ), + replica_id=parallel_state.get_pipeline_model_parallel_rank(), + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', + torch.empty(5, 80), + (0, Utils.rank // 2, 4), + prepend_axis_num=1, + replica_id=Utils.rank % 2, + ), + } + + loaded_state_dict = load(load_sd, ckpt_dir) + ten_a = loaded_state_dict['sd_keyA'] + ten_b = loaded_state_dict['sd_keyB'] + + assert isinstance(ten_a, torch.Tensor) + assert ten_a.shape == (2, 1) + assert torch.all( + ten_a[:, 0] == ten_a_global[:, 
parallel_state.get_data_parallel_rank()] + ) + + assert isinstance(ten_b, torch.Tensor) + assert ten_b.shape == (5, 10 * 8) + assert torch.all( + ten_b == torch.arange(80).unsqueeze(0).expand(5, 80) + Utils.rank // 2 * 100 + ) + + def test_load_tensors_metadata(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.arange(10) + Utils.rank * 10, (0, Utils.rank, Utils.world_size) + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + } + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir(tmp_path_dist_ckpt / 'test_load_tensors_metadata', sync=True) as ckpt_dir: + save(state_dict, ckpt_dir) + + del state_dict + sharded_state_dict = load_tensors_metadata(ckpt_dir) + # loaded dict keys are ShardedTensor keys! + assert 'keyA' in sharded_state_dict + assert 'sd_keyA' not in sharded_state_dict + + # Check metadata + assert sharded_state_dict['keyA'].global_shape == (10 * Utils.world_size,) + assert sharded_state_dict['keyB'].global_shape == (3, 5, 7 * Utils.world_size) + assert sharded_state_dict['keyA'].local_shape == sharded_state_dict['keyA'].global_shape + assert sharded_state_dict['keyB'].local_shape == sharded_state_dict['keyB'].global_shape + assert sharded_state_dict['keyA'].global_offset == (0,) + assert sharded_state_dict['keyB'].global_offset == (0, 0, 0) + assert sharded_state_dict['keyA'].axis_fragmentations == (1,) + assert sharded_state_dict['keyB'].axis_fragmentations == (1, 1, 1) + assert sharded_state_dict['keyA'].replica_id == 0 + assert sharded_state_dict['keyB'].replica_id == 0 + + # metadata dict can be loaded. We don't validate access because there are multiple replica_id=0 + state_dict = load(sharded_state_dict, ckpt_dir, validate_access_integrity=False) + assert torch.all(state_dict['keyA'] == torch.arange(10 * Utils.world_size)) + + Utils.destroy_model_parallel() + + def test_can_mix_sharded_tensors_and_factories(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(1, 1) + + def _build_fn(key, tensor, replica_id, flattened_range): + assert flattened_range is None + return [ + ShardedTensor.from_rank_offsets(key + 'part1', tensor, replica_id=replica_id), + ShardedTensor.from_rank_offsets(key + 'part2', tensor, replica_id=replica_id), + ShardedTensor.from_rank_offsets(key + 'part3', tensor, replica_id=replica_id), + ] + + # state dict can be modified by dist_checkpointing.save, so two copies + def get_sharded_state_dict(base=0): + return { + 'all': [ + ShardedTensor.from_rank_offsets( + 'A', torch.arange(2) + base, replica_id=Utils.rank + ), + ShardedTensor.from_rank_offsets( + 'B', torch.arange(3) + base, replica_id=Utils.rank + ), + ShardedTensor.from_rank_offsets( + 'C', torch.arange(4) + base, replica_id=Utils.rank + ), + ShardedTensorFactory( + 'D', torch.arange(5) + base, _build_fn, sum, replica_id=Utils.rank + ), + ] + } + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
+ with TempNamedDir( + tmp_path_dist_ckpt / 'test_can_mix_sharded_tensors_and_factories', sync=True + ) as ckpt_dir: + save(get_sharded_state_dict(0), ckpt_dir) + loaded_state_dict = load(get_sharded_state_dict(10), ckpt_dir) + + expected_sd = { + 'all': [ + torch.arange(2), + torch.arange(3), + torch.arange(4), + torch.arange(5) * 3, # sum of three parts, as specified in merge_fn + ] + } + diffs = diff(loaded_state_dict, expected_sd) + assert not any(map(bool, diffs)), diffs + + Utils.destroy_model_parallel() + + def test_load_error_msg(self, tmp_path_dist_ckpt): + ckpt_dir_name = 'test_load_error_msg' + Utils.initialize_model_parallel(1, 1) + sh_ten = ShardedTensor.from_rank_offsets('keyA', torch.rand(10), replica_id=Utils.rank) + state_dict = {'some_key': sh_ten} + + # Non-existent directory + non_ex_path = f'/tmp/non-existent-path/{ckpt_dir_name}' + with pytest.raises(CheckpointingException) as exc_info: + load(state_dict, non_ex_path) + assert f'directory {non_ex_path} does not exist' in str(exc_info.value) + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir(tmp_path_dist_ckpt / ckpt_dir_name, sync=True) as ckpt_dir: + # Empty directory - not a distributed checkpoint + with pytest.raises(CheckpointingException) as exc_info: + load(state_dict, ckpt_dir) + assert f'is not a distributed checkpoint' in str(exc_info.value) + + # Missing Zarr arrays + torch.distributed.barrier() + save(state_dict, ckpt_dir) + sh_ten.key = 'different_key' + with pytest.raises((CheckpointingException, PyTCheckpointingException)) as exc_info: + load(state_dict, ckpt_dir) + assert "different_key" in str(exc_info.value) + + def test_sharded_object_serialization(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(1, 1) + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. + with TempNamedDir(tmp_path_dist_ckpt / 'test_sh_obj', sync=True) as ckpt_dir: + state = {'some': 'dict'} + state_serialized = io.BytesIO() + torch.save(state, state_serialized) + state_dict = { + 'some_key': ShardedObject( + 'sh_obj_A', state_serialized, (1,), (0,), replica_id=Utils.rank + ) + } + + save(state_dict, ckpt_dir) + del state, state_serialized, state_dict + other_state = {'other': 'dictionary'} + other_serialized = io.BytesIO() + torch.save(other_state, other_serialized) + state_dict = { + 'other_key': ShardedObject( + 'sh_obj_A', other_serialized, (1,), (0,), replica_id=Utils.rank + ) + } + load_state_dict = load(state_dict, ckpt_dir) + assert 'other_key' in load_state_dict + load_state_dict['other_key'].seek(0) + loaded_state = torch.load(load_state_dict['other_key']) + + assert loaded_state == {'some': 'dict'} + + Utils.destroy_model_parallel() + + def test_tensor_shape_mismatch(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + # Global tensor is just a range(32) repeated twice over the first dimension + local_tensor = torch.arange(4).unsqueeze(0).expand(2, 4) + Utils.rank * 4 + + state_dict = { + 'rigid': ShardedTensor.from_rank_offsets( + 'keyA', local_tensor, (1, Utils.rank, Utils.world_size) + ), + 'flexible': ShardedTensor.from_rank_offsets( + 'keyB', local_tensor, (1, Utils.rank, Utils.world_size), allow_shape_mismatch=True + ), + } + assert state_dict['rigid'].global_shape == (2, 32) + assert state_dict['flexible'].global_shape == (2, 32) + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
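+        # Descriptive note (added for clarity): the assertions below show that loading with a
+        # smaller or larger local shard than was saved fails for a regular ShardedTensor,
+        # whereas allow_shape_mismatch=True accepts the mismatch and zero-fills positions
+        # beyond the saved global shape.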
+ with TempNamedDir(tmp_path_dist_ckpt / 'test_tensor_shape_mismatch', sync=True) as ckpt_dir: + save(state_dict, ckpt_dir) + + pp_size = parallel_state.get_pipeline_model_parallel_world_size() + pp_rank = parallel_state.get_pipeline_model_parallel_rank() + tp_rank = parallel_state.get_tensor_model_parallel_rank() + + # Smaller coverage than expected (28 < 32) + state_dict = { + 'rigid': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 7), (1, pp_rank, pp_size), replica_id=tp_rank + ) + } + with pytest.raises((CheckpointingException, PyTCheckpointingException)): + load(state_dict, ckpt_dir) + + state_dict = { + 'flexible': ShardedTensor.from_rank_offsets( + 'keyB', + torch.ones(2, 7), + (1, pp_rank, pp_size), + replica_id=tp_rank, + allow_shape_mismatch=True, + ) + } + loaded_state_dict = load(state_dict, ckpt_dir) + assert torch.all( + loaded_state_dict['flexible'] + == torch.arange(7).unsqueeze(0).expand(2, 7) + pp_rank * 7 + ) + + # Larger coverage than expected (36 > 32) + state_dict = { + 'rigid': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 9), (1, pp_rank, pp_size), replica_id=tp_rank + ) + } + with pytest.raises((CheckpointingException, PyTCheckpointingException)): + load(state_dict, ckpt_dir) + + state_dict = { + 'flexible': ShardedTensor.from_rank_offsets( + 'keyB', + torch.ones(2, 9), + (1, pp_rank, pp_size), + replica_id=tp_rank, + allow_shape_mismatch=True, + ) + } + loaded_state_dict = load(state_dict, ckpt_dir) + expected_tensor = torch.arange(9).unsqueeze(0).expand(2, 9) + pp_rank * 9 + + if pp_rank >= (32 // 9): + assert pp_rank == 3, pp_rank + expected_tensor[:, 5:] = 0 # padding with 0s + assert torch.all(loaded_state_dict['flexible'] == expected_tensor) + + Utils.destroy_model_parallel() + + @pytest.mark.skipif( + not is_torch_min_version("2.3.0"), + reason="remove_sharded_tensors relies on Torch APIs introduced in v2.3.0", + ) + @pytest.mark.flaky_in_dev + def test_remove_sharded_tensors(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + # Global tensor is just a range(32) repeated twice over the first dimension + global_tensor = torch.arange(4).unsqueeze(0).expand(2, 4) + state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), (0, Utils.rank, Utils.world_size) + ), + 'sd_prefix_key_to_remove': ShardedTensor.from_rank_offsets( + 'prefix_key_to_remove', torch.ones(3, 5, 7), (2, Utils.rank, Utils.world_size) + ), + } + + prefix_name = "prefix" ## we will drop all tensors whose keys begin with "prefix" + + # sync=True to make sure other ranks wait for rank 0 to finish creating directory. 
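+        # Descriptive note (added for clarity): saving with
+        # TorchDistSaveShardedStrategy(separation_hint=prefix) writes tensors whose keys start
+        # with the prefix into separate per-rank files, which remove_sharded_tensors can then
+        # drop from the checkpoint and its metadata without touching the remaining tensors.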
+ with TempNamedDir( + tmp_path_dist_ckpt / 'test_remove_sharded_tensor_prefix', sync=True + ) as ckpt_dir: + save_strategy = TorchDistSaveShardedStrategy( + "torch_dist", 1, separation_hint=prefix_name + ) + save(state_dict, ckpt_dir, save_strategy) + + files = os.listdir(ckpt_dir) + prefix_files = [f for f in files if f.startswith(prefix_name)] + assert len(prefix_files) == torch.distributed.get_world_size() + + fs_reader = FileSystemReader(ckpt_dir) + original_metadata = fs_reader.read_metadata() + assert set(original_metadata.state_dict_metadata.keys()) == { + 'keyA', + 'prefix_key_to_remove', + } + + if torch.distributed.get_rank() == 0: + remove_sharded_tensors(ckpt_dir, key_prefix=prefix_name) + torch.distributed.barrier() + + files = os.listdir(ckpt_dir) + prefix_files = [f for f in files if f.startswith(prefix_name)] + assert len(prefix_files) == 0 + + new_metadata = fs_reader.read_metadata() + assert set(new_metadata.state_dict_metadata.keys()) == {'keyA'} + + Utils.destroy_model_parallel() + + def test_empty_load(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + if Utils.rank == 0: + state_dict = {'common': 'common-value'} + elif Utils.rank == 1: + state_dict = {'a': 3} # this is not saved at all (common saved by rank 0 only) + elif Utils.rank == 2: + state_dict = {'b': 3} # this is not saved at all (common saved by rank 0 only) + else: + state_dict = { + 'a': ShardedTensor.from_rank_offsets( + 'x', torch.ones((2,)) * Utils.rank, replica_id=Utils.rank - 3 + ) + } + + with TempNamedDir(tmp_path_dist_ckpt / 'test_empty_load', sync=True) as ckpt_dir: + save(state_dict, ckpt_dir) + torch.distributed.barrier() + loaded_state_dict = load(state_dict, ckpt_dir) + assert loaded_state_dict['common'] == 'common-value' + + if Utils.rank <= 2: + assert loaded_state_dict.keys() == {'common'} + else: + assert loaded_state_dict.keys() == {'common', 'a'} + loaded_state_dict['a'].cpu().numpy().tolist() == [ + 3, + 3, + ] # rank 3 held the main replica so did the saving + + Utils.destroy_model_parallel() + + +class TestNonStrictLoad: + def setup_method(self, method): + Utils.initialize_model_parallel(2, 4) # doesn't matter for this test + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def _get_base_state_dict(self): + return { + 'TenA': ShardedTensor.from_rank_offsets('TenA', torch.arange(2), replica_id=Utils.rank), + 'TenB': ShardedTensor.from_rank_offsets( + 'TenB', torch.arange(3), (0, Utils.rank, Utils.world_size), replica_id=0 + ), + 'TenC': ShardedTensor.from_rank_offsets( + 'TenC', torch.arange(3), replica_id=Utils.world_size - Utils.rank - 1 + ), + 'ObjA': ShardedObject('ObjA', list(range(10)), (1,), (0,), replica_id=Utils.rank), + 'ObjB': ShardedObject( + 'ObjB', {Utils.rank + 7}, (1, Utils.world_size), (0, Utils.rank), replica_id=0 + ), + } + + @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) + @pytest.mark.parametrize('validate_integrity', [True, False]) + def test_unexpected_keys_handling_during_validation( + self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format + ): + sharded_state_dict = self._get_base_state_dict() + with TempNamedDir( + tmp_path_dist_ckpt / 'test_unexpected_keys_raises_error_during_validation' + ) as ckpt_dir: + save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) + save(sharded_state_dict, ckpt_dir, save_strategy) + + def load_with_flag(strict): + sharded_state_dict = self._get_base_state_dict() + sharded_state_dict['TenD'] = ShardedTensor.from_rank_offsets( + 
'UnexpectedTenD', torch.arange(3), replica_id=Utils.rank + ) + sharded_state_dict['ObjD'] = ShardedObject( + 'UnexpectedObjD', None, (1,), (0,), replica_id=Utils.rank + ) + return load( + sharded_state_dict, + ckpt_dir, + validate_access_integrity=validate_integrity, + strict=strict, + ) + + def test_error(error_msg): + assert 'Unexpected keys' in error_msg + assert 'UnexpectedTenD' in error_msg + assert 'UnexpectedObjD' in error_msg + assert 'Missing keys' not in error_msg + + # ASSUME_OK_UNEXPECTED results in an exception raised by the underlying strategy + with pytest.raises( + PyTCheckpointingException if save_format == 'torch_dist' else CheckpointingException + ) as exc_info: + load_with_flag(StrictHandling.ASSUME_OK_UNEXPECTED) + # Informative exceptions with `RAISE_*` options: + with pytest.raises(CheckpointingException) as exc_info: + load_with_flag(StrictHandling.RAISE_UNEXPECTED) + test_error(str(exc_info.value)) + with pytest.raises(CheckpointingException) as exc_info: + load_with_flag(StrictHandling.RAISE_ALL) + test_error(str(exc_info.value)) + + # Logged mismatches: + with caplog.at_level(logging.WARNING): + loaded_state_dict = load_with_flag(StrictHandling.LOG_UNEXPECTED) + assert 'TenA' in loaded_state_dict + test_error(caplog.text) + with caplog.at_level(logging.WARNING): + loaded_state_dict = load_with_flag(StrictHandling.LOG_ALL) + assert 'TenA' in loaded_state_dict + test_error(caplog.text) + + # Returned mismatches + loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( + StrictHandling.RETURN_UNEXPECTED + ) + assert 'TenA' in loaded_state_dict + assert unexpected_keys == {'UnexpectedTenD', 'UnexpectedObjD'} + assert missing_keys == set() + loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( + StrictHandling.RETURN_ALL + ) + assert 'TenA' in loaded_state_dict + assert unexpected_keys == {'UnexpectedTenD', 'UnexpectedObjD'} + assert missing_keys == set() + + # Ignore mismatch + loaded_state_dict = load_with_flag(StrictHandling.IGNORE_ALL) + assert 'TenA' in loaded_state_dict + + @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) + @pytest.mark.parametrize('validate_integrity', [True, False]) + def test_missing_keys_raises_error_during_validation( + self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format + ): + sharded_state_dict = self._get_base_state_dict() + with TempNamedDir( + tmp_path_dist_ckpt / 'test_missing_keys_raises_error_during_validation' + ) as ckpt_dir: + save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) + save(sharded_state_dict, ckpt_dir, save_strategy) + + def load_with_flag(strict): + sharded_state_dict = self._get_base_state_dict() + del sharded_state_dict['TenA'] + del sharded_state_dict['ObjB'] + return load( + sharded_state_dict, + ckpt_dir, + validate_access_integrity=validate_integrity, + strict=strict, + ) + + def test_error(error_msg): + assert 'Unexpected keys' not in error_msg + assert 'TenA' in error_msg + assert 'ObjB' in error_msg + assert 'Missing keys' in error_msg + + # no mismatch for `*_UNEXPECTED` flag + loaded_state_dict = load_with_flag(StrictHandling.ASSUME_OK_UNEXPECTED) + assert 'TenB' in loaded_state_dict + + loaded_state_dict = load_with_flag(StrictHandling.RAISE_UNEXPECTED) + assert 'TenB' in loaded_state_dict + + with caplog.at_level(logging.WARNING): + loaded_state_dict = load_with_flag(StrictHandling.LOG_UNEXPECTED) + assert ( + caplog.text == '' + or '`zarr` distributed checkpoint backend is deprecated' in caplog.text + ) + assert 'TenB' 
in loaded_state_dict + + loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( + StrictHandling.RETURN_UNEXPECTED + ) + assert 'TenB' in loaded_state_dict + assert missing_keys == set() + assert unexpected_keys == set() + + loaded_state_dict = load_with_flag(StrictHandling.IGNORE_ALL) + assert 'TenB' in loaded_state_dict + + # Informative exceptions with `RAISE_ALL` option: + with pytest.raises(CheckpointingException) as exc_info: + load_with_flag(StrictHandling.RAISE_ALL) + test_error(str(exc_info.value)) + + # Logged mismatches: + with caplog.at_level(logging.WARNING): + loaded_state_dict = load_with_flag(StrictHandling.LOG_ALL) + assert 'TenB' in loaded_state_dict + test_error(caplog.text) + + # Returned mismatches + loaded_state_dict, missing_keys, unexpected_keys = load_with_flag( + StrictHandling.RETURN_ALL + ) + assert 'TenB' in loaded_state_dict + assert unexpected_keys == set() + assert missing_keys == {'TenA', 'ObjB'} + + @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) + @pytest.mark.parametrize('validate_integrity', [True, False]) + def test_exact_load_handling(self, caplog, tmp_path_dist_ckpt, validate_integrity, save_format): + sharded_state_dict = self._get_base_state_dict() + with TempNamedDir(tmp_path_dist_ckpt / 'test_exact_load_handling') as ckpt_dir: + save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) + save(sharded_state_dict, ckpt_dir, save_strategy) + + def load_with_flag(strict): + sharded_state_dict = self._get_base_state_dict() + return load( + sharded_state_dict, + ckpt_dir, + validate_access_integrity=validate_integrity, + strict=strict, + ) + + for strict in ( + StrictHandling.ASSUME_OK_UNEXPECTED, + StrictHandling.LOG_UNEXPECTED, + StrictHandling.LOG_ALL, + StrictHandling.RAISE_UNEXPECTED, + StrictHandling.RAISE_ALL, + StrictHandling.IGNORE_ALL, + ): + with caplog.at_level(logging.WARNING): + loaded_state_dict = load_with_flag(strict) + assert ( + caplog.text == '' + or '`zarr` distributed checkpoint backend is deprecated' in caplog.text + ) + assert 'TenB' in loaded_state_dict + assert 'ObjB' in loaded_state_dict + + for strict in (StrictHandling.RETURN_UNEXPECTED, StrictHandling.RETURN_ALL): + with caplog.at_level(logging.WARNING): + loaded_state_dict, missing_keys, unexpected_keys = load_with_flag(strict) + assert ( + caplog.text == '' + or '`zarr` distributed checkpoint backend is deprecated' in caplog.text + ) + assert 'TenB' in loaded_state_dict + assert 'ObjB' in loaded_state_dict + assert missing_keys == set() + assert unexpected_keys == set() + + @pytest.mark.parametrize('save_format', ['zarr', 'torch_dist']) + def test_sharded_metadata(self, tmp_path_dist_ckpt, save_format): + + sharded_state_dict = self._get_base_state_dict() + with TempNamedDir(tmp_path_dist_ckpt / 'test_exact_load_handling') as ckpt_dir: + save_strategy = get_default_strategy(StrategyAction.SAVE_SHARDED, save_format, 1) + save(sharded_state_dict, ckpt_dir, save_strategy) + torch.distributed.barrier() + sharded_metadata = load_sharded_metadata(ckpt_dir) + assert set(sh_base.key for sh_base in sharded_metadata.values()) == { + 'TenA', + 'TenB', + 'TenC', + 'ObjA', + 'ObjB', + } + assert set(sharded_metadata.keys()) == { + 'TenA', + 'TenB', + 'TenC', + 'ObjA/shard_0_1', + *(f'ObjB/shard_0.{i}_1.8' for i in range(8)), + } + + loaded_state_dict = load(sharded_metadata, ckpt_dir, validate_access_integrity=False) + + assert loaded_state_dict['ObjA/shard_0_1'] == list(range(10)) + for shard_idx in range(8): + assert 
loaded_state_dict[f'ObjB/shard_0.{shard_idx}_1.8'] == {shard_idx + 7} + assert torch.all(loaded_state_dict['TenA'] == torch.arange(2)) + assert torch.all(loaded_state_dict['TenB'] == torch.arange(3).repeat(8)) + assert torch.all(loaded_state_dict['TenC'] == torch.arange(3)) diff --git a/tests/unit_tests/dist_checkpointing/test_cached_metadata.py b/tests/unit_tests/dist_checkpointing/test_torch_dist.py similarity index 72% rename from tests/unit_tests/dist_checkpointing/test_cached_metadata.py rename to tests/unit_tests/dist_checkpointing/test_torch_dist.py index 2733ea7..ecc84ca 100644 --- a/tests/unit_tests/dist_checkpointing/test_cached_metadata.py +++ b/tests/unit_tests/dist_checkpointing/test_torch_dist.py @@ -1,90 +1,124 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import pickle -from copy import deepcopy -from dataclasses import fields - -import torch - -from megatron.core.dist_checkpointing import ShardedTensor, load, save -from megatron.core.dist_checkpointing.dict_utils import diff -from megatron.core.dist_checkpointing.serialization import get_default_save_sharded_strategy -from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue -from tests.unit_tests.dist_checkpointing import TempNamedDir -from tests.unit_tests.test_utilities import Utils - - -class TestCachedMetadata: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_cached_metadata(self, tmp_path_dist_ckpt): - Utils.initialize_model_parallel(2, 4) - - sharded_state_dict_non_cached = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), replica_id=Utils.rank - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 - ), - } - - sharded_state_dict_cached = { - 'sd_keyA': ShardedTensor.from_rank_offsets( - 'keyA', torch.ones(2, 4), replica_id=Utils.rank - ), - 'sd_keyB': ShardedTensor.from_rank_offsets( - 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 - ), - } - - loaded_non_cached, loaded_cached = None, None - md_non_cached, md_cached = None, None - with TempNamedDir(tmp_path_dist_ckpt / 'ckpt_dir') as ckpt_dir: - save(sharded_state_dict_non_cached, ckpt_dir, async_sharded_save=False) - loaded_non_cached = load(sharded_state_dict_non_cached, ckpt_dir) - md_path = ckpt_dir / '.metadata' - with md_path.open('rb') as f: - md_non_cached = pickle.load(f) - - save_strategy = deepcopy(get_default_save_sharded_strategy()) - save_strategy.use_cached_ckpt_structure = True - # Run over 3 iterations with cached metadata enabled - # The 3rd iteration will run with cached metadata - # `ckpt_dir` at the 3rd iteration 2 will be maintained for comparison - ckpt_dir = None - for i in range(3): - ckpt_dir = TempNamedDir(tmp_path_dist_ckpt / f'ckpt_dir_${i}_cached') - save( - sharded_state_dict_cached, - ckpt_dir.__enter__(), - save_strategy, - async_sharded_save=False, - ) - if i < 2: - ckpt_dir.cleanup() - loaded_cached = load(sharded_state_dict_cached, ckpt_dir.__enter__()) - md_path = ckpt_dir.__enter__() / '.metadata' - - with md_path.open('rb') as f: - md_cached = pickle.load(f) - - # Check loaded state dict - diffs = diff(loaded_non_cached, loaded_cached) - - assert not any( - len(x) for x in diffs - ), 'Cached metadata doesn\'t produce the same state_dict in loading' - # Check metadata recorded in .metadata, torch.distributed.metadata.Metadata - for field in fields(md_non_cached): - if field.name 
not in ['storage_data', 'storage_meta']: - diffs = diff(getattr(md_non_cached, field.name), getattr(md_cached, field.name)) - assert not any( - len(x) for x in diffs - ), f'{field.name} is different in metadata from non-cached, cached metadata impls' - ckpt_dir.cleanup() - Utils.destroy_model_parallel() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Tests for PyTorch DCP based checkpoint format. """ + +import pickle +from copy import deepcopy +from dataclasses import fields + +import torch + +from megatron.core.dist_checkpointing import ShardedTensor, load, save +from megatron.core.dist_checkpointing.dict_utils import diff +from megatron.core.dist_checkpointing.serialization import get_default_save_sharded_strategy +from megatron.core.dist_checkpointing.strategies.async_utils import AsyncCallsQueue +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +class TestCachedMetadata: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_cached_metadata(self, tmp_path_dist_ckpt): + Utils.initialize_model_parallel(2, 4) + + sharded_state_dict_non_cached = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 + ), + } + + sharded_state_dict_cached = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', torch.ones(2, 4), replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', torch.ones(3, 5, 7), replica_id=Utils.world_size - Utils.rank - 1 + ), + } + + loaded_non_cached, loaded_cached = None, None + md_non_cached, md_cached = None, None + with TempNamedDir(tmp_path_dist_ckpt / 'ckpt_dir') as ckpt_dir: + save(sharded_state_dict_non_cached, ckpt_dir, async_sharded_save=False) + loaded_non_cached = load(sharded_state_dict_non_cached, ckpt_dir) + md_path = ckpt_dir / '.metadata' + with md_path.open('rb') as f: + md_non_cached = pickle.load(f) + + save_strategy = deepcopy(get_default_save_sharded_strategy()) + save_strategy.use_cached_ckpt_structure = True + # Run over 3 iterations with cached metadata enabled + # The 3rd iteration will run with cached metadata + # `ckpt_dir` at the 3rd iteration 2 will be maintained for comparison + ckpt_dir = None + for i in range(3): + ckpt_dir = TempNamedDir(tmp_path_dist_ckpt / f'ckpt_dir_${i}_cached') + save( + sharded_state_dict_cached, + ckpt_dir.__enter__(), + save_strategy, + async_sharded_save=False, + ) + if i < 2: + ckpt_dir.cleanup() + loaded_cached = load(sharded_state_dict_cached, ckpt_dir.__enter__()) + md_path = ckpt_dir.__enter__() / '.metadata' + + with md_path.open('rb') as f: + md_cached = pickle.load(f) + + # Check loaded state dict + diffs = diff(loaded_non_cached, loaded_cached) + + assert not any( + len(x) for x in diffs + ), 'Cached metadata doesn\'t produce the same state_dict in loading' + # Check metadata recorded in .metadata, torch.distributed.metadata.Metadata + for field in fields(md_non_cached): + if field.name not in ['storage_data', 'storage_meta']: + diffs = diff(getattr(md_non_cached, field.name), getattr(md_cached, field.name)) + assert not any( + len(x) for x in diffs + ), f'{field.name} is different in metadata from non-cached, cached metadata impls' + ckpt_dir.cleanup() + Utils.destroy_model_parallel() + + +class TestCPUTensors: + def setup_method(self, method): + 
Utils.initialize_model_parallel() + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_cpu_tensors_dont_take_too_much_space(self, tmp_path_dist_ckpt): + large_cuda_tensor = torch.ones(1_000_000, dtype=torch.float, device='cuda') + large_cpu_tensor = torch.ones(1_000_000, dtype=torch.float) + # Create small tensors which are a view of a large tensor + sharded_state_dict = { + 'sd_keyA': ShardedTensor.from_rank_offsets( + 'keyA', large_cuda_tensor[:10], replica_id=Utils.rank + ), + 'sd_keyB': ShardedTensor.from_rank_offsets( + 'keyB', large_cpu_tensor[:10], replica_id=Utils.rank + ), + } + + with TempNamedDir( + tmp_path_dist_ckpt / 'test_cpu_tensors_dont_take_too_much_space' + ) as ckpt_dir: + save(sharded_state_dict, ckpt_dir) + + distcp_files = [(ckpt_dir / '__0_0.distcp'), (ckpt_dir / '__0_1.distcp')] + for file in distcp_files: + assert file.exists() + file_size = file.stat().st_size + assert file_size < 10_000, file.name diff --git a/tests/unit_tests/dist_checkpointing/utils.py b/tests/unit_tests/dist_checkpointing/utils.py index 50677f0..570983d 100644 --- a/tests/unit_tests/dist_checkpointing/utils.py +++ b/tests/unit_tests/dist_checkpointing/utils.py @@ -1,241 +1,275 @@ -from functools import partial -from types import SimpleNamespace -from unittest import mock - -import torch - -from megatron.core.models.gpt import GPTModel -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) -from megatron.core.optimizer import OptimizerConfig, get_megatron_optimizer -from megatron.core.tensor_parallel import model_parallel_cuda_manual_seed -from megatron.core.transformer import TransformerConfig -from megatron.training.training import get_model -from megatron.training.utils import unwrap_model - -NUM_LAYERS = 8 -HIDDEN_SIZE = 16 -NUM_ATTENTION_HEADS = 8 - - -def initialize_gpt_model( - pre_process=True, post_process=True, seed=0, use_glu=True, **config_kwargs -): - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - - default_config_kwargs = dict( - num_layers=NUM_LAYERS, - hidden_size=HIDDEN_SIZE, - num_attention_heads=NUM_ATTENTION_HEADS, - use_cpu_initialization=True, - ) - default_config_kwargs.update(**config_kwargs) - transformer_config = TransformerConfig(**default_config_kwargs, gated_linear_unit=use_glu) - model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_local_spec(), - vocab_size=128, - max_sequence_length=4, - pre_process=pre_process, - post_process=post_process, - ) - - model.bfloat16() - with torch.no_grad(): - for p in model.parameters(): - p.random_() - return model - - -def initialize_moe_model( - pre_process=True, - post_process=True, - seed=0, - use_glu=True, - use_sp=False, - use_te=False, - use_grouped_mlp=False, - **config_kwargs -): - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - expert_num = 8 - - default_config_kwargs = dict( - num_layers=8, - hidden_size=16, - num_attention_heads=8, - use_cpu_initialization=True, - num_moe_experts=expert_num, - sequence_parallel=use_sp, - moe_grouped_gemm=use_grouped_mlp, - add_bias_linear=False, - ) - default_config_kwargs.update(**config_kwargs) - transformer_config = TransformerConfig(**default_config_kwargs, gated_linear_unit=use_glu) - if use_te: - spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=expert_num, moe_grouped_gemm=use_grouped_mlp - ) - else: - spec = get_gpt_layer_local_spec(num_experts=expert_num, 
moe_grouped_gemm=use_grouped_mlp) - model = GPTModel( - config=transformer_config, - transformer_layer_spec=spec, - vocab_size=128, - max_sequence_length=4, - pre_process=pre_process, - post_process=post_process, - ) - - model.bfloat16() - with torch.no_grad(): - for p in model.parameters(): - p.random_() - return model - - -def init_basic_mock_args(args, tp, pp, bf16=True): - args.data_parallel_random_init = False - args.virtual_pipeline_model_parallel_size = None - args.fp16 = False - args.bf16 = bf16 - args.accumulate_allreduce_grads_in_fp32 = False - args.overlap_grad_reduce = False - args.overlap_param_gather_with_optimizer_step = False - args.fp8_param_gather = False - args.use_distributed_optimizer = True - args.ddp_bucket_size = None - args.check_for_nan_in_loss_and_grad = False - args.ddp_average_in_collective = False - args.tensor_model_parallel_size = tp - args.pipeline_model_parallel_size = pp - args.encoder_tensor_model_parallel_size = 0 - args.encoder_pipeline_model_parallel_size = 0 - args.enable_ft_package = False - args.use_torch_fsdp2 = False - return args - - -def init_checkpointing_mock_args(args, ckpt_dir, fully_parallel=False): - args.non_persistent_global_ckpt_dir = None - args.non_persistent_ckpt_type = None - args.save = ckpt_dir - args.load = ckpt_dir - args.pretrained_checkpoint = None - args.ckpt_fully_parallel_save = fully_parallel - args.ckpt_fully_parallel_load = fully_parallel - args.async_save = False - args.use_dist_ckpt = True - args.ckpt_format = 'torch_dist' - args.no_save_optim = False - args.no_save_rng = False - args.ckpt_assume_constant_structure = False - args.log_progress = False - args.auto_detect_ckpt_format = False - args.exit_on_missing_checkpoint = False - args.finetune = False - args.consumed_train_samples = 0 - args.skipped_train_samples = 0 - args.consumed_valid_samples = 0 - args.retro_add_retriever = False - args.no_load_optim = False - args.no_load_rng = False - args.dist_ckpt_strictness = 'assume_ok_unexpected' - args.add_position_embedding = True - args.vocab_file = False - args.num_layers = NUM_LAYERS - args.hidden_size = HIDDEN_SIZE - args.num_attention_heads = NUM_ATTENTION_HEADS - - -def setup_model_and_optimizer( - seed, tp, pp, initialize_fn=initialize_gpt_model, bf16=True, dist_opt=True -): - mock_args = SimpleNamespace() - with mock.patch('megatron.training.training.get_args', new=lambda: mock_args): - init_basic_mock_args(mock_args, tp, pp, bf16=bf16) - model = get_model( - partial( - initialize_fn, - seed=seed, - tensor_model_parallel_size=tp, - pipeline_model_parallel_size=pp, - pipeline_dtype=torch.bfloat16, - ) - ) - - config = OptimizerConfig( - bf16=bf16, - params_dtype=torch.bfloat16 if bf16 else torch.float, - use_distributed_optimizer=dist_opt, - ) - optimizer = get_megatron_optimizer(config, model) - - torch.manual_seed(seed + 1) - model_parallel_cuda_manual_seed(seed + 1) - - for group in optimizer.optimizer.param_groups: - for p in group['params']: - if len(optimizer.optimizer.state[p]) == 0: - optimizer.optimizer.state[p]['exp_avg'] = torch.rand_like(p.data) - optimizer.optimizer.state[p]['exp_avg_sq'] = torch.rand_like(p.data) - - optimizer.reload_model_params() - - return unwrap_model(model), optimizer - - -def setup_moe_model_and_optimizer( - seed, - tp, - pp, - ep, - initialize_fn=initialize_moe_model, - bf16=True, - dist_opt=True, - use_te=False, - use_grouped_mlp=False, - use_glu=False, -): - mock_args = SimpleNamespace() - with mock.patch('megatron.training.training.get_args', new=lambda: mock_args): - 
init_basic_mock_args(mock_args, tp, pp, bf16=bf16) - model = get_model( - partial( - initialize_fn, - seed=seed, - tensor_model_parallel_size=tp, - pipeline_model_parallel_size=pp, - pipeline_dtype=torch.bfloat16, - expert_model_parallel_size=ep, - use_sp=(tp > 1 and ep > 1), - use_te=use_te, - use_grouped_mlp=use_grouped_mlp, - use_glu=use_glu, - ) - ) - - config = OptimizerConfig( - bf16=bf16, - params_dtype=torch.bfloat16 if bf16 else torch.float, - use_distributed_optimizer=dist_opt, - ) - optimizer = get_megatron_optimizer(config, model) - - torch.manual_seed(seed + 1) - model_parallel_cuda_manual_seed(seed + 1) - - for opt in optimizer.chained_optimizers: - for group in opt.param_groups: - for p in group['params']: - if len(opt.state[p]) == 0: - opt.state[p]['exp_avg'] = torch.rand_like(p.data) - opt.state[p]['exp_avg_sq'] = torch.rand_like(p.data) - - optimizer.reload_model_params() - - return unwrap_model(model), optimizer +from functools import partial +from typing import Any, Callable, Tuple, Union +from unittest import mock + +import torch + +from megatron.core.models.gpt import GPTModel +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) +from megatron.core.optimizer import OptimizerConfig, get_megatron_optimizer +from megatron.core.tensor_parallel import model_parallel_cuda_manual_seed +from megatron.core.transformer import TransformerConfig +from megatron.training.arguments import parse_args +from megatron.training.training import get_model +from megatron.training.utils import unwrap_model + +NUM_LAYERS = 8 +HIDDEN_SIZE = 16 +NUM_ATTENTION_HEADS = 8 + + +def initialize_gpt_model( + pre_process=True, post_process=True, seed=0, use_glu=True, **config_kwargs +): + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + + default_config_kwargs = dict( + num_layers=NUM_LAYERS, + hidden_size=HIDDEN_SIZE, + num_attention_heads=NUM_ATTENTION_HEADS, + use_cpu_initialization=True, + ) + default_config_kwargs.update(**config_kwargs) + transformer_config = TransformerConfig(**default_config_kwargs, gated_linear_unit=use_glu) + model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_local_spec(), + vocab_size=128, + max_sequence_length=4, + pre_process=pre_process, + post_process=post_process, + ) + + model.bfloat16() + with torch.no_grad(): + for p in model.parameters(): + p.random_() + return model + + +def initialize_moe_model( + pre_process=True, + post_process=True, + seed=0, + use_glu=True, + use_sp=False, + use_te=False, + use_grouped_mlp=False, + **config_kwargs, +): + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + expert_num = 8 + + default_config_kwargs = dict( + num_layers=8, + hidden_size=16, + num_attention_heads=8, + use_cpu_initialization=True, + num_moe_experts=expert_num, + sequence_parallel=use_sp, + moe_grouped_gemm=use_grouped_mlp, + add_bias_linear=False, + ) + default_config_kwargs.update(**config_kwargs) + transformer_config = TransformerConfig(**default_config_kwargs, gated_linear_unit=use_glu) + if use_te: + spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=expert_num, moe_grouped_gemm=use_grouped_mlp + ) + else: + spec = get_gpt_layer_local_spec(num_experts=expert_num, moe_grouped_gemm=use_grouped_mlp) + model = GPTModel( + config=transformer_config, + transformer_layer_spec=spec, + vocab_size=128, + max_sequence_length=4, + pre_process=pre_process, + post_process=post_process, + ) + + model.bfloat16() + with 
torch.no_grad(): + for p in model.parameters(): + p.random_() + return model + + +def init_basic_mock_args(args, tp, pp, bf16=True): + args.data_parallel_random_init = False + args.virtual_pipeline_model_parallel_size = None + args.fp16 = False + args.bf16 = bf16 + args.accumulate_allreduce_grads_in_fp32 = False + args.overlap_grad_reduce = False + args.overlap_param_gather_with_optimizer_step = False + args.fp8_param_gather = False + args.use_distributed_optimizer = True + args.ddp_bucket_size = None + args.check_for_nan_in_loss_and_grad = False + args.ddp_average_in_collective = False + args.tensor_model_parallel_size = tp + args.pipeline_model_parallel_size = pp + args.encoder_tensor_model_parallel_size = 0 + args.encoder_pipeline_model_parallel_size = 0 + args.enable_ft_package = False + args.use_torch_fsdp2 = False + args.init_model_with_meta_device = False + return args + + +def init_checkpointing_mock_args(args, ckpt_dir, fully_parallel=False): + args.non_persistent_global_ckpt_dir = None + args.non_persistent_ckpt_type = None + args.save = ckpt_dir + args.load = ckpt_dir + args.pretrained_checkpoint = None + args.ckpt_fully_parallel_save = fully_parallel + args.ckpt_fully_parallel_load = fully_parallel + args.async_save = False + args.use_dist_ckpt = True + args.ckpt_format = 'torch_dist' + args.no_save_optim = False + args.no_save_rng = False + args.ckpt_assume_constant_structure = False + args.log_progress = False + args.auto_detect_ckpt_format = False + args.exit_on_missing_checkpoint = False + args.finetune = False + args.consumed_train_samples = 0 + args.skipped_train_samples = 0 + args.consumed_valid_samples = 0 + args.retro_add_retriever = False + args.no_load_optim = False + args.no_load_rng = False + args.dist_ckpt_strictness = 'assume_ok_unexpected' + args.add_position_embedding = True + args.vocab_file = False + args.num_layers = NUM_LAYERS + args.hidden_size = HIDDEN_SIZE + args.num_attention_heads = NUM_ATTENTION_HEADS + + +def setup_model_and_optimizer( + seed, + tp, + pp, + initialize_fn=initialize_gpt_model, + bf16=True, + dist_opt=True, + use_custom_fsdp=False, + data_parallel_sharding_strategy="optim_grads_params", +): + mock_args = parse_args(ignore_unknown_args=True) + with mock.patch('megatron.training.training.get_args', new=lambda: mock_args): + init_basic_mock_args(mock_args, tp, pp, bf16=bf16) + model = get_model( + partial( + initialize_fn, + seed=seed, + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + pipeline_dtype=torch.bfloat16, + ) + ) + + config = OptimizerConfig( + bf16=bf16, + params_dtype=torch.bfloat16 if bf16 else torch.float, + use_distributed_optimizer=dist_opt, + ) + optimizer = get_megatron_optimizer(config, model) + + torch.manual_seed(seed + 1) + model_parallel_cuda_manual_seed(seed + 1) + + for group in optimizer.optimizer.param_groups: + for p in group['params']: + if len(optimizer.optimizer.state[p]) == 0: + optimizer.optimizer.state[p]['exp_avg'] = torch.rand_like(p.data) + optimizer.optimizer.state[p]['exp_avg_sq'] = torch.rand_like(p.data) + + optimizer.reload_model_params() + + return unwrap_model(model), optimizer + + +def find_matching_values( + x: Union[dict, list], predicate: Callable[[Any], bool] +) -> Tuple[Union[dict, list], Union[dict, list]]: + """Return matching values in a single list + + Args: + x (Union[dict, list]) : state dict to process. 
Top-level argument must be a dict or list + predicate (object -> bool): determines matching values + """ + + matching_vals = [] + if hasattr(x, 'values') and callable(getattr(x, 'values')): + values = x.values() + elif isinstance(x, list): + values = x + else: + raise ValueError(f'Unexpected top-level object type: {type(x)}') + for v in values: + if isinstance(v, (list, dict)): + matching_vals += find_matching_values(v, predicate) + elif predicate(v): + matching_vals.append(v) + return matching_vals + + +def setup_moe_model_and_optimizer( + seed, + tp, + pp, + ep, + initialize_fn=initialize_moe_model, + bf16=True, + dist_opt=True, + use_te=False, + use_grouped_mlp=False, + use_glu=False, +): + mock_args = parse_args(ignore_unknown_args=True) + with mock.patch('megatron.training.training.get_args', new=lambda: mock_args): + init_basic_mock_args(mock_args, tp, pp, bf16=bf16) + model = get_model( + partial( + initialize_fn, + seed=seed, + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + pipeline_dtype=torch.bfloat16, + expert_model_parallel_size=ep, + use_sp=(tp > 1 and ep > 1), + use_te=use_te, + use_grouped_mlp=use_grouped_mlp, + use_glu=use_glu, + ) + ) + + config = OptimizerConfig( + bf16=bf16, + params_dtype=torch.bfloat16 if bf16 else torch.float, + use_distributed_optimizer=dist_opt, + ) + optimizer = get_megatron_optimizer(config, model) + + torch.manual_seed(seed + 1) + model_parallel_cuda_manual_seed(seed + 1) + + for opt in optimizer.chained_optimizers: + for group in opt.param_groups: + for p in group['params']: + if len(opt.state[p]) == 0: + opt.state[p]['exp_avg'] = torch.rand_like(p.data) + opt.state[p]['exp_avg_sq'] = torch.rand_like(p.data) + + optimizer.reload_model_params() + + return unwrap_model(model), optimizer diff --git a/tests/unit_tests/distributed/test_finalize_model_grads.py b/tests/unit_tests/distributed/test_finalize_model_grads.py new file mode 100644 index 0000000..ce68f15 --- /dev/null +++ b/tests/unit_tests/distributed/test_finalize_model_grads.py @@ -0,0 +1,62 @@ +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
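+# NOTE (explanatory comment, not part of the original change): this test exercises
+# `_allreduce_layernorm_grads` with QK layernorm enabled under tensor model parallelism,
+# both with gradients present (grads filled with ones) and with a fully frozen model
+# (requires_grad=False), to check that the layernorm-grad all-reduce handles both cases.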
+ +import inspect +import os + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.distributed.finalize_model_grads import _allreduce_layernorm_grads +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestAllReduceLNGrads: + + def init_model(self): + self.transformer_config = TransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + use_cpu_initialization=True, + tensor_model_parallel_size=self.tp_size, + qk_layernorm=True, + ) + + self.model = GPTModel( + config=self.transformer_config, + transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(qk_layernorm=True), + vocab_size=100, + max_sequence_length=4, + ) + + def setup_method(self, method): + os.environ.pop('NVTE_FUSED_ATTN', None) + os.environ.pop('NVTE_FLASH_ATTN', None) + os.environ.pop('NVTE_UNFUSED_ATTN', None) + Utils.destroy_model_parallel() + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("freeze_model,tp_size", [(True, 2), (False, 2)]) + def test_allreduce_layernorm_grads(self, freeze_model, tp_size): + + self.tp_size = tp_size + Utils.initialize_model_parallel(tensor_model_parallel_size=self.tp_size) + model_parallel_cuda_manual_seed(123) + + self.init_model() + self.model.cuda() + + for param in self.model.parameters(): + if freeze_model: + param.requires_grad = False + else: + param.grad = torch.ones_like(param) + + _allreduce_layernorm_grads([self.model], self.transformer_config) diff --git a/tests/unit_tests/distributed/test_grad_sync_with_expert_parallel.py b/tests/unit_tests/distributed/test_grad_sync_with_expert_parallel.py new file mode 100644 index 0000000..81740b8 --- /dev/null +++ b/tests/unit_tests/distributed/test_grad_sync_with_expert_parallel.py @@ -0,0 +1,209 @@ +import contextlib +import math +from typing import Optional + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.distributed import DistributedDataParallel, DistributedDataParallelConfig +from megatron.core.distributed.param_and_grad_buffer import partition_buckets +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.transformer import TransformerConfig +from megatron.core.transformer.moe.moe_layer import MoELayer +from tests.unit_tests.test_utilities import TestModel, Utils + + +class TestMoEModel(torch.nn.Module): + def __init__( + self, + hidden_size: int, + num_layers: int, + num_moe_experts: int, + moe_grouped_gemm: bool, + ep_size: int, + ): + transformer_config = TransformerConfig( + num_layers=num_layers, + hidden_size=hidden_size, + num_attention_heads=1, + num_moe_experts=num_moe_experts, + moe_router_load_balancing_type="aux_loss", + moe_router_topk=2, + moe_aux_loss_coeff=0.01, + moe_grouped_gemm=moe_grouped_gemm, + moe_token_dispatcher_type='alltoall', + expert_model_parallel_size=ep_size, + bf16=True, + params_dtype=torch.bfloat16, + ) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=moe_grouped_gemm + ) + super().__init__() + self.layers = torch.nn.ModuleList( + [ + MoELayer( + transformer_config, 
transformer_layer_spec.submodules.mlp.submodules
+                ).cuda()
+                for _ in range(num_layers)
+            ]
+        )
+
+
+def get_moe_model_and_buffers(
+    num_layers: int,
+    hidden_size: int,
+    num_moe_experts: int,
+    moe_grouped_gemm: bool,
+    ep_size: int,
+    bucket_size: Optional[int],
+    use_distributed_optimizer: bool,
+    overlap_grad_reduce: bool,
+    average_in_collective: bool,
+):
+    ddp_config = DistributedDataParallelConfig(
+        grad_reduce_in_fp32=True,
+        use_distributed_optimizer=use_distributed_optimizer,
+        overlap_grad_reduce=overlap_grad_reduce,
+        bucket_size=bucket_size,
+        average_in_collective=average_in_collective,
+    )
+    model = TestMoEModel(
+        hidden_size=hidden_size,
+        num_layers=num_layers,
+        num_moe_experts=num_moe_experts,
+        moe_grouped_gemm=moe_grouped_gemm,
+        ep_size=ep_size,
+    )
+    model = DistributedDataParallel(
+        TransformerConfig(num_attention_heads=1, num_layers=1), ddp_config=ddp_config, module=model
+    )
+    assert len(model.buffers) == 1
+    param_and_grad_buffer = model.buffers[0]
+    ep_param_and_grad_buffer = (
+        model.expert_parallel_buffers[0] if len(model.expert_parallel_buffers) else None
+    )
+
+    return model, param_and_grad_buffer, ep_param_and_grad_buffer
+
+
+@pytest.mark.parametrize("use_distributed_optimizer", [False, True])
+@pytest.mark.parametrize("overlap_grad_reduce", [False, True])
+@pytest.mark.parametrize("average_in_collective", [False, True])
+@pytest.mark.parametrize("ep_size", [1, 2, 4])
+@pytest.mark.flaky
+@pytest.mark.flaky_in_dev
+def test_grad_sync(
+    use_distributed_optimizer: bool,
+    overlap_grad_reduce: bool,
+    average_in_collective: bool,
+    ep_size: int,
+):
+    Utils.fake_initialize_model_parallel(expert_model_parallel_size=ep_size)
+    Utils.initialize_model_parallel(expert_model_parallel_size=ep_size)
+
+    model, non_ep_param_and_grad_buffer, ep_param_and_grad_buffer = get_moe_model_and_buffers(
+        num_layers=2,
+        hidden_size=512,
+        num_moe_experts=4,
+        moe_grouped_gemm=True,
+        ep_size=ep_size,
+        bucket_size=None,
+        use_distributed_optimizer=use_distributed_optimizer,
+        overlap_grad_reduce=overlap_grad_reduce,
+        average_in_collective=average_in_collective,
+    )
+
+    non_ep_bucket_groups = partition_buckets([non_ep_param_and_grad_buffer])
+    param_to_bucket_group = {}
+    for bucket_group in non_ep_bucket_groups:
+        for param in bucket_group.params:
+            assert param not in param_to_bucket_group
+            param_to_bucket_group[param] = bucket_group
+    if ep_size > 1:
+        ep_bucket_groups = partition_buckets([ep_param_and_grad_buffer])
+        for bucket_group in ep_bucket_groups:
+            for param in bucket_group.params:
+                assert param not in param_to_bucket_group
+                param_to_bucket_group[param] = bucket_group
+
+    non_ep_param_and_grad_buffer.grad_data.data.fill_(1.0)
+    non_ep_expected_grad_data_value_after_collective = 1
+    if (
+        use_distributed_optimizer
+        and (not average_in_collective)
+        and parallel_state.get_data_parallel_rank() != 0
+    ):
+        # Under the following conditions, the data in param_and_grad_buffer.grad_data[0] equals 1/data_parallel_world_size:
+        # when average_in_collective=False, the grad data is always first scaled by 1/data_parallel_world_size and then summed by AR/RS,
+        # and when use_distributed_optimizer=True, param_and_grad_buffer.grad_data[0] is only updated on rank 0; on other ranks
+        # another shard of grad_data is updated while param_and_grad_buffer.grad_data[0] stays unchanged (=1/data_parallel_world_size).
+        non_ep_expected_grad_data_value_after_collective /= (
+            parallel_state.get_data_parallel_world_size()
+        )
+    if ep_size > 1:
+        
ep_param_and_grad_buffer.grad_data.data.fill_(1.0) + # expert gradient is always scaled by 1/EP + ep_expected_grad_data_value_after_collective = ( + 1.0 / parallel_state.get_expert_model_parallel_world_size() + ) + if ( + use_distributed_optimizer + and (not average_in_collective) + and parallel_state.get_expert_data_parallel_rank() != 0 + ): + # under the following conditions, the data in param_and_grad_buffer.grad_data[0] equals to 1/EP/DP + ep_expected_grad_data_value_after_collective /= torch.distributed.get_world_size( + group=parallel_state.get_expert_data_parallel_group() + ) + + params = list(model.parameters()) + map_bucket_to_last_param_idx = {} + for i, param in enumerate(params): + if not (param in param_to_bucket_group): + # it means this parameter is not on this device, skip + continue + bucket_group = param_to_bucket_group[param] + if bucket_group in map_bucket_to_last_param_idx: + param_idx = map_bucket_to_last_param_idx[bucket_group] + 1 + else: + param_idx = 0 + map_bucket_to_last_param_idx[bucket_group] = param_idx + + register_grad_sync_context = ( + contextlib.nullcontext() if overlap_grad_reduce else pytest.raises(AssertionError) + ) + finish_grad_sync_context = contextlib.nullcontext() + if param_idx < (len(bucket_group.params) - 1) and overlap_grad_reduce: + # Can't finish grad sync until all params have been registered ready. + finish_grad_sync_context = pytest.raises(AssertionError) + + with register_grad_sync_context: + bucket_group.register_grad_ready(param) + with finish_grad_sync_context: + # When overlap_grad_reduce is True, this should throw an assertion error until all + # params in the model have registered their grad above. + # When overlap_grad_reduce is False, the collective is forced through. + bucket_group.finish_grad_sync() + + if bucket_group in non_ep_bucket_groups: + expected_grad_data_value = non_ep_expected_grad_data_value_after_collective + else: + expected_grad_data_value = ep_expected_grad_data_value_after_collective + if overlap_grad_reduce and param_idx < (len(bucket_group.params) - 1): + expected_grad_data_value = 1 + + if bucket_group in non_ep_bucket_groups: + assert non_ep_param_and_grad_buffer.grad_data[0] == expected_grad_data_value + else: + assert ep_param_and_grad_buffer.grad_data[0] == expected_grad_data_value + + if not overlap_grad_reduce: + # Reset grad_data for subsequent collectives. 
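+            # NOTE (explanatory comment, not part of the original change): without
+            # overlap_grad_reduce, `finish_grad_sync()` above forces the reduction for the
+            # whole bucket on every iteration of this loop, so the buffer is refilled with
+            # ones here to restore the pre-collective state before the next parameter is
+            # checked.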
+ if bucket_group in non_ep_bucket_groups: + non_ep_param_and_grad_buffer.grad_data.data.fill_(1.0) + else: + ep_param_and_grad_buffer.grad_data.data.fill_(1.0) + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/distributed/test_param_and_grad_buffer.py b/tests/unit_tests/distributed/test_param_and_grad_buffer.py index 5ff2a68..3917aec 100644 --- a/tests/unit_tests/distributed/test_param_and_grad_buffer.py +++ b/tests/unit_tests/distributed/test_param_and_grad_buffer.py @@ -21,12 +21,14 @@ def get_model_and_buffers( bucket_size: int, use_distributed_optimizer: bool, overlap_grad_reduce: bool, + average_in_collective: bool, ): ddp_config = DistributedDataParallelConfig( grad_reduce_in_fp32=True, use_distributed_optimizer=use_distributed_optimizer, overlap_grad_reduce=overlap_grad_reduce, bucket_size=bucket_size, + average_in_collective=average_in_collective, ) model = TestModel( input_dim=input_dim, @@ -73,6 +75,7 @@ def test_bucket_sizes( bucket_size=bucket_size, use_distributed_optimizer=use_distributed_optimizer, overlap_grad_reduce=True, + average_in_collective=False, ) actual_numel_in_each_bucket = [ @@ -156,8 +159,11 @@ def test_bucket_sizes( @pytest.mark.parametrize("use_distributed_optimizer", [False, True]) @pytest.mark.parametrize("overlap_grad_reduce", [False, True]) +@pytest.mark.parametrize("average_in_collective", [False, True]) @pytest.mark.flaky -def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): +def test_grad_sync( + use_distributed_optimizer: bool, overlap_grad_reduce: bool, average_in_collective: bool +): Utils.initialize_model_parallel() input_dim = 100 @@ -172,6 +178,7 @@ def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): bucket_size=None, # Group all params into single bucket. use_distributed_optimizer=use_distributed_optimizer, overlap_grad_reduce=overlap_grad_reduce, + average_in_collective=average_in_collective, ) bucket_groups = partition_buckets([param_and_grad_buffer]) param_to_bucket_group = {} @@ -182,10 +189,16 @@ def test_grad_sync(use_distributed_optimizer: bool, overlap_grad_reduce: bool): param_and_grad_buffer.grad_data.data.fill_(1.0) expected_grad_data_value_after_collective = 1 - if torch.distributed.get_rank() == 0 or not use_distributed_optimizer: - expected_grad_data_value_after_collective = parallel_state.get_data_parallel_world_size() - # Default scaling behavior in DDP involves dividing by the data-parallel size. 
- expected_grad_data_value_after_collective /= parallel_state.get_data_parallel_world_size() + # under the following conditions, the data in param_and_grad_buffer.grad_data[0] equals to 1/DP + # this is because when average_in_collective=False, the grad data is always first scaled by 1/DP and then summed by AR/RS + # and when use_distributed_optimizer=True, only for rank=0 param_and_grad_buffer.grad_data[0] is updated, for other ranks + # another shard of grad_data is updated while param_and_grad_buffer.grad_data[0] is unchanged (=1/DP) + if ( + use_distributed_optimizer + and (not average_in_collective) + and parallel_state.get_data_parallel_rank() != 0 + ): + expected_grad_data_value_after_collective /= parallel_state.get_data_parallel_world_size() params = list(model.parameters()) for i, param in enumerate(params): diff --git a/tests/unit_tests/inference/engines/test_mcore_engine.py b/tests/unit_tests/inference/engines/test_mcore_engine.py index 1b342db..6a5e1bc 100644 --- a/tests/unit_tests/inference/engines/test_mcore_engine.py +++ b/tests/unit_tests/inference/engines/test_mcore_engine.py @@ -1,121 +1,218 @@ -import random -import string -from typing import List -from unittest import mock - -import torch - -from megatron.core.inference.engines.mcore_engine import MCoreEngine -from megatron.core.inference.inference_request import InferenceRequest, Status -from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( - GPTInferenceWrapper, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.inference.sampling_params import SamplingParams -from megatron.core.inference.text_generation_controllers.text_generation_controller import ( - TextGenerationController, -) -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec -from megatron.core.models.gpt.gpt_model import GPTModel -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestMCoreEngine: - def setup_method(self, method): - Utils.initialize_model_parallel( - tensor_model_parallel_size=1, pipeline_model_parallel_size=1 - ) - - model_parallel_cuda_manual_seed(123) - self.batch_size = 4 - self.hidden_size = 12 - self.vocab_size = 100 - self.sequence_length = 64 - transformer_config = TransformerConfig( - num_layers=4, - hidden_size=self.hidden_size, - num_attention_heads=4, - use_cpu_initialization=True, - ) - - gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_local_spec(), - vocab_size=self.vocab_size, - max_sequence_length=self.sequence_length, - parallel_output=True, - ).cuda() - - inference_wrapper_config = InferenceWrapperConfig( - hidden_size=self.hidden_size, - inference_batch_times_seqlen_threshold=400, - fp32_residual_connection=False, - params_dtype=torch.float, - padded_vocab_size=self.vocab_size, - ) - - inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) - self.mock_tokenizer = mock.Mock() - text_generation_controller = TextGenerationController( - inference_wrapped_model=inference_wrapped_model, tokenizer=self.mock_tokenizer - ) - - self.mcore_engine = MCoreEngine( - text_generation_controller=text_generation_controller, max_batch_size=4 - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_generate(self): - 
self.mock_tokenizer.vocab_size = self.vocab_size - self.mock_tokenizer.eod = self.vocab_size - 1 - # Generating random length integer prompts - self.mock_tokenizer.tokenize.return_value = [ - random.randint(0, self.vocab_size - 1) for _ in range(random.randint(5, 10)) - ] - # Generates some random string - self.mock_tokenizer.detokenize.return_value = ''.join( - random.choices(string.ascii_letters, k=random.randint(4, 10)) - ) - - prompts = ["sample" * (i + 1) for i in range(self.batch_size)] - results: List[InferenceRequest] = self.mcore_engine.generate( - prompts, sampling_params=SamplingParams(num_tokens_to_generate=10) - ) - - for result in results: - assert ( - result.status == Status.COMPLETED - ), f"Status should be completed but its {result.status}" - assert result.generated_length > 0, f"Generated length should be greater than zero" - assert result.generated_text is not None, f'Generated text should not be None' - - def test_generate_empty_prompt(self): - self.mock_tokenizer.vocab_size = self.vocab_size - self.mock_tokenizer.eod = self.vocab_size - 1 - self.mock_tokenizer.bos = self.vocab_size - 2 - # Generating random length integer prompts - self.mock_tokenizer.tokenize.return_value = [ - random.randint(0, self.vocab_size - 1) for _ in range(random.randint(5, 10)) - ] - # Generates some random string - self.mock_tokenizer.detokenize.return_value = ''.join( - random.choices(string.ascii_letters, k=random.randint(4, 10)) - ) - - prompts = ["" for i in range(self.batch_size)] - results: List[InferenceRequest] = self.mcore_engine.generate( - prompts, add_BOS=True, sampling_params=SamplingParams(num_tokens_to_generate=10) - ) - - for result in results: - assert ( - result.status == Status.COMPLETED - ), f"Status should be completed but its {result.status}" - assert result.generated_length > 0, f"Generated length should be greater than zero" - assert result.generated_text is not None, f'Generated text should not be None' +import asyncio +import random +import string +from typing import AsyncGenerator, List, Union +from unittest import mock + +import pytest +import torch + +from megatron.core.inference.engines.mcore_engine import MCoreEngine +from megatron.core.inference.inference_request import InferenceRequest, Status +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestMCoreEngine: + def setup_engine(self, engine_max_batch_size=None): + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=1 + ) + + model_parallel_cuda_manual_seed(123) + self.batch_size = 4 + self.hidden_size = 12 + self.vocab_size = 100 + self.sequence_length = 64 + transformer_config = TransformerConfig( + num_layers=4, + hidden_size=self.hidden_size, + num_attention_heads=4, + use_cpu_initialization=True, + ) + + gpt_model = GPTModel( + 
config=transformer_config, + transformer_layer_spec=get_gpt_layer_local_spec(), + vocab_size=self.vocab_size, + max_sequence_length=self.sequence_length, + parallel_output=True, + ).cuda() + + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=self.hidden_size, + inference_batch_times_seqlen_threshold=400, + inference_max_requests=self.batch_size, + fp32_residual_connection=False, + params_dtype=torch.float, + padded_vocab_size=self.vocab_size, + ) + + inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) + self.mock_tokenizer = mock.Mock() + text_generation_controller = TextGenerationController( + inference_wrapped_model=inference_wrapped_model, tokenizer=self.mock_tokenizer + ) + + if engine_max_batch_size is not None and engine_max_batch_size > self.batch_size: + with pytest.warns(UserWarning): + self.mcore_engine = MCoreEngine( + text_generation_controller=text_generation_controller, + max_batch_size=engine_max_batch_size, + ) + else: + self.mcore_engine = MCoreEngine( + text_generation_controller=text_generation_controller, + max_batch_size=engine_max_batch_size, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + @pytest.mark.parametrize( + "batch_size,num_trials,empty_prompt", + [(4, 1, False), (4, 1, True), (4, 3, False), (2, 1, False), (8, 1, False)], + ) + def test_generate(self, batch_size: int, num_trials: int, empty_prompt: bool): + self.setup_engine(engine_max_batch_size=batch_size) + self.mock_tokenizer.vocab_size = self.vocab_size + self.mock_tokenizer.eod = self.vocab_size - 1 + # Generating random length integer prompts + self.mock_tokenizer.tokenize.return_value = [ + random.randint(0, self.vocab_size - 1) for _ in range(random.randint(5, 10)) + ] + # Generates some random string + self.mock_tokenizer.detokenize.return_value = ''.join( + random.choices(string.ascii_letters, k=random.randint(4, 10)) + ) + + for _ in range(num_trials): + if empty_prompt: + prompts = ["" for i in range(batch_size)] + else: + prompts = ["sample" * (i + 1) for i in range(batch_size)] + results: List[InferenceRequest] = self.mcore_engine.generate( + prompts, sampling_params=SamplingParams(num_tokens_to_generate=10) + ) + + assert len(results) == batch_size + for result in results: + assert ( + result.status == Status.COMPLETED + ), f"Status should be completed but its {result.status}" + assert result.generated_length > 0, f"Generated length should be greater than zero" + assert result.generated_text is not None, f'Generated text should not be None' + + @pytest.mark.asyncio + async def test_streaming(self): + self.setup_engine() + + async def collect_stream(stream_generator, num_tokens_to_generate): + prev_log_probs = None + prev_text = "" + prev_idx = 0 + prev_length = 0 + num_output_tokens = 0 + async for output in stream_generator: + num_output_tokens += 1 + assert isinstance( + output, InferenceRequest + ), f"Expected InferenceRequest, got {type(output)}" + assert output.generated_log_probs is not None, f"Expected log probs tensor" + assert ( + output.generated_tokens.shape[0] == output.generated_length + ), f"Expected log probs length to match # generated tokens" + assert ( + len(output.generated_log_probs) == output.generated_length + ), f"Expected log probs length to match # generated tokens" + assert output.generated_length > prev_length, f"Expected generated length to grow" + assert ( + output.generated_text[:prev_idx] == prev_text + ), f"Expected generated text to 
match previous text" + assert ( + prev_log_probs is None or prev_log_probs == output.generated_log_probs[:-1] + ), f"Expected previous log probs to match new log probs" + prev_length = output.generated_length + prev_text = output.generated_text + prev_idx = len(output.generated_text) + prev_log_probs = output.generated_log_probs + + assert ( + num_output_tokens == num_tokens_to_generate + ), f"Should have streamed {num_tokens_to_generate} tokens but actually streamed {num_output_tokens}" + assert ( + len(output.generated_tokens) == num_tokens_to_generate + ), f"Should have included {num_tokens_to_generate} tokens but actually returned {len(output.generated_tokens)}" + assert ( + len(output.generated_log_probs) == num_tokens_to_generate + ), f"Should have included {num_tokens_to_generate} log probs but actually returned {len(output.generated_log_probs)}" + + return output + + self.mock_tokenizer.vocab_size = self.vocab_size + self.mock_tokenizer.eod = self.vocab_size - 1 + self.mock_tokenizer.bos = self.vocab_size - 2 + # Generating random length integer prompts + self.mock_tokenizer.tokenize.return_value = [ + random.randint(0, self.vocab_size - 1) for _ in range(random.randint(5, 10)) + ] + # Generates some random string + self.mock_tokenizer.detokenize.return_value = ''.join( + random.choices(string.ascii_letters, k=random.randint(4, 10)) + ) + + prompts = ["" for i in range(self.batch_size)] + + num_tokens_to_generate = 10 + sampling_params = SamplingParams( + num_tokens_to_generate=num_tokens_to_generate, return_log_probs=True + ) + request_ids: List[str] = [ + self.mcore_engine.add_request( + prompt, add_BOS=True, inference_parameters=sampling_params, streaming=True + ) + for prompt in prompts + ] + stream_generators: List[AsyncGenerator[InferenceRequest, None]] = [ + self.mcore_engine.get_stream_generator(request_id) for request_id in request_ids + ] + assert all(stream_generator is not None for stream_generator in stream_generators) + + tasks = [ + asyncio.create_task(collect_stream(stream_generator, num_tokens_to_generate)) + for stream_generator in stream_generators + ] + + await self.mcore_engine.run_engine_async() + final_streamed_tokens: List[InferenceRequest] = await asyncio.gather(*tasks) + results: List[InferenceRequest] = [ + self.mcore_engine.scheduler.completed_request_pool[request_id] + for request_id in request_ids + ] + assert len(final_streamed_tokens) == len(results) + for result, final_streamed_token in zip(results, final_streamed_tokens): + assert torch.equal( + result.generated_tokens.cpu(), final_streamed_token.generated_tokens.cpu() + ), ( + f"result.generated_tokens={result.generated_tokens.cpu()}," + f"final_streamed_token.generated_tokens={final_streamed_token.generated_tokens}" + ) + assert result.generated_log_probs == final_streamed_token.generated_log_probs, ( + f"result.generated_log_probs={result.generated_log_probs}, " + f"final_streamed_token.generated_log_probs={final_streamed_token.generated_log_probs}" + ) diff --git a/tests/unit_tests/inference/model_inference_wrappers/gpt/test_gpt_inference_wrapper.py b/tests/unit_tests/inference/model_inference_wrappers/gpt/test_gpt_inference_wrapper.py index e01c3f4..e7dc490 100644 --- a/tests/unit_tests/inference/model_inference_wrappers/gpt/test_gpt_inference_wrapper.py +++ b/tests/unit_tests/inference/model_inference_wrappers/gpt/test_gpt_inference_wrapper.py @@ -1,124 +1,146 @@ -from argparse import Namespace - -import torch - -from megatron.core import parallel_state -from 
megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( - GPTInferenceWrapper, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) -from megatron.core.models.gpt.gpt_model import GPTModel -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestGPTInferenceWrapper: - - def setup_model(self, tensor_parallel_size, pipeline_parallel_size): - Utils.initialize_model_parallel( - tensor_model_parallel_size=tensor_parallel_size, - pipeline_model_parallel_size=pipeline_parallel_size, - ) - model_parallel_cuda_manual_seed(123) - self.vocab_size = 100 - self.batch_size = 4 - self.sequence_length = 32 - hidden_size = 12 - - transformer_config = TransformerConfig( - num_layers=4, - hidden_size=hidden_size, - num_attention_heads=4, - use_cpu_initialization=True, - ) - - gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_local_spec(), - vocab_size=self.vocab_size, - max_sequence_length=self.sequence_length, - parallel_output=True, - ).cuda() - - inference_wrapper_config = InferenceWrapperConfig( - hidden_size=hidden_size, - inference_batch_times_seqlen_threshold=20, - fp32_residual_connection=False, - params_dtype=torch.float, - padded_vocab_size=self.vocab_size, - ) - - self.inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - # This will call the inference_wrapped_model.forward_pass_with_pipeline_parallel_small_input_batch() - def test_inference_pipeline_parallel_small_size(self): - self.setup_model(tensor_parallel_size=2, pipeline_parallel_size=2) - - batch_prompt_tokens = ( - torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) - .int() - .cuda() - ) - self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) - - inference_input = self.inference_wrapped_model.get_batch_for_context_window(0, 5) - - logits = self.inference_wrapped_model.run_one_forward_step(inference_input) - # Logits are not returned in all ranks in PP - if parallel_state.is_pipeline_last_stage(): - assert logits.shape == ( - self.batch_size, - 5, - self.vocab_size, - ), f"Shape mismatch . Expected {(self.batch_size, 5, self.vocab_size)}, but got {logits.shape}" - - # This will call the inference_wrapped_model.forward_pass_with_pipeline_parallel_large_input_batch() - def test_inference_pipeline_parallel_large__size(self): - self.setup_model(tensor_parallel_size=2, pipeline_parallel_size=2) - - batch_prompt_tokens = ( - torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) - .int() - .cuda() - ) - self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) - - inference_input = self.inference_wrapped_model.get_batch_for_context_window(0, 10) - - logits = self.inference_wrapped_model.run_one_forward_step(inference_input) - - if parallel_state.is_pipeline_last_stage(): - assert logits.shape == ( - self.batch_size, - 10, - self.vocab_size, - ), f"Shape mismatch . 
Expected {(self.batch_size,10, self.vocab_size)}, but got {logits.shape}" - - def test_inference_only_tensor_parallel(self): - self.setup_model(tensor_parallel_size=4, pipeline_parallel_size=1) - - batch_prompt_tokens = ( - torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) - .int() - .cuda() - ) - self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) - - inference_input = self.inference_wrapped_model.get_batch_for_context_window(0, 5) - logits = self.inference_wrapped_model.run_one_forward_step(inference_input) - - assert logits.shape == ( - self.batch_size, - 5, - self.vocab_size, - ), f"Shape mismatch . Expected {(self.batch_size, 5, self.vocab_size)}, but got {logits.shape}" +from argparse import Namespace + +import torch + +from megatron.core import parallel_state +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestGPTInferenceWrapper: + + def setup_model(self, tensor_parallel_size, pipeline_parallel_size): + Utils.initialize_model_parallel( + tensor_model_parallel_size=tensor_parallel_size, + pipeline_model_parallel_size=pipeline_parallel_size, + ) + model_parallel_cuda_manual_seed(123) + self.vocab_size = 100 + self.batch_size = 4 + self.sequence_length = 32 + hidden_size = 12 + + transformer_config = TransformerConfig( + num_layers=4, + hidden_size=hidden_size, + num_attention_heads=4, + use_cpu_initialization=True, + ) + + gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_local_spec(), + vocab_size=self.vocab_size, + max_sequence_length=self.sequence_length, + parallel_output=True, + ).cuda() + + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=hidden_size, + inference_batch_times_seqlen_threshold=20, + inference_max_requests=self.batch_size, + fp32_residual_connection=False, + params_dtype=torch.float, + padded_vocab_size=self.vocab_size, + ) + + self.inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + # This will call the inference_wrapped_model.forward_pass_with_pipeline_parallel_small_input_batch() + def test_inference_pipeline_parallel_small_size(self): + self.setup_model(tensor_parallel_size=2, pipeline_parallel_size=2) + + batch_prompt_tokens = ( + torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) + .int() + .cuda() + ) + self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) + inference_input = self.inference_wrapped_model.prep_inference_input( + prompts_tokens=batch_prompt_tokens + ) + + inference_input_for_context_window = ( + self.inference_wrapped_model.get_batch_for_context_window(inference_input, 0, 5) + ) + + logits = self.inference_wrapped_model.run_one_forward_step( + inference_input_for_context_window + ) + # Logits are not returned in all ranks in PP + if 
parallel_state.is_pipeline_last_stage(): + assert logits.shape == ( + self.batch_size, + 5, + self.vocab_size, + ), f"Shape mismatch . Expected {(self.batch_size, 5, self.vocab_size)}, but got {logits.shape}" + + # This will call the inference_wrapped_model.forward_pass_with_pipeline_parallel_large_input_batch() + def test_inference_pipeline_parallel_large__size(self): + self.setup_model(tensor_parallel_size=2, pipeline_parallel_size=2) + + batch_prompt_tokens = ( + torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) + .int() + .cuda() + ) + self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) + inference_input = self.inference_wrapped_model.prep_inference_input( + prompts_tokens=batch_prompt_tokens + ) + + inference_input_for_context_window = ( + self.inference_wrapped_model.get_batch_for_context_window(inference_input, 0, 10) + ) + + logits = self.inference_wrapped_model.run_one_forward_step( + inference_input_for_context_window + ) + + if parallel_state.is_pipeline_last_stage(): + assert logits.shape == ( + self.batch_size, + 10, + self.vocab_size, + ), f"Shape mismatch . Expected {(self.batch_size,10, self.vocab_size)}, but got {logits.shape}" + + def test_inference_only_tensor_parallel(self): + self.setup_model(tensor_parallel_size=4, pipeline_parallel_size=1) + + batch_prompt_tokens = ( + torch.randint(low=0, high=self.vocab_size, size=(self.batch_size, self.sequence_length)) + .int() + .cuda() + ) + self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) + inference_input = self.inference_wrapped_model.prep_inference_input( + prompts_tokens=batch_prompt_tokens + ) + + inference_input_for_context_window = ( + self.inference_wrapped_model.get_batch_for_context_window(inference_input, 0, 5) + ) + logits = self.inference_wrapped_model.run_one_forward_step( + inference_input_for_context_window + ) + + assert logits.shape == ( + self.batch_size, + 5, + self.vocab_size, + ), f"Shape mismatch . 
Expected {(self.batch_size, 5, self.vocab_size)}, but got {logits.shape}" diff --git a/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py b/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py index 2bb6e9f..16d0fca 100644 --- a/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py +++ b/tests/unit_tests/inference/model_inference_wrappers/t5/test_t5_inference_wrapper.py @@ -1,126 +1,132 @@ -from argparse import Namespace -from copy import deepcopy -from unittest import mock - -import numpy as np -import torch - -from megatron.core import parallel_state -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.inference.model_inference_wrappers.t5.t5_inference_wrapper import ( - T5InferenceWrapper, -) -from megatron.core.models.T5.t5_model import T5Model -from megatron.core.models.T5.t5_spec import ( - get_t5_decoder_with_transformer_engine_block_spec, - get_t5_encoder_with_transformer_engine_block_spec, -) -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.enums import AttnBackend -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestT5InferenceWrapper: - - def setup_model(self, tensor_parallel_size, pipeline_parallel_size): - Utils.initialize_model_parallel( - tensor_model_parallel_size=tensor_parallel_size, - pipeline_model_parallel_size=pipeline_parallel_size, - ) - model_parallel_cuda_manual_seed(123) - self.vocab_size = 100 - self.batch_size = 8 - self.encoder_sequence_length = 32 - self.decoder_sequence_length = 16 - hidden_size = 768 - - transformer_config = TransformerConfig( - num_layers=12, - hidden_size=hidden_size, - num_attention_heads=12, - tensor_model_parallel_size=tensor_parallel_size, - pipeline_model_parallel_size=pipeline_parallel_size, - attention_backend=AttnBackend.unfused, - ) - - encoder_config = deepcopy(transformer_config) - encoder_config.num_layers = transformer_config.num_layers - - encoder_layers_per_pipeline = ( - encoder_config.num_layers // encoder_config.pipeline_model_parallel_size - ) - decoder_layers_per_pipeline = ( - transformer_config.num_layers // transformer_config.pipeline_model_parallel_size - ) - en_block_spec = get_t5_encoder_with_transformer_engine_block_spec( - encoder_layers_per_pipeline - ) - de_block_spec = get_t5_decoder_with_transformer_engine_block_spec( - decoder_layers_per_pipeline - ) - - t5_model = T5Model( - config=transformer_config, - encoder_config=encoder_config, - transformer_encoder_layer_spec=en_block_spec, - transformer_decoder_layer_spec=de_block_spec, - vocab_size=self.vocab_size, - max_sequence_length=self.encoder_sequence_length, - parallel_output=True, - pre_process=True, - post_process=True, - add_encoder=True, - add_decoder=True, - ).cuda() - - inference_wrapper_config = InferenceWrapperConfig( - hidden_size=hidden_size, - inference_batch_times_seqlen_threshold=-1, - fp32_residual_connection=False, - params_dtype=torch.float, - padded_vocab_size=self.vocab_size, - ) - - self.inference_wrapped_model = T5InferenceWrapper(t5_model, inference_wrapper_config) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_inference_only_tensor_parallel(self): - self.setup_model(tensor_parallel_size=4, pipeline_parallel_size=1) - - batch_prompt_tokens = ( - torch.randint( - low=0, 
high=self.vocab_size, size=(self.batch_size, self.decoder_sequence_length) - ) - .int() - .cuda() - ) - batch_encoder_prompts = ["sample prompt encoders"] * self.batch_size - mock_tokenizer = mock.Mock() - mock_tokenizer.pad = self.vocab_size - 1 - mock_tokenizer.additional_special_tokens_ids = list(range(100)) - mock_tokenizer.tokenize.return_value = np.random.randint( - self.vocab_size, size=self.encoder_sequence_length - ).tolist() - - self.inference_wrapped_model.prep_model_for_inference( - prompts_tokens=batch_prompt_tokens, - encoder_prompts=batch_encoder_prompts, - tokenizer=mock_tokenizer, - ) - - inference_input = self.inference_wrapped_model.get_batch_for_context_window( - 0, self.decoder_sequence_length - ) - - logits = self.inference_wrapped_model.run_one_forward_step(inference_input) - - assert logits.shape == ( - self.batch_size, - self.decoder_sequence_length, - self.vocab_size, - ), f"Shape mismatch . Expected {(self.batch_size, self.decoder_sequence_length, self.vocab_size)}, but got {logits.shape}" +from argparse import Namespace +from copy import deepcopy +from unittest import mock + +import numpy as np +import torch + +from megatron.core import parallel_state +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference.model_inference_wrappers.t5.t5_inference_wrapper import ( + T5InferenceWrapper, +) +from megatron.core.models.T5.t5_model import T5Model +from megatron.core.models.T5.t5_spec import ( + get_t5_decoder_with_transformer_engine_block_spec, + get_t5_encoder_with_transformer_engine_block_spec, +) +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.enums import AttnBackend +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestT5InferenceWrapper: + + def setup_model(self, tensor_parallel_size, pipeline_parallel_size): + Utils.initialize_model_parallel( + tensor_model_parallel_size=tensor_parallel_size, + pipeline_model_parallel_size=pipeline_parallel_size, + ) + model_parallel_cuda_manual_seed(123) + self.vocab_size = 100 + self.batch_size = 8 + self.encoder_sequence_length = 32 + self.decoder_sequence_length = 16 + hidden_size = 768 + + transformer_config = TransformerConfig( + num_layers=12, + hidden_size=hidden_size, + num_attention_heads=12, + tensor_model_parallel_size=tensor_parallel_size, + pipeline_model_parallel_size=pipeline_parallel_size, + attention_backend=AttnBackend.unfused, + ) + + encoder_config = deepcopy(transformer_config) + encoder_config.num_layers = transformer_config.num_layers + + encoder_layers_per_pipeline = ( + encoder_config.num_layers // encoder_config.pipeline_model_parallel_size + ) + decoder_layers_per_pipeline = ( + transformer_config.num_layers // transformer_config.pipeline_model_parallel_size + ) + en_block_spec = get_t5_encoder_with_transformer_engine_block_spec( + encoder_layers_per_pipeline + ) + de_block_spec = get_t5_decoder_with_transformer_engine_block_spec( + decoder_layers_per_pipeline + ) + + t5_model = T5Model( + config=transformer_config, + encoder_config=encoder_config, + transformer_encoder_layer_spec=en_block_spec, + transformer_decoder_layer_spec=de_block_spec, + vocab_size=self.vocab_size, + max_sequence_length=self.encoder_sequence_length, + parallel_output=True, + pre_process=True, + post_process=True, + add_encoder=True, + add_decoder=True, + ).cuda() + + 
inference_wrapper_config = InferenceWrapperConfig( + hidden_size=hidden_size, + inference_batch_times_seqlen_threshold=-1, + fp32_residual_connection=False, + params_dtype=torch.float, + padded_vocab_size=self.vocab_size, + ) + + self.inference_wrapped_model = T5InferenceWrapper(t5_model, inference_wrapper_config) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_inference_only_tensor_parallel(self): + self.setup_model(tensor_parallel_size=4, pipeline_parallel_size=1) + + batch_prompt_tokens = ( + torch.randint( + low=0, high=self.vocab_size, size=(self.batch_size, self.decoder_sequence_length) + ) + .int() + .cuda() + ) + batch_encoder_prompts = ["sample prompt encoders"] * self.batch_size + mock_tokenizer = mock.Mock() + mock_tokenizer.pad = self.vocab_size - 1 + mock_tokenizer.additional_special_tokens_ids = list(range(100)) + mock_tokenizer.tokenize.return_value = np.random.randint( + self.vocab_size, size=self.encoder_sequence_length + ).tolist() + + self.inference_wrapped_model.prep_model_for_inference(prompts_tokens=batch_prompt_tokens) + + inference_input = self.inference_wrapped_model.prep_inference_input( + prompts_tokens=batch_prompt_tokens, + encoder_prompts=batch_encoder_prompts, + tokenizer=mock_tokenizer, + ) + + inference_input_for_context_window = ( + self.inference_wrapped_model.get_batch_for_context_window( + inference_input, 0, self.decoder_sequence_length + ) + ) + + logits = self.inference_wrapped_model.run_one_forward_step( + inference_input_for_context_window + ) + + assert logits.shape == ( + self.batch_size, + self.decoder_sequence_length, + self.vocab_size, + ), f"Shape mismatch . Expected {(self.batch_size, self.decoder_sequence_length, self.vocab_size)}, but got {logits.shape}" diff --git a/tests/unit_tests/inference/test_modelopt_gpt_model.py b/tests/unit_tests/inference/test_modelopt_gpt_model.py deleted file mode 100644 index 380ac7f..0000000 --- a/tests/unit_tests/inference/test_modelopt_gpt_model.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from megatron.core.inference.modelopt_support.gpt.model_specs import get_gpt_layer_modelopt_spec -from megatron.core.inference.modelopt_support.gpt.state_dict_hooks import ( - mcore_gpt_load_te_state_dict_pre_hook, -) -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.models.gpt.gpt_model import GPTModel -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestModelOptGPTModel: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - self.gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), - vocab_size=100, - max_sequence_length=4, - ) - # Ensure that a GPTModel can be built with the modelopt spec. 
- self.modelopt_gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_modelopt_spec(), - vocab_size=100, - max_sequence_length=4, - ) - - def test_load_te_state_dict_pre_hook(self): - handle = self.modelopt_gpt_model._register_load_state_dict_pre_hook( - mcore_gpt_load_te_state_dict_pre_hook - ) - self.modelopt_gpt_model.load_state_dict(self.gpt_model.state_dict()) - handle.remove() - - def teardown_method(self, method): - Utils.destroy_model_parallel() diff --git a/tests/unit_tests/inference/test_modelopt_module_spec.py b/tests/unit_tests/inference/test_modelopt_module_spec.py new file mode 100644 index 0000000..2d7c6b2 --- /dev/null +++ b/tests/unit_tests/inference/test_modelopt_module_spec.py @@ -0,0 +1,207 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +import inspect +import tempfile + +import pytest +import torch + +from megatron.core import InferenceParams, dist_checkpointing +from megatron.core.inference.modelopt_support.gpt.model_specs import get_gpt_layer_modelopt_spec +from megatron.core.inference.modelopt_support.gpt.state_dict_hooks import ( + mcore_gpt_load_te_state_dict_pre_hook, +) +from megatron.core.inference.modelopt_support.mamba.model_specs import get_mamba_stack_modelopt_spec +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.models.mamba.mamba_layer_specs import mamba_stack_spec +from megatron.core.models.mamba.mamba_model import MambaModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.dist_checkpointing import TempNamedDir +from tests.unit_tests.test_utilities import Utils + + +def model_forward(model: torch.nn.Module, config: TransformerConfig, micro_batch_size: int = 2): + inference_params: InferenceParams = InferenceParams( + max_batch_size=micro_batch_size, max_sequence_length=model.max_sequence_length + ) + prompt_length = model.max_sequence_length - 1 + + # load-context/first-output-token, step/generate + for offset in (0, prompt_length): + if offset == 0: + sequence_length = prompt_length + else: + sequence_length = 1 + inference_params.sequence_len_offset = offset + + data = list(range(sequence_length)) + input_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + position_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + attention_mask = torch.ones( + (micro_batch_size, 1, sequence_length, sequence_length), dtype=bool + ).cuda() + + logits = model.forward( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + inference_params=inference_params, + ) + + assert logits.shape[0] == micro_batch_size + assert logits.shape[1] == sequence_length + assert logits.shape[2] == model.vocab_size + + +class TestModelOptGPTModel: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + self.gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), + vocab_size=100, + max_sequence_length=4, + ) + # Ensure that a GPTModel can be built with the modelopt spec. 
+ self.modelopt_gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_modelopt_spec(remap_te_layernorm=True), + vocab_size=100, + max_sequence_length=4, + ) + + def test_load_te_state_dict_pre_hook(self): + handle = self.modelopt_gpt_model._register_load_state_dict_pre_hook( + mcore_gpt_load_te_state_dict_pre_hook + ) + self.modelopt_gpt_model.load_state_dict(self.gpt_model.state_dict()) + handle.remove() + + def test_sharded_state_dict_restore(self, tmp_path_dist_ckpt): + te_fused_sharded_state_dict = self.gpt_model.sharded_state_dict() + modelopt_sharded_state_dict = self.modelopt_gpt_model.sharded_state_dict() + + with TempNamedDir( + tmp_path_dist_ckpt / 'test_sharded_state_dict_restore', sync=True + ) as tmpdirname: + dist_checkpointing.save(te_fused_sharded_state_dict, tmpdirname) + state_dict = dist_checkpointing.load(modelopt_sharded_state_dict, tmpdirname) + self.modelopt_gpt_model.load_state_dict(state_dict) + + def test_inference(self): + config: TransformerConfig = self.modelopt_gpt_model.config + model = self.modelopt_gpt_model.cuda() + model_forward(model, config) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + +class TestModelOptMambaModel: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=3, hidden_size=256, num_attention_heads=4, use_cpu_initialization=True + ) + + # A Hybrid MambaModel using fused-TE spec (default) + self.mamba_model = MambaModel( + config=transformer_config, + mamba_stack_spec=mamba_stack_spec, + vocab_size=100, + max_sequence_length=4, + hybrid_override_pattern="M*-", + ) + + # A Hybrid MambaModel using ModelOpt spec (local + TENorm). 
+ self.modelopt_mamba_model = MambaModel( + config=transformer_config, + mamba_stack_spec=get_mamba_stack_modelopt_spec(remap_te_layernorm=True), + vocab_size=100, + max_sequence_length=4, + hybrid_override_pattern="M*-", + ) + + def test_sharded_state_dict_restore(self, tmp_path_dist_ckpt): + te_fused_sharded_state_dict = self.mamba_model.sharded_state_dict() + modelopt_sharded_state_dict = self.modelopt_mamba_model.sharded_state_dict() + + with TempNamedDir( + tmp_path_dist_ckpt / 'test_sharded_state_dict_restore', sync=True + ) as tmpdirname: + dist_checkpointing.save(te_fused_sharded_state_dict, tmpdirname) + state_dict = dist_checkpointing.load(modelopt_sharded_state_dict, tmpdirname) + self.modelopt_mamba_model.load_state_dict(state_dict) + + def test_inference(self): + config: TransformerConfig = self.modelopt_mamba_model.config + model = self.modelopt_mamba_model.cuda() + model_forward(model, config) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + +def test_get_gpt_layer_modelopt_spec_interface(): + # Get the function signature + sig = inspect.signature(get_gpt_layer_modelopt_spec) + + # Define the expected signature + expected_params = { + "num_experts": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "local_core_attention": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "moe_grouped_gemm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "remap_te_layernorm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "qk_layernorm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + } + + expected_defaults = { + "num_experts": None, + "local_core_attention": False, + "moe_grouped_gemm": False, + "remap_te_layernorm": False, + "qk_layernorm": False, + } + + # Check parameter kinds + for param_name, param in sig.parameters.items(): + assert param_name in expected_params.keys(), f"Unexpected parameter: {param_name}" + assert param.kind is expected_params[param_name], f"Wrong kind for parameter: {param_name}" + + # Check default values + defaults = { + k: v.default for k, v in sig.parameters.items() if v.default is not inspect.Parameter.empty + } + assert defaults == expected_defaults, "Default values do not match the expected ones." + + +def test_get_mamba_stack_modelopt_spec_interface(): + # Get the function signature + sig = inspect.signature(get_mamba_stack_modelopt_spec) + + # Define the expected signature + expected_params = { + "local_core_attention": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "remap_te_layernorm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + } + + expected_defaults = {"local_core_attention": False, "remap_te_layernorm": False} + + # Check parameter kinds + for param_name, param in sig.parameters.items(): + assert param_name in expected_params.keys(), f"Unexpected parameter: {param_name}" + assert param.kind is expected_params[param_name], f"Wrong kind for parameter: {param_name}" + + # Check default values + defaults = { + k: v.default for k, v in sig.parameters.items() if v.default is not inspect.Parameter.empty + } + assert defaults == expected_defaults, "Default values do not match the expected ones." 
diff --git a/tests/unit_tests/inference/test_scheduler.py b/tests/unit_tests/inference/test_scheduler.py
index 90caa70..e388a6b 100644
--- a/tests/unit_tests/inference/test_scheduler.py
+++ b/tests/unit_tests/inference/test_scheduler.py
@@ -27,13 +27,15 @@ class TestScheduler:
         prompt_tokens = torch.randn(5)
         inference_parameters = SamplingParams()
 
+        active_request_ids = []
         for i in range(self.max_batch_size):
-            self.scheduler.add_request(prompt, prompt_tokens, inference_parameters)
+            request_id = self.scheduler.add_request(prompt, prompt_tokens, inference_parameters)
             assert (
                 len(self.scheduler.active_request_pool) == i + 1
             ), f"Active request pool should have {i+1} requests, but it has only {len(self.scheduler.active_request_pool)}"
+            active_request_ids.append(request_id)
 
-        self.scheduler.add_request(prompt, prompt_tokens, inference_parameters)
+        request_id = self.scheduler.add_request(prompt, prompt_tokens, inference_parameters)
         assert (
             len(self.scheduler.waiting_request_pool) == 1
         ), f"Waiting request pool should have 1 request but it has {len(self.scheduler.waiting_request_pool)} requests"
@@ -42,12 +44,18 @@ class TestScheduler:
         assert (
             waiting_request.status == Status.WAITING_IN_QUEUE
         ), f"Status should be WAITING_IN_QUEUE, but its {waiting_request.status} for the waiting request"
+        assert (
+            request_id == waiting_request.request_id
+        ), "Waiting request's ID should match the returned request ID"
 
         assert (
             self.scheduler.have_requests_pending()
         ), "Scheduler should have requests pending, but it seems to be having no requests"
 
-        active_request_dict: Dict[int, InferenceRequest] = self.scheduler.active_request_pool
+        active_request_dict: Dict[str, InferenceRequest] = self.scheduler.active_request_pool
+        assert set(active_request_dict.keys()) == set(
+            active_request_ids
+        ), "Active request pool IDs should match the returned request IDs"
         for request_id, request in active_request_dict.items():
             # Mark every even request compelted
             if int(request_id) % 2 == 0:
@@ -66,7 +74,7 @@ class TestScheduler:
             len(self.scheduler.completed_request_pool) == 2
         ), f"Completed request pool should have 2 requests but it has {len(self.scheduler.completed_request_pool)} requests "
 
-        active_request_dict: Dict[int, InferenceRequest] = self.scheduler.active_request_pool
+        active_request_dict: Dict[str, InferenceRequest] = self.scheduler.active_request_pool
         for request_id, request in active_request_dict.items():
             # Mark all requests compelted
             request.status = Status.COMPLETED
diff --git a/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py b/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py
index 12903a9..f4159a8 100644
--- a/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py
+++ b/tests/unit_tests/inference/text_generation_controllers/test_encoder_decoder_text_generation_controller.py
@@ -115,7 +115,7 @@ class TestEncoderDecoderTextGenerationController:
             self.vocab_size, size=(self.encoder_sequence_length - 5)
         ).tolist()
 
-        active_requests: Dict[int, InferenceRequest] = OrderedDict()
+        active_requests: Dict[str, InferenceRequest] = OrderedDict()
         for i in range(self.batch_size):
             prompt = "decoder_sample"
             prompt_tokens = np.random.randint(
diff --git a/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py b/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py
index
1db360f..6698823 100644 --- a/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py +++ b/tests/unit_tests/inference/text_generation_controllers/test_simple_text_generation_controller.py @@ -1,175 +1,291 @@ -import os -import random -import string -import time -from collections import OrderedDict -from typing import Dict -from unittest import mock - -import pytest -import torch - -from megatron.core.inference.inference_request import InferenceRequest, Status -from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( - GPTInferenceWrapper, -) -from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( - InferenceWrapperConfig, -) -from megatron.core.inference.sampling_params import SamplingParams -from megatron.core.inference.text_generation_controllers.text_generation_controller import ( - TextGenerationController, -) -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec -from megatron.core.models.gpt.gpt_model import GPTModel -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.enums import AttnBackend -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestTextGenerationController: - - def setup_method(self, method): - Utils.initialize_model_parallel( - tensor_model_parallel_size=2, pipeline_model_parallel_size=2 - ) - model_parallel_cuda_manual_seed(123) - self.batch_size = 4 - self.hidden_size = 12 - self.vocab_size = 100 - self.sequence_length = 64 - transformer_config = TransformerConfig( - num_layers=4, - hidden_size=self.hidden_size, - num_attention_heads=4, - use_cpu_initialization=True, - attention_backend=AttnBackend.local, - ) - - gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_local_spec(), - vocab_size=self.vocab_size, - max_sequence_length=self.sequence_length, - parallel_output=True, - ).cuda() - - inference_wrapper_config = InferenceWrapperConfig( - hidden_size=self.hidden_size, - inference_batch_times_seqlen_threshold=-1, - fp32_residual_connection=False, - params_dtype=torch.float, - padded_vocab_size=self.vocab_size, - ) - - inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) - - self.mock_tokenizer = mock.Mock() - - self.text_generation_controller = TextGenerationController( - inference_wrapped_model=inference_wrapped_model, tokenizer=self.mock_tokenizer - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_sample_from_logits(self): - with pytest.raises(AssertionError) as aerror: - self.text_generation_controller.sample_from_logits( - last_token_logits=None, - sampling_params=SamplingParams(top_k=2, top_p=0.4), - vocab_size=self.vocab_size, - ) - assert str(aerror.value) == 'Cannot have top-p and top-k both greater than zero' - - with pytest.raises(AssertionError) as aerror: - self.text_generation_controller.sample_from_logits( - last_token_logits=None, - sampling_params=SamplingParams(top_p=1.4, top_k=0), - vocab_size=self.vocab_size, - ) - assert str(aerror.value) == 'top-p should be in (0,1]' - - with pytest.raises(AssertionError) as aerror: - self.text_generation_controller.sample_from_logits( - last_token_logits=torch.randn(self.batch_size, 1), - sampling_params=SamplingParams(top_k=self.vocab_size + 10), - vocab_size=self.vocab_size, - ) - assert str(aerror.value) == 'top-k is larger 
than logit size.' - - last_token_logits = ( - torch.arange(0, self.vocab_size).repeat(self.batch_size, 1).float().cuda() - ) - sampled_logits = self.text_generation_controller.sample_from_logits( - last_token_logits, SamplingParams(top_k=1), self.vocab_size - ) - assert torch.all( - sampled_logits.cpu() == torch.ones(self.batch_size) * self.vocab_size - 1 - ), f"The sampled logits should all be {self.vocab_size} but its {sampled_logits}" - - sampled_logits = self.text_generation_controller.sample_from_logits( - last_token_logits, SamplingParams(top_k=2), self.vocab_size - ) - assert torch.all( - sampled_logits >= self.vocab_size - 2 - ), f"The sampled logits should all be greater than {self.vocab_size-2} but its {sampled_logits}" - - l = last_token_logits[0] - top_p = 0.3 - expected_min_value = l[l.softmax(dim=-1).cumsum(dim=-1) > top_p][0].item() - sampled_logits = self.text_generation_controller.sample_from_logits( - last_token_logits, SamplingParams(top_p=top_p, top_k=0), self.vocab_size - ) - assert torch.all( - sampled_logits >= expected_min_value - ), f"The sampled logits should all be greater than {expected_min_value} but its {sampled_logits}" - - top_p = 0.95 - temperature = 2 - expected_min_value = l[l.div_(temperature).softmax(dim=-1).cumsum(dim=-1) > top_p][0].item() - sampled_logits = self.text_generation_controller.sample_from_logits( - last_token_logits, - SamplingParams(top_p=top_p, temperature=temperature, top_k=0), - self.vocab_size, - ) - assert torch.all( - sampled_logits >= expected_min_value - ), f"The sampled logits should all be greater than {expected_min_value} but its {sampled_logits}" - - def test_generate_all_output_tokens_static_batch(self): - self.mock_tokenizer.vocab_size = self.vocab_size - self.mock_tokenizer.eod = self.vocab_size - 1 - self.mock_tokenizer.detokenize.return_value = ''.join( - random.choices(string.ascii_letters, k=random.randint(4, 10)) - ) - - active_requests: Dict[int, InferenceRequest] = OrderedDict() - for i in range(self.batch_size): - prompt = "sample" * (i + 1) - self.mock_tokenizer.tokenize.return_value = torch.randn( - self.batch_size, self.vocab_size - ).cuda() - inference_request = InferenceRequest( - request_id=i, - prompt=prompt, - inference_parameters=SamplingParams(num_tokens_to_generate=10), - arrival_time=time.time(), - prompt_tokens=torch.randint( - low=0, high=self.vocab_size - 1, size=(len(prompt),) - ).tolist(), - status=Status.ACTIVE_BUT_NOT_GENERATING_TOKENS, - ) - active_requests[i] = inference_request - - requests = self.text_generation_controller.generate_all_output_tokens_static_batch( - active_requests - ) - - for request_id, request in requests.items(): - assert ( - request.status == Status.COMPLETED - ), f"Status should be completed but its {request.status}" - assert request.generated_length > 0, f"Generated length should be greater than zero" - assert request.generated_text is not None, "Generated text should not be None" +import copy +import os +import random +import string +import time +from collections import OrderedDict +from typing import Dict, List +from unittest import mock + +import pytest +import torch + +from megatron.core.inference.inference_request import InferenceRequest, Status +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference.sampling_params import SamplingParams +from 
megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.enums import AttnBackend +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestTextGenerationController: + + def setup_model(self, dtype): + Utils.initialize_model_parallel( + tensor_model_parallel_size=2, pipeline_model_parallel_size=2 + ) + model_parallel_cuda_manual_seed(123) + self.batch_size = 4 + self.hidden_size = 12 + self.vocab_size = 100 + self.sequence_length = 64 + transformer_config = TransformerConfig( + num_layers=4, + hidden_size=self.hidden_size, + num_attention_heads=4, + use_cpu_initialization=True, + attention_backend=AttnBackend.local, + ) + + gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_local_spec(), + vocab_size=self.vocab_size, + max_sequence_length=self.sequence_length, + parallel_output=True, + ).cuda() + + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=self.hidden_size, + inference_batch_times_seqlen_threshold=-1, + inference_max_seq_length=2048, + inference_max_requests=self.batch_size, + fp32_residual_connection=False, + params_dtype=dtype, + padded_vocab_size=self.vocab_size, + ) + + inference_wrapped_model = GPTInferenceWrapper(gpt_model, inference_wrapper_config) + + self.mock_tokenizer = mock.Mock() + + self.text_generation_controller = TextGenerationController( + inference_wrapped_model=inference_wrapped_model, tokenizer=self.mock_tokenizer + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_sample_from_logits(self): + self.setup_model(torch.float32) + + with pytest.raises(AssertionError) as aerror: + self.text_generation_controller.sample_from_logits( + last_token_logits=None, + sampling_params=SamplingParams(top_k=2, top_p=0.4), + vocab_size=self.vocab_size, + ) + assert str(aerror.value) == 'Cannot have top-p and top-k both greater than zero' + + with pytest.raises(AssertionError) as aerror: + self.text_generation_controller.sample_from_logits( + last_token_logits=None, + sampling_params=SamplingParams(top_p=1.4, top_k=0), + vocab_size=self.vocab_size, + ) + assert str(aerror.value) == 'top-p should be in (0,1]' + + with pytest.raises(AssertionError) as aerror: + self.text_generation_controller.sample_from_logits( + last_token_logits=torch.randn(self.batch_size, 1), + sampling_params=SamplingParams(top_k=self.vocab_size + 10), + vocab_size=self.vocab_size, + ) + assert str(aerror.value) == 'top-k is larger than logit size.' 
+ + last_token_logits = ( + torch.arange(0, self.vocab_size).repeat(self.batch_size, 1).float().cuda() + ) + sampled_logits = self.text_generation_controller.sample_from_logits( + last_token_logits, SamplingParams(top_k=1), self.vocab_size + ) + assert torch.all( + sampled_logits.cpu() == torch.ones(self.batch_size) * self.vocab_size - 1 + ), f"The sampled logits should all be {self.vocab_size} but its {sampled_logits}" + + sampled_logits = self.text_generation_controller.sample_from_logits( + last_token_logits, SamplingParams(top_k=2), self.vocab_size + ) + assert torch.all( + sampled_logits >= self.vocab_size - 2 + ), f"The sampled logits should all be greater than {self.vocab_size-2} but its {sampled_logits}" + + l = last_token_logits[0] + top_p = 0.3 + expected_min_value = l[l.softmax(dim=-1).cumsum(dim=-1) > top_p][0].item() + sampled_logits = self.text_generation_controller.sample_from_logits( + last_token_logits, SamplingParams(top_p=top_p, top_k=0), self.vocab_size + ) + assert torch.all( + sampled_logits >= expected_min_value + ), f"The sampled logits should all be greater than {expected_min_value} but its {sampled_logits}" + + top_p = 0.95 + temperature = 2 + expected_min_value = l[l.div_(temperature).softmax(dim=-1).cumsum(dim=-1) > top_p][0].item() + sampled_logits = self.text_generation_controller.sample_from_logits( + last_token_logits, + SamplingParams(top_p=top_p, temperature=temperature, top_k=0), + self.vocab_size, + ) + assert torch.all( + sampled_logits >= expected_min_value + ), f"The sampled logits should all be greater than {expected_min_value} but its {sampled_logits}" + + @pytest.mark.parametrize("dtype", [torch.float32, torch.bfloat16]) + def test_generate_all_output_tokens_static_batch(self, dtype): + self.setup_model(dtype) + + self.mock_tokenizer.vocab_size = self.vocab_size + self.mock_tokenizer.eod = self.vocab_size - 1 + self.mock_tokenizer.detokenize.side_effect = lambda x: ' '.join( + [ + ''.join(random.choices(string.ascii_letters, k=random.randint(4, 10))) + for _ in range(len(x)) + ] + ) + self.mock_tokenizer.offsets.side_effect = lambda _, s: [ + i for i, c in enumerate(s) if c == ' ' + ] + [len(s)] + + active_requests: Dict[str, InferenceRequest] = OrderedDict() + all_prompt_tokens: Dict[str, List[int]] = OrderedDict() + for i in range(self.batch_size): + prompt = "sample" * (i + 1) + self.mock_tokenizer.tokenize.return_value = torch.randn( + self.batch_size, self.vocab_size + ).cuda() + prompt_tokens = torch.randint( + low=0, high=self.vocab_size - 1, size=(len(prompt),) + ).tolist() + + request_id = str(i) + inference_request = InferenceRequest( + request_id=request_id, + prompt=prompt, + inference_parameters=SamplingParams( + num_tokens_to_generate=10, return_log_probs=True, return_segments=True + ), + arrival_time=time.time(), + prompt_tokens=prompt_tokens, + status=Status.ACTIVE_BUT_NOT_GENERATING_TOKENS, + ) + active_requests[request_id] = inference_request + all_prompt_tokens[request_id] = copy.deepcopy(prompt_tokens) + + requests = self.text_generation_controller.generate_all_output_tokens_static_batch( + active_requests + ) + + for request_id, request in requests.items(): + assert ( + request.status == Status.COMPLETED + ), f"Status should be completed but its {request.status}" + assert request.generated_length > 0, f"Generated length should be greater than zero" + assert request.generated_text is not None, "Generated text should not be None" + assert ( + all_prompt_tokens[request_id] == request.prompt_tokens + ), "Prompt tokens should not 
have changed during generation" + assert len(request.segments) == len(request.prompt_log_probs) + len( + request.generated_log_probs + ), "Segments should be returned for both prompt and generated tokens" + assert len(request.prompt) + len(request.generated_text) == len( + request.text + ), "Output text should include prompts and generations" + + @pytest.mark.parametrize("dtype", [torch.float32, torch.bfloat16]) + def test_output_log_probs(self, dtype): + self.setup_model(dtype) + + self.mock_tokenizer.vocab_size = self.vocab_size + self.mock_tokenizer.bos = 0 + self.mock_tokenizer.eod = self.vocab_size - 1 + self.mock_tokenizer.detokenize.side_effect = lambda x: ' '.join( + [ + ''.join(random.choices(string.ascii_letters, k=random.randint(4, 10))) + for _ in range(len(x)) + ] + ) + self.mock_tokenizer.offsets.side_effect = lambda _, s: [ + i for i, c in enumerate(s) if c == ' ' + ] + [len(s)] + + prompt = "" + active_requests: Dict[int, InferenceRequest] = OrderedDict() + for i in range(self.batch_size): + self.mock_tokenizer.tokenize.return_value = torch.randn( + self.batch_size, self.vocab_size + ).cuda() + inference_request = InferenceRequest( + request_id=i, + prompt=prompt, + inference_parameters=SamplingParams( + num_tokens_to_generate=1, return_log_probs=True + ), + arrival_time=time.time(), + prompt_tokens=[self.mock_tokenizer.bos], + status=Status.ACTIVE_BUT_NOT_GENERATING_TOKENS, + ) + active_requests[i] = inference_request + + requests = self.text_generation_controller.generate_all_output_tokens_static_batch( + active_requests + ) + + for request_id, request in requests.items(): + assert ( + request.status == Status.COMPLETED + ), f"Status should be completed but its {request.status}" + assert request.generated_length > 0, f"Generated length should be greater than zero" + assert request.generated_text is not None, "Generated text should not be None" + assert len(request.generated_log_probs) == request.generated_length + + def test_token_overflow(self): + self.setup_model(torch.float32) + + self.mock_tokenizer.vocab_size = self.vocab_size + self.mock_tokenizer.bos = 0 + self.mock_tokenizer.eod = self.vocab_size - 1 + self.mock_tokenizer.detokenize.side_effect = lambda x: ' '.join( + [ + ''.join(random.choices(string.ascii_letters, k=random.randint(4, 10))) + for _ in range(len(x)) + ] + ) + self.mock_tokenizer.offsets.side_effect = lambda _, s: [ + i for i, c in enumerate(s) if c == ' ' + ] + [len(s)] + + prompt = "" + active_requests: Dict[int, InferenceRequest] = OrderedDict() + for i in range(self.batch_size): + self.mock_tokenizer.tokenize.return_value = torch.randn( + self.batch_size, self.vocab_size + ).cuda() + inference_request = InferenceRequest( + request_id=i, + prompt=prompt, + inference_parameters=SamplingParams( + num_tokens_to_generate=4096, return_log_probs=True + ), + arrival_time=time.time(), + prompt_tokens=[self.mock_tokenizer.bos], + status=Status.ACTIVE_BUT_NOT_GENERATING_TOKENS, + ) + active_requests[i] = inference_request + + with pytest.raises(AssertionError): + requests = self.text_generation_controller.generate_all_output_tokens_static_batch( + active_requests + ) diff --git a/tests/unit_tests/inference/text_generation_controllers/test_vlm_text_generation_controller.py b/tests/unit_tests/inference/text_generation_controllers/test_vlm_text_generation_controller.py new file mode 100644 index 0000000..a9db3db --- /dev/null +++ b/tests/unit_tests/inference/text_generation_controllers/test_vlm_text_generation_controller.py @@ -0,0 +1,160 @@ +import copy 
+import os
+import random
+import string
+import time
+from argparse import Namespace
+from collections import OrderedDict
+from typing import Dict, List
+from unittest import mock
+
+import pytest
+import torch
+
+from megatron.core.inference.common_inference_params import CommonInferenceParams
+from megatron.core.inference.inference_request import InferenceRequest, Status, VLMInferenceRequest
+from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import (
+    InferenceWrapperConfig,
+)
+from megatron.core.inference.model_inference_wrappers.multimodal.vlm_inference_wrapper import (
+    VLMInferenceWrapper,
+)
+from megatron.core.inference.text_generation_controllers.vlm_text_generation_controller import (
+    VLMTextGenerationController,
+)
+from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec
+from megatron.core.models.multimodal.llava_model import LLaVAModel
+from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed
+from megatron.core.transformer.enums import AttnBackend
+from megatron.core.transformer.transformer_config import TransformerConfig
+from megatron.legacy.model import Float16Module
+from tests.unit_tests.test_utilities import Utils
+
+
+class TestVLMTextGenerationController:
+
+    @pytest.mark.internal  # The model is under active development and its methods may change.
+    def setup_method(self, method):
+        Utils.initialize_model_parallel(1, 1)
+        model_parallel_cuda_manual_seed(123)
+
+        self.language_hidden_size = 64
+        self.language_num_attention_heads = 4
+        self.language_vocab_size = 8192
+        self.language_max_sequence_length = 4096
+        self.img_h = 336
+        self.img_w = 336
+
+        language_config = TransformerConfig(
+            num_layers=3,
+            hidden_size=self.language_hidden_size,
+            num_attention_heads=self.language_num_attention_heads,
+            use_cpu_initialization=False,
+        )
+        vision_config = TransformerConfig(
+            num_layers=2, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False
+        )
+        vision_projection_config = TransformerConfig(
+            num_layers=2,
+            hidden_size=self.language_hidden_size,
+            ffn_hidden_size=32,
+            num_attention_heads=1,
+            use_cpu_initialization=False,
+        )
+
+        language_layer_spec = get_gpt_layer_local_spec()
+        vision_layer_spec = copy.deepcopy(language_layer_spec)
+        vision_projection_spec = copy.deepcopy(language_layer_spec.submodules.mlp.submodules)
+
+        language_config.language_model_type = "dummy"
+        vision_config.vision_model_type = "clip"
+        self.model = LLaVAModel(
+            language_transformer_config=language_config,
+            language_transformer_layer_spec=language_layer_spec,
+            language_vocab_size=self.language_vocab_size,
+            language_max_sequence_length=self.language_max_sequence_length,
+            vision_transformer_config=vision_config,
+            vision_transformer_layer_spec=vision_layer_spec,
+            drop_vision_class_token=False,
+            vision_projection_config=vision_projection_config,
+            vision_projection_layer_spec=vision_projection_spec,
+            img_h=self.img_h,
+            img_w=self.img_w,
+            patch_dim=14,
+        ).cuda()
+        self.image_token_index = self.model.image_token_index
+        self.model = Float16Module(self.model, Namespace(fp16=False, bf16=True))
+
+        inference_wrapper_config = InferenceWrapperConfig(
+            hidden_size=self.language_hidden_size,
+            inference_batch_times_seqlen_threshold=-1,
+            fp32_residual_connection=False,
+            params_dtype=torch.float,
+            padded_vocab_size=self.language_vocab_size,
+        )
+
+        inference_wrapped_model = VLMInferenceWrapper(self.model, inference_wrapper_config)
+
+        self.mock_tokenizer = mock.Mock()
+
+        self.text_generation_controller
= VLMTextGenerationController( + inference_wrapped_model=inference_wrapped_model, tokenizer=self.mock_tokenizer + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_generate_all_output_tokens_static_batch(self): + self.mock_tokenizer.vocab_size = self.language_vocab_size + self.mock_tokenizer.eod = self.language_vocab_size - 1 + self.mock_tokenizer.detokenize.return_value = ''.join( + random.choices(string.ascii_letters, k=random.randint(4, 10)) + ) + + batch_size: int = 1 + num_img_embeddings_per_tile: int = 576 + imgs: torch.Tensor = torch.randn(1, 3, self.img_h, self.img_w).cuda() + num_tiles: torch.Tensor = torch.Tensor([1]).int() + decoder_seq_length: int = self.language_max_sequence_length + + active_requests: Dict[str, InferenceRequest] = OrderedDict() + all_prompt_tokens: Dict[str, List[int]] = OrderedDict() + for i in range(batch_size): + prompt = "sample" * (i + 1) + self.mock_tokenizer.tokenize.return_value = torch.randn( + batch_size, self.language_vocab_size + ).cuda() + prompt_tokens = torch.randint( + low=0, high=self.language_vocab_size - 1, size=(len(prompt),) + ).tolist() + prompt_tokens[3] = self.image_token_index + + request_id = str(i) + inference_request = VLMInferenceRequest( + request_id=request_id, + prompt=prompt, + inference_parameters=CommonInferenceParams(num_tokens_to_generate=10), + arrival_time=time.time(), + prompt_tokens=prompt_tokens, + num_img_embeddings_per_tile=num_img_embeddings_per_tile, + imgs=imgs, + num_tiles=num_tiles, + decoder_seq_length=decoder_seq_length, + status=Status.ACTIVE_BUT_NOT_GENERATING_TOKENS, + ) + active_requests[request_id] = inference_request + all_prompt_tokens[request_id] = copy.deepcopy(prompt_tokens) + + requests = self.text_generation_controller.generate_all_output_tokens_static_batch( + active_requests + ) + + for request_id, request in requests.items(): + assert ( + request.status == Status.COMPLETED + ), f"Status should be completed but its {request.status}" + assert request.generated_length > 0, f"Generated length should be greater than zero" + assert request.generated_text is not None, "Generated text should not be None" + assert ( + all_prompt_tokens[request_id] == request.prompt_tokens + ), "Prompt tokens should not have changed during generation" diff --git a/tests/unit_tests/models/test_gpt_model.py b/tests/unit_tests/models/test_gpt_model.py index 4894c8e..9d8b94d 100644 --- a/tests/unit_tests/models/test_gpt_model.py +++ b/tests/unit_tests/models/test_gpt_model.py @@ -1,81 +1,118 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import os - -import pytest -import torch - -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.models.gpt.gpt_model import GPTModel -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestGPTModel: - - def setup_method(self, method): - os.environ.pop('NVTE_FUSED_ATTN', None) - os.environ.pop('NVTE_FLASH_ATTN', None) - os.environ.pop('NVTE_UNFUSED_ATTN', None) - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - self.gpt_model = GPTModel( - config=transformer_config, - transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), - vocab_size=100, - max_sequence_length=4, - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - def test_constructor(self): - assert isinstance(self.gpt_model, GPTModel) - - assert self.gpt_model.max_sequence_length == 4 - - num_weights = sum([p.numel() for p in self.gpt_model.parameters()]) - assert num_weights == 6240 - - @pytest.mark.internal - def test_set_input_tensor(self): - config: TransformerConfig = self.gpt_model.config - sequence_length = self.gpt_model.max_sequence_length - micro_batch_size = 2 - - # [sequence length, batch size, hidden size] - input_tensor = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - - self.gpt_model.set_input_tensor(input_tensor) - - assert self.gpt_model.decoder.input_tensor.shape[0] == sequence_length - assert self.gpt_model.decoder.input_tensor.shape[1] == micro_batch_size - assert self.gpt_model.decoder.input_tensor.shape[2] == config.hidden_size - - @pytest.mark.internal - def test_post_process_forward(self): - config: TransformerConfig = self.gpt_model.config - sequence_length = self.gpt_model.max_sequence_length - micro_batch_size = 2 - - self.gpt_model.cuda() - - data = list(range(sequence_length)) - input_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - position_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - attention_mask = torch.ones( - (micro_batch_size, 1, sequence_length, sequence_length), dtype=bool - ).cuda() - - logits = self.gpt_model.forward( - input_ids=input_ids, position_ids=position_ids, attention_mask=attention_mask - ) - - assert logits.shape[0] == micro_batch_size - assert logits.shape[1] == sequence_length - assert logits.shape[2] == self.gpt_model.vocab_size +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import inspect +import os + +import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_with_transformer_engine_spec, + get_mlp_module_spec, +) +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestGPTModel: + + def setup_method(self, method): + os.environ.pop('NVTE_FUSED_ATTN', None) + os.environ.pop('NVTE_FLASH_ATTN', None) + os.environ.pop('NVTE_UNFUSED_ATTN', None) + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + self.gpt_model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_with_transformer_engine_spec(), + vocab_size=100, + max_sequence_length=4, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + def test_constructor(self): + assert isinstance(self.gpt_model, GPTModel) + + assert self.gpt_model.max_sequence_length == 4 + + num_weights = sum([p.numel() for p in self.gpt_model.parameters()]) + assert num_weights == 6240 + + @pytest.mark.internal + def test_set_input_tensor(self): + config: TransformerConfig = self.gpt_model.config + sequence_length = self.gpt_model.max_sequence_length + micro_batch_size = 2 + + # [sequence length, batch size, hidden size] + input_tensor = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + + self.gpt_model.set_input_tensor(input_tensor) + + assert self.gpt_model.decoder.input_tensor.shape[0] == sequence_length + assert self.gpt_model.decoder.input_tensor.shape[1] == micro_batch_size + assert self.gpt_model.decoder.input_tensor.shape[2] == config.hidden_size + + @pytest.mark.internal + def test_post_process_forward(self): + _ = self.gpt_model.config + sequence_length = self.gpt_model.max_sequence_length + micro_batch_size = 2 + + self.gpt_model.cuda() + + data = list(range(sequence_length)) + input_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + position_ids = torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + attention_mask = torch.ones( + (micro_batch_size, 1, sequence_length, sequence_length), dtype=bool + ).cuda() + + logits = self.gpt_model.forward( + input_ids=input_ids, position_ids=position_ids, attention_mask=attention_mask + ) + + assert logits.shape[0] == micro_batch_size + assert logits.shape[1] == sequence_length + assert logits.shape[2] == self.gpt_model.vocab_size + + +def test_get_mlp_module_spec_interface(): + # Get the function signature + sig = inspect.signature(get_mlp_module_spec) + + # Define the expected signature + expected_params = { + "use_te": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "num_experts": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "moe_grouped_gemm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "fp8": inspect.Parameter.POSITIONAL_OR_KEYWORD, + "moe_use_legacy_grouped_gemm": inspect.Parameter.POSITIONAL_OR_KEYWORD, + } + + expected_defaults = { + "use_te": True, + "num_experts": None, + "moe_grouped_gemm": False, + "fp8": None, + "moe_use_legacy_grouped_gemm": False, + } + + # Check parameter kinds + for param_name, param in sig.parameters.items(): + assert param_name in expected_params.keys(), f"Unexpected parameter: {param_name}" 
+ assert param.kind is expected_params[param_name], f"Wrong kind for parameter: {param_name}" + + # Check default values + defaults = { + k: v.default for k, v in sig.parameters.items() if v.default is not inspect.Parameter.empty + } + assert defaults == expected_defaults, "Default values do not match the expected ones." diff --git a/tests/unit_tests/models/test_llava_model.py b/tests/unit_tests/models/test_llava_model.py index d067288..8de9c88 100644 --- a/tests/unit_tests/models/test_llava_model.py +++ b/tests/unit_tests/models/test_llava_model.py @@ -1,897 +1,1019 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from copy import deepcopy -from types import SimpleNamespace - -import pytest -import torch - -from megatron.core import InferenceParams -from megatron.core import parallel_state as ps -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.models.multimodal.llava_model import LLaVAModel -from megatron.core.models.vision.vit_layer_specs import get_vit_layer_with_transformer_engine_spec -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import is_te_min_version -from megatron.training.global_vars import set_args -from tests.unit_tests.test_utilities import Utils - - -class TestLLaVAModel: - @pytest.mark.internal # The model is under active development and its methods may change. - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - - self.language_hidden_size = 64 - self.language_num_attention_heads = 4 - - language_config = TransformerConfig( - num_layers=3, - hidden_size=self.language_hidden_size, - num_attention_heads=self.language_num_attention_heads, - use_cpu_initialization=False, - ) - vision_config = TransformerConfig( - num_layers=2, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False - ) - vision_projection_config = TransformerConfig( - num_layers=2, - hidden_size=self.language_hidden_size, - ffn_hidden_size=32, - num_attention_heads=1, - use_cpu_initialization=False, - ) - - language_layer_spec = get_gpt_layer_with_transformer_engine_spec() - vision_layer_spec = deepcopy(language_layer_spec) - vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) - - vision_config.vision_model_type = "clip" - self.model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_layer_spec, - language_vocab_size=8192, - language_max_sequence_length=4096, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_layer_spec, - drop_vision_class_token=False, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_spec, - img_h=336, - img_w=336, - patch_dim=14, - ) - - @pytest.mark.internal - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - def test_constructor(self): - assert isinstance(self.model, LLaVAModel) - - num_weights = sum([p.numel() for p in self.model.parameters()]) - assert num_weights == 1488736 - - @pytest.mark.internal - def test_set_input_tensor(self): - expected_shape = (1, 2, 3, 4) - input_tensor = torch.zeros(expected_shape) - self.model.set_input_tensor(input_tensor) - assert 
self.model.vision_model.decoder.input_tensor.shape == expected_shape - - @pytest.mark.internal - def test_preprocess_data(self): - self.model.cuda() - - hidden_size = 72 - - # 3 images with 1 tile and 2 image with 2 tiles = 7 tiles. - image_embeddings = ( - torch.arange(577 * 7 * hidden_size, dtype=torch.float) - .reshape(577, 7, hidden_size) - .cuda() - ) - - image_token_index = self.model.image_token_index - input_ids = torch.arange(1024).expand(5, 1024).cuda() - input_ids[0, 0] = image_token_index # image before text - input_ids[1, 100] = image_token_index # image in between - input_ids[2, -1] = image_token_index # image at the end - # input_ids[3] - no image - input_ids[4, 50] = image_token_index # two images in between - input_ids[4, 150] = image_token_index - - # Using negative sign to distinguish from image embeddings. - language_embeddings = ( - -torch.arange(5 * 1024 * hidden_size, dtype=torch.float) - .reshape(5, 1024, hidden_size) - .cuda() - ) - - # Labels are input_ids shifted to left by one. - labels = torch.arange(1, 1025, dtype=torch.int).expand(5, 1024).cuda() - # labels[0] - image token got dropped by shift to left by one. - labels[1, 99] = image_token_index - labels[2, -2] = image_token_index - # labels[3] - no image. - labels[4, 49] = image_token_index - labels[4, 149] = image_token_index - - loss_mask = torch.ones((5, 1024), dtype=torch.float).cuda() - # Mask some text inputs (the text mask should carry over) - loss_mask[:2, :10] = 0.0 - loss_mask[:2, 110:120] = 0.0 - - # Number of tiles for each image in the batch. - num_image_tiles = torch.tensor([1, 2, 1, 2, 1], dtype=torch.int).cuda() - - use_inference_kv_cache = False - inference_params = None - image_token_mask = None - - embeddings, labels, loss_mask = self.model._preprocess_data( - image_embeddings, - language_embeddings, - input_ids, - loss_mask, - labels, - use_inference_kv_cache, - inference_params, - image_token_index, - num_image_tiles, - image_token_mask, - ) - - img_seq_len = 577 - # The fifth sample has 2 images with 3 tiles and 1024 text tokens. - max_seq_len = 3 * img_seq_len - 2 + 1024 - - assert embeddings.shape == torch.Size((max_seq_len, 5, hidden_size)) - assert labels.shape == torch.Size((5, max_seq_len)) - assert loss_mask.shape == labels.shape - - # First sample where image is before text (index 0). - expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() - expected_embeddings[:577] = image_embeddings[:, 0] - expected_embeddings[577:1600] = language_embeddings[0, 1:] - expected_embeddings[1600:] = 0 # padding - - expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() - expected_labels[:576] = -100 # image - expected_labels[576:1600] = torch.arange(1, 1025, dtype=torch.int) - expected_labels[1600:] = -100 # padding - - expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() - expected_loss_mask[:577] = 0 - expected_loss_mask[577:586] = 0 - expected_loss_mask[586:686] = 1 - expected_loss_mask[686:696] = 0 - expected_loss_mask[696:1600] = 1 - expected_loss_mask[1600:] = 0 - - assert torch.allclose(embeddings[:, 0], expected_embeddings) - assert torch.allclose(labels[0], expected_labels) - assert torch.allclose(loss_mask[0], expected_loss_mask) - - # Second sample where image is in between (index 100). The image has 2 tiles. 
- expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() - expected_embeddings[:100] = language_embeddings[1, :100] - expected_embeddings[100:677] = image_embeddings[:, 1] - expected_embeddings[677:1254] = image_embeddings[:, 2] - expected_embeddings[1254:2177] = language_embeddings[1, 101:] - expected_embeddings[2177:] = 0 # padding - - expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() - expected_labels[:99] = torch.arange(1, 100) - expected_labels[99:1253] = -100 # image - expected_labels[1253:2177] = torch.arange(101, 1025) - expected_labels[2177:] = -100 # padding - - expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() - expected_loss_mask[:10] = 0 - expected_loss_mask[10:99] = 1 - # Last text position before the image is not required to predict the first image embedding. - expected_loss_mask[99] = 0 - expected_loss_mask[100:1254] = 0 - expected_loss_mask[1254:1263] = 1 - expected_loss_mask[1263:1273] = 0 - expected_loss_mask[1273:2177] = 1 - expected_loss_mask[2177:] = 0 # padding - - assert torch.allclose(embeddings[:, 1], expected_embeddings) - assert torch.allclose(labels[1], expected_labels) - assert torch.allclose(loss_mask[1], expected_loss_mask) - - # Third sample where image is at the end. - expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() - expected_embeddings[:1023] = language_embeddings[2, :1023] - expected_embeddings[1023:1600] = image_embeddings[:, 3] - expected_embeddings[1600:] = 0 # padding - - expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() - expected_labels[:1022] = torch.arange(1, 1023) - expected_labels[1022:1599] = -100 - expected_labels[1599] = 1024 - expected_labels[1600:] = -100 # padding - - expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() - expected_loss_mask[:1022] = 1 - # Last text position before the image is not required to predict the first image embedding. - expected_loss_mask[1022] = 0 - expected_loss_mask[1023:1600] = 0 - expected_loss_mask[1600:] = 0 # padding - - assert torch.allclose(embeddings[:, 2], expected_embeddings) - assert torch.allclose(labels[2], expected_labels) - assert torch.allclose(loss_mask[2], expected_loss_mask) - - # Fourth sample where there is no image. - expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() - expected_embeddings[:1024] = language_embeddings[3] - expected_embeddings[1024:] = 0 # padding - - expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() - expected_labels[:1024] = torch.arange(1, 1025) - expected_labels[1024:] = -100 # padding - - expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() - expected_loss_mask[:1024] = 1 - expected_loss_mask[1024:] = 0 # padding - - assert torch.allclose(embeddings[:, 3], expected_embeddings) - assert torch.allclose(labels[3], expected_labels) - assert torch.allclose(loss_mask[3], expected_loss_mask) - - # Fifth sample has two images in between (indices 50 and 150). The first image has two tiles. 
- expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() - expected_embeddings[:50] = language_embeddings[4, :50] - expected_embeddings[50:627] = image_embeddings[:, 4] # two tiles - expected_embeddings[627:1204] = image_embeddings[:, 5] - expected_embeddings[1204:1303] = language_embeddings[4, 51:150] - expected_embeddings[1303:1880] = image_embeddings[:, 6] - expected_embeddings[1880:] = language_embeddings[4, 151:] - - expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() - expected_labels[:49] = torch.arange(1, 50) - expected_labels[49:1203] = -100 # image - expected_labels[1203:1302] = torch.arange(51, 150) - expected_labels[1302:1879] = -100 # image - expected_labels[1879:] = torch.arange(151, 1025) - - expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() - expected_loss_mask[:49] = 1 - expected_loss_mask[49:1204] = 0 - expected_loss_mask[1204:1302] = 1 - expected_loss_mask[1302:1880] = 0 - expected_loss_mask[1880:] = 1 - - assert torch.allclose(embeddings[:, 4], expected_embeddings) - assert torch.allclose(labels[4], expected_labels) - assert torch.allclose(loss_mask[4], expected_loss_mask) - - @pytest.mark.internal - def test_forward(self): - self.model.cuda() - - # 3 images with 1 tile and 2 images with 2 tiles. - img = torch.randn((7, 3, 336, 336)).cuda() - - image_token_index = self.model.image_token_index - input_ids = torch.randint(0, 2048, (5, 1024)).cuda() - input_ids[0, 0] = image_token_index # image before text - input_ids[1, 100] = image_token_index # image in between - input_ids[2, -1] = image_token_index # image at the end - # input_ids[3] - no image - input_ids[4, 50] = image_token_index - input_ids[4, 150] = image_token_index - - position_ids = torch.arange(0, 1024, dtype=torch.int).expand(5, 1024).cuda() - - loss_mask = torch.ones((5, 1024)).cuda() - - attention_mask = None # Causal. - - labels = torch.randint(0, 2048, (5, 1024)).cuda() - labels[1, 99] = image_token_index - labels[2, -2] = image_token_index - - num_image_tiles = torch.tensor([1, 2, 1, 2, 1], dtype=torch.int).cuda() - - # Try with labels. - loss, new_loss_mask = self.model.forward( - img, - input_ids, - position_ids, - attention_mask, - labels, - loss_mask, - num_image_tiles=num_image_tiles, - ) - - # The maximum sequence length is given by the sample with 2 images in 3 tiles, minus two image token indices, plus other text tokens. - img_seq_len = 577 - max_seq_len = img_seq_len * 3 - 2 + 1024 - assert loss.shape == new_loss_mask.shape == torch.Size((5, max_seq_len)) - - # Try with labels and PackedSeqParams. Only micro batch size 1 is supported in this mode. - packed_seq_params = PackedSeqParams( - qkv_format="thd", - cu_seqlens_q=[0, 512, 1024, 1600], # Just example values. - cu_seqlens_kv=[0, 512, 1024, 1600], - max_seqlen_q=[1600], - max_seqlen_kv=[1600], - ) - - loss, new_loss_mask = self.model.forward( - img[:1], - input_ids[:1], - position_ids[:1], - attention_mask, - labels[:1], - loss_mask[:1], - num_image_tiles=num_image_tiles[:1], - ) - - # 1600 = 577 (img_seq_len) + 1024 (text tokens in the first sample) - 1 (image token). - assert loss.shape == new_loss_mask.shape == torch.Size((1, 1600)) - - # Try text-only input. 
- loss, new_loss_mask = self.model.forward( - torch.tensor([], dtype=torch.float).cuda(), - torch.randint(0, 2048, (5, 1024)).cuda(), - position_ids, - attention_mask, - torch.randint(0, 2048, (5, 1024)).cuda(), - loss_mask, - num_image_tiles=torch.tensor([], dtype=torch.int).cuda(), - ) - - assert loss.shape == new_loss_mask.shape == torch.Size((5, 1024)) - - # Try without labels and without inference params. - logits, _ = self.model.forward( - img, - input_ids, - position_ids, - attention_mask, - labels=None, - loss_mask=None, - num_image_tiles=num_image_tiles, - ) - assert logits.shape == torch.Size((5, max_seq_len, 8192)) - - # Try without labels and with inference params. - inference_params = InferenceParams(5, max_seq_len) - logits, _ = self.model.forward( - img, - input_ids, - position_ids, - attention_mask, - labels=None, - loss_mask=None, - num_image_tiles=num_image_tiles, - inference_params=inference_params, - ) - assert logits.shape == torch.Size((5, max_seq_len, 8192)) - - # Check KV cache got populated correctly. - kv_dict = inference_params.key_value_memory_dict - - assert kv_dict["image_tokens_count"] == 577 * 7 - for layer_no in range(1, 4): # 3 layers in the model. - layer_kv = kv_dict[layer_no] - # Expected shape is [sequence_len, batch_size, num_heads, hidden_size_per_head] - assert ( - layer_kv[0].shape - == layer_kv[1].shape - == torch.Size((max_seq_len, 5, self.language_num_attention_heads, 16)) - ) - - @pytest.mark.internal - def test_save_load(self, tmp_path): - path = tmp_path / "model.pt" - torch.save(self.model.state_dict(), path) - - self.model.load_state_dict(torch.load(path)) - - @pytest.mark.internal - def test_freeze(self): - self.model.freeze( - freeze_language_model=True, freeze_vision_model=True, freeze_vision_projection=False - ) - - for module in [self.model.language_model, self.model.vision_model]: - for param in module.parameters(): - assert not param.requires_grad - - for param in self.model.vision_projection.parameters(): - assert param.requires_grad - - -class TestLLaVAModelSigLIP: - @pytest.mark.internal # The model is under active development and its methods may change. 
- def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - - language_config = TransformerConfig( - num_layers=3, hidden_size=128, num_attention_heads=8, use_cpu_initialization=False - ) - vision_config = TransformerConfig( - num_layers=2, hidden_size=64, num_attention_heads=4, use_cpu_initialization=False - ) - vision_projection_config = TransformerConfig( - num_layers=2, - hidden_size=128, - ffn_hidden_size=72, - num_attention_heads=1, - use_cpu_initialization=False, - ) - - language_layer_spec = get_gpt_layer_with_transformer_engine_spec() - vision_layer_spec = deepcopy(language_layer_spec) - vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) - - vision_config.vision_model_type = "siglip" - self.model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_layer_spec, - language_vocab_size=2048, - language_max_sequence_length=4096, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_layer_spec, - drop_vision_class_token=False, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_spec, - img_h=336, - img_w=336, - patch_dim=14, - ) - - @pytest.mark.internal - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - def test_constructor(self): - assert isinstance(self.model, LLaVAModel) - - num_weights = sum([p.numel() for p in self.model.parameters()]) - assert num_weights == 1832456 - - @pytest.mark.internal - def test_set_input_tensor(self): - expected_shape = (1, 2, 3, 4) - input_tensor = torch.zeros(expected_shape) - self.model.set_input_tensor(input_tensor) - assert self.model.vision_model.decoder.input_tensor.shape == expected_shape - - -def create_test_args(cp_size, sequence_parallel): - # Set dummy values for the args. 
- args = SimpleNamespace() - args.context_parallel_size = cp_size - args.sequence_parallel = sequence_parallel - - return args - - -class TestLLaVAModelTokenParallel: - - def init_llava_model(self): - self.language_hidden_size = 64 - self.language_num_attention_heads = 16 - - language_config = TransformerConfig( - num_layers=3, - hidden_size=self.language_hidden_size, - num_attention_heads=self.language_num_attention_heads, - use_cpu_initialization=False, - tensor_model_parallel_size=self.tp_size, - sequence_parallel=self.sequence_parallel, - context_parallel_size=1, # Init with CP=1 until CI catches up to TEv1.10 - # context_parallel_size=self.cp_size, - ) - # SP and CP are not yet supported for the Vision Backbone - vision_config = TransformerConfig( - num_layers=2, - hidden_size=16, - num_attention_heads=8, - use_cpu_initialization=False, - tensor_model_parallel_size=self.tp_size, - sequence_parallel=False, - context_parallel_size=1, - ) - vision_projection_config = TransformerConfig( - num_layers=2, - hidden_size=self.language_hidden_size, - ffn_hidden_size=1024, - num_attention_heads=8, - use_cpu_initialization=False, - tensor_model_parallel_size=self.tp_size, - sequence_parallel=False, - context_parallel_size=1, - ) - - language_layer_spec = get_gpt_layer_with_transformer_engine_spec() - # SP/CP either requires user to ensure token lengths do not require padding OR change mask type to padding - if ( - language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') - == AttnMaskType.causal - ): - language_layer_spec.submodules.self_attention.params['attn_mask_type'] = ( - AttnMaskType.padding_causal - ) - elif ( - language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '') - == AttnMaskType.no_mask - ): - language_layer_spec.submodules.self_attention.params['attn_mask_type'] = ( - AttnMaskType.padding - ) - - vision_layer_spec = deepcopy(language_layer_spec) - vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) - - vision_config.vision_model_type = "clip" - self.model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_layer_spec, - language_vocab_size=8192, - language_max_sequence_length=4096, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_layer_spec, - drop_vision_class_token=False, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_spec, - img_h=336, - img_w=336, - patch_dim=14, - ) - - @pytest.mark.internal # The model is under active development and its methods may change. 
- def setup_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - @pytest.mark.parametrize( - "cp_size,tp_size,sequence_parallel", [(1, 8, True), (2, 4, False), (2, 4, True)] - ) - def test_process_embedding_token_parallel(self, cp_size, tp_size, sequence_parallel): - self.cp_size = cp_size - self.tp_size = tp_size - self.sequence_parallel = sequence_parallel - Utils.initialize_model_parallel( - tensor_model_parallel_size=self.tp_size, context_parallel_size=self.cp_size - ) - model_parallel_cuda_manual_seed(123) - - self.init_llava_model() - self.model.cuda() - # Setting CP size for LLM here as model init is done with CP=1 to - # avoid TE version check until CI catches up to TEv1.10 - if self.cp_size > 1: - self.model.context_parallel_lm = self.cp_size - - args = create_test_args(self.cp_size, self.sequence_parallel) - set_args(args) - - batch_size = 2 - combined_valid_seqlen = 2049 - combined_padded_seqlen = 2056 - if self.cp_size > 1: - combined_embeddings = torch.ones( - [batch_size, combined_padded_seqlen, 4096], device='cuda', dtype=torch.bfloat16 - ) # [B, S, H] - else: - combined_embeddings = torch.ones( - [combined_padded_seqlen, batch_size, 4096], device='cuda', dtype=torch.bfloat16 - ) # [S, B, H] - new_labels = torch.ones( - [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 - ) # [B, S] - new_loss_mask = torch.ones( - [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 - ) # [B, S] - - cu_seqlens = torch.arange( - 0, - (batch_size + 1) * (combined_valid_seqlen), - step=(combined_valid_seqlen), - dtype=torch.int32, - device=combined_embeddings.device, - ) - cu_seqlens_padded = torch.arange( - 0, - (batch_size + 1) * (combined_padded_seqlen), - step=(combined_padded_seqlen), - dtype=torch.int32, - device=combined_embeddings.device, - ) - - packed_seq_params = PackedSeqParams( - cu_seqlens_q=cu_seqlens, - cu_seqlens_kv=cu_seqlens, - cu_seqlens_q_padded=cu_seqlens_padded, - cu_seqlens_kv_padded=cu_seqlens_padded, - max_seqlen_q=combined_padded_seqlen, - max_seqlen_kv=combined_padded_seqlen, - qkv_format='thd', - ) - - combined_embeddings, new_labels, new_loss_mask, packed_seq_params = ( - self.model._process_embedding_token_parallel( - combined_embeddings, new_labels, new_loss_mask, packed_seq_params - ) - ) - - # Calculate the expected padded seq length - if self.cp_size > 1 and self.sequence_parallel: - padding_factor = self.tp_size * self.cp_size * 2 - elif self.cp_size > 1: - padding_factor = self.cp_size * 2 - elif self.sequence_parallel: - padding_factor = self.tp_size - - padded_seq_len = int( - (combined_padded_seqlen + (padding_factor - 1)) // padding_factor * padding_factor - ) - - # Check if output shape is as expected - if self.cp_size > 1 and self.sequence_parallel: - # THD format - assert combined_embeddings.shape[0] == batch_size * ( - padded_seq_len / (self.tp_size * self.cp_size) - ) - assert combined_embeddings.shape[1] == 1 - elif self.cp_size > 1: - # THD format - assert combined_embeddings.shape[0] == batch_size * (padded_seq_len / self.cp_size) - assert combined_embeddings.shape[1] == 1 - else: - # SBHD format - assert combined_embeddings.shape[0] == padded_seq_len / self.tp_size - assert combined_embeddings.shape[1] == batch_size - - -def count_parameters(model): - return sum(p.numel() for p in model.parameters()) - - -@pytest.mark.internal # The model is under active development and its 
methods may change. -@pytest.mark.parametrize( - 'dtp, dpp, etp, epp', [(1, 1, 1, 0), (1, 1, 1, 1), (2, 1, 2, 0), (2, 3, 2, 1), (2, 4, 2, 0)] -) -def test_llava_model_parallelism(dtp, dpp, etp, epp): - """ - The purpose of this test is to check that vit, vision projection and lm layer - counts across tensor and pipeline parallel ranks match the counts in the - non-model-parallel case, i.e. tp==1, pp==1, etp==1, epp==0 - """ - - language_hidden_size = 64 - language_num_attention_heads = 4 - - # First initialize a single GPU model to get baseline parameter and layer counts - Utils.initialize_model_parallel( - tensor_model_parallel_size=1, - pipeline_model_parallel_size=1, - encoder_tensor_model_parallel_size=1, - encoder_pipeline_model_parallel_size=0, - ) - model_parallel_cuda_manual_seed(123) - - language_config = TransformerConfig( - num_layers=8, - hidden_size=language_hidden_size, - num_attention_heads=language_num_attention_heads, - use_cpu_initialization=False, - ) - language_config.tensor_model_parallel_size = dtp - language_config.pipeline_model_parallel_size = dpp - - vision_config = TransformerConfig( - num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False - ) - vision_config.tensor_model_parallel_size = etp - vision_config.pipeline_model_parallel_size = 1 - - vision_projection_config = TransformerConfig( - num_layers=2, - hidden_size=language_hidden_size, - ffn_hidden_size=32, - num_attention_heads=1, - use_cpu_initialization=False, - ) - vision_projection_config.tensor_model_parallel_size = etp - vision_projection_config.pipeline_model_parallel_size = 1 - - language_layer_spec = get_gpt_layer_with_transformer_engine_spec() - vision_layer_spec = get_vit_layer_with_transformer_engine_spec() - vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) - - vision_config.vision_model_type = "clip" - non_parallel_model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_layer_spec, - language_vocab_size=8192, - language_max_sequence_length=4096, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_layer_spec, - drop_vision_class_token=False, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_spec, - img_h=336, - img_w=336, - patch_dim=14, - ) - - base_vit_params = sum(p.numel() for p in non_parallel_model.vision_model.parameters()) - base_proj_params = sum(p.numel() for p in non_parallel_model.vision_projection.parameters()) - - base_vit_layers = len(non_parallel_model.vision_model.decoder.layers) - - Utils.destroy_model_parallel() - - # Next initialize a model parallel version to get test parameter and layer counts - Utils.initialize_model_parallel( - tensor_model_parallel_size=dtp, - pipeline_model_parallel_size=dpp, - encoder_tensor_model_parallel_size=etp, - encoder_pipeline_model_parallel_size=epp, - ) - model_parallel_cuda_manual_seed(123) - - pp_rank = ps.get_pipeline_model_parallel_rank() - pp_world_size = ps.get_pipeline_model_parallel_world_size() - tp_world_size = ps.get_tensor_model_parallel_world_size() - - pre_process = True if (pp_rank == 0 or (pp_rank == 1 and epp == 1)) else False - post_process = ( - True if ((pp_rank == 0 and epp == 1) or (pp_rank == pp_world_size - 1)) else False - ) - add_encoder = True if pp_rank == 0 else False - add_decoder = False if (pp_rank == 0 and epp == 1) else True - - language_config = TransformerConfig( - num_layers=8, - hidden_size=language_hidden_size, - 
num_attention_heads=language_num_attention_heads, - use_cpu_initialization=False, - ) - language_config.tensor_model_parallel_size = dtp - language_config.pipeline_model_parallel_size = dpp - - vision_config = TransformerConfig( - num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False - ) - vision_config.tensor_model_parallel_size = etp - vision_config.pipeline_model_parallel_size = 1 - - vision_projection_config = TransformerConfig( - num_layers=2, - hidden_size=language_hidden_size, - ffn_hidden_size=32, - num_attention_heads=1, - use_cpu_initialization=False, - ) - vision_projection_config.tensor_model_parallel_size = etp - vision_projection_config.pipeline_model_parallel_size = 1 - - language_layer_spec = get_gpt_layer_with_transformer_engine_spec() - vision_layer_spec = get_vit_layer_with_transformer_engine_spec() - vision_projection_spec = deepcopy(vision_layer_spec.submodules.mlp.submodules) - - vision_config.vision_model_type = "clip" - model = LLaVAModel( - language_transformer_config=language_config, - language_transformer_layer_spec=language_layer_spec, - language_vocab_size=8192, - language_max_sequence_length=4096, - vision_transformer_config=vision_config, - vision_transformer_layer_spec=vision_layer_spec, - drop_vision_class_token=False, - vision_projection_config=vision_projection_config, - vision_projection_layer_spec=vision_projection_spec, - img_h=336, - img_w=336, - patch_dim=14, - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder, - ) - - if epp == 1: - if pp_rank == 0: - # should be in a etp sized tp group - assert tp_world_size == etp - # there should only be a single pipeline rank - assert pp_world_size == epp + dpp - # should not be inside decoder - assert not ps.is_inside_decoder() - # should be inside encoder - assert ps.is_inside_encoder() - elif pp_rank != 0: - # non-encoder ranks should be in a dtp sized tp group - assert tp_world_size == dtp - # check we're inside the decoder - assert ps.is_inside_decoder() - # check we're not inside the encoder - assert not ps.is_inside_encoder() - elif epp == 0: - if pp_rank == 0: - # check we're inside the encoder and decoder - assert ps.is_inside_encoder() - assert ps.is_inside_decoder() - elif pp_rank != 0: - # check we're inside the decoder only and there's no vision_model - assert not ps.is_inside_encoder() - assert ps.is_inside_decoder() - assert model.vision_model is None - assert model.vision_projection is None - - if ps.is_inside_encoder(): - # Check num vit layers - epp > 1 not supported - test_vit_layers = len([p for p in model.vision_model.decoder.layers]) - assert test_vit_layers == base_vit_layers - - # Check all vit params are present - test_vit_tp_params = sum( - [ - p.numel() - for p in model.vision_model.parameters() - if hasattr(p, 'tensor_model_parallel') - ] - ) - test_vit_non_tp_params = sum( - [ - p.numel() - for p in model.vision_model.parameters() - if not hasattr(p, 'tensor_model_parallel') - ] - ) - group = ps.get_tensor_model_parallel_group() - test_vit_params_tensor = torch.tensor([test_vit_tp_params], dtype=torch.int32).cuda() - torch.distributed.all_reduce( - test_vit_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group - ) - total_test_vit_tp_params = test_vit_params_tensor.item() - assert total_test_vit_tp_params + test_vit_non_tp_params == base_vit_params - - # Check all vision projection params are present - test_proj_tp_params = sum( - [ - p.numel() - for p in model.vision_projection.parameters() 
- if hasattr(p, 'tensor_model_parallel') - ] - ) - test_proj_non_tp_params = sum( - [ - p.numel() - for p in model.vision_projection.parameters() - if not hasattr(p, 'tensor_model_parallel') - ] - ) - test_proj_params_tensor = torch.tensor([test_proj_tp_params], dtype=torch.int32).cuda() - torch.distributed.all_reduce( - test_proj_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group - ) - total_test_proj_tp_params = test_proj_params_tensor.item() - assert total_test_proj_tp_params + test_proj_non_tp_params == base_proj_params - else: - # check ranks that aren't inside encoder have no vit - assert model.vision_model is None - assert model.vision_projection is None - - Utils.destroy_model_parallel() - torch.cuda.empty_cache() +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. +from contextlib import nullcontext +from copy import deepcopy +from types import SimpleNamespace + +import pytest +import torch + +from megatron.core import InferenceParams +from megatron.core import parallel_state as ps +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.multimodal import context_parallel +from megatron.core.models.multimodal.llava_model import LLaVAModel +from megatron.core.models.vision.vit_layer_specs import get_vit_layer_with_transformer_engine_spec +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import is_te_min_version +from megatron.training.global_vars import set_args +from tests.unit_tests.test_utilities import Utils + + +class TestLLaVAModel: + @pytest.mark.internal # The model is under active development and its methods may change. 
+    def setup_method(self, method):
+        Utils.initialize_model_parallel(1, 1)
+        model_parallel_cuda_manual_seed(123)
+
+        self.language_hidden_size = 64
+        self.language_num_attention_heads = 4
+
+        language_config = TransformerConfig(
+            num_layers=3,
+            hidden_size=self.language_hidden_size,
+            num_attention_heads=self.language_num_attention_heads,
+            use_cpu_initialization=False,
+        )
+        vision_config = TransformerConfig(
+            num_layers=2, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False
+        )
+        vision_projection_config = TransformerConfig(
+            num_layers=2,
+            hidden_size=self.language_hidden_size,
+            ffn_hidden_size=32,
+            num_attention_heads=1,
+            use_cpu_initialization=False,
+        )
+
+        language_layer_spec = get_gpt_layer_with_transformer_engine_spec()
+        vision_layer_spec = deepcopy(language_layer_spec)
+        vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules)
+
+        language_config.language_model_type = "dummy"
+        vision_config.vision_model_type = "clip"
+        self.model = LLaVAModel(
+            language_transformer_config=language_config,
+            language_transformer_layer_spec=language_layer_spec,
+            language_vocab_size=8192,
+            language_max_sequence_length=4096,
+            vision_transformer_config=vision_config,
+            vision_transformer_layer_spec=vision_layer_spec,
+            drop_vision_class_token=False,
+            vision_projection_config=vision_projection_config,
+            vision_projection_layer_spec=vision_projection_spec,
+            img_h=336,
+            img_w=336,
+            patch_dim=14,
+        )
+
+    @pytest.mark.internal
+    def teardown_method(self, method):
+        Utils.destroy_model_parallel()
+
+    @pytest.mark.internal
+    def test_constructor(self):
+        assert isinstance(self.model, LLaVAModel)
+
+        num_weights = sum([p.numel() for p in self.model.parameters()])
+        assert num_weights == 1488736
+
+    @pytest.mark.internal
+    def test_set_input_tensor(self):
+        expected_shape = (1, 2, 3, 4)
+        input_tensor = torch.zeros(expected_shape)
+        self.model.set_input_tensor(input_tensor)
+        assert self.model.vision_model.decoder.input_tensor.shape == expected_shape
+
+    @pytest.mark.internal
+    def test_preprocess_data(self):
+        self.model.cuda()
+
+        hidden_size = 72
+
+        # 3 images with 1 tile and 2 images with 2 tiles = 7 tiles.
+        image_embeddings = (
+            torch.arange(577 * 7 * hidden_size, dtype=torch.float)
+            .reshape(577, 7, hidden_size)
+            .cuda()
+        )
+
+        image_token_index = self.model.image_token_index
+        input_ids = torch.arange(1024).expand(5, 1024).cuda()
+        input_ids[0, 0] = image_token_index  # image before text
+        input_ids[1, 100] = image_token_index  # image in between
+        input_ids[2, -1] = image_token_index  # image at the end
+        # input_ids[3] - no image
+        input_ids[4, 50] = image_token_index  # two images in between
+        input_ids[4, 150] = image_token_index
+
+        # Using negative sign to distinguish from image embeddings.
+        language_embeddings = (
+            -torch.arange(5 * 1024 * hidden_size, dtype=torch.float)
+            .reshape(5, 1024, hidden_size)
+            .cuda()
+        )
+
+        # Labels are input_ids shifted to the left by one.
+        labels = torch.arange(1, 1025, dtype=torch.int).expand(5, 1024).cuda()
+        # labels[0] - image token got dropped by the shift to the left by one.
+        labels[1, 99] = image_token_index
+        labels[2, -2] = image_token_index
+        # labels[3] - no image.
+        labels[4, 49] = image_token_index
+        labels[4, 149] = image_token_index
+
+        loss_mask = torch.ones((5, 1024), dtype=torch.float).cuda()
+        # Mask some text inputs (the text mask should carry over)
+        loss_mask[:2, :10] = 0.0
+        loss_mask[:2, 110:120] = 0.0
+
+        # Number of tiles for each image in the batch.
+ num_image_tiles = torch.tensor([1, 2, 1, 2, 1], dtype=torch.int).cuda() + + use_inference_kv_cache = False + inference_params = None + + embeddings, labels, loss_mask = self.model._preprocess_data( + image_embeddings, + language_embeddings, + input_ids, + loss_mask, + labels, + use_inference_kv_cache, + inference_params, + image_token_index, + num_image_tiles, + ) + + img_seq_len = 577 + # The fifth sample has 2 images with 3 tiles and 1024 text tokens. + max_seq_len = 3 * img_seq_len - 2 + 1024 + + assert embeddings.shape == torch.Size((max_seq_len, 5, hidden_size)) + assert labels.shape == torch.Size((5, max_seq_len)) + assert loss_mask.shape == labels.shape + + # First sample where image is before text (index 0). + expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() + expected_embeddings[:577] = image_embeddings[:, 0] + expected_embeddings[577:1600] = language_embeddings[0, 1:] + expected_embeddings[1600:] = 0 # padding + + expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() + expected_labels[:576] = -100 # image + expected_labels[576:1600] = torch.arange(1, 1025, dtype=torch.int) + expected_labels[1600:] = -100 # padding + + expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() + expected_loss_mask[:577] = 0 + expected_loss_mask[577:586] = 0 + expected_loss_mask[586:686] = 1 + expected_loss_mask[686:696] = 0 + expected_loss_mask[696:1600] = 1 + expected_loss_mask[1600:] = 0 + + assert torch.allclose(embeddings[:, 0], expected_embeddings) + assert torch.allclose(labels[0], expected_labels) + assert torch.allclose(loss_mask[0], expected_loss_mask) + + # Second sample where image is in between (index 100). The image has 2 tiles. + expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() + expected_embeddings[:100] = language_embeddings[1, :100] + expected_embeddings[100:677] = image_embeddings[:, 1] + expected_embeddings[677:1254] = image_embeddings[:, 2] + expected_embeddings[1254:2177] = language_embeddings[1, 101:] + expected_embeddings[2177:] = 0 # padding + + expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() + expected_labels[:99] = torch.arange(1, 100) + expected_labels[99:1253] = -100 # image + expected_labels[1253:2177] = torch.arange(101, 1025) + expected_labels[2177:] = -100 # padding + + expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() + expected_loss_mask[:10] = 0 + expected_loss_mask[10:99] = 1 + # Last text position before the image is not required to predict the first image embedding. + expected_loss_mask[99] = 0 + expected_loss_mask[100:1254] = 0 + expected_loss_mask[1254:1263] = 1 + expected_loss_mask[1263:1273] = 0 + expected_loss_mask[1273:2177] = 1 + expected_loss_mask[2177:] = 0 # padding + + assert torch.allclose(embeddings[:, 1], expected_embeddings) + assert torch.allclose(labels[1], expected_labels) + assert torch.allclose(loss_mask[1], expected_loss_mask) + + # Third sample where image is at the end. 
+ expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() + expected_embeddings[:1023] = language_embeddings[2, :1023] + expected_embeddings[1023:1600] = image_embeddings[:, 3] + expected_embeddings[1600:] = 0 # padding + + expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() + expected_labels[:1022] = torch.arange(1, 1023) + expected_labels[1022:1599] = -100 + expected_labels[1599] = 1024 + expected_labels[1600:] = -100 # padding + + expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() + expected_loss_mask[:1022] = 1 + # Last text position before the image is not required to predict the first image embedding. + expected_loss_mask[1022] = 0 + expected_loss_mask[1023:1600] = 0 + expected_loss_mask[1600:] = 0 # padding + + assert torch.allclose(embeddings[:, 2], expected_embeddings) + assert torch.allclose(labels[2], expected_labels) + assert torch.allclose(loss_mask[2], expected_loss_mask) + + # Fourth sample where there is no image. + expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() + expected_embeddings[:1024] = language_embeddings[3] + expected_embeddings[1024:] = 0 # padding + + expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() + expected_labels[:1024] = torch.arange(1, 1025) + expected_labels[1024:] = -100 # padding + + expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() + expected_loss_mask[:1024] = 1 + expected_loss_mask[1024:] = 0 # padding + + assert torch.allclose(embeddings[:, 3], expected_embeddings) + assert torch.allclose(labels[3], expected_labels) + assert torch.allclose(loss_mask[3], expected_loss_mask) + + # Fifth sample has two images in between (indices 50 and 150). The first image has two tiles. + expected_embeddings = torch.empty(max_seq_len, hidden_size).cuda() + expected_embeddings[:50] = language_embeddings[4, :50] + expected_embeddings[50:627] = image_embeddings[:, 4] # two tiles + expected_embeddings[627:1204] = image_embeddings[:, 5] + expected_embeddings[1204:1303] = language_embeddings[4, 51:150] + expected_embeddings[1303:1880] = image_embeddings[:, 6] + expected_embeddings[1880:] = language_embeddings[4, 151:] + + expected_labels = torch.empty(max_seq_len, dtype=torch.int).cuda() + expected_labels[:49] = torch.arange(1, 50) + expected_labels[49:1203] = -100 # image + expected_labels[1203:1302] = torch.arange(51, 150) + expected_labels[1302:1879] = -100 # image + expected_labels[1879:] = torch.arange(151, 1025) + + expected_loss_mask = torch.empty(max_seq_len, dtype=torch.float).cuda() + expected_loss_mask[:49] = 1 + expected_loss_mask[49:1204] = 0 + expected_loss_mask[1204:1302] = 1 + expected_loss_mask[1302:1880] = 0 + expected_loss_mask[1880:] = 1 + + assert torch.allclose(embeddings[:, 4], expected_embeddings) + assert torch.allclose(labels[4], expected_labels) + assert torch.allclose(loss_mask[4], expected_loss_mask) + + @pytest.mark.internal + def test_forward(self): + self.model.cuda() + + # 3 images with 1 tile and 2 images with 2 tiles. 
+ img = torch.randn((7, 3, 336, 336)).cuda() + + image_token_index = self.model.image_token_index + input_ids = torch.randint(0, 2048, (5, 1024)).cuda() + input_ids[0, 0] = image_token_index # image before text + input_ids[1, 100] = image_token_index # image in between + input_ids[2, -1] = image_token_index # image at the end + # input_ids[3] - no image + input_ids[4, 50] = image_token_index + input_ids[4, 150] = image_token_index + + position_ids = torch.arange(0, 1024, dtype=torch.int).expand(5, 1024).cuda() + + loss_mask = torch.ones((5, 1024)).cuda() + + attention_mask = None # Causal. + + labels = torch.randint(0, 2048, (5, 1024)).cuda() + labels[1, 99] = image_token_index + labels[2, -2] = image_token_index + + num_image_tiles = torch.tensor([1, 2, 1, 2, 1], dtype=torch.int).cuda() + + # Try with labels. + loss, new_loss_mask = self.model.forward( + img, + input_ids, + position_ids, + attention_mask, + labels, + loss_mask, + num_image_tiles=num_image_tiles, + ) + + # The maximum sequence length is given by the sample with 2 images in 3 tiles, minus two image token indices, plus other text tokens. + img_seq_len = 577 + max_seq_len = img_seq_len * 3 - 2 + 1024 + assert loss.shape == new_loss_mask.shape == torch.Size((5, max_seq_len)) + + # Try with labels and PackedSeqParams. Only micro batch size 1 is supported in this mode. + packed_seq_params = PackedSeqParams( + qkv_format="thd", + cu_seqlens_q=torch.tensor( + [0, 512, 1024, 1600], dtype=torch.int32 + ).cuda(), # Just example values. + cu_seqlens_kv=torch.tensor([0, 512, 1024, 1600], dtype=torch.int32).cuda(), + max_seqlen_q=torch.tensor(1600, dtype=torch.int32).cuda(), + max_seqlen_kv=torch.tensor(1600, dtype=torch.int32).cuda(), + ) + + # NOTE: Packing is only supported with BF16. Use BF16 here and switch back to default. + self.model.to(torch.bfloat16) + loss, new_loss_mask = self.model.forward( + img[:1].to(torch.bfloat16), + input_ids[:1], + position_ids[:1], + attention_mask, + labels[:1], + loss_mask[:1], + num_image_tiles=num_image_tiles[:1], + packed_seq_params=packed_seq_params, + ) + self.model.to(torch.float32) + + # 1600 = 577 (img_seq_len) + 1024 (text tokens in the first sample) - 1 (image token). + assert loss.shape == new_loss_mask.shape == torch.Size((1, 1600)) + + # Try text-only input. + loss, new_loss_mask = self.model.forward( + torch.tensor([], dtype=torch.float).cuda(), + torch.randint(0, 2048, (5, 1024)).cuda(), + position_ids, + attention_mask, + torch.randint(0, 2048, (5, 1024)).cuda(), + loss_mask, + num_image_tiles=torch.tensor([], dtype=torch.int).cuda(), + ) + + assert loss.shape == new_loss_mask.shape == torch.Size((5, 1024)) + + # Try without labels and without inference params. + logits, _ = self.model.forward( + img, + input_ids, + position_ids, + attention_mask, + labels=None, + loss_mask=None, + num_image_tiles=num_image_tiles, + ) + assert logits.shape == torch.Size((5, max_seq_len, 8192)) + + # Try without labels and with inference params. + inference_params = InferenceParams(5, max_seq_len) + logits, _ = self.model.forward( + img, + input_ids, + position_ids, + attention_mask, + labels=None, + loss_mask=None, + num_image_tiles=num_image_tiles, + inference_params=inference_params, + ) + assert logits.shape == torch.Size((5, max_seq_len, 8192)) + + # Check KV cache got populated correctly. + kv_dict = inference_params.key_value_memory_dict + + assert kv_dict["image_tokens_count"] == 577 * 7 + for layer_no in range(1, 4): # 3 layers in the model. 
+            layer_kv = kv_dict[layer_no]
+            # Expected shape is [sequence_len, batch_size, num_heads, hidden_size_per_head]
+            assert (
+                layer_kv[0].shape
+                == layer_kv[1].shape
+                == torch.Size((max_seq_len, 5, self.language_num_attention_heads, 16))
+            )
+
+    @pytest.mark.internal
+    def test_forward_fsdp(self):
+        """Test FSDP workaround for text-only data.
+
+        FSDP can hang with text-only data. As a workaround, we run the vision model with a
+        dummy image, but then effectively discard the image embeddings.
+        """
+        self.model.cuda()
+
+        # Dummy image for the FSDP workaround, but no image tiles.
+        img = torch.zeros((1, 3, 336, 336)).cuda()
+        num_image_tiles = torch.tensor([], dtype=torch.int).cuda()
+
+        # No image tag in the input ids (text-only sample).
+        image_token_index = self.model.image_token_index
+        input_ids = torch.arange(1024, device="cuda").unsqueeze(0)
+        assert (
+            torch.sum(input_ids == image_token_index) == 0
+        ), "expected no image tag in the input ids"
+
+        position_ids = torch.arange(1024, device="cuda").unsqueeze(0)
+
+        loss_mask = torch.ones((1, 1024), device="cuda")
+
+        attention_mask = None  # Causal.
+
+        labels = torch.arange(1, 1025, device="cuda").unsqueeze(0)
+
+        # Mock the FSDP attribute.
+        self.model.vision_model._is_fsdp_managed_module = True
+        loss, new_loss_mask = self.model.forward(
+            img,
+            input_ids,
+            position_ids,
+            attention_mask,
+            labels,
+            loss_mask,
+            num_image_tiles=num_image_tiles,
+        )
+        self.model.vision_model._is_fsdp_managed_module = False
+
+        assert loss.shape == new_loss_mask.shape == torch.Size((1, 1024))
+
+    @pytest.mark.internal
+    def test_save_load(self, tmp_path):
+        path = tmp_path / "model.pt"
+        torch.save(self.model.state_dict(), path)
+
+        self.model.load_state_dict(torch.load(path))
+
+    @pytest.mark.internal
+    def test_freeze(self):
+        self.model.freeze(
+            freeze_language_model=True, freeze_vision_model=True, freeze_vision_projection=False
+        )
+
+        for module in [self.model.language_model, self.model.vision_model]:
+            for param in module.parameters():
+                assert not param.requires_grad
+
+        for param in self.model.vision_projection.parameters():
+            assert param.requires_grad
+
+
+class TestLLaVAModelSigLIP:
+    @pytest.mark.internal  # The model is under active development and its methods may change.
+ def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + + language_config = TransformerConfig( + num_layers=3, hidden_size=128, num_attention_heads=8, use_cpu_initialization=False + ) + vision_config = TransformerConfig( + num_layers=2, hidden_size=64, num_attention_heads=4, use_cpu_initialization=False + ) + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=128, + ffn_hidden_size=72, + num_attention_heads=1, + use_cpu_initialization=False, + ) + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + vision_layer_spec = deepcopy(language_layer_spec) + vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) + + language_config.language_model_type = "dummy" + vision_config.vision_model_type = "siglip" + self.model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=2048, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + ) + + @pytest.mark.internal + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + def test_constructor(self): + assert isinstance(self.model, LLaVAModel) + + num_weights = sum([p.numel() for p in self.model.parameters()]) + assert num_weights == 1832456 + + @pytest.mark.internal + def test_set_input_tensor(self): + expected_shape = (1, 2, 3, 4) + input_tensor = torch.zeros(expected_shape) + self.model.set_input_tensor(input_tensor) + assert self.model.vision_model.decoder.input_tensor.shape == expected_shape + + +def create_test_args(cp_size, sequence_parallel): + # Set dummy values for the args. 
+    args = SimpleNamespace()
+    args.context_parallel_size = cp_size
+    args.sequence_parallel = sequence_parallel
+
+    return args
+
+
+class TestLLaVAModelTokenParallel:
+
+    def _init_llava_model(self, cp_size, tp_size, sequence_parallel):
+        language_hidden_size = 64
+        language_num_attention_heads = 16
+
+        language_config = TransformerConfig(
+            num_layers=3,
+            hidden_size=language_hidden_size,
+            num_attention_heads=language_num_attention_heads,
+            use_cpu_initialization=False,
+            tensor_model_parallel_size=tp_size,
+            sequence_parallel=sequence_parallel,
+            context_parallel_size=cp_size,
+        )
+        # SP and CP are not yet supported for the vision backbone.
+        vision_config = TransformerConfig(
+            num_layers=2,
+            hidden_size=16,
+            num_attention_heads=8,
+            use_cpu_initialization=False,
+            tensor_model_parallel_size=tp_size,
+            sequence_parallel=False,
+            context_parallel_size=1,
+        )
+        vision_projection_config = TransformerConfig(
+            num_layers=2,
+            hidden_size=language_hidden_size,
+            ffn_hidden_size=128,
+            num_attention_heads=8,
+            use_cpu_initialization=False,
+            tensor_model_parallel_size=tp_size,
+            sequence_parallel=False,
+            context_parallel_size=1,
+        )
+
+        language_layer_spec = get_gpt_layer_with_transformer_engine_spec()
+        # SP/CP requires either that the user ensures token lengths need no padding,
+        # or that the attention mask type is changed to a padding variant.
+        if (
+            language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '')
+            == AttnMaskType.causal
+        ):
+            language_layer_spec.submodules.self_attention.params['attn_mask_type'] = (
+                AttnMaskType.padding_causal
+            )
+        elif (
+            language_layer_spec.submodules.self_attention.params.get('attn_mask_type', '')
+            == AttnMaskType.no_mask
+        ):
+            language_layer_spec.submodules.self_attention.params['attn_mask_type'] = (
+                AttnMaskType.padding
+            )
+
+        vision_layer_spec = deepcopy(language_layer_spec)
+        vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules)
+
+        language_config.language_model_type = "dummy"
+        vision_config.vision_model_type = "clip"
+        model = LLaVAModel(
+            language_transformer_config=language_config,
+            language_transformer_layer_spec=language_layer_spec,
+            language_vocab_size=8192,
+            language_max_sequence_length=4096,
+            vision_transformer_config=vision_config,
+            vision_transformer_layer_spec=vision_layer_spec,
+            drop_vision_class_token=False,
+            vision_projection_config=vision_projection_config,
+            vision_projection_layer_spec=vision_projection_spec,
+            img_h=336,
+            img_w=336,
+            patch_dim=14,
+        )
+
+        return model
+
+    @pytest.mark.internal
+    def setup_method(self, method):
+        Utils.destroy_model_parallel()
+
+    @pytest.mark.internal
+    def teardown_method(self, method):
+        Utils.destroy_model_parallel()
+
+    @pytest.mark.internal
+    @pytest.mark.parametrize(
+        "cp_size,tp_size,sequence_parallel,padding",
+        [(1, 8, True, True), (2, 4, False, True), (2, 4, True, False), (2, 4, True, True)],
+    )
+    def test_process_embedding_token_parallel(self, cp_size, tp_size, sequence_parallel, padding):
+        """Test _process_embedding_token_parallel.
+
+        Note: This test requires TE version >= 1.10.0 to run properly.
+        """
+        Utils.initialize_model_parallel(
+            tensor_model_parallel_size=tp_size, context_parallel_size=cp_size
+        )
+        model_parallel_cuda_manual_seed(123)
+
+        # TE version must be at least 1.10.0 if using context parallelism. Exit otherwise.
+ ctx = ( + nullcontext() + if (is_te_min_version("1.10.0") or cp_size <= 1) + else pytest.raises(AssertionError) + ) + model = None + with ctx: + model = self._init_llava_model(cp_size, tp_size, sequence_parallel) + + if model is None: + return + + model.cuda() + + args = create_test_args(cp_size, sequence_parallel) + set_args(args) + + batch_size = 2 + if padding: + combined_valid_seqlen = 2049 + combined_padded_seqlen = 2064 + else: + combined_valid_seqlen = 2048 + combined_padded_seqlen = 2048 + + if cp_size > 1: + combined_embeddings = torch.ones( + [batch_size, combined_padded_seqlen, 4096], device='cuda', dtype=torch.bfloat16 + ) # [B, S, H] + else: + combined_embeddings = torch.ones( + [combined_padded_seqlen, batch_size, 4096], device='cuda', dtype=torch.bfloat16 + ) # [S, B, H] + new_labels = torch.ones( + [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 + ) # [B, S] + new_loss_mask = torch.ones( + [batch_size, combined_padded_seqlen], device='cuda', dtype=torch.bfloat16 + ) # [B, S] + + cu_seqlens = torch.arange( + 0, + (batch_size + 1) * (combined_valid_seqlen), + step=(combined_valid_seqlen), + dtype=torch.int32, + device=combined_embeddings.device, + ) + cu_seqlens_padded = torch.arange( + 0, + (batch_size + 1) * (combined_padded_seqlen), + step=(combined_padded_seqlen), + dtype=torch.int32, + device=combined_embeddings.device, + ) + + qkv_format = 'sbhd' # Default format when not using padding + if cp_size > 1 and padding: + # Reshape from [B,S] to [1,T] + combined_embeddings = ( + combined_embeddings.contiguous() + .view(combined_embeddings.shape[0] * combined_embeddings.shape[1], -1) + .unsqueeze(0) + ) + new_labels = new_labels.view(new_labels.shape[0] * new_labels.shape[1]).unsqueeze(0) + new_loss_mask = new_loss_mask.view( + new_loss_mask.shape[0] * new_loss_mask.shape[1] + ).unsqueeze(0) + qkv_format = 'thd' + + packed_seq_params = PackedSeqParams( + cu_seqlens_q=cu_seqlens, + cu_seqlens_kv=cu_seqlens, + cu_seqlens_q_padded=cu_seqlens_padded, + cu_seqlens_kv_padded=cu_seqlens_padded, + max_seqlen_q=combined_padded_seqlen, + max_seqlen_kv=combined_padded_seqlen, + qkv_format=qkv_format, + ) + + combined_embeddings, new_labels, new_loss_mask, packed_seq_params = ( + model._process_embedding_token_parallel( + combined_embeddings, new_labels, new_loss_mask, packed_seq_params + ) + ) + + # Check if output shape is as expected + if cp_size > 1 and sequence_parallel: + if padding: + # THD format + assert combined_embeddings.shape[0] == batch_size * ( + combined_padded_seqlen / (tp_size * cp_size) + ) + assert combined_embeddings.shape[1] == 1 + else: + # SBHD format + assert combined_embeddings.shape[0] == ( + combined_padded_seqlen / (tp_size * cp_size) + ) + assert combined_embeddings.shape[1] == batch_size + elif cp_size > 1: + if padding: + # THD format + assert combined_embeddings.shape[0] == batch_size * ( + combined_padded_seqlen / cp_size + ) + assert combined_embeddings.shape[1] == 1 + else: + # SBHD format + assert combined_embeddings.shape[0] == (combined_padded_seqlen / cp_size) + assert combined_embeddings.shape[1] == batch_size + else: + # SBHD format + assert combined_embeddings.shape[0] == combined_padded_seqlen / tp_size + assert combined_embeddings.shape[1] == batch_size + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters()) + + +@pytest.mark.internal # The model is under active development and its methods may change. 
+@pytest.mark.parametrize( + 'dtp, dpp, etp, epp', [(1, 1, 1, 0), (1, 1, 1, 1), (2, 1, 2, 0), (2, 3, 2, 1), (2, 4, 2, 0)] +) +def test_llava_model_parallelism(dtp, dpp, etp, epp): + """ + The purpose of this test is to check that vit, vision projection and lm layer + counts across tensor and pipeline parallel ranks match the counts in the + non-model-parallel case, i.e. tp==1, pp==1, etp==1, epp==0 + """ + + language_hidden_size = 64 + language_num_attention_heads = 4 + + # First initialize a single GPU model to get baseline parameter and layer counts + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + encoder_tensor_model_parallel_size=1, + encoder_pipeline_model_parallel_size=0, + ) + model_parallel_cuda_manual_seed(123) + + language_config = TransformerConfig( + num_layers=12, + hidden_size=language_hidden_size, + num_attention_heads=language_num_attention_heads, + use_cpu_initialization=False, + ) + language_config.tensor_model_parallel_size = dtp + language_config.pipeline_model_parallel_size = dpp + + vision_config = TransformerConfig( + num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False + ) + vision_config.tensor_model_parallel_size = etp + vision_config.pipeline_model_parallel_size = 1 + + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=language_hidden_size, + ffn_hidden_size=32, + num_attention_heads=1, + use_cpu_initialization=False, + ) + vision_projection_config.tensor_model_parallel_size = etp + vision_projection_config.pipeline_model_parallel_size = 1 + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + vision_layer_spec = get_vit_layer_with_transformer_engine_spec() + vision_projection_spec = deepcopy(language_layer_spec.submodules.mlp.submodules) + + language_config.language_model_type = "dummy" + vision_config.vision_model_type = "clip" + non_parallel_model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=8192, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + ) + + base_vit_params = sum(p.numel() for p in non_parallel_model.vision_model.parameters()) + base_proj_params = sum(p.numel() for p in non_parallel_model.vision_projection.parameters()) + + base_vit_layers = len(non_parallel_model.vision_model.decoder.layers) + + Utils.destroy_model_parallel() + + # Next initialize a model parallel version to get test parameter and layer counts + Utils.initialize_model_parallel( + tensor_model_parallel_size=dtp, + pipeline_model_parallel_size=dpp, + encoder_tensor_model_parallel_size=etp, + encoder_pipeline_model_parallel_size=epp, + ) + model_parallel_cuda_manual_seed(123) + + pp_rank = ps.get_pipeline_model_parallel_rank() + pp_world_size = ps.get_pipeline_model_parallel_world_size() + tp_world_size = ps.get_tensor_model_parallel_world_size() + + pre_process = True if (pp_rank == 0 or (pp_rank == 1 and epp == 1)) else False + post_process = ( + True if ((pp_rank == 0 and epp == 1) or (pp_rank == pp_world_size - 1)) else False + ) + add_encoder = True if pp_rank == 0 else False + add_decoder = False if (pp_rank == 0 and epp == 1) else True + + language_config = TransformerConfig( + num_layers=12, + 
hidden_size=language_hidden_size, + num_attention_heads=language_num_attention_heads, + use_cpu_initialization=False, + ) + language_config.tensor_model_parallel_size = dtp + language_config.pipeline_model_parallel_size = dpp + + vision_config = TransformerConfig( + num_layers=4, hidden_size=16, num_attention_heads=2, use_cpu_initialization=False + ) + vision_config.tensor_model_parallel_size = etp + vision_config.pipeline_model_parallel_size = 1 + + vision_projection_config = TransformerConfig( + num_layers=2, + hidden_size=language_hidden_size, + ffn_hidden_size=32, + num_attention_heads=1, + use_cpu_initialization=False, + ) + vision_projection_config.tensor_model_parallel_size = etp + vision_projection_config.pipeline_model_parallel_size = 1 + + language_layer_spec = get_gpt_layer_with_transformer_engine_spec() + vision_layer_spec = get_vit_layer_with_transformer_engine_spec() + vision_projection_spec = deepcopy(vision_layer_spec.submodules.mlp.submodules) + + language_config.language_model_type = "dummy" + vision_config.vision_model_type = "clip" + model = LLaVAModel( + language_transformer_config=language_config, + language_transformer_layer_spec=language_layer_spec, + language_vocab_size=8192, + language_max_sequence_length=4096, + vision_transformer_config=vision_config, + vision_transformer_layer_spec=vision_layer_spec, + drop_vision_class_token=False, + vision_projection_config=vision_projection_config, + vision_projection_layer_spec=vision_projection_spec, + img_h=336, + img_w=336, + patch_dim=14, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + ) + + if epp == 1: + if pp_rank == 0: + # should be in a etp sized tp group + assert tp_world_size == etp + # there should only be a single pipeline rank + assert pp_world_size == epp + dpp + # should not be inside decoder + assert not ps.is_inside_decoder() + # should be inside encoder + assert ps.is_inside_encoder() + elif pp_rank != 0: + # non-encoder ranks should be in a dtp sized tp group + assert tp_world_size == dtp + # check we're inside the decoder + assert ps.is_inside_decoder() + # check we're not inside the encoder + assert not ps.is_inside_encoder() + elif epp == 0: + if pp_rank == 0: + # check we're inside the encoder and decoder + assert ps.is_inside_encoder() + assert ps.is_inside_decoder() + elif pp_rank != 0: + # check we're inside the decoder only and there's no vision_model + assert not ps.is_inside_encoder() + assert ps.is_inside_decoder() + assert model.vision_model is None + assert model.vision_projection is None + + if ps.is_inside_encoder(): + # Check num vit layers - epp > 1 not supported + test_vit_layers = len([p for p in model.vision_model.decoder.layers]) + assert test_vit_layers == base_vit_layers + + # Check all vit params are present + test_vit_tp_params = sum( + [ + p.numel() + for p in model.vision_model.parameters() + if hasattr(p, 'tensor_model_parallel') + ] + ) + test_vit_non_tp_params = sum( + [ + p.numel() + for p in model.vision_model.parameters() + if not hasattr(p, 'tensor_model_parallel') + ] + ) + group = ps.get_tensor_model_parallel_group() + test_vit_params_tensor = torch.tensor([test_vit_tp_params], dtype=torch.int32).cuda() + torch.distributed.all_reduce( + test_vit_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group + ) + total_test_vit_tp_params = test_vit_params_tensor.item() + assert total_test_vit_tp_params + test_vit_non_tp_params == base_vit_params + + # Check all vision projection params are present + 
test_proj_tp_params = sum( + [ + p.numel() + for p in model.vision_projection.parameters() + if hasattr(p, 'tensor_model_parallel') + ] + ) + test_proj_non_tp_params = sum( + [ + p.numel() + for p in model.vision_projection.parameters() + if not hasattr(p, 'tensor_model_parallel') + ] + ) + test_proj_params_tensor = torch.tensor([test_proj_tp_params], dtype=torch.int32).cuda() + torch.distributed.all_reduce( + test_proj_params_tensor, op=torch.distributed.ReduceOp.SUM, group=group + ) + total_test_proj_tp_params = test_proj_params_tensor.item() + assert total_test_proj_tp_params + test_proj_non_tp_params == base_proj_params + else: + # check ranks that aren't inside encoder have no vit + assert model.vision_model is None + assert model.vision_projection is None + + Utils.destroy_model_parallel() + torch.cuda.empty_cache() + + +@pytest.mark.internal +@pytest.mark.parametrize( + "cp_size, tp_size, has_sp, seq_len, expected_padding", + [(1, 1, False, 99, 0), (2, 2, True, 99, 5), (2, 2, False, 99, 1)], +) +def test_get_padding(cp_size, tp_size, has_sp, seq_len, expected_padding): + """Test calculating padding for context parallel.""" + padding = context_parallel.get_padding(seq_len, cp_size, tp_size, has_sp) + + assert padding == expected_padding + + +@pytest.mark.internal +@pytest.mark.parametrize( + "tokens, img_seq_len, padding_needed, cp_size, expected_seq_len", + [(torch.ones((1, 100)), 100, 0, 2, 200), (torch.ones((1, 100)), 128, 1, 2, 227)], +) +def test_get_packed_seq_params(tokens, img_seq_len, padding_needed, cp_size, expected_seq_len): + """Test creating PackedSeqParams for context parallel.""" + packed_seq_params = context_parallel.get_packed_seq_params( + tokens, img_seq_len, padding_needed, cp_size + ) + + assert torch.equal( + packed_seq_params.cu_seqlens_q, torch.tensor([0, expected_seq_len], dtype=torch.int32) + ) + + if padding_needed > 0: + padded_seq_len = tokens.shape[1] + img_seq_len + assert torch.equal( + packed_seq_params.cu_seqlens_q_padded, + torch.tensor([0, padded_seq_len], dtype=torch.int32), + ) + assert packed_seq_params.max_seqlen_q == padded_seq_len diff --git a/tests/unit_tests/models/test_multimodal_projector.py b/tests/unit_tests/models/test_multimodal_projector.py index 976dc48..4ed9f21 100644 --- a/tests/unit_tests/models/test_multimodal_projector.py +++ b/tests/unit_tests/models/test_multimodal_projector.py @@ -1,75 +1,75 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
- -import pytest -import torch - -from megatron.core.models.gpt.gpt_layer_specs import _get_mlp_module_spec -from megatron.core.models.vision.multimodal_projector import MultimodalProjector -from megatron.core.tensor_parallel.layers import ColumnParallelLinear -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.mlp import MLPSubmodules -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestMultimodalProjector: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=1, hidden_size=64, num_attention_heads=4, use_cpu_initialization=True - ) - mlp_layer_spec = _get_mlp_module_spec().submodules - - affine_layer_spec = MLPSubmodules(linear_fc1=ColumnParallelLinear, linear_fc2=None) - self.mlp = MultimodalProjector( - config=transformer_config, - submodules=mlp_layer_spec, - projector_type="mlp", - input_size=1024, - ) - self.affine = MultimodalProjector( - config=transformer_config, - submodules=affine_layer_spec, - projector_type="affine", - input_size=1024, - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - assert isinstance(self.mlp, MultimodalProjector) - assert isinstance(self.affine, MultimodalProjector) - - num_weights = sum([p.numel() for p in self.mlp.parameters()]) - assert num_weights == 280896 - - num_weights = sum([p.numel() for p in self.affine.parameters()]) - assert num_weights == 65600 - - def test_forward(self): - self.mlp.cuda() - self.affine.cuda() - - image_projection = torch.zeros((2, 1024)).cuda() - - logits = self.mlp.forward(image_projection) - assert len(logits) == 2 - assert logits.shape == torch.Size([2, 64]) - - logits = self.affine.forward(image_projection) - assert len(logits) == 2 - assert logits.shape == torch.Size([2, 64]) - - def test_save_load(self, tmp_path): - path = tmp_path / "mlp.pt" - torch.save(self.mlp.state_dict(), path) - - self.mlp.load_state_dict(torch.load(path)) - - path = tmp_path / "affine.pt" - torch.save(self.affine.state_dict(), path) - - self.affine.load_state_dict(torch.load(path)) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import get_mlp_module_spec +from megatron.core.models.vision.multimodal_projector import MultimodalProjector +from megatron.core.tensor_parallel.layers import ColumnParallelLinear +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.mlp import MLPSubmodules +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestMultimodalProjector: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=1, hidden_size=64, num_attention_heads=4, use_cpu_initialization=True + ) + mlp_layer_spec = get_mlp_module_spec().submodules + + affine_layer_spec = MLPSubmodules(linear_fc1=ColumnParallelLinear, linear_fc2=None) + self.mlp = MultimodalProjector( + config=transformer_config, + submodules=mlp_layer_spec, + projector_type="mlp", + input_size=1024, + ) + self.affine = MultimodalProjector( + config=transformer_config, + submodules=affine_layer_spec, + projector_type="affine", + input_size=1024, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + assert isinstance(self.mlp, MultimodalProjector) + assert isinstance(self.affine, MultimodalProjector) + + num_weights = sum([p.numel() for p in self.mlp.parameters()]) + assert num_weights == 280896 + + num_weights = sum([p.numel() for p in self.affine.parameters()]) + assert num_weights == 65600 + + def test_forward(self): + self.mlp.cuda() + self.affine.cuda() + + image_projection = torch.zeros((2, 1024)).cuda() + + logits = self.mlp.forward(image_projection) + assert len(logits) == 2 + assert logits.shape == torch.Size([2, 64]) + + logits = self.affine.forward(image_projection) + assert len(logits) == 2 + assert logits.shape == torch.Size([2, 64]) + + def test_save_load(self, tmp_path): + path = tmp_path / "mlp.pt" + torch.save(self.mlp.state_dict(), path) + + self.mlp.load_state_dict(torch.load(path)) + + path = tmp_path / "affine.pt" + torch.save(self.affine.state_dict(), path) + + self.affine.load_state_dict(torch.load(path)) diff --git a/tests/unit_tests/models/test_radio_model.py b/tests/unit_tests/models/test_radio_model.py new file mode 100644 index 0000000..a5e81b3 --- /dev/null +++ b/tests/unit_tests/models/test_radio_model.py @@ -0,0 +1,61 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.vision.radio import RADIOViTModel +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestRADIOViTModel: + """Test RADIO ViT model.""" + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=2, hidden_size=64, num_attention_heads=4, use_cpu_initialization=True + ) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec() + self.model = RADIOViTModel( + transformer_config, + transformer_layer_spec, + img_h=224, + img_w=224, + patch_dim=14, + add_class_token=False, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + assert isinstance(self.model, RADIOViTModel) + + num_weights = sum([p.numel() for p in self.model.parameters()]) + assert num_weights == 1501824 + + def test_set_input_tensor(self): + # [s, b, h] expected to the transformer. + expected_shape = (256, 2, 64) + input_tensor = torch.zeros(expected_shape) + + self.model.set_input_tensor(input_tensor) + + assert self.model.decoder.input_tensor.shape == torch.Size(expected_shape) + + def test_forward(self): + self.model.cuda() + + img = torch.zeros((2, 3, 224, 224)).cuda() + + out = self.model.forward(img) + assert out.shape == torch.Size([2, 256, 64]) + + def test_save_load(self, tmp_path): + path = tmp_path / "model.pt" + torch.save(self.model.state_dict(), path) + + self.model.load_state_dict(torch.load(path)) diff --git a/tests/unit_tests/models/test_t5_model.py b/tests/unit_tests/models/test_t5_model.py index 6c1faf9..4e61fb5 100644 --- a/tests/unit_tests/models/test_t5_model.py +++ b/tests/unit_tests/models/test_t5_model.py @@ -1,362 +1,365 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import os -from copy import deepcopy - -import pytest -import torch -from packaging.version import Version as PkgVersion -from pytest_mock import mocker - -import megatron.core.parallel_state as ps -from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset -from megatron.core.models.T5.t5_model import T5Model -from megatron.core.models.T5.t5_spec import ( - get_t5_decoder_with_local_block_spec, - get_t5_decoder_with_transformer_engine_block_spec, - get_t5_encoder_with_local_block_spec, - get_t5_encoder_with_transformer_engine_block_spec, -) -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestT5Model: - - def setup_method(self, method): - tp = 4 - pp = 1 - Utils.initialize_model_parallel( - tensor_model_parallel_size=tp, - pipeline_model_parallel_size=pp, - encoder_pipeline_model_parallel_size=pp, - ) - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=12, - hidden_size=768, - num_attention_heads=12, - kv_channels=64, - ffn_hidden_size=3072, - use_cpu_initialization=True, - pipeline_dtype=torch.bfloat16, - tensor_model_parallel_size=tp, - pipeline_model_parallel_size=pp, - ) - rank = ps.get_pipeline_model_parallel_rank() - world_size = ps.get_pipeline_model_parallel_world_size() - en_block_spec = get_t5_encoder_with_transformer_engine_block_spec(12) - de_block_spec = get_t5_decoder_with_transformer_engine_block_spec(12) - - first_decoder_rank = pp - pre_process = rank == 0 or rank == first_decoder_rank - post_process = (rank == (first_decoder_rank - 1)) or (rank == (world_size - 1)) - add_encoder = ps.is_inside_encoder(rank) - add_decoder = ps.is_inside_decoder(rank) - - self.t5_model = T5Model( - encoder_config=transformer_config, - config=transformer_config, - transformer_encoder_layer_spec=en_block_spec, - transformer_decoder_layer_spec=de_block_spec, - vocab_size=29184, - max_sequence_length=4, - pre_process=pre_process, - post_process=post_process, - add_encoder=add_encoder, - add_decoder=add_decoder, - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - assert isinstance(self.t5_model, T5Model) - assert Utils.world_size == 8 - - assert self.t5_model.max_sequence_length == 4 - if self.t5_model.add_encoder: - assert not self.t5_model.add_decoder - assert self.t5_model.encoder.num_layers_per_pipeline_rank == 12 - assert self.t5_model.pre_process - assert self.t5_model.post_process - else: - assert self.t5_model.add_decoder - assert self.t5_model.decoder.num_layers_per_pipeline_rank == 12 - assert self.t5_model.pre_process - assert self.t5_model.post_process - - def test_set_input_tensor(self): - config: TransformerConfig = self.t5_model.config - sequence_length = self.t5_model.max_sequence_length - micro_batch_size = 2 - - # [sequence length, batch size, hidden size] - input_tensor = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - - self.t5_model.set_input_tensor(input_tensor) - - if self.t5_model.add_encoder: - assert self.t5_model.encoder.input_tensor.shape[0] == sequence_length - assert self.t5_model.encoder.input_tensor.shape[1] == micro_batch_size - assert self.t5_model.encoder.input_tensor.shape[2] == config.hidden_size - else: - assert self.t5_model.encoder is None - assert self.t5_model.encoder_hidden_state.shape[0] == sequence_length - assert 
self.t5_model.encoder_hidden_state.shape[1] == micro_batch_size - assert self.t5_model.encoder_hidden_state.shape[2] == config.hidden_size - - def test_post_process_forward(self): - config: TransformerConfig = self.t5_model.config - sequence_length = self.t5_model.max_sequence_length - micro_batch_size = 2 - - self.t5_model.cuda() - - data = list(range(sequence_length)) - encoder_input_ids = ( - torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - decoder_input_ids = ( - torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - encoder_decoder_attn_mask = torch.ones( - (1, sequence_length, sequence_length), dtype=bool - ).cuda() - - if self.t5_model.add_decoder: - encoder_hidden_states = torch.zeros( - (sequence_length, micro_batch_size, config.hidden_size), dtype=torch.float32 - ).cuda() - else: - encoder_hidden_states = None - - output = self.t5_model.forward( - encoder_input_ids=encoder_input_ids, - decoder_input_ids=decoder_input_ids, - encoder_attn_mask=encoder_attn_mask, - decoder_attn_mask=decoder_attn_mask, - encoder_decoder_attn_mask=encoder_decoder_attn_mask, - encoder_hidden_states=encoder_hidden_states, - ) - if self.t5_model.add_decoder: - logits = output - assert logits.shape[0] == micro_batch_size - assert logits.shape[1] == sequence_length - assert ( - logits.shape[2] - == self.t5_model.vocab_size // ps.get_tensor_model_parallel_world_size() - ) - else: - encoder_hidden_states = output - assert encoder_hidden_states.shape[0] == sequence_length - assert encoder_hidden_states.shape[1] == micro_batch_size - assert encoder_hidden_states.shape[2] == config.hidden_size - - def test_forward_output_encoder_hidden_only(self): - config: TransformerConfig = self.t5_model.config - sequence_length = self.t5_model.max_sequence_length - micro_batch_size = 2 - - self.t5_model.cuda() - - data = list(range(sequence_length)) - encoder_input_ids = ( - torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - decoder_input_ids = ( - torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - encoder_decoder_attn_mask = torch.ones( - (1, sequence_length, sequence_length), dtype=bool - ).cuda() - - encoder_hidden_states = self.t5_model.forward( - encoder_input_ids=encoder_input_ids, - decoder_input_ids=decoder_input_ids, - encoder_attn_mask=encoder_attn_mask, - decoder_attn_mask=decoder_attn_mask, - encoder_decoder_attn_mask=encoder_decoder_attn_mask, - output_encoder_hidden_only=True, - ) - if self.t5_model.add_decoder: - assert encoder_hidden_states is None - else: - assert encoder_hidden_states.shape[0] == sequence_length - assert encoder_hidden_states.shape[1] == micro_batch_size - assert encoder_hidden_states.shape[2] == config.hidden_size - - def test_forward_with_encoder_hidden_states(self): - config: TransformerConfig = self.t5_model.config - sequence_length = self.t5_model.max_sequence_length - micro_batch_size = 2 - - self.t5_model.cuda() - - data = list(range(sequence_length)) - encoder_input_ids = ( - torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - decoder_input_ids = ( - torch.tensor(data, 
dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() - ) - encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() - encoder_decoder_attn_mask = torch.ones( - (1, sequence_length, sequence_length), dtype=bool - ).cuda() - encoder_hidden_states = torch.zeros( - (sequence_length, micro_batch_size, config.hidden_size), dtype=torch.float32 - ).cuda() - - output = self.t5_model.forward( - encoder_input_ids=None, - decoder_input_ids=decoder_input_ids, - encoder_attn_mask=encoder_attn_mask, - decoder_attn_mask=decoder_attn_mask, - encoder_decoder_attn_mask=encoder_decoder_attn_mask, - encoder_hidden_states=encoder_hidden_states, - ) - if self.t5_model.add_decoder: - logits = output - assert logits.shape[0] == micro_batch_size - assert logits.shape[1] == sequence_length - assert ( - logits.shape[2] - == self.t5_model.vocab_size // ps.get_tensor_model_parallel_world_size() - ) - else: - encoder_hidden_states = output - assert encoder_hidden_states.shape[0] == sequence_length - assert encoder_hidden_states.shape[1] == micro_batch_size - assert encoder_hidden_states.shape[2] == config.hidden_size - - def test_no_post_process_forward(self): - pass - - def test_no_preprocess_forward(self): - pass - - def test_state_dict_for_save_checkpoint(self): - pass - - def test_load_state_dict(self): - pass - - -class TestT5ModelAttentionDimensions: - - def teardown_method(self, method): - os.environ.pop('NVTE_FUSED_ATTN', None) - os.environ.pop('NVTE_FLASH_ATTN', None) - os.environ.pop('NVTE_UNFUSED_ATTN', None) - - def setup_method(self, method): - self.bs = 4 - self.seq_len = 512 - self.seq_len_dec = 128 - self.encoder_tokens = torch.ones([self.bs, self.seq_len]) - self.decoder_tokens = torch.ones([self.bs, self.seq_len_dec]) - self.encoder_mask = torch.ones([self.bs, self.seq_len]) < 0.5 - self.decoder_mask = torch.ones([self.bs, self.seq_len_dec]) < 0.5 - - @pytest.mark.internal - def test_local_spec(self): - encoder_mask, decoder_mask, encoder_decoder_mask = ( - T5MaskedWordPieceDataset.config_attention_mask( - self.encoder_tokens, - self.decoder_tokens, - self.encoder_mask, - self.decoder_mask, - use_local=True, - ) - ) - - assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] - assert list(decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len_dec] - assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] - - @pytest.mark.internal - def test_transformer_engine_version_1_10(self): - encoder_mask, decoder_mask, encoder_decoder_mask = ( - T5MaskedWordPieceDataset.config_attention_mask( - self.encoder_tokens, - self.decoder_tokens, - self.encoder_mask, - self.decoder_mask, - use_local=False, - test_te_version="1.10", - ) - ) - - assert list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] - assert decoder_mask is None - assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] - assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] - - @pytest.mark.internal - def test_transformer_engine_version_1_7_to_1_10_flashfused_attn(self): - os.environ['NVTE_FLASH_ATTN'] = '1' - os.environ['NVTE_FUSED_ATTN'] = '1' - - encoder_mask, decoder_mask, encoder_decoder_mask = ( - T5MaskedWordPieceDataset.config_attention_mask( - self.encoder_tokens, - self.decoder_tokens, - self.encoder_mask, - self.decoder_mask, - use_local=False, - test_te_version="1.8", - ) - ) - - assert 
list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] - assert decoder_mask is None - assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] - assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] - - @pytest.mark.internal - def test_transformer_engine_version_1_7_to_1_10_unfused_attention(self): - os.environ['NVTE_FLASH_ATTN'] = '0' - os.environ['NVTE_FUSED_ATTN'] = '0' - - encoder_mask, decoder_mask, encoder_decoder_mask = ( - T5MaskedWordPieceDataset.config_attention_mask( - self.encoder_tokens, - self.decoder_tokens, - self.encoder_mask, - self.decoder_mask, - use_local=False, - test_te_version="1.8", - ) - ) - - assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] - assert decoder_mask is None - assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] - - @pytest.mark.internal - def test_transformer_engine_version_less_than_1_7(self): - os.environ['NVTE_FLASH_ATTN'] = '1' - with pytest.raises(Exception) as exc_info: - encoder_mask, decoder_mask, encoder_decoder_mask = ( - T5MaskedWordPieceDataset.config_attention_mask( - self.encoder_tokens, - self.decoder_tokens, - self.encoder_mask, - self.decoder_mask, - use_local=False, - test_te_version="1.5", - ) - ) - - assert str(exc_info.value) == ( - "Flash and fused attention is not supported with transformer " - "engine version < 1.7. Set NVTE_FLASH_ATTN=0 and NVTE_FUSED_ATTN=0" - "or upgrade transformer engine >= 1.7" - ) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import os +from copy import deepcopy + +import pytest +import torch +from packaging.version import Version as PkgVersion +from pytest_mock import mocker + +import megatron.core.parallel_state as ps +from megatron.core.datasets.t5_dataset import T5MaskedWordPieceDataset +from megatron.core.models.T5.t5_model import T5Model +from megatron.core.models.T5.t5_spec import ( + get_t5_decoder_with_local_block_spec, + get_t5_decoder_with_transformer_engine_block_spec, + get_t5_encoder_with_local_block_spec, + get_t5_encoder_with_transformer_engine_block_spec, +) +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestT5Model: + + def setup_method(self, method): + tp = 4 + pp = 1 + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + encoder_pipeline_model_parallel_size=pp, + ) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=12, + hidden_size=768, + num_attention_heads=12, + kv_channels=64, + ffn_hidden_size=3072, + use_cpu_initialization=True, + pipeline_dtype=torch.bfloat16, + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + ) + rank = ps.get_pipeline_model_parallel_rank() + world_size = ps.get_pipeline_model_parallel_world_size() + en_block_spec = get_t5_encoder_with_transformer_engine_block_spec(12) + de_block_spec = get_t5_decoder_with_transformer_engine_block_spec(12) + + first_decoder_rank = pp + pre_process = rank == 0 or rank == first_decoder_rank + post_process = (rank == (first_decoder_rank - 1)) or (rank == (world_size - 1)) + add_encoder = ps.is_inside_encoder(rank) + add_decoder = ps.is_inside_decoder(rank) + + self.t5_model = T5Model( + encoder_config=transformer_config, + config=transformer_config, + transformer_encoder_layer_spec=en_block_spec, + 
transformer_decoder_layer_spec=de_block_spec, + vocab_size=29184, + max_sequence_length=4, + pre_process=pre_process, + post_process=post_process, + add_encoder=add_encoder, + add_decoder=add_decoder, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + assert isinstance(self.t5_model, T5Model) + assert Utils.world_size == 8 + + assert self.t5_model.max_sequence_length == 4 + if self.t5_model.add_encoder: + assert not self.t5_model.add_decoder + assert self.t5_model.encoder.num_layers_per_pipeline_rank == 12 + assert self.t5_model.pre_process + assert self.t5_model.post_process + else: + assert self.t5_model.add_decoder + assert self.t5_model.decoder.num_layers_per_pipeline_rank == 12 + assert self.t5_model.pre_process + assert self.t5_model.post_process + + def test_set_input_tensor(self): + config: TransformerConfig = self.t5_model.config + sequence_length = self.t5_model.max_sequence_length + micro_batch_size = 2 + + # [sequence length, batch size, hidden size] + input_tensor = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + + self.t5_model.set_input_tensor(input_tensor) + + if self.t5_model.add_encoder: + assert self.t5_model.encoder.input_tensor.shape[0] == sequence_length + assert self.t5_model.encoder.input_tensor.shape[1] == micro_batch_size + assert self.t5_model.encoder.input_tensor.shape[2] == config.hidden_size + else: + assert self.t5_model.encoder is None + assert self.t5_model.encoder_hidden_state.shape[0] == sequence_length + assert self.t5_model.encoder_hidden_state.shape[1] == micro_batch_size + assert self.t5_model.encoder_hidden_state.shape[2] == config.hidden_size + + @pytest.mark.flaky_in_dev + def test_post_process_forward(self): + config: TransformerConfig = self.t5_model.config + sequence_length = self.t5_model.max_sequence_length + micro_batch_size = 2 + + self.t5_model.cuda() + + data = list(range(sequence_length)) + encoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + decoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + encoder_decoder_attn_mask = torch.ones( + (1, sequence_length, sequence_length), dtype=bool + ).cuda() + + if self.t5_model.add_decoder: + encoder_hidden_states = torch.zeros( + (sequence_length, micro_batch_size, config.hidden_size), dtype=torch.float32 + ).cuda() + else: + encoder_hidden_states = None + + output = self.t5_model.forward( + encoder_input_ids=encoder_input_ids, + decoder_input_ids=decoder_input_ids, + encoder_attn_mask=encoder_attn_mask, + decoder_attn_mask=decoder_attn_mask, + encoder_decoder_attn_mask=encoder_decoder_attn_mask, + encoder_hidden_states=encoder_hidden_states, + ) + if self.t5_model.add_decoder: + logits = output + assert logits.shape[0] == micro_batch_size + assert logits.shape[1] == sequence_length + assert ( + logits.shape[2] + == self.t5_model.vocab_size // ps.get_tensor_model_parallel_world_size() + ) + else: + encoder_hidden_states = output + assert encoder_hidden_states.shape[0] == sequence_length + assert encoder_hidden_states.shape[1] == micro_batch_size + assert encoder_hidden_states.shape[2] == config.hidden_size + + @pytest.mark.flaky_in_dev + def test_forward_output_encoder_hidden_only(self): + config: TransformerConfig = self.t5_model.config + 
sequence_length = self.t5_model.max_sequence_length + micro_batch_size = 2 + + self.t5_model.cuda() + + data = list(range(sequence_length)) + encoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + decoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + encoder_decoder_attn_mask = torch.ones( + (1, sequence_length, sequence_length), dtype=bool + ).cuda() + + encoder_hidden_states = self.t5_model.forward( + encoder_input_ids=encoder_input_ids, + decoder_input_ids=decoder_input_ids, + encoder_attn_mask=encoder_attn_mask, + decoder_attn_mask=decoder_attn_mask, + encoder_decoder_attn_mask=encoder_decoder_attn_mask, + output_encoder_hidden_only=True, + ) + if self.t5_model.add_decoder: + assert encoder_hidden_states is None + else: + assert encoder_hidden_states.shape[0] == sequence_length + assert encoder_hidden_states.shape[1] == micro_batch_size + assert encoder_hidden_states.shape[2] == config.hidden_size + + @pytest.mark.flaky_in_dev + def test_forward_with_encoder_hidden_states(self): + config: TransformerConfig = self.t5_model.config + sequence_length = self.t5_model.max_sequence_length + micro_batch_size = 2 + + self.t5_model.cuda() + + data = list(range(sequence_length)) + encoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + decoder_input_ids = ( + torch.tensor(data, dtype=torch.int64).repeat((micro_batch_size, 1)).cuda() + ) + encoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + decoder_attn_mask = torch.ones((1, sequence_length, sequence_length), dtype=bool).cuda() + encoder_decoder_attn_mask = torch.ones( + (1, sequence_length, sequence_length), dtype=bool + ).cuda() + encoder_hidden_states = torch.zeros( + (sequence_length, micro_batch_size, config.hidden_size), dtype=torch.float32 + ).cuda() + + output = self.t5_model.forward( + encoder_input_ids=None, + decoder_input_ids=decoder_input_ids, + encoder_attn_mask=encoder_attn_mask, + decoder_attn_mask=decoder_attn_mask, + encoder_decoder_attn_mask=encoder_decoder_attn_mask, + encoder_hidden_states=encoder_hidden_states, + ) + if self.t5_model.add_decoder: + logits = output + assert logits.shape[0] == micro_batch_size + assert logits.shape[1] == sequence_length + assert ( + logits.shape[2] + == self.t5_model.vocab_size // ps.get_tensor_model_parallel_world_size() + ) + else: + encoder_hidden_states = output + assert encoder_hidden_states.shape[0] == sequence_length + assert encoder_hidden_states.shape[1] == micro_batch_size + assert encoder_hidden_states.shape[2] == config.hidden_size + + def test_no_post_process_forward(self): + pass + + def test_no_preprocess_forward(self): + pass + + def test_state_dict_for_save_checkpoint(self): + pass + + def test_load_state_dict(self): + pass + + +class TestT5ModelAttentionDimensions: + + def teardown_method(self, method): + os.environ.pop('NVTE_FUSED_ATTN', None) + os.environ.pop('NVTE_FLASH_ATTN', None) + os.environ.pop('NVTE_UNFUSED_ATTN', None) + + def setup_method(self, method): + self.bs = 4 + self.seq_len = 512 + self.seq_len_dec = 128 + self.encoder_tokens = torch.ones([self.bs, self.seq_len]) + self.decoder_tokens = torch.ones([self.bs, self.seq_len_dec]) + self.encoder_mask = torch.ones([self.bs, self.seq_len]) < 0.5 + 
self.decoder_mask = torch.ones([self.bs, self.seq_len_dec]) < 0.5 + + @pytest.mark.internal + def test_local_spec(self): + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=True, + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] + assert list(decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len_dec] + assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_10(self): + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.10", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] + assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_7_to_1_10_flashfused_attn(self): + os.environ['NVTE_FLASH_ATTN'] = '1' + os.environ['NVTE_FUSED_ATTN'] = '1' + + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.8", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, 1, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask[0].shape) == [self.bs, 1, 1, self.seq_len_dec] + assert list(encoder_decoder_mask[1].shape) == [self.bs, 1, 1, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_1_7_to_1_10_unfused_attention(self): + os.environ['NVTE_FLASH_ATTN'] = '0' + os.environ['NVTE_FUSED_ATTN'] = '0' + + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.8", + ) + ) + + assert list(encoder_mask.shape) == [self.bs, 1, self.seq_len, self.seq_len] + assert decoder_mask is None + assert list(encoder_decoder_mask.shape) == [self.bs, 1, self.seq_len_dec, self.seq_len] + + @pytest.mark.internal + def test_transformer_engine_version_less_than_1_7(self): + os.environ['NVTE_FLASH_ATTN'] = '1' + with pytest.raises(Exception) as exc_info: + encoder_mask, decoder_mask, encoder_decoder_mask = ( + T5MaskedWordPieceDataset.config_attention_mask( + self.encoder_tokens, + self.decoder_tokens, + self.encoder_mask, + self.decoder_mask, + use_local=False, + test_te_version="1.5", + ) + ) + + assert str(exc_info.value) == ( + "Flash and fused attention is not supported with transformer " + "engine version < 1.7. 
Set NVTE_FLASH_ATTN=0 and NVTE_FUSED_ATTN=0" + "or upgrade transformer engine >= 1.7" + ) diff --git a/tests/unit_tests/pipeline_parallel/test_schedules.py b/tests/unit_tests/pipeline_parallel/test_schedules.py index 0699409..ab42f66 100644 --- a/tests/unit_tests/pipeline_parallel/test_schedules.py +++ b/tests/unit_tests/pipeline_parallel/test_schedules.py @@ -1,271 +1,387 @@ -import pytest -import torch -from pytest_mock import mocker - -import megatron.core.pipeline_parallel.schedules as schedule -from megatron.core import ModelParallelConfig -from tests.unit_tests.test_utilities import Utils - -rank = Utils.rank - - -def test_get_forward_backward_func(): - Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=1) - assert schedule.get_forward_backward_func() == schedule.forward_backward_no_pipelining - Utils.destroy_model_parallel() - Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=4) - assert ( - schedule.get_forward_backward_func() - == schedule.forward_backward_pipelining_without_interleaving - ) - Utils.destroy_model_parallel() - Utils.initialize_model_parallel( - tensor_model_parallel_size=2, - pipeline_model_parallel_size=4, - virtual_pipeline_model_parallel_size=2, - ) - assert ( - schedule.get_forward_backward_func() - == schedule.forward_backward_pipelining_with_interleaving - ) - Utils.destroy_model_parallel() - Utils.initialize_model_parallel( - tensor_model_parallel_size=2, - pipeline_model_parallel_size=2, - virtual_pipeline_model_parallel_size=4, - ) - assert ( - schedule.get_forward_backward_func() - == schedule.forward_backward_pipelining_with_interleaving - ) - Utils.destroy_model_parallel() - - -def test_deallocate_output_tensor(): - out = torch.tensor([[1, 2, 3], [4, 5, 6]]) - schedule.deallocate_output_tensor(out) - assert out.nelement() == 6 - - -def test_forward_backward_func_without_pipeline_parallel(mocker): - from megatron.core.pipeline_parallel import get_forward_backward_func - - Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=1) - - def forward_step_func(data_iterator, model): - import os - - rank = int(os.environ['LOCAL_RANK']) - dummy_data = torch.ones(1, 4) - - def loss_func(output_tensor): - return rank, {'loss_reduced': rank} - - return model(dummy_data), loss_func - - model = torch.nn.Linear(4, 1) - model.model_type = 'unit-test' - - def set_input_tensor(input_tensor): - return None - - model.set_input_tensor = set_input_tensor - - forward_backward_func = get_forward_backward_func() - assert schedule.get_forward_backward_func() == schedule.forward_backward_no_pipelining - - mocker.patch("megatron.core.pipeline_parallel.schedules.custom_backward", return_value=2) - config = ModelParallelConfig(pipeline_model_parallel_size=1) - model.config = config - - losses_reduced = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=range(0, 100), - model=[model], - num_microbatches=4, - seq_length=None, - micro_batch_size=None, - forward_only=True, - ) - - loss_reduced_expected = [ - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - ] - - for i, j in zip(losses_reduced, loss_reduced_expected): - print(losses_reduced) - assert i['loss_reduced'] == j['loss_reduced'] - Utils.destroy_model_parallel() - - -def test_forward_backward_func_with_pipeline_parallel(mocker): - from megatron.core.pipeline_parallel import get_forward_backward_func - - 
Utils.initialize_model_parallel(tensor_model_parallel_size=1, pipeline_model_parallel_size=4) - - def forward_step_func(data_iterator, model): - import os - - rank = int(os.environ['LOCAL_RANK']) - - def loss_func(output_tensor): - return rank, {'loss_reduced': rank} - - return torch.rand(512, 8, 256).cuda(), loss_func - - model = torch.nn.Linear(4, 1) - model.model_type = 'unit-test' - - def set_input_tensor(input_tensor): - return None - - model.set_input_tensor = set_input_tensor - - forward_backward_func = get_forward_backward_func() - assert ( - schedule.get_forward_backward_func() - == schedule.forward_backward_pipelining_without_interleaving - ) - - sequence_length = 512 - micro_batch_size = 8 - hidden_size = 256 - - config = ModelParallelConfig( - pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float - ) - config.hidden_size = hidden_size - model.config = config - - losses_reduced = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=None, - model=[model], - num_microbatches=micro_batch_size, - seq_length=sequence_length, - micro_batch_size=micro_batch_size, - forward_only=True, - ) - - loss_reduced_expected = [ - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - ] - for i, j in zip(losses_reduced, loss_reduced_expected): - print(losses_reduced) - assert i['loss_reduced'] == j['loss_reduced'] - Utils.destroy_model_parallel() - - -def test_forward_backward_func_with_interleaving(mocker): - from megatron.core.enums import ModelType - from megatron.core.pipeline_parallel import get_forward_backward_func - - Utils.initialize_model_parallel( - tensor_model_parallel_size=1, - pipeline_model_parallel_size=4, - virtual_pipeline_model_parallel_size=2, - ) - - def forward_step_func(data_iterator, model): - import os - - rank = int(os.environ['LOCAL_RANK']) - - def loss_func(output_tensor): - return rank, {'loss_reduced': rank} - - return torch.rand(512, 8, 256).cuda(), loss_func - - model = torch.nn.Linear(4, 1) - - def set_input_tensor(input_tensor): - return None - - model.set_input_tensor = set_input_tensor - - forward_backward_func = get_forward_backward_func() - assert ( - schedule.get_forward_backward_func() - == schedule.forward_backward_pipelining_with_interleaving - ) - - sequence_length = 512 - micro_batch_size = 8 - hidden_size = 256 - - config = ModelParallelConfig( - pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float - ) - config.hidden_size = hidden_size - model.config = config - - mocker.patch("megatron.core.pipeline_parallel.schedules.custom_backward", return_value=2) - - with pytest.raises(RuntimeError): - model.model_type = ModelType.encoder_and_decoder - forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=[range(0, 100)], - model=[model, model], - num_microbatches=micro_batch_size, - seq_length=sequence_length, - micro_batch_size=micro_batch_size, - decoder_seq_length=sequence_length, - forward_only=True, - ) - - with pytest.raises(RuntimeError): - model.model_type = ModelType.encoder_or_decoder - forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=[range(0, 100)], - model=[model, model], - num_microbatches=micro_batch_size, - seq_length=sequence_length, - micro_batch_size=micro_batch_size, - decoder_seq_length=256, - forward_only=True, - ) - - with pytest.raises(RuntimeError): - model.model_type = ModelType.encoder_or_decoder - forward_backward_func( - 
forward_step_func=forward_step_func, - data_iterator=[range(0, 100)], - model=[model, model], - num_microbatches=7, - seq_length=sequence_length, - micro_batch_size=micro_batch_size, - decoder_seq_length=512, - forward_only=True, - ) - - model.model_type = ModelType.encoder_or_decoder - losses_reduced = forward_backward_func( - forward_step_func=forward_step_func, - data_iterator=[range(0, 100), range(0, 100)], - model=[model, model], - num_microbatches=micro_batch_size, - seq_length=sequence_length, - micro_batch_size=micro_batch_size, - decoder_seq_length=sequence_length, - forward_only=True, - ) - - loss_reduced_expected = [ - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - {'loss_reduced': rank}, - ] - for i, j in zip(losses_reduced, loss_reduced_expected): - print(losses_reduced) - assert i['loss_reduced'] == j['loss_reduced'] - - Utils.destroy_model_parallel() +import pytest +import torch +from pytest_mock import mocker + +import megatron.core.pipeline_parallel.schedules as schedule +from megatron.core import ModelParallelConfig +from tests.unit_tests.test_utilities import Utils + +rank = Utils.rank + + +def test_get_forward_backward_func(): + Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=1) + assert schedule.get_forward_backward_func() == schedule.forward_backward_no_pipelining + Utils.destroy_model_parallel() + Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=4) + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_without_interleaving + ) + Utils.destroy_model_parallel() + Utils.initialize_model_parallel( + tensor_model_parallel_size=2, + pipeline_model_parallel_size=4, + virtual_pipeline_model_parallel_size=2, + ) + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_with_interleaving + ) + Utils.destroy_model_parallel() + Utils.initialize_model_parallel( + tensor_model_parallel_size=2, + pipeline_model_parallel_size=2, + virtual_pipeline_model_parallel_size=4, + ) + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_with_interleaving + ) + Utils.destroy_model_parallel() + + +def test_deallocate_output_tensor(): + out = torch.tensor([[1, 2, 3], [4, 5, 6]]) + schedule.deallocate_output_tensor(out) + assert out.nelement() == 6 + + +def test_forward_backward_func_without_pipeline_parallel(mocker): + from megatron.core.pipeline_parallel import get_forward_backward_func + + Utils.initialize_model_parallel(tensor_model_parallel_size=2, pipeline_model_parallel_size=1) + + def forward_step_func(data_iterator, model): + import os + + rank = int(os.environ['LOCAL_RANK']) + dummy_data = torch.ones(1, 4) + + def loss_func(output_tensor): + return rank, {'loss_reduced': rank} + + return model(dummy_data), loss_func + + model = torch.nn.Linear(4, 1) + model.model_type = 'unit-test' + + def set_input_tensor(input_tensor): + return None + + model.set_input_tensor = set_input_tensor + + forward_backward_func = get_forward_backward_func() + assert schedule.get_forward_backward_func() == schedule.forward_backward_no_pipelining + + mocker.patch("megatron.core.pipeline_parallel.schedules.custom_backward", return_value=2) + config = ModelParallelConfig(pipeline_model_parallel_size=1) + model.config = config + + losses_reduced = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=range(0, 100), + model=[model], + num_microbatches=4, + 
seq_length=None, + micro_batch_size=None, + forward_only=True, + ) + + loss_reduced_expected = [ + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + ] + + for i, j in zip(losses_reduced, loss_reduced_expected): + print(losses_reduced) + assert i['loss_reduced'] == j['loss_reduced'] + Utils.destroy_model_parallel() + + +def test_forward_backward_func_with_pipeline_parallel(mocker): + from megatron.core.pipeline_parallel import get_forward_backward_func + + Utils.initialize_model_parallel(tensor_model_parallel_size=1, pipeline_model_parallel_size=4) + + def forward_step_func(data_iterator, model): + import os + + rank = int(os.environ['LOCAL_RANK']) + + def loss_func(output_tensor): + return rank, {'loss_reduced': rank} + + return torch.rand(512, 8, 256).cuda(), loss_func + + model = torch.nn.Linear(4, 1) + model.model_type = 'unit-test' + + def set_input_tensor(input_tensor): + return None + + model.set_input_tensor = set_input_tensor + + forward_backward_func = get_forward_backward_func() + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_without_interleaving + ) + + sequence_length = 512 + micro_batch_size = 8 + hidden_size = 256 + + config = ModelParallelConfig( + pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float + ) + config.hidden_size = hidden_size + model.config = config + + losses_reduced = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=None, + model=[model], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + forward_only=True, + ) + + loss_reduced_expected = [ + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + ] + for i, j in zip(losses_reduced, loss_reduced_expected): + print(losses_reduced) + assert i['loss_reduced'] == j['loss_reduced'] + Utils.destroy_model_parallel() + + +def test_forward_backward_func_with_interleaving(mocker): + from megatron.core.enums import ModelType + from megatron.core.pipeline_parallel import get_forward_backward_func + + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=4, + virtual_pipeline_model_parallel_size=2, + ) + + def forward_step_func(data_iterator, model): + import os + + rank = int(os.environ['LOCAL_RANK']) + + def loss_func(output_tensor): + return rank, {'loss_reduced': rank} + + return torch.rand(512, 8, 256).cuda(), loss_func + + model = torch.nn.Linear(4, 1) + + def set_input_tensor(input_tensor): + return None + + model.set_input_tensor = set_input_tensor + + forward_backward_func = get_forward_backward_func() + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_with_interleaving + ) + + sequence_length = 512 + micro_batch_size = 8 + hidden_size = 256 + + config = ModelParallelConfig( + pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float + ) + config.hidden_size = hidden_size + model.config = config + + mocker.patch("megatron.core.pipeline_parallel.schedules.custom_backward", return_value=2) + + with pytest.raises(RuntimeError): + model.model_type = ModelType.encoder_and_decoder + forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100)], + model=[model, model], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + decoder_seq_length=sequence_length, + 
forward_only=True, + ) + + with pytest.raises(RuntimeError): + model.model_type = ModelType.encoder_or_decoder + forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100)], + model=[model, model], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + decoder_seq_length=256, + forward_only=True, + ) + + with pytest.raises(RuntimeError): + model.model_type = ModelType.encoder_or_decoder + forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100)], + model=[model, model], + num_microbatches=7, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + decoder_seq_length=512, + forward_only=True, + ) + + model.model_type = ModelType.encoder_or_decoder + losses_reduced = forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100), range(0, 100)], + model=[model, model], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + decoder_seq_length=sequence_length, + forward_only=True, + ) + + loss_reduced_expected = [ + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + {'loss_reduced': rank}, + ] + for i, j in zip(losses_reduced, loss_reduced_expected): + print(losses_reduced) + assert i['loss_reduced'] == j['loss_reduced'] + + Utils.destroy_model_parallel() + + +def test_forward_backward_func_with_uneven_interleaving(mocker): + from megatron.core.enums import ModelType + from megatron.core.pipeline_parallel import get_forward_backward_func + + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=4, + virtual_pipeline_model_parallel_size=2, + ) + + def forward_step_func(data_iterator, model): + import os + + rank = int(os.environ['LOCAL_RANK']) + + def loss_func(output_tensor): + return rank, {'loss_reduced': rank} + + return torch.rand(512, 8, 256).cuda(), loss_func + + model_a = torch.nn.Linear(4, 1) + model_b = torch.nn.Linear(8, 1) + + def set_input_tensor(input_tensor): + return None + + model_a.set_input_tensor = set_input_tensor + model_b.set_input_tensor = set_input_tensor + + forward_backward_func = get_forward_backward_func() + assert ( + schedule.get_forward_backward_func() + == schedule.forward_backward_pipelining_with_interleaving + ) + + sequence_length = 512 + micro_batch_size = 8 + hidden_size = 256 + + config = ModelParallelConfig( + pipeline_model_parallel_size=4, sequence_parallel=False, pipeline_dtype=torch.float + ) + config.hidden_size = hidden_size + model_a.config = config + model_b.config = config + + mocker.patch("megatron.core.pipeline_parallel.schedules.custom_backward", return_value=2) + + with pytest.raises(RuntimeError): + model_a.model_type = ModelType.encoder_and_decoder + model_b.model_type = ModelType.encoder_and_decoder + forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100)], + model=[model_a, model_b], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + decoder_seq_length=sequence_length, + forward_only=True, + ) + + with pytest.raises(RuntimeError): + model_a.model_type = ModelType.encoder_or_decoder + model_b.model_type = ModelType.encoder_or_decoder + forward_backward_func( + forward_step_func=forward_step_func, + data_iterator=[range(0, 100)], + model=[model_a, model_b], + num_microbatches=micro_batch_size, + seq_length=sequence_length, + micro_batch_size=micro_batch_size, + 
            decoder_seq_length=256,
+            forward_only=True,
+        )
+
+    with pytest.raises(RuntimeError):
+        model_a.model_type = ModelType.encoder_or_decoder
+        model_b.model_type = ModelType.encoder_or_decoder
+        forward_backward_func(
+            forward_step_func=forward_step_func,
+            data_iterator=[range(0, 100)],
+            model=[model_a, model_b],
+            num_microbatches=7,
+            seq_length=sequence_length,
+            micro_batch_size=micro_batch_size,
+            decoder_seq_length=512,
+            forward_only=True,
+        )
+
+    model_a.model_type = ModelType.encoder_or_decoder
+    model_b.model_type = ModelType.encoder_or_decoder
+    losses_reduced = forward_backward_func(
+        forward_step_func=forward_step_func,
+        data_iterator=[range(0, 100), range(0, 100)],
+        model=[model_a, model_b],
+        num_microbatches=micro_batch_size,
+        seq_length=sequence_length,
+        micro_batch_size=micro_batch_size,
+        decoder_seq_length=sequence_length,
+        forward_only=True,
+    )
+
+    loss_reduced_expected = [
+        {'loss_reduced': rank},
+        {'loss_reduced': rank},
+        {'loss_reduced': rank},
+        {'loss_reduced': rank},
+    ]
+    for i, j in zip(losses_reduced, loss_reduced_expected):
+        print(losses_reduced)
+        assert i['loss_reduced'] == j['loss_reduced']
+
+    Utils.destroy_model_parallel()
diff --git a/tests/unit_tests/test_model_configs.py b/tests/unit_tests/test_model_configs.py
new file mode 100644
index 0000000..68b0bf7
--- /dev/null
+++ b/tests/unit_tests/test_model_configs.py
@@ -0,0 +1,37 @@
+import pathlib
+
+import pytest
+import yaml
+
+YAML_DIR = pathlib.Path(__file__).parent / ".." / "functional_tests" / "test_cases"
+
+
+def get_yaml_files(directory):
+    """Retrieve all YAML files from the specified directory."""
+    return list([file for file in directory.rglob("*.yaml") if file is not None])
+
+
+def load_yaml(file_path):
+    """Load a YAML file and return its content as a Python dictionary."""
+    with open(file_path, "r") as f:
+        return yaml.safe_load(f)
+
+
+@pytest.mark.parametrize(
+    "metric",
+    ["--log-memory-to-tensorboard", "--log-num-zeros-in-grad", "--log-timers-to-tensorboard"],
+)
+@pytest.mark.parametrize("yaml_file", get_yaml_files(YAML_DIR))
+def test_model_config_tracks_memory(yaml_file, metric):
+    """Test if each YAML file contains the required record."""
+    print("gpt3-nemo" in str(yaml_file) or "ckpt_converter" in str(yaml_file))
+    if "gpt3-nemo" in str(yaml_file) or "ckpt_converter" in str(yaml_file):
+        pytest.skip("Skipping for gpt3-nemo and ckpt_converter test cases")
+
+    model_config = load_yaml(yaml_file)
+
+    assert (
+        "MODEL_ARGS" in model_config
+        and metric in model_config["MODEL_ARGS"]
+        and model_config["MODEL_ARGS"][metric] is True
+    ), f"Please add argument `{metric}` to `{yaml_file.parent.name}/model_config.yaml` so that this metric gets tracked."
diff --git a/tests/unit_tests/test_optimizer.py b/tests/unit_tests/test_optimizer.py index bc4852b..fb1361a 100644 --- a/tests/unit_tests/test_optimizer.py +++ b/tests/unit_tests/test_optimizer.py @@ -1,113 +1,162 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch.optim import SGD, Adam - -from megatron.core.optimizer import ChainedOptimizer - - -class Net(nn.Module): - def __init__(self): - super().__init__() - self.conv1 = nn.Conv2d(3, 6, 5) - self.pool = nn.MaxPool2d(2, 2) - self.conv2 = nn.Conv2d(6, 16, 5) - self.fc1 = nn.Linear(16 * 5 * 5, 120) - self.fc2 = nn.Linear(120, 84) - self.fc3 = nn.Linear(84, 10) - - def forward(self, x): - x = self.pool(F.relu(self.conv1(x))) - x = self.pool(F.relu(self.conv2(x))) - x = torch.flatten(x, 1) # flatten all dimensions except batch - x = F.relu(self.fc1(x)) - x = F.relu(self.fc2(x)) - x = self.fc3(x) - return x - - -def test_chained_optimizer(): - net = Net() - optimizer_1 = Adam(list(net.parameters())[:2], lr=0.01) - optimizer_2 = SGD(list(net.parameters())[2:], lr=0.1, momentum=0.9) - chained_optimizer = ChainedOptimizer([optimizer_1, optimizer_2]) - - # Test the chained optimizer's param groups is a reference of the underlying optimizers' param groups - assert optimizer_1.param_groups[0]["lr"] == 0.01 - chained_optimizer.param_groups[0]["lr"] = 0.02 - assert optimizer_1.param_groups[0]["lr"] == 0.02 - - # Test the chained optimizer's state is a reference of the underlying optimizers' state - # 1. run step on optimizers, make sure there is state - assert len(chained_optimizer.state) == 0 - input = torch.randn(1, 3, 32, 32) - output = net(input) - output.sum().backward() - optimizer_1.step() - optimizer_2.step() - assert len(chained_optimizer.state) != 0 - - # 2. check the state is a reference - assert not list(optimizer_1.state.values())[0]["exp_avg"].is_cuda - assert not list(optimizer_2.state.values())[0]["momentum_buffer"].is_cuda - - def to_cuda(d): - for k, v in d.items(): - if isinstance(v, torch.Tensor): - d[k] = v.to("cuda") - elif isinstance(v, dict): - to_cuda(v) - return d - - for k, v in chained_optimizer.state.items(): - chained_optimizer.state[k] = to_cuda(v) - - assert list(optimizer_1.state.values())[0]["exp_avg"].is_cuda - assert list(optimizer_2.state.values())[0]["momentum_buffer"].is_cuda - - -def test_precision_aware_fused_adam(): - try: - from transformer_engine.pytorch.optimizers import FusedAdam - except ImportError: - # Older versions of TE don't have FusedAdam. - return - - import inspect - - adam_args = inspect.signature(FusedAdam).parameters - arg_names = ["master_weight_dtype", "exp_avg_dtype", "exp_avg_sq_dtype", "use_decoupled_grad"] - for name in arg_names: - if name not in adam_args: - # Skip the test if TE doesn't support precision aware FusedAdam. 
- return - - tensor = torch.rand(278011, dtype=torch.bfloat16).cuda() - params_1 = [torch.nn.Parameter(tensor.float())] # FP32 reference - params_2 = [torch.nn.Parameter(tensor.clone())] # BF16 - - options = {"lr": 1, "betas": (0.1, 0.25), "eps": 1e-08, "weight_decay": 0, "amsgrad": False} - - optimizer_1 = FusedAdam(params_1, **options) - optimizer_2 = FusedAdam(params_2, master_weights=True, use_decoupled_grad=True, **options) - - for _ in range(1000): - for p_1, p_2 in zip(params_1, params_2): - p_1.grad = torch.rand_like(p_1) - p_2.decoupled_grad = p_1.grad.clone() - - optimizer_1.step() - optimizer_2.step() - - master_params = [optimizer_2.get_unscaled_state(p, "master_param") for p in params_2] - for p_1, p_2 in zip(params_1, master_params): - bytes_1 = p_1.data.view(torch.uint8) - bytes_2 = p_2.data.view(torch.uint8) - # Make sure bit-wise matched - assert torch.all(bytes_1 == bytes_2) - - for p_1, p_2 in zip(params_1, params_2): - bytes_1 = p_1.data.bfloat16().view(torch.uint8) - bytes_2 = p_2.data.view(torch.uint8) - # Make sure bit-wise matched - assert torch.all(bytes_1 == bytes_2) +import os + +import pytest +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.optim import SGD, Adam + +from megatron.core.distributed import DistributedDataParallel, DistributedDataParallelConfig +from megatron.core.optimizer import ChainedOptimizer, OptimizerConfig, get_megatron_optimizer +from megatron.core.transformer import TransformerConfig +from tests.unit_tests.test_utilities import Utils +from tests.unit_tests.test_utils import _deinit_distributed, _init_distributed + + +class Net(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(3, 6, 5) + self.pool = nn.MaxPool2d(2, 2) + self.conv2 = nn.Conv2d(6, 16, 5) + self.fc1 = nn.Linear(16 * 5 * 5, 120) + self.fc2 = nn.Linear(120, 84) + self.fc3 = nn.Linear(84, 10) + + def forward(self, x): + x = self.pool(F.relu(self.conv1(x))) + x = self.pool(F.relu(self.conv2(x))) + x = torch.flatten(x, 1) # flatten all dimensions except batch + x = F.relu(self.fc1(x)) + x = F.relu(self.fc2(x)) + x = self.fc3(x) + return x + + +def test_chained_optimizer(): + net = Net() + optimizer_1 = Adam(list(net.parameters())[:2], lr=0.01) + optimizer_2 = SGD(list(net.parameters())[2:], lr=0.1, momentum=0.9) + chained_optimizer = ChainedOptimizer([optimizer_1, optimizer_2]) + + # Test the chained optimizer's param groups is a reference of the underlying optimizers' param groups + assert optimizer_1.param_groups[0]["lr"] == 0.01 + chained_optimizer.param_groups[0]["lr"] = 0.02 + assert optimizer_1.param_groups[0]["lr"] == 0.02 + + # Test the chained optimizer's state is a reference of the underlying optimizers' state + # 1. run step on optimizers, make sure there is state + assert len(chained_optimizer.state) == 0 + input = torch.randn(1, 3, 32, 32) + output = net(input) + output.sum().backward() + optimizer_1.step() + optimizer_2.step() + assert len(chained_optimizer.state) != 0 + + # 2. 
check the state is a reference + assert not list(optimizer_1.state.values())[0]["exp_avg"].is_cuda + assert not list(optimizer_2.state.values())[0]["momentum_buffer"].is_cuda + + def to_cuda(d): + for k, v in d.items(): + if isinstance(v, torch.Tensor): + d[k] = v.to("cuda") + elif isinstance(v, dict): + to_cuda(v) + return d + + for k, v in chained_optimizer.state.items(): + chained_optimizer.state[k] = to_cuda(v) + + assert list(optimizer_1.state.values())[0]["exp_avg"].is_cuda + assert list(optimizer_2.state.values())[0]["momentum_buffer"].is_cuda + + +def test_precision_aware_fused_adam(): + try: + from transformer_engine.pytorch.optimizers import FusedAdam + except ImportError: + # Older versions of TE don't have FusedAdam. + return + + import inspect + + adam_args = inspect.signature(FusedAdam).parameters + arg_names = ["master_weight_dtype", "exp_avg_dtype", "exp_avg_sq_dtype", "use_decoupled_grad"] + for name in arg_names: + if name not in adam_args: + # Skip the test if TE doesn't support precision aware FusedAdam. + return + + tensor = torch.rand(278011, dtype=torch.bfloat16).cuda() + params_1 = [torch.nn.Parameter(tensor.float())] # FP32 reference + params_2 = [torch.nn.Parameter(tensor.clone())] # BF16 + + options = {"lr": 1, "betas": (0.1, 0.25), "eps": 1e-08, "weight_decay": 0, "amsgrad": False} + + optimizer_1 = FusedAdam(params_1, **options) + optimizer_2 = FusedAdam(params_2, master_weights=True, use_decoupled_grad=True, **options) + + for _ in range(1000): + for p_1, p_2 in zip(params_1, params_2): + p_1.grad = torch.rand_like(p_1) + p_2.decoupled_grad = p_1.grad.clone() + + optimizer_1.step() + optimizer_2.step() + + master_params = [optimizer_2.get_unscaled_state(p, "master_param") for p in params_2] + for p_1, p_2 in zip(params_1, master_params): + bytes_1 = p_1.data.view(torch.uint8) + bytes_2 = p_2.data.view(torch.uint8) + # Make sure bit-wise matched + assert torch.all(bytes_1 == bytes_2) + + for p_1, p_2 in zip(params_1, params_2): + bytes_1 = p_1.data.bfloat16().view(torch.uint8) + bytes_2 = p_2.data.view(torch.uint8) + # Make sure bit-wise matched + assert torch.all(bytes_1 == bytes_2) + + +@pytest.mark.parametrize("use_distributed_optimizer", [False, True]) +@pytest.mark.parametrize("precision", ['bf16', 'fp32']) +def test_optim_sharded_state_dict(use_distributed_optimizer: bool, precision: str): + world = int(os.getenv('WORLD_SIZE', '1')) + rank = int(os.getenv('RANK', '0')) + + # Setup: distributed, model, mock_args. 
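+ # The DistributedDataParallel wrapper below supplies the param/grad buffers that get_megatron_optimizer shards when use_distributed_optimizer=True, and model.sharded_state_dict() feeds into the optimizer's sharded state dict checked at the end of this test.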
+ _init_distributed(world, rank) + Utils.initialize_model_parallel() + model = torch.nn.Linear(100, 100, bias=False, dtype=torch.bfloat16, device='cuda') + model.requires_grad_(True) + model.weight.data.fill_(1.0) + ddp_config = DistributedDataParallelConfig(use_distributed_optimizer=use_distributed_optimizer) + model = DistributedDataParallel( + TransformerConfig(num_attention_heads=1, num_layers=1), ddp_config, model + ) + for param in model.parameters(): + assert param.requires_grad + + if precision == 'bf16': + optimizer_config = OptimizerConfig( + optimizer='adam', bf16=True, use_distributed_optimizer=use_distributed_optimizer + ) + elif precision == 'fp32': + optimizer_config = OptimizerConfig( + optimizer='adam', + bf16=False, + fp16=False, + use_distributed_optimizer=use_distributed_optimizer, + ) + optim = get_megatron_optimizer(optimizer_config, [model]) + + model_sharded_state_dict = model.sharded_state_dict() + sharded_state_dict = optim.sharded_state_dict(model_sharded_state_dict) + + if 'optimizer' in sharded_state_dict and 'state' in sharded_state_dict['optimizer']: + assert ( + 'common_step' not in sharded_state_dict['optimizer']['state'] + or sharded_state_dict['optimizer']['state']['common_step'] is not None + ), "Found 'optimizer.state.common_step=None' in sharded state dict." diff --git a/tests/unit_tests/test_optimizer_cpu_offloading.py b/tests/unit_tests/test_optimizer_cpu_offloading.py new file mode 100644 index 0000000..e7eb8b6 --- /dev/null +++ b/tests/unit_tests/test_optimizer_cpu_offloading.py @@ -0,0 +1,141 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +import random + +import numpy as np +import pytest +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.optim import SGD, Adam + +try: + from transformer_engine.pytorch.optimizers import FusedAdam as GPUAdam + from transformer_engine.pytorch.optimizers import FusedSGD as GPUSGD +except: + # Handle environment where transformer_engine is not installed + from torch.optim import SGD as GPUSGD + from torch.optim import Adam as GPUAdam + +from megatron.core.optimizer.cpu_offloading import HybridDeviceOptimizer + + +class Net(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(3, 6, 5) + self.pool = nn.MaxPool2d(2, 2) + self.conv2 = nn.Conv2d(6, 16, 5) + self.fc1 = nn.Linear(16 * 5 * 5, 120) + self.fc2 = nn.Linear(120, 84) + self.fc3 = nn.Linear(84, 10) + + def forward(self, x): + x = self.pool(F.relu(self.conv1(x))) + x = self.pool(F.relu(self.conv2(x))) + x = torch.flatten(x, 1) # flatten all dimensions except batch + x = F.relu(self.fc1(x)) + x = F.relu(self.fc2(x)) + x = self.fc3(x) + return x + + +def setup_seed(seed): + random.seed(seed) # Set Python's built-in random seed + np.random.seed(seed) # Set NumPy's random seed + torch.manual_seed(seed) # Set PyTorch's CPU seed + torch.cuda.manual_seed(seed) # Set PyTorch's GPU seed (if using CUDA) + torch.cuda.manual_seed_all(seed) # Set seed for all GPUs + torch.backends.cudnn.deterministic = True # Ensure deterministic behavior + torch.backends.cudnn.benchmark = False # Disable auto-tuner for reproducibility + + +@pytest.mark.skipif( + torch.__version__ < '2.3.0', + reason=( + "Requires PyTorch 2.3.0 or higher, lower versions of pytorch have " + "misaligned optimizer accuracy for CPU and GPU." 
+ ), +) +@pytest.mark.parametrize('n_steps', [1, 10]) +@pytest.mark.parametrize('overlap_cpu_optimizer_d2h_h2d', [False, True]) +@pytest.mark.parametrize('offload_fraction', [0, 0.5, 1.0]) +@pytest.mark.parametrize('optimizer', ['sgd', 'adam']) +@pytest.mark.parametrize('with_param_groups', [False, True]) +def test_multi_device_hybrid_optimizer( + with_param_groups, optimizer, offload_fraction, overlap_cpu_optimizer_d2h_h2d, n_steps +): + setup_seed(42) + net1 = Net().cuda() + net2 = Net().cuda() + net2.load_state_dict(net1.state_dict()) + base_lr = 1e-3 + params = list(net1.parameters()) + ref_params = list(net2.parameters()) + if with_param_groups: + param_groups = [ + {"params": params[: len(params) // 2], "wd_mult": 1.0, "lr_mult": 1e-4}, + {"params": params[len(params) // 2 :], "wd_mult": 0.0, "lr_mult": 2e-4}, + ] + params = param_groups + ref_param_groups = [ + {"params": ref_params[: len(ref_params) // 2], "wd_mult": 1.0, "lr_mult": 1e-4}, + {"params": ref_params[len(ref_params) // 2 :], "wd_mult": 0.0, "lr_mult": 2e-4}, + ] + ref_params = ref_param_groups + + if optimizer == 'adam': + cls_kwargs = dict(cpu_optimizer_cls=Adam, gpu_optimizer_cls=GPUAdam) + else: + cls_kwargs = dict(cpu_optimizer_cls=SGD, gpu_optimizer_cls=GPUSGD) + + hdo = HybridDeviceOptimizer( + params, + offload_fraction=offload_fraction, + lr=base_lr, + overlap_cpu_optimizer_d2h_h2d=overlap_cpu_optimizer_d2h_h2d, + **cls_kwargs, + ) + + ref_optimizer = cls_kwargs['gpu_optimizer_cls'](ref_params, lr=base_lr) + + # 1. run step on optimizer, make sure there is state generated + assert len(hdo.state_dict()["state"]) == 0 # state is empty + input = torch.randn(1, 3, 32, 32).cuda() + output = net1(input) + output.sum().backward() + hdo.step() + output = net2(input) + output.sum().backward() + ref_optimizer.step() + # PyTorch SGD will not generate state + if optimizer != 'sgd': + assert len(hdo.state_dict()["state"]) != 0 + + # 2. check the state is on right device + if optimizer == 'adam': + first_param_id = hdo.state_dict()["param_groups"][0]["params"][0] + last_param_id = hdo.state_dict()["param_groups"][-1]["params"][-1] + if offload_fraction > 0: + assert not hdo.state_dict()["state"][first_param_id]["exp_avg"].is_cuda + if offload_fraction < 1: + assert hdo.state_dict()["state"][last_param_id]["exp_avg"].is_cuda + + # 3. 
check parameters allclose + for _ in range(1, n_steps): + input = torch.randn(1, 3, 32, 32).cuda() + output = net1(input) + output.sum().backward() + hdo.step() + output = net2(input) + output.sum().backward() + ref_optimizer.step() + + params = net1.state_dict() + ref_params = net2.state_dict() + for k, v in params.items(): + assert (v.isnan() == ref_params[k].isnan()).all() + torch.nan_to_num_(v, 0) + torch.nan_to_num_(ref_params[k], 0) + assert torch.allclose( + v, ref_params[k], atol=1e-03 + ), f"Weight {k} value mismatch, max error: {(v - ref_params[k]).abs().max()}" diff --git a/tests/unit_tests/test_parallel_state.py b/tests/unit_tests/test_parallel_state.py index ca5185b..0155943 100644 --- a/tests/unit_tests/test_parallel_state.py +++ b/tests/unit_tests/test_parallel_state.py @@ -1,517 +1,524 @@ -import pytest -import torch - -import megatron.core.parallel_state as ps -from tests.unit_tests.test_utilities import Utils - -rank = Utils.rank -world_size = Utils.world_size -test_parallel_order = ['tp-cp-ep-dp-pp', 'tp-cp-pp-ep-dp'] - - -@pytest.mark.parametrize('order', test_parallel_order) -@pytest.mark.flaky_in_dev -def test_initialize_and_destroy_model_parallel(order): - with pytest.raises(AssertionError): - assert ps.initialize_model_parallel(order=order) - Utils.initialize_distributed() - with pytest.raises(RuntimeError): - assert ps.initialize_model_parallel(tensor_model_parallel_size=2 * world_size, order=order) - with pytest.raises(RuntimeError): - assert ps.initialize_model_parallel( - pipeline_model_parallel_size=2 * world_size, order=order - ) - with pytest.raises(RuntimeError): - assert ps.initialize_model_parallel( - pipeline_model_parallel_size=world_size, - tensor_model_parallel_size=world_size, - order=order, - ) - with pytest.raises(RuntimeError): - assert ps.initialize_model_parallel(virtual_pipeline_model_parallel_size=2, order=order) - Utils.initialize_model_parallel( - tensor_model_parallel_size=2, pipeline_model_parallel_size=4, order=order - ) - - assert ps.model_parallel_is_initialized() - assert ps.get_model_parallel_group() is not None - assert ps.get_tensor_model_parallel_group() is not None - assert ps.get_pipeline_model_parallel_group() is not None - assert ps.get_data_parallel_group() is not None - assert ps.get_expert_model_parallel_group() is not None - assert ps.get_expert_tensor_parallel_group() is not None - assert ps.get_expert_data_parallel_group() is not None - assert ps.get_expert_tensor_model_pipeline_parallel_group() is not None - Utils.destroy_model_parallel() - assert ps._MODEL_PARALLEL_GROUP is None - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_pipeline_parallel_initializations(order): - Utils.initialize_model_parallel( - tensor_model_parallel_size=2, pipeline_model_parallel_size=4, order=order - ) - assert ps.get_pipeline_model_parallel_first_rank() == rank % 2 - assert ps.get_data_parallel_src_rank() == rank - assert ps.get_pipeline_model_parallel_next_rank() == ((rank + 2) % world_size) - assert ps.get_pipeline_model_parallel_prev_rank() == ((rank - 2) % world_size) - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_data_parallel_initializations(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - assert ps.get_data_parallel_src_rank() == rank - assert ps.get_data_parallel_world_size() == 1 - assert ps.get_data_parallel_rank() == 0 - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', 
test_parallel_order) -def test_tensor_model_parellel_world_size(order): - Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) - assert ps.get_tensor_model_parallel_world_size() == world_size - ps.set_tensor_model_parallel_world_size(None) - assert ps.get_tensor_model_parallel_world_size() == world_size - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_expert_tensor_parellel_world_size(order): - Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) - assert ps.get_expert_tensor_parallel_world_size() == world_size - ps.set_expert_tensor_parallel_world_size(None) - assert ps.get_expert_tensor_parallel_world_size() == world_size - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_pipeline_model_parallel_world_size(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - assert ps.get_pipeline_model_parallel_world_size() == world_size - ps.set_pipeline_model_parallel_world_size(None) - assert ps.get_pipeline_model_parallel_world_size() == world_size - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_tensor_model_parallel_rank(order): - Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) - assert ps.get_tensor_model_parallel_rank() == rank - ps.set_tensor_model_parallel_rank(None) - assert ps.get_tensor_model_parallel_rank() == rank - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_moe_tensor_model_parellel_rank(order): - Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) - assert ps.get_expert_tensor_parallel_rank() == rank - ps.set_expert_tensor_parallel_rank(None) - assert ps.get_expert_tensor_parallel_rank() == rank - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_pipeline_model_parallel_rank(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - assert ps.get_pipeline_model_parallel_rank() == rank - ps.set_pipeline_model_parallel_rank(None) - assert ps.get_pipeline_model_parallel_rank() == rank - Utils.destroy_model_parallel() - - -def test_context_parallel_rank(): - Utils.initialize_model_parallel(context_parallel_size=world_size) - assert ps.get_context_parallel_rank() == rank - Utils.destroy_model_parallel() - - -def test_expert_model_parallel_rank(): - Utils.initialize_model_parallel(expert_model_parallel_size=world_size) - assert ps.get_expert_model_parallel_rank() == rank - ps.set_expert_model_parallel_rank(None) - assert ps.get_expert_model_parallel_rank() == rank - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_is_pipeline_first_stage(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - assert ps.is_pipeline_first_stage(ignore_virtual=True) == (rank == 0) - assert ps.is_pipeline_first_stage() == (rank == 0) - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_is_pipeline_last_stage(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - assert ps.is_pipeline_last_stage(ignore_virtual=True) == (rank == world_size - 1) - assert ps.is_pipeline_last_stage() == (rank == world_size - 1) - Utils.destroy_model_parallel() - - 
-@pytest.mark.parametrize('order', test_parallel_order) -def test_virtual_pipeline_model_parallel_rank(order): - Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) - ps.set_virtual_pipeline_model_parallel_rank(rank) - assert ps.get_virtual_pipeline_model_parallel_rank() == rank - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_get_tensor_model_parallel_src_rank(order): - Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) - assert ps.get_tensor_model_parallel_src_rank() == ((rank // world_size) * world_size) - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize('order', test_parallel_order) -def test_encoder_tensor_pipeline_parallelism(order): - Utils.initialize_model_parallel( - tensor_model_parallel_size=5, - pipeline_model_parallel_size=1, - encoder_pipeline_model_parallel_size=1, - encoder_tensor_model_parallel_size=3, - order=order, - ) - if rank < 2: - assert ps.get_tensor_model_parallel_world_size() == 3 - assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], list) - elif rank == 2: - assert ps.get_tensor_model_parallel_world_size() == 3 - assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], int) - else: - assert ps.get_tensor_model_parallel_world_size() == 5 - assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], int) - Utils.destroy_model_parallel() - - -@pytest.mark.internal -@pytest.mark.parametrize( - 'src_tp_pp, ep_size', - [ - ((1, 8), 1), - ((2, 4), 1), - ((4, 2), 1), - ((8, 1), 1), - ((4, 1), 2), - ((1, 1), 8), - ((1, 1), 2), - ((2, 1), 4), - ], -) -def test_different_initialize_order_consistency(src_tp_pp, ep_size): - Utils.initialize_model_parallel( - *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-ep-dp-pp' - ) - tp_rank = ps.get_tensor_model_parallel_rank() - dp_rank = ps.get_data_parallel_rank() - pp_rank = ps.get_pipeline_model_parallel_rank() - ep_rank = ps.get_expert_model_parallel_rank() - - tp_g = torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) - dp_g = torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) - pp_g = torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) - dp_no_ep_g = torch.distributed.get_process_group_ranks(ps.get_expert_data_parallel_group()) - cp_g = torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) - mp_g = torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) - tp_ep_g = torch.distributed.get_process_group_ranks( - ps.get_expert_tensor_and_model_parallel_group() - ) - tp_dp_g = torch.distributed.get_process_group_ranks( - ps.get_tensor_and_data_parallel_group(False) - ) - - Utils.destroy_model_parallel() - - Utils.initialize_model_parallel( - *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-pp-ep-dp' - ) - assert tp_rank == ps.get_tensor_model_parallel_rank() - assert dp_rank == ps.get_data_parallel_rank() - assert pp_rank == ps.get_pipeline_model_parallel_rank() - assert ep_rank == ps.get_expert_model_parallel_rank() - - assert tp_g == torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) - assert dp_g == torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) - assert pp_g == torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) - assert dp_no_ep_g == torch.distributed.get_process_group_ranks( - ps.get_expert_data_parallel_group() - ) - assert cp_g == 
torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) - assert mp_g == torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) - assert tp_ep_g == torch.distributed.get_process_group_ranks( - ps.get_expert_tensor_and_model_parallel_group() - ) - assert tp_dp_g == torch.distributed.get_process_group_ranks( - ps.get_tensor_and_data_parallel_group(False) - ) - - Utils.destroy_model_parallel() - - -@pytest.mark.parametrize( - 'src_tp_pp, ep_size', - [((1, 2), 1), ((1, 4), 1), ((2, 2), 1), ((1, 2), 2), ((1, 4), 2), ((2, 2), 2)], -) -def test_different_initialize_order_unconsistency(src_tp_pp, ep_size): - Utils.initialize_model_parallel( - *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-ep-dp-pp' - ) - - tp_g = torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) - dp_g = torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) - pp_g = torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) - cp_g = torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) - amax_g = torch.distributed.get_process_group_ranks(ps.get_amax_reduction_group(False)) - mp_g = torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) - - Utils.destroy_model_parallel() - - Utils.initialize_model_parallel( - *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-pp-ep-dp' - ) - assert tp_g == torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) - assert dp_g != torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) - assert pp_g != torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) - assert cp_g == torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) - assert amax_g != torch.distributed.get_process_group_ranks(ps.get_amax_reduction_group(False)) - assert mp_g != torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) - - Utils.destroy_model_parallel() - - -@pytest.mark.internal -@pytest.mark.parametrize( - 'nodes, num_gpu, tp, pp, cp, ep', - [ - (1, 1, 1, 1, 1, 1), - (1, 8, 8, 1, 1, 1), - (1, 8, 2, 2, 1, 1), - (1, 8, 2, 4, 1, 1), - (3, 8, 8, 3, 1, 1), - (4, 8, 2, 4, 1, 1), - (8, 8, 8, 8, 1, 1), - (8, 8, 2, 1, 1, 4), - (8, 8, 2, 2, 2, 4), - (8, 8, 2, 1, 4, 8), - (8, 8, 2, 2, 2, 8), - (16, 8, 4, 8, 1, 1), - (16, 8, 4, 8, 1, 4), - (16, 8, 4, 8, 4, 1), - (16, 8, 8, 8, 1, 1), - (16, 8, 4, 8, 1, 1), - (16, 8, 8, 8, 1, 1), - (32, 8, 4, 8, 1, 1), - (32, 8, 8, 8, 1, 1), - (32, 8, 4, 8, 1, 4), - (32, 8, 8, 8, 4, 1), - (64, 8, 4, 2, 8, 8), - (64, 8, 4, 8, 1, 1), - (64, 8, 8, 8, 1, 1), - (96, 8, 4, 8, 1, 1), - (128, 8, 4, 2, 8, 8), - (128, 8, 4, 8, 1, 1), - (256, 8, 4, 8, 1, 1), - (316, 8, 4, 8, 1, 1), - (384, 8, 4, 8, 1, 1), - (512, 8, 4, 8, 1, 1), - (768, 8, 4, 8, 1, 1), - (1024, 8, 4, 8, 1, 1), - (1280, 8, 4, 8, 1, 1), - (1344, 8, 4, 8, 1, 1), - ], -) -def test_rank_generator_for_tp_dp_pp(nodes, num_gpu, tp, pp, cp, ep): - def golden_rank_result_from_past_code( - world_size: int, - tensor_model_parallel_size: int = 1, - pipeline_model_parallel_size: int = 1, - context_parallel_size: int = 1, - expert_model_parallel_size: int = 1, - ): - data_parallel_size: int = world_size // ( - tensor_model_parallel_size * pipeline_model_parallel_size * context_parallel_size - ) - num_tensor_model_parallel_groups: int = world_size // tensor_model_parallel_size - num_pipeline_model_parallel_groups: int = world_size // pipeline_model_parallel_size - - dp_groups = [] - 
dp_groups_with_cp = [] - - all_data_parallel_group_ranks_with_cp = [] - for i in range(pipeline_model_parallel_size): - start_rank = i * num_pipeline_model_parallel_groups - end_rank = (i + 1) * num_pipeline_model_parallel_groups - for j in range(context_parallel_size * tensor_model_parallel_size): - ranks = range( - start_rank + j, end_rank, context_parallel_size * tensor_model_parallel_size - ) - dp_groups.append(list(ranks)) - for j in range(tensor_model_parallel_size): - ranks_with_cp = range(start_rank + j, end_rank, tensor_model_parallel_size) - all_data_parallel_group_ranks_with_cp.append(list(ranks_with_cp)) - dp_groups_with_cp.append(list(ranks_with_cp)) - - cp_group = [] - for i in range(pipeline_model_parallel_size): - for j in range(data_parallel_size): - start_rank = ( - i * num_pipeline_model_parallel_groups - + j * tensor_model_parallel_size * context_parallel_size - ) - end_rank = ( - i * num_pipeline_model_parallel_groups - + (j + 1) * tensor_model_parallel_size * context_parallel_size - ) - for k in range(tensor_model_parallel_size): - ranks = range(start_rank + k, end_rank, tensor_model_parallel_size) - cp_group.append(list(ranks)) - - mp_group = [] - for i in range(data_parallel_size * context_parallel_size): - ranks = [ - data_parallel_group_ranks_with_cp[i] - for data_parallel_group_ranks_with_cp in all_data_parallel_group_ranks_with_cp - ] - mp_group.append(list(ranks)) - - tp_group = [] - for i in range(num_tensor_model_parallel_groups): - ranks = range(i * tensor_model_parallel_size, (i + 1) * tensor_model_parallel_size) - tp_group.append(list(ranks)) - - pp_group = [] - for i in range(num_pipeline_model_parallel_groups): - ranks = range(i, world_size, num_pipeline_model_parallel_groups) - pp_group.append(list(ranks)) - - tp_dp_group = [] - tp_dp_cp_group = [] - tensor_and_data_group_size_with_cp: int = ( - tensor_model_parallel_size * data_parallel_size * context_parallel_size - ) - num_tensor_and_data_groups_with_cp: int = world_size // tensor_and_data_group_size_with_cp - for i in range(num_tensor_and_data_groups_with_cp): - start_rank = i * tensor_and_data_group_size_with_cp - end_rank = start_rank + tensor_and_data_group_size_with_cp - ranks = range(start_rank, end_rank) - tp_dp_cp_group.append(list(ranks)) - - for j in range(context_parallel_size): - ranks = [] - for k in range(data_parallel_size): - start_rank = ( - i * tensor_and_data_group_size_with_cp - + j * tensor_model_parallel_size - + k * tensor_model_parallel_size * context_parallel_size - ) - end_rank = start_rank + tensor_model_parallel_size - ranks = ranks + list(range(start_rank, end_rank)) - tp_dp_group.append(list(ranks)) - - expert_tp_ep_group = [] - expert_dp_group = [] - - expert_data_parallel_size = world_size // ( - tensor_model_parallel_size * pipeline_model_parallel_size * expert_model_parallel_size - ) - all_ranks = torch.arange(world_size).reshape( - ( - pipeline_model_parallel_size, - expert_data_parallel_size, - expert_model_parallel_size, - tensor_model_parallel_size, - ) - ) - # (pp, dp, ep, tp) -> (pp*dp, ep*tp) - tp_ep_rearrange = torch.reshape( - all_ranks, (-1, expert_model_parallel_size * tensor_model_parallel_size) - ) - num_tp_ep_groups = tp_ep_rearrange.shape[0] - for i in range(num_tp_ep_groups): - expert_tensor_and_model_parallel_ranks = tp_ep_rearrange[i].tolist() - expert_tp_ep_group.append(expert_tensor_and_model_parallel_ranks) - - # (pp, dp, ep, tp) -> (pp*ep*tp, dp) - expert_dp_rearrange = torch.permute(all_ranks, (0, 2, 3, 1)).reshape( - -1, 
expert_data_parallel_size - ) - num_expert_dp_groups = world_size // expert_data_parallel_size - for i in range(num_expert_dp_groups): - expert_dp_ranks = expert_dp_rearrange[i].tolist() - expert_dp_group.append(expert_dp_ranks) - - return ( - dp_groups, - dp_groups_with_cp, - cp_group, - mp_group, - tp_group, - pp_group, - tp_dp_group, - tp_dp_cp_group, - expert_tp_ep_group, - expert_dp_group, - ) - - world_size = nodes * num_gpu - dp = world_size // (tp * pp * cp) - expert_dp = world_size // (tp * ep * pp) - assert dp % ep == 0, f"dp size ({dp}) is not divisible by ep {ep} ." - assert ( - world_size % (tp * pp * cp) == 0 - ), f"world_size ({world_size}) is not divisible by tp {tp} x pp {pp} x cp {cp}." - ( - dp_groups, - dp_groups_with_cp, - cp_group, - mp_group, - tp_group, - pp_group, - tp_dp_group, - tp_dp_cp_group, - expert_tp_ep_group, - expert_dp_group, - ) = golden_rank_result_from_past_code( - world_size=world_size, - tensor_model_parallel_size=tp, - pipeline_model_parallel_size=pp, - context_parallel_size=cp, - expert_model_parallel_size=ep, - ) - rank_generator = ps.RankGenerator(tp=tp, ep=1, dp=dp, pp=pp, cp=cp, order="tp-cp-dp-pp") - expert_rank_generator = ps.RankGenerator( - tp=tp, ep=ep, dp=expert_dp, pp=pp, cp=1, order="tp-ep-dp-pp" - ) - assert dp_groups == rank_generator.get_ranks( - "dp" - ), f"{dp_groups} != {rank_generator.get_ranks('dp')}" - assert dp_groups_with_cp == rank_generator.get_ranks( - 'dp-cp' - ), f"{dp_groups_with_cp} != {rank_generator.get_ranks('dp-cp')}" - assert cp_group == rank_generator.get_ranks( - "cp" - ), f"{cp_group} != {rank_generator.get_ranks('cp')}." - assert mp_group == rank_generator.get_ranks( - "tp-pp" - ), f"{mp_group} != {rank_generator.get_ranks('tp-pp')}" - assert tp_group == rank_generator.get_ranks( - "tp" - ), f"{tp_group} != {rank_generator.get_ranks('tp')}" - assert pp_group == rank_generator.get_ranks( - "pp" - ), f"{pp_group} != {rank_generator.get_ranks('pp')}" - assert tp_dp_group == rank_generator.get_ranks( - "tp-dp" - ), f"{tp_dp_group} != {rank_generator.get_ranks('tp-dp')}" - assert tp_dp_cp_group == rank_generator.get_ranks( - "tp-dp-cp" - ), f"{tp_dp_cp_group} != {rank_generator.get_ranks('tp-dp-cp')}" - assert expert_tp_ep_group == expert_rank_generator.get_ranks( - "tp-ep" - ), f"{expert_tp_ep_group} != {expert_rank_generator.get_ranks('tp-ep')}." - assert expert_dp_group == expert_rank_generator.get_ranks( - "dp" - ), f"{expert_dp_group} != {expert_rank_generator.get_ranks('dp')}." 
+import pytest +import torch + +import megatron.core.parallel_state as ps +from tests.unit_tests.test_utilities import Utils + +rank = Utils.rank +world_size = Utils.world_size +test_parallel_order = ['tp-cp-ep-dp-pp', 'tp-cp-pp-ep-dp'] + + +@pytest.mark.parametrize('order', test_parallel_order) +@pytest.mark.flaky_in_dev +def test_initialize_and_destroy_model_parallel(order): + with pytest.raises(AssertionError): + assert ps.initialize_model_parallel(order=order) + Utils.initialize_distributed() + with pytest.raises(RuntimeError): + assert ps.initialize_model_parallel(tensor_model_parallel_size=2 * world_size, order=order) + with pytest.raises(RuntimeError): + assert ps.initialize_model_parallel( + pipeline_model_parallel_size=2 * world_size, order=order + ) + with pytest.raises(RuntimeError): + assert ps.initialize_model_parallel( + pipeline_model_parallel_size=world_size, + tensor_model_parallel_size=world_size, + order=order, + ) + with pytest.raises(RuntimeError): + assert ps.initialize_model_parallel(virtual_pipeline_model_parallel_size=2, order=order) + Utils.initialize_model_parallel( + tensor_model_parallel_size=2, pipeline_model_parallel_size=4, order=order + ) + + assert ps.model_parallel_is_initialized() + assert ps.get_model_parallel_group() is not None + assert ps.get_tensor_model_parallel_group() is not None + assert ps.get_pipeline_model_parallel_group() is not None + assert ps.get_data_parallel_group() is not None + assert ps.get_expert_model_parallel_group() is not None + assert ps.get_expert_tensor_parallel_group() is not None + assert ps.get_expert_data_parallel_group() is not None + assert ps.get_expert_tensor_model_pipeline_parallel_group() is not None + Utils.destroy_model_parallel() + assert ps._MODEL_PARALLEL_GROUP is None + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_pipeline_parallel_initializations(order): + Utils.initialize_model_parallel( + tensor_model_parallel_size=2, pipeline_model_parallel_size=4, order=order + ) + assert ps.get_pipeline_model_parallel_first_rank() == rank % 2 + assert ps.get_data_parallel_src_rank() == rank + assert ps.get_pipeline_model_parallel_next_rank() == ((rank + 2) % world_size) + assert ps.get_pipeline_model_parallel_prev_rank() == ((rank - 2) % world_size) + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_data_parallel_initializations(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + assert ps.get_data_parallel_src_rank() == rank + assert ps.get_data_parallel_world_size() == 1 + assert ps.get_data_parallel_rank() == 0 + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_tensor_model_parellel_world_size(order): + Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) + assert ps.get_tensor_model_parallel_world_size() == world_size + ps.set_tensor_model_parallel_world_size(None) + assert ps.get_tensor_model_parallel_world_size() == world_size + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_expert_tensor_parellel_world_size(order): + Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) + assert ps.get_expert_tensor_parallel_world_size() == world_size + ps.set_expert_tensor_parallel_world_size(None) + assert ps.get_expert_tensor_parallel_world_size() == world_size + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', 
test_parallel_order) +def test_pipeline_model_parallel_world_size(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + assert ps.get_pipeline_model_parallel_world_size() == world_size + ps.set_pipeline_model_parallel_world_size(None) + assert ps.get_pipeline_model_parallel_world_size() == world_size + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_tensor_model_parallel_rank(order): + Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) + assert ps.get_tensor_model_parallel_rank() == rank + ps.set_tensor_model_parallel_rank(None) + assert ps.get_tensor_model_parallel_rank() == rank + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_moe_tensor_model_parellel_rank(order): + Utils.initialize_model_parallel(expert_tensor_parallel_size=world_size, order=order) + assert ps.get_expert_tensor_parallel_rank() == rank + ps.set_expert_tensor_parallel_rank(None) + assert ps.get_expert_tensor_parallel_rank() == rank + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_pipeline_model_parallel_rank(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + assert ps.get_pipeline_model_parallel_rank() == rank + ps.set_pipeline_model_parallel_rank(None) + assert ps.get_pipeline_model_parallel_rank() == rank + Utils.destroy_model_parallel() + + +def test_context_parallel_rank(): + Utils.initialize_model_parallel(context_parallel_size=world_size) + assert ps.get_context_parallel_rank() == rank + Utils.destroy_model_parallel() + + +def test_expert_model_parallel_rank(): + Utils.initialize_model_parallel(expert_model_parallel_size=world_size) + assert ps.get_expert_model_parallel_rank() == rank + ps.set_expert_model_parallel_rank(None) + assert ps.get_expert_model_parallel_rank() == rank + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_is_pipeline_first_stage(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + assert ps.is_pipeline_first_stage(ignore_virtual=True) == (rank == 0) + assert ps.is_pipeline_first_stage() == (rank == 0) + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_is_pipeline_last_stage(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + assert ps.is_pipeline_last_stage(ignore_virtual=True) == (rank == world_size - 1) + assert ps.is_pipeline_last_stage() == (rank == world_size - 1) + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_virtual_pipeline_model_parallel_rank(order): + Utils.initialize_model_parallel(pipeline_model_parallel_size=world_size, order=order) + ps.set_virtual_pipeline_model_parallel_rank(rank) + assert ps.get_virtual_pipeline_model_parallel_rank() == rank + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_get_tensor_model_parallel_src_rank(order): + Utils.initialize_model_parallel(tensor_model_parallel_size=world_size, order=order) + assert ps.get_tensor_model_parallel_src_rank() == ((rank // world_size) * world_size) + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize('order', test_parallel_order) +def test_encoder_tensor_pipeline_parallelism(order): + Utils.initialize_model_parallel( + 
tensor_model_parallel_size=5, + pipeline_model_parallel_size=1, + encoder_pipeline_model_parallel_size=1, + encoder_tensor_model_parallel_size=3, + order=order, + ) + if rank < 2: + assert ps.get_tensor_model_parallel_world_size() == 3 + assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], list) + last_ranks = ps.get_pipeline_model_parallel_last_rank() + assert isinstance(last_ranks, list) + assert len(last_ranks) == 2 + elif rank == 2: + assert ps.get_tensor_model_parallel_world_size() == 3 + assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], int) + assert isinstance(ps.get_pipeline_model_parallel_last_rank(), int) + else: + assert ps.get_tensor_model_parallel_world_size() == 5 + assert isinstance(ps._PIPELINE_GLOBAL_RANKS[0], int) + assert isinstance(ps.get_pipeline_model_parallel_last_rank(), int) + Utils.destroy_model_parallel() + + +@pytest.mark.internal +@pytest.mark.parametrize( + 'src_tp_pp, ep_size', + [ + ((1, 8), 1), + ((2, 4), 1), + ((4, 2), 1), + ((8, 1), 1), + ((4, 1), 2), + ((1, 1), 8), + ((1, 1), 2), + ((2, 1), 4), + ], +) +def test_different_initialize_order_consistency(src_tp_pp, ep_size): + Utils.initialize_model_parallel( + *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-ep-dp-pp' + ) + tp_rank = ps.get_tensor_model_parallel_rank() + dp_rank = ps.get_data_parallel_rank() + pp_rank = ps.get_pipeline_model_parallel_rank() + ep_rank = ps.get_expert_model_parallel_rank() + + tp_g = torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) + dp_g = torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) + pp_g = torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) + dp_no_ep_g = torch.distributed.get_process_group_ranks(ps.get_expert_data_parallel_group()) + cp_g = torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) + mp_g = torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) + tp_ep_g = torch.distributed.get_process_group_ranks( + ps.get_expert_tensor_and_model_parallel_group() + ) + tp_dp_g = torch.distributed.get_process_group_ranks( + ps.get_tensor_and_data_parallel_group(False) + ) + + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel( + *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-pp-ep-dp' + ) + assert tp_rank == ps.get_tensor_model_parallel_rank() + assert dp_rank == ps.get_data_parallel_rank() + assert pp_rank == ps.get_pipeline_model_parallel_rank() + assert ep_rank == ps.get_expert_model_parallel_rank() + + assert tp_g == torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) + assert dp_g == torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) + assert pp_g == torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) + assert dp_no_ep_g == torch.distributed.get_process_group_ranks( + ps.get_expert_data_parallel_group() + ) + assert cp_g == torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) + assert mp_g == torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) + assert tp_ep_g == torch.distributed.get_process_group_ranks( + ps.get_expert_tensor_and_model_parallel_group() + ) + assert tp_dp_g == torch.distributed.get_process_group_ranks( + ps.get_tensor_and_data_parallel_group(False) + ) + + Utils.destroy_model_parallel() + + +@pytest.mark.parametrize( + 'src_tp_pp, ep_size', + [((1, 2), 1), ((1, 4), 1), ((2, 2), 1), ((1, 2), 2), ((1, 4), 2), ((2, 2), 2)], +) +@pytest.mark.flaky +@pytest.mark.flaky_in_dev 
+def test_different_initialize_order_unconsistency(src_tp_pp, ep_size): + Utils.initialize_model_parallel( + *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-ep-dp-pp' + ) + + tp_g = torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) + dp_g = torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) + pp_g = torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) + cp_g = torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) + amax_g = torch.distributed.get_process_group_ranks(ps.get_amax_reduction_group(False)) + mp_g = torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) + + Utils.destroy_model_parallel() + + Utils.initialize_model_parallel( + *src_tp_pp, expert_model_parallel_size=ep_size, order='tp-pp-ep-dp' + ) + assert tp_g == torch.distributed.get_process_group_ranks(ps.get_tensor_model_parallel_group()) + assert dp_g != torch.distributed.get_process_group_ranks(ps.get_data_parallel_group(False)) + assert pp_g != torch.distributed.get_process_group_ranks(ps.get_pipeline_model_parallel_group()) + assert cp_g == torch.distributed.get_process_group_ranks(ps.get_context_parallel_group()) + assert amax_g != torch.distributed.get_process_group_ranks(ps.get_amax_reduction_group(False)) + assert mp_g != torch.distributed.get_process_group_ranks(ps.get_model_parallel_group()) + + Utils.destroy_model_parallel() + + +@pytest.mark.internal +@pytest.mark.parametrize( + 'nodes, num_gpu, tp, pp, cp, ep', + [ + (1, 1, 1, 1, 1, 1), + (1, 8, 8, 1, 1, 1), + (1, 8, 2, 2, 1, 1), + (1, 8, 2, 4, 1, 1), + (3, 8, 8, 3, 1, 1), + (4, 8, 2, 4, 1, 1), + (8, 8, 8, 8, 1, 1), + (8, 8, 2, 1, 1, 4), + (8, 8, 2, 2, 2, 4), + (8, 8, 2, 1, 4, 8), + (8, 8, 2, 2, 2, 8), + (16, 8, 4, 8, 1, 1), + (16, 8, 4, 8, 1, 4), + (16, 8, 4, 8, 4, 1), + (16, 8, 8, 8, 1, 1), + (16, 8, 4, 8, 1, 1), + (16, 8, 8, 8, 1, 1), + (32, 8, 4, 8, 1, 1), + (32, 8, 8, 8, 1, 1), + (32, 8, 4, 8, 1, 4), + (32, 8, 8, 8, 4, 1), + (64, 8, 4, 2, 8, 8), + (64, 8, 4, 8, 1, 1), + (64, 8, 8, 8, 1, 1), + (96, 8, 4, 8, 1, 1), + (128, 8, 4, 2, 8, 8), + (128, 8, 4, 8, 1, 1), + (256, 8, 4, 8, 1, 1), + (316, 8, 4, 8, 1, 1), + (384, 8, 4, 8, 1, 1), + (512, 8, 4, 8, 1, 1), + (768, 8, 4, 8, 1, 1), + (1024, 8, 4, 8, 1, 1), + (1280, 8, 4, 8, 1, 1), + (1344, 8, 4, 8, 1, 1), + ], +) +def test_rank_generator_for_tp_dp_pp(nodes, num_gpu, tp, pp, cp, ep): + def golden_rank_result_from_past_code( + world_size: int, + tensor_model_parallel_size: int = 1, + pipeline_model_parallel_size: int = 1, + context_parallel_size: int = 1, + expert_model_parallel_size: int = 1, + ): + data_parallel_size: int = world_size // ( + tensor_model_parallel_size * pipeline_model_parallel_size * context_parallel_size + ) + num_tensor_model_parallel_groups: int = world_size // tensor_model_parallel_size + num_pipeline_model_parallel_groups: int = world_size // pipeline_model_parallel_size + + dp_groups = [] + dp_groups_with_cp = [] + + all_data_parallel_group_ranks_with_cp = [] + for i in range(pipeline_model_parallel_size): + start_rank = i * num_pipeline_model_parallel_groups + end_rank = (i + 1) * num_pipeline_model_parallel_groups + for j in range(context_parallel_size * tensor_model_parallel_size): + ranks = range( + start_rank + j, end_rank, context_parallel_size * tensor_model_parallel_size + ) + dp_groups.append(list(ranks)) + for j in range(tensor_model_parallel_size): + ranks_with_cp = range(start_rank + j, end_rank, tensor_model_parallel_size) + 
all_data_parallel_group_ranks_with_cp.append(list(ranks_with_cp)) + dp_groups_with_cp.append(list(ranks_with_cp)) + + cp_group = [] + for i in range(pipeline_model_parallel_size): + for j in range(data_parallel_size): + start_rank = ( + i * num_pipeline_model_parallel_groups + + j * tensor_model_parallel_size * context_parallel_size + ) + end_rank = ( + i * num_pipeline_model_parallel_groups + + (j + 1) * tensor_model_parallel_size * context_parallel_size + ) + for k in range(tensor_model_parallel_size): + ranks = range(start_rank + k, end_rank, tensor_model_parallel_size) + cp_group.append(list(ranks)) + + mp_group = [] + for i in range(data_parallel_size * context_parallel_size): + ranks = [ + data_parallel_group_ranks_with_cp[i] + for data_parallel_group_ranks_with_cp in all_data_parallel_group_ranks_with_cp + ] + mp_group.append(list(ranks)) + + tp_group = [] + for i in range(num_tensor_model_parallel_groups): + ranks = range(i * tensor_model_parallel_size, (i + 1) * tensor_model_parallel_size) + tp_group.append(list(ranks)) + + pp_group = [] + for i in range(num_pipeline_model_parallel_groups): + ranks = range(i, world_size, num_pipeline_model_parallel_groups) + pp_group.append(list(ranks)) + + tp_dp_group = [] + tp_dp_cp_group = [] + tensor_and_data_group_size_with_cp: int = ( + tensor_model_parallel_size * data_parallel_size * context_parallel_size + ) + num_tensor_and_data_groups_with_cp: int = world_size // tensor_and_data_group_size_with_cp + for i in range(num_tensor_and_data_groups_with_cp): + start_rank = i * tensor_and_data_group_size_with_cp + end_rank = start_rank + tensor_and_data_group_size_with_cp + ranks = range(start_rank, end_rank) + tp_dp_cp_group.append(list(ranks)) + + for j in range(context_parallel_size): + ranks = [] + for k in range(data_parallel_size): + start_rank = ( + i * tensor_and_data_group_size_with_cp + + j * tensor_model_parallel_size + + k * tensor_model_parallel_size * context_parallel_size + ) + end_rank = start_rank + tensor_model_parallel_size + ranks = ranks + list(range(start_rank, end_rank)) + tp_dp_group.append(list(ranks)) + + expert_tp_ep_group = [] + expert_dp_group = [] + + expert_data_parallel_size = world_size // ( + tensor_model_parallel_size * pipeline_model_parallel_size * expert_model_parallel_size + ) + all_ranks = torch.arange(world_size).reshape( + ( + pipeline_model_parallel_size, + expert_data_parallel_size, + expert_model_parallel_size, + tensor_model_parallel_size, + ) + ) + # (pp, dp, ep, tp) -> (pp*dp, ep*tp) + tp_ep_rearrange = torch.reshape( + all_ranks, (-1, expert_model_parallel_size * tensor_model_parallel_size) + ) + num_tp_ep_groups = tp_ep_rearrange.shape[0] + for i in range(num_tp_ep_groups): + expert_tensor_and_model_parallel_ranks = tp_ep_rearrange[i].tolist() + expert_tp_ep_group.append(expert_tensor_and_model_parallel_ranks) + + # (pp, dp, ep, tp) -> (pp*ep*tp, dp) + expert_dp_rearrange = torch.permute(all_ranks, (0, 2, 3, 1)).reshape( + -1, expert_data_parallel_size + ) + num_expert_dp_groups = world_size // expert_data_parallel_size + for i in range(num_expert_dp_groups): + expert_dp_ranks = expert_dp_rearrange[i].tolist() + expert_dp_group.append(expert_dp_ranks) + + return ( + dp_groups, + dp_groups_with_cp, + cp_group, + mp_group, + tp_group, + pp_group, + tp_dp_group, + tp_dp_cp_group, + expert_tp_ep_group, + expert_dp_group, + ) + + world_size = nodes * num_gpu + dp = world_size // (tp * pp * cp) + expert_dp = world_size // (tp * ep * pp) + assert dp % ep == 0, f"dp size ({dp}) is not divisible by ep 
{ep} ." + assert ( + world_size % (tp * pp * cp) == 0 + ), f"world_size ({world_size}) is not divisible by tp {tp} x pp {pp} x cp {cp}." + ( + dp_groups, + dp_groups_with_cp, + cp_group, + mp_group, + tp_group, + pp_group, + tp_dp_group, + tp_dp_cp_group, + expert_tp_ep_group, + expert_dp_group, + ) = golden_rank_result_from_past_code( + world_size=world_size, + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + context_parallel_size=cp, + expert_model_parallel_size=ep, + ) + rank_generator = ps.RankGenerator(tp=tp, ep=1, dp=dp, pp=pp, cp=cp, order="tp-cp-dp-pp") + expert_rank_generator = ps.RankGenerator( + tp=tp, ep=ep, dp=expert_dp, pp=pp, cp=1, order="tp-ep-dp-pp" + ) + assert dp_groups == rank_generator.get_ranks( + "dp" + ), f"{dp_groups} != {rank_generator.get_ranks('dp')}" + assert dp_groups_with_cp == rank_generator.get_ranks( + 'dp-cp' + ), f"{dp_groups_with_cp} != {rank_generator.get_ranks('dp-cp')}" + assert cp_group == rank_generator.get_ranks( + "cp" + ), f"{cp_group} != {rank_generator.get_ranks('cp')}." + assert mp_group == rank_generator.get_ranks( + "tp-pp" + ), f"{mp_group} != {rank_generator.get_ranks('tp-pp')}" + assert tp_group == rank_generator.get_ranks( + "tp" + ), f"{tp_group} != {rank_generator.get_ranks('tp')}" + assert pp_group == rank_generator.get_ranks( + "pp" + ), f"{pp_group} != {rank_generator.get_ranks('pp')}" + assert tp_dp_group == rank_generator.get_ranks( + "tp-dp" + ), f"{tp_dp_group} != {rank_generator.get_ranks('tp-dp')}" + assert tp_dp_cp_group == rank_generator.get_ranks( + "tp-dp-cp" + ), f"{tp_dp_cp_group} != {rank_generator.get_ranks('tp-dp-cp')}" + assert expert_tp_ep_group == expert_rank_generator.get_ranks( + "tp-ep" + ), f"{expert_tp_ep_group} != {expert_rank_generator.get_ranks('tp-ep')}." + assert expert_dp_group == expert_rank_generator.get_ranks( + "dp" + ), f"{expert_dp_group} != {expert_rank_generator.get_ranks('dp')}." 
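For intuition about the grouping arithmetic that the golden reference above encodes, the data-parallel and pipeline-parallel rank groups can be reproduced on a toy configuration in plain Python, without torch.distributed. dp_groups_toy and pp_groups_toy are illustrative helpers written to mirror the golden loops, not part of megatron.core.parallel_state.

def dp_groups_toy(world_size, tp, pp, cp=1):
    # Ranks that share the same tp/cp/pp coordinates form one data-parallel group:
    # within each pipeline-stage block, stride by tp * cp (same loop as the golden code).
    num_pp_groups = world_size // pp
    groups = []
    for i in range(pp):
        start, end = i * num_pp_groups, (i + 1) * num_pp_groups
        for j in range(cp * tp):
            groups.append(list(range(start + j, end, cp * tp)))
    return groups

def pp_groups_toy(world_size, pp):
    # Pipeline groups stride across the whole world by the number of pipeline groups.
    num_pp_groups = world_size // pp
    return [list(range(i, world_size, num_pp_groups)) for i in range(num_pp_groups)]

# Toy example: 8 ranks, tp=2, pp=2, cp=1 -> dp=2.
assert dp_groups_toy(8, tp=2, pp=2) == [[0, 2], [1, 3], [4, 6], [5, 7]]
assert pp_groups_toy(8, pp=2) == [[0, 4], [1, 5], [2, 6], [3, 7]]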
diff --git a/tests/unit_tests/test_utils.py b/tests/unit_tests/test_utils.py index 229cead..73e235a 100644 --- a/tests/unit_tests/test_utils.py +++ b/tests/unit_tests/test_utils.py @@ -1,213 +1,272 @@ -import os -import time -import urllib.request as req - -import numpy as np -import pytest -import torch - -import megatron.core.utils as util -from tests.unit_tests.test_utilities import Utils - - -def test_divide_properly(): - assert util.divide(4, 2) == 2 - - -def test_divide_improperly(): - with pytest.raises(AssertionError): - util.divide(4, 5) - - -def test_global_memory_buffer(): - global_memory_buffer = util.GlobalMemoryBuffer() - obtained_tensor = global_memory_buffer.get_tensor((3, 2), torch.float32, "test_tensor") - expected_tensor = torch.empty((3, 2), dtype=torch.float32, device=torch.cuda.current_device()) - assert obtained_tensor.shape == expected_tensor.shape - - -def test_make_viewless_tensor(): - inp = torch.rand((3, 4)) - assert torch.equal(inp, util.make_viewless_tensor(inp, True, True)) - assert torch.equal(inp, util.make_viewless_tensor(inp, True, False)) - - -def test_safely_set_viewless_tensor_data(): - tensor = torch.zeros((3, 4)) - new_data_tensor = torch.tensor(np.random.rand(3, 4)) - util.safely_set_viewless_tensor_data(tensor, new_data_tensor) - assert torch.equal(tensor, new_data_tensor) - - -def test_assert_viewless_tensor(): - tensor = torch.rand((3, 4)) - assert torch.equal(util.assert_viewless_tensor(tensor), tensor) - input_tensor_list = [tensor, tensor, tensor] - output_tensor_list = util.assert_viewless_tensor(input_tensor_list) - for inp, out in zip(input_tensor_list, output_tensor_list): - assert torch.equal(inp, out) - - -# Initialize torch.distributed; do not call init_process_group here, call -# Utils.initialize_distributed() instead. -def _init_distributed(world, rank): - Utils.initialize_distributed() - assert torch.distributed.is_initialized() == True - assert torch.distributed.get_rank() == rank - assert torch.cuda.device_count() == world - torch.distributed.barrier() - - -# Deinitialization and cleanup. -# Do not call torch.distributed.destroy_process_group, may be needed by other tests. -def _deinit_distributed(): - assert torch.distributed.is_initialized() == True - torch.distributed.barrier() - - -def test_check_param_hashes_across_dp_replicas(): - world = int(os.getenv('WORLD_SIZE', '1')) - rank = int(os.getenv('RANK', '0')) - - # Setup. - _init_distributed(world, rank) - Utils.initialize_model_parallel() - model = torch.nn.Linear(100, 100, bias=False) - - # First check case where all replicas agree. - model.weight.data.fill_(1.0) - assert util.check_param_hashes_across_dp_replicas([model]) - - # Now check case where replica 0 disagrees with all other replicas. - if rank == 0: - model.weight.data.fill_(0.0) - param_hashes_match = util.check_param_hashes_across_dp_replicas([model]) - expected_param_hashes_match = rank == 0 - assert param_hashes_match == expected_param_hashes_match - - # Teardown. - _deinit_distributed() - - -def test_cross_check_param_hashes_across_dp_replicas(): - world = int(os.getenv('WORLD_SIZE', '1')) - rank = int(os.getenv('RANK', '0')) - - # Setup. - _init_distributed(world, rank) - Utils.initialize_model_parallel() - model = torch.nn.Linear(100, 100, bias=False) - - # First check case where all replicas agree. - model.weight.data.fill_(1.0) - assert util.check_param_hashes_across_dp_replicas([model], True) - - # Now check case where replica 0 disagrees with all other replicas. 
- if rank == 0: - model.weight.data.fill_(0.0) - assert not util.check_param_hashes_across_dp_replicas([model], True) - - # Teardown. - _deinit_distributed() - - -def test_straggler_detector(): - world = int(os.getenv('WORLD_SIZE', '1')) - rank = int(os.getenv('RANK', '0')) - master = os.getenv('MASTER_ADDR', 'localhost') - port = 65535 - - # Checks if the instance is disabled. - def straggler_detector_disabled(): - assert stimer.enabled == False - - # Checks if the instance is enabled. - def straggler_detector_enabled(): - assert stimer.enabled == True - - # Enable. - def straggler_detector_enable(): - if rank == 0: - resp = req.urlopen(f"http://{master}:{port}").read().decode().split() - assert resp[3] == "ON" - # Call the report function, this will propagate the change. - stimer.report() - - # Time an operation. - def straggler_detector_timeit(): - s = 2 # Sleep for 2 seconds. - M = 20 - K = 30 - N = 40 - mat1 = torch.randn(M, K, device='cuda') - mat2 = torch.randn(K, N, device='cuda') - # batch_data. - with stimer(bdata=True): - time.sleep(s) - # GEMM. - with stimer: - res = torch.matmul(mat1, mat2) - delta, batch_delta, _, _, _, _ = stimer.elapsed() - assert delta > 0.0 - assert batch_delta >= s - - # Test function to raise ValueError - def straggler_value_error(): - raise ValueError("Exception value raised") - - # Check that exception is not suppressed. - def straggler_detector_exception_propagate(): - # batch_data - with pytest.raises(ZeroDivisionError): - with stimer(bdata=True): - x = 1 / 0 - # non-batch-data - with pytest.raises(ValueError, match=r".* value .*"): - with stimer(): - straggler_value_error() - - # Reporting. - def straggler_detector_report(): - s = 2 # Sleep for 2 seconds. - N = 20 - P = 30 - M = 40 - mat1 = torch.randn(N, P, device='cuda') - mat2 = torch.randn(P, M, device='cuda') - tfp = (N * M) * (2 * P - 1) # Theoretical. - iter = 10 # Mock. - # batch_data. - with stimer(bdata=True): - time.sleep(s) - # GEMM. - with stimer: - res = torch.matmul(mat1, mat2) - r = stimer.report(total_flops=tfp, log_interval=iter) - rb = True if rank == 0 else False - assert r == rb - - # Start test. - # Setup. - _init_distributed(world, rank) - - # Create a straggler_detector with enabled set to false. - stimer = util.StragglerDetector() - stimer.configure(world, rank, enabled=False, port=port) - # Check if configuration was success. - assert stimer.configured == True - - # Check if the instance is in disabled state. - straggler_detector_disabled() - # Enable it now, must call report. - straggler_detector_enable() - # Check if all ranks have straggler detector enabled. - straggler_detector_enabled() - # Time some operation. - straggler_detector_timeit() - # Report only from rank 0. - straggler_detector_report() - # Check that exception is not suppressed. - straggler_detector_exception_propagate() - util.StragglerDetector._configured = False - # Teardown. 
- _deinit_distributed() +import os +import time +import urllib.request as req +from types import SimpleNamespace + +import mock +import numpy as np +import pytest +import torch + +import megatron.core.utils as util +import megatron.training.utils as training_util +from megatron.core.distributed import DistributedDataParallel, DistributedDataParallelConfig +from megatron.core.optimizer import OptimizerConfig, get_megatron_optimizer +from megatron.core.transformer import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +def test_divide_properly(): + assert util.divide(4, 2) == 2 + + +def test_divide_improperly(): + with pytest.raises(AssertionError): + util.divide(4, 5) + + +def test_global_memory_buffer(): + global_memory_buffer = util.GlobalMemoryBuffer() + obtained_tensor = global_memory_buffer.get_tensor((3, 2), torch.float32, "test_tensor") + expected_tensor = torch.empty((3, 2), dtype=torch.float32, device=torch.cuda.current_device()) + assert obtained_tensor.shape == expected_tensor.shape + + +def test_make_viewless_tensor(): + inp = torch.rand((3, 4)) + assert torch.equal(inp, util.make_viewless_tensor(inp, True, True)) + assert torch.equal(inp, util.make_viewless_tensor(inp, True, False)) + + +def test_safely_set_viewless_tensor_data(): + tensor = torch.zeros((3, 4)) + new_data_tensor = torch.tensor(np.random.rand(3, 4)) + util.safely_set_viewless_tensor_data(tensor, new_data_tensor) + assert torch.equal(tensor, new_data_tensor) + + +def test_assert_viewless_tensor(): + tensor = torch.rand((3, 4)) + assert torch.equal(util.assert_viewless_tensor(tensor), tensor) + input_tensor_list = [tensor, tensor, tensor] + output_tensor_list = util.assert_viewless_tensor(input_tensor_list) + for inp, out in zip(input_tensor_list, output_tensor_list): + assert torch.equal(inp, out) + + +# Initialize torch.distributed; do not call init_process_group here, call +# Utils.initialize_distributed() instead. +def _init_distributed(world, rank): + Utils.initialize_distributed() + assert torch.distributed.is_initialized() == True + assert torch.distributed.get_rank() == rank + assert torch.cuda.device_count() == world + torch.distributed.barrier() + + +# Deinitialization and cleanup. +# Do not call torch.distributed.destroy_process_group, may be needed by other tests. +def _deinit_distributed(): + assert torch.distributed.is_initialized() == True + torch.distributed.barrier() + + +@pytest.mark.flaky_in_dev +def test_check_param_hashes_across_dp_replicas(): + world = int(os.getenv('WORLD_SIZE', '1')) + rank = int(os.getenv('RANK', '0')) + + # Setup. + _init_distributed(world, rank) + Utils.initialize_model_parallel() + model = torch.nn.Linear(100, 100, bias=False, device='cuda') + + # First check case where all replicas agree. + model.weight.data.fill_(1.0) + assert util.check_param_hashes_across_dp_replicas([model]) + + # Now check case where replica 0 disagrees with all other replicas. + if rank == 0: + model.weight.data.fill_(0.0) + param_hashes_match = util.check_param_hashes_across_dp_replicas([model]) + expected_param_hashes_match = rank == 0 + assert param_hashes_match == expected_param_hashes_match + + # Teardown. + _deinit_distributed() + + +@pytest.mark.flaky_in_dev +def test_cross_check_param_hashes_across_dp_replicas(): + world = int(os.getenv('WORLD_SIZE', '1')) + rank = int(os.getenv('RANK', '0')) + + # Setup. 
+ _init_distributed(world, rank) + Utils.initialize_model_parallel() + model = torch.nn.Linear(100, 100, bias=False, device='cuda') + + # First check case where all replicas agree. + model.weight.data.fill_(1.0) + assert util.check_param_hashes_across_dp_replicas([model], True) + + # Now check case where replica 0 disagrees with all other replicas. + if rank == 0: + model.weight.data.fill_(0.0) + assert not util.check_param_hashes_across_dp_replicas([model], True) + + # Teardown. + _deinit_distributed() + + +@pytest.mark.parametrize("use_distributed_optimizer", [False, True]) +@pytest.mark.flaky_in_dev +def test_param_norm(use_distributed_optimizer: bool): + world = int(os.getenv('WORLD_SIZE', '1')) + rank = int(os.getenv('RANK', '0')) + + # Setup: distributed, model, mock_args. + _init_distributed(world, rank) + Utils.initialize_model_parallel() + model = torch.nn.Linear(100, 100, bias=False, dtype=torch.bfloat16, device='cuda') + model.requires_grad_(True) + model.weight.data.fill_(1.0) + ddp_config = DistributedDataParallelConfig(use_distributed_optimizer=use_distributed_optimizer) + # Use dummy TransformerConfig which doesn't trigger __post_init__ assertions. + model = DistributedDataParallel( + TransformerConfig(num_attention_heads=1, num_layers=1), ddp_config, model + ) + for param in model.parameters(): + assert param.requires_grad + mock_args = SimpleNamespace(bf16=True) + + with mock.patch('megatron.training.utils.get_args', new=lambda: mock_args): + # Make sure norm is correct when `main_param` attribute is not available. + assert training_util.calc_params_l2_norm( + model, force_create_fp32_copy=False + ) == pytest.approx(100.0) + assert training_util.calc_params_l2_norm( + model, force_create_fp32_copy=True + ) == pytest.approx(100.0) + + # Make sure norm is correct when `main_param` attribute is available. + optimizer_config = OptimizerConfig( + bf16=True, use_distributed_optimizer=use_distributed_optimizer + ) + _ = get_megatron_optimizer(optimizer_config, [model]) + for param in model.parameters(): + assert hasattr(param, 'main_param') + if use_distributed_optimizer: + assert getattr(param, 'main_param_sharded', False) + assert training_util.calc_params_l2_norm( + model, force_create_fp32_copy=False + ) == pytest.approx(100.0) + assert training_util.calc_params_l2_norm( + model, force_create_fp32_copy=True + ) == pytest.approx(100.0) + + # Teardown. + _deinit_distributed() + + +@pytest.mark.flaky_in_dev +def test_straggler_detector(): + world = int(os.getenv('WORLD_SIZE', '1')) + rank = int(os.getenv('RANK', '0')) + master = os.getenv('MASTER_ADDR', 'localhost') + port = 65535 + + # Checks if the instance is disabled. + def straggler_detector_disabled(): + assert stimer.enabled == False + + # Checks if the instance is enabled. + def straggler_detector_enabled(): + assert stimer.enabled == True + + # Enable. + def straggler_detector_enable(): + if rank == 0: + resp = req.urlopen(f"http://{master}:{port}").read().decode().split() + assert resp[3] == "ON" + # Call the report function, this will propagate the change. + stimer.report() + + # Time an operation. + def straggler_detector_timeit(): + s = 2 # Sleep for 2 seconds. + M = 20 + K = 30 + N = 40 + mat1 = torch.randn(M, K, device='cuda') + mat2 = torch.randn(K, N, device='cuda') + # batch_data. + with stimer(bdata=True): + time.sleep(s) + # GEMM. 
+ with stimer: + res = torch.matmul(mat1, mat2) + delta, batch_delta, _, _, _, _ = stimer.elapsed() + assert delta > 0.0 + assert batch_delta >= s + + # Test function to raise ValueError + def straggler_value_error(): + raise ValueError("Exception value raised") + + # Check that exception is not suppressed. + def straggler_detector_exception_propagate(): + # batch_data + with pytest.raises(ZeroDivisionError): + with stimer(bdata=True): + x = 1 / 0 + # non-batch-data + with pytest.raises(ValueError, match=r".* value .*"): + with stimer(): + straggler_value_error() + + # Reporting. + def straggler_detector_report(): + s = 2 # Sleep for 2 seconds. + N = 20 + P = 30 + M = 40 + mat1 = torch.randn(N, P, device='cuda') + mat2 = torch.randn(P, M, device='cuda') + tfp = (N * M) * (2 * P - 1) # Theoretical. + iter = 10 # Mock. + # batch_data. + with stimer(bdata=True): + time.sleep(s) + # GEMM. + with stimer: + res = torch.matmul(mat1, mat2) + r = stimer.report(total_flops=tfp, log_interval=iter) + rb = True if rank == 0 else False + assert r == rb + + # Start test. + # Setup. + _init_distributed(world, rank) + + # Create a straggler_detector with enabled set to false. + stimer = util.StragglerDetector() + stimer.configure(world, rank, enabled=False, port=port) + # Check if configuration was success. + assert stimer.configured == True + + # Check if the instance is in disabled state. + straggler_detector_disabled() + # Enable it now, must call report. + straggler_detector_enable() + # Check if all ranks have straggler detector enabled. + straggler_detector_enabled() + # Time some operation. + straggler_detector_timeit() + # Report only from rank 0. + straggler_detector_report() + # Check that exception is not suppressed. + straggler_detector_exception_propagate() + util.StragglerDetector._configured = False + # Teardown. + _deinit_distributed() diff --git a/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py b/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py index 96afe46..2f8ab2b 100644 --- a/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py +++ b/tests/unit_tests/transformer/moe/test_a2a_token_dispatcher.py @@ -1,99 +1,100 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import pytest -import torch - -from tests.unit_tests.test_utilities import Utils -from tests.unit_tests.transformer.moe.test_token_dispatcher import MoEModelTestContainer - - -def test_placeholder(): - """This is here because otherwise there's no other test in this module (all disabled) and pytest would fail.""" - pass - - -@pytest.mark.flaky -class TestAlltoAllDispatcher: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.timeout(120) - @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_forward_backward(self, tp_size, ep_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - ) - container.dispatcher_dropless_test() - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.timeout(120) - @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_a2aseq_forward_backward(self, tp_size, ep_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall_seq", - ) - container.dispatcher_dropless_test() - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.timeout(120) - @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_capacity_forward_backward(self, tp_size, ep_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - moe_token_drop_policy="probs", - moe_expert_capacity_factor=0.5, - moe_pad_expert_input_to_capacity=False, - ) - container.dispatcher_capacity_test() - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.timeout(120) - @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_capacity_padding_forward_backward(self, tp_size, ep_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - moe_token_drop_policy="probs", - moe_expert_capacity_factor=0.6, - moe_pad_expert_input_to_capacity=True, - ) - container.dispatcher_drop_and_pad_test() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import pytest +import torch + +from tests.unit_tests.test_utilities import Utils +from tests.unit_tests.transformer.moe.test_token_dispatcher import ( + MoEModelTestContainer, + permute_fusion_params, +) + + +def test_placeholder(): + """This is here because otherwise there's no other test in this module (all disabled) and pytest would fail.""" + pass + + +class TestAlltoAllDispatcher: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + def test_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_permute_fusion=permute_fusion, + ) + container.dispatcher_dropless_test() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + def test_a2aseq_forward_backward(self, tp_size, ep_size): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall_seq", + moe_permute_fusion=False, + ) + container.dispatcher_dropless_test() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + def test_capacity_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_token_drop_policy="probs", + moe_expert_capacity_factor=0.5, + moe_pad_expert_input_to_capacity=False, + moe_permute_fusion=permute_fusion, + ) + container.dispatcher_capacity_test() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2), (1, 1)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + def test_capacity_padding_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_token_drop_policy="probs", + moe_expert_capacity_factor=0.6, + moe_pad_expert_input_to_capacity=True, + moe_permute_fusion=permute_fusion, + ) + container.dispatcher_drop_and_pad_test() diff --git a/tests/unit_tests/transformer/moe/test_aux_loss.py b/tests/unit_tests/transformer/moe/test_aux_loss.py index af8f990..1b4a0f1 100644 --- a/tests/unit_tests/transformer/moe/test_aux_loss.py +++ b/tests/unit_tests/transformer/moe/test_aux_loss.py @@ -1,142 +1,144 @@ -# Copyright 
(c) 2023, NVIDIA CORPORATION. All rights reserved. - -import pytest -import torch - -from megatron.core import parallel_state -from megatron.core.transformer.moe.moe_utils import clear_aux_losses_tracker -from tests.unit_tests.test_utilities import Utils -from tests.unit_tests.transformer.moe.test_token_dispatcher import MoEModelTestContainer - - -class AuxlossTestContainer(MoEModelTestContainer): - def partition_input(self, input): - partitioned_input = input.chunk( - parallel_state.get_tensor_and_context_parallel_world_size(), dim=1 - )[parallel_state.get_tensor_and_context_parallel_rank()] - output = partitioned_input.clone().detach() - output.requires_grad = True - return output - - @pytest.mark.internal - def aux_loss_test(self, input, baseline_grad): - partitioned_input = self.partition_input(input) - moe_layer = self.moe_layer - probs, indices = moe_layer.router(partitioned_input) - probs.sum().mul_(0).backward() - aux_loss_grad = partitioned_input.grad - torch.distributed.barrier() - ans = self.partition_input(baseline_grad) - assert torch.allclose(aux_loss_grad, ans), f"Diff: {(aux_loss_grad/ans).mean()}" - loss = parallel_state.get_moe_layer_wise_logging_tracker()['load_balancing_loss'] - clear_aux_losses_tracker() - - -class TestAuxLoss: - def setup_method(self, method): - baseline_container = AuxlossTestContainer( - tp_size=1, - ep_size=1, - pp_size=1, - cp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - moe_aux_loss_coeff=0.1, - ) - moe_layer = baseline_container.moe_layer - self.input = torch.randn((32, 8, moe_layer.config.hidden_size)).cuda() - self.input.requires_grad = True - probs, indices = moe_layer.router(self.input) - probs.sum().mul_(0).backward() # zero out the main gradients - self.baseline_grad = self.input.grad - self.input.grad = None - clear_aux_losses_tracker() - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize( - "tp_size,ep_size,cp_size", [(8, 1, 1), (4, 2, 1), (1, 1, 8), (2, 1, 4), (2, 2, 2)] - ) - def test_allgather_dispatcher(self, tp_size, ep_size, cp_size): - container = AuxlossTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - cp_size=cp_size, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="allgather", - moe_aux_loss_coeff=0.1, - ) - container.aux_loss_test(self.input, self.baseline_grad) - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize( - "tp_size,ep_size,cp_size", [(8, 1, 1), (4, 2, 1), (1, 1, 8), (2, 1, 4), (2, 2, 2)] - ) - def test_a2a_dispatcher(self, tp_size, ep_size, cp_size): - container = AuxlossTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - cp_size=cp_size, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - moe_aux_loss_coeff=0.1, - ) - container.aux_loss_test(self.input, self.baseline_grad) - - -class TestSeqAuxLoss: - def setup_method(self, method): - baseline_container = AuxlossTestContainer( - tp_size=1, - ep_size=1, - pp_size=1, - cp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="seq_aux_loss", - moe_token_dispatcher_type="alltoall", - 
moe_aux_loss_coeff=0.1, - ) - moe_layer = baseline_container.moe_layer - self.input = torch.randn((32, 8, moe_layer.config.hidden_size)).cuda() - self.input.requires_grad = True - probs, indices = moe_layer.router(self.input) - probs.sum().mul_(0).backward() # zero out the main gradients - self.baseline_grad = self.input.grad - self.input.grad = None - clear_aux_losses_tracker() - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize("tp_size,ep_size,cp_size", [(1, 8, 1)]) - def test_a2a_dispatcher(self, tp_size, ep_size, cp_size): - container = AuxlossTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - cp_size=cp_size, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="seq_aux_loss", - moe_token_dispatcher_type="alltoall", - moe_aux_loss_coeff=0.1, - ) - container.aux_loss_test(self.input, self.baseline_grad) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.transformer.moe.moe_utils import clear_aux_losses_tracker +from tests.unit_tests.test_utilities import Utils +from tests.unit_tests.transformer.moe.test_token_dispatcher import MoEModelTestContainer + + +class AuxlossTestContainer(MoEModelTestContainer): + def partition_input(self, input): + partitioned_input = input.chunk( + parallel_state.get_tensor_and_context_parallel_world_size(), dim=0 + )[parallel_state.get_tensor_and_context_parallel_rank()] + output = partitioned_input.clone().detach() + output.requires_grad = True + return output + + @pytest.mark.internal + def aux_loss_test(self, input, baseline_grad): + partitioned_input = self.partition_input(input) + moe_layer = self.moe_layer + probs, indices = moe_layer.router(partitioned_input) + probs.sum().mul_(0).backward() + aux_loss_grad = partitioned_input.grad + torch.distributed.barrier() + ans = self.partition_input(baseline_grad) + assert torch.allclose(aux_loss_grad, ans), f"Diff: {(aux_loss_grad/ans).mean()}" + loss = parallel_state.get_moe_layer_wise_logging_tracker()['load_balancing_loss'] + clear_aux_losses_tracker() + + +class TestAuxLoss: + def setup_method(self, method): + baseline_container = AuxlossTestContainer( + tp_size=1, + ep_size=1, + pp_size=1, + cp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_aux_loss_coeff=0.1, + ) + moe_layer = baseline_container.moe_layer + self.input = torch.randn((32, 8, moe_layer.config.hidden_size)).cuda() + self.input.requires_grad = True + probs, indices = moe_layer.router(self.input) + probs.sum().mul_(0).backward() # zero out the main gradients + self.baseline_grad = self.input.grad + self.input.grad = None + clear_aux_losses_tracker() + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize( + "tp_size,ep_size,cp_size", [(8, 1, 1), (4, 2, 1), (1, 1, 8), (2, 1, 4), (2, 2, 2)] + ) + def test_allgather_dispatcher(self, tp_size, ep_size, cp_size): + container = AuxlossTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + cp_size=cp_size, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + 
moe_token_dispatcher_type="allgather", + moe_aux_loss_coeff=0.1, + ) + container.aux_loss_test(self.input, self.baseline_grad) + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize( + "tp_size,ep_size,cp_size", [(8, 1, 1), (4, 2, 1), (1, 1, 8), (2, 1, 4), (2, 2, 2)] + ) + def test_a2a_dispatcher(self, tp_size, ep_size, cp_size): + container = AuxlossTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + cp_size=cp_size, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_aux_loss_coeff=0.1, + ) + container.aux_loss_test(self.input, self.baseline_grad) + + +class TestSeqAuxLoss: + def setup_method(self, method): + baseline_container = AuxlossTestContainer( + tp_size=1, + ep_size=1, + pp_size=1, + cp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="seq_aux_loss", + moe_token_dispatcher_type="alltoall", + moe_aux_loss_coeff=0.1, + ) + moe_layer = baseline_container.moe_layer + self.input = torch.randn((32, 8, moe_layer.config.hidden_size)).cuda() + self.input.requires_grad = True + probs, indices = moe_layer.router(self.input) + probs.sum().mul_(0).backward() # zero out the main gradients + self.baseline_grad = self.input.grad + self.input.grad = None + clear_aux_losses_tracker() + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize( + "tp_size,ep_size,cp_size", [(8, 1, 1), (4, 2, 1), (1, 1, 8), (2, 1, 4), (2, 2, 2)] + ) + def test_a2a_dispatcher(self, tp_size, ep_size, cp_size): + container = AuxlossTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + cp_size=cp_size, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="seq_aux_loss", + moe_token_dispatcher_type="alltoall", + moe_aux_loss_coeff=0.1, + ) + container.aux_loss_test(self.input, self.baseline_grad) diff --git a/tests/unit_tests/transformer/moe/test_moe_layer.py b/tests/unit_tests/transformer/moe/test_moe_layer.py index 59afadf..9896b04 100644 --- a/tests/unit_tests/transformer/moe/test_moe_layer.py +++ b/tests/unit_tests/transformer/moe/test_moe_layer.py @@ -1,189 +1,189 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import pytest -import torch - -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_decoder_block_spec, - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.moe.moe_layer import MoELayer -from megatron.core.transformer.moe.router import Router -from megatron.core.transformer.transformer_block import TransformerBlock -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.utils import is_te_min_version -from megatron.training.initialize import _set_random_seed -from tests.unit_tests.test_utilities import Utils - - -class TestMoELayerInit: - def setup_method(self, method): - pass - - @pytest.mark.skipif( - not is_te_min_version("1.7.0.dev0"), - reason="Expert with TE Linear is only supported in TE 1.7.0 and later.", - ) - @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) - @pytest.mark.parametrize("num_moe_experts", [1, 2]) - @pytest.mark.parametrize("grouped_gemm", [True, False]) - def test_te_moe_layer(self, num_moe_experts, moe_token_dispatcher_type, grouped_gemm): - Utils.initialize_model_parallel(1, 1) - _set_random_seed(seed_=123, data_parallel_random_init=False) - self.transformer_config = TransformerConfig( - num_layers=1, - hidden_size=12, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - moe_token_dispatcher_type=moe_token_dispatcher_type, - moe_router_topk=2, - moe_aux_loss_coeff=0.01, - moe_grouped_gemm=grouped_gemm, - add_bias_linear=False, - ) - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm - ) - moe_layer = MoELayer( - self.transformer_config, transformer_layer_spec.submodules.mlp.submodules - ) - Utils.destroy_model_parallel() - - @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) - @pytest.mark.parametrize("num_moe_experts", [1, 2]) - @pytest.mark.parametrize("grouped_gemm", [True, False]) - def test_legacy_moe_layer(self, num_moe_experts, moe_token_dispatcher_type, grouped_gemm): - Utils.initialize_model_parallel(1, 1) - _set_random_seed(seed_=123, data_parallel_random_init=False) - num_moe_experts = 4 - self.transformer_config = TransformerConfig( - num_layers=1, - hidden_size=12, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - moe_token_dispatcher_type=moe_token_dispatcher_type, - moe_router_load_balancing_type="aux_loss", - moe_router_topk=2, - moe_aux_loss_coeff=0.01, - moe_grouped_gemm=grouped_gemm, - add_bias_linear=False, - ) - transformer_layer_spec = get_gpt_layer_local_spec( - num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm - ) - moe_layer = MoELayer( - self.transformer_config, transformer_layer_spec.submodules.mlp.submodules - ) - Utils.destroy_model_parallel() - - @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) - @pytest.mark.parametrize("grouped_gemm", [True, False]) - @pytest.mark.parametrize("tp_size,ep_size", [(1, 1), (2, 2)]) - def test_moe_with_late_initialize( - self, moe_token_dispatcher_type, grouped_gemm, tp_size, ep_size - ): - num_moe_experts = 4 - hidden_size = 12 - transformer_config = TransformerConfig( - num_layers=1, - hidden_size=hidden_size, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - moe_router_load_balancing_type="aux_loss", - 
moe_router_topk=2, - moe_aux_loss_coeff=0.01, - add_bias_linear=False, - moe_grouped_gemm=grouped_gemm, - moe_token_dispatcher_type=moe_token_dispatcher_type, - tensor_model_parallel_size=tp_size, - expert_model_parallel_size=ep_size, - sequence_parallel=tp_size > 1, - bf16=True, - params_dtype=torch.bfloat16, - ) - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( - num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm - ) - - # Fake initialization as NeMo does - Utils.fake_initialize_model_parallel( - tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size - ) - moe_layer = MoELayer( - transformer_config, transformer_layer_spec.submodules.mlp.submodules - ).cuda() - - Utils.initialize_model_parallel( - tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size - ) - _set_random_seed(seed_=123, data_parallel_random_init=False) - - input_data = torch.randn( - 16, 4, hidden_size, device=torch.cuda.current_device(), dtype=torch.bfloat16 - ) - output = moe_layer(input_data) - - Utils.destroy_model_parallel() - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - -class TestInterleaveTransformerBlock: - - @pytest.mark.parametrize("moe_layer_freq", [2, eval("[0,1,1,1]"), eval("[0]*2+[1]*2")]) - def test_interleave_transformer_block(self, moe_layer_freq): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - self.transformer_config = TransformerConfig( - num_layers=4, - hidden_size=64, - num_attention_heads=4, - moe_layer_freq=moe_layer_freq, - moe_ffn_hidden_size=256, - use_cpu_initialization=True, - num_moe_experts=2, - ) - self.parallel_transformer_block = TransformerBlock( - self.transformer_config, get_gpt_decoder_block_spec(self.transformer_config, False) - ) - - # Check if the moe layer is interleaved correctly - if isinstance(self.transformer_config.moe_layer_freq, int): - moe_layer_pattern = [ - 1 if (i % self.transformer_config.moe_layer_freq == 0) else 0 - for i in range(self.transformer_config.num_layers) - ] - else: - moe_layer_pattern = self.transformer_config.moe_layer_freq - - for i, layer in enumerate(self.parallel_transformer_block.layers): - is_moe_layer = isinstance(layer.mlp, MoELayer) - assert is_moe_layer == moe_layer_pattern[i] - - # Test forward pass - parallel_transformer_block = self.parallel_transformer_block - config: TransformerConfig = parallel_transformer_block.config - sequence_length = 32 - micro_batch_size = 2 - parallel_transformer_block.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - hidden_states = parallel_transformer_block( - hidden_states=hidden_states, attention_mask=attention_mask - ) - assert hidden_states.shape[0] == sequence_length - assert hidden_states.shape[1] == micro_batch_size - assert hidden_states.shape[2] == config.hidden_size - - def teardown_method(self, method): - Utils.destroy_model_parallel() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_decoder_block_spec, + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.moe.moe_layer import MoELayer +from megatron.core.transformer.moe.router import Router +from megatron.core.transformer.transformer_block import TransformerBlock +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import is_te_min_version +from megatron.training.initialize import _set_random_seed +from tests.unit_tests.test_utilities import Utils + + +class TestMoELayerInit: + def setup_method(self, method): + pass + + @pytest.mark.skipif( + not is_te_min_version("1.7.0.dev0"), + reason="Expert with TE Linear is only supported in TE 1.7.0 and later.", + ) + @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) + @pytest.mark.parametrize("num_moe_experts", [1, 2]) + @pytest.mark.parametrize("grouped_gemm", [True, False]) + def test_te_moe_layer(self, num_moe_experts, moe_token_dispatcher_type, grouped_gemm): + Utils.initialize_model_parallel(1, 1) + _set_random_seed(seed_=123, data_parallel_random_init=False) + self.transformer_config = TransformerConfig( + num_layers=1, + hidden_size=12, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + moe_token_dispatcher_type=moe_token_dispatcher_type, + moe_router_topk=2, + moe_aux_loss_coeff=0.01, + moe_grouped_gemm=grouped_gemm, + add_bias_linear=False, + ) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm + ) + moe_layer = MoELayer( + self.transformer_config, transformer_layer_spec.submodules.mlp.submodules + ) + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("moe_token_dispatcher_type", ["allgather", "alltoall"]) + @pytest.mark.parametrize("num_moe_experts", [1, 2]) + @pytest.mark.parametrize("grouped_gemm", [True, False]) + def test_legacy_moe_layer(self, num_moe_experts, moe_token_dispatcher_type, grouped_gemm): + Utils.initialize_model_parallel(1, 1) + _set_random_seed(seed_=123, data_parallel_random_init=False) + num_moe_experts = 4 + self.transformer_config = TransformerConfig( + num_layers=1, + hidden_size=12, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + moe_token_dispatcher_type=moe_token_dispatcher_type, + moe_router_load_balancing_type="aux_loss", + moe_router_topk=2, + moe_aux_loss_coeff=0.01, + moe_grouped_gemm=grouped_gemm, + add_bias_linear=False, + ) + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm + ) + moe_layer = MoELayer( + self.transformer_config, transformer_layer_spec.submodules.mlp.submodules + ) + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("moe_token_dispatcher_type", ["alltoall", "allgather"]) + @pytest.mark.parametrize("grouped_gemm", [True, False]) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 1), (2, 2)]) + def test_moe_with_late_initialize( + self, moe_token_dispatcher_type, grouped_gemm, tp_size, ep_size + ): + num_moe_experts = 4 + hidden_size = 12 + transformer_config = TransformerConfig( + num_layers=1, + hidden_size=hidden_size, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + moe_router_load_balancing_type="aux_loss", + 
moe_router_topk=2, + moe_aux_loss_coeff=0.01, + add_bias_linear=False, + moe_grouped_gemm=grouped_gemm, + moe_token_dispatcher_type=moe_token_dispatcher_type, + tensor_model_parallel_size=tp_size, + expert_model_parallel_size=ep_size, + sequence_parallel=tp_size > 1, + bf16=True, + params_dtype=torch.bfloat16, + ) + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec( + num_experts=num_moe_experts, moe_grouped_gemm=grouped_gemm + ) + + # Fake initialization as NeMo does + Utils.fake_initialize_model_parallel( + tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size + ) + moe_layer = MoELayer( + transformer_config, transformer_layer_spec.submodules.mlp.submodules + ).cuda() + + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp_size, expert_model_parallel_size=ep_size + ) + _set_random_seed(seed_=123, data_parallel_random_init=False) + + input_data = torch.randn( + 16, 4, hidden_size, device=torch.cuda.current_device(), dtype=torch.bfloat16 + ) + output = moe_layer(input_data) + + Utils.destroy_model_parallel() + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + +class TestInterleaveTransformerBlock: + + @pytest.mark.parametrize("moe_layer_freq", [2, eval("[0,1,1,1]"), eval("[0]*2+[1]*2")]) + def test_interleave_transformer_block(self, moe_layer_freq): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = TransformerConfig( + num_layers=4, + hidden_size=64, + num_attention_heads=4, + moe_layer_freq=moe_layer_freq, + moe_ffn_hidden_size=256, + use_cpu_initialization=True, + num_moe_experts=2, + ) + self.parallel_transformer_block = TransformerBlock( + self.transformer_config, get_gpt_decoder_block_spec(self.transformer_config, False) + ) + + # Check if the moe layer is interleaved correctly + if isinstance(self.transformer_config.moe_layer_freq, int): + moe_layer_pattern = [ + 1 if (i % self.transformer_config.moe_layer_freq == 0) else 0 + for i in range(self.transformer_config.num_layers) + ] + else: + moe_layer_pattern = self.transformer_config.moe_layer_freq + + for i, layer in enumerate(self.parallel_transformer_block.layers): + is_moe_layer = isinstance(layer.mlp, MoELayer) + assert is_moe_layer == moe_layer_pattern[i] + + # Test forward pass + parallel_transformer_block = self.parallel_transformer_block + config: TransformerConfig = parallel_transformer_block.config + sequence_length = 32 + micro_batch_size = 2 + parallel_transformer_block.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + hidden_states = parallel_transformer_block( + hidden_states=hidden_states, attention_mask=attention_mask + ) + assert hidden_states.shape[0] == sequence_length + assert hidden_states.shape[1] == micro_batch_size + assert hidden_states.shape[2] == config.hidden_size + + def teardown_method(self, method): + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/transformer/moe/test_routers.py b/tests/unit_tests/transformer/moe/test_routers.py index 2f6025d..33db900 100644 --- a/tests/unit_tests/transformer/moe/test_routers.py +++ b/tests/unit_tests/transformer/moe/test_routers.py @@ -1,145 +1,235 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import pytest -import torch - -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec -from megatron.core.transformer.moe.moe_layer import MoELayer -from megatron.core.transformer.moe.router import Router -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.training.initialize import _set_random_seed -from tests.unit_tests.test_utilities import Utils - - -class TestTop2Router: - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - _set_random_seed(seed_=123, data_parallel_random_init=False) - print("done intializing") - num_moe_experts = 4 - self.transformer_config = TransformerConfig( - num_layers=2, - hidden_size=12, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - moe_router_load_balancing_type="aux_loss", - moe_router_topk=2, - moe_aux_loss_coeff=0, - ) - transformer_layer_spec = get_gpt_layer_local_spec( - num_experts=num_moe_experts, moe_grouped_gemm=False - ) - self.sequential_mlp = MoELayer( - self.transformer_config, transformer_layer_spec.submodules.mlp.submodules - ) - self.router = self.sequential_mlp.router - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - def test_constructor(self): - assert isinstance(self.router, Router) - - num_weights = sum([p.numel() for p in self.router.parameters()]) - assert num_weights == 12 * 4, num_weights - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize("moe_router_pre_softmax", [(True), (False)]) - def test_router_forward(self, moe_router_pre_softmax): - with torch.no_grad(): - self.router = self.router.cuda() - self.router.config.moe_router_pre_softmax = moe_router_pre_softmax - # [num tokens, hidden size] - hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) - hidden_states = hidden_states.cuda() - scores, indices = self.router(hidden_states) - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - def test_aux_loss(self): - self.sequential_mlp = self.sequential_mlp.cuda() - - # Without aux loss - hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) - hidden_states = hidden_states.cuda() - out = self.sequential_mlp(hidden_states)[0] - out.sum().mul_(0).backward() - assert self.sequential_mlp.router.weight.grad.abs().sum() == 0 - - # With aux loss - self.transformer_config.moe_aux_loss_coeff = 1 - out = self.sequential_mlp(hidden_states)[0] - out.sum().mul_(0).backward() - assert self.sequential_mlp.router.weight.grad.abs().sum() > 0 - - # With Z loss - self.transformer_config.moe_aux_loss_coeff = 0 - self.transformer_config.moe_z_loss_coeff = 1 - self.sequential_mlp.router.weight.grad.fill_(0) - out = self.sequential_mlp(hidden_states)[0] - out.sum().mul_(0).backward() - assert self.sequential_mlp.router.weight.grad.abs().sum() > 0 - - -class TestDeviceLimitedTop2Router: - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=8) - _set_random_seed(seed_=123, data_parallel_random_init=False) - print("done intializing") - num_moe_experts = 8 - self.transformer_config = TransformerConfig( - num_layers=2, - hidden_size=12, - num_attention_heads=4, - num_moe_experts=num_moe_experts, - use_cpu_initialization=True, - expert_model_parallel_size=8, - moe_router_load_balancing_type="aux_loss", - 
moe_router_topk_limited_devices=2, - moe_router_pre_softmax=True, - moe_router_topk=2, - moe_aux_loss_coeff=0, - ) - transformer_layer_spec = get_gpt_layer_local_spec( - num_experts=num_moe_experts, moe_grouped_gemm=False - ) - self.sequential_mlp = MoELayer( - self.transformer_config, transformer_layer_spec.submodules.mlp.submodules - ) - self.router = self.sequential_mlp.router - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - assert isinstance(self.router, Router) - - num_weights = sum([p.numel() for p in self.router.parameters()]) - assert num_weights == 12 * 8, num_weights - - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.parametrize("moe_router_pre_softmax", [(True), (False)]) - def test_router_forward(self, moe_router_pre_softmax): - with torch.no_grad(): - self.router = self.router.cuda() - self.router.config.moe_router_pre_softmax = moe_router_pre_softmax - if moe_router_pre_softmax: - self.router.config.moe_router_topk_scaling_factor = 16.0 - # [num tokens, hidden size] - hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) - hidden_states = hidden_states.cuda() - scores, indices = self.router(hidden_states) - print(scores.shape, indices.shape) - assert scores.shape == (64, 8) - assert indices.shape == (64, 8) - print( - (indices == 0).sum(), - (indices == 1).sum(), - (indices == 2).sum(), - (indices == 3).sum(), - ) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec +from megatron.core.transformer.moe.moe_layer import MoELayer +from megatron.core.transformer.moe.moe_utils import get_updated_expert_bias +from megatron.core.transformer.moe.router import Router +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.training.initialize import _set_random_seed +from tests.unit_tests.test_utilities import Utils + + +class TestTop2Router: + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + _set_random_seed(seed_=123, data_parallel_random_init=False) + print("done intializing") + num_moe_experts = 4 + self.transformer_config = TransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + moe_router_load_balancing_type="aux_loss", + moe_router_topk=2, + moe_aux_loss_coeff=0, + ) + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=num_moe_experts, moe_grouped_gemm=False + ) + self.sequential_mlp = MoELayer( + self.transformer_config, transformer_layer_spec.submodules.mlp.submodules + ) + self.router = self.sequential_mlp.router + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + def test_constructor(self): + assert isinstance(self.router, Router) + + num_weights = sum([p.numel() for p in self.router.parameters()]) + assert num_weights == 12 * 4, num_weights + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.parametrize("moe_router_pre_softmax", [(True), (False)]) + @pytest.mark.parametrize("score_function", ["sigmoid", "softmax"]) + def test_router_forward(self, moe_router_pre_softmax, score_function): + with torch.no_grad(): + self.router = self.router.cuda() + self.router.config.moe_router_pre_softmax = moe_router_pre_softmax + self.router.config.moe_router_score_function 
= score_function + # [num tokens, hidden size] + hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) + hidden_states = hidden_states.cuda() + scores, indices = self.router(hidden_states) + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + def test_aux_loss(self): + self.sequential_mlp = self.sequential_mlp.cuda() + + # Without aux loss + hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) + hidden_states = hidden_states.cuda() + out = self.sequential_mlp(hidden_states)[0] + out.sum().mul_(0).backward() + assert self.sequential_mlp.router.weight.grad.abs().sum() == 0 + + # With aux loss + self.transformer_config.moe_aux_loss_coeff = 1 + out = self.sequential_mlp(hidden_states)[0] + out.sum().mul_(0).backward() + assert self.sequential_mlp.router.weight.grad.abs().sum() > 0 + + # With Z loss + self.transformer_config.moe_aux_loss_coeff = 0 + self.transformer_config.moe_z_loss_coeff = 1 + self.sequential_mlp.router.weight.grad.fill_(0) + out = self.sequential_mlp(hidden_states)[0] + out.sum().mul_(0).backward() + assert self.sequential_mlp.router.weight.grad.abs().sum() > 0 + + +class TestGroupLimitedRouter: + def setup_method(self, method): + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + expert_model_parallel_size=8, + context_parallel_size=1, + ) + _set_random_seed(seed_=123, data_parallel_random_init=False) + print("done intializing") + + num_moe_experts = 16 + self.transformer_config = TransformerConfig( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=1, + expert_model_parallel_size=8, + context_parallel_size=1, + num_moe_experts=num_moe_experts, + moe_router_topk=4, + moe_router_group_topk=2, + moe_router_num_groups=8, + moe_router_pre_softmax=True, + moe_router_load_balancing_type="aux_loss", + moe_aux_loss_coeff=0, + moe_token_dispatcher_type="alltoall", + num_layers=2, + hidden_size=12, + num_attention_heads=4, + use_cpu_initialization=True, + ) + + # init MoE layer + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=num_moe_experts, moe_grouped_gemm=False + ) + self.moe_layer = MoELayer( + self.transformer_config, transformer_layer_spec.submodules.mlp.submodules + ).cuda() + self.router = self.moe_layer.router + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + def test_constructor(self): + assert isinstance(self.router, Router) + + num_weights = sum([p.numel() for p in self.router.parameters()]) + assert ( + num_weights + == self.transformer_config.hidden_size * self.transformer_config.num_moe_experts + ), num_weights + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.parametrize("moe_router_group_topk,moe_router_num_groups", [(3, 8), (2, 4)]) + @pytest.mark.parametrize("moe_router_pre_softmax", [(True), (False)]) + @pytest.mark.parametrize("score_function", ["sigmoid", "softmax"]) + def test_router_forward( + self, moe_router_group_topk, moe_router_num_groups, moe_router_pre_softmax, score_function + ): + with torch.no_grad(): + self.router.config.moe_router_group_topk = moe_router_group_topk + self.router.config.moe_router_num_groups = moe_router_num_groups + self.router.config.moe_router_pre_softmax = moe_router_pre_softmax + self.router.config.moe_router_score_function = score_function + if moe_router_pre_softmax: + self.router.config.moe_router_topk_scaling_factor = 16.0 + + seq_len = 2 
+ batch_size = 2 + num_tokens = seq_len * batch_size + # hidden_states shape: [seq_len, batch_size, hidden_size] + hidden_states = torch.randn( + (seq_len, batch_size, self.router.config.hidden_size) + ).cuda() + scores, routing_map = self.router(hidden_states) + assert scores.shape == (num_tokens, self.router.config.num_moe_experts), scores.shape + assert routing_map.shape == ( + num_tokens, + self.router.config.num_moe_experts, + ), routing_map.shape + + group_routing_map = ( + routing_map.reshape(num_tokens, moe_router_num_groups, -1).max(dim=-1).values + ) + assert torch.all(group_routing_map.sum(dim=-1) <= moe_router_group_topk) + + +class TestAuxLossFreeTop2Router: + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1, expert_model_parallel_size=8) + _set_random_seed(seed_=123, data_parallel_random_init=False) + print("done intializing") + num_moe_experts = 8 + self.transformer_config = TransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + num_moe_experts=num_moe_experts, + use_cpu_initialization=True, + expert_model_parallel_size=8, + moe_router_load_balancing_type="none", # No aux loss + moe_router_score_function="sigmoid", # Using sigmoid scoring + moe_router_enable_expert_bias=True, # Enable expert bias + moe_router_bias_update_rate=0.1, # Set bias update rate + moe_router_topk=2, + ) + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=num_moe_experts, moe_grouped_gemm=False + ) + self.moe_layer = MoELayer( + self.transformer_config, transformer_layer_spec.submodules.mlp.submodules + ) + self.router = self.moe_layer.router + assert self.router.expert_bias is not None + assert self.router.local_tokens_per_expert is not None + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + def test_router_forward_aux_free(self): + hidden_states = torch.randn((32, 2, self.router.config.hidden_size)) + hidden_states = hidden_states.cuda() + self.router = self.router.cuda() + + # First forward pass + initial_bias = self.router.expert_bias.clone() + scores1, indices1 = self.router(hidden_states) + initial_tokens = self.router.local_tokens_per_expert.clone() + updated_bias = get_updated_expert_bias( + self.router.local_tokens_per_expert, + self.router.expert_bias, + self.router.config.moe_router_bias_update_rate, + ) + + # Verify expert bias was updated + assert not torch.equal(initial_bias, updated_bias), "Expert bias should be updated" + + # Basic output checks + assert scores1.shape == (64, 8), "Router scores shape mismatch" + assert indices1.shape == (64, 8), "Router indices shape mismatch" + + # Print some debug info + print("Updated bias after first forward pass:", updated_bias) diff --git a/tests/unit_tests/transformer/moe/test_token_dispatcher.py b/tests/unit_tests/transformer/moe/test_token_dispatcher.py index f846304..22751de 100644 --- a/tests/unit_tests/transformer/moe/test_token_dispatcher.py +++ b/tests/unit_tests/transformer/moe/test_token_dispatcher.py @@ -1,272 +1,361 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import copy - -import pytest -import torch - -from megatron.core import parallel_state -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec -from megatron.core.transformer.moe.moe_layer import MoELayer -from megatron.core.transformer.moe.moe_utils import permute, unpermute -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.training.initialize import _set_random_seed -from tests.unit_tests.test_utilities import Utils - - -class MoEModelTestContainer: - def __init__( - self, - tp_size, - ep_size, - pp_size, - cp_size=1, - moe_tp_size=None, - data_parallel_random_init=False, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="alltoall", - moe_expert_capacity_factor=None, - moe_pad_expert_input_to_capacity=False, - moe_aux_loss_coeff=0.1, - **kwargs, - ): - self.num_local_experts = num_moe_experts // ep_size - if moe_tp_size is None: - moe_tp_size = tp_size - Utils.initialize_model_parallel( - tensor_model_parallel_size=tp_size, - pipeline_model_parallel_size=pp_size, - expert_model_parallel_size=ep_size, - context_parallel_size=cp_size, - expert_tensor_parallel_size=moe_tp_size, - ) - _set_random_seed(seed_=123, data_parallel_random_init=data_parallel_random_init) - local_expert_indices_offset = ( - parallel_state.get_expert_model_parallel_rank() * self.num_local_experts - ) - self.local_expert_indices = [ - local_expert_indices_offset + i for i in range(self.num_local_experts) - ] - self.config = TransformerConfig( - tensor_model_parallel_size=tp_size, - expert_model_parallel_size=ep_size, - pipeline_model_parallel_size=pp_size, - context_parallel_size=cp_size, - expert_tensor_parallel_size=moe_tp_size, - moe_router_topk=moe_router_topk, - num_moe_experts=num_moe_experts, - moe_router_load_balancing_type=moe_router_load_balancing_type, - moe_token_dispatcher_type=moe_token_dispatcher_type, - moe_expert_capacity_factor=moe_expert_capacity_factor, - moe_pad_expert_input_to_capacity=moe_pad_expert_input_to_capacity, - moe_aux_loss_coeff=moe_aux_loss_coeff, - num_layers=1, - moe_grouped_gemm=kwargs.get("moe_grouped_gemm", False), - hidden_size=kwargs.get("hidden_size", 16), - num_attention_heads=kwargs.get("num_attention_heads", 8), - use_cpu_initialization=kwargs.get("use_cpu_initialization", True), - sequence_parallel=tp_size > 1, - add_bias_linear=kwargs.get("add_bias_linear", False), - ) - - # init moe layer - self.moe_layer = self.new_moe_layer() - - def new_moe_layer(self): - transformer_layer_spec = get_gpt_layer_local_spec( - num_experts=self.config.num_moe_experts, moe_grouped_gemm=self.config.moe_grouped_gemm - ) - moe_layer = MoELayer( - copy.deepcopy(self.config), transformer_layer_spec.submodules.mlp.submodules - ).cuda() - moe_layer.set_layer_number(0) - return moe_layer - - def __del__(self): - torch.distributed.barrier() - torch.cuda.synchronize() - Utils.destroy_model_parallel() - - @pytest.mark.internal - def dispatcher_dropless_test(self): - moe_layer = self.moe_layer - bs = 32 - seql = 8 - hidden_states = torch.randn((bs, seql, moe_layer.config.hidden_size)) - hidden_states = hidden_states.cuda() - ans = hidden_states / 2 - hidden_states.requires_grad = True - probs, indices = moe_layer.router(hidden_states) - probs = torch.ones_like(probs) / moe_layer.router.topk / 2 - - ## Uncomment these lines to assist in bug location. 
- # hidden_states = torch.ones_like(hidden_states) * torch.distributed.get_rank() - # hidden_states.requires_grad = True - # indices = torch.ones_like(indices) * torch.distributed.get_rank() - # print(permuted_local_hidden_states) - - (permuted_local_hidden_states, tokens_per_expert) = ( - moe_layer.token_dispatcher.token_permutation(hidden_states, probs, indices) - ) - - scale = moe_layer.config.expert_tensor_parallel_size - - permuted_local_hidden_states /= scale - - restored_hidden_states, restored_bias = moe_layer.token_dispatcher.token_unpermutation( - permuted_local_hidden_states - ) - - assert torch.allclose( - restored_hidden_states, ans - ), "Restored hidden states do not match original hidden states" - - # check if the grad of the hidden states is same as the hidden states - torch.autograd.backward(restored_hidden_states, hidden_states) - assert torch.allclose( - hidden_states.grad, ans - ), "Restored hidden states do not match original hidden states" - - @pytest.mark.internal - def dispatcher_capacity_test(self): - moe_layer = self.moe_layer - hidden_states = torch.randn((16, moe_layer.config.hidden_size)) - hidden_states = hidden_states.cuda() - hidden_states.requires_grad = True - probs, indices = moe_layer.router(hidden_states) - - # Create the answer. - prob_mask = probs != 0 - probs = torch.ones_like(probs) * prob_mask / moe_layer.router.topk - local_probss = probs - restored_hidden_states_answer = hidden_states * local_probss.sum(dim=1).unsqueeze(1) - - (permuted_local_hidden_states, tokens_per_expert) = ( - moe_layer.token_dispatcher.token_permutation(hidden_states, probs, indices) - ) - - print(f"Dispatched tokens per expert: {tokens_per_expert}") - - permuted_local_hidden_states /= moe_layer.config.tensor_model_parallel_size - - restored_hidden_states, restored_bias = moe_layer.token_dispatcher.token_unpermutation( - permuted_local_hidden_states - ) - assert torch.allclose( - restored_hidden_states, restored_hidden_states_answer - ), "Restored hidden states does not match" - - # check if the grad of the hidden states is same as the hidden states - torch.autograd.backward(restored_hidden_states, hidden_states) - assert torch.allclose( - hidden_states.grad, restored_hidden_states_answer - ), "Gradient of hidden states should be same as hidden states" - - @pytest.mark.internal - def dispatcher_drop_and_pad_test(self): - "Test if the tokens are dropped and padded correctly" - moe_layer = self.moe_layer - - hidden_states = torch.randn((16, moe_layer.config.hidden_size)).cuda() - hidden_states.requires_grad = True - - moe_layer.config.moe_pad_expert_input_to_capacity = False - moe_layer.token_dispatcher.drop_and_pad = False - - probs_1, indices_1 = moe_layer.router(hidden_states) - (permuted_input_1, tokens_per_expert) = moe_layer.token_dispatcher.token_permutation( - hidden_states, probs_1, indices_1 - ) - torch.distributed.barrier() - forward_answer, restored_bias = moe_layer.token_dispatcher.token_unpermutation( - permuted_input_1 - ) - torch.autograd.backward(forward_answer, forward_answer) - backward_answer = hidden_states.grad.clone() - hidden_states.grad = None - torch.cuda.synchronize() - # End - - moe_layer_2 = self.new_moe_layer() - moe_layer_2.load_state_dict(moe_layer.state_dict()) - moe_layer_2.config.moe_pad_expert_input_to_capacity = True - moe_layer_2.token_dispatcher.drop_and_pad = True - - probs_2, indices_2 = moe_layer_2.router(hidden_states) - (permuted_input_2, tokens_per_expert) = moe_layer_2.token_dispatcher.token_permutation( - hidden_states, probs_2, 
indices_2 - ) - restored_hidden_states, restored_bias = moe_layer_2.token_dispatcher.token_unpermutation( - permuted_input_2 - ) - torch.distributed.barrier() - assert torch.allclose( - restored_hidden_states, forward_answer - ), "Restored hidden states does not match" - - # check if the grad of the hidden states is same as the hidden states - torch.autograd.backward(restored_hidden_states, restored_hidden_states) - assert torch.allclose( - hidden_states.grad, backward_answer - ), "Gradient of hidden states should be same as hidden states" - - def set_params(self): - # TODO: Set consistent parameters for various parallelisms. - raise NotImplementedError - - def destroy(self): - Utils.destroy_model_parallel() - - -class TestAllgatherDispatcher: - def setup_method(self, method): - pass - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize("tp_size,ep_size", [(8, 1), (1, 8), (2, 4), (1, 1)]) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_forward_backward(self, tp_size, ep_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="allgather", - ) - - container.dispatcher_dropless_test() - - @pytest.mark.internal - @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") - @pytest.mark.internal - @pytest.mark.parametrize( - "tp_size,ep_size,moe_tp_size", [(1, 1, 8), (1, 2, 4), (1, 4, 2), (2, 2, 4)] - ) - @pytest.mark.flaky - @pytest.mark.flaky_in_dev - def test_moe_tp_forward_backward(self, tp_size, ep_size, moe_tp_size): - container = MoEModelTestContainer( - tp_size=tp_size, - ep_size=ep_size, - pp_size=1, - moe_tp_size=moe_tp_size, - num_moe_experts=8, - moe_router_topk=2, - moe_router_load_balancing_type="aux_loss", - moe_token_dispatcher_type="allgather", - sequence_parallel=True, - moe_grouped_gemm=True, - use_cpu_initialization=False, - ) - - container.dispatcher_dropless_test() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import copy + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec +from megatron.core.transformer.moe.moe_layer import MoELayer +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.utils import is_te_min_version +from megatron.training.initialize import _set_random_seed +from tests.unit_tests.test_utilities import Utils + + +class MoEModelTestContainer: + def __init__( + self, + tp_size, + ep_size, + pp_size, + cp_size=1, + moe_tp_size=None, + data_parallel_random_init=False, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="alltoall", + moe_expert_capacity_factor=None, + moe_pad_expert_input_to_capacity=False, + moe_aux_loss_coeff=0.1, + **kwargs, + ): + self.num_local_experts = num_moe_experts // ep_size + if moe_tp_size is None: + moe_tp_size = tp_size + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp_size, + pipeline_model_parallel_size=pp_size, + expert_model_parallel_size=ep_size, + context_parallel_size=cp_size, + expert_tensor_parallel_size=moe_tp_size, + ) + _set_random_seed(seed_=123, data_parallel_random_init=data_parallel_random_init) + local_expert_indices_offset = ( + parallel_state.get_expert_model_parallel_rank() * self.num_local_experts + ) + self.local_expert_indices = [ + local_expert_indices_offset + i for i in range(self.num_local_experts) + ] + self.config = TransformerConfig( + tensor_model_parallel_size=tp_size, + expert_model_parallel_size=ep_size, + pipeline_model_parallel_size=pp_size, + context_parallel_size=cp_size, + expert_tensor_parallel_size=moe_tp_size, + moe_router_topk=moe_router_topk, + num_moe_experts=num_moe_experts, + moe_router_load_balancing_type=moe_router_load_balancing_type, + moe_token_dispatcher_type=moe_token_dispatcher_type, + moe_expert_capacity_factor=moe_expert_capacity_factor, + moe_pad_expert_input_to_capacity=moe_pad_expert_input_to_capacity, + moe_aux_loss_coeff=moe_aux_loss_coeff, + num_layers=1, + moe_grouped_gemm=kwargs.get("moe_grouped_gemm", False), + hidden_size=kwargs.get("hidden_size", 16), + num_attention_heads=kwargs.get("num_attention_heads", 8), + use_cpu_initialization=kwargs.get("use_cpu_initialization", True), + sequence_parallel=tp_size > 1, + add_bias_linear=kwargs.get("add_bias_linear", False), + moe_permute_fusion=kwargs.get("moe_permute_fusion", False), + moe_enable_deepep=kwargs.get("moe_enable_deepep", False), + ) + + # init moe layer + self.moe_layer = self.new_moe_layer() + + def new_moe_layer(self): + transformer_layer_spec = get_gpt_layer_local_spec( + num_experts=self.config.num_moe_experts, moe_grouped_gemm=self.config.moe_grouped_gemm + ) + moe_layer = MoELayer( + copy.deepcopy(self.config), transformer_layer_spec.submodules.mlp.submodules + ).cuda() + moe_layer.set_layer_number(0) + return moe_layer + + def __del__(self): + torch.distributed.barrier() + torch.cuda.synchronize() + Utils.destroy_model_parallel() + + @pytest.mark.internal + def dispatcher_dropless_test(self): + moe_layer = self.moe_layer + bs = 32 + seql = 8 + # TODO: Find why setting manual seed can cause the test to fail + # Manual seed to differentiate input data for each rank + # rank = torch.distributed.get_rank() + # torch.manual_seed(1000 + rank) + hidden_states = torch.randn((bs, seql, moe_layer.config.hidden_size)) + hidden_states = hidden_states.cuda() + # Permute and then unpermute data are supposed to 
restore original data + ans = hidden_states + hidden_states.requires_grad = True + probs, indices = moe_layer.router(hidden_states) + probs = torch.ones_like(probs) / moe_layer.router.topk + + (permuted_local_hidden_states, tokens_per_expert) = ( + moe_layer.token_dispatcher.token_permutation(hidden_states, probs, indices) + ) + + restored_hidden_states, restored_bias = moe_layer.token_dispatcher.token_unpermutation( + permuted_local_hidden_states + ) + + # reduce across TP rank equals to multiply data by a scale of ETP + scale = moe_layer.config.expert_tensor_parallel_size + restored_hidden_states = restored_hidden_states / scale + + assert torch.allclose( + restored_hidden_states, ans + ), "Restored hidden states do not match original hidden states" + + # check if the grad of the hidden states is same as the hidden states + torch.autograd.backward(restored_hidden_states, hidden_states) + assert torch.allclose( + hidden_states.grad, ans + ), "Restored hidden states do not match original hidden states" + + @pytest.mark.internal + def dispatcher_capacity_test(self): + moe_layer = self.moe_layer + hidden_states = torch.randn((16, moe_layer.config.hidden_size)) + hidden_states = hidden_states.cuda() + hidden_states.requires_grad = True + probs, indices = moe_layer.router(hidden_states) + + # Create the answer. + prob_mask = probs != 0 + probs = torch.ones_like(probs) * prob_mask / moe_layer.router.topk + local_probss = probs + restored_hidden_states_answer = hidden_states * local_probss.sum(dim=1).unsqueeze(1) + + (permuted_local_hidden_states, tokens_per_expert) = ( + moe_layer.token_dispatcher.token_permutation(hidden_states, probs, indices) + ) + + permuted_local_hidden_states /= moe_layer.config.tensor_model_parallel_size + + restored_hidden_states, restored_bias = moe_layer.token_dispatcher.token_unpermutation( + permuted_local_hidden_states + ) + assert torch.allclose( + restored_hidden_states, restored_hidden_states_answer + ), "Restored hidden states does not match" + + # check if the grad of the hidden states is same as the hidden states + torch.autograd.backward(restored_hidden_states, hidden_states) + assert torch.allclose( + hidden_states.grad, restored_hidden_states_answer + ), "Gradient of hidden states should be same as hidden states" + + @pytest.mark.internal + def dispatcher_drop_and_pad_test(self): + "Test if the tokens are dropped and padded correctly" + moe_layer = self.moe_layer + + hidden_states = torch.randn((16, moe_layer.config.hidden_size)).cuda() + hidden_states.requires_grad = True + + moe_layer.config.moe_pad_expert_input_to_capacity = False + moe_layer.token_dispatcher.drop_and_pad = False + + probs_1, indices_1 = moe_layer.router(hidden_states) + (permuted_input_1, tokens_per_expert) = moe_layer.token_dispatcher.token_permutation( + hidden_states, probs_1, indices_1 + ) + torch.distributed.barrier() + forward_answer, restored_bias = moe_layer.token_dispatcher.token_unpermutation( + permuted_input_1 + ) + torch.autograd.backward(forward_answer, forward_answer) + backward_answer = hidden_states.grad.clone() + hidden_states.grad = None + torch.cuda.synchronize() + # End + + moe_layer_2 = self.new_moe_layer() + moe_layer_2.load_state_dict(moe_layer.state_dict()) + moe_layer_2.config.moe_pad_expert_input_to_capacity = True + moe_layer_2.token_dispatcher.drop_and_pad = True + + probs_2, indices_2 = moe_layer_2.router(hidden_states) + (permuted_input_2, tokens_per_expert) = moe_layer_2.token_dispatcher.token_permutation( + hidden_states, probs_2, indices_2 + ) + 
restored_hidden_states, restored_bias = moe_layer_2.token_dispatcher.token_unpermutation( + permuted_input_2 + ) + torch.distributed.barrier() + assert torch.allclose( + restored_hidden_states, forward_answer + ), "Restored hidden states does not match" + + # check if the grad of the hidden states is same as the hidden states + torch.autograd.backward(restored_hidden_states, restored_hidden_states) + assert torch.allclose( + hidden_states.grad, backward_answer + ), "Gradient of hidden states should be same as hidden states" + + def set_params(self): + # TODO: Set consistent parameters for various parallelisms. + raise NotImplementedError + + def destroy(self): + Utils.destroy_model_parallel() + + +permute_fusion_params = [False] +if is_te_min_version("1.14.0"): + permute_fusion_params.append(True) + + +class TestAllgatherDispatcher: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize("tp_size,ep_size", [(8, 1), (1, 8), (2, 4), (1, 1)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + def test_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="allgather", + moe_permute_fusion=permute_fusion, + ) + + container.dispatcher_dropless_test() + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + @pytest.mark.parametrize( + "tp_size,ep_size,moe_tp_size", [(1, 1, 8), (1, 2, 4), (1, 4, 2), (2, 2, 4)] + ) + def test_moe_tp_forward_backward(self, tp_size, ep_size, moe_tp_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + moe_tp_size=moe_tp_size, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="allgather", + sequence_parallel=True, + moe_grouped_gemm=True, + moe_permute_fusion=permute_fusion, + use_cpu_initialization=False, + ) + + container.dispatcher_dropless_test() + + +def is_deep_ep_available(): + from megatron.core.transformer.moe.fused_a2a import HAVE_DEEP_EP + + return HAVE_DEEP_EP + + +@pytest.mark.skipif(not is_deep_ep_available(), reason="Deep EP is not available") +class TestFlexDispatcher: + def setup_method(self, method): + pass + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.internal + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.parametrize("tp_size,ep_size", [(8, 1), (1, 8), (2, 4)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + def test_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="flex", + moe_permute_fusion=permute_fusion, + hidden_size=4, + moe_enable_deepep=True, + ) + container.dispatcher_dropless_test() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + 
@pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_capacity_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="flex", + moe_token_drop_policy="probs", + moe_expert_capacity_factor=0.5, + moe_pad_expert_input_to_capacity=False, + moe_permute_fusion=permute_fusion, + hidden_size=4, + moe_enable_deepep=True, + ) + container.dispatcher_capacity_test() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.internal + @pytest.mark.timeout(120) + @pytest.mark.parametrize("tp_size,ep_size", [(1, 8), (8, 1), (4, 2)]) + @pytest.mark.parametrize("permute_fusion", permute_fusion_params) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_capacity_padding_forward_backward(self, tp_size, ep_size, permute_fusion): + container = MoEModelTestContainer( + tp_size=tp_size, + ep_size=ep_size, + pp_size=1, + num_moe_experts=8, + moe_router_topk=2, + moe_router_load_balancing_type="aux_loss", + moe_token_dispatcher_type="flex", + moe_token_drop_policy="probs", + moe_expert_capacity_factor=0.6, + moe_pad_expert_input_to_capacity=True, + moe_permute_fusion=permute_fusion, + hidden_size=4, + moe_enable_deepep=True, + ) + container.dispatcher_drop_and_pad_test() diff --git a/tests/unit_tests/transformer/test_attention.py b/tests/unit_tests/transformer/test_attention.py index 8c13ff3..0275e74 100644 --- a/tests/unit_tests/transformer/test_attention.py +++ b/tests/unit_tests/transformer/test_attention.py @@ -1,123 +1,127 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
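The dispatcher_dropless_test used by the dispatcher suites above relies on a simple invariant: when every routed copy of a token carries a uniform weight of 1/top-k, permuting tokens to experts, applying an identity expert, and unpermuting reproduces the original activations (up to the expert-tensor-parallel reduction scale that the test divides out). A dispatcher-free sketch of that invariant using plain tensor ops rather than the token dispatcher API:

import torch

num_tokens, hidden, topk = 6, 4, 2
tokens = torch.randn(num_tokens, hidden)
copies = tokens.repeat_interleave(topk, dim=0)            # "permute": top-k copies per token
probs = torch.full((num_tokens, topk), 1.0 / topk)        # uniform routing weights
combined = (copies.view(num_tokens, topk, hidden) * probs.unsqueeze(-1)).sum(dim=1)  # "unpermute"
assert torch.allclose(combined, tokens)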
- -import pytest -import torch - -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.attention import SelfAttention -from megatron.core.transformer.transformer_config import TransformerConfig -from tests.unit_tests.test_utilities import Utils - - -class TestParallelAttention: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - self.transformer_config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - self.parallel_attention = SelfAttention( - self.transformer_config, - get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, - layer_number=1, - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - assert isinstance(self.parallel_attention, SelfAttention) - assert self.parallel_attention.layer_number == 1 - - num_weights = sum([p.numel() for p in self.parallel_attention.parameters()]) - assert num_weights == 648 - - def test_cpu_forward(self): - # we can't currently do this because the global memory buffer is on GPU - pass - - def test_gpu_forward(self): - - config = self.parallel_attention.config - sequence_length = 32 - micro_batch_size = 2 - - self.parallel_attention.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - output, bias = self.parallel_attention(hidden_states, attention_mask) - - assert config.recompute_granularity is None - assert output.shape[0] == sequence_length - assert output.shape[1] == micro_batch_size - assert output.shape[2] == config.hidden_size - assert bias.shape[0] == config.hidden_size - - def test_fused_rope_gpu_forward(self): - self.parallel_attention.config.apply_rope_fusion = True - config = self.parallel_attention.config - sequence_length = 32 - micro_batch_size = 2 - - self.parallel_attention.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - rotary_pos_emb = torch.ones( - sequence_length, 1, 1, self.parallel_attention.config.kv_channels - ).cuda() - output, bias = self.parallel_attention( - hidden_states, attention_mask, rotary_pos_emb=rotary_pos_emb - ) - - assert config.recompute_granularity is None - assert output.shape[0] == sequence_length - assert output.shape[1] == micro_batch_size - assert output.shape[2] == config.hidden_size - assert bias.shape[0] == config.hidden_size - self.parallel_attention.config.apply_rope_fusion = False - - def test_checkpointed_gpu_forward(self): - transformer_config = self.transformer_config - transformer_config.recompute_granularity = 'selective' - checkpointed_parallel_attention = SelfAttention( - transformer_config, - get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, - layer_number=1, - ) - config = checkpointed_parallel_attention.config - - sequence_length = 32 - micro_batch_size = 2 - - checkpointed_parallel_attention.cuda() 
- - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - (sequence_length, micro_batch_size, checkpointed_parallel_attention.config.hidden_size) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - output, bias = checkpointed_parallel_attention(hidden_states, attention_mask) - - assert config.recompute_granularity == 'selective' - assert output.shape[0] == sequence_length - assert output.shape[1] == micro_batch_size - assert output.shape[2] == config.hidden_size - assert bias.shape[0] == config.hidden_size +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import pytest +import torch + +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.attention import SelfAttention +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestParallelAttention: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + self.parallel_attention = SelfAttention( + self.transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + assert isinstance(self.parallel_attention, SelfAttention) + assert self.parallel_attention.layer_number == 1 + + num_weights = sum([p.numel() for p in self.parallel_attention.parameters()]) + assert num_weights == 648 + + def test_cpu_forward(self): + # we can't currently do this because the global memory buffer is on GPU + pass + + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_gpu_forward(self): + + config = self.parallel_attention.config + sequence_length = 32 + micro_batch_size = 2 + + self.parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = self.parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity is None + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size + + @pytest.mark.flaky_in_dev + def test_fused_rope_gpu_forward(self): + self.parallel_attention.config.apply_rope_fusion = True + config = self.parallel_attention.config + sequence_length = 32 + micro_batch_size = 2 + + self.parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + rotary_pos_emb = torch.ones( + sequence_length, 1, 1, self.parallel_attention.config.kv_channels + ).cuda() + output, bias = self.parallel_attention( + hidden_states, attention_mask, 
rotary_pos_emb=rotary_pos_emb + ) + + assert config.recompute_granularity is None + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size + self.parallel_attention.config.apply_rope_fusion = False + + @pytest.mark.flaky_in_dev + def test_checkpointed_gpu_forward(self): + transformer_config = self.transformer_config + transformer_config.recompute_granularity = 'selective' + checkpointed_parallel_attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + config = checkpointed_parallel_attention.config + + sequence_length = 32 + micro_batch_size = 2 + + checkpointed_parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, checkpointed_parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = checkpointed_parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity == 'selective' + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size diff --git a/tests/unit_tests/transformer/test_multi_latent_attention.py b/tests/unit_tests/transformer/test_multi_latent_attention.py index b858072..a089693 100644 --- a/tests/unit_tests/transformer/test_multi_latent_attention.py +++ b/tests/unit_tests/transformer/test_multi_latent_attention.py @@ -1,130 +1,253 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
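For reference on the num_weights == 648 assertion in test_constructor above: with hidden_size=12 and 4 attention heads, kv_channels is 3 and the fused QKV projection is 36 wide, so the count decomposes into the QKV linear, the output projection, and the layernorm folded into TELayerNormColumnParallelLinear. A small arithmetic check, assuming bias terms on both linears:

hidden_size, qkv_width = 12, 36                         # 4 heads * 3 kv_channels * 3 (q, k, v)
linear_qkv = qkv_width * hidden_size + qkv_width        # 432 weights + 36 biases = 468
linear_proj = hidden_size * hidden_size + hidden_size   # 144 weights + 12 biases = 156
fused_layernorm = 2 * hidden_size                       # gamma + beta of the input norm = 24
assert linear_qkv + linear_proj + fused_layernorm == 648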
- -import os -from importlib.metadata import version - -import pytest -import torch -import transformer_engine as te - -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.multi_latent_attention import MLASelfAttention -from megatron.core.transformer.transformer_config import MLATransformerConfig -from megatron.core.utils import is_te_min_version -from tests.unit_tests.test_utilities import Utils - - -class TestParallelMLAAttention: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - self.transformer_config = MLATransformerConfig( - num_layers=2, - hidden_size=12, - num_attention_heads=4, - use_cpu_initialization=True, - q_lora_rank=32, - kv_lora_rank=32, - qk_head_dim=128, - v_head_dim=128, - qk_pos_emb_head_dim=64, - rotary_base=10000, - max_position_embeddings=32, - ) - self.parallel_attention = MLASelfAttention( - self.transformer_config, - get_gpt_layer_with_transformer_engine_spec( - multi_latent_attention=True - ).submodules.self_attention.submodules, - layer_number=1, - attn_mask_type=AttnMaskType.causal, - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - assert isinstance(self.parallel_attention, MLASelfAttention) - assert self.parallel_attention.layer_number == 1 - - num_weights = sum([p.numel() for p in self.parallel_attention.parameters()]) - assert num_weights == 65036 - - def test_cpu_forward(self): - # we can't currently do this because the global memory buffer is on GPU - pass - - def test_gpu_forward(self): - if is_te_min_version("1.10.0"): - - # use flash attention for hopper, future may support fused attention for ampere - os.environ['NVTE_FUSED_ATTN'] = "0" - os.environ['NVTE_FLASH_ATTN'] = "1" - - config = self.parallel_attention.config - sequence_length = 32 - micro_batch_size = 2 - - self.parallel_attention.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - output, bias = self.parallel_attention(hidden_states, attention_mask) - - assert config.recompute_granularity is None - assert output.shape[0] == sequence_length - assert output.shape[1] == micro_batch_size - assert output.shape[2] == config.hidden_size - assert bias.shape[0] == config.hidden_size - - def test_checkpointed_gpu_forward(self): - if is_te_min_version("1.10.0"): - # use flash attention for hopper, future may support fused attention for ampere - os.environ['NVTE_FUSED_ATTN'] = "1" - os.environ['NVTE_FLASH_ATTN'] = "0" - - transformer_config = self.transformer_config - transformer_config.recompute_granularity = 'selective' - checkpointed_parallel_attention = MLASelfAttention( - transformer_config, - get_gpt_layer_with_transformer_engine_spec( - multi_latent_attention=True - ).submodules.self_attention.submodules, - layer_number=1, - attn_mask_type=AttnMaskType.causal, - ) - config = checkpointed_parallel_attention.config - - sequence_length = 32 - micro_batch_size = 2 - - checkpointed_parallel_attention.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - ( - sequence_length, - 
micro_batch_size, - checkpointed_parallel_attention.config.hidden_size, - ) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - output, bias = checkpointed_parallel_attention(hidden_states, attention_mask) - - assert config.recompute_granularity == 'selective' - assert output.shape[0] == sequence_length - assert output.shape[1] == micro_batch_size - assert output.shape[2] == config.hidden_size - assert bias.shape[0] == config.hidden_size +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import os +from importlib.metadata import version +from inspect import signature + +import pytest +import torch +import transformer_engine as te + +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.attention import Attention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.multi_latent_attention import MLASelfAttention, MultiLatentAttention +from megatron.core.transformer.transformer_config import MLATransformerConfig +from megatron.core.utils import is_te_min_version +from tests.unit_tests.test_utilities import Utils + + +@pytest.mark.parametrize("rope_type", ('yarn', 'rope')) +class TestParallelMLAAttention: + + @pytest.fixture(scope='function', autouse=True) + def setup_and_teardown(self, rope_type): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = MLATransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + use_cpu_initialization=True, + q_lora_rank=32, + kv_lora_rank=32, + qk_head_dim=128, + v_head_dim=128, + qk_pos_emb_head_dim=64, + rope_type=rope_type, + rotary_base=10000, + max_position_embeddings=32, + ) + self.parallel_attention = MLASelfAttention( + self.transformer_config, + get_gpt_layer_with_transformer_engine_spec( + multi_latent_attention=True + ).submodules.self_attention.submodules, + layer_number=1, + attn_mask_type=AttnMaskType.causal, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_input_params_forward(self): + """ + Test to ensure that MultiLatentAttention has all parameters + required by the Attention class's forward method. 
+ """ + # Extract parameters from the forward methods of both Attention and MultiLatentAttention + attn_params = set(signature(Attention.forward).parameters.keys()) + mla_params = set(signature(MultiLatentAttention.forward).parameters.keys()) + + # Identify parameters that are in Attention but missing in MultiLatentAttention + missing_params = attn_params - mla_params + assert not missing_params, f"Missing parameters in MultiLatentAttention: {missing_params}" + + def test_constructor(self): + assert isinstance(self.parallel_attention, MLASelfAttention) + assert self.parallel_attention.layer_number == 1 + + num_weights = sum([p.numel() for p in self.parallel_attention.parameters()]) + assert num_weights == 65036 + + def test_cpu_forward(self): + # we can't currently do this because the global memory buffer is on GPU + pass + + def test_gpu_forward(self): + if is_te_min_version("1.10.0"): + config = self.parallel_attention.config + sequence_length = 32 + micro_batch_size = 2 + + self.parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = self.parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity is None + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size + + def test_checkpointed_gpu_forward(self): + if is_te_min_version("1.10.0"): + transformer_config = self.transformer_config + transformer_config.recompute_granularity = 'selective' + checkpointed_parallel_attention = MLASelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec( + multi_latent_attention=True + ).submodules.self_attention.submodules, + layer_number=1, + attn_mask_type=AttnMaskType.causal, + ) + config = checkpointed_parallel_attention.config + + sequence_length = 32 + micro_batch_size = 2 + + checkpointed_parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + ( + sequence_length, + micro_batch_size, + checkpointed_parallel_attention.config.hidden_size, + ) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = checkpointed_parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity == 'selective' + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size + + +class TestSequenceParallelMLAAttention: + + def setup_method(self, method): + self.tensor_parallel_size = 2 + Utils.initialize_model_parallel(self.tensor_parallel_size, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = MLATransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + q_lora_rank=32, + kv_lora_rank=32, + qk_head_dim=128, + v_head_dim=128, + qk_pos_emb_head_dim=64, + rotary_base=10000, + max_position_embeddings=64, + tensor_model_parallel_size=self.tensor_parallel_size, + sequence_parallel=True, + ) + self.parallel_attention = MLASelfAttention( + self.transformer_config, + get_gpt_layer_with_transformer_engine_spec( + multi_latent_attention=True + 
).submodules.self_attention.submodules, + layer_number=1, + attn_mask_type=AttnMaskType.causal, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_gpu_forward(self): + if is_te_min_version("1.10.0"): + config = self.parallel_attention.config + sequence_length = 64 + sub_sequence_length = sequence_length // self.tensor_parallel_size + micro_batch_size = 2 + + self.parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sub_sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = self.parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity is None + assert output.shape[0] == sub_sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size + + +class TestTensorParallelMLAAttention: + def setup_method(self, method): + self.tensor_parallel_size = 2 + Utils.initialize_model_parallel(self.tensor_parallel_size, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = MLATransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + q_lora_rank=32, + kv_lora_rank=32, + qk_head_dim=128, + v_head_dim=128, + qk_pos_emb_head_dim=64, + rotary_base=10000, + max_position_embeddings=64, + tensor_model_parallel_size=self.tensor_parallel_size, + sequence_parallel=False, + ) + self.parallel_attention = MLASelfAttention( + self.transformer_config, + get_gpt_layer_with_transformer_engine_spec( + multi_latent_attention=True + ).submodules.self_attention.submodules, + layer_number=1, + attn_mask_type=AttnMaskType.causal, + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_gpu_forward(self): + if is_te_min_version("1.10.0"): + config = self.parallel_attention.config + sequence_length = 64 + micro_batch_size = 2 + + self.parallel_attention.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, self.parallel_attention.config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + output, bias = self.parallel_attention(hidden_states, attention_mask) + + assert config.recompute_granularity is None + assert output.shape[0] == sequence_length + assert output.shape[1] == micro_batch_size + assert output.shape[2] == config.hidden_size + assert bias.shape[0] == config.hidden_size diff --git a/tests/unit_tests/transformer/test_relative_attention.py b/tests/unit_tests/transformer/test_relative_attention.py new file mode 100644 index 0000000..dd1d4c0 --- /dev/null +++ b/tests/unit_tests/transformer/test_relative_attention.py @@ -0,0 +1,38 @@ +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
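A shape note for TestSequenceParallelMLAAttention above: with sequence_parallel=True and tensor_model_parallel_size=2, each rank processes only its contiguous shard of the sequence, so both the hidden-state input and the attention output are [sequence_length // tp_size, batch, hidden]. Spelled out with the values used in the test:

seq_len, tp_size, micro_batch_size, hidden_size = 64, 2, 2, 12
sub_seq_len = seq_len // tp_size                              # 32 positions held per TP rank
local_shape = (sub_seq_len, micro_batch_size, hidden_size)    # (32, 2, 12), as asserted above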
+ +import pytest +import torch +import torch.nn.init as init + +from megatron.core.models.common.embeddings.relative_pos_embedding import RelativePositionEmbedding +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from tests.unit_tests.test_utilities import Utils + + +class TestRelativePositionEmbedding: + def setup_method(self): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.num_heads = 12 + self.relative_pos_emb = RelativePositionEmbedding( + bidirectional=True, + init_method=init.normal_, + num_attention_heads=self.num_heads, + relative_attention_num_buckets=32, + relative_attention_max_distance=128, + ) + + def teardown_method(self, method): + del self.relative_pos_emb + Utils.destroy_model_parallel() + + def test_constructor(self): + assert isinstance(self.relative_pos_emb, RelativePositionEmbedding) + + def test_forward(self): + self.query_seq_length = 512 + output = self.relative_pos_emb(self.query_seq_length, self.query_seq_length) + assert output.shape[0] == 1 + assert output.shape[1] == self.num_heads + assert output.shape[2] == self.query_seq_length + assert output.shape[3] == self.query_seq_length diff --git a/tests/unit_tests/transformer/test_retro_attention.py b/tests/unit_tests/transformer/test_retro_attention.py index 1d0bcd8..d28cb40 100644 --- a/tests/unit_tests/transformer/test_retro_attention.py +++ b/tests/unit_tests/transformer/test_retro_attention.py @@ -1,202 +1,203 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -import os -import types - -import pytest -import torch - -from megatron.core.models.retro import RetroConfig, get_retro_decoder_block_spec -from megatron.core.models.retro.decoder_attention import ( - RetroDecoderBiasDropoutAdd, - RetroDecoderCrossAttention, -) -from megatron.core.models.retro.encoder_attention import ( - RetroEncoderBiasDropoutAdd, - RetroEncoderCrossAttention, - RetroEncoderLayerNorm, -) -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_block import TransformerBlock -from tests.unit_tests.test_utilities import Utils - - -class TestRetroAttention: - - @classmethod - def get_config(cls): - return RetroConfig( - num_layers=12, - hidden_size=16, - num_attention_heads=4, - use_cpu_initialization=True, - retro_num_neighbors=2, - retro_chunk_length=4, - retro_retrieved_length=8, - retro_split_preprocessing="98,2,0", - ) - - @classmethod - def get_modules(cls, config, use_transformer_engine, use_gpu): - - # Retro decoder layer. - decoder_block_spec = get_retro_decoder_block_spec( - config, use_transformer_engine=use_transformer_engine - ) - decoder_block = TransformerBlock(config=config, spec=decoder_block_spec) - decoder_layers = [ - layer - for layer in decoder_block.layers - if isinstance(layer.cross_attention, RetroDecoderCrossAttention) - ] - decoder_layer = decoder_layers[0] - - # Retro encoder layer. - encoder_block = decoder_layer.cross_attention.encoder - encoder_layers = [ - layer - for layer in encoder_block.layers - if isinstance(layer.cross_attention, RetroEncoderCrossAttention) - ] - encoder_layer = encoder_layers[0] - - # Modules. - modules = types.SimpleNamespace( - decoder_attn=decoder_layer.cross_attention, - decoder_bda=decoder_layer.cross_attn_bda, - encoder_attn=encoder_layer.cross_attention, - encoder_bda=encoder_layer.cross_attn_bda, - encoder_norm=encoder_layer.pre_mlp_layernorm, - ) - - # GPU. 
- if use_gpu: - [m.cuda() for m in vars(modules).values()] - - return modules - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - os.environ['NVTE_FLASH_ATTN'] = "0" - os.environ['NVTE_FUSED_ATTN'] = "0" - - model_parallel_cuda_manual_seed(123) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - - config = self.get_config() - modules = self.get_modules(config, use_transformer_engine=True, use_gpu=False) - - assert isinstance(modules.decoder_attn, RetroDecoderCrossAttention) - assert isinstance(modules.decoder_bda, RetroDecoderBiasDropoutAdd) - assert isinstance(modules.encoder_attn, RetroEncoderCrossAttention) - assert isinstance(modules.encoder_bda, RetroEncoderBiasDropoutAdd) - assert isinstance(modules.encoder_norm, RetroEncoderLayerNorm) - - assert modules.decoder_attn.attn.layer_number == 6 - assert modules.encoder_attn.attn.layer_number == 1 - - get_nparams = lambda m: sum(p.numel() for p in m.parameters()) - assert get_nparams(modules.decoder_attn) == 8768 - assert get_nparams(modules.decoder_bda) == 0 - assert get_nparams(modules.encoder_attn) == 1088 - assert get_nparams(modules.encoder_bda) == 0 - assert get_nparams(modules.encoder_norm) == 32 - - def test_cpu_forward(self): - # we can't currently do this because the global memory buffer is on GPU - pass - - def run_gpu_forward(self, recompute_granularity, use_transformer_engine): - - config = self.get_config() - config.recompute_granularity = recompute_granularity - modules = self.get_modules(config, use_transformer_engine, use_gpu=True) - - seq_length = 32 - micro_batch_size = 2 - n_chunks_per_sample = seq_length // config.retro_chunk_length - - # Init tensors. - hidden_states = torch.ones((seq_length, micro_batch_size, config.hidden_size)).cuda() - attention_mask = None - decoder_context = torch.ones( - ( - config.retro_retrieved_length, - config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - ).cuda() - encoder_context = torch.ones( - (config.retro_chunk_length, micro_batch_size * n_chunks_per_sample, config.hidden_size) - ).cuda() - - # Forward decoder. - decoder_attn_output = modules.decoder_attn(hidden_states, attention_mask, decoder_context) - with torch.enable_grad(): - decoder_bda_output = modules.decoder_bda(True, True)( - decoder_attn_output, hidden_states, config.hidden_dropout - ) - - # Forward encoder. - encoder_attn_output_tuples = modules.encoder_attn(decoder_context, None, encoder_context) - with torch.enable_grad(): - encoder_bda_output = modules.encoder_bda(True, True)( - encoder_attn_output_tuples, decoder_context, config.retro_encoder_hidden_dropout - ) - encoder_norm_output = modules.encoder_norm(encoder_bda_output) - - # Verify decoder. 
- assert set(decoder_attn_output.keys()) == set( - ["ns", "bs", "d", "l", "pad", "attention_output", "attention_bias", "context"] - ) - assert decoder_attn_output["ns"] == seq_length - assert decoder_attn_output["bs"] == micro_batch_size - assert decoder_attn_output["d"] == config.hidden_size - assert decoder_attn_output["l"] == n_chunks_per_sample - assert decoder_attn_output["pad"] == 3 - assert tuple(decoder_attn_output["attention_output"].shape) == ( - config.retro_chunk_length, - micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - assert tuple(decoder_attn_output["attention_bias"].shape) == (config.hidden_size,) - assert decoder_attn_output["context"].shape == ( - config.retro_retrieved_length * config.retro_num_neighbors, - micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - assert decoder_bda_output.shape == hidden_states.shape - - # Verify encoder. - assert len(encoder_attn_output_tuples) == config.retro_num_neighbors - for output, bias, residual in encoder_attn_output_tuples: - assert tuple(output.shape) == ( - config.retro_retrieved_length, - micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - assert tuple(bias.shape) == (config.hidden_size,) - assert tuple(residual.shape) == ( - config.retro_retrieved_length, - micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - assert encoder_bda_output.shape == ( - config.retro_retrieved_length, - config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - assert encoder_norm_output.shape == ( - config.retro_retrieved_length, - config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, - config.hidden_size, - ) - - def test_gpu_forward(self): - for recompute_granularity in (None, 'selective'): - for use_transformer_engine in (True, False): - self.run_gpu_forward(recompute_granularity, use_transformer_engine) +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import os +import types + +import pytest +import torch + +from megatron.core.models.retro import RetroConfig, get_retro_decoder_block_spec +from megatron.core.models.retro.decoder_attention import ( + RetroDecoderBiasDropoutAdd, + RetroDecoderCrossAttention, +) +from megatron.core.models.retro.encoder_attention import ( + RetroEncoderBiasDropoutAdd, + RetroEncoderCrossAttention, + RetroEncoderLayerNorm, +) +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_block import TransformerBlock +from tests.unit_tests.test_utilities import Utils + + +class TestRetroAttention: + + @classmethod + def get_config(cls): + return RetroConfig( + num_layers=12, + hidden_size=16, + num_attention_heads=4, + use_cpu_initialization=True, + retro_num_neighbors=2, + retro_chunk_length=4, + retro_retrieved_length=8, + retro_split_preprocessing="98,2,0", + ) + + @classmethod + def get_modules(cls, config, use_transformer_engine, use_gpu): + + # Retro decoder layer. + decoder_block_spec = get_retro_decoder_block_spec( + config, use_transformer_engine=use_transformer_engine + ) + decoder_block = TransformerBlock(config=config, spec=decoder_block_spec) + decoder_layers = [ + layer + for layer in decoder_block.layers + if isinstance(layer.cross_attention, RetroDecoderCrossAttention) + ] + decoder_layer = decoder_layers[0] + + # Retro encoder layer. 
+ encoder_block = decoder_layer.cross_attention.encoder + encoder_layers = [ + layer + for layer in encoder_block.layers + if isinstance(layer.cross_attention, RetroEncoderCrossAttention) + ] + encoder_layer = encoder_layers[0] + + # Modules. + modules = types.SimpleNamespace( + decoder_attn=decoder_layer.cross_attention, + decoder_bda=decoder_layer.cross_attn_bda, + encoder_attn=encoder_layer.cross_attention, + encoder_bda=encoder_layer.cross_attn_bda, + encoder_norm=encoder_layer.pre_mlp_layernorm, + ) + + # GPU. + if use_gpu: + [m.cuda() for m in vars(modules).values()] + + return modules + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + os.environ['NVTE_FLASH_ATTN'] = "0" + os.environ['NVTE_FUSED_ATTN'] = "0" + + model_parallel_cuda_manual_seed(123) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + + config = self.get_config() + modules = self.get_modules(config, use_transformer_engine=True, use_gpu=False) + + assert isinstance(modules.decoder_attn, RetroDecoderCrossAttention) + assert isinstance(modules.decoder_bda, RetroDecoderBiasDropoutAdd) + assert isinstance(modules.encoder_attn, RetroEncoderCrossAttention) + assert isinstance(modules.encoder_bda, RetroEncoderBiasDropoutAdd) + assert isinstance(modules.encoder_norm, RetroEncoderLayerNorm) + + assert modules.decoder_attn.attn.layer_number == 6 + assert modules.encoder_attn.attn.layer_number == 1 + + get_nparams = lambda m: sum(p.numel() for p in m.parameters()) + assert get_nparams(modules.decoder_attn) == 8768 + assert get_nparams(modules.decoder_bda) == 0 + assert get_nparams(modules.encoder_attn) == 1088 + assert get_nparams(modules.encoder_bda) == 0 + assert get_nparams(modules.encoder_norm) == 32 + + def test_cpu_forward(self): + # we can't currently do this because the global memory buffer is on GPU + pass + + def run_gpu_forward(self, recompute_granularity, use_transformer_engine): + + config = self.get_config() + config.recompute_granularity = recompute_granularity + modules = self.get_modules(config, use_transformer_engine, use_gpu=True) + + seq_length = 32 + micro_batch_size = 2 + n_chunks_per_sample = seq_length // config.retro_chunk_length + + # Init tensors. + hidden_states = torch.ones((seq_length, micro_batch_size, config.hidden_size)).cuda() + attention_mask = None + decoder_context = torch.ones( + ( + config.retro_retrieved_length, + config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + ).cuda() + encoder_context = torch.ones( + (config.retro_chunk_length, micro_batch_size * n_chunks_per_sample, config.hidden_size) + ).cuda() + + # Forward decoder. + decoder_attn_output = modules.decoder_attn(hidden_states, attention_mask, decoder_context) + with torch.enable_grad(): + decoder_bda_output = modules.decoder_bda(True, True)( + decoder_attn_output, hidden_states, config.hidden_dropout + ) + + # Forward encoder. + encoder_attn_output_tuples = modules.encoder_attn(decoder_context, None, encoder_context) + with torch.enable_grad(): + encoder_bda_output = modules.encoder_bda(True, True)( + encoder_attn_output_tuples, decoder_context, config.retro_encoder_hidden_dropout + ) + encoder_norm_output = modules.encoder_norm(encoder_bda_output) + + # Verify decoder. 
+ assert set(decoder_attn_output.keys()) == set( + ["ns", "bs", "d", "l", "pad", "attention_output", "attention_bias", "context"] + ) + assert decoder_attn_output["ns"] == seq_length + assert decoder_attn_output["bs"] == micro_batch_size + assert decoder_attn_output["d"] == config.hidden_size + assert decoder_attn_output["l"] == n_chunks_per_sample + assert decoder_attn_output["pad"] == 3 + assert tuple(decoder_attn_output["attention_output"].shape) == ( + config.retro_chunk_length, + micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + assert tuple(decoder_attn_output["attention_bias"].shape) == (config.hidden_size,) + assert decoder_attn_output["context"].shape == ( + config.retro_retrieved_length * config.retro_num_neighbors, + micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + assert decoder_bda_output.shape == hidden_states.shape + + # Verify encoder. + assert len(encoder_attn_output_tuples) == config.retro_num_neighbors + for output, bias, residual in encoder_attn_output_tuples: + assert tuple(output.shape) == ( + config.retro_retrieved_length, + micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + assert tuple(bias.shape) == (config.hidden_size,) + assert tuple(residual.shape) == ( + config.retro_retrieved_length, + micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + assert encoder_bda_output.shape == ( + config.retro_retrieved_length, + config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + assert encoder_norm_output.shape == ( + config.retro_retrieved_length, + config.retro_num_neighbors * micro_batch_size * n_chunks_per_sample, + config.hidden_size, + ) + + @pytest.mark.flaky_in_dev + def test_gpu_forward(self): + for recompute_granularity in (None, 'selective'): + for use_transformer_engine in (True, False): + self.run_gpu_forward(recompute_granularity, use_transformer_engine) diff --git a/tests/unit_tests/transformer/test_spec_customization.py b/tests/unit_tests/transformer/test_spec_customization.py index a9a245b..c0f83ca 100644 --- a/tests/unit_tests/transformer/test_spec_customization.py +++ b/tests/unit_tests/transformer/test_spec_customization.py @@ -1,241 +1,241 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
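The retro tensor shapes constructed in run_gpu_forward above follow directly from the values in get_config; the arithmetic, written out:

retro_chunk_length, retro_retrieved_length, retro_num_neighbors = 4, 8, 2
seq_length, micro_batch_size, hidden_size = 32, 2, 16
n_chunks_per_sample = seq_length // retro_chunk_length        # 8 chunks of length 4 per sample
decoder_context_shape = (
    retro_retrieved_length,
    retro_num_neighbors * micro_batch_size * n_chunks_per_sample,
    hidden_size,
)                                                             # (8, 32, 16)
encoder_context_shape = (
    retro_chunk_length,
    micro_batch_size * n_chunks_per_sample,
    hidden_size,
)                                                             # (4, 16, 16)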
- -import sys -from dataclasses import dataclass, fields - -import pytest -import torch -import transformer_engine as te - -from megatron.core.extensions.transformer_engine import ( - TEDotProductAttention, - TELayerNormColumnParallelLinear, - TENorm, - TERowParallelLinear, -) -from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules -from megatron.core.transformer.dot_product_attention import DotProductAttention -from megatron.core.transformer.enums import AttnMaskType -from megatron.core.transformer.identity_op import IdentityFuncOp, IdentityOp -from megatron.core.transformer.spec_utils import ModuleSpec, build_module, import_module -from megatron.core.transformer.transformer_block import TransformerBlock, TransformerBlockSubmodules -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules -from megatron.core.utils import is_te_min_version -from tests.unit_tests.test_utilities import Utils - - -class TestSpecCustomization: - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - self.config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - - # specify Transformer Layer spec with all identity ops - self.transformer_layer_spec = TransformerLayerSubmodules() - - # specify attention spec using already imported class - self.attention_spec = ModuleSpec( - module=SelfAttention, - params={"attn_mask_type": AttnMaskType.causal}, - submodules=SelfAttentionSubmodules( - linear_qkv=TELayerNormColumnParallelLinear, - core_attention=TEDotProductAttention, - linear_proj=TERowParallelLinear, - q_layernorm=IdentityOp, - k_layernorm=IdentityOp, - ), - ) - - # specify layernorm spec with module path to test dynamic importing - self.layernorm_spec = ModuleSpec( - module=("megatron.core.extensions.transformer_engine", "TENorm") - ) - - # specify bias dropout add with module path - self.bda_spec = ModuleSpec( - module=("megatron.core.fusions.fused_bias_dropout", "get_bias_dropout_add") - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_import_module(self): - self_attention_cls = import_module( - module_path=('megatron.core.transformer.attention', 'SelfAttention') - ) - assert id(self_attention_cls) == id(SelfAttention) - - layernorm_cls = import_module(module_path=self.layernorm_spec.module) - assert id(layernorm_cls) == id(TENorm) - - def test_build_module(self): - # Check NoOp TransformerLayer - random_input = 12 - noop_transformer_layer = [ - build_module(getattr(self.transformer_layer_spec, field.name)) - for field in fields(self.transformer_layer_spec) - if field.name != 'sharded_state_dict_keys_map' - ] - - x = random_input - for mod in noop_transformer_layer: - # checking for `IdentityFuncOp` before `IdentityOp` because former - # is derived from the latter and so the second if statement will - # always be `True`. 
- if isinstance(mod, IdentityFuncOp): - x = mod()(x) - elif isinstance(mod, IdentityOp): - x = mod(x) - - assert x == random_input - - # Check SelfAttention - self_attention = build_module(self.attention_spec, config=self.config, layer_number=1) - assert isinstance(self_attention, SelfAttention) - assert self_attention.layer_number == 1 - assert self_attention.attn_mask_type == self.attention_spec.params['attn_mask_type'] - - num_weights = sum([p.numel() for p in self_attention.parameters()]) - assert num_weights == 648 - - # Check SelfAttention but with already initialized module - # `self_attention`. In this test, `build_module` acts as a no op as it - # simply returns the initialized module. - # NOTE: (sudhakars) Uncomment this test once this feature gets added - # back. - # self_attention2 = build_module( - # self_attention, config=self.config, spec=self.attention_spec, - # ) - # assert isinstance(self_attention2, SelfAttention) - # assert self_attention2.layer_number == 1 - # assert self_attention2.attn_mask_type == self.attention_spec.params['attn_mask_type'] - - # num_weights = sum([p.numel() for p in self_attention2.parameters()]) - # assert num_weights == 648 - - # Check LayerNorm - layernorm = build_module( - self.layernorm_spec, - config=self.config, - hidden_size=self.config.hidden_size, - eps=self.config.layernorm_epsilon, - ) - assert isinstance(layernorm, te.pytorch.LayerNorm) - - # Check BiasDropoutAdd - bda_op = build_module(self.bda_spec) - assert id(bda_op) == id(get_bias_dropout_add) - - def test_sliding_window_attention(self): - if not is_te_min_version("1.2.0"): - print("SWA not tested because TE version is not >= 1.2.0", file=sys.stderr) - return - - config = TransformerConfig( - num_layers=2, - hidden_size=12, - num_attention_heads=4, - use_cpu_initialization=True, - window_size=[10, 0], - ) - # Make sure DotProductAttention throws (swa unsupported). - threw = False - try: - attn = DotProductAttention( - config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' - ) - except: - threw = True - finally: - assert threw, 'Expected DotProductAttention to throw exception for SWA' - - # Test TEDotProductAttention - attn = TEDotProductAttention( - config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' - ) - # Make sure window-size is what we expect. - assert attn.window_size == config.window_size - - # Single integer window-size unsupported, make sure it throws - threw = False - try: - config.window_size = 11 - attn = TEDotProductAttention( - config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' - ) - except: - threw = True - finally: - assert threw, "Expected TEDotProductAttention to throw for integer window-size" - - # `None` makes this causal. - config.window_size = None - attn = TEDotProductAttention( - config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' - ) - # Make sure it's causal. - assert attn.window_size == (-1, 0) - - def test_transformer_block_custom(self): - """ - This test checks that the two ways of passing `layer_spec` to a - `TransformerBlock` result in an identical model: - 1. ModuleSpec(module=..., submodules=...) - 2. TransformerBlockSubmodules(layer_specs=...) 
- """ - - transformer_config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - layer_local_spec = get_gpt_layer_local_spec() - - # The following way can be used to pass a different `TransformerLayer` - # and internally the `TransformerBlock` would fan out the single - # `ModuleSpec` layer spec provided to all the layers of the block. - layer_spec1 = ModuleSpec(module=TransformerLayer, submodules=layer_local_spec.submodules) - model_parallel_cuda_manual_seed(123) - torch.manual_seed(0) - parallel_transformer_block1 = TransformerBlock(transformer_config, layer_spec1) - - layer_spec2 = TransformerBlockSubmodules( - layer_specs=[ - ModuleSpec(module=TransformerLayer, submodules=layer_local_spec.submodules) - ] - * transformer_config.num_layers, - layer_norm=TENorm, - ) - # make sure the model init conditions are identical - model_parallel_cuda_manual_seed(123) - torch.manual_seed(0) - parallel_transformer_block2 = TransformerBlock(transformer_config, layer_spec2) - - sequence_length = 32 - micro_batch_size = 2 - parallel_transformer_block1.cuda() - parallel_transformer_block2.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones( - (sequence_length, micro_batch_size, transformer_config.hidden_size) - ) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - out1 = parallel_transformer_block1( - hidden_states=hidden_states, attention_mask=attention_mask - ) - out2 = parallel_transformer_block2( - hidden_states=hidden_states, attention_mask=attention_mask - ) - - assert torch.all(torch.eq(out1, out2)) - assert out1.shape[0] == sequence_length == out2.shape[0] - assert out1.shape[1] == micro_batch_size == out2.shape[1] - assert out1.shape[2] == transformer_config.hidden_size == out2.shape[2] +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ +import sys +from dataclasses import dataclass, fields + +import pytest +import torch +import transformer_engine as te + +from megatron.core.extensions.transformer_engine import ( + TEDotProductAttention, + TELayerNormColumnParallelLinear, + TENorm, + TERowParallelLinear, +) +from megatron.core.fusions.fused_bias_dropout import get_bias_dropout_add +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_local_spec +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.attention import SelfAttention, SelfAttentionSubmodules +from megatron.core.transformer.dot_product_attention import DotProductAttention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.identity_op import IdentityFuncOp, IdentityOp +from megatron.core.transformer.spec_utils import ModuleSpec, build_module, import_module +from megatron.core.transformer.transformer_block import TransformerBlock, TransformerBlockSubmodules +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules +from megatron.core.utils import is_te_min_version +from tests.unit_tests.test_utilities import Utils + + +class TestSpecCustomization: + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + + # specify Transformer Layer spec with all identity ops + self.transformer_layer_spec = TransformerLayerSubmodules() + + # specify attention spec using already imported class + self.attention_spec = ModuleSpec( + module=SelfAttention, + params={"attn_mask_type": AttnMaskType.causal}, + submodules=SelfAttentionSubmodules( + linear_qkv=TELayerNormColumnParallelLinear, + core_attention=TEDotProductAttention, + linear_proj=TERowParallelLinear, + q_layernorm=IdentityOp, + k_layernorm=IdentityOp, + ), + ) + + # specify layernorm spec with module path to test dynamic importing + self.layernorm_spec = ModuleSpec( + module=("megatron.core.extensions.transformer_engine", "TENorm") + ) + + # specify bias dropout add with module path + self.bda_spec = ModuleSpec( + module=("megatron.core.fusions.fused_bias_dropout", "get_bias_dropout_add") + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_import_module(self): + self_attention_cls = import_module( + module_path=('megatron.core.transformer.attention', 'SelfAttention') + ) + assert id(self_attention_cls) == id(SelfAttention) + + layernorm_cls = import_module(module_path=self.layernorm_spec.module) + assert id(layernorm_cls) == id(TENorm) + + def test_build_module(self): + # Check NoOp TransformerLayer + random_input = 12 + noop_transformer_layer = [ + build_module(getattr(self.transformer_layer_spec, field.name)) + for field in fields(self.transformer_layer_spec) + if field.name != 'sharded_state_dict_keys_map' + ] + + x = random_input + for mod in noop_transformer_layer: + # checking for `IdentityFuncOp` before `IdentityOp` because former + # is derived from the latter and so the second if statement will + # always be `True`. 
+ if isinstance(mod, IdentityFuncOp): + x = mod()(x) + elif isinstance(mod, IdentityOp): + x = mod(x) + + assert x == random_input + + # Check SelfAttention + self_attention = build_module(self.attention_spec, config=self.config, layer_number=1) + assert isinstance(self_attention, SelfAttention) + assert self_attention.layer_number == 1 + assert self_attention.attn_mask_type == self.attention_spec.params['attn_mask_type'] + + num_weights = sum([p.numel() for p in self_attention.parameters()]) + assert num_weights == 648 + + # Check SelfAttention but with already initialized module + # `self_attention`. In this test, `build_module` acts as a no op as it + # simply returns the initialized module. + # NOTE: (sudhakars) Uncomment this test once this feature gets added + # back. + # self_attention2 = build_module( + # self_attention, config=self.config, spec=self.attention_spec, + # ) + # assert isinstance(self_attention2, SelfAttention) + # assert self_attention2.layer_number == 1 + # assert self_attention2.attn_mask_type == self.attention_spec.params['attn_mask_type'] + + # num_weights = sum([p.numel() for p in self_attention2.parameters()]) + # assert num_weights == 648 + + # Check LayerNorm + layernorm = build_module( + self.layernorm_spec, + config=self.config, + hidden_size=self.config.hidden_size, + eps=self.config.layernorm_epsilon, + ) + assert isinstance(layernorm, te.pytorch.LayerNorm) + + # Check BiasDropoutAdd + bda_op = build_module(self.bda_spec) + assert id(bda_op) == id(get_bias_dropout_add) + + def test_sliding_window_attention(self): + if not is_te_min_version("1.2.0"): + print("SWA not tested because TE version is not >= 1.2.0", file=sys.stderr) + return + + config = TransformerConfig( + num_layers=2, + hidden_size=12, + num_attention_heads=4, + use_cpu_initialization=True, + window_size=[10, 0], + ) + # Make sure DotProductAttention throws (swa unsupported). + threw = False + try: + attn = DotProductAttention( + config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' + ) + except: + threw = True + finally: + assert threw, 'Expected DotProductAttention to throw exception for SWA' + + # Test TEDotProductAttention + attn = TEDotProductAttention( + config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' + ) + # Make sure window-size is what we expect. + assert attn.window_size == config.window_size + + # Single integer window-size unsupported, make sure it throws + threw = False + try: + config.window_size = 11 + attn = TEDotProductAttention( + config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' + ) + except: + threw = True + finally: + assert threw, "Expected TEDotProductAttention to throw for integer window-size" + + # `None` makes this causal. + config.window_size = None + attn = TEDotProductAttention( + config, layer_number=1, attn_mask_type=AttnMaskType.causal, attention_type='self' + ) + # Make sure it's causal. + assert attn.window_size == (-1, 0) + + def test_transformer_block_custom(self): + """ + This test checks that the two ways of passing `layer_spec` to a + `TransformerBlock` result in an identical model: + 1. ModuleSpec(module=..., submodules=...) + 2. TransformerBlockSubmodules(layer_specs=...) 
+ """ + + transformer_config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + layer_local_spec = get_gpt_layer_local_spec() + + # The following way can be used to pass a different `TransformerLayer` + # and internally the `TransformerBlock` would fan out the single + # `ModuleSpec` layer spec provided to all the layers of the block. + layer_spec1 = ModuleSpec(module=TransformerLayer, submodules=layer_local_spec.submodules) + model_parallel_cuda_manual_seed(123) + torch.manual_seed(0) + parallel_transformer_block1 = TransformerBlock(transformer_config, layer_spec1) + + layer_spec2 = TransformerBlockSubmodules( + layer_specs=[ + ModuleSpec(module=TransformerLayer, submodules=layer_local_spec.submodules) + ] + * transformer_config.num_layers, + layer_norm=TENorm, + ) + # make sure the model init conditions are identical + model_parallel_cuda_manual_seed(123) + torch.manual_seed(0) + parallel_transformer_block2 = TransformerBlock(transformer_config, layer_spec2) + + sequence_length = 32 + micro_batch_size = 2 + parallel_transformer_block1.cuda() + parallel_transformer_block2.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones( + (sequence_length, micro_batch_size, transformer_config.hidden_size) + ) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + out1 = parallel_transformer_block1( + hidden_states=hidden_states, attention_mask=attention_mask + ) + out2 = parallel_transformer_block2( + hidden_states=hidden_states, attention_mask=attention_mask + ) + + assert torch.all(torch.eq(out1, out2)) + assert out1.shape[0] == sequence_length == out2.shape[0] + assert out1.shape[1] == micro_batch_size == out2.shape[1] + assert out1.shape[2] == transformer_config.hidden_size == out2.shape[2] diff --git a/tests/unit_tests/transformer/test_transformer_block.py b/tests/unit_tests/transformer/test_transformer_block.py index 02702a9..e93aeb6 100644 --- a/tests/unit_tests/transformer/test_transformer_block.py +++ b/tests/unit_tests/transformer/test_transformer_block.py @@ -1,136 +1,231 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -import os - -import pytest -import torch - -from megatron.core import dist_checkpointing -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.packed_seq_params import PackedSeqParams -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_block import TransformerBlock -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import TransformerLayer -from tests.unit_tests.test_utilities import Utils - - -class TestParallelTransformerBlock: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - self.transformer_config = TransformerConfig( - num_layers=2, hidden_size=64, num_attention_heads=4, use_cpu_initialization=True - ) - self.parallel_transformer_block = TransformerBlock( - self.transformer_config, get_gpt_layer_with_transformer_engine_spec() - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - parallel_transformer_block = self.parallel_transformer_block - assert isinstance(parallel_transformer_block, TransformerBlock) - num_weights = sum([p.numel() for p in parallel_transformer_block.parameters()]) - assert num_weights == 100096 - assert parallel_transformer_block.num_layers_per_pipeline_rank == 2 - assert len(parallel_transformer_block.layers) == 2 - layer_0: TransformerLayer = parallel_transformer_block._get_layer(0) - assert layer_0.layer_number == 1 - layer_1: TransformerLayer = parallel_transformer_block._get_layer(1) - assert layer_1.layer_number == 2 - - def test_gpu_forward(self): - parallel_transformer_block = self.parallel_transformer_block - config: TransformerConfig = parallel_transformer_block.config - - sequence_length = 32 - micro_batch_size = 2 - parallel_transformer_block.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - hidden_states = parallel_transformer_block( - hidden_states=hidden_states, attention_mask=attention_mask - ) - assert hidden_states.shape[0] == sequence_length - assert hidden_states.shape[1] == micro_batch_size - assert hidden_states.shape[2] == config.hidden_size - - def test_gpu_forward_full_checkpoint(self): - self._run_full_checkpoint_test(fp8=None) - - def test_gpu_forward_full_checkpoint_fp8(self): - self._run_full_checkpoint_test(fp8="e4m3") - - def test_gpu_forward_selective_checkpoint(self): - self._run_selective_checkpoint_test(fp8=None) - - def test_gpu_forward_selective_checkpoint_fp8(self): - self._run_selective_checkpoint_test(fp8="e4m3") - - def _run_full_checkpoint_test(self, fp8): - transformer_config = self.transformer_config - config = transformer_config - config.recompute_granularity = 'full' - config.recompute_method = 'block' - config.fp8 = fp8 - config.recompute_num_layers = config.num_layers - full_transformer_block = TransformerBlock( - config, get_gpt_layer_with_transformer_engine_spec() - ) - assert full_transformer_block.config.recompute_granularity == 'full' - assert full_transformer_block.config.recompute_method == 'block' - assert full_transformer_block.config.fp8 == fp8 - - sequence_length = 32 - micro_batch_size = 2 - full_transformer_block.cuda() - - # [sequence length, batch 
size, hidden size] - hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - hidden_states = full_transformer_block( - hidden_states=hidden_states, attention_mask=attention_mask - ) - assert hidden_states.shape[0] == sequence_length - assert hidden_states.shape[1] == micro_batch_size - assert hidden_states.shape[2] == config.hidden_size - - def _run_selective_checkpoint_test(self, fp8): - transformer_config = self.transformer_config - config = transformer_config - config.recompute_granularity = 'selective' - config.fp8 = fp8 - selective_transformer_block = TransformerBlock( - config, get_gpt_layer_with_transformer_engine_spec() - ) - assert selective_transformer_block.config.recompute_granularity == 'selective' - assert selective_transformer_block.checkpoint_core_attention - assert selective_transformer_block.config.fp8 == fp8 - - sequence_length = 32 - micro_batch_size = 2 - selective_transformer_block.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - hidden_states = selective_transformer_block( - hidden_states=hidden_states, attention_mask=attention_mask - ) - assert hidden_states.shape[0] == sequence_length - assert hidden_states.shape[1] == micro_batch_size - assert hidden_states.shape[2] == config.hidden_size +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +from contextlib import nullcontext + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_block import TransformerBlock, get_num_layers_to_build +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import TransformerLayer +from tests.unit_tests.test_utilities import Utils + + +class TestParallelTransformerBlock: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + self.transformer_config = TransformerConfig( + num_layers=2, hidden_size=64, num_attention_heads=4, use_cpu_initialization=True + ) + self.parallel_transformer_block = TransformerBlock( + self.transformer_config, get_gpt_layer_with_transformer_engine_spec() + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + parallel_transformer_block = self.parallel_transformer_block + assert isinstance(parallel_transformer_block, TransformerBlock) + num_weights = sum([p.numel() for p in parallel_transformer_block.parameters()]) + assert num_weights == 100096 + assert parallel_transformer_block.num_layers_per_pipeline_rank == 2 + assert len(parallel_transformer_block.layers) == 2 + layer_0: TransformerLayer = parallel_transformer_block._get_layer(0) + assert layer_0.layer_number == 1 + layer_1: TransformerLayer = parallel_transformer_block._get_layer(1) + assert layer_1.layer_number == 2 + + def test_gpu_forward(self): + parallel_transformer_block = self.parallel_transformer_block + config: TransformerConfig = parallel_transformer_block.config + + 
sequence_length = 32 + micro_batch_size = 2 + parallel_transformer_block.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + hidden_states = parallel_transformer_block( + hidden_states=hidden_states, attention_mask=attention_mask + ) + assert hidden_states.shape[0] == sequence_length + assert hidden_states.shape[1] == micro_batch_size + assert hidden_states.shape[2] == config.hidden_size + + def test_gpu_forward_full_checkpoint(self): + self._run_full_checkpoint_test(fp8=None) + + def test_gpu_forward_full_checkpoint_fp8(self): + self._run_full_checkpoint_test(fp8="e4m3") + + def test_gpu_forward_selective_checkpoint(self): + self._run_selective_checkpoint_test(fp8=None) + + def test_gpu_forward_selective_checkpoint_fp8(self): + self._run_selective_checkpoint_test(fp8="e4m3") + + def _run_full_checkpoint_test(self, fp8): + transformer_config = self.transformer_config + config = transformer_config + config.recompute_granularity = 'full' + config.recompute_method = 'block' + config.fp8 = fp8 + config.recompute_num_layers = config.num_layers + full_transformer_block = TransformerBlock( + config, get_gpt_layer_with_transformer_engine_spec() + ) + assert full_transformer_block.config.recompute_granularity == 'full' + assert full_transformer_block.config.recompute_method == 'block' + assert full_transformer_block.config.fp8 == fp8 + + sequence_length = 32 + micro_batch_size = 2 + full_transformer_block.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + hidden_states = full_transformer_block( + hidden_states=hidden_states, attention_mask=attention_mask + ) + assert hidden_states.shape[0] == sequence_length + assert hidden_states.shape[1] == micro_batch_size + assert hidden_states.shape[2] == config.hidden_size + + def _run_selective_checkpoint_test(self, fp8): + transformer_config = self.transformer_config + config = transformer_config + config.recompute_granularity = 'selective' + config.fp8 = fp8 + selective_transformer_block = TransformerBlock( + config, get_gpt_layer_with_transformer_engine_spec() + ) + assert selective_transformer_block.config.recompute_granularity == 'selective' + assert selective_transformer_block.checkpoint_core_attention + assert selective_transformer_block.config.fp8 == fp8 + + sequence_length = 32 + micro_batch_size = 2 + selective_transformer_block.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + hidden_states = selective_transformer_block( + hidden_states=hidden_states, attention_mask=attention_mask + ) + assert hidden_states.shape[0] == sequence_length + assert hidden_states.shape[1] == micro_batch_size + assert hidden_states.shape[2] == config.hidden_size + + +class TestPipelineParallelTransformerBlock: + @pytest.mark.parametrize( + "num_layers, pipeline_model_parallel_size, virtual_pipeline_model_parallel_size, " + "include_embedding_in_pipeline_split, include_loss_in_pipeline_split, " + 
"first_pipeline_num_layers, last_pipeline_num_layers, should_assert_error", + [ + # Last pipeline stage has specified layers + (60, 5, None, False, False, None, 4, False), + # Uneven PP 6*[8]+[6]+[6]=60 + (60, 8, None, False, False, 6, 6, False), + # Even PP + (64, 4, None, False, False, None, None, False), + # Even VPP + (64, 4, 8, False, False, None, None, False), + # First pipeline stage has specified layers + # Should distribute remaining layers evenly among other stages + (60, 6, None, False, False, 5, None, False), + # Uneven distribution leading to assertion error + (101, 8, None, False, False, 13, 13, True), + # Include embedding in pipeline split without virtual PP + (63, 4, None, True, False, None, None, False), + # Include loss in pipeline split without virtual PP + (63, 4, None, False, True, None, None, False), + # Include embedding and loss in pipeline split without virtual PP + (62, 4, None, True, True, None, None, False), + # Include embedding and loss with virtual PP + (62, 4, 2, True, True, None, None, False), + # num_layers not divisible by pipeline size without embedding/loss + (65, 4, None, False, False, None, None, True), + # num_layers not divisible by pipeline size with embedding/loss + (65, 4, None, True, True, None, None, True), + # Uneven distribution with specified first pipeline layers causing error + (61, 4, None, False, False, 12, None, True), + # Too few layers for the number of pipeline stages + (2, 4, None, False, False, None, None, True), + # Uneven PP with embedding included (should assert per code) + (60, 6, None, True, False, 5, 5, True), + # Virtual PP where num_layers not divisible by total virtual stages + (50, 2, 7, False, False, None, None, True), + # Edge case where num_layers per virtual rank is zero + (4, 4, 4, False, False, None, None, True), + ], + ) + @pytest.mark.flaky + @pytest.mark.flaky_in_dev + def test_layer_builder( + self, + num_layers, + pipeline_model_parallel_size, + virtual_pipeline_model_parallel_size, + include_embedding_in_pipeline_split, + include_loss_in_pipeline_split, + first_pipeline_num_layers, + last_pipeline_num_layers, + should_assert_error, + ): + Utils.fake_initialize_model_parallel( + tensor_model_parallel_size=1, + pipeline_model_parallel_size=pipeline_model_parallel_size, + virtual_pipeline_model_parallel_size=virtual_pipeline_model_parallel_size, + ) + context = ( + pytest.raises((AssertionError, ValueError)) if should_assert_error else nullcontext() + ) + with context: + transformer_config = TransformerConfig( + num_layers=num_layers, + pipeline_model_parallel_size=pipeline_model_parallel_size, + virtual_pipeline_model_parallel_size=virtual_pipeline_model_parallel_size, + include_embedding_in_pipeline_split=include_embedding_in_pipeline_split, + include_loss_in_pipeline_split=include_loss_in_pipeline_split, + first_pipeline_num_layers=first_pipeline_num_layers, + last_pipeline_num_layers=last_pipeline_num_layers, + pipeline_dtype=torch.bfloat16, + hidden_size=128, + num_attention_heads=16, + ) + total_build_layers = 0 + for i in range(pipeline_model_parallel_size): + parallel_state.set_pipeline_model_parallel_rank(i) + if virtual_pipeline_model_parallel_size is not None: + for j in range(virtual_pipeline_model_parallel_size): + parallel_state.set_virtual_pipeline_model_parallel_rank(j) + num_layers_to_build = get_num_layers_to_build(transformer_config) + total_build_layers += num_layers_to_build + else: + num_layers_to_build = get_num_layers_to_build(transformer_config) + total_build_layers += 
num_layers_to_build + if not should_assert_error: + assert ( + total_build_layers == num_layers + ), f"total build layers {total_build_layers} should be equal to num_layers {num_layers}" + parallel_state.set_pipeline_model_parallel_world_size(None) + parallel_state.set_virtual_pipeline_model_parallel_world_size(None) diff --git a/tests/unit_tests/transformer/test_transformer_layer.py b/tests/unit_tests/transformer/test_transformer_layer.py index ad8d3ea..886cac2 100644 --- a/tests/unit_tests/transformer/test_transformer_layer.py +++ b/tests/unit_tests/transformer/test_transformer_layer.py @@ -1,115 +1,122 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - - -import pytest -import torch - -from megatron.core import parallel_state -from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedTensor -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec -from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed -from megatron.core.transformer.transformer_config import TransformerConfig -from megatron.core.transformer.transformer_layer import TransformerLayer -from tests.unit_tests.test_utilities import Utils - - -class TestParallelTransformerLayer: - - def setup_method(self, method): - Utils.initialize_model_parallel(1, 1) - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True - ) - self.parallel_transformer_layer = TransformerLayer( - transformer_config, get_gpt_layer_with_transformer_engine_spec().submodules - ) - - def teardown_method(self, method): - Utils.destroy_model_parallel() - - def test_constructor(self): - parallel_transformer_layer = self.parallel_transformer_layer - assert isinstance(parallel_transformer_layer, TransformerLayer) - assert parallel_transformer_layer.layer_number == 1 - - num_weights = sum([p.numel() for p in parallel_transformer_layer.parameters()]) - assert num_weights == 1884 - - def test_gpu_forward(self): - parallel_transformer_layer = self.parallel_transformer_layer - config: TransformerConfig = parallel_transformer_layer.config - sequence_length = 32 - micro_batch_size = 2 - parallel_transformer_layer.cuda() - - # [sequence length, batch size, hidden size] - hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) - hidden_states = hidden_states.cuda() - - attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() - - hidden_states, context = parallel_transformer_layer( - hidden_states=hidden_states, attention_mask=attention_mask - ) - assert hidden_states.shape[0] == sequence_length - assert hidden_states.shape[1] == micro_batch_size - assert hidden_states.shape[2] == config.hidden_size - - @pytest.mark.parametrize('order', ['tp-pp-dp', 'tp-dp-pp']) - @pytest.mark.parametrize('tp_pp', [(4, 2), (1, 1), (8, 1), (2, 2)]) - def test_sharded_state_dict(self, tp_pp, order): - Utils.destroy_model_parallel() - Utils.initialize_model_parallel(*tp_pp, order=order) - - model_parallel_cuda_manual_seed(123) - transformer_config = TransformerConfig( - num_layers=2, hidden_size=128, num_attention_heads=8, use_cpu_initialization=True - ) - parallel_transformer_layer = TransformerLayer( - transformer_config, get_gpt_layer_with_transformer_engine_spec().submodules - ) - - sharded_state_dict = parallel_transformer_layer.sharded_state_dict() - - extra_states = {k: v for k, v in sharded_state_dict.items() if 
k.endswith('extra_state')} - sharded_tensors = { - k: v for k, v in sharded_state_dict.items() if not k.endswith('extra_state') - } - assert all(isinstance(t, ShardedObject) for t in extra_states.values()) - assert all(isinstance(t, ShardedTensor) for t in sharded_tensors.values()) - - # Test all local shapes - tensor_local_shapes = {k: v.local_shape for k, v in sharded_tensors.items()} - tp_size = parallel_state.get_tensor_model_parallel_world_size() - assert tensor_local_shapes == get_tensor_shapes_for_tp(transformer_config, tp_size) - - # Test all global shapes. Prepend num layers in front of expected shapes - tensor_global_shapes = {k: v.global_shape for k, v in sharded_tensors.items()} - expected_global_shapes = get_tensor_shapes_for_tp(transformer_config, 1) - assert tensor_global_shapes == expected_global_shapes - - # Test ShardedTensor keys - for state_dict_key, sh_ten in sharded_tensors.items(): - assert state_dict_key == sh_ten.key - - Utils.destroy_model_parallel() - Utils.initialize_model_parallel(1, 1) - - -def get_tensor_shapes_for_tp(transformer_config, tp_size): - hs = transformer_config.hidden_size - return { - 'mlp.linear_fc1.layer_norm_weight': (hs,), - 'mlp.linear_fc1.layer_norm_bias': (hs,), - 'mlp.linear_fc1.weight': (hs * 4 // tp_size, hs), - 'mlp.linear_fc1.bias': (hs * 4 // tp_size,), - 'mlp.linear_fc2.weight': (hs, hs * 4 // tp_size), - 'mlp.linear_fc2.bias': (hs,), - 'self_attention.linear_proj.weight': (hs, hs // tp_size), - 'self_attention.linear_proj.bias': (hs,), - 'self_attention.linear_qkv.layer_norm_weight': (hs,), - 'self_attention.linear_qkv.layer_norm_bias': (hs,), - 'self_attention.linear_qkv.weight': (hs * 3 // tp_size, hs), - 'self_attention.linear_qkv.bias': (hs * 3 // tp_size,), - } +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+ + +import pytest +import torch + +from megatron.core import parallel_state +from megatron.core.dist_checkpointing.mapping import ShardedObject, ShardedTensor +from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.transformer_layer import ( + TransformerLayer, + get_transformer_layer_offset, +) +from tests.unit_tests.test_utilities import Utils + + +class TestParallelTransformerLayer: + + def setup_method(self, method): + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=2, hidden_size=12, num_attention_heads=4, use_cpu_initialization=True + ) + self.parallel_transformer_layer = TransformerLayer( + transformer_config, get_gpt_layer_with_transformer_engine_spec().submodules + ) + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + def test_constructor(self): + parallel_transformer_layer = self.parallel_transformer_layer + assert isinstance(parallel_transformer_layer, TransformerLayer) + assert parallel_transformer_layer.layer_number == 1 + + num_weights = sum([p.numel() for p in parallel_transformer_layer.parameters()]) + assert num_weights == 1884 + + def test_gpu_forward(self): + parallel_transformer_layer = self.parallel_transformer_layer + config: TransformerConfig = parallel_transformer_layer.config + sequence_length = 32 + micro_batch_size = 2 + parallel_transformer_layer.cuda() + + # [sequence length, batch size, hidden size] + hidden_states = torch.ones((sequence_length, micro_batch_size, config.hidden_size)) + hidden_states = hidden_states.cuda() + + attention_mask = torch.ones((1, 1, sequence_length, sequence_length), dtype=bool).cuda() + + hidden_states, context = parallel_transformer_layer( + hidden_states=hidden_states, attention_mask=attention_mask + ) + assert hidden_states.shape[0] == sequence_length + assert hidden_states.shape[1] == micro_batch_size + assert hidden_states.shape[2] == config.hidden_size + + def test_get_layer_offset(self): + config = self.parallel_transformer_layer.config + assert get_transformer_layer_offset(config) == 0 + + @pytest.mark.parametrize('order', ['tp-pp-dp', 'tp-dp-pp']) + @pytest.mark.parametrize('tp_pp', [(4, 2), (1, 1), (8, 1), (2, 2)]) + def test_sharded_state_dict(self, tp_pp, order): + Utils.destroy_model_parallel() + Utils.initialize_model_parallel(*tp_pp, order=order) + + model_parallel_cuda_manual_seed(123) + transformer_config = TransformerConfig( + num_layers=2, hidden_size=128, num_attention_heads=8, use_cpu_initialization=True + ) + parallel_transformer_layer = TransformerLayer( + transformer_config, get_gpt_layer_with_transformer_engine_spec().submodules + ) + + sharded_state_dict = parallel_transformer_layer.sharded_state_dict() + + extra_states = {k: v for k, v in sharded_state_dict.items() if k.endswith('extra_state')} + sharded_tensors = { + k: v for k, v in sharded_state_dict.items() if not k.endswith('extra_state') + } + assert all(isinstance(t, ShardedObject) for t in extra_states.values()) + assert all(isinstance(t, ShardedTensor) for t in sharded_tensors.values()) + + # Test all local shapes + tensor_local_shapes = {k: v.local_shape for k, v in sharded_tensors.items()} + tp_size = parallel_state.get_tensor_model_parallel_world_size() + assert tensor_local_shapes == 
get_tensor_shapes_for_tp(transformer_config, tp_size) + + # Test all global shapes. Prepend num layers in front of expected shapes + tensor_global_shapes = {k: v.global_shape for k, v in sharded_tensors.items()} + expected_global_shapes = get_tensor_shapes_for_tp(transformer_config, 1) + assert tensor_global_shapes == expected_global_shapes + + # Test ShardedTensor keys + for state_dict_key, sh_ten in sharded_tensors.items(): + assert state_dict_key == sh_ten.key + + Utils.destroy_model_parallel() + Utils.initialize_model_parallel(1, 1) + + +def get_tensor_shapes_for_tp(transformer_config, tp_size): + hs = transformer_config.hidden_size + return { + 'mlp.linear_fc1.layer_norm_weight': (hs,), + 'mlp.linear_fc1.layer_norm_bias': (hs,), + 'mlp.linear_fc1.weight': (hs * 4 // tp_size, hs), + 'mlp.linear_fc1.bias': (hs * 4 // tp_size,), + 'mlp.linear_fc2.weight': (hs, hs * 4 // tp_size), + 'mlp.linear_fc2.bias': (hs,), + 'self_attention.linear_proj.weight': (hs, hs // tp_size), + 'self_attention.linear_proj.bias': (hs,), + 'self_attention.linear_qkv.layer_norm_weight': (hs,), + 'self_attention.linear_qkv.layer_norm_bias': (hs,), + 'self_attention.linear_qkv.weight': (hs * 3 // tp_size, hs), + 'self_attention.linear_qkv.bias': (hs * 3 // tp_size,), + } diff --git a/tools/autoformat.sh b/tools/autoformat.sh old mode 100644 new mode 100755 index ecec87e..d62f146 --- a/tools/autoformat.sh +++ b/tools/autoformat.sh @@ -1,39 +1,39 @@ -#!/bin/bash -set -euox pipefail - -GIT_VERSION=$(git version | awk '{print $3}') -GIT_MAJOR=$(echo $GIT_VERSION | awk -F. '{print $1}') -GIT_MINOR=$(echo $GIT_VERSION | awk -F. '{print $2}') - -if [[ $GIT_MAJOR -eq 2 && $GIT_MINOR -lt 31 ]]; then - echo "Git version must be at least 2.31.0. Found $GIT_VERSION" - exit 1 -fi - -SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) -CHECK_ONLY=${CHECK_ONLY:-false} -SKIP_DOCS=${SKIP_DOCS:-false} - -BASE_REF=${BASE_REF:-main} -CHANGED_FILES=$(git diff --name-only --diff-filter=d --merge-base origin/${BASE_REF} megatron/core tests/ | grep '\.py$' || true) -ADDITIONAL_ARGS="" -ADDITIONAL_BLACK_ARGS="" -ADDITIONAL_PYLINT_ARGS="" - -if [[ $CHECK_ONLY == true ]]; then - ADDITIONAL_ARGS="--check" - ADDITIONAL_BLACK_ARGS="--diff" -fi - -if [[ $SKIP_DOCS == true ]]; then - ADDITIONAL_PYLINT_ARGS="--disable=C0115,C0116" -fi - -if [[ -n "$CHANGED_FILES" ]]; then - black --skip-magic-trailing-comma $ADDITIONAL_ARGS $ADDITIONAL_BLACK_ARGS --verbose $CHANGED_FILES - isort $ADDITIONAL_ARGS $CHANGED_FILES - pylint $ADDITIONAL_PYLINT_ARGS $CHANGED_FILES - mypy --explicit-package-bases --follow-imports=skip $CHANGED_FILES || true -else - echo Changeset is empty, all good. -fi +#!/bin/bash +set -euox pipefail + +GIT_VERSION=$(git version | awk '{print $3}') +GIT_MAJOR=$(echo $GIT_VERSION | awk -F. '{print $1}') +GIT_MINOR=$(echo $GIT_VERSION | awk -F. '{print $2}') + +if [[ $GIT_MAJOR -eq 2 && $GIT_MINOR -lt 31 ]]; then + echo "Git version must be at least 2.31.0. 
Found $GIT_VERSION" + exit 1 +fi + +SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) +CHECK_ONLY=${CHECK_ONLY:-false} +SKIP_DOCS=${SKIP_DOCS:-false} + +BASE_REF=${BASE_REF:-main} +CHANGED_FILES=$(git diff --name-only --diff-filter=d --merge-base origin/${BASE_REF} megatron/core tests/ | grep '\.py$' || true) +ADDITIONAL_ARGS="" +ADDITIONAL_BLACK_ARGS="" +ADDITIONAL_PYLINT_ARGS="" + +if [[ $CHECK_ONLY == true ]]; then + ADDITIONAL_ARGS="--check" + ADDITIONAL_BLACK_ARGS="--diff" +fi + +if [[ $SKIP_DOCS == true ]]; then + ADDITIONAL_PYLINT_ARGS="--disable=C0115,C0116" +fi + +if [[ -n "$CHANGED_FILES" ]]; then + black --skip-magic-trailing-comma $ADDITIONAL_ARGS $ADDITIONAL_BLACK_ARGS --verbose $CHANGED_FILES + isort $ADDITIONAL_ARGS $CHANGED_FILES + pylint $ADDITIONAL_PYLINT_ARGS $CHANGED_FILES + mypy --explicit-package-bases --follow-imports=skip $CHANGED_FILES || true +else + echo Changeset is empty, all good. +fi diff --git a/tools/checkpoint/convert.py b/tools/checkpoint/convert.py index 3fffa2f..74b4b09 100644 --- a/tools/checkpoint/convert.py +++ b/tools/checkpoint/convert.py @@ -1,157 +1,170 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -import argparse -import importlib -import torch.multiprocessing as mp -import sys - -# A loader is a python file with at least two functions -# - add_arguments - takes in a parser and adds any arguments needed -# - load_checkpoint - takes in the queue and parsed arguments - -# A saver is similar but has save_checkpoint instead of -# load_checkpoint - -# The loader and saver process are each given a queue, the loader -# should load the checkpoint and send the weights in messages in the -# following order, the saver should receive them in this order and -# save the checkpoints. A message consists of a python dictionary with -# a "name" for error checking and an entry for each tensor as -# indicated below. Note that the weight sent over the queue are the -# full model weights, nothing split. - -# If the loader ever sends "exit" to the queue, that means something -# went wrong and it is exiting. - -# - Metadata Namespace with the following attributes: -# model_type - GPT, BERT, T5, etc. (Part of protocol to allow this to be deduced later instead of given on command line) -# num_layers - Number of transformer layers -# hidden_size -# seq_length -# num_attention_heads -# max_position_embeddings -# tokenizer_type -# iteration -# params_dtype -# bert_binary_head - Used only if model_type is BERT -# previous_tensor_parallel_size - Optional -# previous_pipeline_parallel_size - Optional -# true_vocab_size -# make_vocab_size_divisble_by -# consumed_train_samples -# consumed_valid_samples -# messages -# { -# "name": "embeddings" -# "position embeddings" -# "word embeddings" -# } -# (for each transformer layer): -# { -# "name": "transformer layer N" -# "input norm weight" -# "input norm bias" -# "qkv weight" -# "qkv bias" -# "dense weight" -# "dense bias" -# "post norm weight" -# "post norm bias" -# "mlp l0 weight" -# "mlp l0 bias" -# "mlp l1 weight" -# "mlp l1 bias" -# } -# { -# "name": "final layer norm" -# "weight" -# "bias" -# } -# if present (i.e. 
for BERT): -# { -# "name": "pooler" -# "weight" -# "bias" -# } -# { -# "name": "lm head" -# "dense weight" -# "dense bias" -# "norm weight" -# "norm bias" -# } -# { -# "name": "binary head" -# "weight" -# "bias" -# } -# - "done" - -def load_plugin(plugin_type, name): - module_name = f"{plugin_type}_{name}" - try: - plugin = importlib.import_module(module_name) - except ModuleNotFoundError as e: - print(e) - module_name = name - try: - plugin = importlib.import_module(module_name) - except ModuleNotFoundError as e: - print(e) - sys.exit(f"Unable to load {plugin_type} plugin {name}. Exiting.") - - if not hasattr(plugin, 'add_arguments'): - sys.exit(f"{module_name} module is not a plugin. Exiting.") - - print(f"Loaded {module_name} as the {plugin_type}.") - return plugin - -def main(): - import argparse - parser = argparse.ArgumentParser(description="Megatron Checkpoint Converter Arguments", - allow_abbrev=False, conflict_handler='resolve') - - parser.add_argument('--model-type', type=str, required=True, - choices=['GPT', 'BERT'], - help='Type of the model') - parser.add_argument('--loader', type=str, default='megatron', - help='Module name to load checkpoint, should be on python path') - parser.add_argument('--saver', type=str, default='megatron', - help='Module name to save checkpoint, should be on python path') - parser.add_argument('--load-dir', type=str, required=True, - help='Directory to load model checkpoint from') - parser.add_argument('--save-dir', type=str, required=True, - help='Directory to save model checkpoint to') - parser.add_argument('--max-queue-size', type=int, default=50, - help='Maximum number of tensors in the queue') - parser.add_argument('--no-checking', action='store_false', - help='Do not perform checking on the name and ordering of weights', - dest='checking') - - known_args, _ = parser.parse_known_args() - loader = load_plugin('loader', known_args.loader) - saver = load_plugin('saver', known_args.saver) - - loader.add_arguments(parser) - saver.add_arguments(parser) - - args = parser.parse_args() - - ctx = mp.get_context("spawn") - queue = ctx.Queue(maxsize=args.max_queue_size) - # queue = mp.Queue(maxsize=args.max_queue_size) - - print("Starting saver...") - saver_proc = ctx.Process(target=saver.save_checkpoint, args=(queue, args)) - # saver_proc = mp.Process(target=saver.save_checkpoint, args=(queue, args)) - saver_proc.start() - - print("Starting loader...") - loader.load_checkpoint(queue, args) - - print("Waiting for saver to complete...") - saver_proc.join() - - -if __name__ == '__main__': - main() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import argparse +import importlib +import torch.multiprocessing as mp +import sys + +# A loader is a python file with at least two functions +# - add_arguments - takes in a parser and adds any arguments needed +# - load_checkpoint - takes in the queue and parsed arguments + +# A saver is similar but has save_checkpoint instead of +# load_checkpoint + +# The loader and saver process are each given a queue, the loader +# should load the checkpoint and send the weights in messages in the +# following order, the saver should receive them in this order and +# save the checkpoints. A message consists of a python dictionary with +# a "name" for error checking and an entry for each tensor as +# indicated below. Note that the weight sent over the queue are the +# full model weights, nothing split. + +# If the loader ever sends "exit" to the queue, that means something +# went wrong and it is exiting. 
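+#
+# Illustrative sketch (editorial addition, not part of the upstream file;
+# the attribute values and the `word_emb` tensor below are placeholders):
+# a minimal loader plugin following this protocol could look roughly like
+#
+#     import types
+#
+#     def add_arguments(parser):
+#         parser.add_argument('--example-flag', type=int, default=0)
+#
+#     def load_checkpoint(queue, args):
+#         # First send the metadata namespace described below.
+#         md = types.SimpleNamespace(model_type='GPT', num_layers=2)
+#         queue.put(md)
+#         # Then send one dict message per named group, each carrying the
+#         # full (unsplit) weights, e.g.:
+#         queue.put({"name": "embeddings", "word embeddings": word_emb})
+#         # ... followed by the remaining named messages listed below,
+#         # one "transformer layer N" message per layer, and finally:
+#         queue.put("done")
+#
+# The matching saver module exposes add_arguments(parser) and
+# save_checkpoint(queue, args) and consumes the messages in the same order.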
+ +# - Metadata Namespace with the following attributes: +# model_type - GPT, BERT, T5, etc. (Part of protocol to allow this to be deduced later instead of given on command line) +# num_layers - Number of transformer layers +# hidden_size +# seq_length +# num_attention_heads +# max_position_embeddings +# tokenizer_type +# iteration +# params_dtype +# bert_binary_head - Used only if model_type is BERT +# previous_tensor_parallel_size - Optional +# previous_pipeline_parallel_size - Optional +# true_vocab_size +# make_vocab_size_divisble_by +# consumed_train_samples +# consumed_valid_samples +# messages +# { +# "name": "embeddings" +# "position embeddings" +# "word embeddings" +# } +# (for each transformer layer): +# { +# "name": "transformer layer N" +# "input norm weight" +# "input norm bias" +# "qkv weight" +# "qkv bias" +# "dense weight" +# "dense bias" +# "post norm weight" +# "post norm bias" +# "mlp l0 weight" +# "mlp l0 bias" +# "mlp l1 weight" +# "mlp l1 bias" +# } +# { +# "name": "final layer norm" +# "weight" +# "bias" +# } +# if present (i.e. for BERT): +# { +# "name": "pooler" +# "weight" +# "bias" +# } +# { +# "name": "lm head" +# "dense weight" +# "dense bias" +# "norm weight" +# "norm bias" +# } +# { +# "name": "binary head" +# "weight" +# "bias" +# } +# - "done" + +def load_plugin(plugin_type, name): + module_name = f"{plugin_type}_{name}" + try: + plugin = importlib.import_module(module_name) + except ModuleNotFoundError as e: + print(e) + module_name = name + try: + plugin = importlib.import_module(module_name) + except ModuleNotFoundError as e: + print(e) + sys.exit(f"Unable to load {plugin_type} plugin {name}. Exiting.") + + if not hasattr(plugin, 'add_arguments'): + sys.exit(f"{module_name} module is not a plugin. Exiting.") + + print(f"Loaded {module_name} as the {plugin_type}.") + return plugin + +def main(): + import argparse + parser = argparse.ArgumentParser(description="Megatron Checkpoint Converter Arguments", + allow_abbrev=False, conflict_handler='resolve') + + parser.add_argument('--model-type', type=str, required=True, + choices=['GPT', 'BERT'], + help='Type of the model') + parser.add_argument('--loader', type=str, default='megatron', + help='Module name to load checkpoint, should be on python path') + parser.add_argument('--saver', type=str, default='megatron', + help='Module name to save checkpoint, should be on python path') + parser.add_argument('--load-dir', type=str, required=True, + help='Directory to load model checkpoint from') + parser.add_argument('--save-dir', type=str, required=True, + help='Directory to save model checkpoint to') + parser.add_argument('--max-queue-size', type=int, default=50, + help='Maximum number of tensors in the queue') + parser.add_argument('--no-checking', action='store_false', + help='Do not perform checking on the name and ordering of weights', + dest='checking') + + known_args, _ = parser.parse_known_args() + + # Handle old arg values. + def update_loader_saver(key): + old_value = getattr(known_args, key) + if old_value == "megatron": + setattr(known_args, key, "legacy") + if old_value == "mcore": + setattr(known_args, key, "core") + update_loader_saver("loader") + update_loader_saver("saver") + + # Load loader/saver plugins. + loader = load_plugin('loader', known_args.loader) + saver = load_plugin('saver', known_args.saver) + + # Parser loader/saver args. 
+ loader.add_arguments(parser) + saver.add_arguments(parser) + args = parser.parse_args() + + # Initialize queue + queue = mp.Queue(maxsize=args.max_queue_size) + + # Start saver process. + print("Starting saver...") + saver_proc = mp.Process(target=saver.save_checkpoint, args=(queue, args)) + saver_proc.start() + + # Run loader. + print("Starting loader...") + loader.load_checkpoint(queue, args) + + # Finish saver process. + print("Waiting for saver to complete...") + saver_proc.join() + + +if __name__ == '__main__': + main() diff --git a/tools/checkpoint/loader_mcore.py b/tools/checkpoint/loader_core.py similarity index 97% rename from tools/checkpoint/loader_mcore.py rename to tools/checkpoint/loader_core.py index 9185969..84e3e4c 100644 --- a/tools/checkpoint/loader_mcore.py +++ b/tools/checkpoint/loader_core.py @@ -1,404 +1,404 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import json -import os -import sys -import torch -import types - -from schema_mcore import get_model_schema -from utils import print_memory_usage - - -def add_arguments(parser): - group = parser.add_argument_group(title='Megatron loader') - - group.add_argument('--true-vocab-size', type=int, default=None, - help='original size of vocab, if specified will trim padding from embedding table.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file. If specified will use this to get vocab size and ' - 'trim padding from the embedding table.') - group.add_argument('--megatron-path', type=str, default=None, - help='Base directory of Megatron repository') - group.add_argument('--position-embedding-type', - type=str, - default='learned_absolute', - choices=['learned_absolute', 'rope'], - help='Position embedding type.') - group.add_argument('--loader-transformer-impl', default='transformer_engine', - choices=['local', 'transformer_engine'], - help='Which Transformer implementation to use.') - - -def _load_checkpoint(queue, args): - - # Search in directory above this - sys.path.append(os.path.abspath( - os.path.join(os.path.dirname(__file__), - os.path.pardir))) - if args.megatron_path is not None: - sys.path.insert(0, args.megatron_path) - - try: - from megatron.training.arguments import parse_args, validate_args - from megatron.training.global_vars import set_args, set_global_variables - from megatron.training.checkpointing import load_args_from_checkpoint, load_checkpoint - from megatron.legacy.model import module - from megatron.core import mpu - from megatron.core.enums import ModelType - from megatron.legacy import fused_kernels - except ModuleNotFoundError: - print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. 
Exiting.") - queue.put("exit") - exit(1) - - # We want all arguments to come from us - sys.argv = ['script.py', - '--no-masked-softmax-fusion', - '--no-bias-gelu-fusion', - '--no-bias-dropout-fusion', - '--no-async-tensor-model-parallel-allreduce', - '--use-cpu-initialization', - '--micro-batch-size', '1', - '--no-load-optim', - '--no-load-rng', - '--no-save-optim', - '--no-save-rng', - '--no-initialization', - '--mock-data', # To pass the "blend data checks" in arguments.py - '--load', args.load_dir, - '--position-embedding-type', args.position_embedding_type, - '--exit-on-missing-checkpoint', - '--no-one-logger', - ] - - margs = parse_args() - margs, checkpoint_args = load_args_from_checkpoint(margs) - - # Arguments do sanity checks on the world size, but we don't care, - # so trick it into thinking we are plenty of processes - margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size - - # Explicitly copy data types from checkpoint. - margs.fp16 = checkpoint_args.fp16 - margs.bf16 = checkpoint_args.bf16 - - # Expert parallelism requires sequence parallelism. - if margs.expert_model_parallel_size > 1: - margs.sequence_parallel = True - - # Validate margs. - margs = validate_args(margs) - - margs.use_legacy_models = False - margs.transformer_impl = args.loader_transformer_impl - - def check_for_arg(arg_name, default=None): - if getattr(margs, arg_name, None) is None: - if default is not None: - setattr(margs, arg_name, default) - else: - print(f"Checkpoint does not specify the argument {arg_name}. Exiting.") - print(f"Arguments: {margs}") - queue.put("exit") - exit(1) - - check_for_arg('tensor_model_parallel_size') - check_for_arg('pipeline_model_parallel_size') - check_for_arg('num_layers') - check_for_arg('hidden_size') - check_for_arg('seq_length') - check_for_arg('num_attention_heads') - check_for_arg('max_position_embeddings') - check_for_arg('position_embedding_type') - check_for_arg('tokenizer_type') - check_for_arg('iteration') - check_for_arg('bert_binary_head') - check_for_arg('disable_bias_linear', False) - check_for_arg('params_dtype') - check_for_arg('swiglu', False) - - # Determine how to make our models - if args.model_type == 'GPT': - from pretrain_gpt import model_provider - margs.model_type = ModelType.encoder_or_decoder - elif args.model_type == 'BERT': - from pretrain_bert import model_provider - margs.model_type = ModelType.encoder_or_decoder - else: - raise Exception(f'unrecognized model type: {args.model_type}') - - # supress warning about torch.distributed not being initialized - module.MegatronModule.embedding_warning_printed = True - - consumed_train_samples = None - consumed_valid_samples = None - def get_models(count, dtype): - nonlocal consumed_train_samples - nonlocal consumed_valid_samples - model_array_len = margs.virtual_pipeline_model_parallel_size - if model_array_len is None: - model_array_len = 1 - models = [[] for _ in range(model_array_len)] - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - for rank in range(count): - mpu.set_tensor_model_parallel_rank(rank) - if margs.virtual_pipeline_model_parallel_size is not None: - model_ = [] - for i in range(margs.virtual_pipeline_model_parallel_size): - mpu.set_virtual_pipeline_model_parallel_rank(i) - # Set pre_process and post_process only after virtual rank is set. 
- pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - this_model = model_provider( - pre_process=pre_process, - post_process=post_process - ).to(dtype) - model_.append(this_model) - else: - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - model_rank = 0 - model_ = [model_provider(pre_process, post_process).to(dtype)] - margs.consumed_train_samples = 0 - margs.consumed_valid_samples = 0 - margs.exit_on_missing_checkpoint = True - load_checkpoint(model_, None, None) - - if consumed_train_samples is not None: - assert(margs.consumed_train_samples == consumed_train_samples) - else: - consumed_train_samples = margs.consumed_train_samples - if consumed_valid_samples is not None: - assert(margs.consumed_valid_samples == consumed_valid_samples) - else: - consumed_valid_samples = margs.consumed_valid_samples - for vp_rank in range(model_array_len): - models[vp_rank].append(model_[vp_rank]) - - # Print memory usage. - print_memory_usage("loader", rank, count) - - return models - - set_global_variables(margs, build_tokenizer=False) - mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) - mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) - mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) - mpu.set_expert_model_parallel_world_size(margs.expert_model_parallel_size) - fused_kernels.load(margs) - - # Get true (non-padded) vocab size - if args.true_vocab_size is not None: - true_vocab_size = args.true_vocab_size - elif args.vocab_file is not None: - vocab = json.load(open(args.vocab_file)) - true_vocab_size = len(vocab) - if args.true_vocab_size is not None and true_vocab_size != args.true_vocab_size: - print("Both --true-vocab-size and --vocab-file specified and the vocab size does not match, aborting.") - queue.put("exit") - exit(1) - else: - true_vocab_size = None - - # short aliases - tp_size = margs.tensor_model_parallel_size - pp_size = margs.pipeline_model_parallel_size - vp_size = margs.virtual_pipeline_model_parallel_size - if vp_size is None: - vp_size = 1 - - # Layernorm has bias; RMSNorm does not. - if hasattr(checkpoint_args, 'normalization'): - norm_has_bias = checkpoint_args.normalization == "LayerNorm" - else: - # older models only supported LayerNorm - norm_has_bias = True - - # Metadata. - md = types.SimpleNamespace() - md.model_type = args.model_type - md.num_layers = margs.num_layers - md.hidden_size = margs.hidden_size - md.seq_length = margs.seq_length - md.num_attention_heads = margs.num_attention_heads - md.max_position_embeddings = margs.max_position_embeddings - md.tokenizer_type = margs.tokenizer_type - md.iteration = margs.iteration - md.params_dtype = margs.params_dtype - md.bert_binary_head = margs.bert_binary_head - md.output_layer = margs.untie_embeddings_and_output_weights - md.position_embedding_type = margs.position_embedding_type - md.linear_bias = margs.add_bias_linear - md.qkv_bias = margs.add_qkv_bias - md.norm_has_bias = norm_has_bias - md.swiglu = margs.swiglu - md.previous_tensor_parallel_size = margs.tensor_model_parallel_size - md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size - md.true_vocab_size = true_vocab_size - md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by - md.checkpoint_args = checkpoint_args - md.use_legacy_models = margs.use_legacy_models - - # Get first pipe stage. 
- mpu.set_pipeline_model_parallel_rank(0) - all_models = [get_models(tp_size, md.params_dtype)] - models = all_models[0][0] - - md.consumed_train_samples = consumed_train_samples - md.consumed_valid_samples = consumed_valid_samples - queue.put(md) - - def queue_put(name, msg): - print(f"sending {name}") - msg["name"] = name - queue.put(msg) - - # Model schema. - schema = get_model_schema( - md.model_type, - margs.transformer_impl, - margs.num_experts, - margs.expert_model_parallel_size, - ) - - # Send embeddings. - embeddings = [ schema.get("embeddings", model) for model in models ] - message = { - "word embeddings": torch.cat([ e["word"] for e in embeddings ], dim=0) - } - if md.position_embedding_type == 'learned_absolute': - message["position embeddings"] = embeddings[0]["pos"] - else: - assert embeddings[0]["pos"] is None - queue_put("embeddings", message) - - # Send layers. - total_layer_num = 0 - for vp_rank in range(vp_size): - mpu.set_virtual_pipeline_model_parallel_rank(vp_rank) - for pp_rank in range(pp_size): - if pp_rank > 0: - mpu.set_pipeline_model_parallel_rank(pp_rank) - if vp_rank == 0: - all_models.append(get_models(tp_size, md.params_dtype)) - models = all_models[pp_rank][vp_rank] - for layer_num in range(schema.get_num_layers(models[0])): - message = {} - - # Get non-parallel tensors from tp_rank 0 - layer = schema.get_layer(models[0], layer_num) - message["input norm weight"] = layer["self_attn_norm_weight"] - message["post norm weight"] = layer["mlp_norm_weight"] - if norm_has_bias: - message["input norm bias"] = layer["self_attn_norm_bias"] - message["post norm bias"] = layer["mlp_norm_bias"] - if md.linear_bias: - message["dense bias"] = layer["self_attn_proj_bias"] - message["mlp l1 bias"] = layer["mlp_fc2_bias"] - - # Grab all parallel tensors for this layer - qkv_weight = [] - qkv_bias = [] - dense_weight = [] - mlp_l0_weight = [] - mlp_l0_bias = [] - mlp_l1_weight = [] - for tp_rank, model in enumerate(models): - layer = schema.get_layer(model, layer_num) - qkv_weight.append(layer["self_attn_qkv_weight"]) - dense_weight.append(layer["self_attn_proj_weight"]) - mlp_l0_weight.append(layer["mlp_fc1_weight"]) - mlp_l1_weight.append(layer["mlp_fc2_weight"]) - if md.qkv_bias: - qkv_bias.append(layer["self_attn_qkv_bias"]) - if md.linear_bias: - mlp_l0_bias.append(layer["mlp_fc1_bias"]) - - # Handle gated linear units - if md.swiglu: - # concat all the first halves ('W's) and all the second halves ('V's) - for tp_rank in range(tp_size): - mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) - message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) - message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) - else: - message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) - - # simple concat of the rest - message["qkv weight"] = torch.cat(qkv_weight, dim=0) - message["dense weight"] = torch.cat(dense_weight, dim=1) - message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.qkv_bias: - message["qkv bias"] = torch.cat(qkv_bias, dim=0) - if md.linear_bias: - if md.swiglu: - for tp_rank in range(tp_size): - mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) - message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) - message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) - else: - message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) - - queue_put(f"transformer layer {total_layer_num}", message) - - total_layer_num = total_layer_num + 1 - - # Send final 
norm from tp_rank 0. - final_norm = schema.get("final_norm", models[0]) - message = { - "weight": final_norm["weight"], - } - if norm_has_bias: - message["bias"] = final_norm["bias"] - queue_put("final norm", message) - - # Send output layer. - if md.output_layer: - output_layer_ranks = [ schema.get("output_layer", m) for m in models ] - message = { - "weight": torch.cat([r["weight"] for r in output_layer_ranks], dim=0), - } - queue_put("output layer", message) - - # Send BERT params. - if md.model_type == 'BERT': - - # Pooler. - pooler = schema.get("pooler", models[0]) - message = { - "weight": pooler["weight"], - "bias": pooler["bias"], - } - queue_put("pooler", message) - - # LM head. - lm_head = schema.get("lm_head", models[0]) - message = { - "dense weight": lm_head["dense_weight"], - "dense bias": lm_head["dense_bias"], - "norm weight": lm_head["norm_weight"], - } - if norm_has_bias: - message["norm bias"] = lm_head["norm_bias"], - queue_put("lm head", message) - - # Binary head. - if md.bert_binary_head: - binary_head = schema.get("binary_head", models[0]) - message = { - "weight": binary_head["weight"], - "bias": binary_head["bias"], - } - queue_put("binary head", message) - - # Done. - queue.put("done") - -def load_checkpoint(queue, args): - try: - _load_checkpoint(queue, args) - except Exception: - queue.put("exit") - raise +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import json +import os +import sys +import torch +import types + +from schema_core import get_model_schema +from utils import print_memory_usage + + +def add_arguments(parser): + group = parser.add_argument_group(title='Megatron loader') + + group.add_argument('--true-vocab-size', type=int, default=None, + help='original size of vocab, if specified will trim padding from embedding table.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file. If specified will use this to get vocab size and ' + 'trim padding from the embedding table.') + group.add_argument('--megatron-path', type=str, default=None, + help='Base directory of Megatron repository') + group.add_argument('--position-embedding-type', + type=str, + default='learned_absolute', + choices=['learned_absolute', 'rope'], + help='Position embedding type.') + group.add_argument('--loader-transformer-impl', default='transformer_engine', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + + +def _load_checkpoint(queue, args): + + # Search in directory above this + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.path.pardir))) + if args.megatron_path is not None: + sys.path.insert(0, args.megatron_path) + + try: + from megatron.training.arguments import parse_args, validate_args + from megatron.training.global_vars import set_args, set_global_variables + from megatron.training.checkpointing import load_args_from_checkpoint, load_checkpoint + from megatron.legacy.model import module + from megatron.core import mpu + from megatron.core.enums import ModelType + from megatron.legacy import fused_kernels + except ModuleNotFoundError: + print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. 
Exiting.") + queue.put("exit") + exit(1) + + # We want all arguments to come from us + sys.argv = ['script.py', + '--no-masked-softmax-fusion', + '--no-bias-gelu-fusion', + '--no-bias-dropout-fusion', + '--no-async-tensor-model-parallel-allreduce', + '--use-cpu-initialization', + '--micro-batch-size', '1', + '--no-load-optim', + '--no-load-rng', + '--no-save-optim', + '--no-save-rng', + '--no-initialization', + '--mock-data', # To pass the "blend data checks" in arguments.py + '--load', args.load_dir, + '--position-embedding-type', args.position_embedding_type, + '--exit-on-missing-checkpoint', + '--no-one-logger', + ] + + margs = parse_args() + margs, checkpoint_args = load_args_from_checkpoint(margs) + + # Arguments do sanity checks on the world size, but we don't care, + # so trick it into thinking we are plenty of processes + margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size + + # Explicitly copy data types from checkpoint. + margs.fp16 = checkpoint_args.fp16 + margs.bf16 = checkpoint_args.bf16 + + # Expert parallelism requires sequence parallelism. + if margs.expert_model_parallel_size > 1: + margs.sequence_parallel = True + + # Validate margs. + margs = validate_args(margs) + + margs.use_legacy_models = False + margs.transformer_impl = args.loader_transformer_impl + + def check_for_arg(arg_name, default=None): + if getattr(margs, arg_name, None) is None: + if default is not None: + setattr(margs, arg_name, default) + else: + print(f"Checkpoint does not specify the argument {arg_name}. Exiting.") + print(f"Arguments: {margs}") + queue.put("exit") + exit(1) + + check_for_arg('tensor_model_parallel_size') + check_for_arg('pipeline_model_parallel_size') + check_for_arg('num_layers') + check_for_arg('hidden_size') + check_for_arg('seq_length') + check_for_arg('num_attention_heads') + check_for_arg('max_position_embeddings') + check_for_arg('position_embedding_type') + check_for_arg('tokenizer_type') + check_for_arg('iteration') + check_for_arg('bert_binary_head') + check_for_arg('disable_bias_linear', False) + check_for_arg('params_dtype') + check_for_arg('swiglu', False) + + # Determine how to make our models + if args.model_type == 'GPT': + from pretrain_gpt import model_provider + margs.model_type = ModelType.encoder_or_decoder + elif args.model_type == 'BERT': + from pretrain_bert import model_provider + margs.model_type = ModelType.encoder_or_decoder + else: + raise Exception(f'unrecognized model type: {args.model_type}') + + # supress warning about torch.distributed not being initialized + module.MegatronModule.embedding_warning_printed = True + + consumed_train_samples = None + consumed_valid_samples = None + def get_models(count, dtype): + nonlocal consumed_train_samples + nonlocal consumed_valid_samples + model_array_len = margs.virtual_pipeline_model_parallel_size + if model_array_len is None: + model_array_len = 1 + models = [[] for _ in range(model_array_len)] + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + for rank in range(count): + mpu.set_tensor_model_parallel_rank(rank) + if margs.virtual_pipeline_model_parallel_size is not None: + model_ = [] + for i in range(margs.virtual_pipeline_model_parallel_size): + mpu.set_virtual_pipeline_model_parallel_rank(i) + # Set pre_process and post_process only after virtual rank is set. 
+ pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + this_model = model_provider( + pre_process=pre_process, + post_process=post_process + ).to(dtype) + model_.append(this_model) + else: + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + model_rank = 0 + model_ = [model_provider(pre_process, post_process).to(dtype)] + margs.consumed_train_samples = 0 + margs.consumed_valid_samples = 0 + margs.exit_on_missing_checkpoint = True + load_checkpoint(model_, None, None) + + if consumed_train_samples is not None: + assert(margs.consumed_train_samples == consumed_train_samples) + else: + consumed_train_samples = margs.consumed_train_samples + if consumed_valid_samples is not None: + assert(margs.consumed_valid_samples == consumed_valid_samples) + else: + consumed_valid_samples = margs.consumed_valid_samples + for vp_rank in range(model_array_len): + models[vp_rank].append(model_[vp_rank]) + + # Print memory usage. + print_memory_usage("loader", rank, count) + + return models + + set_global_variables(margs, build_tokenizer=False) + mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) + mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) + mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) + mpu.set_expert_model_parallel_world_size(margs.expert_model_parallel_size) + fused_kernels.load(margs) + + # Get true (non-padded) vocab size + if args.true_vocab_size is not None: + true_vocab_size = args.true_vocab_size + elif args.vocab_file is not None: + vocab = json.load(open(args.vocab_file)) + true_vocab_size = len(vocab) + if args.true_vocab_size is not None and true_vocab_size != args.true_vocab_size: + print("Both --true-vocab-size and --vocab-file specified and the vocab size does not match, aborting.") + queue.put("exit") + exit(1) + else: + true_vocab_size = None + + # short aliases + tp_size = margs.tensor_model_parallel_size + pp_size = margs.pipeline_model_parallel_size + vp_size = margs.virtual_pipeline_model_parallel_size + if vp_size is None: + vp_size = 1 + + # Layernorm has bias; RMSNorm does not. + if hasattr(checkpoint_args, 'normalization'): + norm_has_bias = checkpoint_args.normalization == "LayerNorm" + else: + # older models only supported LayerNorm + norm_has_bias = True + + # Metadata. + md = types.SimpleNamespace() + md.model_type = args.model_type + md.num_layers = margs.num_layers + md.hidden_size = margs.hidden_size + md.seq_length = margs.seq_length + md.num_attention_heads = margs.num_attention_heads + md.max_position_embeddings = margs.max_position_embeddings + md.tokenizer_type = margs.tokenizer_type + md.iteration = margs.iteration + md.params_dtype = margs.params_dtype + md.bert_binary_head = margs.bert_binary_head + md.output_layer = margs.untie_embeddings_and_output_weights + md.position_embedding_type = margs.position_embedding_type + md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias + md.norm_has_bias = norm_has_bias + md.swiglu = margs.swiglu + md.previous_tensor_parallel_size = margs.tensor_model_parallel_size + md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size + md.true_vocab_size = true_vocab_size + md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by + md.checkpoint_args = checkpoint_args + md.use_legacy_models = margs.use_legacy_models + + # Get first pipe stage. 
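The metadata namespace assembled above is the first item the loader places on the multiprocessing queue (just below), followed by one named dict per parameter group and finally the string "done" ("exit" signals failure at any point). A minimal sketch of the receiving side, purely for illustration; the real consumers are the saver scripts that share this queue, and consume_checkpoint below is a hypothetical stand-in:

import multiprocessing
import types

def consume_checkpoint(q):
    md = q.get()                        # metadata namespace is always sent first
    if md == "exit":
        raise RuntimeError("loader failed before sending metadata")
    while True:
        msg = q.get()
        if msg == "done":               # normal end of stream
            return md
        if msg == "exit":               # loader hit an error mid-stream
            raise RuntimeError("loader failed while sending weights")
        name = msg.pop("name")          # e.g. "embeddings", "transformer layer 0"
        print(f"received {name}: {sorted(msg)}")

if __name__ == "__main__":
    q = multiprocessing.Queue()
    q.put(types.SimpleNamespace(model_type="GPT"))
    q.put({"name": "embeddings", "word embeddings": None})
    q.put("done")
    consume_checkpoint(q)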
+ mpu.set_pipeline_model_parallel_rank(0) + all_models = [get_models(tp_size, md.params_dtype)] + models = all_models[0][0] + + md.consumed_train_samples = consumed_train_samples + md.consumed_valid_samples = consumed_valid_samples + queue.put(md) + + def queue_put(name, msg): + print(f"sending {name}") + msg["name"] = name + queue.put(msg) + + # Model schema. + schema = get_model_schema( + md.model_type, + margs.transformer_impl, + margs.num_experts, + margs.expert_model_parallel_size, + ) + + # Send embeddings. + embeddings = [ schema.get("embeddings", model) for model in models ] + message = { + "word embeddings": torch.cat([ e["word"] for e in embeddings ], dim=0) + } + if md.position_embedding_type == 'learned_absolute': + message["position embeddings"] = embeddings[0]["pos"] + else: + assert embeddings[0]["pos"] is None + queue_put("embeddings", message) + + # Send layers. + total_layer_num = 0 + for vp_rank in range(vp_size): + mpu.set_virtual_pipeline_model_parallel_rank(vp_rank) + for pp_rank in range(pp_size): + if pp_rank > 0: + mpu.set_pipeline_model_parallel_rank(pp_rank) + if vp_rank == 0: + all_models.append(get_models(tp_size, md.params_dtype)) + models = all_models[pp_rank][vp_rank] + for layer_num in range(schema.get_num_layers(models[0])): + message = {} + + # Get non-parallel tensors from tp_rank 0 + layer = schema.get_layer(models[0], layer_num) + message["input norm weight"] = layer["self_attn_norm_weight"] + message["post norm weight"] = layer["mlp_norm_weight"] + if norm_has_bias: + message["input norm bias"] = layer["self_attn_norm_bias"] + message["post norm bias"] = layer["mlp_norm_bias"] + if md.linear_bias: + message["dense bias"] = layer["self_attn_proj_bias"] + message["mlp l1 bias"] = layer["mlp_fc2_bias"] + + # Grab all parallel tensors for this layer + qkv_weight = [] + qkv_bias = [] + dense_weight = [] + mlp_l0_weight = [] + mlp_l0_bias = [] + mlp_l1_weight = [] + for tp_rank, model in enumerate(models): + layer = schema.get_layer(model, layer_num) + qkv_weight.append(layer["self_attn_qkv_weight"]) + dense_weight.append(layer["self_attn_proj_weight"]) + mlp_l0_weight.append(layer["mlp_fc1_weight"]) + mlp_l1_weight.append(layer["mlp_fc2_weight"]) + if md.qkv_bias: + qkv_bias.append(layer["self_attn_qkv_bias"]) + if md.linear_bias: + mlp_l0_bias.append(layer["mlp_fc1_bias"]) + + # Handle gated linear units + if md.swiglu: + # concat all the first halves ('W's) and all the second halves ('V's) + for tp_rank in range(tp_size): + mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) + message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) + message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) + else: + message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) + + # simple concat of the rest + message["qkv weight"] = torch.cat(qkv_weight, dim=0) + message["dense weight"] = torch.cat(dense_weight, dim=1) + message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) + if md.qkv_bias: + message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: + if md.swiglu: + for tp_rank in range(tp_size): + mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) + message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) + message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) + else: + message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) + + queue_put(f"transformer layer {total_layer_num}", message) + + total_layer_num = total_layer_num + 1 + + # Send final 
norm from tp_rank 0.
+    final_norm = schema.get("final_norm", models[0])
+    message = {
+        "weight": final_norm["weight"],
+    }
+    if norm_has_bias:
+        message["bias"] = final_norm["bias"]
+    queue_put("final norm", message)
+
+    # Send output layer.
+    if md.output_layer:
+        output_layer_ranks = [ schema.get("output_layer", m) for m in models ]
+        message = {
+            "weight": torch.cat([r["weight"] for r in output_layer_ranks], dim=0),
+        }
+        queue_put("output layer", message)
+
+    # Send BERT params.
+    if md.model_type == 'BERT':
+
+        # Pooler.
+        pooler = schema.get("pooler", models[0])
+        message = {
+            "weight": pooler["weight"],
+            "bias": pooler["bias"],
+        }
+        queue_put("pooler", message)
+
+        # LM head.
+        lm_head = schema.get("lm_head", models[0])
+        message = {
+            "dense weight": lm_head["dense_weight"],
+            "dense bias": lm_head["dense_bias"],
+            "norm weight": lm_head["norm_weight"],
+        }
+        if norm_has_bias:
+            message["norm bias"] = lm_head["norm_bias"]
+        queue_put("lm head", message)
+
+        # Binary head.
+        if md.bert_binary_head:
+            binary_head = schema.get("binary_head", models[0])
+            message = {
+                "weight": binary_head["weight"],
+                "bias": binary_head["bias"],
+            }
+            queue_put("binary head", message)
+
+    # Done.
+    queue.put("done")
+
+def load_checkpoint(queue, args):
+    try:
+        _load_checkpoint(queue, args)
+    except Exception:
+        queue.put("exit")
+        raise
diff --git a/tools/checkpoint/loader_megatron.py b/tools/checkpoint/loader_legacy.py
similarity index 97%
rename from tools/checkpoint/loader_megatron.py
rename to tools/checkpoint/loader_legacy.py
index d8f6847..87a7aae 100644
--- a/tools/checkpoint/loader_megatron.py
+++ b/tools/checkpoint/loader_legacy.py
@@ -1,376 +1,376 @@
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
-
-import json
-import os
-import sys
-import types
-
-import torch
-
-
-def add_arguments(parser):
-    group = parser.add_argument_group(title='Megatron loader')
-
-    group.add_argument('--true-vocab-size', type=int, default=None,
-                       help='original size of vocab, if specified will trim padding from embedding table.')
-    group.add_argument('--vocab-file', type=str, default=None,
-                       help='Path to the vocab file. If specified will use this to get vocab size and '
-                            'trim padding from the embedding table.')
-    group.add_argument('--megatron-path', type=str, default=None,
-                       help='Base directory of Megatron repository')
-    group.add_argument('--position-embedding-type',
-                       type=str,
-                       default='learned_absolute',
-                       choices=['learned_absolute', 'rope'],
-                       help='Position embedding type.')
-    group.add_argument('--loader-transformer-impl', default='local',
-                       choices=['local', 'transformer_engine'],
-                       help='Which Transformer implementation to use.')
-
-def _load_checkpoint(queue, args):
-
-    # Search in directory above this
-    sys.path.append(os.path.abspath(
-        os.path.join(os.path.dirname(__file__),
-                     os.path.pardir)))
-    if args.megatron_path is not None:
-        sys.path.insert(0, args.megatron_path)
-
-    try:
-        from megatron.training.arguments import parse_args, validate_args
-        from megatron.training.global_vars import set_args, set_global_variables
-        from megatron.training.checkpointing import load_args_from_checkpoint, load_checkpoint
-        from megatron.legacy.model import module
-        from megatron.core import mpu
-        from megatron.core.enums import ModelType
-        from megatron.legacy import fused_kernels
-    except ModuleNotFoundError:
-        print("Unable to import Megatron, please specify the path to Megatron using --megatron-path.
Exiting.") - queue.put("exit") - exit(1) - - # We want all arguments to come from us - sys.argv = ['script.py', - '--no-masked-softmax-fusion', - '--no-bias-gelu-fusion', - '--no-bias-dropout-fusion', - '--no-async-tensor-model-parallel-allreduce', - '--use-cpu-initialization', - '--micro-batch-size', '1', - '--no-load-optim', - '--no-load-rng', - '--no-save-optim', - '--no-save-rng', - '--mock-data', # To pass the "blend data checks" in arguments.py - '--no-initialization', - '--load', args.load_dir, - '--position-embedding-type', args.position_embedding_type, - '--exit-on-missing-checkpoint', - '--no-one-logger', - ] - - margs = parse_args() - margs, checkpoint_args = load_args_from_checkpoint(margs) - - # Arguments do sanity checks on the world size, but we don't care, - # so trick it into thinking we are plenty of processes - margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size - - # Explicitly copy data types from checkpoint. - margs.fp16 = checkpoint_args.fp16 - margs.bf16 = checkpoint_args.bf16 - - # Validate margs. - margs = validate_args(margs) - - margs.use_legacy_models = True - margs.transformer_impl = args.loader_transformer_impl - - def check_for_arg(arg_name, default=None): - if getattr(margs, arg_name, None) is None: - if default is not None: - setattr(margs, arg_name, default) - else: - print(f"Checkpoint does not specify the argument {arg_name}. Exiting.") - print(f"Arguments: {margs}") - queue.put("exit") - exit(1) - - check_for_arg('tensor_model_parallel_size') - check_for_arg('pipeline_model_parallel_size') - check_for_arg('num_layers') - check_for_arg('hidden_size') - check_for_arg('seq_length') - check_for_arg('num_attention_heads') - check_for_arg('max_position_embeddings') - check_for_arg('position_embedding_type') - check_for_arg('tokenizer_type') - check_for_arg('iteration') - check_for_arg('bert_binary_head') - check_for_arg('disable_bias_linear', False) - check_for_arg('params_dtype') - check_for_arg('swiglu', False) - - # Determine how to make our models - if args.model_type == 'GPT': - from pretrain_gpt import model_provider - margs.model_type = ModelType.encoder_or_decoder - elif args.model_type == 'BERT': - from pretrain_bert import model_provider - margs.model_type = ModelType.encoder_or_decoder - else: - raise Exception(f'unrecognized model type: {args.model_type}') - - # supress warning about torch.distributed not being initialized - module.MegatronModule.embedding_warning_printed = True - - consumed_train_samples = None - consumed_valid_samples = None - def get_models(count, dtype): - nonlocal consumed_train_samples - nonlocal consumed_valid_samples - model_array_len = margs.virtual_pipeline_model_parallel_size - if model_array_len is None: - model_array_len = 1 - models = [[] for _ in range(model_array_len)] - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - for rank in range(count): - mpu.set_tensor_model_parallel_rank(rank) - if margs.virtual_pipeline_model_parallel_size is not None: - model_ = [] - for i in range(margs.virtual_pipeline_model_parallel_size): - mpu.set_virtual_pipeline_model_parallel_rank(i) - # Set pre_process and post_process only after virtual rank is set. 
- pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - this_model = model_provider( - pre_process=pre_process, - post_process=post_process - ).to(dtype) - model_.append(this_model) - else: - pre_process = mpu.is_pipeline_first_stage() - post_process = mpu.is_pipeline_last_stage() - model_rank = 0 - model_ = [model_provider(pre_process, post_process).to(dtype)] - margs.consumed_train_samples = 0 - margs.consumed_valid_samples = 0 - margs.exit_on_missing_checkpoint = True - load_checkpoint(model_, None, None) - - if consumed_train_samples is not None: - assert(margs.consumed_train_samples == consumed_train_samples) - else: - consumed_train_samples = margs.consumed_train_samples - if consumed_valid_samples is not None: - assert(margs.consumed_valid_samples == consumed_valid_samples) - else: - consumed_valid_samples = margs.consumed_valid_samples - for vp_rank in range(model_array_len): - models[vp_rank].append(model_[vp_rank]) - return models - - set_global_variables(margs, build_tokenizer=False) - mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) - mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) - mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) - fused_kernels.load(margs) - - # Get true (non-padded) vocab size - if args.true_vocab_size is not None: - true_vocab_size = args.true_vocab_size - elif args.vocab_file is not None: - vocab = json.load(open(args.vocab_file)) - true_vocab_size = len(vocab) - if args.true_vocab_size is not None and true_vocab_size != args.true_vocab_size: - print("Both --true-vocab-size and --vocab-file specified and the vocab size does not match, aborting.") - queue.put("exit") - exit(1) - else: - true_vocab_size = None - - # short aliases - tp_size = margs.tensor_model_parallel_size - pp_size = margs.pipeline_model_parallel_size - vp_size = margs.virtual_pipeline_model_parallel_size - if vp_size is None: - vp_size = 1 - - # Layernorm has bias; RMSNorm does not. 
- if hasattr(checkpoint_args, 'normalization'): - norm_has_bias = checkpoint_args.normalization == "LayerNorm" - else: - # older models only supported LayerNorm - norm_has_bias = True - - # metadata - md = types.SimpleNamespace() - md.model_type = args.model_type - md.num_layers = margs.num_layers - md.hidden_size = margs.hidden_size - md.seq_length = margs.seq_length - md.num_attention_heads = margs.num_attention_heads - md.max_position_embeddings = margs.max_position_embeddings - md.tokenizer_type = margs.tokenizer_type - md.iteration = margs.iteration - md.params_dtype = margs.params_dtype - md.bert_binary_head = margs.bert_binary_head - md.output_layer = margs.untie_embeddings_and_output_weights - md.position_embedding_type = margs.position_embedding_type - md.linear_bias = margs.add_bias_linear - md.qkv_bias = margs.add_qkv_bias - md.norm_has_bias = norm_has_bias - md.swiglu = margs.swiglu - md.previous_tensor_parallel_size = margs.tensor_model_parallel_size - md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size - md.true_vocab_size = true_vocab_size - md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by - md.checkpoint_args = checkpoint_args - - # Get first pipe stage - mpu.set_pipeline_model_parallel_rank(0) - all_models = [get_models(tp_size, md.params_dtype)] - models = all_models[0][0] - - md.consumed_train_samples = consumed_train_samples - md.consumed_valid_samples = consumed_valid_samples - queue.put(md) - - def queue_put(name, msg): - print(f"sending {name}") - msg["name"] = name - queue.put(msg) - - # Send embeddings - message = { - "word embeddings": torch.cat( - [models[tp_rank].language_model.embedding.word_embeddings.weight.data for tp_rank in range(tp_size)], - dim = 0) - } - if md.position_embedding_type == 'learned_absolute': - message["position embeddings"] = models[0].language_model.embedding.position_embeddings.weight.data - else: - assert not hasattr(models[0].language_model.embedding, 'position_embeddings') - - queue_put("embeddings", message) - - total_layer_num = 0 - for vp_rank in range(vp_size): - mpu.set_virtual_pipeline_model_parallel_rank(vp_rank) - for pp_rank in range(pp_size): - if pp_rank > 0: - mpu.set_pipeline_model_parallel_rank(pp_rank) - if vp_rank == 0: - all_models.append(get_models(tp_size, md.params_dtype)) - models = all_models[pp_rank][vp_rank] - for layer_num in range(len(models[0].language_model.encoder.layers)): - message = {} - - # Get non-parallel tensors from tp_rank 0 - layer = models[0].language_model.encoder.layers[layer_num] - message["input norm weight"] = layer.input_norm.weight.data - if norm_has_bias: - message["input norm bias"] = layer.input_norm.bias.data - message["post norm weight"] = layer.post_attention_norm.weight.data - if norm_has_bias: - message["post norm bias"] = layer.post_attention_norm.bias.data - if md.linear_bias: - message["dense bias"] = layer.self_attention.dense.bias.data - message["mlp l1 bias"] = layer.mlp.dense_4h_to_h.bias.data - - # Grab all parallel tensors for this layer - qkv_weight = [] - qkv_bias = [] - dense_weight = [] - mlp_l0_weight = [] - mlp_l0_bias = [] - mlp_l1_weight = [] - for tp_rank, model in enumerate(models): - layer = model.language_model.encoder.layers[layer_num] - qkv_weight.append(layer.self_attention.query_key_value.weight.data) - dense_weight.append(layer.self_attention.dense.weight.data) - mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) - mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) - if md.qkv_bias: - 
qkv_bias.append(layer.self_attention.query_key_value.bias.data) - if md.linear_bias: - mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) - - # Handle gated linear units - if md.swiglu: - # concat all the first halves ('W's) and all the second halves ('V's) - for tp_rank in range(tp_size): - mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) - message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) - message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) - else: - message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) - - # simple concat of the rest - message["qkv weight"] = torch.cat(qkv_weight, dim=0) - message["dense weight"] = torch.cat(dense_weight, dim=1) - message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.qkv_bias: - message["qkv bias"] = torch.cat(qkv_bias, dim=0) - if md.linear_bias: - if md.swiglu: - for tp_rank in range(tp_size): - mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) - message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) - message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) - else: - message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) - - queue_put(f"transformer layer {total_layer_num}", message) - - total_layer_num = total_layer_num + 1 - - # Send final norm from tp_rank 0 - message = { - "weight": models[0].language_model.encoder.final_norm.weight.data, - } - if norm_has_bias: - message["bias"] = models[0].language_model.encoder.final_norm.bias.data - queue_put("final norm", message) - - if md.output_layer: - message = { - "weight": torch.cat( - [models[tp_rank].language_model.output_layer.weight.data for tp_rank in range(tp_size)], - dim = 0) - } - queue_put("output layer", message) - - - # Send BERT lm head and binary head if it exists - if md.model_type == 'BERT': - message = { - "weight": models[0].language_model.pooler.dense.weight.data, - "bias": models[0].language_model.pooler.dense.bias.data - } - queue_put("pooler", message) - - message = { - "dense weight": models[0].lm_head.dense.weight.data, - "dense bias": models[0].lm_head.dense.bias.data, - "norm weight": models[0].lm_head.norm.weight.data, - } - if norm_has_bias: - message["norm bias"] = models[0].lm_head.norm.bias.data - queue_put("lm head", message) - - if md.bert_binary_head: - message = { - "weight": models[0].binary_head.weight.data, - "bias": models[0].binary_head.bias.data - } - queue_put("binary head", message) - queue.put("done") - -def load_checkpoint(queue, args): - try: - _load_checkpoint(queue, args) - except Exception: - queue.put("exit") - raise +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import json +import os +import sys +import types + +import torch + + +def add_arguments(parser): + group = parser.add_argument_group(title='Megatron loader') + + group.add_argument('--true-vocab-size', type=int, default=None, + help='original size of vocab, if specified will trim padding from embedding table.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file. 
If specified will use this to get vocab size and ' + 'trim padding from the embedding table.') + group.add_argument('--megatron-path', type=str, default=None, + help='Base directory of Megatron repository') + group.add_argument('--position-embedding-type', + type=str, + default='learned_absolute', + choices=['learned_absolute', 'rope'], + help='Position embedding type.') + group.add_argument('--loader-transformer-impl', default='local', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + +def _load_checkpoint(queue, args): + + # Search in directory above this + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.path.pardir))) + if args.megatron_path is not None: + sys.path.insert(0, args.megatron_path) + + try: + from megatron.training.arguments import parse_args, validate_args + from megatron.training.global_vars import set_args, set_global_variables + from megatron.training.checkpointing import load_args_from_checkpoint, load_checkpoint + from megatron.legacy.model import module + from megatron.core import mpu + from megatron.core.enums import ModelType + from megatron.legacy import fused_kernels + except ModuleNotFoundError: + print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") + queue.put("exit") + exit(1) + + # We want all arguments to come from us + sys.argv = ['script.py', + '--no-masked-softmax-fusion', + '--no-bias-gelu-fusion', + '--no-bias-dropout-fusion', + '--no-async-tensor-model-parallel-allreduce', + '--use-cpu-initialization', + '--micro-batch-size', '1', + '--no-load-optim', + '--no-load-rng', + '--no-save-optim', + '--no-save-rng', + '--mock-data', # To pass the "blend data checks" in arguments.py + '--no-initialization', + '--load', args.load_dir, + '--position-embedding-type', args.position_embedding_type, + '--exit-on-missing-checkpoint', + '--no-one-logger', + ] + + margs = parse_args() + margs, checkpoint_args = load_args_from_checkpoint(margs) + + # Arguments do sanity checks on the world size, but we don't care, + # so trick it into thinking we are plenty of processes + margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size + + # Explicitly copy data types from checkpoint. + margs.fp16 = checkpoint_args.fp16 + margs.bf16 = checkpoint_args.bf16 + + # Validate margs. + margs = validate_args(margs) + + margs.use_legacy_models = True + margs.transformer_impl = args.loader_transformer_impl + + def check_for_arg(arg_name, default=None): + if getattr(margs, arg_name, None) is None: + if default is not None: + setattr(margs, arg_name, default) + else: + print(f"Checkpoint does not specify the argument {arg_name}. 
Exiting.") + print(f"Arguments: {margs}") + queue.put("exit") + exit(1) + + check_for_arg('tensor_model_parallel_size') + check_for_arg('pipeline_model_parallel_size') + check_for_arg('num_layers') + check_for_arg('hidden_size') + check_for_arg('seq_length') + check_for_arg('num_attention_heads') + check_for_arg('max_position_embeddings') + check_for_arg('position_embedding_type') + check_for_arg('tokenizer_type') + check_for_arg('iteration') + check_for_arg('bert_binary_head') + check_for_arg('disable_bias_linear', False) + check_for_arg('params_dtype') + check_for_arg('swiglu', False) + + # Determine how to make our models + if args.model_type == 'GPT': + from pretrain_gpt import model_provider + margs.model_type = ModelType.encoder_or_decoder + elif args.model_type == 'BERT': + from pretrain_bert import model_provider + margs.model_type = ModelType.encoder_or_decoder + else: + raise Exception(f'unrecognized model type: {args.model_type}') + + # supress warning about torch.distributed not being initialized + module.MegatronModule.embedding_warning_printed = True + + consumed_train_samples = None + consumed_valid_samples = None + def get_models(count, dtype): + nonlocal consumed_train_samples + nonlocal consumed_valid_samples + model_array_len = margs.virtual_pipeline_model_parallel_size + if model_array_len is None: + model_array_len = 1 + models = [[] for _ in range(model_array_len)] + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + for rank in range(count): + mpu.set_tensor_model_parallel_rank(rank) + if margs.virtual_pipeline_model_parallel_size is not None: + model_ = [] + for i in range(margs.virtual_pipeline_model_parallel_size): + mpu.set_virtual_pipeline_model_parallel_rank(i) + # Set pre_process and post_process only after virtual rank is set. 
+ pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + this_model = model_provider( + pre_process=pre_process, + post_process=post_process + ).to(dtype) + model_.append(this_model) + else: + pre_process = mpu.is_pipeline_first_stage() + post_process = mpu.is_pipeline_last_stage() + model_rank = 0 + model_ = [model_provider(pre_process, post_process).to(dtype)] + margs.consumed_train_samples = 0 + margs.consumed_valid_samples = 0 + margs.exit_on_missing_checkpoint = True + load_checkpoint(model_, None, None) + + if consumed_train_samples is not None: + assert(margs.consumed_train_samples == consumed_train_samples) + else: + consumed_train_samples = margs.consumed_train_samples + if consumed_valid_samples is not None: + assert(margs.consumed_valid_samples == consumed_valid_samples) + else: + consumed_valid_samples = margs.consumed_valid_samples + for vp_rank in range(model_array_len): + models[vp_rank].append(model_[vp_rank]) + return models + + set_global_variables(margs, build_tokenizer=False) + mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) + mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) + mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) + fused_kernels.load(margs) + + # Get true (non-padded) vocab size + if args.true_vocab_size is not None: + true_vocab_size = args.true_vocab_size + elif args.vocab_file is not None: + vocab = json.load(open(args.vocab_file)) + true_vocab_size = len(vocab) + if args.true_vocab_size is not None and true_vocab_size != args.true_vocab_size: + print("Both --true-vocab-size and --vocab-file specified and the vocab size does not match, aborting.") + queue.put("exit") + exit(1) + else: + true_vocab_size = None + + # short aliases + tp_size = margs.tensor_model_parallel_size + pp_size = margs.pipeline_model_parallel_size + vp_size = margs.virtual_pipeline_model_parallel_size + if vp_size is None: + vp_size = 1 + + # Layernorm has bias; RMSNorm does not. 
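The bias convention noted above can be checked directly in PyTorch; this generic snippet is illustrative only and not part of the patch:

import torch

ln = torch.nn.LayerNorm(8)
print([name for name, _ in ln.named_parameters()])   # ['weight', 'bias']
# An RMSNorm module keeps only the scale (e.g. torch.nn.RMSNorm in PyTorch >= 2.4
# exposes just 'weight'), which is why LayerNorm-only checkpoints imply a bias.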
+ if hasattr(checkpoint_args, 'normalization'): + norm_has_bias = checkpoint_args.normalization == "LayerNorm" + else: + # older models only supported LayerNorm + norm_has_bias = True + + # metadata + md = types.SimpleNamespace() + md.model_type = args.model_type + md.num_layers = margs.num_layers + md.hidden_size = margs.hidden_size + md.seq_length = margs.seq_length + md.num_attention_heads = margs.num_attention_heads + md.max_position_embeddings = margs.max_position_embeddings + md.tokenizer_type = margs.tokenizer_type + md.iteration = margs.iteration + md.params_dtype = margs.params_dtype + md.bert_binary_head = margs.bert_binary_head + md.output_layer = margs.untie_embeddings_and_output_weights + md.position_embedding_type = margs.position_embedding_type + md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias + md.norm_has_bias = norm_has_bias + md.swiglu = margs.swiglu + md.previous_tensor_parallel_size = margs.tensor_model_parallel_size + md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size + md.true_vocab_size = true_vocab_size + md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by + md.checkpoint_args = checkpoint_args + + # Get first pipe stage + mpu.set_pipeline_model_parallel_rank(0) + all_models = [get_models(tp_size, md.params_dtype)] + models = all_models[0][0] + + md.consumed_train_samples = consumed_train_samples + md.consumed_valid_samples = consumed_valid_samples + queue.put(md) + + def queue_put(name, msg): + print(f"sending {name}") + msg["name"] = name + queue.put(msg) + + # Send embeddings + message = { + "word embeddings": torch.cat( + [models[tp_rank].language_model.embedding.word_embeddings.weight.data for tp_rank in range(tp_size)], + dim = 0) + } + if md.position_embedding_type == 'learned_absolute': + message["position embeddings"] = models[0].language_model.embedding.position_embeddings.weight.data + else: + assert not hasattr(models[0].language_model.embedding, 'position_embeddings') + + queue_put("embeddings", message) + + total_layer_num = 0 + for vp_rank in range(vp_size): + mpu.set_virtual_pipeline_model_parallel_rank(vp_rank) + for pp_rank in range(pp_size): + if pp_rank > 0: + mpu.set_pipeline_model_parallel_rank(pp_rank) + if vp_rank == 0: + all_models.append(get_models(tp_size, md.params_dtype)) + models = all_models[pp_rank][vp_rank] + for layer_num in range(len(models[0].language_model.encoder.layers)): + message = {} + + # Get non-parallel tensors from tp_rank 0 + layer = models[0].language_model.encoder.layers[layer_num] + message["input norm weight"] = layer.input_norm.weight.data + if norm_has_bias: + message["input norm bias"] = layer.input_norm.bias.data + message["post norm weight"] = layer.post_attention_norm.weight.data + if norm_has_bias: + message["post norm bias"] = layer.post_attention_norm.bias.data + if md.linear_bias: + message["dense bias"] = layer.self_attention.dense.bias.data + message["mlp l1 bias"] = layer.mlp.dense_4h_to_h.bias.data + + # Grab all parallel tensors for this layer + qkv_weight = [] + qkv_bias = [] + dense_weight = [] + mlp_l0_weight = [] + mlp_l0_bias = [] + mlp_l1_weight = [] + for tp_rank, model in enumerate(models): + layer = model.language_model.encoder.layers[layer_num] + qkv_weight.append(layer.self_attention.query_key_value.weight.data) + dense_weight.append(layer.self_attention.dense.weight.data) + mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) + mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) + if md.qkv_bias: + 
qkv_bias.append(layer.self_attention.query_key_value.bias.data) + if md.linear_bias: + mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) + + # Handle gated linear units + if md.swiglu: + # concat all the first halves ('W's) and all the second halves ('V's) + for tp_rank in range(tp_size): + mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) + message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) + message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) + else: + message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) + + # simple concat of the rest + message["qkv weight"] = torch.cat(qkv_weight, dim=0) + message["dense weight"] = torch.cat(dense_weight, dim=1) + message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) + if md.qkv_bias: + message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: + if md.swiglu: + for tp_rank in range(tp_size): + mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) + message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) + message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) + else: + message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) + + queue_put(f"transformer layer {total_layer_num}", message) + + total_layer_num = total_layer_num + 1 + + # Send final norm from tp_rank 0 + message = { + "weight": models[0].language_model.encoder.final_norm.weight.data, + } + if norm_has_bias: + message["bias"] = models[0].language_model.encoder.final_norm.bias.data + queue_put("final norm", message) + + if md.output_layer: + message = { + "weight": torch.cat( + [models[tp_rank].language_model.output_layer.weight.data for tp_rank in range(tp_size)], + dim = 0) + } + queue_put("output layer", message) + + + # Send BERT lm head and binary head if it exists + if md.model_type == 'BERT': + message = { + "weight": models[0].language_model.pooler.dense.weight.data, + "bias": models[0].language_model.pooler.dense.bias.data + } + queue_put("pooler", message) + + message = { + "dense weight": models[0].lm_head.dense.weight.data, + "dense bias": models[0].lm_head.dense.bias.data, + "norm weight": models[0].lm_head.norm.weight.data, + } + if norm_has_bias: + message["norm bias"] = models[0].lm_head.norm.bias.data + queue_put("lm head", message) + + if md.bert_binary_head: + message = { + "weight": models[0].binary_head.weight.data, + "bias": models[0].binary_head.bias.data + } + queue_put("binary head", message) + queue.put("done") + +def load_checkpoint(queue, args): + try: + _load_checkpoint(queue, args) + except Exception: + queue.put("exit") + raise diff --git a/tools/checkpoint/loader_llama_mistral.py b/tools/checkpoint/loader_llama_mistral.py index e7869a2..610e51a 100644 --- a/tools/checkpoint/loader_llama_mistral.py +++ b/tools/checkpoint/loader_llama_mistral.py @@ -1,672 +1,659 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
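The Meta-to-HF conversion below sizes the FFN with the compute_intermediate_size helper, which rounds 8/3 of the hidden size up to a multiple of 256 (times an optional multiplier). A worked example, reimplemented here purely for illustration:

def intermediate_size(n, ffn_dim_multiplier=1, multiple_of=256):
    # Same rounding rule as compute_intermediate_size in this loader.
    return multiple_of * ((int(ffn_dim_multiplier * int(8 * n / 3)) + multiple_of - 1) // multiple_of)

print(intermediate_size(4096))   # 11008: 8*4096/3 = 10922.67, rounded up to a multiple of 256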
- -import json -import os -import sys -import torch -try: - import transformers -except ImportError: - raise ImportError("The 'transformers' package is not installed.") -import gc -import shutil -from tqdm import tqdm -import types - - -def add_arguments(parser): - group = parser.add_argument_group(title='Llama/Mistral loader.') - - # TODO(jbarker): Need assertion to make sure *exactly* one of these is used - parser.add_argument('--model-size', type=str, required=True, - choices=['llama2-7B', 'llama2-13B', 'llama2-70B', 'llama2-7Bf', 'llama2-13Bf', 'llama2-70Bf', 'llama3-8B', 'llama3-70B', 'llama3-8Bf', 'llama3-70Bf', 'mistral-7B', 'mistral-7Bf', 'yi-34B', 'qwen2.5-7B', 'qwen2.5-72B', 'qwen2.5-7Bf', 'qwen2.5-72Bf'], - help='Model size can be `llama2-7B`, `llama2-13B`, `llama2-70B`, `llama3-8B`, `llama3-70B`, `mistral-7B`, `qwen2.5-7B`, `qwen2.5-72B` (for pretrained models), ' - 'and `llama2-7Bf`, `llama2-13Bf`, `llama2-70Bf`, `llama3-8Bf`, `llama3-70bf`, `mistral-7Bf`, `qwen2.5-7Bf`, and `qwen2.5-72Bf` (for chat-finetuned models).') - parser.add_argument('--checkpoint-type', type=str, required=True, - help='Type of checkpoint to convert, options are "meta" or "hf"') - parser.add_argument('--bf16', action='store_true', help='Whether to load weights in bf16.') - parser.add_argument('--fp16', action='store_true', help='Whether to load weights in fp16.') - group.add_argument('--true-vocab-size', type=int, default=None, - help='original size of vocab, if specified will trim padding from embedding table.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file. If specified will use this to get vocab size and ' - 'trim padding from the embedding table.') - group.add_argument('--tokenizer-model', required=True, - help='Tokenizer model file.') - group.add_argument('--megatron-path', type=str, default=None, - help='Base directory of Megatron repository') - group.add_argument("--make-vocab-size-divisible-by", type=int, default=None, help="Make vocab size divisible by") - group.add_argument('--loader-transformer-impl', default='local', - choices=['local', 'transformer_engine'], - help='Which Transformer implementation to use.') - - -def verify_transformers_version(): - major, minor, patch = map(int, transformers.__version__.split('.')) - assert major >= 4 and minor >= 31 - - -NUM_SHARDS = { - "llama2-7B": 1, - "llama2-7Bf": 1, - "llama2-13B": 2, - "llama2-13Bf": 2, - "llama2-70B": 8, - "llama2-70Bf": 8, - "llama3-8B": 1, - "llama3-8Bf": 1, - "llama3-70B": 8, - "llama3-70Bf": 8, - "mistral-7B": 1, - "mistral-7Bf": 1, - "yi-34B": 8, - "qwen2.5-7B": 1, - "qwen2.5-7Bf": 1, - "qwen2.5-72B": 8, - "qwen2.5-72Bf": 8, -} - - -def compute_intermediate_size(n, ffn_dim_multiplier=1, multiple_of=256): - return multiple_of * ((int(ffn_dim_multiplier * int(8 * n / 3)) + multiple_of - 1) // multiple_of) - - -def read_json(path): - with open(path, "r") as f: - return json.load(f) - - -def write_json(text, path): - with open(path, "w") as f: - json.dump(text, f) - - -# This conversion is adapted from -# https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py -def convert_to_hf(model_path, input_base_path, model_size, tokenizer_path): - - if "llama2" in model_size: - from transformers import LlamaConfig as ModelConfig - from transformers import LlamaTokenizer, LlamaTokenizerFast - elif "llama3" in model_size: - from transformers import LlamaConfig as ModelConfig - elif "mistral" in model_size: - from transformers import 
MistralConfig as ModelConfig - - # for backward compatibility, before you needed the repo to be called `my_repo/model_size` - if not os.path.isfile(os.path.join(input_base_path, "params.json")): - input_base_path = os.path.join(input_base_path, model_size) - - os.makedirs(model_path, exist_ok=True) - - params = read_json(os.path.join(input_base_path, "params.json")) - num_shards = NUM_SHARDS[model_size] - params = params.get("model", params) - n_layers = params["n_layers"] - n_heads = params["n_heads"] - n_heads_per_shard = n_heads // num_shards - dim = params["dim"] - dims_per_head = dim // n_heads - base = params.get("rope_theta", 10000.0) - inv_freq = 1.0 / (base ** (torch.arange(0, dims_per_head, 2).float() / dims_per_head)) - if base > 10000.0: - max_position_embeddings = 32768 if "mistral" in model_size else 16384 - else: - max_position_embeddings = 4096 if "mistral" in model_size else 2048 - - if "llama2" in model_size: - tokenizer_class = LlamaTokenizer if LlamaTokenizerFast is None else LlamaTokenizerFast - elif model_size in ["llama3", "mistral"]: - tokenizer_class = transformers.AutoTokenizer.from_pretrained - else: - raise AttributeError(f"model_size={model_size} not supported") - if tokenizer_path is not None: - if "llama" in model_size: - tokenizer = tokenizer_class(tokenizer_path) - if "llama2" in model_size: - tokenizer.save_pretrained(model_path) - vocab_size = tokenizer.vocab_size if tokenizer_path is not None else 32000 - elif "llama3" in model_size: - vocab_size = 128256 - elif "mistral" in model_size: - tokenizer = tokenizer_class.from_file(tokenizer_path) - vocab_size = 32768 - else: - raise AttributeError(f"model_size={model_size} is not supported") - - if params.get("n_kv_heads", None) is not None: - num_key_value_heads = params["n_kv_heads"] # for GQA / MQA - num_local_key_value_heads = n_heads_per_shard // num_key_value_heads - key_value_dim = dim // num_key_value_heads - else: # compatibility with other checkpoints - num_key_value_heads = n_heads - num_local_key_value_heads = n_heads_per_shard - key_value_dim = dim - - # permute for sliced rotary - def permute(w, n_heads=n_heads, dim1=dim, dim2=dim): - return w.view(n_heads, dim1 // n_heads // 2, 2, dim2).transpose(1, 2).reshape(dim1, dim2) - - print(f"Fetching all parameters from the checkpoint at {input_base_path}.") - # Load weights - if num_shards == 1: - # Not sharded - # (The sharded implementation would also work, but this is simpler.) 
- loaded = torch.load(os.path.join(input_base_path, "consolidated.00.pth"), map_location="cpu") - else: - # Sharded - loaded = [ - torch.load(os.path.join(input_base_path, f"consolidated.{i:02d}.pth"), map_location="cpu") - for i in range(num_shards) - ] - param_count = 0 - index_dict = {"weight_map": {}} - for layer_i in range(n_layers): - filename = f"pytorch_model-{layer_i + 1}-of-{n_layers + 1}.bin" - if num_shards == 1: - # Unsharded - q_proj = loaded[f"layers.{layer_i}.attention.wq.weight"] - k_proj = loaded[f"layers.{layer_i}.attention.wk.weight"] - if ("llama2" in model_size) or ("mistral" in model_size): - q_proj = permute(q_proj) - k_proj = permute(k_proj) - state_dict = { - f"model.layers.{layer_i}.self_attn.q_proj.weight": q_proj, - f"model.layers.{layer_i}.self_attn.k_proj.weight": k_proj, - f"model.layers.{layer_i}.self_attn.v_proj.weight": loaded[f"layers.{layer_i}.attention.wv.weight"], - f"model.layers.{layer_i}.self_attn.o_proj.weight": loaded[f"layers.{layer_i}.attention.wo.weight"], - f"model.layers.{layer_i}.mlp.gate_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w1.weight"], - f"model.layers.{layer_i}.mlp.down_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w2.weight"], - f"model.layers.{layer_i}.mlp.up_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w3.weight"], - f"model.layers.{layer_i}.input_layernorm.weight": loaded[f"layers.{layer_i}.attention_norm.weight"], - f"model.layers.{layer_i}.post_attention_layernorm.weight": loaded[f"layers.{layer_i}.ffn_norm.weight"], - } - else: - # Sharded - # Note that attention.w{q,k,v,o}, feed_fordward.w[1,2,3], attention_norm.weight and ffn_norm.weight share - # the same storage object, saving attention_norm and ffn_norm will save other weights too, which is - # redundant as other weights will be stitched from multiple shards. To avoid that, they are cloned. 
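The cloning note above reflects generic torch.save behavior: a sliced tensor still references its full underlying storage, so saving the slice serializes the whole storage unless it is cloned first. A standalone illustration (not code from this patch):

import io
import torch

big = torch.zeros(1_000_000)
view = big[:10]                       # shares storage with `big`
buf_view, buf_clone = io.BytesIO(), io.BytesIO()
torch.save(view, buf_view)            # writes the entire shared storage (~4 MB)
torch.save(view.clone(), buf_clone)   # writes only the 10 cloned elements
print(buf_view.getbuffer().nbytes, ">>", buf_clone.getbuffer().nbytes)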
- - state_dict = { - f"model.layers.{layer_i}.input_layernorm.weight": loaded[0][ - f"layers.{layer_i}.attention_norm.weight" - ].clone(), - f"model.layers.{layer_i}.post_attention_layernorm.weight": loaded[0][ - f"layers.{layer_i}.ffn_norm.weight" - ].clone(), - } - state_dict[f"model.layers.{layer_i}.self_attn.q_proj.weight"] = permute( - torch.cat( - [ - loaded[i][f"layers.{layer_i}.attention.wq.weight"].view(n_heads_per_shard, dims_per_head, dim) - for i in range(num_shards) - ], - dim=0, - ).reshape(dim, dim) - ) - state_dict[f"model.layers.{layer_i}.self_attn.k_proj.weight"] = permute( - torch.cat( - [ - loaded[i][f"layers.{layer_i}.attention.wk.weight"].view( - num_local_key_value_heads, dims_per_head, dim - ) - for i in range(num_shards) - ], - dim=0, - ).reshape(key_value_dim, dim), - num_key_value_heads, - key_value_dim, - dim, - ) - state_dict[f"model.layers.{layer_i}.self_attn.v_proj.weight"] = torch.cat( - [ - loaded[i][f"layers.{layer_i}.attention.wv.weight"].view( - num_local_key_value_heads, dims_per_head, dim - ) - for i in range(num_shards) - ], - dim=0, - ).reshape(key_value_dim, dim) - - state_dict[f"model.layers.{layer_i}.self_attn.o_proj.weight"] = torch.cat( - [loaded[i][f"layers.{layer_i}.attention.wo.weight"] for i in range(num_shards)], dim=1 - ) - state_dict[f"model.layers.{layer_i}.mlp.gate_proj.weight"] = torch.cat( - [loaded[i][f"layers.{layer_i}.feed_forward.w1.weight"] for i in range(num_shards)], dim=0 - ) - state_dict[f"model.layers.{layer_i}.mlp.down_proj.weight"] = torch.cat( - [loaded[i][f"layers.{layer_i}.feed_forward.w2.weight"] for i in range(num_shards)], dim=1 - ) - state_dict[f"model.layers.{layer_i}.mlp.up_proj.weight"] = torch.cat( - [loaded[i][f"layers.{layer_i}.feed_forward.w3.weight"] for i in range(num_shards)], dim=0 - ) - - state_dict[f"model.layers.{layer_i}.self_attn.rotary_emb.inv_freq"] = inv_freq - for k, v in state_dict.items(): - index_dict["weight_map"][k] = filename - param_count += v.numel() - torch.save(state_dict, os.path.join(model_path, filename)) - - filename = f"pytorch_model-{n_layers + 1}-of-{n_layers + 1}.bin" - if num_shards == 1: - # Unsharded - state_dict = { - "model.embed_tokens.weight": loaded["tok_embeddings.weight"], - "model.norm.weight": loaded["norm.weight"], - "lm_head.weight": loaded["output.weight"], - } - else: - d = 0 if "llama3" in model_size else 1 - state_dict = { - "model.norm.weight": loaded[0]["norm.weight"], - "model.embed_tokens.weight": torch.cat( - [loaded[i]["tok_embeddings.weight"] for i in range(num_shards)], dim=d - ), - "lm_head.weight": torch.cat([loaded[i]["output.weight"] for i in range(num_shards)], dim=0), - } - - for k, v in state_dict.items(): - index_dict["weight_map"][k] = filename - param_count += v.numel() - torch.save(state_dict, os.path.join(model_path, filename)) - - # Write configs - index_dict["metadata"] = {"total_size": param_count * 2} - write_json(index_dict, os.path.join(model_path, "pytorch_model.bin.index.json")) - ffn_dim_multiplier = params["ffn_dim_multiplier"] if "ffn_dim_multiplier" in params else 1 - multiple_of = params["multiple_of"] if "multiple_of" in params else 256 - config = ModelConfig( - hidden_size=dim, - intermediate_size=compute_intermediate_size(dim, ffn_dim_multiplier, multiple_of), - num_attention_heads=params["n_heads"], - num_hidden_layers=params["n_layers"], - rms_norm_eps=params["norm_eps"], - num_key_value_heads=num_key_value_heads, - vocab_size=vocab_size, - rope_theta=base, - max_position_embeddings=max_position_embeddings, - ) - 
config.save_pretrained(model_path) - - # Make space so we can load the model properly now. - del state_dict - del loaded - gc.collect() - - return model_path - - -def load_args_from_checkpoint(args): - - # Read Llama args. - model_args_path = os.path.join(args.load, "config.json") - with open(model_args_path) as f: - model_args = json.load(f) - # Update Megatron args. - args.seq_length = 4096 - args.max_position_embeddings = model_args["max_position_embeddings"] - args.hidden_size = model_args["hidden_size"] - args.num_attention_heads = model_args["num_attention_heads"] - args.num_layers = model_args["num_hidden_layers"] - args.global_batch_size = 1024 - args.norm_epsilon = model_args["rms_norm_eps"] - args.iteration = 1 # '0', 'release' don't work - args.position_embedding_type = "rope" - args.swiglu = True - args.normalization = "RMSNorm" - args.add_bias_linear = False - args.untie_embeddings_and_output_weights = True - args.vocab_size = model_args["vocab_size"] - args.padded_vocab_size = model_args["vocab_size"] - args.ffn_hidden_size = model_args["intermediate_size"] - - if "num_key_value_heads" in model_args: - args.group_query_attention = True - args.num_query_groups = model_args["num_key_value_heads"] - - -def set_preprocess_state(args, model, hf_model): - '''Set embedding params.''' - model.language_model.embedding.word_embeddings.weight.data.copy_( - hf_model.model.embed_tokens.weight) - - -def set_postprocess_state(args, model, hf_model): - '''Set output layer & norm params.''' - model.language_model.encoder.final_norm.weight.data.copy_(hf_model.model.norm.weight) - model.language_model.output_layer.weight.data.copy_(hf_model.lm_head.weight) - - -def set_attn_state(args, layer, hf_layer): - '''Set self-attention params.''' - - # Get attention layer & state. - attn = layer.self_attention - hf_attn = hf_layer.self_attn - - # Reshape loaded weights. - tp = args.tensor_model_parallel_size - nh = args.num_attention_heads // tp - ng = (args.num_query_groups if args.group_query_attention \ - else args.num_attention_heads) // tp - dim = args.kv_channels - assert nh % ng == 0 - - # Copy weights (re-order dimensions for Megatron). 
- attn.query_key_value.weight.data.copy_(torch.cat([ - hf_attn.q_proj.weight.reshape((ng, dim*nh//ng, -1)), - hf_attn.k_proj.weight.reshape((ng, dim, -1)), - hf_attn.v_proj.weight.reshape((ng, dim, -1)), - ], dim=1).reshape((-1, args.hidden_size))) - if args.add_qkv_bias: - attn.query_key_value.bias.data.copy_(torch.cat([ - hf_attn.q_proj.bias.reshape((ng, dim*nh//ng)), - hf_attn.k_proj.bias.reshape((ng, dim)), - hf_attn.v_proj.bias.reshape((ng, dim)), - ], dim=1).reshape(-1)) - - attn.dense.weight.data.copy_(hf_attn.o_proj.weight) - - -def set_mlp_state(args, layer, hf_layer): - '''Set MLP params.''' - - mlp = layer.mlp - hf_mlp = hf_layer.mlp - - mlp.dense_h_to_4h.weight.data.copy_(torch.cat([ - hf_mlp.gate_proj.weight, - hf_mlp.up_proj.weight, - ], dim=0)) - mlp.dense_4h_to_h.weight.data.copy_(hf_mlp.down_proj.weight) - - -def set_layer_state(args, model, hf_model, layer_idx): - '''Set transformer layer params.''' - - layer = model.language_model.encoder.layers[layer_idx] - hf_layer = hf_model.model.layers[layer_idx] - - set_attn_state(args, layer, hf_layer) - set_mlp_state(args, layer, hf_layer) - layer.input_norm.weight.data.copy_(hf_layer.input_layernorm.weight) - layer.post_attention_norm.weight.data.copy_(hf_layer.post_attention_layernorm.weight) - - -def load_checkpoint_to_model(args): - '''Set model params.''' - - from pretrain_gpt import model_provider - from transformers import AutoModelForCausalLM - - # Load Huggingface model. - hf_model = AutoModelForCausalLM.from_pretrained(args.load, torch_dtype=args.params_dtype, low_cpu_mem_usage=True, device_map="cpu") - - # Init Megatron model. - model = model_provider(True, True).to(args.params_dtype) - - # Set model state. - set_preprocess_state(args, model, hf_model) - set_postprocess_state(args, model, hf_model) - for layer_idx in tqdm(range(args.num_layers), "set layer states"): - set_layer_state(args, model, hf_model, layer_idx) - - return model - - -def _load_checkpoint(queue, args): - - verify_transformers_version() - - # Search in directory above this. - sys.path.append(os.path.abspath( - os.path.join(os.path.dirname(__file__), - os.path.pardir, - os.path.pardir))) - if args.megatron_path is not None: - sys.path.insert(0, args.megatron_path) - - # Convert Meta checkpoint to HF format as an intermediate step - if args.checkpoint_type == "meta": - model_tmp_path = convert_to_hf(model_path=os.path.join(args.save_dir, 'tmp'), input_base_path=args.load_dir, model_size=args.model_size, tokenizer_path=args.tokenizer_model) - args.load_dir = model_tmp_path - - try: - from megatron.training.arguments import parse_args, validate_args - from megatron.training.global_vars import set_args, set_global_variables - from megatron.legacy.model import module - from megatron.core import mpu - from megatron.core.enums import ModelType - from megatron.legacy import fused_kernels - except ModuleNotFoundError: - print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") - queue.put("exit") - exit(1) - - # We want all arguments to come from us. 
- sys.argv = ['script.py', - '--no-masked-softmax-fusion', - '--no-bias-gelu-fusion', - '--no-bias-dropout-fusion', - '--no-async-tensor-model-parallel-allreduce', - '--use-cpu-initialization', - '--micro-batch-size', '1', - '--no-load-optim', - '--no-load-rng', - '--no-save-optim', - '--no-save-rng', - '--mock-data', # To pass the "blend data checks" in arguments.py - '--no-initialization', - '--load', args.load_dir, - '--no-one-logger', - ] - - if args.make_vocab_size_divisible_by is not None: - sys.argv.extend(["--make-vocab-size-divisible-by", str(args.make_vocab_size_divisible_by)]) - - margs = parse_args() - margs.tokenizer_model = args.tokenizer_model - load_args_from_checkpoint(margs) - - if "llama2" in args.model_size: - margs.tokenizer_type = "Llama2Tokenizer" - elif "yi" in args.model_size: - margs.tokenizer_type = "HuggingFaceTokenizer" - elif "llama3" in args.model_size: - margs.tokenizer_type = "HuggingFaceTokenizer" - elif "mistral" in args.model_size: - margs.tokenizer_type = "HuggingFaceTokenizer" - elif "qwen2.5" in args.model_size: - margs.tokenizer_type = "HuggingFaceTokenizer" - margs.add_qkv_bias = True - - # Arguments do sanity checks on the world size, but we don't care, - # so trick it into thinking we are plenty of processes. - margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size - - margs = validate_args(margs) - - margs.use_legacy_models = True - margs.transformer_impl = args.loader_transformer_impl - - margs.position_embedding_type = "rope" - - def check_for_arg(arg_name, default=None): - if getattr(margs, arg_name, None) is None: - if default is not None: - setattr(margs, arg_name, default) - else: - print(f"Checkpoint does not specify the argument {arg_name}. Exiting.") - print(f"Arguments: {margs}") - queue.put("exit") - exit(1) - - check_for_arg('tensor_model_parallel_size') - check_for_arg('pipeline_model_parallel_size') - check_for_arg('num_layers') - check_for_arg('hidden_size') - check_for_arg('seq_length') - check_for_arg('num_attention_heads') - check_for_arg('max_position_embeddings') - check_for_arg('position_embedding_type') - check_for_arg('iteration') - check_for_arg('bert_binary_head') - check_for_arg('disable_bias_linear', False) - check_for_arg('params_dtype') - check_for_arg('swiglu', False) - - # Determine how to make our models. - assert args.model_type == 'GPT', 'Llama-2, Llama-3 and Mistral are GPT models.' - margs.model_type = ModelType.encoder_or_decoder - margs.params_dtype = torch.bfloat16 if args.bf16 else torch.float16 if args.fp16 else torch.float32 - - # Suppress warning about torch.distributed not being initialized. - module.MegatronModule.embedding_warning_printed = True - - set_global_variables(margs, build_tokenizer=False) - mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) - mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) - mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) - # fused_kernels.load(margs) - - # Short aliases. - tp_size = margs.tensor_model_parallel_size - pp_size = margs.pipeline_model_parallel_size - vp_size = margs.virtual_pipeline_model_parallel_size - if vp_size is None: - vp_size = 1 - - # Metadata. 
- md = types.SimpleNamespace() - md.model_type = args.model_type - md.num_layers = margs.num_layers - md.hidden_size = margs.hidden_size - md.seq_length = margs.seq_length - md.num_attention_heads = margs.num_attention_heads - md.max_position_embeddings = margs.max_position_embeddings - md.tokenizer_type = margs.tokenizer_type - md.iteration = margs.iteration - md.params_dtype = margs.params_dtype - md.bert_binary_head = margs.bert_binary_head - md.output_layer = margs.untie_embeddings_and_output_weights - md.position_embedding_type = margs.position_embedding_type - md.linear_bias = margs.add_bias_linear - md.qkv_bias = margs.add_qkv_bias - md.norm_has_bias = False - md.swiglu = margs.swiglu - md.previous_tensor_parallel_size = margs.tensor_model_parallel_size - md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size - md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by - md.checkpoint_args = margs - md.consumed_train_samples = 0 - md.consumed_valid_samples = 0 - - margs.model_size = args.model_size - - # Get true (non-padded) vocab size - tokenizer = transformers.AutoTokenizer.from_pretrained(margs.tokenizer_model) - md.true_vocab_size = tokenizer._tokenizer.get_vocab_size(with_added_tokens=True) - - # Get first pipe stage. - mpu.set_tensor_model_parallel_rank(0) - mpu.set_pipeline_model_parallel_rank(0) - model = load_checkpoint_to_model(margs) - - queue.put(md) - - def queue_put(name, msg): - print(f"sending {name}") - msg["name"] = name - queue.put(msg) - - # Send embeddings. - message = { - "word embeddings": model.language_model.embedding.word_embeddings.weight.data - } - if md.position_embedding_type == 'learned_absolute': - message["position embeddings"] = model.language_model.embedding.position_embeddings.weight.data - else: - assert not hasattr(model.language_model.embedding, 'position_embeddings') - - queue_put("embeddings", message) - - for layer_num in range(margs.num_layers): - message = {} - - # Get non-parallel tensors from tp_rank 0. - layer = model.language_model.encoder.layers[layer_num] - message["input norm weight"] = layer.input_norm.weight.data - message["post norm weight"] = layer.post_attention_norm.weight.data - if md.linear_bias: - message["dense bias"] = layer.self_attention.dense.bias.data - message["mlp l1 bias"] = layer.mlp.dense_4h_to_h.bias.data - - # Grab all parallel tensors for this layer. - qkv_weight = [] - qkv_bias = [] - dense_weight = [] - mlp_l0_weight = [] - mlp_l0_bias = [] - mlp_l1_weight = [] - layer = model.language_model.encoder.layers[layer_num] - qkv_weight.append(layer.self_attention.query_key_value.weight.data) - dense_weight.append(layer.self_attention.dense.weight.data) - mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) - mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) - - if md.qkv_bias: - qkv_bias.append(layer.self_attention.query_key_value.bias.data) - if md.linear_bias: - mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) - - # Handle gated linear units. - if md.swiglu: - # Concat all the first halves ('W's) and all the second halves ('V's). - for tp_rank in range(tp_size): - mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) - message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) - message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) - else: - message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) - - # Simple concat of the rest. 
- message["qkv weight"] = torch.cat(qkv_weight, dim=0) - message["dense weight"] = torch.cat(dense_weight, dim=1) - message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) - if md.qkv_bias: - message["qkv bias"] = torch.cat(qkv_bias, dim=0) - if md.linear_bias: - if md.swiglu: - for tp_rank in range(tp_size): - mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) - message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) - message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) - else: - message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) - - queue_put(f"transformer layer {layer_num}", message) - - # Send final norm from tp_rank 0. - message = { - "weight": model.language_model.encoder.final_norm.weight.data, - } - queue_put("final norm", message) - - if md.output_layer: - message = { - "weight": model.language_model.output_layer.weight.data - } - queue_put("output layer", message) - - queue.put("done") - - if args.checkpoint_type == "meta": - shutil.rmtree(os.path.join(args.save_dir, 'tmp')) - - -def load_checkpoint(queue, args): - try: - _load_checkpoint(queue, args) - except Exception: - queue.put("exit") - raise +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +import json +import os +import sys +import torch +try: + import transformers +except ImportError: + raise ImportError("The 'transformers' package is not installed.") +import gc +import shutil +from tqdm import tqdm +import types + + +def add_arguments(parser): + group = parser.add_argument_group(title='Llama/Mistral loader.') + + # TODO(jbarker): Need assertion to make sure *exactly* one of these is used + parser.add_argument('--model-size', type=str, required=True, + choices=['llama2-7B', 'llama2-13B', 'llama2-70B', 'llama2-7Bf', 'llama2-13Bf', 'llama2-70Bf', 'llama3', 'mistral', 'yi-34B', 'qwen2.5'], + help='Select model size/type') + parser.add_argument('--checkpoint-type', type=str, required=True, + choices=['meta', 'hf'], + help='Type of checkpoint to convert, options are "meta" or "hf"') + parser.add_argument('--bf16', action='store_true', help='Whether to load weights in bf16.') + parser.add_argument('--fp16', action='store_true', help='Whether to load weights in fp16.') + group.add_argument('--true-vocab-size', type=int, default=None, + help='original size of vocab, if specified will trim padding from embedding table.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file. 
If specified will use this to get vocab size and ' + 'trim padding from the embedding table.') + group.add_argument('--tokenizer-model', required=True, + help='Tokenizer model file.') + group.add_argument('--megatron-path', type=str, default=None, + help='Base directory of Megatron repository') + group.add_argument("--make-vocab-size-divisible-by", type=int, default=None, help="Make vocab size divisible by") + group.add_argument('--loader-transformer-impl', default='local', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + + +def verify_transformers_version(): + major, minor, patch = map(int, transformers.__version__.split('.')) + assert major >= 4 and minor >= 31 + + +NUM_SHARDS = { + "llama2-7B": 1, + "llama2-7Bf": 1, + "llama2-13B": 2, + "llama2-13Bf": 2, + "llama2-70B": 8, + "llama2-70Bf": 8, +} + + +def compute_intermediate_size(n, ffn_dim_multiplier=1, multiple_of=256): + return multiple_of * ((int(ffn_dim_multiplier * int(8 * n / 3)) + multiple_of - 1) // multiple_of) + + +def read_json(path): + with open(path, "r") as f: + return json.load(f) + + +def write_json(text, path): + with open(path, "w") as f: + json.dump(text, f) + + +# This conversion is adapted from +# https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py +def convert_to_hf(model_path, input_base_path, model_size, tokenizer_path): + if "llama2" in model_size: + from transformers import LlamaConfig as ModelConfig + from transformers import LlamaTokenizer, LlamaTokenizerFast + else: + raise NotImplementedError(f"converting {model_size} is only supported using HuggingFace weights") + + # for backward compatibility, before you needed the repo to be called `my_repo/model_size` + if not os.path.isfile(os.path.join(input_base_path, "params.json")): + input_base_path = os.path.join(input_base_path, model_size) + + os.makedirs(model_path, exist_ok=True) + + params = read_json(os.path.join(input_base_path, "params.json")) + num_shards = NUM_SHARDS[model_size] + params = params.get("model", params) + n_layers = params["n_layers"] + n_heads = params["n_heads"] + n_heads_per_shard = n_heads // num_shards + dim = params["dim"] + dims_per_head = dim // n_heads + base = params.get("rope_theta", 10000.0) + inv_freq = 1.0 / (base ** (torch.arange(0, dims_per_head, 2).float() / dims_per_head)) + if base > 10000.0: + max_position_embeddings = 32768 if "mistral" in model_size else 16384 + else: + max_position_embeddings = 4096 + + if "llama2" in model_size: + tokenizer_class = LlamaTokenizer if LlamaTokenizerFast is None else LlamaTokenizerFast + else: + raise AttributeError(f"model_size={model_size} not supported") + + if tokenizer_path is not None: + if "llama2" in model_size: + tokenizer = tokenizer_class(tokenizer_path) + tokenizer.save_pretrained(model_path) + vocab_size = tokenizer.vocab_size if tokenizer_path is not None else 32000 + else: + raise AttributeError(f"model_size={model_size} is not supported") + + if params.get("n_kv_heads", None) is not None: + num_key_value_heads = params["n_kv_heads"] # for GQA / MQA + num_local_key_value_heads = n_heads_per_shard // num_key_value_heads + key_value_dim = dim // num_key_value_heads + else: # compatibility with other checkpoints + num_key_value_heads = n_heads + num_local_key_value_heads = n_heads_per_shard + key_value_dim = dim + + # permute for sliced rotary + def permute(w, n_heads=n_heads, dim1=dim, dim2=dim): + return w.view(n_heads, dim1 // n_heads // 2, 2, 
dim2).transpose(1, 2).reshape(dim1, dim2)
+
+    print(f"Fetching all parameters from the checkpoint at {input_base_path}.")
+    # Load weights
+    if num_shards == 1:
+        # Not sharded
+        # (The sharded implementation would also work, but this is simpler.)
+        loaded = torch.load(os.path.join(input_base_path, "consolidated.00.pth"), map_location="cpu")
+    else:
+        # Sharded
+        loaded = [
+            torch.load(os.path.join(input_base_path, f"consolidated.{i:02d}.pth"), map_location="cpu")
+            for i in range(num_shards)
+        ]
+    param_count = 0
+    index_dict = {"weight_map": {}}
+    for layer_i in range(n_layers):
+        filename = f"pytorch_model-{layer_i + 1}-of-{n_layers + 1}.bin"
+        if num_shards == 1:
+            # Unsharded
+            q_proj = loaded[f"layers.{layer_i}.attention.wq.weight"]
+            k_proj = loaded[f"layers.{layer_i}.attention.wk.weight"]
+            if ("llama2" in model_size) or ("mistral" in model_size):
+                q_proj = permute(q_proj)
+                k_proj = permute(k_proj)
+            state_dict = {
+                f"model.layers.{layer_i}.self_attn.q_proj.weight": q_proj,
+                f"model.layers.{layer_i}.self_attn.k_proj.weight": k_proj,
+                f"model.layers.{layer_i}.self_attn.v_proj.weight": loaded[f"layers.{layer_i}.attention.wv.weight"],
+                f"model.layers.{layer_i}.self_attn.o_proj.weight": loaded[f"layers.{layer_i}.attention.wo.weight"],
+                f"model.layers.{layer_i}.mlp.gate_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w1.weight"],
+                f"model.layers.{layer_i}.mlp.down_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w2.weight"],
+                f"model.layers.{layer_i}.mlp.up_proj.weight": loaded[f"layers.{layer_i}.feed_forward.w3.weight"],
+                f"model.layers.{layer_i}.input_layernorm.weight": loaded[f"layers.{layer_i}.attention_norm.weight"],
+                f"model.layers.{layer_i}.post_attention_layernorm.weight": loaded[f"layers.{layer_i}.ffn_norm.weight"],
+            }
+        else:
+            # Sharded
+            # Note that attention.w{q,k,v,o}, feed_forward.w[1,2,3], attention_norm.weight and ffn_norm.weight share
+            # the same storage object; saving attention_norm and ffn_norm would save the other weights too, which is
+            # redundant because those weights are stitched together from multiple shards. To avoid that, they are cloned.
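+            # Each shard holds a slice of the attention heads: view the per-shard Q/K/V weights as
+            # (heads, head_dim, hidden), concatenate across shards, then reshape back to 2-D below.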
+ + state_dict = { + f"model.layers.{layer_i}.input_layernorm.weight": loaded[0][ + f"layers.{layer_i}.attention_norm.weight" + ].clone(), + f"model.layers.{layer_i}.post_attention_layernorm.weight": loaded[0][ + f"layers.{layer_i}.ffn_norm.weight" + ].clone(), + } + state_dict[f"model.layers.{layer_i}.self_attn.q_proj.weight"] = permute( + torch.cat( + [ + loaded[i][f"layers.{layer_i}.attention.wq.weight"].view(n_heads_per_shard, dims_per_head, dim) + for i in range(num_shards) + ], + dim=0, + ).reshape(dim, dim) + ) + state_dict[f"model.layers.{layer_i}.self_attn.k_proj.weight"] = permute( + torch.cat( + [ + loaded[i][f"layers.{layer_i}.attention.wk.weight"].view( + num_local_key_value_heads, dims_per_head, dim + ) + for i in range(num_shards) + ], + dim=0, + ).reshape(key_value_dim, dim), + num_key_value_heads, + key_value_dim, + dim, + ) + state_dict[f"model.layers.{layer_i}.self_attn.v_proj.weight"] = torch.cat( + [ + loaded[i][f"layers.{layer_i}.attention.wv.weight"].view( + num_local_key_value_heads, dims_per_head, dim + ) + for i in range(num_shards) + ], + dim=0, + ).reshape(key_value_dim, dim) + + state_dict[f"model.layers.{layer_i}.self_attn.o_proj.weight"] = torch.cat( + [loaded[i][f"layers.{layer_i}.attention.wo.weight"] for i in range(num_shards)], dim=1 + ) + state_dict[f"model.layers.{layer_i}.mlp.gate_proj.weight"] = torch.cat( + [loaded[i][f"layers.{layer_i}.feed_forward.w1.weight"] for i in range(num_shards)], dim=0 + ) + state_dict[f"model.layers.{layer_i}.mlp.down_proj.weight"] = torch.cat( + [loaded[i][f"layers.{layer_i}.feed_forward.w2.weight"] for i in range(num_shards)], dim=1 + ) + state_dict[f"model.layers.{layer_i}.mlp.up_proj.weight"] = torch.cat( + [loaded[i][f"layers.{layer_i}.feed_forward.w3.weight"] for i in range(num_shards)], dim=0 + ) + + state_dict[f"model.layers.{layer_i}.self_attn.rotary_emb.inv_freq"] = inv_freq + for k, v in state_dict.items(): + index_dict["weight_map"][k] = filename + param_count += v.numel() + torch.save(state_dict, os.path.join(model_path, filename)) + + filename = f"pytorch_model-{n_layers + 1}-of-{n_layers + 1}.bin" + if num_shards == 1: + # Unsharded + state_dict = { + "model.embed_tokens.weight": loaded["tok_embeddings.weight"], + "model.norm.weight": loaded["norm.weight"], + "lm_head.weight": loaded["output.weight"], + } + else: + d = 0 if "llama3" in model_size else 1 + state_dict = { + "model.norm.weight": loaded[0]["norm.weight"], + "model.embed_tokens.weight": torch.cat( + [loaded[i]["tok_embeddings.weight"] for i in range(num_shards)], dim=d + ), + "lm_head.weight": torch.cat([loaded[i]["output.weight"] for i in range(num_shards)], dim=0), + } + + for k, v in state_dict.items(): + index_dict["weight_map"][k] = filename + param_count += v.numel() + torch.save(state_dict, os.path.join(model_path, filename)) + + # Write configs + index_dict["metadata"] = {"total_size": param_count * 2} + write_json(index_dict, os.path.join(model_path, "pytorch_model.bin.index.json")) + ffn_dim_multiplier = params["ffn_dim_multiplier"] if "ffn_dim_multiplier" in params else 1 + multiple_of = params["multiple_of"] if "multiple_of" in params else 256 + config = ModelConfig( + hidden_size=dim, + intermediate_size=compute_intermediate_size(dim, ffn_dim_multiplier, multiple_of), + num_attention_heads=params["n_heads"], + num_hidden_layers=params["n_layers"], + rms_norm_eps=params["norm_eps"], + num_key_value_heads=num_key_value_heads, + vocab_size=vocab_size, + rope_theta=base, + max_position_embeddings=max_position_embeddings, + ) + 
config.save_pretrained(model_path) + + # Make space so we can load the model properly now. + del state_dict + del loaded + gc.collect() + + return model_path + + +def load_args_from_checkpoint(args, model_size): + + # Read Llama args. + model_args_path = os.path.join(args.load, "config.json") + with open(model_args_path) as f: + model_args = json.load(f) + + # Update Megatron args. + args.seq_length = 4096 + if "llama2" in model_size: + # Correct bug in earlier conversion script. + args.max_position_embeddings = 4096 + else: + args.max_position_embeddings = model_args["max_position_embeddings"] + + args.hidden_size = model_args["hidden_size"] + args.num_attention_heads = model_args["num_attention_heads"] + args.num_layers = model_args["num_hidden_layers"] + args.global_batch_size = 1024 + args.norm_epsilon = model_args["rms_norm_eps"] + args.iteration = 1 # '0', 'release' don't work + args.position_embedding_type = "rope" + args.swiglu = True + args.normalization = "RMSNorm" + args.add_bias_linear = False + args.untie_embeddings_and_output_weights = not model_args.get("tie_word_embeddings", False) + args.vocab_size = model_args["vocab_size"] + args.padded_vocab_size = model_args["vocab_size"] + args.ffn_hidden_size = model_args["intermediate_size"] + + if "num_key_value_heads" in model_args: + args.group_query_attention = True + args.num_query_groups = model_args["num_key_value_heads"] + + +def set_preprocess_state(args, model, hf_model): + '''Set embedding params.''' + model.language_model.embedding.word_embeddings.weight.data.copy_( + hf_model.model.embed_tokens.weight) + + +def set_postprocess_state(args, model, hf_model): + '''Set output layer & norm params.''' + model.language_model.encoder.final_norm.weight.data.copy_(hf_model.model.norm.weight) + if args.untie_embeddings_and_output_weights: + model.language_model.output_layer.weight.data.copy_(hf_model.lm_head.weight) + + +def set_attn_state(args, layer, hf_layer): + '''Set self-attention params.''' + + # Get attention layer & state. + attn = layer.self_attention + hf_attn = hf_layer.self_attn + + # Reshape loaded weights. + tp = args.tensor_model_parallel_size + nh = args.num_attention_heads // tp + ng = (args.num_query_groups if args.group_query_attention \ + else args.num_attention_heads) // tp + dim = args.kv_channels + assert nh % ng == 0 + + # Copy weights (re-order dimensions for Megatron). 
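+    # The fused query_key_value weight groups rows per query group as [q_0 .. q_{nh/ng-1}, k, v];
+    # the reshape/concat below builds exactly that ordering from the separate HF projections.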
+ attn.query_key_value.weight.data.copy_(torch.cat([ + hf_attn.q_proj.weight.reshape((ng, dim*nh//ng, -1)), + hf_attn.k_proj.weight.reshape((ng, dim, -1)), + hf_attn.v_proj.weight.reshape((ng, dim, -1)), + ], dim=1).reshape((-1, args.hidden_size))) + if args.add_qkv_bias: + attn.query_key_value.bias.data.copy_(torch.cat([ + hf_attn.q_proj.bias.reshape((ng, dim*nh//ng)), + hf_attn.k_proj.bias.reshape((ng, dim)), + hf_attn.v_proj.bias.reshape((ng, dim)), + ], dim=1).reshape(-1)) + + attn.dense.weight.data.copy_(hf_attn.o_proj.weight) + + +def set_mlp_state(args, layer, hf_layer): + '''Set MLP params.''' + + mlp = layer.mlp + hf_mlp = hf_layer.mlp + + mlp.dense_h_to_4h.weight.data.copy_(torch.cat([ + hf_mlp.gate_proj.weight, + hf_mlp.up_proj.weight, + ], dim=0)) + mlp.dense_4h_to_h.weight.data.copy_(hf_mlp.down_proj.weight) + + +def set_layer_state(args, model, hf_model, layer_idx): + '''Set transformer layer params.''' + + layer = model.language_model.encoder.layers[layer_idx] + hf_layer = hf_model.model.layers[layer_idx] + + set_attn_state(args, layer, hf_layer) + set_mlp_state(args, layer, hf_layer) + layer.input_norm.weight.data.copy_(hf_layer.input_layernorm.weight) + layer.post_attention_norm.weight.data.copy_(hf_layer.post_attention_layernorm.weight) + + +def load_checkpoint_to_model(args): + '''Set model params.''' + + from pretrain_gpt import model_provider + from transformers import AutoModelForCausalLM + + # Load Huggingface model. + hf_model = AutoModelForCausalLM.from_pretrained(args.load, torch_dtype=args.params_dtype, low_cpu_mem_usage=True, device_map="cpu") + + # Init Megatron model. + model = model_provider(True, True).to(args.params_dtype) + + # Set model state. + set_preprocess_state(args, model, hf_model) + set_postprocess_state(args, model, hf_model) + for layer_idx in tqdm(range(args.num_layers), "set layer states"): + set_layer_state(args, model, hf_model, layer_idx) + + return model + + +def _load_checkpoint(queue, args): + + verify_transformers_version() + + # Search in directory above this. + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.path.pardir, + os.path.pardir))) + if args.megatron_path is not None: + sys.path.insert(0, args.megatron_path) + + # Convert Meta checkpoint to HF format as an intermediate step + if args.checkpoint_type == "meta": + model_tmp_path = convert_to_hf(model_path=os.path.join(args.save_dir, 'tmp'), input_base_path=args.load_dir, model_size=args.model_size, tokenizer_path=args.tokenizer_model) + args.load_dir = model_tmp_path + args.tokenizer_model = model_tmp_path # point to HF tokenizer model + + try: + from megatron.training.arguments import parse_args, validate_args + from megatron.training.global_vars import set_args, set_global_variables + from megatron.legacy.model import module + from megatron.core import mpu + from megatron.core.enums import ModelType + from megatron.legacy import fused_kernels + except ModuleNotFoundError: + print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") + queue.put("exit") + exit(1) + + # We want all arguments to come from us. 
+ sys.argv = ['script.py', + '--no-masked-softmax-fusion', + '--no-bias-gelu-fusion', + '--no-bias-dropout-fusion', + '--no-async-tensor-model-parallel-allreduce', + '--use-cpu-initialization', + '--micro-batch-size', '1', + '--no-load-optim', + '--no-load-rng', + '--no-save-optim', + '--no-save-rng', + '--mock-data', # To pass the "blend data checks" in arguments.py + '--no-initialization', + '--load', args.load_dir, + '--no-one-logger', + ] + + if args.make_vocab_size_divisible_by is not None: + sys.argv.extend(["--make-vocab-size-divisible-by", str(args.make_vocab_size_divisible_by)]) + + margs = parse_args() + margs.tokenizer_model = args.tokenizer_model + load_args_from_checkpoint(margs, args.model_size) + + if "llama2" in args.model_size: + margs.tokenizer_type = "Llama2Tokenizer" + elif "yi" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" + elif "llama3" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" + elif "mistral" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" + elif "qwen2.5" in args.model_size: + margs.tokenizer_type = "HuggingFaceTokenizer" + margs.add_qkv_bias = True + + # Arguments do sanity checks on the world size, but we don't care, + # so trick it into thinking we are plenty of processes. + margs.world_size = margs.tensor_model_parallel_size * margs.pipeline_model_parallel_size + + margs = validate_args(margs) + + margs.use_legacy_models = True + margs.transformer_impl = args.loader_transformer_impl + + margs.position_embedding_type = "rope" + + def check_for_arg(arg_name, default=None): + if getattr(margs, arg_name, None) is None: + if default is not None: + setattr(margs, arg_name, default) + else: + print(f"Checkpoint does not specify the argument {arg_name}. Exiting.") + print(f"Arguments: {margs}") + queue.put("exit") + exit(1) + + check_for_arg('tensor_model_parallel_size') + check_for_arg('pipeline_model_parallel_size') + check_for_arg('num_layers') + check_for_arg('hidden_size') + check_for_arg('seq_length') + check_for_arg('num_attention_heads') + check_for_arg('max_position_embeddings') + check_for_arg('position_embedding_type') + check_for_arg('iteration') + check_for_arg('bert_binary_head') + check_for_arg('disable_bias_linear', False) + check_for_arg('params_dtype') + check_for_arg('swiglu', False) + + # Determine how to make our models. + assert args.model_type == 'GPT', 'Llama-2, Llama-3 and Mistral are GPT models.' + margs.model_type = ModelType.encoder_or_decoder + margs.params_dtype = torch.bfloat16 if args.bf16 else torch.float16 if args.fp16 else torch.float32 + + # Suppress warning about torch.distributed not being initialized. + module.MegatronModule.embedding_warning_printed = True + + set_global_variables(margs, build_tokenizer=False) + mpu.set_tensor_model_parallel_world_size(margs.tensor_model_parallel_size) + mpu.set_pipeline_model_parallel_world_size(margs.pipeline_model_parallel_size) + mpu.set_virtual_pipeline_model_parallel_world_size(margs.virtual_pipeline_model_parallel_size) + fused_kernels.load(margs) + + # Short aliases. + tp_size = margs.tensor_model_parallel_size + pp_size = margs.pipeline_model_parallel_size + vp_size = margs.virtual_pipeline_model_parallel_size + if vp_size is None: + vp_size = 1 + + # Metadata. 
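+    # Collected into a simple namespace and handed to the saver side via queue.put(md) below;
+    # the saver (saver_core.py) reads these fields when it receives the metadata message.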
+ md = types.SimpleNamespace() + md.model_type = args.model_type + md.num_layers = margs.num_layers + md.hidden_size = margs.hidden_size + md.seq_length = margs.seq_length + md.num_attention_heads = margs.num_attention_heads + md.max_position_embeddings = margs.max_position_embeddings + md.tokenizer_type = margs.tokenizer_type + md.iteration = margs.iteration + md.params_dtype = margs.params_dtype + md.bert_binary_head = margs.bert_binary_head + md.output_layer = margs.untie_embeddings_and_output_weights + md.position_embedding_type = margs.position_embedding_type + md.linear_bias = margs.add_bias_linear + md.qkv_bias = margs.add_qkv_bias + md.norm_has_bias = False + md.swiglu = margs.swiglu + md.previous_tensor_parallel_size = margs.tensor_model_parallel_size + md.previous_pipeline_parallel_size = margs.pipeline_model_parallel_size + md.make_vocab_size_divisible_by = margs.make_vocab_size_divisible_by + md.checkpoint_args = margs + md.consumed_train_samples = 0 + md.consumed_valid_samples = 0 + + margs.model_size = args.model_size + + # Get true (non-padded) vocab size + tokenizer = transformers.AutoTokenizer.from_pretrained(margs.tokenizer_model) + md.true_vocab_size = tokenizer._tokenizer.get_vocab_size(with_added_tokens=True) + + # Get first pipe stage. + mpu.set_tensor_model_parallel_rank(0) + mpu.set_pipeline_model_parallel_rank(0) + model = load_checkpoint_to_model(margs) + + queue.put(md) + + def queue_put(name, msg): + print(f"sending {name}") + msg["name"] = name + queue.put(msg) + + # Send embeddings. + message = { + "word embeddings": model.language_model.embedding.word_embeddings.weight.data + } + if md.position_embedding_type == 'learned_absolute': + message["position embeddings"] = model.language_model.embedding.position_embeddings.weight.data + else: + assert not hasattr(model.language_model.embedding, 'position_embeddings') + + queue_put("embeddings", message) + + for layer_num in range(margs.num_layers): + message = {} + + # Get non-parallel tensors from tp_rank 0. + layer = model.language_model.encoder.layers[layer_num] + message["input norm weight"] = layer.input_norm.weight.data + message["post norm weight"] = layer.post_attention_norm.weight.data + if md.linear_bias: + message["dense bias"] = layer.self_attention.dense.bias.data + message["mlp l1 bias"] = layer.mlp.dense_4h_to_h.bias.data + + # Grab all parallel tensors for this layer. + qkv_weight = [] + qkv_bias = [] + dense_weight = [] + mlp_l0_weight = [] + mlp_l0_bias = [] + mlp_l1_weight = [] + layer = model.language_model.encoder.layers[layer_num] + qkv_weight.append(layer.self_attention.query_key_value.weight.data) + dense_weight.append(layer.self_attention.dense.weight.data) + mlp_l0_weight.append(layer.mlp.dense_h_to_4h.weight.data) + mlp_l1_weight.append(layer.mlp.dense_4h_to_h.weight.data) + + if md.qkv_bias: + qkv_bias.append(layer.self_attention.query_key_value.bias.data) + if md.linear_bias: + mlp_l0_bias.append(layer.mlp.dense_h_to_4h.bias.data) + + # Handle gated linear units. + if md.swiglu: + # Concat all the first halves ('W's) and all the second halves ('V's). + for tp_rank in range(tp_size): + mlp_l0_weight[tp_rank] = torch.chunk(mlp_l0_weight[tp_rank], 2, dim=0) + message["mlp l0 weight W"] = torch.cat([w[0] for w in mlp_l0_weight], dim=0) + message["mlp l0 weight V"] = torch.cat([w[1] for w in mlp_l0_weight], dim=0) + else: + message["mlp l0 weight"] = torch.cat(mlp_l0_weight, dim=0) + + # Simple concat of the rest. 
+ message["qkv weight"] = torch.cat(qkv_weight, dim=0) + message["dense weight"] = torch.cat(dense_weight, dim=1) + message["mlp l1 weight"] = torch.cat(mlp_l1_weight, dim=1) + if md.qkv_bias: + message["qkv bias"] = torch.cat(qkv_bias, dim=0) + if md.linear_bias: + if md.swiglu: + for tp_rank in range(tp_size): + mlp_l0_bias[tp_rank] = torch.chunk(mlp_l0_bias[tp_rank], 2, dim=0) + message["mlp l0 bias W"] = torch.cat([b[0] for b in mlp_l0_bias],dim=0) + message["mlp l0 bias V"] = torch.cat([b[1] for b in mlp_l0_bias],dim=0) + else: + message["mlp l0 bias"] = torch.cat(mlp_l0_bias, dim=0) + + queue_put(f"transformer layer {layer_num}", message) + + # Send final norm from tp_rank 0. + message = { + "weight": model.language_model.encoder.final_norm.weight.data, + } + queue_put("final norm", message) + + if md.output_layer: + message = { + "weight": model.language_model.output_layer.weight.data + } + queue_put("output layer", message) + + queue.put("done") + + if args.checkpoint_type == "meta": + shutil.rmtree(os.path.join(args.load_dir)) + + +def load_checkpoint(queue, args): + try: + _load_checkpoint(queue, args) + except Exception: + queue.put("exit") + raise diff --git a/tools/checkpoint/saver_mcore.py b/tools/checkpoint/saver_core.py similarity index 97% rename from tools/checkpoint/saver_mcore.py rename to tools/checkpoint/saver_core.py index 2caf26a..ec5006c 100644 --- a/tools/checkpoint/saver_mcore.py +++ b/tools/checkpoint/saver_core.py @@ -1,549 +1,549 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -from importlib.metadata import version -import os -from packaging.version import Version as PkgVersion -import sys - -import torch - -from schema_mcore import get_model_schema - - -def add_arguments(parser): - group = parser.add_argument_group(title='M-Core saver') - - group.add_argument('--megatron-path', type=str, default=None, - help='Base directory of Megatron repository') - - group.add_argument('--target-tensor-parallel-size', type=int, - help='Target tensor model parallel size, defaults to the tensor parallel size ' - 'in the input checkpoint if provided by the loader, otherwise to 1') - group.add_argument('--target-pipeline-parallel-size', type=int, - help='Target tensor model parallel size, default to the pipeline parall size ' - 'in the input checkpoint if provided by the loader, otherwise to 1') - group.add_argument('--saver-transformer-impl', default='transformer_engine', - choices=['local', 'transformer_engine'], - help='Which Transformer implementation to use.') - group.add_argument('--target-expert-parallel-size', type=int, default=1, - help='Target expert model parallel size, default to 1') - - -def save_checkpoint(queue, args): - - # Transformer engine >= 0.12.0, for CPU initialization. - te_version = PkgVersion(version("transformer-engine")) - assert te_version >= PkgVersion("0.12.0"), \ - "transformer engine version: %s (>=0.12.0 required)." 
% te_version - - # Search in directory above this - sys.path.append(os.path.abspath( - os.path.join(os.path.dirname(__file__), - os.path.pardir, - os.path.pardir))) - if args.megatron_path is not None: - sys.path.insert(0, args.megatron_path) - - try: - from megatron.training.arguments import (parse_args, validate_args) - from megatron.training.checkpointing import save_checkpoint - from megatron.training.global_vars import set_global_variables, get_args - from megatron.core.enums import ModelType - from megatron.training.tokenizer.tokenizer import _vocab_size_with_padding - from megatron.legacy import fused_kernels - from megatron.core import mpu - except ModuleNotFoundError: - print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") - exit(1) - - def queue_get(name=None): - val = queue.get() - if val == "exit": - print("Loader exited, exiting saver") - exit(1) - if name is not None and args.checking and val["name"] != name: - val_name = val["name"] - print(f'Unexpected message. Expecting "{name}" but got "{val_name}". Exiting saver.') - exit(1) - if name is not None: - print(f"received {name}") - return val - - def check_message(msg): - if not args.checking: - return - msg_name = msg.pop("name") - if len(msg.keys()) > 0: - print(f"Unexpected values in {msg_name}:") - for key in msg.keys(): - print(f" {key}") - print(f"Exiting. If you want to ignore this, use the argument --no-checking.") - exit(1) - - - md = queue_get() - - if args.target_tensor_parallel_size is None: - if hasattr(md, 'previous_tensor_parallel_size'): - args.target_tensor_parallel_size = md.previous_tensor_parallel_size - else: - print("loader did not provide a tensor parallel size and --target-tensor-parallel-size not provided on command line. " - "Default to 1.") - args.target_tensor_parallel_size = 1 - - if args.target_pipeline_parallel_size is None: - if hasattr(md, 'previous_pipeline_parallel_size'): - args.target_pipeline_parallel_size = md.previous_pipeline_parallel_size - else: - print("loader did not provide a pipeline parallel size and --target-pipeline-parallel-size not provided on command line. 
" - "Default to 1.") - args.target_pipeline_parallel_size = 1 - - - # Arguments do sanity checks on the world size, but we don't care, - # so trick it into thinking we are plenty of processes - if args.target_tensor_parallel_size is not None and args.target_pipeline_parallel_size is not None: - if args.target_expert_parallel_size is not None: - os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size * args.target_expert_parallel_size}' - else: - os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size}' - - # We want all arguments to come from us - sys.argv = ['script.py', - '--num-layers', str(md.num_layers), - '--hidden-size', str(md.hidden_size), - '--seq-length', str(md.seq_length), - '--num-experts', str(getattr(md, "num_experts", 0)), - '--num-attention-heads', str(md.num_attention_heads), - '--max-position-embeddings', str(md.max_position_embeddings), - '--position-embedding-type', str(md.position_embedding_type), - '--tokenizer-type', str(md.tokenizer_type), - '--tensor-model-parallel-size', str(args.target_tensor_parallel_size), - '--pipeline-model-parallel-size', str(args.target_pipeline_parallel_size), - '--expert-model-parallel-size', str(args.target_expert_parallel_size), - '--no-masked-softmax-fusion', - '--no-bias-gelu-fusion', - '--no-bias-dropout-fusion', - '--no-async-tensor-model-parallel-allreduce', - '--use-cpu-initialization', - '--micro-batch-size', '1', - '--no-load-optim', - '--no-load-rng', - '--no-save-optim', - '--no-save-rng', - '--no-initialization', - '--save-interval', '1', - '--save', args.save_dir, - '--ckpt-format', 'torch', # only 'torch' supported for conversion - '--no-one-logger', - ] - - if md.make_vocab_size_divisible_by is not None: - sys.argv.extend(['--make-vocab-size-divisible-by', str(md.make_vocab_size_divisible_by)]) - if md.params_dtype == torch.float16: - sys.argv.append('--fp16') - elif md.params_dtype == torch.bfloat16: - sys.argv.append('--bf16') - - if md.output_layer: - sys.argv.append('--untie-embeddings-and-output-weights') - if not md.linear_bias: - sys.argv.append('--disable-bias-linear') - - if md.model_type == 'BERT' and not md.bert_binary_head: - sys.argv.append('--bert-no-binary-head') - - margs = parse_args() - - if hasattr (md, 'checkpoint_args'): - # These are arguments that we are either changing, or cause problems for validation if they are set - # Note that some of these deal with T5 so will need to be changed if we support T5. 
- args_to_keep = ['tensor_model_parallel_size', 'pipeline_model_parallel_size', 'expert_model_parallel_size', 'world_size', 'params_dtype', - 'num_layers_per_virtual_pipeline_stage', 'virtual_pipeline_model_parallel_size', - 'masked_softmax_fusion', 'bias_gelu_fusion', 'bias_dropout_fusion', - 'sequence_parallel', 'async_tensor_model_parallel_allreduce', - 'no_load_optim', 'no_load_rng', 'no_save_optim', 'no_save_rng', - 'vocab_file', 'tokenizer_model', - 'save_interval', 'save', - 'perform_initialization', 'use_cpu_initialization', - 'recompute_granularity', 'recompute_num_layers', 'recompute_method', - 'encoder_num_layers', 'encoder_seq_length', - 'distribute_saved_activations', - 'train_iters', 'lr_decay_iters', 'lr_warmup_iters', 'lr_warmup_fraction', - 'start_weight_decay', 'end_weight_decay', - 'ckpt_format', - ] - - for arg, value in vars(md.checkpoint_args).items(): - if arg in args_to_keep: - continue - if not hasattr(margs, arg): - print(f"Checkpoint had argument {arg} but new arguments does not have this.") - continue - if getattr(margs, arg) != value: - print(f"Overwriting default {arg} value {getattr(margs, arg)} with value from checkpoint {value}.") - setattr(margs, arg, value) - - # Explicitly copy sequence_parallel, apply_query_key_layer_scaling. - margs.sequence_parallel = md.checkpoint_args.sequence_parallel - margs.apply_query_key_layer_scaling = md.checkpoint_args.apply_query_key_layer_scaling - - # Sequence parallel is required if use both tensor-parallel and Moe. - if margs.num_experts is not None and args.target_tensor_parallel_size is not None: - if margs.num_experts > 1 and args.target_tensor_parallel_size > 1: - margs.sequence_parallel = True - - validate_args(margs) - - # Use M-core models & unset loaded paths. - margs.use_legacy_models = False - margs.blendable_index_path = None - margs.data_path = [] - margs.load = None - margs.save = args.save_dir - margs.tensorboard_dir = None - margs.tokenizer_model = None - margs.transformer_impl = args.saver_transformer_impl - - set_global_variables(margs, build_tokenizer=False) - - # Megatron args. 
(i.e., 'margs') - margs = get_args() - - if hasattr(md, 'consumed_train_samples'): - margs.consumed_train_samples = md.consumed_train_samples - margs.consumed_valid_samples = md.consumed_valid_samples - print(f"Setting consumed_train_samples to {margs.consumed_train_samples}" - f" and consumed_valid_samples to {margs.consumed_valid_samples}") - else: - print("consumed_train_samples not provided.") - - # Determine how to make our models - if md.model_type == 'GPT': - from pretrain_gpt import model_provider - margs.model_type = ModelType.encoder_or_decoder - elif md.model_type == 'BERT': - from pretrain_bert import model_provider - margs.model_type = ModelType.encoder_or_decoder - else: - raise Exception(f'unrecognized model type: {args.model_type}') - - # fake initializing distributed - mpu.set_tensor_model_parallel_world_size(args.target_tensor_parallel_size) - mpu.set_pipeline_model_parallel_world_size(args.target_pipeline_parallel_size) - mpu.set_expert_model_parallel_world_size(args.target_expert_parallel_size) - mpu.set_tensor_model_parallel_rank(0) - mpu.set_pipeline_model_parallel_rank(0) - mpu.set_expert_model_parallel_rank(0) - fused_kernels.load(margs) - - # Embeddings - #----------- - embeddings_msg = queue_get("embeddings") - - pos_embed = None - if md.position_embedding_type == 'learned_absolute': - pos_embed = embeddings_msg.pop("position embeddings") - orig_word_embed = embeddings_msg.pop("word embeddings") - check_message(embeddings_msg) - - # Deal with padding - def pad_weight(orig_word_embed, true_vocab_size): - if true_vocab_size is not None: - # figure out what our padded vocab size is - orig_vocab_size = orig_word_embed.shape[0] - margs.padded_vocab_size = _vocab_size_with_padding(true_vocab_size, margs) - - # Cut out extra padding we don't need - if orig_vocab_size > margs.padded_vocab_size: - full_word_embed = orig_word_embed[0:margs.padded_vocab_size,:] - - # Expanding embedding to larger size by replicating final entry - elif orig_vocab_size < margs.padded_vocab_size: - padding_size = margs.padded_vocab_size - orig_vocab_size - - full_word_embed = torch.cat(( - orig_word_embed, - orig_word_embed[-1].unsqueeze(0).expand(padding_size, -1))) - - # Same size! - else: - full_word_embed = orig_word_embed - else: - print("Original vocab size not specified, leaving embedding table as-is. " - "If you've changed the tensor parallel size this could cause problems.") - margs.padded_vocab_size = orig_word_embed.shape[0] - full_word_embed = orig_word_embed - return full_word_embed - - full_word_embed = pad_weight(orig_word_embed, md.true_vocab_size) - - # Split into new tensor model parallel sizes - out_word_embed = torch.chunk(full_word_embed, args.target_tensor_parallel_size, dim=0) - - # Model schema. 
- schema = get_model_schema( - md.model_type, - margs.transformer_impl, - margs.num_experts, - margs.expert_model_parallel_size, - ) - - # Construct a 3D(PPxEPxTP) arry for models, fill it with None - models = [[[None for _ in range(args.target_tensor_parallel_size)] for _ in range(args.target_expert_parallel_size)] for _ in range(args.target_pipeline_parallel_size)] - - # Model is lazy instantiated at firstly using - def get_local_model(pp_rank, ep_rank, tp_rank): - if models[pp_rank][ep_rank][tp_rank] is None: - pre_process = True if pp_rank == 0 else False - post_process = True if pp_rank == args.target_pipeline_parallel_size - 1 else False - models[pp_rank][ep_rank][tp_rank] = model_provider(pre_process, post_process).to(md.params_dtype) - return models[pp_rank][ep_rank][tp_rank] - - # Set embeddings. - # -------------- - for ep_rank in range(args.target_expert_parallel_size): - for tp_rank in range(args.target_tensor_parallel_size): - model = get_local_model(0, ep_rank, tp_rank) - if pos_embed is None: - assert not schema.has_position_embeddings(model) - schema.set("embeddings", model, { - "pos" : pos_embed, - "word" : out_word_embed[tp_rank], - }) - - def chunk_weight(weight, parallel_mode, tp_size=1, ep_size=1): - assert parallel_mode in ["row", "column"] - if weight.dim() == 3: - num_experts, out_features, in_features = weight.shape - if parallel_mode == "column": - weight = weight.reshape(ep_size, num_experts // ep_size, tp_size, out_features // tp_size, in_features) - weight = weight.permute(0, 2, 1, 3, 4) - else: - weight = weight.reshape(ep_size, num_experts // ep_size, out_features, tp_size, in_features // tp_size) - weight = weight.permute(0, 3, 1, 2, 4) - return weight # (ep_size, tp_size, local_eps, output_features, in_features) - else: - out_features, in_features = weight.shape - if parallel_mode == "column": - weight = weight.reshape(tp_size, out_features // tp_size, in_features) - else: - weight = weight.reshape(out_features, tp_size, in_features // tp_size).permute(1, 0, 2) - return weight # (tp_size, output_features, in_features) - - def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): - assert parallel_mode in ["row", "column"] - if bias.dim() == 2: - num_experts, hidden_size = bias.shape - if parallel_mode == 'column': - bias = bias.reshape(ep_size, num_experts // ep_size, tp_size, hidden_size // tp_size) - bias = bias.permute(0, 2, 1, 3) # (ep_size, tp_size, local_eps, hidden_size) - else: - bias = bias.reshape(ep_size, num_experts // ep_size, hidden_size) # (ep_size, local_eps, hidden_size) - return bias - else: - hidden_size = bias.shape - if parallel_mode == "column": - bias = bias.reshape(tp_size, hidden_size[0] // tp_size) # (tp_size, hidden_size) - return bias - - # Transformer layers. - # ------------------ - total_layer_num = 0 - for pp_rank in range(args.target_pipeline_parallel_size): - mpu.set_pipeline_model_parallel_rank(pp_rank) - # initial the first module in pp stage to get the layer_num, pooler, lm_head. 
binary_head - get_local_model(pp_rank,0,0) - for layer_id in range(schema.get_num_layers(models[pp_rank][0][0])): - msg = queue_get(f"transformer layer {total_layer_num}") - - # duplicated tensors - input_norm_weight = msg.pop("input norm weight") - post_norm_weight = msg.pop("post norm weight") - if md.norm_has_bias: - input_norm_bias = msg.pop("input norm bias") - post_norm_bias = msg.pop("post norm bias") - - # Split up the parallel tensors - qkv_weight = chunk_weight(msg.pop("qkv weight"), "column", args.target_tensor_parallel_size) - dense_weight = chunk_weight(msg.pop("dense weight"), "row", args.target_tensor_parallel_size) - mlp_l1_weight = chunk_weight(msg.pop("mlp l1 weight"), "row", args.target_tensor_parallel_size, args.target_expert_parallel_size) - - if margs.num_experts: - router = msg.pop("router weight") - - # Special handling for swiglu - if md.swiglu: - mlp_l0_weight_W = chunk_weight(msg.pop("mlp l0 weight W"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) - mlp_l0_weight_V = chunk_weight(msg.pop("mlp l0 weight V"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) - mlp_l0_weight = torch.cat((mlp_l0_weight_W, mlp_l0_weight_V), dim=-2) - else: - mlp_l0_weight = chunk_weight(msg.pop("mlp l0 weight"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) - - if md.qkv_bias: - qkv_bias = chunk_bias(msg.pop("qkv bias"), 'column', args.target_tensor_parallel_size) - if md.linear_bias: - dense_bias = msg.pop("dense bias") - mlp_l1_bias = chunk_bias(msg.pop("mlp l1 bias"), 'row', args.target_tensor_parallel_size, args.target_expert_parallel_size) - if md.swiglu: - mlp_l0_bias_W = chunk_bias(msg.pop("mlp l0 bias W"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) - mlp_l0_bias_V = chunk_bias(msg.pop("mlp l0 bias V"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) - mlp_l0_bias = torch.cat((mlp_l0_bias_W, mlp_l0_bias_V), dim=-1) - else: - mlp_l0_bias = chunk_bias(msg.pop("mlp l0 bias"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) - - # Save them to the model - for ep_rank in range(args.target_expert_parallel_size): - for tp_rank in range(args.target_tensor_parallel_size): - params_dict = { - "self_attn_norm_weight" : input_norm_weight, - "self_attn_qkv_weight" : qkv_weight[tp_rank], - "self_attn_proj_weight" : dense_weight[tp_rank], - "mlp_norm_weight" : post_norm_weight - } - if margs.num_experts: - params_dict.update({ - "mlp_fc1_weight" : mlp_l0_weight[ep_rank][tp_rank], - "mlp_fc2_weight" : mlp_l1_weight[ep_rank][tp_rank] - }) - else: - params_dict.update({ - "mlp_fc1_weight" : mlp_l0_weight[tp_rank], - "mlp_fc2_weight" : mlp_l1_weight[tp_rank] - }) - params_dict.update({ - "self_attn_norm_bias" : input_norm_bias if md.norm_has_bias else None, - "mlp_norm_bias" : post_norm_bias if md.norm_has_bias else None, - }) - if md.qkv_bias: - params_dict.update({ - "self_attn_qkv_bias" : qkv_bias[tp_rank] - }) - if md.linear_bias: - params_dict.update({ - "self_attn_proj_bias" : dense_bias - }) - if margs.num_experts: - params_dict.update({ - "mlp_fc1_bias" : mlp_l0_bias[ep_rank][tp_rank], - "mlp_fc2_bias" : mlp_l1_bias[ep_rank] - }) - else : - params_dict.update({ - "mlp_fc1_bias" : mlp_l0_bias[tp_rank], - "mlp_fc2_bias" : mlp_l1_bias - }) - if margs.num_experts: - params_dict.update({ - "router_weight": router - }) - model = get_local_model(pp_rank, ep_rank, tp_rank) - schema.set_layer(model, layer_id, 
params_dict) - - total_layer_num = total_layer_num + 1 - check_message(msg) - - - if pp_rank == args.target_pipeline_parallel_size - 1: - msg = queue_get("final norm") - final_norm_weight = msg.pop("weight") - if md.norm_has_bias: - final_norm_bias = msg.pop("bias") - pp_local_models = [get_local_model(pp_rank, ep_rank, tp_rank) for ep_rank in range(args.target_expert_parallel_size) - for tp_rank in range(args.target_tensor_parallel_size)] - for eptp_rank, model in enumerate(pp_local_models): - tp_rank = eptp_rank % args.target_tensor_parallel_size - schema.set("final_norm", model, { - "weight" : final_norm_weight, - "bias" : final_norm_bias if md.norm_has_bias else None, - }) - if pp_rank != 0 and not md.output_layer: - # Copy word embeddings to final pipeline rank - schema.set("output_layer", model, { - "weight" : out_word_embed[tp_rank], - }) - del final_norm_weight - if md.norm_has_bias: - del final_norm_bias - check_message(msg) - - if md.output_layer: - msg = queue_get("output layer") - if not hasattr(pp_local_models[0], 'output_layer'): - print("ERROR: got an output layer, but model does not have one") - exit(1) - output_layer_weight = pad_weight(msg.pop("weight"), md.true_vocab_size) - output_layer_weight = torch.chunk(output_layer_weight, args.target_tensor_parallel_size, dim=0) - for eptp_rank, model in enumerate(pp_local_models): - tp_rank = eptp_rank % args.target_tensor_parallel_size - schema.set("output_layer", model, { - "weight" : output_layer_weight[tp_rank], - }) - check_message(msg) - - msg = queue_get() - if msg != "done" and msg["name"] == "pooler": - if not hasattr(models[pp_rank][0][0], 'pooler'): - print("ERROR: got a pooler, but model does not have one") - exit(1) - print("received pooler") - pooler_weight = msg.pop("weight") - pooler_bias = msg.pop("bias") - for model in pp_local_models: - schema.set("pooler", model, { - "weight" : pooler_weight, - "bias" : pooler_bias, - }) - del pooler_weight - del pooler_bias - check_message(msg) - msg = queue_get() - - if msg != "done" and msg["name"] == "lm head": - if not hasattr(models[pp_rank][0][0], 'lm_head'): - print("ERROR: got an lm head, but model does not have one") - exit(1) - print("received lm head") - lm_head_dense_weight = msg.pop("dense weight") - lm_head_dense_bias = msg.pop("dense bias") - lm_head_norm_weight = msg.pop("norm weight") - if md.norm_has_bias: - lm_head_norm_bias = msg.pop("norm bias") - for model in pp_local_models: - schema.set("lm_head", model, { - "dense_weight" : lm_head_dense_weight, - "dense_bias" : lm_head_dense_bias, - "norm_weight" : lm_head_norm_weight, - "norm_bias" : lm_head_norm_bias if md.norm_has_bias else None, - }) - check_message(msg) - msg = queue_get() - - if msg != "done" and msg["name"] == "binary head": - if not hasattr(models[pp_rank][0][0], 'binary_head'): - print("ERROR: got a binary head, but model does not have one") - exit(1) - print("received binary head") - binary_head_weight = msg.pop("weight") - binary_head_bias = msg.pop("bias") - for model in pp_local_models: - schema.set("binary_head", model, { - "weight" : binary_head_weight, - "bias" : binary_head_bias, - }) - check_message(msg) - msg = queue_get() - - # TODO: delete weight when not used - if msg != "done": - print("ERROR: got some more data but was expecting to be done") - - for ep_rank in range(args.target_expert_parallel_size): - for tp_rank in range(args.target_tensor_parallel_size): - save_checkpoint(md.iteration, [get_local_model(pp_rank, ep_rank, tp_rank)], None, None, 
num_floating_point_operations_so_far=0, - pipeline_rank=pp_rank, pipeline_parallel=args.target_pipeline_parallel_size > 1, - expert_rank=ep_rank, expert_parallel=args.target_expert_parallel_size > 1, - tensor_rank=tp_rank) - # release the uselese model parts - models[pp_rank][ep_rank][tp_rank] = None - - print("Done!") +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import os +import sys +import torch +from importlib.metadata import version +from packaging.version import Version as PkgVersion + +from schema_core import get_model_schema + + +def add_arguments(parser): + group = parser.add_argument_group(title='M-Core saver') + + group.add_argument('--megatron-path', type=str, default=None, + help='Base directory of Megatron repository') + + group.add_argument('--target-tensor-parallel-size', type=int, + help='Target tensor model parallel size, defaults to the tensor parallel size ' + 'in the input checkpoint if provided by the loader, otherwise to 1') + group.add_argument('--target-pipeline-parallel-size', type=int, + help='Target tensor model parallel size, default to the pipeline parall size ' + 'in the input checkpoint if provided by the loader, otherwise to 1') + group.add_argument('--saver-transformer-impl', default='transformer_engine', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + group.add_argument('--target-expert-parallel-size', type=int, default=1, + help='Target expert model parallel size, default to 1') + + +def save_checkpoint(queue, args): + + # Transformer engine >= 0.12.0, for CPU initialization. + te_version = PkgVersion(version("transformer-engine")) + assert te_version >= PkgVersion("0.12.0"), \ + "transformer engine version: %s (>=0.12.0 required)." % te_version + + # Search in directory above this + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.path.pardir, + os.path.pardir))) + if args.megatron_path is not None: + sys.path.insert(0, args.megatron_path) + + try: + from megatron.training.arguments import (parse_args, validate_args) + from megatron.training.checkpointing import save_checkpoint + from megatron.training.global_vars import set_global_variables, get_args + from megatron.core.enums import ModelType + from megatron.training.tokenizer.tokenizer import _vocab_size_with_padding + from megatron.legacy import fused_kernels + from megatron.core import mpu + except ModuleNotFoundError: + print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") + exit(1) + + def queue_get(name=None): + val = queue.get() + if val == "exit": + print("Loader exited, exiting saver") + exit(1) + if name is not None and args.checking and val["name"] != name: + val_name = val["name"] + print(f'Unexpected message. Expecting "{name}" but got "{val_name}". Exiting saver.') + exit(1) + if name is not None: + print(f"received {name}") + return val + + def check_message(msg): + if not args.checking: + return + msg_name = msg.pop("name") + if len(msg.keys()) > 0: + print(f"Unexpected values in {msg_name}:") + for key in msg.keys(): + print(f" {key}") + print(f"Exiting. 
If you want to ignore this, use the argument --no-checking.") + exit(1) + + + md = queue_get() + + if args.target_tensor_parallel_size is None: + if hasattr(md, 'previous_tensor_parallel_size'): + args.target_tensor_parallel_size = md.previous_tensor_parallel_size + else: + print("loader did not provide a tensor parallel size and --target-tensor-parallel-size not provided on command line. " + "Default to 1.") + args.target_tensor_parallel_size = 1 + + if args.target_pipeline_parallel_size is None: + if hasattr(md, 'previous_pipeline_parallel_size'): + args.target_pipeline_parallel_size = md.previous_pipeline_parallel_size + else: + print("loader did not provide a pipeline parallel size and --target-pipeline-parallel-size not provided on command line. " + "Default to 1.") + args.target_pipeline_parallel_size = 1 + + + # Arguments do sanity checks on the world size, but we don't care, + # so trick it into thinking we are plenty of processes + if args.target_tensor_parallel_size is not None and args.target_pipeline_parallel_size is not None: + if args.target_expert_parallel_size is not None: + os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size * args.target_expert_parallel_size}' + else: + os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size}' + + # We want all arguments to come from us + sys.argv = ['script.py', + '--num-layers', str(md.num_layers), + '--hidden-size', str(md.hidden_size), + '--seq-length', str(md.seq_length), + '--num-experts', str(getattr(md, "num_experts", 0)), + '--num-attention-heads', str(md.num_attention_heads), + '--max-position-embeddings', str(md.max_position_embeddings), + '--position-embedding-type', str(md.position_embedding_type), + '--tokenizer-type', str(md.tokenizer_type), + '--tensor-model-parallel-size', str(args.target_tensor_parallel_size), + '--pipeline-model-parallel-size', str(args.target_pipeline_parallel_size), + '--expert-model-parallel-size', str(args.target_expert_parallel_size), + '--no-masked-softmax-fusion', + '--no-bias-gelu-fusion', + '--no-bias-dropout-fusion', + '--no-async-tensor-model-parallel-allreduce', + '--use-cpu-initialization', + '--micro-batch-size', '1', + '--no-load-optim', + '--no-load-rng', + '--no-save-optim', + '--no-save-rng', + '--no-initialization', + '--save-interval', '1', + '--save', args.save_dir, + '--ckpt-format', 'torch', # only 'torch' supported for conversion + '--no-one-logger', + ] + + if md.make_vocab_size_divisible_by is not None: + sys.argv.extend(['--make-vocab-size-divisible-by', str(md.make_vocab_size_divisible_by)]) + if md.params_dtype == torch.float16: + sys.argv.append('--fp16') + elif md.params_dtype == torch.bfloat16: + sys.argv.append('--bf16') + + if md.output_layer: + sys.argv.append('--untie-embeddings-and-output-weights') + if not md.linear_bias: + sys.argv.append('--disable-bias-linear') + + if md.model_type == 'BERT' and not md.bert_binary_head: + sys.argv.append('--bert-no-binary-head') + + margs = parse_args() + + if hasattr (md, 'checkpoint_args'): + # These are arguments that we are either changing, or cause problems for validation if they are set + # Note that some of these deal with T5 so will need to be changed if we support T5. 
+ args_to_keep = ['tensor_model_parallel_size', 'pipeline_model_parallel_size', 'expert_model_parallel_size', 'world_size', 'params_dtype', + 'num_layers_per_virtual_pipeline_stage', 'virtual_pipeline_model_parallel_size', + 'masked_softmax_fusion', 'bias_gelu_fusion', 'bias_dropout_fusion', + 'sequence_parallel', 'async_tensor_model_parallel_allreduce', + 'no_load_optim', 'no_load_rng', 'no_save_optim', 'no_save_rng', + 'vocab_file', 'tokenizer_model', + 'save_interval', 'save', + 'perform_initialization', 'use_cpu_initialization', + 'recompute_granularity', 'recompute_num_layers', 'recompute_method', + 'encoder_num_layers', 'encoder_seq_length', + 'distribute_saved_activations', + 'train_iters', 'lr_decay_iters', 'lr_warmup_iters', 'lr_warmup_fraction', + 'start_weight_decay', 'end_weight_decay', + 'ckpt_format', + ] + + for arg, value in vars(md.checkpoint_args).items(): + if arg in args_to_keep: + continue + if not hasattr(margs, arg): + print(f"Checkpoint had argument {arg} but new arguments does not have this.") + continue + if getattr(margs, arg) != value: + print(f"Overwriting default {arg} value {getattr(margs, arg)} with value from checkpoint {value}.") + setattr(margs, arg, value) + + # Explicitly copy sequence_parallel, apply_query_key_layer_scaling. + margs.sequence_parallel = md.checkpoint_args.sequence_parallel + margs.apply_query_key_layer_scaling = md.checkpoint_args.apply_query_key_layer_scaling + + # Sequence parallel is required if use both tensor-parallel and Moe. + if margs.num_experts is not None and args.target_tensor_parallel_size is not None: + if margs.num_experts > 1 and args.target_tensor_parallel_size > 1: + margs.sequence_parallel = True + + validate_args(margs) + + # Use M-core models & unset loaded paths. + margs.use_legacy_models = False + margs.blendable_index_path = None + margs.data_path = [] + margs.load = None + margs.save = args.save_dir + margs.tensorboard_dir = None + margs.tokenizer_model = None + margs.transformer_impl = args.saver_transformer_impl + + set_global_variables(margs, build_tokenizer=False) + + # Megatron args. 
(i.e., 'margs') + margs = get_args() + + if hasattr(md, 'consumed_train_samples'): + margs.consumed_train_samples = md.consumed_train_samples + margs.consumed_valid_samples = md.consumed_valid_samples + print(f"Setting consumed_train_samples to {margs.consumed_train_samples}" + f" and consumed_valid_samples to {margs.consumed_valid_samples}") + else: + print("consumed_train_samples not provided.") + + # Determine how to make our models + if md.model_type == 'GPT': + from pretrain_gpt import model_provider + margs.model_type = ModelType.encoder_or_decoder + elif md.model_type == 'BERT': + from pretrain_bert import model_provider + margs.model_type = ModelType.encoder_or_decoder + else: + raise Exception(f'unrecognized model type: {args.model_type}') + + # fake initializing distributed + mpu.set_tensor_model_parallel_world_size(args.target_tensor_parallel_size) + mpu.set_pipeline_model_parallel_world_size(args.target_pipeline_parallel_size) + mpu.set_expert_model_parallel_world_size(args.target_expert_parallel_size) + mpu.set_tensor_model_parallel_rank(0) + mpu.set_pipeline_model_parallel_rank(0) + mpu.set_expert_model_parallel_rank(0) + fused_kernels.load(margs) + + # Embeddings + #----------- + embeddings_msg = queue_get("embeddings") + + pos_embed = None + if md.position_embedding_type == 'learned_absolute': + pos_embed = embeddings_msg.pop("position embeddings") + orig_word_embed = embeddings_msg.pop("word embeddings") + check_message(embeddings_msg) + + # Deal with padding + def pad_weight(orig_word_embed, true_vocab_size): + if true_vocab_size is not None: + # figure out what our padded vocab size is + orig_vocab_size = orig_word_embed.shape[0] + margs.padded_vocab_size = _vocab_size_with_padding(true_vocab_size, margs) + + # Cut out extra padding we don't need + if orig_vocab_size > margs.padded_vocab_size: + full_word_embed = orig_word_embed[0:margs.padded_vocab_size,:] + + # Expanding embedding to larger size by replicating final entry + elif orig_vocab_size < margs.padded_vocab_size: + padding_size = margs.padded_vocab_size - orig_vocab_size + + full_word_embed = torch.cat(( + orig_word_embed, + orig_word_embed[-1].unsqueeze(0).expand(padding_size, -1))) + + # Same size! + else: + full_word_embed = orig_word_embed + else: + print("Original vocab size not specified, leaving embedding table as-is. " + "If you've changed the tensor parallel size this could cause problems.") + margs.padded_vocab_size = orig_word_embed.shape[0] + full_word_embed = orig_word_embed + return full_word_embed + + full_word_embed = pad_weight(orig_word_embed, md.true_vocab_size) + + # Split into new tensor model parallel sizes + out_word_embed = torch.chunk(full_word_embed, args.target_tensor_parallel_size, dim=0) + + # Model schema. 
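The schema fetched just below (from schema_core.py, which appears later in this patch) maps logical keys such as "embeddings", "final_norm" or "mlp_fc1_weight" to dotted parameter paths inside the M-Core model, so the saver can write tensors without hard-coding module attribute chains. The base ModelSchema class from schema_base.py is not part of this diff, so the following is only a rough sketch of what a call like schema.set("embeddings", model, {...}) presumably amounts to; toy_set and its None-skipping behaviour are illustrative assumptions, not the project's API.

def toy_set(model, key_to_path, tensors):
    # Hypothetical stand-in for schema_base.ModelSchema.set(); illustration only.
    for key, value in tensors.items():
        if value is None:                      # e.g. a norm bias when md.norm_has_bias is False
            continue
        *parents, leaf = key_to_path[key].split(".")
        module = model
        for name in parents:                   # walk e.g. "embedding" -> "word_embeddings"
            module = getattr(module, name)
        getattr(module, leaf).data.copy_(value)

# Roughly what setting the GPT embeddings looks like with the paths from schema_core.py:
# toy_set(model,
#         {"word": "embedding.word_embeddings.weight",
#          "pos": "embedding.position_embeddings.weight"},
#         {"word": out_word_embed[tp_rank], "pos": pos_embed})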
+ schema = get_model_schema( + md.model_type, + margs.transformer_impl, + margs.num_experts, + margs.expert_model_parallel_size, + ) + + # Construct a 3D(PPxEPxTP) arry for models, fill it with None + models = [[[None for _ in range(args.target_tensor_parallel_size)] for _ in range(args.target_expert_parallel_size)] for _ in range(args.target_pipeline_parallel_size)] + + # Model is lazy instantiated at firstly using + def get_local_model(pp_rank, ep_rank, tp_rank): + if models[pp_rank][ep_rank][tp_rank] is None: + pre_process = True if pp_rank == 0 else False + post_process = True if pp_rank == args.target_pipeline_parallel_size - 1 else False + models[pp_rank][ep_rank][tp_rank] = model_provider(pre_process, post_process).to(md.params_dtype) + return models[pp_rank][ep_rank][tp_rank] + + # Set embeddings. + # -------------- + for ep_rank in range(args.target_expert_parallel_size): + for tp_rank in range(args.target_tensor_parallel_size): + model = get_local_model(0, ep_rank, tp_rank) + if pos_embed is None: + assert not schema.has_position_embeddings(model) + schema.set("embeddings", model, { + "pos" : pos_embed, + "word" : out_word_embed[tp_rank], + }) + + def chunk_weight(weight, parallel_mode, tp_size=1, ep_size=1): + assert parallel_mode in ["row", "column"] + if weight.dim() == 3: + num_experts, out_features, in_features = weight.shape + if parallel_mode == "column": + weight = weight.reshape(ep_size, num_experts // ep_size, tp_size, out_features // tp_size, in_features) + weight = weight.permute(0, 2, 1, 3, 4) + else: + weight = weight.reshape(ep_size, num_experts // ep_size, out_features, tp_size, in_features // tp_size) + weight = weight.permute(0, 3, 1, 2, 4) + return weight # (ep_size, tp_size, local_eps, output_features, in_features) + else: + out_features, in_features = weight.shape + if parallel_mode == "column": + weight = weight.reshape(tp_size, out_features // tp_size, in_features) + else: + weight = weight.reshape(out_features, tp_size, in_features // tp_size).permute(1, 0, 2) + return weight # (tp_size, output_features, in_features) + + def chunk_bias(bias, parallel_mode, tp_size=1, ep_size=1): + assert parallel_mode in ["row", "column"] + if bias.dim() == 2: + num_experts, hidden_size = bias.shape + if parallel_mode == 'column': + bias = bias.reshape(ep_size, num_experts // ep_size, tp_size, hidden_size // tp_size) + bias = bias.permute(0, 2, 1, 3) # (ep_size, tp_size, local_eps, hidden_size) + else: + bias = bias.reshape(ep_size, num_experts // ep_size, hidden_size) # (ep_size, local_eps, hidden_size) + return bias + else: + hidden_size = bias.shape + if parallel_mode == "column": + bias = bias.reshape(tp_size, hidden_size[0] // tp_size) # (tp_size, hidden_size) + return bias + + # Transformer layers. + # ------------------ + total_layer_num = 0 + for pp_rank in range(args.target_pipeline_parallel_size): + mpu.set_pipeline_model_parallel_rank(pp_rank) + # initial the first module in pp stage to get the layer_num, pooler, lm_head. 
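The chunk_weight and chunk_bias helpers above reshape a full tensor so that indexing by [ep_rank][tp_rank] (3-D expert weights) or [tp_rank] (2-D weights) yields exactly the shard that belongs to that rank. A minimal self-contained sketch, assuming only PyTorch, checking that the 2-D reshapes agree with a plain torch.chunk along the expected dimension:

import torch

tp_size = 2
w = torch.arange(24, dtype=torch.float32).reshape(4, 6)   # (out_features, in_features)

# "column" parallel: output rows are split across tensor-parallel ranks.
col = w.reshape(tp_size, 4 // tp_size, 6)
assert torch.equal(col[1], torch.chunk(w, tp_size, dim=0)[1])

# "row" parallel: input columns are split across tensor-parallel ranks.
row = w.reshape(4, tp_size, 6 // tp_size).permute(1, 0, 2)
assert torch.equal(row[1], torch.chunk(w, tp_size, dim=1)[1])

For the 3-D expert case the same idea is applied per expert, producing an (ep_size, tp_size, num_local_experts, ...) layout; reshaping once up front lets the per-rank loop below simply index mlp_l0_weight[ep_rank][tp_rank] instead of re-chunking for every rank.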
binary_head + get_local_model(pp_rank,0,0) + for layer_id in range(schema.get_num_layers(models[pp_rank][0][0])): + msg = queue_get(f"transformer layer {total_layer_num}") + + # duplicated tensors + input_norm_weight = msg.pop("input norm weight") + post_norm_weight = msg.pop("post norm weight") + if md.norm_has_bias: + input_norm_bias = msg.pop("input norm bias") + post_norm_bias = msg.pop("post norm bias") + + # Split up the parallel tensors + qkv_weight = chunk_weight(msg.pop("qkv weight"), "column", args.target_tensor_parallel_size) + dense_weight = chunk_weight(msg.pop("dense weight"), "row", args.target_tensor_parallel_size) + mlp_l1_weight = chunk_weight(msg.pop("mlp l1 weight"), "row", args.target_tensor_parallel_size, args.target_expert_parallel_size) + + if margs.num_experts: + router = msg.pop("router weight") + + # Special handling for swiglu + if md.swiglu: + mlp_l0_weight_W = chunk_weight(msg.pop("mlp l0 weight W"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) + mlp_l0_weight_V = chunk_weight(msg.pop("mlp l0 weight V"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) + mlp_l0_weight = torch.cat((mlp_l0_weight_W, mlp_l0_weight_V), dim=-2) + else: + mlp_l0_weight = chunk_weight(msg.pop("mlp l0 weight"), "column", args.target_tensor_parallel_size, args.target_expert_parallel_size) + + if md.qkv_bias: + qkv_bias = chunk_bias(msg.pop("qkv bias"), 'column', args.target_tensor_parallel_size) + if md.linear_bias: + dense_bias = msg.pop("dense bias") + mlp_l1_bias = chunk_bias(msg.pop("mlp l1 bias"), 'row', args.target_tensor_parallel_size, args.target_expert_parallel_size) + if md.swiglu: + mlp_l0_bias_W = chunk_bias(msg.pop("mlp l0 bias W"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) + mlp_l0_bias_V = chunk_bias(msg.pop("mlp l0 bias V"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) + mlp_l0_bias = torch.cat((mlp_l0_bias_W, mlp_l0_bias_V), dim=-1) + else: + mlp_l0_bias = chunk_bias(msg.pop("mlp l0 bias"), 'column', args.target_tensor_parallel_size, args.target_expert_parallel_size) + + # Save them to the model + for ep_rank in range(args.target_expert_parallel_size): + for tp_rank in range(args.target_tensor_parallel_size): + params_dict = { + "self_attn_norm_weight" : input_norm_weight, + "self_attn_qkv_weight" : qkv_weight[tp_rank], + "self_attn_proj_weight" : dense_weight[tp_rank], + "mlp_norm_weight" : post_norm_weight + } + if margs.num_experts: + params_dict.update({ + "mlp_fc1_weight" : mlp_l0_weight[ep_rank][tp_rank], + "mlp_fc2_weight" : mlp_l1_weight[ep_rank][tp_rank] + }) + else: + params_dict.update({ + "mlp_fc1_weight" : mlp_l0_weight[tp_rank], + "mlp_fc2_weight" : mlp_l1_weight[tp_rank] + }) + params_dict.update({ + "self_attn_norm_bias" : input_norm_bias if md.norm_has_bias else None, + "mlp_norm_bias" : post_norm_bias if md.norm_has_bias else None, + }) + if md.qkv_bias: + params_dict.update({ + "self_attn_qkv_bias" : qkv_bias[tp_rank] + }) + if md.linear_bias: + params_dict.update({ + "self_attn_proj_bias" : dense_bias + }) + if margs.num_experts: + params_dict.update({ + "mlp_fc1_bias" : mlp_l0_bias[ep_rank][tp_rank], + "mlp_fc2_bias" : mlp_l1_bias[ep_rank] + }) + else : + params_dict.update({ + "mlp_fc1_bias" : mlp_l0_bias[tp_rank], + "mlp_fc2_bias" : mlp_l1_bias + }) + if margs.num_experts: + params_dict.update({ + "router_weight": router + }) + model = get_local_model(pp_rank, ep_rank, tp_rank) + schema.set_layer(model, layer_id, 
params_dict) + + total_layer_num = total_layer_num + 1 + check_message(msg) + + + if pp_rank == args.target_pipeline_parallel_size - 1: + msg = queue_get("final norm") + final_norm_weight = msg.pop("weight") + if md.norm_has_bias: + final_norm_bias = msg.pop("bias") + pp_local_models = [get_local_model(pp_rank, ep_rank, tp_rank) for ep_rank in range(args.target_expert_parallel_size) + for tp_rank in range(args.target_tensor_parallel_size)] + for eptp_rank, model in enumerate(pp_local_models): + tp_rank = eptp_rank % args.target_tensor_parallel_size + schema.set("final_norm", model, { + "weight" : final_norm_weight, + "bias" : final_norm_bias if md.norm_has_bias else None, + }) + if pp_rank != 0 and not md.output_layer: + # Copy word embeddings to final pipeline rank + schema.set("output_layer", model, { + "weight" : out_word_embed[tp_rank], + }) + del final_norm_weight + if md.norm_has_bias: + del final_norm_bias + check_message(msg) + + if md.output_layer: + msg = queue_get("output layer") + if not hasattr(pp_local_models[0], 'output_layer'): + print("ERROR: got an output layer, but model does not have one") + exit(1) + output_layer_weight = pad_weight(msg.pop("weight"), md.true_vocab_size) + output_layer_weight = torch.chunk(output_layer_weight, args.target_tensor_parallel_size, dim=0) + for eptp_rank, model in enumerate(pp_local_models): + tp_rank = eptp_rank % args.target_tensor_parallel_size + schema.set("output_layer", model, { + "weight" : output_layer_weight[tp_rank], + }) + check_message(msg) + + msg = queue_get() + if msg != "done" and msg["name"] == "pooler": + if not hasattr(models[pp_rank][0][0], 'pooler'): + print("ERROR: got a pooler, but model does not have one") + exit(1) + print("received pooler") + pooler_weight = msg.pop("weight") + pooler_bias = msg.pop("bias") + for model in pp_local_models: + schema.set("pooler", model, { + "weight" : pooler_weight, + "bias" : pooler_bias, + }) + del pooler_weight + del pooler_bias + check_message(msg) + msg = queue_get() + + if msg != "done" and msg["name"] == "lm head": + if not hasattr(models[pp_rank][0][0], 'lm_head'): + print("ERROR: got an lm head, but model does not have one") + exit(1) + print("received lm head") + lm_head_dense_weight = msg.pop("dense weight") + lm_head_dense_bias = msg.pop("dense bias") + lm_head_norm_weight = msg.pop("norm weight") + if md.norm_has_bias: + lm_head_norm_bias = msg.pop("norm bias") + for model in pp_local_models: + schema.set("lm_head", model, { + "dense_weight" : lm_head_dense_weight, + "dense_bias" : lm_head_dense_bias, + "norm_weight" : lm_head_norm_weight, + "norm_bias" : lm_head_norm_bias if md.norm_has_bias else None, + }) + check_message(msg) + msg = queue_get() + + if msg != "done" and msg["name"] == "binary head": + if not hasattr(models[pp_rank][0][0], 'binary_head'): + print("ERROR: got a binary head, but model does not have one") + exit(1) + print("received binary head") + binary_head_weight = msg.pop("weight") + binary_head_bias = msg.pop("bias") + for model in pp_local_models: + schema.set("binary_head", model, { + "weight" : binary_head_weight, + "bias" : binary_head_bias, + }) + check_message(msg) + msg = queue_get() + + # TODO: delete weight when not used + if msg != "done": + print("ERROR: got some more data but was expecting to be done") + + for ep_rank in range(args.target_expert_parallel_size): + for tp_rank in range(args.target_tensor_parallel_size): + save_checkpoint(md.iteration, [get_local_model(pp_rank, ep_rank, tp_rank)], None, None, 
num_floating_point_operations_so_far=0, + pipeline_rank=pp_rank, pipeline_parallel=args.target_pipeline_parallel_size > 1, + expert_rank=ep_rank, expert_parallel=args.target_expert_parallel_size > 1, + tensor_rank=tp_rank) + # release the uselese model parts + models[pp_rank][ep_rank][tp_rank] = None + + print("Done!") diff --git a/tools/checkpoint/saver_megatron.py b/tools/checkpoint/saver_legacy.py similarity index 97% rename from tools/checkpoint/saver_megatron.py rename to tools/checkpoint/saver_legacy.py index 7d9cdc3..b050be5 100644 --- a/tools/checkpoint/saver_megatron.py +++ b/tools/checkpoint/saver_legacy.py @@ -1,415 +1,415 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -import os -import sys -import torch - - -def add_arguments(parser): - group = parser.add_argument_group(title='Megatron saver') - - group.add_argument('--megatron-path', type=str, default=None, - help='Base directory of Megatron repository') - - group.add_argument('--target-tensor-parallel-size', type=int, - help='Target tensor model parallel size, defaults to the tensor parallel size ' - 'in the input checkpoint if provided by the loader, otherwise to 1') - group.add_argument('--target-pipeline-parallel-size', type=int, - help='Target tensor model parallel size, default to the pipeline parall size ' - 'in the input checkpoint if provided by the loader, otherwise to 1') - group.add_argument('--saver-transformer-impl', default='local', - choices=['local', 'transformer_engine'], - help='Which Transformer implementation to use.') - -def save_checkpoint(queue, args): - # Search in directory above this - sys.path.append(os.path.abspath( - os.path.join(os.path.dirname(__file__), - os.path.pardir, - os.path.pardir))) - if args.megatron_path is not None: - sys.path.insert(0, args.megatron_path) - - try: - from megatron.training.arguments import (parse_args, validate_args) - from megatron.training.checkpointing import save_checkpoint - from megatron.training.global_vars import set_global_variables, get_args - from megatron.core.enums import ModelType - from megatron.training.tokenizer.tokenizer import _vocab_size_with_padding - from megatron.legacy import fused_kernels - from megatron.core import mpu - except ModuleNotFoundError: - print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") - exit(1) - - def queue_get(name=None): - val = queue.get() - if val == "exit": - print("Loader exited, exiting saver") - exit(1) - if name is not None and args.checking and val["name"] != name: - val_name = val["name"] - print(f'Unexpected message. Expecting "{name}" but got "{val_name}". Exiting saver.') - exit(1) - if name is not None: - print(f"received {name}") - return val - - def check_message(msg): - if not args.checking: - return - msg_name = msg.pop("name") - if len(msg.keys()) > 0: - print(f"Unexpected values in {msg_name}:") - for key in msg.keys(): - print(f" {key}") - print(f"Exiting. If you want to ignore this, use the argument --no-checking.") - exit(1) - - md = queue_get() - - if args.target_tensor_parallel_size is None: - if hasattr(md, 'previous_tensor_parallel_size'): - args.target_tensor_parallel_size = md.previous_tensor_parallel_size - else: - print( - "loader did not provide a tensor parallel size and --target-tensor-parallel-size not provided on command line. 
" - "Default to 1.") - args.target_tensor_parallel_size = 1 - - if args.target_pipeline_parallel_size is None: - if hasattr(md, 'previous_pipeline_parallel_size'): - args.target_pipeline_parallel_size = md.previous_pipeline_parallel_size - else: - print( - "loader did not provide a pipeline parallel size and --target-pipeline-parallel-size not provided on command line. " - "Default to 1.") - args.target_pipeline_parallel_size = 1 - - # Arguments do sanity checks on the world size, but we don't care, - # so trick it into thinking we are plenty of processes - if args.target_tensor_parallel_size is not None and args.target_pipeline_parallel_size is not None: - os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size}' - - # We want all arguments to come from us - sys.argv = ['script.py', - '--num-layers', str(md.num_layers), - '--hidden-size', str(md.hidden_size), - '--seq-length', str(md.seq_length), - '--num-attention-heads', str(md.num_attention_heads), - '--max-position-embeddings', str(md.max_position_embeddings), - '--position-embedding-type', str(md.position_embedding_type), - '--tokenizer-type', str(md.tokenizer_type), - '--tensor-model-parallel-size', str(args.target_tensor_parallel_size), - '--pipeline-model-parallel-size', str(args.target_pipeline_parallel_size), - '--no-masked-softmax-fusion', - '--no-bias-gelu-fusion', - '--no-bias-dropout-fusion', - '--no-async-tensor-model-parallel-allreduce', - '--use-cpu-initialization', - '--micro-batch-size', '1', - '--no-load-optim', - '--no-load-rng', - '--no-save-optim', - '--no-save-rng', - '--no-initialization', - '--save-interval', '1', - '--save', args.save_dir, - '--ckpt-format', 'torch', # only 'torch' supported for conversion - '--no-one-logger', - ] - - if md.make_vocab_size_divisible_by is not None: - sys.argv.extend(['--make-vocab-size-divisible-by', str(md.make_vocab_size_divisible_by)]) - if md.params_dtype == torch.float16: - sys.argv.append('--fp16') - elif md.params_dtype == torch.bfloat16: - sys.argv.append('--bf16') - - if md.output_layer: - sys.argv.append('--untie-embeddings-and-output-weights') - if not md.linear_bias: - sys.argv.append('--disable-bias-linear') - - if md.model_type == 'BERT' and not md.bert_binary_head: - sys.argv.append('--bert-no-binary-head') - - margs = parse_args() - - if hasattr(md, 'checkpoint_args'): - # These are arguments that we are either changing, or cause problems for validation if they are set - # Note that some of these deal with T5 so will need to be changed if we support T5. 
- args_to_keep = ['tensor_model_parallel_size', 'pipeline_model_parallel_size', 'world_size', 'params_dtype', - 'num_layers_per_virtual_pipeline_stage', 'virtual_pipeline_model_parallel_size', - 'masked_softmax_fusion', 'bias_gelu_fusion', 'bias_dropout_fusion', - 'sequence_parallel', 'async_tensor_model_parallel_allreduce', - 'no_load_optim', 'no_load_rng', 'no_save_optim', 'no_save_rng', - 'vocab_file', 'tokenizer_model', - 'save_interval', 'save', - 'perform_initialization', 'use_cpu_initialization', - 'recompute_granularity', 'recompute_num_layers', 'recompute_method', - 'encoder_num_layers', 'encoder_seq_length', - 'distribute_saved_activations', - 'train_iters', 'lr_decay_iters', 'lr_warmup_iters', 'lr_warmup_fraction', - 'start_weight_decay', 'end_weight_decay', 'bf16', 'fp16', - 'ckpt_format', - ] - - for arg, value in vars(md.checkpoint_args).items(): - if arg in args_to_keep: - continue - if not hasattr(margs, arg): - print(f"Checkpoint had argument {arg} but new arguments does not have this.") - continue - if getattr(margs, arg) != value: - print(f"Overwriting default {arg} value {getattr(margs, arg)} with value from checkpoint {value}.") - setattr(margs, arg, value) - - validate_args(margs) - - # Use MLM models. - margs.use_legacy_models = True - margs.transformer_impl = args.saver_transformer_impl - - # Do not instantiate Tensorboard - margs.tensorboard_dir = None - - set_global_variables(margs, build_tokenizer=False) - - # margs = megatron args - margs = get_args() - - if hasattr(md, 'consumed_train_samples'): - margs.consumed_train_samples = md.consumed_train_samples - margs.consumed_valid_samples = md.consumed_valid_samples - print(f"Setting consumed_train_samples to {margs.consumed_train_samples}" - f" and consumed_valid_samples to {margs.consumed_valid_samples}") - else: - print("consumed_train_samples not provided.") - - # Determine how to make our models - if md.model_type == 'GPT': - from pretrain_gpt import model_provider - margs.model_type = ModelType.encoder_or_decoder - elif md.model_type == 'BERT': - from pretrain_bert import model_provider - margs.model_type = ModelType.encoder_or_decoder - else: - raise Exception(f'unrecognized model type: {args.model_type}') - - def get_models(count, dtype, pre_process, post_process): - models = [model_provider(pre_process, post_process).to(dtype) for _ in range(count)] - return models - - # fake initializing distributed - mpu.set_tensor_model_parallel_world_size(args.target_tensor_parallel_size) - mpu.set_pipeline_model_parallel_world_size(args.target_pipeline_parallel_size) - mpu.set_tensor_model_parallel_rank(0) - mpu.set_pipeline_model_parallel_rank(0) - # fused_kernels.load(margs) - - # Embeddings - # ----------- - embeddings_msg = queue_get("embeddings") - - pos_embed = None - if md.position_embedding_type == 'learned_absolute': - pos_embed = embeddings_msg.pop("position embeddings") - orig_word_embed = embeddings_msg.pop("word embeddings") - check_message(embeddings_msg) - - # Deal with padding - if md.true_vocab_size is not None: - # figure out what our padded vocab size is - orig_vocab_size = orig_word_embed.shape[0] - margs.padded_vocab_size = _vocab_size_with_padding(md.true_vocab_size, margs) - - # Cut out extra padding we don't need - if orig_vocab_size > margs.padded_vocab_size: - full_word_embed = orig_word_embed[0:margs.padded_vocab_size, :] - - # Expanding embedding to larger size by replicating final entry - elif orig_vocab_size < margs.padded_vocab_size: - padding_size = margs.padded_vocab_size - 
orig_vocab_size - - full_word_embed = torch.cat(( - orig_word_embed, - orig_word_embed[-1].unsqueeze(0).expand(padding_size, -1))) - - # Same size! - else: - full_word_embed = orig_word_embed - else: - print("Original vocab size not specified, leaving embedding table as-is. " - "If you've changed the tensor parallel size this could cause problems.") - margs.padded_vocab_size = orig_word_embed.shape[0] - full_word_embed = orig_word_embed - - # Split into new tensor model parallel sizes - out_word_embed = torch.chunk(full_word_embed, args.target_tensor_parallel_size, dim=0) - - # Make models for first pipeline stage and fill in embeddings - mpu.set_pipeline_model_parallel_rank(0) - post_process = args.target_pipeline_parallel_size == 1 - models = get_models(args.target_tensor_parallel_size, md.params_dtype, True, post_process) - for tp_rank, model in enumerate(models): - model.language_model.embedding.word_embeddings.weight.data.copy_(out_word_embed[tp_rank]) - if pos_embed is not None: - model.language_model.embedding.position_embeddings.weight.data.copy_(pos_embed) - else: - assert not hasattr(model.language_model.embedding, "position_embeddings") - - # Transformer layers - # ------------------- - total_layer_num = 0 - for pp_rank in range(args.target_pipeline_parallel_size): - # For later pipeline parallel ranks, make the new models - if pp_rank > 0: - mpu.set_pipeline_model_parallel_rank(pp_rank) - post_process = pp_rank == args.target_pipeline_parallel_size - 1 - models = get_models(args.target_tensor_parallel_size, md.params_dtype, False, post_process) - - for layer in range(len(models[0].language_model.encoder.layers)): - msg = queue_get(f"transformer layer {total_layer_num}") - - # duplicated tensors - input_norm_weight = msg.pop("input norm weight") - if md.norm_has_bias: - input_norm_bias = msg.pop("input norm bias") - post_norm_weight = msg.pop("post norm weight") - if md.norm_has_bias: - post_norm_bias = msg.pop("post norm bias") - if md.linear_bias: - dense_bias = msg.pop("dense bias") - mlp_l1_bias = msg.pop("mlp l1 bias") - - # Split up the parallel tensors - qkv_weight = torch.chunk(msg.pop("qkv weight"), args.target_tensor_parallel_size, dim=0) - dense_weight = torch.chunk(msg.pop("dense weight"), args.target_tensor_parallel_size, dim=1) - mlp_l1_weight = torch.chunk(msg.pop("mlp l1 weight"), args.target_tensor_parallel_size, dim=1) - - # Special handling for swiglu - if md.swiglu: - mlp_l0_weight_W = torch.chunk(msg.pop("mlp l0 weight W"), args.target_tensor_parallel_size, dim=0) - mlp_l0_weight_V = torch.chunk(msg.pop("mlp l0 weight V"), args.target_tensor_parallel_size, dim=0) - mlp_l0_weight = [torch.cat(weights, dim=0) for weights in zip(mlp_l0_weight_W, mlp_l0_weight_V)] - else: - mlp_l0_weight = torch.chunk(msg.pop("mlp l0 weight"), args.target_tensor_parallel_size, dim=0) - - if md.qkv_bias: - qkv_bias = torch.chunk(msg.pop("qkv bias"), args.target_tensor_parallel_size, dim=0) - if md.linear_bias: - if md.swiglu: - mlp_l0_bias_W = torch.chunk(msg.pop("mlp l0 bias W"), args.target_tensor_parallel_size, dim=0) - mlp_l0_bias_V = torch.chunk(msg.pop("mlp l0 bias V"), args.target_tensor_parallel_size, dim=0) - mlp_l0_bias = [torch.cat(bias, dim=0) for bias in zip(mlp_l0_bias_W, mlp_l0_bias_V)] - else: - mlp_l0_bias = torch.chunk(msg.pop("mlp l0 bias"), args.target_tensor_parallel_size, dim=0) - - # Save them to the model - for tp_rank in range(args.target_tensor_parallel_size): - l = models[tp_rank].language_model.encoder.layers[layer] - 
l.input_norm.weight.data.copy_(input_norm_weight) - if md.norm_has_bias: - l.input_norm.bias.data.copy_(input_norm_bias) - l.self_attention.query_key_value.weight.data.copy_(qkv_weight[tp_rank]) - l.self_attention.dense.weight.data.copy_(dense_weight[tp_rank]) - l.post_attention_norm.weight.data.copy_(post_norm_weight) - if md.norm_has_bias: - l.post_attention_norm.bias.data.copy_(post_norm_bias) - l.mlp.dense_h_to_4h.weight.data.copy_(mlp_l0_weight[tp_rank]) - l.mlp.dense_4h_to_h.weight.data.copy_(mlp_l1_weight[tp_rank]) - if md.qkv_bias: - l.self_attention.query_key_value.bias.data.copy_(qkv_bias[tp_rank]) - if md.linear_bias: - l.self_attention.dense.bias.data.copy_(dense_bias) - l.mlp.dense_h_to_4h.bias.data.copy_(mlp_l0_bias[tp_rank]) - l.mlp.dense_4h_to_h.bias.data.copy_(mlp_l1_bias) - - total_layer_num = total_layer_num + 1 - check_message(msg) - - if post_process: - msg = queue_get("final norm") - final_norm_weight = msg.pop("weight") - if md.norm_has_bias: - final_norm_bias = msg.pop("bias") - for tp_rank in range(args.target_tensor_parallel_size): - models[tp_rank].language_model.encoder.final_norm.weight.data.copy_(final_norm_weight) - if md.norm_has_bias: - models[tp_rank].language_model.encoder.final_norm.bias.data.copy_(final_norm_bias) - if pp_rank != 0 and not md.output_layer: - # Copy word embeddings to final pipeline rank - models[tp_rank].word_embeddings.weight.data.copy_(out_word_embed[tp_rank]) - del final_norm_weight - if md.norm_has_bias: - del final_norm_bias - check_message(msg) - - if md.output_layer: - msg = queue_get("output layer") - if not hasattr(models[0].language_model, 'output_layer'): - print("ERROR: got an output layer, but model does not have one") - exit(1) - output_layer_weight = torch.chunk(msg.pop("weight"), args.target_tensor_parallel_size, dim=0) - for tp_rank in range(args.target_tensor_parallel_size): - models[tp_rank].language_model.output_layer.weight.data.copy_(output_layer_weight[tp_rank]) - del output_layer_weight - check_message(msg) - - msg = queue_get() - if msg != "done" and msg["name"] == "pooler": - if not hasattr(models[0].language_model, 'pooler'): - print("ERROR: got a pooler, but model does not have one") - exit(1) - print("received pooler") - pooler_weight = msg.pop("weight") - pooler_bias = msg.pop("bias") - for tp_rank in range(args.target_tensor_parallel_size): - models[tp_rank].language_model.pooler.dense.weight.data.copy_(pooler_weight) - models[tp_rank].language_model.pooler.dense.bias.data.copy_(pooler_bias) - del pooler_weight - del pooler_bias - check_message(msg) - msg = queue_get() - - if msg != "done" and msg["name"] == "lm head": - if not hasattr(models[0], 'lm_head'): - print("ERROR: got an lm head, but model does not have one") - exit(1) - print("received lm head") - lm_head_dense_weight = msg.pop("dense weight") - lm_head_dense_bias = msg.pop("dense bias") - lm_head_norm_weight = msg.pop("norm weight") - if md.norm_has_bias: - lm_head_norm_bias = msg.pop("norm bias") - for tp_rank in range(args.target_tensor_parallel_size): - models[tp_rank].lm_head.dense.weight.data.copy_(lm_head_dense_weight) - models[tp_rank].lm_head.dense.bias.data.copy_(lm_head_dense_bias) - models[tp_rank].lm_head.norm.weight.data.copy_(lm_head_norm_weight) - if md.norm_has_bias: - models[tp_rank].lm_head.norm.bias.data.copy_(lm_head_norm_bias) - check_message(msg) - msg = queue_get() - - if msg != "done" and msg["name"] == "binary head": - if not hasattr(models[0], 'binary_head'): - print("ERROR: got a binary head, but model does not have 
one") - exit(1) - print("received binary head") - binary_head_weight = msg.pop("weight") - binary_head_bias = msg.pop("bias") - for tp_rank in range(args.target_tensor_parallel_size): - models[tp_rank].binary_head.weight.data.copy_(binary_head_weight) - models[tp_rank].binary_head.bias.data.copy_(binary_head_bias) - check_message(msg) - msg = queue_get() - - if msg != "done": - print("ERROR: got some more data but was expecting to be done") - - for tp_rank in range(args.target_tensor_parallel_size): - mpu.set_tensor_model_parallel_rank(tp_rank) - save_checkpoint(md.iteration, [models[tp_rank]], None, None, - num_floating_point_operations_so_far=0) - print("Done!") +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +import os +import sys +import torch + + +def add_arguments(parser): + group = parser.add_argument_group(title='Megatron saver') + + group.add_argument('--megatron-path', type=str, default=None, + help='Base directory of Megatron repository') + + group.add_argument('--target-tensor-parallel-size', type=int, + help='Target tensor model parallel size, defaults to the tensor parallel size ' + 'in the input checkpoint if provided by the loader, otherwise to 1') + group.add_argument('--target-pipeline-parallel-size', type=int, + help='Target tensor model parallel size, default to the pipeline parall size ' + 'in the input checkpoint if provided by the loader, otherwise to 1') + group.add_argument('--saver-transformer-impl', default='local', + choices=['local', 'transformer_engine'], + help='Which Transformer implementation to use.') + +def save_checkpoint(queue, args): + # Search in directory above this + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), + os.path.pardir, + os.path.pardir))) + if args.megatron_path is not None: + sys.path.insert(0, args.megatron_path) + + try: + from megatron.training.arguments import (parse_args, validate_args) + from megatron.training.checkpointing import save_checkpoint + from megatron.training.global_vars import set_global_variables, get_args + from megatron.core.enums import ModelType + from megatron.training.tokenizer.tokenizer import _vocab_size_with_padding + from megatron.legacy import fused_kernels + from megatron.core import mpu + except ModuleNotFoundError: + print("Unable to import Megatron, please specify the path to Megatron using --megatron-path. Exiting.") + exit(1) + + def queue_get(name=None): + val = queue.get() + if val == "exit": + print("Loader exited, exiting saver") + exit(1) + if name is not None and args.checking and val["name"] != name: + val_name = val["name"] + print(f'Unexpected message. Expecting "{name}" but got "{val_name}". Exiting saver.') + exit(1) + if name is not None: + print(f"received {name}") + return val + + def check_message(msg): + if not args.checking: + return + msg_name = msg.pop("name") + if len(msg.keys()) > 0: + print(f"Unexpected values in {msg_name}:") + for key in msg.keys(): + print(f" {key}") + print(f"Exiting. If you want to ignore this, use the argument --no-checking.") + exit(1) + + md = queue_get() + + if args.target_tensor_parallel_size is None: + if hasattr(md, 'previous_tensor_parallel_size'): + args.target_tensor_parallel_size = md.previous_tensor_parallel_size + else: + print( + "loader did not provide a tensor parallel size and --target-tensor-parallel-size not provided on command line. 
" + "Default to 1.") + args.target_tensor_parallel_size = 1 + + if args.target_pipeline_parallel_size is None: + if hasattr(md, 'previous_pipeline_parallel_size'): + args.target_pipeline_parallel_size = md.previous_pipeline_parallel_size + else: + print( + "loader did not provide a pipeline parallel size and --target-pipeline-parallel-size not provided on command line. " + "Default to 1.") + args.target_pipeline_parallel_size = 1 + + # Arguments do sanity checks on the world size, but we don't care, + # so trick it into thinking we are plenty of processes + if args.target_tensor_parallel_size is not None and args.target_pipeline_parallel_size is not None: + os.environ["WORLD_SIZE"] = f'{args.target_tensor_parallel_size * args.target_pipeline_parallel_size}' + + # We want all arguments to come from us + sys.argv = ['script.py', + '--num-layers', str(md.num_layers), + '--hidden-size', str(md.hidden_size), + '--seq-length', str(md.seq_length), + '--num-attention-heads', str(md.num_attention_heads), + '--max-position-embeddings', str(md.max_position_embeddings), + '--position-embedding-type', str(md.position_embedding_type), + '--tokenizer-type', str(md.tokenizer_type), + '--tensor-model-parallel-size', str(args.target_tensor_parallel_size), + '--pipeline-model-parallel-size', str(args.target_pipeline_parallel_size), + '--no-masked-softmax-fusion', + '--no-bias-gelu-fusion', + '--no-bias-dropout-fusion', + '--no-async-tensor-model-parallel-allreduce', + '--use-cpu-initialization', + '--micro-batch-size', '1', + '--no-load-optim', + '--no-load-rng', + '--no-save-optim', + '--no-save-rng', + '--no-initialization', + '--save-interval', '1', + '--save', args.save_dir, + '--ckpt-format', 'torch', # only 'torch' supported for conversion + '--no-one-logger', + ] + + if md.make_vocab_size_divisible_by is not None: + sys.argv.extend(['--make-vocab-size-divisible-by', str(md.make_vocab_size_divisible_by)]) + if md.params_dtype == torch.float16: + sys.argv.append('--fp16') + elif md.params_dtype == torch.bfloat16: + sys.argv.append('--bf16') + + if md.output_layer: + sys.argv.append('--untie-embeddings-and-output-weights') + if not md.linear_bias: + sys.argv.append('--disable-bias-linear') + + if md.model_type == 'BERT' and not md.bert_binary_head: + sys.argv.append('--bert-no-binary-head') + + margs = parse_args() + + if hasattr(md, 'checkpoint_args'): + # These are arguments that we are either changing, or cause problems for validation if they are set + # Note that some of these deal with T5 so will need to be changed if we support T5. 
+ args_to_keep = ['tensor_model_parallel_size', 'pipeline_model_parallel_size', 'world_size', 'params_dtype', + 'num_layers_per_virtual_pipeline_stage', 'virtual_pipeline_model_parallel_size', + 'masked_softmax_fusion', 'bias_gelu_fusion', 'bias_dropout_fusion', + 'sequence_parallel', 'async_tensor_model_parallel_allreduce', + 'no_load_optim', 'no_load_rng', 'no_save_optim', 'no_save_rng', + 'vocab_file', 'tokenizer_model', + 'save_interval', 'save', + 'perform_initialization', 'use_cpu_initialization', + 'recompute_granularity', 'recompute_num_layers', 'recompute_method', + 'encoder_num_layers', 'encoder_seq_length', + 'distribute_saved_activations', + 'train_iters', 'lr_decay_iters', 'lr_warmup_iters', 'lr_warmup_fraction', + 'start_weight_decay', 'end_weight_decay', 'bf16', 'fp16', + 'ckpt_format', + ] + + for arg, value in vars(md.checkpoint_args).items(): + if arg in args_to_keep: + continue + if not hasattr(margs, arg): + print(f"Checkpoint had argument {arg} but new arguments does not have this.") + continue + if getattr(margs, arg) != value: + print(f"Overwriting default {arg} value {getattr(margs, arg)} with value from checkpoint {value}.") + setattr(margs, arg, value) + + validate_args(margs) + + # Use MLM models. + margs.use_legacy_models = True + margs.transformer_impl = args.saver_transformer_impl + + # Do not instantiate Tensorboard + margs.tensorboard_dir = None + + set_global_variables(margs, build_tokenizer=False) + + # margs = megatron args + margs = get_args() + + if hasattr(md, 'consumed_train_samples'): + margs.consumed_train_samples = md.consumed_train_samples + margs.consumed_valid_samples = md.consumed_valid_samples + print(f"Setting consumed_train_samples to {margs.consumed_train_samples}" + f" and consumed_valid_samples to {margs.consumed_valid_samples}") + else: + print("consumed_train_samples not provided.") + + # Determine how to make our models + if md.model_type == 'GPT': + from pretrain_gpt import model_provider + margs.model_type = ModelType.encoder_or_decoder + elif md.model_type == 'BERT': + from pretrain_bert import model_provider + margs.model_type = ModelType.encoder_or_decoder + else: + raise Exception(f'unrecognized model type: {args.model_type}') + + def get_models(count, dtype, pre_process, post_process): + models = [model_provider(pre_process, post_process).to(dtype) for _ in range(count)] + return models + + # fake initializing distributed + mpu.set_tensor_model_parallel_world_size(args.target_tensor_parallel_size) + mpu.set_pipeline_model_parallel_world_size(args.target_pipeline_parallel_size) + mpu.set_tensor_model_parallel_rank(0) + mpu.set_pipeline_model_parallel_rank(0) + fused_kernels.load(margs) + + # Embeddings + # ----------- + embeddings_msg = queue_get("embeddings") + + pos_embed = None + if md.position_embedding_type == 'learned_absolute': + pos_embed = embeddings_msg.pop("position embeddings") + orig_word_embed = embeddings_msg.pop("word embeddings") + check_message(embeddings_msg) + + # Deal with padding + if md.true_vocab_size is not None: + # figure out what our padded vocab size is + orig_vocab_size = orig_word_embed.shape[0] + margs.padded_vocab_size = _vocab_size_with_padding(md.true_vocab_size, margs) + + # Cut out extra padding we don't need + if orig_vocab_size > margs.padded_vocab_size: + full_word_embed = orig_word_embed[0:margs.padded_vocab_size, :] + + # Expanding embedding to larger size by replicating final entry + elif orig_vocab_size < margs.padded_vocab_size: + padding_size = margs.padded_vocab_size - 
orig_vocab_size + + full_word_embed = torch.cat(( + orig_word_embed, + orig_word_embed[-1].unsqueeze(0).expand(padding_size, -1))) + + # Same size! + else: + full_word_embed = orig_word_embed + else: + print("Original vocab size not specified, leaving embedding table as-is. " + "If you've changed the tensor parallel size this could cause problems.") + margs.padded_vocab_size = orig_word_embed.shape[0] + full_word_embed = orig_word_embed + + # Split into new tensor model parallel sizes + out_word_embed = torch.chunk(full_word_embed, args.target_tensor_parallel_size, dim=0) + + # Make models for first pipeline stage and fill in embeddings + mpu.set_pipeline_model_parallel_rank(0) + post_process = args.target_pipeline_parallel_size == 1 + models = get_models(args.target_tensor_parallel_size, md.params_dtype, True, post_process) + for tp_rank, model in enumerate(models): + model.language_model.embedding.word_embeddings.weight.data.copy_(out_word_embed[tp_rank]) + if pos_embed is not None: + model.language_model.embedding.position_embeddings.weight.data.copy_(pos_embed) + else: + assert not hasattr(model.language_model.embedding, "position_embeddings") + + # Transformer layers + # ------------------- + total_layer_num = 0 + for pp_rank in range(args.target_pipeline_parallel_size): + # For later pipeline parallel ranks, make the new models + if pp_rank > 0: + mpu.set_pipeline_model_parallel_rank(pp_rank) + post_process = pp_rank == args.target_pipeline_parallel_size - 1 + models = get_models(args.target_tensor_parallel_size, md.params_dtype, False, post_process) + + for layer in range(len(models[0].language_model.encoder.layers)): + msg = queue_get(f"transformer layer {total_layer_num}") + + # duplicated tensors + input_norm_weight = msg.pop("input norm weight") + if md.norm_has_bias: + input_norm_bias = msg.pop("input norm bias") + post_norm_weight = msg.pop("post norm weight") + if md.norm_has_bias: + post_norm_bias = msg.pop("post norm bias") + if md.linear_bias: + dense_bias = msg.pop("dense bias") + mlp_l1_bias = msg.pop("mlp l1 bias") + + # Split up the parallel tensors + qkv_weight = torch.chunk(msg.pop("qkv weight"), args.target_tensor_parallel_size, dim=0) + dense_weight = torch.chunk(msg.pop("dense weight"), args.target_tensor_parallel_size, dim=1) + mlp_l1_weight = torch.chunk(msg.pop("mlp l1 weight"), args.target_tensor_parallel_size, dim=1) + + # Special handling for swiglu + if md.swiglu: + mlp_l0_weight_W = torch.chunk(msg.pop("mlp l0 weight W"), args.target_tensor_parallel_size, dim=0) + mlp_l0_weight_V = torch.chunk(msg.pop("mlp l0 weight V"), args.target_tensor_parallel_size, dim=0) + mlp_l0_weight = [torch.cat(weights, dim=0) for weights in zip(mlp_l0_weight_W, mlp_l0_weight_V)] + else: + mlp_l0_weight = torch.chunk(msg.pop("mlp l0 weight"), args.target_tensor_parallel_size, dim=0) + + if md.qkv_bias: + qkv_bias = torch.chunk(msg.pop("qkv bias"), args.target_tensor_parallel_size, dim=0) + if md.linear_bias: + if md.swiglu: + mlp_l0_bias_W = torch.chunk(msg.pop("mlp l0 bias W"), args.target_tensor_parallel_size, dim=0) + mlp_l0_bias_V = torch.chunk(msg.pop("mlp l0 bias V"), args.target_tensor_parallel_size, dim=0) + mlp_l0_bias = [torch.cat(bias, dim=0) for bias in zip(mlp_l0_bias_W, mlp_l0_bias_V)] + else: + mlp_l0_bias = torch.chunk(msg.pop("mlp l0 bias"), args.target_tensor_parallel_size, dim=0) + + # Save them to the model + for tp_rank in range(args.target_tensor_parallel_size): + l = models[tp_rank].language_model.encoder.layers[layer] + 
l.input_norm.weight.data.copy_(input_norm_weight) + if md.norm_has_bias: + l.input_norm.bias.data.copy_(input_norm_bias) + l.self_attention.query_key_value.weight.data.copy_(qkv_weight[tp_rank]) + l.self_attention.dense.weight.data.copy_(dense_weight[tp_rank]) + l.post_attention_norm.weight.data.copy_(post_norm_weight) + if md.norm_has_bias: + l.post_attention_norm.bias.data.copy_(post_norm_bias) + l.mlp.dense_h_to_4h.weight.data.copy_(mlp_l0_weight[tp_rank]) + l.mlp.dense_4h_to_h.weight.data.copy_(mlp_l1_weight[tp_rank]) + if md.qkv_bias: + l.self_attention.query_key_value.bias.data.copy_(qkv_bias[tp_rank]) + if md.linear_bias: + l.self_attention.dense.bias.data.copy_(dense_bias) + l.mlp.dense_h_to_4h.bias.data.copy_(mlp_l0_bias[tp_rank]) + l.mlp.dense_4h_to_h.bias.data.copy_(mlp_l1_bias) + + total_layer_num = total_layer_num + 1 + check_message(msg) + + if post_process: + msg = queue_get("final norm") + final_norm_weight = msg.pop("weight") + if md.norm_has_bias: + final_norm_bias = msg.pop("bias") + for tp_rank in range(args.target_tensor_parallel_size): + models[tp_rank].language_model.encoder.final_norm.weight.data.copy_(final_norm_weight) + if md.norm_has_bias: + models[tp_rank].language_model.encoder.final_norm.bias.data.copy_(final_norm_bias) + if pp_rank != 0 and not md.output_layer: + # Copy word embeddings to final pipeline rank + models[tp_rank].word_embeddings.weight.data.copy_(out_word_embed[tp_rank]) + del final_norm_weight + if md.norm_has_bias: + del final_norm_bias + check_message(msg) + + if md.output_layer: + msg = queue_get("output layer") + if not hasattr(models[0].language_model, 'output_layer'): + print("ERROR: got an output layer, but model does not have one") + exit(1) + output_layer_weight = torch.chunk(msg.pop("weight"), args.target_tensor_parallel_size, dim=0) + for tp_rank in range(args.target_tensor_parallel_size): + models[tp_rank].language_model.output_layer.weight.data.copy_(output_layer_weight[tp_rank]) + del output_layer_weight + check_message(msg) + + msg = queue_get() + if msg != "done" and msg["name"] == "pooler": + if not hasattr(models[0].language_model, 'pooler'): + print("ERROR: got a pooler, but model does not have one") + exit(1) + print("received pooler") + pooler_weight = msg.pop("weight") + pooler_bias = msg.pop("bias") + for tp_rank in range(args.target_tensor_parallel_size): + models[tp_rank].language_model.pooler.dense.weight.data.copy_(pooler_weight) + models[tp_rank].language_model.pooler.dense.bias.data.copy_(pooler_bias) + del pooler_weight + del pooler_bias + check_message(msg) + msg = queue_get() + + if msg != "done" and msg["name"] == "lm head": + if not hasattr(models[0], 'lm_head'): + print("ERROR: got an lm head, but model does not have one") + exit(1) + print("received lm head") + lm_head_dense_weight = msg.pop("dense weight") + lm_head_dense_bias = msg.pop("dense bias") + lm_head_norm_weight = msg.pop("norm weight") + if md.norm_has_bias: + lm_head_norm_bias = msg.pop("norm bias") + for tp_rank in range(args.target_tensor_parallel_size): + models[tp_rank].lm_head.dense.weight.data.copy_(lm_head_dense_weight) + models[tp_rank].lm_head.dense.bias.data.copy_(lm_head_dense_bias) + models[tp_rank].lm_head.norm.weight.data.copy_(lm_head_norm_weight) + if md.norm_has_bias: + models[tp_rank].lm_head.norm.bias.data.copy_(lm_head_norm_bias) + check_message(msg) + msg = queue_get() + + if msg != "done" and msg["name"] == "binary head": + if not hasattr(models[0], 'binary_head'): + print("ERROR: got a binary head, but model does not have 
one") + exit(1) + print("received binary head") + binary_head_weight = msg.pop("weight") + binary_head_bias = msg.pop("bias") + for tp_rank in range(args.target_tensor_parallel_size): + models[tp_rank].binary_head.weight.data.copy_(binary_head_weight) + models[tp_rank].binary_head.bias.data.copy_(binary_head_bias) + check_message(msg) + msg = queue_get() + + if msg != "done": + print("ERROR: got some more data but was expecting to be done") + + for tp_rank in range(args.target_tensor_parallel_size): + mpu.set_tensor_model_parallel_rank(tp_rank) + save_checkpoint(md.iteration, [models[tp_rank]], None, None, + num_floating_point_operations_so_far=0) + print("Done!") diff --git a/tools/checkpoint/schema_mcore.py b/tools/checkpoint/schema_core.py similarity index 89% rename from tools/checkpoint/schema_mcore.py rename to tools/checkpoint/schema_core.py index ef90ff0..92f0858 100644 --- a/tools/checkpoint/schema_mcore.py +++ b/tools/checkpoint/schema_core.py @@ -1,143 +1,143 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Mcore model schemas.""" - -import typing as T - -from schema_base import ModelSchema - - -def get_mcore_transformer_block_key(model_key): - return { - "GPT" : "decoder", - "BERT" : "encoder", - }[model_key] - - -class MCoreSchema(ModelSchema): - - def __init__(self, model_type, layer_schema): - block_key = get_mcore_transformer_block_key(model_type) - super().__init__({ - "embeddings" : { - "pos" : "embedding.position_embeddings.weight", - "word" : "embedding.word_embeddings.weight", - }, - "layer_prefix" : f"{block_key}.layers", - "layer" : layer_schema, - "final_norm" : { - "weight" : f"{block_key}.final_layernorm.weight", - "bias" : f"{block_key}.final_layernorm.bias", - }, - "output_layer" : { - "weight" : "output_layer.weight", - }, - "pooler" : { - "weight" : "pooler.dense.weight", - "bias" : "pooler.dense.bias", - }, - "lm_head" : { - "dense_weight" : "lm_head.dense.weight", - "dense_bias" : "lm_head.dense.bias", - "norm_weight" : "lm_head.layer_norm.weight", - "norm_bias" : "lm_head.layer_norm.bias", - }, - "binary_head" : { - "weight" : "binary_head.weight", - "bias" : "binary_head.bias", - }, - }) - - -class MCoreLocalSchema(MCoreSchema): - - def __init__(self, model_type): - super().__init__(model_type, layer_schema={ - - # Self attention. - "self_attn_norm_weight" : "input_layernorm.weight", - "self_attn_norm_bias" : "input_layernorm.bias", - "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", - "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", - "self_attn_proj_weight" : "self_attention.linear_proj.weight", - "self_attn_proj_bias" : "self_attention.linear_proj.bias", - - # MLP. - "mlp_norm_weight" : "pre_mlp_layernorm.weight", - "mlp_norm_bias" : "pre_mlp_layernorm.bias", - "mlp_fc1_weight" : "mlp.linear_fc1.weight", - "mlp_fc1_bias" : "mlp.linear_fc1.bias", - "mlp_fc2_weight" : "mlp.linear_fc2.weight", - "mlp_fc2_bias" : "mlp.linear_fc2.bias", - - }) - - -class MCoreTESchema(MCoreSchema): - - def __init__(self, model_type): - super().__init__(model_type, layer_schema={ - - # Self attention. - "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", - "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", - "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", - "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", - - "self_attn_proj_weight" : "self_attention.linear_proj.weight", - "self_attn_proj_bias" : "self_attention.linear_proj.bias", - - # MLP. 
- "mlp_norm_weight" : "mlp.linear_fc1.layer_norm_weight", - "mlp_norm_bias" : "mlp.linear_fc1.layer_norm_bias", - "mlp_fc1_weight" : "mlp.linear_fc1.weight", - "mlp_fc1_bias" : "mlp.linear_fc1.bias", - "mlp_fc2_weight" : "mlp.linear_fc2.weight", - "mlp_fc2_bias" : "mlp.linear_fc2.bias", - - }) - - -class MCoreMoETESchema(MCoreSchema): - - def __init__(self, model_type, num_experts, expert_model_parallel_size): - num_local_experts = num_experts // expert_model_parallel_size - super().__init__(model_type, layer_schema={ - - # Self attention. - "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", - "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", - - "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", - "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", - - "self_attn_proj_weight" : "self_attention.linear_proj.weight", - "self_attn_proj_bias" : "self_attention.linear_proj.bias", - - # MLP. - "mlp_norm_weight" : "pre_mlp_layernorm.weight", - "mlp_norm_bias" : "pre_mlp_layernorm.bias", - - "router_weight" : "mlp.router.weight", - - **{f"mlp_fc1_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc1.weight" for expert_idx in range(num_local_experts) }, - **{f"mlp_fc2_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc2.weight" for expert_idx in range(num_local_experts) }, - - }) - - -def get_model_schema( - model_type: T.Literal["GPT", "BERT"], - transformer_impl: T.Literal["transformer_engine", "local"], - num_experts: T.Optional[int] = None, - expert_model_parallel_size: T.Optional[int] = None, -) -> MCoreSchema: - if num_experts is not None and num_experts > 0: - # Only support TE setter for MOE - assert transformer_impl == "transformer_engine" - assert isinstance(expert_model_parallel_size, int) - return MCoreMoETESchema(model_type, num_experts, expert_model_parallel_size) - return { - "local" : MCoreLocalSchema, - "transformer_engine" : MCoreTESchema, - }[transformer_impl](model_type) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Core model schemas.""" + +import typing as T + +from schema_base import ModelSchema + + +def get_core_transformer_block_key(model_key): + return { + "GPT" : "decoder", + "BERT" : "encoder", + }[model_key] + + +class CoreSchema(ModelSchema): + + def __init__(self, model_type, layer_schema): + block_key = get_core_transformer_block_key(model_type) + super().__init__({ + "embeddings" : { + "pos" : "embedding.position_embeddings.weight", + "word" : "embedding.word_embeddings.weight", + }, + "layer_prefix" : f"{block_key}.layers", + "layer" : layer_schema, + "final_norm" : { + "weight" : f"{block_key}.final_layernorm.weight", + "bias" : f"{block_key}.final_layernorm.bias", + }, + "output_layer" : { + "weight" : "output_layer.weight", + }, + "pooler" : { + "weight" : "pooler.dense.weight", + "bias" : "pooler.dense.bias", + }, + "lm_head" : { + "dense_weight" : "lm_head.dense.weight", + "dense_bias" : "lm_head.dense.bias", + "norm_weight" : "lm_head.layer_norm.weight", + "norm_bias" : "lm_head.layer_norm.bias", + }, + "binary_head" : { + "weight" : "binary_head.weight", + "bias" : "binary_head.bias", + }, + }) + + +class CoreLocalSchema(CoreSchema): + + def __init__(self, model_type): + super().__init__(model_type, layer_schema={ + + # Self attention. 
+ "self_attn_norm_weight" : "input_layernorm.weight", + "self_attn_norm_bias" : "input_layernorm.bias", + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "pre_mlp_layernorm.weight", + "mlp_norm_bias" : "pre_mlp_layernorm.bias", + "mlp_fc1_weight" : "mlp.linear_fc1.weight", + "mlp_fc1_bias" : "mlp.linear_fc1.bias", + "mlp_fc2_weight" : "mlp.linear_fc2.weight", + "mlp_fc2_bias" : "mlp.linear_fc2.bias", + + }) + + +class CoreTESchema(CoreSchema): + + def __init__(self, model_type): + super().__init__(model_type, layer_schema={ + + # Self attention. + "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", + "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "mlp.linear_fc1.layer_norm_weight", + "mlp_norm_bias" : "mlp.linear_fc1.layer_norm_bias", + "mlp_fc1_weight" : "mlp.linear_fc1.weight", + "mlp_fc1_bias" : "mlp.linear_fc1.bias", + "mlp_fc2_weight" : "mlp.linear_fc2.weight", + "mlp_fc2_bias" : "mlp.linear_fc2.bias", + + }) + + +class CoreMoETESchema(CoreSchema): + + def __init__(self, model_type, num_experts, expert_model_parallel_size): + num_local_experts = num_experts // expert_model_parallel_size + super().__init__(model_type, layer_schema={ + + # Self attention. + "self_attn_norm_weight" : "self_attention.linear_qkv.layer_norm_weight", + "self_attn_norm_bias" : "self_attention.linear_qkv.layer_norm_bias", + + "self_attn_qkv_weight" : "self_attention.linear_qkv.weight", + "self_attn_qkv_bias" : "self_attention.linear_qkv.bias", + + "self_attn_proj_weight" : "self_attention.linear_proj.weight", + "self_attn_proj_bias" : "self_attention.linear_proj.bias", + + # MLP. + "mlp_norm_weight" : "pre_mlp_layernorm.weight", + "mlp_norm_bias" : "pre_mlp_layernorm.bias", + + "router_weight" : "mlp.router.weight", + + **{f"mlp_fc1_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc1.weight" for expert_idx in range(num_local_experts) }, + **{f"mlp_fc2_weight.{expert_idx}" : f"mlp.experts.local_experts.{expert_idx}.linear_fc2.weight" for expert_idx in range(num_local_experts) }, + + }) + + +def get_model_schema( + model_type: T.Literal["GPT", "BERT"], + transformer_impl: T.Literal["transformer_engine", "local"], + num_experts: T.Optional[int] = None, + expert_model_parallel_size: T.Optional[int] = None, +) -> CoreSchema: + if num_experts is not None and num_experts > 0: + # Only support TE setter for MOE + assert transformer_impl == "transformer_engine" + assert isinstance(expert_model_parallel_size, int) + return CoreMoETESchema(model_type, num_experts, expert_model_parallel_size) + return { + "local" : CoreLocalSchema, + "transformer_engine" : CoreTESchema, + }[transformer_impl](model_type) diff --git a/tools/copyright.sh b/tools/copyright.sh old mode 100644 new mode 100755 diff --git a/tools/preprocess_mmdata.py b/tools/preprocess_mmdata.py index 8ab2c2b..2091c7d 100644 --- a/tools/preprocess_mmdata.py +++ b/tools/preprocess_mmdata.py @@ -1,169 +1,169 @@ -# coding=utf-8 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
- -"""Processing text modality data for MultiModal pretraining.""" - -import argparse -import json -import multiprocessing -import os -import sys -import numpy as np -from torchvision.transforms import ToTensor -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.path.pardir))) -import time - -import torch -try: - from nltk.tokenize.punkt import PunktLanguageVars -except ImportError: - PunktLanguageVars = object # Fallback to the built-in object class - -from megatron.training.tokenizer import build_tokenizer -from megatron.core.datasets.indexed_dataset import IndexedDatasetBuilder - - -# https://stackoverflow.com/questions/33139531/preserve-empty-lines-with-nltks-punkt-tokenizer -class CustomLanguageVars(PunktLanguageVars): - - _period_context_fmt = r""" - \S* # some word material - %(SentEndChars)s # a potential sentence ending - \s* # <-- THIS is what I changed - (?=(?P - %(NonWord)s # either other punctuation - | - (?P\S+) # <-- Normally you would have \s+ here - ))""" - -class IdentitySplitter(object): - def tokenize(self, *text): - return text - -class Encoder(object): - def __init__(self, args): - self.args = args - - def initializer(self): - # Use Encoder class as a container for global data - Encoder.tokenizer = build_tokenizer(self.args) - - def encode(self, input_pair): - json_line, img_path = input_pair - data = json.loads(json_line) - key = "text" - text = data[key] - sentence_ids = Encoder.tokenizer.tokenize(text) - pad_len = self.args.pad_length - if len(sentence_ids) > 0 and self.args.append_eod: - sentence_ids = sentence_ids[:pad_len] - current_length = len(sentence_ids) - sentence_ids.extend([Encoder.tokenizer.eod for _ in range(max(0,pad_len-current_length))]) - - with open(img_path, "rb") as tf: - xs = bytearray(tf.read()) - img_pad = (4 - len(xs) % 4) % 4 - xs.extend([0 for _ in range(img_pad)]) - img_raw = np.frombuffer(xs, dtype=np.int32) - img_raw = np.insert(img_raw, 0, img_pad) - - return sentence_ids, img_raw, len(json_line) - -def get_args(): - parser = argparse.ArgumentParser() - group = parser.add_argument_group(title='input data') - group.add_argument('--input', type=str, required=True, - help='Path to input JSON') - group.add_argument('--input-image', type=str, required=True, - help='Path to input image folder') - - group.add_argument('--pad-length', type=int, required=True, - help='Pad length of preprocessed text') - - group.add_argument('--split-sentences', action='store_true', - help='Split documents into sentences.') - group.add_argument('--keep-newlines', action='store_true', - help='Keep newlines between sentences when splitting.') - - group = parser.add_argument_group(title='tokenizer') - group.add_argument('--tokenizer-type', type=str, required=True, - choices=['BertWordPieceLowerCase','BertWordPieceCase', - 'GPT2BPETokenizer', 'SentencePieceTokenizer', 'GPTSentencePieceTokenizer'], - help='What type of tokenizer to use.') - group.add_argument('--vocab-file', type=str, default=None, - help='Path to the vocab file') - group.add_argument('--merge-file', type=str, default=None, - help='Path to the BPE merge file (if necessary).') - group.add_argument('--append-eod', action='store_true', - help='Append an token to the end of a document.') - group.add_argument('--lang', type=str, default='english', - help='Language to use for NLTK-powered sentence splitting.') - group.add_argument('--tokenizer-model', type=str, default=None, - help='sentencepeice tokenizer model.') - - group = parser.add_argument_group(title='output data') - 
group.add_argument('--output-prefix', type=str, required=True, - help='Path to binary output file without suffix') - group = parser.add_argument_group(title='runtime') - group.add_argument('--workers', type=int, default=1, - help='Number of worker processes to launch') - group.add_argument('--log-interval', type=int, default=100, - help='Interval between progress updates') - args = parser.parse_args() - args.keep_empty = False - - # some default/dummy values for the tokenizer - args.rank = 0 - args.make_vocab_size_divisible_by = 128 - args.tensor_model_parallel_size = 1 - args.vocab_extra_ids = 0 - - return args - -def main(): - args = get_args() - startup_start = time.time() - - encoder = Encoder(args) - tokenizer = build_tokenizer(args) - pool = multiprocessing.Pool(args.workers, initializer=encoder.initializer) - - fin = open(args.input, 'r', encoding='utf-8') - img_paths = [os.path.join(args.input_image, basename) for basename in os.listdir(args.input_image)] - - encoded_docs = pool.imap(encoder.encode, zip(fin, img_paths), 25) - - print(f"Vocab size: {tokenizer.vocab_size}") - print(f"Output prefix: {args.output_prefix}") - - output_bin_files = "{}.bin".format(args.output_prefix) - output_idx_files = "{}.idx".format(args.output_prefix) - - builders = IndexedDatasetBuilder(output_bin_files, dtype=np.int32, multimodal=True) - - startup_end = time.time() - proc_start = time.time() - total_bytes_processed = 0 - - print("Time to startup:", startup_end - startup_start) - - for i, (sentence, img_raw, bytes_processed) in enumerate(encoded_docs, start=1): - total_bytes_processed += bytes_processed - builders.add_item(torch.IntTensor(sentence)) - builders.add_item(torch.from_numpy(img_raw), 1) - builders.end_document() - if i % args.log_interval == 0: - current = time.time() - elapsed = current - proc_start - mbs = total_bytes_processed/elapsed/1024/1024 - print(f"Processed {i} documents", - f"({i/elapsed} docs/s, {mbs} MB/s).", - file=sys.stderr) - - builders.finalize(output_idx_files) - - -if __name__ == '__main__': - main() - +# coding=utf-8 +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
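For reference, Encoder.encode above packs each image file into the int32 data stream by padding the byte length to a multiple of four and prepending the pad count. A hypothetical inverse helper (not part of the tool) that recovers the original bytes from a stored sample:

```python
import numpy as np

def unpack_image(img_raw: np.ndarray) -> bytes:
    """Reverse the packing done in Encoder.encode: element 0 is the number of
    zero bytes appended to reach a multiple of 4; the rest is the raw image
    file reinterpreted as int32."""
    pad = int(img_raw[0])
    data = img_raw[1:].astype(np.int32).tobytes()
    return data[:len(data) - pad] if pad else data
```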
+ +"""Processing text modality data for MultiModal pretraining.""" + +import argparse +import json +import multiprocessing +import os +import sys +import numpy as np +from torchvision.transforms import ToTensor +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + os.path.pardir))) +import time + +import torch +try: + from nltk.tokenize.punkt import PunktLanguageVars +except ImportError: + PunktLanguageVars = object # Fallback to the built-in object class + +from megatron.training.tokenizer import build_tokenizer +from megatron.core.datasets.indexed_dataset import IndexedDatasetBuilder + + +# https://stackoverflow.com/questions/33139531/preserve-empty-lines-with-nltks-punkt-tokenizer +class CustomLanguageVars(PunktLanguageVars): + + _period_context_fmt = r""" + \S* # some word material + %(SentEndChars)s # a potential sentence ending + \s* # <-- THIS is what I changed + (?=(?P + %(NonWord)s # either other punctuation + | + (?P\S+) # <-- Normally you would have \s+ here + ))""" + +class IdentitySplitter(object): + def tokenize(self, *text): + return text + +class Encoder(object): + def __init__(self, args): + self.args = args + + def initializer(self): + # Use Encoder class as a container for global data + Encoder.tokenizer = build_tokenizer(self.args) + + def encode(self, input_pair): + json_line, img_path = input_pair + data = json.loads(json_line) + key = "text" + text = data[key] + sentence_ids = Encoder.tokenizer.tokenize(text) + pad_len = self.args.pad_length + if len(sentence_ids) > 0 and self.args.append_eod: + sentence_ids = sentence_ids[:pad_len] + current_length = len(sentence_ids) + sentence_ids.extend([Encoder.tokenizer.eod for _ in range(max(0,pad_len-current_length))]) + + with open(img_path, "rb") as tf: + xs = bytearray(tf.read()) + img_pad = (4 - len(xs) % 4) % 4 + xs.extend([0 for _ in range(img_pad)]) + img_raw = np.frombuffer(xs, dtype=np.int32) + img_raw = np.insert(img_raw, 0, img_pad) + + return sentence_ids, img_raw, len(json_line) + +def get_args(): + parser = argparse.ArgumentParser() + group = parser.add_argument_group(title='input data') + group.add_argument('--input', type=str, required=True, + help='Path to input JSON') + group.add_argument('--input-image', type=str, required=True, + help='Path to input image folder') + + group.add_argument('--pad-length', type=int, required=True, + help='Pad length of preprocessed text') + + group.add_argument('--split-sentences', action='store_true', + help='Split documents into sentences.') + group.add_argument('--keep-newlines', action='store_true', + help='Keep newlines between sentences when splitting.') + + group = parser.add_argument_group(title='tokenizer') + group.add_argument('--tokenizer-type', type=str, required=True, + choices=['BertWordPieceLowerCase','BertWordPieceCase', + 'GPT2BPETokenizer', 'SentencePieceTokenizer', 'GPTSentencePieceTokenizer'], + help='What type of tokenizer to use.') + group.add_argument('--vocab-file', type=str, default=None, + help='Path to the vocab file') + group.add_argument('--merge-file', type=str, default=None, + help='Path to the BPE merge file (if necessary).') + group.add_argument('--append-eod', action='store_true', + help='Append an token to the end of a document.') + group.add_argument('--lang', type=str, default='english', + help='Language to use for NLTK-powered sentence splitting.') + group.add_argument('--tokenizer-model', type=str, default=None, + help='sentencepeice tokenizer model.') + + group = parser.add_argument_group(title='output data') + 
group.add_argument('--output-prefix', type=str, required=True, + help='Path to binary output file without suffix') + group = parser.add_argument_group(title='runtime') + group.add_argument('--workers', type=int, default=1, + help='Number of worker processes to launch') + group.add_argument('--log-interval', type=int, default=100, + help='Interval between progress updates') + args = parser.parse_args() + args.keep_empty = False + + # some default/dummy values for the tokenizer + args.rank = 0 + args.make_vocab_size_divisible_by = 128 + args.tensor_model_parallel_size = 1 + args.vocab_extra_ids = 0 + + return args + +def main(): + args = get_args() + startup_start = time.time() + + encoder = Encoder(args) + tokenizer = build_tokenizer(args) + pool = multiprocessing.Pool(args.workers, initializer=encoder.initializer) + + fin = open(args.input, 'r', encoding='utf-8') + img_paths = [os.path.join(args.input_image, basename) for basename in os.listdir(args.input_image)] + + encoded_docs = pool.imap(encoder.encode, zip(fin, img_paths), 25) + + print(f"Vocab size: {tokenizer.vocab_size}") + print(f"Output prefix: {args.output_prefix}") + + output_bin_files = "{}.bin".format(args.output_prefix) + output_idx_files = "{}.idx".format(args.output_prefix) + + builders = IndexedDatasetBuilder(output_bin_files, dtype=np.int32, multimodal=True) + + startup_end = time.time() + proc_start = time.time() + total_bytes_processed = 0 + + print("Time to startup:", startup_end - startup_start) + + for i, (sentence, img_raw, bytes_processed) in enumerate(encoded_docs, start=1): + total_bytes_processed += bytes_processed + builders.add_item(torch.IntTensor(sentence)) + builders.add_item(torch.from_numpy(img_raw), 1) + builders.end_document() + if i % args.log_interval == 0: + current = time.time() + elapsed = current - proc_start + mbs = total_bytes_processed/elapsed/1024/1024 + print(f"Processed {i} documents", + f"({i/elapsed} docs/s, {mbs} MB/s).", + file=sys.stderr) + + builders.finalize(output_idx_files) + + +if __name__ == '__main__': + main() + diff --git a/tools/retro/preprocess_data.py b/tools/retro/preprocess_data.py index 444a64e..1c0ff73 100644 --- a/tools/retro/preprocess_data.py +++ b/tools/retro/preprocess_data.py @@ -1,296 +1,295 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Preprocess data for Retro. - -Stages (see argument '--retro-tasks'): -- Build chunk database (DB). -- Build index (train, add). -- Query pretraining neighbors. 
-""" - -import json -import os -import sys -import torch - -from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder -from megatron.core.datasets.utils import get_blend_from_list -from megatron.core.datasets.retro.db import build_db -from megatron.core.datasets.retro.index import add_to_index, train_index -from megatron.core.datasets.retro.config import ( - RetroBertEmbedders, - RetroGPTChunkDatasets, - RetroPreprocessingConfig, - RetroTokenizers, -) -from megatron.core.datasets.retro.query.gpt_chunk_dataset import build_gpt_chunk_datasets_from_gpt_datasets -from megatron.core.datasets.retro.query.multi_split_gpt_dataset import ( - MultiSplitGPTDataset, - MultiSplitGPTDatasetConfig, -) -from megatron.core.datasets.retro.query.query import query_neighbors -from megatron.core.datasets.retro.query.utils import get_query_dir -from megatron.core.datasets.retro.utils import retro_makedir -from megatron.core.models.retro.utils import ( - get_config_path, - get_gpt_data_dir, -) -from megatron.training import get_args, initialize_megatron, print_rank_0 -from megatron.training.arguments import core_transformer_config_from_args -from megatron.training.tokenizer.tokenizer import ( - _BertWordPieceTokenizer, - _GPT2BPETokenizer, - _GPTSentencePieceTokenizer, -) -from megatron.training import get_train_valid_test_num_samples -from pretrain_gpt import is_dataset_built_on_rank -from tools.bert_embedding import BertEmbedder, DiskDataParallelBertEmbedder -from tools.retro.config_utils import add_config_args - - -def add_retro_args(parser): - group = parser.add_argument_group(title="Retro preprocessing") - add_config_args(group, RetroPreprocessingConfig) - return parser - - -def initialize_megatron_retro(): - '''Initialize megatron & save Retro config.''' - - # Prevent arguments.py from overriding preprocessing args. - project_dir_idx = sys.argv.index("--retro-project-dir") - retro_project_dir = sys.argv[project_dir_idx + 1] - del sys.argv[project_dir_idx] # delete key - del sys.argv[project_dir_idx] # delete value - - # Initialize. - initialize_megatron(extra_args_provider=add_retro_args) - - args = get_args() - args.retro_project_dir = retro_project_dir - - # Retro config. - config = get_retro_preprocessing_config() - - # Save retro config. - if config.retro_task_validate is None: - retro_makedir(config, config.retro_project_dir) - save_config(config) - - return config - - -def get_bert_embedders(config): - mem_embedder = BertEmbedder( - batch_size = config.retro_bert_batch_size, - max_bert_seq_length = config.retro_bert_max_chunk_length, - embedder_type = "megatron", - ) - return RetroBertEmbedders( - mem = mem_embedder, - disk = DiskDataParallelBertEmbedder(mem_embedder, config.retro_block_size), - ) - - -def get_gpt_chunk_datasets(config): - - args = get_args() - - # Dataset config. 
- data_dir = get_gpt_data_dir(config.retro_project_dir) - blend = list(config.retro_gpt_data_path) - for i in range(len(blend) - 1, -1, -2): - blend[i] = os.path.join(data_dir, blend[i]) - data_config = MultiSplitGPTDatasetConfig( - random_seed=config.retro_gpt_seed, - sequence_length=config.retro_gpt_seq_length, - blend=get_blend_from_list(blend), - blend_per_split=[ - get_blend_from_list(args.train_data_path), - get_blend_from_list(args.valid_data_path), - get_blend_from_list(args.test_data_path) - ], - renormalize_blend_weights=args.renormalize_blend_weights, - split=config.retro_gpt_split, - split_preprocessing=config.retro_gpt_split, - path_to_cache=config.retro_gpt_data_cache_path, - return_document_ids=True, - tokenizer=config.retro_tokenizers.gpt, - reset_position_ids=args.reset_position_ids, - reset_attention_mask=args.reset_attention_mask, - eod_mask_loss=args.eod_mask_loss, - ) - - # GPT datasets. - print_rank_0(" > multi-split gpt datasets.") - train_valid_test_num_samples = get_train_valid_test_num_samples() - train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( - MultiSplitGPTDataset, - train_valid_test_num_samples, - is_dataset_built_on_rank, - data_config, - ).build() - - gpt_datasets = { - "train" : (train_ds, train_valid_test_num_samples[0]), - "valid" : (valid_ds, train_valid_test_num_samples[1]), - "test" : (test_ds, train_valid_test_num_samples[2]), - } - - # Chunk datasets. - chunk_datasets = build_gpt_chunk_datasets_from_gpt_datasets( - project_dir=config.retro_project_dir, - gpt_datasets=gpt_datasets, - sample_length=config.retro_gpt_seq_length, - chunk_length=config.retro_gpt_chunk_length, - ) - chunk_datasets = RetroGPTChunkDatasets(**chunk_datasets) - - return chunk_datasets - - -def get_gpt_tokenizer(config): - '''GPT (BPE) tokenizer.''' - tokenizer_type = config.retro_gpt_tokenizer_type - if tokenizer_type == "GPT2BPETokenizer": - assert config.retro_gpt_vocab_file and config.retro_gpt_merge_file - return _GPT2BPETokenizer( - vocab_file=os.path.join( - config.retro_project_dir, - config.retro_gpt_vocab_file, - ), - merge_file=os.path.join( - config.retro_project_dir, - config.retro_gpt_merge_file, - ), - ) - elif tokenizer_type == 'GPTSentencePieceTokenizer': - assert config.retro_gpt_tokenizer_model is not None - return _GPTSentencePieceTokenizer(os.path.join( - config.retro_project_dir, - config.retro_gpt_tokenizer_model, - )) - else: - raise Exception("unrecognized gpt tokenizer, '%s'." % tokenizer_type) - - -def get_bert_tokenizer(config): - '''Bert (Wordpiece) tokenizer.''' - lower_case = { - "BertWordPieceLowerCase" : True, - "BertWordPieceCase" : False, - }[config.retro_bert_tokenizer_type] - return _BertWordPieceTokenizer( - vocab_file=os.path.join( - config.retro_project_dir, - config.retro_bert_vocab_file, - ), - lower_case=lower_case, - ) - - -def get_tokenizers(config): - return RetroTokenizers( - gpt = get_gpt_tokenizer(config), - bert = get_bert_tokenizer(config), - ) - - -def get_retro_preprocessing_config(): - - # Arguments. - args = get_args() - - # Retro config. - config = core_transformer_config_from_args( - args, config_class=RetroPreprocessingConfig) - - # Add tools. - config.retro_tokenizers = get_tokenizers(config) - config.retro_bert_embedders = get_bert_embedders(config) - config.retro_gpt_chunk_datasets = get_gpt_chunk_datasets(config) - - return config - - -def save_config(config): - '''Save copy of config within retro project dir.''' - - if torch.distributed.get_rank() == 0: - - # GPT config + block size. 
- config_subset = { - k:v for k,v in vars(config).items() - if k.startswith("retro_gpt") and k != "retro_gpt_chunk_datasets" - } - config_subset["retro_block_size"] = config.retro_block_size - - # Bert config. - config_subset["retro_bert_tokenizer_type"] = config.retro_bert_tokenizer_type - config_subset["retro_bert_vocab_file"] = config.retro_bert_vocab_file - - # Neighbor directories. - query_dir = get_query_dir(config.retro_project_dir) - config_subset["retro_neighbor_dirs"] = { - k : (os.path.relpath(v["neighbor_dir"], query_dir) if v is not None else None) - for k, v in vars(config.retro_gpt_chunk_datasets).items() - } - - # Save. - config_path = get_config_path(config.retro_project_dir) - with open(config_path, "w") as f: - json.dump(config_subset, f, indent=4, sort_keys=True) - - torch.distributed.barrier() - - -if __name__ == "__main__": - - # Initalize Megatron. - config = initialize_megatron_retro() - - # Expand tasks. - task_remap = { - "build" : [ "db-build", "index-train", "index-add", "query-neighbors" ], - "index-build" : [ "index-train", "index-add" ], - "db-build" : [ "db-build" ], - "index-train" : [ "index-train" ], - "index-add" : [ "index-add" ], - "query-neighbors" : [ "query-neighbors" ], - } - tasks = [] - for task in config.retro_tasks: - tasks.extend(task_remap[task]) - config.retro_tasks = tasks - - # Select task to run. - for task in tasks: - - print_rank_0("start '%s%s'." % ( - "" if config.retro_task_validate is None else "[validate] ", - task, - )) - - # DB (i.e., chunk db). - if task == "db-build": - build_db(config) - - # Index. - elif task == "index-train": - train_index(config) - elif task == "index-add": - add_to_index(config) - - # Query. - elif task == "query-neighbors": - query_neighbors(config) - - else: - raise Exception("specialize for task '%s'." % task) - - torch.distributed.barrier() - - print_rank_0("end '%s%s'." % ( - "" if config.retro_task_validate is None else "[validate] ", - task, - )) +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. + +"""Preprocess data for Retro. + +Stages (see argument '--retro-tasks'): +- Build chunk database (DB). +- Build index (train, add). +- Query pretraining neighbors. 
+""" + +import json +import os +import sys +import torch + +from megatron.core.datasets.blended_megatron_dataset_builder import BlendedMegatronDatasetBuilder +from megatron.core.datasets.utils import get_blend_from_list +from megatron.core.datasets.retro.db import build_db +from megatron.core.datasets.retro.index import add_to_index, train_index +from megatron.core.datasets.retro.config import ( + RetroBertEmbedders, + RetroGPTChunkDatasets, + RetroPreprocessingConfig, + RetroTokenizers, +) +from megatron.core.datasets.retro.query.gpt_chunk_dataset import build_gpt_chunk_datasets_from_gpt_datasets +from megatron.core.datasets.retro.query.multi_split_gpt_dataset import ( + MultiSplitGPTDataset, + MultiSplitGPTDatasetConfig, +) +from megatron.core.datasets.retro.query.query import query_neighbors +from megatron.core.datasets.retro.query.utils import get_query_dir +from megatron.core.datasets.retro.utils import retro_makedir +from megatron.core.models.retro.utils import ( + get_config_path, + get_gpt_data_dir, +) +from megatron.training import get_args, initialize_megatron, print_rank_0 +from megatron.training.arguments import core_transformer_config_from_args +from megatron.training.tokenizer.tokenizer import ( + _BertWordPieceTokenizer, + _GPT2BPETokenizer, + _GPTSentencePieceTokenizer, +) +from megatron.training import get_train_valid_test_num_samples +from pretrain_gpt import is_dataset_built_on_rank +from tools.bert_embedding import BertEmbedder, DiskDataParallelBertEmbedder +from tools.retro.config_utils import add_config_args + + +def add_retro_args(parser): + group = parser.add_argument_group(title="Retro preprocessing") + add_config_args(group, RetroPreprocessingConfig) + return parser + + +def initialize_megatron_retro(): + '''Initialize megatron & save Retro config.''' + + # Prevent arguments.py from overriding preprocessing args. + project_dir_idx = sys.argv.index("--retro-project-dir") + retro_project_dir = sys.argv[project_dir_idx + 1] + del sys.argv[project_dir_idx] # delete key + del sys.argv[project_dir_idx] # delete value + + # Initialize. + initialize_megatron(extra_args_provider=add_retro_args) + + args = get_args() + args.retro_project_dir = retro_project_dir + + # Retro config. + config = get_retro_preprocessing_config() + + # Save retro config. + if config.retro_task_validate is None: + retro_makedir(config, config.retro_project_dir) + save_config(config) + + return config + + +def get_bert_embedders(config): + mem_embedder = BertEmbedder( + batch_size = config.retro_bert_batch_size, + max_bert_seq_length = config.retro_bert_max_chunk_length, + embedder_type = "megatron", + ) + return RetroBertEmbedders( + mem = mem_embedder, + disk = DiskDataParallelBertEmbedder(mem_embedder, config.retro_block_size), + ) + + +def get_gpt_chunk_datasets(config): + + args = get_args() + + # Dataset config. 
+ data_dir = get_gpt_data_dir(config.retro_project_dir) + blend = list(config.retro_gpt_data_path) + for i in range(len(blend) - 1, -1, -2): + blend[i] = os.path.join(data_dir, blend[i]) + data_config = MultiSplitGPTDatasetConfig( + random_seed=config.retro_gpt_seed, + sequence_length=config.retro_gpt_seq_length, + blend=get_blend_from_list(blend), + blend_per_split=[ + get_blend_from_list(args.train_data_path), + get_blend_from_list(args.valid_data_path), + get_blend_from_list(args.test_data_path) + ], + split=config.retro_gpt_split, + split_preprocessing=config.retro_gpt_split, + path_to_cache=config.retro_gpt_data_cache_path, + return_document_ids=True, + tokenizer=config.retro_tokenizers.gpt, + reset_position_ids=args.reset_position_ids, + reset_attention_mask=args.reset_attention_mask, + eod_mask_loss=args.eod_mask_loss, + ) + + # GPT datasets. + print_rank_0(" > multi-split gpt datasets.") + train_valid_test_num_samples = get_train_valid_test_num_samples() + train_ds, valid_ds, test_ds = BlendedMegatronDatasetBuilder( + MultiSplitGPTDataset, + train_valid_test_num_samples, + is_dataset_built_on_rank, + data_config, + ).build() + + gpt_datasets = { + "train" : (train_ds, train_valid_test_num_samples[0]), + "valid" : (valid_ds, train_valid_test_num_samples[1]), + "test" : (test_ds, train_valid_test_num_samples[2]), + } + + # Chunk datasets. + chunk_datasets = build_gpt_chunk_datasets_from_gpt_datasets( + project_dir=config.retro_project_dir, + gpt_datasets=gpt_datasets, + sample_length=config.retro_gpt_seq_length, + chunk_length=config.retro_gpt_chunk_length, + ) + chunk_datasets = RetroGPTChunkDatasets(**chunk_datasets) + + return chunk_datasets + + +def get_gpt_tokenizer(config): + '''GPT (BPE) tokenizer.''' + tokenizer_type = config.retro_gpt_tokenizer_type + if tokenizer_type == "GPT2BPETokenizer": + assert config.retro_gpt_vocab_file and config.retro_gpt_merge_file + return _GPT2BPETokenizer( + vocab_file=os.path.join( + config.retro_project_dir, + config.retro_gpt_vocab_file, + ), + merge_file=os.path.join( + config.retro_project_dir, + config.retro_gpt_merge_file, + ), + ) + elif tokenizer_type == 'GPTSentencePieceTokenizer': + assert config.retro_gpt_tokenizer_model is not None + return _GPTSentencePieceTokenizer(os.path.join( + config.retro_project_dir, + config.retro_gpt_tokenizer_model, + )) + else: + raise Exception("unrecognized gpt tokenizer, '%s'." % tokenizer_type) + + +def get_bert_tokenizer(config): + '''Bert (Wordpiece) tokenizer.''' + lower_case = { + "BertWordPieceLowerCase" : True, + "BertWordPieceCase" : False, + }[config.retro_bert_tokenizer_type] + return _BertWordPieceTokenizer( + vocab_file=os.path.join( + config.retro_project_dir, + config.retro_bert_vocab_file, + ), + lower_case=lower_case, + ) + + +def get_tokenizers(config): + return RetroTokenizers( + gpt = get_gpt_tokenizer(config), + bert = get_bert_tokenizer(config), + ) + + +def get_retro_preprocessing_config(): + + # Arguments. + args = get_args() + + # Retro config. + config = core_transformer_config_from_args( + args, config_class=RetroPreprocessingConfig) + + # Add tools. + config.retro_tokenizers = get_tokenizers(config) + config.retro_bert_embedders = get_bert_embedders(config) + config.retro_gpt_chunk_datasets = get_gpt_chunk_datasets(config) + + return config + + +def save_config(config): + '''Save copy of config within retro project dir.''' + + if torch.distributed.get_rank() == 0: + + # GPT config + block size. 
+ config_subset = { + k:v for k,v in vars(config).items() + if k.startswith("retro_gpt") and k != "retro_gpt_chunk_datasets" + } + config_subset["retro_block_size"] = config.retro_block_size + + # Bert config. + config_subset["retro_bert_tokenizer_type"] = config.retro_bert_tokenizer_type + config_subset["retro_bert_vocab_file"] = config.retro_bert_vocab_file + + # Neighbor directories. + query_dir = get_query_dir(config.retro_project_dir) + config_subset["retro_neighbor_dirs"] = { + k : (os.path.relpath(v["neighbor_dir"], query_dir) if v is not None else None) + for k, v in vars(config.retro_gpt_chunk_datasets).items() + } + + # Save. + config_path = get_config_path(config.retro_project_dir) + with open(config_path, "w") as f: + json.dump(config_subset, f, indent=4, sort_keys=True) + + torch.distributed.barrier() + + +if __name__ == "__main__": + + # Initalize Megatron. + config = initialize_megatron_retro() + + # Expand tasks. + task_remap = { + "build" : [ "db-build", "index-train", "index-add", "query-neighbors" ], + "index-build" : [ "index-train", "index-add" ], + "db-build" : [ "db-build" ], + "index-train" : [ "index-train" ], + "index-add" : [ "index-add" ], + "query-neighbors" : [ "query-neighbors" ], + } + tasks = [] + for task in config.retro_tasks: + tasks.extend(task_remap[task]) + config.retro_tasks = tasks + + # Select task to run. + for task in tasks: + + print_rank_0("start '%s%s'." % ( + "" if config.retro_task_validate is None else "[validate] ", + task, + )) + + # DB (i.e., chunk db). + if task == "db-build": + build_db(config) + + # Index. + elif task == "index-train": + train_index(config) + elif task == "index-add": + add_to_index(config) + + # Query. + elif task == "query-neighbors": + query_neighbors(config) + + else: + raise Exception("specialize for task '%s'." % task) + + torch.distributed.barrier() + + print_rank_0("end '%s%s'." % ( + "" if config.retro_task_validate is None else "[validate] ", + task, + )) diff --git a/tools/retro/sft/sft_retro_lm.sh b/tools/retro/sft/sft_retro_lm.sh old mode 100644 new mode 100755 diff --git a/tools/retro/text_generation/evaluate.py b/tools/retro/text_generation/evaluate.py index 2031118..71e538e 100644 --- a/tools/retro/text_generation/evaluate.py +++ b/tools/retro/text_generation/evaluate.py @@ -1,200 +1,200 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - - -import sys -import os -from tqdm import tqdm -import string -import json -import regex -import numpy as np - -sys.path.append(os.path.abspath(os.path.join( - os.path.join(os.path.dirname(__file__), "../../../")))) -from tools.retro.text_generation.metrics import F1Metric - - -def normalize_answer(s): - def remove_articles(text): - return regex.sub(r'\b(a|an|the)\b', ' ', text) - - def white_space_fix(text): - return ' '.join(text.split()) - - def remove_punc(text): - exclude = set(string.punctuation) - return ''.join(ch for ch in text if ch not in exclude) - - def lower(text): - return text.lower() - - return white_space_fix(remove_articles(remove_punc(lower(s)))) - - -def compute_f1_score(predicted_answers, groundtruth_answer, exp_name="default"): - """Evaluating F1 Score""" - print(len(predicted_answers), len(groundtruth_answer)) - if len(predicted_answers) != len(groundtruth_answer): - groundtruth_answer = groundtruth_answer[:len(predicted_answers)] - - guess_list = [] - answer_list = [] - - assert len(guess_list) == len(answer_list), \ - "lengths of guess and answer are different!" 
- - for pred, ans in zip(predicted_answers, groundtruth_answer): - pred = pred.strip() - if type(ans) == str: - ans = ans.strip() - elif type(ans) == dict: - ans = ans['text'].strip() - elif ans == None: - continue - if "<|endoftext|>" in pred: - pred = pred.replace("<|endoftext|>", "") - if ans == "no_passages_used": - ans = "" - guess_list.append(pred) - answer_list.append(ans) - - precision, recall, f1 = F1Metric.compute_all_pairs(guess_list, answer_list) - print('Method: %s; Precision: %.4f; recall: %.4f; f1: %.4f' % ( \ - exp_name, precision, recall, f1)) - - -def load_groundtruth_file(data_file): - with open(data_file, "r") as f: - nq_examples = json.load(f) - - data = [] - for instance in nq_examples: - if "answers" in instance: - answers = instance["answers"] - if len(answers) < 1: - answers = [None] - elif "answer" in instance: - if type(instance["answer"]) is str: - answers = [instance["answer"]] - elif type(instance["answer"]) is list: - answers = instance["answer"] - else: - answers = [str(instance["answer"])] - else: - raise ValueError("need to have answer or answers") - data.append(answers[0]) - - return data - - -def read_prediction(prediction_file): - prediction_list = [] - print('reading %s' % prediction_file) - with open(prediction_file, "r") as f: - for i, line in enumerate(tqdm(f)): - if prediction_file.endswith("jsonl"): - line = json.loads(line)["pred"] - # print(line) - line = line.replace("Answer:", "") - line = line.replace("Answer: ", "") - line = line.replace('???? ', "") - line = line.replace('A: ', "") - line = line.replace("A:", "") - - line = line.strip() - - if "<|endoftext|>" in line: - line = line.replace("<|endoftext|>", "") - line = normalize_answer(line) # normalize the answer - prediction_list.append(line) - - return prediction_list - - -def exact_match_score(prediction, ground_truth): - return normalize_answer(prediction) == normalize_answer(ground_truth) - - -def ems(prediction, ground_truths): - return max([exact_match_score(prediction, gt) for gt in ground_truths]) - - -def evaluate_ems(prediction_file, ground_truth_file, dev_num=3000): - prediction_list = read_prediction(prediction_file) - ground_truths_list = [] - - if ground_truth_file.endswith(('txt', 'lst')): - raw_data = open(ground_truth_file, 'r') - else: - with open(ground_truth_file, 'r') as f: - raw_data = json.load(f) - if "dev" in ground_truth_file: - raw_data = raw_data[:dev_num] - prediction_list = prediction_list[:dev_num] - - for each in raw_data: - if ground_truth_file.endswith('txt'): - each = json.loads(each) - - if 'answers' in each: - ground_truths_list.append(each['answers']) - elif 'answer' in each: - ground_truths_list.append(each['answer']) - else: - ground_truths_list.append([each]) - - exactmatch = [] - - good_example_list = [] - for i, each in enumerate(prediction_list): - score = ems(each, ground_truths_list[i]) - exactmatch.append(score) - if score: - good_example_list.append(i) - - final_em_score = np.mean(exactmatch) - - print('Exact Match: %.4f;' % final_em_score) - - print('done :-)') - - return final_em_score, exactmatch - - -def load_prediction(data_file): - data = [] - with open(data_file, "r") as f: - for line in f.readlines(): - data.append(line.strip()) - - return data - - -def evaluate_f1(ground_truth_file, prediction_file, reduced_test_only=False): - groundtruth_answer = load_groundtruth_file(ground_truth_file) - predicted_answers = load_prediction(prediction_file) - if not reduced_test_only: - compute_f1_score(predicted_answers, groundtruth_answer) - - 
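The scoring helpers in this file (re-added unchanged below) can also be exercised on their own; a small sanity check, assuming the repository root is on PYTHONPATH and the regex/tqdm dependencies are installed:

```python
# Illustrative strings only; shows the normalization plus exact-match behaviour.
from tools.retro.text_generation.evaluate import normalize_answer, ems

assert normalize_answer("The  Eiffel Tower.") == "eiffel tower"    # lowercased, article and punctuation stripped
assert ems("The  Eiffel Tower.", ["eiffel tower", "Tour Eiffel"])  # matches the first ground truth after normalization
```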
-if __name__ == "__main__": - model_names = [] - model_names += "retro-open_inst_pp1_same_format_ctx1_843m_128_5e-6", - - for model_name in model_names: - ckpt_path = "/path/to/checkpoints/{}/".format(model_name) - - n_ctx = 5 - n_enc = 2 - iter = 1000 - model_param = "843m" - - prediction_file = ckpt_path + "/retro-generate-nq_{}_{}_{}_test_greedy_0_20000_{}.txt".format( - n_ctx, n_enc, model_param, iter) - ground_truth_file = "/path/to/NQ/test.json" - print(prediction_file) - print(ground_truth_file) - evaluate_f1(ground_truth_file, prediction_file) - evaluate_ems(prediction_file, ground_truth_file) - - print("=====================================") +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + + +import sys +import os +from tqdm import tqdm +import string +import json +import regex +import numpy as np + +sys.path.append(os.path.abspath(os.path.join( + os.path.join(os.path.dirname(__file__), "../../../")))) +from tools.retro.text_generation.metrics import F1Metric + + +def normalize_answer(s): + def remove_articles(text): + return regex.sub(r'\b(a|an|the)\b', ' ', text) + + def white_space_fix(text): + return ' '.join(text.split()) + + def remove_punc(text): + exclude = set(string.punctuation) + return ''.join(ch for ch in text if ch not in exclude) + + def lower(text): + return text.lower() + + return white_space_fix(remove_articles(remove_punc(lower(s)))) + + +def compute_f1_score(predicted_answers, groundtruth_answer, exp_name="default"): + """Evaluating F1 Score""" + print(len(predicted_answers), len(groundtruth_answer)) + if len(predicted_answers) != len(groundtruth_answer): + groundtruth_answer = groundtruth_answer[:len(predicted_answers)] + + guess_list = [] + answer_list = [] + + assert len(guess_list) == len(answer_list), \ + "lengths of guess and answer are different!" + + for pred, ans in zip(predicted_answers, groundtruth_answer): + pred = pred.strip() + if type(ans) == str: + ans = ans.strip() + elif type(ans) == dict: + ans = ans['text'].strip() + elif ans == None: + continue + if "<|endoftext|>" in pred: + pred = pred.replace("<|endoftext|>", "") + if ans == "no_passages_used": + ans = "" + guess_list.append(pred) + answer_list.append(ans) + + precision, recall, f1 = F1Metric.compute_all_pairs(guess_list, answer_list) + print('Method: %s; Precision: %.4f; recall: %.4f; f1: %.4f' % ( \ + exp_name, precision, recall, f1)) + + +def load_groundtruth_file(data_file): + with open(data_file, "r") as f: + nq_examples = json.load(f) + + data = [] + for instance in nq_examples: + if "answers" in instance: + answers = instance["answers"] + if len(answers) < 1: + answers = [None] + elif "answer" in instance: + if type(instance["answer"]) is str: + answers = [instance["answer"]] + elif type(instance["answer"]) is list: + answers = instance["answer"] + else: + answers = [str(instance["answer"])] + else: + raise ValueError("need to have answer or answers") + data.append(answers[0]) + + return data + + +def read_prediction(prediction_file): + prediction_list = [] + print('reading %s' % prediction_file) + with open(prediction_file, "r") as f: + for i, line in enumerate(tqdm(f)): + if prediction_file.endswith("jsonl"): + line = json.loads(line)["pred"] + # print(line) + line = line.replace("Answer:", "") + line = line.replace("Answer: ", "") + line = line.replace('???? 
', "") + line = line.replace('A: ', "") + line = line.replace("A:", "") + + line = line.strip() + + if "<|endoftext|>" in line: + line = line.replace("<|endoftext|>", "") + line = normalize_answer(line) # normalize the answer + prediction_list.append(line) + + return prediction_list + + +def exact_match_score(prediction, ground_truth): + return normalize_answer(prediction) == normalize_answer(ground_truth) + + +def ems(prediction, ground_truths): + return max([exact_match_score(prediction, gt) for gt in ground_truths]) + + +def evaluate_ems(prediction_file, ground_truth_file, dev_num=3000): + prediction_list = read_prediction(prediction_file) + ground_truths_list = [] + + if ground_truth_file.endswith(('txt', 'lst')): + raw_data = open(ground_truth_file, 'r') + else: + with open(ground_truth_file, 'r') as f: + raw_data = json.load(f) + if "dev" in ground_truth_file: + raw_data = raw_data[:dev_num] + prediction_list = prediction_list[:dev_num] + + for each in raw_data: + if ground_truth_file.endswith('txt'): + each = json.loads(each) + + if 'answers' in each: + ground_truths_list.append(each['answers']) + elif 'answer' in each: + ground_truths_list.append(each['answer']) + else: + ground_truths_list.append([each]) + + exactmatch = [] + + good_example_list = [] + for i, each in enumerate(prediction_list): + score = ems(each, ground_truths_list[i]) + exactmatch.append(score) + if score: + good_example_list.append(i) + + final_em_score = np.mean(exactmatch) + + print('Exact Match: %.4f;' % final_em_score) + + print('done :-)') + + return final_em_score, exactmatch + + +def load_prediction(data_file): + data = [] + with open(data_file, "r") as f: + for line in f.readlines(): + data.append(line.strip()) + + return data + + +def evaluate_f1(ground_truth_file, prediction_file, reduced_test_only=False): + groundtruth_answer = load_groundtruth_file(ground_truth_file) + predicted_answers = load_prediction(prediction_file) + if not reduced_test_only: + compute_f1_score(predicted_answers, groundtruth_answer) + + +if __name__ == "__main__": + model_names = [] + model_names += "retro-open_inst_pp1_same_format_ctx1_843m_128_5e-6", + + for model_name in model_names: + ckpt_path = "/path/to/checkpoints/{}/".format(model_name) + + n_ctx = 5 + n_enc = 2 + iter = 1000 + model_param = "843m" + + prediction_file = ckpt_path + "/retro-generate-nq_{}_{}_{}_test_greedy_0_20000_{}.txt".format( + n_ctx, n_enc, model_param, iter) + ground_truth_file = "/path/to/NQ/test.json" + print(prediction_file) + print(ground_truth_file) + evaluate_f1(ground_truth_file, prediction_file) + evaluate_ems(prediction_file, ground_truth_file) + + print("=====================================") diff --git a/tools/retro/text_generation/metrics.py b/tools/retro/text_generation/metrics.py index bd0b5fe..692b5c8 100644 --- a/tools/retro/text_generation/metrics.py +++ b/tools/retro/text_generation/metrics.py @@ -1,80 +1,80 @@ - -# The following code is adapted from -# https://github.com/facebookresearch/ParlAI/blob/master/parlai/core/metrics.py, -# which is licensed under the MIT license. More details on the license can be -# found at https://github.com/facebookresearch/ParlAI/blob/master/LICENSE. 
- -"""Provides standard metric evaluations for dialog.""" - -from collections import Counter -from typing import List -import numpy as np -import re -from nltk import ngrams - -re_art = re.compile(r'\b(a|an|the)\b') -re_punc = re.compile(r'[!"#$%&()*+,-./:;<=>?@\[\]\\^`{|}~_\']') - - -def normalize_answer(s): - """ - Lower text and remove punctuation, articles and extra whitespace. - """ - s = s.lower() - s = re_punc.sub(' ', s) - s = re_art.sub(' ', s) - s = ' '.join(s.split()) - return s - - -class F1Metric: - """ - Helper class which computes token-level F1. - """ - - @staticmethod - def _prec_recall_f1_score(pred_items, gold_items): - """ - Compute precision, recall and f1 given a set of gold and prediction items. - :param pred_items: iterable of predicted values - :param gold_items: iterable of gold values - :return: tuple (p, r, f1) for precision, recall, f1 - """ - common = Counter(gold_items) & Counter(pred_items) - num_same = sum(common.values()) - if num_same == 0: - return 0, 0, 0 - precision = 1.0 * num_same / len(pred_items) - recall = 1.0 * num_same / len(gold_items) - f1 = (2 * precision * recall) / (precision + recall) - return precision, recall, f1 - - @staticmethod - def compute_each_pair(guess: str, answer: str, n=1): - if answer == "": - return None, None, None - if guess == "": - return 0, 0, 0 - g_tokens = normalize_answer(guess).split() - a_tokens = normalize_answer(answer).split() - g_tokens = list(ngrams(g_tokens, n)) - a_tokens = list(ngrams(a_tokens, n)) - precision, recall, f1 = F1Metric._prec_recall_f1_score(g_tokens, a_tokens) - return precision, recall, f1 - - @staticmethod - def compute_all_pairs(guesses: List[str], answers: List[str], n=1): - # additional augment: - print("guess:", len(guesses), ", answers:", len(answers)) - assert len(guesses) == len(answers) - - precision_list, recall_list, f1_list = [], [], [] - for guess, answer in zip(guesses, answers): - precision, recall, f1 = F1Metric.compute_each_pair(guess, answer, n) - if precision is None or recall is None or f1 is None: - continue - precision_list.append(precision) - recall_list.append(recall) - f1_list.append(f1) - - return np.mean(precision_list), np.mean(recall_list), np.mean(f1_list) + +# The following code is adapted from +# https://github.com/facebookresearch/ParlAI/blob/master/parlai/core/metrics.py, +# which is licensed under the MIT license. More details on the license can be +# found at https://github.com/facebookresearch/ParlAI/blob/master/LICENSE. + +"""Provides standard metric evaluations for dialog.""" + +from collections import Counter +from typing import List +import numpy as np +import re +from nltk import ngrams + +re_art = re.compile(r'\b(a|an|the)\b') +re_punc = re.compile(r'[!"#$%&()*+,-./:;<=>?@\[\]\\^`{|}~_\']') + + +def normalize_answer(s): + """ + Lower text and remove punctuation, articles and extra whitespace. + """ + s = s.lower() + s = re_punc.sub(' ', s) + s = re_art.sub(' ', s) + s = ' '.join(s.split()) + return s + + +class F1Metric: + """ + Helper class which computes token-level F1. + """ + + @staticmethod + def _prec_recall_f1_score(pred_items, gold_items): + """ + Compute precision, recall and f1 given a set of gold and prediction items. 
+ :param pred_items: iterable of predicted values + :param gold_items: iterable of gold values + :return: tuple (p, r, f1) for precision, recall, f1 + """ + common = Counter(gold_items) & Counter(pred_items) + num_same = sum(common.values()) + if num_same == 0: + return 0, 0, 0 + precision = 1.0 * num_same / len(pred_items) + recall = 1.0 * num_same / len(gold_items) + f1 = (2 * precision * recall) / (precision + recall) + return precision, recall, f1 + + @staticmethod + def compute_each_pair(guess: str, answer: str, n=1): + if answer == "": + return None, None, None + if guess == "": + return 0, 0, 0 + g_tokens = normalize_answer(guess).split() + a_tokens = normalize_answer(answer).split() + g_tokens = list(ngrams(g_tokens, n)) + a_tokens = list(ngrams(a_tokens, n)) + precision, recall, f1 = F1Metric._prec_recall_f1_score(g_tokens, a_tokens) + return precision, recall, f1 + + @staticmethod + def compute_all_pairs(guesses: List[str], answers: List[str], n=1): + # additional augment: + print("guess:", len(guesses), ", answers:", len(answers)) + assert len(guesses) == len(answers) + + precision_list, recall_list, f1_list = [], [], [] + for guess, answer in zip(guesses, answers): + precision, recall, f1 = F1Metric.compute_each_pair(guess, answer, n) + if precision is None or recall is None or f1 is None: + continue + precision_list.append(precision) + recall_list.append(recall) + f1_list.append(f1) + + return np.mean(precision_list), np.mean(recall_list), np.mean(f1_list) diff --git a/tools/retro/text_generation/retro_generate.sh b/tools/retro/text_generation/retro_generate.sh old mode 100644 new mode 100755 index 53f7d76..9e7274b --- a/tools/retro/text_generation/retro_generate.sh +++ b/tools/retro/text_generation/retro_generate.sh @@ -1,125 +1,125 @@ -#!/bin/bash - -TASK=$1 -model_size=$2 -sampling=$3 -split=$4 -gen_start=$5 -num_gen=$6 -ckpt_step=${7} -ft_neighbours=${8} -model_card=${9} -ckpt=${10} -K=${11} -retrieve=${12} - -QA_HOME="" - -TOKENIZER_MODEL="" - -RETRO_WORKDIR="" - - -if [[ $model_size == "843m" ]]; then - mod_par=1 - layers=24 - hid_dim=1024 - heads=16 - pip_par=1 -fi - -GPT_ARGS="--apply-layernorm-1p \ - --untie-embeddings-and-output-weights \ - --disable-bias-linear \ - --no-position-embedding \ - --use-rotary-position-embeddings \ - --rotary-percent 0.5 \ - --swiglu \ - --attention-dropout 0.0 \ - --hidden-dropout 0.0 \ - --pipeline-model-parallel-size $pip_par \ - --tensor-model-parallel-size $mod_par \ - --num-layers $layers \ - --hidden-size $hid_dim \ - --num-attention-heads $heads \ - --seq-length 4096 \ - --max-position-embeddings 4096 \ - --lr-decay-style cosine \ - --tokenizer-type GPTSentencePieceTokenizer \ - --tokenizer-model ${TOKENIZER_MODEL} \ - --clip-grad 1.0 \ - --weight-decay 0.01 \ - --adam-beta1 0.9 \ - --adam-beta2 0.98 \ - --log-params-norm \ - --log-num-zeros-in-grad \ - --bf16 \ -" - - -sample_input_file="/path/to/instruct_tuning/data/$TASK/${split}.json" - -top_k=1 -micro_bsz=1 -SAMPLE_ARGS="--top_k $top_k" - -CHECKPOINT_PATH=${ckpt} -sample_output_file="${CHECKPOINT_PATH}/retro-generate-${TASK}_${ft_neighbours}_${K}_${model_size}_${split}_${sampling}_${gen_start}_${num_gen}_${ckpt_step}.txt" - -DIR=`pwd` - -echo $sample_input_file -echo $sample_output_file - - -GEN_ARGS="$SAMPLE_ARGS \ - --gen-start-idx $gen_start \ - --num-gen $num_gen \ - --ckpt-step ${ckpt_step} \ - --sample-input-file $sample_input_file \ - --sample-output-file $sample_output_file \ - --retro-workdir ${RETRO_WORKDIR} \ - --retro-add-retriever \ - --retro-num-neighbors ${K} 
\ - --reuse-top \ - --retro-attention-gate 0 \ - " - -if [[ $retrieve == 1 ]]; then - GEN_ARGS="$GEN_ARGS \ - --use-retrieved-neighbours \ - " -fi - -FT_ARGS="--eod-mask-loss \ - --answer-loss-only \ - --ft_neighbours ${ft_neighbours} \ - --task $TASK" - -DISTRIBUTED_ARGS="--nproc_per_node ${mod_par} \ - --nnodes ${pip_par} \ - --node_rank 0 \ - --master_port 8889" - -######## Command. ######## - -COMMAND="python -m torch.distributed.run $DISTRIBUTED_ARGS ${DIR}/tools/retro/text_generation/retro_text_generation.py" - -COMMAND="$COMMAND \ - $GPT_ARGS \ - $GEN_ARGS \ - --load $CHECKPOINT_PATH \ - --micro-batch-size $micro_bsz \ - $FT_ARGS" - -export NCCL_DEBUG=INFO -export NCCL_IB_TIMEOUT=19 -export NCCL_IB_SL=1 -export CUDA_DEVICE_MAX_CONNECTIONS=1 - - -echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" -echo "CMD = '$CMD'." -echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" -eval $COMMAND - +#!/bin/bash + +TASK=$1 +model_size=$2 +sampling=$3 +split=$4 +gen_start=$5 +num_gen=$6 +ckpt_step=${7} +ft_neighbours=${8} +model_card=${9} +ckpt=${10} +K=${11} +retrieve=${12} + +QA_HOME="" + +TOKENIZER_MODEL="" + +RETRO_WORKDIR="" + + +if [[ $model_size == "843m" ]]; then + mod_par=1 + layers=24 + hid_dim=1024 + heads=16 + pip_par=1 +fi + +GPT_ARGS="--apply-layernorm-1p \ + --untie-embeddings-and-output-weights \ + --disable-bias-linear \ + --no-position-embedding \ + --use-rotary-position-embeddings \ + --rotary-percent 0.5 \ + --swiglu \ + --attention-dropout 0.0 \ + --hidden-dropout 0.0 \ + --pipeline-model-parallel-size $pip_par \ + --tensor-model-parallel-size $mod_par \ + --num-layers $layers \ + --hidden-size $hid_dim \ + --num-attention-heads $heads \ + --seq-length 4096 \ + --max-position-embeddings 4096 \ + --lr-decay-style cosine \ + --tokenizer-type GPTSentencePieceTokenizer \ + --tokenizer-model ${TOKENIZER_MODEL} \ + --clip-grad 1.0 \ + --weight-decay 0.01 \ + --adam-beta1 0.9 \ + --adam-beta2 0.98 \ + --log-params-norm \ + --log-num-zeros-in-grad \ + --bf16 \ +" + + +sample_input_file="/path/to/instruct_tuning/data/$TASK/${split}.json" + +top_k=1 +micro_bsz=1 +SAMPLE_ARGS="--top_k $top_k" + +CHECKPOINT_PATH=${ckpt} +sample_output_file="${CHECKPOINT_PATH}/retro-generate-${TASK}_${ft_neighbours}_${K}_${model_size}_${split}_${sampling}_${gen_start}_${num_gen}_${ckpt_step}.txt" + +DIR=`pwd` + +echo $sample_input_file +echo $sample_output_file + + +GEN_ARGS="$SAMPLE_ARGS \ + --gen-start-idx $gen_start \ + --num-gen $num_gen \ + --ckpt-step ${ckpt_step} \ + --sample-input-file $sample_input_file \ + --sample-output-file $sample_output_file \ + --retro-workdir ${RETRO_WORKDIR} \ + --retro-add-retriever \ + --retro-num-neighbors ${K} \ + --reuse-top \ + --retro-attention-gate 0 \ + " + +if [[ $retrieve == 1 ]]; then + GEN_ARGS="$GEN_ARGS \ + --use-retrieved-neighbours \ + " +fi + +FT_ARGS="--eod-mask-loss \ + --answer-loss-only \ + --ft_neighbours ${ft_neighbours} \ + --task $TASK" + +DISTRIBUTED_ARGS="--nproc_per_node ${mod_par} \ + --nnodes ${pip_par} \ + --node_rank 0 \ + --master_port 8889" + +######## Command. ######## + +COMMAND="python -m torch.distributed.run $DISTRIBUTED_ARGS ${DIR}/tools/retro/text_generation/retro_text_generation.py" + +COMMAND="$COMMAND \ + $GPT_ARGS \ + $GEN_ARGS \ + --load $CHECKPOINT_PATH \ + --micro-batch-size $micro_bsz \ + $FT_ARGS" + +export NCCL_DEBUG=INFO +export NCCL_IB_TIMEOUT=19 +export NCCL_IB_SL=1 +export CUDA_DEVICE_MAX_CONNECTIONS=1 + + +echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" +echo "CMD = '$CMD'." 
+echo "~~~~~~~~~~~~~~~~~~~~~~~~~~" +eval $COMMAND + diff --git a/tools/retro/text_generation/retro_text_generation.py b/tools/retro/text_generation/retro_text_generation.py index 2705009..ed08756 100644 --- a/tools/retro/text_generation/retro_text_generation.py +++ b/tools/retro/text_generation/retro_text_generation.py @@ -1,263 +1,263 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. - -"""Sample Generate GPT""" -import torch -import os -import sys -from typing import Union - -sys.path.append(os.path.abspath(os.path.join( - os.path.join(os.path.dirname(__file__), "../../../")))) -from megatron.training import get_args, get_retro_args -from megatron.training import print_rank_0 -from megatron.training import get_tokenizer -from megatron.training.checkpointing import load_checkpoint -from megatron.training.initialize import initialize_megatron -from megatron.core.models.gpt import GPTModel -from megatron.training import get_model -from tools.retro.text_generation.retro_api import retro_generate_and_post_process -from tools.retro.sft.sft_retro import get_tasks_args -from tools.retro.sft.dataset_conv import reformat_prompt, preprocess, reformat_prompt_short -import numpy as np -import time -import megatron.legacy.model -from megatron.training.arguments import core_transformer_config_from_args - - - -def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: - """Builds the model. - - Args: - pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. - - - Returns: - Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model - """ - print_rank_0('building GPT model ...') - args = get_args() - config = core_transformer_config_from_args(args) - - assert args.use_legacy_models, 'retro text generation only implemented for legacy models' - - # not support core model yet - model = megatron.legacy.model.GPTModel( - config, - num_tokentypes=0, - parallel_output=False, - pre_process=pre_process, - post_process=post_process - ) - - return model - - -def pad_neighbours_for_query_only(args, nb_tokens, pad_id, ft_neighbours): - # take top k neighbours and padding - neighbours_tokens = [] - retro_args = get_retro_args() - r = retro_args.retro_gpt_retrieved_length - - if args.reuse_top: - valid_nb_tokens = nb_tokens[:args.retro_num_neighbors] - else: - valid_nb_tokens = nb_tokens[ft_neighbours:args.retro_num_neighbors + ft_neighbours] - - for nb_token in valid_nb_tokens: - if len(nb_token) >= r: - nb_token = nb_token[:r] - else: - nb_token = nb_token + [pad_id] * (r - len(nb_token)) - neighbours_tokens.append(nb_token) - print("len(nb_tokens)", len(nb_tokens)) - print("len(neighbours_tokens)", len(neighbours_tokens)) - print("args.retro_num_neighbors", args.retro_num_neighbors) - - if len(neighbours_tokens) < args.retro_num_neighbors: - assert ValueError("neighbours are not enough, add empty ones and create mask for those empty ones") - neighbours_tokens = np.array(neighbours_tokens) - return neighbours_tokens - - -def add_text_generate_args(parser): - """Text generation arguments.""" - - parser = get_tasks_args(parser) - group = parser.add_argument_group(title='text generation') - - group.add_argument("--temperature", type=float, default=1.0, - help='Sampling temperature.') - group.add_argument("--greedy", action='store_true', default=False, - help='Use greedy sampling.') - 
group.add_argument("--top_p", type=float, default=0.0, - help='Top p sampling.') - group.add_argument("--top_k", type=int, default=0, - help='Top k sampling.') - group.add_argument("--out-seq-length", type=int, default=256, - help='Size of the output generated text.') - group.add_argument("--sample-input-file", type=str, default=None, - help='Get input from file instead of interactive mode, ' - 'each line is an input.') - group.add_argument("--sample-output-file", type=str, default=None, - help='Output file got from --sample-input-file') - group.add_argument("--num-samples", type=int, default=0, - help='Number of samples to generate unconditionally, ' - 'defaults to 0 and interactive conditional sampling') - group.add_argument("--genfile", type=str, - help='Output file when generating unconditionally') - group.add_argument("--recompute", action='store_true', - help='During generation recompute all attention ' - 'instead of using previously computed keys/values.') - group.add_argument("--epsilon", type=float, default=0.01, - help="Minimum factor by which each probability is multiplied") - group.add_argument("--debug-gen", action='store_true', - help="If set, additional debugging output is printed to stdout") - group.add_argument('--length-penalty', type=float, default=1.0, - help='length penalty') - group.add_argument('--gen-start-idx', type=int, default=0, - help='project size for adapters') - group.add_argument('--num-gen', type=int, default=-1, - help='project size for adapters') - group.add_argument('--ckpt-step', type=int, default=None, - help='setting ckpt step manually') - group.add_argument("--short-format", action='store_true', - help='Use short format QA') - group.add_argument("--use-retrieved-neighbours", action='store_true', default=False, - help='Use retrieved neighbours') - group.add_argument('--template-id', type=int, default=0, - help='template id for generation,') - return parser - - -def generate_samples_conditional(model): - args = get_args() - start = time.time() - avg_time = [] - tokenizer = get_tokenizer() - model.eval() - if torch.distributed.get_rank() == 0: - - data = preprocess(args.sample_input_file, inference_only=True, - retrieved_neighbours=args.use_retrieved_neighbours) - print("total rows {}".format(len(data))) - all_data = data[args.gen_start_idx:] # start from gen_start_idx - if args.num_gen > 0: - all_data = all_data[:args.num_gen] - input_count = len(all_data) - input_pos = 0 - - terminate_runs = 0 - while True: - torch.distributed.barrier() - if torch.distributed.get_rank() == 0: - sentences = [] - n_arrays = [] - print("global batch size", args.global_batch_size) - for _ in range(args.global_batch_size): - print(input_pos) - if input_pos >= input_count: - print("reach the last row") - break - else: - sample = all_data[input_pos] - input_pos += 1 - - if True: - max_target_len = args.out_seq_length - query, _, neighbours = sample - - neighbours_array = pad_neighbours_for_query_only(args, - [tokenizer.tokenize(neighbour) for neighbour in - neighbours], tokenizer.eod, args.ft_neighbours) - print("neighbours_array.shape", neighbours_array.shape) - tokenizer = get_tokenizer() - - if args.short_format: - input_tokens = reformat_prompt_short(query, neighbours, args.task, args.ft_neighbours, - max_target_len, - tokenizer, args.seq_length) - else: - input_tokens = reformat_prompt(query, neighbours, args.task, args.ft_neighbours, max_target_len, - tokenizer, args.seq_length, template_id=args.template_id) - raw_text = tokenizer.detokenize(input_tokens) - 
print(raw_text) - else: - raise ValueError("invalid arg for task") - sentences.append(raw_text) - retro_args = get_retro_args() - - resp_sentences, resp_sentences_seg, scores, \ - tokens = retro_generate_and_post_process(model, prompts=sentences, - neighbours_array=neighbours_array, - tokens_to_generate=args.seq_length - retro_args.retro_gpt_chunk_length, - return_output_log_probs=False, - top_k_sampling=args.top_k, - top_p_sampling=args.top_p, - add_BOS=False, - temperature=1.0) - print("len of resp_sentences", len(resp_sentences)) - for prompt, generation in zip(sentences, resp_sentences): - datum = generation[len(prompt):] - print("prompt:", generation[:len(prompt)]) - if "<|endoftext|>" in datum: - datum = datum[:datum.find("<|endoftext|>")].strip() - datum = datum.replace("\n", " ") - print("cont:", datum) - yield datum - avg_time.append((time.time() - start) / args.global_batch_size) - print("avg time for each sample: ", sum(avg_time) / len(avg_time)) - start = time.time() - if input_pos >= input_count: - print("finish all lines") - terminate_runs = 1 - else: - retro_generate_and_post_process(model) - - terminate_runs_tensor = torch.cuda.LongTensor([terminate_runs]) - torch.distributed.broadcast(terminate_runs_tensor, 0) - terminate_runs = terminate_runs_tensor[0].item() - - if terminate_runs == 1: - return - - -def generate_and_write_samples_conditional(model): - args = get_args() - if args.sample_output_file is None: - sample_output_file = args.sample_input_file + ".out" - print('`sample-output-file` not specified, setting ' - 'it to {}'.format(sample_output_file)) - else: - sample_output_file = args.sample_output_file - with open(sample_output_file, 'w') as f: - for datum in generate_samples_conditional(model): - if torch.distributed.get_rank() == 0: - f.write(datum + '\n') - - -def main(): - """Main program.""" - - initialize_megatron(extra_args_provider=add_text_generate_args, - args_defaults={'no_load_rng': True, - 'no_load_optim': True}) - - # Set up model and load checkpoint - model = get_model(model_provider, wrap_with_ddp=False) - print(model) - args = get_args() - - if args.load is not None: - _ = load_checkpoint(model, None, None) - model = model[0] - - # Generate samples. - if args.sample_input_file is not None: - print(f"{args.sample_input_file}") - generate_and_write_samples_conditional(model) - - -if __name__ == "__main__": - main() +# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. + +"""Sample Generate GPT""" +import torch +import os +import sys +from typing import Union + +sys.path.append(os.path.abspath(os.path.join( + os.path.join(os.path.dirname(__file__), "../../../")))) +from megatron.training import get_args, get_retro_args +from megatron.training import print_rank_0 +from megatron.training import get_tokenizer +from megatron.training.checkpointing import load_checkpoint +from megatron.training.initialize import initialize_megatron +from megatron.core.models.gpt import GPTModel +from megatron.training import get_model +from tools.retro.text_generation.retro_api import retro_generate_and_post_process +from tools.retro.sft.sft_retro import get_tasks_args +from tools.retro.sft.dataset_conv import reformat_prompt, preprocess, reformat_prompt_short +import numpy as np +import time +import megatron.legacy.model +from megatron.training.arguments import core_transformer_config_from_args + + + +def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: + """Builds the model. 
+ + Args: + pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. + + + Returns: + Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model + """ + print_rank_0('building GPT model ...') + args = get_args() + config = core_transformer_config_from_args(args) + + assert args.use_legacy_models, 'retro text generation only implemented for legacy models' + + # not support core model yet + model = megatron.legacy.model.GPTModel( + config, + num_tokentypes=0, + parallel_output=False, + pre_process=pre_process, + post_process=post_process + ) + + return model + + +def pad_neighbours_for_query_only(args, nb_tokens, pad_id, ft_neighbours): + # take top k neighbours and padding + neighbours_tokens = [] + retro_args = get_retro_args() + r = retro_args.retro_gpt_retrieved_length + + if args.reuse_top: + valid_nb_tokens = nb_tokens[:args.retro_num_neighbors] + else: + valid_nb_tokens = nb_tokens[ft_neighbours:args.retro_num_neighbors + ft_neighbours] + + for nb_token in valid_nb_tokens: + if len(nb_token) >= r: + nb_token = nb_token[:r] + else: + nb_token = nb_token + [pad_id] * (r - len(nb_token)) + neighbours_tokens.append(nb_token) + print("len(nb_tokens)", len(nb_tokens)) + print("len(neighbours_tokens)", len(neighbours_tokens)) + print("args.retro_num_neighbors", args.retro_num_neighbors) + + if len(neighbours_tokens) < args.retro_num_neighbors: + assert ValueError("neighbours are not enough, add empty ones and create mask for those empty ones") + neighbours_tokens = np.array(neighbours_tokens) + return neighbours_tokens + + +def add_text_generate_args(parser): + """Text generation arguments.""" + + parser = get_tasks_args(parser) + group = parser.add_argument_group(title='text generation') + + group.add_argument("--temperature", type=float, default=1.0, + help='Sampling temperature.') + group.add_argument("--greedy", action='store_true', default=False, + help='Use greedy sampling.') + group.add_argument("--top_p", type=float, default=0.0, + help='Top p sampling.') + group.add_argument("--top_k", type=int, default=0, + help='Top k sampling.') + group.add_argument("--out-seq-length", type=int, default=256, + help='Size of the output generated text.') + group.add_argument("--sample-input-file", type=str, default=None, + help='Get input from file instead of interactive mode, ' + 'each line is an input.') + group.add_argument("--sample-output-file", type=str, default=None, + help='Output file got from --sample-input-file') + group.add_argument("--num-samples", type=int, default=0, + help='Number of samples to generate unconditionally, ' + 'defaults to 0 and interactive conditional sampling') + group.add_argument("--genfile", type=str, + help='Output file when generating unconditionally') + group.add_argument("--recompute", action='store_true', + help='During generation recompute all attention ' + 'instead of using previously computed keys/values.') + group.add_argument("--epsilon", type=float, default=0.01, + help="Minimum factor by which each probability is multiplied") + group.add_argument("--debug-gen", action='store_true', + help="If set, additional debugging output is printed to stdout") + group.add_argument('--length-penalty', type=float, default=1.0, + help='length penalty') + group.add_argument('--gen-start-idx', type=int, default=0, + help='project size for adapters') + group.add_argument('--num-gen', type=int, default=-1, + 
help='project size for adapters') + group.add_argument('--ckpt-step', type=int, default=None, + help='setting ckpt step manually') + group.add_argument("--short-format", action='store_true', + help='Use short format QA') + group.add_argument("--use-retrieved-neighbours", action='store_true', default=False, + help='Use retrieved neighbours') + group.add_argument('--template-id', type=int, default=0, + help='template id for generation,') + return parser + + +def generate_samples_conditional(model): + args = get_args() + start = time.time() + avg_time = [] + tokenizer = get_tokenizer() + model.eval() + if torch.distributed.get_rank() == 0: + + data = preprocess(args.sample_input_file, inference_only=True, + retrieved_neighbours=args.use_retrieved_neighbours) + print("total rows {}".format(len(data))) + all_data = data[args.gen_start_idx:] # start from gen_start_idx + if args.num_gen > 0: + all_data = all_data[:args.num_gen] + input_count = len(all_data) + input_pos = 0 + + terminate_runs = 0 + while True: + torch.distributed.barrier() + if torch.distributed.get_rank() == 0: + sentences = [] + n_arrays = [] + print("global batch size", args.global_batch_size) + for _ in range(args.global_batch_size): + print(input_pos) + if input_pos >= input_count: + print("reach the last row") + break + else: + sample = all_data[input_pos] + input_pos += 1 + + if True: + max_target_len = args.out_seq_length + query, _, neighbours = sample + + neighbours_array = pad_neighbours_for_query_only(args, + [tokenizer.tokenize(neighbour) for neighbour in + neighbours], tokenizer.eod, args.ft_neighbours) + print("neighbours_array.shape", neighbours_array.shape) + tokenizer = get_tokenizer() + + if args.short_format: + input_tokens = reformat_prompt_short(query, neighbours, args.task, args.ft_neighbours, + max_target_len, + tokenizer, args.seq_length) + else: + input_tokens = reformat_prompt(query, neighbours, args.task, args.ft_neighbours, max_target_len, + tokenizer, args.seq_length, template_id=args.template_id) + raw_text = tokenizer.detokenize(input_tokens) + print(raw_text) + else: + raise ValueError("invalid arg for task") + sentences.append(raw_text) + retro_args = get_retro_args() + + resp_sentences, resp_sentences_seg, scores, \ + tokens = retro_generate_and_post_process(model, prompts=sentences, + neighbours_array=neighbours_array, + tokens_to_generate=args.seq_length - retro_args.retro_gpt_chunk_length, + return_output_log_probs=False, + top_k_sampling=args.top_k, + top_p_sampling=args.top_p, + add_BOS=False, + temperature=1.0) + print("len of resp_sentences", len(resp_sentences)) + for prompt, generation in zip(sentences, resp_sentences): + datum = generation[len(prompt):] + print("prompt:", generation[:len(prompt)]) + if "<|endoftext|>" in datum: + datum = datum[:datum.find("<|endoftext|>")].strip() + datum = datum.replace("\n", " ") + print("cont:", datum) + yield datum + avg_time.append((time.time() - start) / args.global_batch_size) + print("avg time for each sample: ", sum(avg_time) / len(avg_time)) + start = time.time() + if input_pos >= input_count: + print("finish all lines") + terminate_runs = 1 + else: + retro_generate_and_post_process(model) + + terminate_runs_tensor = torch.cuda.LongTensor([terminate_runs]) + torch.distributed.broadcast(terminate_runs_tensor, 0) + terminate_runs = terminate_runs_tensor[0].item() + + if terminate_runs == 1: + return + + +def generate_and_write_samples_conditional(model): + args = get_args() + if args.sample_output_file is None: + sample_output_file = 
args.sample_input_file + ".out" + print('`sample-output-file` not specified, setting ' + 'it to {}'.format(sample_output_file)) + else: + sample_output_file = args.sample_output_file + with open(sample_output_file, 'w') as f: + for datum in generate_samples_conditional(model): + if torch.distributed.get_rank() == 0: + f.write(datum + '\n') + + +def main(): + """Main program.""" + + initialize_megatron(extra_args_provider=add_text_generate_args, + args_defaults={'no_load_rng': True, + 'no_load_optim': True}) + + # Set up model and load checkpoint + model = get_model(model_provider, wrap_with_ddp=False) + print(model) + args = get_args() + + if args.load is not None: + _ = load_checkpoint(model, None, None) + model = model[0] + + # Generate samples. + if args.sample_input_file is not None: + print(f"{args.sample_input_file}") + generate_and_write_samples_conditional(model) + + +if __name__ == "__main__": + main() diff --git a/tools/run_text_generation_server.py b/tools/run_text_generation_server.py index e5b3f08..fc9321a 100644 --- a/tools/run_text_generation_server.py +++ b/tools/run_text_generation_server.py @@ -1,144 +1,189 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. - -"""Sample Generate GPT""" -import os -import sys -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), - os.path.pardir))) -from megatron.training import get_args -from megatron.training import print_rank_0 -from megatron.core import mpu -from megatron.training.checkpointing import load_checkpoint -from megatron.training.initialize import initialize_megatron -from megatron.core.models.gpt import GPTModel -from megatron.training import get_model -from megatron.training.arguments import core_transformer_config_from_args -from megatron.training.yaml_arguments import core_transformer_config_from_yaml -from megatron.inference.text_generation_server import MegatronServer -from megatron.inference.text_generation import generate_and_post_process -from megatron.inference.text_generation import beam_search_and_post_process -from megatron.core.transformer.spec_utils import import_module -from megatron.core.models.gpt.gpt_layer_specs import ( - get_gpt_layer_local_spec, - get_gpt_layer_with_transformer_engine_spec, -) - -from contextlib import nullcontext -import torch -from typing import Union -import megatron - - -def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: - """Builds the model. - - If you set the use_legacy_models to True, it will return the legacy GPT model and if not the core GPT model. - - Args: - pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. - post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. 
- - - Returns: - Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model - """ - - args = get_args() - use_te = args.transformer_impl == "transformer_engine" - - print_rank_0('building GPT model ...') - - # Experimental loading arguments from yaml - if args.yaml_cfg is not None: - config = core_transformer_config_from_yaml(args, "language_model") - else: - config = core_transformer_config_from_args(args) - - if args.use_legacy_models: - model = megatron.legacy.model.GPTModel( - config, - num_tokentypes=0, - parallel_output=False, - pre_process=pre_process, - post_process=post_process - ) - else: - if args.spec is not None: - transformer_layer_spec = import_module(args.spec) - else: - if use_te: - transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm) - else: - transformer_layer_spec = get_gpt_layer_local_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm) - - model = GPTModel( - config=config, - transformer_layer_spec=transformer_layer_spec, - vocab_size=args.padded_vocab_size, - max_sequence_length=args.max_position_embeddings, - pre_process=pre_process, - post_process=post_process, - fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, - parallel_output=False, - share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, - position_embedding_type=args.position_embedding_type, - rotary_percent=args.rotary_percent, - rotary_base=args.rotary_base, - rope_scaling=args.use_rope_scaling - ) - - return model - -def add_text_generate_args(parser): - group = parser.add_argument_group(title='text generation') - group.add_argument("--port", type=int, default=5000, - help='port for text generation server to run on') - return parser - - -if __name__ == "__main__": - initialize_megatron(extra_args_provider=add_text_generate_args, - args_defaults={'tokenizer_type': 'GPT2BPETokenizer', - 'no_load_rng': True, - 'no_load_optim': True}) - - args = get_args() - if args.num_layers_per_virtual_pipeline_stage is not None: - print("Interleaved pipeline schedule is not yet supported for text generation.") - exit() - print_rank_0("WARNING: Forcing exit_on_missing_checkpoint to True for text " - "generation.") - args.exit_on_missing_checkpoint = True - - # Set up model and load checkpoint - load_context = nullcontext() - if args.fp8: - from transformer_engine.pytorch.fp8 import fp8_model_init - load_context = fp8_model_init() - with load_context: - model = get_model(model_provider, wrap_with_ddp=False) - - if args.load is not None: - _ = load_checkpoint(model, None, None) - - assert len(model) == 1, "Above condition should have caught this" - model = model[0] - model.eval() - - if mpu.is_pipeline_first_stage() and mpu.get_tensor_model_parallel_rank() == 0: - server = MegatronServer(model) - server.run("0.0.0.0",port=args.port) - - while True: - choice = torch.tensor(1, dtype=torch.long, device='cuda') - torch.distributed.broadcast(choice, 0) - if choice.item() == 0: - try: - generate_and_post_process(model) - except ValueError as ve: - pass - elif choice.item() == 1: - try: - beam_search_and_post_process(model) - except ValueError as ve: - pass +# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. 
+ +"""Sample Generate GPT""" +import os +import sys +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + os.path.pardir))) +from megatron.training import print_rank_0 +from megatron.core.models.gpt import GPTModel +from megatron.training.arguments import core_transformer_config_from_args +from megatron.training.yaml_arguments import core_transformer_config_from_yaml +from megatron.inference.text_generation_server import MegatronServer +from megatron.core.transformer.spec_utils import import_module +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) + +from contextlib import nullcontext +from typing import Union +import megatron + +import os +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import InferenceWrapperConfig +import sys +from argparse import Namespace +from megatron.core.inference.engines.abstract_engine import AbstractEngine +from megatron.core.inference.engines.mcore_engine import MCoreEngine +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import GPTInferenceWrapper +from megatron.core.inference.text_generation_controllers.simple_text_generation_controller import SimpleTextGenerationController +from megatron.core.transformer.module import MegatronModule +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + os.path.pardir, os.path.pardir))) + +from megatron.training import get_args +from megatron.training import get_tokenizer +from megatron.training.checkpointing import load_checkpoint +from megatron.core import mpu +from megatron.training.initialize import initialize_megatron +from megatron.training import get_model + + +def model_provider(pre_process=True, post_process=True) -> Union[GPTModel, megatron.legacy.model.GPTModel]: + """Builds the model. + + If you set the use_legacy_models to True, it will return the legacy GPT model and if not the core GPT model. + + Args: + pre_process (bool, optional): Set to true if you need to compute embedings. Defaults to True. + post_process (bool, optional): Set to true if you need to want to compute output logits/loss. Defaults to True. 
+ + + Returns: + Union[GPTModel, megatron.legacy.model.GPTModel]: The returned model + """ + + args = get_args() + use_te = args.transformer_impl == "transformer_engine" + + print_rank_0('building GPT model ...') + + # Experimental loading arguments from yaml + if args.yaml_cfg is not None: + config = core_transformer_config_from_yaml(args, "language_model") + else: + config = core_transformer_config_from_args(args) + + if args.use_legacy_models: + model = megatron.legacy.model.GPTModel( + config, + num_tokentypes=0, + parallel_output=False, + pre_process=pre_process, + post_process=post_process + ) + else: + if args.spec is not None: + transformer_layer_spec = import_module(args.spec) + else: + if use_te: + transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm) + else: + transformer_layer_spec = get_gpt_layer_local_spec(args.num_experts, args.moe_grouped_gemm, args.qk_layernorm) + + model = GPTModel( + config=config, + transformer_layer_spec=transformer_layer_spec, + vocab_size=args.padded_vocab_size, + max_sequence_length=args.max_position_embeddings, + pre_process=pre_process, + post_process=post_process, + fp16_lm_cross_entropy=args.fp16_lm_cross_entropy, + parallel_output=False, + share_embeddings_and_output_weights=not args.untie_embeddings_and_output_weights, + position_embedding_type=args.position_embedding_type, + rotary_percent=args.rotary_percent, + rotary_base=args.rotary_base, + rope_scaling=args.use_rope_scaling, + rope_scaling_factor=args.rope_scaling_factor, + ) + + return model + + +def get_inference_engine(args: Namespace, model: MegatronModule) -> AbstractEngine: + """Get the relevant backend for running inference + + This function will automatically choose the TRTLLMBackend when possible, and default to Mcore backend if the user does not specify any backends. TRTLLMBackend is not implmented yet. + + Args: + args (Namespace): The user arguments parsed from command line + model (MegatronModule): The megatron model. 
+ + Returns: + AbstractBackend: The chosen backend + """ + tokenizer = get_tokenizer() + + inference_wrapper_config = InferenceWrapperConfig( + hidden_size=args.hidden_size, + inference_batch_times_seqlen_threshold=args.inference_batch_times_seqlen_threshold, + fp32_residual_connection=args.fp32_residual_connection, + params_dtype=args.params_dtype, + padded_vocab_size=args.padded_vocab_size, + inference_max_seq_length=args.inference_max_seq_length, + inference_max_requests=args.inference_max_batch_size + ) + + inference_wrapped_model = GPTInferenceWrapper(model, inference_wrapper_config) + text_generation_controller = SimpleTextGenerationController( + inference_wrapped_model=inference_wrapped_model, tokenizer=tokenizer) + return MCoreEngine(text_generation_controller=text_generation_controller) + + +def add_text_generate_args(parser): + group = parser.add_argument_group(title='text generation') + group.add_argument("--port", type=int, default=5000, + help='port for text generation server to run on') + group.add_argument("--temperature", type=float, default=1.0, + help='Sampling temperature.') + group.add_argument("--top_k", type=int, default=1, + help='Top k sampling.') + group.add_argument("--top_p", type=float, default=0.0, + help='Top p sampling.') + group.add_argument("--return-log-probs", action='store_true', default=True, + help='Return the log probabilities of the final output tokens') + group.add_argument("--num-tokens-to-generate", type=int, default=30, + help='Number of tokens to generate for each prompt') + group.add_argument("--prompts", metavar='N', type=str, nargs='+', + help='Input prompts with each prompt within quotes and seperated by space') + group.add_argument("--max-batch-size", type=int, default=8, + help='Max number of prompts to process at once') + return parser + + +if __name__ == "__main__": + initialize_megatron(extra_args_provider=add_text_generate_args, + args_defaults={'no_load_rng': True, + 'no_load_optim': True, + 'exit_on_missing_checkpoint': True}) + + args = get_args() + if args.num_layers_per_virtual_pipeline_stage is not None: + print("Interleaved pipeline schedule is not yet supported for text generation.") + exit() + print_rank_0("WARNING: Forcing exit_on_missing_checkpoint to True for text " + "generation.") + args.exit_on_missing_checkpoint = True + + # Set up model and load checkpoint + load_context = nullcontext() + if args.fp8: + from transformer_engine.pytorch.fp8 import fp8_model_init + load_context = fp8_model_init() + with load_context: + model = get_model(model_provider, wrap_with_ddp=False) + + if args.load is not None: + _ = load_checkpoint(model, None, None) + + assert len(model) == 1, "Above condition should have caught this" + model = model[0] + model.eval() + + inference_engine = get_inference_engine(args, model) + + if mpu.is_pipeline_first_stage() and mpu.get_tensor_model_parallel_rank() == 0: + server = MegatronServer(inference_engine, args) + server.run("0.0.0.0",port=args.port) diff --git a/unit-test-job-lts.yaml b/unit-test-job-lts.yaml deleted file mode 100644 index ea64ccd..0000000 --- a/unit-test-job-lts.yaml +++ /dev/null @@ -1,107 +0,0 @@ -default: - interruptible: true -other: - artifacts: - paths: - - results/ - when: always - image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 - needs: - - job: functional:configure - pipeline: $PARENT_PIPELINE_ID - rules: - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" - - if: $CI_MERGE_REQUEST_ID - script: - - export PYTHONPATH=$(pwd); python 
tests/test_utils/python_scripts/launch_jet_workload.py - --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case - other --container-tag 20283570 --cluster dgxh100_coreweave - stage: unit-tests - tags: &id001 - - arch/amd64 - - env/prod - - origin/jet-fleet - - owner/jet-core - - purpose/jet-client - - team/megatron - timeout: 7 days -stages: - - unit-tests -tests/unit_tests/data/: - artifacts: - paths: - - results/ - when: always - image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 - needs: - - job: functional:configure - pipeline: $PARENT_PIPELINE_ID - rules: - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" - - if: $CI_MERGE_REQUEST_ID - script: - - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py - --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case - tests/unit_tests/data/ --container-tag 20283570 --cluster dgxh100_coreweave - stage: unit-tests - tags: *id001 - timeout: 7 days -tests/unit_tests/dist_checkpointing/: - artifacts: - paths: - - results/ - when: always - image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 - needs: - - job: functional:configure - pipeline: $PARENT_PIPELINE_ID - rules: - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" - - if: $CI_MERGE_REQUEST_ID - script: - - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py - --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case - tests/unit_tests/dist_checkpointing/ --container-tag 20283570 --cluster dgxh100_coreweave - stage: unit-tests - tags: *id001 - timeout: 7 days -tests/unit_tests/distributed/: - artifacts: - paths: - - results/ - when: always - image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 - needs: - - job: functional:configure - pipeline: $PARENT_PIPELINE_ID - rules: - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" - - if: $CI_MERGE_REQUEST_ID - script: - - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py - --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case - tests/unit_tests/distributed/ --container-tag 20283570 --cluster dgxh100_coreweave - stage: unit-tests - tags: *id001 - timeout: 7 days -? tests/unit_tests/test_inference.py tests/unit_tests/test_tokenizer.py tests/unit_tests/test_utilities.py - tests/unit_tests/test_training.py -: artifacts: - paths: - - results/ - when: always - image: gitlab-master.nvidia.com:5005/adlr/megatron-lm/mcore_utility:20283570 - needs: - - job: functional:configure - pipeline: $PARENT_PIPELINE_ID - rules: - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" - - if: $CI_MERGE_REQUEST_ID - script: - - export PYTHONPATH=$(pwd); python tests/test_utils/python_scripts/launch_jet_workload.py - --model unit-tests --environment lts --n-repeat 1 --time-limit 1800 --test-case - tests/unit_tests/test_inference.py tests/unit_tests/test_tokenizer.py tests/unit_tests/test_utilities.py - tests/unit_tests/test_training.py --container-tag 20283570 --cluster dgxh100_coreweave - stage: unit-tests - tags: *id001 - timeout: 7 days -- GitLab
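As a quick illustration of the rewritten tools/run_text_generation_server.py above, which now wraps the model in a GPTInferenceWrapper and serves it through MCoreEngine and MegatronServer, the sketch below shows how a client might query the running server. This is a minimal example under stated assumptions: it presumes the Flask-style /api endpoint that MegatronServer has exposed in Megatron-LM, and the host, port, and payload field names are illustrative rather than taken from this patch.

import json
import requests

def query_server(prompt, tokens_to_generate=30, host="localhost", port=5000):
    # Assumed endpoint and payload layout; adjust to match the MegatronServer
    # version actually installed.
    url = f"http://{host}:{port}/api"
    payload = {"prompts": [prompt], "tokens_to_generate": tokens_to_generate}
    resp = requests.put(url, data=json.dumps(payload),
                        headers={"Content-Type": "application/json"})
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    print(query_server("The capital of France is", tokens_to_generate=16))

The server itself is launched with the --port argument added in the diff (default 5000), so a client only needs to point at the same host and port.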

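For reference, here is a worked example of the token-overlap F1 that F1Metric computes in the evaluation hunk earlier in this patch: precision is overlap / len(pred), recall is overlap / len(gold), and F1 is their harmonic mean. This is an illustrative sketch only; the strings and numbers are invented, and normalize_answer/ngrams are bypassed by operating directly on pre-split unigrams.

from collections import Counter

guess = "a b c".split()       # predicted unigrams
answer = "b c d".split()      # gold unigrams
num_same = sum((Counter(answer) & Counter(guess)).values())  # overlap = 2 ("b", "c")
precision = num_same / len(guess)                    # 2/3
recall = num_same / len(answer)                      # 2/3
f1 = 2 * precision * recall / (precision + recall)   # 2/3
assert round(f1, 3) == 0.667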
[GIT binary patch data omitted: base85-encoded binary file contents (not human-readable) are not reproduced here.]
zUBCsG$6J!}RoFYZVLrYBOJa8?7B#DMK7o_lvvM^KbcLnr<*(in2j3u}hi5Yzpa7n< zM19;^@r@3x8Y~MMv?mZsq+PAy!C{t#2a{WOMtmi=_d`yPl;;-8hb^!!=yVn)q#l(e z2#34eFUHR*b1_$#WCgxis@JUfjLsR!CN~|mIVtR5xU=9?o!N_XU&{I0cQq#w91`hN zUzv|qLV5JaaN04Aa>vC=0(~q7;&q$+M8v&q9I1(+C0LKlRt=OH{6V`kiHPjh#^hOw zrU&IL7dh8&#?a)(6Ma7)zje6D{Ctl|8%#pVy(Br46hjx4p+sL`ie+PwU|Fu7bai@A z%!M|=7`P8Na5V$eag|-&QN`!ae^LH2>#c$Te8J7TiW;EkbezzPY70Zt^q}q4#Vwia}bk$NWJ43)pEC{jS8F z9@zHtu=4`1x(HHlt0#n>5R3yp*QjE(0yZX!hXk|4{&7@*n@Ho-9Z@~o4+h*+<+ zhkzsE^^WK~Bm-O5UAk^|t*>~$i$h_Ho%w-iZg7)3^<9(s?A|3y8$2n(a&D%ejU);z)54I}@LPph%%EVg6!Pu7xzJxeDKg!z!K_1??Lq&GC)SCuoe7cC zD@7UA4n9VPXpF;>i8V{I*xujPEl8fx_1PhJ>oXHu_|3kfo8P^H+#^G;&4j6^C=e(G z!mDfDqU%y_j@y4DKa1}BL9^$4qLSjYU~?Kh;oh?(S`g+x(%v-hFSap@eP0HeVAK(( zFU2#l(X!Gm+#eo)sFQLERph%JCa4Z3uEw=x$t*d_4|Y1bx(&hds=9+Fov*|!HWxLO ziO$1}=368OE?rOH3ecdLfFp6QKs{N~a$e|IL z>=Af8(fPUc5ieiR+!aR{PG-Ej{{nW)46LpOSoK?o*mQE#o~8X4L9hOc3fjURM6fLn z(3jANa%yAtqC#s$SYbj-YK#4#D~d6<)ZZ85etj*J;sV2nlux)}Y*GlqCC&hy?(<1p z;C!tAwYzWO^M=69x02Buj2X?a=4gKR;qXFjVA051$2Hd8zK}b{ns1ZL>{pz!uhz^; zSz|&--oZ^;YsG|ZhgapX1wI_O@=v z=IhrbfM2P|FWla7>ET<*R0W4$Xx zk_z8!RLh=lqosMu*L2d1pV^w_0=pU(imk-e3sw`KG*I(@lbO8fDoIrcU-oI^yjMbfskUSC{Pb zj7HSB9?nw04pwq+f*xU?_BMtCL9aP0oMh^>mUTniWXEH1Xc4M4S zw_3c2C}PI0dA+e*8ZxnmU&j{ptB{}MaBSJ>9fK$It@=LWb_x#jeCPt*TuUoXDCl{{ zvPr7<7XN`J(s1U%Fv)KK7nBk!NJB3Z7VcbS<~13K*sGu9hzkR+Y}<$iEYVtm;{?-#Bxmud_Puu<8nCK16QzAAWZ8^x&;o-vw! zwMJQB?fa1Omq2m~x`_mgtW#c)*C{w83v#KA+wssqirmfeN3{-Z7n&x8>3U zaR-*nhLEAmgfy?|`4|7%H~TTY^P>y`V1|d4xIPJlTm+WJ?(~Y^Y<@yebb_|P-IWas zW>vj}uI9h>gai|g3u{b74{xj2{x+FU10&Ndl97&AUxb1@5}&gDbJ{12OrvbBnaw21 z#L0QiXrGPk&N=2N(*!%cw?+w-dg7i|Lu2f`K zNlJ_IMweiPLl71CEc6#Ia^3FtHx8?<#-?1yA984}d2JlYU@T=Iu8cJiV1aO?gM??uX|xY1f!#=?o>9Ki9N}*P=6L8R?KfL$RIS z3&oLyemA2DGL0u*)1|Ih&QdikBfEv4EAPV;0s0<;A>U8eH|$}Up&JsB1q16_DcV1i z%fGczF7^}(Y&}KD#vclmYiSfdN9>x=_yu3NavmpO?3qs>S=~i!b-61C;`@JX#Q3>X zysL84o!UQ>WjW&1Z@(yurtJTZbM;j)bP^uVXrvLg+U&CGQo^NvZ|(nu0l)f}qyZj4 zqYl;6Suiz+vuK2zWXc<#*QAzsv0ncpBj1td-|~30wEpK?KXy1pTT2C>U;D^E7@p(2 zW(gL+^T=Jj@T)cc8ZwUl1(XL1j7;>-QsDVWU%iDi44oC;UvG7Byq&xM z{)Y_E@kag0ode-pePi+&OO+F+6oz{#NN{VJiT`z8^*==V-~XBj10yecMV~oILI27_ z40}NJ>fs=)V#(~)ywHXDUFNj zGXhSgwJ=hF?c(KzKN;(2y8rbp)C-1U!jXD-uv5h(AUCf_c4fsQ^0V<^M@mXfD^JKt zi4ut=yrjC$AKfN&JZ8Om4%DghlVm5_iGzR_7`XuDPxRwh{17FFfn!I*tEGvY| z3z5c zI!_>;2-JuavKk1Ur!MBkTE#wde%$$lLe1+0*HgGs?B!Ifr0s^7i_%*3&EjiLEawK%xBg z#1-B^@G7^&5C`!_0$a6u;2#?- zf5CXT^H>4?ahYg9e;P_QpG&MKa8k?GxB=AhQuUPPi7k5mimYM!Oei?R z_lK+Fu!O2QI`$?QZZ{-Q-Ty1Q7z+zE@343M*_M8~jlOQ8vFR`B33}s1xY@HpDlpsARte3rU)JPgv z&-(P<-*~EIBJjM8!06T;vZJ2wRXoHG4^q$3T9NXd;uWA?s~Hq(y{!HBOY$PDjSw9a_%oJYwv+8-riR0^{235 zk!EVjI=6ie;miFLqhl@lu$RfA0ipQRr=Ls#Rt$v$I<)FaQJ7$8D8}?&?#_Q`eN1|M zvn;VJa7!Cx+{c9eBVJ4G3B)4)^DFDa7V}?C(jBO+1L}#*5L*-yD+*=nI?8iB-pk}L z{$6X-omgiL4fROdEFOKco<9WNU(+&@7P@L&+Gl@a5{sc=2ZTw&?8}7{8#0m=jHvJ3 z^Zdj{q$vdg^qPIc_7p&WVX79`y8`hrO?n)kUR({NCgCG$D{_p z=zvV^=bAxBXCyM-8GwPt(vx$v!W>4}f02{g7qVMgk9J}p1n)@(@PL}z@Z{7q{xJko z{mhBY@GOYuZpXIdV4v8Cz_&PeJec@_#XoWcTQv4|Y_7r|XD)&yHRRPt=C}eTd}?*| z71v=3tmci5LX|B&toopjkdlgOZmO?O0hfpfO+Y|k6!b1% zvGaZRhj6CQ3TKE|N-CCSs3Z`8!rF~}G)-i&09Kl0bPVR3p|cNJb;UR9SXr^`j^UIe zqVsKPJlY&}6UbFk4ceevlTZU6yO|8;JzH>Gu$S2D;5#8y$VdSwLo+vT6zw1{FJCbG zHhFf+vW*v~Z1)qr8}Z(>NXwbQoPf3iSA0{%UKLBJ&7lP+D%n?38x1)}FYXUC4`$Dy z>lZPoJIm~|KZZp)?{AHQ)v5EUOk~IG8FSdJPCps3Wm8`q=q+O_nc`hQ0_}NtBZ$X` z{*WKPaTuUC&%b)67FE-IR&9Uzh1;N6IWM>%BFjXGu5cn?QRZM(rc!X=p_?SwD>+)0 zju7Vme5F7K7yOB-gQ<1px%%!{WYd?=pHV=YMdW1pbTm-KgQT2qiVAwSqKF0LI8ZK> zuk|Qb)$#8y$)?^@_PGU%TMnND&9krhVXn@c@}=ziqz(rcifu-E6nekU^;RUogGg$< 
zX1}N0d{z#9mi(8a!KMp=0+Gm>nFLkq6d&vtN>{>FZES2F%_)!I4ezh#yNSQHHOe;| zAsXHp-f(Sm-d|aP{txvHL}|>!qR>5L%(fxv*uHfU7gu)RZ zSBVZ&{h|6@)xSZ>BW3k_r7)oZznbo%?rxjiM@Mzq76m-r*SKphJud1%DZVsJ^kf1MR zQE}c|>uO0bl97?Qa{%KK=NX~a9-auwAKduEwevsfe*_j8Xv6G43FmXh+_<>7#U}NA zrV;?ZF>6IU%`c0KxxKu+n%~BJP$+(Avgzi$=?0@`v~IJWF@tc|tRAIk;W;L`0@r;XH`1x;y&&FAuM!*|1uBdwD%A{!(-u z6wAL!qNo4he6TmFJhFYS0QCE>>`7JD_wEOk(0!TOBAYezPKlds#w0XDS0WNyL$jy zdsp(QPH{|(@I{!nge^~UzMpVBW_!f)z{T>x3sc?ye$5PS07f}AJmp$lxP6HEiNJ9Y zRwG5rv!2EyWxQ6_)-8Au><9=5lPDL$h;{*BTy@y)ia{gZ>8q;*=d}}>*5-g?rhnG6 zdDh{yHxMM8jGWv20d5{%ci$GiD@w|KPjpGPb$2UC=P zrxX7eIlfH_eHRz2 z(Alk9)@-HDv}I(dOxn6OF`kU4K$1ZX7c6A^{6$V_1-LaG3V$y}p<}gwe0*Fgw^A367(X{BCnvZeV{UQLysMxF4O@|J#rTQrcSHQj zcli@^j1+?9#xV1v)P}mdyIG>N>wH+dl`c|JQYr=OcLTdvS%afakX8V$jtA~>1~|d1 zxMxc)7LYKYbt$BY2gkBySm=Lw|9|Fa0Ctc&sc*MG1ctb)5x67a_Y<$lT?@{zU75@P zXU^IO2l3fVKj6|8O~-E+g1g1;@D{yt1V}95IaYNdJ_EE9wiEg8UJUS*lzdYm!x`GPlC+#$C!{2-6#}+pY9J z=ucZ=q5s=NRNO|U5VMotEwQ2dq`yt`OT$rNf!WvHYa=BRzRup7f2neVKhu80{6E|Z5E{gl0O%_X_m*9J?{g<${z%9u~q&)8?^yU>qJonpT2I}Vktgt?> zszAxg5B3(ORQ5i-P$vT2elK!yb59X;^)YgCUPdG2!X|QD?JysGhT{qxhPMyuoRD^;G_(dFWsOR<%}JSDDiYZVtD8+F2fFn3ilAwWW!E(# z+X?@K0jtpvWm6;&wuG6ss>IlTDO~h6b0FXStj~WQSkKRrrQK4=Q-KtQtgB~xf%vH} z!ttIEjUJr_SuC|v0~U4@MRRFJoI}7R$xoRYC8xJ-kO<%2UrpqHCgU)6VRUh{O3E=6 z8H9``*XZ?P>n+0cFFb+!9Si;?NWYM=k2L2Iu{oP6rk2-WL2+N1qkR{)4*@6ESL)cQ z6-)$3DD{*tKBJF(5GuO=1RxV@DV0=L!oYc2nisZZ?Q6C^-@C5gy#`zHbb--_?Q=DY$t3?}Dn-KA=n6q^>!B!pO&PDR__4AG_C0e~4k3-Egsoa~ZX zD@3MuoRI+CGv*?St_K&Eo2E`$zC9IS$=a46boD>T_wN^6D%kl_>|5s+9l7B`^sg_! zTgo8=aF9a_FA%j(KX4h4ihcQ#e0NR#z`9_m7GEIwlrMkF2c+%t=-M0ElKod0SP=`&{BjwL~mf~el+t1^AoBiFt`JA#)}wN z!g@g0Ett<$1ta6|qAUB;wYlDW2L}rarroCU-KMK9HvlHQUo|G8Tn14Srvos}rnB6I z82~skET>vCz%jSLCgu&>zIv59kW;!N4`K}v{hY64ZsX~CZ<99J&w_dOmq_~y`8b5A zfCG>SSk*^1YFM;12vatvMfkafjEm9~GRacZN{gJ*Kw2ZI{O+h_%#I3&`A0}{TG+rl zmveJaa>MY}_sbDBc=Uiw$3)W+*Df$?|q7IT0jb#QJdZyY4GOR)JYrP zTpE{B4HmxwE_fmwjdLSHhI!hy84WVd+g%JC9JsCBATUY-o)^@TIwLuKVRo~34m~fl z5*%O}=G_<(RN{#h{9ny6xE?;nPavjAFXzXv^{#QzFCP=338K{?Ck9$q8rW#F0aUmgy@bIkL;Q9 z2I&Cjk6EC&xVXxPkh`+YX+1k^pn=_&XIb>o-kiF#MbeBD=&H}03zRtwso;dFA#&m% z1Fn`#ug@Y+K#9t?yVEOy?>^Js+smHL;J^FQmyr7s-&&H)GjM6w{bA;Yn3)9-PlK?k z5|$@Qm`X3(o`o5YFU1RJ=gA?cH4^y1&10 z!1uW?lN`h~Q6E&x%NHxp>vy--@aRiHPN;sdT@OgefQA5(!{R%?nE8it$=!<}6Uz=$ zSWVEdCN4ie-)Vhy_36^m(&7ipw(c{MPCyFW?!{4~^RCL)w`~!Slap(L%C$Ivy09a) zK&$B}#BQTY+D?Y)SGKpycFlnqM^MK7S5ec?+%dQS!_h3vc^+>bo(XWV`I=?YWSHAi zScYh2yTFpM%~q;i1E3#G7A#@kYgV>3*;?C+hKAO2T!VL*-~@?)^uF!-fH*8K82bpW zAM}Ev^312Xew;16L$Owa6mI4w(K^k^coHxyoWBARSEcJ!#wQXaz~)5*$}{SpZ1PC{ zuTc9BLY2ZGx7X9Cn#1Ef9&jO33h!Nvhl#`AoD{-=1uxd& z3B<;ktq2?%B-RaJHja-3Z(7qv1A*`hJ`r*UAR=|NBCp zF80*U=#YB-YuD183tGHTw)2Q!Wn!jwhU!TDgM`74rT1DLDwzKjuVt};nhN!c&HXUp zqZfR%m|}q4gH5~>o8ie<@V_BBYONVTaG)eay?kpjf*YUT(%d|w=a?2g6t;t~6=D12 zoAUh37!A$80O`^?^8fEE`ts*LY$ec0@prN`&=i+DYkYK(vi{$_-XDr?WCfu#Vs-%( zwz}g3T6-GWQW3u<{m;xe>=WoR;;T=JZ-cRp5|tx0^b0xL6!Z^@H8nGoj`bnLvGXSv|@U+;zf2s)rv8kBk! 
zb4hif=P8W90+79qOV(G8o((oAN)sK;?*sh~{)qLmDo~cE-eQPmCMuejDn)-Qdb*hI z2ejQMRy_TG;z60%Cx>b5H{k?{%nx~%QxRD#d!k+Xdi{F<&54bFJTUbU8*XWYyv%8( zh24e??J@yD3hVC*iDWMUJE_}mIQMJm%khAOQ1FTclw>{N%5=*3u{0mbj8PA|>2W_m z7)Loe7K0eVcXp<{zxjs(_yv%LiiT&gaoI9jzjNKo<=t+zedI2FD2r0C=Mb?8uroTb10=?(aAH{eg(l$bl&B z=2>2uKj7~l4IAqXi_$vuLjSmS?sqZ)eFjeFD87lEIw|Zx%<+f3_MX(b_IvsGn{^s$ zaR0cFtFj?OBm4}D?-GiBlgSq+0o;O*F#gj5cMkT5`f!@PkLvm=c#ilP)7d;c-#ek0beG2aB~y}on=0=8d*tE%&H zOiWIm>#S`6IWPa;2ID^}XT>fEMw>^PFB~!F^B8@_HVXz0yC31yMsFSM9sZ~;&^w5) z5~sUPXnPp|Mgd{r}Bv(<^ITcmib#arpWO0*wCc91_5Sx((0G_+G5M+u|>*HWHC z*v6kb-dXR%Xc+`{E=4)l`>(+!9{U)MP%P`JSil_qi;yb~kACYEA2#B@Nd14~v7bTc zn4et1dJ|-j8uFLEAzv|HY@j-@2EF))yq_x-TIlBf=&&Ah@V{6Hv4#0b`st}bFcXbT znPE0k#>1hLqtTQ_a2(A#^yQbR{pc2LeUDW=`lA&6YuI*SSW?}9S@PK*I26ADv%l6~ zaT8WeE!V#Cdx`$nTz+NW2Vh}7zKLtnkt_MtuYW|tzgDU623VSB$ac{uwP*-FlH7m+ zUBVHa5M<$?$~Iq0-jYk18L8Y4(B&P!yIu;8G0mV*fGD1F6?i`wgF3#&a_4()>rWq>;ie z#4xkx2y=BDE&!CA2GZwQR@Ec&N22w6ZpmH)A;9Ya)8|)?H<4eyp1KG~O8i2`+6k4D zAO?xY-hSJy^oy;NNN%X}{`*E*&dyRhCM(_G_OQF$Tv;q~H&X}vbqz&C#~e%Ujj=Y} z&mj&}}GvD3dh#y5AHl{+a)Hw3pZ47xcY(rj(L+XHCg&Y>OjsGoepDxwdSHLX0Y)!B~q)O`vYV zWBri{nHblYVw7mt#S7#!)q)C{c4P#~yhQ7t37*G{lgNTjq%wS4>2tm53#KZIjltKG ze=D#*Dlv2mRFQ<0hfy7-rX;dF&;z@vw<|^5#Z>Y=MfbZ?lJ7MYT{9erQrA`yk+086 z!;R_)e)fu+(x7@rK}L&rcq*4dN0qUj+~>S1quy!`$4BymJ_;8TC zCqFY5XsaS3`B8LP$*KBgshn9I|0i2w+iHr?dgbyK|;eE0Ra zFD1H0O}Ctzu({*1G8m|SXMt-5I%CsQSF^pUy~Y!ukRrq7T<1|nBTeV}ImdHdJ=E(jh2GQ>_)|o4^9T9Xk8lySd2EJwY zBq>E(T_*X`xX8Gi@c3rO8y3r$rvnC(0++=C6q0Khmz1QaL;_j79!fs#=n!{E7_gL# z6$`SWNvZ&60+!2>z367#wi%9LV;~+dQ)Q4%d)R?J=#%p$3$3il@^^dW>x||)eXJZ& zQWawK8n$Vqdg>~Ctr;h@98n?^h?(horZyP;v{bPbiQA$dr4gV9k{*!rcZS}+)ubgU zR1%H27uwqHZsxvcn^3okIpR3BKG9@#uxf3WF_#2yks7?Em}s1noG)WgR~lg}!}suQ zvRbll=xnP^vJ>}3^MJZA3foRGFN^vW$$OJGs2gO73zntaR+p0_fLr8 zdhOYa*z%MO9y1eJfdw)vif}r_hyD(_g&4UpvcC}HZ7r=9bT1nS*}Le-V!Q=UoH9$S*l zePrjYs%xV7t!pKC?Ir|oi|sB;<}ODlL?E+d(WLG)GK8pnze3< z8-Fqz-iq7xNU8|9(7V}Z;9!HoYtNyF|tzKe4PKlW{Mqb#BX%H-)0SmiVci zs8~r_N^P-sdBe>Sx9I2m1Ok~h3_d>kxSnmOwG~nCa7&njbQ&}q=<)*NSXvO019RV9 z%w=Hd>|n-)A>8;ek1rx$2jO9cvXLgt^#LbCX~gK<%QGk6UYgr=roI2e9xI7C8~lD< z6mL}EXuO@MC=o`$xn~okq-z;|pW~DK{DwC>Y(?umDTK-1yI}1z^T!e4-i}dnXut8e z;_`WlB{xG80>ZY5nl71cXV0;=+eFnDT!i(~a+L^z6EvCh{8O5lMISm%D-g({lX^5< zad|MU;h;lZX2l?;g(2n+rN}kTxO7IW6Ib-Mgpms&VemWc;EMvI6-90uSViEye1d#yOSgflhWN)`_SbdtL7BuMOVP0pnu8HA#3Am0ezN2_fN@0jX(V&GWW?c6nAkGd~jFvE^782ldF;K+E z_gP|<{uc*6=i=j)q*U}@0`~;xJu8hl!~=n@U)Y@DZd;J7456?xMpB#~!^O*Z9pd(E zB94@FVruffyCx~)RT5VZ$(92_rFyI6y-T??dMrfxoOo^ik0v*483bNQkFh2Tk8ZOf zFiF<(CJ7(hZ?+6gQSW+`TJx}^cF}cH@?J|_jlO;~_fi?nx>R@IqnJ-^z3hzM3!eI= zm|djHJG&`~WzGF*b>b7_3AIz`?_(nFx6Puj*-Rdk+w1elSC?IU;IM}`<>%IxQa`u0 zPm|(q2CDl(fsZ9MK@)^zWc>WH<#zhHHM6_M6Z4zAd=I=mC~SGX_3uelHSQ)w5pxk8 z>{=06ay;aZ>aHB!!9Qb={w;S$qMU*qblk6(JmrxQwdzf?+S0gq#`TPbeFFDSs_(FW^)@Pscq#I^a zR@R>?qYkudE9+a2Jk^bUubVC01n%M%oFx-W8e}f9^4*=U=dxYwWcci&s}A?+yu;O0 zUx^|mj^x~Oz_jgDh3%ZD~#7ywg8ljLe;z6`p>~EoxB#p# z-ZgvIf|ifCYEooLgsAfUy=MaGRt=tzTvL9PLW6}L(lWp_rRyeSEO$nnC9=Bjwth1M zYEk>L3a$r2V5sKy$X#tw4}_p;b``p2!Sd17$m?ATHMJE13>6eSIIb_;H=liZh?#>^ zBqdkwQFtn#qD>ezs{hRzo1O4*cb(A2 zkT5^~NP*F)lyVNy`C~V)t-!cMBCCVQUx0#o7&D;nXk;HsnTN6tGjh4UtmZ&PXY}47 zt1}3#T&%6Y@V?f5;PxB=m*oqJcY3ikZiJQS!hQFOo$$*UEgXrXBzTDNq@==UZbTxu zJdGn!e9=$jU7go$B4ZYK=;>F_y~cu-L~+A$tX z8bmFmDRW-CetZ30s+*u`1rcFevGfPZsQ2lbq=iVkMGI`4Q6u~ozS$jF$^Cv^$Ar}xPKdTl6-+Rz-29wia?X(jm(*PGUm(~aBx&DsmFUqKd zODtB^mzf6?E(f2dY9O3?B320rFsEG~@vUAhTo=HjT#oy2CeDy-G&^O|*8t9=BT>iO zm??VmN<<{ZwUORH&4OEW4%O!cM5>L=wR)46wnIHq(v!<}t$o>1}$>iCl7(Y9iX6 zt_(j07Qr(DmF*b*FH~818`QSm!OZPAcqV}h 
zi9hIaNon$odDCnV=jv6k79F~#RHEi0W@XeXVs$+_zsyNT?V1>Ya>j=JFS{%6Kv z+?8iNs74rMoGGcdIDThspz)Z%Loy(`4|=~IqU#WPG+)ekgejPWrb1&rdPIyx zMv~~YFmh$`M4vy8q#x}jamQ42{N3aIqC4Eh(p0VKT^>u$y&lVRJea)eJ)CpHV%t|1 zu&|h7iTSzQAa!sy4o32&^>nF=8$Q>q%@Z7;{4~YZRs(^z{NK3xE1;5fK(H<#1mLm{ zD47BU1RoWe>}zJOri)zQ5W+xqkO2n91k4KDAH=^-Hzsyyjp@|uZj38Crv_iqd9%Esk zKtX4;i&TAapL(m9rtCo-iLKl;=!(Dk8X`qjC^Dg1s$1~TBy6el%&sP?cB%2|#}u79 zJ7KguJT|RJZiOn2+F8q~cLN%@d|0Mcr?4E=2$`PiF*RsztJsH@$wfxu2O~($6yU3K zTB!?&L6Z&UjCUek-KG`rTm-V>57PQrPCI-=TXcLthl4>Dozm4m zi-$`#N7kyWucD_q#QV+_I%sX@dJ0b99>m2nfJ{@NBO(fKytkZtU?V`g^bLYvK?;Iz z{kztt^CUn>O!QOu>R8YD337FRsYu11d_Jup$q^k`*^B2$*PFi0Z$xpi!9C{9hnM5s zv6za<>9_FMuv~o#axw80=X9BK?Ju2GMYp?LK(vZ-7-x_AY&!20lx!$r{N|ygbA1fi zg5&>U!h0{0y{4-jW6ID#6qhLLu1`;gkQM*#0Yt@0x~wEMS-R22Z1&Z%`Hrj3hXEaj ziILZ{McdFm!|kU)6RpHPx{VAc$UGce4e#Z)99mo+tV<|)&*43Sn|PXFJcY{bK1ChJ zcrypp!$>}PqPH@$<2k9CQHExZdoCwvl4;g=BYA6*7fpF7Xx9jrGMH01IJQHsq7q;x zb>A`K9r6s$t<5%?W0$lQAu3@>o>Dm60s0r`zxx+TN(;8e{hS_M2A44X2;pDL;<@-2p8r`o;0rW9i9zWX?N#4`d;OR zWc}=OQ|Cr?9$bQv7Rm!1Vr4h!sWMj$mjCc@!*#)TapS!&bH{Yty{y7TOw<|N#B_8w zyrEgkbOb*RR7(FdYh#PuaUYvT146n=^7>`wR3-N(#O*Ek)#>>X3@>&Oc)fmU9v=S5 zS2@sM@RJ>Hd5=d@_q?*w46P%n!*#q|v-u+KN9s*KkVC@{>Ah)RhSlnHN0pMz;D#gI zS>>1JzwPTUL=IBl9X#(*un!TXSOa)ETM{hdg8`i=PRIDnFBFV=;jJ(EI*QD-3Z!F* z6flZ&WXxt9*4T5^fslNuZTEY#q3765yMU6C)`<^@qv4MDVJTHr4AXHxxOamy{xZK$ zFxrTN0VVCYSI0+x2J>oWx0$(y(d|gb0=2;spAY)n^N}>&wP?&HQ)}~>>G@Tul_TC# zDDtSi;n8#9u09GwVy{`vDQ@-e7GWcJm!wh+GmVwLrjIdWHRgI4#q-E;PGv|D2Yj3GD}7{#c|Q!eXRkTQ;$;`}6FjhpZ*^-;74DJj zMrwauI2u~JiNV|J{M?_A%Wwzv!K74_U(|V2#-5THAu_WsVa_WF`*XN9U(u*9v106c z*BY5~FW2b6l10^qNcEkhD(~!~#?gB`s00tBG|4IR)6)g;?z2*dVB%S@O4)Nsspng< zH?wyP&-D=JQ<8f$=GsYD346I}HYm6D=d(VVNR?9>4iS3ye_6XOoFCnkmR3yF*KQUyR{K@cO6kq(@*lnyRO`lnX0-DhL1U|*7mrTM@&@@i zk3Fw@ySI0O&lwwT+ULHsNH%i#)ZNP^MZh@brjHdIk)v=%c!E|&h0eShcg4=}+IrFM z;@}{{=Qq>1hbr}9ivHJUC}M!Mt`k*_v4;T(LNUb9*1X`NWqjA&#A`8c0t02R#bgXH z2|8tFNC(_#?dH0*!k-AFlwf7{cLQ8rKznGj4elF6Bx#p`yW87MB{baLQ*~E5nl<@G zwb2D?i>D2{YG<;`sA{EhY9$$3CEr*3AlZo(5XB{@&p}kt7=_U~TB8!uM+*5|3(1{m z2wlC6d*XYenA44>dyKR`&tWm9Co;Vp4hnzTnQ;hs{d%3Sho4=9<X|Vjg^n!_9*?S^7x1Am={z__4ivM z!mnoteN~iLKez`E?D6hT^1L*0HHvusd0@fIQjwi>PM6AkjS^4BMHnzkC;lGV84=g~ zLIVwKuWl0mOyLhR-t=F#kz=|uFtdm5_th)$ZWOt`nSKi;A5&uvtDi$XZ>Gr2P$W%% z7u+R;_KU?0eU=!xA_6ZmT={zQ@g!F;chgutCJU?i8PO zV`HTGte3m1H^)j5^2u+CfW-xZPcH{fj3t*gnemIT?_p;3}yf%x0VDl}dDe5IFYbLRcTuBGjbB(v- z7cTLoL~WVVSxKswL@9_F_KAwcTO?aAE8)T=yebsEr!Y~!a0q2t__=yagz%wZ^v_qPHlB6E zwPl=ZZp^`7lTQ+(SZTmtM(_J;D2+Gg-@|j?A)}vTsLE}*&2^olk`on=5szVv#o!gE zm7q>QnUAlj4B7>G7JZ!dQPhUNyk$YP@`Q&g*l_8w30i2EpauVBHC)ZL$hOgf~`fLsdymh)(=18 zy+oZ_VB)}LB-7hal{o!s^o~w)n-gDX!j%k=1?rSrc`Z9?-ap{Q6w7 zscoX=i`(b_mWv|V#VRv1iRZidYQ$-@FP6`iEzF0$SvZe(M!5QZy?aM2Bhta1X?&W0 z2+r!(t)$^4L2n$+j_C+Q#|<*PYh?QctXIXF?N!f+-cv@!$JCa8u758PEyGL{tHIi8794S5zPI?Pl?1FrZLDH&iTa^Bo&_fj*}kfj+lG zIMi)JsEFyD{Gun;zcu8~G`sDA| zpAm_r>gQLpnNx6ZR81Ji%fI^fGj)57MeW2ZsshLyKU1W}oO zCke~aowa*Ae^XJ0Fsr?>ru?xt_0YhHoY$QX+OFGE3S?yq}9KEXH5arsh^fful(+ zbhJ}0xJ1Zx_g4%2&MM#jEj19Z9R1Xll;^+ctq7~=nXrz9iwG7(X$XA^r)kTu&w1b#EJG3@?tWR{)KA6De-rhCoN#YjZ~M_acf80y`H=M#(93>m7f9!%R)EVc(f3HKNsyBc3I&OOgz{As^<{U z&Jz%BiLjcSFt>paVoX;rFOex^Zt7BS<7>6{PB--&A^WlB5%K% z?0xm=pyo(}T6L21O>>E2Q^NFiT?Ekhqkk7gPqbwR@~#scNlD#(s+XvhwKjAEXS+#G z$sbHf#7fDSXUAA;X(ZbCR`cygtUsv1VB}oLfU?P>>07^12-q7petlX^a&fMY12m(^ zySQaZl^wq5*&j&o@|lkI1Y{>=UF5_lpFgGAw5xsJMQZjOEYUE*8McpEQ>S~5m}9hY zNK9mmf^oP+`Cne5BI2O5fB1TmhqYdZY?&-)mwSkiXXpVzVu%{Oq>}Z3cOGN|uiC4} zXeD^&?Z<7w1=Nb4d_eFBj_U@tspJiN&831Rk5-`xb>-VNPCW1b#s9vt5Qq&4c!32i z{FdQwPR#=vHvCwg!`y3@NrngX5i)&AQTo+|r$a1tuehxa3VDln%#y|qr29ou3ZOmf 
zwXtB8?+4R?U*l&1)mW+v`Vj5LnvRCusOwURV$yB0^(JYjcQOP0<m(|?@XBvO?Rh=uQc>cPZ?P?t!%)f-;*&XLK_@^ULG zWY<=tZKr4!4k1z;N+L%}UMkp|Iz7|#x5riyES6ET(G_(*9UH6({8jq9qV^v#38{dV zO8HzazXPHa07QP$@WlKSIG<$M!J|^|yGH@;sN`svJx{YvKP9TKzF1vU8c0-iU(zZ4 zo(Blv3`fZd@iGyi``*xCe%etGi$H=-5B{v@;oB0$n_+9E_1bN|s8?y@CDDRiw6p_@%)6(->~=1xSLYE!-31c}y@^PyB_qSh{WD!Hl9dI+wD zdQjHsJ(fK&`gVvf_zii{gPs@TBi&qx!n%uwEU05n$I0#y^IOcHMk=FOz8G51i&G~1 zhXy7R_=j?{DQ+7{I>hLVTB&OSe@Q_a=+ zyYl=uPV|j7<>djnP}(F7f2C0n7O-MbY82v9&5n&2m^fczU+W*Vg(xz-a8H{IA01P2 z)wa7Ok{k_B|L4iXKxquPoW@LjqtjZ^mVoJz*P3g0pDmW5EcU!?@0V~hckjIk-nv)p zl9y#@g4j+CILNzGl&gd%Kc7fW9ttk;>{<%fE+h2FM^9AU&i~&;fp`I;RNK6d`Hd(& zD4}~|tTaiSDR}cq^W{}Q+r90wJF;QEEUwpE!ox~3r_J3=m*cpuuywuh%7bfy@ zsH`trXk1-cc|exudm~C4x#kTnKiHOL|`T_d;;|d^;DF1t<5BGbV`y0fD+KSxP(cSYb!U~S4z3w)ElEe{zWOk@BI@%i+LZB z7?TkLB?!njB+;?y>Guf2YV%eEpP!O$yWY~+DTR{8!%1JnbENd(S{6z7tf9tQnQj91yW({ZjwN`h0K11fBF0V>_L^4Eweti+roxEc&=#r*R zXx)t$zJCI^aM=(cr}%lRP9ed2saR=#sjXhhOxC z(i~STwe+Z5F~1n!!U8`iMpCv$J?V51xNHU;CoTUqb&P z5mJj=Fw)qp&k_J_dNTxhj;Xm+`+Fr?haO?vd>k;3QX2}{?MfM_m`S<)F1UWcf{=A% zb*8?>H_imrQ!e3u@*sjKxGGRYOgn?Qko$e*kmniNTD`ojLMHJ`J#hY);W6ndK}p2w zUpUP!w2)F`pv{AO@9KAuO;?ro)*3KzS`dOC8TJafdfYqxXytyg*ZTrYc{f$!H57FaaX6LO z-JYaMbU0rXJ#nw&65zj{{44URs&VGp@7R8T859BCtkJ>ZwHswn7t)+p)u2w8t51p0 z2=S2{c@ZX%vqVMTeR8UB-I7IZnghDZ{0+2#8^s6Pbx&SIzKlwh= z_c77X!F9j^ljyX@(+bzj*3gWB8$h4ndzmLPilG)B#Z@@t>1!-HiGy%7mgVxFeo_tj zQUBJS#HJb6ngP`_!OSQ$@x`V_xVQ1jV#`!wlo&%Sm9tfFoYIQt21Vlq_Y1rsT5+sA zxv_i{XajE;Y35MPW?${_f`V$!=Rq-9GUM>n%wJYZcfC@4^oKK2vOG1p!g=h%(}|9R z+NOwI9?hOd|G!HI{cH2hA};wR9Q7B9QW^6O%f(d2N;Ahtlsf>0fghsuS4CKj^v%D> z961y8+)T+K#SI;LuKXrln~5&PvXuO)j8}2!)dD5z2zum%;sWZjb|sE6toJ>DuREgs zu;AHvORMBZv!87eR3bJIyn`%-28bLo>{g|{Y`k)k8R?69Z&Xwh?xKc}Bw% zOadM6!{N|Yt*l}S{E)QnUde}S=s5KKCGWV^6T*gd({p|C{LG`z3j-pTOLb~!nY|;~ zn$?m`H0P4>x?V`p;u@!3Q|qOXIX{CjKYbRUEjL>2Cf$fcCUtwyC%Tzy6{#KAXZ92O z|9Rz~+%WN#B{OxDwxhf5JWUqr;N?*)H9KIG*h!;x}4M3UkQcIH6w-0ad{HyTHH z7tfMT?eO^8>hW$O9A;fw)HNS!=EGbinWE~yR(jyi;SR}BR8JBDA+MSYVd~Z%1dP;$ z&6fGVZPan6VC<#wxTL#l43r+t0~*GVM7e|C^Oez_B>;iT$55 zN6LXlaV#N~F&msH;-;_kBo;vyoW9tewYteCdGdKXYxqk*h^I4N0+FmI?WE)-#k4q(E*mqm9!20 zDr3sAP71h>#=kgDySW^^Vwmhd4hVC=;Nl=TC}nzmEC2of#uL6usiLWYrbbn=3{^}% zOxI=hx(gcx0R=;@gISBRwqN+5dDtx7dVDVR0T{#pi;lkV=n*?0!82wo+AE9oeYgh2 zBQ5MB6AE(6y$1<;UKB*>&`k3TFsZa~LosnFL_uPO7MWo`^J&~nkYNK*Ywp?BDVrcR z9;?kr+wG8Gz04)iWF?>#x6mBoWg9x6e&(FeR4Wu{hw=W!&Th@>)d`kiuLoZjT!7?$ z0Rb$qxb?E}MNg|$LYI+q8U4%uL@6d6HuxHt(!jXN^`>~TvmLUqC;-q{T5f3T*_{re zqj#UcvcJ)!iUg*5AW(u^SAU}<*AbVGX-DQX?wJxTY4+BLEvXW3in2P9zlW^VCFdCKbnIedD&-=ZhOzGXW zJSOF3WnyW#)fdDf1u;}Tb_IG0pnqi#Q znZ*p}wS^O!cCi`UHfl=Dm@S42ZD1EGz3GzR&E34>{lY%Kd_S=Kv?h)3qhz+^gLla1 ztETe~@9$?X7J9hz5FG@`VKKK#NDU`gxcTpbQ5zWV+pP7i`7|y!buWgY3 z%BH1pRZV7rfv@H`sJa3(s&{NZNT&ce7g>T;3QWuU9wEp)+D^4Jh$&-K9lhwB|e znc8!TQ#UmJ#*N@b$J)=A9X>QR&ph;HLMHOZCN%unQULS>hy389p2Cd2)lIE*M-|&g z4#|ii@eYZ0^9$XyS8zYdHYQd|ZJ9<(9c2xX2ekZczOv zVN;*MS;2{_Fz03^x!)g9JP_CI+Z-KB6i5U`cwz$5rH;ZqW1k+abDr%Drd~--h0Uo6 zq~YTDG1oD?w%LJ5!57hOG1Tt(LPstxl)z2vv~KRizf_xsm#ruh-Cp2)tiX)}9Uti> zJ>Hviu)q|NDo>E(W}9EKtCSrq%ajV@{DLq)gX%5Gkp78Mj=&D%w#b-G8pH5N-@7W( zf0E;OOp|H>mF;c20An1V+)3=D(nxc9S6K9|SM2riUeMUy4)^R8U^qvGu`y<@CWP~# z`n%yT>@PT*k9%6}b?r*#ZXGwR*$H%GVs%p3brY_yhF7&*uE8HGb^8lFuHXmu1+a(j z>(=Mx?^&gj@-n2;Pw+&13povK+47^sVv+~ z<0@77RWD0*ZNH*<35Xo*P#>H=Ug&a4=UVtGOdjKZlKBvhH=zF5__L^T`}WM%(JQv7 zNW&?{&5*P07}Yn5c-VDD&~c0HXWESM%CyZx+rCQ;ia=_@+4Rn#Ku|5^8e^?Z)qj&O zWU)p9#8JotTEflo$)m7%kA^5kuV}$DcHLEfF!4 zfakeK*lJx{U)|!Ak^A80wI&>gQ=`y*(GIcGwbg`YoNM{bSI-as11A3<94H)N>vB4; zPKJ(BnHHmTcOe!#JrB+6TlIlwU{!=$OQUVQ%9kOdZ8$3dd=8^X{s;Rm;|0>r9rp*; 
z7}(sIcpU4K4z*o7qX9k;L?!ix^VUcr?LPSQ%+?Qv%tbMr#pEJ9hEuUcMukxyvgxqy}Dn+EWxM^x#2jw_?Pn`iPvR*(o&P1% zcdJ3~6J^94A8Og2ejd9p6AJ`=GyFJKTs_a_6)){J9;iF|0d@Na>-fME{e?P~o4Hw5 z_n!jg{6ciY_UvpQHBr)Nb%~wz{`G-@J4^QG@}O!8O!)ggA^UlO`@#M}iG??pHzyT% z?E-kwZI1KV=Uyuvzwvp}teU}Rhpr5Z(MuE%`Ro1W;;XIsLx?Z$i z;S!hSADx&f#F8{`>au1Z9L>l@@L&D(QThpTgnLcPlr5+D~&E&)Nf6rgf^U)fo++(bjpv`<;aT=2S z6PZOUmrg(Hl>{XeR_4$7EM*#nkE)K#CZYW-9OS z1i5{o^M+2MzRr3@HImp* z{)iD8daN8C!ojFCRR|usee0hP-ccV5Ph28&DLzqiN{%f%roKNfZXZvuiQb)Hvo{-4$Svnh zisj)0gdy$uXQ{wz7qGhem;aSdggZpXMW#KJtI7I|a%@+1Tz}%0_l}u90E>UwVfAMGfAUMePkMO0)<}L1Db8>!U z*%caB+~cwl@&UTP&mK-cUc%~grSolmxrfej@_|3h0p+GGKcIzCNfvYz^6t8hRdX_= zQPcct5P$ZYAR)<>A}M2_UToZ{ypY-x!5wTy_%ofTT-nq_epp#Qek4~s(JVWLa8NVv zTg@X;r`M zZZfXSeGFLacGlevJZkGm|+by7j$phb2hzQ#7yOgQzN+#(IMp0+S zoxbDQj56w6e(&CkIdra}{gvS13DNQ0MBbkUj(s`CqDDu@=V8+woR<6i*{N^#<&@EL z-N@=b>bc{|K=zO&3+sIcuFHGVu_52bd)`*$96QJA%Q zcUs0UHdi(}8K}#}#))u<+=#4`%XyeDU>0a`{ucn=H{hWhgBamaHZxIT@vyNZqD3g^ z6|l`dV|L(koApyH7r)!9C_XB)?e3qZ(u#%RfJIzYqUYb!5H#j<>sd=b>+k>Ko`xs* z3ksVW=VMS0L@`}t+PBG9S0c?35UMm^cum^|nuzMi*W&8Kyt=?CEfS9?4}*VtUO4O3N0 z{%Abgo`AkjEg<;9;bv*h>nR({ko&U+f+MUkiu^PL~vWop-3)?-X62t$i=7vOTZB+h9UB ztQeKZx$?BJ+f&;(4ceXg50SnLj79^tMhMQ+Qn}B!@-U}8iMHLT2xsF;Etk@bopQh_ z_cMX~98AH4>5uF;dAf2_KfVPD@d}s6uYrJTFfiMiO7Rx&qg!y$Ge z6MC#J>?A*`)p8Bic~Vaipsx|6*SX$n!`H&uE%%z>-n=a!sHu947Fxi(+DIf;HB4sq z+R#M#gf*cQ^{nd1&6kIvy7NBxBhddWnF$u)#sR{)5(5y<3;$p>w~}pgATNi8{oxjO zLOi$nZ8IQY(?dcA`rR8kCZ6y0DJdP0aR~2Xa0{QSeD(fD+IuzUrIT2rQL{;}UBd>> zb0c3hqczdb`Se7$9eAUsGyTVYq;Fy(rUGwEKE-yE_t?>s6lIkxnbX}it!CWXgII4l z=OHR~NxrYM*$;9Pa=}uK$w}C}$2IZ^8z!RuMxdXff%HHqj(h;+$Pi$&u7oI?I^i;u zP_Rq(c0r>c;>^cIV1;!{nj8%$Tju(ZK9#Hx{c3tT-kktE?w<)86eNy!^qK}L$f83{ zUYjIp0mTPSwXNAwPEFazdD-Q+{SQAD=F8b%*oeaV1%W8%VMSRSmn3FKkWG;1MBuaM zc2W8ZX+zBuhm@p32?Tn|XPh*5620Og-LagMk!`zS{oNJeRAmE^4)yB_2LxWWWeg~h%fS+fH8vQyEWuq~{dDuh3;k@^2Up5T~ zM;jGDSR+bE9gV|e(KMqSKX|0m41e%-uKbxjrUt+b#l^|RJ#e&MG6{8PU&|`EsuAY^ z72A;x6aHy*i-JIX>CA}U3$@Kc*$&k$SFL~{N5{7N3<~7r=Jr*fz;d8-11_`0MPP`E_0yYA18%|h!UB82< zpREcnO;Fy!Y^o6N8j^oPIQ8}k=HgU$^lD>Qi=5{N?;6HJt(o^LFSK2*thtnv^?zGC z1$H2+)wot#w?#5VwAHU@(|b{s}0;)>Y;?C3xU?=dLJ_sF!6=&-}G^tX0&~ zK>gelDEuQDc9sb~JbBvRV|MHz?bi0ne!ndpuk5a0-VfHJ=akh|o+F~lWkgK27HWg;AN1c# zXq%^nhULZG#}aJHKC`X}7+4vZXb+9!<$KY%>Zebc8>^D|K_5Z(JYN2SJH)Q!r=ura zJhnOhFh#6kwedPt{|t>pS3jP}I^9DKaZiY`ImSjcPN}+sv5-n>NDE;k@rbd1Wgbbmy{Yg2!uU zY5)*adr}PTv-XV&pt?qXDp>-geW4qi?ZXRv`m+P%fPp;C zom&3J^6}T458$b#R_tTcyv3C;I<#va60ft2uBg$o^Udm^UkiF_mix$e{IW^pIx0thhxgrXL9BY`iFF!AJcF_N1v|<2s--;aZ-c5nbSKzH#q-s_ z>C?mVX14Mbfvx%eHDTB)1Qh~102(y_N>ULPWyY85>X$P=P!|_Q_Z4-nWK)PEeTsUG z@Vb{!#Y&bS&;^%wg(7#yNPOn5H*X*C#)VKBG}leambwn3;#BpoTRTh(u-XEuOR_wIaOonM_~Gx)6H)1|@@X+;?)_xeZ{OKiR7gy$H?Y$bKdu-}P0how)-2Y|lvUy$h}YOK=xV4N--w zBc6%d1(_$psCPL&;a5!aTM8b}7*zL0CoplXpu?@@Ia8+33r zRU2-|IqCaU1Re&piscYpnAr+ssxNd=NS8Q(6ljo=| zgf@v|_;MX}8kZ%p7ZDszdi^Z2fBWH+f3PU7NNP8fb(6@LD3cS-M47oL0YkbtX=Z(S zR4@^DfP|)eX?{kBg;ENN&wcspRMU6$icmS!4g)I7XiG^0#4J}V&+XZP(894lyj9P! 
z;nceNTBlLu>f)e4QmsK~P0c5eOA7p@#pmO9XD+<#1-^@X!@}B2WYk^6S5z=_6coyn z{mP1%jcjNVT>la12Q!)1KHKf6cT+SiO#g+&{>6k4MX)z2uGy-qVo?jWfX*$TVhI9j ze*xrcX4Y{)2IHvR5X3*bxH|=;0#eo9w^e~XJG!X-{r!*LP~;vSydL|pV)1~Am7Rbz z+?{=Ue%~h8!EiKjO&S~_6l~{0oonM&%07^NAtwMHgaSmwwJq=Z5M91CIK!2EN)rv* zwZeLD2i?xfWolsCcueboFMbP93?$XYn*WO!7u@e}PzAz)fc%a6d6`d?rvAqIm4UAI z8F*ERe@A#y?_kkoHE7HmRpIwwMgWBueyK6h`Auoy=Pb6+j|dPe3@tQe*(`LgUU4#P zAT;doRgbD;gjUe+wzYn~wDEOlPzp*~jwwqYF>^>^4&$FbXxZb;=`{ z-bDq(Lk`I2?}vK%F~gIbr*%qvMM}P+uOEay%RN-&{qVf?fPJ0nB(F_8teBlR$9F_l zNBlNS{Q1{11e7WTcbQZEh7T@a8U3EfjEy;~zUM6ykcc|!Wpep3IJZfzX7o1;=X2Ur zTR5O)KgD^gUE;IGG5p+pBAVU-V_yf8I}DM>#Y$I3Yuwqk&;I~A1A+%|L`)k`v^E+F zkL?PzjhmF6d0qQjO_MfNVs0=LTBxzkPhJ_RT)?;}^*^Hw%^Q3Ni>{Z@GOA*+h z3k+FJA&*6(7I}M!*TQ5mlP-K&4AqC7vVx1`!Ko1PL#5vZBE*6QU{_=mC2Bh59`KS! z4CNfdie(zkb<*dTZk*21GO&cYM);iV>ZdJuj`HU-C5O<1DRAg|vwIYfFOWYFVs3uT z`aGU4X{oe1J?smV)g9LAX?gaJ2OZn)RNPnOR%i3Y(i;=#qAI^pLBCc-lZ@nT)Ra5O zcG4~TcvH*bl}{KgL!s==_|0}%9pV243VJEP+ZD@E0l(~<#7${9`}U2@+%x*tj{Y8u zHUEaCcP%bNoFpzO3z-}))9%dYS_x->1HM6&b`9QVpeyA%!1a*8D$U*Su}M9SmQ4`V zMhK8~`xRR{&OyHr@L}?H;`d?_1HOQTC55?XVGQFQ(Jsf9`>#t*zm`*H8U-5wABjpM zAT@pm+Xn~u(G z&_cpZ0-3+bua9l_6`#K1Iz&jzMc+_#OaTyOdbY$In$KWuA@zK>x2w-K&-b0HGH}T* zMg9&lHY=Y(-3pxu|KQcQV(4pQOyL~U7nccAsle^N*ghg@V`{rrNtGPcFU_yVo(k=( zA_x}b^5Y(BynNr{7SB(PXoz%!|DHiAW`KC#Ujnmsnsm{;-)laFOkbY)^ny-X~Z4pAL0e8V63SXr%$kvYwyio)U2 z>F!tVoUu5yc?kXi{?RAZ)pmy|(v#ZtH)e(&d?KO0@YCOXIrZ#Y^R9pY4W`8S_~jw& z4O>qd5XcsY-G_&CWA2imQ{=Y>1eLlpeVN7GxI;OuNjt)K|H6@^3pc@+hqUY*Cf04P zUJZZ!BJ!^#R158u(_76>1Dc1nPs>g(t0fd2sW=+7u=-V<2%F zsX&mFV|Dz@xhqQQ@9%a0A09joVwG=VfrI_DvAh8ok63K5-*EDQAnuH^ z+cU3~QX<0x&wDYQ`J5diMJ%l>fQ}MWY3+)AKDY4h0&9{w zrqh!v`$O_ClQ+RBnj2%UGBZn5{fNa@t)dyleu(*R7r@Q{CHESyaahxWBT;HUU8hwI zsL+1e+{U%3Pgx)3@}Y}0!X^V#NAPy}_SoWjr+-lNR$O2?jQUd@;4|J=XeAl!Y}LM4 zE0-T=&k>IC;avqbz^kEFDZt$OkJDPXf9bNQyW@1349qRmc*Bm{!Eew6I*VnUzhVoaq zi%*jIJ<%CWgScj*RD1nH>l3iL=T;A9vB zr0|Y<9XmDgm^RRBlhhV{)@x_B0J$Ofb6OwHMq86e(I=0a#If%;N)BOMO}j4SJ=B6R z%CE|M;rT@*a8y+6eH66?)%shE`w&Rd{>=jvo2a*jC2ITc@#0ooyTG}Hd*1qgdd7(W zt^8uubU;6FPB(cDE{t|$jRUL`5HwO(AHTDs6CFh_6DVZJ=v`+NLDZ; z3d)Ahu+S_(r+7_wDsZYTL&Qh*tW%J5;>+WxXEVsK5WgVS4@E+R;ShtjASM>DXA|6` zEL!}GCrn%smiBsi$xWQa;#F?AF+IPrx#uQhX2JD+>u2-#E}kF#_(j9k-duRP`%dUl z?4-HgpUaK6N1T{+ey$0E|}zHU^R6+InX3c>i&VK#0f{dVp_V{~*SbDmn5JMTDATdGQ(Jt)-$f%%~EiZYjV4d=VIIQvJs)|aQQxUCXr$hCGQ zXImu`-<`S`^{UnMPKQUWiQ`+n>1{qOg^dK#hhDWJ5U9WZnBbqV13SN2`~?=D5Ze-xVBY6udf4_Mso<-g!93vSpR5XH%;v6BGzFAW+`==gwEqm+>9TEXsk2I?g#{oI z=7KHQV?x0G5Yo)&O}%Y=nizLb^w9oL0qyko5xR{}n?Tv*?k*C7MC?DG$a7Au(b@n* zl&lqd_1P%5 zR@!UP$6tKyb_Cp(xlpG5Y$BL+RiN~lW3vlbbqs4*w;^#``1o#Rty5udo--iGQy^on z77A}*hsuy;#f8)L8n94FYMp07D4HhQSe_7R&7==}T46EA#Q%CW#5vJnIODsAq&5ML z7VYX#1{*iEDQ$Z^5I3%PPH>FsR?+)!eXjjHtT4+qhG$S~c@VuUsaV#T@?eof^1|~$ z^gtlwI<=DibRGSf0BcC^$RK@rcpX4t% zBXm6Jn3H_m+NX6;TzC-AOI|#E=-v(1ZQqnp1+U?-`T3ijUN;>R1(ot^ofsZW2;12!h^vqwI~)r|X?d=s1fXkClmF zf5S}#hl4-J$mU_}oqH@=A;o5i7WX(&3AX~sYhdrZ=!|C=1@Rypw10^G<~`qk0GdZp z3aF^3la|pw4T`_R2S=IJ(!S?0$zOQZ{E&uS8au>$C&0?N65ERVDZT(sHzf z7oe<`jW%O}7)*j_2nG}thL9nbud7)1a!q+Zg(6Rz?iF^Ut?Q=p8VVoFWWD^@SFJBN zgEjIwcjxDImv#jEFXrN~E&xu)&0?Bu)(W{`1~oOk2h-(~$%J{HH~k<$`d2R(vzNBG zo*zd@Q&NIoOKQIktG#O9vYWc*+awp&@E7ZyyOzLPOE69%pToD`D7l<`O(8j`Y~9 z%~!18|CGRLr(f6XkoJ>W7Rdp+>5Z%4x~XKCsk5^%xK$RRl( zmFy#_j*W;+R1&tP6byXNR2FG1HC=Mcpo{nZ7CK;TvDJ}r>qc@NuTA#R|*qf~yVyapF)sJWKUHiU#d39h=Yv7$ppDw3!o3q^Tqwg>fel5ye9YL7a(xo1&?lSlGZ`0*cCrTdmaa%Hs}4s*>6L} zH{HpT0tIE;t2kch(biy6oUBJh3{88rjU$iOyBY_XzR4m*Q#4g_J-p?r$yz(hUg9;D zguB6~PVVj>H*6c%#s{W&dVfKII1{FDB^4eTZno0FJYq2SFE{)azjReV+DbjCw*oW! 
zhF)Z}t2Q2PoX_g`nrsz9RF6&annE%P#)0?k=cImNFD4CgG8%iA9#MsR@9~=R{;arx zQN$0UfoHK@+xg^3_*uQQr9Ua(oG{>gw^fi`(<|Y+)}jkbRknOFr`DvyZT8hd_J74#JN*+p9e)3N`WGtX-;M~ycP|nFokBu> zKhh5aLob;88X*2XLuv8mY)v>C)jB9-*O873Ych8Z4>j82E(9DTOf_pTfx=0lU--Ri z_*d1wN;}T@GlRN%A%lWZFAVA-sDKi--ZC#mz$hUB^{m5mYIOjshH`jg~2~u%QanHhi_x1UpIFy;b64IwWbyGzfj4@!(PEbUn-gj?? znZpmx{D;X9q2yIMZar0Z{&Lx^H^DzB;32&ig%U$yJn5)s-*f88U()vLR8Q^-vX_0# zQrei$|HISTfQpM2BgQLW8}`sqbc7t@@_LH)w?Z?L`Di$;2GVPZ$=(a>Kx)y#AaOyj zB%0tUFr_-s!6_amcKIV8uvLOT2yfFKdjz9?B*%n?j9gW;!ChUlyY5a>>FHmNkIbG; zCKF8_gx{Q%x{!y#Ba!fd^A-l_5y|)!9vLRVKI`|v0666l4VS*h#2gm=8aM>ijx+bO zy}JW8mNQH3#iZAd6}`5H0xZN=Eva>{!k`v-^2d0TG(g#ePH$c+&<8WcTf^e$AKhRT z7id-R?aB7;;sb*P(wsjym5ciOva)dj5uLAohZVqJ{C;C2J#nxv|e=fpQSI+cgc0U7t>Y!qh%iq%7m_W_}LrWo7WQL_dLE^ zylFXSXwH=&c^tspl!__%x{ouZMaLOEAu_eeh_% z8Ar12mH5?ft3i6UuA}XS!9J03?w)=2hQm<`JH1cWBMLb>V}8-Yf)N;3vE{VxW5lU` z>%*mMzmU#vZATXaBopZ)vYk*Bg9za>K5r)pA0Ok-ugzJVeB7iYDhMbtG%%n%o%^bA zYFq-s#li`!LNt17^WvKNWI@)w(NF%9HMz~6W*0{Qci{zQ>m^9&*ariFY_%)x~Qm{kVeY0u~Iz{9+RE;Di826q#!*9 z_E4*z*wtwQO9-{YtP!i@4lx(=ngC>@fP4~-z&D5RSJn6asBr*vb|&bQraA}C>NLl~ z2Bo^!?57J2$L#}m1Rh<-!NJ+*`g(XEU0lP*FD+3fipQI7s@>Lu>L>SWU1o&Eb-3AI zyLrI+-x*FoF33$sg@h=NnPz>M{85T8P z4{7E$ffFV!7vjoj%iMq)#0Ngy~Cg)Yt$?iag6cD^CIOp{ntzSUX8y%^`jJI4CIS=$Y>s-lN6!9=L8P(`VEJ z&E^kzxkq*hyE3~;d#Jymrmw0|1!((r*61NT(MKehME9TEjV|PGE|eH!+U+yh(s6)> z>EQ+j=2%%%*vbb!o+^)^!bSTt)Bqs??P|DA1P1c{W^Evt#*YMzLg$v^=*DN;PU9;H zGbk5P17EF5{?60H?M;LNQ=rP1C8-vvBZ>nDs zFBn?<77J6Epa_ml>OX^Nv_h&%?g}dXipGn%o#mv&u{x0js64_s@>E|3e{yc;E*)vXELV$A3evS7oOL>Gl z5+4l>&4$g@vw1+q;ojHV%{(7?y%$d5r)%~+3PW}>M!*HyQMdTbid!l!99#2LJw{gA zYXy#MRV}!cV9`p zC+OCqqs?&anr&sYiSs_Uikk(r4;*y%WA;_wD6$GI zGLms`ka<1dn7VAJ>Y2gEr+oXz{J{nIEvLo&M#Tzl$5<}d(}ep)5h`7iuXbZKnR%uT zxuHpAI(}fY4-2O6E^@T@>1`l{gC}r)_Almt>0kVlK)`2n?y+!Wi=P2U9`OvAr;bC~ zRy!r8Ycn&eoZS&$0~fCtP{VH&)W+v-6#6%cZ0uDS3LN@fp7sxN?3knfk+7%IBFyOm1Oo+F}C&_94h-Uu=NkS`@Rx$IlnCC8@I&kF{zgiDpJ&of%01YL` zN?g{&81$Bw&kJ4TCJ!9yNv#R0DlW`{8q5iuL(KO%&?1z`x1J{GeFBaWf9*%5a!zmm z;uLgIfHsUrWa`eeA%|4D2epj*l&UWc@Bn#1@Mr%X4J13^s4j)zPx*b*niH5b%uJr# z8LhV;of|<-46Fj3&BO1r34m&3;{U4=snidb2}$!7@^vw)ff50v4xd9T4$um-S+AS` zur~Iz_@>YB4Y~NAo&h$bgG|YfIWuyIRA{pSK-($+Nu&E0y?Z_3*TeI}J^$qbJON!u zaoN$xgrZcQ5=?_(&L$EuXGXoBehgx3}kFi(?=t>Urggidmp_jll*o=7JR%@v`V$iOY3iFFgQJ{v)df zbXrHDmnvbi{E8WB;nYWP2xppjim#p-Vg@dN6sm+)E)5Uh_jC~eYc7xl z#Ge$>-0aMV_qNC=-}k)$Nn5ZR0LlEpZ;%9iq=kFcjM)l`Fd)JDBP^sty@EqC`|R!o zWpLG%TLwsRD?mCpo{M+@6&AdTMKDrnC9bowl%V9vn!dO3v^d{q0M;1HSlk@A7322k zJyxpHf`HP!DWCYc7PY*H1TZ1W{WY@umsO%A0CiwnWvDQZ(}3Xh@d-%U@3N?Z z^l?D1=O4AZK&iFR`pU(|>HGo$_%T)%&|JTBZ+%<_g*TpI{q5NTB22mk!bgE79EwPX znl}Yfl4}|<$5qN^fYRk77!GK4UkQGMi%1&3auyl?+gFWozqAS)k0}eBuYFQ|;So7Q z1$lhrjVW2Od)Dq?WISPxuX1ibT}bVXIoG3Zz(xVK{dvXdx-S-BPHSpZE}xannk`Xh zH=!b@vDQE~ye#l8i=tS4RdFA@!HGv8qwqai4o0STu5)*J*wP)C$w*&&KOa6S%hMtA z7;+jK=k^)5_;-9c=7L@ZRQ-}XfC7;6DlOkrm3&yX*|e9nlvfZ?p7Qe#;=siI*(;RC z24}l_&CC0oHnTmNCN5LL(0||5By!SuR?>;zzUvG z(OF@P)nR2WoTCHl+`d4d;sCm0x+jZMH3=NA&eAd(p0`j-3aCLp_ua&_F?gKkI{b#BI zOyly6jS~Of?v_V{Ce%eC?&oI;3IG4!Z@q;D2_UK><7<{a_Yc8y{C9;UF#x~ITn(*0uQNzG z(BG&CYkHzHUU{@7r}*iPj6IR=opU?Uz^iz4?&J_(&+(c}1|Xe#1#d99EPY>;9$18- ze_MQ(L|>sEq>VbuN0ALhyX0t4(H#8*}zpTr}v<7!9pQW^O=%&pQEn3 zWR?}vQQ8>IJnFgZ88Qn*TX^>;h3&h_|Hs)|$3@v~@53?6N(k=bF2gL^;J?A~&_rLkTJ+t?!Ypr$dwP%1T#x5Abi+u|l ziA0$@UGWER42=K>_+pmY*}NbZLcRC@!FlxU0U%{}ifi(l1I3as73&?_KDx>NFq_in zi@wf&*37#Q?z>-+uJljxF3`h^o$PAl3V^_`lOTo({g4nQCu}Ke+6%ADCkMd;mT@%~ zLdBz9a{F(d3g&tYxa#Xk!j|7)iW{8yUX(EG$x&AI81x3t)O(-KJQ=MA+iN#C1+Vz@ zu77=9=9>(2Ia=>7GALbEePW__V9m?_Az~(ofrw#W 
z4ZVA%m?!Q_1bFVUnupC+5@Fl9afDxNw8HT9uGrT_X}Z%iw?Yr#dp+ZNFF;p{5?xgm zPn-4*wc^ts=ePunOzdx&U`&b6I`fcK$aC5M9yPQ;Q?wjlu1J0r^SI=6mj*(_TF}pn z^$eNap1Qboy}$bv@rs}R#74TD`uTU!H}LSy(6N3b3Sj+9c0_RJnJ@QERvty(#jA$@ zEhu0n>XU(4X3!$zx0%S;i`l%w?CnTrPAXRFWsO1geQwm*s~-B(hFv7UT*_ItT{|+v zglfLUk2487pP#(6y?4t`PNdiV-?KdbLikEaz)y3>7+S6hn(zRe`H85S%y)D5#;5Li zx$J75A<=b<;J=W2`EH;GK9Q5a9DC!>CTR%-OPR*VdY)COU$M#@q@|_1Q73KwV{WQN{%eZLx#D{9A1NOAo*_>SG|aR649+U*+JT$&C3=24DN7-9G23v!$zc zMJ9^QHNV3cgz~36_^8lEC}~^U^0$8Kdftnj)!Y24z-q*Pr{=FFuzy4bWOoq)EHe4o zSnH}qbhkjT37Rb9bKZDz8{@O${*pB{;$&HMVB>{?$ib z14DdG`->mYr^Yrx_s`CtV>t2VQDj#==f~q#Z^5&6{jU_Wl>#prC3e{56L+8{mob?v za6A1(I&l7} z2{LSdlJ_)RC3Tp-|1eufKP&UVZ~r>Z`cF;@u|&e=zd5XN3&vCYqbVo|YNCVRA!Lf& zXxqh(wr`yG*8yRUPSD|tPq4-#UFnStr^SylFhb@h@lI3Ta8Ztt7vb{Ilel{7kK>_=TyM+2g)vK^}95eJjDbx z3_ZL3o~^~}xc;v(0$h{|nfzi)aHXJuBp5dQOXNHCNFgFfK5uqYJ&%Iog**Y;-H$9@xLpg?*RZ9jz zgKH5fa^!u_-ffB2AUfZlE&qLN1r}!Y%m7|hI%aw&CPn=?*6Pc-3=W~a|1~(+Va4A^ z_mO`%VZcUrQOxdgQnYOL+>po!J%Vv)yQ1Sa#Q#VkP<@JEK^}R#4M~66f4uUdM+2gR zKgaU|Cnm-MEC0J5^05Iu(D@{pUWE@}n(>p1p+!{ti+ja;Z3rf+Zlwa6{{J}b&j17F zE}Mg4?Eu{!YMYw}tvRrzbPq31eD8ao9zNWKon`taF%p%~UN!U;_y6%peXzp{y9Ubp zzfDqzJ^C>`xp6Rd<6xY9BC@9Bue*ps!A1iL-@DW{-3MZDu=oS61i^Z#&O#YiDk?|D zwmq%iFRNUIf(zf8An%)!Y03O8nS&$Lr=O$k8jijiS#*3U;uvthnnLq|jo9A?i0cag z%7{=L-Y^9?>V$h`2m=vnXgzQTi=RQ@-sAt6D=s!tp9D`6So{3uOc6$3@OE>k}~hGxv^5e(#Xhd zp4P|ur(^wkC%_r?V5Fx2X_-Xyx(JtXn$i_Y;=e4Y4qFgc1@E^79g%LDecmcQ346v! zoz3O5A%})vmczvGJGj8yr%wUIjBo1)!-j#D-0q<(I;dlZ~L`_~?q~fpJ z1Tmj{?EXHChSv&zbwfWshD$CXPZ9W9G9TbW7T2Nz{(Ryuf1%3&_h9S2HnwMtxtad0kBW z@oMAFqm!cLv9Fdmf0a!!A#~s@sZmk94ex&*Lj7wzU?3UXx3ILDl}=Th>Lmkpul+Q-Ai1Nt~0j;s2)vaSEcRi0`P`>;Mhy8JGHmdy`0s0fx;R$N{G8uSP# zN;i%J5dz487<5{BdzHuhGh%=KMHhB-rH(0zcXe(R7*A1RBy`r)-U_^{Ti)1Hmcuy7WLv$G!*M!KqIVymcn z3ZlbvmGh6UrudfgA+`By&b`$O%8N1Xi-HBBrje(6{5-2oIuo|rf}GdGYj^8B4%q7s zHxXhvsxGgw09CCRn6K7l?^~BkimLg~BHs|Ze2Mq^r^GKt$dBL05>BzmqZj>ga1JC# z1(0oT@H|?Uldpk7fb1`(4Vbh#iFf2bliy%mnWtC02NUl^DdGI7cr%^G9h&f>*R3_< zKjvP3TG|kc!y@5(nME7wt?Zt2CL4Qh-YI8Yu9FFwsrx(&BRyPXxz50(JgxLE&)D^? 
zS9ML6xok^Pamr^L(R^Pe+g{hlJ8DWjV`5>6^29;KzfT|7`e*z{=n%J-v!u-530+8Y zK)AFARG&OpaPzEC-(aH@rs=-8dO=HklMUp;->Qy;EU?4*MxI~BsEgjmJl8QSlq+M} z6C_Lr8J5nee0w?ErBdM+2Gbn{4{M)JcAiC7M3edv>3A$;ot+K^GSHj}B{j(v{E-CI zOrUS__Fqm)nSXQn&Eh|T!Glsap}m*-Kywh zK+Ih#zLG6zwf4Yu%OBMPU1UAdix-Jc6WSYGPPeQpELJQ3y%uyB?3UzcDqHxc0uk{; zAPfFIHkiM;u3c(6o@s-y_@h^6aH-N*VJd{Wbq-fvA*#;9HH4UduI7`seEjuH@z4 zq=5MtL<6Ydid!gt8?T(8wQ@YmC?Y!ldVqY-=`f5?^{?Xb!Nfxeob=q(y(9pBx$S>= z0bR!f(!}wBJi=8QJe8PE);U#)ipyb)`l6HJJ*9U18D1avH)p&MqVmzSH}tHr@t_x& z^7Oyj4E;4HU96D+2DNMu(g@i8R?k7GP*zXo7n_U^CdZ1KH^!|U^?HchzXr~n>QrVh zc~Eb@v+cQsrE}Jr80qKk*>lIV@x>b*>(7DNI9Xa}I{37trcMA{q&*avkt$oKm9E6=2dH%+sbJoxu*-nI%%OEZ-{NDY6%bvKHT!#uxv24@ zu2M7ZvYZywCyIRI&3_CG63a6{v2GwB->Y1lZn$Kl{=1U{>rF5M3H4wj@z#})TY3VO zS&T2r{*>3v@8fV_1$8|T^slrt42ut6d^9?PsuBlrpp1B=-h~oyZ<-ETv6~Z z9O*j+#ob;x;~j##DmdLQ!E;`~1V>oe+w^&F$ynXCLv)<}H_0Fe$T7uF((}I)t;f^+ z9~K9?rgOq?SHcp?fcM$%jI(Q?q4S%rTAhXY_vW&FU%FKI4eJ4UR-UyGG%Y5h-@2?H zu3ZIBXycX)B|UI(w9Wjl?CY<^5)vVvP3go}1c!0m^_8qyjm~L#x8lzx>7`uqVDi)Y z-Do;LX+1#BJB}yZY46ivHZ4T|lkVwj0i_XmYU=GB?-Oy*IUEd0M^zukF*%RCY5M4R zI)5B+cDDJ|IFFld1oYdYe>2R5g<|Oe*D&6A^X;l@U|hIH%5@PH6@;cEd()-{I+Xac z^^ZOQj85are~UZlIqMdIEm(;Ukb|+(gE`T;h!4vJtoGE!TiUkMA(u& zsLblj(`7zBkSkCBP&o7!PWfq^Ym<(W2+)#)(Qx^+2~}^*8FnD^(o-n`?DZ$*b2&Rk8f`bTMuI@`>#j z?*iN*ZPp7t0Cx^@J{4h~t$qDMh+cOdXw1_H2|0HC{#%NK`9>c-$UXdauFFhOqwdu9 zX}G6nDyH^#Z!t%dmt21W)fI34g^p2F`EDBw?;Oc~r@ay?F5}T6PA$0LQa$~7_erpmx^-5#d_a5}VhrbX(;Ai}K`tAeRcs|;@PO1Vd$jD4Ud$BKk)sJniP z{QJA9)hVV|#Jv(%|Aq#ux9}1BS|VZOm~T4fy@wi) z{QRnN8Xxfp${(tSu46l$2+1Nf)qNkLpg%i&Rj3Jk>e+!;c{ef$-YrK~z2^ zQXG)|kzT%{K-Ravit0btzVes)7=T{zxLU;kUoLr}2&wQ(MGzmj@>gET`?fl)7=nEI)rT8=o~a&GWjD7xtuSrsOl&}ypV)08;+@_*)#2H1pqe;OjsG)h zC5bm}(_%5%Vh-MlIBuKrJBmi#lmB0DXjH*hqoh|I$n9N`^sOcgksq#F8QL>t>y+*1 z$5@_i0=I{6(es=&q~Ra+JO1e@m`K%I(Q>zp^SJC42x<)0^l|NL!~WdsFC|=gv2+t4 z4Y~A*l&;ooQ25LE*?V44(eg9dqXnK%w;iKta2IbzJ(AM6_><3m-W4OZZDE8fue=WgzN7tAPQ3y;tYDfMoT^&L=l09iznqlnS?UJ(M>Pc=*TWp`nP?%tX}NTb6F@ zZfpD8W8ao2jPFqo;6@qz;WK{&b^^M-ui7oYM<*6tnsE)$jO#oG0ic6`7Gz~4hrYTQZM~w5sq|a(++QzjPcSm|sUmUf?8a<{ zp2yd)L*LAmZ@8T|qoQAf3kD-j5<9(!ye0Y8P~A^NG9p`eyP`wVdfS2BwfUa=N!;q5 zYeDRU*>Feewr%nIJt;Ong1{cGl?Qy((L?m=r1wK+=@qLXTVRQ6FCYB;8kqPR93tcz2;J*{ zcxe+45kLwsVOt4$s-C++i=06|Vi3>fk9=fynm9+OZy>reS|~%ef9d3>wztE) zkORy-%Cv69Yy>=RNRkMfABiXvMQxJ!KZ>UY-qx0%-;t}Kc70V2B~sN?pflW~P*haM zN7f#;LVYu4jZUouO1_@VGRBXad8Rv(A9O3IL%vMC{ccMvJIV)oJq#Qi^}1XN$)~cZ zVO}K(T08l;B^w^+aqUU_cA-?}h$+61@1z@_0zb(4KP+hHW_5cM8xZh$jn9o_2;l>l z`M?VZ2Q+h;LXlOCPD?cxY1liy=(V3@_ePmym+?8>-B|J?S5yQziiD9yZ8Ypx8-YPj zEH~ypT|LWM_w~$HXfK_IcuXVc3!cQs+j}-5-dw$Bn zQ2Ej#>9vF2ZVPG&7a`pnp;6~7;;~BrkiSm?|>@5 zTXmH>6s4b5TMDFhzYWqPz6Nzd@@CvtNu=yrCbh(M(s)>SoKeSFlD9*v8#yr$>RO8) z6;u#_*y1F^NYlsf~@H4)@Tr!?#rI1=gFQbQ#;N2W!Sf0b}Os;WGI2H=03Pb zcfE~)MXEOjqURan1G!x|8m81+&?fX7X9AZpLLnGWP2bVf9Y9&PhfKN2B92EQ%S8WE zaQ#P>5l4Zy73G`XM2Q>PRt)8&-!jTlFhMkk_6i76Qi%ve4pN&XYG0h~G5C(aS=(bnfB@-LhH4Y6~lY|7H9gWT9#H~dNhH|3lXk5HrTmvi05o@ZYEIy6jjVG`(ObX14f7pnp<9cE- z!g+HG3nEVLVbtsesl}4tqKqxi3)8d@VF$@(m~T#`9ty8`MK*9!e0^@vvSXlgG+s2TJHG3s@FuYqkMyeQ=TpX&9n zVZZ~L`kUiH1TdoBz8s6=wf`2TsI*%*7#%5l`+MUfMi-+*Qqo73xqCc*{rpfMMWmD_ zX|}euEp&Rhhen?(A*xe@a|zcX1r^vBpmd9Y9o10pM}kc@8i_lP8;<;{{(Rp@kL7-8U~gjUOIQ<8~Oo(Pve^e zw?B^vRzLR&|G1DStQ>=^m-*QZ2@VdCT#oqUkc=?oKJ8+-QKA-MK)Xks^Ve=Ub^DuR z#+0oK4;s8%o<}E06QU#H!r?j@=hdJ~bQ2ycedVz*LM&$A@6cU-N=&i)qJUq0KtlpiE{fp*QmYv-m$&;w}AB8f2umK}_AF$t@z$B0$ z&TtFDTsmMr{7jsYIXGHm%>Rs%A=MsLBC}JH>uU8Q9(=9$0n&4_yBDT1h=6D#2F$;Y zYPTFOSL~F^qyJvU;J0Bmg@d`Bf3kc=N6!!%{!zxJt6II#=IhPq$I5pwv7!xxMTSss zz7ZGeRo)q)HD=HWS)Agf6k~8mEm5nhwaFNIZ{uBcP|47N;`hey{VZ3l=cr(`g2o*8 
zYlc>QLV|FNB9@)b=>)q2)b?kO&IcoN5p7q;Zy$f0ZIU|9tCYLlm5i3nIMkC-5s>W_aZ$V25J>L%$^iCcTuK@ zQ3CSS`ljF|sSzR5V@3W9wV(4X_yU;*+yu-X>Koi+Uz4(BAdkNfx^9?k^TZ`Sy}bQx zt>Uxw%yy;W`RmYnEO}-T0tEdnov-gg`KWEPJCH!==rerr6Iqa{UdzcbhF)_iH$Ghl zQ^-2ZASYPU1XZt;6Q=q5y(WNh?-tJ$OC3shEk>W1shYq8xr}%Nao#6EJjI8z zGTrvu#}cS%@%brteCWGvQ*I0mAI~LE4^`3D?)}+k4hs}5+wGZ#+b>zoSt_#Wa>pl^ z%Zd5FIEriYx{xjynrCK2Se|}udV=&$4Swa`-3ZUKSfcj&c7MZwm7Th?u^Z~FMfY<# zw4sdJYhkLdOZ~i+U)wvl!rxbWziu*lIEU?Kb5OZ)^Oj_8HIZs1@-~l}dDEbZs z>K}g|u`SlIW8V^ApVZKpf7xd7((bM+7*w!c)q>MPLK4y$y+37+5!!SUCekL55EL39 zr>}@QNbr{^v7U7|B4_C1DSu#0BQ_!TMPAkw}{91aEuBNTOX6B{OdX+E%2$MFnw6 zK&EQa^Ti2~XujebA0@>dE58sOW5$R%QcRpy6r!ZlI_ z;sXTGvj)ke3tI)7fG9F#nFg164|g4U3FcnVP=-G9m^pN}YS26^ zE6WS_?NFXEnriEM_aX3%x74BJq`!z!!gYjV%32>@vo7Tm1!a;o5Tgt0E%e*n(58Y= z@s-18pOm)?K6MFsK8~n!*7w^*71K6|9B!Ofn?2IXHq8PkgPV+Fhq>C$W2;7G9$Hf6 z(Q}C4C-f=#9C)g@BI0ll072e>ce!=c)frsS`dTI`+rM-zuk#Xotvv=(Ia9$Wlwd#l zh$#Pk;HJXn+f6}Z8|hDt(|bopR_+@auBn`+t4k{^F{tP~XHrhoAsHe9HIOH` zLy@D0i&aFhqYGQzxGIml&xoVeVsaFV)R;8pJMe&SIn>P??KtX4b2a>wB#P^lmaeXk zMDA;x{Ctrjd_?&EF1yfDFXHyBO=SJUJCUbGP5y_bX$ux4_wRsNAQ1R3$~+3O9=F~d z&>!8AY1eA{R)t4!uAqQ1=}a3k#C4P~v`x;6h|L1GzEd%ERPa@J!2^|=1XeT6BQ8Y^ zQLLSPXCHKumy8#*0w_TJe0IwzyJ zoVFEICrS;3E6-Q@Zk4tjnccL5z6=2IZhfX_<0j;W(~{Ps?=FtV=oj&~r=|332FEOJ z3S9&F2tlY8H5y4n98l2c)0wzmdaP|#p&&?OvYrA7QZy(haCz^5@&eEjG4tap$o4po zzLaDnW+vb%He+ICsLNA)$_e*zK~ez98)7uXr3e0J4D0+4sDT<*3uVo&yX|flM2Q^j zF3)vk-2&Yk#9{DYkT`LOkLMUdK|S|j>+3z~WX`4ImxV`|_bsVKoYOLa@-BcTY=^SN5Y92~T%z5NTW#Bzi=N-s1B=0YtF?jW13} z+q5JdxJ?8}F0y>L_P>3UD0~~(mnx!KFZz)t^MIxBO@v%t9bsA`{m1=FRxr8#S~H0I z!VDDmFz`u@CTgHfb-r-%=`puOIKmjN7!UBBwd8G8l?$I+*83LgH~k-eQj*$vd$+>; z3_l}>+OZ{vnm(NWvsD_;=~sqBKLS^+#X08e=n1-4tYKuL&tiv(rS7G5H0w0h2^V(u5D&OaNsOT+iIqUU!3J^Nk#s~EiG{f0lgi#*8)L{ps`x-jH_F%woq(K4X%UwhZV0>oQbaR&;hfegUE)#B)Q*9W98N1wg7n-7Q* z=SmMXszvYf5N{dZs}O6!CPNBTdmgaZ&27mxAfCIo{33Kt%nYy!A7HS#x$tj&9_xn@ z{(7IXP4$B_7_KkFS}t)aIQIxEuDT>2E}i1!EID*DN6col|I}5HOV-D82_|KvF~`*^ zZ7ZgYh{SQdBv^b?|Dp>|3>$0t&h58#C!gMLH_psqU>zH6IUH_=!f$UWcNwZ^<w$VNqO|qqQ38grM0i5`#`z|m&F8nHUCTGL!bRy)3$um44#|Y1 z$_y^#`s^5L3&qkpJu)vj*?K_~K^2rPUgrmSNh=t4FB{&kr^R{lOn5PG?6!kB;7vb!qY9%t;j5pxsW=GWAsux?h*6s>}a4VRrpWH>}@Hn=rYQY7`| zjT2Z650v`r&#};gh9Z}mM=Ei`?i5k4=AoAOL9{Uu_YjPvOM%uvN__b4fdce?v%yZ` zVOXY8aRDQ(9&WY<`FHPY;6|jUQ%#@^E7A<^*Y=jf1Pyzf=s?K|N$5jVdlFFD<<#5n zQB?Ik9R*tJxy+>hgp1HOF#ejm{7JVzkmXWd10Bpz!ETshwrg#zTn5W?ZLSSC*!zj& z5xw6$Q}4w+T3%qY7^ABC$m1NxSiPkCA4>6S;8_ZJY)fxVq>Yj6WKQ+-l za+CfmnlV*^bTxS8p%Qt-KK3TeMX9AhT#1kH!a23Xm6})Ml*AcAf!}8LwX9CQ#T+77 zc{`mJj2r}c+MQO?X*(`)&0!YmO!3UaKojJM}p3=t?